_id (stringlengths 64-64) | repository (stringlengths 6-84) | name (stringlengths 4-110) | content (stringlengths 0-248k) | license (null) | download_url (stringlengths 89-454) | language (stringclasses: 7 values) | comments (stringlengths 0-74.6k) | code (stringlengths 0-248k) |
---|---|---|---|---|---|---|---|---|
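The schema above describes one record per source file: `_id`, `repository`, `name`, the full file `content`, a `license` field (null throughout this preview), a `download_url`, the `language` (7 classes), plus derived `comments` and `code` fields extracted from the content. A minimal sketch of how such records could be loaded and inspected follows; the `pandas` calls are real, but the file name `code_files.parquet` is an assumption made for illustration, not something given by this preview:

```python
import pandas as pd

# Hypothetical local export of the rows shown below; the path is an assumption.
rows = pd.read_parquet("code_files.parquet")

# The header says `language` has 7 distinct classes; count records per class.
print(rows["language"].value_counts())

# Inspect one record using the column names from the schema above.
first = rows.iloc[0]
for column in ["_id", "repository", "name", "language", "download_url"]:
    print(f"{column}: {first[column]}")
print(first["content"][:200])  # `content` can run to ~248k characters
```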
2f41f6003eb92a4b43d6dc10c0cbfc9714d02e71eade7b450e8d864d0c9b4bdb | pedestal/pedestal | project.clj | ; Copyright 2013 Relevance, Inc.
; Copyright 2014-2022 Cognitect, Inc.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 ( )
; which can be found in the file epl-v10.html at the root of this distribution.
;
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
;
; You must not remove this notice, or any other, from this software.
(defproject helloworld-metrics "0.5.1"
:description "Demonstration of metrics support"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.0"]
[io.pedestal/pedestal.service "0.5.5"]
;; Remove this line and uncomment one of the next lines to
;; use Immutant or Tomcat instead of Jetty:
[io.pedestal/pedestal.jetty "0.5.5"]
;; [io.pedestal/pedestal.immutant "0.5.1"]
;; [io.pedestal/pedestal.tomcat "0.5.1"]
[ch.qos.logback/logback-classic "1.2.10" :exclusions [org.slf4j/slf4j-api]]
[org.slf4j/jul-to-slf4j "1.7.35"]
[org.slf4j/jcl-over-slf4j "1.7.35"]
[org.slf4j/log4j-over-slf4j "1.7.35"]
[com.readytalk/metrics3-statsd "4.1.2"]]
:repositories [["jcenter" ""]]
:min-lein-version "2.0.0"
:resource-paths ["config", "resources"]
:profiles {:dev {:aliases {"run-dev" ["trampoline" "run" "-m" "helloworld-metrics.server/run-dev"]}}
:uberjar {:aot [helloworld-metrics.server]}}
:main ^{:skip-aot true} helloworld-metrics.server)
| null | https://raw.githubusercontent.com/pedestal/pedestal/53bfe70143a22cdfd2f0d183023334a199c9e9a2/samples/helloworld-metrics/project.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
[io.pedestal/pedestal.immutant "0.5.1"]
[io.pedestal/pedestal.tomcat "0.5.1"] | ; Copyright 2013 Relevance, Inc.
; Copyright 2014-2022 Cognitect, Inc.
; Eclipse Public License 1.0 ( )
(defproject helloworld-metrics "0.5.1"
:description "Demonstration of metrics support"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.0"]
[io.pedestal/pedestal.service "0.5.5"]
;; Remove this line and uncomment one of the next lines to
;; use Immutant or Tomcat instead of Jetty:
[io.pedestal/pedestal.jetty "0.5.5"]
[ch.qos.logback/logback-classic "1.2.10" :exclusions [org.slf4j/slf4j-api]]
[org.slf4j/jul-to-slf4j "1.7.35"]
[org.slf4j/jcl-over-slf4j "1.7.35"]
[org.slf4j/log4j-over-slf4j "1.7.35"]
[com.readytalk/metrics3-statsd "4.1.2"]]
:repositories [["jcenter" ""]]
:min-lein-version "2.0.0"
:resource-paths ["config", "resources"]
:profiles {:dev {:aliases {"run-dev" ["trampoline" "run" "-m" "helloworld-metrics.server/run-dev"]}}
:uberjar {:aot [helloworld-metrics.server]}}
:main ^{:skip-aot true} helloworld-metrics.server)
|
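The pedestal record above also shows how the derived columns relate to `content`: the `comments` field collects the `;`-style comment lines and `code` keeps the rest. A rough sketch of that split for line comments follows; it illustrates the idea only and is not the pipeline that actually produced these columns (it ignores comments inside strings, for example):

```python
def split_line_comments(content: str, marker: str = ";") -> tuple[str, str]:
    """Separate `marker`-prefixed comment lines from the remaining code lines."""
    comment_lines, code_lines = [], []
    for line in content.splitlines():
        stripped = line.lstrip()
        if stripped.startswith(marker):
            comment_lines.append(stripped.lstrip(marker + " ").rstrip())
        elif stripped:
            code_lines.append(line)
    return "\n".join(comment_lines), "\n".join(code_lines)

# Tiny example in the spirit of the project.clj record above.
comments, code = split_line_comments(
    "; Copyright 2013 Relevance, Inc.\n(defproject helloworld-metrics \"0.5.1\")"
)
print(comments)  # -> Copyright 2013 Relevance, Inc.
print(code)      # -> (defproject helloworld-metrics "0.5.1")
```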
55d1f39175d4172e7f1b6b56c439975477090fd5dcc45066fd37ee584cab23a0 | ocramz/twelve | Build.hs | {-# OPTIONS_GHC -Wno-unused-imports #-}
module CLI.Build (cliBuild) where
-- directory
import System.Directory (makeAbsolute, listDirectory)
-- filepath
import System.FilePath.Posix (takeExtension, (</>), replaceDirectory)
import Data.Text (Text)
import qualified Data.Text.Lazy as TL (Text)
import Data.Text.Lazy.IO (readFile, writeFile)
import Text.Html (loadAndProcess)
import Config (Config(..))
import Prelude hiding (readFile, writeFile)
cliBuild :: Config
-> FilePath -- ^ file to be processed
-> IO ()
cliBuild cfg@(CD din dout) fp = do
fpsIn <- htmlPaths din
t <- loadAndProcess cfg fpsIn fp
let fpout = replaceDirectory fp dout
writeFile fpout t
htmlPaths :: FilePath -> IO [FilePath]
htmlPaths = paths ".html"
paths :: String
-> FilePath -- ^ directory path of template files
-> IO [FilePath]
paths fext dp = do
dpnorm <- makeAbsolute dp
fps <- listDirectory dpnorm
fpsAbs <- traverse (\f -> makeAbsolute $ dp </> f) fps
let fpaths = filter (\fp -> takeExtension fp == fext) fpsAbs
pure fpaths
-- loadModelFromDir dp = do
--   <- makeAbsolute dp
--   fps <- listDirectory
-- let fpaths = map (dp </>) $ filter (\fp -> takeExtension fp == ".yml") fps
-- loadModel fpaths
-- buildTest :: Config -> FilePath -> IO TL.Text
-- buildTest cfg@(CD din _) fp = do
-- fpsIn <- htmlPaths din
-- loadAndProcess cfg fpsIn fp
| null | https://raw.githubusercontent.com/ocramz/twelve/bf47a2e329ad905125a4bd97382ee5af583dda38/src/CLI/Build.hs | haskell | # OPTIONS_GHC -Wno-unused-imports #
directory
filepath
^ file to be processed
^ directory path of template files
loadModelFromDir dp = do
let fpaths = map (dp </>) $ filter (\fp -> takeExtension fp == ".yml") fps
loadModel fpaths
buildTest cfg@(CD din _) fp = do
fpsIn <- htmlPaths din
loadAndProcess cfg fpsIn fp | module CLI.Build (cliBuild) where
import System.Directory (makeAbsolute, listDirectory)
import System.FilePath.Posix (takeExtension, (</>), replaceDirectory)
import Data.Text (Text)
import qualified Data.Text.Lazy as TL (Text)
import Data.Text.Lazy.IO (readFile, writeFile)
import Text.Html (loadAndProcess)
import Config (Config(..))
import Prelude hiding (readFile, writeFile)
cliBuild :: Config
-> FilePath
-> IO ()
cliBuild cfg@(CD din dout) fp = do
fpsIn <- htmlPaths din
t <- loadAndProcess cfg fpsIn fp
let fpout = replaceDirectory fp dout
writeFile fpout t
htmlPaths :: FilePath -> IO [FilePath]
htmlPaths = paths ".html"
paths :: String
-> FilePath
-> IO [FilePath]
paths fext dp = do
dpnorm <- makeAbsolute dp
fps <- listDirectory dpnorm
fpsAbs <- traverse (\f -> makeAbsolute $ dp </> f) fps
let fpaths = filter (\fp -> takeExtension fp == fext) fpsAbs
pure fpaths
--   <- makeAbsolute dp
--   fps <- listDirectory
-- buildTest :: Config -> FilePath -> IO TL.Text
|
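The Build.hs record follows the same layout with `--` comments, and because every record carries both derived fields, simple per-record metrics can be computed from them directly, for instance a comment-to-code ratio in line with the string lengths quoted in the header. A hedged sketch; the dictionary keys follow the column names above and the sample values are made up:

```python
def comment_ratio(record: dict) -> float:
    """Ratio of extracted comment text to comment-stripped code text for one record."""
    code_len = len(record.get("code") or "")
    comment_len = len(record.get("comments") or "")
    return comment_len / code_len if code_len else 0.0

# Made-up miniature record, only to show the call shape.
sample = {
    "language": "haskell",
    "comments": "directory\nfilepath\n^ file to be processed",
    "code": "module CLI.Build (cliBuild) where",
}
print(f"{comment_ratio(sample):.2f}")
```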
44ef0cd5cc21d08c0ce269fa1360d71b6b64c92cbc11cbd4c71a9e9254d9b699 | mhuebert/re-view | mdc.cljs | (ns re-view.material.mdc
(:require [re-view.core :as v]
[re-view.view-spec :as s]
[goog.dom.classes :as classes]
[goog.object :as gobj]
[re-view.material.util :as util]
["@material/animation" :refer [getCorrectEventName]]
["@material/drawer/util" :as mdc-util]
[clojure.string :as string])
(:require-macros re-view.material.mdc))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Environment
;;
(def ^js browser? (exists? js/window))
(def ^js Document (when browser? js/document))
(def ^js Body (when Document (.-body Document)))
(def ^js Window (when browser? js/window))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Functions to be called from a component's lifecycle methods
;;
(defn init
"Initialize an adapter with a re-view component (should be called in componentDidMount).
Adapters are written to the component on a property of the form `mdc{ComponentName}`"
[component & adapters]
(doseq [{:keys [name adapter]} adapters]
(let [^js foundation (adapter component)]
(gobj/set component (str "mdc" name) foundation)
(.init foundation))))
(defn destroy
"Destroy mdc foundation instances for component (should be called in componentWillUnmount)."
[component & adapters]
(doseq [{:keys [name]} adapters]
(let [^js foundation (gobj/get component (str "mdc" name))]
(when-let [onDestroy (aget foundation "adapter_" "onDestroy")]
(onDestroy))
(.destroy foundation))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Handling of adapter state.
;;
;; Adapters are stored as plain javascript properties of re-view components.
;; When an adapter initializes, it stores references to relevant DOM nodes
;; as properties on itself.
;;
;; State that changes while a component is mounted is stored in the component's
;; `:view/state` atom.
;;
;; Property names and state keys should be predictably named after the official
;; MDC component name and/or the name that a DOM node is given during `init`.
;;
(defn adapter
"Returns the adapter for `mdc-component-name` attached to `component`."
[component mdc-component-name]
(gobj/getValueByKeys component (str "mdc" (name mdc-component-name)) "adapter_"))
(defn element
"Returns the element (stored in `init`) stored under `property-name`."
[adapter element-key]
(gobj/get adapter (name element-key)))
(defn styles-key
"Returns keyword under which styles should be stored in state, given an element key"
[element-key]
(keyword "mdc" (str (name element-key) "-styles")))
(defn classes-key
"Returns keyword under which classes should be stored in state, given an element key"
[element-key]
(keyword "mdc" (str (name element-key) "-classes")))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Adapter implementation helpers
;;
(defn general-interaction-handler
"Returns a function which adds or removes an event handler to `element`.
`kind` may be `:listen` or `:unlisten`."
([kind element] (general-interaction-handler kind element {}))
([kind element {:keys [passive? remap-event]
:or {passive? true}}]
(fn [event-type handler]
(this-as this
(let [^js target (cond->> element
(string? element)
(gobj/get this))
event-type (cond-> event-type
remap-event (remap-event))]
(condp = kind
:listen (.addEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))
:unlisten (.removeEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))))))))
(defn interaction-handler
"Returns a function which adds or removes an event handler of `event-type` to `element`.
`kind` may be `:listen` or `:unlisten`."
([kind element event-type] (interaction-handler kind element event-type {}))
([kind element event-type {:keys [passive?]
:or {passive? true}}]
(fn [handler]
(this-as this
(let [^js target (cond->> element
(string? element)
(gobj/get this))]
(condp = kind
:listen (.addEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))
:unlisten (.removeEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))))))))
(defn style-handler
"Returns a function which adds the given CSS attribute-value pair to `element`"
[element]
(fn [attribute value]
(util/add-styles element {attribute value})))
(defn mdc-style-update
"Returns a function which updates styles for the given component-name / element-key pair.
Adapters follow a convention of keeping styles for a particular element under
a `:mdc/{element-key}-styles` key in the component's state.
Eg. (mdc-style-update :Ripple :root) will sync the styles stored under :mdc/Ripple-styles with
the element stored under the :root key in the adapter."
[mdc-component-name element-key]
(fn [{:keys [view/state
view/prev-state] :as this}]
(let [target (element (adapter this mdc-component-name) element-key)
style-key (styles-key element-key)]
(util/add-styles target (get @state style-key) (get prev-state style-key)))))
#_(defn mdc-classes-update
([mdc-key]
(mdc-classes-update mdc-key "root"))
([mdc-key element-key]
(fn [{:keys [view/state] :as this}]
(when-let [mdc-classes (seq (get @state (classes-key mdc-key)))]
(let [target (element (adapter this mdc-key) element-key)]
(doseq [class mdc-classes]
(classes/add target class)))))))
(defn class-handler
"Adds or removes a class from the current adapter/component.
`prefix` may be specified when classes must be handled for more than one element
of a component.
javascript `this` is used to look up current component."
([action]
(class-handler action nil))
([action prefix]
(let [f (condp = action :add (fnil conj #{}) :remove (fnil disj #{}))]
(fn [class-name]
(this-as this
(let [state-atom (aget this "state")
state-key (keyword "mdc"
(str (gobj/get this "name")
(some-> prefix (str "-"))
"-classes"))]
(swap! state-atom update state-key f class-name)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Adapter construction
;;
(def adapter-base
"Common adapter implementations which are used by multiple components."
{:addClass (class-handler :add)
:removeClass (class-handler :remove)
:hasClass #(this-as this
(classes/has (gobj/get this "root") %))
:registerInteractionHandler (general-interaction-handler :listen "root" nil)
:deregisterInteractionHandler (general-interaction-handler :unlisten "root" nil)
:registerDocumentKeydownHandler (interaction-handler :listen Document "keydown")
:deregisterDocumentKeydownHandler (interaction-handler :unlisten Document "keydown")
:registerDocumentClickHandler (interaction-handler :listen Document "click")
:deregisterDocumentClickHandler (interaction-handler :unlisten Document "click")
:isRtl #(this-as this
(let [^js root (gobj/get this "root")
^js styles (js/getComputedStyle root)]
(= "rtl" (.getPropertyValue styles "direction"))))
:addBodyClass #(classes/add Body %)
:removeBodyClass #(classes/remove Body %)})
(defn bind-adapter
"Return methods that bind an adapter to a specific component instance"
[{:keys [view/state] :as this}]
(let [root-node (v/dom-node this)]
{:root root-node
:nativeInput (util/find-tag root-node #"INPUT|TEXTAREA")
:state state
:component this}))
(defn make-foundation
"Extends adapter with base adapter methods, and wraps with Foundation class."
[name foundation-class methods]
(fn [this]
(foundation-class. (->> (merge (bind-adapter this)
adapter-base
(if (fn? methods) (methods this) methods)
{:name name})
(clj->js)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; View Specs
;;
;; See: -view.io/docs/re-view/view-specs
;;
(s/defspecs {::color {:spec #{:primary :accent}
:doc "Specifies color variable from theme."}
::raised {:spec :Boolean
:doc "Raised buttons gain elevation, and color is applied to background instead of text."}
::ripple {:spec :Boolean
:doc "Enables ripple effect on click/tap"
:default true}
::compact {:spec :Boolean
:doc "Reduces horizontal padding"}
::auto-focus {:spec :Boolean
:doc "If true, focuses element on mount"}
::id :String
::dirty {:spec :Boolean
:doc "If true, field should display validation errors"}
::dense {:spec :Boolean
:doc "Reduces text size and vertical padding"}
::disabled {:spec :Boolean
:doc "Disables input element or button"
:pass-through true}
::label {:spec :Element
:doc "Label for input element or button"}
::on-change :Function
::rtl {:spec :Boolean
:doc "Show content in right to left."}
::value {:spec :String
:doc "Providing a value causes an input component to be 'controlled'"}
::default-value {:spec :String
:doc "For an uncontrolled component, sets the initial value"}}) | null | https://raw.githubusercontent.com/mhuebert/re-view/ba38cd09b78b3c6db6a2983f3e2cf1bd6294564f/material/src/re_view/material/mdc.cljs | clojure |
Environment
Functions to be called from a component's lifecycle methods
Handling of adapter state.
Adapters are stored as plain javascript properties of re-view components.
as properties on itself.
`:view/state` atom.
Property names and state keys should be predictably named after the official
Adapter implementation helpers
View Specs
| (ns re-view.material.mdc
(:require [re-view.core :as v]
[re-view.view-spec :as s]
[goog.dom.classes :as classes]
[goog.object :as gobj]
[re-view.material.util :as util]
["@material/animation" :refer [getCorrectEventName]]
["@material/drawer/util" :as mdc-util]
[clojure.string :as string])
(:require-macros re-view.material.mdc))
(def ^js browser? (exists? js/window))
(def ^js Document (when browser? js/document))
(def ^js Body (when Document (.-body Document)))
(def ^js Window (when browser? js/window))
(defn init
"Initialize an adapter with a re-view component (should be called in componentDidMount).
Adapters are written to the component on a property of the form `mdc{ComponentName}`"
[component & adapters]
(doseq [{:keys [name adapter]} adapters]
(let [^js foundation (adapter component)]
(gobj/set component (str "mdc" name) foundation)
(.init foundation))))
(defn destroy
"Destroy mdc foundation instances for component (should be called in componentWillUnmount)."
[component & adapters]
(doseq [{:keys [name]} adapters]
(let [^js foundation (gobj/get component (str "mdc" name))]
(when-let [onDestroy (aget foundation "adapter_" "onDestroy")]
(onDestroy))
(.destroy foundation))))
;; When an adapter initializes, it stores references to relevant DOM nodes
;; State that changes while a component is mounted is stored in the component's
;; MDC component name and/or the name that a DOM node is given during `init`.
(defn adapter
"Returns the adapter for `mdc-component-name` attached to `component`."
[component mdc-component-name]
(gobj/getValueByKeys component (str "mdc" (name mdc-component-name)) "adapter_"))
(defn element
"Returns the element (stored in `init`) stored under `property-name`."
[adapter element-key]
(gobj/get adapter (name element-key)))
(defn styles-key
"Returns keyword under which styles should be stored in state, given an element key"
[element-key]
(keyword "mdc" (str (name element-key) "-styles")))
(defn classes-key
"Returns keyword under which classes should be stored in state, given an element key"
[element-key]
(keyword "mdc" (str (name element-key) "-classes")))
(defn general-interaction-handler
"Returns a function which adds or removes an event handler to `element`.
`kind` may be `:listen` or `:unlisten`."
([kind element] (general-interaction-handler kind element {}))
([kind element {:keys [passive? remap-event]
:or {passive? true}}]
(fn [event-type handler]
(this-as this
(let [^js target (cond->> element
(string? element)
(gobj/get this))
event-type (cond-> event-type
remap-event (remap-event))]
(condp = kind
:listen (.addEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))
:unlisten (.removeEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))))))))
(defn interaction-handler
"Returns a function which adds or removes an event handler of `event-type` to `element`.
`kind` may be `:listen` or `:unlisten`."
([kind element event-type] (interaction-handler kind element event-type {}))
([kind element event-type {:keys [passive?]
:or {passive? true}}]
(fn [handler]
(this-as this
(let [^js target (cond->> element
(string? element)
(gobj/get this))]
(condp = kind
:listen (.addEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))
:unlisten (.removeEventListener target event-type handler (if passive? (mdc-util/applyPassive) false))))))))
(defn style-handler
"Returns a function which adds the given CSS attribute-value pair to `element`"
[element]
(fn [attribute value]
(util/add-styles element {attribute value})))
(defn mdc-style-update
"Returns a function which updates styles for the given component-name / element-key pair.
Adapters follow a convention of keeping styles for a particular element under
a `:mdc/{element-key}-styles` key in the component's state.
Eg. (mdc-style-update :Ripple :root) will sync the styles stored under :mdc/Ripple-styles with
the element stored under the :root key in the adapter."
[mdc-component-name element-key]
(fn [{:keys [view/state
view/prev-state] :as this}]
(let [target (element (adapter this mdc-component-name) element-key)
style-key (styles-key element-key)]
(util/add-styles target (get @state style-key) (get prev-state style-key)))))
#_(defn mdc-classes-update
([mdc-key]
(mdc-classes-update mdc-key "root"))
([mdc-key element-key]
(fn [{:keys [view/state] :as this}]
(when-let [mdc-classes (seq (get @state (classes-key mdc-key)))]
(let [target (element (adapter this mdc-key) element-key)]
(doseq [class mdc-classes]
(classes/add target class)))))))
(defn class-handler
"Adds or removes a class from the current adapter/component.
`prefix` may be specified when classes must be handled for more than one element
of a component.
javascript `this` is used to look up current component."
([action]
(class-handler action nil))
([action prefix]
(let [f (condp = action :add (fnil conj #{}) :remove (fnil disj #{}))]
(fn [class-name]
(this-as this
(let [state-atom (aget this "state")
state-key (keyword "mdc"
(str (gobj/get this "name")
(some-> prefix (str "-"))
"-classes"))]
(swap! state-atom update state-key f class-name)))))))
;; Adapter construction
(def adapter-base
"Common adapter implementations which are used by multiple components."
{:addClass (class-handler :add)
:removeClass (class-handler :remove)
:hasClass #(this-as this
(classes/has (gobj/get this "root") %))
:registerInteractionHandler (general-interaction-handler :listen "root" nil)
:deregisterInteractionHandler (general-interaction-handler :unlisten "root" nil)
:registerDocumentKeydownHandler (interaction-handler :listen Document "keydown")
:deregisterDocumentKeydownHandler (interaction-handler :unlisten Document "keydown")
:registerDocumentClickHandler (interaction-handler :listen Document "click")
:deregisterDocumentClickHandler (interaction-handler :unlisten Document "click")
:isRtl #(this-as this
(let [^js root (gobj/get this "root")
^js styles (js/getComputedStyle root)]
(= "rtl" (.getPropertyValue styles "direction"))))
:addBodyClass #(classes/add Body %)
:removeBodyClass #(classes/remove Body %)})
(defn bind-adapter
"Return methods that bind an adapter to a specific component instance"
[{:keys [view/state] :as this}]
(let [root-node (v/dom-node this)]
{:root root-node
:nativeInput (util/find-tag root-node #"INPUT|TEXTAREA")
:state state
:component this}))
(defn make-foundation
"Extends adapter with base adapter methods, and wraps with Foundation class."
[name foundation-class methods]
(fn [this]
(foundation-class. (->> (merge (bind-adapter this)
adapter-base
(if (fn? methods) (methods this) methods)
{:name name})
(clj->js)))))
;; See: -view.io/docs/re-view/view-specs
(s/defspecs {::color {:spec #{:primary :accent}
:doc "Specifies color variable from theme."}
::raised {:spec :Boolean
:doc "Raised buttons gain elevation, and color is applied to background instead of text."}
::ripple {:spec :Boolean
:doc "Enables ripple effect on click/tap"
:default true}
::compact {:spec :Boolean
:doc "Reduces horizontal padding"}
::auto-focus {:spec :Boolean
:doc "If true, focuses element on mount"}
::id :String
::dirty {:spec :Boolean
:doc "If true, field should display validation errors"}
::dense {:spec :Boolean
:doc "Reduces text size and vertical padding"}
::disabled {:spec :Boolean
:doc "Disables input element or button"
:pass-through true}
::label {:spec :Element
:doc "Label for input element or button"}
::on-change :Function
::rtl {:spec :Boolean
:doc "Show content in right to left."}
::value {:spec :String
:doc "Providing a value causes an input component to be 'controlled'"}
::default-value {:spec :String
:doc "For an uncontrolled component, sets the initial value"}}) |
bc1b09958ff24f9725fb2a63bcb648fa755ac38a970eb9554c3508f3fa43e0e8 | godfat/sandbox | 002.hs |
Problem 2
19 October 2001
Each new term in the Fibonacci sequence is generated by adding the previous
two terms . By starting with 1 and 2 , the first 10 terms will be :
1 , 2 , 3 , 5 , 8 , 13 , 21 , 34 , 55 , 89 , ...
By considering the terms in the Fibonacci sequence whose values do not
exceed four million , find the sum of the even - valued terms .
Problem 2
19 October 2001
Each new term in the Fibonacci sequence is generated by adding the previous
two terms. By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
By considering the terms in the Fibonacci sequence whose values do not
exceed four million, find the sum of the even-valued terms.
-}
fib :: [Int]
fib = 1 : 2 : [ a + b | (a, b) <- zip fib (tail fib)]
main = putStrLn . show $
sum (filter even (takeWhile (<=4000000) fib))
| null | https://raw.githubusercontent.com/godfat/sandbox/eb6294238f92543339adfdfb4ba88586ba0e82b8/haskell/projecteuler.net/002.hs | haskell |
Problem 2
19 October 2001
Each new term in the Fibonacci sequence is generated by adding the previous
two terms . By starting with 1 and 2 , the first 10 terms will be :
1 , 2 , 3 , 5 , 8 , 13 , 21 , 34 , 55 , 89 , ...
By considering the terms in the Fibonacci sequence whose values do not
exceed four million , find the sum of the even - valued terms .
Problem 2
19 October 2001
Each new term in the Fibonacci sequence is generated by adding the previous
two terms. By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
By considering the terms in the Fibonacci sequence whose values do not
exceed four million, find the sum of the even-valued terms.
-}
fib :: [Int]
fib = 1 : 2 : [ a + b | (a, b) <- zip fib (tail fib)]
main = putStrLn . show $
sum (filter even (takeWhile (<=4000000) fib))
|
|
59be3580cb37a3dddeb0e28f6c8e3de36d013aa49d2f8739567570d1e68be37c | cryptosense/pkcs11 | example_digest.ml | let mechanisms =
[ P11.Mechanism.CKM_MD5
; P11.Mechanism.CKM_SHA_1
; P11.Mechanism.CKM_SHA256
; P11.Mechanism.CKM_SHA384
; P11.Mechanism.CKM_SHA512 ]
let print_digest driver session plaintext mechanism =
let digest = P11_driver.digest driver session mechanism plaintext in
let (`Hex h) = Hex.of_string digest in
Printf.printf "Digest(%s, %S) = %S\n"
(P11.Mechanism.to_string mechanism)
plaintext h
let run ~dll ~slot_id ~pin ~plaintext =
Pkcs11_log.set_logging_function prerr_endline;
let driver = P11_driver.load_driver dll in
P11_driver.initialize driver;
let slot =
match P11_driver.get_slot driver slot_id with
| Ok s -> s
| Error e -> failwith e
in
let session =
P11_driver.open_session driver ~slot ~flags:P11.Flags._CKF_SERIAL_SESSION
in
P11_driver.login driver session P11.User_type.CKU_USER pin;
List.iter (print_digest driver session plaintext) mechanisms
let () =
match Sys.argv with
| [|_; dll; slot_string; pin; plaintext|] ->
let slot_id = P11.Slot.Index (int_of_string slot_string) in
run ~dll ~slot_id ~pin ~plaintext
| _ -> invalid_arg "Usage: digest <dll> <slot> <pin> <plaintext>"
| null | https://raw.githubusercontent.com/cryptosense/pkcs11/93c39c7a31c87f68f0beabf75ef90d85a782a983/test/examples/example_digest.ml | ocaml | let mechanisms =
[ P11.Mechanism.CKM_MD5
; P11.Mechanism.CKM_SHA_1
; P11.Mechanism.CKM_SHA256
; P11.Mechanism.CKM_SHA384
; P11.Mechanism.CKM_SHA512 ]
let print_digest driver session plaintext mechanism =
let digest = P11_driver.digest driver session mechanism plaintext in
let (`Hex h) = Hex.of_string digest in
Printf.printf "Digest(%s, %S) = %S\n"
(P11.Mechanism.to_string mechanism)
plaintext h
let run ~dll ~slot_id ~pin ~plaintext =
Pkcs11_log.set_logging_function prerr_endline;
let driver = P11_driver.load_driver dll in
P11_driver.initialize driver;
let slot =
match P11_driver.get_slot driver slot_id with
| Ok s -> s
| Error e -> failwith e
in
let session =
P11_driver.open_session driver ~slot ~flags:P11.Flags._CKF_SERIAL_SESSION
in
P11_driver.login driver session P11.User_type.CKU_USER pin;
List.iter (print_digest driver session plaintext) mechanisms
let () =
match Sys.argv with
| [|_; dll; slot_string; pin; plaintext|] ->
let slot_id = P11.Slot.Index (int_of_string slot_string) in
run ~dll ~slot_id ~pin ~plaintext
| _ -> invalid_arg "Usage: digest <dll> <slot> <pin> <plaintext>"
|
|
49f5926f271b625d6355518c57df286af98b155c01938c8710fc0e2aa1a0011e | xapix-io/matchete | poker_hand.cljc | (ns example.poker-hand
(:require [matchete.core :as m]))
;; == helpers ==
(defn card-comparator [card-a card-b]
(if (some m/pattern? [card-a card-b])
1
(compare card-a card-b)))
;; === rules ===
(def rules
{'%plus (fn [s m]
(fn [matches _ data]
(cond
(and (contains? matches s)
(= data (+ m (get matches s))))
(list matches)
(and (not (contains? matches s))
(> data m))
(list (assoc matches s (- data m))))))
'$high-card (fn [{[_ rank' :as card'] :card} _ [_ rank :as card]]
(list {:card (cond
(nil? card')
card
(> rank rank')
card
:else
card')}))})
;; =============
(defn poker-hand
{:test #(do
(assert
(= (poker-hand #{[:♠ 5] [:♠ 6] [:♠ 7] [:♠ 8] [:♠ 9]})
"Straight flush"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 5]})
"Four of a kind"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 7]})
"Full house"))
(assert
(= (poker-hand #{[:♠ 5] [:♠ 6] [:♠ 7] [:♠ 13] [:♠ 9]})
"Flush"))
(assert
(= (poker-hand #{[:♠ 5] [:♣ 6] [:♠ 7] [:♠ 8] [:♠ 9]})
"Straight"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 8]})
"Three of a kind"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 10] [:♠ 7] [:♣ 5] [:♥ 10]})
"Two pair"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 10] [:♠ 7] [:♣ 5] [:♥ 8]})
"One pair"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 11] [:♠ 6] [:♠ 7] [:♠ 8]})
[:♦ 11])))}
[hand]
(letfn [(match? [pattern hand]
(m/match? pattern rules hand))]
(condp match? hand
'#{[?s 14] [?s 13] [?s 12] [?s 11] [?s 10]}
"Royal flush"
'#{[?s ?n] [?s (%plus ?n 1)] [?s (%plus ?n 2)] [?s (%plus ?n 3)] [?s (%plus ?n 4)]}
"Straight flush"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?n] '[_ ?n] '_)
"Four of a kind"
(sorted-set-by card-comparator '[_ ?m] '[_ ?m] '[_ ?m] '[_ ?n] '[_ ?n])
"Full house"
(sorted-set-by card-comparator '[?s _] '[?s _] '[?s _] '[?s _] '[?s _])
"Flush"
'#{[_ ?n] [_ (%plus ?n 1)] [_ (%plus ?n 2)] [_ (%plus ?n 3)] [_ (%plus ?n 4)]}
"Straight"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?n] '_ '_)
"Three of a kind"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?m] '[_ ?m] '_)
"Two pair"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '_ '_ '_)
"One pair"
(-> (m/matches (sorted-set-by card-comparator '$high-card '$high-card '$high-card '$high-card '$high-card) rules hand)
first
:card))))
| null | https://raw.githubusercontent.com/xapix-io/matchete/ed1dee189e123272c72ece6615f31ec14ddf87e6/dev/example/poker_hand.cljc | clojure | == helpers ==
=== rules ===
============= | (ns example.poker-hand
(:require [matchete.core :as m]))
(defn card-comparator [card-a card-b]
(if (some m/pattern? [card-a card-b])
1
(compare card-a card-b)))
(def rules
{'%plus (fn [s m]
(fn [matches _ data]
(cond
(and (contains? matches s)
(= data (+ m (get matches s))))
(list matches)
(and (not (contains? matches s))
(> data m))
(list (assoc matches s (- data m))))))
'$high-card (fn [{[_ rank' :as card'] :card} _ [_ rank :as card]]
(list {:card (cond
(nil? card')
card
(> rank rank')
card
:else
card')}))})
(defn poker-hand
{:test #(do
(assert
(= (poker-hand #{[:♠ 5] [:♠ 6] [:♠ 7] [:♠ 8] [:♠ 9]})
"Straight flush"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 5]})
"Four of a kind"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 7]})
"Full house"))
(assert
(= (poker-hand #{[:♠ 5] [:♠ 6] [:♠ 7] [:♠ 13] [:♠ 9]})
"Flush"))
(assert
(= (poker-hand #{[:♠ 5] [:♣ 6] [:♠ 7] [:♠ 8] [:♠ 9]})
"Straight"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 5] [:♠ 7] [:♣ 5] [:♥ 8]})
"Three of a kind"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 10] [:♠ 7] [:♣ 5] [:♥ 10]})
"Two pair"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 10] [:♠ 7] [:♣ 5] [:♥ 8]})
"One pair"))
(assert
(= (poker-hand #{[:♠ 5] [:♦ 11] [:♠ 6] [:♠ 7] [:♠ 8]})
[:♦ 11])))}
[hand]
(letfn [(match? [pattern hand]
(m/match? pattern rules hand))]
(condp match? hand
'#{[?s 14] [?s 13] [?s 12] [?s 11] [?s 10]}
"Royal flush"
'#{[?s ?n] [?s (%plus ?n 1)] [?s (%plus ?n 2)] [?s (%plus ?n 3)] [?s (%plus ?n 4)]}
"Straight flush"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?n] '[_ ?n] '_)
"Four of a kind"
(sorted-set-by card-comparator '[_ ?m] '[_ ?m] '[_ ?m] '[_ ?n] '[_ ?n])
"Full house"
(sorted-set-by card-comparator '[?s _] '[?s _] '[?s _] '[?s _] '[?s _])
"Flush"
'#{[_ ?n] [_ (%plus ?n 1)] [_ (%plus ?n 2)] [_ (%plus ?n 3)] [_ (%plus ?n 4)]}
"Straight"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?n] '_ '_)
"Three of a kind"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '[_ ?m] '[_ ?m] '_)
"Two pair"
(sorted-set-by card-comparator '[_ ?n] '[_ ?n] '_ '_ '_)
"One pair"
(-> (m/matches (sorted-set-by card-comparator '$high-card '$high-card '$high-card '$high-card '$high-card) rules hand)
first
:card))))
|
63a45d004b905b44de34da1684ad95eea3da613280dc7d2aa6ebc8d358ade0ed | tip-org/tools | Passes.hs | -- | Passes
module Tip.Passes
(
-- * Running passes in the Fresh monad
freshPass
-- * Simplifications
, simplifyTheory, gently, aggressively, SimplifyOpts(..)
, removeNewtype
, uncurryTheory
-- * Simplifying conjectures
, module Tip.Pass.Conjecture
, module Tip.Pass.Concretise
-- * Changing status of conjectures
, makeConjecture
, selectConjecture
, provedConjecture
, deleteConjecture
-- * Boolean builtins
, ifToBoolOp
, boolOpToIf
, theoryBoolOpToIf
, removeBuiltinBool
, boolOpLift
-- * Match expressions
, addMatch
, commuteMatch
, removeMatch
, cseMatch
, cseMatchNormal
, cseMatchWhy3
, makeMatchExhaustive
-- * Duplicated functions
, collapseEqual
, removeAliases
-- * Lambda and let lifting
, lambdaLift
, letLift
, eliminateLetRec
, axiomatizeLambdas
-- * Function definitions
, axiomatizeFuncdefs
, axiomatizeFuncdefs2
-- * Data types
, axiomatizeDatadecls
* Monomorphisation
, monomorphise
-- * Induction
, induction
, recursionInduction
-- * Miscellaneous
, uniqLocals
, dropSuffix
, dropAttributes
, dropAttribute
, splitFormulas
-- * Building pass pipelines
, StandardPass(..)
, module Tip.Pass.Pipeline
) where
import Tip.Simplify
import Tip.Pass.AddMatch
import Tip.Pass.CommuteMatch
import Tip.Pass.RemoveMatch
import Tip.Pass.CSEMatch
import Tip.Pass.Uncurry
import Tip.Pass.RemoveNewtype
import Tip.Pass.Conjecture
import Tip.Pass.Concretise
import Tip.Pass.EqualFunctions
import Tip.Pass.Lift
import Tip.Pass.Monomorphise
import Tip.Pass.Booleans
import Tip.Pass.EliminateDeadCode
import Tip.Pass.MakeMatchExhaustive
import Tip.Pass.AxiomatizeFuncdefs
import Tip.Pass.AxiomatizeDatadecls
import Tip.Pass.SelectConjecture
import Tip.Pass.DropSuffix
import Tip.Pass.UniqLocals
import Tip.Pass.Induction
import Tip.Pass.DropAttributes
import Tip.Pass.SplitFormulas
import Tip.Fresh
import Tip.Pass.Pipeline
import Options.Applicative
import Data.Monoid ((<>))
-- | The passes in the standard Tip distribution
data StandardPass
= SimplifyGently
| SimplifyAggressively
| RemoveNewtype
| UncurryTheory
| NegateConjecture
| TypeSkolemConjecture
| IntToNat
| IntAxioms
| SortsToNat
| SplitConjecture
| SkolemiseConjecture
| IfToBoolOp
| BoolOpToIf
| RemoveBuiltinBool
| BoolOpLift
| AddMatch
| CommuteMatch
| RemoveMatch
| MakeMatchExhaustive
| CollapseEqual
| RemoveAliases
| LambdaLift
| LetLift
| AxiomatizeLambdas
| AxiomatizeFuncdefs
| AxiomatizeFuncdefs2
| AxiomatizeDatadecls
| AxiomatizeDatadeclsUEQ
| Monomorphise Bool Int
| CSEMatch
| CSEMatchWhy3
| EliminateDeadCode
| MakeConjecture Int
| SelectConjecture Int
| ProvedConjecture Int
| DeleteConjecture Int
| DropSuffix String
| UniqLocals
| DropAttributes
| DropAttribute String
| Induction [Int]
| RecursionInduction Int [Int]
| SplitFormulas
deriving (Eq,Ord,Show,Read)
instance Pass StandardPass where
passName = show
runPass p = case p of
SimplifyGently -> single $ simplifyTheory gently
SimplifyAggressively -> single $ simplifyTheory aggressively
RemoveNewtype -> single $ return . removeNewtype
UncurryTheory -> single $ uncurryTheory
NegateConjecture -> (return . splitConjecture) `followedBy` single negateConjecture
TypeSkolemConjecture -> single $ typeSkolemConjecture ModeConjecture
IntToNat -> single $ intToNat
IntAxioms -> single $ intAxioms
SortsToNat -> single $ sortsToNat
SplitConjecture -> return . splitConjecture
SkolemiseConjecture -> skolemiseConjecture
IfToBoolOp -> single $ return . ifToBoolOp
BoolOpToIf -> single $ return . theoryBoolOpToIf
RemoveBuiltinBool -> runPass BoolOpToIf `followedBy` single removeBuiltinBool
BoolOpLift -> single $ boolOpLift
AddMatch -> single $ addMatch
CommuteMatch -> single $ commuteMatchTheory
RemoveMatch -> single $ removeMatch
MakeMatchExhaustive -> single $ makeMatchExhaustive
CollapseEqual -> single $ return . removeAliases . collapseEqual
RemoveAliases -> single $ return . removeAliases
LambdaLift -> single $ lambdaLift
LetLift -> single $ letLift
AxiomatizeLambdas -> single lambdaLift `followedBy` single axiomatizeLambdas
AxiomatizeFuncdefs -> single (return . axiomatizeFuncdefs)
AxiomatizeFuncdefs2 -> single (return . axiomatizeFuncdefs2)
AxiomatizeDatadecls -> runPass RemoveMatch `followedBy` single (axiomatizeDatadecls False)
AxiomatizeDatadeclsUEQ -> runPass RemoveMatch `followedBy` single (axiomatizeDatadecls True)
Monomorphise b n -> single (typeSkolemConjecture ModeMonomorphise) `followedBy` single (monomorphise b n)
CSEMatch -> single $ return . cseMatch cseMatchNormal
CSEMatchWhy3 -> single $ return . cseMatch cseMatchWhy3
EliminateDeadCode -> single $ return . eliminateDeadCode
MakeConjecture n -> single $ return . makeConjecture n
SelectConjecture n -> single $ return . selectConjecture n
ProvedConjecture n -> single $ return . provedConjecture n
DeleteConjecture n -> single $ return . deleteConjecture n
DropSuffix cs -> single $ dropSuffix cs
UniqLocals -> single $ uniqLocals
DropAttributes -> single $ return . dropAttributes
DropAttribute attr -> single $ return . dropAttribute attr
Induction coords -> induction coords
RecursionInduction fn xsns -> recursionInduction fn xsns
SplitFormulas -> single $ return . splitFormulas
where
single m thy = do x <- m thy; return [x]
f `followedBy` g = \thy -> do
thys <- f thy
fmap concat (mapM g thys)
parsePass =
foldr (<|>) empty [
unitPass SimplifyGently $
help "Simplify the problem, trying not to increase its size",
unitPass SimplifyAggressively $
help "Simplify the problem even at the cost of making it bigger",
unitPass RemoveNewtype $
help "Eliminate single-constructor, single-argument datatypes",
unitPass UncurryTheory $
help "Eliminate unnecessary use of higher-order functions",
unitPass NegateConjecture $
help "Transform the goal into a negated conjecture",
unitPass TypeSkolemConjecture $
help "Skolemise the types in the conjecture",
unitPass IntToNat $
help "Replace builtin Integer with a a unary nat datatype nat (if only ordering is used)",
unitPass IntAxioms $
help "Add axioms for integers",
unitPass SortsToNat $
help "Replace abstract sorts with a unary nat datatype.",
unitPass SplitConjecture $
help "Puts goals in separate theories",
unitPass SkolemiseConjecture $
help "Skolemise the conjecture",
unitPass IfToBoolOp $
help "Replace if-then-else by and/or where appropriate",
unitPass BoolOpToIf $
help "Replace and/or by if-then-else",
unitPass RemoveBuiltinBool $
help "Replace the builtin bool with a datatype",
unitPass BoolOpLift $
help "Lift boolean operators to the top level",
unitPass AddMatch $
help "Transform SMTLIB-style datatype access into pattern matching",
unitPass CommuteMatch $
help "Eliminate matches that occur in weird positions (e.g. as arguments to function calls)",
unitPass RemoveMatch $
help "Replace pattern matching with SMTLIB-style datatype access",
unitPass MakeMatchExhaustive $
help "Fill in any missing cases by returning an unspecified constant",
unitPass CollapseEqual $
help "Merge functions with equal definitions",
unitPass RemoveAliases $
help "Eliminate any function defined simply as f(x) = g(x)",
unitPass LambdaLift $
help "Lift lambdas to the top level",
unitPass LetLift $
help "Lift let-expressions to the top level",
unitPass AxiomatizeLambdas $
help "Eliminate lambdas by axiomatisation",
unitPass AxiomatizeFuncdefs $
help "Transform function definitions to axioms in the most straightforward way",
unitPass AxiomatizeFuncdefs2 $
help "Transform function definitions to axioms with left hand side pattern matching instead of match",
unitPass AxiomatizeDatadecls $
help "Transform data declarations to axioms",
unitPass AxiomatizeDatadeclsUEQ $
help "Transform data declarations to unit equality axioms (incomplete)",
unitPass SplitFormulas $
help "Split formulas into simpler parts",
flag' () (long "monomorphise" <> help "Monomorphise the problem.") *> pure (Monomorphise False 1),
fmap (Monomorphise False) $
option auto $
long "monomorphise-with-rounds" <>
metavar "NUMBER-OF-ROUNDS" <>
help "Monomorphise the problem. When more rounds are run, more instances are generated.",
unitPass CSEMatch $
help "Perform CSE on match scrutinees",
unitPass CSEMatchWhy3 $
help "Aggressively perform CSE on match scrutinees (helps Why3's termination checker)",
unitPass EliminateDeadCode $
help "Dead code elimination (doesn't work on dead recursive functions)",
fmap MakeConjecture $
option auto $
long "make-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Make an assert into an assert-not",
fmap SelectConjecture $
option auto $
long "select-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Choose a particular conjecture from the problem",
fmap ProvedConjecture $
option auto $
long "proved-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Mark a particular conjecture as proved",
fmap DeleteConjecture $
option auto $
long "delete-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Delete a particular conjecture",
fmap DropSuffix $
option str $
long "drop-suffix" <>
metavar "SUFFIX-CHARS" <>
help "Drop the suffix delimited by some character set",
unitPass UniqLocals $
help "Make all local variables unique",
unitPass DropAttributes $
help "Remove all attributes (e.g. :keep) from declarations",
fmap DropAttribute $
option str $
long "drop-attribute" <>
metavar "NAME" <>
help "Remove the given attribute from declarations",
fmap Induction $
option auto $
long "induction" <>
metavar "VAR-COORD" <>
help "Perform induction on the variable coordinates",
fmap (uncurry RecursionInduction) $
option auto $
long "ri" <>
metavar "COORDS" <>
help "Perform recursion induction"
]
| null | https://raw.githubusercontent.com/tip-org/tools/34350072587bd29157d18331eb895a1b2819555f/tip-lib/src/Tip/Passes.hs | haskell | | Passes
* Running passes in the Fresh monad
* Simplifications
* Simplifying conjectures
* Changing status of conjectures
* Boolean builtins
* Match expressions
* Duplicated functions
* Lambda and let lifting
* Function definitions
* Data types
* Induction
* Miscellaneous
* Building pass pipelines
| The passes in the standard Tip distribution | module Tip.Passes
(
freshPass
, simplifyTheory, gently, aggressively, SimplifyOpts(..)
, removeNewtype
, uncurryTheory
, module Tip.Pass.Conjecture
, module Tip.Pass.Concretise
, makeConjecture
, selectConjecture
, provedConjecture
, deleteConjecture
, ifToBoolOp
, boolOpToIf
, theoryBoolOpToIf
, removeBuiltinBool
, boolOpLift
, addMatch
, commuteMatch
, removeMatch
, cseMatch
, cseMatchNormal
, cseMatchWhy3
, makeMatchExhaustive
, collapseEqual
, removeAliases
, lambdaLift
, letLift
, eliminateLetRec
, axiomatizeLambdas
, axiomatizeFuncdefs
, axiomatizeFuncdefs2
, axiomatizeDatadecls
* Monomorphisation
, monomorphise
, induction
, recursionInduction
, uniqLocals
, dropSuffix
, dropAttributes
, dropAttribute
, splitFormulas
, StandardPass(..)
, module Tip.Pass.Pipeline
) where
import Tip.Simplify
import Tip.Pass.AddMatch
import Tip.Pass.CommuteMatch
import Tip.Pass.RemoveMatch
import Tip.Pass.CSEMatch
import Tip.Pass.Uncurry
import Tip.Pass.RemoveNewtype
import Tip.Pass.Conjecture
import Tip.Pass.Concretise
import Tip.Pass.EqualFunctions
import Tip.Pass.Lift
import Tip.Pass.Monomorphise
import Tip.Pass.Booleans
import Tip.Pass.EliminateDeadCode
import Tip.Pass.MakeMatchExhaustive
import Tip.Pass.AxiomatizeFuncdefs
import Tip.Pass.AxiomatizeDatadecls
import Tip.Pass.SelectConjecture
import Tip.Pass.DropSuffix
import Tip.Pass.UniqLocals
import Tip.Pass.Induction
import Tip.Pass.DropAttributes
import Tip.Pass.SplitFormulas
import Tip.Fresh
import Tip.Pass.Pipeline
import Options.Applicative
import Data.Monoid ((<>))
data StandardPass
= SimplifyGently
| SimplifyAggressively
| RemoveNewtype
| UncurryTheory
| NegateConjecture
| TypeSkolemConjecture
| IntToNat
| IntAxioms
| SortsToNat
| SplitConjecture
| SkolemiseConjecture
| IfToBoolOp
| BoolOpToIf
| RemoveBuiltinBool
| BoolOpLift
| AddMatch
| CommuteMatch
| RemoveMatch
| MakeMatchExhaustive
| CollapseEqual
| RemoveAliases
| LambdaLift
| LetLift
| AxiomatizeLambdas
| AxiomatizeFuncdefs
| AxiomatizeFuncdefs2
| AxiomatizeDatadecls
| AxiomatizeDatadeclsUEQ
| Monomorphise Bool Int
| CSEMatch
| CSEMatchWhy3
| EliminateDeadCode
| MakeConjecture Int
| SelectConjecture Int
| ProvedConjecture Int
| DeleteConjecture Int
| DropSuffix String
| UniqLocals
| DropAttributes
| DropAttribute String
| Induction [Int]
| RecursionInduction Int [Int]
| SplitFormulas
deriving (Eq,Ord,Show,Read)
instance Pass StandardPass where
passName = show
runPass p = case p of
SimplifyGently -> single $ simplifyTheory gently
SimplifyAggressively -> single $ simplifyTheory aggressively
RemoveNewtype -> single $ return . removeNewtype
UncurryTheory -> single $ uncurryTheory
NegateConjecture -> (return . splitConjecture) `followedBy` single negateConjecture
TypeSkolemConjecture -> single $ typeSkolemConjecture ModeConjecture
IntToNat -> single $ intToNat
IntAxioms -> single $ intAxioms
SortsToNat -> single $ sortsToNat
SplitConjecture -> return . splitConjecture
SkolemiseConjecture -> skolemiseConjecture
IfToBoolOp -> single $ return . ifToBoolOp
BoolOpToIf -> single $ return . theoryBoolOpToIf
RemoveBuiltinBool -> runPass BoolOpToIf `followedBy` single removeBuiltinBool
BoolOpLift -> single $ boolOpLift
AddMatch -> single $ addMatch
CommuteMatch -> single $ commuteMatchTheory
RemoveMatch -> single $ removeMatch
MakeMatchExhaustive -> single $ makeMatchExhaustive
CollapseEqual -> single $ return . removeAliases . collapseEqual
RemoveAliases -> single $ return . removeAliases
LambdaLift -> single $ lambdaLift
LetLift -> single $ letLift
AxiomatizeLambdas -> single lambdaLift `followedBy` single axiomatizeLambdas
AxiomatizeFuncdefs -> single (return . axiomatizeFuncdefs)
AxiomatizeFuncdefs2 -> single (return . axiomatizeFuncdefs2)
AxiomatizeDatadecls -> runPass RemoveMatch `followedBy` single (axiomatizeDatadecls False)
AxiomatizeDatadeclsUEQ -> runPass RemoveMatch `followedBy` single (axiomatizeDatadecls True)
Monomorphise b n -> single (typeSkolemConjecture ModeMonomorphise) `followedBy` single (monomorphise b n)
CSEMatch -> single $ return . cseMatch cseMatchNormal
CSEMatchWhy3 -> single $ return . cseMatch cseMatchWhy3
EliminateDeadCode -> single $ return . eliminateDeadCode
MakeConjecture n -> single $ return . makeConjecture n
SelectConjecture n -> single $ return . selectConjecture n
ProvedConjecture n -> single $ return . provedConjecture n
DeleteConjecture n -> single $ return . deleteConjecture n
DropSuffix cs -> single $ dropSuffix cs
UniqLocals -> single $ uniqLocals
DropAttributes -> single $ return . dropAttributes
DropAttribute attr -> single $ return . dropAttribute attr
Induction coords -> induction coords
RecursionInduction fn xsns -> recursionInduction fn xsns
SplitFormulas -> single $ return . splitFormulas
where
single m thy = do x <- m thy; return [x]
f `followedBy` g = \thy -> do
thys <- f thy
fmap concat (mapM g thys)
parsePass =
foldr (<|>) empty [
unitPass SimplifyGently $
help "Simplify the problem, trying not to increase its size",
unitPass SimplifyAggressively $
help "Simplify the problem even at the cost of making it bigger",
unitPass RemoveNewtype $
help "Eliminate single-constructor, single-argument datatypes",
unitPass UncurryTheory $
help "Eliminate unnecessary use of higher-order functions",
unitPass NegateConjecture $
help "Transform the goal into a negated conjecture",
unitPass TypeSkolemConjecture $
help "Skolemise the types in the conjecture",
unitPass IntToNat $
help "Replace builtin Integer with a a unary nat datatype nat (if only ordering is used)",
unitPass IntAxioms $
help "Add axioms for integers",
unitPass SortsToNat $
help "Replace abstract sorts with a unary nat datatype.",
unitPass SplitConjecture $
help "Puts goals in separate theories",
unitPass SkolemiseConjecture $
help "Skolemise the conjecture",
unitPass IfToBoolOp $
help "Replace if-then-else by and/or where appropriate",
unitPass BoolOpToIf $
help "Replace and/or by if-then-else",
unitPass RemoveBuiltinBool $
help "Replace the builtin bool with a datatype",
unitPass BoolOpLift $
help "Lift boolean operators to the top level",
unitPass AddMatch $
help "Transform SMTLIB-style datatype access into pattern matching",
unitPass CommuteMatch $
help "Eliminate matches that occur in weird positions (e.g. as arguments to function calls)",
unitPass RemoveMatch $
help "Replace pattern matching with SMTLIB-style datatype access",
unitPass MakeMatchExhaustive $
help "Fill in any missing cases by returning an unspecified constant",
unitPass CollapseEqual $
help "Merge functions with equal definitions",
unitPass RemoveAliases $
help "Eliminate any function defined simply as f(x) = g(x)",
unitPass LambdaLift $
help "Lift lambdas to the top level",
unitPass LetLift $
help "Lift let-expressions to the top level",
unitPass AxiomatizeLambdas $
help "Eliminate lambdas by axiomatisation",
unitPass AxiomatizeFuncdefs $
help "Transform function definitions to axioms in the most straightforward way",
unitPass AxiomatizeFuncdefs2 $
help "Transform function definitions to axioms with left hand side pattern matching instead of match",
unitPass AxiomatizeDatadecls $
help "Transform data declarations to axioms",
unitPass AxiomatizeDatadeclsUEQ $
help "Transform data declarations to unit equality axioms (incomplete)",
unitPass SplitFormulas $
help "Split formulas into simpler parts",
flag' () (long "monomorphise" <> help "Monomorphise the problem.") *> pure (Monomorphise False 1),
fmap (Monomorphise False) $
option auto $
long "monomorphise-with-rounds" <>
metavar "NUMBER-OF-ROUNDS" <>
help "Monomorphise the problem. When more rounds are run, more instances are generated.",
unitPass CSEMatch $
help "Perform CSE on match scrutinees",
unitPass CSEMatchWhy3 $
help "Aggressively perform CSE on match scrutinees (helps Why3's termination checker)",
unitPass EliminateDeadCode $
help "Dead code elimination (doesn't work on dead recursive functions)",
fmap MakeConjecture $
option auto $
long "make-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Make an assert into an assert-not",
fmap SelectConjecture $
option auto $
long "select-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Choose a particular conjecture from the problem",
fmap ProvedConjecture $
option auto $
long "proved-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Mark a particular conjecture as proved",
fmap DeleteConjecture $
option auto $
long "delete-conjecture" <>
metavar "CONJECTURE-NUMBER" <>
help "Delete a particular conjecture",
fmap DropSuffix $
option str $
long "drop-suffix" <>
metavar "SUFFIX-CHARS" <>
help "Drop the suffix delimited by some character set",
unitPass UniqLocals $
help "Make all local variables unique",
unitPass DropAttributes $
help "Remove all attributes (e.g. :keep) from declarations",
fmap DropAttribute $
option str $
long "drop-attribute" <>
metavar "NAME" <>
help "Remove the given attribute from declarations",
fmap Induction $
option auto $
long "induction" <>
metavar "VAR-COORD" <>
help "Perform induction on the variable coordinates",
fmap (uncurry RecursionInduction) $
option auto $
long "ri" <>
metavar "COORDS" <>
help "Perform recursion induction"
]
|
e975865c29c28b5f950bc9c2e0d2b3e30b3fbb42215095108fb614015e12b4eb | slyrus/clem | metaclasses.lisp |
(in-package :clem)
;;; Taken from KMR's clsql package
(defun remove-keyword-arg (arglist akey)
(let ((mylist arglist)
(newlist ()))
(labels ((pop-arg (alist)
(let ((arg (pop alist))
(val (pop alist)))
(unless (equal arg akey)
(setf newlist (append (list arg val) newlist)))
(when alist (pop-arg alist)))))
(pop-arg mylist))
newlist))
;;; Also taken from KMR's clsql package
(declaim (inline delistify-dsd))
(defun delistify-dsd (list)
"Some MOPs, like openmcl 0.14.2, cons attribute values in a list."
(if (and (listp list) (null (cdr list)))
(car list)
list))
;;; Taken from util so I don't need to include it here
(defun insert-before (new old list)
(labels ((build-list (old c &optional newlist)
(if c
(if (eq old (car c))
(append (reverse (cdr c)) (cons (car c) (cons new newlist)))
(build-list old (cdr c) (cons (car c) newlist)))
(cons new newlist))))
(reverse (build-list old list))))
(defun fill-slot-from-ancestor (slot class)
(let ((ancestor (find-if #'(lambda (anc)
(when (slot-exists-p anc slot)
(slot-boundp anc slot)))
(cdr (compute-class-precedence-list class)))))
(when ancestor
(setf (slot-value class slot) (slot-value ancestor slot)))))
(defun fill-standard-matrix-class-slots-from-ancestors (class &rest all-keys)
(mapcar #'(lambda (x)
(let ((name (slot-definition-name x))
(initargs (slot-definition-initargs x)))
(unless (getf (car all-keys) (car initargs))
(fill-slot-from-ancestor name class))))
(standard-matrix-class-slots class)))
;;; NOTE: don't use accessors here as they will return a list!
;;; at least on SBCL
(defclass standard-matrix-class (standard-class)
((element-type :initarg :element-type)
(accumulator-type :initarg :accumulator-type)
(specialized-array :initarg :specialized-array :initform nil)
(val-format :initarg :val-format :initform nil)
(minval :initarg :minval)
(maxval :initarg :maxval)))
(let ((smc (find-class 'standard-matrix-class)))
(defun standard-matrix-class-p (class)
(subtypep (class-of class) smc)))
(defun standard-matrix-class-precedence-list (class)
(remove-if-not
#'(lambda (x) (standard-matrix-class-p x))
(class-precedence-list class)))
(defun standard-matrix-class-slots (class)
(let ((slots) (slot-names))
(mapcar #'(lambda (x)
(mapcar #'(lambda (y)
(unless (member (slot-definition-name y)
slot-names)
(push y slots)
(push (slot-definition-name y)
slot-names)))
(class-direct-slots (class-of x))))
(standard-matrix-class-precedence-list class))
slots))
(defgeneric element-type (smc)
(:documentation "the type of the elements of instances
of this matrix class"))
(defmethod element-type ((smc standard-matrix-class))
(car (slot-value smc 'element-type)))
(defgeneric accumulator-type (smc)
(:documentation "the type of the result of various mathematical
operations on instances of this matrix class. needs work."))
(defmethod accumulator-type ((smc standard-matrix-class))
(car (slot-value smc 'accumulator-type)))
;;; FIXME! This is a hack to get around the fact that
;;; if we have a say, integer-matrix class, we can't
;;; make certain declarations. this needs to be fixed
;;; and hopefully removed
(defgeneric specialized-array-p (smc))
(defmethod specialized-array-p ((smc standard-matrix-class))
(car (slot-value smc 'specialized-array)))
(defgeneric val-format (smc)
(:documentation "the format string used to print out
element values of instances of this matrix class"))
(defmethod val-format ((smc standard-matrix-class))
(car (slot-value smc 'val-format)))
;;; FIXME this name is _way_ too close to min-val. Should
;;; be something like min-allowed-value or some such.
;;; also should be enforced more places if we're going to
;;; really use this!
(defgeneric minval (smc)
(:documentation "the minimum value allowed by instances
of this matrix class."))
(defmethod minval ((smc standard-matrix-class))
(car (slot-value smc 'minval)))
;;; FIXME this name is _way_ too close to max-val. Should
;;; be something like max-allowed-value or some such.
;;; also should be enforced more places if we're going to
;;; really use this!
(defgeneric maxval (smc)
(:documentation "the maximum value allowed by instances
of this matrix class."))
(defmethod maxval ((smc standard-matrix-class))
(car (slot-value smc 'maxval)))
;;;
;;; Need validate-superclass for some reason. Read AMOP and fix this note
;;;
(defmethod validate-superclass ((c1 standard-matrix-class) (c2 standard-class))
t)
(defmethod validate-superclass ((c1 standard-class) (c2 standard-matrix-class))
t)
(defun add-root-class (root-class direct-superclasses)
(if (member root-class direct-superclasses)
direct-superclasses
(insert-before root-class
(car (class-direct-superclasses root-class))
direct-superclasses)))
(defclass typed-mixin ()
((specialized-array :allocation :class :accessor specialized-array-p :initform nil)))
;;; FIXME this needs work
(defgeneric set-val-fit (m i j v &key truncate))
(defmethod set-val-fit ((m typed-mixin) i j v &key (truncate nil))
(set-val m i j (if truncate (truncate v) v)))
(defgeneric map-matrix-fit (f a))
(defmethod map-matrix-fit (f (a typed-mixin))
(destructuring-bind (m n) (dim a)
(dotimes (i m)
(dotimes (j n)
(set-val-fit a i j (funcall f a i j)))))
a)
(defmethod initialize-instance :around
((class standard-matrix-class) &rest all-keys &key direct-superclasses &allow-other-keys)
(let ((root-class (find-class 'typed-mixin))
(mc (find-class 'standard-matrix-class)))
(if (and root-class (not (equal class root-class)))
(if (member-if #'(lambda (super)
(eq (class-of super) mc)) direct-superclasses)
(call-next-method)
(apply #'call-next-method class
:direct-superclasses
(add-root-class root-class direct-superclasses)
(remove-keyword-arg all-keys :direct-superclasses)))
(call-next-method))
#+lispworks (finalize-inheritance root-class))
(finalize-inheritance class)
(fill-standard-matrix-class-slots-from-ancestors class all-keys)
class)
(defmethod reinitialize-instance :around
((class standard-matrix-class) &rest all-keys &key direct-superclasses &allow-other-keys)
(let ((root-class (find-class 'typed-mixin))
(mc (find-class 'standard-matrix-class)))
(if (and root-class (not (equal class root-class)))
(if (member-if #'(lambda (super)
(eq (class-of super) mc)) direct-superclasses)
(call-next-method)
(apply #'call-next-method class
:direct-superclasses
(add-root-class root-class direct-superclasses)
(remove-keyword-arg all-keys :direct-superclasses)))
(call-next-method)))
(finalize-inheritance class)
(fill-standard-matrix-class-slots-from-ancestors class all-keys)
class)
| null | https://raw.githubusercontent.com/slyrus/clem/5eb055bb3f45840b24fd44825b975aa36bd6d97c/src/metaclasses.lisp | lisp | Taken from util so I don't need to include it here
NOTE: don't use accessors here as they will return a list!
FIXME! This is a hack to get around the fact that
if we have a say, integer-matrix class, we can't
make certain declarations. this needs to be fixed
and hopefully removed
be something like min-allowed-value or some such.
also should be enforced more places if we're going to
really use this!
be something like max-allowed-value or some such.
also should be enforced more places if we're going to
really use this!
Need validate-superclass for some reason. Read AMOP and fix this note
|
(in-package :clem)
Taken from KMR 's clsql package
(defun remove-keyword-arg (arglist akey)
(let ((mylist arglist)
(newlist ()))
(labels ((pop-arg (alist)
(let ((arg (pop alist))
(val (pop alist)))
(unless (equal arg akey)
(setf newlist (append (list arg val) newlist)))
(when alist (pop-arg alist)))))
(pop-arg mylist))
newlist))
Also taken from KMR 's clsql package
(declaim (inline delistify-dsd))
(defun delistify-dsd (list)
"Some MOPs, like openmcl 0.14.2, cons attribute values in a list."
(if (and (listp list) (null (cdr list)))
(car list)
list))
(defun insert-before (new old list)
(labels ((build-list (old c &optional newlist)
(if c
(if (eq old (car c))
(append (reverse (cdr c)) (cons (car c) (cons new newlist)))
(build-list old (cdr c) (cons (car c) newlist)))
(cons new newlist))))
(reverse (build-list old list))))
(defun fill-slot-from-ancestor (slot class)
(let ((ancestor (find-if #'(lambda (anc)
(when (slot-exists-p anc slot)
(slot-boundp anc slot)))
(cdr (compute-class-precedence-list class)))))
(when ancestor
(setf (slot-value class slot) (slot-value ancestor slot)))))
(defun fill-standard-matrix-class-slots-from-ancestors (class &rest all-keys)
(mapcar #'(lambda (x)
(let ((name (slot-definition-name x))
(initargs (slot-definition-initargs x)))
(unless (getf (car all-keys) (car initargs))
(fill-slot-from-ancestor name class))))
(standard-matrix-class-slots class)))
at least on SBCL
(defclass standard-matrix-class (standard-class)
((element-type :initarg :element-type)
(accumulator-type :initarg :accumulator-type)
(specialized-array :initarg :specialized-array :initform nil)
(val-format :initarg :val-format :initform nil)
(minval :initarg :minval)
(maxval :initarg :maxval)))
(let ((smc (find-class 'standard-matrix-class)))
(defun standard-matrix-class-p (class)
(subtypep (class-of class) smc)))
(defun standard-matrix-class-precedence-list (class)
(remove-if-not
#'(lambda (x) (standard-matrix-class-p x))
(class-precedence-list class)))
(defun standard-matrix-class-slots (class)
(let ((slots) (slot-names))
(mapcar #'(lambda (x)
(mapcar #'(lambda (y)
(unless (member (slot-definition-name y)
slot-names)
(push y slots)
(push (slot-definition-name y)
slot-names)))
(class-direct-slots (class-of x))))
(standard-matrix-class-precedence-list class))
slots))
(defgeneric element-type (smc)
(:documentation "the type of the elements of instances
of this matrix class"))
(defmethod element-type ((smc standard-matrix-class))
(car (slot-value smc 'element-type)))
(defgeneric accumulator-type (smc)
(:documentation "the type of the result of various mathematical
opreations on instances of this matrix class. needs work."))
(defmethod accumulator-type ((smc standard-matrix-class))
(car (slot-value smc 'accumulator-type)))
(defgeneric specialized-array-p (smc))
(defmethod specialized-array-p ((smc standard-matrix-class))
(car (slot-value smc 'specialized-array)))
(defgeneric val-format (smc)
(:documentation "the format string used to print out
element values of instances of this matrix class"))
(defmethod val-format ((smc standard-matrix-class))
(car (slot-value smc 'val-format)))
FIXME this name is _ way _ too close to min - val . Should
(defgeneric minval (smc)
(:documentation "the minimum value allowed by instances
of this matrix class."))
(defmethod minval ((smc standard-matrix-class))
(car (slot-value smc 'minval)))
FIXME this name is _ way _ too close to . Should
(defgeneric maxval (smc)
(:documentation "the maximum value allowed by instances
of this matrix class."))
(defmethod maxval ((smc standard-matrix-class))
(car (slot-value smc 'maxval)))
(defmethod validate-superclass ((c1 standard-matrix-class) (c2 standard-class))
t)
(defmethod validate-superclass ((c1 standard-class) (c2 standard-matrix-class))
t)
(defun add-root-class (root-class direct-superclasses)
(if (member root-class direct-superclasses)
direct-superclasses
(insert-before root-class
(car (class-direct-superclasses root-class))
direct-superclasses)))
(defclass typed-mixin ()
((specialized-array :allocation :class :accessor specialized-array-p :initform nil)))
FIXME this needs work
(defgeneric set-val-fit (m i j v &key truncate))
(defmethod set-val-fit ((m typed-mixin) i j v &key (truncate nil))
(set-val m i j (if truncate (truncate v) v)))
(defgeneric map-matrix-fit (f a))
(defmethod map-matrix-fit (f (a typed-mixin))
(destructuring-bind (m n) (dim a)
(dotimes (i m)
(dotimes (j n)
(set-val-fit a i j (funcall f a i j)))))
a)
(defmethod initialize-instance :around
((class standard-matrix-class) &rest all-keys &key direct-superclasses &allow-other-keys)
(let ((root-class (find-class 'typed-mixin))
(mc (find-class 'standard-matrix-class)))
(if (and root-class (not (equal class root-class)))
(if (member-if #'(lambda (super)
(eq (class-of super) mc)) direct-superclasses)
(call-next-method)
(apply #'call-next-method class
:direct-superclasses
(add-root-class root-class direct-superclasses)
(remove-keyword-arg all-keys :direct-superclasses)))
(call-next-method))
#+lispworks (finalize-inheritance root-class))
(finalize-inheritance class)
(fill-standard-matrix-class-slots-from-ancestors class all-keys)
class)
(defmethod reinitialize-instance :around
((class standard-matrix-class) &rest all-keys &key direct-superclasses &allow-other-keys)
(let ((root-class (find-class 'typed-mixin))
(mc (find-class 'standard-matrix-class)))
(if (and root-class (not (equal class root-class)))
(if (member-if #'(lambda (super)
(eq (class-of super) mc)) direct-superclasses)
(call-next-method)
(apply #'call-next-method class
:direct-superclasses
(add-root-class root-class direct-superclasses)
(remove-keyword-arg all-keys :direct-superclasses)))
(call-next-method)))
(finalize-inheritance class)
(fill-standard-matrix-class-slots-from-ancestors class all-keys)
class)
|
1f2892aeab5151f4da70e3a66ae2e19bde022c79a422ad80bc17d4a764e23196 | flipstone/haskell-for-beginners | 1_getting_our_feet_wet_with_maybe.hs | -- Define the applyMaybe function so we can apply
-- functions that produce Maybe values to Maybe
-- values
--
applyMaybe :: Maybe a -> (a -> Maybe b) -> Maybe b
applyMaybe = undefined
-- Define a function that divides an integer by 2
-- if it is even, but produces Nothing if given an
-- odd integer.
--
byTwo :: Integer -> Maybe Integer
byTwo = undefined
-- Try out your byTwo function on 7 and 8
--
sevenByTwo = undefined
eightByTwo = undefined
-- Using your byTwo function and applyMaybe, define a
-- function that divides multiples of 4 by 4, producing
-- Nothing if the number is not a multiple of 4.
--
byFour = undefined
-- Try out your byFour function on 7,8, and 10
--
sevenByFour = undefined
eightByFour = undefined
tenByFour = undefined
| null | https://raw.githubusercontent.com/flipstone/haskell-for-beginners/e586a1f3ef08f21d5181171fe7a7b27057391f0b/problems/chapter_12/1_getting_our_feet_wet_with_maybe.hs | haskell | Define the applyMaybe function so we can apply
functions that produce Maybe values to Maybe
values
if it is even, but produces Nothing if given an
odd integer.
| applyMaybe :: Maybe a -> (a -> Maybe b) -> Maybe b
applyMaybe = undefined
Define a function that divides an integer by 2
byTwo :: Integer -> Maybe Integer
byTwo = undefined
Try out your byTwo function on 7 and 8
sevenByTwo = undefined
eightByTwo = undefined
Using your byTwo function and applyMaybe , define a
function that divides multiples of 4 by 4 , producing
Nothing if the number is not a multiple of 4 .
byFour = undefined
Try out your byFour function on 7,8 , and 10
sevenByFour = undefined
eightByFour = undefined
tenByFour = undefined
|
1f6fc79f27cadcf8dafcd7575ba1bab2073fddd662f78642dd1fceea0eccad13 | kuberlog/holon | tree-cybernetics.lisp | (ql:quickload :clack)
(in-package :holon)
(import :holon.daemons)
(defparameter *tree-cybernetics* (let (( tree-inc (make-instance 'Corporation)))
;; initialize tree cybernetics here
(setf (slot-value tree-inc 'share-holders) 'kuberlog)
(setf (slot-value tree-inc 'urls) '())
(setf (slot-value tree-inc 'urls) (holon.daemons:products))
(setf (slot-value tree-inc 'services) '((code explainations)))
(setf (slot-value tree-inc 'customers) '())
tree-inc))
(defun find-daemon (name)
(find name
(holon.daemons:products)
:test #'(lambda (name daemon)
(equal name (string-downcase (string (holon.daemons:name daemon)))))))
(defun app* (req)
(if (equal "/" (getf req :PATH-INFO))
`(200 (:content-type "text/plain")
(,(format nil "~a" (mapcar #'(lambda (daemon) (holon.daemons:name daemon)) (holon.daemons:products)))))
(let* ((daemon (find-daemon(remove #\/ (getf req :PATH-INFO)))))
(if daemon
`(200 (:content-type "text/plain") (,(holon.daemons:print-daemon daemon)))
'(200 (:content-type "text/plain") ("not found"))))))
(defun app (req)
(app* req))
(defparameter server (clack:clackup #'app :port 1111))
| null | https://raw.githubusercontent.com/kuberlog/holon/2c004239f82fce4aa6ee6512df6fc4dd391139f3/lisp/tree-cybernetics.lisp | lisp | initialize tree cybernetics here | (ql:quickload :clack)
(in-package :holon)
(import :holon.daemons)
(defparameter *tree-cybernetics* (let (( tree-inc (make-instance 'Corporation)))
(setf (slot-value tree-inc 'share-holders) 'kuberlog)
(setf (slot-value tree-inc 'urls) '())
(setf (slot-value tree-inc 'urls) (holon.daemons:products))
(setf (slot-value tree-inc 'services) '((code explainations)))
(setf (slot-value tree-inc 'customers) '())
tree-inc))
(defun find-daemon (name)
(find name
(holon.daemons:products)
:test #'(lambda (name daemon)
(equal name (string-downcase (string (holon.daemons:name daemon)))))))
(defun app* (req)
(if (equal "/" (getf req :PATH-INFO))
`(200 (:content-type "text/plain")
(,(format nil "~a" (mapcar #'(lambda (daemon) (holon.daemons:name daemon)) (holon.daemons:products)))))
(let* ((daemon (find-daemon(remove #\/ (getf req :PATH-INFO)))))
(if daemon
`(200 (:content-type "text/plain") (,(holon.daemons:print-daemon daemon)))
'(200 (:content-type "text/plain") ("not found"))))))
(defun app (req)
(app* req))
(defparameter server (clack:clackup #'app :port 1111))
|
4146265f1926c613f0e150d80a2d7184c692695a50c847c70b0e675dea9674d6 | chrisdone/ircbrowse | XML.hs | module Snap.App.XML where
import Text.XML.Light
import Snap.App
import qualified Data.Text as T
-- | Output the given XML element.
outputXML :: Element -> Controller c s ()
outputXML = writeText . T.pack . showElement
| null | https://raw.githubusercontent.com/chrisdone/ircbrowse/d956aaf185500792224b6b0c209eb67c179322a4/upstream/snap-app/src/Snap/App/XML.hs | haskell | | Output the given XML element. | module Snap.App.XML where
import Text.XML.Light
import Snap.App
import qualified Data.Text as T
outputXML :: Element -> Controller c s ()
outputXML = writeText . T.pack . showElement
|
f5eae9a30bf52546b722a6f806c9b5f825ed83e722a535320ece721dd62f2b16 | Shirakumo/speechless | package.lisp | (defpackage #:org.shirakumo.fraf.speechless.components
(:use #:cl)
(:shadow #:go #:speed #:setf #:eval #:map)
(:local-nicknames
(#:markless #:org.shirakumo.markless)
(#:components #:org.shirakumo.markless.components))
(:export
#:manual-newline
#:jump
#:placeholder
#:form
#:emote
#:conditional-part
#:choices
#:conditional
#:clauses
#:source
#:name
#:go
#:speed
#:camera-instruction
#:duration
#:shake
#:camera
#:arguments
#:move
#:setf
#:place
#:eval))
(defpackage #:org.shirakumo.fraf.speechless.syntax
(:use #:cl)
(:local-nicknames
(#:components #:org.shirakumo.fraf.speechless.components)
(#:mcomponents #:org.shirakumo.markless.components)
(#:markless #:org.shirakumo.markless))
(:export
#:*default-directives*
#:*default-instruction-types*
#:parser
#:jump
#:label
#:conditional
#:source
#:placeholder
#:emote
#:part-separator
#:conditional-part
#:manual-newline))
(defpackage #:org.shirakumo.fraf.speechless
(:use #:cl)
(:shadow #:compile #:eval #:disassemble)
(:local-nicknames
(#:components #:org.shirakumo.fraf.speechless.components)
(#:mcomponents #:org.shirakumo.markless.components)
(#:markless #:org.shirakumo.markless))
;; diff.lisp
(:export
#:diff-mismatch
#:location
#:message
#:differ
#:define-diff
#:diff
#:localisation-differ)
;; instructions.lisp
(:export
#:instruction
#:index
#:label
#:noop
#:source
#:name
#:jump
#:target
#:conditional
#:clauses
#:emote
#:pause
#:placeholder
#:choose
#:commit-choice
#:confirm
#:clear
#:begin-mark
#:end-mark
#:text
#:eval)
;; compiler.lisp
(:export
#:parse
#:compile
#:disassemble
#:wrap-lexenv
#:assembly
#:instructions
#:next-index
#:emit
#:walk
#:define-simple-walker
#:define-markup-walker
#:resolved-target)
;; optimizers.lisp
(:export
#:pass
#:run-pass
#:compile*
#:optimize-instructions
#:jump-resolution-pass
#:noop-elimination-pass)
;; printer.lisp
(:export
#:speechless
#:highlighted
#:highlight)
;; vm.lisp
(:export
#:request
#:input-request
#:target-request
#:target
#:text-request
#:text
#:markup
#:choice-request
#:choices
#:targets
#:confirm-request
#:clear-request
#:emote-request
#:emote
#:pause-request
#:duration
#:source-request
#:end-request
#:vm
#:instructions
#:text-buffer
#:choices
#:markup
#:execute
#:text
#:pop-text
#:run
#:reset
#:resume
#:suspend))
| null | https://raw.githubusercontent.com/Shirakumo/speechless/86359b6592e8ff0bb2d4baff0bdeeac0d46c2bcf/package.lisp | lisp | diff.lisp
instructions.lisp
compiler.lisp
optimizers.lisp
printer.lisp
vm.lisp | (defpackage #:org.shirakumo.fraf.speechless.components
(:use #:cl)
(:shadow #:go #:speed #:setf #:eval #:map)
(:local-nicknames
(#:markless #:org.shirakumo.markless)
(#:components #:org.shirakumo.markless.components))
(:export
#:manual-newline
#:jump
#:placeholder
#:form
#:emote
#:conditional-part
#:choices
#:conditional
#:clauses
#:source
#:name
#:go
#:speed
#:camera-instruction
#:duration
#:shake
#:camera
#:arguments
#:move
#:setf
#:place
#:eval))
(defpackage #:org.shirakumo.fraf.speechless.syntax
(:use #:cl)
(:local-nicknames
(#:components #:org.shirakumo.fraf.speechless.components)
(#:mcomponents #:org.shirakumo.markless.components)
(#:markless #:org.shirakumo.markless))
(:export
#:*default-directives*
#:*default-instruction-types*
#:parser
#:jump
#:label
#:conditional
#:source
#:placeholder
#:emote
#:part-separator
#:conditional-part
#:manual-newline))
(defpackage #:org.shirakumo.fraf.speechless
(:use #:cl)
(:shadow #:compile #:eval #:disassemble)
(:local-nicknames
(#:components #:org.shirakumo.fraf.speechless.components)
(#:mcomponents #:org.shirakumo.markless.components)
(#:markless #:org.shirakumo.markless))
(:export
#:diff-mismatch
#:location
#:message
#:differ
#:define-diff
#:diff
#:localisation-differ)
(:export
#:instruction
#:index
#:label
#:noop
#:source
#:name
#:jump
#:target
#:conditional
#:clauses
#:emote
#:pause
#:placeholder
#:choose
#:commit-choice
#:confirm
#:clear
#:begin-mark
#:end-mark
#:text
#:eval)
(:export
#:parse
#:compile
#:disassemble
#:wrap-lexenv
#:assembly
#:instructions
#:next-index
#:emit
#:walk
#:define-simple-walker
#:define-markup-walker
#:resolved-target)
(:export
#:pass
#:run-pass
#:compile*
#:optimize-instructions
#:jump-resolution-pass
#:noop-elimination-pass)
(:export
#:speechless
#:highlighted
#:highlight)
(:export
#:request
#:input-request
#:target-request
#:target
#:text-request
#:text
#:markup
#:choice-request
#:choices
#:targets
#:confirm-request
#:clear-request
#:emote-request
#:emote
#:pause-request
#:duration
#:source-request
#:end-request
#:vm
#:instructions
#:text-buffer
#:choices
#:markup
#:execute
#:text
#:pop-text
#:run
#:reset
#:resume
#:suspend))
|
1ae6ca87eca07da9d52affd9ce8aed2adc24e1432844996d28a35bd62e2259a8 | scicloj/clojisr-examples | violin.clj | (ns clojisr-examples.graph-gallery.violin
(:require [notespace.v2.note :as note
:refer [note note-void note-md note-as-md note-hiccup note-as-hiccup]]
[clojisr.v1.r :as r]))
(def target-path (notespace.v2.note/ns->out-dir *ns*))
(note-md "# [R Graph Gallery](-graph-gallery.com/) - [Violin](-graph-gallery.com/violin.html)")
(note-md "Code from [project](-graph-gallery.com/) by Yan Holtz")
(note-md "You can find here only translated code, please refer [original text](-graph-gallery.com/violin.html)")
(note-md "## Setup")
(note-void (require '[clojisr.v1.r :as r :refer [r+ r* r r->clj clj->r bra colon]]
'[clojisr.v1.require :refer [require-r]]
'[clojisr.v1.applications.plotting :refer [plot->file]]))
(note-void (require-r '[base :as base :refer [$ <-]]
'[utils :as u]
'[stats :as stats]
'[graphics :as g]
'[grDevices :as dev]
'[tidyverse]
'[knitr :as knitr]
'[dplyr :as dplyr]
'[tidyr :as tidyr]
'[ggplot2 :as gg]
'[viridis :as viridis]
'[forcats]
'[extrafont]
'[hrbrthemes :as th]
'[datasets :refer :all]))
(note-md "WARNING: To use `hrbrthemes` you may want to:
1. Install Arial Narrow or Roboto Condensed fonts.
2. Register system fonts with `extrafont::font_import()` or `(r.extrafont/font_import)`
3. Fix font database as described in [here](#issuecomment-299692978)
4. Call `hrbrthemes::import_roboto_condensed()` or `(th/import_roboto_condensed)`
5. Restart session")
(note-void (r.extrafont/loadfonts :quiet true))
(note-void (base/options :knitr.table.format "html"))
(note-void (base/set-seed 7337))
(note-md "## GGPlot2")
(note-md "### [Most basic violin chart](-graph-gallery.com/95-violin-plot-with-ggplot2.html)")
(note-void (def data-random (base/data-frame :name [(repeat 500 "A")
(repeat 500 "B")
(repeat 500 "B")
(repeat 20 "C")
(repeat 100 "D")]
:value [(stats/rnorm 500 10 5)
(stats/rnorm 500 13 1)
(stats/rnorm 500 18 1)
(stats/rnorm 20 25 4)
(stats/rnorm 100 12 1)])))
(note-void (plot->file (str target-path "a.png")
(r+ (gg/ggplot data-random (gg/aes :x 'name :y 'value :fill 'name))
(gg/geom_violin))))
(note-hiccup [:image {:src "a.png"}])
(note-void (def data-wide (bra iris nil (colon 1 4))))
(note-as-md (-> data-wide
(u/head)
(knitr/kable)
(r->clj)
(first)))
(note-void (plot->file (str target-path "b.png")
(let [data (-> data-wide
(tidyr/gather :key "MesureType" :value "Val"))]
(r+ (gg/ggplot data (gg/aes :x 'MesureType :y 'Val :fill 'MesureType))
(gg/geom_violin)))))
(note-hiccup [:image {:src "b.png"}])
(note-md "### [Control group order](-graph-gallery.com/267-reorder-a-variable-in-ggplot2.html)")
(note-void (def mpg (base/$<- gg/mpg 'class (base/with gg/mpg '(reorder class hwy median)))))
(note-void (plot->file (str target-path "c.png")
(r+ (gg/ggplot mpg (gg/aes :x 'class :y 'hwy :fill 'class))
(gg/geom_violin)
(gg/xlab "")
(gg/theme :legend.position "none"))))
(note-hiccup [:image {:src "c.png"}])
(note-md "### [Horizontal version](-graph-gallery.com/violin_horizontal_ggplot2.html)")
(note-void (def data (-> ""
(u/read-table :header true :sep ",")
(tidyr/gather :key "text" :value "value")
(dplyr/mutate :text '(gsub "\\\\." " " text))
(dplyr/mutate :value '(round (as.numeric value) 0))
(dplyr/filter '(%in% text ["Almost Certainly" "Very Good Chance" "We Believe" "Likely"
"About Even" "Little Chance" "Chances Are Slight" "Almost No Chance"]))
(dplyr/mutate :text '(fct_reorder text value)))))
(note-void (plot->file (str target-path "d.png")
(r+ (gg/ggplot data (gg/aes :x 'text :y 'value :color 'text :fill 'text))
(gg/geom_violin :width 2.1 :size 0.2)
(viridis/scale_fill_viridis :discrete true)
(viridis/scale_color_viridis :discrete true)
(th/theme_ipsum_rc)
(gg/theme :legend.position "none")
(gg/coord_flip)
(gg/xlab "")
(gg/ylab "Assigned Probability (%)"))))
(note-hiccup [:image {:src "d.png"}])
(note-md "### [Violin + boxplot + sample size](-graph-gallery.com/violin_and_boxplot_ggplot2.html)")
(note-void (def sample-size (-> data-random
(dplyr/group_by 'name)
(dplyr/summarize :num '(n)))))
(note-void (plot->file (str target-path "e.png")
(let [data (-> data-random
(dplyr/left_join sample-size)
(dplyr/mutate :myaxis '(paste0 name "\\\n" "n=" num)))]
(r+ (gg/ggplot data (gg/aes :x 'myaxis :y 'value :fill 'name))
(gg/geom_violin :width 1.4)
(gg/geom_boxplot :width 0.1 :color "grey" :alpha 0.2)
(viridis/scale_fill_viridis :discrete true)
(th/theme_ipsum_rc)
(gg/theme :legend.position "none" :plot.title (gg/element_text :size 11))
(gg/ggtitle "A Violing wrapping a boxplot")
(gg/xlab "")))))
(note-hiccup [:image {:src "e.png"}])
(note-md "### [Grouped violin chart](-graph-gallery.com/violin_grouped_ggplot2.html)")
(note-void (def data (-> ""
(u/read-table :header true :sep ",")
(dplyr/mutate :tip '(round (* (/ tip total_bill) 100) 1))
(dplyr/mutate :day '(fct_reorder day tip))
(dplyr/mutate :day '(factor day :levels ["Thur" "Fri" "Sat" "Sun"])))))
(note-void (plot->file (str target-path "f.png")
(r+ (gg/ggplot data (gg/aes :fill 'sex :y 'tip :x 'day))
(gg/geom_violin :position "dodge" :alpha 0.5 :outlier.colour "transparent")
(viridis/scale_fill_viridis :discrete true :name "")
(th/theme_ipsum_rc)
(gg/xlab "")
(gg/ylab "Tip (%)")
(gg/ylim 0 40))))
(note-hiccup [:image {:src "f.png"}])
(note-md "## Base R and Vioplot")
(note-md "### [Vioplot package](-graph-gallery.com/94-violin-plot.html)")
(note-void (require-r '[vioplot]))
(note-void (plot->file (str target-path "g.png")
(fn [] (let [treatment [(repeat 40 "A") (repeat 40 "B") (repeat 40 "C")]
value [(base/sample [2 3 4 5] 40 :replace true)
(base/sample [(colon 1 5) (colon 12 17)] 40 :replace true)
(base/sample (colon 1 7) 40 :replace true)]
data (base/data-frame :treatment treatment :value value)]
(base/with data '(vioplot (bra value (== treatment "A"))
(bra value (== treatment "B"))
(bra value (== treatment "C"))
:col (dev/rgb 0.1 0.4 0.7 0.7)
:names ["A" "B" "C"]))))))
(note-hiccup [:image {:src "g.png"}])
(comment (notespace.v2.note/compute-this-notespace!))
(comment (r/discard-all-sessions))
(comment (r "x11()"))
(comment (r "dev.off()"))
| null | https://raw.githubusercontent.com/scicloj/clojisr-examples/691c878b5916b8060d37a85af33fd338d353dfbf/src/clojisr_examples/graph_gallery/violin.clj | clojure | (ns clojisr-examples.graph-gallery.violin
(:require [notespace.v2.note :as note
:refer [note note-void note-md note-as-md note-hiccup note-as-hiccup]]
[clojisr.v1.r :as r]))
(def target-path (notespace.v2.note/ns->out-dir *ns*))
(note-md "# [R Graph Gallery](-graph-gallery.com/) - [Violin](-graph-gallery.com/violin.html)")
(note-md "Code from [project](-graph-gallery.com/) by Yan Holtz")
(note-md "You can find here only translated code, please refer [original text](-graph-gallery.com/violin.html)")
(note-md "## Setup")
(note-void (require '[clojisr.v1.r :as r :refer [r+ r* r r->clj clj->r bra colon]]
'[clojisr.v1.require :refer [require-r]]
'[clojisr.v1.applications.plotting :refer [plot->file]]))
(note-void (require-r '[base :as base :refer [$ <-]]
'[utils :as u]
'[stats :as stats]
'[graphics :as g]
'[grDevices :as dev]
'[tidyverse]
'[knitr :as knitr]
'[dplyr :as dplyr]
'[tidyr :as tidyr]
'[ggplot2 :as gg]
'[viridis :as viridis]
'[forcats]
'[extrafont]
'[hrbrthemes :as th]
'[datasets :refer :all]))
(note-md "WARNING: To use `hrbrthemes` you may want to:
1. Install Arial Narrow or Roboto Condensed fonts.
2. Register system fonts with `extrafont::font_import()` or `(r.extrafont/font_import)`
3. Fix font database as described in [here](#issuecomment-299692978)
4. Call `hrbrthemes::import_roboto_condensed()` or `(th/import_roboto_condensed)`
5. Restart session")
(note-void (r.extrafont/loadfonts :quiet true))
(note-void (base/options :knitr.table.format "html"))
(note-void (base/set-seed 7337))
(note-md "## GGPlot2")
(note-md "### [Most basic violin chart](-graph-gallery.com/95-violin-plot-with-ggplot2.html)")
(note-void (def data-random (base/data-frame :name [(repeat 500 "A")
(repeat 500 "B")
(repeat 500 "B")
(repeat 20 "C")
(repeat 100 "D")]
:value [(stats/rnorm 500 10 5)
(stats/rnorm 500 13 1)
(stats/rnorm 500 18 1)
(stats/rnorm 20 25 4)
(stats/rnorm 100 12 1)])))
(note-void (plot->file (str target-path "a.png")
(r+ (gg/ggplot data-random (gg/aes :x 'name :y 'value :fill 'name))
(gg/geom_violin))))
(note-hiccup [:image {:src "a.png"}])
(note-void (def data-wide (bra iris nil (colon 1 4))))
(note-as-md (-> data-wide
(u/head)
(knitr/kable)
(r->clj)
(first)))
(note-void (plot->file (str target-path "b.png")
(let [data (-> data-wide
(tidyr/gather :key "MesureType" :value "Val"))]
(r+ (gg/ggplot data (gg/aes :x 'MesureType :y 'Val :fill 'MesureType))
(gg/geom_violin)))))
(note-hiccup [:image {:src "b.png"}])
(note-md "### [Control group order](-graph-gallery.com/267-reorder-a-variable-in-ggplot2.html)")
(note-void (def mpg (base/$<- gg/mpg 'class (base/with gg/mpg '(reorder class hwy median)))))
(note-void (plot->file (str target-path "c.png")
(r+ (gg/ggplot mpg (gg/aes :x 'class :y 'hwy :fill 'class))
(gg/geom_violin)
(gg/xlab "")
(gg/theme :legend.position "none"))))
(note-hiccup [:image {:src "c.png"}])
(note-md "### [Horizontal version](-graph-gallery.com/violin_horizontal_ggplot2.html)")
(note-void (def data (-> ""
(u/read-table :header true :sep ",")
(tidyr/gather :key "text" :value "value")
(dplyr/mutate :text '(gsub "\\\\." " " text))
(dplyr/mutate :value '(round (as.numeric value) 0))
(dplyr/filter '(%in% text ["Almost Certainly" "Very Good Chance" "We Believe" "Likely"
"About Even" "Little Chance" "Chances Are Slight" "Almost No Chance"]))
(dplyr/mutate :text '(fct_reorder text value)))))
(note-void (plot->file (str target-path "d.png")
(r+ (gg/ggplot data (gg/aes :x 'text :y 'value :color 'text :fill 'text))
(gg/geom_violin :width 2.1 :size 0.2)
(viridis/scale_fill_viridis :discrete true)
(viridis/scale_color_viridis :discrete true)
(th/theme_ipsum_rc)
(gg/theme :legend.position "none")
(gg/coord_flip)
(gg/xlab "")
(gg/ylab "Assigned Probability (%)"))))
(note-hiccup [:image {:src "d.png"}])
(note-md "### [Violin + boxplot + sample size](-graph-gallery.com/violin_and_boxplot_ggplot2.html)")
(note-void (def sample-size (-> data-random
(dplyr/group_by 'name)
(dplyr/summarize :num '(n)))))
(note-void (plot->file (str target-path "e.png")
(let [data (-> data-random
(dplyr/left_join sample-size)
(dplyr/mutate :myaxis '(paste0 name "\\\n" "n=" num)))]
(r+ (gg/ggplot data (gg/aes :x 'myaxis :y 'value :fill 'name))
(gg/geom_violin :width 1.4)
(gg/geom_boxplot :width 0.1 :color "grey" :alpha 0.2)
(viridis/scale_fill_viridis :discrete true)
(th/theme_ipsum_rc)
(gg/theme :legend.position "none" :plot.title (gg/element_text :size 11))
(gg/ggtitle "A Violing wrapping a boxplot")
(gg/xlab "")))))
(note-hiccup [:image {:src "e.png"}])
(note-md "### [Grouped violin chart](-graph-gallery.com/violin_grouped_ggplot2.html)")
(note-void (def data (-> ""
(u/read-table :header true :sep ",")
(dplyr/mutate :tip '(round (* (/ tip total_bill) 100) 1))
(dplyr/mutate :day '(fct_reorder day tip))
(dplyr/mutate :day '(factor day :levels ["Thur" "Fri" "Sat" "Sun"])))))
(note-void (plot->file (str target-path "f.png")
(r+ (gg/ggplot data (gg/aes :fill 'sex :y 'tip :x 'day))
(gg/geom_violin :position "dodge" :alpha 0.5 :outlier.colour "transparent")
(viridis/scale_fill_viridis :discrete true :name "")
(th/theme_ipsum_rc)
(gg/xlab "")
(gg/ylab "Tip (%)")
(gg/ylim 0 40))))
(note-hiccup [:image {:src "f.png"}])
(note-md "## Base R and Vioplot")
(note-md "### [Vioplot package](-graph-gallery.com/94-violin-plot.html)")
(note-void (require-r '[vioplot]))
(note-void (plot->file (str target-path "g.png")
(fn [] (let [treatment [(repeat 40 "A") (repeat 40 "B") (repeat 40 "C")]
value [(base/sample [2 3 4 5] 40 :replace true)
(base/sample [(colon 1 5) (colon 12 17)] 40 :replace true)
(base/sample (colon 1 7) 40 :replace true)]
data (base/data-frame :treatment treatment :value value)]
(base/with data '(vioplot (bra value (== treatment "A"))
(bra value (== treatment "B"))
(bra value (== treatment "C"))
:col (dev/rgb 0.1 0.4 0.7 0.7)
:names ["A" "B" "C"]))))))
(note-hiccup [:image {:src "g.png"}])
(comment (notespace.v2.note/compute-this-notespace!))
(comment (r/discard-all-sessions))
(comment (r "x11()"))
(comment (r "dev.off()"))
|
|
923a639168766dbea45a6f109aa01d3f051d06dd952a8060d79afea5344ab3f3 | geocaml/ISO3166 | os.ml | let with_file path fn =
let ic = open_in path in
try fn ic with _ -> close_in ic
let with_lines path fn =
with_file path @@ fun ic ->
let rec aux () =
try
fn (input_line ic);
aux ()
with End_of_file -> ()
in
aux ()
let get_lines path =
let lines = ref [] in
let run () = with_lines path @@ fun line -> lines := line :: !lines in
run ();
List.rev !lines
| null | https://raw.githubusercontent.com/geocaml/ISO3166/1603a8ee8fe9b6c9476560bd9877de33842fb960/src/gen/os.ml | ocaml | let with_file path fn =
let ic = open_in path in
try fn ic with _ -> close_in ic
let with_lines path fn =
with_file path @@ fun ic ->
let rec aux () =
try
fn (input_line ic);
aux ()
with End_of_file -> ()
in
aux ()
let get_lines path =
let lines = ref [] in
let run () = with_lines path @@ fun line -> lines := line :: !lines in
run ();
List.rev !lines
|
|
7d14065608bf2e27a7dddb976192283af1c1bfb7c2515031be10701d626245e0 | McCLIM/McCLIM | medium.lisp | (in-package #:clim-tests)
(def-suite* :mcclim.drawing
:in :mcclim)
(test make-text-style.cache
"Test caching behavior of `make-text-style'."
(let ((climi::*text-style-hash-table* (make-hash-table :test #'eql)))
(let ((styles '()))
(flet ((test-style (family face size
&optional (family2 family) (face2 face) (size2 size))
(let ((first (make-text-style family face size))
(second (make-text-style family2 face2 size2)))
(push first styles)
(is (eq first second)))))
(test-style nil nil nil)
(test-style :serif :italic :normal)
(test-style :serif :roman 10
:serif :roman 10.0)
(test-style :serif '(:bold :italic) 10
:serif '(:italic :bold)))
(loop for (style1 . rest) on styles
do (loop for style2 in rest
do (is (not (eq style1 style2))))))))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/7c890f1ac79f0c6f36866c47af89398e2f05b343/Tests/drawing/medium.lisp | lisp | (in-package #:clim-tests)
(def-suite* :mcclim.drawing
:in :mcclim)
(test make-text-style.cache
"Test caching behavior of `make-text-style'."
(let ((climi::*text-style-hash-table* (make-hash-table :test #'eql)))
(let ((styles '()))
(flet ((test-style (family face size
&optional (family2 family) (face2 face) (size2 size))
(let ((first (make-text-style family face size))
(second (make-text-style family2 face2 size2)))
(push first styles)
(is (eq first second)))))
(test-style nil nil nil)
(test-style :serif :italic :normal)
(test-style :serif :roman 10
:serif :roman 10.0)
(test-style :serif '(:bold :italic) 10
:serif '(:italic :bold)))
(loop for (style1 . rest) on styles
do (loop for style2 in rest
do (is (not (eq style1 style2))))))))
|
|
901fa977a93a7727b92988f819a0a956948dde6749aa6da7c064a31f6156cee0 | fortytools/holumbus | Utility.hs | -- ----------------------------------------------------------------------------
{- |
Module : Holumbus.Utility
Copyright : Copyright (C) 2008 Timo B. Huebel
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: portable
Version : 0.1
Small utility functions which are probably useful somewhere else, too.
-}
-- ----------------------------------------------------------------------------
module Holumbus.Utility where
import Control.Exception (bracket)
import Data.Binary
import qualified Data.ByteString.Lazy as B
import Data.ByteString.Lazy.Char8 (pack)
import Data.Char
import Data.Digest.Pure.MD5
import qualified Data.IntMap as IM
import qualified Data.List as L
-- import Data.Maybe
import Holumbus.Index.Common
-- import qualified Holumbus.Index.Documents as DOC
import Numeric
import System.IO
import Text.XML.HXT.Core
-- ------------------------------------------------------------
-- | Split a string into seperate strings at a specific character sequence.
split :: Eq a => [a] -> [a] -> [[a]]
split _ [] = [[]]
split at w@(x:xs) = maybe ((x:r):rs) ((:) [] . split at) (L.stripPrefix at w)
where (r:rs) = split at xs
-- | Join with a seperating character sequence.
join :: Eq a => [a] -> [[a]] -> [a]
join = L.intercalate
-- | Removes leading and trailing whitespace from a string.
strip :: String -> String
strip = stripWith isSpace
-- | Removes leading whitespace from a string.
stripl :: String -> String
stripl = dropWhile isSpace
-- | Removes trailing whitespace from a string.
stripr :: String -> String
stripr = reverse . dropWhile isSpace . reverse
-- | Strip leading and trailing elements matching a predicate.
stripWith :: (a -> Bool) -> [a] -> [a]
stripWith f = reverse . dropWhile f . reverse . dropWhile f
-- | found on the haskell cafe mailing list
-- (<-cafe/2008-April/041970.html>).
-- Depends on bytestring >= 0.9.0.4 (?)
strictDecodeFile :: Binary a => FilePath -> IO a
strictDecodeFile f =
bracket (openBinaryFile f ReadMode) hClose $ \h -> do
c <- B.hGetContents h
return $! decode c
-- | partition the list of input data into a list of input data lists of
-- approximately the same specified length
partitionListByLength :: Int -> [a] -> [[a]]
partitionListByLength _ [] = []
partitionListByLength count l = [take count l] ++ (partitionListByLength count (drop count l))
-- | partition the list of input data into a list of a specified number of input data lists with
-- approximately the same length
partitionListByCount :: Int -> [a] -> [[a]]
partitionListByCount sublistCount list = partition sublistCount list
where
partition 0 _ = []
partition sublists l
= let next = ((length l) `div` sublists)
in if next == 0 then [l]
else [take next l] ++ partition (sublists -1) (drop next l)
-- | Escapes non-alphanumeric or space characters in a
escape :: String -> String
escape [] = []
escape (c:cs)
= if isAlphaNum c || isSpace c
then c : escape cs
else '%' : showHex (fromEnum c) "" ++ escape cs
-- | Computes a filename for a local temporary file.
-- Since filename computation might depend on the DocId it is also submitted
-- as a parameter
tmpFile :: DocId -> URI -> String
tmpFile _ u = let f = escape u
in if (length f) > 255
then (show . md5 . pack) f
else f
-- | Helper function to replace original URIs by the corresponding pathes for
-- the locally saved files
tmpDocs :: HolDocuments d a => String -> d a -> d a
tmpDocs tmpPath =
fromMap
. (IM.mapWithKey (\docId doc -> Document (title doc) (tmpPath ++ (tmpFile docId (uri doc))) Nothing))
. toMap
-- | Compute the base of a webpage
-- stolen from ,
computeDocBase :: ArrowXml a => a XmlTree String
computeDocBase
= ( ( ( this
/> hasName "html"
/> hasName "head"
/> hasName "base"
>>> getAttrValue "href"
)
&&&
getAttrValue "transfer-URI"
)
>>> expandURI
)
`orElse`
getAttrValue "transfer-URI"
traceOffset :: Int
traceOffset = 3
trcMsg :: String -> IO ()
trcMsg m = hPutStrLn stderr ('-':"- (0) " ++ m)
-- ------------------------------------------------------------
--
-- simple and usefull access arrows
getByPath :: ArrowXml a => [String] -> a XmlTree XmlTree
getByPath = seqA . map (\ n -> getChildren >>> hasName n)
robotsNo :: String -> LA XmlTree XmlTree
robotsNo what = none
`when`
( getByPath ["html", "head", "meta"]
>>>
hasAttrValue "name" ( map toUpper
>>>
(== "ROBOTS")
)
>>>
getAttrValue0 "content"
>>>
isA ( map (toUpper >>> (\ x -> if isLetter x then x else ' '))
>>>
words
>>>
(what `elem`)
)
)
robotsNoIndex :: ArrowXml a => a XmlTree XmlTree
robotsNoIndex = fromLA $ robotsNo "NOINDEX"
robotsNoFollow :: ArrowXml a => a XmlTree XmlTree
robotsNoFollow = fromLA $ robotsNo "NOFOLLOW"
-- ------------------------------------------------------------
| null | https://raw.githubusercontent.com/fortytools/holumbus/4b2f7b832feab2715a4d48be0b07dca018eaa8e8/searchengine/source/Holumbus/Utility.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
import Data.Maybe
------------------------------------------------------------
| Split a string into seperate strings at a specific character sequence.
| Join with a seperating character sequence.
| Removes leading and trailing whitespace from a string.
| Removes leading whitespace from a string.
| Removes trailing whitespace from a string.
| Strip leading and trailing elements matching a predicate.
| found on the haskell cafe mailing list
(<-cafe/2008-April/041970.html>).
| partition the list of input data into a list of input data lists of
approximately the same specified length
| partition the list of input data into a list of a specified number of input data lists with
approximately the same length
| Computes a filename for a local temporary file.
as a parameter
| Helper function to replace original URIs by the corresponding pathes for
the locally saved files
| Compute the base of a webpage
------------------------------------------------------------
simple and usefull access arrows
------------------------------------------------------------ |
|
Module : Holumbus . Utility
Copyright : Copyright ( C ) 2008
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.1
Small utility functions which are probably useful somewhere else , too .
Module : Holumbus.Utility
Copyright : Copyright (C) 2008 Timo B. Huebel
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: portable
Version : 0.1
Small utility functions which are probably useful somewhere else, too.
-}
module Holumbus.Utility where
import Control.Exception (bracket)
import Data.Binary
import qualified Data.ByteString.Lazy as B
import Data.ByteString.Lazy.Char8 (pack)
import Data.Char
import Data.Digest.Pure.MD5
import qualified Data.IntMap as IM
import qualified Data.List as L
import Holumbus.Index.Common
import qualified Holumbus . Index . Documents as DOC
import Numeric
import System.IO
import Text.XML.HXT.Core
split :: Eq a => [a] -> [a] -> [[a]]
split _ [] = [[]]
split at w@(x:xs) = maybe ((x:r):rs) ((:) [] . split at) (L.stripPrefix at w)
where (r:rs) = split at xs
join :: Eq a => [a] -> [[a]] -> [a]
join = L.intercalate
strip :: String -> String
strip = stripWith isSpace
stripl :: String -> String
stripl = dropWhile isSpace
stripr :: String -> String
stripr = reverse . dropWhile isSpace . reverse
stripWith :: (a -> Bool) -> [a] -> [a]
stripWith f = reverse . dropWhile f . reverse . dropWhile f
Depends on bytestring > = 0.9.0.4 ( ? )
strictDecodeFile :: Binary a => FilePath -> IO a
strictDecodeFile f =
bracket (openBinaryFile f ReadMode) hClose $ \h -> do
c <- B.hGetContents h
return $! decode c
partitionListByLength :: Int -> [a] -> [[a]]
partitionListByLength _ [] = []
partitionListByLength count l = [take count l] ++ (partitionListByLength count (drop count l))
partitionListByCount :: Int -> [a] -> [[a]]
partitionListByCount sublistCount list = partition sublistCount list
where
partition 0 _ = []
partition sublists l
= let next = ((length l) `div` sublists)
in if next == 0 then [l]
else [take next l] ++ partition (sublists -1) (drop next l)
| Escapes non - alphanumeric or space characters in a
escape :: String -> String
escape [] = []
escape (c:cs)
= if isAlphaNum c || isSpace c
then c : escape cs
else '%' : showHex (fromEnum c) "" ++ escape cs
Since filename computation might depend on the DocId it is also submitted
tmpFile :: DocId -> URI -> String
tmpFile _ u = let f = escape u
in if (length f) > 255
then (show . md5 . pack) f
else f
tmpDocs :: HolDocuments d a => String -> d a -> d a
tmpDocs tmpPath =
fromMap
. (IM.mapWithKey (\docId doc -> Document (title doc) (tmpPath ++ (tmpFile docId (uri doc))) Nothing))
. toMap
stolen from ,
computeDocBase :: ArrowXml a => a XmlTree String
computeDocBase
= ( ( ( this
/> hasName "html"
/> hasName "head"
/> hasName "base"
>>> getAttrValue "href"
)
&&&
getAttrValue "transfer-URI"
)
>>> expandURI
)
`orElse`
getAttrValue "transfer-URI"
traceOffset :: Int
traceOffset = 3
trcMsg :: String -> IO ()
trcMsg m = hPutStrLn stderr ('-':"- (0) " ++ m)
getByPath :: ArrowXml a => [String] -> a XmlTree XmlTree
getByPath = seqA . map (\ n -> getChildren >>> hasName n)
robotsNo :: String -> LA XmlTree XmlTree
robotsNo what = none
`when`
( getByPath ["html", "head", "meta"]
>>>
hasAttrValue "name" ( map toUpper
>>>
(== "ROBOTS")
)
>>>
getAttrValue0 "content"
>>>
isA ( map (toUpper >>> (\ x -> if isLetter x then x else ' '))
>>>
words
>>>
(what `elem`)
)
)
robotsNoIndex :: ArrowXml a => a XmlTree XmlTree
robotsNoIndex = fromLA $ robotsNo "NOINDEX"
robotsNoFollow :: ArrowXml a => a XmlTree XmlTree
robotsNoFollow = fromLA $ robotsNo "NOFOLLOW"
|
b11e19365b47bef92f74d4bed38612369a76a382a4d3bb9bec1215fe601d79b1 | umber-lang/umber | name_bindings.ml | open Import
open Names
module Name_entry = struct
module Type_source = struct
module T = struct
type t =
| Placeholder
| Let_inferred
| Val_declared
| Extern_declared
[@@deriving compare, enumerate, equal, sexp, variants]
end
include T
include Comparable.Make (T)
let%test "priority order" =
List.equal
equal
(List.sort ~compare all)
[ Placeholder; Let_inferred; Val_declared; Extern_declared ]
;;
end
module Type_or_scheme = struct
type t =
| Type of Type.t
| Scheme of Type.Scheme.t
[@@deriving equal, sexp]
end
(* TODO: Consider having this type be responsible for assigning/tracking unique names,
rather than doing it in the MIR. *)
type t =
{ typ : Type_or_scheme.t
; type_source : Type_source.t [@default Val_declared] [@sexp_drop_default.equal]
; fixity : Fixity.t option [@sexp.option]
; extern_name : Extern_name.t option [@sexp.option]
}
[@@deriving equal, fields, sexp]
let typ entry =
match entry.typ with
| Type typ -> typ
| Scheme scheme -> Type.Scheme.instantiate ~map_name:Fn.id scheme
;;
let scheme entry =
match entry.typ with
| Scheme scheme -> Some scheme
| Type _ -> None
;;
let val_declared ?fixity ?extern_name typ =
{ type_source = Val_declared; typ = Scheme typ; fixity; extern_name }
;;
let let_inferred ?fixity ?extern_name typ =
{ type_source = Let_inferred; typ = Type typ; fixity; extern_name }
;;
let placeholder =
{ type_source = Placeholder
; typ = Scheme (Var Type.Param.dummy)
; fixity = None
; extern_name = None
}
;;
let merge entry entry' =
let preferred, typ, other =
match
Ordering.of_int (Type_source.compare entry.type_source entry'.type_source)
with
| Greater -> entry, entry.typ, entry'
| Less -> entry', entry'.typ, entry
| Equal ->
let typ =
match entry.typ, entry'.typ with
| Type _, Scheme _ | Scheme _, Scheme _ | Type _, Type _ -> entry'.typ
| Scheme _, Type _ -> entry.typ
in
entry', typ, entry
in
let pick getter = Option.first_some (getter preferred) (getter other) in
{ typ
; type_source = preferred.type_source
; fixity = pick fixity
; extern_name = pick extern_name
}
;;
end
(* TODO: probably just make 'path the variable so we don't have to put unit for module paths *)
module Or_imported = struct
type ('entry, 'path) t =
| Local of 'entry
| Imported of 'path
[@@deriving sexp, variants]
end
module Path = struct
module T = struct
type t = (Module_name.t * [ `Sig | `Def ]) list
[@@deriving equal, compare, hash, sexp]
let to_string =
let rec loop buf = function
| [] -> Buffer.contents buf
| (module_name, place) :: rest ->
if Buffer.length buf > 0 then Buffer.add_char buf '.';
Ustring.add_to_buffer buf (Module_name.to_ustring module_name);
(match place with
| `Sig -> Buffer.add_string buf "(s)"
| `Def -> Buffer.add_string buf "(d)");
loop buf rest
in
fun t ->
let buf = Buffer.create (List.length t * 5) in
loop buf t
;;
let of_string =
let open Option.Let_syntax in
let rec lex_nonempty acc lexbuf =
let%bind module_name =
Result.ok (Lex_helpers.lex_upper_name lexbuf)
>>| Module_name.of_ustring_unchecked
in
let%bind place = Lex_helpers.lex_place lexbuf in
let acc = (module_name, place) :: acc in
match%sedlex lexbuf with
| '.' -> lex_nonempty acc lexbuf
| eof -> Some acc
| _ -> None
in
function
| "" -> []
| str ->
(match lex_nonempty [] (Sedlexing.Utf8.from_string str) with
| Some path -> List.rev path
| None -> failwith "Name_bindings.Path.of_string: parse failed")
;;
end
include T
include Sexpable.Of_stringable (T)
include Comparable.Make (T)
include Hashable.Make (T)
let to_module_path = List.map ~f:fst
let append t module_name ~place = t @ [ module_name, place ]
end
type t =
{ current_path : Path.t
; toplevel : defs
}
and sigs = Nothing.t bindings
and defs = sigs bindings
and 'a bindings =
{ names : (Name_entry.t, Value_name.Qualified.t) Or_imported.t Value_name.Map.t
; types : (Type.Decl.t, Type_name.Qualified.t) Or_imported.t option Type_name.Map.t
; modules : ('a option * 'a bindings, Module_path.t) Or_imported.t Module_name.Map.t
}
[@@deriving sexp]
type sigs_or_defs =
| Sigs of sigs
| Defs of defs
[@@deriving sexp_of]
let name_error ~msg ustr =
Compilation_error.raise Name_error ~msg:[%message msg ~_:(ustr : Ustring.t)]
;;
let name_error_path path =
name_error ~msg:"Couldn't find path" (Module_path.to_ustring path)
;;
let or_name_clash msg ustr = function
| `Ok value -> value
| `Duplicate -> name_error ~msg ustr
;;
let or_name_error_path x path =
Option.value_or_thunk x ~default:(fun () -> name_error_path path)
;;
let empty_bindings =
{ names = Value_name.Map.empty
; types = Type_name.Map.empty
; modules = Module_name.Map.empty
}
;;
let empty = { current_path = []; toplevel = empty_bindings }
let without_std t =
{ t with
toplevel =
{ t.toplevel with
modules = Map.remove t.toplevel.modules Intrinsics.std_module_name
}
}
;;
type f_bindings = { f : 'a. 'a bindings -> 'a bindings }
let update_current t ~f =
let updating_import_err t imported_module =
compiler_bug
[%message "Updating imported module" (imported_module : Module_path.t) (t : t)]
in
let rec loop_sigs t (sigs : sigs) path ~f =
match path with
| [] -> f.f sigs
| (_, `Def) :: _ -> compiler_bug [%message "`Def inside sig path" (t : t)]
| (module_name, `Sig) :: rest ->
{ sigs with
modules =
Map.update sigs.modules module_name ~f:(function
| Some (Local (None, sigs)) -> Local (None, loop_sigs t sigs rest ~f)
| Some (Imported imported_module) -> updating_import_err t imported_module
| None -> name_error_path (Path.to_module_path t.current_path)
| Some (Local (Some _, _)) -> .)
}
in
let rec loop_defs t defs path ~f =
match path with
| [] -> f.f defs
| (module_name, place) :: rest ->
{ defs with
modules =
Map.update defs.modules module_name ~f:(function
| Some (Local (sigs, defs)) ->
(match place with
| `Sig ->
let sigs = Option.value sigs ~default:empty_bindings in
Local (Some (loop_sigs t sigs rest ~f), defs)
| `Def -> Local (sigs, loop_defs t defs rest ~f))
| Some (Imported imported_module) -> updating_import_err t imported_module
| None -> name_error_path (Path.to_module_path t.current_path))
}
in
{ t with toplevel = loop_defs t t.toplevel t.current_path ~f }
;;
let into_module t ~place module_name =
let f bindings =
{ bindings with
modules =
Map.update
bindings.modules
module_name
~f:(Option.value ~default:(Or_imported.Local (None, empty_bindings)))
}
in
let t = update_current t ~f:{ f } in
{ t with current_path = t.current_path @ [ module_name, place ] }
;;
let into_parent t =
{ t with current_path = List.drop_last t.current_path |> Option.value ~default:[] }
;;
let with_submodule t ~place module_name ~f =
{ (f (into_module t ~place module_name)) with current_path = t.current_path }
;;
let with_submodule' t ~place module_name ~f =
let t', x = f (into_module ~place t module_name) in
{ t' with current_path = t.current_path }, x
;;
let core =
{ current_path = []
; toplevel =
{ empty_bindings with
types =
List.fold
Intrinsics.all
~init:empty_bindings.types
~f:(fun types (module Intrinsic) ->
Map.set types ~key:Intrinsic.name ~data:(Some (Local Intrinsic.decl)))
; names =
List.fold
Intrinsics.Bool.cnstrs
~init:empty_bindings.names
~f:(fun names (cnstr_name, extern_name) ->
Map.set
names
~key:(Value_name.of_cnstr_name cnstr_name)
~data:
(Local
(Name_entry.val_declared
~extern_name
(Type.Concrete.cast Intrinsics.Bool.typ))))
}
}
;;
let merge_no_shadow t1 t2 =
let err to_ustring ~key:name = name_error ~msg:"Name clash" (to_ustring name) in
{ names = Map.merge_skewed t1.names t2.names ~combine:(err Value_name.to_ustring)
; types = Map.merge_skewed t1.types t2.types ~combine:(err Type_name.to_ustring)
; modules = Map.merge_skewed t1.modules t2.modules ~combine:(err Module_name.to_ustring)
}
;;
let resolve_path =
let open Option.Let_syntax in
let rec loop_sigs t path sigs =
match path with
| [] -> Some (Sigs sigs)
| module_name :: rest ->
(match%bind Map.find sigs.modules module_name with
| Local (None, sigs) -> loop_sigs t rest sigs
| Local (Some _, _) -> .
| Imported path -> resolve_path t path ~defs_only:false)
and loop_defs t current_path path defs =
match path with
| [] -> Some (Defs defs)
| module_name :: rest ->
(match%bind Map.find defs.modules module_name with
| Local (sigs, defs) ->
let current_path, go_into =
match current_path with
| Some [] | None -> None, `Sig
| Some ((module_name', place) :: rest') ->
if Module_name.(module_name = module_name')
then Some rest', place
else None, `Sig
in
(match go_into, sigs with
| `Sig, Some sigs -> loop_sigs t rest sigs
| `Sig, None | `Def, _ -> loop_defs t current_path rest defs)
| Imported path -> resolve_path t path ~defs_only:false)
and loop_defs_only t path defs =
match path with
| [] -> Some (Defs defs)
| module_name :: rest ->
(match%bind Map.find defs.modules module_name with
| Local (_, defs) -> loop_defs_only t rest defs
| Imported path -> resolve_path t path ~defs_only:true)
and resolve_path t path ~defs_only =
if defs_only
then loop_defs_only t path t.toplevel
else loop_defs t (Some t.current_path) path t.toplevel
in
resolve_path
;;
let resolve_path_exn t path ~defs_only =
or_name_error_path (resolve_path t path ~defs_only) path
;;
let with_path t path ~f =
let t', x = f { t with current_path = path } in
{ t' with current_path = t.current_path }, x
;;
let find =
let rec loop ?at_path ?(defs_only = false) t ((path, name) as input) ~f ~to_ustring =
(* Try looking at the current scope, then travel up to parent scopes to find a matching name *)
let at_path = Option.value at_path ~default:(Path.to_module_path t.current_path) in
let bindings_at_current = resolve_path_exn ~defs_only t at_path in
match List.hd path with
| Some first_module ->
let full_path = at_path @ path in
let f bindings =
if Map.mem bindings.modules first_module
then (
let bindings =
or_name_error_path (resolve_path ~defs_only t full_path) at_path
in
option_or_default (f full_path name bindings) ~f:(fun () ->
name_error ~msg:"Couldn't find name" (to_ustring input)))
else check_parent t at_path input ~f ~to_ustring
in
(match bindings_at_current with
| Sigs sigs -> f sigs
| Defs defs -> f defs)
| None ->
option_or_default (f at_path name bindings_at_current) ~f:(fun () ->
check_parent t at_path input ~f ~to_ustring)
and check_parent t current_path input ~f ~to_ustring =
(* Recursively check the parent *)
match List.drop_last current_path with
| Some parent_path -> loop t ~at_path:parent_path input ~f ~to_ustring
| None -> name_error ~msg:"Couldn't find name" (to_ustring input)
in
fun ?at_path ?defs_only t input ~f ~to_ustring ->
loop ?at_path ?defs_only t input ~f ~to_ustring
;;
let rec find_entry' t name =
let open Option.Let_syntax in
find
t
name
~to_ustring:Value_name.Qualified.to_ustring
~f:(fun current_path name bindings ->
let f bindings =
Map.find bindings.names name >>| resolve_name_or_import' t (current_path, name)
in
match bindings with
| Sigs sigs -> f sigs
| Defs defs -> f defs)
and resolve_name_or_import' t name = function
| Or_imported.Local entry -> name, entry
| Imported path_name -> find_entry' t path_name
;;
let rec find_entry t name = snd (find_entry' t name)
and resolve_name_or_import t = function
| Or_imported.Local entry -> entry
| Imported path_name -> find_entry t path_name
;;
let find_type t name = find_entry t name |> Name_entry.typ
let find_cnstr_type t = Value_name.Qualified.of_cnstr_name >> find_type t
let find_fixity t name = Option.value (find_entry t name).fixity ~default:Fixity.default
let find_type_decl' ?at_path ?defs_only t name =
let open Option.Let_syntax in
find
?at_path
t
name
~to_ustring:Type_name.Qualified.to_ustring
?defs_only
~f:(fun path name bindings ->
let f bindings ~check_submodule =
match Map.find bindings.types name with
| Some decl -> Some (path, decl)
| None ->
(* Allow type names like [List.List] to be found as just [List] *)
let module_name = Type_name.to_ustring name |> Module_name.of_ustring_unchecked in
Map.find bindings.modules module_name >>= check_submodule
in
match bindings with
| Sigs sigs ->
f sigs ~check_submodule:(function
| Local (None, sigs) ->
let%bind decl = Map.find sigs.types name in
Some (path, decl)
| Local (Some _, _) -> .
| Imported import_path -> Some (path, Some (Imported (import_path, name))))
| Defs defs ->
f defs ~check_submodule:(function
| Local (None, defs) ->
let%bind decl = Map.find defs.types name in
Some (path, decl)
| Local (Some sigs, _defs) ->
let%bind decl = Map.find sigs.types name in
Some (path, decl)
| Imported import_path -> Some (path, Some (Imported (import_path, name)))))
;;
(* TODO: Ideally we should have consistent behavior between all the absolutify functions,
which should include following imports all the way to a local name. I don't think that
is currently the case. *)
let absolutify_path t path =
find
t
(path, ())
~f:(fun path () _ -> Some path)
~to_ustring:(fun (path, ()) -> Module_path.to_ustring path)
;;
let absolutify_type_name t ((_, name) as path) = fst (find_type_decl' t path), name
let absolutify_value_name t name = fst (find_entry' t name)
(* TODO: how do I fill in foreign modules?
For now, just assume a toplevel module already exists and copy (?) it into scope
Later we can implement looking up new modules from the file system, installed packages, etc.
Should be able to work out all dependency information fairly easily by enforcing that
everything is imported, including toplevel modules *)
(* NOTE: Imports at toplevel defs affect both sigs and defs, but in submodules,
they affect defs only. This behavior is super weird, tbh.
TODO: try to make this less confusing
Also, maybe the order of imports should matter - could just gather them as we go? *)
let import _t _module_name =
(*let module_bindings = find_module t [ module_name ] in
update_current t ~f:(fun bindings ->
{ bindings with
modules =
Map.add bindings.modules ~key:module_name ~data:module_bindings
|> or_name_clash "Import of duplicate module" (Module_name.to_ustring module_name)
})*)
failwith "TODO: module imports (properly)"
;;
(* TODO: test this, it's almost certainly wrong somehow *)
let import_filtered t path ~f =
let path = absolutify_path t path in
let map_to_imports_filtered path bindings ~f =
{ names =
Map.filter_mapi bindings.names ~f:(fun ~key:name ~data:_ ->
Option.some_if
(f (Value_name.unidentify name))
(Or_imported.Imported (path, name)))
; types =
Map.filter_mapi bindings.types ~f:(fun ~key:type_name ~data:_ ->
Option.some_if
(f (Type_name.unidentify type_name))
(Some (Or_imported.Imported (path, type_name))))
; modules =
Map.filter_mapi bindings.modules ~f:(fun ~key:module_name ~data:_ ->
Option.some_if
(f (Module_name.unidentify module_name))
(Or_imported.Imported (path @ [ module_name ])))
}
in
let bindings_to_import =
match resolve_path_exn t path ~defs_only:false with
| Sigs sigs -> map_to_imports_filtered path ~f sigs
| Defs defs -> map_to_imports_filtered path ~f defs
in
let f bindings = merge_no_shadow bindings bindings_to_import in
update_current t ~f:{ f }
;;
let import_all = import_filtered ~f:(fun _ -> true)
let import_with t path = function
| [] -> import_all t path
| imports -> import_filtered t path ~f:(List.mem imports ~equal:Unidentified_name.equal)
;;
let import_without t path hiding =
import_filtered t path ~f:(not << Nonempty.mem hiding ~equal:Unidentified_name.equal)
;;
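(* Usage sketch, not part of the original source; [list_path], [map_name] and
   [hidden_names] stand for values of the appropriate name/path types:
     let t = import_with t list_path [ map_name ]
     (* only [map] becomes visible, as an [Imported] entry *)
     let t = import_without t list_path hidden_names
     (* everything at [list_path] except the hidden names becomes visible *)
   Both are thin wrappers over [import_filtered], so the same path
   absolutification and [merge_no_shadow] clash checks apply. *)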
let map_type_expr_names type_expr ~f =
Type.Expr.map type_expr ~var:Fn.id ~pf:Fn.id ~f:(function
| Type_app (name, args) -> Defer (Type.Expr.Type_app (f name, args))
| typ -> Defer typ)
;;
let absolutify_type_expr t =
map_type_expr_names ~f:(fun name -> absolutify_type_name t name)
;;
let of_prelude_sexp sexp =
let t = into_parent (t_of_sexp sexp) in
import_all t Intrinsics.prelude_module_path
;;
let prelude = lazy (of_prelude_sexp Umber_std.Prelude.names)
let add_val_or_extern
?extern_name
t
name
fixity
(trait_bounds, type_expr)
~unify
~type_source
=
let f bindings =
if not (List.is_empty trait_bounds) then failwith "TODO: trait bounds in val";
let scheme = absolutify_type_expr t type_expr in
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None ->
compiler_bug [%message "Missing placeholder name entry" (name : Value_name.t)]
| Some (Local existing_entry) ->
unify (Type.Scheme.instantiate scheme) (Name_entry.typ existing_entry);
Local
(Name_entry.merge
existing_entry
{ type_source; typ = Scheme scheme; fixity; extern_name })
| Some (Imported imported_name) ->
(* TODO: consider allowing this use case
e.g. importing from another module, and then giving that import a new,
compatible type declaration *)
name_error
~msg:"Duplicate val for imported item"
Ustring.(
Value_name.to_ustring name
^ of_string_exn " vs "
^ Value_name.Qualified.to_ustring imported_name))
}
in
update_current t ~f:{ f }
;;
let add_val = add_val_or_extern ?extern_name:None ~type_source:Val_declared
let add_extern t name fixity typ extern_name ~unify =
add_val_or_extern t name fixity typ ~extern_name ~unify ~type_source:Extern_declared
;;
let absolutify_type_decl t = Type.Decl.map_exprs ~f:(absolutify_type_expr t)
let add_to_types ?(err_msg = "Type name clash") types name decl =
Map.update types name ~f:(function
| None | Some None -> decl
| Some _ -> name_error ~msg:err_msg (Type_name.to_ustring name))
;;
let add_type_decl ({ current_path; _ } as t) type_name decl =
let f bindings =
if not (Type.Decl.no_free_params decl)
then
Compilation_error.raise
Type_error
~msg:[%message "Free parameters in type declaration" (decl : Type.Decl.t)];
let decl = absolutify_type_decl t decl in
{ bindings with
types =
add_to_types
bindings.types
type_name
(Some (Local decl))
~err_msg:"Duplicate type declarations"
; names =
(match decl with
| params, Variants cnstrs ->
(* Add constructors as functions to the namespace *)
let result_type : Type.Scheme.t =
let path = Path.to_module_path current_path in
let params = List.map params ~f:Type.Expr.var in
Type_app ((path, type_name), params)
in
List.fold cnstrs ~init:bindings.names ~f:(fun names (cnstr_name, args) ->
let entry =
Name_entry.val_declared
(match Nonempty.of_list args with
| Some args -> Function (args, result_type)
| None -> result_type)
in
Map.add names ~key:(Value_name.of_cnstr_name cnstr_name) ~data:(Local entry)
|> or_name_clash
"Variant constructor name clashes with another value"
(Cnstr_name.to_ustring cnstr_name))
| _ -> bindings.names)
}
in
update_current t ~f:{ f }
;;
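(* Sketch of the effect, not in the original source: for a variant declaration
   roughly like [type shape = | Circle of Float | Point], calling
   [add_type_decl t shape_name decl] records [shape] under [types] and also adds
   [Circle] (typed as a function from [Float] to [shape]) and [Point] (typed as
   [shape]) to [names], which is what lets constructor lookups later go through
   [find_cnstr_type]. *)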
let set_inferred_scheme t name scheme =
let f bindings =
let inferred_entry =
{ Name_entry.type_source = Let_inferred
; typ = Scheme scheme
; fixity = None
; extern_name = None
}
in
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None -> Local inferred_entry
| Some (Local existing_entry) ->
Local (Name_entry.merge existing_entry inferred_entry)
| Some (Imported _) ->
(* TODO: Think about the exact semantics of this. I think we want to disallow
shadowing/name clashes between imported and local names, but I'm not sure
if here is the best place to do it. *)
name_error
~msg:"Name clash between imported and local binding"
(Value_name.to_ustring name))
}
in
update_current t ~f:{ f }
;;
let add_name_placeholder t name =
let f bindings =
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None -> Local Name_entry.placeholder
| Some (Local { Name_entry.type_source = Let_inferred; _ } as entry) -> entry
| _ -> name_error ~msg:"Duplicate name" (Value_name.to_ustring name))
}
in
update_current t ~f:{ f }
;;
let add_type_placeholder t type_name =
let f bindings =
{ bindings with
types = add_to_types bindings.types type_name None ~err_msg:"Duplicate type name"
}
in
update_current t ~f:{ f }
;;
let fold_local_names t ~init ~f =
let fold_local path bindings init =
Map.fold bindings.names ~init ~f:(fun ~key:name ~data acc ->
match data with
| Local entry -> f acc (path, name) entry
| Imported _ -> acc)
in
let rec fold_defs t path (defs : defs) ~init ~f =
Map.fold
defs.modules
~init:(fold_local path defs init)
~f:(fun ~key:module_name ~data acc ->
(* We can ignore sigs here because defs should have all the names *)
match data with
| Local (_, defs) -> fold_defs t (path @ [ module_name ]) defs ~init:acc ~f
| Imported _ -> acc)
in
fold_defs t [] t.toplevel ~init ~f
;;
let merge_names t new_names ~combine =
let new_names = Map.map new_names ~f:Or_imported.local in
let f bindings =
{ bindings with
names =
Map.merge_skewed bindings.names new_names ~combine:(fun ~key entry1 entry2 ->
let entry1, entry2 =
resolve_name_or_import t entry1, resolve_name_or_import t entry2
in
Local (combine key entry1 entry2))
}
in
update_current t ~f:{ f }
;;
let rec find_type_decl ?at_path ?defs_only t type_name =
resolve_decl_or_import
?at_path
?defs_only
t
(snd (find_type_decl' ?at_path ?defs_only t type_name))
and resolve_decl_or_import ?at_path ?defs_only t = function
| Some (Or_imported.Local decl) -> Some decl
| Some (Imported path_name) ->
(* TODO: pretty sure this import path should be resolved at the place it's written,
not the current path - this goes for all imports, unless we absolutify their paths *)
find_type_decl ?at_path ?defs_only t path_name
| None -> None
;;
let find_type_decl ?at_path ?(defs_only = false) t type_name =
option_or_default (find_type_decl ?at_path ~defs_only t type_name) ~f:(fun () ->
compiler_bug
[%message
"Placeholder decl not replaced"
(type_name : Type_name.Qualified.t)
(without_std t : t)])
;;
let resolve_decl_or_import ?at_path t decl_or_import =
option_or_default (resolve_decl_or_import ?at_path t decl_or_import) ~f:(fun () ->
compiler_bug
[%message
"Placeholder decl not replaced"
(decl_or_import : (Type.Decl.t, Type_name.Qualified.t) Or_imported.t option)
(without_std t : t)])
;;
let find_absolute_type_decl = find_type_decl ~at_path:[]
let find_type_decl = find_type_decl ?at_path:None
let current_path t = t.current_path
let find_sigs_and_defs t path module_name =
let open Option.Let_syntax in
let rec loop t path module_name =
find
t
(path, module_name)
~f:
(fun _ module_name -> function
| Sigs _ ->
compiler_bug
[%message
"Name_bindings.find_sigs_and_defs found only sigs"
(path : Module_path.t)
(module_name : Module_name.t)
(t : t)]
| Defs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path ->
let%bind path, module_name = List.split_last path in
Some (loop t path module_name)
| Local sigs_and_defs -> Some sigs_and_defs))
~to_ustring:(fun (path, module_name) ->
Module_path.to_ustring (path @ [ module_name ]))
in
let sigs, defs = loop t path module_name in
Option.map sigs ~f:(fun sigs -> Sigs sigs), Defs defs
;;
module Sigs_or_defs = struct
type name_bindings = t
type t = sigs_or_defs
let names = function
| Sigs { names; _ } | Defs { names; _ } -> names
;;
let value_names = Map.key_set << names
let types = function
| Sigs { types; _ } | Defs { types; _ } -> types
;;
let type_names = Map.key_set << types
let module_names = function
| Sigs sigs -> Map.key_set sigs.modules
| Defs defs -> Map.key_set defs.modules
;;
let make_find ~into_bindings ~resolve t bindings name =
option_or_default
(Map.find (into_bindings bindings) name)
~f:(fun () ->
compiler_bug [%message "Sigs_or_defs.find failed" (bindings : sigs_or_defs)])
|> resolve t
;;
let find_entry = make_find ~into_bindings:names ~resolve:resolve_name_or_import
let find_type_decl = make_find ~into_bindings:types ~resolve:resolve_decl_or_import
let find_module t bindings module_name =
let open Option.Let_syntax in
match bindings with
| Sigs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path -> resolve_path t path ~defs_only:false
| Local (None, sigs) -> Some (Sigs sigs)
| Local (Some _, _) -> .)
| Defs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path -> resolve_path t path ~defs_only:false
| Local (Some sigs, _) -> Some (Sigs sigs)
| Local (None, defs) -> Some (Defs defs))
;;
end
| null | https://raw.githubusercontent.com/umber-lang/umber/43339b87e10a704bffea16341f6285bc3cba058e/src/name_bindings.ml | ocaml | TODO: probably just make 'path the variable so we don't have to put unit for module paths
Try looking at the current scope, then travel up to parent scopes to find a matching name
Recursively check the parent
Allow type names like [List.List] to be found as just [List]
TODO: Ideally we should have consistent behavior between all the absolutify functions,
which should include following imports all the way to a local name. I don't think that
is currently the case.
TODO: how do I fill in foreign modules?
For now, just assume a toplevel module already exists and copy (?) it into scope
Later we can implement looking up new modules from the file system, installed packages, etc.
Should be able to work out all dependency information fairly easily by enforcing that
everything is imported, including toplevel modules
TODO: test this, it's almost certainly wrong somehow
TODO: consider allowing this use case
e.g. importing from another module, and then giving that import a new,
compatible type declaration
Add constructors as functions to the namespace
TODO: Think about the exact semantics of this. I think we want to disallow
shadowing/name clashes between imported and local names, but I'm not sure
if here is the best place to do it.
TODO: pretty sure this import path should be resolved at the place it's written,
not the current path - this goes for all imports, unless we absolutify their paths | open Import
open Names
module Name_entry = struct
module Type_source = struct
module T = struct
type t =
| Placeholder
| Let_inferred
| Val_declared
| Extern_declared
[@@deriving compare, enumerate, equal, sexp, variants]
end
include T
include Comparable.Make (T)
let%test "priority order" =
List.equal
equal
(List.sort ~compare all)
[ Placeholder; Let_inferred; Val_declared; Extern_declared ]
;;
end
module Type_or_scheme = struct
type t =
| Type of Type.t
| Scheme of Type.Scheme.t
[@@deriving equal, sexp]
end
(* TODO: Consider having this type be responsible for assigning/tracking unique names,
   rather than doing it in the MIR. *)
type t =
{ typ : Type_or_scheme.t
; type_source : Type_source.t [@default Val_declared] [@sexp_drop_default.equal]
; fixity : Fixity.t option [@sexp.option]
; extern_name : Extern_name.t option [@sexp.option]
}
[@@deriving equal, fields, sexp]
let typ entry =
match entry.typ with
| Type typ -> typ
| Scheme scheme -> Type.Scheme.instantiate ~map_name:Fn.id scheme
;;
let scheme entry =
match entry.typ with
| Scheme scheme -> Some scheme
| Type _ -> None
;;
let val_declared ?fixity ?extern_name typ =
{ type_source = Val_declared; typ = Scheme typ; fixity; extern_name }
;;
let let_inferred ?fixity ?extern_name typ =
{ type_source = Let_inferred; typ = Type typ; fixity; extern_name }
;;
let placeholder =
{ type_source = Placeholder
; typ = Scheme (Var Type.Param.dummy)
; fixity = None
; extern_name = None
}
;;
let merge entry entry' =
let preferred, typ, other =
match
Ordering.of_int (Type_source.compare entry.type_source entry'.type_source)
with
| Greater -> entry, entry.typ, entry'
| Less -> entry', entry'.typ, entry
| Equal ->
let typ =
match entry.typ, entry'.typ with
| Type _, Scheme _ | Scheme _, Scheme _ | Type _, Type _ -> entry'.typ
| Scheme _, Type _ -> entry.typ
in
entry', typ, entry
in
let pick getter = Option.first_some (getter preferred) (getter other) in
{ typ
; type_source = preferred.type_source
; fixity = pick fixity
; extern_name = pick extern_name
}
;;
end
module Or_imported = struct
type ('entry, 'path) t =
| Local of 'entry
| Imported of 'path
[@@deriving sexp, variants]
end
module Path = struct
module T = struct
type t = (Module_name.t * [ `Sig | `Def ]) list
[@@deriving equal, compare, hash, sexp]
let to_string =
let rec loop buf = function
| [] -> Buffer.contents buf
| (module_name, place) :: rest ->
if Buffer.length buf > 0 then Buffer.add_char buf '.';
Ustring.add_to_buffer buf (Module_name.to_ustring module_name);
(match place with
| `Sig -> Buffer.add_string buf "(s)"
| `Def -> Buffer.add_string buf "(d)");
loop buf rest
in
fun t ->
let buf = Buffer.create (List.length t * 5) in
loop buf t
;;
let of_string =
let open Option.Let_syntax in
let rec lex_nonempty acc lexbuf =
let%bind module_name =
Result.ok (Lex_helpers.lex_upper_name lexbuf)
>>| Module_name.of_ustring_unchecked
in
let%bind place = Lex_helpers.lex_place lexbuf in
let acc = (module_name, place) :: acc in
match%sedlex lexbuf with
| '.' -> lex_nonempty acc lexbuf
| eof -> Some acc
| _ -> None
in
function
| "" -> []
| str ->
(match lex_nonempty [] (Sedlexing.Utf8.from_string str) with
| Some path -> List.rev path
| None -> failwith "Name_bindings.Path.of_string: parse failed")
;;
end
include T
include Sexpable.Of_stringable (T)
include Comparable.Make (T)
include Hashable.Make (T)
let to_module_path = List.map ~f:fst
let append t module_name ~place = t @ [ module_name, place ]
end
type t =
{ current_path : Path.t
; toplevel : defs
}
and sigs = Nothing.t bindings
and defs = sigs bindings
and 'a bindings =
{ names : (Name_entry.t, Value_name.Qualified.t) Or_imported.t Value_name.Map.t
; types : (Type.Decl.t, Type_name.Qualified.t) Or_imported.t option Type_name.Map.t
; modules : ('a option * 'a bindings, Module_path.t) Or_imported.t Module_name.Map.t
}
[@@deriving sexp]
type sigs_or_defs =
| Sigs of sigs
| Defs of defs
[@@deriving sexp_of]
let name_error ~msg ustr =
Compilation_error.raise Name_error ~msg:[%message msg ~_:(ustr : Ustring.t)]
;;
let name_error_path path =
name_error ~msg:"Couldn't find path" (Module_path.to_ustring path)
;;
let or_name_clash msg ustr = function
| `Ok value -> value
| `Duplicate -> name_error ~msg ustr
;;
let or_name_error_path x path =
Option.value_or_thunk x ~default:(fun () -> name_error_path path)
;;
let empty_bindings =
{ names = Value_name.Map.empty
; types = Type_name.Map.empty
; modules = Module_name.Map.empty
}
;;
let empty = { current_path = []; toplevel = empty_bindings }
let without_std t =
{ t with
toplevel =
{ t.toplevel with
modules = Map.remove t.toplevel.modules Intrinsics.std_module_name
}
}
;;
type f_bindings = { f : 'a. 'a bindings -> 'a bindings }
let update_current t ~f =
let updating_import_err t imported_module =
compiler_bug
[%message "Updating imported module" (imported_module : Module_path.t) (t : t)]
in
let rec loop_sigs t (sigs : sigs) path ~f =
match path with
| [] -> f.f sigs
| (_, `Def) :: _ -> compiler_bug [%message "`Def inside sig path" (t : t)]
| (module_name, `Sig) :: rest ->
{ sigs with
modules =
Map.update sigs.modules module_name ~f:(function
| Some (Local (None, sigs)) -> Local (None, loop_sigs t sigs rest ~f)
| Some (Imported imported_module) -> updating_import_err t imported_module
| None -> name_error_path (Path.to_module_path t.current_path)
| Some (Local (Some _, _)) -> .)
}
in
let rec loop_defs t defs path ~f =
match path with
| [] -> f.f defs
| (module_name, place) :: rest ->
{ defs with
modules =
Map.update defs.modules module_name ~f:(function
| Some (Local (sigs, defs)) ->
(match place with
| `Sig ->
let sigs = Option.value sigs ~default:empty_bindings in
Local (Some (loop_sigs t sigs rest ~f), defs)
| `Def -> Local (sigs, loop_defs t defs rest ~f))
| Some (Imported imported_module) -> updating_import_err t imported_module
| None -> name_error_path (Path.to_module_path t.current_path))
}
in
{ t with toplevel = loop_defs t t.toplevel t.current_path ~f }
;;
let into_module t ~place module_name =
let f bindings =
{ bindings with
modules =
Map.update
bindings.modules
module_name
~f:(Option.value ~default:(Or_imported.Local (None, empty_bindings)))
}
in
let t = update_current t ~f:{ f } in
{ t with current_path = t.current_path @ [ module_name, place ] }
;;
let into_parent t =
{ t with current_path = List.drop_last t.current_path |> Option.value ~default:[] }
;;
let with_submodule t ~place module_name ~f =
{ (f (into_module t ~place module_name)) with current_path = t.current_path }
;;
let with_submodule' t ~place module_name ~f =
let t', x = f (into_module ~place t module_name) in
{ t' with current_path = t.current_path }, x
;;
let core =
{ current_path = []
; toplevel =
{ empty_bindings with
types =
List.fold
Intrinsics.all
~init:empty_bindings.types
~f:(fun types (module Intrinsic) ->
Map.set types ~key:Intrinsic.name ~data:(Some (Local Intrinsic.decl)))
; names =
List.fold
Intrinsics.Bool.cnstrs
~init:empty_bindings.names
~f:(fun names (cnstr_name, extern_name) ->
Map.set
names
~key:(Value_name.of_cnstr_name cnstr_name)
~data:
(Local
(Name_entry.val_declared
~extern_name
(Type.Concrete.cast Intrinsics.Bool.typ))))
}
}
;;
let merge_no_shadow t1 t2 =
let err to_ustring ~key:name = name_error ~msg:"Name clash" (to_ustring name) in
{ names = Map.merge_skewed t1.names t2.names ~combine:(err Value_name.to_ustring)
; types = Map.merge_skewed t1.types t2.types ~combine:(err Type_name.to_ustring)
; modules = Map.merge_skewed t1.modules t2.modules ~combine:(err Module_name.to_ustring)
}
;;
let resolve_path =
let open Option.Let_syntax in
let rec loop_sigs t path sigs =
match path with
| [] -> Some (Sigs sigs)
| module_name :: rest ->
(match%bind Map.find sigs.modules module_name with
| Local (None, sigs) -> loop_sigs t rest sigs
| Local (Some _, _) -> .
| Imported path -> resolve_path t path ~defs_only:false)
and loop_defs t current_path path defs =
match path with
| [] -> Some (Defs defs)
| module_name :: rest ->
(match%bind Map.find defs.modules module_name with
| Local (sigs, defs) ->
let current_path, go_into =
match current_path with
| Some [] | None -> None, `Sig
| Some ((module_name', place) :: rest') ->
if Module_name.(module_name = module_name')
then Some rest', place
else None, `Sig
in
(match go_into, sigs with
| `Sig, Some sigs -> loop_sigs t rest sigs
| `Sig, None | `Def, _ -> loop_defs t current_path rest defs)
| Imported path -> resolve_path t path ~defs_only:false)
and loop_defs_only t path defs =
match path with
| [] -> Some (Defs defs)
| module_name :: rest ->
(match%bind Map.find defs.modules module_name with
| Local (_, defs) -> loop_defs_only t rest defs
| Imported path -> resolve_path t path ~defs_only:true)
and resolve_path t path ~defs_only =
if defs_only
then loop_defs_only t path t.toplevel
else loop_defs t (Some t.current_path) path t.toplevel
in
resolve_path
;;
let resolve_path_exn t path ~defs_only =
or_name_error_path (resolve_path t path ~defs_only) path
;;
let with_path t path ~f =
let t', x = f { t with current_path = path } in
{ t' with current_path = t.current_path }, x
;;
let find =
let rec loop ?at_path ?(defs_only = false) t ((path, name) as input) ~f ~to_ustring =
let at_path = Option.value at_path ~default:(Path.to_module_path t.current_path) in
let bindings_at_current = resolve_path_exn ~defs_only t at_path in
match List.hd path with
| Some first_module ->
let full_path = at_path @ path in
let f bindings =
if Map.mem bindings.modules first_module
then (
let bindings =
or_name_error_path (resolve_path ~defs_only t full_path) at_path
in
option_or_default (f full_path name bindings) ~f:(fun () ->
name_error ~msg:"Couldn't find name" (to_ustring input)))
else check_parent t at_path input ~f ~to_ustring
in
(match bindings_at_current with
| Sigs sigs -> f sigs
| Defs defs -> f defs)
| None ->
option_or_default (f at_path name bindings_at_current) ~f:(fun () ->
check_parent t at_path input ~f ~to_ustring)
and check_parent t current_path input ~f ~to_ustring =
match List.drop_last current_path with
| Some parent_path -> loop t ~at_path:parent_path input ~f ~to_ustring
| None -> name_error ~msg:"Couldn't find name" (to_ustring input)
in
fun ?at_path ?defs_only t input ~f ~to_ustring ->
loop ?at_path ?defs_only t input ~f ~to_ustring
;;
let rec find_entry' t name =
let open Option.Let_syntax in
find
t
name
~to_ustring:Value_name.Qualified.to_ustring
~f:(fun current_path name bindings ->
let f bindings =
Map.find bindings.names name >>| resolve_name_or_import' t (current_path, name)
in
match bindings with
| Sigs sigs -> f sigs
| Defs defs -> f defs)
and resolve_name_or_import' t name = function
| Or_imported.Local entry -> name, entry
| Imported path_name -> find_entry' t path_name
;;
let rec find_entry t name = snd (find_entry' t name)
and resolve_name_or_import t = function
| Or_imported.Local entry -> entry
| Imported path_name -> find_entry t path_name
;;
let find_type t name = find_entry t name |> Name_entry.typ
let find_cnstr_type t = Value_name.Qualified.of_cnstr_name >> find_type t
let find_fixity t name = Option.value (find_entry t name).fixity ~default:Fixity.default
let find_type_decl' ?at_path ?defs_only t name =
let open Option.Let_syntax in
find
?at_path
t
name
~to_ustring:Type_name.Qualified.to_ustring
?defs_only
~f:(fun path name bindings ->
let f bindings ~check_submodule =
match Map.find bindings.types name with
| Some decl -> Some (path, decl)
| None ->
let module_name = Type_name.to_ustring name |> Module_name.of_ustring_unchecked in
Map.find bindings.modules module_name >>= check_submodule
in
match bindings with
| Sigs sigs ->
f sigs ~check_submodule:(function
| Local (None, sigs) ->
let%bind decl = Map.find sigs.types name in
Some (path, decl)
| Local (Some _, _) -> .
| Imported import_path -> Some (path, Some (Imported (import_path, name))))
| Defs defs ->
f defs ~check_submodule:(function
| Local (None, defs) ->
let%bind decl = Map.find defs.types name in
Some (path, decl)
| Local (Some sigs, _defs) ->
let%bind decl = Map.find sigs.types name in
Some (path, decl)
| Imported import_path -> Some (path, Some (Imported (import_path, name)))))
;;
let absolutify_path t path =
find
t
(path, ())
~f:(fun path () _ -> Some path)
~to_ustring:(fun (path, ()) -> Module_path.to_ustring path)
;;
let absolutify_type_name t ((_, name) as path) = fst (find_type_decl' t path), name
let absolutify_value_name t name = fst (find_entry' t name)
(* NOTE: Imports at toplevel defs affect both sigs and defs, but in submodules,
   they affect defs only. This behavior is super weird, tbh.
   TODO: try to make this less confusing
   Also, maybe the order of imports should matter - could just gather them as we go? *)
let import _t _module_name =
(*let module_bindings = find_module t [ module_name ] in
  update_current t ~f:(fun bindings ->
    { bindings with
      modules =
        Map.add bindings.modules ~key:module_name ~data:module_bindings
        |> or_name_clash "Import of duplicate module" (Module_name.to_ustring module_name)
    })*)
failwith "TODO: module imports (properly)"
;;
let import_filtered t path ~f =
let path = absolutify_path t path in
let map_to_imports_filtered path bindings ~f =
{ names =
Map.filter_mapi bindings.names ~f:(fun ~key:name ~data:_ ->
Option.some_if
(f (Value_name.unidentify name))
(Or_imported.Imported (path, name)))
; types =
Map.filter_mapi bindings.types ~f:(fun ~key:type_name ~data:_ ->
Option.some_if
(f (Type_name.unidentify type_name))
(Some (Or_imported.Imported (path, type_name))))
; modules =
Map.filter_mapi bindings.modules ~f:(fun ~key:module_name ~data:_ ->
Option.some_if
(f (Module_name.unidentify module_name))
(Or_imported.Imported (path @ [ module_name ])))
}
in
let bindings_to_import =
match resolve_path_exn t path ~defs_only:false with
| Sigs sigs -> map_to_imports_filtered path ~f sigs
| Defs defs -> map_to_imports_filtered path ~f defs
in
let f bindings = merge_no_shadow bindings bindings_to_import in
update_current t ~f:{ f }
;;
let import_all = import_filtered ~f:(fun _ -> true)
let import_with t path = function
| [] -> import_all t path
| imports -> import_filtered t path ~f:(List.mem imports ~equal:Unidentified_name.equal)
;;
let import_without t path hiding =
import_filtered t path ~f:(not << Nonempty.mem hiding ~equal:Unidentified_name.equal)
;;
let map_type_expr_names type_expr ~f =
Type.Expr.map type_expr ~var:Fn.id ~pf:Fn.id ~f:(function
| Type_app (name, args) -> Defer (Type.Expr.Type_app (f name, args))
| typ -> Defer typ)
;;
let absolutify_type_expr t =
map_type_expr_names ~f:(fun name -> absolutify_type_name t name)
;;
let of_prelude_sexp sexp =
let t = into_parent (t_of_sexp sexp) in
import_all t Intrinsics.prelude_module_path
;;
let prelude = lazy (of_prelude_sexp Umber_std.Prelude.names)
let add_val_or_extern
?extern_name
t
name
fixity
(trait_bounds, type_expr)
~unify
~type_source
=
let f bindings =
if not (List.is_empty trait_bounds) then failwith "TODO: trait bounds in val";
let scheme = absolutify_type_expr t type_expr in
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None ->
compiler_bug [%message "Missing placeholder name entry" (name : Value_name.t)]
| Some (Local existing_entry) ->
unify (Type.Scheme.instantiate scheme) (Name_entry.typ existing_entry);
Local
(Name_entry.merge
existing_entry
{ type_source; typ = Scheme scheme; fixity; extern_name })
| Some (Imported imported_name) ->
name_error
~msg:"Duplicate val for imported item"
Ustring.(
Value_name.to_ustring name
^ of_string_exn " vs "
^ Value_name.Qualified.to_ustring imported_name))
}
in
update_current t ~f:{ f }
;;
let add_val = add_val_or_extern ?extern_name:None ~type_source:Val_declared
let add_extern t name fixity typ extern_name ~unify =
add_val_or_extern t name fixity typ ~extern_name ~unify ~type_source:Extern_declared
;;
let absolutify_type_decl t = Type.Decl.map_exprs ~f:(absolutify_type_expr t)
let add_to_types ?(err_msg = "Type name clash") types name decl =
Map.update types name ~f:(function
| None | Some None -> decl
| Some _ -> name_error ~msg:err_msg (Type_name.to_ustring name))
;;
let add_type_decl ({ current_path; _ } as t) type_name decl =
let f bindings =
if not (Type.Decl.no_free_params decl)
then
Compilation_error.raise
Type_error
~msg:[%message "Free parameters in type declaration" (decl : Type.Decl.t)];
let decl = absolutify_type_decl t decl in
{ bindings with
types =
add_to_types
bindings.types
type_name
(Some (Local decl))
~err_msg:"Duplicate type declarations"
; names =
(match decl with
| params, Variants cnstrs ->
let result_type : Type.Scheme.t =
let path = Path.to_module_path current_path in
let params = List.map params ~f:Type.Expr.var in
Type_app ((path, type_name), params)
in
List.fold cnstrs ~init:bindings.names ~f:(fun names (cnstr_name, args) ->
let entry =
Name_entry.val_declared
(match Nonempty.of_list args with
| Some args -> Function (args, result_type)
| None -> result_type)
in
Map.add names ~key:(Value_name.of_cnstr_name cnstr_name) ~data:(Local entry)
|> or_name_clash
"Variant constructor name clashes with another value"
(Cnstr_name.to_ustring cnstr_name))
| _ -> bindings.names)
}
in
update_current t ~f:{ f }
;;
let set_inferred_scheme t name scheme =
let f bindings =
let inferred_entry =
{ Name_entry.type_source = Let_inferred
; typ = Scheme scheme
; fixity = None
; extern_name = None
}
in
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None -> Local inferred_entry
| Some (Local existing_entry) ->
Local (Name_entry.merge existing_entry inferred_entry)
| Some (Imported _) ->
name_error
~msg:"Name clash between imported and local binding"
(Value_name.to_ustring name))
}
in
update_current t ~f:{ f }
;;
let add_name_placeholder t name =
let f bindings =
{ bindings with
names =
Map.update bindings.names name ~f:(function
| None -> Local Name_entry.placeholder
| Some (Local { Name_entry.type_source = Let_inferred; _ } as entry) -> entry
| _ -> name_error ~msg:"Duplicate name" (Value_name.to_ustring name))
}
in
update_current t ~f:{ f }
;;
let add_type_placeholder t type_name =
let f bindings =
{ bindings with
types = add_to_types bindings.types type_name None ~err_msg:"Duplicate type name"
}
in
update_current t ~f:{ f }
;;
let fold_local_names t ~init ~f =
let fold_local path bindings init =
Map.fold bindings.names ~init ~f:(fun ~key:name ~data acc ->
match data with
| Local entry -> f acc (path, name) entry
| Imported _ -> acc)
in
let rec fold_defs t path (defs : defs) ~init ~f =
Map.fold
defs.modules
~init:(fold_local path defs init)
~f:(fun ~key:module_name ~data acc ->
(* We can ignore sigs here because defs should have all the names *)
match data with
| Local (_, defs) -> fold_defs t (path @ [ module_name ]) defs ~init:acc ~f
| Imported _ -> acc)
in
fold_defs t [] t.toplevel ~init ~f
;;
let merge_names t new_names ~combine =
let new_names = Map.map new_names ~f:Or_imported.local in
let f bindings =
{ bindings with
names =
Map.merge_skewed bindings.names new_names ~combine:(fun ~key entry1 entry2 ->
let entry1, entry2 =
resolve_name_or_import t entry1, resolve_name_or_import t entry2
in
Local (combine key entry1 entry2))
}
in
update_current t ~f:{ f }
;;
let rec find_type_decl ?at_path ?defs_only t type_name =
resolve_decl_or_import
?at_path
?defs_only
t
(snd (find_type_decl' ?at_path ?defs_only t type_name))
and resolve_decl_or_import ?at_path ?defs_only t = function
| Some (Or_imported.Local decl) -> Some decl
| Some (Imported path_name) ->
find_type_decl ?at_path ?defs_only t path_name
| None -> None
;;
let find_type_decl ?at_path ?(defs_only = false) t type_name =
option_or_default (find_type_decl ?at_path ~defs_only t type_name) ~f:(fun () ->
compiler_bug
[%message
"Placeholder decl not replaced"
(type_name : Type_name.Qualified.t)
(without_std t : t)])
;;
let resolve_decl_or_import ?at_path t decl_or_import =
option_or_default (resolve_decl_or_import ?at_path t decl_or_import) ~f:(fun () ->
compiler_bug
[%message
"Placeholder decl not replaced"
(decl_or_import : (Type.Decl.t, Type_name.Qualified.t) Or_imported.t option)
(without_std t : t)])
;;
let find_absolute_type_decl = find_type_decl ~at_path:[]
let find_type_decl = find_type_decl ?at_path:None
let current_path t = t.current_path
let find_sigs_and_defs t path module_name =
let open Option.Let_syntax in
let rec loop t path module_name =
find
t
(path, module_name)
~f:
(fun _ module_name -> function
| Sigs _ ->
compiler_bug
[%message
"Name_bindings.find_sigs_and_defs found only sigs"
(path : Module_path.t)
(module_name : Module_name.t)
(t : t)]
| Defs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path ->
let%bind path, module_name = List.split_last path in
Some (loop t path module_name)
| Local sigs_and_defs -> Some sigs_and_defs))
~to_ustring:(fun (path, module_name) ->
Module_path.to_ustring (path @ [ module_name ]))
in
let sigs, defs = loop t path module_name in
Option.map sigs ~f:(fun sigs -> Sigs sigs), Defs defs
;;
module Sigs_or_defs = struct
type name_bindings = t
type t = sigs_or_defs
let names = function
| Sigs { names; _ } | Defs { names; _ } -> names
;;
let value_names = Map.key_set << names
let types = function
| Sigs { types; _ } | Defs { types; _ } -> types
;;
let type_names = Map.key_set << types
let module_names = function
| Sigs sigs -> Map.key_set sigs.modules
| Defs defs -> Map.key_set defs.modules
;;
let make_find ~into_bindings ~resolve t bindings name =
option_or_default
(Map.find (into_bindings bindings) name)
~f:(fun () ->
compiler_bug [%message "Sigs_or_defs.find failed" (bindings : sigs_or_defs)])
|> resolve t
;;
let find_entry = make_find ~into_bindings:names ~resolve:resolve_name_or_import
let find_type_decl = make_find ~into_bindings:types ~resolve:resolve_decl_or_import
let find_module t bindings module_name =
let open Option.Let_syntax in
match bindings with
| Sigs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path -> resolve_path t path ~defs_only:false
| Local (None, sigs) -> Some (Sigs sigs)
| Local (Some _, _) -> .)
| Defs bindings ->
(match%bind Map.find bindings.modules module_name with
| Imported path -> resolve_path t path ~defs_only:false
| Local (Some sigs, _) -> Some (Sigs sigs)
| Local (None, defs) -> Some (Defs defs))
;;
end
|
72586df3bcb3e0e044422d1b9237f857f30c35e422196b47b045a426542387c9 | Holmusk/three-layer | Session.hs | module Lib.Core.Session
( Sessions
, Session (..)
, SessionExpiry (..)
, sessionExpired
, mkNewSession
) where
import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime, getCurrentTime)
import Lib.Core.Id (AnyId)
type Sessions = MVar (HashMap AnyId Session)
newtype Session = Session
{ sLoginTime :: UTCTime
} deriving stock (Eq, Show)
newtype SessionExpiry = SessionExpiry
{ unSessionExpiry :: NominalDiffTime
} deriving newtype (Num)
-- | Checks whether session expired within given interval relative to current time
sessionExpired :: SessionExpiry -> UTCTime -> Session -> Bool
sessionExpired (SessionExpiry expiry) currentTime session =
let sessionEnd = addUTCTime expiry $ sLoginTime session
in sessionEnd <= currentTime
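-- Usage sketch, not part of the original module: with an expiry of 3600
-- seconds, a freshly created session is not yet expired:
--
-- > session <- mkNewSession
-- > now <- liftIO getCurrentTime
-- > let expired = sessionExpired (SessionExpiry 3600) now session  -- False until an hour has passed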
-- | Created a new 'Session'.
mkNewSession :: MonadIO m => m Session
mkNewSession = liftIO $ Session <$> getCurrentTime
| null | https://raw.githubusercontent.com/Holmusk/three-layer/1b58ec102f206681b66a584ca4a7d18f2eb4ef81/src/Lib/Core/Session.hs | haskell | | Checks whether session expired within given interval relative to current time
| Created a new 'Session'. | module Lib.Core.Session
( Sessions
, Session (..)
, SessionExpiry (..)
, sessionExpired
, mkNewSession
) where
import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime, getCurrentTime)
import Lib.Core.Id (AnyId)
type Sessions = MVar (HashMap AnyId Session)
newtype Session = Session
{ sLoginTime :: UTCTime
} deriving stock (Eq, Show)
newtype SessionExpiry = SessionExpiry
{ unSessionExpiry :: NominalDiffTime
} deriving newtype (Num)
sessionExpired :: SessionExpiry -> UTCTime -> Session -> Bool
sessionExpired (SessionExpiry expiry) currentTime session =
let sessionEnd = addUTCTime expiry $ sLoginTime session
in sessionEnd <= currentTime
mkNewSession :: MonadIO m => m Session
mkNewSession = liftIO $ Session <$> getCurrentTime
|
5748e025389fe5601926e0ced16f14c358f01f70757a6a00de25ff79e68f214a | duckyuck/flare | main.cljs | (ns flare.main)
| null | https://raw.githubusercontent.com/duckyuck/flare/4d983fda75fab718b5ec0bf4e3f8ab4bfa1a2080/src/flare/main.cljs | clojure | (ns flare.main)
|
|
7d9e2f6a363f536e1d828151d508375cf3cb79628304c40367a3c51affa41a8d | xvw/preface | indexed_comonad.ml | open QCheck2
module Suite
(R : Model.COVARIANT_2)
(F : Preface_specs.INDEXED_COMONAD
with type ('a, 'index) t = ('a, 'index) R.t)
(A : Model.T0)
(B : Model.T0)
(C : Model.T0)
(D : Model.T0)
(Index : Model.T0) =
struct
module Functor = Indexed_functor.Suite (R) (F) (A) (B) (C) (Index)
module Laws = Preface_laws.Indexed_comonad.For (F)
let print pp = Format.asprintf "%a" (R.pp pp Index.pp)
let comonad_1 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_1 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_2 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_2 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_3 count =
let generator =
Gen.tup3
(fun1 (R.observable A.observable Index.observable) B.generator)
(fun1 (R.observable C.observable Index.observable) A.generator)
(R.generator C.generator Index.generator)
in
let print (_, _, x) = print C.pp x in
Util.test ~count ~print generator Laws.comonad_3 (fun lhs rhs (ff, gg, x) ->
let f = Fn.apply ff
and g = Fn.apply gg in
let left = lhs f g x
and right = rhs f g x in
R.equal B.equal Index.equal left right )
;;
let comonad_4 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_4 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_5 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_5 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_6 count =
let generator =
Gen.tup4
(fun1 (R.observable A.observable Index.observable) B.generator)
(fun1 (R.observable B.observable Index.observable) C.generator)
(fun1 (R.observable C.observable Index.observable) D.generator)
(R.generator A.generator Index.generator)
in
let print (_, _, _, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_6
(fun lhs rhs (ff, gg, hh, x) ->
let f = Fn.apply ff
and g = Fn.apply gg
and h = Fn.apply hh in
let left = lhs f g h x
and right = rhs f g h x in
D.equal left right )
;;
let comonad_7 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_7 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_8 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_8 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_9 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_9 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal
(R.equal (R.equal A.equal Index.equal) Index.equal)
Index.equal left right )
;;
let comonad_10 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_10 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
R.equal B.equal Index.equal left right )
;;
let comonad_11 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_11 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal (R.equal A.equal Index.equal) Index.equal left right )
;;
let comonad_12 count =
let generator =
Gen.tup2
(fun1 A.observable B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_12 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
R.equal B.equal Index.equal left right )
;;
let tests ~count =
Functor.tests ~count
@ [
comonad_1 count
; comonad_2 count
; comonad_3 count
; comonad_4 count
; comonad_5 count
; comonad_6 count
; comonad_7 count
; comonad_8 count
; comonad_9 count
; comonad_10 count
; comonad_11 count
; comonad_12 count
]
;;
end
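(* Instantiation sketch, not part of the original file: the functor is meant to
   be applied to a model of the comonad under test, e.g.
     module Env_suite =
       Suite (Env_model) (Env_comonad) (Sample_int) (Sample_string)
         (Sample_float) (Sample_bool) (Sample_index)
   where every module name above is a placeholder for something satisfying the
   corresponding signature ([Model.COVARIANT_2], [Preface_specs.INDEXED_COMONAD],
   [Model.T0]); [Env_suite.tests ~count:100] then yields the QCheck2 test list
   (the functor laws plus the comonad laws defined above). *)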
| null | https://raw.githubusercontent.com/xvw/preface/51892a7ce2ddfef69de963265da3617968cdb7ad/lib/preface_qcheck/indexed_comonad.ml | ocaml | open QCheck2
module Suite
(R : Model.COVARIANT_2)
(F : Preface_specs.INDEXED_COMONAD
with type ('a, 'index) t = ('a, 'index) R.t)
(A : Model.T0)
(B : Model.T0)
(C : Model.T0)
(D : Model.T0)
(Index : Model.T0) =
struct
module Functor = Indexed_functor.Suite (R) (F) (A) (B) (C) (Index)
module Laws = Preface_laws.Indexed_comonad.For (F)
let print pp = Format.asprintf "%a" (R.pp pp Index.pp)
let comonad_1 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_1 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_2 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_2 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_3 count =
let generator =
Gen.tup3
(fun1 (R.observable A.observable Index.observable) B.generator)
(fun1 (R.observable C.observable Index.observable) A.generator)
(R.generator C.generator Index.generator)
in
let print (_, _, x) = print C.pp x in
Util.test ~count ~print generator Laws.comonad_3 (fun lhs rhs (ff, gg, x) ->
let f = Fn.apply ff
and g = Fn.apply gg in
let left = lhs f g x
and right = rhs f g x in
R.equal B.equal Index.equal left right )
;;
let comonad_4 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_4 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_5 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_5 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
B.equal left right )
;;
let comonad_6 count =
let generator =
Gen.tup4
(fun1 (R.observable A.observable Index.observable) B.generator)
(fun1 (R.observable B.observable Index.observable) C.generator)
(fun1 (R.observable C.observable Index.observable) D.generator)
(R.generator A.generator Index.generator)
in
let print (_, _, _, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_6
(fun lhs rhs (ff, gg, hh, x) ->
let f = Fn.apply ff
and g = Fn.apply gg
and h = Fn.apply hh in
let left = lhs f g h x
and right = rhs f g h x in
D.equal left right )
;;
let comonad_7 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_7 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_8 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_8 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal A.equal Index.equal left right )
;;
let comonad_9 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_9 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal
(R.equal (R.equal A.equal Index.equal) Index.equal)
Index.equal left right )
;;
let comonad_10 count =
let generator =
Gen.tup2
(fun1 (R.observable A.observable Index.observable) B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_10 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
R.equal B.equal Index.equal left right )
;;
let comonad_11 count =
let generator = R.generator A.generator Index.generator in
let print = print A.pp in
Util.test ~count ~print generator Laws.comonad_11 (fun lhs rhs x ->
let left = lhs x
and right = rhs x in
R.equal (R.equal A.equal Index.equal) Index.equal left right )
;;
let comonad_12 count =
let generator =
Gen.tup2
(fun1 A.observable B.generator)
(R.generator A.generator Index.generator)
in
let print (_, x) = print A.pp x in
Util.test ~count ~print generator Laws.comonad_12 (fun lhs rhs (ff, x) ->
let f = Fn.apply ff in
let left = lhs f x
and right = rhs f x in
R.equal B.equal Index.equal left right )
;;
let tests ~count =
Functor.tests ~count
@ [
comonad_1 count
; comonad_2 count
; comonad_3 count
; comonad_4 count
; comonad_5 count
; comonad_6 count
; comonad_7 count
; comonad_8 count
; comonad_9 count
; comonad_10 count
; comonad_11 count
; comonad_12 count
]
;;
end
|
|
5b37b34e1a1d9b6c29b495d251ede4b2c752467ca8b01a47cd36b77cbc76eff1 | NickSeagull/drahko | Syntax.hs | module Drahko.Syntax where
import Relude
newtype Name = Name {unName :: Text}
deriving (Eq, Show, IsString, Generic, Ord, Hashable)
data BinaryOperator
= Add
| Subtract
| Multiply
| Divide
| Equal
| LessThan
| LessThanEqual
| GreaterThan
| GreaterThanEqual
| Concat
deriving (Eq, Show)
data Literal
= Integer Integer
| Floating Double
| String Text
| List [Expression]
deriving (Eq, Show)
data Expression
= BinaryOperatorApply BinaryOperator Expression Expression
| Literal Literal
| Variable Name
| Apply Expression [Expression]
| Projection Expression Expression
| DotAccess Expression Expression
deriving (Eq, Show)
type Block = [Statement]
data ConditionalCase = ConditionalCase Expression Block
deriving (Eq, Show)
data ConditionalStatement
= ConditionalStatement ConditionalCase [ConditionalCase] (Maybe Block)
deriving (Eq, Show)
data Statement
= Return Expression
| While Expression Block
| Break
| Continue
| Function Name [Name] Block
| Call Expression [Expression]
| Condition ConditionalStatement
| Assignment Expression Expression
| NoOp
| RawExpression Expression
| Command Name [Expression]
| Class Name (Maybe Name) [Statement]
deriving (Eq, Show)
newtype Program = Program [Statement]
deriving (Eq, Show)
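-- Construction sketch, not part of the original module (assumes
-- @OverloadedStrings@ so 'Name' literals can be written inline): a tiny
-- program assigning @1 + 2@ to @x@ and returning it could be represented as
--
-- > Program
-- >   [ Assignment (Variable "x")
-- >       (BinaryOperatorApply Add (Literal (Integer 1)) (Literal (Integer 2)))
-- >   , Return (Variable "x")
-- >   ]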
| null | https://raw.githubusercontent.com/NickSeagull/drahko/cd84ee4b25f0d51900c248dfdc225370bfe19728/codegen/src/Drahko/Syntax.hs | haskell | module Drahko.Syntax where
import Relude
newtype Name = Name {unName :: Text}
deriving (Eq, Show, IsString, Generic, Ord, Hashable)
data BinaryOperator
= Add
| Subtract
| Multiply
| Divide
| Equal
| LessThan
| LessThanEqual
| GreaterThan
| GreaterThanEqual
| Concat
deriving (Eq, Show)
data Literal
= Integer Integer
| Floating Double
| String Text
| List [Expression]
deriving (Eq, Show)
data Expression
= BinaryOperatorApply BinaryOperator Expression Expression
| Literal Literal
| Variable Name
| Apply Expression [Expression]
| Projection Expression Expression
| DotAccess Expression Expression
deriving (Eq, Show)
type Block = [Statement]
data ConditionalCase = ConditionalCase Expression Block
deriving (Eq, Show)
data ConditionalStatement
= ConditionalStatement ConditionalCase [ConditionalCase] (Maybe Block)
deriving (Eq, Show)
data Statement
= Return Expression
| While Expression Block
| Break
| Continue
| Function Name [Name] Block
| Call Expression [Expression]
| Condition ConditionalStatement
| Assignment Expression Expression
| NoOp
| RawExpression Expression
| Command Name [Expression]
| Class Name (Maybe Name) [Statement]
deriving (Eq, Show)
newtype Program = Program [Statement]
deriving (Eq, Show)
|
|
2e58cb208ff6622437f650cdb7db9f67b4bbccb44aea02d0cf1f231d9b25fbbe | tek/ribosome | Codes.hs | module Ribosome.Menu.Prompt.Data.Codes where
import Control.Exception (evaluate)
import Data.Map.Strict ((!?))
import qualified Data.Map.Strict as Map (fromList)
import qualified Data.Text as Text (singleton)
specialCodes :: Map Text Text
specialCodes =
Map.fromList [
("\x80\xffX", "<c-@>"),
("\65533kb", "<bs>"),
("\x80kB", "<s-tab>"),
("\x0", "<c-k>"),
("\x80kD", "<del>"),
("\x9B", "<csi>"),
("\x80\xfdP", "<xcsi>"),
("\x80ku", "<up>"),
("\x80kd", "<down>"),
("\x80kl", "<left>"),
("\x80kr", "<right>"),
( " \x80\xfd " , " s - up " ) ,
-- ("\x80\xfd", "s-down"),
("\x80#4", "<s-left>"),
("\x80%i", "<s-right>"),
("\x80\xfdT", "<c-left>"),
("\x80\xfdU", "<c-right>"),
("\x80k1", "<f1>"),
("\x80k2", "<f2>"),
("\x80k3", "<f3>"),
("\x80k4", "<f4>"),
("\x80k5", "<f5>"),
("\x80k6", "<f6>"),
("\x80k7", "<f7>"),
("\x80k8", "<f8>"),
("\x80k9", "<f9>"),
("\x80k;", "<f10>"),
("\x80F1", "<f11>"),
("\x80F2", "<f12>"),
("\x80\xfd\x06", "<s-f1>"),
("\x80\xfd\x07", "<s-f2>"),
("\x80\xfd\x08", "<s-f3>"),
("\x80\xfd\x09", "<s-f4>"),
("\x80\xfd\x0A", "<s-f5>"),
("\x80\xfd\x0B", "<s-f6>"),
("\x80\xfd\x0C", "<s-f7>"),
("\x80\xfd\x0D", "<s-f8>"),
("\x80\xfd\x0E", "<s-f9>"),
("\x80\xfd\x0F", "<s-f10>"),
("\x80\xfd\x10", "<s-f11>"),
("\x80\xfd\x11", "<s-f12>"),
("\x80%1", "<help>"),
("\x80&8", "<undo>"),
("\x80kI", "<insert>"),
("\x80kh", "<home>"),
("\x80@7", "<end>"),
("\x80kP", "<pageup>"),
("\x80kN", "<pagedown>"),
("\x80K1", "<khome>"),
("\x80K4", "<kend>"),
("\x80K3", "<kpageup>"),
("\x80K5", "<kpagedown>"),
("\x80K6", "<kplus>"),
("\x80K7", "<kminus>"),
("\x80K9", "<kmultiply>"),
("\x80K8", "<kdivide>"),
("\x80KA", "<kenter>"),
("\x80KB", "<kpoint>"),
("\x80KC", "<k0>"),
("\x80KD", "<k1>"),
("\x80KE", "<k2>"),
("\x80KF", "<k3>"),
("\x80KG", "<k4>"),
("\x80KH", "<k5>"),
("\x80KI", "<k6>"),
("\x80KJ", "<k7>"),
("\x80KK", "<k8>"),
("\x80KL", "<k9>")
]
specialNumCodes :: Map Int Text
specialNumCodes =
Map.fromList [
(9, "<tab>"),
(10, "<c-j>"),
(11, "<c-k>"),
(12, "<fe>"),
(13, "<cr>"),
(14, "<c-n>"),
(25, "<c-y>"),
(27, "<esc>"),
(32, "<space>"),
(60, "<lt>"),
(92, "<bslash>"),
(124, "<bar>")
]
modifierCodes :: [(Int, Text)]
modifierCodes =
[
(2, "shift"),
(4, "control"),
(8, "alt"),
(16, "meta"),
(32, "mouse_double"),
(64, "mouse_triple"),
(96, "mouse_quadruple"),
(128, "command")
]
decodeInputChar :: Text -> Maybe Text
decodeInputChar =
(specialCodes !?)
decodeInputNum ::
Member (Embed IO) r =>
Int ->
Sem r (Maybe Text)
decodeInputNum a =
maybe codepoint (pure . Just) (specialNumCodes !? a)
where
codepoint =
fmap Text.singleton . rightToMaybe <$> tryAny (evaluate (chr a))
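-- Usage sketch, not part of the original module: special-key byte sequences
-- decode to their vim-style names, and 'decodeInputNum' falls back to the
-- plain codepoint when the number is not in 'specialNumCodes':
--
-- > decodeInputChar "\x80ku"  -- Just "<up>"
-- > decodeInputChar "a"       -- Nothing
-- > decodeInputNum 13         -- yields Just "<cr>"
-- > decodeInputNum 97         -- yields Just "a" via the codepoint fallback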
| null | https://raw.githubusercontent.com/tek/ribosome/a676b4f0085916777bfdacdcc761f82d933edb80/packages/menu/lib/Ribosome/Menu/Prompt/Data/Codes.hs | haskell | ("\x80\xfd", "s-down"), | module Ribosome.Menu.Prompt.Data.Codes where
import Control.Exception (evaluate)
import Data.Map.Strict ((!?))
import qualified Data.Map.Strict as Map (fromList)
import qualified Data.Text as Text (singleton)
specialCodes :: Map Text Text
specialCodes =
Map.fromList [
("\x80\xffX", "<c-@>"),
("\65533kb", "<bs>"),
("\x80kB", "<s-tab>"),
("\x0", "<c-k>"),
("\x80kD", "<del>"),
("\x9B", "<csi>"),
("\x80\xfdP", "<xcsi>"),
("\x80ku", "<up>"),
("\x80kd", "<down>"),
("\x80kl", "<left>"),
("\x80kr", "<right>"),
( " \x80\xfd " , " s - up " ) ,
("\x80#4", "<s-left>"),
("\x80%i", "<s-right>"),
("\x80\xfdT", "<c-left>"),
("\x80\xfdU", "<c-right>"),
("\x80k1", "<f1>"),
("\x80k2", "<f2>"),
("\x80k3", "<f3>"),
("\x80k4", "<f4>"),
("\x80k5", "<f5>"),
("\x80k6", "<f6>"),
("\x80k7", "<f7>"),
("\x80k8", "<f8>"),
("\x80k9", "<f9>"),
("\x80k;", "<f10>"),
("\x80F1", "<f11>"),
("\x80F2", "<f12>"),
("\x80\xfd\x06", "<s-f1>"),
("\x80\xfd\x07", "<s-f2>"),
("\x80\xfd\x08", "<s-f3>"),
("\x80\xfd\x09", "<s-f4>"),
("\x80\xfd\x0A", "<s-f5>"),
("\x80\xfd\x0B", "<s-f6>"),
("\x80\xfd\x0C", "<s-f7>"),
("\x80\xfd\x0D", "<s-f8>"),
("\x80\xfd\x0E", "<s-f9>"),
("\x80\xfd\x0F", "<s-f10>"),
("\x80\xfd\x10", "<s-f11>"),
("\x80\xfd\x11", "<s-f12>"),
("\x80%1", "<help>"),
("\x80&8", "<undo>"),
("\x80kI", "<insert>"),
("\x80kh", "<home>"),
("\x80@7", "<end>"),
("\x80kP", "<pageup>"),
("\x80kN", "<pagedown>"),
("\x80K1", "<khome>"),
("\x80K4", "<kend>"),
("\x80K3", "<kpageup>"),
("\x80K5", "<kpagedown>"),
("\x80K6", "<kplus>"),
("\x80K7", "<kminus>"),
("\x80K9", "<kmultiply>"),
("\x80K8", "<kdivide>"),
("\x80KA", "<kenter>"),
("\x80KB", "<kpoint>"),
("\x80KC", "<k0>"),
("\x80KD", "<k1>"),
("\x80KE", "<k2>"),
("\x80KF", "<k3>"),
("\x80KG", "<k4>"),
("\x80KH", "<k5>"),
("\x80KI", "<k6>"),
("\x80KJ", "<k7>"),
("\x80KK", "<k8>"),
("\x80KL", "<k9>")
]
specialNumCodes :: Map Int Text
specialNumCodes =
Map.fromList [
(9, "<tab>"),
(10, "<c-j>"),
(11, "<c-k>"),
(12, "<fe>"),
(13, "<cr>"),
(14, "<c-n>"),
(25, "<c-y>"),
(27, "<esc>"),
(32, "<space>"),
(60, "<lt>"),
(92, "<bslash>"),
(124, "<bar>")
]
modifierCodes :: [(Int, Text)]
modifierCodes =
[
(2, "shift"),
(4, "control"),
(8, "alt"),
(16, "meta"),
(32, "mouse_double"),
(64, "mouse_triple"),
(96, "mouse_quadruple"),
(128, "command")
]
decodeInputChar :: Text -> Maybe Text
decodeInputChar =
(specialCodes !?)
decodeInputNum ::
Member (Embed IO) r =>
Int ->
Sem r (Maybe Text)
decodeInputNum a =
maybe codepoint (pure . Just) (specialNumCodes !? a)
where
codepoint =
fmap Text.singleton . rightToMaybe <$> tryAny (evaluate (chr a))
|
9fa63d32ddeb5fd44a322faa66c736a934e8521d7f9581dc237188979e4a872f | hasktorch/hasktorch | Tensor.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Torch.Tensor where
import Control.Exception.Safe (throwIO)
import Control.Monad (forM, forM_)
import Numeric.Half
import Data.Complex
import Data.Int (Int16, Int64)
import Data.List (intercalate)
import Data.Proxy
import Data.Reflection
import qualified Data.Vector as V
import Data.Word (Word8)
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Storable
import GHC.Generics
import Numeric
import System.IO.Unsafe
import Torch.DType
import Torch.Device
import Torch.Internal.Cast
import Torch.Internal.Class (Castable (..), CppTuple2 (..), CppTuple3 (..), CppTuple4 (..))
import qualified Torch.Internal.Const as ATen
import qualified Torch.Internal.Managed.Cast as ATen
import qualified Torch.Internal.Managed.Native as ATen
import qualified Torch.Internal.Managed.TensorFactories as LibTorch
import qualified Torch.Internal.Managed.Type.Context as ATen
import qualified Torch.Internal.Managed.Type.StdArray as ATen
import qualified Torch.Internal.Managed.Type.StdString as ATen
import qualified Torch.Internal.Managed.Type.Tensor as ATen
import qualified Torch.Internal.Managed.Type.TensorIndex as ATen
import qualified Torch.Internal.Managed.Type.TensorOptions as ATen
import qualified Torch.Internal.Managed.Type.Extra as ATen
import qualified Torch.Internal.Type as ATen
import qualified Torch.Internal.Unmanaged.Type.Tensor as Unmanaged (tensor_data_ptr)
import Torch.Lens
import Torch.TensorOptions
type ATenTensor = ForeignPtr ATen.Tensor
-- do not use the constructor
newtype Tensor = Unsafe ATenTensor
instance Castable Tensor ATenTensor where
cast (Unsafe aten_tensor) f = f aten_tensor
uncast aten_tensor f = f $ Unsafe aten_tensor
newtype MutableTensor = MutableTensor Tensor deriving Show
newMutableTensor :: Tensor -> IO MutableTensor
newMutableTensor tensor = MutableTensor <$> cast1 ATen.detach_t tensor
toImmutable :: MutableTensor -> IO Tensor
toImmutable (MutableTensor tensor) = cast1 ATen.detach_t tensor
--------------------------------------------------------------------------------
-- Basic tensor properties
--------------------------------------------------------------------------------
-- | Returns the total number of elements in the input tensor.
numel ::
-- | input
Tensor ->
-- | number of elements in tensor
Int
numel t = unsafePerformIO $ cast1 ATen.tensor_numel $ t
-- | Returns the size of a given dimension of the input tensor.
size ::
-- | dimension
Int ->
-- | input
Tensor ->
Int
size dim t = unsafePerformIO $ (cast2 ATen.tensor_size_l) t dim
-- | Returns the shape of the tensor
shape ::
-- | input
Tensor ->
-- | list of integers representing the shape of the tensor
[Int]
shape t = unsafePerformIO $ (cast1 ATen.tensor_sizes) t
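-- Usage sketch, not part of the original module: for a tensor @t@ with shape
-- [2, 3] (for instance one built by a factory such as @zeros' [2, 3]@), the
-- accessors relate as
--
-- > shape t   -- [2, 3]
-- > dim t     -- 2
-- > size 1 t  -- 3
-- > numel t   -- 6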
-- | Returns the dimensions of the input tensor
dim ::
-- | input
Tensor ->
-- | output
Int
dim t = unsafePerformIO $ (cast1 ATen.tensor_dim) t
-- | Returns the dimensions of the input tensor
dimUnsafe ::
-- | input
Tensor ->
-- | output
Int
dimUnsafe t = unsafePerformIO $ (cast1 ATen.tensor_dim_unsafe) t
-- | Returns the number of dimensions of the input tensor
dimCUnsafe ::
-- | input
Tensor ->
-- | output
Int
dimCUnsafe t = unsafePerformIO $ (cast1 ATen.tensor_dim_c_unsafe) t
-- | Returns the device on which the tensor is currently allocated
device ::
-- | input
Tensor ->
-- | object representing the device
Device
device t = unsafePerformIO $ do
hasCUDA <- cast0 ATen.hasCUDA :: IO Bool
if hasCUDA
then do
isCUDA <- cast1 ATen.tensor_is_cuda t :: IO Bool
if isCUDA then cuda <$> cast1 ATen.tensor_get_device t else pure cpu
else pure cpu
where
cpu = Device {deviceType = CPU, deviceIndex = 0}
cuda :: Int -> Device
cuda di = Device {deviceType = CUDA, deviceIndex = fromIntegral di}
-- | Returns the data type of the input tensor
dtype ::
-- | input
Tensor ->
-- | data type of the input tensor
DType
dtype t = unsafePerformIO $ cast1 ATen.tensor_scalar_type t
toComplex :: Tensor -> Complex Double
toComplex t = unsafePerformIO $
case dtype t of
ComplexHalf -> do
r :+ i <- withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Half)
return (realToFrac r :+ realToFrac i)
ComplexFloat -> do
r :+ i <- withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Float)
return (realToFrac r :+ realToFrac i)
ComplexDouble -> withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Double)
_ -> (:+ 0) <$> cast1 ATen.tensor_item_double t
toDouble :: Tensor -> Double
toDouble t = unsafePerformIO $ cast1 ATen.tensor_item_double t
toInt :: Tensor -> Int
toInt t = unsafePerformIO $ cast1 ATen.tensor_item_int64_t t
-- | Casts the input tensor to the given data type
_toType ::
-- | data type to cast input to
DType ->
-- | input
Tensor ->
-- | output
Tensor
_toType dtype t = unsafePerformIO $ cast2 ATen.tensor_toType_s t dtype
instance HasTypes Tensor Tensor where
types_ = id
instance HasTypes (a -> a) Tensor where
types_ _ = pure
instance HasTypes Int Tensor where
types_ _ = pure
instance HasTypes Double Tensor where
types_ _ = pure
instance HasTypes Float Tensor where
types_ _ = pure
instance HasTypes Bool Tensor where
types_ _ = pure
instance HasTypes Int Int where
types_ = id
instance HasTypes Float Float where
types_ = id
instance HasTypes Double Double where
types_ = id
instance HasTypes Bool Bool where
types_ = id
toType :: forall a. HasTypes a Tensor => DType -> a -> a
toType dtype t = over (types @Tensor @a) (_toType dtype) t
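-- For example (illustrative): `toType Float t` recasts a tensor's dtype, and
-- because it goes through the `types` traversal the same call works for any
-- type with a `HasTypes a Tensor` instance.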
toDevice :: forall a. HasTypes a Tensor => Device -> a -> a
toDevice device' t = over (types @Tensor @a) (_toDevice device') t
-- | Casts the input tensor to the given device
_toDevice ::
-- | device to cast input to
Device ->
-- | input
Tensor ->
-- | output
Tensor
_toDevice device' t = unsafePerformIO $ do
hasCUDA <- cast0 ATen.hasCUDA :: IO Bool
let device = Torch.Tensor.device t
t' <-
toDevice'
(deviceType device)
(deviceType device')
(deviceIndex device)
(deviceIndex device')
hasCUDA
check
(deviceType device')
(deviceType $ Torch.Tensor.device t')
(deviceIndex device')
(deviceIndex $ Torch.Tensor.device t')
pure t'
where
toDevice' dt dt' di di' _ | dt == dt' && di == di' = pure t -- do nothing
toDevice' CUDA CUDA di di' True | di /= di' = getOpts t >>= withDeviceIndex di' >>= to t -- copy from di to di'
toDevice' CPU CUDA 0 di' True | di' >= 0 = getOpts t >>= withDeviceIndex di' >>= to t -- copy from cpu:0 to cuda:di'
toDevice' CUDA CPU di 0 True | di >= 0 = getOpts t >>= withDeviceType CPU >>= to t -- copy from cuda:di to cpu:0
toDevice' dt dt' di di' _ =
error $
"cannot move tensor from \""
<> show dt
<> ":"
<> show di
<> "\" to \""
<> show dt'
<> ":"
<> show di'
<> "\""
getOpts :: Tensor -> IO TensorOptions
getOpts = cast1 ATen.tensor_options
withDeviceType :: DeviceType -> TensorOptions -> IO TensorOptions
withDeviceType dt opts = cast2 ATen.tensorOptions_device_D opts dt
withDeviceIndex :: Int16 -> TensorOptions -> IO TensorOptions
withDeviceIndex di opts = cast2 ATen.tensorOptions_device_index_s opts di -- careful, setting the device index implies setting the device type to CUDA!
to :: Tensor -> TensorOptions -> IO Tensor
to t opts = cast4 ATen.tensor_to_obb t opts nonBlocking copy
where
nonBlocking = False
copy = False
check dt dt' di di' | dt == dt' && di == di' = pure ()
check dt dt' di di' =
error $
"moving of tensor failed: device should have been \""
<> show dt
<> ":"
<> show di
<> "\" but is \""
<> show dt'
<> ":"
<> show di'
<> "\""
toDeviceWithTensor :: Tensor -> Tensor -> Tensor
toDeviceWithTensor reference input = unsafePerformIO $ cast2 ATen.tensor_to_device reference input
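-- Usage sketch (illustrative): on a CUDA-enabled build,
--   toDevice (Device CUDA 0) t
-- copies `t` to the first GPU and `toDevice (Device CPU 0)` copies it back;
-- unsupported moves (e.g. without CUDA) hit the "cannot move tensor" error above.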
-- | Slices the input tensor along the selected dimension at the given index.
select ::
-- | dimension to slice along
Int ->
-- | index in the given dimension
Int ->
-- | input
Tensor ->
-- | output
Tensor
select dim idx t = unsafePerformIO $ cast3 ATen.tensor_select_ll t dim idx
-- | Returns a new tensor which indexes the input tensor along dimension dim using the entries in index, which is a LongTensor.
indexSelect ::
-- | dim
Int ->
-- | indexTensor
Tensor ->
-- | input
Tensor ->
-- | output
Tensor
indexSelect dim indexTensor t = unsafePerformIO $ (cast3 ATen.index_select_tlt) t dim indexTensor
indexSelect' ::
-- | dim
Int ->
-- | indexList
[Int] ->
-- | input
Tensor ->
-- | output
Tensor
indexSelect' dim indexList t = unsafePerformIO $ (cast3 ATen.index_select_tlt) t dim (asTensor' indexList t)
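-- For example (illustrative): indexSelect' 0 [0, 0, 1] t gathers the slices at
-- positions 0, 0 and 1 along dimension 0 of `t` (the index list is converted
-- to a tensor with asTensor').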
-- | Slices the input tensor along the selected dimension at the given range.
sliceDim ::
-- | dim
Int ->
-- | start
Int ->
-- | end
Int ->
-- | step
Int ->
-- | input
Tensor ->
Tensor
sliceDim _dim _start _end _step _self = unsafePerformIO $ (cast5 ATen.slice_tllll) _self _dim _start _end _step
isContiguous ::
Tensor ->
Bool
isContiguous t = unsafePerformIO $ (cast1 ATen.tensor_is_contiguous) t
contiguous ::
Tensor ->
Tensor
contiguous t = unsafePerformIO $ (cast1 ATen.tensor_contiguous) t
-- | Returns a tensor with the same data and number of elements as input, but with the specified shape.
reshape ::
[Int] ->
Tensor ->
Tensor
reshape shape t = unsafePerformIO $ cast2 ATen.reshape_tl t shape
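-- For example (illustrative):
--   reshape [3, 2] (asTensor ([[1, 2, 3], [4, 5, 6]] :: [[Int]]))
-- returns a tensor of shape [3, 2] containing the same six elements.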
--------------------------------------------------------------------------------
-- Move backend
--------------------------------------------------------------------------------
toSparse :: Tensor -> Tensor
toSparse t = unsafePerformIO $ (cast1 ATen.tensor_to_sparse) t
toDense :: Tensor -> Tensor
toDense t = unsafePerformIO $ (cast1 ATen.tensor_to_dense) t
toMKLDNN :: Tensor -> Tensor
toMKLDNN t = unsafePerformIO $ (cast1 ATen.tensor_to_mkldnn) t
toCPU :: Tensor -> Tensor
toCPU t = unsafePerformIO $ (cast1 ATen.tensor_cpu) t
toCUDA :: Tensor -> Tensor
toCUDA t = unsafePerformIO $ (cast1 ATen.tensor_cuda) t
--------------------------------------------------------------------------------
-- Indexing support
--------------------------------------------------------------------------------
-- TensorIndex is the same as slice of pytorch.
--
-- There is one-to-one correspondence between Pytorch and Hasktorch tensor index types:
-- Pytorch | Hasktorch
-- -----------------------------------------------------
-- `None` | `None`
-- `Ellipsis` | `Ellipsis`
-- `...` | `Ellipsis`
-- `123` | `123`
-- `True` / `False` | `True` / `False`
-- `:` | `Slice ()`
-- `::` | `Slice ()`
-- `1:` | `Slice (1, None)`
-- `1::` | `Slice (1, None)`
-- `:3` | `Slice (None, 3)`
-- `:3:` | `Slice (None, 3)`
-- `::2` | `Slice (None, None, 2)`
-- `1:3` | `Slice (1, 3)`
-- `1::2` | `Slice (1, None, 2)`
-- `:3:2` | `Slice (None, 3, 2)`
-- `1:3:2` | `Slice (1, 3, 2)`
-- `torch.tensor([1, 2])` | `asTensor([1, 2 :: Int])`
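-- Usage sketch for the table above (illustrative): given a tensor `t` of shape
-- [2, 3], the Pytorch expression `t[1, 0:2]` is written
--   t ! (1 :: Int, Slice (0 :: Int, 2 :: Int))
-- and `t[..., 0]` is written
--   t ! (Ellipsis, 0 :: Int)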
newtype RawTensorIndexList = RawTensorIndexList (ForeignPtr (ATen.StdVector ATen.TensorIndex))
newtype RawTensorIndex = RawTensorIndex (ForeignPtr ATen.TensorIndex)
(!) :: TensorIndex a => Tensor -> a -> Tensor
(Unsafe t) ! idx = unsafePerformIO $ do
let idxs = pushIndex [] idx
vec <- ATen.newTensorIndexList
forM_ idxs $ \(RawTensorIndex i) -> do
ATen.tensorIndexList_push_back vec i
ATen.index t vec >>= (return . Unsafe)
maskedFill :: (TensorIndex a, TensorLike t) => Tensor -> a -> t -> Tensor
maskedFill (Unsafe t') idx v' = unsafePerformIO $ do
let idxs = pushIndex [] idx
(Unsafe v) = asTensor v'
t <- ATen.clone_t t'
vec <- ATen.newTensorIndexList
forM_ idxs $ \(RawTensorIndex i) -> do
ATen.tensorIndexList_push_back vec i
ATen.index_put_ t vec v
return $ Unsafe t
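-- Illustrative helper (not part of the original API): overwrite the first row
-- of a rank-2 tensor with zeros using the index types defined below, e.g.
--   zeroFirstRow (asTensor ([[1, 2], [3, 4]] :: [[Float]]))  -- ~> [[0, 0], [3, 4]]
zeroFirstRow :: Tensor -> Tensor
zeroFirstRow t = maskedFill t (0 :: Int, Slice ()) (0 :: Float)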
data None = None
deriving (Show, Eq)
data Ellipsis = Ellipsis
deriving (Show, Eq)
newtype Slice a = Slice a
deriving (Show, Eq)
instance Castable RawTensorIndex (ForeignPtr ATen.TensorIndex) where
cast (RawTensorIndex obj) f = f obj
uncast obj f = f $ RawTensorIndex obj
class TensorIndex a where
pushIndex :: [RawTensorIndex] -> a -> [RawTensorIndex]
toLens :: TensorLike b => a -> Lens' Tensor b
default toLens :: TensorLike b => a -> Lens' Tensor b
toLens idx func s = maskedFill s idx <$> (asTensor <$> func (asValue (s ! idx)))
instance {-# OVERLAPS #-} TensorIndex None where
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithNone
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} TensorIndex Ellipsis where
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithEllipsis
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} TensorIndex Bool where
pushIndex vec b = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithBool (if b then 1 else 0)
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (a, a)) where
pushIndex vec (Slice (start, end)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (fromIntegral end :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (a, a, a)) where
pushIndex vec (Slice (start, end, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (fromIntegral end :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (None, None, a)) where
pushIndex vec (Slice (_, _, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice a) where
pushIndex vec (Slice start) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (a, None)) where
pushIndex vec (Slice (start, _)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (a, None, a)) where
pushIndex vec (Slice (start, _, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (None, a, a)) where
pushIndex vec (Slice (_, end, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (fromIntegral end :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} (Integral a) => TensorIndex (Slice (None, a)) where
pushIndex vec (Slice (_, end)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (fromIntegral end :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance {-# OVERLAPS #-} TensorIndex (Slice ()) where
pushIndex vec (Slice ()) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance TensorIndex Int where
pushIndex vec v = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithInt (fromIntegral v :: CInt)
return ((RawTensorIndex idx) : vec)
instance TensorIndex Integer where
pushIndex vec v = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithInt (fromIntegral v :: CInt)
return ((RawTensorIndex idx) : vec)
instance TensorIndex Tensor where
pushIndex vec v = unsafePerformIO $ do
idx <- cast1 ATen.newTensorIndexWithTensor v
return (idx : vec)
instance TensorIndex () where
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance (TensorIndex a, TensorIndex b) => TensorIndex (a, b) where
pushIndex vec (a, b) = (flip pushIndex a) . (flip pushIndex b) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c) => TensorIndex (a, b, c) where
pushIndex vec (a, b, c) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c, TensorIndex d) => TensorIndex (a, b, c, d) where
pushIndex vec (a, b, c, d) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) . (flip pushIndex d) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c, TensorIndex d, TensorIndex e) => TensorIndex (a, b, c, d, e) where
pushIndex vec (a, b, c, d, e) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) . (flip pushIndex d) . (flip pushIndex e) $ vec
--------------------------------------------------------------------------------
-- Scalar <-> Tensor promotion
--------------------------------------------------------------------------------
asValue :: TensorLike a => Tensor -> a
asValue t =
let cpuTensor = if device t == Device CPU 0 then t else toCPU t
contTensor = if isContiguous cpuTensor then cpuTensor else contiguous cpuTensor
in _asValue contTensor
class TensorOptionLike a where
withTensorOptions :: Tensor -> a -> Tensor
instance TensorOptionLike TensorOptions where
withTensorOptions t opts = unsafePerformIO $ cast4 ATen.tensor_to_obb t opts nonBlocking copy
where
nonBlocking = False
copy = False
instance TensorOptionLike Tensor where
withTensorOptions t opts = unsafePerformIO $ cast4 ATen.tensor_to_tbb t opts nonBlocking copy
where
nonBlocking = False
copy = False
class TensorLike a where
asTensor' :: TensorOptionLike opt => a -> opt -> Tensor
asTensor' v opts = withTensorOptions (asTensor v) opts
asTensor :: a -> Tensor
_asValue :: Tensor -> a
-- Internal functions(like "_xxx") are below. Do not use them directly.
_dtype :: DType
_dims :: a -> [Int]
_deepDims :: a -> Maybe [Int]
_peekElemOff :: Ptr () -> Int -> [Int] -> IO a
_pokeElemOff :: Ptr () -> Int -> a -> IO ()
bool_opts = withDType Bool defaultOpts
uint8_opts = withDType UInt8 defaultOpts
int64_opts = withDType Int64 defaultOpts
float_opts = withDType Float defaultOpts
double_opts = withDType Double defaultOpts
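-- Round-trip sketch (illustrative): asTensor ([1, 2, 3] :: [Int]) builds an
-- integer tensor of shape [3], and
--   asValue (asTensor ([1, 2, 3] :: [Int])) :: [Int]
-- recovers the list; `_asValue` throws when the requested element type does
-- not match the tensor's dtype.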
withTensor :: Tensor -> (Ptr () -> IO a) -> IO a
withTensor t fn =
let tensor = if isContiguous t then t else contiguous t
in cast tensor $ \t' -> withForeignPtr t' $ \tensor_ptr -> Unmanaged.tensor_data_ptr tensor_ptr >>= fn
-- | The internal version of withTensor; it does not check for a contiguous memory layout.
_withTensor :: Tensor -> (Ptr () -> IO a) -> IO a
_withTensor t fn =
cast t $ \t' -> withForeignPtr t' $ \tensor_ptr -> Unmanaged.tensor_data_ptr tensor_ptr >>= fn
instance {-# OVERLAPPING #-} (Reifies a DType, Storable a) => TensorLike a where
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) [] $ withDType (_dtype @a) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @a == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 []
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @a) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = reflect (Proxy :: Proxy a)
_dims _ = []
_deepDims _ = Just []
_peekElemOff ptr offset _ = peekElemOff (castPtr ptr) offset
_pokeElemOff ptr offset v = pokeElemOff (castPtr ptr) offset v
instance {-# OVERLAPPING #-} TensorLike Bool where
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) [] $ withDType (_dtype @Bool) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @Bool == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 []
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @Bool) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = reflect (Proxy :: Proxy Bool)
_dims _ = []
_deepDims _ = Just []
_peekElemOff ptr offset _ = (/= 0) <$> (peekElemOff (castPtr ptr) offset :: IO Word8)
_pokeElemOff ptr offset v = pokeElemOff (castPtr ptr) offset ((if v then 1 else 0) :: Word8)
instance {-# OVERLAPPING #-} TensorLike Tensor where
asTensor' v opts = withTensorOptions v opts
asTensor = id
_asValue = id
_dtype = error "Not implemented for Tensor-type"
_dims v = error "Not implemented for Tensor-type"
_deepDims v = error "Not implemented for Tensor-type"
_peekElemOff = error "Not implemented for Tensor-type"
_pokeElemOff = error "Not implemented for Tensor-type"
instance {-# OVERLAPPING #-} TensorLike a => TensorLike (a, a) where
asTensor (a, b) = asTensor [a, b]
_asValue v =
let [a, b] = _asValue v
in (a, b)
_dtype = error "Not implemented for tuple-type"
_dims v = error "Not implemented for tuple-type"
_deepDims v = error "Not implemented for tuple-type"
_peekElemOff = error "Not implemented for tuple-type"
_pokeElemOff = error "Not implemented for tuple-type"
instance {-# OVERLAPPING #-} TensorLike a => TensorLike [a] where
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) (_dims v) $ withDType (_dtype @a) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @a == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 (shape t)
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @a) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = _dtype @a
_dims [] = [0]
_dims v@(x : _) = (length v) : (_dims x)
_deepDims [] = Just [0]
_deepDims v@(x : xs) = do
deepDimsX <- _deepDims x
deepDimsXs <- traverse _deepDims xs
if and $ fmap (deepDimsX ==) deepDimsXs
then return $ length v : deepDimsX
else Nothing
_peekElemOff ptr offset [] = return []
_peekElemOff ptr offset (d : dims) =
let width = product dims
in forM [0 .. (d -1)] $ \i ->
_peekElemOff ptr (offset + i * width) dims
_pokeElemOff ptr offset [] = return ()
_pokeElemOff ptr offset v@(x : _) =
let width = product (_dims x)
in forM_ (zip [0 ..] v) $ \(i, d) ->
if product (_dims d) == width -- This validation may be slow.
then (_pokeElemOff @a) ptr (offset + i * width) d
else throwIO $ userError $ "There are lists having different lengths."
class AsTensors as where
toTensors :: as -> V.Vector Tensor
default toTensors :: (Generic as, GAsTensors (Rep as)) => as -> V.Vector Tensor
toTensors a = gToTensors $ from a
instance TensorLike a => AsTensors a where
toTensors = pure . asTensor
class GAsTensors record where
gToTensors :: record as -> V.Vector Tensor
instance (GAsTensors ls, GAsTensors rs) => GAsTensors (ls :*: rs) where
gToTensors (g :*: d) = gToTensors g V.++ gToTensors d
instance (GAsTensors ls, GAsTensors rs) => GAsTensors (ls :+: rs) where
gToTensors (L1 g) = gToTensors g
gToTensors (R1 g) = gToTensors g
instance (GAsTensors ls) => GAsTensors (M1 i c ls) where
gToTensors (M1 g) = gToTensors g
instance (TensorLike ls) => GAsTensors (K1 i ls) where
gToTensors (K1 g) = pure $ asTensor g
--------------------------------------------------------------------------------
-- Show
--------------------------------------------------------------------------------
instance Show Tensor where
show t' =
case (dim t) of
0 -> details ++ show0d t
1 -> details ++ show1d t
n -> details ++ shownd n 0 t
where
t = if device t' == Device CPU 0 then t' else toCPU t'
-- TODO: this is obviously not the right way to do it,
-- and will be terribly slow, so please fix it.
showElems elemShow sep t = "[" ++ (intercalate sep $ map elemShow [t ! i | i <- [0 .. ((size 0 t) - 1)]]) ++ "]"
padPositive x s = if x >= 0 then " " ++ s else s
-- TODO: this assumes that scientific notation only uses one-digit exponents, which is not
-- true in general
padLarge x s = if (abs x) >= 0.1 then s ++ " " else s
show0d x =
if isIntegral (dtype t)
then padPositive (toInt x) $ show $ toInt x
else
if isComplex (dtype t)
then
let r :+ i = toComplex x
in (padLarge r $ padPositive r $ showGFloat (Just 4) r "") ++ " + i" ++
(padLarge i $ padPositive i $ showGFloat (Just 4) i "")
else padLarge (toDouble x) $ padPositive (toDouble x) $ showGFloat (Just 4) (toDouble x) ""
show1d = showElems show0d ", "
shownd n offset =
case n of
2 -> showElems show1d (",\n " ++ padding ++ replicate offset ' ')
_ -> showElems (shownd (n -1) (offset + 1)) (",\n " ++ padding ++ replicate offset ' ')
details = "Tensor " ++ (show $ dtype t) ++ " " ++ (show $ shape t) ++ " "
padding = map (const ' ') details
--------------------------------------------------------------------------------
-- Castable instances
--------------------------------------------------------------------------------
-- NB: ATen only defines Castable [ForeignPtr ATen.Tensor] (ForeignPtr ATen.TensorList)
instance Castable [Tensor] (ForeignPtr ATen.TensorList) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
instance Castable [Tensor] (ForeignPtr (ATen.C10List ATen.Tensor)) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
instance Castable [Tensor] (ForeignPtr (ATen.C10List (ATen.C10Optional ATen.Tensor))) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/a31ef707927cd70ea9283e3b10f2270ef3e2935a/hasktorch/src/Torch/Tensor.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE TypeSynonymInstances #
do not use the constructor
------------------------------------------------------------------------------
Basic tensor properties
------------------------------------------------------------------------------
| Returns the total number of elements in the input tensor.
| input
| number of elements in tensor
| Returns the size of a given dimension of the input tensor.
| dimension
| input
| Returns the shape of the tensor
| input
| list of integers representing the shape of the tensor
| Returns the dimensions of the input tensor
| input
| output
| Returns the dimensions of the input tensor
| input
| output
| Returns the dimensions of the input tensor
| input
| output
| Returns the device on which the tensor is currently allocated
| input
| object representing the device
| Returns the data type of the input tensor
| input
| data type of the input tensor
| Casts the input tensor to the given data type
| data type to cast input to
| input
| output
| Casts the input tensor to given device
| device to cast input to
| input
| output
do nothing
copy from cpu:0 to cuda:di'
copy from cuda:di to cpu:0
careful, setting the device index implies setting the device type to CUDA!
| Slices the input tensor along the selected dimension at the given index.
| dimension to slice along
| index in the given dimension
| input
| output
| dim
| indexTensor
| input
| output
| dim
| indexList
| input
| output
| Slices the input tensor along the selected dimension at the given range.
| dim
| start
| step
| input
| Returns a tensor with the same data and number of elements as input, but with the specified shape.
------------------------------------------------------------------------------
Move backend
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Indexing support
------------------------------------------------------------------------------
TensorIndex is the same as slice of pytorch.
-----------------------------------------------------
`None` | `None`
`True` / `False` | `True` / `False`
`:` | `Slice ()`
`::` | `Slice ()`
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
# OVERLAPS #
------------------------------------------------------------------------------
Scalar <-> Tensor promotion
------------------------------------------------------------------------------
Internal functions(like "_xxx") are below. Do not use them directly.
| The internal function of withTensor. It does not check contiguous memory-layout.
# OVERLAPPING #
# OVERLAPPING #
# OVERLAPPING #
# OVERLAPPING #
# OVERLAPPING #
This validation may be slow.
------------------------------------------------------------------------------
Show
------------------------------------------------------------------------------
TODO: this is obviously not the right way to do it,
and will be terribly slow, so please fix it.
true in general
------------------------------------------------------------------------------
Castable instances
------------------------------------------------------------------------------ | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DefaultSignatures #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Torch.Tensor where
import Control.Exception.Safe (throwIO)
import Control.Monad (forM, forM_)
import Numeric.Half
import Data.Complex
import Data.Int (Int16, Int64)
import Data.List (intercalate)
import Data.Proxy
import Data.Reflection
import qualified Data.Vector as V
import Data.Word (Word8)
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Storable
import GHC.Generics
import Numeric
import System.IO.Unsafe
import Torch.DType
import Torch.Device
import Torch.Internal.Cast
import Torch.Internal.Class (Castable (..), CppTuple2 (..), CppTuple3 (..), CppTuple4 (..))
import qualified Torch.Internal.Const as ATen
import qualified Torch.Internal.Managed.Cast as ATen
import qualified Torch.Internal.Managed.Native as ATen
import qualified Torch.Internal.Managed.TensorFactories as LibTorch
import qualified Torch.Internal.Managed.Type.Context as ATen
import qualified Torch.Internal.Managed.Type.StdArray as ATen
import qualified Torch.Internal.Managed.Type.StdString as ATen
import qualified Torch.Internal.Managed.Type.Tensor as ATen
import qualified Torch.Internal.Managed.Type.TensorIndex as ATen
import qualified Torch.Internal.Managed.Type.TensorOptions as ATen
import qualified Torch.Internal.Managed.Type.Extra as ATen
import qualified Torch.Internal.Type as ATen
import qualified Torch.Internal.Unmanaged.Type.Tensor as Unmanaged (tensor_data_ptr)
import Torch.Lens
import Torch.TensorOptions
type ATenTensor = ForeignPtr ATen.Tensor
newtype Tensor = Unsafe ATenTensor
instance Castable Tensor ATenTensor where
cast (Unsafe aten_tensor) f = f aten_tensor
uncast aten_tensor f = f $ Unsafe aten_tensor
newtype MutableTensor = MutableTensor Tensor deriving Show
newMutableTensor :: Tensor -> IO MutableTensor
newMutableTensor tensor = MutableTensor <$> cast1 ATen.detach_t tensor
toImmutable :: MutableTensor -> IO Tensor
toImmutable (MutableTensor tensor) = cast1 ATen.detach_t tensor
numel ::
Tensor ->
Int
numel t = unsafePerformIO $ cast1 ATen.tensor_numel $ t
size ::
Int ->
Tensor ->
Int
size dim t = unsafePerformIO $ (cast2 ATen.tensor_size_l) t dim
shape ::
Tensor ->
[Int]
shape t = unsafePerformIO $ (cast1 ATen.tensor_sizes) t
dim ::
Tensor ->
Int
dim t = unsafePerformIO $ (cast1 ATen.tensor_dim) t
dimUnsafe ::
Tensor ->
Int
dimUnsafe t = unsafePerformIO $ (cast1 ATen.tensor_dim_unsafe) t
dimCUnsafe ::
Tensor ->
Int
dimCUnsafe t = unsafePerformIO $ (cast1 ATen.tensor_dim_c_unsafe) t
device ::
Tensor ->
Device
device t = unsafePerformIO $ do
hasCUDA <- cast0 ATen.hasCUDA :: IO Bool
if hasCUDA
then do
isCUDA <- cast1 ATen.tensor_is_cuda t :: IO Bool
if isCUDA then cuda <$> cast1 ATen.tensor_get_device t else pure cpu
else pure cpu
where
cpu = Device {deviceType = CPU, deviceIndex = 0}
cuda :: Int -> Device
cuda di = Device {deviceType = CUDA, deviceIndex = fromIntegral di}
dtype ::
Tensor ->
DType
dtype t = unsafePerformIO $ cast1 ATen.tensor_scalar_type t
toComplex :: Tensor -> Complex Double
toComplex t = unsafePerformIO $
case dtype t of
ComplexHalf -> do
r :+ i <- withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Half)
return (realToFrac r :+ realToFrac i)
ComplexFloat -> do
r :+ i <- withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Float)
return (realToFrac r :+ realToFrac i)
ComplexDouble -> withTensor t $ \ptr -> peekElemOff (castPtr ptr) 0 :: IO (Complex Double)
_ -> (:+ 0) <$> cast1 ATen.tensor_item_double t
toDouble :: Tensor -> Double
toDouble t = unsafePerformIO $ cast1 ATen.tensor_item_double t
toInt :: Tensor -> Int
toInt t = unsafePerformIO $ cast1 ATen.tensor_item_int64_t t
_toType ::
DType ->
Tensor ->
Tensor
_toType dtype t = unsafePerformIO $ cast2 ATen.tensor_toType_s t dtype
instance HasTypes Tensor Tensor where
types_ = id
instance HasTypes (a -> a) Tensor where
types_ _ = pure
instance HasTypes Int Tensor where
types_ _ = pure
instance HasTypes Double Tensor where
types_ _ = pure
instance HasTypes Float Tensor where
types_ _ = pure
instance HasTypes Bool Tensor where
types_ _ = pure
instance HasTypes Int Int where
types_ = id
instance HasTypes Float Float where
types_ = id
instance HasTypes Double Double where
types_ = id
instance HasTypes Bool Bool where
types_ = id
toType :: forall a. HasTypes a Tensor => DType -> a -> a
toType dtype t = over (types @Tensor @a) (_toType dtype) t
toDevice :: forall a. HasTypes a Tensor => Device -> a -> a
toDevice device' t = over (types @Tensor @a) (_toDevice device') t
_toDevice ::
Device ->
Tensor ->
Tensor
_toDevice device' t = unsafePerformIO $ do
hasCUDA <- cast0 ATen.hasCUDA :: IO Bool
let device = Torch.Tensor.device t
t' <-
toDevice'
(deviceType device)
(deviceType device')
(deviceIndex device)
(deviceIndex device')
hasCUDA
check
(deviceType device')
(deviceType $ Torch.Tensor.device t')
(deviceIndex device')
(deviceIndex $ Torch.Tensor.device t')
pure t'
where
copy from di to di '
toDevice' dt dt' di di' _ =
error $
"cannot move tensor from \""
<> show dt
<> ":"
<> show di
<> "\" to \""
<> show dt'
<> ":"
<> show di'
<> "\""
getOpts :: Tensor -> IO TensorOptions
getOpts = cast1 ATen.tensor_options
withDeviceType :: DeviceType -> TensorOptions -> IO TensorOptions
withDeviceType dt opts = cast2 ATen.tensorOptions_device_D opts dt
withDeviceIndex :: Int16 -> TensorOptions -> IO TensorOptions
to :: Tensor -> TensorOptions -> IO Tensor
to t opts = cast4 ATen.tensor_to_obb t opts nonBlocking copy
where
nonBlocking = False
copy = False
check dt dt' di di' | dt == dt' && di == di' = pure ()
check dt dt' di di' =
error $
"moving of tensor failed: device should have been \""
<> show dt
<> ":"
<> show di
<> "\" but is \""
<> show dt'
<> ":"
<> show di'
<> "\""
toDeviceWithTensor :: Tensor -> Tensor -> Tensor
toDeviceWithTensor reference input = unsafePerformIO $ cast2 ATen.tensor_to_device reference input
select ::
Int ->
Int ->
Tensor ->
Tensor
select dim idx t = unsafePerformIO $ cast3 ATen.tensor_select_ll t dim idx
| Returns a new tensor which indexes the input tensor along dimension dim using the entries in index which is a LongTensor .
indexSelect ::
Int ->
Tensor ->
Tensor ->
Tensor
indexSelect dim indexTensor t = unsafePerformIO $ (cast3 ATen.index_select_tlt) t dim indexTensor
indexSelect' ::
Int ->
[Int] ->
Tensor ->
Tensor
indexSelect' dim indexList t = unsafePerformIO $ (cast3 ATen.index_select_tlt) t dim (asTensor' indexList t)
sliceDim ::
Int ->
Int ->
| end
Int ->
Int ->
Tensor ->
Tensor
sliceDim _dim _start _end _step _self = unsafePerformIO $ (cast5 ATen.slice_tllll) _self _dim _start _end _step
isContiguous ::
Tensor ->
Bool
isContiguous t = unsafePerformIO $ (cast1 ATen.tensor_is_contiguous) t
contiguous ::
Tensor ->
Tensor
contiguous t = unsafePerformIO $ (cast1 ATen.tensor_contiguous) t
reshape ::
[Int] ->
Tensor ->
Tensor
reshape shape t = unsafePerformIO $ cast2 ATen.reshape_tl t shape
toSparse :: Tensor -> Tensor
toSparse t = unsafePerformIO $ (cast1 ATen.tensor_to_sparse) t
toDense :: Tensor -> Tensor
toDense t = unsafePerformIO $ (cast1 ATen.tensor_to_dense) t
toMKLDNN :: Tensor -> Tensor
toMKLDNN t = unsafePerformIO $ (cast1 ATen.tensor_to_mkldnn) t
toCPU :: Tensor -> Tensor
toCPU t = unsafePerformIO $ (cast1 ATen.tensor_cpu) t
toCUDA :: Tensor -> Tensor
toCUDA t = unsafePerformIO $ (cast1 ATen.tensor_cuda) t
There is one - to - one correspondence between Pytorch and Hasktorch tensor index types :
Pytorch | Hasktorch
` Ellipsis ` | ` Ellipsis `
` ... ` | ` Ellipsis `
` 123 ` | ` 123 `
` 1 : ` | ` Slice ( 1 , None ) `
` 1 : : ` | ` Slice ( 1 , None ) `
` :3 ` | ` Slice ( None , 3 ) `
` :3 : ` | ` Slice ( None , 3 ) `
` : : 2 ` | ` Slice ( None , None , 2 ) `
` 1:3 ` | ` Slice ( 1 , 3 ) `
` 1::2 ` | ` Slice ( 1 , None , 2 ) `
` : 3:2 ` | ` Slice ( None , 3 , 2 ) `
` 1:3:2 ` | ` Slice ( 1 , 3 , 2 ) `
` torch.tensor([1 , 2 ] ) ` ) | ` asTensor([1 , 2 : : Int ] ) `
newtype RawTensorIndexList = RawTensorIndexList (ForeignPtr (ATen.StdVector ATen.TensorIndex))
newtype RawTensorIndex = RawTensorIndex (ForeignPtr ATen.TensorIndex)
(!) :: TensorIndex a => Tensor -> a -> Tensor
(Unsafe t) ! idx = unsafePerformIO $ do
let idxs = pushIndex [] idx
vec <- ATen.newTensorIndexList
forM_ idxs $ \(RawTensorIndex i) -> do
ATen.tensorIndexList_push_back vec i
ATen.index t vec >>= (return . Unsafe)
maskedFill :: (TensorIndex a, TensorLike t) => Tensor -> a -> t -> Tensor
maskedFill (Unsafe t') idx v' = unsafePerformIO $ do
let idxs = pushIndex [] idx
(Unsafe v) = asTensor v'
t <- ATen.clone_t t'
vec <- ATen.newTensorIndexList
forM_ idxs $ \(RawTensorIndex i) -> do
ATen.tensorIndexList_push_back vec i
ATen.index_put_ t vec v
return $ Unsafe t
data None = None
deriving (Show, Eq)
data Ellipsis = Ellipsis
deriving (Show, Eq)
newtype Slice a = Slice a
deriving (Show, Eq)
instance Castable RawTensorIndex (ForeignPtr ATen.TensorIndex) where
cast (RawTensorIndex obj) f = f obj
uncast obj f = f $ RawTensorIndex obj
class TensorIndex a where
pushIndex :: [RawTensorIndex] -> a -> [RawTensorIndex]
toLens :: TensorLike b => a -> Lens' Tensor b
default toLens :: TensorLike b => a -> Lens' Tensor b
toLens idx func s = maskedFill s idx <$> (asTensor <$> func (asValue (s ! idx)))
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithNone
return ((RawTensorIndex idx) : vec)
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithEllipsis
return ((RawTensorIndex idx) : vec)
pushIndex vec b = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithBool (if b then 1 else 0)
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (start, end)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (fromIntegral end :: CInt) 1
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (start, end, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (fromIntegral end :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (_, _, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice start) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (start, _)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (start, _, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice (fromIntegral start :: CInt) (maxBound :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (_, end, step)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (fromIntegral end :: CInt) (fromIntegral step :: CInt)
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice (_, end)) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (fromIntegral end :: CInt) 1
return ((RawTensorIndex idx) : vec)
pushIndex vec (Slice ()) = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance TensorIndex Int where
pushIndex vec v = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithInt (fromIntegral v :: CInt)
return ((RawTensorIndex idx) : vec)
instance TensorIndex Integer where
pushIndex vec v = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithInt (fromIntegral v :: CInt)
return ((RawTensorIndex idx) : vec)
instance TensorIndex Tensor where
pushIndex vec v = unsafePerformIO $ do
idx <- cast1 ATen.newTensorIndexWithTensor v
return (idx : vec)
instance TensorIndex () where
pushIndex vec _ = unsafePerformIO $ do
idx <- ATen.newTensorIndexWithSlice 0 (maxBound :: CInt) 1
return ((RawTensorIndex idx) : vec)
instance (TensorIndex a, TensorIndex b) => TensorIndex (a, b) where
pushIndex vec (a, b) = (flip pushIndex a) . (flip pushIndex b) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c) => TensorIndex (a, b, c) where
pushIndex vec (a, b, c) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c, TensorIndex d) => TensorIndex (a, b, c, d) where
pushIndex vec (a, b, c, d) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) . (flip pushIndex d) $ vec
instance (TensorIndex a, TensorIndex b, TensorIndex c, TensorIndex d, TensorIndex e) => TensorIndex (a, b, c, d, e) where
pushIndex vec (a, b, c, d, e) = (flip pushIndex a) . (flip pushIndex b) . (flip pushIndex c) . (flip pushIndex d) . (flip pushIndex e) $ vec
asValue :: TensorLike a => Tensor -> a
asValue t =
let cpuTensor = if device t == Device CPU 0 then t else toCPU t
contTensor = if isContiguous cpuTensor then cpuTensor else contiguous cpuTensor
in _asValue contTensor
class TensorOptionLike a where
withTensorOptions :: Tensor -> a -> Tensor
instance TensorOptionLike TensorOptions where
withTensorOptions t opts = unsafePerformIO $ cast4 ATen.tensor_to_obb t opts nonBlocking copy
where
nonBlocking = False
copy = False
instance TensorOptionLike Tensor where
withTensorOptions t opts = unsafePerformIO $ cast4 ATen.tensor_to_tbb t opts nonBlocking copy
where
nonBlocking = False
copy = False
class TensorLike a where
asTensor' :: TensorOptionLike opt => a -> opt -> Tensor
asTensor' v opts = withTensorOptions (asTensor v) opts
asTensor :: a -> Tensor
_asValue :: Tensor -> a
_dtype :: DType
_dims :: a -> [Int]
_deepDims :: a -> Maybe [Int]
_peekElemOff :: Ptr () -> Int -> [Int] -> IO a
_pokeElemOff :: Ptr () -> Int -> a -> IO ()
bool_opts = withDType Bool defaultOpts
uint8_opts = withDType UInt8 defaultOpts
int64_opts = withDType Int64 defaultOpts
float_opts = withDType Float defaultOpts
double_opts = withDType Double defaultOpts
withTensor :: Tensor -> (Ptr () -> IO a) -> IO a
withTensor t fn =
let tensor = if isContiguous t then t else contiguous t
in cast tensor $ \t' -> withForeignPtr t' $ \tensor_ptr -> Unmanaged.tensor_data_ptr tensor_ptr >>= fn
_withTensor :: Tensor -> (Ptr () -> IO a) -> IO a
_withTensor t fn =
cast t $ \t' -> withForeignPtr t' $ \tensor_ptr -> Unmanaged.tensor_data_ptr tensor_ptr >>= fn
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) [] $ withDType (_dtype @a) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @a == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 []
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @a) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = reflect (Proxy :: Proxy a)
_dims _ = []
_deepDims _ = Just []
_peekElemOff ptr offset _ = peekElemOff (castPtr ptr) offset
_pokeElemOff ptr offset v = pokeElemOff (castPtr ptr) offset v
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) [] $ withDType (_dtype @Bool) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @Bool == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 []
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @Bool) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = reflect (Proxy :: Proxy Bool)
_dims _ = []
_deepDims _ = Just []
_peekElemOff ptr offset _ = (/= 0) <$> (peekElemOff (castPtr ptr) offset :: IO Word8)
_pokeElemOff ptr offset v = pokeElemOff (castPtr ptr) offset ((if v then 1 else 0) :: Word8)
asTensor' v opts = withTensorOptions v opts
asTensor = id
_asValue = id
_dtype = error "Not implemented for Tensor-type"
_dims v = error "Not implemented for Tensor-type"
_deepDims v = error "Not implemented for Tensor-type"
_peekElemOff = error "Not implemented for Tensor-type"
_pokeElemOff = error "Not implemented for Tensor-type"
asTensor (a, b) = asTensor [a, b]
_asValue v =
let [a, b] = _asValue v
in (a, b)
_dtype = error "Not implemented for tuple-type"
_dims v = error "Not implemented for tuple-type"
_deepDims v = error "Not implemented for tuple-type"
_peekElemOff = error "Not implemented for tuple-type"
_pokeElemOff = error "Not implemented for tuple-type"
asTensor v = unsafePerformIO $ do
t <- ((cast2 ATen.new_empty_tensor) :: [Int] -> TensorOptions -> IO Tensor) (_dims v) $ withDType (_dtype @a) defaultOpts
_withTensor t $ \ptr -> do
_pokeElemOff ptr 0 v
return t
_asValue t = unsafePerformIO $ do
if _dtype @a == dtype t
then do
withTensor t $ \ptr -> do
_peekElemOff ptr 0 (shape t)
else throwIO $ userError $ "The inferred DType of asValue is " ++ show (_dtype @a) ++ ", but the DType of tensor on memory is " ++ show (dtype t) ++ "."
_dtype = _dtype @a
_dims [] = [0]
_dims v@(x : _) = (length v) : (_dims x)
_deepDims [] = Just [0]
_deepDims v@(x : xs) = do
deepDimsX <- _deepDims x
deepDimsXs <- traverse _deepDims xs
if and $ fmap (deepDimsX ==) deepDimsXs
then return $ length v : deepDimsX
else Nothing
_peekElemOff ptr offset [] = return []
_peekElemOff ptr offset (d : dims) =
let width = product dims
in forM [0 .. (d -1)] $ \i ->
_peekElemOff ptr (offset + i * width) dims
_pokeElemOff ptr offset [] = return ()
_pokeElemOff ptr offset v@(x : _) =
let width = product (_dims x)
in forM_ (zip [0 ..] v) $ \(i, d) ->
then (_pokeElemOff @a) ptr (offset + i * width) d
else throwIO $ userError $ "There are lists having different lengths."
class AsTensors as where
toTensors :: as -> V.Vector Tensor
default toTensors :: (Generic as, GAsTensors (Rep as)) => as -> V.Vector Tensor
toTensors a = gToTensors $ from a
instance TensorLike a => AsTensors a where
toTensors = pure . asTensor
class GAsTensors record where
gToTensors :: record as -> V.Vector Tensor
instance (GAsTensors ls, GAsTensors rs) => GAsTensors (ls :*: rs) where
gToTensors (g :*: d) = gToTensors g V.++ gToTensors d
instance (GAsTensors ls, GAsTensors rs) => GAsTensors (ls :+: rs) where
gToTensors (L1 g) = gToTensors g
gToTensors (R1 g) = gToTensors g
instance (GAsTensors ls) => GAsTensors (M1 i c ls) where
gToTensors (M1 g) = gToTensors g
instance (TensorLike ls) => GAsTensors (K1 i ls) where
gToTensors (K1 g) = pure $ asTensor g
instance Show Tensor where
show t' =
case (dim t) of
0 -> details ++ show0d t
1 -> details ++ show1d t
n -> details ++ shownd n 0 t
where
t = if device t' == Device CPU 0 then t' else toCPU t'
showElems elemShow sep t = "[" ++ (intercalate sep $ map elemShow [t ! i | i <- [0 .. ((size 0 t) - 1)]]) ++ "]"
padPositive x s = if x >= 0 then " " ++ s else s
TODO : this assumes that scientific notation only uses one - digit exponents , which is not
padLarge x s = if (abs x) >= 0.1 then s ++ " " else s
show0d x =
if isIntegral (dtype t)
then padPositive (toInt x) $ show $ toInt x
else
if isComplex (dtype t)
then
let r :+ i = toComplex x
in (padLarge r $ padPositive r $ showGFloat (Just 4) r "") ++ " + i" ++
(padLarge i $ padPositive i $ showGFloat (Just 4) i "")
else padLarge (toDouble x) $ padPositive (toDouble x) $ showGFloat (Just 4) (toDouble x) ""
show1d = showElems show0d ", "
shownd n offset =
case n of
2 -> showElems show1d (",\n " ++ padding ++ replicate offset ' ')
_ -> showElems (shownd (n -1) (offset + 1)) (",\n " ++ padding ++ replicate offset ' ')
details = "Tensor " ++ (show $ dtype t) ++ " " ++ (show $ shape t) ++ " "
padding = map (const ' ') details
NB : ATen only defines Castable [ ForeignPtr ATen . Tensor ] ( ForeignPtr ATen . TensorList )
instance Castable [Tensor] (ForeignPtr ATen.TensorList) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
instance Castable [Tensor] (ForeignPtr (ATen.C10List ATen.Tensor)) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
instance Castable [Tensor] (ForeignPtr (ATen.C10List (ATen.C10Optional ATen.Tensor))) where
cast xs f = do
ptr_list <- mapM (\x -> (cast x return :: IO (ForeignPtr ATen.Tensor))) xs
cast ptr_list f
uncast xs f = uncast xs $ \ptr_list -> do
tensor_list <- mapM (\(x :: ForeignPtr ATen.Tensor) -> uncast x return) ptr_list
f tensor_list
|
3efeb1138b6935b1180d2f42cce9f737be9c827d9ca247cde2141788f0f0f666 | emqx/emqx | emqx_banned_SUITE.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2018-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_banned_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("emqx/include/emqx.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
all() -> emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
emqx_common_test_helpers:start_apps([]),
ok = ekka:start(),
Config.
end_per_suite(_Config) ->
ekka:stop(),
mria:stop(),
mria_mnesia:delete_schema(),
emqx_common_test_helpers:stop_apps([]).
t_add_delete(_) ->
Banned = #banned{
who = {clientid, <<"TestClient">>},
by = <<"banned suite">>,
reason = <<"test">>,
at = erlang:system_time(second),
until = erlang:system_time(second) + 1
},
{ok, _} = emqx_banned:create(Banned),
{error, {already_exist, Banned}} = emqx_banned:create(Banned),
?assertEqual(1, emqx_banned:info(size)),
{error, {already_exist, Banned}} =
emqx_banned:create(Banned#banned{until = erlang:system_time(second) + 100}),
?assertEqual(1, emqx_banned:info(size)),
ok = emqx_banned:delete({clientid, <<"TestClient">>}),
?assertEqual(0, emqx_banned:info(size)).
t_check(_) ->
{ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}),
{ok, _} = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}),
{ok, _} = emqx_banned:create(#banned{who = {peerhost, {192, 168, 0, 1}}}),
?assertEqual(3, emqx_banned:info(size)),
ClientInfo1 = #{
clientid => <<"BannedClient">>,
username => <<"user">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo2 = #{
clientid => <<"client">>,
username => <<"BannedUser">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo3 = #{
clientid => <<"client">>,
username => <<"user">>,
peerhost => {192, 168, 0, 1}
},
ClientInfo4 = #{
clientid => <<"client">>,
username => <<"user">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo5 = #{},
ClientInfo6 = #{clientid => <<"client1">>},
?assert(emqx_banned:check(ClientInfo1)),
?assert(emqx_banned:check(ClientInfo2)),
?assert(emqx_banned:check(ClientInfo3)),
?assertNot(emqx_banned:check(ClientInfo4)),
?assertNot(emqx_banned:check(ClientInfo5)),
?assertNot(emqx_banned:check(ClientInfo6)),
ok = emqx_banned:delete({clientid, <<"BannedClient">>}),
ok = emqx_banned:delete({username, <<"BannedUser">>}),
ok = emqx_banned:delete({peerhost, {192, 168, 0, 1}}),
?assertNot(emqx_banned:check(ClientInfo1)),
?assertNot(emqx_banned:check(ClientInfo2)),
?assertNot(emqx_banned:check(ClientInfo3)),
?assertNot(emqx_banned:check(ClientInfo4)),
?assertEqual(0, emqx_banned:info(size)).
t_unused(_) ->
Who1 = {clientid, <<"BannedClient1">>},
Who2 = {clientid, <<"BannedClient2">>},
?assertMatch(
{ok, _},
emqx_banned:create(#banned{
who = Who1,
until = erlang:system_time(second)
})
),
?assertMatch(
{ok, _},
emqx_banned:create(#banned{
who = Who2,
until = erlang:system_time(second) - 1
})
),
?assertEqual(ignored, gen_server:call(emqx_banned, unexpected_req)),
?assertEqual(ok, gen_server:cast(emqx_banned, unexpected_msg)),
%% expiry timer
timer:sleep(500),
ok = emqx_banned:delete(Who1),
ok = emqx_banned:delete(Who2).
t_kick(_) ->
ClientId = <<"client">>,
snabbkaffe:start_trace(),
Now = erlang:system_time(second),
Who = {clientid, ClientId},
emqx_banned:create(#{
who => Who,
by => <<"test">>,
reason => <<"test">>,
at => Now,
until => Now + 120
}),
Trace = snabbkaffe:collect_trace(),
snabbkaffe:stop(),
emqx_banned:delete(Who),
?assertEqual(1, length(?of_kind(kick_session_due_to_banned, Trace))).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/test/emqx_banned_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
expiry timer | Copyright ( c ) 2018 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_banned_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("emqx/include/emqx.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
all() -> emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
emqx_common_test_helpers:start_apps([]),
ok = ekka:start(),
Config.
end_per_suite(_Config) ->
ekka:stop(),
mria:stop(),
mria_mnesia:delete_schema(),
emqx_common_test_helpers:stop_apps([]).
t_add_delete(_) ->
Banned = #banned{
who = {clientid, <<"TestClient">>},
by = <<"banned suite">>,
reason = <<"test">>,
at = erlang:system_time(second),
until = erlang:system_time(second) + 1
},
{ok, _} = emqx_banned:create(Banned),
{error, {already_exist, Banned}} = emqx_banned:create(Banned),
?assertEqual(1, emqx_banned:info(size)),
{error, {already_exist, Banned}} =
emqx_banned:create(Banned#banned{until = erlang:system_time(second) + 100}),
?assertEqual(1, emqx_banned:info(size)),
ok = emqx_banned:delete({clientid, <<"TestClient">>}),
?assertEqual(0, emqx_banned:info(size)).
t_check(_) ->
{ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}),
{ok, _} = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}),
{ok, _} = emqx_banned:create(#banned{who = {peerhost, {192, 168, 0, 1}}}),
?assertEqual(3, emqx_banned:info(size)),
ClientInfo1 = #{
clientid => <<"BannedClient">>,
username => <<"user">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo2 = #{
clientid => <<"client">>,
username => <<"BannedUser">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo3 = #{
clientid => <<"client">>,
username => <<"user">>,
peerhost => {192, 168, 0, 1}
},
ClientInfo4 = #{
clientid => <<"client">>,
username => <<"user">>,
peerhost => {127, 0, 0, 1}
},
ClientInfo5 = #{},
ClientInfo6 = #{clientid => <<"client1">>},
?assert(emqx_banned:check(ClientInfo1)),
?assert(emqx_banned:check(ClientInfo2)),
?assert(emqx_banned:check(ClientInfo3)),
?assertNot(emqx_banned:check(ClientInfo4)),
?assertNot(emqx_banned:check(ClientInfo5)),
?assertNot(emqx_banned:check(ClientInfo6)),
ok = emqx_banned:delete({clientid, <<"BannedClient">>}),
ok = emqx_banned:delete({username, <<"BannedUser">>}),
ok = emqx_banned:delete({peerhost, {192, 168, 0, 1}}),
?assertNot(emqx_banned:check(ClientInfo1)),
?assertNot(emqx_banned:check(ClientInfo2)),
?assertNot(emqx_banned:check(ClientInfo3)),
?assertNot(emqx_banned:check(ClientInfo4)),
?assertEqual(0, emqx_banned:info(size)).
t_unused(_) ->
Who1 = {clientid, <<"BannedClient1">>},
Who2 = {clientid, <<"BannedClient2">>},
?assertMatch(
{ok, _},
emqx_banned:create(#banned{
who = Who1,
until = erlang:system_time(second)
})
),
?assertMatch(
{ok, _},
emqx_banned:create(#banned{
who = Who2,
until = erlang:system_time(second) - 1
})
),
?assertEqual(ignored, gen_server:call(emqx_banned, unexpected_req)),
?assertEqual(ok, gen_server:cast(emqx_banned, unexpected_msg)),
timer:sleep(500),
ok = emqx_banned:delete(Who1),
ok = emqx_banned:delete(Who2).
t_kick(_) ->
ClientId = <<"client">>,
snabbkaffe:start_trace(),
Now = erlang:system_time(second),
Who = {clientid, ClientId},
emqx_banned:create(#{
who => Who,
by => <<"test">>,
reason => <<"test">>,
at => Now,
until => Now + 120
}),
Trace = snabbkaffe:collect_trace(),
snabbkaffe:stop(),
emqx_banned:delete(Who),
?assertEqual(1, length(?of_kind(kick_session_due_to_banned, Trace))).
|
2aa5412d0230da4645edc8bee328f28fe330abb5686f6ac89140f963c39ac34f | janestreet/hardcaml | mangler.mli | (** Map a set of names to a set of unique names. *)
open Base
(** A mangler is a mapping from strings to the next available integer which should be
added to the name to make it unique. *)
type t [@@deriving sexp_of]
(** Create a new mangler. *)
val create : case_sensitive:bool -> t
val add_identifier : t -> string -> [ `Ok | `Duplicate ]
(** Add a list of identifiers to the mangler table. Raises if an identifier is already in
the table. *)
val add_identifiers_exn : t -> string list -> unit
(** Test if the string is in the mangler, and return its mangler index if it is. *)
val find_index : t -> string -> int option
(** [mangle t name] returns a unique, mangled name. *)
val mangle : t -> string -> string
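(* Illustrative usage (not part of the original interface; assumes the module
   is referenced as [Mangler], and the exact mangled output is an
   implementation detail):
     let t = Mangler.create ~case_sensitive:true in
     Mangler.add_identifiers_exn t [ "foo" ];
     let fresh = Mangler.mangle t "foo" in
     (* [fresh] is a suffixed variant such as "foo_0"; further calls keep
        returning distinct names. *)
     ignore fresh *)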
| null | https://raw.githubusercontent.com/janestreet/hardcaml/4126f65f39048fef5853ba9b8d766143f678a9e4/src/mangler.mli | ocaml | * Map a set of names to a set of unique names.
* A mangler is a mapping from strings to the next available integer which should be
added to the name to make it unique.
* Create a new mangler.
* Add a list of identifiers to the mangler table. Raises if an identifier is already in
the table.
* Test if the string is in the mangler, and return its mangler index if it is.
* [mangle t name] returns a unique, mangled name. |
open Base
type t [@@deriving sexp_of]
val create : case_sensitive:bool -> t
val add_identifier : t -> string -> [ `Ok | `Duplicate ]
val add_identifiers_exn : t -> string list -> unit
val find_index : t -> string -> int option
val mangle : t -> string -> string
|
c978bf59691235da2a2702720b3f01077f28b4c89be90dfd1d1e8a11b087957a | gregcman/lisp-in-small-pieces | chap7d-unused.lisp | #+nil
(define-class primitive Object
(address))
#+nil
(define-class continuation Object
(stack))
#+nil
(progn
(defun CALL0 (address)
(INVOKE0 address))
(defun CALL1 (address m1)
(append m1
(INVOKE1 address)))
(defun CALL2 (address m1 m2)
(append m1
(PUSH-VALUE)
m2
(POP-ARG1)
(INVOKE2 address)))
(defun CALL3 (address m1 m2 m3)
(append m1
(PUSH-VALUE)
m2
(PUSH-VALUE)
m3
(POP-ARG2)
(POP-ARG1)
(INVOKE3 address))))
(defun SHALLOW-ARGUMENT-REF (j)
(check-byte j)
(case j
((0 1 2 3) (list (+ 1 j)))
(otherwise (list 5 j))))
(defun PREDEFINED (i)
(check-byte i)
(case i
;; 0=\+true+, 1=\+false+, 2=(), 3=cons, 4=car, 5=cdr, 6=pair?, 7=symbol?, 8=eq?
((0 1 2 3 4 5 6 7 8) (list (+ 10 i)))
(otherwise (list 19 i))))
(defun SET-SHALLOW-ARGUMENT! (j)
(case j
((0 1 2 3) (list (+ 21 j)))
(otherwise (list 25 j))))
(defun CONSTANT (value)
(cond
((eq? value +true+) (list 10))
((eq? value +false+) (list 11))
((eq? value '()) (list 12))
((equal? value -1) (list 80))
((equal? value 0) (list 81))
((equal? value 1) (list 82))
((equal? value 2) (list 83))
((equal? value 4) (list 84))
((and (integer? value) ; immediate value
(<= 0 value)
(< value 255))
(list 79 value))
(t (EXPLICIT-CONSTANT value))))
(defun INVOKE0 (address)
(case address
((read) (list 89))
((newline) (list 88))
(otherwise (static-wrong "Cannot integrate" address))))
#+nil
(defun INVOKE1 (address)
(case address
((car) (list 90))
((cdr) (list 91))
((pair?) (list 92))
((symbol?) (list 93))
((display) (list 94))
(otherwise (static-wrong "Cannot integrate" address))))
;;; The same one with other unary primitives.
(defun INVOKE1 (address)
(case address
((car) (list 90))
((cdr) (list 91))
((pair?) (list 92))
((symbol?) (list 93))
((display) (list 94))
((primitive?) (list 95))
((null?) (list 96))
((continuation?) (list 97))
((eof-object?) (list 98))
(otherwise (static-wrong "Cannot integrate" address))))
(defun INVOKE2 (address)
(case address
((cons) (list 100))
((eq?) (list 101))
((set-car!) (list 102))
((set-cdr!) (list 103))
((+) (list 104))
((-) (list 105))
((=) (list 106))
((<) (list 107))
((>) (list 108))
((*) (list 109))
((<=) (list 110))
((>=) (list 111))
((remainder)(list 112))
(otherwise (static-wrong "Cannot integrate" address))))
(defun INVOKE3 (address)
(static-wrong "No ternary integrated procedure" address))
(defun ARITY=? (arity+1)
(case arity+1
((1 2 3 4) (list (+ 70 arity+1)))
(otherwise (list 75 arity+1))))
(defun POP-FRAME! (rank)
(case rank
((0 1 2 3) (list (+ 60 rank)))
(otherwise (list 64 rank))))
(defun ALLOCATE-FRAME (size)
(case size
((0 1 2 3 4) (list (+ 50 size)))
(otherwise (list 55 (+ size 1)))))
| null | https://raw.githubusercontent.com/gregcman/lisp-in-small-pieces/71a89aa2108dc1122a60337177ea75c5170e6828/unused/chap7d-unused.lisp | lisp | immediate value
The same one with other unary primitives. | #+nil
(define-class primitive Object
(address))
#+nil
(define-class continuation Object
(stack))
#+nil
(progn
(defun CALL0 (address)
(INVOKE0 address))
(defun CALL1 (address m1)
(append m1
(INVOKE1 address)))
(defun CALL2 (address m1 m2)
(append m1
(PUSH-VALUE)
m2
(POP-ARG1)
(INVOKE2 address)))
(defun CALL3 (address m1 m2 m3)
(append m1
(PUSH-VALUE)
m2
(PUSH-VALUE)
m3
(POP-ARG2)
(POP-ARG1)
(INVOKE3 address))))
(defun SHALLOW-ARGUMENT-REF (j)
(check-byte j)
(case j
((0 1 2 3) (list (+ 1 j)))
(otherwise (list 5 j))))
(defun PREDEFINED (i)
(check-byte i)
(case i
;; 0=\+true+, 1=\+false+, 2=(), 3=cons, 4=car, 5=cdr, 6=pair?, 7=symbol?, 8=eq?
((0 1 2 3 4 5 6 7 8) (list (+ 10 i)))
(otherwise (list 19 i))))
(defun SET-SHALLOW-ARGUMENT! (j)
(case j
((0 1 2 3) (list (+ 21 j)))
(otherwise (list 25 j))))
(defun CONSTANT (value)
(cond
((eq? value +true+) (list 10))
((eq? value +false+) (list 11))
((eq? value '()) (list 12))
((equal? value -1) (list 80))
((equal? value 0) (list 81))
((equal? value 1) (list 82))
((equal? value 2) (list 83))
((equal? value 4) (list 84))
(<= 0 value)
(< value 255))
(list 79 value))
(t (EXPLICIT-CONSTANT value))))
(defun INVOKE0 (address)
(case address
((read) (list 89))
((newline) (list 88))
(otherwise (static-wrong "Cannot integrate" address))))
#+nil
(defun INVOKE1 (address)
(case address
((car) (list 90))
((cdr) (list 91))
((pair?) (list 92))
((symbol?) (list 93))
((display) (list 94))
(otherwise (static-wrong "Cannot integrate" address))))
(defun INVOKE1 (address)
(case address
((car) (list 90))
((cdr) (list 91))
((pair?) (list 92))
((symbol?) (list 93))
((display) (list 94))
((primitive?) (list 95))
((null?) (list 96))
((continuation?) (list 97))
((eof-object?) (list 98))
(otherwise (static-wrong "Cannot integrate" address))))
(defun INVOKE2 (address)
(case address
((cons) (list 100))
((eq?) (list 101))
((set-car!) (list 102))
((set-cdr!) (list 103))
((+) (list 104))
((-) (list 105))
((=) (list 106))
((<) (list 107))
((>) (list 108))
((*) (list 109))
((<=) (list 110))
((>=) (list 111))
((remainder)(list 112))
(otherwise (static-wrong "Cannot integrate" address))))
(defun INVOKE3 (address)
(static-wrong "No ternary integrated procedure" address))
(defun ARITY=? (arity+1)
(case arity+1
((1 2 3 4) (list (+ 70 arity+1)))
(otherwise (list 75 arity+1))))
(defun POP-FRAME! (rank)
(case rank
((0 1 2 3) (list (+ 60 rank)))
(otherwise (list 64 rank))))
(defun ALLOCATE-FRAME (size)
(case size
((0 1 2 3 4) (list (+ 50 size)))
(otherwise (list 55 (+ size 1)))))
|
faa8cf35b738003b546914c90b1a4b58a5e0ee975e54c5480137cd1d77d67e4d | facebookarchive/pfff | lib_unparser.mli |
type elt =
| OrigElt of string
| Removed of string
| Added of string
| Esthet2 of (Parse_info.esthet * string)
(* helpers *)
val elts_of_any:
kind_and_info_of_tok:('tok -> Parse_info.token_kind * Parse_info.info) ->
'tok list ->
elt list
(* debugging *)
val vof_elt: elt -> Ocaml.v
(* heuristics *)
val drop_esthet_between_removed: elt list -> elt list
val drop_whole_line_if_only_removed: elt list -> elt list
val debug: bool ref
(* main entry point *)
val string_of_toks_using_transfo:
kind_and_info_of_tok:('tok -> Parse_info.token_kind * Parse_info.info) ->
'tok list ->
string
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/matcher/lib_unparser.mli | ocaml | helpers
debugging
heuristics
main entry point |
type elt =
| OrigElt of string
| Removed of string
| Added of string
| Esthet2 of (Parse_info.esthet * string)
val elts_of_any:
kind_and_info_of_tok:('tok -> Parse_info.token_kind * Parse_info.info) ->
'tok list ->
elt list
val vof_elt: elt -> Ocaml.v
val drop_esthet_between_removed: elt list -> elt list
val drop_whole_line_if_only_removed: elt list -> elt list
val debug: bool ref
val string_of_toks_using_transfo:
kind_and_info_of_tok:('tok -> Parse_info.token_kind * Parse_info.info) ->
'tok list ->
string
|
a5b60c6c51ff20a33377bc49d0cfaa17d08f5790d8f6e92c5537941d2e50831f | marick/fp-oo | t_modules.clj | (ns sources.t-modules
(:use midje.sweet))
(load-file "sources/modules.clj")
(fact "Anything"
(let [instance (send-to Anything :new)]
(send-to instance :class-name ) => 'Anything
(send-to instance :class) => Anything)
(send-to Anything :class-name) => 'Klass
(send-to Anything :class) => Klass
(send-to Anything :ancestors) => '[Anything])
(fact "comparison"
(<=> 10 200) => -1
(<=> 200 200) => 0
(<=> 2000 200) => 1)
(fact "Trilobites"
(send-to Trilobite :ancestors) => '[Trilobite Anything]
(let [cyclops (send-to Trilobite :new 1)
panopty (send-to Trilobite :new 1000)]
(send-to cyclops :class-name) => 'Trilobite
(send-to panopty :class) => Trilobite
(send-to cyclops :facets) => 1
(send-to cyclops :<=> panopty) => -1
(send-to cyclops :<=> cyclops) => 0
(send-to panopty :<=> cyclops) => 1))
| null | https://raw.githubusercontent.com/marick/fp-oo/434937826d794d6fe02b3e9a62cf5b4fbc314412/test/sources/t_modules.clj | clojure | (ns sources.t-modules
(:use midje.sweet))
(load-file "sources/modules.clj")
(fact "Anything"
(let [instance (send-to Anything :new)]
(send-to instance :class-name ) => 'Anything
(send-to instance :class) => Anything)
(send-to Anything :class-name) => 'Klass
(send-to Anything :class) => Klass
(send-to Anything :ancestors) => '[Anything])
(fact "comparison"
(<=> 10 200) => -1
(<=> 200 200) => 0
(<=> 2000 200) => 1)
(fact "Trilobites"
(send-to Trilobite :ancestors) => '[Trilobite Anything]
(let [cyclops (send-to Trilobite :new 1)
panopty (send-to Trilobite :new 1000)]
(send-to cyclops :class-name) => 'Trilobite
(send-to panopty :class) => Trilobite
(send-to cyclops :facets) => 1
(send-to cyclops :<=> panopty) => -1
(send-to cyclops :<=> cyclops) => 0
(send-to panopty :<=> cyclops) => 1))
|
|
27af1c613345f6e62933543b76110942c9c63a9327eaf64e6e62f6e20096d846 | metametadata/carry | signals.cljs | (ns app.common.signals
(:require [app.blueprint-methods :refer [on-signal]]
[cljs.core.match :refer-macros [match]]))
(defmethod on-signal nil
[_model signal _dispatch-signal _dispatch-action]
(match signal
:on-start nil
:on-stop nil))
(defmethod on-signal (namespace ::_)
[_model signal _dispatch-signal dispatch-action]
(match signal
::on-home (dispatch-action [:app.common.actions/navigate :home])
::on-settings (dispatch-action [:app.common.actions/navigate :settings]))) | null | https://raw.githubusercontent.com/metametadata/carry/fa5c7cd0d8f1b71edca70330acc97c6245638efb/examples/blueprint-splitting-multimethods-core-match/src/app/common/signals.cljs | clojure | (ns app.common.signals
(:require [app.blueprint-methods :refer [on-signal]]
[cljs.core.match :refer-macros [match]]))
(defmethod on-signal nil
[_model signal _dispatch-signal _dispatch-action]
(match signal
:on-start nil
:on-stop nil))
(defmethod on-signal (namespace ::_)
[_model signal _dispatch-signal dispatch-action]
(match signal
::on-home (dispatch-action [:app.common.actions/navigate :home])
::on-settings (dispatch-action [:app.common.actions/navigate :settings]))) |
|
4f4b1e3a1fb9ec6b0e4f87df573b819d4fddba6969149a90f2cbe41c743211e5 | ghc/packages-dph | PData.hs |
-- | Parallel array data.
--
-- This is an interface onto the internal array types and operators defined
-- by the library, and should not normally be used by client programs.
module Data.Array.Parallel.PArray.PData
( -- * Parallel array types
PArray (..), PData(..), PDatas(..)
, length, takeData
-- * PR (Parallel Representation)
, PR (..)
-- * Extra conversions
, fromListPR
, toListPR
-- * Nested arrays
, module Data.Array.Parallel.PArray.PData.Nested
-- * Tuple arrays
, module Data.Array.Parallel.PArray.PData.Tuple2
, module Data.Array.Parallel.PArray.PData.Tuple3
, module Data.Array.Parallel.PArray.PData.Tuple4
, module Data.Array.Parallel.PArray.PData.Tuple5
, module Data.Array.Parallel.PArray.PData.Tuple6)
where
import Data.Array.Parallel.PArray.PData.Base
import Data.Array.Parallel.PArray.PData.Wrap
import Data.Array.Parallel.PArray.PData.Nested
import Data.Array.Parallel.PArray.PData.Tuple2
import Data.Array.Parallel.PArray.PData.Tuple3
import Data.Array.Parallel.PArray.PData.Tuple4
import Data.Array.Parallel.PArray.PData.Tuple5
import Data.Array.Parallel.PArray.PData.Tuple6
import Data.Array.Parallel.PArray.PData.Tuple7
import Data.Array.Parallel.PArray.PData.Void ()
import Data.Array.Parallel.PArray.PData.Unit ()
import Data.Array.Parallel.PArray.PData.Int ()
import Data.Array.Parallel.PArray.PData.Word8 ()
import Data.Array.Parallel.PArray.PData.Double ()
import Data.Array.Parallel.PArray.PData.Sum2 ()
import Data.Array.Parallel.PArray.PRepr.Instances ()
import qualified Data.Vector as V
import Prelude hiding (length)
-- | Convert a list to a PData.
fromListPR :: PR a => [a] -> PData a
fromListPR = fromVectorPR . V.fromList
-- | Convert a PData to a list.
toListPR :: PR a => PData a -> [a]
toListPR = V.toList . toVectorPR
| null | https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-lifted-vseg/Data/Array/Parallel/PArray/PData.hs | haskell | | Parallel array data.
This is an interface onto the internal array types and operators defined
by the library, and should not normally be used by client programs.
* Parallel array types
* PR (Parallel Representation)
* Extra conversions
* Nested arrays
* Tuple arrays
| Convert a list to a PData.
| Convert a PData to a list. |
module Data.Array.Parallel.PArray.PData
PArray (..), PData(..), PDatas(..)
, length, takeData
, PR (..)
, fromListPR
, toListPR
, module Data.Array.Parallel.PArray.PData.Nested
, module Data.Array.Parallel.PArray.PData.Tuple2
, module Data.Array.Parallel.PArray.PData.Tuple3
, module Data.Array.Parallel.PArray.PData.Tuple4
, module Data.Array.Parallel.PArray.PData.Tuple5
, module Data.Array.Parallel.PArray.PData.Tuple6)
where
import Data.Array.Parallel.PArray.PData.Base
import Data.Array.Parallel.PArray.PData.Wrap
import Data.Array.Parallel.PArray.PData.Nested
import Data.Array.Parallel.PArray.PData.Tuple2
import Data.Array.Parallel.PArray.PData.Tuple3
import Data.Array.Parallel.PArray.PData.Tuple4
import Data.Array.Parallel.PArray.PData.Tuple5
import Data.Array.Parallel.PArray.PData.Tuple6
import Data.Array.Parallel.PArray.PData.Tuple7
import Data.Array.Parallel.PArray.PData.Void ()
import Data.Array.Parallel.PArray.PData.Unit ()
import Data.Array.Parallel.PArray.PData.Int ()
import Data.Array.Parallel.PArray.PData.Word8 ()
import Data.Array.Parallel.PArray.PData.Double ()
import Data.Array.Parallel.PArray.PData.Sum2 ()
import Data.Array.Parallel.PArray.PRepr.Instances ()
import qualified Data.Vector as V
import Prelude hiding (length)
fromListPR :: PR a => [a] -> PData a
fromListPR = fromVectorPR . V.fromList
toListPR :: PR a => PData a -> [a]
toListPR = V.toList . toVectorPR
|
eeff2199c4b2b96c0e453d2d3070df55a15c51077802aa0a3eaf7f6d4557638c | cojna/iota | IntModBench.hs | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE TypeApplications #
# LANGUAGE UnboxedTuples #
module Data.IntModBench (benchMain) where
import Criterion
import Data.GaloisField
import Data.IntMod
import qualified Data.Vector.Unboxed as U
import GHC.Exts
import System.Random.XoRoShiRo
benchMain :: Benchmark
benchMain =
bgroup
"IntMod"
[ bgroup
"(+%)"
[ bench "(+%)" $ whnf (U.foldl' (+%) 0) randoms
, bench "addModGF" $ whnf (U.foldl' addModGF 0) randoms
, bench "addMod1" $ whnf (U.foldl' addMod1 0) randoms
, bench "addMod2" $ whnf (U.foldl' addMod2 0) randoms
, bench "addMod3" $ whnf (U.foldl' addMod3 0) randoms
, bench "addMod4" $ whnf (U.foldl' addMod4 0) randoms
, bench "addMod5" $ whnf (U.foldl' addMod5 0) randoms
]
, bgroup
"(-%)"
[ bench "(-%)" $ whnf (U.foldl' (-%) 0) randoms
, bench "subModGF" $ whnf (U.foldl' subModGF 0) randoms
, bench "subMod1" $ whnf (U.foldl' subMod1 0) randoms
, bench "subMod2" $ whnf (U.foldl' subMod2 0) randoms
, bench "subMod3" $ whnf (U.foldl' subMod3 0) randoms
, bench "subMod4" $ whnf (U.foldl' subMod4 0) randoms
, bench "subMod5" $ whnf (U.foldl' subMod5 0) randoms
, bench "subMod6" $ whnf (U.foldl' subMod6 0) randoms
, bench "subMod7" $ whnf (U.foldl' subMod7 0) randoms
]
, bgroup
"(*%)"
[ bench "(*%)" $ whnf (U.foldl' (*%) 1) randoms
, bench "timesModGF" $ whnf (U.foldl' timesModGF 1) randoms
, bench "timesMod1" $ whnf (U.foldl' timesMod1 1) randoms
, bench "timesMod2" $ whnf (U.foldl' timesMod2 1) randoms
, bench "timesMod3" $ whnf (U.foldl' timesMod3 1) randoms
, bench "timesMod4" $ whnf (U.foldl' timesMod4 1) randoms
, bench "timesMod5" $ whnf (U.foldl' timesMod5 1) randoms
, bench "timesMod6" $ whnf (U.foldl' timesMod6 1) randoms
, bench "timesMod7" $ whnf (U.foldl' timesMod7 1) randoms
]
]
where
n = 10000
randoms :: U.Vector Int
randoms = withRNG $ \rng ->
U.replicateM n (getIntMod . intMod <$> nextInt rng)
#define MOD 1000000007
addModGF :: Int -> Int -> Int
addModGF = coerce ((+) @(GF MOD))
addMod1 :: Int -> Int -> Int
addMod1 (I# x#) (I# y#) = I# ((x# +# y#) `remInt#` MOD#)
addMod2 :: Int -> Int -> Int
addMod2 (I# x#) (I# y#) = case x# +# y# of
r#
| isTrue# (r# <# MOD#) -> I# r#
| otherwise -> I# (r# -# MOD#)
-- current
addMod3 :: Int -> Int -> Int
addMod3 (I# x#) (I# y#) = case x# +# y# of
r# -> I# (r# -# ((r# >=# MOD#) *# MOD#))
addMod4 :: Int -> Int -> Int
addMod4 (I# x#) (I# y#) = case x# +# y# of
r# -> I# (r# -# (MOD# *# (r# >=# MOD#)))
addMod5 :: Int -> Int -> Int
addMod5 (I# x#) (I# y#) = I# (x# +# y# -# (MOD# *# (x# +# y# >=# MOD#)))
subModGF :: Int -> Int -> Int
subModGF = coerce ((-) @(GF MOD))
subMod1 :: Int -> Int -> Int
subMod1 (I# x#) (I# y#) = I# ((x# -# y# +# MOD#) `remInt#` MOD#)
subMod2 :: Int -> Int -> Int
subMod2 (I# x#) (I# y#) = case x# -# y# of
r#
| isTrue# (r# >=# 0#) -> I# r#
| otherwise -> I# (r# +# MOD#)
subMod3 :: Int -> Int -> Int
subMod3 (I# x#) (I# y#)
| isTrue# (x# >=# y#) = I# (x# -# y#)
| otherwise = I# (x# -# y# +# MOD#)
-- current
subMod4 :: Int -> Int -> Int
subMod4 (I# x#) (I# y#) = case x# -# y# of
r# -> I# (r# +# ((r# <# 0#) *# MOD#))
subMod5 :: Int -> Int -> Int
subMod5 (I# x#) (I# y#) = case x# -# y# of
r# -> I# (r# +# (MOD# *# (r# <# 0#)))
subMod6 :: Int -> Int -> Int
subMod6 (I# x#) (I# y#) = I# (x# -# y# +# ((x# <# y#) *# MOD#))
subMod7 :: Int -> Int -> Int
subMod7 (I# x#) (I# y#) = I# (x# -# y# +# (MOD# *# (x# <# y#)))
#define INV_MOD 18446743945
timesModGF :: Int -> Int -> Int
timesModGF = coerce ((*) @(GF MOD))
timesMod1 :: Int -> Int -> Int
timesMod1 (I# x#) (I# y#) = I# (x# *# y# `remInt#` MOD#)
timesMod2 :: Int -> Int -> Int
timesMod2 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
timesMod3 :: Int -> Int -> Int
timesMod3 (I# x#) (I# y#) = case int2Word# (x# *# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
timesMod4 :: Int -> Int -> Int
timesMod4 (I# x#) (I# y#) = case int2Word# (x# *# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (geWord# v# MOD## *# MOD# +# word2Int# v#)
timesMod5 :: Int -> Int -> Int
timesMod5 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
where
im# = plusWord# (quotWord# 0xffffffffffffffff## MOD##) 1##
timesMod6 :: Int -> Int -> Int
timesMod6 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v#
| isTrue# (geWord# v# MOD##) -> I# (word2Int# (plusWord# v# MOD##))
| otherwise -> I# (word2Int# v#)
where
im# = plusWord# (quotWord# 0xffffffffffffffff## MOD##) 1##
timesMod7 :: Int -> Int -> Int
timesMod7 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# m#) of
v#
| isTrue# (geWord# v# m#) -> I# (word2Int# (plusWord# v# m#))
| otherwise -> I# (word2Int# v#)
where
m# = int2Word# MOD#
im# = plusWord# (quotWord# 0xffffffffffffffff## m#) 1##
| null | https://raw.githubusercontent.com/cojna/iota/6d2ad5b71b1b50bca9136d6ed84f80a0b7713d7c/benchmark/Data/IntModBench.hs | haskell | current
current | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE TypeApplications #
# LANGUAGE UnboxedTuples #
module Data.IntModBench (benchMain) where
import Criterion
import Data.GaloisField
import Data.IntMod
import qualified Data.Vector.Unboxed as U
import GHC.Exts
import System.Random.XoRoShiRo
benchMain :: Benchmark
benchMain =
bgroup
"IntMod"
[ bgroup
"(+%)"
[ bench "(+%)" $ whnf (U.foldl' (+%) 0) randoms
, bench "addModGF" $ whnf (U.foldl' addModGF 0) randoms
, bench "addMod1" $ whnf (U.foldl' addMod1 0) randoms
, bench "addMod2" $ whnf (U.foldl' addMod2 0) randoms
, bench "addMod3" $ whnf (U.foldl' addMod3 0) randoms
, bench "addMod4" $ whnf (U.foldl' addMod4 0) randoms
, bench "addMod5" $ whnf (U.foldl' addMod5 0) randoms
]
, bgroup
"(-%)"
[ bench "(-%)" $ whnf (U.foldl' (-%) 0) randoms
, bench "subModGF" $ whnf (U.foldl' subModGF 0) randoms
, bench "subMod1" $ whnf (U.foldl' subMod1 0) randoms
, bench "subMod2" $ whnf (U.foldl' subMod2 0) randoms
, bench "subMod3" $ whnf (U.foldl' subMod3 0) randoms
, bench "subMod4" $ whnf (U.foldl' subMod4 0) randoms
, bench "subMod5" $ whnf (U.foldl' subMod5 0) randoms
, bench "subMod6" $ whnf (U.foldl' subMod6 0) randoms
, bench "subMod7" $ whnf (U.foldl' subMod7 0) randoms
]
, bgroup
"(*%)"
[ bench "(*%)" $ whnf (U.foldl' (*%) 1) randoms
, bench "timesModGF" $ whnf (U.foldl' timesModGF 1) randoms
, bench "timesMod1" $ whnf (U.foldl' timesMod1 1) randoms
, bench "timesMod2" $ whnf (U.foldl' timesMod2 1) randoms
, bench "timesMod3" $ whnf (U.foldl' timesMod3 1) randoms
, bench "timesMod4" $ whnf (U.foldl' timesMod4 1) randoms
, bench "timesMod5" $ whnf (U.foldl' timesMod5 1) randoms
, bench "timesMod6" $ whnf (U.foldl' timesMod6 1) randoms
, bench "timesMod7" $ whnf (U.foldl' timesMod7 1) randoms
]
]
where
n = 10000
randoms :: U.Vector Int
randoms = withRNG $ \rng ->
U.replicateM n (getIntMod . intMod <$> nextInt rng)
#define MOD 1000000007
addModGF :: Int -> Int -> Int
addModGF = coerce ((+) @(GF MOD))
addMod1 :: Int -> Int -> Int
addMod1 (I# x#) (I# y#) = I# ((x# +# y#) `remInt#` MOD#)
addMod2 :: Int -> Int -> Int
addMod2 (I# x#) (I# y#) = case x# +# y# of
r#
| isTrue# (r# <# MOD#) -> I# r#
| otherwise -> I# (r# -# MOD#)
addMod3 :: Int -> Int -> Int
addMod3 (I# x#) (I# y#) = case x# +# y# of
r# -> I# (r# -# ((r# >=# MOD#) *# MOD#))
addMod4 :: Int -> Int -> Int
addMod4 (I# x#) (I# y#) = case x# +# y# of
r# -> I# (r# -# (MOD# *# (r# >=# MOD#)))
addMod5 :: Int -> Int -> Int
addMod5 (I# x#) (I# y#) = I# (x# +# y# -# (MOD# *# (x# +# y# >=# MOD#)))
subModGF :: Int -> Int -> Int
subModGF = coerce ((-) @(GF MOD))
subMod1 :: Int -> Int -> Int
subMod1 (I# x#) (I# y#) = I# ((x# -# y# +# MOD#) `remInt#` MOD#)
subMod2 :: Int -> Int -> Int
subMod2 (I# x#) (I# y#) = case x# -# y# of
r#
| isTrue# (r# >=# 0#) -> I# r#
| otherwise -> I# (r# +# MOD#)
subMod3 :: Int -> Int -> Int
subMod3 (I# x#) (I# y#)
| isTrue# (x# >=# y#) = I# (x# -# y#)
| otherwise = I# (x# -# y# +# MOD#)
subMod4 :: Int -> Int -> Int
subMod4 (I# x#) (I# y#) = case x# -# y# of
r# -> I# (r# +# ((r# <# 0#) *# MOD#))
subMod5 :: Int -> Int -> Int
subMod5 (I# x#) (I# y#) = case x# -# y# of
r# -> I# (r# +# (MOD# *# (r# <# 0#)))
subMod6 :: Int -> Int -> Int
subMod6 (I# x#) (I# y#) = I# (x# -# y# +# ((x# <# y#) *# MOD#))
subMod7 :: Int -> Int -> Int
subMod7 (I# x#) (I# y#) = I# (x# -# y# +# (MOD# *# (x# <# y#)))
#define INV_MOD 18446743945
timesModGF :: Int -> Int -> Int
timesModGF = coerce ((*) @(GF MOD))
timesMod1 :: Int -> Int -> Int
timesMod1 (I# x#) (I# y#) = I# (x# *# y# `remInt#` MOD#)
timesMod2 :: Int -> Int -> Int
timesMod2 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
timesMod3 :: Int -> Int -> Int
timesMod3 (I# x#) (I# y#) = case int2Word# (x# *# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
timesMod4 :: Int -> Int -> Int
timesMod4 (I# x#) (I# y#) = case int2Word# (x# *# y#) of
z# -> case timesWord2# z# INV_MOD## of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (geWord# v# MOD## *# MOD# +# word2Int# v#)
timesMod5 :: Int -> Int -> Int
timesMod5 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v# -> I# (word2Int# v# +# leWord# MOD## v# *# MOD#)
where
im# = plusWord# (quotWord# 0xffffffffffffffff## MOD##) 1##
timesMod6 :: Int -> Int -> Int
timesMod6 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# MOD##) of
v#
| isTrue# (geWord# v# MOD##) -> I# (word2Int# (plusWord# v# MOD##))
| otherwise -> I# (word2Int# v#)
where
im# = plusWord# (quotWord# 0xffffffffffffffff## MOD##) 1##
timesMod7 :: Int -> Int -> Int
timesMod7 (I# x#) (I# y#) = case timesWord# (int2Word# x#) (int2Word# y#) of
z# -> case timesWord2# z# im# of
(# q#, _ #) -> case minusWord# z# (timesWord# q# m#) of
v#
| isTrue# (geWord# v# m#) -> I# (word2Int# (plusWord# v# m#))
| otherwise -> I# (word2Int# v#)
where
m# = int2Word# MOD#
im# = plusWord# (quotWord# 0xffffffffffffffff## m#) 1##
|
bf4f7f094f47439d3f548573e721025e1d9c9e734a99faa94ee66a46416dc5d9 | borodust/bodge-nanovg | x86_64-pc-windows-gnu.lisp | (uiop:define-package :%nanovg (:use))
(uiop:define-package :bodge-nanovg-gl2-bindings~pristine (:use :cl))
(common-lisp:in-package :bodge-nanovg-gl2-bindings~pristine)
(cffi:defbitfield (%nanovg::align :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:74:6"
(:left 1)
(:center 2)
(:right 4)
(:top 8)
(:middle 16)
(:bottom 32)
(:baseline 64))
(cffi:defbitfield (%nanovg::blend-factor :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:86:6"
(:zero 1)
(:one 2)
(:src-color 4)
(:one-minus-src-color 8)
(:dst-color 16)
(:one-minus-dst-color 32)
(:src-alpha 64)
(:one-minus-src-alpha 128)
(:dst-alpha 256)
(:one-minus-dst-alpha 512)
(:src-alpha-saturate 1024))
(cffi:defcenum (%nanovg::composite-operation :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:100:6"
(:source-over 0)
(:source-in 1)
(:source-out 2)
(:atop 3)
(:destination-over 4)
(:destination-in 5)
(:destination-out 6)
(:destination-atop 7)
(:lighter 8)
(:copy 9)
(:xor 10))
(cffi:defbitfield (%nanovg::create-flags :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg_gl.h:27:6"
(:antialias 1)
(:stencil-strokes 2)
(:debug 4))
(cffi:defbitfield (%nanovg::image-flags :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:138:6"
(:generate-mipmaps 1)
(:repeatx 2)
(:repeaty 4)
(:flipy 8)
(:premultiplied 16)
(:nearest 32))
(cffi:defbitfield (%nanovg::image-flags-gl :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg_gl.h:98:6"
(:image-nodelete 65536))
(cffi:defcenum (%nanovg::line-cap :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:66:6"
(:butt 0)
(:round 1)
(:square 2)
(:bevel 3)
(:miter 4))
(cffi:defbitfield (%nanovg::solidity :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:61:6"
(:solid 1)
(:hole 2))
(cffi:defbitfield (%nanovg::texture :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:618:6"
(:alpha 1)
(:rgba 2))
(cffi:defbitfield (%nanovg::winding :unsigned-int)
"/home/borodust/devel/repo/bodge-projects/bodge-nanovg/src/lib/nanovg/src/nanovg.h:56:6"
(:ccw 1)
(:cw 2))
(cffi:defcstruct (%nanovg::context :size 0))
(cffi:defctype %nanovg::context (:struct %nanovg::context))
(declaim (inline %nanovg::add-fallback-font))
(cffi:defcfun ("nvgAddFallbackFont" %nanovg::add-fallback-font)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::base-font claw-utils:claw-string)
(%nanovg::fallback-font claw-utils:claw-string))
(declaim (inline %nanovg::add-fallback-font-id))
(cffi:defcfun ("nvgAddFallbackFontId" %nanovg::add-fallback-font-id)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::base-font :int)
(%nanovg::fallback-font :int))
(declaim (inline %nanovg::arc))
(cffi:defcfun ("nvgArc" %nanovg::arc)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cx :float)
(%nanovg::cy :float)
(%nanovg::r :float)
(%nanovg::a0 :float)
(%nanovg::a1 :float)
(%nanovg::dir :int))
(declaim (inline %nanovg::arc-to))
(cffi:defcfun ("nvgArcTo" %nanovg::arc-to)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x1 :float)
(%nanovg::y1 :float)
(%nanovg::x2 :float)
(%nanovg::y2 :float)
(%nanovg::radius :float))
(declaim (inline %nanovg::begin-frame))
(cffi:defcfun ("nvgBeginFrame" %nanovg::begin-frame)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::window-width :float)
(%nanovg::window-height :float)
(%nanovg::device-pixel-ratio :float))
(declaim (inline %nanovg::begin-path))
(cffi:defcfun ("nvgBeginPath" %nanovg::begin-path)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::bezier-to))
(cffi:defcfun ("nvgBezierTo" %nanovg::bezier-to)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::c1x :float)
(%nanovg::c1y :float)
(%nanovg::c2x :float)
(%nanovg::c2y :float)
(%nanovg::x :float)
(%nanovg::y :float))
(cffi:defcstruct (%nanovg::|C:@S@NV-GCOLOR@UA@SA| :size 16)
(%nanovg::r :float :offset 0)
(%nanovg::g :float :offset 4)
(%nanovg::b :float :offset 8)
(%nanovg::a :float :offset 12))
(cffi:defcunion (%nanovg::|C:@S@NV-GCOLOR@UA| :size 16)
(%nanovg::rgba :float :count 4)
(%claw.anonymous::|0|
(:struct %nanovg::|C:@S@NV-GCOLOR@UA@SA|)))
(cffi:defcstruct (%nanovg::color :size 16)
(%claw.anonymous::|0|
(:union %nanovg::|C:@S@NV-GCOLOR@UA|) :offset 0))
(cffi:defctype %nanovg::color (:struct %nanovg::color))
(cffi:defcstruct (%nanovg::paint :size 76)
(%nanovg::xform :float :count 6 :offset 0)
(%nanovg::extent :float :count 2 :offset 24)
(%nanovg::radius :float :offset 32)
(%nanovg::feather :float :offset 36)
(%nanovg::inner-color %nanovg::color :offset 40)
(%nanovg::outer-color %nanovg::color :offset 56)
(%nanovg::image :int :offset 72))
(cffi:defctype %nanovg::paint (:struct %nanovg::paint))
(declaim (inline %nanovg::box-gradient))
(cffi:defcfun ("__claw_nvgBoxGradient" %nanovg::box-gradient)
(:pointer %nanovg::paint)
(%nanovg::%%claw-result- (:pointer %nanovg::paint))
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float)
(%nanovg::r :float)
(%nanovg::f :float)
(%nanovg::icol (:pointer %nanovg::color))
(%nanovg::ocol (:pointer %nanovg::color)))
(declaim (inline %nanovg::cancel-frame))
(cffi:defcfun ("nvgCancelFrame" %nanovg::cancel-frame)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::circle))
(cffi:defcfun ("nvgCircle" %nanovg::circle)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cx :float)
(%nanovg::cy :float)
(%nanovg::r :float))
(declaim (inline %nanovg::close-path))
(cffi:defcfun ("nvgClosePath" %nanovg::close-path)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::create-font))
(cffi:defcfun ("nvgCreateFont" %nanovg::create-font)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::name claw-utils:claw-string)
(%nanovg::filename claw-utils:claw-string))
(declaim (inline %nanovg::create-font-mem))
(cffi:defcfun ("nvgCreateFontMem" %nanovg::create-font-mem)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::name claw-utils:claw-string)
(%nanovg::data (:pointer :unsigned-char))
(%nanovg::ndata :int)
(%nanovg::free-data :int))
(declaim (inline %nanovg::create-gl2))
(cffi:defcfun ("nvgCreateGL2" %nanovg::create-gl2)
(:pointer %nanovg::context)
(%nanovg::flags :int))
(declaim (inline %nanovg::create-image))
(cffi:defcfun ("nvgCreateImage" %nanovg::create-image)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::filename claw-utils:claw-string)
(%nanovg::image-flags :int))
(declaim (inline %nanovg::create-image-mem))
(cffi:defcfun ("nvgCreateImageMem" %nanovg::create-image-mem)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::image-flags :int)
(%nanovg::data (:pointer :unsigned-char))
(%nanovg::ndata :int))
(declaim (inline %nanovg::create-image-rgba))
(cffi:defcfun ("nvgCreateImageRGBA" %nanovg::create-image-rgba)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::w :int)
(%nanovg::h :int)
(%nanovg::image-flags :int)
(%nanovg::data (:pointer :unsigned-char)))
(cffi:defcstruct (%nanovg::params :size 112)
(%nanovg::user-ptr (:pointer :void) :offset 0)
(%nanovg::edge-anti-alias :int :offset 8)
(%nanovg::render-create (:pointer :void) :offset 16)
(%nanovg::render-create-texture (:pointer :void)
:offset 24)
(%nanovg::render-delete-texture (:pointer :void)
:offset 32)
(%nanovg::render-update-texture (:pointer :void)
:offset 40)
(%nanovg::render-get-texture-size (:pointer :void)
:offset 48)
(%nanovg::render-viewport (:pointer :void) :offset
56)
(%nanovg::render-cancel (:pointer :void) :offset 64)
(%nanovg::render-flush (:pointer :void) :offset 72)
(%nanovg::render-fill (:pointer :void) :offset 80)
(%nanovg::render-stroke (:pointer :void) :offset 88)
(%nanovg::render-triangles (:pointer :void) :offset
96)
(%nanovg::render-delete (:pointer :void) :offset
104))
(cffi:defctype %nanovg::params (:struct %nanovg::params))
(declaim (inline %nanovg::create-internal))
(cffi:defcfun ("nvgCreateInternal" %nanovg::create-internal)
(:pointer %nanovg::context)
(%nanovg::params (:pointer %nanovg::params)))
(declaim (inline %nanovg::current-transform))
(cffi:defcfun ("nvgCurrentTransform" %nanovg::current-transform)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::xform (:pointer :float)))
(declaim (inline %nanovg::debug-dump-path-cache))
(cffi:defcfun ("nvgDebugDumpPathCache"
%nanovg::debug-dump-path-cache)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::deg-to-rad))
(cffi:defcfun ("nvgDegToRad" %nanovg::deg-to-rad)
:float
(%nanovg::deg :float))
(declaim (inline %nanovg::delete-gl2))
(cffi:defcfun ("nvgDeleteGL2" %nanovg::delete-gl2)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::delete-image))
(cffi:defcfun ("nvgDeleteImage" %nanovg::delete-image)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::image :int))
(declaim (inline %nanovg::delete-internal))
(cffi:defcfun ("nvgDeleteInternal" %nanovg::delete-internal)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::ellipse))
(cffi:defcfun ("nvgEllipse" %nanovg::ellipse)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cx :float)
(%nanovg::cy :float)
(%nanovg::rx :float)
(%nanovg::ry :float))
(declaim (inline %nanovg::end-frame))
(cffi:defcfun ("nvgEndFrame" %nanovg::end-frame)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::fill))
(cffi:defcfun ("nvgFill" %nanovg::fill)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::fill-color))
(cffi:defcfun ("__claw_nvgFillColor" %nanovg::fill-color)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::color (:pointer %nanovg::color)))
(declaim (inline %nanovg::fill-paint))
(cffi:defcfun ("__claw_nvgFillPaint" %nanovg::fill-paint)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::paint (:pointer %nanovg::paint)))
(declaim (inline %nanovg::find-font))
(cffi:defcfun ("nvgFindFont" %nanovg::find-font)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::name claw-utils:claw-string))
(declaim (inline %nanovg::font-blur))
(cffi:defcfun ("nvgFontBlur" %nanovg::font-blur)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::blur :float))
(declaim (inline %nanovg::font-face))
(cffi:defcfun ("nvgFontFace" %nanovg::font-face)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::font claw-utils:claw-string))
(declaim (inline %nanovg::font-face-id))
(cffi:defcfun ("nvgFontFaceId" %nanovg::font-face-id)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::font :int))
(declaim (inline %nanovg::font-size))
(cffi:defcfun ("nvgFontSize" %nanovg::font-size)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::size :float))
(declaim (inline %nanovg::global-alpha))
(cffi:defcfun ("nvgGlobalAlpha" %nanovg::global-alpha)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::alpha :float))
(declaim (inline %nanovg::global-composite-blend-func))
(cffi:defcfun ("nvgGlobalCompositeBlendFunc"
%nanovg::global-composite-blend-func)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::sfactor :int)
(%nanovg::dfactor :int))
(declaim (inline %nanovg::global-composite-blend-func-separate))
(cffi:defcfun ("nvgGlobalCompositeBlendFuncSeparate"
%nanovg::global-composite-blend-func-separate)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::src-rgb :int)
(%nanovg::dst-rgb :int)
(%nanovg::src-alpha :int)
(%nanovg::dst-alpha :int))
(declaim (inline %nanovg::global-composite-operation))
(cffi:defcfun ("nvgGlobalCompositeOperation"
%nanovg::global-composite-operation)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::op :int))
(declaim (inline %nanovg::hsl))
(cffi:defcfun ("__claw_nvgHSL" %nanovg::hsl)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::h :float)
(%nanovg::s :float)
(%nanovg::l :float))
(declaim (inline %nanovg::hsla))
(cffi:defcfun ("__claw_nvgHSLA" %nanovg::hsla)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::h :float)
(%nanovg::s :float)
(%nanovg::l :float)
(%nanovg::a :unsigned-char))
(declaim (inline %nanovg::image-pattern))
(cffi:defcfun ("__claw_nvgImagePattern" %nanovg::image-pattern)
(:pointer %nanovg::paint)
(%nanovg::%%claw-result- (:pointer %nanovg::paint))
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::ox :float)
(%nanovg::oy :float)
(%nanovg::ex :float)
(%nanovg::ey :float)
(%nanovg::angle :float)
(%nanovg::image :int)
(%nanovg::alpha :float))
(declaim (inline %nanovg::image-size))
(cffi:defcfun ("nvgImageSize" %nanovg::image-size)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::image :int)
(%nanovg::w (:pointer :int))
(%nanovg::h (:pointer :int)))
(declaim (inline %nanovg::internal-params))
(cffi:defcfun ("nvgInternalParams" %nanovg::internal-params)
(:pointer %nanovg::params)
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::intersect-scissor))
(cffi:defcfun ("nvgIntersectScissor" %nanovg::intersect-scissor)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float))
(declaim (inline %nanovg::lerp-rgba))
(cffi:defcfun ("__claw_nvgLerpRGBA" %nanovg::lerp-rgba)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::c0 (:pointer %nanovg::color))
(%nanovg::c1 (:pointer %nanovg::color))
(%nanovg::u :float))
(declaim (inline %nanovg::line-cap))
(cffi:defcfun ("nvgLineCap" %nanovg::line-cap)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cap :int))
(declaim (inline %nanovg::line-join))
(cffi:defcfun ("nvgLineJoin" %nanovg::line-join)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::join :int))
(declaim (inline %nanovg::line-to))
(cffi:defcfun ("nvgLineTo" %nanovg::line-to)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float))
(declaim (inline %nanovg::linear-gradient))
(cffi:defcfun ("__claw_nvgLinearGradient" %nanovg::linear-gradient)
(:pointer %nanovg::paint)
(%nanovg::%%claw-result- (:pointer %nanovg::paint))
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::sx :float)
(%nanovg::sy :float)
(%nanovg::ex :float)
(%nanovg::ey :float)
(%nanovg::icol (:pointer %nanovg::color))
(%nanovg::ocol (:pointer %nanovg::color)))
(declaim (inline %nanovg::miter-limit))
(cffi:defcfun ("nvgMiterLimit" %nanovg::miter-limit)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::limit :float))
(declaim (inline %nanovg::move-to))
(cffi:defcfun ("nvgMoveTo" %nanovg::move-to)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float))
(declaim (inline %nanovg::path-winding))
(cffi:defcfun ("nvgPathWinding" %nanovg::path-winding)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::dir :int))
(declaim (inline %nanovg::quad-to))
(cffi:defcfun ("nvgQuadTo" %nanovg::quad-to)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cx :float)
(%nanovg::cy :float)
(%nanovg::x :float)
(%nanovg::y :float))
(declaim (inline %nanovg::rgb))
(cffi:defcfun ("__claw_nvgRGB" %nanovg::rgb)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::r :unsigned-char)
(%nanovg::g :unsigned-char)
(%nanovg::b :unsigned-char))
(declaim (inline %nanovg::rgba))
(cffi:defcfun ("__claw_nvgRGBA" %nanovg::rgba)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::r :unsigned-char)
(%nanovg::g :unsigned-char)
(%nanovg::b :unsigned-char)
(%nanovg::a :unsigned-char))
(declaim (inline %nanovg::rgba-f))
(cffi:defcfun ("__claw_nvgRGBAf" %nanovg::rgba-f)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::r :float)
(%nanovg::g :float)
(%nanovg::b :float)
(%nanovg::a :float))
(declaim (inline %nanovg::rgb-f))
(cffi:defcfun ("__claw_nvgRGBf" %nanovg::rgb-f)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::r :float)
(%nanovg::g :float)
(%nanovg::b :float))
(declaim (inline %nanovg::rad-to-deg))
(cffi:defcfun ("nvgRadToDeg" %nanovg::rad-to-deg)
:float
(%nanovg::rad :float))
(declaim (inline %nanovg::radial-gradient))
(cffi:defcfun ("__claw_nvgRadialGradient" %nanovg::radial-gradient)
(:pointer %nanovg::paint)
(%nanovg::%%claw-result- (:pointer %nanovg::paint))
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::cx :float)
(%nanovg::cy :float)
(%nanovg::inr :float)
(%nanovg::outr :float)
(%nanovg::icol (:pointer %nanovg::color))
(%nanovg::ocol (:pointer %nanovg::color)))
(declaim (inline %nanovg::rect))
(cffi:defcfun ("nvgRect" %nanovg::rect)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float))
(declaim (inline %nanovg::reset))
(cffi:defcfun ("nvgReset" %nanovg::reset)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::reset-scissor))
(cffi:defcfun ("nvgResetScissor" %nanovg::reset-scissor)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::reset-transform))
(cffi:defcfun ("nvgResetTransform" %nanovg::reset-transform)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::restore))
(cffi:defcfun ("nvgRestore" %nanovg::restore)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::rotate))
(cffi:defcfun ("nvgRotate" %nanovg::rotate)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::angle :float))
(declaim (inline %nanovg::rounded-rect))
(cffi:defcfun ("nvgRoundedRect" %nanovg::rounded-rect)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float)
(%nanovg::r :float))
(declaim (inline %nanovg::rounded-rect-varying))
(cffi:defcfun ("nvgRoundedRectVarying" %nanovg::rounded-rect-varying)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float)
(%nanovg::rad-top-left :float)
(%nanovg::rad-top-right :float)
(%nanovg::rad-bottom-right :float)
(%nanovg::rad-bottom-left :float))
(declaim (inline %nanovg::save))
(cffi:defcfun ("nvgSave" %nanovg::save)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::scale))
(cffi:defcfun ("nvgScale" %nanovg::scale)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float))
(declaim (inline %nanovg::scissor))
(cffi:defcfun ("nvgScissor" %nanovg::scissor)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::w :float)
(%nanovg::h :float))
(declaim (inline %nanovg::shape-anti-alias))
(cffi:defcfun ("nvgShapeAntiAlias" %nanovg::shape-anti-alias)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::enabled :int))
(declaim (inline %nanovg::skew-x))
(cffi:defcfun ("nvgSkewX" %nanovg::skew-x)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::angle :float))
(declaim (inline %nanovg::skew-y))
(cffi:defcfun ("nvgSkewY" %nanovg::skew-y)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::angle :float))
(declaim (inline %nanovg::stroke))
(cffi:defcfun ("nvgStroke" %nanovg::stroke)
:void
(%nanovg::ctx (:pointer %nanovg::context)))
(declaim (inline %nanovg::stroke-color))
(cffi:defcfun ("__claw_nvgStrokeColor" %nanovg::stroke-color)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::color (:pointer %nanovg::color)))
(declaim (inline %nanovg::stroke-paint))
(cffi:defcfun ("__claw_nvgStrokePaint" %nanovg::stroke-paint)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::paint (:pointer %nanovg::paint)))
(declaim (inline %nanovg::stroke-width))
(cffi:defcfun ("nvgStrokeWidth" %nanovg::stroke-width)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::size :float))
(declaim (inline %nanovg::text))
(cffi:defcfun ("nvgText" %nanovg::text)
:float
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string))
(declaim (inline %nanovg::text-align))
(cffi:defcfun ("nvgTextAlign" %nanovg::text-align)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::align :int))
(declaim (inline %nanovg::text-bounds))
(cffi:defcfun ("nvgTextBounds" %nanovg::text-bounds)
:float
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string)
(%nanovg::bounds (:pointer :float)))
(declaim (inline %nanovg::text-box))
(cffi:defcfun ("nvgTextBox" %nanovg::text-box)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::break-row-width :float)
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string))
(declaim (inline %nanovg::text-box-bounds))
(cffi:defcfun ("nvgTextBoxBounds" %nanovg::text-box-bounds)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::break-row-width :float)
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string)
(%nanovg::bounds (:pointer :float)))
(cffi:defcstruct (%nanovg::text-row :size 40)
(%nanovg::start claw-utils:claw-string :offset 0)
(%nanovg::end claw-utils:claw-string :offset 8)
(%nanovg::next claw-utils:claw-string :offset 16)
(%nanovg::width :float :offset 24)
(%nanovg::minx :float :offset 28)
(%nanovg::maxx :float :offset 32))
(cffi:defctype %nanovg::text-row (:struct %nanovg::text-row))
(declaim (inline %nanovg::text-break-lines))
(cffi:defcfun ("nvgTextBreakLines" %nanovg::text-break-lines)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string)
(%nanovg::break-row-width :float)
(%nanovg::rows (:pointer %nanovg::text-row))
(%nanovg::max-rows :int))
(cffi:defcstruct (%nanovg::glyph-position :size 24)
(%nanovg::str claw-utils:claw-string :offset 0)
(%nanovg::x :float :offset 8)
(%nanovg::minx :float :offset 12)
(%nanovg::maxx :float :offset 16))
(cffi:defctype %nanovg::glyph-position
(:struct %nanovg::glyph-position))
(declaim (inline %nanovg::text-glyph-positions))
(cffi:defcfun ("nvgTextGlyphPositions" %nanovg::text-glyph-positions)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float)
(%nanovg::string claw-utils:claw-string)
(%nanovg::end claw-utils:claw-string)
(%nanovg::positions (:pointer %nanovg::glyph-position))
(%nanovg::max-positions :int))
(declaim (inline %nanovg::text-letter-spacing))
(cffi:defcfun ("nvgTextLetterSpacing" %nanovg::text-letter-spacing)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::spacing :float))
(declaim (inline %nanovg::text-line-height))
(cffi:defcfun ("nvgTextLineHeight" %nanovg::text-line-height)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::line-height :float))
(declaim (inline %nanovg::text-metrics))
(cffi:defcfun ("nvgTextMetrics" %nanovg::text-metrics)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::ascender (:pointer :float))
(%nanovg::descender (:pointer :float))
(%nanovg::lineh (:pointer :float)))
(declaim (inline %nanovg::trans-rgba))
(cffi:defcfun ("__claw_nvgTransRGBA" %nanovg::trans-rgba)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::c0 (:pointer %nanovg::color))
(%nanovg::a :unsigned-char))
(declaim (inline %nanovg::trans-rgb-af))
(cffi:defcfun ("__claw_nvgTransRGBAf" %nanovg::trans-rgb-af)
(:pointer %nanovg::color)
(%nanovg::%%claw-result- (:pointer %nanovg::color))
(%nanovg::c0 (:pointer %nanovg::color))
(%nanovg::a :float))
(declaim (inline %nanovg::transform))
(cffi:defcfun ("nvgTransform" %nanovg::transform)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::a :float)
(%nanovg::b :float)
(%nanovg::c :float)
(%nanovg::d :float)
(%nanovg::e :float)
(%nanovg::f :float))
(declaim (inline %nanovg::transform-identity))
(cffi:defcfun ("nvgTransformIdentity" %nanovg::transform-identity)
:void
(%nanovg::dst (:pointer :float)))
(declaim (inline %nanovg::transform-inverse))
(cffi:defcfun ("nvgTransformInverse" %nanovg::transform-inverse)
:int
(%nanovg::dst (:pointer :float))
(%nanovg::src (:pointer :float)))
(declaim (inline %nanovg::transform-multiply))
(cffi:defcfun ("nvgTransformMultiply" %nanovg::transform-multiply)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::src (:pointer :float)))
(declaim (inline %nanovg::transform-point))
(cffi:defcfun ("nvgTransformPoint" %nanovg::transform-point)
:void
(%nanovg::dstx (:pointer :float))
(%nanovg::dsty (:pointer :float))
(%nanovg::xform (:pointer :float))
(%nanovg::srcx :float)
(%nanovg::srcy :float))
(declaim (inline %nanovg::transform-premultiply))
(cffi:defcfun ("nvgTransformPremultiply"
%nanovg::transform-premultiply)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::src (:pointer :float)))
(declaim (inline %nanovg::transform-rotate))
(cffi:defcfun ("nvgTransformRotate" %nanovg::transform-rotate)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::a :float))
(declaim (inline %nanovg::transform-scale))
(cffi:defcfun ("nvgTransformScale" %nanovg::transform-scale)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::sx :float)
(%nanovg::sy :float))
(declaim (inline %nanovg::transform-skew-x))
(cffi:defcfun ("nvgTransformSkewX" %nanovg::transform-skew-x)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::a :float))
(declaim (inline %nanovg::transform-skew-y))
(cffi:defcfun ("nvgTransformSkewY" %nanovg::transform-skew-y)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::a :float))
(declaim (inline %nanovg::transform-translate))
(cffi:defcfun ("nvgTransformTranslate" %nanovg::transform-translate)
:void
(%nanovg::dst (:pointer :float))
(%nanovg::tx :float)
(%nanovg::ty :float))
(declaim (inline %nanovg::translate))
(cffi:defcfun ("nvgTranslate" %nanovg::translate)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::x :float)
(%nanovg::y :float))
(declaim (inline %nanovg::update-image))
(cffi:defcfun ("nvgUpdateImage" %nanovg::update-image)
:void
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::image :int)
(%nanovg::data (:pointer :unsigned-char)))
(cffi:defctype %nanovg::g-luint :unsigned-int)
(declaim (inline %nanovg::create-image-from-handle-gl2))
(cffi:defcfun ("nvglCreateImageFromHandleGL2"
%nanovg::create-image-from-handle-gl2)
:int
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::texture-id %nanovg::g-luint)
(%nanovg::w :int)
(%nanovg::h :int)
(%nanovg::flags :int))
(declaim (inline %nanovg::image-handle-gl2))
(cffi:defcfun ("nvglImageHandleGL2" %nanovg::image-handle-gl2)
%nanovg::g-luint
(%nanovg::ctx (:pointer %nanovg::context))
(%nanovg::image :int))
(cffi:defcstruct (%nanovg::composite-operation-state :size 16)
(%nanovg::src-rgb :int :offset 0)
(%nanovg::dst-rgb :int :offset 4)
(%nanovg::src-alpha :int :offset 8)
(%nanovg::dst-alpha :int :offset 12))
(cffi:defcstruct (%nanovg::vertex :size 16)
(%nanovg::x :float :offset 0)
(%nanovg::y :float :offset 4)
(%nanovg::u :float :offset 8)
(%nanovg::v :float :offset 12))
(cffi:defctype %nanovg::vertex (:struct %nanovg::vertex))
(cffi:defcstruct (%nanovg::path :size 56)
(%nanovg::first :int :offset 0)
(%nanovg::count :int :offset 4)
(%nanovg::closed :unsigned-char :offset 8)
(%nanovg::nbevel :int :offset 12)
(%nanovg::fill (:pointer %nanovg::vertex) :offset
16)
(%nanovg::nfill :int :offset 24)
(%nanovg::stroke (:pointer %nanovg::vertex) :offset
32)
(%nanovg::nstroke :int :offset 40)
(%nanovg::winding :int :offset 44)
(%nanovg::convex :int :offset 48))
(cffi:defcstruct (%nanovg::scissor :size 32)
(%nanovg::xform :float :count 6 :offset 0)
(%nanovg::extent :float :count 2 :offset 24))
(cffi:defctype %nanovg::composite-operation-state
(:struct %nanovg::composite-operation-state))
(cffi:defctype %nanovg::path (:struct %nanovg::path))
(cffi:defctype %nanovg::scissor (:struct %nanovg::scissor))
(eval-when (:load-toplevel :compile-toplevel :execute)
(export '%nanovg::nstroke :%nanovg)
(export '%nanovg::glyph-position :%nanovg)
(export '%nanovg::render-fill :%nanovg)
(export '%nanovg::texture :%nanovg)
(export '%nanovg::rgba-f :%nanovg)
(export '%nanovg::line-cap :%nanovg)
(export '%nanovg::rgb-f :%nanovg)
(export '%nanovg::composite-operation :%nanovg)
(export '%nanovg::font-face-id :%nanovg)
(export '%nanovg::rounded-rect-varying :%nanovg)
(export '%nanovg::miter-limit :%nanovg)
(export '%nanovg::reset-transform :%nanovg)
(export '%nanovg::stroke-color :%nanovg)
(export '%nanovg::end-frame :%nanovg)
(export '%nanovg::transform-premultiply :%nanovg)
(export '%nanovg::skew-y :%nanovg)
(export '%nanovg::arc-to :%nanovg)
(export '%nanovg::radius :%nanovg)
(export '%nanovg::path-winding :%nanovg)
(export '%nanovg::transform :%nanovg)
(export '%nanovg::create-internal :%nanovg)
(export '%nanovg::transform-inverse :%nanovg)
(export '%nanovg::end :%nanovg)
(export '%claw.anonymous::|0| nil)
(export '%nanovg::winding :%nanovg)
(export '%nanovg::box-gradient :%nanovg)
(export '%nanovg::transform-identity :%nanovg)
(export '%nanovg::add-fallback-font :%nanovg)
(export '%nanovg::skew-x :%nanovg)
(export '%nanovg::str :%nanovg)
(export '%nanovg::solidity :%nanovg)
(export '%nanovg::line-join :%nanovg)
(export '%nanovg::reset :%nanovg)
(export '%nanovg::internal-params :%nanovg)
(export '%nanovg::render-create :%nanovg)
(export '%nanovg::dst-alpha :%nanovg)
(export '%nanovg::rgba :%nanovg)
(export '%nanovg::trans-rgb-af :%nanovg)
(export '%nanovg::update-image :%nanovg)
(export '%nanovg::create-image-mem :%nanovg)
(export '%nanovg::ellipse :%nanovg)
(export '%nanovg::trans-rgba :%nanovg)
(export '%nanovg::|C:@S@NV-GCOLOR@UA| :%nanovg)
(export '%nanovg::font-face :%nanovg)
(export '%nanovg::text-metrics :%nanovg)
(export '%nanovg::nfill :%nanovg)
(export '%nanovg::edge-anti-alias :%nanovg)
(export '%nanovg::begin-frame :%nanovg)
(export '%nanovg::path :%nanovg)
(export '%nanovg::text-break-lines :%nanovg)
(export '%nanovg::image-handle-gl2 :%nanovg)
(export '%nanovg::y :%nanovg)
(export '%nanovg::shape-anti-alias :%nanovg)
(export '%nanovg::create-font :%nanovg)
(export '%nanovg::global-composite-blend-func :%nanovg)
(export '%nanovg::maxx :%nanovg)
(export '%nanovg::font-size :%nanovg)
(export '%nanovg::fill-color :%nanovg)
(export '%nanovg::create-gl2 :%nanovg)
(export '%nanovg::stroke :%nanovg)
(export '%nanovg::render-update-texture :%nanovg)
(export '%nanovg::render-get-texture-size :%nanovg)
(export '%nanovg::text-box-bounds :%nanovg)
(export '%nanovg::render-cancel :%nanovg)
(export '%nanovg::render-stroke :%nanovg)
(export '%nanovg::next :%nanovg)
(export '%nanovg::save :%nanovg)
(export '%nanovg::text-line-height :%nanovg)
(export '%nanovg::outer-color :%nanovg)
(export '%nanovg::current-transform :%nanovg)
(export '%nanovg::image-size :%nanovg)
(export '%nanovg::create-image-from-handle-gl2 :%nanovg)
(export '%nanovg::convex :%nanovg)
(export '%nanovg::xform :%nanovg)
(export '%nanovg::radial-gradient :%nanovg)
(export '%nanovg::delete-internal :%nanovg)
(export '%nanovg::create-image-rgba :%nanovg)
(export '%nanovg::quad-to :%nanovg)
(export '%nanovg::transform-point :%nanovg)
(export '%nanovg::scale :%nanovg)
(export '%nanovg::paint :%nanovg)
(export '%nanovg::x :%nanovg)
(export '%nanovg::delete-gl2 :%nanovg)
(export '%nanovg::text-align :%nanovg)
(export '%nanovg::g :%nanovg)
(export '%nanovg::text-glyph-positions :%nanovg)
(export '%nanovg::transform-multiply :%nanovg)
(export '%nanovg::add-fallback-font-id :%nanovg)
(export '%nanovg::close-path :%nanovg)
(export '%nanovg::debug-dump-path-cache :%nanovg)
(export '%nanovg::transform-skew-y :%nanovg)
(export '%nanovg::width :%nanovg)
(export '%nanovg::render-triangles :%nanovg)
(export '%nanovg::stroke-width :%nanovg)
(export '%nanovg::intersect-scissor :%nanovg)
(export '%nanovg::minx :%nanovg)
(export '%nanovg::restore :%nanovg)
(export '%nanovg::r :%nanovg)
(export '%nanovg::a :%nanovg)
(export '%nanovg::text :%nanovg)
(export '%nanovg::translate :%nanovg)
(export '%nanovg::inner-color :%nanovg)
(export '%nanovg::image-pattern :%nanovg)
(export '%nanovg::first :%nanovg)
(export '%nanovg::cancel-frame :%nanovg)
(export '%nanovg::find-font :%nanovg)
(export '%nanovg::scissor :%nanovg)
(export '%nanovg::move-to :%nanovg)
(export '%nanovg::text-box :%nanovg)
(export '%nanovg::circle :%nanovg)
(export '%nanovg::text-bounds :%nanovg)
(export '%nanovg::create-flags :%nanovg)
(export '%nanovg::transform-translate :%nanovg)
(export '%nanovg::arc :%nanovg)
(export '%nanovg::rounded-rect :%nanovg)
(export '%nanovg::global-composite-operation :%nanovg)
(export '%nanovg::deg-to-rad :%nanovg)
(export '%nanovg::|C:@S@NV-GCOLOR@UA@SA| :%nanovg)
(export '%nanovg::lerp-rgba :%nanovg)
(export '%nanovg::line-to :%nanovg)
(export '%nanovg::render-delete :%nanovg)
(export '%nanovg::blend-factor :%nanovg)
(export '%nanovg::rotate :%nanovg)
(export '%nanovg::b :%nanovg)
(export '%nanovg::rgb :%nanovg)
(export '%nanovg::rect :%nanovg)
(export '%nanovg::user-ptr :%nanovg)
(export '%nanovg::linear-gradient :%nanovg)
(export '%nanovg::begin-path :%nanovg)
(export '%nanovg::delete-image :%nanovg)
(export '%nanovg::src-alpha :%nanovg)
(export '%nanovg::font-blur :%nanovg)
(export '%nanovg::extent :%nanovg)
(export '%nanovg::nbevel :%nanovg)
(export '%nanovg::stroke-paint :%nanovg)
(export '%nanovg::global-composite-blend-func-separate :%nanovg)
(export '%nanovg::image-flags-gl :%nanovg)
(export '%nanovg::color :%nanovg)
(export '%nanovg::vertex :%nanovg)
(export '%nanovg::transform-rotate :%nanovg)
(export '%nanovg::image :%nanovg)
(export '%nanovg::rad-to-deg :%nanovg)
(export '%nanovg::u :%nanovg)
(export '%nanovg::render-viewport :%nanovg)
(export '%nanovg::fill :%nanovg)
(export '%nanovg::count :%nanovg)
(export '%nanovg::render-delete-texture :%nanovg)
(export '%nanovg::create-image :%nanovg)
(export '%nanovg::transform-skew-x :%nanovg)
(export '%nanovg::image-flags :%nanovg)
(export '%nanovg::text-row :%nanovg)
(export '%nanovg::text-letter-spacing :%nanovg)
(export '%nanovg::bezier-to :%nanovg)
(export '%nanovg::start :%nanovg)
(export '%nanovg::context :%nanovg)
(export '%nanovg::render-create-texture :%nanovg)
(export '%nanovg::feather :%nanovg)
(export '%nanovg::g-luint :%nanovg)
(export '%nanovg::composite-operation-state :%nanovg)
(export '%nanovg::fill-paint :%nanovg)
(export '%nanovg::render-flush :%nanovg)
(export '%nanovg::transform-scale :%nanovg)
(export '%nanovg::src-rgb :%nanovg)
(export '%nanovg::reset-scissor :%nanovg)
(export '%nanovg::global-alpha :%nanovg)
(export '%nanovg::hsl :%nanovg)
(export '%nanovg::align :%nanovg)
(export '%nanovg::dst-rgb :%nanovg)
(export '%nanovg::create-font-mem :%nanovg)
(export '%nanovg::closed :%nanovg)
(export '%nanovg::v :%nanovg)
(export '%nanovg::hsla :%nanovg)
(export '%nanovg::params :%nanovg))
9d5680c378b08e69515855b96e6907bfe9ccdd16e2862ff186ec670b6c28c803 | arttuka/reagent-material-ui | browse_gallery_sharp.cljs | (ns reagent-mui.icons.browse-gallery-sharp
"Imports @mui/icons-material/BrowseGallerySharp as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def browse-gallery-sharp (create-svg-icon [(e "path" #js {"d" "M9 3c-4.97 0-9 4.03-9 9s4.03 9 9 9 9-4.03 9-9-4.03-9-9-9zm2.79 13.21L8 12.41V7h2v4.59l3.21 3.21-1.42 1.41z"}) (e "path" #js {"d" "M17.99 3.52v2.16C20.36 6.8 22 9.21 22 12c0 2.79-1.64 5.2-4.01 6.32v2.16C21.48 19.24 24 15.91 24 12s-2.52-7.24-6.01-8.48z"})]
"BrowseGallerySharp"))
49c9f4fb7071321bbd75efcf1a150bf30e65e7013ddd6b00b187754f4b74fc17 | carp-lang/Carp | Types.hs | {-# LANGUAGE DeriveGeneric #-}
module Types
( TypeMappings,
Ty (..),
showMaybeTy,
unifySignatures,
replaceTyVars,
areUnifiable,
typesDeleterFunctionType,
typesCopyFunctionType,
doesTypeContainTyVarWithName,
replaceConflicted,
lambdaEnvTy,
typeEqIgnoreLifetimes,
checkKinds,
-- SymPath imports
SymPath (..),
mangle,
pathToC,
consPath,
Kind,
tyToKind,
areKindsConsistent,
createStructName,
getStructName,
getPathFromStructName,
getNameFromStructName,
getStructPath,
promoteNumber,
)
where
import Data.Hashable
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Data.Text (pack, splitOn, unpack)
import GHC.Generics (Generic)
import qualified Map
import SymPath
import Util
--import Debug.Trace
-- | Carp types.
data Ty
= IntTy
| LongTy
| ByteTy
| BoolTy
| FloatTy
| DoubleTy
| StringTy
| PatternTy
| CharTy
| CCharTy
| FuncTy [Ty] Ty Ty -- In order of appearance: (1) Argument types, (2) Return type, (3) Lifetime
| VarTy String
| UnitTy
| ModuleTy
| PointerTy Ty
| RefTy Ty Ty -- second Ty is the lifetime
| StaticLifetimeTy
| StructTy Ty [Ty] -- the name (possibly a var) of the struct, and it's type parameters
| ConcreteNameTy SymPath -- the name of a struct
| TypeTy -- the type of types
| MacroTy
| DynamicTy -- the type of dynamic functions (used in REPL and macros)
| InterfaceTy
| CTy -- C literals
| Universe -- the type of types of types (the type of TypeTy)
deriving (Eq, Ord, Generic)
instance Hashable Ty
-- | Kinds checking
-- Carp's system is simple enough that we do not need to describe kinds by their arity.
-- After confirming two tys have either base or higher kind,
-- unification checks are sufficient to determine whether their arities are compatible.
data Kind
= Base
| Higher
deriving (Eq, Ord, Show)
tyToKind :: Ty -> Kind
tyToKind (StructTy _ _) = Higher
tyToKind FuncTy {} = Higher -- the type of functions, consider the (->) constructor in Haskell
tyToKind (PointerTy _) = Higher
tyToKind (RefTy _ _) = Higher -- Refs may also be treated as a data constructor
tyToKind _ = Base
-- | Check whether or not the kinds of type variables are consistent.
-- This function will return Left as soon as a variable is used inconsistently,
-- reporting which variable triggered the issue.
-- If all variables are used consistently, it will process the whole list and
-- return ().
--
-- Kind arity matters; that is, `(f a b)` is not consistent with
-- `(f b)`. So long as the kind of a variable is the same across its uses,
-- everything is OK, for example:
-- ((Foo f a b) [x (f a) y (f b)])
-- is valid, and so is
-- ((Foo f a b) [x f y a z b])
-- But a definition such as:
-- ((Foo f a b) [x (f a b) y (f a)])
-- is inconsistent (kind of `f` differs) and so is
-- ((Foo f a b) [x (f a) y b (b a)])
-- (kind of `b` is inconsistent.
areKindsConsistent :: [Ty] -> Either String ()
areKindsConsistent typeVars =
assignKinds typeVars Map.empty
where
assignKinds :: [Ty] -> Map.Map String Int -> Either String ()
assignKinds ((StructTy (VarTy name) vars) : rest) arityMap =
case Map.lookup name arityMap of
Nothing -> assignKinds next (Map.insert name kind arityMap)
Just k ->
if k == kind
then assignKinds next arityMap
else Left name
where
next = vars ++ rest
kind = length vars
assignKinds ((VarTy v) : rest) arityMap =
case Map.lookup v arityMap of
Nothing -> assignKinds rest (Map.insert v kind arityMap)
Just k ->
if k == kind
then assignKinds rest arityMap
else Left v
where
kind = 0
assignKinds (FuncTy args ret _ : rest) arityMap =
assignKinds (args ++ ret : rest) arityMap
assignKinds ((PointerTy p) : rest) arityMap =
assignKinds (p : rest) arityMap
assignKinds ((RefTy r _) : rest) arityMap =
assignKinds (r : rest) arityMap
assignKinds (_ : rest) arityMap = assignKinds rest arityMap
assignKinds [] _ = pure ()
-- Exactly like '==' for Ty, but ignore lifetime parameter
typeEqIgnoreLifetimes :: Ty -> Ty -> Bool
typeEqIgnoreLifetimes (RefTy a _) (RefTy b _) = a == b
typeEqIgnoreLifetimes (FuncTy argsA retA _) (FuncTy argsB retB _) =
all (== True) (zipWith typeEqIgnoreLifetimes argsA argsB)
&& typeEqIgnoreLifetimes retA retB
typeEqIgnoreLifetimes (StructTy a tyVarsA) (StructTy b tyVarsB) =
a == b
&& all (== True) (zipWith typeEqIgnoreLifetimes tyVarsA tyVarsB)
typeEqIgnoreLifetimes a b = a == b
data SumTyCase = SumTyCase
{ caseName :: String,
caseMembers :: [(String, Ty)]
}
deriving (Show, Ord, Eq)
fnOrLambda :: String
fnOrLambda =
case platform of
Windows -> "Fn"
_ -> "Fn" -- "λ"
instance Show Ty where
show IntTy = "Int"
show FloatTy = "Float"
show DoubleTy = "Double"
show LongTy = "Long"
show ByteTy = "Byte"
show BoolTy = "Bool"
show StringTy = "String"
show PatternTy = "Pattern"
show CharTy = "Char"
show CCharTy = "CChar"
show (FuncTy argTys retTy StaticLifetimeTy) = "(" ++ fnOrLambda ++ " [" ++ joinWithComma (map show argTys) ++ "] " ++ show retTy ++ ")"
show (FuncTy argTys retTy lt) = "(" ++ fnOrLambda ++ " [" ++ joinWithComma (map show argTys) ++ "] " ++ show retTy ++ " " ++ show lt ++ ")"
show (VarTy t) = t
show UnitTy = "()"
show ModuleTy = "Module"
show TypeTy = "Type"
show InterfaceTy = "Interface"
show (StructTy s []) = show s
show (StructTy s typeArgs) = "(" ++ show s ++ " " ++ joinWithSpace (map show typeArgs) ++ ")"
show (ConcreteNameTy spath) = show spath
show (PointerTy p) = "(Ptr " ++ show p ++ ")"
show (RefTy r lt) =
-- case r of
-- PointerTy _ -> listView
--   StructTy _ _ -> listView
-- FuncTy _ _ -> listView
-- _ -> "&" ++ show r
-- where listView = "(Ref " ++ show r ++ ")"
"(Ref " ++ show r ++ " " ++ show lt ++ ")"
show StaticLifetimeTy = "StaticLifetime"
show MacroTy = "Macro"
show DynamicTy = "Dynamic"
show Universe = "Universe"
show CTy = "C"
showMaybeTy :: Maybe Ty -> String
showMaybeTy (Just t) = show t
showMaybeTy Nothing = "(missing-type)"
doesTypeContainTyVarWithName :: String -> Ty -> Bool
doesTypeContainTyVarWithName name (VarTy n) = name == n
doesTypeContainTyVarWithName name (FuncTy argTys retTy lt) =
doesTypeContainTyVarWithName name lt
|| any (doesTypeContainTyVarWithName name) argTys
|| doesTypeContainTyVarWithName name retTy
doesTypeContainTyVarWithName name (StructTy n tyArgs) = doesTypeContainTyVarWithName name n || any (doesTypeContainTyVarWithName name) tyArgs
doesTypeContainTyVarWithName name (PointerTy p) = doesTypeContainTyVarWithName name p
doesTypeContainTyVarWithName name (RefTy r lt) =
doesTypeContainTyVarWithName name r
|| doesTypeContainTyVarWithName name lt
doesTypeContainTyVarWithName _ _ = False
replaceConflicted :: String -> Ty -> Ty
replaceConflicted name (VarTy n) =
if n == name
then VarTy (n ++ "conflicted")
else VarTy n
replaceConflicted name (FuncTy argTys retTy lt) =
FuncTy
(map (replaceConflicted name) argTys)
(replaceConflicted name retTy)
(replaceConflicted name lt)
replaceConflicted name (StructTy n tyArgs) = StructTy (replaceConflicted name n) (map (replaceConflicted name) tyArgs)
replaceConflicted name (PointerTy p) = PointerTy (replaceConflicted name p)
replaceConflicted name (RefTy r lt) =
RefTy
(replaceConflicted name r)
(replaceConflicted name lt)
replaceConflicted _ t = t
-- | Map type variable names to actual types, eg. t0 => Int, t1 => Float
type TypeMappings = Map.Map String Ty
-- | From two types, one with type variables and one without (e.g. (Fn ["t0"] "t1") and (Fn [Int] Bool))
-- create mappings that translate from the type variables to concrete types, e.g. "t0" => Int, "t1" => Bool
unifySignatures :: Ty -> Ty -> TypeMappings
unifySignatures at ct = Map.fromList (unify at ct)
where
unify :: Ty -> Ty -> [(String, Ty)]
unify (VarTy _) (VarTy _) = [] -- if a == b then [] else error ("Can't unify " ++ show a ++ " with " ++ show b)
unify (VarTy a) value = [(a, value)]
unify (StructTy v'@(VarTy _) aArgs) (StructTy n bArgs) = unify v' n ++ concat (zipWith unify aArgs bArgs)
unify (StructTy a@(ConcreteNameTy _) aArgs) (StructTy b bArgs)
| a == b = concat (zipWith unify aArgs bArgs)
| otherwise = [] -- error ("Can't unify " ++ a ++ " with " ++ b)
unify (StructTy _ _) _ = [] -- error ("Can't unify " ++ show a ++ " with " ++ show b)
unify (PointerTy a) (PointerTy b) = unify a b
unify (PointerTy _) _ = [] -- error ("Can't unify " ++ show a ++ " with " ++ show b)
unify (RefTy a ltA) (RefTy b ltB) = unify a b ++ unify ltA ltB
unify (RefTy _ _) _ = [] -- error ("Can't unify " ++ show a ++ " with " ++ show b)
unify (FuncTy argTysA retTyA ltA) (FuncTy argTysB retTyB ltB) =
let argToks = concat (zipWith unify argTysA argTysB)
retToks = unify retTyA retTyB
ltToks = unify ltA ltB
in ltToks ++ argToks ++ retToks
unify FuncTy {} _ = [] -- error ("Can't unify " ++ show a ++ " with " ++ show b)
unify a b
| a == b = []
| otherwise = [] -- error ("Can't unify " ++ show a ++ " with " ++ show b)
-- | Checks if two types will unify
areUnifiable :: Ty -> Ty -> Bool
areUnifiable (VarTy _) (VarTy _) = True
areUnifiable (VarTy _) _ = True
areUnifiable _ (VarTy _) = True
areUnifiable (StructTy a aArgs) (StructTy b bArgs)
| length aArgs /= length bArgs = False
| areUnifiable a b =
let argBools = zipWith areUnifiable aArgs bArgs
in all (== True) argBools
| otherwise = False
areUnifiable (StructTy (VarTy _) aArgs) (FuncTy bArgs _ _)
| length aArgs /= length bArgs = False
| otherwise = all (== True) (zipWith areUnifiable aArgs bArgs)
areUnifiable (StructTy (VarTy _) args) (RefTy _ _)
| length args == 2 = True
| otherwise = False
areUnifiable (StructTy _ _) _ = False
areUnifiable (PointerTy a) (PointerTy b) = areUnifiable a b
areUnifiable (PointerTy _) _ = False
areUnifiable (RefTy a ltA) (RefTy b ltB) = areUnifiable a b && areUnifiable ltA ltB
areUnifiable RefTy {} _ = False
areUnifiable (FuncTy argTysA retTyA ltA) (FuncTy argTysB retTyB ltB)
| length argTysA /= length argTysB = False
| otherwise =
let argBools = zipWith areUnifiable argTysA argTysB
retBool = areUnifiable retTyA retTyB
ltBool = areUnifiable ltA ltB
in all (== True) (ltBool : retBool : argBools)
areUnifiable FuncTy {} _ = False
areUnifiable CTy _ = True
areUnifiable _ CTy = True
areUnifiable a b
| a == b = True
| otherwise = False
-- Checks whether or not the kindedness of types match
-- Kinds are polymorphic constructors such as (f a)
-- Note that this disagrees with the notion of unifiability in areUnifiable
checkKinds :: Ty -> Ty -> Bool
-- Base < Higher
checkKinds (FuncTy argTysA retTyA _) (FuncTy argTysB retTyB _) =
let argKinds = zipWith checkKinds argTysA argTysB
retKinds = tyToKind retTyA <= tyToKind retTyB
in all (== True) (retKinds : argKinds)
checkKinds t t' = tyToKind t <= tyToKind t'
-- | Put concrete types into the places where there are type variables.
-- For example (Fn [a] b) => (Fn [Int] Bool)
-- NOTE: If a concrete type can't be found, the type variable will stay the same.
replaceTyVars :: TypeMappings -> Ty -> Ty
replaceTyVars mappings t =
case t of
(VarTy key) -> fromMaybe t (Map.lookup key mappings)
(FuncTy argTys retTy lt) -> FuncTy (map (replaceTyVars mappings) argTys) (replaceTyVars mappings retTy) (replaceTyVars mappings lt)
(StructTy name tyArgs) ->
case replaceTyVars mappings name of
-- special case, struct (f a b) mapped to (RefTy a lt)
-- We map f in such a case to the full (Ref a lt) in constraints; we also still map
-- individual members a and b, as these need mappings since they may be
-- referred to in other places (e.g. (Fn [(f a b)] a)--without a mapping,
-- a would remain generic here.
(RefTy a lt) -> replaceTyVars mappings (RefTy a lt)
_ -> StructTy (replaceTyVars mappings name) (fmap (replaceTyVars mappings) tyArgs)
(PointerTy x) -> PointerTy (replaceTyVars mappings x)
(RefTy x lt) -> RefTy (replaceTyVars mappings x) (replaceTyVars mappings lt)
_ -> t
-- | The type of a type's copying function.
typesCopyFunctionType :: Ty -> Ty
typesCopyFunctionType memberType = FuncTy [RefTy memberType (VarTy "q")] memberType StaticLifetimeTy
-- | The type of a type's deleter function.
typesDeleterFunctionType :: Ty -> Ty
typesDeleterFunctionType memberType = FuncTy [memberType] UnitTy StaticLifetimeTy
| The type of environments sent to Lambdas ( used in emitted C code )
lambdaEnvTy :: Ty
lambdaEnvTy = StructTy (ConcreteNameTy (SymPath [] "LambdaEnv")) []
createStructName :: [String] -> String -> String
createStructName path name = intercalate "." (path ++ [name])
getStructName :: Ty -> String
getStructName (StructTy (ConcreteNameTy spath) _) = show spath
getStructName (StructTy (VarTy name) _) = name
getStructName _ = ""
getPathFromStructName :: String -> [String]
getPathFromStructName structName =
let path = map unpack (splitOn (pack ".") (pack structName))
in if length path > 1 then init path else []
getNameFromStructName :: String -> String
getNameFromStructName structName = last (map unpack (splitOn (pack ".") (pack structName)))
getStructPath :: Ty -> SymPath
getStructPath (StructTy (ConcreteNameTy spath) _) = spath
getStructPath (StructTy (VarTy name) _) = (SymPath [] name)
getStructPath _ = (SymPath [] "")
-- N.B.: promoteNumber is only safe for numeric types!
promoteNumber :: Ty -> Ty -> Ty
promoteNumber a b | a == b = a
promoteNumber ByteTy other = other
promoteNumber other ByteTy = other
promoteNumber IntTy other = other
promoteNumber other IntTy = other
promoteNumber LongTy other = other
promoteNumber other LongTy = other
promoteNumber FloatTy other = other
promoteNumber other FloatTy = other
promoteNumber DoubleTy _ = DoubleTy
promoteNumber _ DoubleTy = DoubleTy
promoteNumber a b =
error ("promoteNumber called with non-numbers: " ++ show a ++ ", " ++ show b)
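A minimal usage sketch of the unification API above (an illustration, not part of the original Types.hs; it assumes the module builds as listed and uses only names from its export list):

module TypesExample where

import Types

-- A generic signature (Fn [a] b) and a concrete one (Fn [Int] Bool).
genericFn :: Ty
genericFn = FuncTy [VarTy "a"] (VarTy "b") StaticLifetimeTy

concreteFn :: Ty
concreteFn = FuncTy [IntTy] BoolTy StaticLifetimeTy

-- unifySignatures maps "a" => IntTy and "b" => BoolTy, and replaceTyVars
-- then rewrites the generic signature into the concrete one.
resolvedFn :: Ty
resolvedFn = replaceTyVars (unifySignatures genericFn concreteFn) genericFn

-- areUnifiable is the cheap compatibility check; checkKinds additionally
-- rejects candidates whose kinds (Base vs Higher) do not line up.
compatible :: Bool
compatible = areUnifiable genericFn concreteFn && checkKinds genericFn concreteFn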
import Debug.Trace
| Carp types.
the name (possibly a var) of the struct, and it's type parameters
the name of a struct
the type of types
the type of dynamic functions (used in REPL and macros)
C literals
the type of types of types (the type of TypeTy)
| Kinds checking
unification checks are sufficient to determine whether their arities are compatible.
| Check whether or not the kinds of type variables are consistent.
This function will return Left as soon as a variable is used inconsistently,
reporting which variable triggered the issue.
If all variables are used consistently, it will process the whole list and
return ().
Kind arity matters; that is, `(f a b)` is not consistent with
`(f b)`. So long as the kind of a variable is the same across its uses,
everything is OK, for example:
((Foo f a b) [x (f a) y (f b)])
is valid, and so is
((Foo f a b) [x f y a z b])
But a definition such as:
((Foo f a b) [x (f a b) y (f a)])
is inconsistent (kind of `f` differs) and so is
((Foo f a b) [x (f a) y b (b a)])
(kind of `b` is inconsistent.
"λ"
case r of
PointerTy _ -> listView
FuncTy _ _ -> listView
_ -> "&" ++ show r
where listView = "(Ref " ++ show r ++ ")"
| Map type variable names to actual types, eg. t0 => Int, t1 => Float
create mappings that translate from the type variables to concrete types, e.g. "t0" => Int, "t1" => Bool
if a == b then [] else error ("Can't unify " ++ show a ++ " with " ++ show b)
error ("Can't unify " ++ a ++ " with " ++ b)
error ("Can't unify " ++ show a ++ " with " ++ show b)
error ("Can't unify " ++ show a ++ " with " ++ show b)
error ("Can't unify " ++ show a ++ " with " ++ show b)
error ("Can't unify " ++ show a ++ " with " ++ show b)
error ("Can't unify " ++ show a ++ " with " ++ show b)
Checks whether or not the kindedness of types match
Kinds are polymorphic constructors such as (f a)
Base < Higher
| Put concrete types into the places where there are type variables.
For example (Fn [a] b) => (Fn [Int] Bool)
NOTE: If a concrete type can't be found, the type variable will stay the same.
We f in such a case to the full (Ref a lt) in constraints; we also still map
individual members a and b, as these need mappings since they may be
referred to in other places (e.g. (Fn [(f a b)] a)--without a mapping,
a would remain generic here.
| The type of a type's copying function.
| The type of a type's deleter function.
N.B.: promoteNumber is only safe for numeric types! | # LANGUAGE DeriveGeneric #
module Types
( TypeMappings,
Ty (..),
showMaybeTy,
unifySignatures,
replaceTyVars,
areUnifiable,
typesDeleterFunctionType,
typesCopyFunctionType,
doesTypeContainTyVarWithName,
replaceConflicted,
lambdaEnvTy,
typeEqIgnoreLifetimes,
checkKinds,
SymPath (..),
mangle,
pathToC,
consPath,
Kind,
tyToKind,
areKindsConsistent,
createStructName,
getStructName,
getPathFromStructName,
getNameFromStructName,
getStructPath,
promoteNumber,
)
where
import Data.Hashable
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Data.Text (pack, splitOn, unpack)
import GHC.Generics (Generic)
import qualified Map
import SymPath
import Util
data Ty
= IntTy
| LongTy
| ByteTy
| BoolTy
| FloatTy
| DoubleTy
| StringTy
| PatternTy
| CharTy
| CCharTy
In order of appearance : ( 1 ) Argument types , ( 2 ) Return type , ( 3 ) Lifetime
| VarTy String
| UnitTy
| ModuleTy
| PointerTy Ty
second Ty is the lifetime
| StaticLifetimeTy
| MacroTy
| InterfaceTy
deriving (Eq, Ord, Generic)
instance Hashable Ty
Carp 's system is simple enough that we do not need to describe kinds by their airty .
After confirming two tys have either base or higher kind
data Kind
= Base
| Higher
deriving (Eq, Ord, Show)
tyToKind :: Ty -> Kind
tyToKind (StructTy _ _) = Higher
the type of functions , consider the ( - > ) constructor in Haskell
tyToKind (PointerTy _) = Higher
may also be treated as a data constructor
tyToKind _ = Base
areKindsConsistent :: [Ty] -> Either String ()
areKindsConsistent typeVars =
assignKinds typeVars Map.empty
where
assignKinds :: [Ty] -> Map.Map String Int -> Either String ()
assignKinds ((StructTy (VarTy name) vars) : rest) arityMap =
case Map.lookup name arityMap of
Nothing -> assignKinds next (Map.insert name kind arityMap)
Just k ->
if k == kind
then assignKinds next arityMap
else Left name
where
next = vars ++ rest
kind = length vars
assignKinds ((VarTy v) : rest) arityMap =
case Map.lookup v arityMap of
Nothing -> assignKinds rest (Map.insert v kind arityMap)
Just k ->
if k == kind
then assignKinds rest arityMap
else Left v
where
kind = 0
assignKinds (FuncTy args ret _ : rest) arityMap =
assignKinds (args ++ ret : rest) arityMap
assignKinds ((PointerTy p) : rest) arityMap =
assignKinds (p : rest) arityMap
assignKinds ((RefTy r _) : rest) arityMap =
assignKinds (r : rest) arityMap
assignKinds (_ : rest) arityMap = assignKinds rest arityMap
assignKinds [] _ = pure ()
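-- A small usage sketch of the kind-consistency check (hypothetical values,
-- not part of the original source):
--
--   areKindsConsistent [StructTy (VarTy "f") [VarTy "a"], VarTy "a"]
--     ==> Right ()
--   areKindsConsistent [StructTy (VarTy "f") [VarTy "a", VarTy "b"], StructTy (VarTy "f") [VarTy "a"]]
--     ==> Left "f"
--
-- In the second call "f" is applied to two arguments and then to one, so its
-- arity (kind) is inconsistent and the offending type variable name is returned.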
-- Exactly like '==' for Ty, but ignore lifetime parameter
typeEqIgnoreLifetimes :: Ty -> Ty -> Bool
typeEqIgnoreLifetimes (RefTy a _) (RefTy b _) = a == b
typeEqIgnoreLifetimes (FuncTy argsA retA _) (FuncTy argsB retB _) =
all (== True) (zipWith typeEqIgnoreLifetimes argsA argsB)
&& typeEqIgnoreLifetimes retA retB
typeEqIgnoreLifetimes (StructTy a tyVarsA) (StructTy b tyVarsB) =
a == b
&& all (== True) (zipWith typeEqIgnoreLifetimes tyVarsA tyVarsB)
typeEqIgnoreLifetimes a b = a == b
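-- Sketch (hypothetical values): the lifetime argument of a Ref is ignored,
-- so references to equal types compare equal even under different lifetimes:
--
--   typeEqIgnoreLifetimes (RefTy IntTy (VarTy "p")) (RefTy IntTy (VarTy "q"))  ==> True
--   typeEqIgnoreLifetimes (RefTy IntTy (VarTy "p")) (RefTy BoolTy (VarTy "p")) ==> False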
data SumTyCase = SumTyCase
{ caseName :: String,
caseMembers :: [(String, Ty)]
}
deriving (Show, Ord, Eq)
fnOrLambda :: String
fnOrLambda =
case platform of
Windows -> "Fn"
_ -> "λ"
instance Show Ty where
show IntTy = "Int"
show FloatTy = "Float"
show DoubleTy = "Double"
show LongTy = "Long"
show ByteTy = "Byte"
show BoolTy = "Bool"
show StringTy = "String"
show PatternTy = "Pattern"
show CharTy = "Char"
show CCharTy = "CChar"
show (FuncTy argTys retTy StaticLifetimeTy) = "(" ++ fnOrLambda ++ " [" ++ joinWithComma (map show argTys) ++ "] " ++ show retTy ++ ")"
show (FuncTy argTys retTy lt) = "(" ++ fnOrLambda ++ " [" ++ joinWithComma (map show argTys) ++ "] " ++ show retTy ++ " " ++ show lt ++ ")"
show (VarTy t) = t
show UnitTy = "()"
show ModuleTy = "Module"
show TypeTy = "Type"
show InterfaceTy = "Interface"
show (StructTy s []) = show s
show (StructTy s typeArgs) = "(" ++ show s ++ " " ++ joinWithSpace (map show typeArgs) ++ ")"
show (ConcreteNameTy spath) = show spath
show (PointerTy p) = "(Ptr " ++ show p ++ ")"
show (RefTy r lt) =
-- StructTy _ _ -> listView
"(Ref " ++ show r ++ " " ++ show lt ++ ")"
show StaticLifetimeTy = "StaticLifetime"
show MacroTy = "Macro"
show DynamicTy = "Dynamic"
show Universe = "Universe"
show CTy = "C"
showMaybeTy :: Maybe Ty -> String
showMaybeTy (Just t) = show t
showMaybeTy Nothing = "(missing-type)"
doesTypeContainTyVarWithName :: String -> Ty -> Bool
doesTypeContainTyVarWithName name (VarTy n) = name == n
doesTypeContainTyVarWithName name (FuncTy argTys retTy lt) =
doesTypeContainTyVarWithName name lt
|| any (doesTypeContainTyVarWithName name) argTys
|| doesTypeContainTyVarWithName name retTy
doesTypeContainTyVarWithName name (StructTy n tyArgs) = doesTypeContainTyVarWithName name n || any (doesTypeContainTyVarWithName name) tyArgs
doesTypeContainTyVarWithName name (PointerTy p) = doesTypeContainTyVarWithName name p
doesTypeContainTyVarWithName name (RefTy r lt) =
doesTypeContainTyVarWithName name r
|| doesTypeContainTyVarWithName name lt
doesTypeContainTyVarWithName _ _ = False
replaceConflicted :: String -> Ty -> Ty
replaceConflicted name (VarTy n) =
if n == name
then VarTy (n ++ "conflicted")
else VarTy n
replaceConflicted name (FuncTy argTys retTy lt) =
FuncTy
(map (replaceConflicted name) argTys)
(replaceConflicted name retTy)
(replaceConflicted name lt)
replaceConflicted name (StructTy n tyArgs) = StructTy (replaceConflicted name n) (map (replaceConflicted name) tyArgs)
replaceConflicted name (PointerTy p) = PointerTy (replaceConflicted name p)
replaceConflicted name (RefTy r lt) =
RefTy
(replaceConflicted name r)
(replaceConflicted name lt)
replaceConflicted _ t = t
type TypeMappings = Map.Map String Ty
-- | From two types, one with type variables and one without (e.g. (Fn ["t0"] "t1") and (Fn [Int] Bool))
unifySignatures :: Ty -> Ty -> TypeMappings
unifySignatures at ct = Map.fromList (unify at ct)
where
unify :: Ty -> Ty -> [(String, Ty)]
unify (VarTy a) value = [(a, value)]
unify (StructTy v'@(VarTy _) aArgs) (StructTy n bArgs) = unify v' n ++ concat (zipWith unify aArgs bArgs)
unify (StructTy a@(ConcreteNameTy _) aArgs) (StructTy b bArgs)
| a == b = concat (zipWith unify aArgs bArgs)
unify (PointerTy a) (PointerTy b) = unify a b
unify (RefTy a ltA) (RefTy b ltB) = unify a b ++ unify ltA ltB
unify (FuncTy argTysA retTyA ltA) (FuncTy argTysB retTyB ltB) =
let argToks = concat (zipWith unify argTysA argTysB)
retToks = unify retTyA retTyB
ltToks = unify ltA ltB
in ltToks ++ argToks ++ retToks
unify a b
| a == b = []
| otherwise = error ("Can't unify " ++ show a ++ " with " ++ show b)
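-- Usage sketch (hypothetical values): unifying a polymorphic signature with a
-- concrete one yields the substitution for its type variables, e.g.
--
--   unifySignatures (FuncTy [VarTy "t0"] (VarTy "t1") StaticLifetimeTy)
--                   (FuncTy [IntTy] BoolTy StaticLifetimeTy)
--
-- produces the mapping "t0" => IntTy, "t1" => BoolTy.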
-- | Checks if two types will unify
areUnifiable :: Ty -> Ty -> Bool
areUnifiable (VarTy _) (VarTy _) = True
areUnifiable (VarTy _) _ = True
areUnifiable _ (VarTy _) = True
areUnifiable (StructTy a aArgs) (StructTy b bArgs)
| length aArgs /= length bArgs = False
| areUnifiable a b =
let argBools = zipWith areUnifiable aArgs bArgs
in all (== True) argBools
| otherwise = False
areUnifiable (StructTy (VarTy _) aArgs) (FuncTy bArgs _ _)
| length aArgs /= length bArgs = False
| otherwise = all (== True) (zipWith areUnifiable aArgs bArgs)
areUnifiable (StructTy (VarTy _) args) (RefTy _ _)
| length args == 2 = True
| otherwise = False
areUnifiable (StructTy _ _) _ = False
areUnifiable (PointerTy a) (PointerTy b) = areUnifiable a b
areUnifiable (PointerTy _) _ = False
areUnifiable (RefTy a ltA) (RefTy b ltB) = areUnifiable a b && areUnifiable ltA ltB
areUnifiable RefTy {} _ = False
areUnifiable (FuncTy argTysA retTyA ltA) (FuncTy argTysB retTyB ltB)
| length argTysA /= length argTysB = False
| otherwise =
let argBools = zipWith areUnifiable argTysA argTysB
retBool = areUnifiable retTyA retTyB
ltBool = areUnifiable ltA ltB
in all (== True) (ltBool : retBool : argBools)
areUnifiable FuncTy {} _ = False
areUnifiable CTy _ = True
areUnifiable _ CTy = True
areUnifiable a b
| a == b = True
| otherwise = False
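-- Sketch (hypothetical values): a type variable unifies with anything, while
-- mismatched concrete shapes do not:
--
--   areUnifiable (VarTy "a") IntTy                         ==> True
--   areUnifiable (PointerTy IntTy) IntTy                   ==> False
--   areUnifiable (PointerTy (VarTy "a")) (PointerTy IntTy) ==> True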
-- Note that this disagrees with the notion of unifiability in areUnifiable
checkKinds :: Ty -> Ty -> Bool
checkKinds (FuncTy argTysA retTyA _) (FuncTy argTysB retTyB _) =
let argKinds = zipWith checkKinds argTysA argTysB
retKinds = tyToKind retTyA <= tyToKind retTyB
in all (== True) (retKinds : argKinds)
checkKinds t t' = tyToKind t <= tyToKind t'
replaceTyVars :: TypeMappings -> Ty -> Ty
replaceTyVars mappings t =
case t of
(VarTy key) -> fromMaybe t (Map.lookup key mappings)
(FuncTy argTys retTy lt) -> FuncTy (map (replaceTyVars mappings) argTys) (replaceTyVars mappings retTy) (replaceTyVars mappings lt)
(StructTy name tyArgs) ->
case replaceTyVars mappings name of
-- special case, struct (f a b) mapped to (RefTy a lt)
(RefTy a lt) -> replaceTyVars mappings (RefTy a lt)
_ -> StructTy (replaceTyVars mappings name) (fmap (replaceTyVars mappings) tyArgs)
(PointerTy x) -> PointerTy (replaceTyVars mappings x)
(RefTy x lt) -> RefTy (replaceTyVars mappings x) (replaceTyVars mappings lt)
_ -> t
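-- Sketch (hypothetical values): with a mapping "a" => IntTy,
--
--   replaceTyVars mappings (FuncTy [VarTy "a"] BoolTy StaticLifetimeTy)
--     ==> FuncTy [IntTy] BoolTy StaticLifetimeTy
--
-- Variables without a mapping are left untouched.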
typesCopyFunctionType :: Ty -> Ty
typesCopyFunctionType memberType = FuncTy [RefTy memberType (VarTy "q")] memberType StaticLifetimeTy
typesDeleterFunctionType :: Ty -> Ty
typesDeleterFunctionType memberType = FuncTy [memberType] UnitTy StaticLifetimeTy
-- | The type of environments sent to Lambdas (used in emitted C code)
lambdaEnvTy :: Ty
lambdaEnvTy = StructTy (ConcreteNameTy (SymPath [] "LambdaEnv")) []
createStructName :: [String] -> String -> String
createStructName path name = intercalate "." (path ++ [name])
getStructName :: Ty -> String
getStructName (StructTy (ConcreteNameTy spath) _) = show spath
getStructName (StructTy (VarTy name) _) = name
getStructName _ = ""
getPathFromStructName :: String -> [String]
getPathFromStructName structName =
let path = map unpack (splitOn (pack ".") (pack structName))
in if length path > 1 then init path else []
getNameFromStructName :: String -> String
getNameFromStructName structName = last (map unpack (splitOn (pack ".") (pack structName)))
getStructPath :: Ty -> SymPath
getStructPath (StructTy (ConcreteNameTy spath) _) = spath
getStructPath (StructTy (VarTy name) _) = (SymPath [] name)
getStructPath _ = (SymPath [] "")
promoteNumber :: Ty -> Ty -> Ty
promoteNumber a b | a == b = a
promoteNumber ByteTy other = other
promoteNumber other ByteTy = other
promoteNumber IntTy other = other
promoteNumber other IntTy = other
promoteNumber LongTy other = other
promoteNumber other LongTy = other
promoteNumber FloatTy other = other
promoteNumber other FloatTy = other
promoteNumber DoubleTy _ = DoubleTy
promoteNumber _ DoubleTy = DoubleTy
promoteNumber a b =
error ("promoteNumber called with non-numbers: " ++ show a ++ ", " ++ show b)
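-- Promotion follows the usual numeric widening order, e.g. (hypothetical values):
--
--   promoteNumber IntTy FloatTy   ==> FloatTy
--   promoteNumber ByteTy LongTy   ==> LongTy
--   promoteNumber DoubleTy IntTy  ==> DoubleTy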
|
9e190f25b445d0e1337e846ecf137f555ae4bcbd08f176fef6e158d35880e216 | polyfy/polylith | core.clj | (ns polylith.clj.core.ws-explorer.core
(:require [clojure.pprint :as pp]
[clojure.string :as str]
[puget.printer :as puget]
[clojure.walk :as walk]
[polylith.clj.core.util.interface.str :as str-util]
[polylith.clj.core.util.interface.color :as color]))
(def color-schema
{:color-scheme {:nil [:magenta]
:number [:yellow]
:string [:yellow]
:boolean [:magenta]
:keyword [:magenta]
:delimiter [:white]}})
(defn intify [arg]
(try
(Integer/parseUnsignedInt arg)
(catch Exception _
arg)))
(defn keys? [key]
(contains? #{"" "keys"} key))
(defn search? [key-name]
(str/ends-with? key-name "*"))
(defn match-str? [value key-name]
(when value
(if (keys? key-name)
true
(if (search? key-name)
(str/starts-with? value (str-util/drop-last 1 key-name))
(= value key-name)))))
(defn value-from-map [m key-name]
(let [k (keyword key-name)
mm (into {} m)]
(cond
(contains? mm k) (mm k)
(contains? mm key-name) (mm key-name)
(search? key-name) (mapv keyword
(filter #(match-str? % key-name)
(map name (keys mm)))))))
(defn match? [value key-name]
(or (and (map? value)
(or (match-str? (:name value) key-name)
(match-str? (:alias value) key-name)))
(and (string? value)
(match-str? value key-name))
(and (keyword? value)
(match-str? (name value) key-name))))
(defn value-from-vector [v index-or-name]
(let [i (intify index-or-name)]
(if (integer? i)
(v i)
(let [values (filterv #(match? % index-or-name) v)]
(if (= 1 (count values))
(if (search? index-or-name)
values
(first values))
values)))))
(defn vector-key [value]
(if (map? value)
(:name value)
value))
(defn keys-value [value]
(cond
(map? value) (vec (sort (keys value)))
(vector? value) (mapv vector-key value)))
(defn extract-value [value keys]
(let [key (first keys)]
(cond
(nil? key) value
(contains? #{"" "keys"} key) (recur (keys-value value) (rest keys))
(= "count" key) (when (counted? value) (count value))
:else (cond
(map? value) (recur (value-from-map value key) (rest keys))
(vector? value) (recur (value-from-vector value key) (rest keys))
:else value))))
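;; A minimal sketch of how the key path is walked (hypothetical workspace data,
;; not taken from a real workspace):
(comment
  (extract-value {:settings {:profiles {:default {}}}} ["settings" "keys"])
  ;; => [:profiles]
  (extract-value {:components [{:name "util"} {:name "ws-explorer"}]} ["components" "count"])
  ;; => 2
  )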
(defn do-replace [value {:keys [from to]}]
(if (string? value)
(str/replace value (re-pattern from) to)
value))
(defn replace-fn [replace]
(fn [value] (reduce do-replace value replace)))
(defn replace-values [value replace]
(if replace
(walk/postwalk (replace-fn replace) value)
value))
(defn extract [workspace values]
(let [replace (-> workspace :user-input :replace)
value (-> (extract-value workspace values)
(replace-values replace))]
(if (map? value)
(into (sorted-map) value)
value)))
(defn adjust-keys [get]
(let [values (if (or (nil? get)
(sequential? get)) get [get])]
(if (= "" (last values))
(drop-last values)
values)))
(defn ws [workspace get out color-mode]
(let [values (adjust-keys get)]
(if (nil? out)
(if (= color/none color-mode)
(pp/pprint (extract workspace values))
(puget/cprint (extract workspace values) color-schema))
(pp/pprint (extract workspace values) (clojure.java.io/writer out)))))
| null | https://raw.githubusercontent.com/polyfy/polylith/addb82f4f8755625568add75162429e9b18972e1/components/ws-explorer/src/polylith/clj/core/ws_explorer/core.clj | clojure | (ns polylith.clj.core.ws-explorer.core
(:require [clojure.pprint :as pp]
[clojure.string :as str]
[puget.printer :as puget]
[clojure.walk :as walk]
[polylith.clj.core.util.interface.str :as str-util]
[polylith.clj.core.util.interface.color :as color]))
(def color-schema
{:color-scheme {:nil [:magenta]
:number [:yellow]
:string [:yellow]
:boolean [:magenta]
:keyword [:magenta]
:delimiter [:white]}})
(defn intify [arg]
(try
(Integer/parseUnsignedInt arg)
(catch Exception _
arg)))
(defn keys? [key]
(contains? #{"" "keys"} key))
(defn search? [key-name]
(str/ends-with? key-name "*"))
(defn match-str? [value key-name]
(when value
(if (keys? key-name)
true
(if (search? key-name)
(str/starts-with? value (str-util/drop-last 1 key-name))
(= value key-name)))))
(defn value-from-map [m key-name]
(let [k (keyword key-name)
mm (into {} m)]
(cond
(contains? mm k) (mm k)
(contains? mm key-name) (mm key-name)
(search? key-name) (mapv keyword
(filter #(match-str? % key-name)
(map name (keys mm)))))))
(defn match? [value key-name]
(or (and (map? value)
(or (match-str? (:name value) key-name)
(match-str? (:alias value) key-name)))
(and (string? value)
(match-str? value key-name))
(and (keyword? value)
(match-str? (name value) key-name))))
(defn value-from-vector [v index-or-name]
(let [i (intify index-or-name)]
(if (integer? i)
(v i)
(let [values (filterv #(match? % index-or-name) v)]
(if (= 1 (count values))
(if (search? index-or-name)
values
(first values))
values)))))
(defn vector-key [value]
(if (map? value)
(:name value)
value))
(defn keys-value [value]
(cond
(map? value) (vec (sort (keys value)))
(vector? value) (mapv vector-key value)))
(defn extract-value [value keys]
(let [key (first keys)]
(cond
(nil? key) value
(contains? #{"" "keys"} key) (recur (keys-value value) (rest keys))
(= "count" key) (when (counted? value) (count value))
:else (cond
(map? value) (recur (value-from-map value key) (rest keys))
(vector? value) (recur (value-from-vector value key) (rest keys))
:else value))))
(defn do-replace [value {:keys [from to]}]
(if (string? value)
(str/replace value (re-pattern from) to)
value))
(defn replace-fn [replace]
(fn [value] (reduce do-replace value replace)))
(defn replace-values [value replace]
(if replace
(walk/postwalk (replace-fn replace) value)
value))
(defn extract [workspace values]
(let [replace (-> workspace :user-input :replace)
value (-> (extract-value workspace values)
(replace-values replace))]
(if (map? value)
(into (sorted-map) value)
value)))
(defn adjust-keys [get]
(let [values (if (or (nil? get)
(sequential? get)) get [get])]
(if (= "" (last values))
(drop-last values)
values)))
(defn ws [workspace get out color-mode]
(let [values (adjust-keys get)]
(if (nil? out)
(if (= color/none color-mode)
(pp/pprint (extract workspace values))
(puget/cprint (extract workspace values) color-schema))
(pp/pprint (extract workspace values) (clojure.java.io/writer out)))))
|
|
c51c90c8392f5002e6ab42d45254ba1ea3524915118f0bf83eab9dd844bbb3a6 | Frozenlock/reagent-modals | modals.cljs | (ns reagent-modals.modals
(:require [reagent.core :as r :refer [atom]]
[goog.dom :as dom]
[goog.events :as events])
(:import [goog.events EventType]))
;;; Make sure to create the modal-window element somewhere in the dom.
;;; Recommended: at the start of the document.
(def modal-id "reagent-modal")
(defonce modal-content (atom {:content nil;[:div]
:shown nil
:size nil}))
(defn get-modal []
(dom/getElement modal-id))
(defn- with-opts [opts]
(let [m (js/jQuery (get-modal))]
(.call (aget m "modal") m opts)
(.call (aget m "modal") m "show")
m))
(defmulti show-modal! (fn [args] (map? args))) ;; backward compatibility
(defmethod show-modal! true
[{:keys [keyboard backdrop] :or {keyboard true backdrop true}}]
(with-opts #js {:keyboard keyboard :backdrop backdrop}))
(defmethod show-modal! false [keyboard]
(with-opts #js {:keyboard keyboard}))
(defn close-modal! []
(let [m (js/jQuery (get-modal))]
(.call (aget m "modal") m "hide")))
(defn close-button
"A pre-configured close button. Just include it anywhere in the
modal to let the user dismiss it." []
[:button.close {:type "button" :data-dismiss "modal"}
[:span.glyphicon.glyphicon-remove {:aria-hidden "true"}]
[:span.sr-only "Close"]])
(defn modal-window []
(let [unmounting? (atom nil)]
(r/create-class
{:component-did-mount
(fn [e] (let [m (js/jQuery (get-modal))]
(.call (aget m "on") m "hidden.bs.modal"
#(do (when-let [f (:hidden @modal-content)] (f))
;; don't erase the content if we are
;; unmounting the modal window, we are
;; probably only reloading the app.
(when-not @unmounting?
(swap! modal-content assoc :content nil))))
(.call (aget m "on") m "shown.bs.modal"
#(when-let [f (:shown @modal-content)] (f)))
(.call (aget m "on") m "hide.bs.modal"
#(when-let [f (:hide @modal-content)] (f))))
;; we might need to show the modal after an app reload.
(let [mc @modal-content]
(when (:content mc)
(show-modal! mc))))
:component-will-unmount (fn []
(reset! unmounting? true)
(close-modal!))
:reagent-render
(fn []
(let [{:keys [content size]} @modal-content
size-class {:lg "modal-lg"
:sm "modal-sm"}]
[:div.modal.fade {:id modal-id :tab-index -1 :role "dialog"}
[:div.modal-dialog {:class (get size-class size)}
[:div.modal-content
content]]]))})))
;;; main function
(defn modal!
"Update and show the modal window. `reagent-content' is a normal
reagent component. `configs' is an optional map of advanced
configurations:
- :shown -> a function called once the modal is shown.
- :hide -> a function called once the modal is asked to hide.
- :hidden -> a function called once the modal is hidden.
- :size -> Can be :lg (large) or :sm (small). Everything else defaults to medium.
- :keyboard -> if true, `esc' key can dismiss the modal. Default to true.
- :backdrop -> true (default): backdrop.
\"static\" : backdrop, but doesn't close the model when clicked upon.
false : no backdrop."
([reagent-content] (modal! reagent-content nil))
([reagent-content configs]
(reset! modal-content (merge {:content reagent-content} configs))
(show-modal! (select-keys configs [:keyboard :backdrop]))))
| null | https://raw.githubusercontent.com/Frozenlock/reagent-modals/16e94fe739eb862e27d0427a746a1c608d78dedf/src/reagent_modals/modals.cljs | clojure | Make sure to create the modal-window element somewhere in the dom.
Recommended: at the start of the document.
[:div]
backward compatibility
don't erase the content if we are
unmounting the modal window, we are
probably only reloading the app.
we might need to show the modal after an app reload.
main function | (ns reagent-modals.modals
(:require [reagent.core :as r :refer [atom]]
[goog.dom :as dom]
[goog.events :as events])
(:import [goog.events EventType]))
(def modal-id "reagent-modal")
:shown nil
:size nil}))
(defn get-modal []
(dom/getElement modal-id))
(defn- with-opts [opts]
(let [m (js/jQuery (get-modal))]
(.call (aget m "modal") m opts)
(.call (aget m "modal") m "show")
m))
(defmethod show-modal! true
[{:keys [keyboard backdrop] :or {keyboard true backdrop true}}]
(with-opts #js {:keyboard keyboard :backdrop backdrop}))
(defmethod show-modal! false [keyboard]
(with-opts #js {:keyboard keyboard}))
(defn close-modal! []
(let [m (js/jQuery (get-modal))]
(.call (aget m "modal") m "hide")))
(defn close-button
"A pre-configured close button. Just include it anywhere in the
modal to let the user dismiss it." []
[:button.close {:type "button" :data-dismiss "modal"}
[:span.glyphicon.glyphicon-remove {:aria-hidden "true"}]
[:span.sr-only "Close"]])
(defn modal-window []
(let [unmounting? (atom nil)]
(r/create-class
{:component-did-mount
(fn [e] (let [m (js/jQuery (get-modal))]
(.call (aget m "on") m "hidden.bs.modal"
#(do (when-let [f (:hidden @modal-content)] (f))
(when-not @unmounting?
(swap! modal-content assoc :content nil))))
(.call (aget m "on") m "shown.bs.modal"
#(when-let [f (:shown @modal-content)] (f)))
(.call (aget m "on") m "hide.bs.modal"
#(when-let [f (:hide @modal-content)] (f))))
(let [mc @modal-content]
(when (:content mc)
(show-modal! mc))))
:component-will-unmount (fn []
(reset! unmounting? true)
(close-modal!))
:reagent-render
(fn []
(let [{:keys [content size]} @modal-content
size-class {:lg "modal-lg"
:sm "modal-sm"}]
[:div.modal.fade {:id modal-id :tab-index -1 :role "dialog"}
[:div.modal-dialog {:class (get size-class size)}
[:div.modal-content
content]]]))})))
(defn modal!
"Update and show the modal window. `reagent-content' is a normal
reagent component. `configs' is an optional map of advanced
configurations:
- :shown -> a function called once the modal is shown.
- :hide -> a function called once the modal is asked to hide.
- :hidden -> a function called once the modal is hidden.
- :size -> Can be :lg (large) or :sm (small). Everything else defaults to medium.
- :keyboard -> if true, `esc' key can dismiss the modal. Default to true.
- :backdrop -> true (default): backdrop.
\"static\" : backdrop, but doesn't close the model when clicked upon.
false : no backdrop."
([reagent-content] (modal! reagent-content nil))
([reagent-content configs]
(reset! modal-content (merge {:content reagent-content} configs))
(show-modal! (select-keys configs [:keyboard :backdrop]))))
|
30c0070433616e6e635ac865ea01d38800f79f7a1b2a21f103a5952f6abb42f9 | layerware/hugsql | quotes.clj | (ns princess-bride.db.quotes
(:require [hugsql.core :as hugsql]))
(hugsql/def-db-fns "princess_bride/db/sql/quotes.sql")
(hugsql/def-sqlvec-fns "princess_bride/db/sql/quotes.sql")
| null | https://raw.githubusercontent.com/layerware/hugsql/052c04a2a6dc99c3c35810ddf83416c2fd93c5e5/examples/princess-bride/src/princess_bride/db/quotes.clj | clojure | (ns princess-bride.db.quotes
(:require [hugsql.core :as hugsql]))
(hugsql/def-db-fns "princess_bride/db/sql/quotes.sql")
(hugsql/def-sqlvec-fns "princess_bride/db/sql/quotes.sql")
|
|
aba0a466f125b8bc7e8c9f0c461483bc592a693de4d7756417593e7d90983ff6 | herbelin/coq-hh | eterm.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Environ
open Tacmach
open Term
open Evd
open Names
open Util
open Tacinterp
val mkMetas : int -> constr list
val evar_dependencies : evar_map -> int -> Intset.t
val sort_dependencies : (int * evar_info * Intset.t) list -> (int * evar_info * Intset.t) list
(* env, id, evars, number of function prototypes to try to clear from
evars contexts, object and type *)
val eterm_obligations : env -> identifier -> evar_map -> evar_map -> int ->
?status:obligation_definition_status -> constr -> types ->
(identifier * types * loc * obligation_definition_status * Intset.t *
tactic option) array
(* Existential key, obl. name, type as product, location of the original evar, associated tactic,
status and dependencies as indexes into the array *)
* ((existential_key * identifier) list * ((identifier -> constr) -> constr -> constr)) * constr * types
(* Translations from existential identifiers to obligation identifiers
   and for terms with existentials to closed terms, given a
   translation from obligation identifiers to constrs, new term, new type *)
| null | https://raw.githubusercontent.com/herbelin/coq-hh/296d03d5049fea661e8bdbaf305ed4bf6d2001d2/plugins/subtac/eterm.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
env, id, evars, number of function prototypes to try to clear from
evars contexts, object and type
Existential key, obl. name, type as product, location of the original evar, associated tactic,
status and dependencies as indexes into the array | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Environ
open Tacmach
open Term
open Evd
open Names
open Util
open Tacinterp
val mkMetas : int -> constr list
val evar_dependencies : evar_map -> int -> Intset.t
val sort_dependencies : (int * evar_info * Intset.t) list -> (int * evar_info * Intset.t) list
val eterm_obligations : env -> identifier -> evar_map -> evar_map -> int ->
?status:obligation_definition_status -> constr -> types ->
(identifier * types * loc * obligation_definition_status * Intset.t *
tactic option) array
* ((existential_key * identifier) list * ((identifier -> constr) -> constr -> constr)) * constr * types
Translations from existential identifiers to obligation identifiers
and for terms with existentials to closed terms , given a
translation from obligation identifiers to , new term , new type
and for terms with existentials to closed terms, given a
translation from obligation identifiers to constrs, new term, new type *)
|
e74b419f87cbc5ab0f2336c43d151a2f7ee6c27d9e2f666b6746e7c283575776 | OCamlPro/ocp-build | trie.ml | (**************************************************************************)
(* *)
(* Typerex Libraries *)
(* *)
Copyright 2011 - 2017 OCamlPro SAS
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(*
#option OCPP_CHECK_SPACES 0
*)
This file belongs to
(*s A trie is a tree-like structure to implement dictionaries over
keys which have list-like structures. The idea is that each node
branches on an element of the list and stores the value associated
to the path from the root, if any. Therefore, a trie can be
defined as soon as a map over the elements of the list is
given. *)
module Make (M : Map.S) = struct
(*s Then a trie is just a tree-like structure, where a possible
    information is stored at the node (['a option]) and where the sons
    are given by a map from type [key] to sub-tries, so of type
    ['a t M.t]. The empty trie is just the empty map. *)
type key = M.key list
type 'a t = Node of 'a option * 'a t M.t
let empty = Node (None, M.empty)
(*s To find a mapping in a trie is easy: when all the elements of the
key have been read, we just inspect the optional info at the
current node; otherwise, we descend in the appropriate sub-trie
using [M.find]. *)
let rec find l t = match (l,t) with
| [], Node (None,_) -> raise Not_found
| [], Node (Some v,_) -> v
| x::r, Node (_,m) -> find r (M.find x m)
let rec mem l t = match (l,t) with
| [], Node (None,_) -> false
| [], Node (Some _,_) -> true
| x::r, Node (_,m) -> try mem r (M.find x m) with Not_found -> false
(*s Insertion is more subtle. When the final node is reached, we just
put the information ([Some v]). Otherwise, we have to insert the
binding in the appropriate sub-trie [t']. But it may not exists,
and in that case [t'] is bound to an empty trie. Then we get a new
sub-trie [t''] by a recursive insertion and we modify the
branching, so that it now points to [t''], with [M.add]. *)
let add l v t =
let rec ins = function
| [], Node (_,m) -> Node (Some v,m)
| x::r, Node (v,m) ->
let t' = try M.find x m with Not_found -> empty in
let t'' = ins (r,t') in
Node (v, M.add x t'' m)
in
ins (l,t)
(*s When removing a binding, we take care of not leaving bindings to empty
sub-tries in the nodes. Therefore, we test wether the result [t'] of
the recursive call is the empty trie [empty]: if so, we just remove
the branching with [M.remove]; otherwise, we modify it with [M.add]. *)
let rec remove l t = match (l,t) with
| [], Node (_,m) -> Node (None,m)
| x::r, Node (v,m) ->
try
let t' = remove r (M.find x m) in
Node (v, if t' = empty then M.remove x m else M.add x t' m)
with Not_found ->
t
(*s The iterators [map], [mapi], [iter] and [fold] are implemented in
    a straightforward way using the corresponding iterators [M.map],
    [M.mapi], [M.iter] and [M.fold]. For the last three of them,
    we have to remember the path from the root, as an extra argument
    [revp]. Since elements are pushed in reverse order in [revp],
    we have to reverse it with [List.rev] when the actual binding
    has to be passed to function [f]. *)
let rec map f = function
| Node (None,m) -> Node (None, M.map (map f) m)
| Node (Some v,m) -> Node (Some (f v), M.map (map f) m)
let mapi f t =
let rec maprec revp = function
| Node (None,m) ->
Node (None, M.mapi (fun x -> maprec (x::revp)) m)
| Node (Some v,m) ->
Node (Some (f (List.rev revp) v), M.mapi (fun x -> maprec (x::revp)) m)
in
maprec [] t
let iter f t =
let rec traverse revp = function
| Node (None,m) ->
M.iter (fun x -> traverse (x::revp)) m
| Node (Some v,m) ->
f (List.rev revp) v; M.iter (fun x t -> traverse (x::revp) t) m
in
traverse [] t
let fold f t acc =
let rec traverse revp t acc = match t with
| Node (None,m) ->
M.fold (fun x -> traverse (x::revp)) m acc
| Node (Some v,m) ->
f (List.rev revp) v (M.fold (fun x -> traverse (x::revp)) m acc)
in
traverse [] t acc
let compare cmp a b =
let rec comp a b = match a,b with
| Node (Some _, _), Node (None, _) -> 1
| Node (None, _), Node (Some _, _) -> -1
| Node (None, m1), Node (None, m2) ->
M.compare comp m1 m2
| Node (Some a, m1), Node (Some b, m2) ->
let c = cmp a b in
if c <> 0 then c else M.compare comp m1 m2
in
comp a b
let equal eq a b =
let rec comp a b = match a,b with
| Node (None, m1), Node (None, m2) ->
M.equal comp m1 m2
| Node (Some a, m1), Node (Some b, m2) ->
eq a b && M.equal comp m1 m2
| _ ->
false
in
comp a b
(* The base case is rather stupid, but constructable *)
let is_empty = function
| Node (None, m1) -> M.is_empty m1
| _ -> false
(* OCamlPro *)
let for_all fn t =
fold (fun k v accu -> accu && fn k v) t true
let exists fn t =
fold (fun k v accu -> accu || fn k v) t false
let filter fn t =
let f = ref empty in
let aux k v =
if fn k v then
f := add k v !f in
iter aux t;
!f
(* XXX: this can be done more efficiently *)
let partition fn t =
let ok = ref empty in
let fail = ref empty in
let aux k v =
if fn k v then
ok := add k v !ok
else
fail := add k v !fail in
iter aux t;
!ok, !fail
let cardinal t =
fold (fun _ _ accu -> accu + 1) t 0
let rec choose = function
| Node (None, m) ->
let mk, mv = M.choose m in
let k , v = choose mv in
mk :: k, v
| Node (Some v, _) ->
[], v
let singleton k v =
add k v empty
(* XXX TODO *)
let max_binding _t =
Printf.eprintf "TODO\n%!";
assert false
let min_binding _t =
Printf.eprintf "TODO\n%!";
assert false
let split _k _t =
Printf.eprintf "TODO\n%!";
assert false
let merge _t1 _t1 =
Printf.eprintf "TODO\n%!";
assert false
let bindings _t =
Printf.eprintf "TODO\n%!";
assert false
let union _t1 _t2 =
Printf.eprintf "TODO\n%!";
assert false
end
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/libs/ocplib-lang/trie.ml | ocaml | ************************************************************************
Typerex Libraries
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
#option OCPP_CHECK_SPACES 0
s A trie is a tree-like structure to implement dictionaries over
keys which have list-like structures. The idea is that each node
branches on an element of the list and stores the value associated
to the path from the root, if any. Therefore, a trie can be
defined as soon as a map over the elements of the list is
given.
s To find a mapping in a trie is easy: when all the elements of the
key have been read, we just inspect the optional info at the
current node; otherwise, we descend in the appropriate sub-trie
using [M.find].
s Insertion is more subtle. When the final node is reached, we just
put the information ([Some v]). Otherwise, we have to insert the
binding in the appropriate sub-trie [t']. But it may not exists,
and in that case [t'] is bound to an empty trie. Then we get a new
sub-trie [t''] by a recursive insertion and we modify the
branching, so that it now points to [t''], with [M.add].
s When removing a binding, we take care of not leaving bindings to empty
sub-tries in the nodes. Therefore, we test wether the result [t'] of
the recursive call is the empty trie [empty]: if so, we just remove
the branching with [M.remove]; otherwise, we modify it with [M.add].
The base case is rather stupid, but constructable
OCamlPro
XXX: this can be done more efficiently
XXX TODO | Copyright 2011 - 2017 OCamlPro SAS
the GNU Lesser General Public License version 2.1 , with the
This file belongs to
module Make (M : Map.S) = struct
s Then a trie is just a tree - like structure , where a possible
information is stored at the node ( [ ' a option ] ) and where the sons
are given by a map from type [ key ] to sub - tries , so of type
[ ' a t M.t ] . The empty trie is just the empty map .
information is stored at the node (['a option]) and where the sons
are given by a map from type [key] to sub-tries, so of type
['a t M.t]. The empty trie is just the empty map. *)
type key = M.key list
type 'a t = Node of 'a option * 'a t M.t
let empty = Node (None, M.empty)
let rec find l t = match (l,t) with
| [], Node (None,_) -> raise Not_found
| [], Node (Some v,_) -> v
| x::r, Node (_,m) -> find r (M.find x m)
let rec mem l t = match (l,t) with
| [], Node (None,_) -> false
| [], Node (Some _,_) -> true
| x::r, Node (_,m) -> try mem r (M.find x m) with Not_found -> false
let add l v t =
let rec ins = function
| [], Node (_,m) -> Node (Some v,m)
| x::r, Node (v,m) ->
let t' = try M.find x m with Not_found -> empty in
let t'' = ins (r,t') in
Node (v, M.add x t'' m)
in
ins (l,t)
let rec remove l t = match (l,t) with
| [], Node (_,m) -> Node (None,m)
| x::r, Node (v,m) ->
try
let t' = remove r (M.find x m) in
Node (v, if t' = empty then M.remove x m else M.add x t' m)
with Not_found ->
t
(*s The iterators [map], [mapi], [iter] and [fold] are implemented in
    a straightforward way using the corresponding iterators [M.map],
    [M.mapi], [M.iter] and [M.fold]. For the last three of them,
    we have to remember the path from the root, as an extra argument
    [revp]. Since elements are pushed in reverse order in [revp],
    we have to reverse it with [List.rev] when the actual binding
    has to be passed to function [f]. *)
let rec map f = function
| Node (None,m) -> Node (None, M.map (map f) m)
| Node (Some v,m) -> Node (Some (f v), M.map (map f) m)
let mapi f t =
let rec maprec revp = function
| Node (None,m) ->
Node (None, M.mapi (fun x -> maprec (x::revp)) m)
| Node (Some v,m) ->
Node (Some (f (List.rev revp) v), M.mapi (fun x -> maprec (x::revp)) m)
in
maprec [] t
let iter f t =
let rec traverse revp = function
| Node (None,m) ->
M.iter (fun x -> traverse (x::revp)) m
| Node (Some v,m) ->
f (List.rev revp) v; M.iter (fun x t -> traverse (x::revp) t) m
in
traverse [] t
let fold f t acc =
let rec traverse revp t acc = match t with
| Node (None,m) ->
M.fold (fun x -> traverse (x::revp)) m acc
| Node (Some v,m) ->
f (List.rev revp) v (M.fold (fun x -> traverse (x::revp)) m acc)
in
traverse [] t acc
let compare cmp a b =
let rec comp a b = match a,b with
| Node (Some _, _), Node (None, _) -> 1
| Node (None, _), Node (Some _, _) -> -1
| Node (None, m1), Node (None, m2) ->
M.compare comp m1 m2
| Node (Some a, m1), Node (Some b, m2) ->
let c = cmp a b in
if c <> 0 then c else M.compare comp m1 m2
in
comp a b
let equal eq a b =
let rec comp a b = match a,b with
| Node (None, m1), Node (None, m2) ->
M.equal comp m1 m2
| Node (Some a, m1), Node (Some b, m2) ->
eq a b && M.equal comp m1 m2
| _ ->
false
in
comp a b
let is_empty = function
| Node (None, m1) -> M.is_empty m1
| _ -> false
let for_all fn t =
fold (fun k v accu -> accu && fn k v) t true
let exists fn t =
fold (fun k v accu -> accu || fn k v) t false
let filter fn t =
let f = ref empty in
let aux k v =
if fn k v then
f := add k v !f in
iter aux t;
!f
let partition fn t =
let ok = ref empty in
let fail = ref empty in
let aux k v =
if fn k v then
ok := add k v !ok
else
fail := add k v !fail in
iter aux t;
!ok, !fail
let cardinal t =
fold (fun _ _ accu -> accu + 1) t 0
let rec choose = function
| Node (None, m) ->
let mk, mv = M.choose m in
let k , v = choose mv in
mk :: k, v
| Node (Some v, _) ->
[], v
let singleton k v =
add k v empty
let max_binding _t =
Printf.eprintf "TODO\n%!";
assert false
let min_binding _t =
Printf.eprintf "TODO\n%!";
assert false
let split _k _t =
Printf.eprintf "TODO\n%!";
assert false
let merge _t1 _t1 =
Printf.eprintf "TODO\n%!";
assert false
let bindings _t =
Printf.eprintf "TODO\n%!";
assert false
let union _t1 _t2 =
Printf.eprintf "TODO\n%!";
assert false
end
|
4b1888bc524c4202568ec4ab43531d4d144e292a917d178c93f42df6b44546ba | Clozure/ccl | cl-ppcre-tests.lisp | -*- Mode : Lisp ; tab - width : 2 ; indent - tabs - mode : nil -*-
Methods for compiling and running CL - PPCRE unit tests with code coverage analysis
(in-package :code-cover-test)
(require :cl-ppcre-test)
;; Compiling CL-PPCRE unit tests with code coverage analysis (maybe) enabled
(defmethod asdf:perform :around ((op asdf:compile-op) (system (eql (asdf:find-system :cl-ppcre))))
(with-code-coverage-compile ()
(call-next-method)))
(defmethod asdf:perform :around ((op asdf:compile-op) (system (eql (asdf:find-system :cl-ppcre-test))))
(with-code-coverage-compile ()
(call-next-method)))
;; Running unit tests with code coverage analysis (maybe) enabled
(defclass cl-ppcre-tests (code-cover-test)
((verbose-p :initform nil :initarg :verbose-p))
(:default-initargs :systems '("cl-ppcre-test" "cl-ppcre"))
)
(defmethod do-tests ((test cl-ppcre-tests) &rest args)
(declare (ignore args))
;; see cl-ppcre-test/test/tests.lisp
(with-slots (verbose-p) test
(do-test "perl-test"
(cl-ppcre-test::perl-test :verbose verbose-p))
(do-test "test-optimized-test-functions"
(cl-ppcre-test::test-optimized-test-functions :verbose verbose-p))
(dotimes (n 10)
(do-test (format nil "simple-tests-~d" n)
(cl-ppcre-test::simple-tests :verbose verbose-p)))))
| null | https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/examples/code-cover-test/cl-ppcre-tests.lisp | lisp | tab - width : 2 ; indent - tabs - mode : nil -*-
Compiling CL-PPCRE unit tests with code coverage analysis (maybe) enabled
Running unit tests with code coverage analysis (maybe) enabled
see cl-ppcre-test/test/tests.lisp |
Methods for compiling and running CL - PPCRE unit tests with code coverage analysis
(in-package :code-cover-test)
(require :cl-ppcre-test)
(defmethod asdf:perform :around ((op asdf:compile-op) (system (eql (asdf:find-system :cl-ppcre))))
(with-code-coverage-compile ()
(call-next-method)))
(defmethod asdf:perform :around ((op asdf:compile-op) (system (eql (asdf:find-system :cl-ppcre-test))))
(with-code-coverage-compile ()
(call-next-method)))
(defclass cl-ppcre-tests (code-cover-test)
((verbose-p :initform nil :initarg :verbose-p))
(:default-initargs :systems '("cl-ppcre-test" "cl-ppcre"))
)
(defmethod do-tests ((test cl-ppcre-tests) &rest args)
(declare (ignore args))
(with-slots (verbose-p) test
(do-test "perl-test"
(cl-ppcre-test::perl-test :verbose verbose-p))
(do-test "test-optimized-test-functions"
(cl-ppcre-test::test-optimized-test-functions :verbose verbose-p))
(dotimes (n 10)
(do-test (format nil "simple-tests-~d" n)
(cl-ppcre-test::simple-tests :verbose verbose-p)))))
|
6ccba4dda71a6fee99c7026a50d9b2b1a3404b7f74e8ac7bed674b0c5ed92aa6 | hadolint/hadolint | DL3059.hs | module Hadolint.Rule.DL3059 (rule) where
import Hadolint.Rule
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax
data Acc
= Acc { flags :: RunFlags, count :: Int }
| Empty
deriving (Eq)
-- | This Rule catches multiple consecutive `RUN` instructions.
-- It ignores the case where multiple commands are chained together (e.g. with
-- `&&`) because in that case the programmer most likely has deliberately
-- chosen to use multiple `RUN` instructions. Cases where --mount=xxx flags
-- differ are excluded as well.
rule :: Rule Shell.ParsedShell
rule = customRule check (emptyState Empty)
where
code = "DL3059"
severity = DLInfoC
message = "Multiple consecutive `RUN` instructions. Consider consolidation."
check line st (Run (RunArgs ar fl))
| state st == Empty =
st |> modify (remember fl (foldArguments countCommands ar))
| flags (state st) /= fl =
st |> modify (remember fl (foldArguments countCommands ar))
| foldArguments countCommands ar > 2 || count (state st) > 2 =
st |> modify (remember fl (foldArguments countCommands ar))
| otherwise = st |> addFail CheckFailure {..}
check _ st (Comment _) = st
check _ st _ = st |> modify reset
{-# INLINEABLE rule #-}
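-- Behaviour sketch (hypothetical Dockerfile snippets):
--
--   RUN apt-get update
--   RUN apt-get install -y curl
--
-- flags the second RUN, while a single chained
--
--   RUN apt-get update && apt-get install -y curl
--
-- does not, and two RUNs whose --mount=... flags differ are also left alone.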
remember :: RunFlags -> Int -> Acc -> Acc
remember fl cn _ = Acc { flags = fl, count = cn }
reset :: Acc -> Acc
reset _ = Empty
countCommands :: Shell.ParsedShell -> Int
countCommands script = length $ Shell.presentCommands script
| null | https://raw.githubusercontent.com/hadolint/hadolint/dc66e0996474b324f10649618f424fc76ca60e28/src/Hadolint/Rule/DL3059.hs | haskell | | This Rule catches multiple consecutive `RUN` instructions.
It ignores the case where multiple commands are chained together (e.g. with
`&&`) because in that case the programmer most likely has deliberately
chosen to use multiple `RUN` instructions. Cases where --mount=xxx flags
differ are excluded as well. | module Hadolint.Rule.DL3059 (rule) where
import Hadolint.Rule
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax
data Acc
= Acc { flags :: RunFlags, count :: Int }
| Empty
deriving (Eq)
rule :: Rule Shell.ParsedShell
rule = customRule check (emptyState Empty)
where
code = "DL3059"
severity = DLInfoC
message = "Multiple consecutive `RUN` instructions. Consider consolidation."
check line st (Run (RunArgs ar fl))
| state st == Empty =
st |> modify (remember fl (foldArguments countCommands ar))
| flags (state st) /= fl =
st |> modify (remember fl (foldArguments countCommands ar))
| foldArguments countCommands ar > 2 || count (state st) > 2 =
st |> modify (remember fl (foldArguments countCommands ar))
| otherwise = st |> addFail CheckFailure {..}
check _ st (Comment _) = st
check _ st _ = st |> modify reset
# INLINEABLE rule #
remember :: RunFlags -> Int -> Acc -> Acc
remember fl cn _ = Acc { flags = fl, count = cn }
reset :: Acc -> Acc
reset _ = Empty
countCommands :: Shell.ParsedShell -> Int
countCommands script = length $ Shell.presentCommands script
|
0b96d72fe2966bdafbe7e4e594471a807bd06ae38bd386327b07ccdcf4f2e0a3 | schmidt73/guidescan-web | add_grna_lib.clj | (ns add-grna-lib
(:require [next.jdbc :as jdbc]
[honeysql.core :as sql]
[honeysql.helpers :as h]
[honeysql.format :as fmt]
[taoensso.timbre :as timbre]
[clojure.data.csv :as csv]
[failjure.core :as f]
[script-utils :refer :all])
(:gen-class))
(def create-grna-library-table
(->> ["CREATE TABLE IF NOT EXISTS"
" libraries (grna VARCHAR(1023) NOT NULL,"
" organism VARCHAR(1023) NOT NULL,"
" source VARCHAR(1023) NOT NULL,"
" gene_symbol VARCHAR(1023),"
" grna_type VARCHAR(1023) NOT NULL,"
" chromosome VARCHAR(1023),"
" identifier VARCHAR(1023),"
" region_id VARCHAR(1023),"
" strand VARCHAR(1023),"
" position INT,"
" offtarget0 INT NOT NULL,"
" offtarget1 INT NOT NULL,"
" offtarget2 INT NOT NULL,"
" offtarget3 INT NOT NULL,"
" specificity REAL,"
" specificity_5pG REAL,"
" cutting_efficiency REAL,"
" PRIMARY KEY (grna, grna_type));"]
(clojure.string/join "\n")))
(def create-essential-genes-table
(->> ["CREATE TABLE IF NOT EXISTS"
" essential_genes (gene_symbol VARCHAR(1023) NOT NULL,"
" organism VARCHAR(1023) NOT NULL,"
" PRIMARY KEY (gene_symbol, organism));"]
(clojure.string/join "\n")))
(defmethod fmt/format-clause :on-conflict-nothing [[op v] sqlmap]
(str "ON CONFLICT DO NOTHING"))
(h/defhelper on-conflict-nothing [m args]
(assoc m :on-conflict-nothing nil))
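;; Usage sketch (hypothetical row), assuming honeysql picks the clause up when
;; formatting the insert map:
(comment
  (-> (h/insert-into :libraries)
      (h/values [{:grna "ACGT" :organism "example-organism"}])
      (on-conflict-nothing)
      sql/format))
;; The generated INSERT then ends in "ON CONFLICT DO NOTHING", so re-inserting
;; an existing guide RNA is a no-op instead of a primary-key violation.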
(defn nil-parse-int [s]
(if-not (empty? s)
(Math/round (Float/parseFloat s))))
(defn nil-parse-float [s]
(if-not (empty? s)
(Float/parseFloat s)))
(defn nil-get-entry [csv-entry k]
(let [v (get csv-entry k)]
(if-not (empty? v)
v)))
(defn create-sql-record-guides
[organism csv-entry]
{:grna (nil-get-entry csv-entry "sgRNA")
:identifier (nil-get-entry csv-entry "Identifier")
:organism organism
:source "Guidescan2"
:region_id (nil-get-entry csv-entry "Cutting Region ID")
:specificity (nil-parse-float (get csv-entry "Specificity"))
:specificity_5pG (nil-parse-float (get csv-entry "5pG Specificity"))
:cutting_efficiency (nil-parse-float (get csv-entry "Cutting Efficiency"))
:gene_symbol (nil-get-entry csv-entry "Gene")
:grna_type (nil-get-entry csv-entry "Type")
:strand (nil-get-entry csv-entry "Strand")
:chromosome (nil-get-entry csv-entry "Chr")
:position (nil-parse-int (get csv-entry "Pos"))
:offtarget0 (Integer/parseInt (get csv-entry "0 Off-Targets"))
:offtarget1 (Integer/parseInt (get csv-entry "1 Off-Targets"))
:offtarget2 (Integer/parseInt (get csv-entry "2 Off-Targets"))
:offtarget3 (Integer/parseInt (get csv-entry "3 Off-Targets"))})
(defn create-sql-statement-guides
[organism csv-entries]
(let [records (map #(create-sql-record-guides organism %) csv-entries)]
(-> (h/insert-into :libraries)
(h/values records)
(on-conflict-nothing)
sql/format)))
(defn create-sql-statement-genes
[organism essential-genes]
(-> (h/insert-into :essential_genes)
(h/values (map (fn [gene-sym] {:gene_symbol gene-sym :organism organism})
essential-genes))
(on-conflict-nothing)
sql/format))
(defn usage []
(->> ["Adds a new guide-rna library to the database."
""
"Usage: java -jar add-grna-lib.jar [jdbc-url-string] [library-csv] [essential-gene-list] [organism]"]
(clojure.string/join \newline)))
(defn -main
[& args]
(when (< (count args) 3)
(println (usage))
(System/exit 1))
(let [ds (jdbc/get-datasource {:jdbcUrl (nth args 0)})
library-csv (read-csv-with-header (nth args 1))
essential-genes-list (clojure.string/split-lines (slurp (nth args 2)))
organism (nth args 3)
entries (partition-all 500 library-csv)]
(with-open [conn (jdbc/get-connection ds)]
(jdbc/execute! conn [create-grna-library-table])
(jdbc/execute! conn [create-essential-genes-table])
(doseq [essential-genes (partition-all 500 essential-genes-list)]
(jdbc/execute! conn (create-sql-statement-genes organism essential-genes)))
(doseq [entry entries]
(jdbc/execute! conn (create-sql-statement-guides organism entry))))))
| null | https://raw.githubusercontent.com/schmidt73/guidescan-web/32f001ea39bc2f738e3b45615af0803c31c06cd4/scripts/add_grna_lib.clj | clojure | (ns add-grna-lib
(:require [next.jdbc :as jdbc]
[honeysql.core :as sql]
[honeysql.helpers :as h]
[honeysql.format :as fmt]
[taoensso.timbre :as timbre]
[clojure.data.csv :as csv]
[failjure.core :as f]
[script-utils :refer :all])
(:gen-class))
(def create-grna-library-table
(->> ["CREATE TABLE IF NOT EXISTS"
" libraries (grna VARCHAR(1023) NOT NULL,"
" organism VARCHAR(1023) NOT NULL,"
" source VARCHAR(1023) NOT NULL,"
" gene_symbol VARCHAR(1023),"
" grna_type VARCHAR(1023) NOT NULL,"
" chromosome VARCHAR(1023),"
" identifier VARCHAR(1023),"
" region_id VARCHAR(1023),"
" strand VARCHAR(1023),"
" position INT,"
" offtarget0 INT NOT NULL,"
" offtarget1 INT NOT NULL,"
" offtarget2 INT NOT NULL,"
" offtarget3 INT NOT NULL,"
" specificity REAL,"
" specificity_5pG REAL,"
" cutting_efficiency REAL,"
" PRIMARY KEY (grna, grna_type));"]
(clojure.string/join "\n")))
(def create-essential-genes-table
(->> ["CREATE TABLE IF NOT EXISTS"
" essential_genes (gene_symbol VARCHAR(1023) NOT NULL,"
" organism VARCHAR(1023) NOT NULL,"
" PRIMARY KEY (gene_symbol, organism));"]
(clojure.string/join "\n")))
(defmethod fmt/format-clause :on-conflict-nothing [[op v] sqlmap]
(str "ON CONFLICT DO NOTHING"))
(h/defhelper on-conflict-nothing [m args]
(assoc m :on-conflict-nothing nil))
(defn nil-parse-int [s]
(if-not (empty? s)
(Math/round (Float/parseFloat s))))
(defn nil-parse-float [s]
(if-not (empty? s)
(Float/parseFloat s)))
(defn nil-get-entry [csv-entry k]
(let [v (get csv-entry k)]
(if-not (empty? v)
v)))
(defn create-sql-record-guides
[organism csv-entry]
{:grna (nil-get-entry csv-entry "sgRNA")
:identifier (nil-get-entry csv-entry "Identifier")
:organism organism
:source "Guidescan2"
:region_id (nil-get-entry csv-entry "Cutting Region ID")
:specificity (nil-parse-float (get csv-entry "Specificity"))
:specificity_5pG (nil-parse-float (get csv-entry "5pG Specificity"))
:cutting_efficiency (nil-parse-float (get csv-entry "Cutting Efficiency"))
:gene_symbol (nil-get-entry csv-entry "Gene")
:grna_type (nil-get-entry csv-entry "Type")
:strand (nil-get-entry csv-entry "Strand")
:chromosome (nil-get-entry csv-entry "Chr")
:position (nil-parse-int (get csv-entry "Pos"))
:offtarget0 (Integer/parseInt (get csv-entry "0 Off-Targets"))
:offtarget1 (Integer/parseInt (get csv-entry "1 Off-Targets"))
:offtarget2 (Integer/parseInt (get csv-entry "2 Off-Targets"))
:offtarget3 (Integer/parseInt (get csv-entry "3 Off-Targets"))})
(defn create-sql-statement-guides
[organism csv-entries]
(let [records (map #(create-sql-record-guides organism %) csv-entries)]
(-> (h/insert-into :libraries)
(h/values records)
(on-conflict-nothing)
sql/format)))
(defn create-sql-statement-genes
[organism essential-genes]
(-> (h/insert-into :essential_genes)
(h/values (map (fn [gene-sym] {:gene_symbol gene-sym :organism organism})
essential-genes))
(on-conflict-nothing)
sql/format))
(defn usage []
(->> ["Adds a new guide-rna library to the database."
""
"Usage: java -jar add-grna-lib.jar [jdbc-url-string] [library-csv] [essential-gene-list] [organism]"]
(clojure.string/join \newline)))
(defn -main
[& args]
(when (< (count args) 3)
(println (usage))
(System/exit 1))
(let [ds (jdbc/get-datasource {:jdbcUrl (nth args 0)})
library-csv (read-csv-with-header (nth args 1))
essential-genes-list (clojure.string/split-lines (slurp (nth args 2)))
organism (nth args 3)
entries (partition-all 500 library-csv)]
(with-open [conn (jdbc/get-connection ds)]
(jdbc/execute! conn [create-grna-library-table])
(jdbc/execute! conn [create-essential-genes-table])
(doseq [essential-genes (partition-all 500 essential-genes-list)]
(jdbc/execute! conn (create-sql-statement-genes organism essential-genes)))
(doseq [entry entries]
(jdbc/execute! conn (create-sql-statement-guides organism entry))))))
|
|
f0a7916155f564dd302b7d3fc444a5d4591d730ec935f10628f435784322cbf3 | seancorfield/next-jdbc | plan_test.clj | copyright ( c ) 2020 - 2021 , all rights reserved
(ns next.jdbc.plan-test
"Tests for the plan helpers."
(:require [clojure.test :refer [deftest is use-fixtures]]
[next.jdbc.plan :as plan]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures
:refer [with-test-db ds]]
[clojure.string :as str]))
(set! *warn-on-reflection* true)
around each test because of the folding tests using 1,000 rows
(use-fixtures :each with-test-db)
(specs/instrument)
(deftest select-one!-tests
(is (= {:id 1}
(plan/select-one! (ds) [:id] ["select * from fruit order by id"])))
(is (= 1
(plan/select-one! (ds) :id ["select * from fruit order by id"])))
(is (= "Banana"
(plan/select-one! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [1 "Apple"]
(plan/select-one! (ds) (juxt :id :name)
["select * from fruit order by id"])))
(is (= {:id 1 :name "Apple"}
(plan/select-one! (ds) #(select-keys % [:id :name])
["select * from fruit order by id"]))))
(deftest select-vector-tests
(is (= [{:id 1} {:id 2} {:id 3} {:id 4}]
(plan/select! (ds) [:id] ["select * from fruit order by id"])))
(is (= [1 2 3 4]
(plan/select! (ds) :id ["select * from fruit order by id"])))
(is (= ["Banana"]
(plan/select! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [[2 "Banana"]]
(plan/select! (ds) (juxt :id :name)
["select * from fruit where id = ?" 2])))
(is (= [{:id 2 :name "Banana"}]
(plan/select! (ds) [:id :name]
["select * from fruit where id = ?" 2]))))
(deftest select-set-tests
(is (= #{{:id 1} {:id 2} {:id 3} {:id 4}}
(plan/select! (ds) [:id] ["select * from fruit order by id"]
{:into #{}})))
(is (= #{1 2 3 4}
(plan/select! (ds) :id ["select * from fruit order by id"]
{:into #{}}))))
(deftest select-map-tests
(is (= {1 "Apple", 2 "Banana", 3 "Peach", 4 "Orange"}
(plan/select! (ds) (juxt :id :name) ["select * from fruit order by id"]
{:into {}}))))
(deftest select-issue-227
(is (= ["Apple"]
(plan/select! (ds) :name ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) :foo/name ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) #(get % "name") ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= [["Apple"]]
(plan/select! (ds) (juxt :name) ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")}))))
| null | https://raw.githubusercontent.com/seancorfield/next-jdbc/961b880e085d56278e9ee566cf0f1beb0be87734/test/next/jdbc/plan_test.clj | clojure | copyright ( c ) 2020 - 2021 , all rights reserved
(ns next.jdbc.plan-test
"Tests for the plan helpers."
(:require [clojure.test :refer [deftest is use-fixtures]]
[next.jdbc.plan :as plan]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures
:refer [with-test-db ds]]
[clojure.string :as str]))
(set! *warn-on-reflection* true)
around each test because of the folding tests using 1,000 rows
(use-fixtures :each with-test-db)
(specs/instrument)
(deftest select-one!-tests
(is (= {:id 1}
(plan/select-one! (ds) [:id] ["select * from fruit order by id"])))
(is (= 1
(plan/select-one! (ds) :id ["select * from fruit order by id"])))
(is (= "Banana"
(plan/select-one! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [1 "Apple"]
(plan/select-one! (ds) (juxt :id :name)
["select * from fruit order by id"])))
(is (= {:id 1 :name "Apple"}
(plan/select-one! (ds) #(select-keys % [:id :name])
["select * from fruit order by id"]))))
(deftest select-vector-tests
(is (= [{:id 1} {:id 2} {:id 3} {:id 4}]
(plan/select! (ds) [:id] ["select * from fruit order by id"])))
(is (= [1 2 3 4]
(plan/select! (ds) :id ["select * from fruit order by id"])))
(is (= ["Banana"]
(plan/select! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [[2 "Banana"]]
(plan/select! (ds) (juxt :id :name)
["select * from fruit where id = ?" 2])))
(is (= [{:id 2 :name "Banana"}]
(plan/select! (ds) [:id :name]
["select * from fruit where id = ?" 2]))))
(deftest select-set-tests
(is (= #{{:id 1} {:id 2} {:id 3} {:id 4}}
(plan/select! (ds) [:id] ["select * from fruit order by id"]
{:into #{}})))
(is (= #{1 2 3 4}
(plan/select! (ds) :id ["select * from fruit order by id"]
{:into #{}}))))
(deftest select-map-tests
(is (= {1 "Apple", 2 "Banana", 3 "Peach", 4 "Orange"}
(plan/select! (ds) (juxt :id :name) ["select * from fruit order by id"]
{:into {}}))))
(deftest select-issue-227
(is (= ["Apple"]
(plan/select! (ds) :name ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) :foo/name ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) #(get % "name") ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= [["Apple"]]
(plan/select! (ds) (juxt :name) ["select * from fruit where id = ?" 1]
{:column-fn #(str/replace % "-" "_")}))))
|
|
727ed9022db3866f7e81945907ef8f17f7dacfaa6517041ca0ef1b9d1c9fdc0e | dvingo/cljs-emotion | cljs_emotion.cljc | (ns dv.cljs-emotion
(:require
#?@(:cljs [["react" :as react]
["@emotion/hash" :as emotion-hash*]
["@emotion/styled" :as styled*]
["@emotion/react" :as styled-core :refer [Global ThemeProvider]]
[goog.object :as g]])
[clojure.string :as str]
[clojure.walk :as walk])
#?(:cljs (:require-macros [dv.cljs-emotion :refer [defstyled]])))
;; Support plain cljs compiler and shadow.
#?(:cljs (def emotion-hash (g/get emotion-hash* "default")))
#?(:cljs (def styled (g/get styled* "default")))
#?(:cljs (def jsx* styled-core/jsx))
Used to prevent generated code from needing to require goog.object
(defn obj-set [o k v]
#?(:cljs (g/set o k v)
:clj nil))
(defn obj-get [o k]
#?(:cljs (g/get o k)
:clj nil))
;; from fulcro
#?(:cljs
(defn force-children
"Utility function that will force a lazy sequence of children (recursively) into realized
vectors (React cannot deal with lazy seqs in production mode)"
[x]
(if (seq? x)
(to-array (mapv force-children x))
x)))
#?(:cljs
(defn kebab->camel
[prop]
(if (str/starts-with? prop ".")
prop
(if (str/includes? prop "-")
(let [words (->> (re-seq #"[a-zA-Z]+" prop)
(mapv str/capitalize))]
(-> words
(update 0 str/lower-case)
str/join))
prop))))
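;; For illustration, what the conversion above produces for a few typical
;; CSS property names:
(comment
  (kebab->camel "font-size")        ;; => "fontSize"
  (kebab->camel "border-top-width") ;; => "borderTopWidth"
  (kebab->camel ".my-class")        ;; => ".my-class" (selectors starting with "." pass through)
  )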
;; todo use the caching strategy seen in reagent.impl.template
#?(:cljs
(defn camelize-keys
"Also replaces styled components with their css classname is key position."
[style-map]
(walk/postwalk
(fn in-walk [v]
(cond
(keyword? v)
(-> v name kebab->camel)
(and (meta v) (contains? (meta v) ::hashed-name))
(str "." (-> v meta ::hashed-name))
:else v))
style-map)))
#?(:cljs
(defn keyframes [anim-map]
(styled-core/keyframes (clj->js (camelize-keys anim-map)))))
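;; A possible use of the wrapper above — the spin animation itself is only an
;; illustrative assumption:
(comment
  (def spin
    (keyframes {:from {:transform "rotate(0deg)"}
                :to   {:transform "rotate(360deg)"}}))
  ;; the returned value can then be used as an :animation-name style value
  )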
(def cljs-props-key "dv.cljs-emotion/props")
#?(:cljs
(defn wrap-call-style-fn [anon-styles?]
(fn [x]
(cond
;; Another emotion styled component created with this lib.
(and (meta x) (contains? (meta x) ::hashed-name))
(str "." (-> x meta ::hashed-name))
(fn? x)
(fn [arg]
;; arg# is js props passed at runtime, we ship it back and forth js -> cljs -> js
;; js->clj is resulting in an infinite recur when children contains another styled component, so we remove it.
(js-delete arg "children")
(if anon-styles?
;; with anonymous styles there can be no props - so the theme is passed as the only argument
(clj->js (camelize-keys (x (js->clj arg :keywordize-keys true))))
(let [cljs-args (assoc (obj-get arg cljs-props-key)
:theme (js->clj (obj-get arg "theme") :keywordize-keys true))]
;; invoke the user-supplied function which returns style data - convert whatever they return to js data structures.
(clj->js (camelize-keys (x cljs-args))))))
;; maps come up in value position for nested selectors
(map? x)
(camelize-keys x)
:else x))))
#?(:cljs (goog-define ADD_CLASSNAMES "INITIAL"))
#?(:cljs
(defn add-class-names? []
(if (boolean? ADD_CLASSNAMES)
ADD_CLASSNAMES
goog.DEBUG)))
#?(:cljs
(defn add-class-name [props class-name]
(if (object? props)
(doto props
(goog.object/set "className"
(->> [class-name (goog.object/get props "className")]
(str/join " ")
(str/trim))))
(update props :className #(if (nil? %) class-name (str class-name " " %))))))
#?(:cljs (defn hashit [string] (str "dvcss-" (emotion-hash string))))
#?(:cljs
(defn set-class-name [props class-name]
(if class-name
(let [hashed-name (hashit class-name)
props (add-class-name props hashed-name)]
(if (add-class-names?)
(add-class-name props class-name)
props))
props)))
#?(:cljs
(defn map->obj [m]
(reduce-kv (fn [o k v]
;; convert keywords to string only in key position
(let [new-k (cond-> k (implements? INamed k) name)
new-v (cond-> v (map? v) map->obj)]
(doto o (obj-set new-k new-v))))
#js{} m)))
#?(:cljs
(defn make-js-props [props class-name]
(if (object? props)
props
(let [clj-props (set-class-name props class-name)
js-props (map->obj clj-props)]
(doto js-props (obj-set cljs-props-key clj-props))))))
#?(:cljs
(defn react-factory [el class-name]
(fn
([]
(jsx* el (clj->js (set-class-name {} class-name))))
([props]
(try
(cond
(or (react/isValidElement props) (string? props))
(jsx* el (set-class-name #js{} class-name) props)
(map? props)
;; Do not use clj->js in order to preserve clojure data types like keywords that would not
;; survive a round-trip clj->js js->clj
(jsx* el (make-js-props props class-name))
(object? props)
(jsx* el (set-class-name props class-name))
(or (array? props) (coll? props))
(jsx* el (set-class-name #js{} class-name) (force-children props))
:else
(jsx* el (set-class-name #js{} class-name)))
(catch js/Object e
(js/console.error "Error invoking an emotion styled component: " e))))
([props & children]
;; if props are a mapping type and not a react child
(if (or (and (object? props) (not (react/isValidElement props))) (map? props))
(let [js-props (make-js-props props class-name)]
(if (seq children)
(apply jsx* el js-props (force-children children))
(jsx* el js-props)))
(apply jsx* el (set-class-name #js{} class-name) (force-children (list* props children))))))))
#?(:clj
(defn get-type
[styled-arg tag-name]
(cond
;; if literals, don't need to determine type at runtime
;; a dom element like :div, same as styled.div``
(string? tag-name) `(obj-get ~styled-arg ~tag-name)
(keyword? tag-name) `(obj-get ~styled-arg ~(name tag-name))
:else
`(cond
(string? ~tag-name)
(obj-get ~styled-arg ~tag-name)
(keyword? ~tag-name)
(obj-get ~styled-arg ~(name tag-name))
;; Another styled component
(::styled (meta ~tag-name))
(.call ~styled-arg ~styled-arg (::styled (meta ~tag-name)))
;; A React component
:else
(.call ~styled-arg ~styled-arg ~tag-name)))))
#?(:clj
(defn get-cls-name
[namespace-name print-style component-sym]
(case print-style
:full (str namespace-name "/" component-sym)
:short (str component-sym)
:nil nil)))
#?(:clj (def default-classname-style :full))
#?(:clj
(defn get-cls-name-from-meta
"Returns string or nil for the classname"
[namespace-name component-sym]
(if (contains? (meta component-sym) :styled/classname)
(let [print-config (:styled/classname (meta component-sym))]
(if (#{:full :short :nil} print-config)
(get-cls-name namespace-name print-config component-sym)
(throw (Exception. (str "Unknown option for class-name style in metadata passed to component: " component-sym)))))
(get-cls-name namespace-name default-classname-style component-sym))))
#?(:clj
(defmacro defstyled
([component-name el & children]
(let [component-type (gensym "component-type")
clss (gensym "clss")
class-name (gensym "className")
full-class-name (gensym "fullClassName")
children* (gensym "children")]
`(let [~class-name ~(get-cls-name-from-meta (-> &env :ns :name) component-name)
~full-class-name ~(str (-> &env :ns :name) "/" component-name)
~children*
(walk/postwalk
;; todo here you can do props validation also
;; should not allow anything that's not a symbol, map, vector, js-obj, js-array, fn
(wrap-call-style-fn false)
~(vec children))
;; pass js structures to the lib
~children* (cljs.core/clj->js ~children*)
~component-type ~(get-type `styled el)
~clss (.apply ~component-type ~component-type ~children*)]
(obj-set ~clss "displayName" ~(str (-> &env :ns :name) "/" component-name))
(def ~component-name
(with-meta (react-factory ~clss ~class-name)
{::styled ~clss
::hashed-name (hashit ~full-class-name)}))
(cljs.core/specify! ~component-name
~'Object
(~'toString [this#]
(cljs.core/str "." (::hashed-name (meta ~component-name))))))))))
#?(:clj
(comment
(macroexpand-1 '(defstyled button4 button3 {:fontSize "20em"}))
(macroexpand '(defstyled button3 :button
{:background "lightblue" :color "green"}))
(macroexpand-1
'(defstyled button3 :button
{:background "lightblue"
button1 {:color "green"}}
(fn [jsprops cp]
{:color (or (:color cp) "white")})))
;; postwalk doesn't continue expanding replaced values, like the fn call here:
(walk/postwalk
(fn [i]
(cond
(fn? i) (i)
(keyword? i) (kebab->camel (name i))
:else i))
[{:background-color "blue"}
(fn [] {:border-radius 5})])))
(comment
(camelize-keys
[{:background "lightblue"
:font-size 20
:border-radius "10px"}
{:background-image "url(xyz.com/my-image)"}])
(camelize-keys
{:background "lightblue"
:font-size 20
"@media(min-width: 200px)" {:font-size 33}
:border-radius "10px"})
(walk/postwalk
(fn [item]
(println "item: " item " map entry: " (map-entry? item) " vec? " (vector? item))
(cond
(keyword? item)
(do
(println "found keyword")
(keyword (kebab->camel (name item))))
:elseeeee item)
)
{:background "lightblue"
:font-size 20
"@media(min-width: 200px)"
[{:font-size 33}
{:background-color "purple"}]
:border-radius "10px"}))
#?(:cljs
(def ^:private global* (react-factory Global nil)))
;; emotion doesn't allow functions in nested position, only
;; objects and arrays of objects
but they do allow one function as a child
;; you can always wrap the call in a fn if you want dynamism like below
;-js/emotion/blob/188dc0e785cfc9b10b3f9a6ead62b56ddd38e039/packages/core/src/global.js#L16
#?(:cljs
(defn global-style
"Takes a cljs vector or hashmap of styles and converts to JS types before calling emotion's Global function.
Inserts styles into the DOM that target global elements and classes."
[props]
(global* {:styles (camelize-keys props)})))
;; can use like so:
(comment
(global-style {:body {:background "#cce" "@media (min-width:700px)" {:background "white"}}})
(global-style
[(clj->js {:body {:font-family "serif"}}) {:body {:border "2px solid yellOW"}} {:body {:background-color "#ecccee"}}])
;; to adapt based on props, wrap in a fn:
(defn my-globals [props]
(global-style
{:body {:background-color "red"}})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Theme support
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
#?(:cljs
(defn theme-provider
"Takes a hashmap of a style theme and react children to render with that theme in the React Context
using emotion's ThemeProvider."
[props & children]
(when-not (contains? props :theme)
(throw (js/Error. "You must pass a :theme to the theme-provider.")))
(apply react/createElement ThemeProvider
(clj->js props)
(force-children children))))
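;; A minimal usage sketch — `my-button` and the theme map are illustrative
;; assumptions:
(comment
  (theme-provider {:theme {:primary "#336699"}}
    (my-button {} "Save"))
  ;; style fns of components created with defstyled receive this map under
  ;; the :theme key (see wrap-call-style-fn above)
  )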
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CSS prop support
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
#?(:clj
(defn ^:private css-body [props]
`(do
;(assert (contains? ~props :css) "Props must contain :css key")
(cljs.core/clj->js
(assoc ~props :css
(walk/postwalk
;; todo here you can do props validation also
;; should not allow anything that's not a symbol, map, vector, js-obj, js-array, fn
(wrap-call-style-fn true)
(:css ~props)))))))
#?(:clj
(defmacro jsx
([el props]
(let [el (cond-> el (keyword? el) name)
css-props (css-body props)]
`(jsx* ~el ~css-props)))
([el props & children]
(let [el (cond-> el (keyword? el) name)
css-props (css-body props)]
`(jsx* ~el ~css-props ~@children)))))
#?(:cljs
(defn convert-css
"Takes a hashmap or vector of styles and converts to JS types, will pass any functions cljs data structures."
[css]
(clj->js (walk/postwalk (wrap-call-style-fn true) css))))
#?(:cljs
(defn css [& css]
(styled-core/css (clj->js (walk/postwalk (wrap-call-style-fn true) css)))))
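;; A small sketch of the helper above (the style values are invented):
(comment
  (def warning-css
    (css {:color "orange" :font-weight "bold"}))
  ;; warning-css is emotion's serialized style value built from cljs data
  )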
| null | https://raw.githubusercontent.com/dvingo/cljs-emotion/0e2d7339657bd2957f03fbf8f4bb4a97b4fefc81/src/main/dv/cljs_emotion.cljc | clojure | Support plain cljs compiler and shadow.
from fulcro
todo use the caching strategy seen in reagent.impl.template
Another emotion styled component created with this lib.
arg# is js props passed at runtime, we ship it back and forth js -> cljs -> js
js->clj is resulting in an infinite recur when children contains another styled component, so we remove it.
with anonymous styles there can be no props - so the theme is passed as the only argument
invoke the user-supplied function which returns style data - convert whatever they return to js data structures.
maps come up in value position for nested selectors
convert keywords to string only in key position
Do not use clj->js in order to preserve clojure data types like keywords that would not
survive a round-trip clj->js js->clj
if props are a mapping type and not a react child
if literals, don't need to determine type at runtime
a dom element like :div, same as styled.div``
Another styled component
A React component
todo here you can do props validation also
should not allow anything that's not a symbol, map, vector, js-obj, js-array, fn
pass js structures to the lib
postwalk doesn't continue expanding replaced values, like the fn call here:
emotion doesn't allow functions in nested position, only
objects and arrays of objects
you can always wrap the call in a fn if you want dynamism like below
-js/emotion/blob/188dc0e785cfc9b10b3f9a6ead62b56ddd38e039/packages/core/src/global.js#L16
can use like so:
to adapt based on props, wrap in a fn:
Theme support
CSS prop support
(assert (contains? ~props :css) "Props must contain :css key")
todo here you can do props validation also
should not allow anything that's not a symbol, map, vector, js-obj, js-array, fn | (ns dv.cljs-emotion
(:require
#?@(:cljs [["react" :as react]
["@emotion/hash" :as emotion-hash*]
["@emotion/styled" :as styled*]
["@emotion/react" :as styled-core :refer [Global ThemeProvider]]
[goog.object :as g]])
[clojure.string :as str]
[clojure.walk :as walk])
#?(:cljs (:require-macros [dv.cljs-emotion :refer [defstyled]])))
#?(:cljs (def emotion-hash (g/get emotion-hash* "default")))
#?(:cljs (def styled (g/get styled* "default")))
#?(:cljs (def jsx* styled-core/jsx))
Used to prevent generated code from needing to require goog.object
(defn obj-set [o k v]
#?(:cljs (g/set o k v)
:clj nil))
(defn obj-get [o k]
#?(:cljs (g/get o k)
:clj nil))
#?(:cljs
(defn force-children
"Utility function that will force a lazy sequence of children (recursively) into realized
vectors (React cannot deal with lazy seqs in production mode)"
[x]
(if (seq? x)
(to-array (mapv force-children x))
x)))
#?(:cljs
(defn kebab->camel
[prop]
(if (str/starts-with? prop ".")
prop
(if (str/includes? prop "-")
(let [words (->> (re-seq #"[a-zA-Z]+" prop)
(mapv str/capitalize))]
(-> words
(update 0 str/lower-case)
str/join))
prop))))
#?(:cljs
(defn camelize-keys
"Also replaces styled components with their css classname is key position."
[style-map]
(walk/postwalk
(fn in-walk [v]
(cond
(keyword? v)
(-> v name kebab->camel)
(and (meta v) (contains? (meta v) ::hashed-name))
(str "." (-> v meta ::hashed-name))
:else v))
style-map)))
#?(:cljs
(defn keyframes [anim-map]
(styled-core/keyframes (clj->js (camelize-keys anim-map)))))
(def cljs-props-key "dv.cljs-emotion/props")
#?(:cljs
(defn wrap-call-style-fn [anon-styles?]
(fn [x]
(cond
(and (meta x) (contains? (meta x) ::hashed-name))
(str "." (-> x meta ::hashed-name))
(fn? x)
(fn [arg]
(js-delete arg "children")
(if anon-styles?
(clj->js (camelize-keys (x (js->clj arg :keywordize-keys true))))
(let [cljs-args (assoc (obj-get arg cljs-props-key)
:theme (js->clj (obj-get arg "theme") :keywordize-keys true))]
(clj->js (camelize-keys (x cljs-args))))))
(map? x)
(camelize-keys x)
:else x))))
#?(:cljs (goog-define ADD_CLASSNAMES "INITIAL"))
#?(:cljs
(defn add-class-names? []
(if (boolean? ADD_CLASSNAMES)
ADD_CLASSNAMES
goog.DEBUG)))
#?(:cljs
(defn add-class-name [props class-name]
(if (object? props)
(doto props
(goog.object/set "className"
(->> [class-name (goog.object/get props "className")]
(str/join " ")
(str/trim))))
(update props :className #(if (nil? %) class-name (str class-name " " %))))))
#?(:cljs (defn hashit [string] (str "dvcss-" (emotion-hash string))))
#?(:cljs
(defn set-class-name [props class-name]
(if class-name
(let [hashed-name (hashit class-name)
props (add-class-name props hashed-name)]
(if (add-class-names?)
(add-class-name props class-name)
props))
props)))
#?(:cljs
(defn map->obj [m]
(reduce-kv (fn [o k v]
(let [new-k (cond-> k (implements? INamed k) name)
new-v (cond-> v (map? v) map->obj)]
(doto o (obj-set new-k new-v))))
#js{} m)))
#?(:cljs
(defn make-js-props [props class-name]
(if (object? props)
props
(let [clj-props (set-class-name props class-name)
js-props (map->obj clj-props)]
(doto js-props (obj-set cljs-props-key clj-props))))))
#?(:cljs
(defn react-factory [el class-name]
(fn
([]
(jsx* el (clj->js (set-class-name {} class-name))))
([props]
(try
(cond
(or (react/isValidElement props) (string? props))
(jsx* el (set-class-name #js{} class-name) props)
(map? props)
(jsx* el (make-js-props props class-name))
(object? props)
(jsx* el (set-class-name props class-name))
(or (array? props) (coll? props))
(jsx* el (set-class-name #js{} class-name) (force-children props))
:else
(jsx* el (set-class-name #js{} class-name)))
(catch js/Object e
(js/console.error "Error invoking an emotion styled component: " e))))
([props & children]
(if (or (and (object? props) (not (react/isValidElement props))) (map? props))
(let [js-props (make-js-props props class-name)]
(if (seq children)
(apply jsx* el js-props (force-children children))
(jsx* el js-props)))
(apply jsx* el (set-class-name #js{} class-name) (force-children (list* props children))))))))
#?(:clj
(defn get-type
[styled-arg tag-name]
(cond
(string? tag-name) `(obj-get ~styled-arg ~tag-name)
(keyword? tag-name) `(obj-get ~styled-arg ~(name tag-name))
:else
`(cond
(string? ~tag-name)
(obj-get ~styled-arg ~tag-name)
(keyword? ~tag-name)
(obj-get ~styled-arg ~(name tag-name))
(::styled (meta ~tag-name))
(.call ~styled-arg ~styled-arg (::styled (meta ~tag-name)))
:else
(.call ~styled-arg ~styled-arg ~tag-name)))))
#?(:clj
(defn get-cls-name
[namespace-name print-style component-sym]
(case print-style
:full (str namespace-name "/" component-sym)
:short (str component-sym)
:nil nil)))
#?(:clj (def default-classname-style :full))
#?(:clj
(defn get-cls-name-from-meta
"Returns string or nil for the classname"
[namespace-name component-sym]
(if (contains? (meta component-sym) :styled/classname)
(let [print-config (:styled/classname (meta component-sym))]
(if (#{:full :short :nil} print-config)
(get-cls-name namespace-name print-config component-sym)
(throw (Exception. (str "Unknown option for class-name style in metadata passed to component: " component-sym)))))
(get-cls-name namespace-name default-classname-style component-sym))))
#?(:clj
(defmacro defstyled
([component-name el & children]
(let [component-type (gensym "component-type")
clss (gensym "clss")
class-name (gensym "className")
full-class-name (gensym "fullClassName")
children* (gensym "children")]
`(let [~class-name ~(get-cls-name-from-meta (-> &env :ns :name) component-name)
~full-class-name ~(str (-> &env :ns :name) "/" component-name)
~children*
(walk/postwalk
(wrap-call-style-fn false)
~(vec children))
~children* (cljs.core/clj->js ~children*)
~component-type ~(get-type `styled el)
~clss (.apply ~component-type ~component-type ~children*)]
(obj-set ~clss "displayName" ~(str (-> &env :ns :name) "/" component-name))
(def ~component-name
(with-meta (react-factory ~clss ~class-name)
{::styled ~clss
::hashed-name (hashit ~full-class-name)}))
(cljs.core/specify! ~component-name
~'Object
(~'toString [this#]
(cljs.core/str "." (::hashed-name (meta ~component-name))))))))))
#?(:clj
(comment
(macroexpand-1 '(defstyled button4 button3 {:fontSize "20em"}))
(macroexpand '(defstyled button3 :button
{:background "lightblue" :color "green"}))
(macroexpand-1
'(defstyled button3 :button
{:background "lightblue"
button1 {:color "green"}}
(fn [jsprops cp]
{:color (or (:color cp) "white")})))
(walk/postwalk
(fn [i]
(cond
(fn? i) (i)
(keyword? i) (kebab->camel (name i))
:else i))
[{:background-color "blue"}
(fn [] {:border-radius 5})])))
(comment
(camelize-keys
[{:background "lightblue"
:font-size 20
:border-radius "10px"}
{:background-image "url(xyz.com/my-image)"}])
(camelize-keys
{:background "lightblue"
:font-size 20
"@media(min-width: 200px)" {:font-size 33}
:border-radius "10px"})
(walk/postwalk
(fn [item]
(println "item: " item " map entry: " (map-entry? item) " vec? " (vector? item))
(cond
(keyword? item)
(do
(println "found keyword")
(keyword (kebab->camel (name item))))
:elseeeee item)
)
{:background "lightblue"
:font-size 20
"@media(min-width: 200px)"
[{:font-size 33}
{:background-color "purple"}]
:border-radius "10px"}))
#?(:cljs
(def ^:private global* (react-factory Global nil)))
but they do allow one function as a child
#?(:cljs
(defn global-style
"Takes a cljs vector or hashmap of styles and converts to JS types before calling emotion's Global function.
Inserts styles into the DOM that target global elements and classes."
[props]
(global* {:styles (camelize-keys props)})))
(comment
(global-style {:body {:background "#cce" "@media (min-width:700px)" {:background "white"}}})
(global-style
[(clj->js {:body {:font-family "serif"}}) {:body {:border "2px solid yellOW"}} {:body {:background-color "#ecccee"}}])
(defn my-globals [props]
(global-style
{:body {:background-color "red"}})))
#?(:cljs
(defn theme-provider
"Takes a hashmap of a style theme and react children to render with that theme in the React Context
using emotion's ThemeProvider."
[props & children]
(when-not (contains? props :theme)
(throw (js/Error. "You must pass a :theme to the theme-provider.")))
(apply react/createElement ThemeProvider
(clj->js props)
(force-children children))))
#?(:clj
(defn ^:private css-body [props]
`(do
(cljs.core/clj->js
(assoc ~props :css
(walk/postwalk
(wrap-call-style-fn true)
(:css ~props)))))))
#?(:clj
(defmacro jsx
([el props]
(let [el (cond-> el (keyword? el) name)
css-props (css-body props)]
`(jsx* ~el ~css-props)))
([el props & children]
(let [el (cond-> el (keyword? el) name)
css-props (css-body props)]
`(jsx* ~el ~css-props ~@children)))))
#?(:cljs
(defn convert-css
"Takes a hashmap or vector of styles and converts to JS types, will pass any functions cljs data structures."
[css]
(clj->js (walk/postwalk (wrap-call-style-fn true) css))))
#?(:cljs
(defn css [& css]
(styled-core/css (clj->js (walk/postwalk (wrap-call-style-fn true) css)))))
|
5709e280220086869272254c3c58fc35a327569c5fe337c6fd4f4931c55c5f06 | kaznum/programming_in_ocaml_exercise | sample.ml | List.fold_left;;
ListLabels.fold_left;;
ListLabels.fold_left ~f:(fun x y -> x + y) ~init:0 [1;2;3;4];;
ListLabels.fold_left ~init:0 ~f:(fun x y -> x + y) [1;2;3;4];;
type ('a, 'b) foldarg = {f: 'a -> 'b -> 'a; init: 'a};;
let rec fold_left' {f=f; init=init} = function
[] -> init
| a::rest -> fold_left' { f = f; init = f init a } rest;;
fold_left' { init = 0; f = (fun x y -> x + y)} [1;2;3;4];;
let g = ListLabels.fold_left ~init:0;;
g ~f:(fun x y -> x + y) [1;2;3;4];;
ListLabels.fold_left ~f:(fun x y -> x + y);;
(* definition *)
let rec fold_left ~f:func ~init:e = function
[] -> e
| a::rest -> fold_left ~f:func ~init:(func e a) rest;;
fold_left ~f:(fun x y -> x + y) ~init: 0 [1;2;3;4];;
let rec fold_left ~f ~init = function
[] -> init
| a::rest -> fold_left ~f:f ~init:(f init a) rest;;
fold_left ~f:(fun x y -> x + y) ~init: 0 [1;2;3;4];;
(* duplicated labels *)
let test ~a:x ~a:y = x - y;;
(* error *)
let empty_label ~:x = x;;
let foo ~(x:int) = x;;
ListLabels.map;;
ListLabels.map (fun x -> x + 1) [1;2;3;4];;
let k ~const ~ignored = const;;
let k' = k 1 2;;
k' ~const:(fun x y -> x) ~ignored:"hoge";;
let apply f arg1 arg2 = f ~arg1 ~arg2;;
apply (fun ~arg1 ~arg2 -> arg1 * arg2 + 1) 4 7;;
apply (fun ~arg1 ~arg2 -> arg2 * arg1 + 1) 4 7;;
(* error *)
apply (fun ~arg2 ~arg1 -> arg1 * arg2 + 1) 4 7;;
let apply_to_one f = f 1;;
apply_to_one (fun ~x -> x + x);;
(* optional arguments *)
let rec seq from ?step:(s=1) n =
if n <= 0 then [] else from :: seq(from + s) ~step:s (n - 1);;
seq 2 5;;
seq 2 ~step:2 5;;
let rec seq from ?(step=1) n =
if n <= 0 then [] else from :: seq(from + step) ~step:step (n - 1);;
seq 2 ~step:2 5;;
seq 3;;
(* error *)
seq 5 2 3;;
let f ?(x=1) ~y = x + y;;
f ~y:3;;
seq 1 10 ~step:4;;
(* error *)
let f = seq 1 10 in f ~step:4;;
(seq 1 10) ~step:4;;
let rec seq from ?step n =
match step with
None -> if n <= 0 then [] else from :: seq (from + 1) (n-1)
| Some s ->
if n <= 0 then [] else from :: seq ( from + s ) ~step:s (n-1);;
seq 1 10 ~step:4;;
seq 1 10;;
let rec seq from ?step n =
let s = match step with None -> 1 | Some s -> s in
if n <= 0 then [] else from :: seq (from + s) ~step:s (n - 1);;
let rec seq from ?step n =
let s = match step with None -> 1 | Some s -> s in
if n <= 0 then [] else from :: seq (from + s) ?step (n - 1);;
(* error *)
let test f = f 10 ~step:2 4;;
test seq;;
(* succeed *)
let test (f : int -> ?step:int -> int -> 'a list) = f 10 ~step:2 4;;
test seq;;
let test' f = f 10 4;;
let g ?(x=4) y z = x + y + z;;
test' g;;
(* error *)
test' seq;;
test' (fun y z -> g ?x:None y z);;
test' (fun y z -> seq 10 ?step:None 4);;
| null | https://raw.githubusercontent.com/kaznum/programming_in_ocaml_exercise/6f6a5d62a7a87a1c93561db88f08ae4e445b7d4e/ch13/sample.ml | ocaml | definition
duplicated labels
error
error
optional arguments
error
error
error
succeed
error | List.fold_left;;
ListLabels.fold_left;;
ListLabels.fold_left ~f:(fun x y -> x + y) ~init:0 [1;2;3;4];;
ListLabels.fold_left ~init:0 ~f:(fun x y -> x + y) [1;2;3;4];;
type ('a, 'b) foldarg = {f: 'a -> 'b -> 'a; init: 'a};;
let rec fold_left' {f=f; init=init} = function
[] -> init
| a::rest -> fold_left' { f = f; init = f init a } rest;;
fold_left' { init = 0; f = (fun x y -> x + y)} [1;2;3;4];;
let g = ListLabels.fold_left ~init:0;;
g ~f:(fun x y -> x + y) [1;2;3;4];;
ListLabels.fold_left ~f:(fun x y -> x + y);;
let rec fold_left ~f:func ~init:e = function
[] -> e
| a::rest -> fold_left ~f:func ~init:(func e a) rest;;
fold_left ~f:(fun x y -> x + y) ~init: 0 [1;2;3;4];;
let rec fold_left ~f ~init = function
[] -> init
| a::rest -> fold_left ~f:f ~init:(f init a) rest;;
fold_left ~f:(fun x y -> x + y) ~init: 0 [1;2;3;4];;
let test ~a:x ~a:y = x - y;;
let empty_label ~:x = x;;
let foo ~(x:int) = x;;
ListLabels.map;;
ListLabels.map (fun x -> x + 1) [1;2;3;4];;
let k ~const ~ignored = const;;
let k' = k 1 2;;
k' ~const:(fun x y -> x) ~ignored:"hoge";;
let apply f arg1 arg2 = f ~arg1 ~arg2;;
apply (fun ~arg1 ~arg2 -> arg1 * arg2 + 1) 4 7;;
apply (fun ~arg1 ~arg2 -> arg2 * arg1 + 1) 4 7;;
apply (fun ~arg2 ~arg1 -> arg1 * arg2 + 1) 4 7;;
let apply_to_one f = f 1;;
apply_to_one (fun ~x -> x + x);;
let rec seq from ?step:(s=1) n =
if n <= 0 then [] else from :: seq(from + s) ~step:s (n - 1);;
seq 2 5;;
seq 2 ~step:2 5;;
let rec seq from ?(step=1) n =
if n <= 0 then [] else from :: seq(from + step) ~step:step (n - 1);;
seq 2 ~step:2 5;;
seq 3;;
seq 5 2 3;;
let f ?(x=1) ~y = x + y;;
f ~y:3;;
seq 1 10 ~step:4;;
let f = seq 1 10 in f ~step:4;;
(seq 1 10) ~step:4;;
let rec seq from ?step n =
match step with
None -> if n <= 0 then [] else from :: seq (from + 1) (n-1)
| Some s ->
if n <= 0 then [] else from :: seq ( from + s ) ~step:s (n-1);;
seq 1 10 ~step:4;;
seq 1 10;;
let rec seq from ?step n =
let s = match step with None -> 1 | Some s -> s in
if n <= 0 then [] else from :: seq (from + s) ~step:s (n - 1);;
let rec seq from ?step n =
let s = match step with None -> 1 | Some s -> s in
if n <= 0 then [] else from :: seq (from + s) ?step (n - 1);;
let test f = f 10 ~step:2 4;;
test seq;;
let test (f : int -> ?step:int -> int -> 'a list) = f 10 ~step:2 4;;
test seq;;
let test' f = f 10 4;;
let g ?(x=4) y z = x + y + z;;
test' g;;
test' seq;;
test' (fun y z -> g ?x:None y z);;
test' (fun y z -> seq 10 ?step:None 4);;
|
80b8fc5e8428e422b8dd3af0ba93fd35718a04f3f929dbfe9c3755393d2bc01a | stritzinger/opcua | opcua_nodeset.erl | -module(opcua_nodeset).
Original NodeSet from -Nodeset/tree/v1.04/Schema
%%% INCLUDES %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-include_lib("kernel/include/logger.hrl").
-include_lib("xmerl/include/xmerl.hrl").
-include("opcua.hrl").
-include("opcua_internal.hrl").
%%% EXPORTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Startup Functions
-export([start_link/1]).
%% API Functions
-export([attributes/0]).
-export([attribute_name/1]).
-export([attribute_type/1]).
-export([attribute_id/1]).
-export([status/1]).
-export([status_code/1]).
-export([status_name/1, status_name/2]).
-export([is_status/1]).
-export([data_type/1]).
-export([type_descriptor/2]).
-export([schema/1]).
-export([namespace_uri/1]).
-export([namespace_id/1]).
-export([namespaces/0]).
-export([is_subtype/2]).
-export([node/1]).
-export([references/1, references/2]).
%% Behaviour gen_server callback functions
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
TYPES % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
-record(state, {
}).
%%% STARTUP FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
start_link(BaseDir) ->
gen_server:start_link(?MODULE, [BaseDir], []).
%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
attributes() ->
maps:values(persistent_term:get({?MODULE, attributes})).
attribute_name(Attr) ->
{_, Name} = persistent_term:get({?MODULE, attribute, Attr}),
Name.
%TODO: Why would we load the names from the specs but we would hardcode the types ???
attribute_type(node_id) -> node_id;
attribute_type(node_class) -> ?NNID(257);
attribute_type(browse_name) -> qualified_name;
attribute_type(display_name) -> localized_text;
attribute_type(description) -> localized_text;
attribute_type(write_mask) -> ?NNID(347);
attribute_type(user_write_mask) -> ?NNID(347);
attribute_type(is_abstract) -> boolean;
attribute_type(symmetric) -> boolean;
attribute_type(inverse_name) -> boolean;
attribute_type(contains_no_loops) -> boolean;
attribute_type(event_notifier) -> byte_string;
attribute_type(value) -> variant;
attribute_type(data_type) -> node_id;
attribute_type(value_rank) -> int32;
attribute_type(array_dimensions) -> uint32;
attribute_type(access_level) -> ?NNID(15031);
attribute_type(user_access_level) -> ?NNID(15031);
attribute_type(minimum_sampling_interval) -> double;
attribute_type(historizing) -> boolean;
attribute_type(executable) -> boolean;
attribute_type(user_executable) -> boolean;
attribute_type(data_type_definition) -> ?NNID(97);
attribute_type(role_permissions) -> ?NNID(96); % It is a list
attribute_type(user_role_permissions) -> ?NNID(96); % It is a list
attribute_type(access_restrictions) -> ?NNID(95); % It is a list
attribute_type(access_level_ex) -> ?NNID(95); % It is a list
attribute_type(_Attr) -> error(bad_attribute_id_invalid).
attribute_id(Attr) ->
{Id, _} = persistent_term:get({?MODULE, attribute, Attr}),
Id.
status(Status) ->
persistent_term:get({?MODULE, status, Status}).
status_name(Status) ->
{_, Name, _} = persistent_term:get({?MODULE, status, Status}),
Name.
status_name(Status, Default) ->
{_, Name, _} = persistent_term:get({?MODULE, status, Status},
{undefined, Default, undefined}),
Name.
status_code(Status) ->
{Code, _, _} = persistent_term:get({?MODULE, status, Status}),
Code.
is_status(Status) ->
try persistent_term:get({?MODULE, Status}) of
{_, _, _} -> true
catch error:badarg -> false
end.
data_type(TypeDescriptorNodeSpec) ->
opcua_space_backend:data_type(?MODULE, TypeDescriptorNodeSpec).
type_descriptor(DataTypeNodeSpec, Encoding) ->
opcua_space_backend:type_descriptor(?MODULE, DataTypeNodeSpec, Encoding).
schema(NodeSpec) ->
opcua_space_backend:schema(?MODULE, NodeSpec).
namespace_uri(Id) ->
opcua_space_backend:namespace_uri(?MODULE, Id).
namespace_id(Uri) ->
opcua_space_backend:namespace_id(?MODULE, Uri).
namespaces() ->
opcua_space_backend:namespaces(?MODULE).
is_subtype(SubTypeSpec, SuperTypeSpec) ->
opcua_space_backend:is_subtype(?MODULE, SubTypeSpec, SuperTypeSpec).
node(NodeSpec) ->
opcua_space_backend:node(?MODULE, NodeSpec).
references(OriginNodeSpec) ->
opcua_space_backend:references(?MODULE, OriginNodeSpec, #{}).
references(OriginNodeSpec, Opts) ->
opcua_space_backend:references(?MODULE, OriginNodeSpec, Opts).
%%% BEHAVIOUR gen_server CALLBACK FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(undefined) ->
?LOG_DEBUG("OPCUA nodeset process starting empty", []),
opcua_space_backend:init(?MODULE),
{ok, #state{}};
init(BaseDir) ->
?LOG_DEBUG("OPCUA nodeset process starting, loading nodeset from ~s", [BaseDir]),
opcua_space_backend:init(?MODULE),
?LOG_INFO("Loading OPCUA attributes mapping..."),
load_attributes(BaseDir),
?LOG_INFO("Loading OPCUA status code mapping..."),
load_status(BaseDir),
?LOG_INFO("Loading OPCUA address space..."),
%FIXME: use real space persistence instead of this temporary backward compatible hack
load_namespaces(BaseDir),
load_nodes(BaseDir),
load_references(BaseDir),
load_datatypes(BaseDir),
load_encodings(BaseDir),
{ok, #state{}}.
handle_call(Req, From, State) ->
?LOG_WARNING("Unexpected gen_server call from ~p: ~p", [From, Req]),
{reply, {error, unexpected_call}, State}.
handle_cast(Req, State) ->
?LOG_WARNING("Unexpected gen_server cast: ~p", [Req]),
{noreply, State}.
handle_info(Msg, State) ->
?LOG_WARNING("Unexpected gen_server message: ~p", [Msg]),
{noreply, State}.
terminate(Reason, _State) ->
?LOG_DEBUG("OPCUA nodeset process terminating: ~p", [Reason]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
load_attributes(BaseDir) ->
load_all_terms(BaseDir, "attributes", fun store_attribute/1).
load_status(BaseDir) ->
load_all_terms(BaseDir, "status", fun store_status/1).
load_namespaces(BaseDir) ->
load_all_terms(BaseDir, "namespaces", fun store_namespace/1).
load_nodes(BaseDir) ->
load_all_terms(BaseDir, "nodes", fun store_node/1).
load_references(BaseDir) ->
load_all_terms(BaseDir, "references", fun store_reference/1).
load_datatypes(BaseDir) ->
load_all_terms(BaseDir, "datatypes", fun store_datatype/1).
load_encodings(BaseDir) ->
load_all_terms(BaseDir, "encodings", fun store_encoding/1).
load_all_terms(BaseDir, Tag, Fun) ->
Pattern = filename:join(BaseDir, "**/*." ++ Tag ++ ".bterm"),
NoAccCB = fun(V, C) ->
Fun(V),
case C rem 500 of
0 ->
?LOG_DEBUG("Loaded ~w ~s; memory: ~.3f MB",
[C, Tag, erlang:memory(total)/(1024*1024)]);
_ -> ok
end,
C + 1
end,
NoAccFun = fun(F, C) -> opcua_util_bterm:fold(F, NoAccCB, C) end,
Count = lists:foldl(NoAccFun, 0, filelib:wildcard(Pattern)),
?LOG_DEBUG("Loaded ~w ~s terms", [Count, Tag]),
ok.
store_attribute({Id, Name} = Spec) when is_integer(Id), is_atom(Name) ->
Attributes = persistent_term:get({?MODULE, attributes}, #{}),
Attributes2 = Attributes#{Id => Name},
persistent_term:put({?MODULE, attributes}, Attributes2),
persistent_term:put({?MODULE, attribute, Id}, Spec),
persistent_term:put({?MODULE, attribute, Name}, Spec),
ok.
store_status({Code, Name, Desc} = Spec)
when is_integer(Code), is_atom(Name), is_binary(Desc) ->
persistent_term:put({?MODULE, status, Code}, Spec),
persistent_term:put({?MODULE, status, Name}, Spec),
ok.
store_datatype({Keys, DataType}) ->
%FIXME: Temporary backward compatible hack
KeyValuePairs = [{datatype, {Key, DataType}} || Key <- Keys],
lists:foreach(fun(Item) ->
opcua_space_backend:store(?MODULE, Item)
end, KeyValuePairs),
ok.
store_namespace({_Id, _Uri} = Spec) ->
%FIXME: Temporary backward compatible hack
opcua_space_backend:store(?MODULE, {namespace, Spec}),
ok.
store_encoding({DescId, {TypeId, Encoding}}) ->
%FIXME: Temporary backward compatible hack
opcua_space_backend:store(?MODULE, {encoding, {DescId, {TypeId, Encoding}}}),
ok.
store_node(#opcua_node{} = Node) ->
%FIXME: Temporary backward compatible hack
opcua_space_backend:add_nodes(?MODULE, [Node]),
ok.
store_reference(#opcua_reference{} = Reference) ->
%FIXME: Temporary backward compatible hack
opcua_space_backend:add_references(?MODULE, [Reference]),
ok.
| null | https://raw.githubusercontent.com/stritzinger/opcua/a9802f829f80e6961871653f4d3c932f9496ba99/src/opcua_nodeset.erl | erlang | INCLUDES %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
EXPORTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Startup Functions
API Functions
Behaviour gen_server callback functions
% % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
STARTUP FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
TODO: Why would we load the names from the specs but we would hardcode the types ???
It is a list
It is a list
It is a list
It is a list
BEHAVIOUR gen_server CALLBACK FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
FIXME: use real space persistence instead of this temporary backward compatible hack
INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
FIXME: Temporary backward compatible hack
FIXME: Temporary backward compatible hack
FIXME: Temporary backward compatible hack
FIXME: Temporary backward compatible hack
FIXME: Temporary backward compatible hack | -module(opcua_nodeset).
Original NodeSet from -Nodeset/tree/v1.04/Schema
-include_lib("kernel/include/logger.hrl").
-include_lib("xmerl/include/xmerl.hrl").
-include("opcua.hrl").
-include("opcua_internal.hrl").
-export([start_link/1]).
-export([attributes/0]).
-export([attribute_name/1]).
-export([attribute_type/1]).
-export([attribute_id/1]).
-export([status/1]).
-export([status_code/1]).
-export([status_name/1, status_name/2]).
-export([is_status/1]).
-export([data_type/1]).
-export([type_descriptor/2]).
-export([schema/1]).
-export([namespace_uri/1]).
-export([namespace_id/1]).
-export([namespaces/0]).
-export([is_subtype/2]).
-export([node/1]).
-export([references/1, references/2]).
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
-record(state, {
}).
start_link(BaseDir) ->
gen_server:start_link(?MODULE, [BaseDir], []).
attributes() ->
maps:values(persistent_term:get({?MODULE, attributes})).
attribute_name(Attr) ->
{_, Name} = persistent_term:get({?MODULE, attribute, Attr}),
Name.
attribute_type(node_id) -> node_id;
attribute_type(node_class) -> ?NNID(257);
attribute_type(browse_name) -> qualified_name;
attribute_type(display_name) -> localized_text;
attribute_type(description) -> localized_text;
attribute_type(write_mask) -> ?NNID(347);
attribute_type(user_write_mask) -> ?NNID(347);
attribute_type(is_abstract) -> boolean;
attribute_type(symmetric) -> boolean;
attribute_type(inverse_name) -> boolean;
attribute_type(contains_no_loops) -> boolean;
attribute_type(event_notifier) -> byte_string;
attribute_type(value) -> variant;
attribute_type(data_type) -> node_id;
attribute_type(value_rank) -> int32;
attribute_type(array_dimensions) -> uint32;
attribute_type(access_level) -> ?NNID(15031);
attribute_type(user_access_level) -> ?NNID(15031);
attribute_type(minimum_sampling_interval) -> double;
attribute_type(historizing) -> boolean;
attribute_type(executable) -> boolean;
attribute_type(user_executable) -> boolean;
attribute_type(data_type_definition) -> ?NNID(97);
attribute_type(_Attr) -> error(bad_attribute_id_invalid).
attribute_id(Attr) ->
{Id, _} = persistent_term:get({?MODULE, attribute, Attr}),
Id.
status(Status) ->
persistent_term:get({?MODULE, status, Status}).
status_name(Status) ->
{_, Name, _} = persistent_term:get({?MODULE, status, Status}),
Name.
status_name(Status, Default) ->
{_, Name, _} = persistent_term:get({?MODULE, status, Status},
{undefined, Default, undefined}),
Name.
status_code(Status) ->
{Code, _, _} = persistent_term:get({?MODULE, status, Status}),
Code.
is_status(Status) ->
try persistent_term:get({?MODULE, Status}) of
{_, _, _} -> true
catch error:badarg -> false
end.
data_type(TypeDescriptorNodeSpec) ->
opcua_space_backend:data_type(?MODULE, TypeDescriptorNodeSpec).
type_descriptor(DataTypeNodeSpec, Encoding) ->
opcua_space_backend:type_descriptor(?MODULE, DataTypeNodeSpec, Encoding).
schema(NodeSpec) ->
opcua_space_backend:schema(?MODULE, NodeSpec).
namespace_uri(Id) ->
opcua_space_backend:namespace_uri(?MODULE, Id).
namespace_id(Uri) ->
opcua_space_backend:namespace_id(?MODULE, Uri).
namespaces() ->
opcua_space_backend:namespaces(?MODULE).
is_subtype(SubTypeSpec, SuperTypeSpec) ->
opcua_space_backend:is_subtype(?MODULE, SubTypeSpec, SuperTypeSpec).
node(NodeSpec) ->
opcua_space_backend:node(?MODULE, NodeSpec).
references(OriginNodeSpec) ->
opcua_space_backend:references(?MODULE, OriginNodeSpec, #{}).
references(OriginNodeSpec, Opts) ->
opcua_space_backend:references(?MODULE, OriginNodeSpec, Opts).
init(undefined) ->
?LOG_DEBUG("OPCUA nodeset process starting empty", []),
opcua_space_backend:init(?MODULE),
{ok, #state{}};
init(BaseDir) ->
?LOG_DEBUG("OPCUA nodeset process starting, loading nodeset from ~s", [BaseDir]),
opcua_space_backend:init(?MODULE),
?LOG_INFO("Loading OPCUA attributes mapping..."),
load_attributes(BaseDir),
?LOG_INFO("Loading OPCUA status code mapping..."),
load_status(BaseDir),
?LOG_INFO("Loading OPCUA address space..."),
load_namespaces(BaseDir),
load_nodes(BaseDir),
load_references(BaseDir),
load_datatypes(BaseDir),
load_encodings(BaseDir),
{ok, #state{}}.
handle_call(Req, From, State) ->
?LOG_WARNING("Unexpected gen_server call from ~p: ~p", [From, Req]),
{reply, {error, unexpected_call}, State}.
handle_cast(Req, State) ->
?LOG_WARNING("Unexpected gen_server cast: ~p", [Req]),
{noreply, State}.
handle_info(Msg, State) ->
?LOG_WARNING("Unexpected gen_server message: ~p", [Msg]),
{noreply, State}.
terminate(Reason, _State) ->
?LOG_DEBUG("OPCUA nodeset process terminating: ~p", [Reason]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
load_attributes(BaseDir) ->
load_all_terms(BaseDir, "attributes", fun store_attribute/1).
load_status(BaseDir) ->
load_all_terms(BaseDir, "status", fun store_status/1).
load_namespaces(BaseDir) ->
load_all_terms(BaseDir, "namespaces", fun store_namespace/1).
load_nodes(BaseDir) ->
load_all_terms(BaseDir, "nodes", fun store_node/1).
load_references(BaseDir) ->
load_all_terms(BaseDir, "references", fun store_reference/1).
load_datatypes(BaseDir) ->
load_all_terms(BaseDir, "datatypes", fun store_datatype/1).
load_encodings(BaseDir) ->
load_all_terms(BaseDir, "encodings", fun store_encoding/1).
load_all_terms(BaseDir, Tag, Fun) ->
Pattern = filename:join(BaseDir, "**/*." ++ Tag ++ ".bterm"),
NoAccCB = fun(V, C) ->
Fun(V),
case C rem 500 of
0 ->
?LOG_DEBUG("Loaded ~w ~s; memory: ~.3f MB",
[C, Tag, erlang:memory(total)/(1024*1024)]);
_ -> ok
end,
C + 1
end,
NoAccFun = fun(F, C) -> opcua_util_bterm:fold(F, NoAccCB, C) end,
Count = lists:foldl(NoAccFun, 0, filelib:wildcard(Pattern)),
?LOG_DEBUG("Loaded ~w ~s terms", [Count, Tag]),
ok.
store_attribute({Id, Name} = Spec) when is_integer(Id), is_atom(Name) ->
Attributes = persistent_term:get({?MODULE, attributes}, #{}),
Attributes2 = Attributes#{Id => Name},
persistent_term:put({?MODULE, attributes}, Attributes2),
persistent_term:put({?MODULE, attribute, Id}, Spec),
persistent_term:put({?MODULE, attribute, Name}, Spec),
ok.
store_status({Code, Name, Desc} = Spec)
when is_integer(Code), is_atom(Name), is_binary(Desc) ->
persistent_term:put({?MODULE, status, Code}, Spec),
persistent_term:put({?MODULE, status, Name}, Spec),
ok.
store_datatype({Keys, DataType}) ->
KeyValuePairs = [{datatype, {Key, DataType}} || Key <- Keys],
lists:foreach(fun(Item) ->
opcua_space_backend:store(?MODULE, Item)
end, KeyValuePairs),
ok.
store_namespace({_Id, _Uri} = Spec) ->
opcua_space_backend:store(?MODULE, {namespace, Spec}),
ok.
store_encoding({DescId, {TypeId, Encoding}}) ->
opcua_space_backend:store(?MODULE, {encoding, {DescId, {TypeId, Encoding}}}),
ok.
store_node(#opcua_node{} = Node) ->
opcua_space_backend:add_nodes(?MODULE, [Node]),
ok.
store_reference(#opcua_reference{} = Reference) ->
opcua_space_backend:add_references(?MODULE, [Reference]),
ok.
|
5d81ec8bad41556dbf60f084d73ad38ae0882f78d1eb90f51a5cf96cd46decab | fraidev/tornado | tcp_server.ml | let listen ~net ~sw ~port =
let addr = `Tcp (Eio.Net.Ipaddr.V4.loopback, port) in
let socket = Eio.Net.listen net ~sw ~reuse_addr:true ~backlog:5 addr in
let flow, addr = Eio.Net.accept ~sw socket in
flow, addr
;;
| null | https://raw.githubusercontent.com/fraidev/tornado/d896911dcfddec49aa3acf53e43c03c7744a2721/test/e2e/tcp_server.ml | ocaml | let listen ~net ~sw ~port =
let addr = `Tcp (Eio.Net.Ipaddr.V4.loopback, port) in
let socket = Eio.Net.listen net ~sw ~reuse_addr:true ~backlog:5 addr in
let flow, addr = Eio.Net.accept ~sw socket in
flow, addr
;;
|
|
14945c6b291580662e224fdc9c5c7cd89baf2d67e3b312891541263783e35047 | ayamada/cac2020 | tween.cljs | (ns cac2020.tween
(:require-macros [cac2020.macro :as m])
(:require [clojure.string :as string]
[cac2020.property :as p :include-macros true]
))
;;; TODO: provide a way to pause a tween
(defn prepare-parameter [start end]
(assert (number? start))
(assert (number? end))
(let [diff (- end start)]
(array ::pp start diff)))
(defn apply-progress [prepared-parameter progress]
(assert (= ::pp (aget prepared-parameter 0)))
(let [start (aget prepared-parameter 1)
diff (aget prepared-parameter 2)]
(+ start (* progress diff))))
(def pp prepare-parameter)
(def ap apply-progress)
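;; A worked example of the two helpers above:
(comment
  (let [param (pp 0 100)] ;; start 0, end 100
    [(ap param 0.0) (ap param 0.5) (ap param 1.0)])
  ;; => [0 50 100]
  )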
;;; a tween simply moves target-obj a little on every frame, nothing more
(defonce ^js all-tween-entries (array))
;;; NB: call this when the whole tree gets destroyed, e.g. by hot reloading
(defn clear! [] (set! (.-length all-tween-entries) 0))
(defn tick! [delta-frames]
(m/dotimes-backward [i (alength all-tween-entries)]
(let [^cljs entry (aget all-tween-entries i)
target-obj (.-target-obj entry)
now-frames (+ (.-now-frames entry) delta-frames)
ttl-frames (.-ttl-frames entry)
tick-fn (.-tick-fn entry)
progress (max 0 (min (/ now-frames ttl-frames) 1))
done? (= 1 progress)]
(when done?
(.splice all-tween-entries i 1))
(set! (.-now-frames entry) now-frames)
(tick-fn target-obj progress)
(when done?
(when-let [done-fn (.-done-fn entry)]
(done-fn target-obj)))))
nil)
(defn register! [target-obj ttl-frames tick-fn & [done-fn]]
(assert (instance? js/Object target-obj))
;; TODO: undecided how to handle this; left out for now. The requirement "call register! repeatedly but keep only one tween active" is common, so work out a rule for it later
;(m/dotimes-backward [i (alength all-tween-entries)]
; (let [^cljs entry (aget all-tween-entries i)
; o (.-target-obj entry)]
; (when (= target-obj o)
; (.splice all-tween-entries i 1))))
(let [^cljs entry (js-obj)]
(set! (.-target-obj entry) target-obj)
(set! (.-now-frames entry) 0)
(set! (.-ttl-frames entry) ttl-frames)
(set! (.-tick-fn entry) tick-fn)
(set! (.-done-fn entry) done-fn)
(.push all-tween-entries entry)
(tick-fn target-obj 0)
nil))
(defn wait! [target-obj ttl-frames cont]
(register! target-obj
ttl-frames
(fn [o progress] nil)
cont))
(defn change-alpha! [^js target-obj ttl-frames new-alpha & [cont]]
(let [pp-alpha (pp (.-alpha target-obj) new-alpha)]
(register! target-obj
ttl-frames
(fn [o progress]
(set! (.-alpha o) (ap pp-alpha progress)))
cont)))
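;; A usage sketch — `sprite` stands in for any display object with an .alpha
;; property (an assumption, not something defined in this file):
(comment
  ;; fade out over 30 frames, then log completion
  (change-alpha! sprite 30 0
                 (fn [o] (js/console.log "fade done" o)))
  )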
(defn vibrate!
[^js target-obj ttl-frames power-start & [power-end cont only-x? only-y?]]
(let [pp-power (pp power-start (or power-end 0))
total (alength all-tween-entries)]
(loop [i (dec total)]
(if-not (neg? i)
(let [^cljs entry (aget all-tween-entries i)]
(when-not (= target-obj (.-target-obj entry))
(recur (dec i))))
(let [orig-x (.-x target-obj)
orig-y (.-y target-obj)
h (fn [^js o progress]
(let [p (ap pp-power progress)
r-p (inc (* 2 p))]
(when-not only-y?
(set! (.-x o) (+ orig-x (- (rand r-p) p))))
(when-not only-x?
(set! (.-y o) (+ orig-y (- (rand r-p) p))))
nil))
done-h (fn [^js o]
(set! (.-x o) orig-x)
(set! (.-y o) orig-y)
(when cont
(cont o)))]
(register! target-obj ttl-frames h done-h))))))
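;; A usage sketch — `camera` is an assumed display object:
(comment
  ;; shake for 20 frames, starting at strength 8 and decaying to 0
  (vibrate! camera 20 8)
  )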
| null | https://raw.githubusercontent.com/ayamada/cac2020/99d249ce9b9b0e2cf856ddeaea22796ce60d4fa7/src/main/cac2020/tween.cljs | clojure | TODO: tweenを一時停止する機能を提供
tweenは純粋に毎フレームでtarget-objを動かすだけ
NB: ホットリロード等でツリー全体がdestroyされた場合はこれを呼ぶ事
TODO: これをどうするかは悩むところ。今回はなしで。しかし「何度もregister!するが、有効なのは一個だけにしたい」要件は普通にあるので、あとでルールを考える事
o (.-target-obj entry)]
(when (= target-obj o)
(.splice all-tween-entries i 1)))) | (ns cac2020.tween
(:require-macros [cac2020.macro :as m])
(:require [clojure.string :as string]
[cac2020.property :as p :include-macros true]
))
(defn prepare-parameter [start end]
(assert (number? start))
(assert (number? end))
(let [diff (- end start)]
(array ::pp start diff)))
(defn apply-progress [prepared-parameter progress]
(assert (= ::pp (aget prepared-parameter 0)))
(let [start (aget prepared-parameter 1)
diff (aget prepared-parameter 2)]
(+ start (* progress diff))))
(def pp prepare-parameter)
(def ap apply-progress)
(defonce ^js all-tween-entries (array))
(defn clear! [] (set! (.-length all-tween-entries) 0))
(defn tick! [delta-frames]
(m/dotimes-backward [i (alength all-tween-entries)]
(let [^cljs entry (aget all-tween-entries i)
target-obj (.-target-obj entry)
now-frames (+ (.-now-frames entry) delta-frames)
ttl-frames (.-ttl-frames entry)
tick-fn (.-tick-fn entry)
progress (max 0 (min (/ now-frames ttl-frames) 1))
done? (= 1 progress)]
(when done?
(.splice all-tween-entries i 1))
(set! (.-now-frames entry) now-frames)
(tick-fn target-obj progress)
(when done?
(when-let [done-fn (.-done-fn entry)]
(done-fn target-obj)))))
nil)
(defn register! [target-obj ttl-frames tick-fn & [done-fn]]
(assert (instance? js/Object target-obj))
( m / dotimes - backward [ i ( all - tween - entries ) ]
( let [ ^cljs entry ( aget all - tween - entries i )
(let [^cljs entry (js-obj)]
(set! (.-target-obj entry) target-obj)
(set! (.-now-frames entry) 0)
(set! (.-ttl-frames entry) ttl-frames)
(set! (.-tick-fn entry) tick-fn)
(set! (.-done-fn entry) done-fn)
(.push all-tween-entries entry)
(tick-fn target-obj 0)
nil))
(defn wait! [target-obj ttl-frames cont]
(register! target-obj
ttl-frames
(fn [o progress] nil)
cont))
(defn change-alpha! [^js target-obj ttl-frames new-alpha & [cont]]
(let [pp-alpha (pp (.-alpha target-obj) new-alpha)]
(register! target-obj
ttl-frames
(fn [o progress]
(set! (.-alpha o) (ap pp-alpha progress)))
cont)))
(defn vibrate!
[^js target-obj ttl-frames power-start & [power-end cont only-x? only-y?]]
(let [pp-power (pp power-start (or power-end 0))
total (alength all-tween-entries)]
(loop [i (dec total)]
(if-not (neg? i)
(let [^cljs entry (aget all-tween-entries i)]
(when-not (= target-obj (.-target-obj entry))
(recur (dec i))))
(let [orig-x (.-x target-obj)
orig-y (.-y target-obj)
h (fn [^js o progress]
(let [p (ap pp-power progress)
r-p (inc (* 2 p))]
(when-not only-y?
(set! (.-x o) (+ orig-x (- (rand r-p) p))))
(when-not only-x?
(set! (.-y o) (+ orig-y (- (rand r-p) p))))
nil))
done-h (fn [^js o]
(set! (.-x o) orig-x)
(set! (.-y o) orig-y)
(when cont
(cont o)))]
(register! target-obj ttl-frames h done-h))))))
|
36fc290a61a80220ea94d5e65a98f8a4e90511d21e3434457891fc1ad224e7ca | hyperledger-labs/fabric-chaincode-haskell | Helper.hs | {-# LANGUAGE OverloadedStrings #-}
module Helper where
import Common.Common as Pb
import Data.Bifunctor ( first )
import Peer.Chaincode as Pb
import Peer.ChaincodeShim as Pb
import Peer.Proposal as Pb
import Peer.ProposalResponse as Pb
import Proto3.Suite as Suite
import Types ( ChaincodeStub(..), Error(..), MapTextBytes )
-- These are some helper functions to process the unmarshalling of different types
-- from the chaincode message in order to populate the stub
getChaincodeInput :: ChaincodeMessage -> Either Error Pb.ChaincodeInput
getChaincodeInput mes = first DecodeError $ Suite.fromByteString (chaincodeMessagePayload mes)
getProposal :: Pb.SignedProposal -> Either Error Pb.Proposal
getProposal signedProposal = first DecodeError $ Suite.fromByteString (signedProposalProposalBytes signedProposal)
getHeader :: Pb.Proposal -> Either Error Pb.Header
getHeader proposal = first DecodeError $ Suite.fromByteString (proposalHeader proposal)
getChannelHeader :: Pb.Header -> Either Error Pb.ChannelHeader
getChannelHeader header = first DecodeError $ Suite.fromByteString (headerChannelHeader header)
getChaincodeProposalPayload :: Pb.Proposal -> Either Error Pb.ChaincodeProposalPayload
getChaincodeProposalPayload proposal = first DecodeError $ Suite.fromByteString (proposalPayload proposal)
getSignatureHeader :: Pb.Header -> Either Error Pb.SignatureHeader
getSignatureHeader header = first DecodeError $ Suite.fromByteString (headerSignatureHeader header)
-- TODO : Use ChannelHeader and SignatureHeader to implement getBinding
createBinding :: Pb.Proposal -> Maybe MapTextBytes
createBinding _ = Nothing
| null | https://raw.githubusercontent.com/hyperledger-labs/fabric-chaincode-haskell/9594e82371a9a805ad812e38fd58e063df5ca4e7/src/Helper.hs | haskell | # LANGUAGE OverloadedStrings #
These are some helper functions to process the unmarshalling of different types
from the chaincode message in order to populate the stub
TODO : Use ChannelHeader and SignatureHeader to implement getBinding |
module Helper where
import Common.Common as Pb
import Data.Bifunctor ( first )
import Peer.Chaincode as Pb
import Peer.ChaincodeShim as Pb
import Peer.Proposal as Pb
import Peer.ProposalResponse as Pb
import Proto3.Suite as Suite
import Types ( ChaincodeStub(..), Error(..), MapTextBytes )
getChaincodeInput :: ChaincodeMessage -> Either Error Pb.ChaincodeInput
getChaincodeInput mes = first DecodeError $ Suite.fromByteString (chaincodeMessagePayload mes)
getProposal :: Pb.SignedProposal -> Either Error Pb.Proposal
getProposal signedProposal = first DecodeError $ Suite.fromByteString (signedProposalProposalBytes signedProposal)
getHeader :: Pb.Proposal -> Either Error Pb.Header
getHeader proposal = first DecodeError $ Suite.fromByteString (proposalHeader proposal)
getChannelHeader :: Pb.Header -> Either Error Pb.ChannelHeader
getChannelHeader header = first DecodeError $ Suite.fromByteString (headerChannelHeader header)
getChaincodeProposalPayload :: Pb.Proposal -> Either Error Pb.ChaincodeProposalPayload
getChaincodeProposalPayload proposal = first DecodeError $ Suite.fromByteString (proposalPayload proposal)
getSignatureHeader :: Pb.Header -> Either Error Pb.SignatureHeader
getSignatureHeader header = first DecodeError $ Suite.fromByteString (headerSignatureHeader header)
createBinding :: Pb.Proposal -> Maybe MapTextBytes
createBinding _ = Nothing
|
5cc6bef9ba06133d39636af4b4579ab9afd0e6183f28e850d9cc96231b0a0092 | 0install/0install | gui.mli | Copyright ( C ) 2013 , the README file for details , or visit .
* See the README file for details, or visit .
*)
(** Manage the GUI sub-process. *)
type feed_description = {
times : (string * float) list;
summary : string option;
description : string list;
homepages : string list;
signatures : [
| `Valid of Support.Gpg.fingerprint * Support.Gpg.timestamp * string option * [`Trusted | `Not_trusted]
| `Invalid of string
] list;
}
(** The GUI plugin registers itself here. *)
val register_plugin : (General.config -> Ui.ui_handler option) -> unit
val download_icon : Fetch.fetcher -> Feed_provider.feed_provider -> Feed_url.non_distro_feed -> unit Lwt.t
(** Should we use the GUI?
* The input says what the user requested:
* No -> we never want to use the GUI
* Yes -> we always want to use the GUI, and throw an exception if it's not available
* Auto -> we want to use the GUI iff it's available
*
* Returns a suitable GUI handler if so, or None if we should use a non-GUI handler.
*)
val try_get_gui : General.config -> use_gui:[< `Yes | `No | `Auto] -> Ui.ui_handler option
(** Download the feed and add it as an extra feed of the interface. *)
val add_remote_feed :
General.config ->
Fetch.fetcher ->
Sigs.iface_uri -> Feed_url.remote_feed -> unit Lwt.t
(** Add a local feed to an interface. *)
val add_feed : General.config -> Sigs.iface_uri -> Feed_url.local_feed -> unit
val remove_feed : General.config -> Sigs.iface_uri -> Feed_url.non_distro_feed -> unit
val compile : General.config -> Feed_provider.feed_provider -> Sigs.iface_uri -> autocompile:bool -> unit Lwt.t
(** Try to guess whether we have source for this interface.
* Returns true if we have any source-only feeds, or any source implementations
* in our regular feeds. However, we don't look inside the source feeds (so a
* source feed containing no implementations will still count as true).
* This is used in the GUI to decide whether to shade the Compile button.
*)
val have_source_for : Feed_provider.feed_provider -> Sigs.iface_uri -> bool
(** List the implementations of this interface in the order they should be shown in the GUI.
* @return (selected_version, implementations). *)
val list_impls : Solver.Output.t -> Solver.role ->
(Impl.generic_implementation option * (Impl.generic_implementation * Impl_provider.rejection_reason option) list)
(* Returns (fetch-size, fetch-tooltip) *)
val get_fetch_info : General.config -> Impl.generic_implementation -> (string * string)
(** Get the initial text for the bug report dialog box. *)
val get_bug_report_details : General.config -> role:Solver.role -> (bool * Solver.Output.t) -> string
(** Submit a bug report for this interface.
* @return the response from the server (on success).
* @raise Safe_exn.T on failure. *)
val send_bug_report : Sigs.iface_uri -> string -> string Lwt.t
val run_test : General.config -> Distro.t -> (Selections.t -> string Lwt.t) -> (bool * Solver.Output.t) -> string Lwt.t
val generate_feed_description : General.config -> Trust.trust_db -> Feed.t -> Feed_metadata.t -> feed_description Lwt.t
| null | https://raw.githubusercontent.com/0install/0install/22eebdbe51a9f46cda29eed3e9e02e37e36b2d18/src/zeroinstall/gui.mli | ocaml | * Manage the GUI sub-process.
* The GUI plugin registers itself here.
* Should we use the GUI?
* The input says what the user requested:
* No -> we never want to use the GUI
* Yes -> we always want to use the GUI, and throw an exception if it's not available
* Auto -> we want to use the GUI iff it's available
*
* Returns a suitable GUI handler if so, or None if we should use a non-GUI handler.
* Download the feed and add it as an extra feed of the interface.
* Add a local feed to an interface.
* Try to guess whether we have source for this interface.
* Returns true if we have any source-only feeds, or any source implementations
* in our regular feeds. However, we don't look inside the source feeds (so a
* source feed containing no implementations will still count as true).
* This is used in the GUI to decide whether to shade the Compile button.
* List the implementations of this interface in the order they should be shown in the GUI.
* @return (selected_version, implementations).
Returns (fetch-size, fetch-tooltip)
* Get the initial text for the bug report dialog box.
* Submit a bug report for this interface.
* @return the response from the server (on success).
* @raise Safe_exn.T on failure. | Copyright ( C ) 2013 , the README file for details , or visit .
* See the README file for details, or visit .
*)
type feed_description = {
times : (string * float) list;
summary : string option;
description : string list;
homepages : string list;
signatures : [
| `Valid of Support.Gpg.fingerprint * Support.Gpg.timestamp * string option * [`Trusted | `Not_trusted]
| `Invalid of string
] list;
}
val register_plugin : (General.config -> Ui.ui_handler option) -> unit
val download_icon : Fetch.fetcher -> Feed_provider.feed_provider -> Feed_url.non_distro_feed -> unit Lwt.t
val try_get_gui : General.config -> use_gui:[< `Yes | `No | `Auto] -> Ui.ui_handler option
val add_remote_feed :
General.config ->
Fetch.fetcher ->
Sigs.iface_uri -> Feed_url.remote_feed -> unit Lwt.t
val add_feed : General.config -> Sigs.iface_uri -> Feed_url.local_feed -> unit
val remove_feed : General.config -> Sigs.iface_uri -> Feed_url.non_distro_feed -> unit
val compile : General.config -> Feed_provider.feed_provider -> Sigs.iface_uri -> autocompile:bool -> unit Lwt.t
val have_source_for : Feed_provider.feed_provider -> Sigs.iface_uri -> bool
val list_impls : Solver.Output.t -> Solver.role ->
(Impl.generic_implementation option * (Impl.generic_implementation * Impl_provider.rejection_reason option) list)
val get_fetch_info : General.config -> Impl.generic_implementation -> (string * string)
val get_bug_report_details : General.config -> role:Solver.role -> (bool * Solver.Output.t) -> string
val send_bug_report : Sigs.iface_uri -> string -> string Lwt.t
val run_test : General.config -> Distro.t -> (Selections.t -> string Lwt.t) -> (bool * Solver.Output.t) -> string Lwt.t
val generate_feed_description : General.config -> Trust.trust_db -> Feed.t -> Feed_metadata.t -> feed_description Lwt.t
|
719f4f7c2febd339cfabf8ba3771c680c67316e3d66bdb05ecdc66865e546665 | FPtje/GLuaFixer | LintMain.hs | # LANGUAGE LambdaCase #
module Main where
import "glualint-lib" GLua.AG.PrettyPrint
import "glualint-lib" GLua.Parser
import "glualint-lib" GLua.ASTInstances ()
import "glualint-lib" GLua.LineLimitParser (execParseLineLimits, LineLimit (LineLimit))
import "glualint-lib" GLua.TokenTypes (isWhitespace)
import "glualint-lib" GLuaFixer.AG.ASTLint
import "glualint-lib" GLuaFixer.AnalyseProject
import "glualint-lib" GLuaFixer.LintMessage
import "glualint-lib" GLuaFixer.LintSettings
import "glualint-lib" GLuaFixer.Util
import qualified "glualint-lib" GLua.Lexer as Lex
import Control.Applicative ((<|>), optional)
import Control.Monad (unless, void)
import Data.Functor ((<&>))
import Data.IORef (IORef, atomicWriteIORef, newIORef, readIORef)
import Data.Maybe (fromMaybe, fromJust)
import Options.Applicative ((<**>))
import System.Directory (doesDirectoryExist, getCurrentDirectory)
import System.Environment (getArgs, getProgName)
import System.Exit (exitSuccess, exitFailure, ExitCode (..))
import System.IO (hPutStrLn, stderr)
import Data.Foldable
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Lazy as BL
import qualified Options.Applicative as Opt
import qualified Options.Applicative.Help.Types as Opt
import qualified System.Signal as Signal
version :: String
version = "1.24.1"
main :: IO ()
main = do
-- Keep track of whether process is cancelled through some signal
aborted <- newIORef Continue
Signal.installHandler Signal.sigTERM $ \_ -> atomicWriteIORef aborted Abort
Signal.installHandler Signal.sigINT $ \_ -> atomicWriteIORef aborted Abort
args <- getArgs
let prefs = Opt.defaultPrefs { Opt.prefShowHelpOnEmpty = True }
let cliParseResult = Opt.execParserPure prefs cliParserInfo args
case cliParseResult of
Opt.Success options -> runGluaLint options aborted
Opt.CompletionInvoked _completionResult -> void $ Opt.handleParseResult cliParseResult
Opt.Failure parserFailure -> do
progName <- getProgName
handleCliFailure aborted args $ Opt.execFailure parserFailure progName
-- | Command line options of glualint
data Options
= Options
{ optsConfigFile :: Maybe FilePath
, optsIndentation :: Maybe Indentation
, optsOutputFormat :: Maybe LogFormatChoice
, optsCommand :: Command
} deriving (Show)
-- | Available subcommands
data Command
= Lint StdInOrFiles
| PrettyPrint StdInOrFiles
| AnalyseGlobals [FilePath]
| DumpAst StdInOrFiles
| Test [FilePath]
| PrintVersion
deriving (Show)
-- | Metadata of the application
cliParserInfo :: Opt.ParserInfo Options
cliParserInfo = Opt.info (cliParser <**> Opt.helper)
( Opt.fullDesc
<> Opt.progDesc "Linter and pretty printer for Garry's mod's flavour of Lua."
<> Opt.header "glualint - lint and pretty print GLua files."
)
-- | Defines the command line interface parser
cliParser :: Opt.Parser Options
cliParser = Options
<$> configOption
<*> indentOption
<*> outputFormatOption
<*> commandParser
where
configOption :: Opt.Parser (Maybe FilePath)
configOption =
optional $ Opt.strOption
( Opt.long "config"
<> Opt.metavar "PATH"
<> Opt.help "Explicitly define config file location. By default it will search for it."
)
indentOption :: Opt.Parser (Maybe String)
indentOption =
optional $ Opt.strOption
( Opt.long "indentation"
<> Opt.metavar "STR"
<> Opt.help "What to use for indentation when pretty printing, 4 spaces by default."
)
outputFormatOption :: Opt.Parser (Maybe LogFormatChoice)
outputFormatOption =
optional $ Opt.option outputFormatReader
( Opt.long "output-format"
<> Opt.metavar "FORMAT"
<> Opt.help "Logging format, either 'auto', 'standard' or 'github', defaults to 'standard'"
)
outputFormatReader :: Opt.ReadM LogFormatChoice
outputFormatReader = Opt.eitherReader $ \case
"standard" -> Right $ LogFormatChoice StandardLogFormat
"github" -> Right $ LogFormatChoice GithubLogFormat
"auto" -> Right AutoLogFormatChoice
val -> Left $ "Bad output format '" <> val <> "', must be either 'auto', 'standard' or 'github'."
commandParser :: Opt.Parser Command
commandParser = Opt.hsubparser
( Opt.command
"lint"
(Opt.info (Lint <$> parseStdInOrFiles) $
Opt.progDesc "Lint the given files. Directories will be traversed recursively.")
<> Opt.command
"pretty-print"
(Opt.info (PrettyPrint <$> parseStdInOrFiles) $
Opt.progDesc "Pretty print the given files, replacing their contents with the pretty printed code.")
<> Opt.command
"analyse-globals"
(Opt.info (AnalyseGlobals <$> filesArgument) $
Opt.progDesc "Print a list of all globals used and defined in the given files/directories.")
<> Opt.command
"dump-ast"
(Opt.info (DumpAst <$> parseStdInOrFiles) $
Opt.progDesc "Print a list of all globals used and defined in the given files/directories.")
<> Opt.command
"test"
(Opt.info (Test <$> filesArgument) $
Opt.progDesc "Run tests on the given files. Use for testing/debugging glualint.")
<> Opt.command
"version"
(Opt.info (pure PrintVersion) $
Opt.progDesc "Print the version of glualint and exit.")
)
parseStdInOrFiles :: Opt.Parser StdInOrFiles
parseStdInOrFiles =
Opt.flag' UseStdIn
( Opt.long "stdin"
<> Opt.help "Use stdin instead of files."
)
<|> UseFiles <$> filesArgument
filesArgument :: Opt.Parser [FilePath]
filesArgument = Opt.some (Opt.argument Opt.str $ Opt.metavar "FILES")
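-- Illustrative invocations accepted by the parser defined above (examples added
-- for clarity; the paths are made up):
--
--   glualint lint src/ addons/my_addon
--   glualint pretty-print --stdin < file.lua
--   glualint --config glualint.json --output-format github lint .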
-- | Run subcommands
runGluaLint :: Options -> IORef Abort -> IO ()
runGluaLint opts aborted = do
case optsCommand opts of
Lint stdinOrFiles -> do
noErrorsOrWarnings <- and <$> forEachInput mbIndent mbOutputFormat stdinOrFiles aborted
lintStdin
lintFile
unless noErrorsOrWarnings exitFailure
PrettyPrint stdinOrFiles -> do
noErrors <- and <$> forEachInput mbIndent mbOutputFormat stdinOrFiles aborted
prettyPrintStdin
prettyPrintFile
unless noErrors $ exitFailure
AnalyseGlobals files ->
analyseGlobals files
DumpAst stdinOrFiles ->
forEachInput_ mbIndent mbOutputFormat stdinOrFiles aborted
dumpASTStdin
dumpASTFile
Test files ->
runTest files
PrintVersion ->
putStrLn version
where
mbIndent = optsIndentation opts
mbOutputFormat = optsOutputFormat opts
-- | When the regular CLI fails to parse, the legacy one might yet succeed
handleCliFailure :: IORef Abort -> [String] -> (Opt.ParserHelp, ExitCode, Int) -> IO ()
handleCliFailure aborted args (parserHelp, exitCode, terminalColumns) = case exitCode of
-- This means the help was activated. Print the help
ExitSuccess -> putStrLn $ Opt.renderHelp terminalColumns parserHelp
ExitFailure _ -> do
-- Attempt legacy CLI interface
mbRes <- legacyCli aborted Nothing args
case mbRes of
-- Legacy parser failed as well, print help
Nothing -> do
hPutStrLn stderr $ Opt.renderHelp terminalColumns parserHelp
exitFailure
-- Empty file list, print help
Just (_settings, []) -> do
hPutStrLn stderr $ Opt.renderHelp terminalColumns parserHelp
exitFailure
Just (settings, files) -> do
hasMessages <- legacyLint settings files
if hasMessages then exitFailure
else exitSuccess
prettyPrintStdin :: LintSettings -> String -> IO Bool
prettyPrintStdin settings contents =
case prettyPrint settings contents of
Nothing -> pure False
Just result -> True <$ putStr result
prettyPrintFile :: LintSettings -> FilePath -> String -> IO Bool
prettyPrintFile settings filePath contents = do
hPutStrLn stderr $ "Pretty printing " ++ filePath
case prettyPrint settings contents of
Nothing -> pure False
Just result -> True <$ doWriteFile filePath result
-- | Pure pretty print function
prettyPrint :: LintSettings -> String -> Maybe String
prettyPrint lintsettings lua =
if prettyprint_rejectInvalidCode lintsettings && not (null errors)
then Nothing
else Just $ prettyprintConf ppconf ast
where
(ast, errors) = parseGLuaFromString lua
ppconf = lint2ppSetting lintsettings
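-- Example (added, GHCi-style; it assumes the default settings value exported by
-- GLuaFixer.LintSettings is called 'defaultLintSettings'):
--
-- >>> prettyPrint defaultLintSettings "if x then print( 1 ) end"
--
-- The result is 'Nothing' only when the input fails to parse and
-- prettyprint_rejectInvalidCode is enabled in the settings.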
-- | Lint from stdin
lintStdin :: LintSettings -> String -> IO Bool
lintStdin settings contents = do
lintFile settings "stdin" contents
-- | Lint a file
lintFile :: LintSettings -> FilePath -> String -> IO Bool
lintFile settings filePath contents = do
let msgs = lint settings filePath contents
logFormat <- logFormatChoiceToLogFormat $ log_format settings
case logFormat of
StandardLogFormat ->
mapM_ (putStrLn . formatLintMessage StandardLogFormat) msgs
GithubLogFormat -> do
-- When the log format is GitHub, also print in the regular format. GitHub actions hide the
-- GitHub specific output from the logs for some reason.
mapM_ (putStrLn . formatLintMessage GithubLogFormat) msgs
mapM_ (putStrLn . formatLintMessage StandardLogFormat) msgs
pure $ null msgs
-- | Lint a string, using parsec
lint :: LintSettings -> FilePath -> String -> [LintMessage]
lint config f contents =
case parseFile config f contents of
Left errs -> errs
Right (lexWarnings, ast) ->
let
lineLengthWarnings = execParseLineLimits f (LineLimit $ lint_maxLineLength config) contents
parserWarnings = map ($f) $ astWarnings config ast
in
-- Print all warnings
sortLintMessages $ lineLengthWarnings ++ lexWarnings ++ parserWarnings
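-- Example (added): linting a string without touching the file system; the file
-- name is only used in the reported message positions. 'defaultLintSettings' is
-- an assumed default exported by GLuaFixer.LintSettings.
--
-- >>> mapM_ (putStrLn . formatLintMessage StandardLogFormat)
-- ...       (lint defaultLintSettings "inline.lua" "local x = 1")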
-- | Pretty print, uses the uu-parsinglib library
legacyPrettyPrintStdin :: Maybe Indentation -> IO ()
legacyPrettyPrintStdin ind = do
lua <- getContents
cwd <- getCurrentDirectory
lintsettings <- overrideLintSettingsIndentation ind <$> getSettings cwd
for_ (prettyPrint lintsettings lua) $ \result ->
putStr result
legacyPrettyPrintFiles :: IORef Abort -> Maybe Indentation -> [FilePath] -> IO ()
legacyPrettyPrintFiles aborted ind = mapM_ pp
where
pp :: FilePath -> IO ()
pp f = do
isDirectory <- doesDirectoryExist f
if isDirectory then do
luaFiles <- findLuaFiles [] f
legacyPrettyPrintFiles aborted ind luaFiles
else do
lintsettings <- overrideLintSettingsIndentation ind <$> getSettings f
readIORef aborted >>= \case
Abort -> pure ()
Continue -> do
hPutStrLn stderr $ "Pretty printing " ++ f
lua <- doReadFile f
for_ (prettyPrint lintsettings lua) $ \result ->
doWriteFile f result
overrideLintSettingsIndentation :: Maybe Indentation -> (LintSettings -> LintSettings)
overrideLintSettingsIndentation ind settings = settings
{ prettyprint_indentation = fromMaybe (prettyprint_indentation settings) ind
}
-- | Lint a set of files, uses parsec's parser library
-- Bool return value indicates whether there were either warnings or errors
legacyLint :: Maybe LintSettings -> [FilePath] -> IO Bool
legacyLint _ [] = pure False
legacyLint ls (f : fs) = do
settings <- getSettings f
let config = fromJust $ ls <|> Just settings
logFormat <- logFormatChoiceToLogFormat $ log_format config
-- When we're dealing with a directory, lint all the files in it recursively.
isDirectory <- doesDirectoryExist f
hasMsgs <- if isDirectory then findLuaFiles (lint_ignoreFiles config) f >>= legacyLint ls
else if f == "stdin" then do
msgs <- lint config f <$> getContents
mapM_ (putStrLn . formatLintMessage logFormat) msgs
pure $ not $ null msgs
else do
msgs <- lint config f <$> doReadFile f
mapM_ (putStrLn . formatLintMessage logFormat) msgs
pure $ not $ null msgs
-- Lint the other files
(|| hasMsgs) <$> legacyLint ls fs
dumpASTStdin :: LintSettings -> String -> IO ()
dumpASTStdin settings contents =
dumpASTFile settings "stdin" contents
dumpASTFile :: LintSettings -> FilePath -> String -> IO ()
dumpASTFile settings filePath contents =
case parseFile settings filePath contents of
Left errs -> mapM_ print errs
Right (_lexWarnings, ast) ->
BL.putStr $ JSON.encode ast
dumpAST :: IORef Abort -> [FilePath] -> IO ()
dumpAST aborted fs =
forEachInput_ Nothing Nothing (UseFiles fs) aborted dumpASTStdin dumpASTFile
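-- Added note: 'runTest' below is a self-check. It lexes and parses each file with
-- both the uu-parsinglib and parsec front ends, pretty prints the uu-parsinglib
-- AST, re-parses the pretty printed output with both front ends, and reports any
-- stage that fails.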
runTest :: [FilePath] -> IO ()
runTest fs = do
putStrLn "Running tests"
for_ fs $ \f -> do
isDirectory <- doesDirectoryExist f
if isDirectory then do
luaFiles <- findLuaFiles [] f
runTest luaFiles
else do
contents <- doReadFile f
let (uu_lex, uu_lex_errors) = Lex.execParseTokens contents
unless (null uu_lex_errors) $ do
putStrLn $ "Errors when trying to lex '" ++ f ++
"' with uu-parsinglib lexer!"
print uu_lex_errors
let (uu_ast, uu_parseErrs) = parseGLua $ filter (not . isWhitespace) uu_lex
lintsettings <- getSettings f
putStrLn $ "Testing " ++ f
unless (null uu_parseErrs) $ do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with uu-parsinglib parser!"
print uu_parseErrs
case parseFile lintsettings f contents of
Right _ -> pure ()
Left errs -> do
putStrLn $ "Errors when trying to parse '" ++ f ++ "' with parsec parser!"
print errs
let pretty_printed = prettyprintConf (lint2ppSetting lintsettings) uu_ast
let (_uu_ast_pp, uu_parseErrs_pp) = parseGLuaFromString pretty_printed
unless (null uu_parseErrs_pp) $ do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with uu-parsinglib parser after pretty print!"
print uu_parseErrs
case parseFile lintsettings f pretty_printed of
Right _ -> pure ()
Left errs -> do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with parsec parser after pretty print!"
print errs
--
-- Legacy
--
-- | Deprecated and naive command line interface, kept in place for backwards
-- compatibility. Returns Nothing when this parser fails as well.
legacyCli
:: IORef Abort
-> Maybe Indentation
-> [String]
-> IO (Maybe (Maybe LintSettings, [FilePath]))
legacyCli aborted ind = \case
["--config"] -> pure Nothing
-- fail when a subcommand of the new parser is given as argument
"lint" : _ -> pure Nothing
"pretty-print" : _ -> pure Nothing
"analyse-globals" : _ -> pure Nothing
"dump-ast" : _ -> pure Nothing
"test" : _ -> pure Nothing
"version" : _ -> pure Nothing
-- End of recursion case
[] -> pure $ Just (Nothing, [])
"--pretty-print-files" : fs -> legacyPrettyPrintFiles aborted ind fs >> exitSuccess
"--pretty-print" : _ -> legacyPrettyPrintStdin ind >> exitSuccess
"--analyse-globals" : fs -> analyseGlobals fs >> exitSuccess
"--dump-ast" : fs -> dumpAST aborted fs >> exitSuccess
"--version" : _ -> putStrLn version >> exitSuccess
"--test" : fs -> runTest fs >> exitSuccess
"--stdin" : xs -> do
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (sets, pths) -> Just (sets, "stdin" : pths)
"--config" : f : xs -> do
settings <- settingsFromFile f
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (_, fps) -> Just (settings, fps)
('-' : '-' : 'i' : 'n' : 'd' : 'e' : 'n' : 't' : 'a' : 't' : 'i' : 'o' : 'n' : '=' : '\'' : ind') : xs ->
legacyCli aborted (Just (init ind')) xs
('-' : '-' : 'i' : 'n' : 'd' : 'e' : 'n' : 't' : 'a' : 't' : 'i' : 'o' : 'n' : '=' : ind') : xs ->
legacyCli aborted (Just ind') xs
f : xs -> do
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (ls, fs) -> Just (ls, f : fs)
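-- Legacy spellings this fallback still understands (illustrative examples):
--
--   glualint --pretty-print-files foo.lua addons/
--   glualint --indentation='    ' --pretty-print < foo.lua
--   glualint --config .glualintrc --stdin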
| null | https://raw.githubusercontent.com/FPtje/GLuaFixer/af1020f18b2a5f78c6c64b12041f2745e2c25eb9/src/GLuaFixer/LintMain.hs | haskell | Keep track of whether process is cancelled through some signal
| Command line options of glualint
| Available subcommands
| Metadata of the application
| Defines the command line interface parser
| Run subcommands
| When the regular CLI fails to parse, the legacy one might yet succeed
This means the help was activated. Print the help
Attempt legacy CLI interface
Legacy parser failed as well, print help
Empty file list, print help
| Pure pretty print function
| Lint a file
GitHub specific output from the logs for some reason.
| Lint a string, using parsec
Print all warnings
| Pretty print, uses the uu-parsinglib library
Bool return value indicates whether there were either warnings or errors
When we're dealing with a directory, lint all the files in it recursively.
Lint the other files
Legacy
| Deprecated and naive command line interface, kept in place for backwards
compatibility. Returns Nothing when this parser fails as well.
fail when a subcommand of the new parser is given as argument
End of recursion case | # LANGUAGE LambdaCase #
module Main where
import "glualint-lib" GLua.AG.PrettyPrint
import "glualint-lib" GLua.Parser
import "glualint-lib" GLua.ASTInstances ()
import "glualint-lib" GLua.LineLimitParser (execParseLineLimits, LineLimit (LineLimit))
import "glualint-lib" GLua.TokenTypes (isWhitespace)
import "glualint-lib" GLuaFixer.AG.ASTLint
import "glualint-lib" GLuaFixer.AnalyseProject
import "glualint-lib" GLuaFixer.LintMessage
import "glualint-lib" GLuaFixer.LintSettings
import "glualint-lib" GLuaFixer.Util
import qualified "glualint-lib" GLua.Lexer as Lex
import Control.Applicative ((<|>), optional)
import Control.Monad (unless, void)
import Data.Functor ((<&>))
import Data.IORef (IORef, atomicWriteIORef, newIORef, readIORef)
import Data.Maybe (fromMaybe, fromJust)
import Options.Applicative ((<**>))
import System.Directory (doesDirectoryExist, getCurrentDirectory)
import System.Environment (getArgs, getProgName)
import System.Exit (exitSuccess, exitFailure, ExitCode (..))
import System.IO (hPutStrLn, stderr)
import Data.Foldable
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Lazy as BL
import qualified Options.Applicative as Opt
import qualified Options.Applicative.Help.Types as Opt
import qualified System.Signal as Signal
version :: String
version = "1.24.1"
main :: IO ()
main = do
aborted <- newIORef Continue
Signal.installHandler Signal.sigTERM $ \_ -> atomicWriteIORef aborted Abort
Signal.installHandler Signal.sigINT $ \_ -> atomicWriteIORef aborted Abort
args <- getArgs
let prefs = Opt.defaultPrefs { Opt.prefShowHelpOnEmpty = True }
let cliParseResult = Opt.execParserPure prefs cliParserInfo args
case cliParseResult of
Opt.Success options -> runGluaLint options aborted
Opt.CompletionInvoked _completionResult -> void $ Opt.handleParseResult cliParseResult
Opt.Failure parserFailure -> do
progName <- getProgName
handleCliFailure aborted args $ Opt.execFailure parserFailure progName
data Options
= Options
{ optsConfigFile :: Maybe FilePath
, optsIndentation :: Maybe Indentation
, optsOutputFormat :: Maybe LogFormatChoice
, optsCommand :: Command
} deriving (Show)
data Command
= Lint StdInOrFiles
| PrettyPrint StdInOrFiles
| AnalyseGlobals [FilePath]
| DumpAst StdInOrFiles
| Test [FilePath]
| PrintVersion
deriving (Show)
cliParserInfo :: Opt.ParserInfo Options
cliParserInfo = Opt.info (cliParser <**> Opt.helper)
( Opt.fullDesc
<> Opt.progDesc "Linter and pretty printer for Garry's mod's flavour of Lua."
<> Opt.header "glualint - lint and pretty print GLua files."
)
cliParser :: Opt.Parser Options
cliParser = Options
<$> configOption
<*> indentOption
<*> outputFormatOption
<*> commandParser
where
configOption :: Opt.Parser (Maybe FilePath)
configOption =
optional $ Opt.strOption
( Opt.long "config"
<> Opt.metavar "PATH"
<> Opt.help "Explicitly define config file location. By default it will search for it."
)
indentOption :: Opt.Parser (Maybe String)
indentOption =
optional $ Opt.strOption
( Opt.long "indentation"
<> Opt.metavar "STR"
<> Opt.help "What to use for indentation when pretty printing, 4 spaces by default."
)
outputFormatOption :: Opt.Parser (Maybe LogFormatChoice)
outputFormatOption =
optional $ Opt.option outputFormatReader
( Opt.long "output-format"
<> Opt.metavar "FORMAT"
<> Opt.help "Logging format, either 'auto', 'standard' or 'github', defaults to 'standard'"
)
outputFormatReader :: Opt.ReadM LogFormatChoice
outputFormatReader = Opt.eitherReader $ \case
"standard" -> Right $ LogFormatChoice StandardLogFormat
"github" -> Right $ LogFormatChoice GithubLogFormat
"auto" -> Right AutoLogFormatChoice
val -> Left $ "Bad output format '" <> val <> "', must be either 'auto', 'standard' or 'github'."
commandParser :: Opt.Parser Command
commandParser = Opt.hsubparser
( Opt.command
"lint"
(Opt.info (Lint <$> parseStdInOrFiles) $
Opt.progDesc "Lint the given files. Directories will be traversed recursively.")
<> Opt.command
"pretty-print"
(Opt.info (PrettyPrint <$> parseStdInOrFiles) $
Opt.progDesc "Pretty print the given files, replacing their contents with the pretty printed code.")
<> Opt.command
"analyse-globals"
(Opt.info (AnalyseGlobals <$> filesArgument) $
Opt.progDesc "Print a list of all globals used and defined in the given files/directories.")
<> Opt.command
"dump-ast"
(Opt.info (DumpAst <$> parseStdInOrFiles) $
Opt.progDesc "Print a list of all globals used and defined in the given files/directories.")
<> Opt.command
"test"
(Opt.info (Test <$> filesArgument) $
Opt.progDesc "Run tests on the given files. Use for testing/debugging glualint.")
<> Opt.command
"version"
(Opt.info (pure PrintVersion) $
Opt.progDesc "Print the version of glualint and exit.")
)
parseStdInOrFiles :: Opt.Parser StdInOrFiles
parseStdInOrFiles =
Opt.flag' UseStdIn
( Opt.long "stdin"
<> Opt.help "Use stdin instead of files."
)
<|> UseFiles <$> filesArgument
filesArgument :: Opt.Parser [FilePath]
filesArgument = Opt.some (Opt.argument Opt.str $ Opt.metavar "FILES")
runGluaLint :: Options -> IORef Abort -> IO ()
runGluaLint opts aborted = do
case optsCommand opts of
Lint stdinOrFiles -> do
noErrorsOrWarnings <- and <$> forEachInput mbIndent mbOutputFormat stdinOrFiles aborted
lintStdin
lintFile
unless noErrorsOrWarnings exitFailure
PrettyPrint stdinOrFiles -> do
noErrors <- and <$> forEachInput mbIndent mbOutputFormat stdinOrFiles aborted
prettyPrintStdin
prettyPrintFile
unless noErrors $ exitFailure
AnalyseGlobals files ->
analyseGlobals files
DumpAst stdinOrFiles ->
forEachInput_ mbIndent mbOutputFormat stdinOrFiles aborted
dumpASTStdin
dumpASTFile
Test files ->
runTest files
PrintVersion ->
putStrLn version
where
mbIndent = optsIndentation opts
mbOutputFormat = optsOutputFormat opts
handleCliFailure :: IORef Abort -> [String] -> (Opt.ParserHelp, ExitCode, Int) -> IO ()
handleCliFailure aborted args (parserHelp, exitCode, terminalColumns) = case exitCode of
ExitSuccess -> putStrLn $ Opt.renderHelp terminalColumns parserHelp
ExitFailure _ -> do
mbRes <- legacyCli aborted Nothing args
case mbRes of
Nothing -> do
hPutStrLn stderr $ Opt.renderHelp terminalColumns parserHelp
exitFailure
Just (_settings, []) -> do
hPutStrLn stderr $ Opt.renderHelp terminalColumns parserHelp
exitFailure
Just (settings, files) -> do
hasMessages <- legacyLint settings files
if hasMessages then exitFailure
else exitSuccess
prettyPrintStdin :: LintSettings -> String -> IO Bool
prettyPrintStdin settings contents =
case prettyPrint settings contents of
Nothing -> pure False
Just result -> True <$ putStr result
prettyPrintFile :: LintSettings -> FilePath -> String -> IO Bool
prettyPrintFile settings filePath contents = do
hPutStrLn stderr $ "Pretty printing " ++ filePath
case prettyPrint settings contents of
Nothing -> pure False
Just result -> True <$ doWriteFile filePath result
prettyPrint :: LintSettings -> String -> Maybe String
prettyPrint lintsettings lua =
if prettyprint_rejectInvalidCode lintsettings && not (null errors)
then Nothing
else Just $ prettyprintConf ppconf ast
where
(ast, errors) = parseGLuaFromString lua
ppconf = lint2ppSetting lintsettings
-- | Lint from stdin
lintStdin :: LintSettings -> String -> IO Bool
lintStdin settings contents = do
lintFile settings "stdin" contents
lintFile :: LintSettings -> FilePath -> String -> IO Bool
lintFile settings filePath contents = do
let msgs = lint settings filePath contents
logFormat <- logFormatChoiceToLogFormat $ log_format settings
case logFormat of
StandardLogFormat ->
mapM_ (putStrLn . formatLintMessage StandardLogFormat) msgs
GithubLogFormat -> do
-- When the log format is GitHub, also print in the regular format. GitHub actions hide the GitHub-specific output from the logs for some reason.
mapM_ (putStrLn . formatLintMessage GithubLogFormat) msgs
mapM_ (putStrLn . formatLintMessage StandardLogFormat) msgs
pure $ null msgs
lint :: LintSettings -> FilePath -> String -> [LintMessage]
lint config f contents =
case parseFile config f contents of
Left errs -> errs
Right (lexWarnings, ast) ->
let
lineLengthWarnings = execParseLineLimits f (LineLimit $ lint_maxLineLength config) contents
parserWarnings = map ($f) $ astWarnings config ast
in
sortLintMessages $ lineLengthWarnings ++ lexWarnings ++ parserWarnings
legacyPrettyPrintStdin :: Maybe Indentation -> IO ()
legacyPrettyPrintStdin ind = do
lua <- getContents
cwd <- getCurrentDirectory
lintsettings <- overrideLintSettingsIndentation ind <$> getSettings cwd
for_ (prettyPrint lintsettings lua) $ \result ->
putStr result
legacyPrettyPrintFiles :: IORef Abort -> Maybe Indentation -> [FilePath] -> IO ()
legacyPrettyPrintFiles aborted ind = mapM_ pp
where
pp :: FilePath -> IO ()
pp f = do
isDirectory <- doesDirectoryExist f
if isDirectory then do
luaFiles <- findLuaFiles [] f
legacyPrettyPrintFiles aborted ind luaFiles
else do
lintsettings <- overrideLintSettingsIndentation ind <$> getSettings f
readIORef aborted >>= \case
Abort -> pure ()
Continue -> do
hPutStrLn stderr $ "Pretty printing " ++ f
lua <- doReadFile f
for_ (prettyPrint lintsettings lua) $ \result ->
doWriteFile f result
overrideLintSettingsIndentation :: Maybe Indentation -> (LintSettings -> LintSettings)
overrideLintSettingsIndentation ind settings = settings
{ prettyprint_indentation = fromMaybe (prettyprint_indentation settings) ind
}
-- | Lint a set of files, uses parsec's parser library
legacyLint :: Maybe LintSettings -> [FilePath] -> IO Bool
legacyLint _ [] = pure False
legacyLint ls (f : fs) = do
settings <- getSettings f
let config = fromJust $ ls <|> Just settings
logFormat <- logFormatChoiceToLogFormat $ log_format config
isDirectory <- doesDirectoryExist f
hasMsgs <- if isDirectory then findLuaFiles (lint_ignoreFiles config) f >>= legacyLint ls
else if f == "stdin" then do
msgs <- lint config f <$> getContents
mapM_ (putStrLn . formatLintMessage logFormat) msgs
pure $ not $ null msgs
else do
msgs <- lint config f <$> doReadFile f
mapM_ (putStrLn . formatLintMessage logFormat) msgs
pure $ not $ null msgs
(|| hasMsgs) <$> legacyLint ls fs
dumpASTStdin :: LintSettings -> String -> IO ()
dumpASTStdin settings contents =
dumpASTFile settings "stdin" contents
dumpASTFile :: LintSettings -> FilePath -> String -> IO ()
dumpASTFile settings filePath contents =
case parseFile settings filePath contents of
Left errs -> mapM_ print errs
Right (_lexWarnings, ast) ->
BL.putStr $ JSON.encode ast
dumpAST :: IORef Abort -> [FilePath] -> IO ()
dumpAST aborted fs =
forEachInput_ Nothing Nothing (UseFiles fs) aborted dumpASTStdin dumpASTFile
runTest :: [FilePath] -> IO ()
runTest fs = do
putStrLn "Running tests"
for_ fs $ \f -> do
isDirectory <- doesDirectoryExist f
if isDirectory then do
luaFiles <- findLuaFiles [] f
runTest luaFiles
else do
contents <- doReadFile f
let (uu_lex, uu_lex_errors) = Lex.execParseTokens contents
unless (null uu_lex_errors) $ do
putStrLn $ "Errors when trying to lex '" ++ f ++
"' with uu-parsinglib lexer!"
print uu_lex_errors
let (uu_ast, uu_parseErrs) = parseGLua $ filter (not . isWhitespace) uu_lex
lintsettings <- getSettings f
putStrLn $ "Testing " ++ f
unless (null uu_parseErrs) $ do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with uu-parsinglib parser!"
print uu_parseErrs
case parseFile lintsettings f contents of
Right _ -> pure ()
Left errs -> do
putStrLn $ "Errors when trying to parse '" ++ f ++ "' with parsec parser!"
print errs
let pretty_printed = prettyprintConf (lint2ppSetting lintsettings) uu_ast
let (_uu_ast_pp, uu_parseErrs_pp) = parseGLuaFromString pretty_printed
unless (null uu_parseErrs_pp) $ do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with uu-parsinglib parser after pretty print!"
print uu_parseErrs
case parseFile lintsettings f pretty_printed of
Right _ -> pure ()
Left errs -> do
putStrLn $ "Errors when trying to parse '" ++ f ++
"' with parsec parser after pretty print!"
print errs
legacyCli
:: IORef Abort
-> Maybe Indentation
-> [String]
-> IO (Maybe (Maybe LintSettings, [FilePath]))
legacyCli aborted ind = \case
["--config"] -> pure Nothing
"lint" : _ -> pure Nothing
"pretty-print" : _ -> pure Nothing
"analyse-globals" : _ -> pure Nothing
"dump-ast" : _ -> pure Nothing
"test" : _ -> pure Nothing
"version" : _ -> pure Nothing
[] -> pure $ Just (Nothing, [])
"--pretty-print-files" : fs -> legacyPrettyPrintFiles aborted ind fs >> exitSuccess
"--pretty-print" : _ -> legacyPrettyPrintStdin ind >> exitSuccess
"--analyse-globals" : fs -> analyseGlobals fs >> exitSuccess
"--dump-ast" : fs -> dumpAST aborted fs >> exitSuccess
"--version" : _ -> putStrLn version >> exitSuccess
"--test" : fs -> runTest fs >> exitSuccess
"--stdin" : xs -> do
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (sets, pths) -> Just (sets, "stdin" : pths)
"--config" : f : xs -> do
settings <- settingsFromFile f
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (_, fps) -> Just (settings, fps)
('-' : '-' : 'i' : 'n' : 'd' : 'e' : 'n' : 't' : 'a' : 't' : 'i' : 'o' : 'n' : '=' : '\'' : ind') : xs ->
legacyCli aborted (Just (init ind')) xs
('-' : '-' : 'i' : 'n' : 'd' : 'e' : 'n' : 't' : 'a' : 't' : 'i' : 'o' : 'n' : '=' : ind') : xs ->
legacyCli aborted (Just ind') xs
f : xs -> do
legacyCli aborted ind xs <&> \case
Nothing -> Nothing
Just (ls, fs) -> Just (ls, f : fs)
|
ab5f398ce7d2e8efbec2914ada5c073eb0f497481b3c0b6eae57a08bdcdb35db | heshrobe/joshua-dist | ruled-tables.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
(in-package :clim-internals)
(eval-when (:compile-toplevel :execute :load-toplevel)
(export (intern (string-upcase "ruled-table-output-record") :clim) :clim)
(export (intern (string-upcase "draw-rules") :clim) :clim))
(defmethod adjust-table-cells ((table standard-table-output-record) stream)
(let* ((nrows 0)
(ncells nil)
(cells 0)
(row-table-p (row-table-p table))
(table-mapper (if row-table-p #'map-over-table-rows #'map-over-table-columns))
(x-spacing (slot-value table 'x-spacing))
(y-spacing (slot-value table 'y-spacing))
(equalize-column-widths (slot-value table 'equalize-column-widths)))
(declare (type fixnum nrows cells))
(declare (type coordinate x-spacing y-spacing))
(labels ((count-rows (row)
(incf nrows)
(setq cells 0)
(map-over-row-cells #'count-cells row)
(assert (not (zerop cells)) ()
"Row or column in table does not contain any cells")
(cond ((null ncells)
(setq ncells cells))
(t
(maxf ncells cells))))
(count-cells (cell)
(assert (cell-output-record-p cell))
(incf cells)))
(declare (dynamic-extent #'count-rows #'count-cells))
;; Calculate nrows & ncells (= ncells per row)
(funcall table-mapper #'count-rows table))
;; If there are no rows, COUNT-ROWS won't get invoked and NCELLS
;; will be NIL. If all the rows and columns are empty, NCELLS will
;; be 0. In either case, that means we're done.
(when (or (null ncells) (= ncells 0))
(return-from adjust-table-cells
(tree-recompute-extent table)))
(with-stack-array (row-array nrows :initial-element 0)
(with-stack-array (column-array ncells :initial-element 0)
(let ((x-pos nil)
(y-pos nil)
(row-count 0)
(column-count 0)
(total-width (coordinate 0))
(total-height (coordinate 0)))
(declare (type fixnum row-count column-count))
(declare (type coordinate total-width total-height))
;; We always want the table to start at its START-X and START-Y positions.
(multiple-value-setq (x-pos y-pos) (output-record-position table))
(macrolet (#-CCL-2 (row-max-height (row-number)
`(svref row-array ,row-number))
#-CCL-2 (column-max-width (column-number)
`(svref column-array ,column-number)))
;; Figure out max height for each row,
;; max width for each column.
;; Collect row heights and column widths into temp arrays.
;; We need to remember for each row its total height and
;; the difference between the smallest top and the largest top.
;; For each row remember the total height and then remember the maximum
;; difference between the row top and the y-position of the row.
;; Rows and columns are pretty symmetric, but we need to arrange
;; for a few things to work out right...
(unless row-table-p
(rotatef row-array column-array))
(if row-table-p (setq row-count -1) (setq column-count -1))
(flet ((row-mapper (row)
(if row-table-p (incf row-count) (incf column-count))
(if row-table-p (setq column-count -1) (setq row-count -1))
(adjust-table-cells row stream)
(flet ((cell-mapper (cell)
(if row-table-p (incf column-count) (incf row-count))
(multiple-value-bind (width height)
(bounding-rectangle-size cell)
(declare (type coordinate width height))
(maxf (row-max-height row-count)
(max height (cell-min-height cell)))
(maxf (column-max-width column-count)
(max width (cell-min-width cell))))))
(declare (dynamic-extent #'cell-mapper))
(map-over-row-cells #'cell-mapper row))))
(declare (dynamic-extent #'row-mapper))
(funcall table-mapper #'row-mapper table))
(when equalize-column-widths
(let ((column-width (coordinate 0))
(n-columns (1+ column-count)))
(declare (type fixnum n-columns))
(declare (type coordinate column-width))
(dotimes (i n-columns)
(maxf column-width (column-max-width i)))
(dotimes (i n-columns)
(setf (column-max-width i) column-width))))
(if row-table-p (setq row-count -1) (setq column-count -1))
(flet ((row-mapper (row)
(if row-table-p (incf row-count) (incf column-count))
(let ((this-row-height (row-max-height row-count))
(this-column-width (column-max-width column-count)))
(declare (type coordinate this-row-height this-column-width))
;; All numbers are in (output-record-parent table) coordinates
(if row-table-p (setq column-count -1) (setq row-count -1))
(setq total-width x-pos
total-height y-pos)
(flet ((cell-mapper (cell)
(if row-table-p (incf column-count) (incf row-count))
(let ((column-width (column-max-width column-count))
(row-height (row-max-height row-count))
(cell-width (bounding-rectangle-width cell))
(cell-height (bounding-rectangle-height cell))
(x-alignment-adjust 0)
(y-alignment-adjust 0))
(declare (type coordinate column-width row-height
cell-width cell-height))
(ecase (slot-value cell 'x-alignment)
(:left )
(:right
(setq x-alignment-adjust
(- column-width cell-width)))
(:center
(setq x-alignment-adjust
(floor (- column-width cell-width) 2))))
(ecase (slot-value cell 'y-alignment)
(:top )
(:bottom
(setq y-alignment-adjust
(- row-height cell-height)))
(:center
(setq y-alignment-adjust
(floor (- row-height cell-height) 2))))
(multiple-value-bind (x-offset y-offset)
(convert-from-ancestor-to-descendant-coordinates
(output-record-parent table) (output-record-parent cell))
(declare (type coordinate x-offset y-offset))
;;; Make sure output-record of a row fills
;;; the entire row height.
;;; The reason for this is that the code that
;;; calculates the total-extent of the table
;;; uses the size and position of the child-cells.
;;; As a result, the bottom row is drawn only
;;; to its minimal height.
(when row-table-p
(let ((old-width (bounding-rectangle-width cell)))
(bounding-rectangle-set-size
cell
old-width
this-row-height)))
(output-record-set-position
cell
(+ x-offset total-width x-alignment-adjust)
(+ y-offset total-height y-alignment-adjust)))
(if row-table-p
(incf total-width (+ column-width x-spacing))
(incf total-height (+ row-height y-spacing))))))
(declare (dynamic-extent #'cell-mapper))
(map-over-row-cells #'cell-mapper row))
(if row-table-p
(incf y-pos (+ this-row-height y-spacing))
(incf x-pos (+ this-column-width x-spacing))))))
(declare (dynamic-extent #'row-mapper))
(funcall table-mapper #'row-mapper table)))
;; at this point you could draw lines around the cells easily
;; because you have the row and column arrays and total-height and total-width
(draw-rules table column-array row-array stream)
))))
(tree-recompute-extent table))
(defmethod draw-rules ((table standard-table-output-record) column-array row-array stream)
(declare (ignore column-array row-array stream))
(values))
(defclass ruled-table-output-record (standard-table-output-record) ())
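;;; Added comment: the method below draws the actual rules -- an outer border at
;;; the table's right and bottom edges plus a separator line at the start of each
;;; column and row -- into a fresh sequence output record that is then added as a
;;; child of the table record.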
(defmethod draw-rules ((table ruled-table-output-record) column-array row-array stream)
(let* ((x-spacing (slot-value table 'x-spacing))
(y-spacing (slot-value table 'y-spacing))
(max-x (loop for x across column-array sum (+ x x-spacing)))
(max-y (loop for y across row-array sum (+ y y-spacing))))
(add-output-record
(with-output-to-output-record (stream 'standard-sequence-output-record ;; record
)
( record )
(clim:draw-line* stream 0 max-y max-x max-y)
(clim:draw-line* stream max-x 0 max-x max-y)
(let ((last-x 0))
(loop for x across column-array
for first = t then nil
do (draw-line* stream last-x 0 last-x max-y)
(incf last-x x)
(if first
(incf last-x (floor x-spacing 2))
(incf last-x x-spacing))
))
(let ((last-y 0))
(loop for y across row-array
for first = t then nil
do (draw-line* stream 0 last-y max-x last-y)
(incf last-y y)
(if first
(incf last-y (floor y-spacing 2))
(incf last-y y-spacing)))))
table))) | null | https://raw.githubusercontent.com/heshrobe/joshua-dist/f59f06303f9fabef3e945a920cf9a26d9c2fd55e/clim-fixes/ruled-tables.lisp | lisp | Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
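;;; Added usage sketch (not part of the original file): draw a small table with
;;; rules around every cell. It assumes FORMATTING-TABLE accepts a :RECORD-TYPE
;;; argument, as CLIM 2 style implementations generally do; the function name is
;;; invented for the example.
(defun draw-sample-ruled-table (stream)
  (clim:formatting-table (stream :record-type 'ruled-table-output-record
                                 :x-spacing 10 :y-spacing 6)
    (dotimes (row 3)
      (clim:formatting-row (stream)
        (dotimes (col 4)
          (clim:formatting-cell (stream)
            (format stream "~D,~D" row col)))))))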
Calculate nrows & ncells (= ncells per row)
be 0. In either case, that means we're done.
Figure out max height for each row,
max width for each column.
Collect row heights and column widths into temp arrays.
We need to remember for each row its total height and
the difference between the smallest top and the largest top.
For each row remember the total height and then remember the maximum
difference between the row top and the y-position of the row.
Rows and columns are pretty symmetric, but we need to arrange
for a few things to work out right...
All numbers are in (output-record-parent table) coordinates
Make sure output-record of a row fills
the entire row height.
The reason for this is that the code that
calculates the total-extent of the table
uses the size and position of the child-cells.
As a result, the bottom row is drawn only
to its minimal height.
at this point you could draw lines around the cells easily
because you have the row and column arrays and total-height and total-width
record |
(in-package :clim-internals)
(eval-when (:compile-toplevel :execute :load-toplevel)
(export (intern (string-upcase "ruled-table-output-record") :clim) :clim)
(export (intern (string-upcase "draw-rules") :clim) :clim))
(defmethod adjust-table-cells ((table standard-table-output-record) stream)
(let* ((nrows 0)
(ncells nil)
(cells 0)
(row-table-p (row-table-p table))
(table-mapper (if row-table-p #'map-over-table-rows #'map-over-table-columns))
(x-spacing (slot-value table 'x-spacing))
(y-spacing (slot-value table 'y-spacing))
(equalize-column-widths (slot-value table 'equalize-column-widths)))
(declare (type fixnum nrows cells))
(declare (type coordinate x-spacing y-spacing))
(labels ((count-rows (row)
(incf nrows)
(setq cells 0)
(map-over-row-cells #'count-cells row)
(assert (not (zerop cells)) ()
"Row or column in table does not contain any cells")
(cond ((null ncells)
(setq ncells cells))
(t
(maxf ncells cells))))
(count-cells (cell)
(assert (cell-output-record-p cell))
(incf cells)))
(declare (dynamic-extent #'count-rows #'count-cells))
(funcall table-mapper #'count-rows table))
;; If there are no rows, COUNT-ROWS won't get invoked and NCELLS will be NIL.
;; If all the rows and columns are empty, NCELLS will be 0. In either case,
;; that means we're done.
(when (or (null ncells) (= ncells 0))
(return-from adjust-table-cells
(tree-recompute-extent table)))
(with-stack-array (row-array nrows :initial-element 0)
(with-stack-array (column-array ncells :initial-element 0)
(let ((x-pos nil)
(y-pos nil)
(row-count 0)
(column-count 0)
(total-width (coordinate 0))
(total-height (coordinate 0)))
(declare (type fixnum row-count column-count))
(declare (type coordinate total-width total-height))
;; We always want the table to start at its START-X and START-Y positions.
(multiple-value-setq (x-pos y-pos) (output-record-position table))
(macrolet (#-CCL-2 (row-max-height (row-number)
`(svref row-array ,row-number))
#-CCL-2 (column-max-width (column-number)
`(svref column-array ,column-number)))
(unless row-table-p
(rotatef row-array column-array))
(if row-table-p (setq row-count -1) (setq column-count -1))
(flet ((row-mapper (row)
(if row-table-p (incf row-count) (incf column-count))
(if row-table-p (setq column-count -1) (setq row-count -1))
(adjust-table-cells row stream)
(flet ((cell-mapper (cell)
(if row-table-p (incf column-count) (incf row-count))
(multiple-value-bind (width height)
(bounding-rectangle-size cell)
(declare (type coordinate width height))
(maxf (row-max-height row-count)
(max height (cell-min-height cell)))
(maxf (column-max-width column-count)
(max width (cell-min-width cell))))))
(declare (dynamic-extent #'cell-mapper))
(map-over-row-cells #'cell-mapper row))))
(declare (dynamic-extent #'row-mapper))
(funcall table-mapper #'row-mapper table))
(when equalize-column-widths
(let ((column-width (coordinate 0))
(n-columns (1+ column-count)))
(declare (type fixnum n-columns))
(declare (type coordinate column-width))
(dotimes (i n-columns)
(maxf column-width (column-max-width i)))
(dotimes (i n-columns)
(setf (column-max-width i) column-width))))
(if row-table-p (setq row-count -1) (setq column-count -1))
(flet ((row-mapper (row)
(if row-table-p (incf row-count) (incf column-count))
(let ((this-row-height (row-max-height row-count))
(this-column-width (column-max-width column-count)))
(declare (type coordinate this-row-height this-column-width))
(if row-table-p (setq column-count -1) (setq row-count -1))
(setq total-width x-pos
total-height y-pos)
(flet ((cell-mapper (cell)
(if row-table-p (incf column-count) (incf row-count))
(let ((column-width (column-max-width column-count))
(row-height (row-max-height row-count))
(cell-width (bounding-rectangle-width cell))
(cell-height (bounding-rectangle-height cell))
(x-alignment-adjust 0)
(y-alignment-adjust 0))
(declare (type coordinate column-width row-height
cell-width cell-height))
(ecase (slot-value cell 'x-alignment)
(:left )
(:right
(setq x-alignment-adjust
(- column-width cell-width)))
(:center
(setq x-alignment-adjust
(floor (- column-width cell-width) 2))))
(ecase (slot-value cell 'y-alignment)
(:top )
(:bottom
(setq y-alignment-adjust
(- row-height cell-height)))
(:center
(setq y-alignment-adjust
(floor (- row-height cell-height) 2))))
(multiple-value-bind (x-offset y-offset)
(convert-from-ancestor-to-descendant-coordinates
(output-record-parent table) (output-record-parent cell))
(declare (type coordinate x-offset y-offset))
(when row-table-p
(let ((old-width (bounding-rectangle-width cell)))
(bounding-rectangle-set-size
cell
old-width
this-row-height)))
(output-record-set-position
cell
(+ x-offset total-width x-alignment-adjust)
(+ y-offset total-height y-alignment-adjust)))
(if row-table-p
(incf total-width (+ column-width x-spacing))
(incf total-height (+ row-height y-spacing))))))
(declare (dynamic-extent #'cell-mapper))
(map-over-row-cells #'cell-mapper row))
(if row-table-p
(incf y-pos (+ this-row-height y-spacing))
(incf x-pos (+ this-column-width x-spacing))))))
(declare (dynamic-extent #'row-mapper))
(funcall table-mapper #'row-mapper table)))
(draw-rules table column-array row-array stream)
))))
(tree-recompute-extent table))
(defmethod draw-rules ((table standard-table-output-record) column-array row-array stream)
(declare (ignore column-array row-array stream))
(values))
(defclass ruled-table-output-record (standard-table-output-record) ())
(defmethod draw-rules ((table ruled-table-output-record) column-array row-array stream)
(let* ((x-spacing (slot-value table 'x-spacing))
(y-spacing (slot-value table 'y-spacing))
(max-x (loop for x across column-array sum (+ x x-spacing)))
(max-y (loop for y across row-array sum (+ y y-spacing))))
(add-output-record
)
( record )
(clim:draw-line* stream 0 max-y max-x max-y)
(clim:draw-line* stream max-x 0 max-x max-y)
(let ((last-x 0))
(loop for x across column-array
for first = t then nil
do (draw-line* stream last-x 0 last-x max-y)
(incf last-x x)
(if first
(incf last-x (floor x-spacing 2))
(incf last-x x-spacing))
))
(let ((last-y 0))
(loop for y across row-array
for first = t then nil
do (draw-line* stream 0 last-y max-x last-y)
(incf last-y y)
(if first
(incf last-y (floor y-spacing 2))
(incf last-y y-spacing)))))
table))) |
e44ddca6c668e7b950225a8daea40ad8b311f4036f6cd9abf27c5e309025cd65 | modular-macros/ocaml-macros | t330-compact-4.ml | open Lib;;
let rec f n =
if n <= 0 then []
else n :: f (n-1)
in
Gc.compact ();
let l = f 300 in
if List.fold_left (+) 0 l <> 301 * 150 then raise Not_found
;;
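(* Added note: [f 300] builds [300; 299; ...; 1], whose sum is
   300 * 301 / 2 = 45150 = 301 * 150, so the check above fails only if the
   list was corrupted across the heap compaction. *)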
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4 , 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 End_of_file
80 MAKEBLOCK1 0
82 RAISE
83
84 PUSHACC1
85
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2 , 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL " really_input "
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4 , 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454
455 " input "
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) )
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) )
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528
529 BRANCHIFNOT 536
531 GETGLOBAL " output "
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) )
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) )
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL " % .12 g "
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL " % d "
595 C_CALL2 format_int
597 RETURN 1
599 " false "
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 " true "
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 " bool_of_string "
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 " true "
629 RETURN 1
631 " false "
633 RETURN 1
635
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 " char_of_int "
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0 , 740
749 PUSH
750 CLOSURE 0 , 734
753 PUSHGETGLOBAL " Pervasives . Exit "
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL " Pervasives . Assert_failure "
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0 , 720
765 PUSH
766 CLOSURE 0 , 705
769 PUSH
770 CLOSURE 0 , 692
773 PUSH
774 CLOSURE 0 , 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0 , 655
800 PUSHACC 9
802 CLOSURE 1 , 635
805 PUSH
806 CLOSURE 0 , 624
809 PUSHACC 11
811 CLOSURE 1 , 599
814 PUSH
815 CLOSURE 0 , 592
818 PUSH
819 CLOSURE 0 , 585
822 PUSH
823 CLOSUREREC 0 , 12
827
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0 , 574
840 PUSHACC0
841 CLOSURE 1 , 565
844 PUSHACC1
845 CLOSURE 1 , 557
848 PUSH
849 CLOSURE 0 , 545
852 PUSHACC 22
854 CLOSURE 1 , 515
857 PUSH
858 CLOSURE 0 , 505
861 PUSH
862 CLOSURE 0 , 496
865 PUSH
866 CLOSURE 0 , 485
869 PUSHACC0
870 CLOSURE 1 , 477
873 PUSHACC1
874 CLOSURE 1 , 470
877 PUSHACC 28
879 CLOSURE 1 , 441
882 PUSH
883 CLOSUREREC 0 , 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2 , 411
893 PUSHACC 22
895 CLOSUREREC 1 , 70
899 ACC 15
901 CLOSURE 1 , 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2 , 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3 , 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3 , 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2 , 374
936 PUSHACC 20
938 CLOSURE 1 , 364
941 PUSHACC 20
943 CLOSURE 1 , 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2 , 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3 , 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3 , 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2 , 325
978 PUSHACC 25
980 CLOSURE 1 , 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3 , 308
992 PUSHACC0
993 CLOSURE 1 , 301
996 PUSHACC1
997 CLOSURE 1 , 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2 , 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1 , 275
1013 PUSHACC1
1014 CLOSURE 1 , 263
1017 PUSHACC0
1018 CLOSURE 1 , 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2, 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 GETGLOBAL "List.combine"
2005 PUSHGETGLOBALFIELD Pervasives, 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2, 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1, 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1, 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0, 1200
2181 ACC0
2182 CLOSURE 1, 2172
2185 PUSH
2186 CLOSURE 0, 2158
2189 PUSH
2190 CLOSURE 0, 2144
2193 PUSH
2194 CLOSUREREC 0, 1217
2198 GETGLOBALFIELD Pervasives, 16
2201 PUSH
2202 CLOSUREREC 0, 1259
2206 ACC0
2207 CLOSURE 1, 2139
2210 PUSH
2211 CLOSUREREC 0, 1277
2215 CLOSUREREC 0, 1294
2219 CLOSURE 0, 2127
2222 PUSH
2223 CLOSUREREC 0, 1316
2227 CLOSUREREC 0, 1334
2231 CLOSUREREC 0, 1354
2235 CLOSUREREC 0, 1374
2239 CLOSURE 0, 2092
2242 PUSH
2243 CLOSUREREC 0, 1415
2247 CLOSUREREC 0, 1452
2251 CLOSUREREC 0, 1490
2255 CLOSUREREC 0, 1530
2259 CLOSUREREC 0, 1553
2263 CLOSUREREC 0, 1573
2267 CLOSUREREC 0, 1613
2271 CLOSUREREC 0, 1654
2275 CLOSUREREC 0, 1675
2279 CLOSUREREC 0, 1695
2283 CLOSUREREC 0, 1725
2287 CLOSUREREC 0, 1754
2291 CLOSUREREC 0, 1776
2295 CLOSUREREC 0, 1797
2299 CLOSUREREC 0, 1828
2303 CLOSUREREC 0, 1858
2307 ACC 24
2309 CLOSURE 1, 2042
2312 PUSHACC 25
2314 CLOSUREREC 1, 1928
2318 CLOSUREREC 0, 1942
2322 CLOSUREREC 0, 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37, 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 2432
2406 CONST0
2407 PUSHACC1
2408 LEINT
2409 BRANCHIFNOT 2414
2411 CONST0
2412 RETURN 1
2414 ACC0
2415 OFFSETINT -1
2417 PUSHOFFSETCLOSURE0
2418 APPLY1
2419 PUSHACC1
2420 MAKEBLOCK2 0
2422 RETURN 1
2424 RESTART
2425 GRAB 1
2427 ACC1
2428 PUSHACC1
2429 ADDINT
2430 RETURN 2
2432 CLOSUREREC 0, 2406
2436 CONST0
2437 C_CALL1 gc_compaction
2439 CONSTINT 300
2441 PUSHACC1
2442 APPLY1
2443 PUSHCONSTINT 150
2445 PUSHCONSTINT 301
2447 MULINT
2448 PUSHACC1
2449 PUSHCONST0
2450 PUSH
2451 CLOSURE 0, 2425
2454 PUSHGETGLOBALFIELD List, 12
2457 APPLY3
2458 NEQ
2459 BRANCHIFNOT 2466
2461 GETGLOBAL Not_found
2463 MAKEBLOCK1 0
2465 RAISE
2466 POP 2
2468 ATOM0
2469 SETGLOBAL T330-compact-4
2471 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2, 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 GETGLOBAL "List.combine"
2005 PUSHGETGLOBALFIELD Pervasives, 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2, 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1, 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1, 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0, 1200
2181 ACC0
2182 CLOSURE 1, 2172
2185 PUSH
2186 CLOSURE 0, 2158
2189 PUSH
2190 CLOSURE 0, 2144
2193 PUSH
2194 CLOSUREREC 0, 1217
2198 GETGLOBALFIELD Pervasives, 16
2201 PUSH
2202 CLOSUREREC 0, 1259
2206 ACC0
2207 CLOSURE 1, 2139
2210 PUSH
2211 CLOSUREREC 0, 1277
2215 CLOSUREREC 0, 1294
2219 CLOSURE 0, 2127
2222 PUSH
2223 CLOSUREREC 0, 1316
2227 CLOSUREREC 0, 1334
2231 CLOSUREREC 0, 1354
2235 CLOSUREREC 0, 1374
2239 CLOSURE 0, 2092
2242 PUSH
2243 CLOSUREREC 0, 1415
2247 CLOSUREREC 0, 1452
2251 CLOSUREREC 0, 1490
2255 CLOSUREREC 0, 1530
2259 CLOSUREREC 0, 1553
2263 CLOSUREREC 0, 1573
2267 CLOSUREREC 0, 1613
2271 CLOSUREREC 0, 1654
2275 CLOSUREREC 0, 1675
2279 CLOSUREREC 0, 1695
2283 CLOSUREREC 0, 1725
2287 CLOSUREREC 0, 1754
2291 CLOSUREREC 0, 1776
2295 CLOSUREREC 0, 1797
2299 CLOSUREREC 0, 1828
2303 CLOSUREREC 0, 1858
2307 ACC 24
2309 CLOSURE 1, 2042
2312 PUSHACC 25
2314 CLOSUREREC 1, 1928
2318 CLOSUREREC 0, 1942
2322 CLOSUREREC 0, 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37, 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 2432
2406 CONST0
2407 PUSHACC1
2408 LEINT
2409 BRANCHIFNOT 2414
2411 CONST0
2412 RETURN 1
2414 ACC0
2415 OFFSETINT -1
2417 PUSHOFFSETCLOSURE0
2418 APPLY1
2419 PUSHACC1
2420 MAKEBLOCK2 0
2422 RETURN 1
2424 RESTART
2425 GRAB 1
2427 ACC1
2428 PUSHACC1
2429 ADDINT
2430 RETURN 2
2432 CLOSUREREC 0, 2406
2436 CONST0
2437 C_CALL1 gc_compaction
2439 CONSTINT 300
2441 PUSHACC1
2442 APPLY1
2443 PUSHCONSTINT 150
2445 PUSHCONSTINT 301
2447 MULINT
2448 PUSHACC1
2449 PUSHCONST0
2450 PUSH
2451 CLOSURE 0, 2425
2454 PUSHGETGLOBALFIELD List, 12
2457 APPLY3
2458 NEQ
2459 BRANCHIFNOT 2466
2461 GETGLOBAL Not_found
2463 MAKEBLOCK1 0
2465 RAISE
2466 POP 2
2468 ATOM0
2469 SETGLOBAL T330-compact-4
2471 STOP
**)
| null | https://raw.githubusercontent.com/modular-macros/ocaml-macros/05372c7248b5a7b1aa507b3c581f710380f17fcd/testsuite/tests/tool-ocaml/t330-compact-4.ml | ocaml | open Lib;;
let rec f n =
if n <= 0 then []
else n :: f (n-1)
in
Gc.compact ();
let l = f 300 in
if List.fold_left (+) 0 l <> 301 * 150 then raise Not_found
;;
(**
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2 , 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 " List.combine "
2005 PUSHGETGLOBALFIELD Pervasives , 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2 , 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1 , 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1 , 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0 , 1200
2181 ACC0
2182 CLOSURE 1 , 2172
2185 PUSH
2186 CLOSURE 0 , 2158
2189 PUSH
2190 CLOSURE 0 , 2144
2193 PUSH
2194 CLOSUREREC 0 , 1217
2198 GETGLOBALFIELD Pervasives , 16
2201 PUSH
2202 CLOSUREREC 0 , 1259
2206 ACC0
2207 CLOSURE 1 , 2139
2210 PUSH
2211 CLOSUREREC 0 , 1277
2215 CLOSUREREC 0 , 1294
2219 CLOSURE 0 , 2127
2222 PUSH
2223 CLOSUREREC 0 , 1316
2227 CLOSUREREC 0 , 1334
2231 CLOSUREREC 0 , 1354
2235 CLOSUREREC 0 , 1374
2239 CLOSURE 0 , 2092
2242 PUSH
2243 CLOSUREREC 0 , 1415
2247 CLOSUREREC 0 , 1452
2251 CLOSUREREC 0 , 1490
2255 CLOSUREREC 0 , 1530
2259 CLOSUREREC 0 , 1553
2263 CLOSUREREC 0 , 1573
2267 CLOSUREREC 0 , 1613
2271 CLOSUREREC 0 , 1654
2275 CLOSUREREC 0 , 1675
2279 CLOSUREREC 0 , 1695
2283 CLOSUREREC 0 , 1725
2287 CLOSUREREC 0 , 1754
2291 CLOSUREREC 0 , 1776
2295 CLOSUREREC 0 , 1797
2299 CLOSUREREC 0 , 1828
2303 CLOSUREREC 0 , 1858
2307 ACC 24
2309 CLOSURE 1 , 2042
2312 PUSHACC 25
2314 CLOSUREREC 1 , 1928
2318 CLOSUREREC 0 , 1942
2322 CLOSUREREC 0 , 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37 , 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 2432
2406 CONST0
2407 PUSHACC1
2408 LEINT
2409 BRANCHIFNOT 2414
2411 CONST0
2412 RETURN 1
2414 ACC0
2415 OFFSETINT -1
2417 PUSHOFFSETCLOSURE0
2418 APPLY1
2419 PUSHACC1
2420 MAKEBLOCK2 0
2422 RETURN 1
2424 RESTART
2425 GRAB 1
2427 ACC1
2428 PUSHACC1
2429 ADDINT
2430 RETURN 2
2432 CLOSUREREC 0 , 2406
2436 CONST0
2437 C_CALL1 gc_compaction
2439 CONSTINT 300
2441 PUSHACC1
2442 APPLY1
2443 PUSHCONSTINT 150
2445 PUSHCONSTINT 301
2447 MULINT
2448 PUSHACC1
2449 PUSHCONST0
2450 PUSH
2451 CLOSURE 0 , 2425
2454 PUSHGETGLOBALFIELD List , 12
2457 APPLY3
2458 NEQ
2459 BRANCHIFNOT 2466
GETGLOBAL Not_found
2463 MAKEBLOCK1 0
2465 RAISE
2466 POP 2
2468 ATOM0
2469 SETGLOBAL T330-compact-4
2471 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 BRANCH 746
11 RESTART
12 GRAB 1
14 ACC0
15 BRANCHIFNOT 28
17 ACC1
18 PUSHACC1
19 GETFIELD1
20 PUSHOFFSETCLOSURE0
21 APPLY2
22 PUSHACC1
23 GETFIELD0
24 MAKEBLOCK2 0
26 RETURN 2
28 ACC1
29 RETURN 2
31 RESTART
32 GRAB 3
34 CONST0
35 PUSHACC4
36 LEINT
37 BRANCHIFNOT 42
39 CONST0
40 RETURN 4
42 ACC3
43 PUSHACC3
44 PUSHACC3
45 PUSHACC3
46 C_CALL4 caml_input
48 PUSHCONST0
49 PUSHACC1
50 EQ
51 BRANCHIFNOT 58
53 GETGLOBAL End_of_file
55 MAKEBLOCK1 0
57 RAISE
58 ACC0
59 PUSHACC5
60 SUBINT
61 PUSHACC1
62 PUSHACC5
63 ADDINT
64 PUSHACC4
65 PUSHACC4
66 PUSHOFFSETCLOSURE0
67 APPTERM 4, 9
70 ACC0
71 C_CALL1 caml_input_scan_line
73 PUSHCONST0
74 PUSHACC1
75 EQ
76 BRANCHIFNOT 83
78 GETGLOBAL End_of_file
80 MAKEBLOCK1 0
82 RAISE
83 CONST0
84 PUSHACC1
85 GTINT
86 BRANCHIFNOT 107
88 ACC0
89 OFFSETINT -1
91 C_CALL1 create_string
93 PUSHACC1
94 OFFSETINT -1
96 PUSHCONST0
97 PUSHACC2
98 PUSHACC5
99 C_CALL4 caml_input
101 ACC2
102 C_CALL1 caml_input_char
104 ACC0
105 RETURN 3
107 ACC0
108 NEGINT
109 C_CALL1 create_string
111 PUSHACC1
112 NEGINT
113 PUSHCONST0
114 PUSHACC2
115 PUSHACC5
116 C_CALL4 caml_input
118 CONST0
119 PUSHTRAP 130
121 ACC6
122 PUSHOFFSETCLOSURE0
123 APPLY1
124 PUSHACC5
125 PUSHENVACC1
126 APPLY2
127 POPTRAP
128 RETURN 3
130 PUSHGETGLOBAL End_of_file
132 PUSHACC1
133 GETFIELD0
134 EQ
135 BRANCHIFNOT 140
137 ACC1
138 RETURN 4
140 ACC0
141 RAISE
142 ACC0
143 C_CALL1 caml_flush
145 RETURN 1
147 RESTART
148 GRAB 1
150 ACC1
151 PUSHACC1
152 C_CALL2 caml_output_char
154 RETURN 2
156 RESTART
157 GRAB 1
159 ACC1
160 PUSHACC1
161 C_CALL2 caml_output_char
163 RETURN 2
165 RESTART
166 GRAB 1
168 ACC1
169 PUSHACC1
170 C_CALL2 caml_output_int
172 RETURN 2
174 RESTART
175 GRAB 1
177 ACC1
178 PUSHACC1
179 C_CALL2 caml_seek_out
181 RETURN 2
183 ACC0
184 C_CALL1 caml_pos_out
186 RETURN 1
188 ACC0
189 C_CALL1 caml_channel_size
191 RETURN 1
193 RESTART
194 GRAB 1
196 ACC1
197 PUSHACC1
198 C_CALL2 caml_set_binary_mode
200 RETURN 2
202 ACC0
203 C_CALL1 caml_input_char
205 RETURN 1
207 ACC0
208 C_CALL1 caml_input_char
210 RETURN 1
212 ACC0
213 C_CALL1 caml_input_int
215 RETURN 1
217 ACC0
218 C_CALL1 input_value
220 RETURN 1
222 RESTART
223 GRAB 1
225 ACC1
226 PUSHACC1
227 C_CALL2 caml_seek_in
229 RETURN 2
231 ACC0
232 C_CALL1 caml_pos_in
234 RETURN 1
236 ACC0
237 C_CALL1 caml_channel_size
239 RETURN 1
241 ACC0
242 C_CALL1 caml_close_channel
244 RETURN 1
246 RESTART
247 GRAB 1
249 ACC1
250 PUSHACC1
251 C_CALL2 caml_set_binary_mode
253 RETURN 2
255 CONST0
256 PUSHENVACC1
257 APPLY1
258 ACC0
259 C_CALL1 sys_exit
261 RETURN 1
263 CONST0
264 PUSHENVACC1
265 GETFIELD0
266 APPTERM1 2
268 CONST0
269 PUSHENVACC1
270 APPLY1
271 CONST0
272 PUSHENVACC2
273 APPTERM1 2
275 ENVACC1
276 GETFIELD0
277 PUSHACC0
278 PUSHACC2
279 CLOSURE 2, 268
282 PUSHENVACC1
283 SETFIELD0
284 RETURN 2
286 ENVACC1
287 C_CALL1 caml_flush
289 ENVACC2
290 C_CALL1 caml_flush
292 RETURN 1
294 CONST0
295 PUSHENVACC1
296 APPLY1
297 C_CALL1 float_of_string
299 RETURN 1
301 CONST0
302 PUSHENVACC1
303 APPLY1
304 C_CALL1 int_of_string
306 RETURN 1
308 ENVACC2
309 C_CALL1 caml_flush
311 ENVACC1
312 PUSHENVACC3
313 APPTERM1 2
315 CONSTINT 13
317 PUSHENVACC1
318 C_CALL2 caml_output_char
320 ENVACC1
321 C_CALL1 caml_flush
323 RETURN 1
325 ACC0
326 PUSHENVACC1
327 PUSHENVACC2
328 APPLY2
329 CONSTINT 13
331 PUSHENVACC1
332 C_CALL2 caml_output_char
334 ENVACC1
335 C_CALL1 caml_flush
337 RETURN 1
339 ACC0
340 PUSHENVACC1
341 APPLY1
342 PUSHENVACC2
343 PUSHENVACC3
344 APPTERM2 3
346 ACC0
347 PUSHENVACC1
348 APPLY1
349 PUSHENVACC2
350 PUSHENVACC3
351 APPTERM2 3
353 ACC0
354 PUSHENVACC1
355 PUSHENVACC2
356 APPTERM2 3
358 ACC0
359 PUSHENVACC1
360 C_CALL2 caml_output_char
362 RETURN 1
364 CONSTINT 13
366 PUSHENVACC1
367 C_CALL2 caml_output_char
369 ENVACC1
370 C_CALL1 caml_flush
372 RETURN 1
374 ACC0
375 PUSHENVACC1
376 PUSHENVACC2
377 APPLY2
378 CONSTINT 13
380 PUSHENVACC1
381 C_CALL2 caml_output_char
383 RETURN 1
385 ACC0
386 PUSHENVACC1
387 APPLY1
388 PUSHENVACC2
389 PUSHENVACC3
390 APPTERM2 3
392 ACC0
393 PUSHENVACC1
394 APPLY1
395 PUSHENVACC2
396 PUSHENVACC3
397 APPTERM2 3
399 ACC0
400 PUSHENVACC1
401 PUSHENVACC2
402 APPTERM2 3
404 ACC0
405 PUSHENVACC1
406 C_CALL2 caml_output_char
408 RETURN 1
410 RESTART
411 GRAB 3
413 CONST0
414 PUSHACC3
415 LTINT
416 BRANCHIF 427
418 ACC1
419 C_CALL1 ml_string_length
421 PUSHACC4
422 PUSHACC4
423 ADDINT
424 GTINT
425 BRANCHIFNOT 432
427 GETGLOBAL "really_input"
429 PUSHENVACC1
430 APPTERM1 5
432 ACC3
433 PUSHACC3
434 PUSHACC3
435 PUSHACC3
436 PUSHENVACC2
437 APPTERM 4, 8
440 RESTART
441 GRAB 3
443 CONST0
444 PUSHACC3
445 LTINT
446 BRANCHIF 457
448 ACC1
449 C_CALL1 ml_string_length
451 PUSHACC4
452 PUSHACC4
453 ADDINT
454 GTINT
455 BRANCHIFNOT 462
457 GETGLOBAL "input"
459 PUSHENVACC1
460 APPTERM1 5
462 ACC3
463 PUSHACC3
464 PUSHACC3
465 PUSHACC3
466 C_CALL4 caml_input
468 RETURN 4
470 ACC0
471 PUSHCONST0
472 PUSHGETGLOBAL <0>(0, <0>(6, 0))
474 PUSHENVACC1
475 APPTERM3 4
477 ACC0
478 PUSHCONST0
479 PUSHGETGLOBAL <0>(0, <0>(7, 0))
481 PUSHENVACC1
482 APPTERM3 4
484 RESTART
485 GRAB 2
487 ACC1
488 PUSHACC1
489 PUSHACC4
490 C_CALL3 sys_open
492 C_CALL1 caml_open_descriptor
494 RETURN 3
496 ACC0
497 C_CALL1 caml_flush
499 ACC0
500 C_CALL1 caml_close_channel
502 RETURN 1
504 RESTART
505 GRAB 1
507 CONST0
508 PUSHACC2
509 PUSHACC2
510 C_CALL3 output_value
512 RETURN 2
514 RESTART
515 GRAB 3
517 CONST0
518 PUSHACC3
519 LTINT
520 BRANCHIF 531
522 ACC1
523 C_CALL1 ml_string_length
525 PUSHACC4
526 PUSHACC4
527 ADDINT
528 GTINT
529 BRANCHIFNOT 536
531 GETGLOBAL "output"
533 PUSHENVACC1
534 APPTERM1 5
536 ACC3
537 PUSHACC3
538 PUSHACC3
539 PUSHACC3
540 C_CALL4 caml_output
542 RETURN 4
544 RESTART
545 GRAB 1
547 ACC1
548 C_CALL1 ml_string_length
550 PUSHCONST0
551 PUSHACC3
552 PUSHACC3
553 C_CALL4 caml_output
555 RETURN 2
557 ACC0
558 PUSHCONSTINT 438
560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))
562 PUSHENVACC1
563 APPTERM3 4
565 ACC0
566 PUSHCONSTINT 438
568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))
570 PUSHENVACC1
571 APPTERM3 4
573 RESTART
574 GRAB 2
576 ACC1
577 PUSHACC1
578 PUSHACC4
579 C_CALL3 sys_open
581 C_CALL1 caml_open_descriptor
583 RETURN 3
585 ACC0
586 PUSHGETGLOBAL "%.12g"
588 C_CALL2 format_float
590 RETURN 1
592 ACC0
593 PUSHGETGLOBAL "%d"
595 C_CALL2 format_int
597 RETURN 1
599 GETGLOBAL "false"
601 PUSHACC1
602 C_CALL2 string_equal
604 BRANCHIFNOT 609
606 CONST0
607 RETURN 1
609 GETGLOBAL "true"
611 PUSHACC1
612 C_CALL2 string_equal
614 BRANCHIFNOT 619
616 CONST1
617 RETURN 1
619 GETGLOBAL "bool_of_string"
621 PUSHENVACC1
622 APPTERM1 2
624 ACC0
625 BRANCHIFNOT 631
627 GETGLOBAL "true"
629 RETURN 1
631 GETGLOBAL "false"
633 RETURN 1
635 CONST0
636 PUSHACC1
637 LTINT
638 BRANCHIF 646
640 CONSTINT 255
642 PUSHACC1
643 GTINT
644 BRANCHIFNOT 651
646 GETGLOBAL "char_of_int"
648 PUSHENVACC1
649 APPTERM1 2
651 ACC0
652 RETURN 1
654 RESTART
655 GRAB 1
657 ACC0
658 C_CALL1 ml_string_length
660 PUSHACC2
661 C_CALL1 ml_string_length
663 PUSHACC0
664 PUSHACC2
665 ADDINT
666 C_CALL1 create_string
668 PUSHACC2
669 PUSHCONST0
670 PUSHACC2
671 PUSHCONST0
672 PUSHACC7
673 C_CALL5 blit_string
675 ACC1
676 PUSHACC3
677 PUSHACC2
678 PUSHCONST0
679 PUSHACC 8
681 C_CALL5 blit_string
683 ACC0
684 RETURN 5
686 CONSTINT -1
688 PUSHACC1
689 XORINT
690 RETURN 1
692 CONST0
693 PUSHACC1
694 GEINT
695 BRANCHIFNOT 700
697 ACC0
698 RETURN 1
700 ACC0
701 NEGINT
702 RETURN 1
704 RESTART
705 GRAB 1
707 ACC1
708 PUSHACC1
709 C_CALL2 greaterequal
711 BRANCHIFNOT 716
713 ACC0
714 RETURN 2
716 ACC1
717 RETURN 2
719 RESTART
720 GRAB 1
722 ACC1
723 PUSHACC1
724 C_CALL2 lessequal
726 BRANCHIFNOT 731
728 ACC0
729 RETURN 2
731 ACC1
732 RETURN 2
734 ACC0
735 PUSHGETGLOBAL Invalid_argument
737 MAKEBLOCK2 0
739 RAISE
740 ACC0
741 PUSHGETGLOBAL Failure
743 MAKEBLOCK2 0
745 RAISE
746 CLOSURE 0, 740
749 PUSH
750 CLOSURE 0, 734
753 PUSHGETGLOBAL "Pervasives.Exit"
755 MAKEBLOCK1 0
757 PUSHGETGLOBAL "Pervasives.Assert_failure"
759 MAKEBLOCK1 0
761 PUSH
762 CLOSURE 0, 720
765 PUSH
766 CLOSURE 0, 705
769 PUSH
770 CLOSURE 0, 692
773 PUSH
774 CLOSURE 0, 686
777 PUSHCONST0
778 PUSHCONSTINT 31
780 PUSHCONST1
781 LSLINT
782 EQ
783 BRANCHIFNOT 789
785 CONSTINT 30
787 BRANCH 791
789 CONSTINT 62
791 PUSHCONST1
792 LSLINT
793 PUSHACC0
794 OFFSETINT -1
796 PUSH
797 CLOSURE 0, 655
800 PUSHACC 9
802 CLOSURE 1, 635
805 PUSH
806 CLOSURE 0, 624
809 PUSHACC 11
811 CLOSURE 1, 599
814 PUSH
815 CLOSURE 0, 592
818 PUSH
819 CLOSURE 0, 585
822 PUSH
823 CLOSUREREC 0, 12
827 CONST0
828 C_CALL1 caml_open_descriptor
830 PUSHCONST1
831 C_CALL1 caml_open_descriptor
833 PUSHCONST2
834 C_CALL1 caml_open_descriptor
836 PUSH
837 CLOSURE 0, 574
840 PUSHACC0
841 CLOSURE 1, 565
844 PUSHACC1
845 CLOSURE 1, 557
848 PUSH
849 CLOSURE 0, 545
852 PUSHACC 22
854 CLOSURE 1, 515
857 PUSH
858 CLOSURE 0, 505
861 PUSH
862 CLOSURE 0, 496
865 PUSH
866 CLOSURE 0, 485
869 PUSHACC0
870 CLOSURE 1, 477
873 PUSHACC1
874 CLOSURE 1, 470
877 PUSHACC 28
879 CLOSURE 1, 441
882 PUSH
883 CLOSUREREC 0, 32
887 ACC0
888 PUSHACC 31
890 CLOSURE 2, 411
893 PUSHACC 22
895 CLOSUREREC 1, 70
899 ACC 15
901 CLOSURE 1, 404
904 PUSHACC 11
906 PUSHACC 17
908 CLOSURE 2, 399
911 PUSHACC 12
913 PUSHACC 18
915 PUSHACC 23
917 CLOSURE 3, 392
920 PUSHACC 13
922 PUSHACC 19
924 PUSHACC 23
926 CLOSURE 3, 385
929 PUSHACC 14
931 PUSHACC 20
933 CLOSURE 2, 374
936 PUSHACC 20
938 CLOSURE 1, 364
941 PUSHACC 20
943 CLOSURE 1, 358
946 PUSHACC 17
948 PUSHACC 22
950 CLOSURE 2, 353
953 PUSHACC 18
955 PUSHACC 23
957 PUSHACC 29
959 CLOSURE 3, 346
962 PUSHACC 19
964 PUSHACC 24
966 PUSHACC 29
968 CLOSURE 3, 339
971 PUSHACC 20
973 PUSHACC 25
975 CLOSURE 2, 325
978 PUSHACC 25
980 CLOSURE 1, 315
983 PUSHACC 12
985 PUSHACC 28
987 PUSHACC 30
989 CLOSURE 3, 308
992 PUSHACC0
993 CLOSURE 1, 301
996 PUSHACC1
997 CLOSURE 1, 294
1000 PUSHACC 29
1002 PUSHACC 31
1004 CLOSURE 2, 286
1007 MAKEBLOCK1 0
1009 PUSHACC0
1010 CLOSURE 1, 275
1013 PUSHACC1
1014 CLOSURE 1, 263
1017 PUSHACC0
1018 CLOSURE 1, 255
1021 PUSHACC1
1022 PUSHACC 22
1024 PUSHACC4
1025 PUSHACC3
1026 PUSH
1027 CLOSURE 0, 247
1030 PUSH
1031 CLOSURE 0, 241
1034 PUSH
1035 CLOSURE 0, 236
1038 PUSH
1039 CLOSURE 0, 231
1042 PUSH
1043 CLOSURE 0, 223
1046 PUSH
1047 CLOSURE 0, 217
1050 PUSH
1051 CLOSURE 0, 212
1054 PUSH
1055 CLOSURE 0, 207
1058 PUSHACC 32
1060 PUSHACC 35
1062 PUSHACC 33
1064 PUSH
1065 CLOSURE 0, 202
1068 PUSHACC 41
1070 PUSHACC 40
1072 PUSHACC 42
1074 PUSH
1075 CLOSURE 0, 194
1078 PUSHACC 46
1080 PUSH
1081 CLOSURE 0, 188
1084 PUSH
1085 CLOSURE 0, 183
1088 PUSH
1089 CLOSURE 0, 175
1092 PUSHACC 51
1094 PUSH
1095 CLOSURE 0, 166
1098 PUSH
1099 CLOSURE 0, 157
1102 PUSHACC 55
1104 PUSHACC 57
1106 PUSH
1107 CLOSURE 0, 148
1110 PUSH
1111 CLOSURE 0, 142
1114 PUSHACC 63
1116 PUSHACC 62
1118 PUSHACC 64
1120 PUSHACC 38
1122 PUSHACC 40
1124 PUSHACC 42
1126 PUSHACC 44
1128 PUSHACC 46
1130 PUSHACC 48
1132 PUSHACC 50
1134 PUSHACC 52
1136 PUSHACC 54
1138 PUSHACC 56
1140 PUSHACC 58
1142 PUSHACC 60
1144 PUSHACC 62
1146 PUSHACC 64
1148 PUSHACC 66
1150 PUSHACC 82
1152 PUSHACC 84
1154 PUSHACC 86
1156 PUSHACC 88
1158 PUSHACC 90
1160 PUSHACC 92
1162 PUSHACC 94
1164 PUSHACC 96
1166 PUSHACC 98
1168 PUSHACC 100
1170 PUSHACC 104
1172 PUSHACC 104
1174 PUSHACC 104
1176 PUSHACC 108
1178 PUSHACC 110
1180 PUSHACC 112
1182 PUSHACC 117
1184 PUSHACC 117
1186 PUSHACC 117
1188 PUSHACC 117
1190 MAKEBLOCK 69, 0
1193 POP 53
1195 SETGLOBAL Pervasives
1197 BRANCH 2177
1199 RESTART
1200 GRAB 1
1202 ACC1
1203 BRANCHIFNOT 1213
1205 ACC1
1206 GETFIELD1
1207 PUSHACC1
1208 OFFSETINT 1
1210 PUSHOFFSETCLOSURE0
1211 APPTERM2 4
1213 ACC0
1214 RETURN 2
1216 RESTART
1217 GRAB 1
1219 ACC0
1220 BRANCHIFNOT 1251
1222 CONST0
1223 PUSHACC2
1224 EQ
1225 BRANCHIFNOT 1231
1227 ACC0
1228 GETFIELD0
1229 RETURN 2
1231 CONST0
1232 PUSHACC2
1233 GTINT
1234 BRANCHIFNOT 1244
1236 ACC1
1237 OFFSETINT -1
1239 PUSHACC1
1240 GETFIELD1
1241 PUSHOFFSETCLOSURE0
1242 APPTERM2 4
1244 GETGLOBAL "List.nth"
1246 PUSHGETGLOBALFIELD Pervasives, 2
1249 APPTERM1 3
1251 GETGLOBAL "nth"
1253 PUSHGETGLOBALFIELD Pervasives, 3
1256 APPTERM1 3
1258 RESTART
1259 GRAB 1
1261 ACC0
1262 BRANCHIFNOT 1274
1264 ACC1
1265 PUSHACC1
1266 GETFIELD0
1267 MAKEBLOCK2 0
1269 PUSHACC1
1270 GETFIELD1
1271 PUSHOFFSETCLOSURE0
1272 APPTERM2 4
1274 ACC1
1275 RETURN 2
1277 ACC0
1278 BRANCHIFNOT 1291
1280 ACC0
1281 GETFIELD1
1282 PUSHOFFSETCLOSURE0
1283 APPLY1
1284 PUSHACC1
1285 GETFIELD0
1286 PUSHGETGLOBALFIELD Pervasives, 16
1289 APPTERM2 3
1291 RETURN 1
1293 RESTART
1294 GRAB 1
1296 ACC1
1297 BRANCHIFNOT 1313
1299 ACC1
1300 GETFIELD0
1301 PUSHACC1
1302 APPLY1
1303 PUSHACC2
1304 GETFIELD1
1305 PUSHACC2
1306 PUSHOFFSETCLOSURE0
1307 APPLY2
1308 PUSHACC1
1309 MAKEBLOCK2 0
1311 POP 1
1313 RETURN 2
1315 RESTART
1316 GRAB 1
1318 ACC1
1319 BRANCHIFNOT 1331
1321 ACC1
1322 GETFIELD0
1323 PUSHACC1
1324 APPLY1
1325 ACC1
1326 GETFIELD1
1327 PUSHACC1
1328 PUSHOFFSETCLOSURE0
1329 APPTERM2 4
1331 RETURN 2
1333 RESTART
1334 GRAB 2
1336 ACC2
1337 BRANCHIFNOT 1350
1339 ACC2
1340 GETFIELD1
1341 PUSHACC3
1342 GETFIELD0
1343 PUSHACC3
1344 PUSHACC3
1345 APPLY2
1346 PUSHACC2
1347 PUSHOFFSETCLOSURE0
1348 APPTERM3 6
1350 ACC1
1351 RETURN 3
1353 RESTART
1354 GRAB 2
1356 ACC1
1357 BRANCHIFNOT 1370
1359 ACC2
1360 PUSHACC2
1361 GETFIELD1
1362 PUSHACC2
1363 PUSHOFFSETCLOSURE0
1364 APPLY3
1365 PUSHACC2
1366 GETFIELD0
1367 PUSHACC2
1368 APPTERM2 5
1370 ACC2
1371 RETURN 3
1373 RESTART
1374 GRAB 2
1376 ACC1
1377 BRANCHIFNOT 1400
1379 ACC2
1380 BRANCHIFNOT 1407
1382 ACC2
1383 GETFIELD0
1384 PUSHACC2
1385 GETFIELD0
1386 PUSHACC2
1387 APPLY2
1388 PUSHACC3
1389 GETFIELD1
1390 PUSHACC3
1391 GETFIELD1
1392 PUSHACC3
1393 PUSHOFFSETCLOSURE0
1394 APPLY3
1395 PUSHACC1
1396 MAKEBLOCK2 0
1398 RETURN 4
1400 ACC2
1401 BRANCHIFNOT 1405
1403 BRANCH 1407
1405 RETURN 3
1407 GETGLOBAL "List.map2"
1409 PUSHGETGLOBALFIELD Pervasives, 2
1412 APPTERM1 4
1414 RESTART
1415 GRAB 2
1417 ACC1
1418 BRANCHIFNOT 1437
1420 ACC2
1421 BRANCHIFNOT 1444
1423 ACC2
1424 GETFIELD0
1425 PUSHACC2
1426 GETFIELD0
1427 PUSHACC2
1428 APPLY2
1429 ACC2
1430 GETFIELD1
1431 PUSHACC2
1432 GETFIELD1
1433 PUSHACC2
1434 PUSHOFFSETCLOSURE0
1435 APPTERM3 6
1437 ACC2
1438 BRANCHIFNOT 1442
1440 BRANCH 1444
1442 RETURN 3
1444 GETGLOBAL "List.iter2"
1446 PUSHGETGLOBALFIELD Pervasives, 2
1449 APPTERM1 4
1451 RESTART
1452 GRAB 3
1454 ACC2
1455 BRANCHIFNOT 1476
1457 ACC3
1458 BRANCHIFNOT 1482
1460 ACC3
1461 GETFIELD1
1462 PUSHACC3
1463 GETFIELD1
1464 PUSHACC5
1465 GETFIELD0
1466 PUSHACC5
1467 GETFIELD0
1468 PUSHACC5
1469 PUSHACC5
1470 APPLY3
1471 PUSHACC3
1472 PUSHOFFSETCLOSURE0
1473 APPTERM 4, 8
1476 ACC3
1477 BRANCHIF 1482
1479 ACC1
1480 RETURN 4
1482 GETGLOBAL "List.fold_left2"
1484 PUSHGETGLOBALFIELD Pervasives, 2
1487 APPTERM1 5
1489 RESTART
1490 GRAB 3
1492 ACC1
1493 BRANCHIFNOT 1516
1495 ACC2
1496 BRANCHIFNOT 1522
1498 PUSH_RETADDR 1509
1500 ACC6
1501 PUSHACC6
1502 GETFIELD1
1503 PUSHACC6
1504 GETFIELD1
1505 PUSHACC6
1506 PUSHOFFSETCLOSURE0
1507 APPLY 4
1509 PUSHACC3
1510 GETFIELD0
1511 PUSHACC3
1512 GETFIELD0
1513 PUSHACC3
1514 APPTERM3 7
1516 ACC2
1517 BRANCHIF 1522
1519 ACC3
1520 RETURN 4
1522 GETGLOBAL "List.fold_right2"
1524 PUSHGETGLOBALFIELD Pervasives, 2
1527 APPTERM1 5
1529 RESTART
1530 GRAB 1
1532 ACC1
1533 BRANCHIFNOT 1549
1535 ACC1
1536 GETFIELD0
1537 PUSHACC1
1538 APPLY1
1539 BRANCHIFNOT 1547
1541 ACC1
1542 GETFIELD1
1543 PUSHACC1
1544 PUSHOFFSETCLOSURE0
1545 APPTERM2 4
1547 RETURN 2
1549 CONST1
1550 RETURN 2
1552 RESTART
1553 GRAB 1
1555 ACC1
1556 BRANCHIFNOT 1570
1558 ACC1
1559 GETFIELD0
1560 PUSHACC1
1561 APPLY1
1562 BRANCHIF 1570
1564 ACC1
1565 GETFIELD1
1566 PUSHACC1
1567 PUSHOFFSETCLOSURE0
1568 APPTERM2 4
1570 RETURN 2
1572 RESTART
1573 GRAB 2
1575 ACC1
1576 BRANCHIFNOT 1599
1578 ACC2
1579 BRANCHIFNOT 1605
1581 ACC2
1582 GETFIELD0
1583 PUSHACC2
1584 GETFIELD0
1585 PUSHACC2
1586 APPLY2
1587 BRANCHIFNOT 1597
1589 ACC2
1590 GETFIELD1
1591 PUSHACC2
1592 GETFIELD1
1593 PUSHACC2
1594 PUSHOFFSETCLOSURE0
1595 APPTERM3 6
1597 RETURN 3
1599 ACC2
1600 BRANCHIF 1605
1602 CONST1
1603 RETURN 3
1605 GETGLOBAL "List.for_all2"
1607 PUSHGETGLOBALFIELD Pervasives, 2
1610 APPTERM1 4
1612 RESTART
1613 GRAB 2
1615 ACC1
1616 BRANCHIFNOT 1639
1618 ACC2
1619 BRANCHIFNOT 1646
1621 ACC2
1622 GETFIELD0
1623 PUSHACC2
1624 GETFIELD0
1625 PUSHACC2
1626 APPLY2
1627 BRANCHIF 1637
1629 ACC2
1630 GETFIELD1
1631 PUSHACC2
1632 GETFIELD1
1633 PUSHACC2
1634 PUSHOFFSETCLOSURE0
1635 APPTERM3 6
1637 RETURN 3
1639 ACC2
1640 BRANCHIFNOT 1644
1642 BRANCH 1646
1644 RETURN 3
1646 GETGLOBAL "List.exists2"
1648 PUSHGETGLOBALFIELD Pervasives, 2
1651 APPTERM1 4
1653 RESTART
1654 GRAB 1
1656 ACC1
1657 BRANCHIFNOT 1672
1659 ACC0
1660 PUSHACC2
1661 GETFIELD0
1662 C_CALL2 equal
1664 BRANCHIF 1672
1666 ACC1
1667 GETFIELD1
1668 PUSHACC1
1669 PUSHOFFSETCLOSURE0
1670 APPTERM2 4
1672 RETURN 2
1674 RESTART
1675 GRAB 1
1677 ACC1
1678 BRANCHIFNOT 1692
1680 ACC0
1681 PUSHACC2
1682 GETFIELD0
1683 EQ
1684 BRANCHIF 1692
1686 ACC1
1687 GETFIELD1
1688 PUSHACC1
1689 PUSHOFFSETCLOSURE0
1690 APPTERM2 4
1692 RETURN 2
1694 RESTART
1695 GRAB 1
1697 ACC1
1698 BRANCHIFNOT 1719
1700 ACC1
1701 GETFIELD0
1702 PUSHACC1
1703 PUSHACC1
1704 GETFIELD0
1705 C_CALL2 equal
1707 BRANCHIFNOT 1713
1709 ACC0
1710 GETFIELD1
1711 RETURN 3
1713 ACC2
1714 GETFIELD1
1715 PUSHACC2
1716 PUSHOFFSETCLOSURE0
1717 APPTERM2 5
1719 GETGLOBAL Not_found
1721 MAKEBLOCK1 0
1723 RAISE
1724 RESTART
1725 GRAB 1
1727 ACC1
1728 BRANCHIFNOT 1748
1730 ACC1
1731 GETFIELD0
1732 PUSHACC1
1733 PUSHACC1
1734 GETFIELD0
1735 EQ
1736 BRANCHIFNOT 1742
1738 ACC0
1739 GETFIELD1
1740 RETURN 3
1742 ACC2
1743 GETFIELD1
1744 PUSHACC2
1745 PUSHOFFSETCLOSURE0
1746 APPTERM2 5
1748 GETGLOBAL Not_found
1750 MAKEBLOCK1 0
1752 RAISE
1753 RESTART
1754 GRAB 1
1756 ACC1
1757 BRANCHIFNOT 1773
1759 ACC0
1760 PUSHACC2
1761 GETFIELD0
1762 GETFIELD0
1763 C_CALL2 equal
1765 BRANCHIF 1773
1767 ACC1
1768 GETFIELD1
1769 PUSHACC1
1770 PUSHOFFSETCLOSURE0
1771 APPTERM2 4
1773 RETURN 2
1775 RESTART
1776 GRAB 1
1778 ACC1
1779 BRANCHIFNOT 1794
1781 ACC0
1782 PUSHACC2
1783 GETFIELD0
1784 GETFIELD0
1785 EQ
1786 BRANCHIF 1794
1788 ACC1
1789 GETFIELD1
1790 PUSHACC1
1791 PUSHOFFSETCLOSURE0
1792 APPTERM2 4
1794 RETURN 2
1796 RESTART
1797 GRAB 1
1799 ACC1
1800 BRANCHIFNOT 1825
1802 ACC1
1803 GETFIELD0
1804 PUSHACC2
1805 GETFIELD1
1806 PUSHACC2
1807 PUSHACC2
1808 GETFIELD0
1809 C_CALL2 equal
1811 BRANCHIFNOT 1816
1813 ACC0
1814 RETURN 4
1816 ACC0
1817 PUSHACC3
1818 PUSHOFFSETCLOSURE0
1819 APPLY2
1820 PUSHACC2
1821 MAKEBLOCK2 0
1823 POP 2
1825 RETURN 2
1827 RESTART
1828 GRAB 1
1830 ACC1
1831 BRANCHIFNOT 1855
1833 ACC1
1834 GETFIELD0
1835 PUSHACC2
1836 GETFIELD1
1837 PUSHACC2
1838 PUSHACC2
1839 GETFIELD0
1840 EQ
1841 BRANCHIFNOT 1846
1843 ACC0
1844 RETURN 4
1846 ACC0
1847 PUSHACC3
1848 PUSHOFFSETCLOSURE0
1849 APPLY2
1850 PUSHACC2
1851 MAKEBLOCK2 0
1853 POP 2
1855 RETURN 2
1857 RESTART
1858 GRAB 1
1860 ACC1
1861 BRANCHIFNOT 1879
1863 ACC1
1864 GETFIELD0
1865 PUSHACC0
1866 PUSHACC2
1867 APPLY1
1868 BRANCHIFNOT 1873
1870 ACC0
1871 RETURN 3
1873 ACC2
1874 GETFIELD1
1875 PUSHACC2
1876 PUSHOFFSETCLOSURE0
1877 APPTERM2 5
1879 GETGLOBAL Not_found
1881 MAKEBLOCK1 0
1883 RAISE
1884 RESTART
1885 GRAB 2
1887 ACC2
1888 BRANCHIFNOT 1917
1890 ACC2
1891 GETFIELD0
1892 PUSHACC3
1893 GETFIELD1
1894 PUSHACC1
1895 PUSHENVACC2
1896 APPLY1
1897 BRANCHIFNOT 1908
1899 ACC0
1900 PUSHACC4
1901 PUSHACC4
1902 PUSHACC4
1903 MAKEBLOCK2 0
1905 PUSHOFFSETCLOSURE0
1906 APPTERM3 8
1908 ACC0
1909 PUSHACC4
1910 PUSHACC3
1911 MAKEBLOCK2 0
1913 PUSHACC4
1914 PUSHOFFSETCLOSURE0
1915 APPTERM3 8
1917 ACC1
1918 PUSHENVACC1
1919 APPLY1
1920 PUSHACC1
1921 PUSHENVACC1
1922 APPLY1
1923 MAKEBLOCK2 0
1925 RETURN 3
1927 RESTART
1928 GRAB 1
1930 ACC0
1931 PUSHENVACC1
1932 CLOSUREREC 2, 1885
1936 ACC2
1937 PUSHCONST0
1938 PUSHCONST0
1939 PUSHACC3
1940 APPTERM3 6
1942 ACC0
1943 BRANCHIFNOT 1967
1945 ACC0
1946 GETFIELD0
1947 PUSHACC1
1948 GETFIELD1
1949 PUSHOFFSETCLOSURE0
1950 APPLY1
1951 PUSHACC0
1952 GETFIELD1
1953 PUSHACC2
1954 GETFIELD1
1955 MAKEBLOCK2 0
1957 PUSHACC1
1958 GETFIELD0
1959 PUSHACC3
1960 GETFIELD0
1961 MAKEBLOCK2 0
1963 MAKEBLOCK2 0
1965 RETURN 3
1967 GETGLOBAL <0>(0, 0)
1969 RETURN 1
1971 RESTART
1972 GRAB 1
1974 ACC0
1975 BRANCHIFNOT 1996
1977 ACC1
1978 BRANCHIFNOT 2003
1980 ACC1
1981 GETFIELD1
1982 PUSHACC1
1983 GETFIELD1
1984 PUSHOFFSETCLOSURE0
1985 APPLY2
1986 PUSHACC2
1987 GETFIELD0
1988 PUSHACC2
1989 GETFIELD0
1990 MAKEBLOCK2 0
1992 MAKEBLOCK2 0
1994 RETURN 2
1996 ACC1
1997 BRANCHIFNOT 2001
1999 BRANCH 2003
2001 RETURN 2
2003 GETGLOBAL "List.combine"
2005 PUSHGETGLOBALFIELD Pervasives, 2
2008 APPTERM1 3
2010 RESTART
2011 GRAB 1
2013 ACC1
2014 BRANCHIFNOT 2038
2016 ACC1
2017 GETFIELD0
2018 PUSHACC2
2019 GETFIELD1
2020 PUSHACC1
2021 PUSHENVACC2
2022 APPLY1
2023 BRANCHIFNOT 2033
2025 ACC0
2026 PUSHACC3
2027 PUSHACC3
2028 MAKEBLOCK2 0
2030 PUSHOFFSETCLOSURE0
2031 APPTERM2 6
2033 ACC0
2034 PUSHACC3
2035 PUSHOFFSETCLOSURE0
2036 APPTERM2 6
2038 ACC0
2039 PUSHENVACC1
2040 APPTERM1 3
2042 ACC0
2043 PUSHENVACC1
2044 CLOSUREREC 2, 2011
2048 CONST0
2049 PUSHACC1
2050 APPTERM1 3
2052 RESTART
2053 GRAB 2
2055 ACC1
2056 BRANCHIFNOT 2077
2058 ACC2
2059 BRANCHIFNOT 2084
2061 ACC2
2062 GETFIELD1
2063 PUSHACC2
2064 GETFIELD1
2065 PUSHACC2
2066 PUSHACC5
2067 GETFIELD0
2068 PUSHACC5
2069 GETFIELD0
2070 PUSHENVACC1
2071 APPLY2
2072 MAKEBLOCK2 0
2074 PUSHOFFSETCLOSURE0
2075 APPTERM3 6
2077 ACC2
2078 BRANCHIFNOT 2082
2080 BRANCH 2084
2082 RETURN 3
2084 GETGLOBAL "List.rev_map2"
2086 PUSHGETGLOBALFIELD Pervasives, 2
2089 APPTERM1 4
2091 RESTART
2092 GRAB 2
2094 ACC0
2095 CLOSUREREC 1, 2053
2099 ACC3
2100 PUSHACC3
2101 PUSHCONST0
2102 PUSHACC3
2103 APPTERM3 7
2105 RESTART
2106 GRAB 1
2108 ACC1
2109 BRANCHIFNOT 2123
2111 ACC1
2112 GETFIELD1
2113 PUSHACC1
2114 PUSHACC3
2115 GETFIELD0
2116 PUSHENVACC1
2117 APPLY1
2118 MAKEBLOCK2 0
2120 PUSHOFFSETCLOSURE0
2121 APPTERM2 4
2123 ACC0
2124 RETURN 2
2126 RESTART
2127 GRAB 1
2129 ACC0
2130 CLOSUREREC 1, 2106
2134 ACC2
2135 PUSHCONST0
2136 PUSHACC2
2137 APPTERM2 5
2139 CONST0
2140 PUSHACC1
2141 PUSHENVACC1
2142 APPTERM2 3
2144 ACC0
2145 BRANCHIFNOT 2151
2147 ACC0
2148 GETFIELD1
2149 RETURN 1
2151 GETGLOBAL "tl"
2153 PUSHGETGLOBALFIELD Pervasives, 3
2156 APPTERM1 2
2158 ACC0
2159 BRANCHIFNOT 2165
2161 ACC0
2162 GETFIELD0
2163 RETURN 1
2165 GETGLOBAL "hd"
2167 PUSHGETGLOBALFIELD Pervasives, 3
2170 APPTERM1 2
2172 ACC0
2173 PUSHCONST0
2174 PUSHENVACC1
2175 APPTERM2 3
2177 CLOSUREREC 0, 1200
2181 ACC0
2182 CLOSURE 1, 2172
2185 PUSH
2186 CLOSURE 0, 2158
2189 PUSH
2190 CLOSURE 0, 2144
2193 PUSH
2194 CLOSUREREC 0, 1217
2198 GETGLOBALFIELD Pervasives, 16
2201 PUSH
2202 CLOSUREREC 0, 1259
2206 ACC0
2207 CLOSURE 1, 2139
2210 PUSH
2211 CLOSUREREC 0, 1277
2215 CLOSUREREC 0, 1294
2219 CLOSURE 0, 2127
2222 PUSH
2223 CLOSUREREC 0, 1316
2227 CLOSUREREC 0, 1334
2231 CLOSUREREC 0, 1354
2235 CLOSUREREC 0, 1374
2239 CLOSURE 0, 2092
2242 PUSH
2243 CLOSUREREC 0, 1415
2247 CLOSUREREC 0, 1452
2251 CLOSUREREC 0, 1490
2255 CLOSUREREC 0, 1530
2259 CLOSUREREC 0, 1553
2263 CLOSUREREC 0, 1573
2267 CLOSUREREC 0, 1613
2271 CLOSUREREC 0, 1654
2275 CLOSUREREC 0, 1675
2279 CLOSUREREC 0, 1695
2283 CLOSUREREC 0, 1725
2287 CLOSUREREC 0, 1754
2291 CLOSUREREC 0, 1776
2295 CLOSUREREC 0, 1797
2299 CLOSUREREC 0, 1828
2303 CLOSUREREC 0, 1858
2307 ACC 24
2309 CLOSURE 1, 2042
2312 PUSHACC 25
2314 CLOSUREREC 1, 1928
2318 CLOSUREREC 0, 1942
2322 CLOSUREREC 0, 1972
2326 ACC0
2327 PUSHACC2
2328 PUSHACC7
2329 PUSHACC 9
2331 PUSHACC 11
2333 PUSHACC 13
2335 PUSHACC 15
2337 PUSHACC 17
2339 PUSHACC 10
2341 PUSHACC 12
2343 PUSHACC 13
2345 PUSHACC 15
2347 PUSHACC 23
2349 PUSHACC 25
2351 PUSHACC 27
2353 PUSHACC 29
2355 PUSHACC 31
2357 PUSHACC 33
2359 PUSHACC 35
2361 PUSHACC 37
2363 PUSHACC 40
2365 PUSHACC 42
2367 PUSHACC 41
2369 PUSHACC 45
2371 PUSHACC 47
2373 PUSHACC 50
2375 PUSHACC 52
2377 PUSHACC 51
2379 PUSHACC 55
2381 PUSHACC 56
2383 PUSHACC 59
2385 PUSHACC 61
2387 PUSHACC 60
2389 PUSHACC 64
2391 PUSHACC 66
2393 PUSHACC 68
2395 PUSHACC 70
2397 MAKEBLOCK 37, 0
2400 POP 36
2402 SETGLOBAL List
2404 BRANCH 2432
2406 CONST0
2407 PUSHACC1
2408 LEINT
2409 BRANCHIFNOT 2414
2411 CONST0
2412 RETURN 1
2414 ACC0
2415 OFFSETINT -1
2417 PUSHOFFSETCLOSURE0
2418 APPLY1
2419 PUSHACC1
2420 MAKEBLOCK2 0
2422 RETURN 1
2424 RESTART
2425 GRAB 1
2427 ACC1
2428 PUSHACC1
2429 ADDINT
2430 RETURN 2
2432 CLOSUREREC 0, 2406
2436 CONST0
2437 C_CALL1 gc_compaction
2439 CONSTINT 300
2441 PUSHACC1
2442 APPLY1
2443 PUSHCONSTINT 150
2445 PUSHCONSTINT 301
2447 MULINT
2448 PUSHACC1
2449 PUSHCONST0
2450 PUSH
2451 CLOSURE 0, 2425
2454 PUSHGETGLOBALFIELD List, 12
2457 APPLY3
2458 NEQ
2459 BRANCHIFNOT 2466
2461 GETGLOBAL Not_found
2463 MAKEBLOCK1 0
2465 RAISE
2466 POP 2
2468 ATOM0
2469 SETGLOBAL T330-compact-4
2471 STOP
**)
|
|
eb29ee83abf726d9efd8eacee34078f988da37c3176ea6699b5b34c6b21e71b2 | BitGameEN/bitgamex | cow_http_te.erl | %% Copyright (c) 2014-2018, < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(cow_http_te).
%% Identity.
-export([stream_identity/2]).
-export([identity/1]).
%% Chunked.
-export([stream_chunked/2]).
-export([chunk/1]).
-export([last_chunk/0]).
%% The state type is the same for both identity and chunked.
-type state() :: {non_neg_integer(), non_neg_integer()}.
-type decode_ret() :: more
| {more, Data::binary(), state()}
| {more, Data::binary(), RemLen::non_neg_integer(), state()}
| {more, Data::binary(), Rest::binary(), state()}
| {done, TotalLen::non_neg_integer(), Rest::binary()}
| {done, Data::binary(), TotalLen::non_neg_integer(), Rest::binary()}.
-export_type([decode_ret/0]).
-include("cow_parse.hrl").
-ifdef(TEST).
dripfeed(<< C, Rest/bits >>, Acc, State, F) ->
case F(<< Acc/binary, C >>, State) of
more ->
dripfeed(Rest, << Acc/binary, C >>, State, F);
{more, _, State2} ->
dripfeed(Rest, <<>>, State2, F);
{more, _, Length, State2} when is_integer(Length) ->
dripfeed(Rest, <<>>, State2, F);
{more, _, Acc2, State2} ->
dripfeed(Rest, Acc2, State2, F);
{done, _, <<>>} ->
ok;
{done, _, _, <<>>} ->
ok
end.
-endif.
%% Identity.
%% @doc Decode an identity stream.
-spec stream_identity(Data, State)
-> {more, Data, Len, State} | {done, Data, Len, Data}
when Data::binary(), State::state(), Len::non_neg_integer().
stream_identity(Data, {Streamed, Total}) ->
Streamed2 = Streamed + byte_size(Data),
if
Streamed2 < Total ->
{more, Data, Total - Streamed2, {Streamed2, Total}};
true ->
Size = Total - Streamed,
<< Data2:Size/binary, Rest/bits >> = Data,
{done, Data2, Total, Rest}
end.
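%% Illustrative example (added, not part of the original module): decoding
%% an identity body with a known total length of 5 in two reads. The byte
%% values here are made up.
%%   stream_identity(<<"He">>, {0, 5})  -> {more, <<"He">>, 3, {2, 5}}
%%   stream_identity(<<"llo">>, {2, 5}) -> {done, <<"llo">>, 5, <<>>}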
-spec identity(Data) -> Data when Data::iodata().
identity(Data) ->
Data.
-ifdef(TEST).
stream_identity_test() ->
{done, <<>>, 0, <<>>}
= stream_identity(identity(<<>>), {0, 0}),
{done, <<"\r\n">>, 2, <<>>}
= stream_identity(identity(<<"\r\n">>), {0, 2}),
{done, << 0:80000 >>, 10000, <<>>}
= stream_identity(identity(<< 0:80000 >>), {0, 10000}),
ok.
stream_identity_parts_test() ->
{more, << 0:8000 >>, 1999, S1}
= stream_identity(<< 0:8000 >>, {0, 2999}),
{more, << 0:8000 >>, 999, S2}
= stream_identity(<< 0:8000 >>, S1),
{done, << 0:7992 >>, 2999, <<>>}
= stream_identity(<< 0:7992 >>, S2),
ok.
%% Using the same data as the chunked one for comparison.
horse_stream_identity() ->
horse:repeat(10000,
stream_identity(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 43})
).
horse_stream_identity_dripfeed() ->
horse:repeat(10000,
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 43}, fun stream_identity/2)
).
-endif.
%% Chunked.
%% @doc Decode a chunked stream.
-spec stream_chunked(Data, State)
-> more | {more, Data, State} | {more, Data, Len, State}
| {more, Data, Data, State}
| {done, Len, Data} | {done, Data, Len, Data}
when Data::binary(), State::state(), Len::non_neg_integer().
stream_chunked(Data, State) ->
stream_chunked(Data, State, <<>>).
%% New chunk.
stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r ->
case chunked_len(Data, Streamed, Acc, 0) of
{next, Rest, State, Acc2} ->
stream_chunked(Rest, State, Acc2);
{more, State, Acc2} ->
{more, Acc2, Data, State};
Ret ->
Ret
end;
%% Trailing \r\n before next chunk.
stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) ->
stream_chunked(Rest, {0, Streamed}, Acc);
%% Trailing \r before next chunk.
stream_chunked(<< "\r" >>, {2, Streamed}, Acc) ->
{more, Acc, {1, Streamed}};
%% Trailing \n before next chunk.
stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) ->
stream_chunked(Rest, {0, Streamed}, Acc);
%% More data needed.
stream_chunked(<<>>, State = {Rem, _}, Acc) ->
{more, Acc, Rem, State};
%% Chunk data.
stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 ->
DataSize = byte_size(Data),
RemSize = Rem - 2,
case Data of
<< Chunk:RemSize/binary, "\r\n", Rest/bits >> ->
stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>);
<< Chunk:RemSize/binary, "\r" >> ->
{more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}};
%% Everything in Data is part of the chunk. If we have more
%% data than the chunk accepts, then this is an error and we crash.
_ when DataSize =< RemSize ->
Rem2 = Rem - DataSize,
{more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}}
end.
chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
%% Chunk extensions.
%%
%% Note that we currently skip the first character we encounter here,
%% and not in the skip_chunk_ext function. If we later implement
%% chunk extensions (unlikely) we will need to change this clause too.
chunked_len(<< C, R/bits >>, S, A, Len) when ?IS_WS(C); C =:= $; -> skip_chunk_ext(R, S, A, Len, 0);
%% Final chunk.
%%
%% When trailers are following we simply return them as the Rest.
%% Then the user code can decide to call the stream_trailers function
%% to parse them. The user can therefore ignore trailers as necessary
%% if they do not wish to handle them.
chunked_len(<< "\r\n\r\n", R/bits >>, _, <<>>, 0) -> {done, no_trailers, R};
chunked_len(<< "\r\n\r\n", R/bits >>, _, A, 0) -> {done, A, no_trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, <<>>, 0) when byte_size(R) > 2 -> {done, trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, A, 0) when byte_size(R) > 2 -> {done, A, trailers, R};
chunked_len(_, _, _, 0) -> more;
%% Normal chunk. Add 2 to Len for the trailing \r\n.
chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
chunked_len(<<"\r">>, _, <<>>, _) -> more;
chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
chunked_len(<<>>, _, <<>>, _) -> more;
chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.
skip_chunk_ext(R = << "\r", _/bits >>, S, A, Len, _) -> chunked_len(R, S, A, Len);
skip_chunk_ext(R = <<>>, S, A, Len, _) -> chunked_len(R, S, A, Len);
%% We skip up to 128 characters of chunk extensions. The value
%% is hardcoded: chunk extensions are very rarely seen in the
%% wild and Cowboy doesn't do anything with them anyway.
%%
%% Line breaks are not allowed in the middle of chunk extensions.
skip_chunk_ext(<< C, R/bits >>, S, A, Len, Skipped) when C =/= $\n, Skipped < 128 ->
skip_chunk_ext(R, S, A, Len, Skipped + 1).
%% @doc Encode a chunk.
-spec chunk(D) -> D when D::iodata().
chunk(Data) ->
[integer_to_list(iolist_size(Data), 16), <<"\r\n">>,
Data, <<"\r\n">>].
%% @doc Encode the last chunk of a chunked stream.
-spec last_chunk() -> << _:40 >>.
last_chunk() ->
<<"0\r\n\r\n">>.
-ifdef(TEST).
stream_chunked_identity_test() ->
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(iolist_to_binary([
chunk("Wiki"),
chunk("pedia"),
chunk(" in\r\n\r\nchunks."),
last_chunk()
]), {0, 0}),
ok.
stream_chunked_one_pass_test() ->
{done, no_trailers, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}),
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 0}),
%% Same but with extra spaces or chunk extensions.
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(<<
"4 \r\n"
"Wiki\r\n"
"5 ; ext = abc\r\n"
"pedia\r\n"
"e;ext=abc\r\n"
" in\r\n\r\nchunks.\r\n"
"0;ext\r\n"
"\r\n">>, {0, 0}),
%% Same but with trailers.
{done, <<"Wikipedia in\r\n\r\nchunks.">>, trailers, Rest}
= stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"x-foo-bar: bar foo\r\n"
"\r\n">>, {0, 0}),
{[{<<"x-foo-bar">>, <<"bar foo">>}], <<>>} = cow_http:parse_headers(Rest),
ok.
stream_chunked_n_passes_test() ->
S0 = {0, 0},
more = stream_chunked(<<"4\r">>, S0),
{more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0),
{more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1),
{more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2),
{more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3),
{done, no_trailers, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4),
%% A few extra for coverage purposes.
more = stream_chunked(<<"\n3">>, {1, 0}),
{more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}),
{more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}),
{more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}),
ok.
stream_chunked_dripfeed_test() ->
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2).
do_body_to_chunks(_, <<>>, Acc) ->
lists:reverse([<<"0\r\n\r\n">>|Acc]);
do_body_to_chunks(ChunkSize, Body, Acc) ->
BodySize = byte_size(Body),
ChunkSize2 = case BodySize < ChunkSize of
true -> BodySize;
false -> ChunkSize
end,
<< Chunk:ChunkSize2/binary, Rest/binary >> = Body,
ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)),
do_body_to_chunks(ChunkSize, Rest,
[<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]).
stream_chunked_dripfeed2_test() ->
Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])),
Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])),
dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2).
stream_chunked_error_test_() ->
Tests = [
{<<>>, undefined},
{<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}}
],
[{lists:flatten(io_lib:format("value ~p state ~p", [V, S])),
fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end}
|| {V, S} <- Tests].
horse_stream_chunked() ->
horse:repeat(10000,
stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 0})
).
horse_stream_chunked_dripfeed() ->
horse:repeat(10000,
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 43}, fun stream_chunked/2)
).
-endif.
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/deps/cowlib/src/cow_http_te.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Identity.
The state type is the same for both identity and chunked.
Identity.
@doc Decode an identity stream.
Using the same data as the chunked one for comparison.
@doc Decode a chunked stream.
New chunk.
Trailing \r\n before next chunk.
Trailing \r before next chunk.
Trailing \n before next chunk.
More data needed.
Everything in Data is part of the chunk. If we have more
data than the chunk accepts, then this is an error and we crash.
chunk extensions (unlikely) we will need to change this clause too.
Final chunk.
When trailers are following we simply return them as the Rest.
Then the user code can decide to call the stream_trailers function
to parse them. The user can therefore ignore trailers as necessary
if they do not wish to handle them.
is hardcoded: chunk extensions are very rarely seen in the
Line breaks are not allowed in the middle of chunk extensions.
@doc Encode a chunk.
@doc Encode the last chunk of a chunked stream.
Same but with extra spaces or chunk extensions.
Same but with trailers.
A few extra for coverage purposes. | Copyright ( c ) 2014 - 2018 , < >
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-module(cow_http_te).
-export([stream_identity/2]).
-export([identity/1]).
%% Chunked.
-export([stream_chunked/2]).
-export([chunk/1]).
-export([last_chunk/0]).
-type state() :: {non_neg_integer(), non_neg_integer()}.
-type decode_ret() :: more
| {more, Data::binary(), state()}
| {more, Data::binary(), RemLen::non_neg_integer(), state()}
| {more, Data::binary(), Rest::binary(), state()}
| {done, TotalLen::non_neg_integer(), Rest::binary()}
| {done, Data::binary(), TotalLen::non_neg_integer(), Rest::binary()}.
-export_type([decode_ret/0]).
-include("cow_parse.hrl").
-ifdef(TEST).
dripfeed(<< C, Rest/bits >>, Acc, State, F) ->
case F(<< Acc/binary, C >>, State) of
more ->
dripfeed(Rest, << Acc/binary, C >>, State, F);
{more, _, State2} ->
dripfeed(Rest, <<>>, State2, F);
{more, _, Length, State2} when is_integer(Length) ->
dripfeed(Rest, <<>>, State2, F);
{more, _, Acc2, State2} ->
dripfeed(Rest, Acc2, State2, F);
{done, _, <<>>} ->
ok;
{done, _, _, <<>>} ->
ok
end.
-endif.
-spec stream_identity(Data, State)
-> {more, Data, Len, State} | {done, Data, Len, Data}
when Data::binary(), State::state(), Len::non_neg_integer().
stream_identity(Data, {Streamed, Total}) ->
Streamed2 = Streamed + byte_size(Data),
if
Streamed2 < Total ->
{more, Data, Total - Streamed2, {Streamed2, Total}};
true ->
Size = Total - Streamed,
<< Data2:Size/binary, Rest/bits >> = Data,
{done, Data2, Total, Rest}
end.
-spec identity(Data) -> Data when Data::iodata().
identity(Data) ->
Data.
-ifdef(TEST).
stream_identity_test() ->
{done, <<>>, 0, <<>>}
= stream_identity(identity(<<>>), {0, 0}),
{done, <<"\r\n">>, 2, <<>>}
= stream_identity(identity(<<"\r\n">>), {0, 2}),
{done, << 0:80000 >>, 10000, <<>>}
= stream_identity(identity(<< 0:80000 >>), {0, 10000}),
ok.
stream_identity_parts_test() ->
{more, << 0:8000 >>, 1999, S1}
= stream_identity(<< 0:8000 >>, {0, 2999}),
{more, << 0:8000 >>, 999, S2}
= stream_identity(<< 0:8000 >>, S1),
{done, << 0:7992 >>, 2999, <<>>}
= stream_identity(<< 0:7992 >>, S2),
ok.
horse_stream_identity() ->
horse:repeat(10000,
stream_identity(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 43})
).
horse_stream_identity_dripfeed() ->
horse:repeat(10000,
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 43}, fun stream_identity/2)
).
-endif.
%% Chunked.
-spec stream_chunked(Data, State)
-> more | {more, Data, State} | {more, Data, Len, State}
| {more, Data, Data, State}
| {done, Len, Data} | {done, Data, Len, Data}
when Data::binary(), State::state(), Len::non_neg_integer().
stream_chunked(Data, State) ->
stream_chunked(Data, State, <<>>).
stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r ->
case chunked_len(Data, Streamed, Acc, 0) of
{next, Rest, State, Acc2} ->
stream_chunked(Rest, State, Acc2);
{more, State, Acc2} ->
{more, Acc2, Data, State};
Ret ->
Ret
end;
stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) ->
stream_chunked(Rest, {0, Streamed}, Acc);
stream_chunked(<< "\r" >>, {2, Streamed}, Acc) ->
{more, Acc, {1, Streamed}};
stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) ->
stream_chunked(Rest, {0, Streamed}, Acc);
stream_chunked(<<>>, State = {Rem, _}, Acc) ->
{more, Acc, Rem, State};
%% Chunk data.
stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 ->
DataSize = byte_size(Data),
RemSize = Rem - 2,
case Data of
<< Chunk:RemSize/binary, "\r\n", Rest/bits >> ->
stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>);
<< Chunk:RemSize/binary, "\r" >> ->
{more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}};
_ when DataSize =< RemSize ->
Rem2 = Rem - DataSize,
{more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}}
end.
chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
%% Chunk extensions.
%% Note that we currently skip the first character we encounter here,
%% and not in the skip_chunk_ext function. If we later implement
chunked_len(<< C, R/bits >>, S, A, Len) when ?IS_WS(C); C =:= $; -> skip_chunk_ext(R, S, A, Len, 0);
chunked_len(<< "\r\n\r\n", R/bits >>, _, <<>>, 0) -> {done, no_trailers, R};
chunked_len(<< "\r\n\r\n", R/bits >>, _, A, 0) -> {done, A, no_trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, <<>>, 0) when byte_size(R) > 2 -> {done, trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, A, 0) when byte_size(R) > 2 -> {done, A, trailers, R};
chunked_len(_, _, _, 0) -> more;
%% Normal chunk. Add 2 to Len for the trailing \r\n.
chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
chunked_len(<<"\r">>, _, <<>>, _) -> more;
chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
chunked_len(<<>>, _, <<>>, _) -> more;
chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.
skip_chunk_ext(R = << "\r", _/bits >>, S, A, Len, _) -> chunked_len(R, S, A, Len);
skip_chunk_ext(R = <<>>, S, A, Len, _) -> chunked_len(R, S, A, Len);
%% We skip up to 128 characters of chunk extensions. The value
%% wild and Cowboy doesn't do anything with them anyway.
skip_chunk_ext(<< C, R/bits >>, S, A, Len, Skipped) when C =/= $\n, Skipped < 128 ->
skip_chunk_ext(R, S, A, Len, Skipped + 1).
-spec chunk(D) -> D when D::iodata().
chunk(Data) ->
[integer_to_list(iolist_size(Data), 16), <<"\r\n">>,
Data, <<"\r\n">>].
-spec last_chunk() -> << _:40 >>.
last_chunk() ->
<<"0\r\n\r\n">>.
-ifdef(TEST).
stream_chunked_identity_test() ->
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(iolist_to_binary([
chunk("Wiki"),
chunk("pedia"),
chunk(" in\r\n\r\nchunks."),
last_chunk()
]), {0, 0}),
ok.
stream_chunked_one_pass_test() ->
{done, no_trailers, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}),
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 0}),
{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
= stream_chunked(<<
"4 \r\n"
"Wiki\r\n"
"5 ; ext = abc\r\n"
"pedia\r\n"
"e;ext=abc\r\n"
" in\r\n\r\nchunks.\r\n"
"0;ext\r\n"
"\r\n">>, {0, 0}),
{done, <<"Wikipedia in\r\n\r\nchunks.">>, trailers, Rest}
= stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"x-foo-bar: bar foo\r\n"
"\r\n">>, {0, 0}),
{[{<<"x-foo-bar">>, <<"bar foo">>}], <<>>} = cow_http:parse_headers(Rest),
ok.
stream_chunked_n_passes_test() ->
S0 = {0, 0},
more = stream_chunked(<<"4\r">>, S0),
{more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0),
{more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1),
{more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2),
{more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3),
{done, no_trailers, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4),
more = stream_chunked(<<"\n3">>, {1, 0}),
{more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}),
{more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}),
{more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}),
ok.
stream_chunked_dripfeed_test() ->
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2).
do_body_to_chunks(_, <<>>, Acc) ->
lists:reverse([<<"0\r\n\r\n">>|Acc]);
do_body_to_chunks(ChunkSize, Body, Acc) ->
BodySize = byte_size(Body),
ChunkSize2 = case BodySize < ChunkSize of
true -> BodySize;
false -> ChunkSize
end,
<< Chunk:ChunkSize2/binary, Rest/binary >> = Body,
ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)),
do_body_to_chunks(ChunkSize, Rest,
[<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]).
stream_chunked_dripfeed2_test() ->
Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])),
Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])),
dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2).
stream_chunked_error_test_() ->
Tests = [
{<<>>, undefined},
{<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}}
],
[{lists:flatten(io_lib:format("value ~p state ~p", [V, S])),
fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end}
|| {V, S} <- Tests].
horse_stream_chunked() ->
horse:repeat(10000,
stream_chunked(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, {0, 0})
).
horse_stream_chunked_dripfeed() ->
horse:repeat(10000,
dripfeed(<<
"4\r\n"
"Wiki\r\n"
"5\r\n"
"pedia\r\n"
"e\r\n"
" in\r\n\r\nchunks.\r\n"
"0\r\n"
"\r\n">>, <<>>, {0, 43}, fun stream_chunked/2)
).
-endif.
|
d8fee67a52953b8f10a542c21a1edfc67ec12d48228c49cd6bc04d93182867e4 | deadtrickster/cl-statsd | pipeline.lisp | (in-package :cl-statsd.test)
(cl-interpol:enable-interpol-syntax)
(plan 1)
(subtest "Pipeline test"
(is (with-capture-client ()
(statsd:pipeline ()
(statsd:inc "qwe")
(statsd:inc "ewq")))
#?"qwe:1|c\newq:1|c"))
(finalize)
| null | https://raw.githubusercontent.com/deadtrickster/cl-statsd/7790c95c097f690994256519d24106b53c3e5e37/t/pipeline.lisp | lisp | (in-package :cl-statsd.test)
(cl-interpol:enable-interpol-syntax)
(plan 1)
(subtest "Pipeline test"
(is (with-capture-client ()
(statsd:pipeline ()
(statsd:inc "qwe")
(statsd:inc "ewq")))
#?"qwe:1|c\newq:1|c"))
(finalize)
|
|
9abfa62c6996db2cf68be9abe671b77cb8b87591a9c7e5502fdd26d589496b9a | didierverna/focus | package.lisp | ;;; package.lisp --- Package definition
;; Copyright (C) 2015
;; Author: < >
;; This file is part of FoCus.
;; Copying and distribution of this file, with or without modification,
;; are permitted in any medium without royalty provided the copyright
;; notice and this notice are preserved. This file is offered as-is,
;; without any warranty.
;;; Commentary:
;;; Code:
(defpackage :net.didierverna.focus.demos.quotation
(:use :cl :net.didierverna.focus)
(:shadowing-import-from :net.didierverna.focus :format :formatter)
(:export :quotation))
;;; package.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/focus/bf2da0220ad0d0a08b52f88a1c843dcb4fb3fbf8/demos/quotation/package.lisp | lisp | package.lisp --- Package definition
Copying and distribution of this file, with or without modification,
are permitted in any medium without royalty provided the copyright
notice and this notice are preserved. This file is offered as-is,
without any warranty.
Commentary:
Code:
package.lisp ends here |
(defpackage :net.didierverna.focus.demos.quotation
(:use :cl :net.didierverna.focus)
(:shadowing-import-from :net.didierverna.focus :format :formatter)
(:export :quotation))
|
3cfdd0191e5acd70fd1a462492a6e0020c1d7bd5a49a6a0e56d072bd1b212ac2 | johnbender/unraverl | util.erl | -module(util).
-export([find_function/2,
replace/3,
replace/2,
find_attribute/2,
args_list_form/3,
to_string/1]).
find_function(Form, Name) ->
[Result] = [{SType, LineNum, SName, Arity, Clauses} || {SType, LineNum, SName, Arity, Clauses} <- Form, Name == SName],
Result.
%If the fun can handle both matching and replacement
replace(Form, ReplaceFun) when is_tuple(Form) ->
map_tuple(ReplaceFun(Form), ?MODULE, replace, [ReplaceFun]);
replace(Form, ReplaceFun) when is_list(Form) ->
[replace(X, ReplaceFun) || X <- ReplaceFun(Form)];
replace(Form, ReplaceFun) ->
ReplaceFun(Form).
%If the third argument is not a replacement fun but a data structure
replace(Form, CompareFun, Replacement) when is_function(Replacement) == false ->
replace(Form, CompareFun, fun(_) -> Replacement end);
%If the matching and replacement funs are separated
replace(Form, CompareFun, ReplaceFun) when is_tuple(Form) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> map_tuple(Form, ?MODULE, replace, [CompareFun, ReplaceFun])
end;
replace(Form, CompareFun, ReplaceFun) when is_list(Form) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> [replace(X, CompareFun, ReplaceFun) || X <- Form]
end;
replace(Form, CompareFun, ReplaceFun) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> Form
end.
map_tuple(Tuple, Module, Function, Args) when is_tuple(Tuple) ->
map_tuple(tuple_to_list(Tuple), Module, Function, Args);
map_tuple([Head|T], Module, Function, Args) ->
map_tuple(T, Module, Function, Args, { apply(Module, Function, [Head|Args]) }).
map_tuple([Head|T], Module, Function, Args, Acc) ->
Tuple = erlang:append_element(Acc, apply(Module, Function, [Head|Args])),
map_tuple(T, Module, Function, Args, Tuple);
map_tuple([], _, _, _, Acc) -> Acc.
find_attribute(Form, Name) ->
[{SType, LineNum, SName, Value} || {SType, LineNum, SName, Value} <- Form, Name == SName].
args_list_form(Count, LineNum, NameSeed) when is_list(NameSeed) ->
[{var, LineNum, list_to_atom(NameSeed ++ to_string(Num))} || Num <- lists:reverse(lists:seq(1, Count))].
to_string(Object) when is_number(Object) ->
lists:flatten(io_lib:format("~w" , [Object]));
to_string(Object) when is_atom(Object) ->
atom_to_list(Object);
to_string(Object) when is_binary(Object) ->
binary_to_list(Object);
to_string(Object) when is_list(Object) ->
Object.
| null | https://raw.githubusercontent.com/johnbender/unraverl/cc8310fab4538cefd035502f95327bf894130292/util.erl | erlang | If the fun can handle both matching and replacement
If the matching and replacement funs are separated | -module(util).
-export([find_function/2,
replace/3,
replace/2,
find_attribute/2,
args_list_form/3,
to_string/1]).
find_function(Form, Name) ->
[Result] = [{SType, LineNum, SName, Arity, Clauses} || {SType, LineNum, SName, Arity, Clauses} <- Form, Name == SName],
Result.
replace(Form, ReplaceFun) when is_tuple(Form) ->
map_tuple(ReplaceFun(Form), ?MODULE, replace, [ReplaceFun]);
replace(Form, ReplaceFun) when is_list(Form) ->
[replace(X, ReplaceFun) || X <- ReplaceFun(Form)];
replace(Form, ReplaceFun) ->
ReplaceFun(Form).
replace(Form, CompareFun, Replacement) when is_function(Replacement) == false ->
replace(Form, CompareFun, fun(_) -> Replacement end);
replace(Form, CompareFun, ReplaceFun) when is_tuple(Form) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> map_tuple(Form, ?MODULE, replace, [CompareFun, ReplaceFun])
end;
replace(Form, CompareFun, ReplaceFun) when is_list(Form) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> [replace(X, CompareFun, ReplaceFun) || X <- Form]
end;
replace(Form, CompareFun, ReplaceFun) ->
case CompareFun(Form) of
true -> ReplaceFun(Form);
_ -> Form
end.
map_tuple(Tuple, Module, Function, Args) when is_tuple(Tuple) ->
map_tuple(tuple_to_list(Tuple), Module, Function, Args);
map_tuple([Head|T], Module, Function, Args) ->
map_tuple(T, Module, Function, Args, { apply(Module, Function, [Head|Args]) }).
map_tuple([Head|T], Module, Function, Args, Acc) ->
Tuple = erlang:append_element(Acc, apply(Module, Function, [Head|Args])),
map_tuple(T, Module, Function, Args, Tuple);
map_tuple([], _, _, _, Acc) -> Acc.
find_attribute(Form, Name) ->
[{SType, LineNum, SName, Value} || {SType, LineNum, SName, Value} <- Form, Name == SName].
args_list_form(Count, LineNum, NameSeed) when is_list(NameSeed) ->
[{var, LineNum, list_to_atom(NameSeed ++ to_string(Num))} || Num <- lists:reverse(lists:seq(1, Count))].
to_string(Object) when is_number(Object) ->
lists:flatten(io_lib:format("~w" , [Object]));
to_string(Object) when is_atom(Object) ->
atom_to_list(Object);
to_string(Object) when is_binary(Object) ->
binary_to_list(Object);
to_string(Object) when is_list(Object) ->
Object.
|
8e7aa038d01909757f562587bf40b3189b7143dfd91d3befc5a35677e964a1ef | lambdaisland/kaocha | hello_test.clj | (ns foo.hello-test
(:require [clojure.test :as t]))
(t/deftest pass-1
(t/is true))
(t/deftest pass-2
(t/is true))
(t/deftest fail-1
(t/is true)
(t/is false)
(t/is true))
(t/deftest pass-3
(t/is true))
| null | https://raw.githubusercontent.com/lambdaisland/kaocha/8f18babb732b21e7fb2231e44be4d972c7ab22bc/fixtures/c-tests/foo/hello_test.clj | clojure | (ns foo.hello-test
(:require [clojure.test :as t]))
(t/deftest pass-1
(t/is true))
(t/deftest pass-2
(t/is true))
(t/deftest fail-1
(t/is true)
(t/is false)
(t/is true))
(t/deftest pass-3
(t/is true))
|
|
8c01358ad6fd54b159f1a967dcd3caaf8880374faf1a2a0e1cd96d469c03ddc3 | dbuenzli/astring | examples.ml | (* This code is in the public domain *)
open Astring
(* Version number (v|V).major.minor[.patch][(+|-)info] *)
let parse_version : string -> (int * int * int * string option) option =
fun s -> try
let parse_opt_v s = match String.Sub.head s with
| Some ('v'|'V') -> String.Sub.tail s
| Some _ -> s
| None -> raise Exit
in
let parse_dot s = match String.Sub.head s with
| Some '.' -> String.Sub.tail s
| Some _ | None -> raise Exit
in
let parse_int s =
match String.Sub.span ~min:1 ~sat:Char.Ascii.is_digit s with
| (i, _) when String.Sub.is_empty i -> raise Exit
| (i, s) ->
match String.Sub.to_int i with
| None -> raise Exit | Some i -> i, s
in
let maj, s = parse_int (parse_opt_v (String.sub s)) in
let min, s = parse_int (parse_dot s) in
let patch, s = match String.Sub.head s with
| Some '.' -> parse_int (parse_dot s)
| _ -> 0, s
in
let info = match String.Sub.head s with
| Some ('+' | '-') -> Some (String.Sub.(to_string (tail s)))
| Some _ -> raise Exit
| None -> None
in
Some (maj, min, patch, info)
with Exit -> None
(* Key value bindings *)
let parse_env : string -> string String.map option =
fun s -> try
let skip_white s = String.Sub.drop ~sat:Char.Ascii.is_white s in
let parse_key s =
let id_char c = Char.Ascii.is_letter c || c = '_' in
match String.Sub.span ~min:1 ~sat:id_char s with
| (key, _) when String.Sub.is_empty key -> raise Exit
| (key, rem) -> (String.Sub.to_string key), rem
in
let parse_eq s = match String.Sub.head s with
| Some '=' -> String.Sub.tail s
| Some _ | None -> raise Exit
in
let parse_value s = match String.Sub.head s with
| Some '"' -> (* quoted *)
let is_data = function '\\' | '"' -> false | _ -> true in
let rec loop acc s =
let data, rem = String.Sub.span ~sat:is_data s in
match String.Sub.head rem with
| Some '"' ->
let acc = List.rev (data :: acc) in
String.Sub.(to_string @@ concat acc), (String.Sub.tail rem)
| Some '\\' ->
let rem = String.Sub.tail rem in
begin match String.Sub.head rem with
| Some ('"' | '\\' as c) ->
let acc = String.(sub (of_char c)) :: data :: acc in
loop acc (String.Sub.tail rem)
| Some _ | None -> raise Exit
end
| None | Some _ -> raise Exit
in
loop [] (String.Sub.tail s)
| Some _ ->
let is_data c = not (Char.Ascii.is_white c) in
let data, rem = String.Sub.span ~sat:is_data s in
String.Sub.to_string data, rem
| None -> "", s
in
let rec parse_bindings acc s =
if String.Sub.is_empty s then acc else
let key, s = parse_key s in
let value, s = s |> skip_white |> parse_eq |> skip_white |> parse_value in
parse_bindings (String.Map.add key value acc) (skip_white s)
in
Some (String.sub s |> skip_white |> parse_bindings String.Map.empty)
with Exit -> None
| null | https://raw.githubusercontent.com/dbuenzli/astring/ec7a266a3a680e5d246689855c639da53d713428/test/examples.ml | ocaml | This code is in the public domain
Version number (v|V).major.minor[.patch][(+|-)info]
Key value bindings
quoted |
open Astring
let parse_version : string -> (int * int * int * string option) option =
fun s -> try
let parse_opt_v s = match String.Sub.head s with
| Some ('v'|'V') -> String.Sub.tail s
| Some _ -> s
| None -> raise Exit
in
let parse_dot s = match String.Sub.head s with
| Some '.' -> String.Sub.tail s
| Some _ | None -> raise Exit
in
let parse_int s =
match String.Sub.span ~min:1 ~sat:Char.Ascii.is_digit s with
| (i, _) when String.Sub.is_empty i -> raise Exit
| (i, s) ->
match String.Sub.to_int i with
| None -> raise Exit | Some i -> i, s
in
let maj, s = parse_int (parse_opt_v (String.sub s)) in
let min, s = parse_int (parse_dot s) in
let patch, s = match String.Sub.head s with
| Some '.' -> parse_int (parse_dot s)
| _ -> 0, s
in
let info = match String.Sub.head s with
| Some ('+' | '-') -> Some (String.Sub.(to_string (tail s)))
| Some _ -> raise Exit
| None -> None
in
Some (maj, min, patch, info)
with Exit -> None
let parse_env : string -> string String.map option =
fun s -> try
let skip_white s = String.Sub.drop ~sat:Char.Ascii.is_white s in
let parse_key s =
let id_char c = Char.Ascii.is_letter c || c = '_' in
match String.Sub.span ~min:1 ~sat:id_char s with
| (key, _) when String.Sub.is_empty key -> raise Exit
| (key, rem) -> (String.Sub.to_string key), rem
in
let parse_eq s = match String.Sub.head s with
| Some '=' -> String.Sub.tail s
| Some _ | None -> raise Exit
in
let parse_value s = match String.Sub.head s with
let is_data = function '\\' | '"' -> false | _ -> true in
let rec loop acc s =
let data, rem = String.Sub.span ~sat:is_data s in
match String.Sub.head rem with
| Some '"' ->
let acc = List.rev (data :: acc) in
String.Sub.(to_string @@ concat acc), (String.Sub.tail rem)
| Some '\\' ->
let rem = String.Sub.tail rem in
begin match String.Sub.head rem with
| Some ('"' | '\\' as c) ->
let acc = String.(sub (of_char c)) :: data :: acc in
loop acc (String.Sub.tail rem)
| Some _ | None -> raise Exit
end
| None | Some _ -> raise Exit
in
loop [] (String.Sub.tail s)
| Some _ ->
let is_data c = not (Char.Ascii.is_white c) in
let data, rem = String.Sub.span ~sat:is_data s in
String.Sub.to_string data, rem
| None -> "", s
in
let rec parse_bindings acc s =
if String.Sub.is_empty s then acc else
let key, s = parse_key s in
let value, s = s |> skip_white |> parse_eq |> skip_white |> parse_value in
parse_bindings (String.Map.add key value acc) (skip_white s)
in
Some (String.sub s |> skip_white |> parse_bindings String.Map.empty)
with Exit -> None
|
98449d45ca0bf22680119b4853125db03337f13bcc922480c3c2b28dbf2baee5 | apauley/hledger-flow | Main.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Parsing ( parseStartYear )
import Path ( reldir )
import qualified Turtle hiding (switch)
import Prelude hiding (putStrLn)
import Options.Applicative
( auto,
optional,
Alternative(many, (<|>)),
Parser,
flag',
help,
long,
metavar,
option,
short,
str,
switch )
import Hledger.Flow.PathHelpers (TurtlePath)
import Hledger.Flow.Common ( hledgerInfoFromPath )
import Hledger.Flow.Internals (versionInfo, systemInfo)
import Hledger.Flow.BaseDir ( determineBaseDir )
import qualified Hledger.Flow.RuntimeOptions as RT
import Hledger.Flow.Reports ( generateReports )
import Hledger.Flow.Import.CSVImport ( importCSVs )
import qualified Data.Text.IO as T
data ImportParams = ImportParams { maybeImportBaseDir :: Maybe TurtlePath
, importStartYear :: Maybe String
, onlyNewFiles :: Bool
} deriving (Show)
newtype ReportParams = ReportParams {maybeReportBaseDir :: Maybe TurtlePath} deriving Show
data Command = Import ImportParams | Report ReportParams deriving (Show)
data MainParams = MainParams { verbosity :: Int
, hledgerPathOpt :: Maybe TurtlePath
, showOpts :: Bool
, batchSize :: Maybe Int
, sequential :: Bool
} deriving (Show)
data BaseCommand = Version | Command { mainParams :: MainParams, command :: Command } deriving (Show)
main :: IO ()
main = do
cmd <- Turtle.options "An hledger workflow focusing on automated statement import and classification:\n-flow#readme" baseCommandParser
case cmd of
Version -> do
sysInfo <- systemInfo
T.putStrLn $ versionInfo sysInfo
Command mainParams' (Import subParams) -> toRuntimeOptionsImport mainParams' subParams >>= importCSVs
Command mainParams' (Report subParams) -> toRuntimeOptionsReport mainParams' subParams >>= generateReports
defaultBatchSize :: Int
defaultBatchSize = 200
determineBatchSize :: MainParams -> IO Int
determineBatchSize mainParams' =
case (batchSize mainParams') of
Nothing -> return defaultBatchSize
Just size -> return size
toRuntimeOptionsImport :: MainParams -> ImportParams -> IO RT.RuntimeOptions
toRuntimeOptionsImport mainParams' subParams' = do
startYear <- parseStartYear $ importStartYear subParams'
let maybeBD = maybeImportBaseDir subParams' :: Maybe TurtlePath
(bd, runDir) <- determineBaseDir maybeBD
hli <- hledgerInfoFromPath $ hledgerPathOpt mainParams'
size <- determineBatchSize mainParams'
sysInfo <- systemInfo
return RT.RuntimeOptions { RT.baseDir = bd
, RT.importRunDir = runDir
, RT.importStartYear = startYear
, RT.onlyNewFiles = onlyNewFiles subParams'
, RT.hfVersion = versionInfo sysInfo
, RT.hledgerInfo = hli
, RT.sysInfo = sysInfo
, RT.verbose = verbosity mainParams' > 0
, RT.showOptions = showOpts mainParams'
, RT.sequential = sequential mainParams'
, RT.batchSize = size
}
toRuntimeOptionsReport :: MainParams -> ReportParams -> IO RT.RuntimeOptions
toRuntimeOptionsReport mainParams' subParams' = do
let maybeBD = maybeReportBaseDir subParams' :: Maybe TurtlePath
(bd, _) <- determineBaseDir maybeBD
hli <- hledgerInfoFromPath $ hledgerPathOpt mainParams'
size <- determineBatchSize mainParams'
sysInfo <- systemInfo
return RT.RuntimeOptions { RT.baseDir = bd
, RT.importRunDir = [reldir|.|]
, RT.importStartYear = Nothing
, RT.onlyNewFiles = False
, RT.hfVersion = versionInfo sysInfo
, RT.hledgerInfo = hli
, RT.sysInfo = sysInfo
, RT.verbose = verbosity mainParams' > 0
, RT.showOptions = showOpts mainParams'
, RT.sequential = sequential mainParams'
, RT.batchSize = size
}
baseCommandParser :: Parser BaseCommand
baseCommandParser = (Command <$> verboseParser <*> commandParser)
<|> flag' Version (long "version" <> short 'V' <> help "Display version information")
commandParser :: Parser Command
commandParser = fmap Import (Turtle.subcommand "import" "Uses hledger with your own rules and/or scripts to convert electronic statements into categorised journal files" subcommandParserImport)
<|> fmap Report (Turtle.subcommand "report" "Generate Reports" subcommandParserReport)
verboseParser :: Parser MainParams
verboseParser = MainParams
<$> (length <$> many (flag' () (long "verbose" <> short 'v' <> help "Print more verbose output")))
<*> optional (Turtle.optPath "hledger-path" 'H' "The full path to an hledger executable")
<*> switch (long "show-options" <> help "Print the options this program will run with")
<*> optional (option auto (long "batch-size" <> metavar "SIZE" <> help ("Parallel processing of files are done in batches of the specified size. Default: " <> show defaultBatchSize <> ". Ignored during sequential processing.")))
<*> switch (long "sequential" <> help "Disable parallel processing")
subcommandParserImport :: Parser ImportParams
subcommandParserImport = ImportParams
<$> optional (Turtle.argPath "dir" "The directory to import. Use the base directory for a full import or a sub-directory for a partial import. Defaults to the current directory.")
<*> optional (option str (long "start-year" <> metavar "YEAR" <> help "Import only from the specified year and onwards, ignoring previous years. By default all available years are imported. Valid values include a 4-digit year or 'current' for the current year"))
<*> switch (long "new-files-only" <> help "Don't regenerate transaction files if they are already present. This applies to hledger journal files as well as files produced by the preprocess and construct scripts.")
subcommandParserReport :: Parser ReportParams
subcommandParserReport = ReportParams
<$> optional (Turtle.argPath "basedir" "The hledger-flow base directory")
| null | https://raw.githubusercontent.com/apauley/hledger-flow/487e456871193acdffb39d29cd4c83004053c754/app/Main.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE QuasiQuotes #
module Main where
import Parsing ( parseStartYear )
import Path ( reldir )
import qualified Turtle hiding (switch)
import Prelude hiding (putStrLn)
import Options.Applicative
( auto,
optional,
Alternative(many, (<|>)),
Parser,
flag',
help,
long,
metavar,
option,
short,
str,
switch )
import Hledger.Flow.PathHelpers (TurtlePath)
import Hledger.Flow.Common ( hledgerInfoFromPath )
import Hledger.Flow.Internals (versionInfo, systemInfo)
import Hledger.Flow.BaseDir ( determineBaseDir )
import qualified Hledger.Flow.RuntimeOptions as RT
import Hledger.Flow.Reports ( generateReports )
import Hledger.Flow.Import.CSVImport ( importCSVs )
import qualified Data.Text.IO as T
data ImportParams = ImportParams { maybeImportBaseDir :: Maybe TurtlePath
, importStartYear :: Maybe String
, onlyNewFiles :: Bool
} deriving (Show)
newtype ReportParams = ReportParams {maybeReportBaseDir :: Maybe TurtlePath} deriving Show
data Command = Import ImportParams | Report ReportParams deriving (Show)
data MainParams = MainParams { verbosity :: Int
, hledgerPathOpt :: Maybe TurtlePath
, showOpts :: Bool
, batchSize :: Maybe Int
, sequential :: Bool
} deriving (Show)
data BaseCommand = Version | Command { mainParams :: MainParams, command :: Command } deriving (Show)
main :: IO ()
main = do
cmd <- Turtle.options "An hledger workflow focusing on automated statement import and classification:\n-flow#readme" baseCommandParser
case cmd of
Version -> do
sysInfo <- systemInfo
T.putStrLn $ versionInfo sysInfo
Command mainParams' (Import subParams) -> toRuntimeOptionsImport mainParams' subParams >>= importCSVs
Command mainParams' (Report subParams) -> toRuntimeOptionsReport mainParams' subParams >>= generateReports
defaultBatchSize :: Int
defaultBatchSize = 200
determineBatchSize :: MainParams -> IO Int
determineBatchSize mainParams' =
case (batchSize mainParams') of
Nothing -> return defaultBatchSize
Just size -> return size
toRuntimeOptionsImport :: MainParams -> ImportParams -> IO RT.RuntimeOptions
toRuntimeOptionsImport mainParams' subParams' = do
startYear <- parseStartYear $ importStartYear subParams'
let maybeBD = maybeImportBaseDir subParams' :: Maybe TurtlePath
(bd, runDir) <- determineBaseDir maybeBD
hli <- hledgerInfoFromPath $ hledgerPathOpt mainParams'
size <- determineBatchSize mainParams'
sysInfo <- systemInfo
return RT.RuntimeOptions { RT.baseDir = bd
, RT.importRunDir = runDir
, RT.importStartYear = startYear
, RT.onlyNewFiles = onlyNewFiles subParams'
, RT.hfVersion = versionInfo sysInfo
, RT.hledgerInfo = hli
, RT.sysInfo = sysInfo
, RT.verbose = verbosity mainParams' > 0
, RT.showOptions = showOpts mainParams'
, RT.sequential = sequential mainParams'
, RT.batchSize = size
}
toRuntimeOptionsReport :: MainParams -> ReportParams -> IO RT.RuntimeOptions
toRuntimeOptionsReport mainParams' subParams' = do
let maybeBD = maybeReportBaseDir subParams' :: Maybe TurtlePath
(bd, _) <- determineBaseDir maybeBD
hli <- hledgerInfoFromPath $ hledgerPathOpt mainParams'
size <- determineBatchSize mainParams'
sysInfo <- systemInfo
return RT.RuntimeOptions { RT.baseDir = bd
, RT.importRunDir = [reldir|.|]
, RT.importStartYear = Nothing
, RT.onlyNewFiles = False
, RT.hfVersion = versionInfo sysInfo
, RT.hledgerInfo = hli
, RT.sysInfo = sysInfo
, RT.verbose = verbosity mainParams' > 0
, RT.showOptions = showOpts mainParams'
, RT.sequential = sequential mainParams'
, RT.batchSize = size
}
baseCommandParser :: Parser BaseCommand
baseCommandParser = (Command <$> verboseParser <*> commandParser)
<|> flag' Version (long "version" <> short 'V' <> help "Display version information")
commandParser :: Parser Command
commandParser = fmap Import (Turtle.subcommand "import" "Uses hledger with your own rules and/or scripts to convert electronic statements into categorised journal files" subcommandParserImport)
<|> fmap Report (Turtle.subcommand "report" "Generate Reports" subcommandParserReport)
verboseParser :: Parser MainParams
verboseParser = MainParams
<$> (length <$> many (flag' () (long "verbose" <> short 'v' <> help "Print more verbose output")))
<*> optional (Turtle.optPath "hledger-path" 'H' "The full path to an hledger executable")
<*> switch (long "show-options" <> help "Print the options this program will run with")
<*> optional (option auto (long "batch-size" <> metavar "SIZE" <> help ("Parallel processing of files are done in batches of the specified size. Default: " <> show defaultBatchSize <> ". Ignored during sequential processing.")))
<*> switch (long "sequential" <> help "Disable parallel processing")
subcommandParserImport :: Parser ImportParams
subcommandParserImport = ImportParams
<$> optional (Turtle.argPath "dir" "The directory to import. Use the base directory for a full import or a sub-directory for a partial import. Defaults to the current directory.")
<*> optional (option str (long "start-year" <> metavar "YEAR" <> help "Import only from the specified year and onwards, ignoring previous years. By default all available years are imported. Valid values include a 4-digit year or 'current' for the current year"))
<*> switch (long "new-files-only" <> help "Don't regenerate transaction files if they are already present. This applies to hledger journal files as well as files produced by the preprocess and construct scripts.")
subcommandParserReport :: Parser ReportParams
subcommandParserReport = ReportParams
<$> optional (Turtle.argPath "basedir" "The hledger-flow base directory")
|
125416b878956d8744da632004581f3bf1ab369a5cc6132113c0c3adb1db3370 | SKA-ScienceDataProcessor/RC | Data.hs | {-# LANGUAGE StandaloneDeriving, DeriveDataTypeable #-}
-- | Data representation definitions
module Kernel.Data
( -- * Configuration
Config(..), OskarInput(..), GridKernelType(..), DegridKernelType(..)
, GridPar(..), GCFPar(..), GCFFile(..), CleanPar(..), StrategyPar(..)
, defaultConfig, cfgParallelism
, gridImageWidth, gridImageHeight, gridScale, gridXY2UV, gcfMaxSize, gcfGet
-- * Data tags
, Index, Tag, Vis, UVGrid, FullUVGrid, Image, Cleaned, GCFs
-- * Data representations
, DDom, TDom, UDom, VDom, WDom, UVDom, LDom, MDom, LMDom, GUDom, GVDom, GUVDom
, IndexRepr, UVGRepr, UVGMarginRepr, FacetRepr, ImageRepr, FullUVGRepr, PlanRepr, GCFsRepr
, indexRepr, uvgRepr, uvgMarginRepr, facetRepr, imageRepr, fullUVGRepr, planRepr, gcfsRepr
-- * Visibility data representations
, RawVisRepr, RotatedVisRepr, VisRepr
, rawVisRepr, rotatedVisRepr, visRepr
) where
import Data.Typeable
import Data.Int ( Int32 )
import Flow.Halide
import Flow.Domain
import Flow.Kernel
import Kernel.Config
-- Data tags
data Index -- ^ Data set index
data Tag -- ^ Initialisation (e.g. FFT plans)
data Vis -- ^ Visibilities (File name to OSKAR / raw visibilities / binned ...)
data UVGrid -- ^ UV grid
data FullUVGrid -- ^ Full UV grid
data Image -- ^ Image
data Cleaned -- ^ Result from cleaning
data GCFs -- ^ A set of GCFs
deriving instance Typeable Tag
deriving instance Typeable Vis
deriving instance Typeable UVGrid
deriving instance Typeable FullUVGrid
deriving instance Typeable Image
deriving instance Typeable GCFs
type DDom = Domain Bins -- ^ Domain used for indexing data sets
type TDom = Domain Range -- ^ Domain used for indexing visibilities
type UDom = Domain Range -- ^ Domain used for the u grid dimension
type VDom = Domain Range -- ^ Domain used for the v grid dimension
type WDom = Domain Bins -- ^ Domain used for the w grid pseudo-dimension
type UVDom = (UDom, VDom) -- ^ Domain used for the (u,v) grid dimensions
type LDom = Domain Range -- ^ Domain used for the l image dimension
type MDom = Domain Range -- ^ Domain used for the m image dimension
type LMDom = (LDom, MDom) -- ^ Domain used for the (l,m) image dimensions
type GUDom = Domain Bins -- ^ Domain used for u grid dimension of grid convolution functions
type GVDom = Domain Bins -- ^ Domain used for v grid dimension of grid convolution functions
type GUVDom = (GUDom, GVDom) -- ^ Domain used for (u,v) grid dimensions of grid convolution functions
type IndexRepr = BinRepr (HalideRepr Dim0 Int32 Index)
indexRepr :: DDom -> IndexRepr
indexRepr ddom = BinRepr ddom $ halideRepr dim0
type UVGRepr = RangeRepr (RangeRepr (HalideRepr Dim1 Double UVGrid))
uvgRepr :: UVDom -> UVGRepr
uvgRepr (udom, vdom) =
RangeRepr vdom $
RangeRepr udom $
halideRepr (dim1 dimCpx)
type UVGMarginRepr = MarginRepr (MarginRepr (HalideRepr Dim1 Double UVGrid))
uvgMarginRepr :: GCFPar -> UVDom -> UVGMarginRepr
uvgMarginRepr gcfp (udom, vdom) =
marginRepr vdom (gcfMaxSize gcfp `div` 2) $
marginRepr udom (gcfMaxSize gcfp `div` 2) $
halideRepr (dim1 dimCpx)
dimCpx :: Dim
dimCpx = (0, 2)
type FacetRepr = HalideRepr Dim2 Double Image
facetRepr :: GridPar -> ImageRepr
facetRepr gp = halideRepr $ dimY :. dimX :. Z
where dimX = (0, fromIntegral $ gridWidth gp)
dimY = (0, fromIntegral $ gridHeight gp)
type ImageRepr = HalideRepr Dim2 Double Image
imageRepr :: GridPar -> ImageRepr
imageRepr gp = halideRepr $ dimY :. dimX :. Z
where dimX = (0, fromIntegral $ gridImageWidth gp)
dimY = (0, fromIntegral $ gridImageHeight gp)
type FullUVGRepr = HalideRepr Dim3 Double FullUVGrid
fullUVGRepr :: GridPar -> FullUVGRepr
fullUVGRepr gp = halideRepr $ dimY :. dimX :. dimCpx :. Z
where dimX = (0, fromIntegral $ gridImageWidth gp)
dimY = (0, fromIntegral $ gridImageHeight gp)
type PlanRepr = NoRepr Tag -- HalideRepr Dim0 Int32 Tag
planRepr :: PlanRepr
planRepr = NoRepr -- halideRepr dim0
-- | Raw visibilities: Dynamically sized list of visibility records
-- (see "dimVisFields").
type RawVisRepr = RangeRepr (HalideRepr Dim1 Double Vis)
rawVisRepr :: Domain Range -> RawVisRepr
rawVisRepr dom = RangeRepr dom $ halideRepr (dim1 dimVisFields)
type RotatedVisRepr = RegionRepr Range (RegionRepr Range (RangeRepr (HalideRepr Dim1 Double Vis)))
rotatedVisRepr :: LMDom -> TDom -> RotatedVisRepr
rotatedVisRepr (ldom, mdom) tdom =
RegionRepr ldom $ RegionRepr mdom $ RangeRepr tdom $
halideRepr (dim1 dimVisFields)
type VisRepr = RegionRepr Range (RegionRepr Range (BinRepr (HalideRepr Dim1 Double Vis)))
visRepr :: UVDom -> WDom -> VisRepr
visRepr (udom, vdom) wdom =
RegionRepr udom $ RegionRepr vdom $ BinRepr wdom $
halideRepr (dim1 dimVisFields)
-- | We have 5 visibility fields: u, v and w, Real, imag
dimVisFields :: Dim
dimVisFields = (0, 5)
type GCFsRepr = RegionRepr Bins (ArrayRepr (BinRepr (BinRepr (HalideRepr Dim1 Double GCFs))))
gcfsRepr :: GCFPar -> WDom -> GUVDom -> GCFsRepr
gcfsRepr gcfp wdom (gudom, gvdom) =
RegionRepr wdom $
ArrayRepr dimOver $
BinRepr gvdom $
BinRepr gudom $
halideRepr (dim1 dimCpx)
where dimOver = (0, fromIntegral $ gcfOver gcfp * gcfOver gcfp)
| null | https://raw.githubusercontent.com/SKA-ScienceDataProcessor/RC/1b5e25baf9204a9f7ef40ed8ee94a86cc6c674af/MS6/programs/Kernel/Data.hs | haskell | # LANGUAGE StandaloneDeriving, DeriveDataTypeable #
| Data representation definitions
* Configuration
* Data tags
* Data representations
* Visibility data representations
Data tags
^ Data set index
^ UV grid
^ Full UV grid
^ Image
^ Result from cleaning
^ A set of GCFs
^ Domain used for indexing data sets
^ Domain used for indexing visibilities
^ Domain used for the u grid dimension
^ Domain used for the v grid dimension
^ Domain used for the w grid pseudo-dimension
^ Domain used for the (u,v) grid dimensions
^ Domain used for the l image dimension
^ Domain used for the m image dimension
^ Domain used for the (l,m) image dimensions
^ Domain used for u grid dimension of grid convolution functions
^ Domain used for v grid dimension of grid convolution functions
^ Domain used for (u,v) grid dimensions of grid convolution functions
halideRepr dim0
| Raw visibilities: Dynamically sized list of visibility records
(see "dimVisFields"). |
module Kernel.Data
Config(..), OskarInput(..), GridKernelType(..), DegridKernelType(..)
, GridPar(..), GCFPar(..), GCFFile(..), CleanPar(..), StrategyPar(..)
, defaultConfig, cfgParallelism
, gridImageWidth, gridImageHeight, gridScale, gridXY2UV, gcfMaxSize, gcfGet
, Index, Tag, Vis, UVGrid, FullUVGrid, Image, Cleaned, GCFs
, DDom, TDom, UDom, VDom, WDom, UVDom, LDom, MDom, LMDom, GUDom, GVDom, GUVDom
, IndexRepr, UVGRepr, UVGMarginRepr, FacetRepr, ImageRepr, FullUVGRepr, PlanRepr, GCFsRepr
, indexRepr, uvgRepr, uvgMarginRepr, facetRepr, imageRepr, fullUVGRepr, planRepr, gcfsRepr
, RawVisRepr, RotatedVisRepr, VisRepr
, rawVisRepr, rotatedVisRepr, visRepr
) where
import Data.Typeable
import Data.Int ( Int32 )
import Flow.Halide
import Flow.Domain
import Flow.Kernel
import Kernel.Config
deriving instance Typeable Tag
deriving instance Typeable Vis
deriving instance Typeable UVGrid
deriving instance Typeable FullUVGrid
deriving instance Typeable Image
deriving instance Typeable GCFs
type IndexRepr = BinRepr (HalideRepr Dim0 Int32 Index)
indexRepr :: DDom -> IndexRepr
indexRepr ddom = BinRepr ddom $ halideRepr dim0
type UVGRepr = RangeRepr (RangeRepr (HalideRepr Dim1 Double UVGrid))
uvgRepr :: UVDom -> UVGRepr
uvgRepr (udom, vdom) =
RangeRepr vdom $
RangeRepr udom $
halideRepr (dim1 dimCpx)
type UVGMarginRepr = MarginRepr (MarginRepr (HalideRepr Dim1 Double UVGrid))
uvgMarginRepr :: GCFPar -> UVDom -> UVGMarginRepr
uvgMarginRepr gcfp (udom, vdom) =
marginRepr vdom (gcfMaxSize gcfp `div` 2) $
marginRepr udom (gcfMaxSize gcfp `div` 2) $
halideRepr (dim1 dimCpx)
dimCpx :: Dim
dimCpx = (0, 2)
type FacetRepr = HalideRepr Dim2 Double Image
facetRepr :: GridPar -> ImageRepr
facetRepr gp = halideRepr $ dimY :. dimX :. Z
where dimX = (0, fromIntegral $ gridWidth gp)
dimY = (0, fromIntegral $ gridHeight gp)
type ImageRepr = HalideRepr Dim2 Double Image
imageRepr :: GridPar -> ImageRepr
imageRepr gp = halideRepr $ dimY :. dimX :. Z
where dimX = (0, fromIntegral $ gridImageWidth gp)
dimY = (0, fromIntegral $ gridImageHeight gp)
type FullUVGRepr = HalideRepr Dim3 Double FullUVGrid
fullUVGRepr :: GridPar -> FullUVGRepr
fullUVGRepr gp = halideRepr $ dimY :. dimX :. dimCpx :. Z
where dimX = (0, fromIntegral $ gridImageWidth gp)
dimY = (0, fromIntegral $ gridImageHeight gp)
planRepr :: PlanRepr
type RawVisRepr = RangeRepr (HalideRepr Dim1 Double Vis)
rawVisRepr :: Domain Range -> RawVisRepr
rawVisRepr dom = RangeRepr dom $ halideRepr (dim1 dimVisFields)
type RotatedVisRepr = RegionRepr Range (RegionRepr Range (RangeRepr (HalideRepr Dim1 Double Vis)))
rotatedVisRepr :: LMDom -> TDom -> RotatedVisRepr
rotatedVisRepr (ldom, mdom) tdom =
RegionRepr ldom $ RegionRepr mdom $ RangeRepr tdom $
halideRepr (dim1 dimVisFields)
type VisRepr = RegionRepr Range (RegionRepr Range (BinRepr (HalideRepr Dim1 Double Vis)))
visRepr :: UVDom -> WDom -> VisRepr
visRepr (udom, vdom) wdom =
RegionRepr udom $ RegionRepr vdom $ BinRepr wdom $
halideRepr (dim1 dimVisFields)
dimVisFields :: Dim
dimVisFields = (0, 5)
type GCFsRepr = RegionRepr Bins (ArrayRepr (BinRepr (BinRepr (HalideRepr Dim1 Double GCFs))))
gcfsRepr :: GCFPar -> WDom -> GUVDom -> GCFsRepr
gcfsRepr gcfp wdom (gudom, gvdom) =
RegionRepr wdom $
ArrayRepr dimOver $
BinRepr gvdom $
BinRepr gudom $
halideRepr (dim1 dimCpx)
where dimOver = (0, fromIntegral $ gcfOver gcfp * gcfOver gcfp)
|
423340a4acb1dcc4cc8411deb3601eb16b6f70662220cc729f88b7777ea9fadd | digitallyinduced/ihp | Types.hs | |
Module: IHP.ServerSideComponent.Types
Description: Types & Data Structures for IHP SSC
Copyright: (c) digitally induced GmbH, 2021
-}
module IHP.ServerSideComponent.Types where
import IHP.ViewPrelude
import qualified Network.WebSockets as WebSocket
class Component state action | state -> action where
initialState :: state
render :: state -> Html
action ::
( ?instanceRef :: IORef (ComponentInstance state)
, ?connection :: WebSocket.Connection
, ?context :: ControllerContext
, ?modelContext :: ModelContext
) => state -> action -> IO state
componentDidMount ::
( ?instanceRef :: IORef (ComponentInstance state)
, ?connection :: WebSocket.Connection
, ?context :: ControllerContext
, ?modelContext :: ModelContext
) => state -> IO state
componentDidMount state = pure state
data ComponentsController components
= ComponentsController
deriving (Eq, Show, Data)
data ComponentInstance state
= ComponentInstance { state :: state } -- ^ If you wondered why the current rendered HTML doesn't need to be stored here for later diffing it: As our render functions are pure we can just re-render the HTML based on the state when we want to do our diffing
instance (SetField "state" (ComponentInstance state) state) where
setField state componentInstance = componentInstance { state }
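-- A minimal sketch of a component built on this class (CounterState and
-- IncrementAction are illustrative names, not part of this module):
--
-- > data CounterAction = IncrementAction
-- > newtype CounterState = CounterState { count :: Int }
-- >
-- > instance Component CounterState CounterAction where
-- >     initialState = CounterState { count = 0 }
-- >     render state = [hsx|<button>{count state}</button>|]
-- >     action state IncrementAction = pure state { count = count state + 1 }
--
-- Because render is pure, only the latest state needs to be kept; the HTML can
-- always be re-rendered from it whenever a diff against the previous render is needed.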
| null | https://raw.githubusercontent.com/digitallyinduced/ihp/18f104da69c526ff9e8ad3a6cdaedc6d39afb38c/IHP/ServerSideComponent/Types.hs | haskell | ^ If you wondered why the current rendered HTML doesn't need to be stored here for later diffing it: As our render functions are pure we can just re-render the HTML based on the state when we want to do our diffing | |
Module : IHP.ServerSideComponent . Types
Description : Types & Data Structures for IHP SSC
Copyright : ( c ) digitally induced GmbH , 2021
Module: IHP.ServerSideComponent.Types
Description: Types & Data Structures for IHP SSC
Copyright: (c) digitally induced GmbH, 2021
-}
module IHP.ServerSideComponent.Types where
import IHP.ViewPrelude
import qualified Network.WebSockets as WebSocket
class Component state action | state -> action where
initialState :: state
render :: state -> Html
action ::
( ?instanceRef :: IORef (ComponentInstance state)
, ?connection :: WebSocket.Connection
, ?context :: ControllerContext
, ?modelContext :: ModelContext
) => state -> action -> IO state
componentDidMount ::
( ?instanceRef :: IORef (ComponentInstance state)
, ?connection :: WebSocket.Connection
, ?context :: ControllerContext
, ?modelContext :: ModelContext
) => state -> IO state
componentDidMount state = pure state
data ComponentsController components
= ComponentsController
deriving (Eq, Show, Data)
data ComponentInstance state
instance (SetField "state" (ComponentInstance state) state) where
setField state componentInstance = componentInstance { state }
|
43fa0251a7961bc9287b7290a15962188537e5cc5e71c4182c409047f3f1689a | warmte/hi | Parser.hs | module HW3.Parser where
import HW3.Base ( functions, HiFun(..), HiValue(..), HiExpr(..), HiAction (..) )
import Data.Void (Void)
import Text.Megaparsec ( ParseErrorBundle,
parseTest,
MonadParsec(try, notFollowedBy),
manyTill,
(<|>),
(<?>),
parse,
parseTest,
between,
choice,
many,
Parsec,
MonadParsec(eof, label),
ParseErrorBundle, optional, endBy, sepBy, sepBy1, sepEndBy, satisfy )
import Control.Monad.Trans.Except (Except, runExcept)
import Data.Functor.Identity (Identity)
import Data.Text ( pack )
import Text.Megaparsec.Char ( string, char, space1, hexDigitChar )
import Control.Monad.Combinators.Expr ( makeExprParser, Operator (..) )
import qualified Text.Megaparsec.Char.Lexer as L
import qualified GHC.TypeLits as Data
import qualified Data.ByteString
import Data.Scientific (toRealFloat)
import Text.Megaparsec.Stream ()
import Data.Sequence (fromList)
import Data.Word (Word8)
import Data.Char (isAlphaNum, isAlpha)
import Data.List (intercalate)
type Parser = Parsec Void String
-- space skipping parser
skipSpace :: Parser ()
skipSpace = L.space space1 (L.skipLineComment ";;") (L.skipBlockCommentNested "/*" "*/")
-- lexeme parser
lexeme :: Parser a -> Parser a
lexeme = L.lexeme skipSpace
-- parser for numeric values
num :: Parser HiExpr
num = label "number" $ HiExprValue . HiValueNumber <$> L.signed skipSpace (do toRational <$> L.scientific)
-- parser for boolean values
bool :: Parser HiExpr
bool = label "bool" $ choice [ HiExprValue <$> (HiValueBool True <$ string "true"), HiExprValue <$> (HiValueBool False <$ string "false") ]
-- parser for null value
nullp :: Parser HiExpr
nullp = label "null" $ HiExprValue <$> (HiValueNull <$ string "null")
-- parser for function names
fun :: Parser HiExpr
fun = label "function" $ HiExprValue <$> (HiValueFunction <$> choice (map (\(name, fun) -> fun <$ string name) functions))
-- parser for actions which are not parser as functions
action :: Parser HiExpr
action = label "action" $ HiExprValue <$> choice [ HiValueAction HiActionCwd <$ string "cwd", HiValueAction HiActionNow <$ string "now" ]
-- parser for strings
stringLiteral :: Parser HiExpr
stringLiteral = HiExprValue <$> (HiValueString . pack <$> (char '"' >> manyTill L.charLiteral (char '"')))
-- parser for lists of values
list :: Parser HiExpr
list = do
elems <- between (lexeme $ char '[') (lexeme $ char ']') args
pure $ HiExprApply (HiExprValue $ HiValueFunction HiFunList) elems
-- parser for bytestrings
bytearray :: Parser HiExpr
bytearray = label "bytestring" $ do
elems <- between (lexeme $ string "[#") (lexeme $ string "#]") (sepEndBy byte space1)
pure $ HiExprValue $ HiValueBytes $ Data.ByteString.pack elems
-- parser which parses a byte
byte :: Parser Word8
byte = do
f <- hexDigitChar
s <- hexDigitChar
return $ read $ "0x" ++ [f, s]
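-- A small illustration of the trick above (byteSample is an illustrative name,
-- not part of the original module): the two hex digits are glued onto "0x" and
-- read back, so the digits 'f' 'f' yield the Word8 value 255.
byteSample :: Word8
byteSample = read "0xff"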
-- parser which parses a dictionary key-value element
dictelem :: Parser (HiExpr, HiExpr)
dictelem = lexeme $ do
key <- lexeme hiexpr
_ <- lexeme $ char ':'
value <- lexeme hiexpr
return (key, value)
-- parser for dictionaries
dict :: Parser HiExpr
dict = label "dictionary" $ HiExprDict <$> between (lexeme $ char '{') (lexeme $ char '}') (sepBy dictelem (lexeme $ char ','))
-- parser for all values
value :: Parser HiExpr
value = label "value" $ choice [ dict, fun, action, num, bool, nullp, stringLiteral, bytearray, list ]
-- parser for a list of arguments
args :: Parser [HiExpr]
args = sepBy hiexpr (lexeme $ char ',')
-- different ways to apply something to a function
data FuncModifier = Args [HiExpr] | DottedArg HiExpr | Run deriving Show
-- parser for function arguments
funcargs :: Parser FuncModifier
funcargs = Args <$> between (lexeme $ char '(') (lexeme $ char ')') args
-- parser for one dotted function argument
dotarg :: Parser FuncModifier
dotarg = lexeme $ do
_ <- lexeme $ char '.'
text <- ((:) <$> satisfy isAlpha <*> many (satisfy isAlphaNum)) `sepBy1` char '-'
pure $ DottedArg $ HiExprValue $ HiValueString $ pack $ intercalate "-" text
-- parser for run sign
run :: Parser FuncModifier
run = Run <$ lexeme (char '!')
-- parser which parses function application
hiexprapply :: Parser HiExpr
hiexprapply = label "expression application" $ do
name <- lexeme (between (lexeme $ char '(') (lexeme $ char ')') hiexpr <|> value)
funArgs <- many (funcargs <|> run <|> dotarg)
pure $ if not (Prelude.null funArgs) then combine name funArgs else name
where
-- evaluate a result value of all stacked applications
combine :: HiExpr -> [FuncModifier] -> HiExpr
combine x [] = x
combine x [Args args] = HiExprApply x args
combine x [DottedArg arg] = HiExprApply x [arg]
combine x [Run] = HiExprRun x
combine x ((Args args):ys) = combine (HiExprApply x args) ys
combine x ((DottedArg arg):ys) = combine (HiExprApply x [arg]) ys
combine x (Run:ys) = combine (HiExprRun x) ys
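-- A minimal sketch of how a stacked application folds up (applySketch is an
-- illustrative name, not part of the original grammar): an input such as
-- f(1)! produces the modifier list [Args [1], Run], which combine turns into
-- HiExprRun (HiExprApply f [1]):
applySketch :: HiExpr -> HiExpr
applySketch f = HiExprRun (HiExprApply f [HiExprValue (HiValueNumber 1)])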
-- parser for parse all the expression except binary operators
hiexpr' :: Parser HiExpr
hiexpr' = label "expression" $ choice [ hiexprapply, between (lexeme $ char '(') (lexeme $ char ')') hiexpr ]
-- main parser
hiexpr :: Parser HiExpr
hiexpr = makeExprParser hiexpr' table <?> "table expression"
-- parser which parses 'operation' symbol which are not followed by the 'notfollow' symbol
-- we need it to be able to parse '/' operation which is unfortunately a prefix or another operation '/=' with less priority
op :: String -> String -> Parser String
op operation notfollow = (lexeme . try) (string operation <* notFollowedBy (lexeme $ string notfollow))
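-- A minimal sketch of why the lookahead matters (divisionSign is an
-- illustrative name, not part of the original module): a plain string "/"
-- would commit to '/' on input like "2 /= 3" and then fail on '='; op "/" "="
-- wraps the match in try, so it backtracks without consuming and the "/=" row
-- further down the table can parse it instead.
divisionSign :: Parser String
divisionSign = op "/" "="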
table :: [[Operator Parser HiExpr]]
table = [ [ binary InfixL "*" HiFunMul
, binary' InfixL (op "/" "=") HiFunDiv ]
, [ binary InfixL "+" HiFunAdd
, binary InfixL "-" HiFunSub ]
, [ binary InfixN "<=" HiFunNotGreaterThan
, binary InfixN ">=" HiFunNotLessThan
, binary InfixN ">" HiFunGreaterThan
, binary InfixN "<" HiFunLessThan
, binary InfixN "==" HiFunEquals
, binary InfixN "/=" HiFunNotEquals ]
, [ binary InfixR "&&" HiFunAnd ]
, [ binary InfixR "||" HiFunOr ] ]
-- construct normal hi-function application by binary operations symbols
binary :: (Parser (HiExpr -> HiExpr -> HiExpr) -> Operator Parser HiExpr) -> String -> HiFun -> Operator Parser HiExpr
binary infixtype name = binary' infixtype (lexeme $ string name)
binary' :: (Parser (HiExpr -> HiExpr -> HiExpr) -> Operator Parser HiExpr) -> Parser String -> HiFun -> Operator Parser HiExpr
binary' pri parser fun = pri ((\ x y -> HiExprApply (HiExprValue $ HiValueFunction fun) [x, y]) <$ parser )
-- parseT :: String -> IO ()
-- parseT = parseTest hiexpr
-- main parse function
parse :: String -> Either (ParseErrorBundle String Void) HiExpr
parse = Text.Megaparsec.parse (between skipSpace eof hiexpr) ""
| null | https://raw.githubusercontent.com/warmte/hi/2f01ccb9cf98c6267a26c5fd7f397558c0dbc14d/src/HW3/Parser.hs | haskell | space skipping parser
parser for numeric values
parser for boolean values
parser for null value
parser for function names
parser for actions which are not parser as functions
parser for strings
parser for lists of values
parser which parses a byte
parser which parses a dictionary key-value element
parser for dictionaries
parser for all values
parser for a list of arguments
different ways to apply something to a function
parser for function arguments
parser for run sign
parser which parses function application
evaluate a result value of all stacked applications
parser for parse all the expression except binary operators
main parser
parser which parses 'operation' symbol which are not followed by the 'notfollow' symbol
we need it to be able to parse '/' operation which is unfortunately a prefix or another operation '/=' with less priority
construct normal hi-function application by binary operations symbols
parseT :: String -> IO ()
parseT = parseTest hiexpr
main parse function | module HW3.Parser where
import HW3.Base ( functions, HiFun(..), HiValue(..), HiExpr(..), HiAction (..) )
import Data.Void (Void)
import Text.Megaparsec ( ParseErrorBundle,
parseTest,
MonadParsec(try, notFollowedBy),
manyTill,
(<|>),
(<?>),
parse,
parseTest,
between,
choice,
many,
Parsec,
MonadParsec(eof, label),
ParseErrorBundle, optional, endBy, sepBy, sepBy1, sepEndBy, satisfy )
import Control.Monad.Trans.Except (Except, runExcept)
import Data.Functor.Identity (Identity)
import Data.Text ( pack )
import Text.Megaparsec.Char ( string, char, space1, hexDigitChar )
import Control.Monad.Combinators.Expr ( makeExprParser, Operator (..) )
import qualified Text.Megaparsec.Char.Lexer as L
import qualified GHC.TypeLits as Data
import qualified Data.ByteString
import Data.Scientific (toRealFloat)
import Text.Megaparsec.Stream ()
import Data.Sequence (fromList)
import Data.Word (Word8)
import Data.Char (isAlphaNum, isAlpha)
import Data.List (intercalate)
type Parser = Parsec Void String
skipSpace :: Parser ()
skipSpace = L.space space1 (L.skipLineComment ";;") (L.skipBlockCommentNested "/*" "*/")
lexeme :: Parser a -> Parser a
lexeme = L.lexeme skipSpace
num :: Parser HiExpr
num = label "number" $ HiExprValue . HiValueNumber <$> L.signed skipSpace (do toRational <$> L.scientific)
bool :: Parser HiExpr
bool = label "bool" $ choice [ HiExprValue <$> (HiValueBool True <$ string "true"), HiExprValue <$> (HiValueBool False <$ string "false") ]
nullp :: Parser HiExpr
nullp = label "null" $ HiExprValue <$> (HiValueNull <$ string "null")
fun :: Parser HiExpr
fun = label "function" $ HiExprValue <$> (HiValueFunction <$> choice (map (\(name, fun) -> fun <$ string name) functions))
action :: Parser HiExpr
action = label "action" $ HiExprValue <$> choice [ HiValueAction HiActionCwd <$ string "cwd", HiValueAction HiActionNow <$ string "now" ]
stringLiteral :: Parser HiExpr
stringLiteral = HiExprValue <$> (HiValueString . pack <$> (char '"' >> manyTill L.charLiteral (char '"')))
list :: Parser HiExpr
list = do
elems <- between (lexeme $ char '[') (lexeme $ char ']') args
pure $ HiExprApply (HiExprValue $ HiValueFunction HiFunList) elems
bytearray :: Parser HiExpr
bytearray = label "bytestring" $ do
elems <- between (lexeme $ string "[#") (lexeme $ string "#]") (sepEndBy byte space1)
pure $ HiExprValue $ HiValueBytes $ Data.ByteString.pack elems
byte :: Parser Word8
byte = do
f <- hexDigitChar
s <- hexDigitChar
return $ read $ "0x" ++ [f, s]
dictelem :: Parser (HiExpr, HiExpr)
dictelem = lexeme $ do
key <- lexeme hiexpr
_ <- lexeme $ char ':'
value <- lexeme hiexpr
return (key, value)
dict :: Parser HiExpr
dict = label "dictionary" $ HiExprDict <$> between (lexeme $ char '{') (lexeme $ char '}') (sepBy dictelem (lexeme $ char ','))
value :: Parser HiExpr
value = label "value" $ choice [ dict, fun, action, num, bool, nullp, stringLiteral, bytearray, list ]
args :: Parser [HiExpr]
args = sepBy hiexpr (lexeme $ char ',')
data FuncModifier = Args [HiExpr] | DottedArg HiExpr | Run deriving Show
funcargs :: Parser FuncModifier
funcargs = Args <$> between (lexeme $ char '(') (lexeme $ char ')') args
dotarg :: Parser FuncModifier
dotarg = lexeme $ do
_ <- lexeme $ char '.'
text <- ((:) <$> satisfy isAlpha <*> many (satisfy isAlphaNum)) `sepBy1` char '-'
pure $ DottedArg $ HiExprValue $ HiValueString $ pack $ intercalate "-" text
run :: Parser FuncModifier
run = Run <$ lexeme (char '!')
hiexprapply :: Parser HiExpr
hiexprapply = label "expression application" $ do
name <- lexeme (between (lexeme $ char '(') (lexeme $ char ')') hiexpr <|> value)
funArgs <- many (funcargs <|> run <|> dotarg)
pure $ if not (Prelude.null funArgs) then combine name funArgs else name
where
combine :: HiExpr -> [FuncModifier] -> HiExpr
combine x [] = x
combine x [Args args] = HiExprApply x args
combine x [DottedArg arg] = HiExprApply x [arg]
combine x [Run] = HiExprRun x
combine x ((Args args):ys) = combine (HiExprApply x args) ys
combine x ((DottedArg arg):ys) = combine (HiExprApply x [arg]) ys
combine x (Run:ys) = combine (HiExprRun x) ys
hiexpr' :: Parser HiExpr
hiexpr' = label "expression" $ choice [ hiexprapply, between (lexeme $ char '(') (lexeme $ char ')') hiexpr ]
hiexpr :: Parser HiExpr
hiexpr = makeExprParser hiexpr' table <?> "table expression"
op :: String -> String -> Parser String
op operation notfollow = (lexeme . try) (string operation <* notFollowedBy (lexeme $ string notfollow))
table :: [[Operator Parser HiExpr]]
table = [ [ binary InfixL "*" HiFunMul
, binary' InfixL (op "/" "=") HiFunDiv ]
, [ binary InfixL "+" HiFunAdd
, binary InfixL "-" HiFunSub ]
, [ binary InfixN "<=" HiFunNotGreaterThan
, binary InfixN ">=" HiFunNotLessThan
, binary InfixN ">" HiFunGreaterThan
, binary InfixN "<" HiFunLessThan
, binary InfixN "==" HiFunEquals
, binary InfixN "/=" HiFunNotEquals ]
, [ binary InfixR "&&" HiFunAnd ]
, [ binary InfixR "||" HiFunOr ] ]
binary :: (Parser (HiExpr -> HiExpr -> HiExpr) -> Operator Parser HiExpr) -> String -> HiFun -> Operator Parser HiExpr
binary infixtype name = binary' infixtype (lexeme $ string name)
binary' :: (Parser (HiExpr -> HiExpr -> HiExpr) -> Operator Parser HiExpr) -> Parser String -> HiFun -> Operator Parser HiExpr
binary' pri parser fun = pri ((\ x y -> HiExprApply (HiExprValue $ HiValueFunction fun) [x, y]) <$ parser )
parse :: String -> Either (ParseErrorBundle String Void) HiExpr
parse = Text.Megaparsec.parse (between skipSpace eof hiexpr) ""
|
9ca6e196a9b0990acb0d6c483ace2b4f87bac06724185c644100a6f3d9f73bb0 | cryptosense/pkcs11 | p11_attribute_type.ml | type not_implemented = NOT_IMPLEMENTED of string
type 'a t =
| CKA_CLASS : P11_object_class.t t
| CKA_TOKEN : bool t
| CKA_PRIVATE : bool t
| CKA_LABEL : string t
| CKA_VALUE : string t
| CKA_TRUSTED : bool t
| CKA_CHECK_VALUE : not_implemented t
| CKA_KEY_TYPE : P11_key_type.t t
| CKA_SUBJECT : string t
| CKA_ID : string t
| CKA_SENSITIVE : bool t
| CKA_ENCRYPT : bool t
| CKA_DECRYPT : bool t
| CKA_WRAP : bool t
| CKA_UNWRAP : bool t
| CKA_SIGN : bool t
| CKA_SIGN_RECOVER : bool t
| CKA_VERIFY : bool t
| CKA_VERIFY_RECOVER : bool t
| CKA_DERIVE : bool t
| CKA_START_DATE : not_implemented t
| CKA_END_DATE : not_implemented t
| CKA_MODULUS : P11_bigint.t t
| CKA_MODULUS_BITS : P11_ulong.t t
| CKA_PUBLIC_EXPONENT : P11_bigint.t t
| CKA_PRIVATE_EXPONENT : P11_bigint.t t
| CKA_PRIME_1 : P11_bigint.t t
| CKA_PRIME_2 : P11_bigint.t t
| CKA_EXPONENT_1 : P11_bigint.t t
| CKA_EXPONENT_2 : P11_bigint.t t
| CKA_COEFFICIENT : P11_bigint.t t
| CKA_PRIME : P11_bigint.t t
| CKA_SUBPRIME : P11_bigint.t t
| CKA_BASE : P11_bigint.t t
| CKA_PRIME_BITS : P11_ulong.t t
| CKA_SUBPRIME_BITS : P11_ulong.t t
| CKA_VALUE_LEN : P11_ulong.t t
| CKA_EXTRACTABLE : bool t
| CKA_LOCAL : bool t
| CKA_NEVER_EXTRACTABLE : bool t
| CKA_ALWAYS_SENSITIVE : bool t
| CKA_KEY_GEN_MECHANISM : P11_key_gen_mechanism.t t
| CKA_MODIFIABLE : bool t
| CKA_EC_PARAMS : string t
| CKA_EC_POINT : string t
| CKA_ALWAYS_AUTHENTICATE : bool t
| CKA_WRAP_WITH_TRUSTED : bool t
| CKA_WRAP_TEMPLATE : not_implemented t
| CKA_UNWRAP_TEMPLATE : not_implemented t
| CKA_ALLOWED_MECHANISMS : not_implemented t
| CKA_CS_UNKNOWN : Unsigned.ULong.t -> not_implemented t
type pack = Pack : 'a t -> pack
let to_string : type a. a t -> string = function
| CKA_CLASS -> "CKA_CLASS"
| CKA_TOKEN -> "CKA_TOKEN"
| CKA_PRIVATE -> "CKA_PRIVATE"
| CKA_LABEL -> "CKA_LABEL"
| CKA_VALUE -> "CKA_VALUE"
| CKA_TRUSTED -> "CKA_TRUSTED"
| CKA_CHECK_VALUE -> "CKA_CHECK_VALUE"
| CKA_KEY_TYPE -> "CKA_KEY_TYPE"
| CKA_SUBJECT -> "CKA_SUBJECT"
| CKA_ID -> "CKA_ID"
| CKA_SENSITIVE -> "CKA_SENSITIVE"
| CKA_ENCRYPT -> "CKA_ENCRYPT"
| CKA_DECRYPT -> "CKA_DECRYPT"
| CKA_WRAP -> "CKA_WRAP"
| CKA_UNWRAP -> "CKA_UNWRAP"
| CKA_SIGN -> "CKA_SIGN"
| CKA_SIGN_RECOVER -> "CKA_SIGN_RECOVER"
| CKA_VERIFY -> "CKA_VERIFY"
| CKA_VERIFY_RECOVER -> "CKA_VERIFY_RECOVER"
| CKA_DERIVE -> "CKA_DERIVE"
| CKA_START_DATE -> "CKA_START_DATE"
| CKA_END_DATE -> "CKA_END_DATE"
| CKA_MODULUS -> "CKA_MODULUS"
| CKA_MODULUS_BITS -> "CKA_MODULUS_BITS"
| CKA_PUBLIC_EXPONENT -> "CKA_PUBLIC_EXPONENT"
| CKA_PRIVATE_EXPONENT -> "CKA_PRIVATE_EXPONENT"
| CKA_PRIME_1 -> "CKA_PRIME_1"
| CKA_PRIME_2 -> "CKA_PRIME_2"
| CKA_EXPONENT_1 -> "CKA_EXPONENT_1"
| CKA_EXPONENT_2 -> "CKA_EXPONENT_2"
| CKA_COEFFICIENT -> "CKA_COEFFICIENT"
| CKA_PRIME -> "CKA_PRIME"
| CKA_SUBPRIME -> "CKA_SUBPRIME"
| CKA_BASE -> "CKA_BASE"
| CKA_PRIME_BITS -> "CKA_PRIME_BITS"
| CKA_SUBPRIME_BITS -> "CKA_SUBPRIME_BITS"
| CKA_VALUE_LEN -> "CKA_VALUE_LEN"
| CKA_EXTRACTABLE -> "CKA_EXTRACTABLE"
| CKA_LOCAL -> "CKA_LOCAL"
| CKA_NEVER_EXTRACTABLE -> "CKA_NEVER_EXTRACTABLE"
| CKA_ALWAYS_SENSITIVE -> "CKA_ALWAYS_SENSITIVE"
| CKA_KEY_GEN_MECHANISM -> "CKA_KEY_GEN_MECHANISM"
| CKA_MODIFIABLE -> "CKA_MODIFIABLE"
(* | CKA_ECDSA_PARAMS -> "CKA_ECDSA_PARAMS" *)
| CKA_EC_PARAMS -> "CKA_EC_PARAMS"
| CKA_EC_POINT -> "CKA_EC_POINT"
| CKA_ALWAYS_AUTHENTICATE -> "CKA_ALWAYS_AUTHENTICATE"
| CKA_WRAP_WITH_TRUSTED -> "CKA_WRAP_WITH_TRUSTED"
| CKA_WRAP_TEMPLATE -> "CKA_WRAP_TEMPLATE"
| CKA_UNWRAP_TEMPLATE -> "CKA_UNWRAP_TEMPLATE"
| CKA_ALLOWED_MECHANISMS -> "CKA_ALLOWED_MECHANISMS"
| CKA_CS_UNKNOWN ul -> Unsigned.ULong.to_string ul
let of_string = function
| "CKA_CLASS" -> Pack CKA_CLASS
| "CKA_TOKEN" -> Pack CKA_TOKEN
| "CKA_PRIVATE" -> Pack CKA_PRIVATE
| "CKA_LABEL" -> Pack CKA_LABEL
| "CKA_VALUE" -> Pack CKA_VALUE
| "CKA_TRUSTED" -> Pack CKA_TRUSTED
| "CKA_CHECK_VALUE" -> Pack CKA_CHECK_VALUE
| "CKA_KEY_TYPE" -> Pack CKA_KEY_TYPE
| "CKA_SUBJECT" -> Pack CKA_SUBJECT
| "CKA_ID" -> Pack CKA_ID
| "CKA_SENSITIVE" -> Pack CKA_SENSITIVE
| "CKA_ENCRYPT" -> Pack CKA_ENCRYPT
| "CKA_DECRYPT" -> Pack CKA_DECRYPT
| "CKA_WRAP" -> Pack CKA_WRAP
| "CKA_UNWRAP" -> Pack CKA_UNWRAP
| "CKA_SIGN" -> Pack CKA_SIGN
| "CKA_SIGN_RECOVER" -> Pack CKA_SIGN_RECOVER
| "CKA_VERIFY" -> Pack CKA_VERIFY
| "CKA_VERIFY_RECOVER" -> Pack CKA_VERIFY_RECOVER
| "CKA_DERIVE" -> Pack CKA_DERIVE
| "CKA_START_DATE" -> Pack CKA_START_DATE
| "CKA_END_DATE" -> Pack CKA_END_DATE
| "CKA_MODULUS" -> Pack CKA_MODULUS
| "CKA_MODULUS_BITS" -> Pack CKA_MODULUS_BITS
| "CKA_PUBLIC_EXPONENT" -> Pack CKA_PUBLIC_EXPONENT
| "CKA_PRIVATE_EXPONENT" -> Pack CKA_PRIVATE_EXPONENT
| "CKA_PRIME_1" -> Pack CKA_PRIME_1
| "CKA_PRIME_2" -> Pack CKA_PRIME_2
| "CKA_EXPONENT_1" -> Pack CKA_EXPONENT_1
| "CKA_EXPONENT_2" -> Pack CKA_EXPONENT_2
| "CKA_COEFFICIENT" -> Pack CKA_COEFFICIENT
| "CKA_PRIME" -> Pack CKA_PRIME
| "CKA_SUBPRIME" -> Pack CKA_SUBPRIME
| "CKA_BASE" -> Pack CKA_BASE
| "CKA_PRIME_BITS" -> Pack CKA_PRIME_BITS
| "CKA_SUBPRIME_BITS" -> Pack CKA_SUBPRIME_BITS
| "CKA_SUB_PRIME_BITS" -> Pack CKA_SUBPRIME_BITS
| "CKA_VALUE_LEN" -> Pack CKA_VALUE_LEN
| "CKA_EXTRACTABLE" -> Pack CKA_EXTRACTABLE
| "CKA_LOCAL" -> Pack CKA_LOCAL
| "CKA_NEVER_EXTRACTABLE" -> Pack CKA_NEVER_EXTRACTABLE
| "CKA_ALWAYS_SENSITIVE" -> Pack CKA_ALWAYS_SENSITIVE
| "CKA_KEY_GEN_MECHANISM" -> Pack CKA_KEY_GEN_MECHANISM
| "CKA_MODIFIABLE" -> Pack CKA_MODIFIABLE
| "CKA_ECDSA_PARAMS" -> Pack CKA_EC_PARAMS
| "CKA_EC_PARAMS" -> Pack CKA_EC_PARAMS
| "CKA_EC_POINT" -> Pack CKA_EC_POINT
| "CKA_ALWAYS_AUTHENTICATE" -> Pack CKA_ALWAYS_AUTHENTICATE
| "CKA_WRAP_WITH_TRUSTED" -> Pack CKA_WRAP_WITH_TRUSTED
| "CKA_WRAP_TEMPLATE" -> Pack CKA_WRAP_TEMPLATE
| "CKA_UNWRAP_TEMPLATE" -> Pack CKA_UNWRAP_TEMPLATE
| "CKA_ALLOWED_MECHANISMS" -> Pack CKA_ALLOWED_MECHANISMS
| s -> (
try Pack (CKA_CS_UNKNOWN (Unsigned.ULong.of_string s)) with
| Failure _ -> invalid_arg "CK_ATTRIBUTE_TYPE.of_string")
module Encoding = struct
let ( ! ) x = Unsigned.ULong.of_string (Int64.to_string x)
let ckf_ARRAY_ATTRIBUTE = 0x40000000L
let _CKA_CLASS = !0x00000000L
let _CKA_TOKEN = !0x00000001L
let _CKA_PRIVATE = !0x00000002L
let _CKA_LABEL = !0x00000003L
let _CKA_APPLICATION = !0x00000010L
let _CKA_VALUE = !0x00000011L
let _CKA_OBJECT_ID = !0x00000012L
let _CKA_CERTIFICATE_TYPE = !0x00000080L
let _CKA_ISSUER = !0x00000081L
let _CKA_SERIAL_NUMBER = !0x00000082L
let _CKA_AC_ISSUER = !0x00000083L
let _CKA_OWNER = !0x00000084L
let _CKA_ATTR_TYPES = !0x00000085L
let _CKA_TRUSTED = !0x00000086L
let _CKA_CERTIFICATE_CATEGORY = !0x00000087L
let _CKA_JAVA_MIDP_SECURITY_DOMAIN = !0x00000088L
let _CKA_URL = !0x00000089L
let _CKA_HASH_OF_SUBJECT_PUBLIC_KEY = !0x0000008AL
let _CKA_HASH_OF_ISSUER_PUBLIC_KEY = !0x0000008BL
let _CKA_CHECK_VALUE = !0x00000090L
let _CKA_KEY_TYPE = !0x00000100L
let _CKA_SUBJECT = !0x00000101L
let _CKA_ID = !0x00000102L
let _CKA_SENSITIVE = !0x00000103L
let _CKA_ENCRYPT = !0x00000104L
let _CKA_DECRYPT = !0x00000105L
let _CKA_WRAP = !0x00000106L
let _CKA_UNWRAP = !0x00000107L
let _CKA_SIGN = !0x00000108L
let _CKA_SIGN_RECOVER = !0x00000109L
let _CKA_VERIFY = !0x0000010AL
let _CKA_VERIFY_RECOVER = !0x0000010BL
let _CKA_DERIVE = !0x0000010CL
let _CKA_START_DATE = !0x00000110L
let _CKA_END_DATE = !0x00000111L
let _CKA_MODULUS = !0x00000120L
let _CKA_MODULUS_BITS = !0x00000121L
let _CKA_PUBLIC_EXPONENT = !0x00000122L
let _CKA_PRIVATE_EXPONENT = !0x00000123L
let _CKA_PRIME_1 = !0x00000124L
let _CKA_PRIME_2 = !0x00000125L
let _CKA_EXPONENT_1 = !0x00000126L
let _CKA_EXPONENT_2 = !0x00000127L
let _CKA_COEFFICIENT = !0x00000128L
let _CKA_PRIME = !0x00000130L
let _CKA_SUBPRIME = !0x00000131L
let _CKA_BASE = !0x00000132L
let _CKA_PRIME_BITS = !0x00000133L
let _CKA_SUBPRIME_BITS = !0x00000134L
let _CKA_VALUE_BITS = !0x00000160L
let _CKA_VALUE_LEN = !0x00000161L
let _CKA_EXTRACTABLE = !0x00000162L
let _CKA_LOCAL = !0x00000163L
let _CKA_NEVER_EXTRACTABLE = !0x00000164L
let _CKA_ALWAYS_SENSITIVE = !0x00000165L
let _CKA_KEY_GEN_MECHANISM = !0x00000166L
let _CKA_MODIFIABLE = !0x00000170L
let _CKA_EC_PARAMS = !0x00000180L
let _CKA_EC_POINT = !0x00000181L
let _CKA_SECONDARY_AUTH = !0x00000200L
let _CKA_AUTH_PIN_FLAGS = !0x00000201L
let _CKA_ALWAYS_AUTHENTICATE = !0x00000202L
let _CKA_WRAP_WITH_TRUSTED = !0x00000210L
let _CKA_WRAP_TEMPLATE = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000211L)
let _CKA_UNWRAP_TEMPLATE = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000212L)
let _CKA_OTP_FORMAT = !0x00000220L
let _CKA_OTP_LENGTH = !0x00000221L
let _CKA_OTP_TIME_INTERVAL = !0x00000222L
let _CKA_OTP_USER_FRIENDLY_MODE = !0x00000223L
let _CKA_OTP_CHALLENGE_REQUIREMENT = !0x00000224L
let _CKA_OTP_TIME_REQUIREMENT = !0x00000225L
let _CKA_OTP_COUNTER_REQUIREMENT = !0x00000226L
let _CKA_OTP_PIN_REQUIREMENT = !0x00000227L
let _CKA_OTP_COUNTER = !0x0000022EL
let _CKA_OTP_TIME = !0x0000022FL
let _CKA_OTP_USER_IDENTIFIER = !0x0000022AL
let _CKA_OTP_SERVICE_IDENTIFIER = !0x0000022BL
let _CKA_OTP_SERVICE_LOGO = !0x0000022CL
let _CKA_OTP_SERVICE_LOGO_TYPE = !0x0000022DL
let _CKA_HW_FEATURE_TYPE = !0x00000300L
let _CKA_RESET_ON_INIT = !0x00000301L
let _CKA_HAS_RESET = !0x00000302L
let _CKA_PIXEL_X = !0x00000400L
let _CKA_PIXEL_Y = !0x00000401L
let _CKA_RESOLUTION = !0x00000402L
let _CKA_CHAR_ROWS = !0x00000403L
let _CKA_CHAR_COLUMNS = !0x00000404L
let _CKA_COLOR = !0x00000405L
let _CKA_BITS_PER_PIXEL = !0x00000406L
let _CKA_CHAR_SETS = !0x00000480L
let _CKA_ENCODING_METHODS = !0x00000481L
let _CKA_MIME_TYPES = !0x00000482L
let _CKA_MECHANISM_TYPE = !0x00000500L
let _CKA_REQUIRED_CMS_ATTRIBUTES = !0x00000501L
let _CKA_DEFAULT_CMS_ATTRIBUTES = !0x00000502L
let _CKA_SUPPORTED_CMS_ATTRIBUTES = !0x00000503L
let _CKA_ALLOWED_MECHANISMS = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000600L)
let _CKA_VENDOR_DEFINED = !0x80000000L
let make (type s) (x : s t) : Unsigned.ULong.t =
match x with
| CKA_CLASS -> _CKA_CLASS
| CKA_TOKEN -> _CKA_TOKEN
| CKA_PRIVATE -> _CKA_PRIVATE
| CKA_LABEL -> _CKA_LABEL
| CKA_VALUE -> _CKA_VALUE
| CKA_TRUSTED -> _CKA_TRUSTED
| CKA_CHECK_VALUE -> _CKA_CHECK_VALUE
| CKA_KEY_TYPE -> _CKA_KEY_TYPE
| CKA_SUBJECT -> _CKA_SUBJECT
| CKA_ID -> _CKA_ID
| CKA_SENSITIVE -> _CKA_SENSITIVE
| CKA_ENCRYPT -> _CKA_ENCRYPT
| CKA_DECRYPT -> _CKA_DECRYPT
| CKA_WRAP -> _CKA_WRAP
| CKA_UNWRAP -> _CKA_UNWRAP
| CKA_SIGN -> _CKA_SIGN
| CKA_SIGN_RECOVER -> _CKA_SIGN_RECOVER
| CKA_VERIFY -> _CKA_VERIFY
| CKA_VERIFY_RECOVER -> _CKA_VERIFY_RECOVER
| CKA_DERIVE -> _CKA_DERIVE
| CKA_START_DATE -> _CKA_START_DATE
| CKA_END_DATE -> _CKA_END_DATE
| CKA_MODULUS -> _CKA_MODULUS
| CKA_MODULUS_BITS -> _CKA_MODULUS_BITS
| CKA_PUBLIC_EXPONENT -> _CKA_PUBLIC_EXPONENT
| CKA_PRIVATE_EXPONENT -> _CKA_PRIVATE_EXPONENT
| CKA_PRIME_1 -> _CKA_PRIME_1
| CKA_PRIME_2 -> _CKA_PRIME_2
| CKA_EXPONENT_1 -> _CKA_EXPONENT_1
| CKA_EXPONENT_2 -> _CKA_EXPONENT_2
| CKA_COEFFICIENT -> _CKA_COEFFICIENT
| CKA_PRIME -> _CKA_PRIME
| CKA_SUBPRIME -> _CKA_SUBPRIME
| CKA_BASE -> _CKA_BASE
| CKA_PRIME_BITS -> _CKA_PRIME_BITS
| CKA_SUBPRIME_BITS -> _CKA_SUBPRIME_BITS
| CKA_VALUE_LEN -> _CKA_VALUE_LEN
| CKA_EXTRACTABLE -> _CKA_EXTRACTABLE
| CKA_LOCAL -> _CKA_LOCAL
| CKA_NEVER_EXTRACTABLE -> _CKA_NEVER_EXTRACTABLE
| CKA_ALWAYS_SENSITIVE -> _CKA_ALWAYS_SENSITIVE
| CKA_KEY_GEN_MECHANISM -> _CKA_KEY_GEN_MECHANISM
| CKA_MODIFIABLE -> _CKA_MODIFIABLE
| CKA_EC_PARAMS -> _CKA_EC_PARAMS
| CKA_EC_POINT -> _CKA_EC_POINT
| CKA_ALWAYS_AUTHENTICATE -> _CKA_ALWAYS_AUTHENTICATE
| CKA_WRAP_WITH_TRUSTED -> _CKA_WRAP_WITH_TRUSTED
| CKA_WRAP_TEMPLATE -> _CKA_WRAP_TEMPLATE
| CKA_UNWRAP_TEMPLATE -> _CKA_UNWRAP_TEMPLATE
| CKA_ALLOWED_MECHANISMS -> _CKA_ALLOWED_MECHANISMS
| CKA_CS_UNKNOWN ul -> ul
end
type (_, _) comparison =
| Equal : ('a, 'a) comparison
| Not_equal : int -> ('a, 'b) comparison
let compare a b =
let a = Encoding.make a in
let b = Encoding.make b in
Unsigned.ULong.compare a b
let compare' : type a b. a t -> b t -> (a, b) comparison =
fun a b ->
let a' = Encoding.make a in
let b' = Encoding.make b in
let n = P11_ulong.compare a' b' in
if n <> 0 then
Not_equal n
else
match (a, b) with
| (CKA_CLASS, CKA_CLASS) -> Equal
| (CKA_TOKEN, CKA_TOKEN) -> Equal
| (CKA_PRIVATE, CKA_PRIVATE) -> Equal
| (CKA_LABEL, CKA_LABEL) -> Equal
| (CKA_VALUE, CKA_VALUE) -> Equal
| (CKA_TRUSTED, CKA_TRUSTED) -> Equal
| (CKA_CHECK_VALUE, CKA_CHECK_VALUE) -> Equal
| (CKA_KEY_TYPE, CKA_KEY_TYPE) -> Equal
| (CKA_SUBJECT, CKA_SUBJECT) -> Equal
| (CKA_ID, CKA_ID) -> Equal
| (CKA_SENSITIVE, CKA_SENSITIVE) -> Equal
| (CKA_ENCRYPT, CKA_ENCRYPT) -> Equal
| (CKA_DECRYPT, CKA_DECRYPT) -> Equal
| (CKA_WRAP, CKA_WRAP) -> Equal
| (CKA_UNWRAP, CKA_UNWRAP) -> Equal
| (CKA_SIGN, CKA_SIGN) -> Equal
| (CKA_SIGN_RECOVER, CKA_SIGN_RECOVER) -> Equal
| (CKA_VERIFY, CKA_VERIFY) -> Equal
| (CKA_VERIFY_RECOVER, CKA_VERIFY_RECOVER) -> Equal
| (CKA_DERIVE, CKA_DERIVE) -> Equal
| (CKA_START_DATE, CKA_START_DATE) -> Equal
| (CKA_END_DATE, CKA_END_DATE) -> Equal
| (CKA_MODULUS, CKA_MODULUS) -> Equal
| (CKA_MODULUS_BITS, CKA_MODULUS_BITS) -> Equal
| (CKA_PUBLIC_EXPONENT, CKA_PUBLIC_EXPONENT) -> Equal
| (CKA_PRIVATE_EXPONENT, CKA_PRIVATE_EXPONENT) -> Equal
| (CKA_PRIME_1, CKA_PRIME_1) -> Equal
| (CKA_PRIME_2, CKA_PRIME_2) -> Equal
| (CKA_EXPONENT_1, CKA_EXPONENT_1) -> Equal
| (CKA_EXPONENT_2, CKA_EXPONENT_2) -> Equal
| (CKA_COEFFICIENT, CKA_COEFFICIENT) -> Equal
| (CKA_PRIME, CKA_PRIME) -> Equal
| (CKA_SUBPRIME, CKA_SUBPRIME) -> Equal
| (CKA_BASE, CKA_BASE) -> Equal
| (CKA_PRIME_BITS, CKA_PRIME_BITS) -> Equal
| (CKA_SUBPRIME_BITS, CKA_SUBPRIME_BITS) -> Equal
| (CKA_VALUE_LEN, CKA_VALUE_LEN) -> Equal
| (CKA_EXTRACTABLE, CKA_EXTRACTABLE) -> Equal
| (CKA_LOCAL, CKA_LOCAL) -> Equal
| (CKA_NEVER_EXTRACTABLE, CKA_NEVER_EXTRACTABLE) -> Equal
| (CKA_ALWAYS_SENSITIVE, CKA_ALWAYS_SENSITIVE) -> Equal
| (CKA_KEY_GEN_MECHANISM, CKA_KEY_GEN_MECHANISM) -> Equal
| (CKA_MODIFIABLE, CKA_MODIFIABLE) -> Equal
| (CKA_EC_PARAMS, CKA_EC_PARAMS) -> Equal
| (CKA_EC_POINT, CKA_EC_POINT) -> Equal
| (CKA_ALWAYS_AUTHENTICATE, CKA_ALWAYS_AUTHENTICATE) -> Equal
| (CKA_WRAP_WITH_TRUSTED, CKA_WRAP_WITH_TRUSTED) -> Equal
| (CKA_WRAP_TEMPLATE, CKA_WRAP_TEMPLATE) -> Equal
| (CKA_UNWRAP_TEMPLATE, CKA_UNWRAP_TEMPLATE) -> Equal
| (CKA_ALLOWED_MECHANISMS, CKA_ALLOWED_MECHANISMS) -> Equal
| (CKA_CS_UNKNOWN ul1, CKA_CS_UNKNOWN ul2) ->
let cmp = Unsigned.ULong.compare ul1 ul2 in
if cmp = 0 then
Equal
else
Not_equal cmp
| (CKA_CLASS, _) -> assert false
| (CKA_TOKEN, _) -> assert false
| (CKA_PRIVATE, _) -> assert false
| (CKA_LABEL, _) -> assert false
| (CKA_VALUE, _) -> assert false
| (CKA_TRUSTED, _) -> assert false
| (CKA_CHECK_VALUE, _) -> assert false
| (CKA_KEY_TYPE, _) -> assert false
| (CKA_SUBJECT, _) -> assert false
| (CKA_ID, _) -> assert false
| (CKA_SENSITIVE, _) -> assert false
| (CKA_ENCRYPT, _) -> assert false
| (CKA_DECRYPT, _) -> assert false
| (CKA_WRAP, _) -> assert false
| (CKA_UNWRAP, _) -> assert false
| (CKA_SIGN, _) -> assert false
| (CKA_SIGN_RECOVER, _) -> assert false
| (CKA_VERIFY, _) -> assert false
| (CKA_VERIFY_RECOVER, _) -> assert false
| (CKA_DERIVE, _) -> assert false
| (CKA_START_DATE, _) -> assert false
| (CKA_END_DATE, _) -> assert false
| (CKA_MODULUS, _) -> assert false
| (CKA_MODULUS_BITS, _) -> assert false
| (CKA_PUBLIC_EXPONENT, _) -> assert false
| (CKA_PRIVATE_EXPONENT, _) -> assert false
| (CKA_PRIME_1, _) -> assert false
| (CKA_PRIME_2, _) -> assert false
| (CKA_EXPONENT_1, _) -> assert false
| (CKA_EXPONENT_2, _) -> assert false
| (CKA_COEFFICIENT, _) -> assert false
| (CKA_PRIME, _) -> assert false
| (CKA_SUBPRIME, _) -> assert false
| (CKA_BASE, _) -> assert false
| (CKA_PRIME_BITS, _) -> assert false
| (CKA_SUBPRIME_BITS, _) -> assert false
| (CKA_VALUE_LEN, _) -> assert false
| (CKA_EXTRACTABLE, _) -> assert false
| (CKA_LOCAL, _) -> assert false
| (CKA_NEVER_EXTRACTABLE, _) -> assert false
| (CKA_ALWAYS_SENSITIVE, _) -> assert false
| (CKA_KEY_GEN_MECHANISM, _) -> assert false
| (CKA_MODIFIABLE, _) -> assert false
| (CKA_EC_PARAMS, _) -> assert false
| (CKA_EC_POINT, _) -> assert false
| (CKA_ALWAYS_AUTHENTICATE, _) -> assert false
| (CKA_WRAP_WITH_TRUSTED, _) -> assert false
| (CKA_WRAP_TEMPLATE, _) -> assert false
| (CKA_UNWRAP_TEMPLATE, _) -> assert false
| (CKA_ALLOWED_MECHANISMS, _) -> assert false
| (CKA_CS_UNKNOWN _, _) -> assert false
let compare_pack (Pack a) (Pack b) = compare a b
let equal a b = compare a b = 0
let equal_pack a b = compare_pack a b = 0
let show_pack (Pack attribute) = to_string attribute
let pp_pack fmt pack = Format.pp_print_string fmt (show_pack pack)
let to_json attribute =
try `String (to_string attribute) with
| Invalid_argument _ -> `Null
let pack_to_json (Pack attribute) = to_json attribute
let pack_to_yojson = pack_to_json
let pack_of_yojson =
P11_helpers.of_json_string ~typename:"attribute type" of_string
let elements =
[ Pack CKA_CLASS
; Pack CKA_TOKEN
; Pack CKA_PRIVATE
; Pack CKA_LABEL
; Pack CKA_VALUE
; Pack CKA_TRUSTED
; Pack CKA_KEY_TYPE
; Pack CKA_SUBJECT
; Pack CKA_ID
; Pack CKA_SENSITIVE
; Pack CKA_ENCRYPT
; Pack CKA_DECRYPT
; Pack CKA_WRAP
; Pack CKA_UNWRAP
; Pack CKA_SIGN
; Pack CKA_SIGN_RECOVER
; Pack CKA_VERIFY
; Pack CKA_VERIFY_RECOVER
; Pack CKA_DERIVE
; Pack CKA_MODULUS
; Pack CKA_MODULUS_BITS
; Pack CKA_PUBLIC_EXPONENT
; Pack CKA_PRIVATE_EXPONENT
; Pack CKA_PRIME_1
; Pack CKA_PRIME_2
; Pack CKA_EXPONENT_1
; Pack CKA_EXPONENT_2
; Pack CKA_COEFFICIENT
; Pack CKA_PRIME
; Pack CKA_SUBPRIME
; Pack CKA_BASE
; Pack CKA_PRIME_BITS
; Pack CKA_SUBPRIME_BITS
; Pack CKA_VALUE_LEN
; Pack CKA_EXTRACTABLE
; Pack CKA_LOCAL
; Pack CKA_NEVER_EXTRACTABLE
; Pack CKA_ALWAYS_SENSITIVE
; Pack CKA_KEY_GEN_MECHANISM
; Pack CKA_MODIFIABLE
; Pack CKA_EC_PARAMS
; Pack CKA_EC_POINT
; Pack CKA_ALWAYS_AUTHENTICATE
; Pack CKA_WRAP_WITH_TRUSTED ]
let known_attribute_types = List.map (fun (Pack c) -> to_string c) elements
| null | https://raw.githubusercontent.com/cryptosense/pkcs11/93c39c7a31c87f68f0beabf75ef90d85a782a983/lib/p11_attribute_type.ml | ocaml | type not_implemented = NOT_IMPLEMENTED of string
type 'a t =
| CKA_CLASS : P11_object_class.t t
| CKA_TOKEN : bool t
| CKA_PRIVATE : bool t
| CKA_LABEL : string t
| CKA_VALUE : string t
| CKA_TRUSTED : bool t
| CKA_CHECK_VALUE : not_implemented t
| CKA_KEY_TYPE : P11_key_type.t t
| CKA_SUBJECT : string t
| CKA_ID : string t
| CKA_SENSITIVE : bool t
| CKA_ENCRYPT : bool t
| CKA_DECRYPT : bool t
| CKA_WRAP : bool t
| CKA_UNWRAP : bool t
| CKA_SIGN : bool t
| CKA_SIGN_RECOVER : bool t
| CKA_VERIFY : bool t
| CKA_VERIFY_RECOVER : bool t
| CKA_DERIVE : bool t
| CKA_START_DATE : not_implemented t
| CKA_END_DATE : not_implemented t
| CKA_MODULUS : P11_bigint.t t
| CKA_MODULUS_BITS : P11_ulong.t t
| CKA_PUBLIC_EXPONENT : P11_bigint.t t
| CKA_PRIVATE_EXPONENT : P11_bigint.t t
| CKA_PRIME_1 : P11_bigint.t t
| CKA_PRIME_2 : P11_bigint.t t
| CKA_EXPONENT_1 : P11_bigint.t t
| CKA_EXPONENT_2 : P11_bigint.t t
| CKA_COEFFICIENT : P11_bigint.t t
| CKA_PRIME : P11_bigint.t t
| CKA_SUBPRIME : P11_bigint.t t
| CKA_BASE : P11_bigint.t t
| CKA_PRIME_BITS : P11_ulong.t t
| CKA_SUBPRIME_BITS : P11_ulong.t t
| CKA_VALUE_LEN : P11_ulong.t t
| CKA_EXTRACTABLE : bool t
| CKA_LOCAL : bool t
| CKA_NEVER_EXTRACTABLE : bool t
| CKA_ALWAYS_SENSITIVE : bool t
| CKA_KEY_GEN_MECHANISM : P11_key_gen_mechanism.t t
| CKA_MODIFIABLE : bool t
| CKA_EC_PARAMS : string t
| CKA_EC_POINT : string t
| CKA_ALWAYS_AUTHENTICATE : bool t
| CKA_WRAP_WITH_TRUSTED : bool t
| CKA_WRAP_TEMPLATE : not_implemented t
| CKA_UNWRAP_TEMPLATE : not_implemented t
| CKA_ALLOWED_MECHANISMS : not_implemented t
| CKA_CS_UNKNOWN : Unsigned.ULong.t -> not_implemented t
type pack = Pack : 'a t -> pack
let to_string : type a. a t -> string = function
| CKA_CLASS -> "CKA_CLASS"
| CKA_TOKEN -> "CKA_TOKEN"
| CKA_PRIVATE -> "CKA_PRIVATE"
| CKA_LABEL -> "CKA_LABEL"
| CKA_VALUE -> "CKA_VALUE"
| CKA_TRUSTED -> "CKA_TRUSTED"
| CKA_CHECK_VALUE -> "CKA_CHECK_VALUE"
| CKA_KEY_TYPE -> "CKA_KEY_TYPE"
| CKA_SUBJECT -> "CKA_SUBJECT"
| CKA_ID -> "CKA_ID"
| CKA_SENSITIVE -> "CKA_SENSITIVE"
| CKA_ENCRYPT -> "CKA_ENCRYPT"
| CKA_DECRYPT -> "CKA_DECRYPT"
| CKA_WRAP -> "CKA_WRAP"
| CKA_UNWRAP -> "CKA_UNWRAP"
| CKA_SIGN -> "CKA_SIGN"
| CKA_SIGN_RECOVER -> "CKA_SIGN_RECOVER"
| CKA_VERIFY -> "CKA_VERIFY"
| CKA_VERIFY_RECOVER -> "CKA_VERIFY_RECOVER"
| CKA_DERIVE -> "CKA_DERIVE"
| CKA_START_DATE -> "CKA_START_DATE"
| CKA_END_DATE -> "CKA_END_DATE"
| CKA_MODULUS -> "CKA_MODULUS"
| CKA_MODULUS_BITS -> "CKA_MODULUS_BITS"
| CKA_PUBLIC_EXPONENT -> "CKA_PUBLIC_EXPONENT"
| CKA_PRIVATE_EXPONENT -> "CKA_PRIVATE_EXPONENT"
| CKA_PRIME_1 -> "CKA_PRIME_1"
| CKA_PRIME_2 -> "CKA_PRIME_2"
| CKA_EXPONENT_1 -> "CKA_EXPONENT_1"
| CKA_EXPONENT_2 -> "CKA_EXPONENT_2"
| CKA_COEFFICIENT -> "CKA_COEFFICIENT"
| CKA_PRIME -> "CKA_PRIME"
| CKA_SUBPRIME -> "CKA_SUBPRIME"
| CKA_BASE -> "CKA_BASE"
| CKA_PRIME_BITS -> "CKA_PRIME_BITS"
| CKA_SUBPRIME_BITS -> "CKA_SUBPRIME_BITS"
| CKA_VALUE_LEN -> "CKA_VALUE_LEN"
| CKA_EXTRACTABLE -> "CKA_EXTRACTABLE"
| CKA_LOCAL -> "CKA_LOCAL"
| CKA_NEVER_EXTRACTABLE -> "CKA_NEVER_EXTRACTABLE"
| CKA_ALWAYS_SENSITIVE -> "CKA_ALWAYS_SENSITIVE"
| CKA_KEY_GEN_MECHANISM -> "CKA_KEY_GEN_MECHANISM"
| CKA_MODIFIABLE -> "CKA_MODIFIABLE"
(* | CKA_ECDSA_PARAMS -> "CKA_ECDSA_PARAMS" *)
| CKA_EC_PARAMS -> "CKA_EC_PARAMS"
| CKA_EC_POINT -> "CKA_EC_POINT"
| CKA_ALWAYS_AUTHENTICATE -> "CKA_ALWAYS_AUTHENTICATE"
| CKA_WRAP_WITH_TRUSTED -> "CKA_WRAP_WITH_TRUSTED"
| CKA_WRAP_TEMPLATE -> "CKA_WRAP_TEMPLATE"
| CKA_UNWRAP_TEMPLATE -> "CKA_UNWRAP_TEMPLATE"
| CKA_ALLOWED_MECHANISMS -> "CKA_ALLOWED_MECHANISMS"
| CKA_CS_UNKNOWN ul -> Unsigned.ULong.to_string ul
let of_string = function
| "CKA_CLASS" -> Pack CKA_CLASS
| "CKA_TOKEN" -> Pack CKA_TOKEN
| "CKA_PRIVATE" -> Pack CKA_PRIVATE
| "CKA_LABEL" -> Pack CKA_LABEL
| "CKA_VALUE" -> Pack CKA_VALUE
| "CKA_TRUSTED" -> Pack CKA_TRUSTED
| "CKA_CHECK_VALUE" -> Pack CKA_CHECK_VALUE
| "CKA_KEY_TYPE" -> Pack CKA_KEY_TYPE
| "CKA_SUBJECT" -> Pack CKA_SUBJECT
| "CKA_ID" -> Pack CKA_ID
| "CKA_SENSITIVE" -> Pack CKA_SENSITIVE
| "CKA_ENCRYPT" -> Pack CKA_ENCRYPT
| "CKA_DECRYPT" -> Pack CKA_DECRYPT
| "CKA_WRAP" -> Pack CKA_WRAP
| "CKA_UNWRAP" -> Pack CKA_UNWRAP
| "CKA_SIGN" -> Pack CKA_SIGN
| "CKA_SIGN_RECOVER" -> Pack CKA_SIGN_RECOVER
| "CKA_VERIFY" -> Pack CKA_VERIFY
| "CKA_VERIFY_RECOVER" -> Pack CKA_VERIFY_RECOVER
| "CKA_DERIVE" -> Pack CKA_DERIVE
| "CKA_START_DATE" -> Pack CKA_START_DATE
| "CKA_END_DATE" -> Pack CKA_END_DATE
| "CKA_MODULUS" -> Pack CKA_MODULUS
| "CKA_MODULUS_BITS" -> Pack CKA_MODULUS_BITS
| "CKA_PUBLIC_EXPONENT" -> Pack CKA_PUBLIC_EXPONENT
| "CKA_PRIVATE_EXPONENT" -> Pack CKA_PRIVATE_EXPONENT
| "CKA_PRIME_1" -> Pack CKA_PRIME_1
| "CKA_PRIME_2" -> Pack CKA_PRIME_2
| "CKA_EXPONENT_1" -> Pack CKA_EXPONENT_1
| "CKA_EXPONENT_2" -> Pack CKA_EXPONENT_2
| "CKA_COEFFICIENT" -> Pack CKA_COEFFICIENT
| "CKA_PRIME" -> Pack CKA_PRIME
| "CKA_SUBPRIME" -> Pack CKA_SUBPRIME
| "CKA_BASE" -> Pack CKA_BASE
| "CKA_PRIME_BITS" -> Pack CKA_PRIME_BITS
| "CKA_SUBPRIME_BITS" -> Pack CKA_SUBPRIME_BITS
| "CKA_SUB_PRIME_BITS" -> Pack CKA_SUBPRIME_BITS
| "CKA_VALUE_LEN" -> Pack CKA_VALUE_LEN
| "CKA_EXTRACTABLE" -> Pack CKA_EXTRACTABLE
| "CKA_LOCAL" -> Pack CKA_LOCAL
| "CKA_NEVER_EXTRACTABLE" -> Pack CKA_NEVER_EXTRACTABLE
| "CKA_ALWAYS_SENSITIVE" -> Pack CKA_ALWAYS_SENSITIVE
| "CKA_KEY_GEN_MECHANISM" -> Pack CKA_KEY_GEN_MECHANISM
| "CKA_MODIFIABLE" -> Pack CKA_MODIFIABLE
| "CKA_ECDSA_PARAMS" -> Pack CKA_EC_PARAMS
| "CKA_EC_PARAMS" -> Pack CKA_EC_PARAMS
| "CKA_EC_POINT" -> Pack CKA_EC_POINT
| "CKA_ALWAYS_AUTHENTICATE" -> Pack CKA_ALWAYS_AUTHENTICATE
| "CKA_WRAP_WITH_TRUSTED" -> Pack CKA_WRAP_WITH_TRUSTED
| "CKA_WRAP_TEMPLATE" -> Pack CKA_WRAP_TEMPLATE
| "CKA_UNWRAP_TEMPLATE" -> Pack CKA_UNWRAP_TEMPLATE
| "CKA_ALLOWED_MECHANISMS" -> Pack CKA_ALLOWED_MECHANISMS
| s -> (
try Pack (CKA_CS_UNKNOWN (Unsigned.ULong.of_string s)) with
| Failure _ -> invalid_arg "CK_ATTRIBUTE_TYPE.of_string")
module Encoding = struct
let ( ! ) x = Unsigned.ULong.of_string (Int64.to_string x)
let ckf_ARRAY_ATTRIBUTE = 0x40000000L
let _CKA_CLASS = !0x00000000L
let _CKA_TOKEN = !0x00000001L
let _CKA_PRIVATE = !0x00000002L
let _CKA_LABEL = !0x00000003L
let _CKA_APPLICATION = !0x00000010L
let _CKA_VALUE = !0x00000011L
let _CKA_OBJECT_ID = !0x00000012L
let _CKA_CERTIFICATE_TYPE = !0x00000080L
let _CKA_ISSUER = !0x00000081L
let _CKA_SERIAL_NUMBER = !0x00000082L
let _CKA_AC_ISSUER = !0x00000083L
let _CKA_OWNER = !0x00000084L
let _CKA_ATTR_TYPES = !0x00000085L
let _CKA_TRUSTED = !0x00000086L
let _CKA_CERTIFICATE_CATEGORY = !0x00000087L
let _CKA_JAVA_MIDP_SECURITY_DOMAIN = !0x00000088L
let _CKA_URL = !0x00000089L
let _CKA_HASH_OF_SUBJECT_PUBLIC_KEY = !0x0000008AL
let _CKA_HASH_OF_ISSUER_PUBLIC_KEY = !0x0000008BL
let _CKA_CHECK_VALUE = !0x00000090L
let _CKA_KEY_TYPE = !0x00000100L
let _CKA_SUBJECT = !0x00000101L
let _CKA_ID = !0x00000102L
let _CKA_SENSITIVE = !0x00000103L
let _CKA_ENCRYPT = !0x00000104L
let _CKA_DECRYPT = !0x00000105L
let _CKA_WRAP = !0x00000106L
let _CKA_UNWRAP = !0x00000107L
let _CKA_SIGN = !0x00000108L
let _CKA_SIGN_RECOVER = !0x00000109L
let _CKA_VERIFY = !0x0000010AL
let _CKA_VERIFY_RECOVER = !0x0000010BL
let _CKA_DERIVE = !0x0000010CL
let _CKA_START_DATE = !0x00000110L
let _CKA_END_DATE = !0x00000111L
let _CKA_MODULUS = !0x00000120L
let _CKA_MODULUS_BITS = !0x00000121L
let _CKA_PUBLIC_EXPONENT = !0x00000122L
let _CKA_PRIVATE_EXPONENT = !0x00000123L
let _CKA_PRIME_1 = !0x00000124L
let _CKA_PRIME_2 = !0x00000125L
let _CKA_EXPONENT_1 = !0x00000126L
let _CKA_EXPONENT_2 = !0x00000127L
let _CKA_COEFFICIENT = !0x00000128L
let _CKA_PRIME = !0x00000130L
let _CKA_SUBPRIME = !0x00000131L
let _CKA_BASE = !0x00000132L
let _CKA_PRIME_BITS = !0x00000133L
let _CKA_SUBPRIME_BITS = !0x00000134L
let _CKA_VALUE_BITS = !0x00000160L
let _CKA_VALUE_LEN = !0x00000161L
let _CKA_EXTRACTABLE = !0x00000162L
let _CKA_LOCAL = !0x00000163L
let _CKA_NEVER_EXTRACTABLE = !0x00000164L
let _CKA_ALWAYS_SENSITIVE = !0x00000165L
let _CKA_KEY_GEN_MECHANISM = !0x00000166L
let _CKA_MODIFIABLE = !0x00000170L
let _CKA_EC_PARAMS = !0x00000180L
let _CKA_EC_POINT = !0x00000181L
let _CKA_SECONDARY_AUTH = !0x00000200L
let _CKA_AUTH_PIN_FLAGS = !0x00000201L
let _CKA_ALWAYS_AUTHENTICATE = !0x00000202L
let _CKA_WRAP_WITH_TRUSTED = !0x00000210L
let _CKA_WRAP_TEMPLATE = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000211L)
let _CKA_UNWRAP_TEMPLATE = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000212L)
let _CKA_OTP_FORMAT = !0x00000220L
let _CKA_OTP_LENGTH = !0x00000221L
let _CKA_OTP_TIME_INTERVAL = !0x00000222L
let _CKA_OTP_USER_FRIENDLY_MODE = !0x00000223L
let _CKA_OTP_CHALLENGE_REQUIREMENT = !0x00000224L
let _CKA_OTP_TIME_REQUIREMENT = !0x00000225L
let _CKA_OTP_COUNTER_REQUIREMENT = !0x00000226L
let _CKA_OTP_PIN_REQUIREMENT = !0x00000227L
let _CKA_OTP_COUNTER = !0x0000022EL
let _CKA_OTP_TIME = !0x0000022FL
let _CKA_OTP_USER_IDENTIFIER = !0x0000022AL
let _CKA_OTP_SERVICE_IDENTIFIER = !0x0000022BL
let _CKA_OTP_SERVICE_LOGO = !0x0000022CL
let _CKA_OTP_SERVICE_LOGO_TYPE = !0x0000022DL
let _CKA_HW_FEATURE_TYPE = !0x00000300L
let _CKA_RESET_ON_INIT = !0x00000301L
let _CKA_HAS_RESET = !0x00000302L
let _CKA_PIXEL_X = !0x00000400L
let _CKA_PIXEL_Y = !0x00000401L
let _CKA_RESOLUTION = !0x00000402L
let _CKA_CHAR_ROWS = !0x00000403L
let _CKA_CHAR_COLUMNS = !0x00000404L
let _CKA_COLOR = !0x00000405L
let _CKA_BITS_PER_PIXEL = !0x00000406L
let _CKA_CHAR_SETS = !0x00000480L
let _CKA_ENCODING_METHODS = !0x00000481L
let _CKA_MIME_TYPES = !0x00000482L
let _CKA_MECHANISM_TYPE = !0x00000500L
let _CKA_REQUIRED_CMS_ATTRIBUTES = !0x00000501L
let _CKA_DEFAULT_CMS_ATTRIBUTES = !0x00000502L
let _CKA_SUPPORTED_CMS_ATTRIBUTES = !0x00000503L
let _CKA_ALLOWED_MECHANISMS = !(Int64.logor ckf_ARRAY_ATTRIBUTE 0x00000600L)
let _CKA_VENDOR_DEFINED = !0x80000000L
let make (type s) (x : s t) : Unsigned.ULong.t =
match x with
| CKA_CLASS -> _CKA_CLASS
| CKA_TOKEN -> _CKA_TOKEN
| CKA_PRIVATE -> _CKA_PRIVATE
| CKA_LABEL -> _CKA_LABEL
| CKA_VALUE -> _CKA_VALUE
| CKA_TRUSTED -> _CKA_TRUSTED
| CKA_CHECK_VALUE -> _CKA_CHECK_VALUE
| CKA_KEY_TYPE -> _CKA_KEY_TYPE
| CKA_SUBJECT -> _CKA_SUBJECT
| CKA_ID -> _CKA_ID
| CKA_SENSITIVE -> _CKA_SENSITIVE
| CKA_ENCRYPT -> _CKA_ENCRYPT
| CKA_DECRYPT -> _CKA_DECRYPT
| CKA_WRAP -> _CKA_WRAP
| CKA_UNWRAP -> _CKA_UNWRAP
| CKA_SIGN -> _CKA_SIGN
| CKA_SIGN_RECOVER -> _CKA_SIGN_RECOVER
| CKA_VERIFY -> _CKA_VERIFY
| CKA_VERIFY_RECOVER -> _CKA_VERIFY_RECOVER
| CKA_DERIVE -> _CKA_DERIVE
| CKA_START_DATE -> _CKA_START_DATE
| CKA_END_DATE -> _CKA_END_DATE
| CKA_MODULUS -> _CKA_MODULUS
| CKA_MODULUS_BITS -> _CKA_MODULUS_BITS
| CKA_PUBLIC_EXPONENT -> _CKA_PUBLIC_EXPONENT
| CKA_PRIVATE_EXPONENT -> _CKA_PRIVATE_EXPONENT
| CKA_PRIME_1 -> _CKA_PRIME_1
| CKA_PRIME_2 -> _CKA_PRIME_2
| CKA_EXPONENT_1 -> _CKA_EXPONENT_1
| CKA_EXPONENT_2 -> _CKA_EXPONENT_2
| CKA_COEFFICIENT -> _CKA_COEFFICIENT
| CKA_PRIME -> _CKA_PRIME
| CKA_SUBPRIME -> _CKA_SUBPRIME
| CKA_BASE -> _CKA_BASE
| CKA_PRIME_BITS -> _CKA_PRIME_BITS
| CKA_SUBPRIME_BITS -> _CKA_SUBPRIME_BITS
| CKA_VALUE_LEN -> _CKA_VALUE_LEN
| CKA_EXTRACTABLE -> _CKA_EXTRACTABLE
| CKA_LOCAL -> _CKA_LOCAL
| CKA_NEVER_EXTRACTABLE -> _CKA_NEVER_EXTRACTABLE
| CKA_ALWAYS_SENSITIVE -> _CKA_ALWAYS_SENSITIVE
| CKA_KEY_GEN_MECHANISM -> _CKA_KEY_GEN_MECHANISM
| CKA_MODIFIABLE -> _CKA_MODIFIABLE
| CKA_EC_PARAMS -> _CKA_EC_PARAMS
| CKA_EC_POINT -> _CKA_EC_POINT
| CKA_ALWAYS_AUTHENTICATE -> _CKA_ALWAYS_AUTHENTICATE
| CKA_WRAP_WITH_TRUSTED -> _CKA_WRAP_WITH_TRUSTED
| CKA_WRAP_TEMPLATE -> _CKA_WRAP_TEMPLATE
| CKA_UNWRAP_TEMPLATE -> _CKA_UNWRAP_TEMPLATE
| CKA_ALLOWED_MECHANISMS -> _CKA_ALLOWED_MECHANISMS
| CKA_CS_UNKNOWN ul -> ul
end
type (_, _) comparison =
| Equal : ('a, 'a) comparison
| Not_equal : int -> ('a, 'b) comparison
let compare a b =
let a = Encoding.make a in
let b = Encoding.make b in
Unsigned.ULong.compare a b
let compare' : type a b. a t -> b t -> (a, b) comparison =
fun a b ->
let a' = Encoding.make a in
let b' = Encoding.make b in
let n = P11_ulong.compare a' b' in
if n <> 0 then
Not_equal n
else
match (a, b) with
| (CKA_CLASS, CKA_CLASS) -> Equal
| (CKA_TOKEN, CKA_TOKEN) -> Equal
| (CKA_PRIVATE, CKA_PRIVATE) -> Equal
| (CKA_LABEL, CKA_LABEL) -> Equal
| (CKA_VALUE, CKA_VALUE) -> Equal
| (CKA_TRUSTED, CKA_TRUSTED) -> Equal
| (CKA_CHECK_VALUE, CKA_CHECK_VALUE) -> Equal
| (CKA_KEY_TYPE, CKA_KEY_TYPE) -> Equal
| (CKA_SUBJECT, CKA_SUBJECT) -> Equal
| (CKA_ID, CKA_ID) -> Equal
| (CKA_SENSITIVE, CKA_SENSITIVE) -> Equal
| (CKA_ENCRYPT, CKA_ENCRYPT) -> Equal
| (CKA_DECRYPT, CKA_DECRYPT) -> Equal
| (CKA_WRAP, CKA_WRAP) -> Equal
| (CKA_UNWRAP, CKA_UNWRAP) -> Equal
| (CKA_SIGN, CKA_SIGN) -> Equal
| (CKA_SIGN_RECOVER, CKA_SIGN_RECOVER) -> Equal
| (CKA_VERIFY, CKA_VERIFY) -> Equal
| (CKA_VERIFY_RECOVER, CKA_VERIFY_RECOVER) -> Equal
| (CKA_DERIVE, CKA_DERIVE) -> Equal
| (CKA_START_DATE, CKA_START_DATE) -> Equal
| (CKA_END_DATE, CKA_END_DATE) -> Equal
| (CKA_MODULUS, CKA_MODULUS) -> Equal
| (CKA_MODULUS_BITS, CKA_MODULUS_BITS) -> Equal
| (CKA_PUBLIC_EXPONENT, CKA_PUBLIC_EXPONENT) -> Equal
| (CKA_PRIVATE_EXPONENT, CKA_PRIVATE_EXPONENT) -> Equal
| (CKA_PRIME_1, CKA_PRIME_1) -> Equal
| (CKA_PRIME_2, CKA_PRIME_2) -> Equal
| (CKA_EXPONENT_1, CKA_EXPONENT_1) -> Equal
| (CKA_EXPONENT_2, CKA_EXPONENT_2) -> Equal
| (CKA_COEFFICIENT, CKA_COEFFICIENT) -> Equal
| (CKA_PRIME, CKA_PRIME) -> Equal
| (CKA_SUBPRIME, CKA_SUBPRIME) -> Equal
| (CKA_BASE, CKA_BASE) -> Equal
| (CKA_PRIME_BITS, CKA_PRIME_BITS) -> Equal
| (CKA_SUBPRIME_BITS, CKA_SUBPRIME_BITS) -> Equal
| (CKA_VALUE_LEN, CKA_VALUE_LEN) -> Equal
| (CKA_EXTRACTABLE, CKA_EXTRACTABLE) -> Equal
| (CKA_LOCAL, CKA_LOCAL) -> Equal
| (CKA_NEVER_EXTRACTABLE, CKA_NEVER_EXTRACTABLE) -> Equal
| (CKA_ALWAYS_SENSITIVE, CKA_ALWAYS_SENSITIVE) -> Equal
| (CKA_KEY_GEN_MECHANISM, CKA_KEY_GEN_MECHANISM) -> Equal
| (CKA_MODIFIABLE, CKA_MODIFIABLE) -> Equal
| (CKA_EC_PARAMS, CKA_EC_PARAMS) -> Equal
| (CKA_EC_POINT, CKA_EC_POINT) -> Equal
| (CKA_ALWAYS_AUTHENTICATE, CKA_ALWAYS_AUTHENTICATE) -> Equal
| (CKA_WRAP_WITH_TRUSTED, CKA_WRAP_WITH_TRUSTED) -> Equal
| (CKA_WRAP_TEMPLATE, CKA_WRAP_TEMPLATE) -> Equal
| (CKA_UNWRAP_TEMPLATE, CKA_UNWRAP_TEMPLATE) -> Equal
| (CKA_ALLOWED_MECHANISMS, CKA_ALLOWED_MECHANISMS) -> Equal
| (CKA_CS_UNKNOWN ul1, CKA_CS_UNKNOWN ul2) ->
let cmp = Unsigned.ULong.compare ul1 ul2 in
if cmp = 0 then
Equal
else
Not_equal cmp
| (CKA_CLASS, _) -> assert false
| (CKA_TOKEN, _) -> assert false
| (CKA_PRIVATE, _) -> assert false
| (CKA_LABEL, _) -> assert false
| (CKA_VALUE, _) -> assert false
| (CKA_TRUSTED, _) -> assert false
| (CKA_CHECK_VALUE, _) -> assert false
| (CKA_KEY_TYPE, _) -> assert false
| (CKA_SUBJECT, _) -> assert false
| (CKA_ID, _) -> assert false
| (CKA_SENSITIVE, _) -> assert false
| (CKA_ENCRYPT, _) -> assert false
| (CKA_DECRYPT, _) -> assert false
| (CKA_WRAP, _) -> assert false
| (CKA_UNWRAP, _) -> assert false
| (CKA_SIGN, _) -> assert false
| (CKA_SIGN_RECOVER, _) -> assert false
| (CKA_VERIFY, _) -> assert false
| (CKA_VERIFY_RECOVER, _) -> assert false
| (CKA_DERIVE, _) -> assert false
| (CKA_START_DATE, _) -> assert false
| (CKA_END_DATE, _) -> assert false
| (CKA_MODULUS, _) -> assert false
| (CKA_MODULUS_BITS, _) -> assert false
| (CKA_PUBLIC_EXPONENT, _) -> assert false
| (CKA_PRIVATE_EXPONENT, _) -> assert false
| (CKA_PRIME_1, _) -> assert false
| (CKA_PRIME_2, _) -> assert false
| (CKA_EXPONENT_1, _) -> assert false
| (CKA_EXPONENT_2, _) -> assert false
| (CKA_COEFFICIENT, _) -> assert false
| (CKA_PRIME, _) -> assert false
| (CKA_SUBPRIME, _) -> assert false
| (CKA_BASE, _) -> assert false
| (CKA_PRIME_BITS, _) -> assert false
| (CKA_SUBPRIME_BITS, _) -> assert false
| (CKA_VALUE_LEN, _) -> assert false
| (CKA_EXTRACTABLE, _) -> assert false
| (CKA_LOCAL, _) -> assert false
| (CKA_NEVER_EXTRACTABLE, _) -> assert false
| (CKA_ALWAYS_SENSITIVE, _) -> assert false
| (CKA_KEY_GEN_MECHANISM, _) -> assert false
| (CKA_MODIFIABLE, _) -> assert false
| (CKA_EC_PARAMS, _) -> assert false
| (CKA_EC_POINT, _) -> assert false
| (CKA_ALWAYS_AUTHENTICATE, _) -> assert false
| (CKA_WRAP_WITH_TRUSTED, _) -> assert false
| (CKA_WRAP_TEMPLATE, _) -> assert false
| (CKA_UNWRAP_TEMPLATE, _) -> assert false
| (CKA_ALLOWED_MECHANISMS, _) -> assert false
| (CKA_CS_UNKNOWN _, _) -> assert false
let compare_pack (Pack a) (Pack b) = compare a b
let equal a b = compare a b = 0
let equal_pack a b = compare_pack a b = 0
let show_pack (Pack attribute) = to_string attribute
let pp_pack fmt pack = Format.pp_print_string fmt (show_pack pack)
let to_json attribute =
try `String (to_string attribute) with
| Invalid_argument _ -> `Null
let pack_to_json (Pack attribute) = to_json attribute
let pack_to_yojson = pack_to_json
let pack_of_yojson =
P11_helpers.of_json_string ~typename:"attribute type" of_string
let elements =
[ Pack CKA_CLASS
; Pack CKA_TOKEN
; Pack CKA_PRIVATE
; Pack CKA_LABEL
; Pack CKA_VALUE
; Pack CKA_TRUSTED
; Pack CKA_KEY_TYPE
; Pack CKA_SUBJECT
; Pack CKA_ID
; Pack CKA_SENSITIVE
; Pack CKA_ENCRYPT
; Pack CKA_DECRYPT
; Pack CKA_WRAP
; Pack CKA_UNWRAP
; Pack CKA_SIGN
; Pack CKA_SIGN_RECOVER
; Pack CKA_VERIFY
; Pack CKA_VERIFY_RECOVER
; Pack CKA_DERIVE
; Pack CKA_MODULUS
; Pack CKA_MODULUS_BITS
; Pack CKA_PUBLIC_EXPONENT
; Pack CKA_PRIVATE_EXPONENT
; Pack CKA_PRIME_1
; Pack CKA_PRIME_2
; Pack CKA_EXPONENT_1
; Pack CKA_EXPONENT_2
; Pack CKA_COEFFICIENT
; Pack CKA_PRIME
; Pack CKA_SUBPRIME
; Pack CKA_BASE
; Pack CKA_PRIME_BITS
; Pack CKA_SUBPRIME_BITS
; Pack CKA_VALUE_LEN
; Pack CKA_EXTRACTABLE
; Pack CKA_LOCAL
; Pack CKA_NEVER_EXTRACTABLE
; Pack CKA_ALWAYS_SENSITIVE
; Pack CKA_KEY_GEN_MECHANISM
; Pack CKA_MODIFIABLE
; Pack CKA_EC_PARAMS
; Pack CKA_EC_POINT
; Pack CKA_ALWAYS_AUTHENTICATE
; Pack CKA_WRAP_WITH_TRUSTED ]
let known_attribute_types = List.map (fun (Pack c) -> to_string c) elements
|
|
cc037d5bd35cf00fd8bfcad0fda36aa2ef496a7371c649fe4b8980a380b0b49f | CarlosMChica/HaskellBook | StateT.hs | # LANGUAGE UndecidableInstances #
{-# LANGUAGE InstanceSigs #-}
module StateT where
newtype StateT s m a = StateT { runStateT :: s -> m (a, s) }
instance Functor f => Functor (StateT s f) where
fmap :: (a -> b) -> StateT s f a -> StateT s f b
fmap h (StateT smas) = StateT $ \s -> mapFst h <$> smas s
where mapFst :: (a -> b) -> (a, c) -> (b, c)
mapFst f (x, y) = (f x, y)
instance (Monad m) => Applicative (StateT s m) where
pure :: a -> StateT s m a
pure x = StateT $ \s -> pure (x, s)
(<*>) :: StateT s m (a -> b) -> StateT s m a -> StateT s m b
(StateT smab) <*> (StateT sma) = StateT $ \s ->
-- sma s >>= (\(x, s') -> (\(f, s'') -> (f x, s')) <$> s)
do
(f, s') <- smab s
(x, s'') <- sma s'
return (f x, s'')
instance Monad m => Monad (StateT s m) where
return = pure
(>>=) :: StateT s m a -> (a -> StateT s m b) -> StateT s m b
--(StateT sma) >>= f = StateT $ \s -> sma s >>= (\(a, s') -> runStateT (f a) s')
(StateT sma) >>= f = StateT $ \s -> do
(a, s') <- sma s
runStateT (f a) s'
| null | https://raw.githubusercontent.com/CarlosMChica/HaskellBook/86f82cf36cd00003b1a1aebf264e4b5d606ddfad/chapter26/StateT.hs | haskell | (StateT sma) >>= f = StateT $ \s -> sma s >>= (\(a, s') -> runStateT (f a) s') | # LANGUAGE UndecidableInstances #
{-# LANGUAGE InstanceSigs #-}
module StateT where
newtype StateT s m a = StateT { runStateT :: s -> m (a, s) }
instance Functor f => Functor (StateT s f) where
fmap :: (a -> b) -> StateT s f a -> StateT s f b
fmap h (StateT smas) = StateT $ \s -> mapFst h <$> smas s
where mapFst :: (a -> b) -> (a, c) -> (b, c)
mapFst f (x, y) = (f x, y)
instance (Monad m) => Applicative (StateT s m) where
pure :: a -> StateT s m a
pure x = StateT $ \s -> pure (x, s)
(<*>) :: StateT s m (a -> b) -> StateT s m a -> StateT s m b
(StateT smab) <*> (StateT sma) = StateT $ \s ->
-- sma s >>= (\(x, s') -> (\(f, s'') -> (f x, s')) <$> s)
do
(f, s') <- smab s
(x, s'') <- sma s'
return (f x, s'')
instance Monad m => Monad (StateT s m) where
return = pure
(>>=) :: StateT s m a -> (a -> StateT s m b) -> StateT s m b
(StateT sma) >>= f = StateT $ \s -> do
(a, s') <- sma s
runStateT (f a) s'
|
a077e62be843478f385c8e7abc07337179387490f941688ef183baa7dbc82915 | zotonic/zotonic | mod_microsoft.erl | @author < >
2021
%%
@doc Microsoft integration . Adds Microsoft / Azure login and other functionalities .
Copyright 2021
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(mod_microsoft).
-author("Marc Worrell <>").
-mod_title("Microsoft").
-mod_description("Adds Microsoft login and other Microsoft/Azure related features.").
-mod_prio(400).
-mod_depends([ admin, authentication, mod_oauth2 ]).
-export([
event/2
]).
-export([
get_config/1
]).
-include_lib("zotonic_core/include/zotonic.hrl").
You have to add your Microsoft appid and secret to the config .
By default , we only request access to the Microsoft user 's e - mail address .
% The 'openid' scope is always added when requesting the access token.
-define(MICROSOFT_SCOPE, <<"User.Read email">>).
The tenant : common , organizations , consumers or Microsot tenant i d 's
-define(MICROSOFT_TENANT, <<"common">>).
@doc Return the Microsoft appid , secret and scope
-spec get_config(z:context()) -> {AppId::string(), Secret::string(), Scope::string(), Tenant::string()}.
get_config(Context) ->
{ z_convert:to_list(m_config:get_value(mod_microsoft, appid, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, appsecret, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, scope, ?MICROSOFT_SCOPE, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, tenant, ?MICROSOFT_TENANT, Context))
}.
event(#submit{message=admin_microsoft}, Context) ->
case z_acl:is_allowed(use, mod_admin_config, Context) of
true ->
save_settings(Context),
z_render:growl(?__("Saved the Microsoft settings.", Context), Context);
false ->
z_render:growl(?__("You don't have permission to change the Microsoft settings.", Context), Context)
end.
save_settings(Context) ->
lists:foreach(fun ({Key, Value}) ->
case is_setting(Key) of
true -> m_config:set_value(mod_microsoft, binary_to_atom(Key, 'utf8'), Value, Context);
false -> ok
end
end,
z_context:get_q_all_noz(Context)).
is_setting(<<"appid">>) -> true;
is_setting(<<"appsecret">>) -> true;
is_setting(<<"scope">>) -> true;
is_setting(<<"useauth">>) -> true;
is_setting(<<"tenant">>) -> true;
is_setting(_) -> false.
| null | https://raw.githubusercontent.com/zotonic/zotonic/ea643a112ff26ffcb15b2f9f3f9b6f8db23e52f9/apps/zotonic_mod_microsoft/src/mod_microsoft.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The 'openid' scope is always added when requesting the access token. | @author < >
2021
@doc Microsoft integration . Adds Microsoft / Azure login and other functionalities .
Copyright 2021
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_microsoft).
-author("Marc Worrell <>").
-mod_title("Microsoft").
-mod_description("Adds Microsoft login and other Microsoft/Azure related features.").
-mod_prio(400).
-mod_depends([ admin, authentication, mod_oauth2 ]).
-export([
event/2
]).
-export([
get_config/1
]).
-include_lib("zotonic_core/include/zotonic.hrl").
You have to add your Microsoft appid and secret to the config .
By default , we only request access to the Microsoft user 's e - mail address .
-define(MICROSOFT_SCOPE, <<"User.Read email">>).
The tenant : common , organizations , consumers or Microsot tenant i d 's
-define(MICROSOFT_TENANT, <<"common">>).
@doc Return the Microsoft appid , secret and scope
-spec get_config(z:context()) -> {AppId::string(), Secret::string(), Scope::string(), Tenant::string()}.
get_config(Context) ->
{ z_convert:to_list(m_config:get_value(mod_microsoft, appid, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, appsecret, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, scope, ?MICROSOFT_SCOPE, Context)),
z_convert:to_list(m_config:get_value(mod_microsoft, tenant, ?MICROSOFT_TENANT, Context))
}.
event(#submit{message=admin_microsoft}, Context) ->
case z_acl:is_allowed(use, mod_admin_config, Context) of
true ->
save_settings(Context),
z_render:growl(?__("Saved the Microsoft settings.", Context), Context);
false ->
z_render:growl(?__("You don't have permission to change the Microsoft settings.", Context), Context)
end.
save_settings(Context) ->
lists:foreach(fun ({Key, Value}) ->
case is_setting(Key) of
true -> m_config:set_value(mod_microsoft, binary_to_atom(Key, 'utf8'), Value, Context);
false -> ok
end
end,
z_context:get_q_all_noz(Context)).
is_setting(<<"appid">>) -> true;
is_setting(<<"appsecret">>) -> true;
is_setting(<<"scope">>) -> true;
is_setting(<<"useauth">>) -> true;
is_setting(<<"tenant">>) -> true;
is_setting(_) -> false.
|
e9938a171da13075497eb31bed1b9ce546f7241bd233ea0dfc04d286356f68c6 | ska80/thinlisp | packages.lisp | (in-package "TL")
;;;; Module PACKAGES
;;; Copyright (c) 1999-2001 The ThinLisp Group
;;; Copyright (c) 1996 Gensym Corporation.
;;; All rights reserved.
;;; This file is part of ThinLisp.
;;; ThinLisp is open source; you can redistribute it and/or modify it
;;; under the terms of the ThinLisp License as published by the ThinLisp
;;; Group; either version 1 or (at your option) any later version.
;;; ThinLisp is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
;;; For additional information see <http://www.thinlisp.org/>
;;; Author:
;;;; Package Functions and Macros
;;; This module implements operations for symbols and packages.
;;; The following form allows us to reference lisp:*package* in our sources, yet
;;; still have a C translation for it in the emitted images.
(def-translatable-lisp-var *package*)
;;; The function `sxhash-string' takes a string and computes the hash number of
;;; that string.  The hash value is guaranteed to be of type (unsigned-byte 16).
;;; Note that the hash code for a one character string is equal to the char-code
;;; of the character. This fact is used when initializing the symbol T in
;;; tli::dump-makefile-information. -jallard 12/5/97
(declaim (functional sxhash-string))
(defun sxhash-string (text-string)
(declare (type string text-string)
(return-type fixnum))
(loop with hash fixnum = 0
for index from 0 below (length text-string)
do
(setq hash (logxor (+ (logand (ash hash 1) 65535)
(ash hash -15))
(char-code (char text-string index))))
finally (return hash)))
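;;; Illustrative sketch, not part of the original source: the one-character case
;;; mentioned above can be checked directly, e.g.
;;;   (sxhash-string "A")   ;; => 65, the char-code of #\A
;;;   (sxhash-string "AB")  ;; folds #\B into the shifted hash of "A" via logxor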
;;; The function tli::init-symbol takes a symbol with only the type tag
;;; initialized, and assigns into it the name an hash number of the symbol. It
;;; returns the symbol. The function tli::init-symbol-into-package is a
;;; combination of init-symbol and insert-symbol-into-package, this is called
;;; from the top level initializations of symbols in translated C code.
(defun tli::init-symbol (symbol string string-hash)
(declare (type symbol symbol)
(type t string)
(type fixnum string-hash)
(return-type symbol))
(tli::set-symbol-type-tag symbol)
(setf (tli::symbol-local-value symbol) t)
(setf (tli::symbol-external symbol) nil)
(setf (tli::symbol-balance symbol) 0)
(setf (tli::symbol-imported symbol) nil)
(setf (tli::symbol-name-hash symbol) string-hash)
(tli::set-symbol-name symbol string)
(tli::set-symbol-value-pointer symbol (tli::the-unbound-value))
(tli::set-non-null-symbol-plist symbol nil)
(tli::set-symbol-package symbol nil)
(tli::set-symbol-function symbol (tli::the-unbound-value))
(setf (tli::symbol-left-branch symbol) (tli::the-unbound-value))
(setf (tli::symbol-right-branch symbol) (tli::the-unbound-value))
symbol)
(defun tli::init-symbol-into-package (symbol string string-hash package)
(declare (type symbol symbol)
(type t string)
(type fixnum string-hash)
(type package package)
(return-type symbol))
(tli::init-symbol symbol string string-hash)
(when package
(insert-symbol-into-package symbol package))
symbol)
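;;; Illustrative sketch, not part of the original source (the symbol and package
;;; names are placeholders): translated top-level code initializes a statically
;;; allocated symbol roughly like
;;;   (tli::init-symbol-into-package (tli::make-empty-symbol)
;;;                                  "FOO" (sxhash-string "FOO")
;;;                                  (find-package-or-error "TL"))
;;; which fills in the symbol's fields and interns it at image startup.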
;;; The function `insert-symbol-into-package' takes a package and a symbol to be
;;; interned directly into that package. This function returns no values.
(defun insert-symbol-into-package (symbol package)
(declare (type symbol symbol)
(type package package)
(return-type void))
(if (tli::not-unbound-value-p (tli::package-root-symbol package))
;; Insert into balanced binary tree later, just a binary tree now. -jra
;; 4/8/96
(let ((hash-number (tli::symbol-name-hash symbol))
(name (tli::non-null-symbol-name symbol)))
(declare (type fixnum hash-number))
(loop with current-symbol = (tli::package-root-symbol package)
for last-symbol = current-symbol
for current-hash fixnum = (tli::symbol-name-hash current-symbol)
do
(cond ((< hash-number current-hash)
(setq current-symbol (tli::symbol-left-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-left-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
((> hash-number current-hash)
(setq current-symbol (tli::symbol-right-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-right-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
(t
(let ((compare-result
(tli::string-compare
name (tli::non-null-symbol-name current-symbol))))
(declare (type fixnum compare-result))
(cond
((< compare-result 0)
(setq current-symbol (tli::symbol-left-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-left-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
((> compare-result 0)
(setq current-symbol (tli::symbol-right-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-right-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
(t
(error "Can't insert ~a in ~a, a symbol with that name already exists."
symbol package))))))))
(progn
(tli::set-symbol-package symbol package)
(setf (tli::package-root-symbol package) symbol))))
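;;; Illustrative sketch, not part of the original source: the first insertion into
;;; a fresh package simply becomes the tree root, and later insertions descend by
;;; comparing hash numbers and then names, e.g.
;;;   (insert-symbol-into-package (tli::init-symbol (tli::make-empty-symbol)
;;;                                                 "FOO" (sxhash-string "FOO"))
;;;                               some-package)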
;;; The function `find-symbol-in-single-package' takes a string, the hash for
;;; that string, and a package. If there is a symbol with the given name
;;; directly interned in this package, then this function will return that
;;; symbol. If no such symbol exists, then this function returns 0. Note that
;;; NIL is not the default return value, since that could be the symbol that is
;;; returned from this function. Also note that this function is not searching
;;; through the used packages of the given package, but only searches the one
;;; given package. Even further, note that this function returns the symbol as
;;; found in the package data structure. The caller must check whether or not
;;; the symbol is an imported pointer to another symbol or if the symbol is
;;; external.
(defun find-symbol-in-single-package (string string-hash package)
(declare (type string string)
(type fixnum string-hash)
(type package package)
(return-type t))
(loop with current-symbol = (tli::package-root-symbol package)
initially (unless (tli::not-unbound-value-p current-symbol)
(return 0))
while (tli::not-unbound-value-p current-symbol)
for current-hash fixnum = (tli::symbol-name-hash current-symbol)
do
(cond ((< string-hash current-hash)
(setq current-symbol (tli::symbol-left-branch current-symbol)))
((> string-hash current-hash)
(setq current-symbol (tli::symbol-right-branch current-symbol)))
(t
(let ((compare-result (tli::string-compare
string
(tli::non-null-symbol-name current-symbol))))
(declare (type fixnum compare-result))
(cond
((< compare-result 0)
(setq current-symbol (tli::symbol-left-branch current-symbol)))
((> compare-result 0)
(setq current-symbol (tli::symbol-right-branch current-symbol)))
(t
(return current-symbol))))))
finally (return 0)))
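;;; Illustrative sketch, not part of the original source: a direct lookup such as
;;;   (find-symbol-in-single-package "CAR" (sxhash-string "CAR") pkg)
;;; walks pkg's binary tree comparing hash numbers first, falling back to
;;; tli::string-compare only on equal hashes, and returns 0 when no node matches.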
;;; The variable `all-packages' contains a list of all packages in the current
;;; environment. The function `list-all-packages' is the CLtL2 interface to
;;; fetching that list.
(defvar all-packages)
(declaim (functional list-all-packages))
(defun list-all-packages ()
all-packages)
(defun find-package-1 (string-or-symbol-or-package)
(declare (return-type t))
(typecase string-or-symbol-or-package
(string
(let ((name string-or-symbol-or-package))
(declare (type string name))
(loop for package in all-packages do
(when (string= (package-name package) name)
(return package)))))
(symbol
(find-package-1 (symbol-name string-or-symbol-or-package)))
(package
string-or-symbol-or-package)
(t
(error "FIND-PACKAGE given bad argument ~a."
string-or-symbol-or-package))))
;;; The macro `find-package-or-error' takes a string, symbol, or package and
;;; returns the package named by the argument. Note that this differs from
;;; find-package, which returns NIL if no package can be found. This macro
;;; attempts to optimize cases where the argument can be proven to be a package.
(defmacro find-package-or-error (&environment env package-or-name)
(if (tli::tl-subtypep (tli::expression-result-type package-or-name env)
'package)
package-or-name
`(find-package-or-error-1 ,package-or-name)))
(defun find-package-or-error-1 (name)
(declare (return-type package))
(let ((find-result (find-package name)))
(if find-result
find-result
(error "No package with name ~a could be found." name))))
;;; The macro `package-use-list' fetches the use-list from a package after
;;; coercing it to a package.
(defmacro package-use-list (package-or-package-name)
`(tli::package-use-list-internal
(find-package-or-error ,package-or-package-name)))
(defun make-package-1 (name use)
(declare (type string name)
(return-type t))
(with-permanent-area
(let* ((name-to-use (string-upcase name))
(use-list (loop for used in use
for used-package = (find-package-or-error used)
collect used-package))
(found-package? (find-package-1 name-to-use)))
(cond
(found-package?
#-translator
(error "Cannot make-package ~s, that package already exists."
name-to-use)
;; When in translation, check that the given use list matches the
;; already existing use list, else signal an error. This relaxing of
;; the error case is done since we pre-create all packages that have
;; constant folded symbols interned in them.  -jallard 12/4/97
(unless (loop for used-package-cons = (package-use-list found-package?)
then (cdr-of-cons used-package-cons)
for new-package-cons = use-list
then (cdr-of-cons new-package-cons)
while (and used-package-cons new-package-cons)
always (eq (car-of-cons used-package-cons)
(car-of-cons new-package-cons))
finally
(when (or used-package-cons new-package-cons)
(return nil)))
(error "Use list for ~a differs from compile-time list: ~@
compile-time = ~s, new = ~s."
name-to-use (tli::package-use-list-internal found-package?)
use))
found-package?)
(t
(let ((new-package (tli::make-new-package name-to-use use-list)))
(setq all-packages (cons new-package all-packages))
new-package))))))
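;;; Illustrative sketch, not part of the original source (the package name is a
;;; placeholder):
;;;   (make-package-1 "my-utils" '("TL"))
;;; upcases the name to "MY-UTILS", resolves the use list, and either returns a
;;; pre-created package after checking its use list or allocates a new one in the
;;; permanent area and pushes it onto all-packages.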
;;; The macro `find-symbol' expands into a call to find-symbol-in-package. This
;;; function requires an actual package argument and is given the already
;;; computed hash of the string.
(defmacro find-symbol (string &optional package)
(let ((package-arg (if package
`(find-package-or-error ,package)
'*package*)))
(if (or (constantp string) (symbolp string))
`(find-symbol-in-package
,string (sxhash-string ,string) ,package-arg)
(let ((string-var (gensym)))
`(let ((,string-var ,string))
(declare (type string ,string-var))
(find-symbol-in-package
,string-var (sxhash-string ,string-var) ,package-arg))))))
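;;; Illustrative sketch, not part of the original source: with a constant string,
;;;   (find-symbol "CAR" "TL")
;;; expands directly into
;;;   (find-symbol-in-package "CAR" (sxhash-string "CAR") (find-package-or-error "TL"))
;;; while a non-constant string argument is first bound to a temporary so that it
;;; is only evaluated once.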
(defun find-symbol-in-package (string string-hash package)
(declare (type string string)
(type fixnum string-hash)
(type package package))
#-translator
(declare (ignore string-hash))
#-translator
(return-from find-symbol-in-package
(ab-lisp::find-symbol string package))
#+translator
(let ((found-symbol
(find-symbol-in-single-package string string-hash package)))
(if (eql found-symbol 0)
(loop for used-package in (package-use-list package)
for found-inherited-symbol
= (find-symbol-in-single-package
string string-hash used-package)
do
(unless (eql found-inherited-symbol 0)
(return (values
(if (tli::symbol-imported found-inherited-symbol)
(tli::symbol-value-pointer found-inherited-symbol)
found-inherited-symbol)
:inherited)))
finally
(return (values nil nil)))
(values (if (tli::symbol-imported found-symbol)
(tli::symbol-value-pointer found-symbol)
found-symbol)
(if (tli::symbol-external found-symbol)
:external
:internal)))))
;;; The function `intern-string-in-package' implements intern. It takes the
;;; string, an already computed hash number for the string, and the package
;;; object to intern any newly created symbol into.
(defun intern-string-in-package (string hash-number package)
(declare (type string string)
(type fixnum hash-number)
(type package package))
(multiple-value-bind (symbol found?)
(find-symbol-in-package string hash-number package)
(if found?
(values symbol found?)
(with-permanent-area
(let ((new-symbol (tli::make-empty-symbol)))
(tli::init-symbol new-symbol string hash-number)
(tli::set-symbol-package new-symbol package)
(insert-symbol-into-package new-symbol package)
(values new-symbol :internal))))))
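;;; Illustrative sketch, not part of the original source (the symbol name is a
;;; placeholder):
;;;   (intern-string-in-package "NEW-SYM" (sxhash-string "NEW-SYM") *package*)
;;; returns an already accessible symbol together with its status, or else
;;; permanently allocates a fresh symbol, inserts it into the package tree, and
;;; returns (values new-symbol :internal).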
;;; The function `import' takes a symbol or list of symbols, and an optional
;;; package.  It performs the usual CLtL import operation.  Note that this is a
;;; consing operation.
;;; The way that an imported symbol is represented in the package is that a new
;;; symbol structure is allocated, and the symbol-imported bit is set on that
;;; symbol. The symbol-value of this newly created "indirection symbol" is then
;;; set to the symbol being imported. Within find-symbol-in-package, when a
;;; symbol is found by find-symbol-in-single-package, find-symbol-in-package
;;; then checks if the returned symbol structure has the symbol-imported bit
;;; set. If so, find-symbol-in-package returns the symbol-value of the symbol
;;; structure returned from find-symbol-in-single-package. In this manner,
;;; symbol structures are used to indirect from the binary tree of one package
;;; to imported symbols brought in from other packages. Note that if the
;;; imported symbol is then exported, this happens by setting the exported bit
;;; on the indirection symbol, not the originally imported symbol.  -jra 5/7/96
(defun import (symbol-or-symbol-list &optional (package-arg *package*))
(declare (return-type t))
(with-permanent-area ()
(let ((symbol-list (if (listp symbol-or-symbol-list)
symbol-or-symbol-list
(tli::list-dynamic-extent symbol-or-symbol-list)))
(package (find-package-or-error package-arg)))
(loop for symbol in symbol-list
for symbol-name = (symbol-name symbol)
for symbol-hash fixnum = (tli::symbol-name-hash symbol)
do
(multiple-value-bind (found-symbol found?)
(find-symbol-in-package symbol-name symbol-hash package)
(when (and found? (not (eq found-symbol symbol)))
(error "The symbol ~a cannot be imported into ~a, a symbol with ~
that name is already accessible."
symbol package)))
(cond ((null (symbol-package symbol))
(insert-symbol-into-package symbol package))
(t
(let ((import-symbol (tli::make-empty-symbol)))
(tli::init-symbol import-symbol symbol-name symbol-hash)
(setf (tli::symbol-imported import-symbol) t)
(setf (tli::symbol-value-pointer import-symbol) symbol)
(insert-symbol-into-package import-symbol package)))))
t)))
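;;; Illustrative sketch, not part of the original source (package and symbol names
;;; are placeholders): after
;;;   (import 'other-pkg::foo "TL")
;;; the TL package holds a fresh indirection symbol named "FOO" whose
;;; symbol-imported bit is set and whose value pointer is other-pkg::foo, so
;;; find-symbol-in-package returns the original symbol when "FOO" is looked up.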
;;; The function `export' takes a symbol or list of symbols, and an optional
;;; package. This version of export does not perform all of the name conflict
;;; checks called for in CLtL2, Section 11.5. It is presumed that the
;;; development Lisp environment has weeded these out.
(defun export (symbol-or-symbol-list &optional (package-arg *package*))
(declare (return-type t))
(with-permanent-area ()
(let ((symbol-list (if (listp symbol-or-symbol-list)
symbol-or-symbol-list
(tli::list-dynamic-extent symbol-or-symbol-list)))
(package (find-package-or-error package-arg)))
(loop for symbol in symbol-list
for symbol-name = (symbol-name symbol)
for symbol-hash fixnum = (tli::symbol-name-hash symbol)
do
(multiple-value-bind (found-symbol found?)
(find-symbol-in-package symbol-name symbol-hash package)
(unless (and found? (eq found-symbol symbol))
(error "The symbol ~a cannot be exported from ~a, it is not ~
accessible from that package."
symbol package))
(case found?
((:external)
;; Already exported, do nothing.
nil)
((:internal)
;; Already found in this package's binary tree, merely set the external
;; bit.
(setf (tli::symbol-external symbol) t))
((:inherited)
;; It is accessible, but not in this package's data structures.
;; Import an indirection symbol, and set the external bit on it.
(let ((import-symbol (tli::make-empty-symbol)))
(tli::init-symbol import-symbol symbol-name symbol-hash)
(setf (tli::symbol-imported import-symbol) t)
(setf (tli::symbol-value-pointer import-symbol) symbol)
(insert-symbol-into-package import-symbol package)
(setf (tli::symbol-external import-symbol) t)))
(t
(error "Bad second value ~a from find-symbol received by export."
found?)))))
t)))
;;; The predicate `keywordp' tests if a given object is a keyword.
(defvar *keyword-package* (find-package "KEYWORD"))
(defmacro keywordp (object)
(if (symbolp object)
`(and (symbolp ,object)
,object
(eq (symbol-package ,object) *keyword-package*))
(let ((symbol-evaled (gensym)))
`(let ((,symbol-evaled ,object))
(keywordp ,symbol-evaled)))))
;;; The function make-gensymed-symbol implements translated calls to gensym.
(defvar *gensym-counter* 1)
(declaim (type fixnum *gensym-counter*))
(defun make-gensymed-symbol (string-or-counter?)
(declare (return-type symbol))
(let ((prefix "G")
(counter *gensym-counter*))
(declare (type fixnum counter))
(cond
((fixnump string-or-counter?)
(setq counter string-or-counter?))
((stringp string-or-counter?)
(setq prefix string-or-counter?)
(incf *gensym-counter*))
(t
;; Else, ignore the argument, do the default.
(incf *gensym-counter*)))
(with-permanent-area
(let* ((counter-length
(1+ (truncate (the double-float
(log (float counter 1.0) 10)))))
(new-name (make-string
(+ counter-length (length (the string prefix))))))
(declare (type fixnum counter-length))
(setf (fill-pointer new-name) 0)
(format new-name "~a~v,'0d" prefix counter-length counter)
(make-symbol new-name)))))
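;;; Illustrative sketch, not part of the original source: with *gensym-counter*
;;; at 7,
;;;   (make-gensymed-symbol "TEMP")
;;; builds the name "TEMP7" from the prefix and the counter width, returns a
;;; fresh uninterned symbol, and increments the counter for the next call.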
;;; The TL internal function `write-symbol' is called from tl:write to output
;;; symbols.
(defun write-symbol (symbol case stream?)
(declare (type symbol symbol)
(return-type void))
(let ((name-string (symbol-name symbol)))
(when *print-escape*
(if (keywordp symbol)
(write-string ":" stream?)
(let ((home-package (symbol-package symbol)))
(when (and (not (eq home-package *package*))
(not (eq (find-symbol name-string *package*) symbol)))
(write-string (package-name home-package) stream?)
(write-string
(if (tli::symbol-external symbol) ":" "::")
stream?)))))
(let* ((name name-string)
(length (length (the string name-string)))
(stream (get-string-or-file-stream-for-output-macro
stream? length))
(string? (stringp stream)))
(declare (type string name)
(type fixnum length))
;; Do the best optimization for the default case, :upcase.
(cond
((eq case :upcase)
(if string?
(dotimes (index length)
(write-char-to-string (char-upcase (char name index)) stream))
(dotimes (index length)
(write-char-to-file-stream
(char-upcase (char name index)) stream))))
((eq case :downcase)
(dotimes (index length)
(write-char-to-string-or-file-stream
(char-downcase (char name index)) stream string?)))
;; The :capitalize case.
(t
(do* ((first t)
(index 0 (+ index 1)))
((>= index length))
(declare (type fixnum index))
(let* ((char (char name index))
(alpha-char? (alpha-char-p char)))
(cond (first
(when alpha-char?
(setq char (char-upcase char))
(setq first nil)))
((not alpha-char?)
(setq first t))
(t
(setq char (char-downcase char))))
(write-char-to-string-or-file-stream char stream string?))))))))
;;;; Package Specific Symbols
;;; (jh, 9/27/90)
;;; with-package-specific-symbols
;;; (package-specs) &rest body
;;;
;;; On occasion we need to mention a package-qualified symbol even though we
;;; aren't sure that the package exists. This has happened recently in our
;;; translator efforts, where the translator's host has a package that the
;;; destination Lisp lacks.  For example, suppose a Lucid-specific memory
;;; allocation form contains the symbol lucid::change-memory-management. Although
;;; this form need not be present in the resulting Chestnut Lisp image, the symbol
;;; is read anyway, since the reader has to read in enough of a form to be able
;;; to ignore it.  This causes an error in Chestnut when the reader reaches the
;;; package qualifier for the unknown "LUCID" package.
;;;
;;; Note that this is only a problem in macros, since any defuns with
;;; platform-specific symbols are not read in the host and never make it to
;;; the macro phase as long as the conditionalization #-translator is used.
;;;
;;; The solution is to refer to a package-qualified symbol by its printname and
;;; package name and thus bypass the symbol-reader entirely. The macro
;;; with-package-specific-symbols accomplishes this by interning and substituting
;;; the package-qualified symbol into a macroexpansion only when it finds that the
;;; package exists. When the package doesn't exist, with-package-specific-symbol
;;; leaves the symbol alone, since the code will never be executed anyway. This
;;; arrangement gives the reader something to skip over, does the right thing when
;;; we actually intend to use the code, and makes the code more perspicuous than
;;; sprinkling it throughout with (intern "SYMBOL" "PACKAGE") forms.
;;;
;;; The form package-specs is of the format
;;;   (package1 |::| symbol1
;;; ...)
;;; and is intended to resemble the package-qualified symbols visually. Note that
;;; the middle item, the symbol whose printname consists of two colons, is
;;; syntactic sugar.
;;;
;;; This macro also has the advantage of documenting in one place the
;;; platform-specific calls we make.
(defmacro with-package-specific-symbols (package-specs &body body)
(let (platform-specific-symbol-alist)
(do ((these-package-specs package-specs (cdddr these-package-specs)))
((null these-package-specs))
(let ((the-package (first these-package-specs))
(symbol (third these-package-specs)))
(when (find-package the-package)
(push
(cons symbol
(intern (symbol-name symbol) (symbol-name the-package)))
platform-specific-symbol-alist))))
(if (= (length body) 1)
(setq body (car body))
(setq body `(progn ,@body)))
(if platform-specific-symbol-alist
`(sublis ',platform-specific-symbol-alist
,body)
body)))
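;;; A brief usage sketch (added for illustration; the keyword argument below
;;; is hypothetical, not from the original source). Within a macro
;;; definition the body form produces the expansion, and the LUCID-qualified
;;; symbol is substituted into it only when the LUCID package actually
;;; exists in the host Lisp:
;;;
;;; (defmacro tune-lucid-memory ()
;;;   (with-package-specific-symbols (lucid |::| change-memory-management)
;;;     '(change-memory-management :expand-lispheap 1000000)))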
| null | https://raw.githubusercontent.com/ska80/thinlisp/173573a723256d901887f1cbc26d5403025879ca/tl/lisp/packages.lisp | lisp | Module PACKAGES
All rights reserved.
you can redistribute it and/or modify it
either version 1 or ( at your option ) any later version .
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
This module implements operations for symbols and packages.
The following form allows us to reference lisp:*package* in our sources, yet
still have a C translation for it in the emitted images.
The function `sxhash-string' takes a string and computes the hash number of
of the character. This fact is used when initializing the symbol T in
tli::dump-makefile-information. -jallard 12/5/97
The function tli::init-symbol takes a symbol with only the type tag
initialized, and assigns into it the name an hash number of the symbol. It
returns the symbol. The function tli::init-symbol-into-package is a
combination of init-symbol and insert-symbol-into-package, this is called
from the top level initializations of symbols in translated C code.
The function `insert-symbol-into-package' takes a package and a symbol to be
interned directly into that package. This function returns no values.
Insert into balanced binary tree later, just a binary tree now. -jra
4/8/96
The function `find-symbol-in-single-package' takes a string, the hash for
that string, and a package. If there is a symbol with the given name
directly interned in this package, then this function will return that
symbol. If no such symbol exists, then this function returns 0. Note that
returned from this function. Also note that this function is not searching
given package. Even further, note that this function returns the symbol as
found in the package data structure. The caller must check whether or not
the symbol is an imported pointer to another symbol or if the symbol is
external.
The variable `all-packages' contains a list of all packages in the current
environment. The function `list-all-packages' is the CLtL2 interface to
fetching that list.
The macro `find-package-or-error' takes a string, symbol, or package and
returns the package named by the argument. Note that this differs from
find-package, which returns NIL if no package can be found. This macro
attempts to optimize cases where the argument can be proven to be a package.
The macro `package-use-list' fetches the use-list from a package after
coercing it to a package.
When in translation, check that the given use list matches the
already existing use list, else signal an error. This relaxing of
the error case is done since we pre-create all packages that have
The macro `find-symbol' expands into a call to find-symbol-in-package. This
function requires an actual package argument and is given the already
computed hash of the string.
The function `intern-string-in-package' implements intern. It takes the
string, an already computed hash number for the string, and the package
object to intern any newly created symbol into.
The function `import' takes a symbol or list of symbols, and an optional
consing operation.
The way that an imported symbol is represented in the package is that a new
symbol structure is allocated, and the symbol-imported bit is set on that
symbol. The symbol-value of this newly created "indirection symbol" is then
set to the symbol being imported. Within find-symbol-in-package, when a
symbol is found by find-symbol-in-single-package, find-symbol-in-package
then checks if the returned symbol structure has the symbol-imported bit
set. If so, find-symbol-in-package returns the symbol-value of the symbol
structure returned from find-symbol-in-single-package. In this manner,
to imported symbols brought in from other packages. Note that if the
imported symbol is then exported, this happens by setting the exported bit
The function `export' takes a symbol or list of symbols, and an optional
package. This version of export does not perform all of the name conflict
checks called for in CLtL2, Section 11.5. It is presumed that the
development Lisp environment has weeded these out.
Already exported, do nothing.
Already found in this package's binary tree, merely the external
bit.
It is accessible, but not in this package's data strucutures.
Import an indirection symbol, and set the external bit on it.
The predicate `keywordp' tests if a given object is a keyword.
Else, ignore the argument, do the default.
The TL internal function `write-symbol' is called from tl:write to output
symbols.
Do the best optimization for the default case, :upcase.
The :capitalize case.
Package Specific Symbols
with-package-specific-symbols
(package-specs) &rest body
On occasion we need to mention a package-qualified symbol even though we
aren't sure that the package exists. This has happened recently in our
allocation form contains the symbol lucid::change-memory-management. Although
this form need not be present in the resulting Chestnut Lisp image, the symbol
is read anyway, since the reader has to read in enough of a form to be able
package qualifier for the unknown "LUCID" package.
Note that this is only a problem in macros, since any defuns with
the macro phase as long as the conditionalization #-translator is used.
The solution is to refer to a package-qualified symbol by its printname and
package name and thus bypass the symbol-reader entirely. The macro
with-package-specific-symbols accomplishes this by interning and substituting
the package-qualified symbol into a macroexpansion only when it finds that the
package exists. When the package doesn't exist, with-package-specific-symbol
leaves the symbol alone, since the code will never be executed anyway. This
arrangement gives the reader something to skip over, does the right thing when
we actually intend to use the code, and makes the code more perspicuous than
sprinkling it throughout with (intern "SYMBOL" "PACKAGE") forms.
The form package-specs is of the format
...)
and is intended to resemble the package-qualified symbols visually. Note that
syntactic sugar.
platform-specific calls we make. | (in-package "TL")
Copyright ( c ) 1999 - 2001 The ThinLisp Group
Copyright ( c ) 1996 Gensym Corporation .
This file is part of ThinLisp .
under the terms of the ThinLisp License as published by the ThinLisp
ThinLisp is distributed in the hope that it will be useful , but
For additional information see < / >
Author :
Package Functions and Macros
(def-translatable-lisp-var *package*)
that string . The hash value is guaranteed to be of type ( unsigned - byte 16 ) .
Note that the hash code for a one character string is equal to the char - code
(declaim (functional sxhash-string))
(defun sxhash-string (text-string)
(declare (type string text-string)
(return-type fixnum))
(loop with hash fixnum = 0
for index from 0 below (length text-string)
do
(setq hash (logxor (+ (logand (ash hash 1) 65535)
(ash hash -15))
(char-code (char text-string index))))
finally (return hash)))
(defun tli::init-symbol (symbol string string-hash)
(declare (type symbol symbol)
(type t string)
(type fixnum string-hash)
(return-type symbol))
(tli::set-symbol-type-tag symbol)
(setf (tli::symbol-local-value symbol) t)
(setf (tli::symbol-external symbol) nil)
(setf (tli::symbol-balance symbol) 0)
(setf (tli::symbol-imported symbol) nil)
(setf (tli::symbol-name-hash symbol) string-hash)
(tli::set-symbol-name symbol string)
(tli::set-symbol-value-pointer symbol (tli::the-unbound-value))
(tli::set-non-null-symbol-plist symbol nil)
(tli::set-symbol-package symbol nil)
(tli::set-symbol-function symbol (tli::the-unbound-value))
(setf (tli::symbol-left-branch symbol) (tli::the-unbound-value))
(setf (tli::symbol-right-branch symbol) (tli::the-unbound-value))
symbol)
(defun tli::init-symbol-into-package (symbol string string-hash package)
(declare (type symbol symbol)
(type t string)
(type fixnum string-hash)
(type package package)
(return-type symbol))
(tli::init-symbol symbol string string-hash)
(when package
(insert-symbol-into-package symbol package))
symbol)
(defun insert-symbol-into-package (symbol package)
(declare (type symbol symbol)
(type package package)
(return-type void))
(if (tli::not-unbound-value-p (tli::package-root-symbol package))
(let ((hash-number (tli::symbol-name-hash symbol))
(name (tli::non-null-symbol-name symbol)))
(declare (type fixnum hash-number))
(loop with current-symbol = (tli::package-root-symbol package)
for last-symbol = current-symbol
for current-hash fixnum = (tli::symbol-name-hash current-symbol)
do
(cond ((< hash-number current-hash)
(setq current-symbol (tli::symbol-left-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-left-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
((> hash-number current-hash)
(setq current-symbol (tli::symbol-right-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-right-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
(t
(let ((compare-result
(tli::string-compare
name (tli::non-null-symbol-name current-symbol))))
(declare (type fixnum compare-result))
(cond
((< compare-result 0)
(setq current-symbol (tli::symbol-left-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-left-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
((> compare-result 0)
(setq current-symbol (tli::symbol-right-branch current-symbol))
(unless (tli::not-unbound-value-p current-symbol)
(setf (tli::symbol-right-branch last-symbol) symbol)
(tli::set-symbol-package symbol package)
(return nil)))
(t
(error "Can't insert ~a in ~a, a symbol with that name already exists."
symbol package))))))))
(progn
(tli::set-symbol-package symbol package)
(setf (tli::package-root-symbol package) symbol))))
NIL is not the default return value , since that could be the symbol that is
through the used packages of the given package , but only searches the one
(defun find-symbol-in-single-package (string string-hash package)
(declare (type string string)
(type fixnum string-hash)
(type package package)
(return-type t))
(loop with current-symbol = (tli::package-root-symbol package)
initially (unless (tli::not-unbound-value-p current-symbol)
(return 0))
while (tli::not-unbound-value-p current-symbol)
for current-hash fixnum = (tli::symbol-name-hash current-symbol)
do
(cond ((< string-hash current-hash)
(setq current-symbol (tli::symbol-left-branch current-symbol)))
((> string-hash current-hash)
(setq current-symbol (tli::symbol-right-branch current-symbol)))
(t
(let ((compare-result (tli::string-compare
string
(tli::non-null-symbol-name current-symbol))))
(declare (type fixnum compare-result))
(cond
((< compare-result 0)
(setq current-symbol (tli::symbol-left-branch current-symbol)))
((> compare-result 0)
(setq current-symbol (tli::symbol-right-branch current-symbol)))
(t
(return current-symbol))))))
finally (return 0)))
(defvar all-packages)
(declaim (functional list-all-packages))
(defun list-all-packages ()
all-packages)
(defun find-package-1 (string-or-symbol-or-package)
(declare (return-type t))
(typecase string-or-symbol-or-package
(string
(let ((name string-or-symbol-or-package))
(declare (type string name))
(loop for package in all-packages do
(when (string= (package-name package) name)
(return package)))))
(symbol
(find-package-1 (symbol-name string-or-symbol-or-package)))
(package
string-or-symbol-or-package)
(t
(error "FIND-PACKAGE given bad argument ~a."
string-or-symbol-or-package))))
(defmacro find-package-or-error (&environment env package-or-name)
(if (tli::tl-subtypep (tli::expression-result-type package-or-name env)
'package)
package-or-name
`(find-package-or-error-1 ,package-or-name)))
(defun find-package-or-error-1 (name)
(declare (return-type package))
(let ((find-result (find-package name)))
(if find-result
find-result
(error "No package with name ~a could be found." name))))
(defmacro package-use-list (package-or-package-name)
`(tli::package-use-list-internal
(find-package-or-error ,package-or-package-name)))
(defun make-package-1 (name use)
(declare (type string name)
(return-type t))
(with-permanent-area
(let* ((name-to-use (string-upcase name))
(use-list (loop for used in use
for used-package = (find-package-or-error used)
collect used-package))
(found-package? (find-package-1 name-to-use)))
(cond
(found-package?
#-translator
(error "Cannot make-package ~s, that package already exists."
name-to-use)
constant folded symbol interned in them . -jallard 12/4/97
(unless (loop for used-package-cons = (package-use-list found-package?)
then (cdr-of-cons used-package-cons)
for new-package-cons = use-list
then (cdr-of-cons new-package-cons)
while (and used-package-cons new-package-cons)
always (eq (car-of-cons used-package-cons)
(car-of-cons new-package-cons))
finally
(when (or used-package-cons new-package-cons)
(return nil)))
(error "Use list for ~a differs from compile-time list: ~@
compile-time = ~s, new = ~s."
name-to-use (tli::package-use-list-internal found-package?)
use))
found-package?)
(t
(let ((new-package (tli::make-new-package name-to-use use-list)))
(setq all-packages (cons new-package all-packages))
new-package))))))
(defmacro find-symbol (string &optional package)
(let ((package-arg (if package
`(find-package-or-error ,package)
'*package*)))
(if (or (constantp string) (symbolp string))
`(find-symbol-in-package
,string (sxhash-string ,string) ,package-arg)
(let ((string-var (gensym)))
`(let ((,string-var ,string))
(declare (type string ,string-var))
(find-symbol-in-package
,string-var (sxhash-string ,string-var) ,package-arg))))))
(defun find-symbol-in-package (string string-hash package)
(declare (type string string)
(type fixnum string-hash)
(type package package))
#-translator
(declare (ignore string-hash))
#-translator
(return-from find-symbol-in-package
(ab-lisp::find-symbol string package))
#+translator
(let ((found-symbol
(find-symbol-in-single-package string string-hash package)))
(if (eql found-symbol 0)
(loop for used-package in (package-use-list package)
for found-inherited-symbol
= (find-symbol-in-single-package
string string-hash used-package)
do
(unless (eql found-inherited-symbol 0)
(return (values
(if (tli::symbol-imported found-inherited-symbol)
(tli::symbol-value-pointer found-inherited-symbol)
found-inherited-symbol)
:inherited)))
finally
(return (values nil nil)))
(values (if (tli::symbol-imported found-symbol)
(tli::symbol-value-pointer found-symbol)
found-symbol)
(if (tli::symbol-external found-symbol)
:external
:internal)))))
(defun intern-string-in-package (string hash-number package)
(declare (type string string)
(type fixnum hash-number)
(type package package))
(multiple-value-bind (symbol found?)
(find-symbol-in-package string hash-number package)
(if found?
(values symbol found?)
(with-permanent-area
(let ((new-symbol (tli::make-empty-symbol)))
(tli::init-symbol new-symbol string hash-number)
(tli::set-symbol-package new-symbol package)
(insert-symbol-into-package new-symbol package)
(values new-symbol :internal))))))
package . It performs the usual CLtL import operation . Note that this is a
symbol structures are used to indirect from the binary tree of one package
on the indirection symbol , not the originally imported symbol . -jra 5/7/96
(defun import (symbol-or-symbol-list &optional (package-arg *package*))
(declare (return-type t))
(with-permanent-area ()
(let ((symbol-list (if (listp symbol-or-symbol-list)
symbol-or-symbol-list
(tli::list-dynamic-extent symbol-or-symbol-list)))
(package (find-package-or-error package-arg)))
(loop for symbol in symbol-list
for symbol-name = (symbol-name symbol)
for symbol-hash fixnum = (tli::symbol-name-hash symbol)
do
(multiple-value-bind (found-symbol found?)
(find-symbol-in-package symbol-name symbol-hash package)
(when (and found? (not (eq found-symbol symbol)))
(error "The symbol ~a cannot be imported into ~a, a symbol with ~
that name is already accessible."
symbol package)))
(cond ((null (symbol-package symbol))
(insert-symbol-into-package symbol package))
(t
(let ((import-symbol (tli::make-empty-symbol)))
(tli::init-symbol import-symbol symbol-name symbol-hash)
(setf (tli::symbol-imported import-symbol) t)
(setf (tli::symbol-value-pointer import-symbol) symbol)
(insert-symbol-into-package import-symbol package)))))
t)))
(defun export (symbol-or-symbol-list &optional (package-arg *package*))
(declare (return-type t))
(with-permanent-area ()
(let ((symbol-list (if (listp symbol-or-symbol-list)
symbol-or-symbol-list
(tli::list-dynamic-extent symbol-or-symbol-list)))
(package (find-package-or-error package-arg)))
(loop for symbol in symbol-list
for symbol-name = (symbol-name symbol)
for symbol-hash fixnum = (tli::symbol-name-hash symbol)
do
(multiple-value-bind (found-symbol found?)
(find-symbol-in-package symbol-name symbol-hash package)
(unless (and found? (eq found-symbol symbol))
(error "The symbol ~a cannot be exported from ~a, it is not ~
accessible from that package."
symbol package))
(case found?
((:external)
nil)
((:internal)
(setf (tli::symbol-external symbol) t))
((:inherited)
(let ((import-symbol (tli::make-empty-symbol)))
(tli::init-symbol import-symbol symbol-name symbol-hash)
(setf (tli::symbol-imported import-symbol) t)
(setf (tli::symbol-value-pointer import-symbol) symbol)
(insert-symbol-into-package import-symbol package)
(setf (tli::symbol-external import-symbol) t)))
(t
(error "Bad second value ~a from find-symbol received by export."
found?)))))
t)))
(defvar *keyword-package* (find-package "KEYWORD"))
(defmacro keywordp (object)
(if (symbolp object)
`(and (symbolp ,object)
,object
(eq (symbol-package ,object) *keyword-package*))
(let ((symbol-evaled (gensym)))
`(let ((,symbol-evaled ,object))
(keywordp ,symbol-evaled)))))
The function make - gensymed - symbol implements translated calls to .
(defvar *gensym-counter* 1)
(declaim (type fixnum *gensym-counter*))
(defun make-gensymed-symbol (string-or-counter?)
(declare (return-type symbol))
(let ((prefix "G")
(counter *gensym-counter*))
(declare (type fixnum counter))
(cond
((fixnump string-or-counter?)
(setq counter string-or-counter?))
((stringp string-or-counter?)
(setq prefix string-or-counter?)
(incf *gensym-counter*))
(t
(incf *gensym-counter*)))
(with-permanent-area
(let* ((counter-length
(1+ (truncate (the double-float
(log (float counter 1.0) 10)))))
(new-name (make-string
(+ counter-length (length (the string prefix))))))
(declare (type fixnum counter-length))
(setf (fill-pointer new-name) 0)
(format new-name "~a~v,'0d" prefix counter-length counter)
(make-symbol new-name)))))
(defun write-symbol (symbol case stream?)
(declare (type symbol symbol)
(return-type void))
(let ((name-string (symbol-name symbol)))
(when *print-escape*
(if (keywordp symbol)
(write-string ":" stream?)
(let ((home-package (symbol-package symbol)))
(when (and (not (eq home-package *package*))
(not (eq (find-symbol name-string *package*) symbol)))
(write-string (package-name home-package) stream?)
(write-string
(if (tli::symbol-external symbol) ":" "::")
stream?)))))
(let* ((name name-string)
(length (length (the string name-string)))
(stream (get-string-or-file-stream-for-output-macro
stream? length))
(string? (stringp stream)))
(declare (type string name)
(type fixnum length))
(cond
((eq case :upcase)
(if string?
(dotimes (index length)
(write-char-to-string (char-upcase (char name index)) stream))
(dotimes (index length)
(write-char-to-file-stream
(char-upcase (char name index)) stream))))
((eq case :downcase)
(dotimes (index length)
(write-char-to-string-or-file-stream
(char-downcase (char name index)) stream string?)))
(t
(do* ((first t)
(index 0 (+ index 1)))
((>= index length))
(declare (type fixnum index))
(let* ((char (char name index))
(alpha-char? (alpha-char-p char)))
(cond (first
(when alpha-char?
(setq char (char-upcase char))
(setq first nil)))
((not alpha-char?)
(setq first t))
(t
(setq char (char-downcase char))))
(write-char-to-string-or-file-stream char stream string?))))))))
( jh , 9/27/90 )
translator efforts , where the translator 's host has a package that the
destination Lisp lacks . For example , suppose a Lucid - specific memory
to ignore it . This causes an error in when the reader reaches the
platform - specific symbols are not read in the host and never make it to
( package1 |::| symbol1
the middle item , the symbol whose printname consists of two colons , is
This macro also has the advantage of documenting in one place the
(defmacro with-package-specific-symbols (package-specs &body body)
(let (platform-specific-symbol-alist)
(do ((these-package-specs package-specs (cdddr these-package-specs)))
((null these-package-specs))
(let ((the-package (first these-package-specs))
(symbol (third these-package-specs)))
(when (find-package the-package)
(push
(cons symbol
(intern (symbol-name symbol) (symbol-name the-package)))
platform-specific-symbol-alist))))
(if (= (length body) 1)
(setq body (car body))
(setq body `(progn ,@body)))
(if platform-specific-symbol-alist
`(sublis ',platform-specific-symbol-alist
,body)
body)))
|
59530071649d60e536e14a109328fa3e5d0d75c129d96125c2f5df73d1ec8fbd | WhatsApp/eqwalizer | nowarn.erl | Copyright ( c ) Meta Platforms , Inc. and affiliates . All rights reserved .
%%%
%%% This source code is licensed under the Apache 2.0 license found in
%%% the LICENSE file in the root directory of this source tree.
-module(nowarn).
-compile([export_all, nowarn_export_all]).
-eqwalizer({nowarn_function, nowarn_bad/0}).
-eqwalizer({nowarn_function, nowarn_redundant/0}).
-spec good() ->
integer().
good() ->
1.
-spec bad() ->
integer().
bad() ->
ok.
-spec nowarn_bad() ->
integer().
nowarn_bad() ->
ok.
-spec nowarn_redundant() ->
integer().
nowarn_redundant() ->
1.
| null | https://raw.githubusercontent.com/WhatsApp/eqwalizer/9935940d71ef65c7bf7a9dfad77d89c0006c288e/eqwalizer/test_projects/check/src/nowarn.erl | erlang |
the LICENSE file in the root directory of this source tree. | Copyright ( c ) Meta Platforms , Inc. and affiliates . All rights reserved .
This source code is licensed under the Apache 2.0 license found in
-module(nowarn).
-compile([export_all, nowarn_export_all]).
-eqwalizer({nowarn_function, nowarn_bad/0}).
-eqwalizer({nowarn_function, nowarn_redundant/0}).
-spec good() ->
integer().
good() ->
1.
-spec bad() ->
integer().
bad() ->
ok.
-spec nowarn_bad() ->
integer().
nowarn_bad() ->
ok.
-spec nowarn_redundant() ->
integer().
nowarn_redundant() ->
1.
|
02fba830595799c0e389f7b69c07bdf674ed250eee840c2b16534198c01c883d | 05st/capri | OperatorDef.hs | module OperatorDef where
import Data.Text (Text)
data Assoc
= ALeft
| ARight
| ANone
| APrefix
| APostfix
deriving (Show)
data OperatorDef = OperatorDef
{ assoc :: Assoc
, prec :: Integer
, oper :: Text
} deriving (Show)
| null | https://raw.githubusercontent.com/05st/capri/a98b7a5bbefb1b04a87eaac4285d5c3503e8b1ac/src/OperatorDef.hs | haskell | module OperatorDef where
import Data.Text (Text)
data Assoc
= ALeft
| ARight
| ANone
| APrefix
| APostfix
deriving (Show)
data OperatorDef = OperatorDef
{ assoc :: Assoc
, prec :: Integer
, oper :: Text
} deriving (Show)
|
|
01ae343eec9cfbc3f3e51e34185426226ca028d741c88210c3e808aa2b93dea0 | rtrusso/scp | implambda.scm | (define (func a . b)
(display 456)
(newline)
(display a)
(newline)
(display 123)
(newline))
(display 0)
(newline)
(func 0)
(func 1 2)
(func 2 3 4)
| null | https://raw.githubusercontent.com/rtrusso/scp/2051e76df14bd36aef81aba519ffafa62b260f5c/src/tests/implambda.scm | scheme | (define (func a . b)
(display 456)
(newline)
(display a)
(newline)
(display 123)
(newline))
(display 0)
(newline)
(func 0)
(func 1 2)
(func 2 3 4)
|
|
6fd2ae53039cfb86ed32834fba745da490be8b6d3f772b3ed7d903df49635f00 | AdRoll/rebar3_hank | ignore_config.erl | -module(ignore_config).
%% @doc No warnings should be emitted for this file since
%% rebar.config specifically states that all of them
%% should be ignored.
-define(MACRO_ALL, "this macro is always ignored").
-define(MACRO_ALL(), "regardless of its arity").
-define(MACRO_ALL(It), "is never reported as unused").
-define(MACRO_ALL(Not, Even), "if it has multiple arguments").
-define(MACRO_0(), "This one should since it has 0 arguments").
-define(MACRO_1(But), "not for this version with arity 1").
-define(MACRO_NONE, "This instance should be ignored").
| null | https://raw.githubusercontent.com/AdRoll/rebar3_hank/6035f08e74c694ae56c4c9382e74303e7b3e9018/test/files/unused_macros/ignore_config.erl | erlang | @doc No warnings should be emitted for this file since
rebar.config specifically states that all of them
should be ignored. | -module(ignore_config).
-define(MACRO_ALL, "this macro is always ignored").
-define(MACRO_ALL(), "regardless of its arity").
-define(MACRO_ALL(It), "is never reported as unused").
-define(MACRO_ALL(Not, Even), "if it has multiple arguments").
-define(MACRO_0(), "This one should since it has 0 arguments").
-define(MACRO_1(But), "not for this version with arity 1").
-define(MACRO_NONE, "This instance should be ignored").
|
900e9ec95ac517fcb4db5178aae166c5bc132bd2b191d5c7a3e35ae0fdeaf77d | adoptingerlang/service_discovery | sds_storage.erl | -module(sds_storage).
-behaviour(service_discovery_storage).
-export([configure_storage/1,
create/1,
read/1,
read_endpoints/1,
add_named_ports/2,
list/0,
register/2]).
-define(STORAGE_KEY, {?MODULE, storage_module}).
-define(STORAGE_MOD, (persistent_term:get(?STORAGE_KEY))).
configure_storage(StorageMod) ->
persistent_term:put(?STORAGE_KEY, StorageMod).
-spec create(service_discovery:service()) -> binary() | {error, term()}.
create(Service) ->
?STORAGE_MOD:create(Service).
-spec read(unicode:unicode_binary()) -> service_discovery:service() | {error, term()}.
read(ServiceName) ->
?STORAGE_MOD:read(ServiceName).
-spec read_endpoints(unicode:unicode_binary()) -> [service_discovery:endpoint()] | {error, term()}.
read_endpoints(ServiceName) ->
?STORAGE_MOD:read_endpoints(ServiceName).
-spec add_named_ports(unicode:unicode_binary(), service_discovery:named_ports()) -> ok | {error, term()}.
add_named_ports(ServiceName, NamedPorts) ->
?STORAGE_MOD:add_named_ports(ServiceName, NamedPorts).
-spec list() -> [service_discovery:service()] | {error, term()}.
list() ->
?STORAGE_MOD:list().
-spec register(service_discovery:name(), service_discovery:endpoint()) -> ok.
register(ServiceName, Endpoint) ->
?STORAGE_MOD:register(ServiceName, Endpoint).
| null | https://raw.githubusercontent.com/adoptingerlang/service_discovery/03bed070048e70ce267fa4a585fa157bbc883425/apps/service_discovery_storage/src/sds_storage.erl | erlang | -module(sds_storage).
-behaviour(service_discovery_storage).
-export([configure_storage/1,
create/1,
read/1,
read_endpoints/1,
add_named_ports/2,
list/0,
register/2]).
-define(STORAGE_KEY, {?MODULE, storage_module}).
-define(STORAGE_MOD, (persistent_term:get(?STORAGE_KEY))).
configure_storage(StorageMod) ->
persistent_term:put(?STORAGE_KEY, StorageMod).
-spec create(service_discovery:service()) -> binary() | {error, term()}.
create(Service) ->
?STORAGE_MOD:create(Service).
-spec read(unicode:unicode_binary()) -> service_discovery:service() | {error, term()}.
read(ServiceName) ->
?STORAGE_MOD:read(ServiceName).
-spec read_endpoints(unicode:unicode_binary()) -> [service_discovery:endpoint()] | {error, term()}.
read_endpoints(ServiceName) ->
?STORAGE_MOD:read_endpoints(ServiceName).
-spec add_named_ports(unicode:unicode_binary(), service_discovery:named_ports()) -> ok | {error, term()}.
add_named_ports(ServiceName, NamedPorts) ->
?STORAGE_MOD:add_named_ports(ServiceName, NamedPorts).
-spec list() -> [service_discovery:service()] | {error, term()}.
list() ->
?STORAGE_MOD:list().
-spec register(service_discovery:name(), service_discovery:endpoint()) -> ok.
register(ServiceName, Endpoint) ->
?STORAGE_MOD:register(ServiceName, Endpoint).
|
|
f0a1f7dc22716172ee7ce30756bd42038fd71e02c72eb7e13f5934a5a8985f9a | ekmett/succinct-binary | Blob.hs | # options_ghc -Wno - orphans #
module Data.Binary.Succinct.Blob
( Blob(..)
, blob
-- guts
, inspectMeta
, inspectShape
, inspectContent
, inspectBlob
) where
import Data.Word
import Data.Bits
import Data.ByteString as Strict
import Data.ByteString.Builder as Builder
import Data.ByteString.Lazy as Lazy
import Data.Semigroup
import qualified Data.Vector.Storable as Storable
import HaskellWorks.Data.BalancedParens.RangeMinMax as BP
import HaskellWorks.Data.RankSelect.CsPoppy as CsPoppy
import HaskellWorks.Data.RankSelect.Base.Rank0
import Data.Vector.Storable.ByteString
import Data.Binary.Succinct.Put
import Data.Binary.Succinct.Orphans ()
data Blob = Blob
{ blobSize :: Word64
, blobMeta :: CsPoppy
, blobShape :: RangeMinMax (Storable.Vector Word64)
, blobContent :: Strict.ByteString
} -- deriving Show
-- for debugging
instance Show Blob where
show = inspectBlob
-- evil orphan for debugging
instance Show Put where
show = show . blob
blob :: Put -> Blob
blob ma = case runPut ma (State 0 0 0 0) of
Result (State i b j b') (W m s c n) -> Blob
{ blobSize = n
, blobMeta = makeCsPoppy $ ws $ flush8 i b m
, blobShape = mkRangeMinMax $ ws $ flush8 j b' s
, blobContent = bs c
}
where
flush :: Int -> Word8 -> Builder -> Builder
flush 0 _ xs = xs
flush _ b xs = xs <> word8 b
flush8 :: Int -> Word8 -> Builder -> Builder
flush8 r k d = flush r k d <> stimes (7 :: Int) (word8 0)
trim8 :: Strict.ByteString -> Strict.ByteString
trim8 b = Strict.take (Strict.length b .&. complement 7) b
bs :: Builder -> Strict.ByteString
bs = Lazy.toStrict . Builder.toLazyByteString
ws :: Builder -> Storable.Vector Word64
ws = byteStringToVector . trim8 . bs
access :: Rank1 v => v -> Word64 -> Word64
access s 1 = rank1 s 1
access s n = rank1 s n - rank1 s (n - 1)
as :: Rank1 v => a -> a -> v -> Word64 -> a
as l r s i = case access s i of
0 -> l
_ -> r
-- Print out a string of S's and D's, corresponding to Shape or Data, from the meta index
inspectMeta :: Blob -> String
inspectMeta (Blob n m _ _) = as 'D' 'S' m <$> [1..n]
-- Print out the balanced parentheses representation of our paren index
inspectShape :: Blob -> String
inspectShape (Blob n m s _) = as ')' '(' s <$> [1..rank1 m n]
-- Print out our raw content buffer
inspectContent :: Blob -> String
inspectContent (Blob _ _ _ c) = show c
-- Print out a representation of the entire blob, interleaving paren and content
inspectBlob :: Blob -> String
inspectBlob (Blob n m s c) = do
i <- [1..n]
case access m i of
0 -> '{' : shows (Strict.index c $ fromIntegral $ rank0 m i - 1) "}"
_ -> [as ')' '(' s $ rank1 m i]
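-- A hypothetical usage sketch (not part of the original module): given some
-- @p :: Put@, 'blob' builds the index structures and 'inspectBlob' shows the
-- shape bits interleaved with the raw content bytes:
--
-- > putStrLn (inspectBlob (blob p))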
| null | https://raw.githubusercontent.com/ekmett/succinct-binary/c8731ba6617e83ab416faffde95a118c4a3eef38/src/Data/Binary/Succinct/Blob.hs | haskell | guts
deriving Show
for debugging
evil orphan for debugging
Print out the balanced parentheses representation of our paren index
Print out our raw content buffer
Print out a representation of the entire blob, interleaving paren and content | # options_ghc -Wno - orphans #
module Data.Binary.Succinct.Blob
( Blob(..)
, blob
, inspectMeta
, inspectShape
, inspectContent
, inspectBlob
) where
import Data.Word
import Data.Bits
import Data.ByteString as Strict
import Data.ByteString.Builder as Builder
import Data.ByteString.Lazy as Lazy
import Data.Semigroup
import qualified Data.Vector.Storable as Storable
import HaskellWorks.Data.BalancedParens.RangeMinMax as BP
import HaskellWorks.Data.RankSelect.CsPoppy as CsPoppy
import HaskellWorks.Data.RankSelect.Base.Rank0
import Data.Vector.Storable.ByteString
import Data.Binary.Succinct.Put
import Data.Binary.Succinct.Orphans ()
data Blob = Blob
{ blobSize :: Word64
, blobMeta :: CsPoppy
, blobShape :: RangeMinMax (Storable.Vector Word64)
, blobContent :: Strict.ByteString
instance Show Blob where
show = inspectBlob
instance Show Put where
show = show . blob
blob :: Put -> Blob
blob ma = case runPut ma (State 0 0 0 0) of
Result (State i b j b') (W m s c n) -> Blob
{ blobSize = n
, blobMeta = makeCsPoppy $ ws $ flush8 i b m
, blobShape = mkRangeMinMax $ ws $ flush8 j b' s
, blobContent = bs c
}
where
flush :: Int -> Word8 -> Builder -> Builder
flush 0 _ xs = xs
flush _ b xs = xs <> word8 b
flush8 :: Int -> Word8 -> Builder -> Builder
flush8 r k d = flush r k d <> stimes (7 :: Int) (word8 0)
trim8 :: Strict.ByteString -> Strict.ByteString
trim8 b = Strict.take (Strict.length b .&. complement 7) b
bs :: Builder -> Strict.ByteString
bs = Lazy.toStrict . Builder.toLazyByteString
ws :: Builder -> Storable.Vector Word64
ws = byteStringToVector . trim8 . bs
access :: Rank1 v => v -> Word64 -> Word64
access s 1 = rank1 s 1
access s n = rank1 s n - rank1 s (n - 1)
as :: Rank1 v => a -> a -> v -> Word64 -> a
as l r s i = case access s i of
0 -> l
_ -> r
Print out a string of S 's and D 's , corresponding to Shape or Data , from the meta index
inspectMeta :: Blob -> String
inspectMeta (Blob n m _ _) = as 'D' 'S' m <$> [1..n]
inspectShape :: Blob -> String
inspectShape (Blob n m s _) = as ')' '(' s <$> [1..rank1 m n]
inspectContent :: Blob -> String
inspectContent (Blob _ _ _ c) = show c
inspectBlob :: Blob -> String
inspectBlob (Blob n m s c) = do
i <- [1..n]
case access m i of
0 -> '{' : shows (Strict.index c $ fromIntegral $ rank0 m i - 1) "}"
_ -> [as ')' '(' s $ rank1 m i]
|
8f29ada2cc228e435d25ec74e18da928c3268568782f8da771b284b3fd3208e4 | tweag/asterius | SymbolMap.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- Module : Asterius.Types.SymbolMap
-- Copyright : (c) 2018 EURL Tweag
-- License : All rights reserved (see LICENCE file in the distribution).
--
-- The @'SymbolMap' a@ type represents a finite map/dictionary from keys of
-- type 'EntitySymbol' to values of type @a@. Internally it is represented as
-- an 'IM.IntMap', where the key of the unique of the 'EntitySymbol' is used
-- for the indexing. In order to be able to access the 'EntitySymbol'
-- corresponding to each entry, we also store it alongside each element.
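--
-- A small, hypothetical usage sketch (added for illustration, not part of
-- the original module); @sym@ stands for some 'EntitySymbol' value:
--
-- > let m = insert sym (42 :: Int) empty
-- > in (m ! sym, member sym m)          -- evaluates to (42, True)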
module Asterius.Types.SymbolMap
( -- * SymbolMap type
SymbolMap,
-- * Construction
empty,
singleton,
-- * Query
member,
lookup,
(!),
-- * Size
size,
-- * Insertion
insert,
-- * Filtering
filter,
restrictKeys,
-- * Folds and Maps
foldrWithKey',
mapWithKey,
mapAccum,
-- * Conversion
elems,
keys,
keysSet,
-- ** Lists
toList,
fromList,
-- ** Maps
toMap,
)
where
import Asterius.Types.EntitySymbol
import qualified Asterius.Types.SymbolSet as SS
import Binary
import Control.DeepSeq
import Control.Monad
import Data.Data
import qualified Data.IntMap.Lazy as IM
import qualified Data.Map.Lazy as Map
import GHC.Exts (IsList (..))
import GHC.Stack
import Prelude hiding (filter, lookup)
-- | A map from 'EntitySymbol's to values
newtype SymbolMap a = SymbolMap (IM.IntMap (EntitySymbol, a))
deriving newtype (Eq, Monoid, NFData)
deriving stock (Data)
instance Semigroup (SymbolMap a) where
SymbolMap m0 <> SymbolMap m1 =
SymbolMap $
IM.unionWithKey
(\_ (sym, _) _ -> error $ "Duplicate symbol: " <> show sym)
m0
m1
instance Show a => Show (SymbolMap a) where
showsPrec d m =
showParen (d > 10) $
showString "fromList " . shows (toListSM m)
instance Binary a => Binary (SymbolMap a) where
put_ bh m =
put_ bh (size m)
*> forM_ (toListSM m) (\(k, v) -> put_ bh k *> lazyPut bh v)
get bh = fromListSM <$> do
n <- get bh
replicateM n $ (,) <$> get bh <*> lazyGet bh
instance IsList (SymbolMap a) where
type Item (SymbolMap a) = (EntitySymbol, a)
fromList = fromListSM
toList = toListSM
-- ----------------------------------------------------------------------------
-- | /O(1)/. The empty map.
{-# INLINE empty #-}
empty :: SymbolMap a
empty = SymbolMap IM.empty
-- | /O(1)/. A map containing a single association.
{-# INLINE singleton #-}
singleton :: EntitySymbol -> a -> SymbolMap a
singleton k e = SymbolMap $ IM.singleton (getKeyES k) (k, e)
-- | /O(n)/. Number of elements in the map.
{-# INLINE size #-}
size :: SymbolMap a -> Int
size (SymbolMap m) = IM.size m
-- | /O(min(n,W))/. Is the 'EntitySymbol' a member of the map?
{-# INLINE member #-}
member :: EntitySymbol -> SymbolMap a -> Bool
member k (SymbolMap m) = getKeyES k `IM.member` m
-- | /O(n)/. Return all elements of the map in the ascending order of the key
-- of the unique of their 'EntitySymbol'-key.
elems :: SymbolMap a -> [a]
elems (SymbolMap m) = map snd $ IM.elems m
-- | /O(min(n,W))/. Lookup the value at an 'EntitySymbol' in the map.
lookup :: EntitySymbol -> SymbolMap a -> Maybe a
lookup k (SymbolMap m) = snd <$> IM.lookup (getKeyES k) m
-- | /O(min(n,W))/. Find the value at an 'EntitySymbol'. Calls 'error' when the
-- element can not be found.
(!) :: HasCallStack => SymbolMap a -> EntitySymbol -> a
(!) m k = case lookup k m of
Just e -> e
Nothing ->
error $
"SymbolMap.!: given key ("
++ show k
++ ") is not an element in the map"
infixl 9 !
-- | The restriction of a map to the keys in a set.
restrictKeys :: SymbolMap a -> SS.SymbolSet -> SymbolMap a
restrictKeys (SymbolMap m) s = SymbolMap $ IM.restrictKeys m (SS.toIntSet s)
-- | /O(n)/. Map a function over all values in the map.
mapWithKey :: (EntitySymbol -> a -> b) -> SymbolMap a -> SymbolMap b
mapWithKey fn (SymbolMap m) =
SymbolMap $ IM.mapWithKey (\_ (k, e) -> (k, fn k e)) m
-- | /O(min(n,W))/. Insert a new key/value pair in the map. If the key is
-- already present in the map, the associated value is replaced with the
-- supplied value.
insert :: EntitySymbol -> a -> SymbolMap a -> SymbolMap a
insert k e (SymbolMap m) = SymbolMap $ IM.insert (getKeyES k) (k, e) m
-- | /O(n*log n)/. The set of all keys of the map.
{-# INLINE keysSet #-}
keysSet :: SymbolMap a -> SS.SymbolSet
keysSet = SS.fromList . keys
-- | /O(n)/. Return all 'EntitySymbol' keys of the map, in ascending order of
-- the key of their unique.
keys :: SymbolMap a -> [EntitySymbol]
keys (SymbolMap m) = map fst $ IM.elems m
-- | /O(n)/. Thread an accumulating argument through the map, in ascending
-- order of keys of uniques on the 'EntitySymbol' keys.
mapAccum :: (a -> b -> (a, c)) -> a -> SymbolMap b -> (a, SymbolMap c)
mapAccum f z (SymbolMap m) =
SymbolMap <$> IM.mapAccum (\a (k, e) -> fmap (k,) $ f a e) z m
-- | /O(n)/. Fold the keys and values in the map using the given
-- right-associative binary operator. This is a strict variant: each
-- application of the operator is evaluated before using the result in the next
-- application. This function is strict in the starting value.
foldrWithKey' :: (EntitySymbol -> a -> b -> b) -> b -> SymbolMap a -> b
foldrWithKey' f z (SymbolMap m) = IM.foldrWithKey' (\_ (k, e) b -> f k e b) z m
-- | /O(n)/. Filter all values that satisfy a predicate.
{-# INLINE filter #-}
filter :: (a -> Bool) -> SymbolMap a -> SymbolMap a
filter p (SymbolMap m) = SymbolMap $ IM.filter (p . snd) m
-- GEORGE: Given that EntitySymbol appears both in a co- and a contra-variant
-- position in the function, there is no direct way to utilize IM.mapKeys for
-- implementing mapKeys (getUnique is irreversible). TODO: reduce usage.
-- ----------------------------------------------------------------------------
instance Functor SymbolMap where
fmap f (SymbolMap m) = SymbolMap $ IM.map (\(k, e) -> (k, f e)) m
instance Foldable SymbolMap where
foldr f z (SymbolMap m) = IM.foldr (\(_, e) b -> f e b) z m
instance Traversable SymbolMap where
traverse f (SymbolMap m) =
SymbolMap <$> traverse (\(k, x) -> fmap (\e -> (k, e)) (f x)) m
-- | /O(n*log n)/. Build a symbol map from a list of key/value pairs.
{-# INLINE fromListSM #-}
fromListSM :: [(EntitySymbol, a)] -> SymbolMap a
fromListSM =
SymbolMap
. IM.fromList
. map (\(k, e) -> (getKeyES k, (k, e)))
-- | /O(n)/. Convert a symbol map to a list of key/value pairs.
{-# INLINE toListSM #-}
toListSM :: SymbolMap a -> [(EntitySymbol, a)]
toListSM (SymbolMap m) = IM.elems m
-- | /O(n*log n)/. Convert a symbol map to a 'Map.Map'.
{-# INLINE toMap #-}
toMap :: SymbolMap a -> Map.Map EntitySymbol a
toMap = Map.fromList . toListSM
| null | https://raw.githubusercontent.com/tweag/asterius/9f2574d9c2b50aa83d105741799e2f65b05e2023/asterius/src-types/Asterius/Types/SymbolMap.hs | haskell | # LANGUAGE DeriveDataTypeable #
|
License : All rights reserved (see LICENCE file in the distribution).
corresponding to each entry, we also store it alongside each element.
* SymbolMap type
* Construction
* Query
* Size
* Insertion
* Filtering
* Folds and Maps
* Conversion
** Lists
** Maps
----------------------------------------------------------------------------
| /O(1)/. The empty map.
# INLINE empty #
| /O(1)/. A map containing a single association.
| /O(n)/. Number of elements in the map.
# INLINE member #
| /O(n)/. Return all elements of the map in the ascending order of the key
of the unique of their 'EntitySymbol'-key.
element can not be found.
| The restriction of a map to the keys in a set.
| /O(n)/. Map a function over all values in the map.
| /O(min(n,W))/. Insert a new key/value pair in the map. If the key is
already present in the map, the associated value is replaced with the
supplied value.
| /O(n*log n)/. The set of all keys of the map.
the key of their unique.
| /O(n)/. Thread an accumulating argument through the map, in ascending
| /O(n)/. Fold the keys and values in the map using the given
right-associative binary operator. This is a strict variant: each
application of the operator is evaluated before using the result in the next
application. This function is strict in the starting value.
| /O(n)/. Filter all values that satisfy a predicate.
position in the function, there is no direct way to utilize IM.mapKeys for
implementing mapKeys (getUnique is irreversible). TODO: reduce usage.
----------------------------------------------------------------------------
| /O(n)/. Convert a symbol map to a list of key/value pairs. | # LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
Module : Asterius . Types .
Copyright : ( c ) 2018 EURL Tweag
The @'SymbolMap ' a@ type represents a finite map / dictionary from keys of
type ' EntitySymbol ' to values of type Internally it is represented as
an ' IM.IntMap ' of , where the key of the unique of the ' EntitySymbol ' is used
for the indexing . In order to be able to access the ' EntitySymbol '
module Asterius.Types.SymbolMap
SymbolMap,
empty,
singleton,
member,
lookup,
(!),
size,
insert,
filter,
restrictKeys,
foldrWithKey',
mapWithKey,
mapAccum,
elems,
keys,
keysSet,
toList,
fromList,
toMap,
)
where
import Asterius.Types.EntitySymbol
import qualified Asterius.Types.SymbolSet as SS
import Binary
import Control.DeepSeq
import Control.Monad
import Data.Data
import qualified Data.IntMap.Lazy as IM
import qualified Data.Map.Lazy as Map
import GHC.Exts (IsList (..))
import GHC.Stack
import Prelude hiding (filter, lookup)
| A map from ' EntitySymbol 's to values
newtype SymbolMap a = SymbolMap (IM.IntMap (EntitySymbol, a))
deriving newtype (Eq, Monoid, NFData)
deriving stock (Data)
instance Semigroup (SymbolMap a) where
SymbolMap m0 <> SymbolMap m1 =
SymbolMap $
IM.unionWithKey
(\_ (sym, _) _ -> error $ "Duplicate symbol: " <> show sym)
m0
m1
instance Show a => Show (SymbolMap a) where
showsPrec d m =
showParen (d > 10) $
showString "fromList " . shows (toListSM m)
instance Binary a => Binary (SymbolMap a) where
put_ bh m =
put_ bh (size m)
*> forM_ (toListSM m) (\(k, v) -> put_ bh k *> lazyPut bh v)
get bh = fromListSM <$> do
n <- get bh
replicateM n $ (,) <$> get bh <*> lazyGet bh
instance IsList (SymbolMap a) where
type Item (SymbolMap a) = (EntitySymbol, a)
fromList = fromListSM
toList = toListSM
empty :: SymbolMap a
empty = SymbolMap IM.empty
# INLINE singleton #
singleton :: EntitySymbol -> a -> SymbolMap a
singleton k e = SymbolMap $ IM.singleton (getKeyES k) (k, e)
# INLINE size #
size :: SymbolMap a -> Int
size (SymbolMap m) = IM.size m
| /O(min(n , W))/. Is the ' EntitySymbol ' a member of the map ?
member :: EntitySymbol -> SymbolMap a -> Bool
member k (SymbolMap m) = getKeyES k `IM.member` m
elems :: SymbolMap a -> [a]
elems (SymbolMap m) = map snd $ IM.elems m
| /O(min(n , W))/. Lookup the value at an ' EntitySymbol ' in the map .
lookup :: EntitySymbol -> SymbolMap a -> Maybe a
lookup k (SymbolMap m) = snd <$> IM.lookup (getKeyES k) m
| /O(min(n , W))/. Find the value at an ' EntitySymbol ' . Calls ' error ' when the
(!) :: HasCallStack => SymbolMap a -> EntitySymbol -> a
(!) m k = case lookup k m of
Just e -> e
Nothing ->
error $
"SymbolMap.!: given key ("
++ show k
++ ") is not an element in the map"
infixl 9 !
restrictKeys :: SymbolMap a -> SS.SymbolSet -> SymbolMap a
restrictKeys (SymbolMap m) s = SymbolMap $ IM.restrictKeys m (SS.toIntSet s)
mapWithKey :: (EntitySymbol -> a -> b) -> SymbolMap a -> SymbolMap b
mapWithKey fn (SymbolMap m) =
SymbolMap $ IM.mapWithKey (\_ (k, e) -> (k, fn k e)) m
insert :: EntitySymbol -> a -> SymbolMap a -> SymbolMap a
insert k e (SymbolMap m) = SymbolMap $ IM.insert (getKeyES k) (k, e) m
# INLINE keysSet #
keysSet :: SymbolMap a -> SS.SymbolSet
keysSet = SS.fromList . keys
| /O(n)/. Return all ' EntitySymbol ' keys of the map , in ascending order of
keys :: SymbolMap a -> [EntitySymbol]
keys (SymbolMap m) = map fst $ IM.elems m
order of keys of uniques on the ' EntitySymbol ' keys .
mapAccum :: (a -> b -> (a, c)) -> a -> SymbolMap b -> (a, SymbolMap c)
mapAccum f z (SymbolMap m) =
SymbolMap <$> IM.mapAccum (\a (k, e) -> fmap (k,) $ f a e) z m
foldrWithKey' :: (EntitySymbol -> a -> b -> b) -> b -> SymbolMap a -> b
foldrWithKey' f z (SymbolMap m) = IM.foldrWithKey' (\_ (k, e) b -> f k e b) z m
# INLINE filter #
filter :: (a -> Bool) -> SymbolMap a -> SymbolMap a
filter p (SymbolMap m) = SymbolMap $ IM.filter (p . snd) m
GEORGE : Given that EntitySymbol appears both in a co- and a contra- variant
instance Functor SymbolMap where
fmap f (SymbolMap m) = SymbolMap $ IM.map (\(k, e) -> (k, f e)) m
instance Foldable SymbolMap where
foldr f z (SymbolMap m) = IM.foldr (\(_, e) b -> f e b) z m
instance Traversable SymbolMap where
traverse f (SymbolMap m) =
SymbolMap <$> traverse (\(k, x) -> fmap (\e -> (k, e)) (f x)) m
| /O(n*log n)/. Build a symbol map from a list of key / value pairs .
# INLINE fromListSM #
fromListSM :: [(EntitySymbol, a)] -> SymbolMap a
fromListSM =
SymbolMap
. IM.fromList
. map (\(k, e) -> (getKeyES k, (k, e)))
# INLINE toListSM #
toListSM :: SymbolMap a -> [(EntitySymbol, a)]
toListSM (SymbolMap m) = IM.elems m
| /O(n*log a symbol map to a ' Map . Map ' .
# INLINE toMap #
toMap :: SymbolMap a -> Map.Map EntitySymbol a
toMap = Map.fromList . toListSM
|
4664bca783d2d5d3360e40a395bb545779a08e29090ff9e33a1c5757a57e7b77 | tezos/tezos-mirror | lwtreslib.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2020 - 2021 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation  *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the      *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING    *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
* { 1 : intro : the Lwt- and result - aware complement }
Lwtreslib ( or Lwt - result - stdlib ) is a library to complement the OCaml 's
Stdlib in software projects that make heavy use of Lwt and the result type .
{ 2 Introduction }
Lwtreslib aims to
{ ul
{ li Replace exception - raising functions with exception - safe one . E.g. ,
functions that may raise { ! Not_found } in the are
shadowed by functions that return an { ! option } . }
Provide an extensive set of Lwt- , result- and Lwt - result - traversors
for the common data - types of the . E.g. , { ! } is
available alongside [ List.map_s ] for Lwt sequential traversal ,
[ List.map_e ] for result traversal , etc . }
Provide a uniform semantic , especially regarding error management .
E.g. , all sequential traversal functions have the same fail - early
semantic , whereas all concurrent traversal functions have the same
best - effort semantic . }
Provide good documentation . }
}
{ 2 Semantic }
The semantic of the functions exported by Lwtreslib is uniform and
predictable . This applies to the - like functions , the Lwt - aware
functions , the result - aware functions , and the Lwt - and - result - aware
functions .
{ 3 Semantic of vanilla - functions }
Functions that have the same signature as their 's counterpart have
the same semantic .
Functions exported by Lwtreslib do not raise exceptions . ( With the exception
of the functions exported by the { ! WithExceptions } module . ) If a function
raises an exception in the , its type is changed in Lwtreslib . In
general the following substitutions apply :
{ ul
that may raise { ! Not_found } ( e.g. , [ List.find ] ) return an
{ ! option } instead . }
that may fail because of indexing errors ( e.g. , [ List.nth ] ,
[ List.hd ] , etc . ) also return an { ! option } instead . }
that may raise { ! Invalid_argument } ( e.g. , [ List.iter2 ] )
return a { ! result } type instead . They take an additional argument
indicating what [ Error ] to return instead of the exception . }
}
{ 3 Semantic of Lwt - aware functions }
Lwtreslib exports Lwt - aware functions for all traversal functions of the
Stdlib .
Functions with the [ _ s ] suffix traverse their underlying collection
sequentially , waiting for the promise associated to one element to resolve
before processing to the next element . Note that for the [ Seq * ] modules ( see
below ) the sequential traversors are bundled under an [ S ] submodules rather
than suffixed with [ _ s ] .
Functions with the [ _ p ] suffix traverse their underlying collection
concurrently , creating promises for all the elements and then waiting for
all of them to resolve . The " p " in the [ _ p ] suffix is for compatibility with
Lwt and in particular [ Lwt_list ] . The mnemonic is " parallel " even though
there is not parallelism , only concurrency .
These [ _ s]- and [ _ p]-suffixed functions are semantically identical to their
Lwt counterpart when it is available . Most notably , [ Lwtreslib . List ] is a
strict superset of [ Lwt_list ] .
{ 3 Semantic of result - aware functions }
Lwtreslib exports result - aware functions for all the traversal functions of
the Stdlib . These function allow easy manipulation of [ ( ' a , ' e ) result ]
values .
Functions with the [ _ e ] suffix traverse their underlying collection whilst
wrapping the accumulator / result in a [ result ] . These functions have a
fail - early semantic : if one of the steps returns an [ Error _ ] , then the whole
traversal is interrupted and returns the same [ Error _ ] . Note that for the
[ Seq * ] modules ( see below ) the result - aware traversors are bundled under an
[ E ] submodules rather than suffixed with [ _ e ] .
{ 3 Semantic of Lwt - result - aware functions }
Lwtreslib exports Lwt - result - aware functions for all the traversal functions
of the . These function allow easy manipulation of
[ ( ' a , ' e ) result Lwt.t ] -- i.e. , promises that may fail .
Functions with the [ _ es ] suffix traverse their underlying collection
sequentially ( like [ _ s ] functions ) whilst wrapping the accumulator / result in
a [ result ] ( like [ _ e ] functions ) . These functions have a fail - early
semantic : if one of the step returns a promise that resolves to an
[ Error _ ] , then the whole traversal is interrupted and the returned promise
resolves to the same [ Error _ ] . Note that for the [ Seq * ] modules ( see below )
the Lwt - result - aware traversors are bundled under an [ ES ] submodules rather
than suffixed with [ _ es ] .
Functions with the [ _ ep ] suffix traverse their underlying collection
concurrently ( like [ _ p ] functions ) whilst wrapping the accumulator / result in
a [ result ] ( like [ _ e ] functions ) . These functions have a best - effort
semantic : if one of the step returns a promise that resolves to an
[ Error _ ] , the other promises are left to resolve ; once all the promises
have resolved , then the returned promise resolves with an [ Error _ ] that
carries all the other errors in a list . It is up to the user to convert this
list to a more manageable type if needed .
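A short illustration (a sketch added here, not part of the library's own
documentation) of the two semantics, assuming a function
[check : int -> (unit, string) result Lwt.t]:
{[
  (* Fail-early: stops at the first [Error] and returns that error. *)
  let sequential : (unit list, string) result Lwt.t =
    List.map_es check [1; 2; 3]
  (* Best-effort: lets every promise resolve, then collects all errors. *)
  let concurrent : (unit list, string list) result Lwt.t =
    List.map_ep check [1; 2; 3]
]}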
{ 3 A note on [ Seq ] }
The [ Seq ] module exports a type that suspends nodes under a closure .
Consequently , some interactions with result , Lwt , and result - Lwt is not
possible . E.g. , [ map]ping can be either lazy or within Lwt but not both :
[ Seq.map_s ] would have type [ ( ' a - > ' b Lwt.t ) - > ' a t - > ' b t Lwt.t ] where
the returned promise forces the whole sequence ( and never resolves on
infinite sequences ) .
In Lwtreslib , [ Seq ] does not provide these additional transformers that would
force the sequence simply due to the bad interaction of the Monads and the
type of sequences . Instead , Lwtreslib provides
- A subset of traversors where the laziness and the monad mix well ( e.g. ,
[ iter ] but not [ map ] ) . These are exported under the modules [ S ] , [ E ] and
[ ES ] .
- Variants of [ Seq ] called [ Seq_e ] , [ Seq_s ] , and [ Seq_es ] where the
combination with the monad is baked into the sequence type itself .
If you want to map a sequnence using an Lwt - returning function , you should
first convert the sequence to an Lwt - aware sequence using [ Seq_s.of_seq ] ,
and then map this converted function using [ Seq_s.S.map ] .
Note that this returns a [ Seq_s.t ] sequence so further transformations will
be within [ Seq_s ] and not within [ Seq ] . Once in a monad , you stay in the
monad .
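For instance (an illustrative sketch, not from the original documentation),
assuming some [ints : int Seq.t] is given:
{[
  let doubled : int Seq_s.t =
    Seq_s.S.map (fun x -> Lwt.return (x * 2)) (Seq_s.of_seq ints)
]}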
{ 3 [ Traced ] }
The { ! Traced } module offers a small wrapper around Lwtreslib . This wrapper
is intended to ease the use of [ _ ep ] functions . It does so by introducing a
trace data - type : a structured collection of errors .
This trace data - type is used to collapse the types [ ' e ] and [ ' e list ] of
errors . Indeed , without this collapse , chaining [ _ ep ] together or chaining
[ _ ep ] with [ _ es ] functions requires significant boilerplate to flatten
lists , to listify single errors , etc . Need for boilerplate mostly vanishes
when using the [ Traced ] wrapper .
{ 2 Monad helpers }
Lwtreslib also exports monadic operators ( binds , return , etc . ) for the
Lwt - monad , the result - monad , and the combined Lwt - result - monad .
{ 2 Exceptions }
If at all possible , avoid exceptions .
If possible , avoid exceptions .
If you use exceptions , here are a few things to keep in mind :
The [ _ p ] functions are semantically equivalent to Lwt 's . This means that
some exceptions are dropped . Specifically , when more than one promise raises
an exception in a concurrent traversor , only one is passed on to the user ,
the others are silently ignored .
Use [ raise ] ( rather than [ Lwt.fail ] ) when within an Lwt callback .
{ 2 [ WithExceptions ] }
The [ WithExceptions ] module is there for convenience in non - production code
and for the specific cases where it is guaranteed not to raise an exception .
E.g. , it is intended for removing the { ! option } boxing in cases where the
invariant is guaranteed by construction :
{ [
( * * Return an interval of integers , from 0 to its argument ( if positive )
or from its argument to 0 ( otherwise ) .
Lwtreslib (or Lwt-result-stdlib) is a library to complement the OCaml's
Stdlib in software projects that make heavy use of Lwt and the result type.
{2 Introduction}
Lwtreslib aims to
{ul
{li Replace exception-raising functions with exception-safe one. E.g.,
functions that may raise {!Not_found} in the Stdlib are
shadowed by functions that return an {!option}.}
{li Provide an extensive set of Lwt-, result- and Lwt-result-traversors
for the common data-types of the Stdlib. E.g., {!List.map} is
available alongside [List.map_s] for Lwt sequential traversal,
[List.map_e] for result traversal, etc.}
{li Provide a uniform semantic, especially regarding error management.
E.g., all sequential traversal functions have the same fail-early
semantic, whereas all concurrent traversal functions have the same
best-effort semantic.}
{li Provide good documentation.}
}
{2 Semantic}
The semantic of the functions exported by Lwtreslib is uniform and
predictable. This applies to the Stdlib-like functions, the Lwt-aware
functions, the result-aware functions, and the Lwt-and-result-aware
functions.
{3 Semantic of vanilla-functions}
Functions that have the same signature as their Stdlib's counterpart have
the same semantic.
Functions exported by Lwtreslib do not raise exceptions. (With the exception
of the functions exported by the {!WithExceptions} module.) If a function
raises an exception in the Stdlib, its type is changed in Lwtreslib. In
general the following substitutions apply:
{ul
{li Functions that may raise {!Not_found} (e.g., [List.find]) return an
{!option} instead.}
{li Functions that may fail because of indexing errors (e.g., [List.nth],
[List.hd], etc.) also return an {!option} instead.}
{li Functions that may raise {!Invalid_argument} (e.g., [List.iter2])
return a {!result} type instead. They take an additional argument
indicating what [Error] to return instead of the exception.}
}
{3 Semantic of Lwt-aware functions}
Lwtreslib exports Lwt-aware functions for all traversal functions of the
Stdlib.
Functions with the [_s] suffix traverse their underlying collection
sequentially, waiting for the promise associated to one element to resolve
before processing to the next element. Note that for the [Seq*] modules (see
below) the sequential traversors are bundled under an [S] submodules rather
than suffixed with [_s].
Functions with the [_p] suffix traverse their underlying collection
concurrently, creating promises for all the elements and then waiting for
all of them to resolve. The "p" in the [_p] suffix is for compatibility with
Lwt and in particular [Lwt_list]. The mnemonic is "parallel" even though
there is not parallelism, only concurrency.
These [_s]- and [_p]-suffixed functions are semantically identical to their
Lwt counterpart when it is available. Most notably, [Lwtreslib.List] is a
strict superset of [Lwt_list].
{3 Semantic of result-aware functions}
Lwtreslib exports result-aware functions for all the traversal functions of
the Stdlib. These function allow easy manipulation of [('a, 'e) result]
values.
Functions with the [_e] suffix traverse their underlying collection whilst
wrapping the accumulator/result in a [result]. These functions have a
fail-early semantic: if one of the steps returns an [Error _], then the whole
traversal is interrupted and returns the same [Error _]. Note that for the
[Seq*] modules (see below) the result-aware traversors are bundled under an
[E] submodules rather than suffixed with [_e].
{3 Semantic of Lwt-result-aware functions}
Lwtreslib exports Lwt-result-aware functions for all the traversal functions
of the Stdlib. These function allow easy manipulation of
[('a, 'e) result Lwt.t] -- i.e., promises that may fail.
Functions with the [_es] suffix traverse their underlying collection
sequentially (like [_s] functions) whilst wrapping the accumulator/result in
a [result] (like [_e] functions). These functions have a fail-early
semantic: if one of the step returns a promise that resolves to an
[Error _], then the whole traversal is interrupted and the returned promise
resolves to the same [Error _]. Note that for the [Seq*] modules (see below)
the Lwt-result-aware traversors are bundled under an [ES] submodule rather
than suffixed with [_es].
Functions with the [_ep] suffix traverse their underlying collection
concurrently (like [_p] functions) whilst wrapping the accumulator/result in
a [result] (like [_e] functions). These functions have a best-effort
semantic: if one of the steps returns a promise that resolves to an
[Error _], the other promises are left to resolve; once all the promises
have resolved, then the returned promise resolves with an [Error _] that
carries all the other errors in a list. It is up to the user to convert this
list to a more manageable type if needed.
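For instance, a best-effort traversal keeps going past the first failure and
reports every error at the end. The sketch below assumes only what is
described above: [List.iter_ep] and the list-of-errors shape of its result.
{[
  let check name =
    if name = "" then Lwt.return (Error "empty name")
    else Lwt.return (Ok ())

  let all_checked : (unit, string list) result Lwt.t =
    List.iter_ep check [""; "ocaml"; ""]
    (* resolves to [Error ["empty name"; "empty name"]]; the valid element is
       still processed before the errors are collected *)
]}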
{3 A note on [Seq]}
The [Seq] module exports a type that suspends nodes under a closure.
Consequently, some interactions with result, Lwt, and result-Lwt are not
possible. E.g., [map]ping can be either lazy or within Lwt but not both:
[Seq.map_s] would have type [('a -> 'b Lwt.t) -> 'a t -> 'b t Lwt.t] where
the returned promise forces the whole sequence (and never resolves on
infinite sequences).
In Lwtreslib, [Seq] does not provide these additional transformers that would
force the sequence simply due to the bad interaction of the Monads and the
type of sequences. Instead, Lwtreslib provides
- A subset of traversors where the laziness and the monad mix well (e.g.,
[iter] but not [map]). These are exported under the modules [S], [E] and
[ES].
- Variants of [Seq] called [Seq_e], [Seq_s], and [Seq_es] where the
combination with the monad is baked into the sequence type itself.
If you want to map a sequence using an Lwt-returning function, you should
first convert the sequence to an Lwt-aware sequence using [Seq_s.of_seq],
and then map this converted sequence using [Seq_s.S.map].
Note that this returns a [Seq_s.t] sequence so further transformations will
be within [Seq_s] and not within [Seq]. Once in a monad, you stay in the
monad.
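For instance, the conversion described above looks like this (a sketch:
[fetch] stands for any [string -> string Lwt.t] function and is not part of
the library):
{[
  let fetch (uri : string) : string Lwt.t = Lwt.return uri (* stand-in *)

  let fetch_all (uris : string Seq.t) : string Seq_s.t =
    uris |> Seq_s.of_seq |> Seq_s.S.map fetch
]}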
{3 [Traced]}
The {!Traced} module offers a small wrapper around Lwtreslib. This wrapper
is intended to ease the use of [_ep] functions. It does so by introducing a
trace data-type: a structured collection of errors.
This trace data-type is used to collapse the types ['e] and ['e list] of
errors. Indeed, without this collapse, chaining [_ep] together or chaining
[_ep] with [_es] functions requires significant boilerplate to flatten
lists, to listify single errors, etc. Need for boilerplate mostly vanishes
when using the [Traced] wrapper.
{2 Monad helpers}
Lwtreslib also exports monadic operators (binds, return, etc.) for the
Lwt-monad, the result-monad, and the combined Lwt-result-monad.
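A small sketch of these operators in use, with the same [Lwt_result_syntax]
binding operators as in the [Traced] example further below ([read_file] and
[parse] are hypothetical stand-ins for functions returning
[(_, _) result Lwt.t]):
{[
  let load path =
    let open Lwt_result_syntax in
    let* contents = read_file path in
    let* value = parse contents in
    return (value + 1)
]}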
{2 Exceptions}
If at all possible, avoid exceptions.
If possible, avoid exceptions.
If you use exceptions, here are a few things to keep in mind:
The [_p] functions are semantically equivalent to Lwt's. This means that
some exceptions are dropped. Specifically, when more than one promise raises
an exception in a concurrent traversor, only one is passed on to the user,
the others are silently ignored.
Use [raise] (rather than [Lwt.fail]) when within an Lwt callback.
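For instance, within a callback passed to [Lwt.catch], a plain [raise] is
turned into a rejected promise by Lwt itself (a sketch):
{[
  let guarded (x : int) : int Lwt.t =
    Lwt.catch
      (fun () ->
        if x < 0 then raise (Invalid_argument "negative input")
        else Lwt.return x)
      (fun _exn -> Lwt.return 0)
]}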
{2 [WithExceptions]}
The [WithExceptions] module is there for convenience in non-production code
and for the specific cases where it is guaranteed not to raise an exception.
E.g., it is intended for removing the {!option} boxing in cases where the
invariant is guaranteed by construction:
{[
(** Return an interval of integers, from 0 to its argument (if positive)
or from its argument to 0 (otherwise). *)
let steps stop =
if stop = 0 then
[]
else if stop > 0 then
List.init ~when_negative_length:() stop Fun.id
|> WithExceptions.Option.get ~loc:__LOC__
else
let stop = Int.neg stop in
List.init ~when_negative_length:() stop Int.neg
|> WithExceptions.Option.get ~loc:__LOC__
]} *)
(** {1 Instance: [Bare]}
[Bare] provides all the functions as described above. It is intended to be
opened to shadow some modules of [Stdlib].
All values within the modules follow the same naming and semantic
conventions described above. The sequential traversors are fail-early:
in the following example the code returns an [Error] and does not print
anything.
{[
List.iter_e
(fun x ->
if x = "" then
Error "empty string"
else begin
print_endline x;
Ok () end)
[
""; (* This will cause the iteration to stop *)
"this is not printed";
"neither is this printed";
]
]}
The concurrent (parallel) traversors are best-effort: in the following
example the code prints all the non-empty strings in an unspecified order
before returning an [Error].
{[
List.iter_ep
(fun x ->
if x = "" then
Lwt.return (Error "empty string")
else begin
print_endline x;
Lwt.return (Ok ()) end)
[
""; (* This will cause the iteration to error in the end *)
"this is printed";
"this is printed as well";
]
]}
The module [WithExceptions] provides some exception-raising helpers to
reduce the boilerplate that the library imposes.
{2 Comparison, Equality, etc.}
When a function requires a comparison function, it takes a [compare] named
parameter. This must define a total order as described in
{!Stdlib.Map.OrderedType}.
Note that the polymorphic structural comparison {!Stdlib.compare} is unsound
for comparing some values; notably, it may fail when comparing
data-structures that include functions or closures.
Similarly and for the same reason, some functions take an [equal] function.
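For instance, a record key type is better served by an explicit total order
than by the polymorphic {!Stdlib.compare} (a sketch):
{[
  type key = { id : int; label : string }

  (* total order on [key], avoiding polymorphic comparison *)
  let compare_key a b =
    match Int.compare a.id b.id with
    | 0 -> String.compare a.label b.label
    | c -> c

  let equal_key a b = compare_key a b = 0
]}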
*)
module Bare : sig
module Hashtbl : Bare_sigs.Hashtbl.S
module List : Bare_sigs.List.S
module Map : Bare_sigs.Map.S
module Monad : Bare_sigs.Monad.S
module Option : Bare_sigs.Option.S
module Result : Bare_sigs.Result.S
module Seq : Bare_sigs.Seq.S
module Seq_e : Bare_sigs.Seq_e.S
module Seq_s : Bare_sigs.Seq_s.S
module Seq_es :
Bare_sigs.Seq_es.S
with type ('a, 'e) seq_e_t := ('a, 'e) Seq_e.t
and type 'a seq_s_t := 'a Seq_s.t
module Set : Bare_sigs.Set.S
module Unit : Bare_sigs.Unit.S
module WithExceptions : Bare_sigs.WithExceptions.S
end
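(** A small usage sketch for [Bare]. The [open] below assumes the library is
    exposed under the top-level module name [Lwtreslib]; adjust to the actual
    packaging in use.
{[
  open Lwtreslib.Bare

  let first : int option = List.hd ([] : int list)   (* [None], no exception *)
  let third : int option = List.nth [1; 2; 3] 2      (* [Some 3] *)
]}
*)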
(** A module with the [TRACE] signature provides the necessary type and functions
to collect multiple errors into a single error data-structure. This, in turn,
allows Lwtreslib to provide more usable [_ep] variants to standard traversal
functions. *)
module type TRACE = Traced_sigs.Trace.S
module type TRACED_MONAD = Traced_sigs.Monad.S
(** [Traced] is a functor to generate advanced combined-monad replacements
for parts of the Stdlib. The generated module is similar to [Bare] with the
addition of traces: structured collections of errors.
For convenience, the monad includes primitives to error directly with a
trace rather than a bare error.
All the [_ep] traversors return traces of errors rather than lists of
errors. The [_ep] traversors preserve their best-effort semantic.
Additional functions in the [Monad] allow the construction of sequential
traces: functions to enrich traces with new errors. E.g.,
{[
let load_config file =
Result.map_error
(fun trace ->
Trace.cons "cannot load configuration file" trace)
@@ begin
let open Lwt_result_syntax in
let* file = open_file in
let* lines = read_lines file in
let* json = parse_config lines in
make_dictionary json
end
]}
Example implementations of traces are provided in the [traces/] directory.
*)
module Traced (Trace : TRACE) : sig
module Monad : TRACED_MONAD with type 'error trace = 'error Trace.trace
module Hashtbl :
Traced_sigs.Hashtbl.S with type 'error trace := 'error Trace.trace
module List : Traced_sigs.List.S with type 'error trace := 'error Trace.trace
module Map : Traced_sigs.Map.S with type 'error trace := 'error Trace.trace
module Option : Traced_sigs.Option.S
module Result : Traced_sigs.Result.S
module Seq : Traced_sigs.Seq.S with type 'error trace := 'error Trace.trace
module Seq_e : Traced_sigs.Seq_e.S
module Seq_s :
Traced_sigs.Seq_s.S with type 'error trace := 'error Trace.trace
module Seq_es :
Traced_sigs.Seq_es.S
with type ('a, 'e) seq_e_t := ('a, 'e) Seq_e.t
and type 'a seq_s_t := 'a Seq_s.t
module Set : Traced_sigs.Set.S with type 'error trace := 'error Trace.trace
module Unit : Traced_sigs.Unit.S
module WithExceptions : Traced_sigs.WithExceptions.S
end
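(** A hypothetical instantiation sketch for {!Traced}. The members below are
    illustrative only: the actual requirements are those of
    [Traced_sigs.Trace.S], which is not shown in this file, so this structure
    is not guaranteed to match it.
{[
  module List_trace = struct
    type 'error trace = 'error list
    let make e = [e]                  (* hypothetical *)
    let cons e trace = e :: trace     (* mirrors [Trace.cons] used above *)
    (* ... any further members required by [Traced_sigs.Trace.S] ... *)
  end

  (* module T = Traced (List_trace) *)
]}
*)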
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/448daee0f4ae76893ae524536e428aa0e22f1d60/src/lib_lwt_result_stdlib/lwtreslib.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Return an interval of integers, from 0 to its argument (if positive)
or from its argument to 0 (otherwise).
This will cause the iteration to stop
This will cause the iteration to error in the end | Copyright ( c ) 2020 - 2021 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(** {1:intro: the Lwt- and result-aware complement}
Lwtreslib (or Lwt-result-stdlib) is a library to complement the OCaml's
Stdlib in software projects that make heavy use of Lwt and the result type.
{2 Introduction}
Lwtreslib aims to
{ul
{li Replace exception-raising functions with exception-safe ones. E.g.,
functions that may raise {!Not_found} in the Stdlib are
shadowed by functions that return an {!option}.}
{li Provide an extensive set of Lwt-, result- and Lwt-result-traversors
for the common data-types of the Stdlib. E.g., {!List.map} is
available alongside [List.map_s] for Lwt sequential traversal,
[List.map_e] for result traversal, etc.}
{li Provide a uniform semantic, especially regarding error management.
E.g., all sequential traversal functions have the same fail-early
semantic, whereas all concurrent traversal functions have the same
best-effort semantic.}
{li Provide good documentation.}
}
{2 Semantic}
The semantic of the functions exported by Lwtreslib is uniform and
predictable. This applies to the Stdlib-like functions, the Lwt-aware
functions, the result-aware functions, and the Lwt-and-result-aware
functions.
{3 Semantic of vanilla-functions}
Functions that have the same signature as their Stdlib's counterpart have
the same semantic.
Functions exported by Lwtreslib do not raise exceptions. (With the exception
of the functions exported by the {!WithExceptions} module.) If a function
raises an exception in the Stdlib, its type is changed in Lwtreslib. In
general the following substitutions apply:
{ul
{li Functions that may raise {!Not_found} (e.g., [List.find]) return an
{!option} instead.}
{li Functions that may fail because of indexing errors (e.g., [List.nth],
[List.hd], etc.) also return an {!option} instead.}
{li Functions that may raise {!Invalid_argument} (e.g., [List.iter2])
return a {!result} type instead. They take an additional argument
indicating what [Error] to return instead of the exception.}
}
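For instance, the option-returning lookups behave as follows (a sketch):
{[
  let third : int option = List.nth [1; 2; 3] 2   (* [Some 3] *)
  let none : int option = List.nth [1; 2; 3] 7    (* [None] rather than an exception *)
  let head : int option = List.hd []              (* [None] *)
]}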
{3 Semantic of Lwt-aware functions}
Lwtreslib exports Lwt-aware functions for all traversal functions of the
Stdlib.
Functions with the [_s] suffix traverse their underlying collection
sequentially, waiting for the promise associated to one element to resolve
before processing to the next element. Note that for the [Seq*] modules (see
below) the sequential traversors are bundled under an [S] submodule rather
than suffixed with [_s].
Functions with the [_p] suffix traverse their underlying collection
concurrently, creating promises for all the elements and then waiting for
all of them to resolve. The "p" in the [_p] suffix is for compatibility with
Lwt and in particular [Lwt_list]. The mnemonic is "parallel" even though
there is no parallelism, only concurrency.
These [_s]- and [_p]-suffixed functions are semantically identical to their
Lwt counterpart when it is available. Most notably, [Lwtreslib.List] is a
strict superset of [Lwt_list].
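For instance, the sequential and concurrent variants differ only in their
suffix (a sketch; [fetch] stands for any [string -> string Lwt.t] function):
{[
  let fetch (uri : string) : string Lwt.t = Lwt.return uri (* stand-in *)
  let uris = ["one"; "two"; "three"]

  let sequential : string list Lwt.t = List.map_s fetch uris
  let concurrent : string list Lwt.t = List.map_p fetch uris
]}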
{3 Semantic of result-aware functions}
Lwtreslib exports result-aware functions for all the traversal functions of
the Stdlib. These functions allow easy manipulation of [('a, 'e) result]
values.
Functions with the [_e] suffix traverse their underlying collection whilst
wrapping the accumulator/result in a [result]. These functions have a
fail-early semantic: if one of the steps returns an [Error _], then the whole
traversal is interrupted and returns the same [Error _]. Note that for the
[Seq*] modules (see below) the result-aware traversors are bundled under an
[E] submodule rather than suffixed with [_e].
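For instance, a fail-early validation stops at the first offending element
(a sketch):
{[
  let positive x = if x > 0 then Ok x else Error (`Not_positive x)

  let validated : (int list, [ `Not_positive of int ]) result =
    List.map_e positive [1; 2; -3; 4]
    (* [Error (`Not_positive (-3))]; [4] is never inspected *)
]}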
{3 Semantic of Lwt-result-aware functions}
Lwtreslib exports Lwt-result-aware functions for all the traversal functions
of the Stdlib. These functions allow easy manipulation of
[('a, 'e) result Lwt.t] -- i.e., promises that may fail.
Functions with the [_es] suffix traverse their underlying collection
sequentially (like [_s] functions) whilst wrapping the accumulator/result in
a [result] (like [_e] functions). These functions have a fail-early
semantic: if one of the steps returns a promise that resolves to an
[Error _], then the whole traversal is interrupted and the returned promise
resolves to the same [Error _]. Note that for the [Seq*] modules (see below)
the Lwt-result-aware traversors are bundled under an [ES] submodule rather
than suffixed with [_es].
Functions with the [_ep] suffix traverse their underlying collection
concurrently (like [_p] functions) whilst wrapping the accumulator/result in
a [result] (like [_e] functions). These functions have a best-effort
semantic: if one of the steps returns a promise that resolves to an
[Error _], the other promises are left to resolve; once all the promises
have resolved, then the returned promise resolves with an [Error _] that
carries all the other errors in a list. It is up to the user to convert this
list to a more manageable type if needed.
{3 A note on [Seq]}
The [Seq] module exports a type that suspends nodes under a closure.
Consequently, some interactions with result, Lwt, and result-Lwt are not
possible. E.g., [map]ping can be either lazy or within Lwt but not both:
[Seq.map_s] would have type [('a -> 'b Lwt.t) -> 'a t -> 'b t Lwt.t] where
the returned promise forces the whole sequence (and never resolves on
infinite sequences).
In Lwtreslib, [Seq] does not provide these additional transformers that would
force the sequence simply due to the bad interaction of the Monads and the
type of sequences. Instead, Lwtreslib provides
- A subset of traversors where the laziness and the monad mix well (e.g.,
[iter] but not [map]). These are exported under the modules [S], [E] and
[ES].
- Variants of [Seq] called [Seq_e], [Seq_s], and [Seq_es] where the
combination with the monad is baked into the sequence type itself.
If you want to map a sequence using an Lwt-returning function, you should
first convert the sequence to an Lwt-aware sequence using [Seq_s.of_seq],
and then map this converted sequence using [Seq_s.S.map].
Note that this returns a [Seq_s.t] sequence so further transformations will
be within [Seq_s] and not within [Seq]. Once in a monad, you stay in the
monad.
{3 [Traced]}
The {!Traced} module offers a small wrapper around Lwtreslib. This wrapper
is intended to ease the use of [_ep] functions. It does so by introducing a
trace data-type: a structured collection of errors.
This trace data-type is used to collapse the types ['e] and ['e list] of
errors. Indeed, without this collapse, chaining [_ep] together or chaining
[_ep] with [_es] functions requires significant boilerplate to flatten
lists, to listify single errors, etc. Need for boilerplate mostly vanishes
when using the [Traced] wrapper.
{2 Monad helpers}
Lwtreslib also exports monadic operators (binds, return, etc.) for the
Lwt-monad, the result-monad, and the combined Lwt-result-monad.
{2 Exceptions}
If at all possible, avoid exceptions.
If possible, avoid exceptions.
If you use exceptions, here are a few things to keep in mind:
The [_p] functions are semantically equivalent to Lwt's. This means that
some exceptions are dropped. Specifically, when more than one promise raises
an exception in a concurrent traversor, only one is passed on to the user,
the others are silently ignored.
Use [raise] (rather than [Lwt.fail]) when within an Lwt callback.
{2 [WithExceptions]}
The [WithExceptions] module is there for convenience in non-production code
and for the specific cases where it is guaranteed not to raise an exception.
E.g., it is intended for removing the {!option} boxing in cases where the
invariant is guaranteed by construction:
{[
let steps stop =
if stop = 0 then
[]
else if stop > 0 then
List.init ~when_negative_length:() stop Fun.id
|> WithExceptions.Option.get ~loc:__LOC__
else
let stop = Int.neg stop in
List.init ~when_negative_length:() stop Int.neg
|> WithExceptions.Option.get ~loc:__LOC__
]} *)
(** {1 Instance: [Bare]}
[Bare] provides all the functions as described above. It is intended to be
opened to shadow some modules of [Stdlib].
All values within the modules follow the same naming and semantic
conventions described above. The sequential traversors are fail-early:
in the following example the code returns an [Error] and does not print
anything.
{[
List.iter_e
(fun x ->
if x = "" then
Error "empty string"
else begin
print_endline x;
Ok () end)
[
"this is not printed";
"neither is this printed";
]
]}
The concurrent (parallel) traversors are best-effort: in the following
example the code prints all the non-empty strings in an unspecified order
before returning an [Error].
{[
List.iter_ep
(fun x ->
if x = "" then
Lwt.return (Error "empty string")
else begin
print_endline x;
Lwt.return (Ok ()) end)
[
"this is printed";
"this is printed as well";
]
]}
The module [WithExceptions] provides some exception-raising helpers to
reduce the boilerplate that the library imposes.
{2 Comparison, Equality, etc.}
When a function requires a comparison function, it takes a [compare] named
parameter. This must define a total order as described in
{!Stdlib.Map.OrderedType}.
Note that the polymorphic structural comparison {!Stdlib.compare} is unsound
for comparing some values; notably, it may fail when comparing
data-structures that include functions or closures.
Similarly and for the same reason, some functions take an [equal] function.
*)
module Bare : sig
module Hashtbl : Bare_sigs.Hashtbl.S
module List : Bare_sigs.List.S
module Map : Bare_sigs.Map.S
module Monad : Bare_sigs.Monad.S
module Option : Bare_sigs.Option.S
module Result : Bare_sigs.Result.S
module Seq : Bare_sigs.Seq.S
module Seq_e : Bare_sigs.Seq_e.S
module Seq_s : Bare_sigs.Seq_s.S
module Seq_es :
Bare_sigs.Seq_es.S
with type ('a, 'e) seq_e_t := ('a, 'e) Seq_e.t
and type 'a seq_s_t := 'a Seq_s.t
module Set : Bare_sigs.Set.S
module Unit : Bare_sigs.Unit.S
module WithExceptions : Bare_sigs.WithExceptions.S
end
(** A module with the [TRACE] signature provides the necessary type and functions
to collect multiple errors into a single error data-structure. This, in turn,
allows Lwtreslib to provide more usable [_ep] variants to standard traversal
functions. *)
module type TRACE = Traced_sigs.Trace.S
module type TRACED_MONAD = Traced_sigs.Monad.S
(** [Traced] is a functor to generate advanced combined-monad replacements
for parts of the Stdlib. The generated module is similar to [Bare] with the
addition of traces: structured collections of errors.
For convenience, the monad includes primitives to error directly with a
trace rather than a bare error.
All the [_ep] traversors return traces of errors rather than lists of
errors. The [_ep] traversors preserve their best-effort semantic.
Additional functions in the [Monad] allow the construction of sequential
traces: functions to enrich traces with new errors. E.g.,
{[
let load_config file =
Result.map_error
(fun trace ->
Trace.cons "cannot load configuration file" trace)
@@ begin
let open Lwt_result_syntax in
let* file = open_file in
let* lines = read_lines file in
let* json = parse_config lines in
make_dictionary json
end
]}
Example implementations of traces are provided in the [traces/] directory.
*)
module Traced (Trace : TRACE) : sig
module Monad : TRACED_MONAD with type 'error trace = 'error Trace.trace
module Hashtbl :
Traced_sigs.Hashtbl.S with type 'error trace := 'error Trace.trace
module List : Traced_sigs.List.S with type 'error trace := 'error Trace.trace
module Map : Traced_sigs.Map.S with type 'error trace := 'error Trace.trace
module Option : Traced_sigs.Option.S
module Result : Traced_sigs.Result.S
module Seq : Traced_sigs.Seq.S with type 'error trace := 'error Trace.trace
module Seq_e : Traced_sigs.Seq_e.S
module Seq_s :
Traced_sigs.Seq_s.S with type 'error trace := 'error Trace.trace
module Seq_es :
Traced_sigs.Seq_es.S
with type ('a, 'e) seq_e_t := ('a, 'e) Seq_e.t
and type 'a seq_s_t := 'a Seq_s.t
module Set : Traced_sigs.Set.S with type 'error trace := 'error Trace.trace
module Unit : Traced_sigs.Unit.S
module WithExceptions : Traced_sigs.WithExceptions.S
end
|
fb15a53b8388a9f34854b42a94fc082e56e2d3993087ac64f5cd282a2be6acaa | GrammaticalFramework/gf-core | ErrM.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveFunctor #-}
-- BNF Converter: Error Monad
-- Copyright (C) 2004 Author:
-- This file comes with NO WARRANTY and may be used FOR ANY PURPOSE.
module GFCC.ErrM where
-- Control.Monad.Fail import will become redundant in GHC 8.8+
import qualified Control.Monad.Fail as Fail
import Control.Monad (ap)
-- the Error monad: like Maybe type with error msgs
data Err a = Ok a | Bad String
deriving (Read, Show, Eq, Functor)
instance Applicative Err where
pure = Ok
(<*>) = ap
instance Monad Err where
return = Ok
Ok a >>= f = f a
Bad s >>= f = Bad s
#if !(MIN_VERSION_base(4,13,0))
fail = Bad
#endif
instance Fail.MonadFail Err where
fail = Bad
| null | https://raw.githubusercontent.com/GrammaticalFramework/gf-core/3122590e351f769ca6e60dfd4eeaafba1c5c22e8/src/tools/c/GFCC/ErrM.hs | haskell | BNF Converter: Error Monad
This file comes with NO WARRANTY and may be used FOR ANY PURPOSE. | # LANGUAGE CPP #
# LANGUAGE DeriveFunctor #
Copyright ( C ) 2004 Author :
module GFCC.ErrM where
Control . Monad . Fail import will become redundant in GHC 8.8 +
import qualified Control.Monad.Fail as Fail
import Control.Monad (ap)
the Error monad : like Maybe type with error msgs
data Err a = Ok a | Bad String
deriving (Read, Show, Eq, Functor)
instance Applicative Err where
pure = Ok
(<*>) = ap
instance Monad Err where
return = Ok
Ok a >>= f = f a
Bad s >>= f = Bad s
#if !(MIN_VERSION_base(4,13,0))
fail = Bad
#endif
instance Fail.MonadFail Err where
fail = Bad
|
5fa8ccd6aa7ec8bcab160ae0b77493145bf40e5649c56ae686a35afe089f6ccd | RDTK/generator | configuration.lisp | ;;;; configuration.lisp --- Configuration for the commandline-interface module.
;;;;
Copyright ( C ) 2013 - 2022 Jan Moringen
;;;;
Author : < >
(cl:in-package #:build-generator.commandline-interface)
;;; Schema
(options:define-schema *global-schema*
"Global configuration options."
Generic
("version" :type 'boolean :default nil
:documentation
"Print version information and exit.")
("help" :type 'boolean :default nil
:documentation
"Print this help and exit.")
("swank" :type 'boolean :default nil
:documentation
"Start a swank server.")
("debug" :type 'boolean :default nil
:documentation
"Enable debug mode.")
;; Execution mode and feedback
("on-error" :type 'error-policy
:default '((caused-by-unfulfilled-project-dependency-error . :continue)
(t . :fail))
:documentation
#.(format nil "Continue when encountering errors?~@
~@
Can be simply~@
~2@T\"abort\" to abort immediately for any ~
error~@
~2@T\"fail\" to continue but indicate failure ~
for all errors~@
~2@T\"continue\" to continue without ~
indicating failure for all ~
errors~@
~2@T\"debug\" to enter the debug for all ~
errors~@
~@
To choose specific actions for particular ~@
errors, rules can be written according to the ~@
following grammar:~@
~2@Terror-policy ::= rule* default~@
~2@Trule ::= error \"=>\" action \":\"~@
~2@Terror ::= ~{\"~A\"~^ | ~}~@
~2@Tdefault ::= action~@
~2@Taction ::= ~{\"~(~A~)\"~^ | ~}~@
~@
Example:~@
~@
~2@Tdependency-error=>continue:analysis-error=>fail:abort~@
~@
The above continues the run with exit code ~
zero in case dependency-errors are ~
encountered, continues and returns a non-zero ~
exit code for analysis-errors and immediately ~
aborts with non-zero exit code for all other ~
errors."
(map 'list (lambda+ ((name . alias))
(or alias (string-downcase name)))
*condition-types*)
*error-handling-actions*))
("num-processes" :type 'positive-integer :default 8
:documentation
"Number of threads (and processes) to execute in parallel.")
("progress-style" :type '(member :none :cmake :one-line)
:default :cmake
:documentation
"Progress display style.")
("colored-output" :type 'boolean :default nil
:documentation
"Should output be printed with colors.")
;; Directories
("cache-directory" :type 'options:directory-pathname
:documentation
"Directory into which cached data like repository mirrors should be written.")
("temp-directory" :type 'options:directory-pathname
:default #P"/tmp/"
:documentation
"Directory into which temporary files should be written.")
("cache-age-limit" :type '(or null non-negative-integer)
:default 1800
:documentation
#.(format nil "Acceptable age of cached ~
information in seconds.~@
~@
Older cached information will not be used and ~
will be replaced by newly computed ~
information."))
;; Application-level debugging
("trace-variable" :type '(list string :inherit? t)
:documentation
"Trace all accesses to the specified variable."))
(options:define-schema *schema*
"Configuration options of the build generator."
("global" *global-schema*)
("commands" commands::*command-schema*))
Commandline options
(commandline:define-option-mapping (*schema* "global")
Meta
("--version" "version")
(("-h" "--help") "help")
("--debug" "debug")
;; Execution mode and feedback
("--on-error" "on-error" "POLICY")
(("-j" "--num-processes") "num-processes" "NUMBER-OF-PROCESSES")
("--progress-style" "progress-style" "STYLE")
;; Directories and cache
("--temp-directory" "temp-directory" "DIRECTORY")
("--cache-directory" "cache-directory" "DIRECTORY")
("--cache-age-limit" "cache-age-limit" "AGE-IN-SECONDS")
;; Application-level debugging
("--trace-variable" "trace-variable" "VARIABLE-NAME"))
;;; Configuration processing
(defun process-configuration-and-commandline-arguments (arguments)
"Load configuration from sources then add ARGUMENTS."
(let+ ((config-debug? (configuration.options.debug:maybe-enable-debugging
"BUILD_GENERATOR_"))
(schema *schema*)
(configuration (options:make-configuration schema))
(source (configuration.options.sources:make-source
:common-cascade
:basename "build-generator"
:syntax :ini))
(synchronizer (make-instance 'options:standard-synchronizer
:target configuration))
((&flet option-value (&rest components)
(let ((option (options:find-option
components configuration)))
(options:option-value
option :if-does-not-exist nil)))))
;; Process configuration sources other than commandline arguments.
(configuration.options.sources:initialize source schema)
(configuration.options.sources:process source synchronizer)
;; Process global commandline options and split ARGUMENTS into
;;
;; GLOBAL-ARGUMENTS COMMAND LOCAL-ARGUMENTS
;;
(let* ((command-index (process-global-commandline-arguments
synchronizer arguments))
(command (if command-index
(elt arguments command-index)
"help"))
(local-arguments (when command-index
(subseq arguments (1+ command-index)))))
;; Process local (i.e. consumed by command) commandline options.
(commands:configure-command
synchronizer command local-arguments)
;; If the help command will be executed because no command has
;; been supplied, enable brief output.
(unless command-index
(setf (options:option-value
(options:find-option
'("commands" "help" "brief?") configuration))
t)))
(when config-debug?
(configuration.options.debug:output
(with-output-to-string (stream)
(describe configuration stream)
(terpri stream))))
;; Extract and interpret "special" options.
(let+ (((version? help? debug?)
(map 'list (curry #'option-value "global")
'("version" "help" "debug"))))
(when debug? (log:config :debug))
(values #'option-value schema configuration synchronizer
(list :version? version? :help? help? :debug? debug?)))))
(defun process-global-commandline-arguments (synchronizer arguments)
(let+ (((&flet notify (name event value &key (raw? t))
(options:notify
synchronizer name event value :source :commandline :raw? raw?)))
((&flet set-value (name value)
(notify :added name nil)
(notify :new-value name value
:raw? (not (typep value 'boolean))))))
(commandline:map-commandline-options
#'set-value "global" arguments :stop-at-positional? t)))
;;; Terminal setup
(defun (setf default-progress-style) (new-value)
(reinitialize-instance (options:find-option
'("global" "progress-style") *schema*)
:default new-value))
(defun (setf default-colored-output) (new-value)
(reinitialize-instance (configuration.options:find-option
'("global" "colored-output") *schema*)
:default new-value))
(defun adapt-configuration-for-terminal (&key
(standard-output *standard-output*)
(error-output *error-output*)
(terminal-type (uiop:getenv "TERM")))
(let ((interactive? (and (interactive-stream-p standard-output)
(interactive-stream-p error-output)))
(smart? (not (equal terminal-type "dumb"))))
Change defaults to " one - line " progress style and colored output
;; when apparently running interactively.
(when (and interactive? smart?)
(setf (default-progress-style) :one-line
(default-colored-output) t))
(and interactive? smart?)))
Cache directory setup
(defun (setf default-cache-directory) (new-value)
(reinitialize-instance (options:find-option
'("global" "cache-directory") *schema*)
:default new-value))
(defun adapt-configuration-for-home ()
(setf (default-cache-directory) (uiop:xdg-cache-home "build-generator/")))
;;; Environment adaptation
(defun adapt-configuration-for-environment ()
(adapt-configuration-for-home)
(adapt-configuration-for-terminal))
| null | https://raw.githubusercontent.com/RDTK/generator/8d9e6e47776f2ccb7b5ed934337d2db50ecbe2f5/src/commandline-interface/configuration.lisp | lisp | configuration.lisp --- Configuration for the commandline-interface module.
Schema
Execution mode and feedback
Directories
Application-level debugging
Execution mode and feedback
Directories and cache
Application-level debugging
Configuration processing
Process configuration sources other than commandline arguments.
Process global commandline options and split ARGUMENTS into
GLOBAL-ARGUMENTS COMMAND LOCAL-ARGUMENTS
Process local (i.e. consumed by command) commandline options.
If the help command will be executed because no command has
been supplied, enable brief output.
Extract and interpret "special" options.
Terminal setup
when apparently running interactively.
Environment adaptation | Copyright ( C ) 2013 - 2022 Jan Moringen
Author : < >
(cl:in-package #:build-generator.commandline-interface)
(options:define-schema *global-schema*
"Global configuration options."
Generic
("version" :type 'boolean :default nil
:documentation
"Print version information and exit.")
("help" :type 'boolean :default nil
:documentation
"Print this help and exit.")
("swank" :type 'boolean :default nil
:documentation
"Start a swank server.")
("debug" :type 'boolean :default nil
:documentation
"Enable debug mode.")
("on-error" :type 'error-policy
:default '((caused-by-unfulfilled-project-dependency-error . :continue)
(t . :fail))
:documentation
#.(format nil "Continue when encountering errors?~@
~@
Can be simply~@
~2@T\"abort\" to abort immediately for any ~
error~@
~2@T\"fail\" to continue but indicate failure ~
for all errors~@
~2@T\"continue\" to continue without ~
indicating failure for all ~
errors~@
~2@T\"debug\" to enter the debug for all ~
errors~@
~@
To choose specific actions for particular ~@
errors, rules can be written according to the ~@
following grammar:~@
~2@Terror-policy ::= rule* default~@
~2@Trule ::= error \"=>\" action \":\"~@
~2@Terror ::= ~{\"~A\"~^ | ~}~@
~2@Tdefault ::= action~@
~2@Taction ::= ~{\"~(~A~)\"~^ | ~}~@
~@
Example:~@
~@
~2@Tdependency-error=>continue:analysis-error=>fail:abort~@
~@
The above continues the run with exit code ~
zero in case dependency-errors are ~
encountered, continues and returns a non-zero ~
exit code for analysis-errors and immediately ~
aborts with non-zero exit code for all other ~
errors."
(map 'list (lambda+ ((name . alias))
(or alias (string-downcase name)))
*condition-types*)
*error-handling-actions*))
("num-processes" :type 'positive-integer :default 8
:documentation
"Number of threads (and processes) to execute in parallel.")
("progress-style" :type '(member :none :cmake :one-line)
:default :cmake
:documentation
"Progress display style.")
("colored-output" :type 'boolean :default nil
:documentation
"Should output be printed with colors.")
("cache-directory" :type 'options:directory-pathname
:documentation
"Directory into which cached data like repository mirrors should be written.")
("temp-directory" :type 'options:directory-pathname
:default #P"/tmp/"
:documentation
"Directory into which temporary files should be written.")
("cache-age-limit" :type '(or null non-negative-integer)
:default 1800
:documentation
#.(format nil "Acceptable age of cached ~
information in seconds.~@
~@
Older cached information will not be used and ~
will be replaced by newly computed ~
information."))
("trace-variable" :type '(list string :inherit? t)
:documentation
"Trace all accesses to the specified variable."))
(options:define-schema *schema*
"Configuration options of the build generator."
("global" *global-schema*)
("commands" commands::*command-schema*))
Commandline options
(commandline:define-option-mapping (*schema* "global")
Meta
("--version" "version")
(("-h" "--help") "help")
("--debug" "debug")
("--on-error" "on-error" "POLICY")
(("-j" "--num-processes") "num-processes" "NUMBER-OF-PROCESSES")
("--progress-style" "progress-style" "STYLE")
("--temp-directory" "temp-directory" "DIRECTORY")
("--cache-directory" "cache-directory" "DIRECTORY")
("--cache-age-limit" "cache-age-limit" "AGE-IN-SECONDS")
("--trace-variable" "trace-variable" "VARIABLE-NAME"))
(defun process-configuration-and-commandline-arguments (arguments)
"Load configuration from sources then add ARGUMENTS."
(let+ ((config-debug? (configuration.options.debug:maybe-enable-debugging
"BUILD_GENERATOR_"))
(schema *schema*)
(configuration (options:make-configuration schema))
(source (configuration.options.sources:make-source
:common-cascade
:basename "build-generator"
:syntax :ini))
(synchronizer (make-instance 'options:standard-synchronizer
:target configuration))
((&flet option-value (&rest components)
(let ((option (options:find-option
components configuration)))
(options:option-value
option :if-does-not-exist nil)))))
(configuration.options.sources:initialize source schema)
(configuration.options.sources:process source synchronizer)
(let* ((command-index (process-global-commandline-arguments
synchronizer arguments))
(command (if command-index
(elt arguments command-index)
"help"))
(local-arguments (when command-index
(subseq arguments (1+ command-index)))))
(commands:configure-command
synchronizer command local-arguments)
(unless command-index
(setf (options:option-value
(options:find-option
'("commands" "help" "brief?") configuration))
t)))
(when config-debug?
(configuration.options.debug:output
(with-output-to-string (stream)
(describe configuration stream)
(terpri stream))))
(let+ (((version? help? debug?)
(map 'list (curry #'option-value "global")
'("version" "help" "debug"))))
(when debug? (log:config :debug))
(values #'option-value schema configuration synchronizer
(list :version? version? :help? help? :debug? debug?)))))
(defun process-global-commandline-arguments (synchronizer arguments)
(let+ (((&flet notify (name event value &key (raw? t))
(options:notify
synchronizer name event value :source :commandline :raw? raw?)))
((&flet set-value (name value)
(notify :added name nil)
(notify :new-value name value
:raw? (not (typep value 'boolean))))))
(commandline:map-commandline-options
#'set-value "global" arguments :stop-at-positional? t)))
(defun (setf default-progress-style) (new-value)
(reinitialize-instance (options:find-option
'("global" "progress-style") *schema*)
:default new-value))
(defun (setf default-colored-output) (new-value)
(reinitialize-instance (configuration.options:find-option
'("global" "colored-output") *schema*)
:default new-value))
(defun adapt-configuration-for-terminal (&key
(standard-output *standard-output*)
(error-output *error-output*)
(terminal-type (uiop:getenv "TERM")))
(let ((interactive? (and (interactive-stream-p standard-output)
(interactive-stream-p error-output)))
(smart? (not (equal terminal-type "dumb"))))
Change defaults to " one - line " progress style and colored output
(when (and interactive? smart?)
(setf (default-progress-style) :one-line
(default-colored-output) t))
(and interactive? smart?)))
Cache directory setup
(defun (setf default-cache-directory) (new-value)
(reinitialize-instance (options:find-option
'("global" "cache-directory") *schema*)
:default new-value))
(defun adapt-configuration-for-home ()
(setf (default-cache-directory) (uiop:xdg-cache-home "build-generator/")))
(defun adapt-configuration-for-environment ()
(adapt-configuration-for-home)
(adapt-configuration-for-terminal))
|
d05f897fc6e02fe7fce1b8349c16c5b9b4b30c53dad20eab94be5459ca567166 | mstksg/inCode | Sequences.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TupleSections #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Sequences (
-- Ap(..)
-- , liftAp
-- , runAp
-- , CoAp(..)
-- , liftCoAp
-- , runCoAp
Div(..)
, liftDiv, runDiv
, Dec(..)
, liftDec, runDec
, Conclude(..)
, InvDay(..)
, runInvDayApply
, runInvDayDivise
, InvNight(..)
, runInvNightAlt
, runInvNightDecide
, Not(..)
, chainPair
) where
import Control.Applicative
import Control.Natural
import Data.Bifunctor
import Data.Bifunctor.Assoc
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Divisible
import Data.Functor.Identity
import Data.Functor.Invariant
import Data.Functor.Plus
import Data.HBifunctor
import Data.HBifunctor.Tensor
import Data.HFunctor
import Data.HFunctor.Chain
import Data.Kind
import Data.Void
class Contravariant f => Divise f where
divise :: (a -> (b, c)) -> f b -> f c -> f a
instance Semigroup r => Divise (Op r) where
divise f (Op g) (Op h) = Op $ \x -> case f x of
(y, z) -> g y <> h z
data Div :: (Type -> Type) -> Type -> Type where
Conquer :: Div f a
Divide :: (a -> (b, c)) -> f b -> Div f c -> Div f a
liftDiv :: f a -> Div f a
liftDiv x = Divide (,()) x Conquer
runDiv
:: forall f g a. Divisible g
=> (forall x. f x -> g x)
-> Div f a
-> g a
runDiv f = go
where
go :: Div f x -> g x
go = \case
Conquer -> conquer
Divide g x xs -> divide g (f x) (go xs)
instance Contravariant (Div f) where
contramap f = \case
Conquer -> Conquer
Divide g x xs -> Divide (g . f) x xs
instance Divisible (Div f) where
conquer = Conquer
divide f = \case
Conquer -> contramap (snd . f)
Divide g x xs -> Divide (assoc . first g . f) x
. divide id xs
class Contravariant f => Decide f where
decide :: (a -> Either b c) -> f b -> f c -> f a
class Decide f => Conclude f where
conclude :: (a -> Void) -> f a
instance Decide (Op r) where
decide f (Op g) (Op h) = Op $ \r -> case f r of
Left x -> g x
Right y -> h y
instance Conclude (Op r) where
conclude g = Op (absurd . g)
data Dec :: (Type -> Type) -> Type -> Type where
Lose :: (a -> Void) -> Dec f a
Choose :: (a -> Either b c) -> f b -> Dec f c -> Dec f a
liftDec :: f a -> Dec f a
liftDec x = Choose Left x (Lose id)
decided :: Conclude f => f a -> f b -> f (Either a b)
decided = decide id
runDec
:: forall f g a. Conclude g
=> (forall x. f x -> g x)
-> Dec f a
-> g a
runDec f = go
where
go :: Dec f x -> g x
go = \case
Lose g -> conclude g
Choose g x xs -> decide g (f x) (go xs)
instance Contravariant (Dec f) where
contramap f = \case
Lose g -> Lose (g . f)
Choose g x xs -> Choose (g . f) x xs
instance Decide (Dec f) where
decide f = \case
Lose g -> contramap (either (absurd . g) id . f)
Choose g x xs -> Choose (assoc . first g . f) x
. decide id xs
instance Conclude (Dec f) where
conclude = Lose
data InvDay :: (Type -> Type) -> (Type -> Type) -> (Type -> Type) where
InvDay
:: f b
-> g c
-> (a -> (b, c))
-> (b -> c -> a)
-> InvDay f g a
instance HBifunctor InvDay where
hbimap f g (InvDay x y h j) = InvDay (f x) (g y) h j
instance Invariant (InvDay f g) where
invmap f g (InvDay x y h j) = InvDay x y (h . g) (\k -> f . j k)
runInvDayApply
:: Apply h
=> (f ~> h)
-> (g ~> h)
-> InvDay f g ~> h
runInvDayApply f g (InvDay x y _ j) = j <$> f x <.> g y
runInvDayDivise
:: Divise h
=> (f ~> h)
-> (g ~> h)
-> InvDay f g ~> h
runInvDayDivise f g (InvDay x y h _) = divise h (f x) (g y)
data InvNight :: (Type -> Type) -> (Type -> Type) -> (Type -> Type) where
InvNight
:: f b
-> g c
-> (a -> Either b c)
-> (Either b c -> a)
-> InvNight f g a
instance HBifunctor InvNight where
hbimap f g (InvNight x y h j) = InvNight (f x) (g y) h j
runInvNightAlt
:: Alt h
=> (f ~> h)
-> (g ~> h)
-> InvNight f g ~> h
runInvNightAlt f g (InvNight x y _ j) = fmap (j . Left) (f x) <!> fmap (j . Right) (g y)
runInvNightDecide
:: Decide h
=> (f ~> h)
-> (g ~> h)
-> InvNight f g ~> h
runInvNightDecide f g (InvNight x y h _) = decide h (f x) (g y)
instance Invariant (InvNight f g) where
invmap f g (InvNight x y h j) = InvNight x y (h . g) (f . j)
newtype Not a = Not { refute :: a -> Void }
instance Invariant Not where
invmap _ g (Not x) = Not (x . g)
-- instance Invariant (Chain t i f) where
-- invmap = undefined
instance Invariant (Chain InvNight Not f) where
invmap f g = \case
Done x -> Done (invmap f g x )
More xs -> More (invmap f g xs)
instance Invariant (Chain InvDay Identity f) where
invmap f g = \case
Done x -> Done (invmap f g x )
More xs -> More (invmap f g xs)
chainPair :: Tensor t i => t f f ~> Chain t i f
chainPair = More . hright inject
-- instance (Invariant i, Invariant (t f (Chain t i f))) => Invariant (Chain t i f) where
-- invmap f g = \case
-- Done x -> Done (invmap f g x)
-- More xs -> More (invmap f g xs)
-- data Ap :: (Type -> Type) -> (Type -> Type) where
-- Pure :: a -> Ap f a
-- ConsAp :: ((a, b) -> c)
-- -> f a
-- -> Ap f b
-- -> Ap f c
-- instance Functor (Ap f) where
-- fmap f = \case
-- Pure x -> Pure (f x)
-- ConsAp g x xs -> ConsAp (f . g) x xs
-- instance Applicative (Ap f) where
-- pure = Pure
-- liftA2 f = \case
-- Pure x -> fmap (f x)
-- ConsAp g x xs -> ConsAp (\(i, (j, k)) -> f (g (i, j)) k) x
-- . liftA2 (,) xs
-- liftAp :: f a -> Ap f a
-- liftAp x = ConsAp fst x (Pure ())
-- runAp
-- :: forall f g a. Applicative g
-- => (forall x. f x -> g x)
-- -> Ap f a
-- -> g a
-- runAp f = go
-- where
-- go :: Ap f b -> g b
-- go = \case
-- Pure x -> pure x
-- ConsAp g x xs -> curry g <$> f x <*> go xs
-- data CoAp :: (Type -> Type) -> (Type -> Type) where
-- CoPure :: CoAp f a
-- ConsCoAp :: (Either a b -> c)
-- -> f a
-- -> CoAp f b
-- -> CoAp f c
-- instance Functor (CoAp f) where
-- fmap f = \case
-- CoPure -> CoPure
-- ConsCoAp g x xs -> ConsCoAp (f . g) x xs
-- instance Alt (CoAp f) where
-- (<!>) = appendCoAp (either id id)
-- instance Plus (CoAp f) where
-- zero = CoPure
-- appendCoAp
-- :: (Either a b -> c)
-- -> CoAp f a
-- -> CoAp f b
-- -> CoAp f c
-- appendCoAp f = \case
-- CoPure -> fmap (f . Right)
-- ConsCoAp g x xs -> ConsCoAp (f . first g . reEither) x
-- . appendCoAp id xs
-- where
-- reEither = \case
-- Left x -> Left (Left x)
-- Right (Left y) -> Left (Right y)
-- Right (Right z) -> Right z
-- liftCoAp :: f a -> CoAp f a
-- liftCoAp x = ConsCoAp (either id absurd) x CoPure
-- runCoAp
-- :: forall f g a. Plus g
-- => (forall x. f x -> g x)
-- -> CoAp f a
-- -> g a
-- runCoAp f = go
-- where
-- go :: CoAp f b -> g b
-- go = \case
-- CoPure -> zero
-- ConsCoAp g x xs -> (g . Left <$> f x) <!> (g . Right <$> go xs)
-- data ContraAp :: (Type -> Type) -> (Type -> Type) where
-- ContraPure :: ContraAp f a
-- ConsContraAp :: (c -> (a, b))
-- -> f a
-- -> ContraAp f b
-- -> ContraAp f c
-- instance Contravariant (ContraAp f) where
-- contramap f = \case
-- ContraPure -> ContraPure
-- ConsContraAp g x xs -> ConsContraAp (g . f) x xs
-- instance Divisible (ContraAp f) where
-- divide f = \case
-- ContraPure -> contramap (snd . f)
-- ConsContraAp g x xs -> \ys ->
-- ConsContraAp ((\((i, j), k) -> (i, (j, k))) . first g . f) x $
-- divide id xs ys
-- conquer = ContraPure
-- instance HFunctor ContraAp where
-- hmap f = \case
-- ContraPure -> ContraPure
-- ConsContraAp g x xs -> ConsContraAp g (f x) (hmap f xs)
-- liftContraAp :: f a -> ContraAp f a
-- liftContraAp x = ConsContraAp (,()) x ContraPure
-- data ContraCoAp :: (Type -> Type) -> (Type -> Type) where
-- ContraCoPure :: (a -> Void) -> ContraCoAp f a
-- ConsContraCoAp :: (c -> Either a b)
-- -> f a
-- -> ContraCoAp f b
-- -> ContraCoAp f c
-- instance Contravariant (ContraCoAp f) where
-- contramap f = \case
-- ContraCoPure g -> ContraCoPure (g . f)
-- ConsContraCoAp g x xs -> ConsContraCoAp (g . f) x xs
-- instance ( ContraCoAp f ) where
-- -- lose = ContraCoPure
-- appendContraCoAp
-- :: (c -> Either a b)
-- -> ContraCoAp f a
-- -> ContraCoAp f b
-- -> ContraCoAp f c
-- appendContraCoAp f = \case
-- ContraCoPure g -> contramap (either (absurd . g) id . f)
-- ConsContraCoAp g x xs ->
-- ConsContraCoAp (reEither . first g . f) x . appendContraCoAp id xs
-- where
-- reEither = \case
-- Left (Left x) -> Left x
-- Left (Right y) -> Right (Left y)
-- Right z -> Right (Right z)
-- liftContraCoAp :: f a -> ContraCoAp f a
-- liftContraCoAp x = ConsContraCoAp Left x (ContraCoPure id)
-- data InvAp :: (Type -> Type) -> (Type -> Type) where
-- InvPure :: a -> InvAp f a
-- -- ConsInvAp :: ((a, b) -> c)
-- -- -> (c -> (a, b))
-- -- -> f a
-- -- -> InvAp f b
-- -- -> InvAp f c
-- -- instance Invariant (InvAp f) where
-- -- invmap f g = \case
-- InvPure x -> InvPure (f x)
-- ConsInvAp j k x xs -> ConsInvAp (f . j) (k . g) x xs
-- -- appendInvAp
-- -- :: ((a, b) -> c)
-- -- -> (c -> (a, b))
-- -- -> InvAp f a
-- -- -> InvAp f b
-- -- -> InvAp f c
-- -- appendInvAp f g = \case
-- - > invmap ( f . ( x , ) ) ( snd . g )
| null | https://raw.githubusercontent.com/mstksg/inCode/e1f80a3dfd83eaa2b817dc922fd7f331cd1ece8a/code-samples/functor-structures/Sequences.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE RankNTypes #
# LANGUAGE ScopedTypeVariables #
Ap(..)
, liftAp
, runAp
, liftCoAp
, runCoAp
instance Invariant (Chain t i f) where
invmap = undefined
instance (Invariant i, Invariant (t f (Chain t i f))) => Invariant (Chain t i f) where
invmap f g = \case
Done x -> Done (invmap f g x)
More xs -> More (invmap f g xs)
Pure :: a -> Ap f a
ConsAp :: ((a, b) -> c)
-> f a
-> Ap f b
-> Ap f c
fmap f = \case
Pure x -> Pure (f x)
instance Applicative (Ap f) where
pure = Pure
liftA2 f = \case
Pure x -> fmap (f x)
ConsAp g x xs -> ConsAp (\(i, (j, k)) -> f (g (i, j)) k) x
. liftA2 (,) xs
liftAp :: f a -> Ap f a
runAp
:: forall f g a. Applicative g
=> (forall x. f x -> g x)
-> Ap f a
-> g a
runAp f = go
where
go :: Ap f b -> g b
go = \case
Pure x -> pure x
ConsAp g x xs -> curry g <$> f x <*> go xs
CoPure :: CoAp f a
ConsCoAp :: (Either a b -> c)
-> f a
-> CoAp f b
-> CoAp f c
fmap f = \case
CoPure -> CoPure
(<!>) = appendCoAp (either id id)
zero = CoPure
appendCoAp
:: (Either a b -> c)
-> CoAp f a
-> CoAp f b
-> CoAp f c
appendCoAp f = \case
CoPure -> fmap (f . Right)
ConsCoAp g x xs -> ConsCoAp (f . first g . reEither) x
. appendCoAp id xs
where
reEither = \case
Left x -> Left (Left x)
Right (Left y) -> Left (Right y)
Right (Right z) -> Right z
liftCoAp :: f a -> CoAp f a
runCoAp
:: forall f g a. Plus g
=> (forall x. f x -> g x)
-> CoAp f a
-> g a
runCoAp f = go
where
go :: CoAp f b -> g b
go = \case
ConsCoAp g x xs -> (g . Left <$> f x) <!> (g . Right <$> go xs)
data ContraAp :: (Type -> Type) -> (Type -> Type) where
-> f a
-> ContraAp f b
-> ContraAp f c
ContraPure -> ContraPure
divide f = \case
divide id xs ys
conquer = ContraPure
ContraPure -> ContraPure
liftContraAp :: f a -> ContraAp f a
ContraCoPure :: (a -> Void) -> ContraCoAp f a
ConsContraCoAp :: (c -> Either a b)
-> f a
-> ContraCoAp f b
-> ContraCoAp f c
ContraCoPure g -> ContraCoPure (g . f)
ConsContraCoAp g x xs -> ConsContraCoAp (g . f) x xs
instance ( ContraCoAp f ) where
-- lose = ContraCoPure
appendContraCoAp
:: (c -> Either a b)
-> ContraCoAp f a
-> ContraCoAp f b
-> ContraCoAp f c
appendContraCoAp f = \case
ConsContraCoAp g x xs ->
ConsContraCoAp (reEither . first g . f) x . appendContraCoAp id xs
where
reEither = \case
Left (Left x) -> Left x
Left (Right y) -> Right (Left y)
Right z -> Right (Right z)
liftContraCoAp :: f a -> ContraCoAp f a
liftContraCoAp x = ConsContraCoAp Left x (ContraCoPure id)
data InvAp : : ( Type - > Type ) - > ( Type - > Type ) where
: : a - > InvAp f a
-- ConsInvAp :: ((a, b) -> c)
-- -> (c -> (a, b))
-- -> f a
-- -> InvAp f b
-- -> InvAp f c
-- instance Invariant (InvAp f) where
-- invmap f g = \case
InvPure x - > InvPure ( f x )
ConsInvAp j k x xs - > ConsInvAp ( f . j ) ( k . ) x xs
-- appendInvAp
-- :: ((a, b) -> c)
-- -> (c -> (a, b))
-- -> InvAp f a
-- -> InvAp f b
-- -> InvAp f c
-- appendInvAp f g = \case
- > invmap ( f . ( x , ) ) ( snd . g ) | # LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE TupleSections #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Sequences (
CoAp ( .. )
Div(..)
, liftDiv, runDiv
, Dec(..)
, liftDec, runDec
, Conclude(..)
, InvDay(..)
, runInvDayApply
, runInvDayDivise
, InvNight(..)
, runInvNightAlt
, runInvNightDecide
, Not(..)
, chainPair
) where
import Control.Applicative
import Control.Natural
import Data.Bifunctor
import Data.Bifunctor.Assoc
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Divisible
import Data.Functor.Identity
import Data.Functor.Invariant
import Data.Functor.Plus
import Data.HBifunctor
import Data.HBifunctor.Tensor
import Data.HFunctor
import Data.HFunctor.Chain
import Data.Kind
import Data.Void
class Contravariant f => Divise f where
divise :: (a -> (b, c)) -> f b -> f c -> f a
instance Semigroup r => Divise (Op r) where
divise f (Op g) (Op h) = Op $ \x -> case f x of
(y, z) -> g y <> h z
data Div :: (Type -> Type) -> Type -> Type where
Conquer :: Div f a
Divide :: (a -> (b, c)) -> f b -> Div f c -> Div f a
liftDiv :: f a -> Div f a
liftDiv x = Divide (,()) x Conquer
runDiv
:: forall f g a. Divisible g
=> (forall x. f x -> g x)
-> Div f a
-> g a
runDiv f = go
where
go :: Div f x -> g x
go = \case
Conquer -> conquer
Divide g x xs -> divide g (f x) (go xs)
instance Contravariant (Div f) where
contramap f = \case
Conquer -> Conquer
Divide g x xs -> Divide (g . f) x xs
instance Divisible (Div f) where
conquer = Conquer
divide f = \case
Conquer -> contramap (snd . f)
Divide g x xs -> Divide (assoc . first g . f) x
. divide id xs
class Contravariant f => Decide f where
decide :: (a -> Either b c) -> f b -> f c -> f a
class Decide f => Conclude f where
conclude :: (a -> Void) -> f a
instance Decide (Op r) where
decide f (Op g) (Op h) = Op $ \r -> case f r of
Left x -> g x
Right y -> h y
instance Conclude (Op r) where
conclude g = Op (absurd . g)
data Dec :: (Type -> Type) -> Type -> Type where
Lose :: (a -> Void) -> Dec f a
Choose :: (a -> Either b c) -> f b -> Dec f c -> Dec f a
liftDec :: f a -> Dec f a
liftDec x = Choose Left x (Lose id)
decided :: Conclude f => f a -> f b -> f (Either a b)
decided = decide id
runDec
:: forall f g a. Conclude g
=> (forall x. f x -> g x)
-> Dec f a
-> g a
runDec f = go
where
go :: Dec f x -> g x
go = \case
Lose g -> conclude g
Choose g x xs -> decide g (f x) (go xs)
instance Contravariant (Dec f) where
contramap f = \case
Lose g -> Lose (g . f)
Choose g x xs -> Choose (g . f) x xs
instance Decide (Dec f) where
decide f = \case
Lose g -> contramap (either (absurd . g) id . f)
Choose g x xs -> Choose (assoc . first g . f) x
. decide id xs
instance Conclude (Dec f) where
conclude = Lose
data InvDay :: (Type -> Type) -> (Type -> Type) -> (Type -> Type) where
InvDay
:: f b
-> g c
-> (a -> (b, c))
-> (b -> c -> a)
-> InvDay f g a
instance HBifunctor InvDay where
hbimap f g (InvDay x y h j) = InvDay (f x) (g y) h j
instance Invariant (InvDay f g) where
invmap f g (InvDay x y h j) = InvDay x y (h . g) (\k -> f . j k)
runInvDayApply
:: Apply h
=> (f ~> h)
-> (g ~> h)
-> InvDay f g ~> h
runInvDayApply f g (InvDay x y _ j) = j <$> f x <.> g y
runInvDayDivise
:: Divise h
=> (f ~> h)
-> (g ~> h)
-> InvDay f g ~> h
runInvDayDivise f g (InvDay x y h _) = divise h (f x) (g y)
data InvNight :: (Type -> Type) -> (Type -> Type) -> (Type -> Type) where
InvNight
:: f b
-> g c
-> (a -> Either b c)
-> (Either b c -> a)
-> InvNight f g a
instance HBifunctor InvNight where
hbimap f g (InvNight x y h j) = InvNight (f x) (g y) h j
runInvNightAlt
:: Alt h
=> (f ~> h)
-> (g ~> h)
-> InvNight f g ~> h
runInvNightAlt f g (InvNight x y _ j) = fmap (j . Left) (f x) <!> fmap (j . Right) (g y)
runInvNightDecide
:: Decide h
=> (f ~> h)
-> (g ~> h)
-> InvNight f g ~> h
runInvNightDecide f g (InvNight x y h _) = decide h (f x) (g y)
instance Invariant (InvNight f g) where
invmap f g (InvNight x y h j) = InvNight x y (h . g) (f . j)
newtype Not a = Not { refute :: a -> Void }
instance Invariant Not where
invmap _ g (Not x) = Not (x . g)
instance Invariant (Chain InvNight Not f) where
invmap f g = \case
Done x -> Done (invmap f g x )
More xs -> More (invmap f g xs)
instance Invariant (Chain InvDay Identity f) where
invmap f g = \case
Done x -> Done (invmap f g x )
More xs -> More (invmap f g xs)
chainPair :: Tensor t i => t f f ~> Chain t i f
chainPair = More . hright inject
data Ap : : ( Type - > Type ) - > ( Type - > Type ) where
instance ( Ap f ) where
ConsAp g x xs - > ConsAp ( f . ) x xs
liftAp x = ConsAp fst x ( Pure ( ) )
data CoAp : : ( Type - > Type ) - > ( Type - > Type ) where
instance Functor ( CoAp f ) where
ConsCoAp g x xs - > ConsCoAp ( f . ) x xs
instance Alt ( CoAp f ) where
instance Plus ( CoAp f ) where
liftCoAp x = ConsCoAp ( either i d absurd ) x CoPure
CoPure - > zero
ContraPure : : a
ConsContraAp : : ( c - > ( a , b ) )
instance ( ContraAp f ) where
f = \case
ConsContraAp ( g . f ) x xs
instance Divisible ( ) where
ContraPure - > contramap ( snd . f )
\ys - >
ConsContraAp ( ( \((i , j),k ) - > ( i , ( j , k ) ) ) . first g . f ) x $
instance HFunctor ContraAp where
hmap f = \case
( f x ) ( hmap f xs )
liftContraAp x = ConsContraAp ( , ( ) ) x ContraPure
data : : ( Type - > Type ) - > ( Type - > Type ) where
instance ( ContraCoAp f ) where
f = \case
ContraCoPure g - > ( either ( absurd . ) i d . f )
|
64d13cd71a3acc0717005b7ec3ef12ccfc2bba686fd1a58373299bdadb064ea3 | ssor/erlangDemos | dispatcher_prop.erl | Copyright ( c ) 2011 , < >
%% Copyright (c) 2011, < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(dispatcher_prop).
-include_lib("proper/include/proper.hrl").
%% Generators.
hostname_head_char() ->
oneof([choose($a, $z), choose($A, $Z), choose($0, $9)]).
hostname_char() ->
oneof([choose($a, $z), choose($A, $Z), choose($0, $9), $-]).
hostname_label() ->
?SUCHTHAT(Label, [hostname_head_char()|list(hostname_char())],
length(Label) < 64).
hostname() ->
?SUCHTHAT(Hostname,
?LET(Labels, list(hostname_label()), string:join(Labels, ".")),
length(Hostname) > 0 andalso length(Hostname) =< 255).
port_number() ->
choose(1, 16#ffff).
port_str() ->
oneof(["", ?LET(Port, port_number(), ":" ++ integer_to_list(Port))]).
server() ->
?LET({Hostname, PortStr}, {hostname(), port_str()},
list_to_binary(Hostname ++ PortStr)).
%% Properties.
prop_split_host_symmetric() ->
?FORALL(Server, server(),
begin case cowboy_dispatcher:split_host(Server) of
{Tokens, RawHost, undefined} ->
(Server == RawHost) and (Server == binary_join(Tokens, "."));
{Tokens, RawHost, Port} ->
PortBin = (list_to_binary(":" ++ integer_to_list(Port))),
(Server == << RawHost/binary, PortBin/binary >>)
and (Server == << (binary_join(Tokens, "."))/binary,
PortBin/binary >>)
end end).
%% Internal.
%% Contributed by MononcQc on #erlounge.
binary_join(Flowers, Leaf) ->
case Flowers of
[] -> <<>>;
[Petal|Pot] -> iolist_to_binary(
[Petal | [[Leaf | Pollen] || Pollen <- Pot]])
end.
| null | https://raw.githubusercontent.com/ssor/erlangDemos/632cd905be2c4f275f1c1ae15238e711d7bb9147/cowboy_old/test/dispatcher_prop.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Generators.
Properties. | Copyright ( c ) 2011 , < >
-include_lib("proper/include/proper.hrl").
hostname_head_char() ->
oneof([choose($a, $z), choose($A, $Z), choose($0, $9)]).
hostname_char() ->
oneof([choose($a, $z), choose($A, $Z), choose($0, $9), $-]).
hostname_label() ->
?SUCHTHAT(Label, [hostname_head_char()|list(hostname_char())],
length(Label) < 64).
hostname() ->
?SUCHTHAT(Hostname,
?LET(Labels, list(hostname_label()), string:join(Labels, ".")),
length(Hostname) > 0 andalso length(Hostname) =< 255).
port_number() ->
choose(1, 16#ffff).
port_str() ->
oneof(["", ?LET(Port, port_number(), ":" ++ integer_to_list(Port))]).
server() ->
?LET({Hostname, PortStr}, {hostname(), port_str()},
list_to_binary(Hostname ++ PortStr)).
prop_split_host_symmetric() ->
?FORALL(Server, server(),
begin case cowboy_dispatcher:split_host(Server) of
{Tokens, RawHost, undefined} ->
(Server == RawHost) and (Server == binary_join(Tokens, "."));
{Tokens, RawHost, Port} ->
PortBin = (list_to_binary(":" ++ integer_to_list(Port))),
(Server == << RawHost/binary, PortBin/binary >>)
and (Server == << (binary_join(Tokens, "."))/binary,
PortBin/binary >>)
end end).
Internal .
Contributed by MononcQc on # erlounge .
binary_join(Flowers, Leaf) ->
case Flowers of
[] -> <<>>;
[Petal|Pot] -> iolist_to_binary(
[Petal | [[Leaf | Pollen] || Pollen <- Pot]])
end.
|
bc331e52ad3e18dbd6e272380298fa470e3bab586f4f2c3de6a1b76255544d11 | engineyard/vertebra-erl | vertebra_protocol.erl | Copyright 2008 , Engine Yard , Inc.
%
% This file is part of Vertebra.
%
% Vertebra is free software: you can redistribute it and/or modify it under the
% terms of the GNU Lesser General Public License as published by the Free
% Software Foundation, either version 3 of the License, or (at your option) any
% later version.
%
% Vertebra is distributed in the hope that it will be useful, but WITHOUT ANY
% WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
% A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
% details.
%
% You should have received a copy of the GNU Lesser General Public License
% along with Vertebra. If not, see < / >.
-module(vertebra_protocol).
-export([op/1, nack/1, ack/1, final/1, data/2, error/2, error/3, finalize/3]).
-export([make_resource_list/1]).
-include("xml.hrl").
finalize(Op, OpType, Id) when OpType == "get" orelse
OpType == "set" orelse
OpType == "error" orelse
OpType == "result" ->
Attrs = [{"type", OpType},
{"xml:lang", "en"},
{"id", Id}],
#xmlelement{name="iq", attrs=Attrs, sub_el=[Op]}.
error(Reason, Token) when is_list(Reason) ->
error("error", Reason, Token).
error(Type, Reason, Token) when is_list(Reason) ->
{ok, ReasonEl} = xml_util:convert(to, {string, [{"name", "reason"}], list_to_binary(Reason)}),
#xmlelement{name="error", attrs=[{"token", Token},
{"xmlns", ?AGENT_NS},
{"type", Type}], sub_el=[ReasonEl]}.
ack(Token) ->
#xmlelement{name="ack", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
nack(Token) ->
#xmlelement{name="nack", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
final(Token) ->
#xmlelement{name="final", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
data({xmlelement, "list", _, _} = Results, Token) ->
Attrs = [{"token", Token}, {"xmlns", ?AGENT_NS}],
{xmlelement, "data", Attrs, [Results]};
data(Results, Token) when is_list(Results) ->
Attrs = [{"token", Token}, {"xmlns", ?AGENT_NS}],
{xmlelement, "data", Attrs, build_subels(Results, [])};
data(Results, Token) ->
data([Results], Token).
%%
%% op({OpName, SubEls}) -> #xmlelement
%% OpName -> string
%% SubEls -> [SubEl]
%% SubEl -> {Name, Attrs, [SubEl] | []} | {text, Text}
%%
op({OpName, Token, SubEls}) ->
Attrs = [{"type", OpName}, {"token", Token}, {"xmlns", ?AGENT_NS}],
#xmlelement{name="op", attrs=Attrs, sub_el=build_subels(SubEls, [])}.
build_subels([{text, Text}|T], Accum) ->
build_subels(T, lists:append(Accum, [#xmlcdata{data=Text}]));
build_subels([{xmlelement, _, _, _}=H|T], Accum) ->
build_subels(T, lists:append(Accum, [H]));
build_subels([{ElName, ElAttrs, ElSubEls}|T], Accum) ->
SubEls = build_subels(ElSubEls, []),
build_subels(T, lists:append(Accum, [#xmlelement{name=ElName, attrs=ElAttrs, sub_el=SubEls}]));
build_subels([], Accum) ->
Accum.
make_resource_list(Resources) ->
lists:map(fun(R) -> {xmlelement, "res", [], [{xmlcdata, R}]} end, Resources).
| null | https://raw.githubusercontent.com/engineyard/vertebra-erl/cf6e7c84f6dfbf2e31f19c47e9db112ae292ec27/lib/vertebra/src/vertebra_protocol.erl | erlang |
terms of the GNU Lesser General Public License as published by the Free
later version.
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
op({OpName, SubEls}) -> #xmlelement
OpName -> string
SubEls -> [SubEl]
| Copyright 2008 , Engine Yard , Inc.
-module(vertebra_protocol).
-export([op/1, nack/1, ack/1, final/1, data/2, error/2, error/3, finalize/3]).
-export([make_resource_list/1]).
-include("xml.hrl").
finalize(Op, OpType, Id) when OpType == "get" orelse
OpType == "set" orelse
OpType == "error" orelse
OpType == "result" ->
Attrs = [{"type", OpType},
{"xml:lang", "en"},
{"id", Id}],
#xmlelement{name="iq", attrs=Attrs, sub_el=[Op]}.
error(Reason, Token) when is_list(Reason) ->
error("error", Reason, Token).
error(Type, Reason, Token) when is_list(Reason) ->
{ok, ReasonEl} = xml_util:convert(to, {string, [{"name", "reason"}], list_to_binary(Reason)}),
#xmlelement{name="error", attrs=[{"token", Token},
{"xmlns", ?AGENT_NS},
{"type", Type}], sub_el=[ReasonEl]}.
ack(Token) ->
#xmlelement{name="ack", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
nack(Token) ->
#xmlelement{name="nack", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
final(Token) ->
#xmlelement{name="final", attrs=[{"token",Token},
{"xmlns", ?AGENT_NS}], sub_el=[]}.
data({xmlelement, "list", _, _} = Results, Token) ->
Attrs = [{"token", Token}, {"xmlns", ?AGENT_NS}],
{xmlelement, "data", Attrs, [Results]};
data(Results, Token) when is_list(Results) ->
Attrs = [{"token", Token}, {"xmlns", ?AGENT_NS}],
{xmlelement, "data", Attrs, build_subels(Results, [])};
data(Results, Token) ->
data([Results], Token).
SubEl - > { Name , , [ SubEl ] | [ ] } | { text , Text }
op({OpName, Token, SubEls}) ->
Attrs = [{"type", OpName}, {"token", Token}, {"xmlns", ?AGENT_NS}],
#xmlelement{name="op", attrs=Attrs, sub_el=build_subels(SubEls, [])}.
build_subels([{text, Text}|T], Accum) ->
build_subels(T, lists:append(Accum, [#xmlcdata{data=Text}]));
build_subels([{xmlelement, _, _, _}=H|T], Accum) ->
build_subels(T, lists:append(Accum, [H]));
build_subels([{ElName, ElAttrs, ElSubEls}|T], Accum) ->
SubEls = build_subels(ElSubEls, []),
build_subels(T, lists:append(Accum, [#xmlelement{name=ElName, attrs=ElAttrs, sub_el=SubEls}]));
build_subels([], Accum) ->
Accum.
make_resource_list(Resources) ->
lists:map(fun(R) -> {xmlelement, "res", [], [{xmlcdata, R}]} end, Resources).
|
d94d8de1a7a117ccfaef3f1ed74ed56877541ca3640becc862500a72e62642a0 | PacktPublishing/Haskell-High-Performance-Programming | reactive-banana-fib.hs | -- file: reactive-banana-fib.hs
# LANGUAGE RecursiveDo #
import Reactive.Banana
fib :: Event () -> Moment (Behavior Int)
fib step = mdo
fib1 <- stepper 1 (fib2 <@ step)
fib2 <- accumB 1 ((+) <$> fib1 <@ step)
return fib1
| null | https://raw.githubusercontent.com/PacktPublishing/Haskell-High-Performance-Programming/2b1bfdb8102129be41e8d79c7e9caf12100c5556/Chapter13/reactive-banana-fib.hs | haskell | file: reactive-banana-fib.hs | # LANGUAGE RecursiveDo #
import Reactive.Banana
fib :: Event () -> Moment (Behavior Int)
fib step = mdo
fib1 <- stepper 1 (fib2 <@ step)
fib2 <- accumB 1 ((+) <$> fib1 <@ step)
return fib1
|
ebc095ec56d0a98b44aa016e0516e866647ffeb1e3a84fb5d55f34496ec438a2 | emqx/ecql | ecql_tests.erl | Copyright ( c ) 2016 eMQTT.IO , All Rights Reserved .
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a copy
%%% of this software and associated documentation files (the "Software"), to deal
%%% in the Software without restriction, including without limitation the rights
%%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%%% copies of the Software, and to permit persons to whom the Software is
%%% furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included in all
%%% copies or substantial portions of the Software.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
%%% SOFTWARE.
%%%
%%% @author < >
%%%
-module(ecql_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-define(OPTIONS, [{nodes, [{"127.0.0.1", 9042}]},
{keyspace, "test"},
{username, "cassandra"},
{password, "cassandra"}]).
ecql_test_() ->
{foreach, fun setup/0, fun cleanup/1, [fun tests/1]}.
setup() ->
{ok, C} = ecql:connect(?OPTIONS), C.
tests(C) ->
[?_test(t_use_keyspace(C)),
?_test(t_select(C)),
?_test(t_update(C)),
?_test(t_prepare(C)),
?_test(t_named_prepare(C))].
cleanup(C) ->
ecql:close(C).
t_use_keyspace(C) ->
{ok, <<"test">>} = ecql:query(C, "use test").
t_select(C) ->
{ok, {<<"test.tab">>, _Columns, _Rows}} = ecql:query(C, "select * from test.tab"),
{ok, Result} = ecql:query(C, "select * from test.tab where first_id = ? and second_id = ?", [{bigint, 1}, 'secid']),
?debugFmt("Result: ~p~n", [Result]).
t_update(C) ->
ok = ecql:query(C, <<"update test.tab set col_map['keyx'] = 'valuex' where first_id = 1 and second_id = 'secid'">>),
{ok, Ref} = ecql:async_query(C, "select col_text from test.tab"),
receive
{async_cql_reply, Ref, {ok, {<<"test.tab">>, [{<<"col_text">>, varchar}], Rows}}} ->
?debugFmt("AsyncQuery Rows: ~p~n", [Rows]);
{async_cql_reply, Ref, Error} ->
throw(Error)
after
1000 -> error(timeout)
end.
t_prepare(C) ->
{ok, Id} = ecql:prepare(C, "select * from test.tab where first_id = ? and second_id = ?"),
{ok, {TableSpec, Columns, Rows}} = ecql:execute(C, Id, [{bigint, 1}, 'secid']).
t_named_prepare(C) ->
{ok, _Id} = ecql:prepare(C, select_one, "select * from test.tab where first_id = ? limit 1"),
{ok, {TableSpec, Columns, Rows}} = ecql:execute(C, select_one, [{bigint, 1}]).
-endif.
| null | https://raw.githubusercontent.com/emqx/ecql/f29d05fb8bdf1167877ec07ef7f0950f6feeab95/test/ecql_tests.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| Copyright ( c ) 2016 eMQTT.IO , All Rights Reserved .
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
@author < >
-module(ecql_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-define(OPTIONS, [{nodes, [{"127.0.0.1", 9042}]},
{keyspace, "test"},
{username, "cassandra"},
{password, "cassandra"}]).
ecql_test_() ->
{foreach, fun setup/0, fun cleanup/1, [fun tests/1]}.
setup() ->
{ok, C} = ecql:connect(?OPTIONS), C.
tests(C) ->
[?_test(t_use_keyspace(C)),
?_test(t_select(C)),
?_test(t_update(C)),
?_test(t_prepare(C)),
?_test(t_named_prepare(C))].
cleanup(C) ->
ecql:close(C).
t_use_keyspace(C) ->
{ok, <<"test">>} = ecql:query(C, "use test").
t_select(C) ->
{ok, {<<"test.tab">>, _Columns, _Rows}} = ecql:query(C, "select * from test.tab"),
{ok, Result} = ecql:query(C, "select * from test.tab where first_id = ? and second_id = ?", [{bigint, 1}, 'secid']),
?debugFmt("Result: ~p~n", [Result]).
t_update(C) ->
ok = ecql:query(C, <<"update test.tab set col_map['keyx'] = 'valuex' where first_id = 1 and second_id = 'secid'">>),
{ok, Ref} = ecql:async_query(C, "select col_text from test.tab"),
receive
{async_cql_reply, Ref, {ok, {<<"test.tab">>, [{<<"col_text">>, varchar}], Rows}}} ->
?debugFmt("AsyncQuery Rows: ~p~n", [Rows]);
{async_cql_reply, Ref, Error} ->
throw(Error)
after
1000 -> error(timeout)
end.
t_prepare(C) ->
{ok, Id} = ecql:prepare(C, "select * from test.tab where first_id = ? and second_id = ?"),
{ok, {TableSpec, Columns, Rows}} = ecql:execute(C, Id, [{bigint, 1}, 'secid']).
t_named_prepare(C) ->
{ok, _Id} = ecql:prepare(C, select_one, "select * from test.tab where first_id = ? limit 1"),
{ok, {TableSpec, Columns, Rows}} = ecql:execute(C, select_one, [{bigint, 1}]).
-endif.
|
8adf0a6488b267b0c5cd0063a2e129b3cdef95421f5e88a1d1aef59b976bcafd | matterandvoid-space/subscriptions | datalevin_eql_test.clj | (ns space.matterandvoid.subscriptions.datalevin-eql-test
(:require
[clojure.test :refer [deftest testing is use-fixtures]]
[clojure.string :as str]
[datalevin.core :as d]
[space.matterandvoid.subscriptions.core :refer [<sub]]
[space.matterandvoid.subscriptions.datalevin-eql :as sut]
[space.matterandvoid.subscriptions.impl.reagent-ratom :as r]
[taoensso.timbre :as log]))
(log/set-level! :error)
(set! *print-namespace-maps* false)
(def schema
{:user/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:user/friends {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:user/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:bot/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:bot/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:comment/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:comment/text {:db/valueType :db.type/string}
:comment/sub-comments {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:list/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:list/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:list/members {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:list/items {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:human/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:human/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:human/best-friend {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:todo/text {:db/valueType :db.type/string :db/unique :db.unique/identity}
:todo/author {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/comment {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/comments {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}})
(def conn (d/get-conn (str "/tmp/datalevin/" (random-uuid)) schema))
(def user-comp (sut/nc {:query [:user/id :user/name {:user/friends '...}] :name ::user :ident :user/id}))
(def bot-comp (sut/nc {:query [:bot/id :bot/name] :name ::bot :ident :bot/id}))
(def human-comp (sut/nc {:query [:human/id :human/name {:human/best-friend 1}] :name ::human :ident :human/id}))
(def author-comp (sut/nc {:query {:bot/id (sut/get-query bot-comp)
:user/id (sut/get-query user-comp)}
:name ::author}))
(def comment-comp (sut/nc {:query [:comment/id :comment/text {:comment/sub-comments '...}] :name ::comment :ident :comment/id}))
(def todo-comp (sut/nc {:query [:todo/id :todo/text {:todo/comment (sut/get-query comment-comp)}
{:todo/comments (sut/get-query comment-comp)}
{:todo/author (sut/get-query author-comp)}] :name ::todo :ident :todo/id}))
(def todo-q (sut/get-query todo-comp))
(def list-member-comp (sut/nc {:query {:comment/id (sut/get-query comment-comp) :todo/id todo-q} :name ::list-member}))
(def list-member-q (sut/get-query list-member-comp))
(def list-comp (sut/nc {:ident :list/id :name ::list
:query [:list/id :list/name
{:list/items (sut/get-query list-member-comp)}
{:list/members (sut/get-query list-member-comp)}]}))
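;; NOTE on the join values used in the component queries above and in the tests below:
;; '... denotes an unbounded recursive join (cycles in the data are handled by the walking
;; logic), an integer bounds the recursion depth, and a map keyed by ident attribute
;; (e.g. {:bot/id ... :user/id ...}) denotes a union query whose branch is picked per entity.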
;(reset! subs.impl/handler-registry_ {})
;(subs/clear-subscription-cache! nil)
(run! sut/register-component-subs! [user-comp bot-comp comment-comp todo-comp list-comp human-comp])
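;; Registering the components above installs one named subscription per component
;; (::user, ::bot, ::comment, ::todo, ::list, ::human); the <sub calls in the tests
;; below look these up by that name.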
(d/transact! conn
[{:comment/id :comment-1 :comment/text "FIRST COMMENT" :comment/sub-comments ["comment-2"]}
{:db/id "comment-2" :comment/id :comment-2 :comment/text "SECOND COMMENT"}
{:comment/id :comment-3 :comment/text "THIRD COMMENT"}
;; to-one cycle
{:db/id "human-1" :human/id :human-1 :human/name "human Y" :human/best-friend "human-1"}
{:human/id :human-2 :human/name "human X" :human/best-friend "human-3"}
{:db/id "human-3" :human/id :human-3 :human/name "human Z" :human/best-friend [:human/id :human-1]}
;; to-many cycle
{:user/id :user-7 :user/name "user 7"}
{:user/id :user-6 :user/name "user 6" :user/friends [[:user/id :user-7]]}
{:user/id :user-5 :user/name "user 5" :user/friends [[:user/id :user-6] [:user/id :user-7]]}
{:db/id -2 :user/id :user-2 :user/name "user 2" :user/friends [-2 -1 -3 [:user/id :user-5]]}
{:db/id -1 :user/id :user-1 :user/name "user 1" :user/friends [-2]}
{:db/id -4 :user/id :user-4 :user/name "user 4" :user/friends [-3 [:user/id :user-4]]}
{:db/id -3 :user/id :user-3 :user/name "user 3" :user/friends [[:user/id :user-2] [:user/id :user-4]]}
{:todo/id :todo-2 :todo/text "todo 2" :todo/author [:user/id :user-2]}
{:list/id :list-1 :list/name "first list"
:list/members [[:comment/id :comment-1] [:todo/id :todo-2]]
:list/items [[:todo/id :todo-2] [:comment/id :comment-1]]}
{:bot/id :bot-1 :bot/name "bot 1"}
;; union queries
{:todo/id :todo-1 :todo/text "todo 1" :todo/author [:bot/id :bot-1] :todo/comment [:comment/id :comment-1]}
{:todo/id :todo-3 :todo/text "todo 3" :todo/comments [[:comment/id :comment-1] [:comment/id :comment-3]]}
{:user/id :user-9 :user/name "user 9" :user/friends ["user-10"]}
{:db/id "user-10" :user/id :user-10 :user/name "user 10" :user/friends [[:user/id :user-10] [:user/id :user-9] "user-11"]}
{:db/id "user-11" :user/id :user-11 :user/name "user 11" :user/friends [[:user/id :user-10] "user-12"]}
{:db/id "user-12" :user/id :user-12 :user/name "user 12" :user/friends [[:user/id :user-11] [:user/id :user-12]]}])
(defonce db_ (r/atom (d/db conn)))
(defn ent [ref] (d/entity @db_ (d/entid @db_ ref)))
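;; `ent` resolves a lookup ref such as [:user/id :user-1] to a Datalevin entity; the tests
;; use it to build expected values for refs that the EQL layer returns unexpanded.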
(deftest union-queries-test
(testing "to-one union queries"
(is (= {:todo/id :todo-1, :todo/author {:bot/name "bot 1", :bot/id :bot-1}}
(<sub db_ [::todo {:todo/id :todo-1 sut/query-key [:todo/id :todo/author]}])))
(is (=
{:todo/id :todo-2, :todo/author {:user/friends (set [(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])]), :user/name "user 2", :user/id :user-2}}
(update-in (<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id :todo/author]}]) [:todo/author :user/friends] set)))
(is (= {:todo/id :todo-2, :todo/author {:user/name "user 2"}}
(<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id {:todo/author {:user/id [:user/name]
:does-not/exist [:a :b :c]}}]}])))
(testing "support for * query"
(is (= #:todo{:id :todo-1, :author {:bot/id :bot-1 :bot/name "bot 1"}} (<sub db_ [::todo {:todo/id :todo-1 sut/query-key [:todo/id {:todo/author ['*]}]}])))
(is (=
{:todo/id :todo-2,
:todo/author {:user/friends (set [(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])]), :user/name "user 2", :user/id :user-2}}
(update-in (<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id {:todo/author '[*]}]}]) [:todo/author :user/friends] set)))))
(testing "to-many union queries"
(is (= {:list/items [{:comment/id :comment-1, :comment/text "FIRST COMMENT",
:comment/sub-comments [{:comment/id :comment-2, :comment/text "SECOND COMMENT"}]}
{:todo/id :todo-2, :todo/text "todo 2",
:todo/author {:user/id :user-2, :user/name "user 2",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-3, :user/name "user 3",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-4, :user/name "user 4",
:user/friends [{:user/id :user-4, :user/name "user 4", :user/friends #{(ent [:user/id :user-4]) (ent [:user/id :user-3])}}
{:user/id :user-3, :user/name "user 3", :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-4])}}]}]}
{:user/id :user-1, :user/name "user 1",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}
{:user/id :user-5, :user/name "user 5",
:user/friends [{:user/id :user-7, :user/name "user 7"}
{:user/id :user-6, :user/name "user 6", :user/friends [{:user/id :user-7, :user/name "user 7"}]}]}]}}],
:list/members [{:comment/id :comment-1, :comment/text "FIRST COMMENT"} {:todo/id :todo-2, :todo/text "todo 2"}]}
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])))
(testing "unions should only return queried-for branches"
(is (= {:list/items []} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:todo2/id [:todo/id :todo/text]}}]}])))
(is (= {:list/items [{:comment/sub-comments #{(ent [:comment/id :comment-2])}, :comment/id :comment-1, :comment/text "FIRST COMMENT"}]} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:comment/id ['*]}}]}])))
(is (= {:list/items [{:comment/id :comment-1, :comment/text "FIRST COMMENT"}]} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:comment/id [:comment/id :comment/text]}}]}])))
(is (= {:list/items [{:todo/id :todo-2, :todo/text "todo 2"}]}
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:todo/id [:todo/id :todo/text]}}]}]))))))
(deftest walking-test
(testing "hashmap expansion"
(let [out (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
;; stop keeps the entity but does not recur on it, vs removing it completely from the
;; result set.
{:stop (mapv (fn [{:user/keys [id]}] [:user/id id]) (filter (fn [{:user/keys [name]}] (= name "user 3")) friends))
:expand (mapv (fn [{:user/keys [id]}] [:user/id id]) (remove (fn [{:user/keys [name]}] (= name "user 3")) friends))}))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])]
(is (= {:user/name "user 1",
:user/id :user-1,
:user/friends [{:user/name "user 2",
:user/id :user-2,
:user/friends [{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 5", :user/id :user-5,
:user/friends [{:user/name "user 7", :user/id :user-7} {:user/name "user 6", :user/id :user-6, :user/friends [{:user/name "user 7", :user/id :user-7}]}]}
{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-3, :user/name "user 3", :user/friends [(ent [:user/id :user-4]) (ent [:user/id :user-2])]}]}]} out))))
(testing "collection expansion"
(let [out1 (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
(->> friends
(filter (fn [{:user/keys [name]}] (or (= name "user 3") (= name "user 2") (= name "user 1"))))
(mapv (fn [{:user/keys [id]}] [:user/id id])))))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])
out2 (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
(->> friends
(filter (fn [{:user/keys [name]}] (or (= name "user 2") (= name "user 1"))))
(mapv (fn [{:user/keys [id]}] [:user/id id])))))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])]
(is (= {:user/name "user 1", :user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 2", :user/id :user-2, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out2))
(is (= {:user/name "user 1", :user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 3", :user/id :user-3,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}
{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out1))))
(testing "truthy/falsey expansion"
(let [out (<sub db_ [::user {`keep-walking? (fn [e] (#{"user 1" "user 2"} (:user/name e))) :user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/walk-fn-key `keep-walking?}) '...}]}])]
(comment (d/touch (d/entity @db_ 10)))
(is (= {:user/name "user 1",
:user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 3", :user/id :user-3, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-4])}}
{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 5", :user/id :user-5, :user/friends #{(ent [:user/id :user-6]) (ent [:user/id :user-7])}}
{:user/name "user 2", :user/id :user-2, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out)))))
(deftest xform-test
(testing "transform an attribute"
(is (= {:user/name "USER 1", :user/id :user-1}
(<sub db_ [::user {'upper-case-name (fn [e] (update e :user/name str/upper-case))
'uppercase str/upper-case
:user/id :user-1
sut/query-key [(list :user/name {sut/xform-fn-key 'uppercase}) :user/id]}])))))
(comment
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])
(d/touch (d/entity db 1)) ; user-2
(d/touch (d/entity db 2)) ; user-2
(d/touch (d/entity db 3)) ; user-3
(d/touch (d/entity db 11)) ; user-7
(d/touch (d/entity db 13)) ; user-5
(d/touch (d/entity db 13))
(d/touch (d/entity db {:db/id 2}))
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 0}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 1}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 4}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 2}]}])
(d/touch (d/entity db [:user/id :user-12]))
(into {} (d/entity db [:user/id :user-12]))
(<sub db_ [::user {'keep-walking? (fn [e]
(println "IN KEEP walking? " e)
(#{"user 1" "user 2"} (:user/name e))
(= " user 1 " (: user / name e ) )
)
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/walk-fn-key 'keep-walking?}) '...}]}])
(<sub db_ [::user {'upper-case-name (fn [e] (println "IN xform fn " e) (update e :user/name str/upper-case))
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/xform-fn-key 'upper-case-name}) 4}]}])
(<sub db_ [::user {'upper-case-name (fn [e] (println "IN xform fn " e) (update e :user/name str/upper-case))
'keep-walking? (fn [e] (println "IN KEEP walking? " e) (#{"user 1" "user 2"} (:user/name e)))
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/xform-fn-key 'upper-case-name
sut/walk-fn-key 'keep-walking?}) '...}]}])
)
| null | https://raw.githubusercontent.com/matterandvoid-space/subscriptions/7cfcc05fd238b6a08040bf46c8b4d0ed6688a90a/src/test/space/matterandvoid/subscriptions/datalevin_eql_test.clj | clojure | (reset! subs.impl/handler-registry_ {})
(subs/clear-subscription-cache! nil)
to-one cycle
to-many cycle
union queries
stop keeps the entity but does not recur on it, vs removing it completely from the
result set.
user-2
user-2
user-3
user-7
user-5 | (ns space.matterandvoid.subscriptions.datalevin-eql-test
(:require
[clojure.test :refer [deftest testing is use-fixtures]]
[clojure.string :as str]
[datalevin.core :as d]
[space.matterandvoid.subscriptions.core :refer [<sub]]
[space.matterandvoid.subscriptions.datalevin-eql :as sut]
[space.matterandvoid.subscriptions.impl.reagent-ratom :as r]
[taoensso.timbre :as log]))
(log/set-level! :error)
(set! *print-namespace-maps* false)
(def schema
{:user/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:user/friends {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:user/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:bot/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:bot/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:comment/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:comment/text {:db/valueType :db.type/string}
:comment/sub-comments {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:list/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:list/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:list/members {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:list/items {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}
:human/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:human/name {:db/valueType :db.type/string :db/unique :db.unique/identity}
:human/best-friend {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/id {:db/valueType :db.type/keyword :db/unique :db.unique/identity}
:todo/text {:db/valueType :db.type/string :db/unique :db.unique/identity}
:todo/author {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/comment {:db/valueType :db.type/ref :db/cardinality :db.cardinality/one}
:todo/comments {:db/valueType :db.type/ref :db/cardinality :db.cardinality/many}})
(def conn (d/get-conn (str "/tmp/datalevin/" (random-uuid)) schema))
(def user-comp (sut/nc {:query [:user/id :user/name {:user/friends '...}] :name ::user :ident :user/id}))
(def bot-comp (sut/nc {:query [:bot/id :bot/name] :name ::bot :ident :bot/id}))
(def human-comp (sut/nc {:query [:human/id :human/name {:human/best-friend 1}] :name ::human :ident :human/id}))
(def author-comp (sut/nc {:query {:bot/id (sut/get-query bot-comp)
:user/id (sut/get-query user-comp)}
:name ::author}))
(def comment-comp (sut/nc {:query [:comment/id :comment/text {:comment/sub-comments '...}] :name ::comment :ident :comment/id}))
(def todo-comp (sut/nc {:query [:todo/id :todo/text {:todo/comment (sut/get-query comment-comp)}
{:todo/comments (sut/get-query comment-comp)}
{:todo/author (sut/get-query author-comp)}] :name ::todo :ident :todo/id}))
(def todo-q (sut/get-query todo-comp))
(def list-member-comp (sut/nc {:query {:comment/id (sut/get-query comment-comp) :todo/id todo-q} :name ::list-member}))
(def list-member-q (sut/get-query list-member-comp))
(def list-comp (sut/nc {:ident :list/id :name ::list
:query [:list/id :list/name
{:list/items (sut/get-query list-member-comp)}
{:list/members (sut/get-query list-member-comp)}]}))
(run! sut/register-component-subs! [user-comp bot-comp comment-comp todo-comp list-comp human-comp])
(d/transact! conn
[{:comment/id :comment-1 :comment/text "FIRST COMMENT" :comment/sub-comments ["comment-2"]}
{:db/id "comment-2" :comment/id :comment-2 :comment/text "SECOND COMMENT"}
{:comment/id :comment-3 :comment/text "THIRD COMMENT"}
{:db/id "human-1" :human/id :human-1 :human/name "human Y" :human/best-friend "human-1"}
{:human/id :human-2 :human/name "human X" :human/best-friend "human-3"}
{:db/id "human-3" :human/id :human-3 :human/name "human Z" :human/best-friend [:human/id :human-1]}
{:user/id :user-7 :user/name "user 7"}
{:user/id :user-6 :user/name "user 6" :user/friends [[:user/id :user-7]]}
{:user/id :user-5 :user/name "user 5" :user/friends [[:user/id :user-6] [:user/id :user-7]]}
{:db/id -2 :user/id :user-2 :user/name "user 2" :user/friends [-2 -1 -3 [:user/id :user-5]]}
{:db/id -1 :user/id :user-1 :user/name "user 1" :user/friends [-2]}
{:db/id -4 :user/id :user-4 :user/name "user 4" :user/friends [-3 [:user/id :user-4]]}
{:db/id -3 :user/id :user-3 :user/name "user 3" :user/friends [[:user/id :user-2] [:user/id :user-4]]}
{:todo/id :todo-2 :todo/text "todo 2" :todo/author [:user/id :user-2]}
{:list/id :list-1 :list/name "first list"
:list/members [[:comment/id :comment-1] [:todo/id :todo-2]]
:list/items [[:todo/id :todo-2] [:comment/id :comment-1]]}
{:bot/id :bot-1 :bot/name "bot 1"}
{:todo/id :todo-1 :todo/text "todo 1" :todo/author [:bot/id :bot-1] :todo/comment [:comment/id :comment-1]}
{:todo/id :todo-3 :todo/text "todo 3" :todo/comments [[:comment/id :comment-1] [:comment/id :comment-3]]}
{:user/id :user-9 :user/name "user 9" :user/friends ["user-10"]}
{:db/id "user-10" :user/id :user-10 :user/name "user 10" :user/friends [[:user/id :user-10] [:user/id :user-9] "user-11"]}
{:db/id "user-11" :user/id :user-11 :user/name "user 11" :user/friends [[:user/id :user-10] "user-12"]}
{:db/id "user-12" :user/id :user-12 :user/name "user 12" :user/friends [[:user/id :user-11] [:user/id :user-12]]}])
(defonce db_ (r/atom (d/db conn)))
(defn ent [ref] (d/entity @db_ (d/entid @db_ ref)))
(deftest union-queries-test
(testing "to-one union queries"
(is (= {:todo/id :todo-1, :todo/author {:bot/name "bot 1", :bot/id :bot-1}}
(<sub db_ [::todo {:todo/id :todo-1 sut/query-key [:todo/id :todo/author]}])))
(is (=
{:todo/id :todo-2, :todo/author {:user/friends (set [(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])]), :user/name "user 2", :user/id :user-2}}
(update-in (<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id :todo/author]}]) [:todo/author :user/friends] set)))
(is (= {:todo/id :todo-2, :todo/author {:user/name "user 2"}}
(<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id {:todo/author {:user/id [:user/name]
:does-not/exist [:a :b :c]}}]}])))
(testing "support for * query"
(is (= #:todo{:id :todo-1, :author {:bot/id :bot-1 :bot/name "bot 1"}} (<sub db_ [::todo {:todo/id :todo-1 sut/query-key [:todo/id {:todo/author ['*]}]}])))
(is (=
{:todo/id :todo-2,
:todo/author {:user/friends (set [(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])]), :user/name "user 2", :user/id :user-2}}
(update-in (<sub db_ [::todo {:todo/id :todo-2 sut/query-key [:todo/id {:todo/author '[*]}]}]) [:todo/author :user/friends] set)))))
(testing "to-many union queries"
(is (= {:list/items [{:comment/id :comment-1, :comment/text "FIRST COMMENT",
:comment/sub-comments [{:comment/id :comment-2, :comment/text "SECOND COMMENT"}]}
{:todo/id :todo-2, :todo/text "todo 2",
:todo/author {:user/id :user-2, :user/name "user 2",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-3, :user/name "user 3",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-4, :user/name "user 4",
:user/friends [{:user/id :user-4, :user/name "user 4", :user/friends #{(ent [:user/id :user-4]) (ent [:user/id :user-3])}}
{:user/id :user-3, :user/name "user 3", :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-4])}}]}]}
{:user/id :user-1, :user/name "user 1",
:user/friends [{:user/id :user-2, :user/name "user 2",
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}
{:user/id :user-5, :user/name "user 5",
:user/friends [{:user/id :user-7, :user/name "user 7"}
{:user/id :user-6, :user/name "user 6", :user/friends [{:user/id :user-7, :user/name "user 7"}]}]}]}}],
:list/members [{:comment/id :comment-1, :comment/text "FIRST COMMENT"} {:todo/id :todo-2, :todo/text "todo 2"}]}
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])))
(testing "unions should only return queried-for branches"
(is (= {:list/items []} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:todo2/id [:todo/id :todo/text]}}]}])))
(is (= {:list/items [{:comment/sub-comments #{(ent [:comment/id :comment-2])}, :comment/id :comment-1, :comment/text "FIRST COMMENT"}]} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:comment/id ['*]}}]}])))
(is (= {:list/items [{:comment/id :comment-1, :comment/text "FIRST COMMENT"}]} (<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:comment/id [:comment/id :comment/text]}}]}])))
(is (= {:list/items [{:todo/id :todo-2, :todo/text "todo 2"}]}
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items {:todo/id [:todo/id :todo/text]}}]}]))))))
(deftest walking-test
(testing "hashmap expansion"
(let [out (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
{:stop (mapv (fn [{:user/keys [id]}] [:user/id id]) (filter (fn [{:user/keys [name]}] (= name "user 3")) friends))
:expand (mapv (fn [{:user/keys [id]}] [:user/id id]) (remove (fn [{:user/keys [name]}] (= name "user 3")) friends))}))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])]
(is (= {:user/name "user 1",
:user/id :user-1,
:user/friends [{:user/name "user 2",
:user/id :user-2,
:user/friends [{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 5", :user/id :user-5,
:user/friends [{:user/name "user 7", :user/id :user-7} {:user/name "user 6", :user/id :user-6, :user/friends [{:user/name "user 7", :user/id :user-7}]}]}
{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}
{:user/id :user-3, :user/name "user 3", :user/friends [(ent [:user/id :user-4]) (ent [:user/id :user-2])]}]}]} out))))
(testing "collection expansion"
(let [out1 (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
(->> friends
(filter (fn [{:user/keys [name]}] (or (= name "user 3") (= name "user 2") (= name "user 1"))))
(mapv (fn [{:user/keys [id]}] [:user/id id])))))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])
out2 (<sub db_ [::user {`get-friends (fn [e]
(let [friends (map (fn [f-id] (ent (:db/id f-id))) (:user/friends e))]
(->> friends
(filter (fn [{:user/keys [name]}] (or (= name "user 2") (= name "user 1"))))
(mapv (fn [{:user/keys [id]}] [:user/id id])))))
:user/id :user-1 sut/query-key [:user/name :user/id
{(list :user/friends {sut/walk-fn-key `get-friends}) '...}]}])]
(is (= {:user/name "user 1", :user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 2", :user/id :user-2, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out2))
(is (= {:user/name "user 1", :user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 3", :user/id :user-3,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}
{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 2", :user/id :user-2,
:user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out1))))
(testing "truthy/falsey expansion"
(let [out (<sub db_ [::user {`keep-walking? (fn [e] (#{"user 1" "user 2"} (:user/name e))) :user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/walk-fn-key `keep-walking?}) '...}]}])]
(comment (d/touch (d/entity @db_ 10)))
(is (= {:user/name "user 1",
:user/id :user-1,
:user/friends [{:user/name "user 2", :user/id :user-2,
:user/friends [{:user/name "user 3", :user/id :user-3, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-4])}}
{:user/name "user 1", :user/id :user-1, :user/friends #{(ent [:user/id :user-2])}}
{:user/name "user 5", :user/id :user-5, :user/friends #{(ent [:user/id :user-6]) (ent [:user/id :user-7])}}
{:user/name "user 2", :user/id :user-2, :user/friends #{(ent [:user/id :user-2]) (ent [:user/id :user-3]) (ent [:user/id :user-1]) (ent [:user/id :user-5])}}]}]}
out)))))
(deftest xform-test
(testing "transform an attribute"
(is
{:user/name "USER 1", :user/id :user-1}
(<sub db_ [::user {'upper-case-name (fn [e] (update e :user/name str/upper-case))
'uppercase str/upper-case
:user/id :user-1
sut/query-key [(list :user/name {sut/xform-fn-key 'uppercase}) :user/id]}]))))
(comment
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])
(<sub db_ [::list {:list/id :list-1 sut/query-key [{:list/items list-member-q}
{:list/members {:comment/id [:comment/id :comment/text] :todo/id [:todo/id :todo/text]}}]}])
(d/touch (d/entity db 13))
(d/touch (d/entity db {:db/id 2}))
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 0}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 1}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 4}]}])
(<sub db_ [::user {:user/id :user-1 sut/query-key [:user/name :user/id {:user/friends 2}]}])
(d/touch (d/entity db [:user/id :user-12]))
(into {} (d/entity db [:user/id :user-12]))
(<sub db_ [::user {'keep-walking? (fn [e]
(println "IN KEEP walking? " e)
(#{"user 1" "user 2"} (:user/name e))
(= " user 1 " (: user / name e ) )
)
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/walk-fn-key 'keep-walking?}) '...}]}])
(<sub db_ [::user {'upper-case-name (fn [e] (println "IN xform fn " e) (update e :user/name str/upper-case))
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/xform-fn-key 'upper-case-name}) 4}]}])
(<sub db_ [::user {'upper-case-name (fn [e] (println "IN xform fn " e) (update e :user/name str/upper-case))
'keep-walking? (fn [e] (println "IN KEEP walking? " e) (#{"user 1" "user 2"} (:user/name e)))
:user/id :user-1
sut/query-key [:user/name :user/id {(list :user/friends {sut/xform-fn-key 'upper-case-name
sut/walk-fn-key 'keep-walking?}) '...}]}])
)
|
faf6dd8aec5bbda79d53a2b7f5c9fff8cf8c992e1ba9b39d4b2a9c81a2ec8d8c | GaloisInc/renovate | Relocation.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Renovate.Core.Relocation (
ArchitectureRelocation
, Relocation(..)
) where
import Data.Kind ( Type )
import qualified Data.Macaw.CFG as MC
import qualified Renovate.Core.Address as RCA
-- | An architecture-specific relocation extension type
--
-- If this is not needed, instantiate it as 'Void'
type family ArchitectureRelocation arch :: Type
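-- A minimal sketch of how this family is meant to be instantiated (illustrative
-- only; 'MyArch' is a made-up architecture tag, not something defined in this
-- package):
--
-- > import Data.Void (Void)
-- > type instance ArchitectureRelocation MyArch = Void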
-- | Representations of relocations that must be resolved during instruction concretization
--
-- Each operand is annotated with a relocation that contains enough information
-- to be resolved during the concretization phase of rewriting. These
-- relocations are analogous to those in object files, and represent symbolic
-- addresses that must be concretized.
data Relocation arch where
-- | A reference to an absolute address by means of a PC-relative offset that
-- needs to be re-computed when the PC of the instruction changes
--
-- These are largely used for referencing existing data values in a
-- position-independent way, as data is never assigned a symbolic address (as
-- moving existing data is too dangerous in general)
PCRelativeRelocation :: RCA.ConcreteAddress arch -> Relocation arch
-- | A reference to a symbolic location that should be referenced (probably by
-- a PC-relative offset) once both the containing instruction and target have
-- been assigned addresses.
--
-- These are assigned to injected code, injected data, and code that may move
-- (e.g., jump targets)
SymbolicRelocation :: RCA.SymbolicAddress arch -> Relocation arch
-- | An architecture-specific relocation type
--
-- If this is not needed, instantiate the 'ArchitectureRelocation' type as 'Void'
ArchRelocation :: ArchitectureRelocation arch -> Relocation arch
-- | For operands that do not require relocations
NoRelocation :: Relocation arch
deriving instance (Show (ArchitectureRelocation arch), MC.MemWidth (MC.ArchAddrWidth arch)) => Show (Relocation arch)
| null | https://raw.githubusercontent.com/GaloisInc/renovate/550f64c1119f6804967e3077dcf0cb7c57ddb603/renovate/src/Renovate/Core/Relocation.hs | haskell | # LANGUAGE GADTs #
| An architecture-specific relocation extension type
If this is not needed, instantiate it as 'Void'
| Representations of relocations that must be resolved during instruction concretization
Each operand is annotated with a relocation that contains enough information
to be resolved during the concretization phase of rewriting. These
relocations are analogous to those in object files, and represent symbolic
addresses that must be concretized.
| A reference to an absolute address by means of a PC-relative offset that
needs to be re-computed when the PC of the instruction changes
These are largely used for referencing existing data values in a
position-independent way, as data is never assigned a symbolic address (as
moving existing data is too dangerous in general)
| A reference to a symbolic location that should be referenced (probably by
a PC-relative offset) once both the containing instruction and target have
been assigned addresses.
These are assigned to injected code, injected data, and code that may move
(e.g., jump targets)
| An architecture-specific relocation type
If this is not needed, instantiate the 'ArchitectureRelocation' type as 'Void'
| For operands that do not require relocations | # LANGUAGE FlexibleContexts #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Renovate.Core.Relocation (
ArchitectureRelocation
, Relocation(..)
) where
import Data.Kind ( Type )
import qualified Data.Macaw.CFG as MC
import qualified Renovate.Core.Address as RCA
type family ArchitectureRelocation arch :: Type
data Relocation arch where
PCRelativeRelocation :: RCA.ConcreteAddress arch -> Relocation arch
SymbolicRelocation :: RCA.SymbolicAddress arch -> Relocation arch
ArchRelocation :: ArchitectureRelocation arch -> Relocation arch
NoRelocation :: Relocation arch
deriving instance (Show (ArchitectureRelocation arch), MC.MemWidth (MC.ArchAddrWidth arch)) => Show (Relocation arch)
|
65f17319e81932d3ad68de5a57115e945f50ddae479d646bf4808d3988b7b9c2 | shimmering-void/sketches | core_test.cljs | (ns sketches.core-test
(:require
[cljs.test :refer-macros [deftest is testing]]
[sketches.core :refer [multiply]]))
(deftest multiply-test
(is (= (* 1 2) (multiply 1 2))))
(deftest multiply-test-2
(is (= (* 75 10) (multiply 10 75))))
| null | https://raw.githubusercontent.com/shimmering-void/sketches/84d29636a798720e8db3379c21814fe9f92686fd/test/sketches/core_test.cljs | clojure | (ns sketches.core-test
(:require
[cljs.test :refer-macros [deftest is testing]]
[sketches.core :refer [multiply]]))
(deftest multiply-test
(is (= (* 1 2) (multiply 1 2))))
(deftest multiply-test-2
(is (= (* 75 10) (multiply 10 75))))
|
|
2eae85ace1f8b577c25a2bf5308449c52d5afed94b691dc917a8062168d33362 | vollmerm/lalla-chess | move.lisp | ;;;; move.lisp
;;;;
;;;; This file contains the procedures that generate and handle moves. Moves are stored in numbers with bitmapping.
(in-package #:lalla)
(declaim (optimize speed))
;; Moves are stored in 18-bit words.
;; They consist of a from square, a to square, and a series of tags.
;; The following functions are short convenience functions and are marked for inlining.
(defun* (move-from -> (unsigned-byte 7)) ((m (unsigned-byte 18)))
(ldb (byte 7 0) m))
(defun* (move-to -> (unsigned-byte 7)) ((m (unsigned-byte 18)))
(ldb (byte 7 7) m))
(defun* (move-tag -> (unsigned-byte 4)) ((m (unsigned-byte 18)))
(ldb (byte 4 14) m))
(defun* (move-capture-bit -> (unsigned-byte 1)) ((m (unsigned-byte 18)))
(ldb (byte 1 0) (move-tag m)))
(defun* (move-capture -> boolean) ((m (unsigned-byte 18)))
(= (move-capture-bit m) 1))
(defun* (move-promotion -> boolean) ((m (unsigned-byte 18)))
(= (ldb (byte 1 1) (move-tag m)) 1))
(defun* (move-ep-bit -> (unsigned-byte 1)) ((m (unsigned-byte 18)))
(ldb (byte 1 2) (move-tag m)))
(defun* (move-ep -> boolean) ((m (unsigned-byte 18)))
(= (move-ep-bit m) 1))
(defun* (move-castle -> boolean) ((m (unsigned-byte 18)))
(= (ldb (byte 1 3) (move-tag m)) 1))
;; Create a number that represents a move
(defun* (create-move -> (unsigned-byte 18))
((from (unsigned-byte 7)) (to (unsigned-byte 7))
(capture (unsigned-byte 1)) (promotion (unsigned-byte 1))
(ep (unsigned-byte 1)) (castle (unsigned-byte 1)))
(let ((m 0))
(declare ((unsigned-byte 18) m))
(setf (ldb (byte 7 0) m) from)
(setf (ldb (byte 7 7) m) to)
(setf (ldb (byte 1 14) m) capture)
(setf (ldb (byte 1 15) m) promotion)
(setf (ldb (byte 1 16) m) ep)
(setf (ldb (byte 1 17) m) castle)
m))
(declaim (inline move-from move-to move-tag move-capture move-capture-bit
move-promotion move-ep move-ep-bit move-castle create-move))
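;; Illustrative round trip (not part of the engine; the values are made up):
;; packing a capturing move from square 4 to square 20 and reading it back.
;;   (let ((m (create-move 4 20 1 0 0 0)))
;;     (list (move-from m) (move-to m) (move-capture m)))
;;   => (4 20 T)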
;; This is the maximum number of moves that could be generated.
(defconstant max-move-count 218)
;; These are some functions for converting stuff to strings
(defparameter* (file-string string) "abcdefgh") ;; use char index of string
(defun* (position-file -> standard-char) ((index (mod 128)))
(char file-string (logand index 7)))
(defparameter* (rank-string string) "12345678")
(defun* (position-rank -> standard-char) ((index (mod 128)))
(char rank-string (- 7 (ash index -4))))
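;; For illustration (hypothetical square, not engine output): index #x34 has
;; file (logand #x34 7) = 4 -> #\e and rank (- 7 (ash #x34 -4)) = 4 -> #\5,
;; so a move touching that square prints its coordinate as "e5".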
;; Create a string that represents a move (useful for printing to the screen)
(defun* (move->string -> string) ((m (unsigned-byte 18)))
;; with-output-to-string is pretty cool
(with-output-to-string (stream)
(princ (position-file (move-from m)) stream)
(princ (position-rank (move-from m)) stream)
(princ (position-file (move-to m)) stream)
(princ (position-rank (move-to m)) stream)
;; all generated promotions are queen promotions
(when (move-promotion m) (princ #\q stream))))
;; Generate moves for a certain side!
;; Just a warning: you're going to need a wide screen/window to read this function.
;; It gets nested pretty deep (it might help if you listen to Inception music...
;; do people still tell that joke? "Must go deeper..." Sigh...)
(defun* (generate-moves -> (vector (unsigned-byte 18)))
((turn-color (unsigned-byte 1)))
;; Create a vector for moves, with a fill pointer. When moves are generated
;; they will be pushed onto the vector.
;; This is not ideal for performance. It would be better to store moves in
;; a simple array and manage the fill pointer manually as a separate value.
;; That would allow SBCL to optimize access to the structure. For now, it has
;; to do some extra work at runtime to manage access to the vector.
(let ((moves-vector
(make-array max-move-count
:element-type '(unsigned-byte 18)
:fill-pointer 0
:initial-element 0)))
;; Move generation involves one loop nested inside another. The outer loop
;; goes through every square on the board (which is inefficient---eventually
;; it should use a list of piece locations), and the inner loop generates
;; moves for each piece. When moves are generated they're pushed onto
;; moves-vector.
(loop
for square from 0 to 127
when (and (not (off-board square))
(not (blank-square square))
(= turn-color (piece-color (aref board square)))) do
;; Now that we've found a square, we extract information about it
;; and bind it to values in a big let (you know the drill by now).
(*let ((piece (unsigned-byte 4) (aref board square))
(color (unsigned-byte 1) (piece-color piece))
(type (unsigned-byte 3) (piece-type piece))
(sliding boolean (aref sliding-piece type))
(start (unsigned-byte 8) (aref step-offset type))
(step (signed-byte 8) (aref piece-steps start))
(iter-square (mod 128) square))
;; pawns have extra rules, so we check that first
(generate-pawn-special square moves-vector type)
;; iterate through each step amount.
;; there's an array of step offsets in piece.lisp, so see
;; the values there for each piece if you're curious. they're
;; pretty self-explanatory.
(loop while (/= step 0) do
;; this is the general strategy for generating moves:
;; each piece is either sliding or non-sliding, and
;; each piece has a series of "step offsets" associated
;; with it. for example, the bishop is a sliding piece
;; and it can go +15, +17, -15, and -17. if you go look
;; at the board, or remember the layout of the 0x88 board,
;; you can see that adding these numbers to the current
;; bishop location will yield one-step jumps for it.
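;; (Worked example, assuming the usual 0x88 indexing of square = rank*16 + file:
;; from square #x34, adding +17 gives #x45, one step up and one file over, and
;; adding -17 gives #x23; stepping off the edge, e.g. #x07 + 17 = #x18, is
;; presumably what the off-board check catches, since (logand #x18 #x88) is
;; non-zero.)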
(setf iter-square square) ;; add offset
;; start inner block. this can be returned from inside the loop
(block inner
(loop while (and (> (+ iter-square step) -1)
(< (+ iter-square step) 128)) do
;; inner loop for different step amounts
;; make one step
(if (or (off-board iter-square) ; jump off board
(and (not (blank-square iter-square)) ; non-blank square
(= color (square-color iter-square)))) ; hit own piece
(return-from inner) ; break
(progn ; make move
; generate base move
(unless (and (or (= type 1) ; pawns must capture on
(= type 2)) ; diagonal moves
(blank-square iter-square))
;; time to generate a move!
(vector-push
(create-move square iter-square ; to/from
(if (blank-square iter-square) 0 1) ; capture
(if (and (is-pawn square)
(or (and (= (get-rank iter-square) 7)
(= turn-color 1))
(and (= (get-rank iter-square) 0)
(= turn-color 0))))
1 0) ; promotion if we reach end of board
0 ; ep
0) ; castle
moves-vector))
(unless sliding (return-from inner)))))) ;; break if not sliding pieces
;; increment the start value to try the next offset amount
(incf start)
(setf step (aref piece-steps start)))))
;; now that the moves have been generated, they need to be sorted.
;; currently this just uses a very simple sort procedure. all captures
;; go first, then everything else
(sort moves-vector #'> :key #'move-capture-bit)))
;; Pawns have weird rules. This function handles moving forward.
(defun* (generate-pawn-special -> :void)
((square (mod 128)) (moves-vector (vector (unsigned-byte 18)))
(type (unsigned-byte 3)))
;; needs to be a pawn!
(when (or (and (= type 1) (= (get-rank square) 6))
(and (= type 2) (= (get-rank square) 1)))
;; come up with an increment value, either positive or negative
;; depending on whether we're moving up or down
(let ((increment (if (= type 1) -16 16)))
;; generate one step up
(when (blank-square (+ square increment))
(vector-push
(create-move square (+ square increment)
0
0
0
0)
moves-vector)
;; generate two steps up
;; we can't go two steps if the first step wasn't possible, so
;; we nest this inside the above when clause.
(when (blank-square (+ square (* 2 increment)))
(vector-push
(create-move square (+ square (* 2 increment))
0
0
1 ; ep move
0)
moves-vector)))))
(values)) ; return nothing
;; this determines if a move is a king capture. it's useful later in the search process
(defun* (king-capture -> boolean) ((m (unsigned-byte 18)))
(*let ((to (unsigned-byte 7) (move-to m))
(replaced (unsigned-byte 4) (aref board to))
(type (unsigned-byte 3) (piece-type replaced)))
(= type 7)))
(declaim (inline king-capture))
| null | https://raw.githubusercontent.com/vollmerm/lalla-chess/62e4068eba7f3288c6e99a7977bd0cc4f4f4190b/move.lisp | lisp | move.lisp
This file contains the procedures that generate and handle moves. Moves are stored in numbers with bitmapping.
They consist of a from square, a to square, and a series of tags.
The following functions are short convenience functions and are marked for inlining.
Create a number that represents a move
This is the maximum number of moves that could be generated.
These are some functions for converting stuff to strings
use char index of string
Create a string that represents a move (useful for printing to the screen)
with-output-to-string is pretty cool
all generated promotions are queen promotions
Generate moves for a certain side!
Just a warning: you're going to need a wide screen/window to read this function.
It gets nested pretty deep (it might help if you listen to Inception music...
do people still tell that joke? "Must go deeper..." Sigh...)
Create a vector for moves, with a fill pointer. When moves are generated
they will be pushed onto the vector.
This is not ideal for performance. It would be better to store moves in
a simple array and manage the fill pointer manually as a separate value.
to do some extra work at runtime to manage access to the vector.
goes through every square on the board (which is inefficient---eventually
it should use a list of piece locations), and the inner loop generates
moves for each piece. When moves are generated they're pushed onto
moves-vector.
Now that we've found a square, we extract information about it
and bind it to values in a big let (you know the drill by now).
iterate through each step amount.
there's an array of step offsets in piece.lisp, so see
the values there for each piece if you're curious. they're
pretty self-explanatory.
this is the general strategy for generating moves:
each piece is either sliding or non-sliding, and
each piece has a series of "step offsets" associated
with it. for example, the bishop is a sliding piece
you can see that adding these numbers to the current
add offset
start inner block. this cancel be returned from in the loop
inner loop for different step amounts
jump off board
non-blank square
hit own piece
break
make move
generate base move
pawns must capture on
diagonal moves
time to generate a move!
to/from
capture
promotion if we reach end of board
ep
castle
break if not sliding pieces
increment the start value to try the next offset amount
now that the moves have been generated, they need to be sorted.
currently this just uses a very simple sort procedure. all captures
Pawns have weird rules. This function handles moving forward.
needs to be a pawn!
come up with an increment value. either positive and negative
depending on whether we're moving up or down
we nest this inside the above when clause.
ep move
return nothing
this determines if a move is a king capture. it's useful later in the search process |
(in-package #:lalla)
(declaim (optimize speed))
Moves are stored in 18 - bit words .
(defun* (move-from -> (unsigned-byte 7)) ((m (unsigned-byte 18)))
(ldb (byte 7 0) m))
(defun* (move-to -> (unsigned-byte 7)) ((m (unsigned-byte 18)))
(ldb (byte 7 7) m))
(defun* (move-tag -> (unsigned-byte 4)) ((m (unsigned-byte 18)))
(ldb (byte 4 14) m))
(defun* (move-capture-bit -> (unsigned-byte 1)) ((m (unsigned-byte 18)))
(ldb (byte 1 0) (move-tag m)))
(defun* (move-capture -> boolean) ((m (unsigned-byte 18)))
(= (move-capture-bit m) 1))
(defun* (move-promotion -> boolean) ((m (unsigned-byte 18)))
(= (ldb (byte 1 1) (move-tag m)) 1))
(defun* (move-ep-bit -> (unsigned-byte 1)) ((m (unsigned-byte 18)))
(ldb (byte 1 2) (move-tag m)))
(defun* (move-ep -> boolean) ((m (unsigned-byte 18)))
(= (move-ep-bit m) 1))
(defun* (move-castle -> boolean) ((m (unsigned-byte 18)))
(= (ldb (byte 1 3) (move-tag m)) 1))
(defun* (create-move -> (unsigned-byte 18))
((from (unsigned-byte 7)) (to (unsigned-byte 7))
(capture (unsigned-byte 1)) (promotion (unsigned-byte 1))
(ep (unsigned-byte 1)) (castle (unsigned-byte 1)))
(let ((m 0))
(declare ((unsigned-byte 18) m))
(setf (ldb (byte 7 0) m) from)
(setf (ldb (byte 7 7) m) to)
(setf (ldb (byte 1 14) m) capture)
(setf (ldb (byte 1 15) m) promotion)
(setf (ldb (byte 1 16) m) ep)
(setf (ldb (byte 1 17) m) castle)
m))
(declaim (inline move-from move-to move-tag move-capture move-capture-bit
move-promotion move-ep move-ep-bit move-castle make-move))
(defconstant max-move-count 218)
(defun* (position-file -> standard-char) ((index (mod 128)))
(char file-string (logand index 7)))
(defparameter* (rank-string string) "12345678")
(defun* (position-rank -> standard-char) ((index (mod 128)))
(char rank-string (- 7 (ash index -4))))
(defun* (move->string -> string) ((m (unsigned-byte 18)))
(with-output-to-string (stream)
(princ (position-file (move-from m)) stream)
(princ (position-rank (move-from m)) stream)
(princ (position-file (move-to m)) stream)
(princ (position-rank (move-to m)) stream)
(when (move-promotion m) (princ #\q stream))))
(defun* (generate-moves -> (vector (unsigned-byte 18)))
((turn-color (unsigned-byte 1)))
That would allow SBCL to optimize access to the structure . For now , it has
(let ((moves-vector
(make-array max-move-count
:element-type '(unsigned-byte 18)
:fill-pointer 0
:initial-element 0)))
Move generation involves one loop nested inside another . The outer loop
(loop
for square from 0 to 127
when (and (not (off-board square))
(not (blank-square square))
(= turn-color (piece-color (aref board square)))) do
(*let ((piece (unsigned-byte 4) (aref board square))
(color (unsigned-byte 1) (piece-color piece))
(type (unsigned-byte 3) (piece-type piece))
(sliding boolean (aref sliding-piece type))
(start (unsigned-byte 8) (aref step-offset type))
(step (signed-byte 8) (aref piece-steps start))
(iter-square (mod 128) square))
pawns have extra rules , so we check that first
(generate-pawn-special square moves-vector type)
(loop while (/= step 0) do
and it can go +15 , +17 , -15 , and -17 . if you go look
at the board , or remember the layout of the 0x88 board ,
bishop location will yield one - step jumps for it .
(block inner
(loop while (and (> (+ iter-square step) -1)
(< (+ iter-square step) 128)) do
make one step
(blank-square iter-square))
(vector-push
(if (and (is-pawn square)
(or (and (= (get-rank iter-square) 7)
(= turn-color 1))
(and (= (get-rank iter-square) 0)
(= turn-color 0))))
moves-vector))
(incf start)
(setf step (aref piece-steps start)))))
go first , then everything else
(sort moves-vector #'> :key #'move-capture-bit)))
(defun* (generate-pawn-special -> :void)
((square (mod 128)) (moves-vector (vector (unsigned-byte 18)))
(type (unsigned-byte 3)))
(when (or (and (= type 1) (= (get-rank square) 6))
(and (= type 2) (= (get-rank square) 1)))
(let ((increment (if (= type 1) -16 16)))
generate one step up
(when (blank-square (+ square increment))
(vector-push
(create-move square (+ square increment)
0
0
0
0)
moves-vector)
generate two steps up
we ca n't go two steps if the first step was n't possible , so
(when (blank-square (+ square (* 2 increment)))
(vector-push
(create-move square (+ square (* 2 increment))
0
0
0)
moves-vector)))))
(defun* (king-capture -> boolean) ((m (unsigned-byte 18)))
(*let ((to (unsigned-byte 7) (move-to m))
(replaced (unsigned-byte 4) (aref board to))
(type (unsigned-byte 3) (piece-type replaced)))
(= type 7)))
(declaim (inline king-capture))
|
143b331f6ed3f165728da0eaae0f62a1210024604dcd41f7834dcdb7e65141bc | manuel-serrano/hop | compile_optimized_boolify.scm | ;*=====================================================================*/
;* .../hop/2.5.x/scheme2js/compile_optimized_boolify.scm */
;* ------------------------------------------------------------- */
* Author : * /
* Creation : 2007 - 11 * /
* Last change : Thu Sep 5 16:05:44 2013 ( serrano ) * /
* Copyright : 2013 * /
;* ------------------------------------------------------------- */
;* Conditional expression compilation */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module compile-optimized-boolify
(import config
tools
nodes
export-desc
template-display
verbose)
(export (compile-boolified p compile::procedure node::Node)))
;*---------------------------------------------------------------------*/
;* compile-optimized-if-boolify ... */
;*---------------------------------------------------------------------*/
(define (compile-optimized-if-boolify p compile n)
(with-access::If n (test then else)
(if (and (isa? else Const)
(with-access::Const else (value)
(not value)))
(template-display p
"(~e&&~e)"
(compile test p #f #f)
(compile-optimized-boolify p compile then))
(compile-unoptimized-boolify p compile n))))
;*---------------------------------------------------------------------*/
;* compile-optimized-boolify ... */
;*---------------------------------------------------------------------*/
(define (compile-optimized-boolify p compile n)
(cond
((isa? n Call)
(with-access::Call n (operator operands)
(if (isa? operator Ref)
(with-access::Ref operator (var)
(with-access::Var var (kind constant? export-desc)
(if (and (or (eq? kind 'exported) (eq? kind 'imported))
constant?
(eq? (with-access::Export-Desc export-desc
(return-type) return-type)
'bool))
(compile n p #f #f)
(compile-unoptimized-boolify p compile n))))
(compile-unoptimized-boolify p compile n))))
((isa? n If)
(compile-optimized-if-boolify p compile n))
((isa? n Const)
(with-access::Const n (value)
(template-display p
"~a" (if value "true" "false"))))
(else
(compile-unoptimized-boolify p compile n))))
;*---------------------------------------------------------------------*/
;* compile-unoptimized-boolify ... */
;*---------------------------------------------------------------------*/
(define (compile-unoptimized-boolify p compile node)
(template-display p
"(~e !== false)"
(compile node p #f #f)))
;*---------------------------------------------------------------------*/
;* compile-boolified ... */
;*---------------------------------------------------------------------*/
(define (compile-boolified p compile node)
;; TODO: get rid of '(config ... )
(if (config 'optimize-boolify)
(compile-optimized-boolify p compile node)
(compile-unoptimized-boolify p compile node)))
| null | https://raw.githubusercontent.com/manuel-serrano/hop/481cb10478286796addd2ec9ee29c95db27aa390/scheme2js/compile_optimized_boolify.scm | scheme | *=====================================================================*/
* .../hop/2.5.x/scheme2js/compile_optimized_boolify.scm */
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* Conditional expression compilation */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* compile-optimized-if-boolify ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* compile-optimized-boolify ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* compile-unoptimzed-boolify ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* compile-boolified ... */
*---------------------------------------------------------------------*/
TODO: get rid of '(config ... ) | * Author : * /
* Creation : 2007 - 11 * /
* Last change : Thu Sep 5 16:05:44 2013 ( serrano ) * /
* Copyright : 2013 * /
(module compile-optimized-boolify
(import config
tools
nodes
export-desc
template-display
verbose)
(export (compile-boolified p compile::procedure node::Node)))
(define (compile-optimized-if-boolify p compile n)
(with-access::If n (test then else)
(if (and (isa? else Const)
(with-access::Const else (value)
(not value)))
(template-display p
"(~e&&~e)"
(compile test p #f #f)
(compile-optimized-boolify p compile then))
(compile-unoptimized-boolify p compile n))))
(define (compile-optimized-boolify p compile n)
(cond
((isa? n Call)
(with-access::Call n (operator operands)
(if (isa? operator Ref)
(with-access::Ref operator (var)
(with-access::Var var (kind constant? export-desc)
(if (and (or (eq? kind 'exported) (eq? kind 'imported))
constant?
(eq? (with-access::Export-Desc export-desc
(return-type) return-type)
'bool))
(compile n p #f #f)
(compile-unoptimized-boolify p compile n))))
(compile-unoptimized-boolify p compile n))))
((isa? n If)
(compile-optimized-if-boolify p compile n))
((isa? n Const)
(with-access::Const n (value)
(template-display p
"~a" (if value "true" "false"))))
(else
(compile-unoptimized-boolify p compile n))))
(define (compile-unoptimized-boolify p compile node)
(template-display p
"(~e !== false)"
(compile node p #f #f)))
(define (compile-boolified p compile node)
(if (config 'optimize-boolify)
(compile-optimized-boolify p compile node)
(compile-unoptimized-boolify p compile node)))
|
24b2f34a15e9ee36fa5923319333601d0bea21f9f5c930e8ae13b718384e8049 | ruanpienaar/goanna | goanna_common_tests.erl | -module(goanna_common_tests).
-include_lib("eunit/include/eunit.hrl").
goanna_common_unit_test_() ->
{setup,
% Setup Fixture
fun() ->
xxx
end,
% Cleanup Fixture
fun(xxx) ->
ok
end,
% List of tests
[
% Example test
{"goanna_common:prop_value/0",
?_assert(unit_testing:try_test_fun(fun prop_value/0))},
{"goanna_common:get_trace_abbreviation/1",
?_assert(unit_testing:try_test_fun(fun get_trace_abbreviation/0))},
{"goanna_common:trace_abbreviations/0",
?_assert(unit_testing:try_test_fun(fun trace_abbreviations/0))},
{"goanna_common:format_trace_item/0",
?_assert(unit_testing:try_test_fun(fun format_trace_item/0))},
{"goanna_common:get_time/0",
?_assert(unit_testing:try_test_fun(fun get_time/0))}
]
}.
prop_value() ->
?assertEqual(
default,
goanna_common:prop_value(field, [], default)
),
?assertEqual(
value,
goanna_common:prop_value(field, [{field, value}], default)
),
?assertEqual(
other,
goanna_common:prop_value(field, [{field, other}, {field, value}], default)
).
get_trace_abbreviation() ->
?assertEqual(
"REC",
goanna_common:get_trace_abbreviation('receive')
),
?assertEqual(
"S",
goanna_common:get_trace_abbreviation(send)
),
?assertEqual(
"STNEP",
goanna_common:get_trace_abbreviation(send_to_non_existing_process)
),
?assertEqual(
"C",
goanna_common:get_trace_abbreviation(call)
),
?assertEqual(
"RT",
goanna_common:get_trace_abbreviation(return_to)
),
?assertEqual(
"RF",
goanna_common:get_trace_abbreviation(return_from)
),
?assertEqual(
"EF",
goanna_common:get_trace_abbreviation(exception_from)
),
?assertEqual(
"SPW",
goanna_common:get_trace_abbreviation(spawn)
),
?assertEqual(
"EXI",
goanna_common:get_trace_abbreviation(exit)
),
?assertEqual(
"LI",
goanna_common:get_trace_abbreviation(link)
),
?assertEqual(
"ULI",
goanna_common:get_trace_abbreviation(unlink)
),
?assertEqual(
"GLI",
goanna_common:get_trace_abbreviation(getting_linked)
),
?assertEqual(
"GULI",
goanna_common:get_trace_abbreviation(getting_unlinked)
),
?assertEqual(
"REG",
goanna_common:get_trace_abbreviation(register)
),
?assertEqual(
"UNREG",
goanna_common:get_trace_abbreviation(unregister)
),
?assertEqual(
"I",
goanna_common:get_trace_abbreviation(in)
),
?assertEqual(
"O",
goanna_common:get_trace_abbreviation(out)
),
?assertEqual(
"GCS",
goanna_common:get_trace_abbreviation(gc_start)
),
?assertEqual(
"GCE",
goanna_common:get_trace_abbreviation(gc_end)
).
trace_abbreviations() ->
ok = goanna_common:trace_abbreviations().
format_trace_item() ->
ReportedTS = {_MegaSecs=1540, _Secs=666854, _MicroSecs=911643},
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, ReportedTS})
)
),
% ?assertEqual(
% "",
% lists:flatten(
% goanna_common:format_trace_item('n1@host', {trace_ts, self(), label, info, ReportedTS})
% )
% ),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REC : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), 'receive', info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host S : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host STNEP: info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send_to_non_existing_process, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RT : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_to, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host SPW : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), spawn, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EXI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exit, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host LI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), link, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host ULI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unlink, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GLI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_linked, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GULI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_unlinked, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REG : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), register, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host UNREG: info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unregister, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host I : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), in, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host O : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), out, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCS : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_start, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCE : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_end, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, extra, ReportedTS})
)
),
% ?assertEqual(
% "",
% lists:flatten(
% goanna_common:format_trace_item('n1@host', {trace_ts, self(), label, info, extra, ReportedTS})
% )
% ),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REC : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), 'receive', info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host S : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host STNEP: info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send_to_non_existing_process, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RT : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_to, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host SPW : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), spawn, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EXI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exit, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host LI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), link, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host ULI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unlink, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GLI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_linked, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GULI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_unlinked, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REG : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), register, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host UNREG: info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unregister, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host I : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), in, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host O : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), out, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCS : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_start, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCE : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_end, info, extra, ReportedTS})
)
),
?assertEqual(
"n1@host {seq_trace,label,seq_trace_info}",
lists:flatten(
goanna_common:format_trace_item('n1@host', {seq_trace, label, seq_trace_info})
)
),
?assertEqual(
"n1@host {drop,1234}",
lists:flatten(
goanna_common:format_trace_item('n1@host', {drop, 1234})
)
),
?assertEqual(
"n1@host bla",
lists:flatten(
goanna_common:format_trace_item('n1@host', bla)
)
).
get_time() ->
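    %% Arithmetic behind the expected value (illustrative): {1540, 666854, 911643}
    %% is 1540 * 1000000 + 666854 = 1540666854 seconds after the Unix epoch,
    %% i.e. 2018-10-27 19:00:54 UTC, with the 911643 microseconds appended.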
?assertEqual(
"2018-10-27T19:00:54.911643",
goanna_common:get_time({_MegaSecs=1540, _Secs=666854, _MicroSecs=911643})
). | null | https://raw.githubusercontent.com/ruanpienaar/goanna/52d75566fd6f9760fbdebe53b2ca3c82fdb44e01/test/goanna_common_tests.erl | erlang | Setup Fixture
Cleanup Fixture
List of tests
Example test
"",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), label, info, ReportedTS})
)
),
"",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), label, info, extra, ReportedTS})
)
), | -module(goanna_common_tests).
-include_lib("eunit/include/eunit.hrl").
goanna_common_unit_test_() ->
{setup,
fun() ->
xxx
end,
fun(xxx) ->
ok
end,
[
{"goanna_common:prop_value/0",
?_assert(unit_testing:try_test_fun(fun prop_value/0))},
{"goanna_common:get_trace_abbreviation/1",
?_assert(unit_testing:try_test_fun(fun get_trace_abbreviation/0))},
{"goanna_common:trace_abbreviations/0",
?_assert(unit_testing:try_test_fun(fun trace_abbreviations/0))},
{"goanna_common:format_trace_item/0",
?_assert(unit_testing:try_test_fun(fun format_trace_item/0))},
{"goanna_common:get_time/0",
?_assert(unit_testing:try_test_fun(fun get_time/0))}
]
}.
prop_value() ->
?assertEqual(
default,
goanna_common:prop_value(field, [], default)
),
?assertEqual(
value,
goanna_common:prop_value(field, [{field, value}], default)
),
?assertEqual(
other,
goanna_common:prop_value(field, [{field, other}, {field, value}], default)
).
get_trace_abbreviation() ->
?assertEqual(
"REC",
goanna_common:get_trace_abbreviation('receive')
),
?assertEqual(
"S",
goanna_common:get_trace_abbreviation(send)
),
?assertEqual(
"STNEP",
goanna_common:get_trace_abbreviation(send_to_non_existing_process)
),
?assertEqual(
"C",
goanna_common:get_trace_abbreviation(call)
),
?assertEqual(
"RT",
goanna_common:get_trace_abbreviation(return_to)
),
?assertEqual(
"RF",
goanna_common:get_trace_abbreviation(return_from)
),
?assertEqual(
"EF",
goanna_common:get_trace_abbreviation(exception_from)
),
?assertEqual(
"SPW",
goanna_common:get_trace_abbreviation(spawn)
),
?assertEqual(
"EXI",
goanna_common:get_trace_abbreviation(exit)
),
?assertEqual(
"LI",
goanna_common:get_trace_abbreviation(link)
),
?assertEqual(
"ULI",
goanna_common:get_trace_abbreviation(unlink)
),
?assertEqual(
"GLI",
goanna_common:get_trace_abbreviation(getting_linked)
),
?assertEqual(
"GULI",
goanna_common:get_trace_abbreviation(getting_unlinked)
),
?assertEqual(
"REG",
goanna_common:get_trace_abbreviation(register)
),
?assertEqual(
"UNREG",
goanna_common:get_trace_abbreviation(unregister)
),
?assertEqual(
"I",
goanna_common:get_trace_abbreviation(in)
),
?assertEqual(
"O",
goanna_common:get_trace_abbreviation(out)
),
?assertEqual(
"GCS",
goanna_common:get_trace_abbreviation(gc_start)
),
?assertEqual(
"GCE",
goanna_common:get_trace_abbreviation(gc_end)
).
trace_abbreviations() ->
ok = goanna_common:trace_abbreviations().
format_trace_item() ->
ReportedTS = {_MegaSecs=1540, _Secs=666854, _MicroSecs=911643},
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, ReportedTS})
)
),
? (
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REC : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), 'receive', info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host S : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host STNEP: info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send_to_non_existing_process, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RT : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_to, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host SPW : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), spawn, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EXI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exit, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host LI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), link, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host ULI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unlink, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GLI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_linked, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GULI : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_unlinked, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REG : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), register, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host UNREG: info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unregister, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host I : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), in, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host O : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), out, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCS : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_start, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCE : info\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_end, info, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, extra, ReportedTS})
)
),
? (
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REC : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), 'receive', info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host S : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host STNEP: info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), send_to_non_existing_process, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host C : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), call, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RT : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_to, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host RF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), return_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EF : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exception_from, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host SPW : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), spawn, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host EXI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), exit, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host LI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), link, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host ULI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unlink, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GLI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_linked, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GULI : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), getting_unlinked, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host REG : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), register, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host UNREG: info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), unregister, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host I : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), in, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host O : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), out, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCS : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_start, info, extra, ReportedTS})
)
),
?assertEqual(
"2018-10-27T19:00:54.911643 n1@host GCE : info extra\n",
lists:flatten(
goanna_common:format_trace_item('n1@host', {trace_ts, self(), gc_end, info, extra, ReportedTS})
)
),
?assertEqual(
"n1@host {seq_trace,label,seq_trace_info}",
lists:flatten(
goanna_common:format_trace_item('n1@host', {seq_trace, label, seq_trace_info})
)
),
?assertEqual(
"n1@host {drop,1234}",
lists:flatten(
goanna_common:format_trace_item('n1@host', {drop, 1234})
)
),
?assertEqual(
"n1@host bla",
lists:flatten(
goanna_common:format_trace_item('n1@host', bla)
)
).
get_time() ->
?assertEqual(
"2018-10-27T19:00:54.911643",
goanna_common:get_time({_MegaSecs=1540, _Secs=666854, _MicroSecs=911643})
). |
449548b1ad83364427ca46d782cc8934b97e00ea76ce8bead27a51f43530e9ca | icfpcontest2021/icfpcontest2021.github.io | JasperSolver.hs | -- | Some ideas:
--
-- * Add neighbours that preserve distances, such as move, flip, rotate
-- * For pinching in an invalid state, we should pick the vertices that are the
-- furthest away with a higher probability than the closer ones
-- * For pinching in a valid state, we should pick the vertices that contribute
-- the most to our score (i.e. the worst ones) with a higher probability
-- * Simulated annealing really needs a contiuous scoring function to work well,
-- and ours is not because there's a big drop in invalid->valid. Either we
should make it contiuous or we should run this in two phases .
{-# LANGUAGE BangPatterns #-}
# LANGUAGE RecordWildCards #
module BrainWall.JasperSolver where
import BrainWall.Box (Box)
import qualified BrainWall.Box as Box
import BrainWall.Database (allFeatures)
import BrainWall.Edge
import qualified BrainWall.JasperSolver.HillClimb as HC
import BrainWall.JasperSolver.Limbs
import qualified BrainWall.JasperSolver.SimulatedAnnealing as SA
import BrainWall.Json
import BrainWall.Polygon
import BrainWall.Polygon.ContainsPoint
import BrainWall.Problem
import BrainWall.Svg
import BrainWall.V2
import Control.Lens (preview)
import Control.Monad (guard)
import qualified Data.Aeson as Aeson
import Data.Bifunctor (first)
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.IntMap as IM
import Data.List (foldl')
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe (fromMaybe,
maybeToList)
import Data.Ratio (Ratio)
import qualified Data.Text as T
import qualified Data.Vector as V
import Debug.Trace (trace)
import qualified Options.Applicative.Extended as OA
import qualified System.IO as IO
import System.Random.Extended (RandomGen,
distribution,
newStdGen, randomR)
-- | A state in the search space.
data State = State
{ -- | Full problem. TODO: remove this
stateProblem :: !Problem
, -- | Allowed epsilon
stateEpsilon :: !Double
, -- | The hole we're trying to fit in.
stateHole :: !(Polygon Integer)
, -- | Bounding box the hole
stateHoleBox :: !(Box Integer)
, -- | The distance constraints between the vertices.
stateDistances :: !(IM.IntMap (IM.IntMap Integer))
, -- | The limbs found in the figure.
stateLimbs :: !Limbs
, -- | Bonus we are targeting
stateTargetBonus :: !(Maybe (V2 Integer))
, -- | The current locations of the vertices.
stateVertices :: !(V.Vector (V2 Double))
} deriving (Show)
makeInitialState :: Maybe (V2 Integer) -> Problem -> State
makeInitialState targetBonus problem@Problem {..} = State {..}
where
stateProblem = problem
stateEpsilon = fromIntegral problemEpsilon / fromIntegral epsilonDenominator
stateHole = unHole problemHole
stateTargetBonus = targetBonus
stateHoleBox = fromMaybe (Box.fromV2 (V2 0 0)) $
foldMap (Just . Box.fromV2) (unPolygon stateHole)
stateDistances = foldl' insert IM.empty $ do
(i, j) <- V.toList $ figureEdges problemFigure
edge <- maybeToList $ lookupFigureEdge (i, j) problemFigure
pure (i, j, edgeSquaredLength edge)
where
insert m (i, j, d) =
IM.insertWith IM.union j (IM.singleton i d) .
IM.insertWith IM.union i (IM.singleton j d) $ m
stateLimbs = findLimbs problemFigure
stateVertices = fmap fromIntegral <$> figureVertices problemFigure
-- | A state with more info
data Annotated = Annotated
{ -- | Plain state
annotatedState :: !State
, -- | Indices of vertices which are located outside of the hole
annotatedOutside :: !(V.Vector Int)
-- | Distance to the closest vertex of the hole for each vertex.
, annotatedDistanceToHole :: !(V.Vector Integer)
} deriving (Show)
annotateState :: State -> Annotated
annotateState state@State {..} = Annotated {..}
where
annotatedState = state
annotatedOutside = V.map fst $
V.filter (not . (`pointInPolygon` stateHole) . fmap round . snd) $
V.indexed stateVertices
annotatedDistanceToHole = flip V.map stateVertices $ \v ->
minimum . map (edgeSquaredLength . Edge (round <$> v)) .
V.toList $ unPolygon stateHole
pinchState :: RandomGen g => Annotated -> g -> (State, g)
pinchState Annotated {..} gen0 =
let (i, gen1) = distribution weighted gen0
(phi, gen2) = randomR (0 :: Double, pi * 2) gen1
(rho, gen3) = randomR (0, maxPinch) gen2
offset = V2 (rho * cos phi) (rho * sin phi)
vertices = V.update stateVertices $ V.singleton (i, offset) in
(annotatedState {stateVertices = vertices}, gen3)
where
State {..} = annotatedState
maxPinch =
let b = stateHoleBox in
0.5 * fromInteger (Box.width b + Box.height b)
candidates
| V.null annotatedOutside = V.imap const stateVertices
| otherwise = annotatedOutside
weighted = NonEmpty.fromList $ do
i <- V.toList candidates
let distanceToHole = annotatedDistanceToHole V.! i
-- The vertex furthest away is three times as likely to get picked.
pure (maxDistanceToHole + 2 * distanceToHole, i)
maxDistanceToHole = V.maximum annotatedDistanceToHole
rotateState :: RandomGen g => Annotated -> g -> (State, g)
rotateState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateVertices - 1) gen0
(d, gen2) = randomR (-pi / 4, pi / 4) gen1
p = stateVertices V.! i
rotated = V.imap (\j q ->
if i == j
then q
else
let (Polar rho phi) = toPolar (q .-. p) in
p .+. fromPolar (Polar rho (phi + d))) stateVertices in
(annotatedState {stateVertices = rotated}, gen2)
where
State {..} = annotatedState
moveState :: RandomGen g => Annotated -> g -> (State, g)
moveState Annotated {..} gen0 =
let (phi, gen1) = randomR (0 :: Double, pi * 2) gen0
(rho, gen2) = randomR (0, maxPinch) gen1
offset = V2 (rho * cos phi) (rho * sin phi)
vertices = V.map (.+. offset) stateVertices in
(annotatedState {stateVertices = vertices}, gen2)
where
State {..} = annotatedState
maxPinch =
let b = stateHoleBox in
0.1 * fromInteger (Box.width b + Box.height b)
limbState :: RandomGen g => Annotated -> g -> (Maybe State, g)
limbState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateLimbs - 1) gen0
alts = alternateLimb (stateLimbs V.! i)
(j, gen2) = randomR (0, V.length alts - 1) gen1
limb = alts V.! j
(phi, gen3) = randomR (-pi / 4, pi / 4) gen2 in
case moveLimb phi limb stateVertices of
Nothing -> (Nothing, gen3)
Just verts -> (Just annotatedState {stateVertices = verts}, gen3)
where
State {..} = annotatedState
-- | Move things around very slowly. This can be used at the end of the run
-- to fine tune the solution and squeeze out some more points.
creepState :: RandomGen g => Annotated -> g -> (State, g)
creepState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateVertices - 1) gen0
(j, gen2) = randomR (0, 3) gen1
p = stateVertices V.! i
p' =
[p .+. V2 1 0, p .+. V2 0 1, p .-. V2 1 0, p .-. V2 0 1] !! j
verts = V.update stateVertices $ V.singleton (i, p') in
(annotatedState {stateVertices = verts}, gen2)
where
State {..} = annotatedState
data EtherealScore
= ValidScore Integer
-- | Number of violations, total squared distance from "outside" points to
-- nearest edges.
| InvalidScore (Int, Ratio Integer)
deriving (Eq, Show)
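-- Note on the instance below (added descriptive comment, not from the original
-- source): the ordering is "greater is better" -- any ValidScore compares above
-- any InvalidScore, and within each constructor a *smaller* raw penalty counts
-- as the larger score, which is why the comparisons are flipped.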
instance Ord EtherealScore where
ValidScore x <= ValidScore y = y <= x
ValidScore _ <= InvalidScore _ = False
InvalidScore _ <= ValidScore _ = True
InvalidScore x <= InvalidScore y = y <= x
stateToSolution :: State -> Solution
stateToSolution state = Solution
{ solutionBonuses = V.empty
, solutionVertices = fmap round <$> stateVertices state
}
scoreState :: Annotated -> EtherealScore
scoreState Annotated {..} = case judgeSolution stateProblem sol of
Left errs -> InvalidScore (length errs, distances)
Right s -> ValidScore $ s + toBonus
where
State {..} = annotatedState
sol = stateToSolution annotatedState
toBonus = case stateTargetBonus of
Nothing -> 0
Just b -> (^ (2 :: Integer)) $ V.minimum $ do
v <- solutionVertices sol
pure $ squaredDistance v b
distances = L.sum $ do
i <- annotatedOutside
let p = solutionVertices sol V.! i
pure $ V.minimum $ do
edge <- polygonEdges stateHole
pure $ edgePointSquaredDistance p edge
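-- Added descriptive note: rigidifyState below restores the figure's original
-- edge lengths by relaxation. Each sweep nudges a vertex along every edge whose
-- squared length has drifted more than epsilon from its target, moving it by
-- 0.4 of the length error, and gives up with Nothing if the figure has not
-- settled after maxIters sweeps.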
rigidifyState :: State -> Maybe State
rigidifyState state0@State {..} = go (0 :: Int) stateVertices
where
maxIters = 2000
distances = stateDistances
eps = stateEpsilon
go !iter vertices
| V.all null forces = Just $ state0 {stateVertices = vertices}
| iter >= maxIters =
trace ("rigidify: gave up after " ++ show iter ++ " iterations")
Nothing
| otherwise = go (iter + 1) $
V.zipWith (\fs v -> foldl' (.+.) v fs) forces vertices
where
forces = flip V.imap vertices $ \i p -> do
neighbours <- maybeToList $ IM.lookup i distances
(j, d2) <- IM.toList neighbours
let q = vertices V.! j
d'2 = squaredDistance p q :: Double
guard $ abs (d'2 / fromIntegral d2 - 1) > eps
let actualDistance = sqrt d'2 :: Double
expectedDistance = sqrt $ fromIntegral d2
delta = actualDistance - expectedDistance
guard $ delta < -1e-9 || delta > 1e-9
pure $ (q .-. p) .* (0.4 * delta / max 1 actualDistance)
data Command = Pinch | Move | Rotate | MoveLimb deriving (Eq, Show)
stepState :: RandomGen g => Annotated -> g -> (Annotated, g)
stepState ann gen0 =
let (cmd, gen1) = flip distribution gen0 $
(((* 3) . V.length . stateLimbs $ annotatedState ann), MoveLimb) :|
(5 :: Int, Pinch) : (2, Move) : (3, Rotate) : [] in
case cmd of
Pinch ->
let (pinched, gen2) = pinchState ann gen1 in
case rigidifyState pinched of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
Move ->
let (moved, gen2) = moveState ann gen1 in
(annotateState moved, gen2)
Rotate ->
let (rotated, gen2) = rotateState ann gen1 in
case rigidifyState rotated of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
MoveLimb ->
let (moved, gen2) = limbState ann gen1 in
case rigidifyState =<< moved of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
data Options = Options
{ optionsSaveLimbs :: Maybe FilePath
, optionsStartFrom :: Maybe FilePath
, optionsTargetBonus :: Maybe (V2 Integer)
, optionsUseBonus :: Maybe ClaimedBonus
, optionsValidOnly :: Bool
, optionsProblemPath :: FilePath
} deriving (Show)
defaultOptions :: Options
defaultOptions = Options
{ optionsSaveLimbs = Nothing
, optionsStartFrom = Nothing
, optionsTargetBonus = Nothing
, optionsUseBonus = Nothing
, optionsValidOnly = False
, optionsProblemPath = ""
}
solve :: Options -> Problem -> IO Solution
solve options problem = do
mbStartFrom <- case optionsStartFrom options of
Nothing -> pure Nothing
Just fp -> Just <$> decodeFileWith (decodeSolution allFeatures) fp
let initial = annotateState $
let s0 = makeInitialState (optionsTargetBonus options) problem in
case mbStartFrom of
Nothing -> s0
Just Solution {..} -> s0
{stateVertices = fmap fromIntegral <$> solutionVertices}
saOptions = SA.defaultOptions
{ SA.oScore = \x -> case scoreState x of
ValidScore p -> negate $ fromIntegral p
InvalidScore (p, d) ->
(-10000000) - fromIntegral p * 100000 - fromRational d
, SA.oNeighbour = stepState
, SA.oQuit = if optionsValidOnly options
then \x -> case scoreState x of
ValidScore _ -> True
InvalidScore _ -> False
else \_ -> False
, SA.oGiveUp = Just 2000
}
V2 svgWidth svgHeight =
fromIntegral <$> Box.bottomRight (problemBox problem)
limbsSvg =
let figureElement = edgesToSvgElement . map (fmap fromIntegral) .
foldFigureEdges pure $ problemFigure problem
verts = stateVertices $ annotatedState initial
limbsElement = edgesToSvgElement $ do
Limb (i, j) (k, l) _ _ <- V.toList . stateLimbs $
annotatedState initial
pure $ Edge
((verts V.! i) .* 0.5 .+. (verts V.! j) .* 0.5)
((verts V.! k) .* 0.5 .+. (verts V.! l) .* 0.5)
movedElement = edgesToSvgElement $ do
limb <- take 1 . V.toList . stateLimbs $
annotatedState initial
verts' <- maybeToList $ moveLimb (-pi / 16) limb verts
(i, j) <- V.toList . figureEdges $ problemFigure problem
pure $ Edge (verts' V.! i) (verts' V.! j) in
(makeSvg (0, 0, svgWidth, svgHeight))
{ svgElements =
[ figureElement {elementStyle = Just "stroke:red"}
, limbsElement {elementStyle = Just "stroke:green"}
, movedElement
{elementStyle = Just "stroke:orange;stroke-width:0.3"}
]
}
case optionsSaveLimbs options of
Nothing -> pure ()
Just path -> writeFile path $ encodeSvg limbsSvg
gen0 <- newStdGen
let (!saResult, gen1) = SA.run saOptions initial gen0
hcResult <- case scoreState saResult of
InvalidScore _ -> pure saResult
ValidScore _ | optionsValidOnly options -> pure saResult
ValidScore saScore -> do
IO.hPutStrLn IO.stderr $ "Starting hill climb with score " <>
show saScore
let hcScore = \x -> case scoreState x of
ValidScore p -> negate p
InvalidScore _ -> negate saScore - 1
hcOpts = HC.defaultOptions
{ HC.oScore = hcScore
, HC.oNeighbour = \x -> first annotateState . creepState x
}
!(!hcResult, _gen2) = HC.hillClimb hcOpts saResult gen1
-- pure $ fromEthereal $ hillclimb 0 (score problem initial) initial gen0
IO.hPutStrLn IO.stderr $ "Finished hill climb with score " <>
show (hcScore hcResult)
pure hcResult
let solution = stateToSolution $ annotatedState hcResult
pure solution
{ solutionBonuses = maybe mempty V.singleton $ optionsUseBonus options
}
parseOptions :: OA.Parser Options
parseOptions = Options
<$> OA.optional (OA.strOption $
OA.long "limbs" <>
OA.metavar "LIMBS.svg")
<*> OA.optional (OA.strOption $
OA.long "start-from" <>
OA.metavar "SOLUTION.json")
<*> OA.optional (OA.option (OA.maybeReader $ preview v2FromString) $
OA.long "target-bonus" <>
OA.metavar "X,Y")
<*> OA.optional (ClaimedBonus
<$> OA.option OA.auto (
OA.long "use-bonus-problem" <>
OA.metavar "PROBLEM")
<*> OA.option bonusReader (
OA.long "use-bonus" <>
OA.metavar "BONUS")
<*> pure Nothing)
<*> OA.switch (
OA.long "valid" <>
OA.help "exit as soon as a valid solution is found")
<*> OA.strArgument (OA.metavar "PROBLEM")
where
bonusReader = OA.maybeReader $ preview bonusTypeFromText . T.pack
main :: IO ()
main = do
options <- OA.simpleRunParser parseOptions
problem <- decodeFileWith (decodeProblem allFeatures)
(optionsProblemPath options)
solution <- solve options problem
BL8.putStrLn . Aeson.encode $ encodeSolution allFeatures solution
| null | https://raw.githubusercontent.com/icfpcontest2021/icfpcontest2021.github.io/fb23fea2a8ecec7740017d3dda78d921c1df5a26/toolchain/lib/BrainWall/JasperSolver.hs | haskell | | Some ideas:
* Add neighbours that preserve distances, such as move, flip, rotate
* For pinching in an invalid state, we should pick the vertices that are the
furthest away with a higher probability than the closer ones
* For pinching in a valid state, we should pick the vertices that contribute
the most to our score (i.e. the worst ones) with a higher probability
* Simulated annealing really needs a contiuous scoring function to work well,
and ours is not because there's a big drop in invalid->valid. Either we
# LANGUAGE BangPatterns #
| A state in the search space.
| Allowed epsilon
| The hole we're trying to fit in.
| Bounding box the hole
| The distance constraints between the vertices.
| The limbs found in the figure.
| Bonus we are targeting
| The current locations of the vertices.
| A state with more info
| Plain state
| Indices of vertices which are located outside of the hole
| Distance to the closest vertex of the hole for each vertex.
| Move things around very slowly. This can be used at the end of the run
to fine tune the solution and squeeze out some more points.
| Number of violations, total squared distance from "outside" points to
nearest edges. | should make it contiuous or we should run this in two phases .
# LANGUAGE RecordWildCards #
module BrainWall.JasperSolver where
import BrainWall.Box (Box)
import qualified BrainWall.Box as Box
import BrainWall.Database (allFeatures)
import BrainWall.Edge
import qualified BrainWall.JasperSolver.HillClimb as HC
import BrainWall.JasperSolver.Limbs
import qualified BrainWall.JasperSolver.SimulatedAnnealing as SA
import BrainWall.Json
import BrainWall.Polygon
import BrainWall.Polygon.ContainsPoint
import BrainWall.Problem
import BrainWall.Svg
import BrainWall.V2
import Control.Lens (preview)
import Control.Monad (guard)
import qualified Data.Aeson as Aeson
import Data.Bifunctor (first)
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.IntMap as IM
import Data.List (foldl')
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe (fromMaybe,
maybeToList)
import Data.Ratio (Ratio)
import qualified Data.Text as T
import qualified Data.Vector as V
import Debug.Trace (trace)
import qualified Options.Applicative.Extended as OA
import qualified System.IO as IO
import System.Random.Extended (RandomGen,
distribution,
newStdGen, randomR)
data State = State
| Full problem . TODO : remove this
stateProblem :: !Problem
stateEpsilon :: !Double
stateHole :: !(Polygon Integer)
stateHoleBox :: !(Box Integer)
stateDistances :: !(IM.IntMap (IM.IntMap Integer))
stateLimbs :: !Limbs
stateTargetBonus :: !(Maybe (V2 Integer))
stateVertices :: !(V.Vector (V2 Double))
} deriving (Show)
makeInitialState :: Maybe (V2 Integer) -> Problem -> State
makeInitialState targetBonus problem@Problem {..} = State {..}
where
stateProblem = problem
stateEpsilon = fromIntegral problemEpsilon / fromIntegral epsilonDenominator
stateHole = unHole problemHole
stateTargetBonus = targetBonus
stateHoleBox = fromMaybe (Box.fromV2 (V2 0 0)) $
foldMap (Just . Box.fromV2) (unPolygon stateHole)
stateDistances = foldl' insert IM.empty $ do
(i, j) <- V.toList $ figureEdges problemFigure
edge <- maybeToList $ lookupFigureEdge (i, j) problemFigure
pure (i, j, edgeSquaredLength edge)
where
insert m (i, j, d) =
IM.insertWith IM.union j (IM.singleton i d) .
IM.insertWith IM.union i (IM.singleton j d) $ m
stateLimbs = findLimbs problemFigure
stateVertices = fmap fromIntegral <$> figureVertices problemFigure
data Annotated = Annotated
annotatedState :: !State
annotatedOutside :: !(V.Vector Int)
, annotatedDistanceToHole :: !(V.Vector Integer)
} deriving (Show)
annotateState :: State -> Annotated
annotateState state@State {..} = Annotated {..}
where
annotatedState = state
annotatedOutside = V.map fst $
V.filter (not . (`pointInPolygon` stateHole) . fmap round . snd) $
V.indexed stateVertices
annotatedDistanceToHole = flip V.map stateVertices $ \v ->
minimum . map (edgeSquaredLength . Edge (round <$> v)) .
V.toList $ unPolygon stateHole
pinchState :: RandomGen g => Annotated -> g -> (State, g)
pinchState Annotated {..} gen0 =
let (i, gen1) = distribution weighted gen0
(phi, gen2) = randomR (0 :: Double, pi * 2) gen1
(rho, gen3) = randomR (0, maxPinch) gen2
offset = V2 (rho * cos phi) (rho * sin phi)
vertices = V.update stateVertices $ V.singleton (i, offset) in
(annotatedState {stateVertices = vertices}, gen3)
where
State {..} = annotatedState
maxPinch =
let b = stateHoleBox in
0.5 * fromInteger (Box.width b + Box.height b)
candidates
| V.null annotatedOutside = V.imap const stateVertices
| otherwise = annotatedOutside
weighted = NonEmpty.fromList $ do
i <- V.toList candidates
let distanceToHole = annotatedDistanceToHole V.! i
The vertex furthest away is three times as likely to get picked .
pure (maxDistanceToHole + 2 * distanceToHole, i)
maxDistanceToHole = V.maximum annotatedDistanceToHole
rotateState :: RandomGen g => Annotated -> g -> (State, g)
rotateState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateVertices - 1) gen0
(d, gen2) = randomR (-pi / 4, pi / 4) gen1
p = stateVertices V.! i
rotated = V.imap (\j q ->
if i == j
then q
else
let (Polar rho phi) = toPolar (q .-. p) in
p .+. fromPolar (Polar rho (phi + d))) stateVertices in
(annotatedState {stateVertices = rotated}, gen2)
where
State {..} = annotatedState
moveState :: RandomGen g => Annotated -> g -> (State, g)
moveState Annotated {..} gen0 =
let (phi, gen1) = randomR (0 :: Double, pi * 2) gen0
(rho, gen2) = randomR (0, maxPinch) gen1
offset = V2 (rho * cos phi) (rho * sin phi)
vertices = V.map (.+. offset) stateVertices in
(annotatedState {stateVertices = vertices}, gen2)
where
State {..} = annotatedState
maxPinch =
let b = stateHoleBox in
0.1 * fromInteger (Box.width b + Box.height b)
limbState :: RandomGen g => Annotated -> g -> (Maybe State, g)
limbState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateLimbs - 1) gen0
alts = alternateLimb (stateLimbs V.! i)
(j, gen2) = randomR (0, V.length alts - 1) gen1
limb = alts V.! j
(phi, gen3) = randomR (-pi / 4, pi / 4) gen2 in
case moveLimb phi limb stateVertices of
Nothing -> (Nothing, gen3)
Just verts -> (Just annotatedState {stateVertices = verts}, gen3)
where
State {..} = annotatedState
creepState :: RandomGen g => Annotated -> g -> (State, g)
creepState Annotated {..} gen0 =
let (i, gen1) = randomR (0, V.length stateVertices - 1) gen0
(j, gen2) = randomR (0, 3) gen1
p = stateVertices V.! i
p' =
[p .+. V2 1 0, p .+. V2 0 1, p .-. V2 1 0, p .-. V2 0 1] !! j
verts = V.update stateVertices $ V.singleton (i, p') in
(annotatedState {stateVertices = verts}, gen2)
where
State {..} = annotatedState
data EtherealScore
= ValidScore Integer
| InvalidScore (Int, Ratio Integer)
deriving (Eq, Show)
instance Ord EtherealScore where
ValidScore x <= ValidScore y = y <= x
ValidScore _ <= InvalidScore _ = False
InvalidScore _ <= ValidScore _ = True
InvalidScore x <= InvalidScore y = y <= x
stateToSolution :: State -> Solution
stateToSolution state = Solution
{ solutionBonuses = V.empty
, solutionVertices = fmap round <$> stateVertices state
}
scoreState :: Annotated -> EtherealScore
scoreState Annotated {..} = case judgeSolution stateProblem sol of
Left errs -> InvalidScore (length errs, distances)
Right s -> ValidScore $ s + toBonus
where
State {..} = annotatedState
sol = stateToSolution annotatedState
toBonus = case stateTargetBonus of
Nothing -> 0
Just b -> (^ (2 :: Integer)) $ V.minimum $ do
v <- solutionVertices sol
pure $ squaredDistance v b
distances = L.sum $ do
i <- annotatedOutside
let p = solutionVertices sol V.! i
pure $ V.minimum $ do
edge <- polygonEdges stateHole
pure $ edgePointSquaredDistance p edge
rigidifyState :: State -> Maybe State
rigidifyState state0@State {..} = go (0 :: Int) stateVertices
where
maxIters = 2000
distances = stateDistances
eps = stateEpsilon
go !iter vertices
| V.all null forces = Just $ state0 {stateVertices = vertices}
| iter >= maxIters =
trace ("rigidify: gave up after " ++ show iter ++ " iterations")
Nothing
| otherwise = go (iter + 1) $
V.zipWith (\fs v -> foldl' (.+.) v fs) forces vertices
where
forces = flip V.imap vertices $ \i p -> do
neighbours <- maybeToList $ IM.lookup i distances
(j, d2) <- IM.toList neighbours
let q = vertices V.! j
d'2 = squaredDistance p q :: Double
guard $ abs (d'2 / fromIntegral d2 - 1) > eps
let actualDistance = sqrt d'2 :: Double
expectedDistance = sqrt $ fromIntegral d2
delta = actualDistance - expectedDistance
guard $ delta < -1e-9 || delta > 1e-9
pure $ (q .-. p) .* (0.4 * delta / max 1 actualDistance)
data Command = Pinch | Move | Rotate | MoveLimb deriving (Eq, Show)
stepState :: RandomGen g => Annotated -> g -> (Annotated, g)
stepState ann gen0 =
let (cmd, gen1) = flip distribution gen0 $
(((* 3) . V.length . stateLimbs $ annotatedState ann), MoveLimb) :|
(5 :: Int, Pinch) : (2, Move) : (3, Rotate) : [] in
case cmd of
Pinch ->
let (pinched, gen2) = pinchState ann gen1 in
case rigidifyState pinched of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
Move ->
let (moved, gen2) = moveState ann gen1 in
(annotateState moved, gen2)
Rotate ->
let (rotated, gen2) = rotateState ann gen1 in
case rigidifyState rotated of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
MoveLimb ->
let (moved, gen2) = limbState ann gen1 in
case rigidifyState =<< moved of
Just rigid -> (annotateState rigid, gen2)
Nothing -> (ann, gen1)
data Options = Options
{ optionsSaveLimbs :: Maybe FilePath
, optionsStartFrom :: Maybe FilePath
, optionsTargetBonus :: Maybe (V2 Integer)
, optionsUseBonus :: Maybe ClaimedBonus
, optionsValidOnly :: Bool
, optionsProblemPath :: FilePath
} deriving (Show)
defaultOptions :: Options
defaultOptions = Options
{ optionsSaveLimbs = Nothing
, optionsStartFrom = Nothing
, optionsTargetBonus = Nothing
, optionsUseBonus = Nothing
, optionsValidOnly = False
, optionsProblemPath = ""
}
solve :: Options -> Problem -> IO Solution
solve options problem = do
mbStartFrom <- case optionsStartFrom options of
Nothing -> pure Nothing
Just fp -> Just <$> decodeFileWith (decodeSolution allFeatures) fp
let initial = annotateState $
let s0 = makeInitialState (optionsTargetBonus options) problem in
case mbStartFrom of
Nothing -> s0
Just Solution {..} -> s0
{stateVertices = fmap fromIntegral <$> solutionVertices}
saOptions = SA.defaultOptions
{ SA.oScore = \x -> case scoreState x of
ValidScore p -> negate $ fromIntegral p
InvalidScore (p, d) ->
(-10000000) - fromIntegral p * 100000 - fromRational d
, SA.oNeighbour = stepState
, SA.oQuit = if optionsValidOnly options
then \x -> case scoreState x of
ValidScore _ -> True
InvalidScore _ -> False
else \_ -> False
, SA.oGiveUp = Just 2000
}
V2 svgWidth svgHeight =
fromIntegral <$> Box.bottomRight (problemBox problem)
limbsSvg =
let figureElement = edgesToSvgElement . map (fmap fromIntegral) .
foldFigureEdges pure $ problemFigure problem
verts = stateVertices $ annotatedState initial
limbsElement = edgesToSvgElement $ do
Limb (i, j) (k, l) _ _ <- V.toList . stateLimbs $
annotatedState initial
pure $ Edge
((verts V.! i) .* 0.5 .+. (verts V.! j) .* 0.5)
((verts V.! k) .* 0.5 .+. (verts V.! l) .* 0.5)
movedElement = edgesToSvgElement $ do
limb <- take 1 . V.toList . stateLimbs $
annotatedState initial
verts' <- maybeToList $ moveLimb (-pi / 16) limb verts
(i, j) <- V.toList . figureEdges $ problemFigure problem
pure $ Edge (verts' V.! i) (verts' V.! j) in
(makeSvg (0, 0, svgWidth, svgHeight))
{ svgElements =
[ figureElement {elementStyle = Just "stroke:red"}
, limbsElement {elementStyle = Just "stroke:green"}
, movedElement
{elementStyle = Just "stroke:orange;stroke-width:0.3"}
]
}
case optionsSaveLimbs options of
Nothing -> pure ()
Just path -> writeFile path $ encodeSvg limbsSvg
gen0 <- newStdGen
let (!saResult, gen1) = SA.run saOptions initial gen0
hcResult <- case scoreState saResult of
InvalidScore _ -> pure saResult
ValidScore _ | optionsValidOnly options -> pure saResult
ValidScore saScore -> do
IO.hPutStrLn IO.stderr $ "Starting hill climb with score " <>
show saScore
let hcScore = \x -> case scoreState x of
ValidScore p -> negate p
InvalidScore _ -> negate saScore - 1
hcOpts = HC.defaultOptions
{ HC.oScore = hcScore
, HC.oNeighbour = \x -> first annotateState . creepState x
}
!(!hcResult, _gen2) = HC.hillClimb hcOpts saResult gen1
pure $ fromEthereal $ hillclimb 0 ( score problem initial ) initial gen0
IO.hPutStrLn IO.stderr $ "Finished hill climb with score " <>
show (hcScore hcResult)
pure hcResult
let solution = stateToSolution $ annotatedState hcResult
pure solution
{ solutionBonuses = maybe mempty V.singleton $ optionsUseBonus options
}
parseOptions :: OA.Parser Options
parseOptions = Options
<$> OA.optional (OA.strOption $
OA.long "limbs" <>
OA.metavar "LIMBS.svg")
<*> OA.optional (OA.strOption $
OA.long "start-from" <>
OA.metavar "SOLUTION.json")
<*> OA.optional (OA.option (OA.maybeReader $ preview v2FromString) $
OA.long "target-bonus" <>
OA.metavar "X,Y")
<*> OA.optional (ClaimedBonus
<$> OA.option OA.auto (
OA.long "use-bonus-problem" <>
OA.metavar "PROBLEM")
<*> OA.option bonusReader (
OA.long "use-bonus" <>
OA.metavar "BONUS")
<*> pure Nothing)
<*> OA.switch (
OA.long "valid" <>
OA.help "exit as soon as a valid solution is found")
<*> OA.strArgument (OA.metavar "PROBLEM")
where
bonusReader = OA.maybeReader $ preview bonusTypeFromText . T.pack
main :: IO ()
main = do
options <- OA.simpleRunParser parseOptions
problem <- decodeFileWith (decodeProblem allFeatures)
(optionsProblemPath options)
solution <- solve options problem
BL8.putStrLn . Aeson.encode $ encodeSolution allFeatures solution
|
b31feff06375198f132895b5d76795b9a06fa2dae51c4505e9730ab008075603 | tonyg/kali-scheme | lu-decomp.scm | Copyright ( c ) 1993 , 1994 by and .
; Copyright (c) 1996 by NEC Research Institute, Inc. See file COPYING.
; LU Decomposition (a rewriting of a program from `Numerical Recipes
; in '; look there for a detailed description of what is going on).
; A is an NxN matrix that is updated in place.
; This returns a row permutation vector and the sign of that vector.
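; A minimal usage sketch (added comment; RECEIVE is already used below, and A
; and B stand for any square array and right-hand-side vector):
;   (receive (indx sign)
;       (lu-decomposition a)           ; A is overwritten by its LU factors
;     (lu-back-substitute a indx b))   ; B is overwritten by the solution x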
(define *lu-decomposition-epsilon* 1.0e-20)
(define (lu-decomposition a)
(let* ((n (car (array-shape a)))
(indx (make-vector n))
(sign 1.0)
(vv (make-vector n)))
(do ((i 0 (+ i 1)))
((>= i n))
(do ((j 0 (+ j 1))
(big 0.0 (max big (abs (array-ref a i j)))))
((>= j n)
(if (= big 0.0)
(error "lu-decomposition matrix has a zero row" a i))
(vector-set! vv i (/ big)))))
(do ((j 0 (+ j 1)))
((>= j n))
(let ()
(define (sum-elts i end)
(do ((k 0 (+ k 1))
(sum (array-ref a i j)
(- sum (* (array-ref a i k)
(array-ref a k j)))))
((>= k end)
sum)))
(do ((i 0 (+ i 1)))
((>= i j))
(array-set! a (sum-elts i i) i j))
(receive (big imax)
(let loop ((i j) (big 0.0) (imax 0))
(if (>= i n)
(values big imax)
(let ((sum (sum-elts i j)))
(array-set! a sum i j)
(let ((temp (* (vector-ref vv i) (abs sum))))
(if (>= temp big)
(loop (+ i 1) temp i)
(loop (+ i 1) big imax))))))
(if (not (= j imax))
(begin
(do ((k 0 (+ k 1)))
((>= k n))
(let ((temp (array-ref a imax k)))
(array-set! a (array-ref a j k) imax k)
(array-set! a temp j k)))
(set! sign (- sign))
(vector-set! vv imax (vector-ref vv j))))
(vector-set! indx j imax)
(if (= (array-ref a j j) 0.0)
(array-set! a *lu-decomposition-epsilon* j j))
(if (not (= j (- n 1)))
(let ((temp (/ (array-ref a j j))))
(do ((i (+ j 1) (+ i 1)))
((>= i n))
(array-set! a (* (array-ref a i j) temp) i j)))))))
(values indx sign)))
(define (lu-back-substitute a indx b)
(let ((n (car (array-shape a))))
(let loop ((i 0) (ii #f))
(if (< i n)
(let* ((ip (vector-ref indx i))
(temp (vector-ref b ip)))
(vector-set! b ip (vector-ref b i))
(let ((new (if ii
(do ((j ii (+ j 1))
(sum temp (- sum (* (array-ref a i j)
(vector-ref b j)))))
((>= j i)
sum))
temp)))
(vector-set! b i new)
(loop (+ i 1)
(if (or ii (= temp 0.0)) ii i))))))
(do ((i (- n 1) (- i 1)))
((< i 0))
(do ((j (+ i 1) (+ j 1))
(sum (vector-ref b i) (- sum (* (array-ref a i j)
(vector-ref b j)))))
((>= j n)
(vector-set! b i (/ sum (array-ref a i i))))))))
;(define m
;  (array '(4 4)
;         1.0  2.0  3.0 -2.0
;         8.0 -6.0  6.0  1.0
;         3.0 -2.0  0.0 -7.0
;         4.0  7.0  2.0 -1.0))
;
;(define b '#(2.0 1.0 3.0 -2.0))
;
;(define (test m b)
; (let* ((a (copy-array m))
; (n (car (array-shape m)))
; (x (make-vector n)))
;
; (do ((i 0 (+ i 1)))
; ((>= i n))
; (vector-set! x i (vector-ref b i)))
;
; (display "b = ")
; (display b)
; (newline)
;
; (call-with-values
; (lambda ()
; (lu-decomposition a))
; (lambda (indx sign)
; (lu-back-substitute a indx x)
;
; (display "x = ")
; (display x)
; (newline)
;
; (let ((y (make-vector (vector-length b))))
; (do ((i 0 (+ i 1)))
; ((>= i n))
; (do ((j 0 (+ j 1))
;       (t 0.0 (+ t (* (array-ref m i j) (vector-ref x j)))))
; ((>= j n)
; (vector-set! y i t))))
;
; (display "a * x =")
; (display y)
; (newline))))))
| null | https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/scheme/big/lu-decomp.scm | scheme | look there for a detailed description of what is going on ) .
This returns a row permutation vector and the sign of that vector.
(define m
(define b '#(2.0 1.0 3.0 -2.0))
(define (test m b)
(let* ((a (copy-array m))
(n (car (array-shape m)))
(x (make-vector n)))
(do ((i 0 (+ i 1)))
((>= i n))
(vector-set! x i (vector-ref b i)))
(display "b = ")
(display b)
(newline)
(call-with-values
(lambda ()
(lu-decomposition a))
(lambda (indx sign)
(lu-back-substitute a indx x)
(display "x = ")
(display x)
(newline)
(let ((y (make-vector (vector-length b))))
(do ((i 0 (+ i 1)))
((>= i n))
(do ((j 0 (+ j 1))
((>= j n)
(vector-set! y i t))))
(display "a * x =")
(display y)
(newline)))))) | Copyright ( c ) 1993 , 1994 by and .
Copyright ( c ) 1996 by NEC Research Institute , Inc. See file COPYING .
LU Decomposition ( a rewriting of a program from ` Numerical Recipes
A is an NxN matrix that is updated in place .
(define *lu-decomposition-epsilon* 1.0e-20)
(define (lu-decomposition a)
(let* ((n (car (array-shape a)))
(indx (make-vector n))
(sign 1.0)
(vv (make-vector n)))
(do ((i 0 (+ i 1)))
((>= i n))
(do ((j 0 (+ j 1))
(big 0.0 (max big (abs (array-ref a i j)))))
((>= j n)
(if (= big 0.0)
(error "lu-decomposition matrix has a zero row" a i))
(vector-set! vv i (/ big)))))
(do ((j 0 (+ j 1)))
((>= j n))
(let ()
(define (sum-elts i end)
(do ((k 0 (+ k 1))
(sum (array-ref a i j)
(- sum (* (array-ref a i k)
(array-ref a k j)))))
((>= k end)
sum)))
(do ((i 0 (+ i 1)))
((>= i j))
(array-set! a (sum-elts i i) i j))
(receive (big imax)
(let loop ((i j) (big 0.0) (imax 0))
(if (>= i n)
(values big imax)
(let ((sum (sum-elts i j)))
(array-set! a sum i j)
(let ((temp (* (vector-ref vv i) (abs sum))))
(if (>= temp big)
(loop (+ i 1) temp i)
(loop (+ i 1) big imax))))))
(if (not (= j imax))
(begin
(do ((k 0 (+ k 1)))
((>= k n))
(let ((temp (array-ref a imax k)))
(array-set! a (array-ref a j k) imax k)
(array-set! a temp j k)))
(set! sign (- sign))
(vector-set! vv imax (vector-ref vv j))))
(vector-set! indx j imax)
(if (= (array-ref a j j) 0.0)
(array-set! a *lu-decomposition-epsilon* j j))
(if (not (= j (- n 1)))
(let ((temp (/ (array-ref a j j))))
(do ((i (+ j 1) (+ i 1)))
((>= i n))
(array-set! a (* (array-ref a i j) temp) i j)))))))
(values indx sign)))
(define (lu-back-substitute a indx b)
(let ((n (car (array-shape a))))
(let loop ((i 0) (ii #f))
(if (< i n)
(let* ((ip (vector-ref indx i))
(temp (vector-ref b ip)))
(vector-set! b ip (vector-ref b i))
(let ((new (if ii
(do ((j ii (+ j 1))
(sum temp (- sum (* (array-ref a i j)
(vector-ref b j)))))
((>= j i)
sum))
temp)))
(vector-set! b i new)
(loop (+ i 1)
(if (or ii (= temp 0.0)) ii i))))))
(do ((i (- n 1) (- i 1)))
((< i 0))
(do ((j (+ i 1) (+ j 1))
(sum (vector-ref b i) (- sum (* (array-ref a i j)
(vector-ref b j)))))
((>= j n)
(vector-set! b i (/ sum (array-ref a i i))))))))
( array ' ( 4 4 )
1.0 2.0 3.0 -2.0
8.0 -6.0 6.0 1.0
3.0 -2.0 0.0 -7.0
4.0 7.0 2.0 -1.0 ) )
( t 0.0 ( + t ( * ( array - ref m i j ) ( vector - ref x j ) ) ) ) )
|
28b9e501168d72fc80dc7cbe84428fd98435d9a9c4f43379e96709fe7ac2c39d | ku-fpg/blank-canvas | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import Graphics.Blank
main :: IO ()
main = blankCanvas 3000 { events = ["mousedown"] } $ \ context -> loop context Map.empty X
data XO = X | O
deriving (Eq,Ord,Show)
swap :: XO -> XO
swap X = O
swap O = X
loop :: DeviceContext -> Map (Int, Int) XO -> XO -> IO ()
loop context board turn = do
-- print board
-- print turn
(w,h,sz) <- send context $ do
let (w,h) = (width context, height context)
clearRect (0,0,w,h)
beginPath()
let sz = min w h
save()
translate (w / 2, h / 2)
sequence_ [ do bigLine (-sz * 0.45,n) (sz * 0.45,n)
bigLine (n,-sz * 0.45) (n,sz * 0.45)
| n <- [-sz * 0.15,sz * 0.15]
]
sequence_ [ do save()
translate (fromIntegral x * sz * 0.3,fromIntegral y * sz * 0.3)
case Map.lookup (x,y) board of
Just X -> drawX (sz * 0.1)
Just O -> drawO (sz * 0.1)
Nothing -> return ()
restore()
| x <- [-1,0,1]
, y <- [-1,0,1]
]
restore()
return (w,h,sz)
let pointToSq :: (Double, Double) -> Maybe (Int,Int)
pointToSq (x,y) = do
x' <- fd ((x - w / 2) / sz)
y' <- fd ((y - h / 2) / sz)
return (x',y')
fd x =
-- trace (show ("fx",x,r)) $
if r `elem` [-1..1] then Just (signum r) else Nothing
where r = round (x * 3.3333)
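-- Added note: the grid lines sit at +-0.15*sz, so each cell is 0.3*sz wide;
-- fd scales the centre-relative offset by ~1/0.3 and rounds, keeping only
-- -1, 0 or 1, so clicks far from the 3x3 grid yield Nothing.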
event <- wait context
-- print event
case ePageXY event of
-- if no mouse location, ignore, and redraw
Nothing -> loop context board turn
Just (x',y') -> case pointToSq (x',y') of
Nothing -> loop context board turn
Just pos -> case Map.lookup pos board of
Nothing -> loop context
(Map.insert pos turn board)
(swap turn)
-- already something here
Just _ -> loop context board turn
xColor, oColor, boardColor :: Text
xColor = "#ff0000"
oColor = "#00a000"
boardColor = "#000080"
drawX :: Double -> Canvas ()
drawX size = do
strokeStyle xColor
lineCap "butt"
beginPath()
moveTo(-size,-size)
lineTo(size,size)
lineWidth 10
stroke()
beginPath()
moveTo(-size,size)
lineTo(size,-size)
lineWidth 10
stroke()
drawO :: Double -> Canvas ()
drawO radius = do
beginPath()
arc(0, 0, radius, 0, 2 * pi, False)
lineWidth 10
strokeStyle oColor
stroke()
bigLine :: (Double, Double) -> (Double, Double) -> Canvas ()
bigLine (x,y) (x',y') = do
beginPath()
moveTo(x,y)
lineTo(x',y')
lineWidth 20
strokeStyle boardColor
lineCap "round"
stroke()
| null | https://raw.githubusercontent.com/ku-fpg/blank-canvas/39915c17561106ce06e1e3dcef85cc2e956626e6/examples/tictactoe/Main.hs | haskell | # LANGUAGE OverloadedStrings #
print board
print turn
trace (show ("fx",x,r)) $
print event
if no mouse location, ignore, and redraw
already something here | module Main where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import Graphics.Blank
main :: IO ()
main = blankCanvas 3000 { events = ["mousedown"] } $ \ context -> loop context Map.empty X
data XO = X | O
deriving (Eq,Ord,Show)
swap :: XO -> XO
swap X = O
swap O = X
loop :: DeviceContext -> Map (Int, Int) XO -> XO -> IO ()
loop context board turn = do
(w,h,sz) <- send context $ do
let (w,h) = (width context, height context)
clearRect (0,0,w,h)
beginPath()
let sz = min w h
save()
translate (w / 2, h / 2)
sequence_ [ do bigLine (-sz * 0.45,n) (sz * 0.45,n)
bigLine (n,-sz * 0.45) (n,sz * 0.45)
| n <- [-sz * 0.15,sz * 0.15]
]
sequence_ [ do save()
translate (fromIntegral x * sz * 0.3,fromIntegral y * sz * 0.3)
case Map.lookup (x,y) board of
Just X -> drawX (sz * 0.1)
Just O -> drawO (sz * 0.1)
Nothing -> return ()
restore()
| x <- [-1,0,1]
, y <- [-1,0,1]
]
restore()
return (w,h,sz)
let pointToSq :: (Double, Double) -> Maybe (Int,Int)
pointToSq (x,y) = do
x' <- fd ((x - w / 2) / sz)
y' <- fd ((y - h / 2) / sz)
return (x',y')
fd x =
if r `elem` [-1..1] then Just (signum r) else Nothing
where r = round (x * 3.3333)
event <- wait context
case ePageXY event of
Nothing -> loop context board turn
Just (x',y') -> case pointToSq (x',y') of
Nothing -> loop context board turn
Just pos -> case Map.lookup pos board of
Nothing -> loop context
(Map.insert pos turn board)
(swap turn)
Just _ -> loop context board turn
xColor, oColor, boardColor :: Text
xColor = "#ff0000"
oColor = "#00a000"
boardColor = "#000080"
drawX :: Double -> Canvas ()
drawX size = do
strokeStyle xColor
lineCap "butt"
beginPath()
moveTo(-size,-size)
lineTo(size,size)
lineWidth 10
stroke()
beginPath()
moveTo(-size,size)
lineTo(size,-size)
lineWidth 10
stroke()
drawO :: Double -> Canvas ()
drawO radius = do
beginPath()
arc(0, 0, radius, 0, 2 * pi, False)
lineWidth 10
strokeStyle oColor
stroke()
bigLine :: (Double, Double) -> (Double, Double) -> Canvas ()
bigLine (x,y) (x',y') = do
beginPath()
moveTo(x,y)
lineTo(x',y')
lineWidth 20
strokeStyle boardColor
lineCap "round"
stroke()
|
8f2cd3ae988d962835c114daf6d9cc69203aa117d9b034ef9d4e78204f5c6977 | pfdietz/ansi-test | logcount.lsp | ;-*- Mode: Lisp -*-
;;;; Author:
;;;; Created:  Thu Sep 11 23:12:56 2003
;;;; Contains: Tests of LOGCOUNT
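;;;; Note (added): LOGCOUNT counts the 1 bits of a non-negative integer and
;;;; the 0 bits of a negative one, so (logcount x) = (logcount (lognot x));
;;;; the tests below lean on that identity.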
;;; Error tests
(deftest logcount.error.1
(signals-error (logcount) program-error)
t)
(deftest logcount.error.2
(signals-error (logcount 0 nil) program-error)
t)
(deftest logcount.error.3
(check-type-error #'logcount #'integerp)
nil)
;;; Non-error tests
(deftest logcount.1
(logcount 0)
0)
(deftest logcount.2
(logcount 1)
1)
(deftest logcount.3
(logcount 2)
1)
(deftest logcount.4
(logcount 3)
2)
(deftest logcount.5
(logcount -1)
0)
(deftest logcount.6
(loop for x = (random-fixnum)
repeat 100
always (eql (logcount x) (logcount (lognot x))))
t)
(deftest logcount.7
(let ((bound (ash 1 300)))
(loop for x = (random-from-interval bound)
repeat 100
always (eql (logcount x) (logcount (lognot x)))))
t)
(deftest logcount.8
(loop for y = (random (1+ most-positive-fixnum))
repeat 100
unless
(let ((cnt 0)
(x y))
(loop while (> x 0)
do
(when (oddp x) (incf cnt))
(setf x (ash x -1)))
(eql cnt (logcount y)))
collect y)
nil)
| null | https://raw.githubusercontent.com/pfdietz/ansi-test/3f4b9d31c3408114f0467eaeca4fd13b28e2ce31/numbers/logcount.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of LOGCOUNT
Error tests
Non-error tests | Author :
Created : Thu Sep 11 23:12:56 2003
(deftest logcount.error.1
(signals-error (logcount) program-error)
t)
(deftest logcount.error.2
(signals-error (logcount 0 nil) program-error)
t)
(deftest logcount.error.3
(check-type-error #'logcount #'integerp)
nil)
(deftest logcount.1
(logcount 0)
0)
(deftest logcount.2
(logcount 1)
1)
(deftest logcount.3
(logcount 2)
1)
(deftest logcount.4
(logcount 3)
2)
(deftest logcount.5
(logcount -1)
0)
(deftest logcount.6
(loop for x = (random-fixnum)
repeat 100
always (eql (logcount x) (logcount (lognot x))))
t)
(deftest logcount.7
(let ((bound (ash 1 300)))
(loop for x = (random-from-interval bound)
repeat 100
always (eql (logcount x) (logcount (lognot x)))))
t)
(deftest logcount.8
(loop for y = (random (1+ most-positive-fixnum))
repeat 100
unless
(let ((cnt 0)
(x y))
(loop while (> x 0)
do
(when (oddp x) (incf cnt))
(setf x (ash x -1)))
(eql cnt (logcount y)))
collect y)
nil)
|
60e27eb5ee414c6c56ad32c42ed6c3a69d73ef6f89cb6388917b89188e6fdc68 | windorg/app-old | User.hs | module Web.Controller.User where
import Data.Text (strip)
import qualified IHP.AuthSupport.Controller.Sessions as Sessions
import IHP.ValidationSupport.ValidateField (matchesRegex)
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.View.Sessions.LoginOrSignup
import Web.View.User.Edit
import Web.View.User.Show
instance Sessions.SessionsControllerConfig User => Controller UserController where
action ShowUserAction{userId} = do
user <- fetch userId
boards <-
query @Board
|> filterWhere (#ownerId, userId)
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
followed <- case mbCurrentUserId of
Nothing -> pure Nothing
Just currentUid ->
query @FollowedUser
|> filterWhere (#subscriberId, currentUid)
|> filterWhere (#followedUserId, userId)
|> fetchExists
<&> Just
render ShowView{..}
action EditUserAction{userId} = do
user <- fetch userId
render EditView{..}
action UpdateUserAction{userId} = do
user <- fetch userId
user
|> buildUser
|> ifValid \case
Left user -> render EditView{..}
Right user -> do
user <- user |> updateRecord
redirectTo EditUserAction{..}
action CreateUserAction = do
let user = newRecord @User
user
|> buildUser
|> modify #displayName strip
|> validateField #email isEmail
|> validateField #passwordHash nonEmpty
|> validateField #handle (matchesRegex "^[a-zA-Z0-9_-]{1,64}$")
|> (\u -> if get #displayName u == "" then u |> set #displayName (get #handle u) else u)
|> ifValid \case
Left user ->
render
LoginOrSignupView
{ loginUser = newRecord @User,
signupUser = user
}
Right user -> do
hashed <- hashPassword (get #passwordHash user)
user <-
user
|> set #passwordHash hashed
|> createRecord
login user
redirectUrl <- getSessionAndClear "IHP.LoginSupport.redirectAfterLogin"
redirectToPath (fromMaybe (Sessions.afterLoginRedirectPath @User) redirectUrl)
action DeleteUserAction{userId} = do
user <- fetch userId
deleteRecord user
redirectToPath "/"
action UpdateFollowUserAction{userId} = do
ensureIsUser
follows <-
query @FollowedUser
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#followedUserId, userId)
|> fetchExists
unless follows do
(newRecord :: FollowedUser)
|> set #subscriberId currentUserId
|> set #followedUserId userId
|> createRecord
pure ()
redirectBack
action UpdateUnfollowUserAction{userId} = do
ensureIsUser
mbFollow <-
query @FollowedUser
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#followedUserId, userId)
|> fetchOneOrNothing
mapM_ deleteRecord mbFollow
redirectBack
buildUser user =
user
|> fill @["email", "handle", "displayName", "passwordHash"]
| null | https://raw.githubusercontent.com/windorg/app-old/ed9c5322c8ab8a0275bdcd479be12a3f230da8c9/Web/Controller/User.hs | haskell | module Web.Controller.User where
import Data.Text (strip)
import qualified IHP.AuthSupport.Controller.Sessions as Sessions
import IHP.ValidationSupport.ValidateField (matchesRegex)
import Web.Controller.Authorization
import Web.Controller.Prelude
import Web.Helper.Common
import Web.View.Sessions.LoginOrSignup
import Web.View.User.Edit
import Web.View.User.Show
instance Sessions.SessionsControllerConfig User => Controller UserController where
action ShowUserAction{userId} = do
user <- fetch userId
boards <-
query @Board
|> filterWhere (#ownerId, userId)
|> orderByDesc #createdAt
|> fetch
>>= filterM (userCanView @Board . get #id)
followed <- case mbCurrentUserId of
Nothing -> pure Nothing
Just currentUid ->
query @FollowedUser
|> filterWhere (#subscriberId, currentUid)
|> filterWhere (#followedUserId, userId)
|> fetchExists
<&> Just
render ShowView{..}
action EditUserAction{userId} = do
user <- fetch userId
render EditView{..}
action UpdateUserAction{userId} = do
user <- fetch userId
user
|> buildUser
|> ifValid \case
Left user -> render EditView{..}
Right user -> do
user <- user |> updateRecord
redirectTo EditUserAction{..}
action CreateUserAction = do
let user = newRecord @User
user
|> buildUser
|> modify #displayName strip
|> validateField #email isEmail
|> validateField #passwordHash nonEmpty
|> validateField #handle (matchesRegex "^[a-zA-Z0-9_-]{1,64}$")
|> (\u -> if get #displayName u == "" then u |> set #displayName (get #handle u) else u)
|> ifValid \case
Left user ->
render
LoginOrSignupView
{ loginUser = newRecord @User,
signupUser = user
}
Right user -> do
hashed <- hashPassword (get #passwordHash user)
user <-
user
|> set #passwordHash hashed
|> createRecord
login user
redirectUrl <- getSessionAndClear "IHP.LoginSupport.redirectAfterLogin"
redirectToPath (fromMaybe (Sessions.afterLoginRedirectPath @User) redirectUrl)
action DeleteUserAction{userId} = do
user <- fetch userId
deleteRecord user
redirectToPath "/"
action UpdateFollowUserAction{userId} = do
ensureIsUser
follows <-
query @FollowedUser
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#followedUserId, userId)
|> fetchExists
unless follows do
(newRecord :: FollowedUser)
|> set #subscriberId currentUserId
|> set #followedUserId userId
|> createRecord
pure ()
redirectBack
action UpdateUnfollowUserAction{userId} = do
ensureIsUser
mbFollow <-
query @FollowedUser
|> filterWhere (#subscriberId, currentUserId)
|> filterWhere (#followedUserId, userId)
|> fetchOneOrNothing
mapM_ deleteRecord mbFollow
redirectBack
buildUser user =
user
|> fill @["email", "handle", "displayName", "passwordHash"]
|
|
00469d2a1982e7ba8c0aab0b2c903b8fc73a6ec53a0c327244276436e2c40843 | fyquah/hardcaml_zprize | twiddle_factor_stream.mli | (** Computes a stream of twiddle factors (successive powers of the initial
    roots) used in the first pass of the 4 step algorithm. *)
open Base
open Hardcaml
val pipe_length : int
module I : sig
type 'a t =
{ clock : 'a
; start_twiddles : 'a
; omegas : 'a list
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t = { w : 'a } [@@deriving sexp_of, hardcaml]
end
val create : Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val hierarchy : Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val initial_pipeline_factors : int -> Signal.t list
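(* Rough usage sketch (added comment; [scope], [clock], [start_twiddles] and
   [omegas] are assumed to be supplied by the enclosing design):
     let { O.w } = hierarchy scope { I.clock; start_twiddles; omegas } in ... *)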
| null | https://raw.githubusercontent.com/fyquah/hardcaml_zprize/553b1be10ae9b977decbca850df6ee2d0595e7ff/libs/hardcaml_ntt/src/twiddle_factor_stream.mli | ocaml | * Computes a stream of twiddle factors ( successive powers of the initial
roots ) used in the first pass of the 4 step algorithm .
roots) used in the first pass of the 4 step algorithm. *)
open Base
open Hardcaml
val pipe_length : int
module I : sig
type 'a t =
{ clock : 'a
; start_twiddles : 'a
; omegas : 'a list
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t = { w : 'a } [@@deriving sexp_of, hardcaml]
end
val create : Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val hierarchy : Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val initial_pipeline_factors : int -> Signal.t list
|
|
5eb9e6520a66cebc82bd91d2d4b7660d58791eb8bf2858b69228d7b2242a0390 | chrovis/cljam | pileup_test.clj | (ns cljam.io.pileup-test
(:require [clojure.test :refer [deftest is are testing]]
[clojure.java.io :as cio]
[clojure.string :as cstr]
[clojure.walk :as walk]
[cljam.test-common :refer
[with-before-after
prepare-cache!
clean-cache!
not-throw?
http-server
temp-dir
test-pileup-file]]
[cljam.io.pileup :as plpio]
[cljam.io.sequence :as cseq]
[cljam.io.fasta-index.core :as fai]
[cljam.io.protocols :as p])
(:import [java.io File StringWriter StringReader]
[cljam.io.pileup PileupReader PileupWriter]))
(defn- rec->map [coll]
(walk/postwalk
(fn [x]
(if (record? x)
(into {} x)
x))
coll))
(def ^:private ^:const the-last-pile
;; ref2 36 N 1 A$ ?
{:rname "ref2",
:pos 36,
:ref \N,
:count 1,
:pile
[{:start? false,
:mapq nil,
:base \A,
:qual 30,
:reverse? false,
:end? true,
:insertion nil,
:deletion nil,
:qname nil,
:alignment nil}]})
;; Reader
;; ------
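;; A quick reminder (added comment) of the mpileup base-column encoding that
;; the tests below exercise: `.`/`,` match the reference on the forward/reverse
;; strand, letters are mismatches (lower case = reverse strand), `>`/`<` are
;; reference skips, `*` is a deleted base, `^` followed by (mapq + 33) marks a
;; read start, `$` a read end, and `+N<seq>`/`-N<seq>` attach an insertion or
;; deletion to the preceding base.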
(deftest parse-bases-col
(testing "without-ref"
(are [?in ?out]
(= (mapv #(into {:qname nil :alignment nil} %) ?out)
(mapv rec->map (#'plpio/parse-bases-col nil ?in)))
"" []
"A" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"^]A" [{:start? true, :mapq 60, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"A$C" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? false, :mapq nil, :base \C, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"A$^]C" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? true, :mapq 60, :base \C, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"^BN" [{:start? true, :mapq 33, :base \N, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
">" [{:start? false, :mapq nil, :base \>, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"*" [{:start? false, :mapq nil, :base \*, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"<" [{:start? false, :mapq nil, :base \>, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"A+2TG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "TG", :deletion nil}]
"a-12nnnnnnnnnnnn$c$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion 12}
{:start? false, :mapq nil, :base \C, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"A+14AAAAATTTTTGGGG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"^]A+14AAAAATTTTTGGGG" [{:start? true, :mapq 60, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"A+14AAAAATTTTTGGGG$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"A+14AAAAATTTTTGGGGG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}
{:start? false, :mapq nil, :base \G, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"A-2NN$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}]
"A-2NN$^Ca" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}
{:start? true, :mapq 34, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]))
(testing "with-ref"
(are [?in ?out]
(= (mapv #(into {:qname nil :alignment nil} %) ?out)
(mapv rec->map (#'plpio/parse-bases-col \T ?in)))
"" []
"A" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"," [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
".$" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}]
".$^]," [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? true, :mapq 60, :base \T, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"^B." [{:start? true, :mapq 33, :base \T, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
",+2tg" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? false, :insertion "TG", :deletion nil}]
".-2AT$" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}]
",-2at$^Ca" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion 2}
{:start? true, :mapq 34, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]))
(testing "invalid arguments"
(are [?in]
(thrown? Exception (#'plpio/parse-bases-col nil ?in))
"A+"
"^"
"^A"
"^A+1A"
"+"
"-"
"++"
"--"
"A$$A"
"A+2A^]A"
"A+2A"
"A+2"
"A-2N"
"A-2")))
(deftest parse-pileup-line
(testing "without-ref"
(are [?in ?out]
(= ?out (rec->map (#'plpio/parse-pileup-line ?in)))
"chr1\t10\tN\t0\t\t" {:rname "chr1", :pos 10, :ref \N, :count 0,
:pile []}
"chr1\t10\tN\t1\tA\tI" {:rname "chr1", :pos 10, :ref \N, :count 1,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tN\t2\taA\tIB" {:rname "chr1", :pos 10, :ref \N, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tN\t2\taA-2NN$\tIB" {:rname "chr1", :pos 10, :ref \N, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? true, :insertion nil, :deletion 2, :qname nil, :alignment nil}]}))
(testing "with-ref"
(are [?in ?out]
(= ?out (rec->map (#'plpio/parse-pileup-line ?in)))
"chr1\t10\tA\t0\t\t" {:rname "chr1", :pos 10, :ref \A, :count 0,
:pile []}
"chr1\t10\ta\t1\t.\tI" {:rname "chr1", :pos 10, :ref \a, :count 1,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tA\t2\t,.\tIB" {:rname "chr1", :pos 10, :ref \A, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tA\t2\t,.-2CA$\tIB" {:rname "chr1", :pos 10, :ref \A, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? true, :insertion nil, :deletion 2, :qname nil, :alignment nil}]})))
(deftest reader
(with-open [r (plpio/reader test-pileup-file)]
(is (instance? PileupReader r))))
(deftest read-piles
(with-open [r (plpio/reader test-pileup-file)]
(is (= the-last-pile
(rec->map (last (plpio/read-piles r)))))))
;; Writer
;; ------
(defmacro with-string-writer [symbol & exprs]
`(with-open [sw# (StringWriter.)
~symbol (cio/writer sw#)]
~@exprs
(.flush ~symbol)
(str sw#)))
(deftest write-mpileup-alignment!
(testing "without-ref"
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-alignment! w nil "chr1" 10 nil ?in)))
{:base \A :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A"
{:base \N :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "N"
{:base \A :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a"
{:base \N :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "n"
{:base \* :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "*"
{:base \* :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "*"
{:base \> :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} ">"
{:base \> :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "<"
{:base \A :reverse? false :insertion "A" :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A+1A"
{:base \A :reverse? false :deletion 2 :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A-2NN"
{:base \A :reverse? true :insertion "AT" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+2at"
{:base \A :reverse? true :insertion "AAAATTTTGGGGCCCC" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+16aaaattttggggcccc"
{:base \A :reverse? true :deletion 3 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-3nnn"
{:base \A :reverse? true :deletion 10 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-10nnnnnnnnnn"
{:base \A :reverse? false :start? true :mapq 60 :alignment {:flag 0 :mapq 60 :pos 10 :end 15}} "^]A"
{:base \A :reverse? false :start? true :mapq 93 :alignment {:flag 0 :mapq 93 :pos 10 :end 15}} "^~A"
{:base \A :reverse? false :start? true :mapq 94 :alignment {:flag 0 :mapq 94 :pos 10 :end 15}} "^~A"
{:base \A :reverse? true :start? true :mapq 40 :insertion "AT" :alignment {:flag 16 :mapq 40 :pos 10 :end 15}} "^Ia+2at"
{:base \A :reverse? false :end? true :alignment {:flag 0 :mapq 40 :pos 5 :end 10}} "A$"
{:base \A :reverse? true :end? true :deletion 4 :alignment {:flag 16 :mapq 40 :pos 5 :end 10}} "a-4nnnn$"))
(testing "with-ref"
(let [r (reify p/ISequenceReader
(p/read-sequence [this {:keys [start end]}]
(subs "ATGCATGCATGCATGCATGCATGCATGC" (dec start) end)))]
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-alignment! w r "chr1" 10 \T ?in)))
{:base \A :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A"
{:base \T :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "."
{:base \T :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} ","
{:base \N :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "N"
{:base \A :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a"
{:base \N :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "n"
{:base \* :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "*"
{:base \* :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "*"
{:base \> :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} ">"
{:base \> :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "<"
{:base \A :insertion "A" :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A+1A"
{:base \A :deletion 2 :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A-2GC"
{:base \A :reverse? true :insertion "AT" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+2at"
{:base \A :reverse? true :insertion "AAAATTTTGGGGCCCC" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+16aaaattttggggcccc"
{:base \A :reverse? true :deletion 3 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-3gca"
{:base \A :reverse? true :deletion 10 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-10gcatgcatgc"
{:base \A :start? true :mapq 60 :alignment {:flag 0 :mapq 60 :pos 10 :end 15}} "^]A"
{:base \A :start? true :mapq 93 :alignment {:flag 0 :mapq 93 :pos 10 :end 15}} "^~A"
{:base \A :start? true :mapq 94 :alignment {:flag 0 :mapq 94 :pos 10 :end 15}} "^~A"
{:base \A :reverse? true :start? true :mapq 40 :insertion "AT" :alignment {:flag 16 :mapq 40 :pos 10 :end 15}} "^Ia+2at"
{:base \A :end? true :alignment {:flag 0 :mapq 40 :pos 5 :end 10}} "A$"
{:base \A :reverse? true :end? true :deletion 4 :alignment {:flag 16 :mapq 40 :pos 5 :end 10}} "a-4gcat$"))))
(defn string-sequence-reader [s]
(reify p/ISequenceReader
(p/read-sequence [this region]
(p/read-sequence this region {}))
(p/read-sequence [this {:keys [start end]} {:keys [mask?]}]
((if mask? identity cstr/upper-case)
(subs s (dec start) end)))))
(deftest write-mpileup-line!
(testing "without-ref"
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-line! w nil {:rname (first ?in)
:pos (second ?in)
:pile (last ?in)})))
["chr1" 10 []] "chr1\t10\tN\t0\t\t"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\tI"
["chr1" 10 [{:base \A :qual 93 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\t~"
["chr1" 10 [{:base \A :qual 94 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\t~"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}
{:base \A :qual 33 :reverse? true :start? true :mapq 60 :alignment {:flag 16 :mapq 60 :pos 10}}]] "chr1\t10\tN\t2\tA^]a\tIB"))
(testing "with-ref"
(let [r (string-sequence-reader "NNNNNNNNNAtGCATGCAT")]
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-line! w r {:rname (first ?in)
:pos (second ?in)
:pile (last ?in)})))
["chr1" 10 []] "chr1\t10\tA\t0\t\t"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\tI"
["chr1" 10 [{:base \A :qual 93 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\t~"
["chr1" 10 [{:base \A :qual 94 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\t~"
["chr1" 10 [{:base \A :insertion "AA" :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.+2AA\tI"
["chr1" 10 [{:base \A :deletion 2 :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.-2TG\tI"
["chr1" 11 [{:base \T :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t11\tt\t1\t.\tI"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}
{:base \A :qual 33 :reverse? true :start? true :mapq 60 :alignment {:flag 16 :mapq 60 :pos 10}}]] "chr1\t10\tA\t2\t.^],\tIB"))))
(deftest writer
(let [tmp (File/createTempFile "writer-test" ".mpileup")]
(try
(with-open [w (plpio/writer tmp)]
(is (instance? PileupWriter w)))
(finally
(when (.isFile (cio/file tmp))
(cio/delete-file (cio/file tmp)))))))
(deftest write-piles
(is (= "chr1\t10\tN\t1\tA\tI\n"
(with-open [sw (StringWriter.)
w (plpio/writer sw)]
(plpio/write-piles w [{:rname "chr1"
:pos 10
:pile [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]}])
(.toString sw)))))
;; Read & Write
;; ------------
(deftest regression
(testing "without-ref"
(are [?input]
(= ?input
(with-open [r (plpio/reader (StringReader. ?input))
sw (StringWriter.)
w (plpio/writer sw)]
(plpio/write-piles w (plpio/read-piles r))
(str sw)))
"chr1\t10\tN\t1\tA\tI\n"
"chr1\t10\tN\t4\tAaTt\tIABC\n"
"chr1\t10\tN\t4\t^]A+3TTTa-2nn$Tt\tIABC\n"))
(testing "with-ref"
(are [?input]
(= ?input
(with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(let [tmp (cio/file temp-dir "pileup-regression.fa")
idx (cio/file temp-dir "pileup-regression.fai")]
(with-open [w (cseq/writer (.getCanonicalPath tmp))]
(cseq/write-sequences w [{:name "chr1" :sequence "NNNNNNNNNATGC"}]))
(fai/create-index tmp idx)
(with-open [r (plpio/reader (StringReader. ?input))
sw (StringWriter.)
w (plpio/writer sw tmp)]
(plpio/write-piles w (plpio/read-piles r))
(str sw)))))
"chr1\t10\tA\t1\t.\tI\n"
"chr1\t10\tA\t4\t.,Tt\tIABC\n"
"chr1\t10\tA\t4\t^].+3TTT,-2tg$Tt\tIABC\n")))
(deftest source-type-test
(testing "reader"
(with-open [server (http-server)]
(are [x] (= the-last-pile
(with-open [r (plpio/reader x)]
(rec->map (last (plpio/read-piles r)))))
test-pileup-file
(cio/file test-pileup-file)
(cio/as-url (cio/file test-pileup-file))
(cio/as-url (str (:uri server) "/pileup/test.pileup")))))
(testing "writer"
(let [tmp-pileup-file (cio/file temp-dir "pileup-source-type-writer.mpileup")]
(are [x] (with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(with-open [w (plpio/writer x)]
(not-throw? (plpio/write-piles w [the-last-pile]))))
(.getCanonicalPath tmp-pileup-file)
tmp-pileup-file
(cio/as-url tmp-pileup-file)))))
| null | https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/test/cljam/io/pileup_test.clj | clojure | Reader
------
Writer
------
Read & Write
------------ | (ns cljam.io.pileup-test
(:require [clojure.test :refer [deftest is are testing]]
[clojure.java.io :as cio]
[clojure.string :as cstr]
[clojure.walk :as walk]
[cljam.test-common :refer
[with-before-after
prepare-cache!
clean-cache!
not-throw?
http-server
temp-dir
test-pileup-file]]
[cljam.io.pileup :as plpio]
[cljam.io.sequence :as cseq]
[cljam.io.fasta-index.core :as fai]
[cljam.io.protocols :as p])
(:import [java.io File StringWriter StringReader]
[cljam.io.pileup PileupReader PileupWriter]))
(defn- rec->map [coll]
(walk/postwalk
(fn [x]
(if (record? x)
(into {} x)
x))
coll))
(def ^:private ^:const the-last-pile
;; ref2 36 N 1 A$ ?
{:rname "ref2",
:pos 36,
:ref \N,
:count 1,
:pile
[{:start? false,
:mapq nil,
:base \A,
:qual 30,
:reverse? false,
:end? true,
:insertion nil,
:deletion nil,
:qname nil,
:alignment nil}]})
(deftest parse-bases-col
(testing "without-ref"
(are [?in ?out]
(= (mapv #(into {:qname nil :alignment nil} %) ?out)
(mapv rec->map (#'plpio/parse-bases-col nil ?in)))
"" []
"A" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"^]A" [{:start? true, :mapq 60, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"A$C" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? false, :mapq nil, :base \C, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"A$^]C" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? true, :mapq 60, :base \C, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"^BN" [{:start? true, :mapq 33, :base \N, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
">" [{:start? false, :mapq nil, :base \>, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"*" [{:start? false, :mapq nil, :base \*, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"<" [{:start? false, :mapq nil, :base \>, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"A+2TG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "TG", :deletion nil}]
"a-12nnnnnnnnnnnn$c$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion 12}
{:start? false, :mapq nil, :base \C, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"A+14AAAAATTTTTGGGG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"^]A+14AAAAATTTTTGGGG" [{:start? true, :mapq 60, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"A+14AAAAATTTTTGGGG$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion "AAAAATTTTTGGGG", :deletion nil}]
"A+14AAAAATTTTTGGGGG" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion "AAAAATTTTTGGGG", :deletion nil}
{:start? false, :mapq nil, :base \G, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"A-2NN$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}]
"A-2NN$^Ca" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}
{:start? true, :mapq 34, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]))
(testing "with-ref"
(are [?in ?out]
(= (mapv #(into {:qname nil :alignment nil} %) ?out)
(mapv rec->map (#'plpio/parse-bases-col \T ?in)))
"" []
"A" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
"a$" [{:start? false, :mapq nil, :base \A, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion nil}]
"," [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
".$" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}]
".$^]," [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion nil}
{:start? true, :mapq 60, :base \T, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]
"^B." [{:start? true, :mapq 33, :base \T, :qual -1, :reverse? false, :end? false, :insertion nil, :deletion nil}]
",+2tg" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? false, :insertion "TG", :deletion nil}]
".-2AT$" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? false, :end? true, :insertion nil, :deletion 2}]
",-2at$^Ca" [{:start? false, :mapq nil, :base \T, :qual -1, :reverse? true, :end? true, :insertion nil, :deletion 2}
{:start? true, :mapq 34, :base \A, :qual -1, :reverse? true, :end? false, :insertion nil, :deletion nil}]))
(testing "invalid arguments"
(are [?in]
(thrown? Exception (#'plpio/parse-bases-col nil ?in))
"A+"
"^"
"^A"
"^A+1A"
"+"
"-"
"++"
"--"
"A$$A"
"A+2A^]A"
"A+2A"
"A+2"
"A-2N"
"A-2")))
(deftest parse-pileup-line
(testing "without-ref"
(are [?in ?out]
(= ?out (rec->map (#'plpio/parse-pileup-line ?in)))
"chr1\t10\tN\t0\t\t" {:rname "chr1", :pos 10, :ref \N, :count 0,
:pile []}
"chr1\t10\tN\t1\tA\tI" {:rname "chr1", :pos 10, :ref \N, :count 1,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tN\t2\taA\tIB" {:rname "chr1", :pos 10, :ref \N, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tN\t2\taA-2NN$\tIB" {:rname "chr1", :pos 10, :ref \N, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? true, :insertion nil, :deletion 2, :qname nil, :alignment nil}]}))
(testing "with-ref"
(are [?in ?out]
(= ?out (rec->map (#'plpio/parse-pileup-line ?in)))
"chr1\t10\tA\t0\t\t" {:rname "chr1", :pos 10, :ref \A, :count 0,
:pile []}
"chr1\t10\ta\t1\t.\tI" {:rname "chr1", :pos 10, :ref \a, :count 1,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tA\t2\t,.\tIB" {:rname "chr1", :pos 10, :ref \A, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}]}
"chr1\t10\tA\t2\t,.-2CA$\tIB" {:rname "chr1", :pos 10, :ref \A, :count 2,
:pile [{:start? false, :mapq nil, :base \A, :qual 40, :reverse? true, :end? false, :insertion nil, :deletion nil, :qname nil, :alignment nil}
{:start? false, :mapq nil, :base \A, :qual 33, :reverse? false, :end? true, :insertion nil, :deletion 2, :qname nil, :alignment nil}]})))
(deftest reader
(with-open [r (plpio/reader test-pileup-file)]
(is (instance? PileupReader r))))
(deftest read-piles
(with-open [r (plpio/reader test-pileup-file)]
(is (= the-last-pile
(rec->map (last (plpio/read-piles r)))))))
(defmacro with-string-writer [symbol & exprs]
`(with-open [sw# (StringWriter.)
~symbol (cio/writer sw#)]
~@exprs
(.flush ~symbol)
(str sw#)))
(deftest write-mpileup-alignment!
(testing "without-ref"
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-alignment! w nil "chr1" 10 nil ?in)))
{:base \A :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A"
{:base \N :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "N"
{:base \A :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a"
{:base \N :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "n"
{:base \* :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "*"
{:base \* :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "*"
{:base \> :reverse? false :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} ">"
{:base \> :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "<"
{:base \A :reverse? false :insertion "A" :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A+1A"
{:base \A :reverse? false :deletion 2 :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A-2NN"
{:base \A :reverse? true :insertion "AT" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+2at"
{:base \A :reverse? true :insertion "AAAATTTTGGGGCCCC" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+16aaaattttggggcccc"
{:base \A :reverse? true :deletion 3 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-3nnn"
{:base \A :reverse? true :deletion 10 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-10nnnnnnnnnn"
{:base \A :reverse? false :start? true :mapq 60 :alignment {:flag 0 :mapq 60 :pos 10 :end 15}} "^]A"
{:base \A :reverse? false :start? true :mapq 93 :alignment {:flag 0 :mapq 93 :pos 10 :end 15}} "^~A"
{:base \A :reverse? false :start? true :mapq 94 :alignment {:flag 0 :mapq 94 :pos 10 :end 15}} "^~A"
{:base \A :reverse? true :start? true :mapq 40 :insertion "AT" :alignment {:flag 16 :mapq 40 :pos 10 :end 15}} "^Ia+2at"
{:base \A :reverse? false :end? true :alignment {:flag 0 :mapq 40 :pos 5 :end 10}} "A$"
{:base \A :reverse? true :end? true :deletion 4 :alignment {:flag 16 :mapq 40 :pos 5 :end 10}} "a-4nnnn$"))
(testing "with-ref"
(let [r (reify p/ISequenceReader
(p/read-sequence [this {:keys [start end]}]
(subs "ATGCATGCATGCATGCATGCATGCATGC" (dec start) end)))]
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-alignment! w r "chr1" 10 \T ?in)))
{:base \A :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A"
{:base \T :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "."
{:base \T :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} ","
{:base \N :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "N"
{:base \A :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a"
{:base \N :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "n"
{:base \* :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "*"
{:base \* :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "*"
{:base \> :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} ">"
{:base \> :reverse? true :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "<"
{:base \A :insertion "A" :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A+1A"
{:base \A :deletion 2 :alignment {:flag 0 :mapq 60 :pos 5 :end 15}} "A-2GC"
{:base \A :reverse? true :insertion "AT" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+2at"
{:base \A :reverse? true :insertion "AAAATTTTGGGGCCCC" :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a+16aaaattttggggcccc"
{:base \A :reverse? true :deletion 3 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-3gca"
{:base \A :reverse? true :deletion 10 :alignment {:flag 16 :mapq 60 :pos 5 :end 15}} "a-10gcatgcatgc"
{:base \A :start? true :mapq 60 :alignment {:flag 0 :mapq 60 :pos 10 :end 15}} "^]A"
{:base \A :start? true :mapq 93 :alignment {:flag 0 :mapq 93 :pos 10 :end 15}} "^~A"
{:base \A :start? true :mapq 94 :alignment {:flag 0 :mapq 94 :pos 10 :end 15}} "^~A"
{:base \A :reverse? true :start? true :mapq 40 :insertion "AT" :alignment {:flag 16 :mapq 40 :pos 10 :end 15}} "^Ia+2at"
{:base \A :end? true :alignment {:flag 0 :mapq 40 :pos 5 :end 10}} "A$"
{:base \A :reverse? true :end? true :deletion 4 :alignment {:flag 16 :mapq 40 :pos 5 :end 10}} "a-4gcat$"))))
(defn string-sequence-reader [s]
(reify p/ISequenceReader
(p/read-sequence [this region]
(p/read-sequence this region {}))
(p/read-sequence [this {:keys [start end]} {:keys [mask?]}]
((if mask? identity cstr/upper-case)
(subs s (dec start) end)))))
(deftest write-mpileup-line!
(testing "without-ref"
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-line! w nil {:rname (first ?in)
:pos (second ?in)
:pile (last ?in)})))
["chr1" 10 []] "chr1\t10\tN\t0\t\t"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\tI"
["chr1" 10 [{:base \A :qual 93 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\t~"
["chr1" 10 [{:base \A :qual 94 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tN\t1\tA\t~"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}
{:base \A :qual 33 :reverse? true :start? true :mapq 60 :alignment {:flag 16 :mapq 60 :pos 10}}]] "chr1\t10\tN\t2\tA^]a\tIB"))
(testing "with-ref"
(let [r (string-sequence-reader "NNNNNNNNNAtGCATGCAT")]
(are [?in ?out]
(= ?out (with-string-writer w
(#'plpio/write-mpileup-line! w r {:rname (first ?in)
:pos (second ?in)
:pile (last ?in)})))
["chr1" 10 []] "chr1\t10\tA\t0\t\t"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\tI"
["chr1" 10 [{:base \A :qual 93 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\t~"
["chr1" 10 [{:base \A :qual 94 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.\t~"
["chr1" 10 [{:base \A :insertion "AA" :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.+2AA\tI"
["chr1" 10 [{:base \A :deletion 2 :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t10\tA\t1\t.-2TG\tI"
["chr1" 11 [{:base \T :qual 40 :alignment {:flag 0 :pos 5}}]] "chr1\t11\tt\t1\t.\tI"
["chr1" 10 [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}
{:base \A :qual 33 :reverse? true :start? true :mapq 60 :alignment {:flag 16 :mapq 60 :pos 10}}]] "chr1\t10\tA\t2\t.^],\tIB"))))
(deftest writer
(let [tmp (File/createTempFile "writer-test" ".mpileup")]
(try
(with-open [w (plpio/writer tmp)]
(is (instance? PileupWriter w)))
(finally
(when (.isFile (cio/file tmp))
(cio/delete-file (cio/file tmp)))))))
(deftest write-piles
(is (= "chr1\t10\tN\t1\tA\tI\n"
(with-open [sw (StringWriter.)
w (plpio/writer sw)]
(plpio/write-piles w [{:rname "chr1"
:pos 10
:pile [{:base \A :qual 40 :alignment {:flag 0 :pos 5}}]}])
(.toString sw)))))
(deftest regression
(testing "without-ref"
(are [?input]
(= ?input
(with-open [r (plpio/reader (StringReader. ?input))
sw (StringWriter.)
w (plpio/writer sw)]
(plpio/write-piles w (plpio/read-piles r))
(str sw)))
"chr1\t10\tN\t1\tA\tI\n"
"chr1\t10\tN\t4\tAaTt\tIABC\n"
"chr1\t10\tN\t4\t^]A+3TTTa-2nn$Tt\tIABC\n"))
(testing "with-ref"
(are [?input]
(= ?input
(with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(let [tmp (cio/file temp-dir "pileup-regression.fa")
idx (cio/file temp-dir "pileup-regression.fai")]
(with-open [w (cseq/writer (.getCanonicalPath tmp))]
(cseq/write-sequences w [{:name "chr1" :sequence "NNNNNNNNNATGC"}]))
(fai/create-index tmp idx)
(with-open [r (plpio/reader (StringReader. ?input))
sw (StringWriter.)
w (plpio/writer sw tmp)]
(plpio/write-piles w (plpio/read-piles r))
(str sw)))))
"chr1\t10\tA\t1\t.\tI\n"
"chr1\t10\tA\t4\t.,Tt\tIABC\n"
"chr1\t10\tA\t4\t^].+3TTT,-2tg$Tt\tIABC\n")))
(deftest source-type-test
(testing "reader"
(with-open [server (http-server)]
(are [x] (= the-last-pile
(with-open [r (plpio/reader x)]
(rec->map (last (plpio/read-piles r)))))
test-pileup-file
(cio/file test-pileup-file)
(cio/as-url (cio/file test-pileup-file))
(cio/as-url (str (:uri server) "/pileup/test.pileup")))))
(testing "writer"
(let [tmp-pileup-file (cio/file temp-dir "pileup-source-type-writer.mpileup")]
(are [x] (with-before-after {:before (prepare-cache!)
:after (clean-cache!)}
(with-open [w (plpio/writer x)]
(not-throw? (plpio/write-piles w [the-last-pile]))))
(.getCanonicalPath tmp-pileup-file)
tmp-pileup-file
(cio/as-url tmp-pileup-file)))))
|
ff11127cf853569b768dae26cbbd15981d360d8d6507ab54780d95f97c87865d | srdqty/talc-3.0 | render.mli |
val apply : (Program.v -> Program.v list -> Program.v list) ref
val inline_closure : (Program.v -> Program.v) ref
val optimize_scene : bool ref
val f :
amb:(float * float * float) -> lights: Program.v array ->
obj:Program.obj -> depth:int -> fov:float -> wid:int -> ht:int ->
file:string -> unit
| null | https://raw.githubusercontent.com/srdqty/talc-3.0/df83dd5ff0e2b189b13280ddae233d8277199350/apps/gml/plclub/src/render.mli | ocaml |
val apply : (Program.v -> Program.v list -> Program.v list) ref
val inline_closure : (Program.v -> Program.v) ref
val optimize_scene : bool ref
val f :
amb:(float * float * float) -> lights: Program.v array ->
obj:Program.obj -> depth:int -> fov:float -> wid:int -> ht:int ->
file:string -> unit
|
|
c2e55943faffd369466a222d69d1a58e6d9eeb98bdcc1e33057f2fd7ea67d002 | felixengelmann/act-r-sentence-parser-em | sp-lv05.lisp | -*- mode : LISP ; Syntax : COMMON - LISP ; Base : 10 -*-
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; ACT-R Sentence Parsing Model
;;;
;;; Copyright (C) 2006,
;;;
;;; Extended by 2012/2013 to work with the EMMA eye
;;; movement model in ACT-R 6
;;;
;;; Includes the ACT-R Sentence Parsing Model processing
;;; German negative and positive polarity items as described in the
;;; Cognitive Science article, Bruessow & Lewis (2007).
;;;
;;; The original English model is described in the Cognitive Science
;;; article Lewis & Vasishth (2004).
;;;
;;; ===================================================================
;;;
;;; The model is free software; you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or
;;; (at your option) any later version.
;;;
;;; The model is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with this program. If not, see </>.
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Bugs :
;;;
;;; To do :
;;;
;;; ----- History -----
;;;
;;; : *
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; WORKING DIRECTORY
;; (cwd "/Users/felix/Dropbox/Workspace/ACT-R_EMMA/PortedParser3/")
;;#+:acl(set-mac-default-directory #P"/Users/felix/Dropbox/Workspace/ACT-R_EMMA/PortedParserEMMA/")
;; (setf *default-pathname-defaults* #P"/Users/felix/Workspace/ACT-R-Parser/Projects/LewisVasishth2005/")
(setf *output-dir* "output")
(defparameter *read-corpus* NIL)
(defvar *lang*)
;; (setf *lang* 'spanish)
(setf *lang* 'english)
(defun load-sp-core NIL
(load "../sp/helper-functions")
(load "../sp/interface")
(load "../sp/interface-emma")
(load "../sp/experiment-control-original")
(load "../sp/experiment-control")
(load "../sp/support-lexicon")
(load "../sp/interface-parser")
)
(defun load-model-support-sp nil
(load "../sp/constants")
(load "../sp/support-productions")
(load "../sp/productions-control")
; (load "productions-control-adjusted")
(load "../sp/productions-parser")
; (load "demo-productions.lisp")
; (load "demo-productions-adjusted.lisp")
(load "../sp/chunks")
)
(defun load-sp nil
(load-sp-core)
(load "model")
(load-model-support-sp)
(load "sentences")
)
;; (clear-all):
;; there is no model defined, the time is set to 0.0,
;; the event queue is cleared, waiting events are removed and the event hooks are cleared.
(defun clear-sp nil
(clear-all)
(reset)
; (suppress-warnings
(load "sp-lv05")
);)
;; (reload)
;; Reload is essentially a shortcut for reloading a model file
;; that has been edited to incorporate those changes.
(defun reload-sp nil
; (suppress-warnings
(clear-all)
(load-sp)
);)
(defun rl NIL
(reload-sp))
;; (reset):
;; for the current meta-process the time is set to 0.0,
;; the event queue is cleared, all waiting events are removed
;; and then each of the currently defined models is reset.
(defun reset-sp nil
(reset)
(suppress-warnings
(load-model-support-sp)
))
(defun soft-reset-sp nil
; reset parsing module?
(reset-sp)
)
(load-sp-core)
(compile-file "../sp/helper-functions")
(compile-file "../sp/interface")
(compile-file "../sp/interface-emma")
(compile-file "../sp/experiment-control-original")
(compile-file "../sp/experiment-control")
(compile-file "../sp/support-lexicon")
; (compile-file "../sp/interface-parser")
(clear-all)
(load-sp)
; (reload-sp)
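;; Editor's note (not part of the original file): a minimal usage sketch of the
;; helpers defined above, assuming an ACT-R environment is already running:
;;   (load-sp)    ; load the parser core, the model, its support files, and sentences
;;   (reload-sp)  ; clear everything and reload after editing model files
;;   (reset-sp)   ; reset the current model and re-load the model support files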
| null | https://raw.githubusercontent.com/felixengelmann/act-r-sentence-parser-em/0d063c8772a367ac57e7fc9f2e1611b2956734df/LewisVasishth2005/sp-lv05.lisp | lisp | Syntax : COMMON - LISP ; Base : 10 -*-
ACT-R Sentence Parsing Model
movement model in ACT-R 6
Includes the ACT-R Sentence Parsing Model processing
===================================================================
The model is free software; you can redistribute it and/or modify
either version 3 of the License , or
(at your option) any later version.
The model is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
Bugs :
To do :
----- History -----
: *
WORKING DIRECTORY
#+:acl(set-mac-default-directory #P"/Users/felix/Dropbox/Workspace/ACT-R_EMMA/PortedParserEMMA/")
(load "productions-control-adjusted")
(load "demo-productions-adjusted.lisp")
(clear-all):
the event queue is cleared, waiting events are removed and the event hooks are cleared.
(suppress-warnings
)
(reload)
Reload is essentially a shortcut for reloading a model file
that has been edited to incorporate those changes.
(suppress-warnings
)
(reset):
the event queue is cleared, all waiting events are removed
and then each of the currently defined models is reset.
reset parsing module?
(compile-file "../sp/interface-parser")
(reload-sp) | Copyright ( C ) 2006 ,
Extended by 2012/2013 to work with the EMMA eye
German negative and positive polarity items as described in the
Cognitive Science article , Bruessow & Lewis ( 2007 ) .
The original English model is described in the Cognitive Science
article Lewis & Vasishth ( 2004 ) .
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
( cwd " /Users / felix / Dropbox / Workspace / ACT - R_EMMA / PortedParser3/ " )
( setf * default - pathname - defaults * # P"/Users / felix / Workspace / ACT - R - Parser / Projects / LewisVasishth2005/ " )
(setf *output-dir* "output")
(defparameter *read-corpus* NIL)
(defvar *lang*)
( setf * lang * ' spanish )
(setf *lang* 'english)
(defun load-sp-core NIL
(load "../sp/helper-functions")
(load "../sp/interface")
(load "../sp/interface-emma")
(load "../sp/experiment-control-original")
(load "../sp/experiment-control")
(load "../sp/support-lexicon")
(load "../sp/interface-parser")
)
(defun load-model-support-sp nil
(load "../sp/constants")
(load "../sp/support-productions")
(load "../sp/productions-control")
(load "../sp/productions-parser")
( load " demo-productions.lisp " )
(load "../sp/chunks")
)
(defun load-sp nil
(load-sp-core)
(load "model")
(load-model-support-sp)
(load "sentences")
)
there is no model defined , the time is set to 0.0 ,
(defun clear-sp nil
(clear-all)
(reset)
(load "sp-lv05")
(defun reload-sp nil
(clear-all)
(load-sp)
(defun rl NIL
(reload-sp))
for the current meta - process the time is set to 0.0 ,
(defun reset-sp nil
(reset)
(suppress-warnings
(load-model-support-sp)
))
(defun soft-reset-sp nil
(reset-sp)
)
(load-sp-core)
(compile-file "../sp/helper-functions")
(compile-file "../sp/interface")
(compile-file "../sp/interface-emma")
(compile-file "../sp/experiment-control-original")
(compile-file "../sp/experiment-control")
(compile-file "../sp/support-lexicon")
(clear-all)
(load-sp)
|
dfc720847430a1fde8ad14aa13b9c23bbd8a35f9a5c06af80f8d665c13a13baf | everpeace/programming-erlang-code | a.erl | -module(a).
-compile(export_all).
start(Tag) ->
spawn(fun() -> loop(Tag) end).
loop(Tag) ->
sleep(),
Val = b:x(),
io:format("Vsn1 (~p) b:x() = ~p~n",[Tag, Val]),
loop(Tag).
sleep() ->
receive
after 3000 -> true
end.
| null | https://raw.githubusercontent.com/everpeace/programming-erlang-code/8ef31aa13d15b41754dda225c50284915c29cb48/code/a.erl | erlang | -module(a).
-compile(export_all).
start(Tag) ->
spawn(fun() -> loop(Tag) end).
loop(Tag) ->
sleep(),
Val = b:x(),
io:format("Vsn1 (~p) b:x() = ~p~n",[Tag, Val]),
loop(Tag).
sleep() ->
receive
after 3000 -> true
end.
|
|
2d982b4f868e43818f6cb99ed0ff446d527fe7e1f7314a7adecc278f210c1814 | harpocrates/inline-rust | FunctionPointerTypes.hs | # LANGUAGE QuasiQuotes , TemplateHaskell #
module FunctionPointerTypes where
import Language.Rust.Inline
import Test.Hspec
import Foreign.Storable
import Foreign.Ptr
import Data.Word ( Word )
import Data.Char ( ord )
extendContext functions
extendContext basic
setCrateModule
[rust|
extern fn foo(x: isize) -> isize { 2*x + 3 }
extern fn bar(w: usize, c: char) -> isize { w as isize + c as isize }
|]
funcPointerTypes :: Spec
funcPointerTypes = describe "Function pointer types" $ do
it "Can marshal a `FunPtr (Int -> Int)` argument" $ do
let foo x = 2*x + 3
x <- $(withFunPtr [t| Int -> Int |]) foo $ \fooPtr ->
[rustIO| isize {
let foo = $( fooPtr: extern "C" fn(isize) -> isize );
foo(4) + foo(9)
} |]
let x' = foo 4 + foo 9
x `shouldBe` x'
it "Can marshal a `FunPtr (Int -> Int)` return" $ do
let fooPtr = [rust| extern fn(isize) -> isize { foo } |]
let foo = $(unFunPtr [t| Int -> Int |]) fooPtr
[rust| isize { foo(4) } |] `shouldBe` foo 4
it "Can marshal a `FunPtr (Word -> Char -> Int)` argument" $ do
let bar :: Word -> Char -> Int
bar w c = fromIntegral w + 8 * ord c
x <- $(withFunPtr [t| Word -> Char -> Int |]) bar $ \barPtr ->
[rustIO| isize {
let bar = $( barPtr: extern "C" fn(usize,char) -> isize );
bar(4,'a') + bar(9,'o')
} |]
let x' = bar 4 'a' + bar 9 'o'
x `shouldBe` x'
it "Can marshal a `FunPtr (Word -> Char -> Int)` return" $ do
let barPtr = [rust| extern fn(usize,char) -> isize { bar } |]
let bar = $(unFunPtr [t| Word -> Char -> Int |]) barPtr
[rust| isize { bar(4, 'a') } |] `shouldBe` bar 4 'a'
| null | https://raw.githubusercontent.com/harpocrates/inline-rust/5ecff8c92526000e5fc358a2dfede9b60ef59a1a/tests/FunctionPointerTypes.hs | haskell | # LANGUAGE QuasiQuotes , TemplateHaskell #
module FunctionPointerTypes where
import Language.Rust.Inline
import Test.Hspec
import Foreign.Storable
import Foreign.Ptr
import Data.Word ( Word )
import Data.Char ( ord )
extendContext functions
extendContext basic
setCrateModule
[rust|
extern fn foo(x: isize) -> isize { 2*x + 3 }
extern fn bar(w: usize, c: char) -> isize { w as isize + c as isize }
|]
funcPointerTypes :: Spec
funcPointerTypes = describe "Function pointer types" $ do
it "Can marshal a `FunPtr (Int -> Int)` argument" $ do
let foo x = 2*x + 3
x <- $(withFunPtr [t| Int -> Int |]) foo $ \fooPtr ->
[rustIO| isize {
let foo = $( fooPtr: extern "C" fn(isize) -> isize );
foo(4) + foo(9)
} |]
let x' = foo 4 + foo 9
x `shouldBe` x'
it "Can marshal a `FunPtr (Int -> Int)` return" $ do
let fooPtr = [rust| extern fn(isize) -> isize { foo } |]
let foo = $(unFunPtr [t| Int -> Int |]) fooPtr
[rust| isize { foo(4) } |] `shouldBe` foo 4
it "Can marshal a `FunPtr (Word -> Char -> Int)` argument" $ do
let bar :: Word -> Char -> Int
bar w c = fromIntegral w + 8 * ord c
x <- $(withFunPtr [t| Word -> Char -> Int |]) bar $ \barPtr ->
[rustIO| isize {
let bar = $( barPtr: extern "C" fn(usize,char) -> isize );
bar(4,'a') + bar(9,'o')
} |]
let x' = bar 4 'a' + bar 9 'o'
x `shouldBe` x'
it "Can marshal a `FunPtr (Word -> Char -> Int)` return" $ do
let barPtr = [rust| extern fn(usize,char) -> isize { bar } |]
let bar = $(unFunPtr [t| Word -> Char -> Int |]) barPtr
[rust| isize { bar(4, 'a') } |] `shouldBe` bar 4 'a'
|
|
e0827878ed21575816ed6b4670de093ea3beac46db1d3f3cc9982af05a1774da | rlepigre/ocaml-earley | option.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* The OCaml programmers *)
(* *)
(* Copyright 2018 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Option values.
Option values explicitly indicate the presence or absence of a value.
@since 4.08 *)
(** {1:options Options} *)
type 'a t = 'a option = None | Some of 'a (**)
(** The type for option values. Either [None] or a value [Some v]. *)
val none : 'a option
(** [none] is [None]. *)
val some : 'a -> 'a option
(** [some v] is [Some v]. *)
val value : 'a option -> default:'a -> 'a
(** [value o ~default] is [v] if [o] is [Some v] and [default] otherwise. *)
val get : 'a option -> 'a
(** [get o] is [v] if [o] is [Some v] and @raise Invalid_argument otherwise. *)
val bind : 'a option -> ('a -> 'b option) -> 'b option
(** [bind o f] is [f v] if [o] is [Some v] and [None] if [o] is [None]. *)
val join : 'a option option -> 'a option
(** [join oo] is [Some v] if [oo] is [Some (Some v)] and [None] otherwise. *)
val map : ('a -> 'b) -> 'a option -> 'b option
(** [map f o] is [None] if [o] is [None] and [Some (f v)] if [o] is [Some v]. *)
val fold : none:'a -> some:('b -> 'a) -> 'b option -> 'a
(** [fold ~none ~some o] is [none] if [o] is [None] and [some v] if [o] is
[Some v]. *)
val iter : ('a -> unit) -> 'a option -> unit
(** [iter f o] is [f v] if [o] is [Some v] and [()] otherwise. *)
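(* Editor's note: the examples below are illustrative only and are not part of
   the original interface; they simply restate the documented semantics of the
   combinators above (module name [Option] assumed):
     Option.value (Some 1) ~default:0                      = 1
     Option.bind (Some 4) (fun x -> Some (2 * x))          = Some 8
     Option.map string_of_int None                         = None
     Option.fold ~none:0 ~some:(fun v -> v + 1) (Some 41)  = 42 *)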
(** {1 Predicates and comparisons} *)
val is_none : 'a option -> bool
(** [is_none o] is [true] iff [o] is [None]. *)
val is_some : 'a option -> bool
(** [is_some o] is [true] iff [o] is [Some o]. *)
val equal : ('a -> 'a -> bool) -> 'a option -> 'a option -> bool
(** [equal eq o0 o1] is [true] iff [o0] and [o1] are both [None] or if
they are [Some v0] and [Some v1] and [eq v0 v1] is [true]. *)
val compare : ('a -> 'a -> int) -> 'a option -> 'a option -> int
(** [compare cmp o0 o1] is a total order on options using [cmp] to compare
values wrapped by [Some _]. [None] is smaller than [Some _] values. *)
(** {1:convert Converting} *)
val to_result : none:'e -> 'a option -> ('a, 'e) result
(** [to_result ~none o] is [Ok v] if [o] is [Some v] and [Error none]
otherwise. *)
val to_list : 'a option -> 'a list
(** [to_list o] is [[]] if [o] is [None] and [[v]] if [o] is [Some v]. *)
val to_seq : 'a option -> 'a Seq.t
(** [to_seq o] is [o] as a sequence. [None] is the empty sequence and
[Some v] is the singleton sequence containing [v]. *)
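(* Editor's note: an illustrative sketch of the converting functions, not part
   of the original interface, assuming the semantics documented above:
     Option.to_result ~none:"missing" (Some 3)  = Ok 3
     Option.to_result ~none:"missing" None      = Error "missing"
     Option.to_list (Some 'x')                  = ['x']
     List.of_seq (Option.to_seq (Some 'x'))     = ['x'] *)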
| null | https://raw.githubusercontent.com/rlepigre/ocaml-earley/66d7acb2b22edae0c2f7d64ef2a50fa70d1ed112/ocaml_ast/src/option.mli | ocaml | ************************************************************************
OCaml
The OCaml programmers
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* The type for option values. Either [None] or a value [Some v].
* [none] is [None].
* [some v] is [Some v].
* [value o ~default] is [v] if [o] is [Some v] and [default] otherwise.
* [get o] is [v] if [o] is [Some v] and @raise Invalid_argument otherwise.
* [bind o f] is [f v] if [o] is [Some v] and [None] if [o] is [None].
* [join oo] is [Some v] if [oo] is [Some (Some v)] and [None] otherwise.
* [map f o] is [None] if [o] is [None] and [Some (f v)] is [o] is [Some v].
* [fold ~none ~some o] is [none] if [o] is [None] and [some v] if [o] is
[Some v].
* [iter f o] is [f v] if [o] is [Some v] and [()] otherwise.
* [is_none o] is [true] iff [o] is [None].
* [is_some o] is [true] iff [o] is [Some o].
* [equal eq o0 o1] is [true] iff [o0] and [o1] are both [None] or if
they are [Some v0] and [Some v1] and [eq v0 v1] is [true].
* [compare cmp o0 o1] is a total order on options using [cmp] to compare
values wrapped by [Some _]. [None] is smaller than [Some _] values.
* [to_list o] is [[]] if [o] is [None] and [[v]] if [o] is [Some v].
* [to_seq o] is [o] as a sequence. [None] is the empty sequence and
[Some v] is the singleton sequence containing [v]. | Copyright 2018 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Option values .
Option values explicitly indicate the presence or absence of a value .
@since 4.08
Option values explicitly indicate the presence or absence of a value.
@since 4.08 *)
* { 1 : options Options }
val none : 'a option
val some : 'a -> 'a option
val value : 'a option -> default:'a -> 'a
val get : 'a option -> 'a
val bind : 'a option -> ('a -> 'b option) -> 'b option
val join : 'a option option -> 'a option
val map : ('a -> 'b) -> 'a option -> 'b option
val fold : none:'a -> some:('b -> 'a) -> 'b option -> 'a
val iter : ('a -> unit) -> 'a option -> unit
* { 1 : Predicates and comparisons }
val is_none : 'a option -> bool
val is_some : 'a option -> bool
val equal : ('a -> 'a -> bool) -> 'a option -> 'a option -> bool
val compare : ('a -> 'a -> int) -> 'a option -> 'a option -> int
* { 1 : convert Converting }
val to_result : none:'e -> 'a option -> ('a, 'e) result
* [ ~none o ] is [ Ok v ] if [ o ] is [ Some v ] and [ Error none ]
otherwise .
otherwise. *)
val to_list : 'a option -> 'a list
val to_seq : 'a option -> 'a Seq.t
|