| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
module System.Warp.Match where
import System.Warp.Types
import System.Process
import Data.List(intersperse, isInfixOf, find, null)
import qualified Data.Map as M
runMatch :: String -> Facts -> Matcher -> Bool
runMatch _ _ MatchAll = True
runMatch _ _ MatchNone = False
runMatch host _ (MatchHost candidate) = (host == candidate)
runMatch _ facts (MatchFact key val) = case M.lookup key facts of
Nothing -> False
Just myfact -> (myfact == val)
runMatch host facts (MatchNot matcher) = not $ runMatch host facts matcher
runMatch host facts (MatchOr matchers) =
case find (runMatch host facts) matchers of
Nothing -> False
Just _ -> True
runMatch host facts (MatchAnd matchers) =
null $ [ x | x <- matchers, not $ runMatch host facts x]
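-- A minimal usage sketch (not part of the original module); it assumes Facts
-- is the Data.Map queried via M.lookup above, with OverloadedStrings fixing
-- the key/value literal types:
--
--   runMatch "web01" (M.fromList [("role", "frontend")])
--            (MatchAnd [MatchHost "web01", MatchNot (MatchFact "role" "db")])
--     == True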
|
pyr/warp-agent
|
System/Warp/Match.hs
|
isc
| 792 | 0 | 9 | 148 | 292 | 153 | 139 | 20 | 3 |
module Data.Graphics.SVG where
-- a solution to the SVG-problem posted at http://lpaste.net/4776707050010836992
import Control.Logic.Frege (adjoin) -- http://lpaste.net/111101
import Data.Graphics.BoundingBox -- http://lpaste.net/2865245555871711232
import Data.XHTML -- http://lpaste.net/113385
svgDoctype :: String
svgDoctype = "<!DOCTYPE svg PUBLIC '-//W3C//DTD SVG 1.1//EN' " ++
"'http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd'>"
-- Now it gets fun. We declare our svg-playing field, then we play in that
-- field with whatever shape we're creating
svgField :: Int -> Int -> [Element] -> Element
svgField width height =
svgFromBB (makeBoundingBox (1, 10) (0,0)
(adjoin ((100.0 *) . fromIntegral) (width, height)))
svgFromBB :: BoundingBox -> [Element] -> Element
svgFromBB bb =
let bounds (Bounder (BBinternal (BB _ org end) _ _)) = (org, end)
((startx, starty), (width, height)) = bounds bb
range = unwords (map (show . floor) [startx, starty, width, height]) in
Elt "svg" [Attrib "viewbox" range,
Attrib "xmlns" "http://www.w3.org/2000/svg",
Attrib "version" "1.1"]
. (E (Elt "desc" [] [S "Playin' in da SVG-hood!"]):)
. map E
ell :: Int -> Int -> String -> [Attribute] -> Element
ell xAxis yAxis colour attrs =
Elt "ellipse"
([attr "rx" xAxis, attr "ry" yAxis, Attrib "fill" colour] ++ attrs) []
attr :: Show a => String -> a -> Attribute
attr nm = Attrib nm . show
attr_ :: (Show a, RealFrac a) => String -> a -> Attribute
attr_ nm = attr nm . floor
circ :: Int -> Int -> Int -> String -> [Attribute] -> Element
circ cx cy rad colour attrs =
Elt "circle"
(attr "cx" cx:attr "cy" cy:attr "r" rad:Attrib "fill" colour:attrs)
[]
{-- Here's the example from w3:
<text x="250" y="150"
font-family="Verdana" font-size="55" fill="blue" >
Hello, out there
</text>
So you can add the font and color attributes if you'd like
--}
text, txt :: Point2D -> String -> [Attribute] -> Element
text (x,y) txt attrs =
Elt "text" (attr "x" (floor x):attr "y" (floor y):attrs) [S txt]
font :: [Attribute]
font = [attr "font-size" 14, Attrib "font-family" "Verdana"]
txt (x,y) textme = text (x,y) textme . ((Attrib "fill" "black":font) ++)
line :: Int -> Int -> Int -> Int -> Int -> String -> Element
line x1 y1 x2 y2 sw colour =
Elt "line" (Attrib "stroke" colour:map (uncurry attr)
[("x1",x1),("y1",y1),("x2",x2),("y2",y2),("stroke-width",sw)]) []
rect :: Int -> Int -> Int -> Int -> String -> [Attribute] -> Element
rect orgx orgy width height colour attrs =
Elt "rect" ((Attrib "fill" colour:
map (uncurry attr) [("x", orgx), ("y", orgy),
("width", width), ("height", height)]) ++ attrs) []
-- now if you want to show the oval somewhere other than origin:
trans :: Int -> Int -> [Element] -> Element
trans x y = Elt "g" [Attrib "transform" ("translate(" ++ show x ++
(' ' : show y) ++ ")")] . map E
-- AND THEN show off your ellipse ... or ellipses! ... to the world!
ellipses :: FilePath -> IO ()
ellipses file =
writeFile file $ unlines [ {-- pi, --} svgDoctype,
show $ svgField 20 10 -- in cm
[trans 150 150 [ell 100 100 "blue" []], -- a blue circle
trans 450 100 [ell 50 75 "none" [Attrib "stroke" "green",
attr "stroke-width" 22]],
-- a green thingie
trans 350 270 [ell 100 50 "red" []]]] -- a red oval
-- *Data.SVG> ellipses "scircs.svg" ~> a file with shapes in it
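-- A small extra sketch (not in the original): a labelled rectangle built from
-- the combinators above; it assumes Point2D is a pair of RealFrac coordinates,
-- as pattern-matched by 'text'.
labelledBox :: Element
labelledBox =
  trans 10 10 [ rect 0 0 120 40 "yellow" []
              , txt (10, 28) "hello, SVG" [] ]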
|
geophf/1HaskellADay
|
exercises/HAD/Data/Graphics/SVG.hs
|
mit
| 3,675 | 0 | 15 | 951 | 1,207 | 646 | 561 | 60 | 1 |
-- Problems/Problem001Spec.hs
module Problems.Problem001Spec (main, spec) where
import Test.Hspec
import Problems.Problem001
main :: IO()
main = hspec spec
spec :: Spec
spec = describe "Problem 1" $
it "Should evaluate to 233168" $
p1 `shouldBe` 233168
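-- For reference (not part of the original spec): 233168 is the answer to
-- Project Euler problem 1, the sum of all multiples of 3 or 5 below 1000.
expected :: Int
expected = sum [n | n <- [1 .. 999], n `mod` 3 == 0 || n `mod` 5 == 0]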
|
Sgoettschkes/learning
|
haskell/ProjectEuler/tests/Problems/Problem001Spec.hs
|
mit
| 269 | 0 | 8 | 52 | 73 | 41 | 32 | 9 | 1 |
{-# htermination shows :: Show a => [a] -> String -> String #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_shows_4.hs
|
mit
| 64 | 0 | 2 | 13 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative
import Snap
import Snap.Core
import Snap.Util.FileServe
import Snap.Http.Server
import Snap.Snaplet.STL.View
main :: IO ()
main = serveSnaplet defaultConfig stlAppInit
|
yogeshsajanikar/stl
|
snaplet-stl/stl-view/Main.hs
|
mit
| 311 | 0 | 6 | 91 | 57 | 35 | 22 | 10 | 1 |
import ClassyPrelude hiding (first)
import Prelude (read)
import System.IO.Unsafe
import Data.Char
import N
import Helpers
data T = T Text [T]
file :: Text
file = unsafePerformIO $ readFile "/home/yom/db.txt"
doIt t = do
rt ← use root
note ← newNote [] "blah"
addSub note rt
addInputLines ["other text"]
execute "ed blah"
where
tt = map parseLine $ lines t
parseLine :: Text → (Int, Text)
parseLine s =
let (sp, s2) = span isSpace s
(n, s3) = span isDigit s2
in (length sp `div` 3, drop 2 s3)
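-- A worked example (not in the original): with three-space indentation and an
-- "N. " prefix per line,
--
--   parseLine "   3. foo"  ==  (1, "foo")
--
-- the three leading spaces give indent level 1, span isDigit strips the "3",
-- and drop 2 removes the remaining ". ".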
|
aelve/Jane
|
Import.hs
|
mit
| 555 | 0 | 11 | 146 | 221 | 114 | 107 | -1 | -1 |
{- |
Module      : DataBase.MySQLX.Model
Description : CRUD interface
Copyright   : (c) naoto ogawa, 2017
License     : MIT
Maintainer  :
Stability   : experimental
Portability :

Interface for the X Protocol's Protocol Buffer messages.
-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
module DataBase.MySQLX.Model where
import Prelude as P
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Any.Type as PAT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Any as PA
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Array as PAR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ArrayAny as PARAny
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateOk as PAO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateStart as PAS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Capabilities as PCs
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.CapabilitiesGet as PCG
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.CapabilitiesSet as PCS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Capability as PC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ClientMessages.Type as PCMT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ClientMessages as PCM
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Close as PC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Collection as PCll
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Column as PCol
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnIdentifier as PCI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData.FieldType as PCMDFT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData as PCMD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.CreateView as PCV
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DataModel as PDM
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Delete as PD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DocumentPathItem.Type as PDPIT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DocumentPathItem as PDPI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.DropView as PDV
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error.Severity as PES
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Expr.Type as PET
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Expr as PEx
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.FetchDone as PFD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.FetchDoneMoreOutParams as PFDMOP
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.FetchDoneMoreResultsets as PFDMR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Find as PF
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame.Scope as PFS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.FunctionCall as PFC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Identifier as PI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert.TypedRow as PITR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Insert as PI
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Limit as PL
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ModifyView as PMV
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Object.ObjectField as POF
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Object as PO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ObjectAny.ObjectFieldAny as POFAny
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ObjectAny as POAny
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Open.Condition.ConditionOperation as POCCO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Open.Condition as POC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Open.CtxOperation as POCtx
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Open as POp
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Operator as POpe
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Order.Direction as POD
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Order as PO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Projection as PP
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Reset as PRe
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Row as PR
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar.Octets as PSO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar.String as PSS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar.Type as PST
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Scalar as PS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ServerMessages.Type as PSMT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ServerMessages as PSM
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged.Parameter as PSSCP
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.SessionStateChanged as PSSC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.SessionVariableChanged as PSVC
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.StmtExecute as PSE
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.StmtExecuteOk as PSEO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Update as PU
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.UpdateOperation.UpdateType as PUOUT
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.UpdateOperation as PUO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ViewAlgorithm as PVA
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ViewCheckOption as PVCO
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.ViewSqlSecurity as PVSS
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Warning.Level as PWL
import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Warning as PW
import qualified Com.Mysql.Cj.Mysqlx.Protobuf as P'
-- protocol buffer library
import qualified Text.ProtocolBuffers as PB
import qualified Text.ProtocolBuffers.Basic as PBB
import qualified Text.ProtocolBuffers.Header as PBH
import qualified Text.ProtocolBuffers.TextMessage as PBT
import qualified Text.ProtocolBuffers.WireMessage as PBW
import qualified Text.ProtocolBuffers.Reflections as PBR
-- general, standard library
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.ByteString.Builder
import Data.ByteString.Conversion.To
import qualified Data.Foldable as F
import Data.Int as I
import qualified Data.List as L (find, intercalate)
import Data.Maybe as M
import Data.Sequence as Seq
import Data.String
import Data.Text as T
import Data.Text.Encoding as TE
import Data.Typeable (TypeRep, Typeable, typeRep, typeOf)
import Data.Word as W
import Data.Monoid
import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)
import Control.Monad
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
-- my library
import DataBase.MySQLX.Util
import DataBase.MySQLX.Exception
getMessage :: (MonadThrow m, PBW.Wire a, PBR.ReflectDescriptor a, PBT.TextMsg a, Typeable a) => B.ByteString -> m a
getMessage bs = do
case PBW.messageGet (BL.fromStrict bs) of
Left e -> error e -- throwM $ MessageGetException "foo" (typeOf bs)
Right (w,_) -> return w
getMessageEither :: (PBW.Wire a, PBR.ReflectDescriptor a, PBT.TextMsg a, Typeable a)
=> B.ByteString
-> Either String a
getMessageEither bs = do
case PBW.messageGet (BL.fromStrict bs) of
Left e -> Left e
Right (w,_) -> Right w
--
-- ByteString -> m Model
--
-- mkAny.Type :: PAT .Any.Type
-- mkAny.Type = PB.defaultValue
getAny :: (MonadThrow m) => B.ByteString -> m PA.Any
getAny = getMessage
getArray :: (MonadThrow m) => B.ByteString -> m PAR.Array
getArray = getMessage
getArrayAny :: (MonadThrow m) => B.ByteString -> m PARAny.ArrayAny
getArrayAny = getMessage
getAuthenticateContinue :: (MonadThrow m) => B.ByteString -> m PAC.AuthenticateContinue
getAuthenticateContinue = getMessage
getAuthenticateOk :: (MonadThrow m) => B.ByteString -> m PAO.AuthenticateOk
getAuthenticateOk = getMessage
getAuthenticateStart :: (MonadThrow m) => B.ByteString -> m PAS.AuthenticateStart
getAuthenticateStart = getMessage
getCapabilities :: (MonadThrow m) => B.ByteString -> m PCs.Capabilities
getCapabilities = getMessage
getCapabilitiesGet :: (MonadThrow m) => B.ByteString -> m PCG.CapabilitiesGet
getCapabilitiesGet = getMessage
getCapabilitiesSet :: (MonadThrow m) => B.ByteString -> m PCS.CapabilitiesSet
getCapabilitiesSet = getMessage
getCapability :: (MonadThrow m) => B.ByteString -> m PC.Capability
getCapability = getMessage
-- mkClientMessages.Type :: (MonadThrow m) => B.ByteString -> m PCMT.ClientMessages.Type
-- mkClientMessages.Type = getMessage
getClientMessages :: (MonadThrow m) => B.ByteString -> m PCM.ClientMessages
getClientMessages = getMessage
getClose :: (MonadThrow m) => B.ByteString -> m PC.Close
getClose = getMessage
getCollection :: (MonadThrow m) => B.ByteString -> m PCll.Collection
getCollection = getMessage
getColumn :: (MonadThrow m) => B.ByteString -> m PCol.Column
getColumn = getMessage
getColumnIdentifier :: (MonadThrow m) => B.ByteString -> m PCI.ColumnIdentifier
getColumnIdentifier = getMessage
-- mkColumnMetaData.FieldType :: (MonadThrow m) => B.ByteString -> m PCMD.ColumnMetaData.FieldType
-- mkColumnMetaData.FieldType = getMessage
getColumnMetaData :: (MonadThrow m) => B.ByteString -> m PCMD.ColumnMetaData
getColumnMetaData = getMessage
getCreateView :: (MonadThrow m) => B.ByteString -> m PCV.CreateView
getCreateView = getMessage
-- getDataModel :: (MonadThrow m) => B.ByteString -> m PDM.DataModel
-- getDataModel = getMessage
getDelete :: (MonadThrow m) => B.ByteString -> m PD.Delete
getDelete = getMessage
-- mkDocumentPathItem.Type :: (MonadThrow m) => B.ByteString -> m PDPI.DocumentPathItem.Type
-- mkDocumentPathItem.Type = getMessage
getDocumentPathItem :: (MonadThrow m) => B.ByteString -> m PDPI.DocumentPathItem
getDocumentPathItem = getMessage
getDropView :: (MonadThrow m) => B.ByteString -> m PDV.DropView
getDropView = getMessage
-- mkError.Severity :: (MonadThrow m) => B.ByteString -> m PE.Error.Severity
-- mkError.Severity = getMessage
getError :: (MonadThrow m) => B.ByteString -> m PE.Error
getError = getMessage
-- mkExpr.Type :: PEx.Expr.Type
-- mkExpr.Type = getMessage
getExpr :: (MonadThrow m) => B.ByteString -> m PEx.Expr
getExpr = getMessage
getFetchDone :: (MonadThrow m) => B.ByteString -> m PFD.FetchDone
getFetchDone = getMessage
getFetchDoneMoreOutParams :: (MonadThrow m) => B.ByteString -> m PFDMOP.FetchDoneMoreOutParams
getFetchDoneMoreOutParams = getMessage
getFetchDoneMoreResultsets :: (MonadThrow m) => B.ByteString -> m PFDMR.FetchDoneMoreResultsets
getFetchDoneMoreResultsets = getMessage
getFind :: (MonadThrow m) => B.ByteString -> m PF.Find
getFind = getMessage
-- mkFrame.Scope :: (MonadThrow m) => B.ByteString -> m PFr.Frame.Scope
-- mkFrame.Scope = getMessage
{-
Frame Structure
type local global payload
1 * * Warning
2 * - SessionVariableChanged
3 * - SessionStateChanged
-}
getFrame :: (MonadThrow m) => B.ByteString -> m PFr.Frame
getFrame = getMessage
-- | Warning Frame Type
frame_warning :: W.Word32
frame_warning = 1
-- | Session Variable Changed Frame Type
frame_session_variable_changed :: W.Word32
frame_session_variable_changed = 2
-- | Session State Changed Frame Type
frame_session_state_changed :: W.Word32
frame_session_state_changed = 3
getFramePayload :: (MonadThrow m) => PFr.Frame -> m B.ByteString
getFramePayload frame = do
  case PFr.payload frame of
    Nothing -> throwM $ XProtocolException "payload is Nothing"
    Just p  -> return $ BL.toStrict p
getPayloadWarning :: (MonadThrow m) => PFr.Frame -> m PW.Warning
getPayloadWarning x = getFramePayload x >>= getWarning
getPayloadSessionStateChanged :: (MonadThrow m) => PFr.Frame -> m PSSC.SessionStateChanged
getPayloadSessionStateChanged x = getFramePayload x >>= getSessionStateChanged
getPayloadSessionVariableChanged :: (MonadThrow m) => PFr.Frame -> m PSVC.SessionVariableChanged
getPayloadSessionVariableChanged x = getFramePayload x >>= getSessionVariableChanged
getFunctionCall :: (MonadThrow m) => B.ByteString -> m PFC.FunctionCall
getFunctionCall = getMessage
getIdentifier :: (MonadThrow m) => B.ByteString -> m PI.Identifier
getIdentifier = getMessage
getTypedRow :: (MonadThrow m) => B.ByteString -> m PITR.TypedRow
getTypedRow = getMessage
getInsert :: (MonadThrow m) => B.ByteString -> m PI.Insert
getInsert = getMessage
getLimit :: (MonadThrow m) => B.ByteString -> m PL.Limit
getLimit = getMessage
getModifyView :: (MonadThrow m) => B.ByteString -> m PMV.ModifyView
getModifyView = getMessage
getObjectField :: (MonadThrow m) => B.ByteString -> m POF.ObjectField
getObjectField = getMessage
getObject :: (MonadThrow m) => B.ByteString -> m PO.Object
getObject = getMessage
getOk :: (MonadThrow m) => B.ByteString -> m POk.Ok
getOk = getMessage
-- mkOpen.Condition.ConditionOperation :: (MonadThrow m) => B.ByteString -> m POC.Open.Condition.ConditionOperation
-- mkOpen.Condition.ConditionOperation = getMessage
getCondition :: (MonadThrow m) => B.ByteString -> m POC.Condition
getCondition = getMessage
-- mkOpen.CtxOperation :: (MonadThrow m) => B.ByteString -> m POe.Open.CtxOperation
-- mkOpen.CtxOperation = getMessage
getOpen :: (MonadThrow m) => B.ByteString -> m POp.Open
getOpen = getMessage
getOperator :: (MonadThrow m) => B.ByteString -> m POpe.Operator
getOperator = getMessage
-- mkOrder.Direction :: (MonadThrow m) => B.ByteString -> m POD.Order.Direction
-- mkOrder.Direction = getMessage
getOrder :: (MonadThrow m) => B.ByteString -> m PO.Order
getOrder = getMessage
getProjection :: (MonadThrow m) => B.ByteString -> m PP.Projection
getProjection = getMessage
getReset :: (MonadThrow m) => B.ByteString -> m PRe.Reset
getReset = getMessage
getRow :: (MonadThrow m) => B.ByteString -> m PR.Row
getRow = getMessage
getScalarOctets :: (MonadThrow m) => B.ByteString -> m PSO.Octets
getScalarOctets = getMessage
getScalarString :: (MonadThrow m) => B.ByteString -> m PSS.String
getScalarString = getMessage
-- mkScalar.Type :: (MonadThrow m) => B.ByteString -> m PST.Scalar.Type
-- mkScalar.Type = getMessage
getScalar :: (MonadThrow m) => B.ByteString -> m PS.Scalar
getScalar = getMessage
-- mkServerMessages.Type :: (MonadThrow m) => B.ByteString -> m PSMT.ServerMessages.Type
-- mkServerMessages.Type = getMessage
getServerMessages :: (MonadThrow m) => B.ByteString -> m PSM.ServerMessages
getServerMessages = getMessage
-- mkSessionStateChanged.Parameter :: (MonadThrow m) => B.ByteString -> m PSSCP.SessionStateChanged.Parameter
-- mkSessionStateChanged.Parameter = getMessage
getSessionStateChanged :: (MonadThrow m) => B.ByteString -> m PSSC.SessionStateChanged
getSessionStateChanged = getMessage
getSessionVariableChanged :: (MonadThrow m) => B.ByteString -> m PSVC.SessionVariableChanged
getSessionVariableChanged = getMessage
getStmtExecute :: (MonadThrow m) => B.ByteString -> m PSE.StmtExecute
getStmtExecute = getMessage
getStmtExecuteOk :: (MonadThrow m) => B.ByteString -> m PSEO.StmtExecuteOk
getStmtExecuteOk = getMessage
getUpdate :: (MonadThrow m) => B.ByteString -> m PU.Update
getUpdate = getMessage
-- mkUpdateOperation.UpdateType :: (MonadThrow m) => B.ByteString -> m PUOUT.UpdateOperation.UpdateType
-- mkUpdateOperation.UpdateType = getMessage
getUpdateOperation :: (MonadThrow m) => B.ByteString -> m PUO.UpdateOperation
getUpdateOperation = getMessage
-- getViewAlgorithm :: (MonadThrow m) => B.ByteString -> m PVA.ViewAlgorithm
-- getViewAlgorithm = getMessage
-- getViewCheckOption :: (MonadThrow m) => B.ByteString -> m PVCO.ViewCheckOption
-- getViewCheckOption = getMessage
-- getViewSqlSecurity :: (MonadThrow m) => B.ByteString -> m PVSS.ViewSqlSecurity
-- getViewSqlSecurity = getMessage
-- mkWarning.Level :: (MonadThrow m) => B.ByteString -> m PWL.Warning.Level
-- mkWarning.Level = getMessage
{-
data Warning = Warning{level :: !(P'.Maybe Com.Mysql.Cj.Mysqlx.Protobuf.Warning.Level), code :: !(P'.Word32), msg :: !(P'.Utf8)}
data Level = NOTE | WARNING | ERROR
-}
-- | Make a Warning instance from ByteString.
getWarning :: (MonadThrow m) => B.ByteString -> m PW.Warning
getWarning = getMessage
--
-- Various data -> m Model
--
mkAnyType :: PAT.Type
mkAnyType = PB.defaultValue
{-
Any :: data Type = SCALAR | OBJECT | ARRAY
-}
-- mkAny :: PA.Any
-- mkAny = PB.defaultValue
-- | Make a SCALAR type Any. Don't use this function, use any. (TODO hiding)
mkAnyScalar :: PS.Scalar -> PA.Any
mkAnyScalar x = PB.defaultValue {PA.type' = PAT.SCALAR, PA.scalar = Just x}
-- | Make an OBJECT type Any. Don't use this function, use any. (TODO hiding)
mkAnyObjectAny :: POAny.ObjectAny -> PA.Any
mkAnyObjectAny x = PB.defaultValue {PA.type' = PAT.OBJECT, PA.obj = Just x}
-- | Make an ARRAY type Any. Don't use this function, use any. (TODO hiding)
mkAnyArrayAny :: PARAny.ArrayAny -> PA.Any
mkAnyArrayAny x = PB.defaultValue {PA.type' = PAT.ARRAY, PA.array = Just x}
{-
data Any = Any{
type' :: !(Com.Mysql.Cj.Mysqlx.Protobuf.Any.Type)
, scalar :: !(P'.Maybe Com.Mysql.Cj.Mysqlx.Protobuf.Scalar)
, obj :: !(P'.Maybe Com.Mysql.Cj.Mysqlx.Protobuf.Object)
, array :: !(P'.Maybe Com.Mysql.Cj.Mysqlx.Protobuf.Array)
}
Int, Int64, Word8, Word64, Double, Float, Bool, String, Text, Object, Array,
-}
-- | Make an Any instance.
class Anyable a where
-- | Make an Any instance.
any :: a -> PA.Any
  -- | Make a list of Any instances.
anys :: [a] -> [PA.Any]
anys = P.map DataBase.MySQLX.Model.any
instance Anyable Int where any = mkAnyScalar . scalar
instance Anyable Int64 where any = mkAnyScalar . scalar
instance Anyable Word8 where any = mkAnyScalar . scalar
instance Anyable Word64 where any = mkAnyScalar . scalar
instance Anyable Double where any = mkAnyScalar . scalar
instance Anyable Float where any = mkAnyScalar . scalar
instance Anyable Bool where any = mkAnyScalar . scalar
instance Anyable String where any = mkAnyScalar . scalar
instance Anyable Text where any = mkAnyScalar . scalar
instance Anyable PS.Scalar where any = mkAnyScalar
instance Anyable POAny.ObjectAny where any = mkAnyObjectAny
instance Anyable PARAny.ArrayAny where any = mkAnyArrayAny
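-- | A minimal sketch (not part of the original module): lifting plain values
--   into protocol 'PA.Any' values through the instances above.  'any' is
--   qualified because the unqualified name clashes with Prelude's 'any'.
_exampleAnyValues :: [PA.Any]
_exampleAnyValues =
  [ DataBase.MySQLX.Model.any (1 :: Int)
  , DataBase.MySQLX.Model.any ("hello" :: String)
  , DataBase.MySQLX.Model.any True
  ]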
-- | Make an ArrayAny instance.
mkArrayAny :: [PA.Any] -> PARAny.ArrayAny
mkArrayAny xs = PARAny.ArrayAny {value = Seq.fromList xs}
-- | Make an Array instance.
mkArray :: [PEx.Expr] -> PAR.Array
mkArray xs = PAR.Array {value = Seq.fromList xs}
-- | Make an authenticate continue instance.
mkAuthenticateContinue :: (ToByteString a, ToByteString b, ToByteString c, ToByteString d)
=> a -- ^ Database name
-> b -- ^ User name
-> c -- ^ Salt, which is given by MySQL Server.
-> d -- ^ Password
-> PAC.AuthenticateContinue
mkAuthenticateContinue dbname username salt pw = PB.defaultValue {
PAC.auth_data = toLazyByteString $
builder dbname
<> builder ("\x00" :: String)
<> builder username
<> builder ("\x00*" :: String)
<> builder (toHex' $ getPasswordHash salt pw)
}
mkAuthenticateOk :: (MonadThrow m) => B.ByteString -> m PAO.AuthenticateOk
mkAuthenticateOk = getMessage
-- | Make an authenticate start instance.
mkAuthenticateStart :: (ToByteString a)
=> a -- ^ User name
-> PAS.AuthenticateStart
mkAuthenticateStart user = PB.defaultValue {
PAS.mech_name = PBH.uFromString "MYSQL41"
, PAS.auth_data = Just $ toByteString user
}
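-- | A minimal sketch (not part of the original module): the first MYSQL41
--   handshake message for a hypothetical user; the user name is a placeholder.
_exampleAuthStart :: PAS.AuthenticateStart
_exampleAuthStart = mkAuthenticateStart ("app_user" :: String)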
mkCapabilities :: [PC.Capability] -> PCs.Capabilities
mkCapabilities xs = PCs.Capabilities (Seq.fromList xs)
mkCapabilitiesGet :: PCG.CapabilitiesGet
mkCapabilitiesGet = PB.defaultValue
mkCapabilitiesSet :: PCS.CapabilitiesSet
mkCapabilitiesSet = PB.defaultValue
-- data Capability = Capability{name :: !(P'.Utf8), value :: !(Com.Mysql.Cj.Mysqlx.Protobuf.Any)}
-- mkCapability :: PC.Capability
-- mkCapability = PB.defaultValue
mkCapability val any = PC.Capability (PBH.uFromString val) any
mkClientMessagesType :: PCMT.Type
mkClientMessagesType = PB.defaultValue
mkClientMessages :: PCM.ClientMessages
mkClientMessages = PB.defaultValue
mkClose :: PC.Close
mkClose = PB.defaultValue
{- Collection -}
-- | Make a collection instance.
mkCollection ::
String -- ^ schema
-> String -- ^ collection name
-> PCll.Collection
mkCollection schema name = PCll.Collection (PBH.uFromString name) (Just $ PBH.uFromString schema)
-- | Make a collection instance without a schema name.
mkCollection' ::
String -- ^ collection name
-> PCll.Collection
mkCollection' name = PCll.Collection (PBH.uFromString name) Nothing
mkColumn :: PCol.Column
mkColumn = PB.defaultValue
{- ColumnIdentifier -}
mkColumnIdentifier :: PCI.ColumnIdentifier
mkColumnIdentifier = PB.defaultValue
columnIdentifierNameDocumentPahtItem :: String -> [PDPI.DocumentPathItem] -> PCI.ColumnIdentifier
columnIdentifierNameDocumentPahtItem nm docpathItems = addColumnIdentifierName (columnIdentifierDocumentPahtItem docpathItems) nm
columnIdentifierDocumentPahtItem :: [PDPI.DocumentPathItem] -> PCI.ColumnIdentifier
columnIdentifierDocumentPahtItem docpathItems = PB.defaultValue {PCI.document_path = Seq.fromList docpathItems}
columnIdentifierName :: String -> PCI.ColumnIdentifier
columnIdentifierName x = PB.defaultValue {PCI.name = Just $ PBH.uFromString x}
addColumnIdentifierName :: PCI.ColumnIdentifier -> String -> PCI.ColumnIdentifier
addColumnIdentifierName iden x = iden {PCI.name = Just $ PBH.uFromString x}
columnIdentifierTableName :: String -> PCI.ColumnIdentifier
columnIdentifierTableName x = PB.defaultValue {PCI.table_name = Just $ PBH.uFromString x}
addColumnIdentifierTableName :: PCI.ColumnIdentifier -> String -> PCI.ColumnIdentifier
addColumnIdentifierTableName iden x = iden {PCI.table_name = Just $ PBH.uFromString x}
columnIdentifierSchemaName :: String -> PCI.ColumnIdentifier
columnIdentifierSchemaName x = PB.defaultValue {PCI.schema_name = Just $ PBH.uFromString x}
addColumnIdentifierSchemaName :: PCI.ColumnIdentifier -> String -> PCI.ColumnIdentifier
addColumnIdentifierSchemaName iden x = iden {PCI.schema_name = Just $ PBH.uFromString x}
addColumnIdentifier' :: PCI.ColumnIdentifier -> String -> PCI.ColumnIdentifier
addColumnIdentifier' iden name = iden {
PCI.name = Just $ PBH.uFromString name
}
columnIdentifier'' :: String -> String -> PCI.ColumnIdentifier
columnIdentifier'' table name = PB.defaultValue {
PCI.table_name = Just $ PBH.uFromString table
,PCI.name = Just $ PBH.uFromString name
}
addColumnIdentifier'' :: PCI.ColumnIdentifier -> String -> String -> PCI.ColumnIdentifier
addColumnIdentifier'' iden table name = iden {
PCI.table_name = Just $ PBH.uFromString table
,PCI.name = Just $ PBH.uFromString name
}
columnIdentifier''' :: String -> String -> String -> PCI.ColumnIdentifier
columnIdentifier''' schema table name = PB.defaultValue {
PCI.schema_name = Just $ PBH.uFromString schema
,PCI.table_name = Just $ PBH.uFromString table
,PCI.name = Just $ PBH.uFromString name
}
addColumnIdentifier''' :: PCI.ColumnIdentifier -> String -> String -> String -> PCI.ColumnIdentifier
addColumnIdentifier''' iden schema table name = iden {
PCI.schema_name = Just $ PBH.uFromString schema
,PCI.table_name = Just $ PBH.uFromString table
,PCI.name = Just $ PBH.uFromString name
}
mkColumnMetaDataFieldType :: PCMDFT.FieldType
mkColumnMetaDataFieldType = PB.defaultValue
mkColumnMetaData :: PCMD.ColumnMetaData
mkColumnMetaData = PB.defaultValue
{-
data ColumnMetaData = ColumnMetaData{type' :: !(Com.Mysql.Cj.Mysqlx.Protobuf.ColumnMetaData.FieldType),
name :: !(P'.Maybe P'.ByteString), original_name :: !(P'.Maybe P'.ByteString),
table :: !(P'.Maybe P'.ByteString), original_table :: !(P'.Maybe P'.ByteString),
schema :: !(P'.Maybe P'.ByteString), catalog :: !(P'.Maybe P'.ByteString),
collation :: !(P'.Maybe P'.Word64), fractional_digits :: !(P'.Maybe P'.Word32),
length :: !(P'.Maybe P'.Word32), flags :: !(P'.Maybe P'.Word32),
content_type :: !(P'.Maybe P'.Word32)}
-}
getColumnType :: PCMD.ColumnMetaData -> PCMDFT.FieldType
getColumnType = PCMD.type'
getColumnName :: PCMD.ColumnMetaData -> T.Text
getColumnName = _maybeByteString2Text . PCMD.name
getColumnOriginalName :: PCMD.ColumnMetaData -> T.Text
getColumnOriginalName = _maybeByteString2Text . PCMD.original_name
getColumnTable :: PCMD.ColumnMetaData -> T.Text
getColumnTable = _maybeByteString2Text . PCMD.table
getColumnOriginalTable :: PCMD.ColumnMetaData -> T.Text
getColumnOriginalTable = _maybeByteString2Text . PCMD.original_table
getColumnSchema :: PCMD.ColumnMetaData -> T.Text
getColumnSchema = _maybeByteString2Text . PCMD.schema
getColumnCatalog :: PCMD.ColumnMetaData -> T.Text
getColumnCatalog = _maybeByteString2Text . PCMD.catalog
getColumnCollation :: PCMD.ColumnMetaData -> W.Word64
getColumnCollation meta = fromMaybe 0 $ PCMD.collation meta
getColumnFractionalDigits :: PCMD.ColumnMetaData -> W.Word32
getColumnFractionalDigits meta = fromMaybe 0 $ PCMD.fractional_digits meta
getColumnLength :: PCMD.ColumnMetaData -> W.Word32
getColumnLength meta = fromMaybe 0 $ PCMD.length meta
getColumnFlags :: PCMD.ColumnMetaData -> W.Word32
getColumnFlags meta = fromMaybe 0 $ PCMD.flags meta
getColumnContentType :: PCMD.ColumnMetaData -> W.Word32
getColumnContentType meta = fromMaybe 0 $ PCMD.content_type meta
_maybeByteString2Text :: Maybe BL.ByteString -> T.Text
_maybeByteString2Text = TE.decodeUtf8 . BL.toStrict . M.fromJust
isSint :: PCMD.ColumnMetaData -> Bool
isSint = (==) PCMDFT.SINT . PCMD.type'
isUint :: PCMD.ColumnMetaData -> Bool
isUint = (==) PCMDFT.UINT . PCMD.type'
isDouble :: PCMD.ColumnMetaData -> Bool
isDouble = (==) PCMDFT.DOUBLE . PCMD.type'
isFloat :: PCMD.ColumnMetaData -> Bool
isFloat = (==) PCMDFT.FLOAT . PCMD.type'
isBytes :: PCMD.ColumnMetaData -> Bool
isBytes = (==) PCMDFT.BYTES . PCMD.type'
isTime :: PCMD.ColumnMetaData -> Bool
isTime = (==) PCMDFT.TIME . PCMD.type'
isDatetime :: PCMD.ColumnMetaData -> Bool
isDatetime = (==) PCMDFT.DATETIME . PCMD.type'
isSet :: PCMD.ColumnMetaData -> Bool
isSet = (==) PCMDFT.SET . PCMD.type'
isEnum :: PCMD.ColumnMetaData -> Bool
isEnum = (==) PCMDFT.ENUM . PCMD.type'
isBit :: PCMD.ColumnMetaData -> Bool
isBit = (==) PCMDFT.BIT . PCMD.type'
isDecimal :: PCMD.ColumnMetaData -> Bool
isDecimal = (==) PCMDFT.DECIMAL . PCMD.type'
_eqlField :: (PCMD.ColumnMetaData -> Maybe BL.ByteString) -> PCMD.ColumnMetaData -> String -> Bool
_eqlField getter meta val =
case getter meta of
Nothing -> False
Just y -> y == (BL.fromStrict $ s2bs val)
eqlCMDName :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDName colmeta name = _eqlField PCMD.name colmeta name
eqlCMDOriginalName :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDOriginalName colmeta name = _eqlField PCMD.original_name colmeta name
eqlCMDTable :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDTable colmeta name = _eqlField PCMD.table colmeta name
eqlCMDOriginalTable :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDOriginalTable colmeta name = _eqlField PCMD.original_table colmeta name
eqlCMDSchema :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDSchema colmeta name = _eqlField PCMD.schema colmeta name
eqlCMDCatalog :: PCMD.ColumnMetaData -> String -> Bool
eqlCMDCatalog colmeta name = _eqlField PCMD.catalog colmeta name
-- // BYTES 0x0001 GEOMETRY (WKB encoding)
-- // BYTES 0x0002 JSON (text encoding)
-- // BYTES 0x0003 XML (text encoding)
data ColumnContentType = GEOMETRY | JSON | XML | NONE
getContentType :: PCMD.ColumnMetaData -> ColumnContentType
getContentType meta =
case PCMD.content_type meta of
Nothing -> NONE
Just x ->
case x of
1 -> GEOMETRY
2 -> JSON
3 -> XML
_ -> NONE
mkCreateView :: PCV.CreateView
mkCreateView = PB.defaultValue
mkDataModel :: PDM.DataModel
mkDataModel = PB.defaultValue
mkDelete :: PD.Delete
mkDelete = PB.defaultValue
mkDocumentPathItemType :: PDPIT.Type
mkDocumentPathItemType = PB.defaultValue
mkDoubleAsterisk :: PDPI.DocumentPathItem
mkDoubleAsterisk = PB.defaultValue { PDPI.type' = PDPIT.DOUBLE_ASTERISK }
mkMemberAsterisk = PB.defaultValue { PDPI.type' = PDPIT.MEMBER_ASTERISK }
mkArrayIndexAsterisk = PB.defaultValue { PDPI.type' = PDPIT.ARRAY_INDEX_ASTERISK }
mkArrayIndex idx = PB.defaultValue { PDPI.type' = PDPIT.ARRAY_INDEX , PDPI.index = Just $ (idx :: Word32) }
mkMember val = PB.defaultValue { PDPI.type' = PDPIT.MEMBER , PDPI.value = Just $ PBH.uFromString val }
{- DocumentPathItem -}
mkDocumentPathItem :: String -> PDPI.DocumentPathItem
mkDocumentPathItem ('*':'*':_ ) = PB.defaultValue { PDPI.type' = PDPIT.DOUBLE_ASTERISK }
mkDocumentPathItem ('*':_ ) = PB.defaultValue { PDPI.type' = PDPIT.MEMBER_ASTERISK }
mkDocumentPathItem ('[':'*':']':_ ) = PB.defaultValue { PDPI.type' = PDPIT.ARRAY_INDEX_ASTERISK }
mkDocumentPathItem ('[':xs ) = PB.defaultValue { PDPI.type' = PDPIT.ARRAY_INDEX , PDPI.index = Just $ (read (P.init xs) :: Word32) }
mkDocumentPathItem (x ) = PB.defaultValue { PDPI.type' = PDPIT.MEMBER , PDPI.value = Just $ PBH.uFromString x }
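-- | A minimal sketch (not part of the original module) of how the string
--   forms above are parsed: "**" becomes DOUBLE_ASTERISK, "[*]" becomes
--   ARRAY_INDEX_ASTERISK, "[3]" becomes ARRAY_INDEX 3 and "name" becomes a
--   MEMBER item.
_exampleDocPathItems :: [PDPI.DocumentPathItem]
_exampleDocPathItems = P.map mkDocumentPathItem ["**", "[*]", "[3]", "name"]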
mkDropView :: PDV.DropView
mkDropView = PB.defaultValue
mkErrorSeverity :: PES.Severity
mkErrorSeverity = PB.defaultValue
mkError :: PE.Error
mkError = PB.defaultValue
mkExprType :: PET.Type
mkExprType = PB.defaultValue
{- Expr
Type = IDENT | LITERAL | VARIABLE | FUNC_CALL | OPERATOR | PLACEHOLDER | OBJECT | ARRAY
-}
mkExpr :: PEx.Expr
mkExpr = PB.defaultValue
-- | Make an Expr instance and retrieve a value from an Expr.
class Exprable a where
-- | Make an Expr instance.
expr :: a -> PEx.Expr
-- | Retrieve a value from Expr safely.
exprVal :: PEx.Expr -> Maybe a
-- exprVal = error "not implemented. Submit an issue." -- undefined -- TODO implementations
-- | Retrieve a value from Expr.
exprVal' :: PEx.Expr -> a
exprVal' = M.fromJust . exprVal
-- LITERAL
instance Exprable Int where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Int64 where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Word8 where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Word64 where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Double where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Float where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable Bool where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable String where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable T.Text where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable BL.ByteString where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
instance Exprable B.ByteString where
expr = exprLiteral . scalar
exprVal = join . fmap getScalarVal . PEx.literal
-- FUNC_CALL
instance Exprable PFC.FunctionCall where
expr a = PB.defaultValue {PEx.type' = PET.FUNC_CALL , PEx.function_call = Just a}
exprVal = PEx.function_call
-- OBJECT
instance Exprable PO.Object where
expr a = PB.defaultValue {PEx.type' = PET.OBJECT , PEx.object = Just a}
exprVal = PEx.object
-- ObjectField -> OBJECT
instance Exprable [POF.ObjectField] where
expr as = expr $ setObject as
exprVal = fmap F.toList . fmap PO.fld . PEx.object
-- ARRAY
instance Exprable PAR.Array where
expr a = PB.defaultValue {PEx.type' = PET.ARRAY , PEx.array = Just a}
exprVal = PEx.array
-- | Make a type LITERAL Expr.
exprLiteral :: PS.Scalar -> PEx.Expr
exprLiteral x = PB.defaultValue {PEx.type' = PET.LITERAL, PEx.literal = Just $ x}
-- | Make a Null Expr.
mkNullExpr :: PEx.Expr
mkNullExpr = exprLiteral mkNullScalar
exprColumnIdentifier :: PCI.ColumnIdentifier -> PEx.Expr
exprColumnIdentifier colIdent = PB.defaultValue {PEx.type' = PET.IDENT, PEx.identifier = Just $ colIdent}
exprPlaceholder :: Int -> PEx.Expr
exprPlaceholder pos = PB.defaultValue {PEx.type' = PET.PLACEHOLDER, PEx.position = Just $ fromIntegral pos }
-- | 1st placeholder.
ph1 = exprPlaceholder 0
-- | 2nd placeholder.
ph2 = exprPlaceholder 1
-- | 3rd placeholder.
ph3 = exprPlaceholder 2
-- | 4th placeholder.
ph4 = exprPlaceholder 3
-- | 5th placeholder.
ph5 = exprPlaceholder 4
-- | 6th placeholder.
ph6 = exprPlaceholder 5
-- | 7th placeholder.
ph7 = exprPlaceholder 6
-- | 8th placeholder.
ph8 = exprPlaceholder 7
-- | 9th placeholder.
ph9 = exprPlaceholder 8
exprIdentifierName :: String -> PEx.Expr
exprIdentifierName = exprColumnIdentifier . columnIdentifierName
exprDocumentPathItem :: String -> PEx.Expr
exprDocumentPathItem docPath = exprColumnIdentifier $ columnIdentifierDocumentPahtItem [mkDocumentPathItem docPath]
-- | Alias of exprDocumentPathItem
path = exprDocumentPathItem
mkFetchDone :: PFD.FetchDone
mkFetchDone = PB.defaultValue
mkFetchDoneMoreOutParams :: PFDMOP.FetchDoneMoreOutParams
mkFetchDoneMoreOutParams = PB.defaultValue
mkFetchDoneMoreResultsets :: PFDMR.FetchDoneMoreResultsets
mkFetchDoneMoreResultsets = PB.defaultValue
mkFind :: PF.Find
mkFind = PB.defaultValue
mkFrameScope :: PFS.Scope
mkFrameScope = PB.defaultValue
mkFrame :: PFr.Frame
mkFrame = PB.defaultValue
{- FunctionCall -}
-- | Make a function call instance.
mkFunctionCall ::
String -- ^ function name
-> [PEx.Expr] -- ^ parameters
-> PFC.FunctionCall
mkFunctionCall name params = PFC.FunctionCall (mkIdentifier' name) (Seq.fromList params)
mkFunctionCall' ::
String -- ^ function name
-> String -- ^ schema name
-> [PEx.Expr] -- ^ parameters
-> PFC.FunctionCall
mkFunctionCall' name schema params = PFC.FunctionCall (mkIdentifier name schema) (Seq.fromList params)
{- Identifier -}
mkIdentifier :: String -> String -> PI.Identifier
mkIdentifier x schema = PI.Identifier (PBH.uFromString x) (Just $ PBH.uFromString schema)
mkIdentifier' :: String -> PI.Identifier
mkIdentifier' x = PI.Identifier (PBH.uFromString x) Nothing
{- TypedRow -}
mkTypedRow :: PITR.TypedRow
mkTypedRow = PB.defaultValue
-- | Make a TypedRow instance which has multiple Exprs.
mkExpr2TypedRow :: [PEx.Expr] -> PITR.TypedRow
mkExpr2TypedRow fields = PITR.TypedRow $ Seq.fromList fields
-- | Make a TypedRow instance which has one Expr.
mkExpr2TypedRow' :: PEx.Expr -> PITR.TypedRow
mkExpr2TypedRow' field = PITR.TypedRow $ Seq.singleton field
{- Insert -}
mkInsert :: PI.Insert
mkInsert = PB.defaultValue
fmapInsertRow :: (PITR.TypedRow -> PITR.TypedRow) -> PI.Insert -> PI.Insert
fmapInsertRow f ins = ins {PI.row = fmap f (PI.row ins)}
mkInsertX :: String -> String -> String -> PI.Insert
mkInsertX schema table json = PB.defaultValue {
PI.collection = mkCollection schema table
,PI.data_model = Just PDM.DOCUMENT
,PI.projection = Seq.empty
,PI.row = Seq.singleton $ mkExpr2TypedRow' $ expr json
,PI.args = Seq.empty
}
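-- | A minimal sketch (not part of the original module): a DOCUMENT-model
--   insert of one JSON document into collection "todo" of schema "test" via
--   'mkInsertX' above; schema, collection and JSON text are placeholders.
_exampleInsertTodo :: PI.Insert
_exampleInsertTodo = mkInsertX "test" "todo" "{\"title\": \"buy milk\"}"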
-- | Make a Limit instance.
mkLimit :: Int -> Int -> PL.Limit
mkLimit num offset = PL.Limit (fromIntegral num) (Just $ fromIntegral offset)
-- | Make a Limit instance.
mkLimit' :: Int -> PL.Limit
mkLimit' num = PL.Limit (fromIntegral num) Nothing
mkModifyView :: PMV.ModifyView
mkModifyView = PB.defaultValue
{- ObjectFieldAny -}
mkObjectFieldAny :: String -> PA.Any -> POFAny.ObjectFieldAny
mkObjectFieldAny k a = POFAny.ObjectFieldAny {POFAny.key = (PBH.uFromString k), POFAny.value = a}
{- ObjectField -}
mkObjectField :: String -> PEx.Expr -> POF.ObjectField
mkObjectField k a = POF.ObjectField {POF.key = (PBH.uFromString k), POF.value = a}
{- Object -}
mkObject :: PO.Object
mkObject = PB.defaultValue
setObject :: [POF.ObjectField] -> PO.Object
setObject xs = PO.Object $ Seq.fromList xs
{- Object -}
mkObjectAny :: POAny.ObjectAny
mkObjectAny = PB.defaultValue
setObjectAny :: [POFAny.ObjectFieldAny] -> POAny.ObjectAny
setObjectAny xs = POAny.ObjectAny $ Seq.fromList xs
mkOk :: POk.Ok
mkOk = PB.defaultValue
-- mkOpenConditionOperation :: POCCO.ConditionOperation
-- mkOpenConditionOperation = PB.defaultValue
mkCondition :: POC.Condition
mkCondition = PB.defaultValue
{-
see https://github.com/mysql/mysql-server/blob/5.7/rapid/plugin/x/src/expect.cc
-}
-- | Set the condition's op to EXPECT_OP_SET (set it if unset, overwrite it if already set).
mkCondtinonOpSet :: POC.Condition -> POC.Condition
mkCondtinonOpSet condition = condition {POC.op = Just $ POCCO.EXPECT_OP_SET}
-- | Unset the condition.
mkCondtinonOpUnset :: POC.Condition -> POC.Condition
mkCondtinonOpUnset condition = condition {POC.op = Just $ POCCO.EXPECT_OP_UNSET}
-- | Make NO_ERROR Condition.
mkConditionNoError :: POC.Condition
mkConditionNoError = PB.defaultValue {POC.condition_key = condition_no_error}
-- -- | Make NO_ERROR Condition Unset.
-- mkConditionNoErrorUnset :: POC.Condition
-- mkConditionNoErrorUnset = mkCondtinonOpSet $ PB.defaultValue {POC.condition_key = condition_no_error}
--
condition_no_error = 1 :: Word32
condition_schema_version = 2 :: Word32
condition_gtid_executed_contains = 3 :: Word32
condition_gtid_wait_less_than_ms = 4 :: Word32
-- mkOpenCtxOperation :: POCtx.CtxOperation
-- mkOpenCtxOperation = PB.defaultValue
{-
data Open = Open{op :: !(P'.Maybe CtxOperation), cond :: !(P'.Seq Condition)}
data CtxOperation = EXPECT_CTX_COPY_PREV | EXPECT_CTX_EMPTY
-}
mkOpen :: POp.Open
mkOpen = PB.defaultValue
-- | Expectation
mkExpectCtxCopyPrev :: POp.Open
mkExpectCtxCopyPrev = POp.Open (Just POCtx.EXPECT_CTX_COPY_PREV) Seq.empty
-- | +No Error Expectation
mkExpectNoError :: POp.Open
mkExpectNoError = POp.Open (Just POCtx.EXPECT_CTX_EMPTY) (Seq.singleton mkConditionNoError)
-- | -No Error Expectation (Don't use No Error explicitly)
mkExpectUnset :: POp.Open
mkExpectUnset = POp.Open (Just POCtx.EXPECT_CTX_EMPTY) (Seq.empty)
{-Operator -}
mkOperator :: String -> POpe.Operator
mkOperator op = POpe.Operator {POpe.name = PB.fromString op, POpe.param = Seq.empty}
-- Unary
-- * ``!``
-- * ``sign_plus``
-- * ``sign_minus``
-- * ``~``
xPlus :: PEx.Expr -> PEx.Expr
xPlus x = undefined
-- Using special representation, with more than 2 params TODO
-- * ``in`` (param[0] IN (param[1], param[2], ...))
-- * ``not_in`` (param[0] NOT IN (param[1], param[2], ...))
mkOperatorNot :: POpe.Operator -> POpe.Operator
mkOperatorNot ope = ope {POpe.name = preUtf8 "not_" (POpe.name ope)}
xIs :: PEx.Expr -> PEx.Expr -> PEx.Expr
xIs x1 x2 = multiaryOperator mkOperatorIs [x1, x2]
xIsNot :: PEx.Expr -> PEx.Expr -> PEx.Expr
xIsNot x1 x2 = multiaryOperator mkOperatorIsNot [x1, x2]
mkOperatorIn = mkOperator "in"
mkOperatorNotIn = mkOperatorNot mkOperatorIn
xIn :: PEx.Expr -> [PEx.Expr] -> PEx.Expr
xIn iden params = multiaryOperator mkOperatorIn (iden : params)
xNotIn :: PEx.Expr -> [PEx.Expr] -> PEx.Expr
xNotIn iden params = multiaryOperator mkOperatorNotIn (iden : params)
mkOperatorBetween = mkOperator "between"
mkOperatorNotBetween = mkOperatorNot mkOperatorBetween
xBetween :: PEx.Expr -> PEx.Expr -> PEx.Expr -> PEx.Expr
xBetween x min max = multiaryOperator mkOperatorBetween [x, min, max]
xNotBetween :: PEx.Expr -> PEx.Expr -> PEx.Expr -> PEx.Expr
xNotBetween x min max = multiaryOperator mkOperatorNotBetween [x, min, max]
xCast :: [PEx.Expr] -> PEx.Expr
xCast args = multiaryOperator (mkOperator "cast") args
-- Ternary TODO
-- Units for date_add/date_sub TODO
-- Types for cast TODO
-- Unary
-- * ``!``
-- * ``sign_plus``
-- * ``sign_minus``
-- * ``~``
mkOperatorBang = mkOperator "!" -- TODO type signature
mkOperatorSignPlus = mkOperator "+"
mkOperatorSignMinus = mkOperator "-"
mkOperatorTilda = mkOperator "~"
mkSingleOperator ope x = multiaryOperator (mkOperator ope) [x]
(@!) x = multiaryOperator mkOperatorBang [x]
(@+) x = multiaryOperator mkOperatorSignPlus [x]
(@-) x = multiaryOperator mkOperatorSignMinus [x]
(@~) x = multiaryOperator mkOperatorTilda [x]
mkBinaryOperator :: String -> PEx.Expr -> PEx.Expr -> PEx.Expr
mkBinaryOperator = binaryOperator . mkOperator
-- Binary
mkOperatorAnd = mkOperator "&&" -- TODO type signature
mkOperatorOr = mkOperator "||"
mkOperatorXor = mkOperator "xor"
mkOperatorEq = mkOperator "=="
mkOperatorNotEq = mkOperator "!="
mkOperatorGt = mkOperator ">"
mkOperatorGte = mkOperator ">="
mkOperatorSt = mkOperator "<"
mkOperatorSte = mkOperator "<="
mkOperatorShiftL = mkOperator "<<"
mkOperatorShiftR = mkOperator ">>"
mkOperatorPlus = mkOperator "+"
mkOperatorMinus = mkOperator "-"
mkOperatorMultpl = mkOperator "*"
mkOperatorDivid = mkOperator "/"
mkOperatorRem = mkOperator "%"
mkOperatorIs = mkOperator "is"
mkOperatorIsNot = mkOperator "is_not"
mkOperatorReg = mkOperator "regexp"
mkOperatorNotReg = mkOperator "not_regexp"
mkOperatorLike = mkOperator "like"
mkOperatorNotLike = mkOperator "not_like"
mkOperatorCast = mkOperator "cast"
(@&&) :: PEx.Expr -> PEx.Expr -> PEx.Expr
(@&&) = binaryOperator mkOperatorAnd
(@||) = binaryOperator mkOperatorOr -- TODO type signature
(xor) = binaryOperator mkOperatorXor
(@==) = binaryOperator mkOperatorEq
(@!=) = binaryOperator mkOperatorNotEq
(@>) = binaryOperator mkOperatorGt
(@>=) = binaryOperator mkOperatorGte
(@<) = binaryOperator mkOperatorSt
(@<=) = binaryOperator mkOperatorSte
(@<<) = binaryOperator mkOperatorShiftL
(@??) = binaryOperator mkOperatorShiftR
(@@+) = binaryOperator mkOperatorPlus
(@@-) = binaryOperator mkOperatorMinus
(@*) = binaryOperator mkOperatorMultpl
(@/) = binaryOperator mkOperatorDivid
(@%) = binaryOperator mkOperatorRem
(is) = binaryOperator mkOperatorIs
(is_not) = binaryOperator mkOperatorIsNot
(regexp) = binaryOperator mkOperatorReg
(not_regexp) = binaryOperator mkOperatorNotReg
(like) = binaryOperator mkOperatorLike
(not_like) = binaryOperator mkOperatorNotLike
(cast) = binaryOperator mkOperatorCast
binaryOperator :: POpe.Operator -> PEx.Expr -> PEx.Expr -> PEx.Expr
binaryOperator ope a b = multiaryOperator ope [a, b]
multiaryOperator :: POpe.Operator -> [PEx.Expr] -> PEx.Expr
multiaryOperator ope xs = PB.defaultValue { PEx.type' = PET.OPERATOR, PEx.operator = Just $ ope {POpe.param = Seq.fromList xs } }
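-- | A minimal sketch (not part of the original module): the filter expression
--   "age > 30 && name == 'foo'" over document paths, built from 'path',
--   'expr' and the operator aliases above.  The literal types are annotated
--   because 'expr' is class-polymorphic.
_exampleFilterExpr :: PEx.Expr
_exampleFilterExpr =
  (path "age" @> expr (30 :: Int)) @&& (path "name" @== expr ("foo" :: String))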
mkOrderDirection :: POD.Direction
mkOrderDirection = PB.defaultValue
mkOrder :: PO.Order
mkOrder = PB.defaultValue
{- Projection -}
mkProjection :: PEx.Expr -> String -> PP.Projection
mkProjection expr alias = PP.Projection {PP.source = expr, PP.alias = Just $ PBH.uFromString alias}
mkProjection' :: PEx.Expr -> PP.Projection
mkProjection' expr = PP.Projection {PP.source = expr, PP.alias = Nothing}
mkReset :: PRe.Reset
mkReset = PB.defaultValue
mkRow :: PR.Row
mkRow = PB.defaultValue
{- Octets -}
mkScalarOctets :: PSO.Octets
mkScalarOctets = PB.defaultValue
-- | Make a scalar of Octet type from Lazy ByteString
scalarOctets :: BL.ByteString -> PSO.Octets
scalarOctets x = PB.defaultValue {PSO.value = x}
-- | Make a scalar of Octet type from strict ByteString
scalarOctets' :: B.ByteString -> PSO.Octets
scalarOctets' x = PB.defaultValue {PSO.value = BL.fromStrict x}
{- String -}
mkScalarString :: PSS.String
mkScalarString = PB.defaultValue
scalarString :: String -> PSS.String
scalarString x = PB.defaultValue {PSS.value = PBH.pack x}
mkScalarType :: PST.Type
mkScalarType = PB.defaultValue
{-
Scalar :: data Type = V_SINT | V_UINT | V_NULL | V_OCTETS | V_DOUBLE | V_FLOAT | V_BOOL | V_STRING
-}
mkScalar :: PS.Scalar
mkScalar = PB.defaultValue
-- | Make a Null.
mkNullScalar :: PS.Scalar
mkNullScalar = PB.defaultValue {PS.type' = PST.V_NULL}
-- | Make a Scalar instance and retrieve a value from a Scalar.
class Scalarable x where
  -- | Make a Scalar instance.
scalar :: x -> PS.Scalar
-- | Retrieve a value from Scalar safely.
getScalarVal :: PS.Scalar -> Maybe x
  -- | Retrieve a value from Scalar; an exception may occur.
getScalarVal' :: (MonadIO m, MonadThrow m) => PS.Scalar -> m x
-- | Retrieve a value from Scalar unsafely.
getScalarVal'' :: PS.Scalar -> x
getScalarVal'' = M.fromJust . getScalarVal
-- | internal use only (TODO hiding)
_getScalarVal' :: (MonadIO m, MonadThrow m)
=> PST.Type
-> (PS.Scalar -> Maybe a)
-> (a -> b)
-> String
-> PS.Scalar
-> m b
_getScalarVal' t func trans info scl =
if PS.type' scl == t then
case func scl of
Just x -> return $ trans x
Nothing -> throwM $ XProtocolException $ info ++ " value is Nothing"
else
throwM $ XProtocolException $ F.concat ["type of scalar value is not ", info, ", actually ", show $ PS.type' scl]
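-- | A minimal sketch (not part of the original module): round-tripping an Int
--   through 'PS.Scalar' with the instances below; the result type annotation
--   selects the instance because 'getScalarVal' is return-type polymorphic.
_exampleScalarRoundTrip :: Maybe Int
_exampleScalarRoundTrip = getScalarVal (scalar (7 :: Int))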
instance Scalarable Int where
scalar x = PB.defaultValue {PS.type' = PST.V_SINT , PS.v_signed_int = Just $ fromIntegral x}
getScalarVal PS.Scalar{..} = fromIntegral <$> v_signed_int
getScalarVal' x = _getScalarVal' PST.V_SINT PS.v_signed_int fromIntegral "V_SINT" x
instance Scalarable I.Int64 where
scalar x = PB.defaultValue {PS.type' = PST.V_SINT , PS.v_signed_int = Just x}
getScalarVal PS.Scalar{..} = v_signed_int
getScalarVal' x = _getScalarVal' PST.V_SINT PS.v_signed_int id "V_SINT" x
instance Scalarable W.Word8 where
scalar x = PB.defaultValue {PS.type' = PST.V_UINT , PS.v_unsigned_int = Just $ fromIntegral x}
getScalarVal PS.Scalar{..} = fromIntegral <$> v_unsigned_int
getScalarVal' x = _getScalarVal' PST.V_UINT PS.v_unsigned_int fromIntegral "V_UINT" x
instance Scalarable W.Word64 where
scalar x = PB.defaultValue {PS.type' = PST.V_UINT , PS.v_unsigned_int = Just x}
getScalarVal PS.Scalar{..} = v_unsigned_int
getScalarVal' x = _getScalarVal' PST.V_UINT PS.v_unsigned_int id "V_UINT" x
instance Scalarable Double where
scalar x = PB.defaultValue {PS.type' = PST.V_DOUBLE, PS.v_double = Just x}
getScalarVal PS.Scalar{..} = v_double
getScalarVal' x = _getScalarVal' PST.V_DOUBLE PS.v_double id "V_DOUBLE" x
instance Scalarable Float where
scalar x = PB.defaultValue {PS.type' = PST.V_FLOAT , PS.v_float = Just x}
getScalarVal PS.Scalar{..} = v_float
getScalarVal' x = _getScalarVal' PST.V_FLOAT PS.v_float id "V_FLOAT" x
instance Scalarable Bool where
scalar x = PB.defaultValue {PS.type' = PST.V_BOOL , PS.v_bool = Just x}
getScalarVal PS.Scalar{..} = v_bool
getScalarVal' x = _getScalarVal' PST.V_BOOL PS.v_bool id "V_BOOL" x
instance Scalarable String where
scalar x = PB.defaultValue {PS.type' = PST.V_STRING, PS.v_string = Just $ scalarString x}
getScalarVal PS.Scalar{..} = (T.unpack . TE.decodeUtf8 . BL.toStrict . PSS.value) <$> v_string
getScalarVal' x = _getScalarVal' PST.V_STRING PS.v_string (T.unpack . TE.decodeUtf8 . BL.toStrict . PSS.value) "V_STRING" x
instance Scalarable T.Text where
scalar x = PB.defaultValue {PS.type' = PST.V_STRING, PS.v_string = Just $ scalarString (T.unpack x)}
getScalarVal PS.Scalar{..} = (TE.decodeUtf8 . BL.toStrict . PSS.value) <$> v_string
getScalarVal' x = _getScalarVal' PST.V_STRING PS.v_string (TE.decodeUtf8 . BL.toStrict . PSS.value) "V_STRING" x
instance Scalarable BL.ByteString where
scalar x = PB.defaultValue {PS.type' = PST.V_OCTETS, PS.v_octets = Just $ scalarOctets x}
getScalarVal PS.Scalar{..} = PSO.value <$> v_octets
getScalarVal' x = _getScalarVal' PST.V_OCTETS PS.v_octets PSO.value "V_OCTETS" x
instance Scalarable B.ByteString where
scalar x = PB.defaultValue {PS.type' = PST.V_OCTETS, PS.v_octets = Just $ scalarOctets' x}
getScalarVal PS.Scalar{..} = BL.toStrict . PSO.value <$> v_octets
getScalarVal' x = _getScalarVal' PST.V_OCTETS PS.v_octets (BL.toStrict . PSO.value) "V_OCTETS" x
-- | A Nothing value is converted to a Null Scalar.
instance (Scalarable a) => Scalarable (Maybe a) where
  scalar (Just x) = scalar x
  scalar Nothing  = mkNullScalar
  getScalarVal    = Just . getScalarVal   -- wrap the underlying lookup; a V_NULL scalar yields Just Nothing
  getScalarVal'   = return . getScalarVal -- total variant: a V_NULL scalar yields Nothing instead of throwing
mkServerMessagesType :: PSMT.Type
mkServerMessagesType = PB.defaultValue
mkServerMessages :: PSM.ServerMessages
mkServerMessages = PB.defaultValue
mkSessionStateChangedParameter :: PSSCP.Parameter
mkSessionStateChangedParameter = PB.defaultValue
mkSessionStateChanged :: PSSC.SessionStateChanged
mkSessionStateChanged = PB.defaultValue
mkSessionVariableChanged :: PSVC.SessionVariableChanged
mkSessionVariableChanged = PB.defaultValue
{-
StmtExecute
-}
mkStmtExecute :: String -> String -> [PA.Any] -> Bool -> PSE.StmtExecute
mkStmtExecute ns sql args meta = PB.defaultValue
`setNamespace` ns
`setStmt` sql
`setStmtArgs` args
`setCompactMetadata` meta
mkStmtExecuteSql :: String -> [PA.Any] -> PSE.StmtExecute
mkStmtExecuteSql sql args = PB.defaultValue
`setStmt` sql
`setStmtArgs` args
mkStmtExecuteX :: String -> [PA.Any] -> Bool -> PSE.StmtExecute
mkStmtExecuteX sql args meta = PB.defaultValue
`setNamespace` "mysqlx"
`setStmt` sql
`setStmtArgs` args
`setCompactMetadata` meta
mkStmtExecuteX' :: String -> [PA.Any] -> PSE.StmtExecute
mkStmtExecuteX' sql args = mkStmtExecuteX sql args False
setNamespace :: PSE.StmtExecute -> String -> PSE.StmtExecute
setNamespace stmt ns = stmt {PSE.namespace = Just $ PBH.uFromString ns}
setStmt :: PSE.StmtExecute -> String -> PSE.StmtExecute
setStmt stmt sql = stmt {PSE.stmt = (BL.fromStrict . TE.encodeUtf8 . T.pack) sql}
setStmtArgs :: PSE.StmtExecute -> [PA.Any]-> PSE.StmtExecute
setStmtArgs stmt args = stmt {PSE.args = Seq.fromList args}
setStmtArg :: PSE.StmtExecute -> PA.Any-> PSE.StmtExecute
setStmtArg stmt arg = stmt {PSE.args = arg <| PSE.args stmt}
setCompactMetadata :: PSE.StmtExecute -> Bool -> PSE.StmtExecute
setCompactMetadata stmt meta = stmt {PSE.compact_metadata = Just meta}
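-- | A minimal sketch (not part of the original module): a parameterised plain
--   SQL statement built with 'mkStmtExecuteSql' above; the SQL text and the
--   bound argument are placeholders.
_exampleStmtExecute :: PSE.StmtExecute
_exampleStmtExecute = mkStmtExecuteSql "SELECT ?" (anys [42 :: Int])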
mkStmtExecuteOk :: PSEO.StmtExecuteOk
mkStmtExecuteOk = PB.defaultValue
mkUpdate :: PU.Update
mkUpdate = PB.defaultValue
mkUpdateOperationUpdateType :: PUOUT.UpdateType
mkUpdateOperationUpdateType = PB.defaultValue
{- UpdateOperation -}
-- | Make an UpdateOperation instance.
mkUpdateOperation ::
PUOUT.UpdateType -- ^ type
-> PCI.ColumnIdentifier -- ^ identifier
-> PEx.Expr -- ^ Expr
-> PUO.UpdateOperation
mkUpdateOperation ut iden ex = PUO.UpdateOperation {PUO.source = iden, PUO.operation = ut, PUO.value = Just ex}
mkUpdateOperationSet = mkUpdateOperation PUOUT.SET -- table only
mkUpdateOperationItemRemove = mkUpdateOperation PUOUT.ITEM_REMOVE
mkUpdateOperationItemSet = mkUpdateOperation PUOUT.ITEM_SET -- add
mkUpdateOperationItemReplace = mkUpdateOperation PUOUT.ITEM_REPLACE
mkUpdateOperationItemMerge = mkUpdateOperation PUOUT.ITEM_MERGE
mkUpdateOperationArrayInsert = mkUpdateOperation PUOUT.ARRAY_INSERT
mkUpdateOperationArrayAppend = mkUpdateOperation PUOUT.ARRAY_APPEND
-- | Make an update item.
class (Exprable a) => UpdateOperatable a where
updateSet :: String -> a -> PUO.UpdateOperation
updateSet ident a = mkUpdateOperationSet (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr a)
updateItemRemove :: String -> a -> PUO.UpdateOperation
updateItemRemove ident a = mkUpdateOperationItemRemove (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr a)
updateItemSet :: String -> a -> PUO.UpdateOperation
updateItemSet ident a = mkUpdateOperationItemSet (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr a)
updateItemReplace :: String -> a -> PUO.UpdateOperation
updateItemReplace ident a = mkUpdateOperationItemReplace (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr a)
updateItemMerge :: String -> a -> PUO.UpdateOperation
updateItemMerge ident a = mkUpdateOperationItemMerge (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr a)
instance UpdateOperatable Int
instance UpdateOperatable Int64
instance UpdateOperatable Word8
instance UpdateOperatable Word64
instance UpdateOperatable Double
instance UpdateOperatable Float
instance UpdateOperatable Bool
instance UpdateOperatable String
instance UpdateOperatable Text
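-- | A minimal sketch (not part of the original module): an ITEM_SET update of
--   document member "name" using the default methods above; the member name
--   and value are placeholders.
_exampleUpdateName :: PUO.UpdateOperation
_exampleUpdateName = updateItemSet "name" ("Alice" :: String)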
-- | Make an update array insert operation.
updateArrayInsert :: String -> PAR.Array -> PUO.UpdateOperation
updateArrayInsert ident arr = mkUpdateOperationArrayInsert (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr arr)
-- | Make an update array append operation.
updateArrayAppend :: String -> PAR.Array -> PUO.UpdateOperation
updateArrayAppend ident arr = mkUpdateOperationArrayAppend (columnIdentifierDocumentPahtItem [mkDocumentPathItem ident]) (expr arr)
mkViewAlgorithm :: PVA.ViewAlgorithm
mkViewAlgorithm = PB.defaultValue
mkViewCheckOption :: PVCO.ViewCheckOption
mkViewCheckOption = PB.defaultValue
mkViewSqlSecurity :: PVSS.ViewSqlSecurity
mkViewSqlSecurity = PB.defaultValue
mkWarningLevel :: PWL.Level
mkWarningLevel = PB.defaultValue
mkWarning :: PW.Warning
mkWarning = PB.defaultValue
-- CURRENT_SCHEMA = 1;
getCurrentSchema :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getCurrentSchema = getSessionStateChangedVal PSSCP.CURRENT_SCHEMA "CURRENT_SCHEMA"
-- ACCOUNT_EXPIRED = 2;
getAccountExpired :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getAccountExpired = getSessionStateChangedVal PSSCP.ACCOUNT_EXPIRED "ACCOUNT_EXPIRED"
-- GENERATED_INSERT_ID = 3;
getGeneratedInsertId :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getGeneratedInsertId = getSessionStateChangedVal PSSCP.GENERATED_INSERT_ID "GENERATED_INSERT_ID"
-- ROWS_AFFECTED = 4;
getRowsAffected :: (MonadIO m, MonadThrow m) => PSSC.SessionStateChanged -> m W.Word64
getRowsAffected = getSessionStateChangedVal PSSCP.ROWS_AFFECTED "ROWS_AFFECTED"
-- ROWS_FOUND = 5;
getRowsFound :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getRowsFound = getSessionStateChangedVal PSSCP.ROWS_FOUND "ROWS_FOUND"
-- ROWS_MATCHED = 6;
getRowsMatched :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getRowsMatched = getSessionStateChangedVal PSSCP.ROWS_MATCHED "ROWS_MATCHED"
-- TRX_COMMITTED = 7;
getTrxCommited :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getTrxCommited = getSessionStateChangedVal PSSCP.TRX_COMMITTED "TRX_COMMITTED"
-- TRX_ROLLEDBACK = 9;
getTrxRolldback :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getTrxRolldback = getSessionStateChangedVal PSSCP.TRX_ROLLEDBACK "TRX_ROLLEDBACK"
-- PRODUCED_MESSAGE = 10;
getProducedMessage :: (MonadIO m, MonadThrow m, Scalarable a) => PSSC.SessionStateChanged -> m a
getProducedMessage = getSessionStateChangedVal PSSCP.PRODUCED_MESSAGE "PRODUCED_MESSAGE"
-- CLIENT_ID_ASSIGNED = 11;
getClientId :: (MonadIO m, MonadThrow m) => PSSC.SessionStateChanged -> m W.Word64
getClientId = getSessionStateChangedVal PSSCP.CLIENT_ID_ASSIGNED "CLIENT_ID_ASSIGNED"
getSessionStateChangedVal :: (MonadIO m, MonadThrow m, Scalarable a) => PSSCP.Parameter -> String -> PSSC.SessionStateChanged -> m a
getSessionStateChangedVal p info ssc = do
-- debug ssc
if PSSC.param ssc == p
then case PSSC.value ssc of
Just s -> getScalarVal' s
Nothing -> throwM $ XProtocolException $ "param is " ++ info ++ ", but Nothing"
else
throwM $ XProtocolException $ "param is not " ++ info ++ ", but " ++ (show $ PSSC.param ssc)
-- | Server message NO : ok = 0
s_ok = 0 :: Int
-- | Server message NO : error = 1
s_error = 1 :: Int
-- | Server message NO : conn_capabilities = 2
s_conn_capabilities = 2 :: Int
-- | Server message NO : sess_authenticate_continue = 3
s_sess_authenticate_continue = 3 :: Int
-- | Server message NO : sess_authenticate_ok =4
s_sess_authenticate_ok = 4 :: Int
-- | Server message NO : notice = 11
s_notice = 11 :: Int
-- | Server message NO : resultset_column_meta_data = 12
s_resultset_column_meta_data = 12 :: Int
-- | Server message NO : resultset_row = 13
s_resultset_row = 13 :: Int
-- | Server message NO : resultset_fetch_done = 14
s_resultset_fetch_done = 14 :: Int
-- | Server message NO : resultset_fetch_suspended = 15
s_resultset_fetch_suspended = 15 :: Int
-- | Server message NO : resultset_fetch_done_more_resultsets = 16
s_resultset_fetch_done_more_resultsets = 16 :: Int
-- | Server message NO : sql_stmt_execute_ok = 17
s_sql_stmt_execute_ok = 17 :: Int
-- | Server message NO : resultset_fetch_done_more_out_params = 18
s_resultset_fetch_done_more_out_params = 18 :: Int
getClientMsgTypeNo :: (Typeable msg, Show msg) => msg -> Int
getClientMsgTypeNo msg =
case found of
Nothing -> P.error $ "getClientMsgTypeNo failure, msg=" ++ show msg
Just (a,b,c) -> a
where found = L.find (\(a, b, c) -> c == typeOf msg) clientMessageMap
-- | Mapping between a message type number and an object.
clientMessageMap :: [(Int, PCMT.Type, TypeRep)]
clientMessageMap =
[
( 1, PCMT.CON_CAPABILITIES_GET, typeOf (undefined :: PCG.CapabilitiesGet) ),
( 2, PCMT.CON_CAPABILITIES_SET, typeOf (undefined :: PCS.CapabilitiesSet) ),
( 3, PCMT.CON_CLOSE, typeOf (undefined :: PC.Close) ),
( 4, PCMT.SESS_AUTHENTICATE_START, typeOf (undefined :: PAS.AuthenticateStart) ),
( 5, PCMT.SESS_AUTHENTICATE_CONTINUE, typeOf (undefined :: PAC.AuthenticateContinue) ),
( 6, PCMT.SESS_RESET, typeOf (undefined :: PRe.Reset) ),
( 7, PCMT.SESS_CLOSE, typeOf (undefined :: PC.Close) ),
(12, PCMT.SQL_STMT_EXECUTE, typeOf (undefined :: PSE.StmtExecute ) ),
(17, PCMT.CRUD_FIND, typeOf (undefined :: PF.Find) ),
(18, PCMT.CRUD_INSERT, typeOf (undefined :: PI.Insert) ),
(19, PCMT.CRUD_UPDATE, typeOf (undefined :: PU.Update) ),
(20, PCMT.CRUD_DELETE, typeOf (undefined :: PD.Delete) ),
(24, PCMT.EXPECT_OPEN, typeOf (undefined :: POp.Open) ),
(25, PCMT.EXPECT_CLOSE, typeOf (undefined :: PC.Close) ),
(30, PCMT.CRUD_CREATE_VIEW, typeOf (undefined :: PCV.CreateView) ),
(31, PCMT.CRUD_MODIFY_VIEW, typeOf (undefined :: PMV.ModifyView) ),
(32, PCMT.CRUD_DROP_VIEW, typeOf (undefined :: PDV.DropView) )
]
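-- A lookup sketch in the doctest style used elsewhere in this module
-- (assuming the generated protobuf types above are in scope):
--
-- >>> getClientMsgTypeNo (mkStmtExecuteSql "SELECT 1" [])
-- 12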
--
-- for debugging purposes
--
-- | Serialize an object to a file.
writeObj :: (PBW.Wire a, PBR.ReflectDescriptor a) => FilePath -> a -> IO ()
writeObj path obj = BL.writeFile path $ PBW.messagePut obj
-- | Deserialize a file to an object.
--
-- Example
--
-- >>> let x = readObj "memo/dump_java_insert_prepared_type_timestamp.bin" :: IO StmtExecute
-- >>> import Text.Pretty.Simple
-- >>> :t pPrint
-- pPrint :: (MonadIO m, Show a) => a -> m ()
-- >>> x >>= pPrint
-- StmtExecute
-- { namespace = Just "sql"
-- , stmt = "insert into data_type_timestamp values (?);"
-- , args = fromList
-- [ Any
-- { type' = SCALAR
-- , scalar = Just
-- ( Scalar
-- { type' = V_STRING
-- , v_signed_int = Nothing
-- , v_unsigned_int = Nothing
-- , v_octets = Nothing
-- , v_double = Nothing
-- , v_float = Nothing
-- , v_bool = Nothing
-- , v_string = Just
-- ( String
-- { value = "2017-09-17T12:34:56.0"
-- , collation = Nothing
-- }
-- )
-- }
-- )
-- , obj = Nothing
-- , array = Nothing
-- }
-- ]
-- , compact_metadata = Just False
-- }
-- >>>
readObj :: (MonadIO m, MonadThrow m, PBW.Wire a, PBR.ReflectDescriptor a, PBT.TextMsg a, Typeable a) => FilePath -> m a
readObj path = do
bin <- liftIO $ B.readFile path
obj <- getMessage bin
return obj
--
-- ToString
--
scalarToString :: PS.Scalar -> String
scalarToString PS.Scalar{..} =
case type' of
PST.V_SINT -> j2s v_signed_int
PST.V_UINT -> j2s v_unsigned_int
PST.V_NULL -> "NULL"
PST.V_OCTETS -> show $ bs2s' $ fromJust $ PSO.value <$> v_octets -- use show to add "\"" before and after the data
PST.V_DOUBLE -> j2s v_double
PST.V_FLOAT -> j2s v_float
PST.V_BOOL -> fromJust $ (\x -> if x then "TRUE" else "FALSE") <$> v_bool
PST.V_STRING -> show $ bs2s' $ fromJust $ PSS.value <$> v_string
-- _ -> error "invalid type" -- omitted: this catch-all would be a redundant pattern match
j2s :: (Show a) => Maybe a -> String
j2s = show . fromJust
documentPathToString :: Seq.Seq PDPI.DocumentPathItem -> String
documentPathToString seq = F.foldr (\x acc -> documentSinglePathToString x ++ acc) "" seq
documentSinglePathToString :: PDPI.DocumentPathItem -> String
documentSinglePathToString PDPI.DocumentPathItem{..} =
case type' of
PDPIT.MEMBER -> "." ++ ((PBH.uToString $ fromJust value) >>= escapeDoubleQuote )
PDPIT.MEMBER_ASTERISK -> ".*"
PDPIT.ARRAY_INDEX -> "[" ++ (show $ fromJust index) ++ "]" -- e.g. "[3]"
PDPIT.ARRAY_INDEX_ASTERISK -> "[*]"
PDPIT.DOUBLE_ASTERISK -> "**"
-- "@\"@" --> "\"@\\\"@\""
escapeDoubleQuote = \c -> if c == '\"' then "\\\"" else [c]
columnIdentifierToString :: PCI.ColumnIdentifier -> String
columnIdentifierToString PCI.ColumnIdentifier{..} =
case name of
Just n ->
case Seq.null document_path of
True -> sch ++ tbl ++ nam
False -> sch ++ tbl ++ nam ++ "->$" ++ (documentPathToString document_path)
Nothing -> "$" ++ (documentPathToString document_path)
where nam = stripMaybeToStr name ""
tbl = stripMaybeToStr table_name "."
sch = stripMaybeToStr schema_name "."
stripMaybeToStr :: (Maybe PBH.Utf8) -> String -> String
stripMaybeToStr m appnd =
case m of
Just str -> do
let s = PBH.uToString str
if P.null s then "" else (quoteIdentifier s) ++ appnd
Nothing -> ""
quoteIdentifier :: String -> String
quoteIdentifier str =
case specialCharFound str of
True -> "'" ++ (str >>= escapeAps) ++ "'"
False -> str
-- "\'" --> "''"
escapeAps = \c -> if c == '\'' then "''" else [c]
removeAps = \c -> if c == '\"' then [] else [c]
specialCharFound "" = False
specialCharFound (x:xs)
| x == '\'' = True
| x == '"' = True
| x == '`' = True
| x == '$' = True
| x == '.' = True
| x == '-' = True
| otherwise = specialCharFound xs
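-- Quoting examples derived from the rules above:
--
-- >>> quoteIdentifier "plain_name"
-- "plain_name"
-- >>> quoteIdentifier "it's"
-- "'it''s'"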
functionCallToString :: PFC.FunctionCall -> String
functionCallToString PFC.FunctionCall{..} =
sch ++ nae ++ "(" ++ exps ++ ")"
where nae = quoteIdentifier $ PBH.uToString $ PI.name name
sch = stripMaybeToStr (PI.schema_name name) "."
exps = L.intercalate ", " $ F.toList $ fmap exprToString param
exprToString :: PEx.Expr -> String
exprToString PEx.Expr{..} =
case type' of
PET.IDENT -> columnIdentifierToString $ fromJust identifier
PET.LITERAL -> scalarToString $ fromJust literal
PET.VARIABLE -> undefined
PET.FUNC_CALL -> functionCallToString $ fromJust function_call
PET.OPERATOR -> operatorToString $ fromJust operator
PET.PLACEHOLDER -> ":" ++ (show $ fromJust position)
PET.OBJECT -> objectToString $ fromJust object
PET.ARRAY -> undefined
-- _ -> error $ "Unknown type tag: " ++ (show type') -- Pattern match is redundant
paramListToString :: [String] -> String
paramListToString params = "(" ++ (L.intercalate ", " params) ++ ")"
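-- For example:
--
-- >>> paramListToString ["a", "b", "c"]
-- "(a, b, c)"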
paramListToString' :: Seq.Seq String -> String
paramListToString' params = paramListToString (F.toList params)
operatorToString :: POpe.Operator -> String
operatorToString POpe.Operator{..} =
case nam of
"between" -> Seq.index pars 0 ++ " between " ++ Seq.index pars 1 ++ " AND " ++ Seq.index pars 2
"not_between" -> Seq.index pars 0 ++ " not between " ++ Seq.index pars 1 ++ " AND " ++ Seq.index pars 2
"in" -> Seq.index pars 0 ++ " in" ++ (paramListToString' $ Seq.drop 1 pars)
"not_in" -> Seq.index pars 0 ++ " not in" ++ (paramListToString' $ Seq.drop 1 pars)
"like" -> Seq.index pars 0 ++ " like " ++ (Seq.index pars 1) ++ getEscapeStr pars
"not_like" -> Seq.index pars 0 ++ " not like " ++ (Seq.index pars 1) ++ getEscapeStr pars
"regrex" -> Seq.index pars 0 ++ " regrex " ++ (Seq.index pars 1)
"not_regrex" -> Seq.index pars 0 ++ " not_regrex " ++ (Seq.index pars 1)
"cast" -> "cast(" ++ Seq.index pars 0 ++ " AS " ++ (Seq.index pars 1 >>= removeAps)
_ -> case len of
2 -> if P.length nam < 3 || (nam == "and" || nam == "or")
then "(" ++ Seq.index pars 0 ++ " " ++ nam ++ " " ++ Seq.index pars 1 ++ ")"
else nam ++ (paramListToString' pars)
1 -> nam ++ Seq.index pars 0
0 -> nam
_ -> nam ++ (paramListToString' pars)
where nam = PBH.uToString name
pars = fmap exprToString param
getEscapeStr xs = if Seq.length xs == 3 then " ESCAPE " ++ (Seq.index xs 2) else ""
len = Seq.length pars
objectToString :: PO.Object -> String
objectToString PO.Object{..} =
L.intercalate "," $ F.toList $ fmap (\x -> "'" ++ quoteJsonKey (PBH.uToString $ POF.key x) ++ "':" ++ exprToString (POF.value x)) fld
-- replaceAll("'", "\\\\'");
quoteJsonKey :: String -> String
quoteJsonKey jken = jken >>= (\x -> if x == '\'' then "\\\'" else [x])
|
naoto-ogawa/h-xproto-mysql
|
src/DataBase/MySQLX/Model.hs
|
mit
| 77,817 | 0 | 19 | 23,307 | 16,152 | 8,954 | 7,198 | 1,076 | 15 |
{-# LANGUAGE OverloadedStrings #-}
module Edabo.MPD where
import Data.Either (partitionEithers)
import Data.Maybe (mapMaybe)
import Data.String (fromString)
import Data.UUID (UUID)
import qualified Data.UUID (toString)
import qualified Data.UUID as UUID
import Edabo.CmdLine.Types (CommandResult (..))
import Edabo.Types (Track (..), makeTrack)
import Network.MPD (Metadata (MUSICBRAINZ_ALBUMID, MUSICBRAINZ_TRACKID, MUSICBRAINZ_RELEASETRACKID),
Response, Song (..), add,
clear, currentSong, find,
sgGetTag, toString, withMPD,
(=?))
import Network.MPD.Commands.Extensions (getPlaylist)
import Safe (headMay)
-- | Clears the current playlist.
clearMPDPlaylist :: IO (Response ())
clearMPDPlaylist = withMPD clear
-- | Returns a list of songs in the current playlist.
getMPDPlaylist :: IO (Response [Song])
getMPDPlaylist = withMPD getPlaylist
-- | Tries to convert from 'Song' to 'Track'.
getTrackFromSong :: Song
-> Either CommandResult Track
getTrackFromSong song@(Song {sgIndex = Just _}) =
let recordingid = sgGetTag MUSICBRAINZ_TRACKID song
releaseid = sgGetTag MUSICBRAINZ_ALBUMID song
releasetrackid = sgGetTag MUSICBRAINZ_RELEASETRACKID song
in case recordingid of
Just trackids -> Right $ makeTrack tid rlid rltid
where tid = toString $ head trackids
rlid = buildOptional releaseid
rltid = buildOptional releasetrackid
buildOptional value =
case value of
Nothing -> Nothing
Just [] -> Nothing
Just v -> Just $ toString $ head v
Nothing -> Left $ MissingMetadata [MUSICBRAINZ_TRACKID] song
getTrackFromSong song@Song {sgIndex = Nothing} =
Left $ InvalidInfo "The song has no position in the playlist" song
-- | Gets the current 'Song', converted to a 'Track'.
getCurrentTrack :: IO (Either CommandResult Track)
getCurrentTrack = do
response <- withMPD currentSong
case response of
(Left e) -> return $ Left $ MPDFailure e
(Right maybeSong) -> case maybeSong of
Nothing -> return $ Left NoCurrentSong
(Just song) -> return $ getTrackFromSong song
-- | Tries to convert multiple 'Song's to 'Track's.
getTracksFromSongs :: [Song]
-> [Either CommandResult Track]
getTracksFromSongs = map getTrackFromSong
-- | Gets the 'Tracks' that are currently in the playlist.
getTracksFromPlaylist :: IO (Either CommandResult [Track])
getTracksFromPlaylist = do
playlist <- getMPDPlaylist
let res = case playlist of
Left e -> Left $ MPDFailure e
Right songs -> trackListOrError $ getTracksFromSongs songs
return res
where trackListOrError :: [Either CommandResult Track]
-> Either CommandResult [Track]
trackListOrError tl = case partitionEithers tl of
(lefts@(_:_), _) -> Left $ MultipleResults lefts
(_, rights) -> Right rights
-- | Loads a list of 'Track's into the current playlist.
loadMPDPlaylist :: [Track] -- ^ The list of tracks
-> Metadata -- ^ The 'Metadata' used to look up each track in
-- MPDs database
-> (Track -> Maybe UUID.UUID) -- ^ The function used to get a
-- 'UUID.UUID' value corresponding
-- to 'Metadata'
-> [IO (Response ())]
loadMPDPlaylist pltracks meta uuidgetter = map loadsong $ mapMaybe uuidgetter pltracks
where loadsong :: UUID -> IO (Response ())
loadsong uuid = do
withMPD $ find $ meta =? fromString (Data.UUID.toString uuid)
>>= either (return . Left) addFirst
addFirst songs = case headMay songs of
Just s -> withMPD $ add $ sgFilePath s
Nothing -> return $ return ()
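-- A usage sketch (the accessor name below is hypothetical; substitute whichever
-- 'Track' field yields the recording MBID as a 'UUID.UUID'):
--
-- > let actions = loadMPDPlaylist tracks MUSICBRAINZ_TRACKID trackRecordingId
-- > in sequence_ actions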
|
mineo/edabo
|
src/Edabo/MPD.hs
|
mit
| 4,942 | 0 | 16 | 2,120 | 958 | 507 | 451 | 75 | 4 |
{-# LANGUAGE
RecordWildCards
#-}
{-|
Module : HSat.Make.Instances.Common.Literal
Description : Generic functions to create Literal's
Copyright : (c) Andrew Burnett 2014-2015
Maintainer : [email protected]
Stability : experimental
Portability : Unknown
Exports a main data type 'LiteralSet' that houses an environment that is used to
create randomly generated 'Literal's with certain criteria - for example, to make
sure that no 'Literal' is created with the same 'Variable' until the context is reset
-}
module HSat.Make.Instances.Common.Literal (
-- * LiteralSet
LiteralSet(..) ,
mkLiteralSet , -- :: (MonadRandom m) => Word -> Bool -> m LiteralSet
reset , -- :: (MonadRandom m) => LiteralMake m ()
getTrueLiteral , -- :: (MonadRandom m) => LiteralMake m Literal
getRandomLiteral , -- :: (MonadRandom m) => LiteralMake m Literal
-- * Type Synonyms
LiteralMake ,
-- * Errors
LiteralMakeError(..),
) where
import Prelude hiding (lookup)
import Control.Monad.Catch
import Control.Monad.Random
import Control.Monad.State
import Data.Set (Set)
import qualified Data.Set as S
import HSat.Problem.Instances.Common
import HSat.Solution.Instances.CNF
{-|
Provides a context that allows 'Literal's to be created randomly relative to certain
criteria
-}
data LiteralSet = LiteralSet {
-- | The 'Set' of 'Variable's that are able to appear. If, for example, no 'Variable'
-- can appear more than once in a context, it is removed from this set
getVarsThatCanAppear :: Set Variable ,
-- | The assignment randomly generated for problem's that must evaluate to 'True'
getTrueSet :: BoolSolution,
-- | The number of 'Literal's generated in this context that evaluate to 'True'
getHasGeneratedTrue :: Word ,
-- | The maximum 'Variable' in the context of contexts. This is used when the context is reset
getMaximumVariable :: Word ,
-- | Denotes whether a 'Variable' can appear twice before the context is reset
getVarsAppearTwice :: Bool
} deriving (Eq,Show)
{-|
Given a maximum 'Variable' and a 'Bool' denoting whether a 'Variable' can appear twice
in a context, a 'LiteralSet' is created within a 'MonadRandom' context
-}
mkLiteralSet :: (MonadRandom m) => Word -> Bool -> m LiteralSet
mkLiteralSet maxVar vAppearTwice = do
trueSet <- mkTrueSet maxVar
return $ LiteralSet vars trueSet 0 maxVar vAppearTwice
where
vars = S.fromList varList
varList = if maxVar==0 then [] else map mkVariable [1..maxVar]
{-|
When a 'LiteralSet' is generating 'Literal's we allow 'LiteralMakeError's to be
thrown in case of undefined behaviour.
-}
data LiteralMakeError =
-- | Thrown when a mapping index cannot be found
CannotFindMapping |
-- | Thrown when there are no more 'Variable's allowed to be chosen within a context, though a request for one has been made
NoVariables
deriving (Eq,Show)
instance Exception LiteralMakeError
{-|
A context called 'LiteralMake' that allows for smaller type signatures
-}
type LiteralMake monad result =
StateT LiteralSet monad result
{-|
Reset's the context within the 'LiteralMake'.
Specifically, puts all 'Variable's back in the 'Variable Set', and resets the number
of Variable's that have evaluated to True back to 0
-}
reset :: (MonadRandom m) => LiteralMake m ()
reset =
modify reset'
where
reset' :: LiteralSet -> LiteralSet
reset' ls@LiteralSet{..} =
ls {
getVarsThatCanAppear = fullSet getMaximumVariable,
getHasGeneratedTrue = 0
}
{-
Creates a full set of Variables from the LiteralSet
-}
fullSet :: Word -> S.Set Variable
fullSet 0 = S.empty
fullSet maxVar = S.fromList $ map mkVariable [1..maxVar]
{-
Randomly generate a Variable with respect to a context
-}
makeVariable :: (MonadRandom m, MonadThrow m) => LiteralMake m Variable
makeVariable = do
vars <- gets getVarsThatCanAppear
case S.size vars of
0 -> throwM NoVariables
n -> do
vAppearTwice <- gets getVarsAppearTwice
index <- getRandomR (0,n-1)
let var = S.elemAt index vars
unless vAppearTwice $ modify $ removeVariable var
return var
{-
Removes the Variable from the LiteralSet's Variable Set and returns the new LiteralSet
-}
removeVariable :: Variable -> LiteralSet -> LiteralSet
removeVariable v ls@LiteralSet{..} =
ls {
getVarsThatCanAppear = S.delete v getVarsThatCanAppear
}
{-|
Returns a 'Literal' that will evaluate to 'True' within the 'LiteralSet's solution
-}
getTrueLiteral :: (MonadRandom m, MonadThrow m) => LiteralMake m Literal
getTrueLiteral = do
var <- makeVariable
mapping <- gets getTrueSet
case lookup var mapping of
Nothing -> throwM CannotFindMapping
Just sign -> do
modify changeTrueLiteralCreated
return $ mkLiteral sign var
{-
An update function for the LiteralSet data type.
Increases the number of generatedTrue
-}
changeTrueLiteralCreated :: LiteralSet -> LiteralSet
changeTrueLiteralCreated ls@LiteralSet{..} =
ls {
getHasGeneratedTrue = getHasGeneratedTrue + 1
}
{-|
Generates a random literal within the 'LiteralMake' context.
-}
getRandomLiteral :: (MonadRandom m, MonadThrow m) => LiteralMake m Literal
getRandomLiteral = do
var <- makeVariable
bool <- getRandom
let sign = mkSign bool
lit = mkLiteral sign var
mapping <- gets getTrueSet
case lookup var mapping of
Just sign' -> when (sign==sign') (modify changeTrueLiteralCreated) >> return lit
Nothing -> throwM CannotFindMapping
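-- A usage sketch (assuming a monad that satisfies both 'MonadRandom' and
-- 'MonadThrow', for example running the random computation inside IO):
--
-- > do ls <- mkLiteralSet 10 False
-- > (lit, ls') <- runStateT (do { l <- getTrueLiteral; reset; return l }) ls
-- > -- 'lit' is true under the hidden solution; 'ls'' has a full variable pool again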
|
aburnett88/HSat
|
src/HSat/Make/Instances/Common/Literal.hs
|
mit
| 5,753 | 0 | 16 | 1,333 | 897 | 481 | 416 | 87 | 2 |
module NetHack.Monad.NHAction
(runNHAction, answer, NHAction(), update, getTerminalM, get,
putInventoryM, putInventoryNeedsUpdateM, getElementM, putElementM,
getMessagesM, control, nextRunningIDM,
getLevelM, bailout, getCoordsM, forbidMovementFromM,
putLevelM, putCurrentLevelM,
maybeMarkAsOpenDoorM,
goingDownstairs, getLevelTransitionM, resetLevelTransitionM,
Answerable())
where
import NetHack.Data.LevelTransition
import NetHack.Data.Level
import qualified NetHack.Data.NetHackState as NS
import NetHack.Data.Messages
import NetHack.Data.Item(Item)
import qualified Terminal.Data as T
import qualified Terminal.Terminal as T
import Communication.RWChan
import qualified Data.Map as M
import Control.Concurrent.STM
import Control.Monad.State
import qualified Data.ByteString.Char8 as B
newtype NHAction a = NHAction (StateT NS.NetHackState IO a)
deriving (MonadState NS.NetHackState, MonadIO, Functor)
instance Monad NHAction where
return x = NHAction $ return x
(NHAction m) >>= b = NHAction $ m >>= (\value -> let x = b value
in unwrap x)
where
unwrap (NHAction x) = x
runNHAction :: NS.NetHackState -> NHAction a -> IO a
runNHAction ns (NHAction st) = do
(result, _) <- runStateT st ns
return result
getTerminalM :: NHAction T.Terminal
getTerminalM = do ns <- get; return $ NS.terminal ns
getLevelM :: NHAction Level
getLevelM = do ns <- get; return $ NS.currentLevel ns
putLevelM :: Level -> NHAction ()
putLevelM l = do ns <- get; put $ NS.setLevel ns l
putCurrentLevelM :: Level -> NHAction ()
putCurrentLevelM l = do ns <- get; put $ NS.setCurrentLevel ns l
getMessagesM :: NHAction [String]
getMessagesM = do ns <- get; return $ NS.messages ns
getCoordsM :: NHAction (Int, Int)
getCoordsM = do t <- getTerminalM; return $ T.coords t
getElementM :: (Int, Int) -> NHAction Element
getElementM coords = do level <- getLevelM
return $ elemAtDefault level coords
putElementM :: Element -> (Int, Int) -> NHAction ()
putElementM elem coords = do level <- getLevelM
let elems = elements level
putLevelM $ setElements level $
M.insert coords elem elems
putInventoryM :: M.Map Char [Item] -> NHAction ()
putInventoryM i = do ns <- get; put $ NS.setInventory ns i
putInventoryNeedsUpdateM :: Bool -> NHAction ()
putInventoryNeedsUpdateM b =
do ns <- get; put $ NS.setInventoryNeedsUpdate ns b
nextRunningIDM :: NHAction Int
nextRunningIDM = do
ns <- get
let (id, newNs) = NS.nextRunningID ns
put newNs
return id
getLevelTransitionM :: NHAction (Maybe LevelTransition)
getLevelTransitionM = do ns <- get; return $ NS.levelTransition ns
resetLevelTransitionM :: NHAction ()
resetLevelTransitionM = do ns <- get; put $ NS.resetLevelTransition ns
goingDownstairs :: NHAction ()
goingDownstairs = do
l <- getLevelM
coords <- getCoordsM
ns <- get
let dLevel = NS.dungeonLevel ns
put $ NS.setLevelTransition ns $ goingDownstairsTransition dLevel coords
maybeMarkAsOpenDoorM :: Coords -> NHAction ()
maybeMarkAsOpenDoorM coords =
do t <- getTerminalM
level <- getLevelM
putLevelM $ maybeMarkAsOpenDoor level t coords
forbidMovementFromM :: (Int, Int) -> (Int, Int) -> NHAction ()
forbidMovementFromM from to =
do level <- getLevelM
putLevelM $ forbidMovementFrom level from to
bailout = error
update :: NHAction ()
update = do
oldState <- get
newState <- liftIO $ atomically $ NS.update oldState
liftIO $ T.printOut $ NS.terminal newState
liftIO $ putStrLn $ show $ NS.levelTransition newState
put newState
class Answerable a where
answer :: a -> NHAction ()
instance Answerable Char where
answer ch = do
oldState <- get
liftIO $ atomically $ NS.write oldState $ B.pack [ch]
update
instance Answerable [Char] where
answer str = do
oldState <- get
liftIO $ atomically $ NS.write oldState $ B.pack str
update
instance Answerable B.ByteString where
answer str = do
oldState <- get
liftIO $ atomically $ NS.write oldState $ str
update
control :: Char -> String
control 'A' = "\x01"
control 'B' = "\x02"
control 'C' = "\x03"
control 'D' = "\x04"
control 'E' = "\x05"
control 'F' = "\x06"
control 'G' = "\x07"
control 'H' = "\x08"
control 'I' = "\x09"
control 'J' = "\x0a"
control 'K' = "\x0b"
control 'L' = "\x0c"
control 'M' = "\x0d"
control 'N' = "\x0e"
control 'O' = "\x0f"
control 'P' = "\x10"
control 'Q' = "\x11"
control 'R' = "\x12"
control 'S' = "\x13"
control 'T' = "\x14"
control 'U' = "\x15"
control 'V' = "\x16"
control 'W' = "\x17"
control 'X' = "\x18"
control 'Y' = "\x19"
control 'Z' = "\x1a"
control '[' = "\x1b"
control '\\' = "\x1c"
control '^' = "\x1e"
control '_' = "\x1f"
control _ =
error $ "Invalid control character requested (" ++ show control ++ ")"
|
Noeda/Megaman
|
src/NetHack/Monad/NHAction.hs
|
mit
| 4,960 | 0 | 13 | 1,090 | 1,666 | 839 | 827 | 145 | 1 |
module Pregame.GhcPrim
( module X
) where
import GHC.Types as X
( Bool(False, True)
, Char
, Int
, Word
, Float
, Double
, Ordering(LT, GT, EQ)
, IO
)
|
jxv/pregame
|
src/Pregame/GhcPrim.hs
|
mit
| 174 | 0 | 6 | 54 | 59 | 42 | 17 | 11 | 0 |
-- HMM from Anglican (https://bitbucket.org/probprog/anglican-white-paper)
{-# LANGUAGE
FlexibleContexts,
TypeFamilies
#-}
module HMM (
values,
hmm,
syntheticData
) where
-- Hidden Markov Models
import Control.Monad (replicateM)
import Data.Vector (fromList)
import Control.Monad.Bayes.Class
-- | Observed values
values :: [Double]
values = [0.9,0.8,0.7,0,-0.025,-5,-2,-0.1,0,
0.13,0.45,6,0.2,0.3,-1,-1]
-- | The transition model.
trans :: MonadSample m => Int -> m Int
trans 0 = categorical $ fromList [0.1, 0.4, 0.5]
trans 1 = categorical $ fromList [0.2, 0.6, 0.2]
trans 2 = categorical $ fromList [0.15,0.7,0.15]
-- | The emission model.
emissionMean :: Int -> Double
emissionMean x = mean x where
mean 0 = -1
mean 1 = 1
mean 2 = 0
-- | Initial state distribution
start :: MonadSample m => m Int
start = uniformD [0,1,2]
-- | Example HMM from http://dl.acm.org/citation.cfm?id=2804317
hmm :: (MonadInfer m) => [Double] -> m [Int]
hmm dataset = f dataset (const . return) where
expand x y = do
x' <- trans x
factor $ normalPdf (emissionMean x') 1 y
return x'
f [] k = start >>= k []
f (y:ys) k = f ys (\xs x -> expand x y >>= k (x:xs))
syntheticData :: MonadSample m => Int -> m [Double]
syntheticData n = replicateM n syntheticPoint where
syntheticPoint = uniformD [0,1,2]
|
adscib/monad-bayes
|
models/HMM.hs
|
mit
| 1,336 | 0 | 13 | 268 | 515 | 280 | 235 | 35 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-filter.html
module Stratosphere.ResourceProperties.IoTAnalyticsDatasetFilter where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.IoTAnalyticsDatasetDeltaTime
-- | Full data type definition for IoTAnalyticsDatasetFilter. See
-- 'ioTAnalyticsDatasetFilter' for a more convenient constructor.
data IoTAnalyticsDatasetFilter =
IoTAnalyticsDatasetFilter
{ _ioTAnalyticsDatasetFilterDeltaTime :: Maybe IoTAnalyticsDatasetDeltaTime
} deriving (Show, Eq)
instance ToJSON IoTAnalyticsDatasetFilter where
toJSON IoTAnalyticsDatasetFilter{..} =
object $
catMaybes
[ fmap (("DeltaTime",) . toJSON) _ioTAnalyticsDatasetFilterDeltaTime
]
-- | Constructor for 'IoTAnalyticsDatasetFilter' containing required fields as
-- arguments.
ioTAnalyticsDatasetFilter
:: IoTAnalyticsDatasetFilter
ioTAnalyticsDatasetFilter =
IoTAnalyticsDatasetFilter
{ _ioTAnalyticsDatasetFilterDeltaTime = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-filter.html#cfn-iotanalytics-dataset-filter-deltatime
itadfDeltaTime :: Lens' IoTAnalyticsDatasetFilter (Maybe IoTAnalyticsDatasetDeltaTime)
itadfDeltaTime = lens _ioTAnalyticsDatasetFilterDeltaTime (\s a -> s { _ioTAnalyticsDatasetFilterDeltaTime = a })
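-- | A small construction sketch (not part of the generated bindings): build a
-- filter from a caller-supplied delta-time window with a plain record update.
ioTAnalyticsDatasetFilterWithDeltaTime :: IoTAnalyticsDatasetDeltaTime -> IoTAnalyticsDatasetFilter
ioTAnalyticsDatasetFilterWithDeltaTime dt = ioTAnalyticsDatasetFilter { _ioTAnalyticsDatasetFilterDeltaTime = Just dt }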
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/IoTAnalyticsDatasetFilter.hs
|
mit
| 1,533 | 0 | 12 | 156 | 167 | 98 | 69 | 23 | 1 |
module Control.Schule.DB where
import Control.SQL
import Control.Types
import Control.Schule.Typ
import qualified Control.Student.Type as CST
import qualified Control.Student.DB
import Autolib.Multilingual ( Language (..))
import Prelude hiding ( all )
-- | Get all schools from the DB.
-- TODO: implement filtering
get :: IO [ Schule ]
get = get_from_where ( map reed [ "schule" ] ) $ ands []
get_unr :: UNr -> IO [ Schule ]
get_unr u = get_from_where
( map reed [ "schule" ] )
( equals ( reed "schule.UNr" ) ( toEx u ) )
get_from_where f w = do
conn <- myconnect
stat <- squery conn $ Query qq
[ From f , Where w ]
res <- common stat
disconnect conn
return res
qq = Select
$ map reed [ "schule.UNr as UNr"
, "schule.Name as Name"
, "schule.Mail_Suffix as Mail_Suffix"
, "schule.Use_Shibboleth as Use_Shibboleth"
, "schule.Preferred_Language as Preferred_Language"
]
common = collectRows $ \ state -> do
g_unr <- getFieldValue state "UNr"
g_name <- getFieldValue state "Name"
g_mail_suffix <- getFieldValue state "Mail_Suffix"
g_use_shibboleth <- getFieldValue state "Use_Shibboleth"
g_preferred_language <- getFieldValue state "Preferred_Language"
return $ Schule { unr = g_unr
, name = g_name
, mail_suffix = g_mail_suffix
, use_shibboleth = 0 /= ( g_use_shibboleth :: Int )
, preferred_language = g_preferred_language
}
-- | put into table:
put :: Maybe UNr
-> Schule
-> IO ()
put munr vor = do
conn <- myconnect
let common = [ ( reed "Name", toEx $ name vor )
, ( reed "Mail_Suffix", toEx $ mail_suffix vor )
, ( reed "Use_Shibboleth", toEx $ use_shibboleth vor )
, ( reed "Preferred_Language", toEx $ preferred_language vor )
]
case munr of
Nothing -> squery conn $ Query
( Insert (reed "schule") common )
[ ]
Just unr -> squery conn $ Query
( Update [] (reed "schule") common )
[ Where $ equals ( reed "schule.UNr" ) ( toEx unr ) ]
disconnect conn
-- | delete
delete :: UNr
-> IO ()
delete unr = do
conn <- myconnect
squery conn $ Query
( Delete ( reed "schule" ) )
[ Where $ equals ( reed "schule.UNr" ) ( toEx unr ) ]
disconnect conn
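-- Usage sketch (assuming @s :: Schule@ is fully populated):
--
-- > put Nothing s -- insert a new school
-- > put (Just (unr s)) s -- update the existing row
-- > delete (unr s) -- remove it again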
|
marcellussiegburg/autotool
|
db/src/Control/Schule/DB.hs
|
gpl-2.0
| 2,425 | 15 | 16 | 744 | 701 | 364 | 337 | 63 | 2 |
module UserError(assert) where {
assert :: String -> Bool -> a -> a;
assert s x y = if x then y else error $ "UserError.assert failed: " ++ s;
-- assert _ _ y = y;
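-- Example: assert "index within bounds" (i < length xs) (xs !! i)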
}
|
kenta2/yescrypt
|
UserError.hs
|
gpl-3.0
| 166 | 0 | 7 | 38 | 59 | 35 | 24 | 3 | 2 |
module Shiny.Hardware.Dummy (mkDummyHardware) where
import Shiny.Shiny
import Shiny.Hardware
import Data.IORef
import Data.Time (getCurrentTime)
mkDummyHardware :: Int -> IO (Hardware)
mkDummyHardware size = do
ref <- newIORef (emptyDisplay size)
return $ Hardware {
readDisplay = read ref,
updateDisplay = writeIORef ref,
displaySize = return size,
resetDisplay = return ()
}
where read ref = do
disp <- readIORef ref
time <- getCurrentTime
putStrLn $ "Display at time: " ++ show time ++ "\n" ++ showDisplay disp
return disp
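-- Usage sketch: build a 16-LED dummy device and exercise its callbacks.
--
-- > hw <- mkDummyHardware 16
-- > displaySize hw >>= print -- 16
-- > readDisplay hw >>= updateDisplay hw -- logs the display, then writes it back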
|
dhrosa/shiny
|
Shiny/Hardware/Dummy.hs
|
gpl-3.0
| 593 | 0 | 12 | 144 | 181 | 92 | 89 | 18 | 1 |
-- Copyright (c) 2011 - All rights reserved - Keera Studios
module Physics.Shapes where
import Data
-- * Shapes
type AABB = (Pos2D, Size2D) -- We consider these half the size
type Circle = (Pos2D, Double)
|
keera-studios/pang-a-lambda
|
src/Physics/Shapes.hs
|
gpl-3.0
| 209 | 0 | 5 | 40 | 36 | 25 | 11 | 4 | 0 |
-- | Helper functions to generate values of primitives.
-- These are copied from Test.QuickCheck.Arbitrary and adjusted to use MonadGen
module Sara.TestUtils.ArbitraryUtils where
import Sara.TestUtils.GenT
import Data.Ratio ( (%) )
-- | Generates a boolean.
arbitraryBool :: MonadGen g => g Bool
arbitraryBool = choose (False, True)
-- | Generates a natural number. The number's maximum value depends on
-- the size parameter.
arbitrarySizedNatural :: (Integral a, MonadGen g) => g a
arbitrarySizedNatural =
sized $ \n ->
inBounds fromInteger (choose (0, toInteger n))
-- | Generates an integral number. The number can be positive or negative
-- and its maximum absolute value depends on the size parameter.
arbitrarySizedIntegral :: (Integral a, MonadGen g) => g a
arbitrarySizedIntegral =
sized $ \n ->
inBounds fromInteger (choose (-toInteger n, toInteger n))
inBounds :: (Integral a, MonadGen g) => (Integer -> a) -> g Integer -> g a
inBounds fi g = fmap fi (g `suchThat` (\x -> toInteger (fi x) == x))
-- | Generates a fractional number. The number can be positive or negative
-- and its maximum absolute value depends on the size parameter.
arbitrarySizedFractional :: (Fractional a, MonadGen g) => g a
arbitrarySizedFractional =
sized $ \n ->
let n' = toInteger n in
do a <- choose ((-n') * precision, n' * precision)
b <- choose (1, precision)
return (fromRational (a % b))
where
precision = 9999999999999 :: Integer
|
Lykos/Sara
|
tests/Sara/TestUtils/ArbitraryUtils.hs
|
gpl-3.0
| 1,472 | 1 | 16 | 279 | 393 | 212 | 181 | 23 | 1 |
module Main (
run,
main
) where
import Scanner
import Parser
import AST
import Semantics
-- |Runs a piece of code
run :: String -> AST
run code =
let
(ast, _) = parse . scan $ code
in
reduce ast
main = do
code <- getLine
if code /= "exit" then do
print $ run code
main
else
putStrLn "Bye!"
|
marco-zanella/minilambda
|
src/Interpreter.hs
|
gpl-3.0
| 329 | 0 | 11 | 100 | 114 | 60 | 54 | 18 | 2 |
module FizzBuzz where
fizzbuzz :: Int -> String
fizzbuzz n
| n `mod` 3 == 0 && n `mod` 5 == 0 = "fizzbuzz!"
| n `mod` 3 == 0 = "fizz!"
| n `mod` 5 == 0 = "buzz!"
| otherwise = number n ++ "!"
lessThan20 :: Int -> String
lessThan20 n | n > 0 && n < 20 = answers !! (n-1)
where
answers =
words ("one two three four five six seven eight nine ten "
++ "eleven twelve thirteen fourteen fifteen sixteen "
++ "seventeen eighteen nineteen")
tens :: Int -> String
tens n
| n >= 2 && n <= 9 =
answers !! (n-2)
where
answers = words "twenty thirty forty fifty sixty seventy eighty ninety"
number :: Int -> String
number n
| 1 <= n && n < 20 = lessThan20 n
| n `mod` 10 == 0 && n < 100 = tens (n `div` 10)
| n < 100 = tens (n `div` 10) ++ " " ++ lessThan20 (n `mod` 10)
| n == 100 = "one hundred"
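-- Sample outputs derived from the rules above:
--
-- >>> fizzbuzz 15
-- "fizzbuzz!"
-- >>> fizzbuzz 7
-- "seven!"
-- >>> number 42
-- "forty two"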
|
EduPH/Problemas
|
FizzBuzz/src/FizzBuzz.hs
|
gpl-3.0
| 1,027 | 0 | 12 | 423 | 363 | 186 | 177 | 24 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Config ( parseConfig
) where
import Data.ConfigFile ( readfile
, get
, emptyCP
, CPError(..)
, CPErrorData(..) )
import Control.Monad.IO.Class ( liftIO )
import Control.Monad.Except ( runExceptT )
import Control.Monad.Catch ( throwM
, MonadThrow )
import Control.Monad ( join )
import qualified Exception.Handler as E
import DB.Types ( DBInfo(..)
, PostgresAuth(..)
, DBConn(..))
-- |Attempt to parse the configuration file located in conf/sparkive.conf.
parseConfig' :: IO (Either CPError DBInfo)
parseConfig' = runExceptT $ do
cp <- join . liftIO $ readfile emptyCP "conf/sparkive.conf"
backend <- get cp "Database" "backend"
if backend == "postgres"
then do
host <- get cp "Postgres" "host"
user <- get cp "Postgres" "user"
pass <- get cp "Postgres" "pass"
port <- get cp "Postgres" "port"
dbname <- get cp "Postgres" "db_name"
return . PostgresInfo $ PostgresAuth host user pass port dbname
else if backend == "acid-state"
then do
acidPath <- get cp "Acid-State" "dir"
return $ AcidStateInfo acidPath
else liftIO . throwM $ E.ConfigParseException wrongDBType
where wrongDBType = "Database type in sparkive.conf must be either \"acid-state\" or \"postgres\""
-- |Parse the configuration file as in 'parseConfig\'', but simply throw an exception
-- if an error occurs.
parseConfig :: IO DBInfo
parseConfig = do
pc <- parseConfig'
case pc of
Left cperr -> throwM (E.ConfigParseException $ prettyPrintErr cperr)
Right x -> return x
-- |Return a string describing the given 'CPError' in a more user-friendly way.
prettyPrintErr :: CPError -> String
prettyPrintErr (errDat,errStr) =
case errDat of
ParseError str -> "The " ++ confFile ++ " appears to be malformed. Here's the parse error: " ++ str
NoSection str -> "The section \"" ++ str ++ "\" does not exist in the " ++ confFile ++ "."
NoOption str -> "The option \"" ++ str ++ "\" does not exist in the " ++ confFile ++ "."
OtherProblem str -> "There was an error processing the " ++ confFile ++ ". Here's the error: " ++ str
_ -> error "Could not process configuration file."
where confFile = "configuration file for this Sparkive installation (sparkive.conf)"
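-- Usage sketch (the connector functions below are hypothetical; they stand for
-- whatever database layer consumes a 'DBInfo'):
--
-- > main = do
-- > info <- parseConfig
-- > case info of
-- > PostgresInfo auth -> connectPostgres auth
-- > AcidStateInfo dir -> openAcidStore dir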
|
MortimerMcMire315/sparkive
|
src/Config.hs
|
gpl-3.0
| 2,604 | 0 | 13 | 792 | 547 | 285 | 262 | 49 | 5 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Worlds.SpaceFillingTunnel
( spaceFillingTunnel
) where
import Data.Foldable
import qualified Data.Set as Set
import Control.Monad.Random
import Linear hiding (translation)
import Constraints.Scalar
import Constraints.Vector
import Worlds.RandomColorBox
import Transformation
import SceneTO
import Worlds.SpaceFillingPath
spaceFillingTunnel :: forall m v a.
(MonadRandom m, SomeVector v, SomeScalar a) =>
Int -> m (SceneTO v a, Transformation v a)
spaceFillingTunnel iteration = (, translation (pure (-1))) <$> scene
where
scene = randomColorBoxTunnel (2^(iteration+1))
((`Set.notMember` pathSet) . toList)
pathSet :: Set.Set [Int]
pathSet = Set.fromList path
path = map (map (+1)) $ stretchedSpaceFillingPath dim iteration
dim = length (pure 0 :: v Int)
|
MatthiasHu/4d-labyrinth
|
src/Worlds/SpaceFillingTunnel.hs
|
gpl-3.0
| 870 | 0 | 11 | 145 | 264 | 150 | 114 | 24 | 1 |
{-# LANGUAGE Arrows #-}
module Main where
import System.IO
import Data.Maybe
import Control.Monad.Random
import Control.Monad.Reader
import Control.Monad.State
import Data.MonadicStreamFunction
import SIR
type Time = Double
type DTime = Double
type EnvironmentFold e = [e] -> e -> e
type AgentMSF g s m e = MSF (ReaderT DTime (StateT (AgentOut s m) (Rand g))) (AgentIn m, e) e
type SIREnv = [AgentId]
type SIRAgentMSF g = AgentMSF g SIRState SIRMsg SIREnv
contactRate :: Double
contactRate = 5.0
infectionProb :: Double
infectionProb = 0.05
illnessDuration :: Double
illnessDuration = 15.0
agentCount :: Int
agentCount = 1000
infectedCount :: Int
infectedCount = 10
rngSeed :: Int
rngSeed = 42
dt :: DTime
dt = 0.01
t :: Time
t = 150
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
let g = mkStdGen rngSeed
let as = initAgents agentCount infectedCount
let ass = runSimulationUntil g t dt as
let dyns = aggregateAllStates ass
let fileName = "SIR_DUNAI_AGENTIOSTATE_DYNAMICS_" ++ show agentCount ++ "agents.m"
writeAggregatesToFile fileName dyns
runSimulationUntil :: RandomGen g => g
-> Time
-> DTime
-> [SIRState]
-> [[SIRState]]
runSimulationUntil g t dt as = map (\(aos, _) -> map (fromJust. agentObservable) aos) aoss
where
steps = floor $ t / dt
ticks = replicate steps ()
msfs = map sirAgent as
n = length as
env = [0..n-1]
ains = map agentIn env
aossM = embed (parSimulation msfs ains env sirEnvFold) ticks
aoss = evalRand (evalStateT (runReaderT aossM dt) agentOut) g
sirEnvFold :: [SIREnv] -> SIREnv -> SIREnv
sirEnvFold _ e = e
sirAgent :: RandomGen g => SIRState -> SIRAgentMSF g
sirAgent Susceptible = susceptibleAgent
sirAgent Infected = infectedAgent
sirAgent Recovered = recoveredAgent
susceptibleAgent :: RandomGen g => SIRAgentMSF g
susceptibleAgent = switch susceptibleAgentInfectedEvent (const infectedAgent)
where
susceptibleAgentInfectedEvent :: RandomGen g =>
MSF
(ReaderT DTime (StateT SIRAgentOut (Rand g)))
(SIRAgentIn, SIREnv)
(SIREnv, Maybe ())
susceptibleAgentInfectedEvent = proc (ain, e) -> do
isInfected <- arrM (\ain' -> do
doInfect <- lift $ lift $ gotInfected infectionProb ain'
if doInfect
then lift $ put (agentOutObs Infected) >> return doInfect
else lift $ put (agentOutObs Susceptible) >> return doInfect) -< ain
infEvt <- boolToMaybe () -< isInfected
_ <- susceptibleAgentInfectedEventAux -< e
returnA -< (e, infEvt)
where
susceptibleAgentInfectedEventAux :: RandomGen g =>
MSF
(ReaderT DTime (StateT SIRAgentOut (Rand g)))
(SIREnv)
()
susceptibleAgentInfectedEventAux = proc e -> do
makeContact <- occasionally (1 / contactRate) () -< ()
if isJust makeContact
then arrM makeRandomContact -< e
else returnA -< ()
where
makeRandomContact :: RandomGen g =>
SIREnv
-> ReaderT DTime (StateT SIRAgentOut (Rand g)) ()
makeRandomContact e = do
randContact <- lift $ lift $ randomElem e
lift $ sendMessageM (randContact, Contact Susceptible)
return ()
boolToMaybe :: Monad m => a -> MSF m Bool (Maybe a)
boolToMaybe a = proc b -> do
if b
then returnA -< Just a
else returnA -< Nothing
infectedAgent :: RandomGen g => SIRAgentMSF g
infectedAgent = switch infectedAgentRecoveredEvent (const recoveredAgent)
where
infectedAgentRecoveredEvent :: RandomGen g =>
MSF
(ReaderT DTime (StateT SIRAgentOut (Rand g)))
(SIRAgentIn, SIREnv)
(SIREnv, Maybe ())
infectedAgentRecoveredEvent = proc (ain, e) -> do
recEvt <- occasionally illnessDuration () -< ()
let a = maybe Infected (const Recovered) recEvt
arrM (\(a, ain) -> do
lift $ put (agentOutObs a)
lift $ respondToContactWithM Infected ain) -< (a, ain)
returnA -< (e, recEvt)
recoveredAgent :: RandomGen g => SIRAgentMSF g
recoveredAgent = proc (_, e) -> do
arrM (\_ -> lift $ put (agentOutObs Recovered)) -< ()
returnA -< e
parSimulation :: RandomGen g =>
[AgentMSF g s m e]
-> [AgentIn m]
-> e
-> EnvironmentFold e
-> MSF (ReaderT DTime (StateT (AgentOut s m) (Rand g)))
()
([AgentOut s m], e)
parSimulation msfs ains e ef = MSF $ \_ -> do
aosMsfs <- mapM (parSimulationAux e) (zip ains msfs)
let aoes = map fst aosMsfs
let msfs' = map snd aosMsfs
let aos = map fst aoes
let es = map snd aoes
let aids = map agentId ains
let ains' = map (\ai -> agentIn $ agentId ai) ains
let ains'' = distributeMessages ains' (zip aids aos)
let e' = ef es e
return ((aos, e'), parSimulation msfs' ains'' e' ef)
where
parSimulationAux :: e
-> (AgentIn m, AgentMSF g s m e)
-> ReaderT DTime (StateT (AgentOut s m) (Rand g)) ((AgentOut s m, e), AgentMSF g s m e)
parSimulationAux e (ain, msf) = do
_ <- lift $ put agentOut -- NOTE: reset state
(e', msf') <- unMSF msf (ain, e)
ao <- lift $ get -- NOTE: get state
return ((ao, e'), msf')
-- NOTE: is in spirit of the Yampa implementation
occasionally :: RandomGen g => Time -> b -> MSF (ReaderT DTime (StateT (AgentOut s m) (Rand g))) a (Maybe b)
occasionally t_avg b
| t_avg > 0 = MSF (const tf)
| otherwise = error "AFRP: occasionally: Non-positive average interval."
where
-- Generally, if events occur with an average frequency of f, the
-- probability of at least one event occurring in an interval of t
-- is given by (1 - exp (-f*t)). The goal in the following is to
-- decide whether at least one event occurred in the interval of size
-- dt preceding the current sample point. For the first point,
-- we can think of the preceding interval as being 0, implying
-- no probability of an event occurring.
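-- For example, with dt = 0.01 and t_avg = 1 / contactRate = 0.2 (as used
-- by the susceptible agent above), p = 1 - exp (-0.05) ~ 0.049, i.e. about
-- a 5% chance of a contact event per step.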
tf = do
dt <- ask
r <- lift $ getRandomR (0, 1)
let p = 1 - exp (-(dt / t_avg))
let evt = if r < p
then Just b
else Nothing
return (evt, MSF (const tf))
|
thalerjonathan/phd
|
coding/papers/FrABS/Haskell/prototyping/SIRDunaiAgentIOState/src/Main.hs
|
gpl-3.0
| 6,898 | 5 | 22 | 2,385 | 2,124 | 1,073 | 1,051 | 155 | 3 |
module DL3034 (tests) where
import Helpers
import Test.Hspec
tests :: SpecWith ()
tests = do
let ?rulesConfig = mempty
describe "DL3034 - Non-interactive switch missing from zypper command: `zypper install -y`" $ do
it "not ok without non-interactive switch" $ do
ruleCatches "DL3034" "RUN zypper install httpd=2.4.24 && zypper clean"
onBuildRuleCatches "DL3034" "RUN zypper install httpd=2.4.24 && zypper clean"
it "ok with non-interactive switch present" $ do
ruleCatchesNot "DL3034" "RUN zypper install -y httpd=2.4.24 && zypper clean"
ruleCatchesNot "DL3034" "RUN zypper install --no-confirm httpd=2.4.24 && zypper clean"
onBuildRuleCatchesNot "DL3034" "RUN zypper install -y httpd=2.4.24 && zypper clean"
onBuildRuleCatchesNot "DL3034" "RUN zypper install --no-confirm httpd=2.4.24 && zypper clean"
|
lukasmartinelli/hadolint
|
test/DL3034.hs
|
gpl-3.0
| 855 | 0 | 13 | 158 | 119 | 53 | 66 | -1 | -1 |
-- | Qualification Round 2015 Problem D. Ominous Omino
-- https://code.google.com/codejam/contest/6224486/dashboard#s=p3
module OminousOmino where
-- constant imports
import Text.ParserCombinators.Parsec
import Text.Parsec
import System.IO (openFile, hClose, hGetContents, hPutStrLn, IOMode(ReadMode), stderr)
import Debug.Trace (trace)
-- variable imports
import qualified Data.Set as S
import Data.List (group, sort, sortBy, foldl', inits)
import Data.Char (ord)
import qualified Data.Map as M
-- variable Data
data TestCase = TestCase
Int -- ^ X
Int -- ^ R
Int -- ^ C
deriving (Show, Eq, Ord)
-- variable implementation
solveCase tc@(TestCase x r c) = solve tc
solve tc@(TestCase x r c) =
if x >= 7
|| x == 3 && s == 1
|| x == 4 && s <= 2
|| x == 5 && (s <= 2 || (s, l) == (3, 5))
|| x == 6 && s <= 3
|| rest /= 0
then rich
else gabe
where
(_, rest) = (r * c) `divMod` x
(s, l) = (min r c, max r c)
rich = "RICHARD"
gabe = "GABRIEL"
divise n
| rest == 0 = (d, d)
| rest == 1 = (d + 1, d)
where
(d, rest) = n `divMod` 2
-- Parser (variable part)
parseSingleCase = do
x <- parseInt
char ' '
r <- parseInt
char ' '
c <- parseInt
eol <|> eof
return $ TestCase x r c
eol :: GenParser Char st ()
eol = char '\n' >> return ()
parseIntegral :: Integral a => (String -> a) -> GenParser Char st a
parseIntegral rd = rd <$> (plus <|> minus <|> number)
where
plus = char '+' *> number
minus = (:) <$> char '-' <*> number
number = many1 digit
parseInteger :: GenParser Char st Integer
parseInteger = parseIntegral (read :: String -> Integer)
parseIntegers :: GenParser Char st [Integer]
parseIntegers = parseInteger `sepBy` (char ' ')
parseInt :: GenParser Char st Int
parseInt = parseIntegral (read :: String -> Int)
parseInts :: GenParser Char st [Int]
parseInts = parseInt `sepBy` (char ' ')
--
-- constant part
--
-- Parsing (constant part)
-- | First number is number of test cases
data TestInput = TestInput
Int -- ^ number of 'TestCase's
[TestCase]
deriving (Show, Ord, Eq)
parseTestCases = do
numCases <- parseInt
eol
cases <- count numCases parseSingleCase
return $ TestInput numCases cases
parseCases :: String -> Either ParseError TestInput
parseCases contents = parse parseTestCases "(stdin)" contents
-- main
runOnContent :: String -> IO ()
runOnContent content = do
let parsed = parseCases content
case parsed of
Right (TestInput _ cases) -> mapM_ putStrLn (output (solveCases cases))
Left err -> hPutStrLn stderr $ show err
where
solveCases xs = map solveCase xs
consCase n s = "Case #" ++ (show n) ++ ": " ++ s
output xs = zipWith consCase [1..] xs
-- | command line implementation
run = do
cs <- getContents
runOnContent cs
main = run
|
dirkz/google-code-jam-haskell
|
practice/src/OminousOmino.hs
|
mpl-2.0
| 2,932 | 0 | 23 | 770 | 1,014 | 540 | 474 | 79 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceNetworking.Services.Connections.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List the private connections that are configured in a service
-- consumer\'s VPC network.
--
-- /See:/ <https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started Service Networking API Reference> for @servicenetworking.services.connections.list@.
module Network.Google.Resource.ServiceNetworking.Services.Connections.List
(
-- * REST Resource
ServicesConnectionsListResource
-- * Creating a Request
, servicesConnectionsList
, ServicesConnectionsList
-- * Request Lenses
, sclParent
, sclXgafv
, sclUploadProtocol
, sclAccessToken
, sclUploadType
, sclNetwork
, sclCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceNetworking.Types
-- | A resource alias for @servicenetworking.services.connections.list@ method which the
-- 'ServicesConnectionsList' request conforms to.
type ServicesConnectionsListResource =
"v1" :>
Capture "parent" Text :>
"connections" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "network" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListConnectionsResponse
-- | List the private connections that are configured in a service
-- consumer\'s VPC network.
--
-- /See:/ 'servicesConnectionsList' smart constructor.
data ServicesConnectionsList =
ServicesConnectionsList'
{ _sclParent :: !Text
, _sclXgafv :: !(Maybe Xgafv)
, _sclUploadProtocol :: !(Maybe Text)
, _sclAccessToken :: !(Maybe Text)
, _sclUploadType :: !(Maybe Text)
, _sclNetwork :: !(Maybe Text)
, _sclCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesConnectionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sclParent'
--
-- * 'sclXgafv'
--
-- * 'sclUploadProtocol'
--
-- * 'sclAccessToken'
--
-- * 'sclUploadType'
--
-- * 'sclNetwork'
--
-- * 'sclCallback'
servicesConnectionsList
:: Text -- ^ 'sclParent'
-> ServicesConnectionsList
servicesConnectionsList pSclParent_ =
ServicesConnectionsList'
{ _sclParent = pSclParent_
, _sclXgafv = Nothing
, _sclUploadProtocol = Nothing
, _sclAccessToken = Nothing
, _sclUploadType = Nothing
, _sclNetwork = Nothing
, _sclCallback = Nothing
}
-- | The service that is managing peering connectivity for a service
-- producer\'s organization. For Google services that support this
-- functionality, this value is
-- \`services\/servicenetworking.googleapis.com\`. If you specify
-- \`services\/-\` as the parameter value, all configured peering services
-- are listed.
sclParent :: Lens' ServicesConnectionsList Text
sclParent
= lens _sclParent (\ s a -> s{_sclParent = a})
-- | V1 error format.
sclXgafv :: Lens' ServicesConnectionsList (Maybe Xgafv)
sclXgafv = lens _sclXgafv (\ s a -> s{_sclXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sclUploadProtocol :: Lens' ServicesConnectionsList (Maybe Text)
sclUploadProtocol
= lens _sclUploadProtocol
(\ s a -> s{_sclUploadProtocol = a})
-- | OAuth access token.
sclAccessToken :: Lens' ServicesConnectionsList (Maybe Text)
sclAccessToken
= lens _sclAccessToken
(\ s a -> s{_sclAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sclUploadType :: Lens' ServicesConnectionsList (Maybe Text)
sclUploadType
= lens _sclUploadType
(\ s a -> s{_sclUploadType = a})
-- | The name of service consumer\'s VPC network that\'s connected with
-- service producer network through a private connection. The network name
-- must be in the following format:
-- \`projects\/{project}\/global\/networks\/{network}\`. {project} is a
-- project number, such as in \`12345\` that includes the VPC service
-- consumer\'s VPC network. {network} is the name of the service
-- consumer\'s VPC network.
sclNetwork :: Lens' ServicesConnectionsList (Maybe Text)
sclNetwork
= lens _sclNetwork (\ s a -> s{_sclNetwork = a})
-- | JSONP
sclCallback :: Lens' ServicesConnectionsList (Maybe Text)
sclCallback
= lens _sclCallback (\ s a -> s{_sclCallback = a})
instance GoogleRequest ServicesConnectionsList where
type Rs ServicesConnectionsList =
ListConnectionsResponse
type Scopes ServicesConnectionsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/service.management"]
requestClient ServicesConnectionsList'{..}
= go _sclParent _sclXgafv _sclUploadProtocol
_sclAccessToken
_sclUploadType
_sclNetwork
_sclCallback
(Just AltJSON)
serviceNetworkingService
where go
= buildClient
(Proxy :: Proxy ServicesConnectionsListResource)
mempty
|
brendanhay/gogol
|
gogol-servicenetworking/gen/Network/Google/Resource/ServiceNetworking/Services/Connections/List.hs
|
mpl-2.0
| 5,994 | 0 | 17 | 1,306 | 795 | 468 | 327 | 115 | 1 |
module DifferentialGeometry where
import Vectors
data DifferentialGeometry =
DifferentialGeometry {
dgPoint :: Vec3,
dgNormal :: Vec3,
dgDPDU :: Vec3, dgDPDV :: Vec3 }
deriving Show
|
pstiasny/mgr
|
DifferentialGeometry.hs
|
lgpl-3.0
| 200 | 0 | 8 | 43 | 43 | 28 | 15 | 8 | 0 |
module Main where
import Haste.DOM (withElems, getValue, setProp)
import Haste.Events (onEvent, MouseEvent(Click))
import PLexceptGOTOdotNET (run)
main = withElems ["prog", "result", "run-button"] driver
driver [progElem, resultElem, runButtonElem] =
onEvent runButtonElem Click $ \_ -> do
Just prog <- getValue progElem
setProp resultElem "textContent" $ run prog
|
catseye/PL-GOTO.NET
|
src/HasteMain.hs
|
unlicense
| 389 | 0 | 10 | 69 | 123 | 68 | 55 | 9 | 1 |
module Lol.Champs where
import Prelude
import System.Random
import Lol.Helpers
data Champ = Ahri
| Akali
| Alistar
| Amumu
| Anivia
| Annie
| Ashe
| Blitzcrank
| Brand
| Caitlyn
| Cassiopeia
| ChoGath
| Corki
| DrMundo
| Evelynn
| Ezreal
| Fiddlesticks
| Fizz
| Galio
| Gangplank
| Garen
| Gragas
| Graves
| Heimerdinger
| Irelia
| Janna
| JarvanIV
| Jax
| Karma
| Karthus
| Kassadin
| Katarina
| Kayle
| Kennen
| KogMaw
| LeBlanc
| LeeSin
| Leona
| Lux
| Malphite
| Malzahar
| Maokai
| MasterYi
| MissFortune
| Mordekaiser
| Morgana
| Nasus
| Nidalee
| Nocturne
| Nunu
| Olaf
| Orianna
| Pantheon
| Poppy
| Rammus
| Renekton
| Riven
| Rumble
| Ryze
| Shaco
| Shen
| Shyvana
| Singed
| Sion
| Sivir
| Skarner
| Sona
| Soraka
| Swain
| Talon
| Taric
| Teemo
| Tristana
| Trundle
| Tryndamere
| TwistedFate
| Twitch
| Udyr
| Urgot
| Vayne
| Veigar
| Viktor
| Vladimir
| Volibear
| Warwick
| Wukong
| Xerath
| XinZhao
| Yorick
| Zilean
deriving (Bounded, Enum, Eq, Ord, Show)
instance Random Champ where
randomR = boundedEnumRandomR
random = boundedEnumRandom
type Level = Int
|
MostAwesomeDude/lollerskates
|
Lol/Champs.hs
|
bsd-2-clause
| 2,071 | 0 | 6 | 1,208 | 340 | 220 | 120 | 99 | 0 |
{-# LANGUAGE BangPatterns #-}
import qualified Data.ByteString.Char8 as C
import qualified Data.Foldable as F
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Sequence as Seq
import Data.Sequence(Seq)
import Data.IntMap.Strict(IntMap)
import Data.Maybe
pushBack x q = q Seq.|> x
popFront q = (x, q')
where (!x Seq.:< q') = Seq.viewl q
increaseKey k = snd . IntMap.insertLookupWithKey (\k new old -> succ old) k 1
decreaseKey k = IntMap.updateWithKey (\k v -> if v == 1 then Nothing else Just $! (pred v)) k
data ArraySub = Empty | ArraySub {
_size :: {-# UNPACK #-} !Int
, _queue :: Seq Int
, _heap :: IntMap Int
} deriving Show
size Empty = 0
size (ArraySub k _ _) = k
push x Empty = ArraySub 1 (Seq.singleton x) (IntMap.singleton x 1)
push x (ArraySub k q h) = ArraySub (succ k) (pushBack x q) (increaseKey x h)
pushpop x (ArraySub k q h) = ArraySub k q'' h'
where (!a, !q') = popFront q
!h' = increaseKey x . decreaseKey a $ h
!q'' = pushBack x q'
arrayMax (ArraySub k q h) = fst . IntMap.findMax $ h
pushUntil k xxs a
| k == 0 = (a, xxs)
| otherwise = pushUntil (pred k) (tail xxs) (push (head xxs) a)
arraysub ints k = go ( Seq.singleton (arrayMax arr) ) arr ints'
where (arr, ints') = pushUntil k ints Empty
go r u [] = r
go r u (x:xs) = go (r Seq.|> arrayMax u') u' xs
where u' = pushpop x u
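-- For example, arraysub [1,3,2,5,4] 3 yields fromList [3,5,5]: the maximum of
-- each length-3 sliding window.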
pr :: [Int] -> String
pr = foldr p1 ""
where p1 x = shows x . showString " "
readint = fst . fromJust . C.readInt
process inputs = putStrLn . pr . F.toList $ ans
where (_:ints) = map readint (C.words inputs)
!ans = arraysub (init ints) (last ints)
main = C.getContents >>= process
|
wangbj/haskell
|
arraysub.hs
|
bsd-2-clause
| 1,693 | 0 | 11 | 405 | 784 | 404 | 380 | 43 | 2 |
{-# LANGUAGE RankNTypes, BangPatterns, FlexibleContexts, Strict #-}
{- |
Module : Dominators
Copyright : (c) Matt Morrow 2009
License : BSD3
Maintainer : <[email protected]>
Stability : experimental
Portability : portable
Taken from the dom-lt package.
The Lengauer-Tarjan graph dominators algorithm.
\[1\] Lengauer, Tarjan,
/A Fast Algorithm for Finding Dominators in a Flowgraph/, 1979.
\[2\] Muchnick,
/Advanced Compiler Design and Implementation/, 1997.
\[3\] Brisk, Sarrafzadeh,
/Interference Graphs for Procedures in Static Single/
/Information Form are Interval Graphs/, 2007.
Originally taken from the dom-lt package.
-}
module Dominators (
Node,Path,Edge
,Graph,Rooted
,idom,ipdom
,domTree,pdomTree
,dom,pdom
,pddfs,rpddfs
,fromAdj,fromEdges
,toAdj,toEdges
,asTree,asGraph
,parents,ancestors
) where
import GhcPrelude
import Data.Bifunctor
import Data.Tuple (swap)
import Data.Tree
import Data.IntMap(IntMap)
import Data.IntSet(IntSet)
import qualified Data.IntMap.Strict as IM
import qualified Data.IntSet as IS
import Control.Monad
import Control.Monad.ST.Strict
import Data.Array.ST
import Data.Array.Base hiding ((!))
-- (unsafeNewArray_
-- ,unsafeWrite,unsafeRead
-- ,readArray,writeArray)
import Util (debugIsOn)
-----------------------------------------------------------------------------
type Node = Int
type Path = [Node]
type Edge = (Node,Node)
type Graph = IntMap IntSet
type Rooted = (Node, Graph)
-----------------------------------------------------------------------------
-- | /Dominators/.
-- Complexity as for @idom@
dom :: Rooted -> [(Node, Path)]
dom = ancestors . domTree
-- | /Post-dominators/.
-- Complexity as for @idom@.
pdom :: Rooted -> [(Node, Path)]
pdom = ancestors . pdomTree
-- | /Dominator tree/.
-- Complexity as for @idom@.
domTree :: Rooted -> Tree Node
domTree a@(r,_) =
let is = filter ((/=r).fst) (idom a)
tg = fromEdges (fmap swap is)
in asTree (r,tg)
-- | /Post-dominator tree/.
-- Complexity as for @idom@.
pdomTree :: Rooted -> Tree Node
pdomTree a@(r,_) =
let is = filter ((/=r).fst) (ipdom a)
tg = fromEdges (fmap swap is)
in asTree (r,tg)
-- | /Immediate dominators/.
-- /O(|E|*alpha(|E|,|V|))/, where /alpha(m,n)/ is
-- \"a functional inverse of Ackermann's function\".
--
-- This Complexity bound assumes /O(1)/ indexing. Since we're
-- using @IntMap@, it has an additional /lg |V|/ factor
-- somewhere in there. I'm not sure where.
idom :: Rooted -> [(Node,Node)]
idom rg = runST (evalS idomM =<< initEnv (pruneReach rg))
-- | /Immediate post-dominators/.
-- Complexity as for @idom@.
ipdom :: Rooted -> [(Node,Node)]
ipdom rg = runST (evalS idomM =<< initEnv (pruneReach (second predG rg)))
-----------------------------------------------------------------------------
-- | /Post-dominated depth-first search/.
pddfs :: Rooted -> [Node]
pddfs = reverse . rpddfs
-- | /Reverse post-dominated depth-first search/.
rpddfs :: Rooted -> [Node]
rpddfs = concat . levels . pdomTree
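-- A small usage sketch (added here for illustration, not part of the
-- original module): in the diamond graph rooted at 1 with edges
-- 1->2, 1->3, 2->4 and 3->4, node 1 immediately dominates 2, 3 and 4.
_exampleIdom :: [(Node, Node)]
_exampleIdom = idom (1, fromEdges [(1, 2), (1, 3), (2, 4), (3, 4)])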
-----------------------------------------------------------------------------
type Dom s a = S s (Env s) a
type NodeSet = IntSet
type NodeMap a = IntMap a
data Env s = Env
{succE :: !Graph
,predE :: !Graph
,bucketE :: !Graph
,dfsE :: {-# UNPACK #-}!Int
,zeroE :: {-# UNPACK #-}!Node
,rootE :: {-# UNPACK #-}!Node
,labelE :: {-# UNPACK #-}!(Arr s Node)
,parentE :: {-# UNPACK #-}!(Arr s Node)
,ancestorE :: {-# UNPACK #-}!(Arr s Node)
,childE :: {-# UNPACK #-}!(Arr s Node)
,ndfsE :: {-# UNPACK #-}!(Arr s Node)
,dfnE :: {-# UNPACK #-}!(Arr s Int)
,sdnoE :: {-# UNPACK #-}!(Arr s Int)
,sizeE :: {-# UNPACK #-}!(Arr s Int)
,domE :: {-# UNPACK #-}!(Arr s Node)
,rnE :: {-# UNPACK #-}!(Arr s Node)}
-----------------------------------------------------------------------------
idomM :: Dom s [(Node,Node)]
idomM = do
dfsDom =<< rootM
n <- gets dfsE
forM_ [n,n-1..1] (\i-> do
w <- ndfsM i
sw <- sdnoM w
ps <- predsM w
forM_ ps (\v-> do
u <- eval v
su <- sdnoM u
when (su < sw)
(store sdnoE w su))
z <- ndfsM =<< sdnoM w
modify(\e->e{bucketE=IM.adjust
(w`IS.insert`)
z (bucketE e)})
pw <- parentM w
link pw w
bps <- bucketM pw
forM_ bps (\v-> do
u <- eval v
su <- sdnoM u
sv <- sdnoM v
let dv = case su < sv of
True-> u
False-> pw
store domE v dv))
forM_ [1..n] (\i-> do
w <- ndfsM i
j <- sdnoM w
z <- ndfsM j
dw <- domM w
when (dw /= z)
(do ddw <- domM dw
store domE w ddw))
fromEnv
-----------------------------------------------------------------------------
eval :: Node -> Dom s Node
eval v = do
n0 <- zeroM
a <- ancestorM v
case a==n0 of
True-> labelM v
False-> do
compress v
a <- ancestorM v
l <- labelM v
la <- labelM a
sl <- sdnoM l
sla <- sdnoM la
case sl <= sla of
True-> return l
False-> return la
compress :: Node -> Dom s ()
compress v = do
n0 <- zeroM
a <- ancestorM v
aa <- ancestorM a
when (aa /= n0) (do
compress a
a <- ancestorM v
aa <- ancestorM a
l <- labelM v
la <- labelM a
sl <- sdnoM l
sla <- sdnoM la
when (sla < sl)
(store labelE v la)
store ancestorE v aa)
-----------------------------------------------------------------------------
link :: Node -> Node -> Dom s ()
link v w = do
n0 <- zeroM
lw <- labelM w
slw <- sdnoM lw
let balance s = do
c <- childM s
lc <- labelM c
slc <- sdnoM lc
case slw < slc of
False-> return s
True-> do
zs <- sizeM s
zc <- sizeM c
cc <- childM c
zcc <- sizeM cc
case 2*zc <= zs+zcc of
True-> do
store ancestorE c s
store childE s cc
balance s
False-> do
store sizeE c zs
store ancestorE s c
balance c
s <- balance w
lw <- labelM w
zw <- sizeM w
store labelE s lw
store sizeE v . (+zw) =<< sizeM v
let follow s = do
when (s /= n0) (do
store ancestorE s v
follow =<< childM s)
zv <- sizeM v
follow =<< case zv < 2*zw of
False-> return s
True-> do
cv <- childM v
store childE v s
return cv
-----------------------------------------------------------------------------
dfsDom :: Node -> Dom s ()
dfsDom i = do
_ <- go i
n0 <- zeroM
r <- rootM
store parentE r n0
where go i = do
n <- nextM
store dfnE i n
store sdnoE i n
store ndfsE n i
store labelE i i
ss <- succsM i
forM_ ss (\j-> do
s <- sdnoM j
case s==0 of
False-> return()
True-> do
store parentE j i
go j)
-----------------------------------------------------------------------------
initEnv :: Rooted -> ST s (Env s)
initEnv (r0,g0) = do
let (g,rnmap) = renum 1 g0
pred = predG g
r = rnmap IM.! r0
n = IM.size g
ns = [0..n]
m = n+1
let bucket = IM.fromList
(zip ns (repeat mempty))
rna <- newI m
writes rna (fmap swap
(IM.toList rnmap))
doms <- newI m
sdno <- newI m
size <- newI m
parent <- newI m
ancestor <- newI m
child <- newI m
label <- newI m
ndfs <- newI m
dfn <- newI m
forM_ [0..n] (doms.=0)
forM_ [0..n] (sdno.=0)
forM_ [1..n] (size.=1)
forM_ [0..n] (ancestor.=0)
forM_ [0..n] (child.=0)
(doms.=r) r
(size.=0) 0
(label.=0) 0
return (Env
{rnE = rna
,dfsE = 0
,zeroE = 0
,rootE = r
,labelE = label
,parentE = parent
,ancestorE = ancestor
,childE = child
,ndfsE = ndfs
,dfnE = dfn
,sdnoE = sdno
,sizeE = size
,succE = g
,predE = pred
,bucketE = bucket
,domE = doms})
fromEnv :: Dom s [(Node,Node)]
fromEnv = do
dom <- gets domE
rn <- gets rnE
-- r <- gets rootE
(_,n) <- st (getBounds dom)
forM [1..n] (\i-> do
j <- st(rn!:i)
d <- st(dom!:i)
k <- st(rn!:d)
return (j,k))
-----------------------------------------------------------------------------
zeroM :: Dom s Node
zeroM = gets zeroE
domM :: Node -> Dom s Node
domM = fetch domE
rootM :: Dom s Node
rootM = gets rootE
succsM :: Node -> Dom s [Node]
succsM i = gets (IS.toList . (! i) . succE)
predsM :: Node -> Dom s [Node]
predsM i = gets (IS.toList . (! i) . predE)
bucketM :: Node -> Dom s [Node]
bucketM i = gets (IS.toList . (! i) . bucketE)
sizeM :: Node -> Dom s Int
sizeM = fetch sizeE
sdnoM :: Node -> Dom s Int
sdnoM = fetch sdnoE
-- dfnM :: Node -> Dom s Int
-- dfnM = fetch dfnE
ndfsM :: Int -> Dom s Node
ndfsM = fetch ndfsE
childM :: Node -> Dom s Node
childM = fetch childE
ancestorM :: Node -> Dom s Node
ancestorM = fetch ancestorE
parentM :: Node -> Dom s Node
parentM = fetch parentE
labelM :: Node -> Dom s Node
labelM = fetch labelE
nextM :: Dom s Int
nextM = do
n <- gets dfsE
let n' = n+1
modify(\e->e{dfsE=n'})
return n'
-----------------------------------------------------------------------------
type A = STUArray
type Arr s a = A s Int a
infixl 9 !:
infixr 2 .=
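-- Reads and writes are bounds-checked only when assertions are on
-- ('debugIsOn'); otherwise the unchecked primitives are used.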
(.=) :: (MArray (A s) a (ST s))
=> Arr s a -> a -> Int -> ST s ()
(v .= x) i
| debugIsOn = writeArray v i x
| otherwise = unsafeWrite v i x
(!:) :: (MArray (A s) a (ST s))
=> A s Int a -> Int -> ST s a
a !: i
| debugIsOn = do
o <- readArray a i
return $! o
| otherwise = do
o <- unsafeRead a i
return $! o
new :: (MArray (A s) a (ST s))
=> Int -> ST s (Arr s a)
new n = unsafeNewArray_ (0,n-1)
newI :: Int -> ST s (Arr s Int)
newI = new
-- newD :: Int -> ST s (Arr s Double)
-- newD = new
-- dump :: (MArray (A s) a (ST s)) => Arr s a -> ST s [a]
-- dump a = do
-- (m,n) <- getBounds a
-- forM [m..n] (\i -> a!:i)
writes :: (MArray (A s) a (ST s))
=> Arr s a -> [(Int,a)] -> ST s ()
writes a xs = forM_ xs (\(i,x) -> (a.=x) i)
-- arr :: (MArray (A s) a (ST s)) => [a] -> ST s (Arr s a)
-- arr xs = do
-- let n = length xs
-- a <- new n
-- go a n 0 xs
-- return a
-- where go _ _ _ [] = return ()
-- go a n i (x:xs)
-- | i <= n = (a.=x) i >> go a n (i+1) xs
-- | otherwise = return ()
-----------------------------------------------------------------------------
(!) :: Monoid a => IntMap a -> Int -> a
(!) g n = maybe mempty id (IM.lookup n g)
fromAdj :: [(Node, [Node])] -> Graph
fromAdj = IM.fromList . fmap (second IS.fromList)
fromEdges :: [Edge] -> Graph
fromEdges = collectI IS.union fst (IS.singleton . snd)
toAdj :: Graph -> [(Node, [Node])]
toAdj = fmap (second IS.toList) . IM.toList
toEdges :: Graph -> [Edge]
toEdges = concatMap (uncurry (fmap . (,))) . toAdj
predG :: Graph -> Graph
predG g = IM.unionWith IS.union (go g) g0
where g0 = fmap (const mempty) g
f :: IntMap IntSet -> Int -> IntSet -> IntMap IntSet
f m i a = foldl' (\m p -> IM.insertWith mappend p
(IS.singleton i) m)
m
(IS.toList a)
go :: IntMap IntSet -> IntMap IntSet
go = flip IM.foldlWithKey' mempty f
pruneReach :: Rooted -> Rooted
pruneReach (r,g) = (r,g2)
where is = reachable
(maybe mempty id
. flip IM.lookup g) $ r
g2 = IM.fromList
. fmap (second (IS.filter (`IS.member`is)))
. filter ((`IS.member`is) . fst)
. IM.toList $ g
tip :: Tree a -> (a, [Tree a])
tip (Node a ts) = (a, ts)
parents :: Tree a -> [(a, a)]
parents (Node i xs) = p i xs
++ concatMap parents xs
where p i = fmap (flip (,) i . rootLabel)
ancestors :: Tree a -> [(a, [a])]
ancestors = go []
where go acc (Node i xs)
= let acc' = i:acc
in p acc' xs ++ concatMap (go acc') xs
p is = fmap (flip (,) is . rootLabel)
asGraph :: Tree Node -> Rooted
asGraph t@(Node a _) = let g = go t in (a, fromAdj g)
where go (Node a ts) = let as = (fst . unzip . fmap tip) ts
in (a, as) : concatMap go ts
asTree :: Rooted -> Tree Node
asTree (r,g) = let go a = Node a (fmap go ((IS.toList . f) a))
f = (g !)
in go r
reachable :: (Node -> NodeSet) -> (Node -> NodeSet)
reachable f a = go (IS.singleton a) a
where go seen a = let s = f a
as = IS.toList (s `IS.difference` seen)
in foldl' go (s `IS.union` seen) as
collectI :: (c -> c -> c)
-> (a -> Int) -> (a -> c) -> [a] -> IntMap c
collectI (<>) f g
= foldl' (\m a -> IM.insertWith (<>)
(f a)
(g a) m) mempty
-- collect :: (Ord b) => (c -> c -> c)
-- -> (a -> b) -> (a -> c) -> [a] -> Map b c
-- collect (<>) f g
-- = foldl' (\m a -> SM.insertWith (<>)
-- (f a)
-- (g a) m) mempty
-- (renamed, old -> new)
renum :: Int -> Graph -> (Graph, NodeMap Node)
renum from = (\(_,m,g)->(g,m))
. IM.foldlWithKey'
f (from,mempty,mempty)
where
f :: (Int, NodeMap Node, IntMap IntSet) -> Node -> IntSet
-> (Int, NodeMap Node, IntMap IntSet)
f (!n,!env,!new) i ss =
let (j,n2,env2) = go n env i
(n3,env3,ss2) = IS.fold
(\k (!n,!env,!new)->
case go n env k of
(l,n2,env2)-> (n2,env2,l `IS.insert` new))
(n2,env2,mempty) ss
new2 = IM.insertWith IS.union j ss2 new
in (n3,env3,new2)
go :: Int
-> NodeMap Node
-> Node
-> (Node,Int,NodeMap Node)
go !n !env i =
case IM.lookup i env of
Just j -> (j,n,env)
Nothing -> (n,n+1,IM.insert i n env)
-----------------------------------------------------------------------------
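-- A continuation-passing-style state monad over 'ST': the state is threaded
-- through the continuation rather than returned in a tuple, so no (a, s)
-- pair is built at each bind.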
newtype S z s a = S {unS :: forall o. (a -> s -> ST z o) -> s -> ST z o}
instance Functor (S z s) where
fmap f (S g) = S (\k -> g (k . f))
instance Monad (S z s) where
return = pure
S g >>= f = S (\k -> g (\a -> unS (f a) k))
instance Applicative (S z s) where
pure a = S (\k -> k a)
(<*>) = ap
-- get :: S z s s
-- get = S (\k s -> k s s)
gets :: (s -> a) -> S z s a
gets f = S (\k s -> k (f s) s)
-- set :: s -> S z s ()
-- set s = S (\k _ -> k () s)
modify :: (s -> s) -> S z s ()
modify f = S (\k -> k () . f)
-- runS :: S z s a -> s -> ST z (a, s)
-- runS (S g) = g (\a s -> return (a,s))
evalS :: S z s a -> s -> ST z a
evalS (S g) = g ((return .) . const)
-- execS :: S z s a -> s -> ST z s
-- execS (S g) = g ((return .) . flip const)
st :: ST z a -> S z s a
st m = S (\k s-> do
a <- m
k a s)
store :: (MArray (A z) a (ST z))
=> (s -> Arr z a) -> Int -> a -> S z s ()
store f i x = do
a <- gets f
st ((a.=x) i)
fetch :: (MArray (A z) a (ST z))
=> (s -> Arr z a) -> Int -> S z s a
fetch f i = do
a <- gets f
st (a!:i)
|
sdiehl/ghc
|
compiler/utils/Dominators.hs
|
bsd-3-clause
| 16,145 | 0 | 22 | 5,622 | 5,986 | 3,072 | 2,914 | 435 | 4 |
module Problem45 where
import Data.List
isqrt :: Integer -> Maybe Integer
isqrt n = binarySearch 1 n
where
binarySearch low high =
let mid = (low + high) `div` 2
midSq = mid * mid in
if high < low then Nothing
else if midSq == n then Just mid
else if midSq > n then binarySearch low (mid-1)
else binarySearch (mid+1) high
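-- Each predicate below inverts the closed form of its figurate number:
--   T(k) = k(k+1)/2  => 8T+1  = (2k+1)^2, so sqrt(8T+1) must be odd;
--   P(k) = k(3k-1)/2 => 24P+1 = (6k-1)^2, so sqrt(24P+1)+1 must be divisible by 6;
--   H(k) = k(2k-1)   => 8H+1  = (4k-1)^2, so sqrt(8H+1)+1 must be divisible by 4.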
isTriangle :: Integer -> Bool
isTriangle t =
case isqrt (1 + 8 * t) of
Just n -> (n - 1) `mod` 2 == 0
Nothing -> False
isPentagonal :: Integer -> Bool
isPentagonal p =
case isqrt (1 + 24 * p) of
Just n -> (n + 1) `mod` 6 == 0
Nothing -> False
isHexagonal :: Integer -> Bool
isHexagonal h =
case isqrt (1 + 8 * h) of
Just n -> (n + 1) `mod` 4 == 0
Nothing -> False
hexagonals :: [Integer]
hexagonals = map h [1..]
where h n = n * (2*n - 1)
cond n = isTriangle n && isPentagonal n && n > 40755
main :: IO ()
main = print $ find cond hexagonals
|
noraesae/euler
|
src/Problem45.hs
|
bsd-3-clause
| 953 | 0 | 14 | 279 | 439 | 230 | 209 | 32 | 4 |
{-|
Module : Idris.REPL.Parser
Description : Parser for the REPL commands.
License : BSD3
Maintainer : The Idris Community.
-}
module Idris.REPL.Parser (
parseCmd
, help
, allHelp
, setOptions
) where
import Idris.AbsSyntax
import Idris.Colours
import Idris.Core.TT
import Idris.Help
import Idris.Options
import qualified Idris.Parser as P
import qualified Idris.Parser.Expr as P
import qualified Idris.Parser.Helpers as P
import qualified Idris.Parser.Ops as P
import Idris.REPL.Commands
import Control.Applicative
import Control.Monad.State.Strict
import Data.Char (isSpace, toLower)
import Data.List
import Data.List.Split (splitOn)
import System.Console.ANSI (Color(..))
import System.FilePath ((</>))
import Text.Parser.Char (anyChar, oneOf)
import Text.Parser.Combinators
import Text.Trifecta (Result)
parseCmd :: IState -> String -> String -> Result (Either String Command)
parseCmd i inputname = P.runparser pCmd i inputname . trim
where trim = f . f
where f = reverse . dropWhile isSpace
type CommandTable = [ ( [String], CmdArg, String
, String -> P.IdrisParser (Either String Command) ) ]
setOptions :: [(String, Opt)]
setOptions = [("errorcontext", ErrContext),
("showimplicits", ShowImpl),
("originalerrors", ShowOrigErr),
("autosolve", AutoSolve),
("nobanner", NoBanner),
("warnreach", WarnReach),
("evaltypes", EvalTypes),
("desugarnats", DesugarNats)]
help :: [([String], CmdArg, String)]
help = (["<expr>"], NoArg, "Evaluate an expression") :
[ (map (':' :) names, args, text) | (names, args, text, _) <- parserCommandsForHelp ]
allHelp :: [([String], CmdArg, String)]
allHelp = [ (map (':' :) names, args, text)
| (names, args, text, _) <- parserCommandsForHelp ++ parserCommands ]
parserCommandsForHelp :: CommandTable
parserCommandsForHelp =
[ exprArgCmd ["t", "type"] Check "Check the type of an expression"
, exprArgCmd ["core"] Core "View the core language representation of a term"
, nameArgCmd ["miss", "missing"] Missing "Show missing clauses"
, (["doc"], NameArg, "Show internal documentation", cmd_doc)
, (["mkdoc"], NamespaceArg, "Generate IdrisDoc for namespace(s) and dependencies"
, genArg "namespace" (many anyChar) MakeDoc)
, (["apropos"], SeqArgs (OptionalArg PkgArgs) NameArg, " Search names, types, and documentation"
, cmd_apropos)
, (["s", "search"], SeqArgs (OptionalArg PkgArgs) ExprArg
, " Search for values by type", cmd_search)
, nameArgCmd ["wc", "whocalls"] WhoCalls "List the callers of some name"
, nameArgCmd ["cw", "callswho"] CallsWho "List the callees of some name"
, namespaceArgCmd ["browse"] Browse "List the contents of some namespace"
, nameArgCmd ["total"] TotCheck "Check the totality of a name"
, noArgCmd ["r", "reload"] Reload "Reload current file"
, noArgCmd ["w", "watch"] Watch "Watch the current file for changes"
, (["l", "load"], FileArg, "Load a new file"
, strArg (\f -> Load f Nothing))
, (["!"], ShellCommandArg, "Run a shell command", strArg RunShellCommand)
, (["cd"], FileArg, "Change working directory"
, strArg ChangeDirectory)
, (["module"], ModuleArg, "Import an extra module", moduleArg ModImport) -- NOTE: dragons
, noArgCmd ["e", "edit"] Edit "Edit current file using $EDITOR or $VISUAL"
, noArgCmd ["m", "metavars"] Metavars "Show remaining proof obligations (metavariables or holes)"
, (["p", "prove"], MetaVarArg, "Prove a metavariable"
, nameArg (Prove False))
, (["elab"], MetaVarArg, "Build a metavariable using the elaboration shell"
, nameArg (Prove True))
, (["a", "addproof"], NameArg, "Add proof to source file", cmd_addproof)
, (["rmproof"], NameArg, "Remove proof from proof stack"
, nameArg RmProof)
, (["showproof"], NameArg, "Show proof"
, nameArg ShowProof)
, noArgCmd ["proofs"] Proofs "Show available proofs"
, exprArgCmd ["x"] ExecVal "Execute IO actions resulting from an expression using the interpreter"
, (["c", "compile"], FileArg, "Compile to an executable [codegen] <filename>", cmd_compile)
, (["exec", "execute"], OptionalArg ExprArg, "Compile to an executable and run", cmd_execute)
, (["dynamic"], FileArg, "Dynamically load a C library (similar to %dynamic)", cmd_dynamic)
, (["dynamic"], NoArg, "List dynamically loaded C libraries", cmd_dynamic)
, noArgCmd ["?", "h", "help"] Help "Display this help text"
, optArgCmd ["set"] SetOpt $ "Set an option (" ++ optionsList ++ ")"
, optArgCmd ["unset"] UnsetOpt "Unset an option"
, (["color", "colour"], ColourArg
, "Turn REPL colours on or off; set a specific colour"
, cmd_colour)
, (["consolewidth"], ConsoleWidthArg, "Set the width of the console", cmd_consolewidth)
, (["printerdepth"], OptionalArg NumberArg, "Set the maximum pretty-printer depth (no arg for infinite)", cmd_printdepth)
, noArgCmd ["q", "quit"] Quit "Exit the Idris system"
, noArgCmd ["warranty"] Warranty "Displays warranty information"
, (["let"], ManyArgs DeclArg
, "Evaluate a declaration, such as a function definition, instance implementation, or fixity declaration"
, cmd_let)
, (["unlet", "undefine"], ManyArgs NameArg
, "Remove the listed repl definitions, or all repl definitions if no names given"
, cmd_unlet)
, nameArgCmd ["printdef"] PrintDef "Show the definition of a function"
, (["pp", "pprint"], (SeqArgs OptionArg (SeqArgs NumberArg NameArg))
, "Pretty prints an Idris function in either LaTeX or HTML and for a specified width."
, cmd_pprint)
, (["verbosity"], NumberArg, "Set verbosity level", cmd_verb)
]
where optionsList = intercalate ", " $ map fst setOptions
parserCommands :: CommandTable
parserCommands =
[ noArgCmd ["u", "universes"] Universes "Display universe constraints"
, noArgCmd ["errorhandlers"] ListErrorHandlers "List registered error handlers"
, nameArgCmd ["d", "def"] Defn "Display a name's internal definitions"
, nameArgCmd ["transinfo"] TransformInfo "Show relevant transformation rules for a name"
, nameArgCmd ["di", "dbginfo"] DebugInfo "Show debugging information for a name"
, exprArgCmd ["patt"] Pattelab "(Debugging) Elaborate pattern expression"
, exprArgCmd ["spec"] Spec "?"
, exprArgCmd ["whnf"] WHNF "(Debugging) Show weak head normal form of an expression"
, exprArgCmd ["inline"] TestInline "?"
, proofArgCmd ["cs", "casesplit"] CaseSplitAt
":cs <line> <name> splits the pattern variable on the line"
, proofArgCmd ["apc", "addproofclause"] AddProofClauseFrom
":apc <line> <name> adds a pattern-matching proof clause to name on line"
, proofArgCmd ["ac", "addclause"] AddClauseFrom
":ac <line> <name> adds a clause for the definition of the name on the line"
, proofArgCmd ["am", "addmissing"] AddMissing
":am <line> <name> adds all missing pattern matches for the name on the line"
, proofArgCmd ["mw", "makewith"] MakeWith
":mw <line> <name> adds a with clause for the definition of the name on the line"
, proofArgCmd ["mc", "makecase"] MakeCase
":mc <line> <name> adds a case block for the definition of the metavariable on the line"
, proofArgCmd ["ml", "makelemma"] MakeLemma "?"
, (["log"], NumberArg, "Set logging level", cmd_log)
, ( ["logcats"]
, ManyArgs NameArg
, "Set logging categories"
, cmd_cats)
, (["lto", "loadto"], SeqArgs NumberArg FileArg
, "Load file up to line number", cmd_loadto)
, (["ps", "proofsearch"], NoArg
, ":ps <line> <name> <names> does proof search for name on line, with names as hints"
, cmd_proofsearch)
, (["ref", "refine"], NoArg
, ":ref <line> <name> <name'> attempts to partially solve name on line, with name' as hint, introducing metavariables for arguments that aren't inferrable"
, cmd_refine)
, (["debugunify"], SeqArgs ExprArg ExprArg
, "(Debugging) Try to unify two expressions", const $ do
l <- P.simpleExpr defaultSyntax
r <- P.simpleExpr defaultSyntax
eof
return (Right (DebugUnify l r))
)
]
noArgCmd names command doc =
(names, NoArg, doc, noArgs command)
nameArgCmd names command doc =
(names, NameArg, doc, fnNameArg command)
namespaceArgCmd names command doc =
(names, NamespaceArg, doc, namespaceArg command)
exprArgCmd names command doc =
(names, ExprArg, doc, exprArg command)
optArgCmd names command doc =
(names, OptionArg, doc, optArg command)
proofArgCmd names command doc =
(names, NoArg, doc, proofArg command)
pCmd :: P.IdrisParser (Either String Command)
pCmd = choice [ do c <- cmd names; parser c
| (names, _, _, parser) <- parserCommandsForHelp ++ parserCommands ]
<|> unrecognized
<|> nop
<|> eval
where nop = do eof; return (Right NOP)
unrecognized = do
P.lchar ':'
cmd <- many anyChar
let cmd' = takeWhile (/=' ') cmd
return (Left $ "Unrecognized command: " ++ cmd')
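-- Aliases are tried longest first (see 'sorted_xs'), so a longer spelling is
-- preferred over any shorter alias that happens to be its prefix.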
cmd :: [String] -> P.IdrisParser String
cmd xs = try $ do
P.lchar ':'
docmd sorted_xs
where docmd [] = fail "Could not parse command"
docmd (x:xs) = try (P.reserved x >> return x) <|> docmd xs
sorted_xs = sortBy (\x y -> compare (length y) (length x)) xs
noArgs :: Command -> String -> P.IdrisParser (Either String Command)
noArgs cmd name = do
let emptyArgs = do
eof
return (Right cmd)
let failure = return (Left $ ":" ++ name ++ " takes no arguments")
emptyArgs <|> failure
eval :: P.IdrisParser (Either String Command)
eval = do
t <- P.fullExpr defaultSyntax
return $ Right (Eval t)
exprArg :: (PTerm -> Command) -> String -> P.IdrisParser (Either String Command)
exprArg cmd name = do
let noArg = do
eof
return $ Left ("Usage is :" ++ name ++ " <expression>")
let justOperator = do
(op, fc) <- P.operatorFC
eof
return $ Right $ cmd (PRef fc [] (sUN op))
let properArg = do
t <- P.fullExpr defaultSyntax
return $ Right (cmd t)
try noArg <|> try justOperator <|> properArg
genArg :: String -> P.IdrisParser a -> (a -> Command)
-> String -> P.IdrisParser (Either String Command)
genArg argName argParser cmd name = do
let emptyArgs = do eof; failure
oneArg = do arg <- argParser
eof
return (Right (cmd arg))
try emptyArgs <|> oneArg <|> failure
where
failure = return $ Left ("Usage is :" ++ name ++ " <" ++ argName ++ ">")
nameArg, fnNameArg :: (Name -> Command) -> String -> P.IdrisParser (Either String Command)
nameArg = genArg "name" $ fst <$> P.name
fnNameArg = genArg "functionname" $ fst <$> P.fnName
strArg :: (String -> Command) -> String -> P.IdrisParser (Either String Command)
strArg = genArg "string" (many anyChar)
moduleArg :: (FilePath -> Command) -> String -> P.IdrisParser (Either String Command)
moduleArg = genArg "module" (fmap (toPath . fst) P.identifier)
where
toPath n = foldl1' (</>) $ splitOn "." n
namespaceArg :: ([String] -> Command) -> String -> P.IdrisParser (Either String Command)
namespaceArg = genArg "namespace" (fmap (toNS . fst) P.identifier)
where
toNS = splitOn "."
optArg :: (Opt -> Command) -> String -> P.IdrisParser (Either String Command)
optArg cmd name = do
let emptyArgs = do
eof
return $ Left ("Usage is :" ++ name ++ " <option>")
let oneArg = do
o <- pOption
P.whiteSpace
eof
return (Right (cmd o))
let failure = return $ Left "Unrecognized setting"
try emptyArgs <|> oneArg <|> failure
where
pOption :: P.IdrisParser Opt
pOption = foldl (<|>) empty $ map (\(a, b) -> do discard (P.symbol a); return b) setOptions
proofArg :: (Bool -> Int -> Name -> Command) -> String -> P.IdrisParser (Either String Command)
proofArg cmd name = do
upd <- option False $ do
P.lchar '!'
return True
l <- fst <$> P.natural
n <- fst <$> P.name;
return (Right (cmd upd (fromInteger l) n))
cmd_doc :: String -> P.IdrisParser (Either String Command)
cmd_doc name = do
let constant = do
c <- fmap fst P.constant
eof
return $ Right (DocStr (Right c) FullDocs)
let pType = do
P.reserved "Type"
eof
return $ Right (DocStr (Left $ P.mkName ("Type", "")) FullDocs)
let fnName = fnNameArg (\n -> DocStr (Left n) FullDocs) name
try constant <|> pType <|> fnName
cmd_consolewidth :: String -> P.IdrisParser (Either String Command)
cmd_consolewidth name = do
w <- pConsoleWidth
return (Right (SetConsoleWidth w))
where
pConsoleWidth :: P.IdrisParser ConsoleWidth
pConsoleWidth = do discard (P.symbol "auto"); return AutomaticWidth
<|> do discard (P.symbol "infinite"); return InfinitelyWide
<|> do n <- fmap (fromInteger . fst) P.natural
return (ColsWide n)
cmd_printdepth :: String -> P.IdrisParser (Either String Command)
cmd_printdepth _ = do d <- optional (fmap (fromInteger . fst) P.natural)
return (Right $ SetPrinterDepth d)
cmd_execute :: String -> P.IdrisParser (Either String Command)
cmd_execute name = do
tm <- option maintm (P.fullExpr defaultSyntax)
return (Right (Execute tm))
where
maintm = PRef (fileFC "(repl)") [] (sNS (sUN "main") ["Main"])
cmd_dynamic :: String -> P.IdrisParser (Either String Command)
cmd_dynamic name = do
let optArg = do l <- many anyChar
if (l /= "")
then return $ Right (DynamicLink l)
else return $ Right ListDynamic
let failure = return $ Left $ "Usage is :" ++ name ++ " [<library>]"
try optArg <|> failure
cmd_pprint :: String -> P.IdrisParser (Either String Command)
cmd_pprint name = do
fmt <- ppFormat
P.whiteSpace
n <- fmap (fromInteger . fst) P.natural
P.whiteSpace
t <- P.fullExpr defaultSyntax
return (Right (PPrint fmt n t))
where
ppFormat :: P.IdrisParser OutputFmt
ppFormat = (discard (P.symbol "html") >> return HTMLOutput)
<|> (discard (P.symbol "latex") >> return LaTeXOutput)
cmd_compile :: String -> P.IdrisParser (Either String Command)
cmd_compile name = do
let defaultCodegen = Via IBCFormat "c"
let codegenOption :: P.IdrisParser Codegen
codegenOption = do
let bytecodeCodegen = discard (P.symbol "bytecode") *> return Bytecode
viaCodegen = do x <- fst <$> P.identifier
return (Via IBCFormat (map toLower x))
bytecodeCodegen <|> viaCodegen
let hasOneArg = do
i <- get
f <- fst <$> P.identifier
eof
return $ Right (Compile defaultCodegen f)
let hasTwoArgs = do
i <- get
codegen <- codegenOption
f <- fst <$> P.identifier
eof
return $ Right (Compile codegen f)
let failure = return $ Left $ "Usage is :" ++ name ++ " [<codegen>] <filename>"
try hasTwoArgs <|> try hasOneArg <|> failure
cmd_addproof :: String -> P.IdrisParser (Either String Command)
cmd_addproof name = do
n <- option Nothing $ do
x <- fst <$> P.name
return (Just x)
eof
return (Right (AddProof n))
cmd_log :: String -> P.IdrisParser (Either String Command)
cmd_log name = do
i <- fmap (fromIntegral . fst) P.natural
eof
return (Right (LogLvl i))
cmd_verb :: String -> P.IdrisParser (Either String Command)
cmd_verb name = do
i <- fmap (fromIntegral . fst) P.natural
eof
return (Right (Verbosity i))
cmd_cats :: String -> P.IdrisParser (Either String Command)
cmd_cats name = do
cs <- sepBy pLogCats (P.whiteSpace)
eof
return $ Right $ LogCategory (concat cs)
where
badCat = do
c <- fst <$> P.identifier
fail $ "Category: " ++ c ++ " is not recognised."
pLogCats :: P.IdrisParser [LogCat]
pLogCats = try (P.symbol (strLogCat IParse) >> return parserCats)
<|> try (P.symbol (strLogCat IElab) >> return elabCats)
<|> try (P.symbol (strLogCat ICodeGen) >> return codegenCats)
<|> try (P.symbol (strLogCat ICoverage) >> return [ICoverage])
<|> try (P.symbol (strLogCat IIBC) >> return [IIBC])
<|> try (P.symbol (strLogCat IErasure) >> return [IErasure])
<|> badCat
cmd_let :: String -> P.IdrisParser (Either String Command)
cmd_let name = do
defn <- concat <$> many (P.decl defaultSyntax)
return (Right (NewDefn defn))
cmd_unlet :: String -> P.IdrisParser (Either String Command)
cmd_unlet name = (Right . Undefine) `fmap` many (fst <$> P.name)
cmd_loadto :: String -> P.IdrisParser (Either String Command)
cmd_loadto name = do
toline <- fmap (fromInteger . fst) P.natural
f <- many anyChar;
return (Right (Load f (Just toline)))
cmd_colour :: String -> P.IdrisParser (Either String Command)
cmd_colour name = fmap Right pSetColourCmd
where
colours :: [(String, Maybe Color)]
colours = [ ("black", Just Black)
, ("red", Just Red)
, ("green", Just Green)
, ("yellow", Just Yellow)
, ("blue", Just Blue)
, ("magenta", Just Magenta)
, ("cyan", Just Cyan)
, ("white", Just White)
, ("default", Nothing)
]
pSetColourCmd :: P.IdrisParser Command
pSetColourCmd = (do c <- pColourType
let defaultColour = IdrisColour Nothing True False False False
opts <- sepBy pColourMod (P.whiteSpace)
let colour = foldr ($) defaultColour $ reverse opts
return $ SetColour c colour)
<|> try (P.symbol "on" >> return ColourOn)
<|> try (P.symbol "off" >> return ColourOff)
pColour :: P.IdrisParser (Maybe Color)
pColour = doColour colours
where doColour [] = fail "Unknown colour"
doColour ((s, c):cs) = (try (P.symbol s) >> return c) <|> doColour cs
pColourMod :: P.IdrisParser (IdrisColour -> IdrisColour)
pColourMod = try (P.symbol "vivid" >> return doVivid)
<|> try (P.symbol "dull" >> return doDull)
<|> try (P.symbol "underline" >> return doUnderline)
<|> try (P.symbol "nounderline" >> return doNoUnderline)
<|> try (P.symbol "bold" >> return doBold)
<|> try (P.symbol "nobold" >> return doNoBold)
<|> try (P.symbol "italic" >> return doItalic)
<|> try (P.symbol "noitalic" >> return doNoItalic)
<|> try (pColour >>= return . doSetColour)
where doVivid i = i { vivid = True }
doDull i = i { vivid = False }
doUnderline i = i { underline = True }
doNoUnderline i = i { underline = False }
doBold i = i { bold = True }
doNoBold i = i { bold = False }
doItalic i = i { italic = True }
doNoItalic i = i { italic = False }
doSetColour c i = i { colour = c }
-- | Generate the colour type names using the default Show instance.
colourTypes :: [(String, ColourType)]
colourTypes = map (\x -> ((map toLower . reverse . drop 6 . reverse . show) x, x)) $
enumFromTo minBound maxBound
pColourType :: P.IdrisParser ColourType
pColourType = doColourType colourTypes
where doColourType [] = fail $ "Unknown colour category. Options: " ++
(concat . intersperse ", " . map fst) colourTypes
doColourType ((s,ct):cts) = (try (P.symbol s) >> return ct) <|> doColourType cts
idChar = oneOf (['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ ['_'])
cmd_apropos :: String -> P.IdrisParser (Either String Command)
cmd_apropos = packageBasedCmd (some idChar) Apropos
packageBasedCmd :: P.IdrisParser a -> ([String] -> a -> Command)
-> String -> P.IdrisParser (Either String Command)
packageBasedCmd valParser cmd name =
try (do P.lchar '('
pkgs <- sepBy (some idChar) (P.lchar ',')
P.lchar ')'
val <- valParser
return (Right (cmd pkgs val)))
<|> do val <- valParser
return (Right (cmd [] val))
cmd_search :: String -> P.IdrisParser (Either String Command)
cmd_search = packageBasedCmd
(P.fullExpr (defaultSyntax { implicitAllowed = True })) Search
cmd_proofsearch :: String -> P.IdrisParser (Either String Command)
cmd_proofsearch name = do
upd <- option False (do P.lchar '!'; return True)
l <- fmap (fromInteger . fst) P.natural; n <- fst <$> P.name
hints <- many (fst <$> P.fnName)
return (Right (DoProofSearch upd True l n hints))
cmd_refine :: String -> P.IdrisParser (Either String Command)
cmd_refine name = do
upd <- option False (do P.lchar '!'; return True)
l <- fmap (fromInteger . fst) P.natural; n <- fst <$> P.name
hint <- fst <$> P.fnName
return (Right (DoProofSearch upd False l n [hint]))
|
uuhan/Idris-dev
|
src/Idris/REPL/Parser.hs
|
bsd-3-clause
| 21,486 | 0 | 21 | 5,611 | 6,792 | 3,514 | 3,278 | 442 | 3 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Volume.ES.Rules
( rules ) where
import Data.String
import Data.Text (Text)
import Prelude
import Duckling.Dimensions.Types
import Duckling.Types
import Duckling.Regex.Types
import Duckling.Volume.Helpers
import Duckling.Numeral.Helpers (isPositive)
import qualified Duckling.Volume.Types as TVolume
import qualified Duckling.Numeral.Types as TNumeral
volumes :: [(Text, String, TVolume.Unit)]
volumes = [ ("<latent vol> ml" , "m(l|ililitros?)" , TVolume.Millilitre)
, ("<vol> hectoliters" , "(hectolitros?)" , TVolume.Hectolitre)
, ("<vol> liters" , "l(itros?)?" , TVolume.Litre)
, ("<latent vol> gallon", "gal(o|ó)ne?s?" , TVolume.Gallon)
]
rulesVolumes :: [Rule]
rulesVolumes = map go volumes
where
go :: (Text, String, TVolume.Unit) -> Rule
go (name, regexPattern, u) = Rule
{ name = name
, pattern =
[ regex regexPattern
]
, prod = \_ -> Just . Token Volume $ unitOnly u
}
fractions :: [(Text, String, Double)]
fractions = [ ("half", "medio", 1/2)
]
rulesFractionalVolume :: [Rule]
rulesFractionalVolume = map go fractions
where
go :: (Text, String, Double) -> Rule
go (name, regexPattern, f) = Rule
{ name = name
, pattern =
[ regex regexPattern
, Predicate isUnitOnly
]
, prod = \case
(_:
Token Volume TVolume.VolumeData{TVolume.unit = Just u}:
_) ->
Just . Token Volume $ volume u f
_ -> Nothing
}
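-- Read together with 'rulesVolumes', this lets a phrase such as
-- "medio litro" resolve to half of a 'TVolume.Litre' (an illustration of how
-- the two rule sets combine, not an added rule or test).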
rules :: [Rule]
rules =
[
]
++ rulesVolumes
++ rulesFractionalVolume
|
facebookincubator/duckling
|
Duckling/Volume/ES/Rules.hs
|
bsd-3-clause
| 1,925 | 0 | 19 | 491 | 483 | 291 | 192 | 49 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.CMYKA
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/EXT/cmyka.txt EXT_cmyka> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.CMYKA (
-- * Enums
gl_CMYKA_EXT,
gl_CMYK_EXT,
gl_PACK_CMYK_HINT_EXT,
gl_UNPACK_CMYK_HINT_EXT
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/EXT/CMYKA.hs
|
bsd-3-clause
| 678 | 0 | 4 | 87 | 46 | 37 | 9 | 6 | 0 |
{-# LANGUAGE TupleSections #-}
-- | Server operations common to many modules.
module Game.LambdaHack.Server.CommonServer
( execFailure, resetFidPerception, resetLitInDungeon, getPerFid
, revealItems, moveStores, deduceQuits, deduceKilled, electLeader
, addActor, addActorIid, projectFail
, pickWeaponServer, sumOrganEqpServer, actorSkillsServer
) where
import Prelude ()
import Prelude.Compat
import Control.Exception.Assert.Sugar
import Control.Monad (void, unless, when, forM_, join)
import qualified Data.EnumMap.Strict as EM
import Data.List (partition, delete)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified NLP.Miniutter.English as MU
import qualified Text.Show.Pretty as Show.Pretty
import Game.LambdaHack.Atomic
import qualified Game.LambdaHack.Common.Ability as Ability
import Game.LambdaHack.Common.Actor
import Game.LambdaHack.Common.ActorState
import qualified Game.LambdaHack.Common.Color as Color
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Flavour
import Game.LambdaHack.Common.Item
import Game.LambdaHack.Common.ItemDescription
import Game.LambdaHack.Common.ItemStrongest
import qualified Game.LambdaHack.Common.Kind as Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Msg
import Game.LambdaHack.Common.Perception
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.Random
import Game.LambdaHack.Common.Request
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Content.ItemKind (ItemKind)
import qualified Game.LambdaHack.Content.ItemKind as IK
import Game.LambdaHack.Content.ModeKind
import Game.LambdaHack.Content.RuleKind
import Game.LambdaHack.Server.Fov
import Game.LambdaHack.Server.ItemServer
import Game.LambdaHack.Server.MonadServer
import Game.LambdaHack.Server.State
execFailure :: (MonadAtomic m, MonadServer m)
=> ActorId -> RequestTimed a -> ReqFailure -> m ()
execFailure aid req failureSer = do
-- Clients should rarely do that (only in case of invisible actors)
  -- so we report it, send a --more-- message (if not AI), but do not crash
-- (server should work OK with stupid clients, too).
body <- getsState $ getActorBody aid
let fid = bfid body
msg = showReqFailure failureSer
impossible = impossibleReqFailure failureSer
debugShow :: Show a => a -> Text
debugShow = T.pack . Show.Pretty.ppShow
possiblyAlarm = if impossible
then debugPossiblyPrintAndExit
else debugPossiblyPrint
possiblyAlarm $
"execFailure:" <+> msg <> "\n"
<> debugShow body <> "\n" <> debugShow req
execSfxAtomic $ SfxMsgFid fid $ "Unexpected problem:" <+> msg <> "."
-- TODO: --more--, but keep in history
-- | Update the cached perception for the selected level, for a faction.
-- The assumption is the level, and only the level, has changed since
-- the previous perception calculation.
resetFidPerception :: MonadServer m
=> PersLit -> FactionId -> LevelId
-> m Perception
resetFidPerception persLit fid lid = do
sfovMode <- getsServer $ sfovMode . sdebugSer
lvl <- getLevel lid
let fovMode = fromMaybe Digital sfovMode
per = fidLidPerception fovMode persLit fid lid lvl
upd = EM.adjust (EM.adjust (const per) lid) fid
modifyServer $ \ser2 -> ser2 {sper = upd (sper ser2)}
return $! per
resetLitInDungeon :: MonadServer m => m PersLit
resetLitInDungeon = do
sfovMode <- getsServer $ sfovMode . sdebugSer
ser <- getServer
let fovMode = fromMaybe Digital sfovMode
getsState $ \s -> litInDungeon fovMode s ser
getPerFid :: MonadServer m => FactionId -> LevelId -> m Perception
getPerFid fid lid = do
pers <- getsServer sper
let failFact = assert `failure` "no perception for faction" `twith` (lid, fid)
fper = EM.findWithDefault failFact fid pers
failLvl = assert `failure` "no perception for level" `twith` (lid, fid)
per = EM.findWithDefault failLvl lid fper
return $! per
revealItems :: (MonadAtomic m, MonadServer m)
=> Maybe FactionId -> Maybe (ActorId, Actor) -> m ()
revealItems mfid mbody = do
let !_A = assert (maybe True (not . bproj . snd) mbody) ()
itemToF <- itemToFullServer
dungeon <- getsState sdungeon
let discover aid store iid k =
let itemFull = itemToF iid k
c = CActor aid store
in case itemDisco itemFull of
Just ItemDisco{itemKindId} -> do
seed <- getsServer $ (EM.! iid) . sitemSeedD
Level{ldepth} <- getLevel $ jlid $ itemBase itemFull
execUpdAtomic $ UpdDiscover c iid itemKindId seed ldepth
_ -> assert `failure` (mfid, mbody, c, iid, itemFull)
f aid = do
b <- getsState $ getActorBody aid
let ourSide = maybe True (== bfid b) mfid
-- Don't ID projectiles, because client may not see them.
when (not (bproj b) && ourSide) $
-- CSha is IDed for each actor of each faction, which is OK,
-- even though it may introduce a slight lag.
-- AI clients being sent this is a bigger waste anyway.
join $ getsState $ mapActorItems_ (discover aid) b
mapDungeonActors_ f dungeon
maybe (return ())
(\(aid, b) -> join $ getsState $ mapActorItems_ (discover aid) b)
mbody
moveStores :: (MonadAtomic m, MonadServer m)
=> ActorId -> CStore -> CStore -> m ()
moveStores aid fromStore toStore = do
b <- getsState $ getActorBody aid
let g iid (k, _) = execUpdAtomic $ UpdMoveItem iid k aid fromStore toStore
mapActorCStore_ fromStore g b
quitF :: (MonadAtomic m, MonadServer m)
=> Maybe (ActorId, Actor) -> Status -> FactionId -> m ()
quitF mbody status fid = do
let !_A = assert (maybe True ((fid ==) . bfid . snd) mbody) ()
fact <- getsState $ (EM.! fid) . sfactionD
let oldSt = gquit fact
case stOutcome <$> oldSt of
Just Killed -> return () -- Do not overwrite in case
Just Defeated -> return () -- many things happen in 1 turn.
Just Conquer -> return ()
Just Escape -> return ()
_ -> do
when (fhasUI $ gplayer fact) $ do
keepAutomated <- getsServer $ skeepAutomated . sdebugSer
when (isAIFact fact
&& fleaderMode (gplayer fact) /= LeaderNull
&& not keepAutomated) $
execUpdAtomic $ UpdAutoFaction fid False
revealItems (Just fid) mbody
registerScore status (snd <$> mbody) fid
execUpdAtomic $ UpdQuitFaction fid (snd <$> mbody) oldSt $ Just status -- TODO: send only aid to UpdQuitFaction and elsewhere --- aid is alive
modifyServer $ \ser -> ser {squit = True} -- end turn ASAP
-- Send any QuitFactionA actions that can be deduced from their current state.
deduceQuits :: (MonadAtomic m, MonadServer m)
=> FactionId -> Maybe (ActorId, Actor) -> Status -> m ()
deduceQuits fid mbody status@Status{stOutcome}
| stOutcome `elem` [Defeated, Camping, Restart, Conquer] =
assert `failure` "no quitting to deduce" `twith` (fid, mbody, status)
deduceQuits fid mbody status = do
let mapQuitF statusF fids = mapM_ (quitF Nothing statusF) $ delete fid fids
quitF mbody status fid
let inGameOutcome (_, fact) = case stOutcome <$> gquit fact of
Just Killed -> False
Just Defeated -> False
Just Restart -> False -- effectively, commits suicide
_ -> True
factionD <- getsState sfactionD
let assocsInGame = filter inGameOutcome $ EM.assocs factionD
keysInGame = map fst assocsInGame
assocsKeepArena = filter (keepArenaFact . snd) assocsInGame
assocsUI = filter (fhasUI . gplayer . snd) assocsInGame
nonHorrorAIG = filter (not . isHorrorFact . snd) assocsInGame
worldPeace =
all (\(fid1, _) -> all (\(_, fact2) -> not $ isAtWar fact2 fid1)
nonHorrorAIG)
nonHorrorAIG
case assocsKeepArena of
_ | null assocsUI ->
-- Only non-UI players left in the game and they all win.
mapQuitF status{stOutcome=Conquer} keysInGame
[] ->
-- Only leaderless and spawners remain (the latter may sometimes
-- have no leader, just as the former), so they win,
-- or we could get stuck in a state with no active arena and so no spawns.
mapQuitF status{stOutcome=Conquer} keysInGame
_ | worldPeace ->
-- Nobody is at war any more, so all win (e.g., horrors, but never mind).
mapQuitF status{stOutcome=Conquer} keysInGame
_ | stOutcome status == Escape -> do
-- Otherwise, in a game with many warring teams alive,
-- only complete Victory matters, until enough of them die.
let (victors, losers) =
partition (flip isAllied fid . snd) assocsInGame
mapQuitF status{stOutcome=Escape} $ map fst victors
mapQuitF status{stOutcome=Defeated} $ map fst losers
_ -> return ()
-- | Tell whether a faction that we know is still in game, keeps arena.
-- Keeping arena means, if the faction is still in game,
-- it always has a leader in the dungeon somewhere.
-- So, leaderless factions and spawner factions do not keep an arena,
-- even though the latter usually has a leader for most of the game.
keepArenaFact :: Faction -> Bool
keepArenaFact fact = fleaderMode (gplayer fact) /= LeaderNull
&& fneverEmpty (gplayer fact)
-- We assume the actor in the second argument is dead or dominated
-- by this point. Even if the actor is to be dominated,
-- @bfid@ of the actor body is still the old faction.
deduceKilled :: (MonadAtomic m, MonadServer m)
=> ActorId -> Actor -> m ()
deduceKilled aid body = do
Kind.COps{corule} <- getsState scops
let firstDeathEnds = rfirstDeathEnds $ Kind.stdRuleset corule
fid = bfid body
fact <- getsState $ (EM.! fid) . sfactionD
when (fneverEmpty $ gplayer fact) $ do
actorsAlive <- anyActorsAlive fid (Just aid)
when (not actorsAlive || firstDeathEnds) $
deduceQuits fid (Just (aid, body))
$ Status Killed (fromEnum $ blid body) Nothing
anyActorsAlive :: MonadServer m => FactionId -> Maybe ActorId -> m Bool
anyActorsAlive fid maid = do
fact <- getsState $ (EM.! fid) . sfactionD
if fleaderMode (gplayer fact) /= LeaderNull
then return $! isJust $ gleader fact
else do
as <- getsState $ fidActorNotProjAssocs fid
return $! not $ null $ maybe as (\aid -> filter ((/= aid) . fst) as) maid
electLeader :: MonadAtomic m => FactionId -> LevelId -> ActorId -> m ()
electLeader fid lid aidDead = do
mleader <- getsState $ gleader . (EM.! fid) . sfactionD
when (isNothing mleader || fmap fst mleader == Just aidDead) $ do
actorD <- getsState sactorD
let ours (_, b) = bfid b == fid && not (bproj b)
party = filter ours $ EM.assocs actorD
onLevel <- getsState $ actorRegularAssocs (== fid) lid
let mleaderNew = case filter (/= aidDead) $ map fst $ onLevel ++ party of
[] -> Nothing
aid : _ -> Just (aid, Nothing)
unless (mleader == mleaderNew) $
execUpdAtomic $ UpdLeadFaction fid mleader mleaderNew
projectFail :: (MonadAtomic m, MonadServer m)
=> ActorId -- ^ actor projecting the item (is on current lvl)
-> Point -- ^ target position of the projectile
-> Int -- ^ digital line parameter
-> ItemId -- ^ the item to be projected
            -> CStore     -- ^ whether the item comes from floor or inventory
-> Bool -- ^ whether the item is a blast
-> m (Maybe ReqFailure)
projectFail source tpxy eps iid cstore isBlast = do
Kind.COps{cotile} <- getsState scops
sb <- getsState $ getActorBody source
let lid = blid sb
spos = bpos sb
lvl@Level{lxsize, lysize} <- getLevel lid
case bla lxsize lysize eps spos tpxy of
Nothing -> return $ Just ProjectAimOnself
Just [] -> assert `failure` "projecting from the edge of level"
`twith` (spos, tpxy)
Just (pos : restUnlimited) -> do
bag <- getsState $ getActorBag source cstore
case EM.lookup iid bag of
Nothing -> return $ Just ProjectOutOfReach
Just kit -> do
itemToF <- itemToFullServer
activeItems <- activeItemsServer source
actorSk <- actorSkillsServer source
let skill = EM.findWithDefault 0 Ability.AbProject actorSk
itemFull@ItemFull{itemBase} = itemToF iid kit
forced = isBlast || bproj sb
legal = permittedProject " " forced skill itemFull sb activeItems
case legal of
Left reqFail -> return $ Just reqFail
Right _ -> do
let fragile = IK.Fragile `elem` jfeature itemBase
rest = if fragile
then take (chessDist spos tpxy - 1) restUnlimited
else restUnlimited
t = lvl `at` pos
if not $ Tile.isWalkable cotile t
then return $ Just ProjectBlockTerrain
else do
lab <- getsState $ posToActors pos lid
if not $ all (bproj . snd) lab
then if isBlast && bproj sb then do
-- Hit the blocking actor.
projectBla source spos (pos:rest) iid cstore isBlast
return Nothing
else return $ Just ProjectBlockActor
else do
if isBlast && bproj sb && eps `mod` 2 == 0 then
-- Make the explosion a bit less regular.
projectBla source spos (pos:rest) iid cstore isBlast
else
projectBla source pos rest iid cstore isBlast
return Nothing
projectBla :: (MonadAtomic m, MonadServer m)
=> ActorId -- ^ actor projecting the item (is on current lvl)
-> Point -- ^ starting point of the projectile
-> [Point] -- ^ rest of the trajectory of the projectile
-> ItemId -- ^ the item to be projected
           -> CStore     -- ^ whether the item comes from floor or inventory
-> Bool -- ^ whether the item is a blast
-> m ()
projectBla source pos rest iid cstore isBlast = do
sb <- getsState $ getActorBody source
item <- getsState $ getItemBody iid
let lid = blid sb
localTime <- getsState $ getLocalTime lid
unless isBlast $ execSfxAtomic $ SfxProject source iid cstore
bag <- getsState $ getActorBag source cstore
case iid `EM.lookup` bag of
Nothing -> assert `failure` (source, pos, rest, iid, cstore)
Just kit@(_, it) -> do
addProjectile pos rest iid kit lid (bfid sb) localTime isBlast
let c = CActor source cstore
execUpdAtomic $ UpdLoseItem iid item (1, take 1 it) c
-- | Create a projectile actor containing the given missile.
--
-- Projectile has no organs except for the trunk.
addProjectile :: (MonadAtomic m, MonadServer m)
=> Point -> [Point] -> ItemId -> ItemQuant -> LevelId
-> FactionId -> Time -> Bool
-> m ()
addProjectile bpos rest iid (_, it) blid bfid btime isBlast = do
localTime <- getsState $ getLocalTime blid
itemToF <- itemToFullServer
let itemFull@ItemFull{itemBase} = itemToF iid (1, take 1 it)
(trajectory, (speed, trange)) = itemTrajectory itemBase (bpos : rest)
adj | trange < 5 = "falling"
| otherwise = "flying"
-- Not much detail about a fast flying item.
(_, object1, object2) = partItem CInv localTime
(itemNoDisco (itemBase, 1))
bname = makePhrase [MU.AW $ MU.Text adj, object1, object2]
tweakBody b = b { bsymbol = if isBlast then bsymbol b else '*'
, bcolor = if isBlast then bcolor b else Color.BrWhite
, bname
, bhp = 1
, bproj = True
, btrajectory = Just (trajectory, speed)
, beqp = EM.singleton iid (1, take 1 it)
, borgan = EM.empty}
bpronoun = "it"
void $ addActorIid iid itemFull
True bfid bpos blid tweakBody bpronoun btime
addActor :: (MonadAtomic m, MonadServer m)
=> GroupName ItemKind -> FactionId -> Point -> LevelId
-> (Actor -> Actor) -> Text -> Time
-> m (Maybe ActorId)
addActor actorGroup bfid pos lid tweakBody bpronoun time = do
-- We bootstrap the actor by first creating the trunk of the actor's body
  -- which contains the constant properties.
let trunkFreq = [(actorGroup, 1)]
m2 <- rollAndRegisterItem lid trunkFreq (CTrunk bfid lid pos) False Nothing
case m2 of
Nothing -> return Nothing
Just (trunkId, (trunkFull, _)) ->
addActorIid trunkId trunkFull False bfid pos lid tweakBody bpronoun time
addActorIid :: (MonadAtomic m, MonadServer m)
=> ItemId -> ItemFull -> Bool -> FactionId -> Point -> LevelId
-> (Actor -> Actor) -> Text -> Time
-> m (Maybe ActorId)
addActorIid trunkId trunkFull@ItemFull{..} bproj
bfid pos lid tweakBody bpronoun time = do
let trunkKind = case itemDisco of
Just ItemDisco{itemKind} -> itemKind
Nothing -> assert `failure` trunkFull
-- Initial HP and Calm is based only on trunk and ignores organs.
let hp = xM (max 2 $ sumSlotNoFilter IK.EqpSlotAddMaxHP [trunkFull])
`div` 2
calm = xM $ max 1
$ sumSlotNoFilter IK.EqpSlotAddMaxCalm [trunkFull]
-- Create actor.
factionD <- getsState sfactionD
let factMine = factionD EM.! bfid
DebugModeSer{scurDiffSer} <- getsServer sdebugSer
nU <- nUI
-- If difficulty is below standard, HP is added to the UI factions,
-- otherwise HP is added to their enemies.
-- If no UI factions, their role is taken by the escapees (for testing).
let diffBonusCoeff = difficultyCoeff scurDiffSer
hasUIorEscapes Faction{gplayer} =
fhasUI gplayer || nU == 0 && fcanEscape gplayer
boostFact = not bproj
&& if diffBonusCoeff > 0
then hasUIorEscapes factMine
|| any hasUIorEscapes
(filter (`isAllied` bfid) $ EM.elems factionD)
else any hasUIorEscapes
(filter (`isAtWar` bfid) $ EM.elems factionD)
diffHP | boostFact = hp * 2 ^ abs diffBonusCoeff
| otherwise = hp
bonusHP = fromIntegral $ (diffHP - hp) `divUp` oneM
healthOrgans = [(Just bonusHP, ("bonus HP", COrgan)) | bonusHP /= 0]
bsymbol = jsymbol itemBase
bname = IK.iname trunkKind
bcolor = flavourToColor $ jflavour itemBase
b = actorTemplate trunkId bsymbol bname bpronoun bcolor diffHP calm
pos lid time bfid
-- Insert the trunk as the actor's organ.
withTrunk = b {borgan = EM.singleton trunkId (itemK, itemTimer)}
aid <- getsServer sacounter
modifyServer $ \ser -> ser {sacounter = succ aid}
execUpdAtomic $ UpdCreateActor aid (tweakBody withTrunk) [(trunkId, itemBase)]
-- Create, register and insert all initial actor items, including
-- the bonus health organs from difficulty setting.
forM_ (healthOrgans ++ map (Nothing,) (IK.ikit trunkKind))
$ \(mk, (ikText, cstore)) -> do
let container = CActor aid cstore
itemFreq = [(ikText, 1)]
mIidEtc <- rollAndRegisterItem lid itemFreq container False mk
case mIidEtc of
Nothing -> assert `failure` (lid, itemFreq, container, mk)
Just (_, (ItemFull{itemDisco=
Just ItemDisco{itemAE=
Just ItemAspectEffect{jeffects=_:_}}}, _)) ->
return () -- discover by use
Just (iid, (ItemFull{itemBase=itemBase2}, _)) -> do
seed <- getsServer $ (EM.! iid) . sitemSeedD
Level{ldepth} <- getLevel $ jlid itemBase2
execUpdAtomic $ UpdDiscoverSeed container iid seed ldepth
return $ Just aid
-- Server has to pick a random weapon or it could leak item discovery
-- information. In case of non-projectiles, it only picks items
-- with some effects, though, so it leaks properties of completely
-- unidentified items.
pickWeaponServer :: MonadServer m => ActorId -> m (Maybe (ItemId, CStore))
pickWeaponServer source = do
eqpAssocs <- fullAssocsServer source [CEqp]
bodyAssocs <- fullAssocsServer source [COrgan]
actorSk <- actorSkillsServer source
sb <- getsState $ getActorBody source
localTime <- getsState $ getLocalTime (blid sb)
-- For projectiles we need to accept even items without any effect,
  -- so that the projectile disappears and "No effect" feedback is produced.
let allAssocs = eqpAssocs ++ bodyAssocs
calm10 = calmEnough10 sb $ map snd allAssocs
forced = bproj sb
permitted = permittedPrecious calm10 forced
legalPrecious = either (const False) (const True) . permitted
preferredPrecious = either (const False) id . permitted
strongest = strongestMelee True localTime allAssocs
strongestLegal = filter (legalPrecious . snd . snd) strongest
strongestPreferred = filter (preferredPrecious . snd . snd) strongestLegal
best = case strongestPreferred of
_ | bproj sb -> map (1,) eqpAssocs
_ | EM.findWithDefault 0 Ability.AbMelee actorSk <= 0 -> []
_:_ -> strongestPreferred
[] -> strongestLegal
case best of
[] -> return Nothing
iis@((maxS, _) : _) -> do
let maxIis = map snd $ takeWhile ((== maxS) . fst) iis
(iid, _) <- rndToAction $ oneOf maxIis
let cstore = if isJust (lookup iid bodyAssocs) then COrgan else CEqp
return $ Just (iid, cstore)
sumOrganEqpServer :: MonadServer m
=> IK.EqpSlot -> ActorId -> m Int
sumOrganEqpServer eqpSlot aid = do
activeAssocs <- activeItemsServer aid
return $! sumSlotNoFilter eqpSlot activeAssocs
actorSkillsServer :: MonadServer m => ActorId -> m Ability.Skills
actorSkillsServer aid = do
activeItems <- activeItemsServer aid
body <- getsState $ getActorBody aid
fact <- getsState $ (EM.! bfid body) . sfactionD
let mleader = fst <$> gleader fact
getsState $ actorSkills mleader aid activeItems
|
beni55/LambdaHack
|
Game/LambdaHack/Server/CommonServer.hs
|
bsd-3-clause
| 22,445 | 0 | 33 | 5,988 | 6,225 | 3,191 | 3,034 | -1 | -1 |
{-|
Module : Main (file aern2-real-benchOp)
Description : execute a simple CR expression
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Main where
import MixedTypesNumPrelude
-- import Prelude
import Text.Printf
import System.Environment
import AERN2.MP
import AERN2.Real
import AERN2.Real.Tests () -- instance Arbitrary CauchyReal
main :: IO ()
main =
do
args <- getArgs
(computationDescription, result) <- processArgs args
putStrLn $ computationDescription
putStrLn $ "result = " ++ show result
processArgs ::
[String] ->
IO (String, MPBall)
processArgs [op, accuracyS] =
return (computationDescription, result)
where
computationDescription =
printf "computing %s using accuracy %d" op ac
ac :: Integer
ac = read accuracyS
result =
case op of
-- "exp" ->
-- map ((? (bitsS ac)) . exp) $
-- unsafePerformIO $ pickValues valuesSmall count
-- "log" ->
-- map ((~!) . (? (bitsS ac)) . log) $
-- unsafePerformIO $ pickValues valuesPositive count
"sqrt2" ->
((~!) . (? (bitsS ac)) . sqrt) 2
-- "cos" ->
-- map ((? (bitsS ac)) . cos) $
-- unsafePerformIO $ pickValues values count
-- "add" ->
-- map ((? (bitsS ac)) . (uncurry (+))) $
-- unsafePerformIO $ pickValues2 values values count
-- "mul" ->
-- map ((? (bitsS ac)) . (uncurry (*))) $
-- unsafePerformIO $ pickValues2 values values count
-- "div" ->
-- map ((~!) . (? (bitsS ac)) . (uncurry (/))) $
-- unsafePerformIO $ pickValues2 values valuesPositive count
-- "logistic" ->
-- map ((? (bitsS ac)) . (logistic 3.82 count)) $
-- [real 0.125]
_ -> error $ "unknown op " ++ op
processArgs _ =
error "expecting arguments: <operation> <precision>"
-- logistic :: Rational -> Integer -> CauchyReal -> CauchyReal
-- logistic c n x
-- | n == 0 = x
-- | otherwise = logistic c (n-1) $ c * x * (1-x)
|
michalkonecny/aern2
|
aern2-net/bench/aern2-real-simpleOp.hs
|
bsd-3-clause
| 2,204 | 0 | 16 | 676 | 260 | 153 | 107 | 30 | 2 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, DeriveDataTypeable #-}
module Rede.HarFiles.ServedEntry(
sreStatus
,sreHeaders
,sreContents
,sreHost
,resolveFromHar
,createResolveCenter
,servedResources
,allSeenHosts
-- ,createResolveCenterFromFilePath
,resourceHandleToByteString
-- ,hostsFromHarFile
,handleFromMethodAndUrl
-- ,createResolveCenterFromLazyByteString
,resolveCenterFromLazyByteString
,rcName
,rcOriginalUrl
,handlesAtResolveCenter
,ServedEntry (..)
,ResolveCenter
,BadHarFile (..)
,ResourceHandle()
) where
import Control.Exception
import qualified Control.Lens as L
import Control.Lens ( (^.), (&), (.~) )
import Control.Lens.TH (makeLenses)
-- import Data.Char
import Data.Typeable
import Data.Maybe (fromMaybe, isJust, fromJust)
import Data.Aeson (eitherDecode)
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import Network.URI (parseURI, uriAuthority, uriRegName)
import qualified Network.URI as U
import qualified Data.ByteString.Lazy as LB
import Data.ByteString.Char8 (unpack, pack)
-- import Data.Text(Text)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
-- import qualified Data.ByteString.Base64 as B64
import Rede.Utils (lowercaseText, hashFromUrl)
import Rede.MainLoop.Tokens (
UnpackedNameValueList(..)
, getHeader
)
import Rede.HarFiles.JSONDataStructure
-- How do we identify resources that are being
-- asked for? Plain ByteString? We can start this way...
-- if query-string reordering is an issue, we can supersede
-- this type somehow...
newtype ResourceHandle = ResourceHandle B.ByteString
deriving (Eq, Show, Ord)
resourceHandleToByteString :: ResourceHandle -> B.ByteString
resourceHandleToByteString (ResourceHandle bs) = bs
-- | Individual entries used on resolution
data ServedEntry = ServedEntry {
-- Status to return to the server
_sreStatus :: !Int
-- Headers to return to the server
,_sreHeaders :: !UnpackedNameValueList
-- And other contents which are part of the response,
-- also to return to the server.
,_sreContents :: !B.ByteString
-- Let's just keep here the host
,_sreHost :: !B.ByteString
}
-- | Everything needed to solve things out
data ResolveCenter = ResolveCenter {
-- Things I'm actually going to serve
_servedResources :: M.Map ResourceHandle ServedEntry
-- A list with all the hosts we have to emulate, without duplicates
,_allSeenHosts :: [B.ByteString]
-- A unique name that can be used to identify this resolve center...
,_rcName :: B.ByteString
-- The original url
,_rcOriginalUrl :: B.ByteString
-- Some other results, like the number of resources that
-- can't be served because they are in the wrong HTTP method
-- or protocol.
}
data BadHarFile = BadHarFile B.ByteString
deriving (Show, Typeable)
instance Exception BadHarFile
makeLenses ''ServedEntry
makeLenses ''ResolveCenter
-- Here: translate a .HAR file to a lookup function. It may happen
-- that this function doesn't find the resource, in that case return
-- Nothing. Another level should decide what response to cook on
-- that particular scenario.
--
-- TODO: The entry filtering done down below may need to be reported,
-- and for that, this interface would have to change.
resolveFromHar :: ResolveCenter -> ResourceHandle -> Maybe ServedEntry
resolveFromHar resolve_center resource_handle =
M.lookup resource_handle doc_dic
where
doc_dic = _servedResources resolve_center
handlesAtResolveCenter :: ResolveCenter -> [B.ByteString]
handlesAtResolveCenter resolve_center = map
(resourceHandleToByteString . fst)
(M.toList $ resolve_center ^. servedResources )
-- The function that builds a resolve center. This is the only
-- place where such resolve centers can be created. Other functions use this one.
createResolveCenter :: Har_PostResponse -> ResolveCenter
createResolveCenter har_document =
ResolveCenter
(M.fromList resource_pairs) -- <- Creates a dictionary
unduplicated_hosts
hash_piece
first_url
where
har_log = har_document ^. harLogPR
resource_pairs = extractPairs har_log
all_seen_hosts = map (L.view ( L._2 . sreHost) ) resource_pairs
unduplicated_hosts = (S.toList . S.fromList) all_seen_hosts
first_url = har_document ^. originUrl
hash_piece = hashFromUrl first_url
-- createResolveCenterFromFilePath :: B.ByteString -> IO ResolveCenter
-- createResolveCenterFromFilePath filename = do
-- file_contents <- LB.readFile $ unpack filename
-- case (decode file_contents :: Maybe Har_Outer ) of
-- Just doc_model -> return $ createResolveCenter doc_model
-- Nothing -> throw $ BadHarFile filename
-- createResolveCenterFromLazyByteString :: LB.ByteString -> ResolveCenter
-- createResolveCenterFromLazyByteString file_contents = do
-- case (decode file_contents :: Maybe Har_Outer ) of
-- Just doc_model -> createResolveCenter doc_model
-- Nothing -> throw $ BadHarFile $"InputString"
resolveCenterFromLazyByteString :: LB.ByteString -> ResolveCenter
resolveCenterFromLazyByteString file_contents = do
case (eitherDecode file_contents :: Either String Har_PostResponse ) of
Right har_post_response ->
createResolveCenter har_post_response
Left msg -> throw $ BadHarFile $ pack msg
-- Convenience function to extract all the hosts from a .har file.
-- Not very efficient.
-- hostsFromHarFile :: FilePath -> IO [B.ByteString]
-- hostsFromHarFile har_filename = do
-- resolve_center <- createResolveCenterFromFilePath $ pack har_filename
-- return $ resolve_center ^. allSeenHosts
extractPairs :: Har_Log -> [(ResourceHandle, ServedEntry)]
extractPairs har_log =
map fromJust $ filter isJust $ map docFromEntry $ filter entryCanBeServed doc_entries
where
-- Using a lens to fetch the entries from the document.
-- The parentheses are not needed, except as documentation
doc_entries =har_log ^. entries
-- Right now, we will be filtering out requests which are based on
-- methods other than GET
entryCanBeServed :: Har_Entry -> Bool
entryCanBeServed har_entry = http_method == "GET"
where
http_method = har_entry ^. request.method
docFromEntry :: Har_Entry -> Maybe (ResourceHandle, ServedEntry)
docFromEntry e = do
entry <- servedEntryFromStatusHeadersAndContents
(resp ^. status)
(resp ^. respHeaders . L.to harHeadersToUVL)
content_text
the_url
return (
handleFromMethodAndUrl
(req ^. method)
the_url
, entry
)
where
the_url = (req ^. reqUrl )
req = e ^. request
resp = e ^. response
content_text = fromMaybe "" (resp ^. content . contentText )
-- This not only changes format, it also lower-cases header names.
-- I do this as a way of normalizing them...
harHeadersToUVL :: [Har_Header] -> UnpackedNameValueList
harHeadersToUVL h = UnpackedNameValueList $ map
(\ har_header -> ( lowercaseText (har_header ^. headerName ), (har_header ^. headerValue) )
) h
handleFromMethodAndUrl :: HereString -> HereString -> ResourceHandle
handleFromMethodAndUrl methodx url =
ResourceHandle $ methodx `B.append` schema_neuter_url
where
schema_neuter_url = pack $ show complete_url
Just (U.URI {- scheme -} _ authority u_path u_query u_frag) = U.parseURI $ unpack url
Just (U.URIAuth _ use_host _) = authority
complete_url = U.URI {
U.uriScheme = "snu:"
,U.uriAuthority = Just $ U.URIAuth {
U.uriUserInfo = ""
,U.uriRegName = use_host
,U.uriPort = ""
}
,U.uriPath = u_path
,U.uriQuery = u_query
,U.uriFragment = u_frag
}
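-- A rough sketch of the normalisation above (made-up URL, not from a test):
-- something like
--
--   handleFromMethodAndUrl "GET" "https://user@example.com:443/a/b?q=1#frag"
--
-- should come out as ResourceHandle "GETsnu://example.com/a/b?q=1#frag":
-- the scheme is neutered to "snu:", user info and port are dropped, and
-- path, query and fragment are kept, so equivalent URLs share one handle.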
servedEntryFromStatusHeadersAndContents :: Int
-> UnpackedNameValueList
-> B.ByteString
-> B.ByteString
-> Maybe ServedEntry
servedEntryFromStatusHeadersAndContents statusx unvl contents the_url = do
uri <- parseURI $ unpack the_url
auth <- uriAuthority uri
let
host_of_url = pack $ uriRegName auth
preliminar = ServedEntry statusx unvl contents host_of_url
return $ heedContentTypeAndDecode preliminar
-- where
-- maybe_uri = parseURI $ unpack the_url
-- Just auth = uriAuthority uri
-- host_of_url = pack $ uriRegName auth
-- preliminar = ServedEntry statusx unvl contents host_of_url
contentsAreBinary :: B.ByteString -> Bool
contentsAreBinary content_type =
content_type
`S.member`
binary_content_types
where
binary_content_types = S.fromList [
"image/png",
"image/jpg"
]
heedContentTypeAndDecode :: ServedEntry -> ServedEntry
heedContentTypeAndDecode served_entry =
served_entry & sreContents .~ decoded_contents
where
decoded_contents = case maybe_content_type of
Just content_type ->
if contentsAreBinary content_type
then
rightly_decoded
else
not_decoded
Nothing -> not_decoded
maybe_content_type = getHeader (served_entry ^. sreHeaders) "content-type"
Right rightly_decoded = B64.decode not_decoded
not_decoded = served_entry ^. sreContents
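-- Rough behaviour sketch (illustrative, not a test): an entry whose
-- "content-type" header is "image/png" or "image/jpg" has its contents run
-- through B64.decode, since .har files store binary bodies base64-encoded;
-- any other content type is passed through unchanged.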
|
loadimpact/http2-test
|
hs-src/Rede/HarFiles/ServedEntry.hs
|
bsd-3-clause
| 9,875 | 0 | 12 | 2,514 | 1,533 | 871 | 662 | -1 | -1 |
module Main where
import Lib
main :: IO ()
main = parseArgs
|
blischalk/servermon
|
app/Main.hs
|
bsd-3-clause
| 62 | 0 | 6 | 14 | 22 | 13 | 9 | 4 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Haskell code generator
module Data.Protobuf.CodeGen (
convert
) where
-- import Control.Arrow
import Data.List
import Data.Protobuf.AST
import Data.Protobuf.DataTree
import Data.Generics.Uniplate.Data
import Language.Haskell.Exts.Syntax as Hask
-- | Convert module into the haskell code to be dumped
convert :: ([Identifier TagType],HsModule) -> Module
convert (qs, msg) =
Module s (modName qs)
[ LanguagePragma s [ Ident "DeriveDataTypeable"
, Ident "NoImplicitPrelude"
, Ident "FlexibleInstances"
, Ident "KindSignatures"
, Ident "StandaloneDeriving"
] ]
Nothing Nothing
-- Imports
( ImportDecl { importLoc = s
, importModule = ModuleName "Data.Protobuf.Imports"
, importQualified = True
, importSrc = False
, importPkg = Nothing
, importAs = Just $ ModuleName "P'"
, importSpecs = Nothing
}
: importList msg
)
-- Code
(convertDecl msg)
----------------------------------------------------------------
-- Generate import list
importList :: HsModule -> [ImportDecl]
importList
= map toImport . nub . concatMap pick . universeBi
where
pick (HsBuiltin _) = []
pick (HsUserMessage (Qualified qs q)) = [qs ++ [q]]
pick (HsUserEnum (Qualified qs q)) = [qs ++ [q]]
-- Create import
toImport qs = ImportDecl { importLoc = s
, importModule = ModuleName $ intercalate "." $ map identifier qs
, importQualified = True
, importSrc = False
, importPkg = Nothing
, importAs = Nothing
, importSpecs = Nothing
}
----------------------------------------------------------------
-- Generate data and instances declarations
convertDecl :: HsModule -> [Decl]
-- [Message]
convertDecl (HsMessage (TyName name) fields) =
[ DataDecl s typeKind
[] (Ident name)
[ UnkindedVar (Ident "r") ]
[ QualConDecl s [] [] $ RecDecl (Ident name) $ map (recordField bang) fields
]
[]
, DerivDecl s [] (qname "Show") [ tycon name `TyApp` qtycon "Checked" ]
, DerivDecl s [] (qname "Show") [ tycon name `TyApp` qtycon "Unchecked" ]
, instance_ "Default" (tycon name `TyApp` qtycon "Unchecked")
[ bind "def" =: foldl App (con name)
[ case defV of
Just v -> lit v
Nothing -> qvar "def"
| HsField _ _ _ defV <- fields ]
]
, instance_ "MessageField" (tycon name `TyApp` qtycon "Unchecked")
[ let ns1 = patNames "x" fields
ns2 = patNames "y" fields
in fun "mergeField" [ (PApp $ UnQual $ Ident name) (map PVar ns1)
, (PApp $ UnQual $ Ident name) (map PVar ns2)
]
=: appF (con name)
[ app [ qvar "mergeField"
, Var (UnQual n1)
, Var (UnQual n2)
]
| (n1, n2) <- zip ns1 ns2
]
]
, instance_ "Monoid" (tycon name `TyApp` qtycon "Unchecked")
[ bind "mempty" =: qvar "def"
, bind "mappend" =: qvar "mergeField"
]
, instance_ "Message" (tycon name)
[ bind "getMessage" =:
let_ [ TypeSig s [Ident "loop"] (qtycon "LoopType" `TyApp` tycon name)
, bind "loop" =:
Lambda s [pvar "v"]
( Do [ pvar "done" <-- qvar "isEmpty"
, Qualifier $ If (var "done")
(app [ qvar "return" , var "v" ])
(Do [ pvar "wt" <-- qvar "get"
, Qualifier $
Case (var "wt") $
concat [ caseField f | f <- fields]
++ [ PWildCard -->
Do [ Qualifier $ app [ qvar "skipUnknownField"
, var "wt" ]
, Qualifier $ app [ var "loop"
, var "v" ]
]
]
]
)
]
)
]
(app [ var "loop"
, qvar "mempty"
] )
, checkReq name fields
, let ns = patNames "x" fields
in fun "putMessage" [PApp (UnQual $ Ident name) (map PVar ns)] =:
Do [ Qualifier e | e <- zipWith putMessage ns fields ]
]
]
where
(typeKind,bang) =
case fields of
[_] -> (NewType, UnBangedTy)
_ -> (DataType, BangedTy )
-- [Enum]
convertDecl (HsEnum (TyName name) fields) =
[ DataDecl s DataType [] (Ident name) []
[ QualConDecl s [] [] (ConDecl (Ident n) []) | (TyName n, _) <- fields ]
[ (qname "Show", []), (qname "Eq", []), (qname "Enum", []), (qname "Bounded", []) ]
, instance_ "PbEnum" (tycon name) $
[ fun "fromPbEnum" [pvar n] =: lit i | (TyName n, i) <- fields ] ++
[ fun "toPbEnum" [plit i] =: con n | (TyName n, i) <- fields ]
, instance_ "Ord" (tycon name)
[ bind "compare" =: app [ qvar "comparing"
, qvar "fromPbEnum" ]
]
, instance_ "Default" (tycon name)
[ bind "def" =: con (case head fields of { (TyName n,_) -> n })
]
, instance_ "MessageField" (tycon name) []
]
----------------------------------------------------------------
-- Generate checkReq function
checkReq :: String -> [HsField] -> Decl
checkReq name fields =
fun "checkReq" [PApp (UnQual (Ident name)) (map PVar ns)] =:
foldl (\e1 e2 -> app [ qvar "ap", e1, e2])
(app [ qvar "return", var name ])
(zipWith check ns fields)
where
ns = patNames "x" fields
-- Repeated
check n (HsField (HsSeq (HsBuiltin _) _) _ _ _)
= app [ qvar "return", Var $ UnQual n ]
check n (HsField (HsSeq (HsUserEnum _) _) _ _ _)
= app [ qvar "return", Var $ UnQual n ]
check n (HsField (HsSeq (HsUserMessage _) _) _ _ _)
= app [ qvar "mapM", qvar "checkReq", Var $ UnQual n ]
-- Optional
check n (HsField (HsMaybe (HsBuiltin _)) _ _ (Just o))
= app [ qvar "checkMaybe", lit o, Var (UnQual n)]
check n (HsField (HsMaybe (HsUserEnum _)) _ _ (Just o))
= app [ qvar "checkMaybe", lit o, Var (UnQual n)]
check n (HsField (HsMaybe (HsBuiltin _)) _ _ _)
= app [ qvar "return", Var $ UnQual n ]
check n (HsField (HsMaybe (HsUserEnum _)) _ _ _)
= app [ qvar "return", Var $ UnQual n ]
check n (HsField (HsMaybe _) _ _ _)
= app [ qvar "checkMaybeMsg"
, Var (UnQual n)]
-- Required
check n (HsField (HsReq (HsBuiltin _)) _ _ _)
= app [ qvar "checkRequired" , Var (UnQual n) ]
check n (HsField (HsReq (HsUserEnum _)) _ _ _)
= app [ qvar "checkRequired" , Var (UnQual n) ]
check n (HsField (HsReq (HsUserMessage _)) _ _ _)
= app [ qvar "checkRequiredMsg" , Var (UnQual n) ]
----------------------------------------------------------------
-- Single fields of record for message
recordField :: (Hask.Type -> BangType) -> HsField -> ([Name], BangType)
recordField bang (HsField tp name _ _) =
([Ident name], outerType tp)
where
outerType (HsReq t ) = bang $ qtycon "Val" `TyApp` TyVar (Ident "r") `TyApp` innerType t
outerType (HsMaybe t ) = bang $ qtycon "Maybe" `TyApp` innerType t
outerType (HsSeq t _) = bang $ qtycon "Seq" `TyApp` innerType t
innerType (HsBuiltin t) = primType t
innerType (HsUserMessage q) = userType q
innerType (HsUserEnum q) = enumType q
userType (Qualified qs n) =
(TyCon $ Qual (modName (qs++[n])) (Ident $ identifier n)) `TyApp` TyVar (Ident "r")
enumType (Qualified qs n) =
(TyCon $ Qual (modName (qs++[n])) (Ident $ identifier n))
-- Builtin types
primType PbDouble = qtycon "Double"
primType PbFloat = qtycon "Float"
primType PbInt32 = qtycon "Int32"
primType PbInt64 = qtycon "Int64"
primType PbUInt32 = qtycon "Word32"
primType PbUInt64 = qtycon "Word64"
primType PbSInt32 = qtycon "Int32"
primType PbSInt64 = qtycon "Int64"
primType PbFixed32 = qtycon "Word32"
primType PbFixed64 = qtycon "Word64"
primType PbSFixed32 = qtycon "Int32"
primType PbSFixed64 = qtycon "Int64"
primType PbBool = qtycon "Bool"
primType PbString = qtycon "String"
primType PbBytes = qtycon "ByteString"
----------------------------------------------------------------
-- Alternatives for case expression in decoder
caseField :: HsField -> [Alt]
caseField (HsField ty name (FieldTag tag) _) =
-- We have found tag
[ (PApp (qname "WireTag") [plit tag, plit (typeTag ty)]) -->
Do [ pvar "f" <-- getter
, Qualifier $ app [ var "loop"
, RecUpdate (var "v") [
-- FIXME: record fields could be shadowed
-- here. It should be replaced with
-- a lambda expression
FieldUpdate (UnQual $ Ident name)
(app [ qvar "mergeField"
, app [ var name
, var "v"
]
, var "f"
]
)
]
]
]
-- Oops! wrong field type
, PApp (qname "WireTag") [plit tag, PWildCard] -->
app [ qvar "fail"
, lit "Invalid type tag encountered!"
]
]
where
-- Getters
getter = case ty of
HsReq t -> qvar "Present" .<$>. getField t
HsMaybe t -> qvar "Just" .<$>. getField t
HsSeq t True -> getPacked t
HsSeq t _ -> qvar "singleton" .<$>. getField t
-- Get packed fields
getPacked (HsBuiltin t) = app [ qvar "getPacked"
, getPrim t
]
getPacked _ = error "Impossible happened. Invalid packed option"
-- Get field
getField (HsUserMessage _) = qvar "getDelimMessage"
getField (HsUserEnum _) = qvar "getPbEnum"
getField (HsBuiltin t) = getPrim t
-- Getter for built-in types
getPrim t = case t of
PbDouble -> qvar "getFloat64le"
PbFloat -> qvar "getFloat32le"
PbInt32 -> qvar "getVarInt32"
PbInt64 -> qvar "getVarInt64"
PbUInt32 -> qvar "getVarWord32"
PbUInt64 -> qvar "getVarWord64"
PbSInt32 -> qvar "getZigzag32"
PbSInt64 -> qvar "getZigzag64"
PbFixed32 -> qvar "getWord32le"
PbFixed64 -> qvar "getWord64le"
PbSFixed32 -> qvar "fromIntegral" .<$>. qvar "getWord32le"
PbSFixed64 -> qvar "fromIntegral" .<$>. qvar "getWord64le"
PbBool -> qvar "getVarBool"
PbString -> qvar "getPbString"
PbBytes -> qvar "getPbBytestring"
----------------------------------------------------------------
-- Encode message field
putMessage :: Name -> HsField -> Exp
putMessage nm (HsField ty _ (FieldTag tag) _) =
putField ty
where
-- Encode field
putField (HsReq t) = app [ innerPut (putSingle t)
, Var $ UnQual nm
]
putField (HsMaybe t) = app [ qvar "putOptional"
, innerPut (putSingle t)
, Var $ UnQual nm
]
putField (HsSeq t False) = app [ qvar "mapM_"
, innerPut (putSingle t)
, Var $ UnQual nm
]
putField (HsSeq (HsBuiltin t) True ) = app
[ innerPut $ app [ qvar "putPacked"
, putPrim t
]
, Var $ UnQual nm
]
putField _ = error "Impossible happened: invalid packed field"
--
innerPut expr = app
[ qvar "putWithWireTag"
, lit tag
, lit (typeTag ty)
, expr
]
--
putSingle (HsBuiltin t) = putPrim t
putSingle (HsUserEnum _) = qvar "putPbEnum"
putSingle (HsUserMessage _) = qvar "putMessage"
-- Put primitive type
putPrim t = case t of
PbDouble -> qvar "putFloat64le"
PbFloat -> qvar "putFloat32le"
PbInt32 -> qvar "putVarInt32"
PbInt64 -> qvar "putVarInt64"
PbUInt32 -> qvar "putVarWord32"
PbUInt64 -> qvar "putVarWord64"
PbSInt32 -> qvar "putZigzag32"
PbSInt64 -> qvar "putZigzag64"
PbFixed32 -> qvar "putWord32le"
PbFixed64 -> qvar "putWord64le"
PbSFixed32 -> qvar "fromIntegral" .<$>. qvar "putWord32le"
PbSFixed64 -> qvar "fromIntegral" .<$>. qvar "putWord64le"
PbBool -> qvar "putVarBool"
PbString -> qvar "putPbString"
PbBytes -> qvar "putPbBytestring"
----------------------------------------------------------------
-- Type tags for the protobuf wire format
typeTag :: HsType -> Integer
typeTag ty = case ty of
HsReq t -> innerTag t
HsMaybe t -> innerTag t
HsSeq _ True -> lenDelim
HsSeq t _ -> innerTag t
where
innerTag (HsUserMessage _) = lenDelim
innerTag (HsUserEnum _) = varint
innerTag (HsBuiltin t) = case t of
PbDouble -> fixed64
PbFloat -> fixed32
PbInt32 -> varint
PbInt64 -> varint
PbUInt32 -> varint
PbUInt64 -> varint
PbSInt32 -> varint
PbSInt64 -> varint
PbFixed32 -> fixed32
PbFixed64 -> fixed64
PbSFixed32 -> fixed32
PbSFixed64 -> fixed64
PbBool -> varint
PbString -> lenDelim
PbBytes -> lenDelim
varint, fixed32, fixed64, lenDelim :: Integer
varint = 0
fixed64 = 1
lenDelim = 2
fixed32 = 5
----------------------------------------------------------------
-- Helpers
----------------------------------------------------------------
-- Get a module name
modName :: [Identifier TagType] -> ModuleName
modName = ModuleName . intercalate "." . map identifier
-- Null location
s :: SrcLoc
s = SrcLoc "" 0 0
-- Get qualified name
qname :: String -> QName
qname = Qual (ModuleName "P'") . Ident
-- Shorhands for variables, constructors and type constructors
var,qvar,con,qcon :: String -> Exp
var = Var . UnQual . Ident
qvar = Var . qname
con = Con . UnQual . Ident
qcon = Con . qname
tycon, qtycon :: String -> Hask.Type
tycon = TyCon . UnQual . Ident
qtycon = TyCon . qname
pvar :: String -> Pat
pvar = PVar . Ident
app :: [Exp] -> Exp
app = foldl1 App
appF :: Exp -> [Exp] -> Exp
appF = foldl App
(.<$>.) :: Exp -> Exp -> Exp
f .<$>. g = app [ qvar "fmap" , f , g ]
instance_ :: String -> Hask.Type -> [Decl] -> Decl
instance_ cl ty decls =
InstDecl s [] (qname cl) [ty] $ map InsDecl decls
fun :: String -> [Pat] -> (String,[Pat])
fun = (,)
bind :: String -> (String,[Pat])
bind = flip fun []
-- Let binding
let_ :: [Decl] -> Exp -> Exp
let_ xs e = Let (BDecls xs) e
-- Shorthand for bind in do block
(<--) :: Pat -> Exp -> Stmt
p <-- e = Generator s p e
-- Shorthand for clause in case alternative
(-->) :: Pat -> Exp -> Alt
p --> e = Alt s p (UnGuardedAlt e) (BDecls [])
(=:) :: (String, [Pat]) -> Exp -> Decl
(name,pats) =: expr = FunBind [ Match s (Ident name) pats Nothing (UnGuardedRhs expr) (BDecls []) ]
-- list of variable names for patters
patNames :: String -> [a] -> [Name]
patNames pref xs = [ Ident $ pref ++ show i | (i,_) <- zip [1::Integer .. ] xs ]
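-- For instance (illustrative): patNames "x" [(), (), ()] gives
-- [Ident "x1", Ident "x2", Ident "x3"], one fresh name per field.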
class LiteralVal l where
lit :: l -> Exp
plit :: l -> Pat
instance LiteralVal Integer where
lit = Lit . Int
plit = PLit . Int
instance LiteralVal String where
lit = Lit . String
plit = PLit . String
instance LiteralVal Bool where
lit True = Con $ qname "True"
lit False = Con $ qname "False"
plit = error "UNIMPLEMENTED"
instance LiteralVal Rational where
lit = Lit . Frac
plit = PLit . Frac
instance LiteralVal OptionVal where
lit (OptString a) = lit a
lit (OptBool b) = lit b
lit (OptInt i) = lit i
lit (OptReal r) = lit r
plit = error "UNIMPLEMENTED"
|
Shimuuar/protobuf
|
protobuf-grammar/Data/Protobuf/CodeGen.hs
|
bsd-3-clause
| 16,933 | 0 | 29 | 6,144 | 5,200 | 2,650 | 2,550 | 337 | 21 |
{-# LANGUAGE OverloadedStrings #-}
module Jira2Sheet.GoogleDrive where
import Control.Applicative (empty)
import Control.Lens ((&), (.~), (?~), (^.))
import Data.Aeson (FromJSON (..), Value (..), (.:))
import Data.Aeson.Types (typeMismatch)
import qualified Data.ByteString.Lazy as LBS
import Data.Text (Text)
import Network.OAuth.OAuth2 (AccessToken (..), OAuth2 (..))
import Network.Wreq (auth, defaults, header, param,
responseBody)
import qualified Network.Wreq as Wreq
import Jira2Sheet.Types.HTTP (MonadHTTP (..))
data DriveFileMetadata = DriveFileMetadata {
fileId :: Text
} deriving (Show, Eq)
instance FromJSON DriveFileMetadata where
parseJSON (Object v) = DriveFileMetadata <$> v .: "id"
parseJSON invalid = typeMismatch "DriveFileMetadata" invalid
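-- Rough sketch (illustrative JSON): decoding {"id":"1A2b3C"} should yield
-- DriveFileMetadata { fileId = "1A2b3C" }, i.e. only the "id" field of the
-- metadata returned by Drive is kept.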
oauth = OAuth2 { oauthClientId = "804038769221-786vn5l5m772h21masc5p4nm3gl995as.apps.googleusercontent.com"
, oauthClientSecret = "0JuYNS6p7ibK8jA38_rJBkWO"
, oauthCallback = Just "urn:ietf:wg:oauth:2.0:oob"
, oauthOAuthorizeEndpoint = "https://accounts.google.com/o/oauth2/auth"
, oauthAccessTokenEndpoint = "https://www.googleapis.com/oauth2/v4/token" }
uploadCsv :: (MonadHTTP m) => AccessToken -> LBS.ByteString -> m DriveFileMetadata
uploadCsv tkn content = do
let opts = defaults & auth ?~ Wreq.oauth2Bearer (accessToken tkn)
& header "Content-Type" .~ ["text/csv"]
& param "uploadType" .~ ["media"]
response <- postWith opts "https://www.googleapis.com/upload/drive/v3/files" content
pure $ response ^. responseBody
|
berdario/jira2sheet
|
src/Jira2Sheet/GoogleDrive.hs
|
bsd-3-clause
| 1,806 | 0 | 17 | 482 | 394 | 232 | 162 | 31 | 1 |
import Lichen.Count.Config
import Lichen.Count.Main
main :: IO ()
main = realMain defaultConfig
|
Submitty/AnalysisTools
|
app/count/Main.hs
|
bsd-3-clause
| 97 | 0 | 6 | 13 | 31 | 17 | 14 | 4 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Mafia.Cabal.Dependencies
( findDependenciesForCurrentDirectory
, findDependenciesForPackage
, filterPackages
, flagArg
-- exported for testing
, parsePackagePlan
, renderPackagePlan
) where
import Control.Monad.Trans.Bifunctor (firstT)
import Control.Monad.Trans.Either (EitherT, hoistEither, runEitherT, left)
import Data.Attoparsec.Text (Parser)
import qualified Data.Attoparsec.Text as A
import qualified Data.Graph as Graph
import qualified Data.List as List
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.String (String)
import qualified Data.Text as T
import Mafia.Cabal.Constraint
import Mafia.Cabal.Index
import Mafia.Cabal.Package
import Mafia.Cabal.Process (cabalFrom)
import Mafia.Cabal.Types
import Mafia.Cabal.Version
import Mafia.Ghc
import Mafia.IO
import Mafia.P
import Mafia.Package
import Mafia.Path
import Mafia.Process
import System.IO (IO)
------------------------------------------------------------------------
filterPackages :: PackageName -> Set Package -> Set Package
filterPackages name pkgs =
Set.fromDistinctAscList . mapMaybe (filterPackage name) $ Set.toAscList pkgs
filterPackage :: PackageName -> Package -> Maybe Package
filterPackage name = \case
Package ref deps hash
| name == pkgName (refId ref) ->
Just (Package ref Set.empty hash)
| deps' <- filterPackages name deps
, not (Set.null deps') ->
Just (Package ref deps' hash)
| otherwise ->
Nothing
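-- Rough behaviour sketch (hypothetical dependency tree): filtering for
-- "text" keeps a package when it is "text" itself (its own deps are then
-- dropped) or when some transitive dependency is "text", in which case its
-- dependency set is narrowed to just the paths that lead to "text".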
------------------------------------------------------------------------
findDependenciesForCurrentDirectory :: [Flag] -> [SourcePackage] -> [Constraint] -> EitherT CabalError IO Package
findDependenciesForCurrentDirectory flags spkgs constraints = do
hoistEither . fromInstallPlan spkgs =<< installPlanForCurrentDirectory flags spkgs constraints
findDependenciesForPackage :: PackageName -> [Constraint] -> EitherT CabalError IO Package
findDependenciesForPackage name constraints = do
hoistEither . fromInstallPlan [] =<< installPlanForPackage name constraints
fromInstallPlan :: [SourcePackage] -> [PackagePlan] -> Either CabalError Package
fromInstallPlan spkgs rdeps =
let rdMap =
mapFromList (refId . ppRef) rdeps
spCombine s r =
r { ppRef = (ppRef r) { refSrcPkg = Just s } }
spMap =
mapFromList spPackageId spkgs
combinedRevDeps =
Map.intersectionWith spCombine spMap rdMap `Map.union` rdMap
(graph, fromVertex0) =
Graph.graphFromEdges' (fmap toGraphKey (Map.elems combinedRevDeps))
fromVertex =
fromGraphKey . fromVertex0
packageRefs =
Map.fromList .
fmap (\ref -> (refId ref, ref)) .
fmap fromVertex $
Graph.topSort graph
dependencies =
reifyPackageRefs $
Map.unionsWith Set.union $
fmap (\(k,v) -> Map.fromList [(k, Set.singleton v), (v, Set.empty)]) $
fmap (bimap fromVertex fromVertex) $
Graph.edges $
Graph.transposeG $
graph
lookupRef ref =
fromMaybe (mkPackage ref Set.empty) (Map.lookup ref dependencies)
topLevels =
fmap (refId . ppRef) $
filter (null . ppDeps) rdeps
in
case topLevels of
[] ->
Left CabalNoTopLevelPackage
[topLevel] ->
case fmap lookupRef (Map.lookup topLevel packageRefs) of
Nothing ->
Left (CabalTopLevelPackageNotFoundInPlan topLevel)
Just pkg ->
Right pkg
xs ->
Left (CabalMultipleTopLevelPackages xs)
reifyPackageRefs :: Map PackageRef (Set PackageRef) -> Map PackageRef Package
reifyPackageRefs refs =
let pkgs =
Map.mapWithKey lookup refs
lookup ref deps =
mkPackage ref (Set.fromList . mapMaybe (\d -> Map.lookup d pkgs) $ Set.toList deps)
in pkgs
mapFromList :: Ord k => (v -> k) -> [v] -> Map k v
mapFromList f xs = Map.fromList (List.zip (fmap f xs) xs)
toGraphKey :: PackagePlan -> (PackagePlan, PackageId, [PackageId])
toGraphKey pp = (pp, refId (ppRef pp), ppDeps pp)
fromGraphKey :: (PackagePlan, PackageId, [PackageId]) -> PackageRef
fromGraphKey (pp, _, _) = ppRef pp
installPlanForCurrentDirectory :: [Flag] -> [SourcePackage] -> [Constraint] -> EitherT CabalError IO [PackagePlan]
installPlanForCurrentDirectory flags spkgs constraints0 = do
let
-- Make sure we can only install the source package by pinning its version
-- explicitly. This makes cabal fail if the .cabal file would have caused
-- the hackage version to be installed instead.
constraints =
constraintArgs $ constraints0 <> fmap sourcePackageConstraint spkgs
flagArgs =
fmap flagArg flags
args =
[ "--enable-tests"
, "--enable-benchmarks"
, "--enable-profiling" ]
dir <- getCurrentDirectory
makeInstallPlan (Just dir) (fmap spDirectory spkgs) (args <> constraints <> flagArgs)
installPlanForPackage :: PackageName -> [Constraint] -> EitherT CabalError IO [PackagePlan]
installPlanForPackage name constraints =
makeInstallPlan Nothing [] $ [unPackageName name] <> constraintArgs constraints
makeInstallPlan :: Maybe Directory -> [Directory] -> [Argument] -> EitherT CabalError IO [PackagePlan]
makeInstallPlan mdir sourcePkgs installArgs = do
(_ :: GhcVersion) <- firstT CabalGhcError getGhcVersion -- check ghc is on the path
checkCabalVersion
withSystemTempDirectory "mafia-deps-" $ \tmp -> do
let
dir = fromMaybe tmp mdir
cabal = cabalFrom dir (tmp </> "sandbox.config") []
Hush <- cabal "sandbox" ["init", "--sandbox", tmp]
-- this is a fast 'cabal sandbox add-source'
createIndexFile sourcePkgs tmp
let
installDryRun args =
cabal "install" $
[ "--reorder-goals"
, "--max-backjumps=-1"
, "--avoid-reinstalls"
, "--dry-run" ] <> installArgs <> args
result <- liftIO . runEitherT $ installDryRun ["-v2"]
case result of
Right (OutErr out _) -> do
plan <- hoistEither (parseInstallPlan out)
case mapMaybe takeReinstall plan of
[] -> return plan
xs -> left (CabalReinstallsDetected xs)
Left _ -> do
-- this will fail with the standard cabal dependency error message
Pass <- installDryRun []
-- this should never happen
left CabalInstallIsNotReferentiallyTransparent
flagArg :: Flag -> Argument
flagArg = \case
FlagOff f ->
"--flags=-" <> f
FlagOn f ->
"--flags=" <> f
takeReinstall :: PackagePlan -> Maybe PackagePlan
takeReinstall p =
case p of
PackagePlan _ _ _ (Reinstall _) -> Just p
PackagePlan _ _ _ _ -> Nothing
------------------------------------------------------------------------
parseInstallPlan :: Text -> Either CabalError [PackagePlan]
parseInstallPlan =
first (CabalInstallPlanParseError . T.pack) .
A.parseOnly pPackagePlans
parsePackagePlan :: Text -> Either String PackagePlan
parsePackagePlan txt =
let go err = "Invalid package plan: '" <> T.unpack txt <> "'\nExpected: " <> err
in first go (A.parseOnly pPackagePlan txt)
pPackagePlans :: Parser [PackagePlan]
pPackagePlans = do
pDropLines "In order, the following would be installed:"
A.manyTill (pPackagePlan <* A.char '\n') (A.takeWhile (== '\n') *> A.endOfInput)
-- Drop all lines up to and including the provided target line.
pDropLines :: Text -> Parser ()
pDropLines target =
let go = do
l <- A.takeWhile (/= '\n') <* A.char '\n'
unless (l == target) go
in go
pPackagePlan :: Parser PackagePlan
pPackagePlan = do
pid <- pPackageId
latest <- optional pLatest
flags <- many pFlag
_ <- many pStanza
deps <- fromMaybe [] <$> optional pVia
status <- pSpaceSep *> (pNewPackage <|> pNewVersion <|> pReinstall)
pure (PackagePlan (PackageRef pid flags Nothing) latest deps status)
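-- A rough sketch of the kind of line this is meant to accept (package names
-- and versions are made up, not taken from a real cabal run):
--
--   foo-1.2.3 (latest: 1.3.0) -bar +baz (via: dep-0.1) (new package)
--
-- which should parse to a plan for foo-1.2.3 with flags -bar and +baz, one
-- dependency dep-0.1 and status NewPackage.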
pSpaceSep :: Parser Char
pSpaceSep = A.char '\n' <|> A.char ' '
pFlag :: Parser Flag
pFlag =
let flag p = A.char ' ' *> A.char p *> A.takeTill (== ' ')
in FlagOff <$> flag '-' <|>
FlagOn <$> flag '+'
pStanza :: Parser ()
pStanza =
A.string " *test" *> pure () <|>
A.string " *bench" *> pure ()
pLatest :: Parser Version
pLatest =
A.string " (latest:" *> pSpaceSep *> pVersion (== ')') <* A.char ')'
pVia :: Parser [PackageId]
pVia =
pSpaceSep *> A.string "(via:" *> pSpaceSep *> pPackageId `A.sepBy1` pSpaceSep <* A.char ')'
pNewPackage :: Parser PackageStatus
pNewPackage =
A.string "(new" *> pSpaceSep *> A.string "package)" *> pure NewPackage
pNewVersion :: Parser PackageStatus
pNewVersion =
A.string "(new" *> pSpaceSep *> A.string "version)" *> pure NewVersion
pReinstall :: Parser PackageStatus
pReinstall = do
_ <- A.string "(reinstall)" *> pSpaceSep *> A.string "(changes:" *> pSpaceSep
cs <- pPackageChange `A.sepBy1` (A.string "," *> pSpaceSep)
_ <- A.char ')'
pure (Reinstall cs)
pPackageChange :: Parser PackageChange
pPackageChange =
let pkg = pPackageId
arr = A.string " -> "
ver = pVersion (\x -> x == ',' || x == ')')
in PackageChange <$> pkg <*> (arr *> ver)
-- TODO would be good if parsePackageId/parseVersion were attoparsec parsers
-- TODO instead of `Text -> Maybe a` so we didn't need these two clunky
-- TODO wrappers below:
pPackageId :: Parser PackageId
pPackageId = do
let
isPkgIdChar c
| c >= 'a' && c <= 'z' = True
| c >= 'A' && c <= 'Z' = True
| c >= '0' && c <= '9' = True
| c == '-' = True
| c == '.' = True
| otherwise = False
txt <- A.takeWhile1 isPkgIdChar
case parsePackageId txt of
Nothing -> fail ("not a package-id: " <> T.unpack txt)
Just pid -> pure pid
pVersion :: (Char -> Bool) -> Parser Version
pVersion p = do
txt <- A.takeTill p
case parseVersion txt of
Nothing -> fail ("not a version number: " <> T.unpack txt)
Just ver -> pure ver
|
ambiata/mafia
|
src/Mafia/Cabal/Dependencies.hs
|
bsd-3-clause
| 10,338 | 0 | 21 | 2,427 | 2,995 | 1,515 | 1,480 | 249 | 4 |
module Data.Aeson.Serialize.Internal
(
) where
|
plow-technologies/aeson-serialize
|
src/Data/Aeson/Serialize/Internal.hs
|
bsd-3-clause
| 55 | 0 | 3 | 13 | 11 | 8 | 3 | 2 | 0 |
-- | The top level module.
module Graphene
( module Graphene.Types
) where
import Graphene.Types
|
piyush-kurur/graphene
|
Graphene.hs
|
bsd-3-clause
| 112 | 0 | 5 | 30 | 18 | 12 | 6 | 3 | 0 |
module Main where
import Prelude
import Data.Monoid
import Data.String
import Text.Read
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TLB
import qualified Data.Text.Lazy.IO as TLIO
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.Scientific as Sc
import Data.Functor.Identity
import Control.Monad.Writer
import Control.Monad
import Control.Arrow (first)
import qualified Options.Applicative as O
import System.Process (readProcess)
import qualified Data.Text.Format as F
-- * Command line arguments
data Config = Config
-- histogram
{ buckets :: Integer
  , scale :: Either Integer Integer -- Left: force counts to this scale; Right: fit to terminal width ($(tput cols))
-- labels
, hideBuckets :: Bool
, hideCounts :: Bool
, input :: String
} deriving (Show)
argp :: Config -> O.ParserInfo Config
argp def = O.info (O.helper <*> optParser) optProgDesc
where
optProgDesc = O.fullDesc
<> O.header "ho"
<> O.progDesc "Make simple histograms on the terminal from numeric input"
optParser = pure Config
<*> optAuto 'b' "buckets" "number of buckets" (O.value (buckets def))
<*> optAuto 's' "scale" "scale counts to some integer" (O.value (scale def))
<*> optSw 'r' "hide-buckets" "hide buckets" mempty
<*> optSw 'c' "hide-counts" "hide counts" mempty
<*> O.argument O.str (O.metavar "FILE")
main :: IO ()
main = do
Config b s r c f <- getDefaults
file <- if f == "-" then getContents else readFile f
let (freqs :: Counts Sc.Scientific, errors) = readFreqs . lines $ file
TLIO.putStr . sh r c s $ toBuckets b $ freqs
let realErrors = filter (not . all (== ' ')) errors
when (length realErrors > 0) $ putStrLn $ "Couldn't parse these lines: " <> show realErrors
getDefaults :: IO Config
getDefaults = do
cols <- read' "cols"
lines <- read' "lines"
let def = Config (lines - 1) (Right cols) False False "-"
args <- O.execParser $ argp def
print args
return args
where
read' what = read <$> readProcess "tput" [what] ""
-- * Histogram
type Counts a = M.Map a Integer
data Buckets
= Interval [((Rational, Rational), Integer)]
| Discrete [(Rational, Integer)]
deriving Show
readFreqs :: (Read a, Ord a) => [String] -> (Counts a, [String])
readFreqs ts = runWriter $ foldM f M.empty ts
where
f m t = case readMaybe t of
Nothing -> tell [t] *> pure m
Just n -> pure $ M.insertWith (+) n 1 m
toBuckets :: (Fractional a, Ord a, Show a, Real a) => Integer -> Counts a -> Buckets
toBuckets n rm = if toInteger (M.size rm) <= n
then Discrete li
else let
buckets = mkIntervals n (map fst li)
in Interval $ f buckets li
where
li = map (first toRational) $ M.toAscList rm
mkIntervals n li = let
min = toRational $ minimum li
max = toRational $ maximum li
step = (max - min) / fromIntegral n
points = takeWhile (<= max) $ iterate (+step) min
intervals = points `zip` tail points
in takeWhile ((<= max) . snd) intervals
f :: (Ord a, Num b) => [(a, a)] -> [(a, b)] -> [((a, a), b)]
f (bu@ (_, b) : xs) li = let
(cur, rest) = L.partition ((<= b) . fst) li
in (bu, sum (map snd cur)) : f xs rest
f [] [] = []
f a b = error "This should never happen"
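    -- Rough sketch of the bucketing (made-up numbers): with 2 buckets over
    -- rationals whose extremes are 0 and 10, mkIntervals 2 [0, 10] should
    -- give the intervals (0,5) and (5,10), i.e. equal-width slices of the
    -- min..max range; f then sums the counts falling into each interval.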
-- * Show
sh :: (Real a, RealFrac a) => Bool -> Bool -> Either Integer Integer -> Buckets -> TL.Text
sh hideBuckets hideCounts scaleC b = TL.unlines $ case b of
Interval bm -> let
(amax, bmax) = intervalLabels bm
(aformat, amaxPadded) = mkFormat amax
(bformat, bmaxPadded) = mkFormat bmax
intervalLabel (a, b) = when showBuckets $ tell (TLB.toLazyText $ aformat a <> sep1 <> bformat b <> sep2)
bucketLabelLength = toInteger $ (amaxPadded + bmaxPadded) + tlen sep1 + tlen sep2
in map (row intervalLabel (scale bucketLabelLength)) bm
Discrete bm -> let
amax = discreteLabels bm
(aformat, amaxPadded) = mkFormat amax
discreteLabel a = when showBuckets $ tell (TLB.toLazyText $ aformat a <> sep2)
bucketLabelLength = toInteger $ amaxPadded + tlen sep2
in map (row discreteLabel (scale bucketLabelLength)) bm
where
showBuckets = not hideBuckets
showCounts = not hideCounts
sep1 = " - "
sep2 = " "
sep3 = " "
countBar scale c = TL.replicate (fromIntegral (scale c)) "*"
countNum n c = TLB.toLazyText $ F.left n ' ' c <> sep3
maxCount = maximum $ case b of Interval bm -> map snd bm; Discrete bm -> map snd bm
countLength = length $ show maxCount
countLabelLength = toInteger (countLength + tlen sep3)
row :: forall b. (b -> W ()) -> (Integer -> Integer) -> (b, Integer) -> TL.Text
row mkLabel scale (bucket, c) = execWriter $ do
mkLabel bucket
when showCounts $ tell (countNum countLength c)
tell $ countBar scale c
scale :: Integer -> Integer -> Integer
scale bucketLabelLength = case scaleC of
Left scaleTo -> let frac = fromInteger scaleTo / fromInteger maxCount
in \cur -> floor (frac * fromInteger cur)
Right termWidth -> let
deduction = (if showBuckets then bucketLabelLength else 0) + (if showCounts then countLabelLength else 0)
scaleTo = termWidth - deduction
in if maxCount < scaleTo
then id
else let
frac = fromInteger scaleTo / fromInteger maxCount
in \cur -> floor (frac * fromInteger cur)
type W = WriterT TL.Text Data.Functor.Identity.Identity
tlen = fromIntegral . TL.length
intervalLabels bm = let
(as, bs) = unzip $ map fst bm
in (intSize as, intSize bs)
discreteLabels bm = intSize (map fst bm)
mkFormat n = let p = n + 3 in (F.left p ' ' . F.prec n, p)
intSize = length . show . maximum . map ceiling
-- * Helpers
optSw = maker O.switch
-- optStr = maker O.strOption
optAuto = maker (O.option O.auto)
maker f short long help more = f
$ O.short short
<> O.long long
<> O.help help
<> more
|
eyeinsky/ho
|
Main.hs
|
bsd-3-clause
| 5,903 | 0 | 20 | 1,420 | 2,282 | 1,179 | 1,103 | -1 | -1 |
module Arhelk.Armenian.Lemma.Data.Common where
import TextShow
import Data.Text as T
-- | Describes the possible parts of speech in Russian
data SpeachPart =
    Substantive     -- ^ Noun
  | Adjective       -- ^ Adjective
  | Numeral         -- ^ Numeral
  | Pronoun         -- ^ Pronoun
  | Verb            -- ^ Verb
  | Adverb          -- ^ Adverb
  | Preposition     -- ^ Preposition
  | Conjunction     -- ^ Conjunction
  | GrammarParticle -- ^ Particle
  | Interjection    -- ^ Interjection
  | Participle      -- ^ Participle
  | Transgressive   -- ^ Adverbial participle (transgressive)
deriving (Eq, Ord, Enum, Show, Bounded)
instance TextShow SpeachPart where
showb p = case p of
Substantive -> "сущ."
Adjective -> "прил."
Numeral -> "числ."
Pronoun -> "мест."
Verb -> "гл."
Adverb -> "нар."
Preposition -> "предл."
Conjunction -> "союз"
GrammarParticle -> "част."
Interjection -> "межд."
Participle -> "прич."
Transgressive -> "деепр."
-- | Grammatical number: singular or plural
data GrammarQuantity =
    GrammarSingle   -- ^ Singular
  | GrammarMultiple -- ^ Plural
deriving (Eq, Ord, Enum, Show, Bounded)
instance TextShow GrammarQuantity where
showb v = case v of
GrammarSingle -> "ед. число"
GrammarMultiple -> "мн. число"
-- | Grammatical person
data GrammarPerson =
    FirstPerson  -- ^ First person
  | SecondPerson -- ^ Second person
  | ThirdPerson  -- ^ Third person
deriving (Eq, Ord, Enum, Show, Bounded)
instance TextShow GrammarPerson where
showb v = case v of
FirstPerson -> "1 лицо"
SecondPerson -> "2 лицо"
ThirdPerson -> "3 лицо"
-- | Grammatical case.
data Հոլով =
    Ուղղական -- ^ Nominative
  | Սերական -- ^ Genitive
  | Տրական -- ^ Dative
  | Հայցական -- ^ Accusative
  | Բացարական -- ^ Ablative
  | Գործիական -- ^ Instrumental
  | Ներգոյական -- ^ Locative
deriving (Eq, Ord, Enum, Show, Bounded)
instance TextShow Հոլով where
showb v = case v of
Ուղղական -> "им. падеж"
Սերական -> "род. падеж"
Տրական -> "дат. падеж"
Հայցական -> "вин. падеж"
Բացարական -> "исходн. падеж"
Գործիական -> "твор. падеж"
Ներգոյական -> "местн. падеж"
holovs::Հոլով -> [Text]
holovs Սերական = ["ի", "վի", "ան", "ոջ", "ու", "ուն"]
holovs Տրական = ["ի", "վի", "ու", "ան", "ոջ", "ու"]
holovs Հայցական = ["ի", "ու", "ան", "ոջ"]
holovs Բացարական = ["ից", "ից", "ու", "ուց"]
holovs Գործիական = ["ով", "ով", "ու", "ով", "ով"]
holovs Ներգոյական = ["ան","ում", "ում"]
holovs Ուղղական = ["", "ու", "ուն"]
++ ["ի", "վի", "ան", "ոջ", "ու", "ուն"]
++ ["ի", "վի", "ու", "ան", "ոջ", "ու"]
++ ["ի", "ու", "ան", "ոջ"]
++ ["ից", "ից", "ու", "ուց"]
++ ["ով", "ով", "ու", "ով", "ով"]
++ ["ան","ում", "ում"]
|
Teaspot-Studio/arhelk-armenian
|
src/Arhelk/Armenian/Lemma/Data/Common.hs
|
bsd-3-clause
| 3,391 | 0 | 11 | 604 | 1,048 | 585 | 463 | 81 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Unison.Kind where
import GHC.Generics
import Data.Aeson.TH
data Kind = Star | Constraint | Arrow Kind Kind deriving (Eq,Ord,Read,Show,Generic)
deriveJSON defaultOptions ''Kind
|
CGenie/platform
|
shared/src/Unison/Kind.hs
|
mit
| 251 | 0 | 6 | 34 | 69 | 39 | 30 | 7 | 0 |
--
--
--
-----------------
-- Exercise 10.1.
-----------------
--
--
--
module E'10''1 where
-- List comprehension:
-- -------------------
doubleAll :: [Integer] -> [Integer]
doubleAll integerList
= [ integer * 2 | integer <- integerList ]
-- doubleAll [ 2 , 1 , 7 ]
-- ~> [ integer * 2 | integer <- [ 2 , 1 , 7 ] ]
--
-- integer 2 1 7
-- integer * 2 4 2 14
--
-- ~> [ 4 , 2 , 14 ]
-- Primitive recursion:
-- --------------------
doubleAll' :: [Integer] -> [Integer]
doubleAll' [] = []
doubleAll' (integer : remainingIntegers)
= integer * 2 : doubleAll remainingIntegers
-- doubleAll' [ 2 , 1 , 7 ]
-- ~> doubleAll' ( 2 : [ 1 , 7 ] )
-- ~> ( 2 * 2 ) : doubleAll' [ 1 , 7 ]
-- ~> 4 : doubleAll' [ 1 , 7 ]
-- ~> 4 : doubleAll' ( 1 : [ 7 ] )
-- ~> 4 : ( 1 * 2 ) : doubleAll' [ 7 ]
-- ~> 4 : 2 : doubleAll' [ 7 ]
-- ~> 4 : 2 : doubleAll' ( 7 : [] )
-- ~> 4 : 2 : ( 7 * 2 ) : doubleAll' []
-- ~> 4 : 2 : 14 : doubleAll' []
-- ~> 4 : 2 : 14 : []
-- ~> [ 4 , 2 , 14 ]
-- Map:
-- ----
doubleAll'' :: [Integer] -> [Integer]
doubleAll'' integerList
= map double integerList
where
double :: Integer -> Integer
double integer = integer * 2
-- doubleAll'' [ 2 , 1 , 7 ]
-- ~> map double [ 2 , 1 , 7 ]
-- ~> ( double 2 ) : map double [ 1 , 7 ]
-- ~> ( 2 * 2 ) : map double [ 1 , 7 ]
-- ~> 4 : map double [ 1 , 7 ]
-- ~> 4 : ( double 1 ) : map double [ 7 ]
-- ~> 4 : ( 1 * 2 ) : map double [ 7 ]
-- ~> 4 : 2 : map double [ 7 ]
-- ~> 4 : 2 : ( double 7 ) : map double []
-- ~> 4 : 2 : ( 7 * 2 ) : map double []
-- ~> 4 : 2 : 14 : map double []
-- ~> 4 : 2 : 14 : []
-- ~> [ 4 , 2 , 14 ]
{- GHCi>
:{
let numbers :: [Integer] ;
numbers = [ 2 , 1 , 7 ]
:}
doubleAll numbers
doubleAll' numbers
doubleAll'' numbers
-}
-- [ 4 , 2 , 14 ]
-- [ 4 , 2 , 14 ]
-- [ 4 , 2 , 14 ]
|
pascal-knodel/haskell-craft
|
_/links/E'10''1.hs
|
mit
| 1,902 | 0 | 7 | 630 | 197 | 130 | 67 | 13 | 1 |
--
--
--
------------------
-- Exercise 10.37.
------------------
--
--
--
module E'10'37 where
import E'10'36
(
Move ( Rock , Paper , Scissors )
, outcome
, Tournament
, tournamentOutcome
)
showTournament :: Tournament -> String
showTournament tournament@( firstPlayerMoves , secondPlayerMoves )
= "\nGame :\n\n"
++ "Player = 1 | 2\n"
++ " - | -\n"
++ concat ( zipWith showTurn firstPlayerMoves secondPlayerMoves )
++ " - | -\n"
++ "Winner = " ++ winner ++ "\n\n"
++ "Score = " ++ ( show score ) ++ "\n\n"
where
showTurn :: Move -> Move -> String
showTurn a b
= showTurn' ( outcome a b ) a b
showTurn' :: Integer -> Move -> Move -> String
showTurn' 1 a b = " * " ++ ( show a ) ++ " | " ++ ( show b ) ++ "\n"
showTurn' (-1) a b = " " ++ ( show a ) ++ " | " ++ ( show b ) ++ " *\n"
showTurn' _ a b = " " ++ ( show a ) ++ " | " ++ ( show b ) ++ "\n"
score :: Integer
score = tournamentOutcome tournament
winner :: String
winner
-- "Player : 1 2"
-- "Winner : "
| score < 0 = " | 2"
| score > 0 = "1 | "
| otherwise = "1 | 2 (DRAW)"
{- GHCi>
putStr ( showTournament ( [ Scissors , Rock ] , [ Rock , Scissors ] ) )
putStr ( showTournament ( [ Scissors , Rock ] , [ Rock , Rock ] ) )
-}
--
-- Game :
--
-- Player = 1 | 2
-- - | -
-- S | R *
-- * R | S
-- - | -
-- Winner = 1 | 2 (DRAW)
--
-- Score = 0
--
--
-- Game :
--
-- Player = 1 | 2
-- - | -
-- S | R *
-- R | R
-- - | -
-- Winner = | 2
--
-- Score = -1
--
|
pascal-knodel/haskell-craft
|
_/links/E'10'37.hs
|
mit
| 1,827 | 0 | 15 | 769 | 387 | 220 | 167 | 30 | 3 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS -fno-warn-type-defaults -fno-warn-deprecations #-}
-- | Try Haskell!
{-
127.0.0.1:4001
/static",serveDirectory static => no idea, guess it does not work like it should
/eval?exp=6*7 => 42
/users => users stats)
/ => normal home page
How do I enable SSL?
First, you need to install snap-server with -fopenssl. If you have already installed snap-server, you might want to uninstall it first with ghc-pkg unregister -f snap-server to avoid potential version conflicts.
Once you have done that, run your application as follows using values appropriate to your setup.
./app --ssl-port=443 --ssl-cert=cert.pem --ssl-key=key.pem
-}
module TryHaskell where
import Paths_tryhaskell
import Control.Arrow ((***))
import Control.Applicative ((<$>),(<|>))
import Control.Concurrent
import Control.Monad
import Control.Monad.Trans
import Data.Aeson as Aeson
import Data.Bifunctor
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack)
import Data.ByteString.Lazy (fromChunks)
import qualified Data.ByteString.Lazy as L (ByteString)
import qualified Data.Cache.LRU.IO as LRU
import qualified Data.HashMap.Strict as M
import Data.Hashable
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Text (unpack)
import qualified Data.Text as S
import Data.Text.Encoding (decodeUtf8)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
-- import Data.Time.Format.Locale
import Data.Time
import Prelude hiding (div,head)
import PureIO (Interrupt(..),Output(..),Input(..),IOException(..))
import Safe
import Snap.Core
import Snap.Http.Server hiding (Config)
import Snap.Util.FileServe
import System.Environment (getEnvironment, lookupEnv)
import System.Exit
import System.IO (stderr, hPutStrLn) --
#if ! MIN_VERSION_time(1,5,0)
import System.Locale --
#endif
import System.Process.Text.Lazy
import TryHaskell.BuildPage
import TryHaskell.Tutorials
tutorials :: MVar Stats -> [(ByteString, Snap ())]
tutorials stats = map (\tut_no ->(pack ("/tutorial"++(show tut_no)),tut tut_no stats)) [1,2,11,12,31,32,22,23]
data EvalResult
= ErrorResult !Text
| SuccessResult !(Text,Text,Text) ![Text] !(Map FilePath String)
| GetInputResult ![Text] !(Map FilePath String)
deriving (Show,Eq)
--data Stats = Stats
-- { statsUsers :: !(HashMap ByteString UTCTime) }
type Cache = LRU.AtomicLRU (ByteString, ByteString) Value
-- | Setup the server threads and state.
setupServer :: IO ((MVar Stats,ThreadId), Cache)
setupServer =
do checkMuEval
mCacheLimit <- (readMay =<<) <$> lookupEnv "CACHE_LIMIT"
cache <- LRU.newAtomicLRU (mCacheLimit <|> Just 1000)
stats <- newMVar (Stats mempty)
expire <- forkIO (expireVisitors stats)
return ((stats,expire),cache)
-- | Start a web server.
startServer :: Cache
-> MVar Stats
-> IO ()
startServer cache stats =
do env <- getEnvironment
static <- getDataFileName "static"
let port =
maybe 4001 read $
lookup "PORT" env
let config =
setPort port .
setAccessLog ConfigNoLog .
setErrorLog ConfigNoLog .
setVerbose False
httpServe (config defaultConfig)
(dispatch static cache stats) -- so this part is what needs to change to support urls with different names
-- | Ensure mueval is available and working
checkMuEval :: IO ()
checkMuEval =
do result <- mueval False "()"
case result of
Left err -> die err -- putStrLn $ msg err
_ -> return ()
where
die err = do hPutStrLn stderr ("ERROR: mueval " ++ msg err)
exitFailure
msg err | T.null err = "failed to start"
| otherwise = "startup failure:\n" ++ T.unpack err
-- | Dispatch on the routes.
dispatch :: FilePath -> Cache -> MVar Stats -> Snap ()
dispatch static cache stats =
route $ [("/static",serveDirectory static)
,("/eval",eval cache stats)
,("/users",users stats)
,("/",home stats)
,("/tryhaskell",home stats)
] ++ (tutorials stats)
-- | Write out the list of current users.
users :: MVar Stats -> Snap ()
users statsv =
do stats <- liftIO (readMVar statsv)
writeLBS (encode (map (show . hash *** epoch)
(M.toList (statsUsers stats))))
where epoch :: UTCTime -> Integer
epoch = read . formatTime defaultTimeLocale "%s"
{-
-- | Log the current user's visit to the stats table.
logVisit :: MVar Stats -> Snap ByteString
logVisit stats =
do ipHeaderFilter
addr <- fmap rqRemoteAddr getRequest
now <- liftIO getCurrentTime
let updateStats (Stats u) = Stats (M.insert addr now u)
liftIO (modifyMVar_ stats (return . updateStats))
return addr
-}
-- | Reap visitors that have been inactive for one minute.
expireVisitors :: MVar Stats -> IO ()
expireVisitors stats =
forever
(do threadDelay (1000 * 1000 * 15)
now <- getCurrentTime
modifyMVar_ stats
(return .
Stats .
M.filter (not . (>60) . diffUTCTime now) .
statsUsers))
-- | Evaluate the given expression.
eval :: Cache -> MVar Stats -> Snap ()
eval cache stats =
do mex <- getParam "exp"
args <- getParam "args"
case mex of
Nothing -> error "exp expected"
Just ex ->
do let key = (ex,fromMaybe "" args)
logit ex args
liftIO (cachedEval key cache args ex) >>= jsonp
where
logit ex _ =
do ip <- logVisit stats
now <- liftIO getCurrentTime
liftIO (appendFile "/tmp/tryhaskell-log"
(show now ++ " " ++
S.unpack (decodeUtf8 ip) ++
"> " ++
(S.unpack . decodeUtf8) ex ++
"\n"))
-- | Read from the cache for the given expression (and context), or
-- otherwise generate the JSON.
cachedEval :: (Eq k, Ord k)
=> k -> LRU.AtomicLRU k Value -> Maybe ByteString -> ByteString
-> IO Value
cachedEval key cache args ex =
do mCached <- LRU.lookup key cache
case mCached of
Just cached -> return cached
Nothing ->
do o <- case getArgs of
Nothing -> muevalToJson ex mempty mempty
Just (is,fs) -> muevalToJson ex is fs
case o of
(Object i)
| Just _ <- M.lookup "error" i -> return ()
_ ->
LRU.insert key o cache
return o
where getArgs = fmap toLazy args >>= decode
-- | Output a JSON value, possibly wrapping it in a callback if one
-- was requested.
jsonp :: Value -> Snap ()
jsonp o =
do mcallback <- getParam "callback"
writeLBS (case mcallback of
Nothing -> encode o
Just c -> toLazy c <> "(" <> encode o <> ");")
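-- Rough sketch (illustrative values): without a "callback" parameter this
-- writes the bare JSON, e.g. {"success":...}; with callback=cb it writes
-- cb({"success":...}); so the response can be consumed as JSONP.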
-- | Evaluate the given expression and return the result as a JSON value.
muevalToJson :: MonadIO m => ByteString -> [String] -> Map FilePath String -> m Value
muevalToJson ex is fs =
do result <- liftIO (muevalOrType (unpack (decodeUtf8 ex)) is fs)
case result of
ErrorResult "can't find file: Imports.hs\n" -> muevalToJson ex is fs
ErrorResult e ->
return
(codify
(ErrorResult
(if e == ""
then helpfulMsg
else e)))
_ -> return (codify result)
where helpfulMsg = "No result, evaluator might've been killed due to heavy traffic. Retry?"
codify result =
Aeson.object
(case result of
ErrorResult err ->
[("error" .= err)]
SuccessResult (expr,typ,value') stdouts files ->
[("success" .=
Aeson.object [("value" .= value')
,("expr" .= expr)
,("type" .= typ)
,("stdout" .= stdouts)
,("files" .= files)])]
GetInputResult stdouts files ->
[("stdout" .= stdouts)
,("files" .= files)])
-- | Strict bytestring to lazy.
toLazy :: ByteString -> L.ByteString
toLazy = fromChunks . return
-- | Try to evaluate the given expression. If there's a mueval error
-- (i.e. a compile error), then try just getting the type of the
-- expression.
muevalOrType :: String -> [String] -> Map FilePath String -> IO EvalResult
muevalOrType e is fs =
do typeResult <- mueval True e
case typeResult of
Left err ->
return (ErrorResult err)
Right (expr,typ,_) ->
if T.isPrefixOf "IO " typ
then muevalIO e is fs
else
do evalResult <- mueval False e
case evalResult of
Left err ->
if T.isPrefixOf "No instance for" err ||
T.isPrefixOf "Ambiguous " err
then return (SuccessResult (expr,typ,"") mempty fs)
else return (ErrorResult err)
Right (_,_,val) ->
return (SuccessResult (expr,typ,val) mempty fs)
-- | Try to evaluate the expression as a (pure) IO action, if it type
-- checks and evaluates, then we're going to enter a potential
-- (referentially transparent) back-and-forth between the server and
-- the client.
--
-- It handles stdin/stdout and files.
muevalIO :: String -> [String] -> Map FilePath String -> IO EvalResult
muevalIO e is fs =
do result <- mueval False ("runTryHaskellIO " ++ show (convert (Input is fs)) ++ " (" ++ e ++ ")")
case result of
Left err ->
return (ErrorResult err)
Right (_,_,readMay . T.unpack -> Just r) ->
ioResult e (bimap (second oconvert) (second oconvert) r)
_ ->
return (ErrorResult "Problem running IO.")
where convert (Input os fs') = (os,Map.toList fs')
oconvert (os,fs') = Output os (Map.fromList fs')
-- | Extract an eval result from the IO reply.
ioResult :: String -> Either (Interrupt,Output) (String,Output) -> IO EvalResult
ioResult e r =
case r of
Left i ->
case i of
(InterruptException ex,_) ->
return
(ErrorResult
(case ex of
UserError err -> T.pack err
FileNotFound fp -> T.pack ("File not found: " <> fp)
DirectoryNotFound fp -> T.pack ("Directory not found: " <> fp)))
(InterruptStdin,Output os fs) ->
return (GetInputResult (map T.pack os) fs)
Right (value',Output os fs) ->
do typ <- mueval True e
return
(case typ of
Left err ->
ErrorResult err
Right (_,iotyp,_) ->
SuccessResult (T.pack e,iotyp,T.pack value')
(map T.pack os)
fs)
-- | Evaluate the given expression and return either an error or an
-- (expr,type,value) triple.
mueval :: Bool -> String -> IO (Either Text (Text,Text,Text))
mueval typeOnly e =
do env <- getEnvironment
importsfp <- getDataFileName "Imports.hs"
let timeout = maybe "1" id $ lookup "MUEVAL_TIMEOUT" env
options = ["-i","-t",timeout,"--expression",e] ++
["--no-imports","-l",importsfp] ++
["--type-only" | typeOnly]
(status,out,err) <- readProcessWithExitCode "mueval" options ""
case status of
ExitSuccess ->
case T.lines out of
[e',typ,value'] | T.pack e == e' -> return (Right (T.pack e,typ,value'))
_ -> do appendFile "/tmp/tryhaskell-log"
(e ++
" -> " ++
show out ++ " (bad output)" ++
"\n")
return (Left ("Unable to get type and value of expression: " <> T.pack e))
ExitFailure{} ->
case T.lines out of
[e',_typ,value'] | T.pack e == e' -> return (Left value')
[e',_typ] | T.pack e == e' -> return (Left "Evaluation killed!")
_ ->
return (Left (out <> if out == "" then err <> " " <> T.pack (show status) else ""))
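-- A rough sketch of the spawned command (paths, quoting and timeout shown
-- here are illustrative):
--
--   mueval -i -t 1 --expression "1+1" --no-imports -l .../Imports.hs
--
-- with --type-only appended when only the type is wanted; stdout is then
-- expected to carry the expression, its type and its value on three lines.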
|
wimvanderbauwhede/tryhaskell
|
src/TryHaskell.hs
|
bsd-3-clause
| 12,948 | 0 | 22 | 4,274 | 3,356 | 1,733 | 1,623 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Lens (makeLenses, (^.), (&), (%~))
import Control.Monad (void)
import Data.Default
import qualified Graphics.Vty as V
import qualified Brick.Types as T
import Brick.Types (rowL, columnL)
import qualified Brick.Main as M
import qualified Brick.Widgets.Border as B
import Brick.Widgets.Core
( Widget
, translateBy
)
data St =
St { _topLayerLocation :: T.Location
, _bottomLayerLocation :: T.Location
}
makeLenses ''St
drawUi :: St -> [Widget]
drawUi st =
[ topLayer st
, bottomLayer st
]
topLayer :: St -> Widget
topLayer st =
translateBy (st^.topLayerLocation) $
B.border "Top layer\n(Arrow keys move)"
bottomLayer :: St -> Widget
bottomLayer st =
translateBy (st^.bottomLayerLocation) $
B.border "Bottom layer\n(Ctrl-arrow keys move)"
appEvent :: St -> V.Event -> M.EventM (M.Next St)
appEvent st (V.EvKey V.KDown []) = M.continue $ st & topLayerLocation.rowL %~ (+ 1)
appEvent st (V.EvKey V.KUp []) = M.continue $ st & topLayerLocation.rowL %~ (subtract 1)
appEvent st (V.EvKey V.KRight []) = M.continue $ st & topLayerLocation.columnL %~ (+ 1)
appEvent st (V.EvKey V.KLeft []) = M.continue $ st & topLayerLocation.columnL %~ (subtract 1)
appEvent st (V.EvKey V.KDown [V.MCtrl]) = M.continue $ st & bottomLayerLocation.rowL %~ (+ 1)
appEvent st (V.EvKey V.KUp [V.MCtrl]) = M.continue $ st & bottomLayerLocation.rowL %~ (subtract 1)
appEvent st (V.EvKey V.KRight [V.MCtrl]) = M.continue $ st & bottomLayerLocation.columnL %~ (+ 1)
appEvent st (V.EvKey V.KLeft [V.MCtrl]) = M.continue $ st & bottomLayerLocation.columnL %~ (subtract 1)
appEvent st (V.EvKey V.KEsc []) = M.halt st
appEvent st _ = M.continue st
app :: M.App St V.Event
app =
M.App { M.appDraw = drawUi
, M.appStartEvent = return
, M.appHandleEvent = appEvent
, M.appAttrMap = const def
, M.appLiftVtyEvent = id
, M.appChooseCursor = M.neverShowCursor
}
main :: IO ()
main = void $ M.defaultMain app $ St (T.Location (0, 0)) (T.Location (0, 0))
|
FranklinChen/brick
|
programs/LayerDemo.hs
|
bsd-3-clause
| 2,147 | 0 | 10 | 440 | 824 | 448 | 376 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Types.PackageName
( PackageName, unPackageName, mkPackageName
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Utils.ShortText
import qualified Text.PrettyPrint as Disp
import Distribution.ParseUtils
import Distribution.Text
-- | A package name.
--
-- Use 'mkPackageName' and 'unPackageName' to convert from/to a
-- 'String'.
--
-- This type is opaque since @Cabal-2.0@
--
-- @since 2.0
newtype PackageName = PackageName ShortText
deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
-- | Convert 'PackageName' to 'String'
unPackageName :: PackageName -> String
unPackageName (PackageName s) = fromShortText s
-- | Construct a 'PackageName' from a 'String'
--
-- 'mkPackageName' is the inverse to 'unPackageName'
--
-- Note: No validations are performed to ensure that the resulting
-- 'PackageName' is valid
--
-- @since 2.0
mkPackageName :: String -> PackageName
mkPackageName = PackageName . toShortText
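-- A usage sketch (illustrative, not part of this module): construction and
-- deconstruction are expected to round-trip ordinary 'String's, e.g.
--
-- > unPackageName (mkPackageName "containers") == "containers"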
-- | 'mkPackageName'
--
-- @since 2.0
instance IsString PackageName where
fromString = mkPackageName
instance Binary PackageName
instance Text PackageName where
disp = Disp.text . unPackageName
parse = mkPackageName <$> parsePackageName
instance NFData PackageName where
rnf (PackageName pkg) = rnf pkg
|
mydaum/cabal
|
Cabal/Distribution/Types/PackageName.hs
|
bsd-3-clause
| 1,403 | 0 | 8 | 209 | 233 | 140 | 93 | 25 | 1 |
{-# LANGUAGE CPP, ImplicitParams #-}
{-
(c) The University of Glasgow 2006-2012
(c) The GRASP Project, Glasgow University, 1992-1998
-}
-- | This module defines classes and functions for pretty-printing. It also
-- exports a number of helpful debugging and other utilities such as 'trace' and 'panic'.
--
-- The interface to this module is very similar to the standard Hughes-PJ pretty printing
-- module, except that it exports a number of additional functions that are rarely used,
-- and works over the 'SDoc' type.
module Outputable (
-- * Type classes
Outputable(..), OutputableBndr(..),
-- * Pretty printing combinators
SDoc, runSDoc, initSDocContext,
docToSDoc,
interppSP, interpp'SP,
pprQuotedList, pprWithCommas, quotedListWithOr, quotedListWithNor,
empty, isEmpty, nest,
char,
text, ftext, ptext, ztext,
int, intWithCommas, integer, float, double, rational, doublePrec,
parens, cparen, brackets, braces, quotes, quote,
doubleQuotes, angleBrackets, paBrackets,
semi, comma, colon, dcolon, space, equals, dot, vbar,
arrow, larrow, darrow, arrowt, larrowt, arrowtt, larrowtt,
lparen, rparen, lbrack, rbrack, lbrace, rbrace, underscore,
blankLine, forAllLit,
(<>), (<+>), hcat, hsep,
($$), ($+$), vcat,
sep, cat,
fsep, fcat,
hang, hangNotEmpty, punctuate, ppWhen, ppUnless,
speakNth, speakN, speakNOf, plural, isOrAre, doOrDoes,
unicodeSyntax,
coloured, PprColour, colType, colCoerc, colDataCon,
colBinder, bold, keyword,
-- * Converting 'SDoc' into strings and outputing it
printForC, printForAsm, printForUser, printForUserPartWay,
pprCode, mkCodeStyle,
showSDoc, showSDocUnsafe, showSDocOneLine,
showSDocForUser, showSDocDebug, showSDocDump, showSDocDumpOneLine,
showSDocUnqual, showPpr,
renderWithStyle,
pprInfixVar, pprPrefixVar,
pprHsChar, pprHsString, pprHsBytes,
primFloatSuffix, primDoubleSuffix,
pprPrimChar, pprPrimInt, pprPrimWord, pprPrimInt64, pprPrimWord64,
pprFastFilePath,
-- * Controlling the style in which output is printed
BindingSite(..),
PprStyle, CodeStyle(..), PrintUnqualified(..),
QueryQualifyName, QueryQualifyModule, QueryQualifyPackage,
reallyAlwaysQualify, reallyAlwaysQualifyNames,
alwaysQualify, alwaysQualifyNames, alwaysQualifyModules,
neverQualify, neverQualifyNames, neverQualifyModules,
alwaysQualifyPackages, neverQualifyPackages,
QualifyName(..), queryQual,
sdocWithDynFlags, sdocWithPlatform,
getPprStyle, withPprStyle, withPprStyleDoc,
pprDeeper, pprDeeperList, pprSetDepth,
codeStyle, userStyle, debugStyle, dumpStyle, asmStyle,
ifPprDebug, qualName, qualModule, qualPackage,
mkErrStyle, defaultErrStyle, defaultDumpStyle, mkDumpStyle, defaultUserStyle,
mkUserStyle, cmdlineParserStyle, Depth(..),
-- * Error handling and debugging utilities
pprPanic, pprSorry, assertPprPanic, pprPgmError,
pprTrace, pprTraceIt, warnPprTrace, pprSTrace,
trace, pgmError, panic, sorry, assertPanic,
pprDebugAndThen,
) where
import {-# SOURCE #-} DynFlags( DynFlags,
targetPlatform, pprUserLength, pprCols,
useUnicode, useUnicodeSyntax,
unsafeGlobalDynFlags )
import {-# SOURCE #-} Module( UnitId, Module, ModuleName, moduleName )
import {-# SOURCE #-} OccName( OccName )
import {-# SOURCE #-} StaticFlags( opt_PprStyle_Debug, opt_NoDebugOutput )
import FastString
import qualified Pretty
import Util
import Platform
import Pretty ( Doc, Mode(..) )
import Panic
import GHC.Serialized
import GHC.LanguageExtensions (Extension)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Char
import qualified Data.Map as M
import Data.Int
import qualified Data.IntMap as IM
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String
import Data.Word
import System.IO ( Handle )
import System.FilePath
import Text.Printf
import Numeric (showFFloat)
import Data.Graph (SCC(..))
import GHC.Fingerprint
import GHC.Show ( showMultiLineString )
#if __GLASGOW_HASKELL__ > 710
import GHC.Stack
#endif
{-
************************************************************************
* *
\subsection{The @PprStyle@ data type}
* *
************************************************************************
-}
data PprStyle
= PprUser PrintUnqualified Depth
-- Pretty-print in a way that will make sense to the
-- ordinary user; must be very close to Haskell
-- syntax, etc.
-- Assumes printing tidied code: non-system names are
-- printed without uniques.
| PprDump PrintUnqualified
-- For -ddump-foo; less verbose than PprDebug, but more than PprUser
-- Does not assume tidied code: non-external names
-- are printed with uniques.
| PprDebug -- Full debugging output
| PprCode CodeStyle
-- Print code; either C or assembler
data CodeStyle = CStyle -- The format of labels differs for C and assembler
| AsmStyle
data Depth = AllTheWay
| PartWay Int -- 0 => stop
-- -----------------------------------------------------------------------------
-- Printing original names
-- | When printing code that contains original names, we need to map the
-- original names back to something the user understands. This is the
-- purpose of the triple of functions that gets passed around
-- when rendering 'SDoc'.
data PrintUnqualified = QueryQualify {
queryQualifyName :: QueryQualifyName,
queryQualifyModule :: QueryQualifyModule,
queryQualifyPackage :: QueryQualifyPackage
}
-- | given an /original/ name, this function tells you which module
-- name it should be qualified with when printing for the user, if
-- any. For example, given @Control.Exception.catch@, which is in scope
-- as @Exception.catch@, this function will return @Just "Exception"@.
-- Note that the return value is a ModuleName, not a Module, because
-- in source code, names are qualified by ModuleNames.
type QueryQualifyName = Module -> OccName -> QualifyName
-- | For a given module, we need to know whether to print it with
-- a package name to disambiguate it.
type QueryQualifyModule = Module -> Bool
-- | For a given package, we need to know whether to print it with
-- the unit id to disambiguate it.
type QueryQualifyPackage = UnitId -> Bool
-- See Note [Printing original names] in HscTypes
data QualifyName -- Given P:M.T
= NameUnqual -- It's in scope unqualified as "T"
-- OR nothing called "T" is in scope
| NameQual ModuleName -- It's in scope qualified as "X.T"
| NameNotInScope1 -- It's not in scope at all, but M.T is not bound
-- in the current scope, so we can refer to it as "M.T"
| NameNotInScope2 -- It's not in scope at all, and M.T is already bound in
-- the current scope, so we must refer to it as "P:M.T"
reallyAlwaysQualifyNames :: QueryQualifyName
reallyAlwaysQualifyNames _ _ = NameNotInScope2
-- | NB: This won't ever show package IDs
alwaysQualifyNames :: QueryQualifyName
alwaysQualifyNames m _ = NameQual (moduleName m)
neverQualifyNames :: QueryQualifyName
neverQualifyNames _ _ = NameUnqual
alwaysQualifyModules :: QueryQualifyModule
alwaysQualifyModules _ = True
neverQualifyModules :: QueryQualifyModule
neverQualifyModules _ = False
alwaysQualifyPackages :: QueryQualifyPackage
alwaysQualifyPackages _ = True
neverQualifyPackages :: QueryQualifyPackage
neverQualifyPackages _ = False
reallyAlwaysQualify, alwaysQualify, neverQualify :: PrintUnqualified
reallyAlwaysQualify
= QueryQualify reallyAlwaysQualifyNames
alwaysQualifyModules
alwaysQualifyPackages
alwaysQualify = QueryQualify alwaysQualifyNames
alwaysQualifyModules
alwaysQualifyPackages
neverQualify = QueryQualify neverQualifyNames
neverQualifyModules
neverQualifyPackages
defaultUserStyle, defaultDumpStyle :: PprStyle
defaultUserStyle = mkUserStyle neverQualify AllTheWay
-- Print without qualifiers to reduce verbosity, unless -dppr-debug
defaultDumpStyle | opt_PprStyle_Debug = PprDebug
| otherwise = PprDump neverQualify
mkDumpStyle :: PrintUnqualified -> PprStyle
mkDumpStyle print_unqual | opt_PprStyle_Debug = PprDebug
| otherwise = PprDump print_unqual
defaultErrStyle :: DynFlags -> PprStyle
-- Default style for error messages, when we don't know PrintUnqualified
-- It's a bit of a hack because it doesn't take into account what's in scope
-- Only used for desugarer warnings, and typechecker errors in interface sigs
-- NB that -dppr-debug will still get into PprDebug style
defaultErrStyle dflags = mkErrStyle dflags neverQualify
-- | Style for printing error messages
mkErrStyle :: DynFlags -> PrintUnqualified -> PprStyle
mkErrStyle dflags qual = mkUserStyle qual (PartWay (pprUserLength dflags))
cmdlineParserStyle :: PprStyle
cmdlineParserStyle = mkUserStyle alwaysQualify AllTheWay
mkUserStyle :: PrintUnqualified -> Depth -> PprStyle
mkUserStyle unqual depth
| opt_PprStyle_Debug = PprDebug
| otherwise = PprUser unqual depth
instance Outputable PprStyle where
ppr (PprUser {}) = text "user-style"
ppr (PprCode {}) = text "code-style"
ppr (PprDump {}) = text "dump-style"
ppr (PprDebug {}) = text "debug-style"
{-
Orthogonal to the above printing styles are (possibly) some
command-line flags that affect printing (often carried with the
style). The most likely ones are variations on how much type info is
shown.
The following test decides whether or not we are actually generating
code (either C or assembly), or generating interface files.
************************************************************************
* *
\subsection{The @SDoc@ data type}
* *
************************************************************************
-}
newtype SDoc = SDoc { runSDoc :: SDocContext -> Doc }
data SDocContext = SDC
{ sdocStyle :: !PprStyle
, sdocLastColour :: !PprColour
-- ^ The most recently used colour. This allows nesting colours.
, sdocDynFlags :: !DynFlags
}
instance IsString SDoc where
fromString = text
initSDocContext :: DynFlags -> PprStyle -> SDocContext
initSDocContext dflags sty = SDC
{ sdocStyle = sty
, sdocLastColour = colReset
, sdocDynFlags = dflags
}
withPprStyle :: PprStyle -> SDoc -> SDoc
withPprStyle sty d = SDoc $ \ctxt -> runSDoc d ctxt{sdocStyle=sty}
withPprStyleDoc :: DynFlags -> PprStyle -> SDoc -> Doc
withPprStyleDoc dflags sty d = runSDoc d (initSDocContext dflags sty)
pprDeeper :: SDoc -> SDoc
pprDeeper d = SDoc $ \ctx -> case ctx of
SDC{sdocStyle=PprUser _ (PartWay 0)} -> Pretty.text "..."
SDC{sdocStyle=PprUser q (PartWay n)} ->
runSDoc d ctx{sdocStyle = PprUser q (PartWay (n-1))}
_ -> runSDoc d ctx
-- | Truncate a list that is longer than the current depth.
pprDeeperList :: ([SDoc] -> SDoc) -> [SDoc] -> SDoc
pprDeeperList f ds
| null ds = f []
| otherwise = SDoc work
where
work ctx@SDC{sdocStyle=PprUser q (PartWay n)}
| n==0 = Pretty.text "..."
| otherwise =
runSDoc (f (go 0 ds)) ctx{sdocStyle = PprUser q (PartWay (n-1))}
where
go _ [] = []
go i (d:ds) | i >= n = [text "...."]
| otherwise = d : go (i+1) ds
work other_ctx = runSDoc (f ds) other_ctx
pprSetDepth :: Depth -> SDoc -> SDoc
pprSetDepth depth doc = SDoc $ \ctx ->
case ctx of
SDC{sdocStyle=PprUser q _} ->
runSDoc doc ctx{sdocStyle = PprUser q depth}
_ ->
runSDoc doc ctx
getPprStyle :: (PprStyle -> SDoc) -> SDoc
getPprStyle df = SDoc $ \ctx -> runSDoc (df (sdocStyle ctx)) ctx
sdocWithDynFlags :: (DynFlags -> SDoc) -> SDoc
sdocWithDynFlags f = SDoc $ \ctx -> runSDoc (f (sdocDynFlags ctx)) ctx
sdocWithPlatform :: (Platform -> SDoc) -> SDoc
sdocWithPlatform f = sdocWithDynFlags (f . targetPlatform)
qualName :: PprStyle -> QueryQualifyName
qualName (PprUser q _) mod occ = queryQualifyName q mod occ
qualName (PprDump q) mod occ = queryQualifyName q mod occ
qualName _other mod _ = NameQual (moduleName mod)
qualModule :: PprStyle -> QueryQualifyModule
qualModule (PprUser q _) m = queryQualifyModule q m
qualModule (PprDump q) m = queryQualifyModule q m
qualModule _other _m = True
qualPackage :: PprStyle -> QueryQualifyPackage
qualPackage (PprUser q _) m = queryQualifyPackage q m
qualPackage (PprDump q) m = queryQualifyPackage q m
qualPackage _other _m = True
queryQual :: PprStyle -> PrintUnqualified
queryQual s = QueryQualify (qualName s)
(qualModule s)
(qualPackage s)
codeStyle :: PprStyle -> Bool
codeStyle (PprCode _) = True
codeStyle _ = False
asmStyle :: PprStyle -> Bool
asmStyle (PprCode AsmStyle) = True
asmStyle _other = False
dumpStyle :: PprStyle -> Bool
dumpStyle (PprDump {}) = True
dumpStyle _other = False
debugStyle :: PprStyle -> Bool
debugStyle PprDebug = True
debugStyle _other = False
userStyle :: PprStyle -> Bool
userStyle (PprUser _ _) = True
userStyle _other = False
ifPprDebug :: SDoc -> SDoc -- Empty for non-debug style
ifPprDebug d = SDoc $ \ctx ->
case ctx of
SDC{sdocStyle=PprDebug} -> runSDoc d ctx
_ -> Pretty.empty
printForUser :: DynFlags -> Handle -> PrintUnqualified -> SDoc -> IO ()
printForUser dflags handle unqual doc
= Pretty.printDoc PageMode (pprCols dflags) handle
(runSDoc doc (initSDocContext dflags (mkUserStyle unqual AllTheWay)))
printForUserPartWay :: DynFlags -> Handle -> Int -> PrintUnqualified -> SDoc
-> IO ()
printForUserPartWay dflags handle d unqual doc
= Pretty.printDoc PageMode (pprCols dflags) handle
(runSDoc doc (initSDocContext dflags (mkUserStyle unqual (PartWay d))))
-- printForC, printForAsm do what they sound like
printForC :: DynFlags -> Handle -> SDoc -> IO ()
printForC dflags handle doc =
Pretty.printDoc LeftMode (pprCols dflags) handle
(runSDoc doc (initSDocContext dflags (PprCode CStyle)))
printForAsm :: DynFlags -> Handle -> SDoc -> IO ()
printForAsm dflags handle doc =
Pretty.printDoc LeftMode (pprCols dflags) handle
(runSDoc doc (initSDocContext dflags (PprCode AsmStyle)))
pprCode :: CodeStyle -> SDoc -> SDoc
pprCode cs d = withPprStyle (PprCode cs) d
mkCodeStyle :: CodeStyle -> PprStyle
mkCodeStyle = PprCode
-- Can't make SDoc an instance of Show because SDoc is just a function type
-- However, Doc *is* an instance of Show
-- showSDoc just blasts it out as a string
showSDoc :: DynFlags -> SDoc -> String
showSDoc dflags sdoc = renderWithStyle dflags sdoc defaultUserStyle
-- showSDocUnsafe is unsafe, because `unsafeGlobalDynFlags` might not be
-- initialised yet.
showSDocUnsafe :: SDoc -> String
showSDocUnsafe sdoc = showSDoc unsafeGlobalDynFlags sdoc
showPpr :: Outputable a => DynFlags -> a -> String
showPpr dflags thing = showSDoc dflags (ppr thing)
showSDocUnqual :: DynFlags -> SDoc -> String
-- Only used by Haddock
showSDocUnqual dflags sdoc = showSDoc dflags sdoc
showSDocForUser :: DynFlags -> PrintUnqualified -> SDoc -> String
-- Allows caller to specify the PrintUnqualified to use
showSDocForUser dflags unqual doc
= renderWithStyle dflags doc (mkUserStyle unqual AllTheWay)
showSDocDump :: DynFlags -> SDoc -> String
showSDocDump dflags d = renderWithStyle dflags d defaultDumpStyle
showSDocDebug :: DynFlags -> SDoc -> String
showSDocDebug dflags d = renderWithStyle dflags d PprDebug
renderWithStyle :: DynFlags -> SDoc -> PprStyle -> String
renderWithStyle dflags sdoc sty
= let s = Pretty.style{ Pretty.mode = PageMode,
Pretty.lineLength = pprCols dflags }
in Pretty.renderStyle s $ runSDoc sdoc (initSDocContext dflags sty)
-- This shows an SDoc, but on one line only. It's cheaper than a full
-- showSDoc, designed for when we're getting results like "Foo.bar"
-- and "foo{uniq strictness}" so we don't want fancy layout anyway.
showSDocOneLine :: DynFlags -> SDoc -> String
showSDocOneLine dflags d
= let s = Pretty.style{ Pretty.mode = OneLineMode,
Pretty.lineLength = pprCols dflags } in
Pretty.renderStyle s $ runSDoc d (initSDocContext dflags defaultUserStyle)
showSDocDumpOneLine :: DynFlags -> SDoc -> String
showSDocDumpOneLine dflags d
= let s = Pretty.style{ Pretty.mode = OneLineMode,
Pretty.lineLength = irrelevantNCols } in
Pretty.renderStyle s $ runSDoc d (initSDocContext dflags defaultDumpStyle)
irrelevantNCols :: Int
-- Used for OneLineMode and LeftMode when number of cols isn't used
irrelevantNCols = 1
isEmpty :: DynFlags -> SDoc -> Bool
isEmpty dflags sdoc = Pretty.isEmpty $ runSDoc sdoc dummySDocContext
where dummySDocContext = initSDocContext dflags PprDebug
docToSDoc :: Doc -> SDoc
docToSDoc d = SDoc (\_ -> d)
empty :: SDoc
char :: Char -> SDoc
text :: String -> SDoc
ftext :: FastString -> SDoc
ptext :: LitString -> SDoc
ztext :: FastZString -> SDoc
int :: Int -> SDoc
integer :: Integer -> SDoc
float :: Float -> SDoc
double :: Double -> SDoc
rational :: Rational -> SDoc
empty = docToSDoc $ Pretty.empty
char c = docToSDoc $ Pretty.char c
text s = docToSDoc $ Pretty.text s
{-# INLINE text #-} -- Inline so that the RULE Pretty.text will fire
ftext s = docToSDoc $ Pretty.ftext s
ptext s = docToSDoc $ Pretty.ptext s
ztext s = docToSDoc $ Pretty.ztext s
int n = docToSDoc $ Pretty.int n
integer n = docToSDoc $ Pretty.integer n
float n = docToSDoc $ Pretty.float n
double n = docToSDoc $ Pretty.double n
rational n = docToSDoc $ Pretty.rational n
-- | @doublePrec p n@ shows a floating point number @n@ with @p@
-- digits of precision after the decimal point.
doublePrec :: Int -> Double -> SDoc
doublePrec p n = text (showFFloat (Just p) n "")
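-- For example, @doublePrec 2 pi@ renders as @3.14@.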
parens, braces, brackets, quotes, quote,
paBrackets, doubleQuotes, angleBrackets :: SDoc -> SDoc
parens d = SDoc $ Pretty.parens . runSDoc d
braces d = SDoc $ Pretty.braces . runSDoc d
brackets d = SDoc $ Pretty.brackets . runSDoc d
quote d = SDoc $ Pretty.quote . runSDoc d
doubleQuotes d = SDoc $ Pretty.doubleQuotes . runSDoc d
angleBrackets d = char '<' <> d <> char '>'
paBrackets d = text "[:" <> d <> text ":]"
cparen :: Bool -> SDoc -> SDoc
cparen b d = SDoc $ Pretty.maybeParens b . runSDoc d
-- 'quotes' encloses something in single quotes...
-- but it omits them if the thing begins or ends in a single quote
-- so that we don't get `foo''. Instead we just have foo'.
quotes d =
sdocWithDynFlags $ \dflags ->
if useUnicode dflags
then char '‘' <> d <> char '’'
else SDoc $ \sty ->
let pp_d = runSDoc d sty
str = show pp_d
in case (str, snocView str) of
(_, Just (_, '\'')) -> pp_d
('\'' : _, _) -> pp_d
_other -> Pretty.quotes pp_d
semi, comma, colon, equals, space, dcolon, underscore, dot, vbar :: SDoc
arrow, larrow, darrow, arrowt, larrowt, arrowtt, larrowtt :: SDoc
lparen, rparen, lbrack, rbrack, lbrace, rbrace, blankLine :: SDoc
blankLine = docToSDoc $ Pretty.text ""
dcolon = unicodeSyntax (char '∷') (docToSDoc $ Pretty.text "::")
arrow = unicodeSyntax (char '→') (docToSDoc $ Pretty.text "->")
larrow = unicodeSyntax (char '←') (docToSDoc $ Pretty.text "<-")
darrow = unicodeSyntax (char '⇒') (docToSDoc $ Pretty.text "=>")
arrowt = unicodeSyntax (char '⤚') (docToSDoc $ Pretty.text ">-")
larrowt = unicodeSyntax (char '⤙') (docToSDoc $ Pretty.text "-<")
arrowtt = unicodeSyntax (char '⤜') (docToSDoc $ Pretty.text ">>-")
larrowtt = unicodeSyntax (char '⤛') (docToSDoc $ Pretty.text "-<<")
semi = docToSDoc $ Pretty.semi
comma = docToSDoc $ Pretty.comma
colon = docToSDoc $ Pretty.colon
equals = docToSDoc $ Pretty.equals
space = docToSDoc $ Pretty.space
underscore = char '_'
dot = char '.'
vbar = char '|'
lparen = docToSDoc $ Pretty.lparen
rparen = docToSDoc $ Pretty.rparen
lbrack = docToSDoc $ Pretty.lbrack
rbrack = docToSDoc $ Pretty.rbrack
lbrace = docToSDoc $ Pretty.lbrace
rbrace = docToSDoc $ Pretty.rbrace
forAllLit :: SDoc
forAllLit = unicodeSyntax (char '∀') (text "forall")
unicodeSyntax :: SDoc -> SDoc -> SDoc
unicodeSyntax unicode plain = sdocWithDynFlags $ \dflags ->
if useUnicode dflags && useUnicodeSyntax dflags
then unicode
else plain
nest :: Int -> SDoc -> SDoc
-- ^ Indent 'SDoc' some specified amount
(<>) :: SDoc -> SDoc -> SDoc
-- ^ Join two 'SDoc' together horizontally without a gap
(<+>) :: SDoc -> SDoc -> SDoc
-- ^ Join two 'SDoc' together horizontally with a gap between them
($$) :: SDoc -> SDoc -> SDoc
-- ^ Join two 'SDoc' together vertically; if there is
-- no vertical overlap it "dovetails" the two onto one line
($+$) :: SDoc -> SDoc -> SDoc
-- ^ Join two 'SDoc' together vertically
nest n d = SDoc $ Pretty.nest n . runSDoc d
(<>) d1 d2 = SDoc $ \sty -> (Pretty.<>) (runSDoc d1 sty) (runSDoc d2 sty)
(<+>) d1 d2 = SDoc $ \sty -> (Pretty.<+>) (runSDoc d1 sty) (runSDoc d2 sty)
($$) d1 d2 = SDoc $ \sty -> (Pretty.$$) (runSDoc d1 sty) (runSDoc d2 sty)
($+$) d1 d2 = SDoc $ \sty -> (Pretty.$+$) (runSDoc d1 sty) (runSDoc d2 sty)
hcat :: [SDoc] -> SDoc
-- ^ Concatenate 'SDoc' horizontally
hsep :: [SDoc] -> SDoc
-- ^ Concatenate 'SDoc' horizontally with a space between each one
vcat :: [SDoc] -> SDoc
-- ^ Concatenate 'SDoc' vertically with dovetailing
sep :: [SDoc] -> SDoc
-- ^ Separate: is either like 'hsep' or like 'vcat', depending on what fits
cat :: [SDoc] -> SDoc
-- ^ Catenate: is either like 'hcat' or like 'vcat', depending on what fits
fsep :: [SDoc] -> SDoc
-- ^ A paragraph-fill combinator. It's much like sep, only it
-- keeps fitting things on one line until it can't fit any more.
fcat :: [SDoc] -> SDoc
-- ^ This behaves like 'fsep', but it uses '<>' for horizontal composition rather than '<+>'
hcat ds = SDoc $ \sty -> Pretty.hcat [runSDoc d sty | d <- ds]
hsep ds = SDoc $ \sty -> Pretty.hsep [runSDoc d sty | d <- ds]
vcat ds = SDoc $ \sty -> Pretty.vcat [runSDoc d sty | d <- ds]
sep ds = SDoc $ \sty -> Pretty.sep [runSDoc d sty | d <- ds]
cat ds = SDoc $ \sty -> Pretty.cat [runSDoc d sty | d <- ds]
fsep ds = SDoc $ \sty -> Pretty.fsep [runSDoc d sty | d <- ds]
fcat ds = SDoc $ \sty -> Pretty.fcat [runSDoc d sty | d <- ds]
hang :: SDoc -- ^ The header
-> Int -- ^ Amount to indent the hung body
-> SDoc -- ^ The hung body, indented and placed below the header
-> SDoc
hang d1 n d2 = SDoc $ \sty -> Pretty.hang (runSDoc d1 sty) n (runSDoc d2 sty)
-- | This behaves like 'hang', but does not indent the second document
-- when the header is empty.
hangNotEmpty :: SDoc -> Int -> SDoc -> SDoc
hangNotEmpty d1 n d2 =
SDoc $ \sty -> Pretty.hangNotEmpty (runSDoc d1 sty) n (runSDoc d2 sty)
punctuate :: SDoc -- ^ The punctuation
-> [SDoc] -- ^ The list that will have punctuation added between every adjacent pair of elements
-> [SDoc] -- ^ Punctuated list
punctuate _ [] = []
punctuate p (d:ds) = go d ds
where
go d [] = [d]
go d (e:es) = (d <> p) : go e es
ppWhen, ppUnless :: Bool -> SDoc -> SDoc
ppWhen True doc = doc
ppWhen False _ = empty
ppUnless True _ = empty
ppUnless False doc = doc
-- | A colour\/style for use with 'coloured'.
newtype PprColour = PprColour String
-- Colours
colType :: PprColour
colType = PprColour "\27[34m"
colBold :: PprColour
colBold = PprColour "\27[;1m"
colCoerc :: PprColour
colCoerc = PprColour "\27[34m"
colDataCon :: PprColour
colDataCon = PprColour "\27[31m"
colBinder :: PprColour
colBinder = PprColour "\27[32m"
colReset :: PprColour
colReset = PprColour "\27[0m"
-- | Apply the given colour\/style for the argument.
--
-- Only takes effect if colours are enabled.
coloured :: PprColour -> SDoc -> SDoc
-- TODO: coloured _ sdoc ctxt | coloursDisabled = sdoc ctxt
coloured col@(PprColour c) sdoc =
SDoc $ \ctx@SDC{ sdocLastColour = PprColour lc } ->
let ctx' = ctx{ sdocLastColour = col } in
Pretty.zeroWidthText c Pretty.<> runSDoc sdoc ctx' Pretty.<> Pretty.zeroWidthText lc
bold :: SDoc -> SDoc
bold = coloured colBold
keyword :: SDoc -> SDoc
keyword = bold
{-
************************************************************************
* *
\subsection[Outputable-class]{The @Outputable@ class}
* *
************************************************************************
-}
-- | Class designating that some type has an 'SDoc' representation
class Outputable a where
ppr :: a -> SDoc
pprPrec :: Rational -> a -> SDoc
-- 0 binds least tightly
-- We use Rational because there is always a
-- Rational between any other two Rationals
ppr = pprPrec 0
pprPrec _ = ppr
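-- A minimal instance sketch (illustrative only, not part of GHC): only 'ppr'
-- needs to be defined, since 'pprPrec' defaults to ignoring the precedence.
--
-- > data Direction = North | South
-- > instance Outputable Direction where
-- >   ppr North = text "North"
-- >   ppr South = text "South"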
instance Outputable Char where
ppr c = text [c]
instance Outputable Bool where
ppr True = text "True"
ppr False = text "False"
instance Outputable Ordering where
ppr LT = text "LT"
ppr EQ = text "EQ"
ppr GT = text "GT"
instance Outputable Int32 where
ppr n = integer $ fromIntegral n
instance Outputable Int64 where
ppr n = integer $ fromIntegral n
instance Outputable Int where
ppr n = int n
instance Outputable Word16 where
ppr n = integer $ fromIntegral n
instance Outputable Word32 where
ppr n = integer $ fromIntegral n
instance Outputable Word where
ppr n = integer $ fromIntegral n
instance Outputable () where
ppr _ = text "()"
instance (Outputable a) => Outputable [a] where
ppr xs = brackets (fsep (punctuate comma (map ppr xs)))
instance (Outputable a) => Outputable (Set a) where
ppr s = braces (fsep (punctuate comma (map ppr (Set.toList s))))
instance (Outputable a, Outputable b) => Outputable (a, b) where
ppr (x,y) = parens (sep [ppr x <> comma, ppr y])
instance Outputable a => Outputable (Maybe a) where
ppr Nothing = text "Nothing"
ppr (Just x) = text "Just" <+> ppr x
instance (Outputable a, Outputable b) => Outputable (Either a b) where
ppr (Left x) = text "Left" <+> ppr x
ppr (Right y) = text "Right" <+> ppr y
-- ToDo: may not be used
instance (Outputable a, Outputable b, Outputable c) => Outputable (a, b, c) where
ppr (x,y,z) =
parens (sep [ppr x <> comma,
ppr y <> comma,
ppr z ])
instance (Outputable a, Outputable b, Outputable c, Outputable d) =>
Outputable (a, b, c, d) where
ppr (a,b,c,d) =
parens (sep [ppr a <> comma,
ppr b <> comma,
ppr c <> comma,
ppr d])
instance (Outputable a, Outputable b, Outputable c, Outputable d, Outputable e) =>
Outputable (a, b, c, d, e) where
ppr (a,b,c,d,e) =
parens (sep [ppr a <> comma,
ppr b <> comma,
ppr c <> comma,
ppr d <> comma,
ppr e])
instance (Outputable a, Outputable b, Outputable c, Outputable d, Outputable e, Outputable f) =>
Outputable (a, b, c, d, e, f) where
ppr (a,b,c,d,e,f) =
parens (sep [ppr a <> comma,
ppr b <> comma,
ppr c <> comma,
ppr d <> comma,
ppr e <> comma,
ppr f])
instance (Outputable a, Outputable b, Outputable c, Outputable d, Outputable e, Outputable f, Outputable g) =>
Outputable (a, b, c, d, e, f, g) where
ppr (a,b,c,d,e,f,g) =
parens (sep [ppr a <> comma,
ppr b <> comma,
ppr c <> comma,
ppr d <> comma,
ppr e <> comma,
ppr f <> comma,
ppr g])
instance Outputable FastString where
ppr fs = ftext fs -- Prints an unadorned string,
-- no double quotes or anything
instance (Outputable key, Outputable elt) => Outputable (M.Map key elt) where
ppr m = ppr (M.toList m)
instance (Outputable elt) => Outputable (IM.IntMap elt) where
ppr m = ppr (IM.toList m)
instance Outputable Fingerprint where
ppr (Fingerprint w1 w2) = text (printf "%016x%016x" w1 w2)
instance Outputable a => Outputable (SCC a) where
ppr (AcyclicSCC v) = text "NONREC" $$ (nest 3 (ppr v))
ppr (CyclicSCC vs) = text "REC" $$ (nest 3 (vcat (map ppr vs)))
instance Outputable Serialized where
ppr (Serialized the_type bytes) = int (length bytes) <+> text "of type" <+> text (show the_type)
instance Outputable Extension where
ppr = text . show
{-
************************************************************************
* *
\subsection{The @OutputableBndr@ class}
* *
************************************************************************
-}
-- | 'BindingSite' is used to tell the thing that prints binder what
-- language construct is binding the identifier. This can be used
-- to decide how much info to print.
-- Also see Note [Binding-site specific printing] in PprCore
data BindingSite
= LambdaBind -- ^ The x in (\x. e)
| CaseBind -- ^ The x in case scrut of x { (y,z) -> ... }
| CasePatBind -- ^ The y,z in case scrut of x { (y,z) -> ... }
| LetBind -- ^ The x in (let x = rhs in e)
-- | When we print a binder, we often want to print its type too.
-- The @OutputableBndr@ class encapsulates this idea.
class Outputable a => OutputableBndr a where
pprBndr :: BindingSite -> a -> SDoc
pprBndr _b x = ppr x
pprPrefixOcc, pprInfixOcc :: a -> SDoc
-- Print an occurrence of the name, suitable either in the
-- prefix position of an application, thus (f a b) or ((+) x)
-- or infix position, thus (a `f` b) or (x + y)
{-
************************************************************************
* *
\subsection{Random printing helpers}
* *
************************************************************************
-}
-- We have 31-bit Chars and will simply use Show instances of Char and String.
-- | Special combinator for showing character literals.
pprHsChar :: Char -> SDoc
pprHsChar c | c > '\x10ffff' = char '\\' <> text (show (fromIntegral (ord c) :: Word32))
| otherwise = text (show c)
-- | Special combinator for showing string literals.
pprHsString :: FastString -> SDoc
pprHsString fs = vcat (map text (showMultiLineString (unpackFS fs)))
-- | Special combinator for showing bytestring literals.
pprHsBytes :: ByteString -> SDoc
pprHsBytes bs = let escaped = concatMap escape $ BS.unpack bs
in vcat (map text (showMultiLineString escaped)) <> char '#'
where escape :: Word8 -> String
escape w = let c = chr (fromIntegral w)
in if isAscii c
then [c]
else '\\' : show w
-- Postfix modifiers for unboxed literals.
-- See Note [Printing of literals in Core] in `basicTypes/Literal.hs`.
primCharSuffix, primFloatSuffix, primIntSuffix :: SDoc
primDoubleSuffix, primWordSuffix, primInt64Suffix, primWord64Suffix :: SDoc
primCharSuffix = char '#'
primFloatSuffix = char '#'
primIntSuffix = char '#'
primDoubleSuffix = text "##"
primWordSuffix = text "##"
primInt64Suffix = text "L#"
primWord64Suffix = text "L##"
-- | Special combinator for showing unboxed literals.
pprPrimChar :: Char -> SDoc
pprPrimInt, pprPrimWord, pprPrimInt64, pprPrimWord64 :: Integer -> SDoc
pprPrimChar c = pprHsChar c <> primCharSuffix
pprPrimInt i = integer i <> primIntSuffix
pprPrimWord w = integer w <> primWordSuffix
pprPrimInt64 i = integer i <> primInt64Suffix
pprPrimWord64 w = integer w <> primWord64Suffix
---------------------
-- Put a name in parens if it's an operator
pprPrefixVar :: Bool -> SDoc -> SDoc
pprPrefixVar is_operator pp_v
| is_operator = parens pp_v
| otherwise = pp_v
-- Put a name in backquotes if it's not an operator
pprInfixVar :: Bool -> SDoc -> SDoc
pprInfixVar is_operator pp_v
| is_operator = pp_v
| otherwise = char '`' <> pp_v <> char '`'
---------------------
pprFastFilePath :: FastString -> SDoc
pprFastFilePath path = text $ normalise $ unpackFS path
{-
************************************************************************
* *
\subsection{Other helper functions}
* *
************************************************************************
-}
pprWithCommas :: (a -> SDoc) -- ^ The pretty printing function to use
-> [a] -- ^ The things to be pretty printed
-> SDoc -- ^ 'SDoc' where the things have been pretty printed,
-- comma-separated and finally packed into a paragraph.
pprWithCommas pp xs = fsep (punctuate comma (map pp xs))
-- | Returns the separated concatenation of the pretty printed things.
interppSP :: Outputable a => [a] -> SDoc
interppSP xs = sep (map ppr xs)
-- | Returns the comma-separated concatenation of the pretty printed things.
interpp'SP :: Outputable a => [a] -> SDoc
interpp'SP xs = sep (punctuate comma (map ppr xs))
-- | Returns the comma-separated concatenation of the quoted pretty printed things.
--
-- > [x,y,z] ==> `x', `y', `z'
pprQuotedList :: Outputable a => [a] -> SDoc
pprQuotedList = quotedList . map ppr
quotedList :: [SDoc] -> SDoc
quotedList xs = hsep (punctuate comma (map quotes xs))
quotedListWithOr :: [SDoc] -> SDoc
-- [x,y,z] ==> `x', `y' or `z'
quotedListWithOr xs@(_:_:_) = quotedList (init xs) <+> text "or" <+> quotes (last xs)
quotedListWithOr xs = quotedList xs
quotedListWithNor :: [SDoc] -> SDoc
-- [x,y,z] ==> `x', `y' nor `z'
quotedListWithNor xs@(_:_:_) = quotedList (init xs) <+> text "nor" <+> quotes (last xs)
quotedListWithNor xs = quotedList xs
{-
************************************************************************
* *
\subsection{Printing numbers verbally}
* *
************************************************************************
-}
intWithCommas :: Integral a => a -> SDoc
-- Prints a big integer with commas, eg 345,821
intWithCommas n
| n < 0 = char '-' <> intWithCommas (-n)
| q == 0 = int (fromIntegral r)
| otherwise = intWithCommas q <> comma <> zeroes <> int (fromIntegral r)
where
(q,r) = n `quotRem` 1000
zeroes | r >= 100 = empty
| r >= 10 = char '0'
| otherwise = text "00"
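-- For example, @intWithCommas 1002003@ renders as @1,002,003@: 'zeroes'
-- re-inserts the leading zeroes of each three-digit group that 'int' would
-- otherwise drop.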
-- | Converts an integer to a verbal index:
--
-- > speakNth 1 = text "first"
-- > speakNth 5 = text "fifth"
-- > speakNth 21 = text "21st"
speakNth :: Int -> SDoc
speakNth 1 = text "first"
speakNth 2 = text "second"
speakNth 3 = text "third"
speakNth 4 = text "fourth"
speakNth 5 = text "fifth"
speakNth 6 = text "sixth"
speakNth n = hcat [ int n, text suffix ]
where
suffix | n <= 20 = "th" -- 11,12,13 are non-std
| last_dig == 1 = "st"
| last_dig == 2 = "nd"
| last_dig == 3 = "rd"
| otherwise = "th"
last_dig = n `rem` 10
-- | Converts an integer to a verbal multiplicity:
--
-- > speakN 0 = text "none"
-- > speakN 5 = text "five"
-- > speakN 10 = text "10"
speakN :: Int -> SDoc
speakN 0 = text "none" -- E.g. "he has none"
speakN 1 = text "one" -- E.g. "he has one"
speakN 2 = text "two"
speakN 3 = text "three"
speakN 4 = text "four"
speakN 5 = text "five"
speakN 6 = text "six"
speakN n = int n
-- | Converts an integer and object description to a statement about the
-- multiplicity of those objects:
--
-- > speakNOf 0 (text "melon") = text "no melons"
-- > speakNOf 1 (text "melon") = text "one melon"
-- > speakNOf 3 (text "melon") = text "three melons"
speakNOf :: Int -> SDoc -> SDoc
speakNOf 0 d = text "no" <+> d <> char 's'
speakNOf 1 d = text "one" <+> d -- E.g. "one argument"
speakNOf n d = speakN n <+> d <> char 's' -- E.g. "three arguments"
-- | Determines the pluralisation suffix appropriate for the length of a list:
--
-- > plural [] = char 's'
-- > plural ["Hello"] = empty
-- > plural ["Hello", "World"] = char 's'
plural :: [a] -> SDoc
plural [_] = empty -- a bit frightening, but there you are
plural _ = char 's'
-- | Determines the form of to be appropriate for the length of a list:
--
-- > isOrAre [] = text "are"
-- > isOrAre ["Hello"] = text "is"
-- > isOrAre ["Hello", "World"] = text "are"
isOrAre :: [a] -> SDoc
isOrAre [_] = text "is"
isOrAre _ = text "are"
-- | Determines the form of to do appropriate for the length of a list:
--
-- > doOrDoes [] = text "do"
-- > doOrDoes ["Hello"] = text "does"
-- > doOrDoes ["Hello", "World"] = text "do"
doOrDoes :: [a] -> SDoc
doOrDoes [_] = text "does"
doOrDoes _ = text "do"
{-
************************************************************************
* *
\subsection{Error handling}
* *
************************************************************************
-}
pprPanic :: String -> SDoc -> a
-- ^ Throw an exception saying "bug in GHC"
pprPanic = panicDoc
pprSorry :: String -> SDoc -> a
-- ^ Throw an exception saying "this isn't finished yet"
pprSorry = sorryDoc
pprPgmError :: String -> SDoc -> a
-- ^ Throw an exception saying "bug in pgm being compiled" (used for unusual program errors)
pprPgmError = pgmErrorDoc
pprTrace :: String -> SDoc -> a -> a
-- ^ If debug output is on, show some 'SDoc' on the screen
pprTrace str doc x
| opt_NoDebugOutput = x
| otherwise = pprDebugAndThen unsafeGlobalDynFlags trace (text str) doc x
-- | @pprTraceIt desc x@ is equivalent to @pprTrace desc (ppr x) x@
pprTraceIt :: Outputable a => String -> a -> a
pprTraceIt desc x = pprTrace desc (ppr x) x
-- | If debug output is on, show some 'SDoc' on the screen along
-- with a call stack when available.
#if __GLASGOW_HASKELL__ > 710
pprSTrace :: (?callStack :: CallStack) => SDoc -> a -> a
pprSTrace = pprTrace (prettyCallStack ?callStack)
#else
pprSTrace :: SDoc -> a -> a
pprSTrace = pprTrace "no callstack info"
#endif
warnPprTrace :: Bool -> String -> Int -> SDoc -> a -> a
-- ^ Just warn about an assertion failure, recording the given file and line number.
-- Should typically be accessed with the WARN macros
warnPprTrace _ _ _ _ x | not debugIsOn = x
warnPprTrace _ _file _line _msg x | opt_NoDebugOutput = x
warnPprTrace False _file _line _msg x = x
warnPprTrace True file line msg x
= pprDebugAndThen unsafeGlobalDynFlags trace heading msg x
where
heading = hsep [text "WARNING: file", text file <> comma, text "line", int line]
-- | Panic with an assertion failure, recording the given file and
-- line number. Should typically be accessed with the ASSERT family of macros
#if __GLASGOW_HASKELL__ > 710
assertPprPanic :: (?callStack :: CallStack) => String -> Int -> SDoc -> a
assertPprPanic _file _line msg
= pprPanic "ASSERT failed!" doc
where
doc = sep [ text (prettyCallStack ?callStack)
, msg ]
#else
assertPprPanic :: String -> Int -> SDoc -> a
assertPprPanic file line msg
= pprPanic "ASSERT failed!" doc
where
doc = sep [ hsep [ text "file", text file
, text "line", int line ]
, msg ]
#endif
pprDebugAndThen :: DynFlags -> (String -> a) -> SDoc -> SDoc -> a
pprDebugAndThen dflags cont heading pretty_msg
= cont (showSDocDump dflags doc)
where
doc = sep [heading, nest 2 pretty_msg]
|
vikraman/ghc
|
compiler/utils/Outputable.hs
|
bsd-3-clause
| 41,320 | 0 | 17 | 10,641 | 9,756 | 5,231 | 4,525 | 685 | 4 |
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Middleware.Push.Referer.LimitMultiMap where
import Data.Map (Map)
import qualified Data.Map.Strict as M
import Data.Set (Set)
import qualified Data.Set as S
data LimitMultiMap k v = LimitMultiMap {
limitKey :: !Int
, limitVal :: !Int
, multiMap :: !(Map k (Set v))
} deriving (Eq, Show)
isEmpty :: LimitMultiMap k t -> Bool
isEmpty (LimitMultiMap _ _ m) = M.null m
empty :: Int -> Int -> LimitMultiMap k v
empty lk lv = LimitMultiMap lk lv M.empty
-- | Insert a (key, value) pair while respecting both limits: at most
-- 'limitKey' distinct keys and at most 'limitVal' values per key.  A full
-- value set silently drops further values for that key; once the key limit
-- is reached, pairs with previously unseen keys are dropped as well (the
-- 'M.adjust' is a no-op for missing keys).  The 'error' branch is only
-- reachable if the key limit has somehow already been exceeded.
insert :: (Ord k, Ord v) => (k,v) -> LimitMultiMap k v -> LimitMultiMap k v
insert (k,v) (LimitMultiMap lk lv m)
  | siz < lk = let !m' = M.alter alt k m in LimitMultiMap lk lv m'
  | siz == lk = let !m' = M.adjust adj k m in LimitMultiMap lk lv m'
  | otherwise = error "insert"
  where
    siz = M.size m
    alt Nothing = Just $ S.singleton v
    alt s@(Just set)
      | S.size set == lv = s
      | otherwise = Just $ S.insert v set
    adj set
      | S.size set == lv = set
      | otherwise = S.insert v set
lookup :: Ord k => k -> LimitMultiMap k v -> [v]
lookup k (LimitMultiMap _ _ m) = case M.lookup k m of
Nothing -> []
Just set -> S.toList set
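-- A usage sketch (not part of the original module); values come back in
-- ascending 'Set' order:
--
-- > lookup "k" (insert ("k",2) (insert ("k",1) (empty 10 5))) == [1,2]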
|
creichert/wai
|
wai-http2-extra/Network/Wai/Middleware/Push/Referer/LimitMultiMap.hs
|
mit
| 1,210 | 0 | 13 | 321 | 543 | 272 | 271 | 38 | 2 |
import System.Environment
import System.IO
import Text.ParserCombinators.Parsec
import Control.Monad
import Data.ByteString.Lazy.Char8 as BS hiding (length,take,drop,filter,head)
import Control.Applicative hiding ((<|>), many)
{--
cjoin1(Open usp Tukubai)
designed by Nobuaki Tounaka
written by Ryuichi Ueda
The MIT License
Copyright (C) 2012 Universal Shell Programming Laboratory
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
--}
showUsage :: IO ()
showUsage = do System.IO.hPutStr stderr ("Usage : cjoin1 [+ng] <key=n> <master> <tran>\n" ++
"Mon Jul 22 10:17:09 JST 2013\n" ++
"Open usp Tukubai (LINUX+FREEBSD), Haskell ver.\n")
main :: IO ()
main = do args <- getArgs
          case args of
            ["-h"] -> showUsage
            ["--help"] -> showUsage
            ["+ng",key,master,tran] -> mainProc True key master tran
            ["+ng",key,master] -> mainProc True key master "-"
            [key,master,tran] -> mainProc False key master tran
            [key,master] -> mainProc False key master "-"
            _ -> showUsage -- unrecognised argument shape: show usage instead of crashing
mainProc :: Bool -> String -> String -> String -> IO ()
mainProc ng key master tran = do ms <- readF master
ts <- readF tran
mainProc' ng (parseKey key) ms ts
readF :: String -> IO BS.ByteString
readF "-" = BS.getContents
readF f = BS.readFile f
parseKey :: String -> Keys
parseKey str = case parse keys "" str of
Right opt -> opt
Left err -> Error (show err)
mainProc' :: Bool -> Keys -> BS.ByteString -> BS.ByteString -> IO ()
mainProc' ng (Keys ks) ms ts = out ng [ join1 mlines t | t <- tlines ]
                where mlines = parseMaster ks (BS.lines ms)
                      tlines = parseTran ks (BS.lines ts)
-- report a bad <key=n> specification instead of failing with a pattern-match error
mainProc' _ (Error msg) _ _ = System.IO.hPutStr stderr (msg ++ "\n") >> showUsage
out :: Bool -> [OutTran] -> IO ()
out _ [] = do return ()
out False ((OkTran ln):as) = (BS.putStrLn ln) >> (out False as)
out True ((OkTran ln):as) = (BS.putStrLn ln) >> (out True as)
out False ((NgTran ln):as) = out False as
out True ((NgTran ln):as) = (BS.hPutStrLn stderr ln) >> (out True as)
join1 :: [Master] -> Tran -> OutTran
join1 ms (Tran p k a) = makeLine (pickMaster ms k) (Tran p k a)
makeLine :: Maybe Master -> Tran -> OutTran
makeLine (Just (Master k v)) (Tran p _ a) = OkTran (BS.unwords $ p ++ k ++ v ++ a)
makeLine Nothing (Tran p k a) = NgTran (BS.unwords $ p ++ k ++ a)
myWords :: BS.ByteString -> [BS.ByteString]
myWords line = BS.split ' ' line
pickMaster :: [Master] -> [BS.ByteString] -> Maybe Master
pickMaster ms k = if length matched > 0 then Just (head matched) else Nothing
where matched = filter ( matchMaster k ) ms
matchMaster k (Master a b) = k == a
parseMaster :: [Int] -> [BS.ByteString] -> [Master]
parseMaster ks lines = [ f (length ks) (myWords ln) | ln <- lines ]
where f n ws = Master (take n ws) (drop n ws)
parseTran :: [Int] -> [BS.ByteString] -> [Tran]
parseTran ks lines = [ parseTran' ks (myWords ln) | ln <- lines ]
parseTran' :: [Int] -> [BS.ByteString] -> Tran
parseTran' ks ws = Tran (take pre ws) (take (length ks) rem) (drop (length ks) rem)
where pre = (ks !! 0) - 1
rem = drop pre ws
data Keys = Keys [Int] | Error String
data Master = Master [BS.ByteString] [BS.ByteString] deriving Show -- keys and values
data Tran = Tran [BS.ByteString] [BS.ByteString] [BS.ByteString] deriving Show -- fields before the keys, the key fields, and the remaining fields
data OutTran = OkTran BS.ByteString | NgTran BS.ByteString
keys = string "key=" >> (try(rangekey) <|> try(singlekey) )
singlekey = do n <- many1 digit
return $ Keys [read n::Int]
rangekey = do n <- many1 digit
char '/'
m <- many1 digit
return $ Keys [(read n::Int)..(read m::Int)]
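-- Examples of the accepted key syntax (inferred from the parsers above, shown
-- for illustration only):
--
-- parseKey "key=2" yields Keys [2]
-- parseKey "key=2/4" yields Keys [2,3,4]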
|
ShellShoccar-jpn/Open-usp-Tukubai
|
COMMANDS.HS/cjoin1.hs
|
mit
| 4,895 | 0 | 11 | 1,294 | 1,501 | 775 | 726 | 71 | 6 |
module PFEdeps(
PFE5MT,runPFE5,clean5,getSt5ext,updSt5ext,setSt5ext,
--Dep,Deps,deps0,
tdepModules,tdepModules',depModules,depModules',
tdefinedNames,isDefaultDecl,isInstDecl,splitDecls
, Deps2(..)
)
where
import Prelude hiding (readFile,readIO)
import Data.Maybe(fromMaybe)
import Data.List(nub,sort)
import HsModule
import HsIdent(getHSName,HsIdentI(..))
import HasBaseStruct(basestruct,hsTypeSig,hsInfixDecl)
import HasBaseName(getBaseName)
import HsDeclStruct(DI(..))
import SrcLoc1(srcLoc)
import TypedIds(IdTy(..))
import QualNames(mkUnqual)
import PNT(PNT(..))
import UniqueNames(noSrcLoc)
--import TiNames(idTy)
--import TiModule() -- instance HasIdTy PNT -- grr!
import PFE0
import PFE2(getModuleTime)
import PFE3(parseModule)
import PFE4(PFE4MT,getSt4ext,updSt4ext,clean4,runPFE4,typeCheckModule)
import TiClasses(fromDefs)
import TiNames(instName,defaultName,derivedInstName)
import DefinedNames(definedNames,addName,classMethods,contextSize)
import TiDefinedNames(definedTypeName)
import FreeNames(freeNames) --freeValues
import PrettyPrint
import MUtils
import AbstractIO
import FileUtils
import DirUtils(optCreateDirectory,getModificationTimeMaybe,rmR)
type Dep n = [([n],([n],[Hash]))] -- tricky to use correctly, should be abstract!!
type Deps n = [(ModuleName,(ClockTime,Dep n))]
type Deps2 n = (Deps n,Deps n)
deps0 = [] :: Deps n
type PFE5MT n i1 i2 ds1 ds2 ext m = PFE4MT n i1 i2 ds1 ds2 (Deps2 i2,ext) m
runPFE5 ext = runPFE4 ((deps0,deps0),ext)
getSt5 :: Monad m => PFE5MT n i1 i2 ds1 ds2 ext m (Deps2 i2)
updSt5 :: Monad m => Upd (Deps2 i2)->PFE5MT n i1 i2 ds1 ds2 ext m ()
getSt5ext :: Monad m => PFE5MT n i1 i2 ds1 ds2 ext m ext
updSt5ext :: Monad m => Upd ext->PFE5MT n i1 i2 ds1 ds2 ext m ()
getSt5 = fst # getSt4ext
updSt5 = updSt4ext . apFst
setSt5 = updSt5 . const
getSt5ext = snd # getSt4ext
updSt5ext = updSt4ext . apSnd
setSt5ext = updSt5ext . const
type Hash = Int
hash =
checksum . quickrender . withPPEnv hashmode . ppi
where
checksum = foldr (\c h->3*h+fromEnum c) 0
hashmode = defaultMode{layoutType=PPNoLayout,typeInfo=False}
-- Compute the dependency info for all modules in the project:
depModules = depModules' Nothing
tdepModules = tdepModules' Nothing
-- Update the dependeny info for a selected set of modules in the project:
depModules' optms =
do (olddeps,oldtdeps) <- getSt5
newdeps <- depModules'' olddeps syntaxDeps optms
setSt5 (newdeps,oldtdeps)
return newdeps
where
syntaxDeps = (depFile,untypedParse,True)
untypedParse = fmap dup . parseModule'
tdepModules' optms =
do (olddeps,oldtdeps) <- getSt5
newtdeps <- depModules'' oldtdeps typedDeps optms
setSt5 (olddeps,newtdeps)
return newtdeps
where
typedDeps = (tdepFile,typedParse,False)
typedParse m = (,) # parseModule' m <# typeCheckModule m
-- The module is parsed twice!
parseModule' = fmap snd . parseModule
depModules'' olddeps depsrc optms =
do optCreateDirectory `projPath` depdir
ms <- maybe allModules return optms
updateDeps depsrc olddeps ms
updateDeps (depFile,parseMod,allInst) olddeps ms =
do newdeps <- mapM upd ms
let changed = map fst newdeps
deps = newdeps++filter ((`notElem` changed).fst) olddeps
return deps
where
upd m =
do let olddep = lookup m olddeps
t <- getModuleTime m
if t `newerThan` (fst # olddep)
then do dept <- maybeF getModificationTimeMaybe depf
if t `newerThan` dept then again else useOld dept
else return (m,fromJust' "PFEdeps.hs:124" olddep)
where
depf = depFile m
again =
do epput $ "Extracting dependencies:"<+>m
dep <- dependencies allInst # parseMod m
t <- updateDep depf dep
return (m,(t,dep))
useOld dept =
do dir <- fromJust' "PFEdeps.hs:120" # projectDir
let path = depf dir
ret dep = return (m,(fromJust' "PFEdeps.hs:123" dept,dep))
--ret . read'' path =<< readFile path -- lazy
maybe again ret =<< maybeM (readIO =<< readFile path) -- strict
updateDep depf dep =
do optdir <- projectDir
case optdir of
Nothing -> getClockTime
Just dir ->
do updated <- updateFile' (depf dir) (show dep)
getModificationTime (depf dir)
{-+
The hash is computed from the source AST, while the set of free names in
a declaration is computed from the type checked AST, to catch dependencies
on instances, which are made explicit by the dictionary translation.
Things like derived instances that do not appear in the source code
will be assigned hash value [].
-}
dependencies allInst (untyped,typed) = udeps++deps
where
deps0 = [(rdefs d,rfree d)|d<-fromDefs (hsModDecls typed)]
udeps = [([],(fvs,[]))|([],fvs@(_:_))<-deps0]
deps1 = mapSnd (nub.concat) $ collectByFst [(n,fvs)|(ns,fvs)<-deps0,n<-ns]
deps = [([n],(fvs,findHash n))|(n,fvs)<-deps1]
findHash n = nub $ sort [h|(n',h)<-hs,n'==n]
hs = [(n,h)|d<-fromDefs (hsModDecls untyped),let h=hash d,n<-rdefs d]
mn = getBaseName (hsModName typed)
rdefs = map getHSName . tdefinedNames allInst mn
rfree = restrict . tfreeNames mn
restrict = nub . concatMap (addowner.getHSName)
--restrict = map getHSName
addowner x = if o==x then [x] else [o,x]
where o = owner x
-- Map subordinate names to their owner (reduces the total number of names):
owner x =
case idty x of
ConstrOf t ty -> pnt t (Type ty)
FieldOf t ty -> pnt t (Type ty)
MethodOf c n ms -> pnt c (Class n ms)
_ -> x
where
pnt t idty = PNT (mkUnqual t) idty noSrcLoc
idty (PNT _ ty _) = ty
-- To track dependencies on instances, include the names assigned to
-- instances by the type checker.
-- Also, to keep relevant type signatures and infix declarations, pretend that
-- they are part of the definitions of the identifiers they mention.
tdefinedNames allInst m d =
case basestruct d of
Just (HsInstDecl s optn ctx inst ds) -> if allInst then [] else [HsVar n]
where n = fromMaybe (instName m s inst) optn
Just (HsTypeSig s is c t) -> map HsVar is
Just (HsInfixDecl s f is) -> is
_ -> ns
where ns = map fst (definedNames (addName d))
{-
Since the type checker lifts default methods out from the class declaration
to the top level, we make the class declaration depend on the default
methods, under the assumption that they will be included in slices if
the class is included...
-}
tfreeNames m d =
case basestruct d of
Just (HsDataDecl s c tp cs cls) ->
[HsVar (derivedInstName m cl tn)|cl<-cls]++ns
where tn = definedTypeName tp
Just (HsClassDecl s c t fd ds) ->
map fst (freeNames d)++map (fmap defaultName) methods
where
methods = map fst (classMethods undefined (contextSize c) ds)
_ -> ns
where ns = map fst (freeNames d)
isDefaultDecl d =
case basestruct d of
Just (HsDefaultDecl{}) -> True
_ -> False
isInstDecl d =
case basestruct d of
Just (HsInstDecl{}) -> True
_ -> False
{-+
To make it easy to include the right infix declarations and type signatures,
split them. For example, if * is needed but / is not, you can't keep or throw
away all of the infix declaration "infixl 7 *,/".
-}
splitDecls = concatMap splitDecl
splitDecl d =
case basestruct d of
Just (HsTypeSig s is c t) -> [hsTypeSig s [i] c t|i<-is]
Just (HsInfixDecl s f is) -> [hsInfixDecl s f [i]|i<-is]
_ -> [d]
--------------------------------------------------------------------------------
clean5 = withProjectDir clean
where
clean dir = do rmR [depdir dir]
clean4
--------------------------------------------------------------------------------
depdir dir=dir++"dep/"
depFile m dir = depdir dir++moduleInfoPath m++".g"
tdepFile m dir = depdir dir++moduleInfoPath m++".tg"
--------------------------------------------------------------------------------
|
SAdams601/HaRe
|
old/tools/pfe/PFEdeps.hs
|
bsd-3-clause
| 8,045 | 32 | 20 | 1,756 | 2,475 | 1,333 | 1,142 | 163 | 5 |
{-# Language KindSignatures #-}
{-# Language PolyKinds #-}
module T15881 where
import Data.Kind
data A n (a :: n n) :: Type
|
sdiehl/ghc
|
testsuite/tests/polykinds/T15881.hs
|
bsd-3-clause
| 132 | 0 | 6 | 30 | 28 | 19 | 9 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
module T9778 where
data T = A | B
data G a where
C :: G A
|
ghc-android/ghc
|
testsuite/tests/rename/should_compile/T9778.hs
|
bsd-3-clause
| 112 | 0 | 6 | 29 | 29 | 19 | 10 | 6 | 0 |
module Carbs.Pasta where
data Vongole = Vongole Int
data Linguine = Linguine { eggs :: Bool
, cooking_time :: Int }
|
elisehuard/hs-pasta
|
src/Carbs/Pasta.hs
|
mit
| 143 | 0 | 8 | 48 | 36 | 22 | 14 | 4 | 0 |
import Data.List
import Data.Numbers.Primes
result :: Int
result = sum $ takeWhile (<2000000) primes
main = print result
|
nbartlomiej/haskeuler
|
010/problem-010.hs
|
mit
| 134 | 0 | 7 | 21 | 50 | 28 | 22 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-volumespecification.html
module Stratosphere.ResourceProperties.EMRInstanceFleetConfigVolumeSpecification where
import Stratosphere.ResourceImports
-- | Full data type definition for EMRInstanceFleetConfigVolumeSpecification.
-- See 'emrInstanceFleetConfigVolumeSpecification' for a more convenient
-- constructor.
data EMRInstanceFleetConfigVolumeSpecification =
EMRInstanceFleetConfigVolumeSpecification
{ _eMRInstanceFleetConfigVolumeSpecificationIops :: Maybe (Val Integer)
, _eMRInstanceFleetConfigVolumeSpecificationSizeInGB :: Val Integer
, _eMRInstanceFleetConfigVolumeSpecificationVolumeType :: Val Text
} deriving (Show, Eq)
instance ToJSON EMRInstanceFleetConfigVolumeSpecification where
toJSON EMRInstanceFleetConfigVolumeSpecification{..} =
object $
catMaybes
[ fmap (("Iops",) . toJSON) _eMRInstanceFleetConfigVolumeSpecificationIops
, (Just . ("SizeInGB",) . toJSON) _eMRInstanceFleetConfigVolumeSpecificationSizeInGB
, (Just . ("VolumeType",) . toJSON) _eMRInstanceFleetConfigVolumeSpecificationVolumeType
]
-- | Constructor for 'EMRInstanceFleetConfigVolumeSpecification' containing
-- required fields as arguments.
emrInstanceFleetConfigVolumeSpecification
:: Val Integer -- ^ 'emrifcvsSizeInGB'
-> Val Text -- ^ 'emrifcvsVolumeType'
-> EMRInstanceFleetConfigVolumeSpecification
emrInstanceFleetConfigVolumeSpecification sizeInGBarg volumeTypearg =
EMRInstanceFleetConfigVolumeSpecification
{ _eMRInstanceFleetConfigVolumeSpecificationIops = Nothing
, _eMRInstanceFleetConfigVolumeSpecificationSizeInGB = sizeInGBarg
, _eMRInstanceFleetConfigVolumeSpecificationVolumeType = volumeTypearg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-volumespecification.html#cfn-elasticmapreduce-instancefleetconfig-volumespecification-iops
emrifcvsIops :: Lens' EMRInstanceFleetConfigVolumeSpecification (Maybe (Val Integer))
emrifcvsIops = lens _eMRInstanceFleetConfigVolumeSpecificationIops (\s a -> s { _eMRInstanceFleetConfigVolumeSpecificationIops = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-volumespecification.html#cfn-elasticmapreduce-instancefleetconfig-volumespecification-sizeingb
emrifcvsSizeInGB :: Lens' EMRInstanceFleetConfigVolumeSpecification (Val Integer)
emrifcvsSizeInGB = lens _eMRInstanceFleetConfigVolumeSpecificationSizeInGB (\s a -> s { _eMRInstanceFleetConfigVolumeSpecificationSizeInGB = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancefleetconfig-volumespecification.html#cfn-elasticmapreduce-instancefleetconfig-volumespecification-volumetype
emrifcvsVolumeType :: Lens' EMRInstanceFleetConfigVolumeSpecification (Val Text)
emrifcvsVolumeType = lens _eMRInstanceFleetConfigVolumeSpecificationVolumeType (\s a -> s { _eMRInstanceFleetConfigVolumeSpecificationVolumeType = a })
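-- A construction sketch (hypothetical values; assumes the 'Literal'
-- constructor for 'Val' and the lens operators that the library normally
-- re-exports):
--
-- > emrInstanceFleetConfigVolumeSpecification (Literal 32) (Literal "gp2")
-- >   & emrifcvsIops ?~ Literal 100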
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/EMRInstanceFleetConfigVolumeSpecification.hs
|
mit
| 3,237 | 0 | 13 | 269 | 357 | 203 | 154 | 34 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
--------------------------------------------------------------------------------
-- |
-- Module : Control.Workflow.Coordinator
-- Copyright : (c) 2019 Kai Zhang
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- A Coordinator needs to be able to discover new workers and send commands
-- to them. The implementation of a Coordinator therefore has a server part
-- and a client part. Server-side code is run by `withCoordinator` and
-- client-side code is run by `startClient`.
--
--------------------------------------------------------------------------------
module Control.Workflow.Coordinator
( Signal(..)
, Worker(..)
, WorkerStatus(..)
, Coordinator(..)
) where
import Data.Binary (Binary)
import Control.Monad.Catch (MonadMask)
import GHC.Generics (Generic)
import Control.Monad.IO.Class (MonadIO)
import Control.Distributed.Process
import GHC.Conc (STM)
import Data.Proxy (Proxy(..))
import Control.Workflow.Types
-- | Coordinator manages a pool of workers.
class Coordinator coordinator where
-- Configuration
    -- | The configuration type associated with this coordinator.
type Config coordinator = config | config -> coordinator
setQueueSize :: Int -> Config coordinator -> Config coordinator
-- Master/server-side process
-- | Initialize Coordinator on the server.
withCoordinator :: (MonadMask m, MonadIO m)
=> Config coordinator -> (coordinator -> m a) -> m a
-- | Server initiation process
initiate :: coordinator -> Process ()
-- | Server shutdown process
shutdown :: coordinator -> Process ()
-- Worker/client-side process
startClient :: Proxy coordinator -> NodeId -> FunctionTable -> IO ()
-- Operational functions
-- | Return all workers currently in the pool.
getWorkers :: coordinator -> STM [Worker]
-- | Reserve a free worker. This function should block
-- until a worker is reserved.
reserve :: coordinator -> Maybe Resource -> Process ProcessId
-- | Set a worker free but keep it alive so that it can be assigned other jobs.
freeWorker :: MonadIO m => coordinator -> ProcessId -> m ()
setWorkerError :: MonadIO m => coordinator -> String -> ProcessId -> m ()
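-- | Illustrative helper, not part of the original API: reserve a worker, run a
-- job on it and free the worker afterwards. A minimal sketch of how the
-- operational functions above are expected to compose; a production version
-- would likely bracket against exceptions.
withReservedWorker :: Coordinator coordinator
                   => coordinator -> Maybe Resource -> (ProcessId -> Process a) -> Process a
withReservedWorker coord res job = do
    pid    <- reserve coord res   -- blocks until a worker is available
    result <- job pid
    freeWorker coord pid          -- keep the worker alive for further jobs
    return result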
-- | A worker.
data Worker = Worker
{ _worker_id :: ProcessId
, _worker_status :: WorkerStatus
, _worker_config :: Maybe Resource
} deriving (Generic, Show)
instance Binary Worker
-- | The status of a worker.
data WorkerStatus = Idle
| Working
| ErrorExit String
deriving (Eq, Generic, Show)
instance Binary WorkerStatus
data Signal = Shutdown deriving (Generic)
instance Binary Signal
|
kaizhang/SciFlow
|
SciFlow/src/Control/Workflow/Coordinator.hs
|
mit
| 2,894 | 12 | 18 | 615 | 416 | 262 | 154 | 43 | 0 |
-- trig utilities
-- convert a value in degrees to radians
radians :: Floating a => a -> a
radians x = pi * x / 180.0
-- convert a value in radians to degrees
degrees :: Floating a => a -> a
degrees x = 180.0 * x / pi
-- return the sine of a value given in degrees
sinDeg :: Floating a => a -> a
sinDeg x = sin (radians x)
-- return the cosine of a value given in degrees
cosDeg :: Floating a => a -> a
cosDeg x = cos (radians x)
-- return the tangent of a value given in degrees
tanDeg :: Floating a => a -> a
tanDeg x = tan (radians x)
-- return the arcsine of a value as degrees
asinDeg :: Floating a => a -> a
asinDeg x = degrees (asin x)
-- return the arccosine of a value as degrees
acosDeg :: Floating a => a -> a
acosDeg x = degrees (acos x)
-- return the arctangent of a value as degrees
atanDeg :: Floating a => a -> a
atanDeg x = degrees (atan x)
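-- Worked example (added for illustration; equalities hold only up to
-- floating-point rounding): converting 30 degrees to radians and back,
-- and a familiar value of sine.
exampleTrig :: (Double, Double)
exampleTrig = (degrees (radians 30), sinDeg 30)   -- roughly (30.0, 0.5)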
|
jdeisenberg/rwh_commentary
|
chapter02_files/trigUtils.hs
|
mit
| 868 | 0 | 7 | 202 | 272 | 137 | 135 | 16 | 1 |
import Data.List
import Control.Monad
solveRPN :: (Num a, Read a) => String -> a
solveRPN = head . foldl foldingFunction [] . words
where foldingFunction (x:y:ys) "*" = (x * y):ys
foldingFunction (x:y:ys) "+" = (x + y):ys
foldingFunction (x:y:ys) "-" = (y - x):ys
foldingFunction xs numberString = read numberString:xs
solveRPNwithST :: String -> Maybe Double
solveRPNwithST st = do
[result] <- foldM foldingFunctionST [] (words st)
return result
readMaybe st = case reads st of [(x,"")] -> Just x
_ -> Nothing
foldingFunctionST :: [Double] -> String -> Maybe [Double]
foldingFunctionST (x:y:ys) "*" = return ((x * y):ys)
foldingFunctionST (x:y:ys) "+" = return ((x + y):ys)
foldingFunctionST (x:y:ys) "-" = return ((y - x):ys)
foldingFunctionST xs numberString = liftM (:xs) (readMaybe numberString)
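-- Usage sketch (added for illustration, not part of the original file): the
-- classic example evaluates to -4, and the safe variant returns Nothing on
-- malformed input.
exampleRPN :: Double
exampleRPN = solveRPN "10 4 3 + 2 * -"                                      -- -4.0
exampleRPNSafe :: (Maybe Double, Maybe Double)
exampleRPNSafe = (solveRPNwithST "1 2 * 4 +", solveRPNwithST "1 wawawawa")  -- (Just 6.0, Nothing)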
|
mortum5/programming
|
haskell/usefull/parser/rpn.hs
|
mit
| 906 | 0 | 10 | 229 | 421 | 218 | 203 | 19 | 4 |
-- Double Char
-- http://www.codewars.com/kata/56b1f01c247c01db92000076/
module Codewars.Strings where
doubleChar :: String -> String
doubleChar = concatMap (\c -> [c,c])
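-- Example (added for illustration): doubleChar "abcd" == "aabbccdd"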
|
gafiatulin/codewars
|
src/7 kyu/Strings 2.hs
|
mit
| 173 | 0 | 8 | 21 | 39 | 24 | 15 | 3 | 1 |
-- RLE
-- https://www.codewars.com/kata/578bf2d8daa01a4ee8000046
module Kata.RLE (encode,decode) where
import Data.Char (isNumber)
import Data.List (group, unfoldr)
import Data.Maybe (listToMaybe)
import Data.Tuple (swap)
encode :: String -> String
encode = concatMap (\g -> (show . length $ g) ++ take 1 g) . group
decode :: String -> String
decode = concat . unfoldr f
where f s = let (n, c:rest) = span isNumber s in if null s then Nothing else fmap (swap . (,) rest . (`replicate` c) . fst) . listToMaybe . reads $ n
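-- Round-trip example (added for illustration):
-- encode "aaaabbb" == "4a3b" and decode "4a3b" == "aaaabbb".
exampleRoundTrip :: Bool
exampleRoundTrip = decode (encode "aaaabbb") == "aaaabbb"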
|
gafiatulin/codewars
|
src/6 kyu/RLE2.hs
|
mit
| 529 | 0 | 19 | 95 | 219 | 121 | 98 | 10 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds, MultiParamTypeClasses, UndecidableInstances #-}
{-# LANGUAGE JavaScriptFFI #-}
-----------------------------------------------------------------------------
--
-- Module : JsHs.Array
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : Artem Chirkin <[email protected]>
-- Stability : experimental
--
-- JavaScript Arrays containing elements of a single type.
-- Also provides an interface to derive array behavior for newtypes over JSVal
-- that are assumed to contain an array.
-- For array-like structures, instantiate the `LikeJSArray` class.
-- For elements of arrays, instantiate the `LikeJS` class.
--
-----------------------------------------------------------------------------
module JsHs.Array
( Array (), emptyArray
, LikeJSArray (..), LikeJS (..)
, join
, map, mapi, mapSame, mapIO, mapIO_, mapiIO, mapiIO_
, foldl, foldl1, foldr, foldr1, foldi, foldi1
, foldIO, foldIO_, foldiIO, foldiIO_
, zip, zipi, zipIO, zipIO_, zipiIO, zipiIO_
, unionZip, unionZipIO, unionZipIO_
, fromList, toList
, length, (!), slice, take, drop, concat
, filter, mapEither
, sort, removeSeqDups
) where
--import Data.Geometry.VectorMath --(Vector )
import Prelude hiding (map, foldl, foldr, foldl1, foldr1
, zip, take, drop, concat, filter, length)
import Data.Coerce (Coercible (), coerce)
import Control.Monad (void)
import System.IO.Unsafe (unsafePerformIO)
import JsHs.JSString (JSString, pack, unpack')
import JsHs.Callback
import JsHs.Types (JSVal, IsJSVal, jsval)
import JsHs.LikeJS.Class
-- | JavaScript array containing elements of single type
newtype Array a = Array JSVal
instance IsJSVal (Array a)
instance LikeJS t a => LikeJS "Array" (Array a)
instance LikeJS t a => LikeJSArray t (Array a) where
type ArrayElem (Array a) = a
{-# INLINE toJSArray #-}
toJSArray = id
{-# INLINE fromJSArray #-}
fromJSArray = id
-- | Data types whose representation is the same as that of a JavaScript Array
class LikeJS ta (ArrayElem a) => LikeJSArray ta a where
type ArrayElem a
toJSArray :: a -> Array (ArrayElem a)
fromJSArray :: Array (ArrayElem a) -> a
{-# INLINE toJSArray #-}
default toJSArray :: Coercible a JSVal => a -> Array (ArrayElem a)
toJSArray = coerce
{-# INLINE fromJSArray #-}
default fromJSArray :: Coercible JSVal a => Array (ArrayElem a) -> a
fromJSArray = coerce
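-- An illustrative sketch (hypothetical type, not part of this module) of
-- deriving array behavior for a newtype over 'JSVal': the default
-- 'Coercible'-based 'toJSArray' / 'fromJSArray' can then be reused. The tag
-- string used below is an assumption and depends on the 'LikeJS' instance of
-- the element type.
--
-- > newtype Doubles = Doubles JSVal
-- > instance LikeJSArray "Number" Doubles where
-- >     type ArrayElem Doubles = Double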
-- | Convert haskell list into JS array
fromList :: ( LikeJSArray ta a )
=> [ArrayElem a] -> a
fromList = fromJSArray . Array . asJSVal
-- | Convert JS array to haskell list
toList :: ( LikeJSArray ta a )
=> a -> [ArrayElem a]
toList = asLikeJS . jsval . toJSArray
{-# NOINLINE map #-}
map :: ( LikeJSArray ta a
, LikeJS ty y)
=> (ArrayElem a -> y)
-> a -> Array y
map f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe1 $ asJSVal . f . asLikeJS
r <- js_mapArray call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE mapSame #-}
mapSame :: ( LikeJSArray ta a
, LikeJS ta x
, x ~ ArrayElem a)
=> (x -> x)
-> a -> a
mapSame f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe1 $ asJSVal . f . asLikeJS
r <- fromJSArray <$> js_mapArray call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE mapi #-}
mapi :: ( LikeJSArray ta a
, LikeJS ty y)
=> (Int -> ArrayElem a -> y)
-> a -> Array y
mapi f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \e i -> asJSVal (f (asLikeJS i) (asLikeJS e))
r <- js_mapArray call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE mapIO #-}
mapIO :: ( LikeJSArray ta a
, LikeJS ty y)
=> (ArrayElem a -> IO y)
-> a -> IO (Array y)
mapIO f arr = do
call <- syncCallbackUnsafeIO1 $ fmap asJSVal . f . asLikeJS
r <- js_mapArray call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE mapIO_ #-}
mapIO_ :: LikeJSArray ta a => (ArrayElem a -> IO ()) -> a -> IO ()
mapIO_ f arr = do
call <- syncCallback1 ContinueAsync $ f . asLikeJS
js_mapArray_ call (toJSArray arr)
releaseCallback call
{-# INLINE mapiIO #-}
mapiIO :: ( LikeJSArray ta a, LikeJS ty y) => (Int -> ArrayElem a -> IO y) -> a -> IO (Array y)
mapiIO f arr = do
call <- syncCallbackUnsafeIO2 $ \e i -> fmap asJSVal (f (asLikeJS i) (asLikeJS e))
r <- js_mapArray call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE mapiIO_ #-}
mapiIO_ :: LikeJSArray ta a => (Int -> ArrayElem a -> IO ()) -> a -> IO ()
mapiIO_ f arr = do
call <- syncCallback2 ContinueAsync $ \e i -> f (asLikeJS i) (asLikeJS e)
js_mapArray_ call (toJSArray arr)
releaseCallback call
{-# RULES
"map/concat" forall f g arr . map f (map g arr) = map (f . g) arr
"mapSame/concat" forall f g arr . mapSame f (mapSame g arr) = mapSame (f . g) arr
"mapSN/concat" forall f g arr . map f (mapSame g arr) = map (f . g) arr
"map/toSame" forall f arr . fromJSArray (map f arr) = mapSame f arr
#-}
{-# NOINLINE foldl #-}
foldl :: ( LikeJSArray tt t
, LikeJS ta a)
=> (a -> ArrayElem t -> a)
-> a -> t -> a
foldl f x0 arr = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \r e -> asJSVal (f (asLikeJS r) (asLikeJS e))
r <- asLikeJS <$> js_foldlArray call (asJSVal x0) (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE foldl1 #-}
foldl1 :: ( LikeJSArray tt t
, LikeJS ta a)
=> (a -> ArrayElem t -> a)
-> t -> a
foldl1 f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \r e -> asJSVal (f (asLikeJS r) (asLikeJS e))
r <- asLikeJS <$> js_foldl1Array call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE foldr #-}
foldr :: ( LikeJSArray tt t
, LikeJS ta a)
=> (ArrayElem t -> a -> a)
-> a -> t -> a
foldr f x0 arr = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \r e -> asJSVal (f (asLikeJS e) (asLikeJS r))
r <- asLikeJS <$> js_foldrArray call (asJSVal x0) (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE foldr1 #-}
foldr1 :: ( LikeJSArray tt t
, LikeJS ta a)
=> (ArrayElem t -> a -> a)
-> t -> a
foldr1 f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \r e -> asJSVal (f (asLikeJS e) (asLikeJS r))
r <- asLikeJS <$> js_foldr1Array call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE foldi #-}
foldi :: ( LikeJSArray tt t
, LikeJS ta a)
=> (Int -> a -> ArrayElem t -> a)
-> a -> t -> a
foldi f x0 arr = unsafePerformIO $ do
call <- syncCallbackUnsafe3 $ \r e i -> asJSVal (f (asLikeJS i) (asLikeJS r) (asLikeJS e))
r <- asLikeJS <$> js_foldlArray call (asJSVal x0) (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE foldi1 #-}
foldi1 :: ( LikeJSArray tt t
, LikeJS ta a)
=> (Int -> a -> ArrayElem t -> a)
-> t -> a
foldi1 f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe3 $ \r e i -> asJSVal (f (asLikeJS i) (asLikeJS r) (asLikeJS e))
r <- asLikeJS <$> js_foldl1Array call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE foldIO #-}
foldIO :: ( LikeJSArray tt t
, LikeJS ty y)
=> (y -> ArrayElem t -> IO y)
-> y -> t -> IO y
foldIO f x0 arr = do
  call <- syncCallbackUnsafeIO2 $ \r e -> fmap asJSVal (f (asLikeJS r) (asLikeJS e)) -- accumulator first, matching 'foldl'
r <- asLikeJS <$> js_foldlArray call (asJSVal x0) (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE foldIO_ #-}
foldIO_ :: ( LikeJSArray tt t
, LikeJS ty y)
=> (y -> ArrayElem t -> IO y)
-> y -> t -> IO ()
foldIO_ f x0 arr = void $ foldIO f x0 arr
{-# INLINE foldiIO #-}
foldiIO :: ( LikeJSArray tt t
, LikeJS ty y)
=> (Int -> y -> ArrayElem t -> IO y)
-> y -> t -> IO y
foldiIO f x0 arr = do
  call <- syncCallbackUnsafeIO3 $ \r e i -> fmap asJSVal (f (asLikeJS i) (asLikeJS r) (asLikeJS e)) -- index, accumulator, element, matching 'foldi'
r <- asLikeJS <$> js_foldlArray call (asJSVal x0) (toJSArray arr)
r `seq` releaseCallback call
return r
{-# INLINE foldiIO_ #-}
foldiIO_ :: ( LikeJSArray tt t
, LikeJS ty y)
=> (Int -> y -> ArrayElem t -> IO y)
-> y -> t -> IO ()
foldiIO_ f x0 arr = void $ foldiIO f x0 arr
{-# RULES
"foldlmap/concat" forall f g x0 arr . foldl f x0 (map g arr) = foldl (\r -> f r . g ) x0 arr
"foldlmapSame/concat" forall f g x0 arr . foldl f x0 (mapSame g arr) = foldl (\r -> f r . g ) x0 arr
"foldl1map/concat" forall f g arr . foldl1 f (map g arr) = foldl1 (\r -> f r . g ) arr
"foldl1mapSame/concat" forall f g arr . foldl1 f (mapSame g arr) = foldl1 (\r -> f r . g ) arr
"foldrmap/concat" forall f g x0 arr . foldr f x0 (map g arr) = foldr (\e r -> f r (g e)) x0 arr
"foldrmapSame/concat" forall f g x0 arr . foldr f x0 (mapSame g arr) = foldr (\e r -> f r (g e)) x0 arr
"foldr1map/concat" forall f g arr . foldr1 f (map g arr) = foldr1 (\e r -> f r (g e)) arr
"foldr1mapSame/concat" forall f g arr . foldr1 f (mapSame g arr) = foldr1 (\e r -> f r (g e)) arr
#-}
-- zipping
{-# NOINLINE zip #-}
zip :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y)
=> (ArrayElem a -> ArrayElem b -> y)
-> a -> b -> Array y
zip f arr1 arr2 = unsafePerformIO $ do
call <- syncCallbackUnsafe2 $ \e1 e2 -> asJSVal (f (asLikeJS e1) (asLikeJS e2))
r <- js_zipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# NOINLINE zipi #-}
zipi :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y)
=> (Int -> ArrayElem a -> ArrayElem b -> y)
-> a -> b -> Array y
zipi f arr1 arr2 = unsafePerformIO $ do
call <- syncCallbackUnsafe3 $ \e1 e2 i -> asJSVal (f (asLikeJS i) (asLikeJS e1) (asLikeJS e2))
r <- js_zipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# INLINE zipIO #-}
zipIO :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y)
=> (ArrayElem a -> ArrayElem b -> IO y)
-> a -> b -> IO (Array y)
zipIO f arr1 arr2 = do
call <- syncCallbackUnsafeIO2 $ \e1 e2 -> fmap asJSVal (f (asLikeJS e1) (asLikeJS e2))
r <- js_zipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# INLINE zipiIO #-}
zipiIO :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y)
=> (Int -> ArrayElem a -> ArrayElem b -> IO y)
-> a -> b -> IO (Array y)
zipiIO f arr1 arr2 = do
call <- syncCallbackUnsafeIO3 $ \e1 e2 i -> fmap asJSVal (f (asLikeJS i) (asLikeJS e1) (asLikeJS e2))
r <- js_zipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# INLINE zipIO_ #-}
zipIO_ :: ( LikeJSArray ta a
, LikeJSArray tb b )
=> (ArrayElem a -> ArrayElem b -> IO ())
-> a -> b -> IO ()
zipIO_ f arr1 arr2 = do
call <- syncCallback2 ContinueAsync $ \e1 e2 -> f (asLikeJS e1) (asLikeJS e2)
js_zipArray_ call (toJSArray arr1) (toJSArray arr2)
releaseCallback call
{-# INLINE zipiIO_ #-}
zipiIO_ :: ( LikeJSArray ta a
, LikeJSArray tb b )
=> (Int -> ArrayElem a -> ArrayElem b -> IO ())
-> a -> b -> IO ()
zipiIO_ f arr1 arr2 = do
call <- syncCallback3 ContinueAsync $ \e1 e2 i -> f (asLikeJS i) (asLikeJS e1) (asLikeJS e2)
js_zipArray_ call (toJSArray arr1) (toJSArray arr2)
releaseCallback call
{-# NOINLINE unionZip #-}
unionZip :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y )
=> (Int -> Maybe (ArrayElem a) -> Maybe (ArrayElem b) -> y)
-> a -> b -> Array y
unionZip f arr1 arr2 = unsafePerformIO $ do
call <- syncCallbackUnsafe3 $ \e1 e2 i -> asJSVal (f (asLikeJS i) (asLikeJS e1) (asLikeJS e2))
r <- js_unionZipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# INLINE unionZipIO #-}
unionZipIO :: ( LikeJSArray ta a
, LikeJSArray tb b
, LikeJS ty y )
=> (Int -> Maybe (ArrayElem a) -> Maybe (ArrayElem b) -> IO y)
-> a -> b -> IO (Array y)
unionZipIO f arr1 arr2 = do
call <- syncCallbackUnsafeIO3 $ \e1 e2 i -> fmap asJSVal (f (asLikeJS i) (asLikeJS e1) (asLikeJS e2))
r <- js_unionZipArray call (toJSArray arr1) (toJSArray arr2)
r `seq` releaseCallback call
return r
{-# INLINE unionZipIO_ #-}
unionZipIO_ :: ( LikeJSArray ta a
, LikeJSArray tb b )
=> (Int -> Maybe (ArrayElem a) -> Maybe (ArrayElem b) -> IO ())
-> a -> b -> IO ()
unionZipIO_ f arr1 arr2 = do
call <- syncCallback3 ContinueAsync $ \e1 e2 i -> f (asLikeJS i) (asLikeJS e1) (asLikeJS e2)
js_unionZipArray_ call (toJSArray arr1) (toJSArray arr2)
releaseCallback call
instance (Show a, LikeJS ta a) => Show (Array a) where
show = unpack' . js_show . map (pack . show)
{-# RULES
"show/JSStringArray" show = unpack' . js_show
#-}
-- | Sort an array using JavaScript sort function
foreign import javascript unsafe "$1.sort()"
sort :: Array a -> Array a
-- | Remove sequential duplicate elements.
-- This returns a list of unique values if it is used on a previously sorted array.
foreign import javascript unsafe "$1.map(function(e,i){if(e==$1[i+1]){return null;}else{return e;}}).filter(function(e){return e!=null;})"
removeSeqDups :: Array a -> Array a
-- mapping
foreign import javascript unsafe "$2.map(h$retIfDef($1))"
js_mapArray :: Callback f -> Array a -> IO (Array b)
foreign import javascript unsafe "$2.forEach(h$doIfDef($1))"
js_mapArray_ :: Callback f -> Array a -> IO ()
-- folding
foreign import javascript unsafe "$3.reduce(h$retIfDef2oa($1),$2)"
js_foldlArray :: Callback f -> JSVal -> Array a -> IO JSVal
foreign import javascript unsafe "$2.reduce(h$retIfDef2oa($1))"
js_foldl1Array :: Callback f -> Array a -> IO JSVal
foreign import javascript unsafe "$3.reduceRight(h$retIfDef2oa($1),$2)"
js_foldrArray :: Callback f -> JSVal -> Array a -> IO JSVal
foreign import javascript unsafe "$2.reduceRight(h$retIfDef2oa($1))"
js_foldr1Array :: Callback f -> Array a -> IO JSVal
-- zipping
foreign import javascript unsafe "if($3){var f = h$retIfDef2($1); $r = $2.map(function(e,i){return f(e,$3[i],i);});}else{$r = [];}"
js_zipArray :: Callback f -> Array a -> Array b -> IO (Array c)
foreign import javascript unsafe "if($3){var f = h$retIfDef2($1); $2.forEach(function(e,i){f(e,$3[i],i);});}"
js_zipArray_ :: Callback f -> Array a -> Array b -> IO ()
foreign import javascript unsafe "var le = $2 || [], ri = $3 || []; var n = Math.max(le.length, ri.length); $r = new Array(n); for(var i = 0; i < n; i++){$r[i] = $1(le[i],ri[i],i);}"
js_unionZipArray :: Callback f -> Array a -> Array b -> IO (Array c)
foreign import javascript unsafe "var le = $2 || [], ri = $3 || []; var n = Math.max(le.length, ri.length); for(var i = 0; i < n; i++){$1(le[i],ri[i],i);}"
js_unionZipArray_ :: Callback f -> Array a -> Array b -> IO ()
-- filtering
{-# NOINLINE filter #-}
filter :: ( LikeJSArray ta a )
=> (ArrayElem a -> Bool)
-> a -> a
filter f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe1 $ asJSVal . f . asLikeJS
r <- fromJSArray <$> js_filter call (toJSArray arr)
r `seq` releaseCallback call
return r
{-# NOINLINE mapEither #-}
mapEither :: ( LikeJSArray ta a
, LikeJS tx x
, LikeJS ty y)
=> (ArrayElem a -> Either x y)
-> a -> (Array x, Array y)
mapEither f arr = unsafePerformIO $ do
call <- syncCallbackUnsafe1 $ asJSVal . f . asLikeJS
r <- call `seq` js_mapEither call (toJSArray arr)
r `seq` releaseCallback call
return r
foreign import javascript unsafe "$2.filter($1)"
js_filter :: Callback (a -> Bool) -> Array a -> IO (Array a)
foreign import javascript unsafe "var rez = $2.map($1); $r1 = rez.filter(function(e){return !e.isRight();}).map(function(e){return e.left;}); $r2 = rez.filter(function(e){return e.isRight();}).map(function(e){return e.right;});"
js_mapEither :: (Callback (a -> Either b c)) -> Array a -> IO (Array b, Array c)
foreign import javascript unsafe "JSON.stringify($1)"
js_show :: Array a -> JSString
--
--foreign import javascript unsafe "LikeHS.listFromArray($1)"
-- js_ArrayToList :: Array a -> Any
--
--
--foreign import javascript unsafe "LikeHS.listToArray($1)"
-- js_ListToArray :: Any -> Array a
----------------------------------------------------------------------------------------------------
-- custom functions
----------------------------------------------------------------------------------------------------
{-# INLINE length #-}
length :: LikeJSArray ta a => a -> Int
length = js_length . toJSArray
foreign import javascript unsafe "$1.length"
js_length :: Array a -> Int
-- | index JS array
(!) :: LikeJSArray ta a => a -> Int -> ArrayElem a
(!) arr = asLikeJS . js_index (toJSArray arr)
foreign import javascript unsafe "$1[$2]"
js_index :: Array a -> Int -> JSVal
{-# INLINE slice #-}
slice :: LikeJSArray ta a => Int -> Int -> a -> a
slice a b = fromJSArray . js_slice a b . toJSArray
{-# INLINE take #-}
take :: LikeJSArray ta a => Int -> a -> a
take n = fromJSArray . js_slice 0 n . toJSArray
{-# INLINE drop #-}
drop :: LikeJSArray ta a => Int -> a -> a
drop n = fromJSArray . js_slice1 n . toJSArray
foreign import javascript unsafe "$3.slice($1,$2)"
js_slice :: Int -> Int -> Array a -> Array a
foreign import javascript unsafe "$2.slice($1)"
js_slice1 :: Int -> Array a -> Array a
-- | Concatenate two JS arrays
{-# INLINE concat #-}
concat :: LikeJSArray ta a => a -> a -> a
concat a = fromJSArray . js_concat (toJSArray a) . toJSArray
foreign import javascript unsafe "$1.concat($2)"
js_concat :: Array a -> Array a -> Array a
-- | Concatenate array of arrays into single array
foreign import javascript unsafe "[].concat.apply([], $1)"
join :: Array (Array a) -> Array a
foreign import javascript unsafe "[]"
emptyArray :: Array a
|
mb21/qua-kit
|
libs/hs/ghcjs-hs-interop/src/JsHs/Array.hs
|
mit
| 19,063 | 93 | 16 | 5,122 | 5,753 | 2,906 | 2,847 | 401 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Gratte.TextExtractor
( extractText
) where
import Control.Monad
import Control.Monad.Gratte
import Control.Monad.Trans
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.Exit
import System.GratteExternalCommands
import System.IO.Temp
import qualified Filesystem as FS
import qualified Filesystem.Path.CurrentOS as FS
import Gratte.Options
import Gratte.Utils
type TextExtractor = FilePath -> FilePath -> Gratte (Maybe T.Text)
extractText :: FS.FilePath -> Gratte (Maybe T.Text)
extractText file = do
hasOcr <- getAddOption ocr
pdfM <- getAddOption pdfMode
let ext = FS.extension file
opts <- getOptions
let filePath = FS.encodeString file
liftIO $ withSystemTempDirectory "ocr-text" $ \ tempDir ->
withGratte opts $
case (ext, pdfM, hasOcr) of
(Just "pdf", PDFModeImage, True ) -> extractPDFImage tempDir filePath
(Just "pdf", PDFModeImage, False ) -> return Nothing
(Just "pdf", PDFModeText , _ ) -> extractPDFText tempDir filePath
(_ , _ , True ) -> extractImage tempDir filePath
(_ , _ , False ) -> return Nothing
extractImage :: TextExtractor
extractImage tempDir file = do
opts <- getOptions
let tmpFileNoExt = tempDir ++ "/tmp"
liftIO $ do
(exitCode, err) <- execTesseract file tmpFileNoExt
case exitCode of
ExitSuccess -> do
-- caveat: tesseract adds a .txt extension to the output, even if it already exists!
rawText <- TIO.readFile (tmpFileNoExt ++ ".txt")
return . Just $ T.map removeStrangeChars rawText
ExitFailure _ ->
withGratte opts $ do
logError $ "Could not OCR the file: " ++ file
logError err
return Nothing
extractPDFImage :: TextExtractor
extractPDFImage tempDir file = do
opts <- getOptions
liftIO $ do
(exitCode, err) <- execConvert file tempDir
withGratte opts $
case exitCode of
ExitSuccess -> extractSingleImage (FS.decodeString tempDir)
ExitFailure _ -> do
logError $ "Could not convert the PDF file to image: " ++ file
logError err
return Nothing
extractSingleImage :: FS.FilePath -> Gratte (Maybe T.Text)
extractSingleImage tempDir = do
let singleImage = tempDir <//> "single-image.png"
opts <- getOptions
liftIO $ do
imagesBaseNames <- filter (`FS.hasExtension` "png") `liftM` FS.listDirectory tempDir
let imagePaths = map FS.encodeString imagesBaseNames
singleImagePath = FS.encodeString singleImage
(exitCode, err) <- execConvertAppend imagePaths singleImagePath
withGratte opts $
case exitCode of
ExitSuccess -> extractImage (FS.encodeString tempDir) singleImagePath
ExitFailure _ -> do
        logError $ "Could not OCR the image: " ++ singleImagePath
logError err
return Nothing
extractPDFText :: TextExtractor
extractPDFText tempDir file = do
opts <- getOptions
liftIO $ withTempFile tempDir "temp-pdf-text.txt" $ \tempFile h -> do
(exitCode, err) <- execPDFToText file tempFile
case exitCode of
ExitSuccess -> do
rawText <- TIO.hGetContents h
return . Just $ T.map removeStrangeChars rawText
ExitFailure _ ->
withGratte opts $ do
logError $ "Could not convert the pdf file: " ++ file
logError err
return Nothing
removeStrangeChars :: Char -> Char
removeStrangeChars c =
if c `elem` alpha then c else ' '
where alpha = ['a'..'z'] ++ ['A'..'Z'] ++
['0'..'9'] ++ "ÉÈÊÀÂÎÔéèêàâîô."
|
ostapneko/gratte-papier
|
src/Gratte/TextExtractor.hs
|
mit
| 3,759 | 0 | 18 | 1,020 | 1,006 | 501 | 505 | 93 | 5 |
{-# LANGUAGE RecordWildCards #-}
module SyntheticWeb.Plan.Writer (writePlan) where
import Text.Printf (PrintfArg (), printf)
import SyntheticWeb.Plan.Types
import SyntheticWeb.Statistical (Statistical (..))
writePlan :: Plan -> String
writePlan (Plan plan) = go plan
where go = unlines . concatMap writePattern
writePattern :: (Weight, Pattern) -> [String]
writePattern (Weight w, Pattern {..}) =
printf "pattern %s with weight %d [" name w :
replaceLastComma (map writeActivity activities)
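-- Turn the trailing comma of the last rendered activity into the closing
-- bracket, e.g. (illustrative): replaceLastComma [" A,", " B,"] == [" A,", " B]"]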
replaceLastComma :: [String] -> [String]
replaceLastComma [] = ["]"]
replaceLastComma xs =
let x:xs' = reverse xs
x' = uncommify x
in reverse (x':xs')
where
uncommify = go . reverse
go "" = "]"
go (_:ys) = reverse (']':ys)
writeActivity :: Activity -> String
writeActivity (SLEEP duration) =
printf " SLEEP %s," (writeDuration duration)
writeActivity (GET headers download rate) =
printf " GET headers %s download %s rate %s," (show headers)
(writeDownload download)
(writeRate rate)
writeActivity (PUT headers upload) =
printf " PUT headers %s upload %s," (show headers)
(writeUpload upload)
writeActivity (POST headers upload download rate) =
printf " POST headers %s upload %s download %s rate %s,"
(show headers)
(writeUpload upload)
(writeDownload download)
(writeRate rate)
writeDuration :: Duration -> String
writeDuration (Usec stat) = printf "%s usec" (writeStatistical stat)
writeDuration (Msec stat) = printf "%s msec" (writeStatistical stat)
writeDuration (Sec stat) = printf "%s sec" (writeStatistical stat)
writeSize :: Size -> String
writeSize (Size stat) = printf "%s bytes" (writeStatistical stat)
writeStatistical :: PrintfArg a => Statistical a -> String
writeStatistical (Exactly bytes) = printf "exactly %d" bytes
writeStatistical (Uniform range) = printf "uniform %d-%d" `uncurry` range
writeStatistical (Gaussian range) = printf "gaussian %d,%d" `uncurry` range
writeDownload :: Download -> String
writeDownload (Download size) = writeSize size
writeUpload :: Upload -> String
writeUpload (Upload size) = writeSize size
writeRate :: Rate -> String
writeRate Unlimited = "unlimited"
writeRate (LimitedTo size) = printf "limitedTo %s" (writeSize size)
|
kosmoskatten/synthetic-web
|
src/SyntheticWeb/Plan/Writer.hs
|
mit
| 2,406 | 0 | 9 | 543 | 734 | 375 | 359 | 54 | 2 |
-- Help the fruit guy
-- https://www.codewars.com/kata/557af4c6169ac832300000ba
module Codewars.Kata.Fruits where
import Data.Maybe(fromMaybe)
import Data.Char(toLower)
import Data.List (stripPrefix)
removeRotten :: [String] -> [String]
removeRotten = map ((\ f -> fromMaybe f . stripPrefix "rotten" $ f) . map toLower)
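-- Example (added for illustration):
-- removeRotten ["Apple", "rottenBanana"] == ["apple", "banana"]
exampleBasket :: [String]
exampleBasket = removeRotten ["Apple", "rottenBanana", "rottenapple"]  -- ["apple","banana","apple"]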
|
gafiatulin/codewars
|
src/Beta/Fruits.hs
|
mit
| 323 | 0 | 12 | 41 | 94 | 54 | 40 | 6 | 1 |
module Sudoku where
import Data.List.Split
import Data.Tuple
import Data.List
import Control.Applicative
import Control.Arrow
import Control.Lens
type Pos = (Int, Int)
type Point a = (Pos, a)
type SPoint = (Pos, Int)
type Points a = [Point a] -- Numbers to be implemented into board.
-- Board should be a matrix, but not obligatory a square one.
type Line a = [a]
type Board a = [Line a]
type Slots a = Board (Line a) -- slots of unified h/v/s
type Slots3 a = Board (Board a) -- slots of h/v/s
-- L = Lattice
type LSlots a = Board (Point (Line a))
type SLine = Line Int
type SBoard = Board Int
type SSlots = Slots Int
type SSlots3 = Slots3 Int
type SLSlots = LSlots Int
-- Constructors
board :: [Int] -> SBoard
board = boardFrom 0
boardFrom :: a -> [a] -> Board a
boardFrom filling = chunksOf 9 . take 81 . (++ repeat filling)
boardS :: String -> SBoard
boardS = board . map read . words
boardOf :: a -> Board a
boardOf = flip mapB (board []) . const
poses :: Board Pos
poses = map (flip map [0..8] . (,)) [0..8]
-- Generic Transformers
mapB :: (a -> b) -> Board a -> Board b
mapB = map . map
showB :: Show a => Board a -> String
showB = spacify "\n" . map (spacify " ") . mapB show
where spacify s = foldr1 (++) . intersperse s
zipB :: [[a]] -> [[b]] -> [[(a, b)]]
zipB = zipWith zip
-- Line getters
horizontal :: Board a -> Pos -> Line a
horizontal b (h, _) = b !! h
vertical :: Board a -> Pos -> Line a
vertical = (. swap) . horizontal . transpose
square :: Board a -> Pos -> Line a
square b (x, y) = map ((!!) . (b!!)) (sqLine $ div x 3) <*> (sqLine $ div y 3)
where sqLine segment = map (+segment*3) [0..2]
-- Homomorphisms
horizontals :: Board a -> Board a
horizontals = id
verticals :: Board a -> Board a
verticals = transpose
squares :: Board a -> Board a
squares b = withBoard b square toPos
where
toPos = ((*3) *** (*3)) . swap . flip divMod 3
withBoard b f argf = map (f b . argf) [0..8]
-- Point related stuff.
cells :: Board a -> Board (Point a)
cells = zipB poses
implement :: Board a -> Point a -> Board a
implement b ((x, y), v) = b & ix x . ix y .~ v
on :: Board a -> Pos -> a
on b (x, y) = b !! x !! y
-- Slots and data for solving.
unify :: SSlots3 -> SSlots -- Board FieldsTakenIn3 -> Board FieldsNotTaken
unify = mapB $ ([1..9] \\) . concat
takenless :: SBoard -> SSlots -> SSlots
takenless b = mapB decide . cells
where
decide (pos, slots) =
if b `on` pos == 0
then slots
else []
streamMap :: Board a -> Slots3 a
streamMap b = mapB choices poses
where
choices pos = map (($ (b, pos)) . uncurry) [horizontal, vertical, square]
slots :: SBoard -> SSlots
slots = flip takenless =<< unify . streamMap
streams :: Board a -> Line (Board a)
streams b = map ($ b) [horizontals, verticals, squares]
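-- Usage sketch (added for illustration, not part of the original module):
-- on an empty board every cell still admits all nine candidate values.
exampleEmptySlots :: SSlots
exampleEmptySlots = slots (board [])   -- every entry is [1..9]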
|
siers/sudoku-solver
|
Sudoku.hs
|
mit
| 2,853 | 0 | 11 | 701 | 1,226 | 664 | 562 | -1 | -1 |
module Name (BuiltinName (..), LocalName (..), Name (..), qualifiedName, unqualifiedName, NameWith (..), Path (..), ResolvedName, Error (..), resolveNames, ValidationError (..), validateNames) where
import MyPrelude
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Pretty as P
import qualified AST
import AST (AST)
------------------------------------------------------------------------ types
data BuiltinName
= Builtin_Int
| Builtin_Bool
| Builtin_Text
| Builtin_Unit
| Builtin_true
| Builtin_false
| Builtin_ask
| Builtin_say
| Builtin_write
deriving (Generic, Eq, Ord, Enum, Bounded, Show)
instance Enumerable BuiltinName
-- subscopes within each scope are numbered positionally, starting with 0
data Path = Path {
function :: Text,
scope :: [Int]
} deriving (Generic, Eq, Ord, Show)
data LocalName = LocalName {
path :: Path,
givenName :: Text
} deriving (Generic, Eq, Ord, Show)
data Name
= BuiltinName BuiltinName
| FunctionName Text
| Name LocalName
deriving (Generic, Eq, Ord, Show)
data NameWith info = NameWith {
name :: Name,
info :: info
} deriving (Generic, Show, Functor)
instance Eq (NameWith info) where
(==) = (==) `on` name
instance Ord (NameWith info) where
compare = compare `on` name
type ResolvedName = NameWith AST.BindingType
qualifiedName :: Name -> Text
qualifiedName = \case
BuiltinName name -> unqualifiedName (BuiltinName name)
FunctionName name -> "." ++ name
Name localName -> "." ++ function (path localName) ++ "." ++ Text.intercalate "." (map showText (scope (path localName))) ++ "." ++ givenName localName
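-- For example (illustrative): a local name "x" in function "main" with scope
-- path [0, 1] renders as ".main.0.1.x"; a function name "main" renders as ".main".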
unqualifiedName :: Name -> Text
unqualifiedName = \case
BuiltinName builtin -> Text.drop (Text.length "Builtin_" ) (showText builtin)
FunctionName name -> name
Name localName -> givenName localName
------------------------------------------------------------------------ pretty-printing
instance AST.RenderName Name where
renderName defOrUse = let makeName isBuiltin nameType name = P.note (P.Identifier (P.IdentInfo name defOrUse nameType isBuiltin)) (P.pretty name) in \case
Name (LocalName Path { function, scope } given) -> renderedPath ++ renderedGiven
where pathText = function ++ "." ++ foldr (\a b -> showText a ++ "." ++ b) "" scope
renderedPath = makeName False P.Block pathText
renderedGiven = makeName False P.Unit given
FunctionName name -> makeName False P.Function name
BuiltinName builtin -> makeName True P.Unknown (unqualifiedName (BuiltinName builtin))
instance AST.RenderName ResolvedName where
renderName defOrUse (NameWith name _) = AST.renderName defOrUse name
------------------------------------------------------------------------ resolution frontend
class (forall metadata. Monad (m metadata)) => NameResolveM m where
lookupName :: Text -> m metadata ResolvedName
enterScope :: m metadata a -> m metadata a
bindName :: AST.BindingType -> Text -> m metadata ResolvedName
enterMetadata :: metadata -> m metadata a -> m metadata a
enterMetadataOf :: NameResolveM m => NodeWith node metadata name -> m metadata a -> m metadata a
enterMetadataOf = enterMetadata . nodeMetadata
class ResolveNamesIn node where
resolveNamesIn :: NameResolveM m => node metadata Text -> m metadata (node metadata ResolvedName)
instance ResolveNamesIn AST.Type where
resolveNamesIn = \case
AST.NamedType name -> do
resolvedName <- lookupName name
return (AST.NamedType resolvedName)
AST.FunctionType parameters returns -> do
resolvedParameters <- mapM resolveNamesIn parameters
resolvedReturns <- resolveNamesIn returns
return (AST.FunctionType resolvedParameters resolvedReturns)
instance ResolveNamesIn AST.Function where
resolveNamesIn AST.Function { AST.functionName, AST.arguments, AST.returns, AST.body } = do
-- the argument types and return type are in global scope, must be resolved before entering any scope
argumentTypes <- forM arguments \argument -> do
enterMetadataOf argument do
(resolveNamesIn . AST.argumentType . nodeWithout) argument
resolvedReturns <- mapM resolveNamesIn returns
resolvedFunction <- bindName AST.Let functionName
-- the argument names are in scope for the body, and may also be shadowed by it
(resolvedArguments, resolvedBody) <- enterScope do
resolvedArguments <- forM (zip argumentTypes arguments) \(resolvedType, argument) -> do
enterMetadataOf argument do
forNodeM argument \AST.Argument { AST.argumentName } -> do
resolvedName <- bindName AST.Let argumentName
return (AST.Argument resolvedName resolvedType)
resolvedBody <- resolveNamesIn body
return (resolvedArguments, resolvedBody)
return AST.Function {
AST.functionName = resolvedFunction,
AST.arguments = resolvedArguments,
AST.returns = resolvedReturns,
AST.body = resolvedBody
}
instance ResolveNamesIn AST.Block where
resolveNamesIn AST.Block { AST.exitTarget, AST.statements } = enterScope do
resolvedTarget <- mapM (bindName AST.Let) exitTarget
resolvedStatements <- mapM resolveNamesIn statements
return (AST.Block resolvedTarget resolvedStatements)
instance ResolveNamesIn AST.Statement where
resolveNamesIn = \case
AST.Binding btype name expr -> do
-- resolve the expression BEFORE binding the name:
-- the name should not be in scope for the expression!
resolvedExpr <- resolveNamesIn expr
fullName <- bindName btype name
return (AST.Binding btype fullName resolvedExpr)
AST.Assign var expr -> do
resolvedVar <- lookupName var
resolvedExpr <- resolveNamesIn expr
return (AST.Assign resolvedVar resolvedExpr)
AST.IfThen expr body -> do
resolvedExpr <- resolveNamesIn expr
resolvedBody <- resolveNamesIn body
return (AST.IfThen resolvedExpr resolvedBody)
AST.IfThenElse expr body1 body2 -> do
resolvedExpr <- resolveNamesIn expr
resolvedBody1 <- resolveNamesIn body1
resolvedBody2 <- resolveNamesIn body2
return (AST.IfThenElse resolvedExpr resolvedBody1 resolvedBody2)
AST.Forever body -> do
resolvedBody <- resolveNamesIn body
return (AST.Forever resolvedBody)
AST.While expr body -> do
resolvedExpr <- resolveNamesIn expr
resolvedBody <- resolveNamesIn body
return (AST.While resolvedExpr resolvedBody)
AST.Return target maybeExpr -> do
resolvedTarget <- lookupName target
resolvedExpr <- mapM resolveNamesIn maybeExpr
return (AST.Return resolvedTarget resolvedExpr)
AST.Break target -> do
resolvedTarget <- lookupName target
return (AST.Break resolvedTarget)
AST.Expression expr -> do
resolvedExpr <- resolveNamesIn expr
return (AST.Expression resolvedExpr)
-- We used to be able to do this as just `mapM lookupName`, but that doesn't record metadata...
-- Wonder if we could do anything to make it work "automatically" again...
instance ResolveNamesIn AST.Expression where
resolveNamesIn = \case
AST.Named n -> do
resolvedName <- lookupName n
return (AST.Named resolvedName)
AST.UnaryOperator op expr -> do
resolvedExpr <- resolveNamesIn expr
return (AST.UnaryOperator op resolvedExpr)
AST.BinaryOperator expr1 op expr2 -> do
resolvedExpr1 <- resolveNamesIn expr1
resolvedExpr2 <- resolveNamesIn expr2
return (AST.BinaryOperator resolvedExpr1 op resolvedExpr2)
AST.Call fn args -> do
resolvedFn <- resolveNamesIn fn
resolvedArgs <- mapM resolveNamesIn args
return (AST.Call resolvedFn resolvedArgs)
AST.NumberLiteral number -> do
return (AST.NumberLiteral number)
AST.TextLiteral text -> do
return (AST.TextLiteral text)
instance ResolveNamesIn node => ResolveNamesIn (NodeWith node) where
resolveNamesIn node = do
enterMetadataOf node do
mapNodeM resolveNamesIn node
------------------------------------------------------------------------ resolution backend
data Error
= NameNotFound Text Path
| NameConflict Text Path
deriving (Generic, Show)
newtype NameResolve metadata a = NameResolve {
runNameResolve :: ExceptT Error (State (Context metadata)) a
} deriving (Functor, Applicative, Monad, MonadState (Context metadata), MonadError Error)
resolveNames :: AST metadata Text -> Either (With metadata Error) (AST metadata ResolvedName)
resolveNames = plumbMetadata . runState (Context [] [] []) . runExceptT . runNameResolve . mapM resolveNamesIn where
plumbMetadata = \case
(Right result, _ ) -> Right result
(Left error, context) -> Left (With (assert (head (metadata context))) error)
-- the stack of scopes we are currently inside
-- fst: how many sub-scopes within that scope we have visited so far
-- snd: the names bound within that scope
type LocalContext = [(Int, Map Text AST.BindingType)] -- TODO maybe use Natural and NonEmpty here
data Context metadata = Context {
functions :: [Text],
locals :: LocalContext,
metadata :: [metadata]
} deriving (Generic, Show)
currentFunction :: Context metadata -> Text
currentFunction = assert . head . functions
lookupLocal :: Text -> LocalContext -> Maybe ([Int], AST.BindingType)
lookupLocal name = \case
[] -> Nothing
((_, names) : parent) -> case Map.lookup name names of
Just bindingType -> Just (map fst parent, bindingType)
Nothing -> lookupLocal name parent
lookupInContext :: Text -> Context metadata -> Maybe ResolvedName
lookupInContext givenName context@Context { functions, locals } = oneOf [tryLocal, tryFunction, tryBuiltin] where
tryLocal = fmap makeLocalName (lookupLocal givenName locals) where
makeLocalName (scope, info) = NameWith { name = Name LocalName { path = Path { function = currentFunction context, scope }, givenName }, info }
tryFunction = justIf (elem givenName functions) NameWith { name = FunctionName givenName, info = AST.Let }
tryBuiltin = fmap makeBuiltinName (lookup ("Builtin_" ++ givenName) builtinNames) where
builtinNames = map (\builtinName -> (showText builtinName, builtinName)) (enumerate @BuiltinName)
makeBuiltinName builtinName = NameWith { name = BuiltinName builtinName, info = AST.Let }
instance NameResolveM NameResolve where
lookupName name = do
context <- getState
case lookupInContext name context of
Just found -> return found
Nothing -> throwError (NameNotFound name (Path (currentFunction context) (map fst (locals context))))
enterScope action = do
modifyM (field @"locals") (prepend (0, Map.empty))
result <- action
newLocals <- modifyM (field @"locals") (assert . tail)
case newLocals of
(scopeID, names) : rest -> do
assertM (scopeID >= 0)
setM (field @"locals") ((scopeID + 1, names) : rest)
return result
[] -> do
return result
bindName info name = do
context <- getState
case locals context of
[] -> do
doModifyM (field @"functions") \functions -> do
when (elem name functions) do
throwError (NameConflict name (Path name [])) -- TODO should be a nil path instead...?
return (prepend name functions)
return NameWith { name = FunctionName name, info }
(scopeID, names) : rest -> do
when (Map.member name names) do
throwError (NameConflict name (Path (currentFunction context) (map fst (locals context))))
setM (field @"locals") ((scopeID, Map.insert name info names) : rest)
return NameWith { name = Name LocalName { path = Path { function = currentFunction context, scope = map fst rest }, givenName = name }, info }
enterMetadata metadata action = do
modifyM (field @"metadata") (prepend metadata)
result <- action
modifyM (field @"metadata") (assert . tail)
return result
------------------------------------------------------------------------ validation
data ValidationError info
= NotInScope Name
| Redefined Name
| InfoMismatch (NameWith info) (NameWith info)
deriving (Generic, Show)
-- This checks that:
-- * Each name is in scope where it is used.
-- * A name is not defined more than once by the same scope.
-- * The info stored alongside the name is the same at each of its occurrences.
-- This does NOT check that:
-- * The `path` component of the name is correct. This is regarded as an implementation detail, subject to change.
-- * The binding types are stored correctly. This is an unfortunate limitation of being polymorphic over the `info` type.
validateNames :: Eq info => AST metadata (NameWith info) -> Either (ValidationError info) ()
validateNames = runExcept . evalStateT [Map.empty, builtinNames] . mapM_ validate where
builtinNames = Map.fromList (zip (map BuiltinName (enumerate @BuiltinName)) (repeat Nothing))
type ValidateM info = StateT [Map Name (Maybe info)] (Except (ValidationError info))
class Validate node where
validate :: Eq info => node metadata (NameWith info) -> ValidateM info ()
instance Validate AST.Type where
validate = \case
AST.NamedType name -> do
validateName name
AST.FunctionType parameters returns -> do
mapM_ validate parameters
validate returns
instance Validate AST.Function where
validate function = do
mapM_ validate (map (AST.argumentType . nodeWithout) (AST.arguments function))
mapM_ validate (AST.returns function)
recordName (AST.functionName function)
modifyState (prepend Map.empty)
mapM_ recordName (map (AST.argumentName . nodeWithout) (AST.arguments function))
validate (AST.body function)
modifyState (assert . tail)
return ()
instance Validate AST.Block where
validate block = do
modifyState (prepend Map.empty)
mapM_ recordName (AST.exitTarget block)
mapM_ validate (AST.statements block)
modifyState (assert . tail)
return ()
instance Validate AST.Statement where
validate = \case
AST.Binding _ name expr -> do
validate expr
recordName name
AST.Assign n expr -> do
validate expr
validateName n
AST.IfThen expr body -> do
validate expr
validate body
AST.IfThenElse expr body1 body2 -> do
validate expr
mapM_ validate [body1, body2]
AST.Forever body -> do
validate body
AST.While expr body -> do
validate expr
validate body
AST.Return target maybeExpr -> do
validateName target
mapM_ validate maybeExpr
AST.Break target -> do
validateName target
AST.Expression expr -> do
validate expr
instance Validate AST.Expression where
validate = \case
AST.Named n -> do
validateName n
AST.UnaryOperator _ expr -> do
validate expr
AST.BinaryOperator expr1 _ expr2 -> do
mapM_ validate [expr1, expr2]
AST.NumberLiteral _ -> do
return ()
AST.TextLiteral _ -> do
return ()
AST.Call fn args -> do
validate fn
mapM_ validate args
instance Validate node => Validate (NodeWith node) where
validate = validate . nodeWithout
validateName :: Eq info => NameWith info -> ValidateM info ()
validateName (NameWith name info1) = do
context <- getState
case Map.lookup name (Map.unions context) of
Nothing -> do
throwError (NotInScope name)
Just Nothing -> do
return () -- builtin names have no stored info (TODO?)
Just (Just info2) -> do
when (info1 != info2) do
throwError (InfoMismatch (NameWith name info1) (NameWith name info2))
recordName :: NameWith info -> ValidateM info ()
recordName (NameWith name info) = do
doModifyState \context -> do
let scope = assert (head context)
when (Map.member name scope) do
throwError (Redefined name)
return (prepend (Map.insert name (Just info) scope) (assert (tail context)))
return ()
|
glaebhoerl/stageless
|
src/Name.hs
|
mit
| 17,249 | 0 | 27 | 4,652 | 4,841 | 2,370 | 2,471 | -1 | -1 |
import Data.List
import Control.Monad
replace :: Int -> a -> [a] -> [a]
replace index element list = (take index list) ++ [element] ++ (drop (index+1) list)
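-- e.g. (added for illustration): replace 1 'x' "abc" == "axc"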
type Matrix a = [[a]]
replace' :: Int -> Int -> a -> Matrix a -> Matrix a
replace' x y element matrix = replace x (replace y element (matrix !! x)) matrix
diagonal :: Matrix a -> [a]
diagonal m = zipWith (!!) m [0..]
diagonals :: Matrix a -> [[a]]
diagonals matrix =
let tails' = tail . tails
diagonalsNW m = map diagonal ([m] ++ tails' m ++ tails' (transpose m))
in diagonalsNW matrix ++ diagonalsNW (map reverse matrix)
data Mark = O | X deriving (Eq, Show)
type Cell = Maybe Mark
type Pattern = [Cell]
type Board = Matrix Cell
generateBoard :: Int -> Board
generateBoard size = [ [Nothing | x <- [1..size]] | y <- [1..size]]
patterns :: Board -> [Pattern]
patterns board = board ++ (transpose board) ++ (diagonals board)
winner :: Board -> Maybe Mark
winner board
| any (isInfixOf [Just O, Just O, Just O, Just O, Just O]) (patterns board) = Just O
| any (isInfixOf [Just X, Just X, Just X, Just X, Just X]) (patterns board) = Just X
| otherwise = Nothing
aiMove :: Board -> Board
aiMove board = maximumBy (\a b -> compare (evaluate a) (evaluate b)) $ aiMoves board
aiMoves :: Board -> [Board]
aiMoves board =
let lengths = [0 .. (length board - 1)]
available x y = board !! x !! y == Nothing
in [ replace' x y (Just X) board | x <- lengths, y <- lengths, available x y ]
evaluate :: Board -> Int
evaluate board =
let knowledge = [
          -- Not the brightest AI, it has only a few rules; add more if you wish!
( [Just O , Just O , Just O , Just O , Nothing] , -1000 ),
( [Just O , Just O , Just O , Nothing , Just O] , -1000 ),
( [Just O , Just O , Nothing , Just O , Just O] , -1000 ),
( [Just O , Just O , Just O , Nothing] , -100 ),
( [Just O , Just O , Nothing , Just O] , -100 ),
( [Just X , Just X , Just X , Just X , Just X] , 1000000 ),
( [Just X , Just X , Just X , Just X , Nothing] , 100 ),
( [Just X , Just X , Just X , Nothing , Nothing] , 4 ),
( [Just X , Just X , Nothing , Nothing , Nothing] , 2 ),
( [Just X , Nothing , Nothing , Nothing , Nothing] , 1 )
]
ratePattern' p = map (\(k,s) -> if (isInfixOf k p) then s else 0) knowledge
ratePattern p = (ratePattern' p) ++ (ratePattern' $ reverse p)
in sum $ concat $ map ratePattern (patterns board)
main = play (generateBoard 11)
display :: Board -> IO ()
display board = do
let display' c = if c==Nothing then " _ " else if c==(Just O) then " o " else " x "
spacedIndex = [(show x) ++ " " | x <- [0..9]] ++ (map show [10..(length board)])
mapM_ putStr ([" "] ++ (intersperse " " (tail spacedIndex)) ++ ["\n"])
mapM_ ( \ (index, row) -> do
putStr $ (spacedIndex !! index) ++ (concat (map display' row)) ++ "\n"
) (zipWith (\ a b -> (a,b)) [1..] board)
play :: Board -> IO ()
play board
| winner board == (Just O) = display board >> putStrLn "You have won."
| winner board == (Just X) = display board >> putStrLn "The computer has won."
| otherwise = do
display board >> putStrLn "Input coordinates in format: x,y."
input <- getLine
let (x,y) = break (==',') input
ix = (read x) -1
iy = (read $ tail y) -1
board' = replace' ix iy (Just O) board
play (if winner board' == Nothing then aiMove board' else board')
|
nbartlomiej/takefive
|
takefive.hs
|
mit
| 3,544 | 0 | 17 | 1,003 | 1,676 | 874 | 802 | 71 | 3 |
module RandomSupply where
import Supply
import System.Random hiding(next)
randomsIO :: Random a => IO [a]
randomsIO =
getStdRandom $ \g ->
let (a, b) = split g
in (randoms a, b)
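-- Usage sketch (added for illustration):
-- take 5 <$> (randomsIO :: IO [Int]) yields five fresh random Ints.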
|
dservgun/haskell_test_code
|
src/RandomSupply.hs
|
gpl-2.0
| 210 | 0 | 11 | 63 | 80 | 44 | 36 | 8 | 1 |
module TestSuite where
import Driver
import Control.Monad.Identity
import Data.Traversable
import Text.Show.Functions ()
main :: IO ()
main = Driver.main tests
type Prop_Functor f a = f a -> Bool
prop_functor :: (Eq (f a), Functor f) => f a -> Bool
prop_functor ws = fmap id ws == ws
prop_traversable :: (Eq (t b), Traversable t) => t a -> (a -> b) -> Bool
prop_traversable ws f = runIdentity (Data.Traversable.mapM (Identity . f) ws) == fmap f ws
tests :: [(String, Int -> IO (Bool, Int))]
tests =
[ -- Format like this in the future, but no tests are applicable here:
-- ("WindowSet prop_traversable" , mytest (prop_traversable :: WindowSet Int -> (Int -> Int) -> Bool))
]
|
codemac/yi-editor
|
tests/TestSuite.hs
|
gpl-2.0
| 699 | 0 | 10 | 143 | 232 | 127 | 105 | 15 | 1 |
{-| Implementation of the iallocator interface.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Backend.IAlloc
( readRequest
, runIAllocator
, processRelocate
, loadData
) where
import Data.Either ()
import Data.Maybe (fromMaybe, isJust, fromJust)
import Data.List
import Control.Monad
import System.Time
import Text.JSON (JSObject, JSValue(JSArray),
makeObj, encodeStrict, decodeStrict, fromJSObject, showJSON)
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Nic as Nic
import qualified Ganeti.Constants as C
import Ganeti.HTools.CLI
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON
import Ganeti.Utils
{-# ANN module "HLint: ignore Eta reduce" #-}
-- | Type alias for the result of an IAllocator call.
type IAllocResult = (String, JSValue, Node.List, Instance.List)
-- | Parse a NIC within an instance (in a creation request)
parseNic :: String -> JSRecord -> Result Nic.Nic
parseNic n a = do
mac <- maybeFromObj a "mac"
ip <- maybeFromObj a "ip"
mode <- maybeFromObj a "mode" >>= \m -> case m of
Just "bridged" -> Ok $ Just Nic.Bridged
Just "routed" -> Ok $ Just Nic.Routed
Just "openvswitch" -> Ok $ Just Nic.OpenVSwitch
Nothing -> Ok Nothing
_ -> Bad $ "invalid NIC mode in instance " ++ n
link <- maybeFromObj a "link"
bridge <- maybeFromObj a "bridge"
network <- maybeFromObj a "network"
return (Nic.create mac ip mode link bridge network)
-- | Parse the basic specifications of an instance.
--
-- Instances in the cluster instance list and the instance in an
-- 'Allocate' request share some common properties, which are read by
-- this function.
parseBaseInstance :: String
-> JSRecord
-> Result (String, Instance.Instance)
parseBaseInstance n a = do
let errorMessage = "invalid data for instance '" ++ n ++ "'"
let extract x = tryFromObj errorMessage a x
disk <- extract "disk_space_total"
jsdisks <- extract "disks" >>= toArray >>= asObjectList
dsizes <- mapM (flip (tryFromObj errorMessage) "size" . fromJSObject) jsdisks
dspindles <- mapM (annotateResult errorMessage .
flip maybeFromObj "spindles" . fromJSObject) jsdisks
let disks = zipWith Instance.Disk dsizes dspindles
mem <- extract "memory"
vcpus <- extract "vcpus"
tags <- extract "tags"
dt <- extract "disk_template"
su <- extract "spindle_use"
nics <- extract "nics" >>= toArray >>= asObjectList >>=
mapM (parseNic n . fromJSObject)
return
(n,
Instance.create n mem disk disks vcpus Running tags True 0 0 dt su nics)
-- | Parses an instance as found in the cluster instance list.
parseInstance :: NameAssoc -- ^ The node name-to-index association list
-> String -- ^ The name of the instance
-> JSRecord -- ^ The JSON object
-> Result (String, Instance.Instance)
parseInstance ktn n a = do
base <- parseBaseInstance n a
nodes <- fromObj a "nodes"
(pnode, snodes) <-
case nodes of
[] -> Bad $ "empty node list for instance " ++ n
x:xs -> readEitherString x >>= \x' -> return (x', xs)
pidx <- lookupNode ktn n pnode
sidx <- case snodes of
[] -> return Node.noSecondary
x:_ -> readEitherString x >>= lookupNode ktn n
return (n, Instance.setBoth (snd base) pidx sidx)
-- | Parses a node as found in the cluster node list.
parseNode :: NameAssoc -- ^ The group association
-> String -- ^ The node's name
-> JSRecord -- ^ The JSON object
-> Result (String, Node.Node)
parseNode ktg n a = do
let desc = "invalid data for node '" ++ n ++ "'"
extract x = tryFromObj desc a x
offline <- extract "offline"
drained <- extract "drained"
guuid <- extract "group"
vm_capable <- annotateResult desc $ maybeFromObj a "vm_capable"
let vm_capable' = fromMaybe True vm_capable
gidx <- lookupGroup ktg n guuid
ndparams <- extract "ndparams" >>= asJSObject
excl_stor <- tryFromObj desc (fromJSObject ndparams) "exclusive_storage"
let live = not offline && not drained && vm_capable'
lvextract def = eitherLive live def . extract
sptotal <- if excl_stor
then lvextract 0 "total_spindles"
else tryFromObj desc (fromJSObject ndparams) "spindle_count"
spfree <- lvextract 0 "free_spindles"
mtotal <- lvextract 0.0 "total_memory"
mnode <- lvextract 0 "reserved_memory"
mfree <- lvextract 0 "free_memory"
dtotal <- lvextract 0.0 "total_disk"
dfree <- lvextract 0 "free_disk"
ctotal <- lvextract 0.0 "total_cpus"
cnos <- lvextract 0 "reserved_cpus"
let node = Node.create n mtotal mnode mfree dtotal dfree ctotal cnos
(not live) sptotal spfree gidx excl_stor
return (n, node)
-- | Parses a group as found in the cluster group list.
parseGroup :: String -- ^ The group UUID
-> JSRecord -- ^ The JSON object
-> Result (String, Group.Group)
parseGroup u a = do
let extract x = tryFromObj ("invalid data for group '" ++ u ++ "'") a x
name <- extract "name"
apol <- extract "alloc_policy"
nets <- extract "networks"
ipol <- extract "ipolicy"
tags <- extract "tags"
return (u, Group.create name u apol nets ipol tags)
-- | Top-level parser.
--
-- The result is a tuple of any warning messages and the parsed
-- request; if parsing the input data fails, we return a 'Bad'
-- value.
parseData :: ClockTime -- ^ The current time
-> String -- ^ The JSON message as received from Ganeti
-> Result ([String], Request) -- ^ Result tuple
parseData now body = do
decoded <- fromJResult "Parsing input IAllocator message" (decodeStrict body)
let obj = fromJSObject decoded
extrObj x = tryFromObj "invalid iallocator message" obj x
-- request parser
request <- liftM fromJSObject (extrObj "request")
let extrFromReq r x = tryFromObj "invalid request dict" r x
let extrReq x = extrFromReq request x
-- existing group parsing
glist <- liftM fromJSObject (extrObj "nodegroups")
gobj <- mapM (\(x, y) -> asJSObject y >>= parseGroup x . fromJSObject) glist
let (ktg, gl) = assignIndices gobj
-- existing node parsing
nlist <- liftM fromJSObject (extrObj "nodes")
nobj <- mapM (\(x,y) ->
asJSObject y >>= parseNode ktg x . fromJSObject) nlist
let (ktn, nl) = assignIndices nobj
-- existing instance parsing
ilist <- extrObj "instances"
let idata = fromJSObject ilist
iobj <- mapM (\(x,y) ->
asJSObject y >>= parseInstance ktn x . fromJSObject) idata
let (kti, il) = assignIndices iobj
-- cluster tags
ctags <- extrObj "cluster_tags"
cdata1 <- mergeData [] [] [] [] now (ClusterData gl nl il ctags defIPolicy)
let (msgs, fix_nl) = checkData (cdNodes cdata1) (cdInstances cdata1)
cdata = cdata1 { cdNodes = fix_nl }
map_n = cdNodes cdata
map_i = cdInstances cdata
map_g = cdGroups cdata
optype <- extrReq "type"
rqtype <-
case () of
_ | optype == C.iallocatorModeAlloc ->
do
rname <- extrReq "name"
req_nodes <- extrReq "required_nodes"
inew <- parseBaseInstance rname request
let io = snd inew
return $ Allocate io req_nodes
| optype == C.iallocatorModeReloc ->
do
rname <- extrReq "name"
ridx <- lookupInstance kti rname
req_nodes <- extrReq "required_nodes"
ex_nodes <- extrReq "relocate_from"
ex_idex <- mapM (Container.findByName map_n) ex_nodes
return $ Relocate ridx req_nodes (map Node.idx ex_idex)
| optype == C.iallocatorModeChgGroup ->
do
rl_names <- extrReq "instances"
rl_insts <- mapM (liftM Instance.idx .
Container.findByName map_i) rl_names
gr_uuids <- extrReq "target_groups"
gr_idxes <- mapM (liftM Group.idx .
Container.findByName map_g) gr_uuids
return $ ChangeGroup rl_insts gr_idxes
| optype == C.iallocatorModeNodeEvac ->
do
rl_names <- extrReq "instances"
rl_insts <- mapM (Container.findByName map_i) rl_names
let rl_idx = map Instance.idx rl_insts
rl_mode <- extrReq "evac_mode"
return $ NodeEvacuate rl_idx rl_mode
| optype == C.iallocatorModeMultiAlloc ->
do
arry <- extrReq "instances" :: Result [JSObject JSValue]
let inst_reqs = map fromJSObject arry
prqs <- mapM (\r ->
do
rname <- extrFromReq r "name"
req_nodes <- extrFromReq r "required_nodes"
inew <- parseBaseInstance rname r
let io = snd inew
return (io, req_nodes)) inst_reqs
return $ MultiAllocate prqs
| otherwise -> fail ("Invalid request type '" ++ optype ++ "'")
return (msgs, Request rqtype cdata)
-- | Formats the result into a valid IAllocator response message.
formatResponse :: Bool -- ^ Whether the request was successful
-> String -- ^ Information text
-> JSValue -- ^ The JSON encoded result
-> String -- ^ The full JSON-formatted message
formatResponse success info result =
let e_success = ("success", showJSON success)
e_info = ("info", showJSON info)
e_result = ("result", result)
in encodeStrict $ makeObj [e_success, e_info, e_result]
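-- A minimal usage sketch (the arguments below are hypothetical); the
-- three values end up under the "success", "info" and "result" keys of
-- the encoded object:
--
-- > formatResponse True "Request successful" (JSArray [])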
-- | Flatten the log of a solution into a string.
describeSolution :: Cluster.AllocSolution -> String
describeSolution = intercalate ", " . Cluster.asLog
-- | Convert allocation/relocation results into the result format.
formatAllocate :: Instance.List -> Cluster.AllocSolution -> Result IAllocResult
formatAllocate il as = do
let info = describeSolution as
case Cluster.asSolution as of
Nothing -> fail info
Just (nl, inst, nodes, _) ->
do
let il' = Container.add (Instance.idx inst) inst il
return (info, showJSON $ map Node.name nodes, nl, il')
-- | Convert multi allocation results into the result format.
formatMultiAlloc :: (Node.List, Instance.List, Cluster.AllocSolutionList)
-> Result IAllocResult
formatMultiAlloc (fin_nl, fin_il, ars) =
let rars = reverse ars
(allocated, failed) = partition (isJust . Cluster.asSolution . snd) rars
aars = map (\(_, ar) ->
let (_, inst, nodes, _) = fromJust $ Cluster.asSolution ar
iname = Instance.name inst
nnames = map Node.name nodes
in (iname, nnames)) allocated
fars = map (\(inst, ar) ->
let iname = Instance.name inst
in (iname, describeSolution ar)) failed
info = show (length failed) ++ " instances failed to allocate and " ++
show (length allocated) ++ " were allocated successfully"
in return (info, showJSON (aars, fars), fin_nl, fin_il)
-- | Convert a node-evacuation/change group result.
formatNodeEvac :: Group.List
-> Node.List
-> Instance.List
-> (Node.List, Instance.List, Cluster.EvacSolution)
-> Result IAllocResult
formatNodeEvac gl nl il (fin_nl, fin_il, es) =
let iname = Instance.name . flip Container.find il
nname = Node.name . flip Container.find nl
gname = Group.name . flip Container.find gl
fes = map (\(idx, msg) -> (iname idx, msg)) $ Cluster.esFailed es
mes = map (\(idx, gdx, ndxs) -> (iname idx, gname gdx, map nname ndxs))
$ Cluster.esMoved es
failed = length fes
moved = length mes
info = show failed ++ " instances failed to move and " ++ show moved ++
" were moved successfully"
in Ok (info, showJSON (mes, fes, Cluster.esOpCodes es), fin_nl, fin_il)
-- | Runs relocate for a single instance.
--
-- This is a wrapper over the 'Cluster.tryNodeEvac' function, run with a
-- single instance (ours); it additionally checks that the result it got
-- (in the nodes field) is actually consistent, since tryNodeEvac is
-- designed to output primarily an opcode list, not a node list.
processRelocate :: Group.List -- ^ The group list
-> Node.List -- ^ The node list
-> Instance.List -- ^ The instance list
-> Idx -- ^ The index of the instance to move
-> Int -- ^ The number of nodes required
-> [Ndx] -- ^ Nodes which should not be used
-> Result (Node.List, Instance.List, [Ndx]) -- ^ Solution list
processRelocate gl nl il idx 1 exndx = do
let orig = Container.find idx il
sorig = Instance.sNode orig
porig = Instance.pNode orig
mir_type = Instance.mirrorType orig
(exp_node, node_type, reloc_type) <-
case mir_type of
MirrorNone -> fail "Can't relocate non-mirrored instances"
MirrorInternal -> return (sorig, "secondary", ChangeSecondary)
MirrorExternal -> return (porig, "primary", ChangePrimary)
when (exndx /= [exp_node]) .
-- FIXME: we can't use the excluded nodes here; the logic is
-- already _but only partially_ implemented in tryNodeEvac...
fail $ "Unsupported request: excluded nodes not equal to\
\ instance's " ++ node_type ++ "(" ++ show exp_node
++ " versus " ++ show exndx ++ ")"
(nl', il', esol) <- Cluster.tryNodeEvac gl nl il reloc_type [idx]
nodes <- case lookup idx (Cluster.esFailed esol) of
Just msg -> fail msg
Nothing ->
case lookup idx (map (\(a, _, b) -> (a, b))
(Cluster.esMoved esol)) of
Nothing ->
fail "Internal error: lost instance idx during move"
Just n -> return n
let inst = Container.find idx il'
pnode = Instance.pNode inst
snode = Instance.sNode inst
nodes' <-
case mir_type of
MirrorNone -> fail "Internal error: mirror type none after relocation?!"
MirrorInternal ->
do
when (snode == sorig) $
fail "Internal error: instance didn't change secondary node?!"
when (snode == pnode) $
fail "Internal error: selected primary as new secondary?!"
if nodes == [pnode, snode]
then return [snode] -- only the new secondary is needed
else fail $ "Internal error: inconsistent node list (" ++
show nodes ++ ") versus instance nodes (" ++ show pnode ++
"," ++ show snode ++ ")"
MirrorExternal ->
do
when (pnode == porig) $
fail "Internal error: instance didn't change primary node?!"
if nodes == [pnode]
then return nodes
else fail $ "Internal error: inconsistent node list (" ++
show nodes ++ ") versus instance node (" ++ show pnode ++ ")"
return (nl', il', nodes')
processRelocate _ _ _ _ reqn _ =
fail $ "Exchange " ++ show reqn ++ " nodes mode is not implemented"
formatRelocate :: (Node.List, Instance.List, [Ndx])
-> Result IAllocResult
formatRelocate (nl, il, ndxs) =
let nodes = map (`Container.find` nl) ndxs
names = map Node.name nodes
in Ok ("success", showJSON names, nl, il)
-- | Process a request and return new node lists.
processRequest :: Request -> Result IAllocResult
processRequest request =
let Request rqtype (ClusterData gl nl il _ _) = request
in case rqtype of
Allocate xi reqn ->
Cluster.tryMGAlloc gl nl il xi reqn >>= formatAllocate il
Relocate idx reqn exnodes ->
processRelocate gl nl il idx reqn exnodes >>= formatRelocate
ChangeGroup gdxs idxs ->
Cluster.tryChangeGroup gl nl il idxs gdxs >>=
formatNodeEvac gl nl il
NodeEvacuate xi mode ->
Cluster.tryNodeEvac gl nl il mode xi >>=
formatNodeEvac gl nl il
MultiAllocate xies ->
Cluster.allocList gl nl il xies [] >>= formatMultiAlloc
-- | Reads the request from the data file(s).
readRequest :: FilePath -> IO Request
readRequest fp = do
now <- getClockTime
input_data <- case fp of
"-" -> getContents
_ -> readFile fp
case parseData now input_data of
Bad err -> exitErr err
Ok (fix_msgs, rq) -> maybeShowWarnings fix_msgs >> return rq
-- | Main iallocator pipeline.
runIAllocator :: Request -> (Maybe (Node.List, Instance.List), String)
runIAllocator request =
let (ok, info, result, cdata) =
case processRequest request of
Ok (msg, r, nl, il) -> (True, "Request successful: " ++ msg, r,
Just (nl, il))
Bad msg -> (False, "Request failed: " ++ msg, JSArray [], Nothing)
rstring = formatResponse ok info result
in (cdata, rstring)
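-- A minimal sketch of driving the pipeline end to end (the request file
-- path is hypothetical):
--
-- > do rq <- readRequest "/tmp/iallocator-request.json"
-- >    let (_, response) = runIAllocator rq
-- >    putStrLn response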
-- | Load the data from an iallocation request file
loadData :: FilePath -- ^ The path to the file
-> IO (Result ClusterData)
loadData fp = do
Request _ cdata <- readRequest fp
return $ Ok cdata
|
narurien/ganeti-ceph
|
src/Ganeti/HTools/Backend/IAlloc.hs
|
gpl-2.0
| 18,477 | 0 | 23 | 5,289 | 4,745 | 2,362 | 2,383 | 354 | 9 |
{-
notation:
t in CL(S),
Nf(t) - normal form of t
DAG(t) - DAG of t with complete exact hash consing
(equivalently, reduced in the BDD sense: each node has an address,
for all addresses p, q: @(p,q) occurs at most once)
conjectures:
-}
import S.DAG
import S.Type
import S.Size
import S.Table (normalizing)
import Data.Ratio
import Control.Concurrent.STM
import Control.Monad ( when, forM_ )
import System.IO
main = do
top <- atomically $ newTVar 0
let ts = concat $ terms_for [s]
-- map monster2 [ 1 .. ]
-- map monster3 [ 1 .. ]
forM_ ts $ \ t ->
when ( normalizing t ) $ do
let (s,o) = normal t
(up, this) <- atomically $ do
let this = fromIntegral s % size t
-- fromIntegral o % fromIntegral s
-- fromIntegral o % size t
prev <- readTVar top
writeTVar top $ max this prev
return ( this >= prev, this )
when up $ do
print ( (fromRational this) :: Double
, t,size t
-- , size e
, s)
hFlush stdout
line k =
unspine $ replicate (k + 1) s
monster1 k =
unspine [ s, a, line k, s, s, s]
monster2 k =
unspine [ s, t, t, line k, s]
monster3 k =
let tee k = foldr app (app s a) $ replicate k t
in unspine [ tee k,s,s,s]
|
jwaldmann/s
|
compressor.hs
|
gpl-3.0
| 1,453 | 0 | 20 | 570 | 416 | 212 | 204 | 33 | 1 |
module Main where
import System.Environment
import System.Exit
import Control.Monad
import System.Console.Haskeline
import qualified Data.Map as M
import Data.List((\\))
import Environment
import Interpreter
main :: IO ()
main = do
args <- getArgs
Parameters int pro <- parseParameters args
val <- case pro of
Nothing -> return . Just $ M.empty
Just x -> execProgram x M.empty
unless int exitSuccess
case val of
Nothing -> exitWith (ExitFailure 2)
Just x -> nInteractWith x
execProgram :: String -> Env -> IO (Maybe Env)
execProgram prog e = do
res <- fullInterpreter prog e []
case res of
Left err -> print err >> return Nothing
Right (val, []) -> return . Just $ val
Right (val, top:_) -> print top >> return (Just val)
nInteract :: IO ()
nInteract = nInteractWith M.empty
nInteractWith :: Env -> IO ()
nInteractWith e = do
prog <- runInputT defaultSettings $ getInputLine "N> "
case prog of
Nothing -> exitSuccess
Just program -> do
res <- execProgram program e
case res of
Nothing -> nInteractWith e
Just e' -> nInteractWith e'
type Env = M.Map String FObject
data Parameters = Parameters { interactive :: Bool, code :: Maybe String }
data Switches = Switches {switches :: [String], nonSwitches :: [String]}
parseSwitches :: Bool -> [String] -> Switches
parseSwitches _ [] = Switches [] []
parseSwitches False items = Switches [] items
parseSwitches True ("--":rest) = parseSwitches False rest
parseSwitches True (top:rest)
= case top of
'-':'-':tag -> Switches (tag:s) n
'-':tag -> Switches (tag:s) n
nonTag -> Switches s (nonTag:n)
where
Switches s n = parseSwitches True rest
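-- A small sketch of the switch parser on hypothetical arguments:
--
-- >>> switches (parseSwitches True ["-i", "prog.n"])
-- ["i"]
-- >>> nonSwitches (parseSwitches True ["-i", "prog.n"])
-- ["prog.n"]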
pFromS :: Switches -> IO Parameters
pFromS (Switches [] []) = return $ Parameters True Nothing
pFromS (Switches ["i"] []) = return $ Parameters True Nothing
pFromS (Switches tags [item])
| null $ tags \\ ["i", "e"] = Parameters ("i" `elem` tags) . Just <$> if "e" `elem` tags then return item else readFile item
| otherwise = usage
pFromS _ = usage
parseParameters :: [String] -> IO Parameters
parseParameters = pFromS . parseSwitches True
usage :: IO a
usage = do
name <- getProgName
putStrLn $ "Usage: " ++ name ++ " [ -i ] [ FILE | -e PROGRAM ]"
exitWith (ExitFailure 1)
|
kavigupta/N-programming-language
|
src/Main.hs
|
gpl-3.0
| 2,407 | 0 | 15 | 617 | 914 | 457 | 457 | 66 | 3 |
-- | Tests corresponding to the rewrite rules.
module TestSuite.Tests.Matching where
import qualified Data.Map as M
import qualified Data.Sequence as S
import TestSuite.Tests.Samples
import Equ.Matching
import Equ.Parser
import Equ.PreExpr
import Equ.Theories.FOL(folForall,folExist)
import Test.HUnit (Assertion, assertFailure)
import Test.Framework (testGroup, Test)
import Test.Framework.Providers.HUnit (testCase)
import Control.Monad (unless)
infixl 5 ./
s ./ p = M.insert (fst p) (snd p) s
emp = M.empty
-- | True v False -m-> p v p : no match exists.
testCase0 :: Assertion
testCase0 = testMatch lhs rhs res
where lhs = pVp
rhs = trueVfalse
Just frhs = goDown (toFocus rhs) >>= goRight
merror = (frhs, DoubleMatch p true false)
res = Left (merror, S.fromList [])
-- | True v False -m-> p v q : [p->True, q->False]
testCase1 :: Assertion
testCase1 = testMatch pVq trueVfalse (Right s)
where s = emp ./ (p,true) ./ (q,false)
-- | Sy + S(x+S0) + z -m-> x + Sy + z : [x->Sy, y->x+S0]
testCase2 :: Assertion
testCase2 = testMatch xPlusSyPlusZ sAppyPlusSomePlusz (Right s)
where s = emp ./ (x,sAppy) ./ (y,xPlussApp0)
-- | #([0] ++ [1]) + 1 -m-> #([x,y]) + z : [x->0, y->1, z->1]
testCase3 :: Assertion
testCase3 = testMatch lengthListPlusz
lengthListPlusOne (Right s)
where s = M.fromList [ (x, zero)
, (y, one)
, (w, two)
, (z, one)
]
-- | 〈∀ z : 〈∀ z : z = z : F@z@z〉 : G@z〉 -m->
-- 〈∀ x : 〈∀ y : y = x : F@y@x〉 : G@x〉 : no match exists.
testCase4 :: Assertion
testCase4 = testMatch lhs rhs res
where lhs = parser "〈∀ x :〈∀ y : y = x : F%(y,x)〉: G%(x)〉"
rhs = parser "〈∀ z :〈∀ z : z = z : F%(z,z)〉: G%(z)〉"
Just frhs = goDown (toFocus rhs) >>= goDown >>= goDown >>= goRight
merror = (frhs, BindingVar v0)
res = Left (merror, S.fromList [])
-- | 〈∃ xx : (G@(# []) + xx) ▹ [] ⇒ True : w ⇒ q〉 -m->
-- 〈∃ x : G@y + x ▹ [] ⇒ p : q ⇒ w〉 : [y->(# []), p->True , w->q, q->w]
testCase5 :: Assertion
testCase5 = testMatch (parser "〈∃ x : G%(y) + x ▹ [] ⇒ p : q ⇒ w〉")
(parser "〈∃ xx : (G%((# [])) + xx) ▹ [] ⇒ True : w ⇒ q〉")
(Right subst)
where subst = M.fromList [ (y, parser "(# [])")
, (p, parser "True")
, (w, parser "q")
, (q, parser "w")
]
-- | A more complicated one with quantifiers. In the second expression we
-- leave free a variable that is bound in the first one.
testCase6 :: Assertion
testCase6 = testMatch (parser "〈∃ xs : 〈∀ y : y = xs.0 : F%(y) ∧ p〉 : xs↓1 = ys↓1〉")
(parser "〈∃ ys : 〈∀ z : z = ys.0 : F%(z) ∧ (True ⇒ p ∨ q)〉 : ys↓1 = (xs++zs)↓1〉")
(Right subst)
where subst = M.fromList [ (p,parser "(True ⇒ p ∨ q)")
, (ys,parser "(xs++zs)")
]
-- | Test for expressions with parentheses.
testCaseParens :: Assertion
testCaseParens = testMatch (parser "(p ⇒ q)") (parser "((True ∨ False) ∧ r) ⇒ (p ≡ q)")
(Right subst)
where subst = M.fromList [ (p,parser "((True ∨ False) ∧ r)")
, (q,parser "(p ≡ q)")
]
-- | We should not be able to match functions with different names.
testCase7 :: Assertion
testCase7 = testMatch lhs rhs res
where lhs = parser "G%(y) + x"
rhs = parser "S%(y) + z"
funG = Fun $ g
funS = Fun $ s
Just frhs = goDown (toFocus rhs) >>= goDown
merror = (frhs, InequPreExpr funG funS)
res = Left (merror, S.fromList [])
-- | We should not be able to match different quantifiers.
testCase8 :: Assertion
testCase8 = testMatch lhs rhs res
where lhs = exist0
rhs = forAll0
frhs = toFocus rhs
merror = (frhs, InequQuantifier folExist folForall)
res = Left (merror, S.fromList [])
-- | We should not be able to match different constants.
testCase9 :: Assertion
testCase9 = testMatch conL conR res
where conL = parser "[]"
conR = parser "0"
fconR = toFocus conR
merror = (fconR, InequPreExpr conL conR)
res = Left (merror, S.fromList [])
-- | Checks that the matching between the expressions is the expected one.
-- Takes two expressions and an expected substitution.
testMatch :: PreExpr -> PreExpr -> Either (MatchMErr,Log) ExprSubst -> Assertion
testMatch pe pe' mpe = let m = match pe pe'
in unless (m == mpe) $
assertFailure $
"\n Resultado esperado: " ++ show mpe ++
"\n Contra : " ++ show m
-- | Test group for matching.
testGroupMatch :: Test
testGroupMatch = testGroup "Matching"
[ testCase (dontMatch "True v False -m-> p v p")
testCase0
, testCase "True v False -m-> p v q : [p->True, q->False]"
testCase1
, testCase "Sy + S(x+S0) + z -m-> x + Sy + z : [x->Sy, y->x+S0]"
testCase2
, testCase "#([0] ++ [1]) + 1 -m-> #([x,y]) + z : [x->0, y->1, z->1]"
testCase3
, testCase (dontMatch $
"〈∀ z : 〈∀ z : z = z : F@z@z〉 : G@z〉 -m->" ++
"〈∀ x : 〈∀ y : y = x : F@y@x〉 : G@x〉"
)
testCase4
, testCase ("〈∃ xx : (G@(# []) + xx) ▹ [] ⇒ True : w ⇒ q〉 -m-> " ++
"〈∃ x : G@y + x ▹ [] ⇒ p : q ⇒ w〉 :" ++
"[y->(# []), p->True , w->q, q->w]")
testCase5
, testCase ("〈∃ ys : 〈∀ z : z = ys.0 : F@y ∧ (True ⇒ p ∨ q)〉 : ys↓1 = (xs++zs)↓1〉 -m-> \n" ++
"〈∃ xs : 〈∀ y : y = xs.0 : F@y ∧ p〉 : xs↓1 = ys↓1〉 :"++
"[p -> True ⇒ p ∨ q, ys -> (xs++zs)]")
testCase6
, testCase ("((True ∨ False) ∧ r) ⇒ (p ≡ q) -m-> " ++
"(p ⇒ q) :" ++
"[p -> ((True ∨ False) ∧ r), q -> (p ≡ q)]")
testCaseParens
, testCase (dontMatch "S@y + x -m-> G@y + z") testCase7
, testCase (dontMatch "∀ =/= ∃") testCase8
, testCase (dontMatch "[] =/= 0") testCase9
]
where dontMatch = ("No hay matching: " ++)
|
miguelpagano/equ
|
TestSuite/Tests/Matching.hs
|
gpl-3.0
| 6,980 | 0 | 14 | 2,526 | 1,306 | 713 | 593 | 120 | 1 |
{-|
Module : Helper.Tuple
Description : Helper functions for working with lists.
Copyright : 2014, Jonas Cleve
License : GPL-3
-}
module Helper.Tuple (
-- * Triple functions
first, second, third
) where
-- | Return the first element from a three-tuple.
first :: (a, b, c) -> a
first (x, _, _) = x
-- | Return the second element from a three-tuple.
second :: (a, b, c) -> b
second (_, x, _) = x
-- | Return the third element from a three-tuple.
third :: (a, b, c) -> c
third (_, _, x) = x
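-- A small usage sketch, exercising the accessors above on a hypothetical
-- triple:
--
-- >>> first (1 :: Int, "b", True)
-- 1
-- >>> second (1 :: Int, "b", True)
-- "b"
-- >>> third (1 :: Int, "b", True)
-- True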
|
Potregon/while
|
src/Helper/Tuple.hs
|
gpl-3.0
| 514 | 0 | 6 | 125 | 127 | 79 | 48 | 8 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzleMode.LevelPuzzleWorld.Content.Fancy
(
ContentData (..),
makeContentData,
destroyContentData,
OffsetArray,
offsetarrayAt,
) where
import MyPrelude
import Game.MEnv
import Data.Array.Unboxed
import Data.Array.Base
import Game.Shade
import Game.Grid.GridWorld.Node
import Game.LevelPuzzleMode.LevelPuzzleWorld.Room
import OpenGL
import OpenGL.Helpers
data ContentData =
ContentData
{
contentdataWallVAO :: !GLuint,
contentdataWallVBO :: !GLuint,
contentdataWallIBO :: !GLuint,
contentdataWallOffsets :: !OffsetArray
-- map any two connected rooms into distinct colors:
-- contentdataColorMapMap :: !ColorMapMapArray -- RoomIx -> ColorMapIx
}
makeContentData :: UInt -> [Room] -> MEnv' ContentData
makeContentData roomssize rooms = io $ do
let numWalls = foldl' (\ix room -> ix + roomWallSize room) 0 rooms :: UInt
-- vao
vao <- bindNewVAO
glEnableVertexAttribArray attPos
glEnableVertexAttribArray attTexCoord
-- ibo
ibo <- makeGroupIBO 6 numWalls
-- vbo
(vbo, offs) <- makeWallVBOOffsets rooms numWalls
return ContentData
{
contentdataWallVAO = vao,
contentdataWallVBO = vbo,
contentdataWallIBO = ibo,
contentdataWallOffsets = offsetarrayList (length' offs) offs
}
makeWallVBOOffsets :: [Room] -> UInt -> IO (GLuint, [UInt])
makeWallVBOOffsets rooms numWalls = do
vbo <- bindNewBuf gl_ARRAY_BUFFER
glBufferData gl_ARRAY_BUFFER (fI $ numWalls * 72) nullPtr gl_STATIC_DRAW
-- note: glMapBufferOES does not accept empty data, hence the special case for numWalls == 0 below
offs <- if numWalls == 0 then return [0] else do
writeBuf gl_ARRAY_BUFFER $ helper rooms 0
glVertexAttribPointer attPos 3 gl_SHORT gl_FALSE 12 $ mkPtrGLvoid 0
glVertexAttribPointer attTexCoord 2 gl_UNSIGNED_SHORT gl_TRUE 12 $ mkPtrGLvoid 8
return (vbo, offs)
where
-- write walls in content
helper rooms off ptr =
case rooms of
[] -> return [off]
(r:rs) -> do
helper' (roomWall r) (roomWallSize r) 0 ptr
(off:) `fmap` helper rs (off + roomWallSize r * 8)
(plusPtr ptr (fI $ roomWallSize r * 72))
-- write walls in room
helper' wallarray wallarraysize ix ptr =
if ix == wallarraysize
then return ()
else do
helper'' (wallarrayAt wallarray ix) ptr
helper' wallarray wallarraysize (ix + 1) (plusPtr ptr 72)
-- write wall
helper'' wall ptr = do
let Node n0 n1 n2 = wallNode wall
Node x0 x1 x2 = wallX wall
Node y0 y1 y2 = wallY wall
s0 = 0x0000
s1 = 0x8000
s2 = if wallIsDouble wall then 0x0000 else 0xffff
t0 = 0x0000
t1 = 0xffff
pokeByteOff ptr (0 + 0) (fI (n0 + y0) :: GLshort)
pokeByteOff ptr (0 + 2) (fI (n1 + y1) :: GLshort)
pokeByteOff ptr (0 + 4) (fI (n2 + y2) :: GLshort)
pokeByteOff ptr (0 + 8) (s0 :: GLushort)
pokeByteOff ptr (0 + 10) (t1 :: GLushort)
pokeByteOff ptr (12+ 0) (fI n0 :: GLshort)
pokeByteOff ptr (12+ 2) (fI n1 :: GLshort)
pokeByteOff ptr (12+ 4) (fI n2 :: GLshort)
pokeByteOff ptr (12+ 8) (s0 :: GLushort)
pokeByteOff ptr (12+ 10) (t0 :: GLushort)
pokeByteOff ptr (24+ 0) (fI (n0 + x0 + y0) :: GLshort)
pokeByteOff ptr (24+ 2) (fI (n1 + x1 + y1) :: GLshort)
pokeByteOff ptr (24+ 4) (fI (n2 + x2 + y2) :: GLshort)
pokeByteOff ptr (24+ 8) (s1 :: GLushort)
pokeByteOff ptr (24+ 10) (t1 :: GLushort)
pokeByteOff ptr (36+ 0) (fI (n0 + x0) :: GLshort)
pokeByteOff ptr (36+ 2) (fI (n1 + x1) :: GLshort)
pokeByteOff ptr (36+ 4) (fI (n2 + x2) :: GLshort)
pokeByteOff ptr (36+ 8) (s1 :: GLushort)
pokeByteOff ptr (36+ 10) (t0 :: GLushort)
pokeByteOff ptr (48+ 0) (fI (n0 + y0) :: GLshort)
pokeByteOff ptr (48+ 2) (fI (n1 + y1) :: GLshort)
pokeByteOff ptr (48+ 4) (fI (n2 + y2) :: GLshort)
pokeByteOff ptr (48+ 8) (s2 :: GLushort)
pokeByteOff ptr (48+ 10) (t1 :: GLushort)
pokeByteOff ptr (60+ 0) (fI n0 :: GLshort)
pokeByteOff ptr (60+ 2) (fI n1 :: GLshort)
pokeByteOff ptr (60+ 4) (fI n2 :: GLshort)
pokeByteOff ptr (60+ 8) (s2 :: GLushort)
pokeByteOff ptr (60+ 10) (t0 :: GLushort)
destroyContentData :: ContentData -> MEnv' ()
destroyContentData cntdata = io $ do
delBuf $ contentdataWallVBO cntdata
delBuf $ contentdataWallIBO cntdata
delBuf $ contentdataWallVAO cntdata
--------------------------------------------------------------------------------
-- OffsetArray
type OffsetArray =
UArray Int UInt
offsetarrayList :: UInt -> [UInt] -> OffsetArray
offsetarrayList size offs =
listArray (0, fI size - 1) offs
offsetarrayAt :: OffsetArray -> UInt -> UInt
offsetarrayAt array ix =
unsafeAt array (fI ix)
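-- A tiny sketch of the offset-array helpers (the offsets below are
-- hypothetical): building an array of three wall offsets and indexing it.
--
-- > offsetarrayAt (offsetarrayList 3 [0, 8, 24]) 1   -- evaluates to 8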
|
karamellpelle/grid
|
designer/source/Game/LevelPuzzleMode/LevelPuzzleWorld/Content/Fancy.hs
|
gpl-3.0
| 6,085 | 0 | 19 | 1,875 | 1,752 | 912 | 840 | 117 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Drive.Files.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates a file\'s metadata and\/or content. This method supports patch
-- semantics.
--
-- /See:/ <https://developers.google.com/drive/ Drive API Reference> for @drive.files.update@.
module Network.Google.Resource.Drive.Files.Update
(
-- * REST Resource
FilesUpdateResource
-- * Creating a Request
, filesUpdate
, FilesUpdate
-- * Request Lenses
, fuPayload
, fuRemoveParents
, fuUseContentAsIndexableText
, fuOCRLanguage
, fuKeepRevisionForever
, fuSupportsAllDrives
, fuIncludePermissionsForView
, fuEnforceSingleParent
, fuFileId
, fuAddParents
, fuSupportsTeamDrives
) where
import Network.Google.Drive.Types
import Network.Google.Prelude
-- | A resource alias for @drive.files.update@ method which the
-- 'FilesUpdate' request conforms to.
type FilesUpdateResource =
"drive" :>
"v3" :>
"files" :>
Capture "fileId" Text :>
QueryParam "removeParents" Text :>
QueryParam "useContentAsIndexableText" Bool :>
QueryParam "ocrLanguage" Text :>
QueryParam "keepRevisionForever" Bool :>
QueryParam "supportsAllDrives" Bool :>
QueryParam "includePermissionsForView" Text :>
QueryParam "enforceSingleParent" Bool :>
QueryParam "addParents" Text :>
QueryParam "supportsTeamDrives" Bool :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] File :> Patch '[JSON] File
:<|>
"upload" :>
"drive" :>
"v3" :>
"files" :>
Capture "fileId" Text :>
QueryParam "removeParents" Text :>
QueryParam "useContentAsIndexableText" Bool :>
QueryParam "ocrLanguage" Text :>
QueryParam "keepRevisionForever" Bool :>
QueryParam "supportsAllDrives" Bool :>
QueryParam "includePermissionsForView" Text :>
QueryParam "enforceSingleParent" Bool :>
QueryParam "addParents" Text :>
QueryParam "supportsTeamDrives" Bool :>
QueryParam "alt" AltJSON :>
QueryParam "uploadType" Multipart :>
MultipartRelated '[JSON] File :>
Patch '[JSON] File
-- | Updates a file\'s metadata and\/or content. This method supports patch
-- semantics.
--
-- /See:/ 'filesUpdate' smart constructor.
data FilesUpdate =
FilesUpdate'
{ _fuPayload :: !File
, _fuRemoveParents :: !(Maybe Text)
, _fuUseContentAsIndexableText :: !Bool
, _fuOCRLanguage :: !(Maybe Text)
, _fuKeepRevisionForever :: !Bool
, _fuSupportsAllDrives :: !Bool
, _fuIncludePermissionsForView :: !(Maybe Text)
, _fuEnforceSingleParent :: !Bool
, _fuFileId :: !Text
, _fuAddParents :: !(Maybe Text)
, _fuSupportsTeamDrives :: !Bool
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FilesUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fuPayload'
--
-- * 'fuRemoveParents'
--
-- * 'fuUseContentAsIndexableText'
--
-- * 'fuOCRLanguage'
--
-- * 'fuKeepRevisionForever'
--
-- * 'fuSupportsAllDrives'
--
-- * 'fuIncludePermissionsForView'
--
-- * 'fuEnforceSingleParent'
--
-- * 'fuFileId'
--
-- * 'fuAddParents'
--
-- * 'fuSupportsTeamDrives'
filesUpdate
:: File -- ^ 'fuPayload'
-> Text -- ^ 'fuFileId'
-> FilesUpdate
filesUpdate pFuPayload_ pFuFileId_ =
FilesUpdate'
{ _fuPayload = pFuPayload_
, _fuRemoveParents = Nothing
, _fuUseContentAsIndexableText = False
, _fuOCRLanguage = Nothing
, _fuKeepRevisionForever = False
, _fuSupportsAllDrives = False
, _fuIncludePermissionsForView = Nothing
, _fuEnforceSingleParent = False
, _fuFileId = pFuFileId_
, _fuAddParents = Nothing
, _fuSupportsTeamDrives = False
}
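-- A hedged usage sketch (the payload and identifiers below are
-- hypothetical); optional fields can then be set through the lenses
-- below, using standard 'Control.Lens' operators:
--
-- > filesUpdate myFile "fileId123"
-- >   & fuAddParents ?~ "folderId456"
-- >   & fuKeepRevisionForever .~ True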
-- | Multipart request metadata.
fuPayload :: Lens' FilesUpdate File
fuPayload
= lens _fuPayload (\ s a -> s{_fuPayload = a})
-- | A comma-separated list of parent IDs to remove.
fuRemoveParents :: Lens' FilesUpdate (Maybe Text)
fuRemoveParents
= lens _fuRemoveParents
(\ s a -> s{_fuRemoveParents = a})
-- | Whether to use the uploaded content as indexable text.
fuUseContentAsIndexableText :: Lens' FilesUpdate Bool
fuUseContentAsIndexableText
= lens _fuUseContentAsIndexableText
(\ s a -> s{_fuUseContentAsIndexableText = a})
-- | A language hint for OCR processing during image import (ISO 639-1 code).
fuOCRLanguage :: Lens' FilesUpdate (Maybe Text)
fuOCRLanguage
= lens _fuOCRLanguage
(\ s a -> s{_fuOCRLanguage = a})
-- | Whether to set the \'keepForever\' field in the new head revision. This
-- is only applicable to files with binary content in Google Drive. Only
-- 200 revisions for the file can be kept forever. If the limit is reached,
-- try deleting pinned revisions.
fuKeepRevisionForever :: Lens' FilesUpdate Bool
fuKeepRevisionForever
= lens _fuKeepRevisionForever
(\ s a -> s{_fuKeepRevisionForever = a})
-- | Whether the requesting application supports both My Drives and shared
-- drives.
fuSupportsAllDrives :: Lens' FilesUpdate Bool
fuSupportsAllDrives
= lens _fuSupportsAllDrives
(\ s a -> s{_fuSupportsAllDrives = a})
-- | Specifies which additional view\'s permissions to include in the
-- response. Only \'published\' is supported.
fuIncludePermissionsForView :: Lens' FilesUpdate (Maybe Text)
fuIncludePermissionsForView
= lens _fuIncludePermissionsForView
(\ s a -> s{_fuIncludePermissionsForView = a})
-- | Deprecated. Adding files to multiple folders is no longer supported. Use
-- shortcuts instead.
fuEnforceSingleParent :: Lens' FilesUpdate Bool
fuEnforceSingleParent
= lens _fuEnforceSingleParent
(\ s a -> s{_fuEnforceSingleParent = a})
-- | The ID of the file.
fuFileId :: Lens' FilesUpdate Text
fuFileId = lens _fuFileId (\ s a -> s{_fuFileId = a})
-- | A comma-separated list of parent IDs to add.
fuAddParents :: Lens' FilesUpdate (Maybe Text)
fuAddParents
= lens _fuAddParents (\ s a -> s{_fuAddParents = a})
-- | Deprecated: use supportsAllDrives instead.
fuSupportsTeamDrives :: Lens' FilesUpdate Bool
fuSupportsTeamDrives
= lens _fuSupportsTeamDrives
(\ s a -> s{_fuSupportsTeamDrives = a})
instance GoogleRequest FilesUpdate where
type Rs FilesUpdate = File
type Scopes FilesUpdate =
'["https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/drive.appdata",
"https://www.googleapis.com/auth/drive.file",
"https://www.googleapis.com/auth/drive.metadata",
"https://www.googleapis.com/auth/drive.scripts"]
requestClient FilesUpdate'{..}
= go _fuFileId _fuRemoveParents
(Just _fuUseContentAsIndexableText)
_fuOCRLanguage
(Just _fuKeepRevisionForever)
(Just _fuSupportsAllDrives)
_fuIncludePermissionsForView
(Just _fuEnforceSingleParent)
_fuAddParents
(Just _fuSupportsTeamDrives)
(Just AltJSON)
_fuPayload
driveService
where go :<|> _
= buildClient (Proxy :: Proxy FilesUpdateResource)
mempty
instance GoogleRequest (MediaUpload FilesUpdate)
where
type Rs (MediaUpload FilesUpdate) = File
type Scopes (MediaUpload FilesUpdate) =
Scopes FilesUpdate
requestClient (MediaUpload FilesUpdate'{..} body)
= go _fuFileId _fuRemoveParents
(Just _fuUseContentAsIndexableText)
_fuOCRLanguage
(Just _fuKeepRevisionForever)
(Just _fuSupportsAllDrives)
_fuIncludePermissionsForView
(Just _fuEnforceSingleParent)
_fuAddParents
(Just _fuSupportsTeamDrives)
(Just AltJSON)
(Just Multipart)
_fuPayload
body
driveService
where _ :<|> go
= buildClient (Proxy :: Proxy FilesUpdateResource)
mempty
|
brendanhay/gogol
|
gogol-drive/gen/Network/Google/Resource/Drive/Files/Update.hs
|
mpl-2.0
| 9,333 | 0 | 40 | 2,617 | 1,417 | 792 | 625 | 204 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.UserProFiles.GuardianInvitations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a list of guardian invitations that the requesting user is
-- permitted to view, filtered by the parameters provided. This method
-- returns the following error codes: * \`PERMISSION_DENIED\` if a
-- \`student_id\` is specified, and the requesting user is not permitted to
-- view guardian invitations for that student, if \`\"-\"\` is specified as
-- the \`student_id\` and the user is not a domain administrator, if
-- guardians are not enabled for the domain in question, or for other
-- access errors. * \`INVALID_ARGUMENT\` if a \`student_id\` is specified,
-- but its format cannot be recognized (it is not an email address, nor a
-- \`student_id\` from the API, nor the literal string \`me\`). May also be
-- returned if an invalid \`page_token\` or \`state\` is provided. *
-- \`NOT_FOUND\` if a \`student_id\` is specified, and its format can be
-- recognized, but Classroom has no record of that student.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.userProfiles.guardianInvitations.list@.
module Network.Google.Resource.Classroom.UserProFiles.GuardianInvitations.List
(
-- * REST Resource
UserProFilesGuardianInvitationsListResource
-- * Creating a Request
, userProFilesGuardianInvitationsList
, UserProFilesGuardianInvitationsList
-- * Request Lenses
, upfgilStudentId
, upfgilStates
, upfgilXgafv
, upfgilUploadProtocol
, upfgilPp
, upfgilAccessToken
, upfgilUploadType
, upfgilBearerToken
, upfgilInvitedEmailAddress
, upfgilPageToken
, upfgilPageSize
, upfgilCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.userProfiles.guardianInvitations.list@ method which the
-- 'UserProFilesGuardianInvitationsList' request conforms to.
type UserProFilesGuardianInvitationsListResource =
"v1" :>
"userProfiles" :>
Capture "studentId" Text :>
"guardianInvitations" :>
QueryParams "states" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "invitedEmailAddress" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListGuardianInvitationsResponse
-- | Returns a list of guardian invitations that the requesting user is
-- permitted to view, filtered by the parameters provided. This method
-- returns the following error codes: * \`PERMISSION_DENIED\` if a
-- \`student_id\` is specified, and the requesting user is not permitted to
-- view guardian invitations for that student, if \`\"-\"\` is specified as
-- the \`student_id\` and the user is not a domain administrator, if
-- guardians are not enabled for the domain in question, or for other
-- access errors. * \`INVALID_ARGUMENT\` if a \`student_id\` is specified,
-- but its format cannot be recognized (it is not an email address, nor a
-- \`student_id\` from the API, nor the literal string \`me\`). May also be
-- returned if an invalid \`page_token\` or \`state\` is provided. *
-- \`NOT_FOUND\` if a \`student_id\` is specified, and its format can be
-- recognized, but Classroom has no record of that student.
--
-- /See:/ 'userProFilesGuardianInvitationsList' smart constructor.
data UserProFilesGuardianInvitationsList = UserProFilesGuardianInvitationsList'
{ _upfgilStudentId :: !Text
, _upfgilStates :: !(Maybe [Text])
, _upfgilXgafv :: !(Maybe Text)
, _upfgilUploadProtocol :: !(Maybe Text)
, _upfgilPp :: !Bool
, _upfgilAccessToken :: !(Maybe Text)
, _upfgilUploadType :: !(Maybe Text)
, _upfgilBearerToken :: !(Maybe Text)
, _upfgilInvitedEmailAddress :: !(Maybe Text)
, _upfgilPageToken :: !(Maybe Text)
, _upfgilPageSize :: !(Maybe (Textual Int32))
, _upfgilCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UserProFilesGuardianInvitationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'upfgilStudentId'
--
-- * 'upfgilStates'
--
-- * 'upfgilXgafv'
--
-- * 'upfgilUploadProtocol'
--
-- * 'upfgilPp'
--
-- * 'upfgilAccessToken'
--
-- * 'upfgilUploadType'
--
-- * 'upfgilBearerToken'
--
-- * 'upfgilInvitedEmailAddress'
--
-- * 'upfgilPageToken'
--
-- * 'upfgilPageSize'
--
-- * 'upfgilCallback'
userProFilesGuardianInvitationsList
:: Text -- ^ 'upfgilStudentId'
-> UserProFilesGuardianInvitationsList
userProFilesGuardianInvitationsList pUpfgilStudentId_ =
UserProFilesGuardianInvitationsList'
{ _upfgilStudentId = pUpfgilStudentId_
, _upfgilStates = Nothing
, _upfgilXgafv = Nothing
, _upfgilUploadProtocol = Nothing
, _upfgilPp = True
, _upfgilAccessToken = Nothing
, _upfgilUploadType = Nothing
, _upfgilBearerToken = Nothing
, _upfgilInvitedEmailAddress = Nothing
, _upfgilPageToken = Nothing
, _upfgilPageSize = Nothing
, _upfgilCallback = Nothing
}
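-- A hedged usage sketch (the student identifier and parameter values are
-- hypothetical); optional parameters are filled in via the lenses below,
-- using standard 'Control.Lens' operators:
--
-- > userProFilesGuardianInvitationsList "student@example.com"
-- >   & upfgilPageSize ?~ 20
-- >   & upfgilStates .~ ["PENDING"]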
-- | The ID of the student whose guardian invitations are to be returned. The
-- identifier can be one of the following: * the numeric identifier for the
-- user * the email address of the user * the string literal \`\"me\"\`,
-- indicating the requesting user * the string literal \`\"-\"\`,
-- indicating that results should be returned for all students for whom
-- the requesting user is permitted to view guardian invitations.
upfgilStudentId :: Lens' UserProFilesGuardianInvitationsList Text
upfgilStudentId
= lens _upfgilStudentId
(\ s a -> s{_upfgilStudentId = a})
-- | If specified, only results with the specified \`state\` values will be
-- returned. Otherwise, results with a \`state\` of \`PENDING\` will be
-- returned.
upfgilStates :: Lens' UserProFilesGuardianInvitationsList [Text]
upfgilStates
= lens _upfgilStates (\ s a -> s{_upfgilStates = a})
. _Default
. _Coerce
-- | V1 error format.
upfgilXgafv :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilXgafv
= lens _upfgilXgafv (\ s a -> s{_upfgilXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
upfgilUploadProtocol :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilUploadProtocol
= lens _upfgilUploadProtocol
(\ s a -> s{_upfgilUploadProtocol = a})
-- | Pretty-print response.
upfgilPp :: Lens' UserProFilesGuardianInvitationsList Bool
upfgilPp = lens _upfgilPp (\ s a -> s{_upfgilPp = a})
-- | OAuth access token.
upfgilAccessToken :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilAccessToken
= lens _upfgilAccessToken
(\ s a -> s{_upfgilAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
upfgilUploadType :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilUploadType
= lens _upfgilUploadType
(\ s a -> s{_upfgilUploadType = a})
-- | OAuth bearer token.
upfgilBearerToken :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilBearerToken
= lens _upfgilBearerToken
(\ s a -> s{_upfgilBearerToken = a})
-- | If specified, only results with the specified \`invited_email_address\`
-- will be returned.
upfgilInvitedEmailAddress :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilInvitedEmailAddress
= lens _upfgilInvitedEmailAddress
(\ s a -> s{_upfgilInvitedEmailAddress = a})
-- | nextPageToken value returned from a previous list call, indicating that
-- the subsequent page of results should be returned. The list request must
-- be otherwise identical to the one that resulted in this token.
upfgilPageToken :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilPageToken
= lens _upfgilPageToken
(\ s a -> s{_upfgilPageToken = a})
-- | Maximum number of items to return. Zero or unspecified indicates that
-- the server may assign a maximum. The server may return fewer than the
-- specified number of results.
upfgilPageSize :: Lens' UserProFilesGuardianInvitationsList (Maybe Int32)
upfgilPageSize
= lens _upfgilPageSize
(\ s a -> s{_upfgilPageSize = a})
. mapping _Coerce
-- | JSONP
upfgilCallback :: Lens' UserProFilesGuardianInvitationsList (Maybe Text)
upfgilCallback
= lens _upfgilCallback
(\ s a -> s{_upfgilCallback = a})
instance GoogleRequest
UserProFilesGuardianInvitationsList where
type Rs UserProFilesGuardianInvitationsList =
ListGuardianInvitationsResponse
type Scopes UserProFilesGuardianInvitationsList = '[]
requestClient
UserProFilesGuardianInvitationsList'{..}
= go _upfgilStudentId (_upfgilStates ^. _Default)
_upfgilXgafv
_upfgilUploadProtocol
(Just _upfgilPp)
_upfgilAccessToken
_upfgilUploadType
_upfgilBearerToken
_upfgilInvitedEmailAddress
_upfgilPageToken
_upfgilPageSize
_upfgilCallback
(Just AltJSON)
classroomService
where go
= buildClient
(Proxy ::
Proxy UserProFilesGuardianInvitationsListResource)
mempty
|
rueshyna/gogol
|
gogol-classroom/gen/Network/Google/Resource/Classroom/UserProFiles/GuardianInvitations/List.hs
|
mpl-2.0
| 10,652 | 0 | 23 | 2,412 | 1,246 | 730 | 516 | 176 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Controller.Funding
( queryFunderHandler
, postVolumeFunding
, deleteVolumeFunder
) where
import Control.Monad (liftM2)
import qualified Data.Text as T
import Has (focusIO)
import qualified JSON
import Model.Id
import Model.Permission
import Model.Volume
import Model.Funding
import Model.Funding.FundRef
import HTTP.Form.Deform
import HTTP.Path.Parser
import Action
import Controller.Paths
import Controller.Form
import Controller.Permission
import Controller.Volume
data QueryFundersRequest = QueryFundersRequest T.Text Bool
queryFunderHandler :: Action -- TODO: GET only
queryFunderHandler = withAuth $ do
_ <- authAccount
QueryFundersRequest q a <- runForm Nothing $ liftM2 QueryFundersRequest
("query" .:> (deformRequired =<< deform))
("all" .:> deform)
r <- QueryFunderResponse <$> if a
then focusIO $ searchFundRef q
else findFunders q
return $ okResponse [] $ (JSON.mapObjects funderJSON . unwrap) r
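-- A hedged sketch of how this handler is exercised (the route is
-- hypothetical; only the "query" and "all" form fields are taken from
-- the form above):
--
-- > GET <funder-query-route>?query=nsf&all=false
--
-- responds with a JSON array of funder objects rendered by 'funderJSON'.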
-- | Body of funder query response
newtype QueryFunderResponse = QueryFunderResponse { unwrap :: [Funder] }
data CreateOrUpdateVolumeFundingRequest =
CreateOrUpdateVolumeFundingRequest [T.Text]
postVolumeFunding :: ActionRoute (Id Volume, Id Funder)
postVolumeFunding = action POST (pathJSON >/> pathId </> pathId) $ \(vi, fi) -> withAuth $ do
v <- getVolume PermissionEDIT vi
f <- maybeAction =<< lookupFunderRef fi
CreateOrUpdateVolumeFundingRequest a <- runForm Nothing $ do
csrfForm
CreateOrUpdateVolumeFundingRequest <$> ("awards" .:> filter (not . T.null) <$> withSubDeforms (\_ -> deform))
let resp@(AddVolumeFundingResponse fa) = AddVolumeFundingResponse (Funding f a)
_ <- changeVolumeFunding v fa
return $ okResponse [] $ JSON.pairs $ (fundingJSON . avfUnwrap) resp
-- | Body of add volume funding response
newtype AddVolumeFundingResponse = AddVolumeFundingResponse { avfUnwrap :: Funding }
deleteVolumeFunder :: ActionRoute (Id Volume, Id Funder)
deleteVolumeFunder = action DELETE (pathJSON >/> pathId </> pathId) $ \(vi, fi) -> withAuth $ do
guardVerfHeader
v <- getVolume PermissionEDIT vi
_ <- removeVolumeFunder v fi
return $ okResponse [] $ JSON.recordEncoding $ volumeJSONSimple v
|
databrary/databrary
|
src/Controller/Funding.hs
|
agpl-3.0
| 2,222 | 0 | 21 | 347 | 629 | 330 | 299 | 52 | 2 |
{-# LANGUAGE FunctionalDependencies, TemplateHaskell #-}
module Model.SQL.Select
( SelectOutput(..)
, Selector(..)
, selector
, selectColumn
, selectColumns
, addSelects
, fromMap
, fromAlias
, crossJoin
, joinOn
, joinUsing
, maybeJoinOn
, maybeJoinUsing
, selectJoin
, selectMap
, makeQuery
, selectDistinctQuery
, nameRef
) where
import Control.Arrow (second)
import Control.Monad.State (StateT(..))
import Control.Monad.Trans.Class (lift)
import Data.Char (isLetter, toLower)
import Data.List (intercalate, unfoldr)
import Database.PostgreSQL.Typed.Query (QueryFlags, parseQueryFlags, makePGQuery)
import qualified Language.Haskell.TH as TH
import Service.DB (useTDB)
data SelectOutput
= SelectColumn { _selectTable, _selectColumn :: String }
| SelectExpr String
| OutputJoin { outputNullable :: !Bool, outputJoiner :: TH.Name, outputJoin :: [SelectOutput] }
| OutputMap { outputNullable :: !Bool, outputMapper :: TH.Exp -> TH.Exp, outputMap :: SelectOutput }
_outputTuple :: [SelectOutput] -> SelectOutput
_outputTuple l = OutputJoin False (TH.tupleDataName $ length l) l
outputMaybe :: SelectOutput -> SelectOutput
outputMaybe (OutputJoin False f l) = OutputJoin True f l
outputMaybe (OutputMap False f l) = OutputMap True f l
outputMaybe s = s
outputColumns :: SelectOutput -> [String]
outputColumns (SelectColumn t c) = [t ++ '.' : c]
outputColumns (SelectExpr s) = [s]
outputColumns (OutputJoin _ _ o) = concatMap outputColumns o
outputColumns (OutputMap _ _ o) = outputColumns o
outputParser :: SelectOutput -> StateT [TH.Name] TH.Q TH.Exp
outputParser (OutputJoin mb f ol) = do
fi <- lift $ TH.reify f
(fe, ft) <- case fi of
TH.ClassOpI _ t _ -> return (TH.VarE f, t)
TH.DataConI _ t _ -> return (TH.ConE f, t)
TH.VarI _ t _ -> return (TH.VarE f, t)
_ -> die "wrong kind"
if mb
then do
let am = unfoldr argMaybe ft
(bl, ae) <- bindArgs am ol
-- when (null bl) $ die "function with at least one non-Maybe argument required"
return $ TH.DoE $ bl ++ [TH.NoBindS $ TH.AppE (TH.ConE 'Just) $ foldl TH.AppE fe ae]
else foldl TH.AppE fe <$> mapM outputParser ol
where
bindArgs (False:m) (o:l) = do
n <- lift $ TH.newName "cm"
a <- outputParser (outputMaybe o)
(bl, al) <- bindArgs m l
return (TH.BindS (TH.VarP n) a : bl, TH.VarE n : al)
bindArgs (True:m) (o:l) = do
a <- outputParser o
second (a:) <$> bindArgs m l
bindArgs _ o = (,) [] <$> mapM outputParser o
argMaybe (TH.ArrowT `TH.AppT` a `TH.AppT` r) = Just (isMaybeT a, r)
argMaybe _ = Nothing
isMaybeT (TH.AppT (TH.ConT m) _) = m == ''Maybe
isMaybeT _ = False
die s = fail $ "outputParser " ++ show f ++ ": " ++ s
outputParser (OutputMap False f o) =
f <$> outputParser o
outputParser (OutputMap True f o) = do
x <- lift $ TH.newName "x"
((TH.VarE 'fmap `TH.AppE` TH.LamE [TH.VarP x] (f $ TH.VarE x)) `TH.AppE`)
<$> outputParser (outputMaybe o)
outputParser _ = StateT st where
st (i:l) = return (TH.VarE i, l)
st [] = fail "outputParser: insufficient values"
data Selector = Selector
{ selectOutput :: SelectOutput
, selectSource :: String
, selectJoined :: String
}
selector :: String -> SelectOutput -> Selector
selector t o = Selector o t (',':t)
selectColumn :: String -> String -> Selector
selectColumn t c = selector t $ SelectColumn t c
selectColumns :: TH.Name -> String -> [String] -> Selector
selectColumns f t c =
selector t $ OutputJoin False f $ map (SelectColumn t) c
addSelects :: TH.Name -> Selector -> [SelectOutput] -> Selector
addSelects f s c = s
{ selectOutput = OutputJoin False f (selectOutput s : c) }
fromMap :: (String -> String) -> Selector -> Selector
fromMap f sel = sel
{ selectSource = f $ selectSource sel
, selectJoined = f $ selectJoined sel
}
outputFromAlias :: String -> SelectOutput -> SelectOutput
outputFromAlias t (SelectColumn _ c) = SelectColumn t c
outputFromAlias _ (SelectExpr e) = error $ "fromAlias (SelectExpr " ++ show e ++ ")"
outputFromAlias t o@OutputJoin{ outputJoin = l } = o{ outputJoin = map (outputFromAlias t) l }
outputFromAlias t o@OutputMap{ outputMap = l } = o{ outputMap = outputFromAlias t l }
fromAlias :: Selector -> String -> Selector
fromAlias sel as = fromMap (++ " AS " ++ as) sel
{ selectOutput = outputFromAlias as $ selectOutput sel }
joinWith :: (String -> String) -> Selector -> Selector
joinWith j sel = sel{ selectJoined = j (selectSource sel) }
maybeJoinWith :: (String -> String) -> Selector -> Selector
maybeJoinWith j sel = sel
{ selectJoined = j (selectSource sel)
, selectOutput = outputMaybe (selectOutput sel) }
crossJoin :: Selector -> Selector
crossJoin = joinWith (" CROSS JOIN " ++)
joinOn :: String -> Selector -> Selector
joinOn on = joinWith (\s -> " JOIN " ++ s ++ " ON " ++ on)
joinUsing :: [String] -> Selector -> Selector
joinUsing using = joinWith (\s -> " JOIN " ++ s ++ " USING (" ++ intercalate "," using ++ ")")
maybeJoinOn :: String -> Selector -> Selector
maybeJoinOn on = maybeJoinWith (\s -> " LEFT JOIN " ++ s ++ " ON " ++ on)
maybeJoinUsing :: [String] -> Selector -> Selector
maybeJoinUsing using = maybeJoinWith (\s -> " LEFT JOIN " ++ s ++ " USING (" ++ intercalate "," using ++ ")")
selectJoin :: TH.Name -> [Selector] -> Selector
selectJoin f l@(h:t) = Selector
{ selectOutput = OutputJoin False f $ map selectOutput l
, selectSource = selectSource h ++ joins
, selectJoined = selectJoined h ++ joins
} where joins = concatMap selectJoined t
selectJoin _ [] = error "selectJoin: empty list"
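-- A hedged sketch of composing selectors (the table, column and
-- constructor names below are hypothetical):
--
-- > selectJoin 'MakeVolumeOwner
-- >   [ selectColumns 'MakeVolume "volume" ["id", "name"]
-- >   , joinOn "volume.owner = party.id" $
-- >       selectColumns 'MakeParty "party" ["id", "name"]
-- >   ]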
selectMap :: (TH.Exp -> TH.Exp) -> Selector -> Selector
selectMap f s = s{ selectOutput = OutputMap False f (selectOutput s) }
takeWhileEnd :: (a -> Bool) -> [a] -> [a]
takeWhileEnd p = fst . foldr go ([], False) where
go x (rest, done)
| not done && p x = (x:rest, False)
| otherwise = (rest, True)
makeQuery :: QueryFlags -> (String -> String) -> SelectOutput -> TH.ExpQ
makeQuery flags sql output = do
_ <- useTDB
nl <- mapM (TH.newName . ('v':) . colVar) cols
(parse, []) <- runStateT (outputParser output) nl
TH.AppE (TH.VarE 'fmap `TH.AppE` TH.LamE [TH.TupP $ map TH.VarP nl] parse)
<$> makePGQuery flags (sql $ intercalate "," cols)
where
colVar s = case takeWhileEnd isLetter s of
[] -> "c"
(h:l) -> toLower h : l
cols = outputColumns output
selectDistinctQuery :: Maybe [String] -> Selector -> String -> TH.ExpQ
selectDistinctQuery dist Selector{ selectOutput = o, selectSource = s } sqlf =
makeQuery flags (\c -> select dist ++ c ++ " FROM " ++ s ++ ' ':sql) o
where
(flags, sql) = parseQueryFlags sqlf
select Nothing = "SELECT " -- ALL
select (Just []) = "SELECT DISTINCT "
select (Just l) = "SELECT DISTINCT ON (" ++ intercalate "," l ++ ") "
nameRef :: TH.Name -> String
nameRef n = maybe b (++ '.' : b) $ TH.nameModule n where b = TH.nameBase n
|
databrary/databrary
|
src/Model/SQL/Select.hs
|
agpl-3.0
| 6,890 | 0 | 17 | 1,404 | 2,758 | 1,442 | 1,316 | 161 | 10 |
{-# LANGUAGE OverloadedStrings #-}
import Language.PureScript.Bridge
import Control.Lens
import Data.Proxy
import GHC.Generics
import Servant.PureScript
import Gonimo.CodeGen.TypeBridges
import Gonimo.Server.DbEntities
import Gonimo.Server.Types
import Gonimo.Server.Error
import Gonimo.WebAPI
import Gonimo.WebAPI.Types as Client
data GonimoBridge
instance HasBridge GonimoBridge where
languageBridge _ = buildBridge gonimoBridge
gonimoProxy :: Proxy GonimoBridge
gonimoProxy = Proxy
data TestTypeConstructor m a = TestTypeConstructor (m a) deriving Generic
myTypes :: [SumType 'Haskell]
myTypes = [ mkSumType (Proxy :: Proxy Client.AuthData)
, mkSumType (Proxy :: Proxy Account)
, mkSumType (Proxy :: Proxy Client.InvitationInfo)
, mkSumType (Proxy :: Proxy Client.InvitationReply)
, mkSumType (Proxy :: Proxy ServerError)
, mkSumType (Proxy :: Proxy AuthToken)
, mkSumType (Proxy :: Proxy Client)
, mkSumType (Proxy :: Proxy Coffee)
, mkSumType (Proxy :: Proxy Invitation)
, mkSumType (Proxy :: Proxy InvitationDelivery)
, mkSumType (Proxy :: Proxy SendInvitation)
]
mySettings :: Settings
mySettings = addReaderParam "Authorization" defaultSettings & apiModuleName .~ "Gonimo.WebAPI"
main :: IO ()
main = do
let gonimoFrontPath = "../gonimo-front/src"
writePSTypes gonimoFrontPath (buildBridge gonimoBridge) myTypes
writeAPIModuleWithSettings mySettings gonimoFrontPath gonimoProxy gonimoAPI
|
charringer/gonimo-back
|
app/PSGenerator.hs
|
agpl-3.0
| 1,634 | 0 | 9 | 400 | 384 | 209 | 175 | -1 | -1 |
module Language.Prolog.Syntax where
data Expr =
Var String -- variable
| Str String [Expr] -- structure
| Op String [Expr] -- operator
| Cons Expr Expr -- list
| Num (Either Integer Double) -- number
deriving Show
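-- A tiny sketch of how a Prolog term could be represented with the
-- constructors above (the term is chosen for illustration only):
--
-- > -- parent(tom, X)
-- > Str "parent" [Str "tom" [], Var "X"]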
type Sentence = (Maybe Expr, Maybe [Expr])
data Module = Module { moduleName :: String
, exports :: [String]
}
|
acharal/parsec-prolog
|
src/Language/Prolog/Syntax.hs
|
lgpl-3.0
| 443 | 0 | 9 | 175 | 109 | 67 | 42 | 11 | 0 |
{-|
Module : Haskoin
Description : Bitcoin (BTC/BCH) Libraries for Haskell
Copyright : No rights reserved
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
This module exports almost all of Haskoin Core, excluding only a few highly
specialized address and block-related functions.
-}
module Haskoin
( module Address
, module Network
, module Constants
, module Block
, module Transaction
, module Script
, module Keys
, module Crypto
, module Util
) where
import Haskoin.Address as Address
import Haskoin.Block as Block
import Haskoin.Constants as Constants
import Haskoin.Crypto as Crypto
import Haskoin.Keys as Keys
import Haskoin.Network as Network
import Haskoin.Script as Script
import Haskoin.Transaction as Transaction
import Haskoin.Util as Util
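-- A hedged usage sketch: downstream code can import this umbrella module
-- instead of the individual Haskoin.* modules re-exported above.
--
-- > import Haskoin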
|
haskoin/haskoin
|
src/Haskoin.hs
|
unlicense
| 1,055 | 0 | 4 | 372 | 106 | 77 | 29 | 19 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-| This module implements the core algebra of Karps with respect to
structured transforms. It takes functional structured transforms and flattens
them into simpler, flat projections.
This is probably one of the most complex parts of the optimizer.
-}
module Spark.Core.Internal.StructuredFlattening(
structuredFlatten
) where
import Formatting
import Spark.Core.Internal.Utilities
import Data.List(nub)
import Data.Maybe(mapMaybe)
import qualified Data.Vector as V
import qualified Data.List.NonEmpty as N
import qualified Data.Map.Strict as M
import Data.List.NonEmpty(NonEmpty(..))
import Spark.Core.Internal.StructureFunctions
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.NodeBuilder(nbName)
import Spark.Core.Internal.ComputeDag
import Spark.Core.Internal.ContextStructures(ComputeGraph)
import Spark.Core.Internal.DAGStructures(Vertex(..), Edge(..), IndexedEdge(..), gIndexedEdges)
import Spark.Core.Internal.BrainStructures(makeParentEdge, nodeAsVertex)
import Spark.Core.Internal.TypesStructures(DataType(..), StrictDataType(Struct), StructType(..), StructField(..))
import Spark.Core.Internal.TypesFunctions(extractFields, structType', extractFields2)
import Spark.Core.Internal.DatasetStructures(OperatorNode(..), StructureEdge(ParentEdge), onOp, onPath, onType, onLocality)
import Spark.Core.Internal.DatasetFunctions(filterParentNodes)
import Spark.Core.InternalStd.Filter(filterBuilder)
import Spark.Core.StructuresInternal(FieldName(..), FieldPath(..), NodePath, nodePathAppendSuffix)
import Spark.Core.Try
{-| Takes a graph that may contain some functional nodes, and attempts to
apply these nodes as operands, flattening the inner functions and the groups
in the process.
This outputs a graph where all the functional elements have been replaced by
their low-level, imperative equivalents.
It works by doing the following:
- build a new DAG in which the edges track the groupings.
- traverse the DAG and apply the functional nodes
 - reconstruct the final DAG
-}
structuredFlatten :: ComputeGraph -> Try ComputeGraph
structuredFlatten cg = do
-- The functional structures are identified.
-- They are not fully parsed and may present errors.
labeled <- _labelNodesInitial cg
{- The final topology is in place:
- nodes are identified and linked.
- initial placeholders are linked to start node.
- final node is connected to a single sink.
-}
connected <- _labelConnectNodes labeled
-- return $ _labelConvert connected
-- The transform is performed.
let analyzed = _analyzeGraph connected
trans <- _mainTransform analyzed
-- -- Some post-processing for filter?
-- undefined
-- Convert the graph back to a compute graph, and add the preprocessing nodes.
_mainTransformReturn trans
-- -- fg <- _fgraph cg
-- -- fg' <- _performTrans fg
-- -- _fgraphBack cg fg'
{-| The different moves that happen in the stack of keys.
This is then used to infer the type and datatype of the key.
-}
data StackMove =
StackEnter1 -- We are entering one more layer in the stack.
  | StackKeep -- We are maintaining our current position in the stack.
| StackExit1 -- We are dropping the last key from the stack.
deriving (Eq, Show)
-- Convenient shortcut
type CDag v = ComputeDag v StructureEdge
{-| The type of function that is applied:
-}
data NodeFunctionalType =
{-| shuffle:
overall: distributed -> distributed
inner function: distributed -> local
-}
FunctionalShuffle
deriving (Eq, Show)
-- data FunctionalNodeAnalysis = FunctionalNodeAnalysis {
-- fnaId :: !VertexId,
-- fnaType :: !NodeParseType,
-- fnaParent :: !VertexId, -- The direct parent of the functional node
-- fnaFunctionStart :: !VertexId, -- The placeholder that starts
-- fnaFunctionEnd :: !VertexId -- The final node of the function that ends
-- } deriving (Eq, Show)
-- ******* Initial parsing ********
-- This step verifies some basic topology. The shape invariants are supposed
-- to have been verified by the builders.
-- Labeling of nodes with information about the functional structure.
-- This is the forward pass structure.
data FunctionalParsing =
-- The final node of a functional operation
-- Includes itself, path of start, path of placeholder, path of sink.
FConclusion OperatorNode NodeFunctionalType NodePath NodePath NodePath
-- Some other node
| FOther OperatorNode
deriving (Show)
-- Labeling of the nodes with information about functional structure.
-- This is the output of the backward pass.
data FunctionalLabeling =
-- The final node, see FConclusion
FLFinal OperatorNode NodePath NodePath NodePath
-- The sink node of the inner function.
-- and the path of the final node
| FLSink OperatorNode NodePath
-- The start placeholder node and the path of the final node.
| FLPlaceholder OperatorNode NodePath
-- The start node that is the input to the functional operation.
-- and the path of the final node.
| FLStart OperatorNode NodePath
| FLOther OperatorNode
deriving (Show)
{-| Preliminary checks and labeling of nodes.
-}
_labelNodesInitial :: ComputeGraph -> Try (CDag FunctionalLabeling)
_labelNodesInitial cg = do
-- Forward pass: identify the parents.
cg1 <- computeGraphMapVertices cg fun1
let rcg1 = reverseGraph cg1
-- Backward pass: identify the inputs of the functional nodes.
let rcg2 = computeGraphMapVerticesI rcg1 f2
let cg2 = reverseGraph rcg2
return cg2
where
functionalType :: OperatorNode -> Maybe NodeFunctionalType
functionalType on = case onOp on of
NodeDistributedOp so | soName so == nbName functionalShuffleBuilder -> Just FunctionalShuffle
_ -> Nothing
p :: FunctionalParsing -> NodePath
p (FConclusion on _ _ _ _) = onPath on
p (FOther on) = onPath on
fun1 on l = f1' (functionalType on) on (filterParentNodes l)
-- Just operates on the parents.
f1' :: Maybe NodeFunctionalType -> OperatorNode -> [FunctionalParsing] -> Try FunctionalParsing
f1' (Just nft) on [onInit, onPlaceholder, onSink] =
-- TODO: could do more checks on the type of the placeholder too.
pure $ FConclusion on nft (p onInit) (p onPlaceholder) (p onSink)
f1' (Just _) on l =
      tryError $ sformat ("_labelNodesInitial: expected 3 parents "%sh%" but got "%sh) on l
f1' Nothing on _ = pure $ FOther on
findPlaceholderParent on = concatMap f where
f (FLFinal onEnd _ pPlaceholder _ ) | onPath on == pPlaceholder = [FLPlaceholder on (onPath onEnd)]
f _ = []
f2 :: FunctionalParsing -> [(FunctionalLabeling, StructureEdge)] -> FunctionalLabeling
f2 (FConclusion on _ pInit pPlaceholder pSink) _ = FLFinal on pInit pPlaceholder pSink
f2 (FOther on) l = traceHint ("_labelNodesInitial: on="<>show' on<>" l="<>show' l<>" res=") $ case filterParentNodes l of
[FLFinal onEnd pInit _ _] | onPath on == pInit -> FLStart on (onPath onEnd)
[FLFinal onEnd _ _ pSink] | onPath on == pSink -> FLSink on (onPath onEnd)
-- Try to isolate a placeholder out of all the parents
lp -> case findPlaceholderParent on lp of
        -- TODO: it is a programming error to find multiple parents.
-- We should return an error in that case.
(x:_) -> x
[] -> FLOther on -- No placeholder parent
{-| Connects the functional nodes:
 - the placeholder is now connected to the start
 - the final node is disconnected from the start and the placeholder.
-}
_labelConnectNodes :: CDag FunctionalLabeling -> Try (CDag FunctionalLabeling)
_labelConnectNodes cg = do
cg1 <- tryEither $ graphAdd cg [] eds
return $ graphFilterEdges' cg1 fun
where
-- We add one edge between the start and the placeholder.
eds = concatMap f (graphVertexData cg) where
f (FLFinal _ pInit pPlaceholder _) = [makeParentEdge pInit pPlaceholder]
f _ = []
-- Remember that True == we keep it.
fun FLFinal{} ParentEdge FLStart{} = False
fun FLFinal{} ParentEdge FLPlaceholder{} = False
fun _ _ _ = True
-- Temporary while the function is not complete.
_labelConvert :: CDag FunctionalLabeling -> ComputeGraph
_labelConvert = mapVertexData (_onFunctionalLabel . traceHint "_labelConvert: x=")
_onFunctionalLabel :: FunctionalLabeling -> OperatorNode
_onFunctionalLabel (FLFinal on _ _ _) = on
_onFunctionalLabel (FLSink on _) = on
_onFunctionalLabel (FLPlaceholder on _) = on
_onFunctionalLabel (FLStart on _) = on
_onFunctionalLabel (FLOther on) = on
-- ************ Initial analysis *********
-- This pass does not calculate datatypes or transforms, but
-- extracts all the information required from the nodes.
-- It does not modify the operations nor does it add extra columns for the key.
-- All these passes could be combined in one big pass, but it is easier
-- to reason about them separately for the time being.
{-| The different types of nodes that are recognized by the algorithm.
-}
data FNodeType =
FDistributedTransform ColOp -- Parents are assumed to be distributed too.
| FLocalTransform ColOp -- Parents are assumed to be local or aggs.
| FImperativeAggregate AggOp -- A call to a low-level aggregate (reduce)
| FImperativeShuffle AggOp -- The keyed reduction.
| FEnter -- The placeholder that starts the functional shuffle.
| FExit -- The sink node for functional operations. Nothing special happens on this node.
| FUnknown -- Some unknown type of node (allowed at the top level)
| FFilter -- A filter operation.
-- Unimplemented other operations for now
-- | FLocalPack
deriving (Eq, Show)
{-| A node, once the functional analysis has been conducted.
This node does not do type checking when being built.
Arguments:
- the original operator node
- the interpretation of the node
-}
data FNode = FNode OperatorNode FNodeType deriving (Eq, Show)
_analyzeGraph :: CDag FunctionalLabeling -> CDag FNode
_analyzeGraph cg = computeGraphMapVerticesI cg f' where
-- Just consider the parents
f' fl _ = FNode on' (f (onOp on') fl) where
on' = _onFunctionalLabel fl
-- To check on the parents for now
f :: NodeOp -> FunctionalLabeling -> FNodeType
f _ FLPlaceholder{} = FEnter
f _ FLFinal{} = FExit
f (NodeReduction ao) _ = FImperativeAggregate ao
f (NodeGroupedReduction ao) _ = FImperativeShuffle ao
f (NodeStructuredTransform co) _ = FDistributedTransform co
f (NodeLocalStructuredTransform co) _ = FLocalTransform co
f (NodeDistributedOp so) _ | soName so == nbName filterBuilder = FFilter
-- We do not know about any other node for now.
f _ _ = FUnknown
-- ******** Transform **********
-- This is the step that attempts to perform the transform.
{- Details on the layout of the data.
For nodes at the top level, the datatype does not change.
For nodes inside a stack, the datatype is encapsulated inside a structure, in
the following form:
{key:{key1:DT1, .... keyN:DTN}, value:DT}
DT.. and DT are determined by the grouping operations.
keyN is the deepest key, which corresponds to the top of the stack.
The scheme above is robust to any collision with user-specified column names,
but it requires a bit more bookkeeping to go in out of the top level and to
add new keys.
-}
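{- For intuition (an illustrative example, not from the original comment):
   grouping a DOUBLE column by an INT key and then again by a STRING key
   yields, inside the stack, values of the shape
     {key:{key_1:INT, key_2:STRING}, value:DOUBLE}
   where key_2, the deepest key, corresponds to the innermost grouping. -}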
{-| The different types of nodes that will be output.
Every non-unknown node translate to one of these.
-}
data FPNodeType =
FPDistributedTransform ColOp
| FPLocalTransform ColOp
| FPImperativeAggregate AggOp
| FPImperativeShuffle AggOp
| FPFilter
| FPUnknown
deriving (Eq, Show)
type FStack = [NodePath]
type FStack' = NonEmpty NodePath
{-| Like the functional node, but with the stack of all the entrances that
have not been closed by exits so far.
This is important to know with respect to the unknown nodes.
The operator node and the node type contain the final node operation and
node type (with key information plugged in).
-}
data FPostNode = FPostNode {
fpnNode :: !OperatorNode,
_fpnType :: !FPNodeType,
fpnStack :: !FStack,
fpnPreNode :: !(Maybe OperatorNode)
} deriving (Eq, Show)
fpostNode :: OperatorNode -> FPNodeType -> FStack -> FPostNode
fpostNode on fpnt fs = FPostNode on fpnt fs Nothing
{-| The main transform.
This transform does not add conversion nodes required by some operators.
This function only works with linear transforms for now. Only a single parent
is accepted. Multiparent, multi-level transforms will be added in the future.
-}
_mainTransform :: CDag FNode -> Try (CDag FPostNode)
_mainTransform cg = computeGraphMapVertices cg fun where
-- For the unknown nodes, just let them go through at the root level.
-- The only case accepted for now is top level with multiple parents and
-- single parent inside stacks.
-- Anything else is rejected.
fun :: FNode -> [(FPostNode, StructureEdge)] -> Try FPostNode
fun (FNode on fnt) l = do
currentStack <- currentStackt
_ <- check1Parent currentStack parents
f1 currentStack parentTypes fnt on
where
parents = filterParentNodes l
parentTypes = onType . fpnNode <$> parents
parentsStacks = fpnStack <$> parents
currentStackt = _currentStackSame parentsStacks
      -- Checks that there is only one parent for deeper nodes.
check1Parent [] _ = pure ()
check1Parent (_:_) [] = pure ()
check1Parent (_:_) [_] = pure ()
check1Parent (_:_) parents' =
tryError $ sformat ("Found more than one parent in the stack parents="%sh%" node="%sh) parents' on
-- else tryError $ sformat ("Trying to use unrecognized node inside a keyed function:"%sh%" its parents are "%sh) on l
_mainTransformReturn :: CDag FPostNode -> Try ComputeGraph
-- TODO: this is pretty heavy and completely reconstructs the graph from
-- scratch using indexed edges. There should be a function that does
-- add-nodes-and-swap
_mainTransformReturn cg = tryEither $
buildGraphFromList' vertices' (iedges2 ++ edges1) inputs outputs where
-- Find all the pairs of pre-nodes and nodes.
-- For these, we need to a) insert an edge between the two, and b) swap the
-- incoming edges.
-- The pairs of nodes that will need to be modified: (preNode, node)
withPre = mapMaybe f (V.toList (cdVertices cg)) where
f (Vertex _ fpn) = (,fpnNode fpn) <$> fpnPreNode fpn
vertices1 = nodeAsVertex . fst <$> withPre
vertices2 = fmap fpnNode <$> V.toList (cdVertices cg)
vertices' = vertices2 ++ vertices1
-- We should not have changed inputs and outputs, so no need to perform
-- swapping here.
inputs = vertexId <$> V.toList (cdInputs cg)
outputs = vertexId <$> V.toList (cdOutputs cg)
iedges = gIndexedEdges (computeGraphToGraph cg)
numEdges = length iedges
edges1 = f <$> zip [numEdges..] withPre where
f (idx, (pre, n)) = IndexedEdge {
iedgeFromIndex = idx,
iedgeFrom = edgeFrom e,
iedgeToIndex = idx,
iedgeTo = edgeTo e,
iedgeData = edgeData e
} where e = makeParentEdge (onPath pre) (onPath n)
-- The new vertices
  -- TODO: I always get confused about which one comes first and which comes
  -- last, so just putting both in there. It should not be a problem because
  -- we are adding a new node.
swapDct = M.fromList $ concatMap f edges1 where
f v = [(iedgeFrom v, iedgeTo v), (iedgeTo v, iedgeFrom v)]
-- For all the edges that were coming to the node, point them to the
-- prenode instead.
iedges2 = f <$> iedges where
f ie = ie1 where
ie1 = case iedgeFrom ie `M.lookup` swapDct of
Just vid -> ie { iedgeFrom = vid }
Nothing -> ie
-- ie2 = case iedgeTo ie1 `M.lookup` swapDct of
-- Just vid -> ie1 { iedgeTo = vid }
-- Nothing -> ie1
-- Given the current stack, transforms the current operation.
-- Most of the actual work is delegated to subfunctions.
-- This function does not leave the graph in a proper state: some operations
-- such as filter require structural transforms before and after the filter
-- itself.
-- TODO: rename to _flatteningMain
f1 ::
FStack -> -- The stack of the parents
[DataType] -> -- The datatype of the parents (including the key)
FNodeType -> -- The current node type
OperatorNode -> -- The op node of the current node
Try FPostNode -- The result of the operation.
-- Enters: they should have only one parent.
f1 [] [dt] FEnter on = _performEnter0 dt on
f1 (h:t) [dt] FEnter on = _performEnter (h:|t) dt (onPath on)
f1 _ _ FEnter on = tryError $ "_flatteningMain: wrong FEnter on "<>show' on
-- Exits: should have one parent and be inside a stack.
f1 [] _ FExit on = tryError $ sformat ("Trying to exit a functional group, but there is no group to exit from. node:"%sh) on
f1 (h:t) [dt] FExit on = _performExit (h:|t) dt (onPath on) (onLocality on)
f1 _ _ FExit on = tryError $ "_flatteningMain: wrong FExit on "<>show' on
-- Any node at the top level -> go through
f1 [] _ FUnknown on = pure $ fpostNode on FPUnknown []
f1 [] _ (FDistributedTransform co) on = pure $ fpostNode on (FPDistributedTransform co) []
f1 [] _ (FLocalTransform co) on = pure $ fpostNode on (FPLocalTransform co) []
f1 [] _ (FImperativeAggregate ao) on = pure $ fpostNode on (FPImperativeAggregate ao) []
f1 [] _ (FImperativeShuffle ao) on = pure $ fpostNode on (FPImperativeShuffle ao) []
f1 [] _ FFilter on = pure $ fpostNode on FPFilter []
-- Unknown at a higher level -> error
f1 l _ FUnknown on = tryError $ sformat ("Unknown node found inside stack "%sh%": node="%sh) l on
-- Distributed transform within a group.
f1 (h:t) [dt] (FDistributedTransform co) on = _performDistributedTrans (h:|t) dt (onPath on) (onType on) co
f1 _ _ (FDistributedTransform _) on = tryError $ "_flatteningMain: wrong FDistributedTransform on "<>show' on
f1 (h:t) [dt] (FImperativeAggregate ao) on = _performAggregate (h:|t) dt (onPath on) (onType on) ao
f1 _ _ (FImperativeAggregate _) on = tryError $ "_flatteningMain: wrong FImperativeAggregate on "<>show' on
-- Filters
f1 (h:t) [dt] FFilter on = _performFilter (h:|t) dt (onPath on)
f1 _ _ FFilter on = tryError $ "_flatteningMain: wrong FFilter on "<>show' on
-- Missing
f1 _ _ (FLocalTransform _) on = missing $ "_flatteningMain: FLocalTransform"<>show' on
f1 _ _ (FImperativeShuffle _) on = missing $ "_flatteningMain: FImperativeShuffle"<>show' on
{-| Entering from the root. The input is expected to be a struct of the
form {key:dt1, value:dt2}.
-}
_performEnter0 :: (HasCallStack) => DataType -> OperatorNode -> Try FPostNode
_performEnter0 dt on = do
-- The parent data type should be a struct of the form {key:dt1, value:dt2}
-- Isolate both parts and write a projection operator for it
(keyDt, valueDt) <- _getStartPair dt
let dt' = _keyGroupType (keyDt:|[]) valueDt
-- TODO: for now, just handling distributed nodes at the entrance.
let on' = on {onNodeInfo = coreNodeInfo dt' Distributed no}
return $ fpostNode on' (FPDistributedTransform co) [p]
where
co = _colStruct [
TransformField _key (_colStruct [
TransformField (_keyIdx 1) $ _extraction [_key]
]),
TransformField _group $ _extraction [_value]
]
no = NodeStructuredTransform co
p = onPath on
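-- For intuition (informal notation, added as an illustration): the projection
-- built above maps an input of shape {key:K, value:V} to
-- {key:{key_1:K}, value:V}, i.e. the single grouping key is pushed into the
-- key struct under key_1.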
{-| Entering within a stack. -}
_performEnter :: FStack' -> DataType -> NodePath -> Try FPostNode
_performEnter (h:|t) dt np = do
-- The parent data type should be a struct of the form:
-- {key:{...}, value:{key:keyDT, value:valueDT}}
-- We need to shift the inner key to the other ones and move the value
-- one level up.
(keyDt1:|keyDts, groupDt) <- _getGroupedType dt
(innerKeyDt, valueDt) <- _getStartPair groupDt
let dt' = _keyGroupType (innerKeyDt:|keyDt1:keyDts) valueDt
let on' = OperatorNode nid np $ coreNodeInfo dt' Distributed no
return $ fpostNode on' (FPDistributedTransform co) (np:h:t)
where
-- The initial number of keys, which is how we are going to build
-- the extraction.
numKeys = length (h:t)
-- The original keys, nothing special other than moving them.
fields = f <$> [1..numKeys] where
f idx = TransformField (_keyIdx idx) $ _extraction [_key, _keyIdx idx]
newField = TransformField (_keyIdx (numKeys + 1)) $ _extraction [_value, _key]
co = _colStruct [
TransformField _key (_colStruct (fields ++ [newField])),
TransformField _group $ _extraction [_group, _value]
]
no = NodeStructuredTransform co
nid = error "_performEnter: id not computed"
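-- For intuition (informal notation, added as an illustration): with one key
-- already on the stack, the transform above maps
-- {key:{key_1:K1}, value:{key:K2, value:V}} to
-- {key:{key_1:K1, key_2:K2}, value:V}.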
{-| Exit: the top key is moved from the stack onto the value group.
If this is the last key, we just simplify the key names.
-}
_performExit :: FStack' -> DataType -> NodePath -> Locality -> Try FPostNode
_performExit (_:|t) dt np loc = do
-- Get the current types in the keys
(keyDts, groupDt) <- _getGroupedType dt
case N.tail keyDts of
[] -> do
-- We drop the structure for the keys, there is only one key.
let co = _colStruct [
TransformField _key $ _extraction [_key, _keyIdx 1],
TransformField _value $ _extraction [_group]]
let dt' = structType' (StructField _key (N.head keyDts) :| [StructField _group groupDt])
let no = exitTrans co
let on' = OperatorNode nid np $ coreNodeInfo dt' loc no
return $ fpostNode on' (exitFTrans co) t
(hdt:tdt) -> do
-- We still have some keys that need to be kept around.
-- Move the last key from the key group into the value group.
-- Drop the last key from the key group.
let co = _colStruct [
TransformField _key keyCo,
TransformField _group $ _colStruct [
TransformField _key $ _extraction [_key, _keyIdx (numRemKeys + 1)],
TransformField _value $ _extraction [_group]
]]
let no = exitTrans co
let dt' = _keyGroupType (hdt:|tdt) groupDt
let on' = OperatorNode nid np $ coreNodeInfo dt' loc no
return $ fpostNode on' (exitFTrans co) t
where
(exitFTrans, exitTrans) = case loc of
Distributed -> (FPDistributedTransform, NodeStructuredTransform)
Local -> (FPLocalTransform, NodeLocalStructuredTransform)
nid = error "_performExit: id not computed"
numRemKeys = length t -- The number of remaining keys
keyCo = _colStruct (f <$> [1..numRemKeys]) where
f idx = TransformField (_keyIdx idx) $ _extraction [_key, _keyIdx idx]
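-- For intuition (informal notation, added as an illustration): with a single
-- key left, {key:{key_1:K}, value:V} becomes {key:K, value:V}; with n+1 keys,
-- the deepest key is moved back next to the value:
-- {key:{key_1..key_(n+1)}, value:V} becomes
-- {key:{key_1..key_n}, value:{key:K_(n+1), value:V}}.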
_performDistributedTrans ::
FStack' ->
DataType -> -- The start datatype. Must be a group data type.
NodePath -> -- The path of the current node.
DataType -> -- The result data type of the transform.
ColOp -> -- The current op
Try FPostNode
_performDistributedTrans (h:|t) parentDt np dt co = do
(keyDts, _) <- _getGroupedType parentDt
let dt' = _keyGroupType keyDts dt
let on' = makeOp dt'
return $ fpostNode on' (FPDistributedTransform co') (h:t)
where
-- Unlike aggregation, transforms must be wrapped to account for the groups:
-- 1. the extractors must peek inside the group
-- 2. the result must also transform the keyset (which is unchanged)
gCo = _wrapGroup co
co' = _colStruct [
TransformField _key $ _extraction [_key],
TransformField _group gCo]
no = NodeStructuredTransform co'
nid = error "_performDistributedTrans: id not computed"
makeOp dt' = OperatorNode nid np $ coreNodeInfo dt' Distributed no
{-| Performs the aggregate inside a stack.
-}
_performAggregate :: (HasCallStack) =>
FStack' ->
DataType -> -- The start data type. Must be a group data type.
NodePath -> -- The path of the current node
-- The result data type of the aggregation,
-- It is expected to be a structure {key:X, value:Y}
-- (this corresponds to the application of an aggregation outside a stack)
DataType ->
-- The op
AggOp ->
Try FPostNode
_performAggregate (h:|t) parentDt np dt ao = do
(keysDt, _) <- _getGroupedType parentDt
let dt' = _keyGroupType keysDt dt
return $ fpostNode (on' dt') (FPImperativeShuffle ao) (h:t)
where
-- Wrap the aggregation:
-- 1. the extractors need not be rewritten because they already expect
-- the values to be in a sub-field called 'value'
    -- 2. the output is itself inside a group, and this is already taken
-- into account by the backend.
-- ao' = AggStruct . V.fromList $ [AggField _group ao]
no = NodeGroupedReduction ao
nid = error "_performAggregate: id not computed"
on' dt' = OperatorNode nid np $ coreNodeInfo dt' Distributed no
{-|
The filtering. It builds two nodes:
- a preprocessing node that moves the keys inside the value and exposes
the filter column
- the filter node
There is no need for post processing since the outcome of the filter is
already in the proper position.
-}
_performFilter ::
FStack' ->
-- The start data type. Must be a group data type of the following structure:
-- {key:{...}, value:{filter:bool, value:XX}}
DataType ->
NodePath -> -- The path of the current node
Try FPostNode
_performFilter (h:|t) parentDt np = do
(keysDt, valueDt) <- _getGroupedType parentDt
(filtDt, valDt) <- extractFields2 "filter" "value" valueDt
let finalType = _keyGroupType keysDt valDt
let preType = structType' $ StructField "filter" filtDt :| [StructField "value" finalType]
return $ FPostNode (filtNode finalType) FPFilter (h:t) (Just (preNode preType))
where
co = _colStruct [
TransformField "filter" $ _extraction [_group, "filter"],
TransformField "value" $ _colStruct [
TransformField _key $ _extraction [_key],
TransformField _group $ _extraction [_group, "value"]
]
]
no = NodeStructuredTransform co
nid = error "_performFilter: id not computed"
npPre = nodePathAppendSuffix np "_kagg_filter"
preNode dt' = OperatorNode nid npPre $ coreNodeInfo dt' Distributed no
filtNode dt' = OperatorNode nid np cni where
so = StandardOperator (nbName filterBuilder) dt' emptyExtra
no' = NodeDistributedOp so
cni = coreNodeInfo dt' Distributed no'
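-- For intuition (informal notation, added as an illustration): the pre-node
-- reshapes {key:{...}, value:{filter:BOOL, value:V}} into
-- {filter:BOOL, value:{key:{...}, value:V}}, which is what the plain filter
-- operator expects; the filter output is then already in grouped form.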
_extraction :: [FieldName] -> ColOp
_extraction = ColExtraction . FieldPath . V.fromList
_colStruct :: [TransformField] -> ColOp
_colStruct = ColStruct . V.fromList
-- Is this stack for the top level?
_isTopLevel :: FStack -> Bool
_isTopLevel [] = True
_isTopLevel _ = False
{-| Checks that all the given stacks are the same.
-}
_currentStackSame :: [FStack] -> Try FStack
_currentStackSame [] = pure [] -- No parent -> root level
_currentStackSame (h:t) = case N.nub (h :| t) of
(s :| []) -> pure s
_ -> tryError $ sformat ("_currentStackSame: Nodes with different stacks cannot be merged: "%sh) (h:t)
{-| The name of the field that holds the keys.
-}
_key :: FieldName
_key = "key"
{-| The name of the field that holds the values. -}
_group :: FieldName
-- TODO: harmonize everywhere to this, it is much simpler to
-- have a single name when dealing with operators that expect a pair.
_group = "value"
-- TODO: remove
_value :: FieldName
_value = "value"
-- | Given a key index, returns the corresponding field name.
_keyIdx :: Int -> FieldName
_keyIdx idx = FieldName $ "key_" <> show' idx
-- Builds an extractor for a given key
_keyExtractor :: Int -> ColOp
_keyExtractor idx = ColExtraction . FieldPath . V.fromList $ [_key, _keyIdx idx]
-- The extractor for the group.
_groupExtractor :: ColOp
_groupExtractor = ColExtraction . FieldPath . V.singleton $ _group
-- The extractor for the entire set of keys.
_keyStructExtractor :: ColOp
_keyStructExtractor = ColExtraction . FieldPath . V.fromList $ [_key]
-- The name of the field that holds values at the start of a functional block.
_startValue :: FieldName
_startValue = "value"
-- Attempts to pattern match this data type into {key: dt1, value: dt2}
-- which is what is expected at the start node.
_getStartPair :: (HasCallStack) => DataType -> Try (DataType, DataType)
_getStartPair (StrictType (Struct (StructType v))) = case V.toList v of
[StructField n1 dt1, StructField n2 dt2] | n1 == _key && n2 == _startValue -> pure (dt1, dt2)
_ -> tryError $ sformat ("_getStartPair: could not find key,value pair from the inner struct "%sh) v
_getStartPair dt = tryError $ sformat ("_getStartPair: could not find key,value pair in the type "%sh) dt
-- Attempts to extract the types of the keys and the type of the value.
_getGroupedType :: (HasCallStack) => DataType -> Try (NonEmpty DataType, DataType)
_getGroupedType (StrictType (Struct (StructType v))) = case V.toList v of
[StructField n1 dt1, StructField n2 dt2] | n1 == _key && n2 == _group -> (,dt2) <$> l1 where
    -- It does not check the names of the fields; they are assumed to be in order,
    -- with the uppermost first and the deepest last.
l1 = case dt1 of
(StrictType (Struct (StructType v'))) -> case f <$> V.toList v' of
[] -> tryError "_getGroupedType: empty key struct"
(h:t) -> pure (N.reverse (h:|t))
_ -> tryError $ sformat ("_getGroupedType: expected struct for keys but got "%sh) dt1
f (StructField _ dt') = dt'
_ -> tryError $ sformat ("_getGroupedType: could not find key,value pair from the inner struct "%sh) v
_getGroupedType dt = tryError $ sformat ("_getGroupedType: could not find key,value pair in the type "%sh) dt
{-| builds the data type of a keyed group.
The deepest key is the head.
Second arg is the group type.
-}
_keyGroupType :: NonEmpty DataType -> DataType -> DataType
_keyGroupType (h:|t) groupDt = structType' (df1:|[df2]) where
  -- Account for the fact that the deepest key is the head.
indexedKeys = N.reverse (N.zip (1:|[2..]) (N.reverse (h:|t)))
keyDt = structType' $ f <$> indexedKeys where
f (idx,dt) = StructField (_keyIdx idx) dt
df1 = StructField _key keyDt
df2 = StructField _group groupDt
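-- For example (informal notation, added as an illustration):
-- _keyGroupType (dtDeep :| [dtOuter]) dtV builds a {key:..., value:dtV}
-- struct in which dtOuter is stored under key_1 and dtDeep, the deepest key,
-- under key_2.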
-- Given a list of stacks (coming from all the parents), tries to
-- find the deepest stack. All the other ones are supposed to be a prefix
-- of the deepest.
_currentStack :: [FStack] -> Try FStack
_currentStack [] = pure []
_currentStack l = do
head' <- headt
rest <- restt
return $ head' : rest
where
groups = concatMap f l where
f (h : t) = [(h, t)]
f _ = []
headt = checkTop (fst <$> groups)
restt = _currentStack (snd <$> groups)
checkTop l' = case nub l' of
[] -> tryError "_currentStack: empty"
[x] -> pure x
l'' -> tryError $ sformat ("_currentStack: one of the node paths is not a proper subset of the other: "%sh%" list of paths:"%sh) l'' l
_extractGroupType :: DataType -> Try (DataType, DataType)
_extractGroupType dt = do
l <- extractFields [_key, _value] dt
case l of
[keyDt, valueDt] -> pure (keyDt, valueDt)
    _ -> tryError $ sformat ("_extractGroupType: expected a structure with 2 fields, but got "%sh) dt
{-| Takes a col and makes sure that the extraction patterns are wrapped inside
the group instead of directly accessing the field path.
-}
_wrapGroup :: ColOp -> ColOp
-- TODO: make this function pure.
_wrapGroup (ColBroadcast _) = error "_wrapGroup: ColBroadcast encountered. It should have been removed already"
_wrapGroup (ColExtraction fp) = ColExtraction (_wrapFieldPath fp)
_wrapGroup (ColFunction sn v t) = ColFunction sn (_wrapGroup <$> v) t
_wrapGroup (x @ ColLit{}) = x
_wrapGroup (ColStruct v) = ColStruct (f <$> v) where
f (TransformField fn v') = TransformField fn (_wrapGroup v')
{-| Takes an agg and wraps the extraction patterns so that it accesses inside
the group instead of the top-level field path.
-}
_wrapAgg :: AggOp -> AggOp
_wrapAgg (AggUdaf ua ucn fp) = AggUdaf ua ucn (_wrapFieldPath fp)
_wrapAgg (AggFunction sfn fp t) = AggFunction sfn (_wrapFieldPath fp) t
_wrapAgg (AggStruct v) = AggStruct (f <$> v) where
f (AggField fn v') = AggField fn (_wrapAgg v')
_wrapFieldPath :: FieldPath -> FieldPath
_wrapFieldPath (FieldPath v) = FieldPath v' where
v' = V.fromList (_value : V.toList v)
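-- For example, an extraction of the field path ["a","b"] is rewritten to
-- ["value","a","b"], so that extractors written against the user-visible type
-- look inside the group instead of the top level of the row.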
|
tjhunter/karps
|
haskell/src/Spark/Core/Internal/StructuredFlattening.hs
|
apache-2.0
| 32,147 | 13 | 25 | 6,650 | 6,904 | 3,630 | 3,274 | -1 | -1 |
-- |
-- Module : ApiClient
-- Copyright : (c) 2017 Alain O'Dea
-- License : Apache Public License, v. 2.0.
-- Maintainer : Alain O'Dea <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Tenable SecurityCenter REST API - API Client for examples
--
-- Configuration parsing and API Client for running examples.
{-# LANGUAGE OverloadedStrings #-}
module ApiClient where
import Network.Tenable.SecurityCenter.Client (runRequest)
import Network.Tenable.SecurityCenter.Token
( CreateTokenRequest(..)
, CreateTokenResponse(..)
, DeleteTokenRequest(..)
)
import Network.Tenable.SecurityCenter.Types
( Token
, ApiResponse
, Endpoint(..)
, httpMethod
)
import Data.Aeson (eitherDecode)
import Data.Aeson.Types
( FromJSON(parseJSON)
, ToJSON
, Value(Object)
, typeMismatch
, (.:)
)
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Network.HTTP.Conduit
( Manager
, CookieJar
, createCookieJar
, newManager
, tlsManagerSettings
)
import qualified Data.Time.Clock as Clock
import Data.Time.ISO8601 (formatISO8601Millis)
import qualified Data.UUID as U
import qualified Data.UUID.V4 as U
instance FromJSON Config where
parseJSON (Object v) = Config <$>
v .: "host" <*>
v .: "username" <*>
v .: "password"
parseJSON invalid = typeMismatch "Config" invalid
data Config = Config
{ securityCenterHost :: T.Text
, securityCenterUsername :: T.Text
, securityCenterPassword :: T.Text
}
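-- The configuration file is expected to be JSON of the following shape
-- (the values below are purely illustrative):
--
-- > {"host": "securitycenter.example.com", "username": "apiuser", "password": "secret"}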
data ApiClient = ApiClient
{ apiClientManager :: Manager
, apiClientHostname :: T.Text
, apiClientSession :: CookieJar
, apiClientToken :: Token
}
createApiClient :: FilePath
-> IO ApiClient
createApiClient configFilename = do
configFile <- L8.readFile configFilename
let config = either error id $ eitherDecode configFile
  let hostname = securityCenterHost config
let u = securityCenterUsername config
let p = securityCenterPassword config
manager <- newManager tlsManagerSettings
(t, session) <- getToken manager hostname u p
return $ ApiClient manager hostname session t
getToken :: Manager
-> T.Text
-> T.Text
-> T.Text
-> IO (Token, CookieJar)
getToken manager hostname u p = do
let unauthSession = createCookieJar []
let req = CreateTokenRequest u p
(res, authSession) <- runRequest manager hostname unauthSession req Nothing
let Just t = fmap createTokenResponseToken res
return (t, authSession)
runApiRequest :: (Endpoint a, ToJSON a, FromJSON b)
=> ApiClient
-> a
-> IO (Maybe b, CookieJar)
runApiRequest apiClient req = do
reqId <- U.nextRandom
logSendApi reqId req
let manager = apiClientManager apiClient
let hostname = apiClientHostname apiClient
let session = apiClientSession apiClient
res <- runRequest manager hostname session req $
Just $ apiClientToken apiClient
logSuccessApi reqId req
return res
logSendApi :: Endpoint a
=> U.UUID
-> a
-> IO ()
logSendApi = logApi "Sending..."
logSuccessApi :: Endpoint a
=> U.UUID
-> a
-> IO ()
logSuccessApi = logApi "Success"
logApi :: Endpoint a
=> T.Text
-> U.UUID
-> a
-> IO ()
logApi msg reqId req = do
currentTime <- Clock.getCurrentTime
T.putStrLn $ T.concat
[ T.pack $ formatISO8601Millis currentTime
, ":"
, U.toText reqId
, ":"
, msg
, ":"
, httpMethod $ endpointRequestMethod req
, ":"
, endpointRequestPath req
]
endSession :: ApiClient
-> IO (Maybe (ApiResponse Value), CookieJar)
endSession apiClient = do
let req = DeleteTokenRequest
runApiRequest apiClient req
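-- A minimal end-to-end sketch (the configuration path is illustrative and the
-- request section is elided):
--
-- > example :: IO ()
-- > example = do
-- >   apiClient <- createApiClient "config.json"
-- >   -- ... issue requests with 'runApiRequest' here ...
-- >   _ <- endSession apiClient
-- >   return ()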
|
AlainODea-haskell/tenable-securitycenter
|
app/ApiClient.hs
|
apache-2.0
| 4,074 | 0 | 11 | 1,132 | 1,001 | 536 | 465 | 115 | 1 |
---------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.THash.Internal
-- Copyright : (C) 2006 Edward Kmett
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- "Control.Concurrent.STM.THash" Internals. Unless you really want to do
-- the plumbing yourself you probably want to use that instead.
--
-- There is a nearby point in the design space that generates a traditional
-- sorted linear hash table which will output keys and values in the same
-- order as long as both hashes have the same set of keys, regardless of
-- insertion order. To get there we would need to maintain the linked lists
-- in sorted order.
----------------------------------------------------------------------------
module Control.Concurrent.STM.THash.Internal (
THT, -- Eq key => THT key value
new, -- (k -> Int) -> STM (THT k v)
fromList,-- Eq k => (k -> Int) -> [(k,v)] -> STM (THT k v)
insert, -- Eq k => THT k v -> k -> v -> STM (THT k v, Bool)
update, -- Eq k => THT k v -> k -> v -> STM (THT k v)
modify, -- Eq k => THT k v -> k -> (Maybe v -> v) -> STM (THT k v)
delete, -- Eq k => THT k v -> k -> STM (THT k v, Bool)
lookup, -- Eq k => THT k v -> k -> STM (Maybe v)
mapH, --((k,v) -> r) -> THT k v -> STM [r]
each, -- THT k v -> STM [(k,v)]
keys, -- THT k v -> STM [k]
values -- THT k v -> STM [v]
) where
import Prelude
( Show(..), Ord(..), Eq, Bool(..), Maybe(..)
, Num, Int
, (*), (+), (-), ($), (==), (++), (.), (/=)
, mapM_, sequence_, sequence, return, mod, fst, snd, id
, otherwise
)
import Data.Bits
import Data.Array
import Control.Monad (liftM, replicateM,when)
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import qualified Data.List as List (partition, lookup, length, concatMap, map)
import Foreign (unsafePerformIO)
data THT k v = MkTHT
{ slots :: Array Int (TVar [(k, v)])
, mask :: !Int
, count :: !Int
, hash :: k -> Int
}
{-# INLINE stride #-}
stride :: THT k v -> Int
stride this = (mask this + 1) `shiftR` 1
{-# INLINE capacity #-}
capacity :: THT k v -> Int
capacity this = let (l,u) = bounds $ slots this in u - l + 1
{-# INLINE new #-}
new :: (k -> Int) -> STM (THT k v)
new hash = do
slots <- replicateM 4 $ newTVar []
return MkTHT
{ slots = listArray (0,3) slots
, mask = 0
, count = 0
, hash = hash
}
{-# INLINE fromList #-}
fromList :: Eq k => (k -> Int) -> [(k,v)] -> STM(THT k v)
fromList hash list = do
slots <- replicateM capacity $ newTVar []
let this = MkTHT
{ slots = listArray (0,mask) slots
, count = count
, mask = mask
, hash = hash
}
put (key,value) = do
let loc = chain this key
list <- readTVar loc
writeTVar loc $ (key,value):list
mapM_ put list
return this
where
count = List.length list
pow2 m | count < m = m
| otherwise = pow2 (m+m)
capacity = 2*pow2 1
mask = capacity - 1
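-- A minimal usage sketch (it assumes 'Int' keys so that 'id' is an acceptable
-- hash function; the data is illustrative):
--
-- > example :: IO (Maybe String)
-- > example = atomically $ do
-- >   ht <- fromList id [(1, "one"), (2, "two")]
-- >   lookup ht 2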
-- TODO fix from here down
{-# INLINE addBucket #-}
addBucket :: THT k v -> STM (THT k v)
addBucket this = do
slots' <- maybeGrow $ slots this
addBucket' this slots'
where
maybeGrow | count this == capacity this = grow
| otherwise = return
{-# INLINE addBucket' #-}
addBucket' :: THT k v -> Array Int (TVar [(k, v)]) -> STM (THT k v)
addBucket' this slots' = do
when (count this > 0) $ do
ll <- readTVar left
let (ol,nl) = List.partition (\(k,v) -> old == locate this' k) ll
writeTVar left ol
writeTVar right nl
return this'
where
count' = count this + 1
mask' = mask this .|. count'
this' = this { count = count', mask = mask', slots = slots' }
new = count this -- intentionally using previous count
old = new - stride this -- intentionally using previous count and stride
left = slots' ! old
right = slots' ! new
{-# INLINE removeBucket #-}
removeBucket :: THT k v -> STM (THT k v)
removeBucket this = do
list1 <- readTVar right
list2 <- readTVar left
writeTVar left (list1 ++ list2)
writeTVar right []
return this'
where
count' = count this - 1
mask' = if count' < stride this
then mask this - stride this
else mask this
stride' = (mask' + 1) `shiftR` 2
this' = this { count = count', mask = mask' }
left = slots this' ! (count this' - stride this')
right = slots this' ! count this'
{-# INLINE lookup #-}
lookup :: Eq k => THT k v -> k -> STM (Maybe v)
lookup this key = if (count this == 0) then return Nothing else do
list <- readTVar $ chain this key
return $ List.lookup key list
{-# INLINE insert #-}
insert :: Eq k => THT k v -> k -> v -> STM (THT k v, Bool)
insert this key value = do
list <- readTVar $ chain this key
case List.lookup key list of
Just _ -> return (this,False)
(Nothing) -> do
this' <- addBucket this
let tvar = chain this' key
list' <- readTVar tvar
writeTVar tvar $ (key,value):list'
return (this',True)
{-# INLINE update #-}
update :: Eq k => THT k v -> k -> v -> STM (THT k v)
update this key value = modify this key $ \_ -> value
{-# INLINE modify #-}
modify :: Eq k => THT k v -> k -> (Maybe v -> v) -> STM (THT k v)
modify this key f = do
list <- readTVar old
case List.lookup key list of
Just value -> do
writeTVar old $ List.map fixup list
return this
Nothing -> liftM fst . insert this key . f $ Nothing
where
old = chain this key
fixup (k,v) | k == key = (k, f $ Just v)
| otherwise = (k, v)
{-# INLINE delete #-}
delete :: Eq k => THT k v -> k -> STM (THT k v, Bool)
delete this key = do
let tvar = chain this key
list <- readTVar $ tvar
case strip key list of
(list', True) -> do
writeTVar tvar list'
this' <- removeBucket this
return (this', True)
(_, False) -> return (this, False)
{-# INLINE strip #-}
strip :: Eq k => k -> [(k,v)] -> ([(k,v)],Bool)
strip key list = strip' key list []
strip' :: Eq k => k -> [(k,v)] -> [(k,v)] -> ([(k,v)],Bool)
strip' key ((key',val):tail) head
| key == key' = (head ++ tail, True) -- delete
| otherwise = strip' key tail $ (key',val):head
strip' _ [] head = (head, False)
{-# INLINE grow #-}
-- replace a numerically indexed array of TVars of lists with one twice its size.
grow :: (Ix i, Num i) => Array i (TVar [t]) -> STM (Array i (TVar [t]))
grow a = do
top <- replicateM n $ newTVar []
return $ listArray (l,h') $ elems a ++ top
where
(l,h) = bounds a
l' = h + 1
h' = l' + h - l
n = rangeSize(l',h')
{-# INLINE bin #-}
-- figure out what bin a given hashed value is mapped to.
bin :: THT k v -> Int -> Int
bin this val =
if residue >= count this
then residue - stride this
else residue
where
residue = val .&. mask this
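-- Worked example (illustrative values): with count = 6, mask = 7 and hence
-- stride = 4, a hash of 13 gives residue 13 .&. 7 = 5 < 6, so bin 5; a hash
-- of 14 gives residue 6 >= 6, so it wraps around to bin 6 - 4 = 2.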
{-# INLINE locate #-}
-- translate a key to its current bin
locate :: THT k v -> k -> Int
locate this key = bin this $ hash this key
{-# INLINE chain #-}
-- translate a key to a tvar in which we might find it
chain :: THT k v -> k -> TVar [(k,v)]
chain this key = slots this ! locate this key
{-# INLINE mapH #-}
mapH :: ((k,v) -> r) -> THT k v -> STM [r]
mapH f this = do
  lists <- sequence [ readTVar $ slots this ! i | i <- [0 .. count this - 1] ]
return $ List.concatMap (List.map f) lists
{-# INLINE each #-}
each :: THT k v -> STM [(k,v)]
each = mapH id
{-# INLINE keys #-}
keys :: THT k v -> STM [k]
keys = mapH fst
{-# INLINE values #-}
values :: THT k v -> STM [v]
values = mapH snd
|
ekmett/stm-hash
|
src/Control/Concurrent/STM/THash/Internal.hs
|
bsd-2-clause
| 8,209 | 10 | 17 | 2,552 | 2,779 | 1,463 | 1,316 | 196 | 2 |
{-| Implementation of the generic daemon functionality.
-}
{-# LANGUAGE CPP #-}
{-
Copyright (C) 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Daemon
( DaemonOptions(..)
, OptType
, CheckFn
, PrepFn
, MainFn
, defaultOptions
, oShowHelp
, oShowVer
, oNoDaemonize
, oNoUserChecks
, oDebug
, oPort
, oBindAddress
, oSyslogUsage
, oForceNode
, oNoVoting
, oYesDoIt
, parseArgs
, parseAddress
, cleanupSocket
, describeError
, genericMain
, getFQDN
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.Fail (MonadFail)
import Data.Maybe (fromMaybe, listToMaybe)
import Text.Printf
import Data.Word
import GHC.IO.Handle (hDuplicateTo)
import Network.BSD (getHostName)
import qualified Network.Socket as Socket
import Network.Socket
import System.Console.GetOpt
import System.Directory
import System.Exit
import System.Environment
import System.IO
import System.IO.Error (isDoesNotExistError, modifyIOError, annotateIOError)
import System.Posix.Directory
import System.Posix.Files
import System.Posix.IO
import System.Posix.Process
import System.Posix.Types
import System.Posix.Signals
import Ganeti.Common as Common
import Ganeti.Logging
import Ganeti.Runtime
import Ganeti.BasicTypes
import Ganeti.Utils
import qualified Ganeti.Constants as C
import qualified Ganeti.Ssconf as Ssconf
-- * Constants
-- | \/dev\/null path.
devNull :: FilePath
devNull = "/dev/null"
-- | Error message prefix, used in two separate paths (when forking
-- and when not).
daemonStartupErr :: String -> String
daemonStartupErr = ("Error when starting the daemon process: " ++)
-- * Data types
-- | Command line options structure.
data DaemonOptions = DaemonOptions
{ optShowHelp :: Bool -- ^ Just show the help
, optShowVer :: Bool -- ^ Just show the program version
, optShowComp :: Bool -- ^ Just show the completion info
, optDaemonize :: Bool -- ^ Whether to daemonize or not
, optPort :: Maybe Word16 -- ^ Override for the network port
, optDebug :: Bool -- ^ Enable debug messages
, optNoUserChecks :: Bool -- ^ Ignore user checks
, optBindAddress :: Maybe String -- ^ Listen on a custom address
, optSyslogUsage :: Maybe SyslogUsage -- ^ Override for Syslog usage
, optForceNode :: Bool -- ^ Ignore node checks
, optNoVoting :: Bool -- ^ skip voting for master
, optYesDoIt :: Bool -- ^ force dangerous options
}
-- | Default values for the command line options.
defaultOptions :: DaemonOptions
defaultOptions = DaemonOptions
{ optShowHelp = False
, optShowVer = False
, optShowComp = False
, optDaemonize = True
, optPort = Nothing
, optDebug = False
, optNoUserChecks = False
, optBindAddress = Nothing
, optSyslogUsage = Nothing
, optForceNode = False
, optNoVoting = False
, optYesDoIt = False
}
instance StandardOptions DaemonOptions where
helpRequested = optShowHelp
verRequested = optShowVer
compRequested = optShowComp
requestHelp o = o { optShowHelp = True }
requestVer o = o { optShowVer = True }
requestComp o = o { optShowComp = True }
-- | Abbreviation for the option type.
type OptType = GenericOptType DaemonOptions
-- | Check function type.
type CheckFn a = DaemonOptions -> IO (Either ExitCode a)
-- | Prepare function type.
type PrepFn a b = DaemonOptions -> a -> IO b
-- | Main execution function type.
type MainFn a b = DaemonOptions -> a -> b -> IO ()
-- * Command line options
oNoDaemonize :: OptType
oNoDaemonize =
(Option "f" ["foreground"]
(NoArg (\ opts -> Ok opts { optDaemonize = False }))
"Don't detach from the current terminal",
OptComplNone)
oDebug :: OptType
oDebug =
(Option "d" ["debug"]
(NoArg (\ opts -> Ok opts { optDebug = True }))
"Enable debug messages",
OptComplNone)
oNoUserChecks :: OptType
oNoUserChecks =
(Option "" ["no-user-checks"]
(NoArg (\ opts -> Ok opts { optNoUserChecks = True }))
"Ignore user checks",
OptComplNone)
oPort :: Int -> OptType
oPort def =
(Option "p" ["port"]
(reqWithConversion (tryRead "reading port")
(\port opts -> Ok opts { optPort = Just port }) "PORT")
("Network port (default: " ++ show def ++ ")"),
OptComplInteger)
oBindAddress :: OptType
oBindAddress =
(Option "b" ["bind"]
(ReqArg (\addr opts -> Ok opts { optBindAddress = Just addr })
"ADDR")
("Bind address (default is 'any' on either IPv4 or IPv6, " ++
"depending on cluster configuration)"),
OptComplInetAddr)
oSyslogUsage :: OptType
oSyslogUsage =
(Option "" ["syslog"]
(reqWithConversion syslogUsageFromRaw
(\su opts -> Ok opts { optSyslogUsage = Just su })
"SYSLOG")
("Enable logging to syslog (except debug messages); " ++
"one of 'no', 'yes' or 'only' [" ++ C.syslogUsage ++ "]"),
OptComplChoices ["yes", "no", "only"])
oForceNode :: OptType
oForceNode =
(Option "" ["force-node"]
(NoArg (\ opts -> Ok opts { optForceNode = True }))
"Force the daemon to run on a different node than the master",
OptComplNone)
oNoVoting :: OptType
oNoVoting =
(Option "" ["no-voting"]
(NoArg (\ opts -> Ok opts { optNoVoting = True }))
"Skip node agreement check (dangerous)",
OptComplNone)
oYesDoIt :: OptType
oYesDoIt =
(Option "" ["yes-do-it"]
(NoArg (\ opts -> Ok opts { optYesDoIt = True }))
"Force a dangerous operation",
OptComplNone)
-- | Generic options.
genericOpts :: [OptType]
genericOpts = [ oShowHelp
, oShowVer
, oShowComp
]
-- | Annotates and transforms IOErrors into a Result type. This can be
-- used in the error handler argument to 'catch', for example.
ioErrorToResult :: String -> IOError -> IO (Result a)
ioErrorToResult description exc =
return . Bad $ description ++ ": " ++ show exc
-- | Small wrapper over getArgs and 'parseOpts'.
parseArgs :: String -> [OptType] -> IO (DaemonOptions, [String])
parseArgs cmd options = do
cmd_args <- getArgs
parseOpts defaultOptions cmd_args cmd (options ++ genericOpts) []
-- * Daemon-related functions
-- | PID file mode.
pidFileMode :: FileMode
pidFileMode = unionFileModes ownerReadMode ownerWriteMode
-- | PID file open flags.
pidFileFlags :: OpenFileFlags
pidFileFlags = defaultFileFlags { noctty = True, trunc = False }
-- | Writes a PID file and locks it.
writePidFile :: FilePath -> IO Fd
writePidFile path = do
fd <- openFd path ReadWrite (Just pidFileMode) pidFileFlags
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
my_pid <- getProcessID
_ <- fdWrite fd (show my_pid ++ "\n")
return fd
-- | Helper function to ensure a socket doesn't exist. Should only be
-- called once we have locked the pid file successfully.
cleanupSocket :: FilePath -> IO ()
cleanupSocket socketPath =
catchJust (guard . isDoesNotExistError) (removeLink socketPath)
(const $ return ())
-- | Sets up a daemon's environment.
setupDaemonEnv :: FilePath -> FileMode -> IO ()
setupDaemonEnv cwd umask = do
changeWorkingDirectory cwd
_ <- setFileCreationMask umask
_ <- createSession
return ()
-- | Cleanup function, performing all the operations that need to be done prior
-- to shutting down a daemon.
finalCleanup :: FilePath -> IO ()
finalCleanup = removeFile
-- | Signal handler for the termination signal.
handleSigTerm :: ThreadId -> IO ()
handleSigTerm mainTID =
-- Throw termination exception to the main thread, so that the daemon is
-- actually stopped in the proper way, executing all the functions waiting on
-- "finally" statement.
Control.Exception.throwTo mainTID ExitSuccess
-- | Signal handler for reopening log files.
handleSigHup :: FilePath -> IO ()
handleSigHup path = do
setupDaemonFDs (Just path)
logInfo "Reopening log files after receiving SIGHUP"
-- | Sets up a daemon's standard file descriptors.
setupDaemonFDs :: Maybe FilePath -> IO ()
setupDaemonFDs logfile = do
null_in_handle <- openFile devNull ReadMode
null_out_handle <- openFile (fromMaybe devNull logfile) AppendMode
hDuplicateTo null_in_handle stdin
hDuplicateTo null_out_handle stdout
hDuplicateTo null_out_handle stderr
hClose null_in_handle
hClose null_out_handle
-- | Computes the default bind address for a given family.
defaultBindAddr :: Int -- ^ The port we want
-> Result Socket.Family -- ^ The cluster IP family
-> IO (Result (Socket.Family, Socket.SockAddr))
#if MIN_VERSION_network(2,7,0)
defaultBindAddr _ (Bad m) = return (Bad m)
defaultBindAddr port (Ok fam) = do
addrs <- getAddrInfo (Just defaultHints { addrFamily = fam
, addrFlags = [AI_PASSIVE]
, addrSocketType = Stream
}) Nothing (Just (show port))
return $ case addrs of
a:_ -> Ok $ (fam, addrAddress a)
    [] -> Bad $ "Cannot resolve default listening address?!"
#else
defaultBindAddr port (Ok Socket.AF_INET) =
return $ Ok (Socket.AF_INET,
Socket.SockAddrInet (fromIntegral port) Socket.iNADDR_ANY)
defaultBindAddr port (Ok Socket.AF_INET6) =
return $ Ok (Socket.AF_INET6,
Socket.SockAddrInet6 (fromIntegral port) 0 Socket.iN6ADDR_ANY 0)
defaultBindAddr _ fam =
return $ Bad $ "Unsupported address family: " ++ show fam
#endif
-- | Based on the options, compute the socket address to use for the
-- daemon.
parseAddress :: DaemonOptions -- ^ Command line options
-> Int -- ^ Default port for this daemon
-> IO (Result (Socket.Family, Socket.SockAddr))
parseAddress opts defport = do
let port = maybe defport fromIntegral $ optPort opts
def_family <- Ssconf.getPrimaryIPFamily Nothing
case optBindAddress opts of
Nothing -> defaultBindAddr port def_family
Just saddr -> Control.Exception.catch
(resolveAddr port saddr)
(ioErrorToResult $ "Invalid address " ++ saddr)
-- | Environment variable to override the assumed host name of the
-- current node.
vClusterHostNameEnvVar :: String
vClusterHostNameEnvVar = "GANETI_HOSTNAME"
-- | Get the real full qualified host name.
getFQDN' :: Maybe Socket.AddrInfo -> IO String
getFQDN' hints = do
hostname <- getHostName
addrInfos <- Socket.getAddrInfo hints (Just hostname) Nothing
let address = listToMaybe addrInfos >>= (Just . Socket.addrAddress)
case address of
Just a -> do
fqdn <- liftM fst $ Socket.getNameInfo [] True False a
return (fromMaybe hostname fqdn)
Nothing -> return hostname
-- | Return the full qualified host name, honoring the vcluster setup
-- and hints on the preferred socket type or protocol.
getFQDNwithHints :: Maybe Socket.AddrInfo -> IO String
getFQDNwithHints hints = do
let ioErrorToNothing :: IOError -> IO (Maybe String)
ioErrorToNothing _ = return Nothing
vcluster_node <- Control.Exception.catch
(liftM Just (getEnv vClusterHostNameEnvVar))
ioErrorToNothing
case vcluster_node of
Just node_name -> return node_name
Nothing -> getFQDN' hints
-- | Return the full qualified host name, honoring the vcluster setup.
getFQDN :: IO String
getFQDN = do
familyresult <- Ssconf.getPrimaryIPFamily Nothing
getFQDNwithHints
$ genericResult (const Nothing)
(\family -> Just $ Socket.defaultHints { Socket.addrFamily = family })
familyresult
-- | Returns if the current node is the master node.
isMaster :: IO Bool
isMaster = do
curNode <- getFQDN
masterNode <- Ssconf.getMasterNode Nothing
case masterNode of
Ok n -> return (curNode == n)
Bad _ -> return False
-- | Ensures that the daemon runs on the right node (and exits
-- gracefully if it doesn't)
ensureNode :: GanetiDaemon -> DaemonOptions -> IO ()
ensureNode daemon opts = do
is_master <- isMaster
when (daemonOnlyOnMaster daemon
&& not is_master
&& not (optForceNode opts)) $ do
putStrLn "Not master, exiting."
exitWith (ExitFailure C.exitNotmaster)
-- | Run an I\/O action that might throw an I\/O error, under a
-- handler that will simply annotate and re-throw the exception.
describeError :: String -> Maybe Handle -> Maybe FilePath -> IO a -> IO a
describeError descr hndl fpath =
modifyIOError (\e -> annotateIOError e descr hndl fpath)
-- | Run an I\/O action as a daemon.
--
-- WARNING: this only works in single-threaded mode (either using the
-- single-threaded runtime, or using the multi-threaded one but with
-- only one OS thread, i.e. -N1).
daemonize :: FilePath -> (Maybe Fd -> IO ()) -> IO ()
daemonize logfile action = do
(rpipe, wpipe) <- createPipe
-- first fork
_ <- forkProcess $ do
-- in the child
closeFd rpipe
let wpipe' = Just wpipe
setupDaemonEnv "/" (unionFileModes groupModes otherModes)
setupDaemonFDs (Just logfile) `Control.Exception.catch`
handlePrepErr False wpipe'
-- second fork, launches the actual child code; standard
-- double-fork technique
_ <- forkProcess (action wpipe')
exitImmediately ExitSuccess
closeFd wpipe
hndl <- fdToHandle rpipe
errors <- hGetContents hndl
ecode <- if null errors
then return ExitSuccess
else do
hPutStrLn stderr $ daemonStartupErr errors
return $ ExitFailure C.exitFailure
exitImmediately ecode
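-- A minimal usage sketch (the log file path and the daemon body are
-- hypothetical, not part of this module):
--
-- > main :: IO ()
-- > main = daemonize "/var/log/example-daemon.log" $ \wpipe -> do
-- >   maybeCloseFd wpipe              -- startup succeeded, release the pipe
-- >   forever $ threadDelay 1000000   -- the actual daemon body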
-- | Generic daemon startup.
genericMain :: GanetiDaemon -- ^ The daemon we're running
-> [OptType] -- ^ The available options
-> CheckFn a -- ^ Check function
-> PrepFn a b -- ^ Prepare function
-> MainFn a b -- ^ Execution function
-> IO ()
genericMain daemon options check_fn prep_fn exec_fn = do
let progname = daemonName daemon
(opts, args) <- parseArgs progname options
-- Modify handleClient in Ganeti.UDSServer to remove this logging from luxid.
when (optDebug opts && daemon == GanetiLuxid) .
hPutStrLn stderr $
printf C.debugModeConfidentialityWarning (daemonName daemon)
ensureNode daemon opts
exitUnless (null args) "This program doesn't take any arguments"
unless (optNoUserChecks opts) $ do
runtimeEnts <- runResultT getEnts
ents <- exitIfBad "Can't find required user/groups" runtimeEnts
verifyDaemonUser daemon ents
syslog <- case optSyslogUsage opts of
Nothing -> exitIfBad "Invalid cluster syslog setting" $
syslogUsageFromRaw C.syslogUsage
Just v -> return v
log_file <- daemonLogFile daemon
-- run the check function and optionally exit if it returns an exit code
check_result <- check_fn opts
check_result' <- case check_result of
Left code -> exitWith code
Right v -> return v
let processFn = if optDaemonize opts
then daemonize log_file
else \action -> action Nothing
_ <- installHandler lostConnection (Catch (handleSigHup log_file)) Nothing
processFn $ innerMain daemon opts syslog check_result' prep_fn exec_fn
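-- A hypothetical wiring sketch (the daemon, the option list and the trivial
-- check / prepare / exec functions are illustrative only):
--
-- > main :: IO ()
-- > main = genericMain GanetiLuxid [oNoDaemonize, oDebug]
-- >          (\_opts -> return (Right ()))  -- check: nothing to verify
-- >          (\_opts () -> return ())       -- prepare: no resources needed
-- >          (\_opts () () -> return ())    -- execute: placeholder main loop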
-- | Full prepare function.
--
-- This is executed after daemonization, and sets up both the log
-- files (a generic functionality) and the custom prepare function of
-- the daemon.
fullPrep :: GanetiDaemon -- ^ The daemon we're running
-> DaemonOptions -- ^ The options structure, filled from the cmdline
-> SyslogUsage -- ^ Syslog mode
-> a -- ^ Check results
-> PrepFn a b -- ^ Prepare function
-> IO (FilePath, b)
fullPrep daemon opts syslog check_result prep_fn = do
logfile <- if optDaemonize opts
then return Nothing
else liftM Just $ daemonLogFile daemon
pidfile <- daemonPidFile daemon
let dname = daemonName daemon
setupLogging logfile dname (optDebug opts) True False syslog
_ <- describeError "writing PID file; already locked?"
Nothing (Just pidfile) $ writePidFile pidfile
logNotice $ dname ++ " daemon startup"
prep_res <- prep_fn opts check_result
tid <- myThreadId
_ <- installHandler sigTERM (Catch $ handleSigTerm tid) Nothing
return (pidfile, prep_res)
-- | Inner daemon function.
--
-- This is executed after daemonization.
innerMain :: GanetiDaemon -- ^ The daemon we're running
-> DaemonOptions -- ^ The options structure, filled from the cmdline
-> SyslogUsage -- ^ Syslog mode
-> a -- ^ Check results
-> PrepFn a b -- ^ Prepare function
-> MainFn a b -- ^ Execution function
          -> Maybe Fd -- ^ Error reporting file descriptor
-> IO ()
innerMain daemon opts syslog check_result prep_fn exec_fn fd = do
(pidFile, prep_result) <- fullPrep daemon opts syslog check_result prep_fn
`Control.Exception.catch` handlePrepErr True fd
-- no error reported, we should now close the fd
maybeCloseFd fd
finally (exec_fn opts check_result prep_result)
(finalCleanup pidFile
>> logNotice (daemonName daemon ++ " daemon shutdown"))
-- | Daemon prepare error handling function.
handlePrepErr :: Bool -> Maybe Fd -> IOError -> IO a
handlePrepErr logging_setup fd err = do
let msg = show err
case fd of
-- explicitly writing to the fd directly, since when forking it's
-- better (safer) than trying to convert this into a full handle
Just fd' -> fdWrite fd' msg >> return ()
Nothing -> hPutStrLn stderr (daemonStartupErr msg)
when logging_setup $ logError msg
exitWith $ ExitFailure 1
-- | Close a file descriptor.
maybeCloseFd :: Maybe Fd -> IO ()
maybeCloseFd Nothing = return ()
maybeCloseFd (Just fd) = closeFd fd
|
ganeti/ganeti
|
src/Ganeti/Daemon.hs
|
bsd-2-clause
| 19,064 | 0 | 15 | 4,380 | 3,906 | 2,039 | 1,867 | 374 | 4 |
{-
Graphs are considered equal (using the relaxed (~#==) notation) if they have the same vertices
and the same vertex adjacency (each pair of vertices has exactly the same number of edges connecting them).
TODO this needs testing
-}
module PolyGraph.ReadOnly.Graph.GraphEquality (
(~#==)
, edgeCountGIsomorphism
) where
import PolyGraph.ReadOnly (GMorphism(..))
import PolyGraph.ReadOnly.Graph (EdgeSemantics(..), Graph)
import PolyGraph.Common (UOPair)
import PolyGraph.Buildable.PolyMorth (morth)
import Data.Hashable (Hashable)
import Instances.EdgeCountMapGraph (EdgeCountMapGraph)
--morth :: forall g0 v0 e0 t0 g1 v1 e1 t1. (GraphDataSet g0 v0 e0 t0, BuildableGraphDataSet g1 v1 e1 t1) =>
-- GMorphism v0 e0 v1 e1 -> g0 -> g1
edgeCountGIsomorphism :: (EdgeSemantics e v) => GMorphism v e v (UOPair v)
edgeCountGIsomorphism = GMorphism {
vTrans = id,
eTrans = resolveEdge
}
(~#==) :: forall g0 g1 v e0 e1 t0 t1 . (Eq v, Hashable v, Graph g0 v e0 t0, Graph g1 v e1 t1) =>
g0 -> g1 -> Bool
g0 ~#== g1 =
let g0edgeCounts :: EdgeCountMapGraph v
g0edgeCounts = morth edgeCountGIsomorphism g0
g1edgeCounts :: EdgeCountMapGraph v
g1edgeCounts = morth edgeCountGIsomorphism g1
in g0edgeCounts == g1edgeCounts
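-- For intuition (hypothetical example): two multigraphs over the vertices
-- {a, b} that each contain exactly two parallel a-b edges satisfy (~#==),
-- regardless of insertion order or of the concrete edge payloads, because
-- both collapse to the same EdgeCountMapGraph.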
|
rpeszek/GraphPlay
|
src/PolyGraph/ReadOnly/Graph/GraphEquality.hs
|
bsd-3-clause
| 1,363 | 0 | 9 | 347 | 266 | 155 | 111 | -1 | -1 |
-- | This module contains the definitions for creating properties to pass to javascript elements and
-- foreign javascript classes. In addition, it contains definitions for the
-- <https://facebook.github.io/react/docs/events.html React Event System>.
{-# LANGUAGE ViewPatterns, UndecidableInstances #-}
module React.Flux.PropertiesAndEvents (
PropertyOrHandler
-- * Creating Properties
, property
, elementProperty
, nestedProperty
, CallbackFunction
, callback
, foreignClass
, rawJsRendering
-- ** Combinators
, (@=)
, ($=)
, (&=)
, classNames
, classNamesLast
, classNamesAny
-- * Creating Events
, Event(..)
, EventTarget(..)
, eventTargetProp
, target
, preventDefault
, stopPropagation
, capturePhase
, on
-- ** Keyboard
, KeyboardEvent(..)
, onKeyDown
, onKeyPress
, onKeyUp
-- ** Focus
, FocusEvent(..)
, onBlur
, onFocus
-- ** Form
, onChange
, onInput
, onSubmit
-- ** Mouse
, MouseEvent(..)
, onClick
, onContextMenu
, onDoubleClick
, onDrag
, onDragEnd
, onDragEnter
, onDragExit
, onDragLeave
, onDragOver
, onDragStart
, onDrop
, onMouseDown
, onMouseEnter
, onMouseLeave
, onMouseMove
, onMouseOut
, onMouseOver
, onMouseUp
-- ** Touch
, initializeTouchEvents
, Touch(..)
, TouchEvent(..)
, onTouchCancel
, onTouchEnd
, onTouchMove
, onTouchStart
-- ** UI
, onScroll
-- ** Wheel
, WheelEvent(..)
, onWheel
-- ** Image
, onLoad
, onError
) where
import Control.Monad (forM)
import Control.Monad.Writer (runWriter)
import Control.DeepSeq
import System.IO.Unsafe (unsafePerformIO)
import Data.Monoid ((<>))
import qualified Data.Text as T
import GHC.Generics
import React.Flux.Internal
import React.Flux.Views (ViewEventHandler, StatefulViewEventHandler)
import Data.Maybe (fromMaybe)
import GHCJS.Foreign (fromJSBool)
import GHCJS.Marshal (FromJSVal(..))
import GHCJS.Types (JSVal, nullRef, IsJSVal)
import JavaScript.Array as JSA
import qualified Data.JSString.Text as JSS
-- | Some third-party React classes allow passing React elements as properties. This function
-- will first run the given 'ReactElementM' to obtain an element or elements, and then use that
-- element as the value for a property with the given key.
elementProperty :: JSString -> ReactElementM handler () -> PropertyOrHandler handler
elementProperty = ElementProperty
-- | Allows you to create nested object properties. The list of properties passed in will be
-- converted to an object which is then set as the value for a property with the given name. For
-- example,
--
-- >[ nestedProperty "Hello" [ "a" @= (100 :: Int), "b" $= "World" ]
-- >, "c" $= "!!!"
-- >]
--
-- would create a javascript object
--
-- >{"Hello": {a: 100, b: "World"}, "c": "!!!"}
nestedProperty :: JSString -> [PropertyOrHandler handler] -> PropertyOrHandler handler
nestedProperty = NestedProperty
-- | A class which is used to implement <https://wiki.haskell.org/Varargs variable argument functions>.
-- Any function where each argument implements 'FromJSVal' and the result is either
-- 'ViewEventHandler' or 'StatefulViewEventHandler' is an instance of this class.
class CallbackFunction handler a | a -> handler where
applyFromArguments :: JSArray -> Int -> a -> IO handler
instance CallbackFunction ViewEventHandler ViewEventHandler where
applyFromArguments _ _ h = return h
instance {-# OVERLAPPING #-} CallbackFunction (StatefulViewEventHandler s) (StatefulViewEventHandler s) where
applyFromArguments _ _ h = return h
instance {-# OVERLAPPABLE #-} (FromJSVal a, CallbackFunction handler b) => CallbackFunction handler (a -> b) where
applyFromArguments args k f = do
ma <- fromJSVal $ if k >= JSA.length args then nullRef else JSA.index k args
a <- maybe (error "Unable to decode callback argument") return ma
applyFromArguments args (k+1) $ f a
-- | Create a callback property. This is primarily intended for foreign React classes which expect
-- callbacks to be passed to them as properties. For events on DOM elements, you should instead use
-- the handlers below.
--
-- The function @func@ can be any function, as long as each argument to the function is an instance
-- of 'FromJSVal' and the result of the function is @handler@. Internally, 'callback' creates a
-- javascript function which accesses the @arguments@ javascript object and then matches entries in
-- @arguments@ to the parameters of @func@. If @func@ has more parameters than the javascript
-- @arguments@ object, a javascript null is used for the conversion. Since the 'Maybe' instance of
-- 'FromJSVal' converts a null reference to 'Nothing', you can exploit this to create
-- variable-argument javascript callbacks.
--
-- For example, all three of the following functions could be passed as @func@ inside a view.
--
-- >foo :: Int -> Maybe String -> ViewEventHandler
-- >bar :: Aeson.Value -> ViewEventHandler
-- >baz :: ViewEventHandler
--
-- For another example, see the haddock comments in "React.Flux.Addons.Bootstrap".
callback :: CallbackFunction handler func => JSString -> func -> PropertyOrHandler handler
callback name func = CallbackPropertyWithArgumentArray name $ \arr -> applyFromArguments arr 0 func
-- | Create a 'ReactElement' for a class defined in javascript. See
-- 'React.Flux.Combinators.foreign_' for a convenient wrapper and some examples.
foreignClass :: JSVal -- ^ The javascript reference to the class
-> [PropertyOrHandler eventHandler] -- ^ properties and handlers to pass when creating an instance of this class.
-> ReactElementM eventHandler a -- ^ The child element or elements
-> ReactElementM eventHandler a
foreignClass name attrs (ReactElementM child) =
let (a, childEl) = runWriter child
in elementToM a $ ForeignElement (Right $ ReactViewRef name) attrs childEl
-- | Inject arbitrary javascript code into the rendering function. This is very low level and should only
-- be used as a last resort when interacting with complex third-party react classes. For the most part,
-- third-party react classes can be interacted with using 'foreignClass' and the various ways of creating
-- properties.
rawJsRendering :: (JSVal -> JSArray -> IO JSVal)
-- ^ The raw code to inject into the rendering function. The first argument is the 'this' value
-- from the rendering function, so it points to the React class. The second argument is the result of
-- rendering the children, so it is an array of React elements. The return value must be a React element.
-> ReactElementM handler () -- ^ the children
-> ReactElementM handler ()
rawJsRendering trans (ReactElementM child) =
let (a, childEl) = runWriter child
trans' thisVal childLst =
ReactElementRef <$> trans thisVal (JSA.fromList $ map reactElementRef childLst)
in elementToM a $ RawJsElement trans' childEl
----------------------------------------------------------------------------------------------------
--- Generic Event
----------------------------------------------------------------------------------------------------
-- | A reference to the object that dispatched the event.
-- <https://developer.mozilla.org/en-US/docs/Web/API/Event/target>
newtype EventTarget = EventTarget JSVal
deriving (Generic)
instance IsJSVal EventTarget
instance NFData EventTarget
instance Show (EventTarget) where
show _ = "EventTarget"
-- | Access a property in an event target
eventTargetProp :: FromJSVal val => EventTarget -> JSString -> val
eventTargetProp (EventTarget ref) key = ref .: key
-- | Every event in React is a synthetic event, a cross-browser wrapper around the native event.
data Event = Event
{ evtType :: T.Text
, evtBubbles :: Bool
, evtCancelable :: Bool
, evtCurrentTarget :: EventTarget
, evtDefaultPrevented :: Bool
, evtPhase :: Int
, evtIsTrusted :: Bool
-- evtNativeEvent
, evtTarget :: EventTarget
, evtTimestamp :: Int
, evtHandlerArg :: HandlerArg
} deriving (Show, Generic)
instance NFData Event
-- | A version of 'eventTargetProp' which accesses the property of 'evtTarget' in the event. This
-- is useful for example:
--
-- >div_ $
-- > input_ [ "type" @= "checked"
-- > , onChange $ \evt -> let val = target evt "value" in ...
-- > ]
--
-- In this case, @val@ would correspond to the javascript expression @evt.target.value@.
target :: FromJSVal val => Event -> JSString -> val
target e s = eventTargetProp (evtTarget e) s
parseEvent :: HandlerArg -> Event
parseEvent arg@(HandlerArg o) = Event
{ evtType = o .: "type"
, evtBubbles = o .: "bubbles"
, evtCancelable = o .: "cancelable"
, evtCurrentTarget = EventTarget $ js_getProp o "currentTarget"
, evtDefaultPrevented = o .: "defaultPrevented"
, evtPhase = o .: "eventPhase"
, evtIsTrusted = o .: "isTrusted"
, evtTarget = EventTarget $ js_getProp o "target"
, evtTimestamp = o .: "timeStamp"
, evtHandlerArg = arg
}
-- | Use this to create an event handler for an event not covered by the rest of this module.
-- (Events are not covered if they don't have extra arguments that require special handling.)
-- For example, onPlay and onPause are events you could use with @on@.
on :: JSString -> (Event -> handler) -> PropertyOrHandler handler
on name f = CallbackPropertyWithSingleArgument
{ csPropertyName = name
, csFunc = f . parseEvent
}
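-- For example, 'on' can be used for events such as onPlay or onPause that have
-- no extra detail arguments (a hedged sketch; @audio_@, @dispatch@ and
-- @PlayStarted@ are illustrative assumptions, not part of this module):
--
-- >audio_ [ "src" $= "clip.mp3", on "onPlay" $ \_ -> dispatch PlayStarted ] (return ())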
-- | Construct a handler from a detail parser, used by the various events below.
on2 :: JSString -- ^ The event name
-> (HandlerArg -> detail) -- ^ A function parsing the details for the specific event.
-> (Event -> detail -> handler) -- ^ The function implementing the handler.
-> PropertyOrHandler handler
on2 name parseDetail f = CallbackPropertyWithSingleArgument
{ csPropertyName = name
, csFunc = \raw -> f (parseEvent raw) (parseDetail raw)
}
-- | React re-uses event objects in a pool. To make sure this is OK, we must perform
-- all computation involving the event object before it is returned to React. But the callback
-- registered in the handler will return anytime the Haskell thread blocks, and the Haskell thread
-- will continue asynchronously. If this occurs, the event object is no longer valid. This
-- therefore needs to be called and fully processed in the handler before anything else happens,
-- like sending actions to stores.
--
-- TODO: this requires some more reasoning that would show that it actually works. It may be
-- possible that there still is room for the thread to yield between receiving the handler object
-- and executing the 'unsafePerformIO' in here, even if it is called first thing in the handler
-- body.
--
-- FIXME: A better way may be to make a specialized monad available in the event handlers that
-- allows for these things, but not general IO.
preventDefault :: Event -> ()
preventDefault (evtHandlerArg -> HandlerArg ref) = unsafePerformIO (js_preventDefault ref) `seq` ()
-- | See 'preventDefault'.
stopPropagation :: Event -> ()
stopPropagation (evtHandlerArg -> HandlerArg ref) = unsafePerformIO (js_stopPropagation ref) `seq` ()
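-- One possible pattern for 'preventDefault' (illustrative; @a_@, @dispatch@ and
-- @LinkClicked@ are assumptions): force the unit result before returning the
-- handler's actions, so the event object is consumed while it is still valid.
--
-- >a_ [ "href" $= "#", onClick $ \evt _ -> preventDefault evt `seq` dispatch LinkClicked ] (return ())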
foreign import javascript unsafe
"$1['preventDefault']();"
js_preventDefault :: JSVal -> IO ()
foreign import javascript unsafe
"$1['stopPropagation']();"
js_stopPropagation :: JSVal -> IO ()
-- | By default, the handlers below are triggered during the bubbling phase. Use this to switch
-- them to trigger during the capture phase.
capturePhase :: PropertyOrHandler handler -> PropertyOrHandler handler
capturePhase (CallbackPropertyWithSingleArgument n h) = CallbackPropertyWithSingleArgument (n <> "Capture") h
capturePhase _ = error "You must use React.Flux.PropertiesAndEvents.capturePhase on an event handler"
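-- For example (illustrative; @dispatch@ and @Dismiss@ are assumptions), this
-- click handler fires during the capture phase rather than the bubbling phase:
--
-- >div_ [ capturePhase $ onClick $ \_ _ -> dispatch Dismiss ] (return ())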
---------------------------------------------------------------------------------------------------
--- Keyboard
---------------------------------------------------------------------------------------------------
-- | The data for the keyboard events
data KeyboardEvent = KeyboardEvent
{ keyEvtAltKey :: Bool
, keyEvtCharCode :: Int
, keyEvtCtrlKey :: Bool
, keyGetModifierState :: T.Text -> Bool
, keyKey :: T.Text
, keyCode :: Int
, keyLocale :: Maybe T.Text
, keyLocation :: Int
, keyMetaKey :: Bool
, keyRepeat :: Bool
, keyShiftKey :: Bool
, keyWhich :: Int
}
deriving (Generic)
instance NFData KeyboardEvent
instance Show KeyboardEvent where
show (KeyboardEvent k1 k2 k3 _ k4 k5 k6 k7 k8 k9 k10 k11) =
show (k1, k2, k3, k4, k5, k6, k7, k8, k9, k10, k11)
parseKeyboardEvent :: HandlerArg -> KeyboardEvent
parseKeyboardEvent (HandlerArg o) = KeyboardEvent
{ keyEvtAltKey = o .: "altKey"
, keyEvtCharCode = o .: "charCode"
, keyEvtCtrlKey = o .: "ctrlKey"
, keyGetModifierState = getModifierState o
, keyKey = o .: "key"
, keyCode = o .: "keyCode"
, keyLocale = o .: "locale"
, keyLocation = o .: "location"
, keyMetaKey = o .: "metaKey"
, keyRepeat = o .: "repeat"
, keyShiftKey = o .: "shiftKey"
, keyWhich = o .: "which"
}
onKeyDown :: (Event -> KeyboardEvent -> handler) -> PropertyOrHandler handler
onKeyDown = on2 "onKeyDown" parseKeyboardEvent
onKeyPress :: (Event -> KeyboardEvent -> handler) -> PropertyOrHandler handler
onKeyPress = on2 "onKeyPress" parseKeyboardEvent
onKeyUp :: (Event -> KeyboardEvent -> handler) -> PropertyOrHandler handler
onKeyUp = on2 "onKeyUp" parseKeyboardEvent
--------------------------------------------------------------------------------
-- Focus Events
--------------------------------------------------------------------------------
data FocusEvent = FocusEvent {
focusRelatedTarget :: EventTarget
} deriving (Show)
parseFocusEvent :: HandlerArg -> FocusEvent
parseFocusEvent (HandlerArg ref) = FocusEvent $ EventTarget $ js_getProp ref "relatedTarget"
onBlur :: (Event -> FocusEvent -> handler) -> PropertyOrHandler handler
onBlur = on2 "onBlur" parseFocusEvent
onFocus :: (Event -> FocusEvent -> handler) -> PropertyOrHandler handler
onFocus = on2 "onFocus" parseFocusEvent
--------------------------------------------------------------------------------
-- Form Events
--------------------------------------------------------------------------------
-- | The onChange event is special in React and should be used for all input change events. For
-- details, see <https://facebook.github.io/react/docs/forms.html>
onChange :: (Event -> handler) -> PropertyOrHandler handler
onChange = on "onChange"
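-- A hedged sketch of the usual controlled-input pattern with 'onChange'
-- (@dispatch@, @UpdateText@ and @currentTxt@ are assumptions):
--
-- >input_ [ "value" @= currentTxt
-- >       , onChange $ \evt -> dispatch $ UpdateText (target evt "value")
-- >       ]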
onInput :: (Event -> handler) -> PropertyOrHandler handler
onInput = on "onInput"
onSubmit :: (Event -> handler) -> PropertyOrHandler handler
onSubmit = on "onSubmit"
--------------------------------------------------------------------------------
-- Mouse Events
--------------------------------------------------------------------------------
data MouseEvent = MouseEvent
{ mouseAltKey :: Bool
, mouseButton :: Int
, mouseButtons :: Int
, mouseClientX :: Int
, mouseClientY :: Int
, mouseCtrlKey :: Bool
, mouseGetModifierState :: T.Text -> Bool
, mouseMetaKey :: Bool
, mousePageX :: Int
, mousePageY :: Int
, mouseRelatedTarget :: EventTarget
, mouseScreenX :: Int
, mouseScreenY :: Int
, mouseShiftKey :: Bool
}
deriving (Generic)
instance NFData MouseEvent
instance Show MouseEvent where
show (MouseEvent m1 m2 m3 m4 m5 m6 _ m7 m8 m9 m10 m11 m12 m13)
= show (m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13)
parseMouseEvent :: HandlerArg -> MouseEvent
parseMouseEvent (HandlerArg o) = MouseEvent
{ mouseAltKey = o .: "altKey"
, mouseButton = o .: "button"
, mouseButtons = o .: "buttons"
, mouseClientX = o .: "clientX"
, mouseClientY = o .: "clientY"
, mouseCtrlKey = o .: "ctrlKey"
, mouseGetModifierState = getModifierState o
, mouseMetaKey = o .: "metaKey"
, mousePageX = o .: "pageX"
, mousePageY = o .: "pageY"
, mouseRelatedTarget = EventTarget $ js_getProp o "relatedTarget"
, mouseScreenX = o .: "screenX"
, mouseScreenY = o .: "screenY"
, mouseShiftKey = o .: "shiftKey"
}
onClick :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onClick = on2 "onClick" parseMouseEvent
onContextMenu :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onContextMenu = on2 "onContextMenu" parseMouseEvent
onDoubleClick :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDoubleClick = on2 "onDoubleClick" parseMouseEvent
onDrag :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDrag = on2 "onDrag" parseMouseEvent
onDragEnd :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragEnd = on2 "onDragEnd" parseMouseEvent
onDragEnter :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragEnter = on2 "onDragEnter" parseMouseEvent
onDragExit :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragExit = on2 "onDragExit" parseMouseEvent
onDragLeave :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragLeave = on2 "onDragLeave" parseMouseEvent
onDragOver :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragOver = on2 "onDragOver" parseMouseEvent
onDragStart :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDragStart = on2 "onDragStart" parseMouseEvent
onDrop :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onDrop = on2 "onDrop" parseMouseEvent
onMouseDown :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseDown = on2 "onMouseDown" parseMouseEvent
onMouseEnter :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseEnter = on2 "onMouseEnter" parseMouseEvent
onMouseLeave :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseLeave = on2 "onMouseLeave" parseMouseEvent
onMouseMove :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseMove = on2 "onMouseMove" parseMouseEvent
onMouseOut :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseOut = on2 "onMouseOut" parseMouseEvent
onMouseOver :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseOver = on2 "onMouseOver" parseMouseEvent
onMouseUp :: (Event -> MouseEvent -> handler) -> PropertyOrHandler handler
onMouseUp = on2 "onMouseUp" parseMouseEvent
--------------------------------------------------------------------------------
-- Touch
--------------------------------------------------------------------------------
-- | Initializing touch events is only needed with React 0.13; it was removed in version 0.14.
foreign import javascript unsafe
"React['initializeTouchEvents'] ? React['initializeTouchEvents'](true) : null"
initializeTouchEvents :: IO ()
data Touch = Touch {
touchIdentifier :: Int
, touchTarget :: EventTarget
, touchScreenX :: Int
, touchScreenY :: Int
, touchClientX :: Int
, touchClientY :: Int
, touchPageX :: Int
, touchPageY :: Int
} deriving (Show, Generic)
instance NFData Touch
data TouchEvent = TouchEvent {
touchAltKey :: Bool
, changedTouches :: [Touch]
, touchCtrlKey :: Bool
, touchGetModifierState :: T.Text -> Bool
, touchMetaKey :: Bool
, touchShiftKey :: Bool
, touchTargets :: [Touch]
, touches :: [Touch]
}
deriving (Generic)
instance NFData TouchEvent
instance Show TouchEvent where
show (TouchEvent t1 t2 t3 _ t4 t5 t6 t7)
= show (t1, t2, t3, t4, t5, t6, t7)
parseTouch :: JSVal -> Touch
parseTouch o = Touch
{ touchIdentifier = o .: "identifier"
, touchTarget = EventTarget $ js_getProp o "target"
, touchScreenX = o .: "screenX"
, touchScreenY = o .: "screenY"
, touchClientX = o .: "clientX"
, touchClientY = o .: "clientY"
, touchPageX = o .: "pageX"
, touchPageY = o .: "pageY"
}
parseTouchList :: JSVal -> JSString -> [Touch]
parseTouchList obj key = unsafePerformIO $ do
let arr = js_getArrayProp obj key
len = arrayLength arr
forM [0..len-1] $ \idx -> do
let jsref = arrayIndex idx arr
return $ parseTouch jsref
parseTouchEvent :: HandlerArg -> TouchEvent
parseTouchEvent (HandlerArg o) = TouchEvent
{ touchAltKey = o .: "altKey"
, changedTouches = parseTouchList o "changedTouches"
, touchCtrlKey = o .: "ctrlKey"
, touchGetModifierState = getModifierState o
, touchMetaKey = o .: "metaKey"
, touchShiftKey = o .: "shiftKey"
, touchTargets = parseTouchList o "targetTouches"
, touches = parseTouchList o "touches"
}
onTouchCancel :: (Event -> TouchEvent -> handler) -> PropertyOrHandler handler
onTouchCancel = on2 "onTouchCancel" parseTouchEvent
onTouchEnd :: (Event -> TouchEvent -> handler) -> PropertyOrHandler handler
onTouchEnd = on2 "onTouchEnd" parseTouchEvent
onTouchMove :: (Event -> TouchEvent -> handler) -> PropertyOrHandler handler
onTouchMove = on2 "onTouchMove" parseTouchEvent
onTouchStart :: (Event -> TouchEvent -> handler) -> PropertyOrHandler handler
onTouchStart = on2 "onTouchStart" parseTouchEvent
--------------------------------------------------------------------------------
-- UI Events
--------------------------------------------------------------------------------
onScroll :: (Event -> handler) -> PropertyOrHandler handler
onScroll = on "onScroll"
--------------------------------------------------------------------------------
-- Wheel
--------------------------------------------------------------------------------
data WheelEvent = WheelEvent {
wheelDeltaMode :: Int
, wheelDeltaX :: Int
, wheelDeltaY :: Int
, wheelDeltaZ :: Int
} deriving (Show, Generic)
instance NFData WheelEvent
parseWheelEvent :: HandlerArg -> WheelEvent
parseWheelEvent (HandlerArg o) = WheelEvent
{ wheelDeltaMode = o .: "deltaMode"
, wheelDeltaX = o .: "deltaX"
, wheelDeltaY = o .: "deltaY"
, wheelDeltaZ = o .: "deltaZ"
}
onWheel :: (Event -> MouseEvent -> WheelEvent -> handler) -> PropertyOrHandler handler
onWheel f = CallbackPropertyWithSingleArgument
{ csPropertyName = "onWheel"
, csFunc = \raw -> f (parseEvent raw) (parseMouseEvent raw) (parseWheelEvent raw)
}
--------------------------------------------------------------------------------
--- Image
--------------------------------------------------------------------------------
onLoad :: (Event -> handler) -> PropertyOrHandler handler
onLoad = on "onLoad"
onError :: (Event -> handler) -> PropertyOrHandler handler
onError = on "onError"
--------------------------------------------------------------------------------
--- JS Utils
--------------------------------------------------------------------------------
foreign import javascript unsafe
"$1[$2]"
js_getProp :: JSVal -> JSString -> JSVal
foreign import javascript unsafe
"$1[$2]"
js_getArrayProp :: JSVal -> JSString -> JSArray
-- | Access a property from an object. Since event objects are immutable, we can use
-- unsafePerformIO without worry.
(.:) :: FromJSVal b => JSVal -> JSString -> b
obj .: key = fromMaybe (error "Unable to decode event target") $ unsafePerformIO $ -- TODO: get rid of the unsafePerformIO here!
fromJSVal $ js_getProp obj key
foreign import javascript unsafe
"$1['getModifierState']($2)"
js_GetModifierState :: JSVal -> JSString -> JSVal
getModifierState :: JSVal -> T.Text -> Bool
getModifierState ref = fromJSBool . js_GetModifierState ref . JSS.textToJSString
arrayLength :: JSArray -> Int
arrayLength = JSA.length
arrayIndex :: Int -> JSArray -> JSVal
arrayIndex = JSA.index
|
liqula/react-flux
|
src/React/Flux/PropertiesAndEvents.hs
|
bsd-3-clause
| 23,860 | 37 | 15 | 4,405 | 4,491 | 2,533 | 1,958 | -1 | -1 |
type Addr = Integer
type Time = Integer
type Env = Map Name Addr
type Store = Map Addr Val
data Val = LitV Lit | Clo [Name] Call Env
type StateSpace = Maybe (Call, Env, Store, Time)
|
davdar/quals
|
writeup-old/sections/03AAMByExample/02AbstractStateSpace/00Cut.hs
|
bsd-3-clause
| 182 | 0 | 7 | 38 | 77 | 45 | 32 | 6 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import Data.Data
import Data.Typeable
import qualified DockerHub.Build as B
import qualified DockerHub.Config as C
import qualified DockerHub.Data as D
import qualified DockerHub.Pull as P
import System.Console.CmdArgs
import System.Environment (getArgs, withArgs)
-- Main entry point for the program.
-- Takes the path to the docker-hub.yaml configuration file and a list of
-- DockerHub repositories, and issues requests to DockerHub to build or pull
-- these repositories.
main = do
args <- getArgs
cmd <- getCmd args
case cmd of
CmdBuild{} -> runBuild cmd
CmdPull{} -> runPull cmd
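-- Example invocations (illustrative; the flag names come from the cmdArgs mode
-- definitions below):
--
-- docker-hub build --config-file=docker-hub.yaml web api
-- docker-hub pull --tag=latest web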
-- Functions/types for internal use.
-- Run the "build" command.
-- Issues a request to DockerHub to build the requested repositories.
runBuild :: Cmd -> IO ()
runBuild cmd = do
config <- C.load $ cmdConfigFile cmd
let reposToBuild = reposFromConfig config $ cmdRepositories cmd
responses <- B.build reposToBuild
print responses
-- Run the "pull" command.
runPull :: Cmd -> IO ()
runPull cmd = do
config <- C.load $ cmdConfigFile cmd
let tag = cmdTag cmd
let requestedRepos = cmdRepositories cmd
let reposToPull = if (null requestedRepos) then error "You must specify at least one repository to pull."
else reposFromConfig config requestedRepos
if (null reposToPull) then error "None of the specified repositories have a corresponding configuration."
else do
results <- P.pull tag reposToPull
print results
-- The following command line handling system has roughly been taken from the
-- HLint library.
getCmd :: [String] -> IO Cmd
getCmd args = withArgs (map f args) $ cmdArgsRun mode
where f x = if x == "-?" || x == "--help" then "--help=all" else x
data Cmd
= CmdBuild
{ cmdConfigFile :: FilePath
, cmdTag :: String
, cmdRepositories :: [String] }
| CmdPull
{ cmdConfigFile :: FilePath
, cmdTag :: String
, cmdRepositories :: [String] }
deriving (Data, Typeable, Show)
mode = cmdArgsMode $ modes
[ CmdBuild
{ cmdConfigFile = name'' "config-file" "~/.docker/docker-hub.yaml" &= typFile &= help "The configuration file that contains information about the repositories. It defaults to ~/.docker/docker-hub.yaml."
, cmdTag = name'' "tag" "latest" &= typ "TAG" &= help "The docker tag to build. It defaults to 'latest'."
, cmdRepositories = def &= args &= typ "REPOSITORIES"
} &= explicit &= name "build"
, CmdPull
{ cmdConfigFile = name'' "config-file" "~/.docker/docker-hub.yaml" &= typFile &= help "The configuration file that contains information about the repositories. It defaults to ~/.docker/docker-hub.yaml."
, cmdTag = name'' "tag" "latest" &= typ "TAG" &= help "The docker tag to build. It defaults to 'latest'."
, cmdRepositories = def &= args &= typ "REPOSITORIES"
} &= explicit &= name "pull"
] &= program "docker-hub" &=verbosity &= summary "A command line utility that eases triggering builds and pulls of repositories in the DockerHub registry."
where name' optionName = def &= explicit &= name optionName
name'' optionName optionDefault = optionDefault &= explicit &= name optionName
-- Takes a Config value and a list of repository names and returns the
-- Repository values defined in the Config whose names appear in the list.
reposFromConfig :: C.Config -> [String] -> [D.Repository]
reposFromConfig config requestedRepos = filter (isRequestedRepo requestedRepos) configRepos
where configRepos = C.repositories config
isRequestedRepo requestedRepos configRepo = (D.name configRepo) `elem` requestedRepos
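-- For example (illustrative values): if the configuration defines repositories
-- named "web" and "api", then reposFromConfig config ["api", "worker"] returns
-- only the "api" repository; requested names without a configuration entry are
-- silently dropped here, and the callers decide whether that is an error.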
|
krystalcode/docker-hub
|
app/Main.hs
|
bsd-3-clause
| 3,895 | 0 | 17 | 915 | 796 | 414 | 382 | 64 | 3 |
{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest,
linkingNeeded, checkLinkInfo, writeInterfaceOnlyMode
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import Elf
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import SrcLoc
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import qualified GHC.LanguageExtensions as LangExt
import Exception
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import Data.Version
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
preprocess :: HscEnv
-> (FilePath, Maybe Phase) -- ^ filename and starting phase
-> IO (DynFlags, FilePath)
preprocess hsc_env (filename, mb_phase) =
ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
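-- A minimal usage sketch of 'preprocess' (the @hsc_env@ value is an assumption
-- for illustration):
--
-- > (dflags', hspp_fn) <- preprocess hsc_env ("Foo.hs", Nothing)
--
-- @dflags'@ then reflects any OPTIONS pragmas found in @Foo.hs@, and @hspp_fn@
-- is the preprocessed temporary file.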
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
compileOne :: HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne = compileOne' Nothing (Just batchMsg)
compileOne' :: Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne' m_tc_result mHscMessage
hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
source_modified0
= do
debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)
(status, hmi0) <- hscIncrementalCompile
always_do_basic_recompilation_check
m_tc_result mHscMessage
hsc_env summary source_modified mb_old_iface (mod_index, nmods)
let flags = hsc_dflags hsc_env0
in do unless (gopt Opt_KeepHiFiles flags) $
addFilesToClean flags [ml_hi_file $ ms_location summary]
unless (gopt Opt_KeepOFiles flags) $
addFilesToClean flags [ml_obj_file $ ms_location summary]
case (status, hsc_lang) of
(HscUpToDate, _) ->
-- TODO recomp014 triggers this assert. What's going on?!
-- ASSERT( isJust maybe_old_linkable || isNoLink (ghcLink dflags) )
return hmi0 { hm_linkable = maybe_old_linkable }
(HscNotGeneratingCode, HscNothing) ->
let mb_linkable = if isHsBootOrSig src_flavour
then Nothing
-- TODO: Questionable.
else Just (LM (ms_hs_date summary) this_mod [])
in return hmi0 { hm_linkable = mb_linkable }
(HscNotGeneratingCode, _) -> panic "compileOne HscNotGeneratingCode"
(_, HscNothing) -> panic "compileOne HscNothing"
(HscUpdateBoot, HscInterpreted) -> do
return hmi0
(HscUpdateBoot, _) -> do
touchObjectFile dflags object_filename
return hmi0
(HscUpdateSig, HscInterpreted) ->
let linkable = LM (ms_hs_date summary) this_mod []
in return hmi0 { hm_linkable = Just linkable }
(HscUpdateSig, _) -> do
output_fn <- getOutputFilename next_phase
Temporary basename dflags next_phase (Just location)
-- #10660: Use the pipeline instead of calling
-- compileEmptyStub directly, so -dynamic-too gets
-- handled properly
_ <- runPipeline StopLn hsc_env
(output_fn,
Just (HscOut src_flavour
mod_name HscUpdateSig))
(Just basename)
Persistent
(Just location)
Nothing
o_time <- getModificationUTCTime object_filename
let linkable = LM o_time this_mod [DotO object_filename]
return hmi0 { hm_linkable = Just linkable }
(HscRecomp cgguts summary, HscInterpreted) -> do
(hasStub, comp_bc) <- hscInteractive hsc_env cgguts summary
stub_o <- case hasStub of
Nothing -> return []
Just stub_c -> do
stub_o <- compileStub hsc_env stub_c
return [DotO stub_o]
let hs_unlinked = [BCOs comp_bc]
unlinked_time = ms_hs_date summary
-- Why do we use the timestamp of the source file here,
-- rather than the current time? This works better in
-- the case where the local clock is out of sync
-- with the filesystem's clock. It's just as accurate:
-- if the source is modified, then the linkable will
-- be out of date.
let linkable = LM unlinked_time (ms_mod summary)
(hs_unlinked ++ stub_o)
return hmi0 { hm_linkable = Just linkable }
(HscRecomp cgguts summary, _) -> do
output_fn <- getOutputFilename next_phase
Temporary basename dflags next_phase (Just location)
-- We're in --make mode: finish the compilation pipeline.
_ <- runPipeline StopLn hsc_env
(output_fn,
Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
(Just basename)
Persistent
(Just location)
Nothing
-- The object filename comes from the ModLocation
o_time <- getModificationUTCTime object_filename
let linkable = LM o_time this_mod [DotO object_filename]
return hmi0 { hm_linkable = Just linkable }
where dflags0 = ms_hspp_opts summary
this_mod = ms_mod summary
location = ms_location summary
input_fn = expectJust "compile:hs" (ml_hs_file location)
input_fnpp = ms_hspp_file summary
mod_graph = hsc_mod_graph hsc_env0
needsTH = any (xopt LangExt.TemplateHaskell . ms_hspp_opts) mod_graph
needsQQ = any (xopt LangExt.QuasiQuotes . ms_hspp_opts) mod_graph
needsLinker = needsTH || needsQQ
isDynWay = any (== WayDyn) (ways dflags0)
isProfWay = any (== WayProf) (ways dflags0)
internalInterpreter = not (gopt Opt_ExternalInterpreter dflags0)
src_flavour = ms_hsc_src summary
mod_name = ms_mod_name summary
next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
object_filename = ml_obj_file location
-- #8180 - when using TemplateHaskell, switch on -dynamic-too so
-- the linker can correctly load the object files. This isn't necessary
-- when using -fexternal-interpreter.
dflags1 = if needsLinker && dynamicGhc && internalInterpreter &&
not isDynWay && not isProfWay
then gopt_set dflags0 Opt_BuildDynamicToo
else dflags0
basename = dropExtension input_fn
-- We add the directory in which the .hs file resides to the import
-- path. This is needed when we try to compile the .hc file later, if it
-- imports a _stub.h file that we created here.
current_dir = takeDirectory basename
old_paths = includePaths dflags1
dflags = dflags1 { includePaths = current_dir : old_paths }
hsc_env = hsc_env0 {hsc_dflags = dflags}
-- Figure out what lang we're generating
hsc_lang = hscTarget dflags
-- -fforce-recomp should also work with --make
force_recomp = gopt Opt_ForceRecomp dflags
source_modified
| force_recomp = SourceModified
| otherwise = source_modified0
always_do_basic_recompilation_check = case hsc_lang of
HscInterpreted -> True
_ -> False
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c = do
(_, stub_o) <- runPipeline StopLn hsc_env (stub_c,Nothing) Nothing
Temporary Nothing{-no ModLocation-} Nothing
return stub_o
compileEmptyStub :: DynFlags -> HscEnv -> FilePath -> ModLocation -> ModuleName -> IO ()
compileEmptyStub dflags hsc_env basename location mod_name = do
-- To maintain the invariant that every Haskell file
-- compiles to object code, we make an empty (but
-- valid) stub object file for signatures. However,
-- we make sure this object file has a unique symbol,
-- so that ranlib on OS X doesn't complain, see
-- http://ghc.haskell.org/trac/ghc/ticket/12673
-- and https://github.com/haskell/cabal/issues/2257
empty_stub <- newTempName dflags "c"
let src = text "int" <+> ppr (mkModule (thisPackage dflags) mod_name) <+> text "= 0;"
writeFile empty_stub (showSDoc dflags (pprCode CStyle src))
_ <- runPipeline StopLn hsc_env
(empty_stub, Nothing)
(Just basename)
Persistent
(Just location)
Nothing
return ()
-- ---------------------------------------------------------------------------
-- Link
link :: GhcLink -- interactive or batch
-> DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode. It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.
link ghcLink dflags
= lookupHook linkHook l dflags ghcLink dflags
where
l LinkInMemory _ _ _
= if cGhcWithInterpreter == "YES"
then -- Not Linking...(demand linker will do the job)
return Succeeded
else panicBadLink LinkInMemory
l NoLink _ _ _
= return Succeeded
l LinkBinary dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkStaticLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkDynLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
panicBadLink :: GhcLink -> a
panicBadLink other = panic ("link: GHC not built to link this way: " ++
show other)
link' :: DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
link' dflags batch_attempt_linking hpt
| batch_attempt_linking
= do
let
staticLink = case ghcLink dflags of
LinkStaticLib -> True
_ -> platformBinariesAreStaticLibs (targetPlatform dflags)
home_mod_infos = eltsHpt hpt
-- the packages we depend on
pkg_deps = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos
-- the linkables to link
linkables = map (expectJust "link".hm_linkable) home_mod_infos
debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))
-- check for the -no-link flag
if isNoLink (ghcLink dflags)
then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
return Succeeded
else do
let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
obj_files = concatMap getOfiles linkables
exe_file = exeFileName staticLink dflags
linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps
if not (gopt Opt_ForceRecomp dflags) && not linking_needed
then do debugTraceMsg dflags 2 (text exe_file <+> text "is up to date, linking not required.")
return Succeeded
else do
compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")
-- Don't showPass in Batch mode; doLink will do that for us.
let link = case ghcLink dflags of
LinkBinary -> linkBinary
LinkStaticLib -> linkStaticLibCheck
LinkDynLib -> linkDynLibCheck
other -> panicBadLink other
link dflags obj_files pkg_deps
debugTraceMsg dflags 3 (text "link: done")
-- linkBinary only returns if it succeeds
return Succeeded
| otherwise
= do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
text " Main.main not exported; not linking.")
return Succeeded
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [InstalledUnitId] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
-- if the modification time on the executable is later than the
-- modification times on all of the objects and libraries, then omit
-- linking (unless the -fforce-recomp flag was given).
let exe_file = exeFileName staticLink dflags
e_exe_time <- tryIO $ getModificationUTCTime exe_file
case e_exe_time of
Left _ -> return True
Right t -> do
-- first check object files and extra_ld_inputs
let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
let (errs,extra_times) = splitEithers e_extra_times
let obj_times = map linkableTime linkables ++ extra_times
if not (null errs) || any (t <) obj_times
then return True
else do
-- next, check libraries. XXX this only checks Haskell libraries,
-- not extra_libraries or -l things from the command line.
let pkg_hslibs = [ (libraryDirs c, lib)
| Just c <- map (lookupInstalledPackage dflags) pkg_deps,
lib <- packageHsLibs dflags c ]
pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
if any isNothing pkg_libfiles then return True else do
e_lib_times <- mapM (tryIO . getModificationUTCTime)
(catMaybes pkg_libfiles)
let (lib_errs,lib_times) = splitEithers e_lib_times
if not (null lib_errs) || any (t <) lib_times
then return True
else checkLinkInfo dflags pkg_deps exe_file
-- Check whether the link info recorded in the existing binary matches the
-- current link options. Returns 'False' if it does, so we can avoid linking,
-- because the previous binary was linked with "the same options".
checkLinkInfo :: DynFlags -> [InstalledUnitId] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
| not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
-- ToDo: Windows and OS X do not use the ELF binary format, so
-- readelf does not work there. We need to find another way to do
-- this.
= return False -- conservatively we should return True, but not
-- linking in this case was the behaviour for a long
-- time so we leave it as-is.
| otherwise
= do
link_info <- getLinkInfo dflags pkg_deps
debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
m_exe_link_info <- readElfNoteAsString dflags exe_file
ghcLinkInfoSectionName ghcLinkInfoNoteName
let sameLinkInfo = (Just link_info == m_exe_link_info)
debugTraceMsg dflags 3 $ case m_exe_link_info of
Nothing -> text "Exe link info: Not found"
Just s
| sameLinkInfo -> text ("Exe link info is the same")
| otherwise -> text ("Exe link info is different: " ++ s)
return (not sameLinkInfo)
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
| os == OSSolaris2 = False -- see #5382
| otherwise = osElfTarget os
-- See Note [LinkInfo section]
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
-- if we use the ".debug" prefix, then strip will strip it by default
-- Identifier for the note (see Note [LinkInfo section])
ghcLinkInfoNoteName :: String
ghcLinkInfoNoteName = "GHC link info"
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
let batch_lib_file = if WayDyn `notElem` ways dflags
then "lib" ++ lib <.> "a"
else mkSOName (targetPlatform dflags) lib
found <- filterM doesFileExist (map (</> batch_lib_file) dirs)
case found of
[] -> return Nothing
(x:_) -> return (Just x)
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs = do
o_files <- mapM (compileFile hsc_env stop_phase) srcs
doLink (hsc_dflags hsc_env) stop_phase o_files
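-- A hedged sketch of 'oneShot': compiling two files to object code without
-- linking (as @ghc -c Foo.hs Bar.hs@ does) roughly corresponds to
--
-- > oneShot hsc_env StopLn [("Foo.hs", Nothing), ("Bar.hs", Nothing)]
--
-- where the stop phase comes from the command line (-c, -S, -E, ...).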
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
exists <- doesFileExist src
when (not exists) $
throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))
let
dflags = hsc_dflags hsc_env
split = gopt Opt_SplitObjs dflags
mb_o_file = outputFile dflags
ghc_link = ghcLink dflags -- Set by -c or -no-link
-- When linking, the -o argument refers to the linker's output.
-- otherwise, we use it as the name for the pipeline's output.
output
-- If we are doing -fno-code, then act as if the output is
-- 'Temporary'. This stops GHC trying to copy files to their
-- final location.
| HscNothing <- hscTarget dflags = Temporary
| StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
-- -o foo applies to linker
| isJust mb_o_file = SpecificFile
-- -o foo applies to the file we are compiling now
| otherwise = Persistent
stop_phase' = case stop_phase of
As _ | split -> SplitAs
_ -> stop_phase
( _, out_file) <- runPipeline stop_phase' hsc_env
(src, fmap RealPhase mb_phase) Nothing output
Nothing{-no ModLocation-} Nothing
return out_file
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files
| not (isStopLn stop_phase)
= return () -- We stopped before the linking phase
| otherwise
= case ghcLink dflags of
NoLink -> return ()
LinkBinary -> linkBinary dflags o_files []
LinkStaticLib -> linkStaticLibCheck dflags o_files []
LinkDynLib -> linkDynLibCheck dflags o_files []
other -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
runPipeline
:: Phase -- ^ When to stop
-> HscEnv -- ^ Compilation environment
-> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
-> Maybe FilePath -- ^ original basename (if different from ^^^)
-> PipelineOutput -- ^ Output filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
mb_basename output maybe_loc maybe_stub_o
= do let
dflags0 = hsc_dflags hsc_env0
-- Decide where dump files should go based on the pipeline output
dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
hsc_env = hsc_env0 {hsc_dflags = dflags}
(input_basename, suffix) = splitExtension input_fn
suffix' = drop 1 suffix -- strip off the .
basename | Just b <- mb_basename = b
| otherwise = input_basename
-- If we were given a -x flag, then use that phase to start from
start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase
isHaskell (RealPhase (Unlit _)) = True
isHaskell (RealPhase (Cpp _)) = True
isHaskell (RealPhase (HsPp _)) = True
isHaskell (RealPhase (Hsc _)) = True
isHaskell (HscOut {}) = True
isHaskell _ = False
isHaskellishFile = isHaskell start_phase
env = PipeEnv{ stop_phase,
src_filename = input_fn,
src_basename = basename,
src_suffix = suffix',
output_spec = output }
-- We want to catch cases of "you can't get there from here" before
-- we start the pipeline, because otherwise it will just run off the
-- end.
let happensBefore' = happensBefore dflags
case start_phase of
RealPhase start_phase' ->
-- See Note [Partial ordering on phases]
-- Not the same as: (stop_phase `happensBefore` start_phase')
when (not (start_phase' `happensBefore'` stop_phase ||
start_phase' `eqPhase` stop_phase)) $
throwGhcExceptionIO (UsageError
("cannot compile this file to desired target: "
++ input_fn))
HscOut {} -> return ()
debugTraceMsg dflags 4 (text "Running the pipeline")
r <- runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
-- If we are compiling a Haskell module, and doing
-- -dynamic-too, but couldn't do the -dynamic-too fast
-- path, then rerun the pipeline for the dyn way
let dflags = hsc_dflags hsc_env
-- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
debugTraceMsg dflags 4
(text "Running the pipeline again for -dynamic-too")
let dflags' = dynamicTooMkDynamicDynFlags dflags
hsc_env' <- newHscEnv dflags'
_ <- runPipeline' start_phase hsc_env' env input_fn
maybe_loc maybe_stub_o
return ()
return r
runPipeline'
:: PhasePlus -- ^ When to start
-> HscEnv -- ^ Compilation environment
-> PipeEnv
-> FilePath -- ^ Input filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
= do
-- Execute the pipeline...
let state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
evalP (pipeLoop start_phase input_fn) env state
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
env <- getPipeEnv
dflags <- getDynFlags
-- See Note [Partial ordering on phases]
let happensBefore' = happensBefore dflags
stopPhase = stop_phase env
case phase of
RealPhase realPhase | realPhase `eqPhase` stopPhase -- All done
-> -- Sometimes, a compilation phase doesn't actually generate any output
-- (eg. the CPP phase when -fcpp is not turned on). If we end on this
-- stage, but we wanted to keep the output, then we have to explicitly
-- copy the file, remembering to prepend a {-# LINE #-} pragma so that
-- further compilation stages can tell what the original filename was.
case output_spec env of
Temporary ->
return (dflags, input_fn)
output ->
do pst <- getPipeState
final_fn <- liftIO $ getOutputFilename
stopPhase output (src_basename env)
dflags stopPhase (maybe_loc pst)
when (final_fn /= input_fn) $ do
let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
return (dflags, final_fn)
| not (realPhase `happensBefore'` stopPhase)
-- Something has gone wrong. We'll try to cover all the cases when
-- this could happen, so if we reach here it is a panic.
-- eg. it might happen if the -C flag is used on a source file that
-- has {-# OPTIONS -fasm #-}.
-> panic ("pipeLoop: at phase " ++ show realPhase ++
" but I wanted to stop at phase " ++ show stopPhase)
_
-> do liftIO $ debugTraceMsg dflags 4
(text "Running phase" <+> ppr phase)
(next_phase, output_fn) <- runHookedPhase phase input_fn dflags
r <- pipeLoop next_phase output_fn
case phase of
HscOut {} ->
whenGeneratingDynamicToo dflags $ do
setDynFlags $ dynamicTooMkDynamicDynFlags dflags
-- TODO shouldn't ignore result:
_ <- pipeLoop phase input_fn
return ()
_ ->
return ()
return r
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
-> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags =
lookupHook runPhaseHook runPhase dflags pp input dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
-- | Computes the next output filename after we run @next_phase@.
-- Like 'getOutputFilename', but it operates in the 'CompPipeline' monad
-- (which specifies all of the ambient information.)
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
PipeState{maybe_loc, hsc_env} <- getPipeState
let dflags = hsc_dflags hsc_env
liftIO $ getOutputFilename stop_phase output_spec
src_basename dflags next_phase maybe_loc
-- | Computes the next output filename for something in the compilation
-- pipeline. This is controlled by several variables:
--
-- 1. 'Phase': the last phase to be run (e.g. 'stopPhase'). This
-- is used to tell if we're in the last phase or not, because
-- in that case flags like @-o@ may be important.
-- 2. 'PipelineOutput': is this intended to be a 'Temporary' or
-- 'Persistent' build output? Temporary files just go in
-- a fresh temporary name.
-- 3. 'String': what was the basename of the original input file?
-- 4. 'DynFlags': the obvious thing
-- 5. 'Phase': the phase we want to determine the output filename of.
-- 6. @Maybe ModLocation@: the 'ModLocation' of the module we're
-- compiling; this can be used to override the default output
-- of an object file. (TODO: do we actually need this?)
getOutputFilename
:: Phase -> PipelineOutput -> String
-> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
| is_last_phase, Persistent <- output = persistent_fn
| is_last_phase, SpecificFile <- output = case outputFile dflags of
Just f -> return f
Nothing ->
panic "SpecificFile: No filename"
| keep_this_output = persistent_fn
| otherwise = newTempName dflags suffix
where
hcsuf = hcSuf dflags
odir = objectDir dflags
osuf = objectSuf dflags
keep_hc = gopt Opt_KeepHcFiles dflags
keep_s = gopt Opt_KeepSFiles dflags
keep_bc = gopt Opt_KeepLlvmFiles dflags
myPhaseInputExt HCc = hcsuf
myPhaseInputExt MergeStub = osuf
myPhaseInputExt StopLn = osuf
myPhaseInputExt other = phaseInputExt other
is_last_phase = next_phase `eqPhase` stop_phase
-- sometimes, we keep output from intermediate stages
keep_this_output =
case next_phase of
As _ | keep_s -> True
LlvmOpt | keep_bc -> True
HCc | keep_hc -> True
_other -> False
suffix = myPhaseInputExt next_phase
-- persistent object files get put in odir
persistent_fn
| StopLn <- next_phase = return odir_persistent
| otherwise = return persistent
persistent = basename <.> suffix
odir_persistent
| Just loc <- maybe_location = ml_obj_file loc
| Just d <- odir = d </> persistent
| otherwise = persistent
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus -- ^ Run this phase
-> FilePath -- ^ name of the input file
-> DynFlags -- ^ for convenience, we pass the current dflags in
-> CompPipeline (PhasePlus, -- next phase to run
FilePath) -- output filename
-- Invariant: the output filename always contains the output
-- Interesting case: Hsc when there is no recompilation to do
-- Then the output filename is still a .o file
-------------------------------------------------------------------------------
-- Unlit phase
runPhase (RealPhase (Unlit sf)) input_fn dflags
= do
output_fn <- phaseOutputFilename (Cpp sf)
let flags = [ -- The -h option passes the file name for unlit to
-- put in a #line directive
SysTools.Option "-h"
-- See Note [Don't normalise input filenames].
, SysTools.Option $ escape input_fn
, SysTools.FileOption "" input_fn
, SysTools.FileOption "" output_fn
]
liftIO $ SysTools.runUnlit dflags flags
return (RealPhase (Cpp sf), output_fn)
where
-- escape the characters \, ", and ', but don't try to escape
-- Unicode or anything else (so we don't use Util.charToC
-- here). If we get this wrong, then in
-- Coverage.isGoodTickSrcSpan where we check that the filename in
-- a SrcLoc is the same as the source filename, the two will
-- look bogusly different. See test:
-- libraries/hpc/tests/function/subdir/tough2.hs
escape ('\\':cs) = '\\':'\\': escape cs
escape ('\"':cs) = '\\':'\"': escape cs
escape ('\'':cs) = '\\':'\'': escape cs
escape (c:cs) = c : escape cs
escape [] = []
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
-- (b) runs cpp if necessary
runPhase (RealPhase (Cpp sf)) input_fn dflags0
= do
src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
if not (xopt LangExt.Cpp dflags1) then do
-- we have to be careful to emit warnings only once.
unless (gopt Opt_Pp dflags1) $
liftIO $ handleFlagWarnings dflags1 warns
-- no need to preprocess CPP, just pass input file along
-- to the next phase of the pipeline.
return (RealPhase (HsPp sf), input_fn)
else do
output_fn <- phaseOutputFilename (HsPp sf)
liftIO $ doCpp dflags1 True{-raw-}
input_fn output_fn
-- re-read the pragmas now that we've preprocessed the file
-- See #2464,#3457
src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
liftIO $ checkProcessArgsResult dflags2 unhandled_flags
unless (gopt Opt_Pp dflags2) $
liftIO $ handleFlagWarnings dflags2 warns
-- the HsPp pass below will emit warnings
setDynFlags dflags2
return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
runPhase (RealPhase (HsPp sf)) input_fn dflags
= do
if not (gopt Opt_Pp dflags) then
-- no need to preprocess, just pass input file along
-- to the next phase of the pipeline.
return (RealPhase (Hsc sf), input_fn)
else do
PipeEnv{src_basename, src_suffix} <- getPipeEnv
let orig_fn = src_basename <.> src_suffix
output_fn <- phaseOutputFilename (Hsc sf)
liftIO $ SysTools.runPp dflags
( [ SysTools.Option orig_fn
, SysTools.Option input_fn
, SysTools.FileOption "" output_fn
]
)
-- re-read pragmas now that we've parsed the file (see #3674)
src_opts <- liftIO $ getOptionsFromFile dflags output_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
liftIO $ handleFlagWarnings dflags1 warns
return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
= do -- normal Hsc mode, not mkdependHS
PipeEnv{ stop_phase=stop,
src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- we add the current directory (i.e. the directory in which
-- the .hs files resides) to the include path, since this is
-- what gcc does, and it's probably what you want.
let current_dir = takeDirectory basename
paths = includePaths dflags0
dflags = dflags0 { includePaths = current_dir : paths }
setDynFlags dflags
-- gather the imports and module name
(hspp_buf,mod_name,imps,src_imps) <- liftIO $ do
do
buf <- hGetStringBuffer input_fn
(src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
return (Just buf, mod_name, imps, src_imps)
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
location <- getLocation src_flavour mod_name
let o_file = ml_obj_file location -- The real object file
hi_file = ml_hi_file location
dest_file | writeInterfaceOnlyMode dflags
= hi_file
| otherwise
= o_file
-- Figure out if the source has changed, for recompilation avoidance.
--
-- Setting source_unchanged to True means that M.o seems
-- to be up to date wrt M.hs; so no need to recompile unless imports have
-- changed (which the compiler itself figures out).
-- Setting source_unchanged to False tells the compiler that M.o is out of
-- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)
source_unchanged <- liftIO $
if not (isStopLn stop)
-- SourceModified unconditionally if
-- (a) recompilation checker is off, or
-- (b) we aren't going all the way to .o file (e.g. ghc -S)
then return SourceModified
-- Otherwise look at file modification dates
else do dest_file_exists <- doesFileExist dest_file
if not dest_file_exists
then return SourceModified -- Need to recompile
else do t2 <- getModificationUTCTime dest_file
if t2 > src_timestamp
then return SourceUnmodified
else return SourceModified
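        {- A sketch of the decision just above (the recompilation-checker-off
           case aside); dest_file is the .o, or the .hi when only writing an
           interface:

             stop phase /= StopLn (e.g. ghc -S)    -> SourceModified
             dest_file missing                     -> SourceModified
             dest_file not newer than the source   -> SourceModified
             dest_file newer than the source       -> SourceUnmodified

           Even for SourceUnmodified the compiler may still recompile if the
           imported interfaces changed; this flag only records the
           .hs-versus-object comparison made here. -}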
PipeState{hsc_env=hsc_env'} <- getPipeState
-- Tell the finder cache about this module
mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location
-- Make the ModSummary to hand to hscMain
let
mod_summary = ModSummary { ms_mod = mod,
ms_hsc_src = src_flavour,
ms_hspp_file = input_fn,
ms_hspp_opts = dflags,
ms_hspp_buf = hspp_buf,
ms_location = location,
ms_hs_date = src_timestamp,
ms_obj_date = Nothing,
ms_parsed_mod = Nothing,
ms_iface_date = Nothing,
ms_textual_imps = imps,
ms_srcimps = src_imps }
-- run the compiler!
let msg hsc_env _ what _ = oneShotMsg hsc_env what
(result, _) <- liftIO $ hscIncrementalCompile True Nothing (Just msg) hsc_env'
mod_summary source_unchanged Nothing (1,1)
return (HscOut src_flavour mod_name result,
panic "HscOut doesn't have an input filename")
runPhase (HscOut src_flavour mod_name result) _ dflags = do
location <- getLocation src_flavour mod_name
setModLocation location
let o_file = ml_obj_file location -- The real object file
hsc_lang = hscTarget dflags
next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
case result of
HscNotGeneratingCode ->
return (RealPhase StopLn,
panic "No output filename from Hsc when no-code")
HscUpToDate ->
do liftIO $ touchObjectFile dflags o_file
-- The .o file must have a later modification date
-- than the source file (else we wouldn't get Nothing)
-- but we touch it anyway, to keep 'make' happy (we think).
return (RealPhase StopLn, o_file)
HscUpdateBoot ->
do -- In the case of hs-boot files, generate a dummy .o-boot
-- stamp file for the benefit of Make
liftIO $ touchObjectFile dflags o_file
return (RealPhase StopLn, o_file)
HscUpdateSig ->
do -- We need to create a REAL but empty .o file
-- because we are going to attempt to put it in a library
PipeState{hsc_env=hsc_env'} <- getPipeState
let input_fn = expectJust "runPhase" (ml_hs_file location)
basename = dropExtension input_fn
liftIO $ compileEmptyStub dflags hsc_env' basename location mod_name
return (RealPhase StopLn, o_file)
HscRecomp cgguts mod_summary
-> do output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env=hsc_env'} <- getPipeState
(outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
case mStub of
Nothing -> return ()
Just stub_c ->
do stub_o <- liftIO $ compileStub hsc_env' stub_c
setStubO stub_o
return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase
runPhase (RealPhase CmmCpp) input_fn dflags
= do
output_fn <- phaseOutputFilename Cmm
liftIO $ doCpp dflags False{-not raw-}
input_fn output_fn
return (RealPhase Cmm, output_fn)
runPhase (RealPhase Cmm) input_fn dflags
= do
let hsc_lang = hscTarget dflags
let next_phase = hscPostBackendPhase dflags HsSrcFile hsc_lang
output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env} <- getPipeState
liftIO $ hscCompileCmmFile hsc_env input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
runPhase (RealPhase cc_phase) input_fn dflags
| any (cc_phase `eqPhase`) [Cc, Ccxx, HCc, Cobjc, Cobjcxx]
= do
let platform = targetPlatform dflags
hcc = cc_phase `eqPhase` HCc
let cmdline_include_paths = includePaths dflags
-- HC files have the dependent packages stamped into them
pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
-- add package include paths even if we're just compiling .c
-- files; this is the Value Add(TM) that using ghc instead of
-- gcc gives you :)
pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
-- cc-options are not passed when compiling .hc files. Our
        -- hc code doesn't #include any header files anyway, so these
-- options aren't necessary.
pkg_extra_cc_opts <- liftIO $
if cc_phase `eqPhase` HCc
then return []
else getPackageExtraCcOpts dflags pkgs
framework_paths <-
if platformUsesFrameworks platform
then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
let cmdlineFrameworkPaths = frameworkPaths dflags
return $ map ("-F"++)
(cmdlineFrameworkPaths ++ pkgFrameworkPaths)
else return []
let split_objs = gopt Opt_SplitObjs dflags
split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
| otherwise = [ ]
let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
| optLevel dflags >= 1 = [ "-O" ]
| otherwise = []
-- Decide next phase
let next_phase = As False
output_fn <- phaseOutputFilename next_phase
let
more_hcc_opts =
-- on x86 the floating point regs have greater precision
-- than a double, which leads to unpredictable results.
-- By default, we turn this off with -ffloat-store unless
-- the user specified -fexcess-precision.
(if platformArch platform == ArchX86 &&
not (gopt Opt_ExcessPrecision dflags)
then [ "-ffloat-store" ]
else []) ++
-- gcc's -fstrict-aliasing allows two accesses to memory
-- to be considered non-aliasing if they have different types.
-- This interacts badly with the C code we generate, which is
-- very weakly typed, being derived from C--.
["-fno-strict-aliasing"]
ghcVersionH <- liftIO $ getGhcVersionPathName dflags
let gcc_lang_opt | cc_phase `eqPhase` Ccxx = "c++"
| cc_phase `eqPhase` Cobjc = "objective-c"
| cc_phase `eqPhase` Cobjcxx = "objective-c++"
| otherwise = "c"
liftIO $ SysTools.runCc dflags (
-- force the C compiler to interpret this file as C when
-- compiling .hc files, by adding the -x c option.
-- Also useful for plain .c files, just in case GHC saw a
-- -x c option.
[ SysTools.Option "-x", SysTools.Option gcc_lang_opt
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
pic_c_flags
          -- Stub files generated for foreign exports reference the runIO_closure
-- and runNonIO_closure symbols, which are defined in the base package.
-- These symbols are imported into the stub.c file via RtsAPI.h, and the
-- way we do the import depends on whether we're currently compiling
-- the base package or not.
++ (if platformOS platform == OSMinGW32 &&
thisPackage dflags == baseUnitId
then [ "-DCOMPILING_BASE_PACKAGE" ]
else [])
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack. See #2872, commit
-- 5bd3072ac30216a505151601884ac88bf404c9f2
++ (if platformArch platform == ArchSPARC
then ["-mcpu=v9"]
else [])
-- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
++ (if (cc_phase /= Ccxx && cc_phase /= Cobjcxx)
then ["-Wimplicit"]
else [])
++ (if hcc
then gcc_extra_viac_flags ++ more_hcc_opts
else [])
++ verbFlags
++ [ "-S" ]
++ cc_opt
++ [ "-include", ghcVersionH ]
++ framework_paths
++ split_opt
++ include_paths
++ pkg_extra_cc_opts
))
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
runPhase (RealPhase Splitter) input_fn dflags
= do -- tmp_pfx is the prefix used for the split .s files
split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
let n_files_fn = split_s_prefix
liftIO $ SysTools.runSplit dflags
[ SysTools.FileOption "" input_fn
, SysTools.FileOption "" split_s_prefix
, SysTools.FileOption "" n_files_fn
]
-- Save the number of split files for future references
s <- liftIO $ readFile n_files_fn
let n_files = read s :: Int
dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
setDynFlags dflags'
-- Remember to delete all these files
liftIO $ addFilesToClean dflags'
[ split_s_prefix ++ "__" ++ show n ++ ".s"
| n <- [1..n_files]]
return (RealPhase SplitAs,
"**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SplitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
runPhase (RealPhase (As with_cpp)) input_fn dflags
= do
-- LLVM from version 3.0 onwards doesn't support the OS X system
-- assembler, so we use clang as the assembler instead. (#5636)
let whichAsProg | hscTarget dflags == HscLlvm &&
platformOS (targetPlatform dflags) == OSDarwin
= return SysTools.runClang
| otherwise = return SysTools.runAs
as_prog <- whichAsProg
let cmdline_include_paths = includePaths dflags
let pic_c_flags = picCCOpts dflags
next_phase <- maybeMergeStub
output_fn <- phaseOutputFilename next_phase
-- we create directories for the object file, because it
-- might be a hierarchical module.
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
ccInfo <- liftIO $ getCompilerInfo dflags
let runAssembler inputFilename outputFilename
= liftIO $ as_prog dflags
([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
-- See Note [-fPIC for assembler]
++ map SysTools.Option pic_c_flags
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else [])
++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [SysTools.Option "-Qunused-arguments"]
else [])
++ [ SysTools.Option "-x"
, if with_cpp
then SysTools.Option "assembler-with-cpp"
else SysTools.Option "assembler"
, SysTools.Option "-c"
, SysTools.FileOption "" inputFilename
, SysTools.Option "-o"
, SysTools.FileOption "" outputFilename
])
liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
runAssembler input_fn output_fn
return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
runPhase (RealPhase SplitAs) _input_fn dflags
= do
-- we'll handle the stub_o file in this phase, so don't MergeStub,
-- just jump straight to StopLn afterwards.
let next_phase = StopLn
output_fn <- phaseOutputFilename next_phase
let base_o = dropExtension output_fn
osuf = objectSuf dflags
split_odir = base_o ++ "_" ++ osuf ++ "_split"
let pic_c_flags = picCCOpts dflags
-- this also creates the hierarchy
liftIO $ createDirectoryIfMissing True split_odir
-- remove M_split/ *.o, because we're going to archive M_split/ *.o
-- later and we don't want to pick up any old objects.
fs <- liftIO $ getDirectoryContents split_odir
liftIO $ mapM_ removeFile $
map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
let (split_s_prefix, n) = case splitInfo dflags of
Nothing -> panic "No split info"
Just x -> x
let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
split_obj :: Int -> FilePath
split_obj n = split_odir </>
takeFileName base_o ++ "__" ++ show n <.> osuf
let assemble_file n
= SysTools.runAs dflags (
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
(if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else []) ++
-- See Note [-fPIC for assembler]
map SysTools.Option pic_c_flags ++
[ SysTools.Option "-c"
, SysTools.Option "-o"
, SysTools.FileOption "" (split_obj n)
, SysTools.FileOption "" (split_s n)
])
liftIO $ mapM_ assemble_file [1..n]
-- Note [pipeline-split-init]
-- If we have a stub file, it may contain constructor
-- functions for initialisation of this module. We can't
-- simply leave the stub as a separate object file, because it
-- will never be linked in: nothing refers to it. We need to
-- ensure that if we ever refer to the data in this module
-- that needs initialisation, then we also pull in the
-- initialisation routine.
--
-- To that end, we make a DANGEROUS ASSUMPTION here: the data
-- that needs to be initialised is all in the FIRST split
-- object. See Note [codegen-split-init].
PipeState{maybe_stub_o} <- getPipeState
case maybe_stub_o of
Nothing -> return ()
Just stub_o -> liftIO $ do
tmp_split_1 <- newTempName dflags osuf
let split_1 = split_obj 1
copyFile split_1 tmp_split_1
removeFile split_1
joinObjectFiles dflags [tmp_split_1, stub_o] split_1
-- join them into a single .o file
liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
runPhase (RealPhase LlvmOpt) input_fn dflags
= do
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- don't specify anything if user has specified commands. We do this
-- for opt but not llc since opt is very specifically for optimisation
-- passes only, so if the user is passing us extra options we assume
-- they know what they are doing and don't get in the way.
optFlag = if null (getOpts dflags opt_lo)
then map SysTools.Option $ words (llvmOpts !! opt_lvl)
else []
tbaa | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
output_fn <- phaseOutputFilename LlvmLlc
liftIO $ SysTools.runLlvmOpt dflags
([ SysTools.FileOption "" input_fn,
SysTools.Option "-o",
SysTools.FileOption "" output_fn]
++ optFlag
++ [SysTools.Option tbaa])
return (RealPhase LlvmLlc, output_fn)
where
-- we always (unless -optlo specified) run Opt since we rely on it to
-- fix up some pretty big deficiencies in the code we generate
llvmOpts = [ "-mem2reg -globalopt"
, "-O1 -globalopt"
, "-O2"
]
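        {- Roughly, assuming no -optlo flags were given:

             -O0  ->  opt -mem2reg -globalopt
             -O1  ->  opt -O1 -globalopt
             -O2  ->  opt -O2

           plus the TBAA flag chosen above; when -optlo is present only the
           user's flags (and the TBAA flag) are passed. -}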
-----------------------------------------------------------------------------
-- LlvmLlc phase
runPhase (RealPhase LlvmLlc) input_fn dflags
= do
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- iOS requires external references to be loaded indirectly from the
-- DATA segment or dyld traps at runtime writing into TEXT: see #7722
rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
| gopt Opt_PIC dflags = "pic"
| WayDyn `elem` ways dflags = "dynamic-no-pic"
| otherwise = "static"
tbaa | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
-- hidden debugging flag '-dno-llvm-mangler' to skip mangling
let next_phase = case gopt Opt_NoLlvmMangler dflags of
False -> LlvmMangle
True | gopt Opt_SplitObjs dflags -> Splitter
True -> As False
output_fn <- phaseOutputFilename next_phase
liftIO $ SysTools.runLlvmLlc dflags
([ SysTools.Option (llvmOpts !! opt_lvl),
SysTools.Option $ "-relocation-model=" ++ rmodel,
SysTools.FileOption "" input_fn,
SysTools.Option "-o", SysTools.FileOption "" output_fn]
++ [SysTools.Option tbaa]
++ map SysTools.Option fpOpts
++ map SysTools.Option abiOpts
++ map SysTools.Option sseOpts
++ map SysTools.Option avxOpts
++ map SysTools.Option avx512Opts
++ map SysTools.Option stackAlignOpts)
return (RealPhase next_phase, output_fn)
where
-- Bug in LLVM at O3 on OSX.
llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
then ["-O1", "-O2", "-O2"]
else ["-O1", "-O2", "-O3"]
-- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
        -- while compiling GHC source code. It's probably due to the fact that it
-- does not enable VFP by default. Let's do this manually here
fpOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
then ["-mattr=+v7,+vfp3"]
else if (elem VFPv3D16 ext)
then ["-mattr=+v7,+vfp3,+d16"]
else []
ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
then ["-mattr=+v6,+vfp2"]
else ["-mattr=+v6"]
_ -> []
-- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
-- compiles into soft-float ABI. We need to explicitly set abi
-- to hard
abiOpts = case platformArch (targetPlatform dflags) of
ArchARM _ _ HARD -> ["-float-abi=hard"]
ArchARM _ _ _ -> []
_ -> []
sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
| isSse2Enabled dflags = ["-mattr=+sse2"]
| isSseEnabled dflags = ["-mattr=+sse"]
| otherwise = []
avxOpts | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
| isAvx2Enabled dflags = ["-mattr=+avx2"]
| isAvxEnabled dflags = ["-mattr=+avx"]
| otherwise = []
avx512Opts =
[ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
[ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
[ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
stackAlignOpts =
case platformArch (targetPlatform dflags) of
ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
_ -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
runPhase (RealPhase LlvmMangle) input_fn dflags
= do
let next_phase = if gopt Opt_SplitObjs dflags then Splitter else As False
output_fn <- phaseOutputFilename next_phase
liftIO $ llvmFixupAsm dflags input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
runPhase (RealPhase MergeStub) input_fn dflags
= do
PipeState{maybe_stub_o} <- getPipeState
output_fn <- phaseOutputFilename StopLn
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
case maybe_stub_o of
Nothing ->
panic "runPhase(MergeStub): no stub"
Just stub_o -> do
liftIO $ joinObjectFiles dflags [input_fn, stub_o] output_fn
return (RealPhase StopLn, output_fn)
-- warning suppression
runPhase (RealPhase other) _input_fn _dflags =
panic ("runPhase: don't know how to run phase " ++ show other)
maybeMergeStub :: CompPipeline Phase
maybeMergeStub
= do
PipeState{maybe_stub_o} <- getPipeState
if isJust maybe_stub_o then return MergeStub else return StopLn
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
dflags <- getDynFlags
PipeEnv{ src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- Build a ModLocation to pass to hscMain.
-- The source filename is rather irrelevant by now, but it's used
-- by hscMain for messages. hscMain also needs
-- the .hi and .o filenames, and this is as good a way
-- as any to generate them, and better than most. (e.g. takes
-- into account the -osuf flags)
location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
-- Boot-ify it if necessary
let location2 | HsBootFile <- src_flavour = addBootSuffixLocn location1
| otherwise = location1
-- Take -ohi into account if present
-- This can't be done in mkHomeModuleLocation because
    -- it only applies to the module being compiled
let ohi = outputHi dflags
location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
| otherwise = location2
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
let expl_o_file = outputFile dflags
location4 | Just ofile <- expl_o_file
, isNoLink (ghcLink dflags)
= location3 { ml_obj_file = ofile }
| otherwise = location3
return location4
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
= do cFile <- newTempName dflags extn
oFile <- newTempName dflags "o"
writeFile cFile xs
ccInfo <- liftIO $ getCompilerInfo dflags
SysTools.runCc dflags
([Option "-c",
FileOption "" cFile,
Option "-o",
FileOption "" oFile]
++ if extn /= "s"
then cOpts
else asmOpts ccInfo)
return oFile
where
    -- Pass a different set of options to the C compiler depending on whether
-- we're compiling C or assembler. When compiling C, we pass the usual
-- set of include directories and PIC flags.
cOpts = map Option (picCCOpts dflags)
++ map (FileOption "-I")
(includeDirs $ getPackageDetails dflags rtsUnitId)
-- When compiling assembler code, we drop the usual C options, and if the
-- compiler is Clang, we add an extra argument to tell Clang to ignore
-- unused command line options. See trac #11684.
asmOpts ccInfo =
if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [Option "-Qunused-arguments"]
else []
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
log_action dflags dflags NoReason SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
text " Call hs_init_ghc() from your main() function to set these options.")
mkExtraObj dflags "c" (showSDoc dflags main)
where
main
| gopt Opt_NoHsMain dflags = Outputable.empty
| otherwise = vcat [
text "#include \"Rts.h\"",
text "extern StgClosure ZCMain_main_closure;",
text "int main(int argc, char *argv[])",
char '{',
text " RtsConfig __conf = defaultRtsConfig;",
text " __conf.rts_opts_enabled = "
<> text (show (rtsOptsEnabled dflags)) <> semi,
text " __conf.rts_opts_suggestions = "
<> text (if rtsOptsSuggestions dflags
then "rtsTrue"
else "rtsFalse") <> semi,
case rtsOpts dflags of
Nothing -> Outputable.empty
Just opts -> text " __conf.rts_opts= " <>
text (show opts) <> semi,
text " __conf.rts_hs_main = rtsTrue;",
text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
char '}',
char '\n' -- final newline, to keep gcc happy
]
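 {- For reference, a sketch of the C stub this produces for a plain
    `ghc Main.hs` invocation, assuming the default RtsOptsSafeOnly setting,
    suggestions enabled, and no -with-rtsopts (and assuming the derived Show
    instance of RtsOptsEnabled prints the matching C enum name):

      #include "Rts.h"
      extern StgClosure ZCMain_main_closure;
      int main(int argc, char *argv[])
      {
       RtsConfig __conf = defaultRtsConfig;
       __conf.rts_opts_enabled = RtsOptsSafeOnly;
       __conf.rts_opts_suggestions = rtsTrue;
       __conf.rts_hs_main = rtsTrue;
       return hs_main(argc,argv,&ZCMain_main_closure,__conf);
      }
 -}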
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
mkNoteObjsToLinkIntoBinary :: DynFlags -> [InstalledUnitId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
link_info <- getLinkInfo dflags dep_packages
if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
else return []
where
link_opts info = hcat [
-- "link info" section (see Note [LinkInfo section])
makeElfNote dflags ghcLinkInfoSectionName ghcLinkInfoNoteName 0 info,
-- ALL generated assembly must have this section to disable
-- executable stacks. See also
-- compiler/nativeGen/AsmCodeGen.hs for another instance
-- where we need to do this.
if platformHasGnuNonexecStack (targetPlatform dflags)
then text ".section .note.GNU-stack,\"\",@progbits\n"
else Outputable.empty
]
-- | Return the "link info" string
--
-- See Note [LinkInfo section]
getLinkInfo :: DynFlags -> [InstalledUnitId] -> IO String
getLinkInfo dflags dep_packages = do
package_link_opts <- getPackageLinkOpts dflags dep_packages
pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
then getPackageFrameworks dflags dep_packages
else return []
let extra_ld_inputs = ldInputs dflags
let
link_info = (package_link_opts,
pkg_frameworks,
rtsOpts dflags,
rtsOptsEnabled dflags,
gopt Opt_NoHsMain dflags,
map showOpt extra_ld_inputs,
getOpts dflags opt_l)
--
return (show link_info)
{- Note [LinkInfo section]
~~~~~~~~~~~~~~~~~~~~~~~
The "link info" is a string representing the parameters of the link. We save
this information in the binary, and the next time we link, if nothing else has
changed, we use the link info stored in the existing binary to decide whether
to re-link or not.
The "link info" string is stored in a ELF section called ".debug-ghc-link-info"
(see ghcLinkInfoSectionName) with the SHT_NOTE type. For some time, it used to
not follow the specified record-based format (see #11022).
-}
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
getHCFilePackages :: FilePath -> IO [InstalledUnitId]
getHCFilePackages filename =
Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
l <- hGetLine h
case l of
'/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
return (map stringToInstalledUnitId (words rest))
_other ->
return []
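-- The header being matched is the first line of the .hc file; everything
-- after "GHC_PACKAGES" on that line is split into words and read as
-- installed unit ids. Schematically (the ids are invented, and the exact
-- placement of the closing comment delimiter is not shown here):
--
--   /* GHC_PACKAGES base-4.9.1.0 ghc-prim-0.5.0.0 integer-gmp-1.0.0.1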
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager. It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
linkBinary :: DynFlags -> [FilePath] -> [InstalledUnitId] -> IO ()
linkBinary = linkBinary' False
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [InstalledUnitId] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
let platform = targetPlatform dflags
mySettings = settings dflags
verbFlags = getVerbFlags dflags
output_fn = exeFileName staticLink dflags
-- get the full list of packages to link with, by combining the
-- explicit packages with the auto packages and all of their
-- dependencies, and eliminating duplicates.
full_output_fn <- if isAbsolute output_fn
then return output_fn
else do d <- getCurrentDirectory
return $ normalise (d </> output_fn)
pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
get_pkg_lib_path_opts l
| osElfTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
WayDyn `elem` ways dflags
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "$ORIGIN" </>
(l `makeRelativeTo` full_output_fn)
else l
rpath = if gopt Opt_RPath dflags
then ["-Wl,-rpath", "-Wl," ++ libpath]
else []
                         -- Solaris 11's linker does not support the -rpath-link option. It silently
                         -- ignores it and then complains about the next option, which is -l<some
                         -- dir>, as being a directory and not the expected object file, e.g.
-- ld: elf error: file
-- /tmp/ghc-src/libraries/base/dist-install/build:
-- elf_begin: I/O error: region read: Is a directory
rpathlink = if (platformOS platform) == OSSolaris2
then []
else ["-Wl,-rpath-link", "-Wl," ++ l]
in ["-L" ++ l] ++ rpathlink ++ rpath
| osMachOTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
WayDyn `elem` ways dflags &&
gopt Opt_RPath dflags
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "@loader_path" </>
(l `makeRelativeTo` full_output_fn)
else l
in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
| otherwise = ["-L" ++ l]
let lib_paths = libraryPaths dflags
let lib_path_opts = map ("-L"++) lib_paths
extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages
pkg_link_opts <- do
(package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
return $ if staticLink
then package_hs_libs -- If building an executable really means making a static
-- library (e.g. iOS), then we only keep the -l options for
-- HS packages, because libtool doesn't accept other options.
-- In the case of iOS these need to be added by hand to the
-- final link in Xcode.
else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
-- needs to be put before -l<package>,
-- otherwise Solaris linker fails linking
-- a binary with unresolved symbols in RTS
-- which are defined in base package
-- the reason for this is a note in ld(1) about
-- '-u' option: "The placement of this option
-- on the command line is significant.
-- This option must be placed before the library
-- that defines the symbol."
-- frameworks
pkg_framework_opts <- getPkgFrameworkOpts dflags platform dep_packages
let framework_opts = getFrameworkOpts dflags platform
-- probably _stub.o files
let extra_ld_inputs = ldInputs dflags
-- Here are some libs that need to be linked at the *end* of
-- the command line, because they contain symbols that are referred to
-- by the RTS. We can't therefore use the ordinary way opts for these.
let
debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
"-lbfd", "-liberty"
#endif
]
| otherwise = []
let thread_opts
| WayThreaded `elem` ways dflags =
let os = platformOS (targetPlatform dflags)
in if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
OSNetBSD, OSHaiku, OSQNXNTO, OSiOS, OSDarwin]
then []
else ["-lpthread"]
| otherwise = []
rc_objs <- maybeCreateManifest dflags output_fn
let link = if staticLink
then SysTools.runLibtool
else SysTools.runLink
link dflags (
map SysTools.Option verbFlags
++ [ SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
[]
-- Permit the linker to auto link _symbol to _imp_symbol.
-- This lets us link against DLLs without needing an "import library".
++ (if platformOS platform == OSMinGW32
then ["-Wl,--enable-auto-import"]
else [])
-- '-no_compact_unwind'
-- C++/Objective-C exceptions cannot use optimised
-- stack unwinding code. The optimised form is the
-- default in Xcode 4 on at least x86_64, and
-- without this flag we're also seeing warnings
-- like
-- ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
-- on x86.
++ (if sLdSupportsCompactUnwind mySettings &&
not staticLink &&
(platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
case platformArch platform of
ArchX86 -> True
ArchX86_64 -> True
ArchARM {} -> True
ArchARM64 -> True
_ -> False
then ["-Wl,-no_compact_unwind"]
else [])
-- '-no_pie'
-- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
++ (if platformOS platform == OSiOS &&
not staticLink
then ["-Wl,-no_pie"]
else [])
-- '-Wl,-read_only_relocs,suppress'
-- ld gives loads of warnings like:
-- ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
-- when linking any program. We're not sure
-- whether this is something we ought to fix, but
              -- for now this flag silences them.
++ (if platformOS platform == OSDarwin &&
platformArch platform == ArchX86 &&
not staticLink
then ["-Wl,-read_only_relocs,suppress"]
else [])
++ (if sLdIsGnuLd mySettings
then ["-Wl,--gc-sections"]
else [])
++ o_files
++ lib_path_opts)
++ extra_ld_inputs
++ map SysTools.Option (
rc_objs
++ framework_opts
++ pkg_lib_path_opts
++ extraLinkObj:noteLinkObjs
++ pkg_link_opts
++ pkg_framework_opts
++ debug_opts
++ thread_opts
))
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags
| Just s <- outputFile dflags =
case platformOS (targetPlatform dflags) of
OSMinGW32 -> s <?.> "exe"
_ -> if staticLink
then s <?.> "a"
else s
| otherwise =
if platformOS (targetPlatform dflags) == OSMinGW32
then "main.exe"
else if staticLink
then "liba.a"
else "a.out"
where s <?.> ext | null (takeExtension s) = s <.> ext
| otherwise = s
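-- A few illustrative cases (a sketch, not exhaustive):
--
--   -o Main, Windows                            ->  "Main.exe"
--   -o Main.bin, Windows                        ->  "Main.bin"  (extension kept)
--   no -o, Windows                              ->  "main.exe"
--   no -o, elsewhere, ordinary linking          ->  "a.out"
--   staticLink = True with -o Foo (non-Windows) ->  "Foo.a"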
maybeCreateManifest
:: DynFlags
-> FilePath -- filename of executable
-> IO [FilePath] -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
| platformOS (targetPlatform dflags) == OSMinGW32 &&
gopt Opt_GenManifest dflags
= do let manifest_filename = exe_filename <.> "manifest"
writeFile manifest_filename $
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
" <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
" <assemblyIdentity version=\"1.0.0.0\"\n"++
" processorArchitecture=\"X86\"\n"++
" name=\"" ++ dropExtension exe_filename ++ "\"\n"++
" type=\"win32\"/>\n\n"++
" <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
" <security>\n"++
" <requestedPrivileges>\n"++
" <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
" </requestedPrivileges>\n"++
" </security>\n"++
" </trustInfo>\n"++
"</assembly>\n"
-- Windows will find the manifest file if it is named
-- foo.exe.manifest. However, for extra robustness, and so that
-- we can move the binary around, we can embed the manifest in
-- the binary itself using windres:
if not (gopt Opt_EmbedManifest dflags) then return [] else do
rc_filename <- newTempName dflags "rc"
rc_obj_filename <- newTempName dflags (objectSuf dflags)
writeFile rc_filename $
"1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
-- magic numbers :-)
-- show is a bit hackish above, but we need to escape the
-- backslashes in the path.
runWindres dflags $ map SysTools.Option $
["--input="++rc_filename,
"--output="++rc_obj_filename,
"--output-format=coff"]
-- no FileOptions here: windres doesn't like seeing
-- backslashes, apparently
removeFile manifest_filename
return [rc_obj_filename]
| otherwise = return []
linkDynLibCheck :: DynFlags -> [String] -> [InstalledUnitId] -> IO ()
linkDynLibCheck dflags o_files dep_packages
= do
when (haveRtsOptsFlags dflags) $ do
log_action dflags dflags NoReason SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
text " Call hs_init_ghc() from your main() function to set these options.")
linkDynLib dflags o_files dep_packages
linkStaticLibCheck :: DynFlags -> [String] -> [InstalledUnitId] -> IO ()
linkStaticLibCheck dflags o_files dep_packages
= do
when (platformOS (targetPlatform dflags) `notElem` [OSiOS, OSDarwin]) $
throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
let hscpp_opts = picPOpts dflags
let cmdline_include_paths = includePaths dflags
pkg_include_dirs <- getPackageIncludePath dflags []
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let verbFlags = getVerbFlags dflags
let cpp_prog args | raw = SysTools.runCpp dflags args
| otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
let target_defs =
[ "-D" ++ HOST_OS ++ "_BUILD_OS",
"-D" ++ HOST_ARCH ++ "_BUILD_ARCH",
"-D" ++ TARGET_OS ++ "_HOST_OS",
"-D" ++ TARGET_ARCH ++ "_HOST_ARCH" ]
    -- remember, in code we *compile*, the HOST is the same as our TARGET,
-- and BUILD is the same as our HOST.
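    -- For example, for a compiler built on and targeting x86_64 Linux this
    -- amounts to (the platform strings are illustrative):
    --   -Dlinux_BUILD_OS -Dx86_64_BUILD_ARCH -Dlinux_HOST_OS -Dx86_64_HOST_ARCH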
let sse_defs =
[ "-D__SSE__" | isSseEnabled dflags ] ++
[ "-D__SSE2__" | isSse2Enabled dflags ] ++
[ "-D__SSE4_2__" | isSse4_2Enabled dflags ]
let avx_defs =
[ "-D__AVX__" | isAvxEnabled dflags ] ++
[ "-D__AVX2__" | isAvx2Enabled dflags ] ++
[ "-D__AVX512CD__" | isAvx512cdEnabled dflags ] ++
[ "-D__AVX512ER__" | isAvx512erEnabled dflags ] ++
[ "-D__AVX512F__" | isAvx512fEnabled dflags ] ++
[ "-D__AVX512PF__" | isAvx512pfEnabled dflags ]
backend_defs <- getBackendDefs dflags
#ifdef GHCI
let th_defs = [ "-D__GLASGOW_HASKELL_TH__" ]
#else
let th_defs = [ "-D__GLASGOW_HASKELL_TH__=0" ]
#endif
-- Default CPP defines in Haskell source
ghcVersionH <- getGhcVersionPathName dflags
let hsSourceCppOpts = [ "-include", ghcVersionH ]
-- MIN_VERSION macros
let uids = explicitPackages (pkgState dflags)
pkgs = catMaybes (map (lookupPackage dflags) uids)
mb_macro_include <-
if not (null pkgs) && gopt Opt_VersionMacros dflags
then do macro_stub <- newTempName dflags "h"
writeFile macro_stub (generatePackageVersionMacros pkgs)
-- Include version macros for every *exposed* package.
-- Without -hide-all-packages and with a package database
-- size of 1000 packages, it takes cpp an estimated 2
-- milliseconds to process this file. See Trac #10970
-- comment 8.
return [SysTools.FileOption "-include" macro_stub]
else return []
cpp_prog ( map SysTools.Option verbFlags
++ map SysTools.Option include_paths
++ map SysTools.Option hsSourceCppOpts
++ map SysTools.Option target_defs
++ map SysTools.Option backend_defs
++ map SysTools.Option th_defs
++ map SysTools.Option hscpp_opts
++ map SysTools.Option sse_defs
++ map SysTools.Option avx_defs
++ mb_macro_include
-- Set the language mode to assembler-with-cpp when preprocessing. This
-- alleviates some of the C99 macro rules relating to whitespace and the hash
-- operator, which we tend to abuse. Clang in particular is not very happy
-- about this.
++ [ SysTools.Option "-x"
, SysTools.Option "assembler-with-cpp"
, SysTools.Option input_fn
-- We hackily use Option instead of FileOption here, so that the file
-- name is not back-slashed on Windows. cpp is capable of
-- dealing with / in filenames, so it works fine. Furthermore
-- if we put in backslashes, cpp outputs #line directives
-- with *double* backslashes. And that in turn means that
-- our error messages get double backslashes in them.
-- In due course we should arrange that the lexer deals
-- with these \\ escapes properly.
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags | hscTarget dflags == HscLlvm = do
llvmVer <- figureLlvmVersion dflags
return $ case llvmVer of
Just n -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format n ]
_ -> []
where
format (major, minor)
| minor >= 100 = error "getBackendDefs: Unsupported minor version"
| otherwise = show $ (100 * major + minor :: Int) -- Contract is Int
getBackendDefs _ =
return []
-- ---------------------------------------------------------------------------
-- Macros (cribbed from Cabal)
generatePackageVersionMacros :: [PackageConfig] -> String
generatePackageVersionMacros pkgs = concat
-- Do not add any C-style comments. See Trac #3389.
[ generateMacros "" pkgname version
| pkg <- pkgs
, let version = packageVersion pkg
pkgname = map fixchar (packageNameString pkg)
]
fixchar :: Char -> Char
fixchar '-' = '_'
fixchar c = c
generateMacros :: String -> String -> Version -> String
generateMacros prefix name version =
concat
["#define ", prefix, "VERSION_",name," ",show (showVersion version),"\n"
,"#define MIN_", prefix, "VERSION_",name,"(major1,major2,minor) (\\\n"
," (major1) < ",major1," || \\\n"
," (major1) == ",major1," && (major2) < ",major2," || \\\n"
," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")"
,"\n\n"
]
where
(major1:major2:minor:_) = map show (versionBranch version ++ repeat 0)
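-- For illustration, for a hypothetical package bytestring-0.10.8.1 (and an
-- empty prefix) this emits roughly the following (whitespace approximate):
--
--   #define VERSION_bytestring "0.10.8.1"
--   #define MIN_VERSION_bytestring(major1,major2,minor) (\
--     (major1) <  0 || \
--     (major1) == 0 && (major2) <  10 || \
--     (major1) == 0 && (major2) == 10 && (minor) <= 8)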
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
let mySettings = settings dflags
ldIsGnuLd = sLdIsGnuLd mySettings
osInfo = platformOS (targetPlatform dflags)
ld_r args cc = SysTools.runLink dflags ([
SysTools.Option "-nostdlib",
SysTools.Option "-Wl,-r"
]
++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
then []
else [SysTools.Option "-nodefaultlibs"])
++ (if osInfo == OSFreeBSD
then [SysTools.Option "-L/usr/lib"]
else [])
-- gcc on sparc sets -Wl,--relax implicitly, but
-- -r and --relax are incompatible for ld, so
-- disable --relax explicitly.
++ (if platformArch (targetPlatform dflags)
`elem` [ArchSPARC, ArchSPARC64]
&& ldIsGnuLd
then [SysTools.Option "-Wl,-no-relax"]
else [])
++ map SysTools.Option ld_build_id
++ [ SysTools.Option "-o",
SysTools.FileOption "" output_fn ]
++ args)
-- suppress the generation of the .note.gnu.build-id section,
-- which we don't need and sometimes causes ld to emit a
-- warning:
ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
| otherwise = []
ccInfo <- getCompilerInfo dflags
if ldIsGnuLd
then do
script <- newTempName dflags "ldscript"
cwd <- getCurrentDirectory
let o_files_abs = map (\x -> "\"" ++ (cwd </> x) ++ "\"") o_files
writeFile script $ "INPUT(" ++ unwords o_files_abs ++ ")"
ld_r [SysTools.FileOption "" script] ccInfo
else if sLdSupportsFilelist mySettings
then do
filelist <- newTempName dflags "filelist"
writeFile filelist $ unlines o_files
ld_r [SysTools.Option "-Wl,-filelist",
SysTools.FileOption "-Wl," filelist] ccInfo
else do
ld_r (map (SysTools.FileOption "") o_files) ccInfo
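-- In the GNU-ld branch above, the generated linker script simply lists the
-- quoted absolute inputs, e.g. (paths invented for the example):
--
--   INPUT("/build/proj/A.o" "/build/proj/B.o" "/build/proj/C.o")
--
-- which, among other things, keeps very long object lists off the command
-- line.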
-- -----------------------------------------------------------------------------
-- Misc.
writeInterfaceOnlyMode :: DynFlags -> Bool
writeInterfaceOnlyMode dflags =
gopt Opt_WriteInterface dflags &&
HscNothing == hscTarget dflags
-- | What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase _ HsigFile _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
case hsc_lang of
HscC -> HCc
HscAsm | gopt Opt_SplitObjs dflags -> Splitter
| otherwise -> As False
HscLlvm -> LlvmOpt
HscNothing -> StopLn
HscInterpreted -> StopLn
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
createDirectoryIfMissing True $ takeDirectory path
SysTools.touch dflags "Touching object file" path
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
-- | Find out path to @ghcversion.h@ file
getGhcVersionPathName :: DynFlags -> IO FilePath
getGhcVersionPathName dflags = do
dirs <- getPackageIncludePath dflags [toInstalledUnitId rtsUnitId]
found <- filterM doesFileExist (map (</> "ghcversion.h") dirs)
case found of
[] -> throwGhcExceptionIO (InstallationError ("ghcversion.h missing"))
(x:_) -> return x
-- Note [-fPIC for assembler]
-- When compiling .c source file GHC's driver pipeline basically
-- does the following two things:
-- 1. ${CC} -S 'PIC_CFLAGS' source.c
-- 2. ${CC} -x assembler -c 'PIC_CFLAGS' source.S
--
-- Why do we need to pass 'PIC_CFLAGS' both to the C compiler and the assembler?
-- Because on some architectures (at least sparc32) the assembler also chooses
-- the relocation type!
-- Consider the following C module:
--
-- /* pic-sample.c */
-- int v;
-- void set_v (int n) { v = n; }
-- int get_v (void) { return v; }
--
-- $ gcc -S -fPIC pic-sample.c
-- $ gcc -c pic-sample.s -o pic-sample.no-pic.o # incorrect binary
-- $ gcc -c -fPIC pic-sample.s -o pic-sample.pic.o # correct binary
--
-- $ objdump -r -d pic-sample.pic.o > pic-sample.pic.o.od
-- $ objdump -r -d pic-sample.no-pic.o > pic-sample.no-pic.o.od
-- $ diff -u pic-sample.pic.o.od pic-sample.no-pic.o.od
--
-- Most architectures won't show any difference in this test, but on sparc32
-- the following assembly snippet:
--
-- sethi %hi(_GLOBAL_OFFSET_TABLE_-8), %l7
--
-- generates two kinds of relocations, of which only 'R_SPARC_PC22' is correct:
--
-- 3c: 2f 00 00 00 sethi %hi(0), %l7
-- - 3c: R_SPARC_PC22 _GLOBAL_OFFSET_TABLE_-0x8
-- + 3c: R_SPARC_HI22 _GLOBAL_OFFSET_TABLE_-0x8
{- Note [Don't normalise input filenames]
Summary
We used to normalise input filenames when starting the unlit phase. This
broke hpc in `--make` mode with imported literate modules (#2991).
Introduction
1) --main
When compiling a module with --main, GHC scans its imports to find out which
other modules it needs to compile too. It turns out that there is a small
difference between saying `ghc --make A.hs`, when `A` imports `B`, and
specifying both modules on the command line with `ghc --make A.hs B.hs`. In
the former case, the filename for B is inferred to be './B.hs' instead of
'B.hs'.
2) unlit
When GHC compiles a literate haskell file, the source code first needs to go
through unlit, which turns it into normal Haskell source code. At the start
of the unlit phase, in `Driver.Pipeline.runPhase`, we call unlit with the
option `-h` and the name of the original file. We used to normalise this
filename using System.FilePath.normalise, which among other things removes
an initial './'. unlit then uses that filename in #line directives that it
inserts in the transformed source code.
3) SrcSpan
A SrcSpan represents a portion of a source code file. It has fields
linenumber, start column, end column, and also a reference to the file it
originated from. The SrcSpans for a literate haskell file refer to the
filename that was passed to unlit -h.
4) -fhpc
At some point during compilation with -fhpc, in the function
`deSugar.Coverage.isGoodTickSrcSpan`, we compare the filename that a
`SrcSpan` refers to with the name of the file we are currently compiling.
For some reason I don't yet understand, they can sometimes legitimately be
different, and then hpc ignores that SrcSpan.
Problem
When running `ghc --make -fhpc A.hs`, where `A.hs` imports the literate
module `B.lhs`, `B` is inferred to be in the file `./B.lhs` (1). At the
start of the unlit phase, the name `./B.lhs` is normalised to `B.lhs` (2).
Therefore the SrcSpans of `B` refer to the file `B.lhs` (3), but we are
still compiling `./B.lhs`. Hpc thinks these two filenames are different (4),
doesn't include ticks for B, and we have unhappy customers (#2991).
Solution
Do not normalise `input_fn` when starting the unlit phase.
Alternative solution
Another option would be to not compare the two filenames on equality, but to
use System.FilePath.equalFilePath. That function first normalises its
arguments. The problem is that by the time we need to do the comparison, the
filenames have been turned into FastStrings, probably for performance
reasons, so System.FilePath.equalFilePath can not be used directly.
Archeology
The call to `normalise` was added in a commit called "Fix slash
direction on Windows with the new filePath code" (c9b6b5e8). The problem
that commit was addressing has since been solved in a different manner, in a
commit called "Fix the filename passed to unlit" (1eedbc6b). So the
`normalise` is no longer necessary.
-}
| snoyberg/ghc | compiler/main/DriverPipeline.hs | bsd-3-clause | 102,179 | 0 | 31 | 34,248 | 16,242 | 8,230 | 8,012 | 1,372 | 43 |
module Day4 where
import Data.Char (chr, ord)
import Data.List (find, group, intercalate, sort, sortBy)
import Data.Ord
import Text.Megaparsec hiding (Pos)
import Text.Megaparsec.Lexer hiding (space)
import Text.Megaparsec.String
data Room =
Room [Char]
Int
[Char]
deriving (Show)
name :: Room -> String
name (Room n _ _) = n
sector :: Room -> Int
sector (Room _ s _) = s
roomsP :: Parser [Room]
roomsP = roomP `endBy` newline
where
roomP = Room <$> nameP <*> sect <*> checksum
nameP = intercalate "-" <$> many lowerChar `endBy` char '-'
sect = fromIntegral <$> integer
checksum = char '[' *> many lowerChar <* char ']'
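-- For example (the sample room from the puzzle statement, quoted from
-- memory), parsing "aaaaa-bbb-z-y-x-123[abxyz]\n" with roomsP gives
--
--   Right [Room "aaaaa-bbb-z-y-x" 123 "abxyz"]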
isReal :: Room -> Bool
isReal (Room name _ checksum) =
map head (take 5 $ sortBy ordr $ group $ sort $ filter (/= '-') name) == checksum
where
ordr b a
| comparing length a b == EQ = comparing head b a
| otherwise = comparing length a b
part1 :: [Room] -> Int
part1 = sum . map sector . filter isReal
rot :: Int -> Char -> Char
rot _ '-' = ' '
rot n c = chr (a + ((ord c - a + n) `mod` (z - a + 1)))
where
a = ord 'a'
z = ord 'z'
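-- A quick sanity check (the decryption example from the puzzle statement,
-- quoted from memory):
--
--   map (rot 343) "qzmt-zixmtkozy-ivhz" == "very encrypted name"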
part2 :: [Room] -> Maybe Int
part2 = fmap sector . find (\r -> name r == "northpole object storage") . decrypt
where
decrypt = map (\(Room n s c) -> Room (map (rot s) n) s c)
main :: IO ()
main = do
let inputFile = "input/day4.txt"
input <- readFile inputFile
let Right rooms = parse roomsP inputFile input
print $ part1 rooms
mapM_ print $ part2 rooms
| liff/adventofcode-2016 | app/Day4.hs | bsd-3-clause | 1,589 | 0 | 14 | 466 | 672 | 347 | 325 | 45 | 1 |
import System.Environment (getArgs)
guess :: Int -> Int -> [String] -> String
guess lo (-1) (x:xs) = guess lo (read x) xs
guess lo hi (x:xs) | x == "Lower" = guess lo (d - 1) xs
| x == "Higher" = guess (d + 1) hi xs
| otherwise = show d
where c = mod (lo+hi) 2
d = div (lo+hi) 2 + c
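-- d is the midpoint of the current range, rounded up, e.g.
--
--   lo = 0, hi = 10  ->  d = 5
--   lo = 3, hi = 4   ->  d = 4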
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (guess 0 (-1) . words) $ lines input
| nikai3d/ce-challenges | moderate/guess_the_number.hs | bsd-3-clause | 541 | 0 | 14 | 220 | 263 | 131 | 132 | 13 | 1 |