code (string, 5..1.03M chars) | repo_name (string, 5..90 chars) | path (string, 4..158 chars) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
module Bce.Networking where
import qualified Bce.BlockChain as BlockChain
import qualified Bce.DbFs as Db
import qualified Bce.VerifiedDb as VerifiedDb
import qualified Bce.P2p as P2p
import Bce.Logger
import Bce.Verified
import Bce.Hash
import Bce.BlockChainHash
import Bce.Util
import Bce.TimeStamp
import Bce.InitialBlock
import Bce.BlockChainSerialization
import qualified Data.Binary as Bin
import qualified Data.Set as Set
import qualified Data.Map as Map
import GHC.Generics (Generic)
import qualified Data.Binary.Get as BinGet
import qualified Data.Binary.Put as BinPut
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Control.Applicative
import Data.Monoid
import Debug.Trace
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad.Trans
import Control.Monad.Trans.State
import Bce.PeerAddress
data NetworkState = NetworkState { networkP2p :: P2p.P2p
, networkStateDb :: Db.Db }
data Network = Network { networkState :: NetworkState
, braggerThread :: ThreadId
, transactionAnnouncherThread :: ThreadId
, networkListenerThread :: ThreadId }
data NetworkMessage = Brag Int
| Ask Hash
| Propose [BlockChain.Block]
| Dunno
| PushTransactions (Set.Set BlockChain.Transaction)
deriving (Show, Generic)
instance Bin.Binary NetworkMessage
encodeMessage :: NetworkMessage -> BS.ByteString
encodeMessage msg = BSL.toStrict $ BinPut.runPut $ Bin.put msg
decodeMessage :: BS.ByteString -> NetworkMessage
decodeMessage bs = BinGet.runGet Bin.get $ BSL.fromStrict bs
maxBlocksSyncBatch = 50
handlePeerMessage :: NetworkState -> PeerAddress -> NetworkMessage -> StateT NetworkListenerState IO ()
handlePeerMessage net peer msg = do
let db = networkStateDb net
liftIO $ logDebug $ "got message " ++ show msg
case msg of
Brag braggedLen -> do
(dbLen, VerifiedBlock topBlock) <- liftIO $ Db.getLongestHead db
if dbLen < braggedLen
then do
liftIO $ logDebug $ "saw bragger with length!" ++ show braggedLen
s <- get
if Map.member peer (networkListenerActiveSyncs s)
then liftIO $ logDebug "already syncing from that bragger"
else do
liftIO $ logDebug $ "starting sync from " ++ show peer
let askFrom = blockId topBlock
let newSync = NetworkBlocksSyncState 1 askFrom
let ns = s{networkListenerActiveSyncs=Map.insert peer newSync $ networkListenerActiveSyncs s}
put ns
liftIO $ isend net peer $ Ask askFrom
else return ()
Ask fromHash -> do
blocksOpt <- liftIO $ Db.getBlocksFrom (networkStateDb net) fromHash
case blocksOpt of
Just blocks -> do
liftIO $ logDebug $ "proposing blocks from " ++ show fromHash
liftIO $ isend net peer $ Propose $ blocks
Nothing -> do
liftIO $ logDebug $ "dunno from hash" ++ show fromHash
liftIO $ isend net peer $ Dunno
Propose blocks -> do
modify (\oldState -> oldState{networkListenerActiveSyncs=
Map.delete peer $ networkListenerActiveSyncs oldState})
liftIO $ logDebug $ "pushing blocks to chain" ++ show (length blocks)
liftIO $ VerifiedDb.verifyAndPushBlocks (networkStateDb net) blocks
Dunno -> do
oldState <- get
let oldSyncStateOpt = Map.lookup peer $ networkListenerActiveSyncs oldState
case oldSyncStateOpt of
Nothing -> liftIO $ logWarning $ "got dunno for unstarted sync from " ++ show peer
Just oldSyncState -> do
let askSkipInterval = 2 ^ (networkBlocksSyncAccelerationKoef oldSyncState)
let oldFromHash = networkBlockSyncLastAsk oldSyncState
oldBlocksOpt <- liftIO $ Db.getBlocksTo db oldFromHash askSkipInterval
let newFromHash = case oldBlocksOpt of
Nothing -> blockId initialBlock
Just oldBlocks -> blockId $ verifiedBlock $ last oldBlocks
let newSyncState = oldSyncState{ networkBlocksSyncAccelerationKoef =
1 + networkBlocksSyncAccelerationKoef oldSyncState
, networkBlockSyncLastAsk = newFromHash }
put oldState{networkListenerActiveSyncs=Map.insert peer newSyncState
$ networkListenerActiveSyncs oldState}
liftIO $ logDebug $ "got dunno; asking from" ++ show newFromHash
++ " with skip " ++ show askSkipInterval
liftIO $ isend net peer $ Ask newFromHash
PushTransactions transactions -> do
liftIO $ VerifiedDb.verifyAndPushTransactions (networkStateDb net) transactions
return ()
data NetworkBlocksSyncState = NetworkBlocksSyncState { networkBlocksSyncAccelerationKoef :: Int
, networkBlockSyncLastAsk :: Hash } deriving (Show)
data NetworkListenerState = NetworkListenerState {
networkListenerActiveSyncs :: Map.Map PeerAddress NetworkBlocksSyncState } deriving (Show)
networkListener :: NetworkState -> IO ()
networkListener net = do
chan <- atomically $ dupTChan $ P2p.p2pRecvChan $ networkP2p net
let initialState = NetworkListenerState Map.empty
let loop = do
msg <- liftIO $ atomically $ readTChan chan
case msg of
P2p.PeerConnected peer -> do
liftIO $ Db.pushSeed (networkStateDb net) peer
P2p.PeerDisconnected peer -> do
liftIO $ logDebug $ "peer disconnected " ++ show peer
modify (\oldState -> oldState{networkListenerActiveSyncs=
Map.delete peer $ networkListenerActiveSyncs oldState})
P2p.PeerMessage peer bs -> handlePeerMessage net peer $ decodeMessage bs
loop
evalStateT loop initialState
bragger :: NetworkState -> IO ()
bragger net =
let loop = do
threadDelay (secondsToMicroseconds $ 5)
(length, _) <- Db.getLongestHead (networkStateDb net)
logTrace $ "bragging with length of " ++ show length
ibroadcast net $ Brag length
loop
in loop
transactionsAnnouncer :: NetworkState -> IO ()
transactionsAnnouncer net = do
threadDelay (secondsToMicroseconds $ 5)
transactionsToAnnounce <- Db.getTransactions $ networkStateDb net
ibroadcast net $ PushTransactions transactionsToAnnounce
transactionsAnnouncer net
-- TODO: [PeerAddress] -> Set.Set PeerAddress
start :: P2p.P2pConfig -> [PeerAddress] -> Db.Db -> IO Network
start p2pConfig seeds db = do
dbSeeds <- Db.getSeeds db
let allSeeds = Set.toList $ Set.union dbSeeds $ Set.fromList seeds
networkState <- NetworkState <$> (P2p.start seeds p2pConfig) <*> pure db
networkListenerThread <- forkIO $ networkListener networkState
braggerThread <- forkIO $ bragger networkState
transactionAnnouncherThread <- forkIO $ transactionsAnnouncer networkState
return Network{..}
stop :: Network -> IO ()
stop net = do
killThread $ networkListenerThread net
killThread $ transactionAnnouncherThread net
killThread $ braggerThread net
P2p.stop $ networkP2p $ networkState net
ibroadcast :: NetworkState -> NetworkMessage -> IO ()
ibroadcast ns msg = P2p.broadcastPayload (networkP2p $ ns) (encodeMessage msg)
broadcast :: Network -> NetworkMessage -> IO ()
broadcast net msg = ibroadcast (networkState net) msg
isend :: NetworkState -> PeerAddress -> NetworkMessage -> IO ()
isend net peer msg = P2p.sendPayload (networkP2p net) peer (encodeMessage msg)
send :: Network -> PeerAddress -> NetworkMessage -> IO ()
send net peer msg = isend (networkState net) peer msg
networkTime :: Network -> IO TimeStamp
networkTime net = P2p.networkTime $ networkP2p $ networkState net
networkDb :: Network -> Db.Db
networkDb net = networkStateDb $ networkState net
| dehun/bce | src/Bce/Networking.hs | mit | 8,763 | 0 | 25 | 2,715 | 2,124 | 1,049 | 1,075 | 169 | 10 |
import Text.ParserCombinators.Parsec hiding (spaces)
import System.Environment
import Control.Monad
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
parseString :: Parser LispVal
parseString = do
char '"'
x <- many (noneOf "\"")
char '"'
return $ String x
parseList :: Parser LispVal
parseList = liftM List $ sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = do
head <- endBy parseExpr spaces
tail <- char '.' >> spaces >> parseExpr
return $ DottedList head tail
parseQuoted :: Parser LispVal
parseQuoted = do
char '\''
x <- parseExpr
return $ List [Atom "quote", x]
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = first:rest
return $ case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber
<|> parseQuoted
<|> do char '('
x <- try parseList <|> parseDottedList
char ')'
return x
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
spaces :: Parser ()
spaces = skipMany1 space
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
Left err -> "No match: " ++ show err
Right _ -> "Found value"
main :: IO ()
main = do
(expr:_) <- getArgs
putStrLn $ readExpr expr
| mmwtsn/write-yourself-a-scheme | 02-parsing/04-recursive-parsers.hs | mit | 1,621 | 20 | 11 | 423 | 589 | 276 | 313 | 59 | 3 |
#!/usr/bin/env runhaskell
module Main where
import Control.Monad
import Control.Lens
import Data.Function (on)
import Data.List (sortBy)
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Environment
import Text.Printf
import SpaceWeather.FlareClass
import SpaceWeather.Format
import SpaceWeather.Prediction
import SpaceWeather.SkillScore
import System.System
readResult :: FilePath -> IO PredictionResult
readResult fn = do
txt <- T.readFile fn
let Right ret = decode txt
return ret
tssOf :: FlareClass -> PredictionResult -> Double
tssOf fc res =
case res of
PredictionSuccess prMap ->
prMap M.! fc M.! TrueSkillStatistic ^. scoreValue
_ -> 0
main :: IO ()
main = do
dirName <- fmap head getArgs
fns <- fmap lines $ readSystem0 $ printf "find %s | grep 'result.yml'" dirName
resultSet <- mapM readResult fns
forM_ defaultFlareClasses $ \fc -> do
let best = last $ sortBy (compare `on` tssOf fc) resultSet
print (fc, tssOf fc best)
print best
| nushio3/UFCORIN | exe-src/find-best-TSS.hs | mit | 1,050 | 0 | 18 | 193 | 337 | 174 | 163 | 35 | 2 |
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI, OverloadedStrings #-}
module Main where
import Foreign.Ptr
import GHCJS.Types
import GHCJS.Foreign
import qualified Data.JSString as JSS
import qualified Data.JSString.Text as JSS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.ByteString as B
import Data.Monoid
foreign import javascript unsafe "h$log($1)"
js_log :: JSString -> IO ()
foreign import javascript unsafe "$r = ''; for(var i=0;i< $1 ;i++) { $r += 'xyz'; }"
js_makeSomeStr :: Int -> IO JSString
x :: T.Text
x = "abc"
y :: T.Text
y = "In the programming-language world, one rule of survival is simple: dance or die."
main = do
js_log . JSS.textToJSString $ y
js_log . JSS.textToJSString . mconcat . replicate 10 $ x
putStrLn . T.unpack . JSS.textFromJSString =<< js_makeSomeStr 10
putStrLn . T.unpack . TE.decodeUtf8With TE.ignore . B.pack $ [63..80] ++ [194,162,226,130,172]
| ghcjs/ghcjs | test/ffi/marshalString.hs | mit | 1,007 | 6 | 12 | 161 | 266 | 152 | 114 | 25 | 1 |
{-# Language TemplateHaskell #-}
module Kvs4.KvsMaster where
import qualified Data.Map.Strict as M
import Text.Printf
import Control.Monad
import Control.Concurrent hiding (newChan)
import Control.Distributed.Process hiding (handleMessage)
import Control.Distributed.Process.Internal.Closure.TH
import Kvs4.KvsTypes
import Kvs4.KvsWorker
createDB :: [NodeId] -> Process Database
createDB peers = spawnLocal $ do
pids <- forM peers $ \nid -> do
say $ "spawn on: " ++ show nid
spawn nid $ $(mkStaticClosure 'worker)
_ <- mapM_ monitor pids
let groups = groupPair pids
if null pids
then worker
else waitMessage groups
groupPair :: [a] -> [[a]]
groupPair [] = []
groupPair [x] = []
groupPair (x:y:xs) = [x,y] : groupPair xs
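-- Added note (illustrative): groupPair chunks a list into consecutive pairs
-- and silently drops a trailing unpaired element, e.g.
--   groupPair [1,2,3,4,5] == [[1,2],[3,4]]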
waitMessage :: [[ProcessId]] -> Process ()
waitMessage groups = receiveWait
[ match $ handleNotification groups
, match $ handleRequest groups
, matchAny $ \msg -> do
say $ printf "received unknownMessage: %s" (show msg)
]
handleNotification :: [[ProcessId]] -> ProcessMonitorNotification -> Process ()
handleNotification groups (ProcessMonitorNotification ref pid reason) = do
say $ printf "died worker %s by %s" (show pid) (show reason)
unmonitor ref
waitMessage $ map (filter (/= pid)) groups
handleRequest :: [[ProcessId]] -> Request -> Process ()
handleRequest groups req = do
let index = findWorker req $ length groups
group = groups !! index
forM_ group $ \pid -> send pid req
waitMessage groups
findWorker :: Request -> Int -> Int
findWorker req workerNum = fromEnum c `mod` workerNum
where c = head $ getKey req
getKey :: Request -> Key
getKey (ReqOp (Get k) _) = k
getKey (ReqOp (Set k _) _) = k
rcdata :: RemoteTable -> RemoteTable
rcdata = Kvs4.KvsWorker.__remoteTable
| y-kamiya/parallel-concurrent-haskell | src/Server/Kvs4/KvsMaster.hs | gpl-2.0 | 1,775 | 0 | 16 | 333 | 647 | 335 | 312 | 49 | 2 |
module HEP.Automation.MadGraph.Dataset.Set20110716set3 where
import HEP.Storage.WebDAV.Type
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Model.SChanC8Vschmaltz
import HEP.Automation.MadGraph.Dataset.Processes
import HEP.Automation.JobQueue.JobType
processSetup :: ProcessSetup SChanC8Vschmaltz
processSetup = PS {
model = SChanC8Vschmaltz
, process = preDefProcess TTBar0or1J
, processBrief = "TTBar0or1J"
, workname = "713_SChanC8Vschmaltz_TTBar0or1J_LHC"
}
paramSet :: [ModelParam SChanC8Vschmaltz]
paramSet = [ SChanC8VschmaltzParam { mnp = m, mphi = 100.0, ga = g, nphi = n}
| m <- [ 420, 440 ]
, g <- [ 0.35, 0.45 .. 0.65 ]
, n <- [ 4,5,6,7 ] ]
{- | m <- [ 420 ]
, (g,n) <- [ (0.39*1.22,6.2), (0.37*1.22,5.3 ), (0.35*1.22,4.27) ] ] -}
sets :: [Int]
sets = [1]
ucut :: UserCut
ucut = UserCut {
uc_metcut = 15.0
, uc_etacutlep = 2.7
, uc_etcutlep = 18.0
, uc_etacutjet = 2.7
, uc_etcutjet = 15.0
}
eventsets :: [EventSet]
eventsets =
[ EventSet processSetup
(RS { param = p
, numevent = 100000
, machine = LHC7 ATLAS
, rgrun = Fixed
, rgscale = 200.0
, match = MLM
, cut = DefCut
, pythia = RunPYTHIA
, usercut = UserCutDef ucut -- NoUserCutDef --
, pgs = RunPGS
, jetalgo = AntiKTJet 0.4
, uploadhep = NoUploadHEP
, setnum = num
})
| p <- paramSet , num <- sets ]
webdavdir :: WebDAVRemoteDir
webdavdir = WebDAVRemoteDir "paper3/ttbar_LHC_schmaltz_pgsscan"
| wavewave/madgraph-auto-dataset | src/HEP/Automation/MadGraph/Dataset/Set20110716set3.hs | gpl-3.0 | 1,901 | 0 | 10 | 621 | 407 | 256 | 151 | 48 | 1 |
import SimplicialComplex
import System.Vacuum.Cairo
main = viewFile "/tmp/outvacuum" abstractTet
| DanielSchuessler/hstri | bin/vacuum.hs | gpl-3.0 | 98 | 1 | 5 | 10 | 23 | 11 | 12 | 3 | 1 |
import Control.Monad (forever)
import System.Posix.Process (forkProcess)
forkBomb = forever $ forkProcess forkBomb
main = forkBomb
| h31nr1ch/Sexy-Bitch | war_ssh/fork3.hs | gpl-3.0 | 133 | 2 | 6 | 17 | 45 | 22 | 23 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
-- | Build LXD images using lxdfiles.
module System.LXD.LXDFile.Build (
build
) where
import Prelude hiding (writeFile)
import Control.Monad.Except (MonadError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT, runReaderT, ask)
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.ByteString.Lazy (writeFile)
import Data.Either.Combinators (rightToMaybe)
import Data.Monoid ((<>))
import Data.Text (Text, pack, unpack)
import Text.Parsec (parse, many, noneOf, char, string)
import Filesystem.Path.CurrentOS (decodeString)
import Turtle (Line, Fold(..), fold, lineToText, inproc, rm, format, sleep, (%))
import qualified Turtle as R
import Language.LXDFile (LXDFile(..))
import System.LXD.LXDFile.ScriptAction (scriptActions, runScriptAction, tmpfile)
import System.LXD.LXDFile.Utils.Line (echoT, echoS, unsafeStringToLine)
import System.LXD.LXDFile.Utils.Monad (orThrowM)
import System.LXD.LXDFile.Utils.Shell (Container, lxc, lxcExec, lxcFilePush)
data BuildCtx = BuildCtx { lxdfile :: LXDFile
, imageName :: String
, context :: FilePath
, buildContainer :: Text }
build :: (MonadIO m, MonadError String m) => LXDFile -> String -> FilePath -> m ()
build lxdfile'@LXDFile{..} imageName' context' = do
container <- launch `orThrowM` "error: could not launch container"
let ctx = BuildCtx { lxdfile = lxdfile'
, imageName = imageName'
, context = context'
, buildContainer = container }
flip runReaderT ctx $ do
echoT $ "Building " <> pack imageName' <> " in " <> container
sleep 5.0
mapM_ (flip runReaderT container . runScriptAction context') $ scriptActions actions
includeLXDFile
echoT $ "Stopping " <> container
lxc ["stop", container]
echoS $ "Publishing to " <> imageName'
case description of
Nothing -> lxc ["publish", container, format ("--alias=" % R.s) (pack imageName')]
Just desc -> lxc ["publish", container, format ("--alias=" % R.s) (pack imageName'), format ("description=" % R.s) (pack desc)]
lxc ["delete", container]
where
launch :: MonadIO m => m (Maybe Text)
launch = do
line <- launchLine
return $ lineToText <$> line
launchLine :: MonadIO m => m (Maybe Line)
launchLine = fold (inproc "lxc" ["launch", pack baseImage] mempty) $
Fold selectLaunchName Nothing id
selectLaunchName (Just x) _ = Just x
selectLaunchName _ x = unsafeStringToLine <$> parseLaunch (lineToText x)
parseLaunch = rightToMaybe . parse (many (char '\r') *> string "Container name is: " *> many (noneOf " ")) "" . unpack
includeLXDFile :: (MonadIO m, MonadError String m, MonadReader BuildCtx m) => m ()
includeLXDFile = do
file <- tmpfile "lxdfile-metadata-lxdfile"
ask >>= liftIO . writeFile file . encodePretty . lxdfile
run $ lxcExec ["mkdir", "-p", "/etc/lxdfile"]
run $ lxcFilePush "0644" file "/etc/lxdfile/lxdfile"
rm (decodeString file)
run :: MonadReader BuildCtx m => ReaderT Container m a -> m a
run x = buildContainer <$> ask >>= runReaderT x
| hverr/lxdfile | src/System/LXD/LXDFile/Build.hs | gpl-3.0 | 3,356 | 0 | 18 | 761 | 1,025 | 560 | 465 | 65 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.Comments.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Modifies a comment.
--
-- /See:/ <https://developers.google.com/youtube/v3 YouTube Data API Reference> for @youtube.comments.update@.
module Network.Google.Resource.YouTube.Comments.Update
(
-- * REST Resource
CommentsUpdateResource
-- * Creating a Request
, commentsUpdate
, CommentsUpdate
-- * Request Lenses
, cuPart
, cuPayload
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.comments.update@ method which the
-- 'CommentsUpdate' request conforms to.
type CommentsUpdateResource =
"youtube" :>
"v3" :>
"comments" :>
QueryParam "part" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Comment :> Put '[JSON] Comment
-- | Modifies a comment.
--
-- /See:/ 'commentsUpdate' smart constructor.
data CommentsUpdate = CommentsUpdate'
{ _cuPart :: !Text
, _cuPayload :: !Comment
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cuPart'
--
-- * 'cuPayload'
commentsUpdate
:: Text -- ^ 'cuPart'
-> Comment -- ^ 'cuPayload'
-> CommentsUpdate
commentsUpdate pCuPart_ pCuPayload_ =
CommentsUpdate'
{ _cuPart = pCuPart_
, _cuPayload = pCuPayload_
}
-- | The part parameter identifies the properties that the API response will
-- include. You must at least include the snippet part in the parameter
-- value since that part contains all of the properties that the API
-- request can update.
cuPart :: Lens' CommentsUpdate Text
cuPart = lens _cuPart (\ s a -> s{_cuPart = a})
-- | Multipart request metadata.
cuPayload :: Lens' CommentsUpdate Comment
cuPayload
= lens _cuPayload (\ s a -> s{_cuPayload = a})
instance GoogleRequest CommentsUpdate where
type Rs CommentsUpdate = Comment
type Scopes CommentsUpdate =
'["https://www.googleapis.com/auth/youtube.force-ssl"]
requestClient CommentsUpdate'{..}
= go (Just _cuPart) (Just AltJSON) _cuPayload
youTubeService
where go
= buildClient (Proxy :: Proxy CommentsUpdateResource)
mempty
| rueshyna/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/Comments/Update.hs | mpl-2.0 | 3,113 | 0 | 13 | 716 | 391 | 236 | 155 | 59 | 1 |
{-# LANGUAGE TemplateHaskell #-}
import CLaSH.Prelude hiding ((++),undefined,take)
import Prelude
import MatrixMul
import System.RedPitaya.Bus.Tools
import System.RedPitaya.Bus
type MM = Matrix 3 3 (Signed 8)
mm = fmap $ Just . uncurry matrixMultiply :: Signal (MM,MM) -> Signal (Maybe MM)
core inp = busBind n mm (addrLow inp) where
n = $(bTQ (undefined :: MatrixCore 3 3 3 (Signed 8))) -- type deduce template
topEntity = rpSimpleBind core
i2i = fromInteger . toInteger
-- test
wr add val = Just (i2i add,Write,i2i val) :: BusIn
wr5 = let f x = wr x (x * 0x01010101 + 0x10203) in f <$> [0 .. 5]
rd = let f x = Just (x,Read,0) in fmap f [0,1 .. 2]
testHelper = take 9 $ simulate core (wr5 ++ rd)
| ra1u/lambdaya-bus | examples/Test-v1.0/Test.hs | lgpl-3.0 | 713 | 3 | 13 | 143 | 336 | 174 | 162 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Data.Monoid
import Data.String
import qualified Data.Text as T
import qualified Data.Text.IO as IO
import qualified Data.Text.Lazy.IO as LazyIO
import System.Environment
import qualified System.Exit as E
import Text.Blaze ((!))
import qualified Text.Blaze.Html.Renderer.Text as Renderer
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Parsec hiding (many, (<|>), optional)
import qualified Text.Parsec.Text as TParsec
import Text.Shakespeare.Text
main :: IO ()
main = do
args <- getArgs
when (length args /= 1) $ do
prog <- getProgName
putStrLn $ prog <> ": specify slides file"
E.exitFailure
md <- IO.readFile $ head args
LazyIO.putStr $ Renderer.renderHtml $
compile presentation defaultStrategy md
data Strategy = Strategy {
getLanguage :: H.AttributeValue
, makeCaption :: T.Text -> T.Text -> H.Html
, makeTitle :: T.Text -> H.Html
}
type Parser = ParsecT T.Text () (Reader Strategy)
defaultStrategy :: Strategy
defaultStrategy = Strategy {
getLanguage = "ja-JP"
, makeCaption = \title subTitle -> do
H.header ! A.class_ "caption" $ do
H.h1 $ H.toMarkup title
H.p $ H.toMarkup subTitle
, makeTitle = H.title . H.toMarkup
}
compile :: Parser H.Html -> Strategy -> T.Text -> H.Html
compile p s t = either (error . show) id $ runReader (runParserT p () "" t) s
presentation :: Parser H.Html
presentation = do
slides <- many $ coverSlide <|> basicSlide <|> emptyLines
packSlides $ H.toMarkup slides
where
packSlides slides = do
lang <- asks getLanguage
title <- asks makeTitle <*> pure "Presentation"
cap <- asks makeCaption <*> pure "Presentation" <*> pure "Name"
pure $ H.html ! A.lang lang $ do
H.head $ do
title
H.meta ! A.charset "UTF-8"
H.meta ! A.name "viewport"
! A.content "width=1274, user-scalable=no"
H.link ! A.rel "stylesheet"
! A.href "themes/ribbon/styles/style.css"
H.link ! A.rel "stylesheet"
! A.href "themes/ribbon/styles/print.css"
! A.media "print"
H.style $ H.toMarkup [st|
#Cover h2 {
color: #FFF;
text-align: center;
font-size: 70px;
}
#FitToWidth h2,
#FitToHeight h2 {
color: #FFF;
text-align: center;
}
|]
cap
H.body slides
H.div ! A.class_ "progress" $
H.div mempty
H.script ! A.src "scripts/script.js" $ mempty
coverSlide :: Parser H.Html
coverSlide = makeSlide '=' "slide cover"
basicSlide :: Parser H.Html
basicSlide = makeSlide '-' "slide"
makeSlide :: Char -> H.AttributeValue -> Parser H.Html
makeSlide liner class_ = do
h <- header
(H.toMarkup -> contents) <- many (slideContent <|> emptyLines)
pure $ H.div ! A.id (htmlToAttr h) ! A.class_ class_ $
H.div $
H.section $ do
H.header $ H.h2 h
contents
where
htmlToAttr = H.toValue . Renderer.renderHtml . H.contents
header = try $ do
title <- commonLine
void $ some (char liner) *> newline
pure title
slideContent :: Parser H.Html
slideContent = paragraph <|> bullet <|> code
where
paragraph = do
ls <- some $ notFollowedBy newSection *> commonLine
pure $ H.p $ H.toMarkup ls
where
newSection = try $
manyTill anyChar newline
*> oneOf "-="
bullet = do
is <- some item
pure $ H.ul $ H.toMarkup is
where
item = do
line <- char '*' *> spaces *> thisLine
pure $ H.li $ H.toMarkup line
code = do
ss <- lookAhead $ some $ char ' '
ls <- some $ codeline $ length ss
pure $ H.pre $ H.toMarkup ls
where
codeline level = do
replicateM_ level (char ' ')
line <- plainThisLine
pure $ H.code $ H.toMarkup line
commonLine :: Parser H.Html
commonLine = do
void $ lookAhead $ noneOf specialChars
thisLine
where
specialChars = "=-* \n"
thisLine :: Parser H.Html
thisLine = parseInline <$> plainThisLine
where
parseInline = either (error . show) id . runP inlineParser () ""
inlineParser :: TParsec.Parser H.Html
inlineParser = H.toMarkup <$> many (text_ <|> image <|> anchor)
where
text_ = H.toMarkup <$> (some $ notFollowedBy specials *> anyChar)
where
specials = void (char '[') <|> void (string "![")
image = do
void $ char '!'
(src, alt) <- blackets
pure $ H.img ! A.src src ! A.alt alt
anchor = do
(href, (body :: T.Text)) <- blackets
pure $ H.a ! A.href href $ H.toMarkup body
blackets :: (IsString src, IsString body) => TParsec.Parser (src, body)
blackets = do
void $ char '['
(fromString -> body) <- anyChar `manyTill` char ']'
void $ char '('
(fromString -> src) <- anyChar `manyTill` char ')'
pure $ (src, body)
plainThisLine :: Parser T.Text
plainThisLine = T.pack <$> anyChar `manyTill` (void newline <|> void eof)
emptyLines :: Parser H.Html
emptyLines = some emptyLine *> pure mempty
where
emptyLine = void $
try $ (many $ oneOf " \t" :: Parser String) *> newline
| keitax/md2shower | Main.hs | lgpl-3.0 | 5,411 | 0 | 19 | 1,434 | 1,806 | 908 | 898 | 144 | 1 |
{-# LANGUAGE TypeFamilies, DataKinds, PolyKinds, TypeOperators, GADTs,
RankNTypes, ScopedTypeVariables,
UndecidableInstances, MultiParamTypeClasses, FlexibleInstances,
FunctionalDependencies, FlexibleContexts
#-}
module Foo where
import Data.Proxy
class QA a where
type R a
data Fun a b where
Snd :: Fun (a, b) b
data Exp a where
App :: Proxy a -> Fun a b -> Exp a -> Exp b
newtype Q a = Q (Exp (R a))
foo :: forall a b. (QA a, QA b) => Q (a, b) -> Q b
foo (Q d) = Q (App (Proxy :: Proxy (R (a, b))) Snd d)
{-
data Foo = A | B | C
data SFoo (a :: [ Foo ]) where
FooA :: SFoo '[ 'A]
FooB :: SFoo '[ 'B]
FooC :: SFoo '[ 'C]
producer :: Bool -> Foo
producer True = A
producer False = B
consumer :: Foo -> String
consumer A = "A"
consumer B = "B"
consumer C = error "Can't happen"
type family Elem (a :: k) (b :: [k]) :: [k] where
Elem a '[] = '[]
Elem a (a ': xs) = '[a]
Elem a (b ': xs) = '[]
type SFooI (a :: [Foo]) = forall e. SFoo (Elem e a)
producerI :: Bool -> SFooI '[A, B]
producerI True = FooA
--producerI False = FooB
type family F a = r | r -> a
type instance F Char = Char
idF :: F x -> F x
idF x = x
foo = idF True
class Collection c where
type Elem c
insert :: Elem c -> c -> c
instance Collection [a] where
type Elem [a] = a
insert x xs = x : xs
class F a r | a -> r, r -> a
instance F [a] a
foo :: (F [Int] Int) => a
foo = undefined
import Data.Type.Equality
type family F a = r | r -> a where
F a = [F a]
idT :: F Char -> F Bool
idT x = x
class Hcl a b where
type Ht a b = r
data Dict = Dict [Node] deriving Show
data Node = Continue Char Dict | End Char Dict deriving Show
-- ab abc
-- Dict [Continue 'a' [Dict [Continue 'b' [Dict [End 'c' []]]]]]
emptyDict :: Dict
emptyDict = Dict []
insert :: String -> Dict -> Dict
insert [] dict = dict
insert [c] (Dict nodes) = Dict (insertNode End (insert []) c nodes)
insert (c:cs) (Dict nodes) = Dict (insertNode Continue (insert cs) c nodes)
insertNode :: (Char -> Dict -> Node) -> (Dict -> Dict) -> Char -> [Node] -> [Node]
insertNode ins insertDict c []
= ins c (insertDict emptyDict) : []
insertNode ins insertDict c (Continue x dict : nodes')
| x == c = ins c (insertDict dict) : nodes'
insertNode _ insertDict c (End x dict : nodes')
| x == c = End c (insertDict dict) : nodes'
insertNode ins insertDict c (node : nodes')
= node : insertNode ins insertDict c nodes'
-}
| jstolarek/sandbox | haskell/Foo.hs | unlicense | 2,510 | 0 | 12 | 682 | 193 | 107 | 86 | 15 | 1 |
qsort :: Ord a => [a] -> [a]
qsort [] = []
qsort (h:hx) = [i | i <- hx1, i < h] ++ [h] ++ [i | i <- hx1, i >= h]  -- '>=' so elements equal to the pivot are not dropped
where hx1 = qsort hx
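-- Added example (illustrative): qsort "Qiou Yang" == " QYaginou"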
main = do
print (qsort "Qiou Yang")
| sgfxq/Haskell | Demo.hs | apache-2.0 | 172 | 1 | 9 | 49 | 136 | 66 | 70 | 6 | 1 |
-- http://www.codewars.com/kata/5286b2e162056fd0cb000c20
module Collatz where
import Data.List
collatz :: Int -> String
collatz m = intercalate "->" $ map show (takeWhile (/= 1) (iterate f m) ++ [1]) where
  f n = if n`mod`2==0 then n`div`2 else 3*n+1
| Bodigrim/katas | src/haskell/6-Collatz.hs | bsd-2-clause | 253 | 0 | 11 | 41 | 108 | 60 | 48 | 5 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Assets where
import Data.Map hiding (foldr)
import Graphics.UI.SDL.Image
import Control.Lens
import Control.Monad
import Animation
assetsPath :: String
assetsPath = "./assets/"
data Assets = Assets {
_bulletAnimations :: Map String Animation,
_shipAnimations :: Map String Animation,
_counterAnimations :: Map String Animation,
_gameOverAnimations :: Map String Animation
} deriving (Eq, Ord, Show)
makeLenses ''Assets
loadAssets :: IO Assets
loadAssets = do
let bulletAnimationFolder = assetsPath ++ "bullet/"
shipAnimationFolder = assetsPath ++ "ship/"
counterAnimationFolder = assetsPath ++ "lifecounter/"
gameOverFolder = assetsPath ++ "gameover/"
bulletAnimationNames = [("neutral", 1), ("blue", 1)]
shipAnimationNames = [("neutral", 1), ("left", 1), ("right", 1)]
counterAnimationNames = [("neutral",1)]
gameOverAnimationNames = [("neutral",1)]
bulletAnimations <- loadAnimations bulletAnimationFolder bulletAnimationNames
shipAnimations <- loadAnimations shipAnimationFolder shipAnimationNames
counterAnimations <- loadAnimations counterAnimationFolder counterAnimationNames
gameOverAnimations <- loadAnimations gameOverFolder gameOverAnimationNames
return $ Assets bulletAnimations shipAnimations counterAnimations gameOverAnimations
loadAnimations :: FilePath -> [(String,Int)] -> IO (Map String Animation)
loadAnimations folder animationNames = do
animations <- forM animationNames $ \(name, size) -> do
animation <- loadAnimation (folder ++ name ++ "/") size
return (name, animation)
return $ foldr (\(name, animation) animMap -> insert name animation animMap) empty animations
| alexisVallet/hachitai-haskell-shmup | Assets.hs | bsd-2-clause | 1,678 | 0 | 16 | 259 | 461 | 250 | 211 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LiberalTypeSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module Binary where
import Bound.Scope
import Bound.Var
import Control.Monad
import Control.Applicative
import Control.Lens
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.ByteString
import Data.Monoid
import Ermine.Syntax
import Ermine.Syntax.Core
import Ermine.Syntax.Global
import Ermine.Syntax.Kind as K
import Ermine.Syntax.Literal
import Ermine.Syntax.Module
import Ermine.Syntax.Pattern
import Ermine.Syntax.Term as Term
import Ermine.Syntax.Type as Type
import Prelude.Extras
import Test.QuickCheck
import Test.QuickCheck.Function
import Test.QuickCheck.Instances
import Test.Framework.TH
import Test.Framework.Providers.QuickCheck2
import Arbitrary
prop_pack_unpack_fixity f = (unpackFixity . packFixity) f == Just f
pack_unpack :: (Binary a, Eq a) => a -> Bool
pack_unpack a = runGet get (runPut $ put a) == a
prop_pack_unpack_global :: Global -> Bool
prop_pack_unpack_global = pack_unpack
prop_pack_unpack_hardkind :: HardKind -> Bool
prop_pack_unpack_hardkind = pack_unpack
prop_pack_unpack_kind :: Kind Int -> Bool
prop_pack_unpack_kind = pack_unpack
prop_pack_unpack_schema :: Schema Int -> Bool
prop_pack_unpack_schema = pack_unpack
prop_pack_unpack_hardtype :: HardType -> Bool
prop_pack_unpack_hardtype = pack_unpack
prop_pack_unpack_type :: Type Int Int -> Bool
prop_pack_unpack_type = pack_unpack
prop_pack_unpack_literal :: Literal -> Bool
prop_pack_unpack_literal = pack_unpack
prop_pack_unpack_hardterm :: HardTerm -> Bool
prop_pack_unpack_hardterm = pack_unpack
prop_pack_unpack_pattern :: Pattern Int -> Bool
prop_pack_unpack_pattern = pack_unpack
prop_pack_unpack_binding_type :: BindingType Int -> Bool
prop_pack_unpack_binding_type = pack_unpack
prop_pack_unpack_body_bound :: BodyBound -> Bool
prop_pack_unpack_body_bound = pack_unpack
prop_pack_unpack_where_bound :: WhereBound -> Bool
prop_pack_unpack_where_bound = pack_unpack
prop_pack_unpack_guarded :: Guarded Int -> Bool
prop_pack_unpack_guarded = pack_unpack
prop_pack_unpack_body :: Body Int Int -> Bool
prop_pack_unpack_body = pack_unpack
prop_pack_unpack_binding :: Binding Int Int -> Bool
prop_pack_unpack_binding = pack_unpack
prop_pack_unpack_term :: Term Int Int -> Bool
prop_pack_unpack_term = pack_unpack
prop_pack_unpack_hardcore :: HardCore -> Bool
prop_pack_unpack_hardcore = pack_unpack
prop_pack_unpack_core :: Core () Int -> Bool
prop_pack_unpack_core = pack_unpack
prop_pack_unpack_module :: Module -> Bool
prop_pack_unpack_module = pack_unpack
-- Random choice of Binary test case type. May or may not be worth
-- keeping this around.
data AnyBinary where
AB :: (Show t, Eq t, Binary t) => t -> AnyBinary
binaryGens :: [Gen AnyBinary]
binaryGens = [ AB <$> (arbitrary :: Gen Global)
, AB <$> (arbitrary :: Gen HardKind)
, AB <$> (arbitrary :: Gen HardType)
, AB <$> (arbitrary :: Gen (Kind Int))
, AB <$> (arbitrary :: Gen (Schema Int))
, AB <$> (arbitrary :: Gen (Type Int Int))
, AB <$> (arbitrary :: Gen Literal)
, AB <$> (arbitrary :: Gen HardTerm)
, AB <$> (arbitrary :: Gen (Pattern Int))
]
instance Arbitrary AnyBinary where
arbitrary = oneof binaryGens
instance Show AnyBinary where
show (AB v) = "AB" ++ show v
prop_pack_unpack_random :: AnyBinary -> Bool
prop_pack_unpack_random (AB b) = pack_unpack b
tests = $testGroupGenerator
| ekmett/ermine | tests/properties/Binary.hs | bsd-2-clause | 3,740 | 0 | 10 | 591 | 845 | 478 | 367 | 96 | 1 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module Text.HTML.SanitizeXSS.Css (
sanitizeCSS
#ifdef TEST
, allowedCssAttributeValue
#endif
) where
import Data.Text (Text)
import qualified Data.Text as T
import Data.Attoparsec.Text
import Data.Text.Lazy.Builder (toLazyText)
import Data.Set (member, fromList, Set)
import Data.Char (isDigit)
import Control.Applicative ((<|>), pure)
import Text.CSS.Render (renderAttrs)
import Text.CSS.Parse (parseAttrs)
import Prelude hiding (takeWhile)
import qualified Data.Text.Lazy as L
-- import FileLocation (debug, debugM)
-- this is a direct translation from sanitizer.py, except
-- sanitizer.py filters out url(), but this is redundant
sanitizeCSS :: L.Text -> L.Text
sanitizeCSS = toLazyText . renderAttrs . filter isSanitaryAttr . filterUrl . parseAttributes . L.toStrict
where
filterUrl :: [(Text,Text)] -> [(Text,Text)]
filterUrl = map filterUrlAttribute
where
filterUrlAttribute :: (Text, Text) -> (Text, Text)
filterUrlAttribute (prop,value) =
case parseOnly rejectUrl value of
Left _ -> (prop,value)
Right noUrl -> filterUrlAttribute (prop, noUrl)
rejectUrl = do
pre <- manyTill anyChar (string "url")
skipMany space
_<-char '('
skipWhile (/= ')')
_<-char ')'
rest <- takeText
return $ T.append (T.pack pre) rest
parseAttributes css = case parseAttrs css of
Left _ -> []
Right as -> as
isSanitaryAttr (_, "") = False
isSanitaryAttr ("",_) = False
isSanitaryAttr (prop, value)
| prop `member` allowed_css_properties = True
| (T.takeWhile (/= '-') prop) `member` allowed_css_unit_properties &&
all allowedCssAttributeValue (T.words value) = True
| prop `member` allowed_svg_properties = True
| otherwise = False
allowed_css_unit_properties :: Set Text
allowed_css_unit_properties = fromList ["background","border","margin","padding"]
allowedCssAttributeValue :: Text -> Bool
allowedCssAttributeValue val =
val `member` allowed_css_keywords ||
case parseOnly allowedCssAttributeParser val of
Left _ -> False
Right b -> b
where
allowedCssAttributeParser = do
rgb <|> hex <|> rgb <|> cssUnit
aToF = fromList "abcdef"
hex = do
_ <- char '#'
hx <- takeText
return $ T.all (\c -> isDigit c || (c `member` aToF)) hx
-- should have used sepBy (symbol ",")
rgb = do
_<- string "rgb("
skipMany1 digit >> skipOk (== '%')
skip (== ',')
skipMany digit >> skipOk (== '%')
skip (== ',')
skipMany digit >> skipOk (== '%')
skip (== ')')
return True
cssUnit = do
skip isDigit
skipOk isDigit
skipOk (== '.')
skipOk isDigit >> skipOk isDigit
skipSpace
unit <- takeText
return $ T.null unit || unit `member` allowed_css_attribute_value_units
skipOk :: (Char -> Bool) -> Parser ()
skipOk p = skip p <|> pure ()
allowed_css_attribute_value_units :: Set Text
allowed_css_attribute_value_units = fromList
[ "cm", "em", "ex", "in", "mm", "pc", "pt", "px", "%", ",", "\\"]
allowed_css_properties :: Set Text
allowed_css_properties = fromList acceptable_css_properties
where
acceptable_css_properties = ["azimuth", "background-color",
"border-bottom-color", "border-collapse", "border-color",
"border-left-color", "border-right-color", "border-top-color", "clear",
"color", "cursor", "direction", "display", "elevation", "float", "font",
"font-family", "font-size", "font-style", "font-variant", "font-weight",
"height", "letter-spacing", "line-height", "overflow", "pause",
"pause-after", "pause-before", "pitch", "pitch-range", "richness",
"speak", "speak-header", "speak-numeral", "speak-punctuation",
"speech-rate", "stress", "text-align", "text-decoration", "text-indent",
"unicode-bidi", "vertical-align", "voice-family", "volume",
"white-space", "width"]
allowed_css_keywords :: Set Text
allowed_css_keywords = fromList acceptable_css_keywords
where
acceptable_css_keywords = ["auto", "aqua", "black", "block", "blue",
"bold", "both", "bottom", "brown", "center", "collapse", "dashed",
"dotted", "fuchsia", "gray", "green", "!important", "italic", "left",
"lime", "maroon", "medium", "none", "navy", "normal", "nowrap", "olive",
"pointer", "purple", "red", "right", "solid", "silver", "teal", "top",
"transparent", "underline", "white", "yellow"]
-- used in css filtering
allowed_svg_properties :: Set Text
allowed_svg_properties = fromList acceptable_svg_properties
where
acceptable_svg_properties = [ "fill", "fill-opacity", "fill-rule",
"stroke", "stroke-width", "stroke-linecap", "stroke-linejoin",
"stroke-opacity"]
| silkapp/haskell-xss-sanitize | Text/HTML/SanitizeXSS/Css.hs | bsd-2-clause | 4,876 | 0 | 15 | 1,058 | 1,331 | 744 | 587 | 105 | 5 |
module Atomo.Core where
import Control.Concurrent
import Control.Monad.State
import Atomo.Types
import Atomo.Environment
-- | Defines all primitive objects, including the Lobby.
initCore :: VM ()
initCore = do
-- the very root object
object <- newObject id
-- top scope is a proto delegating to the root object
topObj <- newObject $ \o -> o { oDelegates = [object] }
modify $ \e -> e { top = topObj }
-- Lobby is the very bottom scope object
define (single "Lobby" PThis) (Primitive Nothing topObj)
-- define Object as the root object
define (single "Object" PThis) (Primitive Nothing object)
-- create parser environment
parserEnv <- newObject $ \o -> o { oDelegates = [topObj] }
modify $ \e -> e
{ primitives = (primitives e) { idObject = rORef object }
, parserState = (parserState e) { psEnvironment = parserEnv }
}
-- this thread's channel
chan <- liftIO newChan
modify $ \e -> e { channel = chan }
-- define primitive objects
forM_ primObjs $ \(n, f) -> do
o <- newObject $ \o -> o { oDelegates = [object] }
define (single n PThis) (Primitive Nothing o)
modify $ \e -> e { primitives = f (primitives e) (rORef o) }
where
primObjs =
[ ("Block", \is r -> is { idBlock = r })
, ("Boolean", \is r -> is { idBoolean = r })
, ("Char", \is r -> is { idChar = r })
, ("Continuation", \is r -> is { idContinuation = r })
, ("Double", \is r -> is { idDouble = r })
, ("Expression", \is r -> is { idExpression = r })
, ("Haskell", \is r -> is { idHaskell = r })
, ("Integer", \is r -> is { idInteger = r })
, ("List", \is r -> is { idList = r })
, ("Message", \is r -> is { idMessage = r })
, ("Method", \is r -> is { idMethod = r })
, ("Particle", \is r -> is { idParticle = r })
, ("Process", \is r -> is { idProcess = r })
, ("Pattern", \is r -> is { idPattern = r })
, ("Rational", \is r -> is { idRational = r })
, ("String", \is r -> is { idString = r })
]
| Mathnerd314/atomo | src/Atomo/Core.hs | bsd-3-clause | 2,127 | 0 | 17 | 652 | 769 | 447 | 322 | 39 | 1 |
{-# LANGUAGE FlexibleContexts #-}
-- | relationship handling
-- <http://instagram.com/developer/endpoints/relationships/#>
module Instagram.Relationships (
getFollows
,getFollowedBy
,getFollowsParams
,getFollowedByParams
,getRequestedBy
,getRelationship
,setRelationShip
,RelationShipAction(..)
,FollowParams(..)
)where
import Instagram.Monad
import Instagram.Types
import Data.Typeable (Typeable)
import qualified Network.HTTP.Types as HT
import Data.Char (toLower)
-- | Get the list of users this user follows.
getFollows :: (MonadBaseControl IO m, MonadResource m) => UserID
-> Maybe OAuthToken
-> InstagramT m (Envelope [User])
getFollows uid token =getGetEnvelopeM ["/v1/users/",uid,"/follows"] token ([]::HT.Query)
-- | Get the list of users this user is followed by.
getFollowedBy :: (MonadBaseControl IO m, MonadResource m) => UserID
-> Maybe OAuthToken
-> InstagramT m (Envelope [User])
getFollowedBy uid token =getGetEnvelopeM ["/v1/users/",uid,"/followed-by"] token ([]::HT.Query)
data FollowParams = FollowParams {
fpCount :: Int
} deriving (Show, Read, Eq, Ord)
instance HT.QueryLike FollowParams where
toQuery FollowParams{fpCount=count} =
["count" ?+ show count]
-- | Get the list of users this user follows.
getFollowsParams :: (MonadBaseControl IO m, MonadResource m) => UserID
-> Maybe OAuthToken
-> FollowParams
-> InstagramT m (Envelope [User])
getFollowsParams uid token fp =
getGetEnvelopeM ["/v1/users/",uid,"/follows"] token fp
-- | Get the list of users this user is followed by.
getFollowedByParams :: (MonadBaseControl IO m, MonadResource m) => UserID
-> Maybe OAuthToken
-> FollowParams
-> InstagramT m (Envelope [User])
getFollowedByParams uid token fp =
getGetEnvelopeM ["/v1/users/",uid,"/followed-by"] token fp
-- | List the users who have requested this user's permission to follow.
getRequestedBy :: (MonadBaseControl IO m, MonadResource m) =>
OAuthToken
-> InstagramT m (Envelope [User])
getRequestedBy token =getGetEnvelope ["/v1/users/self/requested-by"] token ([]::HT.Query)
-- | Get information about a relationship to another user.
getRelationship :: (MonadBaseControl IO m, MonadResource m) => UserID
-> OAuthToken
-> InstagramT m (Envelope Relationship)
getRelationship uid token =getGetEnvelope ["/v1/users/",uid,"/relationship"] token ([]::HT.Query)
-- | relationship action
data RelationShipAction = Follow
| Unfollow
| Block
| Unblock
| Approve
| Deny
deriving (Show,Read,Eq,Ord,Bounded,Enum,Typeable)
instance HT.QueryLike RelationShipAction where
toQuery a=
["action" ?+ map toLower (show a)]
-- | Modify the relationship between the current user and the target user.
setRelationShip :: (MonadBaseControl IO m, MonadResource m) => UserID
-> OAuthToken
-> RelationShipAction
-> InstagramT m (Envelope (Maybe Relationship))
setRelationShip uid=getPostEnvelope ["/v1/users/",uid,"/relationship"]
| cdepillabout/ig | src/Instagram/Relationships.hs | bsd-3-clause | 2,966 | 0 | 13 | 469 | 793 | 435 | 358 | 65 | 1 |
module Data.List.Origami where
cata :: b -> (a -> b -> b) -> [a] -> b
cata b _ [] = b
cata b f (x:xs) = f x (cata b f xs)
ana :: (b -> Maybe (a,b)) -> b -> [a]
ana g b = case g b of
Nothing -> []
Just (a,b') -> a : ana g b'
hylo :: c -> (b -> c -> c) -> (a -> Maybe (b,a)) -> a -> c
hylo c f g a = case g a of
Nothing -> c
Just (b,a') -> f b (hylo c f g a')
-- hylo c f g = cata c f . ana g
para :: b -> (a -> ([a],b) -> b) -> [a] -> b
para b _ [] = b
para b f (a:as) = f a (as, para b f as)
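-- Added examples (illustrative, assuming the definitions above):
--   cata 0 (+) [1,2,3]                                              == 6
--   ana  (\n -> if n == 0 then Nothing else Just (n, n - 1)) 3      == [3,2,1]
--   hylo 0 (+) (\n -> if n == 0 then Nothing else Just (n, n - 1)) 3 == 6
--   para [] (\x (xs, _) -> x : xs) "abc"                            == "abc"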
| nobsun/simple-origami | src/Data/List/Origami.hs | bsd-3-clause | 528 | 0 | 11 | 179 | 366 | 193 | 173 | 15 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Prelude
import qualified Control.Exception as E
import Control.Monad
import Data.Aeson
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy as BL
import Data.Char (isSpace, toLower)
import Data.List (isInfixOf, sort)
import qualified Data.Map as M
import System.Directory
import System.Environment (getArgs)
import System.Exit
import System.FilePath
import System.IO.Temp (withSystemTempDirectory)
import System.Process
import Text.CSL
import Text.CSL.Compat.Pandoc (writeHtmlString)
import Text.CSL.Reference
import Text.CSL.Style hiding (Number)
import Text.Pandoc (Block (..), Format (..), Inline (..),
Pandoc (..), bottomUp, nullMeta)
import qualified Text.Pandoc.UTF8 as UTF8
import Text.Printf
import qualified Data.Text as T
import Data.Text (Text)
data TestCase = TestCase{
testMode :: Mode -- mode
, testBibopts :: BibOpts -- bibsection
, testCitations :: [CiteObject] -- citations
, testCitationItems :: Citations -- citation-items
, testCsl :: Style -- csl
, testAbbreviations :: Abbreviations -- abbreviations
, testReferences :: [Reference] -- input
, testResult :: Text -- result
} deriving (Show)
data Mode = CitationMode
| CitationRTFMode
| BibliographyMode
| BibliographyHeaderMode
| BibliographyNoSortMode
deriving Show
instance FromJSON Mode where
parseJSON (String "citation") = return CitationMode
parseJSON (String "citation-rtf") = return CitationRTFMode
parseJSON (String "bibliography") = return BibliographyMode
parseJSON (String "bibliography-header") = return BibliographyHeaderMode
parseJSON (String "bibliography-nosort") = return BibliographyNoSortMode
parseJSON _ = fail "Unknown mode"
instance FromJSON TestCase where
parseJSON (Object v) = TestCase <$>
v .: "mode" <*>
v .:? "bibsection" .!= Select [] [] <*>
((v .: "citations") >>= parseCitations) <*>
v .:? "citation_items" .!= [] <*>
(parseCSL <$> (v .: "csl")) <*>
v .:? "abbreviations" .!= (Abbreviations M.empty) <*>
v .: "input" <*>
v .: "result"
where parseCitations :: Data.Aeson.Value -> Parser [CiteObject]
parseCitations x@Array{} = parseJSON x
parseCitations _ = return []
parseJSON _ = fail "Could not parse test case"
newtype CiteObject =
CiteObject { unCiteObject :: [Cite] } deriving Show
instance FromJSON CiteObject where
parseJSON (Array v) =
case fromJSON (Array v) of
Success [Object x, Array _, Array _] ->
CiteObject <$> x .: "citationItems"
Error e -> fail $ "Could not parse CiteObject: " ++ e
x -> fail $ "Could not parse CiteObject" ++ show x
parseJSON x = fail $ "Could not parse CiteObject " ++ show x
#if MIN_VERSION_aeson(0,10,0)
#else
instance FromJSON [CiteObject] where
parseJSON (Array v) = mapM parseJSON $ V.toList v
parseJSON _ = return []
#endif
data TestResult =
Passed
| Skipped
| Failed
| Errored
deriving (Show, Eq)
testDir :: FilePath
testDir = "citeproc-test" </> "processor-tests" </> "machines"
handler :: FilePath -> E.SomeException -> IO TestResult
handler path e = do
putStrLn $ "[ERROR] " ++ path ++ "\n" ++ show e
return Errored
runTest :: FilePath -> IO TestResult
runTest path = E.handle (handler path) $ do
raw <- BL.readFile path
let testCase = either error id $ eitherDecode raw
let procOpts' = ProcOpts (testBibopts testCase) False
style <- localizeCSL Nothing
$ (testCsl testCase) { styleAbbrevs = testAbbreviations testCase }
let refs = testReferences testCase
let cites = map unCiteObject (testCitations testCase) ++ testCitationItems testCase
let cites' = if null cites
then [map (\ref -> emptyCite{ citeId = unLiteral $ refId ref}) refs]
else cites
let expected = adjustEntities $ fixBegins $ trimEnd $ testResult testCase
let mode = testMode testCase
let assemble BibliographyMode xs =
"<div class=\"csl-bib-body\">\n" <>
T.unlines (map (\x -> " <div class=\"csl-entry\">" <> x <>
"</div>") xs) <> "</div>\n"
assemble _ xs = T.unlines xs
case mode of
BibliographyHeaderMode -> do
putStrLn $ "[SKIPPED] " ++ path ++ "\n"
return Skipped
BibliographyNoSortMode -> do
putStrLn $ "[SKIPPED] " ++ path ++ "\n"
return Skipped
_ -> do
let result = assemble mode
$ map (inlinesToString . renderPandoc style) $
(case mode of {CitationMode -> citations; _ -> bibliography})
$ citeproc procOpts' style refs cites'
if result == expected
then do
putStrLn $ "[PASSED] " ++ path ++ "\n"
return Passed
else do
putStrLn $ "[FAILED] " ++ path
showDiff (T.unpack expected) (T.unpack result)
putStrLn ""
return Failed
trimEnd :: Text -> Text
trimEnd t = T.stripEnd t <> "\n"
-- this is designed to mimic the test suite's output:
inlinesToString :: [Inline] -> Text
inlinesToString ils =
writeHtmlString
$ bottomUp (concatMap adjustSpans)
$ Pandoc nullMeta [Plain ils]
-- We want & instead of & etc.
adjustEntities :: Text -> Text
adjustEntities = T.replace "&" "&"
-- citeproc-js test suite expects "citations" to be formatted like
-- .. [0] Smith (2007)
-- >> [1] Jones (2008)
-- To get a meaningful comparison, we remove this.
fixBegins :: Text -> Text
fixBegins = T.unlines . map fixLine . T.lines
where fixLine t =
case T.stripPrefix "..[" t `mplus` T.stripPrefix ">>[" t of
Just rest ->
T.dropWhile isSpace . T.dropWhile (not . isSpace) $ rest
Nothing -> t
-- adjust the spans so we fit what the test suite expects.
adjustSpans :: Inline -> [Inline]
adjustSpans (Note [Para xs]) = xs
adjustSpans (Link _ ils _) = ils
adjustSpans (Span ("",[],[]) xs) = xs
adjustSpans (Span ("",["nocase"],[]) xs) = xs
adjustSpans (Span ("",["citeproc-no-output"],[]) _) =
[Str "[CSL STYLE ERROR: reference with no printed form.]"]
adjustSpans (Span (id',classes,kvs) ils) =
[Span (id',classes',kvs') ils]
where classes' = filter (`notElem` ["csl-no-emph","csl-no-strong","csl-no-smallcaps"]) classes
kvs' = if null styles then kvs else (("style", mconcat styles) : kvs)
styles = ["font-style:normal;" | "csl-no-emph" `elem` classes]
++ ["font-weight:normal;" | "csl-no-strong" `elem` classes]
++ ["font-variant:normal;" | "csl-no-smallcaps" `elem` classes]
adjustSpans (Emph xs) =
RawInline (Format "html") "<i>" : xs ++ [RawInline (Format "html") "</i>"]
adjustSpans (Strong xs) =
RawInline (Format "html") "<b>" : xs ++ [RawInline (Format "html") "</b>"]
adjustSpans (SmallCaps xs) =
RawInline (Format "html") "<span style=\"font-variant:small-caps;\">" : xs ++ [RawInline (Format "html") "</span>"]
adjustSpans x = [x]
showDiff :: String -> String -> IO ()
showDiff expected' result' =
withSystemTempDirectory "test-pandoc-citeproc-XXX" $ \fp -> do
let expectedf = fp </> "expected"
let actualf = fp </> "actual"
UTF8.writeFile expectedf expected'
UTF8.writeFile actualf result'
withDirectory fp $ void $ rawSystem "diff" ["-u","expected","actual"]
withDirectory :: FilePath -> IO a -> IO a
withDirectory fp action = do
oldDir <- getCurrentDirectory
setCurrentDirectory fp
result <- action
setCurrentDirectory oldDir
return result
main :: IO ()
main = do
args <- getArgs
let matchesPattern x
| null args = True
| otherwise = any (`isInfixOf` (map toLower x))
(map (map toLower . takeBaseName) args)
exists <- doesDirectoryExist testDir
unless exists $ do
putStrLn "Downloading test suite"
_ <- rawSystem "git" ["clone", "https://github.com/citation-style-language/test-suite.git", "citeproc-test"]
withDirectory "citeproc-test" $
void $ rawSystem "python" ["processor.py", "--grind"]
testFiles <- if any ('/' `elem`) args
then return args
else (map (testDir </>) . sort .
filter matchesPattern .
filter (\f -> takeExtension f == ".json"))
<$> getDirectoryContents testDir
results <- mapM runTest testFiles
let numpasses = length $ filter (== Passed) results
let numskipped = length $ filter (== Skipped) results
let numfailures = length $ filter (== Failed) results
let numerrors = length $ filter (== Errored) results
putStrLn $ show numpasses ++ " passed; " ++ show numfailures ++
" failed; " ++ show numskipped ++ " skipped; " ++
show numerrors ++ " errored."
let summary = unlines $ zipWith (\fp res -> printf "%-10s %s" (show res) fp) testFiles results
when (null args) $ do -- write log if complete test suite run
ex <- doesFileExist "test-citeproc.log"
when ex $ do
putStrLn "Copying existing test-citeproc.log to test-citeproc.log.old"
copyFile "test-citeproc.log" "test-citeproc.log.old"
putStrLn "Writing test-citeproc.log."
UTF8.writeFile "test-citeproc.log" summary
exitWith $ if numfailures == 0
then ExitSuccess
else ExitFailure $ numfailures + numerrors
| jgm/pandoc-citeproc | tests/test-citeproc.hs | bsd-3-clause | 10,192 | 9 | 22 | 2,897 | 2,796 | 1,430 | 1,366 | 223 | 7 |
{-# LANGUAGE RecordWildCards, OverloadedStrings #-}
module Geordi.Request.Wai where
import Geordi.Request
import Geordi.FileInfo
import Geordi.FileBackend
import qualified Data.ByteString.Lazy as B
import qualified Network.Wai as W
import qualified Network.Wai.Parse as W
import qualified Network.HTTP.Types.URI as H
import qualified Network.HTTP.Types.Header as H
import qualified Network.HTTP.Types.Method as H
import qualified Web.Cookie as W
import qualified Data.Map as M
import Control.Monad.Trans.Resource
import qualified Data.Text.Lazy.Encoding as T
import qualified Data.Text.Lazy as T
import Data.Maybe
import Control.Arrow
fromWai :: W.Request -> FileBackend f -> ResourceT IO (Request f)
fromWai req (FB backend') = do
(posts, files) <- if W.requestMethod req == H.methodPost
then do (params, files) <- W.parseRequestBody backend' req
return ( M.fromListWith (++) . map (T.decodeUtf8 . B.fromStrict *** ((:[]) . T.decodeUtf8 . B.fromStrict)) $ params
, M.fromListWith (++) . map (T.decodeUtf8 . B.fromStrict *** ((:[]) . toOurFileInfo)) $ files
)
else return (M.empty, M.empty)
return $ Request { queries = M.fromListWith (++) $ map (T.fromStrict *** (:[]) . T.fromStrict . fromMaybe "") $ H.queryToQueryText $ W.queryString req
, cookies = M.fromListWith (++) $ map (T.fromStrict *** (:[]) . T.fromStrict) $ fromMaybe [] $ fmap W.parseCookiesText $ lookup H.hCookie $ W.requestHeaders req
, urlpieces = map T.fromStrict $ W.pathInfo req
, posts = posts
, files = files
, methodStr = W.requestMethod req
}
where toOurFileInfo (W.FileInfo {..}) = FileInfo fileName fileContentType fileContent
| liamoc/geordi | Geordi/Request/Wai.hs | bsd-3-clause | 1,957 | 0 | 22 | 569 | 573 | 327 | 246 | -1 | -1 |
-- | This module should be qualified-imported.
module Sound.Sox
(
-- * Building command-line arguments
mkArgs
-- * Operands
, operand
, autodetect
, Operand
-- ** Special file paths
, stdin
, stdout
-- * Formats
, bits
, channels
, rate
, Format
-- ** Endianness
, endian
, big
, little
, Endian
-- ** File types
, fileType
, raw
, snd
, wav
, FileType
-- ** Encodings
, encoding
, signedInteger
, floatingPoint
, Encoding
-- * Effects
, setChannels
, normalize
, Effect
-- * Type hints
, Dbfs
, NumBit
, NumChan
, Rate
)
where
import Data.Monoid
import Prelude hiding (snd)
newtype Format
= MkFormat [String]
instance Monoid Format where
mempty = MkFormat []
mappend (MkFormat x) (MkFormat y) = MkFormat (mappend x y)
-- | Number of bits per sample.
bits :: NumBit Int -> Format
bits n = MkFormat ["--bits", show n]
type NumBit a = a
-- | Number of channels.
channels :: NumChan Int -> Format
channels n = MkFormat ["--channels", show n]
type NumChan a = a
-- | Number of samples per second.
rate :: Rate Int -> Format
rate n = MkFormat ["--rate", show n]
type Rate a = a
-- | Sample type.
encoding :: Encoding -> Format
encoding (MkEncoding e) = MkFormat ["--encoding", e]
newtype Encoding
= MkEncoding String
signedInteger :: Encoding
signedInteger = MkEncoding "signed-integer"
floatingPoint :: Encoding
floatingPoint = MkEncoding "floating-point"
newtype FileType
= MkFileType String
raw :: FileType
raw = MkFileType "raw"
snd :: FileType
snd = MkFileType "snd"
wav :: FileType
wav = MkFileType "wav"
fileType :: FileType -> Format
fileType (MkFileType x) = MkFormat ["--type", x]
newtype Endian
= MkEndian String
endian :: Endian -> Format
endian (MkEndian x) = MkFormat ["--endian", x]
big :: Endian
big = MkEndian "big"
little :: Endian
little = MkEndian "little"
data Operand
= MkOperand Format FilePath
stdin :: FilePath
stdin = "-"
stdout :: FilePath
stdout = "-"
autodetect :: FilePath -> Operand
autodetect = MkOperand mempty
operand :: Format -> FilePath -> Operand
operand = MkOperand
-- | Set the number of channels.
setChannels :: NumChan Int -> Effect
setChannels n = MkEffect ["channel", show n]
normalize :: Dbfs Int -> Effect
normalize dbfs = MkEffect ["gain", "-n", show dbfs]
type Dbfs a = a
data Effect
= MkEffect [String]
-- | This does not include the executable.
mkArgs :: [Operand] -> Operand -> [Effect] -> [String]
mkArgs inputs output effects =
["--buffer", "131072", "--multi-threaded"]
++ concatMap unOp inputs
++ unOp output
++ concatMap unEf effects
where
unOp (MkOperand (MkFormat fmt) path) = fmt ++ [path]
unEf (MkEffect x) = x
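-- A hedged usage sketch, not part of the original module: build an argument
-- list for reading raw 16-bit mono 44.1 kHz audio from standard input and
-- writing a normalized WAV file. The name 'exampleArgs' and the particular
-- format/effect values are illustrative assumptions; consult the sox manual
-- for their exact semantics.
exampleArgs :: [String]
exampleArgs =
  mkArgs
    [operand (mconcat [fileType raw, bits 16, channels 1, rate 44100]) stdin]
    (operand (fileType wav) "out.wav")
    [normalize (-3)]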
| edom/sound | src/Sound/Sox.hs | bsd-3-clause | 2,822 | 0 | 11 | 698 | 787 | 444 | 343 | 99 | 1 |
{-# LANGUAGE ViewPatterns #-}
--
-- Copyright © 2014-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
-- | Description: Run /Synchronise/ as a one-shot command.
--
module Synchronise.Program.Once
( -- * One-shot `synchronise` on documents
Request(..)
, synchroniseOnce
-- * Run store commands
, runPSQL
) where
import Control.Exception
import Control.Monad.IO.Class
import Data.Aeson
import Synchronise.Configuration
import Synchronise.DataSource
import Synchronise.Document
import Synchronise.Identifier
import Synchronise.Store
import Synchronise.Store.PostgreSQL
import Synchronise.Monad
--------------------------------------------------------------------------------
-- * Operations on documents
data Request
= Create { commandKey :: ForeignKey }
| Read { commandKey :: ForeignKey }
| Update { commandKey :: ForeignKey }
| Delete { commandKey :: ForeignKey }
deriving (Eq, Show)
-- | Run a single command on documents.
--
synchroniseOnce
:: Request
-> Configuration
-> SynchroniseMonad ()
synchroniseOnce req cfg = do
let rk = commandKey req
ds <- either error return $ getDataSource cfg (fkEntity rk) (fkSource rk)
case req of
Create fk -> inputDocument fk >>= exec . createDocument ds
Read fk -> exec $ readDocument ds fk
Update fk -> inputDocument rk >>= exec . updateDocument ds fk
Delete fk -> exec $ deleteDocument ds fk
where
exec :: (MonadIO m, Show a) => DSMonad m a -> m ()
exec a = do
res <- runDSMonad a
case res of
Left e -> error $ show e
Right v -> liftIO $ print v
-- | Produce a 'Document' for the given foreign key. Intended to read JSON
-- from standard input, but currently a placeholder that uses a 'Null' body.
inputDocument
:: MonadIO m
=> ForeignKey
-> m Document
inputDocument fk =
let e = fkEntity fk
s = fkSource fk
in return $ Document e s Null
--------------------------------------------------------------------------------
-- * Low-level operations on a persistent store.
runPSQL :: (PGStore -> IO a) -> Configuration -> IO a
runPSQL act (configServer -> (_,_,pg_conn))
= bracket (initBackend (PGOpts pg_conn))
(closeBackend)
act
| anchor/synchronise | lib/Synchronise/Program/Once.hs | bsd-3-clause | 2,406 | 0 | 13 | 554 | 547 | 289 | 258 | 53 | 5 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
#include "inline.hs"
-- |
-- Module : Streamly.Internal.Data.Pipe.Types
-- Copyright : (c) 2019 Composewell Technologies
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
module Streamly.Internal.Data.Pipe.Types
( Step (..)
, Pipe (..)
, PipeState (..)
, zipWith
, tee
, map
, compose
)
where
import Control.Arrow (Arrow(..))
import Control.Category (Category(..))
import Data.Maybe (isJust)
#if __GLASGOW_HASKELL__ < 808
import Data.Semigroup (Semigroup(..))
#endif
import Prelude hiding (zipWith, map, id, unzip, null)
import Streamly.Internal.Data.Strict (Tuple'(..), Tuple3'(..))
import qualified Prelude
------------------------------------------------------------------------------
-- Pipes
------------------------------------------------------------------------------
-- A scan is a much simpler version of pipes. A scan always produces an output
-- on an input whereas a pipe does not necessarily produce an output on an
-- input, it might consume multiple inputs before producing an output. That way
-- it can implement filtering. Similarly, it can produce more than one output
-- on an single input.
--
-- Therefore when two pipes are composed in parallel formation, one may run
-- slower or faster than the other. If all of them are being fed from the same
-- source, we may have to buffer the input to match the speeds. In case of
-- scans we do not have that problem.
--
-- We may also need a "Stop" constructor to indicate that we are not generating
-- any more values and we can have a "Done" constructor to indicate that we are
-- not consuming any more values. Similarly we can have a stop with error or
-- exception and a done with error or leftover values.
--
-- In generator mode, Continue means "no output yet, keep going". In fold
-- mode, Continue means "more input is needed before a result can be
-- produced". The single Continue constructor covers both cases.
--
data Step s a =
Yield a s
| Continue s
-- | Represents a stateful transformation over an input stream of values of
-- type @a@ to outputs of type @b@ in 'Monad' @m@.
-- A pipe uses a consume function and a produce function. It can switch from
-- consume/fold mode to a produce/source mode. The first step function is a
-- fold function while the second one is a stream generator function.
--
-- We can upgrade a stream or a fold into a pipe. However, streams are more
-- efficient in generation and folds are more efficient in consumption.
--
-- For pure transformation we can have a 'Scan' type. A Scan would be more
-- efficient in zipping whereas pipes are useful for merging and zipping where
-- we know buffering can occur. A Scan type can be upgraded to a pipe.
--
-- XXX In general the starting state could either be for generation or for
-- consumption. Currently we are only starting with a consumption state.
--
-- An explicit either type for better readability of the code
data PipeState s1 s2 = Consume s1 | Produce s2
isProduce :: PipeState s1 s2 -> Bool
isProduce s =
case s of
Produce _ -> True
Consume _ -> False
data Pipe m a b =
forall s1 s2. Pipe (s1 -> a -> m (Step (PipeState s1 s2) b))
(s2 -> m (Step (PipeState s1 s2) b)) s1
instance Monad m => Functor (Pipe m a) where
{-# INLINE_NORMAL fmap #-}
fmap f (Pipe consume produce initial) = Pipe consume' produce' initial
where
{-# INLINE_LATE consume' #-}
consume' st a = do
r <- consume st a
return $ case r of
Yield x s -> Yield (f x) s
Continue s -> Continue s
{-# INLINE_LATE produce' #-}
produce' st = do
r <- produce st
return $ case r of
Yield x s -> Yield (f x) s
Continue s -> Continue s
-- XXX move this to a separate module
data Deque a = Deque [a] [a]
{-# INLINE null #-}
null :: Deque a -> Bool
null (Deque [] []) = True
null _ = False
{-# INLINE snoc #-}
snoc :: a -> Deque a -> Deque a
snoc a (Deque snocList consList) = Deque (a : snocList) consList
{-# INLINE uncons #-}
uncons :: Deque a -> Maybe (a, Deque a)
uncons (Deque snocList consList) =
case consList of
h : t -> Just (h, Deque snocList t)
_ ->
case Prelude.reverse snocList of
h : t -> Just (h, Deque [] t)
_ -> Nothing
-- | The composed pipe distributes the input to both the constituent pipes and
-- zips the output of the two using a supplied zipping function.
--
-- @since 0.7.0
{-# INLINE_NORMAL zipWith #-}
zipWith :: Monad m => (a -> b -> c) -> Pipe m i a -> Pipe m i b -> Pipe m i c
zipWith f (Pipe consumeL produceL stateL) (Pipe consumeR produceR stateR) =
Pipe consume produce state
where
-- Left state means we need to consume input from the source. A Right
-- state means we either have buffered input or we are in generation
-- mode so we do not need input from source in either case.
--
state = Tuple' (Consume stateL, Nothing, Nothing)
(Consume stateR, Nothing, Nothing)
-- XXX for heavy buffering we need to have the (ring) buffer in pinned
-- memory using the Storable instance.
{-# INLINE_LATE consume #-}
consume (Tuple' (sL, resL, lq) (sR, resR, rq)) a = do
s1 <- drive sL resL lq consumeL produceL a
s2 <- drive sR resR rq consumeR produceR a
yieldOutput s1 s2
where
{-# INLINE drive #-}
drive st res queue fConsume fProduce val = do
case res of
Nothing -> goConsume st queue val fConsume fProduce
Just x -> return $
case queue of
Nothing -> (st, Just x, Just $ (Deque [val] []))
Just q -> (st, Just x, Just $ snoc val q)
{-# INLINE goConsume #-}
goConsume stt queue val fConsume stp2 = do
case stt of
Consume st -> do
case queue of
Nothing -> do
r <- fConsume st val
return $ case r of
Yield x s -> (s, Just x, Nothing)
Continue s -> (s, Nothing, Nothing)
Just queue' ->
case uncons queue' of
Just (v, q) -> do
r <- fConsume st v
let q' = snoc val q
return $ case r of
Yield x s -> (s, Just x, Just q')
Continue s -> (s, Nothing, Just q')
Nothing -> undefined -- never occurs
Produce st -> do
r <- stp2 st
return $ case r of
Yield x s -> (s, Just x, queue)
Continue s -> (s, Nothing, queue)
{-# INLINE_LATE produce #-}
produce (Tuple' (sL, resL, lq) (sR, resR, rq)) = do
s1 <- drive sL resL lq consumeL produceL
s2 <- drive sR resR rq consumeR produceR
yieldOutput s1 s2
where
{-# INLINE drive #-}
drive stt res q fConsume fProduce = do
case res of
Nothing -> goProduce stt q fConsume fProduce
Just x -> return (stt, Just x, q)
{-# INLINE goProduce #-}
goProduce stt queue fConsume fProduce = do
case stt of
Consume st -> do
case queue of
-- See yieldOutput. We enter produce mode only when
-- each pipe is either in Produce state or the
-- queue is non-empty. So this case cannot occur.
Nothing -> undefined
Just queue' ->
case uncons queue' of
Just (v, q) -> do
r <- fConsume st v
-- We provide a guarantee that if the
-- queue is "Just" it is always
-- non-empty. yieldOutput and goConsume
-- depend on it.
let q' = if null q
then Nothing
else Just q
return $ case r of
Yield x s -> (s, Just x, q')
Continue s -> (s, Nothing, q')
Nothing -> return (stt, Nothing, Nothing)
Produce st -> do
r <- fProduce st
return $ case r of
Yield x s -> (s, Just x, queue)
Continue s -> (s, Nothing, queue)
{-# INLINE yieldOutput #-}
yieldOutput s1@(sL', resL', lq') s2@(sR', resR', rq') = return $
-- switch to produce mode if we do not need input
if (isProduce sL' || isJust lq') && (isProduce sR' || isJust rq')
then
case (resL', resR') of
(Just xL, Just xR) ->
Yield (f xL xR) (Produce (Tuple' (clear s1) (clear s2)))
_ -> Continue (Produce (Tuple' s1 s2))
else
case (resL', resR') of
(Just xL, Just xR) ->
Yield (f xL xR) (Consume (Tuple' (clear s1) (clear s2)))
_ -> Continue (Consume (Tuple' s1 s2))
where clear (s, _, q) = (s, Nothing, q)
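-- A hedged illustration ('zipExample' is an assumed helper, not part of the
-- original API): every input is distributed to both component pipes and
-- their outputs are combined with the zipping function, so this pipe yields
-- (2*x) + (x+1) for each input x.
zipExample :: Monad m => Pipe m Int Int
zipExample = zipWith (+) (map (*2)) (map (+1))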
instance Monad m => Applicative (Pipe m a) where
{-# INLINE pure #-}
pure b = Pipe (\_ _ -> pure $ Yield b (Consume ())) undefined ()
(<*>) = zipWith id
-- | The composed pipe distributes the input to both the constituent pipes and
-- merges the outputs of the two.
--
-- @since 0.7.0
{-# INLINE_NORMAL tee #-}
tee :: Monad m => Pipe m a b -> Pipe m a b -> Pipe m a b
tee (Pipe consumeL produceL stateL) (Pipe consumeR produceR stateR) =
Pipe consume produce state
where
state = Tuple' (Consume stateL) (Consume stateR)
consume (Tuple' sL sR) a = do
case sL of
Consume st -> do
r <- consumeL st a
return $ case r of
Yield x s -> Yield x (Produce (Tuple3' (Just a) s sR))
Continue s -> Continue (Produce (Tuple3' (Just a) s sR))
-- XXX we should never come here unless the initial state of the
-- first pipe is set to "Right".
Produce _st -> undefined -- do
{-
r <- produceL st
return $ case r of
Yield x s -> Yield x (Right (Tuple3' (Just a) s sR))
Continue s -> Continue (Right (Tuple3' (Just a) s sR))
-}
produce (Tuple3' (Just a) sL sR) = do
case sL of
Consume _ -> do
case sR of
Consume st -> do
r <- consumeR st a
let nextL s = Consume (Tuple' sL s)
let nextR s = Produce (Tuple3' Nothing sL s)
return $ case r of
Yield x s@(Consume _) -> Yield x (nextL s)
Yield x s@(Produce _) -> Yield x (nextR s)
Continue s@(Consume _) -> Continue (nextL s)
Continue s@(Produce _) -> Continue (nextR s)
-- We will never come here unless the initial state of
-- second pipe is set to "Right".
Produce _ -> undefined
Produce st -> do
r <- produceL st
let next s = Produce (Tuple3' (Just a) s sR)
return $ case r of
Yield x s -> Yield x (next s)
Continue s -> Continue (next s)
produce (Tuple3' Nothing sL sR) = do
case sR of
Consume _ -> undefined -- should never occur
Produce st -> do
r <- produceR st
return $ case r of
Yield x s@(Consume _) ->
Yield x (Consume (Tuple' sL s))
Yield x s@(Produce _) ->
Yield x (Produce (Tuple3' Nothing sL s))
Continue s@(Consume _) ->
Continue (Consume (Tuple' sL s))
Continue s@(Produce _) ->
Continue (Produce (Tuple3' Nothing sL s))
instance Monad m => Semigroup (Pipe m a b) where
{-# INLINE (<>) #-}
(<>) = tee
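-- A hedged illustration ('teeExample' is an assumed helper, not part of the
-- original API): 'tee' duplicates every input, so this pipe yields both the
-- doubled and the incremented value for each incoming number.
teeExample :: Monad m => Pipe m Int Int
teeExample = map (*2) `tee` map (+1)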
-- | Lift a pure function to a 'Pipe'.
--
-- @since 0.7.0
{-# INLINE map #-}
map :: Monad m => (a -> b) -> Pipe m a b
map f = Pipe consume undefined ()
where
consume _ a = return $ Yield (f a) (Consume ())
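-- A hedged illustration ('filterPipe' is an assumed helper, not part of the
-- original API): a filtering pipe built directly with the 'Pipe'
-- constructor. It stays in consume mode and uses 'Continue' to drop
-- elements, which is the behaviour a scan cannot express (see the notes at
-- the top of this module).
filterPipe :: Monad m => (a -> Bool) -> Pipe m a a
filterPipe p = Pipe consume undefined ()
    where
    consume _ a = return $ if p a
                           then Yield a (Consume ())
                           else Continue (Consume ())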
{-
-- | A hollow or identity 'Pipe' passes through everything that comes in.
--
-- @since 0.7.0
{-# INLINE id #-}
id :: Monad m => Pipe m a a
id = map Prelude.id
-}
-- | Compose two pipes such that the output of the second pipe is attached to
-- the input of the first pipe.
--
-- @since 0.7.0
{-# INLINE_NORMAL compose #-}
compose :: Monad m => Pipe m b c -> Pipe m a b -> Pipe m a c
compose (Pipe consumeL produceL stateL) (Pipe consumeR produceR stateR) =
Pipe consume produce state
where
state = Tuple' (Consume stateL) (Consume stateR)
consume (Tuple' sL sR) a = do
case sL of
Consume stt ->
case sR of
Consume st -> do
rres <- consumeR st a
case rres of
Yield x sR' -> do
let next s =
if isProduce sR'
then Produce s
else Consume s
lres <- consumeL stt x
return $ case lres of
Yield y s1@(Consume _) ->
Yield y (next $ Tuple' s1 sR')
Yield y s1@(Produce _) ->
Yield y (Produce $ Tuple' s1 sR')
Continue s1@(Consume _) ->
Continue (next $ Tuple' s1 sR')
Continue s1@(Produce _) ->
Continue (Produce $ Tuple' s1 sR')
Continue s1@(Consume _) ->
return $ Continue (Consume $ Tuple' sL s1)
Continue s1@(Produce _) ->
return $ Continue (Produce $ Tuple' sL s1)
Produce _ -> undefined
-- XXX we should never come here unless the initial state of the
-- first pipe is set to "Right".
Produce _ -> undefined
-- XXX we need to write the code in a more optimized fashion. Use Continue
-- more and less yield points.
produce (Tuple' sL sR) = do
case sL of
Produce st -> do
r <- produceL st
let next s = if isProduce sR then Produce s else Consume s
return $ case r of
Yield x s@(Consume _) -> Yield x (next $ Tuple' s sR)
Yield x s@(Produce _) -> Yield x (Produce $ Tuple' s sR)
Continue s@(Consume _) -> Continue (next $ Tuple' s sR)
Continue s@(Produce _) -> Continue (Produce $ Tuple' s sR)
Consume stt ->
case sR of
Produce st -> do
rR <- produceR st
case rR of
Yield x sR' -> do
let next s =
if isProduce sR'
then Produce s
else Consume s
rL <- consumeL stt x
return $ case rL of
Yield y s1@(Consume _) ->
Yield y (next $ Tuple' s1 sR')
Yield y s1@(Produce _) ->
Yield y (Produce $ Tuple' s1 sR')
Continue s1@(Consume _) ->
Continue (next $ Tuple' s1 sR')
Continue s1@(Produce _) ->
Continue (Produce $ Tuple' s1 sR')
Continue s1@(Consume _) ->
return $ Continue (Consume $ Tuple' sL s1)
Continue s1@(Produce _) ->
return $ Continue (Produce $ Tuple' sL s1)
Consume _ -> return $ Continue (Consume $ Tuple' sL sR)
instance Monad m => Category (Pipe m) where
{-# INLINE id #-}
id = map Prelude.id
{-# INLINE (.) #-}
(.) = compose
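-- A hedged illustration ('incrementThenDouble' is an assumed helper, not
-- part of the original API): chaining pipes with 'compose'; the right-hand
-- pipe runs first, so this one increments each input and then doubles the
-- result.
incrementThenDouble :: Monad m => Pipe m Int Int
incrementThenDouble = map (*2) `compose` map (+1)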
unzip :: Pipe m a x -> Pipe m b y -> Pipe m (a, b) (x, y)
unzip = undefined
instance Monad m => Arrow (Pipe m) where
{-# INLINE arr #-}
arr = map
{-# INLINE (***) #-}
(***) = unzip
{-# INLINE (&&&) #-}
(&&&) = zipWith (,)
| harendra-kumar/asyncly | src/Streamly/Internal/Data/Pipe/Types.hs | bsd-3-clause | 18,197 | 0 | 29 | 7,967 | 4,305 | 2,187 | 2,118 | 281 | 21 |
{-# LANGUAGE TemplateHaskell #-}
module Dipper.Jar
( dipperJar
, withDipperJar
) where
import Control.Exception (bracket, catch)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.FileEmbed (embedFile)
import System.Directory (getTemporaryDirectory, removeFile)
import System.IO (openBinaryTempFileWithDefaultPermissions, hClose)
------------------------------------------------------------------------
dipperJar :: ByteString
dipperJar = $(embedFile "jar/target/dipper.jar")
------------------------------------------------------------------------
withDipperJar :: (FilePath -> IO a) -> IO a
withDipperJar = withTempFile "dipper.jar" dipperJar
------------------------------------------------------------------------
withTempFile :: String -> B.ByteString -> (FilePath -> IO a) -> IO a
withTempFile name content action = do
tmp <- getTemporaryDirectory
bracket (openBinaryTempFileWithDefaultPermissions tmp name)
(\(p, h) -> hClose h >> ignoreIOErrors (removeFile p))
(\(p, h) -> B.hPut h content >> hClose h >> action p)
where
ignoreIOErrors ioe = ioe `catch` (\e -> const (return ()) (e :: IOError))
| jystic/dipper | src/Dipper/Jar.hs | bsd-3-clause | 1,247 | 0 | 13 | 229 | 311 | 171 | 140 | 21 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Handlers.Http
( handler
) where
--------------------------------------------------------------------------------
import Control.Exception (SomeException (..), catch)
import Control.Monad.Trans (liftIO)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Conduit as H
import qualified Network.HTTP.Types as H
import Prelude hiding (catch)
--------------------------------------------------------------------------------
import NumberSix.Bang
import NumberSix.Irc
import NumberSix.Util
import NumberSix.Util.Http (httpPrefix)
--------------------------------------------------------------------------------
http :: Text -> IO Text
http uri = do
req <- H.parseUrl uri'
let req' = req {H.redirectCount = 0, H.checkStatus = \_ _ _ -> Nothing}
rsp <- H.withManager $ \m -> H.httpLbs req' m
let status = H.responseStatus rsp
location
| H.statusCode status < 300 = ""
| H.statusCode status >= 400 = ""
| otherwise =
case lookup "Location" (H.responseHeaders rsp) of
Nothing -> ""
Just loc -> " (Location: " <> loc <> ")"
return $ T.pack (show $ H.responseVersion rsp) <> " " <>
T.pack (show $ H.statusCode status) <> " " <>
T.decodeUtf8 (H.statusMessage status) <> T.decodeUtf8 location
where
uri' = T.unpack $ httpPrefix uri
--------------------------------------------------------------------------------
-- | Catch possible network errors
wrapped :: Text -> IO Text
wrapped uri = catch (http uri) $ \(SomeException e) ->
return $ T.pack $ show e
--------------------------------------------------------------------------------
handler :: UninitializedHandler
handler = makeBangHandler "Http" ["!http"] $ liftIO . wrapped
| itkovian/number-six | src/NumberSix/Handlers/Http.hs | bsd-3-clause | 2,134 | 0 | 17 | 522 | 500 | 269 | 231 | 37 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Model where
import Data.Time
import Database.Persist.TH
import qualified Data.Text as T
share [mkPersist sqlSettings, mkMigrate "migrateModel"] [persistLowerCase|
Customer json
name T.Text
deriving Show
Item json
name T.Text
price Double
revenue Double
deriving Show
Sale json
item ItemId
customer CustomerId
amount Int
time UTCTime
deriving Show
|]
| agrafix/revenue-sample-app | src/Model.hs | bsd-3-clause | 841 | 0 | 7 | 265 | 56 | 38 | 18 | 14 | 0 |
{-# LANGUAGE RecordWildCards #-}
module SlitherBot.Ai.Circle
( CircleAiState
, circleAi
) where
import Data.Fixed
import SlitherBot.Ai
import SlitherBot.GameState
data CircleAiState = CircleAiState
{ casAngle :: !Double
} deriving (Eq, Show)
circleAi :: Ai CircleAiState
circleAi = Ai
{ aiUpdate = \GameState{..} cas@CircleAiState{..} ->
let newAngle = (casAngle + pi / 4) `mod'` (2 * pi)
in (AiOutput{aoAngle = newAngle, aoSpeedup = False}, cas{ casAngle = newAngle })
, aiInitialState = CircleAiState{casAngle = 0}
, aiHtmlStatus = mempty
}
| chpatrick/slither-bot | src/SlitherBot/Ai/Circle.hs | bsd-3-clause | 607 | 0 | 15 | 142 | 184 | 109 | 75 | 19 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- ---------------------------------------------------------------------------
-- |
-- Module : Data.Vector.Algorithms.AmericanFlag
-- Copyright : (c) 2011 Dan Doel
-- Maintainer : Dan Doel <[email protected]>
-- Stability : Experimental
-- Portability : Non-portable (FlexibleContexts, ScopedTypeVariables)
--
-- This module implements American flag sort: an in-place, unstable, bucket
-- sort. In contrast to radix sort, the values are inspected in big-endian
-- order, and buckets are sorted via recursive splitting. This makes it well
-- suited to sorting strings in lexicographic order (provided indexing is
-- fast).
--
-- The algorithm works as follows: at each stage, the array is looped over,
-- counting the number of elements for each bucket. Then, starting at the
-- beginning of the array, elements are permuted in place to reside in the
-- proper bucket, following chains until they reach back to the current
-- base index. Finally, each bucket is sorted recursively. This lends itself
-- well to the aforementioned variable-length strings, and so the algorithm
-- takes a stopping predicate, which is given a representative of the stripe,
-- rather than running for a set number of iterations.
module Data.Vector.Algorithms.AmericanFlag ( sort
, sortBy
, Lexicographic(..)
) where
import Prelude hiding (read, length)
import Control.Monad
import Control.Monad.Primitive
import Data.Word
import Data.Int
import Data.Bits
import qualified Data.ByteString as B
import Data.Vector.Generic.Mutable
import qualified Data.Vector.Primitive.Mutable as PV
import qualified Data.Vector.Unboxed.Mutable as U
import Data.Vector.Algorithms.Common
import qualified Data.Vector.Algorithms.Insertion as I
-- | The methods of this class specify the information necessary to sort
-- arrays using the default ordering. The name 'Lexicographic' is meant
-- to convey that index should return results in a similar way to indexing
-- into a string.
class Lexicographic e where
-- | Given a representative of a stripe and an index number, this
-- function should determine whether to stop sorting.
terminate :: e -> Int -> Bool
-- | The size of the bucket array necessary for sorting es
size :: e -> Int
-- | Determines which bucket a given element should inhabit for a
-- particular iteration.
index :: Int -> e -> Int
instance Lexicographic Word8 where
terminate _ n = n > 0
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index _ n = fromIntegral n
{-# INLINE index #-}
instance Lexicographic Word16 where
terminate _ n = n > 1
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 1 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Word32 where
terminate _ n = n > 3
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ (n `shiftR` 24) .&. 255
index 1 n = fromIntegral $ (n `shiftR` 16) .&. 255
index 2 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 3 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Word64 where
terminate _ n = n > 7
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ (n `shiftR` 56) .&. 255
index 1 n = fromIntegral $ (n `shiftR` 48) .&. 255
index 2 n = fromIntegral $ (n `shiftR` 40) .&. 255
index 3 n = fromIntegral $ (n `shiftR` 32) .&. 255
index 4 n = fromIntegral $ (n `shiftR` 24) .&. 255
index 5 n = fromIntegral $ (n `shiftR` 16) .&. 255
index 6 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 7 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Word where
terminate _ n = n > 7
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ (n `shiftR` 56) .&. 255
index 1 n = fromIntegral $ (n `shiftR` 48) .&. 255
index 2 n = fromIntegral $ (n `shiftR` 40) .&. 255
index 3 n = fromIntegral $ (n `shiftR` 32) .&. 255
index 4 n = fromIntegral $ (n `shiftR` 24) .&. 255
index 5 n = fromIntegral $ (n `shiftR` 16) .&. 255
index 6 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 7 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Int8 where
terminate _ n = n > 0
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index _ n = 255 .&. fromIntegral n `xor` 128
{-# INLINE index #-}
instance Lexicographic Int16 where
terminate _ n = n > 1
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ ((n `xor` minBound) `shiftR` 8) .&. 255
index 1 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Int32 where
terminate _ n = n > 3
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ ((n `xor` minBound) `shiftR` 24) .&. 255
index 1 n = fromIntegral $ (n `shiftR` 16) .&. 255
index 2 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 3 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Int64 where
terminate _ n = n > 7
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = fromIntegral $ ((n `xor` minBound) `shiftR` 56) .&. 255
index 1 n = fromIntegral $ (n `shiftR` 48) .&. 255
index 2 n = fromIntegral $ (n `shiftR` 40) .&. 255
index 3 n = fromIntegral $ (n `shiftR` 32) .&. 255
index 4 n = fromIntegral $ (n `shiftR` 24) .&. 255
index 5 n = fromIntegral $ (n `shiftR` 16) .&. 255
index 6 n = fromIntegral $ (n `shiftR` 8) .&. 255
index 7 n = fromIntegral $ n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic Int where
terminate _ n = n > 7
{-# INLINE terminate #-}
size _ = 256
{-# INLINE size #-}
index 0 n = ((n `xor` minBound) `shiftR` 56) .&. 255
index 1 n = (n `shiftR` 48) .&. 255
index 2 n = (n `shiftR` 40) .&. 255
index 3 n = (n `shiftR` 32) .&. 255
index 4 n = (n `shiftR` 24) .&. 255
index 5 n = (n `shiftR` 16) .&. 255
index 6 n = (n `shiftR` 8) .&. 255
index 7 n = n .&. 255
index _ _ = 0
{-# INLINE index #-}
instance Lexicographic B.ByteString where
terminate b i = i >= B.length b
{-# INLINE terminate #-}
size _ = 257
{-# INLINE size #-}
index i b
| i >= B.length b = 0
| otherwise = fromIntegral (B.index b i) + 1
{-# INLINE index #-}
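-- A hedged illustration, not part of the original module: a 'Lexicographic'
-- instance for a small custom key type ('BytePair' is an assumed example
-- name). The high byte is inspected first, and two passes suffice, so
-- 'terminate' stops once the index reaches 2.
newtype BytePair = BytePair (Word8, Word8) deriving (Eq, Ord)

instance Lexicographic BytePair where
  terminate _ n = n > 1
  {-# INLINE terminate #-}
  size _ = 256
  {-# INLINE size #-}
  index 0 (BytePair (a, _)) = fromIntegral a
  index 1 (BytePair (_, b)) = fromIntegral b
  index _ _ = 0
  {-# INLINE index #-}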
-- | Sorts an array using the default ordering. Both Lexicographic and
-- Ord are necessary because the algorithm falls back to insertion sort
-- for sufficiently small arrays.
sort :: forall e m v. (PrimMonad m, MVector v e, Lexicographic e, Ord e)
=> v (PrimState m) e -> m ()
sort v = sortBy compare terminate (size e) index v
where e :: e
e = undefined
{-# INLINE sort #-}
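-- A hedged usage sketch, not part of the original module ('exampleSort' is
-- an assumed name): sorting a small unboxed mutable vector of 'Word32' keys
-- in place.
exampleSort :: IO ()
exampleSort = do
  v <- U.new 3 :: IO (U.IOVector Word32)
  U.write v 0 42
  U.write v 1 7
  U.write v 2 19
  sort v -- v now holds 7, 19, 42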
-- | A fully parameterized version of the sorting algorithm. Again, this
-- function takes both radix information and a comparison, because the
-- algorithm falls back to insertion sort for small arrays.
sortBy :: (PrimMonad m, MVector v e)
       => Comparison e -- ^ a comparison for the insertion sort fallback
-> (e -> Int -> Bool) -- ^ determines whether a stripe is complete
-> Int -- ^ the number of buckets necessary
-> (Int -> e -> Int) -- ^ the big-endian radix function
-> v (PrimState m) e -- ^ the array to be sorted
-> m ()
sortBy cmp stop buckets radix v
| length v == 0 = return ()
| otherwise = do count <- new buckets
pile <- new buckets
countLoop (radix 0) v count
flagLoop cmp stop radix count pile v
{-# INLINE sortBy #-}
flagLoop :: (PrimMonad m, MVector v e)
=> Comparison e
         -> (e -> Int -> Bool) -- stopping predicate
-> (Int -> e -> Int) -- radix function
-> PV.MVector (PrimState m) Int -- auxiliary count array
-> PV.MVector (PrimState m) Int -- auxiliary pile array
-> v (PrimState m) e -- source array
-> m ()
flagLoop cmp stop radix count pile v = go 0 v
where
go pass v = do e <- unsafeRead v 0
unless (stop e $ pass - 1) $ go' pass v
go' pass v
| len < threshold = I.sortByBounds cmp v 0 len
| otherwise = do accumulate count pile
permute (radix pass) count pile v
recurse 0
where
len = length v
ppass = pass + 1
recurse i
| i < len = do j <- countStripe (radix ppass) (radix pass) count v i
go ppass (unsafeSlice i (j - i) v)
recurse j
| otherwise = return ()
{-# INLINE flagLoop #-}
accumulate :: (PrimMonad m)
=> PV.MVector (PrimState m) Int
-> PV.MVector (PrimState m) Int
-> m ()
accumulate count pile = loop 0 0
where
len = length count
loop i acc
| i < len = do ci <- unsafeRead count i
let acc' = acc + ci
unsafeWrite pile i acc
unsafeWrite count i acc'
loop (i+1) acc'
| otherwise = return ()
{-# INLINE accumulate #-}
permute :: (PrimMonad m, MVector v e)
=> (e -> Int) -- radix function
-> PV.MVector (PrimState m) Int -- count array
-> PV.MVector (PrimState m) Int -- pile array
-> v (PrimState m) e -- source array
-> m ()
permute rdx count pile v = go 0
where
len = length v
go i
| i < len = do e <- unsafeRead v i
let r = rdx e
p <- unsafeRead pile r
m <- if r > 0
then unsafeRead count (r-1)
else return 0
case () of
-- if the current element is already in the right pile,
-- go to the end of the pile
_ | m <= i && i < p -> go p
-- if the current element happens to be in the right
-- pile, bump the pile counter and go to the next element
| i == p -> unsafeWrite pile r (p+1) >> go (i+1)
-- otherwise follow the chain
| otherwise -> follow i e p >> go (i+1)
| otherwise = return ()
follow i e j = do en <- unsafeRead v j
let r = rdx en
p <- inc pile r
if p == j
-- if the target happens to be in the right pile, don't move it.
then follow i e (j+1)
else unsafeWrite v j e >> if i == p
then unsafeWrite v i en
else follow i en p
{-# INLINE permute #-}
countStripe :: (PrimMonad m, MVector v e)
=> (e -> Int) -- radix function
-> (e -> Int) -- stripe function
-> PV.MVector (PrimState m) Int -- count array
-> v (PrimState m) e -- source array
-> Int -- starting position
-> m Int -- end of stripe: [lo,hi)
countStripe rdx str count v lo = do set count 0
e <- unsafeRead v lo
go (str e) e (lo+1)
where
len = length v
go !s e i = inc count (rdx e) >>
if i < len
then do en <- unsafeRead v i
if str en == s
then go s en (i+1)
else return i
else return len
{-# INLINE countStripe #-}
threshold :: Int
threshold = 25
| tolysz/vector-algorithms | src/Data/Vector/Algorithms/AmericanFlag.hs | bsd-3-clause | 12,090 | 0 | 18 | 3,975 | 3,397 | 1,794 | 1,603 | 254 | 4 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Parallel
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Parallel Constructs
--
-----------------------------------------------------------------------------
module Control.Parallel (
par, seq -- re-exported
) where
import Prelude
-- Maybe parIO and the like could be added here later.
-- For now, Hugs does not support par properly.
par :: a -> b -> b
par a b = b
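-- A hedged usage sketch, not part of the original module: the standard
-- idiom sparks the evaluation of x alongside y; under this Hugs fallback
-- definition of 'par' it simply evaluates sequentially.
parExample :: Int -> Int -> Int
parExample x y = x `par` (y `seq` x + y)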
| OS2World/DEV-UTIL-HUGS | libraries/Control/Parallel.hs | bsd-3-clause | 662 | 0 | 5 | 124 | 43 | 32 | 11 | 4 | 1 |
-- | An internal module that copies a few select functions
-- from Control.Error.Util, as used in Snap.Snaplet.Auth.Handlers.
module Snap.Snaplet.Auth.Handlers.Errors
( hush
, hushT
, note
, noteT
, hoistMaybe
) where
import Control.Monad
import Control.Monad.Trans.Either
import Control.Monad.Trans.Maybe
-- | Suppress the 'Left' value of an 'Either'
hush :: Either a b -> Maybe b
hush = either (const Nothing) Just
-- | Suppress the 'Left' value of an 'EitherT'
hushT :: (Monad m) => EitherT a m b -> MaybeT m b
hushT = MaybeT . liftM hush . runEitherT
-- | Tag the 'Nothing' value of a 'Maybe'
note :: a -> Maybe b -> Either a b
note a = maybe (Left a) Right
-- | Tag the 'Nothing' value of a 'MaybeT'
noteT :: (Monad m) => a -> MaybeT m b -> EitherT a m b
noteT a = EitherT . liftM (note a) . runMaybeT
-- | Lift a 'Maybe' to the 'MaybeT' monad
hoistMaybe :: (Monad m) => Maybe b -> MaybeT m b
hoistMaybe = MaybeT . return
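-- A hedged usage sketch, not part of the original module: tagging a failed
-- lookup with an error message.
noteExample :: Either String Int
noteExample = note "key not found" (lookup "b" [("a", 1)])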
| 23Skidoo/snap | src/Snap/Snaplet/Auth/Handlers/Errors.hs | bsd-3-clause | 945 | 0 | 9 | 194 | 265 | 143 | 122 | 19 | 1 |
module ArbitraryTypeclass where
import Test.QuickCheck
import Test.QuickCheck.Gen (oneof)
import Control.Monad (liftM)
-- baby Arbitrary
data Trivial = Trivial deriving (Eq, Show)
trivialGen :: Gen Trivial
trivialGen = return Trivial
instance Arbitrary Trivial where
arbitrary = trivialGen
-- identity Crisis
data Identity a = Identity a deriving (Eq, Show)
identityGen :: Arbitrary a => Gen (Identity a)
identityGen = do
a <- arbitrary
return $ Identity a
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = identityGen
identityGenInt :: Gen (Identity Int)
identityGenInt = identityGen
-- Arbitrary Products
data Pair a b = Pair a b deriving (Eq, Show)
pairGen :: (Arbitrary a, Arbitrary b) => Gen (Pair a b)
pairGen = do
a <- arbitrary
b <- arbitrary
return $ Pair a b
-- this is necessary if the datatype is embedded in other data types
instance (Arbitrary a, Arbitrary b) => Arbitrary (Pair a b) where
arbitrary = pairGen
-- one instance of pairGen
pairGenIntString :: Gen (Pair Int String)
pairGenIntString = pairGen
-- Arbitrary sums
data Sum a b =
First a | Second b deriving (Eq, Show)
-- equal odds for each
sumGenEqual :: (Arbitrary a, Arbitrary b) => Gen (Sum a b)
sumGenEqual = do
a <- arbitrary
b <- arbitrary
oneof [return $ First a, return $ Second b]
sumGenCharInt :: Gen (Sum Char Int)
sumGenCharInt = sumGenEqual
-- First a is generated 10 times as often as Second b
sumGenFirstPls :: (Arbitrary a, Arbitrary b) => Gen (Sum a b)
sumGenFirstPls = do
a <- arbitrary
b <- arbitrary
frequency [(10, return $ First a), (1, return $ Second b)]
{- or using liftM
sumGenFirstPls = do
frequency [(10, liftM First arbitrary),
(1, liftM Second arbitrary)]
-}
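-- A hedged usage sketch, not part of the original file ('sampleSums' is an
-- assumed name): print a handful of generated values to observe the 10:1
-- bias towards First.
sampleSums :: IO ()
sampleSums = sample (sumGenFirstPls :: Gen (Sum Char Int))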
| chengzh2008/hpffp | src/ch14-Testing/arbitrary/src/ArbitraryTypeclass.hs | bsd-3-clause | 1,722 | 0 | 11 | 342 | 550 | 291 | 259 | 42 | 1 |
module Statistics.Information.Utils.Random where
import Data.Matrix
import Statistics.Information.Utils.List
import System.Random
splitN :: RandomGen g => Int -> g -> [g]
splitN 0 _ = []
splitN 1 g = [g]
splitN n g = let (g', g'') = split g in
g' : splitN (n-1) g''
rands :: (RandomGen g, Random a) => g -> Int -> (a, a) -> [a]
rands g n (a, b) = take n $ randomRs (a, b) g
noisy :: RandomGen g => g -> Matrix Double -> Matrix Double
noisy g xs =
let noise = [repeatN d (intens * r) | r <- rands g n (0.0, fromIntegral n)] in
elementwise (+) xs (fromLists noise)
where
intens = 1e-10
n = nrows xs
d = ncols xs
| eligottlieb/Shannon | src/Statistics/Information/Utils/Random.hs | bsd-3-clause | 636 | 0 | 14 | 150 | 321 | 169 | 152 | 18 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
module BankStatement where
import qualified Data.Text as T
import Data.Text.Encoding as E
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as B
import qualified Data.Foldable as F
import Data.Csv.Streaming
import qualified Data.Map as Map
import System.Locale
import Data.Time
import Data.Time.Format
import Data.Csv hiding (decode)
import GHC.Generics
data Transaction = Transaction { date :: BL.ByteString
, amount :: Double
, description :: BL.ByteString
, otherParty :: BL.ByteString
, reference :: BL.ByteString
, particulars :: BL.ByteString
, analysisCode :: BL.ByteString
} deriving (Show, Generic)
instance FromRecord Transaction
type Values = T.Text
type Amount = Double
type Classified = String
type IsClassified = Transaction -> Bool
categories :: [(Classified, IsClassified)]
categories = [("Income", isIncome)
, ("Food", descriptionContainsWords ["Pak N Save", "Countdown"])
, ("Rent", descriptionContainsWords ["7110Lachie"])
, ("Isagenix", descriptionContainsWords ["Isagenix"])
, ("Takeways", descriptionContainsWords ["Cafe", "Sierra", "sals","Genta","The Globe Bar","Burger King","Subway","McDonalds","Wendy's","Carls Jr","Burgerfuel","Burger Fuel","Fish & Chips","kfc","Kebabs","Pita Pit", "Starbucks", "Pizza", "La Porchet"])
, ("Expenses", descriptionContainsWords ["Sklenars","Line of credit","AMI Insurance","Spotify Premium x 12","Professional Earcare","Just Cuts","Repco","Prepaid","Pb Technologies","Kiwivelo"])
, ("Transport", descriptionContainsWords ["gull", "Tyres"])
, ("Entertainment", descriptionContainsWords ["Event", "netflix"])
]
descriptionContainsWords :: [T.Text] -> IsClassified
descriptionContainsWords xs = (foldr (||) False) . (flip descriptionContains) (map T.toLower xs)
isIncome :: IsClassified
isIncome t
| amount t > 0 && (T.toLower . toText) t == "eroad limited" = True
| otherwise = False
addToCategory :: String -> (Double, [T.Text]) -> Map.Map Classified (Amount, [T.Text]) -> Map.Map Classified (Amount, [T.Text])
addToCategory s d = Map.insertWith appendAndAdd s d
where appendAndAdd a b = ((fst a) + (fst b), (snd a) ++ (snd b))
toCategory :: [(Classified, IsClassified)] -> Transaction -> Map.Map String (Amount, [T.Text]) -> Map.Map String (Amount, [T.Text])
toCategory [] t = addToCategory "Other" (amount t, [toText t])
toCategory (x:xs) t | snd x t = (addToCategory . fst) x (amount t, [toText t])
| otherwise = toCategory xs t
transactions :: BL.ByteString -> Records Transaction
transactions = decode HasHeader
toText :: Transaction -> T.Text
toText = E.decodeUtf8 . BL.toStrict . description
descriptionContains :: Transaction -> [T.Text] -> [Bool]
descriptionContains = map . (flip T.isInfixOf) . T.toLower . toText
getAtBatsSum :: BL.ByteString -> (Map.Map String (Amount, [T.Text]))
getAtBatsSum csvData = F.foldr' (toCategory categories) Map.empty (transactions csvData)
| willsam100/bankStatement | src/BankStatement.hs | bsd-3-clause | 3,394 | 0 | 13 | 825 | 1,001 | 572 | 429 | 57 | 1 |
module Experiment (experiment, generators) where
import Data.Monoid
import Simulation.Aivika
import Simulation.Aivika.Experiment
import Simulation.Aivika.Experiment.Chart
specs = Specs { spcStartTime = 0,
spcStopTime = 13,
spcDT = 0.01,
spcMethod = RungeKutta4,
spcGeneratorType = SimpleGenerator }
experiment :: Experiment
experiment =
defaultExperiment {
experimentSpecs = specs,
experimentRunCount = 1,
experimentTitle = "Chemical Reaction",
experimentDescription = "Chemical Reaction as described in " ++
"the 5-minute tutorial of Berkeley-Madonna" }
t = resultByName "t"
a = resultByName "a"
b = resultByName "b"
c = resultByName "c"
generators :: ChartRendering r => [WebPageGenerator r]
generators =
[outputView defaultExperimentSpecsView,
outputView $ defaultLastValueView {
lastValueSeries = t <> a <> b <> c },
outputView $ defaultTableView {
tableSeries = t <> a <> b <> c },
outputView $ defaultTimeSeriesView {
timeSeriesTitle = "Time Series",
timeSeriesLeftYSeries = a <> b <> c },
outputView $ defaultXYChartView {
xyChartTitle = "XYChart - 1",
xyChartPlotTitle = "b=b(a), c=c(a)",
xyChartXSeries = a,
xyChartLeftYSeries = b,
xyChartRightYSeries = c },
outputView $ defaultXYChartView {
xyChartTitle = "XYChart - 2",
xyChartPlotTitle = "a=a(b), c=c(b)",
xyChartXSeries = b,
xyChartRightYSeries = a <> c },
outputView $ defaultXYChartView {
xyChartTitle = "XYChart - 3",
xyChartPlotTitle = "a=a(c), b=b(c)",
xyChartXSeries = c,
xyChartLeftYSeries = b,
xyChartRightYSeries = a } ]
| dsorokin/aivika-experiment-chart | examples/ChemicalReaction/Experiment.hs | bsd-3-clause | 1,723 | 0 | 11 | 434 | 371 | 223 | 148 | 49 | 1 |
-- | Ch04
module Ch04 where
gt100 :: Integer -> Bool
gt100 x = x > 100
greaterThan100 :: [Integer] -> [Integer]
greaterThan100 xs = filter gt100 xs
greaterThan100_2 :: [Integer] -> [Integer]
greaterThan100_2 = filter (\x -> x > 100)
greaterThan100_3 :: [Integer] -> [Integer]
greaterThan100_3 = filter (> 100)
foo :: (b -> c) -> (a -> b) -> (a -> c)
foo f g = \x -> f (g x)
myTest :: [Integer] -> Bool
myTest xs = even (length (greaterThan100 xs))
myTest' :: [Integer] -> Bool
myTest' = even . length . greaterThan100
f'' :: (Int, Int) -> Int
f'' (x, y) = 2 * x + y
foobar :: [Integer] -> Integer
foobar [] = 0
foobar (x:xs)
| x > 3 = (7*x + 2) + foobar xs
| otherwise = foobar xs
sum' :: [Integer] -> Integer
sum' [] = 0
sum' (x:xs) = x + sum' xs
product' :: [Integer] -> Integer
product' [] = 1
product' (x:xs) = x * product' xs
length' :: [a] -> Integer
length' [] = 0
length' (_:xs) = 1 + length' xs
fold :: b -> (a -> b -> b) -> [a] -> b
fold a _ [] = a
fold a f (x:xs) = fold (f x a) f xs
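-- A hedged illustration, not part of the original exercise: the 'fold'
-- above consumes the list from the left, so summing with it looks like
-- this.
sumViaFold :: [Integer] -> Integer
sumViaFold = fold 0 (+)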
| codingiam/sandbox-hs | src/Ch04.hs | bsd-3-clause | 1,014 | 0 | 9 | 236 | 559 | 298 | 261 | 34 | 1 |
--
--
--
-----------------
-- Exercise 8.14.
-----------------
--
--
--
module E'8'14 where
import Data.List ( words )
-- Note: This time I consider every maximal run of consecutive
-- non-whitespace characters to be a word.
wc :: IO ()
wc
= wc' (0 , 0 , 0)
where
wc' :: (Integer , Integer , Integer) -> IO ()
wc' ( lineCount , wordCount , charCount )
-- "currentCount" is the 'loop data'
= do input <- getLine
putStr input
putStr "\n"
if (input /= "")
then (
wc' (
lineCount + 1 ,
wordCount + ( toInteger ( length ( words input ) ) ) ,
charCount + ( toInteger ( length ( concat (words input) ) ) )
)
)
else (
do putStr ( "lines count: " ++ (show lineCount) ++ "\n" )
putStr ( "words count: " ++ (show wordCount) ++ "\n" )
putStr ( "chars count: " ++ (show charCount) ++ "\n\n" )
)
-- GHCi> wc
-- 1 23
-- 1 23
--
--
-- lines count: 1
-- words count: 2
-- chars count: 3
| pascal-knodel/haskell-craft | _/links/E'8'14.hs | mit | 1,220 | 0 | 21 | 520 | 281 | 156 | 125 | 20 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module InnerEar.Database.SQLite where
import Database.SQLite.Simple
import Database.SQLite.Simple.FromRow
import Database.SQLite.Simple.ToRow
import Database.SQLite.Simple.FromField
import Database.SQLite.Simple.ToField
import Database.SQLite.Simple.Ok
import Data.Time.Clock
import Control.Monad.Except
import InnerEar.Types.User
import InnerEar.Types.Data
import InnerEar.Types.ExerciseId
import InnerEar.Database.Users
import InnerEar.Database.Events
import InnerEar.Database.Stores
openDatabase :: IO Connection
openDatabase = do
c <- open "../InnerEar.db"
createUsersTable c
createEventsTable c
createStoresTable c
runExceptT $ addUser c $ User "test" "test" NormalUser
return c
closeDatabase :: Connection -> IO ()
closeDatabase = close
| d0kt0r0/InnerEar | src/InnerEar/Database/SQLite.hs | gpl-3.0 | 796 | 0 | 9 | 90 | 184 | 105 | 79 | 26 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DirectConnect.DescribeLocations
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the list of AWS Direct Connect locations in the current AWS
-- region. These are the locations that may be selected when calling
-- CreateConnection or CreateInterconnect.
--
-- /See:/ <http://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLocations.html AWS API Reference> for DescribeLocations.
module Network.AWS.DirectConnect.DescribeLocations
(
-- * Creating a Request
describeLocations
, DescribeLocations
-- * Destructuring the Response
, describeLocationsResponse
, DescribeLocationsResponse
-- * Response Lenses
, dlrsLocations
, dlrsResponseStatus
) where
import Network.AWS.DirectConnect.Types
import Network.AWS.DirectConnect.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeLocations' smart constructor.
data DescribeLocations =
DescribeLocations'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeLocations' with the minimum fields required to make a request.
--
describeLocations
:: DescribeLocations
describeLocations = DescribeLocations'
instance AWSRequest DescribeLocations where
type Rs DescribeLocations = DescribeLocationsResponse
request = postJSON directConnect
response
= receiveJSON
(\ s h x ->
DescribeLocationsResponse' <$>
(x .?> "locations" .!@ mempty) <*>
(pure (fromEnum s)))
instance ToHeaders DescribeLocations where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("OvertureService.DescribeLocations" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DescribeLocations where
toJSON = const (Object mempty)
instance ToPath DescribeLocations where
toPath = const "/"
instance ToQuery DescribeLocations where
toQuery = const mempty
-- | /See:/ 'describeLocationsResponse' smart constructor.
data DescribeLocationsResponse = DescribeLocationsResponse'
{ _dlrsLocations :: !(Maybe [Location])
, _dlrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeLocationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dlrsLocations'
--
-- * 'dlrsResponseStatus'
describeLocationsResponse
:: Int -- ^ 'dlrsResponseStatus'
-> DescribeLocationsResponse
describeLocationsResponse pResponseStatus_ =
DescribeLocationsResponse'
{ _dlrsLocations = Nothing
, _dlrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
dlrsLocations :: Lens' DescribeLocationsResponse [Location]
dlrsLocations = lens _dlrsLocations (\ s a -> s{_dlrsLocations = a}) . _Default . _Coerce;
-- | The response status code.
dlrsResponseStatus :: Lens' DescribeLocationsResponse Int
dlrsResponseStatus = lens _dlrsResponseStatus (\ s a -> s{_dlrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-directconnect/gen/Network/AWS/DirectConnect/DescribeLocations.hs | mpl-2.0 | 3,864 | 0 | 13 | 827 | 501 | 299 | 202 | 69 | 1 |
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | orphan instances
module Commands.Frontends.Dragon13.Instances where
import Control.Monad.Catch (Exception)
import Data.Typeable (Typeable)
import qualified Language.Python.Common.ParseError as Python
deriving instance Typeable Python.ParseError
instance Exception Python.ParseError
| sboosali/commands-frontend-DragonNaturallySpeaking | sources/Commands/Frontends/Dragon13/Instances.hs | bsd-3-clause | 447 | 0 | 6 | 100 | 61 | 39 | 22 | 8 | 0 |
{-# LANGUAGE GADTs #-}
module DPH.War.Result
( Result(..)
, isResultUnexpectedFailure
, isResultUnexpectedSuccess
, takeResultTime
, takeResultDiff
, takeQuirks )
where
import BuildBox
import Data.Maybe
data Result
= ResultUnexpectedFailure
| ResultUnexpectedSuccess
| ResultAspect (WithUnits (Aspect Single))
| ResultQuirk Quirk
| ResultDiff FilePath FilePath FilePath
deriving Show
isResultUnexpectedFailure :: Result -> Bool
isResultUnexpectedFailure rr
= case rr of
ResultUnexpectedFailure{} -> True
_ -> False
isResultUnexpectedSuccess :: Result -> Bool
isResultUnexpectedSuccess rr
= case rr of
ResultUnexpectedSuccess{} -> True
_ -> False
takeResultTime :: [Result] -> Maybe Seconds
takeResultTime as
= listToMaybe [t | ResultAspect (WithSeconds (Time TotalWall (Single t))) <- as]
takeResultDiff :: [Result] -> Maybe (FilePath, FilePath, FilePath)
takeResultDiff as
= listToMaybe [ (fileRef, fileOut, fileDiff)
                | ResultDiff fileRef fileOut fileDiff <- as]
takeQuirks :: [Result] -> [Quirk]
takeQuirks rs
= [q | ResultQuirk q <- rs]
| mainland/dph | dph-test/framework/DPH/War/Result.hs | bsd-3-clause | 1,091 | 26 | 15 | 183 | 349 | 191 | 158 | 37 | 2 |
module Interp (runInterp) where
import GenUtils
import DataTypes
import InterpUtils
import Parser (pgnLexer)
runInterp :: AbsGame -> RealGame
runInterp (Game tags toks) = Game tags (pgnInterp toks initParState)
initParState = (FirstBoard startBoard)
type Par a = StoreBoard -> a
thenP :: Par a -> (a -> Par b) -> Par b
returnP :: a -> Par a
returnP a = \s -> a
thenP m k s = case m s of
r -> k r s
failP a = \s -> error a
consP q rest = \s -> q : pgnInterp rest s
thenP' :: Par StoreBoard -> Par a -> Par a
thenP' m k s = case m s of
r -> k r
newGameP :: Par a -> Par a
newGameP m = \ _ -> m initParState
getCurrColour :: Par Colour
getCurrColour =
getBoard `thenP` \ (Board _ (MoveNumber _ col) _) ->
returnP col
checkColour :: MoveNumber -> Par ()
checkColour (MoveNumber i col) =
getBoard `thenP` \ (Board _ (MoveNumber i' col') _) ->
if i == i' && col == col'
then returnP ()
else failP ("number mis-match: "
++ userFormat (MoveNumber i col)
++ " (looking for "
++ userFormat (MoveNumber i' col')
++ ")\n")
data StoreBoard
= FirstBoard Board
| UndoableBoard Board {- new -} Board {- back one -}
updateBoard :: Board -> Par StoreBoard
updateBoard brd (FirstBoard old_brd)
= UndoableBoard brd old_brd
updateBoard brd (UndoableBoard old_brd _)
= UndoableBoard brd old_brd
getBoard :: Par Board
getBoard s@(FirstBoard brd)
= brd
getBoard s@(UndoableBoard brd _)
= brd
undoBoard :: Par StoreBoard
undoBoard (FirstBoard _)
= error "Incorrect start to some analysis"
undoBoard (UndoableBoard _ old_brd)
= FirstBoard old_brd
pgnInterp :: [Token] -> Par [Quantum]
pgnInterp (IntToken n:PeriodToken:PeriodToken:PeriodToken:rest) =
checkColour (MoveNumber n Black) `thenP` \ () ->
pgnInterp rest
pgnInterp (IntToken n:PeriodToken:rest) =
checkColour (MoveNumber n White) `thenP` \ () ->
pgnInterp rest
pgnInterp (SymbolToken str:CommentToken (ann:rs):r)
| all (flip elem "!?") ann =
pgnInterp (SymbolToken str:pgnLexer ann ++ (CommentToken rs:r))
pgnInterp (CommentToken (n:tag:rest):r)
| head tag == '(' && take 2 (reverse tag) == ":)" && length rest > 1 =
getCurrColour `thenP` \ col ->
let
invert Black r = r -- because the move has *already* happened
invert _ "0.00" = "0.00" -- don't negate 0
invert _ ('-':r) = r
invert _ r = '-':r
in
pgnInterp (LeftRBToken:map SymbolToken (take (length rest-1) rest)
++ [CommentToken ["Score:",invert col n],RightRBToken] ++ r)
pgnInterp (CommentToken []:rest) = pgnInterp rest
pgnInterp (CommentToken comm:rest) =
consP (QuantumComment comm) rest
pgnInterp (NAGToken nag:rest) =
consP (QuantumNAG nag) rest
pgnInterp (NAGAnnToken nag _:rest) =
consP (QuantumNAG nag) rest
pgnInterp (SymbolToken "0-1":rest) =
consP (QuantumResult "0-1") rest
pgnInterp (SymbolToken "1-0":rest) =
consP (QuantumResult "1-0") rest
pgnInterp (SymbolToken "1/2-1/2":rest) =
consP (QuantumResult "1/2-1/2") rest
pgnInterp (AsterixToken:rest) =
consP (QuantumResult "*") rest
pgnInterp (SymbolToken move:rest@(NAGAnnToken _ str:_)) =
getBoard `thenP` \ brd ->
parseMove move brd `thenP` \ (mv,ch,corrMv,new_brd) ->
updateBoard new_brd `thenP'`
consP (QuantumMove mv ch str new_brd) rest
pgnInterp (SymbolToken move:rest) =
getBoard `thenP` \ brd ->
parseMove move brd `thenP` \ (mv,ch,corrMv,new_brd) ->
updateBoard new_brd `thenP'`
consP (QuantumMove mv ch "" new_brd) rest
pgnInterp (LeftRBToken:rest) =
getAnalysis rest 0 [] `thenP` \ (anal,rest) ->
(undoBoard `thenP'`
pgnInterp anal) `thenP` \ anal' ->
consP (QuantumAnalysis anal') rest
pgnInterp [] = returnP []
pgnInterp toks = failP ("when reading: "
++ unwords (map userFormat (take 10 toks)))
getAnalysis (t@LeftRBToken:r) n anal = getAnalysis r (n+1) (t:anal)
getAnalysis (t@RightRBToken:r) n anal
| n == (0 :: Int) = returnP (reverse anal,r)
| otherwise = getAnalysis r (n-1) (t:anal)
getAnalysis (t:r) n anal = getAnalysis r n (t:anal)
getAnalysis [] n anal = failP "no closing ')'"
parseMove :: String -> Board -> Par (String,String,String,Board)
parseMove move brd@(Board _ (MoveNumber _ col) _) =
case mapMaybeFail charToMoveTok move of
Nothing -> failP ("strange move:" ++ move)
Just mv_toks ->
let
(chs,mv_toks') = getChecks (reverse mv_toks)
(queen,mv_toks'') = getQueen mv_toks'
in
case parseAlgMove mv_toks'' queen brd of
(the_mv,new_brd) -> returnP (the_mv,chs,"$$",new_brd)
parseAlgMove
:: [MoveTok]
-> Maybe Piece
-> Board
-> (String,Board)
parseAlgMove [PartCastleTok,MoveToTok,PartCastleTok] Nothing
= findCastleKMove
parseAlgMove [PartCastleTok,MoveToTok,PartCastleTok,
MoveToTok,PartCastleTok] Nothing
= findCastleQMove
parseAlgMove (PieceTok King:r) Nothing = parsePieceMove r King
parseAlgMove (PieceTok Queen:r) Nothing = parsePieceMove r Queen
parseAlgMove (PieceTok Rook:r) Nothing = parsePieceMove r Rook
parseAlgMove (PieceTok Knight:r) Nothing = parsePieceMove r Knight
parseAlgMove (PieceTok Bishop:r) Nothing = parsePieceMove r Bishop
parseAlgMove [FileTok sf,RankTok sr,MoveToTok,FileTok df,RankTok dr] q =
findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,RankTok sr,CaptureTok,FileTok df,RankTok dr] q =
findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,RankTok sr,FileTok df,RankTok dr] q = \ brd ->
case lookupBoardPiece brd (sf,sr) of
Nothing -> error ("cant find piece at: " ++ userFormatBoardPos (sf,sr))
Just Pawn -> findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q brd
Just King | sf == 5 && df == 7 -> findCastleKMove brd
Just King | sf == 5 && df == 3 -> findCastleQMove brd
Just p -> findAMove p (extendBP (sf,sr)) (extendBP (df,dr)) brd
-- later !
parseAlgMove [FileTok df,RankTok dr] q =
findAPawnMove (Nothing,Nothing) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,CaptureTok,FileTok df,RankTok dr] q =
findAPawnMove (Just sf,Nothing) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,FileTok df] q =
findAPawnMove (Just sf,Nothing) (Just df,Nothing) q
parseAlgMove [FileTok sf,CaptureTok,FileTok df] q =
findAPawnMove (Just sf,Nothing) (Just df,Nothing) q
parseAlgMove _ _ = error "!>!"
parsePieceMove [FileTok df,RankTok dr] p
= findAMove p (Nothing,Nothing) (extendBP (df,dr))
parsePieceMove [CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Nothing,Nothing) (extendBP (df,dr))
parsePieceMove [RankTok sr,FileTok df,RankTok dr] p
= findAMove p (Nothing,Just sr) (extendBP (df,dr))
parsePieceMove [RankTok sr,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Nothing,Just sr) (extendBP (df,dr))
parsePieceMove [FileTok sf,FileTok df,RankTok dr] p
= findAMove p (Just sf,Nothing) (extendBP (df,dr))
parsePieceMove [FileTok sf,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Just sf,Nothing) (extendBP (df,dr))
parsePieceMove [FileTok sf,RankTok sr,MoveToTok,FileTok df,RankTok dr] p
= findAMove p (extendBP (sf,sr)) (extendBP (df,dr))
parsePieceMove [FileTok sf,RankTok sr,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (extendBP (sf,sr)) (extendBP (df,dr))
parsePieceMove _ p = failP ("syntax error in move:")
getChecks (CheckTok:CheckTok:r) = ("#",r)
getChecks (CheckTok:r) = ("+",r)
getChecks (MateTok:r) = ("#",r)
getChecks r = ("",r)
getQueen (PieceTok p:QueensWith:r) = (Just p,reverse r)
getQueen r = (Nothing,reverse r)
| sdiehl/ghc | testsuite/tests/programs/andy_cherry/Interp.hs | bsd-3-clause | 8,295 | 0 | 20 | 2,203 | 3,298 | 1,700 | 1,598 | 180 | 5 |
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Classes
-- Copyright : (c) Ross Paterson 2013
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Liftings of the Prelude classes 'Eq', 'Ord', 'Read' and 'Show' to
-- unary and binary type constructors.
--
-- These classes are needed to express the constraints on arguments of
-- transformers in portable Haskell. Thus for a new transformer @T@,
-- one might write instances like
--
-- > instance (Eq1 f) => Eq1 (T f) where ...
-- > instance (Ord1 f) => Ord1 (T f) where ...
-- > instance (Read1 f) => Read1 (T f) where ...
-- > instance (Show1 f) => Show1 (T f) where ...
--
-- If these instances can be defined, defining instances of the base
-- classes is mechanical:
--
-- > instance (Eq1 f, Eq a) => Eq (T f a) where (==) = eq1
-- > instance (Ord1 f, Ord a) => Ord (T f a) where compare = compare1
-- > instance (Read1 f, Read a) => Read (T f a) where
-- > readPrec = readPrec1
-- > readListPrec = readListPrecDefault
-- > instance (Show1 f, Show a) => Show (T f a) where showsPrec = showsPrec1
--
-- @since 4.9.0.0
-----------------------------------------------------------------------------
module Data.Functor.Classes (
-- * Liftings of Prelude classes
-- ** For unary constructors
Eq1(..), eq1,
Ord1(..), compare1,
Read1(..), readsPrec1, readPrec1,
liftReadListDefault, liftReadListPrecDefault,
Show1(..), showsPrec1,
-- ** For binary constructors
Eq2(..), eq2,
Ord2(..), compare2,
Read2(..), readsPrec2, readPrec2,
liftReadList2Default, liftReadListPrec2Default,
Show2(..), showsPrec2,
-- * Helper functions
-- $example
readsData, readData,
readsUnaryWith, readUnaryWith,
readsBinaryWith, readBinaryWith,
showsUnaryWith,
showsBinaryWith,
-- ** Obsolete helpers
readsUnary,
readsUnary1,
readsBinary1,
showsUnary,
showsUnary1,
showsBinary1,
) where
import Control.Applicative (Alternative((<|>)), Const(Const))
import Data.Functor.Identity (Identity(Identity))
import Data.Proxy (Proxy(Proxy))
import Data.List.NonEmpty (NonEmpty(..))
import Data.Monoid (mappend)
import GHC.Read (expectP, list, paren)
import Text.ParserCombinators.ReadPrec (ReadPrec, readPrec_to_S, readS_to_Prec)
import Text.Read (Read(..), parens, prec, step)
import Text.Read.Lex (Lexeme(..))
import Text.Show (showListWith)
-- | Lifting of the 'Eq' class to unary type constructors.
--
-- @since 4.9.0.0
class Eq1 f where
-- | Lift an equality test through the type constructor.
--
-- The function will usually be applied to an equality function,
-- but the more general type ensures that the implementation uses
-- it to compare elements of the first container with elements of
-- the second.
--
-- @since 4.9.0.0
liftEq :: (a -> b -> Bool) -> f a -> f b -> Bool
-- | Lift the standard @('==')@ function through the type constructor.
--
-- @since 4.9.0.0
eq1 :: (Eq1 f, Eq a) => f a -> f a -> Bool
eq1 = liftEq (==)
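-- A small usage sketch (illustrative addition, not part of the original
-- module), assuming the 'Maybe' instance defined further below:
--
--   eq1 (Just 1) (Just (1 :: Int)) == True
--   liftEq (\x y -> length x == length y) (Just "ab") (Just "cd")
--     == True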
-- | Lifting of the 'Ord' class to unary type constructors.
--
-- @since 4.9.0.0
class (Eq1 f) => Ord1 f where
-- | Lift a 'compare' function through the type constructor.
--
-- The function will usually be applied to a comparison function,
-- but the more general type ensures that the implementation uses
-- it to compare elements of the first container with elements of
-- the second.
--
-- @since 4.9.0.0
liftCompare :: (a -> b -> Ordering) -> f a -> f b -> Ordering
-- | Lift the standard 'compare' function through the type constructor.
--
-- @since 4.9.0.0
compare1 :: (Ord1 f, Ord a) => f a -> f a -> Ordering
compare1 = liftCompare compare
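-- Illustrative addition (not part of the original module):
--
--   compare1 (Just 1) (Just (2 :: Int))   == LT
--   liftCompare compare [3] [2, 4 :: Int] == GT   -- the head comparison decides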
-- | Lifting of the 'Read' class to unary type constructors.
--
-- Both 'liftReadsPrec' and 'liftReadPrec' exist to match the interface
-- provided in the 'Read' type class, but it is recommended to implement
-- 'Read1' instances using 'liftReadPrec' as opposed to 'liftReadsPrec', since
-- the former is more efficient than the latter. For example:
--
-- @
-- instance 'Read1' T where
-- 'liftReadPrec' = ...
-- 'liftReadListPrec' = 'liftReadListPrecDefault'
-- @
--
-- For more information, refer to the documentation for the 'Read' class.
--
-- @since 4.9.0.0
class Read1 f where
{-# MINIMAL liftReadsPrec | liftReadPrec #-}
-- | 'readsPrec' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument type.
--
-- @since 4.9.0.0
liftReadsPrec :: (Int -> ReadS a) -> ReadS [a] -> Int -> ReadS (f a)
liftReadsPrec rp rl = readPrec_to_S $
liftReadPrec (readS_to_Prec rp) (readS_to_Prec (const rl))
-- | 'readList' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument type.
-- The default implementation using standard list syntax is correct
-- for most types.
--
-- @since 4.9.0.0
liftReadList :: (Int -> ReadS a) -> ReadS [a] -> ReadS [f a]
liftReadList rp rl = readPrec_to_S
(list $ liftReadPrec (readS_to_Prec rp) (readS_to_Prec (const rl))) 0
-- | 'readPrec' function for an application of the type constructor
-- based on 'readPrec' and 'readListPrec' functions for the argument type.
--
-- @since 4.10.0.0
liftReadPrec :: ReadPrec a -> ReadPrec [a] -> ReadPrec (f a)
liftReadPrec rp rl = readS_to_Prec $
liftReadsPrec (readPrec_to_S rp) (readPrec_to_S rl 0)
-- | 'readListPrec' function for an application of the type constructor
-- based on 'readPrec' and 'readListPrec' functions for the argument type.
--
-- The default definition uses 'liftReadList'. Instances that define
-- 'liftReadPrec' should also define 'liftReadListPrec' as
-- 'liftReadListPrecDefault'.
--
-- @since 4.10.0.0
liftReadListPrec :: ReadPrec a -> ReadPrec [a] -> ReadPrec [f a]
liftReadListPrec rp rl = readS_to_Prec $ \_ ->
liftReadList (readPrec_to_S rp) (readPrec_to_S rl 0)
-- | Lift the standard 'readsPrec' and 'readList' functions through the
-- type constructor.
--
-- @since 4.9.0.0
readsPrec1 :: (Read1 f, Read a) => Int -> ReadS (f a)
readsPrec1 = liftReadsPrec readsPrec readList
-- | Lift the standard 'readPrec' and 'readListPrec' functions through the
-- type constructor.
--
-- @since 4.10.0.0
readPrec1 :: (Read1 f, Read a) => ReadPrec (f a)
readPrec1 = liftReadPrec readPrec readListPrec
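-- Illustrative addition (not part of the original module):
--
--   readsPrec1 0 "Just 5" :: [(Maybe Int, String)]   -- yields [(Just 5, "")]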
-- | A possible replacement definition for the 'liftReadList' method.
-- This is only needed for 'Read1' instances where 'liftReadListPrec' isn't
-- defined as 'liftReadListPrecDefault'.
--
-- @since 4.10.0.0
liftReadListDefault :: Read1 f => (Int -> ReadS a) -> ReadS [a] -> ReadS [f a]
liftReadListDefault rp rl = readPrec_to_S
(liftReadListPrec (readS_to_Prec rp) (readS_to_Prec (const rl))) 0
-- | A possible replacement definition for the 'liftReadListPrec' method,
-- defined using 'liftReadPrec'.
--
-- @since 4.10.0.0
liftReadListPrecDefault :: Read1 f => ReadPrec a -> ReadPrec [a]
-> ReadPrec [f a]
liftReadListPrecDefault rp rl = list (liftReadPrec rp rl)
-- | Lifting of the 'Show' class to unary type constructors.
--
-- @since 4.9.0.0
class Show1 f where
-- | 'showsPrec' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument type.
--
-- @since 4.9.0.0
liftShowsPrec :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
Int -> f a -> ShowS
-- | 'showList' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument type.
-- The default implementation using standard list syntax is correct
-- for most types.
--
-- @since 4.9.0.0
liftShowList :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
[f a] -> ShowS
liftShowList sp sl = showListWith (liftShowsPrec sp sl 0)
-- | Lift the standard 'showsPrec' and 'showList' functions through the
-- type constructor.
--
-- @since 4.9.0.0
showsPrec1 :: (Show1 f, Show a) => Int -> f a -> ShowS
showsPrec1 = liftShowsPrec showsPrec showList
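-- Illustrative addition (not part of the original module):
--
--   showsPrec1 0  (Just (5 :: Int)) "" == "Just 5"
--   showsPrec1 11 (Just (5 :: Int)) "" == "(Just 5)"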
-- | Lifting of the 'Eq' class to binary type constructors.
--
-- @since 4.9.0.0
class Eq2 f where
-- | Lift equality tests through the type constructor.
--
-- The function will usually be applied to equality functions,
-- but the more general type ensures that the implementation uses
-- them to compare elements of the first container with elements of
-- the second.
--
-- @since 4.9.0.0
liftEq2 :: (a -> b -> Bool) -> (c -> d -> Bool) -> f a c -> f b d -> Bool
-- | Lift the standard @('==')@ function through the type constructor.
--
-- @since 4.9.0.0
eq2 :: (Eq2 f, Eq a, Eq b) => f a b -> f a b -> Bool
eq2 = liftEq2 (==) (==)
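-- Illustrative addition (not part of the original module), using the tuple
-- instance defined further below:
--
--   eq2 ('x', True) ('x', True)                          == True
--   liftEq2 (==) (\_ _ -> True) (1, 'a') (1 :: Int, 'b') == True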
-- | Lifting of the 'Ord' class to binary type constructors.
--
-- @since 4.9.0.0
class (Eq2 f) => Ord2 f where
-- | Lift 'compare' functions through the type constructor.
--
-- The function will usually be applied to comparison functions,
-- but the more general type ensures that the implementation uses
-- them to compare elements of the first container with elements of
-- the second.
--
-- @since 4.9.0.0
liftCompare2 :: (a -> b -> Ordering) -> (c -> d -> Ordering) ->
f a c -> f b d -> Ordering
-- | Lift the standard 'compare' function through the type constructor.
--
-- @since 4.9.0.0
compare2 :: (Ord2 f, Ord a, Ord b) => f a b -> f a b -> Ordering
compare2 = liftCompare2 compare compare
-- | Lifting of the 'Read' class to binary type constructors.
--
-- Both 'liftReadsPrec2' and 'liftReadPrec2' exist to match the interface
-- provided in the 'Read' type class, but it is recommended to implement
-- 'Read2' instances using 'liftReadPrec2' as opposed to 'liftReadsPrec2',
-- since the former is more efficient than the latter. For example:
--
-- @
-- instance 'Read2' T where
-- 'liftReadPrec2' = ...
-- 'liftReadListPrec2' = 'liftReadListPrec2Default'
-- @
--
-- For more information, refer to the documentation for the 'Read' class.
--
-- @since 4.9.0.0
class Read2 f where
{-# MINIMAL liftReadsPrec2 | liftReadPrec2 #-}
-- | 'readsPrec' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument types.
--
-- @since 4.9.0.0
liftReadsPrec2 :: (Int -> ReadS a) -> ReadS [a] ->
(Int -> ReadS b) -> ReadS [b] -> Int -> ReadS (f a b)
liftReadsPrec2 rp1 rl1 rp2 rl2 = readPrec_to_S $
liftReadPrec2 (readS_to_Prec rp1) (readS_to_Prec (const rl1))
(readS_to_Prec rp2) (readS_to_Prec (const rl2))
-- | 'readList' function for an application of the type constructor
-- based on 'readsPrec' and 'readList' functions for the argument types.
-- The default implementation using standard list syntax is correct
-- for most types.
--
-- @since 4.9.0.0
liftReadList2 :: (Int -> ReadS a) -> ReadS [a] ->
(Int -> ReadS b) -> ReadS [b] -> ReadS [f a b]
liftReadList2 rp1 rl1 rp2 rl2 = readPrec_to_S
(list $ liftReadPrec2 (readS_to_Prec rp1) (readS_to_Prec (const rl1))
(readS_to_Prec rp2) (readS_to_Prec (const rl2))) 0
-- | 'readPrec' function for an application of the type constructor
-- based on 'readPrec' and 'readListPrec' functions for the argument types.
--
-- @since 4.10.0.0
liftReadPrec2 :: ReadPrec a -> ReadPrec [a] ->
ReadPrec b -> ReadPrec [b] -> ReadPrec (f a b)
liftReadPrec2 rp1 rl1 rp2 rl2 = readS_to_Prec $
liftReadsPrec2 (readPrec_to_S rp1) (readPrec_to_S rl1 0)
(readPrec_to_S rp2) (readPrec_to_S rl2 0)
-- | 'readListPrec' function for an application of the type constructor
-- based on 'readPrec' and 'readListPrec' functions for the argument types.
--
-- The default definition uses 'liftReadList2'. Instances that define
-- 'liftReadPrec2' should also define 'liftReadListPrec2' as
-- 'liftReadListPrec2Default'.
--
-- @since 4.10.0.0
liftReadListPrec2 :: ReadPrec a -> ReadPrec [a] ->
ReadPrec b -> ReadPrec [b] -> ReadPrec [f a b]
liftReadListPrec2 rp1 rl1 rp2 rl2 = readS_to_Prec $ \_ ->
liftReadList2 (readPrec_to_S rp1) (readPrec_to_S rl1 0)
(readPrec_to_S rp2) (readPrec_to_S rl2 0)
-- | Lift the standard 'readsPrec' function through the type constructor.
--
-- @since 4.9.0.0
readsPrec2 :: (Read2 f, Read a, Read b) => Int -> ReadS (f a b)
readsPrec2 = liftReadsPrec2 readsPrec readList readsPrec readList
-- | Lift the standard 'readPrec' function through the type constructor.
--
-- @since 4.10.0.0
readPrec2 :: (Read2 f, Read a, Read b) => ReadPrec (f a b)
readPrec2 = liftReadPrec2 readPrec readListPrec readPrec readListPrec
-- | A possible replacement definition for the 'liftReadList2' method.
-- This is only needed for 'Read2' instances where 'liftReadListPrec2' isn't
-- defined as 'liftReadListPrec2Default'.
--
-- @since 4.10.0.0
liftReadList2Default :: Read2 f => (Int -> ReadS a) -> ReadS [a] ->
                          (Int -> ReadS b) -> ReadS [b] -> ReadS [f a b]
liftReadList2Default rp1 rl1 rp2 rl2 = readPrec_to_S
(liftReadListPrec2 (readS_to_Prec rp1) (readS_to_Prec (const rl1))
(readS_to_Prec rp2) (readS_to_Prec (const rl2))) 0
-- | A possible replacement definition for the 'liftReadListPrec2' method,
-- defined using 'liftReadPrec2'.
--
-- @since 4.10.0.0
liftReadListPrec2Default :: Read2 f => ReadPrec a -> ReadPrec [a] ->
ReadPrec b -> ReadPrec [b] -> ReadPrec [f a b]
liftReadListPrec2Default rp1 rl1 rp2 rl2 = list (liftReadPrec2 rp1 rl1 rp2 rl2)
-- | Lifting of the 'Show' class to binary type constructors.
--
-- @since 4.9.0.0
class Show2 f where
-- | 'showsPrec' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument types.
--
-- @since 4.9.0.0
liftShowsPrec2 :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
(Int -> b -> ShowS) -> ([b] -> ShowS) -> Int -> f a b -> ShowS
-- | 'showList' function for an application of the type constructor
-- based on 'showsPrec' and 'showList' functions for the argument types.
-- The default implementation using standard list syntax is correct
-- for most types.
--
-- @since 4.9.0.0
liftShowList2 :: (Int -> a -> ShowS) -> ([a] -> ShowS) ->
(Int -> b -> ShowS) -> ([b] -> ShowS) -> [f a b] -> ShowS
liftShowList2 sp1 sl1 sp2 sl2 =
showListWith (liftShowsPrec2 sp1 sl1 sp2 sl2 0)
-- | Lift the standard 'showsPrec' function through the type constructor.
--
-- @since 4.9.0.0
showsPrec2 :: (Show2 f, Show a, Show b) => Int -> f a b -> ShowS
showsPrec2 = liftShowsPrec2 showsPrec showList showsPrec showList
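-- Illustrative addition (not part of the original module), using the tuple and
-- 'Either' instances defined further below:
--
--   showsPrec2 0 (1 :: Int, 'a') ""             == "(1,'a')"
--   showsPrec2 0 (Left 1 :: Either Int Char) "" == "Left 1"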
-- Instances for Prelude type constructors
-- | @since 4.9.0.0
instance Eq1 Maybe where
liftEq _ Nothing Nothing = True
liftEq _ Nothing (Just _) = False
liftEq _ (Just _) Nothing = False
liftEq eq (Just x) (Just y) = eq x y
-- | @since 4.9.0.0
instance Ord1 Maybe where
liftCompare _ Nothing Nothing = EQ
liftCompare _ Nothing (Just _) = LT
liftCompare _ (Just _) Nothing = GT
liftCompare comp (Just x) (Just y) = comp x y
-- | @since 4.9.0.0
instance Read1 Maybe where
liftReadPrec rp _ =
parens (expectP (Ident "Nothing") *> pure Nothing)
<|>
readData (readUnaryWith rp "Just" Just)
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance Show1 Maybe where
liftShowsPrec _ _ _ Nothing = showString "Nothing"
liftShowsPrec sp _ d (Just x) = showsUnaryWith sp "Just" d x
-- | @since 4.9.0.0
instance Eq1 [] where
liftEq _ [] [] = True
liftEq _ [] (_:_) = False
liftEq _ (_:_) [] = False
liftEq eq (x:xs) (y:ys) = eq x y && liftEq eq xs ys
-- | @since 4.9.0.0
instance Ord1 [] where
liftCompare _ [] [] = EQ
liftCompare _ [] (_:_) = LT
liftCompare _ (_:_) [] = GT
liftCompare comp (x:xs) (y:ys) = comp x y `mappend` liftCompare comp xs ys
-- | @since 4.9.0.0
instance Read1 [] where
liftReadPrec _ rl = rl
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance Show1 [] where
liftShowsPrec _ sl _ = sl
-- | @since 4.10.0.0
instance Eq1 NonEmpty where
liftEq eq (a :| as) (b :| bs) = eq a b && liftEq eq as bs
-- | @since 4.10.0.0
instance Ord1 NonEmpty where
liftCompare cmp (a :| as) (b :| bs) = cmp a b `mappend` liftCompare cmp as bs
-- | @since 4.10.0.0
instance Read1 NonEmpty where
liftReadsPrec rdP rdL p s = readParen (p > 5) (\s' -> do
(a, s'') <- rdP 6 s'
(":|", s''') <- lex s''
(as, s'''') <- rdL s'''
return (a :| as, s'''')) s
-- | @since 4.10.0.0
instance Show1 NonEmpty where
liftShowsPrec shwP shwL p (a :| as) = showParen (p > 5) $
shwP 6 a . showString " :| " . shwL as
-- | @since 4.9.0.0
instance Eq2 (,) where
liftEq2 e1 e2 (x1, y1) (x2, y2) = e1 x1 x2 && e2 y1 y2
-- | @since 4.9.0.0
instance Ord2 (,) where
liftCompare2 comp1 comp2 (x1, y1) (x2, y2) =
comp1 x1 x2 `mappend` comp2 y1 y2
-- | @since 4.9.0.0
instance Read2 (,) where
liftReadPrec2 rp1 _ rp2 _ = parens $ paren $ do
x <- rp1
expectP (Punc ",")
y <- rp2
return (x,y)
liftReadListPrec2 = liftReadListPrec2Default
liftReadList2 = liftReadList2Default
-- | @since 4.9.0.0
instance Show2 (,) where
liftShowsPrec2 sp1 _ sp2 _ _ (x, y) =
showChar '(' . sp1 0 x . showChar ',' . sp2 0 y . showChar ')'
-- | @since 4.9.0.0
instance (Eq a) => Eq1 ((,) a) where
liftEq = liftEq2 (==)
-- | @since 4.9.0.0
instance (Ord a) => Ord1 ((,) a) where
liftCompare = liftCompare2 compare
-- | @since 4.9.0.0
instance (Read a) => Read1 ((,) a) where
liftReadPrec = liftReadPrec2 readPrec readListPrec
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance (Show a) => Show1 ((,) a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
-- | @since 4.9.0.0
instance Eq2 Either where
liftEq2 e1 _ (Left x) (Left y) = e1 x y
liftEq2 _ _ (Left _) (Right _) = False
liftEq2 _ _ (Right _) (Left _) = False
liftEq2 _ e2 (Right x) (Right y) = e2 x y
-- | @since 4.9.0.0
instance Ord2 Either where
liftCompare2 comp1 _ (Left x) (Left y) = comp1 x y
liftCompare2 _ _ (Left _) (Right _) = LT
liftCompare2 _ _ (Right _) (Left _) = GT
liftCompare2 _ comp2 (Right x) (Right y) = comp2 x y
-- | @since 4.9.0.0
instance Read2 Either where
liftReadPrec2 rp1 _ rp2 _ = readData $
readUnaryWith rp1 "Left" Left <|>
readUnaryWith rp2 "Right" Right
liftReadListPrec2 = liftReadListPrec2Default
liftReadList2 = liftReadList2Default
-- | @since 4.9.0.0
instance Show2 Either where
liftShowsPrec2 sp1 _ _ _ d (Left x) = showsUnaryWith sp1 "Left" d x
liftShowsPrec2 _ _ sp2 _ d (Right x) = showsUnaryWith sp2 "Right" d x
-- | @since 4.9.0.0
instance (Eq a) => Eq1 (Either a) where
liftEq = liftEq2 (==)
-- | @since 4.9.0.0
instance (Ord a) => Ord1 (Either a) where
liftCompare = liftCompare2 compare
-- | @since 4.9.0.0
instance (Read a) => Read1 (Either a) where
liftReadPrec = liftReadPrec2 readPrec readListPrec
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance (Show a) => Show1 (Either a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
-- Instances for other functors defined in the base package
-- | @since 4.9.0.0
instance Eq1 Identity where
liftEq eq (Identity x) (Identity y) = eq x y
-- | @since 4.9.0.0
instance Ord1 Identity where
liftCompare comp (Identity x) (Identity y) = comp x y
-- | @since 4.9.0.0
instance Read1 Identity where
liftReadPrec rp _ = readData $
readUnaryWith rp "Identity" Identity
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance Show1 Identity where
liftShowsPrec sp _ d (Identity x) = showsUnaryWith sp "Identity" d x
-- | @since 4.9.0.0
instance Eq2 Const where
liftEq2 eq _ (Const x) (Const y) = eq x y
-- | @since 4.9.0.0
instance Ord2 Const where
liftCompare2 comp _ (Const x) (Const y) = comp x y
-- | @since 4.9.0.0
instance Read2 Const where
liftReadPrec2 rp _ _ _ = readData $
readUnaryWith rp "Const" Const
liftReadListPrec2 = liftReadListPrec2Default
liftReadList2 = liftReadList2Default
-- | @since 4.9.0.0
instance Show2 Const where
liftShowsPrec2 sp _ _ _ d (Const x) = showsUnaryWith sp "Const" d x
-- | @since 4.9.0.0
instance (Eq a) => Eq1 (Const a) where
liftEq = liftEq2 (==)
-- | @since 4.9.0.0
instance (Ord a) => Ord1 (Const a) where
liftCompare = liftCompare2 compare
-- | @since 4.9.0.0
instance (Read a) => Read1 (Const a) where
liftReadPrec = liftReadPrec2 readPrec readListPrec
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- | @since 4.9.0.0
instance (Show a) => Show1 (Const a) where
liftShowsPrec = liftShowsPrec2 showsPrec showList
-- Proxy unfortunately imports this module, hence these instances are placed
-- here.
-- | @since 4.9.0.0
instance Eq1 Proxy where
liftEq _ _ _ = True
-- | @since 4.9.0.0
instance Ord1 Proxy where
liftCompare _ _ _ = EQ
-- | @since 4.9.0.0
instance Show1 Proxy where
liftShowsPrec _ _ _ _ = showString "Proxy"
-- | @since 4.9.0.0
instance Read1 Proxy where
liftReadPrec _ _ = parens (expectP (Ident "Proxy") *> pure Proxy)
liftReadListPrec = liftReadListPrecDefault
liftReadList = liftReadListDefault
-- Building blocks
-- | @'readsData' p d@ is a parser for datatypes where each alternative
-- begins with a data constructor. It parses the constructor and
-- passes it to @p@. Parsers for various constructors can be constructed
-- with 'readsUnary', 'readsUnary1' and 'readsBinary1', and combined with
-- @mappend@ from the @Monoid@ class.
--
-- @since 4.9.0.0
readsData :: (String -> ReadS a) -> Int -> ReadS a
readsData reader d =
readParen (d > 10) $ \ r -> [res | (kw,s) <- lex r, res <- reader kw s]
-- | @'readData' p@ is a parser for datatypes where each alternative
-- begins with a data constructor. It parses the constructor and
-- passes it to @p@. Parsers for various constructors can be constructed
-- with 'readUnaryWith' and 'readBinaryWith', and combined with
-- '(<|>)' from the 'Alternative' class.
--
-- @since 4.10.0.0
readData :: ReadPrec a -> ReadPrec a
readData reader = parens $ prec 10 reader
-- | @'readsUnaryWith' rp n c n'@ matches the name of a unary data constructor
-- and then parses its argument using @rp@.
--
-- @since 4.9.0.0
readsUnaryWith :: (Int -> ReadS a) -> String -> (a -> t) -> String -> ReadS t
readsUnaryWith rp name cons kw s =
[(cons x,t) | kw == name, (x,t) <- rp 11 s]
-- | @'readUnaryWith' rp n c'@ matches the name of a unary data constructor
-- and then parses its argument using @rp@.
--
-- @since 4.10.0.0
readUnaryWith :: ReadPrec a -> String -> (a -> t) -> ReadPrec t
readUnaryWith rp name cons = do
expectP $ Ident name
x <- step rp
return $ cons x
-- | @'readsBinaryWith' rp1 rp2 n c n'@ matches the name of a binary
-- data constructor and then parses its arguments using @rp1@ and @rp2@
-- respectively.
--
-- @since 4.9.0.0
readsBinaryWith :: (Int -> ReadS a) -> (Int -> ReadS b) ->
String -> (a -> b -> t) -> String -> ReadS t
readsBinaryWith rp1 rp2 name cons kw s =
[(cons x y,u) | kw == name, (x,t) <- rp1 11 s, (y,u) <- rp2 11 t]
-- | @'readBinaryWith' rp1 rp2 n c'@ matches the name of a binary
-- data constructor and then parses its arguments using @rp1@ and @rp2@
-- respectively.
--
-- @since 4.10.0.0
readBinaryWith :: ReadPrec a -> ReadPrec b ->
String -> (a -> b -> t) -> ReadPrec t
readBinaryWith rp1 rp2 name cons = do
expectP $ Ident name
x <- step rp1
y <- step rp2
return $ cons x y
-- | @'showsUnaryWith' sp n d x@ produces the string representation of a
-- unary data constructor with name @n@ and argument @x@, in precedence
-- context @d@.
--
-- @since 4.9.0.0
showsUnaryWith :: (Int -> a -> ShowS) -> String -> Int -> a -> ShowS
showsUnaryWith sp name d x = showParen (d > 10) $
showString name . showChar ' ' . sp 11 x
-- | @'showsBinaryWith' sp1 sp2 n d x y@ produces the string
-- representation of a binary data constructor with name @n@ and arguments
-- @x@ and @y@, in precedence context @d@.
--
-- @since 4.9.0.0
showsBinaryWith :: (Int -> a -> ShowS) -> (Int -> b -> ShowS) ->
String -> Int -> a -> b -> ShowS
showsBinaryWith sp1 sp2 name d x y = showParen (d > 10) $
showString name . showChar ' ' . sp1 11 x . showChar ' ' . sp2 11 y
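-- Illustrative addition (not part of the original module):
--
--   showsUnaryWith showsPrec "Just" 11 (5 :: Int) ""
--     == "(Just 5)"
--   showsBinaryWith showsPrec showsPrec "Pair" 0 (1 :: Int) (2 :: Int) ""
--     == "Pair 1 2"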
-- Obsolete building blocks
-- | @'readsUnary' n c n'@ matches the name of a unary data constructor
-- and then parses its argument using 'readsPrec'.
--
-- @since 4.9.0.0
{-# DEPRECATED readsUnary "Use readsUnaryWith to define liftReadsPrec" #-}
readsUnary :: (Read a) => String -> (a -> t) -> String -> ReadS t
readsUnary name cons kw s =
[(cons x,t) | kw == name, (x,t) <- readsPrec 11 s]
-- | @'readsUnary1' n c n'@ matches the name of a unary data constructor
-- and then parses its argument using 'readsPrec1'.
--
-- @since 4.9.0.0
{-# DEPRECATED readsUnary1 "Use readsUnaryWith to define liftReadsPrec" #-}
readsUnary1 :: (Read1 f, Read a) => String -> (f a -> t) -> String -> ReadS t
readsUnary1 name cons kw s =
[(cons x,t) | kw == name, (x,t) <- readsPrec1 11 s]
-- | @'readsBinary1' n c n'@ matches the name of a binary data constructor
-- and then parses its arguments using 'readsPrec1'.
--
-- @since 4.9.0.0
{-# DEPRECATED readsBinary1 "Use readsBinaryWith to define liftReadsPrec" #-}
readsBinary1 :: (Read1 f, Read1 g, Read a) =>
String -> (f a -> g a -> t) -> String -> ReadS t
readsBinary1 name cons kw s =
[(cons x y,u) | kw == name,
(x,t) <- readsPrec1 11 s, (y,u) <- readsPrec1 11 t]
-- | @'showsUnary' n d x@ produces the string representation of a unary data
-- constructor with name @n@ and argument @x@, in precedence context @d@.
--
-- @since 4.9.0.0
{-# DEPRECATED showsUnary "Use showsUnaryWith to define liftShowsPrec" #-}
showsUnary :: (Show a) => String -> Int -> a -> ShowS
showsUnary name d x = showParen (d > 10) $
showString name . showChar ' ' . showsPrec 11 x
-- | @'showsUnary1' n d x@ produces the string representation of a unary data
-- constructor with name @n@ and argument @x@, in precedence context @d@.
--
-- @since 4.9.0.0
{-# DEPRECATED showsUnary1 "Use showsUnaryWith to define liftShowsPrec" #-}
showsUnary1 :: (Show1 f, Show a) => String -> Int -> f a -> ShowS
showsUnary1 name d x = showParen (d > 10) $
showString name . showChar ' ' . showsPrec1 11 x
-- | @'showsBinary1' n d x y@ produces the string representation of a binary
-- data constructor with name @n@ and arguments @x@ and @y@, in precedence
-- context @d@.
--
-- @since 4.9.0.0
{-# DEPRECATED showsBinary1 "Use showsBinaryWith to define liftShowsPrec" #-}
showsBinary1 :: (Show1 f, Show1 g, Show a) =>
String -> Int -> f a -> g a -> ShowS
showsBinary1 name d x y = showParen (d > 10) $
showString name . showChar ' ' . showsPrec1 11 x .
showChar ' ' . showsPrec1 11 y
{- $example
These functions can be used to assemble 'Read' and 'Show' instances for
new algebraic types. For example, given the definition
> data T f a = Zero a | One (f a) | Two a (f a)
a standard 'Read1' instance may be defined as
> instance (Read1 f) => Read1 (T f) where
> liftReadPrec rp rl = readData $
> readUnaryWith rp "Zero" Zero <|>
> readUnaryWith (liftReadPrec rp rl) "One" One <|>
> readBinaryWith rp (liftReadPrec rp rl) "Two" Two
> liftReadListPrec = liftReadListPrecDefault
and the corresponding 'Show1' instance as
> instance (Show1 f) => Show1 (T f) where
> liftShowsPrec sp _ d (Zero x) =
> showsUnaryWith sp "Zero" d x
> liftShowsPrec sp sl d (One x) =
> showsUnaryWith (liftShowsPrec sp sl) "One" d x
> liftShowsPrec sp sl d (Two x y) =
> showsBinaryWith sp (liftShowsPrec sp sl) "Two" d x y
-}
| rahulmutt/ghcvm | libraries/base/Data/Functor/Classes.hs | bsd-3-clause | 28,636 | 0 | 14 | 6,356 | 6,479 | 3,486 | 2,993 | 330 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_da_DK/helpset_da_DK.hs | apache-2.0 | 959 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, PartialTypeSignatures, NamedWildCards #-}
module SomethingShowable where
somethingShowable :: Show _x => _x -> _
somethingShowable x = show (not x)
-- Inferred type: Bool -> String
| ezyang/ghc | testsuite/tests/partial-sigs/should_compile/SomethingShowable.hs | bsd-3-clause | 213 | 0 | 7 | 30 | 39 | 21 | 18 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
module Sqarkov.Database
( withDatabase
, insertGram7s
, phraseChannel
, phraseChannelNicks
) where
import Control.DeepSeq
import Control.Exception
import Control.Monad
import Control.Monad.Trans.Either
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Builder as BSB
import Data.Foldable (foldMap, toList)
import Data.List
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Ord
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Database.Migrate as M
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.Copy
import Database.PostgreSQL.Simple.SqlQQ
import Sqarkov.NGram
withDatabase :: (Connection -> IO a) -> IO a
withDatabase act =
bracket (connectPostgreSQL "dbname=sqarkov") close $ \db -> do
migs <- ioEitherT (M.find "migrations")
_ <- M.executeMigrate migs db (M.migrate M.psqlMigrateDatabase)
act db
insertGram7s :: Connection -> [Gram7] -> IO ()
insertGram7s db gram7s = do
_ <- execute_ db createTempTable
copy_ db copyTemp
putCopyBuilder db . foldMap (BSB.byteString . escapeCopyGram7) $ gram7s
_ <- putCopyEnd db
_ <- execute_ db transferMain
_ <- execute_ db dropTempTable
return ()
where
createTempTable =
[sql|
create temporary table gram7_import
( channel text not null
, nick text not null
, wl3 text not null
, wl2 text not null
, wl1 text not null
, wm0 text not null
, wr1 text not null
, wr2 text not null
, wr3 text not null
);
|]
copyTemp =
[sql|
copy gram7_import
(channel, nick, wl3, wl2, wl1, wm0, wr1, wr2, wr3)
from stdin;
|]
transferMain =
[sql|
insert into channel (name)
select distinct channel from gram7_import
where not exists (select 0 from channel c where c.name = channel);
insert into nick (name)
select distinct nick from gram7_import
where not exists (select 0 from nick n where n.name = nick);
insert into word (word)
select distinct wm0 from gram7_import
where not exists (select 0 from word w where w.word = wm0);
insert into gram7 (channel_id, nick_id, wl3_id, wl2_id, wl1_id, wm0_id, wr1_id, wr2_id, wr3_id)
select ch.id, n.id, wl3.id, wl2.id, wl1.id, wm0.id, wr1.id, wr2.id, wr3.id
from gram7_import g
left join channel ch on ch.name = g.channel
left join nick n on n.name = g.nick
left join word wl3 on wl3.word = g.wl3
left join word wl2 on wl2.word = g.wl2
left join word wl1 on wl1.word = g.wl1
left join word wm0 on wm0.word = g.wm0
left join word wr1 on wr1.word = g.wr1
left join word wr2 on wr2.word = g.wr2
left join word wr3 on wr3.word = g.wr3;
|]
dropTempTable =
[sql| drop table gram7_import; |]
phraseChannel :: Connection -> Text -> IO (Maybe [(Text, Text)])
phraseChannel db channel = phrase db sel (Only channel)
where
sel = [sql|
join channel ch on ch.id = g.channel_id
where ch.name = ?
|]
phraseChannelNicks :: Connection -> Text -> [Text]
-> IO (Maybe [(Text, Text)])
phraseChannelNicks db channel nicks = phrase db sel (channel, In nicks)
where
sel = [sql|
join channel ch on ch.id = g.channel_id
join nick n on n.id = g.nick_id
where ch.name = ? and n.name in ?
|]
phrase :: ToRow params => Connection -> Query -> params
-> IO (Maybe [(Text, Text)])
phrase db sel params = do
withSavepoint db $ do
_ <- execute db createTempView params
res <- fold_ db selectRandomPhrase mempty $
\prev (n, phr) ->
return $!! prev <> Just (Seq.singleton n, Seq.singleton phr)
_ <- execute_ db dropTempView
return $ fmap (\(ns, phrs) -> zip (majorityElems (toList ns)) (toList phrs))
res
where
createTempView =
[sql|
create temporary view gram7_selection as
select g.id, g.channel_id, g.nick_id, g.wl3_id, g.wl2_id, g.wl1_id, g.wm0_id, g.wr1_id, g.wr2_id, g.wr3_id
from gram7 g
|] <> " " <> sel <> ";"
selectRandomPhrase =
[sql|
select n.name, wm0.word
from random_phrase() r
join channel ch on ch.id = r.channel_id
join nick n on n.id = r.nick_id
join word wm0 on wm0.id = r.wm0_id
order by r.ord
|]
dropTempView =
[sql| drop view gram7_selection; |]
putCopyBuilder :: Connection -> BSB.Builder -> IO ()
putCopyBuilder db = mapM_ (putCopyData db) . BSL.toChunks . BSB.toLazyByteString
-- Only pick the majority nick that contributed to each word.
majorityElems :: Ord a => [a] -> [a]
majorityElems ns = concatMap (take 1 . majority) windows
where
padded = replicate 3 Nothing ++ map Just ns ++ replicate 3 Nothing
windows = do
(n1:n2:n3:n4:n5:n6:n7:_) <- tails padded
return . catMaybes $ [n1, n2, n3, n4, n5, n6, n7]
majority xs = sortBy (flip (comparing (counts Map.!))) xs
where
counts = Map.fromListWith (+) . map (, 1 :: Integer) $ xs
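-- Illustrative example (not part of the original module): for three words
-- attributed to nicks ["a", "a", "b"], every 7-element window around a word
-- contains two "a"s and one "b", hence
--
--   majorityElems ["a", "a", "b"] == ["a", "a", "a"]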
ioEitherT :: EitherT String IO a -> IO a
ioEitherT = either (ioError . userError) return <=< runEitherT
| ion1/sqarkov | src/Sqarkov/Database.hs | isc | 5,648 | 0 | 18 | 1,670 | 1,100 | 598 | 502 | 88 | 1 |
module ProtoDB where
| MadSciGuys/protodb | src/ProtoDB.hs | mit | 21 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# htermination mapAndUnzipM :: (a -> [] (b,c)) -> [a] -> [] ([b], [c]) #-}
import Monad
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Monad_mapAndUnzipM_2.hs | mit | 90 | 0 | 3 | 17 | 5 | 3 | 2 | 1 | 0 |
{--
- Problem 59
(**) Construct height-balanced binary trees
In a height-balanced binary tree, the following property holds for every node:
The height of its left subtree and the height of its right subtree are almost equal,
which means their difference is not greater than one.
Construct a list of all height-balanced binary trees with the given element and the given maximum height.
Example:
?- hbal_tree(3,T).
T = t(x, t(x, t(x, nil, nil), t(x, nil, nil)), t(x, t(x, nil, nil), t(x, nil, nil))) ;
T = t(x, t(x, t(x, nil, nil), t(x, nil, nil)), t(x, t(x, nil, nil), nil)) ;
etc......No
Example in Haskell:
*Main> take 4 $ hbalTree 'x' 3
[Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty (Branch 'x' Empty Empty)),
Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' (Branch 'x' Empty Empty) Empty),
Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty Empty)),
Branch 'x' (Branch 'x' Empty (Branch 'x' Empty Empty)) (Branch 'x' Empty Empty)]
--}
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving (Show, Eq)
hbalTree :: Char -> Int -> [Tree Char]
hbalTree _ 0 = [Empty]
hbalTree c 1 = [Branch c Empty Empty]
hbalTree c n = [Branch c l r | k <- kk, l <- hbalTree c (fst k), r <- hbalTree c (snd k)] where
kk = [(n-2, n-1), (n-1, n-2), (n-1, n-1)]
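-- Illustrative check (not part of the original solution): this definition
-- reproduces the sample output quoted above, and in total
--   length (hbalTree 'x' 3) == 15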
| sighingnow/Functional-99-Problems | Haskell/59.hs | mit | 1,412 | 0 | 10 | 352 | 218 | 116 | 102 | 6 | 1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- Ops.hs ---
--
-- Filename: Ops.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Fri Sep 5 15:21:41 2014 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Sun May 7 21:09:36 2017 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 1123
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
-- Code:
{-# LANGUAGE CPP #-}
-- | TODO: comment this module
module Data.Rewriting.ARA.ByInferenceRules.Prove.Ops
( mapInfTreeNodes
, mapInfTreeNodesVarNr
, mapProvenInfTreeNodes
, mapDatatypesVarNr
, mapDatatypes
, mapSignaturesVarNr
, mapProveAB
, mapProveAsB
, mapProveABs
, mapProveAsBs
, mapRulesInfTreeNodesVar
, accessorMaybe
)
where
import Data.Rewriting.ARA.ByInferenceRules.Prove.Type
import Data.Rewriting.ARA.ByInferenceRules.TypeSignatures
import Data.Rewriting.ARA.Constants
import Data.Rewriting.Typed.Datatype
import Data.Rewriting.Typed.Problem
import Data.Rewriting.Typed.Rule
import Data.Rewriting.Typed.Signature
import Data.Maybe (fromMaybe)
import Debug.Trace (trace)
-- | This function takes as input another function which modifies a tuple of a
-- prove and an accumulator value (typically the variable-number counter).
-- If new variables get created by that function, it has to update the
-- accumulator and return it in the tuple, so that this function can write the
-- result back into the Prove data structure (e.g. the varNr field) via the
-- given update function.
updateProve :: Show a => (Prove f v s sDt dt cn -> a)
-> (Prove f v s sDt dt cn -> a -> Prove f v s sDt dt cn)
-> ((Prove f v s sDt dt cn, a) -> (Prove f v s sDt dt cn, a))
-> Prove f v s sDt dt cn -> Prove f v s sDt dt cn
updateProve accessor updateFun fun pr =
let (nPr, nVal) = fun (pr, accessor pr)
in updateFun nPr nVal
-- | @execFunOnProblem fun@ updates the problem from the State by using the
-- function @fun@. Afterwards it returns the number that was returned by the
-- function @fun@.
-- execFunOnProblem :: ((ProblemSig, a) -> (ProblemSig, a)) -> (ProblemSig, a) -> (ProblemSig, a)
-- execFunOnProblem fun (pr, nr) = fun (pr, nr)
-- | This function maps over the infTreeNodesToProve of the input prove. In case variables
-- get created one should use the postfix according to the given number in the
-- input of the function and increase the Int value of the output. It returns
-- the new prove, in which the varNr field is already set according to the
-- output integer value of the input function.
mapInfTreeNodesVarNr :: ((InfTreeNode f v dt, Int) -> (InfTreeNode f v dt, Int))
-> Prove f v s sDt dt cn -> Prove f v s sDt dt cn
mapInfTreeNodesVarNr = mapInfTreeNodes varNr updateVarNr
-- | @mapInfTreeNodes accessor updateFun fun pr@ can be used to iterate over the
-- infTreeNodesToProve of a prove @pr@. The function @fun@ is applied to every
-- inference-tree node, paired with an accumulator @a@ that holds the running
-- result of the calls to @fun@. The initial accumulator is retrieved from the
-- Prove data-structure using the specified @accessor@, and the accumulated
-- result is written back through @updateFun@.
mapInfTreeNodes :: Show a => (Prove f v s sDt dt cn -> a)
-> (Prove f v s sDt dt cn -> a -> Prove f v s sDt dt cn)
-> ((InfTreeNode f v dt, a) -> (InfTreeNode f v dt, a))
-> Prove f v s sDt dt cn -> Prove f v s sDt dt cn
mapInfTreeNodes = mapProveAsB infTreeNodesToProve (\p x -> p { infTreeNodesToProve = x })
mapProveAB :: Show b => (Prove f v s sDt dt cn -> a)
-> (Prove f v s sDt dt cn -> a -> Prove f v s sDt dt cn)
-> (Prove f v s sDt dt cn -> b)
-> (Prove f v s sDt dt cn -> b -> Prove f v s sDt dt cn)
-> ((a, b) -> (a, b))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapProveAB accessorIt updateIt accessorCum updateCum fun =
updateProve accessorCum updateCum (iterateA fun accessorIt updateIt)
mapProveAsB :: Show b =>
(Prove f v s sDt dt cn -> [a])
-> (Prove f v s sDt dt cn -> [a] -> Prove f v s sDt dt cn)
-> (Prove f v s sDt dt cn -> b)
-> (Prove f v s sDt dt cn -> b -> Prove f v s sDt dt cn)
-> ((a, b) -> (a, b))
-> Prove f v s sDt dt cn -> Prove f v s sDt dt cn
mapProveAsB accessorIt updateIt accessorCum updateCum fun =
updateProve accessorCum updateCum (iterateAs fun accessorIt updateIt)
mapProveABs :: Show b => (Prove f v s sDt dt cn -> a)
-> (Prove f v s sDt dt cn -> a -> Prove f v s sDt dt cn)
-> (Prove f v s sDt dt cn -> [b])
-> (Prove f v s sDt dt cn -> [b] -> Prove f v s sDt dt cn)
-> ((a, [b]) -> (a, [b]))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapProveABs accessorIt updateIt accessorCum updateCum fun =
updateProve accessorCum updateCum (iterateA fun accessorIt updateIt)
mapProveAsBs :: Show b => (Prove f v s sDt dt cn -> [a])
-> (Prove f v s sDt dt cn -> [a] -> Prove f v s sDt dt cn)
-> (Prove f v s sDt dt cn -> [b])
-> (Prove f v s sDt dt cn -> [b] -> Prove f v s sDt dt cn)
-> ((a, [b]) -> (a, [b]))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapProveAsBs accessorIt updateIt accessorCum updateCum fun =
updateProve accessorCum updateCum (iterateAs fun accessorIt updateIt)
-- | @updateVarNr pr nr@ sets the variable-number field @varNr@ of the prove
-- @pr@ to @nr@.
updateVarNr :: Prove f v s sDt dt cn -> Int -> Prove f v s sDt dt cn
updateVarNr pr nr = pr { varNr = nr }
mapRulesInfTreeNodesVar :: (Show f, Show v) =>
((Rule f v, ([InfTreeNode f v dt], Int))
-> (Rule f v, ([InfTreeNode f v dt], Int)))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapRulesInfTreeNodesVar =
mapProveAsB (allRules . rules . problem) const (\x -> (infTreeNodesToProve x, varNr x))
(\p (x, y) -> p {infTreeNodesToProve = x, varNr = y })
-- | This function maps over the proven infTreeNodesToProve of the input prove. In
-- case variables get created one should use the postfix according to the given
-- number in the input of the function and increase the Int value of the output. It
-- returns the new prove, in which the varNr field is already set according to the
-- output integer value of the input function.
mapProvenInfTreeNodes :: ((InfTreeNode f v dt, Int) -> (InfTreeNode f v dt, Int))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapProvenInfTreeNodes = mapProveAsB provenInfTreeNodes (\p x -> p { provenInfTreeNodes = x }) varNr updateVarNr
-- | @iterateProblemVarNr fun pr@ can be used to iterate over the problem of a
-- prove. It calls the given function @fun@ on the problem and updates the
-- prove @pr@ with the newly generated problem.
iterateProblemVarNr :: ((ProblemSig f v s sDt dt cn, Int)
-> (ProblemSig f v s sDt dt cn, Int))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
iterateProblemVarNr = mapProveAB problem (\p x -> p { problem = x }) varNr updateVarNr
-- | @mapSignatures fun (pr, nr)@ maps over the signatures of a problem. It
-- saves an integer which is used as suffix for new variables during the
-- execution.
mapSignaturesVarNr :: ((SignatureSig s sDt, Int) -> (SignatureSig s sDt, Int))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapSignaturesVarNr fun = iterateProblemVarNr (iterateAs fun accessor update)
where accessor :: ProblemSig f v s sDt dt cn -> [SignatureSig s sDt]
accessor = accessorMaybe signatures
update :: ProblemSig f v s sDt dt cn
-> [SignatureSig s sDt]
-> (ProblemSig f v s sDt dt cn)
update pr' sig = pr' { signatures = if null sig
then Nothing
else Just sig }
-- | @accessorMaybe fun prob@ is used to access a field using the function @fun@
-- from the problem @pr@. In case the field is @Nothing@ it will return the
-- empty list (@[]@).
accessorMaybe :: (ProblemSig f v s sDt dt cn -> Maybe [a])
-> ProblemSig f v s sDt dt cn -> [a]
accessorMaybe fun pr = fromMaybe [] (fun pr)
-- | @mapDatatypes fun (prob, nr)@ iterates over the datatypes of the problem
-- @prob@ executing the function @fun@ on each of the data-type elements. In
-- case no elements are given in the Maybe data-structure (Nothing or Just []),
-- then Nothing will be returned. The integer @nr@ is used to keep track of the
-- suffixes of the newly generated variables.
mapDatatypesVarNr :: ((DatatypeSig dt cn, Int) -> (DatatypeSig dt cn, Int))
-> Prove f v s sDt dt cn -> Prove f v s sDt dt cn
mapDatatypesVarNr fun =
mapProveAB problem (\p x -> p { problem = x }) (const 0) const -- varNr updateVarNr
(iterateAs fun accessor update)
where accessor :: ProblemSig f v s sDt dt cn -> [DatatypeSig dt cn]
accessor = accessorMaybe datatypes
update :: ProblemSig f v s sDt dt cn -> [DatatypeSig dt cn] -> ProblemSig f v s sDt dt cn
update p n = p { datatypes = if null n
then Nothing
else Just n }
-- type DatatypeSig dt cn = Datatype (String, [Cost Int]) (String, Cost Int)
-- | This function maps over the datatypes accumulating the result int the second
-- part of the tuple. It sets the resulting data-types using the first function,
-- and uses the specified update function to update the prove with the resulting
-- list @[a]@.
mapDatatypes :: Show a => (Prove f v s sDt dt cn -> [DatatypeSig dt cn] -> Prove f v s sDt dt cn)
-> (Prove f v s sDt dt cn -> [a])
-> (Prove f v s sDt dt cn -> [a] -> Prove f v s sDt dt cn)
-> ((DatatypeSig dt cn, [a]) -> (DatatypeSig dt cn, [a]))
-> Prove f v s sDt dt cn
-> Prove f v s sDt dt cn
mapDatatypes = mapProveAsBs accessorIt
where accessorIt :: Prove f v s sDt dt cn -> [DatatypeSig dt cn]
accessorIt = accessorMaybe datatypes . problem
-- | This function updates a field using the input function. The accessor
-- specifies the access to the field to be updated. It will be called on the
-- input prove. The update function is used to update the prove data structure
-- with the result of the function calls using a foldl over the elements
-- retrieved using the accessor. The integer return value is supposed to be the
-- new variable counter. This means it is the input variable number + new
-- variables created using the given function (and not the number of new
-- variables).
iterateAs :: ((a, c) -> (a, c)) -> (b -> [a]) -> (b -> [a] -> b) -> (b, c) -> (b, c)
iterateAs fun accessor update (pr, nr) =
do let (nVal, nAcc) = foldl f ([], nr) (accessor pr)
(update pr nVal, nAcc)
where f (acc, nr') ctx = let (nVal, nNr) = fun (ctx, nr')
in (acc ++ [nVal], nNr)
-- | This function updates a field using the input function. The @accessor@
-- specifies the function to access the field of the data-structure @pr@ to be
-- updated. It will be called on the input prove. The update function is used to
-- update the prove data structure with the result of the function call.
iterateA :: ((a, c) -> (a, c)) -> (b -> a) -> (b -> a -> b) -> (b, c) -> (b, c)
iterateA fun accessor update (pr, nr) =
let (nVal, nNr) = fun (accessor pr, nr)
in (update pr nVal, nNr)
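-- A concrete instance (illustrative only):
--
--   iterateA (\(a, c) -> (a + 1, c + a)) fst (\(_, y) x -> (x, y)) ((1, "s"), 0)
--
-- evaluates to ((2, "s"), 1).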
--
-- Ops.hs ends here
| ComputationWithBoundedResources/ara-inference | src/Data/Rewriting/ARA/ByInferenceRules/Prove/Ops.hs | mit | 12,274 | 0 | 14 | 3,542 | 3,210 | 1,726 | 1,484 | 141 | 2 |
import Control.Monad
import Data.Char (isAlpha, toLower)
import System.Exit (exitSuccess)
normalize :: String -> String
normalize = map toLower . filter isAlpha
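-- e.g. (illustrative): normalize "Madam, I'm Adam" == "madamimadam",
-- which reads the same forwards and backwards.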
palindrome :: IO ()
palindrome = forever $ do
line <- getLine
let line' = normalize line
case (line' == reverse line') of
True -> putStrLn "It's a palindrome!"
False -> do
putStrLn "Nope!"
exitSuccess | candu/haskellbook | ch13/forever.hs | mit | 389 | 0 | 13 | 83 | 132 | 65 | 67 | 14 | 2 |
-- | Process Clang's JSON Compilation Database for centrinel's purposes.
{-# language NamedFieldPuns, OverloadedStrings #-}
module Centrinel.Util.CompilationDatabase (parseCompilationDatabase
, RunLikeCC (..)
, makeStandaloneRunLikeCC
, Invoke (..)
, combineDuplicateRuns
, divideRunLikeCC) where
import Data.Monoid (Monoid(..), (<>))
import Data.Text (Text)
import qualified Data.Map.Lazy as M
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as B
import Data.Aeson (eitherDecode')
import qualified Clang.CompilationDatabase as CDB
import Centrinel.System.RunLikeCC (RunLikeCC(..))
parseCompilationDatabase :: B.ByteString -> Either String [RunLikeCC Invoke]
parseCompilationDatabase = fmap (fmap commandObjectRunLikeCC) . eitherDecode'
commandObjectRunLikeCC :: CDB.CommandObject -> RunLikeCC Invoke
commandObjectRunLikeCC cmd =
case cmd of
CDB.CommandObject {CDB.command = Just command, CDB.file = file, CDB.directory = workingDirectory} ->
RunLikeCC { file, workingDirectory, artifact = Invoke (tail $ T.words command) }
CDB.CommandObject {CDB.arguments = Just args, CDB.file = file, CDB.directory = workingDirectory} ->
RunLikeCC { file, workingDirectory, artifact = Invoke (tail args) }
CDB.CommandObject {CDB.file = file, CDB.command = Nothing, CDB.arguments = Nothing} ->
error $ "impossible: command object for " ++ show file ++ "with no command and no arguments"
-- | The list of arguments that were passed to the compiler.
-- Double quotes and backslashes are escaped with a backslash.
-- Note that the name of the compiler is /not/ part of 'invokeArguments'.
newtype Invoke = Invoke { invokeArguments :: [Text] }
deriving (Show)
makeStandaloneRunLikeCC :: [String] -> RunLikeCC Invoke
makeStandaloneRunLikeCC = RunLikeCC mempty "." . Invoke . fmap T.pack
mergeRunLikeCC :: Monoid a => RunLikeCC a -> RunLikeCC a -> RunLikeCC a
mergeRunLikeCC i1 i2 = i1 { artifact = artifact i1 <> artifact i2 }
combineDuplicateRuns :: [RunLikeCC Invoke] -> [RunLikeCC [Invoke]]
combineDuplicateRuns = M.elems . M.fromListWith mergeRunLikeCC . map (\i -> (dirFilePair i, singletonize i))
where
dirFilePair :: RunLikeCC a -> (Text, Text)
dirFilePair i = (workingDirectory i, file i)
singletonize :: RunLikeCC a -> RunLikeCC [a]
singletonize i = i { artifact = [artifact i] }
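-- Illustrative sketch (hypothetical values): two command objects that share the
-- same (workingDirectory, file) pair collapse into a single RunLikeCC whose
-- artifact field collects both Invoke values, so a database that invokes the
-- compiler twice on the same file yields one RunLikeCC carrying two Invokes.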
divideRunLikeCC :: RunLikeCC [a] -> [RunLikeCC a]
divideRunLikeCC i = map (\a -> i { artifact = a }) (artifact i)
{-# INLINEABLE divideRunLikeCC #-}
| lambdageek/use-c | src/Centrinel/Util/CompilationDatabase.hs | mit | 2,712 | 0 | 14 | 605 | 694 | 390 | 304 | 40 | 3 |
-- Pretty.hs ---
--
-- Filename: Pretty.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Mon Oct 6 13:22:09 2014 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Mon Jul 23 10:24:45 2018 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 155
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
--
--
-- Code:
-- | TODO: comment this module
module Data.Rewriting.ARA.ByInferenceRules.InfTreeNode.Pretty
( prettyInfTreeNode
, prettyInfTreeNodeView
)
where
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCondition.Pretty
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCost
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCost.Pretty
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerDatatype
import Data.Rewriting.ARA.ByInferenceRules.InfTreeNode.Type
import Data.Rewriting.ARA.ByInferenceRules.Vector.Pretty
-- import Prelude hiding
-- ((<$>))
import Control.Arrow ((***))
import Data.List (intersperse)
import Data.Maybe
import Prelude hiding ((<>))
import Text.PrettyPrint
import qualified Text.PrettyPrint.ANSI.Leijen as L
line = text "" $+$ empty
prettyInfTreeNode :: (Show f, Show v, Show dt) => InfTreeNode f v dt -> Doc
prettyInfTreeNode ctx =
nest 2 $ hcat (intersperse (text ", ") lstPre) <+> text "|-" <>
hcat (intersperse (text "+") $ map (prettyACostCondition int) (costs ctx))
<> text "-" <+> prettyPostCond (postCondition ctx) $+$ line $+$ line <>
vcat (map (text . show) (history ctx)) $+$ line -- <$>
-- hang 4 (text "Conditions: "
-- <$> pretty (conditions ctx))
where lstPre = map prettyPreCond (preConditions ctx)
prettyPostCond :: (Show dt, Show a) => Maybe (a, ADatatype dt Int) -> Doc
prettyPostCond Nothing = empty
prettyPostCond (Just (f,d)) =
text (show f) <+> text ":" <+> prettyADatatype (prettyACost int) d
prettyPreCond :: (Show v, Show dt) => (v, ADatatype dt Int) -> Doc
prettyPreCond (a,b) = text (show a) <> colon <+> prettyADatatype (prettyACost int) b
prettyPreCond' (a,b) =
text a <> colon <+> prettyADatatype (prettyACost prettyVector) b
prettyInfTreeNodeView :: InfTreeNodeView -> Doc
prettyInfTreeNodeView (InfTreeNodeView pre cst post) =
hcat (intersperse (text ",") (map prettyPreCond' pre))
<+> text "|-" <> hcat (intersperse (text "+") $
map (prettyACostCondition prettyVector) cst) <>
text "-" <+> postTerm post <> postCost post
where -- postTerm Nothing = empty
postTerm (a,_) = text (show (L.pretty a))
-- postCost Nothing = empty
postCost (_,b) = if null (show postCostDoc)
then empty
else text ":" <> postCostDoc
where postCostDoc = prettyADatatype (prettyACost prettyVector) b
prettyInfTreeNodeView (InfTreeNodeLeafView sig cfSig) =
prSig False sig <>
(if isJust cfSig then text " " <+> prSig True (fromJust cfSig) else empty)
where prSig isCf (FunSig n pre cst post) =
text n <> (if isCf then text "_cf" else empty) <+> text ":: [" <>
hcat (intersperse (text " x ")
(map (prettyADatatype (prettyACost prettyVector)) pre))
<> text "] -" <> hcat (intersperse (text "+")$
map (prettyACostCondition prettyVector) cst)
<> text "->" <+> prettyADatatype (prettyACost prettyVector) post
prettyInfTreeNodeView InfTreeNodeLeafEmpty = empty
--
-- Pretty.hs ends here
| ComputationWithBoundedResources/ara-inference | src/Data/Rewriting/ARA/ByInferenceRules/InfTreeNode/Pretty.hs | mit | 3,922 | 0 | 20 | 1,149 | 1,005 | 545 | 460 | 53 | 4 |
module Graphics.Shader.Internal.Expression (
Expr, Expression,
getType,
addE, subtractE, varE,
) where
--------------------------------------------------------------------------------
import Graphics.Shader.Internal.Variable
--------------------------------------------------------------------------------
data Op = Add
| Subtract
| Multiple
| Divide
data Expression = VarExpr ShaderVarRep
| BinOp Op Expression Expression
type Expr a = Expression
getType :: Expr a -> ShaderVarType
getType (VarExpr var) = ty var
getType (BinOp _ e1 _) = getType e1
class Addable a where
addE :: Expr a -> Expr a -> Expr a
addE e1 e2 = BinOp Add e1 e2
instance Addable Int
instance Addable Float
instance Addable Double
class Subtractable a where
subtractE :: Expr a -> Expr a -> Expr a
subtractE e1 e2 = BinOp Subtract e1 e2
instance Subtractable Int
instance Subtractable Float
instance Subtractable Double
varE :: ShaderVar a -> Expr a
varE v = VarExpr v
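-- Example usage (illustrative; 'someVar' is a hypothetical ShaderVar Float):
--
-- > addE (varE someVar) (varE someVar) :: Expr Float
--
-- getType on the result reports the type of the left operand.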
| Mokosha/shaders | Graphics/Shader/Internal/Expression.hs | mit | 1,007 | 0 | 9 | 199 | 293 | 150 | 143 | 29 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Test.Week4 (week4) where
import Data.List (permutations)
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Week4
week4 :: TestTree
week4 = testGroup "Week 4 - Higher-order programming and type inference"
[
exercise1
, exercise2
, exercise3
, exercise4
]
exercise1 =
testGroup "Exercise 1 - Wholemeal programming" [
testProperty "Example 1" $
(\x -> fun1 x === fun1' x)
, testProperty "Example 2" $
(\x -> x > 0 && x < 10000 ==> fun2 x == fun2' x)
]
exercise2 =
testGroup "Exercise 2 - Folding with trees" [
testCase "Example 1" $ True @?=
foldl (&&) True
(map ((foldTree "abc" ==) . foldTree) $ permutations "abc")
, testCase "Example 2" $ 4 @?=
(height $ foldTree "ABCDEFGHIJ")
]
exercise3 =
testGroup "Exercise 3 - More folds!" [
testGroup "Problem 1 - xor" [
testCase "Example 1" $ True @?=
xor [False, True, False]
, testCase "Example 2" $ False @?=
xor [False, True, False, False, True]
]
, testGroup "Problem 2 - map" [
testCase "Example 1" $ [1, 2, 3] @?=
map' (+1) [0..2]
, testProperty "Example 2" $
(\(x :: [Integer]) -> map (2*) x == map' (2*) x)
]
, testGroup "Problem 3 - foldl using foldr" [
testProperty "Example 1" $
(\(x :: [Integer]) -> myFoldl (-) 0 x == foldl (-) 0 x)
]
]
isPrime :: Integer -> Bool
isPrime x = not $ or [ x `mod` y == 0 | y <- [2 .. x - 1] ]
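-- Naive trial division (illustrative note): isPrime 7 == True, isPrime 9 == False;
-- inputs below 2 are not treated specially here.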
exercise4 =
testGroup "Exercise 4 - Finding primes" [
testProperty "Example 1" $
(\n -> and $ map isPrime $ sieveSundaram n)
]
| taylor1791/cis-194-spring | test/Test/Week4.hs | mit | 1,658 | 0 | 15 | 448 | 563 | 305 | 258 | -1 | -1 |
module NumberTheory.Main where
import Notes
import qualified Data.Text as T
import qualified Prelude as P (Int, map, mod, (+), (<),
(^))
import Functions.Basics.Macro
import Functions.BinaryOperation.Terms
import Functions.Jections.Terms
import Groups.Macro
import Groups.Terms
import Logic.FirstOrderLogic.Macro
import Logic.PropositionalLogic.Macro
import Relations.Basics.Terms
import Relations.Equivalence.Macro
import Relations.Equivalence.Terms
import Sets.Basics.Terms
import NumberTheory.Macro
import NumberTheory.Terms
numberTheoryC :: Note
numberTheoryC = chapter "Number Theory" $ do
naturalNumbersS
wholeNumbersS
divisibilityS
moduloS
naturalNumbersS :: Note
naturalNumbersS = section "Natural numbers" $ do
naturalNumbersDefinition
naturalNumbersAddition
naturalNumbersSubtraction
naturalNumbersMultiplication
naturalNumbersDivision
naturalNumbersDefinition :: Note
naturalNumbersDefinition = de $ do
s [naturalNumbers', m nats, "are inductively defined as follows"]
itemize $ do
item $ m $ 0 === emptyset
let n = "n"
item $ m $ succ n === n ∪ setof n
naturalNumbersAddition :: Note
naturalNumbersAddition = de $ do
s [the, addition', "of", naturalNumbers, "is a", binaryOperation, m addN_, "defined recursively as follows"]
let n = "n"
ma $ n `addN` 0 === n === 0 `addN` n
let m = "m"
ma $ succ n + m === succ (n `addN` m)
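-- A concrete unfolding of this recursion (illustrative):
--   succ (succ 0) + succ 0 = succ (succ 0 + succ 0)
--                          = succ (succ (0 + succ 0))
--                          = succ (succ (succ 0))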
naturalNumbersSubtraction :: Note
naturalNumbersSubtraction = de $ do
s [the, subtraction', "of", naturalNumbers, "is a", binaryOperation, m subN_, "defined in terms of", addition, "as follows"]
let a = "a"
let b = "b"
let c = "c"
s ["We say that", m $ a `subN` b =: c, "holds if", m $ c `addN` b =: a, "holds"]
naturalNumbersMultiplication :: Note
naturalNumbersMultiplication = de $ do
s [the, multiplication', "of", naturalNumbers, "is a", binaryOperation, m mulN_, "defined in terms of", addition, "as follows"]
let n = "n"
ma $ n `mulN` 0 === 0 === 0 `mulN` n
ma $ n `mulN` 1 === n === 1 `mulN` n
let m = "m"
ma $ succ n `mulN` m === m `addN` (pars $ n `mulN` m)
naturalNumbersDivision :: Note
naturalNumbersDivision = de $ do
s [the, division', "of", naturalNumbers, "is a", binaryOperation, m divN_, "defined in terms of", multiplication, "as follows"]
let a = "a"
let b = "b"
let c = "c"
s ["We say that", m $ a `divN` b =: c, "holds if", m $ c `mulN` b =: a, "holds"]
s [m $ a `divN` b, "is often written as", m $ a / b]
wholeNumbersS :: Note
wholeNumbersS = do
wholeNumbersDefinition
wholeNumbersEquivalentDefinition
naturalNumbersSubsetofWholeNumbersUnderInjection
wholeNumbersAddition
wholeNumbersSubtraction
wholeNumbersMultiplication
wholeNumbersDivision
wholeNumbersDefinition :: Note
wholeNumbersDefinition = de $ do
s [wholeNumbers', or, integers', m ints, "are defined as the", equivalenceClasses, "of", m $ naturals ^ 2, "with respect to the following", equivalenceRelation]
let (a, b, c, d) = ("a", "b", "c", "d")
-- b - a = d - c
ma $ wholen a b .~ wholen c d === b + c =: d + a
nte $ do
s ["Intuitively, an", element, m $ wholen a b, "represents", m $ b - a, "even if that", element, "does not exist in", m nats]
wholeNumbersEquivalentDefinition :: Note
wholeNumbersEquivalentDefinition = nte $ do
let pos = "+"
neg = "-"
s [wholeNumbers, "can equivalently be defined using two abstract elements", m pos, and, m neg, "as the", set, m $ setofs [pos, neg] ⨯ nats]
s ["Then there is no need to use", equivalenceClasses, "but we have to come up with suitable definitions of", m pos, and, m neg]
s ["For example, we can use the following definitions"]
ma $ pos === emptyset <> qquad <> text and <> qquad <> neg === setof emptyset
naturalNumbersSubsetofWholeNumbersUnderInjection :: Note
naturalNumbersSubsetofWholeNumbersUnderInjection = nte $ do
let i = "i"
s ["We regard the", set, "of", naturalNumbers, "as a", subset, "of the", wholeNumbers, "under the following", injection, m i]
let a = "a"
ma $ func i nats ints a $ wholen 0 a
wholeNumbersAddition :: Note
wholeNumbersAddition = de $ do
s [the, addition, m addZ_, "of", wholeNumbers, "is defined as the component-wise", addition, "of", naturalNumbers]
let (a, b, c, d) = ("a", "b", "c", "d")
ma $ wholen a b `addZ` wholen c d === wholen (a `addN` c) (b `addN` d)
s ["As such, we abbreviate", m $ wholen 0 a, "as", m a]
wholeNumbersSubtraction :: Note
wholeNumbersSubtraction = de $ do
s [the, subtraction', "of", wholeNumbers, "is a", binaryOperation, m subZ_, "defined in terms of", addition, "as follows"]
let a = "a"
let b = "b"
let c = "c"
s ["We say that", m $ a `subZ` b =: c, "holds if", m $ c `addZ` b =: a, "holds"]
wholeNumbersMultiplication :: Note
wholeNumbersMultiplication = de $ do
s [the, multiplication', "of", wholeNumbers, "is a", binaryOperation, m mulZ_, "defined in terms of", addition, "as follows"]
let n = "n"
ma $ n `mulZ` 0 === 0 === 0 `mulZ` n
ma $ n `mulZ` 1 === n === 1 `mulZ` n
let m = "m"
ma $ succ n `mulZ` m === m `addZ` (pars $ n `mulZ` m)
wholeNumbersDivision :: Note
wholeNumbersDivision = de $ do
s [the, division', "of", wholeNumbers, "is a", binaryOperation, m divN_, "defined in terms of", multiplication, "as follows"]
let a = "a"
let b = "b"
let c = "c"
s ["We say that", m $ a `divZ` b =: c, "holds if", m $ c `mulZ` b =: a, "holds"]
s [m $ a `divZ` b, "is often written as", m $ a / b]
divisibilityS :: Note
divisibilityS = section "Divisibility" $ do
divisibilityDefinition
dividesTransitive
dividesMultiples
productDivides
gcdDefinition
lcmDefinition
todo "gcdExistence"
todo "lcmExistence"
bezoutIdentityLemma
lcmGcdProduct
primeDefinition
coprimeDefinition
coprimeDivisionCancels
coprimeDividesProduct
coprimeCompound
gcdMultiplicative
gcdMultiplicativeConsequence
divisibilityDefinition :: Note
divisibilityDefinition = de $ do
todo "define divisibility more abstractly in integrity domains"
let a = "a"
let b = "b"
let c = "c"
s ["We define a", wholeNumber, m a, "to be", divisible', "by another", wholeNumber, m b, "if there exists a", wholeNumber, m c, "such that", m $ a `divZ` b =: c]
s ["We then call", m b, "a", divisor', "of", m a, and, m c, "the", quotient']
ma $ a .| b === te (c ∈ ints) (a * c =: b)
dividesTransitive :: Note
dividesTransitive = prop $ do
lab dividesTransitivePropertyLabel
s [the, divides, relation, is, transitive_]
let a = "a"
let b = "b"
let c = "c"
ma $ fa (cs [a, b, c] ∈ ints) $ (pars $ a .| b) ∧ (pars $ b .| c) ⇒ (pars $ a .| c)
proof $ do
let x = "x"
s ["Because", m a, divides, m b <> ", there exists an", integer, m x, "as follows"]
ma $ a * x =: b
let y = "y"
s ["Because", m b, divides, m c <> ", there exists an", integer, m y, "as follows"]
ma $ b * y =: c
s ["Now we conclude that", m a, divides, m c, with, quotient, m $ x * y]
ma $ a * x * y =: c
dividesMultiples :: Note
dividesMultiples = prop $ do
lab dividesMultiplesPropertyLabel
let a = "a"
let b = "b"
let r = "r"
s ["Let", m a, and, m b, be, integers, "such that", m a, divides, m b]
ma $ fa (r ∈ ints) $ (a .| b) ⇒ (a .| (r * b))
proof $ do
let q = "q"
s ["Because", m a, divides, m b, "there exists an", integer, m q, "as follows"]
ma $ a * q =: b
s ["Let", m r, "be arbitrary"]
s ["Now, ", m a, divides, m $ r * b, "because of the following equation which we obtain by multiplying", m r, "to both sides of the previous equation"]
ma $ a * (q * r) =: b * r
productDivides :: Note
productDivides = prop $ do
lab productDividesPropertyLabel
let a = "a"
b = "b"
c = "c"
d = "d"
ab = a * b
cd = c * d
s ["Let", csa [m a, m b, m c, m d], be, integers, "such that", m a, divides, m b, and, m c, divides, m d <> ", then", m ab, divides, m cd]
ma $ (pars $ a .| b) ∧ (pars $ c .| d) ⇒ (ab .| cd)
proof $ do
let q = "q"
s ["Because", m a, divides, m b, "there exists a", m q, "as follows"]
ma $ a * q =: b
let r = "r"
s ["Because", m c, divides, m d, "there exists a", m r, "as follows"]
        ma $ c * r =: d
s ["When we multiply these equations, we find that", m ab, divides, m cd, with, quotient, m $ q * r]
ma $ ab * q * r =: cd
gcdDefinition :: Note
gcdDefinition = de $ do
let a = "a"
b = "b"
g = "g"
c = "c"
    s [the, greatestCommonDivisor', m $ gcd a b, "of two", integers, m a, and, m b, "is defined as follows"]
    ma $ g =: gcd a b === (pars $ g .| a) ∧ (pars $ g .| b) ∧ (not $ pars $ te (c ∈ ints) $ (pars $ c .| a) ∧ (pars $ c .| b) ∧ (pars $ g < c))
lcmDefinition :: Note
lcmDefinition = de $ do
let a = "a"
b = "b"
l = "l"
c = "c"
    s [the, leastCommonMultiple', m $ lcm a b, "of two", integers, m a, and, m b, "is defined as follows"]
ma $ l =: lcm a b === (pars $ a .| l) ∧ (pars $ b .| l) ∧ (not $ pars $ te (c ∈ ints) $ (pars $ a .| c) ∧ (pars $ b .| c) ∧ (pars $ c < l))
bezoutIdentityLemma :: Note
bezoutIdentityLemma = lem $ do
lab bezoutsIdentityLemmaLabel
let a = "a"
b = "b"
x = "x"
y = "y"
s ["Let", m a, and, m b, "be nonzero", integers, "then there exist", integers, m x, and, m y, "as follows"]
ma $ a * x + b * y =: gcd a b
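    -- Illustrative check (added comment, not rendered in the notes): with a = 12 and b = 8 one can take x = 1, y = -1, since 12 - 8 = 4 = gcd 12 8.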
todo "write this down correctly"
toprove
lcmGcdProduct :: Note
lcmGcdProduct = prop $ do
let a = "a"
b = "b"
ab = a * b
gab = gcd a b
lab = lcm a b
s ["Let", m a, and, m b, be, integers]
ma $ gab * lab =: a * b
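    -- Illustrative check (added comment, not rendered in the notes): gcd 4 6 * lcm 4 6 = 2 * 12 = 24 = 4 * 6.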
proof $ do
let p = "p"
s ["Because", m gab, divides, m a <> ", it also divides", m ab, ref dividesMultiplesPropertyLabel, "so there exists an", integer, m p, "as follows"]
ma $ gab * p =: ab
s ["We now prove that", m p, "equals", m lab]
itemize $ do
let x = "x"
item $ do
s [m a, divides, m p]
newline
s ["Because", m gab, divides, m b <> ", there exists an", m x, "as follows"]
ma $ gab * x =: b
s ["Multiply both sides by", m a, "and we get the following"]
ma $ gab * x * a =: ab
s ["Equate this with the equation that we found for", m ab, "earlier, and we conclude that", m a, divides, m p, with, quotient, m x]
let y = "y"
item $ do
s [m b, divides, m p]
newline
s ["Because", m gab, divides, m a <> ", there exists an", m y, "as follows"]
ma $ gab * y =: a
s ["Multiply both sides by", m b, "and we get the following"]
ma $ gab * y * b =: ab
s ["Equate this with the equation that we found for", m ab, "earlier, and we conclude that", m b, divides, m p, with, quotient, m y]
item $ do
s ["There is no smaller", integer, "like that"]
newline
let z = "z"
s ["Suppose", m z, "is an", integer, "that is", divisible, by, m a, and, m b]
let k = "k"
k1 = k !: 1
k2 = k !: 2
ma $ z =: a * k1 <> quad <> text and <> quad <> z =: b * k2
let u = "u"
v = "v"
s ["By", bezoutsLemma <> ", there must exist two", integers, m u, and, m v, "as follows"]
ma $ gab =: a * u + b * v
s ["Now observe the following"]
aligneqs (z * gab)
[ z * (pars $ a * u + b * v)
, z * a * u + z * b * v
, (pars $ b * k2) * a * u + (pars $ a * k1) * b * v
, (a * b) * (pars $ k2 * u + k1 * v)
, (gab * p) * (pars $ k2 * u + k1 * v)
]
s ["We concude that", m p, divides, m z]
ma $ z =: p * (pars $ k2 * u + k1 * v)
coprimeDivisionCancels :: Note
coprimeDivisionCancels = prop $ do
lab coprimeDivisionCancelsPropertyLabel
let a = "a"
b = "b"
c = "c"
bc = b * c
s ["Let", csa [m a, m b, m c], be, integers, "such that", m a, divides, m bc, and, m a, and, m c, are, coprime <> ", then", m a, divides, m b]
ma $ (pars $ a .| bc) ∧ (pars $ a `copr` c) ⇒ (pars $ a .| b)
proof $ do
let n = "n"
s ["Because", m a, divides, m bc <> ", there exists an", integer, m n, "as follows"]
ma $ n * a =: bc
let x = "x"
y = "y"
s ["By", bezoutsLemma <> ", there must exist two", integers, m x, and, m y, "as follows"]
ma $ 1 =: a * x + c * y
s ["Multiply", m b, "on both sides of this equation to obtain the following"]
ma $ b =: a * b * x + b * c * y
s ["Now substitute", m bc]
ma $ b =: a * b * x + a * n * y
s ["Seperate out", m a, "to conclude that", m a, divides, m b, with, quotient, m $ b * x + n * y]
ma $ b =: a * (pars $ b * x + n * y)
coprimeDividesProduct :: Note
coprimeDividesProduct = prop $ do
lab coprimeDividesProductPropertyLabel
let a = "a"
b = "b"
c = "c"
ab = a * b
s ["Let", csa [m a, m b, m c], be, integers, "such that", m a, divides, m b, and, m a, divides, m c, and, m a, and, m b, are, coprime <> ", then", m ab, divides, m c]
ma $ (pars $ a .| c) ∧ (pars $ b .| c) ∧ (pars $ gcd a b =: 1) ⇒ (pars $ ab .| c)
proof $ do
let q = "q"
s ["Because", m a, divides, m c, "there exists a", m q, "as follows"]
ma $ a * q =: c
s ["Because", m b, divides, m $ a * q, but, m a, and, m b, are, coprime, "we conclude that", m b, divides, m q, ref coprimeDivisionCancelsPropertyLabel]
s ["Because", m b, divides, m q, "there must exist an", integer, m p, "as follows"]
let p = "p"
ma $ b * p =: q
s ["We now find that", m ab, divides, m c, with, quotient, m p]
ma $ a * b * p =: c
primeDefinition :: Note
primeDefinition = de $ do
let a = "a"
s ["An", integer, m a, "is called", prime', "if it its largest", divisor <> ", different from", m a, "itself, is", m 1]
coprimeDefinition :: Note
coprimeDefinition = de $ do
lab coprimeDefinitionLabel
lab relativelyPrimeDefinitionLabel
let a = "a"
b = "b"
s ["Two", integers, m a, and, m b, "are considered", cso [coprime', relativelyPrime', mutuallyPrime], "if their", greatestCommonDivisor, "is one"]
ma $ a `copr` b === gcd a b =: 1
s ["Equivalently, their", leastCommonMultiple, is, m $ a * b]
toprove
coprimeCompound :: Note
coprimeCompound = prop $ do
lab coprimeCompoundPropertyLabel
let a = "a"
b = "b"
c = "c"
s ["Let", csa [m a, m b, m c], be, integers, "such that", m a, and, m b, are, coprime <> ", then", m $ gcd a c, and, m $ gcd b c, are, coprime]
ma $ (a `copr` b) ⇒ fa (c ∈ ints) (gcd a c `copr` gcd b c)
proof $ do
s ["Suppose, for the sake of contradiction, that", m $ gcd a c, and, m $ gcd b c, "are not", coprime]
s ["This would mean the following"]
let g = "g"
ma $ gcd (gcd a c) (gcd b c) =: g > 1
s ["This means that", m g, divides, m $ gcd a c, and, m $ gcd b c, and, "therefore transitively", m a, and, m b, ref dividesTransitivePropertyLabel]
s ["Because", m a, and, m b, are, coprime, "the", greatestCommonDivisor, "of", m a, and, m b, is, m 1, "so", m g, "cannot be a", divisor, "of", m a, and, m b]
s ["We arrive at a contradiction"]
gcdMultiplicative :: Note
gcdMultiplicative = prop $ do
lab gcdMultiplicativePropertyLabel
let a = "a"
b = "b"
c = "c"
s ["Let", csa [m a, m b, m c], be, integers, "such that", m a, and, m b, are, coprime]
let ab = a * b
gab = gcd ab c
ga = gcd a c
gb = gcd b c
gab_ = ga * gb
g = "g"
ma $ gab =: ga * gb
proof $ do
s ["We prove the three components of the", greatestCommonDivisor, "separately"]
s ["Define", m $ g =: gab_]
itemize $ do
item $ do
s [m g, divides, m ab, ref productDividesPropertyLabel]
item $ do
s [m g, divides, m c, ref coprimeCompoundPropertyLabel, ref coprimeDividesProductPropertyLabel]
item $ do
s [m g, "is the smallest", integer, "that does so"]
newline
let z = "z"
s ["Suppose there was an", integer, m z, "that divided both", m ab, and, m c]
let x = "x"
y = "y"
s ["That would mean that there exist integers", m x, and, m y]
ma $ z * x =: ab <> quad <> text and <> quad <> z * y =: c
let t = "t"
u = "u"
v = "v"
w = "w"
s ["According to", bezoutsLemma, ref bezoutsIdentityLemmaLabel <> ", there must exist", integers, csa [m t, m u, m v, m w], "as follows"]
ma $ g =: (pars $ t * a + u * c) * (pars $ v * b + w * c)
s ["Now observe the following"]
aligneqs g
[ t * a * v * b + t * a * w * c + u * c * v * b + u * c * w * c
, z * x * t * v + z * y * t * a * w + z * y * u * v * b + z * y * u * c * w
, z * (pars $ x * t * v + y * t * a * w + y * u * v * b + y * u * c * w)
]
s ["We conclude that", m z, divides, m g]
gcdMultiplicativeConsequence :: Note
gcdMultiplicativeConsequence = con $ do
lab gcdMultiplicativeConsequenceLabel
let a = "a"
b = "b"
c = "c"
bc = b * c
s ["Let", csa [m a, m b, m c], be, integers, "such that", m a, and, m b, are, coprime]
s [m a, and, m bc, are, coprime, "if and only if both", m a, and, m b <> ",", and, m a, and, m c, are, coprime]
proof $ do
s ["Proof of an equivalence"]
itemize $ do
item $ do
s ["If", m $ a `copr` bc, "holds, then", m $ gcd a c * gcd b c, "must be one", ref gcdMultiplicativePropertyLabel]
s ["Because they are", integers <> ", this means that both", m $ gcd a c, and, m $ gcd b c, "must be one and therefore, by definition,", m $ a `copr` c, and, m $ b `copr` c, "hold"]
item $ do
s ["If both", m $ a `copr` c, and, m $ b `copr` c, "hold, then the product of their", greatestCommonDivisors, "must be one and therefore", m a, and, m bc, coprime]
moduloS :: Note
moduloS = section "Modular arithmetic" $ do
oddEvenDefinition
modularIntegersDefinition
solutionOfLinearCongruenceTheorem
chineseRemainderTheoremPart
quadraticResidueDefinition
quadraticResidueExamples
quadraticResiduesInPrimeGroup
oneFourthQuadraticResidues
legendreSymbolDefinition
legendreSymbolExamples
eulerCriterionLegendreSymbol
jacobiSymbolDefinition
jacobiSymbolExamples
modularIntegersDefinition :: Note
modularIntegersDefinition = de $ do
let n = "n"
s [the, integers, "modulo an", integer, m n, "are defined as the following", quotientGroup]
ma $ intmod n === qgrp ints (n <> ints)
let a = "a"
b = "b"
q = "q"
s ["We say that an", integer, m a, is, congruent', with, "an", integer, m b, "modulo an", integer, m n, "if there exists an", integer, m q, "such that", m $ a =: b + q * n, "holds"]
ma $ eqmod n a b === te (q ∈ ints) (a =: b + q * n)
todo "fully formalize once we have a good chapter on groups"
oddEvenDefinition :: Note
oddEvenDefinition = de $ do
s ["An", integer, "is called", odd', "if it is", congruent, with, m 1, modulo, m 2]
s ["If it is instead", congruent, with, m 0, modulo, m 2 <> ", then we call it", even']
solutionOfLinearCongruenceTheorem :: Note
solutionOfLinearCongruenceTheorem = thm $ do
lab solutionOfLinearCongruenceTheoremLabel
let a = "a"
n = "n"
b = "b"
s ["Let", csa [m a, m b, m n], be, integers]
let x = "x"
s ["There exists an", integer, m x, "as follows if and only if", m $ gcd a n, divides, m b]
s [m b, "is unique if", m a, and, m n, are, coprime]
ma $ (pars $ gcd a n .| b) ⇔ (pars $ te (x ∈ ints) $ eqmod n (a * x) b)
proof $ do
s ["Proof of an equivalence"]
itemize $ do
item $ do
let y = "y"
s ["If", m $ gcd a n, divides, m b, "then there exists an", integer, m y, "as follows"]
ma $ gcd a n * y =: b
let p = "p"
q = "q"
s [bezoutsLemma, "tells us that there exist", integers, m p, and, m q, "as follows"]
ma $ gcd a n =: a * p + n * q
s ["If we substitute this in the above equation, we get the following"]
ma $ a * p * y + n * q * y =: b
s ["If we now look at the second term on the left-hand side, we see that it's divisible by", m n, "so it dissappears when viewed modulo", m n]
ma $ eqmod n (a * p * y) b
s ["We find that", m $ p * y, "is a valid candidate for", m x]
newline
let u = "u"
s ["Now, assume", m a, and, m n, are, coprime, and, m u, "is another such", integer, "solution"]
s ["If", m n, "equals", m 1 <> ", then", m x, "is trivially unique, because it's always zero"]
s ["Otherwise, note that", m a, "cannot be zero because then the", greatestCommonDivisor, "of", m a, and, m n, "would be", m n, "instead of", m 1]
ma $ eqmod n (a * x) (a * u)
s ["We divide out", m a, "which we're allowed to do because", m a, "is not zero"]
s ["We find that", m x, and, m u, "are equal and therefore", m x, "is unique"]
item $ do
s ["Let", m x, "be an", integer, "as follows"]
ma $ eqmod n (a * x) b
let f = "f"
s ["This means that there exists an", integer, m f, "as follows"]
ma $ a * x + f * n =: b
let p = "p"
q = "q"
g = gcd a n
s ["Now,", m $ gcd a n, divides, m a, and, m n, "so there exist", integers, m p, and, m q, "as follows"]
ma $ g * p =: a <> qquad <> text and <> qquad <> g * q =: n
s ["After substitution, we find the following"]
ma $ g * p * x + g * q * f =: b
s ["We conclude that", m g, divides, m b, with, quotient, m $ p * x + q * f]
chineseRemainderTheoremPart :: Note
chineseRemainderTheoremPart = thm $ do
lab chineseRemainderTheoremLabel
lab chineseRemainderTheoremDefinitionLabel
s [the, chineseRemainderTheorem']
newline
let n = "n"
k = "k"
a = "a"
(n1, n2, nk, ns) = buildList n k
(a1, a2, ak, as) = buildList a k
s ["Let", m ns, "be a list of", pairwiseCoprime, integers]
let x = "x"
s ["For any given list of", integers, m as <> ", there exists an", integer, m x, "as follows"]
ma $ centeredBelowEachOther $
[ eqmod n1 x a1
, eqmod n2 x a2
, vdots
, eqmod nk x ak
]
let i = "i"
let ni = n !: i
s ["Furthermore, the solution is unique modulo", m $ prodcmp i ni]
proof $ do
let nn = "N"
s ["Let", m nn, "be the product", m $ prodcmpr (i =: 1) k ni]
let nni = nn !: i
s ["Define", m nni, "as", m $ nn / nk]
newline
s ["Because the", integers, m ns, are, pairwiseCoprime <> ",", m nni, and, m ni, "are also", coprime, ref gcdMultiplicativeConsequenceLabel]
ma $ gcd nni ni =: 1
let x = "x"
xi = x !: i
s ["This means that the", linearCongruence, m $ eqmod nk (nni * xi) 1, "has some unique solution", m xi, ref solutionOfLinearCongruenceTheoremLabel]
let ai = a !: i
s ["Define", m $ x =: sumcmpr (i =: 1) k (ai * nni * xi)]
s ["We will now prove that", m x, "satisfies all the", linearCongruences]
s ["Let", m i, "therefore be arbitrary"]
let j = "j"
nj = n !: j
s ["Note first that for any", m j, "different from", m i <> ",", m nj, divides, m nni]
ma $ eqmod ni nj 0
s ["We find that the following holds"]
ma $ eqmod ni x (ai * nni * xi)
s ["Finally, because", m $ nni * xi, "was found to be congruent with", m 1, "modulo", m ni, "we find that", m x, "is congruent with", m ai]
newline
s ["Now we only have to prove that this solution is unique modulo", m n]
let y = "y"
s ["Suppose that", m y, "was another solution of the system"]
s ["This means that each", m ni, divides, m $ y - x, "but because each of the moduli are", coprime, "we find that also", m nn, divides, m $ y - x, ref coprimeDividesProductPropertyLabel]
s ["That is,", m y, and, m x, are, congruent, modulo, m nn]
quadraticResidueDefinition :: Note
quadraticResidueDefinition = de $ do
lab quadraticResidueDefinitionLabel
let n = "n"
x = "r"
y = "q"
s ["A", quadraticResidue', "modulo an", integer, m n, "is an", integer, m x, "such that there exists an", integer, m y, "as follows"]
ma $ eqmod n (y ^ 2) x
quadraticResidueExamples :: Note
quadraticResidueExamples = do
ex $ do
let n = "n"
s [m 0, and, m 1, "are always", quadraticResidues, "in", m $ intmod n, for, m $ n > 1, because, m $ eqmod n (0 ^ 2) 0, and, m $ eqmod n (1 ^ 2) 1]
ex $ do
s ["In", m (intmod 7) <> ",", m 2, "is a", quadraticResidue, because, m $ eqmod 7 (5 ^ 2) 2]
ex $ do
s ["In", m $ intmod 5, ", the", quadraticResidues, are, csa [m 0, m 1, m 4], because, csa [m $ eqmod 5 (0 ^ 2) 0, m $ eqmod 5 (1 ^ 2) 1, m $ eqmod 5 (2 ^ 2) 4]]
ex $ do
s ["In", m $ intmod 35, ", the", quadraticResidues, are, "the following", elements]
ma $ cs [0, 1, 4, 9, 11, 14, 15, 16, 21, 25, 29, 30]
ex $ do
s ["Here are the", quadraticResidues, "(different from", m 0, and, m 1 <> ") modulo some small", integers]
let rawn :: P.Int -> Note
rawn = raw . T.pack . show
let n = 20
newline
hereFigure $ linedTable
((raw "n\\setminus q") : P.map rawn [1 .. n])
( P.map (\i -> do
rawn i : (P.map (\j -> if j P.< (i P.+ 1) then (rawn $ j P.^ (2 :: P.Int) `P.mod` i) else mempty) [1 .. n])
) [0 .. n])
quadraticResiduesInPrimeGroup :: Note
quadraticResiduesInPrimeGroup = thm $ do
let p = "p"
a = "a"
s ["Let", m p, "be an", odd, prime, and, m a, "an", integer, "that is not", divisible, by, m p]
let g = "g"
s ["Let", m g, "be a", generator, "of", m $ intmgrp p]
let q = "q"
itemize $ do
item $ do
s [m a, "is a", quadraticResidue, modulo, m p, "if and only if there exists an", integer, m q, "such that", m $ eqmod p a (g ^ (2 * q)), "holds"]
item $ do
s [m a, "is not a", quadraticResidue, modulo, m p, "if and only if there exists an", integer, m q, "such that", m $ eqmod p a (g ^ (2 * q + 1)), "holds"]
proof $ do
s ["Because", m a, "is not", divisible, by, m p <> ",", m a, "is an", element, "of", m $ int0mod p]
itemize $ do
item $ do
let x = "x"
s ["Suppose", m a, "is a", quadraticResidue, modulo, m p, "then there exists an", integer, m x, "as follows"]
ma $ eqmod p (x^2) a
s [m x, "must then be an", element, "of", m $ int0mod p]
s ["Because", m g, "is a", generator, for, m (intmgrp p) <> ", there must exist an", integer, m q, "as follows"]
ma $ eqmod p x (g ^ q)
s ["This means that we have found the", m q, "that we were looking for"]
ma $ eqmod p a (g ^ (2 * q))
s ["The other direction is trivial"]
item $ do
toprove
oneFourthQuadraticResidues :: Note
oneFourthQuadraticResidues = thm $ do
let p = "p"
q = "q"
n = "n"
s ["Let", m p, and, m q, "be two", odd, primes, and, define, m $ n =: p * q]
s [m $ 1 / 4, "of the", elements, "in", m $ int0mod n, are, quadraticResidues, modulo, m n]
toprove
legendreSymbolDefinition :: Note
legendreSymbolDefinition = de $ do
let a = "a"
p = "p"
s ["Let", m a, "be an", integer, and, m p, "an", odd, prime]
s [the, legendreSymbol', "of", m a, over, m p, "is defined as follows"]
ma $ (leg a p ===) $ cases $ do
1 & text "if " <> m a <> text (" is a " <> quadraticResidue <> " " <> modulo <> " ") <> m p <> text " and " <> neg (p .| a)
lnbk
(-1) & text "if " <> m a <> text (" is not a " <> quadraticResidue <> " " <> modulo <> " ") <> m p
lnbk
0 & text "if " <> p .| a
legendreSymbolExamples :: Note
legendreSymbolExamples = do
ex $ do
s ["Note that", m $ 4 ^ 2 =: 16, and, m $ eqmod 11 16 5]
ma $ leg 5 11 =: 1
ex $ do
ma $ leg 6 11 =: -1
eulerCriterionLegendreSymbol :: Note
eulerCriterionLegendreSymbol = thm $ do
s [eulersCriterion', for, legendreSymbols]
let a = "a"
p = "p"
s ["Let", m a, "be an", integer, and, m p, "an", integer, odd, prime]
let l = leg a p
ap = a ^ ((p - 1) / 2)
ma $ eqmod p l ap
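    -- Illustrative check (added comment, not rendered in the notes): for p = 7, a = 2 we get 2^((7-1)/2) = 8 ≡ 1 (mod 7), matching leg 2 7 = 1 since 3^2 ≡ 2 (mod 7).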
proof $ do
s ["We have to prove three cases:"]
itemize $ do
item $ do
s [m ap, is, m 0, modulo, m p, "if and only if", m $ leg a p, is, m 0]
newline
let n = "n"
s ["if", m p, divides, m a, "then there exists an", m n, "as follows"]
ma $ (n * p) ^ ((p - 1) / 2)
s ["This is clearly", divisible, by, m p, "and therefore the following holds"]
ma $ eqmod p ap 0
item $ do
s [m ap, is, m 1, modulo, m p, "if and only if", m a, "is a", quadraticResidue, modulo, m p]
newline
toprove
item $ do
s [m ap, is, m (-1), modulo, m p, "if and only if", m a, "is not a", quadraticResidue, modulo, m p]
newline
toprove
jacobiSymbolDefinition :: Note
jacobiSymbolDefinition = de $ do
let a = "a"
n = "n"
s ["Let", m a, "be an", integer, and, m n, "an", odd, naturalNumber, "with the following", primeFactorization]
let p = "p"
t = "t"
(p1, p2, pt, _) = buildList p t
v = "v"
(v1, v2, vt, _) = buildList v t
i = "i"
pi = p !: i
vi = v !: i
let (^) = (.^:)
ma $ n =: p1 ^ v1 * p2 ^ v2 * dotsb * pt ^ vt =: prodcmpr (i =: 1) t (pi ^ vi)
    s [the, jacobiSymbol', "of", m a, over, m n, "is defined as follows"]
ma $ jac a n === (leg a p1) ^ v1 * (leg a p2) ^ v2 * dotsb * (leg a pt) ^ vt =: prodcmpr (i =: 1) t ((leg a pi) ^ vi)
jacobiSymbolExamples :: Note
jacobiSymbolExamples = do
let (^) = (.^:)
ex $ do
ma $ jac 5 9 =: jac 5 (3 ^ 2) =: (leg 5 3) ^ 2 =: 1
ex $ do
        ma $ jac 2 45 =: jac 2 (3 ^ 2 * 5) =: (leg 2 3) ^ 2 * (leg 2 5) =: -1
| NorfairKing/the-notes | src/NumberTheory/Main.hs | gpl-2.0 | 32,005 | 27 | 57 | 10,859 | 13,150 | 6,790 | 6,360 | 688 | 2 |
{- |
Module : $Header$
Description : OWL signatures colimits
Copyright : (c) Mihai Codescu, and Uni Bremen 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable
OWL2 signature colimits, computed component-wise.
-}
module OWL2.ColimSign where
import OWL2.Sign
import OWL2.Morphism
import OWL2.AS
import Common.SetColimit
import Common.Lib.Graph
import Data.Graph.Inductive.Graph as Graph
import qualified Data.Map as Map
signColimit :: Gr Sign (Int, OWLMorphism) ->
(Sign, Map.Map Int OWLMorphism)
signColimit graph = let
conGraph = emap (getEntityTypeMap Class) $ nmap concepts graph
dataGraph = emap (getEntityTypeMap Datatype) $ nmap datatypes graph
indGraph = emap (getEntityTypeMap NamedIndividual) $ nmap individuals graph
objGraph = emap (getEntityTypeMap ObjectProperty) $
nmap objectProperties graph
dataPropGraph = emap (getEntityTypeMap DataProperty) $
nmap dataProperties graph
annPropGraph = emap (getEntityTypeMap AnnotationProperty) $
nmap annotationRoles graph
_prefixGraph = emap getPrefixMap
$ nmap (Map.keysSet . toQName . prefixMap) graph
(con, funC) = addIntToSymbols $ computeColimitSet conGraph
(dat, funD) = addIntToSymbols $ computeColimitSet dataGraph
(ind, funI) = addIntToSymbols $ computeColimitSet indGraph
(obj, funO) = addIntToSymbols $ computeColimitSet objGraph
(dp, funDP) = addIntToSymbols $ computeColimitSet dataPropGraph
(ap, funAP) = addIntToSymbols $ computeColimitSet annPropGraph
-- (pf, funP) = addIntToSymbols $ computeColimitSet prefixGraph
morFun i = foldl Map.union Map.empty
[ setEntityTypeMap Class $
Map.findWithDefault (error "maps") i funC,
setEntityTypeMap Datatype $
Map.findWithDefault (error "maps") i funD,
setEntityTypeMap NamedIndividual $
Map.findWithDefault (error "maps") i funI,
setEntityTypeMap ObjectProperty $
Map.findWithDefault (error "maps") i funO,
setEntityTypeMap DataProperty $
Map.findWithDefault (error "maps") i funDP,
setEntityTypeMap AnnotationProperty $
Map.findWithDefault (error "maps") i funAP
]
morMaps = Map.fromAscList $
map (\ x -> (x, morFun x)) $ nodes graph
nameMap = foldl Map.union Map.empty $
map (\ (_, l) -> prefixMap l) $ labNodes graph
colimSign = emptySign {
concepts = con,
datatypes = dat,
objectProperties = obj,
dataProperties = dp,
individuals = ind,
annotationRoles = ap,
prefixMap = nameMap
}
colimMor = Map.fromAscList $
map (\ (i, ssig) -> let
mm = Map.findWithDefault (error "mor") i morMaps
om = OWLMorphism {
osource = ssig,
otarget = colimSign,
mmaps = mm,
pmap = Map.empty
}
in (i, om)
) $ labNodes graph
in (colimSign, colimMor)
instance SymbolName QName where
addIntAsSuffix (q, i) = q { localPart = localPart q ++ show i }
getEntityTypeMap :: EntityType -> (Int, OWLMorphism)
-> (Int, Map.Map QName QName)
getEntityTypeMap e (i, phi) = let
f = Map.filterWithKey
(\ (Entity x _) _ -> x == e) $ mmaps phi
in (i, Map.fromList $
map (\ (Entity _ x, y) -> (x, y)) $
Map.toAscList f)
setEntityTypeMap :: EntityType -> Map.Map QName QName
-> Map.Map Entity QName
setEntityTypeMap = Map.mapKeys . Entity
getPrefixMap :: (Int, OWLMorphism) -> (Int, Map.Map QName QName)
getPrefixMap (i, phi) = let
f = pmap phi
in (i, Map.fromList $
map (\ (x, y) -> (mkQName x, mkQName y)) $
Map.toAscList f)
toQName :: PrefixMap -> Map.Map QName String
toQName pm = Map.fromList $ map (\ (p, s) -> (mkQName p, s)) $ Map.toList pm
| nevrenato/Hets_Fork | OWL2/ColimSign.hs | gpl-2.0 | 4,345 | 0 | 20 | 1,422 | 1,178 | 623 | 555 | 85 | 1 |
--
-- Copyright (c) 2013-2019 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module CGrep.Distance (distance, (~==)) where
-- from http://www.haskell.org/haskellwiki/Edit_distance
--
distance :: Eq a => [a] -> [a] -> Int
distance a b
= last (if lab == 0 then mainDiag
else if lab > 0 then lowers !! (lab - 1)
else {- < 0 -} uppers !! (-1 - lab))
where mainDiag = oneDiag a b (head uppers) (-1 : head lowers)
uppers = eachDiag a b (mainDiag : uppers) -- upper diagonals
lowers = eachDiag b a (mainDiag : lowers) -- lower diagonals
eachDiag _a [] _diags = []
eachDiag a' (_bch:bs) (lastDiag:diags) = oneDiag a' bs nextDiag lastDiag : eachDiag a' bs diags
where nextDiag = head (tail diags)
eachDiag _ _ [] = undefined -- the original implementation does not cover this case...
oneDiag a' b' diagAbove diagBelow = thisdiag
where doDiag [] _b _nw _n _w = []
doDiag _a [] _nw _n _w = []
doDiag (ach:as) (bch:bs) nw n w = me : doDiag as bs me (tail n) (tail w)
where me = if ach == bch then nw else 1 + min3 (head w) nw (head n)
firstelt = 1 + head diagBelow
thisdiag = firstelt : doDiag a' b' firstelt diagAbove (tail diagBelow)
lab = length a - length b
min3 x y z = if x < y then x else min y z
(~==) :: String -> String -> Bool
a ~== b | len < 5 = dist < 3
| otherwise = dist < (len * 40 `div` 100)
where len = fromIntegral (length a `min` length b)
dist = distance a b
{-# INLINE (~==) #-}
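-- Illustrative usage (added sketch, not part of the original module):
-- "kitten" and "sitten" differ by a single substitution, so 'distance'
-- yields 1 and, with the 40% threshold above, '(~==)' accepts the pair.
_distanceExample :: (Int, Bool)
_distanceExample = (distance "kitten" "sitten", "kitten" ~== "sitten")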
| awgn/cgrep | src/CGrep/Distance.hs | gpl-2.0 | 2,361 | 0 | 15 | 677 | 632 | 339 | 293 | 28 | 9 |
module Test where
{
import PopGen;
import Probability;
get_observed_alleles file = map list_from_vector $ list_from_vector $ read_phase_file file;
filename = "/home/bredelings/Reports/Kmar/TwinCays2005a.phase1.infile";
n = 5;
note mean ~ iid(n, gamma(0.5,0.5) );
note sigmaOverMu ~ iid(n, gamma(1.05,0.1) );
alpha = 1.0;
note p ~ symmetric_dirichlet (n, alpha/(intToDouble n));
data1 = get_observed_alleles filename;
n_loci = length data1;
n_individuals = (length (data1!!0))/2;
note category ~ iid(n_loci, categorical p);
note z ~ iid(n_loci, normal(0.0, 1.0));
safe_exp x = if (x < (-20.0)) then
exp (-20.0);
else if (x > 20.0) then
exp 20.0;
else
exp x;
theta = [ mean!!k * safe_exp (z!!i * sigmaOverMu!!k) | i <- take n_loci [0..], let {k=category!!i}];
theta_effective = map (*(2.0-s)) theta;
note theta_example ~ mixture [ (p!!i, logNormal(log(mean!!i),sigmaOverMu!!i)) | i <- take n [0..] ];
note s ~ uniform(0.0, 1.0);
note t' ~ iid(n_individuals, exponential (-1.0/log s));
t = map truncate t';
note i ~ iid(n_loci, plate (n_individuals,\k->bernoulli (1.0-0.5**t!!k)) );
note data data1 ~ plate (n_loci, \l -> afs2 (theta_effective!!l,i!!l));
note MakeLogger p;
note MakeLogger theta;
note MakeLogger t;
note MakeMove (sum_out_coal t!0 i!0);
} | bredelings/BAli-Phy | tests/PartialSelfing/diploid.pure_herm.multi_theta.TC.hs | gpl-2.0 | 1,379 | 29 | 15 | 303 | 593 | 326 | 267 | -1 | -1 |
{- Piffle, Copyright (C) 2007, Jaap Weel. This program is free
software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option)
any later version. This program is distributed in the hope that it
will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details. You should
have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -}
module PiffleAst(
Exp(..),
Declaration(..),
File(..),
module Common
) where
import Position
import Common
-- PIFFLE AST --------------------------------------------------------
{- There's more in Common.hs, where those elements of the AST are
defined that Piffle has in common with the IR language. -}
{- Expressions -}
data Exp = Literal Pos Lit
| Variable Pos Ident
| Binop Pos Exp Binop Exp
| Unop Pos Unop Exp
| Cast Pos Exp AtomicType
| Index Pos Exp Exp
| Apply Pos Ident [Exp]
| Seq Pos [Declaration] [Exp] Exp
| If Pos Exp Exp Exp
| ForIn Pos Ident Exp Exp Exp
| ForFromTo Pos Ident Inty Inty Exp Exp
deriving (Show)
{- Declarations -}
data Declaration = Defvar Pos Typing
| Defun Pos Typing [Typing] Exp
deriving (Show)
{- An entire source file -}
data File = File [Declaration]
deriving(Show)
| jaapweel/piffle | src/PiffleAst.hs | gpl-2.0 | 1,796 | 0 | 7 | 535 | 225 | 134 | 91 | 24 | 0 |
{-# OPTIONS -fglasgow-exts #-}
----------------------------------------------------------------------------
-- |
-- Module : Text.XML.Schema.Parser
-- Copyright : (c) Simon Foster 2004
-- License : GPL version 2 (see COPYING)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (ghc >= 6 only)
--
-- A Parser for XML Schema.
--
-- @This file is part of HAIFA.@
--
-- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the
-- GNU General Public License as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.@
--
-- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
-- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.@
--
-- @You should have received a copy of the GNU General Public License along with HAIFA; if not,
-- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@
----------------------------------------------------------------------------
module Text.XML.Schema.Parser where
import Control.Monad
import Control.Monad.Error
import Constants
import Governor
import Utils
import Text.XML.DOMParser hiding (trace)
import Network.URI
import qualified Char
import List
import Maybe
import Data.FiniteMap
import Text.Regex
import Debug.Trace
import Text.XML.Schema.Structure
import Text.XML.Schema.BasicTypes
msum1 :: (MonadPlus m) => [a -> m b] -> (a -> m b)
msum1 fs = \x -> msum (map ($ x) fs)
msum2 :: (MonadPlus m) => [a -> b -> m c] -> (a -> b -> m c)
msum2 fs = \x -> \y -> msum (map (\f -> f x y) fs)
-- | This combinator allows the passing of both the XmlTree and the NamespaceTable into the first
-- operation silently. Secondly it allows the second argument to be a non-monadic constructor.
-- I know this is a weird combinator, but it allows my code to be much more concise, since almost
-- all the operations take the NST and the Tree and produce a value which then may need constructing.
(>#>) :: Monad m => (a -> b -> m c) -> (c -> d) -> a -> b -> m d
f >#> g = \x -> \y -> f x y >>= (return . g)
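-- For instance, @parseAttribute >#> A_Attr@ (as used in 'parseAorAG' below)
-- runs 'parseAttribute' on the namespace table and tree and then wraps the
-- parsed attribute in the plain 'A_Attr' constructor without leaving the monad.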
type XParser a = (MonadPlus m) => NamespaceTable -> XmlTree -> m a
parseElement :: XParser Element
parseElement nst t = checkTop "element" t $
let a = getAttrs ["abstract", "block", "default", "final", "fixed", "form", "id", "maxOccurs", "minOccurs", "name", "nillable", "ref", "substitutionGroup", "type"] t in do
-- k <- mapM' (parseKeyData nst) (getChildren t)
return $ Elem
(parseBool False (a!!0))
(if ((a!!1)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!1) ' '))
(mNull' (a!!2))
(if ((a!!3)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!3) ' '))
(mNull' (a!!4))
(((nmlz $ a!!5) == "qualified") ? (Q_Qualified, Q_Unqualified))
(mNull (a!!6) newID)
(fromMaybe 1 $ readMaybe (a!!7))
(fromMaybe 1 $ readMaybe (a!!8))
(mNull (a!!9) newNCName)
(parseBool False (a!!10))
(mNull (a!!11) (qn nst))
(mNull (a!!12) (qn nst))
(mNull (a!!13) (qn nst))
(msum $ map (msum1 [parseSimpleType nst >@> (return . Left), parseComplexType nst >@> (return . Right)]) (getChildren t))
{- ((hasLP "simpleType" t) ?> (Just . Left . parseSimpleType nst . head,
(hasLP "complexType" t) ?> (Just . Right . parseComplexType nst . head, Nothing))) -}
(mapM' (parseKeyData nst) (getChildren t))
parseKeyData :: XParser KeyData
parseKeyData = msum2 [parseUnique, parseKey, parseKeyRef]
parseUnique :: XParser KeyData
parseUnique nst t = checkTop "unique" t $
let a = getAttrs ["id", "name"] t; c=(getChildren t) in do
s <- (msum $ map (parseSelector nst) c)
f <- (mapM' (parseField nst) c)
return $ Unique
(mNull (a!!0) newID)
(mNull (a!!1) newNCName)
s
f
parseKey :: XParser KeyData
parseKey nst t = checkTop "key" t $
let a = getAttrs ["id", "name"] t; c=(getChildren t) in do
s <- (msum $ map (parseSelector nst) c)
f <- (mapM' (parseField nst) c)
return $ Key
(mNull (a!!0) newID)
(mNull (a!!1) newNCName)
s
f
parseKeyRef :: XParser KeyData
parseKeyRef nst t = checkTop "keyref" t $
let a = getAttrs ["id", "name", "refer"] t; c=(getChildren t) in do
s <- (msum $ map (parseSelector nst) c)
f <- (mapM' (parseField nst) c)
return $ KeyRef
(mNull (a!!0) newID)
(mNull (a!!1) newNCName)
(mNull (a!!2) (qn nst))
s
f
parseSelector :: XParser Selector
parseSelector nst t = checkTop "selector" t $
let a = getAttrs ["id", "xpath"] t in
return $ Selector (mNull (a!!0) newID) (a!!1)
parseField :: XParser Field
parseField nst t = checkTop "field" t $
let a = getAttrs ["id", "xpath"] t in
return $ Field (mNull (a!!0) newID) (a!!1)
parseAttribute :: XParser Attribute
parseAttribute nst t = checkTop "attribute" t $
let a = getAttrs ["default", "fixed", "form", "id", "name", "ref", "type", "use"] t in return $ Attr
(mNull' (a!!0))
(mNull' (a!!1))
(((==) "qualified" $ nmlz (a!!2)) ? (Q_Qualified, Q_Unqualified))
(mNull (a!!3) newID)
(mNull (a!!4) newNCName)
(mNull (a!!5) (qn nst))
(mNull (a!!6) (qn nst))
(fromMaybe U_Optional $ lookup (nmlz $ (a!!7)) useMap)
(msum $ map (parseSimpleType nst) (getChildren t))
--(((getChildren .> hasLocalPart "simpleType") t) ?> (Just . parseSimpleType nst . head, Nothing))
parseAorAG :: XParser AorAG
parseAorAG = msum2 [parseAttribute >#> A_Attr, parseAttributeGroup >#> A_AG]
parseAttributeGroup :: XParser AttributeGroup
parseAttributeGroup nst t = checkTop "attributeGroup" t $
let a = getAttrs ["id", "name", "ref"] t in do
g <- mapM' (parseAorAG nst) (getChildren t)
return $ AttrGroup
(mNull (a!!0) newID)
(mNull (a!!1) newNCName)
(mNull (a!!2) (qn nst))
g
(msum $ map (parseAnyAttribute nst) (getChildren t))
--(((getChildren .> hasLP "anyAttribute") t) ?> (parseAnyAttribute nst . head, Nothing))
parseAnyAttribute :: XParser AnyAttribute
parseAnyAttribute nst t = checkTop "anyAttribute" t $
let a = getAttrs ["id", "namespace", "processContents"] t in return $ AnyAttr
(mNull (a!!0) newID)
(case (a!!1) of
"##any" -> S2_Any
"##other" -> S2_Other
x -> S2_Sel $ map (\x -> case x of
"##targetNamespace" -> N_TargetNamespace
"##local" -> N_Local
a -> N_AnyURI $ fromJust $ parseURI a) (delimit x ' '))
-- FIXME : Rule #86786; Never ever use fromJust if you cannot guarantee valid input.
(fromMaybe P_Strict $ lookup (nmlz (a!!2)) [("lax", P_Lax), ("skip", P_Skip), ("strict", P_Strict)])
parseSimpleType :: XParser SimpleType
parseSimpleType nst t = checkTop "simpleType" t $
let a = getAttrs ["final", "id", "name"] t in return $ Simp
(if ((a!!0)=="#all")
then S_All
else S_Sel $ mapsTo lurMap (delimit (a!!0) ' '))
(mNull (a!!1) newNCName)
(mNull (a!!2) newID)
(getOpt t)
where
getOpt = msum . map (msum1 [getRestr, getList, getUnion]) . getChildren
getRestr t = (parseSimpRestriction nst t) >>= return . S_Restr
getList t = checkTop "list" t $ let a = getAttrs ["id", "itemType"] t in return $ S_List
(mNull (a!!0) newID)
(mNull (a!!1) (qn nst))
(msum $ map (parseSimpleType nst) (getChildren t))
-- (parseSimpleType nst $ head ((getChildren .> hasLP "simpleType") t))
getUnion t = checkTop "union" t $ let a = getAttrs ["id", "memberTypes"] t in do
u <- (mapM' (parseSimpleType nst) (getChildren t))
return $ S_Union
(mNull (a!!0) newID)
(map (qn nst) $ delimit (a!!1) ' ')
u
parseSimpExtension :: XParser Extension
parseSimpExtension nst t = checkTop "extension" t $
let a = getAttrs ["base", "id"] t in do
e <- mapM' (parseAorAG nst) (getChildren t)
return $ E_SimpExt
(mNull (a!!0) (qn nst))
(mNull (a!!1) newID)
e
(msum $ map (parseAnyAttribute nst) (getChildren t))
--(((getChildren .> hasLP "anyAttribute") t) ?> (parseAnyAttribute nst . head, Nothing))
parseSimpRestriction :: XParser Restriction
parseSimpRestriction nst t = checkTop "restriction" t $
let a = getAttrs ["base", "id"] t in do
x <- mapM' (parseAorAG nst) (getChildren t)
return $ R_SimpRestr
(mNull (a!!0) $ qn nst)
(mNull (a!!1) newID)
(msum $ map (parseSimpleType nst) (getChildren t))
(map getRest ((getChildren .> (neg $ (hasLP "annotation" +++ hasLP "simpleType"))) t))
x
(msum $ map (parseAnyAttribute nst) (getChildren t))
where
getRest t = case (localPart $ qnOf t) of
"minExclusive" -> MinExclusive id' value fixed
"minInclusive" -> MinInclusive id' value fixed
"maxExclusive" -> MaxExclusive id' value fixed
"maxInclusive" -> MaxInclusive id' value fixed
"totalDigits" -> TotalDigits id' numVal fixed
"fractionDigits" -> FractionDigits id' numVal fixed
"length" -> Length id' numVal fixed
"minLength" -> MinLength id' numVal fixed
"maxLength" -> MaxLength id' numVal fixed
"enumeration" -> Enumeration id' value
"whiteSpace" -> WhiteSpace id' wsMap fixed
"pattern" -> Pattern id' value
x -> (error $ x ++ " found.")
where
at = getAttrs ["fixed", "id", "value"] t
fixed = ((nmlz $ at!!0)=="true") ? (True, False)
id' = (mNull (at!!1) newID)
value = (at!!2)
numVal = ((readMaybe value) ?> (fromJust, 0))
wsMap = (lookup (nmlz value) [("collapse", Collapse), ("preserve", Preserve), ("replace", Replace)])
parseComplexType :: XParser ComplexType
parseComplexType nst t = checkTop "complexType" t $
let a = getAttrs ["abstract", "block", "final", "id", "mixed", "name"] t in return $ Comp
(parseBool False (a!!0))
(if ((a!!1)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!1) ' '))
(if ((a!!2)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!2) ' '))
(mNull (a!!3) newID)
(parseBool False (a!!4))
(mNull (a!!5) newNCName)
(parseComplexTypeCont nst t)
parseComplexTypeCont :: XParser ComplexTypeCont
parseComplexTypeCont nst t =
(msum $ map (msum2 [parseSimpleContent >#> CT_Simp, parseComplexContent >#> CT_Comp] nst) (getChildren t))
`mplus`
do
a <- mapM' (parseAorAG nst) (getChildren t)
return $ CT_Struct
(msum $ map (parseStruct nst) (getChildren t))
a
(msum $ map (parseAnyAttribute nst) (getChildren t))
{-
((getChildren .> hasLP "simpleContent") t) ?> (CT_Simp . parseSimpleContent nst . head,
((getChildren .> hasLP "complexContent") t) ?> (CT_Comp . parseComplexContent nst . head,
CT_Struct (parseStruct nst (head $ getStruct t)) (map (parseAorAG nst) ((getChildren .> (hasLP "attribute" +++ hasLP "attributeGroup")) t)) (((getChildren .> hasLP "anyAttribute") t) ?> (Just . parseAnyAttribute nst . head, Nothing))))
-}
parseComplexContent :: XParser ComplexContent
parseComplexContent nst t = checkTop "complexContent" t $
let a = getAttrs ["id", "mixed"] t in do
er <- getTopChild t >>= msum2 [parseCompExtension >#> C_Ext, parseCompRestriction >#> C_Res] nst
return $ ComplexContent
(mNull (a!!1) newID)
(parseBool False (a!!1))
er
{-
(funcByLP [("extension", C_Ext . parseCompExtension nst), ("restriction", C_Res . parseCompRestriction nst)] t)
-}
parseCompRestriction :: XParser Restriction
parseCompRestriction nst t = checkTop "restriction" t $
let a = getAttrs ["base", "id"] t in do
x <- mapM' (parseAorAG nst) (getChildren t)
return $ R_CompRestr
(mNull (a!!0) $ qn nst)
(mNull (a!!1) newID)
(msum $ map (parseStruct nst) (getChildren t))
x
(msum $ map (parseAnyAttribute nst) (getChildren t))
parseCompExtension :: XParser Extension
parseCompExtension nst t = checkTop "extension" t $
let a = getAttrs ["base", "id"] t in do
x <- mapM' (parseAorAG nst) (getChildren t)
return $ E_CompExt
(mNull (a!!0) $ qn nst)
(mNull (a!!1) newID)
(msum $ map (parseStruct nst) (getChildren t))
x
(msum $ map (parseAnyAttribute nst) (getChildren t))
parseSimpleContent :: XParser SimpleContent
parseSimpleContent nst t = checkTop "simpleContent" t $
let a = getAttrs ["id"] t in
msum2 [parseSimpExtension >#> Left,
parseSimpRestriction >#> Right] nst t >>=
return . SCont (mNull (a!!0) newID)
{-
(funcByLP [("extension", Left . parseSimpExtension nst),
("restriction", Right . parseSimpRestriction nst)] t)
msum' [(parseAttribute nst >@> (return . A_Attr)),
(parseAttributeGroup nst >@> (return . A_AG ))]
-}
parseGroup :: XParser Group
parseGroup nst t = checkTop "group" t $
let a = getAttrs ["name"] t in
getTopChild t
>>=
msum2 [parseInter >#> G_Inter,
parseChoice >#> G_Choice,
parseSeq >#> G_Seq] nst
>>=
return . Group (mNull (a!!0) newNCName)
parseInter :: XParser Inter
parseInter nst t = checkTop "all" t $
let a = getAttrs ["id", "minOccurs", "maxOccurs"] t in do
e <- mapM' (parseElement nst) $ getChildren t
return $ Inter
(mNull (a!!0) newID)
(fromMaybe 0 $ readMaybe (a!!1))
(readMaybe (a!!2))
e
{-
parseStruct :: NamespaceTable -> XmlTree -> Struct
parseStruct nst t = funcByLP [("all", S_Inter . parseInter nst),
("element", S_Element . parseElement nst),
("group", S_Group . parseGroup nst),
("choice", S_Choice . parseChoice nst),
("sequence", S_Seq . parseSeq nst),
("any", S_Any . parseAny nst)] t
-}
parseStruct :: XParser Struct
parseStruct = msum2 [parseInter >#> S_Inter, parseElement >#> S_Element,
parseGroup >#> S_Group, parseChoice >#> S_Choice,
parseSeq >#> S_Seq, parseAny >#> S_Any]
getStruct :: XmlFilter
getStruct = hasLP "group"+++hasLP "all"+++hasLP "sequence"+++hasLP "choice"
parseChoice :: XParser Choice
parseChoice nst t = checkTop "choice" t $
let a = getAttrs ["id", "minOccurs", "maxOccurs"] t in do
c <- mapM' (parseStruct nst) $ getChildren t
return $ Choice
(mNull (a!!0) newID)
(fromMaybe 0 $ readMaybe (a!!1))
(readMaybe (a!!2))
c
parseSeq :: XParser Seq
parseSeq nst t = checkTop "sequence" t $
let a = getAttrs ["id", "minOccurs", "maxOccurs"] t in do
s <- mapM' (parseStruct nst) $ getChildren t
return $ Seq
(mNull (a!!0) newID)
(fromMaybe 0 $ readMaybe (a!!1))
(readMaybe (a!!2))
s
parseAny:: XParser Any
parseAny nst t = checkTop "any" t $
let a = getAttrs ["id", "maxOccurs", "minOccurs", "namespace", "processContents"] t in return $ Any
(mNull (a!!0) newID)
(readMaybe (a!!1))
(fromMaybe 0 $ readMaybe (a!!2))
(case (a!!3) of
"##any" -> S2_Any
"##other" -> S2_Other
x -> S2_Sel $ map (\x -> case x of
"##targetNamespace" -> N_TargetNamespace
"##local" -> N_Local
a -> N_AnyURI $ fromJust $ parseURI a) (delimit x ' '))
-- FIXME : Rule #86786; Never ever use fromJust
(fromMaybe P_Strict $ lookup (nmlz (a!!4)) [("lax", P_Lax), ("skip", P_Skip), ("strict", P_Strict)])
funcByLP :: [(String, XmlTree -> a)] -> XmlTree -> a
funcByLP ((l,f):t) x = (hasLP l x) ?? (f x, funcByLP t x)
parseBool :: Bool -> String -> Bool
parseBool d s
    | ((nmlz s) == "true")  || (s == "1") = True
    | ((nmlz s) == "false") || (s == "0") = False
    | otherwise = d
checkTop :: (Monad m) => String -> XmlTree -> m a -> m a
checkTop n t f = (hasLP n t) ?? (f, fail $ "Not an XSD " ++ n)
--mapM' f l = map fromJust $ filter isJust $ map f l
{-
sequence' :: Monad m => [m a] -> m [a]
sequence' = foldr mcons (return [])
where mcons p q = p >>= \x -> q >>= \y -> return (x:y)
-}
mapM' f = sequence' . map f
sequence' :: MonadPlus m => [m a] -> m [a]
sequence' [] = fail "Cannot sequence empty list"
sequence' l = foldr1 (liftM2 (++)) $ map (\x -> (x >>= \y -> return [y]) `mplus` (return [])) l
parseXSchema :: FilePath -> IO (Maybe XSchema)
parseXSchema f =
run' $
parseXSD $ newDocument f
parseXSD :: XmlTree -> XState state (Maybe XSchema)
parseXSD t = do
x <- (setSystemParams
.>>
getXmlContents -- parse the document, had to split this up because HXT won't validate the DTD.
.>>
parseXmlDoc
.>>
liftMf transfAllCharRef
.>>
liftF canonicalizeAllNodes -- Remove Header and bits we aren't interested in
.>>
          liftF propagateNamespaces -- Propagate namespace URIs down the tree
.>>
liftF (removeDocWhiteSpace)
.>>
liftF (deep $ hasLP "schema")
) t
if (null x) then return Nothing else do
let nst = getNSTable (head x) ++ [("xml", "http://www.w3.org/XML/1998/namespace")]
-- FIXME: It seems that the XML prefix is not instantiated by the document, and should be done automatically.
-- io $ print nst
-- return Nothing
{-
let types = mapM' (msum2 [parseSimpleType >#> T_Simp,
parseComplexType >#> T_Comp] nst) (getChildren $ head x)
els = msum $ map (parseElement nst) (getChildren $ head x) :: Maybe [Element] -}
-- io $ print $ (mapM' (parseComplexType nst) (getChildren $ head x) :: Either String [ComplexType])
-- let simp = (parseSimpleTypes (head x) nst)
-- io $ print $ xmlTreesToString $ (getChildren .> hasLP "restriction") $ head $ (multi $ hasLP "simpleType") (head x)
-- return $ fail ""
-- let a = getAttrs ["attributeFormDefault", "blockDefault", "elementFormDefault", "finalDefault", "id", "targetNamespace", "version", "lang"] (head x)
return $ parseXSD' nst (head x)
{-return $ XS
(((nmlz $ a!!0) == "qualified") ? (Q_Qualified, Q_Unqualified))
(if ((a!!1)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!1) ' '))
(((nmlz $ a!!2) == "qualified") ? (Q_Qualified, Q_Unqualified))
(if ((a!!3)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!3) ' '))
(mNull (a!!4) newID)
((null (a!!5)) ? (Nothing, parseURI (a!!5)))
(mNull' (a!!6))
(mNull' (a!!7))
[]
types
[]
[]
[]-}
parseXSD' :: XParser XSchema
parseXSD' nst t = checkTop "schema" t $
let a = getAttrs ["attributeFormDefault", "blockDefault", "elementFormDefault", "finalDefault", "id", "targetNamespace", "version", "lang"] t in do
types <- mapM' (msum2 [parseSimpleType >#> T_Simp,
parseComplexType >#> T_Comp] nst) (getChildren t)
els <- mapM' (parseElement nst) (getChildren t)
atr <- mapM' (parseAorAG nst) (getChildren t)
grp <- mapM' (parseGroup nst) (getChildren t)
return $ XS
(((nmlz $ a!!0) == "qualified") ? (Q_Qualified, Q_Unqualified))
(if ((a!!1)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!1) ' '))
(((nmlz $ a!!2) == "qualified") ? (Q_Qualified, Q_Unqualified))
(if ((a!!3)=="#all")
then S_All
else S_Sel $ mapsTo ersMap (delimit (a!!3) ' '))
(mNull (a!!4) newID)
((null (a!!5)) ? (Nothing, parseURI (a!!5)))
(mNull' (a!!6))
(mNull' (a!!7))
[] -- FIXME : Sort out how imports are gonna work.
types
grp
els
atr
| twopoint718/haifa | src/Text/XML/Schema/Parser.hs | gpl-2.0 | 23,094 | 0 | 22 | 8,213 | 6,326 | 3,276 | 3,050 | 362 | 13 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.QuickFuzz.Gen.Media.Wav where
import Data.Default
import qualified Data.Binary as B
import Sound.Wav
import Test.QuickCheck
import Test.QuickFuzz.Derive.Arbitrary
import Test.QuickFuzz.Derive.Show
import Test.QuickFuzz.Gen.FormatInfo
import Test.QuickFuzz.Gen.Base.ByteString
devArbitrary ''WaveFile
wavInfo :: FormatInfo WaveFile NoActions
wavInfo = def
{ encode = B.encode
, random = arbitrary
, ext = "wav"
}
| elopez/QuickFuzz | src/Test/QuickFuzz/Gen/Media/Wav.hs | gpl-3.0 | 584 | 0 | 7 | 85 | 110 | 72 | 38 | 19 | 1 |
module Main where
import System.Environment(getArgs)
import Data.List (delete)
import qualified Data.Map as M
type ChainMap = M.Map Char [Char]
splitBy :: Char -> String -> [String]
splitBy c s =
case dropWhile (== c) s of
"" -> []
s' -> w : splitBy c s''
where (w, s'') = break (== c) s'
buildChainMap :: [String] -> ChainMap
buildChainMap [] = M.empty
buildChainMap (x:xs) = M.insertWith (++) (head x) [last x] $ buildChainMap xs
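-- Length (in words) of the longest chain that can start at character 'x';
-- the edge just taken is removed from the map before recursing, so each
-- word is used at most once.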
followChain :: ChainMap -> Char -> Int
followChain chainMap x =
let followChain' y = followChain (M.adjust (delete y) x chainMap) y
maxChains = map followChain' (M.findWithDefault [] x chainMap)
in if null maxChains then 0 else 1 + maximum maxChains
processLine' :: String -> String
processLine' line =
let chainMap = buildChainMap (splitBy ',' line)
allChains = map (followChain chainMap) (M.keys chainMap)
maxChain = maximum allChains
in if maxChain == 1 then "None"
else show maxChain
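-- Illustrative example (not from the original source): for the input line
-- "soup,sugar,peas,rice" the longest chain is soup -> peas -> sugar -> rice,
-- so processLine' returns "4".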
main :: IO ()
main = do
[inputFile] <- getArgs
input <- readFile inputFile
mapM_ putStrLn $ map processLine' $ lines input
| cryptica/CodeEval | Challenges/135_WordChain/main.hs | gpl-3.0 | 1,146 | 0 | 13 | 277 | 445 | 229 | 216 | 31 | 2 |
{-# LANGUAGE RankNTypes,
ExistentialQuantification,
FlexibleInstances,
MultiParamTypeClasses,
UndecidableInstances #-}
module Unfold ( Unfoldable(..)
, Step(..)
, Unfold
, UnfoldT(..)
, unfold
, unfoldList
, toList
, fromList
) where
import Data.Monoid
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Error
import qualified Data.Foldable as F
import qualified Data.Traversable as T
class Unfoldable u where
next :: u a -> Step (u a) a
data Step s a
= Yield a s
| Done
instance Functor (Step s) where
fmap f (Yield a s) = Yield (f a) s
fmap _ Done = Done
instance F.Foldable (Step s) where
foldMap f (Yield a _) = f a
foldMap _ Done = mempty
instance T.Traversable (Step s) where
traverse f (Yield a s) = Yield <$> f a <*> pure s
traverse _ Done = pure Done
infixl 4 <!>
(<!>) :: (s -> t) -> Step s a -> Step t a
f <!> (Yield a s) = Yield a $ f s
_ <!> Done = Done
data UnfoldS s a
-- | An 'UnfoldS s a' has a state 's' and produces values of type 'a'.
= UnfoldS s (s -> Step s a)
instance Unfoldable (UnfoldS s) where
next (UnfoldS s next') = (\s' -> UnfoldS s' next') <!> next' s
instance Functor (UnfoldS s) where
fmap f (UnfoldS s next') = UnfoldS s $ fmap f . next'
data Unfold a
-- | An 'Unfold' wraps an 'UnfoldS' and hides its state.
= forall u. (Functor u, Unfoldable u) => Unfold (u a)
-- | Smart constructor for an 'Unfold'
unfold :: s -> (s -> Step s a) -> Unfold a
unfold s next' = Unfold $ UnfoldS s next'
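-- Illustrative sketch (not part of the original module): a countdown stream
-- built with 'unfold'; with the instances below, 'toList' of it is [3,2,1].
_countdownExample :: Unfold Int
_countdownExample = unfold 3 step
  where
    step 0 = Done
    step n = Yield n (n - 1)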
instance Eq a => Eq (Unfold a) where
u1 == u2 = toList u1 == toList u2
instance Show a => Show (Unfold a) where
show u = "Unfold " ++ show (toList u)
instance Unfoldable Unfold where
next (Unfold u) = Unfold <!> next u
instance Monoid (Unfold a) where
mempty = unfold () (const Done)
u1 `mappend` u2 =
unfold (Left u1) next'
where
next' (Left u1') =
case next u1' of
Done -> next' (Right u2)
Yield a u1'' -> Yield a (Left u1'')
next' (Right u2') = Right <!> next u2'
instance Functor Unfold where
fmap f (Unfold u) = Unfold (fmap f u)
instance Applicative Unfold where
pure = return
(<*>) = ap
instance Monad Unfold where
return x = unfold True (\s -> if s then Yield x False else Done)
u0 >>= f = unfold (Left u0) next'
where
next' (Left u1) =
case next u1 of
Yield a u1' -> next' $ Right (u1',f a)
Done -> Done
next' (Right (u1,u2)) =
case next u2 of
Yield a u2' -> Yield a (Right (u1,u2'))
Done -> next' $ Left u1
instance F.Foldable Unfold where
foldr f b u =
case next u of
Yield a u' -> F.foldr f (f a b) u'
Done -> b
instance T.Traversable Unfold where
-- (a -> f b) -> Unfold a -> f (Unfold b)
traverse f u =
case next u of
Done -> pure mempty
Yield a u' -> mappend <$> (pure <$> f a) <*> T.traverse f u'
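-- Note: the 'F.Foldable' instance above accumulates like a left fold, so the
-- raw fold produces the elements in reverse order; 'toList' below reverses
-- the result to restore the original yield order.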
toList :: Unfold a -> [a]
toList = reverse . F.foldr (:) []
fromList :: [a] -> Unfold a
fromList = flip unfold unfoldList
unfoldList :: [a] -> Step [a] a
unfoldList (x:xs) = Yield x xs
unfoldList [] = Done
newtype UnfoldT m a = UnfoldT { runUnfoldT :: m (Unfold a) }
instance Functor m => Functor (UnfoldT m) where
fmap f = UnfoldT . fmap (fmap f) . runUnfoldT
instance (Applicative m, Monad m) => Applicative (UnfoldT m) where
pure = return
(<*>) = ap
instance (Applicative m, Monad m) => Monad (UnfoldT m) where
return = UnfoldT . return . return
u >>= f = UnfoldT $ liftM join $ T.traverse (runUnfoldT . f) =<< runUnfoldT u
instance Applicative m => Monoid (UnfoldT m a) where
mempty = UnfoldT $ pure mempty
u1 `mappend` u2 = UnfoldT $ mappend <$> runUnfoldT u1 <*> runUnfoldT u2
instance MonadTrans UnfoldT where
lift m = UnfoldT (return `liftM` m)
mapUnfoldT :: (m (Unfold a) -> n (Unfold b)) -> UnfoldT m a -> UnfoldT n b
mapUnfoldT f = UnfoldT . f . runUnfoldT
instance (Applicative m, MonadReader r m) => MonadReader r (UnfoldT m) where
ask = lift ask
local = mapUnfoldT . local
reader = lift . reader
instance (Applicative m, MonadState s m) => MonadState s (UnfoldT m) where
get = lift get
put = lift . put
state = lift . state
liftCatch :: (m (Unfold a) -> (e -> m (Unfold a)) -> m (Unfold a)) ->
UnfoldT m a -> (e -> UnfoldT m a) -> UnfoldT m a
liftCatch f m h = UnfoldT $ f (runUnfoldT m) (runUnfoldT . h)
instance (Applicative m, MonadError e m) => MonadError e (UnfoldT m) where
throwError = lift . throwError
catchError = liftCatch catchError
| svenkeidel/gnome-citadel | src/Unfold.hs | gpl-3.0 | 4,822 | 0 | 14 | 1,370 | 1,968 | 1,004 | 964 | 128 | 1 |
-- License: GPL v2 or later
module ArrayUtils (amapWithIxs, arraySize) where
import Data.Array.IArray
amapWithIxs :: (IArray a e, IArray a e', Ix i) => (i -> e -> e') -> a i e -> a i e'
amapWithIxs f arr = array (bounds arr) (map (\i -> (i, f i (arr ! i))) (indices arr))
arraySize :: (IArray a e) => a (Int,Int) e -> (Int,Int)
arraySize arr = case bounds arr of ((x1,y1),(x2,y2)) -> (x2 - x1 + 1, y2 - y1 + 1)
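-- A small usage sketch (illustrative; assumes the 'Array' type and 'listArray'
-- re-exported by "Data.Array.IArray"):
--
-- >>> let a = listArray ((0,0),(2,3)) ['a'..] :: Array (Int,Int) Char
-- >>> arraySize a
-- (3,4)
-- >>> amapWithIxs (\(i,_) e -> if even i then e else '.') a ! (1,0)
-- '.'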
| idupree/Pollutocracy | ArrayUtils.hs | gpl-3.0 | 416 | 0 | 13 | 90 | 237 | 130 | 107 | 6 | 1 |
module TagFS (
Route, TagSet,
File(..),
Entry(..),
Dir(..),
buildBaseRoute,
route, route',
routeDir, routeDir',
split,
module TagFS.Tag,
module TagFS.File
) where
import TagFS.Tag (Tag(..), getName, getValue, parseTag, formatTag)
import TagFS.File (File(..))
import qualified TagFS.File as F
import Route hiding (Route, route)
import qualified Route as R
import TagSet hiding (TagSet)
import qualified TagSet as T
import Predicate
import System.FilePath
import Control.Applicative
import Control.Arrow
import Data.Maybe
import Control.Monad.Trans
import Control.Monad.Trans.State
import Data.Set (Set)
import qualified Data.Set as S
{- |
A file in the tagfs file system.
Currently, the following types of files occur:
* @'RegularFile' file@ – A file which points to a real file @file@
* @'TagFile' tags file@ – A virtual file containing the @tags@ of the file @file@.
-}
data Entry = RegularFile File
| TagFile [Tag] File
deriving (Eq, Show, Read)
{- |
A directory in the tagfs file system.
Currently, the following types of directories occur:
* @'TagDir' tag@ – A directory representing the 'Tag' @tag@. Usually, this
will include all files with this tag, probably further filtered.
* @'ExtendedBaseDir' name@ – An 'ExtendedTag' is represented as a path
@name/value@ with two directories. The @value@ directory will be the
'TagDir' of the tag, the @name@ directory is an @'ExtendedBaseDir' name@.
* 'Dir' – A default directory with no special meaning (such as the root
directory).
-}
data Dir = TagDir Tag
| ExtendedBaseDir String
| Dir
deriving (Eq, Ord, Show, Read)
-- | The 'Route.Route' used for the tagfs file system. It uses 'FilePath' for the path
-- segments and @('Maybe' 'Dir')@ as directory tags.
type Route = R.Route FilePath (Maybe Dir)
-- | The 'TagSet.TagSet' used for the tagfs file system. It uses 'FilePath' for
-- file names and 'Tag' for tags.
type TagSet = T.TagSet File Tag
allTags :: Set Tag -> Bool
allTags = const True
data FSStatus = FSStatus { tagSet :: TagSet, visited :: [Tag], predicate :: Set Tag -> Bool }
makeStatus :: TagSet -> FSStatus
makeStatus ts = FSStatus ts [] allTags
type RouteBuilder a = StateT FSStatus Route a
choice_ :: [RouteBuilder a] -> RouteBuilder a
choice_ l = do
state1 <- get
(a, state2) <- lift . choice $ map (`runStateT` state1) l
put state2
return a
modifyVisited :: ([Tag] -> [Tag]) -> RouteBuilder ()
modifyVisited f = modify (\s -> s { visited = f (visited s) })
modifyPredicate :: ((Set Tag -> Bool) -> Set Tag -> Bool) -> RouteBuilder ()
modifyPredicate f = modify (\s -> s { predicate = f (predicate s) })
-- | Creates the complete tagfs file system route for a given 'TagSet'.
buildBaseRoute :: TagSet -> Route Entry
buildBaseRoute ts = evalStateT buildSubRoute (makeStatus ts)
buildSubRoute :: RouteBuilder Entry
buildSubRoute = choice_ [filesRoute, tagDirsRoute, expressionRoutes]
filesRoute :: RouteBuilder Entry
filesRoute = choice_ [regularFileRoute, tagFileRoute]
tagDirsRoute :: RouteBuilder Entry
tagDirsRoute = do
ts <- gets tagSet
visit <- gets visited
let mytags = filter (`notElem` visit) (tags ts)
choice_ (map tagRoute mytags)
tagRoute :: Tag -> RouteBuilder Entry
tagRoute t = choice_ [plainTagRoute t, logicalDirsRoute t]
plainTagRoute :: Tag -> RouteBuilder Entry
plainTagRoute tag = do
tagDir tag
modifyVisited (tag:)
modifyPredicate (\f s -> f s && S.member tag s)
buildSubRoute
logicalDirsRoute :: Tag -> RouteBuilder Entry
logicalDirsRoute tag = choice_
[ logicalTagRoute (\f s -> f s && S.member tag s) "and" tag
, logicalTagRoute (\f s -> f s || S.member tag s) "or" tag
, logicalTagRoute (\f s -> f s && not (S.member tag s)) "not" tag
, lift (match Nothing "and")
>> logicalTagRoute (\f s -> f s && not (S.member tag s)) "not" tag
, lift (match Nothing "or")
>> logicalTagRoute (\f s -> f s || not (S.member tag s)) "not" tag
]
logicalTagRoute :: ((Set Tag -> Bool) -> Set Tag -> Bool) -> String -> Tag
-> RouteBuilder Entry
logicalTagRoute f funcname tag = do
lift (match Nothing funcname)
tagDir tag
modifyVisited (tag:)
modifyPredicate f
buildSubRoute
expressionRoutes :: RouteBuilder Entry
expressionRoutes = choice_
[ expressionRoute (&&)
, lift (match Nothing "not") >> expressionRoute (&&!)
, lift (match Nothing "and") >> expressionRoute (&&)
, lift (match Nothing "or") >> expressionRoute (||)
, lift (match Nothing "and" >> match Nothing "not") >> expressionRoute (&&!)
, lift (match Nothing "or" >> match Nothing "not") >> expressionRoute (||!)
]
where
a &&! b = a && not b
a ||! b = a || not b
readMay :: Read a => String -> Maybe a
readMay s = case [x | (x,t) <- reads s, ("","") <- lex t] of
[x] -> Just x
_ -> Nothing
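-- A small illustration of 'readMay': the whole input must parse, with at most
-- trailing whitespace.
--
-- >>> readMay "42" :: Maybe Int
-- Just 42
-- >>> readMay "42 x" :: Maybe Int
-- Nothing
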
expressionRoute :: (Bool -> Bool -> Bool) -> RouteBuilder Entry
expressionRoute op = do
tree <- lift $ capture [] Nothing (\s -> case s of '?':s' -> parse s'; _ -> Nothing)
let p s c = case c of
Exists v -> Simple v `S.member` s
Is Equals a (StringVal b) -> Extended a b `S.member` s
Is o a b -> findTag s o a b
let m f s = f s `op` eval' (p s) tree
modifyPredicate m
buildSubRoute
where
findTag :: Set Tag -> Op -> Var -> Val -> Bool
findTag s o a (StringVal b) = find (getOp o) (vals Just s a) b
findTag s o a (IntVal b) = find (getOp o) (vals readMay s a) b
vals :: (String -> Maybe a) -> Set Tag -> String -> [a]
vals f s n = mapMaybe p $ S.elems s where
p (Extended n' v) | n' == n = f v
p _ = Nothing
getOp :: Ord a => Op -> a -> a -> Bool
getOp Equals = (==)
getOp Greater = (>)
getOp Less = (<)
getOp GreaterThan = (>=)
getOp LessThan = (<=)
find :: Ord a => (a -> a -> Bool) -> [a] -> a -> Bool
find o l b = any (`o` b) l
tagDir :: Tag -> RouteBuilder ()
tagDir tag@(Simple n) = lift $ match (Just $ TagDir tag) n
tagDir tag@(Extended n v) =
lift (matchHidden (Just $ TagDir tag) (formatTag tag)) <|>
lift (do
match (Just $ ExtendedBaseDir n) n
match (Just $ TagDir tag) v)
fsTreesRoute :: [F.FSTree a] -> Route a
fsTreesRoute = choice . map go where
go (F.Leaf s a) = match Nothing s >> return a
go (F.Branch s l) = do
match Nothing s
fsTreesRoute l
regularFileRoute :: RouteBuilder Entry
regularFileRoute = do
p <- gets predicate
fs <- queryFiles p <$> gets tagSet
--s <- lift $ matchSet Nothing (S.map getPath fs)
f <- lift . fsTreesRoute $ F.makeFSTrees fs
return $ RegularFile f
tagFileExt :: FilePath
tagFileExt = ".tags"
tagFileRoute :: RouteBuilder Entry
{-tagFileRoute = do
file <- lift $ capture [] Nothing getFile
f <- gets predicate
ts <- gets tagSet
let fs = queryFiles f ts
if file `notElem` fs then lift noRoute
else do
let t = queryTags file ts
lift $ maybe noRoute (\t' -> return $ TagFile t' file) t
where
getFile n = case splitExtension n of
(name, ext) | ext == tagFileExt -> Just $ File name
_ -> Nothing-}
tagFileRoute = lift noRoute
-- helper function for easier routing
-- | Splits a 'FilePath' into a list of directories. The path is expected to begin
-- with \'\/\'. It is split on every \'\/\' (except the first one).
split :: FilePath -> [FilePath]
split [] = error "split: empty list"
split (_:ps) = splitDirectories ps
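-- A small illustration (the leading separator is dropped before splitting):
--
-- >>> split "/music/rock"
-- ["music","rock"]
-- >>> split "/"
-- []
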
-- | Routes a given path. Performs splitting with 'split' and wraps
-- 'Route.route', giving a nicer return type.
route :: Route Entry -> FilePath -> Maybe (Either Dir Entry)
route r p = let seg = split p in route' r seg
helper :: Either (Maybe Dir) (a, t) -> Either Dir a
helper (Left e) = Left $ fromMaybe Dir e
helper (Right (a,_)) = Right a
-- | A variant of 'route', which expects the path to be split with 'split' already.
route' :: Route Entry -> [FilePath] -> Maybe (Either Dir Entry)
route' r seg = helper <$> R.route Nothing r seg
-- | Routes a given path with 'Route.getBranch'. Performs splitting with 'split'
-- and wraps the result into a nicer type.
routeDir :: Route Entry -> FilePath -> Maybe (Maybe [(FilePath, Either Dir Entry)])
routeDir r p = let seg = split p in routeDir' r seg
-- | A variant of 'routeDir', which expects the path to be split with 'split' already.
routeDir' :: Route Entry -> [FilePath]
-> Maybe (Maybe [(FilePath, Either Dir Entry)])
routeDir' r seg = case getBranch Nothing r seg of
Nothing -> Nothing
Just (Right _) -> Just Nothing
Just (Left es) -> Just . Just $ map (second helper) es
| ffwng/tagfs | TagFS.hs | gpl-3.0 | 8,351 | 42 | 15 | 1,684 | 2,856 | 1,467 | 1,389 | 169 | 10 |
{-# LANGUAGE TupleSections #-}
module Worlds.OrthogonalPlanes
( orthogonalPlanes
) where
import Linear
import Control.Lens
import Control.Monad.Random
import Worlds.RandomColorBox
import SceneTO
import Transformation
orthogonalPlanes :: (MonadRandom m, Floating a) =>
Int -> m (SceneTO V4 a, Transformation V4 a)
orthogonalPlanes n = (, mempty) <$> randomColorBox n
(\v -> (v^._x == (n `div` 2) && v^._y == (n `div` 2))
|| (v^._z == (n `div` 2) && v^._w == (n `div` 2)) )
| MatthiasHu/4d-labyrinth | src/Worlds/OrthogonalPlanes.hs | gpl-3.0 | 497 | 0 | 16 | 97 | 195 | 112 | 83 | 14 | 1 |
{-# LANGUAGE UnicodeSyntax, NoImplicitPrelude #-}
module Stream
( Stream(..)
, toFname
, readStream
, writeStream
, writeStream'
, writeFileStream
, writeFileStream'
) where
import BasePrelude hiding (readFile, writeFile, hGetContents)
import Prelude.Unicode
import Data.IOData (IOData, readFile, writeFile, hGetContents, hPut)
import System.Directory (createDirectoryIfMissing, doesFileExist, doesDirectoryExist, listDirectory, removeFile, removeDirectory)
import System.FilePath (takeDirectory)
data Stream
= Standard
| File FilePath
deriving (Show, Read)
toFname ∷ Stream → String
toFname Standard = "stdin"
toFname (File fname) = fname
readStream ∷ (MonadIO m, IOData α) ⇒ Stream → m α
readStream Standard = hGetContents stdin
readStream (File fname) = readFile fname
writeStream ∷ (MonadIO m, IOData α) ⇒ Stream → α → m ()
writeStream Standard = hPut stdout
writeStream (File fname) = \text → do
liftIO $ createDirectoryIfMissing True (takeDirectory fname)
writeFile fname text
writeStream' ∷ (MonadIO m, IOData α) ⇒ Stream → Maybe α → m ()
writeStream' Standard Nothing = pure ()
writeStream' Standard (Just text) = hPut stdout text
writeStream' (File fname) Nothing = do
liftIO $ do
exists ← doesFileExist fname
when exists (removeFile fname)
liftIO $ do
let dir = takeDirectory fname
exists ← doesDirectoryExist dir
when exists $ do
isEmpty ← null <$> listDirectory dir
when isEmpty (removeDirectory dir)
writeStream' (File fname) (Just text) = do
liftIO $ createDirectoryIfMissing True (takeDirectory fname)
writeFile fname text
writeFileStream ∷ (MonadIO m, IOData α) ⇒ FilePath → α → m ()
writeFileStream = writeStream ∘ File
writeFileStream' ∷ (MonadIO m, IOData α) ⇒ FilePath → Maybe α → m ()
writeFileStream' = writeStream' ∘ File
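-- A minimal usage sketch (assuming a suitable 'IOData' instance, e.g. for strict
-- 'Data.Text.Text', is available):
--
-- > writeFileStream "out/hello.txt" ("hello\n" :: Text)
-- > (readStream (File "out/hello.txt") :: IO Text) >>= writeStream Standard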
| 39aldo39/klfc | src/Stream.hs | gpl-3.0 | 1,951 | 0 | 15 | 395 | 628 | 321 | 307 | 49 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- | Command line options for tests
module TestOptions
( FullTestConfig (..)
, TestVar
, getOptions
, testTVar
, readTestConfig
) where
import Control.Concurrent.STM.TVar (TVar, newTVarIO)
import qualified Data.Aeson.TH as A
import Data.Default (Default (def))
import qualified Data.Yaml as Y
import Options.Applicative (Parser, (<>), auto, execParser, fullDesc,
help, helper, info, long, metavar, option,
progDesc, short, switch)
import RSCoin.Core (Severity (..))
import System.IO.Unsafe (unsafePerformIO)
data FullTestConfig = FullTestConfig
{ ftcGlobalSeverity :: !Severity
, ftcBankSeverity :: !(Maybe Severity)
, ftcMintetteSeverity :: !(Maybe Severity)
, ftcUserSeverity :: !(Maybe Severity)
, ftcTestingSeverity :: !(Maybe Severity)
, ftcRealMode :: !Bool
} deriving Show
instance Default FullTestConfig where
def =
FullTestConfig
{ ftcGlobalSeverity = Warning
, ftcBankSeverity = def
, ftcMintetteSeverity = def
, ftcUserSeverity = def
, ftcTestingSeverity = Just Warning
, ftcRealMode = False
}
readTestConfig :: FilePath -> IO FullTestConfig
readTestConfig fp =
either (error . ("[FATAL] Failed to parse config: " ++) . show) id <$>
Y.decodeFileEither fp
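-- A hypothetical config file accepted by 'readTestConfig'; the field names follow
-- the Aeson 'defaultOptions' derivation at the bottom of this module, so they
-- match the record selectors:
--
-- > ftcGlobalSeverity: Warning
-- > ftcBankSeverity: null
-- > ftcMintetteSeverity: null
-- > ftcUserSeverity: null
-- > ftcTestingSeverity: Warning
-- > ftcRealMode: false
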
type TestVar = TVar FullTestConfig
testTVar :: TestVar
testTVar = unsafePerformIO (newTVarIO def)
{-# NOINLINE testTVar #-}
optionsParser :: Parser FullTestConfig
optionsParser =
FullTestConfig <$>
option
auto
(short 'g' <>long "global-severity" <>
help "Global logging severity" <>
metavar "SEVERITY") <*>
option
auto
(short 'b' <> long "bank-severity" <>
help "Bank's logging severity" <>
metavar "SEVERITY") <*>
option
auto
(short 'm' <> long "mintette-severity" <>
help "Mintette's logging severity" <>
metavar "SEVERITY") <*>
option
auto
(short 'u' <> long "user-severity" <>
help "User's logging severity" <>
metavar "SEVERITY") <*>
option
auto
(short 't' <> long "testing-severity" <>
help "Testing logging severity" <>
metavar "SEVERITY") <*>
switch
(short 'r' <> long "real-mode" <>
help "Run tests in real mode")
getOptions :: IO FullTestConfig
getOptions = do
execParser $
info
(helper <*> optionsParser)
(fullDesc <> progDesc "RSCoin's testing framework")
$(A.deriveJSON A.defaultOptions ''Severity)
$(A.deriveJSON A.defaultOptions ''FullTestConfig)
| input-output-hk/rscoin-haskell | test/TestOptions.hs | gpl-3.0 | 2,911 | 0 | 16 | 952 | 657 | 354 | 303 | 92 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.DigitalAssetLinks.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.DigitalAssetLinks.Types
(
-- * Service Configuration
digitalAssetLinksService
-- * AndroidAppAsset
, AndroidAppAsset
, androidAppAsset
, aaaPackageName
, aaaCertificate
-- * Statement
, Statement
, statement
, sRelation
, sSource
, sTarget
-- * CheckResponseErrorCodeItem
, CheckResponseErrorCodeItem (..)
-- * Asset
, Asset
, asset
, aAndroidApp
, aWeb
-- * ListResponse
, ListResponse
, listResponse
, lrDebugString
, lrMaxAge
, lrErrorCode
, lrStatements
-- * Xgafv
, Xgafv (..)
-- * CheckResponse
, CheckResponse
, checkResponse
, crDebugString
, crMaxAge
, crErrorCode
, crLinked
-- * WebAsset
, WebAsset
, webAsset
, waSite
-- * CertificateInfo
, CertificateInfo
, certificateInfo
, ciSha256Fingerprint
-- * ListResponseErrorCodeItem
, ListResponseErrorCodeItem (..)
) where
import Network.Google.DigitalAssetLinks.Types.Product
import Network.Google.DigitalAssetLinks.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the Digital Asset Links API. This contains the host and root path used as a starting point for constructing service requests.
digitalAssetLinksService :: ServiceConfig
digitalAssetLinksService
= defaultService (ServiceId "digitalassetlinks:v1")
"digitalassetlinks.googleapis.com"
| brendanhay/gogol | gogol-digitalassetlinks/gen/Network/Google/DigitalAssetLinks/Types.hs | mpl-2.0 | 1,998 | 0 | 7 | 464 | 196 | 139 | 57 | 50 | 1 |
{-# LANGUAGE MultiWayIf #-}
module Deftype (moduleForDeftype, bindingsForRegisteredType) where
import qualified Data.Map as Map
import Data.Maybe
import Debug.Trace
import Obj
import Types
import Util
import Template
import ToTemplate
import Infer
import Concretize
import Polymorphism
import ArrayTemplates
import Lookup
{-# ANN module "HLint: ignore Reduce duplication" #-}
-- | This function creates a "Type Module" with the same name as the type being defined.
-- A type module provides a namespace for all the functions that are automatically
-- generated by a deftype.
moduleForDeftype :: TypeEnv -> Env -> [String] -> String -> [Ty] -> [XObj] -> Maybe Info -> Maybe Env -> Either String (String, XObj, [XObj])
moduleForDeftype typeEnv env pathStrings typeName typeVariables rest i existingEnv =
let typeModuleName = typeName
typeModuleEnv = case existingEnv of
Just env -> env
Nothing -> Env (Map.fromList []) (Just env) (Just typeModuleName) [] ExternalEnv 0
-- The variable 'insidePath' is the path used for all member functions inside the 'typeModule'.
-- For example (module Vec2 [x Float]) creates bindings like Vec2.create, Vec2.x, etc.
insidePath = pathStrings ++ [typeModuleName]
in do validateMemberCases typeEnv typeVariables rest
let structTy = StructTy typeName typeVariables
(okMembers, membersDeps) <- templatesForMembers typeEnv env insidePath structTy rest
okInit <- binderForInit insidePath structTy rest
--okNew <- templateForNew insidePath structTy rest
(okStr, strDeps) <- binderForStrOrPrn typeEnv env insidePath structTy rest "str"
(okPrn, _) <- binderForStrOrPrn typeEnv env insidePath structTy rest "prn"
(okDelete, deleteDeps) <- binderForDelete typeEnv env insidePath structTy rest
(okCopy, copyDeps) <- binderForCopy typeEnv env insidePath structTy rest
let funcs = okInit : okStr : okPrn : okDelete : okCopy : okMembers
moduleEnvWithBindings = addListOfBindings typeModuleEnv funcs
typeModuleXObj = XObj (Mod moduleEnvWithBindings) i (Just ModuleTy)
deps = deleteDeps ++ membersDeps ++ copyDeps ++ strDeps
return (typeModuleName, typeModuleXObj, deps)
-- | Will generate getters/setters/updaters when registering EXTERNAL types.
-- | i.e. (register-type VRUnicornData [hp Int, magic Float])
-- | TODO: Remove duplication shared by moduleForDeftype-function.
bindingsForRegisteredType :: TypeEnv -> Env -> [String] -> String -> [XObj] -> Maybe Info -> Maybe Env -> Either String (String, XObj, [XObj])
bindingsForRegisteredType typeEnv env pathStrings typeName rest i existingEnv =
let typeModuleName = typeName
typeModuleEnv = case existingEnv of
Just env -> env
Nothing -> Env (Map.fromList []) (Just env) (Just typeModuleName) [] ExternalEnv 0
insidePath = pathStrings ++ [typeModuleName]
in do validateMemberCases typeEnv [] rest
let structTy = StructTy typeName []
(binders, deps) <- templatesForMembers typeEnv env insidePath structTy rest
okInit <- binderForInit insidePath structTy rest
--okNew <- templateForNew insidePath structTy rest
(okStr, strDeps) <- binderForStrOrPrn typeEnv env insidePath structTy rest "str"
(okPrn, _) <- binderForStrOrPrn typeEnv env insidePath structTy rest "prn"
let moduleEnvWithBindings = addListOfBindings typeModuleEnv (okInit : okStr : okPrn : binders)
typeModuleXObj = XObj (Mod moduleEnvWithBindings) i (Just ModuleTy)
return (typeModuleName, typeModuleXObj, deps ++ strDeps)
-- | Generate all the templates for ALL the member variables in a deftype declaration.
templatesForMembers :: TypeEnv -> Env -> [String] -> Ty -> [XObj] -> Either String ([(String, Binder)], [XObj])
templatesForMembers typeEnv env insidePath structTy [XObj (Arr membersXobjs) _ _] =
let bindersAndDeps = concatMap (templatesForSingleMember typeEnv env insidePath structTy) (pairwise membersXobjs)
in Right (map fst bindersAndDeps, concatMap snd bindersAndDeps)
templatesForMembers _ _ _ _ _ = Left "Can't create member functions for type with more than one case (yet)."
-- | Generate the templates for a single member in a deftype declaration.
templatesForSingleMember :: TypeEnv -> Env -> [String] -> Ty -> (XObj, XObj) -> [((String, Binder), [XObj])]
templatesForSingleMember typeEnv env insidePath p@(StructTy typeName _) (nameXObj, typeXObj) =
let Just t = xobjToTy typeXObj
memberName = getName nameXObj
in [instanceBinderWithDeps (SymPath insidePath memberName) (FuncTy [RefTy p] (RefTy t)) (templateGetter (mangle memberName) t)
, if isTypeGeneric t
then (templateGenericSetter insidePath p t memberName, [])
else instanceBinderWithDeps (SymPath insidePath ("set-" ++ memberName)) (FuncTy [p, t] p) (templateSetter typeEnv env (mangle memberName) t)
,instanceBinderWithDeps (SymPath insidePath ("set-" ++ memberName ++ "!")) (FuncTy [RefTy (p), t] UnitTy) (templateMutatingSetter typeEnv env (mangle memberName) t)
,instanceBinderWithDeps (SymPath insidePath ("update-" ++ memberName))
(FuncTy [p, RefTy (FuncTy [t] t)] p)
(templateUpdater (mangle memberName))]
-- | The template for getters of a deftype.
templateGetter :: String -> Ty -> Template
templateGetter member memberTy =
Template
(FuncTy [RefTy (VarTy "p")] (VarTy "t"))
(const (toTemplate "$t $NAME($(Ref p) p)"))
(const $
let fixForVoidStarMembers =
if isFunctionType memberTy && (not (isTypeGeneric memberTy))
then "(" ++ tyToCLambdaFix (RefTy memberTy) ++ ")"
else ""
in (toTemplate ("$DECL { return " ++ fixForVoidStarMembers ++ "(&(p->" ++ member ++ ")); }\n")))
(const [])
-- | The template for setters of a concrete deftype.
templateSetter :: TypeEnv -> Env -> String -> Ty -> Template
templateSetter typeEnv env memberName memberTy =
let callToDelete = memberDeletion typeEnv env (memberName, memberTy)
in
Template
(FuncTy [VarTy "p", VarTy "t"] (VarTy "p"))
(const (toTemplate "$p $NAME($p p, $t newValue)"))
(const (toTemplate (unlines ["$DECL {"
,callToDelete
," p." ++ memberName ++ " = newValue;"
," return p;"
,"}\n"])))
(\_ -> if | isManaged typeEnv memberTy -> depsOfPolymorphicFunction typeEnv env [] "delete" (typesDeleterFunctionType memberTy)
| isFunctionType memberTy -> [defineFunctionTypeAlias memberTy]
| otherwise -> [])
-- | The template for setters of a generic deftype.
templateGenericSetter :: [String] -> Ty -> Ty -> String -> (String, Binder)
templateGenericSetter pathStrings originalStructTy memberTy memberName =
defineTypeParameterizedTemplate templateCreator path (FuncTy [originalStructTy, memberTy] originalStructTy)
where path = SymPath pathStrings ("set-" ++ memberName)
t = (FuncTy [VarTy "p", VarTy "t"] (VarTy "p"))
templateCreator = TemplateCreator $
\typeEnv env ->
Template
t
(const (toTemplate "$p $NAME($p p, $t newValue)"))
(\(FuncTy [_, memberTy] _) ->
(let callToDelete = memberDeletion typeEnv env (memberName, memberTy)
in (toTemplate (unlines ["$DECL {"
,callToDelete
," p." ++ memberName ++ " = newValue;"
," return p;"
,"}\n"]))))
(\(FuncTy [_, memberTy] _) ->
if isManaged typeEnv memberTy
then depsOfPolymorphicFunction typeEnv env [] "delete" (typesDeleterFunctionType memberTy)
else [])
-- | The template for mutating setters of a deftype.
templateMutatingSetter :: TypeEnv -> Env -> String -> Ty -> Template
templateMutatingSetter typeEnv env memberName memberTy =
Template
(FuncTy [RefTy (VarTy "p"), VarTy "t"] UnitTy)
(const (toTemplate "void $NAME($p* pRef, $t newValue)"))
(const (toTemplate (unlines ["$DECL {"
," pRef->" ++ memberName ++ " = newValue;"
,"}\n"])))
(const [])
-- | The template for updater functions of a deftype.
-- | (allows changing a variable by passing a transformation function).
templateUpdater :: String -> Template
templateUpdater member =
Template
(FuncTy [VarTy "p", RefTy (FuncTy [VarTy "t"] (VarTy "t"))] (VarTy "p"))
(const (toTemplate "$p $NAME($p p, Lambda *updater)")) -- "Lambda" used to be: $(Fn [t] t)
(const (toTemplate (unlines ["$DECL {"
," p." ++ member ++ " = " ++ (templateCodeForCallingLambda "(*updater)" (FuncTy [VarTy "t"] (VarTy "t")) ["p." ++ member]) ++ ";"
," return p;"
,"}\n"])))
(\(FuncTy [_, RefTy t@(FuncTy fArgTys fRetTy)] _) ->
if isTypeGeneric fRetTy
then []
else [defineFunctionTypeAlias t, defineFunctionTypeAlias (FuncTy (lambdaEnvTy : fArgTys) fRetTy)])
-- | Helper function to create the binder for the 'init' template.
binderForInit :: [String] -> Ty -> [XObj] -> Either String (String, Binder)
binderForInit insidePath structTy@(StructTy typeName _) [XObj (Arr membersXObjs) _ _] =
if isTypeGeneric structTy
then Right (genericInit StackAlloc insidePath structTy membersXObjs)
else Right $ instanceBinder (SymPath insidePath "init")
(FuncTy (initArgListTypes membersXObjs) structTy)
(concreteInit StackAlloc structTy membersXObjs)
-- | Generate a list of types from a deftype declaration.
initArgListTypes :: [XObj] -> [Ty]
initArgListTypes xobjs = map (\(_, x) -> fromJust (xobjToTy x)) (pairwise xobjs)
-- | The template for the 'init' and 'new' functions for a concrete deftype.
concreteInit :: AllocationMode -> Ty -> [XObj] -> Template
concreteInit allocationMode originalStructTy@(StructTy typeName typeVariables) membersXObjs =
Template
(FuncTy (map snd (memberXObjsToPairs membersXObjs)) (VarTy "p"))
(\(FuncTy _ concreteStructTy) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in (toTemplate $ "$p $NAME(" ++ joinWithComma (map memberArg memberPairs) ++ ")"))
(const (tokensForInit allocationMode typeName membersXObjs))
(\(FuncTy _ _) -> [])
-- | The template for the 'init' and 'new' functions for a generic deftype.
genericInit :: AllocationMode -> [String] -> Ty -> [XObj] -> (String, Binder)
genericInit allocationMode pathStrings originalStructTy@(StructTy typeName _) membersXObjs =
defineTypeParameterizedTemplate templateCreator path t
where path = SymPath pathStrings "init"
t = (FuncTy (map snd (memberXObjsToPairs membersXObjs)) originalStructTy)
templateCreator = TemplateCreator $
\typeEnv env ->
Template
(FuncTy (map snd (memberXObjsToPairs membersXObjs)) (VarTy "p"))
(\(FuncTy _ concreteStructTy) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in (toTemplate $ "$p $NAME(" ++ joinWithComma (map memberArg memberPairs) ++ ")"))
(const (tokensForInit allocationMode typeName membersXObjs))
(\(FuncTy _ concreteStructTy) ->
case concretizeType typeEnv concreteStructTy of
Left err -> error (err ++ ". This error should not crash the compiler - change return type to Either here.")
Right ok -> ok
)
tokensForInit :: AllocationMode -> String -> [XObj] -> [Token]
tokensForInit allocationMode typeName membersXObjs =
toTemplate $ unlines [ "$DECL {"
, case allocationMode of
StackAlloc -> " $p instance;"
HeapAlloc -> " $p instance = CARP_MALLOC(sizeof(" ++ typeName ++ "));"
, joinWith "\n" (map (memberAssignment allocationMode) (memberXObjsToPairs membersXObjs))
, " return instance;"
, "}"]
-- | Creates the C code for an arg to the init function.
-- | i.e. "(deftype A [x Int])" will generate "int x" which
-- | will be used in the init function like this: "A_init(int x)"
memberArg :: (String, Ty) -> String
memberArg (memberName, memberTy) =
tyToCLambdaFix (templitizeTy memberTy) ++ " " ++ memberName
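-- Following the example above, memberArg ("x", IntTy) should render as "int x"
-- (assuming 'tyToCLambdaFix' maps IntTy to "int").
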
-- | If the type is just a type variable, create a template type variable by prepending $ to its name.
templitizeTy :: Ty -> Ty
templitizeTy (VarTy vt) = VarTy ("$" ++ vt)
templitizeTy (FuncTy argTys retTy) = FuncTy (map templitizeTy argTys) (templitizeTy retTy)
templitizeTy (StructTy name tys) = StructTy name (map templitizeTy tys)
templitizeTy (RefTy t) = RefTy (templitizeTy t)
templitizeTy (PointerTy t) = PointerTy (templitizeTy t)
templitizeTy t = t
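-- For instance, templitizeTy (FuncTy [VarTy "a"] (VarTy "b")) gives
-- FuncTy [VarTy "$a"] (VarTy "$b"), while fully concrete types pass through unchanged.
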
-- | Helper function to create the binder for the 'str' template.
binderForStrOrPrn :: TypeEnv -> Env -> [String] -> Ty -> [XObj] -> String -> Either String ((String, Binder), [XObj])
binderForStrOrPrn typeEnv env insidePath structTy@(StructTy typeName _) [XObj (Arr membersXObjs) _ _] strOrPrn =
if isTypeGeneric structTy
then Right (genericStr insidePath structTy membersXObjs strOrPrn, [])
else Right (instanceBinderWithDeps (SymPath insidePath strOrPrn)
(FuncTy [RefTy structTy] StringTy)
(concreteStr typeEnv env structTy (memberXObjsToPairs membersXObjs) strOrPrn))
-- | The template for the 'str' function for a concrete deftype.
concreteStr :: TypeEnv -> Env -> Ty -> [(String, Ty)] -> String -> Template
concreteStr typeEnv env concreteStructTy@(StructTy typeName _) memberPairs strOrPrn =
Template
(FuncTy [RefTy concreteStructTy] StringTy)
(\(FuncTy [RefTy structTy] StringTy) -> (toTemplate $ "String $NAME(" ++ tyToCLambdaFix structTy ++ " *p)"))
(\(FuncTy [RefTy structTy@(StructTy _ concreteMemberTys)] StringTy) ->
(tokensForStr typeEnv env typeName memberPairs concreteStructTy))
(\(ft@(FuncTy [RefTy structTy@(StructTy _ concreteMemberTys)] StringTy)) ->
concatMap (depsOfPolymorphicFunction typeEnv env [] "prn" . typesStrFunctionType typeEnv)
(filter (\t -> (not . isExternalType typeEnv) t && (not . isFullyGenericType) t)
(map snd memberPairs)))
-- | The template for the 'str' function for a generic deftype.
genericStr :: [String] -> Ty -> [XObj] -> String -> (String, Binder)
genericStr pathStrings originalStructTy@(StructTy typeName varTys) membersXObjs strOrPrn =
defineTypeParameterizedTemplate templateCreator path t
where path = SymPath pathStrings strOrPrn
t = FuncTy [(RefTy originalStructTy)] StringTy
members = memberXObjsToPairs membersXObjs
templateCreator = TemplateCreator $
\typeEnv env ->
Template
t
(\(FuncTy [RefTy concreteStructTy] StringTy) ->
(toTemplate $ "String $NAME(" ++ tyToCLambdaFix concreteStructTy ++ " *p)"))
(\(FuncTy [RefTy concreteStructTy@(StructTy _ concreteMemberTys)] StringTy) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in (tokensForStr typeEnv env typeName memberPairs concreteStructTy))
(\(ft@(FuncTy [RefTy concreteStructTy@(StructTy _ concreteMemberTys)] StringTy)) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in concatMap (depsOfPolymorphicFunction typeEnv env [] "prn" . typesStrFunctionType typeEnv)
(filter (\t -> (not . isExternalType typeEnv) t && (not . isFullyGenericType) t)
(map snd memberPairs))
++
(if isTypeGeneric concreteStructTy then [] else [defineFunctionTypeAlias ft]))
tokensForStr :: TypeEnv -> Env -> String -> [(String, Ty)] -> Ty -> [Token]
tokensForStr typeEnv env typeName memberPairs concreteStructTy =
(toTemplate $ unlines [ "$DECL {"
, " // convert members to String here:"
, " String temp = NULL;"
, " int tempsize = 0;"
, " (void)tempsize; // that way we remove the occasional unused warning "
, calculateStructStrSize typeEnv env memberPairs concreteStructTy
, " String buffer = CARP_MALLOC(size);"
, " String bufferPtr = buffer;"
, ""
, " snprintf(bufferPtr, size, \"(%s \", \"" ++ typeName ++ "\");"
, " bufferPtr += strlen(\"" ++ typeName ++ "\") + 2;\n"
, joinWith "\n" (map (memberPrn typeEnv env) memberPairs)
, " bufferPtr--;"
, " snprintf(bufferPtr, size, \")\");"
, " return buffer;"
, "}"])
-- | Figure out how large the buffer for the string representation of the struct has to be.
calculateStructStrSize :: TypeEnv -> Env -> [(String, Ty)] -> Ty -> String
calculateStructStrSize typeEnv env members structTy@(StructTy name _) =
" int size = snprintf(NULL, 0, \"(%s )\", \"" ++ name ++ "\");\n" ++
unlines (map memberPrnSize members)
where memberPrnSize (memberName, memberTy) =
let refOrNotRefType = if isManaged typeEnv memberTy then RefTy memberTy else memberTy
maybeTakeAddress = if isManaged typeEnv memberTy then "&" else ""
strFuncType = FuncTy [refOrNotRefType] StringTy
in case nameOfPolymorphicFunction typeEnv env strFuncType "prn" of
Just strFunctionPath ->
unlines [" temp = " ++ pathToC strFunctionPath ++ "(" ++ maybeTakeAddress ++ "p->" ++ memberName ++ "); "
, " size += snprintf(NULL, 0, \"%s \", temp);"
, " if(temp) { CARP_FREE(temp); temp = NULL; }"
]
Nothing ->
if isExternalType typeEnv memberTy
then unlines [ " size += snprintf(NULL, 0, \"%p \", p->" ++ memberName ++ ");"
, " if(temp) { CARP_FREE(temp); temp = NULL; }"
]
else " // Failed to find str function for " ++ memberName ++ " : " ++ show memberTy ++ "\n"
-- | Generate C code for converting a member variable to a string and appending it to a buffer.
memberPrn :: TypeEnv -> Env -> (String, Ty) -> String
memberPrn typeEnv env (memberName, memberTy) =
let refOrNotRefType = if isManaged typeEnv memberTy then RefTy memberTy else memberTy
maybeTakeAddress = if isManaged typeEnv memberTy then "&" else ""
strFuncType = FuncTy [refOrNotRefType] StringTy
in case nameOfPolymorphicFunction typeEnv env strFuncType "prn" of
Just strFunctionPath ->
unlines [" temp = " ++ pathToC strFunctionPath ++ "(" ++ maybeTakeAddress ++ "p->" ++ memberName ++ ");"
, " snprintf(bufferPtr, size, \"%s \", temp);"
, " bufferPtr += strlen(temp) + 1;"
, " if(temp) { CARP_FREE(temp); temp = NULL; }"
]
Nothing ->
if isExternalType typeEnv memberTy
then unlines [ " tempsize = snprintf(NULL, 0, \"%p\", p->" ++ memberName ++ ");"
, " temp = malloc(tempsize);"
, " snprintf(temp, tempsize, \"%p\", p->" ++ memberName ++ ");"
, " snprintf(bufferPtr, size, \"%s \", temp);"
, " bufferPtr += strlen(temp) + 1;"
, " if(temp) { CARP_FREE(temp); temp = NULL; }"
]
else " // Failed to find str function for " ++ memberName ++ " : " ++ show memberTy ++ "\n"
-- | Generate C code for assigning to a member variable.
-- | Needs to know if the instance is a pointer or stack variable.
memberAssignment :: AllocationMode -> (String, Ty) -> String
memberAssignment allocationMode (memberName, _) = " instance" ++ sep ++ memberName ++ " = " ++ memberName ++ ";"
where sep = case allocationMode of
StackAlloc -> "."
HeapAlloc -> "->"
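-- For example (illustrative), memberAssignment StackAlloc ("x", IntTy) yields an
-- indented "instance.x = x;", whereas HeapAlloc yields "instance->x = x;".
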
-- | Helper function to create the binder for the 'delete' template.
binderForDelete :: TypeEnv -> Env -> [String] -> Ty -> [XObj] -> Either String ((String, Binder), [XObj])
binderForDelete typeEnv env insidePath structTy@(StructTy typeName _) [XObj (Arr membersXObjs) _ _] =
if isTypeGeneric structTy
then Right (genericDelete insidePath structTy membersXObjs, [])
else Right (instanceBinderWithDeps (SymPath insidePath "delete")
(FuncTy [structTy] UnitTy)
(concreteDelete typeEnv env (memberXObjsToPairs membersXObjs)))
-- | The template for the 'delete' function of a generic deftype.
genericDelete :: [String] -> Ty -> [XObj] -> (String, Binder)
genericDelete pathStrings originalStructTy membersXObjs =
defineTypeParameterizedTemplate templateCreator path (FuncTy [originalStructTy] UnitTy)
where path = SymPath pathStrings "delete"
t = (FuncTy [VarTy "p"] UnitTy)
templateCreator = TemplateCreator $
\typeEnv env ->
Template
t
(const (toTemplate "void $NAME($p p)"))
(\(FuncTy [concreteStructTy] UnitTy) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in (toTemplate $ unlines [ "$DECL {"
, joinWith "\n" (map (memberDeletion typeEnv env) memberPairs)
, "}"]))
(\(FuncTy [concreteStructTy] UnitTy) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in if isTypeGeneric concreteStructTy
then []
else concatMap (depsOfPolymorphicFunction typeEnv env [] "delete" . typesDeleterFunctionType)
(filter (isManaged typeEnv) (map snd memberPairs)))
-- | Helper function to create the binder for the 'copy' template.
binderForCopy :: TypeEnv -> Env -> [String] -> Ty -> [XObj] -> Either String ((String, Binder), [XObj])
binderForCopy typeEnv env insidePath structTy@(StructTy typeName _) [XObj (Arr membersXObjs) _ _] =
if isTypeGeneric structTy
then Right (genericCopy insidePath structTy membersXObjs, [])
else Right (instanceBinderWithDeps (SymPath insidePath "copy")
(FuncTy [RefTy structTy] structTy)
(concreteCopy typeEnv env (memberXObjsToPairs membersXObjs)))
-- | The template for the 'copy' function of a generic deftype.
genericCopy :: [String] -> Ty -> [XObj] -> (String, Binder)
genericCopy pathStrings originalStructTy membersXObjs =
defineTypeParameterizedTemplate templateCreator path (FuncTy [RefTy originalStructTy] originalStructTy)
where path = SymPath pathStrings "copy"
t = (FuncTy [RefTy (VarTy "p")] (VarTy "p"))
templateCreator = TemplateCreator $
\typeEnv env ->
Template
t
(const (toTemplate "$p $NAME($p* pRef)"))
(\(FuncTy [RefTy concreteStructTy] _) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in (tokensForCopy typeEnv env memberPairs))
(\(FuncTy [RefTy concreteStructTy] _) ->
let mappings = unifySignatures originalStructTy concreteStructTy
correctedMembers = replaceGenericTypeSymbolsOnMembers mappings membersXObjs
memberPairs = memberXObjsToPairs correctedMembers
in if isTypeGeneric concreteStructTy
then []
else concatMap (depsOfPolymorphicFunction typeEnv env [] "copy" . typesCopyFunctionType)
(filter (isManaged typeEnv) (map snd memberPairs)))
| eriksvedang/Carp | src/Deftype.hs | mpl-2.0 | 25,489 | 0 | 24 | 6,922 | 6,100 | 3,180 | 2,920 | 363 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Zones.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified Zone resource. Get a list of available zones by
-- making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.zones.get@.
module Network.Google.Resource.Compute.Zones.Get
(
-- * REST Resource
ZonesGetResource
-- * Creating a Request
, zonesGet
, ZonesGet
-- * Request Lenses
, zgProject
, zgZone
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.zones.get@ method which the
-- 'ZonesGet' request conforms to.
type ZonesGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Zone
-- | Returns the specified Zone resource. Get a list of available zones by
-- making a list() request.
--
-- /See:/ 'zonesGet' smart constructor.
data ZonesGet = ZonesGet'
{ _zgProject :: !Text
, _zgZone :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ZonesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'zgProject'
--
-- * 'zgZone'
zonesGet
:: Text -- ^ 'zgProject'
-> Text -- ^ 'zgZone'
-> ZonesGet
zonesGet pZgProject_ pZgZone_ =
ZonesGet'
{ _zgProject = pZgProject_
, _zgZone = pZgZone_
}
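-- Example (illustrative): @zonesGet "my-project" "us-central1-a"@ builds a request
-- value; the 'zgProject' and 'zgZone' lenses below can then inspect or adjust it.
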
-- | Project ID for this request.
zgProject :: Lens' ZonesGet Text
zgProject
= lens _zgProject (\ s a -> s{_zgProject = a})
-- | Name of the zone resource to return.
zgZone :: Lens' ZonesGet Text
zgZone = lens _zgZone (\ s a -> s{_zgZone = a})
instance GoogleRequest ZonesGet where
type Rs ZonesGet = Zone
type Scopes ZonesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient ZonesGet'{..}
= go _zgProject _zgZone (Just AltJSON) computeService
where go
= buildClient (Proxy :: Proxy ZonesGetResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Zones/Get.hs | mpl-2.0 | 3,059 | 0 | 14 | 748 | 389 | 235 | 154 | 61 | 1 |
func = do
Foo { _lstate_indent = _lstate_indent lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
, _lstate_foo = _lstate_foo lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
}
| lspitzner/brittany | data/Test272.hs | agpl-3.0 | 177 | 0 | 9 | 32 | 29 | 15 | 14 | 3 | 1 |
{-# LANGUAGE ScopedTypeVariables, LambdaCase #-}
module Main where {
import Chess;
import System.Environment(getArgs);
import System.IO;
-- import Retrograde;
silly :: IO [String];
silly = getArgs;
main :: IO();
-- main = print $ and $ map (test_retro2 test_directory) $ all_positions test_directory;
-- boardsize 4 = 22 sec
-- boardsize 5 = 3m27s
-- main = do {print $ length $ all_positions test_directory ; mapM_ (print.length) $ until_fixed $ iterate gens gen_0 } ;
-- boardsize 4 = 16 min
-- main = do {let {l = iterate_mapreduce gen_0;};mapM_ (\l2 -> random_entry l2 >>= putStrLn . show_entry test_directory) l;};
--boardsize 5 = 289 min on mkc, added 16gb swap just in case. longest mate was 33.
--main = eval_iterate;
-- board_size 4 = 23 minutes with stalemate
-- main = print $ length $ piece_set2 4 [];
-- size 2 = 11664
-- size 3 = 3790800
-- size 4 = 629236836 -- 4 minutes
-- after flip_color
-- 2 = 5886
-- 3 = 1895400
-- 4 = 314627193 -- 19 minutes, after assert sorted
-- omitting Dabbaba_rider
-- 2 = 2628
-- 3 = 562464
-- 4 = 62432010
-- main = three_pieces_length_check;
-- main = getArgs >>= try_three_pieces . read . head;
{-main = do {
hSetBuffering stdout LineBuffering;
putStrLn "start";
-- rand_three_pieces;
-- print $ length $ three_pieces();
getArgs >>= try_three_pieces . read . head;
};-}
main = do {
hSetBuffering stdout LineBuffering;
putStrLn $ "#my_boardsize " ++ show my_boardsize;
putStrLn $ "#pass_permitted " ++ show pass_permitted;
putStrLn $ "#stalemate is draw " ++ show stalemate_draw;
getArgs >>= \case {
-- | 980 seconds for qr
["longest"] -> show_longest;
["allcpp"] -> putStrLn all_pieces_for_cplusplus;
["v1"] -> verify_piece_locs;
["v2"] -> verify_successors;
["dump"] -> mapM_ (putStrLn . unwords . map show . table_line) $ concat all_list;
["terminal"] -> mapM_ (putStrLn . unwords . map show . table_line) gen_0;
["testretro"] -> print $ and $ map (test_retro2 test_directory) $ all_positions test_directory;
_ -> error "need args";
}};
}
| kenta2/retrograde | main.hs | agpl-3.0 | 2,012 | 0 | 16 | 348 | 314 | 183 | 131 | 22 | 8 |
{-
Copyright (C) 2009 Andrejs Sisojevs <[email protected]>
All rights reserved.
For license and copyright information, see the file COPYRIGHT
-}
--------------------------------------------------------------------------
--------------------------------------------------------------------------
-- | Module around 'pcsi2text' function, which generates a message out of 'PCSI' and 'PCLT_Catalog'.
module Text.PCLT.MakeMessage where
import qualified Data.ByteString.Lazy.UTF8.Unified as Lazy (ByteString)
import qualified Data.ByteString.Lazy.UTF8.Unified as B hiding (ByteString)
import qualified Data.Map as M
import Data.Map (Map, (!))
import Data.MyHelpers
import Data.Typeable
import Text.ConstraintedLBS
import Text.PCLT.Parser.AdvancedSepBy (SeparatedSectorMarker, MarkedChunkLength)
import Text.PCLT.Catalog
import Text.PCLT.CatalogMaths
import Text.PCLT.CommonTypes
import Text.PCLT.Config
import Text.PCLT.MakeCatalog
import Text.PCLT.PCSI
import Text.PCLT.SDL
import Text.PCLT.Template
-- | Error type for 'pcsi2text'.
data PCSI2Text_Error =
RequiredCompositeIsMissing_P2TE RequiredByRequirerCompositeIsMissing_PCLTE
| RequiredCompositeLoclizationIsMissing_P2TE RequiredByRequirerCompositeIsMissing_PCLTE LanguageName
| CompositionCycle_P2TE CompositionCycle_PCLTE
| SDL_DeterminationFailure_P2TE PCLT_ID SDL_DeterminationFailure
-- | This error is possible only if program is wrong.
| UnsupportedMarker_P2TE SeparatedSectorMarker Lazy.ByteString PCLT_ID LanguageName
| NoValueForParameter_P2TE PCLT_ID LanguageName PCLT_ParamKey
-- | Not used, reserved for future versions.
| UnderAccordingParamReparsingFailure_P2TE PCLT_ID LanguageName PCLT_ParamKey ReadPCSCatalogError
-- | Not used, reserved for future versions.
| ReparsingDepthMaxReached_P2TE PCLT_ID LanguageName PCLT_ParamKey ReparsingDepth
-- | Not used, reserved for future versions.
| ReparsingLengthMaxReached_P2TE PCLT_ID LanguageName PCLT_ParamKey ForInstaniationUsedChunkLength
-- | No more free space in resulting 'CLBS'.
| InstaniationLengthMaxReached_P2TE PCLT_ID LanguageName ForInstaniationUsedChunkLength
deriving (Show, Typeable)
-- | Error type for 'givenSDL_statisfies'.
data SDL_DeterminationFailure =
RequiredCompositeIsMissing_SDLDF RequiredByRequirerCompositeIsMissing_PCLTE
-- | The SDL of a template is specified
-- (using 'PCLT_SDL_ToParamCompositeLink') to be the same as that of
-- a template that must have been put under a parameter
-- (using 'PCSI_PV'), but the referenced parameter
-- is missing in the given 'PCSI'.
| MissingParam_SDLByParamCompositeLink_SDLDF PCLT_CompositeKey PCLT_ParamKey
-- | The SDL of a template is specified
-- (using 'PCLT_SDL_ToParamCompositeLink') to be the same as that of
-- a template that must have been put under a parameter
-- (using 'PCSI_PV'), but the referenced parameter
-- value is not constructed using 'PCSI_PV'.
| WrongParamType_SDLByParamCompositeLink_SDLDF PCLT_CompositeKey PCLT_ParamKey
-- | The SDL of a template is specified
-- (using 'PCLT_SDL_ToParamCompositeLink') to be the same as that of
-- a template that must have been put under a parameter
-- (using 'PCSI_PV'), but the 'PCSI' under this
-- parameter references a template unknown to the catalog.
| UnknownComposite_SDLByParamCompositeLink_SDLDF PCLT_CompositeKey PCLT_ParamKey PCLT_CompositeKey
| SDLReferentialCycle_SDLDF PCLT_CompositeKey [PCLT_ID]
| ErrornousSDL_SDLDF PCLT_CompositeKey PCLT_ErrornousSDL
deriving (Show, Typeable)
-- | The bare type of a 'PCSI2Text_Error', with constructor arguments stripped.
data PCSI2Text_Error_Type =
RequiredCompositeIsMissing_P2TET
| RequiredCompositeLoclizationIsMissing_P2TET
| CompositionCycle_P2TET
| SDL_DeterminationFailure_P2TET SDL_DeterminationFailure_Type
| UnsupportedMarker_P2TET
| NoValueForParameter_P2TET
| UnderAccordingParamReparsingFailure_P2TET
| ReparsingDepthMaxReached_P2TET
| ReparsingLengthMaxReached_P2TET
| InstaniationLengthMaxReached_P2TET
-- | The bare type of an 'SDL_DeterminationFailure', with constructor arguments stripped.
data SDL_DeterminationFailure_Type =
RequiredCompositeIsMissing_SDLDFT
| MissingParam_SDLByParamCompositeLink_SDLDFT
| WrongParamType_SDLByParamCompositeLink_SDLDFT
| UnknownComposite_SDLByParamCompositeLink_SDLDFT
| SDLReferentialCycle_SDLDFT
| ErrornousSDL_SDLDFT
p2teType :: PCSI2Text_Error -> PCSI2Text_Error_Type
p2teType e =
case e of
RequiredCompositeIsMissing_P2TE _ -> RequiredCompositeIsMissing_P2TET
RequiredCompositeLoclizationIsMissing_P2TE _ _ -> RequiredCompositeLoclizationIsMissing_P2TET
CompositionCycle_P2TE _ -> CompositionCycle_P2TET
SDL_DeterminationFailure_P2TE _ sdldf -> SDL_DeterminationFailure_P2TET (sdldfType sdldf)
UnsupportedMarker_P2TE _ _ _ _ -> UnsupportedMarker_P2TET
NoValueForParameter_P2TE _ _ _ -> NoValueForParameter_P2TET
UnderAccordingParamReparsingFailure_P2TE _ _ _ _ -> UnderAccordingParamReparsingFailure_P2TET
ReparsingDepthMaxReached_P2TE _ _ _ _ -> ReparsingDepthMaxReached_P2TET
ReparsingLengthMaxReached_P2TE _ _ _ _ -> ReparsingLengthMaxReached_P2TET
InstaniationLengthMaxReached_P2TE _ _ _ -> InstaniationLengthMaxReached_P2TET
sdldfType :: SDL_DeterminationFailure -> SDL_DeterminationFailure_Type
sdldfType e =
case e of
RequiredCompositeIsMissing_SDLDF _ -> RequiredCompositeIsMissing_SDLDFT
MissingParam_SDLByParamCompositeLink_SDLDF _ _ -> MissingParam_SDLByParamCompositeLink_SDLDFT
WrongParamType_SDLByParamCompositeLink_SDLDF _ _ -> WrongParamType_SDLByParamCompositeLink_SDLDFT
UnknownComposite_SDLByParamCompositeLink_SDLDF _ _ _ -> UnknownComposite_SDLByParamCompositeLink_SDLDFT
SDLReferentialCycle_SDLDF _ _ -> SDLReferentialCycle_SDLDFT
ErrornousSDL_SDLDF _ _ -> ErrornousSDL_SDLDFT
-- | Abbreviations for the types of template representation generation errors:
--
-- > RequiredCompositeIsMissing_P2TET -> "CM"
-- > RequiredCompositeLoclizationIsMissing_P2TET -> "CLM"
-- > CompositionCycle_P2TET -> "CC"
-- > SDL_DeterminationFailure_P2TET sdldft -> "SF" ++
-- > case sdldft of
-- > RequiredCompositeIsMissing_SDLDFT -> "(CM)"
-- > MissingParam_SDLByParamCompositeLink_SDLDFT -> "(LMP)"
-- > WrongParamType_SDLByParamCompositeLink_SDLDFT -> "(LWPT)"
-- > UnknownComposite_SDLByParamCompositeLink_SDLDFT -> "(LUC)"
-- > SDLReferentialCycle_SDLDFT -> "(CC)"
-- > ErrornousSDL_SDLDFT -> "(ES)"
-- > UnsupportedMarker_P2TET -> "UM"
-- > NoValueForParameter_P2TET -> "NV"
-- > UnderAccordingParamReparsingFailure_P2TET -> "RF"
-- > ReparsingDepthMaxReached_P2TET -> "RDM"
-- > ReparsingLengthMaxReached_P2TET -> "RLM"
-- > InstaniationLengthMaxReached_P2TET -> "ILM"
shortOf_PCSI2Text_Error :: PCSI2Text_Error -> Lazy.ByteString
shortOf_PCSI2Text_Error e = B.pack $
case p2teType e of
RequiredCompositeIsMissing_P2TET -> "CM"
RequiredCompositeLoclizationIsMissing_P2TET -> "CLM"
CompositionCycle_P2TET -> "CC"
SDL_DeterminationFailure_P2TET sdldft -> "SF" ++
case sdldft of
RequiredCompositeIsMissing_SDLDFT -> "(CM)"
MissingParam_SDLByParamCompositeLink_SDLDFT -> "(LMP)"
WrongParamType_SDLByParamCompositeLink_SDLDFT -> "(LWPT)"
UnknownComposite_SDLByParamCompositeLink_SDLDFT -> "(LUC)"
SDLReferentialCycle_SDLDFT -> "(CC)"
ErrornousSDL_SDLDFT -> "(ES)"
UnsupportedMarker_P2TET -> "UM"
NoValueForParameter_P2TET -> "NV"
UnderAccordingParamReparsingFailure_P2TET -> "RF"
ReparsingDepthMaxReached_P2TET -> "RDM"
ReparsingLengthMaxReached_P2TET -> "RLM"
InstaniationLengthMaxReached_P2TET -> "ILM"
-- | Whenever the representation generator can't render a template due to some
-- error, it inserts (in place of the representation) an error marking
-- ('pcsMarkingErrorPlaceholderWrapper') with an abbreviation (see 'shortOf_PCSI2Text_Error')
-- of the error type and the template ID.
includeAsAnError :: PCLT_InnerConfig -> PCSI2Text_Error -> String -> (Lazy.ByteString, ForInstaniationUsedChunkLength)
includeAsAnError cfg e s = let _err_marker_str = pcsMarkingErrorPlaceholderWrapper cfg
r = B.concat
[ _err_marker_str
, shortOf_PCSI2Text_Error e
, B.pack ("->" ++ s)
, _err_marker_str
]
in (r, B.length r)
-- | A test of whether a given receiver's detalization level is enough
-- to represent a given 'PCSI'.
givenSDL_statisfies :: ShowDetalizationLevel -> PCSI -> PCLT_ShowDetalizationLevel -> PCLT_CatalogMap -> Either SDL_DeterminationFailure Bool
givenSDL_statisfies det_lev pcsi_0 req_sdl_0 cat_map = _givenSDL_statisfies pcsi_0 req_sdl_0 []
where
pcsi_0_id = pcsiTplID pcsi_0
err31 rer red = RequiredCompositeIsMissing_SDLDF $ RequiredByRequirerCompositeIsMissing_PCLTE rer $ RequiredCompositeIsMissing_PCLTE red
err32 i_id p_k = MissingParam_SDLByParamCompositeLink_SDLDF i_id p_k
err33 i_id p_k = WrongParamType_SDLByParamCompositeLink_SDLDF i_id p_k
err34 i_id p_k sc_id = UnknownComposite_SDLByParamCompositeLink_SDLDF i_id p_k sc_id
err35 i_id buf = SDLReferentialCycle_SDLDF i_id buf
err36 i_id esdl = ErrornousSDL_SDLDF i_id esdl
--------------------------------------------------------------------------------------
_givenSDL_statisfies :: PCSI -> PCLT_ShowDetalizationLevel -> [PCLT_ID] -> Either SDL_DeterminationFailure Bool
_givenSDL_statisfies pcsi req_sdl super_buf =
let pcsi_id = pcsiTplID pcsi
new_super_buf = pcsi_id : super_buf
in case pcsi_id `elem` super_buf of
True -> Left (err35 pcsi_id (pcsi_id : super_buf))
False ->
case req_sdl of
PCLT_SDL sdl -> Right (det_lev >= sdl)
PCLT_SDL_ToTemplateLink k ->
case M.lookup k cat_map of
Nothing -> Left $ err31 pcsi_0_id k
Just s_pclt -> _givenSDL_statisfies
(pcsi {pcsiTplID = k} )
(pcltRequiredSDL s_pclt)
new_super_buf
PCLT_SDL_ToParamCompositeLink param_key ->
case M.lookup param_key $ pcsiParamsValsMap pcsi of
Nothing -> Left $ err32 pcsi_id param_key
Just p_val ->
case p_val of
PCSI_PV s_pcsi ->
let s_pcsi_id = pcsiTplID s_pcsi
in case s_pcsi_id `elem` new_super_buf of
True -> Left (err35 s_pcsi_id (s_pcsi_id : new_super_buf))
False ->
case M.lookup (pcsiTplID s_pcsi) cat_map of
Nothing -> Left $ err34 pcsi_id param_key (pcsiTplID s_pcsi)
Just s_pclt -> givenSDL_statisfies
det_lev
s_pcsi
(pcltRequiredSDL s_pclt)
cat_map
_ -> Left (err33 pcsi_id param_key)
PCLT_SDL_Errornous esdl -> Left (err36 pcsi_id esdl)
type SpaceAvailableForPCSIInstaniation = MarkedChunkLength
type ForInstaniationUsedChunkLength = MarkedChunkLength
-- | Wrapper around 'pcsi2text' for cases when a new 'CLBS' for output
-- is to be created. Its maximal length is set to be the same
-- as configured in the parameter 'pcsInstaniationResultMaxSize'.
pcsi2new_text :: PCSI -> (ShowDetalizationLevel, LanguageName) -> PCLT_Catalog -> (StdOut_CLBS, [ErrorWithPCSCatalog PCSI2Text_Error])
pcsi2new_text _pcsi (det_lev, lng) pcs_catalog = pcsi2text (newCLBS $ catInstMaxLen pcs_catalog) _pcsi (det_lev, lng) pcs_catalog
-- | Make a representation out of a 'PCSI' at the specified detalization level,
-- in the specified language, using the specified catalog, and append the result
-- to the specified 'CLBS'.
pcsi2text :: StdOut_CLBS -> PCSI -> (ShowDetalizationLevel, LanguageName) -> PCLT_Catalog -> (StdOut_CLBS, [ErrorWithPCSCatalog PCSI2Text_Error])
pcsi2text init_clbs _pcsi (det_lev, lng) pcs_catalog =
let (clbs, errs_list) = _pcsi2text (pcsiTplID _pcsi) _pcsi gen_cat_map [] (init_clbs, 0)
errs_last_add =
case clbsFinalized_isit clbs of
True -> [err12]
False -> []
in ( clbs
, map (ErrorWithPCSCatalog (pcltcCatalogID pcs_catalog)) (errs_last_add ++ errs_list)
)
where
gen_cat_map = pcltcCatalogMap pcs_catalog
cfg = pcltcInnerConfig pcs_catalog
_allow_untmpl_msgs = pcsAllowUntemplatedMessages cfg
_allow_lnguntmpl_msgs = pcsAllowUntemplatedLocalizationsOfMessages cfg
_show_adhoc_params_emp = pcsShowAdhocParamsInResultOfUntemplated cfg
_insuficient_det_lev_str = pcsInsuficientDetLevelPlaceholder cfg
_insuficient_det_lev_str_len = B.length _insuficient_det_lev_str
_insuficient_det_lev__incl = (_insuficient_det_lev_str, _insuficient_det_lev_str_len)
_inst_max_size = pcsInstaniationResultMaxSize cfg
_reparsing_depth_max = pcsReparsingDepthMax cfg
_reparsable_text_size_max = pcsReparseParameterContentMaxSize cfg
_newline_lbs = pcsNewlineLBS cfg
_newline_lbs_len = B.length _newline_lbs
_allow_sdl_det_fail = pcsAllowFailureToDetermineSDL_parseIdByModusMargin cfg
errMarkStr err key = includeAsAnError cfg err key
err0 msg_id buf = CompositionCycle_P2TE $ CompositionCycle_PCLTE msg_id buf
err1 rer red = RequiredCompositeIsMissing_P2TE (RequiredByRequirerCompositeIsMissing_PCLTE rer $ RequiredCompositeIsMissing_PCLTE red)
err2 rer red = RequiredCompositeLoclizationIsMissing_P2TE (RequiredByRequirerCompositeIsMissing_PCLTE rer $ RequiredCompositeIsMissing_PCLTE red) lng
err3 rer sdl_e = SDL_DeterminationFailure_P2TE rer sdl_e
err6 marker t_chunk tpl_id = UnsupportedMarker_P2TE marker t_chunk tpl_id lng
err8 tpl_id p_k = NoValueForParameter_P2TE tpl_id lng p_k
err9 tpl_id p_k re = UnderAccordingParamReparsingFailure_P2TE tpl_id lng p_k re
err10 tpl_id p_k = ReparsingDepthMaxReached_P2TE tpl_id lng p_k _reparsing_depth_max
err11 tpl_id p_k = ReparsingLengthMaxReached_P2TE tpl_id lng p_k _reparsable_text_size_max
err12 = InstaniationLengthMaxReached_P2TE (pcsiTplID _pcsi) lng (clbsMaxLen init_clbs)
--------------------------------
--------------------------------
_pcsi2text :: PCLT_ID
-> PCSI
-> PCLT_CatalogMap
-> [PCLT_ID]
-> (StdOut_CLBS, ReparsingDepth)
-> (StdOut_CLBS, [PCSI2Text_Error])
_pcsi2text refere_pcsi_id pcsi composites_map super_buf (clbs, reparsing_depth) =
let pcsi_id = pcsiTplID pcsi
pcsi_pvm = pcsiParamsValsMap pcsi
returnJustErr :: PCSI2Text_Error -> String -> (StdOut_CLBS, [PCSI2Text_Error])
returnJustErr _err k = (errMarkStr _err k `addToCLBS_1` clbs, [_err])
returnInsuficientSDL errs_list = (_insuficient_det_lev__incl `addToCLBS_1` clbs, errs_list)
decontexted_msg __req_det_lev =
let _req_det_lev = PCLT_SDL __req_det_lev
err_of_statisfies_0 = givenSDL_statisfies det_lev pcsi _req_det_lev gen_cat_map
(err_of_statisfies_1, errs0) =
case err_of_statisfies_0 of
Right _ -> (err_of_statisfies_0, [])
Left sdl_err -> ( case _allow_sdl_det_fail of
True -> Right (det_lev == InfinitelyBig_SDL)
False -> err_of_statisfies_0
, [err3 pcsi_id sdl_err]
)
in case err_of_statisfies_1 of
Left sdl_err -> returnJustErr (err3 pcsi_id sdl_err) pcsi_id
Right statisfies ->
case _show_adhoc_params_emp && statisfies of -- && (null super_buf)
False -> returnInsuficientSDL errs0
True ->
let _header = B.concat [B.pack pcsi_id, B.pack " {"]
_header_len = B.length _header
clbs2 = (_header, _header_len) `addToCLBS_1` clbs
-- fold_f :: [(CLBS, [PCSI2Text_Error])]
fold_f = (\ (p_key, p_val) (clbs_accum, errs_accum) ->
let (new_clbs_accum, errs_list_addition) =
instaniateParam
p_key
pcsi { pcsiParamsValsMap = M.singleton
p_key $
PVList_PV
[ NewLine_PV
, PlainText_PV (" @@@ " ++ p_key ++ ": ")
, Indented_PV 4 p_val
]
}
(clbs_accum, undefined)
in case clbsFinalized_isit clbs_accum of
True -> (clbs_accum, errs_accum)
False ->
( new_clbs_accum
, errs_accum ++ errs_list_addition
)
)
(clbs3, errs1) = foldr fold_f (clbs2, errs0) (M.toList pcsi_pvm)
in ( addToCLBS_1
(B.pack "}", 1) $
case M.null pcsi_pvm of
False -> (_newline_lbs, _newline_lbs_len) `addToCLBS_1` clbs3
True -> clbs3
, errs1
)
in case pcsi_id `elem` super_buf of
True -> returnJustErr (err0 pcsi_id super_buf) pcsi_id
False ->
case clbsFinalized_isit clbs of
True -> (clbs, [])
False ->
case M.lookup pcsi_id composites_map of
Nothing ->
case _allow_untmpl_msgs of
True -> decontexted_msg InfinitelyBig_SDL
False -> returnJustErr (err1 refere_pcsi_id pcsi_id) pcsi_id
Just pclt ->
case M.lookup lng (pcltLocalizationsMap pclt) of
Nothing -> case _allow_lnguntmpl_msgs of
True -> decontexted_msg InfinitelyBig_SDL
False -> returnJustErr (err2 refere_pcsi_id pcsi_id) pcsi_id
Just ldt ->
let subcomposites = ldtSubcompositesMap ldt
err_of_statisfies_0 = givenSDL_statisfies det_lev pcsi (pcltRequiredSDL pclt) gen_cat_map
(err_of_statisfies_1, errs0) =
case err_of_statisfies_0 of
Right _ -> (err_of_statisfies_0, [])
Left sdl_e -> ( case _allow_sdl_det_fail of
True -> Right (det_lev == InfinitelyBig_SDL)
False -> err_of_statisfies_0
, [err3 pcsi_id sdl_e]
)
in case err_of_statisfies_1 of
Left sdl_e -> returnJustErr (err3 (pcsiTplID pcsi) sdl_e) pcsi_id
Right False -> returnInsuficientSDL errs0
Right True ->
let foldr_f =
(\ (clbs_accum, errs_accum) (ldtm, str, len) ->
let (new_clbs_accum, errs_add) =
instaniateTplChunk
subcomposites
(ldtm, str, len)
pcsi
super_buf
(clbs_accum, reparsing_depth)
in ( new_clbs_accum
, errs_accum ++ errs_add
)
)
in foldl foldr_f (clbs, errs0) (ldtAbstractedString ldt)
where
-----------------------------------------------------
instaniateTplChunk :: PCLT_CatalogMap
-> (PCS_SpecificMarkings, Lazy.ByteString, MarkedChunkLength)
-> PCSI -> [PCLT_ID]
-> (StdOut_CLBS, ReparsingDepth)
-> (StdOut_CLBS, [PCSI2Text_Error])
instaniateTplChunk subcomposites (ldtm, lbs, len) pcsi super_buf (clbs, reparsing_depth) =
let str = B.unpack lbs
in case clbsFinalized_isit clbs of
True -> (clbs, [])
False ->
case ldtm of
PlainText_LngTplM -> ((lbs, len) `addToCLBS_1` clbs, [])
Parameter_LngTplM -> let param_key = str
in instaniateParam param_key pcsi (clbs, reparsing_depth)
Composite_LngTplM -> let sub_pcsi_id = str
sub_pcsi = pcsi { pcsiTplID = sub_pcsi_id }
in _pcsi2text (pcsiTplID pcsi) sub_pcsi subcomposites (pcsiTplID pcsi : super_buf) (clbs, reparsing_depth)
Unsupported_LngTplM ssm ->
let _err = err6 ssm lbs (pcsiTplID pcsi)
in (errMarkStr _err str `addToCLBS_1` clbs, [_err])
------------------------------------------------------
instaniateParam :: PCLT_ParamKey
-> PCSI
-> (StdOut_CLBS, ReparsingDepth)
-> (StdOut_CLBS, [PCSI2Text_Error])
instaniateParam param_key pcsi (clbs, reparsing_depth) =
let pcsi_pvm = pcsiParamsValsMap pcsi
pclt_id = pcsiTplID pcsi
referer_pcsi_id = pclt_id ++ "<p: " ++ param_key ++ ">"
in case M.lookup param_key pcsi_pvm of
Nothing -> let _err = err8 pclt_id param_key
in (errMarkStr _err param_key `addToCLBS_1` clbs, [_err])
Just _pcsi_ipv ->
let _processParamVal :: PCLT_ParamVal -> CLBS -> (CLBS, [PCSI2Text_Error])
_processParamVal pcsi_ipv sub_clbs =
case clbsFinalized_isit sub_clbs of
True -> (sub_clbs, [])
False -> _processParamVal_sub
where
_processParamVal_sub :: (CLBS, [PCSI2Text_Error])
_processParamVal_sub = case pcsi_ipv of
Nothing_PV -> (sub_clbs, [])
NewLine_PV -> ((_newline_lbs, _newline_lbs_len) `addToCLBS_1` sub_clbs, [])
Indented_PV i pcsi_iipv -> let fr_clbs = freeSpaceCLBS sub_clbs
(notind_clbs, _errs_list) = _processParamVal pcsi_iipv fr_clbs
ind_clbs = insertInsteadOf_inCLBS (_newline_lbs, B.concat [_newline_lbs, B.replicate (fromIntegral i) ' ']) notind_clbs
in (ind_clbs `addToCLBS_2` sub_clbs, _errs_list)
{- this is an experimental part Reparsable_PV rp_pv rp_params ->
                                 currently doesn't work              let dest_clbs = newCLBS _reparsable_text_size_max
(clbs_for_reparse, _errs_list_0) =
_processParamVal
rp_pv
                                                                dest_clbs -- in a newer version we must get rid of this workaround by fully exploiting laziness... for that we will need to get rid of all lengths here.
_errs_list_1 =
(++)
_errs_list_0
(case clbsFinalized_isit clbs_for_reparse of
True -> [err11 (pcsiTplID pcsi) param_key]
False -> []
)
reparse_willwe = reparsing_depth < _reparsing_depth_max
(add_to_sub_clbs, add_errs_list) =
case reparse_willwe of
False -> (clbs_for_reparse, _errs_list_1 ++ [err10 pclt_id param_key])
True ->
let rp_pclt_id = "<<REPARSE:" ++ pclt_id ++ "(" ++ param_key ++ ")>>"
err_or_pclt = str2pclt
( rp_pclt_id
, PCLT_SDL Zero_SDL
)
(lng, clbsLBS clbs_for_reparse)
pcs_catalog
in case err_or_pclt of
Left ar_err -> let (ErrorWithPCSCatalog _ re) = ar_err
in (clbs_for_reparse, _errs_list_1 ++ [err9 pclt_id param_key re])
Right re_pclt ->
let (result_clbs, _errs_list_2) =
_pcsi2text
(referer_pcsi_id ++ "<r>")
(thePCSI rp_pclt_id (M.toList rp_params))
(M.insert rp_pclt_id re_pclt gen_cat_map)
[]
(sub_clbs, reparsing_depth + 1)
in (result_clbs, _errs_list_1 ++ _errs_list_2)
in (add_to_sub_clbs `addToCLBS_2` sub_clbs, add_errs_list) -}
PlainText_PV v -> let v_lbs = B.pack v in ((v_lbs, B.length v_lbs) `addToCLBS_1` sub_clbs, [])
PlainTextLBS_PV v -> ((v, B.length v) `addToCLBS_1` sub_clbs, [])
PCSI_PV pcsi_pv -> _pcsi2text referer_pcsi_id pcsi_pv gen_cat_map [] (sub_clbs, reparsing_depth)
PCSIList_PV pcsi_l separator_pv ->
let (separator_clbs, errs_list_0) = _processParamVal separator_pv (freeSpaceCLBS sub_clbs)
(result_clbs, errs_list, _) =
foldl
(\ (clbs_accum0, errs_accum, i) pcsi_fl ->
case clbsFinalized_isit clbs_accum0 of
True -> (clbs_accum0, errs_accum, i + 1)
False -> let clbs_accum2 =
case i > 1 of
True -> separator_clbs `addToCLBS_2` clbs_accum0
False -> clbs_accum0
pcsi_fl2 = pcsi_fl {
pcsiParamsValsMap =
sumPCSI_PVMs
(pcsiParamsValsMap pcsi_fl)
(M.fromList [("__row_idx", PlainText_PV $ show i)])
}
(clbs_accum3, errs_add) = _processParamVal (PCSI_PV pcsi_fl2) clbs_accum2
in (clbs_accum3, errs_accum ++ errs_add, i + 1)
)
(sub_clbs, errs_list_0, 1)
pcsi_l
in (result_clbs, errs_list)
PVList_PV pv_list ->
let (result_clbs, errs_list, _) =
foldl
(\ (clbs_accum, errs_accum, i) pv_fl ->
case clbsFinalized_isit clbs_accum of
True -> (clbs_accum, errs_accum, i+1)
False -> let (clbs_new_accum, errs_add) = _processParamVal pv_fl clbs_accum
in (clbs_new_accum, errs_accum ++ errs_add, i + 1)
)
(sub_clbs, [], 1)
pv_list
in (result_clbs, errs_list)
----------------------------
in _processParamVal _pcsi_ipv clbs
------------------------------------------------------ | Andrey-Sisoyev/haskell-PCLT | Text/PCLT/MakeMessage.hs | lgpl-2.1 | 40,600 | 0 | 45 | 21,732 | 4,363 | 2,318 | 2,045 | 364 | 34 |
import Data.Int
import System.Environment (getArgs)
import Text.Printf
import Vision.Histogram
import Vision.Image
import Vision.Primitive
-- Compares two images by their HSV histograms.
--
-- usage: ./histogram input1.png input2.png
main :: IO ()
main = do
[input1, input2] <- getArgs
-- Loads the images. Automatically infers the format.
io1 <- load Nothing input1
io2 <- load Nothing input2
case (io1, io2) of
(Right img1, Right img2) -> do
let rgb1 = convert img1 :: RGB
rgb2 = convert img2 :: RGB
-- Converts both images to the HSV color space as it gives
-- better results when comparing colors.
hsv1 = convert rgb1 :: HSV
hsv2 = convert rgb2 :: HSV
-- Computes a small histogram so two colors which are similar
-- will be in the same bin.
histSize = Just $ ix3 10 5 5
hist1 = histogram histSize hsv1 :: Histogram DIM3 Int32
hist2 = histogram histSize hsv2 :: Histogram DIM3 Int32
-- Normalizes both histograms as the number of pixels in the two
-- images could be different.
hist1' = normalize 100 hist1 :: Histogram DIM3 Double
hist2' = normalize 100 hist2 :: Histogram DIM3 Double
intersec = compareIntersect hist1' hist2'
printf "The two images share %.2f%% of their colors.\n" intersec
_ -> putStrLn "Error while reading the images."
| TomMD/friday | example/Histogram.hs | lgpl-3.0 | 1,562 | 0 | 16 | 528 | 292 | 153 | 139 | 25 | 2 |
{-# LANGUAGE InstanceSigs #-}
module MonadTrans where
import Control.Monad
import Control.Monad.IO.Class (MonadIO (liftIO))
import Control.Monad.Trans.Class (MonadTrans (lift))
instance MonadTrans IdentityT where
lift :: Monad m => m a -> IdentityT m a
lift = IdentityT
instance MonadTrans MaybeT where
lift :: Monad m => m a -> MaybeT m a
lift ma = MaybeT $ fmap Just ma
instance MonadTrans (ReaderT r) where
-- Equivalent to ReaderT const
lift ma = ReaderT $ \r -> ma
instance MonadTrans (EitherT e) where
-- Pointfree version of: lift ma = EitherT $ fmap Right ma
lift = EitherT . fmap Right
instance MonadTrans (StateT s) where
lift ma = StateT $ \s -> fmap (\a -> (a, s)) ma
instance MonadIO m => MonadIO (IdentityT m) where
liftIO :: IO a -> IdentityT m a
liftIO ioA = IdentityT ma
-- Use m's liftIO to put ioA into m
where
ma = liftIO ioA
instance MonadIO m => MonadIO (EitherT e m) where
liftIO :: IO a -> EitherT e m a
-- This is m's liftIO and EitherT's lift
liftIO = lift . liftIO
-- Equivalent to:
-- liftIO ioA = EitherT $ fmap Right ma
-- where ma = liftIO ioA
instance MonadIO m => MonadIO (MaybeT m) where
liftIO = lift . liftIO
-- Equivalent to:
-- liftIO ioA = MaybeT mMaybeA
-- where mMaybeA = fmap Just ma
-- ma = liftIO ioA
instance MonadIO m => MonadIO (ReaderT r m) where
liftIO ioA = ReaderT rToMa
where
rToMa r = liftIO ioA
instance MonadIO m => MonadIO (StateT s m) where
liftIO ioA = StateT smas
where
smas s = fmap (\a -> (a, s)) ma
ma = liftIO ioA
| dmvianna/haskellbook | src/Ch26-MonadTrans.hs | unlicense | 1,621 | 0 | 11 | 427 | 504 | 263 | 241 | 33 | 0 |
module Main where
import Control.Monad (forM_)
import Data.Char (toLower)
import Data.List (sort)
import qualified Data.Set as Set
import Data.Set (Set)
import System.Environment
import System.Exit
import System.IO
import BV
import Interaction
import Utility
main :: IO ()
main = do
hSetBuffering stdin LineBuffering
hSetBuffering stdout LineBuffering
args <- getArgs
case args of
["train", size, n] -> do
let (size', n') = (read size, read n)
forM_ [1..n'] $ \x -> do
putStrLn "============================="
putStrLn $ "T R A I N I N G #" ++ show x
putStrLn "============================="
trainTest [] size'
["real", size, skip] -> do
Just probs <- myproblems
let (probs', size', skip') = (sort $ filter (\p -> probSize p == size') probs, read size, read skip)
forM_ (drop skip' (zip [1..] probs')) $ \(x, p) -> do
putStrLn "============================="
putStrLn $ "R E A L for Size:" ++ size ++ " #" ++ show x
putStrLn $ "pid = " ++ probId p
putStrLn $ "ops = " ++ show (probOperators p)
putStrLn $ "size = " ++ show (probSize p)
putStrLn "============================="
realTest p
[pid, ops', size'] -> do
let ops = words ops'
size = read size'
if Set.fromList ops `Set.isSubsetOf` allOps
then do
putStrLn "============================="
putStrLn $ "pid = " ++ pid
putStrLn $ "ops = " ++ show ops
putStrLn $ "size = " ++ show size
putStrLn "============================="
guessMania pid ops size
else do
putStrLn $ "unknown operators: " ++ show (Set.toList (Set.fromList ops `Set.difference` allOps))
exitFailure
_ -> do
putStrLn "usage: GuessMania pid \"op1 op2 op3 .. \" size"
putStrLn " GuessMania train size cycle"
putStrLn " GuessMania real size skip"
return ()
allOps :: Set String
allOps =
Set.fromList $
["if0","fold","tfold","bonus"] ++
[render o | o <- [(minBound::Op1) .. maxBound]] ++
[render o | o <- [(minBound::Op2) .. maxBound]]
| msakai/icfpc2013 | GuessMania/Main.hs | bsd-2-clause | 2,159 | 0 | 22 | 606 | 721 | 359 | 362 | 61 | 5 |
{-# LANGUAGE BangPatterns, FlexibleContexts, OverloadedStrings #-}
import Text.Printf
import System.Event.Clock
import qualified Data.Attoparsec as A (parseWith)
import qualified Data.Attoparsec.Char8 as A
import RFC2616
import Control.Exception
import Control.Concurrent.QSemN
import Control.Monad
import Network.Socket hiding (connect, recv)
import System.Console.GetOpt
import Data.Function
import Data.Monoid
import GHC.Conc (numCapabilities)
import Args (ljust, parseArgs, positive, theLast)
import Control.Concurrent (forkIO)
import System.Environment (getArgs)
import qualified Data.ByteString.Char8 as B
import Text.Parsec
import Text.Parsec.String
import Control.Applicative hiding (many, (<|>))
import Data.Char (isSpace)
import System.Event.Thread
import EventSocket
type URL = (String, String, String)
url :: Parser URL
url =
(,,) <$> (string "http://" *> (many . satisfy $ \c -> c /= ':' && c /= '/'))
<*> ((char ':' *> many digit) <|> pure "80")
<*> ((many1 . satisfy $ not . isSpace) <|> pure "/")
urlConnector :: String -> IO (IO (Socket, B.ByteString))
urlConnector urlStr = do
let (host, port, uri) = case parse url "<cmdline>" urlStr of
Left err -> error (show err)
Right req -> req
myHints = defaultHints { addrSocketType = Stream }
(ai:_) <- getAddrInfo (Just myHints) (Just host) (Just port)
return $ do
sock <- socket (addrFamily ai) (addrSocketType ai) (addrProtocol ai)
let req = B.concat ["GET ", B.pack uri, " HTTP/1.1\r\n"
,"Host: ", B.pack host, ":", B.pack port, "\r\n"]
connect sock (addrAddress ai)
return (sock, req)
client ctors reqs = do
forM_ ctors $ \connector -> do
let loop slop !reqno sock reqStart = do
let refill = recv sock 65536
req = B.concat [reqStart, "\r\n"]
sendAll sock req
resp <- (if B.null slop then refill else return slop) >>=
A.parseWith refill RFC2616.response
case resp of
err@(A.Partial _) -> print err
err@(A.Fail bs _ msg) -> print (msg, B.take 10 bs)
A.Done bs (st, chdrs) -> do
let hdrs = map lowerHeader chdrs
close = Header "connection" ["close"]
contentLength = case A.parse A.decimal (B.concat (lookupHeader "content-length" hdrs)) `A.feed` "" of
A.Done _ n -> n
err -> error (show chdrs)
let slurp !n s = do
let len = B.length s
if len == 0 || len >= n
then return $! B.drop n s
else slurp (n-len) =<< recv sock 65536
if B.length bs >= contentLength
then if reqno >= reqs || close `elem` hdrs
then return ()
else loop (B.drop contentLength bs) (reqno+1) sock reqStart
else slurp contentLength bs >>= \s ->
if reqno >= reqs || close `elem` hdrs
then return ()
else loop s (reqno+1) sock reqStart
bracket connector (sClose . fst) . uncurry $ loop "" 1
main = do
(cfg, urls) <- parseArgs defaultConfig defaultOptions =<< getArgs
when (null urls) $ error "no URLs"
ensureIOManagerIsRunning
ctors <- mapM urlConnector urls
let clients = theLast cfgClients cfg
conns = theLast cfgConnections cfg
requests = theLast cfgRequests cfg
total = clients * conns * requests
putStrLn $ "issuing " ++ show total ++ " requests"
sem <- newQSemN 0
start <- getCurrentTime
replicateM_ clients $ do
forkIO $ (client (take conns (cycle ctors)) requests `finally` signalQSemN sem 1)
return ()
waitQSemN sem clients
end <- getCurrentTime
let elapsed = end - start
rate = fromIntegral total / elapsed
printf "%.6g reqs/sec in %.6g secs\n" rate elapsed
------------------------------------------------------------------------
-- Configuration
data Config = Config {
cfgClients :: Last Int
, cfgConnections :: Last Int
, cfgRequests :: Last Int
}
defaultConfig :: Config
defaultConfig = Config {
cfgClients = ljust numCapabilities
, cfgConnections = ljust numCapabilities
, cfgRequests = ljust 1
}
instance Monoid Config where
mempty = Config {
cfgClients = mempty
, cfgConnections = mempty
, cfgRequests = mempty
}
mappend a b = Config {
cfgClients = app cfgClients a b
, cfgConnections = app cfgConnections a b
, cfgRequests = app cfgRequests a b
}
where app :: (Monoid b) => (a -> b) -> a -> a -> b
app = on mappend
defaultOptions :: [OptDescr (IO Config)]
defaultOptions = [
Option ['c'] ["clients"]
(ReqArg (positive "number of concurrent clients" $ \n ->
mempty { cfgClients = n }) "N")
"number of concurrent clients"
, Option ['n'] ["connections"]
(ReqArg (positive "number of connections" $ \n ->
mempty { cfgConnections = n }) "N")
"number of connections"
, Option ['r'] ["requests"]
(ReqArg (positive "number of requests per connection" $ \n ->
mempty { cfgRequests = n }) "N")
"number of requests"
]
| tibbe/event | benchmarks/HttpClient.hs | bsd-2-clause | 5,419 | 0 | 30 | 1,673 | 1,742 | 909 | 833 | 128 | 9 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
import Prelude hiding (catch)
import Control.Exception (SomeException,catch)
import Criterion.Main
import Data.Monoid
import System.Environment
import System.IO
import LogicGrowsOnTrees
import LogicGrowsOnTrees.Checkpoint
import LogicGrowsOnTrees.Utils.PerfectTree (trivialPerfectTree)
import LogicGrowsOnTrees.Utils.WordSum
import qualified LogicGrowsOnTrees.Parallel.Adapter.Processes as Processes
import qualified LogicGrowsOnTrees.Parallel.Adapter.Threads as Threads
import LogicGrowsOnTrees.Parallel.Adapter.Threads (setNumberOfWorkers)
import LogicGrowsOnTrees.Parallel.Common.Worker (exploreTreeGeneric)
import LogicGrowsOnTrees.Parallel.ExplorationMode (AllMode,ExplorationMode(AllMode))
import LogicGrowsOnTrees.Parallel.Main
import LogicGrowsOnTrees.Parallel.Purity (Purity(Pure))
depth = 15
main = do
args ← getArgs
case args of
["worker-bee"] →
Processes.runWorkerUsingHandles
AllMode
Pure
(trivialPerfectTree 2 depth)
stdin
stdout
`catch`
(\(e::SomeException) → error $ "Worker process failed: " ++ show e)
_ → Processes.getProgFilepath >>= defaultMain . benchmarks
benchmarks worker_filepath =
[bench "list" $ nf (getWordSum . mconcat . trivialPerfectTree 2) depth
,bench "tree" $ nf (getWordSum . exploreTree . trivialPerfectTree 2) depth
,bench "tree w/ checkpointing" $ nf (getWordSum . exploreTreeStartingFromCheckpoint Unexplored . trivialPerfectTree 2) depth
,bench "tree using worker" $ exploreTreeGeneric AllMode Pure (trivialPerfectTree 2 depth)
,bench "tree using single thread" $ Threads.exploreTree (setNumberOfWorkers 1) (trivialPerfectTree 2 depth)
,bench "tree using single process" $
Processes.runSupervisor
(AllMode :: ExplorationMode (AllMode WordSum))
worker_filepath
["worker-bee"]
(const $ return ())
mempty
(setNumberOfWorkers 1)
]
| gcross/LogicGrowsOnTrees-processes | benchmarks/benchmark.hs | bsd-2-clause | 2,100 | 0 | 14 | 437 | 470 | 260 | 210 | 47 | 2 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DatatypeContexts #-}
module Control.CP.FD.Interface (
FDSolver,
FDInstance,
(@+),(@-),(@*),(@/),(@%),(!),(@!!),(@..),(@++),size,xfold,xsum,xhead,xtail,list,slice,xmap,cte,
(Control.CP.FD.Interface.@||),
(Control.CP.FD.Interface.@&&),
Control.CP.FD.Interface.inv,
(Control.CP.FD.Interface.@=),
(Control.CP.FD.Interface.@/=),
(Control.CP.FD.Interface.@<),
(Control.CP.FD.Interface.@>),
(Control.CP.FD.Interface.@<=),
(Control.CP.FD.Interface.@>=),
(Control.CP.FD.Interface.@:),
(Control.CP.FD.Interface.@?),
(Control.CP.FD.Interface.@??),
Control.CP.FD.Interface.channel,
val,
-- Control.CP.FD.Interface.newInt, Control.CP.FD.Interface.newBool, Control.CP.FD.Interface.newCol,
Control.CP.FD.Interface.sorted,
Control.CP.FD.Interface.sSorted,
Control.CP.FD.Interface.forall,
Control.CP.FD.Interface.forany,
Control.CP.FD.Interface.loopall,
Control.CP.FD.Interface.allDiff,
Control.CP.FD.Interface.allDiffD,
Control.CP.FD.Interface.loopany,
allin,
asExpr, asCol, Control.CP.FD.Interface.asBool,
colList, labelCol,
ModelInt, ModelCol, ModelBool,
exists, true, false,
-- Modelable,
) where
import Control.CP.FD.FD (FDSolver, FDInstance, FDIntTerm, getColItems)
import qualified Control.CP.FD.Model as Model
import Control.CP.FD.Model (Model, ModelBool, ModelCol, ModelInt, ToModelBool, asBool, asExpr, asCol, cte, newModelTerm, ModelIntArg, ModelBoolArg, ModelColArg)
import qualified Data.Expr.Sugar as Sugar
import Data.Expr.Util
import Data.Expr.Data
import Data.Expr.Sugar ((@+),(@-),(@*),(@/),(@%),(!),(@!!),(@..),(@++),size,xfold,xhead,xtail,slice,xmap,xsum,list)
import Control.CP.Solver
import Control.CP.SearchTree
import Control.CP.EnumTerm
import Control.Monad (ap, liftM)
newtype DummySolver a = DummySolver ()
instance Monad DummySolver where
return = pure
_ >>= _ = DummySolver ()
instance Applicative DummySolver where
pure _ = DummySolver ()
(<*>) = ap
instance Functor DummySolver where
fmap = liftM
data EQHelp b where
EQHelp :: Model.ModelTermType b => ((b -> Model) -> Model) -> EQHelp b
instance Model.ModelTermType t => Term DummySolver t where
type Help DummySolver t = EQHelp t
help _ _ = EQHelp newModelTerm
newvar = DummySolver ()
instance Solver DummySolver where
type Constraint DummySolver = Either Model ()
type Label DummySolver = ()
add _ = DummySolver ()
run _ = error "Attempt to run dummy solver"
mark = DummySolver ()
goto _ = DummySolver ()
newtype Model.ModelTermType t => DummyTerm t = DummyTerm t
-- class (Solver s, Term s ModelBool, Term s ModelInt, Term s ModelCol) => Modelable s where
-- instance Modelable DummySolver where
-- instance FDSolver s => Modelable (FDInstance s) where
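-- treeToModel folds a search tree over the dummy solver into a boolean Model
-- expression: Return becomes true, Fail becomes false, Try becomes
-- disjunction, Add conjoins the posted constraint, and NewVar introduces a
-- fresh model term. Labelled trees are not supported.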
treeToModel :: Tree DummySolver () -> Model
treeToModel (Return _) = BoolConst True
treeToModel (Try a b) = (Sugar.@||) (treeToModel a) (treeToModel b)
treeToModel (Add (Left c) m) = (Sugar.@&&) c (treeToModel m)
treeToModel Fail = BoolConst False
treeToModel (Label _) = error "Cannot turn labelled trees into expressions"
treeToModel (NewVar (f :: t -> Tree DummySolver ())) = case (help ((error "treeToModel undefined 1") :: DummySolver ()) ((error "treeToModel undefined 2") :: t)) of EQHelp ff -> ff (\x -> treeToModel $ f (x :: t))
addM :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => Model -> m ()
addM m = addC $ Left m
infixr 2 @||
(@||) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => Tree DummySolver () -> Tree DummySolver () -> m ()
(@||) a b = addM $ treeToModel $ a \/ b
infixr 3 @&&
(@&&) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => Tree DummySolver () -> Tree DummySolver () -> m ()
(@&&) a b = addM $ treeToModel $ a /\ b
channel :: Tree DummySolver () -> ModelInt
channel a = Sugar.channel $ treeToModel a
inv :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => Tree DummySolver () -> m ()
inv a = addM $ Sugar.inv $ treeToModel a
infix 4 @=, @/=, @<, @>, @<=, @>=
class ModelExprClass a where
(@=) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => a -> a -> m ()
(@/=) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => a -> a -> m ()
instance ModelExprClass ModelInt where
a @= b = addM $ (Sugar.@=) a b
a @/= b = addM $ (Sugar.@/=) a b
instance ModelExprClass ModelCol where
a @= b = addM $ (Sugar.@=) a b
a @/= b = addM $ (Sugar.@/=) a b
instance ModelExprClass ModelBool where
a @= b = addM $ (Sugar.@=) a b
a @/= b = addM $ (Sugar.@/=) a b
instance ModelExprClass (Tree DummySolver ()) where
a @= b = addM $ (Sugar.@=) (treeToModel a) (treeToModel b)
a @/= b = addM $ (Sugar.@/=) (treeToModel a) (treeToModel b)
(@<) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelInt -> ModelInt -> m ()
(@<) a b = addM $ (Sugar.@<) a b
(@>) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelInt -> ModelInt -> m ()
(@>) a b = addM $ (Sugar.@>) a b
(@>=) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelInt -> ModelInt -> m ()
(@>=) a b = addM $ (Sugar.@>=) a b
(@<=) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelInt -> ModelInt -> m ()
(@<=) a b = addM $ (Sugar.@<=) a b
val :: Tree DummySolver () -> ModelInt
val = Sugar.toExpr . treeToModel
{- newBool :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => (ModelBool -> Tree DummySolver a) -> m a
newBool = exists
newInt :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => (ModelInt -> m a) -> m a
newInt = exists
newCol :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => (ModelCol -> m a) -> m a
newCol = exists
-}
asBool :: (FDSolver s, MonadTree m, TreeSolver m ~ FDInstance s, ToModelBool t) => t -> m ()
asBool = addM . Control.CP.FD.Model.asBool
sorted :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> m ()
sorted = addM . Sugar.sorted
sSorted :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> m ()
sSorted = addM . Sugar.sSorted
allDiff :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> m ()
allDiff = addM . Sugar.allDiff
allDiffD :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> m ()
allDiffD = addM . Sugar.allDiffD
mm (nv@(Term tv)) m x =
let tf t = if (t==tv) then x else Term t
tb t = if (Term t==x) then nv else Term t
in boolTransformEx (tf,ColTerm,BoolTerm,tb,ColTerm,BoolTerm) m
forall :: (Term s ModelInt, Term s ModelBool, Term s ModelCol, Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> (ModelInt -> Tree DummySolver ()) -> m ()
-- forall col f = exists $ \nv -> addM $ Sugar.forall col $ mm nv $ treeToModel $ f nv
forall col f = addM $ Sugar.forall col (treeToModel . f)
forany :: (Term s ModelInt, Term s ModelBool, Term s ModelCol, Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> (ModelInt -> Tree DummySolver ()) -> m ()
-- forany col f = exists $ \nv -> addM $ Sugar.forany col $ mm nv $ treeToModel $ f nv
forany col f = addM $ Sugar.forany col (treeToModel . f)
loopall :: (Term s ModelInt, Term s ModelBool, Term s ModelCol, Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => (ModelInt,ModelInt) -> (ModelInt -> Tree DummySolver ()) -> m ()
-- loopall r f = exists $ \nv -> addM $ Sugar.loopall r $ mm nv $ treeToModel $ f nv
loopall r f = addM $ Sugar.loopall r (treeToModel . f)
loopany :: (Term s ModelInt, Term s ModelBool, Term s ModelCol, Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => (ModelInt,ModelInt) -> (ModelInt -> Tree DummySolver ()) -> m ()
-- loopany r f = exists $ \nv -> addM $ Sugar.loopany r $ mm nv $ treeToModel $ f nv
loopany r f = addM $ Sugar.loopany r (treeToModel . f)
colList :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s) => ModelCol -> Int -> m [ModelInt]
colList col len = do
addM $ (Sugar.@=) (size col) (asExpr len)
return $ map (\i -> col!cte i) [0..len-1]
labelCol :: (FDSolver s, MonadTree m, TreeSolver m ~ FDInstance s, EnumTerm s (FDIntTerm s)) => ModelCol -> m [TermBaseType s (FDIntTerm s)]
labelCol col = label $ do
lst <- getColItems col maxBound
return $ do
lsti <- colList col $ length lst
enumerate lsti
assignments lsti
infix 5 @:
(@:) :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s, Sugar.ExprRange ModelIntArg ModelColArg ModelBoolArg r, Term s ModelInt, Term s ModelBool, Term s ModelCol) => ModelInt -> r -> m ()
a @: b = addM $ (Sugar.@:) a b
infix 4 @?
infix 4 @??
a @? (t,f) = (Sugar.@?) (treeToModel a) (t,f)
a @?? (t,f) = addM $ (Sugar.@??) (treeToModel a) (treeToModel t, treeToModel f)
allin :: (Constraint s ~ Either Model q, MonadTree m, TreeSolver m ~ s, Sugar.ExprRange ModelIntArg ModelColArg ModelBoolArg r, Term s ModelInt, Term s ModelBool, Term s ModelCol) => ModelCol -> r -> m ()
allin c b = Control.CP.FD.Interface.forall c $ \x -> addM $ (Sugar.@:) x b
| neothemachine/monadiccp | src/Control/CP/FD/Interface.hs | bsd-3-clause | 9,361 | 0 | 13 | 1,728 | 3,708 | 2,000 | 1,708 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
module Bench where
import Criterion.Main
import Data.Attoparsec.Text as AP
import Data.Text as T
import Control.Applicative
import qualified Text.Syntax.Poly as S
import qualified Control.Isomorphism.Partial.Ext as S
import Control.Isomorphism.Partial.TH (defineIsomorphisms)
import Text.Syntax.Parser.Attoparsec.Text ()
data ParsedData = ParsedData
{ _parsedText1 :: !Text
, _parsedText2 :: !Text
, _parsedText3 :: !Text
} deriving ( Show, Eq )
defineIsomorphisms ''ParsedData
ap_field :: Parser Text -> Parser Text
ap_field p = p <* char '|'
ap_text :: Parser Text
ap_text = AP.takeWhile (\c -> c /= '|')
as_text :: Parser Text
as_text = AP.scan () (\_ c -> if c /= '|' then Just () else Nothing)
ap_parsedData :: Parser ParsedData
ap_parsedData = ParsedData <$> ap_field ap_text <*> ap_field ap_text <*> ap_field ap_text
as_parsedData :: Parser ParsedData
as_parsedData = ParsedData <$> ap_field as_text <*> ap_field as_text <*> ap_field as_text
stringText :: S.Iso String Text
stringText = S.iso T.pack T.unpack
s_text :: (S.Syntax Char delta) => delta Text
s_text = stringText S.<$> S.many fieldChar
where fieldChar :: (S.Syntax Char delta) => delta Char
fieldChar = S.subset ((/=) '|') S.<$> S.token
s_field :: (S.Syntax Char delta) => delta a -> delta a
s_field p = p S.<* (S.this '|' S.<|> S.syntaxError "field delimiter")
s_parsedData :: Parser ParsedData
s_parsedData = parsedData S.<$> ap_field as_text S.<*> ap_field as_text S.<*> ap_field as_text
dataToParse :: Text
dataToParse = T.append large (T.pack "|111111111111111111111111111111111111|русский текст|")
where
large = T.replicate 100 (T.pack "123")
-- Our benchmark harness.
main :: IO ()
main = defaultMain
[ bgroup "parser"
[ bench "parsec opt" $ whnf (\d -> let Right a = parseOnly ap_parsedData d in a) dataToParse
, bench "parsec scan" $ whnf (\d -> let Right a = parseOnly as_parsedData d in a) dataToParse
, bench "parsec syntax" $ whnf (\d -> let Right a = parseOnly s_parsedData d in a) dataToParse
, bench "parsec opt (warm)" $ whnf (\d -> let Right a = parseOnly ap_parsedData d in a) dataToParse
]
]
| schernichkin/exchange | bench/Bench.hs | bsd-3-clause | 2,286 | 0 | 17 | 451 | 760 | 393 | 367 | 53 | 2 |
module Cis194.Week2.LogAnalysisSpec (main, spec) where
import Test.Hspec
import Cis194.Week2.Log
import Cis194.Week2.LogAnalysis
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "LogAnalysis" $ do
it "should parse error lines" $ do
parseMessage "E 2 562 help help" `shouldBe` LogMessage (Error 2) 562 "help help"
it "should parse info lines" $ do
parseMessage "I 29 la la la" `shouldBe` LogMessage Info 29 "la la la"
it "should parse unknown lines" $ do
parseMessage "This is not in the right format" `shouldBe` Unknown "This is not in the right format"
describe "parse" $ do
it "should take a multi-line string and parse out the LogMessages" $ do
let fakeLogFile = "E 2 562 help help \nI 2 hello ma"
parse fakeLogFile `shouldBe` [LogMessage (Error 2) 562 "help help", LogMessage Info 2 "hello ma"]
describe "insert" $ do
it "no-ops given an Unknown LogMessage" $ do
insert (Unknown "foo") Leaf `shouldBe` Leaf
it "returns a new tree with itself included, given a Leaf" $ do
let a = Leaf
let b = LogMessage Warning 5 "baz"
let c = insert b a
c `shouldBe` Node Leaf b Leaf
it "maintains the sort order of messages in the tree" $ do
let foo = LogMessage Warning 10 "foo"
let baz = LogMessage Warning 5 "baz"
let bif = LogMessage Warning 15 "bif"
let a = Node Leaf foo Leaf
let b = insert baz a
let c = insert bif b
b `shouldBe` Node (Node Leaf baz Leaf) foo Leaf
c `shouldBe` Node (Node Leaf baz Leaf) foo (Node Leaf bif Leaf)
describe "build" $ do
it "builds a MessageTree from a list of LogMessages" $ do
let foo = LogMessage Warning 10 "foo"
let baz = LogMessage Warning 5 "baz"
let bif = LogMessage Warning 15 "bif"
build [foo, baz, bif] `shouldBe` Node (Node Leaf baz Leaf) foo (Node Leaf bif Leaf)
describe "inOrder" $ do
it "will deconstruct the MessageTree into a list of LogMessages" $ do
let foo = LogMessage Warning 10 "foo"
let baz = LogMessage Warning 5 "baz"
let bif = LogMessage Warning 15 "bif"
let gaz = LogMessage (Error 1) 20 "gaz"
let tree = Node (Node Leaf baz Leaf) foo (Node Leaf bif (Node Leaf gaz Leaf))
inOrder tree `shouldBe` [baz, foo, bif, gaz]
describe "whatWentWrong" $ do
it "will return the messages from LogMessages with Errors whose severity is 50+ - sorted by timestamp" $ do
let messages = [LogMessage (Error 49) 10 "alpha", LogMessage (Error 100) 9 "kappa", LogMessage (Error 51) 11 "beta", Unknown "foo", LogMessage Warning 100 "blar"]
whatWentWrong messages `shouldBe` ["kappa", "beta"]
| potatosalad/cis194 | test/Cis194/Week2/LogAnalysisSpec.hs | bsd-3-clause | 2,692 | 0 | 20 | 686 | 882 | 416 | 466 | 54 | 1 |
{-|
  Module      : Network.Spotify.Client
Description : Spotify Web API client library.
Stability : experimental
-}
module Network.Spotify.Client
( AuthorizeApi
, MeApi
, TokenApi
, authorizeApi
, authorizeLink
, userApi
, getMe
, getMePlaylists
, spotifyAccountsBaseUrl
, spotifyApiBaseUrl
, tokenApi
, tokenRequest
) where
import Network.Spotify.Api.Auth
import Network.Spotify.Api.User
import Network.Spotify.Internal.Utils (spotifyApiBaseUrl)
-- TODO: Document all public values in this client library
| chances/servant-spotify | src/Network/Spotify/Client.hs | bsd-3-clause | 591 | 0 | 5 | 151 | 73 | 50 | 23 | 16 | 0 |
module Module4.Task12 where
data Person = Person { firstName :: String, lastName :: String, age :: Int }
updateLastName :: Person -> Person -> Person
updateLastName p1 p2 = p2 { lastName = (lastName p1) }
| dstarcev/stepic-haskell | src/Module4/Task12.hs | bsd-3-clause | 213 | 0 | 8 | 44 | 70 | 41 | 29 | 4 | 1 |
module Problem686 where
req :: Int
req = 678910
main :: IO ()
main = print $ (!! (req - 1)) $ filter ((== 123) . first3) [1 ..]
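-- first3 n computes the three leading decimal digits of 2^n: the leading
-- digits depend only on the fractional part of n * logBase 10 2, and
-- 100 * 10 ** frac (n * logBase 10 2) always lies in [100, 1000).
-- main therefore prints the req-th exponent n >= 1 for which 2^n starts
-- with the digits 123.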
first3 :: Int -> Int
first3 n = truncate res
where
frac :: Double -> Double
frac x = x - fromIntegral (floor x)
lb = logBase 10 2 :: Double
res = 100 * (10 ** frac (fromIntegral n * lb))
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem686.hs | bsd-3-clause | 337 | 0 | 13 | 93 | 162 | 88 | 74 | 11 | 1 |
--------------------------------------------------------------------------------
-- Simulation of the flocking algorithm and the agents used, described in
-- "A Minimalist Flocking Algorithm for Swarm Robots" by
-- Christoph Moeslinger, Thomas Schmickl and Karl Crailsheim.
--------------------------------------------------------------------------------
module Main (main) where
import Flock
import Simulation
import Control.Monad (when)
import Control.Monad.Random (uniform, getRandomR)
import Control.Monad.State (get)
import Control.Lens ((&), (.=))
import Data.List (find, maximumBy)
import Data.Function (on)
import Data.Maybe (isJust)
import System.Environment (getArgs)
import System.IO (hFlush, stdout)
import Graphics.Gloss.Data.Vector (rotateV, mulSV, normalizeV, angleVV)
import Debug.Trace (trace)
fib :: (Eq a, Num a) => a -> a
fib n = fib' 0 1 n
where
    fib' a _ 0 = a
    fib' a b k = fib' b (a+b) (k-1)
getLine' :: IO String
getLine' = do
l <- getLine
if head l == '#'
then getLine'
else return l
main :: IO ()
main = do
putStr "Agent radius: "
hFlush stdout
rad <- read <$> getLine'
putStr "Sensor range: "
hFlush stdout
srange <- read <$> getLine'
putStr "Desired distance: "
hFlush stdout
desired <- read <$> getLine'
putStr "Minimal distance: "
hFlush stdout
minimal <- read <$> getLine'
let
-- rad = 5
-- srange = 30
-- desired = 15
-- minimal = 2
area = Plane
{ _planeAgents =
[ mkAgent (rotateV a (30,0)) rad (0,0) 1 srange
| a <- [0,pi/8..2*pi]
]
, _planeObstacles = [ Obstacle (rotateV a (300,0)) 10 | a <- [0,pi/64..2*pi]]
}
runSim 700 700 (120*60) area (behavior minimal desired)
area :: Plane
area = Plane
{ _planeAgents =
[ mkAgent (i,j) rad (0,0) 1 sensorRange
| i <- [-100, -90..100]
, j <- [-100, -90..100]
, j^2 + i^2 <= 100 ^2
]
, _planeObstacles = []
}
rad, sensorRange, desired, minimal :: Float
rad = 5
sensorRange = rad * 10
desired = sensorRange * 0.9
minimal = sensorRange * 0.2
collisionAvoidance :: Distance -> Behavior Bool
collisionAvoidance minimum = do
(_, os) <- scanD 0 (pi/4)
self <- get
let
cos = os
& find (\o ->
distance self o <= minimum)
& isJust
when cos $ do
lr <- uniform [-pi/2,pi/2]
turn lr
return cos
separation :: Distance -> Behavior Bool
separation minimum = do
(as, _) <- scanD 0 (pi*3/4)
self <- get
let
cas =
find
(\a ->
distance self a <= minimum)
as
case cas of
Nothing -> do
return False
Just a -> do
when (isOnLeft self a)
turnRight
when (isOnRight self a)
turnLeft
return True
cohesion :: Distance -> Behavior Bool
cohesion desired = do
self <- get
let
p a = distance self a > desired
f = length . filter p . fst
ls <- f <$> scanD (-pi/2) (pi/4)
rs <- f <$> scanD (pi/2) (pi/4)
bs <- f <$> scanD pi (pi/4)
if ls+rs+bs <= 0
then do
return False
else do
snd $ maximumBy (compare `on` fst)
[ (ls, turnLeft)
, (rs, turnRight)
, (bs, turnRandomly)
]
return True
homing :: Behavior ()
homing = do
(as, _) <- scan
let
n = length as
d@(d1,d2) = (1/toEnum n) `mulSV` sum (map (normalizeV . _agentDirection) as)
when (n /= 0 && not (isNaN d1 || isNaN d2)) $ do
agentDirection .= (d1, d2)
turnStrength :: Angle
turnStrength = pi/4
turnLeft :: Behavior ()
turnLeft = turn (-turnStrength)
turnRight :: Behavior ()
turnRight = turn turnStrength
turnRandomly :: Behavior ()
turnRandomly = do
action <- uniform [turnLeft, turnRight]
action
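-- Top-level agent behaviour: after moving, the sub-behaviours are tried in
-- priority order (collision avoidance, then separation, then cohesion, and
-- finally homing); each lower-priority rule only runs when the higher ones
-- did not fire. An agent also marks itself as flocking when it sees at least
-- one flocking neighbour or at least five neighbours.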
behavior :: Distance -> Distance -> Behavior ()
behavior minimum desired = do
(as, os) <- scan
self <- get
move
if length (filter _agentFlocking as) >= 1 || length as >= 5
then agentFlocking .= True
else agentFlocking .= False
ca <- collisionAvoidance minimum
when (not ca) $ do
s <- separation minimum
when (not s) $ do
c <- cohesion desired
when (not c)
homing
| SRechenberger/flock | examples/msc2/Main.hs | bsd-3-clause | 4,091 | 0 | 16 | 1,061 | 1,634 | 840 | 794 | 141 | 2 |
{-# LANGUAGE JavaScriptFFI #-}
import Data.JSString (JSString, unpack)
import System.Timeout (timeout)
foreign import javascript interruptible
"setTimeout(function() { $c(\"finished\"); }, 1000*Math.random());"
js_timeout :: IO JSString
main :: IO ()
main = do
ret <- timeout (500*1000) js_timeout
case ret of
Nothing -> putStrLn "not finished"
Just str -> putStrLn $ unpack str
| syocy/ghcjs-game-example | app/timeout/Timeout.hs | bsd-3-clause | 398 | 3 | 11 | 70 | 109 | 55 | 54 | -1 | -1 |
{-# LANGUAGE CPP #-}
module GHC.Platform.SPARC where
import GhcPrelude
#define MACHREGS_NO_REGS 0
#define MACHREGS_sparc 1
#include "../../../includes/CodeGen.Platform.hs"
| sdiehl/ghc | compiler/GHC/Platform/SPARC.hs | bsd-3-clause | 176 | 0 | 3 | 21 | 14 | 11 | 3 | 3 | 0 |
module Main where
import Lib
main :: IO ()
main = test
| rubenmoor/cellstore | app/Main.hs | bsd-3-clause | 57 | 0 | 6 | 14 | 22 | 13 | 9 | 4 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.EN.NZ.Rules
( rules
) where
import Data.Maybe
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Regex.Types
import Duckling.Time.Computed (easterSunday)
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import Duckling.Types
import qualified Duckling.TimeGrain.Types as TG
ruleDDMM :: Rule
ruleDDMM = Rule
{ name = "dd/mm"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])\\s?[/-]\\s?(1[0-2]|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:_)):_) -> do
d <- parseInt dd
m <- parseInt mm
tt $ monthDay m d
_ -> Nothing
}
ruleDDMMYYYY :: Rule
ruleDDMMYYYY = Rule
{ name = "dd/mm/yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])[-/\\s](1[0-2]|0?[1-9])[-/\\s](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:yy:_)):_) -> do
y <- parseInt yy
d <- parseInt dd
m <- parseInt mm
tt $ yearMonthDay y m d
_ -> Nothing
}
-- Clashes with HHMMSS, hence only 4-digit years
ruleDDMMYYYYDot :: Rule
ruleDDMMYYYYDot = Rule
{ name = "dd.mm.yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])\\.(1[0-2]|0?[1-9])\\.(\\d{4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:yy:_)):_) -> do
y <- parseInt yy
d <- parseInt dd
m <- parseInt mm
tt $ yearMonthDay y m d
_ -> Nothing
}
rulePeriodicHolidays :: [Rule]
rulePeriodicHolidays = mkRuleHolidays
-- Fixed dates, year over year
[ ( "ANZAC Day", "anzac day", monthDay 4 25 )
, ( "Guy Fawkes Night", "guy fawkes night", monthDay 11 5 )
, ( "Waitangi Day", "waitangi day", monthDay 2 6 )
-- Fixed day/week/month, year over year
, ( "Administrative Professionals' Day"
, "(administrative professional|secretarie|admin)('?s'?)? day"
, nthDOWOfMonth 3 3 4 )
, ( "Father's Day", "father'?s?'? day", nthDOWOfMonth 1 7 9 )
, ( "Labour Day", "labour day", nthDOWOfMonth 4 1 10 )
, ( "Mother's Day", "mother'?s?'? day", nthDOWOfMonth 2 7 5 )
, ( "Queen's birthday", "queen's birthday", nthDOWOfMonth 1 1 6 )
, ( "Thanksgiving Day", "thanks?giving( day)?", nthDOWOfMonth 4 4 11 )
]
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
[ ( "Easter Tuesday", "easter\\s+tue(sday)?"
, cycleNthAfter False TG.Day 2 easterSunday )
]
rules :: [Rule]
rules =
[ ruleDDMM
, ruleDDMMYYYY
, ruleDDMMYYYYDot
]
++ ruleComputedHolidays
++ rulePeriodicHolidays
| facebookincubator/duckling | Duckling/Time/EN/NZ/Rules.hs | bsd-3-clause | 2,915 | 0 | 19 | 638 | 738 | 414 | 324 | 74 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Lib where
import Control.Monad.Logger (runStdoutLoggingT)
import Control.Monad.Trans.Reader (runReaderT)
import Database.Persist.Postgresql (ConnectionString,
createPostgresqlPool,
runSqlPool)
import Network.HTTP.Types.Status (notFound404)
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import Web.Scotty.Trans
import Database
connStr :: ConnectionString
connStr = "host=localhost dbname=listify user=test password=test port=5432"
routes :: ScottyT Error ConfigM ()
routes = do
middleware logStdoutDev
get "/" getIndex
get "/lists" getLists
post "/lists" createList
get "/lists/:id" getListWithItems
delete "/lists/:id" deleteList
get "/items" getItems
post "/items" createItem
get "/items/:id" getItem
delete "/items/:id" deleteItem
notFound $ status notFound404
runApplication :: IO ()
runApplication = do
pool <- runStdoutLoggingT $ createPostgresqlPool connStr 10
let cfg = Config pool
let r m = runReaderT (runConfigM m) cfg
runSqlPool doMigration pool
scottyT 3000 r routes
| bendiksolheim/listify-backend | src/Lib.hs | bsd-3-clause | 1,397 | 0 | 12 | 414 | 281 | 140 | 141 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Application.Classic.Lang (parseLang) where
import Control.Applicative hiding (optional)
import Data.Attoparsec.ByteString (Parser, takeWhile, parseOnly)
import Data.Attoparsec.ByteString.Char8 (char, string, count, space, digit, option, sepBy1)
import Data.ByteString.Char8 hiding (map, count, take, takeWhile, notElem)
import Data.List (sortBy)
import Data.Ord
import Prelude hiding (takeWhile)
-- |
-- >>> parseLang "en-gb;q=0.8, en;q=0.7, da"
-- ["da","en-gb","en"]
parseLang :: ByteString -> [ByteString]
parseLang bs = case parseOnly acceptLanguage bs of
Right ls -> map fst $ sortBy detrimental ls
_ -> []
where
detrimental = flip (comparing snd)
----------------------------------------------------------------
acceptLanguage :: Parser [(ByteString,Int)]
acceptLanguage = rangeQvalue `sepBy1` (spaces *> char ',' *> spaces)
rangeQvalue :: Parser (ByteString,Int)
rangeQvalue = (,) <$> languageRange <*> quality
languageRange :: Parser ByteString
languageRange = takeWhile (`notElem` [32, 44, 59])
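-- A language range extends up to the next space, comma or semicolon
-- (byte values 32, 44 and 59).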
quality :: Parser Int
quality = option 1000 (string ";q=" *> qvalue)
qvalue :: Parser Int
qvalue = 1000 <$ (char '1' *> optional (char '.' *> range 0 3 digit))
<|> read3 <$> (char '0' *> option "0" (char '.' *> range 0 3 digit))
where
read3 n = read . take 3 $ n ++ repeat '0'
optional p = () <$ p <|> return ()
----------------------------------------------------------------
range :: Int -> Int -> Parser a -> Parser [a]
range n m p = (++) <$> count n p <*> upto (m - n) p
upto :: Int -> Parser a -> Parser [a]
upto 0 _ = return []
upto n p = (:) <$> p <*> upto (n - 1) p <|> return []
spaces :: Parser ()
spaces = () <$ many space
| kazu-yamamoto/wai-app-file-cgi | Network/Wai/Application/Classic/Lang.hs | bsd-3-clause | 1,744 | 0 | 13 | 310 | 636 | 345 | 291 | 34 | 2 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Streamly.Internal.Data.Fold
-- Copyright : (c) 2019 Composewell Technologies
-- (c) 2013 Gabriel Gonzalez
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
-- Also see the "Streamly.Internal.Data.Sink" module that provides specialized left folds
-- that discard the outputs.
--
-- IMPORTANT: keep the signatures consistent with the folds in Streamly.Prelude
module Streamly.Internal.Data.Fold
(
-- * Fold Type
Fold (..)
, hoist
, generally
-- , tail
-- , init
-- * Fold Creation Utilities
, mkPure
, mkPureId
, mkFold
, mkFoldId
-- ** Full Folds
, drain
, drainBy
, drainBy2
, last
, length
, sum
, product
, maximumBy
, maximum
, minimumBy
, minimum
-- , the
, mean
, variance
, stdDev
, rollingHash
, rollingHashWithSalt
-- , rollingHashFirstN
-- , rollingHashLastN
-- ** Full Folds (Monoidal)
, mconcat
, foldMap
, foldMapM
-- ** Full Folds (To Containers)
, toList
, toListRevF -- experimental
-- ** Partial Folds
-- , drainN
-- , drainWhile
-- , lastN
-- , (!!)
-- , genericIndex
, index
, head
-- , findM
, find
, lookup
, findIndex
, elemIndex
, null
, elem
, notElem
-- XXX these are slower than right folds even when full input is used
, all
, any
, and
, or
-- * Transformations
-- ** Covariant Operations
, sequence
, mapM
-- ** Mapping
, transform
, lmap
--, lsequence
, lmapM
-- ** Filtering
, lfilter
, lfilterM
-- , ldeleteBy
-- , luniq
, lcatMaybes
{-
-- ** Mapping Filters
, lmapMaybe
, lmapMaybeM
-- ** Scanning Filters
, lfindIndices
, lelemIndices
-- ** Insertion
-- | Insertion adds more elements to the stream.
, linsertBy
, lintersperseM
-- ** Reordering
, lreverse
-}
-- * Parsing
-- ** Trimming
, ltake
-- , lrunFor -- time
, ltakeWhile
{-
, ltakeWhileM
, ldrop
, ldropWhile
, ldropWhileM
-}
, lsessionsOf
, lchunksOf
-- * Distributing
, tee
, distribute
-- * Partitioning
-- , partitionByM
-- , partitionBy
, partition
-- * Demultiplexing
, demux
-- , demuxWith
, demux_
-- , demuxWith_
-- * Classifying
, classify
-- , classifyWith
-- * Unzipping
, unzip
-- These can be expressed using lmap/lmapM and unzip
-- , unzipWith
-- , unzipWithM
-- * Running Folds
, initialize
, runStep
-- * Nested Folds
-- , concatMap
-- , chunksOf
, duplicate -- experimental
-- * Folding to SVar
, toParallelSVar
, toParallelSVarLimited
)
where
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Functor.Identity (Identity(..))
import Data.Map.Strict (Map)
import Prelude
hiding (filter, drop, dropWhile, take, takeWhile, zipWith, foldr,
foldl, map, mapM_, sequence, all, any, sum, product, elem,
notElem, maximum, minimum, head, last, tail, length, null,
reverse, iterate, init, and, or, lookup, foldr1, (!!),
scanl, scanl1, replicate, concatMap, mconcat, foldMap, unzip,
span, splitAt, break, mapM)
import qualified Data.Map.Strict as Map
import qualified Prelude
import Streamly.Internal.Data.Pipe.Types (Pipe (..), PipeState(..))
import Streamly.Internal.Data.Fold.Types
import Streamly.Internal.Data.Strict
import Streamly.Internal.Data.SVar
import qualified Streamly.Internal.Data.Pipe.Types as Pipe
------------------------------------------------------------------------------
-- Smart constructors
------------------------------------------------------------------------------
-- | Make a fold using a pure step function, a pure initial state and
-- a pure state extraction function.
--
-- /Internal/
--
{-# INLINE mkPure #-}
mkPure :: Monad m => (s -> a -> s) -> s -> (s -> b) -> Fold m a b
mkPure step initial extract =
Fold (\s a -> return $ step s a) (return initial) (return . extract)
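-- For instance, a pure sum fold could be written as @mkPure (+) 0 id@.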
-- | Make a fold using a pure step function and a pure initial state. The
-- final state extracted is identical to the intermediate state.
--
-- /Internal/
--
{-# INLINE mkPureId #-}
mkPureId :: Monad m => (b -> a -> b) -> b -> Fold m a b
mkPureId step initial = mkPure step initial id
-- | Make a fold with an effectful step function and initial state, and a state
-- extraction function.
--
-- > mkFold = Fold
--
-- We can just use 'Fold' but it is provided for completeness.
--
-- /Internal/
--
{-# INLINE mkFold #-}
mkFold :: (s -> a -> m s) -> m s -> (s -> m b) -> Fold m a b
mkFold = Fold
-- | Make a fold with an effectful step function and initial state. The final
-- state extracted is identical to the intermediate state.
--
-- /Internal/
--
{-# INLINE mkFoldId #-}
mkFoldId :: Monad m => (b -> a -> m b) -> m b -> Fold m a b
mkFoldId step initial = Fold step initial return
------------------------------------------------------------------------------
-- hoist
------------------------------------------------------------------------------
-- | Change the underlying monad of a fold
--
-- /Internal/
hoist :: (forall x. m x -> n x) -> Fold m a b -> Fold n a b
hoist f (Fold step initial extract) =
Fold (\x a -> f $ step x a) (f initial) (f . extract)
-- | Adapt a pure fold to any monad
--
-- > generally = hoist (return . runIdentity)
--
-- /Internal/
generally :: Monad m => Fold Identity a b -> Fold m a b
generally = hoist (return . runIdentity)
------------------------------------------------------------------------------
-- Transformations on fold inputs
------------------------------------------------------------------------------
-- | Flatten the monadic output of a fold to pure output.
--
-- @since 0.7.0
{-# INLINE sequence #-}
sequence :: Monad m => Fold m a (m b) -> Fold m a b
sequence (Fold step initial extract) = Fold step initial extract'
where
extract' x = do
act <- extract x
        act
-- | Map a monadic function on the output of a fold.
--
-- @since 0.7.0
{-# INLINE mapM #-}
mapM :: Monad m => (b -> m c) -> Fold m a b -> Fold m a c
mapM f = sequence . fmap f
------------------------------------------------------------------------------
-- Transformations on fold inputs
------------------------------------------------------------------------------
-- rename to lpipe?
--
-- | Apply a transformation on a 'Fold' using a 'Pipe'.
--
-- @since 0.7.0
{-# INLINE transform #-}
transform :: Monad m => Pipe m a b -> Fold m b c -> Fold m a c
transform (Pipe pstep1 pstep2 pinitial) (Fold fstep finitial fextract) =
Fold step initial extract
where
initial = Tuple' <$> return pinitial <*> finitial
step (Tuple' ps fs) x = do
r <- pstep1 ps x
go fs r
where
-- XXX use SPEC?
go acc (Pipe.Yield b (Consume ps')) = do
acc' <- fstep acc b
return (Tuple' ps' acc')
go acc (Pipe.Yield b (Produce ps')) = do
acc' <- fstep acc b
r <- pstep2 ps'
go acc' r
go acc (Pipe.Continue (Consume ps')) = return (Tuple' ps' acc)
go acc (Pipe.Continue (Produce ps')) = do
r <- pstep2 ps'
go acc r
extract (Tuple' _ fs) = fextract fs
------------------------------------------------------------------------------
-- Utilities
------------------------------------------------------------------------------
-- | @_Fold1 step@ returns a new 'Fold' using just a step function that has the
-- same type for the accumulator and the element. The result type is the
-- accumulator type wrapped in 'Maybe'. The initial accumulator is retrieved
-- from the 'Foldable', the result is 'None' for empty containers.
{-# INLINABLE _Fold1 #-}
_Fold1 :: Monad m => (a -> a -> a) -> Fold m a (Maybe a)
_Fold1 step = Fold step_ (return Nothing') fromStrictMaybe
where
step_ mx a = return $ Just' $
case mx of
Nothing' -> a
Just' x -> step x a
------------------------------------------------------------------------------
-- Left folds
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- Run Effects
------------------------------------------------------------------------------
-- | A fold that drains all its input, running the effects and discarding the
-- results.
--
-- @since 0.7.0
{-# INLINABLE drain #-}
drain :: Monad m => Fold m a ()
drain = Fold step begin done
where
begin = return ()
step _ _ = return ()
done = return
-- |
-- > drainBy f = lmapM f drain
--
-- Drain all input after passing it through a monadic function. This is the
-- dual of mapM_ on stream producers.
--
-- @since 0.7.0
{-# INLINABLE drainBy #-}
drainBy :: Monad m => (a -> m b) -> Fold m a ()
drainBy f = Fold (const (void . f)) (return ()) return
{-# INLINABLE drainBy2 #-}
drainBy2 :: Monad m => (a -> m b) -> Fold2 m c a ()
drainBy2 f = Fold2 (const (void . f)) (\_ -> return ()) return
-- | Extract the last element of the input stream, if any.
--
-- @since 0.7.0
{-# INLINABLE last #-}
last :: Monad m => Fold m a (Maybe a)
last = _Fold1 (flip const)
------------------------------------------------------------------------------
-- To Summary
------------------------------------------------------------------------------
-- | Like 'length', except with a more general 'Num' return value
--
-- @since 0.7.0
{-# INLINABLE genericLength #-}
genericLength :: (Monad m, Num b) => Fold m a b
genericLength = Fold (\n _ -> return $ n + 1) (return 0) return
-- | Determine the length of the input stream.
--
-- @since 0.7.0
{-# INLINABLE length #-}
length :: Monad m => Fold m a Int
length = genericLength
-- | Determine the sum of all elements of a stream of numbers. Returns additive
-- identity (@0@) when the stream is empty. Note that this is not numerically
-- stable for floating point numbers.
--
-- @since 0.7.0
{-# INLINABLE sum #-}
sum :: (Monad m, Num a) => Fold m a a
sum = Fold (\x a -> return $ x + a) (return 0) return
-- | Determine the product of all elements of a stream of numbers. Returns
-- multiplicative identity (@1@) when the stream is empty.
--
-- @since 0.7.0
{-# INLINABLE product #-}
product :: (Monad m, Num a) => Fold m a a
product = Fold (\x a -> return $ x * a) (return 1) return
------------------------------------------------------------------------------
-- To Summary (Maybe)
------------------------------------------------------------------------------
-- | Determine the maximum element in a stream using the supplied comparison
-- function.
--
-- @since 0.7.0
{-# INLINABLE maximumBy #-}
maximumBy :: Monad m => (a -> a -> Ordering) -> Fold m a (Maybe a)
maximumBy cmp = _Fold1 max'
where
max' x y = case cmp x y of
GT -> x
_ -> y
-- |
-- @
-- maximum = 'maximumBy' compare
-- @
--
-- Determine the maximum element in a stream.
--
-- @since 0.7.0
{-# INLINABLE maximum #-}
maximum :: (Monad m, Ord a) => Fold m a (Maybe a)
maximum = _Fold1 max
-- | Computes the minimum element with respect to the given comparison function
--
-- @since 0.7.0
{-# INLINABLE minimumBy #-}
minimumBy :: Monad m => (a -> a -> Ordering) -> Fold m a (Maybe a)
minimumBy cmp = _Fold1 min'
where
min' x y = case cmp x y of
GT -> y
_ -> x
-- | Determine the minimum element in a stream using the supplied comparison
-- function.
--
-- @since 0.7.0
{-# INLINABLE minimum #-}
minimum :: (Monad m, Ord a) => Fold m a (Maybe a)
minimum = _Fold1 min
------------------------------------------------------------------------------
-- To Summary (Statistical)
------------------------------------------------------------------------------
-- | Compute a numerically stable arithmetic mean of all elements in the input
-- stream.
--
-- @since 0.7.0
{-# INLINABLE mean #-}
mean :: (Monad m, Fractional a) => Fold m a a
mean = Fold step (return begin) (return . done)
where
begin = Tuple' 0 0
step (Tuple' x n) y = return $
let n' = n + 1
in Tuple' (x + (y - x) / n') n'
done (Tuple' x _) = x
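-- The step above is the usual incremental mean update: with n' = n + 1 the
-- new mean is x + (y - x) / n', so no potentially large running sum is kept.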
-- | Compute a numerically stable (population) variance over all elements in
-- the input stream.
--
-- @since 0.7.0
{-# INLINABLE variance #-}
variance :: (Monad m, Fractional a) => Fold m a a
variance = Fold step (return begin) (return . done)
where
begin = Tuple3' 0 0 0
step (Tuple3' n mean_ m2) x = return $ Tuple3' n' mean' m2'
where
n' = n + 1
mean' = (n * mean_ + x) / (n + 1)
delta = x - mean_
m2' = m2 + delta * delta * n / (n + 1)
done (Tuple3' n _ m2) = m2 / n
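-- The step is a Welford-style update: m2 accumulates the sum of squared
-- deviations from the running mean (delta * delta * n / (n + 1) equals
-- delta * (x - mean')), and done divides by the count to give the
-- population variance.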
-- | Compute a numerically stable (population) standard deviation over all
-- elements in the input stream.
--
-- @since 0.7.0
{-# INLINABLE stdDev #-}
stdDev :: (Monad m, Floating a) => Fold m a a
stdDev = sqrt variance
-- | Compute an 'Int' sized polynomial rolling hash
--
-- > H = salt * k ^ n + c1 * k ^ (n - 1) + c2 * k ^ (n - 2) + ... + cn * k ^ 0
--
-- Where @c1@, @c2@, @cn@ are the elements in the input stream and @k@ is a
-- constant.
--
-- This hash is often used in Rabin-Karp string search algorithm.
--
-- See https://en.wikipedia.org/wiki/Rolling_hash
--
-- @since 0.7.0
{-# INLINABLE rollingHashWithSalt #-}
rollingHashWithSalt :: (Monad m, Enum a) => Int -> Fold m a Int
rollingHashWithSalt salt = Fold step initial extract
where
k = 2891336453
initial = return salt
step cksum a = return $ cksum * k + fromEnum a
extract = return
-- | A default salt used in the implementation of 'rollingHash'.
{-# INLINE defaultSalt #-}
defaultSalt :: Int
#if WORD_SIZE_IN_BITS == 64
defaultSalt = 0xdc36d1615b7400a4
#else
defaultSalt = 0x087fc72c
#endif
-- | Compute an 'Int' sized polynomial rolling hash of a stream.
--
-- > rollingHash = rollingHashWithSalt defaultSalt
--
-- @since 0.7.0
{-# INLINABLE rollingHash #-}
rollingHash :: (Monad m, Enum a) => Fold m a Int
rollingHash = rollingHashWithSalt defaultSalt
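-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports. With a
-- salt of 0 and a single element the hash reduces to @fromEnum@ of that
-- element, per the polynomial above; 'rollingHash' is the same fold seeded
-- with 'defaultSalt':
--
-- >>> S.fold (FL.rollingHashWithSalt 0) (S.fromList "a")
-- 97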
------------------------------------------------------------------------------
-- Monoidal left folds
------------------------------------------------------------------------------
-- | Fold an input stream consisting of monoidal elements using 'mappend'
-- and 'mempty'.
--
-- > S.fold FL.mconcat (S.map Sum $ S.enumerateFromTo 1 10)
--
-- @since 0.7.0
{-# INLINABLE mconcat #-}
mconcat :: (Monad m, Monoid a) => Fold m a a
mconcat = Fold (\x a -> return $ mappend x a) (return mempty) return
-- |
-- > foldMap f = lmap f mconcat
--
-- Make a fold from a pure function that folds the output of the function
-- using 'mappend' and 'mempty'.
--
-- > S.fold (FL.foldMap Sum) $ S.enumerateFromTo 1 10
--
-- @since 0.7.0
{-# INLINABLE foldMap #-}
foldMap :: (Monad m, Monoid b) => (a -> b) -> Fold m a b
foldMap f = lmap f mconcat
-- |
-- > foldMapM f = lmapM f mconcat
--
-- Make a fold from a monadic function that folds the output of the function
-- using 'mappend' and 'mempty'.
--
-- > S.fold (FL.foldMapM (return . Sum)) $ S.enumerateFromTo 1 10
--
-- @since 0.7.0
{-# INLINABLE foldMapM #-}
foldMapM :: (Monad m, Monoid b) => (a -> m b) -> Fold m a b
foldMapM act = Fold step begin done
where
done = return
begin = return mempty
step m a = do
m' <- act a
return $! mappend m m'
------------------------------------------------------------------------------
-- To Containers
------------------------------------------------------------------------------
-- | Folds the input stream to a list.
--
-- /Warning!/ Working on large lists accumulated as buffers in memory could be
-- very inefficient; consider using "Streamly.Array" instead.
--
-- @since 0.7.0
-- id . (x1 :) . (x2 :) . (x3 :) . ... . (xn :) $ []
{-# INLINABLE toList #-}
toList :: Monad m => Fold m a [a]
toList = Fold (\f x -> return $ f . (x :))
(return id)
(return . ($ []))
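-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports; the
-- elements appear in the order in which they were received:
--
-- >>> S.fold FL.toList (S.enumerateFromTo 1 5)
-- [1,2,3,4,5]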
------------------------------------------------------------------------------
-- Partial Folds
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- To Elements
------------------------------------------------------------------------------
-- | Like 'index', except with a more general 'Integral' argument
--
-- @since 0.7.0
{-# INLINABLE genericIndex #-}
genericIndex :: (Integral i, Monad m) => i -> Fold m a (Maybe a)
genericIndex i = Fold step (return $ Left' 0) done
where
step x a = return $
case x of
Left' j -> if i == j
then Right' a
else Left' (j + 1)
_ -> x
done x = return $
case x of
Left' _ -> Nothing
Right' a -> Just a
-- | Lookup the element at the given index.
--
-- @since 0.7.0
{-# INLINABLE index #-}
index :: Monad m => Int -> Fold m a (Maybe a)
index = genericIndex
-- | Extract the first element of the stream, if any.
--
-- @since 0.7.0
{-# INLINABLE head #-}
head :: Monad m => Fold m a (Maybe a)
head = _Fold1 const
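-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports; the
-- index is 0-based and 'Nothing' is returned when the stream is too short or
-- empty:
--
-- >>> S.fold (FL.index 2) (S.fromList "hello")
-- Just 'l'
--
-- >>> S.fold FL.head (S.fromList ([] :: [Int]))
-- Nothing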
-- | Returns the first element that satisfies the given predicate.
--
-- @since 0.7.0
{-# INLINABLE find #-}
find :: Monad m => (a -> Bool) -> Fold m a (Maybe a)
find predicate = Fold step (return Nothing') fromStrictMaybe
where
step x a = return $
case x of
Nothing' -> if predicate a
then Just' a
else Nothing'
_ -> x
-- | In a stream of (key-value) pairs @(a, b)@, return the value @b@ of the
-- first pair where the key equals the given value @a@.
--
-- @since 0.7.0
{-# INLINABLE lookup #-}
lookup :: (Eq a, Monad m) => a -> Fold m (a,b) (Maybe b)
lookup a0 = Fold step (return Nothing') fromStrictMaybe
where
step x (a,b) = return $
case x of
Nothing' -> if a == a0
then Just' b
else Nothing'
_ -> x
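-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports:
--
-- >>> S.fold (FL.find even) (S.fromList [1,3,4,5])
-- Just 4
--
-- >>> S.fold (FL.lookup 2) (S.fromList [(1,'a'),(2,'b'),(3,'c')])
-- Just 'b'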
-- | Convert strict 'Either'' to lazy 'Maybe'
{-# INLINABLE hush #-}
hush :: Either' a b -> Maybe b
hush (Left' _) = Nothing
hush (Right' b) = Just b
-- | Returns the index of the first element that satisfies the given predicate.
--
-- @since 0.7.0
{-# INLINABLE findIndex #-}
findIndex :: Monad m => (a -> Bool) -> Fold m a (Maybe Int)
findIndex predicate = Fold step (return $ Left' 0) (return . hush)
where
step x a = return $
case x of
Left' i ->
if predicate a
then Right' i
else Left' (i + 1)
_ -> x
-- | Returns the first index where a given value is found in the stream.
--
-- @since 0.7.0
{-# INLINABLE elemIndex #-}
elemIndex :: (Eq a, Monad m) => a -> Fold m a (Maybe Int)
elemIndex a = findIndex (a ==)
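-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports; indices
-- are 0-based:
--
-- >>> S.fold (FL.findIndex even) (S.fromList [1,3,4,5])
-- Just 2
--
-- >>> S.fold (FL.elemIndex 5) (S.fromList [1,3,4,5])
-- Just 3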
------------------------------------------------------------------------------
-- To Boolean
------------------------------------------------------------------------------
-- | Return 'True' if the input stream is empty.
--
-- @since 0.7.0
{-# INLINABLE null #-}
null :: Monad m => Fold m a Bool
null = Fold (\_ _ -> return False) (return True) return
-- |
-- > any p = lmap p or
--
-- Returns 'True' if any of the elements of a stream satisfies a predicate.
--
-- @since 0.7.0
{-# INLINABLE any #-}
any :: Monad m => (a -> Bool) -> Fold m a Bool
any predicate = Fold (\x a -> return $ x || predicate a) (return False) return
-- | Return 'True' if the given element is present in the stream.
--
-- @since 0.7.0
{-# INLINABLE elem #-}
elem :: (Eq a, Monad m) => a -> Fold m a Bool
elem a = any (a ==)
-- |
-- > all p = lmap p and
--
-- Returns 'True' if all elements of a stream satisfy a predicate.
--
-- @since 0.7.0
{-# INLINABLE all #-}
all :: Monad m => (a -> Bool) -> Fold m a Bool
all predicate = Fold (\x a -> return $ x && predicate a) (return True) return
-- | Returns 'True' if the given element is not present in the stream.
--
-- @since 0.7.0
{-# INLINABLE notElem #-}
notElem :: (Eq a, Monad m) => a -> Fold m a Bool
notElem a = all (a /=)
-- | Returns 'True' if all elements are 'True', 'False' otherwise
--
-- @since 0.7.0
{-# INLINABLE and #-}
and :: Monad m => Fold m Bool Bool
and = Fold (\x a -> return $ x && a) (return True) return
-- | Returns 'True' if any element is 'True', 'False' otherwise
--
-- @since 0.7.0
{-# INLINABLE or #-}
or :: Monad m => Fold m Bool Bool
or = Fold (\x a -> return $ x || a) (return False) return
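-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports:
--
-- >>> S.fold (FL.any even) (S.fromList [1,3,5,6])
-- True
--
-- >>> S.fold (FL.all even) (S.fromList [2,4,5])
-- False
--
-- >>> S.fold FL.and (S.fromList [True, True, False])
-- False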
------------------------------------------------------------------------------
-- Distributing
------------------------------------------------------------------------------
--
-- | Distribute one copy of the stream to each fold and zip the results.
--
-- @
-- |-------Fold m a b--------|
-- ---stream m a---| |---m (b,c)
-- |-------Fold m a c--------|
-- @
-- >>> S.fold (FL.tee FL.sum FL.length) (S.enumerateFromTo 1.0 100.0)
-- (5050.0,100)
--
-- @since 0.7.0
{-# INLINE tee #-}
tee :: Monad m => Fold m a b -> Fold m a c -> Fold m a (b,c)
tee f1 f2 = (,) <$> f1 <*> f2
{-# INLINE foldNil #-}
foldNil :: Monad m => Fold m a [b]
foldNil = Fold step begin done where
begin = return []
step _ _ = return []
done = return
{-# INLINE foldCons #-}
foldCons :: Monad m => Fold m a b -> Fold m a [b] -> Fold m a [b]
foldCons (Fold stepL beginL doneL) (Fold stepR beginR doneR) =
Fold step begin done
where
begin = Tuple' <$> beginL <*> beginR
step (Tuple' xL xR) a = Tuple' <$> stepL xL a <*> stepR xR a
done (Tuple' xL xR) = (:) <$> (doneL xL) <*> (doneR xR)
-- XXX use "List" instead of "[]"?, use Array for output to scale it to a large
-- number of consumers? For polymorphic case a vector could be helpful. For
-- Storables we can use arrays. Will need separate APIs for those.
--
-- | Distribute one copy of the stream to each fold and collect the results in
-- a container.
--
-- @
--
-- |-------Fold m a b--------|
-- ---stream m a---| |---m [b]
-- |-------Fold m a b--------|
-- | |
-- ...
-- @
--
-- >>> S.fold (FL.distribute [FL.sum, FL.length]) (S.enumerateFromTo 1 5)
-- [15,5]
--
-- This is the consumer side dual of the producer side 'sequence' operation.
--
-- @since 0.7.0
{-# INLINE distribute #-}
distribute :: Monad m => [Fold m a b] -> Fold m a [b]
distribute [] = foldNil
distribute (x:xs) = foldCons x (distribute xs)
------------------------------------------------------------------------------
-- Partitioning
------------------------------------------------------------------------------
--
-- | Partition the input over two folds using an 'Either' partitioning
-- predicate.
--
-- @
--
-- |-------Fold b x--------|
-- -----stream m a --> (Either b c)----| |----(x,y)
-- |-------Fold c y--------|
-- @
--
-- Send input to either fold randomly:
--
-- >>> import System.Random (randomIO)
-- >>> randomly a = randomIO >>= \x -> return $ if x then Left a else Right a
-- >>> S.fold (FL.partitionByM randomly FL.length FL.length) (S.enumerateFromTo 1 100)
-- (59,41)
--
-- Send input to the two folds in a proportion of 2:1:
--
-- @
-- import Data.IORef (newIORef, readIORef, writeIORef)
-- proportionately m n = do
-- ref <- newIORef $ cycle $ concat [replicate m Left, replicate n Right]
-- return $ \\a -> do
-- r <- readIORef ref
-- writeIORef ref $ tail r
-- return $ head r a
--
-- main = do
-- f <- proportionately 2 1
-- r <- S.fold (FL.partitionByM f FL.length FL.length) (S.enumerateFromTo (1 :: Int) 100)
-- print r
-- @
-- @
-- (67,33)
-- @
--
-- This is the consumer side dual of the producer side 'mergeBy' operation.
--
-- @since 0.7.0
{-# INLINE partitionByM #-}
partitionByM :: Monad m
=> (a -> m (Either b c)) -> Fold m b x -> Fold m c y -> Fold m a (x, y)
partitionByM f (Fold stepL beginL doneL) (Fold stepR beginR doneR) =
Fold step begin done
where
begin = Tuple' <$> beginL <*> beginR
step (Tuple' xL xR) a = do
r <- f a
case r of
Left b -> Tuple' <$> stepL xL b <*> return xR
Right c -> Tuple' <$> return xL <*> stepR xR c
done (Tuple' xL xR) = (,) <$> doneL xL <*> doneR xR
-- Note: we could use (a -> Bool) instead of (a -> Either b c), but the latter
-- makes the signature clearer as to which case belongs to which fold.
-- XXX need to check the performance in both cases.
-- | Same as 'partitionByM' but with a pure partition function.
--
-- Count even and odd numbers in a stream:
--
-- @
-- >>> let f = FL.partitionBy (\\n -> if even n then Left n else Right n)
-- (fmap (("Even " ++) . show) FL.length)
-- (fmap (("Odd " ++) . show) FL.length)
-- in S.fold f (S.enumerateFromTo 1 100)
-- ("Even 50","Odd 50")
-- @
--
-- @since 0.7.0
{-# INLINE partitionBy #-}
partitionBy :: Monad m
=> (a -> Either b c) -> Fold m b x -> Fold m c y -> Fold m a (x, y)
partitionBy f = partitionByM (return . f)
-- | Compose two folds such that the combined fold accepts a stream of 'Either'
-- and routes the 'Left' values to the first fold and 'Right' values to the
-- second fold.
--
-- > partition = partitionBy id
--
-- @since 0.7.0
{-# INLINE partition #-}
partition :: Monad m
=> Fold m b x -> Fold m c y -> Fold m (Either b c) (x, y)
partition = partitionBy id
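-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports: sum the
-- 'Left' values while collecting the 'Right' values into a list:
--
-- >>> S.fold (FL.partition FL.sum FL.toList) (S.fromList [Left 1, Right 'a', Left 2, Right 'b'])
-- (3,"ab")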
{-
-- | Send one item to each fold in a round-robin fashion. This is the consumer
-- side dual of producer side 'mergeN' operation.
--
-- partitionN :: Monad m => [Fold m a b] -> Fold m a [b]
-- partitionN fs = Fold step begin done
-}
-- TODO Demultiplex an input element into a number of typed variants. We want
-- to statically restrict the target values within a set of predefined types,
-- an enumeration of a GADT. We also want to make sure that the Map contains
-- only those types and the full set of those types.
--
-- TODO Instead of the input Map it should probably be a lookup-table using an
-- array and not in GC memory. The same applies to the output Map as well.
-- However, that would only be helpful if we have a very large data structure,
-- need to measure and see how it scales.
--
-- This is the consumer side dual of the producer side 'mux' operation (XXX to
-- be implemented).
-- | Split the input stream based on a key field and fold each split using a
-- specific fold, collecting the results in a map from the keys to the results.
-- Useful for cases like protocol handlers that handle different types of
-- packets using different handlers.
--
-- @
--
-- |-------Fold m a b
-- -----stream m a-----Map-----|
-- |-------Fold m a b
-- |
-- ...
-- @
--
-- @since 0.7.0
{-# INLINE demuxWith #-}
demuxWith :: (Monad m, Ord k)
=> (a -> (k, a')) -> Map k (Fold m a' b) -> Fold m a (Map k b)
demuxWith f kv = Fold step initial extract
where
initial = return kv
step mp a = case f a of
(k, a') -> Map.alterF twiddle k mp
-- XXX should we raise an exception in Nothing case?
-- Ideally we should enforce that it is a total map over k so that look
-- up never fails
-- XXX we could use a monadic update function for a single lookup and
-- update in the map.
where
twiddle Nothing = pure Nothing
twiddle (Just (Fold step' acc extract')) = do
!r <- acc >>= \x -> step' x a'
pure . Just $ Fold step' (return r) extract'
extract = Prelude.mapM (\(Fold _ acc e) -> acc >>= e)
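-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports and
-- @Data.Map@ imported qualified as @Map@: classify numbers by parity and sum
-- each class separately:
--
-- >>> S.fold (FL.demuxWith (\n -> (even n, n)) (Map.fromList [(False, FL.sum), (True, FL.sum)])) (S.fromList [1,2,3,4])
-- fromList [(False,4),(True,6)]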
-- | Fold a stream of key-value pairs using a map that specifies a fold for
-- each key, producing a map from the keys to the results of folding the
-- corresponding values.
--
-- @
-- > let table = Data.Map.fromList [(\"SUM", FL.sum), (\"PRODUCT", FL.product)]
-- input = S.fromList [(\"SUM",1),(\"PRODUCT",2),(\"SUM",3),(\"PRODUCT",4)]
-- in S.fold (FL.demux table) input
-- fromList [(\"PRODUCT",8),(\"SUM",4)]
-- @
--
-- @since 0.7.0
{-# INLINE demux #-}
demux :: (Monad m, Ord k)
=> Map k (Fold m a b) -> Fold m (k, a) (Map k b)
demux = demuxWith id
-- | Split the input stream based on a key field and fold each split using a
-- specific fold, without collecting the results. Useful for cases like
-- protocol handlers that handle different types of packets.
--
-- @
--
-- |-------Fold m a ()
-- -----stream m a-----Map-----|
-- |-------Fold m a ()
-- |
-- ...
-- @
--
--
-- @since 0.7.0
-- demuxWith_ can be slightly faster than demuxWith because we do not need to
-- update the Map in this case. This may be significant only if the map is
-- large.
{-# INLINE demuxWith_ #-}
demuxWith_ :: (Monad m, Ord k)
=> (a -> (k, a')) -> Map k (Fold m a' b) -> Fold m a ()
demuxWith_ f kv = Fold step initial extract
where
initial = do
Prelude.mapM (\(Fold s i e) ->
i >>= \r -> return (Fold s (return r) e)) kv
step mp a
-- XXX should we raise an exception in Nothing case?
-- Ideally we should enforce that it is a total map over k so that look
-- up never fails
| (k, a') <- f a
= case Map.lookup k mp of
Nothing -> return mp
Just (Fold step' acc _) -> do
_ <- acc >>= \x -> step' x a'
return mp
extract mp = Prelude.mapM_ (\(Fold _ acc e) -> acc >>= e) mp
-- | Given a stream of key value pairs and a map from keys to folds, fold the
-- values for each key using the corresponding folds, discarding the outputs.
--
-- @
-- > let prn = FL.drainBy print
-- > let table = Data.Map.fromList [(\"ONE", prn), (\"TWO", prn)]
-- input = S.fromList [(\"ONE",1),(\"TWO",2)]
-- in S.fold (FL.demux_ table) input
-- 1
-- 2
-- @
--
-- @since 0.7.0
{-# INLINE demux_ #-}
demux_ :: (Monad m, Ord k) => Map k (Fold m a ()) -> Fold m (k, a) ()
demux_ = demuxWith_ id
-- TODO If the data is large we may need a map/hashmap in pinned memory instead
-- of a regular Map. That may require a serializable constraint though. We can
-- have another API for that.
--
-- | Split the input stream based on a key field and fold each split using the
-- given fold. Useful for map/reduce, bucketizing the input in different bins
-- or for generating histograms.
--
-- @
-- > let input = S.fromList [(\"ONE",1),(\"ONE",1.1),(\"TWO",2), (\"TWO",2.2)]
--   in S.fold (FL.classifyWith fst (FL.lmap snd FL.toList)) input
-- fromList [(\"ONE",[1.0,1.1]),(\"TWO",[2.0,2.2])]
-- @
--
-- @since 0.7.0
{-# INLINE classifyWith #-}
classifyWith :: (Monad m, Ord k) => (a -> k) -> Fold m a b -> Fold m a (Map k b)
classifyWith f (Fold step initial extract) = Fold step' initial' extract'
where
initial' = return Map.empty
step' kv a =
let k = f a
in case Map.lookup k kv of
Nothing -> do
x <- initial
r <- step x a
return $ Map.insert k r kv
Just x -> do
r <- step x a
return $ Map.insert k r kv
extract' = Prelude.mapM extract
-- | Given an input stream of key value pairs and a fold for values, fold all
-- the values belonging to each key. Useful for map/reduce, bucketizing the
-- input in different bins or for generating histograms.
--
-- @
-- > let input = S.fromList [(\"ONE",1),(\"ONE",1.1),(\"TWO",2), (\"TWO",2.2)]
-- in S.fold (FL.classify FL.toList) input
-- fromList [(\"ONE",[1.0,1.1]),(\"TWO",[2.0,2.2])]
-- @
--
-- @since 0.7.0
-- Same as:
--
-- > classify fld = classifyWith fst (lmap snd fld)
--
{-# INLINE classify #-}
classify :: (Monad m, Ord k) => Fold m a b -> Fold m (k, a) (Map k b)
classify fld = classifyWith fst (lmap snd fld)
------------------------------------------------------------------------------
-- Unzipping
------------------------------------------------------------------------------
--
-- | Like 'unzipWith' but with a monadic splitter function.
--
-- @since 0.7.0
{-# INLINE unzipWithM #-}
unzipWithM :: Monad m
=> (a -> m (b,c)) -> Fold m b x -> Fold m c y -> Fold m a (x,y)
unzipWithM f (Fold stepL beginL doneL) (Fold stepR beginR doneR) =
Fold step begin done
where
step (Tuple' xL xR) a = do
(b,c) <- f a
Tuple' <$> stepL xL b <*> stepR xR c
begin = Tuple' <$> beginL <*> beginR
done (Tuple' xL xR) = (,) <$> doneL xL <*> doneR xR
-- | Split elements in the input stream into two parts using a pure splitter
-- function, direct each part to a different fold and zip the results.
--
-- @since 0.7.0
{-# INLINE unzipWith #-}
unzipWith :: Monad m
=> (a -> (b,c)) -> Fold m b x -> Fold m c y -> Fold m a (x,y)
unzipWith f = unzipWithM (return . f)
-- | Send the elements of tuples in a stream of tuples through two different
-- folds.
--
-- @
--
-- |-------Fold m a x--------|
-- ---------stream of (a,b)--| |----m (x,y)
-- |-------Fold m b y--------|
--
-- @
--
-- This is the consumer side dual of the producer side 'zip' operation.
--
-- @since 0.7.0
{-# INLINE unzip #-}
unzip :: Monad m => Fold m a x -> Fold m b y -> Fold m (a,b) (x,y)
unzip = unzipWith id
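-- Illustrative usage, assuming the usual @S@/@FL@ qualified imports: sum the
-- first components while collecting the second components into a list:
--
-- >>> S.fold (FL.unzip FL.sum FL.toList) (S.fromList [(1,'a'),(2,'b'),(3,'c')])
-- (6,"abc")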
------------------------------------------------------------------------------
-- Nesting
------------------------------------------------------------------------------
--
{-
-- All the stream flattening transformations can also be applied to a fold
-- input stream.
-- | This can be used to apply all the stream generation operations on folds.
lconcatMap ::(IsStream t, Monad m) => (a -> t m b)
-> Fold m b c
-> Fold m a c
lconcatMap s f1 f2 = undefined
-}
-- All the grouping transformation that we apply to a stream can also be
-- applied to a fold input stream.
{-
-- | Group the input stream into groups of elements between @low@ and @high@.
-- Collection starts in chunks of @low@ and then keeps doubling until we reach
-- @high@. Each chunk is folded using the provided fold function.
--
-- This could be useful, for example, when we are folding a stream of unknown
-- size to a stream of arrays and we want to minimize the number of
-- allocations.
--
-- @
--
-- XXX we should be able to implement it with parsers/terminating folds.
--
{-# INLINE lchunksInRange #-}
lchunksInRange :: Monad m
=> Int -> Int -> Fold m a b -> Fold m b c -> Fold m a c
lchunksInRange low high (Fold step1 initial1 extract1)
(Fold step2 initial2 extract2) = undefined
-}
------------------------------------------------------------------------------
-- Fold to a Parallel SVar
------------------------------------------------------------------------------
{-# INLINE toParallelSVar #-}
toParallelSVar :: MonadIO m => SVar t m a -> Maybe WorkerInfo -> Fold m a ()
toParallelSVar svar winfo = Fold step initial extract
where
initial = return ()
step () x = liftIO $ do
-- XXX we can have a separate fold for unlimited buffer case to avoid a
-- branch in the step here.
decrementBufferLimit svar
void $ send svar (ChildYield x)
extract () = liftIO $ do
sendStop svar winfo
{-# INLINE toParallelSVarLimited #-}
toParallelSVarLimited :: MonadIO m
=> SVar t m a -> Maybe WorkerInfo -> Fold m a ()
toParallelSVarLimited svar winfo = Fold step initial extract
where
initial = return True
step True x = liftIO $ do
yieldLimitOk <- decrementYieldLimit svar
if yieldLimitOk
then do
decrementBufferLimit svar
void $ send svar (ChildYield x)
return True
else do
cleanupSVarFromWorker svar
sendStop svar winfo
return False
step False _ = return False
extract True = liftIO $ sendStop svar winfo
extract False = return ()
| harendra-kumar/asyncly | src/Streamly/Internal/Data/Fold.hs | bsd-3-clause | 36,711 | 0 | 18 | 9,280 | 7,096 | 3,920 | 3,176 | 420 | 4 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
module ClientGen.JS where
import Data.Proxy
import Data.Text
import Servant.JS
import API
-- jsCode :: Text
-- jsCode = jsForAPI (Proxy :: Proxy API1) vanillaJS
| CBMM/CBaaS | cbaas-lib/src/ClientGen/JS.hs | bsd-3-clause | 227 | 0 | 4 | 40 | 28 | 19 | 9 | 7 | 0 |
module HERMIT.Web.Renderer (webChannel) where
import Control.Concurrent.STM
import Data.Monoid
import HERMIT.Core
import HERMIT.Kure
import HERMIT.PrettyPrinter.Common
import HERMIT.Web.JSON
import System.IO
webChannel :: TChan (Either String [Glyph]) -> Handle -> PrettyOptions -> Either String DocH -> IO ()
webChannel chan _ _ (Left s) = atomically $ writeTChan chan $ Left s
webChannel chan _ opts (Right doc) = let Runes rs = renderCode opts doc
in atomically $ writeTChan chan $ Right $ runesToGlyphs rs
runesToGlyphs :: [Rune] -> [Glyph]
runesToGlyphs = go [] Nothing Nothing
where go :: Path Crumb -> Maybe Style -> Maybe (Path Crumb) -> [Rune] -> [Glyph]
go _ _ _ [] = []
go p s bp (Rune str:r) = Glyph str p s bp : go p s bp r
go p _ bp (Markup s:r) = go p (Just s) bp r
go _ s bp (PathA p :r) = go p s bp r
go p s _ (BndrA bp:r) = go p s (Just bp) r
go p s _ (EndBndrA:r) = go p s Nothing r
-- | Runes are precursors to Glyphs
data Rune = Rune String | Markup Style | PathA (Path Crumb) | BndrA (Path Crumb) | EndBndrA
newtype Runes = Runes [ Rune ]
instance RenderSpecial Runes where
renderSpecial sym = Runes [ Markup SYNTAX , Rune [ch] ]
where Unicode ch = renderSpecial sym
instance Monoid Runes where
mempty = Runes mempty
mappend (Runes rs1) (Runes rs2) = Runes $ mergeRunes $ rs1 ++ rs2
mergeRunes :: [Rune] -> [Rune]
mergeRunes [] = []
mergeRunes [r] = [r]
mergeRunes (g:h:r) = case merge g h of
Left g' -> mergeRunes (g':r)
Right (g',h') -> g' : mergeRunes (h':r)
where merge (Rune s1) (Rune s2) = Left $ Rune (s1 ++ s2)
merge (Markup _) (Markup s2) = Left $ Markup s2
merge (PathA _) (PathA p2) = Left $ PathA p2
merge r1 r2 = Right (r1,r2)
instance RenderCode Runes where
rPutStr txt = Runes [ Rune txt ]
rDoHighlight _ [] = mempty
rDoHighlight Nothing (BndrAttr p:_) = Runes [ BndrA $ snocPathToPath p ]
rDoHighlight (Just (BndrAttr _)) _ = Runes [ EndBndrA ]
rDoHighlight _ (PathAttr p:_) = Runes [ PathA $ snocPathToPath p ]
rDoHighlight _ (Color col:_) =
Runes $ case col of
KeywordColor -> [ Markup KEYWORD ]
SyntaxColor -> [ Markup SYNTAX ]
IdColor -> [ Markup VAR ]
CoercionColor -> [ Markup COERCION ]
TypeColor -> [ Markup TYPE ]
LitColor -> [ Markup LIT ]
WarningColor -> [ Markup WARNING ]
rDoHighlight o (_:rest) = rDoHighlight o rest
| ku-fpg/hermit-web | src/HERMIT/Web/Renderer.hs | bsd-3-clause | 2,711 | 0 | 12 | 873 | 1,111 | 561 | 550 | 55 | 6 |
module Mote.Types (Hole, FileData (..), MoteState (..),
HoleInfo (..), ErrorType (..), AugmentedHoleInfo(..), M, Ref, ScopeMap) where
import Control.Concurrent.MVar
import Control.Monad.Error
import Data.IntervalMap.FingerTree (IntervalMap)
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Time.Clock
import GHC
import System.IO
import TcRnTypes (Ct (..))
import UniqSupply
type Hole = SrcSpan
type Ref = MVar
type ScopeMap = IntervalMap SrcLoc RdrName
data FileData = FileData
{ path :: FilePath
-- This is apparently stored in the ModSummary. Check it out.
, modifyTimeAtLastLoad :: UTCTime
, hsModule :: HsModule RdrName
, typecheckedModule :: TypecheckedModule
, holesInfo :: M.Map SrcSpan AugmentedHoleInfo
, scopeMap :: ScopeMap
}
data MoteState = MoteState
{ fileData :: Maybe FileData
, currentHole :: Maybe AugmentedHoleInfo
, logFile :: Handle
, uniq :: UniqSupply
, argHoles :: S.Set Hole -- holes which are arguments to functions
, loadErrors :: [String]
}
data AugmentedHoleInfo = AugmentedHoleInfo
{ holeInfo :: HoleInfo
-- these are computed only when requested. I would like to rely on
-- Haskell's laziness for memoization here but the fact that suggestions
-- are computed in a monad makes it impossible.
, suggestions :: Maybe [(Name, Type)]
}
data HoleInfo = HoleInfo
{ holeCt :: Ct
, holeEnv :: [(Id, Type)]
}
-- | Possible errors from the server.
data ErrorType
= NoHole -- ^ No hole at the current location.
| NotInMap -- ^ The current hole has not been loaded properly into Mote.
| NoFile -- ^ The given file was not loaded properly into Mote.
| NoVariable String -- ^ The variable with the given name does not exist.
| TypeNotInEnv -- ^ The type does not make sense in the current environment.
| NoRefine -- ^ The provided expression for refinement didn't match the hole type.
| NoHoleInfo -- ^ Information for the current hole was not loaded properly.
| ParseError String -- ^ A parse error with the given message.
| GHCError String -- ^ An error (and message) directly from GHC.
| Unsupported String -- ^ The feature with the given name is not supported (yet).
| OtherError String -- ^ Some other error, with the given error message.
| UnknownError -- ^ An error that doesn't even have an error message.
instance Show ErrorType where
show NoHole = "No hole at the current location."
show NotInMap = "Hole not loaded into map."
show NoFile = "File not loaded."
show (NoVariable var) = "Variable `" ++ var ++ "' not found."
show TypeNotInEnv = "The type does not make sense in the current environment."
show NoRefine = "Could not refine."
show NoHoleInfo = "Information for the current hole was not loaded properly."
show (ParseError msg) = "Parse error: " ++ msg
show (GHCError msg) = "GHC error: " ++ msg
show (Unsupported feature) = feature ++ " is not supported yet."
show (OtherError msg) = msg
show UnknownError = "Unknown error."
instance Error ErrorType where
noMsg = UnknownError
strMsg = OtherError
type M = ErrorT ErrorType Ghc
| imeckler/mote | Mote/Types.hs | bsd-3-clause | 3,525 | 0 | 11 | 1,030 | 574 | 347 | 227 | 65 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module: System.FilePath.Glob
-- Copyright: Bryan O'Sullivan
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <[email protected]>
-- Stability: unstable
-- Portability: everywhere
module System.FilePath.Glob (
namesMatching
) where
import Control.Exception
import Control.Monad (forM)
import System.FilePath.GlobPattern ((~~))
import System.Directory (doesDirectoryExist, doesFileExist,
getCurrentDirectory, getDirectoryContents)
import System.FilePath (dropTrailingPathSeparator, splitFileName, (</>))
import System.IO.Unsafe (unsafeInterleaveIO)
-- | Return a list of names matching a glob pattern. The list is
-- generated lazily.
namesMatching :: String -> IO [FilePath]
namesMatching pat
| not (isPattern pat) = do
exists <- doesNameExist pat
return (if exists then [pat] else [])
| otherwise = do
case splitFileName pat of
("", baseName) -> do
curDir <- getCurrentDirectory
listMatches curDir baseName
(dirName, baseName) -> do
dirs <- if isPattern dirName
then namesMatching (dropTrailingPathSeparator dirName)
else return [dirName]
let listDir = if isPattern baseName
then listMatches
else listPlain
pathNames <- forM dirs $ \dir -> do
baseNames <- listDir dir baseName
return (map (dir </>) baseNames)
return (concat pathNames)
where isPattern = any (`elem` "[*?")
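-- Illustrative usage; the pattern and the resulting file names below are
-- hypothetical and depend entirely on the contents of the directory:
--
-- > names <- namesMatching "src/*.hs"
-- > -- e.g. ["src/Glob.hs", "src/Main.hs"]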
listMatches :: FilePath -> String -> IO [String]
listMatches dirName pat = do
dirName' <- if null dirName
then getCurrentDirectory
else return dirName
names <- unsafeInterleaveIO (handle (\(_::IOException) -> return []) $
getDirectoryContents dirName')
let names' = if isHidden pat
then filter isHidden names
else filter (not . isHidden) names
return (filter (~~ pat) names')
where isHidden ('.':_) = True
isHidden _ = False
listPlain :: FilePath -> String -> IO [String]
listPlain dirName baseName = do
exists <- if null baseName
then doesDirectoryExist dirName
else doesNameExist (dirName </> baseName)
return (if exists then [baseName] else [])
doesNameExist :: FilePath -> IO Bool
doesNameExist name = do
fileExists <- doesFileExist name
if fileExists
then return True
else doesDirectoryExist name
| bos/filemanip | System/FilePath/Glob.hs | bsd-3-clause | 2,593 | 0 | 21 | 759 | 658 | 343 | 315 | 57 | 5 |