code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE QuasiQuotes #-}
module SAML2.XML.Types where
import Data.List.NonEmpty (NonEmpty(..))
import Network.URI (URI(..), URIAuth(..), uriToString)
import qualified Text.XML.HXT.DOM.TypeDefs as HXT
import qualified Text.XML.HXT.Arrow.Pickle.Xml.Invertible as XP
type Node = HXT.XmlTree
-- instance XP.XmlPickler XML.Node where xpickle = XP.xpTree
type Nodes = HXT.XmlTrees
-- instance XP.XmlPickler XML.Nodes where xpickle = XP.xpTrees
type List1 a = NonEmpty a
xpList1 :: XP.PU a -> XP.PU (List1 a)
xpList1 f = [XP.biCase|a:l <-> a:|l|] XP.>$< XP.xpList1 f
type QName = HXT.QName
data Namespace = Namespace
{ namespacePrefix :: !String
, namespaceURI :: !URI
, namespaceURIString :: !String
}
mkNamespace :: String -> URI -> Namespace
mkNamespace p u = Namespace p u $ uriToString id u ""
mkNName :: Namespace -> String -> QName
mkNName ns n = HXT.mkQName (namespacePrefix ns) n (namespaceURIString ns)
httpURI :: String -> String -> String -> String -> URI
httpURI host = URI "http:" $ Just $ URIAuth "" host ""
xmlNS, xmlnsNS :: Namespace
xmlNS = mkNamespace "xml" $ httpURI "www.w3.org" "/XML/1998/namespace" "" ""
xmlnsNS = mkNamespace "xmlns" $ httpURI "www.w3.org" "/2000/xmlns/" "" ""
| dylex/hsaml2 | SAML2/XML/Types.hs | apache-2.0 | 1,217 | 0 | 9 | 194 | 366 | 207 | 159 | 31 | 1 |
module ID.GP
(
) where
import Filesystem.Path.CurrentOS
import Prelude hiding (FilePath, writeFile)
import Text.XML
import ID.GP.Types
-- TODO Output JSON information about the gene.
outputMeta :: FilePath -> Gene Element -> Double -> IO ()
outputMeta = undefined
| erochest/intelligent-design | src/ID/GP.hs | apache-2.0 | 337 | 0 | 9 | 111 | 69 | 41 | 28 | 8 | 1 |
-- | Test low-level operations
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-enable-rewrite-rules -fno-warn-missing-signatures -fno-warn-unused-imports #-}
module Tests.Properties.LowLevel (testLowLevel) where
import Control.Applicative ((<$>), pure)
import Control.Exception as E (SomeException, catch, evaluate)
import Data.Int (Int32, Int64)
import Data.Text.Foreign
import Data.Text.Internal (mul, mul32, mul64)
import Data.Word (Word16, Word32)
import System.IO.Unsafe (unsafePerformIO)
import Test.QuickCheck.Monadic
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (testProperty)
import Test.QuickCheck hiding ((.&.))
import Tests.QuickCheckUtils
import Tests.Utils
import qualified Data.Bits as Bits (shiftL, shiftR)
import qualified Data.Text as T
import qualified Data.Text.Internal.Unsafe.Shift as U
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import qualified System.IO as IO
mulRef :: (Integral a, Bounded a) => a -> a -> Maybe a
mulRef a b
| ab < bot || ab > top = Nothing
| otherwise = Just (fromIntegral ab)
where ab = fromIntegral a * fromIntegral b
top = fromIntegral (maxBound `asTypeOf` a) :: Integer
bot = fromIntegral (minBound `asTypeOf` a) :: Integer
eval :: (a -> b -> c) -> a -> b -> Maybe c
eval f a b = unsafePerformIO $
(Just <$> evaluate (f a b)) `E.catch` (\(_::SomeException) -> pure Nothing)
t_mul32 :: Int32 -> Int32 -> Property
t_mul32 a b = mulRef a b === eval mul32 a b
t_mul64 :: Int64 -> Int64 -> Property
t_mul64 a b = mulRef a b === eval mul64 a b
t_mul :: Int -> Int -> Property
t_mul a b = mulRef a b === eval mul a b
-- Bit shifts.
shiftL w = forAll (choose (0,width-1)) $ \k -> Bits.shiftL w k == U.shiftL w k
where width = round (log (fromIntegral m) / log 2 :: Double)
(m,_) = (maxBound, m == w)
shiftR w = forAll (choose (0,width-1)) $ \k -> Bits.shiftR w k == U.shiftR w k
where width = round (log (fromIntegral m) / log 2 :: Double)
(m,_) = (maxBound, m == w)
shiftL_Int = shiftL :: Int -> Property
shiftL_Word16 = shiftL :: Word16 -> Property
shiftL_Word32 = shiftL :: Word32 -> Property
shiftR_Int = shiftR :: Int -> Property
shiftR_Word16 = shiftR :: Word16 -> Property
shiftR_Word32 = shiftR :: Word32 -> Property
-- Misc.
t_dropWord16 m t = dropWord16 m t `T.isSuffixOf` t
t_takeWord16 m t = takeWord16 m t `T.isPrefixOf` t
t_take_drop_16 m t = T.append (takeWord16 n t) (dropWord16 n t) === t
where n = small m
t_use_from t = monadicIO $ assert . (==t) =<< run (useAsPtr t fromPtr)
t_copy t = T.copy t === t
-- Input and output.
-- t_put_get = write_read T.unlines T.filter put get
-- where put h = withRedirect h IO.stdout . T.putStr
-- get h = withRedirect h IO.stdin T.getContents
-- tl_put_get = write_read TL.unlines TL.filter put get
-- where put h = withRedirect h IO.stdout . TL.putStr
-- get h = withRedirect h IO.stdin TL.getContents
t_write_read = write_read T.unlines T.filter T.hPutStr T.hGetContents
tl_write_read = write_read TL.unlines TL.filter TL.hPutStr TL.hGetContents
t_write_read_line e m b t = write_read head T.filter T.hPutStrLn
T.hGetLine e m b [t]
tl_write_read_line e m b t = write_read head TL.filter TL.hPutStrLn
TL.hGetLine e m b [t]
testLowLevel :: TestTree
testLowLevel =
testGroup "lowlevel" [
testGroup "mul" [
testProperty "t_mul" t_mul,
testProperty "t_mul32" t_mul32,
testProperty "t_mul64" t_mul64
],
testGroup "shifts" [
testProperty "shiftL_Int" shiftL_Int,
testProperty "shiftL_Word16" shiftL_Word16,
testProperty "shiftL_Word32" shiftL_Word32,
testProperty "shiftR_Int" shiftR_Int,
testProperty "shiftR_Word16" shiftR_Word16,
testProperty "shiftR_Word32" shiftR_Word32
],
testGroup "misc" [
testProperty "t_dropWord16" t_dropWord16,
testProperty "t_takeWord16" t_takeWord16,
testProperty "t_take_drop_16" t_take_drop_16,
testProperty "t_use_from" t_use_from,
testProperty "t_copy" t_copy
],
testGroup "input-output" [
testProperty "t_write_read" t_write_read,
testProperty "tl_write_read" tl_write_read,
testProperty "t_write_read_line" t_write_read_line,
testProperty "tl_write_read_line" tl_write_read_line
-- These tests are subject to I/O race conditions
-- testProperty "t_put_get" t_put_get,
-- testProperty "tl_put_get" tl_put_get
]
]
| bos/text | tests/Tests/Properties/LowLevel.hs | bsd-2-clause | 4,576 | 0 | 12 | 947 | 1,334 | 730 | 604 | 88 | 1 |
{-|
Module : Web.Mastodon.API
Description : Dummy file to re-export everything from API folder
-}
module Web.Mastodon.API
( module API
,
) where
import Web.Mastodon.API.Accounts as API
import Web.Mastodon.API.Actions as API
import Web.Mastodon.API.Apps as API
import Web.Mastodon.API.Statuses as API
import Web.Mastodon.API.Timelines as API
| cmdd/mastodon-api | src/Web/Mastodon/API.hs | bsd-3-clause | 407 | 0 | 4 | 109 | 60 | 46 | 14 | 7 | 0 |
{-# LANGUAGE GADTs, GeneralizedNewtypeDeriving #-}
-------------------------------------------------------------------------------
-- |
-- Module : Control.Monad.CC.Prompt
-- Copyright : (c) R. Kent Dybvig, Simon L. Peyton Jones and Amr Sabry
-- License : MIT
--
-- Maintainer : Dan Doel
-- Stability : Experimental
-- Portability : Non-portable (rank-2 types, generalized algebraic datatypes)
--
-- A monadic treatment of delimited continuations.
--
-- Adapted from the paper
-- /A Monadic Framework for Delimited Continuations/,
-- by R. Kent Dybvig, Simon Peyton Jones and Amr Sabry
-- (<http://www.cs.indiana.edu/~sabry/papers/monadicDC.pdf>)
--
-- This module implements the generation of unique prompt names to be used
-- as delimiters.
module Control.Monad.CC.Prompt (
-- * P, The prompt generation monad
P,
-- * The Prompt type
Prompt,
runP,
newPromptName,
eqPrompt,
-- * A type equality datatype
Equal(..)
) where
import Control.Monad.State
import Control.Monad.Reader
import Unsafe.Coerce
-- | The prompt type, parameterized by two types:
-- * ans : The region identifier, used to ensure that prompts are only used
-- within the same context in which they are created.
--
-- * a : The type of values that may be returned 'through' a given prompt.
-- For instance, only prompts of type 'Prompt r a' may be pushed onto a
-- computation of type 'CC r a'.
newtype Prompt ans a = Prompt Int
-- | The prompt generation monad. Represents the type of computations that
-- make use of a supply of unique prompts.
newtype P ans m a = P { unP :: StateT Int m a }
deriving (Functor, Monad, MonadTrans, MonadState Int, MonadReader r)
-- | Runs a computation that makes use of prompts, yielding a result in the
-- underlying monad.
runP :: (Monad m) => P ans m ans -> m ans
runP p = evalStateT (unP p) 0
-- | Generates a new, unique prompt
newPromptName :: (Monad m) => P ans m (Prompt ans a)
newPromptName = do i <- get ; put (succ i) ; return (Prompt i)
-- | A datatype representing type equality. The EQU constructor can
-- be used to provide evidence that two types are equivalent.
data Equal a b where
EQU :: Equal a a
NEQ :: Equal a b
-- Unfortunately, the type system cannot check that the value of two prompts being
-- equal ensures the equality of their types, so unsafeCoerce must be used.
-- | Tests to determine if two prompts are equal. If so, it provides
-- evidence of that fact, in the form of an /Equal/.
eqPrompt :: Prompt ans a -> Prompt ans b -> Equal a b
eqPrompt (Prompt p1) (Prompt p2)
| p1 == p2 = unsafeCoerce EQU
| otherwise = NEQ
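-- A minimal usage sketch (the helper below is hypothetical and not part of
-- this module): pattern matching on the result of 'eqPrompt' refines the
-- types, so a value can only be passed along when the prompts match.
--
-- > sameAnswer :: Prompt ans a -> Prompt ans b -> a -> Maybe b
-- > sameAnswer p q x = case eqPrompt p q of
-- >   EQU -> Just x  -- GADT refinement: the compiler knows a ~ b here
-- >   NEQ -> Nothing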
| vito/hummus | src/Control/Monad/CC/Prompt.hs | bsd-3-clause | 2,691 | 0 | 9 | 584 | 362 | 214 | 148 | 25 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Distance.FR.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Distance.Types
import Duckling.Lang
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {lang = FR}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (DistanceValue Kilometre 3)
[ "3 kilomètres"
, "3 kilometres"
, "3 km"
, "3km"
, "3k"
]
, examples (DistanceValue Kilometre 3.0)
[ "3,0 km"
]
, examples (DistanceValue Mile 8)
[ "8 miles"
]
, examples (DistanceValue Metre 9)
[ "9 metres"
, "9m"
]
, examples (DistanceValue Centimetre 2)
[ "2cm"
, "2 centimetres"
]
]
| rfranek/duckling | Duckling/Distance/FR/Corpus.hs | bsd-3-clause | 1,197 | 0 | 9 | 380 | 203 | 121 | 82 | 29 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SUNX
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- A convenience module, combining all raw modules containing SUNX extensions.
--
--------------------------------------------------------------------------------
module Graphics.GL.SUNX (
module Graphics.GL.SUNX.ConstantData
) where
import Graphics.GL.SUNX.ConstantData
| haskell-opengl/OpenGLRaw | src/Graphics/GL/SUNX.hs | bsd-3-clause | 566 | 0 | 5 | 80 | 37 | 30 | 7 | 3 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Snap.Snaplet.Persistent
( initPersist
, PersistState(..)
, HasPersistPool(..)
, mkPgPool
, mkSnapletPgPool
, runPersist
, runPersist'
, withPool
-- * Utility Functions
, mkKey
, mkKeyBS
, mkKeyT
, showKey
, showKeyBS
, mkInt
, mkWord64
, followForeignKey
, fromPersistValue'
) where
-------------------------------------------------------------------------------
import Control.Monad.Catch as EC
import Control.Monad.Logger
import Control.Monad.State
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource
import Control.Retry
import Data.ByteString (ByteString)
import Data.Configurator
import Data.Configurator.Types
import Data.Maybe
import Data.Monoid
import Data.Readable
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Word
import Database.Persist
import Database.Persist.Class
import Database.Persist.Postgresql hiding (get)
import qualified Database.Persist.Postgresql as DB
import Database.Persist.Types
import Paths_snaplet_persistent
import Snap.Core
import Snap.Snaplet as S
-------------------------------------------------------------------------------
instance MonadThrow Snap where
throwM = liftSnap . throwM
instance MonadCatch Snap where
catch e h = liftSnap $ catch e h
-------------------------------------------------------------------------------
newtype PersistState = PersistState { persistPool :: ConnectionPool }
-------------------------------------------------------------------------------
-- | Implement this type class to have any monad work with snaplet-persistent.
-- A default instance is provided for (Handler b PersistState).
class MonadIO m => HasPersistPool m where
getPersistPool :: m ConnectionPool
instance HasPersistPool m => HasPersistPool (NoLoggingT m) where
getPersistPool = runNoLoggingT getPersistPool
instance HasPersistPool (S.Handler b PersistState) where
getPersistPool = gets persistPool
instance MonadIO m => HasPersistPool (ReaderT ConnectionPool m) where
getPersistPool = ask
-------------------------------------------------------------------------------
-- | Initialize Persistent with an initial SQL function called right
-- after the connection pool has been created. This is most useful for
-- calling migrations upfront right after initialization.
--
-- Example:
--
-- > initPersist (runMigrationUnsafe migrateAll)
--
-- where migrateAll is the migration function that was auto-generated
-- by the QQ statement in your persistent schema definition in the
-- call to 'mkMigrate'.
initPersist :: SqlPersistT (NoLoggingT IO) a -> SnapletInit b PersistState
initPersist migration = makeSnaplet "persist" description datadir $ do
conf <- getSnapletUserConfig
p <- liftIO . runNoLoggingT $ mkSnapletPgPool conf
liftIO . runNoLoggingT $ runSqlPool migration p
return $ PersistState p
where
description = "Snaplet for persistent DB library"
datadir = Just $ liftM (++"/resources/db") getDataDir
-------------------------------------------------------------------------------
-- | Constructs a connection pool from Config.
mkPgPool :: (MonadLogger m, MonadBaseControl IO m, MonadIO m) => Config -> m ConnectionPool
mkPgPool conf = do
pgConStr <- liftIO $ require conf "postgre-con-str"
cons <- liftIO $ require conf "postgre-pool-size"
createPostgresqlPool pgConStr cons
-------------------------------------------------------------------------------
-- | Constructs a connection pool in a snaplet context.
mkSnapletPgPool :: (MonadBaseControl IO m, MonadLogger m, MonadIO m, EC.MonadCatch m) => Config -> m ConnectionPool
mkSnapletPgPool = mkPgPool
-------------------------------------------------------------------------------
-- | Runs a SqlPersist action in any monad with a HasPersistPool instance.
runPersist :: (HasPersistPool m, MonadSnap m)
=> SqlPersistT (ResourceT (NoLoggingT IO)) b
-- ^ Run given Persistent action in the defined monad.
-> m b
runPersist act = getPersistPool >>= \p -> liftSnap (withPool p act)
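-- A small sketch of a handler using 'runPersist'; the @User@ entity is
-- hypothetical and assumed to be defined elsewhere with persistent's usual
-- @share@/@mkPersist@ machinery:
--
-- > listUsers :: Handler b PersistState [Entity User]
-- > listUsers = runPersist $ selectList [] []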
runPersist' :: (HasPersistPool m)
=> SqlPersistT (ResourceT (NoLoggingT IO)) b
-- ^ Run given Persistent action in the defined monad.
-> m b
runPersist' act = getPersistPool >>= \p -> withPool p act
------------------------------------------------------------------------------
-- | Run a database action. If a `PersistentSqlException` is raised,
-- the action is retried up to five times with a 50ms delay between
-- retries.
--
-- This is done because Postgres will sometimes reap connections, and a
-- connection leased out of the pool may then be stale and will often
-- throw a `Couldn'tGetSQLConnection` type value.
withPool :: MonadIO m
=> ConnectionPool
-> SqlPersistT (ResourceT (NoLoggingT IO)) a
-> m a
withPool cp f = liftIO $ recoverAll retryPolicy' $ \_ -> runF f cp
where
retryPolicy' = constantDelay 50000 <> limitRetries 5
runF f' cp' = liftIO . runNoLoggingT . runResourceT $ runSqlPool f' cp'
-------------------------------------------------------------------------------
-- | Make a Key from an Int.
mkKey :: ToBackendKey SqlBackend entity => Int -> Key entity
mkKey = fromBackendKey . SqlBackendKey . fromIntegral
-------------------------------------------------------------------------------
-- | Makes a Key from a ByteString. Calls error on failure.
mkKeyBS :: ToBackendKey SqlBackend entity => ByteString -> Key entity
mkKeyBS = mkKey . fromMaybe (error "Can't ByteString value") . fromBS
-------------------------------------------------------------------------------
-- | Makes a Key from Text. Calls error on failure.
mkKeyT :: ToBackendKey SqlBackend entity => Text -> Key entity
mkKeyT = mkKey . fromMaybe (error "Can't Text value") . fromText
-------------------------------------------------------------------------------
-- | Makes a Text representation of a Key.
showKey :: ToBackendKey SqlBackend e => Key e -> Text
showKey = T.pack . show . mkInt
-------------------------------------------------------------------------------
-- | Makes a ByteString representation of a Key.
showKeyBS :: ToBackendKey SqlBackend e => Key e -> ByteString
showKeyBS = T.encodeUtf8 . showKey
-------------------------------------------------------------------------------
-- | Converts a Key to Int. Fails with error if the conversion fails.
mkInt :: ToBackendKey SqlBackend a => Key a -> Int
mkInt = fromIntegral . unSqlBackendKey . toBackendKey
-------------------------------------------------------------------------------
-- | Converts a Key to Word64. Fails with error if the conversion fails.
mkWord64 :: ToBackendKey SqlBackend a => Key a -> Word64
mkWord64 = fromIntegral . unSqlBackendKey . toBackendKey
-------------------------------------------------------------------------------
-- Converts a PersistValue to a more concrete type. Calls error if the
-- conversion fails.
fromPersistValue' :: PersistField c => PersistValue -> c
fromPersistValue' = either (const $ error "Persist conversion failed") id
. fromPersistValue
------------------------------------------------------------------------------
-- | Follows a foreign key field in one entity and retrieves the corresponding
-- entity from the database.
followForeignKey :: (PersistEntity a, HasPersistPool m,
PersistEntityBackend a ~ SqlBackend)
=> (t -> Key a) -> Entity t -> m (Maybe (Entity a))
followForeignKey toKey (Entity _ val) = do
let key' = toKey val
mval <- runPersist' $ DB.get key'
return $ fmap (Entity key') mval
| plumlife/snaplet-persistent | src/Snap/Snaplet/Persistent.hs | bsd-3-clause | 8,240 | 0 | 12 | 1,577 | 1,378 | 748 | 630 | 115 | 1 |
module Distribution.Simple.Test.ExeV10
( runTest
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Compat.CreatePipe
import Distribution.Compat.Environment
import qualified Distribution.PackageDescription as PD
import Distribution.Simple.Build.PathsModule
import Distribution.Simple.BuildPaths
import Distribution.Simple.Compiler
import Distribution.Simple.Hpc
import Distribution.Simple.InstallDirs
import qualified Distribution.Simple.LocalBuildInfo as LBI
import qualified Distribution.Types.LocalBuildInfo as LBI
import Distribution.Simple.Setup
import Distribution.Simple.Test.Log
import Distribution.Simple.Utils
import Distribution.System
import Distribution.TestSuite
import Distribution.Text
import Distribution.Verbosity
import Control.Concurrent (forkIO)
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, removeDirectoryRecursive )
import System.Exit ( ExitCode(..) )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hGetContents, hPutStr, stdout, stderr )
runTest :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> LBI.ComponentLocalBuildInfo
-> TestFlags
-> PD.TestSuite
-> IO TestSuiteLog
runTest pkg_descr lbi clbi flags suite = do
let isCoverageEnabled = LBI.testCoverage lbi
way = guessWay lbi
tixDir_ = tixDir distPref way $ PD.testName suite
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let cmd = LBI.buildDir lbi </> PD.testName suite
</> PD.testName suite <.> exeExtension
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $ die $ "Error: Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
exists' <- doesDirectoryExist tixDir_
when exists' $ removeDirectoryRecursive tixDir_
-- Create directory for HPC files.
createDirectoryIfMissing True tixDir_
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart $ PD.testName suite
(wOut, wErr, logText) <- case details of
Direct -> return (stdout, stderr, "")
_ -> do
(rOut, wOut) <- createPipe
-- Read test executable's output lazily (returns immediately)
logText <- hGetContents rOut
-- Force the IO manager to drain the test output pipe
void $ forkIO $ length logText `seq` return ()
-- '--show-details=streaming': print the log output in another thread
when (details == Streaming) $ void $ forkIO $ hPutStr stdout logText
return (wOut, wOut, logText)
-- Run the test executable
let opts = map (testOption pkg_descr lbi suite)
(testOptions flags)
dataDirPath = pwd </> PD.dataDir pkg_descr
tixFile = pwd </> tixFilePath distPref way (PD.testName suite)
pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: existingEnv
shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv
-- Add (DY)LD_LIBRARY_PATH if needed
shellEnv' <- if LBI.withDynExe lbi
then do let (Platform _ os) = LBI.hostPlatform lbi
paths <- LBI.depLibraryPaths True False lbi clbi
return (addLibraryPath os paths shellEnv)
else return shellEnv
exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv')
-- these handles are automatically closed
Nothing (Just wOut) (Just wErr)
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log.
let suiteLog = buildLog exit
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog) $ summarizeSuiteStart $ PD.testName suite
-- Append contents of temporary log file to the final human-
-- readable log file
appendFile (logFile suiteLog) logText
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let whenPrinting = when $
( details == Always ||
details == Failures && not (suitePassed $ testLogs suiteLog))
-- verbosity overrides show-details
&& verbosity >= normal
whenPrinting $ putStr $ unlines $ lines logText
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog
when isCoverageEnabled $
markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite
return suiteLog
where
distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
details = fromFlag $ testShowDetails flags
testLogDir = distPref </> "test"
buildLog exit =
let r = case exit of
ExitSuccess -> Pass
ExitFailure c -> Fail $ "exit code: " ++ show c
n = PD.testName suite
l = TestLog
{ testName = n
, testOptionsReturned = []
, testResult = r
}
in TestSuiteLog
{ testSuiteName = n
, testLogs = l
, logFile =
testLogDir
</> testSuiteLogPath (fromFlag $ testHumanLog flags)
pkg_descr lbi n l
}
-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't
-- necessarily a path.
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (LBI.localUnitId lbi)
(compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++
[(TestSuiteNameVar, toPathTemplate $ PD.testName suite)]
| sopvop/cabal | Cabal/Distribution/Simple/Test/ExeV10.hs | bsd-3-clause | 6,413 | 0 | 18 | 1,819 | 1,346 | 701 | 645 | 116 | 4 |
{-# LANGUAGE LambdaCase #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.ExclusiveScratchpads
-- Description : Named scratchpads that can be mutually exclusive.
-- Copyright : Bruce Forte (2017)
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Bruce Forte
-- Stability : unstable
-- Portability : unportable
--
-- Named scratchpads that can be mutually exclusive.
--
-----------------------------------------------------------------------------
module XMonad.Util.ExclusiveScratchpads (
-- * Usage
-- $usage
mkXScratchpads,
xScratchpadsManageHook,
-- * Keyboard related
scratchpadAction,
hideAll,
resetExclusiveSp,
-- * Mouse related
setNoexclusive,
resizeNoexclusive,
floatMoveNoexclusive,
-- * Types
ExclusiveScratchpad(..),
ExclusiveScratchpads,
-- * Hooks
nonFloating,
defaultFloating,
customFloating
) where
import XMonad.Prelude (appEndo, filterM, liftA2, (<=<))
import XMonad
import XMonad.Actions.Minimize
import XMonad.Actions.TagWindows (addTag,delTag)
import XMonad.Hooks.ManageHelpers (doRectFloat,isInProperty)
import qualified XMonad.StackSet as W
-- $usage
--
-- For this module to work properly, you need to use "XMonad.Layout.BoringWindows" and
-- "XMonad.Layout.Minimize", please refer to the documentation of these modules for more
-- information on how to configure them.
--
-- To use this module, put the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Util.ExclusiveScratchpads
-- > import XMonad.ManageHook (title,appName)
-- > import qualified XMonad.StackSet as W
--
-- Add exclusive scratchpads, for example:
--
-- > exclusiveSps = mkXScratchpads [ ("htop", "urxvt -name htop -e htop", title =? "htop")
-- > , ("xclock", "xclock", appName =? "xclock")
-- > ] $ customFloating $ W.RationalRect (1/4) (1/4) (1/2) (1/2)
--
-- The scratchpads don\'t have to be exclusive, you can create them like this (see 'ExclusiveScratchpad'):
--
-- > regularSps = [ XSP "term" "urxvt -name scratchpad" (appName =? "scratchpad") defaultFloating [] ]
--
-- Create a list that contains all your scratchpads like this:
--
-- > scratchpads = exclusiveSps ++ regularSps
--
-- Add the hooks to your managehook (see "XMonad.Doc.Extending#Editing_the_manage_hook"), eg.:
--
-- > manageHook = myManageHook <+> xScratchpadsManageHook scratchpads
--
-- And finally add some keybindings (see "XMonad.Doc.Extending#Editing_key_bindings"):
--
-- > , ((modMask, xK_h), scratchpadAction scratchpads "htop")
-- > , ((modMask, xK_c), scratchpadAction scratchpads "xclock")
-- > , ((modMask, xK_t), scratchpadAction scratchpads "term")
-- > , ((modMask, xK_h), hideAll scratchpads)
--
-- Now you can get your scratchpads by pressing the corresponding keys. If you
-- have the @htop@ scratchpad on your current screen and you fetch the @xclock@
-- scratchpad, then @htop@ gets hidden.
--
-- If you move a scratchpad it still gets hidden when you fetch a scratchpad of
-- the same family. To change that behaviour and make windows not exclusive
-- anymore when they get resized or moved, add these mouse bindings
-- (see "XMonad.Doc.Extending#Editing_mouse_bindings"):
--
-- > , ((mod4Mask, button1), floatMoveNoexclusive scratchpads)
-- > , ((mod4Mask, button3), resizeNoexclusive scratchpads)
--
-- To reset a moved scratchpad to the original position that you set with its hook,
-- call @resetExclusiveSp@ when it is in focus. For example if you want to extend
-- Mod-Return to reset the placement when a scratchpad is in focus but keep the
-- default behaviour for tiled windows, set these key bindings:
--
-- > , ((modMask, xK_Return), windows W.swapMaster >> resetExclusiveSp scratchpads)
--
-- __Note:__ This is just an example; in general you can add more than two
-- exclusive scratchpads and multiple families of such.
data ExclusiveScratchpad = XSP { name :: String -- ^ Name of the scratchpad
, cmd :: String -- ^ Command to spawn the scratchpad
, query :: Query Bool -- ^ Query to match the scratchpad
, hook :: ManageHook -- ^ Hook to specify the placement policy
, exclusive :: [String] -- ^ Names of exclusive scratchpads
}
type ExclusiveScratchpads = [ExclusiveScratchpad]
-- -----------------------------------------------------------------------------------
-- | Create 'ExclusiveScratchpads' from @[(name,cmd,query)]@ with a common @hook@
mkXScratchpads :: [(String,String,Query Bool)] -- ^ List of @(name,cmd,query)@ of the
-- exclusive scratchpads
-> ManageHook -- ^ The common @hook@ that they use
-> ExclusiveScratchpads
mkXScratchpads xs h = foldl accumulate [] xs
where
accumulate a (n,c,q) = XSP n c q h (filter (n/=) names) : a
names = map (\(n,_,_) -> n) xs
-- | Create 'ManageHook' from 'ExclusiveScratchpads'
xScratchpadsManageHook :: ExclusiveScratchpads -- ^ List of exclusive scratchpads from
-- which a 'ManageHook' should be generated
-> ManageHook
xScratchpadsManageHook = composeAll . fmap (\sp -> query sp --> hook sp)
-- | Pop up/hide the scratchpad by name and possibly hide its exclusive
scratchpadAction :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> String -- ^ Name of the scratchpad to toggle
-> X ()
scratchpadAction xs n =
let ys = filter ((n==).name) xs in
case ys of [] -> return ()
(sp:_) -> let q = query sp in withWindowSet $ \s -> do
ws <- filterM (runQuery q) $ W.allWindows s
case ws of [] -> do spawn (cmd sp)
hideOthers xs n
windows W.shiftMaster
(w:_) -> do toggleWindow w
whenX (runQuery isExclusive w) (hideOthers xs n)
where
toggleWindow w = liftA2 (&&) (runQuery isMaximized w) (onCurrentScreen w) >>= \case
True -> whenX (onCurrentScreen w) (minimizeWindow w)
False -> do windows (flip W.shiftWin w =<< W.currentTag)
maximizeWindowAndFocus w
windows W.shiftMaster
onCurrentScreen w = withWindowSet (return . elem w . currentWindows)
-- | Hide all 'ExclusiveScratchpads' on the current screen
hideAll :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> X ()
hideAll xs = mapWithCurrentScreen q minimizeWindow
where q = joinQueries (map query xs) <&&> isExclusive <&&> isMaximized
-- | If the focused window is a scratchpad, the scratchpad gets reset to the original
-- placement specified with the hook and becomes exclusive again
resetExclusiveSp :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> X ()
resetExclusiveSp xs = withFocused $ \w -> whenX (isScratchpad xs w) $ do
let ys = filterM (flip runQuery w . query) xs
unlessX (null <$> ys) $ do
mh <- head . map hook <$> ys -- ys /= [], so `head` is fine
n <- head . map name <$> ys -- same
(windows . appEndo <=< runQuery mh) w
hideOthers xs n
delTag "_XSP_NOEXCLUSIVE" w
where unlessX = whenX . fmap not
-- -----------------------------------------------------------------------------------
-- | Hide the scratchpad of the same family by name if it's on the focused workspace
hideOthers :: ExclusiveScratchpads -> String -> X ()
hideOthers xs n =
let ys = concatMap exclusive $ filter ((n==).name) xs
qs = map query $ filter ((`elem` ys).name) xs
q = joinQueries qs <&&> isExclusive <&&> isMaximized in
mapWithCurrentScreen q minimizeWindow
-- | Conditionally map a function on all windows of the current screen
mapWithCurrentScreen :: Query Bool -> (Window -> X ()) -> X ()
mapWithCurrentScreen q f = withWindowSet $ \s -> do
ws <- filterM (runQuery q) $ currentWindows s
mapM_ f ws
-- | Extract all windows on the current screen from a StackSet
currentWindows :: W.StackSet i l a sid sd -> [a]
currentWindows = W.integrate' . W.stack . W.workspace . W.current
-- | Check if given window is a scratchpad
isScratchpad :: ExclusiveScratchpads -> Window -> X Bool
isScratchpad xs w = withWindowSet $ \s -> do
let q = joinQueries $ map query xs
ws <- filterM (runQuery q) $ W.allWindows s
return $ elem w ws
-- | Build a disjunction from a list of clauses
joinQueries :: [Query Bool] -> Query Bool
joinQueries = foldl (<||>) (liftX $ return False)
-- | Useful queries
isExclusive, isMaximized :: Query Bool
isExclusive = notElem "_XSP_NOEXCLUSIVE" . words <$> stringProperty "_XMONAD_TAGS"
isMaximized = not <$> isInProperty "_NET_WM_STATE" "_NET_WM_STATE_HIDDEN"
-- -----------------------------------------------------------------------------------
-- | Make a window not exclusive anymore
setNoexclusive :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> Window -- ^ Window which should be made not
-- exclusive anymore
-> X ()
setNoexclusive xs w = whenX (isScratchpad xs w) $ addTag "_XSP_NOEXCLUSIVE" w
-- | Float and drag the window, make it not exclusive anymore
floatMoveNoexclusive :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> Window -- ^ Window which should be moved
-> X ()
floatMoveNoexclusive xs w = setNoexclusive xs w
>> focus w
>> mouseMoveWindow w
>> windows W.shiftMaster
-- | Resize window, make it not exclusive anymore
resizeNoexclusive :: ExclusiveScratchpads -- ^ List of exclusive scratchpads
-> Window -- ^ Window which should be resized
-> X ()
resizeNoexclusive xs w = setNoexclusive xs w
>> focus w
>> mouseResizeWindow w
>> windows W.shiftMaster
-- -----------------------------------------------------------------------------------
-- | Manage hook that makes the window non-floating
nonFloating :: ManageHook
nonFloating = idHook
-- | Manage hook that makes the window floating with the default placement
defaultFloating :: ManageHook
defaultFloating = doFloat
-- | Manage hook that makes the window floating with custom placement
customFloating :: W.RationalRect -- ^ @RationalRect x y w h@ that specifies relative position,
-- height and width (see "XMonad.StackSet#RationalRect")
-> ManageHook
customFloating = doRectFloat
| xmonad/xmonad-contrib | XMonad/Util/ExclusiveScratchpads.hs | bsd-3-clause | 10,843 | 0 | 23 | 2,667 | 1,644 | 898 | 746 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Signal.Core.Reify
( Key (..)
, Node (..)
, Nodes
, reify
, reify_fun
)where
import Control.Monad.Operational.Compositional
import Language.Embedded.VHDL (PredicateExp)
import Signal.Core (S, Signal(..), Sig(..), Symbol(..), U, Witness)
import Signal.Core.Stream (Stream, Str)
import qualified Signal.Core as S
import qualified Signal.Core.Stream as Str
import Control.Applicative ((<$>))
import Control.Arrow (first, second)
import Control.Monad
import Control.Monad.State
import Data.Functor.Identity
import Data.Typeable (Typeable)
import Data.Dynamic (Dynamic, toDyn)
import Data.Ref
import Data.Ref.Map (Map, Name)
import qualified Data.Ref.Map as M
import Prelude hiding (Left, Right)
import System.Mem.StableName -- !
--------------------------------------------------------------------------------
-- * Graph representation of Signals
--------------------------------------------------------------------------------
-- | ...
data Key (i :: (* -> *) -> * -> *) (a :: *)
where
Key :: Name (S Symbol i a) -> Key i a
-- | ...
data Node (i :: (* -> *) -> * -> *) (a :: *)
where
Node :: (Witness i a, Typeable a) => S Key i a -> Node i (S Symbol i a)
--------------------------------------------------------------------------------
-- ** Reify Monad
type Nodes i = Map (Node i)
type Names = Map (Name)
type Reify i = StateT (Nodes i, Names) IO
--------------------------------------------------------------------------------
-- ** Helpers for insert/lookup
insertNode :: Ref (S Symbol i a) -> Node i (S Symbol i a) -> Reify i (Key i a)
insertNode ref@(Ref name _) node = modify (first $ M.insert ref node) >> return (Key name)
insertName :: Ref (S Symbol i a) -> Reify i ()
insertName ref@(Ref name _) = modify . second $ M.insert ref name
lookupName :: Ref (S Symbol i a) -> Reify i (Maybe (Key i a))
lookupName ref@(Ref name _) = do
node <- gets (M.lookup name . snd)
return $ case node of
Nothing -> Nothing
Just old -> Just (Key old)
--------------------------------------------------------------------------------
-- * Reification of Signals
--------------------------------------------------------------------------------
reify'
:: forall i a. Typeable a
=> Symbol i a
-> Reify i (Key i a)
reify' (Symbol ref@(Ref _ node)) =
do name <- lookupName ref
case name of
Just old -> return old
Nothing -> case node of
(S.Var dyn) -> insertNode ref (Node (S.Var dyn))
(S.Repeat str) -> insertNode ref (Node (S.Repeat str))
(S.Map f sig) ->
do key <- reify' sig
insertNode ref (Node (S.Map f key))
(S.Join l r) ->
do lkey <- reify' l
rkey <- reify' r
insertNode ref (Node (S.Join lkey rkey))
(S.Left l) ->
do lkey <- reify' l
insertNode ref (Node (S.Left lkey))
(S.Right r) ->
do rkey <- reify' r
insertNode ref (Node (S.Right rkey))
(S.Delay v sig) ->
do key <- reify' sig
insertNode ref (Node (S.Delay v key))
--------------------------------------------------------------------------------
-- ** ...
-- | ...
reify :: Typeable a => Sig i a -> IO (Key i (Identity a), Nodes i)
reify (Sig (Signal sym)) = second fst <$> runStateT (reify' sym) (M.empty, M.empty)
-- | ...
reify_fun
:: ( PredicateExp (IExp i) a
, Typeable i
, Typeable a
, Typeable b
)
=> (Sig i a -> Sig i b)
-> IO ( Key i (Identity b)
, Key i (Identity a)
, Nodes i
)
reify_fun f =
do let (Sig (Signal (Symbol (Ref var _))), sig) = let a = Sig (S.variable (toDyn f)) in (a, f a)
(key, nodes) <- reify sig
return
( key
, Key var
, nodes)
--------------------------------------------------------------------------------
| markus-git/signal | src/Signal/Core/Reify.hs | bsd-3-clause | 4,025 | 0 | 20 | 999 | 1,455 | 768 | 687 | -1 | -1 |
-- | 'Select' and 'SelectArr' are the composable units of database
-- querying that are used in Opaleye.
module Opaleye.Select where
import qualified Opaleye.QueryArr as Q
-- | A Postgres @SELECT@, i.e. some functionality that can run via SQL
-- and produce a collection of rows.
--
-- @Select a@ is analogous to a Haskell value @[a]@.
type Select = SelectArr ()
-- | @SelectArr a b@ is analogous to a Haskell function @a -> [b]@.
type SelectArr = Q.QueryArr
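-- For example, selecting every row of a table yields a 'Select' (a sketch;
-- @userTable@ stands for a hypothetical 'Table' definition, and the function
-- is called 'queryTable' in older Opaleye versions):
--
-- > allUsers :: Select UserRow
-- > allUsers = selectTable userTable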
| WraithM/haskell-opaleye | src/Opaleye/Select.hs | bsd-3-clause | 463 | 0 | 6 | 84 | 40 | 28 | 12 | 4 | 0 |
module GEC.KeyExchange
( -- * Types
P.StsCtx, P.GecKeError(..), P.GenError, P.mkCtx
-- * Aliases
, P.Message1, P.Message2, P.Message3, P.KeyMaterial
-- * Message Construction
, initiate, respond, responseAck, finish
-- * Constants
, P.messageOneSize, P.messageTwoSize, P.messageThreeSize
) where
import qualified GEC.KeyExchange.Pure as P
import Crypto.Random (CryptoRandomGen, throwLeft)
initiate :: CryptoRandomGen g => g -> P.StsCtx -> (P.Message1, P.StsCtx, g)
initiate g c = throwLeft (P.initiate g c)
respond :: CryptoRandomGen g => g -> P.StsCtx -> P.Message1 -> (P.Message1, P.StsCtx, g)
respond g c _m = throwLeft (P.initiate g c)
responseAck :: P.StsCtx -> P.Message2 -> Int -> (P.Message3, P.KeyMaterial)
responseAck c m n = throwLeft (P.responseAck c m n)
finish :: P.StsCtx -> P.Message3 -> Int -> P.KeyMaterial
finish c m n = throwLeft (P.finish c m n)
| GaloisInc/gec | src/GEC/KeyExchange.hs | bsd-3-clause | 926 | 0 | 10 | 186 | 337 | 187 | 150 | 16 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Setup
-- Copyright : Isaac Jones 2003-2004
-- Duncan Coutts 2007
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This is a big module, but not very complicated. The code is very regular
-- and repetitive. It defines the command line interface for all the Cabal
-- commands. For each command (like @configure@, @build@ etc) it defines a type
-- that holds all the flags, the default set of flags and a 'CommandUI' that
-- maps command line flags to and from the corresponding flags type.
--
-- All the flags types are instances of 'Monoid', see
-- <http://www.haskell.org/pipermail/cabal-devel/2007-December/001509.html>
-- for an explanation.
--
-- The types defined here get used in the front end and especially in
-- @cabal-install@ which has to do quite a bit of manipulating sets of command
-- line flags.
--
-- This is actually relatively nice, it works quite well. The main change it
-- needs is to unify it with the code for managing sets of fields that can be
-- read and written from files. This would allow us to save configure flags in
-- config files.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
{-# LANGUAGE CPP #-}
module Distribution.Simple.Setup (
GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand,
ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand,
configAbsolutePaths, readPackageDbList, showPackageDbList,
CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand,
InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand,
HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand,
HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand,
BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand,
buildVerbose,
ReplFlags(..), defaultReplFlags, replCommand,
CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand,
RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand,
unregisterCommand,
SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand,
TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand,
TestShowDetails(..),
BenchmarkFlags(..), emptyBenchmarkFlags,
defaultBenchmarkFlags, benchmarkCommand,
CopyDest(..),
configureArgs, configureOptions, configureCCompiler, configureLinker,
buildOptions, installDirsOptions,
programConfigurationOptions, programConfigurationPaths',
defaultDistPref,
Flag(..),
toFlag,
fromFlag,
fromFlagOrDefault,
flagToMaybe,
flagToList,
boolOpt, boolOpt', trueArg, falseArg, optionVerbosity, numJobsParser ) where
import Distribution.Compiler ()
import Distribution.ReadE
import Distribution.Text
( Text(..), display )
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import Distribution.Package ( Dependency(..)
, PackageName
, InstalledPackageId )
import Distribution.PackageDescription
( FlagName(..), FlagAssignment )
import Distribution.Simple.Command hiding (boolOpt, boolOpt')
import qualified Distribution.Simple.Command as Command
import Distribution.Simple.Compiler
( CompilerFlavor(..), defaultCompilerFlavor, PackageDB(..)
, OptimisationLevel(..), flagToOptimisationLevel
, absolutePackageDBPath )
import Distribution.Simple.Utils
( wrapLine, lowercase, intercalate )
import Distribution.Simple.Program (Program(..), ProgramConfiguration,
requireProgram,
programInvocation, progInvokePath, progInvokeArgs,
knownPrograms,
addKnownProgram, emptyProgramConfiguration,
haddockProgram, ghcProgram, gccProgram, ldProgram)
import Distribution.Simple.InstallDirs
( InstallDirs(..), CopyDest(..),
PathTemplate, toPathTemplate, fromPathTemplate )
import Distribution.Verbosity
import Control.Monad (liftM)
import Data.List ( sort )
import Data.Char ( isSpace, isAlpha )
import Data.Monoid ( Monoid(..) )
-- FIXME Not sure where this should live
defaultDistPref :: FilePath
defaultDistPref = "dist"
-- ------------------------------------------------------------
-- * Flag type
-- ------------------------------------------------------------
-- | All flags are monoids, they come in two flavours:
--
-- 1. list flags eg
--
-- > --ghc-option=foo --ghc-option=bar
--
-- gives us all the values ["foo", "bar"]
--
-- 2. singular value flags, eg:
--
-- > --enable-foo --disable-foo
--
-- gives us Just False
-- So this Flag type is for the latter singular kind of flag.
-- Its monoid instance gives us the behaviour where it starts out as
-- 'NoFlag' and later flags override earlier ones.
--
data Flag a = Flag a | NoFlag deriving (Show, Read, Eq)
instance Functor Flag where
fmap f (Flag x) = Flag (f x)
fmap _ NoFlag = NoFlag
instance Monoid (Flag a) where
mempty = NoFlag
_ `mappend` f@(Flag _) = f
f `mappend` NoFlag = f
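-- A quick sketch of the override behaviour described above (using the
-- derived 'Eq' instance):
--
-- > NoFlag     `mappend` Flag True  ==  Flag True
-- > Flag False `mappend` Flag True  ==  Flag True   -- later flag wins
-- > Flag False `mappend` NoFlag     ==  Flag False
-- > fromFlagOrDefault True NoFlag   ==  True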
instance Bounded a => Bounded (Flag a) where
minBound = toFlag minBound
maxBound = toFlag maxBound
instance Enum a => Enum (Flag a) where
fromEnum = fromEnum . fromFlag
toEnum = toFlag . toEnum
enumFrom (Flag a) = map toFlag . enumFrom $ a
enumFrom _ = []
enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b
enumFromThen _ _ = []
enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b
enumFromTo _ _ = []
enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c
enumFromThenTo _ _ _ = []
toFlag :: a -> Flag a
toFlag = Flag
fromFlag :: Flag a -> a
fromFlag (Flag x) = x
fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault"
fromFlagOrDefault :: a -> Flag a -> a
fromFlagOrDefault _ (Flag x) = x
fromFlagOrDefault def NoFlag = def
flagToMaybe :: Flag a -> Maybe a
flagToMaybe (Flag x) = Just x
flagToMaybe NoFlag = Nothing
flagToList :: Flag a -> [a]
flagToList (Flag x) = [x]
flagToList NoFlag = []
allFlags :: [Flag Bool] -> Flag Bool
allFlags flags = toFlag $ all (\f -> fromFlagOrDefault False f) flags
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- In fact since individual flags types are monoids and these are just sets of
-- flags then they are also monoids pointwise. This turns out to be really
-- useful. The mempty is the set of empty flags and mappend allows us to
-- override specific flags. For example we can start with default flags and
-- override with the ones we get from a file or the command line, or both.
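-- A sketch of that pattern (@savedFlags@ and @commandLineFlags@ are
-- hypothetical values parsed from a config file and from the command line):
--
-- > effectiveFlags = defaultGlobalFlags `mappend` savedFlags `mappend` commandLineFlags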
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool
}
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False
}
globalCommand :: CommandUI GlobalFlags
globalCommand = CommandUI {
commandName = "",
commandSynopsis = "",
commandUsage = \_ ->
"This Setup program uses the Haskell Cabal Infrastructure.\n"
++ "See http://www.haskell.org/cabal/ for more information.\n",
commandDescription = Just $ \pname ->
"For more information about a command use\n"
++ " " ++ pname ++ " COMMAND --help\n\n"
++ "Typical steps for installing Cabal packages:\n"
++ concat [ " " ++ pname ++ " " ++ x ++ "\n"
| x <- ["configure", "build", "install"]],
commandDefaultFlags = defaultGlobalFlags,
commandOptions = \_ ->
[option ['V'] ["version"]
"Print version information"
globalVersion (\v flags -> flags { globalVersion = v })
trueArg
,option [] ["numeric-version"]
"Print just the version number"
globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
trueArg
]
}
emptyGlobalFlags :: GlobalFlags
emptyGlobalFlags = mempty
instance Monoid GlobalFlags where
mempty = GlobalFlags {
globalVersion = mempty,
globalNumericVersion = mempty
}
mappend a b = GlobalFlags {
globalVersion = combine globalVersion,
globalNumericVersion = combine globalNumericVersion
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | Flags to @configure@ command
data ConfigFlags = ConfigFlags {
--FIXME: the configPrograms is only here to pass info through to configure
-- because the type of configure is constrained by the UserHooks.
-- when we change UserHooks next we should pass the initial
-- ProgramConfiguration directly and not via ConfigFlags
configPrograms :: ProgramConfiguration, -- ^All programs that cabal may
-- run
configProgramPaths :: [(String, FilePath)], -- ^user-specified program paths
configProgramArgs :: [(String, [String])], -- ^user-specified program args
configProgramPathExtra :: [FilePath], -- ^Extend the $PATH
configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the
-- compiler, such as GHC or
-- Hugs.
configHcPath :: Flag FilePath, -- ^given compiler location
configHcPkg :: Flag FilePath, -- ^given hc-pkg location
configVanillaLib :: Flag Bool, -- ^Enable vanilla library
configProfLib :: Flag Bool, -- ^Enable profiling in the library
configSharedLib :: Flag Bool, -- ^Build shared library
configDynExe :: Flag Bool, -- ^Enable dynamic linking of the
-- executables.
configProfExe :: Flag Bool, -- ^Enable profiling in the
-- executables.
configConfigureArgs :: [String], -- ^Extra arguments to @configure@
configOptimization :: Flag OptimisationLevel, -- ^Enable optimization.
configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix.
configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix.
configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation
-- paths
configScratchDir :: Flag FilePath,
configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries
configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files
configDistPref :: Flag FilePath, -- ^"dist" prefix
configVerbosity :: Flag Verbosity, -- ^verbosity level
configUserInstall :: Flag Bool, -- ^The --user\/--global flag
configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use
configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi
configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC
configStripExes :: Flag Bool, -- ^Enable executable stripping
configConstraints :: [Dependency], -- ^Additional constraints for
-- dependencies.
configDependencies :: [(PackageName, InstalledPackageId)],
-- ^The packages depended on.
configConfigurationsFlags :: FlagAssignment,
configTests :: Flag Bool, -- ^Enable test suite compilation
configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation
configLibCoverage :: Flag Bool,
-- ^Enable test suite program coverage.
configExactConfiguration :: Flag Bool
-- ^All direct dependencies and flags are provided on the command line by
-- the user via the '--dependency' and '--flags' options.
}
deriving (Read,Show)
configAbsolutePaths :: ConfigFlags -> IO ConfigFlags
configAbsolutePaths f =
(\v -> f { configPackageDBs = v })
`liftM` mapM (maybe (return Nothing) (liftM Just . absolutePackageDBPath))
(configPackageDBs f)
defaultConfigFlags :: ProgramConfiguration -> ConfigFlags
defaultConfigFlags progConf = emptyConfigFlags {
configPrograms = progConf,
configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor,
configVanillaLib = Flag True,
configProfLib = Flag False,
configSharedLib = NoFlag,
configDynExe = Flag False,
configProfExe = Flag False,
configOptimization = Flag NormalOptimisation,
configProgPrefix = Flag (toPathTemplate ""),
configProgSuffix = Flag (toPathTemplate ""),
configDistPref = Flag defaultDistPref,
configVerbosity = Flag normal,
configUserInstall = Flag False, --TODO: reverse this
#if defined(mingw32_HOST_OS)
-- See #1589.
configGHCiLib = Flag True,
#else
configGHCiLib = Flag False,
#endif
configSplitObjs = Flag False, -- takes longer, so turn off by default
configStripExes = Flag True,
configTests = Flag False,
configBenchmarks = Flag False,
configLibCoverage = Flag False,
configExactConfiguration = Flag False
}
configureCommand :: ProgramConfiguration -> CommandUI ConfigFlags
configureCommand progConf = makeCommand name shortDesc
longDesc defaultFlags options
where
name = "configure"
shortDesc = "Prepare to build the package."
longDesc = Just (\_ -> programFlagsDescription progConf)
defaultFlags = defaultConfigFlags progConf
options showOrParseArgs =
configureOptions showOrParseArgs
++ programConfigurationPaths progConf showOrParseArgs
configProgramPaths (\v fs -> fs { configProgramPaths = v })
++ programConfigurationOption progConf showOrParseArgs
configProgramArgs (\v fs -> fs { configProgramArgs = v })
++ programConfigurationOptions progConf showOrParseArgs
configProgramArgs (\v fs -> fs { configProgramArgs = v })
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions showOrParseArgs =
[optionVerbosity configVerbosity
(\v flags -> flags { configVerbosity = v })
,optionDistPref
configDistPref (\d flags -> flags { configDistPref = d })
showOrParseArgs
,option [] ["compiler"] "compiler"
configHcFlavor (\v flags -> flags { configHcFlavor = v })
(choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC")
, (Flag NHC, ([] , ["nhc98"]), "compile with NHC")
, (Flag JHC, ([] , ["jhc"]), "compile with JHC")
, (Flag LHC, ([] , ["lhc"]), "compile with LHC")
, (Flag Hugs,([] , ["hugs"]), "compile with Hugs")
, (Flag UHC, ([] , ["uhc"]), "compile with UHC")
-- "haskell-suite" compiler id string will be replaced
-- by a more specific one during the configure stage
, (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]),
"compile with a haskell-suite compiler")])
,option "w" ["with-compiler"]
"give the path to a particular compiler"
configHcPath (\v flags -> flags { configHcPath = v })
(reqArgFlag "PATH")
,option "" ["with-hc-pkg"]
"give the path to the package tool"
configHcPkg (\v flags -> flags { configHcPkg = v })
(reqArgFlag "PATH")
]
++ map liftInstallDirs installDirsOptions
++ [option "b" ["scratchdir"]
"directory to receive the built package (hugs-only)"
configScratchDir (\v flags -> flags { configScratchDir = v })
(reqArgFlag "DIR")
--TODO: eliminate scratchdir flag
,option "" ["program-prefix"]
"prefix to be applied to installed executables"
configProgPrefix
(\v flags -> flags { configProgPrefix = v })
(reqPathTemplateArgFlag "PREFIX")
,option "" ["program-suffix"]
"suffix to be applied to installed executables"
configProgSuffix (\v flags -> flags { configProgSuffix = v } )
(reqPathTemplateArgFlag "SUFFIX")
,option "" ["library-vanilla"]
"Vanilla libraries"
configVanillaLib (\v flags -> flags { configVanillaLib = v })
(boolOpt [] [])
,option "p" ["library-profiling"]
"Library profiling"
configProfLib (\v flags -> flags { configProfLib = v })
(boolOpt "p" [])
,option "" ["shared"]
"Shared library"
configSharedLib (\v flags -> flags { configSharedLib = v })
(boolOpt [] [])
,option "" ["executable-dynamic"]
"Executable dynamic linking"
configDynExe (\v flags -> flags { configDynExe = v })
(boolOpt [] [])
,option "" ["executable-profiling"]
"Executable profiling"
configProfExe (\v flags -> flags { configProfExe = v })
(boolOpt [] [])
,multiOption "optimization"
configOptimization (\v flags -> flags { configOptimization = v })
[optArg' "n" (Flag . flagToOptimisationLevel)
(\f -> case f of
Flag NoOptimisation -> []
Flag NormalOptimisation -> [Nothing]
Flag MaximumOptimisation -> [Just "2"]
_ -> [])
"O" ["enable-optimization","enable-optimisation"]
"Build with optimization (n is 0--2, default is 1)",
noArg (Flag NoOptimisation) []
["disable-optimization","disable-optimisation"]
"Build without optimization"
]
,option "" ["library-for-ghci"]
"compile library for use with GHCi"
configGHCiLib (\v flags -> flags { configGHCiLib = v })
(boolOpt [] [])
,option "" ["split-objs"]
"split library into smaller objects to reduce binary sizes (GHC 6.6+)"
configSplitObjs (\v flags -> flags { configSplitObjs = v })
(boolOpt [] [])
,option "" ["executable-stripping"]
"strip executables upon installation to reduce binary sizes"
configStripExes (\v flags -> flags { configStripExes = v })
(boolOpt [] [])
,option "" ["configure-option"]
"Extra option for configure"
configConfigureArgs (\v flags -> flags { configConfigureArgs = v })
(reqArg' "OPT" (\x -> [x]) id)
,option "" ["user-install"]
"doing a per-user installation"
configUserInstall (\v flags -> flags { configUserInstall = v })
(boolOpt' ([],["user"]) ([], ["global"]))
,option "" ["package-db"]
"Use a given package database (to satisfy dependencies and register in). May be a specific file, 'global', 'user' or 'clear'."
configPackageDBs (\v flags -> flags { configPackageDBs = v })
(reqArg' "DB" readPackageDbList showPackageDbList)
,option "f" ["flags"]
"Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false."
configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v })
(reqArg' "FLAGS" readFlagList showFlagList)
,option "" ["extra-include-dirs"]
"A list of directories to search for header files"
configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["extra-lib-dirs"]
"A list of directories to search for external libraries"
configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["extra-prog-path"]
"A list of directories to search for required programs (in addition to the normal search locations)"
configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v})
(reqArg' "PATH" (\x -> [x]) id)
,option "" ["constraint"]
"A list of additional constraints on the dependencies."
configConstraints (\v flags -> flags { configConstraints = v})
(reqArg "DEPENDENCY"
(readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parse))
(map (\x -> display x)))
,option "" ["dependency"]
"A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\""
configDependencies (\v flags -> flags { configDependencies = v})
(reqArg "NAME=ID"
(readP_to_E (const "dependency expected") ((\x -> [x]) `fmap` parseDependency))
(map (\x -> display (fst x) ++ "=" ++ display (snd x))))
,option "" ["tests"]
"dependency checking and compilation for test suites listed in the package description file."
configTests (\v flags -> flags { configTests = v })
(boolOpt [] [])
,option "" ["library-coverage"]
"build library and test suites with Haskell Program Coverage enabled. (GHC only)"
configLibCoverage (\v flags -> flags { configLibCoverage = v })
(boolOpt [] [])
,option "" ["exact-configuration"]
"All direct dependencies and flags are provided on the command line."
configExactConfiguration
(\v flags -> flags { configExactConfiguration = v })
trueArg
,option "" ["benchmarks"]
"dependency checking and compilation for benchmarks listed in the package description file."
configBenchmarks (\v flags -> flags { configBenchmarks = v })
(boolOpt [] [])
]
where
readFlagList :: String -> FlagAssignment
readFlagList = map tagWithValue . words
where tagWithValue ('-':fname) = (FlagName (lowercase fname), False)
tagWithValue fname = (FlagName (lowercase fname), True)
showFlagList :: FlagAssignment -> [String]
showFlagList fs = [ if not set then '-':fname else fname
| (FlagName fname, set) <- fs]
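      -- Illustrative example (not part of the original source), matching the
      -- --flags help text above:
      --
      -- > readFlagList "debug -usebytestrings"
      -- >   == [(FlagName "debug", True), (FlagName "usebytestrings", False)]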
liftInstallDirs =
liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v })
reqPathTemplateArgFlag title _sf _lf d get set =
reqArgFlag title _sf _lf d
(fmap fromPathTemplate . get) (set . fmap toPathTemplate)
readPackageDbList :: String -> [Maybe PackageDB]
readPackageDbList "clear" = [Nothing]
readPackageDbList "global" = [Just GlobalPackageDB]
readPackageDbList "user" = [Just UserPackageDB]
readPackageDbList other = [Just (SpecificPackageDB other)]
showPackageDbList :: [Maybe PackageDB] -> [String]
showPackageDbList = map showPackageDb
where
showPackageDb Nothing = "clear"
showPackageDb (Just GlobalPackageDB) = "global"
showPackageDb (Just UserPackageDB) = "user"
showPackageDb (Just (SpecificPackageDB db)) = db
parseDependency :: Parse.ReadP r (PackageName, InstalledPackageId)
parseDependency = do
x <- parse
_ <- Parse.char '='
y <- parse
return (x, y)
installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))]
installDirsOptions =
[ option "" ["prefix"]
"bake this prefix in preparation of installation"
prefix (\v flags -> flags { prefix = v })
installDirArg
, option "" ["bindir"]
"installation directory for executables"
bindir (\v flags -> flags { bindir = v })
installDirArg
, option "" ["libdir"]
"installation directory for libraries"
libdir (\v flags -> flags { libdir = v })
installDirArg
, option "" ["libsubdir"]
"subdirectory of libdir in which libs are installed"
libsubdir (\v flags -> flags { libsubdir = v })
installDirArg
, option "" ["libexecdir"]
"installation directory for program executables"
libexecdir (\v flags -> flags { libexecdir = v })
installDirArg
, option "" ["datadir"]
"installation directory for read-only data"
datadir (\v flags -> flags { datadir = v })
installDirArg
, option "" ["datasubdir"]
"subdirectory of datadir in which data files are installed"
datasubdir (\v flags -> flags { datasubdir = v })
installDirArg
, option "" ["docdir"]
"installation directory for documentation"
docdir (\v flags -> flags { docdir = v })
installDirArg
, option "" ["htmldir"]
"installation directory for HTML documentation"
htmldir (\v flags -> flags { htmldir = v })
installDirArg
, option "" ["haddockdir"]
"installation directory for haddock interfaces"
haddockdir (\v flags -> flags { haddockdir = v })
installDirArg
, option "" ["sysconfdir"]
"installation directory for configuration files"
sysconfdir (\v flags -> flags { sysconfdir = v })
installDirArg
]
where
installDirArg _sf _lf d get set =
reqArgFlag "DIR" _sf _lf d
(fmap fromPathTemplate . get) (set . fmap toPathTemplate)
emptyConfigFlags :: ConfigFlags
emptyConfigFlags = mempty
instance Monoid ConfigFlags where
mempty = ConfigFlags {
configPrograms = error "FIXME: remove configPrograms",
configProgramPaths = mempty,
configProgramArgs = mempty,
configProgramPathExtra = mempty,
configHcFlavor = mempty,
configHcPath = mempty,
configHcPkg = mempty,
configVanillaLib = mempty,
configProfLib = mempty,
configSharedLib = mempty,
configDynExe = mempty,
configProfExe = mempty,
configConfigureArgs = mempty,
configOptimization = mempty,
configProgPrefix = mempty,
configProgSuffix = mempty,
configInstallDirs = mempty,
configScratchDir = mempty,
configDistPref = mempty,
configVerbosity = mempty,
configUserInstall = mempty,
configPackageDBs = mempty,
configGHCiLib = mempty,
configSplitObjs = mempty,
configStripExes = mempty,
configExtraLibDirs = mempty,
configConstraints = mempty,
configDependencies = mempty,
configExtraIncludeDirs = mempty,
configConfigurationsFlags = mempty,
configTests = mempty,
configLibCoverage = mempty,
configExactConfiguration = mempty,
configBenchmarks = mempty
}
mappend a b = ConfigFlags {
configPrograms = configPrograms b,
configProgramPaths = combine configProgramPaths,
configProgramArgs = combine configProgramArgs,
configProgramPathExtra = combine configProgramPathExtra,
configHcFlavor = combine configHcFlavor,
configHcPath = combine configHcPath,
configHcPkg = combine configHcPkg,
configVanillaLib = combine configVanillaLib,
configProfLib = combine configProfLib,
configSharedLib = combine configSharedLib,
configDynExe = combine configDynExe,
configProfExe = combine configProfExe,
configConfigureArgs = combine configConfigureArgs,
configOptimization = combine configOptimization,
configProgPrefix = combine configProgPrefix,
configProgSuffix = combine configProgSuffix,
configInstallDirs = combine configInstallDirs,
configScratchDir = combine configScratchDir,
configDistPref = combine configDistPref,
configVerbosity = combine configVerbosity,
configUserInstall = combine configUserInstall,
configPackageDBs = combine configPackageDBs,
configGHCiLib = combine configGHCiLib,
configSplitObjs = combine configSplitObjs,
configStripExes = combine configStripExes,
configExtraLibDirs = combine configExtraLibDirs,
configConstraints = combine configConstraints,
configDependencies = combine configDependencies,
configExtraIncludeDirs = combine configExtraIncludeDirs,
configConfigurationsFlags = combine configConfigurationsFlags,
configTests = combine configTests,
configLibCoverage = combine configLibCoverage,
configExactConfiguration = combine configExactConfiguration,
configBenchmarks = combine configBenchmarks
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Copy flags
-- ------------------------------------------------------------
-- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity)
data CopyFlags = CopyFlags {
copyDest :: Flag CopyDest,
copyDistPref :: Flag FilePath,
copyVerbosity :: Flag Verbosity
}
deriving Show
defaultCopyFlags :: CopyFlags
defaultCopyFlags = CopyFlags {
copyDest = Flag NoCopyDest,
copyDistPref = Flag defaultDistPref,
copyVerbosity = Flag normal
}
copyCommand :: CommandUI CopyFlags
copyCommand = makeCommand name shortDesc longDesc defaultCopyFlags options
where
name = "copy"
shortDesc = "Copy the files into the install locations."
longDesc = Just $ \_ ->
"Does not call register, and allows a prefix at install time\n"
++ "Without the --destdir flag, configure determines location.\n"
options showOrParseArgs =
[optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v })
,optionDistPref
copyDistPref (\d flags -> flags { copyDistPref = d })
showOrParseArgs
,option "" ["destdir"]
"directory to copy files to, prepended to installation directories"
copyDest (\v flags -> flags { copyDest = v })
(reqArg "DIR" (succeedReadE (Flag . CopyTo))
(\f -> case f of Flag (CopyTo p) -> [p]; _ -> []))
]
emptyCopyFlags :: CopyFlags
emptyCopyFlags = mempty
instance Monoid CopyFlags where
mempty = CopyFlags {
copyDest = mempty,
copyDistPref = mempty,
copyVerbosity = mempty
}
mappend a b = CopyFlags {
copyDest = combine copyDest,
copyDistPref = combine copyDistPref,
copyVerbosity = combine copyVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Flags to @install@: (package db, verbosity)
data InstallFlags = InstallFlags {
installPackageDB :: Flag PackageDB,
installDistPref :: Flag FilePath,
installUseWrapper :: Flag Bool,
installInPlace :: Flag Bool,
installVerbosity :: Flag Verbosity
}
deriving Show
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
installPackageDB = NoFlag,
installDistPref = Flag defaultDistPref,
installUseWrapper = Flag False,
installInPlace = Flag False,
installVerbosity = Flag normal
}
installCommand :: CommandUI InstallFlags
installCommand = makeCommand name shortDesc longDesc defaultInstallFlags options
where
name = "install"
shortDesc = "Copy the files into the install locations. Run register."
longDesc = Just $ \_ ->
"Unlike the copy command, install calls the register command.\n"
++ "If you want to install into a location that is not what was\n"
++ "specified in the configure step, use the copy command.\n"
options showOrParseArgs =
[optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v })
,optionDistPref
installDistPref (\d flags -> flags { installDistPref = d })
showOrParseArgs
,option "" ["inplace"]
"install the package in the install subdirectory of the dist prefix, so it can be used without being installed"
installInPlace (\v flags -> flags { installInPlace = v })
trueArg
,option "" ["shell-wrappers"]
"using shell script wrappers around executables"
installUseWrapper (\v flags -> flags { installUseWrapper = v })
(boolOpt [] [])
,option "" ["package-db"] ""
installPackageDB (\v flags -> flags { installPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"upon configuration register this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default) upon configuration register this package in the system-wide package database")])
]
emptyInstallFlags :: InstallFlags
emptyInstallFlags = mempty
instance Monoid InstallFlags where
mempty = InstallFlags{
installPackageDB = mempty,
installDistPref = mempty,
installUseWrapper = mempty,
installInPlace = mempty,
installVerbosity = mempty
}
mappend a b = InstallFlags{
installPackageDB = combine installPackageDB,
installDistPref = combine installDistPref,
installUseWrapper = combine installUseWrapper,
installInPlace = combine installInPlace,
installVerbosity = combine installVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Flags to @sdist@: (snapshot, verbosity)
data SDistFlags = SDistFlags {
sDistSnapshot :: Flag Bool,
sDistDirectory :: Flag FilePath,
sDistDistPref :: Flag FilePath,
sDistListSources :: Flag FilePath,
sDistVerbosity :: Flag Verbosity
}
deriving Show
defaultSDistFlags :: SDistFlags
defaultSDistFlags = SDistFlags {
sDistSnapshot = Flag False,
sDistDirectory = mempty,
sDistDistPref = Flag defaultDistPref,
sDistListSources = mempty,
sDistVerbosity = Flag normal
}
sdistCommand :: CommandUI SDistFlags
sdistCommand = makeCommand name shortDesc longDesc defaultSDistFlags options
where
name = "sdist"
shortDesc = "Generate a source distribution file (.tar.gz)."
longDesc = Nothing
options showOrParseArgs =
[optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v })
,optionDistPref
sDistDistPref (\d flags -> flags { sDistDistPref = d })
showOrParseArgs
,option "" ["list-sources"]
"Just write a list of the package's sources to a file"
sDistListSources (\v flags -> flags { sDistListSources = v })
(reqArgFlag "FILE")
,option "" ["snapshot"]
"Produce a snapshot source distribution"
sDistSnapshot (\v flags -> flags { sDistSnapshot = v })
trueArg
,option "" ["output-directory"]
("Generate a source distribution in the given directory, "
++ "without creating a tarball")
sDistDirectory (\v flags -> flags { sDistDirectory = v })
(reqArgFlag "DIR")
]
emptySDistFlags :: SDistFlags
emptySDistFlags = mempty
instance Monoid SDistFlags where
mempty = SDistFlags {
sDistSnapshot = mempty,
sDistDirectory = mempty,
sDistDistPref = mempty,
sDistListSources = mempty,
sDistVerbosity = mempty
}
mappend a b = SDistFlags {
sDistSnapshot = combine sDistSnapshot,
sDistDirectory = combine sDistDirectory,
sDistDistPref = combine sDistDistPref,
sDistListSources = combine sDistListSources,
sDistVerbosity = combine sDistVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Register flags
-- ------------------------------------------------------------
-- | Flags to @register@ and @unregister@: (user package, gen-script,
-- in-place, verbosity)
data RegisterFlags = RegisterFlags {
regPackageDB :: Flag PackageDB,
regGenScript :: Flag Bool,
regGenPkgConf :: Flag (Maybe FilePath),
regInPlace :: Flag Bool,
regDistPref :: Flag FilePath,
regVerbosity :: Flag Verbosity
}
deriving Show
defaultRegisterFlags :: RegisterFlags
defaultRegisterFlags = RegisterFlags {
regPackageDB = NoFlag,
regGenScript = Flag False,
regGenPkgConf = NoFlag,
regInPlace = Flag False,
regDistPref = Flag defaultDistPref,
regVerbosity = Flag normal
}
registerCommand :: CommandUI RegisterFlags
registerCommand = makeCommand name shortDesc longDesc
defaultRegisterFlags options
where
name = "register"
shortDesc = "Register this package with the compiler."
longDesc = Nothing
options showOrParseArgs =
[optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v })
,optionDistPref
regDistPref (\d flags -> flags { regDistPref = d })
showOrParseArgs
,option "" ["packageDB"] ""
regPackageDB (\v flags -> flags { regPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"upon registration, register this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default)upon registration, register this package in the system-wide package database")])
,option "" ["inplace"]
"register the package in the build location, so it can be used without being installed"
regInPlace (\v flags -> flags { regInPlace = v })
trueArg
,option "" ["gen-script"]
"instead of registering, generate a script to register later"
regGenScript (\v flags -> flags { regGenScript = v })
trueArg
,option "" ["gen-pkg-config"]
"instead of registering, generate a package registration file"
regGenPkgConf (\v flags -> flags { regGenPkgConf = v })
(optArg' "PKG" Flag flagToList)
]
unregisterCommand :: CommandUI RegisterFlags
unregisterCommand = makeCommand name shortDesc
longDesc defaultRegisterFlags options
where
name = "unregister"
shortDesc = "Unregister this package with the compiler."
longDesc = Nothing
options showOrParseArgs =
[optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v })
,optionDistPref
regDistPref (\d flags -> flags { regDistPref = d })
showOrParseArgs
,option "" ["user"] ""
regPackageDB (\v flags -> flags { regPackageDB = v })
(choiceOpt [ (Flag UserPackageDB, ([],["user"]),
"unregister this package in the user's local package database")
, (Flag GlobalPackageDB, ([],["global"]),
"(default) unregister this package in the system-wide package database")])
,option "" ["gen-script"]
"Instead of performing the unregister command, generate a script to unregister later"
regGenScript (\v flags -> flags { regGenScript = v })
trueArg
]
emptyRegisterFlags :: RegisterFlags
emptyRegisterFlags = mempty
instance Monoid RegisterFlags where
mempty = RegisterFlags {
regPackageDB = mempty,
regGenScript = mempty,
regGenPkgConf = mempty,
regInPlace = mempty,
regDistPref = mempty,
regVerbosity = mempty
}
mappend a b = RegisterFlags {
regPackageDB = combine regPackageDB,
regGenScript = combine regGenScript,
regGenPkgConf = combine regGenPkgConf,
regInPlace = combine regInPlace,
regDistPref = combine regDistPref,
regVerbosity = combine regVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * HsColour flags
-- ------------------------------------------------------------
data HscolourFlags = HscolourFlags {
hscolourCSS :: Flag FilePath,
hscolourExecutables :: Flag Bool,
hscolourTestSuites :: Flag Bool,
hscolourBenchmarks :: Flag Bool,
hscolourDistPref :: Flag FilePath,
hscolourVerbosity :: Flag Verbosity
}
deriving Show
emptyHscolourFlags :: HscolourFlags
emptyHscolourFlags = mempty
defaultHscolourFlags :: HscolourFlags
defaultHscolourFlags = HscolourFlags {
hscolourCSS = NoFlag,
hscolourExecutables = Flag False,
hscolourTestSuites = Flag False,
hscolourBenchmarks = Flag False,
hscolourDistPref = Flag defaultDistPref,
hscolourVerbosity = Flag normal
}
instance Monoid HscolourFlags where
mempty = HscolourFlags {
hscolourCSS = mempty,
hscolourExecutables = mempty,
hscolourTestSuites = mempty,
hscolourBenchmarks = mempty,
hscolourDistPref = mempty,
hscolourVerbosity = mempty
}
mappend a b = HscolourFlags {
hscolourCSS = combine hscolourCSS,
hscolourExecutables = combine hscolourExecutables,
hscolourTestSuites = combine hscolourTestSuites,
hscolourBenchmarks = combine hscolourBenchmarks,
hscolourDistPref = combine hscolourDistPref,
hscolourVerbosity = combine hscolourVerbosity
}
where combine field = field a `mappend` field b
hscolourCommand :: CommandUI HscolourFlags
hscolourCommand = makeCommand name shortDesc longDesc
defaultHscolourFlags options
where
name = "hscolour"
shortDesc = "Generate HsColour colourised code, in HTML format."
longDesc = Just (\_ -> "Requires hscolour.\n")
options showOrParseArgs =
[optionVerbosity hscolourVerbosity
(\v flags -> flags { hscolourVerbosity = v })
,optionDistPref
hscolourDistPref (\d flags -> flags { hscolourDistPref = d })
showOrParseArgs
,option "" ["executables"]
"Run hscolour for Executables targets"
hscolourExecutables (\v flags -> flags { hscolourExecutables = v })
trueArg
,option "" ["tests"]
"Run hscolour for Test Suite targets"
hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v })
trueArg
,option "" ["benchmarks"]
"Run hscolour for Benchmark targets"
hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v })
trueArg
,option "" ["all"]
"Run hscolour for all targets"
(\f -> allFlags [ hscolourExecutables f
, hscolourTestSuites f
, hscolourBenchmarks f])
(\v flags -> flags { hscolourExecutables = v
, hscolourTestSuites = v
, hscolourBenchmarks = v })
trueArg
,option "" ["css"]
"Use a cascading style sheet"
hscolourCSS (\v flags -> flags { hscolourCSS = v })
(reqArgFlag "PATH")
]
-- ------------------------------------------------------------
-- * Haddock flags
-- ------------------------------------------------------------
data HaddockFlags = HaddockFlags {
haddockProgramPaths :: [(String, FilePath)],
haddockProgramArgs :: [(String, [String])],
haddockHoogle :: Flag Bool,
haddockHtml :: Flag Bool,
haddockHtmlLocation :: Flag String,
haddockExecutables :: Flag Bool,
haddockTestSuites :: Flag Bool,
haddockBenchmarks :: Flag Bool,
haddockInternal :: Flag Bool,
haddockCss :: Flag FilePath,
haddockHscolour :: Flag Bool,
haddockHscolourCss :: Flag FilePath,
haddockContents :: Flag PathTemplate,
haddockDistPref :: Flag FilePath,
haddockKeepTempFiles:: Flag Bool,
haddockVerbosity :: Flag Verbosity
}
deriving Show
defaultHaddockFlags :: HaddockFlags
defaultHaddockFlags = HaddockFlags {
haddockProgramPaths = mempty,
haddockProgramArgs = [],
haddockHoogle = Flag False,
haddockHtml = Flag False,
haddockHtmlLocation = NoFlag,
haddockExecutables = Flag False,
haddockTestSuites = Flag False,
haddockBenchmarks = Flag False,
haddockInternal = Flag False,
haddockCss = NoFlag,
haddockHscolour = Flag False,
haddockHscolourCss = NoFlag,
haddockContents = NoFlag,
haddockDistPref = Flag defaultDistPref,
haddockKeepTempFiles= Flag False,
haddockVerbosity = Flag normal
}
haddockCommand :: CommandUI HaddockFlags
haddockCommand = makeCommand name shortDesc longDesc defaultHaddockFlags options
where
name = "haddock"
shortDesc = "Generate Haddock HTML documentation."
longDesc = Just $ \_ -> "Requires the program haddock, either version 0.x or 2.x.\n"
options showOrParseArgs =
[optionVerbosity haddockVerbosity
(\v flags -> flags { haddockVerbosity = v })
,optionDistPref
haddockDistPref (\d flags -> flags { haddockDistPref = d })
showOrParseArgs
,option "" ["keep-temp-files"]
"Keep temporary files"
haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b })
trueArg
,option "" ["hoogle"]
"Generate a hoogle database"
haddockHoogle (\v flags -> flags { haddockHoogle = v })
trueArg
,option "" ["html"]
"Generate HTML documentation (the default)"
haddockHtml (\v flags -> flags { haddockHtml = v })
trueArg
,option "" ["html-location"]
"Location of HTML documentation for pre-requisite packages"
haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v })
(reqArgFlag "URL")
,option "" ["executables"]
"Run haddock for Executables targets"
haddockExecutables (\v flags -> flags { haddockExecutables = v })
trueArg
,option "" ["tests"]
"Run haddock for Test Suite targets"
haddockTestSuites (\v flags -> flags { haddockTestSuites = v })
trueArg
,option "" ["benchmarks"]
"Run haddock for Benchmark targets"
haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v })
trueArg
,option "" ["all"]
"Run haddock for all targets"
(\f -> allFlags [ haddockExecutables f
, haddockTestSuites f
, haddockBenchmarks f])
(\v flags -> flags { haddockExecutables = v
, haddockTestSuites = v
, haddockBenchmarks = v })
trueArg
,option "" ["internal"]
"Run haddock for internal modules and include all symbols"
haddockInternal (\v flags -> flags { haddockInternal = v })
trueArg
,option "" ["css"]
"Use PATH as the haddock stylesheet"
haddockCss (\v flags -> flags { haddockCss = v })
(reqArgFlag "PATH")
,option "" ["hyperlink-source","hyperlink-sources"]
"Hyperlink the documentation to the source code (using HsColour)"
haddockHscolour (\v flags -> flags { haddockHscolour = v })
trueArg
,option "" ["hscolour-css"]
"Use PATH as the HsColour stylesheet"
haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v })
(reqArgFlag "PATH")
,option "" ["contents-location"]
"Bake URL in as the location for the contents page"
haddockContents (\v flags -> flags { haddockContents = v })
(reqArg' "URL"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
]
++ programConfigurationPaths progConf ParseArgs
haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v})
++ programConfigurationOption progConf showOrParseArgs
haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v })
++ programConfigurationOptions progConf ParseArgs
haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v})
progConf = addKnownProgram haddockProgram
$ addKnownProgram ghcProgram
$ emptyProgramConfiguration
emptyHaddockFlags :: HaddockFlags
emptyHaddockFlags = mempty
instance Monoid HaddockFlags where
mempty = HaddockFlags {
haddockProgramPaths = mempty,
haddockProgramArgs = mempty,
haddockHoogle = mempty,
haddockHtml = mempty,
haddockHtmlLocation = mempty,
haddockExecutables = mempty,
haddockTestSuites = mempty,
haddockBenchmarks = mempty,
haddockInternal = mempty,
haddockCss = mempty,
haddockHscolour = mempty,
haddockHscolourCss = mempty,
haddockContents = mempty,
haddockDistPref = mempty,
haddockKeepTempFiles= mempty,
haddockVerbosity = mempty
}
mappend a b = HaddockFlags {
haddockProgramPaths = combine haddockProgramPaths,
haddockProgramArgs = combine haddockProgramArgs,
haddockHoogle = combine haddockHoogle,
    haddockHtml         = combine haddockHtml,
haddockHtmlLocation = combine haddockHtmlLocation,
haddockExecutables = combine haddockExecutables,
haddockTestSuites = combine haddockTestSuites,
haddockBenchmarks = combine haddockBenchmarks,
haddockInternal = combine haddockInternal,
haddockCss = combine haddockCss,
haddockHscolour = combine haddockHscolour,
haddockHscolourCss = combine haddockHscolourCss,
haddockContents = combine haddockContents,
haddockDistPref = combine haddockDistPref,
haddockKeepTempFiles= combine haddockKeepTempFiles,
haddockVerbosity = combine haddockVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Clean flags
-- ------------------------------------------------------------
data CleanFlags = CleanFlags {
cleanSaveConf :: Flag Bool,
cleanDistPref :: Flag FilePath,
cleanVerbosity :: Flag Verbosity
}
deriving Show
defaultCleanFlags :: CleanFlags
defaultCleanFlags = CleanFlags {
cleanSaveConf = Flag False,
cleanDistPref = Flag defaultDistPref,
cleanVerbosity = Flag normal
}
cleanCommand :: CommandUI CleanFlags
cleanCommand = makeCommand name shortDesc longDesc defaultCleanFlags options
where
name = "clean"
shortDesc = "Clean up after a build."
longDesc = Just (\_ -> "Removes .hi, .o, preprocessed sources, etc.\n")
options showOrParseArgs =
[optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v })
,optionDistPref
cleanDistPref (\d flags -> flags { cleanDistPref = d })
showOrParseArgs
,option "s" ["save-configure"]
"Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure."
cleanSaveConf (\v flags -> flags { cleanSaveConf = v })
trueArg
]
emptyCleanFlags :: CleanFlags
emptyCleanFlags = mempty
instance Monoid CleanFlags where
mempty = CleanFlags {
cleanSaveConf = mempty,
cleanDistPref = mempty,
cleanVerbosity = mempty
}
mappend a b = CleanFlags {
cleanSaveConf = combine cleanSaveConf,
cleanDistPref = combine cleanDistPref,
cleanVerbosity = combine cleanVerbosity
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Build flags
-- ------------------------------------------------------------
data BuildFlags = BuildFlags {
buildProgramPaths :: [(String, FilePath)],
buildProgramArgs :: [(String, [String])],
buildDistPref :: Flag FilePath,
buildVerbosity :: Flag Verbosity,
buildNumJobs :: Flag (Maybe Int),
-- TODO: this one should not be here, it's just that the silly
-- UserHooks stop us from passing extra info in other ways
buildArgs :: [String]
}
deriving Show
{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-}
buildVerbose :: BuildFlags -> Verbosity
buildVerbose = fromFlagOrDefault normal . buildVerbosity
defaultBuildFlags :: BuildFlags
defaultBuildFlags = BuildFlags {
buildProgramPaths = mempty,
buildProgramArgs = [],
buildDistPref = Flag defaultDistPref,
buildVerbosity = Flag normal,
buildNumJobs = mempty,
buildArgs = []
}
buildCommand :: ProgramConfiguration -> CommandUI BuildFlags
buildCommand progConf = makeCommand name shortDesc longDesc
defaultBuildFlags (buildOptions progConf)
where
name = "build"
shortDesc = "Compile all targets or specific targets."
longDesc = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " build "
++ " All the components in the package\n"
++ " " ++ pname ++ " build foo "
++ " A component (i.e. lib, exe, test suite)\n"
--TODO: re-enable once we have support for module/file targets
-- ++ " " ++ pname ++ " build Foo.Bar "
-- ++ " A module\n"
-- ++ " " ++ pname ++ " build Foo/Bar.hs"
-- ++ " A file\n\n"
-- ++ "If a target is ambigious it can be qualified with the component "
-- ++ "name, e.g.\n"
-- ++ " " ++ pname ++ " build foo:Foo.Bar\n"
-- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n"
buildOptions :: ProgramConfiguration -> ShowOrParseArgs
-> [OptionField BuildFlags]
buildOptions progConf showOrParseArgs =
optionVerbosity buildVerbosity (\v flags -> flags { buildVerbosity = v })
: optionDistPref
buildDistPref (\d flags -> flags { buildDistPref = d })
showOrParseArgs
: option "j" ["jobs"]
"Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)"
buildNumJobs (\v flags -> flags { buildNumJobs = v })
(optArg "NUM" (fmap Flag numJobsParser)
(Flag Nothing)
(map (Just . maybe "$ncpus" show) . flagToList))
: programConfigurationPaths progConf showOrParseArgs
buildProgramPaths (\v flags -> flags { buildProgramPaths = v})
++ programConfigurationOption progConf showOrParseArgs
buildProgramArgs (\v fs -> fs { buildProgramArgs = v })
++ programConfigurationOptions progConf showOrParseArgs
buildProgramArgs (\v flags -> flags { buildProgramArgs = v})
emptyBuildFlags :: BuildFlags
emptyBuildFlags = mempty
instance Monoid BuildFlags where
mempty = BuildFlags {
buildProgramPaths = mempty,
buildProgramArgs = mempty,
buildVerbosity = mempty,
buildDistPref = mempty,
buildNumJobs = mempty,
buildArgs = mempty
}
mappend a b = BuildFlags {
buildProgramPaths = combine buildProgramPaths,
buildProgramArgs = combine buildProgramArgs,
buildVerbosity = combine buildVerbosity,
buildDistPref = combine buildDistPref,
buildNumJobs = combine buildNumJobs,
buildArgs = combine buildArgs
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Repl Flags
-- ------------------------------------------------------------
data ReplFlags = ReplFlags {
replProgramPaths :: [(String, FilePath)],
replProgramArgs :: [(String, [String])],
replDistPref :: Flag FilePath,
replVerbosity :: Flag Verbosity,
replReload :: Flag Bool
}
deriving Show
defaultReplFlags :: ReplFlags
defaultReplFlags = ReplFlags {
replProgramPaths = mempty,
replProgramArgs = [],
replDistPref = Flag defaultDistPref,
replVerbosity = Flag normal,
replReload = Flag False
}
instance Monoid ReplFlags where
mempty = ReplFlags {
replProgramPaths = mempty,
replProgramArgs = mempty,
replVerbosity = mempty,
replDistPref = mempty,
replReload = mempty
}
mappend a b = ReplFlags {
replProgramPaths = combine replProgramPaths,
replProgramArgs = combine replProgramArgs,
replVerbosity = combine replVerbosity,
replDistPref = combine replDistPref,
replReload = combine replReload
}
where combine field = field a `mappend` field b
replCommand :: ProgramConfiguration -> CommandUI ReplFlags
replCommand progConf = CommandUI {
commandName = "repl",
commandSynopsis = "Open an interpreter session for the given target.",
commandDescription = Just $ \pname ->
"Examples:\n"
++ " " ++ pname ++ " repl "
++ " The first component in the package\n"
++ " " ++ pname ++ " repl foo "
++ " A named component (i.e. lib, exe, test suite)\n",
--TODO: re-enable once we have support for module/file targets
-- ++ " " ++ pname ++ " repl Foo.Bar "
-- ++ " A module\n"
-- ++ " " ++ pname ++ " repl Foo/Bar.hs"
-- ++ " A file\n\n"
-- ++ "If a target is ambigious it can be qualified with the component "
-- ++ "name, e.g.\n"
-- ++ " " ++ pname ++ " repl foo:Foo.Bar\n"
-- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n"
commandUsage = \pname -> "Usage: " ++ pname ++ " repl [FILENAME] [FLAGS]\n",
commandDefaultFlags = defaultReplFlags,
commandOptions = \showOrParseArgs ->
optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v })
: optionDistPref
replDistPref (\d flags -> flags { replDistPref = d })
showOrParseArgs
: programConfigurationPaths progConf showOrParseArgs
replProgramPaths (\v flags -> flags { replProgramPaths = v})
++ programConfigurationOption progConf showOrParseArgs
replProgramArgs (\v flags -> flags { replProgramArgs = v})
++ programConfigurationOptions progConf showOrParseArgs
replProgramArgs (\v flags -> flags { replProgramArgs = v})
++ case showOrParseArgs of
ParseArgs ->
[ option "" ["reload"]
"Used from within an interpreter to update files."
replReload (\v flags -> flags { replReload = v })
trueArg
]
_ -> []
}
-- ------------------------------------------------------------
-- * Test flags
-- ------------------------------------------------------------
data TestShowDetails = Never | Failures | Always
deriving (Eq, Ord, Enum, Bounded, Show)
knownTestShowDetails :: [TestShowDetails]
knownTestShowDetails = [minBound..maxBound]
instance Text TestShowDetails where
disp = Disp.text . lowercase . show
parse = maybe Parse.pfail return . classify =<< ident
where
ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-')
classify str = lookup (lowercase str) enumMap
enumMap :: [(String, TestShowDetails)]
enumMap = [ (display x, x)
| x <- knownTestShowDetails ]
--TODO: do we need this instance?
instance Monoid TestShowDetails where
mempty = Never
mappend a b = if a < b then b else a
data TestFlags = TestFlags {
testDistPref :: Flag FilePath,
testVerbosity :: Flag Verbosity,
testHumanLog :: Flag PathTemplate,
testMachineLog :: Flag PathTemplate,
testShowDetails :: Flag TestShowDetails,
testKeepTix :: Flag Bool,
--TODO: eliminate the test list and pass it directly as positional args to
--the testHook
testList :: Flag [String],
-- TODO: think about if/how options are passed to test exes
testOptions :: [PathTemplate]
}
defaultTestFlags :: TestFlags
defaultTestFlags = TestFlags {
testDistPref = Flag defaultDistPref,
testVerbosity = Flag normal,
testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log",
testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log",
testShowDetails = toFlag Failures,
testKeepTix = toFlag False,
testList = Flag [],
testOptions = []
}
testCommand :: CommandUI TestFlags
testCommand = makeCommand name shortDesc longDesc defaultTestFlags options
where
name = "test"
shortDesc = "Run the test suite, if any (configure with UserHooks)."
longDesc = Nothing
options showOrParseArgs =
[ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v })
, optionDistPref
testDistPref (\d flags -> flags { testDistPref = d })
showOrParseArgs
, option [] ["log"]
("Log all test suite results to file (name template can use "
++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)")
testHumanLog (\v flags -> flags { testHumanLog = v })
(reqArg' "TEMPLATE"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["machine-log"]
("Produce a machine-readable log file (name template can use "
++ "$pkgid, $compiler, $os, $arch, $result)")
testMachineLog (\v flags -> flags { testMachineLog = v })
(reqArg' "TEMPLATE"
(toFlag . toPathTemplate)
(flagToList . fmap fromPathTemplate))
, option [] ["show-details"]
("'always': always show results of individual test cases. "
++ "'never': never show results of individual test cases. "
++ "'failures': show results of failing test cases.")
testShowDetails (\v flags -> flags { testShowDetails = v })
(reqArg "FILTER"
(readP_to_E (\_ -> "--show-details flag expects one of "
++ intercalate ", "
(map display knownTestShowDetails))
(fmap toFlag parse))
(flagToList . fmap display))
, option [] ["keep-tix-files"]
"keep .tix files for HPC between test runs"
testKeepTix (\v flags -> flags { testKeepTix = v})
trueArg
, option [] ["test-options"]
("give extra options to test executables "
++ "(name templates can use $pkgid, $compiler, "
++ "$os, $arch, $test-suite)")
testOptions (\v flags -> flags { testOptions = v })
(reqArg' "TEMPLATES" (map toPathTemplate . splitArgs)
(const []))
, option [] ["test-option"]
("give extra option to test executables "
++ "(no need to quote options containing spaces, "
++ "name template can use $pkgid, $compiler, "
++ "$os, $arch, $test-suite)")
testOptions (\v flags -> flags { testOptions = v })
(reqArg' "TEMPLATE" (\x -> [toPathTemplate x])
(map fromPathTemplate))
]
emptyTestFlags :: TestFlags
emptyTestFlags = mempty
instance Monoid TestFlags where
mempty = TestFlags {
testDistPref = mempty,
testVerbosity = mempty,
testHumanLog = mempty,
testMachineLog = mempty,
testShowDetails = mempty,
testKeepTix = mempty,
testList = mempty,
testOptions = mempty
}
mappend a b = TestFlags {
testDistPref = combine testDistPref,
testVerbosity = combine testVerbosity,
testHumanLog = combine testHumanLog,
testMachineLog = combine testMachineLog,
testShowDetails = combine testShowDetails,
testKeepTix = combine testKeepTix,
testList = combine testList,
testOptions = combine testOptions
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Benchmark flags
-- ------------------------------------------------------------
data BenchmarkFlags = BenchmarkFlags {
benchmarkDistPref :: Flag FilePath,
benchmarkVerbosity :: Flag Verbosity,
benchmarkOptions :: [PathTemplate]
}
defaultBenchmarkFlags :: BenchmarkFlags
defaultBenchmarkFlags = BenchmarkFlags {
benchmarkDistPref = Flag defaultDistPref,
benchmarkVerbosity = Flag normal,
benchmarkOptions = []
}
benchmarkCommand :: CommandUI BenchmarkFlags
benchmarkCommand = makeCommand name shortDesc
longDesc defaultBenchmarkFlags options
where
name = "bench"
shortDesc = "Run the benchmark, if any (configure with UserHooks)."
longDesc = Nothing
options showOrParseArgs =
[ optionVerbosity benchmarkVerbosity
(\v flags -> flags { benchmarkVerbosity = v })
, optionDistPref
benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d })
showOrParseArgs
, option [] ["benchmark-options"]
("give extra options to benchmark executables "
++ "(name templates can use $pkgid, $compiler, "
++ "$os, $arch, $benchmark)")
benchmarkOptions (\v flags -> flags { benchmarkOptions = v })
(reqArg' "TEMPLATES" (map toPathTemplate . splitArgs)
(const []))
, option [] ["benchmark-option"]
("give extra option to benchmark executables "
++ "(no need to quote options containing spaces, "
++ "name template can use $pkgid, $compiler, "
++ "$os, $arch, $benchmark)")
benchmarkOptions (\v flags -> flags { benchmarkOptions = v })
(reqArg' "TEMPLATE" (\x -> [toPathTemplate x])
(map fromPathTemplate))
]
emptyBenchmarkFlags :: BenchmarkFlags
emptyBenchmarkFlags = mempty
instance Monoid BenchmarkFlags where
mempty = BenchmarkFlags {
benchmarkDistPref = mempty,
benchmarkVerbosity = mempty,
benchmarkOptions = mempty
}
mappend a b = BenchmarkFlags {
benchmarkDistPref = combine benchmarkDistPref,
benchmarkVerbosity = combine benchmarkVerbosity,
benchmarkOptions = combine benchmarkOptions
}
where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Shared options utils
-- ------------------------------------------------------------
programFlagsDescription :: ProgramConfiguration -> String
programFlagsDescription progConf =
"The flags --with-PROG and --PROG-option(s) can be used with"
++ " the following programs:"
++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . sort)
[ programName prog | (prog, _) <- knownPrograms progConf ]
++ "\n"
-- | For each known program @PROG@ in 'progConf', produce a @with-PROG@
-- 'OptionField'.
programConfigurationPaths
:: ProgramConfiguration
-> ShowOrParseArgs
-> (flags -> [(String, FilePath)])
-> ([(String, FilePath)] -> (flags -> flags))
-> [OptionField flags]
programConfigurationPaths progConf showOrParseArgs get set =
programConfigurationPaths' ("with-" ++) progConf showOrParseArgs get set
-- | Like 'programConfigurationPaths', but allows to customise the option name.
programConfigurationPaths'
:: (String -> String)
-> ProgramConfiguration
-> ShowOrParseArgs
-> (flags -> [(String, FilePath)])
-> ([(String, FilePath)] -> (flags -> flags))
-> [OptionField flags]
programConfigurationPaths' mkName progConf showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [withProgramPath "PROG"]
ParseArgs -> map (withProgramPath . programName . fst)
(knownPrograms progConf)
where
withProgramPath prog =
option "" [mkName prog]
("give the path to " ++ prog)
get set
(reqArg' "PATH" (\path -> [(prog, path)])
(\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ]))
-- | For each known program @PROG@ in 'progConf', produce a @PROG-option@
-- 'OptionField'.
programConfigurationOption
:: ProgramConfiguration
-> ShowOrParseArgs
-> (flags -> [(String, [String])])
-> ([(String, [String])] -> (flags -> flags))
-> [OptionField flags]
programConfigurationOption progConf showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [programOption "PROG"]
ParseArgs -> map (programOption . programName . fst)
(knownPrograms progConf)
where
programOption prog =
option "" [prog ++ "-option"]
("give an extra option to " ++ prog ++
" (no need to quote options containing spaces)")
get set
(reqArg' "OPT" (\arg -> [(prog, [arg])])
(\progArgs -> concat [ args
| (prog', args) <- progArgs, prog==prog' ]))
-- | For each known program @PROG@ in 'progConf', produce a @PROG-options@
-- 'OptionField'.
programConfigurationOptions
:: ProgramConfiguration
-> ShowOrParseArgs
-> (flags -> [(String, [String])])
-> ([(String, [String])] -> (flags -> flags))
-> [OptionField flags]
programConfigurationOptions progConf showOrParseArgs get set =
case showOrParseArgs of
-- we don't want a verbose help text list so we just show a generic one:
ShowArgs -> [programOptions "PROG"]
ParseArgs -> map (programOptions . programName . fst)
(knownPrograms progConf)
where
programOptions prog =
option "" [prog ++ "-options"]
("give extra options to " ++ prog)
get set
(reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const []))
-- | Common parser for the @-j@ flag of @build@ and @install@.
numJobsParser :: ReadE (Maybe Int)
numJobsParser = ReadE $ \s ->
case s of
"$ncpus" -> Right Nothing
_ -> case reads s of
[(n, "")]
| n < 1 -> Left "The number of jobs should be 1 or more."
| n > 64 -> Left "You probably don't want that many jobs."
| otherwise -> Right (Just n)
_ -> Left "The jobs value should be a number or '$ncpus'"
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
boolOpt :: SFlags -> SFlags
-> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a
boolOpt = Command.boolOpt flagToMaybe Flag
boolOpt' :: OptFlags -> OptFlags
-> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a
boolOpt' = Command.boolOpt' flagToMaybe Flag
trueArg, falseArg :: SFlags -> LFlags -> Description -> (b -> Flag Bool) ->
(Flag Bool -> (b -> b)) -> OptDescr b
trueArg = noArg (Flag True)
falseArg = noArg (Flag False)
reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description ->
(b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b
reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList
optionDistPref :: (flags -> Flag FilePath)
-> (Flag FilePath -> flags -> flags)
-> ShowOrParseArgs
-> OptionField flags
optionDistPref get set = \showOrParseArgs ->
option "" (distPrefFlagName showOrParseArgs)
( "The directory where Cabal puts generated build files "
++ "(default " ++ defaultDistPref ++ ")")
get set
(reqArgFlag "DIR")
where
distPrefFlagName ShowArgs = ["builddir"]
distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"]
optionVerbosity :: (flags -> Flag Verbosity)
-> (Flag Verbosity -> flags -> flags)
-> OptionField flags
optionVerbosity get set =
option "v" ["verbose"]
"Control verbosity (n is 0--3, default verbosity level is 1)"
get set
(optArg "n" (fmap Flag flagToVerbosity)
(Flag verbose) -- default Value if no n is given
(fmap (Just . showForCabal) . flagToList))
-- ------------------------------------------------------------
-- * Other Utils
-- ------------------------------------------------------------
-- | Arguments to pass to a @configure@ script, e.g. generated by
-- @autoconf@.
configureArgs :: Bool -> ConfigFlags -> [String]
configureArgs bcHack flags
= hc_flag
++ optFlag "with-hc-pkg" configHcPkg
++ optFlag' "prefix" prefix
++ optFlag' "bindir" bindir
++ optFlag' "libdir" libdir
++ optFlag' "libexecdir" libexecdir
++ optFlag' "datadir" datadir
++ optFlag' "sysconfdir" sysconfdir
++ configConfigureArgs flags
where
hc_flag = case (configHcFlavor flags, configHcPath flags) of
(_, Flag hc_path) -> [hc_flag_name ++ hc_path]
(Flag hc, NoFlag) -> [hc_flag_name ++ display hc]
(NoFlag,NoFlag) -> []
hc_flag_name
      --TODO kill off this bc hack when defaultUserHooks is removed.
| bcHack = "--with-hc="
| otherwise = "--with-compiler="
optFlag name config_field = case config_field flags of
Flag p -> ["--" ++ name ++ "=" ++ p]
NoFlag -> []
optFlag' name config_field = optFlag name (fmap fromPathTemplate
. config_field
. configInstallDirs)
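-- Illustrative example (not part of the original source): with
-- @configHcPath = Flag "/opt/ghc/bin/ghc"@, a prefix of @/usr/local@ in
-- 'configInstallDirs', every other field 'NoFlag' and @bcHack = False@,
-- 'configureArgs' yields roughly
--
-- > ["--with-compiler=/opt/ghc/bin/ghc", "--prefix=/usr/local"]
--
-- plus whatever extra arguments are in 'configConfigureArgs'.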
configureCCompiler :: Verbosity -> ProgramConfiguration
-> IO (FilePath, [String])
configureCCompiler verbosity lbi = configureProg verbosity lbi gccProgram
configureLinker :: Verbosity -> ProgramConfiguration -> IO (FilePath, [String])
configureLinker verbosity lbi = configureProg verbosity lbi ldProgram
configureProg :: Verbosity -> ProgramConfiguration -> Program
-> IO (FilePath, [String])
configureProg verbosity programConfig prog = do
(p, _) <- requireProgram verbosity prog programConfig
let pInv = programInvocation p []
return (progInvokePath pInv, progInvokeArgs pInv)
-- | Helper function to split a string into a list of arguments.
-- It's supposed to handle quoted things sensibly, eg:
--
-- > splitArgs "--foo=\"C:\Program Files\Bar\" --baz"
-- > = ["--foo=C:\Program Files\Bar", "--baz"]
--
splitArgs :: String -> [String]
splitArgs = space []
where
space :: String -> String -> [String]
space w [] = word w []
space w ( c :s)
| isSpace c = word w (space [] s)
space w ('"':s) = string w s
space w s = nonstring w s
string :: String -> String -> [String]
string w [] = word w []
string w ('"':s) = space w s
string w ( c :s) = string (c:w) s
nonstring :: String -> String -> [String]
nonstring w [] = word w []
nonstring w ('"':s) = string w s
nonstring w ( c :s) = space (c:w) s
word [] s = s
word w s = reverse w : s
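-- Illustrative examples (not part of the original source):
--
-- > splitArgs "-O2 --foo"   == ["-O2", "--foo"]
-- > splitArgs "a \"b c\" d" == ["a", "b c", "d"]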
-- The test cases kinda have to be rewritten from the ground up... :/
--hunitTests :: [Test]
--hunitTests =
-- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)]
-- (flags, commands', unkFlags, ers)
-- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"]
-- in [TestLabel "very basic option parsing" $ TestList [
-- "getOpt flags" ~: "failed" ~:
-- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag,
-- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag]
-- ~=? flags,
-- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands',
-- "getOpt unknown opts" ~: "failed" ~:
-- ["--unknown1", "--unknown2"] ~=? unkFlags,
-- "getOpt errors" ~: "failed" ~: [] ~=? ers],
--
-- TestLabel "test location of various compilers" $ TestList
-- ["configure parsing for prefix and compiler flag" ~: "failed" ~:
-- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), []))
-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, "configure"])
-- | (name, comp) <- m],
--
-- TestLabel "find the package tool" $ TestList
-- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~:
-- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), []))
-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name,
-- "--with-compiler=/foo/comp", "configure"])
-- | (name, comp) <- m],
--
-- TestLabel "simpler commands" $ TestList
-- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag])
-- | (flag, flagCmd) <- [("build", BuildCmd),
-- ("install", InstallCmd Nothing False),
-- ("sdist", SDistCmd),
-- ("register", RegisterCmd False)]
-- ]
-- ]
{- Testing ideas:
* IO to look for hugs and hugs-pkg (which hugs, etc)
* quickCheck to test permutations of arguments
* what other options can we over-ride with a command-line flag?
-}
| fpco/cabal | Cabal/Distribution/Simple/Setup.hs | bsd-3-clause | 78,688 | 0 | 18 | 20,994 | 15,729 | 8,874 | 6,855 | 1,458 | 9 |
module Random (random_nbit_int, random_int) where
import System.IO
import Data.Word
import qualified Data.ByteString.Lazy as BSL
import qualified NumberTheory as NT
import qualified Encoder as Enc
-- Return a random integer with at most num_bits bits, i.e. in the interval [0, 2^num_bits - 1]
random_nbit_int :: Int -> IO Integer
random_nbit_int num_bits = do
contents <- BSL.readFile "/dev/urandom"
let num = (Enc.words_to_int (BSL.unpack contents) ((div num_bits 8) + 1))
return $ num `mod` (2^num_bits)
-- Return a random integer in the interval [0, n-1]
random_int :: Integer -> IO Integer
random_int n = do
let num_bits = NT.get_num_bits n
x <- random_nbit_int num_bits
if x < n then return x else (random_int n)
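-- Illustrative usage (not part of the original module; results are random):
--
-- > x <- random_nbit_int 128   -- 0 <= x < 2^128
-- > y <- random_int 1000       -- 0 <= y <= 999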
| DanielJanzon/CryptoLab | Random.hs | bsd-3-clause | 697 | 0 | 15 | 130 | 207 | 112 | 95 | 16 | 2 |
module Main where
import Mitchell.Prelude
import Hive (Winner, runHive)
import Hive.Console.Player (completion, player)
import System.Console.Haskeline
main :: IO ()
main =
runInputT (setComplete completion defaultSettings) game
>>= print
game :: InputT IO (Maybe Winner)
game = runHive True True (player "Player 1") (player "Player 2")
| mitchellwrosen/hive | hive-console/app/Main.hs | bsd-3-clause | 363 | 0 | 8 | 71 | 114 | 63 | 51 | 11 | 1 |
import Cppf
main = interact (cppf . tokenise)
| kirelagin/perfect-formatting-cpp | Main.hs | bsd-3-clause | 47 | 0 | 7 | 9 | 19 | 10 | 9 | 2 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|
Module : Numeric.ER.RnToRm.Approx.PieceWise
Description : arbitrary precision piece-wise-something function enclosures
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Arbitrary precision piece-wise something
(eg linear, polynomial, rational) enclosures
of functions @R^n->R^m@.
The type of approximation within segments is specified
by an instance of 'FA.ERFnDomApprox'.
The piece-wise construction defines another instance of 'FA.ERFnDomApprox'.
-}
module Numeric.ER.RnToRm.Approx.PieceWise
(
ERFnPiecewise(..)
)
where
import qualified Numeric.ER.RnToRm.BisectionTree as BISTR
import qualified Numeric.ER.RnToRm.BisectionTree.Integration as BTINTEG
import qualified Numeric.ER.RnToRm.Approx as FA
import qualified Numeric.ER.Real.Approx as RA
import qualified Numeric.ER.Real.Approx.Elementary as RAEL
import qualified Numeric.ER.BasicTypes.DomainBox as DBox
import Numeric.ER.BasicTypes.DomainBox (VariableID(..), DomainBox, DomainBoxMappable, DomainIntBox)
import Numeric.ER.BasicTypes
import Numeric.ER.Misc
import qualified Text.Html as H
import Data.Typeable
import Data.Generics.Basics
import Data.Binary
import Data.Maybe
{-|
Arbitrary precision piece-wise something
(eg linear, polynomial, rational) enclosures
of functions @R^n->R^m@.
The type of approximation within segments is specified
by an instance of 'FA.ERFnDomApprox'.
The piece-wise construction defines another instance of 'FA.ERFnDomApprox'.
-}
data ERFnPiecewise box varid domra fa =
ERFnPiecewise (BISTR.BisectionTree box varid domra fa)
deriving (Typeable, Data)
instance (Binary a, Binary b, Binary c, Binary d) => Binary (ERFnPiecewise a b c d) where
put (ERFnPiecewise a) = put a
get = get >>= \a -> return (ERFnPiecewise a)
pwLift1 ::
(DomainBox box varid domra) =>
(fa -> fa) ->
(ERFnPiecewise box varid domra fa) ->
(ERFnPiecewise box varid domra fa)
pwLift1 op (ERFnPiecewise bistr) =
ERFnPiecewise (BISTR.mapWithDom (const op) bistr)
pwLift2 ::
(RA.ERIntApprox domra, FA.ERFnDomApprox box varid domra ranra fa) =>
(fa -> fa -> fa) ->
EffortIndex ->
(ERFnPiecewise box varid domra fa) ->
(ERFnPiecewise box varid domra fa) ->
(ERFnPiecewise box varid domra fa)
pwLift2 op ix f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
ERFnPiecewise $
fromJust $ fst $
BISTR.combineWith faSplit faSplit opBistr ix bistr1 bistr2
where
opBistr domB val1 val2 =
(Just $ op val1 val2, [])
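-- Illustrative note (not part of the original source): 'pwLift1' and
-- 'pwLift2' lift pointwise operations to piecewise enclosures by mapping
-- over (resp. aligning and zipping) the underlying bisection trees; e.g.
-- the 'Num' instance below defines @(+) = pwLift2 (+) 10@, so addition is
-- performed segment by segment.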
pwbistrZipWith ::
(RA.ERIntApprox domra, FA.ERFnDomApprox box varid domra ranra fa) =>
(fa -> fa -> res) ->
EffortIndex ->
(BISTR.BisectionTree box varid domra fa) ->
(BISTR.BisectionTree box varid domra fa) ->
(BISTR.BisectionTree box varid domra res)
pwbistrZipWith op ix bistr1 bistr2 =
fromJust $ fst $
BISTR.combineWith faSplit faSplit opBistr ix bistr1 bistr2
where
opBistr domB val1 val2 =
(Just $ op val1 val2, [])
pwSplit ::
(RA.ERIntApprox domra, DomainBox box varid domra) =>
(fa -> (fa, fa)) ->
(ERFnPiecewise box varid domra fa) -> (ERFnPiecewise box varid domra fa, ERFnPiecewise box varid domra fa)
pwSplit op f@(ERFnPiecewise bistr) =
(ERFnPiecewise bistr1, ERFnPiecewise bistr2)
where
bistr1 = BISTR.mapWithDom (const fst) bistr12
bistr2 = BISTR.mapWithDom (const snd) bistr12
bistr12 = BISTR.mapWithDom (const op) bistr
faSplit ::
(RA.ERIntApprox domra, FA.ERFnDomApprox box varid domra ranra fa) =>
BISTR.ValueSplitter box varid domra fa
faSplit ix depth domB fa var pt =
FA.bisect var (Just pt) fa
faCombine ::
(RA.ERIntApprox domra, FA.ERFnDomApprox box varid domra ranra fa) =>
BISTR.ValueCombiner box varid domra fa
faCombine ix dp (BISTR.Leaf _ _ v) = v
faCombine ix dp (BISTR.Node _ dom x pt lo hi) =
FA.unBisect x (vLO, vHI)
where
vLO = faCombine ix (dp + 1) lo
vHI = faCombine ix (dp + 1) hi
instance
(FA.ERFnDomApprox box varid domra ranra fa, VariableID varid) =>
Show (ERFnPiecewise box varid domra fa)
where
show f@(ERFnPiecewise bistr) =
"\nERFnPiecewise:" ++ show bistr
instance
(FA.ERFnDomApprox box varid domra ranra fa, RA.ERIntApprox fa, H.HTML fa) =>
H.HTML (ERFnPiecewise box varid domra fa)
where
toHtml (ERFnPiecewise bistr) =
H.toHtml bistr
instance
(FA.ERFnDomApprox box varid domra ranra fa) =>
Eq (ERFnPiecewise box varid domra fa)
where
(ERFnPiecewise bistr1) == (ERFnPiecewise bistr2) =
error $
"ERFnPiecewise: Eq: not implemented yet"
instance
(FA.ERFnDomApprox box varid domra ranra fa) =>
Ord (ERFnPiecewise box varid domra fa)
where
compare (ERFnPiecewise bistr1) (ERFnPiecewise bistr2) =
error $
"ERFnPiecewise: Ord: not implemented yet"
instance
(FA.ERFnDomApprox box varid domra ranra fa, VariableID varid) =>
Num (ERFnPiecewise box varid domra fa)
where
fromInteger n = ERFnPiecewise $ BISTR.const DBox.noinfo (fromInteger n)
negate = pwLift1 negate
(+) = pwLift2 (+) 10
(*) = pwLift2 (*) 10
instance
(FA.ERFnDomApprox box varid domra ranra fa, VariableID varid) =>
Fractional (ERFnPiecewise box varid domra fa)
where
fromRational r = ERFnPiecewise $ BISTR.const DBox.noinfo (fromRational r)
recip = pwLift1 recip
instance
(FA.ERFnDomApprox box varid domra ranra fa, VariableID varid) =>
RA.ERApprox (ERFnPiecewise box varid domra fa)
where
initialiseBaseArithmetic _ =
RA.initialiseBaseArithmetic (0 :: fa)
getGranularity (ERFnPiecewise bistr) =
foldl max 10 $ map RA.getGranularity $ BISTR.collectValues bistr
setGranularityOuter gran = pwLift1 (RA.setGranularityOuter gran)
setMinGranularityOuter gran = pwLift1 (RA.setMinGranularityOuter gran)
isBounded (ERFnPiecewise bistr) =
and $ map RA.isBounded $ BISTR.collectValues bistr
f1 /\ f2 = pwLift2 (RA./\) 10 f1 f2
intersectMeasureImprovement ix f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
-- unsafePrint
-- (
-- "ERFnPiecewise: intersectMeasureImprovement:"
-- ++ "\n f1 = " ++ show f1
-- ++ "\n f2 = " ++ show f2
-- ++ "\n isect = " ++ show (ERFnPiecewise bistrIsect)
-- ++ "\n impr = " ++ show (ERFnPiecewise bistrImpr)
-- )
-- | length fas1 == length fas2 =
(ERFnPiecewise bistrIsect, ERFnPiecewise bistrImpr)
-- | otherwise =
-- error $ show $ f1 RA./\ f2
where
bistrIsect = BISTR.mapWithDom (const fst) bistrIsectImpr
bistrImpr = BISTR.mapWithDom (const snd) bistrIsectImpr
bistrIsectImpr = pwbistrZipWith (RA.intersectMeasureImprovement ix) ix bistr1 bistr2
leqReals f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
-- | length fas1 == length fas2 =
leqTuple $ BISTR.collectValues $ pwbistrZipWith (RA.leqReals) 10 bistr1 bistr2
-- | otherwise =
-- error $ show $ f1 RA./\ f2
where
leqTuple [] = Just True
leqTuple (tv : tvs) =
case and $ map (== tv) tvs of
True -> tv
_ -> Nothing
refines f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
and $ BISTR.collectValues $ pwbistrZipWith (RA.refines) 10 bistr1 bistr2
compareApprox f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
BISTR.compare RA.compareApprox RA.compareApprox bistr1 bistr2
instance
(FA.ERFnDomApprox box varid domra ranra fa, RA.ERIntApprox fa, VariableID varid) =>
RA.ERIntApprox (ERFnPiecewise box varid domra fa)
where
-- doubleBounds = :: ira -> (Double, Double)
-- floatBounds :: ira -> (Float, Float)
-- integerBounds :: ira -> (ExtendedInteger, ExtendedInteger)
bisectDomain maybePt f@(ERFnPiecewise bistr) =
case maybePt of
Nothing ->
pwSplit (RA.bisectDomain Nothing) f
Just (ERFnPiecewise bistrPt) ->
(ERFnPiecewise bistr1, ERFnPiecewise bistr2)
where
bistr1 = BISTR.mapWithDom (const fst) bistr12
bistr2 = BISTR.mapWithDom (const snd) bistr12
bistr12 =
pwbistrZipWith (\fa pt -> RA.bisectDomain (Just pt) fa) 10
bistr bistrPt
bounds = pwSplit RA.bounds
f1 \/ f2 = pwLift2 (RA.\/) 10 f1 f2
instance
(FA.ERFnDomApprox box varid domra ranra fa, RAEL.ERApproxElementary fa, VariableID varid) =>
RAEL.ERApproxElementary (ERFnPiecewise box varid domra fa)
where
abs ix = pwLift1 $ RAEL.abs ix
sqrt ix = pwLift1 $ RAEL.sqrt ix
exp ix = pwLift1 $ RAEL.exp ix
log ix = pwLift1 $ RAEL.log ix
sin ix = pwLift1 $ RAEL.sin ix
cos ix = pwLift1 $ RAEL.cos ix
atan ix = pwLift1 $ RAEL.atan ix
instance
(FA.ERFnDomApprox box varid domra ranra fa,
RA.ERIntApprox fa,
DomainBoxMappable box box varid domra domra,
Show box) =>
FA.ERFnApprox box varid domra ranra (ERFnPiecewise box varid domra fa)
where
check prgLocation (ERFnPiecewise bistr) =
ERFnPiecewise $ BISTR.mapWithDom checkSegm bistr
where
checkSegm dom f =
FA.check (prgLocation ++ "segm " ++ show dom ++ ": ") f
domra2ranra (ERFnPiecewise bistr) d =
FA.domra2ranra fa d
where
(fa : _) = BISTR.collectValues bistr
ranra2domra (ERFnPiecewise bistr) r =
FA.ranra2domra fa r
where
(fa : _) = BISTR.collectValues bistr
setMaxDegree maxDegree = pwLift1 (FA.setMaxDegree maxDegree)
setMaxSize maxSize = pwLift1 (FA.setMaxSize maxSize)
getRangeApprox (ERFnPiecewise bistr) =
foldl1 (zipWith (RA.\/)) $ map FA.getRangeApprox $ BISTR.collectValues bistr
getTupleSize (ERFnPiecewise bistr) =
FA.getTupleSize $ head $ BISTR.collectValues bistr
tuple fs =
foldl1 (pwLift2 (\a b -> FA.tuple [a,b]) 10) fs
applyTupleFn tupleFn = pwLift1 $ FA.applyTupleFn tupleFnNoPW
where
tupleFnNoPW fas =
map (\ (ERFnPiecewise (BISTR.Leaf _ _ fa)) -> fa ) $
tupleFn $
map (\fa -> ERFnPiecewise $ BISTR.Leaf 0 (FA.dom fa) fa)
fas
err = error "ERFnPiecewise: applyTupleFn"
volume (ERFnPiecewise bistr) =
sum $ map FA.volume $ BISTR.collectValues bistr
scale ratio = pwLift1 (FA.scale ratio)
partialIntersect ix substitutions
f1@(ERFnPiecewise bistr1)
f2@(ERFnPiecewise bistr2) =
ERFnPiecewise $
head $
BTINTEG.zipOnSubdomain
faSplit ix maxDepth substitutions
updateInside updateTouch updateAway
[bistr1, bistr2]
where
maxDepth = effIx2int ix
updateInside dom [val1, val2] =
[FA.partialIntersect ix substitutions val1 val2]
updateTouch = updateInside
updateAway dom [val1, val2] =
[val2]
eval ptBox (ERFnPiecewise bistr) =
foldl1 (zipWith (RA./\)) $
-- unsafePrintReturn ("ERFnPiecewise: eval: vals = ")$
map (\fa -> FA.eval ptBox fa) $
concat $ map BISTR.collectValues $
BISTR.lookupLeavesIntersectingDom bistr ptBox
evalInner ptBox (ERFnPiecewise bistr) =
foldl1 (zipWith (RA./\)) $
-- unsafePrintReturn ("ERFnPiecewise: evalInner: vals = ")$
map (\fa -> FA.evalInner ptBox fa) $
concat $ map BISTR.collectValues $
BISTR.lookupLeavesIntersectingDom bistr ptBox
partialEval substitutions f@(ERFnPiecewise bistr) =
pwLift1 (FA.partialEval substitutions) (ERFnPiecewise bistrNoVars)
where
bistrNoVars =
BISTR.removeVars substitutions bistr
composeNonDecreasing
fOuter@(ERFnPiecewise bistrOuter)
varid
fInner@(ERFnPiecewise bistrInner)
=
-- unsafePrintReturn
-- (
-- "PieceWise: composeNonDecreasing: "
-- ++ "\n fOuter = " ++ show fOuter
-- ++ "\n fInner = " ++ show fInner
-- ++ "\n result = "
-- ) $
ERFnPiecewise $ BISTR.mapLeaves composeLeaf bistrInner
where
composeLeaf leaf@(BISTR.Leaf _ _ vInner) =
leaf { BISTR.bistrVal = vComposed }
where
vComposed =
FA.composeNonDecreasing vOuter varid vInner
vOuter = faCombine 10 (BISTR.bistrDepth bistrOuter) bistrOuterRelevant
bistrOuterRelevant = BISTR.restrictToDom bistrOuter composeDomB
composeDomB = DBox.insert varid composeDomVar $ BISTR.bistrDom bistrOuter
composeDomVar = FA.ranra2domra fInner $ foldl1 (RA.\/) $ FA.getRangeApprox fInner
intersectMeasureImprovement ix f1@(ERFnPiecewise bistr1) f2@(ERFnPiecewise bistr2) =
(intersection, improvementRA)
where
(intersection, _) = RA.intersectMeasureImprovement ix f1 f2
improvementRA
| 0 `RA.refines` intersectionVolume && 0 `RA.refines` f1Volume = 1
-- error $
-- "ERFnInterval: intersectMeasureImprovement: inconsistent result: "
-- ++ show intersection
| otherwise =
f1Volume / intersectionVolume
intersectionVolume = FA.volume intersection
f1Volume = FA.volume f1
instance
(FA.ERFnDomApprox box varid domra ranra fa, RA.ERIntApprox fa, Show box,
DomainBoxMappable box box varid domra domra) =>
FA.ERFnDomApprox box varid domra ranra (ERFnPiecewise box varid domra fa)
where
dom (ERFnPiecewise bistr) = BISTR.bistrDom bistr
bottomApprox domB tupleSize =
ERFnPiecewise (BISTR.const domB $ FA.bottomApprox domB tupleSize)
const domB vals =
ERFnPiecewise $
BISTR.const domB $ FA.const domB vals
proj domB i =
ERFnPiecewise $ BISTR.Leaf 0 domB $ FA.proj domB i
bisect var maybePt (ERFnPiecewise bistr) =
(ERFnPiecewise bistrLo, ERFnPiecewise bistrHi)
where
(BISTR.Node _ _ _ _ bistrLo bistrHi) =
BISTR.split faSplit 10 var pt DBox.noinfo bistr
pt =
case maybePt of
Nothing ->
RA.defaultBisectPt $ DBox.lookup "PieceWise: bisect: " var (BISTR.bistrDom bistr)
Just pt -> pt
unBisect var (ERFnPiecewise bistr1, ERFnPiecewise bistr2) =
ERFnPiecewise $
BISTR.Node (depth - 1) dom var domVarMid bistr1Dp bistr2Dp
where
depth = max depth1 depth2
depth1 = BISTR.bistrDepth bistr1
depth2 = BISTR.bistrDepth bistr2
bistr1Dp
| depth1 == depth = bistr1
| otherwise =
BISTR.setDepth depth bistr1
bistr2Dp
| depth2 == depth = bistr2
| otherwise =
BISTR.setDepth depth bistr2
dom1 = BISTR.bistrDom bistr1
dom2 = BISTR.bistrDom bistr2
dom = DBox.unionWith (RA.\/) dom1 dom2
domVarMid =
snd $ RA.bounds $
DBox.lookup "ERFnPiecewise: FA.unbisect: " var dom1
integrate ix fD@(ERFnPiecewise bistrD) x integdomBox origin (ERFnPiecewise bistrInit) =
ERFnPiecewise bistrIntegr
where
maxDepth = intLogUp 2 (max 1 ix)
[bistrIntegr] =
BTINTEG.zipFromOrigin -- invoke a generic BISTR "integrator"
faSplit faCombine faSplit faCombine
ix x origin (Just $ DBox.findWithDefault RA.bottomApprox x integdomBox)
zipOutsideRange -- outside the integration range, set result to bottom
shouldSplit
integrateOriginHere -- how to integrate a piece that crosses the origin hyperplane
integrateOriginLower -- how to integrate a piece to the left of the origin hyperplane
integrateOriginHigher -- how to integrate a piece to the right of the origin hyperplane
[bistrD, bistrInit]
zipOutsideRange maybeFromL maybeFromR [bistrD, bistrInit] =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrate: zipOutsideRange: "
-- ++ "\n domB = " ++ show domB
-- ++ "\n bottomFn = " ++ show bottomFn
-- )
[bistrPadj]
where
(ERFnPiecewise bistrPadj) =
case (maybeFromL, maybeFromR) of
(Nothing, Nothing) -> bottomFn
(Just bistrLO, Nothing) ->
FA.partialIntersect ix
(DBox.singleton x domLO)
(ERFnPiecewise bistrLO)
bottomFn
(Nothing, Just bistrHI) ->
FA.partialIntersect ix
(DBox.singleton x domHI)
(ERFnPiecewise bistrHI)
bottomFn
bottomFn =
ERFnPiecewise $ BISTR.Leaf depth domB $ FA.bottomApprox domB (FA.getTupleSize fD)
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrate: zipOutsideRange: " x domB
domB = BISTR.bistrDom bistrD
depth = BISTR.bistrDepth bistrD
shouldSplit _ depth _ _ _ =
depth < maxDepth
integrateOriginHere ix depth dom [faD, faInit] =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrateMeasureImprovement: integrateOriginHere: "
-- ++ "\n dom = " ++ show dom
-- ++ "\n faLO = " ++ show faLO
-- ++ "\n faHI = " ++ show faHI
-- )
(faLO, [faIntegr], faHI)
where
faIntegr =
FA.integrate ix faD x integdomBox origin faInit
faLO =
FA.partialEval (DBox.singleton x domLO) faIntegr
faHI =
FA.partialEval (DBox.singleton x domHI) faIntegr
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrate: integrateOriginHere: " x dom
integrateOriginLower ix depth dom bistrLO [faD, faInit] =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrateMeasureImprovement: integrateOriginLower: "
-- ++ "\n dom = " ++ show dom
-- ++ "\n faLO = " ++ show faLO
-- ++ "\n faHI = " ++ show faHI
-- )
([faIntegr], bistrLO { BISTR.bistrVal = faHI })
where
faIntegr =
FA.integrate ix faD x integdomBox domLO (BISTR.bistrVal bistrLO)
faHI =
FA.partialEval (DBox.singleton x domHI) faIntegr
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrate: integrateOriginLower: " x dom
integrateOriginHigher ix depth dom [faD, faInit] bistrHI =
(bistrHI { BISTR.bistrVal = faLO }, [faIntegr])
where
faIntegr =
FA.integrate ix faD x integdomBox domHI (BISTR.bistrVal bistrHI)
faLO =
FA.partialEval (DBox.singleton x domLO) faIntegr
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrate: integrateOriginHigher: " x dom
integrateMeasureImprovement ix (ERFnPiecewise bistrD) x integdomBox origin (ERFnPiecewise bistrP) =
(ERFnPiecewise bistrIsect, ERFnPiecewise bistrImpr)
where
[bistrIsect, bistrImpr] =
BTINTEG.zipFromOrigin
faSplit faCombine faSplit faCombine
ix x origin (Just $ DBox.findWithDefault RA.bottomApprox x integdomBox)
zipOutsideRange
shouldSplit
integrateOriginHere
integrateOriginLower
integrateOriginHigher
[bistrD, bistrP]
zipOutsideRange maybeFromL maybeFromR [bistrD, bistrP] =
-- unsafePrint
-- (
-- "ERFnPiecewise: zipOutsideRange"
-- )
[bistrPadj, BISTR.mapWithDom (\d v -> FA.const d [1]) bistrP]
where
(ERFnPiecewise bistrPadj) =
case (maybeFromL, maybeFromR) of
(Nothing, Nothing) -> (ERFnPiecewise bistrP)
(Just bistrLO, Nothing) ->
FA.partialIntersect ix
(DBox.singleton x domLO)
(ERFnPiecewise bistrLO)
(ERFnPiecewise bistrP)
(Nothing, Just bistrHI) ->
FA.partialIntersect ix
(DBox.singleton x domHI)
(ERFnPiecewise bistrHI)
(ERFnPiecewise bistrP)
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrateMeasureImprovement: zipOutsideRange: " x domB
domB = BISTR.bistrDom bistrP
depth = BISTR.bistrDepth bistrP
shouldSplit ix depth _ _ _ =
depth < (effIx2int ix)
integrateOriginHere ix depth dom [faD, faP] =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrateMeasureImprovement: integrateOriginHere: "
-- ++ "\n dom = " ++ show dom
-- ++ "\n faLO = " ++ show faLO
-- ++ "\n faHI = " ++ show faHI
-- )
(faLO, [faIsect, faImpr], faHI)
-- (FA.check "ERFnPieceWise: integrateOriginHere: faLO: " faLO,
-- [FA.check "ERFnPieceWise: integrateOriginHere: faIsect: " faIsect,
-- FA.check "ERFnPieceWise: integrateOriginHere: faImpr: " faImpr],
-- FA.check "ERFnPieceWise: integrateOriginHere: faHI: " faHI)
where
(faIsect, faImpr) =
FA.integrateMeasureImprovement ix faD x integdomBox origin faP
-- FA.integrateMeasureImprovement ix
-- (FA.check "ERFnPieceWise: integrateOriginHere: faD: " faD)
-- x integdomBox origin
-- (FA.check "ERFnPieceWise: integrateOriginHere: faP: " faP)
faLO =
FA.partialEval (DBox.singleton x domLO) faIsect
faHI =
FA.partialEval (DBox.singleton x domHI) faIsect
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrateMeasureImprovement: integrateOriginHere: " x dom
integrateOriginLower ix depth dom bistrLO [faD, faP] =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrateMeasureImprovement: integrateOriginLower: "
-- ++ "\n dom = " ++ show dom
-- ++ "\n faLO = " ++ show faLO
-- ++ "\n faPadj = " ++ show faPadj
-- ++ "\n faHI = " ++ show faHI
-- )
([faIsect, faImpr], bistrLO {BISTR.bistrVal = faHI})
where
(faIsect, faImpr) =
FA.integrateMeasureImprovement ix faD x integdomBox domLO faPadj
faPadj =
FA.partialIntersect ix (DBox.singleton x domLO) (BISTR.bistrVal bistrLO) faP
faHI =
FA.partialEval (DBox.singleton x domHI) faIsect
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrateMeasureImprovement: integrateOriginLower: " x dom
integrateOriginHigher ix depth dom [faD, faP] bistrHI =
-- unsafePrint
-- (
-- "ERFnPiecewise: integrateMeasureImprovement: integrateOriginHigher: "
-- ++ "\n dom = " ++ show dom
-- ++ "\n faLO = " ++ show faLO
-- ++ "\n faHI = " ++ show faHI
-- )
(bistrHI {BISTR.bistrVal = faLO}, [faIsect, faImpr])
where
(faIsect, faImpr) =
FA.integrateMeasureImprovement ix faD x integdomBox domHI faPadj
faPadj =
FA.partialIntersect ix (DBox.singleton x domHI) (BISTR.bistrVal bistrHI) faP
faLO =
FA.partialEval (DBox.singleton x domLO) faIsect
(domLO, domHI) =
RA.bounds $
DBox.lookup "ERFnPieceWise: integrateMeasureImprovement: integrateOriginHigher: " x dom
| michalkonecny/polypaver | src/Numeric/ER/RnToRm/Approx/PieceWise.hs | bsd-3-clause | 25,255 | 2 | 17 | 8,310 | 5,816 | 3,043 | 2,773 | 431 | 1 |
{-# LANGUAGE PatternGuards, RecordWildCards #-}
-- | Parsing is a slow point, the below is optimised
module Development.Ninja.Parse(parse) where
import qualified Data.ByteString.Char8 as BS
import Development.Ninja.Env
import Development.Ninja.Type
import Control.Monad
import Data.Char
import Data.Maybe
endsDollar :: Str -> Bool
endsDollar = BS.isSuffixOf (BS.pack "$")
dropSpace :: Str -> Str
dropSpace = BS.dropWhile isSpace
startsSpace :: Str -> Bool
startsSpace = BS.isPrefixOf (BS.pack " ")
-- | This is a hot-spot, so optimised
linesCR :: Str -> [Str]
linesCR x = case BS.split '\n' x of
x:xs | Just ('\r',x) <- unsnoc x -> x : map (\x -> case unsnoc x of Just ('\r',x) -> x; _ -> x) xs
xs -> xs
where
-- the ByteString unsnoc was introduced in a newer version
unsnoc x | BS.null x = Nothing
| otherwise = Just (BS.last x, BS.init x)
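-- For example (illustrative): linesCR (BS.pack "a\r\nb\nc") == map BS.pack ["a","b","c"]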
word1 :: Str -> (Str, Str)
word1 x = (a, dropSpace b)
where (a,b) = BS.break isSpace x
isVar :: Char -> Bool
isVar x = isAlphaNum x || x == '_'
parse :: FilePath -> IO Ninja
parse file = do env <- newEnv; parseFile file env newNinja
parseFile :: FilePath -> Env Str Str -> Ninja -> IO Ninja
parseFile file env ninja = do
src <- if file == "-" then BS.getContents else BS.readFile file
foldM (applyStmt env) ninja $ splitStmts src
data Stmt = Stmt Str [Str] deriving Show
splitStmts :: Str -> [Stmt]
splitStmts = stmt . continuation . linesCR
where
continuation (x:xs) | endsDollar x = BS.concat (BS.init x : map (dropSpace . BS.init) a ++ map dropSpace (take 1 b)) : continuation (drop 1 b)
where (a,b) = span endsDollar xs
continuation (x:xs) = x : continuation xs
continuation [] = []
stmt [] = []
stmt (x:xs) = Stmt x (map dropSpace a) : stmt b
where (a,b) = span startsSpace xs
applyStmt :: Env Str Str -> Ninja -> Stmt -> IO Ninja
applyStmt env ninja@Ninja{..} (Stmt x xs)
| key == BS.pack "rule" =
return ninja{rules = (BS.takeWhile isVar rest, Rule $ parseBinds xs) : rules}
| key == BS.pack "default" = do
xs <- parseStrs env rest
return ninja{defaults = xs ++ defaults}
| key == BS.pack "pool" = do
depth <- getDepth env $ parseBinds xs
return ninja{pools = (BS.takeWhile isVar rest, depth) : pools}
| key == BS.pack "build" = do
(out,rest) <- return $ splitColon rest
outputs <- parseStrs env out
(rule,deps) <- return $ word1 $ dropSpace rest
(normal,implicit,orderOnly) <- fmap splitDeps $ parseStrs env deps
let build = Build rule env normal implicit orderOnly $ parseBinds xs
return $
if rule == BS.pack "phony" then ninja{phonys = [(x, normal) | x <- outputs] ++ phonys}
else if length outputs == 1 then ninja{singles = (head outputs, build) : singles}
else ninja{multiples = (outputs, build) : multiples}
| key == BS.pack "include" = parseFile (BS.unpack rest) env ninja
| key == BS.pack "subninja" = do
e <- scopeEnv env
parseFile (BS.unpack rest) e ninja
| Just (a,b) <- parseBind x = do
addBind env a b
return ninja
| BS.null $ dropSpace x = return ninja
| BS.pack "#" `BS.isPrefixOf` dropSpace x = return ninja -- comments can only occur on their own line
| otherwise = error $ "Cannot parse line: " ++ BS.unpack x
where (key,rest) = word1 x
getDepth :: Env Str Str -> [(Str, Expr)] -> IO Int
getDepth env xs = case lookup (BS.pack "depth") xs of
Nothing -> return 1
Just x -> do
x <- askExpr env x
case BS.readInt x of
Just (i, n) | BS.null n -> return i
_ -> error $ "Could not parse depth field in pool, got: " ++ BS.unpack x
parseBind :: Str -> Maybe (Str, Expr)
parseBind x
| (var,rest) <- BS.span isVar x
, Just ('=',rest) <- BS.uncons $ dropSpace rest
= Just (var, parseExpr $ dropSpace rest)
| otherwise = Nothing
parseBinds :: [Str] -> [(Str, Expr)]
parseBinds = map $ \x -> fromMaybe (error $ "Unknown Ninja binding: " ++ BS.unpack x) $ parseBind x
parseExpr :: Str -> Expr
parseExpr = exprs . f
where
exprs [x] = x
exprs xs = Exprs xs
f x = case BS.elemIndex '$' x of
Nothing -> [Lit x]
Just i -> Lit (BS.take i x) : g (BS.drop (i+1) x)
g x = case BS.uncons x of
Nothing -> []
Just (c,s)
| c == '$' -> Lit (BS.singleton '$') : f s
| c == ' ' -> Lit (BS.singleton ' ') : f s
| c == ':' -> Lit (BS.singleton ':') : f s
| c == '{' -> let (a,b) = BS.break (== '}') s in Var a : f (BS.drop 1 b)
| otherwise -> let (a,b) = BS.span isVar x in Var a : f b
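-- For example (illustrative, ByteStrings written as plain strings):
-- parseExpr "ab$ c${x}d" == Exprs [Lit "ab", Lit " ", Lit "c", Var "x", Lit "d"]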
parseStrs :: Env Str Str -> Str -> IO [Str]
parseStrs env = mapM (askExpr env) . parseExprs
splitDeps :: [Str] -> ([Str], [Str], [Str])
splitDeps (x:xs) | x == BS.pack "|" = ([],a++b,c)
| x == BS.pack "||" = ([],b,a++c)
| otherwise = (x:a,b,c)
where (a,b,c) = splitDeps xs
splitDeps [] = ([], [], [])
parseExprs :: Str -> [Expr]
parseExprs = map parseExpr . f
where
f x | BS.null x = []
| otherwise = let (a,b) = splitUnescaped ' ' x in a : f b
splitUnescaped :: Char -> Str -> (Str, Str)
splitUnescaped c x = case filter valid $ BS.elemIndices c x of
[] -> (x, BS.empty)
i:_ -> (BS.take i x, BS.drop (i+1) x)
where
-- technically there could be $$:, so an escaped $ followed by a literal :
-- that seems very unlikely...
valid i = i == 0 || BS.index x (i-1) /= '$'
-- Find a non-escaped ':'
splitColon :: Str -> (Str, Str)
splitColon = splitUnescaped ':'
| nh2/shake | Development/Ninja/Parse.hs | bsd-3-clause | 5,941 | 0 | 17 | 1,840 | 2,536 | 1,267 | 1,269 | 122 | 4 |
{-# LANGUAGE TemplateHaskell, PatternGuards #-}
module HsBot where
import Control.Lens hiding (Action)
import Control.Applicative ((<$>), (<*>))
import Control.Arrow ((&&&))
import qualified Text.Printf as P
import qualified Control.Monad.State as ST
import Control.Monad (when)
import qualified Data.List as L
import Data.Maybe (isJust)
import Grid
import Robot
import qualified Robot as R
type IdWithAction = (RobotId, Action)
type IdsWithActions = [IdWithAction]
type IdWithResult = (RobotId, ActionResult)
type IdsWithResults = [IdWithResult]
type RobotsWithResults = [(Robot, ActionResult)]
data HsBot = HsBot {
_grid :: Grid,
_actionResults :: IdsWithResults
} deriving Show
makeLenses ''HsBot
type HsBotST = ST.StateT HsBot IO
mkHsBot :: Int -> Int -> HsBot
mkHsBot gridWidth gridHeight = HsBot (mkGrid gridWidth gridHeight) []
mkDefaultHsBot :: Int -> Int -> IO HsBot
mkDefaultHsBot gridWidth gridHeight =
ifoldlM addRobot (mkHsBot gridWidth gridHeight) colors
where
addRobot id hsBot color = placeRobotRandomly (defaultRobot id color) hsBot
colors = [(r, g, b) | r <- [1, 0, 0], g <- [0, 1, 0], b <- [0, 0, 1]]
placeRobotRandomly :: Robot -> HsBot -> IO HsBot
placeRobotRandomly robot hsBot = do
coord <- randomAndFreeCoord $ hsBot ^. grid
return $ placeRobot coord robot hsBot
placeRobot :: GridCoord -> Robot -> HsBot -> HsBot
placeRobot coord rob hsBot = hsBot & grid . atCoord coord . entity .~ RobotEntity rob
placeBlock :: GridCoord -> HsBotST ()
placeBlock coord = do
valid <- (isValidAndFree coord) <$> use grid
when valid $ grid . atCoord coord . entity .= BlockEntity
removeBlock :: GridCoord -> HsBotST ()
removeBlock coord = do
valid <- (isValid coord) <$> use grid
when valid $ do
hasBlock <- isJust <$> (preuse $ grid . atCoord coord . entity . _BlockEntity)
when hasBlock $ grid . atCoord coord . entity .= NoEntity
executeRobots :: HsBotST ()
executeRobots = ST.modify execRobots
execRobots :: HsBot -> HsBot
execRobots hsBot = apply (actions results) hsBot
where
results = matchRobotsWithResults hsBot
apply :: IdsWithActions -> HsBot -> HsBot
apply actions hsBot = L.foldl' (flip applyAction) (hsBot & actionResults .~ []) actions
where
applyAction (id, NoAction) hsBot = hsBot
applyAction (id, Move dir) hsBot =
case moveRobotAlongDir id dir (hsBot ^. grid) of
Just grid' -> hsBot & grid .~ grid'
& actionResults %~ ((id, Moved dir True) :)
_ -> hsBot & actionResults %~ ((id, Moved dir False) :)
actions :: RobotsWithResults -> IdsWithActions
actions = L.map $ \(rob, res) -> (rob ^. robotId, rob ^. execute $ res)
matchRobotsWithResults :: HsBot -> RobotsWithResults
matchRobotsWithResults hsBot =
L.map (\rob -> case lookup (rob ^. robotId) results of
Just res -> (rob, res)
_ -> (rob, NoResult))
robs
where
robs = hsBot ^. grid . to robots
results = hsBot ^. actionResults
renderHsBot :: HsBotST ()
renderHsBot = do
grid <- use grid
ST.liftIO $ renderGrid grid
| dan-t/hsbot | HsBot.hs | bsd-3-clause | 3,199 | 0 | 16 | 761 | 1,082 | 580 | 502 | -1 | -1 |
module RBPCP.Handler.Pay where
import RBPCP.Handler.Internal.Util
import qualified Servant.Server as SS
import qualified RBPCP.Types as RBPCP
import qualified PaymentChannel as PC
import qualified ChanDB as DB
import qualified Network.Haskoin.Crypto as HC
import qualified Data.Text as T
data PaymentError
= PaymentError PC.PayChanError
| ApplicationError T.Text
instance Show PaymentError where
show (PaymentError e) =
"payment error: " ++ show e
show (ApplicationError t) =
"application error: " ++ show t
instance IsHandlerException PaymentError where
mkHandlerErr = mkServantErr SS.err400
instance HasErrorResponse PaymentError where
errRes = cs . show
newtype PaymentCallback = PaymentCallback
( T.Text -- ^ Application data
-> PC.ServerPayChanX -- ^ New server state
-> PC.BtcAmount -- ^ Payment value
-> IO CallbackResult -- ^ Either error or application response data
)
-- | Either error or application response data
newtype CallbackResult = CallbackResult (Either T.Text T.Text)
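-- An illustrative callback (hypothetical name, not part of the API): accept
-- every payment and echo the client's application data back unchanged.
acceptAllCallback :: PaymentCallback
acceptAllCallback = PaymentCallback $ \appData _newState _payVal ->
    return $ CallbackResult (Right appData)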
runPay :: DB.ChanDBTx m dbM dbH
=> PaymentCallback
-> ReaderT PaymentCallback (EitherT (HandlerErr PaymentError) m) RBPCP.PaymentResult
-> HandlerM dbH chain RBPCP.PaymentResult
runPay cbFunc payM =
runAtomic $ runReaderT payM cbFunc
payE :: ( DB.ChanDBTx m dbM dbH
) =>
RBPCP.BtcTxId
-> Word32
-> Maybe RBPCP.SharedSecret
-> RBPCP.Payment
-> ReaderT PaymentCallback (EitherT (HandlerErr PaymentError) m) RBPCP.PaymentResult
payE _ _ Nothing _ = lift $ left $ UserError ResourceNotFound
payE fundTxId fundIdx (Just secret) (RBPCP.Payment payData appData) = do
lift $ maybeRedirect (fundTxId, fundIdx, secret) payData
let throwNotFound = maybe (lift $ left $ UserError ResourceNotFound) return
(newState, payVal) <- lift . abortOnErr . fmapL PaymentError
=<< liftIO . PC.acceptPayment payData
=<< throwNotFound
=<< lift (lift $ DB.getPayChan secret)
PaymentCallback callbackFunc <- ask
CallbackResult callbackResE <- liftIO $ callbackFunc appData newState payVal
appResData <- case callbackResE of
Left e -> lift $ abortWithErr $ ApplicationError e
Right r -> return r
-- Save state to DB
lift $ lift $ DB.updatePayChan newState
return RBPCP.PaymentResult
{ paymentResultChannelStatus =
if PC.channelValueLeft newState /= 0
then RBPCP.ChannelOpen
else RBPCP.ChannelClosed
, paymentResultChannelValueLeft = fromIntegral $ PC.channelValueLeft newState
, paymentResultValueReceived = fromIntegral payVal
, paymentResultSettlementTxid = Nothing
, paymentResultApplicationData = appResData
}
| runeksvendsen/rbpcp-handler | src/RBPCP/Handler/Pay.hs | bsd-3-clause | 3,084 | 0 | 14 | 939 | 692 | 361 | 331 | 61 | 3 |
module BookSimpleTypedLambda where
data T = Var Char |
Varj Int |
Abst T T |
App T T deriving (Eq)
{- ECA: This is a "large-step" semantics matching the book's full
   beta-reduction semantics. The actual rules are for a "small-step",
call-by-value semantics, though.
-}
eval :: T -> T
eval (App (Abst x y) z) = eval (sub y x z)
eval (Abst x y) = Abst x (eval y)
eval (App x y) | isValue x = App x (eval y)
               | otherwise = eval (App (eval x) (eval y))
{--
eval (App (Var x) y) = App (Var x) (eval y)
eval (App (Varj x) y) = App (Varj x) (eval y)
eval (App x y) = (App (eval x) y)
--}
eval x = x
-- in T replace T with T yields T
{- ECA: Think about how we might represent a substitution environment, and use
that to decide where to curry/uncurry arguments. For example, if we
represetn a substitution environment as an association list, we can
write the type of sub as T -> (T, T) -> T. This will let us do stuff
like map (sub z) env if we need to do more than one substitution at a
time!
-}
sub :: T -> T -> T -> T
sub (Abst (Var a) x) (Var y) z = Abst (Var a) (sub x (Var y) z)
sub (App a b) y z = App (sub a y z) (sub b y z)
sub x (Varj i) z = subDBHelper x 1 z
sub x y z = if x==y then z else x
-- In T, replace Varj Int with T
subDBHelper :: T -> Int -> T -> T
subDBHelper (Abst x y) i new = Abst x (subDBHelper y (i +1) new)
subDBHelper (App a b) i new = App (subDBHelper a i new) (subDBHelper b i new)
subDBHelper x i z = if x == (Varj i) then z else x
{- ECA: Look at how much closer this implementation matches our written rules.
Note that ordering of the conditionals matters!
-}
eval2 :: T -> T
eval2 (App x y)
| isAbst x && isValue y = -- E-AppAbs
let (Abst a z) = x in
sub2 z (y, a)
| isValue x = eval . App x $ eval y -- E-App2
| otherwise = eval $ App (eval x) y -- E-App1
eval2 x = x
isAbst :: T -> Bool
isAbst Abst{} = True
isAbst _ = False
isValue :: T -> Bool
isValue Var {} = True
isValue Varj{} = True
isValue (App a b) = isValue a && isValue b
isValue _ = False
{- ECA: Note how much cleaner this looks for recursive calls. -}
sub2 :: T -> (T, T) -> T
sub2 (Var x) (z, Var y)
  | x == y = z
  | otherwise = Var x
sub2 (Abst a x) env =
Abst a $ sub2 x env
sub2 (App a b) env =
App (sub2 a env) $ sub2 b env
sub2 _ _ = error "sub2: bad substitution pair."
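{- Following the association-list idea above: a whole substitution environment
   can be applied by folding sub2 over it (illustrative helper only). -}
subAll :: T -> [(T, T)] -> T
subAll = foldl sub2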
-- ECA: Here's alpha-equivalence if you're curious
alphaEq :: T -> T -> Bool
alphaEq = orda []
where orda :: [(T, T)] -> T -> T -> Bool
orda env tm1 tm2
| tm1 == tm2 && all (uncurry (==)) env = True
| otherwise =
case (tm1, tm2) of
(Var{}, Var{}) -> ordav env tm1 tm2
(App s1 t1, App s2 t2) ->
orda env s1 s2 && orda env t1 t2
(Abst x1@Var{} t1, Abst x2@Var{} t2) ->
orda ((x1, x2):env) t1 t2
_ -> False
ordav :: [(T, T)] -> T -> T -> Bool
ordav [] x1 x2 = x1 == x2
ordav ((l, r):oenv) x1 x2
| x1 == l = x2 == r
| otherwise = ordav oenv x1 x2
tru = (Abst (Var 't') (Abst (Var 'f') (Var 't')))
fls = (Abst (Var 't') (Abst (Var 'f') (Var 'f')))
and' = ( Abst (Var 'b') (Abst (Var 'c') (App (App (Var 'b') (Var 'c')) fls)))
tru2 = Abst (Var 'a') (Abst (Var 'b') (Var 'a'))
test = alphaEq tru tru2
{-test data
tru == eval (App( App and' tru) fls)
tru == eval (App( App and' tru) tru)
tru == eval (App( App and' fls) tru)
-}
data Lambda = Lambda String
instance Show T where
show (App x y) = show x ++ show y
show (Abst (Var x) y) = "(\955" ++ (x: []) ++ ". " ++ show y ++ ")"
show (Abst (Varj x) y) = "(\955" ++ ". " ++ show y ++ ")"
show (Var x) = x : [' ']
show (Varj x) = show x ++ " "
    show x = error "show: unexpected binder in term" -- the clauses above cover all well-formed terms
dbr :: T -> T
dbr (App a b) = App (dbr a) (dbr b)
dbr (Abst (Var x) bod) = Abst (Varj 1) (dbr (subDB bod (Var x) 1) )
dbr x = x
-- In T1, replace instances of T2
subDB :: T -> T -> Int -> T
subDB (Var x) (Var y) i = if (Var x)==(Var y) then (Varj i) else (Var x)
subDB (Abst (Var a) x) (Var y) i = Abst (Var a) (subDB x (Var y) (i+1))
subDB (App a b) (Var y) i = App (subDB a (Var y) i) (subDB b (Var y) i)
subDB x y i = if x==y then (Varj i) else x
testTerm =Abst (Var 'z') (App (Abst (Var 'y') (App (Var 'y') (Abst (Var 'x') (Var 'x')))) (Abst (Var 'w') (App (Var 'z') (Var 'w'))))
| armoredsoftware/protocol | demos/PaulPractice/BookSimpleTypedLambda.hs | bsd-3-clause | 4,516 | 0 | 15 | 1,409 | 2,027 | 1,020 | 1,007 | 86 | 5 |
module WeiXin.PublicPlatform.Conversation.Message where
-- {{{1 imports
import ClassyPrelude
import qualified Control.Exception.Safe as ExcSafe
import Language.Haskell.TH
import Control.Monad.Logger
import Control.Monad.Except
import Data.List.NonEmpty as LNE hiding (insert)
import WeiXin.PublicPlatform.Conversation
import WeiXin.PublicPlatform.Utils
import WeiXin.PublicPlatform.Class
import WeiXin.PublicPlatform.WS
import WeiXin.PublicPlatform.Media
-- }}}1
type LoadMsgMonad m = (MonadIO m, MonadLoggerIO m, ExcSafe.MonadCatch m)
type LoadMsgEnv r = ( HasWxppOutMsgDir r
, HasSomeWxppCacheBackend r
, HasAccessTokenIO r
, HasWxppAppID r
, HasWreqSession r
, HasWxppUrlConfig r
)
talkerMessageDir :: HasWxppOutMsgDir a => a -> NonEmpty FilePath
talkerMessageDir env = fmap (</> "talk") $ getWxppOutMsgDir env
loadTalkMessage :: ( LoadMsgMonad m, LoadMsgEnv r) =>
r -> FilePath -> m (Either String WxppOutMsg)
loadTalkMessage env sub_path = runExceptT $ do
msg_l <- ExceptT $ runDelayedYamlLoaderL
(talkerMessageDir env)
(mkDelayedYamlLoader $ setExtIfNotExist "yml" $ sub_path)
let api_env = uncurry WxppApiEnv $ (getWreqSession &&& getWxppUrlConfig) env
flip runReaderT api_env $ do
fromWxppOutMsgL
(getWxppOutMsgDir env)
(getSomeWxppCacheBackend env)
(lift $ ExceptT $ wxTalkGetAccessToken env)
msg_l
loadTalkMessage' :: ( LoadMsgMonad m, LoadMsgEnv r) =>
FilePath
-> WxTalkerMonad r m WxppOutMsg
loadTalkMessage' sub_path = mkWxTalkerMonad $ \env -> loadTalkMessage env sub_path
loadTalkMsgHelper :: FilePath -> FilePath -> IO WxppOutMsg
loadTalkMsgHelper base_path sub_path = runDelayedYamlLoaderExc base_path $
mkDelayedYamlLoader $ setExtIfNotExist "yml" $ sub_path
-- | May throw IOError
loadTalkMessage_IOE :: ( LoadMsgMonad m, LoadMsgEnv r) =>
r -> FilePath -> m (Either String WxppOutMsg)
loadTalkMessage_IOE env sub_path = runExceptT $ do
msg_l <- withExceptT err_to_str $ ExceptT $ runDelayedYamlLoaderL_IOE
(talkerMessageDir env)
(mkDelayedYamlLoader $ setExtIfNotExist "yml" $ sub_path)
let api_env = uncurry WxppApiEnv $ (getWreqSession &&& getWxppUrlConfig) env
flip runReaderT api_env $ do
fromWxppOutMsgL
(getWxppOutMsgDir env)
(getSomeWxppCacheBackend env)
(lift $ ExceptT $ wxTalkGetAccessToken env)
msg_l
where
err_to_str err = "failed to load '" <> sub_path <> "': " <> show err
loadTalkMessage_IOE' :: ( LoadMsgMonad m, LoadMsgEnv r) =>
FilePath
-> WxTalkerMonad r m WxppOutMsg
loadTalkMessage_IOE' sub_path = mkWxTalkerMonad $ \env -> loadTalkMessage_IOE env sub_path
loadTalkMessageTH :: FilePath -> FilePath -> Q Exp
loadTalkMessageTH default_msgs_dir sub_path = do
fallback_v <- runIO $ loadTalkMsgHelper default_msgs_dir sub_path
[| \env -> loadTalkMessage_IOE env sub_path
`ExcSafe.catch`
\ err -> do
unless (isDoesNotExistError err) $ do
$logErrorS wxppLogSource $ fromString $
"fallback to default message because of failing to load message file '"
<> sub_path <> "', error was: " <> show err
return $ Right fallback_v
|]
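-- Illustrative splice (hypothetical directory and message name):
--
-- > getWelcomeMsg = $(loadTalkMessageTH "default-messages" "welcome")
--
-- The fallback value is read from @default-messages/welcome.yml@ at compile
-- time; at run time the message is loaded from the environment's own message
-- directory, falling back to the embedded value when it cannot be loaded.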
loadTalkMessageTH' :: FilePath -> FilePath -> Q Exp
loadTalkMessageTH' default_msgs_dir sub_path = do
fallback_v <- runIO $ loadTalkMsgHelper default_msgs_dir sub_path
[| \env -> loadTalkMessage_IOE' env sub_path
`ExcSafe.catch`
\ err -> do
unless (isDoesNotExistError err) $ do
$logErrorS wxppLogSource $ fromString $
"fallback to default message because of failing to load message file '"
<> sub_path <> "', error was: " <> show err
return $ fallback_v
|]
| yoo-e/weixin-mp-sdk | WeiXin/PublicPlatform/Conversation/Message.hs | mit | 4,742 | 0 | 14 | 1,768 | 804 | 414 | 390 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}
{- |
Module : $Header$
Description : Instance of class Logic for common logic
Copyright : (c) Karl Luc, DFKI Bremen 2010, Eugen Kuksa and Uni Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (imports Logic.Logic)
Instance of class Logic for Common Logic
-}
module CommonLogic.Logic_CommonLogic where
import ATC.ProofTree ()
import Common.ProofTree
import CommonLogic.ATC_CommonLogic ()
import CommonLogic.Sign
import CommonLogic.AS_CommonLogic
import CommonLogic.Symbol as Symbol
import CommonLogic.Analysis
import qualified CommonLogic.Parse_CLIF as CLIF
import qualified CommonLogic.Parse_KIF as KIF
import qualified CommonLogic.Print_KIF as Print_KIF
import CommonLogic.Morphism
import CommonLogic.OMDocExport
import CommonLogic.OMDocImport as OMDocImport
import CommonLogic.OMDoc
import CommonLogic.Sublogic
import qualified Data.Map as Map
import Data.Monoid
import Logic.Logic
data CommonLogic = CommonLogic deriving Show
instance Language CommonLogic where
description _ = "CommonLogic Logic\n"
instance Category Sign Morphism
where
ide = idMor
dom = source
cod = target
isInclusion = Map.null . propMap
legal_mor = isLegalMorphism
composeMorphisms = composeMor
instance Sentences CommonLogic
TEXT_META
Sign
Morphism
Symbol
where
negation CommonLogic = Just . negForm
sym_of CommonLogic = singletonList . symOf
symmap_of CommonLogic = getSymbolMap -- returns the symbol map
sym_name CommonLogic = getSymbolName -- returns the name of a symbol
map_sen CommonLogic = mapSentence -- TODO
symsOfSen CommonLogic = symsOfTextMeta
symKind CommonLogic = Symbol.symKind
instance Monoid BASIC_SPEC where
mempty = Basic_spec []
mappend (Basic_spec l1) (Basic_spec l2) = Basic_spec $ l1 ++ l2
instance Syntax CommonLogic
BASIC_SPEC
Symbol
SYMB_ITEMS
SYMB_MAP_ITEMS
where
parsersAndPrinters CommonLogic =
addSyntax "KIF" (KIF.basicSpec, Print_KIF.printBasicSpec)
$ addSyntax "CLIF" (CLIF.basicSpec, pretty)
$ makeDefault (CLIF.basicSpec, pretty)
parse_symb_items CommonLogic = Just CLIF.symbItems
parse_symb_map_items CommonLogic = Just CLIF.symbMapItems
instance Logic CommonLogic
CommonLogicSL -- Sublogics
BASIC_SPEC -- basic_spec
TEXT_META -- sentence
SYMB_ITEMS -- symb_items
SYMB_MAP_ITEMS -- symb_map_items
Sign -- sign
Morphism -- morphism
Symbol -- symbol
Symbol -- raw_symbol
ProofTree -- proof_tree
where
stability CommonLogic = Testing
all_sublogics CommonLogic = sublogics_all
empty_proof_tree CommonLogic = emptyProofTree
provers CommonLogic = []
omdoc_metatheory CommonLogic = Just clMetaTheory
export_senToOmdoc CommonLogic = exportSenToOmdoc
export_symToOmdoc CommonLogic = exportSymToOmdoc
omdocToSen CommonLogic = OMDocImport.omdocToSen
omdocToSym CommonLogic = OMDocImport.omdocToSym
instance StaticAnalysis CommonLogic
BASIC_SPEC
TEXT_META
SYMB_ITEMS
SYMB_MAP_ITEMS
Sign
Morphism
Symbol
Symbol
where
basic_analysis CommonLogic = Just basicCommonLogicAnalysis
empty_signature CommonLogic = emptySig
is_subsig CommonLogic = isSubSigOf
subsig_inclusion CommonLogic s = return . inclusionMap s
signature_union CommonLogic = sigUnion
symbol_to_raw CommonLogic = symbolToRaw
id_to_raw CommonLogic = idToRaw
matches CommonLogic = Symbol.matches
stat_symb_items CommonLogic _ = mkStatSymbItems
stat_symb_map_items CommonLogic _ _ = mkStatSymbMapItem
induced_from_morphism CommonLogic = inducedFromMorphism
induced_from_to_morphism CommonLogic = inducedFromToMorphism
add_symb_to_sign CommonLogic = addSymbToSign -- TODO
{-
stat_symb_items CommonLogic = ()
stat_symb_map_items CommonLogic = ()
morphism_union CommonLogic = ()
-}
signature_colimit CommonLogic = signColimit
-- | Sublogics
instance SemiLatticeWithTop CommonLogicSL where
lub = sublogics_max
top = CommonLogic.Sublogic.top
instance MinSublogic CommonLogicSL BASIC_SPEC where
minSublogic = sl_basic_spec bottom
instance MinSublogic CommonLogicSL Sign where
minSublogic = sl_sig bottom
instance SublogicName CommonLogicSL where
sublogicName = sublogics_name
instance MinSublogic CommonLogicSL TEXT_META where
minSublogic tm = sublogic_text bottom $ getText tm
instance MinSublogic CommonLogicSL NAME where
minSublogic = sublogic_name bottom
instance MinSublogic CommonLogicSL Symbol where
minSublogic = sl_sym bottom
instance MinSublogic CommonLogicSL Morphism where
minSublogic = sl_mor bottom
instance MinSublogic CommonLogicSL SYMB_MAP_ITEMS where
minSublogic = sl_symmap bottom
instance MinSublogic CommonLogicSL SYMB_ITEMS where
minSublogic = sl_symitems bottom
instance ProjectSublogic CommonLogicSL BASIC_SPEC where
projectSublogic = prBasicSpec
instance ProjectSublogicM CommonLogicSL NAME where
projectSublogicM = prName
instance ProjectSublogicM CommonLogicSL SYMB_MAP_ITEMS where
projectSublogicM = prSymMapM
instance ProjectSublogicM CommonLogicSL SYMB_ITEMS where
projectSublogicM = prSymItemsM
instance ProjectSublogic CommonLogicSL Sign where
projectSublogic = prSig
instance ProjectSublogic CommonLogicSL Morphism where
projectSublogic = prMor
instance ProjectSublogicM CommonLogicSL Symbol where
projectSublogicM = prSymbolM
| keithodulaigh/Hets | CommonLogic/Logic_CommonLogic.hs | gpl-2.0 | 5,796 | 0 | 10 | 1,197 | 993 | 532 | 461 | 134 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EMR.RunJobFlow
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- RunJobFlow creates and starts running a new job flow. The job flow will
-- run the steps specified. Once the job flow completes, the cluster is
-- stopped and the HDFS partition is lost. To prevent loss of data,
-- configure the last step of the job flow to store results in Amazon S3.
-- If the JobFlowInstancesConfig 'KeepJobFlowAliveWhenNoSteps' parameter is
-- set to 'TRUE', the job flow will transition to the WAITING state rather
-- than shutting down once the steps have completed.
--
-- For additional protection, you can set the JobFlowInstancesConfig
-- 'TerminationProtected' parameter to 'TRUE' to lock the job flow and
-- prevent it from being terminated by API call, user intervention, or in
-- the event of a job flow error.
--
-- A maximum of 256 steps are allowed in each job flow.
--
-- If your job flow is long-running (such as a Hive data warehouse) or
-- complex, you may require more than 256 steps to process your data. You
-- can bypass the 256-step limitation in various ways, including using the
-- SSH shell to connect to the master node and submitting queries directly
-- to the software running on the master node, such as Hive and Hadoop. For
-- more information on how to do this, go to
-- <http://docs.aws.amazon.com/ElasticMapReduce/latest/DeveloperGuide/AddMoreThan256Steps.html Add More than 256 Steps to a Job Flow>
-- in the /Amazon Elastic MapReduce Developer\'s Guide/.
--
-- For long running job flows, we recommend that you periodically store
-- your results.
--
-- /See:/ <http://docs.aws.amazon.com/ElasticMapReduce/latest/API/API_RunJobFlow.html AWS API Reference> for RunJobFlow.
module Network.AWS.EMR.RunJobFlow
(
-- * Creating a Request
runJobFlow
, RunJobFlow
-- * Request Lenses
, rjfAMIVersion
, rjfAdditionalInfo
, rjfConfigurations
, rjfSteps
, rjfJobFlowRole
, rjfBootstrapActions
, rjfReleaseLabel
, rjfLogURI
, rjfNewSupportedProducts
, rjfVisibleToAllUsers
, rjfSupportedProducts
, rjfApplications
, rjfTags
, rjfServiceRole
, rjfName
, rjfInstances
-- * Destructuring the Response
, runJobFlowResponse
, RunJobFlowResponse
-- * Response Lenses
, rjfrsJobFlowId
, rjfrsResponseStatus
) where
import Network.AWS.EMR.Types
import Network.AWS.EMR.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Input to the RunJobFlow operation.
--
-- /See:/ 'runJobFlow' smart constructor.
data RunJobFlow = RunJobFlow'
{ _rjfAMIVersion :: !(Maybe Text)
, _rjfAdditionalInfo :: !(Maybe Text)
, _rjfConfigurations :: !(Maybe [Configuration])
, _rjfSteps :: !(Maybe [StepConfig])
, _rjfJobFlowRole :: !(Maybe Text)
, _rjfBootstrapActions :: !(Maybe [BootstrapActionConfig])
, _rjfReleaseLabel :: !(Maybe Text)
, _rjfLogURI :: !(Maybe Text)
, _rjfNewSupportedProducts :: !(Maybe [SupportedProductConfig])
, _rjfVisibleToAllUsers :: !(Maybe Bool)
, _rjfSupportedProducts :: !(Maybe [Text])
, _rjfApplications :: !(Maybe [Application])
, _rjfTags :: !(Maybe [Tag])
, _rjfServiceRole :: !(Maybe Text)
, _rjfName :: !Text
, _rjfInstances :: !JobFlowInstancesConfig
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RunJobFlow' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rjfAMIVersion'
--
-- * 'rjfAdditionalInfo'
--
-- * 'rjfConfigurations'
--
-- * 'rjfSteps'
--
-- * 'rjfJobFlowRole'
--
-- * 'rjfBootstrapActions'
--
-- * 'rjfReleaseLabel'
--
-- * 'rjfLogURI'
--
-- * 'rjfNewSupportedProducts'
--
-- * 'rjfVisibleToAllUsers'
--
-- * 'rjfSupportedProducts'
--
-- * 'rjfApplications'
--
-- * 'rjfTags'
--
-- * 'rjfServiceRole'
--
-- * 'rjfName'
--
-- * 'rjfInstances'
runJobFlow
:: Text -- ^ 'rjfName'
-> JobFlowInstancesConfig -- ^ 'rjfInstances'
-> RunJobFlow
runJobFlow pName_ pInstances_ =
RunJobFlow'
{ _rjfAMIVersion = Nothing
, _rjfAdditionalInfo = Nothing
, _rjfConfigurations = Nothing
, _rjfSteps = Nothing
, _rjfJobFlowRole = Nothing
, _rjfBootstrapActions = Nothing
, _rjfReleaseLabel = Nothing
, _rjfLogURI = Nothing
, _rjfNewSupportedProducts = Nothing
, _rjfVisibleToAllUsers = Nothing
, _rjfSupportedProducts = Nothing
, _rjfApplications = Nothing
, _rjfTags = Nothing
, _rjfServiceRole = Nothing
, _rjfName = pName_
, _rjfInstances = pInstances_
}
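-- An illustrative request (hypothetical names; assumes OverloadedStrings for
-- the 'Text' literals and the usual lens operators @(&)@ and @(?~)@ in scope,
-- e.g. from "Control.Lens"):
--
-- > myJobFlow :: JobFlowInstancesConfig -> RunJobFlow
-- > myJobFlow instancesCfg = runJobFlow "example-cluster" instancesCfg
-- >     & rjfReleaseLabel ?~ "emr-4.2.0"
-- >     & rjfServiceRole ?~ "EMR_DefaultRole"
-- >     & rjfJobFlowRole ?~ "EMR_EC2_DefaultRole"
-- >     & rjfVisibleToAllUsers ?~ True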
-- | For Amazon EMR releases 3.x and 2.x. For Amazon EMR releases 4.x and
-- greater, use ReleaseLabel.
--
-- The version of the Amazon Machine Image (AMI) to use when launching
-- Amazon EC2 instances in the job flow. The following values are valid:
--
-- - The version number of the AMI to use, for example, \"2.0.\"
--
-- If the AMI supports multiple versions of Hadoop (for example, AMI 1.0
-- supports both Hadoop 0.18 and 0.20) you can use the
-- JobFlowInstancesConfig 'HadoopVersion' parameter to modify the version
-- of Hadoop from the defaults shown above.
--
-- For details about the AMI versions currently supported by Amazon Elastic
-- MapReduce, go to
-- <http://docs.aws.amazon.com/ElasticMapReduce/latest/DeveloperGuide/EnvironmentConfig_AMIVersion.html#ami-versions-supported AMI Versions Supported in Elastic MapReduce>
-- in the /Amazon Elastic MapReduce Developer\'s Guide./
rjfAMIVersion :: Lens' RunJobFlow (Maybe Text)
rjfAMIVersion = lens _rjfAMIVersion (\ s a -> s{_rjfAMIVersion = a});
-- | A JSON string for selecting additional features.
rjfAdditionalInfo :: Lens' RunJobFlow (Maybe Text)
rjfAdditionalInfo = lens _rjfAdditionalInfo (\ s a -> s{_rjfAdditionalInfo = a});
-- | Amazon EMR releases 4.x or later.
--
-- The list of configurations supplied for the EMR cluster you are
-- creating.
rjfConfigurations :: Lens' RunJobFlow [Configuration]
rjfConfigurations = lens _rjfConfigurations (\ s a -> s{_rjfConfigurations = a}) . _Default . _Coerce;
-- | A list of steps to be executed by the job flow.
rjfSteps :: Lens' RunJobFlow [StepConfig]
rjfSteps = lens _rjfSteps (\ s a -> s{_rjfSteps = a}) . _Default . _Coerce;
-- | An IAM role for the job flow. The EC2 instances of the job flow assume
-- this role. The default role is 'EMRJobflowDefault'. In order to use the
-- default role, you must have already created it using the CLI.
rjfJobFlowRole :: Lens' RunJobFlow (Maybe Text)
rjfJobFlowRole = lens _rjfJobFlowRole (\ s a -> s{_rjfJobFlowRole = a});
-- | A list of bootstrap actions that will be run before Hadoop is started on
-- the cluster nodes.
rjfBootstrapActions :: Lens' RunJobFlow [BootstrapActionConfig]
rjfBootstrapActions = lens _rjfBootstrapActions (\ s a -> s{_rjfBootstrapActions = a}) . _Default . _Coerce;
-- | Amazon EMR releases 4.x or later.
--
-- The release label for the Amazon EMR release. For Amazon EMR 3.x and 2.x
-- AMIs, use amiVersion instead of ReleaseLabel.
rjfReleaseLabel :: Lens' RunJobFlow (Maybe Text)
rjfReleaseLabel = lens _rjfReleaseLabel (\ s a -> s{_rjfReleaseLabel = a});
-- | The location in Amazon S3 to write the log files of the job flow. If a
-- value is not provided, logs are not created.
rjfLogURI :: Lens' RunJobFlow (Maybe Text)
rjfLogURI = lens _rjfLogURI (\ s a -> s{_rjfLogURI = a});
-- | For Amazon EMR releases 3.x and 2.x. For Amazon EMR releases 4.x and
-- greater, use Applications.
--
-- A list of strings that indicates third-party software to use with the
-- job flow that accepts a user argument list. EMR accepts and forwards the
-- argument list to the corresponding installation script as bootstrap
-- action arguments. For more information, see
-- <http://docs.aws.amazon.com/ElasticMapReduce/latest/DeveloperGuide/emr-mapr.html Launch a Job Flow on the MapR Distribution for Hadoop>.
-- Currently supported values are:
--
-- - \"mapr-m3\" - launch the cluster using MapR M3 Edition.
-- - \"mapr-m5\" - launch the cluster using MapR M5 Edition.
-- - \"mapr\" with the user arguments specifying \"--edition,m3\" or
-- \"--edition,m5\" - launch the job flow using MapR M3 or M5 Edition
-- respectively.
-- - \"mapr-m7\" - launch the cluster using MapR M7 Edition.
-- -   \"hunk\" - launch the cluster with the Hunk Big Data Analytics
-- Platform.
-- - \"hue\"- launch the cluster with Hue installed.
-- - \"spark\" - launch the cluster with Apache Spark installed.
-- - \"ganglia\" - launch the cluster with the Ganglia Monitoring System
-- installed.
rjfNewSupportedProducts :: Lens' RunJobFlow [SupportedProductConfig]
rjfNewSupportedProducts = lens _rjfNewSupportedProducts (\ s a -> s{_rjfNewSupportedProducts = a}) . _Default . _Coerce;
-- | Whether the job flow is visible to all IAM users of the AWS account
-- associated with the job flow. If this value is set to 'true', all IAM
-- users of that AWS account can view and (if they have the proper policy
-- permissions set) manage the job flow. If it is set to 'false', only the
-- IAM user that created the job flow can view and manage it.
rjfVisibleToAllUsers :: Lens' RunJobFlow (Maybe Bool)
rjfVisibleToAllUsers = lens _rjfVisibleToAllUsers (\ s a -> s{_rjfVisibleToAllUsers = a});
-- | For Amazon EMR releases 3.x and 2.x. For Amazon EMR releases 4.x and
-- greater, use Applications.
--
-- A list of strings that indicates third-party software to use with the
-- job flow. For more information, go to
-- <http://docs.aws.amazon.com/ElasticMapReduce/latest/DeveloperGuide/emr-supported-products.html Use Third Party Applications with Amazon EMR>.
-- Currently supported values are:
--
-- - \"mapr-m3\" - launch the job flow using MapR M3 Edition.
-- - \"mapr-m5\" - launch the job flow using MapR M5 Edition.
rjfSupportedProducts :: Lens' RunJobFlow [Text]
rjfSupportedProducts = lens _rjfSupportedProducts (\ s a -> s{_rjfSupportedProducts = a}) . _Default . _Coerce;
-- | Amazon EMR releases 4.x or later.
--
-- A list of applications for the cluster. Valid values are: \"Hadoop\",
-- \"Hive\", \"Mahout\", \"Pig\", and \"Spark.\" They are case insensitive.
rjfApplications :: Lens' RunJobFlow [Application]
rjfApplications = lens _rjfApplications (\ s a -> s{_rjfApplications = a}) . _Default . _Coerce;
-- | A list of tags to associate with a cluster and propagate to Amazon EC2
-- instances.
rjfTags :: Lens' RunJobFlow [Tag]
rjfTags = lens _rjfTags (\ s a -> s{_rjfTags = a}) . _Default . _Coerce;
-- | The IAM role that will be assumed by the Amazon EMR service to access
-- AWS resources on your behalf.
rjfServiceRole :: Lens' RunJobFlow (Maybe Text)
rjfServiceRole = lens _rjfServiceRole (\ s a -> s{_rjfServiceRole = a});
-- | The name of the job flow.
rjfName :: Lens' RunJobFlow Text
rjfName = lens _rjfName (\ s a -> s{_rjfName = a});
-- | A specification of the number and type of Amazon EC2 instances on which
-- to run the job flow.
rjfInstances :: Lens' RunJobFlow JobFlowInstancesConfig
rjfInstances = lens _rjfInstances (\ s a -> s{_rjfInstances = a});
instance AWSRequest RunJobFlow where
type Rs RunJobFlow = RunJobFlowResponse
request = postJSON eMR
response
= receiveJSON
(\ s h x ->
RunJobFlowResponse' <$>
(x .?> "JobFlowId") <*> (pure (fromEnum s)))
instance ToHeaders RunJobFlow where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("ElasticMapReduce.RunJobFlow" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON RunJobFlow where
toJSON RunJobFlow'{..}
= object
(catMaybes
[("AmiVersion" .=) <$> _rjfAMIVersion,
("AdditionalInfo" .=) <$> _rjfAdditionalInfo,
("Configurations" .=) <$> _rjfConfigurations,
("Steps" .=) <$> _rjfSteps,
("JobFlowRole" .=) <$> _rjfJobFlowRole,
("BootstrapActions" .=) <$> _rjfBootstrapActions,
("ReleaseLabel" .=) <$> _rjfReleaseLabel,
("LogUri" .=) <$> _rjfLogURI,
("NewSupportedProducts" .=) <$>
_rjfNewSupportedProducts,
("VisibleToAllUsers" .=) <$> _rjfVisibleToAllUsers,
("SupportedProducts" .=) <$> _rjfSupportedProducts,
("Applications" .=) <$> _rjfApplications,
("Tags" .=) <$> _rjfTags,
("ServiceRole" .=) <$> _rjfServiceRole,
Just ("Name" .= _rjfName),
Just ("Instances" .= _rjfInstances)])
instance ToPath RunJobFlow where
toPath = const "/"
instance ToQuery RunJobFlow where
toQuery = const mempty
-- | The result of the RunJobFlow operation.
--
-- /See:/ 'runJobFlowResponse' smart constructor.
data RunJobFlowResponse = RunJobFlowResponse'
{ _rjfrsJobFlowId :: !(Maybe Text)
, _rjfrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RunJobFlowResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rjfrsJobFlowId'
--
-- * 'rjfrsResponseStatus'
runJobFlowResponse
:: Int -- ^ 'rjfrsResponseStatus'
-> RunJobFlowResponse
runJobFlowResponse pResponseStatus_ =
RunJobFlowResponse'
{ _rjfrsJobFlowId = Nothing
, _rjfrsResponseStatus = pResponseStatus_
}
-- | A unique identifier for the job flow.
rjfrsJobFlowId :: Lens' RunJobFlowResponse (Maybe Text)
rjfrsJobFlowId = lens _rjfrsJobFlowId (\ s a -> s{_rjfrsJobFlowId = a});
-- | The response status code.
rjfrsResponseStatus :: Lens' RunJobFlowResponse Int
rjfrsResponseStatus = lens _rjfrsResponseStatus (\ s a -> s{_rjfrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-emr/gen/Network/AWS/EMR/RunJobFlow.hs | mpl-2.0 | 14,756 | 0 | 13 | 3,144 | 1,944 | 1,192 | 752 | 201 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-| Unittest helpers for TemplateHaskell components.
-}
{-
Copyright (C) 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.TestHelper
( testSuite
, genArbitrary
) where
import Data.List (stripPrefix, isPrefixOf)
import Data.Maybe (fromMaybe)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit (Assertion)
import Test.QuickCheck
import Language.Haskell.TH
import Ganeti.THH.Compat
-- | Test property prefix.
propPrefix :: String
propPrefix = "prop_"
-- | Test case prefix.
casePrefix :: String
casePrefix = "case_"
-- | Test case prefix without underscore.
case2Pfx :: String
case2Pfx = "case"
-- | Tries to drop a prefix from a string.
simplifyName :: String -> String -> String
simplifyName pfx string = fromMaybe string (stripPrefix pfx string)
-- | Builds a test from a QuickCheck property.
runProp :: Testable prop => String -> prop -> Test
runProp = testProperty . simplifyName propPrefix
-- | Builds a test for a HUnit test case.
runCase :: String -> Assertion -> Test
runCase = testCase . simplifyName casePrefix
-- | Runs the correct test provider for a given test, based on its
-- name (not very nice, but...).
run :: Name -> Q Exp
run name =
let str = nameBase name
nameE = varE name
strE = litE (StringL str)
in case () of
_ | propPrefix `isPrefixOf` str -> [| runProp $strE $nameE |]
| casePrefix `isPrefixOf` str -> [| runCase $strE $nameE |]
| case2Pfx `isPrefixOf` str ->
[| (testCase . simplifyName case2Pfx) $strE $nameE |]
| otherwise -> fail $ "Unsupported test function name '" ++ str ++ "'"
-- | Convert slashes in a name to underscores.
mapSlashes :: String -> String
mapSlashes = map (\c -> if c == '/' then '_' else c)
-- | Builds a test suite.
testSuite :: String -> [Name] -> Q [Dec]
testSuite tsname tdef = do
let fullname = mkName $ "test" ++ mapSlashes tsname
tests <- mapM run tdef
sigtype <- [t| Test |]
body <- [| testGroup $(litE $ stringL tsname) $(return $ ListE tests) |]
return [ SigD fullname sigtype
, ValD (VarP fullname) (NormalB body) []
]
-- | Builds an arbitrary value for a given constructor. This doesn't
-- use the actual types of the fields, since we expect arbitrary
-- instances for all of the types anyway, we only care about the
-- number of fields.
mkConsArbitrary :: (Name, [a]) -> Exp
mkConsArbitrary (name, types) =
let infix_arb a = InfixE (Just a) (VarE '(<*>)) (Just (VarE 'arbitrary))
constr = AppE (VarE 'pure) (ConE name)
in foldl (\a _ -> infix_arb a) constr types
-- | Extracts the name and the types from a constructor.
conInfo :: Con -> (Name, [Type])
conInfo (NormalC name t) = (name, map snd t)
conInfo (RecC name t) = (name, map (\(_, _, x) -> x) t)
conInfo (InfixC t1 name t2) = (name, [snd t1, snd t2])
conInfo (ForallC _ _ subcon) = conInfo subcon
-- | Builds an arbitrary instance for a regular data type (i.e. not Bounded).
mkRegularArbitrary :: Name -> [Con] -> Q [Dec]
mkRegularArbitrary name cons = do
expr <- case cons of
[] -> fail "Can't make Arbitrary instance for an empty data type"
[x] -> return $ mkConsArbitrary (conInfo x)
xs -> appE (varE 'oneof) $
listE (map (return . mkConsArbitrary . conInfo) xs)
return [gntInstanceD [] (AppT (ConT ''Arbitrary) (ConT name))
[ValD (VarP 'arbitrary) (NormalB expr) []]]
-- | Builds a default Arbitrary instance for a type. This requires
-- that all members are of types that already have Arbitrary
-- instances, and that the arbitrary instances are well behaved
-- (w.r.t. recursive data structures, or similar concerns). In that
-- sense, this is not appropriate for all data types, just those that
-- are simple but very repetitive or have many simple fields.
genArbitrary :: Name -> Q [Dec]
genArbitrary name = do
r <- reify name
case r of
TyConI (DataD _ _ _ _ cons _) ->
mkRegularArbitrary name cons
TyConI (NewtypeD _ _ _ _ con _) ->
mkRegularArbitrary name [con]
TyConI (TySynD _ _ (ConT tn)) -> genArbitrary tn
_ -> fail $ "Invalid type in call to genArbitrary for " ++ show name
++ ", type " ++ show r
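-- An illustrative use of the splices above (hypothetical type and property;
-- every field type must already have an Arbitrary instance):
--
-- > data Point = Point Int Int deriving (Show, Eq)
-- > $(genArbitrary ''Point)
-- >
-- > prop_Point_selfEq :: Point -> Bool
-- > prop_Point_selfEq p = p == p
-- >
-- > testSuite "Point" [ 'prop_Point_selfEq ]
--
-- The last splice defines @testPoint :: Test@, grouping the property under
-- the group name Point.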
| ganeti/ganeti | test/hs/Test/Ganeti/TestHelper.hs | bsd-2-clause | 5,545 | 0 | 17 | 1,138 | 1,144 | 612 | 532 | 76 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
module NLP.Partage.Earley.Item
( Span (..)
, beg
, end
, gap
, Active (..)
, state
, spanA
, Passive (..)
, dagID
, spanP
, isAdjoinedTo
, regular
, auxiliary
, isRoot
-- #ifdef DebugOn
, printActive
, printPassive
-- #endif
) where
import Data.Lens.Light
import Data.Maybe (isJust, isNothing)
import Prelude hiding (span)
import Data.DAWG.Ord (ID)
import NLP.Partage.Earley.Base (Pos, NotFoot(..))
import NLP.Partage.DAG (DID)
import qualified NLP.Partage.DAG as DAG
-- #ifdef DebugOn
import NLP.Partage.Earley.Base (nonTerm)
import NLP.Partage.Earley.Auto (Auto(..))
-- #endif
--------------------------------------------------
-- BASE TYPES
--------------------------------------------------
data Span = Span {
-- | The starting position.
_beg :: Pos
-- | The ending position (or rather the position of the dot).
, _end :: Pos
    -- | Coordinates of the gap (if applicable)
, _gap :: Maybe (Pos, Pos)
} deriving (Show, Eq, Ord)
$( makeLenses [''Span] )
-- | Active chart item : state reference + span.
data Active = Active {
_state :: ID
, _spanA :: Span
} deriving (Show, Eq, Ord)
$( makeLenses [''Active] )
-- | Passive chart item : label + span.
-- TODO: remove the redundant 't' parameter
data Passive n t = Passive
{ _dagID :: Either (NotFoot n) DID
-- ^ We store non-terminal 'n' for items representing
-- fully recognized elementary trees.
, _spanP :: Span
-- ^ Span of the chart item
, _isAdjoinedTo :: Bool
-- ^ Was the node represented by the item already adjoined to?
} deriving (Show, Eq, Ord)
$( makeLenses [''Passive] )
-- | Does it represent regular rules?
regular :: Span -> Bool
regular = isNothing . getL gap
-- | Does it represent auxiliary rules?
auxiliary :: Span -> Bool
auxiliary = isJust . getL gap
-- | Does it represent a root?
isRoot :: Either n DID -> Bool
isRoot x = case x of
Left _ -> True
Right _ -> False
-- #ifdef DebugOn
-- | Print a span.
printSpan :: Span -> IO ()
printSpan span = do
putStr . show $ getL beg span
putStr ", "
case getL gap span of
Nothing -> return ()
Just (p, q) -> do
putStr $ show p
putStr ", "
putStr $ show q
putStr ", "
putStr . show $ getL end span
-- | Print an active item.
printActive :: Active -> IO ()
printActive p = do
putStr "("
putStr . show $ getL state p
putStr ", "
printSpan $ getL spanA p
putStrLn ")"
-- | Print a passive item.
printPassive :: (Show n) => Passive n t -> Auto n t -> IO ()
printPassive p auto = do
putStr "("
-- putStr . viewLab $ getL label p
putStr $ case getL dagID p of
Left root ->
show (notFootLabel root) ++
if isSister root then "*" else ""
Right did ->
show (DAG.unDID did) ++ "[" ++
show (nonTerm (Right did) auto) ++ "]"
putStr ", "
printSpan $ getL spanP p
putStrLn ")"
-- #endif
| kawu/partage | src/NLP/Partage/Earley/Item.hs | bsd-2-clause | 3,172 | 0 | 17 | 931 | 850 | 460 | 390 | 85 | 3 |
module Singletons.Empty where
import Data.Singletons.TH
$(singletons [d|
data Empty
|])
| int-index/singletons | tests/compile-and-dump/Singletons/Empty.hs | bsd-3-clause | 93 | 0 | 7 | 15 | 26 | 16 | 10 | -1 | -1 |
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
module Distribution.Server (
-- * Server control
Server(..),
ServerEnv(..),
initialise,
run,
shutdown,
checkpoint,
reloadDatafiles,
-- * Server configuration
ListenOn(..),
ServerConfig(..),
defaultServerConfig,
hasSavedState,
-- * Server state
serverState,
initState,
-- * Temporary server while loading data
setUpTemp,
tearDownTemp
) where
import Happstack.Server.SimpleHTTP
import Distribution.Server.Framework
import qualified Distribution.Server.Framework.BackupRestore as Import
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import qualified Distribution.Server.Framework.Auth as Auth
import Distribution.Server.Framework.Templating (TemplatesMode(NormalMode))
import Distribution.Server.Framework.AuthTypes (PasswdPlain(..))
import Distribution.Server.Framework.HtmlFormWrapper (htmlFormWrapperHack)
import Distribution.Server.Framework.Feature as Feature
import qualified Distribution.Server.Features as Features
import Distribution.Server.Features.Users
import qualified Distribution.Server.Users.Types as Users
import qualified Distribution.Server.Users.Users as Users
import qualified Distribution.Server.Users.Group as Group
import Distribution.Text
import Distribution.Verbosity as Verbosity
import System.Directory (createDirectoryIfMissing, doesDirectoryExist)
import Control.Concurrent
import Network.URI (URI(..), URIAuth(URIAuth), nullURI)
import Network.BSD (getHostName)
import Data.List (foldl', nubBy)
import Data.Int (Int64)
import Control.Arrow (second)
import Data.Function (on)
import qualified System.Log.Logger as HsLogger
import Control.Exception.Lifted as Lifted
import Paths_hackage_server (getDataDir)
data ListenOn = ListenOn {
loPortNum :: Int,
loIP :: String
} deriving (Show)
data ServerConfig = ServerConfig {
confVerbosity :: Verbosity,
confHostUri :: URI,
confListenOn :: ListenOn,
confStateDir :: FilePath,
confStaticDir :: FilePath,
confTmpDir :: FilePath,
confCacheDelay:: Int
} deriving (Show)
confDbStateDir, confBlobStoreDir,
confStaticFilesDir, confTemplatesDir :: ServerConfig -> FilePath
confDbStateDir config = confStateDir config </> "db"
confBlobStoreDir config = confStateDir config </> "blobs"
confStaticFilesDir config = confStaticDir config </> "static"
confTemplatesDir config = confStaticDir config </> "templates"
defaultServerConfig :: IO ServerConfig
defaultServerConfig = do
hostName <- getHostName
dataDir <- getDataDir
let portnum = 8080 :: Int
return ServerConfig {
confVerbosity = Verbosity.normal,
confHostUri = nullURI {
uriScheme = "http:",
uriAuthority = Just (URIAuth "" hostName (':' : show portnum))
},
confListenOn = ListenOn {
loPortNum = 8080,
loIP = "0.0.0.0"
},
confStateDir = "state",
confStaticDir = dataDir,
confTmpDir = "state" </> "tmp",
confCacheDelay= 0
}
data Server = Server {
serverFeatures :: [HackageFeature],
serverUserFeature :: UserFeature,
serverListenOn :: ListenOn,
serverEnv :: ServerEnv
}
-- | If we made a server instance from this 'ServerConfig', would we find some
-- existing saved state or would it be a totally clean instance with no
-- existing state.
--
hasSavedState :: ServerConfig -> IO Bool
hasSavedState = doesDirectoryExist . confDbStateDir
mkServerEnv :: ServerConfig -> IO ServerEnv
mkServerEnv config@(ServerConfig verbosity hostURI _
stateDir _ tmpDir
cacheDelay) = do
createDirectoryIfMissing False stateDir
let blobStoreDir = confBlobStoreDir config
staticDir = confStaticFilesDir config
templatesDir = confTemplatesDir config
store <- BlobStorage.open blobStoreDir
let env = ServerEnv {
serverStaticDir = staticDir,
serverTemplatesDir = templatesDir,
serverTemplatesMode = NormalMode,
serverStateDir = stateDir,
serverBlobStore = store,
serverTmpDir = tmpDir,
serverCacheDelay = cacheDelay * 1000000, --microseconds
serverBaseURI = hostURI,
serverVerbosity = verbosity
}
return env
-- | Make a server instance from the server configuration.
--
-- This does not yet run the server (see 'run') but it does set up the server
-- state system, making it possible to import data, and initializes the
-- features.
--
-- Note: the server instance must eventually be 'shutdown' or you'll end up
-- with stale lock files.
--
initialise :: ServerConfig -> IO Server
initialise config = do
env <- mkServerEnv config
-- do feature initialization
(features, userFeature) <- Features.initHackageFeatures env
return Server {
serverFeatures = features,
serverUserFeature = userFeature,
serverListenOn = confListenOn config,
serverEnv = env
}
-- | Actually run the server, i.e. start accepting client http connections.
--
run :: Server -> IO ()
run server = do
-- We already check this in Main, so we expect this check to always
-- succeed, but just in case...
let staticDir = serverStaticDir (serverEnv server)
exists <- doesDirectoryExist staticDir
when (not exists) $ fail $ "The static files directory " ++ staticDir ++ " does not exist."
runServer listenOn $ do
handlePutPostQuotas
setLogging
fakeBrowserHttpMethods (impl server)
where
listenOn = serverListenOn server
-- HS6 - Quotas should be configurable as well. Also there are places in
-- the code that want to work with the request body directly but maybe
-- fail if the request body has already been consumed. The body will only
-- be consumed if it is a POST/PUT request *and* the content-type is
-- multipart/form-data. If this does happen, you should get a clear error
-- message saying what happened.
handlePutPostQuotas = decodeBody bodyPolicy
where
tmpdir = serverTmpDir (serverEnv server)
quota = 50 * (1024 ^ (2:: Int64))
-- setting quota at 50mb, though perhaps should be configurable?
bodyPolicy = defaultBodyPolicy tmpdir quota quota quota
setLogging =
liftIO $ HsLogger.updateGlobalLogger
"Happstack.Server"
(adjustLogLevel (serverVerbosity (serverEnv server)))
where
adjustLogLevel v
| v == Verbosity.normal = HsLogger.setLevel HsLogger.WARNING
| v == Verbosity.verbose = HsLogger.setLevel HsLogger.INFO
| v == Verbosity.deafening = HsLogger.setLevel HsLogger.DEBUG
| otherwise = id
-- This is a cunning hack to solve the problem that HTML forms do not
-- support PUT, DELETE, etc, they only support GET and POST. We don't want
-- to compromise the design of the whole server just because HTML does not
-- support HTTP properly, so we allow browsers using HTML forms to do
-- PUT/DELETE etc by POSTing with special body parameters.
fakeBrowserHttpMethods part =
msum [ do method POST
htmlFormWrapperHack part
-- or just do things the normal way
, part
]
-- | Perform a clean shutdown of the server.
--
shutdown :: Server -> IO ()
shutdown server =
Features.shutdownAllFeatures (serverFeatures server)
  --TODO: stop accepting incoming connections,
-- wait for connections to be processed.
-- | Write out a checkpoint of the server state. This makes recovery quicker
-- because fewer logged transactions have to be replayed.
--
checkpoint :: Server -> IO ()
checkpoint server =
Features.checkpointAllFeatures (serverFeatures server)
reloadDatafiles :: Server -> IO ()
reloadDatafiles server =
mapM_ Feature.featureReloadFiles (serverFeatures server)
-- | Return /one/ abstract state component per feature
serverState :: Server -> [(String, AbstractStateComponent)]
serverState server = [ (featureName feature, mconcat (featureState feature))
| feature <- serverFeatures server
]
-- An alternative to an import: starts the server off to a sane initial state.
-- To accomplish this, we import a 'null' tarball, finalizing immediately after initializing the import
initState :: Server -> (String, String) -> IO ()
initState server (admin, pass) = do
let store = serverBlobStore (serverEnv server)
void . Import.importBlank store $ map (second abstractStateRestore) (serverState server)
-- create default admin user
let UserFeature{updateAddUser, adminGroup} = serverUserFeature server
muid <- case simpleParse admin of
Just uname -> do
let userAuth = Auth.newPasswdHash Auth.hackageRealm uname (PasswdPlain pass)
updateAddUser uname (Users.UserAuth userAuth)
Nothing -> fail "Couldn't parse admin name (should be alphanumeric)"
case muid of
Right uid -> Group.addUserList adminGroup uid
Left Users.ErrUserNameClash -> fail $ "Inconceivable!! failed to create admin user"
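-- A usage sketch (hypothetical credentials): a fresh server with no saved
-- state (i.e. when 'hasSavedState' returns False) is typically brought up
-- along these lines:
--
-- > config <- defaultServerConfig
-- > server <- initialise config
-- > initState server ("admin", "changeme")
-- > run server `finally` shutdown server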
-- The top-level server part.
-- It collects resources from Distribution.Server.Features, collects
-- them into a path hierarchy, and serves them.
impl :: Server -> ServerPart Response
impl server = logExceptions $
runServerPartE $
handleErrorResponse (serveErrorResponse errHandlers Nothing) $
renderServerTree [] serverTree
`mplus`
fallbackNotFound
where
serverTree :: ServerTree (DynamicPath -> ServerPartE Response)
serverTree =
fmap (serveResource errHandlers)
-- ServerTree Resource
. foldl' (\acc res -> addServerNode (resourceLocation res) res acc) serverTreeEmpty
-- [Resource]
$ concatMap Feature.featureResources (serverFeatures server)
errHandlers = nubBy ((==) `on` fst)
. reverse
. (("txt", textErrorPage):)
. concatMap Feature.featureErrHandlers
$ serverFeatures server
    -- This basic one can be overridden in another feature, but it means even in a
    -- minimal server we can provide content-negotiated text/plain errors
textErrorPage :: ErrorResponse -> ServerPartE Response
textErrorPage = return . toResponse
fallbackNotFound =
errNotFound "Page not found"
[MText "Sorry, it's just not here."]
logExceptions :: ServerPart Response -> ServerPart Response
logExceptions act = Lifted.catch act $ \e -> do
liftIO . lognotice verbosity $ "WARNING: Received exception: " ++ show e
Lifted.throwIO (e :: SomeException)
verbosity = serverVerbosity (serverEnv server)
data TempServer = TempServer ThreadId
setUpTemp :: ServerConfig -> Int -> IO TempServer
setUpTemp sconf secs = do
tid <- forkIO $ do
-- wait a certain amount of time before setting it up, because sometimes
-- happstack-state is very fast, and switching the servers has a time
-- cost to it
threadDelay $ secs*1000000
-- could likewise specify a mirror to redirect to for tarballs, and 503 for everything else
runServer listenOn $ (resp 503 $ setHeader "Content-Type" "text/html" $ toResponse html503)
return (TempServer tid)
where listenOn = confListenOn sconf
runServer :: (ToMessage a) => ListenOn -> ServerPartT IO a -> IO ()
runServer listenOn f
= do socket <- bindIPv4 (loIP listenOn) (loPortNum listenOn)
simpleHTTPWithSocket socket nullConf f
-- | Static 503 page, based on Happstack's 404 page.
html503 :: String
html503 =
"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">" ++
"<html><head><title>503 Service Unavailable</title></head><body><h1>" ++
"503 Service Unavailable</h1><p>The server is undergoing maintenance" ++
"<br>It'll be back soon</p></body></html>"
tearDownTemp :: TempServer -> IO ()
tearDownTemp (TempServer tid) = do
killThread tid
-- give the server enough time to release the bind
threadDelay $ 1000000
| haskell-infra/hackage-server | Distribution/Server.hs | bsd-3-clause | 12,360 | 0 | 18 | 2,943 | 2,284 | 1,251 | 1,033 | 217 | 3 |
module Control.Monad.Extras (seqM) where
seqM :: Monad m => m a -> m a
seqM m = do
a <- m
return $! a
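-- A small usage sketch (hypothetical 'parseConfig'): force the result of an
-- action to WHNF within the current monadic context, so that e.g. an error
-- hidden in a lazily produced value surfaces here rather than at a later
-- use site:
--
-- > cfg <- seqM (parseConfig contents)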
| maoe/ghc-time-alloc-prof | src/Control/Monad/Extras.hs | bsd-3-clause | 107 | 0 | 7 | 29 | 56 | 28 | 28 | 5 | 1 |
module Pos.Util.Future
( FutureError(..)
, newInitFuture
) where
import Universum
import System.IO.Unsafe (unsafeInterleaveIO)
data FutureError = FutureAlreadyFilled Text
deriving Show
instance Exception FutureError
-- | 'newInitFuture' creates a thunk and a procedure to fill it. This can be
-- used to create a data structure and initialize it gradually while doing some
-- IO (e.g. accessing the database).
-- There are two contracts the caller must obey:
-- * the thunk isn't forced until the procedure to fill it has been called.
-- Violation of this contract will either block the thread forever or
-- trigger the error "thread blocked indefinitely in an MVar operation".
-- * the procedure to fill the thunk is called at most once.
-- Violation of this contract will throw `FutureAlreadyFilled`.
--
-- You can provide a name to 'newInitFuture' to make debugging easier when
-- something goes wrong and e.g. a future get filled twice.
newInitFuture
:: forall m m' a.
(MonadIO m, MonadIO m')
=> Text -> m (a, a -> m' ())
newInitFuture name = do
mvar <- newEmptyMVar
thunk <- liftIO $ unsafeInterleaveIO (readMVar mvar)
let setter value = assertSingleAssignment =<< tryPutMVar mvar value
pure (thunk, setter)
where
assertSingleAssignment :: Bool -> m' ()
assertSingleAssignment = \case
True -> pure ()
False -> liftIO $ throwM (FutureAlreadyFilled name)
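-- A minimal usage sketch (hypothetical names): create the future before the
-- real value exists, hand the lazy thunk to consumers without forcing it,
-- and fill it exactly once later:
--
-- > (nodeContext, fillNodeContext) <- newInitFuture "nodeContext"
-- > -- ... embed 'nodeContext' in a larger structure, unforced ...
-- > ctx <- buildNodeContext
-- > fillNodeContext ctx  -- must run before anyone forces 'nodeContext'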
| input-output-hk/pos-haskell-prototype | util/src/Pos/Util/Future.hs | mit | 1,466 | 0 | 13 | 329 | 234 | 128 | 106 | -1 | -1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This module builds Docker (OpenContainer) images.
module Stack.Image
(imageDocker, imgCmdName, imgDockerCmdName, imgOptsFromMonoid,
imgDockerOptsFromMonoid, imgOptsParser, imgDockerOptsParser)
where
import Control.Applicative
import Control.Exception.Lifted
import Control.Monad
import Control.Monad.Catch hiding (bracket)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.Char (toLower)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Typeable
import Options.Applicative
import Path
import Path.IO
import Stack.Build.Source
import Stack.Package
import Stack.Types
import Stack.Types.Internal
import qualified System.Directory as SD
import System.IO.Temp
import System.FilePath (isPathSeparator)
import System.Process
type M e m = (HasBuildConfig e, HasConfig e, HasEnvConfig e, HasTerminal e,
MonadBaseControl IO m, MonadCatch m, MonadIO m, MonadLogger m,
MonadReader e m)
-- | Builds a Docker (OpenContainer) image extending the `base` image
-- specified in the project's stack.yaml. The new image will contain
-- all the executables from the packages in the project as well as any
-- other specified files to `add`. The new image will then be extended
-- with an ENTRYPOINT specified for each `entrypoint` listed in the
-- config file.
imageDocker :: M e m => m ()
imageDocker = do
tempDirFP <- liftIO SD.getTemporaryDirectory
bracket
(liftIO (createTempDirectory tempDirFP "stack-image-docker"))
(liftIO . SD.removeDirectoryRecursive)
(\dir ->
do stageExesInDir dir
syncAddContentToDir dir
createDockerImage dir
extendDockerImageWithEntrypoint dir)
-- | Extract all the Package(s) from the stack.yaml config file &
-- project cabal files.
projectPkgs :: M e m => m [Package]
projectPkgs = do
econfig <- asks getEnvConfig
bconfig <- asks getBuildConfig
forM
(Map.toList
(bcPackages bconfig))
(\(dir,_wanted) ->
do cabalfp <- getCabalFileName dir
name <- parsePackageNameFromFilePath cabalfp
let cfg = PackageConfig
{ packageConfigEnableTests = True
, packageConfigEnableBenchmarks = True
, packageConfigFlags = localFlags mempty bconfig name
, packageConfigGhcVersion = envConfigGhcVersion econfig
, packageConfigPlatform = configPlatform
(getConfig bconfig)
}
readPackage cfg cabalfp)
-- | Stage all the Package executables in the usr/local/bin
-- subdirectory of a temp directory.
stageExesInDir :: M e m => FilePath -> m ()
stageExesInDir dir = do
srcBinPath <- (</> $(mkRelDir "bin")) <$> installationRootLocal
destBinPath <- (</> $(mkRelDir "usr/local/bin")) <$> parseAbsDir dir
createTree destBinPath
pkgs <- projectPkgs
forM_
(concatMap (Set.toList . packageExes) pkgs)
(\exe ->
do exePath <-
parseRelFile
(T.unpack exe)
copyFile
(srcBinPath </> exePath)
(destBinPath </> exePath))
-- | Add any additional files into the temp directory, respecting the
-- (Source, Destination) mapping.
syncAddContentToDir :: M e m => FilePath -> m ()
syncAddContentToDir dir = do
config <- asks getConfig
bconfig <- asks getBuildConfig
dirPath <- parseAbsDir dir
let imgAdd = maybe Map.empty imgDockerAdd (imgDocker (configImage config))
forM_
(Map.toList imgAdd)
(\(source,dest) ->
do sourcePath <- parseRelDir source
destPath <- parseAbsDir dest
let destFullPath = dirPath </> dropRoot destPath
createTree destFullPath
copyDirectoryRecursive
(bcRoot bconfig </> sourcePath)
destFullPath)
-- | Derive an image name from the project directory.
imageName :: BuildConfig -> String
imageName = map toLower . filter (not . isPathSeparator) . toFilePath . dirname . bcRoot
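-- For example (hypothetical project): a project rooted at
-- @/home/alice/MyApp/@ yields the image name @"myapp"@.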
-- | Create a general purpose docker image from the temporary
-- directory of executables & static content.
createDockerImage :: M e m => FilePath -> m ()
createDockerImage dir = do
config <- asks getConfig
bconfig <- asks getBuildConfig
let dockerConfig = imgDocker (configImage config)
case imgDockerBase =<< dockerConfig of
Nothing -> throwM StackImageDockerBaseUnspecifiedException
Just base -> do
dirPath <- parseAbsDir dir
liftIO
(do writeFile
(toFilePath
(dirPath </>
$(mkRelFile "Dockerfile")))
(unlines ["FROM " ++ base, "ADD ./ /"])
callProcess
"docker"
["build"
,"-t"
,fromMaybe (imageName bconfig)
(imgDockerImageName =<< dockerConfig)
,dir])
-- | Extend the general purpose docker image with entrypoints (if
-- specified).
extendDockerImageWithEntrypoint :: M e m => FilePath -> m ()
extendDockerImageWithEntrypoint dir = do
config <- asks getConfig
bconfig <- asks getBuildConfig
let dockerConfig = imgDocker (configImage config)
let dockerImageName = fromMaybe (imageName bconfig) (imgDockerImageName =<< dockerConfig)
let imgEntrypoints = maybe
Nothing
imgDockerEntrypoints
dockerConfig
case imgEntrypoints of
Nothing -> return ()
Just eps -> do
dirPath <- parseAbsDir dir
forM_
eps
(\ep ->
liftIO
(do writeFile
(toFilePath
(dirPath </>
$(mkRelFile "Dockerfile")))
(unlines
[ "FROM " ++ dockerImageName
, "ENTRYPOINT [\"/usr/local/bin/" ++
ep ++ "\"]"
, "CMD []"])
callProcess
"docker"
[ "build"
, "-t"
, dockerImageName ++ "-" ++ ep
, dir]))
-- | The command name for dealing with images.
imgCmdName :: String
imgCmdName = "image"
-- | The command name for building a docker container.
imgDockerCmdName :: String
imgDockerCmdName = "container"
-- | A parser for ImageOptsMonoid.
imgOptsParser :: Parser ImageOptsMonoid
imgOptsParser = ImageOptsMonoid <$>
optional
(subparser
(command
imgDockerCmdName
(info
imgDockerOptsParser
(progDesc "Create a container image (EXPERIMENTAL)"))))
-- | A parser for ImageDockerOptsMonoid.
imgDockerOptsParser :: Parser ImageDockerOptsMonoid
imgDockerOptsParser = ImageDockerOptsMonoid <$>
optional
(option
str
(long (imgDockerCmdName ++ "-" ++ T.unpack imgDockerBaseArgName) <>
metavar "NAME" <>
help "Docker base image name")) <*>
pure Nothing <*>
pure Nothing <*>
pure Nothing
-- | Convert image opts monoid to image options.
imgOptsFromMonoid :: ImageOptsMonoid -> ImageOpts
imgOptsFromMonoid ImageOptsMonoid{..} = ImageOpts
{ imgDocker = imgDockerOptsFromMonoid <$> imgMonoidDocker
}
-- | Convert Docker image opts monoid to Docker image options.
imgDockerOptsFromMonoid :: ImageDockerOptsMonoid -> ImageDockerOpts
imgDockerOptsFromMonoid ImageDockerOptsMonoid{..} = ImageDockerOpts
{ imgDockerBase = emptyToNothing imgDockerMonoidBase
, imgDockerEntrypoints = emptyToNothing imgDockerMonoidEntrypoints
, imgDockerAdd = fromMaybe Map.empty imgDockerMonoidAdd
, imgDockerImageName = emptyToNothing imgDockerMonoidImageName
}
where emptyToNothing Nothing = Nothing
emptyToNothing (Just s)
| null s =
Nothing
| otherwise =
Just s
-- | Stack image exceptions.
data StackImageException =
StackImageDockerBaseUnspecifiedException
deriving (Typeable)
instance Exception StackImageException
instance Show StackImageException where
show StackImageDockerBaseUnspecifiedException = "You must specify a base docker image on which to place your haskell executables."
| wskplho/stack | src/Stack/Image.hs | bsd-3-clause | 9,570 | 0 | 26 | 3,271 | 1,742 | 895 | 847 | 202 | 2 |
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module SPARC.ShortcutJump (
JumpDest(..), getJumpDestBlockId,
canShortcut,
shortcutJump,
shortcutStatics,
shortBlockId
)
where
import SPARC.Instr
import SPARC.Imm
import CLabel
import BlockId
import OldCmm
import Panic
import Unique
data JumpDest
= DestBlockId BlockId
| DestImm Imm
getJumpDestBlockId :: JumpDest -> Maybe BlockId
getJumpDestBlockId (DestBlockId bid) = Just bid
getJumpDestBlockId _ = Nothing
canShortcut :: Instr -> Maybe JumpDest
canShortcut _ = Nothing
shortcutJump :: (BlockId -> Maybe JumpDest) -> Instr -> Instr
shortcutJump _ other = other
shortcutStatics :: (BlockId -> Maybe JumpDest) -> CmmStatics -> CmmStatics
shortcutStatics fn (Statics lbl statics)
= Statics lbl $ map (shortcutStatic fn) statics
-- we need to get the jump tables, so apply the mapping to the entries
-- of a CmmData too.
shortcutLabel :: (BlockId -> Maybe JumpDest) -> CLabel -> CLabel
shortcutLabel fn lab
| Just uq <- maybeAsmTemp lab = shortBlockId fn (mkBlockId uq)
| otherwise = lab
shortcutStatic :: (BlockId -> Maybe JumpDest) -> CmmStatic -> CmmStatic
shortcutStatic fn (CmmStaticLit (CmmLabel lab))
= CmmStaticLit (CmmLabel (shortcutLabel fn lab))
shortcutStatic fn (CmmStaticLit (CmmLabelDiffOff lbl1 lbl2 off))
= CmmStaticLit (CmmLabelDiffOff (shortcutLabel fn lbl1) lbl2 off)
-- slightly dodgy, we're ignoring the second label, but this
-- works with the way we use CmmLabelDiffOff for jump tables now.
shortcutStatic _ other_static
= other_static
shortBlockId :: (BlockId -> Maybe JumpDest) -> BlockId -> CLabel
shortBlockId fn blockid =
case fn blockid of
Nothing -> mkAsmTempLabel (getUnique blockid)
Just (DestBlockId blockid') -> shortBlockId fn blockid'
Just (DestImm (ImmCLbl lbl)) -> lbl
_other -> panic "shortBlockId"
| nomeata/ghc | compiler/nativeGen/SPARC/ShortcutJump.hs | bsd-3-clause | 2,182 | 16 | 12 | 412 | 537 | 277 | 260 | 45 | 4 |
{-# LANGUAGE CPP #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 1994-2004
--
-- -----------------------------------------------------------------------------
module PPC.Regs (
-- squeeze functions
virtualRegSqueeze,
realRegSqueeze,
mkVirtualReg,
regDotColor,
-- immediates
Imm(..),
strImmLit,
litToImm,
-- addressing modes
AddrMode(..),
addrOffset,
-- registers
spRel,
argRegs,
allArgRegs,
callClobberedRegs,
allMachRegNos,
classOfRealReg,
showReg,
-- machine specific
allFPArgRegs,
fits16Bits,
makeImmediate,
fReg,
sp, toc, r3, r4, r11, r12, r27, r28, r30,
f1, f20, f21,
allocatableRegs
)
where
#include "nativeGen/NCG.h"
#include "HsVersions.h"
import Reg
import RegClass
import Format
import Cmm
import CLabel ( CLabel )
import Unique
import CodeGen.Platform
import DynFlags
import Outputable
import Platform
import Data.Word ( Word8, Word16, Word32, Word64 )
import Data.Int ( Int8, Int16, Int32, Int64 )
-- squeeze functions for the graph allocator -----------------------------------
-- | regSqueeze_class reg
--      Calculate the maximum number of register colors that could be
-- denied to a node of this class due to having this reg
-- as a neighbour.
--
{-# INLINE virtualRegSqueeze #-}
virtualRegSqueeze :: RegClass -> VirtualReg -> Int
virtualRegSqueeze cls vr
= case cls of
RcInteger
-> case vr of
VirtualRegI{} -> 1
VirtualRegHi{} -> 1
_other -> 0
RcDouble
-> case vr of
VirtualRegD{} -> 1
VirtualRegF{} -> 0
_other -> 0
_other -> 0
{-# INLINE realRegSqueeze #-}
realRegSqueeze :: RegClass -> RealReg -> Int
realRegSqueeze cls rr
= case cls of
RcInteger
-> case rr of
RealRegSingle regNo
| regNo < 32 -> 1 -- first fp reg is 32
| otherwise -> 0
RealRegPair{} -> 0
RcDouble
-> case rr of
RealRegSingle regNo
| regNo < 32 -> 0
| otherwise -> 1
RealRegPair{} -> 0
_other -> 0
mkVirtualReg :: Unique -> Format -> VirtualReg
mkVirtualReg u format
| not (isFloatFormat format) = VirtualRegI u
| otherwise
= case format of
FF32 -> VirtualRegD u
FF64 -> VirtualRegD u
_ -> panic "mkVirtualReg"
regDotColor :: RealReg -> SDoc
regDotColor reg
= case classOfRealReg reg of
RcInteger -> text "blue"
RcFloat -> text "red"
RcDouble -> text "green"
RcDoubleSSE -> text "yellow"
-- immediates ------------------------------------------------------------------
data Imm
= ImmInt Int
| ImmInteger Integer -- Sigh.
| ImmCLbl CLabel -- AbstractC Label (with baggage)
| ImmLit SDoc -- Simple string
| ImmIndex CLabel Int
| ImmFloat Rational
| ImmDouble Rational
| ImmConstantSum Imm Imm
| ImmConstantDiff Imm Imm
| LO Imm
| HI Imm
| HA Imm {- high halfword adjusted -}
| HIGHERA Imm
| HIGHESTA Imm
strImmLit :: String -> Imm
strImmLit s = ImmLit (text s)
litToImm :: CmmLit -> Imm
litToImm (CmmInt i w) = ImmInteger (narrowS w i)
-- narrow to the width: a CmmInt might be out of
-- range, but we assume that ImmInteger only contains
-- in-range values. A signed value should be fine here.
litToImm (CmmFloat f W32) = ImmFloat f
litToImm (CmmFloat f W64) = ImmDouble f
litToImm (CmmLabel l) = ImmCLbl l
litToImm (CmmLabelOff l off) = ImmIndex l off
litToImm (CmmLabelDiffOff l1 l2 off)
= ImmConstantSum
(ImmConstantDiff (ImmCLbl l1) (ImmCLbl l2))
(ImmInt off)
litToImm _ = panic "PPC.Regs.litToImm: no match"
-- addressing modes ------------------------------------------------------------
data AddrMode
= AddrRegReg Reg Reg
| AddrRegImm Reg Imm
addrOffset :: AddrMode -> Int -> Maybe AddrMode
addrOffset addr off
= case addr of
AddrRegImm r (ImmInt n)
| fits16Bits n2 -> Just (AddrRegImm r (ImmInt n2))
| otherwise -> Nothing
where n2 = n + off
AddrRegImm r (ImmInteger n)
| fits16Bits n2 -> Just (AddrRegImm r (ImmInt (fromInteger n2)))
| otherwise -> Nothing
where n2 = n + toInteger off
_ -> Nothing
-- registers -------------------------------------------------------------------
-- @spRel@ gives us a stack relative addressing mode for volatile
-- temporaries and for excess call arguments. @fpRel@, where
-- applicable, is the same but for the frame pointer.
spRel :: DynFlags
-> Int -- desired stack offset in words, positive or negative
-> AddrMode
spRel dflags n = AddrRegImm sp (ImmInt (n * wORD_SIZE dflags))
-- argRegs is the set of regs which are read for an n-argument call to C.
-- For archs which pass all args on the stack (x86), this is empty.
-- Sparc passes up to the first 6 args in regs.
argRegs :: RegNo -> [Reg]
argRegs 0 = []
argRegs 1 = map regSingle [3]
argRegs 2 = map regSingle [3,4]
argRegs 3 = map regSingle [3..5]
argRegs 4 = map regSingle [3..6]
argRegs 5 = map regSingle [3..7]
argRegs 6 = map regSingle [3..8]
argRegs 7 = map regSingle [3..9]
argRegs 8 = map regSingle [3..10]
argRegs _ = panic "MachRegs.argRegs(powerpc): don't know about >8 arguments!"
allArgRegs :: [Reg]
allArgRegs = map regSingle [3..10]
-- these are the regs which we cannot assume stay alive over a C call.
callClobberedRegs :: Platform -> [Reg]
callClobberedRegs platform
= case platformOS platform of
OSDarwin -> map regSingle (0:[2..12] ++ map fReg [0..13])
OSLinux -> map regSingle (0:[2..13] ++ map fReg [0..13])
_ -> panic "PPC.Regs.callClobberedRegs: not defined for this architecture"
allMachRegNos :: [RegNo]
allMachRegNos = [0..63]
{-# INLINE classOfRealReg #-}
classOfRealReg :: RealReg -> RegClass
classOfRealReg (RealRegSingle i)
| i < 32 = RcInteger
| otherwise = RcDouble
classOfRealReg (RealRegPair{})
= panic "regClass(ppr): no reg pairs on this architecture"
showReg :: RegNo -> String
showReg n
| n >= 0 && n <= 31 = "%r" ++ show n
| n >= 32 && n <= 63 = "%f" ++ show (n - 32)
| otherwise = "%unknown_powerpc_real_reg_" ++ show n
-- machine specific ------------------------------------------------------------
allFPArgRegs :: Platform -> [Reg]
allFPArgRegs platform
= case platformOS platform of
OSDarwin -> map (regSingle . fReg) [1..13]
OSLinux -> map (regSingle . fReg) [1..8]
_ -> panic "PPC.Regs.allFPArgRegs: not defined for this architecture"
fits16Bits :: Integral a => a -> Bool
fits16Bits x = x >= -32768 && x < 32768
makeImmediate :: Integral a => Width -> Bool -> a -> Maybe Imm
makeImmediate rep signed x = fmap ImmInt (toI16 rep signed)
where
narrow W64 False = fromIntegral (fromIntegral x :: Word64)
narrow W32 False = fromIntegral (fromIntegral x :: Word32)
narrow W16 False = fromIntegral (fromIntegral x :: Word16)
narrow W8 False = fromIntegral (fromIntegral x :: Word8)
narrow W64 True = fromIntegral (fromIntegral x :: Int64)
narrow W32 True = fromIntegral (fromIntegral x :: Int32)
narrow W16 True = fromIntegral (fromIntegral x :: Int16)
narrow W8 True = fromIntegral (fromIntegral x :: Int8)
narrow _ _ = panic "PPC.Regs.narrow: no match"
narrowed = narrow rep signed
toI16 W32 True
| narrowed >= -32768 && narrowed < 32768 = Just narrowed
| otherwise = Nothing
toI16 W32 False
| narrowed >= 0 && narrowed < 65536 = Just narrowed
| otherwise = Nothing
toI16 W64 True
| narrowed >= -32768 && narrowed < 32768 = Just narrowed
| otherwise = Nothing
toI16 W64 False
| narrowed >= 0 && narrowed < 65536 = Just narrowed
| otherwise = Nothing
toI16 _ _ = Just narrowed
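-- For example: @makeImmediate W32 True (70000 :: Int)@ is 'Nothing' because
-- the value does not fit in a signed 16-bit immediate, while
-- @makeImmediate W32 True (-5 :: Int)@ is @Just (ImmInt (-5))@.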
{-
The PowerPC has 64 registers of interest; 32 integer registers and 32 floating
point registers.
-}
fReg :: Int -> RegNo
fReg x = (32 + x)
sp, toc, r3, r4, r11, r12, r27, r28, r30, f1, f20, f21 :: Reg
sp = regSingle 1
toc = regSingle 2
r3 = regSingle 3
r4 = regSingle 4
r11 = regSingle 11
r12 = regSingle 12
r27 = regSingle 27
r28 = regSingle 28
r30 = regSingle 30
f1 = regSingle $ fReg 1
f20 = regSingle $ fReg 20
f21 = regSingle $ fReg 21
-- allocatableRegs is allMachRegNos with the fixed-use regs removed.
-- i.e., these are the regs for which we are prepared to allow the
-- register allocator to attempt to map VRegs to.
allocatableRegs :: Platform -> [RealReg]
allocatableRegs platform
= let isFree i = freeReg platform i
in map RealRegSingle $ filter isFree allMachRegNos
| acowley/ghc | compiler/nativeGen/PPC/Regs.hs | bsd-3-clause | 9,617 | 0 | 15 | 3,044 | 2,362 | 1,229 | 1,133 | 216 | 13 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Rank2Types #-}
#ifndef HLINT
{-# LANGUAGE UnboxedTuples #-}
#endif
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
{-# OPTIONS_GHC -fno-full-laziness #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Data.Lens
-- Copyright : (C) 2012-2015 Edward Kmett, (C) 2006-2012 Neil Mitchell
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : Rank2Types
--
-- Smart and naïve generic traversals given 'Data' instances.
--
-- 'template', 'uniplate', and 'biplate' each build up information about what
-- types can be contained within another type to speed up 'Traversal'.
--
----------------------------------------------------------------------------
module Data.Data.Lens
(
-- * Generic Traversal
template
, tinplate
, uniplate
, biplate
-- * Field Accessor Traversal
, upon
, upon'
, onceUpon
, onceUpon'
-- * Data Traversal
, gtraverse
) where
import Control.Applicative
import Control.Exception as E
import Control.Lens.Internal.Context
import Control.Lens.Internal.Indexed
import Control.Lens.Lens
import Control.Lens.Setter
import Control.Lens.Traversal
import Control.Lens.Type
import Data.Data
import GHC.IO
import Data.Maybe
import Data.Foldable
import qualified Data.HashMap.Strict as M
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashSet as S
import Data.HashSet (HashSet)
import Data.IORef
import Data.Monoid
import GHC.Exts (realWorld#)
import Prelude
#ifdef HLINT
{-# ANN module "HLint: ignore Eta reduce" #-}
{-# ANN module "HLint: ignore Use foldl" #-}
{-# ANN module "HLint: ignore Reduce duplication" #-}
{-# ANN module "HLint: ignore Unused LANGUAGE pragma" #-}
#endif
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Control.Lens
-------------------------------------------------------------------------------
-- Generic Traversal
-------------------------------------------------------------------------------
-- | A generic applicative transformation that maps over the immediate subterms.
--
-- 'gtraverse' is to 'traverse' what 'gmapM' is to 'mapM'
--
-- This really belongs in @Data.Data@.
gtraverse :: (Applicative f, Data a) => (forall d. Data d => d -> f d) -> a -> f a
gtraverse f = gfoldl (\x y -> x <*> f y) pure
{-# INLINE gtraverse #-}
-------------------------------------------------------------------------------
-- Naïve Traversal
-------------------------------------------------------------------------------
-- | Naïve 'Traversal' using 'Data'. This does not attempt to optimize the traversal.
--
-- This is primarily useful when the children are immediately obvious, and for benchmarking.
tinplate :: (Data s, Typeable a) => Traversal' s a
tinplate f = gfoldl (step f) pure
{-# INLINE tinplate #-}
step :: forall s a f r. (Applicative f, Typeable a, Data s) => (a -> f a) -> f (s -> r) -> s -> f r
step f w s = w <*> case mightBe :: Maybe (Is s a) of
Just Data.Data.Lens.Refl -> f s
Nothing -> tinplate f s
{-# INLINE step #-}
-------------------------------------------------------------------------------
-- Smart Traversal
-------------------------------------------------------------------------------
-- | Find every occurrence of a given type @a@ recursively that doesn't require
-- passing through something of type @a@ using 'Data', while avoiding traversal
-- of areas that cannot contain a value of type @a@.
--
-- This is 'uniplate' with a more liberal signature.
template :: forall s a. (Data s, Typeable a) => Traversal' s a
template = uniplateData (fromOracle answer) where
answer = hitTest (undefined :: s) (undefined :: a)
{-# INLINE template #-}
-- | Find descendants of type @a@ non-transitively, while avoiding computation of areas that cannot contain values of
-- type @a@ using 'Data'.
--
-- 'uniplate' is a useful default definition for 'Control.Lens.Plated.plate'
uniplate :: Data a => Traversal' a a
uniplate = template
{-# INLINE uniplate #-}
-- | 'biplate' performs like 'template', except when @s ~ a@, it returns itself and nothing else.
biplate :: forall s a. (Data s, Typeable a) => Traversal' s a
biplate = biplateData (fromOracle answer) where
answer = hitTest (undefined :: s) (undefined :: a)
{-# INLINE biplate #-}
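-- An illustrative contrast (not a doctest), taking @s ~ a ~ [Int]@:
--
-- * @([1,2,3] :: [Int]) ^.. biplate :: [[Int]]@ yields @[[1,2,3]]@, i.e. the
--   whole value itself and nothing else.
--
-- * @([1,2,3] :: [Int]) ^.. template :: [[Int]]@ yields @[[2,3]]@, i.e. only
--   the immediate self-similar child (the tail), since 'template' never
--   returns the root when @s ~ a@.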
------------------------------------------------------------------------------
-- Automatic Traversal construction from field accessors
------------------------------------------------------------------------------
data FieldException a = FieldException !Int a deriving Typeable
instance Show (FieldException a) where
showsPrec d (FieldException i _) = showParen (d > 10) $
showString "<field " . showsPrec 11 i . showChar '>'
instance Typeable a => Exception (FieldException a)
lookupon :: Typeable a => LensLike' (Indexing Identity) s a -> (s -> a) -> s -> Maybe (Int, Context a a s)
lookupon l field s = case unsafePerformIO $ E.try $ evaluate $ field $ s & indexing l %@~ \i (a::a) -> E.throw (FieldException i a) of
Right _ -> Nothing
Left e -> case fromException e of
Nothing -> Nothing
Just (FieldException i a) -> Just (i, Context (\a' -> set (elementOf l i) a' s) a)
{-# INLINE lookupon #-}
-- | This automatically constructs a 'Traversal'' from an function.
--
-- >>> (2,4) & upon fst *~ 5
-- (10,4)
--
-- There are however, caveats on how this function can be used!
--
-- First, the user supplied function must access only one field of the specified type. That is to say the target
-- must be a single element that would be visited by @'holesOnOf' 'template' 'uniplate'@
--
-- Note: this even permits a number of functions to be used directly.
--
-- >>> [1,2,3,4] & upon head .~ 0
-- [0,2,3,4]
--
-- >>> [1,2,3,4] & upon last .~ 5
-- [1,2,3,5]
--
-- >>> [1,2,3,4] ^? upon tail
-- Just [2,3,4]
--
-- >>> "" ^? upon tail
-- Nothing
--
-- Accessing parents on the way down to children is okay:
--
-- >>> [1,2,3,4] & upon (tail.tail) .~ [10,20]
-- [1,2,10,20]
--
-- Second, the structure must not contain strict or unboxed fields of the same type that will be visited by 'Data'
--
-- @'upon' :: ('Data' s, 'Data' a) => (s -> a) -> 'IndexedTraversal'' [Int] s a@
upon :: forall p f s a. (Indexable [Int] p, Applicative f, Data s, Data a) => (s -> a) -> p a (f a) -> s -> f s
upon field f s = case lookupon template field s of
Nothing -> pure s
Just (i, Context k0 a0) ->
let
go :: [Int] -> Traversal' s a -> (a -> s) -> a -> f s
go is l k a = case lookupon (l.uniplate) field s of
Nothing -> k <$> indexed f (reverse is) a
Just (j, Context k' a') -> go (j:is) (l.elementOf uniplate j) k' a'
in go [i] (elementOf template i) k0 a0
{-# INLINE upon #-}
-- | The design of 'onceUpon'' doesn't allow it to search inside of values of type 'a' for other values of type 'a'.
-- 'upon'' provides this additional recursion.
--
-- Like 'onceUpon'', 'upon'' trusts the user supplied function more than 'upon' using it directly
-- as the accessor. This enables reading from the resulting 'Lens' to be considerably faster at the risk of
-- generating an illegal lens.
--
-- >>> upon' (tail.tail) .~ [10,20] $ [1,2,3,4]
-- [1,2,10,20]
upon' :: forall s a. (Data s, Data a) => (s -> a) -> IndexedLens' [Int] s a
upon' field f s = let
~(isn, kn) = case lookupon template field s of
Nothing -> (error "upon': no index, not a member", const s)
Just (i, Context k0 _) -> go [i] (elementOf template i) k0
go :: [Int] -> Traversal' s a -> (a -> s) -> ([Int], a -> s)
go is l k = case lookupon (l.uniplate) field s of
Nothing -> (reverse is, k)
Just (j, Context k' _) -> go (j:is) (l.elementOf uniplate j) k'
in kn <$> indexed f isn (field s)
{-# INLINE upon' #-}
-- | This automatically constructs a 'Traversal'' from a field accessor.
--
-- The index of the 'Traversal' can be used as an offset into @'elementOf' ('indexing' 'template')@ or into the list
-- returned by @'holesOf' 'template'@.
--
-- The design of 'onceUpon' doesn't allow it to search inside of values of type 'a' for other values of type 'a'.
-- 'upon' provides this additional recursion, but at the expense of performance.
--
-- >>> onceUpon (tail.tail) .~ [10,20] $ [1,2,3,4] -- BAD
-- [1,10,20]
--
-- >>> upon (tail.tail) .~ [10,20] $ [1,2,3,4] -- GOOD
-- [1,2,10,20]
--
-- When in doubt, use 'upon' instead.
onceUpon :: forall s a. (Data s, Typeable a) => (s -> a) -> IndexedTraversal' Int s a
onceUpon field f s = case lookupon template field s of
Nothing -> pure s
Just (i, Context k a) -> k <$> indexed f i a
{-# INLINE onceUpon #-}
-- | This more trusting version of 'upon' uses your function directly as the getter for a 'Lens'.
--
-- This means that reading from 'upon'' is considerably faster than 'upon'.
--
-- However, you pay for faster access in two ways:
--
-- 1. When passed an illegal field accessor, 'upon'' will give you a 'Lens' that quietly violates
-- the laws, unlike 'upon', which will give you a legal 'Traversal' that avoids modifying the target.
--
-- 2. Modifying with the lens is slightly slower, since it has to go back and calculate the index after the fact.
--
-- When given a legal field accessor, the index of the 'Lens' can be used as an offset into
-- @'elementOf' ('indexed' 'template')@ or into the list returned by @'holesOf' 'template'@.
--
-- When in doubt, use 'upon'' instead.
onceUpon' :: forall s a. (Data s, Typeable a) => (s -> a) -> IndexedLens' Int s a
onceUpon' field f s = k <$> indexed f i (field s) where
~(i, Context k _) = fromMaybe (error "upon': no index, not a member") (lookupon template field s)
{-# INLINE onceUpon' #-}
-------------------------------------------------------------------------------
-- Type equality
-------------------------------------------------------------------------------
data Is a b where
Refl :: Is a a
mightBe :: (Typeable a, Typeable b) => Maybe (Is a b)
mightBe = gcast Data.Data.Lens.Refl
{-# INLINE mightBe #-}
-------------------------------------------------------------------------------
-- Data Box
-------------------------------------------------------------------------------
data DataBox = forall a. Data a => DataBox
{ dataBoxKey :: TypeRep
, _dataBoxVal :: a
}
dataBox :: Data a => a -> DataBox
dataBox a = DataBox (typeOf a) a
{-# INLINE dataBox #-}
-- partial, caught elsewhere
sybChildren :: Data a => a -> [DataBox]
sybChildren x
| isAlgType dt = do
c <- dataTypeConstrs dt
gmapQ dataBox (fromConstr c `asTypeOf` x)
| otherwise = []
where dt = dataTypeOf x
{-# INLINE sybChildren #-}
-------------------------------------------------------------------------------
-- HitMap
-------------------------------------------------------------------------------
type HitMap = HashMap TypeRep (HashSet TypeRep)
emptyHitMap :: HitMap
emptyHitMap = M.fromList
[ (tRational, S.singleton tInteger)
, (tInteger, S.empty)
] where
tRational = typeOf (undefined :: Rational)
tInteger = typeOf (undefined :: Integer )
insertHitMap :: DataBox -> HitMap -> HitMap
insertHitMap box hit = fixEq trans (populate box) `mappend` hit where
populate :: DataBox -> HitMap
populate a = f a M.empty where
f (DataBox k v) m
| M.member k hit || M.member k m = m
| cs <- sybChildren v = fs cs $ M.insert k (S.fromList $ map dataBoxKey cs) m
fs [] m = m
fs (x:xs) m = fs xs (f x m)
trans :: HitMap -> HitMap
trans m = M.map f m where
f x = x `mappend` foldMap g x
g x = fromMaybe (hit ! x) (M.lookup x m)
fixEq :: Eq a => (a -> a) -> a -> a
fixEq f = go where
go x | x == x' = x'
| otherwise = go x'
where x' = f x
{-# INLINE fixEq #-}
#ifndef HLINT
-- | inlineable 'unsafePerformIO'
inlinePerformIO :: IO a -> a
inlinePerformIO (IO m) = case m realWorld# of
(# _, r #) -> r
{-# INLINE inlinePerformIO #-}
#endif
-------------------------------------------------------------------------------
-- Cache
-------------------------------------------------------------------------------
data Cache = Cache HitMap (HashMap TypeRep (HashMap TypeRep (Maybe Follower)))
cache :: IORef Cache
cache = unsafePerformIO $ newIORef $ Cache emptyHitMap M.empty
{-# NOINLINE cache #-}
readCacheFollower :: DataBox -> TypeRep -> Maybe Follower
readCacheFollower b@(DataBox kb _) ka = inlinePerformIO $
readIORef cache >>= \ (Cache hm m) -> case M.lookup kb m >>= M.lookup ka of
Just a -> return a
Nothing -> E.try (return $! insertHitMap b hm) >>= \r -> case r of
Left SomeException{} -> atomicModifyIORef cache $ \(Cache hm' n) -> (Cache hm' (insert2 kb ka Nothing n), Nothing)
Right hm' | fol <- Just (follower kb ka hm') -> atomicModifyIORef cache $ \(Cache _ n) -> (Cache hm' (insert2 kb ka fol n), fol)
insert2 :: TypeRep -> TypeRep -> a -> HashMap TypeRep (HashMap TypeRep a) -> HashMap TypeRep (HashMap TypeRep a)
insert2 x y v = M.insertWith (const $ M.insert y v) x (M.singleton y v)
{-# INLINE insert2 #-}
{-
readCacheHitMap :: DataBox -> Maybe HitMap
readCacheHitMap b@(DataBox kb _) = inlinePerformIO $
readIORef cache >>= \ (Cache hm _) -> case M.lookup kb hm of
Just _ -> return $ Just hm
Nothing -> E.try (return $! insertHitMap b hm) >>= \r -> case r of
Left SomeException{} -> return Nothing
Right hm' -> atomicModifyIORef cache $ \(Cache _ follow) -> (Cache hm' follow, Just hm')
-}
-------------------------------------------------------------------------------
-- Answers
-------------------------------------------------------------------------------
data Answer b a
= b ~ a => Hit a
| Follow
| Miss
-------------------------------------------------------------------------------
-- Oracles
-------------------------------------------------------------------------------
newtype Oracle a = Oracle { fromOracle :: forall t. Typeable t => t -> Answer t a }
hitTest :: forall a b. (Data a, Typeable b) => a -> b -> Oracle b
hitTest a b = Oracle $ \(c :: c) ->
case mightBe :: Maybe (Is c b) of
Just Data.Data.Lens.Refl -> Hit c
Nothing ->
case readCacheFollower (dataBox a) (typeOf b) of
Just p | not (p (typeOf c)) -> Miss
_ -> Follow
-------------------------------------------------------------------------------
-- Traversals
-------------------------------------------------------------------------------
biplateData :: forall f s a. (Applicative f, Data s) => (forall c. Typeable c => c -> Answer c a) -> (a -> f a) -> s -> f s
biplateData o f a0 = go2 a0 where
go :: Data d => d -> f d
go s = gfoldl (\x y -> x <*> go2 y) pure s
go2 :: Data d => d -> f d
go2 s = case o s of
Hit a -> f a
Follow -> go s
Miss -> pure s
{-# INLINE biplateData #-}
uniplateData :: forall f s a. (Applicative f, Data s) => (forall c. Typeable c => c -> Answer c a) -> (a -> f a) -> s -> f s
uniplateData o f a0 = go a0 where
go :: Data d => d -> f d
go s = gfoldl (\x y -> x <*> go2 y) pure s
go2 :: Data d => d -> f d
go2 s = case o s of
Hit a -> f a
Follow -> go s
Miss -> pure s
{-# INLINE uniplateData #-}
-------------------------------------------------------------------------------
-- Follower
-------------------------------------------------------------------------------
part :: (a -> Bool) -> HashSet a -> (HashSet a, HashSet a)
part p s = (S.filter p s, S.filter (not . p) s)
{-# INLINE part #-}
type Follower = TypeRep -> Bool
follower :: TypeRep -> TypeRep -> HitMap -> Follower
follower a b m
| S.null hit = const False
| S.null miss = const True
| S.size hit < S.size miss = S.member ?? hit
| otherwise = \k -> not (S.member k miss)
where (hit, miss) = part (\x -> S.member b (m ! x)) (S.insert a (m ! a))
| rpglover64/lens | src/Data/Data/Lens.hs | bsd-3-clause | 16,298 | 0 | 21 | 3,301 | 3,911 | 2,094 | 1,817 | 216 | 3 |
{-# LANGUAGE ExplicitForAll, MagicHash, KindSignatures #-}
module T12850 where
import GHC.Types (RuntimeRep(..), TYPE)
f :: forall (x :: TYPE 'IntRep). x -> x
f x = x
g = ()
where h = f 0#
| ezyang/ghc | testsuite/tests/typecheck/should_compile/T12850.hs | bsd-3-clause | 196 | 0 | 9 | 42 | 72 | 42 | 30 | -1 | -1 |
module Data.IORef.RunOnce (runOnce) where
import Control.Monad.IO.Class
import Data.IORef
runOnce :: MonadIO m => m a -> m (m a)
runOnce f = do
ref <- liftIO $ newIORef Nothing
return $ do
mval <- liftIO $ readIORef ref
case mval of
Just val -> return val
Nothing -> do
val <- f
liftIO $ writeIORef ref (Just val)
return val
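-- A small usage sketch (hypothetical loader): build an action that performs
-- the underlying work at most once and hands back the cached result on every
-- subsequent call:
--
-- > getConfig <- runOnce (loadConfigFrom path)
-- > cfg1 <- getConfig  -- runs the loader
-- > cfg2 <- getConfig  -- returns the cached value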
| luigy/stack | src/Data/IORef/RunOnce.hs | bsd-3-clause | 423 | 0 | 18 | 158 | 151 | 72 | 79 | 14 | 2 |
{-# LANGUAGE RecordWildCards #-}
module T9815 where
newtype N = N Int deriving (Show)
foo = print N{..}
| shlevy/ghc | testsuite/tests/rename/should_fail/T9815.hs | bsd-3-clause | 106 | 0 | 6 | 20 | 33 | 20 | 13 | 4 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
-- Trac #3572
module Main where
import Language.Haskell.TH
import Language.Haskell.TH.Ppr
main = putStrLn . pprint =<< runQ [d| data Void |]
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/th/T3572.hs | bsd-3-clause | 177 | 0 | 6 | 29 | 39 | 26 | 13 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
module Yesod.Core.Internal.Run where
import Yesod.Core.Internal.Response
import Blaze.ByteString.Builder (toByteString)
import Control.Applicative ((<$>))
import Control.Exception (fromException, evaluate)
import qualified Control.Exception as E
import Control.Exception.Lifted (catch)
import Control.Monad (mplus)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger (LogLevel (LevelError), LogSource,
liftLoc)
import Control.Monad.Trans.Resource (runResourceT, withInternalState, runInternalState)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.IORef as I
import qualified Data.Map as Map
import Data.Maybe (isJust)
import Data.Maybe (fromMaybe)
import Data.Monoid (appEndo, mempty)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Data.Time (getCurrentTime, addUTCTime)
import Language.Haskell.TH.Syntax (Loc, qLocation)
import qualified Network.HTTP.Types as H
import Network.Wai
import Network.Wai.Internal
#if !MIN_VERSION_base(4, 6, 0)
import Prelude hiding (catch)
#endif
import System.Log.FastLogger (LogStr, toLogStr)
import System.Random (newStdGen)
import Yesod.Core.Content
import Yesod.Core.Class.Yesod
import Yesod.Core.Types
import Yesod.Core.Internal.Request (parseWaiRequest,
tooLargeResponse)
import Yesod.Core.Internal.Util (formatRFC1123)
import Yesod.Routes.Class (Route, renderRoute)
import Control.DeepSeq (($!!), NFData)
import Control.Monad (liftM)
import Control.AutoUpdate (mkAutoUpdate, defaultUpdateSettings, updateAction, updateFreq)
returnDeepSessionMap :: Monad m => SessionMap -> m SessionMap
#if MIN_VERSION_bytestring(0, 10, 0)
returnDeepSessionMap sm = return $!! sm
#else
returnDeepSessionMap sm = fmap unWrappedBS `liftM` (return $!! fmap WrappedBS sm)
-- | Work around missing NFData instance for bytestring 0.9.
newtype WrappedBS = WrappedBS { unWrappedBS :: S8.ByteString }
instance NFData WrappedBS
#endif
-- | Function used internally by Yesod in the process of converting a
-- 'HandlerT' into an 'Application'. Should not be needed by users.
runHandler :: ToTypedContent c
=> RunHandlerEnv site
-> HandlerT site IO c
-> YesodApp
runHandler rhe@RunHandlerEnv {..} handler yreq = withInternalState $ \resState -> do
let toErrorHandler e =
case fromException e of
Just (HCError x) -> x
_ -> InternalError $ T.pack $ show e
istate <- liftIO $ I.newIORef GHState
{ ghsSession = reqSession yreq
, ghsRBC = Nothing
, ghsIdent = 1
, ghsCache = mempty
, ghsCacheBy = mempty
, ghsHeaders = mempty
}
let hd = HandlerData
{ handlerRequest = yreq
, handlerEnv = rhe
, handlerState = istate
, handlerToParent = const ()
, handlerResource = resState
}
contents' <- catch (fmap Right $ unHandlerT handler hd)
(\e -> return $ Left $ maybe (HCError $ toErrorHandler e) id
$ fromException e)
state <- liftIO $ I.readIORef istate
(finalSession, mcontents1) <- (do
finalSession <- returnDeepSessionMap (ghsSession state)
return (finalSession, Nothing)) `E.catch` \e -> return
(Map.empty, Just $! HCError $! InternalError $! T.pack $! show (e :: E.SomeException))
(headers, mcontents2) <- (do
headers <- return $!! appEndo (ghsHeaders state) []
return (headers, Nothing)) `E.catch` \e -> return
([], Just $! HCError $! InternalError $! T.pack $! show (e :: E.SomeException))
let contents =
case mcontents1 `mplus` mcontents2 of
Just x -> x
Nothing -> either id (HCContent defaultStatus . toTypedContent) contents'
let handleError e = flip runInternalState resState $ do
yar <- rheOnError e yreq
{ reqSession = finalSession
}
case yar of
YRPlain status' hs ct c sess ->
let hs' = headers ++ hs
status
| status' == defaultStatus = getStatus e
| otherwise = status'
in return $ YRPlain status hs' ct c sess
YRWai _ -> return yar
YRWaiApp _ -> return yar
let sendFile' ct fp p =
return $ YRPlain H.status200 headers ct (ContentFile fp p) finalSession
contents1 <- evaluate contents `E.catch` \e -> return
(HCError $! InternalError $! T.pack $! show (e :: E.SomeException))
case contents1 of
HCContent status (TypedContent ct c) -> do
ec' <- liftIO $ evaluateContent c
case ec' of
Left e -> handleError e
Right c' -> return $ YRPlain status headers ct c' finalSession
HCError e -> handleError e
HCRedirect status loc -> do
let disable_caching x =
Header "Cache-Control" "no-cache, must-revalidate"
: Header "Expires" "Thu, 01 Jan 1970 05:05:05 GMT"
: x
hs = (if status /= H.movedPermanently301 then disable_caching else id)
$ Header "Location" (encodeUtf8 loc) : headers
return $ YRPlain
status hs typePlain emptyContent
finalSession
HCSendFile ct fp p -> catch
(sendFile' ct fp p)
(handleError . toErrorHandler)
HCCreated loc -> do
let hs = Header "Location" (encodeUtf8 loc) : headers
return $ YRPlain
H.status201
hs
typePlain
emptyContent
finalSession
HCWai r -> return $ YRWai r
HCWaiApp a -> return $ YRWaiApp a
safeEh :: (Loc -> LogSource -> LogLevel -> LogStr -> IO ())
-> ErrorResponse
-> YesodApp
safeEh log' er req = do
liftIO $ log' $(qLocation >>= liftLoc) "yesod-core" LevelError
$ toLogStr $ "Error handler errored out: " ++ show er
return $ YRPlain
H.status500
[]
typePlain
(toContent ("Internal Server Error" :: S.ByteString))
(reqSession req)
-- | Run a 'HandlerT' completely outside of Yesod. This
-- function comes with many caveats and you shouldn't use it
-- unless you fully understand what it's doing and how it works.
--
-- As of now, there's only one reason to use this function at
-- all: in order to run unit tests of functions inside 'HandlerT'
-- but that aren't easily testable with a full HTTP request.
-- Even so, it's better to use @wai-test@ or @yesod-test@ instead
-- of using this function.
--
-- This function will create a fake HTTP request (both @wai@'s
-- 'Request' and @yesod@'s 'Request') and feed it to the
-- @HandlerT@. The only useful information the @HandlerT@ may
-- get from the request is the session map, which you must supply
-- as argument to @runFakeHandler@. All other fields contain
-- fake information, which means that they can be accessed but
-- won't have any useful information. The response of the
-- @HandlerT@ is completely ignored, including changes to the
-- session, cookies or headers. We only return you the
-- @HandlerT@'s return value.
runFakeHandler :: (Yesod site, MonadIO m) =>
SessionMap
-> (site -> Logger)
-> site
-> HandlerT site IO a
-> m (Either ErrorResponse a)
runFakeHandler fakeSessionMap logger site handler = liftIO $ do
ret <- I.newIORef (Left $ InternalError "runFakeHandler: no result")
getMaxExpires <- getGetMaxExpires
let handler' = do liftIO . I.writeIORef ret . Right =<< handler
return ()
let yapp = runHandler
RunHandlerEnv
{ rheRender = yesodRender site $ resolveApproot site fakeWaiRequest
, rheRoute = Nothing
, rheSite = site
, rheUpload = fileUpload site
, rheLog = messageLoggerSource site $ logger site
, rheOnError = errHandler
, rheGetMaxExpires = getMaxExpires
}
handler'
errHandler err req = do
liftIO $ I.writeIORef ret (Left err)
return $ YRPlain
H.status500
[]
typePlain
(toContent ("runFakeHandler: errHandler" :: S8.ByteString))
(reqSession req)
fakeWaiRequest = Request
{ requestMethod = "POST"
, httpVersion = H.http11
, rawPathInfo = "/runFakeHandler/pathInfo"
, rawQueryString = ""
, requestHeaderHost = Nothing
, requestHeaders = []
, isSecure = False
, remoteHost = error "runFakeHandler-remoteHost"
, pathInfo = ["runFakeHandler", "pathInfo"]
, queryString = []
, requestBody = return mempty
, vault = mempty
, requestBodyLength = KnownLength 0
, requestHeaderRange = Nothing
}
fakeRequest =
YesodRequest
{ reqGetParams = []
, reqCookies = []
, reqWaiRequest = fakeWaiRequest
, reqLangs = []
, reqToken = Just "NaN" -- not a nonce =)
, reqAccept = []
, reqSession = fakeSessionMap
}
_ <- runResourceT $ yapp fakeRequest
I.readIORef ret
yesodRunner :: (ToTypedContent res, Yesod site)
=> HandlerT site IO res
-> YesodRunnerEnv site
-> Maybe (Route site)
-> Application
yesodRunner handler' YesodRunnerEnv {..} route req sendResponse
| Just maxLen <- mmaxLen, KnownLength len <- requestBodyLength req, maxLen < len = sendResponse tooLargeResponse
| otherwise = do
let dontSaveSession _ = return []
(session, saveSession) <- liftIO $ do
maybe (return (Map.empty, dontSaveSession)) (\sb -> sbLoadSession sb req) yreSessionBackend
getMaxExpires <- getGetMaxExpires
let mkYesodReq = parseWaiRequest req session (isJust yreSessionBackend) mmaxLen
let yreq =
case mkYesodReq of
Left yreq -> yreq
Right needGen -> needGen yreGen
let ra = resolveApproot yreSite req
let log' = messageLoggerSource yreSite yreLogger
-- We set up two environments: the first one has a "safe" error handler
-- which will never throw an exception. The second one uses the
-- user-provided errorHandler function. If that errorHandler function
-- errors out, it will use the safeEh below to recover.
rheSafe = RunHandlerEnv
{ rheRender = yesodRender yreSite ra
, rheRoute = route
, rheSite = yreSite
, rheUpload = fileUpload yreSite
, rheLog = log'
, rheOnError = safeEh log'
, rheGetMaxExpires = getMaxExpires
}
rhe = rheSafe
{ rheOnError = runHandler rheSafe . errorHandler
}
yesodWithInternalState yreSite route $ \is -> do
yreq' <- yreq
yar <- runInternalState (runHandler rhe handler yreq') is
yarToResponse yar saveSession yreq' req is sendResponse
where
mmaxLen = maximumContentLength yreSite route
handler = yesodMiddleware handler'
getGetMaxExpires :: MonadIO m => m (IO Text)
getGetMaxExpires = liftIO $ mkAutoUpdate defaultUpdateSettings
{ updateAction = liftM (formatRFC1123 . addUTCTime (60*60*24*365)) getCurrentTime
, updateFreq = 60 * 60 * 1000000 -- Update once per hour
}
yesodRender :: Yesod y
=> y
-> ResolvedApproot
-> Route y
-> [(Text, Text)] -- ^ url query string
-> Text
yesodRender y ar url params =
decodeUtf8With lenientDecode $ toByteString $
fromMaybe
(joinPath y ar ps
$ params ++ params')
(urlRenderOverride y url)
where
(ps, params') = renderRoute url
resolveApproot :: Yesod master => master -> Request -> ResolvedApproot
resolveApproot master req =
case approot of
ApprootRelative -> ""
ApprootStatic t -> t
ApprootMaster f -> f master
ApprootRequest f -> f master req
stripHandlerT :: HandlerT child (HandlerT parent m) a
-> (parent -> child)
-> (Route child -> Route parent)
-> Maybe (Route child)
-> HandlerT parent m a
stripHandlerT (HandlerT f) getSub toMaster newRoute = HandlerT $ \hd -> do
let env = handlerEnv hd
($ hd) $ unHandlerT $ f hd
{ handlerEnv = env
{ rheSite = getSub $ rheSite env
, rheRoute = newRoute
, rheRender = \url params -> rheRender env (toMaster url) params
}
, handlerToParent = toMaster
}
| ygale/yesod | yesod-core/Yesod/Core/Internal/Run.hs | mit | 14,083 | 2 | 28 | 4,780 | 3,153 | 1,672 | 1,481 | 277 | 13 |
-- module
module RCL.Query (
Parameter,
Parameters,
Query,
QueryBuilder,
auth,
create,
format,
method,
param,
params,
sign,
tline,
(>=.)
) where
-- imports
import Control.Monad.Reader
import Data.Hash.MD5
import Data.List
import Network.URL
import RCL.Config
import RCL.Session
import RCL.Types
-- exported types
type Parameter = (String, String)
type Parameters = [Parameter]
type Context = (Config, Session)
type Query = Reader Context Parameters
type QueryBuilder = Parameters -> Query
-- exported functions
param :: Parameter -> QueryBuilder
param p ps = return $ p : ps
params :: Parameters -> QueryBuilder
params ps1 ps2 = return $ ps1 ++ ps2
method :: String -> QueryBuilder
method m = param ("method", m)
format :: QueryBuilder
format = param ("format", "json")
auth :: QueryBuilder
auth ps = reader $ \(_, s) -> ("auth_token", token s) : ps
tline :: QueryBuilder
tline ps = reader $ \(_, s) -> ("timeline", timeline s) : ps
sign :: QueryBuilder
sign ps = reader $ \(c, _) -> ("api_sig", md5s $ Str $ sig c) : ps
where cat s (k, v) = s ++ k ++ v
sig c = foldl' cat (secret c) $ sort ps
create :: Config -> Session -> URL -> QueryBuilder -> QueryURL
create c s url qb = exportURL $ foldl' add_param url $ makeQuery qb (c, s)
(>=.) :: QueryBuilder -> Parameter -> QueryBuilder
p >=. q = p >=> param q
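-- A small usage sketch (the method name and filter parameter below are
-- illustrative guesses, not values defined in this module):
--
-- > tasksQuery :: QueryBuilder
-- > tasksQuery = (method "rtm.tasks.getList" >=. ("filter", "status:incomplete"))
-- >                >=> format >=> auth >=> sign
-- >
-- > -- create cfg session url tasksQuery :: QueryURL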
-- internal functions
makeQuery :: QueryBuilder -> Context -> Parameters
makeQuery qb p@(c, _) = runReader (qb [("api_key", apiKey c)]) p
| nicuveo/RCL | src/RCL/Query.hs | mit | 1,596 | 0 | 10 | 402 | 574 | 324 | 250 | 48 | 1 |
module Data.Coded (Coded(..)) where
-- | Things that can be encoded and decoded
class Coded a where
encode :: a -> Integer
decode :: Integer -> a
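-- A minimal example instance (illustrative only; not part of this module):
--
-- > instance Coded Bool where
-- >   encode False = 0
-- >   encode True  = 1
-- >   decode 0     = False
-- >   decode _     = True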
| Soares/Dater.hs | src/Data/Coded.hs | mit | 155 | 0 | 7 | 36 | 43 | 25 | 18 | 4 | 0 |
{-
Large sum
Problem 13
Work out the first ten digits of the sum of the following one-hundred 50-digit numbers.
-}
numbers = [ 37107287533902102798797998220837590246510135740250,
46376937677490009712648124896970078050417018260538,
74324986199524741059474233309513058123726617309629,
91942213363574161572522430563301811072406154908250,
23067588207539346171171980310421047513778063246676,
89261670696623633820136378418383684178734361726757,
28112879812849979408065481931592621691275889832738,
44274228917432520321923589422876796487670272189318,
47451445736001306439091167216856844588711603153276,
70386486105843025439939619828917593665686757934951,
62176457141856560629502157223196586755079324193331,
64906352462741904929101432445813822663347944758178,
92575867718337217661963751590579239728245598838407,
58203565325359399008402633568948830189458628227828,
80181199384826282014278194139940567587151170094390,
35398664372827112653829987240784473053190104293586,
86515506006295864861532075273371959191420517255829,
71693888707715466499115593487603532921714970056938,
54370070576826684624621495650076471787294438377604,
53282654108756828443191190634694037855217779295145,
36123272525000296071075082563815656710885258350721,
45876576172410976447339110607218265236877223636045,
17423706905851860660448207621209813287860733969412,
81142660418086830619328460811191061556940512689692,
51934325451728388641918047049293215058642563049483,
62467221648435076201727918039944693004732956340691,
15732444386908125794514089057706229429197107928209,
55037687525678773091862540744969844508330393682126,
18336384825330154686196124348767681297534375946515,
80386287592878490201521685554828717201219257766954,
78182833757993103614740356856449095527097864797581,
16726320100436897842553539920931837441497806860984,
48403098129077791799088218795327364475675590848030,
87086987551392711854517078544161852424320693150332,
59959406895756536782107074926966537676326235447210,
69793950679652694742597709739166693763042633987085,
41052684708299085211399427365734116182760315001271,
65378607361501080857009149939512557028198746004375,
35829035317434717326932123578154982629742552737307,
94953759765105305946966067683156574377167401875275,
88902802571733229619176668713819931811048770190271,
25267680276078003013678680992525463401061632866526,
36270218540497705585629946580636237993140746255962,
24074486908231174977792365466257246923322810917141,
91430288197103288597806669760892938638285025333403,
34413065578016127815921815005561868836468420090470,
23053081172816430487623791969842487255036638784583,
11487696932154902810424020138335124462181441773470,
63783299490636259666498587618221225225512486764533,
67720186971698544312419572409913959008952310058822,
95548255300263520781532296796249481641953868218774,
76085327132285723110424803456124867697064507995236,
37774242535411291684276865538926205024910326572967,
23701913275725675285653248258265463092207058596522,
29798860272258331913126375147341994889534765745501,
18495701454879288984856827726077713721403798879715,
38298203783031473527721580348144513491373226651381,
34829543829199918180278916522431027392251122869539,
40957953066405232632538044100059654939159879593635,
29746152185502371307642255121183693803580388584903,
41698116222072977186158236678424689157993532961922,
62467957194401269043877107275048102390895523597457,
23189706772547915061505504953922979530901129967519,
86188088225875314529584099251203829009407770775672,
11306739708304724483816533873502340845647058077308,
82959174767140363198008187129011875491310547126581,
97623331044818386269515456334926366572897563400500,
42846280183517070527831839425882145521227251250327,
55121603546981200581762165212827652751691296897789,
32238195734329339946437501907836945765883352399886,
75506164965184775180738168837861091527357929701337,
62177842752192623401942399639168044983993173312731,
32924185707147349566916674687634660915035914677504,
99518671430235219628894890102423325116913619626622,
73267460800591547471830798392868535206946944540724,
76841822524674417161514036427982273348055556214818,
97142617910342598647204516893989422179826088076852,
87783646182799346313767754307809363333018982642090,
10848802521674670883215120185883543223812876952786,
71329612474782464538636993009049310363619763878039,
62184073572399794223406235393808339651327408011116,
66627891981488087797941876876144230030984490851411,
60661826293682836764744779239180335110989069790714,
85786944089552990653640447425576083659976645795096,
66024396409905389607120198219976047599490197230297,
64913982680032973156037120041377903785566085089252,
16730939319872750275468906903707539413042652315011,
94809377245048795150954100921645863754710598436791,
78639167021187492431995700641917969777599028300699,
15368713711936614952811305876380278410754449733078,
40789923115535562561142322423255033685442488917353,
44889911501440648020369068063960672322193204149535,
41503128880339536053299340368006977710650566631954,
81234880673210146739058568557934581403627822703280,
82616570773948327592232845941706525094512325230608,
22918802058777319719839450180888072429661980811197,
77158542502016545090413245809786882778948721859617,
72107838435069186155435662884062257473692284509516,
20849603980134001723930671666823555245252804609722,
53503534226472524250874054075591789781264330331690 ]
euler13 = take 10 (show (sum numbers))
| feliposz/project-euler-solutions | haskell/euler13.hs | mit | 5,666 | 200 | 9 | 431 | 529 | 315 | 214 | 101 | 1 |
module Hubris.Parser (parseTerm) where
import Hubris.Parser.Internal
| jroesch/dependent-tychk | src/Hubris/Parser.hs | mit | 70 | 0 | 4 | 7 | 17 | 11 | 6 | 2 | 0 |
{-# LANGUAGE CPP #-}
module Util where
import Control.DeepSeq
import qualified Data.ByteString as B
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData B.ByteString
#endif
| thoughtpolice/hs-siphash2448 | benchmarks/Util.hs | mit | 173 | 0 | 6 | 21 | 29 | 19 | 10 | 5 | 0 |
{-# LANGUAGE TemplateHaskell #-}
-- Some common utilities for interprocedual analysis
module Language.DFA.Packages.Interp where
import Language.DFA.AST
import Language.DFA.Core.Label
import Language.DFA.Core.Mono
import Language.DFA.Common
import qualified Data.Set as S
import qualified Data.Map as M
import Debug.Trace.LocationTH
interpFlow :: Label a => Program a -> S.Set (Edge a)
interpFlow prog@(Program _ stmt) = flow stmt `S.union`
S.fromList [ Interp (lc, ln)
| (lc, ln, _, _) <- S.toList $ interflow prog
, lc `S.member` labels stmt ]
interpFlowProc :: InterLabelled ast a => ast a -> Proc a -> S.Set (Edge a)
interpFlowProc prog proc@(Proc _ _ _ _ _ end) = flow proc `S.union`
S.fromList [ Interp (lx, lr)
| (_, _, lx, lr) <- S.toList $ interflow prog
, lx `S.member` labels proc ]
-- Interp flow (l, l')
getContextOp :: (Label a, Show a) =>
(xl -> xl -> xl) -> xl -> a -> a -> Program a -> ContextOp a (M.Map Name xl) Block
getContextOp xMeet bottom l l' p@(Program procs _) =
case (unsafeLookup' $__LOCATION__ l bs, unsafeLookup' $__LOCATION__ l' bs) of
(BCall f ins outs, BIs) ->
let proc = head $ filter (\(Proc f' _ _ _ _ _) -> f' == f) procs
Proc _ ins' outs' _ stmt _ = proc
initSol prop =
let entry = M.fromList $ zip ins' (map (flip (unsafeLookup' $__LOCATION__) prop) ins)
emptyDict = zip (S.toList $ labels proc) $
repeat (M.fromList $ zip (fv stmt)
(repeat bottom))
in M.update (Just . M.union entry) l' (M.fromList emptyDict)
botRet callSiteProp = M.mapWithKey (\x l ->
if x `elem` outs
then bottom
else l `xMeet` unsafeLookup' $__LOCATION__ x callSiteProp)
in EnterCtx (interpFlowProc p proc) (toBlocks proc) initSol botRet
(BEnd, BCall f ins outs) -> ExitCtx $ \calleeProp callerProp ->
let proc = head $ filter (\(Proc f' _ _ _ _ _) -> f' == f) procs
Proc _ ins' outs' _ stmt _ = proc
updates = zip outs $ map (flip (unsafeLookup' $__LOCATION__) calleeProp) outs'
callerProp' = foldr meet callerProp updates
meet (x, px) p = M.insert x (px `xMeet` unsafeLookup' $__LOCATION__ x p) p
in callerProp'
_ -> error $ "Illegal interprocedural flow labels: " ++ show l ++ ", " ++ show l'
where
bs = toBlocks p
| izgzhen/static-hs | src/Language/DFA/Packages/Interp.hs | mit | 2,720 | 0 | 24 | 963 | 960 | 497 | 463 | 46 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html
module Stratosphere.Resources.CodeDeployDeploymentConfig where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.CodeDeployDeploymentConfigMinimumHealthyHosts
-- | Full data type definition for CodeDeployDeploymentConfig. See
-- 'codeDeployDeploymentConfig' for a more convenient constructor.
data CodeDeployDeploymentConfig =
CodeDeployDeploymentConfig
{ _codeDeployDeploymentConfigDeploymentConfigName :: Maybe (Val Text)
, _codeDeployDeploymentConfigMinimumHealthyHosts :: Maybe CodeDeployDeploymentConfigMinimumHealthyHosts
} deriving (Show, Eq)
instance ToResourceProperties CodeDeployDeploymentConfig where
toResourceProperties CodeDeployDeploymentConfig{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::CodeDeploy::DeploymentConfig"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("DeploymentConfigName",) . toJSON) _codeDeployDeploymentConfigDeploymentConfigName
, fmap (("MinimumHealthyHosts",) . toJSON) _codeDeployDeploymentConfigMinimumHealthyHosts
]
}
-- | Constructor for 'CodeDeployDeploymentConfig' containing required fields
-- as arguments.
codeDeployDeploymentConfig
:: CodeDeployDeploymentConfig
codeDeployDeploymentConfig =
CodeDeployDeploymentConfig
{ _codeDeployDeploymentConfigDeploymentConfigName = Nothing
, _codeDeployDeploymentConfigMinimumHealthyHosts = Nothing
}
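-- For example, the name could be set through its lens (a sketch; assumes the
-- usual lens operators @(&)@ and @(?~)@ are in scope and that 'Literal' is the
-- plain 'Val' constructor):
--
-- > codeDeployDeploymentConfig
-- >   & cddcDeploymentConfigName ?~ Literal "my-deployment-config"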
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-deploymentconfigname
cddcDeploymentConfigName :: Lens' CodeDeployDeploymentConfig (Maybe (Val Text))
cddcDeploymentConfigName = lens _codeDeployDeploymentConfigDeploymentConfigName (\s a -> s { _codeDeployDeploymentConfigDeploymentConfigName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codedeploy-deploymentconfig.html#cfn-codedeploy-deploymentconfig-minimumhealthyhosts
cddcMinimumHealthyHosts :: Lens' CodeDeployDeploymentConfig (Maybe CodeDeployDeploymentConfigMinimumHealthyHosts)
cddcMinimumHealthyHosts = lens _codeDeployDeploymentConfigMinimumHealthyHosts (\s a -> s { _codeDeployDeploymentConfigMinimumHealthyHosts = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/CodeDeployDeploymentConfig.hs | mit | 2,484 | 0 | 14 | 241 | 272 | 158 | 114 | 30 | 1 |
-- BPM_8Puzzle.hs
-- The 8 puzzle problem solved by best-first search (BPM).
-- José A. Alonso Jiménez https://jaalonso.github.com
-- =====================================================================
module Tema_23.BPM_8Puzzle where
-- One of the following imports must be chosen:
-- import Tema_23.BusquedaPrimeroElMejor
import I1M.BusquedaPrimeroElMejor
import Data.Array
-- Problem representation:
-- =======================
-- Note: the problem representation is copied from
-- BusquedaEnEspaciosDeEstados.hs
-- A position is a pair of integers.
type Posicion = (Int,Int)
-- A board is a vector of positions, in which the index indicates the
-- element that occupies that position.
type Tablero = Array Int Posicion
-- inicial8P is the initial state of the 8 puzzle. In the example it is
-- +---+---+---+
-- | 2 | 6 | 3 |
-- +---+---+---+
-- | 5 | | 4 |
-- +---+---+---+
-- | 1 | 7 | 8 |
-- +---+---+---+
inicial8P :: Tablero
inicial8P = array (0,8) [(2,(1,3)),(6,(2,3)),(3,(3,3)),
(5,(1,2)),(0,(2,2)),(4,(3,2)),
(1,(1,1)),(7,(2,1)),(8,(3,1))]
-- final8P is the final state of the 8 puzzle. In the example it is
-- +---+---+---+
-- | 1 | 2 | 3 |
-- +---+---+---+
-- | 8 | | 4 |
-- +---+---+---+
-- | 7 | 6 | 5 |
-- +---+---+---+
final8P :: Tablero
final8P = array (0,8) [(1,(1,3)),(2,(2,3)),(3,(3,3)),
(8,(1,2)),(0,(2,2)),(4,(3,2)),
(7,(1,1)),(6,(2,1)),(5,(3,1))]
-- (distancia p1 p2) is the Manhattan distance between positions p1 and
-- p2. For example,
-- distancia (2,7) (4,1) == 8
distancia :: Posicion -> Posicion -> Int
distancia (x1,y1) (x2,y2) = abs (x1-x2) + abs (y1-y2)
-- (adyacente p1 p2) holds if positions p1 and p2 are
-- adjacent. For example,
-- adyacente (3,2) (3,1) == True
-- adyacente (3,2) (1,2) == False
adyacente :: Posicion -> Posicion -> Bool
adyacente p1 p2 = distancia p1 p2 == 1
-- (todosMovimientos t) is the list of the boards obtained by applying
-- to board t all the possible moves; that is, by swapping the position
-- of the hole with its adjacent ones. For example,
-- λ> inicial8P
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]
-- λ> todosMovimientos inicial8P
-- [array (0,8) [(0,(3,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(2,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(1,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,3)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,2)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,1)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,2)),(8,(3,1))]]
todosMovimientos :: Tablero -> [Tablero]
todosMovimientos t = [t//[(0,t!i),(i,t!0)] | i<-[1..8], adyacente (t!0) (t!i)]
-- The nodes of the state space are lists of boards [t_n,...,t_1]
-- such that t_i is a successor of t_(i-1).
newtype Tableros = Est [Tablero] deriving Show
-- (sucesores8P e) is the list of successors of state e. For example,
-- λ> sucesores8P (Est [inicial8P])
-- [Est [array (0,8) [(0,(3,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(2,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]],
-- Est [array (0,8) [(0,(1,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]],
-- Est [array (0,8) [(0,(2,3)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,2)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]],
-- Est [array (0,8) [(0,(2,1)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,2)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]]]
sucesores8P :: Tableros -> [Tableros]
sucesores8P (Est(n@(t:ts))) =
filter (noEn ts) [ Est (t':n) | t' <- todosMovimientos t]
where noEn ts' (Est(t':_)) = elems t' `notElem` map elems ts'
noEn _ _ = error "Imposible"
sucesores8P _ = error "Imposible"
esFinal8P :: Tableros -> Bool
esFinal8P (Est (n:_)) = elems n == elems final8P
esFinal8P _ = error "Imposible"
-- Heuristics
-- ==========
-- (heur1 t) is the sum of the Manhattan distances from the position of
-- each object on the board to its position in the final state. For
-- example,
-- heur1 inicial8P == 12
heur1 :: Tablero -> Int
heur1 b = sum [distancia (b!i) (final8P!i) | i <- [0..8]]
-- Two states are considered equal if they have the same heuristic.
instance Eq Tableros
where Est(t1:_) == Est(t2:_) = heur1 t1 == heur1 t2
_ == _ = error "Imposible"
-- One state is less than or equal to another if it has a smaller or
-- equal heuristic.
instance Ord Tableros where
Est (t1:_) <= Est (t2:_) = heur1 t1 <= heur1 t2
_ <= _ = error "Imposible"
-- (buscaPM_8P) is the list of the solutions of the 8 puzzle by
-- best-first search. For example,
-- λ> head buscaPM_8P
-- (Est [array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(2,1)),(7,(1,1)),(8,(1,2))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(2,2)),(7,(1,1)),(8,(1,2))],
-- array (0,8) [(0,(1,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(2,2)),(7,(2,1)),(8,(1,2))],
-- array (0,8) [(0,(1,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(2,2)),(7,(2,1)),(8,(1,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(1,2)),(7,(2,1)),(8,(1,1))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(3,1)),(6,(1,2)),(7,(2,2)),(8,(1,1))],
-- array (0,8) [(0,(3,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(1,2)),(7,(2,2)),(8,(1,1))],
-- array (0,8) [(0,(3,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(2,1)),(6,(1,2)),(7,(2,2)),(8,(1,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(2,1)),(6,(1,2)),(7,(3,2)),(8,(1,1))],
-- array (0,8) [(0,(1,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(2,1)),(6,(2,2)),(7,(3,2)),(8,(1,1))],
-- array (0,8) [(0,(1,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(2,1)),(6,(2,2)),(7,(3,2)),(8,(1,2))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(1,1)),(6,(2,2)),(7,(3,2)),(8,(1,2))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(1,1)),(6,(2,1)),(7,(3,2)),(8,(1,2))],
-- array (0,8) [(0,(3,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(1,1)),(6,(2,1)),(7,(2,2)),(8,(1,2))],
-- array (0,8) [(0,(3,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,1)),(6,(2,1)),(7,(2,2)),(8,(1,2))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,1)),(6,(3,1)),(7,(2,2)),(8,(1,2))],
-- array (0,8) [(0,(1,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(3,1)),(7,(2,2)),(8,(1,2))],
-- array (0,8) [(0,(1,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(3,1)),(7,(2,2)),(8,(1,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(3,1)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(3,1)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(3,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,1)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(3,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(2,2)),(6,(2,1)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(2,1)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(2,2)),(7,(1,2)),(8,(1,1))],
-- array (0,8) [(0,(1,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(2,2)),(7,(1,2)),(8,(2,1))],
-- array (0,8) [(0,(1,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(2,2)),(7,(1,1)),(8,(2,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(1,2)),(7,(1,1)),(8,(2,1))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,1)),
-- (5,(3,2)),(6,(1,2)),(7,(1,1)),(8,(2,2))],
-- array (0,8) [(0,(3,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,1)),
-- (5,(3,2)),(6,(1,2)),(7,(1,1)),(8,(2,2))],
-- array (0,8) [(0,(3,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,1)),
-- (5,(3,1)),(6,(1,2)),(7,(1,1)),(8,(2,2))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,1)),
-- (5,(3,1)),(6,(1,2)),(7,(1,1)),(8,(3,2))],
-- array (0,8) [(0,(2,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,2)),
-- (5,(3,1)),(6,(1,2)),(7,(1,1)),(8,(3,2))],
-- array (0,8) [(0,(3,1)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,2)),
-- (5,(2,1)),(6,(1,2)),(7,(1,1)),(8,(3,2))],
-- array (0,8) [(0,(3,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(2,2)),
-- (5,(2,1)),(6,(1,2)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(1,2)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(1,2)),(1,(1,3)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(2,2)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(1,3)),(1,(1,2)),(2,(2,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(2,2)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(2,3)),(1,(1,2)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(2,2)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,2)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,1)),(6,(2,3)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(2,1)),(1,(1,2)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,3)),(7,(1,1)),(8,(3,1))],
-- array (0,8) [(0,(1,1)),(1,(1,2)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(1,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(2,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))],
-- array (0,8) [(0,(2,2)),(1,(1,1)),(2,(1,3)),(3,(3,3)),(4,(3,2)),
-- (5,(1,2)),(6,(2,3)),(7,(2,1)),(8,(3,1))]],
-- 78)
buscaPM_8P :: [Tableros]
buscaPM_8P = buscaPM sucesores8P
esFinal8P
(Est [inicial8P])
-- (nSolucionesPM_8P) is the number of solutions of the 8 puzzle by
-- best-first search. For example,
-- nSolucionesPM_8P == 43
nSolucionesPM_8P :: Int
nSolucionesPM_8P = length ls
where (Est ls : _) = buscaPM sucesores8P
esFinal8P
(Est [inicial8P])
| jaalonso/I1M-Cod-Temas | src/Tema_23/BPM_8Puzzle.hs | gpl-2.0 | 12,267 | 0 | 11 | 3,009 | 1,163 | 730 | 433 | -1 | -1 |
{-
Copyright 2015 Ian Denhardt <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
-}
module DedupBackup where
import System.Environment
import Control.Exception (IOException, try)
import Control.Monad (forM_, liftM, mapM_, unless, when)
import Data.ByteString.Char8 (unpack)
import Data.List (stripPrefix)
import System.Directory
(createDirectoryIfMissing, doesFileExist, getDirectoryContents)
import System.IO (hClose)
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Data.ByteString.Base16 as Hex
import qualified Data.ByteString.Lazy as B
import qualified Data.Map.Strict as M
import qualified System.Posix.Files as PF
import qualified System.Posix.IO as PIO
import qualified System.Posix.Types as PT
dedupCutOff = 128 -- ^ number of bytes under which a file is just copied wholesale.
-- Anything bigger than this gets the fancy dedup treatment.
-- This stuff exists for the testsuite's benefit. When running the program, the
-- only instance of FileStatus we ever use is PF.FileStatus, but we can't
-- construct those, so we have our own type defined in the test suite for
-- testing:
class FileStatus a where
isRegularFile :: a -> Bool
isDirectory :: a -> Bool
isSymbolicLink :: a -> Bool
fileMode :: a -> PT.FileMode
fileOwner :: a -> PT.UserID
fileGroup :: a -> PT.GroupID
accessTime :: a -> PT.EpochTime
modificationTime :: a -> PT.EpochTime
fileSize :: a -> PT.FileOffset
instance FileStatus PF.FileStatus where
isRegularFile = PF.isRegularFile
isDirectory = PF.isDirectory
isSymbolicLink = PF.isSymbolicLink
fileMode = PF.fileMode
fileOwner = PF.fileOwner
fileGroup = PF.fileGroup
accessTime = PF.accessTime
modificationTime = PF.modificationTime
fileSize = PF.fileSize
-- // from System.FilePath *almost* does what we want, but it drops left if
-- right starts with a slash.
(//) left right = left ++ "/" ++ right
data JobSpec = JobSpec { src :: FilePath
, dest :: FilePath
, blobs :: FilePath
, chown :: Bool
, prev :: Maybe FilePath
}
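-- A hypothetical job description (all paths below are made up) that could then
-- be passed to 'doBackup':
--
-- > exampleJob :: JobSpec
-- > exampleJob = JobSpec { src   = "/home/alice"
-- >                      , dest  = "/backup/2015-06-01/home/alice"
-- >                      , blobs = "/backup/blobs"
-- >                      , chown = True
-- >                      , prev  = Just "/backup/2015-05-01/home/alice"
-- >                      }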
data FileTree s = Directory s (M.Map FilePath (FileTree s))
| RegularFile s
| Symlink s
| Unsupported s
deriving(Show,Eq)
data Action s = MkDir s (M.Map FilePath (Action s))
| MkSymlink s
| DedupCopy s
| NaiveCopy s
| Report String
reportError :: IO () -> IO ()
-- ^ @reportError io@ executes @io@. if @io@ raises an @IOException@, the
-- exception is caught and printed, before resuming normal operation.
reportError io = do
result <- (try io) :: IO (Either IOException ())
case result of
Left err -> print err
Right () -> return ()
-- | @getContentNames@ is like @getDirectoryContents@, except that it excludes
-- "." and "..".
getContentsNames :: FilePath -> IO [FilePath]
getContentsNames path =
filter (`notElem` [".", ".."]) <$> getDirectoryContents path
lStatTree :: FilePath -> IO (FileTree PF.FileStatus)
lStatTree path = do
status <- PF.getSymbolicLinkStatus path
if isDirectory status then do
contentsNames <- getContentsNames path
contents <- mapM (lStatTree . (path //)) contentsNames
return $ Directory
status
(M.fromList (zip contentsNames contents))
else if isRegularFile status then
return $ RegularFile status
else if isSymbolicLink status then
return $ Symlink status
else
return $ Unsupported status
doAction :: (FileStatus s) => JobSpec -> Action s -> IO ()
doAction spec (MkDir status contents) = reportError $ do
let path = dest spec
createDirectoryIfMissing True path
syncMetadata (chown spec) path status
forM_ (M.toList contents)
(\(path', tree) ->
doAction
spec { dest = dest spec // path'
, src = src spec // path'
, prev = fmap (// path') (prev spec)
}
tree)
doAction spec (MkSymlink status) = reportError $ do
target <- PF.readSymbolicLink (src spec)
PF.createSymbolicLink target (dest spec)
syncMetadata (chown spec) (dest spec) status
doAction spec (DedupCopy status) = reportError $ do
changed <- case prev spec of
Nothing -> return True
Just prevpath -> do
exists <- doesFileExist prevpath
if exists then do
prevstatus <- PF.getSymbolicLinkStatus prevpath
return $ not (isRegularFile prevstatus) ||
(modificationTime prevstatus < modificationTime status)
else return True
if changed
then do
file <- B.readFile (src spec)
let hashname@(c1:c2:_) = unpack (Hex.encode $ SHA1.hashlazy file)
let blobname = blobs spec // [c1,c2] // hashname
(try :: IO a -> IO (Either IOException a)) $ do
fd <- PIO.openFd
blobname
PIO.WriteOnly
(Just $ PT.CMode 0600)
PIO.defaultFileFlags { PIO.exclusive = True }
hndl <- PIO.fdToHandle fd
B.readFile (src spec) >>= B.hPut hndl
hClose hndl
PF.createLink blobname (dest spec)
else do
let Just prevpath = prev spec
PF.createLink prevpath (dest spec)
syncMetadata (chown spec) (dest spec) status
doAction spec (NaiveCopy status) = reportError $ do
B.readFile (src spec) >>= B.writeFile (dest spec)
syncMetadata (chown spec) (dest spec) status
doAction spec (Report msg) = putStrLn (msg ++ show (src spec))
mkAction :: (FileStatus s) => FileTree s -> Action s
mkAction (Directory status contents) =
MkDir status (M.map mkAction contents)
mkAction (Symlink status) = MkSymlink status
mkAction (RegularFile status) =
if fileSize status > dedupCutOff then
DedupCopy status
else
NaiveCopy status
mkAction (Unsupported _) =
Report "Ignoring file of unsupported type: "
doBackup :: JobSpec -> IO ()
doBackup spec = do
let srcDir = src spec
putStrLn "Scanning source directory..."
srcTree <- lStatTree srcDir
let action = mkAction srcTree
putStrLn "Starting backup..."
doAction spec action
syncMetadata :: (FileStatus s) => Bool -> FilePath -> s -> IO ()
syncMetadata shouldChown path status = do
when shouldChown $ do
PF.setSymbolicLinkOwnerAndGroup path (fileOwner status) (fileGroup status)
unless (isSymbolicLink status) $ do
-- These act on the underlying file, and there are no symlink
-- equivalents.
PF.setFileMode path (fileMode status)
PF.setFileTimes path (accessTime status) (modificationTime status)
| zenhack/dedup-backup | src/DedupBackup.hs | gpl-3.0 | 7,696 | 0 | 21 | 2,263 | 1,897 | 968 | 929 | -1 | -1 |
{-# LANGUAGE CPP #-}
----------------------------------------------------------------------
-- |
-- Module : Text.TeX.Lexer.TokenParser.Expansion
-- Copyright : 2015-2017 Mathias Schenner,
-- 2015-2016 Language Science Press.
-- License : GPL-3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Expansion of user-defined macros and environments.
----------------------------------------------------------------------
module Text.TeX.Lexer.TokenParser.Expansion
( -- * Macro expansion
expand
-- * Environment expansion
, expandEnvironment
) where
#if MIN_VERSION_base(4,8,0)
-- Prelude exports all required operators from Control.Applicative
#else
import Control.Applicative ((<$), (<$>), (*>))
#endif
import Control.Monad ((>=>), guard)
import Data.Maybe (fromMaybe)
import Text.TeX.Lexer.Macro
import Text.TeX.Lexer.Token
import Text.TeX.Lexer.TokenParser.Basic
import Text.TeX.Lexer.TokenParser.Core
-------------------- Macro expansion
-- | Expand a call of a user-defined macro
-- and push the expansion back into the input stream.
expand :: Monad m => MacroCmd -> LexerT m ()
expand = expansion >=> prependTokens
-- | Expand a call of a user-defined macro
-- and return the expansion.
expansion :: Monad m => MacroCmd -> LexerT m [Token]
expansion m = do
guard $ isMacroCmdUser m
args <- parseArgspec (macroCmdContext m)
return $ applyMacro (macroCmdBody m) args
-------------------- Environment expansion
-- | Expand a user-defined environment
-- and return the expansion as a pair of
-- @start code@ and @end code@.
expandEnvironment :: Monad m => MacroEnv -> LexerT m ([Token], [Token])
expandEnvironment (MacroEnv _ context startCode endCode) = do
args <- parseArgspec context
return (applyMacro startCode args, applyMacro endCode args)
-------------------- Helper functions
-- Parse the arguments in a macro call.
parseArgspec :: Monad m => ArgSpec -> LexerT m [[Token]]
parseArgspec = mapM parseArgtype
-- Parse a single argument in a macro call.
parseArgtype :: Monad m => ArgType -> LexerT m [Token]
parseArgtype Mandatory = stripBraces <$>
(skipSpaceExceptPar *> nextTokenNoExpand)
parseArgtype (Until [t]) = untilTok t
parseArgtype (Until ts) = untilToks ts
parseArgtype (UntilCC cc) = many (charccno cc)
parseArgtype (Delimited open close defval) =
option (fromMaybe [noValueTok] defval) (balanced open close)
parseArgtype (OptionalGroup open close defval) =
option (fromMaybe [noValueTok] defval) (balanced open close)
parseArgtype (OptionalGroupCC open close defval) =
option (fromMaybe [noValueTok] defval) (balancedCC open close)
parseArgtype (OptionalToken t) =
option [falseTok] ([trueTok] <$ tok t)
parseArgtype (LiteralToken t) = count 1 (tok t)
| synsem/texhs | src/Text/TeX/Lexer/TokenParser/Expansion.hs | gpl-3.0 | 2,799 | 0 | 10 | 439 | 621 | 337 | 284 | 40 | 1 |
module PropT25 where
import Prelude(Bool(..))
import Zeno
-- Definitions
True && x = x
_ && _ = False
False || x = x
_ || _ = True
not True = False
not False = True
-- Nats
data Nat = S Nat | Z
(+) :: Nat -> Nat -> Nat
Z + y = y
(S x) + y = S (x + y)
(*) :: Nat -> Nat -> Nat
Z * _ = Z
(S x) * y = y + (x * y)
(==),(/=) :: Nat -> Nat -> Bool
Z == Z = True
Z == _ = False
S _ == Z = False
S x == S y = x == y
x /= y = not (x == y)
(<=) :: Nat -> Nat -> Bool
Z <= _ = True
_ <= Z = False
S x <= S y = x <= y
one, zero :: Nat
zero = Z
one = S Z
double :: Nat -> Nat
double Z = Z
double (S x) = S (S (double x))
even :: Nat -> Bool
even Z = True
even (S Z) = False
even (S (S x)) = even x
half :: Nat -> Nat
half Z = Z
half (S Z) = Z
half (S (S x)) = S (half x)
mult :: Nat -> Nat -> Nat -> Nat
mult Z _ acc = acc
mult (S x) y acc = mult x y (y + acc)
fac :: Nat -> Nat
fac Z = S Z
fac (S x) = S x * fac x
qfac :: Nat -> Nat -> Nat
qfac Z acc = acc
qfac (S x) acc = qfac x (S x * acc)
exp :: Nat -> Nat -> Nat
exp _ Z = S Z
exp x (S n) = x * exp x n
qexp :: Nat -> Nat -> Nat -> Nat
qexp x Z acc = acc
qexp x (S n) acc = qexp x n (x * acc)
-- Lists
length :: [a] -> Nat
length [] = Z
length (_:xs) = S (length xs)
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x : (xs ++ ys)
drop :: Nat -> [a] -> [a]
drop Z xs = xs
drop _ [] = []
drop (S x) (_:xs) = drop x xs
rev :: [a] -> [a]
rev [] = []
rev (x:xs) = rev xs ++ [x]
qrev :: [a] -> [a] -> [a]
qrev [] acc = acc
qrev (x:xs) acc = qrev xs (x:acc)
revflat :: [[a]] -> [a]
revflat [] = []
revflat ([]:xss) = revflat xss
revflat ((x:xs):xss) = revflat (xs:xss) ++ [x]
qrevflat :: [[a]] -> [a] -> [a]
qrevflat [] acc = acc
qrevflat ([]:xss) acc = qrevflat xss acc
qrevflat ((x:xs):xss) acc = qrevflat (xs:xss) (x:acc)
rotate :: Nat -> [a] -> [a]
rotate Z xs = xs
rotate _ [] = []
rotate (S n) (x:xs) = rotate n (xs ++ [x])
elem :: Nat -> [Nat] -> Bool
elem _ [] = False
elem n (x:xs) = n == x || elem n xs
subset :: [Nat] -> [Nat] -> Bool
subset [] ys = True
subset (x:xs) ys = x `elem` ys && subset xs ys
intersect,union :: [Nat] -> [Nat] -> [Nat]
(x:xs) `intersect` ys | x `elem` ys = x:(xs `intersect` ys)
| otherwise = xs `intersect` ys
[] `intersect` ys = []
union (x:xs) ys | x `elem` ys = union xs ys
| otherwise = x:(union xs ys)
union [] ys = ys
isort :: [Nat] -> [Nat]
isort [] = []
isort (x:xs) = insert x (isort xs)
insert :: Nat -> [Nat] -> [Nat]
insert n [] = [n]
insert n (x:xs) =
case n <= x of
True -> n : x : xs
False -> x : (insert n xs)
count :: Nat -> [Nat] -> Nat
count n (x:xs) | n == x = S (count n xs)
| otherwise = count n xs
count n [] = Z
sorted :: [Nat] -> Bool
sorted (x:y:xs) = x <= y && sorted (y:xs)
sorted _ = True
-- Theorem
prop_T25 :: [a] -> [a] -> Prop
prop_T25 x y = prove (even (length (x ++ y)) :=: even (length y + length x))
| danr/hipspec | testsuite/prod/zeno_version/PropT25.hs | gpl-3.0 | 3,002 | 0 | 12 | 923 | 2,021 | 1,051 | 970 | 114 | 2 |
module TB.System.Random.List (
shuffleList,
shuffleListCore,
shuffleListBase
) where
import Data.Function
import Data.List
import System.Random
shuffleList :: Int -> [a] -> [a]
shuffleList seed xs = map snd $ sortBy (compare `on` fst) $ zip rands xs
where
rands = randoms (mkStdGen seed) :: [Int]
shuffleListCore :: Ord b => [(b, a)] -> [a]
shuffleListCore xs = map snd $ sortBy (compare `on` fst) xs
shuffleListBase :: Int -> [a] -> [a]
shuffleListBase seed xs = shuffleListCore $ zip rands xs
where
rands = randoms (mkStdGen seed) :: [Int]
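-- Note: for a fixed seed the result is deterministic, and 'shuffleListBase'
-- pairs elements with the same random keys as 'shuffleList', so the two
-- should agree; a quick check sketch:
--
-- > shuffleList 7 [1..10 :: Int] == shuffleListBase 7 [1..10 :: Int]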
| adarqui/ToyBox | haskell/haskell/random/src/TB/System/Random/List.hs | gpl-3.0 | 594 | 0 | 9 | 140 | 230 | 127 | 103 | 15 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.LiaSettings.ListposDataproviders
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of POS data providers that have active settings for
-- all eligible countries.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.liasettings.listposdataproviders@.
module Network.Google.Resource.Content.LiaSettings.ListposDataproviders
(
-- * REST Resource
LiaSettingsListposDataprovidersResource
-- * Creating a Request
, liaSettingsListposDataproviders
, LiaSettingsListposDataproviders
-- * Request Lenses
, lsldXgafv
, lsldUploadProtocol
, lsldAccessToken
, lsldUploadType
, lsldCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.liasettings.listposdataproviders@ method which the
-- 'LiaSettingsListposDataproviders' request conforms to.
type LiaSettingsListposDataprovidersResource =
"content" :>
"v2.1" :>
"liasettings" :>
"posdataproviders" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] LiaSettingsListPosDataProvidersResponse
-- | Retrieves the list of POS data providers that have active settings for
-- all eligible countries.
--
-- /See:/ 'liaSettingsListposDataproviders' smart constructor.
data LiaSettingsListposDataproviders =
LiaSettingsListposDataproviders'
{ _lsldXgafv :: !(Maybe Xgafv)
, _lsldUploadProtocol :: !(Maybe Text)
, _lsldAccessToken :: !(Maybe Text)
, _lsldUploadType :: !(Maybe Text)
, _lsldCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LiaSettingsListposDataproviders' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsldXgafv'
--
-- * 'lsldUploadProtocol'
--
-- * 'lsldAccessToken'
--
-- * 'lsldUploadType'
--
-- * 'lsldCallback'
liaSettingsListposDataproviders
:: LiaSettingsListposDataproviders
liaSettingsListposDataproviders =
LiaSettingsListposDataproviders'
{ _lsldXgafv = Nothing
, _lsldUploadProtocol = Nothing
, _lsldAccessToken = Nothing
, _lsldUploadType = Nothing
, _lsldCallback = Nothing
}
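-- For example, a single field can be overridden through its lens (a sketch;
-- assumes the lens operators re-exported by 'Network.Google.Prelude' are in
-- scope):
--
-- > liaSettingsListposDataproviders & lsldUploadType ?~ "multipart"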
-- | V1 error format.
lsldXgafv :: Lens' LiaSettingsListposDataproviders (Maybe Xgafv)
lsldXgafv
= lens _lsldXgafv (\ s a -> s{_lsldXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
lsldUploadProtocol :: Lens' LiaSettingsListposDataproviders (Maybe Text)
lsldUploadProtocol
= lens _lsldUploadProtocol
(\ s a -> s{_lsldUploadProtocol = a})
-- | OAuth access token.
lsldAccessToken :: Lens' LiaSettingsListposDataproviders (Maybe Text)
lsldAccessToken
= lens _lsldAccessToken
(\ s a -> s{_lsldAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
lsldUploadType :: Lens' LiaSettingsListposDataproviders (Maybe Text)
lsldUploadType
= lens _lsldUploadType
(\ s a -> s{_lsldUploadType = a})
-- | JSONP
lsldCallback :: Lens' LiaSettingsListposDataproviders (Maybe Text)
lsldCallback
= lens _lsldCallback (\ s a -> s{_lsldCallback = a})
instance GoogleRequest
LiaSettingsListposDataproviders
where
type Rs LiaSettingsListposDataproviders =
LiaSettingsListPosDataProvidersResponse
type Scopes LiaSettingsListposDataproviders =
'["https://www.googleapis.com/auth/content"]
requestClient LiaSettingsListposDataproviders'{..}
= go _lsldXgafv _lsldUploadProtocol _lsldAccessToken
_lsldUploadType
_lsldCallback
(Just AltJSON)
shoppingContentService
where go
= buildClient
(Proxy ::
Proxy LiaSettingsListposDataprovidersResource)
mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/LiaSettings/ListposDataproviders.hs | mpl-2.0 | 4,928 | 0 | 17 | 1,102 | 632 | 370 | 262 | 98 | 1 |
{-|
Module : DrawingConsole
Description : Glue module for the console output of the game state
License : LGPLv3
-}
module DrawingConsole ( createDrawingConsole ) where
import DrawingConsole.Internal
| cmc-haskell-2015/checkers | src/DrawingConsole.hs | lgpl-3.0 | 273 | 0 | 4 | 43 | 15 | 10 | 5 | 2 | 0 |
module F_Term where
import List(sort)
type Symbol = String
data M_Term = M_Term Double [Symbol]
m_normalize :: M_Term -> M_Term
m_normalize (M_Term n list) = M_Term n (sort list)
instance Eq M_Term where
(==) (M_Term n1 list1) (M_Term n2 list2) = do_eq n1 n2 list1 list2 where
do_eq 0 0 _ _ = True
do_eq 0 n _ _ = False
do_eq n 0 _ _ = False
do_eq n1 n2 list1 list2 = n1 == n2 && sort list1 == sort list2
instance Ord M_Term where
compare (M_Term n1 list1) (M_Term n2 list2)
= cmp_number_part n1 n2 (compare (sort list1) (sort list2)) where
cmp_number_part _ _ LT = LT
cmp_number_part _ _ GT = GT
cmp_number_part n1 n2 EQ = compare n1 n2
instance Show M_Term where
showsPrec _ (M_Term n list) s = shows_m_term n list s where
shows_m_term 0 list s = '0' : s
shows_m_term 1 (x:xs) s = shows x (show_symbols xs s)
shows_m_term n list s = shows_short_float n (show_symbols list s)
show_symbols [] s = s
show_symbols (x:xs) s = '*' : shows x (show_symbols xs s)
newtype F_Term = F_Term [M_Term]
instance Eq F_Term where
(==) (F_Term list1) (F_Term list2) = list1 == list2
instance Show F_Term where
showsPrec _ (F_Term list) s = shows_list list s where
shows_list [] s = s
shows_list [x] s = shows x s
shows_list (x:xs) s = shows x (" + " ++ shows_list xs s)
shows_short_float :: Double->String->String
shows_short_float x s = let i = floor x in
if fromInteger i == x then shows i s else shows x s
normalize_m_terms :: [M_Term] -> [M_Term]
normalize_m_terms [] = []
normalize_m_terms list = let (x:xs) = sort list in process x xs where
    -- Note: the original clauses here were unfinished and duplicated; the
    -- merging of like terms below is an assumed completion.
    process :: M_Term -> [M_Term] -> [M_Term]
    process accumulator [] = [accumulator]
    process accumulator@(M_Term n1 syms1) (t@(M_Term n2 syms2):ts)
        | sort syms1 == sort syms2 = process (M_Term (n1 + n2) syms1) ts
        | otherwise                = accumulator : process t ts
| ibukanov/ahome | dev/bna/old/F_Term.hs | unlicense | 1,910 | 2 | 11 | 457 | 824 | 418 | 406 | 45 | 3 |
{-# LANGUAGE UnicodeSyntax, RankNTypes #-}
module Text.LogMerger.Logs.Types (
LogFormat(..)
, LogDissector
, Origin
, SGSNOrigin
, SGSNBasicEntry
, module Text.Regex
, module Text.LogMerger.Types
) where
import Pipes
import Data.Time (NominalDiffTime)
import qualified Pipes.ByteString as P
import Text.Regex
import Text.LogMerger.Types
import Data.Attoparsec.ByteString.Char8
import qualified Data.ByteString.Lazy.Internal as B
type SGSNOrigin = String
type SGSNBasicEntry = BasicLogEntry SGSNOrigin
type LogDissector = ∀ m a . (Monad m)
⇒ Producer P.ByteString m a
→ Producer SGSNBasicEntry m (Either String ())
data LogFormat = LogFormat {
_dissector ∷ LogDissector -- ^ Parser splitting bytestream to entries
, _nameRegex ∷ Regex -- ^ Regex used to pick dissector for a file
, _formatName ∷ String -- ^ Name of log format, used in manual parser picking
, _formatDescription ∷ String -- ^ Description of the log format
, _timeAs ∷ TimeAs -- ^ How to interpret timestamps
}
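-- A rough sketch of a format definition (the dissector and 'TimeAs' value are
-- placeholders, not definitions from this module):
--
-- > exampleFormat = LogFormat { _dissector         = myDissector
-- >                           , _nameRegex         = mkRegex "\\.log$"
-- >                           , _formatName        = "example-log"
-- >                           , _formatDescription = "An example log format"
-- >                           , _timeAs            = myTimeAs
-- >                           }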
| k32/visualsgsn | src/Text/LogMerger/Logs/Types.hs | unlicense | 1,105 | 0 | 11 | 255 | 202 | 130 | 72 | 27 | 0 |
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
import NonExistentModule
program = drawingOf(blank)
| google/codeworld | codeworld-compiler/test/testcases/missingModule/source.hs | apache-2.0 | 663 | 0 | 6 | 121 | 16 | 9 | 7 | 2 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
import qualified Data.Foldable as Foldable
import qualified Text.Blaze.Html.Renderer.String as R
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
-- import qualified Text.Blaze.Html.Renderer.Pretty as R
import Control.Monad (unless, when, zipWithM_)
import Data.Default.Class (Default(def))
import Data.List (intersperse)
import Data.Monoid
import Data.Time
import Data.Time.ISO8601 (formatISO8601)
data Tag
= Agda
| Compiler
| Concurrent
| Elm
| Erlang
| Theory
| Haskell
| HotCodeSwap
| HoTT
| Reliability
| Types
| Web
deriving Show
data Sponsor
= KiwiCom
| Ixperta
instance Show Sponsor where
showsPrec _ = showString . \case
KiwiCom -> "Kiwi.com"
Ixperta -> "IXPERTA"
class HasLink a where
getLink :: a -> URL
instance HasLink Sponsor where
getLink = \case
KiwiCom -> "https://www.kiwi.com/"
Ixperta -> "http://www.ixperta.com/"
-- TODO: Maybe we could add a logo for each sponsor.
data Language
= Cz
| En
| Sk
deriving Show
type Explanation = String
data Possibly a
= Present a
-- ^ Resource is prsent ant it is 'a'.
| NotYet
-- ^ Resource not yet available (there is still posibility there will be).
| NotPresent Explanation
-- ^ Resource probaby won't be available, with some explanation/excuse.
deriving Show
type URL = String
data Presentation = Presentation
{ title :: String
, author :: String
, language :: [Language]
    -- ^ In what language(s) the talk can be/was given.
, tags :: [Tag]
, slides :: Possibly URL
    -- ^ URL to the slides of this presentation; 'NotPresent' in case there
    -- are no slides.
, audio :: Possibly URL
    -- ^ URL to an audio recording of this presentation, or 'NotPresent' if
    -- there is no recording.
, player :: Possibly URL
    -- ^ URL to a web player able to handle the audio recording of this
    -- presentation. Set to 'NotPresent' in case no web player is provided.
} deriving Show
instance Default Presentation where
def = Presentation
{ title = "Unknown title"
, author = "Unknown author"
, language = []
, tags = []
, slides = NotYet
, audio = NotYet
, player = NotYet
}
data Meetup = Meetup
{ indexM :: Integer
    -- ^ Meetups form a total order in time and are mapped onto a linear
    -- ordering of 'Integer' numbers starting from 0.
, presentations :: [Presentation]
    -- ^ List of presentations given during the meetup.
, time :: Maybe ZonedTime
-- ^ Time when the meetup occurred
, participants :: Maybe Integer
    -- ^ Number of participants. For future meetups, or meetups currently
    -- being held, this is set, obviously, to 'Nothing'.
, sponsors :: [Sponsor]
} deriving Show
possiblyMaybe :: Possibly a -> Maybe a
possiblyMaybe = \case
Present x -> Just x
_ -> Nothing
futureMeetup :: Integer -> Meetup
futureMeetup idx = Meetup
{ indexM = idx
, presentations = []
, time = Nothing
, participants = Nothing
, sponsors = []
}
-- | Check that 'Meetup' indices are consistent. It expects them to be in
-- reverse order, i.e. newest/future first and oldest last.
checkMeetupsIndex :: [Meetup] -> [Meetup]
checkMeetupsIndex ms = case areCorrectlyOrdered ms of
Right () -> ms
Left msg -> error msg
where
areCorrectlyOrdered = zipWithM_ checkIndex [0 ..] . reverse
checkIndex expectedIndex m@Meetup{indexM = gotIndex} =
unless (expectedIndex == gotIndex) . Left $ concat
[ "Expected index ", show expectedIndex
, ", but got ", show gotIndex
, " in ", show m
]
-- | List of all meetups, those that already occurred and those that are
-- planned. Meetups are ordered from newest/future down to the oldest;
-- so please add new meetups on top.
meetups :: [Meetup]
meetups = checkMeetupsIndex
[ Meetup
{ indexM = 7
, presentations = []
, time = Just $ read "2017-02-22 19:00:00 +02:00"
, participants = Nothing
, sponsors = [KiwiCom]
}
, Meetup
{ indexM = 6
, presentations =
[ Presentation
{ title = "Introduction to Agda"
, author = "Adam Krupicka"
, language = [Sk]
, tags = [Agda, Theory]
, slides = Present "fpb-6/html/talk.html"
, audio = NotPresent "Not recorded"
, player = NotPresent "No audio recording"
}
]
, time = Just $ read "2016-10-13 19:00:00 +02:00"
, participants = Just 20
, sponsors = []
}
, Meetup
{ indexM = 5
, presentations =
[ Presentation
{ title = "Types and Higher Groupoids"
, author = "John Bourke"
, language = [En]
, tags = [HoTT, Theory]
, slides = NotYet
, audio = Present "fpb-5/fpb-5.ogg"
, player = NotYet
}
]
, time = Just $ read "2016-07-27 18:00:00 +02:00"
, participants = Just 12
, sponsors = []
}
, Meetup
{ indexM = 4
, presentations =
[ Presentation
{ title = "Elm - the Best of Functional Programming in Your Browser"
, author = "Adam Kövári"
, language = [En]
, tags = [Elm, Web]
, slides = Present "fpb-4/elm_best_of_fp_in_browser.pdf"
, audio = NotYet
, player = NotYet
}
]
, time = Just $ read "2016-06-28 18:30:00 +02:00"
, participants = Just 8
, sponsors = [Ixperta]
}
, Meetup
{ indexM = 3
, presentations =
[ Presentation
{ title = "Erlang for Haskellers"
, author = "Hynek Vychodil"
, language = [Cz]
, tags = [Erlang, Concurrent, Reliability, HotCodeSwap]
, slides = Present "fpb-3/erlang_for_haskellers.pdf"
, audio = Present "fpb-3/fpb-3.ogg"
, player = Present "fpb-3/player.html"
}
]
, time = Just $ read "2015-11-25 18:30:00 +01:00"
, participants = Just 28
, sponsors = []
}
, Meetup
{ indexM = 2
, presentations =
[ Presentation
{ title =
"Types as values: Derive correctness from practicality"
, author = "Peter"
, language = [Sk]
, tags = [Haskell, Types]
, slides = Present "fpb-2/types-as-values.html"
, audio = NotPresent "I forgot to start recording"
, player = NotPresent "Does not make sense without audio"
}
]
, time = Just $ read "2015-09-30 19:00:00 +02:00"
, participants = Just 14
, sponsors = []
}
, Meetup
{ indexM = 1
, presentations =
[ Presentation
{ title = "Apples and Oranges"
, author = "Matej"
, language = [Sk]
, tags = [Haskell, Types]
, slides = Present "fpb-1/fpb-1.html"
, audio = Present "fpb-1/fpb-1.ogg"
, player = Present "fpb-1/player.html"
}
]
, time = Just $ read "2015-05-12 18:00:00 +02:00"
, participants = Just $ 4 + 18
, sponsors = []
}
, Meetup
{ indexM = 0
, presentations =
[ Presentation
{ title = "There Is No Compiler"
, author = "Matej"
, language = [Sk]
, tags = [Haskell, Compiler]
, slides = Present "fpb-0/fpb-0.html"
, audio = Present "fpb-0/fpb-0.ogg"
, player = Present "fpb-0/player.html"
}
]
, time = Just $ read "2015-02-16 19:00:00 +01:00"
, participants = Just 6
, sponsors = []
}
]
localjs :: String
localjs = concatMap (dropWhile (' ' ==))
[ "$(document).ready(function(){"
, " $.timeago.settings.allowFuture = true;"
, " $(\"time.timeago\").timeago();"
, " $.getJSON( \"https://api.github.com/orgs/FPBrno/members\", function(data) {"
, " var items = [];"
, " $.each(data, function(item) {"
, " items.push($(\"<a>\", {href: data[item].html_url}).append($(\"<img>\", {"
, " src: data[item].avatar_url,"
, " alt: data[item].login,"
, " title: data[item].login,"
, " })));"
, " });"
, " $(\".members\").before($(\"<h2>\", {html: \"Public members\"})).empty().append(items);"
, " });"
, "});"
]
time2Html :: ZonedTime -> H.Html
time2Html t = H.time H.! A.class_ "timeago" H.! A.datetime (H.toValue . formatISO8601 $ zonedTimeToUTC t) $ H.toHtml (show t)
presentation2html :: Presentation -> H.Html
presentation2html Presentation{..} = H.div H.! A.class_ "presentation" $ do
classed "presentation_title" title
" (by "
H.toHtml author
", "
sequence . intersperse ", " $ map (classedShow "lang") language
")"
mapM_ ((" " >>) . classedShow "tag") tags
H.div H.! A.class_ "pres_goodies" $ do
h "Slides" slides
h "Audio" audio
h "Player" player
where
classed c x = H.span H.! A.class_ c $ H.toHtml x
classedShow c = classed c . show
g t u = H.a H.! A.href (H.toValue u) $ t
h x = Foldable.mapM_ (g x) . possiblyMaybe
presentations2html :: [Presentation] -> H.Html
presentations2html [] = "No presentations"
presentations2html [x] = do
"Presentation: "
presentation2html x
presentations2html s = do
    "Presentations: "
    mapM_ presentation2html s
meetup2html :: Meetup -> H.Html
meetup2html Meetup{..} = H.div H.! A.class_ "meetup" $ do
H.h3 . H.toHtml $ "Meetup " <> show indexM
H.ul $ do
H.li $ do
"Time: "
maybe "To be determined" time2Html time
maybe mempty (\ n -> H.li ("Participants: " >> H.toHtml (show n))) participants
unless (null sponsors) . H.li $ do
"Sponsor" >> when (length sponsors > 1) "s" >> ": "
renderSponsors sponsors
H.li $ presentations2html presentations
where
renderSponsors = sequence_ . intersperse ", " . map renderSponsor
renderSponsor s =
H.a H.! A.href (H.toValue $ getLink s) $ H.toHtml (show s)
fpbTitle :: H.Html
fpbTitle = "Functional Programming Brno"
cdns :: H.Html
cdns = mconcat
[ js "https://code.jquery.com/jquery-2.1.3.min.js"
, js "https://cdn.rawgit.com/rmm5t/jquery-timeago/master/jquery.timeago.js"
-- , css "https://maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css"
-- , css "https://fonts.googleapis.com/css?family=Roboto:300,400"
] where
-- css u = H.link H.! A.rel "stylesheet" H.! A.href u
js u = H.script H.! A.src u $ mempty
timeago :: H.Html
timeago = H.script $ H.preEscapedToHtml localjs
fa :: String -> H.Html
fa x = H.i H.! A.class_ (H.toValue $ "fa " <> x) $ mempty
site :: UTCTime -> H.Html
site t = H.html $ do
H.head $ do
H.meta H.! A.charset "UTF-8"
H.title H.! A.class_ "head-title" $ fpbTitle
H.link H.! A.rel "stylesheet" H.! A.type_ "text/css" H.! A.href "style.css"
cdns
timeago
H.body . (H.div H.! A.class_ "wrapper") $ do
H.header $ do
H.h1 fpbTitle
H.img H.! A.src "images/FPB.svg" H.! A.alt "Functional Programming Brno"
-- H.menu $ do
-- fa "fa-rss fa-fw" >> "All"
-- fa "fa-rss fa-fw" >> "Articles"
-- fa "fa-rss fa-fw" >> "Meetups"
-- fa "fa-rss fa-fw" >> "Talks"
H.div H.! A.class_ "main" $ do
H.p $ H.preEscapedToHtml
("Functional Programming Brno (FPBrno or FPB for short) is for\
\ everyone interested in functional programming who happens to\
\ be in <a href=\"https://goo.gl/maps/MIRi3\">Brno</a> or\
\ nearby areas.\
\ Activities include but are not limited to talks and\
\ discussions." :: String)
H.p $ do
"We have a mailing list ("
H.a H.! A.href "https://groups.google.com/d/forum/fpbrno"
$ "online archive"
") that you can sign-up to simply by sending an email to\
\ [email protected] (even an empty email\
\ will do)."
H.p $ do
"Important news will also be tweeted on "
H.a H.! A.href "https://twitter.com/FPBrno"
$ "@FPBrno"
"."
H.p "More to come."
H.h2 "Upcoming events"
let fe = filter (maybe True ((t <=) . zonedTimeToUTC) . time) meetups
if null fe
then H.p "There are no planned events now.\
                     \ In the ideal case the next meetup will occur\
\ between 1 and 2 months after the last meetup."
else mapM_ meetup2html fe
H.h2 "Past events"
mapM_ meetup2html . take 10 $ filter (maybe False ((t >) . zonedTimeToUTC) . time) meetups
H.div H.! A.class_ "members" $ mempty
H.p $ do
"Would you like to be on the list? \
\It's simple: get a GitHub account and send us a pull request. \
\If nothing else, add yourself to PEOPLE.md :-). \
\Already a member and you still don't see yourself? \
\Change your status to public on "
H.a H.! A.href "https://github.com/orgs/FPBrno/people"
$ "people page"
". (No pressure.)"
H.footer $ do
H.a H.! A.href "https://github.com/FPBrno" $ "FPBrno on GitHub"
" "
H.a H.! A.href "https://groups.google.com/d/forum/fpbrno" $ "FPBrno mailing list"
" "
H.a H.! A.href "https://twitter.com/FPBrno" $ "@FPBrno on Twitter"
H.div "© 2015-2016 Functional Programming Brno"
main :: IO ()
main = do
t <- getCurrentTime
writeFile "index.html" . R.renderHtml $ H.docType >> site t
| trskop/FPBrno.github.io | gen/test-html.hs | artistic-2.0 | 14,741 | 0 | 23 | 5,062 | 3,077 | 1,670 | 1,407 | 327 | 2 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RecordWildCards #-}
module NW.Util where
import "monads-tf" Control.Monad.Identity
import Data.Char
import Data.List
import Data.Maybe
import qualified Data.Text.Lazy as T
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.Pos
import Text.Parsec.Prim
import Text.Parsec.Text.Lazy ()
import Text.Parsec.Token
enclose :: (String, String) -> String -> String
enclose (a, b) str = a ++ str ++ b
paren :: String -> String
paren = enclose ("(", ")")
squote :: String -> String
squote = enclose ("`", "'")
squote' :: String -> String
squote' = enclose (" ", " ") . squote
pshow :: Show a => a -> IO ()
pshow = putStrLn . show
downcase :: String -> String
downcase = map toLower
enumsHash :: Show a => [a] -> [(String, a)]
enumsHash enums = zip (map (downcase . show) enums) enums
gameDataFormatDef :: GenLanguageDef T.Text u Identity
gameDataFormatDef = LanguageDef
{ commentStart = "/*"
, commentEnd = "*/"
, commentLine = "#"
, nestedComments = False
, identStart = letter
, identLetter = alphaNum
, opStart = opLetter gameDataFormatDef
, opLetter = oneOf ":/"
, reservedOpNames= []
, reservedNames = []
, caseSensitive = True
}
lexer :: GenTokenParser T.Text u Identity
lexer = makeTokenParser gameDataFormatDef
t_decimal :: ParsecT T.Text u Identity Integer
t_decimal = decimal lexer
-- NOTE: as of Parsec 3.1.5, `hexadecimal` expects numbers with a leading 'x',
-- not a '0x'!
t_hexadecimal :: ParsecT T.Text u Identity Integer
t_hexadecimal = hexadecimal lexer
t_natural :: ParsecT T.Text u Identity Integer
t_natural = natural lexer
t_symbol :: String -> ParsecT T.Text u Identity String
t_symbol = symbol lexer
t_identifier :: ParsecT T.Text u Identity String
t_identifier = identifier lexer
t_whiteSpace :: ParsecT T.Text u Identity ()
t_whiteSpace = whiteSpace lexer
t_braces :: ParsecT T.Text u Identity a -> ParsecT T.Text u Identity a
t_braces = braces lexer
t_brackets :: ParsecT T.Text u Identity a -> ParsecT T.Text u Identity a
t_brackets = brackets lexer
t_stringLiteral :: ParsecT T.Text u Identity String
t_stringLiteral = stringLiteral lexer
t_commaSep1 :: ParsecT T.Text u Identity a -> ParsecT T.Text u Identity [a]
t_commaSep1 = commaSep1 lexer
-- Having a ParsecT type allows us to use this parser in parsers that do or do
-- not involve keeping track of state. If we just have a "Parser Int" type, this
-- parser will not be able to be used inside, e.g., a "GameMapParser a" parser.
intParser :: ParsecT T.Text u Identity Int
intParser = do
sign <- optionMaybe $ char '-'
n <- t_decimal
let
n' = fromIntegral n
if isJust sign
then return $ negate n'
else return n'
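-- A minimal usage sketch (illustrative only; @parse@ is Parsec's plain runner
-- from Text.Parsec.Prim):
--
-- > parse intParser "" (T.pack "-42")   -- Right (-42)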
intParser' :: ParsecT T.Text u Identity Int
intParser' = do
n <- intParser
_ <- t_whiteSpace
return n
uintParser :: ParsecT T.Text u Identity Int
uintParser = do
n <- t_decimal
let
n' = fromIntegral n
return n'
uintParser' :: ParsecT T.Text u Identity Int
uintParser' = do
n <- uintParser
_ <- t_whiteSpace
return n
hexParser :: ParsecT T.Text u Identity Int
hexParser = do
_ <- char '0'
n <- t_hexadecimal
let
n' = fromIntegral n
return n'
hexParser' :: ParsecT T.Text u Identity Int
hexParser' = do
n <- hexParser
_ <- t_whiteSpace
return n
intRangeParser :: ParsecT T.Text u Identity (Int, Int)
intRangeParser = try a <|> b
where
a = do
n <- intParser
_ <- string " "
m <- intParser
_ <- t_whiteSpace
return (n, m)
b = do
n <- intParser
_ <- t_whiteSpace
return (n, n)
t_stringTillNewline :: ParsecT T.Text u Identity String
t_stringTillNewline = do
str <- manyTill (noneOf "\n") (try . lookAhead $ trailingWhitespace)
_ <- t_whiteSpace
return str
where
trailingWhitespace = do
_ <- many $ oneOf " \t"
_ <- string "\n"
return ()
choice' :: Stream s m t => [ParsecT s u m a] -> ParsecT s u m a
choice' [] = fail "choice': empty list"
choice' (p:[]) = choice [p]
choice' ps = choice $ tries ++ [lastOne]
where
tries = map try (init ps)
lastOne = last ps
symbolA
:: String
-> (ParsecT T.Text u Identity a)
-> ParsecT T.Text u Identity (SourcePos, a)
symbolA str parser = do
_ <- t_symbol str
pos <- getPosition
a <- parser
return (pos, a)
symbolWhiteSpace :: String -> ParsecT T.Text u Identity ()
symbolWhiteSpace str = do
_ <- string str
_ <- lookAhead $ oneOf " \t\n"
return ()
sourceLC :: SourcePos -> String
sourceLC sp = paren $ "line "
++ (show $ sourceLine sp)
++ ", column " ++ (show $ sourceColumn sp)
duplicateDefinition
:: (Eq a, Show a, Monad m)
=> String
-> [(a, SourcePos)]
-> (a, SourcePos) -> m b
duplicateDefinition keyType hash (key, pos) = fail
$ headingBody
[show pos]
[ keyType ++ " " ++ show key ++ " already defined at "
++ (sourceLC . fromJust $ lookup key hash)
]
valueBeyondRange :: (Eq a, Show a, Monad m) => String -> (a, SourcePos) -> m b
valueBeyondRange valType (val, pos) = fail
$ headingBody
[show pos]
[ valType ++ " " ++ show val ++ " out of range"
]
headingBody :: [String] -> [String] -> String
headingBody heading body = (intercalate "\n" heading)
++ "\n"
++ (intercalate "\n" $ map indent body)
parseErrMsg
:: (Eq a, Show a, Monad m)
=> String
-> String
-> (String, a, SourcePos)
-> m b
parseErrMsg msg valLocation (valType, val, pos) = fail
$ headingBody
[ ""
, show pos
, msg
]
[ valLocation
, valType ++ ": " ++ show val
]
showTuple :: (Show a, Show b) => (a, b) -> String
showTuple (a, b) = show a ++ " " ++ show b
indent :: String -> String
indent s = "\t" ++ s
chop :: Int -> [a] -> [[a]]
chop _ [] = []
chop n xs = take n xs : chop n (drop n xs)
| listx/netherworld | src/NW/Util.hs | bsd-2-clause | 5,636 | 50 | 11 | 1,164 | 2,147 | 1,104 | 1,043 | 184 | 2 |
{-# LANGUAGE NoMonomorphismRestriction,FlexibleContexts,
TypeSynonymInstances,FlexibleInstances,MultiParamTypeClasses #-}
module LiveJournal.Login (
login,
loginExt,
LJLoginRequest(..),
LJLoginResponse(..),
loginObjectUpdater,
loginObjectFactory
)
where
import LiveJournal.Entity
import LiveJournal.Error
import LiveJournal.Session hiding (password)
import LiveJournal.Transport
import LiveJournal.Request
import LiveJournal.ResponseParser as LJRP
import Control.Applicative hiding ((<|>))
import Control.Monad
import Control.Monad.Trans
import Data.Maybe as DM
import Data.Map as DMP
import Data.List as DL
import Text.Parsec as TP
import Text.Parsec.ByteString
data LJLoginRequest = LoginRequest {
user :: String,
password :: String,
moods :: Maybe Int,
menus :: Bool,
pickws :: Bool,
pickwurls :: Bool
}
data LJLoginResponse = LoginResponse {
ljUsername :: String,
ljSession :: Session,
ljCommunities :: [Community],
ljMoods :: [ResponseData],
ljGroups :: [ResponseData],
ljMenus :: [ResponseData],
ljPics :: [ResponseData],
ljDefaultPicwUrl :: Maybe String,
ljFastServer :: Bool
} deriving (Show)
login :: String -> String -> IOResult LJLoginResponse
login username password = loginExt $ LoginRequest username password Nothing False False False
loginObjectFactory :: ObjectFactory ResponseData
loginObjectFactory "mood" = Just $ Mood 0 0 ""
loginObjectFactory "frgrp" = Just $ Group "" 0 False
loginObjectFactory "menu" = Just $ Menu 0 DMP.empty
loginObjectFactory _ = Nothing
loginObjectUpdater :: ObjectUpdater ResponseData
loginObjectUpdater "mood" "id" value obj = Just $ obj { moodId = read value }
loginObjectUpdater "mood" "name" value obj = Just $ obj { moodName = value }
loginObjectUpdater "mood" "parent" value obj = Just $ obj { moodParent = read value }
loginObjectUpdater "frgrp" "name" value obj = Just $ obj { groupName = value }
loginObjectUpdater "frgrp" "sortorder" value obj = Just $ obj { groupSortOrder = read value }
loginObjectUpdater "frgrp" "public" value obj = Just $ obj { groupPublic = "1" == value }
loginObjectUpdater "menu" menuParam value obj =
case parseResult of
(Left err) -> Just obj
(Right obj') -> Just obj'
where
parseResult = head $ TP.runPT parseMenuItem (obj, value) "" menuParam
loginObjectUpdater _ _ _ _ = Nothing
instance ResponseTransformer ResponseData LJLoginResponse where
transform (simpleMap, enumMap, objectMap) =
maybe (makeErrorStr $ "Can't create response " ++ show simpleMap) makeResult $ do
username <- DMP.lookup "name" simpleMap
return $ LoginResponse username Anonymous communities moods groups menus pickws defaultPicwUrl fastServer
where
communities = maybe [] (DL.concat . DMP.elems) $ DMP.lookup "access" enumMap
moods = fromMaybe [] $ DMP.elems <$> DMP.lookup "mood" objectMap
groups = fromMaybe [] $ DMP.elems <$> DMP.lookup "frgrp" objectMap
menus = fromMaybe [] $ DMP.elems <$> DMP.lookup "menu" objectMap
pickws = foldWithKey makePickws [] pickwUrls
pickwKeys = fromMaybe DMP.empty $ DMP.lookup "pickw" enumMap
pickwUrls = fromMaybe DMP.empty $ DMP.lookup "pickwurl" enumMap
makePickws idx [url] = let keywords = concat . maybeToList $ DMP.lookup idx pickwKeys
in ( Pickw url keywords : )
defaultPicwUrl = DMP.lookup "defaultpicurl" simpleMap
fastServer = isJust $ DMP.lookup "fastserver" simpleMap
loginExt :: LJLoginRequest -> IOResult LJLoginResponse
loginExt request =
prepareChallenge ( password request ) >>= DM.maybe emptyResponse login'
where
emptyResponse = makeError NoChallenge
login' (chal, auth_response) = do
result <- (runRequest request' (CRP loginObjectFactory loginObjectUpdater) :: IOResult LJLoginResponse)
return $ result { ljSession = Authenticated ( password request ) }
where
params = DL.concat [
[
("mode","login"),
("user",user request),
("auth_method","challenge"),
("auth_challenge",chal),
("auth_response",auth_response)
],
DM.maybe [] ( makeTupleSArr "getmoods" . show ) ( moods request ),
guard (menus request) >> makeTupleSArr "getmenus" "1",
guard (pickws request) >> makeTupleSArr "getpickws" "1",
guard (pickwurls request) >> makeTupleSArr "getpickwurls" "1"
]
request' = makeRequest params
makeTupleSArr = ( return . ) . (,)
parseMenuItem =
try (parseMenuItemProperty "_text" updateText) <|>
( try (parseMenuItemProperty "_url" updateUrl) <|> parseMenuItemProperty "_sub" updateSub )
where
updateText txt menyItemP = menyItemP { menuText = txt }
updateUrl txt menyItemP = menyItemP { menuUrl = txt }
updateSub txt menyItemP = menyItemP { menuSub = read txt }
parseMenuItemProperty suffix f = do
itemNum <- liftM read $ TP.many TP.digit
TP.string suffix
(menu, value) <- getState
return $ updateMenuMap menu itemNum (f value)
updateMenuMap menu itemNum f =
let newMap = DMP.alter updFunc itemNum ( menuItems menu )
in menu { menuItems = newMap }
where
updFunc Nothing = updFunc . Just $ MenuItem itemNum 0 "" ""
updFunc (Just menuItem) = Just (f menuItem)
| jdevelop/hslj | LiveJournal/Login.hs | bsd-3-clause | 6,220 | 0 | 14 | 2,020 | 1,556 | 823 | 733 | 113 | 2 |
module Yuuko.Text.XML.HXT.RelaxNG.PatternToString
( patternToStringTree
, patternToFormatedString
, xmlTreeToPatternStringTree
, xmlTreeToPatternFormatedString
, xmlTreeToPatternString
, nameClassToString
)
where
import Yuuko.Control.Arrow.ListArrows
import Yuuko.Data.Tree.NTree.TypeDefs
import Yuuko.Text.XML.HXT.DOM.Interface
import Yuuko.Text.XML.HXT.RelaxNG.DataTypes
import Yuuko.Text.XML.HXT.RelaxNG.CreatePattern
import Yuuko.Text.XML.HXT.RelaxNG.Utils
-- ------------------------------------------------------------
type PatternTree = NTree String
{- |
Returns a string representation of the pattern structure.
(see also: 'createPatternFromXmlTree')
Example:
> Element {}foo (Choice (Choice (Value ("","token") "abc"
> ("foo","www.bar.baz")]))(Data ("http://www.mysql.com","VARCHAR")
> [("length","2"),("maxLength","5")])) (Element {}bar (Group (Element {}baz
The function can @not@ be used to display circular ref-pattern structures.
-}
xmlTreeToPatternString :: LA XmlTree String
xmlTreeToPatternString
= createPatternFromXmlTree
>>^
show
-- | Returns a string representation of a nameclass.
nameClassToString :: NameClass -> String
nameClassToString AnyName
= "AnyName"
nameClassToString (AnyNameExcept nc)
= "AnyNameExcept " ++ nameClassToString nc
nameClassToString (Name uri local)
= "{" ++ uri ++ "}" ++ local
nameClassToString (NsName uri)
= "{" ++ uri ++ "}"
nameClassToString (NsNameExcept uri nc)
= uri ++ "except (NsName) " ++ nameClassToString nc
nameClassToString (NameClassChoice nc1 nc2)
= nameClassToString nc1 ++ " " ++ nameClassToString nc2
nameClassToString (NCError e)
= "NameClass Error: " ++ e
-- ------------------------------------------------------------
{- |
Returns a tree representation of the pattern structure.
The hard work is done by 'formatTree'.
Example:
> +---element {}bar
> |
> +---group
> |
> +---oneOrMore
> | |
> | +---attribute AnyName
> | |
> | +---text
> |
> +---text
The function can be used to display circular ref-pattern structures.
Example:
> <define name="baz">
> <element name="baz">
> ... <ref name="baz"/> ...
> </element>
> </define>
-}
patternToStringTree :: LA Pattern String
patternToStringTree
= fromSLA [] pattern2PatternTree
>>^
(\p -> formatTree id p ++ "\n")
-- | Returns a tree representation of the pattern structure.
-- (see also: 'createPatternFromXmlTree' and 'patternToStringTree')
xmlTreeToPatternStringTree :: LA XmlTree String
xmlTreeToPatternStringTree
= createPatternFromXmlTree
>>>
patternToStringTree
pattern2PatternTree :: SLA [NameClass] Pattern PatternTree
pattern2PatternTree
= choiceA
[ isA isRelaxEmpty :-> (constA $ NTree "empty" [])
, isA isRelaxNotAllowed :-> notAllowed2PatternTree
, isA isRelaxText :-> (constA $ NTree "text" [])
, isA isRelaxChoice :-> choice2PatternTree
, isA isRelaxInterleave :-> children2PatternTree "interleave"
, isA isRelaxGroup :-> children2PatternTree "group"
, isA isRelaxOneOrMore :-> children2PatternTree "oneOrMore"
, isA isRelaxList :-> children2PatternTree "list"
, isA isRelaxData :-> data2PatternTree
, isA isRelaxDataExcept :-> dataExcept2PatternTree
, isA isRelaxValue :-> value2PatternTree
, isA isRelaxAttribute :-> createPatternTreeFromElement "attribute"
, isA isRelaxElement :-> element2PatternTree
, isA isRelaxAfter :-> children2PatternTree "after"
]
notAllowed2PatternTree :: SLA [NameClass] Pattern PatternTree
notAllowed2PatternTree
= arr $ \(NotAllowed (ErrMsg _l sl)) -> NTree "notAllowed" $ map (\ s -> NTree s []) sl
data2PatternTree :: SLA [NameClass] Pattern PatternTree
data2PatternTree
= arr $ \ (Data d p) -> NTree "data" [ datatype2PatternTree d
, mapping2PatternTree "parameter" p
]
dataExcept2PatternTree :: SLA [NameClass] Pattern PatternTree
dataExcept2PatternTree
= this &&& (listA $ arrL getChildrenPattern >>> pattern2PatternTree)
>>>
arr2 ( \ (DataExcept d param _) pattern ->
NTree "dataExcept" ([ datatype2PatternTree d
, mapping2PatternTree "parameter" param
] ++ pattern)
)
value2PatternTree :: SLA [NameClass] Pattern PatternTree
value2PatternTree
= arr $ \ (Value d v c) -> NTree ("value = " ++ v) [ datatype2PatternTree d
, context2PatternTree c
]
createPatternTreeFromElement :: String -> SLA [NameClass] Pattern PatternTree
createPatternTreeFromElement name
= ( arr getNameClassFromPattern
&&&
listA (arrL getChildrenPattern >>> pattern2PatternTree)
)
>>>
arr2 (\nc rl -> NTree (name ++ " " ++ show nc) rl)
children2PatternTree :: String -> SLA [NameClass] Pattern PatternTree
children2PatternTree name
= listA (arrL getChildrenPattern >>> pattern2PatternTree)
>>^
(NTree name)
choice2PatternTree :: SLA [NameClass] Pattern PatternTree
choice2PatternTree
    = ifA ( -- if the second child is an element that has not been output yet,
            -- it has to be handled differently
            -- (only for nicer formatting of the output)
arr (last . getChildrenPattern) >>> isA (isRelaxElement) >>>
(arr getNameClassFromPattern &&& getState) >>>
isA(\ (nc, liste) -> not $ elem nc liste)
)
      ( -- record the element in the state, so the first child no longer outputs it
arr getChildrenPattern
>>>
changeState (\s p -> (getNameClassFromPattern (last p)) : s)
>>>
      ( ( head ^>> pattern2PatternTree )   -- process the first child normally
        &&&                                -- then the second child, the element
( last ^>> createPatternTreeFromElement "element" )
)
>>>
arr2 ( \ l1 l2 -> NTree "choice" [l1, l2] )
)
( children2PatternTree "choice" )
element2PatternTree :: SLA [NameClass] Pattern PatternTree
element2PatternTree
= ifA ( (arr getNameClassFromPattern &&& getState)
>>>
isA (\ (nc, liste) -> elem nc liste)
)
( arr getNameClassFromPattern
>>>
arr (\nc -> NTree ("reference to element " ++ show nc) [])
)
( changeState (\ s p -> (getNameClassFromPattern p) : s)
>>>
createPatternTreeFromElement "element"
)
mapping2PatternTree :: String -> [(Prefix, Uri)] -> PatternTree
mapping2PatternTree name mapping
= NTree name (map (\(a, b) -> NTree (a ++ " = " ++ b) []) mapping)
datatype2PatternTree :: Datatype -> PatternTree
datatype2PatternTree dt
= NTree (datatype2String dt) []
context2PatternTree :: Context -> PatternTree
context2PatternTree (base, mapping)
= NTree "context" [ NTree ("base-uri = " ++ base) []
, mapping2PatternTree "namespace environment" mapping
]
-- ------------------------------------------------------------
-- | Returns a formated string representation of the pattern structure.
-- (see also: 'createPatternFromXmlTree' and 'patternToFormatedString')
xmlTreeToPatternFormatedString :: LA XmlTree String
xmlTreeToPatternFormatedString
= createPatternFromXmlTree
>>>
fromSLA [] patternToFormatedString
{- |
Returns a formated string representation of the pattern structure.
Example:
> Element {}foo (Choice (Choice ( Value = abc,
> datatypelibrary = http://relaxng.org/ns/structure/1.0, type = token,
> context (base-uri =file://test.rng,
> parameter: xml = http://www.w3.org/XML/1998/namespaces, foo = www.bar.baz),
The function can be used to display circular ref-pattern structures.
-}
patternToFormatedString :: SLA [NameClass] Pattern String
patternToFormatedString
= choiceA
[ isA isRelaxEmpty :-> (constA " empty ")
, isA isRelaxNotAllowed :-> (arr $ \ (NotAllowed errorEnv) -> show errorEnv)
, isA isRelaxText :-> (constA " text ")
, isA isRelaxChoice :-> children2FormatedString "choice"
, isA isRelaxInterleave :-> children2FormatedString "interleave"
, isA isRelaxGroup :-> children2FormatedString "group"
, isA isRelaxOneOrMore :-> children2FormatedString "oneOrMore"
, isA isRelaxList :-> children2FormatedString "list"
, isA isRelaxData :-> data2FormatedString
, isA isRelaxDataExcept :-> dataExcept2FormatedString
, isA isRelaxValue :-> value2FormatedString
, isA isRelaxAttribute :-> createFormatedStringFromElement "attribute"
, isA isRelaxElement :-> element2FormatedString
, isA isRelaxAfter :-> children2FormatedString "after"
]
children2FormatedString :: String -> SLA [NameClass] Pattern String
children2FormatedString name
= listA (arrL getChildrenPattern >>> patternToFormatedString)
>>^
(\ l -> name ++ " (" ++ formatStringListPatt l ++ ") " )
data2FormatedString :: SLA [NameClass] Pattern String
data2FormatedString
= arr ( \ (Data datatype paramList) ->
"Data " ++ datatype2String datatype ++ "\n " ++
mapping2String "parameter" paramList ++ "\n"
)
dataExcept2FormatedString :: SLA [NameClass] Pattern String
dataExcept2FormatedString
= arr ( \ (DataExcept datatype paramList _) ->
"DataExcept " ++ show datatype ++ "\n " ++
mapping2String "parameter" paramList ++ "\n "
)
&&&
( arr (\ (DataExcept _ _ p) -> p) >>> patternToFormatedString )
>>>
arr2 (++)
value2FormatedString :: SLA [NameClass] Pattern String
value2FormatedString
= arr $ \(Value datatype val context) ->
"Value = " ++ val ++ ", " ++ datatype2String datatype ++
"\n " ++ context2String context ++ "\n"
element2FormatedString :: SLA [NameClass] Pattern String
element2FormatedString
= ifA ( (arr getNameClassFromPattern &&& getState)
>>>
isA (\ (nc, liste) -> elem nc liste)
)
( arr getNameClassFromPattern
>>^
( \nc -> "reference to element " ++ nameClassToString nc ++ " " )
)
( changeState (\ s p -> (getNameClassFromPattern p) : s)
>>>
createFormatedStringFromElement "element"
)
createFormatedStringFromElement :: String -> SLA [NameClass] Pattern String
createFormatedStringFromElement name
= ( arr getNameClassFromPattern
&&&
( listA (arrL getChildrenPattern >>> patternToFormatedString)
>>^
formatStringListId
)
)
>>>
arr2 (\ nc rl -> name ++ " " ++ nameClassToString nc ++ " (" ++ rl ++ ")")
-- ------------------------------------------------------------
mapping2String :: String -> [(Prefix, Uri)] -> String
mapping2String name mapping
= name ++ ": " ++
formatStringList id ", " (map (\(a, b) -> a ++ " = " ++ b) mapping)
datatype2String :: Datatype -> String
datatype2String (lib, localName)
= "datatypelibrary = " ++ getLib ++ ", type = " ++ localName
where
getLib = if lib == "" then relaxNamespace else lib
context2String :: Context -> String
context2String (base, mapping)
= "context (base-uri = " ++ base ++ ", " ++
mapping2String "namespace environment" mapping ++ ")"
-- ------------------------------------------------------------
| nfjinjing/yuuko | src/Yuuko/Text/XML/HXT/RelaxNG/PatternToString.hs | bsd-3-clause | 11,508 | 22 | 17 | 2,745 | 2,440 | 1,276 | 1,164 | 208 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.EasyBitcoin.Internal.Base58
( encodeBase58
, decodeBase58
, addRedundancy
, liftRedundacy
)
where
import qualified Data.ByteString as BS
import Data.Char (ord, chr)
import Data.Word (Word8)
import Data.Maybe (fromJust, isJust, listToMaybe)
import Numeric (showIntAtBase, readInt)
import Data.String (fromString)
import qualified Data.ByteString.Char8 as B8
import qualified Data.Text as T
import Control.Applicative
import Data.Bits
import Data.List(unfoldr)
import Network.EasyBitcoin.Internal.HashFunctions
import Network.EasyBitcoin.Internal.ByteString
-------------------------------------------------------------------------------
-----------------------------------------------------
addRedundancy :: BS.ByteString -> BS.ByteString
addRedundancy bs = BS.append bs (encode' $ chksum32 bs)
liftRedundacy :: BS.ByteString -> Maybe BS.ByteString
liftRedundacy bs = let (original,extra) = BS.splitAt (BS.length bs - 4) bs
in if encode' (chksum32 original) == extra
then Just original
else Nothing
encodeBase58::BS.ByteString -> String
encodeBase58 bs = l++r
where
(z,b) = BS.span (== 0) bs
l = replicate (BS.length z) '1' -- preserve leading 0's
r | BS.null b = ""
| otherwise = encodeBase58I $ bsToInteger b
decodeBase58::String -> Maybe BS.ByteString
decodeBase58 str = r >>= return . (BS.append prefix)
where
(z,b) = span (== '1') $ str
prefix = BS.replicate (length z) 0 -- preserve leading 1's
r | null b = Just BS.empty
| otherwise = integerToBS <$> decodeBase58I b
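-- A minimal roundtrip sketch (OverloadedStrings ByteString literal, purely illustrative):
--
-- > decodeBase58 (encodeBase58 "hello") == Just "hello"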
---------------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------------
b58Data :: BS.ByteString
b58Data = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
b58 :: Word8 -> Word8
b58 i = BS.index b58Data (fromIntegral i)
b58' :: Word8 -> Maybe Word8
b58' w = fromIntegral <$> BS.elemIndex w b58Data
encodeBase58I :: Integer -> String
encodeBase58I i = showIntAtBase (58 :: Integer) f (fromIntegral i) ""
where
f = chr . fromIntegral . b58 . fromIntegral
decodeBase58I :: String -> Maybe Integer
decodeBase58I s = case listToMaybe $ readInt 58 p f s of
Just (r,[]) -> Just r
_ -> Nothing
where
c = b58' . fromIntegral . ord
p = isJust . c
f = fromIntegral . fromJust . c
------------------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------------------
-- | Transforms a string into a strict bytestring
stringToBS :: String -> BS.ByteString
stringToBS = B8.pack
-- | Transform a strict bytestring to a string
bsToString :: BS.ByteString -> String
bsToString = B8.unpack
| vwwv/easy-bitcoin | Network/EasyBitcoin/Internal/Base58.hs | bsd-3-clause | 3,299 | 0 | 13 | 684 | 774 | 422 | 352 | 60 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
--
--
--
module Main where
import Control.Monad
import NumericPrelude
import Ray.Algebra
import Ray.Geometry
import Ray.Physics
import Ray.Light
import Ray.Optics
nphoton = 100000 :: Int
lgt = PointLight (Color 0.33 0.33 0.34) 1.0 (Vector3 0 0 0)
pt = Vector3 0 2 2
main :: IO ()
main = do
  forM_ [1..nphoton] $ \_ -> do
    (_wl, (_pos, d)) <- generatePhoton lgt
    let bucket = sortByAngle d
    putStrLn $ show bucket
pi1_8 = pi / 8.0
cos1_8 = cos (1 * pi1_8)
cos2_8 = cos (2 * pi1_8)
cos3_8 = cos (3 * pi1_8)
cos4_8 = cos (4 * pi1_8)
cos5_8 = cos (5 * pi1_8)
cos6_8 = cos (6 * pi1_8)
cos7_8 = cos (7 * pi1_8)
cos8_8 = cos (8 * pi1_8)
sortByAngle :: Direction3 -> Int
sortByAngle d
| cos > cos1_8 = 1
| cos > cos2_8 = 2
| cos > cos3_8 = 3
| cos > cos4_8 = 4
| cos > cos5_8 = 5
| cos > cos6_8 = 6
| cos > cos7_8 = 7
  | otherwise    = 8
where
cos = ey3 <.> d
| eijian/raytracer | test/TestPhoton2.hs | bsd-3-clause | 912 | 0 | 14 | 233 | 430 | 223 | 207 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RebindableSyntax #-}
module Morpher (main) where
import Prelude
import FFI
import Fay.Text
import Cinder.SVG
import Cinder.SVG.Attributes
main :: Fay ()
main = addEventListener "load" morpher False
morpher :: Fay ()
morpher = do
root >>= insert mu
return ()
where
mu = markup !+ pathD dFrom ! fill "cyan" !+ aADR "d" 5 9
! vs [showD dFrom,showD dTo,showD dFrom] !< Complete
dFrom = [M 200 200, Q 510 10 320 200, Q 510 510 320 320,
Q 10 510 200 320, Q 10 10 200 200]
dTo = [M 10 10, Q 125 75 190 10, Q 125 125 190 190,
Q 75 125 10 190, Q 75 75 10 10]
addEventListener :: Text -> Fay () -> Bool -> Fay ()
addEventListener = ffi "window['addEventListener'](%1,%2,%3)"
| crooney/cinder | examples/Morpher.hs | bsd-3-clause | 798 | 0 | 12 | 218 | 306 | 156 | 150 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Utils.Auth.Password (
hash
,validate
) where
import Data.Maybe
import Crypto.BCrypt as BC
import Data.ByteString.Char8 as BSC
defaultPolicy :: HashingPolicy
defaultPolicy =
HashingPolicy 12 (BSC.pack "$2a$")
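-- | Hash a plaintext password with 'defaultPolicy'.
-- Returns 'Nothing' if hashing fails (e.g. an invalid policy).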
hash :: String -> IO (Maybe String)
hash pass = do
cr <- BC.hashPasswordUsingPolicy defaultPolicy (BSC.pack pass)
case cr of
Just cpass -> return $ Just (BSC.unpack cpass)
    Nothing -> return Nothing
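-- | Check a plaintext password (first argument) against a stored
-- bcrypt hash (second argument).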
validate :: String -> String -> Bool
validate pass cpass =
BC.validatePassword (BSC.pack cpass) (BSC.pack pass)
| DavidAlphaFox/sblog | src/Utils/Auth/Password.hs | bsd-3-clause | 590 | 0 | 14 | 107 | 194 | 102 | 92 | 19 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Tag a Store instance with structural version info to ensure we're
-- reading a compatible format.
module Data.Store.VersionTagged
( taggedDecodeOrLoad
, taggedEncodeFile
, decodeFileMaybe
) where
import Control.Applicative
import Control.Exception.Lifted (catch, IOException, assert)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString as BS
import Data.Monoid ((<>))
import Data.Store
import Data.Store.TypeHash
import qualified Data.Text as T
import Path
import Path.IO (ensureDir)
import Prelude
-- | Write to the given file, with a binary-tagged tag.
taggedEncodeFile :: (Store a, HasTypeHash a, MonadIO m, MonadLogger m, Eq a)
=> Path Abs File
-> a
-> m ()
taggedEncodeFile fp x = do
let fpt = T.pack (toFilePath fp)
$logDebug $ "Encoding " <> fpt
ensureDir (parent fp)
let encoded = encode (Tagged x)
assert (decodeEx encoded == Tagged x) $ liftIO $ BS.writeFile (toFilePath fp) encoded
$logDebug $ "Finished writing " <> fpt
-- | Read from the given file. If the read fails, run the given action and
-- write that back to the file. Always starts the file off with the
-- version tag.
taggedDecodeOrLoad :: (Store a, HasTypeHash a, Eq a, MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> Path Abs File
-> m a
-> m a
taggedDecodeOrLoad fp mx = do
let fpt = T.pack (toFilePath fp)
$logDebug $ "Trying to decode " <> fpt
mres <- decodeFileMaybe fp
case mres of
Nothing -> do
$logDebug $ "Failure decoding " <> fpt
x <- mx
taggedEncodeFile fp x
return x
Just x -> do
$logDebug $ "Success decoding " <> fpt
return x
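-- A minimal usage sketch (hypothetical names; @cacheFile :: Path Abs File@,
-- @rebuildPlan@ an expensive action, in a MonadLogger/MonadBaseControl IO context):
--
-- > plan <- taggedDecodeOrLoad cacheFile rebuildPlan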
decodeFileMaybe :: (Store a, HasTypeHash a, MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> Path loc File
-> m (Maybe a)
decodeFileMaybe fp = do
mbs <- liftIO (Just <$> BS.readFile (toFilePath fp)) `catch` \(err :: IOException) -> do
$logDebug ("Exception ignored when attempting to load " <> T.pack (toFilePath fp) <> ": " <> T.pack (show err))
return Nothing
case mbs of
Nothing -> return Nothing
Just bs ->
liftIO (do (Tagged res) <- decodeIO bs
return (Just res)) `catch` \(err :: PeekException) -> do
let fpt = T.pack (toFilePath fp)
$logDebug ("Error while decoding " <> fpt <> ": " <> T.pack (show err) <> " (this might not be an error, when switching between stack versions)")
return Nothing
| sjakobi/stack | src/Data/Store/VersionTagged.hs | bsd-3-clause | 2,961 | 0 | 20 | 820 | 812 | 409 | 403 | 66 | 2 |
-- | None of the functions currently in here are used
-- They just show diffent options of implementing randomRs which is already part
-- of the System.Random package
import System.Random (randomR, Random, RandomGen, StdGen)
import Data.List (unfoldr)
-- | Implementation using unfoldr
-- unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
-- (unfolding function) -> seedValue -> randoms
-- unfolds a list from the seed value b
-- returning Nothing would stop unfolding the list
-- returning Just (a, b) adds a to the list and uses b as the new seed
-- to produce infinite list always return Just (a, b)
randomsUnfoldr :: (RandomGen g, Random a) => a -> a -> g -> [a]
randomsUnfoldr min max = unfoldr (Just . randomR (min, max))
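-- A usage sketch (assuming System.Random's mkStdGen for a seed):
--
-- > take 5 (randomsUnfoldr 1 6 (mkStdGen 42))   -- five pseudo-random die rolls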
-- | Implementation using recursion
-- unfoldr not really needed since we never stop generation by returning Nothing
-- produces randoms lazily
randomsRec :: (RandomGen g, Random a) => a -> a -> g -> [a]
randomsRec min max g = x : randomsRec min max g'
where (x, g') = randomR (min, max) g
-- | System.Random implementation (randomRs) is almost identical to above
randomRs :: (Random a, RandomGen g) => (a,a) -> g -> [a]
randomRs ival g = x : randomRs ival g' where (x,g') = randomR ival g
-- | My initial (pretty bad) implementation (ab)using scanl
randomsScanl :: (Random a, Num a) => StdGen -> a -> a -> [b] -> [a]
randomsScanl gen min max = map fst . tail . scanl f (0, gen)
where f (x, g) _ = randomR (min, max) g
| thlorenz/Pricetory | src/Utils/Random.hs | bsd-3-clause | 1,469 | 1 | 10 | 307 | 395 | 210 | 185 | 12 | 1 |
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE DoAndIfThenElse #-}
----------------------------------------------------------------------------
-- |
-- Module : Language.Core.Interpreter.CaseAnalysis
-- Copyright : (c) Carlos López-Camey, University of Freiburg
-- License : BSD-3
--
-- Maintainer : [email protected]
-- Stability : stable
--
--
-- Part of the interpreter that does case analysis.
-----------------------------------------------------------------------------
module Language.Core.Interpreter.CaseAnalysis where
import Control.Applicative
import DART.CmdLine(watchReductionM)
import Language.Core.Interpreter.Structures
data CaseAnalysis = CaseAnalysisResult {
analysis_expression :: Exp, -- the expression alts are matched against with
matched_alternative :: Maybe Alt, -- the alternative that matches the analysis expression's value
expression_ref :: HeapReference, -- heap ref to the evaluated analysis expression
expression_value :: Value -- the evaluated analysis expression
}
| kmels/dart-haskell | src/Language/Core/Interpreter/CaseAnalysis.hs | bsd-3-clause | 1,031 | 0 | 9 | 141 | 84 | 60 | 24 | 11 | 0 |
module Game.Poker.Simple
( simpleGame
) where
import System.Random.Shuffle
import Game.Poker.Hands
import Game.Poker.Cards
import Game.Poker.AI
simpleGame :: IO ()
simpleGame = do
putStrLn "------------------"
putStrLn "-- simple poker --"
putStrLn "------------------"
deck <- shuffleM allCards
case getHand deck of
Nothing -> error "予期せぬエラー : getHand in simpleGame"
Just res -> matchPoker res
ynQuestion "-- もっかいやる?" simpleGame (putStrLn "-- またねノシノシ")
--------
data Player = Player | Enemy deriving Eq
showPlayerName :: Player -> String
showPlayerName Player = "Your"
showPlayerName Enemy = "The opponent's"
matchPoker :: (Hand, Deck) -> IO ()
matchPoker (mhand, deck) = do
(mres, ndeck, nmhand) <- playPoker mhand deck Player
case getHand ndeck of
Nothing -> error "予期せぬエラー : getHand in matchPoker"
Just (ehand, odeck) -> do
(eres, _, nehand) <- playPoker ehand odeck Enemy
printResult nmhand nehand mres eres
playPoker :: Hand -> Deck -> Player -> IO ((PokerHand, Card), Deck, Hand)
playPoker hand deck player = do
discards <- if player == Player
then inputDisuse hand
else aiDisuse hand
case drawHand deck discards hand of
Nothing -> error "予期せぬエラー : drawHand"
Just (nhand, ndeck) -> do
let res = pokerHand nhand
return (res, ndeck, nhand)
inputDisuse :: Hand -> IO DiscardList
inputDisuse hand = do
printHand [] hand Player
putStrLn "-- 捨てるカードを選んでね"
gotDisuse <- getDiscardList hand
case gotDisuse of
Nothing -> do
putStrLn "-- 1~5の数値を並べて入力してね"
inputDisuse hand
Just disuses -> do
printHand disuses hand Player
ynQuestion "-- あなた:これでいい?" (return disuses) (inputDisuse hand)
aiDisuse :: Hand -> IO DiscardList
aiDisuse hand = do
let res = aiSelectDiscards hand
printHand res hand Enemy
putStrLn "-- あいて:これでいいよ!"
return res
----
printResult :: Hand -> Hand -> (PokerHand, Card) -> (PokerHand, Card) -> IO ()
printResult mhand ehand mres@(mph, mcard) eres@(eph, ecard) = do
putStrLn " ***** 結果発表!! *****"
printHand [] mhand Player
printHand [] ehand Enemy
putStrLn $ concat ["あなたの手札は ", show mph, " で、最強カードは ", show mcard, " でした"]
putStrLn $ concat ["あいての手札は ", show eph, " で、最強カードは ", show ecard, " でした"]
case judgeVictory mres eres of
LT -> putStrLn "あなたの負けです"
EQ -> putStrLn "引き分けです"
GT -> putStrLn "あなたの勝ちです"
printHand :: DiscardList -> Hand -> Player -> IO ()
printHand dis hand player =
putStrLn $ "-- " ++ showPlayerName player ++ "の手札 : " ++ showChangeHand dis hand
ynQuestion :: String -> IO a -> IO a -> IO a
ynQuestion s yes no = do
putStrLn $ s ++ "(y/n)"
input <- getLine
case input of
"y" -> yes
"n" -> no
_ -> do
putStrLn "-- `y`か`n`で入力してね"
ynQuestion s yes no
showChangeHand :: DiscardList -> Hand -> String
showChangeHand dis h = let
judge x = if elem x dis then " " ++ show x ++ " " else "[" ++ show x ++ "]"
in concat $ map judge (fromHand h)
| tokiwoousaka/draw-poker | src/Game/Poker/Simple.hs | bsd-3-clause | 3,298 | 0 | 15 | 684 | 1,040 | 505 | 535 | 84 | 3 |
module Slides where
import System.Environment
import System.Exit
import Util
import Data.Function (on)
import Data.List
import qualified Data.Map as Map
asIn names c = Map.findWithDefault "" c names
handoutSlide :: [(String,[String])] -> String
handoutSlide schedule =
"\\begin{frame}[shrink]\n \\fontsize{3pt}{3.6}\\selectfont\n" ++
" {\\small \\textbf{Schedule}}\n" ++
" \\begin{columns}[T]\n" ++
-- this manual flowing is terrible :(
let (schedule1,rest) = splitAt ((length schedule `div` 4) - 4) schedule in
let (schedule2,schedule3) = splitAt ((length rest `div` 2) - 4) rest in
makeColumn schedule1 ++
makeColumn schedule2 ++
makeColumn schedule3 ++
" \\end{columns}\n" ++
"\\end{frame}"
where makeColumn s = " \\begin{column}{.31\\linewidth}\n" ++
concatMap makeEntry s ++
" \\end{column}\n"
makeEntry (paper,conflicts) = " \\textbf{Paper \\#" ++ paper ++ ":} " ++
intercalate ", " conflicts ++ "\\\\\n"
conflictList :: [String] -> String
conflictList conflicts =
if null conflicts
then " \\textbf{No conflicts}"
else " \\textbf{Conflicts}:\n" ++ items conflicts
{- if length conflicts > 8
then let (cs1,cs2) = splitAt (length conflicts `div` 2) conflicts in
" \\begin{columns}[T]\n" ++
" \\begin{column}[T]{5cm}\n" ++ (items cs1) ++
" \\end{column}\n\n" ++
" \\begin{column}[T]{5cm}\n" ++ (items cs2) ++
" \\end{column}\n" ++
" \\end{columns}\n"
else -}
where items cs = " \\begin{itemize}\n" ++
concatMap conflictEntry cs ++
" \\end{itemize}\n"
conflictEntry c = " \\item " ++ c ++ "\n"
upNext :: String -> (String,[String]) -> String
upNext phrase (paper,conflicts) =
"\\begin{block}{" ++ phrase ++ ": Paper \\#" ++ paper ++ "}\n" ++
conflictList conflicts ++
"\\end{block}\n"
makeSlides :: [(String,[String])] -> String
makeSlides [] = ""
makeSlides ((paper,conflicts):rest) =
"\\begin{frame}[shrink]\n" ++
" \\frametitle{Paper \\#" ++ paper ++ "}\n\n" ++
"\\begin{columns}[T]\n\\begin{column}{.6\\linewidth}\n\\large" ++
conflictList conflicts ++
"\\end{column}\n" ++
(if not (null rest)
then "\\begin{column}{.38\\linewidth}\n" ++
(case rest of
(next1:next2:_) -> "\\tiny\n" ++ upNext "Up next" next1 ++ upNext "And then" next2
[next] -> "\\tiny\n" ++ upNext "Up next (last paper)" next) ++
"\\end{column}\n"
else "") ++
"\\end{columns}\n\\end{frame}\n\n" ++
makeSlides rest
slideDeck :: String -> String
slideDeck slides =
"\\documentclass{beamer}\n\n" ++
"\\usepackage{pgfpages}\n" ++
"\\pgfpagesuselayout{resize to}[a4paper, border shrink=5mm, landscape]\n\n" ++
"\\mode<presentation>\n" ++
"{\n" ++
"\\usetheme{Philly}\n\\setbeamercovered{invisible}\n\\setbeamertemplate{navigation symbols}{}\n" ++
"}\n\n"++
"\\begin{document}\n" ++ slides ++ "\\end{document}\n"
main :: IO ()
main = do
args <- getArgs
case args of
[scheduleFile,pcFile] -> do
-- read in the data
rawSchedule <- readCSV scheduleFile []
pc <- readCSV pcFile pcHeaders
-- pc members
let shortNames = Map.fromList $
map (\[first,last,email,_] -> (email,[head first] ++ ". " ++ last)) pc
let names = Map.fromList $
map (\[first,last,email,_] -> (email,first ++ " " ++ last)) pc
let scheduleWith n = map (\(paper:rawConflicts) ->
(paper,sort $ map (asIn n) rawConflicts))
rawSchedule
putStr $ slideDeck $
handoutSlide (scheduleWith shortNames) ++
makeSlides (scheduleWith names)
exitSuccess
_ -> do
name <- getProgName
putStrLn $ "Usage: " ++ name ++ " [schedule] [pc list]"
exitFailure
| mgree/conflict | slides.hs | bsd-3-clause | 3,975 | 0 | 23 | 1,052 | 949 | 494 | 455 | 88 | 3 |
module Physics.Plucker
( Plucker(..)
, squaredError
, isotropic
, (><)
, plucker
, intersects
) where
import Control.Applicative
import Data.Distributive
import Data.Foldable as Foldable
import Data.Semigroup
import Data.Traversable
import Physics.Epsilon
import Physics.Metric
import Control.Lens.Rep
import Physics.V4
-- Plücker coordinates
data Plucker a = Plucker a a a a a a deriving (Eq,Ord,Show,Read)
instance Functor Plucker where
fmap g (Plucker a b c d e f) = Plucker (g a) (g b) (g c) (g d) (g e) (g f)
instance Applicative Plucker where
pure a = Plucker a a a a a a
Plucker a b c d e f <*> Plucker g h i j k l =
Plucker (a g) (b h) (c i) (d j) (e k) (f l)
instance Monad Plucker where
return a = Plucker a a a a a a
(>>=) = bindRep
instance Distributive Plucker where
distribute = distributeRep
instance Representable Plucker where
rep f = Plucker (f p01) (f p02) (f p03) (f p23) (f p31) (f p12)
instance Foldable Plucker where
foldMap g (Plucker a b c d e f) =
g a `mappend` g b `mappend` g c `mappend` g d `mappend` g e `mappend` g f
instance Traversable Plucker where
traverse g (Plucker a b c d e f) =
Plucker <$> g a <*> g b <*> g c <*> g d <*> g e <*> g f
instance Num a => Num (Plucker a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
negate = fmap negate
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
instance Fractional a => Fractional (Plucker a) where
recip = fmap recip
(/) = liftA2 (/)
fromRational = pure . fromRational
plucker :: Num a => V4 a -> V4 a -> Plucker a
plucker (V4 a b c d)
(V4 e f g h) =
Plucker (a*f-b*e)
(a*g-c*e)
          (a*h-d*e)
(c*h-d*g)
(d*f-b*h)
(b*g-c*f)
p01, p02, p03, p23, p31, p12 :: Functor f => (a -> f a) -> Plucker a -> f (Plucker a)
p01 g (Plucker a b c d e f) = (\a' -> Plucker a' b c d e f) <$> g a
p02 g (Plucker a b c d e f) = (\b' -> Plucker a b' c d e f) <$> g b
p03 g (Plucker a b c d e f) = (\c' -> Plucker a b c' d e f) <$> g c
p23 g (Plucker a b c d e f) = (\d' -> Plucker a b c d' e f) <$> g d
p31 g (Plucker a b c d e f) = (\e' -> Plucker a b c d e' f) <$> g e
p12 g (Plucker a b c d e f) = Plucker a b c d e <$> g f
{-# INLINE p01 #-}
{-# INLINE p02 #-}
{-# INLINE p03 #-}
{-# INLINE p23 #-}
{-# INLINE p31 #-}
{-# INLINE p12 #-}
-- | Valid Plücker coordinates @p@ will have @squaredError p == 0@
-- That said, floating point makes a mockery of this claim.
squaredError :: (Eq a, Num a) => Plucker a -> a
squaredError v = v >< v
-- | This isn't the actual metric because this bilinear form gives rise to an isotropic quadratic space
infixl 5 ><
(><) :: Num a => Plucker a -> Plucker a -> a
Plucker a b c d e f >< Plucker g h i j k l = a*j+b*k+c*l+d*g+e*h+f*i
isotropic :: Epsilon a => Plucker a -> Bool
isotropic a = nearZero (a >< a)
intersects :: Epsilon a => Plucker a -> Plucker a -> Bool
intersects a b = nearZero (a >< b)
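-- A small sketch (hypothetical points): two lines through the point (1,0,0)
-- meet, while a line along y at x=1,z=0 misses a line along x at y=0,z=1.
--
-- > intersects (plucker (V4 1 0 0 1) (V4 1 1 0 1)) (plucker (V4 1 0 0 1) (V4 1 0 1 1))   -- True
-- > intersects (plucker (V4 1 0 0 1) (V4 1 1 0 1)) (plucker (V4 0 0 1 1) (V4 1 0 1 1))   -- False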
instance Metric Plucker where
dot (Plucker a b c d e f) (Plucker g h i j k l) = a*g+b*h+c*i+d*j+e*k+f*l
instance Epsilon a => Epsilon (Plucker a) where
nearZero = nearZero . quadrance
-- TODO: drag some stuff out of my thesis
| ekmett/physics | Physics/Plucker.hs | bsd-3-clause | 3,180 | 0 | 16 | 805 | 1,620 | 834 | 786 | 82 | 1 |
{-# LANGUAGE FunctionalDependencies, FlexibleInstances, FlexibleContexts #-}
module Data.SetLike where
import Data.Hashable
import Data.HashSet (HashSet(..))
import qualified Data.HashSet as HS
import Data.Set (Set)
import qualified Data.Set as S
import Data.IntSet (IntSet)
import qualified Data.IntSet as IS
import Data.Monoid
import qualified Data.MapLike as ML
class Monoid full => SetLike full item | full -> item where
member :: item -> full -> Bool
singleton :: item -> full
singleton = flip insert mempty
(\\) :: full -> full -> full
insert :: item -> full -> full
insert = mappend . singleton
delete :: item -> full -> full
delete = flip (\\) . singleton
instance (Eq item, Hashable item) => SetLike (HashSet item) item where
member = HS.member
singleton = HS.singleton
(\\) = HS.difference
instance (Ord item) => SetLike (Set item) item where
member = S.member
singleton = S.singleton
(\\) = (S.\\)
instance SetLike IntSet Int where
member = IS.member
singleton = IS.singleton
(\\) = (IS.\\)
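-- A minimal usage sketch (any instance works the same way, e.g. IntSet):
--
-- > member 3 (insert 3 (mempty :: IntSet))      -- True
-- > member 3 (delete 3 (singleton 3 :: IntSet)) -- False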
| ppetr/map-like | Data/SetLike.hs | bsd-3-clause | 1,205 | 0 | 8 | 358 | 342 | 201 | 141 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module InstTerms (instTerms) where
import qualified Data.Char as Char
import qualified JVM.Assembler as J
import qualified Koshucode.Baala.Core as K
import qualified Content as K
op :: (K.CText c) => String -> (String, c)
op name = ("op", K.pText name)
op0 :: (K.CContent c) => String -> [(String, c)]
op0 name = [op name]
op1 :: (K.CContent c) => String -> c -> [(String, c)]
op1 name x1 = [op name, ("x1", x1)]
op2 :: (K.CContent c) => String -> c -> c -> [(String, c)]
op2 name x1 x2 = [op name, ("x1", x1), ("x2", x2)]
op3 :: (K.CContent c) => String -> c -> c -> c -> [(String, c)]
op3 name x1 x2 x3 = [op name, ("x1", x1), ("x2", x2), ("x3", x3)]
op4 :: (K.CContent c) => String -> c -> c -> c -> c -> [(String, c)]
op4 name x1 x2 x3 x4 = [op name, ("x1", x1), ("x2", x2), ("x3", x3), ("x4", x4)]
cmp :: J.CMP -> String
cmp J.C_EQ = "eq"
cmp J.C_NE = "ne"
cmp J.C_LT = "lt"
cmp J.C_GE = "ge"
cmp J.C_GT = "gt"
cmp J.C_LE = "le"
imm :: J.IMM -> String
imm J.I0 = "0"
imm J.I1 = "1"
imm J.I2 = "2"
imm J.I3 = "3"
instTerms :: (K.CContent c) => J.Instruction -> [(String, c)]
instTerms (J.BIPUSH x1) = op1 "bipush" $ K.pWord8 x1
instTerms (J.SIPUSH x1) = op1 "sipush" $ K.pWord16 x1
instTerms (J.LDC1 x1) = op1 "ldc1" $ K.pWord8 x1
instTerms (J.LDC2 x1) = op1 "ldc2" $ K.pWord16 x1
instTerms (J.LDC2W x1) = op1 "ldc2w" $ K.pWord16 x1
instTerms (J.ILOAD x1) = op1 "iload" $ K.pWord8 x1
instTerms (J.LLOAD x1) = op1 "lload" $ K.pWord8 x1
instTerms (J.FLOAD x1) = op1 "fload" $ K.pWord8 x1
instTerms (J.DLOAD x1) = op1 "dload" $ K.pWord8 x1
instTerms (J.ALOAD x1) = op1 "aload" $ K.pWord8 x1
instTerms (J.ILOAD_ i) = op0 ("iload_" ++ imm i)
instTerms (J.LLOAD_ i) = op0 ("lload_" ++ imm i)
instTerms (J.FLOAD_ i) = op0 ("fload_" ++ imm i)
instTerms (J.DLOAD_ i) = op0 ("dload_" ++ imm i)
instTerms (J.ALOAD_ i) = op0 ("aload_" ++ imm i)
instTerms (J.ISTORE x1) = op1 "istore" $ K.pWord8 x1
instTerms (J.LSTORE x1) = op1 "lstore" $ K.pWord8 x1
instTerms (J.FSTORE x1) = op1 "fstore" $ K.pWord8 x1
instTerms (J.DSTORE x1) = op1 "dstore" $ K.pWord8 x1
instTerms (J.ASTORE x1) = op1 "astore" $ K.pWord8 x1
instTerms (J.ISTORE_ i) = op0 ("istore_" ++ imm i)
instTerms (J.LSTORE_ i) = op0 ("lstore_" ++ imm i)
instTerms (J.FSTORE_ i) = op0 ("fstore_" ++ imm i)
instTerms (J.DSTORE_ i) = op0 ("dstore_" ++ imm i)
instTerms (J.ASTORE_ i) = op0 ("astore_" ++ imm i)
instTerms (J.IINC x1 x2) = op2 "iinc" (K.pWord8 x1) (K.pWord8 x2)
instTerms (J.FCMP c) = op0 ("fcmp_" ++ cmp c)
instTerms (J.DCMP c) = op0 ("dcmp_" ++ cmp c)
instTerms (J.IF c x1) = op1 ("if" ++ cmp c) (K.pWord16 x1)
instTerms (J.IF_ICMP c x1) = op1 ("if_icmp" ++ cmp c) $ K.pWord16 x1
instTerms (J.IF_ACMP c x1) = op1 ("if_acmp" ++ cmp c) $ K.pWord16 x1
instTerms (J.GOTO x1) = op1 "goto" $ K.pWord16 x1
instTerms (J.JSR x1) = op1 "jsr" $ K.pWord16 x1
instTerms (J.TABLESWITCH x1 x2 x3 x4 _) = op4 "tableswitch" (K.pWord8 x1) (K.pWord32 x2) (K.pWord32 x3) (K.pWord32 x4)
instTerms (J.LOOKUPSWITCH x1 x2 x3 _) = op3 "lookupswitch" (K.pWord8 x1) (K.pWord32 x2) (K.pWord32 x3)
instTerms (J.GETSTATIC x1) = op1 "getstatic" $ K.pWord16 x1
instTerms (J.PUTSTATIC x1) = op1 "putstatic" $ K.pWord16 x1
instTerms (J.GETFIELD x1) = op1 "getfield" $ K.pWord16 x1
instTerms (J.PUTFIELD x1) = op1 "putfield" $ K.pWord16 x1
instTerms (J.INVOKEVIRTUAL x1) = op1 "invokevirtual" $ K.pWord16 x1
instTerms (J.INVOKESPECIAL x1) = op1 "invokespecial" $ K.pWord16 x1
instTerms (J.INVOKESTATIC x1) = op1 "invokestatic" $ K.pWord16 x1
instTerms (J.INVOKEINTERFACE x1 x2) = op2 "invokeinterface" (K.pWord16 x1) (K.pWord8 x2)
instTerms (J.NEW x1) = op1 "new" $ K.pWord16 x1
instTerms (J.NEWARRAY x1) = op1 "newarray" $ K.pWord8 x1
instTerms (J.ANEWARRAY x1) = op1 "anewarray" $ K.pWord16 x1
instTerms (J.CHECKCAST x1) = op1 "checkcast" $ K.pWord16 x1
instTerms (J.INSTANCEOF x1) = op1 "instanceof" $ K.pWord16 x1
instTerms (J.WIDE x1 i) = op1 "wide" $ K.pWord8 x1
instTerms (J.MULTINANEWARRAY x1 x2) = op2 "multinanewarray" (K.pWord16 x1) (K.pWord8 x2)
instTerms (J.IFNULL x1) = op1 "ifnull" $ K.pWord16 x1
instTerms (J.IFNONNULL x1) = op1 "ifnonnull" $ K.pWord16 x1
instTerms (J.GOTO_W x1) = op1 "goto_w" $ K.pWord32 x1
instTerms (J.JSR_W x1) = op1 "jsr_w" $ K.pWord32 x1
instTerms name = op0 $ map Char.toLower $ show name
| seinokatsuhiro/koshu-java-tool | InstTerms.hs | bsd-3-clause | 5,175 | 0 | 12 | 1,642 | 2,296 | 1,146 | 1,150 | 86 | 1 |
-- | Main module
module Main
(
main
)
where
import Data.Tuple.Utils (fst3)
import Data.Maybe
import Data.Word (Word32, Word16)
import Data.ByteString.Char8 (unpack)
import System.IO (Handle, hClose, hSetBinaryMode)
import System.Console.Haskeline (getInputLine)
import Control.Monad (unless, liftM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (get)
import Control.Monad.IO.Class(liftIO, MonadIO)
import Control.Exception (bracket)
import Network (withSocketsDo, PortNumber, PortID(PortNumber), HostName,
sClose, accept, listenOn)
import App (App, runApp, FileEntry(..), addFileEntry, getFileEntry,
AppState(..), setLastCmd, setStack, Breakpoint(..), removeBreakpoint)
import IMsg (IMsg(..))
import UCmd (UCmd(..), parseUCmd, InfoCmd(..))
import Print (doPrint)
import Proto (setDebuggerOption, nextMsg, execContinue, execNext, execStep, execFinish,
setBreakpoint, deleteBreakpoint)
-- | Entry point
main :: IO ()
main = withSocketsDo $ printHello >> bracket
acceptPlayer
(hClose . fst3)
(start . fst3)
where
start h = do
putStrLn "Enter \'help\' for list of commands"
hSetBinaryMode h True
runApp h app
-- | Print hello message
printHello :: IO ()
printHello = do
putStrLn "HFB: Flash Debugger version 0.0.1"
putStrLn "Copyright (c) 2011 Yuras Shumovich"
putStrLn "mailto:[email protected]"
-- | Print list of commands
printHelp :: IO ()
printHelp = do
printHello
putStrLn "List of commands:"
putStrLn "\thelp print this help"
putStrLn "\tquit quit hfd"
putStrLn "\tcontinue continue execution until breakpoint hit"
putStrLn "\tstep continue execution until different source line reached"
putStrLn "\tnext continue execution until next source line reached"
putStrLn "\tinfo files show all source files"
putStrLn "\tinfo breakpoints show all breakpoints"
putStrLn "\tbreakpoint <fileID>:<line> set breakpoint at the location, e.g. \'b #1:23\'"
putStrLn "\t use \'info files\' to get fileID"
putStrLn "\tdelete <breakpointID> delete breakpoint by ID"
putStrLn "\t use \'info breakpoints\' to get breakpoint ID"
putStrLn "\tdelete delete all breakpoints"
putStrLn "\tprint <name>[.name]* inspect variables"
putStrLn "\tbacktrace (bt) show call stack"
putStrLn "Shortcuts are allowed, e.g. \'c\', \'co\', \'cont\', etc will mean \'continue\'"
-- | Listen on port, accept just one client and close socket
acceptPlayer :: IO (Handle, HostName, PortNumber)
acceptPlayer = bracket
(listenOn (PortNumber 7935))
sClose
(\s -> putStrLn "Waiting for player..." >> accept s)
-- | Main app
app :: App IO ()
app = do
processUntillBreak
setDebuggerOption "break_on_fault" "on"
-- doSetDebuggerOption "disable_script_stuck" "on"
-- doSetDebuggerOption "disable_script_stuck_dialog" "on"
-- doSetDebuggerOption "enumerate_override" "on"
setDebuggerOption "notify_on_failure" "on"
-- doSetDebuggerOption "invoke_setters" "on"
-- doSetDebuggerOption "swf_load_messages" "on"
loop
where
loop = do
exit <- processUserInput
unless exit (processUntillBreak >> loop)
-- | Process player's messages until 'IMsgBreakHitEx' received
processUntillBreak :: App IO ()
processUntillBreak = do
msg <- nextMsg
case msg of
IMsgBreakHitEx _ _ stack -> processBreak stack >> printSourceLine msg
IMsgSwdFileEntry _ _ _ _ _ -> processFileEntry msg >> processUntillBreak
IMsgException _ _ _ -> processException msg >> processUntillBreak
_ -> processUntillBreak
-- | Save current stack
processBreak :: Monad m => [(Word16, Word16, Word32, String)] -> App m ()
processBreak = setStack . map toStack
where
toStack (fl, ln, _, fn) = (fromIntegral fl, fromIntegral ln, fn)
-- | Print information about exception
processException :: MonadIO m => IMsg -> App m ()
processException (IMsgException _ msg _) = do
liftIO $ putStrLn " [exception]"
liftIO $ putStrLn msg
processException _ = error "processException: something is wrong"
-- | Print current source line
printSourceLine :: IMsg -> App IO ()
printSourceLine (IMsgBreakHitEx file line _) = do
files <- lift . lift $ fmap asFiles get
let mln = srcLine files
if isJust mln
then liftIO $ putStrLn $ " " ++ show line ++ ": " ++ fromJust mln
else liftIO $ putStrLn "No source"
where
srcLine files = do
FileEntry _ content <- lookup (fromIntegral file) files
let lln = take 1 $ drop (fromIntegral line - 1) content
if null lln
then Nothing
else Just $ head lln
printSourceLine _ = error "printSourceLine: something is wrong..."
-- | Read file content and add new file entry
processFileEntry :: MonadIO m => IMsg -> App m ()
processFileEntry (IMsgSwdFileEntry idi _ nm _ _) = do
content <- liftIO readFile'
addFileEntry (fromIntegral idi, FileEntry name (lines content))
where
name = unpack nm
path = map fixup name
fixup ';' = '/'
fixup ch = ch
readFile' = catch (readFile path) (const $ return "")
processFileEntry _ = error "processFileEntry: something is wrong..."
-- | Read user command and process it
processUserInput :: App IO Bool
processUserInput = do
l <- lift $ getInputLine "hfb> "
let cmd = l >>= parseUCmd
setLastCmd cmd
if isNothing cmd
then liftIO (putStrLn "Unknown command") >> processUserInput
else processCmd (fromJust cmd)
-- | Actually process user command
processCmd :: UCmd -- ^ User command
-> App IO Bool -- ^ whether to exit
processCmd UCmdEmpty = do
cmd <- lift . lift $ liftM asLastCmd get
if isJust cmd
then processCmd (fromJust cmd)
else processUserInput
processCmd UCmdQuit = return True
processCmd UCmdContinue = execContinue >> return False
processCmd UCmdStep = execStep >> return False
processCmd UCmdNext = execNext >> return False
processCmd UCmdFinish = execFinish >> return False
processCmd (UCmdInfo cmd) = processInfoCmd cmd >> processUserInput
processCmd (UCmdPrint v) = doPrint v >> processUserInput
processCmd (UCmdBreakpoint fl ln) = setBreakpoint fl ln >> processUserInput
processCmd UCmdStack = printStack >> processUserInput
processCmd UCmdList = listSource >> processUserInput
processCmd (UCmdDelete (Just iD)) = deleteBP iD >> processUserInput
processCmd (UCmdDelete Nothing) = deleteAll >> processUserInput
processCmd UCmdTest = processUserInput
processCmd UCmdHelp = liftIO printHelp >> processUserInput
-- | Delete breakpoint by id
deleteBP :: MonadIO m => Int -> App m ()
deleteBP iD = do
bs <- lift . lift $ liftM asBreaks get
let bp = lookup iD bs
if isJust bp
then do
let Breakpoint fl ln = fromJust bp
deleteBreakpoint fl ln
removeBreakpoint iD
else liftIO $ putStrLn "Unknown breakpoint id. Type \"info breakpoints\" for list of all breakpoints"
-- | Delete all breakpoints
deleteAll :: MonadIO m => App m ()
deleteAll = do
bs <- lift . lift $ liftM asBreaks get
mapM_ (deleteBP . fst) bs
-- | Print source around current position
listSource :: MonadIO m => App m ()
listSource = do
stack <- lift . lift $ liftM asStack get
if null stack
then liftIO $ putStrLn "No source"
else print' $ head stack
where
print' (fl, ln, _) = do
fs <- lift . lift $ liftM asFiles get
let f = lookup fl fs
if isNothing f
then liftIO $ putStrLn "No source"
else liftIO $ mapM_ printLine $ take 11 $ drop (ln - 6) (zip allLines $ feContent $ fromJust f)
printLine (ln, cont) = putStrLn $ " " ++ show ln ++ ": " ++ cont
allLines :: [Int]
allLines = [1..]
-- | Print current stack
printStack :: MonadIO m => App m ()
printStack = do
state <- lift . lift $ get
let stack = asStack state
let files = asFiles state
liftIO $ mapM_ (print' files) stack
where
print' fs (fl, ln, fn) = putStrLn $ fn ++ "() at " ++ file fs fl ++ ":" ++ show ln
file fs fl = let fe = lookup fl fs in
if isJust fe
then fePath (fromJust fe)
else "(no source)"
-- | Process @info@ command
processInfoCmd :: MonadIO m => InfoCmd -> App m ()
processInfoCmd ICFiles = printFiles
where
printFiles = do
files <- lift . lift $ liftM asFiles get
liftIO $ mapM_ printFile files
printFile (idi, FileEntry name _) =
putStrLn $ "#" ++ show idi ++ ": " ++ name
processInfoCmd ICBreakpoints = do
bs <- lift . lift $ liftM asBreaks get
mapM_ printBP bs
where
printBP (iD, Breakpoint fl ln) = do
fe <- liftM fromJust $ getFileEntry fl
liftIO $ putStrLn $ " " ++ show iD ++ "\t: " ++ fePath fe ++ " at line " ++ show ln
| Yuras/hfd | src/hfd.hs | bsd-3-clause | 8,936 | 0 | 16 | 2,154 | 2,483 | 1,232 | 1,251 | 192 | 4 |
{-# LANGUAGE GADTs #-}
-- {-# LANGUAGE Strict #-}
-- Fast type-aligned queue optimized to effectful functions
-- (a -> m b)
-- (monad continuations have this type).
-- Constant-time append and snoc and
-- average constant-time left-edge deconstruction
module Data.Iota.FTCQueue1 (
FTCQueue,
tsingleton,
(|>), -- snoc
(><), -- append
ViewL(..),
tviewl
)
where
-- Non-empty tree. Deconstruction operations make it more and more
-- left-leaning
data FTCQueue m a b where
Leaf :: (a -> m b) -> FTCQueue m a b
Node :: FTCQueue m a x -> FTCQueue m x b -> FTCQueue m a b
-- Exported operations
-- There is no tempty: use (tsingleton return), which works just the same.
-- The names are chosen for compatibility with FastTCQueue
{-# INLINE tsingleton #-}
tsingleton :: (a -> m b) -> FTCQueue m a b
tsingleton r = Leaf r
-- snoc: clearly constant-time
{-# INLINE (|>) #-}
(|>) :: FTCQueue m a x -> (x -> m b) -> FTCQueue m a b
t |> r = Node t (Leaf r)
-- append: clearly constant-time
{-# INLINE (><) #-}
(><) :: FTCQueue m a x -> FTCQueue m x b -> FTCQueue m a b
t1 >< t2 = Node t1 t2
-- Left-edge deconstruction
data ViewL m a b where
TOne :: (a -> m b) -> ViewL m a b
(:|) :: (a -> m x) -> (FTCQueue m x b) -> ViewL m a b
{-# INLINABLE tviewl #-}
tviewl :: FTCQueue m a b -> ViewL m a b
tviewl (Leaf r) = TOne r
tviewl (Node t1 t2) = go t1 t2
where
go :: FTCQueue m a x -> FTCQueue m x b -> ViewL m a b
go (Leaf r) tr = r :| tr
go (Node tl1 tl2) tr = go tl1 (Node tl2 tr)
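-- A small usage sketch (not part of the original API; the names 'runQueue'
-- and 'example' are illustrative only). A queue of Kleisli arrows is consumed
-- from the left edge with 'tviewl', feeding each intermediate result to the
-- remaining queue, while building the queue stays cheap per '|>' / '><'.
runQueue :: Monad m => FTCQueue m a b -> a -> m b
runQueue q a = case tviewl q of
  TOne k -> k a                  -- only one continuation left: run it
  k :| t -> k a >>= runQueue t   -- run the head, pipe its result into the rest

example :: Maybe Int
example = runQueue (tsingleton Just |> (Just . (+ 1)) |> (Just . (* 2))) 3
-- example evaluates to Just 8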
| AaronFriel/eff-experiments | src/Data/Iota/FTCQueue1.hs | bsd-3-clause | 1,516 | 0 | 9 | 358 | 496 | 266 | 230 | 29 | 2 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-# LANGUAGE DataKinds #-}
-----------------------------------------------------------------------------
--
-- Module : MathTests.Optimization
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <[email protected]>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <[email protected]>
-- Stability : Experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module MathTests.Optimization where
import Control.Monad (liftM)
import VectorTests.VectorGenerators ()
import Test.Framework
import Geometry.Math.Optimization
import Geometry.Space.Approximate
import Geometry.Space.Types
--import Geometry.Space.Tensor
--import Debug.Trace
-- | specify precision for the tests
eps :: Double
eps = 0.00001
testApprox :: Approximately Double Bool -> Bool
testApprox = flip runApprox eps
prop_optimization1D0 :: Double -> Double -> Double -> Vector2 Double -> Bool
prop_optimization1D0 a' b c (Vector2 x0 x1) = testApprox $ minimize1Dbounded f (xmin,xmax) >>= areClose' rx
where f = F0D1 (\x -> a * x * x + b * x + c)
a = abs a' + 1
rx = min xmax . max xmin $ -b/2/a
xmin = min x0 x1
xmax = max x0 x1
prop_optimization1D1 :: Double -> Double -> Double -> Double -> Bool
prop_optimization1D1 a' b c x0 = testApprox $ minimize1D f x0 >>= areClose' rx
where f = F1D1 (\x -> a * x * x + b * x + c) (\x -> 2 * a * x + b)
a = abs a' + 1
rx = -b/2/a
prop_optimization2D1 :: Vector2 Double -> Bool
prop_optimization2D1 x0 = testApprox $ minimize testFunc2D1 x0 >>= areClose testFunc2DXmin
prop_optimization2D2 :: Vector2 Double -> Bool
prop_optimization2D2 x0 = testApprox $ minimize testFunc2D2 x0 >>= areClose testFunc2DXmin
prop_optimization3D1 :: Vector3 Double -> Bool
prop_optimization3D1 x0 = testApprox $ do
e <- getEps
xmin <- minimize testFunc3D1 x0
let rezs = liftM or $ mapM (areClose xmin) testFunc3DXmin
return $ runApprox rezs (e*4000)
prop_optimization3D2 :: Vector3 Double -> Bool
prop_optimization3D2 x0 = testApprox $ do
e <- getEps
xmin <- minimize testFunc3D1 x0
let rezs = liftM or $ mapM (areClose xmin) testFunc3DXmin
return $ runApprox rezs (e*1000)
--instance (Arbitrary x) => Arbitrary (NumericFunction1D (r::Nat) x) where
-- arbitrary x = do
-- Positive i' <- arbitrary
-- let i = n `mod` i + 1
-- f = funcs i
-- Positive c1 <- arbitrary
-- Positive c2 <- arbitrary
-- Positive c3 <- arbitrary
-- Positive c4 <- arbitrary
--
-- return $ F0D1 f
-- where n = length funcs
-- funcs = [ \a b _ _ x -> abs a * x*x + b*y
-- , \a b _ _ x -> abs a * abs x ** b
-- ]
-- TEST FUNCTIONS BELOW
testFunc3DXmin :: [Vector3 Double]
testFunc3DXmin = Vector3 0 (1.25**(1/3)) 0 -- this one is a saddle point, but I am not going to make the algorithm too complicated
: zipWith3 Vector3 x y z
where z1 = ( 5*sqrt 2 / 127 ) ** (1/3)
z2 = ( 5*sqrt 2 / 129 ) ** (1/3)
d2 = 2*sqrt 2
z = [z1,z2,-z1,-z2] >>= \t -> [t,t]
y = [z1,z2,-z1,-z2] >>= \t -> [-t*d2, t*d2]
x = map negate . map (/2) $ zipWith (*) y z
testFunc3D0 :: NumericFunction 0 3 Double
testFunc3D0 = F0 f where F2 f _ _ = testFunc3D2
testFunc3D1 :: NumericFunction 1 3 Double
testFunc3D1 = F1 f df where F2 f df _ = testFunc3D2
testFunc3D2 :: NumericFunction 2 3 Double
testFunc3D2 = F2 f df ddf
where f (Vector3 x y z) = x*x + x*y*z + z*z*z*z - 10*y + 2*y*y*y*y
df (Vector3 x y z) = Vector3 (2*x + y*z)
(x*z + 8*y*y*y - 10)
(x*y + 4*z*z*z)
ddf (Vector3 x y z) = Matrix3x3 f11 f12 f13
f12 f22 f23
f13 f23 f33
where f11 = 2
f12 = z
f13 = y
f22 = 24*y*y
f23 = x
f33 = 12*z*z
testFunc2DXmin :: Vector2 Double
testFunc2DXmin = Vector2 0 2.5
testFunc2D0 :: NumericFunction 0 2 Double
testFunc2D0 = F0 f where F2 f _ _ = testFunc2D2
testFunc2D1 :: NumericFunction 1 2 Double
testFunc2D1 = F1 f df where F2 f df _ = testFunc2D2
testFunc2D2 :: NumericFunction 2 2 Double
testFunc2D2 = F2 f df ddf
where f (Vector2 x y) = 0.5*x*x*x*x + x*x*y - 10*y + 2*y*y
df (Vector2 x y) = Vector2 (2*x*x*x + 2*x*y)
(x*x + 4*y - 10)
ddf (Vector2 x y) = Matrix2x2 f11 f12
f12 f22
where f11 = 6*x*x + 2*y
f12 = 2*x
f22 = 4
| achirkin/fgeom | test/MathTests/Optimization.hs | bsd-3-clause | 4,873 | 0 | 21 | 1,565 | 1,569 | 818 | 751 | 85 | 1 |
module Syntax where
data Expr = Number Int
| Variable Char
| Power Expr Expr
| Plus Expr Expr
| Mult Expr Expr
deriving(Eq, Show)
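-- Example (illustrative, not part of the original module): the expression
-- 1 + 2 * x^3 encoded as an 'Expr' value.
sample :: Expr
sample = Plus (Number 1) (Mult (Number 2) (Power (Variable 'x') (Number 3)))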
| asi1024/haskell-differentiator | src/Syntax.hs | bsd-3-clause | 183 | 0 | 6 | 78 | 50 | 29 | 21 | 7 | 0 |
module Opaleye.Internal.Sql where
import Prelude hiding (product)
import qualified Opaleye.Internal.PrimQuery as PQ
import qualified Opaleye.Internal.HaskellDB.PrimQuery as HPQ
import Opaleye.Internal.HaskellDB.PrimQuery (Symbol(Symbol))
import qualified Opaleye.Internal.HaskellDB.Sql as HSql
import qualified Opaleye.Internal.HaskellDB.Sql.Default as SD
import qualified Opaleye.Internal.HaskellDB.Sql.Generate as SG
import qualified Opaleye.Internal.Tag as T
import qualified Data.List.NonEmpty as NEL
import qualified Data.Maybe as M
import qualified Control.Arrow as Arr
data Select = SelectFrom From
| Table HSql.SqlTable
| SelectJoin Join
| SelectValues Values
| SelectBinary Binary
| SelectLabel Label
deriving Show
data From = From {
attrs :: [(HSql.SqlExpr, Maybe HSql.SqlColumn)],
tables :: [Select],
criteria :: [HSql.SqlExpr],
groupBy :: [HSql.SqlExpr],
orderBy :: [(HSql.SqlExpr, HSql.SqlOrder)],
limit :: Maybe Int,
offset :: Maybe Int
}
deriving Show
data Join = Join {
jJoinType :: JoinType,
jAttrs :: [(HSql.SqlExpr, Maybe HSql.SqlColumn)],
jTables :: (Select, Select),
jCond :: HSql.SqlExpr
}
deriving Show
data Values = Values {
vAttrs :: [(HSql.SqlExpr, Maybe HSql.SqlColumn)],
vValues :: [[HSql.SqlExpr]]
} deriving Show
data Binary = Binary {
bOp :: BinOp,
bSelect1 :: Select,
bSelect2 :: Select
} deriving Show
data JoinType = LeftJoin | RightJoin | FullJoin deriving Show
data BinOp = Except | ExceptAll | Union | UnionAll | Intersect | IntersectAll deriving Show
data TableName = String
data Returning a = Returning a [HSql.SqlExpr]
data Label = Label {
lLabel :: String,
lSelect :: Select
} deriving Show
sqlQueryGenerator :: PQ.PrimQueryFold Select
sqlQueryGenerator = (unit, baseTable, product, aggregate, order, limit_, join,
values, binary, label)
sql :: ([HPQ.PrimExpr], PQ.PrimQuery, T.Tag) -> Select
sql (pes, pq, t) = SelectFrom $ newSelect { attrs = makeAttrs pes
, tables = [pqSelect] }
where pqSelect = PQ.foldPrimQuery sqlQueryGenerator pq
makeAttrs = flip (zipWith makeAttr) [1..]
makeAttr pe i = sqlBinding (Symbol ("result" ++ show (i :: Int)) t, pe)
unit :: Select
unit = SelectFrom newSelect { attrs = [(HSql.ConstSqlExpr "0", Nothing)] }
baseTable :: String -> [(Symbol, HPQ.PrimExpr)] -> Select
baseTable name columns = SelectFrom $
newSelect { attrs = map sqlBinding columns
, tables = [Table name] }
product :: NEL.NonEmpty Select -> [HPQ.PrimExpr] -> Select
product ss pes = SelectFrom $
newSelect { tables = NEL.toList ss
, criteria = map sqlExpr pes }
aggregate :: [(Symbol, (Maybe (HPQ.AggrOp, [HPQ.OrderExpr]), HPQ.PrimExpr))] -> Select -> Select
aggregate aggrs s = SelectFrom $ newSelect { attrs = (map attr aggrs)
, tables = [s]
, groupBy = groupBy' }
where groupBy' = (map sqlExpr
. map expr
. filter (M.isNothing . aggrOp)) aggrs
attr = sqlBinding . Arr.second (uncurry aggrExpr)
expr (_, (_, e)) = e
aggrOp (_, (x, _)) = x
aggrExpr :: Maybe (HPQ.AggrOp, [HPQ.OrderExpr]) -> HPQ.PrimExpr -> HPQ.PrimExpr
aggrExpr = maybe id (\(op, ord) e -> HPQ.AggrExpr op e ord)
order :: [HPQ.OrderExpr] -> Select -> Select
order oes s = SelectFrom $
newSelect { tables = [s]
, orderBy = map (SD.toSqlOrder SD.defaultSqlGenerator) oes }
limit_ :: PQ.LimitOp -> Select -> Select
limit_ lo s = SelectFrom $ newSelect { tables = [s]
, limit = limit'
, offset = offset' }
where (limit', offset') = case lo of
PQ.LimitOp n -> (Just n, Nothing)
PQ.OffsetOp n -> (Nothing, Just n)
PQ.LimitOffsetOp l o -> (Just l, Just o)
join :: PQ.JoinType -> [(Symbol, HPQ.PrimExpr)] -> HPQ.PrimExpr -> Select -> Select
-> Select
join j columns cond s1 s2 = SelectJoin Join { jJoinType = joinType j
, jAttrs = mkAttrs columns
, jTables = (s1, s2)
, jCond = sqlExpr cond }
where mkAttrs = map sqlBinding
-- Postgres seems to name columns of VALUES clauses "column1",
-- "column2", ... . I'm not sure to what extent it is customisable or
-- how robust it is to rely on this
values :: [Symbol] -> [[HPQ.PrimExpr]] -> Select
values columns pes = SelectValues Values { vAttrs = mkColumns columns
, vValues = (map . map) sqlExpr pes }
where mkColumns = zipWith (flip (curry (sqlBinding . Arr.second mkColumn))) [1..]
mkColumn i = (HPQ.BaseTableAttrExpr . ("column" ++) . show) (i::Int)
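-- Sketch of the effect (illustrative only, not the exact rendered SQL): for
-- result symbols x and y, the bindings built above alias Postgres' generated
-- names, roughly
--
--   SELECT "column1" AS x..., "column2" AS y... FROM (VALUES ...) ...
--
-- which is why 'mkColumn' reconstructs "column1", "column2", ... here.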
binary :: PQ.BinOp -> [(Symbol, (HPQ.PrimExpr, HPQ.PrimExpr))]
-> (Select, Select) -> Select
binary op pes (select1, select2) = SelectBinary Binary {
bOp = binOp op,
bSelect1 = SelectFrom newSelect { attrs = map (mkColumn fst) pes,
tables = [select1] },
bSelect2 = SelectFrom newSelect { attrs = map (mkColumn snd) pes,
tables = [select2] }
}
where mkColumn e = sqlBinding . Arr.second e
joinType :: PQ.JoinType -> JoinType
joinType PQ.LeftJoin = LeftJoin
joinType PQ.RightJoin = RightJoin
joinType PQ.FullJoin = FullJoin
binOp :: PQ.BinOp -> BinOp
binOp o = case o of
PQ.Except -> Except
PQ.ExceptAll -> ExceptAll
PQ.Union -> Union
PQ.UnionAll -> UnionAll
PQ.Intersect -> Intersect
PQ.IntersectAll -> IntersectAll
newSelect :: From
newSelect = From {
attrs = [],
tables = [],
criteria = [],
groupBy = [],
orderBy = [],
limit = Nothing,
offset = Nothing
}
sqlExpr :: HPQ.PrimExpr -> HSql.SqlExpr
sqlExpr = SG.sqlExpr SD.defaultSqlGenerator
sqlBinding :: (Symbol, HPQ.PrimExpr) -> (HSql.SqlExpr, Maybe HSql.SqlColumn)
sqlBinding (Symbol sym t, pe) =
(sqlExpr pe, Just (HSql.SqlColumn (T.tagWith t sym)))
label :: String -> Select -> Select
label l s = SelectLabel (Label l s)
| silkapp/haskell-opaleye | src/Opaleye/Internal/Sql.hs | bsd-3-clause | 6,367 | 0 | 15 | 1,824 | 2,022 | 1,158 | 864 | 143 | 6 |
{-#LANGUAGE TypeFamilies#-}
{-#LANGUAGE TemplateHaskell#-}
{-#LANGUAGE QuasiQuotes#-}
{-#LANGUAGE OverloadedStrings#-}
module HsVerilog.Verilog (
module HsVerilog.Verilog.Internal
, module HsVerilog.Verilog.DSL
) where
import HsVerilog.Verilog.Internal
import HsVerilog.Verilog.DSL
| junjihashimoto/hsverilog | src/HsVerilog/Verilog.hs | bsd-3-clause | 287 | 0 | 5 | 27 | 38 | 27 | 11 | 9 | 0 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances,
ScopedTypeVariables, TypeFamilies, FlexibleContexts #-}
-- |Utilities for loading texture data.
module GLUtil.Textures where
import Control.Monad (forM_)
import Graphics.Rendering.OpenGL
import qualified Graphics.Rendering.OpenGL.GL.VertexArrays as GL
import Data.Array.Storable (StorableArray, withStorableArray)
import Data.ByteString.Internal (ByteString, toForeignPtr)
import Data.Vector.Storable (Vector, unsafeWith)
import Data.Word (Word8, Word16)
import Foreign.ForeignPtr (ForeignPtr, withForeignPtr)
import Foreign.Ptr (Ptr, plusPtr, castPtr, nullPtr)
import Foreign.Marshal.Array (withArray)
import GLUtil.TypeMapping (HasGLType(..))
-- |Pixel format of image data.
data TexColor = TexMono | TexRG | TexRGB | TexBGR | TexRGBA
-- |A basic texture information record.
data TexInfo a = TexInfo { texWidth :: GLsizei
, texHeight :: GLsizei
, texColor :: TexColor
, texData :: a }
-- |Helper for constructing a 'TexInfo' using Haskell 'Int's for image
-- dimensions.
texInfo :: Int -> Int -> TexColor -> a -> TexInfo a
texInfo w h = TexInfo (fromIntegral w) (fromIntegral h)
-- |Class for containers of texture data.
class HasGLType (Elem a) => IsPixelData a where
type Elem a
withPixels :: a -> (Ptr (Elem a) -> IO c) -> IO c
instance HasGLType b => IsPixelData [b] where
type Elem [b] = b
withPixels = withArray
instance HasGLType b => IsPixelData (Ptr b) where
type Elem (Ptr b) = b
withPixels = flip ($)
instance HasGLType b => IsPixelData (ForeignPtr b) where
type Elem (ForeignPtr b) = b
withPixels = withForeignPtr
instance HasGLType b => IsPixelData (StorableArray i b) where
type Elem (StorableArray i b) = b
withPixels = withStorableArray
instance HasGLType b => IsPixelData (Vector b) where
type Elem (Vector b) = b
withPixels = unsafeWith
instance IsPixelData ByteString where
type Elem ByteString = Word8
withPixels b m = aux . toForeignPtr $ b
where aux (fp,o,_) = withForeignPtr fp $ \p ->
m (plusPtr p o)
-- |Wrapper whose 'IsPixelData' instance treats the pointer underlying
-- a 'ByteString' as an array of 'Word16's.
newtype ShortString = ShortString ByteString
instance IsPixelData ShortString where
type Elem ShortString = Word16
withPixels (ShortString b) m = aux. toForeignPtr $ b
where aux (fp,o,_) = withForeignPtr fp $ \p ->
m (plusPtr (castPtr p :: Ptr Word16) o)
-- |Create a new 2D texture with uninitialized contents.
freshTexture :: forall a proxy. HasGLType a
=> Int -> Int -> TexColor -> proxy a -> IO TextureObject
freshTexture w h c _ = loadTexture $ texInfo w h c (nullPtr::Ptr a)
-- |Create a new 2D texture with uninitialized 'Word8' contents.
freshTextureWord8 :: Int -> Int -> TexColor -> IO TextureObject
freshTextureWord8 w h c = loadTexture $ texInfo w h c (nullPtr::Ptr Word8)
-- |Create a new 2D texture with uninitialized 'GLfloat' contents.
freshTextureFloat :: Int -> Int -> TexColor -> IO TextureObject
freshTextureFloat w h c = loadTexture $ texInfo w h c (nullPtr::Ptr GLfloat)
-- |Create a new 2D texture with data from a 'TexInfo'.
loadTexture :: IsPixelData a => TexInfo a -> IO TextureObject
loadTexture tex = do [obj] <- genObjectNames 1
reloadTexture obj tex
return obj
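-- A hedged usage sketch (the sizes and names here are made up): a 64x64
-- opaque-white RGBA texture built from a storable vector of bytes, going
-- through the 'IsPixelData' instance for 'Vector Word8'.
--
-- > import qualified Data.Vector.Storable as V
-- >
-- > whiteTex :: IO TextureObject
-- > whiteTex = loadTexture $ texInfo 64 64 TexRGBA pixels
-- >   where pixels = V.replicate (64*64*4) (255 :: Word8)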
-- |Replace a 2D texture's pixel data with data from a 'TexInfo'.
reloadTexture :: forall a. IsPixelData a =>
TextureObject -> TexInfo a -> IO ()
reloadTexture obj tex = do textureBinding Texture2D $= Just obj
loadTex $ texColor tex
where loadTex TexMono = case pixelType of
GL.UnsignedShort -> loadAux Luminance16 Luminance
GL.Float -> loadAux R32F Red
GL.HalfFloat -> loadAux R16F Red
GL.UnsignedByte -> loadAux R8 Red
_ -> loadAux Luminance' Luminance
loadTex TexRG = case pixelType of
GL.UnsignedShort -> loadAux RG16 RGInteger
GL.Float -> loadAux RG32F RG
GL.HalfFloat -> loadAux RG16F RG
GL.UnsignedByte -> loadAux RG8UI RGInteger
GL.Byte -> loadAux RG8I RGInteger
GL.Int -> loadAux RG32I RGInteger
GL.UnsignedInt -> loadAux RG32UI RGInteger
_ -> error "Unknown pixelType for TexRG"
loadTex TexRGB = loadAux RGBA' RGB
loadTex TexBGR = loadAux RGBA' BGR
loadTex TexRGBA = loadAux RGBA' RGBA
sz = TextureSize2D (texWidth tex) (texHeight tex)
pixelType = glType (undefined::Elem a)
loadAux i e = withPixels (texData tex) $
(texImage2D Texture2D NoProxy 0 i sz 0 .
PixelData e pixelType)
-- | Set texture coordinate wrapping options for both the 'S' and 'T'
-- dimensions of a 2D texture.
texture2DWrap :: StateVar (Repetition, Clamping)
texture2DWrap = makeStateVar (get (textureWrapMode Texture2D S))
(forM_ [S,T] . aux)
where aux x d = textureWrapMode Texture2D d $= x
-- | Set texture coordinate wrapping options for the 'S', 'T', and 'R'
-- dimensions of a 3D texture.
texture3DWrap :: StateVar (Repetition, Clamping)
texture3DWrap = makeStateVar (get (textureWrapMode Texture2D S))
(forM_ [S,T,R] . aux)
where aux x d = textureWrapMode Texture2D d $= x
-- | Bind each of the given textures to successive texture units at
-- the given 'TextureTarget' starting with texture unit 0.
withTextures :: BindableTextureTarget t => t -> [TextureObject] -> IO a -> IO a
withTextures tt ts m = do mapM_ aux (zip ts [0..])
r <- m
cleanup 0 ts
activeTexture $= TextureUnit 0
return r
where aux (t,i) = do activeTexture $= TextureUnit i
textureBinding tt $= Just t
cleanup _ [] = return ()
cleanup i (_:ts') = do activeTexture $= TextureUnit i
textureBinding tt $= Nothing
cleanup (i+1) ts'
-- | Bind each of the given 2D textures to successive texture units
-- starting with texture unit 0.
withTextures2D :: [TextureObject] -> IO a -> IO a
withTextures2D = withTextures Texture2D
-- | Bind each of the given textures to the texture unit they are
-- paired with. The given action is run with these bindings, then the
-- texture bindings are reset. If you don't care which texture units
-- are used, consider using 'withTextures' or 'withTextures2D'.
withTexturesAt :: BindableTextureTarget t
=> t -> [(TextureObject,GLuint)] -> IO a -> IO a
withTexturesAt tt ts m = do mapM_ aux ts
r <- m
mapM_ (cleanup . snd) ts
return r
where aux (t,i) = do activeTexture $= TextureUnit i
textureBinding tt $= Just t
cleanup i = do activeTexture $= TextureUnit i
textureBinding tt $= Nothing
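-- Typical use (sketch only; 'diffuseTex' and 'drawScene' stand in for the
-- caller's texture object and rendering action): bind the texture to unit 0
-- around a draw call and let the binding be reset afterwards.
--
-- > renderTextured :: TextureObject -> IO () -> IO ()
-- > renderTextured diffuseTex drawScene = withTextures2D [diffuseTex] drawScene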
| coghex/abridgefaraway | src/GLUtil/Textures.hs | bsd-3-clause | 7,376 | 0 | 14 | 2,193 | 1,886 | 966 | 920 | 120 | 16 |
{-# LANGUAGE DataKinds, KindSignatures, OverloadedStrings, ScopedTypeVariables, TupleSections #-}
module Azure.Storage.Protocol.Blob
( BlobName
, getBlobName
, ContainerName
, getContainerName
, BlobReference
, listBlobs
, listContainers
, getBlob
) where
import Control.Monad (join)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Resource (MonadResource)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSAscii
import Data.Conduit (Source, ($=))
import qualified Data.Conduit.List as CL
import Data.Default.Class (def)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types.Method as Methods
import qualified Text.XML.Stream.Parse as XML
import Azure.Storage.Protocol.Common
import qualified Azure.Storage.Protocol.Authentication as Auth
import Azure.Storage.Protocol.HttpHelpers
import Azure.Storage.Protocol.XmlHelpers
newtype BlobName = BlobName { getBlobName :: ByteString } deriving (Eq, Ord, Show)
newtype ContainerName = ContainerName { getContainerName :: ByteString } deriving (Eq, Ord, Show)
type BlobReference = (ContainerName, BlobName)
data BlobType = Block | Page deriving (Eq, Show, Ord, Enum)
type StorageOperation r = HTTP.Manager -> Auth.Account -> r
blobRequest :: MonadIO m => HTTP.Request -> Auth.Account -> m HTTP.Request
blobRequest = Auth.signedRequest Auth.Blob
issueRequest template manager account = do
signedReq <- blobRequest template account
httpRequest signedReq manager
issueRequestXml template manager account = issueRequest template manager account $= XML.parseBytes XML.def
listContainers :: MonadResource m => StorageOperation (Source m ContainerName)
listContainers manager account =
issueRequestXml template manager account $= elementContent "Name" $= CL.map (ContainerName . encodeUtf8)
where
template = def {
HTTP.queryString = "?comp=list"
}
slash = BSAscii.singleton '/'
listBlobs :: MonadResource m => ContainerName -> StorageOperation (Source m BlobReference)
listBlobs cn@(ContainerName containerName) manager account =
issueRequestXml template manager account $= elementContent "Name" $= CL.map ((cn,) . BlobName . encodeUtf8)
where
template = def {
HTTP.queryString = "?restype=container&comp=list",
HTTP.path = slash <> containerName
}
getBlob :: MonadResource m => BlobReference -> StorageOperation (Source m ByteString)
getBlob (ContainerName container, BlobName blob) manager account =
issueRequest template manager account
where
template = def {
HTTP.path = BS.concat [ slash, container, slash, blob ]
}
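-- A hedged usage sketch ('mgr' and 'acct' are assumed to come from http-client
-- and Azure.Storage.Protocol.Authentication respectively): the returned
-- conduit 'Source's are run under 'runResourceT', e.g.
--
-- > import Data.Conduit (($$))
-- > import Control.Monad.Trans.Resource (runResourceT)
-- >
-- > allContainers mgr acct = runResourceT $ listContainers mgr acct $$ CL.consume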
createContainer :: StorageOperation (ContainerName -> IO Bool)
createContainer = undefined
deleteContainer :: StorageOperation (ContainerName -> IO Bool)
deleteContainer = undefined | Porges/azure-storage-haskell | src/Azure/Storage/Protocol/Blob.hs | bsd-3-clause | 3,165 | 0 | 10 | 645 | 789 | 452 | 337 | 61 | 1 |
module Core.Environment (
Environment, lookup, (-->), env, ids, elems
) where
import Prelude hiding ( lookup )
import Core.Ast
import Data.Monoid
import qualified Data.Map as M
newtype Environment t = E { e :: M.Map Ident t }
deriving (Show)
lookup :: Ident -> Environment t -> Maybe t
lookup i (E e) = i `M.lookup` e
infix 8 -->
(-->) :: Ident -> t -> Environment t
(-->) = (E.) . M.singleton
env :: [(Ident, t)] -> Environment t
env = E . M.fromList
ids :: Environment t -> [Ident]
ids = M.keys . e
elems :: Environment t -> [t]
elems = M.elems . e
-- Maps are a monoid wrt left-biased union.
instance Monoid (Environment t) where
mempty = E M.empty
E e1 `mappend` E e2 = E (e1 `M.union` e2)
mconcat = E . mconcat . map e
instance Functor Environment where
fmap f = E . fmap f . e
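-- Illustration (not part of the module; assumes Ident values x and y are in
-- scope): environments combine with a left-biased union, so the leftmost
-- binding wins on a clash.
--
-- > lookup x (x --> 1 <> x --> 2 <> y --> 3) == Just 1
-- > lookup y (env [(x, 1), (y, 3)]) == Just 3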
| pqwy/redex | src/Core/Environment.hs | bsd-3-clause | 844 | 1 | 8 | 216 | 351 | 196 | 155 | 25 | 1 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric,
FlexibleContexts, MultiParamTypeClasses,
TemplateHaskell
#-}
module Insomnia.ModuleType where
import Control.Applicative
import Control.Lens
import Data.Foldable (Foldable(..))
import Data.Traversable (foldMapDefault, fmapDefault)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Unbound.Generics.LocallyNameless
import qualified Unbound.Generics.LocallyNameless.Unsafe as UU
import Insomnia.Identifier
import {-# SOURCE #-} Insomnia.Types (Type, Kind,
TypePath, TyConName,
TypeConstructor,
TraverseTypes(..))
import Insomnia.Expr (Expr)
import Insomnia.TypeDefn
import Insomnia.ValueConstructor
import Insomnia.Common.ModuleKind
import Insomnia.Common.Telescope
-- After typechecking a toplevel, we get a summary that specifies the
-- (normalized) types of its modules and its (normalized) module
-- types. It's kind of like a module signature except it doesn't have
-- value or type components, but does have signature components.
data ToplevelSummary =
UnitTS
| ModuleTS !Field (Bind (Identifier, Embed ModuleTypeNF) ToplevelSummary)
| SignatureTS !Field (Bind (SigIdentifier, Embed ModuleTypeNF) ToplevelSummary)
deriving (Show, Typeable, Generic)
data ModuleType =
SigMT !(SigV Signature) -- "module/model { decls ... }"
| IdentMT !SigPath -- "X_SIG"
| FunMT !(Bind (Telescope (FunctorArgument ModuleType)) ModuleType)
| WhereMT !ModuleType !WhereClause
deriving (Show, Typeable, Generic)
data WhereClause =
-- | "SIG where type p = ty"
  -- (invariant: the IdP at the head of the TypePath is the one that is bound, and it is irrelevant.)
WhereTypeCls !(Bind Identifier TypePath) !Type
deriving (Show, Typeable, Generic)
data FunctorArgument t =
FunctorArgument !Identifier !(Embed t)
deriving (Show, Typeable, Generic)
data Signature =
UnitSig
| ValueSig !Field !Type !Signature
| TypeSig !Field !(Bind (TyConName, Embed TypeSigDecl) Signature)
| SubmoduleSig !Field !(Bind (Identifier, Embed ModuleType) Signature)
deriving (Show, Typeable, Generic)
-- module type normal form: all signature identifiers have been resolved.
data ModuleTypeNF =
SigMTNF (SigV Signature)
| FunMTNF !(Bind (Telescope (FunctorArgument ModuleTypeNF)) ModuleTypeNF)
deriving (Show, Typeable, Generic)
-- | After evaluating a ModuleType, we get a signature together with its kind.
-- (the datatype is parametrized because we'll also use it for SelfSig during typechecking)
data SigV a = SigV { _sigVSig :: !a
, _sigVKind :: !ModuleKind
}
deriving (Show, Typeable, Generic)
-- | A type declaration in a signature.
-- This should really be @Either Kind TypeDefn@ or perhaps
-- @Either Kind (Kind, TypeDefn)@, but we will separate them, for now.
-- The invariant, however, is that both Maybes can't be Nothing.
data TypeSigDecl =
AbstractTypeSigDecl !Kind -- type T : ⋆
| ManifestTypeSigDecl !TypeDefn -- data T = True | False
| AliasTypeSigDecl !TypeAlias -- type T = Int
deriving (Show, Typeable, Generic)
$(makeLenses ''SigV)
instance Functor FunctorArgument where
fmap = fmapDefault
instance Foldable FunctorArgument where
foldMap = foldMapDefault
instance Traversable FunctorArgument where
traverse f (FunctorArgument x (Embed t)) = (FunctorArgument x . Embed) <$> f t
moduleTypeNormalFormEmbed :: ModuleTypeNF -> ModuleType
moduleTypeNormalFormEmbed (SigMTNF s) = SigMT s
moduleTypeNormalFormEmbed (FunMTNF bnd) =
let (args, body) = UU.unsafeUnbind bnd
args' = fmapTelescope (fmap moduleTypeNormalFormEmbed) args
body' = moduleTypeNormalFormEmbed body
in FunMT $ bind args' body'
instance Traversable SigV where
traverse = sigVSig
instance Foldable SigV where
foldMap = foldMapDefault
instance Functor SigV where
fmap = fmapDefault
instance Alpha ToplevelSummary
instance Alpha ModuleType
instance Alpha Signature
instance Alpha a => Alpha (FunctorArgument a)
instance Alpha a => Alpha (SigV a)
instance Alpha TypeSigDecl
instance Alpha WhereClause
instance Alpha ModuleTypeNF
instance Subst SigPath ToplevelSummary
instance Subst SigPath Signature
instance Subst SigPath TypeSigDecl
instance Subst SigPath ModuleTypeNF
instance Subst SigPath ModuleType
instance Subst SigPath WhereClause
instance Subst SigPath a => Subst SigPath (SigV a)
instance Subst SigPath a => Subst SigPath (FunctorArgument a)
instance Subst Path ToplevelSummary
instance Subst Path Signature
instance Subst Path TypeSigDecl
instance Subst Path ModuleType
instance Subst Path WhereClause
instance Subst Path a => Subst Path (SigV a)
instance Subst Path a => Subst Path (FunctorArgument a)
instance Subst Path ModuleTypeNF
instance Subst ValueConstructor ModuleType
instance Subst ValueConstructor Signature
instance Subst ValueConstructor a => Subst ValueConstructor (FunctorArgument a)
instance Subst ValueConstructor TypeSigDecl
instance Subst ValueConstructor a => Subst ValueConstructor (SigV a)
instance Subst ValueConstructor WhereClause
instance Subst TypeConstructor ModuleType
instance Subst TypeConstructor Signature
instance Subst TypeConstructor WhereClause
instance Subst TypeConstructor a => Subst TypeConstructor (FunctorArgument a)
instance Subst TypeConstructor TypeSigDecl
instance Subst TypeConstructor a => Subst TypeConstructor (SigV a)
instance Subst Type ModuleType
instance Subst Type a => Subst Type (FunctorArgument a)
instance Subst Type a => Subst Type (SigV a)
instance Subst Type Signature
instance Subst Type WhereClause
instance Subst Type TypeSigDecl
-- model types do not have expressions in them.
instance Subst Expr ModuleType where
subst _ _ = id
substs _ = id
instance Subst Expr a => Subst Expr (FunctorArgument a)
instance TraverseTypes ModuleType ModuleType where
traverseTypes f t = pure t -- XXX TODO finish traverseTypes for ModuleTypes
| lambdageek/insomnia | src/Insomnia/ModuleType.hs | bsd-3-clause | 6,022 | 0 | 13 | 1,061 | 1,435 | 727 | 708 | 173 | 1 |
module TaglessFinal.EvalState where
import qualified TaglessFinal.VarState as VarState
import qualified TaglessFinal.Subst as Subst
import TaglessFinal.Term
import Control.Monad.State
data EvalState = EvalState
{ getVarState :: VarState.VarState
, getSubst :: Subst.Subst Var
}
getSubstM :: State EvalState (Subst.Subst Var)
getSubstM = do
state <- get
return (getSubst state)
nextFreshVar :: State EvalState Int
nextFreshVar = do
  state <- get
  let varState = getVarState state
  let subst = getSubst state
  let x = VarState.getFreshVar varState
  let newState = EvalState
        { getVarState = VarState.VarState
            { VarState.getVar = VarState.getVar varState
            , VarState.getFreshVar = x + 1
            }
        , getSubst = subst
        }
  -- persist the advanced fresh-variable counter so subsequent calls yield new variables
  put newState
  return x
nextVar :: State EvalState Int
nextVar = do
  state <- get
  let varState = getVarState state
  let subst = getSubst state
  let x = VarState.getVar varState
  let newState = EvalState
        { getVarState = VarState.VarState
            { VarState.getVar = x + 1
            , VarState.getFreshVar = VarState.getFreshVar varState
            }
        , getSubst = subst
        }
  -- persist the advanced variable counter so subsequent calls yield new variables
  put newState
  return x
updateSubst :: (Subst.Subst Var -> Subst.Subst Var) -> State EvalState (Subst.Subst Var)
updateSubst f = do
state <- get
let varState = getVarState state
let newSubst = f (getSubst state)
put $ EvalState { getVarState = varState, getSubst = newSubst }
return newSubst | kajigor/uKanren_transformations | src/TaglessFinal/EvalState.hs | bsd-3-clause | 1,468 | 0 | 15 | 364 | 450 | 228 | 222 | 43 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP, TupleSections, ViewPatterns #-}
module TcValidity (
Rank, UserTypeCtxt(..), checkValidType, checkValidMonoType,
ContextKind(..), expectedKindInCtxt,
checkValidTheta, checkValidFamPats,
checkValidInstance, validDerivPred,
checkInstTermination,
ClsInfo, checkValidCoAxiom, checkValidCoAxBranch,
checkValidTyFamEqn,
arityErr, badATErr,
checkValidTelescope, checkZonkValidTelescope, checkValidInferredKinds
) where
#include "HsVersions.h"
-- friends:
import TcUnify ( tcSubType_NC )
import TcSimplify ( simplifyAmbiguityCheck )
import TyCoRep
import TcType hiding ( sizeType, sizeTypes )
import TcMType
import PrelNames
import Type
import Coercion
import Unify( tcMatchTyX )
import Kind
import CoAxiom
import Class
import TyCon
-- others:
import HsSyn -- HsType
import TcRnMonad -- TcType, amongst others
import TcHsSyn ( checkForRepresentationPolymorphism )
import FunDeps
import FamInstEnv ( isDominatedBy, injectiveBranches,
InjectivityCheckResult(..) )
import FamInst ( makeInjectivityErrors )
import Name
import VarEnv
import VarSet
import ErrUtils
import DynFlags
import Util
import ListSetOps
import SrcLoc
import Outputable
import BasicTypes
import Module
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.Maybe
import Data.List ( (\\) )
{-
************************************************************************
* *
Checking for ambiguity
* *
************************************************************************
Note [The ambiguity check for type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
checkAmbiguity is a check on *user-supplied type signatures*. It is
*purely* there to report functions that cannot possibly be called. So for
example we want to reject:
f :: C a => Int
The idea is there can be no legal calls to 'f' because every call will
give rise to an ambiguous constraint. We could soundly omit the
ambiguity check on type signatures entirely, at the expense of
delaying ambiguity errors to call sites. Indeed, the flag
-XAllowAmbiguousTypes switches off the ambiguity check.
What about things like this:
class D a b | a -> b where ..
h :: D Int b => Int
The Int may well fix 'b' at the call site, so that signature should
not be rejected. Moreover, using *visible* fundeps is too
conservative. Consider
class X a b where ...
class D a b | a -> b where ...
instance D a b => X [a] b where...
h :: X a b => a -> a
Here h's type looks ambiguous in 'b', but here's a legal call:
...(h [True])...
That gives rise to a (X [Bool] beta) constraint, and using the
instance means we need (D Bool beta) and that fixes 'beta' via D's
fundep!
Behind all these special cases there is a simple guiding principle.
Consider
f :: <type>
f = ...blah...
g :: <type>
g = f
You would think that the definition of g would surely typecheck!
After all f has exactly the same type, and g=f. But in fact f's type
is instantiated and the instantiated constraints are solved against
the originals, so in the case an ambiguous type it won't work.
Consider our earlier example f :: C a => Int. Then in g's definition,
we'll instantiate to (C alpha) and try to deduce (C alpha) from (C a),
and fail.
So in fact we use this as our *definition* of ambiguity. We use a
very similar test for *inferred* types, to ensure that they are
unambiguous. See Note [Impedence matching] in TcBinds.
This test is very conveniently implemented by calling
tcSubType <type> <type>
This neatly takes account of the functional dependency stuff above,
and implicit parameter (see Note [Implicit parameters and ambiguity]).
And this is what checkAmbiguity does.
What about this, though?
g :: C [a] => Int
Is every call to 'g' ambiguous? After all, we might have
  instance C [a] where ...
at the call site. So maybe that type is ok! Indeed even f's
quintessentially ambiguous type might, just possibly be callable:
with -XFlexibleInstances we could have
instance C a where ...
and now a call could be legal after all! Well, we'll reject this
unless the instance is available *here*.
Note [When to call checkAmbiguity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We call checkAmbiguity
(a) on user-specified type signatures
(b) in checkValidType
Concerning (b), you might wonder about nested foralls.  What about
f :: forall b. (forall a. Eq a => b) -> b
The nested forall is ambiguous. Originally we called checkAmbiguity
in the forall case of check_type, but that had two bad consequences:
* We got two error messages about (Eq b) in a nested forall like this:
g :: forall a. Eq a => forall b. Eq b => a -> a
* If we try to check for ambiguity of an nested forall like
(forall a. Eq a => b), the implication constraint doesn't bind
all the skolems, which results in "No skolem info" in error
messages (see Trac #10432).
To avoid this, we call checkAmbiguity once, at the top, in checkValidType.
(I'm still a bit worried about unbound skolems when the type mentions
in-scope type variables.)
In fact, because of the co/contra-variance implemented in tcSubType,
this *does* catch function f above, too.
Concerning (a) the ambiguity check is only used for *user* types, not
for types coming from interface files.  The latter can legitimately
have ambiguous types. Example
class S a where s :: a -> (Int,Int)
instance S Char where s _ = (1,1)
f:: S a => [a] -> Int -> (Int,Int)
f (_::[a]) x = (a*x,b)
where (a,b) = s (undefined::a)
Here the worker for f gets the type
fw :: forall a. S a => Int -> (# Int, Int #)
Note [Implicit parameters and ambiguity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Only a *class* predicate can give rise to ambiguity
An *implicit parameter* cannot. For example:
foo :: (?x :: [a]) => Int
foo = length ?x
is fine. The call site will supply a particular 'x'
Furthermore, the type variables fixed by an implicit parameter
propagate to the others. E.g.
foo :: (Show a, ?x::[a]) => Int
foo = show (?x++?x)
The type of foo looks ambiguous. But it isn't, because at a call site
we might have
let ?x = 5::Int in foo
and all is well. In effect, implicit parameters are, well, parameters,
so we can take their type variables into account as part of the
"tau-tvs" stuff. This is done in the function 'FunDeps.grow'.
-}
checkAmbiguity :: UserTypeCtxt -> Type -> TcM ()
checkAmbiguity ctxt ty
| wantAmbiguityCheck ctxt
= do { traceTc "Ambiguity check for" (ppr ty)
-- Solve the constraints eagerly because an ambiguous type
-- can cause a cascade of further errors. Since the free
-- tyvars are skolemised, we can safely use tcSimplifyTop
; allow_ambiguous <- xoptM LangExt.AllowAmbiguousTypes
; (_wrap, wanted) <- addErrCtxt (mk_msg allow_ambiguous) $
captureConstraints $
tcSubType_NC ctxt ty (mkCheckExpType ty)
; simplifyAmbiguityCheck ty wanted
; traceTc "Done ambiguity check for" (ppr ty) }
| otherwise
= return ()
where
mk_msg allow_ambiguous
= vcat [ text "In the ambiguity check for" <+> what
, ppUnless allow_ambiguous ambig_msg ]
ambig_msg = text "To defer the ambiguity check to use sites, enable AllowAmbiguousTypes"
what | Just n <- isSigMaybe ctxt = quotes (ppr n)
| otherwise = pprUserTypeCtxt ctxt
wantAmbiguityCheck :: UserTypeCtxt -> Bool
wantAmbiguityCheck ctxt
= case ctxt of -- See Note [When we don't check for ambiguity]
GhciCtxt -> False
TySynCtxt {} -> False
_ -> True
checkUserTypeError :: Type -> TcM ()
-- Check to see if the type signature mentions "TypeError blah"
-- anywhere in it, and fail if so.
--
-- Very unsatisfactorily (Trac #11144) we need to tidy the type
-- because it may have come from an /inferred/ signature, not a
-- user-supplied one. This is really only a half-baked fix;
-- the other errors in checkValidType don't do tidying, and so
-- may give bad error messages when given an inferred type.
checkUserTypeError = check
where
check ty
| Just msg <- userTypeError_maybe ty = fail_with msg
| Just (_,ts) <- splitTyConApp_maybe ty = mapM_ check ts
| Just (t1,t2) <- splitAppTy_maybe ty = check t1 >> check t2
| Just (_,t1) <- splitForAllTy_maybe ty = check t1
| otherwise = return ()
fail_with msg = do { env0 <- tcInitTidyEnv
; let (env1, tidy_msg) = tidyOpenType env0 msg
; failWithTcM (env1, pprUserTypeErrorTy tidy_msg) }
{- Note [When we don't check for ambiguity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a few places we do not want to check a user-specified type for ambiguity
* GhciCtxt: Allow ambiguous types in GHCi's :kind command
E.g. type family T a :: * -- T :: forall k. k -> *
Then :k T should work in GHCi, not complain that
(T k) is ambiguous!
* TySynCtxt: type T a b = C a b => blah
It may be that when we /use/ T, we'll give an 'a' or 'b' that somehow
cure the ambiguity. So we defer the ambiguity check to the use site.
There is also an implementation reason (Trac #11608). In the RHS of
a type synonym we don't (currently) instantiate 'a' and 'b' with
  TcTyVars before calling checkValidType, so we get assertion failures
from doing an ambiguity check on a type with TyVars in it. Fixing this
would not be hard, but let's wait till there's a reason.
************************************************************************
* *
Checking validity of a user-defined type
* *
************************************************************************
When dealing with a user-written type, we first translate it from an HsType
to a Type, performing kind checking, and then check various things that should
be true about it. We don't want to perform these checks at the same time
as the initial translation because (a) they are unnecessary for interface-file
types and (b) when checking a mutually recursive group of type and class decls,
we can't "look" at the tycons/classes yet. Also, the checks are rather
diverse, and used to really mess up the other code.
One thing we check for is 'rank'.
Rank 0: monotypes (no foralls)
Rank 1: foralls at the front only, Rank 0 inside
Rank 2: foralls at the front, Rank 1 on left of fn arrow,
basic ::= tyvar | T basic ... basic
r2 ::= forall tvs. cxt => r2a
r2a ::= r1 -> r2a | basic
r1 ::= forall tvs. cxt => r0
r0 ::= r0 -> r0 | basic
Another thing is to check that type synonyms are saturated.
This might not necessarily show up in kind checking.
type A i = i
data T k = MkT (k Int)
f :: T A -- BAD!
-}
checkValidType :: UserTypeCtxt -> Type -> TcM ()
-- Checks that a user-written type is valid for the given context
-- Assumes argument is fully zonked
-- Not used for instance decls; checkValidInstance instead
checkValidType ctxt ty
= do { traceTc "checkValidType" (ppr ty <+> text "::" <+> ppr (typeKind ty))
; rankn_flag <- xoptM LangExt.RankNTypes
; impred_flag <- xoptM LangExt.ImpredicativeTypes
; let gen_rank :: Rank -> Rank
gen_rank r | rankn_flag = ArbitraryRank
| otherwise = r
rank1 = gen_rank r1
rank0 = gen_rank r0
r0 = rankZeroMonoType
r1 = LimitedRank True r0
rank
= case ctxt of
DefaultDeclCtxt-> MustBeMonoType
ResSigCtxt -> MustBeMonoType
PatSigCtxt -> rank0
RuleSigCtxt _ -> rank1
TySynCtxt _ -> rank0
ExprSigCtxt -> rank1
TypeAppCtxt | impred_flag -> ArbitraryRank
| otherwise -> tyConArgMonoType
-- Normally, ImpredicativeTypes is handled in check_arg_type,
-- but visible type applications don't go through there.
-- So we do this check here.
FunSigCtxt {} -> rank1
InfSigCtxt _ -> ArbitraryRank -- Inferred type
ConArgCtxt _ -> rank1 -- We are given the type of the entire
-- constructor, hence rank 1
ForSigCtxt _ -> rank1
SpecInstCtxt -> rank1
ThBrackCtxt -> rank1
GhciCtxt -> ArbitraryRank
_ -> panic "checkValidType"
-- Can't happen; not used for *user* sigs
; env <- tcInitOpenTidyEnv (tyCoVarsOfTypeList ty)
-- Check the internal validity of the type itself
; check_type env ctxt rank ty
-- Check that the thing has kind Type, and is lifted if necessary.
-- Do this *after* check_type, because we can't usefully take
-- the kind of an ill-formed type such as (a~Int)
; check_kind env ctxt ty
; checkUserTypeError ty
-- Check for ambiguous types. See Note [When to call checkAmbiguity]
-- NB: this will happen even for monotypes, but that should be cheap;
-- and there may be nested foralls for the subtype test to examine
; checkAmbiguity ctxt ty
; traceTc "checkValidType done" (ppr ty <+> text "::" <+> ppr (typeKind ty)) }
checkValidMonoType :: Type -> TcM ()
-- Assumes argument is fully zonked
checkValidMonoType ty
= do { env <- tcInitOpenTidyEnv (tyCoVarsOfTypeList ty)
; check_type env SigmaCtxt MustBeMonoType ty }
check_kind :: TidyEnv -> UserTypeCtxt -> TcType -> TcM ()
-- Check that the type's kind is acceptable for the context
check_kind env ctxt ty
| TySynCtxt {} <- ctxt
, returnsConstraintKind actual_kind
= do { ck <- xoptM LangExt.ConstraintKinds
; if ck
then when (isConstraintKind actual_kind)
(do { dflags <- getDynFlags
; check_pred_ty env dflags ctxt ty })
else addErrTcM (constraintSynErr env actual_kind) }
| otherwise
= case expectedKindInCtxt ctxt of
TheKind k -> checkTcM (tcEqType actual_kind k) (kindErr env actual_kind)
OpenKind -> checkTcM (classifiesTypeWithValues actual_kind) (kindErr env actual_kind)
AnythingKind -> return ()
where
actual_kind = typeKind ty
-- | The kind expected in a certain context.
data ContextKind = TheKind Kind -- ^ a specific kind
| AnythingKind -- ^ any kind will do
| OpenKind -- ^ something of the form @TYPE _@
-- Depending on the context, we might accept any kind (for instance, in a TH
-- splice), or only certain kinds (like in type signatures).
expectedKindInCtxt :: UserTypeCtxt -> ContextKind
expectedKindInCtxt (TySynCtxt _) = AnythingKind
expectedKindInCtxt ThBrackCtxt = AnythingKind
expectedKindInCtxt GhciCtxt = AnythingKind
-- The types in a 'default' decl can have varying kinds
-- See Note [Extended defaults]" in TcEnv
expectedKindInCtxt DefaultDeclCtxt = AnythingKind
expectedKindInCtxt TypeAppCtxt = AnythingKind
expectedKindInCtxt (ForSigCtxt _) = TheKind liftedTypeKind
expectedKindInCtxt InstDeclCtxt = TheKind constraintKind
expectedKindInCtxt SpecInstCtxt = TheKind constraintKind
expectedKindInCtxt _ = OpenKind
{-
Note [Higher rank types]
~~~~~~~~~~~~~~~~~~~~~~~~
Technically
Int -> forall a. a->a
is still a rank-1 type, but it's not Haskell 98 (Trac #5957). So the
validity checker allow a forall after an arrow only if we allow it
before -- that is, with Rank2Types or RankNTypes
-}
data Rank = ArbitraryRank -- Any rank ok
| LimitedRank -- Note [Higher rank types]
Bool -- Forall ok at top
Rank -- Use for function arguments
| MonoType SDoc -- Monotype, with a suggestion of how it could be a polytype
| MustBeMonoType -- Monotype regardless of flags
rankZeroMonoType, tyConArgMonoType, synArgMonoType, constraintMonoType :: Rank
rankZeroMonoType = MonoType (text "Perhaps you intended to use RankNTypes or Rank2Types")
tyConArgMonoType = MonoType (text "GHC doesn't yet support impredicative polymorphism")
synArgMonoType = MonoType (text "Perhaps you intended to use LiberalTypeSynonyms")
constraintMonoType = MonoType (text "A constraint must be a monotype")
funArgResRank :: Rank -> (Rank, Rank) -- Function argument and result
funArgResRank (LimitedRank _ arg_rank) = (arg_rank, LimitedRank (forAllAllowed arg_rank) arg_rank)
funArgResRank other_rank = (other_rank, other_rank)
forAllAllowed :: Rank -> Bool
forAllAllowed ArbitraryRank = True
forAllAllowed (LimitedRank forall_ok _) = forall_ok
forAllAllowed _ = False
-- The zonker issues errors if it zonks a representation-polymorphic binder
-- But sometimes it's nice to check a little more eagerly, trying to report
-- errors earlier.
representationPolymorphismForbidden :: UserTypeCtxt -> Bool
representationPolymorphismForbidden = go
where
go (ConArgCtxt _) = True -- A rep-polymorphic datacon won't be useful
go (PatSynBuilderCtxt _) = True -- Similar to previous case
go _ = False -- Other cases are caught by zonker
----------------------------------------
-- | Fail with error message if the type is unlifted
check_lifted :: Type -> TcM ()
check_lifted _ = return ()
{- ------ Legacy comment ---------
The check_unlifted function seems entirely redundant. The
kind system should check for uses of unlifted types. So I've
removed the check. See Trac #11120 comment:19.
check_lifted ty
= do { env <- tcInitOpenTidyEnv (tyCoVarsOfTypeList ty)
; checkTcM (not (isUnliftedType ty)) (unliftedArgErr env ty) }
unliftedArgErr :: TidyEnv -> Type -> (TidyEnv, SDoc)
unliftedArgErr env ty = (env, sep [text "Illegal unlifted type:", ppr_tidy env ty])
------ End of legacy comment --------- -}
check_type :: TidyEnv -> UserTypeCtxt -> Rank -> Type -> TcM ()
-- The args say what the *type context* requires, independent
-- of *flag* settings. You test the flag settings at usage sites.
--
-- Rank is allowed rank for function args
-- Rank 0 means no for-alls anywhere
check_type env ctxt rank ty
| not (null tvs && null theta)
= do { traceTc "check_type" (ppr ty $$ ppr (forAllAllowed rank))
; checkTcM (forAllAllowed rank) (forAllTyErr env rank ty)
-- Reject e.g. (Maybe (?x::Int => Int)),
-- with a decent error message
; check_valid_theta env' SigmaCtxt theta
-- Allow type T = ?x::Int => Int -> Int
-- but not type T = ?x::Int
; check_type env' ctxt rank tau -- Allow foralls to right of arrow
; checkTcM (not (any (`elemVarSet` tyCoVarsOfType phi_kind) tvs))
(forAllEscapeErr env' ty tau_kind)
}
where
(tvs, theta, tau) = tcSplitSigmaTy ty
tau_kind = typeKind tau
(env', _) = tidyTyCoVarBndrs env tvs
phi_kind | null theta = tau_kind
| otherwise = liftedTypeKind
-- If there are any constraints, the kind is *. (#11405)
check_type _ _ _ (TyVarTy _) = return ()
check_type env ctxt rank (ForAllTy (Anon arg_ty) res_ty)
= do { check_type env ctxt arg_rank arg_ty
; when (representationPolymorphismForbidden ctxt) $
checkForRepresentationPolymorphism empty arg_ty
; check_type env ctxt res_rank res_ty }
where
(arg_rank, res_rank) = funArgResRank rank
check_type env ctxt rank (AppTy ty1 ty2)
= do { check_arg_type env ctxt rank ty1
; check_arg_type env ctxt rank ty2 }
check_type env ctxt rank ty@(TyConApp tc tys)
| isTypeSynonymTyCon tc || isTypeFamilyTyCon tc
= check_syn_tc_app env ctxt rank ty tc tys
| isUnboxedTupleTyCon tc = check_ubx_tuple env ctxt ty tys
| otherwise = mapM_ (check_arg_type env ctxt rank) tys
check_type _ _ _ (LitTy {}) = return ()
check_type env ctxt rank (CastTy ty _) = check_type env ctxt rank ty
check_type _ _ _ ty = pprPanic "check_type" (ppr ty)
----------------------------------------
check_syn_tc_app :: TidyEnv -> UserTypeCtxt -> Rank -> KindOrType
-> TyCon -> [KindOrType] -> TcM ()
-- Used for type synonyms and type synonym families,
-- which must be saturated,
-- but not data families, which need not be saturated
check_syn_tc_app env ctxt rank ty tc tys
| tc_arity <= length tys -- Saturated
-- Check that the synonym has enough args
-- This applies equally to open and closed synonyms
-- It's OK to have an *over-applied* type synonym
-- data Tree a b = ...
-- type Foo a = Tree [a]
-- f :: Foo a b -> ...
= do { -- See Note [Liberal type synonyms]
; liberal <- xoptM LangExt.LiberalTypeSynonyms
; if not liberal || isTypeFamilyTyCon tc then
-- For H98 and synonym families, do check the type args
mapM_ check_arg tys
else -- In the liberal case (only for closed syns), expand then check
case coreView ty of
Just ty' -> check_type env ctxt rank ty'
Nothing -> pprPanic "check_tau_type" (ppr ty) }
| GhciCtxt <- ctxt -- Accept under-saturated type synonyms in
-- GHCi :kind commands; see Trac #7586
= mapM_ check_arg tys
| otherwise
= failWithTc (tyConArityErr tc tys)
where
tc_arity = tyConArity tc
check_arg | isTypeFamilyTyCon tc = check_arg_type env ctxt rank
| otherwise = check_type env ctxt synArgMonoType
----------------------------------------
check_ubx_tuple :: TidyEnv -> UserTypeCtxt -> KindOrType
-> [KindOrType] -> TcM ()
check_ubx_tuple env ctxt ty tys
= do { ub_tuples_allowed <- xoptM LangExt.UnboxedTuples
; checkTcM ub_tuples_allowed (ubxArgTyErr env ty)
; impred <- xoptM LangExt.ImpredicativeTypes
; let rank' = if impred then ArbitraryRank else tyConArgMonoType
-- c.f. check_arg_type
-- However, args are allowed to be unlifted, or
-- more unboxed tuples, so can't use check_arg_ty
; mapM_ (check_type env ctxt rank') tys }
----------------------------------------
check_arg_type :: TidyEnv -> UserTypeCtxt -> Rank -> KindOrType -> TcM ()
-- The sort of type that can instantiate a type variable,
-- or be the argument of a type constructor.
-- Not an unboxed tuple, but now *can* be a forall (since impredicativity)
-- Other unboxed types are very occasionally allowed as type
-- arguments depending on the kind of the type constructor
--
-- For example, we want to reject things like:
--
-- instance Ord a => Ord (forall s. T s a)
-- and
-- g :: T s (forall b.b)
--
-- NB: unboxed tuples can have polymorphic or unboxed args.
-- This happens in the workers for functions returning
-- product types with polymorphic components.
-- But not in user code.
-- Anyway, they are dealt with by a special case in check_tau_type
check_arg_type _ _ _ (CoercionTy {}) = return ()
check_arg_type env ctxt rank ty
= do { impred <- xoptM LangExt.ImpredicativeTypes
; let rank' = case rank of -- Predictive => must be monotype
MustBeMonoType -> MustBeMonoType -- Monotype, regardless
_other | impred -> ArbitraryRank
| otherwise -> tyConArgMonoType
-- Make sure that MustBeMonoType is propagated,
-- so that we don't suggest -XImpredicativeTypes in
-- (Ord (forall a.a)) => a -> a
-- and so that if it Must be a monotype, we check that it is!
; check_type env ctxt rank' ty
; check_lifted ty }
-- NB the isUnLiftedType test also checks for
-- T State#
-- where there is an illegal partial application of State# (which has
-- kind * -> #); see Note [The kind invariant] in TyCoRep
----------------------------------------
forAllTyErr :: TidyEnv -> Rank -> Type -> (TidyEnv, SDoc)
forAllTyErr env rank ty
= ( env
, vcat [ hang herald 2 (ppr_tidy env ty)
, suggestion ] )
where
(tvs, _theta, _tau) = tcSplitSigmaTy ty
herald | null tvs = text "Illegal qualified type:"
| otherwise = text "Illegal polymorphic type:"
suggestion = case rank of
LimitedRank {} -> text "Perhaps you intended to use RankNTypes or Rank2Types"
MonoType d -> d
_ -> Outputable.empty -- Polytype is always illegal
forAllEscapeErr :: TidyEnv -> Type -> Kind -> (TidyEnv, SDoc)
forAllEscapeErr env ty tau_kind
= ( env
, hang (vcat [ text "Quantified type's kind mentions quantified type variable"
, text "(skolem escape)" ])
2 (vcat [ text " type:" <+> ppr_tidy env ty
, text "of kind:" <+> ppr_tidy env tau_kind ]) )
ubxArgTyErr :: TidyEnv -> Type -> (TidyEnv, SDoc)
ubxArgTyErr env ty = (env, sep [text "Illegal unboxed tuple type as function argument:", ppr_tidy env ty])
kindErr :: TidyEnv -> Kind -> (TidyEnv, SDoc)
kindErr env kind = (env, sep [text "Expecting an ordinary type, but found a type of kind", ppr_tidy env kind])
{-
Note [Liberal type synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If -XLiberalTypeSynonyms is on, expand closed type synonyms *before*
doing validity checking. This allows us to instantiate a synonym defn
with a for-all type, or with a partially-applied type synonym.
e.g. type T a b = a
type S m = m ()
f :: S (T Int)
Here, T is partially applied, so it's illegal in H98. But if you
expand S first, then T we get just
f :: Int
which is fine.
IMPORTANT: suppose T is a type synonym. Then we must do validity
  checking on an application (T ty1 ty2)
*either* before expansion (i.e. check ty1, ty2)
*or* after expansion (i.e. expand T ty1 ty2, and then check)
BUT NOT BOTH
If we do both, we get exponential behaviour!!
data TIACons1 i r c = c i ::: r c
type TIACons2 t x = TIACons1 t (TIACons1 t x)
type TIACons3 t x = TIACons2 t (TIACons1 t x)
type TIACons4 t x = TIACons2 t (TIACons2 t x)
type TIACons7 t x = TIACons4 t (TIACons3 t x)
************************************************************************
* *
\subsection{Checking a theta or source type}
* *
************************************************************************
Note [Implicit parameters in instance decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implicit parameters _only_ allowed in type signatures; not in instance
decls, superclasses etc. The reason for not allowing implicit params in
instances is a bit subtle. If we allowed
instance (?x::Int, Eq a) => Foo [a] where ...
then when we saw
(e :: (?x::Int) => t)
it would be unclear how to discharge all the potential uses of the ?x
in e. For example, a constraint Foo [Int] might come out of e, and
applying the instance decl would show up two uses of ?x. Trac #8912.
-}
checkValidTheta :: UserTypeCtxt -> ThetaType -> TcM ()
-- Assumes argument is fully zonked
checkValidTheta ctxt theta
= do { env <- tcInitOpenTidyEnv (tyCoVarsOfTypesList theta)
; addErrCtxtM (checkThetaCtxt ctxt theta) $
check_valid_theta env ctxt theta }
-------------------------
check_valid_theta :: TidyEnv -> UserTypeCtxt -> [PredType] -> TcM ()
check_valid_theta _ _ []
= return ()
check_valid_theta env ctxt theta
= do { dflags <- getDynFlags
; warnTcM (Reason Opt_WarnDuplicateConstraints)
(wopt Opt_WarnDuplicateConstraints dflags && notNull dups)
(dupPredWarn env dups)
; traceTc "check_valid_theta" (ppr theta)
; mapM_ (check_pred_ty env dflags ctxt) theta }
where
(_,dups) = removeDups cmpType theta
-------------------------
{- Note [Validity checking for constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We look through constraint synonyms so that we can see the underlying
constraint(s). For example
type Foo = ?x::Int
instance Foo => C T
We should reject the instance because it has an implicit parameter in
the context.
But we record, in 'under_syn', whether we have looked under a synonym
to avoid requiring language extensions at the use site. Main example
(Trac #9838):
{-# LANGUAGE ConstraintKinds #-}
module A where
type EqShow a = (Eq a, Show a)
module B where
import A
foo :: EqShow a => a -> String
We don't want to require ConstraintKinds in module B.
-}
check_pred_ty :: TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> TcM ()
-- Check the validity of a predicate in a signature
-- See Note [Validity checking for constraints]
check_pred_ty env dflags ctxt pred
= do { check_type env SigmaCtxt constraintMonoType pred
; check_pred_help False env dflags ctxt pred }
check_pred_help :: Bool -- True <=> under a type synonym
-> TidyEnv
-> DynFlags -> UserTypeCtxt
-> PredType -> TcM ()
check_pred_help under_syn env dflags ctxt pred
| Just pred' <- coreView pred -- Switch on under_syn when going under a
-- synonym (Trac #9838, yuk)
= check_pred_help True env dflags ctxt pred'
| otherwise
= case splitTyConApp_maybe pred of
Just (tc, tys)
| isTupleTyCon tc
-> check_tuple_pred under_syn env dflags ctxt pred tys
-- NB: this equality check must come first, because (~) is a class,
-- too.
| tc `hasKey` heqTyConKey ||
tc `hasKey` eqTyConKey ||
tc `hasKey` eqPrimTyConKey
-> check_eq_pred env dflags pred tc tys
| Just cls <- tyConClass_maybe tc
-> check_class_pred env dflags ctxt pred cls tys -- Includes Coercible
_ -> check_irred_pred under_syn env dflags ctxt pred
check_eq_pred :: TidyEnv -> DynFlags -> PredType -> TyCon -> [TcType] -> TcM ()
check_eq_pred env dflags pred tc tys
= -- Equational constraints are valid in all contexts if type
-- families are permitted
do { checkTc (length tys == tyConArity tc) (tyConArityErr tc tys)
; checkTcM (xopt LangExt.TypeFamilies dflags
|| xopt LangExt.GADTs dflags)
(eqPredTyErr env pred) }
check_tuple_pred :: Bool -> TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> [PredType] -> TcM ()
check_tuple_pred under_syn env dflags ctxt pred ts
= do { -- See Note [ConstraintKinds in predicates]
checkTcM (under_syn || xopt LangExt.ConstraintKinds dflags)
(predTupleErr env pred)
; mapM_ (check_pred_help under_syn env dflags ctxt) ts }
-- This case will not normally be executed because without
-- -XConstraintKinds tuple types are only kind-checked as *
check_irred_pred :: Bool -> TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> TcM ()
check_irred_pred under_syn env dflags ctxt pred
-- The predicate looks like (X t1 t2) or (x t1 t2) :: Constraint
-- where X is a type function
= do { -- If it looks like (x t1 t2), require ConstraintKinds
-- see Note [ConstraintKinds in predicates]
-- But (X t1 t2) is always ok because we just require ConstraintKinds
-- at the definition site (Trac #9838)
failIfTcM (not under_syn && not (xopt LangExt.ConstraintKinds dflags)
&& hasTyVarHead pred)
(predIrredErr env pred)
-- Make sure it is OK to have an irred pred in this context
-- See Note [Irreducible predicates in superclasses]
; failIfTcM (is_superclass ctxt
&& not (xopt LangExt.UndecidableInstances dflags)
&& has_tyfun_head pred)
(predSuperClassErr env pred) }
where
is_superclass ctxt = case ctxt of { ClassSCCtxt _ -> True; _ -> False }
has_tyfun_head ty
= case tcSplitTyConApp_maybe ty of
Just (tc, _) -> isTypeFamilyTyCon tc
Nothing -> False
{- Note [ConstraintKinds in predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't check for -XConstraintKinds under a type synonym, because that
was done at the type synonym definition site; see Trac #9838
e.g. module A where
type C a = (Eq a, Ix a) -- Needs -XConstraintKinds
module B where
import A
f :: C a => a -> a -- Does *not* need -XConstraintKinds
Note [Irreducible predicates in superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Allowing type-family calls in class superclasses is somewhat dangerous
because we can write:
type family Fooish x :: * -> Constraint
type instance Fooish () = Foo
class Fooish () a => Foo a where
This will cause the constraint simplifier to loop because every time we canonicalise a
(Foo a) class constraint we add a (Fooish () a) constraint which will be immediately
solved to add+canonicalise another (Foo a) constraint. -}
-------------------------
check_class_pred :: TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> Class -> [TcType] -> TcM ()
check_class_pred env dflags ctxt pred cls tys
| isIPClass cls
= do { check_arity
; checkTcM (okIPCtxt ctxt) (badIPPred env pred) }
| otherwise
= do { check_arity
; checkTcM arg_tys_ok (env, predTyVarErr (tidyType env pred)) }
where
check_arity = checkTc (classArity cls == length tys)
(tyConArityErr (classTyCon cls) tys)
flexible_contexts = xopt LangExt.FlexibleContexts dflags
undecidable_ok = xopt LangExt.UndecidableInstances dflags
arg_tys_ok = case ctxt of
SpecInstCtxt -> True -- {-# SPECIALISE instance Eq (T Int) #-} is fine
InstDeclCtxt -> checkValidClsArgs (flexible_contexts || undecidable_ok) cls tys
-- Further checks on head and theta
-- in checkInstTermination
_ -> checkValidClsArgs flexible_contexts cls tys
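-- Roughly, for illustration: without FlexibleContexts each class argument
-- here must be headed by a type variable, e.g. (Eq a) or (Monad (t m));
-- with FlexibleContexts something like (Eq (Maybe a)) is accepted too.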
-------------------------
okIPCtxt :: UserTypeCtxt -> Bool
-- See Note [Implicit parameters in instance decls]
okIPCtxt (FunSigCtxt {}) = True
okIPCtxt (InfSigCtxt {}) = True
okIPCtxt ExprSigCtxt = True
okIPCtxt TypeAppCtxt = True
okIPCtxt PatSigCtxt = True
okIPCtxt ResSigCtxt = True
okIPCtxt GenSigCtxt = True
okIPCtxt (ConArgCtxt {}) = True
okIPCtxt (ForSigCtxt {}) = True -- ??
okIPCtxt ThBrackCtxt = True
okIPCtxt GhciCtxt = True
okIPCtxt SigmaCtxt = True
okIPCtxt (DataTyCtxt {}) = True
okIPCtxt (PatSynBuilderCtxt {}) = True
okIPCtxt (TySynCtxt {}) = True -- e.g. type Blah = ?x::Int
-- Trac #11466
okIPCtxt (ClassSCCtxt {}) = False
okIPCtxt (InstDeclCtxt {}) = False
okIPCtxt (SpecInstCtxt {}) = False
okIPCtxt (RuleSigCtxt {}) = False
okIPCtxt DefaultDeclCtxt = False
badIPPred :: TidyEnv -> PredType -> (TidyEnv, SDoc)
badIPPred env pred
= ( env
, text "Illegal implicit parameter" <+> quotes (ppr_tidy env pred) )
{-
Note [Kind polymorphic type classes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MultiParam check:
class C f where... -- C :: forall k. k -> Constraint
instance C Maybe where...
The dictionary gets type [C * Maybe] even if it's not a MultiParam
type class.
Flexibility check:
class C f where... -- C :: forall k. k -> Constraint
data D a = D a
instance C D where
The dictionary gets type [C * (D *)]. IA0_TODO it should be
generalized actually.
-}
checkThetaCtxt :: UserTypeCtxt -> ThetaType -> TidyEnv -> TcM (TidyEnv, SDoc)
checkThetaCtxt ctxt theta env
= return ( env
, vcat [ text "In the context:" <+> pprTheta (tidyTypes env theta)
, text "While checking" <+> pprUserTypeCtxt ctxt ] )
eqPredTyErr, predTupleErr, predIrredErr, predSuperClassErr :: TidyEnv -> PredType -> (TidyEnv, SDoc)
eqPredTyErr env pred
= ( env
, text "Illegal equational constraint" <+> ppr_tidy env pred $$
parens (text "Use GADTs or TypeFamilies to permit this") )
predTupleErr env pred
= ( env
, hang (text "Illegal tuple constraint:" <+> ppr_tidy env pred)
2 (parens constraintKindsMsg) )
predIrredErr env pred
= ( env
, hang (text "Illegal constraint:" <+> ppr_tidy env pred)
2 (parens constraintKindsMsg) )
predSuperClassErr env pred
= ( env
, hang (text "Illegal constraint" <+> quotes (ppr_tidy env pred)
<+> text "in a superclass context")
2 (parens undecidableMsg) )
predTyVarErr :: PredType -> SDoc -- type is already tidied!
predTyVarErr pred
= vcat [ hang (text "Non type-variable argument")
2 (text "in the constraint:" <+> ppr pred)
, parens (text "Use FlexibleContexts to permit this") ]
constraintSynErr :: TidyEnv -> Type -> (TidyEnv, SDoc)
constraintSynErr env kind
= ( env
, hang (text "Illegal constraint synonym of kind:" <+> quotes (ppr_tidy env kind))
2 (parens constraintKindsMsg) )
dupPredWarn :: TidyEnv -> [[PredType]] -> (TidyEnv, SDoc)
dupPredWarn env dups
= ( env
, text "Duplicate constraint" <> plural primaryDups <> text ":"
<+> pprWithCommas (ppr_tidy env) primaryDups )
where
primaryDups = map head dups
tyConArityErr :: TyCon -> [TcType] -> SDoc
-- For type-constructor arity errors, be careful to report
-- the number of /visible/ arguments required and supplied,
-- ignoring the /invisible/ arguments, which the user does not see.
-- (e.g. Trac #10516)
tyConArityErr tc tks
= arityErr (tyConFlavour tc) (tyConName tc)
tc_type_arity tc_type_args
where
vis_tks = filterOutInvisibleTypes tc tks
-- tc_type_arity = number of *type* args expected
-- tc_type_args = number of *type* args encountered
tc_type_arity = count isVisibleBinder $ tyConBinders tc
tc_type_args = length vis_tks
arityErr :: Outputable a => String -> a -> Int -> Int -> SDoc
arityErr what name n m
= hsep [ text "The" <+> text what, quotes (ppr name), text "should have",
n_arguments <> comma, text "but has been given",
if m==0 then text "none" else int m]
where
n_arguments | n == 0 = text "no arguments"
| n == 1 = text "1 argument"
| True = hsep [int n, text "arguments"]
{-
************************************************************************
* *
\subsection{Checking for a decent instance head type}
* *
************************************************************************
@checkValidInstHead@ checks the type {\em and} its syntactic constraints:
it must normally look like: @instance Foo (Tycon a b c ...) ...@
The exceptions to this syntactic checking: (1)~if the @GlasgowExts@
flag is on, or (2)~the instance is imported (they must have been
compiled elsewhere). In these cases, we let them go through anyway.
We can also have instances for functions: @instance Foo (a -> b) ...@.
-}
checkValidInstHead :: UserTypeCtxt -> Class -> [Type] -> TcM ()
checkValidInstHead ctxt clas cls_args
= do { dflags <- getDynFlags
; mod <- getModule
; checkTc (getUnique clas `notElem` abstractClassKeys ||
nameModule (getName clas) == mod)
(instTypeErr clas cls_args abstract_class_msg)
-- Check language restrictions;
-- but not for SPECIALISE instance pragmas
; let ty_args = filterOutInvisibleTypes (classTyCon clas) cls_args
; unless spec_inst_prag $
do { checkTc (xopt LangExt.TypeSynonymInstances dflags ||
all tcInstHeadTyNotSynonym ty_args)
(instTypeErr clas cls_args head_type_synonym_msg)
; checkTc (xopt LangExt.FlexibleInstances dflags ||
all tcInstHeadTyAppAllTyVars ty_args)
(instTypeErr clas cls_args head_type_args_tyvars_msg)
; checkTc (xopt LangExt.MultiParamTypeClasses dflags ||
length ty_args == 1 || -- Only count type arguments
(xopt LangExt.NullaryTypeClasses dflags &&
null ty_args))
(instTypeErr clas cls_args head_one_type_msg) }
; mapM_ checkValidTypePat ty_args }
where
spec_inst_prag = case ctxt of { SpecInstCtxt -> True; _ -> False }
head_type_synonym_msg = parens (
text "All instance types must be of the form (T t1 ... tn)" $$
text "where T is not a synonym." $$
text "Use TypeSynonymInstances if you want to disable this.")
head_type_args_tyvars_msg = parens (vcat [
text "All instance types must be of the form (T a1 ... an)",
text "where a1 ... an are *distinct type variables*,",
text "and each type variable appears at most once in the instance head.",
text "Use FlexibleInstances if you want to disable this."])
head_one_type_msg = parens (
text "Only one type can be given in an instance head." $$
text "Use MultiParamTypeClasses if you want to allow more, or zero.")
abstract_class_msg =
text "Manual instances of this class are not permitted."
tcInstHeadTyNotSynonym :: Type -> Bool
-- Used in Haskell-98 mode, for the argument types of an instance head
-- These must not be type synonyms, but everywhere else type synonyms
-- are transparent, so we need a special function here
tcInstHeadTyNotSynonym ty
= case ty of -- Do not use splitTyConApp,
-- because that expands synonyms!
TyConApp tc _ -> not (isTypeSynonymTyCon tc)
_ -> True
tcInstHeadTyAppAllTyVars :: Type -> Bool
-- Used in Haskell-98 mode, for the argument types of an instance head
-- These must be a constructor applied to type variable arguments.
-- But we allow kind instantiations.
tcInstHeadTyAppAllTyVars ty
| Just (tc, tys) <- tcSplitTyConApp_maybe (dropCasts ty)
= ok (filterOutInvisibleTypes tc tys) -- avoid kinds
| otherwise
= False
where
-- Check that all the types are type variables,
-- and that each is distinct
ok tys = equalLength tvs tys && hasNoDups tvs
where
tvs = mapMaybe tcGetTyVar_maybe tys
dropCasts :: Type -> Type
-- See Note [Casts during validity checking]
-- This function can turn a well-kinded type into an ill-kinded
-- one, so I've kept it local to this module
-- To consider: drop only UnivCo(HoleProv) casts
dropCasts (CastTy ty _) = dropCasts ty
dropCasts (AppTy t1 t2) = mkAppTy (dropCasts t1) (dropCasts t2)
dropCasts (TyConApp tc tys) = mkTyConApp tc (map dropCasts tys)
dropCasts (ForAllTy b ty) = ForAllTy (dropCastsB b) (dropCasts ty)
dropCasts ty = ty -- LitTy, TyVarTy, CoercionTy
dropCastsB :: TyBinder -> TyBinder
dropCastsB (Anon ty) = Anon (dropCasts ty)
dropCastsB b = b -- Don't bother in the kind of a forall
abstractClassKeys :: [Unique]
abstractClassKeys = [ heqTyConKey
, eqTyConKey
, coercibleTyConKey
] -- See Note [Equality class instances]
instTypeErr :: Class -> [Type] -> SDoc -> SDoc
instTypeErr cls tys msg
= hang (hang (text "Illegal instance declaration for")
2 (quotes (pprClassPred cls tys)))
2 msg
{- Note [Casts during validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the (bogus)
instance Eq Char#
We elaborate to 'Eq (Char# |> UnivCo(hole))' where the hole is an
insoluble equality constraint for * ~ #. We'll report the insoluble
constraint separately, but we don't want to *also* complain that Eq is
not applied to a type constructor. So we gaily look through
CastTys here.
Another example: Eq (Either a). Then we actually get a cast in
the middle:
Eq ((Either |> g) a)
Note [Valid 'deriving' predicate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
validDerivPred checks for OK 'deriving' context. See Note [Exotic
derived instance contexts] in TcDeriv. However the predicate is
here because it uses sizeTypes, fvTypes.
It checks for three things
* No repeated variables (hasNoDups fvs)
* No type constructors. This is done by comparing
sizeTypes tys == length (fvTypes tys)
sizeTypes counts variables and constructors; fvTypes returns variables.
So if they are the same, there must be no constructors. But there
might be applications thus (f (g x)).
Note that tys only includes the visible arguments of the class type
constructor. Including the non-visible arguments can cause the following,
perfectly valid instance to be rejected:
class Category (cat :: k -> k -> *) where ...
newtype T (c :: * -> * -> *) a b = MkT (c a b)
instance Category c => Category (T c) where ...
since the first argument to Category is a non-visible *, which sizeTypes
would count as a constructor! See Trac #11833.
* Also check for a bizarre corner case, when the derived instance decl
would look like
instance C a b => D (T a) where ...
Note that 'b' isn't a parameter of T. This gives rise to all sorts of
problems; in particular, it's hard to compare solutions for equality
when finding the fixpoint, and that means the inferContext loop does
not converge. See Trac #5287.
Note [Equality class instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We can't have users writing instances for the equality classes. But we
still need to be able to write instances for them ourselves. So we allow
instances only in the defining module.
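For example (an illustration only; the type 'Age' is made up): a user-written
instance Coercible Int Age
in any module other than the defining one is rejected with
"Manual instances of this class are not permitted."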
-}
validDerivPred :: TyVarSet -> PredType -> Bool
-- See Note [Valid 'deriving' predicate]
validDerivPred tv_set pred
= case classifyPredType pred of
ClassPred cls tys -> cls `hasKey` typeableClassKey
-- Typeable constraints are bigger than they appear due
-- to kind polymorphism, but that's OK
|| check_tys cls tys
EqPred {} -> False -- reject equality constraints
_ -> True -- Non-class predicates are ok
where
check_tys cls tys
= hasNoDups fvs
-- use sizePred to ignore implicit args
&& sizePred pred == fromIntegral (length fvs)
&& all (`elemVarSet` tv_set) fvs
where tys' = filterOutInvisibleTypes (classTyCon cls) tys
fvs = fvTypes tys'
{-
************************************************************************
* *
\subsection{Checking instance for termination}
* *
************************************************************************
-}
checkValidInstance :: UserTypeCtxt -> LHsSigType Name -> Type
-> TcM ([TyVar], ThetaType, Class, [Type])
checkValidInstance ctxt hs_type ty
| Just (clas,inst_tys) <- getClassPredTys_maybe tau
, inst_tys `lengthIs` classArity clas
= do { setSrcSpan head_loc (checkValidInstHead ctxt clas inst_tys)
; checkValidTheta ctxt theta
-- The Termination and Coverage Conditions
-- Check that instance inference will terminate (if we care)
-- For Haskell 98 this will already have been done by checkValidTheta,
-- but as we may be using other extensions we need to check.
--
-- Note that the Termination Condition is *more conservative* than
-- the checkAmbiguity test we do on other type signatures
-- e.g. Bar a => Bar Int is ambiguous, but it also fails
-- the termination condition, because 'a' appears more often
-- in the constraint than in the head
; undecidable_ok <- xoptM LangExt.UndecidableInstances
; traceTc "cvi" (ppr undecidable_ok $$ ppr ty)
; if undecidable_ok
then checkAmbiguity ctxt ty
else checkInstTermination inst_tys theta
; case (checkInstCoverage undecidable_ok clas theta inst_tys) of
IsValid -> return () -- Check succeeded
NotValid msg -> addErrTc (instTypeErr clas inst_tys msg)
; return (tvs, theta, clas, inst_tys) }
| otherwise
= failWithTc (text "Malformed instance head:" <+> ppr tau)
where
(tvs, theta, tau) = tcSplitSigmaTy ty
-- The location of the "head" of the instance
head_loc = getLoc (getLHsInstDeclHead hs_type)
{-
Note [Paterson conditions]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Termination test: the so-called "Paterson conditions" (see Section 5 of
"Understanding functional dependencies via Constraint Handling Rules,
JFP Jan 2007).
We check that each assertion in the context satisfies:
(1) no variable has more occurrences in the assertion than in the head, and
(2) the assertion has fewer constructors and variables (taken together
and counting repetitions) than the head.
This is only needed with -fglasgow-exts, as Haskell 98 restrictions
(which have already been checked) guarantee termination.
The underlying idea is that
for any ground substitution, each assertion in the
context has fewer type constructors than the head.
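For illustration (made-up examples, not from this module):
instance D a a => C [a] -- violates (1): 'a' occurs twice in the assertion
instance C [[a]] => C [a] -- violates (2): the assertion is no smaller than the head
Both need UndecidableInstances to be accepted.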
-}
checkInstTermination :: [TcType] -> ThetaType -> TcM ()
-- See Note [Paterson conditions]
checkInstTermination tys theta
= check_preds theta
where
head_fvs = fvTypes tys
head_size = sizeTypes tys
check_preds :: [PredType] -> TcM ()
check_preds preds = mapM_ check preds
check :: PredType -> TcM ()
check pred
= case classifyPredType pred of
EqPred {} -> return () -- See Trac #4200.
IrredPred {} -> check2 pred (sizeType pred)
ClassPred cls tys
| isTerminatingClass cls
-> return ()
| isCTupleClass cls -- Look inside tuple predicates; Trac #8359
-> check_preds tys
| otherwise
-> check2 pred (sizeTypes $ filterOutInvisibleTypes (classTyCon cls) tys)
-- Other ClassPreds
check2 pred pred_size
| not (null bad_tvs) = addErrTc (noMoreMsg bad_tvs what)
| pred_size >= head_size = addErrTc (smallerMsg what)
| otherwise = return ()
where
what = text "constraint" <+> quotes (ppr pred)
bad_tvs = fvType pred \\ head_fvs
smallerMsg :: SDoc -> SDoc
smallerMsg what
= vcat [ hang (text "The" <+> what)
2 (text "is no smaller than the instance head")
, parens undecidableMsg ]
noMoreMsg :: [TcTyVar] -> SDoc -> SDoc
noMoreMsg tvs what
= vcat [ hang (text "Variable" <> plural tvs <+> quotes (pprWithCommas ppr tvs)
<+> occurs <+> text "more often")
2 (sep [ text "in the" <+> what
, text "than in the instance head" ])
, parens undecidableMsg ]
where
occurs = if isSingleton tvs then text "occurs"
else text "occur"
undecidableMsg, constraintKindsMsg :: SDoc
undecidableMsg = text "Use UndecidableInstances to permit this"
constraintKindsMsg = text "Use ConstraintKinds to permit this"
{-
Note [Associated type instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We allow this:
class C a where
type T x a
instance C Int where
type T (S y) Int = y
type T Z Int = Char
Note that
a) The variable 'x' is not bound by the class decl
b) 'x' is instantiated to a non-type-variable in the instance
c) There are several type instance decls for T in the instance
All this is fine. Of course, you can't give any *more* instances
for (T ty Int) elsewhere, because it's an *associated* type.
Note [Checking consistent instantiation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class C a b where
type T a x b
instance C [p] Int
type T [p] y Int = (p,y,y) -- Induces the family instance TyCon
-- type TR p y = (p,y,y)
So we
* Form the mini-envt from the class type variables a,b
to the instance decl types [p],Int: [a->[p], b->Int]
* Look at the tyvars a,x,b of the type family constructor T
(it shares tyvars with the class C)
* Apply the mini-envt to them, and check that the result is
consistent with the instance types [p] y Int
We do *not* assume (at this point) that the bound variables of
the associated type instance decl are the same as for the parent
instance decl. So, for example,
instance C [p] Int
type T [q] y Int = ...
would work equally well. Reason: making the *kind* variables line
up is much harder. Example (Trac #7282):
class Foo (xs :: [k]) where
type Bar xs :: *
instance Foo '[] where
type Bar '[] = Int
Here the instance decl really looks like
instance Foo k ('[] k) where
type Bar k ('[] k) = Int
but the k's are not scoped, and hence won't match Uniques.
So instead we just match structure, with tcMatchTyX, and check
that distinct type variables match 1-1 with distinct type variables.
HOWEVER, we *still* make the instance type variables scope over the
type instances, to pick up non-obvious kinds. Eg
class Foo (a :: k) where
type F a
instance Foo (b :: k -> k) where
type F b = Int
Here the instance is kind-indexed and really looks like
type F (k->k) (b::k->k) = Int
But if the 'b' didn't scope, we would make F's instance too
poly-kinded.
-}
-- | Extra information needed when type-checking associated types. The 'Class' is
-- the enclosing class, and the @VarEnv Type@ maps class variables to their
-- instance types.
type ClsInfo = (Class, VarEnv Type)
checkConsistentFamInst
:: Maybe ClsInfo
-> TyCon -- ^ Family tycon
-> [TyVar] -- ^ Type variables of the family instance
-> [Type] -- ^ Type patterns from instance
-> TcM ()
-- See Note [Checking consistent instantiation]
checkConsistentFamInst Nothing _ _ _ = return ()
checkConsistentFamInst (Just (clas, mini_env)) fam_tc at_tvs at_tys
= do { -- Check that the associated type indeed comes from this class
checkTc (Just clas == tyConAssoc_maybe fam_tc)
(badATErr (className clas) (tyConName fam_tc))
-- See Note [Checking consistent instantiation]
-- Check right to left, so that we spot type variable
-- inconsistencies before (more confusing) kind variables
; checkTc (check_args emptyTCvSubst (fam_tc_tvs `zip` at_tys))
(wrongATArgErr fam_tc expected_args at_tys) }
where
fam_tc_tvs = tyConTyVars fam_tc
expected_args = zipWith pick fam_tc_tvs at_tys
pick fam_tc_tv at_ty = case lookupVarEnv mini_env fam_tc_tv of
Just inst_ty -> inst_ty
Nothing -> at_ty
check_args :: TCvSubst -> [(TyVar,Type)] -> Bool
check_args subst ((fam_tc_tv, at_ty) : rest)
| Just inst_ty <- lookupVarEnv mini_env fam_tc_tv
= case tcMatchTyX subst at_ty inst_ty of
Just subst -> check_args subst rest
Nothing -> False
| otherwise
= check_args subst rest
check_args subst []
= check_tvs subst [] at_tvs
check_tvs :: TCvSubst -> [TyVar] -> [TyVar] -> Bool
check_tvs _ _ [] = True -- OK!!
check_tvs subst acc (tv:tvs)
| Just ty <- lookupTyVar subst tv
= case tcGetTyVar_maybe ty of
Nothing -> False
Just tv' | tv' `elem` acc -> False
| otherwise -> check_tvs subst (tv' : acc) tvs
| otherwise
= check_tvs subst acc tvs
{-
check_arg :: (TyVar, Type) -> TCvSubst -> TcM TCvSubst
check_arg (fam_tc_tv, at_ty) subst
| Just inst_ty <- lookupVarEnv mini_env fam_tc_tv
= case tcMatchTyX subst at_ty inst_ty of
Just subst -> return subst
Nothing -> failWithTc $ wrongATArgErr at_ty inst_ty
-- No need to instantiate here, because the axiom
-- uses the same type variables as the associated class
| otherwise
= return subst -- Allow non-type-variable instantiation
-- See Note [Associated type instances]
check_distinct :: TCvSubst -> TcM ()
-- True if all the variables mapped the substitution
-- map to *distinct* type *variables*
check_distinct subst = go [] at_tvs
where
go _ [] = return ()
go acc (tv:tvs) = case lookupTyVar subst tv of
Nothing -> go acc tvs
Just ty | Just tv' <- tcGetTyVar_maybe ty
, tv' `notElem` acc
-> go (tv' : acc) tvs
_other -> addErrTc (dupTyVar tv)
-}
badATErr :: Name -> Name -> SDoc
badATErr clas op
= hsep [text "Class", quotes (ppr clas),
text "does not have an associated type", quotes (ppr op)]
wrongATArgErr :: TyCon -> [Type] -> [Type] -> SDoc
wrongATArgErr fam_tc expected_args actual_args
= vcat [ text "Type indexes must match class instance head"
, text "Expected:" <+> ppr (mkTyConApp fam_tc expected_args)
, text "Actual: " <+> ppr (mkTyConApp fam_tc actual_args) ]
{-
************************************************************************
* *
Checking type instance well-formedness and termination
* *
************************************************************************
-}
checkValidCoAxiom :: CoAxiom Branched -> TcM ()
checkValidCoAxiom ax@(CoAxiom { co_ax_tc = fam_tc, co_ax_branches = branches })
= do { mapM_ (checkValidCoAxBranch Nothing fam_tc) branch_list
; foldlM_ check_branch_compat [] branch_list }
where
branch_list = fromBranches branches
injectivity = familyTyConInjectivityInfo fam_tc
check_branch_compat :: [CoAxBranch] -- previous branches in reverse order
-> CoAxBranch -- current branch
-> TcM [CoAxBranch]-- current branch : previous branches
-- Check for
-- (a) this branch is dominated by previous ones
-- (b) failure of injectivity
check_branch_compat prev_branches cur_branch
| cur_branch `isDominatedBy` prev_branches
= do { addWarnAt NoReason (coAxBranchSpan cur_branch) $
inaccessibleCoAxBranch ax cur_branch
; return prev_branches }
| otherwise
= do { check_injectivity prev_branches cur_branch
; return (cur_branch : prev_branches) }
-- Injectivity check: check whether a new (CoAxBranch) can extend
-- already checked equations without violating injectivity
-- annotation supplied by the user.
-- See Note [Verifying injectivity annotation] in FamInstEnv
check_injectivity prev_branches cur_branch
| Injective inj <- injectivity
= do { let conflicts =
fst $ foldl (gather_conflicts inj prev_branches cur_branch)
([], 0) prev_branches
; mapM_ (\(err, span) -> setSrcSpan span $ addErr err)
(makeInjectivityErrors ax cur_branch inj conflicts) }
| otherwise
= return ()
gather_conflicts inj prev_branches cur_branch (acc, n) branch
-- n is 0-based index of branch in prev_branches
= case injectiveBranches inj cur_branch branch of
InjectivityUnified ax1 ax2
| ax1 `isDominatedBy` (replace_br prev_branches n ax2)
-> (acc, n + 1)
| otherwise
-> (branch : acc, n + 1)
InjectivityAccepted -> (acc, n + 1)
-- Replace n-th element in the list. Assumes 0-based indexing.
replace_br :: [CoAxBranch] -> Int -> CoAxBranch -> [CoAxBranch]
replace_br brs n br = take n brs ++ [br] ++ drop (n+1) brs
-- Check that a "type instance" is well-formed (which includes decidability
-- unless -XUndecidableInstances is given).
--
checkValidCoAxBranch :: Maybe ClsInfo
-> TyCon -> CoAxBranch -> TcM ()
checkValidCoAxBranch mb_clsinfo fam_tc
(CoAxBranch { cab_tvs = tvs, cab_cvs = cvs
, cab_lhs = typats
, cab_rhs = rhs, cab_loc = loc })
= checkValidTyFamEqn mb_clsinfo fam_tc tvs cvs typats rhs loc
-- | Do validity checks on a type family equation, including consistency
-- with any enclosing class instance head, termination, and lack of
-- polytypes.
checkValidTyFamEqn :: Maybe ClsInfo
-> TyCon -- ^ of the type family
-> [TyVar] -- ^ bound tyvars in the equation
-> [CoVar] -- ^ bound covars in the equation
-> [Type] -- ^ type patterns
-> Type -- ^ rhs
-> SrcSpan
-> TcM ()
checkValidTyFamEqn mb_clsinfo fam_tc tvs cvs typats rhs loc
= setSrcSpan loc $
do { checkValidFamPats mb_clsinfo fam_tc tvs cvs typats
-- The argument patterns, and RHS, are all boxed tau types
-- E.g Reject type family F (a :: k1) :: k2
-- type instance F (forall a. a->a) = ...
-- type instance F Int# = ...
-- type instance F Int = forall a. a->a
-- type instance F Int = Int#
-- See Trac #9357
; checkValidMonoType rhs
; check_lifted rhs
-- We have a decidable instance unless otherwise permitted
; undecidable_ok <- xoptM LangExt.UndecidableInstances
; unless undecidable_ok $
mapM_ addErrTc (checkFamInstRhs typats (tcTyFamInsts rhs)) }
-- Make sure that each type family application is
-- (1) strictly smaller than the lhs,
-- (2) mentions no type variable more often than the lhs, and
-- (3) does not contain any further type family instances.
--
checkFamInstRhs :: [Type] -- lhs
-> [(TyCon, [Type])] -- type family instances
-> [MsgDoc]
checkFamInstRhs lhsTys famInsts
= mapMaybe check famInsts
where
size = sizeTypes lhsTys
fvs = fvTypes lhsTys
check (tc, tys)
| not (all isTyFamFree tys) = Just (nestedMsg what)
| not (null bad_tvs) = Just (noMoreMsg bad_tvs what)
| size <= sizeTypes tys = Just (smallerMsg what)
| otherwise = Nothing
where
what = text "type family application" <+> quotes (pprType (TyConApp tc tys))
bad_tvs = fvTypes tys \\ fvs
checkValidFamPats :: Maybe ClsInfo -> TyCon -> [TyVar] -> [CoVar] -> [Type] -> TcM ()
-- Patterns in a 'type instance' or 'data instance' decl should
-- a) contain no type family applications
-- (vanilla synonyms are fine, though)
-- b) properly bind all their free type variables
-- e.g. we disallow (Trac #7536)
-- type T a = Int
-- type instance F (T a) = a
-- c) Have the right number of patterns
-- d) For associated types, are consistently instantiated
checkValidFamPats mb_clsinfo fam_tc tvs cvs ty_pats
= do { -- A family instance must have exactly the same number of type
-- parameters as the family declaration. You can't write
-- type family F a :: * -> *
-- type instance F Int y = y
-- because then the type (F Int) would be like (\y.y)
checkTc (length ty_pats == fam_arity) $
wrongNumberOfParmsErr (fam_arity - count isInvisibleBinder fam_bndrs)
-- report only explicit arguments
; mapM_ checkValidTypePat ty_pats
; let unbound_tcvs = filterOut (`elemVarSet` exactTyCoVarsOfTypes ty_pats) (tvs ++ cvs)
; checkTc (null unbound_tcvs) (famPatErr fam_tc unbound_tcvs ty_pats)
-- Check that type patterns match the class instance head
; checkConsistentFamInst mb_clsinfo fam_tc tvs ty_pats }
where
fam_arity = tyConArity fam_tc
fam_bndrs = take fam_arity $ fst $ splitPiTys (tyConKind fam_tc)
checkValidTypePat :: Type -> TcM ()
-- Used for type patterns in class instances,
-- and in type/data family instances
checkValidTypePat pat_ty
= do { -- Check that pat_ty is a monotype
checkValidMonoType pat_ty
-- One could imagine generalising to allow
-- instance C (forall a. a->a)
-- but we don't know what all the consequences might be
-- Ensure that no type family instances occur in a type pattern
; checkTc (isTyFamFree pat_ty) $
tyFamInstIllegalErr pat_ty
; check_lifted pat_ty }
isTyFamFree :: Type -> Bool
-- ^ Check that a type does not contain any type family applications.
isTyFamFree = null . tcTyFamInsts
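-- e.g. isTyFamFree is True for (Maybe Int) but False for (F Int)
-- when F is a type family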
-- Error messages
wrongNumberOfParmsErr :: Arity -> SDoc
wrongNumberOfParmsErr exp_arity
= text "Number of parameters must match family declaration; expected"
<+> ppr exp_arity
inaccessibleCoAxBranch :: CoAxiom br -> CoAxBranch -> SDoc
inaccessibleCoAxBranch fi_ax cur_branch
= text "Type family instance equation is overlapped:" $$
nest 2 (pprCoAxBranch fi_ax cur_branch)
tyFamInstIllegalErr :: Type -> SDoc
tyFamInstIllegalErr ty
= hang (text "Illegal type synonym family application in instance" <>
colon) 2 $
ppr ty
nestedMsg :: SDoc -> SDoc
nestedMsg what
= sep [ text "Illegal nested" <+> what
, parens undecidableMsg ]
famPatErr :: TyCon -> [TyVar] -> [Type] -> SDoc
famPatErr fam_tc tvs pats
= hang (text "Family instance purports to bind type variable" <> plural tvs
<+> pprQuotedList tvs)
2 (hang (text "but the real LHS (expanding synonyms) is:")
2 (pprTypeApp fam_tc (map expandTypeSynonyms pats) <+>
text "= ..."))
{-
************************************************************************
* *
Telescope checking
* *
************************************************************************
Note [Bad telescopes]
~~~~~~~~~~~~~~~~~~~~~
Now that we can mix type and kind variables, there are an awful lot of
ways to shoot yourself in the foot. Here are some.
data SameKind :: k -> k -> * -- just to force unification
1. data T1 a k (b :: k) (x :: SameKind a b)
The problem here is that we discover that a and b should have the same
kind. But this kind mentions k, which is bound *after* a.
(Testcase: dependent/should_fail/BadTelescope)
2. data T2 a (c :: Proxy b) (d :: Proxy a) (x :: SameKind b d)
Note that b is not bound. Yet its kind mentions a. Because we have
a nice rule that all implicitly bound variables come before others,
this is bogus. (We could probably figure out to put b between a and c.
But I think this is doing users a disservice, in the long run.)
(Testcase: dependent/should_fail/BadTelescope4)
3. t3 :: forall a. (forall k (b :: k). SameKind a b) -> ()
This is a straightforward skolem escape. Note that a and b need to have
the same kind.
(Testcase: polykinds/T11142)
How do we deal with all of this? For TyCons, we have checkValidTyConTyVars.
That function looks to see if any of the tyConTyVars are repeated, but
it's really a telescope check. It works because all tycons are kind-generalized.
If there is a bad telescope, the kind-generalization will end up generalizing
over a variable bound later in the telescope.
For non-tycons, we do scope checking when we bring tyvars into scope,
in tcImplicitTKBndrs and tcExplicitTKBndrs. Note that we also have to
sort implicit binders into a well-scoped order whenever we have implicit
binders to worry about. This is done in quantifyTyVars and in
tcImplicitTKBndrs.
-}
-- | Check a list of binders to see if they make a valid telescope.
-- The key property we're checking for is scoping. For example:
-- > data SameKind :: k -> k -> *
-- > data X a k (b :: k) (c :: SameKind a b)
-- Kind inference says that a's kind should be k. But that's impossible,
-- because k isn't in scope when a is bound. This check has to come before
-- general validity checking, because once we kind-generalise, this sort
-- of problem is harder to spot (as we'll generalise over the unbound
-- k in a's type.) See also Note [Bad telescopes].
checkValidTelescope :: SDoc -- the original user-written telescope
-> [TyVar] -- explicit vars (not necessarily zonked)
-> SDoc -- note to put at bottom of message
-> TcM ()
checkValidTelescope hs_tvs orig_tvs extra
= discardResult $ checkZonkValidTelescope hs_tvs orig_tvs extra
-- | Like 'checkZonkValidTelescope', but returns the zonked tyvars
checkZonkValidTelescope :: SDoc
-> [TyVar]
-> SDoc
-> TcM [TyVar]
checkZonkValidTelescope hs_tvs orig_tvs extra
= do { orig_tvs <- mapM zonkTyCoVarKind orig_tvs
; let (_, sorted_tidied_tvs) = tidyTyCoVarBndrs emptyTidyEnv $
toposortTyVars orig_tvs
; unless (go [] emptyVarSet orig_tvs) $
addErr $
vcat [ hang (text "These kind and type variables:" <+> hs_tvs $$
text "are out of dependency order. Perhaps try this ordering:")
2 (sep (map pprTvBndr sorted_tidied_tvs))
, extra ]
; return orig_tvs }
where
go :: [TyVar] -- misplaced variables
-> TyVarSet -> [TyVar] -> Bool
go errs in_scope [] = null (filter (`elemVarSet` in_scope) errs)
-- report an error only when the variable in the kind is brought
-- into scope later in the telescope. Otherwise, we'll just quantify
-- over it in kindGeneralize, as we should.
go errs in_scope (tv:tvs)
= let bad_tvs = filterOut (`elemVarSet` in_scope) $
tyCoVarsOfTypeList (tyVarKind tv)
in go (bad_tvs ++ errs) (in_scope `extendVarSet` tv) tvs
-- | After inferring kinds of type variables, check to make sure that the
-- inferred kinds do not mention any of the type variables bound in a smaller scope.
-- This is a skolem escape check. See also Note [Bad telescopes].
checkValidInferredKinds :: [TyVar] -- ^ vars to check (zonked)
-> TyVarSet -- ^ vars out of scope
-> SDoc -- ^ suffix to error message
-> TcM ()
checkValidInferredKinds orig_kvs out_of_scope extra
= do { let bad_pairs = [ (tv, kv)
| kv <- orig_kvs
, Just tv <- map (lookupVarSet out_of_scope)
(tyCoVarsOfTypeList (tyVarKind kv)) ]
report (tidyTyVarOcc env -> tv, tidyTyVarOcc env -> kv)
= addErr $
text "The kind of variable" <+>
quotes (ppr kv) <> text ", namely" <+>
quotes (ppr (tyVarKind kv)) <> comma $$
text "depends on variable" <+>
quotes (ppr tv) <+> text "from an inner scope" $$
text "Perhaps bind" <+> quotes (ppr kv) <+>
text "sometime after binding" <+>
quotes (ppr tv) $$
extra
; mapM_ report bad_pairs }
where
(env1, _) = tidyTyCoVarBndrs emptyTidyEnv orig_kvs
(env, _) = tidyTyCoVarBndrs env1 (varSetElems out_of_scope)
{-
************************************************************************
* *
\subsection{Auxiliary functions}
* *
************************************************************************
-}
-- Free variables of a type, retaining repetitions, and expanding synonyms
fvType :: Type -> [TyCoVar]
fvType ty | Just exp_ty <- coreView ty = fvType exp_ty
fvType (TyVarTy tv) = [tv]
fvType (TyConApp _ tys) = fvTypes tys
fvType (LitTy {}) = []
fvType (AppTy fun arg) = fvType fun ++ fvType arg
fvType (ForAllTy bndr ty)
= fvType (binderType bndr) ++
caseBinder bndr (\tv -> filter (/= tv)) (const id) (fvType ty)
fvType (CastTy ty co) = fvType ty ++ fvCo co
fvType (CoercionTy co) = fvCo co
fvTypes :: [Type] -> [TyVar]
fvTypes tys = concat (map fvType tys)
fvCo :: Coercion -> [TyCoVar]
fvCo (Refl _ ty) = fvType ty
fvCo (TyConAppCo _ _ args) = concatMap fvCo args
fvCo (AppCo co arg) = fvCo co ++ fvCo arg
fvCo (ForAllCo tv h co) = filter (/= tv) (fvCo co) ++ fvCo h
fvCo (CoVarCo v) = [v]
fvCo (AxiomInstCo _ _ args) = concatMap fvCo args
fvCo (UnivCo p _ t1 t2) = fvProv p ++ fvType t1 ++ fvType t2
fvCo (SymCo co) = fvCo co
fvCo (TransCo co1 co2) = fvCo co1 ++ fvCo co2
fvCo (NthCo _ co) = fvCo co
fvCo (LRCo _ co) = fvCo co
fvCo (InstCo co arg) = fvCo co ++ fvCo arg
fvCo (CoherenceCo co1 co2) = fvCo co1 ++ fvCo co2
fvCo (KindCo co) = fvCo co
fvCo (SubCo co) = fvCo co
fvCo (AxiomRuleCo _ cs) = concatMap fvCo cs
fvProv :: UnivCoProvenance -> [TyCoVar]
fvProv UnsafeCoerceProv = []
fvProv (PhantomProv co) = fvCo co
fvProv (ProofIrrelProv co) = fvCo co
fvProv (PluginProv _) = []
fvProv (HoleProv h) = pprPanic "fvProv falls into a hole" (ppr h)
sizeType :: Type -> Int
-- Size of a type: the number of variables and constructors
sizeType ty | Just exp_ty <- coreView ty = sizeType exp_ty
sizeType (TyVarTy {}) = 1
sizeType (TyConApp _ tys) = sizeTypes tys + 1
sizeType (LitTy {}) = 1
sizeType (AppTy fun arg) = sizeType fun + sizeType arg
sizeType (ForAllTy (Anon arg) res)
= sizeType arg + sizeType res + 1
sizeType (ForAllTy (Named {}) ty)
= sizeType ty
sizeType (CastTy ty _) = sizeType ty
sizeType (CoercionTy _) = 1
sizeTypes :: [Type] -> Int
sizeTypes = sum . map sizeType
-- Size of a predicate
--
-- We are considering whether class constraints terminate.
-- Equality constraints and constraints for the implicit
-- parameter class always terminate so it is safe to say "size 0".
-- (Implicit parameter constraints always terminate because
-- there are no instances for them---they are only solved by
-- "local instances" in expressions).
-- See Trac #4200.
sizePred :: PredType -> Int
sizePred ty = goClass ty
where
goClass p = go (classifyPredType p)
go (ClassPred cls tys')
| isTerminatingClass cls = 0
| otherwise = sizeTypes (filterOutInvisibleTypes (classTyCon cls) tys')
go (EqPred {}) = 0
go (IrredPred ty) = sizeType ty
-- | When this says "True", ignore this class constraint during
-- a termination check
isTerminatingClass :: Class -> Bool
isTerminatingClass cls
= isIPClass cls
|| cls `hasKey` typeableClassKey
|| cls `hasKey` coercibleTyConKey
|| cls `hasKey` eqTyConKey
|| cls `hasKey` heqTyConKey
-- | Tidy before printing a type
ppr_tidy :: TidyEnv -> Type -> SDoc
ppr_tidy env ty = pprType (tidyType env ty)
| GaloisInc/halvm-ghc | compiler/typecheck/TcValidity.hs | bsd-3-clause | 76,553 | 4 | 24 | 21,428 | 11,589 | 5,938 | 5,651 | 853 | 15 |
module YOLP.Base where
import Network.HTTP.Conduit (Request)
class Requestable a where
toRequest :: a -> Request
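-- A minimal usage sketch ('WeatherRequest' is a hypothetical instance type;
-- 'httpLbs', 'Manager' and 'Response' come from Network.HTTP.Conduit):
--
-- > fetch :: Manager -> WeatherRequest -> IO (Response ByteString)
-- > fetch mgr = flip httpLbs mgr . toRequest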
data Output = XmlOut | JsonOut
instance Show Output where
show XmlOut = "xml"
show JsonOut = "json"
data YOLPError = GeocodeError | WeatherError deriving (Show) -- FIXME
| lesguillemets/rainfall-vim-hs | src/YOLP/Base.hs | bsd-3-clause | 298 | 0 | 7 | 58 | 87 | 49 | 38 | 9 | 0 |
module Media.MpegTs.Pes (
) where
import Data.Bits
{-
PES Stream ID Values
-}
data StreamID = PROGRAM_STREAM_MAP
| PRIVATE_STREAM_1
| PADDING_STREAM
| PRIVATE_STREAM_2
| AUDIO_STREAM
| VIDEO_STREAM
| ECM_STREAM
| EMM_STREAM
| ISO_IEC_13818_6_DSMCC_STREAM
| ISO_IEC_13522_STREAM
| ITU_REC_H222_1_TYPE_A
| ITU_REC_H222_1_TYPE_B
| ITU_REC_H222_1_TYPE_C
| ITU_REC_H222_1_TYPE_D
| ITU_REC_H222_1_TYPE_E
| ANCILLARY_STREAM
| ISO_IEC_14496_1_SL_PACKETIZED_STREAM
| ISO_IEC_14496_1_FLEXMUX_STREAM
| METADATA_STREAM
| EXTENDED_STREAM_ID
| RESERVED_DATA_STREAM
| PROGRAM_STREAM_DIRECTORY
| INVALID
deriving (Show, Eq, Ord, Bounded)
{-
PES Stream Id Enum Conversions
-}
instance Enum StreamID where
toEnum 0xBC = PROGRAM_STREAM_MAP
toEnum 0xBD = PRIVATE_STREAM_1
toEnum 0xBE = PADDING_STREAM
toEnum 0xBF = PRIVATE_STREAM_2
toEnum 0xF0 = ECM_STREAM
toEnum 0xF1 = EMM_STREAM
toEnum 0xF2 = ISO_IEC_13818_6_DSMCC_STREAM
toEnum 0xF3 = ISO_IEC_13522_STREAM
toEnum 0xF4 = ITU_REC_H222_1_TYPE_A
toEnum 0xF5 = ITU_REC_H222_1_TYPE_B
toEnum 0xF6 = ITU_REC_H222_1_TYPE_C
toEnum 0xF7 = ITU_REC_H222_1_TYPE_D
toEnum 0xF8 = ITU_REC_H222_1_TYPE_E
toEnum 0xF9 = ANCILLARY_STREAM
toEnum 0xFA = ISO_IEC_14496_1_SL_PACKETIZED_STREAM
toEnum 0xFB = ISO_IEC_14496_1_FLEXMUX_STREAM
toEnum 0xFC = METADATA_STREAM
toEnum 0xFD = EXTENDED_STREAM_ID
toEnum 0xFE = RESERVED_DATA_STREAM
toEnum 0xFF = PROGRAM_STREAM_DIRECTORY
toEnum id
| audio = AUDIO_STREAM
| video = VIDEO_STREAM
| otherwise = INVALID
where
audio = (id .&. 0xE0) == 0xC0
video = (id .&. 0xF0) == 0xE0
fromEnum PROGRAM_STREAM_MAP = 0xBC
fromEnum PRIVATE_STREAM_1 = 0xBD
fromEnum PADDING_STREAM = 0xBE
fromEnum PRIVATE_STREAM_2 = 0xBF
fromEnum AUDIO_STREAM = 0xC0
fromEnum VIDEO_STREAM = 0xE0
fromEnum ECM_STREAM = 0xF0
fromEnum EMM_STREAM = 0xF1
fromEnum ISO_IEC_13818_6_DSMCC_STREAM = 0xF2
fromEnum ISO_IEC_13522_STREAM = 0xF3
fromEnum ITU_REC_H222_1_TYPE_A = 0xF4
fromEnum ITU_REC_H222_1_TYPE_B = 0xF5
fromEnum ITU_REC_H222_1_TYPE_C = 0xF6
fromEnum ITU_REC_H222_1_TYPE_D = 0xF7
fromEnum ITU_REC_H222_1_TYPE_E = 0xF8
fromEnum ANCILLARY_STREAM = 0xF9
fromEnum ISO_IEC_14496_1_SL_PACKETIZED_STREAM = 0xFA
fromEnum ISO_IEC_14496_1_FLEXMUX_STREAM = 0xFB
fromEnum METADATA_STREAM = 0xFC
fromEnum EXTENDED_STREAM_ID = 0xFD
fromEnum RESERVED_DATA_STREAM = 0xFE
fromEnum PROGRAM_STREAM_DIRECTORY = 0xFF
fromEnum INVALID = error "StreamID.fromEnum: INVALID has no stream_id value"
{-
Parse the stream number for Audio and Video stream IDs
-}
streamNumber :: Int -> Maybe (Int, StreamID)
streamNumber id =
case (toEnum id) of
VIDEO_STREAM -> Just (id .&. 0x0F, VIDEO_STREAM)
AUDIO_STREAM -> Just (id .&. 0x1F, AUDIO_STREAM)
_ -> Nothing
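{-
Examples, following the definitions above:
streamNumber 0xC3 == Just (3, AUDIO_STREAM) -- 0xC3 .&. 0x1F == 3
streamNumber 0xE2 == Just (2, VIDEO_STREAM) -- 0xE2 .&. 0x0F == 2
streamNumber 0xBC == Nothing -- PROGRAM_STREAM_MAP is neither audio nor video
-}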
| kevinkirkup/mpegts-hs | src/Media/MpegTs/Pes.hs | bsd-3-clause | 3,507 | 0 | 10 | 1,246 | 600 | 318 | 282 | 81 | 3 |
module Util.User (
newToken
) where
import ClassyPrelude.Yesod
import Data.UUID.V4 (nextRandom)
import Data.UUID (toString)
newToken :: IO Text
newToken = pack . toString <$> nextRandom
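-- Each call yields a fresh random (version 4) UUID rendered as Text,
-- e.g. "123e4567-e89b-12d3-a456-426614174000" (illustrative value only).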
| vinnymac/glot-www | Util/User.hs | mit | 192 | 0 | 6 | 31 | 58 | 34 | 24 | 7 | 1 |
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module HyLoRes.Core.Worker (
Worker,
Serial, runSerial,
SMP, runSMP,
--
onClauseSet, onClauseSet_, fromClauseSet, onClausesIndex_,
cycleCount, incCycleCount,
param, params,
getDirective, unsatDetected, postProcessNew
)
where
import HyLoRes.Logger ( MonadLogger(..) )
import HyLoRes.Statistics ( MonadStatistics(..) )
import HyLoRes.ClauseSet ( ClauseSet )
import HyLoRes.Config ( Params (..) )
import HyLoRes.Clause.SelFunction ( SelFunc )
import HyLoRes.Statistics ( StatsValues )
import HyLoRes.Core.Worker.Base ( CycleCount, Directive )
import qualified HyLoRes.Core.Worker.Serial as Serial
import qualified HyLoRes.Core.SMP.Worker as SMP
import HyLoRes.Subsumption.CASSubsumptionTrie ( CASSubsumptionTrie )
import HyLoRes.Subsumption.ClausesByFormulaIndex
import Data.IORef
import HyLoRes.Util.Timeout ( TimeoutSignal )
import Control.Concurrent.MVar
data Serial
data SMP
data Witness t where
Serial :: Witness Serial
SMP :: Witness SMP
data Implementation t a where
I1 :: Serial.Worker a -> Implementation Serial a
I2 :: SMP.Worker a -> Implementation SMP a
type Worker_ t a = Witness t -> Implementation t a
-- wrap a Worker_ in a newtype to make it an instance of Monad
newtype Worker t a = Wrap (Worker_ t a)
instance Monad (Worker t) where
{-# INLINE return #-}
return a = Wrap $ pick (return a) (return a)
{-# INLINE fail #-}
fail s = Wrap $ pick (fail s) (fail s)
{-# INLINE (>>=) #-}
m >>= f = Wrap $ bind m f
{-# INLINE (>>) #-}
m >> m' = m >>= \_ -> m'
{-# INLINE bind #-}
bind :: forall a b t . Worker t a -> (a -> Worker t b) -> Worker_ t b
bind (Wrap worker) f = \w ->
case w of
Serial -> case worker Serial of
I1 m -> I1 (do a <- m
case f a of
Wrap worker' -> case worker' Serial of
I1 m' -> m'
:: Serial.Worker b)
SMP -> case worker SMP of
I2 m -> I2 (do a <- m
case f a of
Wrap worker' -> case worker' SMP of
I2 m' -> m'
:: SMP.Worker b)
{-# INLINE pick #-}
pick :: Serial.Worker a -> SMP.Worker a -> Worker_ t a
pick serial smp = \w -> case w of
Serial -> I1 serial
SMP -> I2 smp
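-- Every primitive below is lifted the same way: supply the serial and SMP
-- implementations and combine them with 'pick'. A sketch (the names
-- 'someOp', 'Arg' and 'Res' are hypothetical):
--
-- > someOp :: Arg -> Worker t Res
-- > someOp x = Wrap $ pick (Serial.someOp x) (SMP.someOp x)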
instance MonadLogger (Worker t) where
mustLogEvent e = Wrap $ pick (mustLogEvent e) (mustLogEvent e)
performLog s = Wrap $ pick (performLog s) (performLog s)
instance MonadStatistics (Worker t) where
getStatsValues = Wrap $ pick getStatsValues getStatsValues
performIO a = Wrap $ pick (performIO a) (performIO a)
onClauseSet_ :: (ClauseSet -> ClauseSet) -> Worker t ()
onClauseSet_ f = Wrap $ pick (Serial.onClauseSet_ f) (SMP.onClauseSet_ f)
onClauseSet :: (ClauseSet -> (ClauseSet, a)) -> Worker t a
onClauseSet f = Wrap $ pick (Serial.onClauseSet f) (SMP.onClauseSet f)
fromClauseSet :: (ClauseSet -> a) -> Worker t a
fromClauseSet f = Wrap $ pick (Serial.fromClauseSet f) (SMP.fromClauseSet f)
onClausesIndex_ :: (ClausesByFormulaIndex -> ClausesByFormulaIndex) -> Worker t ()
onClausesIndex_ f = Wrap $ pick (Serial.onClausesIndex_ f) (SMP.onClausesIndex_ f)
cycleCount :: Worker t CycleCount
cycleCount = Wrap $ pick Serial.cycleCount SMP.cycleCount
incCycleCount :: Worker t ()
incCycleCount = Wrap $ pick Serial.incCycleCount SMP.incCycleCount
params :: Worker t Params
params = Wrap $ pick Serial.params SMP.params
param :: (Params -> a) -> Worker t a
param f = Wrap $ pick (Serial.param f) (SMP.param f)
getDirective :: Worker t Directive
getDirective = Wrap $ pick Serial.getDirective SMP.getDirective
unsatDetected :: Worker t ()
unsatDetected = Wrap $ pick (return ()) SMP.unsatDetected
postProcessNew :: Worker t ()
postProcessNew = Wrap $ pick Serial.postProcessNew SMP.postProcessNew
runSerial :: Worker Serial a
-> Params
-> SelFunc
-> StatsValues
-> TimeoutSignal
-> IO a
runSerial (Wrap worker) = case worker Serial of
I1 m -> Serial.runWorker m
runSMP :: Worker SMP a
-> SMP.WorkerChans
-> SMP.WorkerId
-> Params
-> StatsValues
-> IORef CASSubsumptionTrie
-> MVar ClausesByFormulaIndex
-> IO a
runSMP (Wrap worker) = case worker SMP of
I2 m -> SMP.runWorker m
| nevrenato/HyLoRes_Source | src/HyLoRes/Core/Worker.hs | gpl-2.0 | 4,679 | 4 | 22 | 1,377 | 1,467 | 763 | 704 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- |
-- Module : Network.AWS.Compat.Time
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.Compat.Time
( parseTime
) where
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (ParseTime, TimeLocale, parseTimeM)
parseTime :: ParseTime a => TimeLocale -> String -> String -> Maybe a
parseTime = parseTimeM True
#else
import Data.Time.Format (parseTime)
#endif
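-- Either branch gives the same call shape, e.g. (a sketch; 'defaultTimeLocale'
-- comes from Data.Time.Format and is not re-exported here):
--
-- > parseTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S" "2015-06-01T12:00:00"
-- >   :: Maybe UTCTime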
| fmapfmapfmap/amazonka | core/src/Network/AWS/Compat/Time.hs | mpl-2.0 | 610 | 0 | 9 | 122 | 77 | 49 | 28 | 4 | 0 |
{-
(c) The University of Glasgow, 1994-2006
Core pass to saturate constructors and PrimOps
-}
{-# LANGUAGE BangPatterns, CPP, MultiWayIf #-}
module Eta.Core.CorePrep (
corePrepPgm, corePrepExpr, cvtLitInteger,
lookupMkIntegerName, lookupIntegerSDataConName
) where
#include "HsVersions.h"
import Eta.SimplCore.OccurAnal
import Eta.Main.HscTypes
import Eta.Prelude.PrelNames
import Eta.BasicTypes.MkId ( realWorldPrimId )
import Eta.Core.CoreUtils
import Eta.Core.CoreArity
import Eta.Core.CoreFVs
import Eta.SimplCore.CoreMonad ( CoreToDo(..) )
import Eta.Core.CoreLint ( endPassIO )
import Eta.Core.CoreSyn
import Eta.Core.CoreSubst
import Eta.Core.MkCore hiding( FloatBind(..) ) -- We use our own FloatBind here
import Eta.Types.Type
import Eta.BasicTypes.Literal
import Eta.Types.Coercion
import Eta.TypeCheck.TcEnv
import Eta.TypeCheck.TcRnMonad
import Eta.Types.TyCon
import Eta.BasicTypes.Demand
import Eta.BasicTypes.Var
import Eta.BasicTypes.VarSet
import Eta.BasicTypes.VarEnv
import Eta.BasicTypes.Id
import Eta.BasicTypes.IdInfo
import Eta.Prelude.TysWiredIn
import Eta.BasicTypes.DataCon
import Eta.Prelude.PrimOp
import Eta.BasicTypes.BasicTypes
import Eta.BasicTypes.Module
import Eta.BasicTypes.UniqSupply
import Eta.Utils.Maybes
import Eta.Utils.OrdList
import Eta.Main.ErrUtils
import Eta.Main.DynFlags
import Eta.Utils.Util
import Eta.Utils.Pair
import Eta.Utils.Outputable
import Eta.Utils.Platform
import Eta.Utils.FastString
import Eta.BasicTypes.Name ( NamedThing(..), nameSrcSpan )
import Eta.BasicTypes.SrcLoc ( SrcSpan(..), realSrcLocSpan, mkRealSrcLoc )
import Data.Bits
import Data.List ( mapAccumL )
import Control.Monad
{-
-- ---------------------------------------------------------------------------
-- Overview
-- ---------------------------------------------------------------------------
The goal of this pass is to prepare for code generation.
1. Saturate constructor and primop applications.
2. Convert to A-normal form; that is, function arguments
are always variables.
* Use case for strict arguments:
f E ==> case E of x -> f x
(where f is strict)
* Use let for non-trivial lazy arguments
f E ==> let x = E in f x
(where f is lazy and x is non-trivial)
3. Similarly, convert any unboxed lets into cases.
[I'm experimenting with leaving 'ok-for-speculation'
rhss in let-form right up to this point.]
4. Ensure that *value* lambdas only occur as the RHS of a binding
(The code generator can't deal with anything else.)
Type lambdas are ok, however, because the code gen discards them.
5. [Not any more; nuked Jun 2002] Do the seq/par munging.
6. Clone all local Ids.
This means that all such Ids are unique, rather than the
weaker guarantee of no clashes which the simplifier provides.
And that is what the code generator needs.
We don't clone TyVars or CoVars. The code gen doesn't need that,
and doing so would be tiresome because then we'd need
to substitute in types and coercions.
7. Give each dynamic CCall occurrence a fresh unique; this is
rather like the cloning step above.
8. Inject bindings for the "implicit" Ids:
* Constructor wrappers
* Constructor workers
We want curried definitions for all of these in case they
aren't inlined by some caller.
9. Replace (lazy e) by e. See Note [lazyId magic] in MkId.hs
Also replace (noinline e) by e.
10. Convert (LitInteger i t) into the core representation
for the Integer i. Normally this uses mkInteger, but if
we are using the integer-gmp implementation then there is a
special case where we use the S# constructor for Integers that
are in the range of Int.
11. Uphold tick consistency while doing this: We move ticks out of
(non-type) applications where we can, and make sure that we
annotate according to scoping rules when floating.
This is all done modulo type applications and abstractions, so that
when type erasure is done for conversion to STG, we don't end up with
any trivial or useless bindings.
Note [CorePrep invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is the syntax of the Core produced by CorePrep:
Trivial expressions
arg ::= lit | var
| arg ty | /\a. arg
| truv co | /\c. arg | arg |> co
Applications
app ::= lit | var | app arg | app ty | app co | app |> co
Expressions
body ::= app
| let(rec) x = rhs in body -- Boxed only
| case body of pat -> body
| /\a. body | /\c. body
| body |> co
Right hand sides (only place where value lambdas can occur)
rhs ::= /\a.rhs | \x.rhs | body
We define a synonym for each of these non-terminals. Functions
with the corresponding name produce a result in that syntax.
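A small before/after illustration of steps 1-2 (made up, not from a real
compilation):
f (g x) (h y)
becomes, after saturation and ANF-conversion, roughly
let a1 = g x in
let a2 = h y in
f a1 a2
(with case-bindings instead of lets where the arguments are strict).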
-}
type CpeArg = CoreExpr -- Non-terminal 'arg'
type CpeApp = CoreExpr -- Non-terminal 'app'
type CpeBody = CoreExpr -- Non-terminal 'body'
type CpeRhs = CoreExpr -- Non-terminal 'rhs'
{-
************************************************************************
* *
Top level stuff
* *
************************************************************************
-}
corePrepPgm :: HscEnv -> Module -> ModLocation -> CoreProgram -> [TyCon]
-> IO CoreProgram
corePrepPgm hsc_env _this_mod mod_loc binds data_tycons = do
let dflags = hsc_dflags hsc_env
us <- mkSplitUniqSupply 's'
initialCorePrepEnv <- mkInitialCorePrepEnv dflags hsc_env
let implicit_binds = mkDataConWorkers dflags mod_loc data_tycons
-- NB: we must feed mkImplicitBinds through corePrep too
-- so that they are suitably cloned and eta-expanded
binds_out = initUs_ us $ do
floats1 <- corePrepTopBinds initialCorePrepEnv binds
floats2 <- corePrepTopBinds initialCorePrepEnv implicit_binds
return (deFloatTop (floats1 `appendFloats` floats2))
endPassIO hsc_env alwaysQualify CorePrep binds_out []
return binds_out
corePrepExpr :: DynFlags -> HscEnv -> CoreExpr -> IO CoreExpr
corePrepExpr dflags hsc_env expr = do
us <- mkSplitUniqSupply 's'
initialCorePrepEnv <- mkInitialCorePrepEnv dflags hsc_env
let new_expr = initUs_ us (cpeBodyNF initialCorePrepEnv expr)
dumpIfSet_dyn dflags Opt_D_dump_prep "CorePrep" (ppr new_expr)
return new_expr
corePrepTopBinds :: CorePrepEnv -> [CoreBind] -> UniqSM Floats
-- Note [Floating out of top level bindings]
corePrepTopBinds initialCorePrepEnv binds
= go initialCorePrepEnv binds
where
go _ [] = return emptyFloats
go env (bind : binds) = do (env', floats, maybe_new_bind)
<- cpeBind TopLevel env bind
MASSERT(isNothing maybe_new_bind)
-- Only join points get returned this way by
-- cpeBind, and no join point may float to top
floatss <- go env' binds
return (floats `appendFloats` floatss)
mkDataConWorkers :: DynFlags -> ModLocation -> [TyCon] -> [CoreBind]
-- See Note [Data constructor workers]
-- c.f. Note [Injecting implicit bindings] in TidyPgm
mkDataConWorkers dflags mod_loc data_tycons
= [ NonRec id (tick_it (getName data_con) (Var id))
-- The ice is thin here, but it works
| tycon <- data_tycons, -- CorePrep will eta-expand it
data_con <- tyConDataCons tycon,
let id = dataConWorkId data_con
]
where
-- If we want to generate debug info, we put a source note on the
-- worker. This is useful, especially for heap profiling.
tick_it name
| debugLevel dflags == 0 = id
| RealSrcSpan span <- nameSrcSpan name = tick span
| Just file <- ml_hs_file mod_loc = tick (span1 file)
| otherwise = tick (span1 "???")
where tick span = Tick (SourceNote span $ showSDoc dflags (ppr name))
span1 file = realSrcLocSpan $ mkRealSrcLoc (mkFastString file) 1 1
{-
Note [Floating out of top level bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: we do need to float out of top-level bindings
Consider x = length [True,False]
We want to get
s1 = False : []
s2 = True : s1
x = length s2
We return a *list* of bindings, because we may start with
x* = f (g y)
where x is demanded, in which case we want to finish with
a = g y
x* = f a
And then x will actually end up case-bound
Note [CafInfo and floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
What happens when we try to float bindings to the top level? At this
point all the CafInfo is supposed to be correct, and we must make certain
that is true of the new top-level bindings. There are two cases
to consider
a) The top-level binding is marked MayHaveCafRefs. In that case we are
basically fine. The floated bindings had better all be lazy lets,
so they can float to top level, but they'll all have HasCafRefs
(the default) which is safe.
b) The top-level binding is marked NoCafRefs. This really happens
Example. CoreTidy produces
$fApplicativeSTM [NoCafRefs] = D:Alternative retry# ...blah...
Now CorePrep has to eta-expand to
$fApplicativeSTM = let sat = \xy. retry x y
in D:Alternative sat ...blah...
So what we *want* is
sat [NoCafRefs] = \xy. retry x y
$fApplicativeSTM [NoCafRefs] = D:Alternative sat ...blah...
So, gruesomely, we must set the NoCafRefs flag on the sat bindings,
*and* substitute the modified 'sat' into the old RHS.
It should be the case that 'sat' is itself [NoCafRefs] (a value, no
cafs) else the original top-level binding would not itself have been
marked [NoCafRefs]. The DEBUG check in CoreToStg for
consistentCafInfo will find this.
This is all very gruesome and horrible. It would be better to figure
out CafInfo later, after CorePrep. We'll do that in due course.
Meanwhile this horrible hack works.
Note [Join points and floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points can float out of other join points but not out of value bindings:
let z =
let w = ... in -- can float
join k = ... in -- can't float
... jump k ...
join j x1 ... xn =
let y = ... in -- can float (but don't want to)
join h = ... in -- can float (but not much point)
... jump h ...
in ...
Here, the jump to h remains valid if h is floated outward, but the jump to k
does not.
We don't float *out* of join points. It would only be safe to float out of
nullary join points (or ones where the arguments are all either type arguments
or dead binders). Nullary join points aren't ever recursive, so they're always
effectively one-shot functions, which we don't float out of. We *could* float
join points from nullary join points, but there's no clear benefit at this
stage.
Note [Data constructor workers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create any necessary "implicit" bindings for data con workers. We
create the rather strange (non-recursive!) binding
$wC = \x y -> $wC x y
i.e. a curried constructor that allocates. This means that we can
treat the worker for a constructor like any other function in the rest
of the compiler. The point here is that CoreToStg will generate a
StgConApp for the RHS, rather than a call to the worker (which would
give a loop). As Lennart says: the ice is thin here, but it works.
Hmm. Should we create bindings for dictionary constructors? They are
always fully applied, and the bindings are just there to support
partial applications. But it's easier to let them through.
Note [Dead code in CorePrep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Imagine that we got an input program like this (see Trac #4962):
f :: Show b => Int -> (Int, b -> Maybe Int -> Int)
f x = (g True (Just x) + g () (Just x), g)
where
g :: Show a => a -> Maybe Int -> Int
g _ Nothing = x
g y (Just z) = if z > 100 then g y (Just (z + length (show y))) else g y unknown
After specialisation and SpecConstr, we would get something like this:
f :: Show b => Int -> (Int, b -> Maybe Int -> Int)
f x = (g$Bool_True_Just x + g$Unit_Unit_Just x, g)
where
{-# RULES g $dBool = g$Bool
g $dUnit = g$Unit #-}
g = ...
{-# RULES forall x. g$Bool True (Just x) = g$Bool_True_Just x #-}
g$Bool = ...
{-# RULES forall x. g$Unit () (Just x) = g$Unit_Unit_Just x #-}
g$Unit = ...
g$Bool_True_Just = ...
g$Unit_Unit_Just = ...
Note that the g$Bool and g$Unit functions are actually dead code: they
are only kept alive by the occurrence analyser because they are
referred to by the rules of g, which is being kept alive by the fact
that it is used (unspecialised) in the returned pair.
However, at the CorePrep stage there is no way that the rules for g
will ever fire, and it really seems like a shame to produce an output
program that goes to the trouble of allocating a closure for the
unreachable g$Bool and g$Unit functions.
The way we fix this is to:
* In cloneBndr, drop all unfoldings/rules
* In deFloatTop, run a simple dead code analyser on each top-level
RHS to drop the dead local bindings. For that call to OccAnal, we
disable the binder swap, else the occurrence analyser sometimes
introduces new let bindings for cased binders, which lead to the bug
in #5433.
The reason we don't just OccAnal the whole output of CorePrep is that
the tidier ensures that all top-level binders are GlobalIds, so they
don't show up in the free variables any longer. So if you run the
occurrence analyser on the output of CoreTidy (or later) you e.g. turn
this program:
Rec {
f = ... f ...
}
Into this one:
f = ... f ...
(Since f is not considered to be free in its own RHS.)
************************************************************************
* *
The main code
* *
************************************************************************
-}
cpeBind :: TopLevelFlag -> CorePrepEnv -> CoreBind
-> UniqSM (CorePrepEnv,
Floats, -- Floating value bindings
Maybe CoreBind) -- Just bind' <=> returned new bind; no float
-- Nothing <=> added bind' to floats instead
cpeBind top_lvl env (NonRec bndr rhs)
| not (isJoinId bndr)
= do { (_, bndr1) <- cpCloneBndr env bndr
; let dmd = idDemandInfo bndr
is_unlifted = isUnLiftedType (idType bndr)
; (floats, bndr2, rhs2) <- cpePair top_lvl NonRecursive
dmd
is_unlifted
env bndr1 rhs
-- See Note [Inlining in CorePrep]
; if exprIsTrivial rhs2 && isNotTopLevel top_lvl
then return (extendCorePrepEnvExpr env bndr rhs2, floats, Nothing)
else do {
; let new_float = mkFloat dmd is_unlifted bndr2 rhs2
-- We want bndr'' in the envt, because it records
-- the evaluated-ness of the binder
; return (extendCorePrepEnv env bndr bndr2,
addFloat floats new_float,
Nothing) }}
| otherwise -- See Note [Join points and floating]
= panic "cpeBind: Join Points are not implemented yet."
-- TODO: Implement join points
-- = ASSERT(not (isTopLevel top_lvl)) -- can't have top-level join point
-- do { (_, bndr1) <- cpCloneBndr env bndr
-- ; (bndr2, rhs1) <- cpeJoinPair env bndr1 rhs
-- ; return (extendCorePrepEnv env bndr bndr2,
-- emptyFloats,
-- Just (NonRec bndr2 rhs1)) }
cpeBind top_lvl env (Rec pairs)
| not (isJoinId (head bndrs))
= do { (env', bndrs1) <- cpCloneBndrs env bndrs
; stuff <- zipWithM (cpePair top_lvl Recursive topDmd False env') bndrs1 rhss
; let (floats_s, bndrs2, rhss2) = unzip3 stuff
all_pairs = foldrOL add_float (bndrs2 `zip` rhss2)
(concatFloats floats_s)
; return (extendCorePrepEnvList env (bndrs `zip` bndrs2),
unitFloat (FloatLet (Rec all_pairs)),
Nothing) }
| otherwise -- See Note [Join points and floating]
= panic "cpeBind: (Rec) Join Points are no implemented yet."
-- TODO: Implement join points
-- = do { (env', bndrs1) <- cpCloneBndrs env bndrs
-- ; pairs1 <- zipWithM (cpeJoinPair env') bndrs1 rhss
-- ; let bndrs2 = map fst pairs1
-- ; return (extendCorePrepEnvList env' (bndrs `zip` bndrs2),
-- emptyFloats,
-- Just (Rec pairs1)) }
where
(bndrs, rhss) = unzip pairs
-- Flatten all the floats, and the current
-- group into a single giant Rec
add_float (FloatLet (NonRec b r)) prs2 = (b,r) : prs2
add_float (FloatLet (Rec prs1)) prs2 = prs1 ++ prs2
add_float b _ = pprPanic "cpeBind" (ppr b)
---------------
cpePair :: TopLevelFlag -> RecFlag -> Demand -> Bool
-> CorePrepEnv -> Id -> CoreExpr
-> UniqSM (Floats, Id, CpeRhs)
-- Used for all bindings
cpePair top_lvl is_rec dmd is_unlifted env bndr rhs
= ASSERT(not (isJoinId bndr)) -- those should use cpeJoinPair
do { (floats1, rhs1) <- cpeRhsE env rhs
-- See if we are allowed to float this stuff out of the RHS
; (floats2, rhs2) <- float_from_rhs floats1 rhs1
-- Make the arity match up
; (floats3, rhs3)
<- if manifestArity rhs1 <= arity
then return (floats2, cpeEtaExpand arity rhs2)
else WARN(True, text "CorePrep: silly extra arguments:" <+> ppr bndr)
-- Note [Silly extra arguments]
(do { v <- newVar (idType bndr)
; let float = mkFloat topDmd False v rhs2
; return ( addFloat floats2 float
, cpeEtaExpand arity (Var v)) })
-- Wrap floating ticks
; let (floats4, rhs4) = wrapTicks floats3 rhs3
-- Record if the binder is evaluated
-- and otherwise trim off the unfolding altogether
-- It's not used by the code generator; getting rid of it reduces
-- heap usage and, since we may be changing uniques, we'd have
-- to substitute to keep it right
; let bndr' | exprIsHNF rhs3 = bndr `setIdUnfolding` evaldUnfolding
| otherwise = bndr `setIdUnfolding` noUnfolding
; return (floats4, bndr', rhs4) }
where
platform = targetPlatform (cpe_dynFlags env)
arity = idArity bndr -- We must match this arity
---------------------
float_from_rhs floats rhs
| isEmptyFloats floats = return (emptyFloats, rhs)
| isTopLevel top_lvl = float_top floats rhs
| otherwise = float_nested floats rhs
---------------------
float_nested floats rhs
| wantFloatNested is_rec dmd is_unlifted floats rhs
= return (floats, rhs)
| otherwise = dontFloat floats rhs
---------------------
float_top floats rhs -- Urhgh! See Note [CafInfo and floating]
| mayHaveCafRefs (idCafInfo bndr)
, allLazyTop floats
= return (floats, rhs)
-- So the top-level binding is marked NoCafRefs
| Just (floats', rhs') <- canFloatFromNoCaf platform floats rhs
= return (floats', rhs')
| otherwise
= dontFloat floats rhs
dontFloat :: Floats -> CpeRhs -> UniqSM (Floats, CpeBody)
-- Non-empty floats, but do not want to float from rhs
-- So wrap the rhs in the floats
-- But: rhs1 might have lambdas, and we can't
-- put them inside a wrapBinds
dontFloat floats1 rhs
= do { (floats2, body) <- rhsToBody rhs
; return (emptyFloats, wrapBinds floats1 $
wrapBinds floats2 body) }
{- Note [Silly extra arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we had this
f{arity=1} = \x\y. e
We *must* match the arity on the Id, so we have to generate
f' = \x\y. e
f = \x. f' x
It's a bizarre case: why is the arity on the Id wrong? Reason
(in the days of __inline_me__):
f{arity=0} = __inline_me__ (let v = expensive in \xy. e)
When InlineMe notes go away this won't happen any more. But
it seems good for CorePrep to be robust.
-}
---------------
-- TODO: Implement join points
-- cpeJoinPair :: CorePrepEnv -> JoinId -> CoreExpr
-- -> UniqSM (JoinId, CpeRhs)
-- -- Used for all join bindings
-- cpeJoinPair env bndr rhs
-- = ASSERT(isJoinId bndr)
-- do { let Just join_arity = isJoinId_maybe bndr
-- (bndrs, body) = collectNBinders join_arity rhs
-- ; (env', bndrs') <- cpCloneBndrs env bndrs
-- ; body' <- cpeBodyNF env' body -- Will let-bind the body if it starts
-- -- with a lambda
-- ; let rhs' = mkCoreLams bndrs' body'
-- bndr' = bndr `setIdUnfolding` evaldUnfolding
-- `setIdArity` count isId bndrs
-- -- See Note [Arity and join points]
-- ; return (bndr', rhs') }
{-
Note [Arity and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Up to now, we've allowed a join point to have an arity greater than its join
arity (minus type arguments), since this is what's useful for eta expansion.
However, for code gen purposes, its arity must be exactly the number of value
arguments it will be called with, and it must have exactly that many value
lambdas. Hence if there are extra lambdas we must let-bind the body of the RHS:
join j x y z = \w -> ... in ...
=>
join j x y z = (let f = \w -> ... in f) in ...
This is also what happens with Note [Silly extra arguments]. Note that it's okay
for us to mess with the arity because a join point is never exported.
-}
-- ---------------------------------------------------------------------------
-- CpeRhs: produces a result satisfying CpeRhs
-- ---------------------------------------------------------------------------
cpeRhsE :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeRhs)
-- If
-- e ===> (bs, e')
-- then
-- e = let bs in e' (semantically, that is!)
--
-- For example
-- f (g x) ===> ([v = g x], f v)
cpeRhsE _env expr@(Type {}) = return (emptyFloats, expr)
cpeRhsE _env expr@(Coercion {}) = return (emptyFloats, expr)
cpeRhsE env (Lit (LitInteger i _))
= cpeRhsE env (cvtLitInteger (cpe_dynFlags env) (getMkIntegerId env)
(cpe_integerSDataCon env) i)
cpeRhsE _env expr@(Lit {}) = return (emptyFloats, expr)
cpeRhsE env expr@(Var {}) = cpeApp env expr
cpeRhsE env expr@(App {}) = cpeApp env expr
cpeRhsE env (Let bind body)
= do { (env', bind_floats, maybe_bind') <- cpeBind NotTopLevel env bind
; (body_floats, body') <- cpeRhsE env' body
; let expr' = case maybe_bind' of Just bind' -> Let bind' body'
Nothing -> body'
; return (bind_floats `appendFloats` body_floats, expr') }
cpeRhsE env (Tick tickish expr)
| tickishPlace tickish == PlaceNonLam && tickish `tickishScopesLike` SoftScope
= do { (floats, body) <- cpeRhsE env expr
-- See [Floating Ticks in CorePrep]
; return (unitFloat (FloatTick tickish) `appendFloats` floats, body) }
| otherwise
= do { body <- cpeBodyNF env expr
; return (emptyFloats, mkTick tickish' body) }
where
tickish' | Breakpoint n fvs <- tickish
-- See also 'substTickish'
= Breakpoint n (map (getIdFromTrivialExpr . lookupCorePrepEnv env) fvs)
| otherwise
= tickish
cpeRhsE env (Cast expr co)
= do { (floats, expr') <- cpeRhsE env expr
; return (floats, Cast expr' co) }
cpeRhsE env expr@(Lam {})
= do { let (bndrs,body) = collectBinders expr
; (env', bndrs') <- cpCloneBndrs env bndrs
; body' <- cpeBodyNF env' body
; return (emptyFloats, mkLams bndrs' body') }
cpeRhsE env (Case scrut bndr ty alts)
= do { (floats, scrut') <- cpeBody env scrut
; let bndr1 = bndr `setIdUnfolding` evaldUnfolding
-- Record that the case binder is evaluated in the alternatives
; (env', bndr2) <- cpCloneBndr env bndr1
; let alts'
-- This flag is intended to aid in debugging strictness
-- analysis bugs. These are particularly nasty to chase down as
-- they may manifest as segmentation faults. When this flag is
-- enabled we instead produce an 'error' expression to catch
-- the case where a function we think should bottom
-- unexpectedly returns.
| gopt Opt_CatchBottoms (cpe_dynFlags env)
, not (altsAreExhaustive alts)
= addDefault alts (Just err)
| otherwise = alts
where err = mkRuntimeErrorApp rUNTIME_ERROR_ID ty
"Bottoming expression returned"
; alts'' <- mapM (sat_alt env') alts'
; return (floats, Case scrut' bndr2 ty alts'') }
where
sat_alt env (con, bs, rhs)
= do { (env2, bs') <- cpCloneBndrs env bs
; rhs' <- cpeBodyNF env2 rhs
; return (con, bs', rhs') }
cvtLitInteger :: DynFlags -> Id -> Maybe DataCon -> Integer -> CoreExpr
-- Here we convert a literal Integer to the low-level
-- representation. Exactly how we do this depends on the
-- library that implements Integer. If it's GMP we
-- use the S# data constructor for small literals.
-- See Note [Integer literals] in Literal
cvtLitInteger dflags _ (Just sdatacon) i
| inIntRange dflags i -- Special case for small integers
= mkConApp sdatacon [Lit (mkMachInt dflags i)]
cvtLitInteger dflags mk_integer _ i
= mkApps (Var mk_integer) [isNonNegative, ints]
where isNonNegative = if i < 0 then mkConApp falseDataCon []
else mkConApp trueDataCon []
ints = mkListExpr intTy (f (abs i))
f 0 = []
f x = let low = x .&. mask
high = x `shiftR` bits
in mkConApp intDataCon [Lit (mkMachInt dflags low)] : f high
bits = 31
mask = 2 ^ bits - 1
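-- A small worked example of the fallback path above (illustrative only, not
-- part of the original source): with 31-bit chunks, the literal 5000000000
-- becomes, roughly,
--   mk_integer True [I# 705032704#, I# 2#]
-- since 5000000000 = 705032704 + 2 * 2^31; 'f' builds the chunk list
-- least-significant chunk first.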
-- ---------------------------------------------------------------------------
-- CpeBody: produces a result satisfying CpeBody
-- ---------------------------------------------------------------------------
-- | Convert a 'CoreExpr' so it satisfies 'CpeBody', without
-- producing any floats (any generated floats are immediately
-- let-bound using 'wrapBinds'). Generally you want this, esp.
-- when you've reached a binding form (e.g., a lambda) and
-- floating any further would be incorrect.
cpeBodyNF :: CorePrepEnv -> CoreExpr -> UniqSM CpeBody
cpeBodyNF env expr
= do { (floats, body) <- cpeBody env expr
; return (wrapBinds floats body) }
-- | Convert a 'CoreExpr' so it satisfies 'CpeBody'; also produce
-- a list of 'Floats' which are being propagated upwards. In
-- fact, this function is used in only two cases: to
-- implement 'cpeBodyNF' (which is what you usually want),
-- and in the case when a let-binding is in a case scrutinee--here,
-- we can always float out:
--
-- case (let x = y in z) of ...
-- ==> let x = y in case z of ...
--
cpeBody :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeBody)
cpeBody env expr
= do { (floats1, rhs) <- cpeRhsE env expr
; (floats2, body) <- rhsToBody rhs
; return (floats1 `appendFloats` floats2, body) }
--------
rhsToBody :: CpeRhs -> UniqSM (Floats, CpeBody)
-- Remove top level lambdas by let-binding
rhsToBody (Tick t expr)
| tickishScoped t == NoScope -- only float out of non-scoped annotations
= do { (floats, expr') <- rhsToBody expr
; return (floats, mkTick t expr') }
rhsToBody (Cast e co)
-- You can get things like
-- case e of { p -> coerce t (\s -> ...) }
= do { (floats, e') <- rhsToBody e
; return (floats, Cast e' co) }
rhsToBody expr@(Lam {})
| Just no_lam_result <- tryEtaReducePrep bndrs body
= return (emptyFloats, no_lam_result)
| all isTyVar bndrs -- Type lambdas are ok
= return (emptyFloats, expr)
| otherwise -- Some value lambdas
= do { fn <- newVar (exprType expr)
; let rhs = cpeEtaExpand (exprArity expr) expr
float = FloatLet (NonRec fn rhs)
; return (unitFloat float, Var fn) }
where
(bndrs,body) = collectBinders expr
rhsToBody expr = return (emptyFloats, expr)
-- ---------------------------------------------------------------------------
-- CpeApp: produces a result satisfying CpeApp
-- ---------------------------------------------------------------------------
data ArgInfo = CpeApp CoreArg
| CpeCast Coercion
| CpeTick (Tickish Id)
{- Note [runRW arg]
~~~~~~~~~~~~~~~~~~~
If we got, say
runRW# (case bot of {})
which happened in Trac #11291, we do /not/ want to turn it into
(case bot of {}) realWorldPrimId#
because that gives a panic in CoreToStg.myCollectArgs, which expects
only variables in function position. But if we are sure to make
runRW# strict (which we do in MkId), this can't happen
-}
cpeApp :: CorePrepEnv -> CoreExpr -> UniqSM (Floats, CpeRhs)
-- May return a CpeRhs because of saturating primops
cpeApp top_env expr
= do { let (terminal, args, depth) = collect_args expr
; cpe_app top_env terminal args depth
}
where
-- We have a nested data structure of the form
-- e `App` a1 `App` a2 ... `App` an, convert it into
-- (e, [CpeApp a1, CpeApp a2, ..., CpeApp an], depth)
-- We use 'ArgInfo' because we may also need to
-- record casts and ticks. Depth counts the number
-- of arguments that would consume strictness information
-- (so, no type or coercion arguments.)
collect_args :: CoreExpr -> (CoreExpr, [ArgInfo], Int)
collect_args e = go e [] 0
where
go (App fun arg) as !depth
= go fun (CpeApp arg : as)
(if isTyCoArg arg then depth else depth + 1)
go (Cast fun co) as depth
= go fun (CpeCast co : as) depth
go (Tick tickish fun) as depth
| tickishPlace tickish == PlaceNonLam
&& tickish `tickishScopesLike` SoftScope
= go fun (CpeTick tickish : as) depth
go terminal as depth = (terminal, as, depth)
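    -- A worked example (illustrative only): for the application (f @Int x y)
    --   collect_args (f @Int x y)
    --     = (Var f, [CpeApp (Type Int), CpeApp x, CpeApp y], 2)
    -- The type argument is collected but does not count towards the depth,
    -- because it consumes no strictness information.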
cpe_app :: CorePrepEnv
-> CoreExpr
-> [ArgInfo]
-> Int
-> UniqSM (Floats, CpeRhs)
cpe_app env (Var f) (CpeApp Type{} : CpeApp arg : args) depth
| f `hasKey` lazyIdKey -- Replace (lazy a) with a, and
|| f `hasKey` noinlineIdKey -- Replace (noinline a) with a
-- Consider the code:
--
-- lazy (f x) y
--
-- We need to make sure that we recursively collect arguments on
-- "f x", otherwise we'll float "f x" out (it's not a variable) and
-- end up with this awful -ddump-prep:
--
-- case f x of f_x {
-- __DEFAULT -> f_x y
-- }
--
-- rather than the far superior "f x y". Test case is par01.
= let (terminal, args', depth') = collect_args arg
in cpe_app env terminal (args' ++ args) (depth + depth' - 1)
cpe_app env (Var f) [CpeApp _type@Type{}, CpeApp arg] 1
| f `hasKey` runRWKey
-- Replace (runRW# f) by (f realWorld#), beta reducing if possible (this
-- is why we return a CorePrepEnv as well)
= case arg of
Lam s body -> cpe_app (extendCorePrepEnv env s realWorldPrimId) body [] 0
_ -> cpe_app env arg [CpeApp (Var realWorldPrimId)] 1
cpe_app env (Var v) args depth
= do { v1 <- fiddleCCall v
; let e2 = lookupCorePrepEnv env v1
hd = getIdFromTrivialExpr_maybe e2
-- NB: depth from collect_args is right, because e2 is a trivial expression
-- and thus its embedded Id *must* be at the same depth, since any
-- Apps it is under are type applications only (c.f.
-- exprIsTrivial). But note that we need the type of the
-- expression, not the id.
; (app, floats) <- rebuild_app args e2 (exprType e2) emptyFloats stricts
; mb_saturate hd app floats depth }
where
stricts = case idStrictness v of
StrictSig (DmdType _ demands _)
| listLengthCmp demands depth /= GT -> demands
-- length demands <= depth
| otherwise -> []
-- If depth < length demands, then we have too few args to
-- satisfy strictness info so we have to ignore all the
-- strictness info, e.g. + (error "urk")
-- Here, we can't evaluate the arg strictly, because this
-- partial application might be seq'd
-- We inlined into something that's not a var and has no args.
-- Bounce it back up to cpeRhsE.
cpe_app env fun [] _ = cpeRhsE env fun
-- N-variable fun, better let-bind it
cpe_app env fun args depth
= do { (fun_floats, fun') <- cpeArg env evalDmd fun ty
-- The evalDmd says that it's sure to be evaluated,
-- so we'll end up case-binding it
; (app, floats) <- rebuild_app args fun' ty fun_floats []
; mb_saturate Nothing app floats depth }
where
ty = exprType fun
-- Saturate if necessary
mb_saturate head app floats depth =
case head of
Just fn_id -> do { sat_app <- maybeSaturate fn_id app depth
; return (floats, sat_app) }
_other -> return (floats, app)
-- Deconstruct and rebuild the application, floating any non-atomic
-- arguments to the outside. We collect the type of the expression,
-- the head of the application, and the number of actual value arguments,
-- all of which are used to possibly saturate this application if it
-- has a constructor or primop at the head.
rebuild_app
:: [ArgInfo] -- The arguments (inner to outer)
-> CpeApp
-> Type
-> Floats
-> [Demand]
-> UniqSM (CpeApp, Floats)
rebuild_app [] app _ floats ss = do
MASSERT(null ss) -- make sure we used all the strictness info
return (app, floats)
rebuild_app (a : as) fun' fun_ty floats ss = case a of
CpeApp arg@(Type arg_ty) ->
rebuild_app as (App fun' arg) (piResultTy fun_ty arg_ty) floats ss
CpeApp arg@(Coercion {}) ->
rebuild_app as (App fun' arg) (funResultTy fun_ty) floats ss
CpeApp arg -> do
let (ss1, ss_rest) -- See Note [lazyId magic] in MkId
= case (ss, isLazyExpr arg) of
(_ : ss_rest, True) -> (topDmd, ss_rest)
(ss1 : ss_rest, False) -> (ss1, ss_rest)
([], _) -> (topDmd, [])
(arg_ty, res_ty) = expectJust "cpeBody:collect_args" $
splitFunTy_maybe fun_ty
(fs, arg') <- cpeArg top_env ss1 arg arg_ty
rebuild_app as (App fun' arg') res_ty (fs `appendFloats` floats) ss_rest
CpeCast co ->
let Pair _ty1 ty2 = coercionKind co
in rebuild_app as (Cast fun' co) ty2 floats ss
CpeTick tickish ->
-- See [Floating Ticks in CorePrep]
rebuild_app as fun' fun_ty (addFloat floats (FloatTick tickish)) ss
isLazyExpr :: CoreExpr -> Bool
-- See Note [lazyId magic] in MkId
isLazyExpr (Cast e _) = isLazyExpr e
isLazyExpr (Tick _ e) = isLazyExpr e
isLazyExpr (Var f `App` _ `App` _) = f `hasKey` lazyIdKey
isLazyExpr _ = False
-- ---------------------------------------------------------------------------
-- CpeArg: produces a result satisfying CpeArg
-- ---------------------------------------------------------------------------
{-
Note [ANF-ising literal string arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a program like,
data Foo = Foo Addr#
foo = Foo "turtle"#
When we go to ANFise this we might think that we want to float the string
literal like we do any other non-trivial argument. This would look like,
foo = u\ [] case "turtle"# of s { __DEFAULT__ -> Foo s }
However, this 1) isn't necessary since strings are in a sense "trivial"; and 2)
wreaks havoc on the CAF annotations that we produce here, since the result
above is caffy because it is updatable. Ideally at some point in the future we
would like to just float the literal to the top level as suggested in #11312,
s = "turtle"#
foo = Foo s
However, until then we simply add a special case excluding literals from the
floating done by cpeArg.
-}
-- | Is an argument okay to CPE?
okCpeArg :: CoreExpr -> Bool
-- Don't float literals. See Note [ANF-ising literal string arguments].
okCpeArg (Lit _) = False
-- Do not eta expand a trivial argument
okCpeArg expr = not (exprIsTrivial expr)
-- This is where we arrange that a non-trivial argument is let-bound
cpeArg :: CorePrepEnv -> Demand
-> CoreArg -> Type -> UniqSM (Floats, CpeArg)
cpeArg env dmd arg arg_ty
= do { (floats1, arg1) <- cpeRhsE env arg -- arg1 can be a lambda
; (floats2, arg2) <- if want_float floats1 arg1
then return (floats1, arg1)
else dontFloat floats1 arg1
-- Else case: arg1 might have lambdas, and we can't
-- put them inside a wrapBinds
; if okCpeArg arg2
then do { v <- newVar arg_ty
; let arg3 = cpeEtaExpand (exprArity arg2) arg2
arg_float = mkFloat dmd is_unlifted v arg3
; return (addFloat floats2 arg_float, varToCoreExpr v) }
else return (floats2, arg2)
}
where
is_unlifted = isUnLiftedType arg_ty
want_float = wantFloatNested NonRecursive dmd is_unlifted
{-
Note [Floating unlifted arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider C (let v* = expensive in v)
where the "*" indicates "will be demanded". Usually v will have been
inlined by now, but let's suppose it hasn't (see Trac #2756). Then we
do *not* want to get
let v* = expensive in C v
because that has different strictness. Hence the use of 'allLazy'.
(NB: the let v* turns into a FloatCase, in mkLocalNonRec.)
------------------------------------------------------------------------------
-- Building the saturated syntax
-- ---------------------------------------------------------------------------
maybeSaturate deals with saturating primops and constructors
The type is the type of the entire application
-}
maybeSaturate :: Id -> CpeApp -> Int -> UniqSM CpeRhs
maybeSaturate fn expr n_args
| Just DataToTagOp <- isPrimOpId_maybe fn -- DataToTag must have an evaluated arg
-- A gruesome special case
= saturateDataToTag sat_expr
| hasNoBinding fn -- There's no binding
= return sat_expr
| otherwise
= return expr
where
fn_arity = idArity fn
excess_arity = fn_arity - n_args
sat_expr = cpeEtaExpand excess_arity expr
-------------
saturateDataToTag :: CpeApp -> UniqSM CpeApp
-- See Note [dataToTag magic]
saturateDataToTag sat_expr
= do { let (eta_bndrs, eta_body) = collectBinders sat_expr
; eta_body' <- eval_data2tag_arg eta_body
; return (mkLams eta_bndrs eta_body') }
where
eval_data2tag_arg :: CpeApp -> UniqSM CpeBody
eval_data2tag_arg app@(fun `App` arg)
| exprIsHNF arg -- Includes nullary constructors
= return app -- The arg is evaluated
| otherwise -- Arg not evaluated, so evaluate it
= do { arg_id <- newVar (exprType arg)
; let arg_id1 = setIdUnfolding arg_id evaldUnfolding
; return (Case arg arg_id1 (exprType app)
[(DEFAULT, [], fun `App` Var arg_id1)]) }
eval_data2tag_arg (Tick t app) -- Scc notes can appear
= do { app' <- eval_data2tag_arg app
; return (Tick t app') }
eval_data2tag_arg other -- Should not happen
= pprPanic "eval_data2tag" (ppr other)
{-
Note [dataToTag magic]
~~~~~~~~~~~~~~~~~~~~~~
Horrid: we must ensure that the arg of data2TagOp is evaluated
(data2tag x) --> (case x of y -> data2tag y)
(yuk yuk) take into account the lambdas we've now introduced
How might it not be evaluated? Well, we might have floated it out
of the scope of a `seq`, or dropped the `seq` altogether.
************************************************************************
* *
Simple CoreSyn operations
* *
************************************************************************
-}
{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
Note [Eta expansion]
~~~~~~~~~~~~~~~~~~~~~
Eta expand to match the arity claimed by the binder. Remember,
CorePrep must not change arity.
Eta expansion might not have happened already, because it is done by
the simplifier only when there is at least one lambda already.
NB1: we could refrain when the RHS is trivial (which can happen
for exported things). This would reduce the amount of code
generated (a little) and make things a little worse for
code compiled without -O. The case in point is data constructor
wrappers.
NB2: we have to be careful that the result of etaExpand doesn't
invalidate any of the assumptions that CorePrep is attempting
to establish. One possible cause is eta expanding inside of
an SCC note - we're now careful in etaExpand to make sure the
SCC is pushed inside any new lambdas that are generated.
Note [Eta expansion and the CorePrep invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It turns out to be much much easier to do eta expansion
*after* the main CorePrep stuff. But that places constraints
on the eta expander: given a CpeRhs, it must return a CpeRhs.
For example here is what we do not want:
f = /\a -> g (h 3) -- h has arity 2
After ANFing we get
f = /\a -> let s = h 3 in g s
and now we do NOT want eta expansion to give
f = /\a -> \ y -> (let s = h 3 in g s) y
Instead CoreArity.etaExpand gives
f = /\a -> \y -> let s = h 3 in g s y
-}
cpeEtaExpand :: Arity -> CpeRhs -> CpeRhs
cpeEtaExpand arity expr
| arity == 0 = expr
| otherwise = etaExpand arity expr
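-- For example (illustrative only): if g has arity 2 and appears as a bare
-- RHS, then
--   cpeEtaExpand 2 g  =  \x y -> g x y
-- modulo the care taken by CoreArity.etaExpand described in
-- Note [Eta expansion] above.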
{-
-- -----------------------------------------------------------------------------
-- Eta reduction
-- -----------------------------------------------------------------------------
Why try eta reduction? Hasn't the simplifier already done eta?
But the simplifier only eta reduces if that leaves something
trivial (like f, or f Int). But for deLam it would be enough to
get to a partial application:
case x of { p -> \xs. map f xs }
==> case x of { p -> map f }
-}
tryEtaReducePrep :: [CoreBndr] -> CoreExpr -> Maybe CoreExpr
tryEtaReducePrep bndrs expr@(App _ _)
| ok_to_eta_reduce f
, n_remaining >= 0
, and (zipWith ok bndrs last_args)
, not (any (`elemVarSet` fvs_remaining) bndrs)
, exprIsHNF remaining_expr -- Don't turn value into a non-value
-- else the behaviour with 'seq' changes
= Just remaining_expr
where
(f, args) = collectArgs expr
remaining_expr = mkApps f remaining_args
fvs_remaining = exprFreeVars remaining_expr
(remaining_args, last_args) = splitAt n_remaining args
n_remaining = length args - length bndrs
ok bndr (Var arg) = bndr == arg
ok _ _ = False
-- We can't eta reduce something which must be saturated.
ok_to_eta_reduce (Var f) = not (hasNoBinding f)
ok_to_eta_reduce _ = False -- Safe. ToDo: generalise
tryEtaReducePrep bndrs (Let bind@(NonRec _ r) body)
| not (any (`elemVarSet` fvs) bndrs)
= case tryEtaReducePrep bndrs body of
Just e -> Just (Let bind e)
Nothing -> Nothing
where
fvs = exprFreeVars r
-- NB: do not attempt to eta-reduce across ticks
-- Otherwise we risk reducing
-- \x. (Tick (Breakpoint {x}) f x)
-- ==> Tick (breakpoint {x}) f
-- which is bogus (Trac #17228)
-- tryEtaReducePrep bndrs (Tick tickish e)
-- = fmap (mkTick tickish) $ tryEtaReducePrep bndrs e
tryEtaReducePrep _ _ = Nothing
{-
************************************************************************
* *
Floats
* *
************************************************************************
Note [Pin demand info on floats]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pin demand info on floated lets, so that we can see the one-shot thunks.
-}
data FloatingBind
= FloatLet CoreBind -- Rhs of bindings are CpeRhss
-- They are always of lifted type;
-- unlifted ones are done with FloatCase
| FloatCase
Id CpeBody
Bool -- The bool indicates "ok-for-speculation"
-- | See Note [Floating Ticks in CorePrep]
| FloatTick (Tickish Id)
data Floats = Floats OkToSpec (OrdList FloatingBind)
instance Outputable FloatingBind where
ppr (FloatLet b) = ppr b
ppr (FloatCase b r ok) = brackets (ppr ok) <+> ppr b <+> equals <+> ppr r
ppr (FloatTick t) = ppr t
instance Outputable Floats where
ppr (Floats flag fs) = text "Floats" <> brackets (ppr flag) <+>
braces (vcat (map ppr (fromOL fs)))
instance Outputable OkToSpec where
ppr OkToSpec = text "OkToSpec"
ppr IfUnboxedOk = text "IfUnboxedOk"
ppr NotOkToSpec = text "NotOkToSpec"
-- Can we float these binds out of the rhs of a let? We cache this decision
-- to avoid having to recompute it in a non-linear way when there are
-- deeply nested lets.
data OkToSpec
= OkToSpec -- Lazy bindings of lifted type
| IfUnboxedOk -- A mixture of lazy lifted bindings and
-- ok-to-speculate unlifted bindings
| NotOkToSpec -- Some not-ok-to-speculate unlifted bindings
mkFloat :: Demand -> Bool -> Id -> CpeRhs -> FloatingBind
mkFloat dmd is_unlifted bndr rhs
| use_case = FloatCase bndr rhs (exprOkForSpeculation rhs)
| is_hnf = FloatLet (NonRec bndr rhs)
| otherwise = FloatLet (NonRec (setIdDemandInfo bndr dmd) rhs)
-- See Note [Pin demand info on floats]
where
is_hnf = exprIsHNF rhs
is_strict = isStrictDmd dmd
use_case = is_unlifted || is_strict && not is_hnf
-- Don't make a case for a value binding,
-- even if it's strict. Otherwise we get
-- case (\x -> e) of ...!
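-- Two illustrative cases (not in the original source), writing dmd for the demand:
--   * strict dmd, lifted, non-HNF rhs:
--       mkFloat dmd False x (f y)  =  FloatCase x (f y) (exprOkForSpeculation (f y))
--   * lazy dmd, lifted, non-HNF rhs:
--       mkFloat dmd False x (f y)  =  FloatLet (NonRec x' (f y))
--     where x' is x with the demand pinned on it; see Note [Pin demand info on floats].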
emptyFloats :: Floats
emptyFloats = Floats OkToSpec nilOL
isEmptyFloats :: Floats -> Bool
isEmptyFloats (Floats _ bs) = isNilOL bs
wrapBinds :: Floats -> CpeBody -> CpeBody
wrapBinds (Floats _ binds) body
= foldrOL mk_bind body binds
where
mk_bind (FloatCase bndr rhs _) body = Case rhs bndr (exprType body) [(DEFAULT, [], body)]
mk_bind (FloatLet bind) body = Let bind body
mk_bind (FloatTick tickish) body = mkTick tickish body
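-- For instance (schematic, not in the original source): wrapping the floats
--   [ FloatCase s (h 3) ok, FloatLet (NonRec v e) ]
-- around the body (g s v) gives
--   case h 3 of s { __DEFAULT -> let v = e in g s v }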
addFloat :: Floats -> FloatingBind -> Floats
addFloat (Floats ok_to_spec floats) new_float
= Floats (combine ok_to_spec (check new_float)) (floats `snocOL` new_float)
where
check (FloatLet _) = OkToSpec
check (FloatCase _ _ ok_for_spec)
| ok_for_spec = IfUnboxedOk
| otherwise = NotOkToSpec
check FloatTick{} = OkToSpec
-- The ok-for-speculation flag says that it's safe to
-- float this Case out of a let, and thereby do it more eagerly
-- We need the top-level flag because it's never ok to float
-- an unboxed binding to the top level
unitFloat :: FloatingBind -> Floats
unitFloat = addFloat emptyFloats
appendFloats :: Floats -> Floats -> Floats
appendFloats (Floats spec1 floats1) (Floats spec2 floats2)
= Floats (combine spec1 spec2) (floats1 `appOL` floats2)
concatFloats :: [Floats] -> OrdList FloatingBind
concatFloats = foldr (\ (Floats _ bs1) bs2 -> appOL bs1 bs2) nilOL
combine :: OkToSpec -> OkToSpec -> OkToSpec
combine NotOkToSpec _ = NotOkToSpec
combine _ NotOkToSpec = NotOkToSpec
combine IfUnboxedOk _ = IfUnboxedOk
combine _ IfUnboxedOk = IfUnboxedOk
combine _ _ = OkToSpec
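-- In other words, 'combine' returns the worse of its two arguments under the
-- ordering OkToSpec > IfUnboxedOk > NotOkToSpec.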
deFloatTop :: Floats -> [CoreBind]
-- For top level only; we don't expect any FloatCases
deFloatTop (Floats _ floats)
= foldrOL get [] floats
where
get (FloatLet b) bs = occurAnalyseRHSs b : bs
get (FloatCase var body _) bs =
occurAnalyseRHSs (NonRec var body) : bs
get b _ = pprPanic "corePrepPgm" (ppr b)
-- See Note [Dead code in CorePrep]
occurAnalyseRHSs (NonRec x e) = NonRec x (occurAnalyseExpr_NoBinderSwap e)
occurAnalyseRHSs (Rec xes) = Rec [(x, occurAnalyseExpr_NoBinderSwap e) | (x, e) <- xes]
---------------------------------------------------------------------------
canFloatFromNoCaf :: Platform -> Floats -> CpeRhs -> Maybe (Floats, CpeRhs)
-- Note [CafInfo and floating]
canFloatFromNoCaf platform (Floats ok_to_spec fs) rhs
| OkToSpec <- ok_to_spec -- Worth trying
, Just (subst, fs') <- go (emptySubst, nilOL) (fromOL fs)
= Just (Floats OkToSpec fs', subst_expr subst rhs)
| otherwise
= Nothing
where
subst_expr = substExpr (text "CorePrep")
go :: (Subst, OrdList FloatingBind) -> [FloatingBind]
-> Maybe (Subst, OrdList FloatingBind)
go (subst, fbs_out) [] = Just (subst, fbs_out)
go (subst, fbs_out) (FloatLet (NonRec b r) : fbs_in)
| rhs_ok r
= go (subst', fbs_out `snocOL` new_fb) fbs_in
where
(subst', b') = set_nocaf_bndr subst b
new_fb = FloatLet (NonRec b' (subst_expr subst r))
go (subst, fbs_out) (FloatLet (Rec prs) : fbs_in)
| all rhs_ok rs
= go (subst', fbs_out `snocOL` new_fb) fbs_in
where
(bs,rs) = unzip prs
(subst', bs') = mapAccumL set_nocaf_bndr subst bs
rs' = map (subst_expr subst') rs
new_fb = FloatLet (Rec (bs' `zip` rs'))
go (subst, fbs_out) (ft@FloatTick{} : fbs_in)
= go (subst, fbs_out `snocOL` ft) fbs_in
go _ _ = Nothing -- Encountered a caffy binding
------------
set_nocaf_bndr subst bndr
= (extendIdSubst subst bndr (Var bndr'), bndr')
where
bndr' = bndr `setIdCafInfo` NoCafRefs
------------
rhs_ok :: CoreExpr -> Bool
-- We can only float to top level from a NoCaf thing if
-- the new binding is static. However it can't mention
-- any non-static things or it would *already* be Caffy
rhs_ok = rhsIsStatic platform (\_ -> False)
(\i -> pprPanic "rhsIsStatic" (integer i))
-- Integer literals should not show up
wantFloatNested :: RecFlag -> Demand -> Bool -> Floats -> CpeRhs -> Bool
wantFloatNested is_rec dmd is_unlifted floats rhs
= isEmptyFloats floats
|| isStrictDmd dmd
|| is_unlifted
|| (allLazyNested is_rec floats && exprIsHNF rhs)
-- Why the test for allLazyNested?
-- v = f (x `divInt#` y)
-- we don't want to float the case, even if f has arity 2,
-- because floating the case would make it evaluated too early
allLazyTop :: Floats -> Bool
allLazyTop (Floats OkToSpec _) = True
allLazyTop _ = False
allLazyNested :: RecFlag -> Floats -> Bool
allLazyNested _ (Floats OkToSpec _) = True
allLazyNested _ (Floats NotOkToSpec _) = False
allLazyNested is_rec (Floats IfUnboxedOk _) = isNonRec is_rec
{-
************************************************************************
* *
Cloning
* *
************************************************************************
-}
-- ---------------------------------------------------------------------------
-- The environment
-- ---------------------------------------------------------------------------
-- Note [Inlining in CorePrep]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- There is a subtle but important invariant that must be upheld in the output
-- of CorePrep: there are no "trivial" updatable thunks. Thus, this Core
-- is impermissible:
--
-- let x :: ()
-- x = y
--
-- (where y is a reference to a GLOBAL variable). Thunks like this are silly:
-- they can always be profitably replaced by inlining x with y. Consequently,
-- the code generator/runtime does not bother implementing this properly
-- (specifically, there is no implementation of stg_ap_0_upd_info, which is the
-- stack frame that would be used to update this thunk. The "0" means it has
-- zero free variables.)
--
-- In general, the inliner is good at eliminating these let-bindings. However,
-- there is one case where these trivial updatable thunks can arise: when
-- we are optimizing away 'lazy' (see Note [lazyId magic], and also
-- 'cpeRhsE'.) Then, we could have started with:
--
-- let x :: ()
-- x = lazy @ () y
--
-- which is a perfectly fine, non-trivial thunk, but then CorePrep will
-- drop 'lazy', giving us 'x = y' which is trivial and impermissible.
-- The solution is for CorePrep to have a miniature inlining pass which deals
-- with cases like this. We can then drop the let-binding altogether.
--
-- Why does the removal of 'lazy' have to occur in CorePrep?
-- The gory details are in Note [lazyId magic] in MkId, but the
-- main reason is that lazy must appear in unfoldings (optimizer
-- output) and it must prevent call-by-value for catch# (which
-- is implemented by CorePrep.)
--
-- An alternate strategy for solving this problem is to have the
-- inliner treat 'lazy e' as a trivial expression if 'e' is trivial.
-- We decided not to adopt this solution to keep the definition
-- of 'exprIsTrivial' simple.
--
-- There is ONE caveat however: for top-level bindings we have
-- to preserve the binding so that we float the (hacky) non-recursive
-- binding for data constructors; see Note [Data constructor workers].
--
-- Note [CorePrep inlines trivial CoreExpr not Id]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Why does cpe_env need to be an IdEnv CoreExpr, as opposed to an
-- IdEnv Id? Naively, we might conjecture that trivial updatable thunks
-- as per Note [Inlining in CorePrep] always have the form
-- 'lazy @ SomeType gbl_id'. But this is not true: the following is
-- perfectly reasonable Core:
--
-- let x :: ()
-- x = lazy @ (forall a. a) y @ Bool
--
-- When we inline 'x' after eliminating 'lazy', we need to replace
-- occurrences of 'x' with 'y @ Bool', not just 'y'. Situations like
-- this can easily arise with higher-rank types; thus, cpe_env must
-- map to CoreExprs, not Ids.
data CorePrepEnv
= CPE { cpe_dynFlags :: DynFlags
, cpe_env :: IdEnv CoreExpr -- Clone local Ids
-- ^ This environment is used for three operations:
--
-- 1. To support cloning of local Ids so that they are
-- all unique (see item (6) of CorePrep overview).
--
-- 2. To support beta-reduction of runRW, see
-- Note [runRW magic] and Note [runRW arg].
--
-- 3. To let us inline trivial RHSs of non top-level let-bindings,
-- see Note [lazyId magic], Note [Inlining in CorePrep]
-- and Note [CorePrep inlines trivial CoreExpr not Id] (#12076)
, cpe_mkIntegerId :: Id
, cpe_integerSDataCon :: Maybe DataCon
}
lookupMkIntegerName :: DynFlags -> HscEnv -> IO Id
lookupMkIntegerName dflags hsc_env
= guardIntegerUse dflags $ liftM tyThingId $
lookupGlobal hsc_env mkIntegerName
lookupIntegerSDataConName :: DynFlags -> HscEnv -> IO (Maybe DataCon)
lookupIntegerSDataConName dflags hsc_env
= guardIntegerUse dflags $ liftM (Just . tyThingDataCon) $
lookupGlobal hsc_env integerSDataConName
-- | Helper for 'lookupMkIntegerName' and 'lookupIntegerSDataConName'
guardIntegerUse :: DynFlags -> IO a -> IO a
guardIntegerUse dflags act
| thisPackage dflags == primUnitId
= return $ panic "Can't use Integer in ghc-prim"
| thisPackage dflags == integerUnitId
= return $ panic "Can't use Integer in integer-*"
| otherwise = act
mkInitialCorePrepEnv :: DynFlags -> HscEnv -> IO CorePrepEnv
mkInitialCorePrepEnv dflags hsc_env
= do mkIntegerId <- lookupMkIntegerName dflags hsc_env
integerSDataCon <- lookupIntegerSDataConName dflags hsc_env
return $ CPE {
cpe_dynFlags = dflags,
cpe_env = emptyVarEnv,
cpe_mkIntegerId = mkIntegerId,
cpe_integerSDataCon = integerSDataCon
}
extendCorePrepEnv :: CorePrepEnv -> Id -> Id -> CorePrepEnv
extendCorePrepEnv cpe id id'
= cpe { cpe_env = extendVarEnv (cpe_env cpe) id (Var id') }
extendCorePrepEnvExpr :: CorePrepEnv -> Id -> CoreExpr -> CorePrepEnv
extendCorePrepEnvExpr cpe id expr
= cpe { cpe_env = extendVarEnv (cpe_env cpe) id expr }
extendCorePrepEnvList :: CorePrepEnv -> [(Id,Id)] -> CorePrepEnv
extendCorePrepEnvList cpe prs
= cpe { cpe_env = extendVarEnvList (cpe_env cpe)
(map (\(id, id') -> (id, Var id')) prs) }
lookupCorePrepEnv :: CorePrepEnv -> Id -> CoreExpr
lookupCorePrepEnv cpe id
= case lookupVarEnv (cpe_env cpe) id of
Nothing -> Var id
Just exp -> exp
getMkIntegerId :: CorePrepEnv -> Id
getMkIntegerId = cpe_mkIntegerId
------------------------------------------------------------------------------
-- Cloning binders
-- ---------------------------------------------------------------------------
cpCloneBndrs :: CorePrepEnv -> [Var] -> UniqSM (CorePrepEnv, [Var])
cpCloneBndrs env bs = mapAccumLM cpCloneBndr env bs
cpCloneBndr :: CorePrepEnv -> Var -> UniqSM (CorePrepEnv, Var)
cpCloneBndr env bndr
| isLocalId bndr, not (isCoVar bndr)
= do bndr' <- setVarUnique bndr <$> getUniqueM
-- We are going to OccAnal soon, so drop (now-useless) rules/unfoldings
-- so that we can drop more stuff as dead code.
-- See also Note [Dead code in CorePrep]
let bndr'' = bndr' `setIdUnfolding` noUnfolding
`setIdSpecialisation` emptyRuleInfo
return (extendCorePrepEnv env bndr bndr'', bndr'')
| otherwise -- Top level things, which we don't want
-- to clone, have become GlobalIds by now
-- And we don't clone tyvars, or coercion variables
= return (env, bndr)
------------------------------------------------------------------------------
-- Cloning ccall Ids; each must have a unique name,
-- to give the code generator a handle to hang it on
-- ---------------------------------------------------------------------------
fiddleCCall :: Id -> UniqSM Id
fiddleCCall id
| isFCallId id = (id `setVarUnique`) <$> getUniqueM
| otherwise = return id
------------------------------------------------------------------------------
-- Generating new binders
-- ---------------------------------------------------------------------------
newVar :: Type -> UniqSM Id
newVar ty
= seqType ty `seq` do
uniq <- getUniqueM
return (mkSysLocal (fsLit "sat") uniq ty)
------------------------------------------------------------------------------
-- Floating ticks
-- ---------------------------------------------------------------------------
--
-- Note [Floating Ticks in CorePrep]
--
-- It might seem counter-intuitive to float ticks by default, given
-- that we don't actually want to move them if we can help it. On the
-- other hand, nothing gets very far in CorePrep anyway, and we want
-- to preserve the order of let bindings and tick annotations in
-- relation to each other. For example, if we just wrapped let floats
-- when they pass through ticks, we might end up performing the
-- following transformation:
--
-- src<...> let foo = bar in baz
-- ==> let foo = src<...> bar in src<...> baz
--
-- Because the let-binding would float through the tick, and then
-- immediately materialize, achieving nothing but decreasing tick
-- accuracy. The only special case is the following scenario:
--
-- let foo = src<...> (let a = b in bar) in baz
-- ==> let foo = src<...> bar; a = src<...> b in baz
--
-- Here we would not want the source tick to end up covering "baz" and
-- therefore refrain from pushing ticks outside. Instead, we copy them
-- into the floating binds (here "a") in cpePair. Note that where "b"
-- or "bar" are (value) lambdas we have to push the annotations
-- further inside in order to uphold our rules.
--
-- All of this is implemented below in @wrapTicks@.
-- | Like wrapFloats, but only wraps tick floats
wrapTicks :: Floats -> CoreExpr -> (Floats, CoreExpr)
wrapTicks (Floats flag floats0) expr =
(Floats flag (toOL $ reverse floats1), foldr mkTick expr (reverse ticks1))
where (floats1, ticks1) = foldlOL go ([], []) $ floats0
-- Deeply nested constructors will produce long lists of
-- redundant source note floats here. We need to eliminate
-- those early, as relying on mkTick to spot it after the fact
-- can yield O(n^3) complexity [#11095]
go (floats, ticks) (FloatTick t)
= ASSERT(tickishPlace t == PlaceNonLam)
(floats, if any (flip tickishContains t) ticks
then ticks else t:ticks)
go (floats, ticks) f
= (foldr wrap f (reverse ticks):floats, ticks)
wrap t (FloatLet bind) = FloatLet (wrapBind t bind)
wrap t (FloatCase b r ok) = FloatCase b (mkTick t r) ok
wrap _ other = pprPanic "wrapTicks: unexpected float!"
(ppr other)
wrapBind t (NonRec binder rhs) = NonRec binder (mkTick t rhs)
wrapBind t (Rec pairs) = Rec (mapSnd (mkTick t) pairs)
| rahulmutt/ghcvm | compiler/Eta/Core/CorePrep.hs | bsd-3-clause | 63,349 | 1 | 19 | 17,318 | 9,729 | 5,147 | 4,582 | -1 | -1 |
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | This module should contain all the global type definitions and basic instances.
{-@ LIQUID "--cabaldir" @-}
module Language.Haskell.Liquid.Types (
-- * Options
Config (..)
-- * Ghc Information
, GhcInfo (..)
, GhcSpec (..)
, TargetVars (..)
-- * Located Things
, Located (..)
, dummyLoc
-- * Symbols
, LocSymbol
, LocText
-- * Default unknown name
, dummyName, isDummy
-- * Refined Type Constructors
, RTyCon (RTyCon, rtc_tc, rtc_info)
, TyConInfo(..), defaultTyConInfo
, rTyConPVs
, rTyConPropVs
, isClassRTyCon, isClassType
-- * Refinement Types
, RType (..), Ref(..), RTProp
, RTyVar (..)
, RTAlias (..)
-- * Worlds
, HSeg (..)
, World (..)
-- * Classes describing operations on `RTypes`
, TyConable (..)
, RefTypable (..)
, SubsTy (..)
-- * Predicate Variables
, PVar (PV, pname, parg, ptype, pargs), isPropPV, pvType
, PVKind (..)
, Predicate (..)
-- * Refinements
, UReft(..)
-- * Parse-time entities describing refined data types
, DataDecl (..)
, DataConP (..)
, TyConP (..)
-- * Pre-instantiated RType
, RRType, BRType, RRProp
, BSort, BPVar
-- * Instantiated RType
, BareType, PrType
, SpecType, SpecProp
, RSort
, UsedPVar, RPVar, RReft
, REnv (..)
-- * Constructing & Destructing RTypes
, RTypeRep(..), fromRTypeRep, toRTypeRep
, mkArrow, bkArrowDeep, bkArrow, safeBkArrow
, mkUnivs, bkUniv, bkClass
, rFun, rCls, rRCls
-- * Manipulating `Predicates`
, pvars, pappSym, pApp
-- * Some tests on RTypes
, isBase
, isFunTy
, isTrivial
-- * Traversing `RType`
, efoldReft, foldReft
, mapReft, mapReftM
, mapBot, mapBind
-- * ???
, Oblig(..)
, ignoreOblig
, addTermCond
, addInvCond
-- * Inferred Annotations
, AnnInfo (..)
, Annot (..)
-- * Overall Output
, Output (..)
-- * Refinement Hole
, hole, isHole, hasHole
-- * Converting To and From Sort
, ofRSort, toRSort
, rTypeValueVar
, rTypeReft
, stripRTypeBase
-- * Class for values that can be pretty printed
, PPrint (..)
, showpp
-- * Printer Configuration
, PPEnv (..)
, Tidy (..)
, ppEnv
, ppEnvShort
-- * Modules and Imports
, ModName (..), ModType (..)
, isSrcImport, isSpecImport
, getModName, getModString
-- * Refinement Type Aliases
, RTEnv (..)
, mapRT, mapRP, mapRE
-- * Final Result
, Result (..)
-- * Errors and Error Messages
, Error
, TError (..)
, EMsg (..)
-- , LParseError (..)
, ErrorResult
, errSpan
, errOther
, errToFCrash
-- * Source information (associated with constraints)
, Cinfo (..)
-- * Measures
, Measure (..)
, CMeasure (..)
, Def (..)
, Body (..)
-- * Type Classes
, RClass (..)
-- * KV Profiling
, KVKind (..) -- types of kvars
, KVProf -- profile table
, emptyKVProf -- empty profile
, updKVProf -- extend profile
-- * Misc
, mapRTAVars
, insertsSEnv
-- * Strata
, Stratum(..), Strata
, isSVar
, getStrata
, makeDivType, makeFinType
-- * CoreToLogic
, LogicMap, toLogicMap, eAppWithMap, LMap(..)
-- * Refined Instances
, RDEnv, DEnv(..), RInstance(..)
-- * Ureftable Instances
, UReftable(..)
-- * String Literals
, liquidBegin, liquidEnd
)
where
import SrcLoc (noSrcSpan, SrcSpan)
import TyCon
import DataCon
import NameSet
import Module (moduleNameFS)
import TypeRep hiding (maybeParen, pprArrowChain)
import Var
import Text.Printf
import GHC (HscEnv, ModuleName, moduleNameString)
import GHC.Generics
import Language.Haskell.Liquid.GhcMisc
import PrelInfo (isNumericClass)
import TysWiredIn (listTyCon)
import Control.Arrow (second)
import Control.Monad (liftM, liftM2, liftM3, liftM4)
import qualified Control.Monad.Error as Ex
import Control.DeepSeq
import Control.Applicative ((<$>))
import Data.Typeable (Typeable)
import Data.Generics (Data)
import Data.Monoid hiding ((<>))
import qualified Data.Foldable as F
import Data.Hashable
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import Data.Maybe (fromMaybe)
import Data.Traversable hiding (mapM)
import Data.List (nub)
import Data.Text (Text)
import qualified Data.Text as T
import Text.Parsec.Pos (SourcePos)
import Text.Parsec.Error (ParseError)
import Text.PrettyPrint.HughesPJ
import Language.Fixpoint.Config hiding (Config)
import Language.Fixpoint.Misc
import Language.Fixpoint.Types hiding (Result, Predicate, Def, R)
import Language.Fixpoint.Names (funConName, listConName, tupConName)
import qualified Language.Fixpoint.PrettyPrint as F
import CoreSyn (CoreBind)
import Language.Haskell.Liquid.Variance
import Language.Haskell.Liquid.Misc (mapSndM, safeZip3WithError)
import Data.Default
-----------------------------------------------------------------------------
-- | Command Line Config Options --------------------------------------------
-----------------------------------------------------------------------------
-- NOTE: adding strictness annotations breaks the help message
data Config = Config {
files :: [FilePath] -- ^ source files to check
, idirs :: [FilePath] -- ^ path to directory for including specs
, diffcheck :: Bool -- ^ check subset of binders modified (+ dependencies) since last check
, real :: Bool -- ^ supports real number arithmetic
, fullcheck :: Bool -- ^ check all binders (overrides diffcheck)
, native :: Bool -- ^ use native (Haskell) fixpoint constraint solver
, binders :: [String] -- ^ set of binders to check
, noCheckUnknown :: Bool -- ^ whether to complain about specifications for unexported and unused values
, notermination :: Bool -- ^ disable termination check
, nowarnings :: Bool -- ^ disable warnings output (only show errors)
, trustinternals :: Bool -- ^ type all internal variables with true
, nocaseexpand :: Bool -- ^ disable case expand
, strata :: Bool -- ^ enable strata analysis
, notruetypes :: Bool -- ^ disable truing top level types
, totality :: Bool -- ^ check totality in definitions
, noPrune :: Bool -- ^ disable pruning unsorted Refinements
, maxParams :: Int -- ^ the maximum number of parameters to accept when mining qualifiers
, smtsolver :: Maybe SMTSolver -- ^ name of smtsolver to use [default: try z3, cvc4, mathsat in order]
, shortNames :: Bool -- ^ drop module qualifiers from pretty-printed names.
, shortErrors :: Bool -- ^ don't show subtyping errors and contexts.
, cabalDir :: Bool -- ^ find and use .cabal file to include paths to sources for imported modules
, ghcOptions :: [String] -- ^ command-line options to pass to GHC
, cFiles :: [String] -- ^ .c files to compile and link against (for GHC)
} deriving (Data, Typeable, Show, Eq)
-----------------------------------------------------------------------------
-- | Printer ----------------------------------------------------------------
-----------------------------------------------------------------------------
data Tidy = Lossy | Full deriving (Eq, Ord)
class PPrint a where
pprint :: a -> Doc
pprintTidy :: Tidy -> a -> Doc
pprintTidy _ = pprint
showpp :: (PPrint a) => a -> String
showpp = render . pprint
instance PPrint a => PPrint (Maybe a) where
pprint = maybe (text "Nothing") ((text "Just" <+>) . pprint)
instance PPrint a => PPrint [a] where
pprint = brackets . intersperse comma . map pprint
instance (PPrint a, PPrint b) => PPrint (a,b) where
pprint (x, y) = pprint x <+> text ":" <+> pprint y
data PPEnv
= PP { ppPs :: Bool
, ppTyVar :: Bool -- TODO if set to True all Bare fails
, ppSs :: Bool
, ppShort :: Bool
}
ppEnv = ppEnvPrintPreds
_ppEnvCurrent = PP False False False False
ppEnvPrintPreds = PP False False False False
ppEnvShort pp = pp { ppShort = True }
------------------------------------------------------------------
-- | GHC Information : Code & Spec ------------------------------
------------------------------------------------------------------
data GhcInfo = GI {
env :: !HscEnv
, cbs :: ![CoreBind]
, derVars :: ![Var]
, impVars :: ![Var]
, defVars :: ![Var]
, useVars :: ![Var]
, hqFiles :: ![FilePath]
, imports :: ![String]
, includes :: ![FilePath]
, spec :: !GhcSpec
}
-- | The following is the overall type for /specifications/ obtained from
-- parsing the target source and dependent libraries
data GhcSpec = SP {
tySigs :: ![(Var, Located SpecType)] -- ^ Asserted Reftypes
-- eg. see include/Prelude.spec
, asmSigs :: ![(Var, Located SpecType)] -- ^ Assumed Reftypes
, ctors :: ![(Var, Located SpecType)] -- ^ Data Constructor Measure Sigs
-- eg. (:) :: a -> xs:[a] -> {v: Int | v = 1 + len(xs) }
, meas :: ![(Symbol, Located SpecType)] -- ^ Measure Types
-- eg. len :: [a] -> Int
, invariants :: ![Located SpecType] -- ^ Data Type Invariants
-- eg. forall a. {v: [a] | len(v) >= 0}
, ialiases :: ![(Located SpecType, Located SpecType)] -- ^ Data Type Invariant Aliases
, dconsP :: ![(DataCon, DataConP)] -- ^ Predicated Data-Constructors
-- e.g. see tests/pos/Map.hs
, tconsP :: ![(TyCon, TyConP)] -- ^ Predicated Type-Constructors
-- eg. see tests/pos/Map.hs
, freeSyms :: ![(Symbol, Var)] -- ^ List of `Symbol` free in spec and corresponding GHC var
-- eg. (Cons, Cons#7uz) from tests/pos/ex1.hs
, tcEmbeds :: TCEmb TyCon -- ^ How to embed GHC Tycons into fixpoint sorts
-- e.g. "embed Set as Set_set" from include/Data/Set.spec
, qualifiers :: ![Qualifier] -- ^ Qualifiers in Source/Spec files
-- e.g tests/pos/qualTest.hs
, tgtVars :: ![Var] -- ^ Top-level Binders To Verify (empty means ALL binders)
, decr :: ![(Var, [Int])] -- ^ Lexicographically ordered size witnesses for termination
, texprs :: ![(Var, [Expr])] -- ^ Lexicographically ordered expressions for termination
, lvars :: !(S.HashSet Var) -- ^ Variables that should be checked in the environment they are used
, lazy :: !(S.HashSet Var) -- ^ Binders to IGNORE during termination checking
, autosize :: !(S.HashSet TyCon) -- ^ Type constructors whose size measure is generated automatically (for termination checking)
, config :: !Config -- ^ Configuration Options
, exports :: !NameSet -- ^ `Name`s exported by the module being verified
, measures :: [Measure SpecType DataCon]
, tyconEnv :: M.HashMap TyCon RTyCon
, dicts :: DEnv Var SpecType -- ^ Dictionary Environment
}
type LogicMap = M.HashMap Symbol LMap
data LMap = LMap { lvar :: Symbol
, largs :: [Symbol]
, lexpr :: Expr
}
instance Show LMap where
show (LMap x xs e) = show x ++ " " ++ show xs ++ "\t|->\t" ++ show e
toLogicMap = M.fromList . map toLMap
where
toLMap (x, xs, e) = (x, LMap {lvar = x, largs = xs, lexpr = e})
eAppWithMap lmap f es def
| Just (LMap _ xs e) <- M.lookup (val f) lmap
= subst (mkSubst $ zip xs es) e
| otherwise
= def
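-- For example (illustrative only): if 'lmap' maps the symbol plus to
--   LMap { lvar = plus, largs = [x, y], lexpr = x + y }
-- then eAppWithMap lmap plus [e1, e2] def yields (e1 + e2), while symbols
-- not present in the map fall back to 'def'.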
data TyConP = TyConP { freeTyVarsTy :: ![RTyVar]
, freePredTy :: ![PVar RSort]
, freeLabelTy :: ![Symbol]
, varianceTs :: !VarianceInfo
, variancePs :: !VarianceInfo
, sizeFun :: !(Maybe (Symbol -> Expr))
} deriving (Generic, Data, Typeable)
data DataConP = DataConP { dc_loc :: !SourcePos
, freeTyVars :: ![RTyVar]
, freePred :: ![PVar RSort]
, freeLabels :: ![Symbol]
, tyConsts :: ![SpecType] -- ^ FIXME: WHAT IS THIS??
, tyArgs :: ![(Symbol, SpecType)] -- ^ These are backwards, why??
, tyRes :: !SpecType
, dc_locE :: !SourcePos
} deriving (Generic, Data, Typeable)
-- instance {-# OVERLAPPING #-} Data TyConP
-- instance {-# OVERLAPPING #-} Data DataConP
-- | Which Top-Level Binders Should be Verified
data TargetVars = AllVars | Only ![Var]
--------------------------------------------------------------------
-- | Abstract Predicate Variables ----------------------------------
--------------------------------------------------------------------
data PVar t
= PV { pname :: !Symbol
, ptype :: !(PVKind t)
, parg :: !Symbol
, pargs :: ![(t, Symbol, Expr)]
}
deriving (Generic, Data, Typeable, Show)
pvType p = case ptype p of
PVProp t -> t
PVHProp -> errorstar "pvType on HProp-PVar"
data PVKind t
= PVProp t | PVHProp
deriving (Generic, Data, Typeable, F.Foldable, Traversable, Show)
instance Eq (PVar t) where
pv == pv' = pname pv == pname pv' {- UNIFY: What about: && eqArgs pv pv' -}
instance Ord (PVar t) where
compare (PV n _ _ _) (PV n' _ _ _) = compare n n'
instance Functor PVKind where
fmap f (PVProp t) = PVProp (f t)
fmap _ (PVHProp) = PVHProp
instance Functor PVar where
fmap f (PV x t v txys) = PV x (f <$> t) v (mapFst3 f <$> txys)
instance (NFData a) => NFData (PVKind a) where
rnf (PVProp t) = rnf t
rnf (PVHProp) = ()
instance (NFData a) => NFData (PVar a) where
rnf (PV n t v txys) = rnf n `seq` rnf v `seq` rnf t `seq` rnf txys
instance Hashable (PVar a) where
hashWithSalt i (PV n _ _ _) = hashWithSalt i n
--------------------------------------------------------------------
------ Strictness --------------------------------------------------
--------------------------------------------------------------------
instance NFData Var where
rnf x = seq x ()
instance NFData SrcSpan where
rnf x = seq x ()
--------------------------------------------------------------------
------------------ Predicates --------------------------------------
--------------------------------------------------------------------
type UsedPVar = PVar ()
newtype Predicate = Pr [UsedPVar] deriving (Generic, Data, Typeable)
instance NFData Predicate where
rnf _ = ()
instance Monoid Predicate where
mempty = pdTrue
mappend p p' = pdAnd [p, p']
instance (Monoid a) => Monoid (UReft a) where
mempty = U mempty mempty mempty
mappend (U x y z) (U x' y' z') = U (mappend x x') (mappend y y') (mappend z z')
pdTrue = Pr []
pdAnd ps = Pr (nub $ concatMap pvars ps)
pvars (Pr pvs) = pvs
instance Subable UsedPVar where
syms pv = [ y | (_, x, EVar y) <- pargs pv, x /= y ]
subst s pv = pv { pargs = mapThd3 (subst s) <$> pargs pv }
substf f pv = pv { pargs = mapThd3 (substf f) <$> pargs pv }
substa f pv = pv { pargs = mapThd3 (substa f) <$> pargs pv }
instance Subable Predicate where
syms (Pr pvs) = concatMap syms pvs
subst s (Pr pvs) = Pr (subst s <$> pvs)
substf f (Pr pvs) = Pr (substf f <$> pvs)
substa f (Pr pvs) = Pr (substa f <$> pvs)
instance Subable Qualifier where
syms = syms . q_body
subst = mapQualBody . subst
substf = mapQualBody . substf
substa = mapQualBody . substa
mapQualBody f q = q { q_body = f (q_body q) }
instance NFData r => NFData (UReft r) where
rnf (U r p s) = rnf r `seq` rnf p `seq` rnf s
instance NFData Strata where
rnf _ = ()
instance NFData PrType where
rnf _ = ()
instance NFData RTyVar where
rnf _ = ()
-- MOVE TO TYPES
newtype RTyVar = RTV TyVar deriving (Generic, Data, Typeable)
instance Symbolic RTyVar where
symbol (RTV tv) = symbol . T.pack . showPpr $ tv
data RTyCon = RTyCon
{ rtc_tc :: TyCon -- ^ GHC Type Constructor
, rtc_pvars :: ![RPVar] -- ^ Predicate Parameters
, rtc_info :: !TyConInfo -- ^ TyConInfo
}
deriving (Generic, Data, Typeable)
-- | Accessors for @RTyCon@
isClassRTyCon = isClassTyCon . rtc_tc
rTyConPVs = rtc_pvars
rTyConPropVs = filter isPropPV . rtc_pvars
isPropPV = isProp . ptype
isClassType (RApp c _ _ _) = isClass c
isClassType _ = False
-- rTyConPVHPs = filter isHPropPV . rtc_pvars
-- isHPropPV = not . isPropPV
isProp (PVProp _) = True
isProp _ = False
defaultTyConInfo = TyConInfo [] [] Nothing
instance Default TyConInfo where
def = defaultTyConInfo
-----------------------------------------------------------------------
-- | Co- and Contra-variance for TyCon --------------------------------
-----------------------------------------------------------------------
-- | Indexes start from 0 and type or predicate arguments can be both
-- covariant and contravariant, e.g., for the Foo data type below
--
-- data Foo a b c d <p :: b -> Prop, q :: Int -> Prop, r :: a -> Prop>
-- = F (a<r> -> b<p>) | Q (c -> a) | G (Int<q> -> a<r>)
--
-- there will be:
--
--    varianceTyArgs = [Bivariant, Covariant, Contravariant, Invariant]
--    variancePsArgs = [Covariant, Contravariant, Bivariant]
--
data TyConInfo = TyConInfo
{ varianceTyArgs :: !VarianceInfo -- ^ variance info for type variables
, variancePsArgs :: !VarianceInfo -- ^ variance info for predicate variables
, sizeFunction :: !(Maybe (Symbol -> Expr)) -- ^ logical function that computes the size of the structure
} deriving (Generic, Data, Typeable)
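-- As a concrete (illustrative) value, the @Foo@ example above would carry
-- roughly the following info; the @VarianceInfo@ constructor names used here
-- are assumptions made for the sketch:
--
--   fooTcInfo = TyConInfo [Bivariant, Covariant, Contravariant, Invariant]
--                         [Covariant, Contravariant, Bivariant]
--                         Nothing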
-- instance {-# OVERLAPPING #-} Data TyConInfo
instance Show TyConInfo where
show (TyConInfo x y _) = show x ++ "\n" ++ show y
--------------------------------------------------------------------
---- Unified Representation of Refinement Types --------------------
--------------------------------------------------------------------
-- MOVE TO TYPES
data RType c tv r
= RVar {
rt_var :: !tv
, rt_reft :: !r
}
| RFun {
rt_bind :: !Symbol
, rt_in :: !(RType c tv r)
, rt_out :: !(RType c tv r)
, rt_reft :: !r
}
| RAllT {
rt_tvbind :: !tv
, rt_ty :: !(RType c tv r)
}
| RAllP {
rt_pvbind :: !(PVar (RType c tv ()))
, rt_ty :: !(RType c tv r)
}
| RAllS {
rt_sbind :: !(Symbol)
, rt_ty :: !(RType c tv r)
}
| RApp {
rt_tycon :: !c
, rt_args :: ![RType c tv r]
, rt_pargs :: ![RTProp c tv r]
, rt_reft :: !r
}
| RAllE {
rt_bind :: !Symbol
, rt_allarg :: !(RType c tv r)
, rt_ty :: !(RType c tv r)
}
| REx {
rt_bind :: !Symbol
, rt_exarg :: !(RType c tv r)
, rt_ty :: !(RType c tv r)
}
| RExprArg (Located Expr) -- ^ For expression arguments to type aliases
-- see tests/pos/vector2.hs
| RAppTy{
rt_arg :: !(RType c tv r)
, rt_res :: !(RType c tv r)
, rt_reft :: !r
}
| RRTy {
rt_env :: ![(Symbol, RType c tv r)]
, rt_ref :: !r
, rt_obl :: !Oblig
, rt_ty :: !(RType c tv r)
}
| RHole r -- ^ let LH match against the Haskell type and add k-vars, e.g. `x:_`
-- see tests/pos/Holes.hs
deriving (Generic, Data, Typeable)
data Oblig
= OTerm -- ^ Obligation that proves termination
| OInv -- ^ Obligation that proves invariants
| OCons -- ^ Obligation that proves constraints
deriving (Generic, Data, Typeable)
ignoreOblig (RRTy _ _ _ t) = t
ignoreOblig t = t
instance Show Oblig where
show OTerm = "termination-condition"
show OInv = "invariant-obligation"
show OCons = "constraint-obligation"
instance PPrint Oblig where
pprint = text . show
-- | @Ref@ describes `Prop τ` and `HProp` arguments applied to type constructors.
-- For example, in [a]<{\h -> v > h}>, we apply (via `RApp`)
-- * the `RProp` denoted by `{\h -> v > h}` to
-- * the `RTyCon` denoted by `[]`.
-- Thus, @Ref@ is used for abstract-predicate (arguments) that are associated
-- with _type constructors_ i.e. whose semantics are _dependent upon_ the data-type.
-- In contrast, the `Predicate` argument in `ur_pred` in the @UReft@ applies
-- directly to any type and has semantics _independent of_ the data-type.
data Ref τ r t
= RPropP {
rf_args :: [(Symbol, τ)]
, rf_reft :: r
} -- ^ Parse-time `RProp`
| RProp {
rf_args :: [(Symbol, τ)]
, rf_body :: t
} -- ^ Abstract refinement associated with `RTyCon`
| RHProp {
rf_args :: [(Symbol, τ)]
, rf_heap :: World t
} -- ^ Abstract heap-refinement associated with `RTyCon`
deriving (Generic, Data, Typeable)
-- | @RTProp@ is a convenient alias for @Ref@ that will save a bunch of typing.
-- In general, perhaps we need not expose @Ref@ directly at all.
type RTProp c tv r = Ref (RType c tv ()) r (RType c tv r)
-- | A @World@ is a Separation Logic predicate that is essentially a sequence of binders
-- that satisfies two invariants (TODO:LIQUID):
-- 1. Each `hs_addr :: Symbol` appears at most once,
-- 2. There is at most one `HVar` in a list.
newtype World t = World [HSeg t]
deriving (Generic, Data, Typeable)
data HSeg t = HBind {hs_addr :: !Symbol, hs_val :: t}
| HVar UsedPVar
deriving (Generic, Data, Typeable)
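-- A well-formed (illustrative) world: @World [HBind x t1, HBind y t2, HVar p]@
-- with @x /= y@ and a single @HVar@ segment, satisfying both invariants above.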
data UReft r
= U { ur_reft :: !r, ur_pred :: !Predicate, ur_strata :: !Strata }
deriving (Generic, Data, Typeable)
type BRType = RType LocSymbol Symbol
type RRType = RType RTyCon RTyVar
type BSort = BRType ()
type RSort = RRType ()
type BPVar = PVar BSort
type RPVar = PVar RSort
type RReft = UReft Reft
type PrType = RRType Predicate
type BareType = BRType RReft
type SpecType = RRType RReft
type SpecProp = RRProp RReft
type RRProp r = Ref RSort r (RRType r)
data Stratum = SVar Symbol | SDiv | SWhnf | SFin
deriving (Generic, Data, Typeable, Eq)
type Strata = [Stratum]
isSVar (SVar _) = True
isSVar _ = False
instance Monoid Strata where
mempty = []
mappend s1 s2 = nub $ s1 ++ s2
class SubsTy tv ty a where
subt :: (tv, ty) -> a -> a
class (Eq c) => TyConable c where
isFun :: c -> Bool
isList :: c -> Bool
isTuple :: c -> Bool
ppTycon :: c -> Doc
isClass :: c -> Bool
isNumCls :: c -> Bool
isFracCls :: c -> Bool
isClass = const False
isNumCls = const False
isFracCls = const False
class ( TyConable c
, Eq c, Eq tv
, Hashable tv
, Reftable r
, PPrint r
) => RefTypable c tv r
where
-- ppCls :: p -> [RType c tv r] -> Doc
ppRType :: Prec -> RType c tv r -> Doc
-------------------------------------------------------------------------------
-- | TyConable Instances -------------------------------------------------------
-------------------------------------------------------------------------------
-- MOVE TO TYPES
instance TyConable RTyCon where
isFun = isFunTyCon . rtc_tc
isList = (listTyCon ==) . rtc_tc
isTuple = TyCon.isTupleTyCon . rtc_tc
isClass = isClassRTyCon
ppTycon = toFix
isNumCls c = maybe False isNumericClass (tyConClass_maybe $ rtc_tc c)
isFracCls c = maybe False isFractionalClass (tyConClass_maybe $ rtc_tc c)
-- MOVE TO TYPES
instance TyConable Symbol where
isFun s = funConName == s
isList s = listConName == s
isTuple s = tupConName == s
ppTycon = text . symbolString
instance TyConable LocSymbol where
isFun = isFun . val
isList = isList . val
isTuple = isTuple . val
ppTycon = ppTycon . val
instance Eq RTyCon where
x == y = rtc_tc x == rtc_tc y
instance Fixpoint RTyCon where
toFix (RTyCon c _ _) = text $ showPpr c -- <+> text "\n<<" <+> hsep (map toFix ts) <+> text ">>\n"
instance Fixpoint Cinfo where
toFix = text . showPpr . ci_loc
instance PPrint RTyCon where
pprint = text . showPpr . rtc_tc
instance Show RTyCon where
show = showpp
--------------------------------------------------------------------------
-- | Refined Instances ---------------------------------------------------
--------------------------------------------------------------------------
data RInstance t = RI { riclass :: LocSymbol
, ritype :: t
, risigs :: [(LocSymbol, t)]
}
newtype DEnv x ty = DEnv (M.HashMap x (M.HashMap Symbol ty)) deriving (Monoid)
type RDEnv = DEnv Var SpecType
instance Functor RInstance where
fmap f (RI x t xts) = RI x (f t) (mapSnd f <$> xts)
--------------------------------------------------------------------------
-- | Values Related to Specifications ------------------------------------
--------------------------------------------------------------------------
-- | Data type refinements
data DataDecl = D { tycName :: LocSymbol
-- ^ Type Constructor Name
, tycTyVars :: [Symbol]
-- ^ Tyvar Parameters
, tycPVars :: [PVar BSort]
-- ^ PVar Parameters
, tycTyLabs :: [Symbol]
-- ^ PLabel Parameters
, tycDCons :: [(LocSymbol, [(Symbol, BareType)])]
-- ^ [DataCon, [(fieldName, fieldType)]]
, tycSrcPos :: !SourcePos
-- ^ Source Position
, tycSFun :: (Maybe (Symbol -> Expr))
-- ^ Measure that should decrease in recursive calls
}
-- deriving (Show)
instance Eq DataDecl where
d1 == d2 = (tycName d1) == (tycName d2)
instance Ord DataDecl where
compare d1 d2 = compare (tycName d1) (tycName d2)
-- | For debugging.
instance Show DataDecl where
show dd = printf "DataDecl: data = %s, tyvars = %s"
(show $ tycName dd)
(show $ tycTyVars dd)
-- | Refinement Type Aliases
data RTAlias tv ty
= RTA { rtName :: Symbol
, rtTArgs :: [tv]
, rtVArgs :: [tv]
, rtBody :: ty
, rtPos :: SourcePos
, rtPosE :: SourcePos
}
mapRTAVars f rt = rt { rtTArgs = f <$> rtTArgs rt
, rtVArgs = f <$> rtVArgs rt
}
------------------------------------------------------------------------
-- | Constructor and Destructors for RTypes ----------------------------
------------------------------------------------------------------------
data RTypeRep c tv r
= RTypeRep { ty_vars :: [tv]
, ty_preds :: [PVar (RType c tv ())]
, ty_labels :: [Symbol]
, ty_binds :: [Symbol]
, ty_refts :: [r]
, ty_args :: [RType c tv r]
, ty_res :: (RType c tv r)
}
fromRTypeRep (RTypeRep {..})
= mkArrow ty_vars ty_preds ty_labels arrs ty_res
where
arrs = safeZip3WithError "fromRTypeRep" ty_binds ty_args ty_refts
toRTypeRep :: RType c tv r -> RTypeRep c tv r
toRTypeRep t = RTypeRep αs πs ls xs rs ts t''
where
(αs, πs, ls, t') = bkUniv t
(xs, ts, rs, t'') = bkArrow t'
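-- Intended round-trip property (sketch, not checked here): on types whose
-- quantifier prefix is ordered as RAllT*, RAllP*, RAllS* followed by a chain
-- of RFun arrows,
--   fromRTypeRep . toRTypeRep == id
-- i.e. the representation is meant to reconstruct the original type.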
mkArrow αs πs ls xts = mkUnivs αs πs ls . mkArrs xts
where
mkArrs xts t = foldr (\(b,t1,r) t2 -> RFun b t1 t2 r) t xts
bkArrowDeep (RAllT _ t) = bkArrowDeep t
bkArrowDeep (RAllP _ t) = bkArrowDeep t
bkArrowDeep (RAllS _ t) = bkArrowDeep t
bkArrowDeep (RFun x t t' r) = let (xs, ts, rs, t'') = bkArrowDeep t' in (x:xs, t:ts, r:rs, t'')
bkArrowDeep t = ([], [], [], t)
bkArrow (RFun x t t' r) = let (xs, ts, rs, t'') = bkArrow t' in (x:xs, t:ts, r:rs, t'')
bkArrow t = ([], [], [], t)
safeBkArrow (RAllT _ _) = errorstar "safeBkArrow on RAllT"
safeBkArrow (RAllP _ _) = errorstar "safeBkArrow on RAllP"
safeBkArrow (RAllS _ t) = safeBkArrow t
safeBkArrow t = bkArrow t
mkUnivs αs πs ls t = foldr RAllT (foldr RAllP (foldr RAllS t ls) πs) αs
bkUniv :: RType t1 a t2 -> ([a], [PVar (RType t1 a ())], [Symbol], RType t1 a t2)
bkUniv (RAllT α t) = let (αs, πs, ls, t') = bkUniv t in (α:αs, πs, ls, t')
bkUniv (RAllP π t) = let (αs, πs, ls, t') = bkUniv t in (αs, π:πs, ls, t')
bkUniv (RAllS s t) = let (αs, πs, ss, t') = bkUniv t in (αs, πs, s:ss, t')
bkUniv t = ([], [], [], t)
bkClass (RFun _ (RApp c t _ _) t' _)
| isClass c
= let (cs, t'') = bkClass t' in ((c, t):cs, t'')
bkClass (RRTy e r o t)
= let (cs, t') = bkClass t in (cs, RRTy e r o t')
bkClass t
= ([], t)
rFun b t t' = RFun b t t' mempty
rCls c ts = RApp (RTyCon c [] defaultTyConInfo) ts [] mempty
rRCls rc ts = RApp rc ts [] mempty
addTermCond = addObligation OTerm
addInvCond :: SpecType -> RReft -> SpecType
addInvCond t r'
| isTauto $ ur_reft r' -- null rv
= t
| otherwise
= fromRTypeRep $ trep {ty_res = RRTy [(x', tbd)] r OInv tbd}
where
trep = toRTypeRep t
tbd = ty_res trep
r = r' {ur_reft = Reft (v, Refa rx)}
su = (v, EVar x')
x' = "xInv"
rx = PIff (PBexp $ EVar v) $ subst1 (raPred rv) su
Reft(v, rv) = ur_reft r'
addObligation :: Oblig -> SpecType -> RReft -> SpecType
addObligation o t r = mkArrow αs πs ls xts $ RRTy [] r o t2
where
(αs, πs, ls, t1) = bkUniv t
(xs, ts, rs, t2) = bkArrow t1
xts = zip3 xs ts rs
--------------------------------------------
instance Subable Stratum where
syms (SVar s) = [s]
syms _ = []
subst su (SVar s) = SVar $ subst su s
subst _ s = s
substf f (SVar s) = SVar $ substf f s
substf _ s = s
substa f (SVar s) = SVar $ substa f s
substa _ s = s
instance Subable Strata where
syms s = concatMap syms s
subst su = (subst su <$>)
substf f = (substf f <$>)
substa f = (substa f <$>)
instance Reftable Strata where
isTauto [] = True
isTauto _ = False
ppTy _ = error "ppTy on Strata"
toReft _ = mempty
params s = [l | SVar l <- s]
bot _ = []
top _ = []
ofReft = error "TODO: Strata.ofReft"
class Reftable r => UReftable r where
ofUReft :: UReft Reft -> r
ofUReft (U r _ _) = ofReft r
instance UReftable (UReft Reft) where
ofUReft r = r
instance UReftable () where
ofUReft _ = mempty
instance (PPrint r, Reftable r) => Reftable (UReft r) where
isTauto = isTauto_ureft
ppTy = ppTy_ureft
toReft (U r ps _) = toReft r `meet` toReft ps
params (U r _ _) = params r
bot (U r _ s) = U (bot r) (Pr []) (bot s)
top (U r p s) = U (top r) (top p) s
ofReft r = U (ofReft r) mempty mempty
isTauto_ureft u = isTauto (ur_reft u) && isTauto (ur_pred u) -- && (isTauto $ ur_strata u)
ppTy_ureft u@(U r p s) d
| isTauto_ureft u = d
| otherwise = ppr_reft r (ppTy p d) s
ppr_reft r d s = braces (pprint v <+> colon <+> d <> ppr_str s <+> text "|" <+> pprint r')
where
r'@(Reft (v, _)) = toReft r
ppr_str [] = empty
ppr_str s = text "^" <> pprint s
instance Subable r => Subable (UReft r) where
syms (U r p _) = syms r ++ syms p
subst s (U r z l) = U (subst s r) (subst s z) (subst s l)
substf f (U r z l) = U (substf f r) (substf f z) (substf f l)
substa f (U r z l) = U (substa f r) (substa f z) (substa f l)
instance (Reftable r, RefTypable c tv r) => Subable (RTProp c tv r) where
syms (RPropP ss r) = (fst <$> ss) ++ syms r
syms (RProp ss r) = (fst <$> ss) ++ syms r
syms (RHProp _ _) = error "TODO: PHProp.syms"
subst su (RPropP ss r) = RPropP ss (subst su r)
subst su (RProp ss t) = RProp ss (subst su <$> t)
subst _ (RHProp _ _) = error "TODO: PHProp.subst"
substf f (RPropP ss r) = RPropP ss (substf f r)
substf f (RProp ss t) = RProp ss (substf f <$> t)
substf _ (RHProp _ _) = error "TODO PHProp.substf"
substa f (RPropP ss r) = RPropP ss (substa f r)
substa f (RProp ss t) = RProp ss (substa f <$> t)
substa _ (RHProp _ _) = error "TODO PHProp.substa"
instance (Subable r, RefTypable c tv r) => Subable (RType c tv r) where
syms = foldReft (\r acc -> syms r ++ acc) []
substa f = mapReft (substa f)
substf f = emapReft (substf . substfExcept f) []
subst su = emapReft (subst . substExcept su) []
subst1 t su = emapReft (\xs r -> subst1Except xs r su) [] t
instance Reftable Predicate where
isTauto (Pr ps) = null ps
bot (Pr _) = errorstar "No BOT instance for Predicate"
-- NV: This does not print abstract refinements....
-- HACK: Hiding to not render types in WEB DEMO. NEED TO FIX.
ppTy r d | isTauto r = d
| not (ppPs ppEnv) = d
| otherwise = d <> (angleBrackets $ pprint r)
toReft (Pr ps@(p:_)) = Reft (parg p, refa $ pToRef <$> ps)
toReft _ = mempty
params = errorstar "TODO: instance of params for Predicate"
ofReft = error "TODO: Predicate.ofReft"
pToRef p = pApp (pname p) $ (EVar $ parg p) : (thd3 <$> pargs p)
pApp :: Symbol -> [Expr] -> Pred
pApp p es = PBexp $ EApp (dummyLoc $ pappSym $ length es) (EVar p:es)
pappSym n = symbol $ "papp" ++ show n
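-- Illustrative example: @pApp p [e1, e2]@ builds the atom @papp2(p, e1, e2)@;
-- the generated symbol counts the predicate's arguments, while the predicate
-- itself is passed as the first argument of the application.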
---------------------------------------------------------------
--------------------------- Visitors --------------------------
---------------------------------------------------------------
isTrivial t = foldReft (\r b -> isTauto r && b) True t
instance Functor UReft where
fmap f (U r p s) = U (f r) p s
instance Functor (RType a b) where
fmap = mapReft
-- instance Fold.Foldable (RType a b c) where
-- foldr = foldReft
mapReft :: (r1 -> r2) -> RType c tv r1 -> RType c tv r2
mapReft f = emapReft (\_ -> f) []
emapReft :: ([Symbol] -> r1 -> r2) -> [Symbol] -> RType c tv r1 -> RType c tv r2
emapReft f γ (RVar α r) = RVar α (f γ r)
emapReft f γ (RAllT α t) = RAllT α (emapReft f γ t)
emapReft f γ (RAllP π t) = RAllP π (emapReft f γ t)
emapReft f γ (RAllS p t) = RAllS p (emapReft f γ t)
emapReft f γ (RFun x t t' r) = RFun x (emapReft f γ t) (emapReft f (x:γ) t') (f γ r)
emapReft f γ (RApp c ts rs r) = RApp c (emapReft f γ <$> ts) (emapRef f γ <$> rs) (f γ r)
emapReft f γ (RAllE z t t') = RAllE z (emapReft f γ t) (emapReft f γ t')
emapReft f γ (REx z t t') = REx z (emapReft f γ t) (emapReft f γ t')
emapReft _ _ (RExprArg e) = RExprArg e
emapReft f γ (RAppTy t t' r) = RAppTy (emapReft f γ t) (emapReft f γ t') (f γ r)
emapReft f γ (RRTy e r o t) = RRTy (mapSnd (emapReft f γ) <$> e) (f γ r) o (emapReft f γ t)
emapReft f γ (RHole r) = RHole (f γ r)
emapRef :: ([Symbol] -> t -> s) -> [Symbol] -> RTProp c tv t -> RTProp c tv s
emapRef f γ (RPropP s r) = RPropP s $ f γ r
emapRef f γ (RProp s t) = RProp s $ emapReft f γ t
emapRef  _ _ (RHProp _ _)  = error "TODO: PHProp.emapRef"
------------------------------------------------------------------------------------------------------
-- isBase' x t = traceShow ("isBase: " ++ showpp x) $ isBase t
-- isBase :: RType a -> Bool
isBase (RAllT _ t) = isBase t
isBase (RAllP _ t) = isBase t
isBase (RVar _ _) = True
isBase (RApp _ ts _ _) = all isBase ts
isBase (RFun _ t1 t2 _) = isBase t1 && isBase t2
isBase (RAppTy t1 t2 _) = isBase t1 && isBase t2
isBase (RRTy _ _ _ t) = isBase t
isBase (RAllE _ _ t) = isBase t
isBase _ = False
isFunTy (RAllE _ _ t) = isFunTy t
isFunTy (RAllS _ t) = isFunTy t
isFunTy (RAllT _ t) = isFunTy t
isFunTy (RAllP _ t) = isFunTy t
isFunTy (RFun _ _ _ _) = True
isFunTy _ = False
mapReftM :: (Monad m) => (r1 -> m r2) -> RType c tv r1 -> m (RType c tv r2)
mapReftM f (RVar α r) = liftM (RVar α) (f r)
mapReftM f (RAllT α t) = liftM (RAllT α) (mapReftM f t)
mapReftM f (RAllP π t) = liftM (RAllP π) (mapReftM f t)
mapReftM f (RAllS s t) = liftM (RAllS s) (mapReftM f t)
mapReftM f (RFun x t t' r) = liftM3 (RFun x) (mapReftM f t) (mapReftM f t') (f r)
mapReftM f (RApp c ts rs r) = liftM3 (RApp c) (mapM (mapReftM f) ts) (mapM (mapRefM f) rs) (f r)
mapReftM f (RAllE z t t') = liftM2 (RAllE z) (mapReftM f t) (mapReftM f t')
mapReftM f (REx z t t') = liftM2 (REx z) (mapReftM f t) (mapReftM f t')
mapReftM _ (RExprArg e) = return $ RExprArg e
mapReftM f (RAppTy t t' r) = liftM3 RAppTy (mapReftM f t) (mapReftM f t') (f r)
mapReftM f (RHole r) = liftM RHole (f r)
mapReftM f (RRTy xts r o t) = liftM4 RRTy (mapM (mapSndM (mapReftM f)) xts) (f r) (return o) (mapReftM f t)
mapRefM :: (Monad m) => (t -> m s) -> (RTProp c tv t) -> m (RTProp c tv s)
mapRefM f (RPropP s r) = liftM (RPropP s) (f r)
mapRefM f (RProp s t) = liftM (RProp s) (mapReftM f t)
mapRefM _ (RHProp _ _) = error "TODO PHProp.mapRefM"
-- foldReft :: (r -> a -> a) -> a -> RType c tv r -> a
foldReft f = efoldReft (\_ _ -> []) (\_ -> ()) (\_ _ -> f) (\_ γ -> γ) emptySEnv
-- efoldReft :: Reftable r =>(p -> [RType c tv r] -> [(Symbol, a)])-> (RType c tv r -> a)-> (SEnv a -> Maybe (RType c tv r) -> r -> c1 -> c1)-> SEnv a-> c1-> RType c tv r-> c1
efoldReft cb g f fp = go
where
-- folding over RType
go γ z me@(RVar _ r) = f γ (Just me) r z
go γ z (RAllT _ t) = go γ z t
go γ z (RAllP p t) = go (fp p γ) z t
go γ z (RAllS _ t) = go γ z t
go γ z me@(RFun _ (RApp c ts _ _) t' r)
| isClass c = f γ (Just me) r (go (insertsSEnv γ (cb c ts)) (go' γ z ts) t')
go γ z me@(RFun x t t' r) = f γ (Just me) r (go (insertSEnv x (g t) γ) (go γ z t) t')
go γ z me@(RApp _ ts rs r) = f γ (Just me) r (ho' γ (go' (insertSEnv (rTypeValueVar me) (g me) γ) z ts) rs)
go γ z (RAllE x t t') = go (insertSEnv x (g t) γ) (go γ z t) t'
go γ z (REx x t t') = go (insertSEnv x (g t) γ) (go γ z t) t'
go γ z me@(RRTy [] r _ t) = f γ (Just me) r (go γ z t)
go γ z me@(RRTy xts r _ t) = f γ (Just me) r (go γ (go γ z (envtoType xts)) t)
go γ z me@(RAppTy t t' r) = f γ (Just me) r (go γ (go γ z t) t')
go _ z (RExprArg _) = z
go γ z me@(RHole r) = f γ (Just me) r z
-- folding over Ref
ho γ z (RPropP ss r) = f (insertsSEnv γ (mapSnd (g . ofRSort) <$> ss)) Nothing r z
ho γ z (RProp ss t) = go (insertsSEnv γ ((mapSnd (g . ofRSort)) <$> ss)) z t
ho _ _ (RHProp _ _) = error "TODO: RHProp.ho"
-- folding over [RType]
go' γ z ts = foldr (flip $ go γ) z ts
-- folding over [Ref]
ho' γ z rs = foldr (flip $ ho γ) z rs
envtoType xts = foldr (\(x,t1) t2 -> rFun x t1 t2) (snd $ last xts) (init xts)
mapBot f (RAllT α t) = RAllT α (mapBot f t)
mapBot f (RAllP π t) = RAllP π (mapBot f t)
mapBot f (RAllS s t) = RAllS s (mapBot f t)
mapBot f (RFun x t t' r) = RFun x (mapBot f t) (mapBot f t') r
mapBot f (RAppTy t t' r) = RAppTy (mapBot f t) (mapBot f t') r
mapBot f (RApp c ts rs r) = f $ RApp c (mapBot f <$> ts) (mapBotRef f <$> rs) r
mapBot f (REx b t1 t2) = REx b (mapBot f t1) (mapBot f t2)
mapBot f (RAllE b t1 t2) = RAllE b (mapBot f t1) (mapBot f t2)
mapBot f (RRTy e r o t) = RRTy (mapSnd (mapBot f) <$> e) r o (mapBot f t)
mapBot f t' = f t'
mapBotRef _ (RPropP s r) = RPropP s $ r
mapBotRef f (RProp s t) = RProp s $ mapBot f t
mapBotRef _ (RHProp _ _) = error "TODO: RHProp.mapBotRef"
mapBind f (RAllT α t) = RAllT α (mapBind f t)
mapBind f (RAllP π t) = RAllP π (mapBind f t)
mapBind f (RAllS s t) = RAllS s (mapBind f t)
mapBind f (RFun b t1 t2 r) = RFun (f b) (mapBind f t1) (mapBind f t2) r
mapBind f (RApp c ts rs r) = RApp c (mapBind f <$> ts) (mapBindRef f <$> rs) r
mapBind f (RAllE b t1 t2) = RAllE (f b) (mapBind f t1) (mapBind f t2)
mapBind f (REx b t1 t2) = REx (f b) (mapBind f t1) (mapBind f t2)
mapBind _ (RVar α r) = RVar α r
mapBind _ (RHole r) = RHole r
mapBind f (RRTy e r o t) = RRTy e r o (mapBind f t)
mapBind _ (RExprArg e) = RExprArg e
mapBind f (RAppTy t t' r) = RAppTy (mapBind f t) (mapBind f t') r
mapBindRef f (RPropP s r) = RPropP (mapFst f <$> s) r
mapBindRef f (RProp s t) = RProp (mapFst f <$> s) $ mapBind f t
mapBindRef _ (RHProp _ _) = error "TODO: RHProp.mapBindRef"
--------------------------------------------------
ofRSort :: Reftable r => RType c tv () -> RType c tv r
ofRSort = fmap mempty
toRSort :: RType c tv r -> RType c tv ()
toRSort = stripAnnotations . mapBind (const dummySymbol) . fmap (const ())
stripAnnotations (RAllT α t) = RAllT α (stripAnnotations t)
stripAnnotations (RAllP _ t) = stripAnnotations t
stripAnnotations (RAllS _ t) = stripAnnotations t
stripAnnotations (RAllE _ _ t) = stripAnnotations t
stripAnnotations (REx _ _ t) = stripAnnotations t
stripAnnotations (RFun x t t' r) = RFun x (stripAnnotations t) (stripAnnotations t') r
stripAnnotations (RAppTy t t' r) = RAppTy (stripAnnotations t) (stripAnnotations t') r
stripAnnotations (RApp c ts rs r) = RApp c (stripAnnotations <$> ts) (stripAnnotationsRef <$> rs) r
stripAnnotations (RRTy _ _ _ t) = stripAnnotations t
stripAnnotations t = t
stripAnnotationsRef (RProp s t) = RProp s $ stripAnnotations t
stripAnnotationsRef r = r
insertsSEnv = foldr (\(x, t) γ -> insertSEnv x t γ)
rTypeValueVar :: (Reftable r) => RType c tv r -> Symbol
rTypeValueVar t = vv where Reft (vv,_) = rTypeReft t
rTypeReft :: (Reftable r) => RType c tv r -> Reft
rTypeReft = fromMaybe trueReft . fmap toReft . stripRTypeBase
-- stripRTypeBase :: RType a -> Maybe a
stripRTypeBase (RApp _ _ _ x)
= Just x
stripRTypeBase (RVar _ x)
= Just x
stripRTypeBase (RFun _ _ _ x)
= Just x
stripRTypeBase (RAppTy _ _ x)
= Just x
stripRTypeBase _
= Nothing
mapRBase f (RApp c ts rs r) = RApp c ts rs $ f r
mapRBase f (RVar a r) = RVar a $ f r
mapRBase f (RFun x t1 t2 r) = RFun x t1 t2 $ f r
mapRBase f (RAppTy t1 t2 r) = RAppTy t1 t2 $ f r
mapRBase _ t = t
makeLType :: Stratum -> SpecType -> SpecType
makeLType l t = fromRTypeRep trep{ty_res = mapRBase f $ ty_res trep}
where trep = toRTypeRep t
f (U r p _) = U r p [l]
makeDivType = makeLType SDiv
makeFinType = makeLType SFin
getStrata = maybe [] ur_strata . stripRTypeBase
-----------------------------------------------------------------------------
-- | PPrint -----------------------------------------------------------------
-----------------------------------------------------------------------------
instance Show Stratum where
show SFin = "Fin"
show SDiv = "Div"
show SWhnf = "Whnf"
show (SVar s) = show s
instance PPrint Stratum where
pprint = text . show
instance PPrint Strata where
pprint [] = empty
pprint ss = hsep (pprint <$> nub ss)
instance PPrint SourcePos where
pprint = text . show
instance PPrint () where
pprint = text . show
instance PPrint String where
pprint = text
instance PPrint Text where
pprint = text . T.unpack
instance PPrint a => PPrint (Located a) where
pprint = pprint . val
instance PPrint Int where
pprint = F.pprint
instance PPrint Integer where
pprint = F.pprint
instance PPrint Constant where
pprint = F.pprint
instance PPrint Brel where
pprint = F.pprint
instance PPrint Bop where
pprint = F.pprint
instance PPrint Sort where
pprint = F.pprint
instance PPrint Symbol where
pprint = pprint . symbolText
instance PPrint Expr where
pprint = F.pprint
instance PPrint SymConst where
pprint = F.pprint
instance PPrint Pred where
pprint = F.pprint
instance PPrint a => PPrint (PVar a) where
pprint (PV s _ _ xts) = pprint s <+> hsep (pprint <$> dargs xts)
where
dargs = map thd3 . takeWhile (\(_, x, y) -> EVar x /= y)
instance PPrint Predicate where
pprint (Pr []) = text "True"
pprint (Pr pvs) = hsep $ punctuate (text "&") (map pprint pvs)
instance PPrint Refa where
pprint = pprint . raPred
instance PPrint Reft where
pprint = F.pprint
instance PPrint SortedReft where
pprint = F.pprint
------------------------------------------------------------------------
-- | Error Data Type ---------------------------------------------------
------------------------------------------------------------------------
-- | The type used during constraint generation, used also to define contexts
-- for errors, hence in this file, and NOT in Constraint.hs
newtype REnv = REnv (M.HashMap Symbol SpecType)
type ErrorResult = FixResult Error
newtype EMsg = EMsg String deriving (Generic, Data, Typeable)
instance PPrint EMsg where
pprint (EMsg s) = text s
-- | In the below, we use EMsg instead of, say, SpecType because
-- the latter is impossible to serialize, as it contains GHC
-- internals like TyCon and Class inside it.
type Error = TError SpecType
-- | INVARIANT : all Error constructors should have a pos field
data TError t =
ErrSubType { pos :: !SrcSpan
, msg :: !Doc
, ctx :: !(M.HashMap Symbol t)
, tact :: !t
, texp :: !t
} -- ^ liquid type error
| ErrFCrash { pos :: !SrcSpan
, msg :: !Doc
, ctx :: !(M.HashMap Symbol t)
, tact :: !t
, texp :: !t
} -- ^ liquid type error
| ErrAssType { pos :: !SrcSpan
, obl :: !Oblig
, msg :: !Doc
, ref :: !RReft
} -- ^ liquid type error
| ErrParse { pos :: !SrcSpan
, msg :: !Doc
, err :: !ParseError
} -- ^ specification parse error
| ErrTySpec { pos :: !SrcSpan
, var :: !Doc
, typ :: !t
, msg :: !Doc
} -- ^ sort error in specification
| ErrTermSpec { pos :: !SrcSpan
, var :: !Doc
, exp :: !Expr
, msg :: !Doc
} -- ^ sort error in specification
| ErrDupAlias { pos :: !SrcSpan
, var :: !Doc
, kind :: !Doc
, locs :: ![SrcSpan]
} -- ^ multiple alias with same name error
| ErrDupSpecs { pos :: !SrcSpan
, var :: !Doc
, locs:: ![SrcSpan]
} -- ^ multiple specs for same binder error
| ErrBadData { pos :: !SrcSpan
, var :: !Doc
, msg :: !Doc
} -- ^ multiple specs for same binder error
| ErrInvt { pos :: !SrcSpan
, inv :: !t
, msg :: !Doc
} -- ^ Invariant sort error
| ErrIAl { pos :: !SrcSpan
, inv :: !t
, msg :: !Doc
} -- ^ Using sort error
| ErrIAlMis { pos :: !SrcSpan
, t1 :: !t
, t2 :: !t
, msg :: !Doc
} -- ^ Incompatible using error
| ErrMeas { pos :: !SrcSpan
, ms :: !Symbol
, msg :: !Doc
} -- ^ Measure sort error
| ErrHMeas { pos :: !SrcSpan
, ms :: !Symbol
, msg :: !Doc
} -- ^ Haskell bad Measure error
| ErrUnbound { pos :: !SrcSpan
, var :: !Doc
} -- ^ Unbound symbol in specification
| ErrGhc { pos :: !SrcSpan
, msg :: !Doc
} -- ^ GHC error: parsing or type checking
| ErrMismatch { pos :: !SrcSpan
, var :: !Doc
, hs :: !Type
, lq :: !Type
} -- ^ Mismatch between Liquid and Haskell types
| ErrAliasCycle { pos :: !SrcSpan
, acycle :: ![(SrcSpan, Doc)]
} -- ^ Cyclic Refined Type Alias Definitions
| ErrIllegalAliasApp { pos :: !SrcSpan
, dname :: !Doc
, dpos :: !SrcSpan
} -- ^ Illegal RTAlias application (from BSort, eg. in PVar)
| ErrAliasApp { pos :: !SrcSpan
, nargs :: !Int
, dname :: !Doc
, dpos :: !SrcSpan
, dargs :: !Int
}
| ErrSaved { pos :: !SrcSpan
, msg :: !Doc
} -- ^ Previously saved error, that carries over after DiffCheck
| ErrTermin { bind :: ![Var]
, pos :: !SrcSpan
, msg :: !Doc
} -- ^ Termination Error
| ErrRClass { pos :: !SrcSpan
, cls :: !Doc
, insts :: ![(SrcSpan, Doc)]
} -- ^ Refined Class/Interfaces Conflict
| ErrOther { pos :: !SrcSpan
, msg :: !Doc
} -- ^ Unexpected PANIC
deriving (Typeable, Functor)
-- data LParseError = LPE !SourcePos [String]
-- deriving (Data, Typeable, Generic)
errToFCrash :: Error -> Error
errToFCrash (ErrSubType l m g t1 t2)
= ErrFCrash l m g t1 t2
errToFCrash e
= e
instance Eq Error where
e1 == e2 = pos e1 == pos e2
instance Ord Error where
e1 <= e2 = pos e1 <= pos e2
instance Ex.Error Error where
strMsg = errOther . pprint
errSpan :: TError a -> SrcSpan
errSpan = pos
errOther :: Doc -> Error
errOther = ErrOther noSrcSpan
------------------------------------------------------------------------
-- | Source Information Associated With Constraints --------------------
------------------------------------------------------------------------
data Cinfo = Ci { ci_loc :: !SrcSpan
, ci_err :: !(Maybe Error)
}
deriving (Eq, Ord, Generic)
instance NFData Cinfo where
rnf x = seq x ()
------------------------------------------------------------------------
-- | Converting Results To Answers -------------------------------------
------------------------------------------------------------------------
class Result a where
result :: a -> FixResult Error
instance Result [Error] where
result es = Crash es ""
instance Result Error where
result (ErrOther _ d) = UnknownError $ render d
result e = result [e]
instance Result (FixResult Cinfo) where
result = fmap cinfoError
--------------------------------------------------------------------------------
--- Module Names
--------------------------------------------------------------------------------
data ModName = ModName !ModType !ModuleName deriving (Eq,Ord)
instance Show ModName where
show = getModString
instance Symbolic ModName where
symbol (ModName _ m) = symbol m
instance Symbolic ModuleName where
symbol = symbol . moduleNameFS
data ModType = Target | SrcImport | SpecImport deriving (Eq,Ord)
isSrcImport (ModName SrcImport _) = True
isSrcImport _ = False
isSpecImport (ModName SpecImport _) = True
isSpecImport _ = False
getModName (ModName _ m) = m
getModString = moduleNameString . getModName
-------------------------------------------------------------------------------
----------- Refinement Type Aliases -------------------------------------------
-------------------------------------------------------------------------------
data RTEnv = RTE { typeAliases :: M.HashMap Symbol (RTAlias RTyVar SpecType)
, predAliases :: M.HashMap Symbol (RTAlias Symbol Pred)
, exprAliases :: M.HashMap Symbol (RTAlias Symbol Expr)
}
instance Monoid RTEnv where
(RTE ta1 pa1 ea1) `mappend` (RTE ta2 pa2 ea2)
= RTE (ta1 `M.union` ta2) (pa1 `M.union` pa2) (ea1 `M.union` ea2)
mempty = RTE M.empty M.empty M.empty
mapRT f e = e { typeAliases = f $ typeAliases e }
mapRP f e = e { predAliases = f $ predAliases e }
mapRE f e = e { exprAliases = f $ exprAliases e }
cinfoError (Ci _ (Just e)) = e
cinfoError (Ci l _) = errOther $ text $ "Cinfo:" ++ showPpr l
--------------------------------------------------------------------------------
--- Measures
--------------------------------------------------------------------------------
data Measure ty ctor = M {
name :: LocSymbol
, sort :: ty
, eqns :: [Def ty ctor]
} deriving (Data, Typeable)
data CMeasure ty
= CM { cName :: LocSymbol
, cSort :: ty
}
data Def ty ctor
= Def {
measure :: LocSymbol
, dparams :: [(Symbol, ty)]
, ctor :: ctor
, dsort :: Maybe ty
, binds :: [(Symbol, Maybe ty)]
, body :: Body
} deriving (Show, Data, Typeable)
deriving instance (Eq ctor, Eq ty) => Eq (Def ty ctor)
data Body
= E Expr -- ^ Measure Refinement: {v | v = e }
| P Pred -- ^ Measure Refinement: {v | (? v) <=> p }
| R Symbol Pred -- ^ Measure Refinement: {v | p}
deriving (Show, Eq, Data, Typeable)
instance Subable (Measure ty ctor) where
syms (M _ _ es) = concatMap syms es
substa f (M n s es) = M n s $ substa f <$> es
substf f (M n s es) = M n s $ substf f <$> es
subst su (M n s es) = M n s $ subst su <$> es
instance Subable (Def ty ctor) where
syms (Def _ sp _ _ sb bd) = (fst <$> sp) ++ (fst <$> sb) ++ syms bd
substa f (Def m p c t b bd) = Def m p c t b $ substa f bd
substf f (Def m p c t b bd) = Def m p c t b $ substf f bd
subst su (Def m p c t b bd) = Def m p c t b $ subst su bd
instance Subable Body where
syms (E e) = syms e
syms (P e) = syms e
syms (R s e) = s:syms e
substa f (E e) = E $ substa f e
substa f (P e) = P $ substa f e
substa f (R s e) = R s $ substa f e
substf f (E e) = E $ substf f e
substf f (P e) = P $ substf f e
substf f (R s e) = R s $ substf f e
subst su (E e) = E $ subst su e
subst su (P e) = P $ subst su e
subst su (R s e) = R s $ subst su e
data RClass ty
= RClass { rcName :: LocSymbol
, rcSupers :: [ty]
, rcTyVars :: [Symbol]
, rcMethods :: [(LocSymbol,ty)]
} deriving (Show)
instance Functor RClass where
fmap f (RClass n ss tvs ms) = RClass n (fmap f ss) tvs (fmap (second f) ms)
------------------------------------------------------------------------
-- | Annotations -------------------------------------------------------
------------------------------------------------------------------------
newtype AnnInfo a = AI (M.HashMap SrcSpan [(Maybe Text, a)]) deriving (Generic)
data Annot t = AnnUse t
| AnnDef t
| AnnRDf t
| AnnLoc SrcSpan
instance Monoid (AnnInfo a) where
mempty = AI M.empty
mappend (AI m1) (AI m2) = AI $ M.unionWith (++) m1 m2
instance Functor AnnInfo where
fmap f (AI m) = AI (fmap (fmap (\(x, y) -> (x, f y)) ) m)
instance NFData a => NFData (AnnInfo a) where
rnf (AI _) = ()
instance NFData (Annot a) where
rnf (AnnDef _) = ()
rnf (AnnRDf _) = ()
rnf (AnnUse _) = ()
rnf (AnnLoc _) = ()
------------------------------------------------------------------------
-- | Output ------------------------------------------------------------
------------------------------------------------------------------------
data Output a = O { o_vars :: Maybe [String]
, o_errors :: ! [Error]
, o_types :: !(AnnInfo a)
, o_templs :: !(AnnInfo a)
, o_bots :: ![SrcSpan]
, o_result :: FixResult Error
} deriving (Generic)
emptyOutput = O Nothing [] mempty mempty [] mempty
instance Monoid (Output a) where
mempty = emptyOutput
mappend o1 o2 = O { o_vars = sortNub <$> mappend (o_vars o1) (o_vars o2)
, o_errors = sortNub $ mappend (o_errors o1) (o_errors o2)
, o_types = mappend (o_types o1) (o_types o2)
, o_templs = mappend (o_templs o1) (o_templs o2)
, o_bots = sortNub $ mappend (o_bots o1) (o_bots o2)
, o_result = mappend (o_result o1) (o_result o2)
}
-----------------------------------------------------------
-- | KVar Profile -----------------------------------------
-----------------------------------------------------------
data KVKind
= RecBindE
| NonRecBindE
| TypeInstE
| PredInstE
| LamE
| CaseE
| LetE
deriving (Generic, Eq, Ord, Show, Enum, Data, Typeable)
instance Hashable KVKind where
  hashWithSalt i = hashWithSalt i . fromEnum
newtype KVProf = KVP (M.HashMap KVKind Int)
emptyKVProf :: KVProf
emptyKVProf = KVP M.empty
updKVProf :: KVKind -> [KVar] -> KVProf -> KVProf
updKVProf k kvs (KVP m) = KVP $ M.insert k (kn + length kvs) m
where
kn = M.lookupDefault 0 k m
instance NFData KVKind where
rnf z = z `seq` ()
instance PPrint KVKind where
pprint = text . show
instance PPrint KVProf where
pprint (KVP m) = pprint $ M.toList m
instance NFData KVProf where
rnf (KVP m) = rnf m `seq` ()
-- hasHole (toReft -> (Reft (_, rs))) = any isHole rs
hole :: Pred
hole = PKVar "HOLE" mempty
isHole :: Pred -> Bool
isHole (PKVar ("HOLE") _) = True
isHole _ = False
hasHole :: Reftable r => r -> Bool
hasHole = any isHole . conjuncts . reftPred . toReft
-- isHole :: KVar -> Bool
-- isHole "HOLE" = True
-- isHole _ = False
-- classToRApp :: SpecType -> SpecType
-- classToRApp (RCls cl ts)
-- = RApp (RTyCon (classTyCon cl) def def) ts mempty mempty
instance Symbolic DataCon where
symbol = symbol . dataConWorkId
instance PPrint DataCon where
pprint = text . showPpr
instance Show DataCon where
show = showpp
liquidBegin :: String
liquidBegin = ['{', '-', '@']
liquidEnd :: String
liquidEnd = ['@', '-', '}']
| mightymoose/liquidhaskell | src/Language/Haskell/Liquid/Types.hs | bsd-3-clause | 60,189 | 2 | 15 | 18,267 | 18,678 | 9,987 | 8,691 | 1,507 | 16 |
{-# LANGUAGE BangPatterns, CPP #-}
-- | File descriptor cache to avoid locks in kernel.
module Network.Wai.Handler.Warp.FdCache (
withFdCache
, Fd
, Refresh
#ifndef WINDOWS
, openFile
, closeFile
, setFileCloseOnExec
#endif
) where
#ifndef WINDOWS
import UnliftIO.Exception (bracket)
import Control.Reaper
import Data.IORef
import Network.Wai.Handler.Warp.MultiMap as MM
import System.Posix.IO (openFd, OpenFileFlags(..), defaultFileFlags, OpenMode(ReadOnly), closeFd, FdOption(CloseOnExec), setFdOption)
#endif
import System.Posix.Types (Fd)
----------------------------------------------------------------
-- | An action to activate a Fd cache entry.
type Refresh = IO ()
getFdNothing :: FilePath -> IO (Maybe Fd, Refresh)
getFdNothing _ = return (Nothing, return ())
----------------------------------------------------------------
-- | Creating 'MutableFdCache' and executing the action in the second
-- argument. The first argument is a cache duration in seconds.
withFdCache :: Int -> ((FilePath -> IO (Maybe Fd, Refresh)) -> IO a) -> IO a
#ifdef WINDOWS
withFdCache _ action = action getFdNothing
#else
withFdCache 0 action = action getFdNothing
withFdCache duration action = bracket (initialize duration)
terminate
(action . getFd)
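-- | A minimal usage sketch (illustrative only, not part of Warp's API):
-- the caller receives a lookup function; the returned 'Refresh' action marks
-- the cached entry as active so the reaper keeps it open. The path and the
-- duration argument below are assumptions made for the example.
_exampleFdLookup :: Int -> FilePath -> IO ()
_exampleFdLookup duration path = withFdCache duration $ \getter -> do
    (mfd, refreshIt) <- getter path
    case mfd of
        Nothing  -> return ()   -- caching disabled or no descriptor available
        Just _fd -> refreshIt   -- keep the entry alive for subsequent requests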
----------------------------------------------------------------
data Status = Active | Inactive
newtype MutableStatus = MutableStatus (IORef Status)
status :: MutableStatus -> IO Status
status (MutableStatus ref) = readIORef ref
newActiveStatus :: IO MutableStatus
newActiveStatus = MutableStatus <$> newIORef Active
refresh :: MutableStatus -> Refresh
refresh (MutableStatus ref) = writeIORef ref Active
inactive :: MutableStatus -> IO ()
inactive (MutableStatus ref) = writeIORef ref Inactive
----------------------------------------------------------------
data FdEntry = FdEntry !Fd !MutableStatus
openFile :: FilePath -> IO Fd
openFile path = do
fd <- openFd path ReadOnly Nothing defaultFileFlags{nonBlock=False}
setFileCloseOnExec fd
return fd
closeFile :: Fd -> IO ()
closeFile = closeFd
newFdEntry :: FilePath -> IO FdEntry
newFdEntry path = FdEntry <$> openFile path <*> newActiveStatus
setFileCloseOnExec :: Fd -> IO ()
setFileCloseOnExec fd = setFdOption fd CloseOnExec True
----------------------------------------------------------------
type FdCache = MultiMap FdEntry
-- | Mutable Fd cacher.
newtype MutableFdCache = MutableFdCache (Reaper FdCache (FilePath,FdEntry))
fdCache :: MutableFdCache -> IO FdCache
fdCache (MutableFdCache reaper) = reaperRead reaper
look :: MutableFdCache -> FilePath -> IO (Maybe FdEntry)
look mfc path = MM.lookup path <$> fdCache mfc
----------------------------------------------------------------
-- The first argument is a cache duration in seconds.
initialize :: Int -> IO MutableFdCache
initialize duration = MutableFdCache <$> mkReaper settings
where
settings = defaultReaperSettings {
reaperAction = clean
, reaperDelay = duration
, reaperCons = uncurry insert
, reaperNull = isEmpty
, reaperEmpty = empty
}
clean :: FdCache -> IO (FdCache -> FdCache)
clean old = do
new <- pruneWith old prune
return $ merge new
where
prune (_,FdEntry fd mst) = status mst >>= act
where
act Active = inactive mst >> return True
act Inactive = closeFd fd >> return False
----------------------------------------------------------------
terminate :: MutableFdCache -> IO ()
terminate (MutableFdCache reaper) = do
!t <- reaperStop reaper
mapM_ (closeIt . snd) $ toList t
where
closeIt (FdEntry fd _) = closeFd fd
----------------------------------------------------------------
-- | Getting 'Fd' and 'Refresh' from the mutable Fd cacher.
getFd :: MutableFdCache -> FilePath -> IO (Maybe Fd, Refresh)
getFd mfc@(MutableFdCache reaper) path = look mfc path >>= get
where
get Nothing = do
ent@(FdEntry fd mst) <- newFdEntry path
reaperAdd reaper (path,ent)
return (Just fd, refresh mst)
get (Just (FdEntry fd mst)) = do
refresh mst
return (Just fd, refresh mst)
#endif
| kazu-yamamoto/wai | warp/Network/Wai/Handler/Warp/FdCache.hs | mit | 4,240 | 0 | 13 | 825 | 237 | 146 | 91 | 83 | 2 |
--Contributed by Ron Watkins
module Main where
fib n
| n <= 2 = n - 1
| otherwise = fib(n - 1) + fib(n - 2)
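-- Example of the indexing convention used here: fib 1 = 0, fib 2 = 1,
-- fib 3 = 1, fib 5 = 3 (the input is read as a 1-based position).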
-- This part is related to the Input/Output and can be used as it is
-- Do not modify it
main = do
input <- getLine
print . fib . (read :: String -> Int) $ input
| EdisonAlgorithms/HackerRank | practice/fp/recursion/functional-programming-warmups-in-recursion---fibonacci-numbers/functional-programming-warmups-in-recursion---fibonacci-numbers.hs | mit | 291 | 0 | 10 | 85 | 99 | 51 | 48 | 7 | 1 |
{-# LANGUAGE DataKinds, GADTs, MultiParamTypeClasses,
FlexibleInstances, StandaloneDeriving,
GeneralizedNewtypeDeriving, FlexibleContexts #-}
-- {-# OPTIONS_GHC -ftype-function-depth=400 #-}
-- {-# OPTIONS_GHC -fcontext-stack=400 #-}
-- | References:
--
-- [1] Jose Guivant, Eduardo Nebot, and Stephan Baiker. Autonomous navigation and map
-- building using laser range sensors in outdoor applications.
-- Journal of Robotic Systems, 17(10):565–583, Oct. 2000.
module Slam where
import Prelude as P
import Control.Monad as CM
import Language.Hakaru.Syntax as H
import qualified Language.Hakaru.Lazy as L
import qualified System.Random.MWC as MWC
import Language.Hakaru.Sample
import Control.Monad.Cont (runCont, cont)
import qualified Data.Sequence as S
import qualified Data.Foldable as F
import Control.Monad.Primitive (PrimState, PrimMonad)
-- Stuff for Data IO
import Text.Printf
import System.Exit
import System.Directory
import System.Environment
import System.FilePath
import Language.Hakaru.Util.Csv (decodeFileStream)
import Data.Csv
import qualified Control.Applicative as A
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy as B
----------
-- Inputs
----------
--
-- Inputs per timestamp:
-------------------------
-- 1. v_e : speed (Either this or what the paper calls v_c)
-- 2. alpha: steering angle
-- 3. z_rad_i : distances to object i
-- 4. z_I_i : intensity from objects i
-- 5. z_beta_i : angle to object i
--
-- Starting input (starting state):
------------------------------------
-- 1. GPSLon, GPSLat
-- 2. initial angle (alpha)
-- 3. dimensions of vehicle (L,h,a,b)
--
--
-----------
-- Outputs
-----------
-- 1. GPSLon, GPSLat
-- 2. phi : world angle
-- 3. (x_i, y_i) : world coords (lon, lat) of each object i in the map
range :: Int
range = 361
type ZRad = H.Real -- ^ Observed radial distance to a beacon
type ZInt = H.Real -- ^ Observed light intensity (reflected) from a beacon
type GPS = H.Real
type Angle = H.Real -- ^ In radians
type Vel = H.Real
type DelTime = H.Real
-- | Dimensions of the vehicle
-- L = distance between front and rear axles
-- H = distance between center of back left wheel and center of rear axle
-- a = distance between rear axle and front-based laser
-- b = width offset of the front-based laser
type Dims = Vector H.Real -- ^ <L,H,a,b>
dimL, dimH, dimA, dimB :: (Base repr) => repr Dims -> repr H.Real
dimL v = H.index v 0
dimH v = H.index v 1
dimA v = H.index v 2
dimB v = H.index v 3
type LaserReads = (Vector ZRad, Vector ZInt)
type Coords = (Angle, (GPS, GPS)) -- ^ phi (world angle), vehLon, vehLat
vPhi :: (Base repr) => repr Coords -> repr Angle
vPhi cds = unpair cds $ \p _ -> p
vLon, vLat :: (Base repr) => repr Coords -> repr GPS
vLon cds = unpair cds $ \_ ll -> unpair ll $ \lon _ -> lon
vLat cds = unpair cds $ \_ ll -> unpair ll $ \_ lat -> lat
type Steering = (Vel, Angle) -- ^ vel, alpha
vel :: (Base repr) => repr Steering -> repr Vel
vel steer = unpair steer $ \v _ -> v
alpha :: (Base repr) => repr Steering -> repr Angle
alpha steer = unpair steer $ \_ a -> a
type State = (LaserReads, Coords)
type Simulator repr = repr Dims
-> repr (Vector GPS) -- ^ beacon lons
-> repr (Vector GPS) -- ^ beacon lats
-> repr Coords -> repr Steering
-> repr DelTime -- ^ timestamp
-> repr (Measure State)
--------------------------------------------------------------------------------
-- MODEL --
--------------------------------------------------------------------------------
simulate :: (Mochastic repr) => Simulator repr
simulate ds blons blats cds steerE delT =
let_' (wheelToAxle steerE ds) $ \vc ->
let_' (pair vc (alpha steerE)) $ \steerC ->
unpair (newPos ds cds steerC delT) $ \calc_lon calc_lat ->
let_' (newPhi ds cds steerC delT) $ \calc_phi ->
let_' (mapV ((-) calc_lon) blons) $ \lon_ds ->
let_' (mapV ((-) calc_lat) blats) $ \lat_ds ->
-- Equation 10 from [1]
let_' (mapV sqrt_ (zipWithV (+) (mapV sqr lon_ds)
(mapV sqr lat_ds))) $ \calc_zrads ->
-- inverse-square for intensities
let_' (mapV (\r -> cIntensity / (pow_ r 2)) calc_zrads) $ \calc_zints ->
-- Equation 10 from [1]
-- Note: removed a "+ pi/2" term: it is present as (i - 180) in laserAssigns
let_' (mapV (\r -> atan r - calc_phi)
(zipWithV (/) lat_ds lon_ds)) $ \calc_zbetas ->
-- | Add some noise
normal calc_lon ((*) cVehicle . sqrt_ . unsafeProb $ delT) `bind` \lon ->
normal calc_lat ((*) cVehicle . sqrt_ . unsafeProb $ delT) `bind` \lat ->
normal calc_phi ((*) cVehicle . sqrt_ . unsafeProb $ delT) `bind` \phi ->
normalNoise cBeacon calc_zbetas `bind` \zbetas ->
makeLasers (mapV fromProb calc_zrads) zbetas muZRads cBeacon `bind` \lasersR ->
makeLasers (mapV fromProb calc_zints) zbetas muZInts cBeacon `bind` \lasersI ->
dirac $ pair (pair lasersR lasersI) (pair phi (pair lon lat))
-- | Translate velocity from back left wheel (where the velocity
-- encoder is present) to the center of the rear axle
-- Equation 6 from [1]
wheelToAxle :: (Base repr) => repr Steering -> repr Dims -> repr Vel
wheelToAxle s ds = (vel s) / (1 - (tan (alpha s))*(dimH ds)/(dimL ds))
-- | Equation 7 (corrected) from [1]
newPos :: (Base repr) => repr Dims -> repr Coords
-> repr Steering -> repr DelTime
-> repr (GPS,GPS)
newPos ds cds s delT = pair lonPos latPos
where lonPos = (vLon cds) + delT*lonVel
latPos = (vLat cds) + delT*latVel
lonVel = (vel s)*(cos phi) - axleToLaser lonMag
latVel = (vel s)*(sin phi) + axleToLaser latMag
phi = vPhi cds
axleToLaser mag = (vel s) * mag * (tan (alpha s)) / (dimL ds)
lonMag = (dimA ds)*(sin phi) + (dimB ds)*(cos phi)
latMag = (dimA ds)*(cos phi) - (dimB ds)*(sin phi)
-- | Equation 7 (corrected) from [1]
newPhi :: (Base repr) => repr Dims -> repr Coords
-> repr Steering -> repr DelTime -> repr Angle
newPhi ds cds s delT = (vPhi cds) + delT*(vel s)*(tan (alpha s)) / (dimL ds)
cVehicle :: (Base repr) => repr Prob
cVehicle = 0.42
cBeacon :: (Base repr) => repr Prob
cBeacon = 0.37
cIntensity :: (Base repr) => repr Prob
cIntensity = 19
muZRads :: (Base repr) => repr H.Real
muZRads = 40
sigmaZRads :: (Base repr) => repr Prob
sigmaZRads = 1
muZInts :: (Base repr) => repr H.Real
muZInts = 40
sigmaZInts :: (Base repr) => repr Prob
sigmaZInts = 1
sqr :: (Base repr) => repr H.Real -> repr Prob
sqr a = unsafeProb $ a * a -- pow_ (unsafeProb a) 2
let_' :: (Mochastic repr)
=> repr a -> (repr a -> repr (Measure b)) -> repr (Measure b)
let_' = bind . dirac
normalNoise :: (Mochastic repr) => repr Prob -> repr (Vector H.Real)
-> repr (Measure (Vector H.Real))
normalNoise sd v = plate (mapV (`normal` sd) v)
-- | Make a vector of laser readings (length 361)
-- The vector contains values from "reads" at positions from "betas"
-- Normal noise is then added to the vector
makeLasers :: (Mochastic repr) => repr (Vector H.Real)
-> repr (Vector H.Real)
-> repr H.Real -> repr Prob
-> repr (Measure (Vector H.Real))
makeLasers reads betas mu sd =
let base = vector 361 (const mu)
combine r b = vector 361 (\i -> if_ (withinLaser (i-180) b) (r-mu) 0)
combined = zipWithV combine reads betas
in normalNoise sd (reduce (zipWithV (+)) base combined)
withinLaser :: (Base repr) => repr Int -> repr H.Real -> repr Bool
withinLaser n b = and_ [ lessOrEq (convert (fromInt n - 0.5)) tb2
, less tb2 (convert (fromInt n + 0.5)) ]
  where lessOrEq a b = or_ [less a b, equal a b]
        tb2          = tan (b/2)
        toRadian d   = d*pi/180
        convert      = tan . toRadian . (/ 4)
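-- Worked instance (illustrative): beams are spaced half a degree apart, so a
-- beacon at bearing b = 0 radians satisfies @withinLaser 0 0@ and is assigned
-- to index 180 of the 361-element laser vector (i.e. i - 180 == 0).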
--------------------------------------------------------------------------------
-- SIMULATIONS --
--------------------------------------------------------------------------------
type Rand = MWC.Gen (PrimState IO)
data Particle = PL
{ dims :: V.Vector Double -- ^ l,h,a,b
, bLats :: V.Vector Double
, bLons :: V.Vector Double
}
data Params = PM
{ sensors :: [Sensor]
, controls :: [Control]
, lasers :: [Laser]
, coords :: (Double,(Double,Double)) -- ^ phi, lon, lat
, steer :: (Double,Double) -- ^ vel, alpha
, tm :: Double
}
type Generator = Particle -> Params -> IO ()
data Gen = Conditioned | Unconditioned
deriving Eq
-- | Returns the pair (longitudes, latitudes)
genBeacons :: Rand -> Maybe FilePath -> IO (V.Vector Double, V.Vector Double)
genBeacons _ Nothing =
return ( V.fromList [1,3]
, V.fromList [2,4] )
genBeacons g (Just evalPath) = do
trueBeacons <- obstacles evalPath
return ( V.map lon trueBeacons
, V.map lat trueBeacons )
updateParams :: Params -> (Double,(Double,Double)) -> Double -> Params
updateParams prms cds tcurr =
prms { sensors = tail (sensors prms)
, coords = cds
, tm = tcurr }
plotPoint :: FilePath -> (Double,(Double,Double)) -> IO ()
plotPoint out (_,(lon,lat)) = do
dExist <- doesDirectoryExist out
unless dExist $ createDirectory out
let fp = out </> "slam_out_path.txt"
appendFile fp $ show lon ++ "," ++ show lat ++ "\n"
generate :: Gen -> FilePath -> FilePath -> Maybe FilePath -> IO ()
generate c input output eval = do
g <- MWC.createSystemRandom
Init ds phi ilt iln <- initialVals input
controls <- controlData input
sensors <- sensorData input
lasers <- if c==Unconditioned then return [] else laserReadings input
(lons, lats) <- genBeacons g eval
gen c output g (PL ds lons lats)
(PM sensors controls lasers (iln,(ilt,phi)) (0,0) 0)
gen :: Gen -> FilePath -> Rand -> Generator
gen c out g prtcl params = go params
where
go prms
| null $ sensors prms = putStrLn "Finished reading input_sensor"
| otherwise = do
let (Sensor tcurr snum) = head $ sensors prms
case snum of
1 -> do (_,coords) <- sampleState prtcl prms tcurr g
putStrLn "writing to simulated_slam_out_path"
plotPoint out coords
go $ updateParams prms coords tcurr
2 -> do when (null $ controls prms) $
error "input_control has fewer data than\
                                       \ it should according to input_sensor"
(_,coords) <- sampleState prtcl prms tcurr g
let prms' = updateParams prms coords tcurr
(Control _ nv nalph) = head $ controls prms
go $ prms' { controls = tail (controls prms)
, steer = (nv, nalph) }
3 -> case c of
Unconditioned ->
do ((zr,zi), coords) <- sampleState prtcl prms tcurr g
putStrLn "writing to simulated_input_laser"
plotReads out zr zi
go $ updateParams prms coords tcurr
Conditioned ->
do when (null $ lasers prms) $
error "input_laser has fewer data than\
                                         \ it should according to input_sensor"
let L _ zr zi = head (lasers prms)
lreads = (V.fromList zr, V.fromList zi)
coords <- sampleCoords prtcl prms lreads tcurr g
let prms' = updateParams prms coords tcurr
go $ prms' { lasers = tail (lasers prms) }
_ -> error "Invalid sensor ID (must be 1, 2 or 3)"
------------------
-- UNCONDITIONED
------------------
type SimLaser
= Dims -> Vector GPS -> Vector GPS
-> Coords -> Steering -> DelTime
-> Measure State
simLasers :: (Mochastic repr, Lambda repr) => repr SimLaser
simLasers =
lam $ \ds -> lam $ \blons -> lam $ \blats ->
lam $ \cds -> lam $ \s -> lam $ \delT ->
simulate ds blons blats cds s delT
sampleState
:: Particle -> Params -> Double -> Rand
-> IO ((V.Vector Double, V.Vector Double) , (Double, (Double, Double)))
sampleState prtcl prms tcurr g =
fmap (\(Just (s,1)) -> s) $
(unSample $ simLasers) ds blons blats cds s (tcurr-tprev) 1 g
where (PL ds blons blats) = prtcl
(PM _ _ _ cds s tprev) = prms
plotReads :: FilePath -> V.Vector Double -> V.Vector Double -> IO ()
plotReads out rads ints = do
dExist <- doesDirectoryExist out
unless dExist $ createDirectory out
let file = out </> "slam_simulated_laser.txt"
go file (V.toList $ rads V.++ ints)
where
go fp [] = appendFile fp "\n"
go fp [l] = appendFile fp ((show l) ++ "\n")
go fp (l:ls) = appendFile fp ((show l) ++ ",") >> go fp ls
----------------------------------
-- CONDITIONED ON LASER READINGS
----------------------------------
type Env = (Dims, (Vector GPS, (Vector GPS, (Coords, (Steering, DelTime)))))
evolve
:: (Mochastic repr)
=> repr Env
-> [ repr (LaserReads -> (Measure Coords)) ]
evolve env = undefined
-- TODO the code below would work if Lazy supported Vector
-- [ app d env
-- | d <- L.runDisintegrate $ \e0 ->
-- unpair e0 $ \ds e1 ->
-- unpair e1 $ \blons e2 ->
-- unpair e2 $ \blats e3 ->
-- unpair e3 $ \cds e4 ->
-- unpair e4 $ \s delT ->
-- simulate ds blons blats cds s delT ]
readLasers
:: (Mochastic repr, Lambda repr)
=> repr (Env -> LaserReads -> Measure Coords)
readLasers = lam $ \env -> lam $ \lrs -> app (head (evolve env)) lrs
sampleCoords prtcl prms lreads tcurr g =
fmap (\(Just (s,1)) -> s) $
(unSample $ readLasers)
(ds,(blons,(blats,(cds,(s,tcurr-tprev)))))
lreads 1 g
where (PL ds blons blats) = prtcl
(PM _ _ _ cds s tprev) = prms
--------------------------------------------------------------------------------
-- MAIN --
--------------------------------------------------------------------------------
main :: IO ()
main = do
args <- getArgs
case args of
[input, output] -> generate Unconditioned input output Nothing
[input, output, eval] -> generate Unconditioned input output (Just eval)
_ -> usageExit
usageExit :: IO ()
usageExit = do
pname <- getProgName
putStrLn (usage pname) >> exitSuccess
where usage pname = "Usage: " ++ pname ++ " input_dir output_dir [eval_dir]\n"
--------------------------------------------------------------------------------
-- DATA IO --
--------------------------------------------------------------------------------
data Initial = Init
{ dimensions :: V.Vector Double -- ^ l,h,a,b
, initPhi :: Double
, initLat :: Double
, initLon :: Double
} deriving Show
instance FromRecord Initial where
parseRecord v
| V.length v == 7 =
Init A.<$> parseRecord (V.slice 0 4 v)
A.<*> v .! 4
A.<*> v .! 5
A.<*> v .! 6
| otherwise = fail "wrong number of fields in input_properties"
noFileBye :: FilePath -> IO ()
noFileBye fp = putStrLn ("Could not find " ++ fp) >> exitFailure
initialVals :: FilePath -> IO Initial
initialVals inpath = do
let input = inpath </> "input_properties.csv"
doesFileExist input >>= flip unless (noFileBye input)
bytestr <- B.readFile input
case decode HasHeader bytestr of
Left msg -> fail msg
Right v
| V.length v == 1 -> return $ v V.! 0
| otherwise -> fail "wrong number of rows in input_properties"
data Laser = L
{ timestamp :: Double
, zrads :: [Double]
, intensities :: [Double]
}
instance FromRecord Laser where
parseRecord v
| V.length v == 1 + 2*range =
L A.<$> v .! 0
A.<*> parseRecord (V.slice 1 range v)
A.<*> parseRecord (V.slice (range+1) range v)
| otherwise = fail "wrong number of fields in input_laser"
laserReadings :: FilePath -> IO [Laser]
laserReadings inpath = do
let input = inpath </> "input_laser.csv"
doesFileExist input >>= flip unless (noFileBye input)
decodeFileStream input
data Sensor = Sensor
{ sensetime :: Double
, sensorID :: Int
}
deriving (Show)
instance FromRecord Sensor where
parseRecord v
| V.length v == 2 = Sensor A.<$> v .! 0 A.<*> v .! 1
| otherwise = fail "wrong number of fields in input_sensor"
sensorData :: FilePath -> IO [Sensor]
sensorData inpath = do
let input = inpath </> "input_sensor.csv"
doesFileExist input >>= flip unless (noFileBye input)
decodeFileStream input
data Control = Control
{ contime :: Double
, velocity :: Double
, steering :: Double
} deriving (Show)
instance FromRecord Control where
parseRecord v
| V.length v == 3 = Control A.<$> v .! 0 A.<*> v .! 1 A.<*> v .! 2
| otherwise = fail "wrong number of fields in input_control"
controlData :: FilePath -> IO [Control]
controlData inpath = do
let input = inpath </> "input_control.csv"
doesFileExist input >>= flip unless (noFileBye input)
decodeFileStream input
-- | True beacon positions (from eval_data/eval_obstacles.csv for each path type)
-- This is for simulation purposes only!
-- Not to be used during inference
data Obstacle = Obstacle {lat :: Double, lon :: Double}
instance FromRecord Obstacle where
parseRecord v
| V.length v == 2 = Obstacle A.<$> v .! 0 A.<*> v .! 1
| otherwise = fail "wrong number of fields in eval_obstacles"
obstacles :: FilePath -> IO (V.Vector Obstacle)
obstacles evalPath = do
let evalObs = evalPath </> "eval_obstacles.csv"
doesFileExist evalObs >>= flip unless (noFileBye evalObs)
fmap V.fromList $ decodeFileStream evalObs
--------------------------------------------------------------------------------
-- MISC MINI-TESTS --
--------------------------------------------------------------------------------
testIO :: FilePath -> IO ()
testIO inpath = do
-- initialVals "test" >>= print
laserReads <- laserReadings inpath
let laserVector = V.fromList laserReads
print . (V.slice 330 31) . V.fromList . zrads $ laserVector V.! 50
V.mapM_ ((printf "%.6f\n") . timestamp) $ V.take 10 laserVector
sensors <- sensorData inpath
putStrLn "-------- Here are some sensors -----------"
print $ V.slice 0 20 (V.fromList sensors)
controls <- controlData inpath
putStrLn "-------- Here are some controls -----------"
print $ V.slice 0 20 (V.fromList controls)
hakvec :: (Mochastic repr) => repr (Measure (Vector H.Real))
hakvec = plate $ vector 11 (const (normal 0 1))
| zaxtax/hakaru | haskell/Examples/Slam.hs | bsd-3-clause | 19,711 | 0 | 40 | 5,790 | 5,904 | 3,065 | 2,839 | 357 | 5 |
{-# OPTIONS_GHC -fwarn-unused-matches -fwarn-incomplete-patterns -fwarn-type-defaults #-}
module FrontEnd.Syn.Traverse(module FrontEnd.Syn.Traverse, module FrontEnd.HsSyn) where
import Control.Monad.Writer
import Util.Std
import qualified Data.Set as Set
import qualified Data.Traversable as T
import FrontEnd.HsSyn
import Name.Name
import Support.FreeVars
import Util.Inst()
import qualified Util.Seq as Seq
instance FreeVars HsType (Set.Set Name) where
freeVars t = execWriter (f t) where
f (HsTyVar v) = tell (Set.singleton v)
f (HsTyCon v) = tell (Set.singleton v)
f t = traverseHsType_ f t
traverse_ :: Applicative m => (a -> m b) -> a -> m a
traverse_ fn x = x <$ fn x
traverseHsExp_ :: (Monad m,Applicative m,MonadSetSrcLoc m,TraverseHsOps e) => (HsExp -> m ()) -> e -> m ()
traverseHsExp_ fn e = traverseHsExp (traverse_ fn) e *> pure ()
traverseHsExp :: (Monad m,MonadSetSrcLoc m,TraverseHsOps e) => (HsExp -> m HsExp) -> e -> m e
traverseHsExp fn e = traverseHsOps ops e where
ops = (hsOpsDefault ops) { opHsExp, opHsPat, opHsType } where
opHsExp e = fn e
opHsPat p = return p
opHsType t = return t
traverseHsDecl_ :: (Monad m,Applicative m,MonadSetSrcLoc m,TraverseHsOps e) => (HsDecl -> m ()) -> e -> m ()
traverseHsDecl_ fn e = traverseHsDecl (traverse_ fn) e *> pure ()
traverseHsDecl :: (Monad m,MonadSetSrcLoc m,TraverseHsOps e) => (HsDecl -> m HsDecl) -> e -> m e
traverseHsDecl fn e = traverseHsOps ops e where
ops = (hsOpsDefault ops) { opHsDecl, opHsPat, opHsType } where
opHsDecl e = fn e
opHsPat p = return p
opHsType t = return t
traverseHsType_ :: Applicative m => (HsType -> m b) -> HsType -> m ()
traverseHsType_ fn p = traverseHsType (traverse_ fn) p *> pure ()
traverseHsType :: Applicative m => (HsType -> m HsType) -> HsType -> m HsType
traverseHsType fn t = f t where
f (HsTyFun a1 a2) = HsTyFun <$> fn a1 <*> fn a2
f (HsTyTuple a1) = HsTyTuple <$> T.traverse fn a1
f (HsTyUnboxedTuple a1) = HsTyUnboxedTuple <$> T.traverse fn a1
f (HsTyApp a1 a2) = HsTyApp <$> fn a1 <*> fn a2
f (HsTyForall vs qt) = doQual HsTyForall f vs qt
f (HsTyExists vs qt) = doQual HsTyExists f vs qt
f x@HsTyVar {} = pure x
f x@HsTyCon {} = pure x
f HsTyExpKind { .. } = h <$> T.traverse fn hsTyLType
where h hsTyLType = HsTyExpKind { .. }
-- f HsTyExpKind { .. } = do
-- hsTyLType <- T.mapM f hsTyLType
-- return HsTyExpKind { .. }
f (HsTyEq a1 a2) = HsTyEq <$> fn a1 <*> fn a2
--f (HsTyEq a b) = return HsTyEq `ap` f a `ap` f b
f (HsTyStrictType a1 a2) = HsTyStrictType <$> pure a1 <*> T.traverse fn a2
-- f (HsTyStrictType a b ) = return HsTyStrictType `ap` return a `ap` T.mapM f b
f HsTyAssoc = pure HsTyAssoc
doQual :: Applicative m => (a -> HsQualType -> b) -> (HsType -> m HsType) -> a -> HsQualType -> m b
doQual hsTyForall f vs qt = cr <$> cntx <*> f (hsQualTypeType qt) where
cr cntx x = hsTyForall vs qt { hsQualTypeContext = cntx, hsQualTypeType = x }
cntx = flip T.traverse (hsQualTypeContext qt) $ \v -> case v of
x@HsAsst {} -> pure x
HsAsstEq a b -> HsAsstEq <$> f a <*> f b
-- return $ hsTyForall vs qt { hsQualTypeContext = cntx, hsQualTypeType = x }
traverseHsPat_ :: (Monad m,Applicative m,MonadSetSrcLoc m) => (HsPat -> m b) -> HsPat -> m ()
traverseHsPat_ fn p = traverseHsPat (traverse_ fn) p *> pure ()
traverseHsPat :: (Monad m,MonadSetSrcLoc m,TraverseHsOps e) => (HsPat -> m HsPat) -> e -> m e
traverseHsPat fn e = traverseHsOps ops e where
ops = (hsOpsDefault ops) { opHsPat, opHsType } where
opHsPat p = fn p
opHsType t = return t
traverseHsDeclHsExp :: (Monad m,MonadSetSrcLoc m) => (HsExp -> m HsExp) -> HsDecl -> m HsDecl
traverseHsDeclHsExp fn d = traverseHsExp fn d
getNamesFromHsPat :: HsPat -> [Name]
getNamesFromHsPat p = Seq.toList $ execWriter (f p) where
f (HsPVar hsName) = tell $ Seq.singleton hsName
f (HsPAsPat hsName hsPat) = do tell $ Seq.singleton hsName; f hsPat
f p = traverseHsPat_ f p
data HsOps m = HsOps {
opHsDecl :: HsDecl -> m HsDecl,
opHsExp :: HsExp -> m HsExp,
opHsPat :: HsPat -> m HsPat,
opHsType :: HsType -> m HsType,
opHsStmt :: HsStmt -> m HsStmt
}
-- | Provides a default 'HsOps' table that recurses further down the tree for
-- any operations left undeclared. In order to tie the knot properly, you need
-- to pass its return value back into itself, as in:
--
-- > let ops = (hsOpsDefault ops) { opHsType = customTypeHandler }
--
-- NOTE: if you forget the parentheses around @hsOpsDefault ops@, your program
-- will still typecheck and compile, but it will behave incorrectly.
hsOpsDefault :: (Applicative m, MonadSetSrcLoc m) => HsOps m -> HsOps m
hsOpsDefault hops = HsOps { .. } where
f x = traverseHsOps hops x
opHsDecl = f
opHsExp = f
opHsPat = f
opHsStmt = f
opHsType = f
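-- A fuller sketch of the knot-tying pattern described above. This example is
-- illustrative only: the names 'exampleOps' and 'onType' are not part of this
-- module. The handler rewrites nothing; it merely shows where a real type
-- transformation would go before recursing with the same table.
--
-- > exampleOps :: (Applicative m, MonadSetSrcLoc m) => HsOps m
-- > exampleOps = ops where
-- >     ops = (hsOpsDefault ops) { opHsType = onType }
-- >     -- inspect or rewrite the type here, then visit its children with the
-- >     -- knot-tied table so nested types are traversed as well
-- >     onType t = traverseHsOps ops t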
class TraverseHsOps a where
-- Apply the operations to the direct children of the argument (one layer down).
traverseHsOps :: (Applicative m,MonadSetSrcLoc m) => HsOps m -> a -> m a
-- Apply the operation for the argument's own node type when one exists; the default simply recurses into its children.
applyHsOps :: (Applicative m,MonadSetSrcLoc m) => HsOps m -> a -> m a
applyHsOps os x = traverseHsOps os x
instance TraverseHsOps HsAlt where
traverseHsOps ops@HsOps { .. } (HsAlt sl p rhs ds) =
HsAlt sl <$> opHsPat p <*> applyHsOps ops rhs <*> T.traverse opHsDecl ds
instance TraverseHsOps HsModule where
traverseHsOps HsOps { .. } HsModule { .. } = cr <$> T.traverse opHsDecl hsModuleDecls
where cr hsModuleDecls = HsModule { .. }
instance TraverseHsOps HsType where
applyHsOps = opHsType
traverseHsOps HsOps { .. } = traverseHsType opHsType
instance TraverseHsOps HsDecl where
applyHsOps = opHsDecl
traverseHsOps hops@HsOps { .. } x = g x where
thops x = applyHsOps hops x
g x = withSrcLoc (srcLoc x) $ f x
f HsTypeFamilyDecl { .. } = h <$> thops hsDeclTArgs
where h hsDeclTArgs = HsTypeFamilyDecl { .. }
f HsTypeDecl { .. } = h <$> thops hsDeclTArgs <*> thops hsDeclType
where h hsDeclTArgs hsDeclType = HsTypeDecl { .. }
f HsDefaultDecl { .. } = h <$> thops hsDeclType
where h hsDeclType = HsDefaultDecl { .. }
f HsDataDecl { .. } = h <$> thops hsDeclContext <*> thops hsDeclCons
where h hsDeclContext hsDeclCons = HsDataDecl { .. }
f HsClassDecl { .. } = h <$> thops hsDeclClassHead <*> thops hsDeclDecls
where h hsDeclClassHead hsDeclDecls = HsClassDecl { .. }
f HsClassAliasDecl { .. } = h <$> thops hsDeclTypeArgs <*> thops hsDeclContext <*> thops hsDeclClasses <*> thops hsDeclDecls
where h hsDeclTypeArgs hsDeclContext hsDeclClasses hsDeclDecls = HsClassAliasDecl { .. }
f HsInstDecl { .. } = h <$> thops hsDeclClassHead <*> thops hsDeclDecls
where h hsDeclClassHead hsDeclDecls = HsInstDecl { .. }
f HsTypeSig { .. } = h <$> thops hsDeclQualType
where h hsDeclQualType = HsTypeSig { .. }
f HsActionDecl { .. } = h <$> thops hsDeclPat <*> thops hsDeclExp
where h hsDeclPat hsDeclExp = HsActionDecl { .. }
f (HsFunBind ms) = HsFunBind <$> thops ms
f HsPatBind { .. } = h <$> thops hsDeclPat <*> thops hsDeclRhs <*> thops hsDeclDecls
where h hsDeclPat hsDeclRhs hsDeclDecls = HsPatBind { .. }
f HsSpaceDecl { .. } = dr <$> opHsExp hsDeclExp <*> thops hsDeclQualType
where dr hsDeclExp hsDeclQualType = HsSpaceDecl { .. }
f HsForeignDecl { .. } = dr <$> thops hsDeclQualType
where dr hsDeclQualType = HsForeignDecl { .. }
f HsForeignExport { .. } = dr <$> thops hsDeclQualType
where dr hsDeclQualType = HsForeignExport { .. }
f HsDeclDeriving { .. } = dr <$> thops hsDeclClassHead
where dr hsDeclClassHead = HsDeclDeriving { .. }
f x@HsInfixDecl {} = pure x
f x@HsPragmaProps {} = pure x
f (HsPragmaRules rs) = HsPragmaRules <$> thops rs
f HsPragmaSpecialize { .. } = dr <$> thops hsDeclType
where dr hsDeclType = HsPragmaSpecialize { .. }
instance TraverseHsOps HsRule where
traverseHsOps hops HsRule { .. } = hr <$>
ah hsRuleLeftExpr <*> ah hsRuleRightExpr <*> f hsRuleFreeVars where
--f xs = T.traverse (T.traverse (T.traverse ah)) xs
f xs = applyHsOps hops xs
hr hsRuleLeftExpr hsRuleRightExpr hsRuleFreeVars = HsRule { .. }
ah x = applyHsOps hops x
instance TraverseHsOps HsClassHead where
traverseHsOps hops HsClassHead { .. } =
mch <$> applyHsOps hops hsClassHeadContext <*> applyHsOps hops hsClassHeadArgs where
mch hsClassHeadContext hsClassHeadArgs = HsClassHead { .. }
instance TraverseHsOps HsMatch where
traverseHsOps hops m = withSrcLoc (hsMatchSrcLoc m) $ f m where
f HsMatch { .. } = h <$> thops hsMatchPats <*> thops hsMatchRhs <*> thops hsMatchDecls
where h hsMatchPats hsMatchRhs hsMatchDecls = HsMatch { .. }
thops x = applyHsOps hops x
instance TraverseHsOps HsConDecl where
traverseHsOps hops d = withSrcLoc (srcLoc d) $ f d where
thops x = applyHsOps hops x
f HsConDecl { .. } = h <$> thops hsConDeclConArg
where h hsConDeclConArg = HsConDecl { .. }
f HsRecDecl { .. } = h <$> thops hsConDeclRecArg
where h hsConDeclRecArg = HsRecDecl { .. }
instance TraverseHsOps HsPat where
applyHsOps ho x = opHsPat ho x
traverseHsOps hops@HsOps { .. } x = f x where
fn x = applyHsOps hops x
f (HsPTypeSig sl p qt) = HsPTypeSig sl <$> fn p <*> fn qt
f p@HsPVar {} = pure p
f p@HsPLit {} = pure p
f (HsPNeg a1) = HsPNeg <$> fn a1
f (HsPInfixApp a1 a2 a3) = HsPInfixApp <$> fn a1 <*> pure a2 <*> fn a3
f (HsPApp d1 a1) = HsPApp d1 <$> fn a1
f (HsPTuple a1) = HsPTuple <$> fn a1
f (HsPUnboxedTuple a1) = HsPUnboxedTuple <$> fn a1
f (HsPList a1) = HsPList <$> fn a1
f (HsPParen a1) = HsPParen <$> fn a1
f (HsPAsPat d1 a1) = HsPAsPat d1 <$> fn a1
f HsPWildCard = pure HsPWildCard
f (HsPIrrPat a1) = HsPIrrPat <$> fn a1
f (HsPBangPat a1) = HsPBangPat <$> fn a1
f (HsPRec d1 a1) = HsPRec d1 <$> fn a1
f (HsPatWords ws) = HsPatWords <$> fn ws
f (HsPatBackTick ws) = HsPatBackTick <$> fn ws
instance TraverseHsOps HsQualType where
traverseHsOps hops HsQualType { .. } = h <$> applyHsOps hops hsQualTypeContext <*> applyHsOps hops hsQualTypeType
where h hsQualTypeContext hsQualTypeType = HsQualType { .. }
-- traverseHsOps hops HsQualType { .. } = do
-- hsQualTypeContext <- applyHsOps hops hsQualTypeContext
-- hsQualTypeType <- opHsType hops hsQualTypeType
-- return HsQualType { .. }
instance TraverseHsOps HsAsst where
traverseHsOps HsOps { .. } (HsAsstEq a b) = HsAsstEq <$> opHsType a <*> opHsType b
traverseHsOps _ x = pure x
instance TraverseHsOps HsComp where
traverseHsOps ops HsComp { .. } = h <$> applyHsOps ops hsCompStmts <*> applyHsOps ops hsCompBody where
h hsCompStmts hsCompBody = HsComp { .. }
instance TraverseHsOps HsRhs where
traverseHsOps ops (HsUnGuardedRhs rhs) = HsUnGuardedRhs <$> applyHsOps ops rhs
traverseHsOps ops (HsGuardedRhss rhss) = HsGuardedRhss <$> applyHsOps ops rhss
instance TraverseHsOps HsStmt where
applyHsOps = opHsStmt
traverseHsOps hops@HsOps { .. } x = f x where
f (HsGenerator sl p e) = withSrcLoc sl $ HsGenerator sl <$> opHsPat p <*> opHsExp e
f (HsQualifier e) = HsQualifier <$> opHsExp e
f (HsLetStmt dl) = HsLetStmt <$> applyHsOps hops dl
instance TraverseHsOps HsExp where
applyHsOps = opHsExp
traverseHsOps hops@HsOps { .. } e = g e where
fn x = applyHsOps hops x
g e = withSrcLoc (srcLoc e) $ f e
f (HsCase e as) = HsCase <$> fn e <*> fn as
f (HsLCase as) = HsLCase <$> fn as
f (HsDo hsStmts) = HsDo <$> fn hsStmts
f (HsExpTypeSig srcLoc e hsQualType) = HsExpTypeSig srcLoc <$> fn e <*> fn hsQualType
f (HsLambda srcLoc hsPats e) = HsLambda srcLoc <$> fn hsPats <*> fn e
f (HsLet hsDecls e) = HsLet <$> fn hsDecls <*> fn e
f (HsListComp c) = HsListComp <$> fn c
f (HsRecConstr n fus) = HsRecConstr n <$> fn fus
f (HsRecUpdate e fus) = HsRecUpdate <$> fn e <*> fn fus
-- only exp
f e@HsCon {} = pure e
f e@HsError {} = pure e
f e@HsLit {} = pure e
f e@HsVar {} = pure e
f (HsApp a1 a2) = HsApp <$> fn a1 <*> fn a2
f (HsAsPat hsName e) = HsAsPat hsName <$> fn e
f (HsBackTick e) = HsBackTick <$> fn e
f (HsBangPat e) = HsBangPat <$> fn e
f (HsEnumFrom e) = HsEnumFrom <$> fn e
f (HsEnumFromThen e1 e2) = HsEnumFromThen <$> fn e1 <*> fn e2
f (HsEnumFromThenTo a1 a2 a3) = HsEnumFromThenTo <$> fn a1 <*> fn a2 <*> fn a3
f (HsEnumFromTo e1 e2) = HsEnumFromTo <$> fn e1 <*> fn e2
f (HsIf e1 e2 e3) = liftA3 HsIf (fn e1) (fn e2) (fn e3)
f (HsInfixApp a1 a2 a3) = HsInfixApp <$> fn a1 <*> fn a2 <*> fn a3
f (HsIrrPat hsExp) = HsIrrPat <$> fn hsExp
f (HsLeftSection e1 e2) = HsLeftSection <$> fn e1 <*> fn e2
f (HsList hsExps) = HsList <$> fn hsExps
f (HsLocatedExp le) = HsLocatedExp <$> fn le
f (HsNegApp a1) = HsNegApp <$> fn a1
f (HsParen e) = HsParen <$> fn e
f (HsRightSection e1 e2) = HsRightSection <$> fn e1 <*> fn e2
f (HsTuple es) = HsTuple <$> fn es
f (HsUnboxedTuple es) = HsUnboxedTuple <$> fn es
f (HsWildCard x) = pure (HsWildCard x)
f (HsWords ws) = HsWords <$> fn ws
--f h = error $ "FrontEnd.Syn.Traverse.traverseHsExp f unrecognized construct: " ++ show h
instance TraverseHsOps e => TraverseHsOps (Located e) where
traverseHsOps hops (Located l e) = withSrcSpan l (Located l <$> applyHsOps hops e)
instance (TraverseHsOps a,T.Traversable f) => TraverseHsOps (f a) where
traverseHsOps hops xs = T.traverse (applyHsOps hops) xs
getDeclNames :: HsDecl -> [Name]
getDeclNames d = f d where
f HsPatBind { .. } = getNamesFromHsPat hsDeclPat
f HsActionDecl { .. } = getNamesFromHsPat hsDeclPat
f (HsFunBind ((HsMatch _ name _ _ _):_)) = [name]
f HsDataDecl { .. } = [hsDeclName]
f HsClassDecl { hsDeclClassHead = h } = [hsClassHead h]
f HsForeignDecl { .. } = [hsDeclName]
f (HsForeignExport _ e _ _) = [ffiExportName e]
f _ = []
--maybeGetDeclName (HsTypeSig _ [n] _ ) = return n
maybeGetDeclName :: Monad m => HsDecl -> m Name
maybeGetDeclName d = case getDeclNames d of
[] -> fail $ "getDeclName: could not find name for a decl: " ++ show d
[x] -> return x
_ -> fail $ "getDeclName: found too many names for decl: " ++ show d
getDeclName :: HsDecl -> Name
getDeclName d = runIdentity $ maybeGetDeclName d
-- HsDecl getDeps function
getDeclDeps :: HsDecl -> [HsName]
getDeclDeps = Seq.toList . execWriter . traverseHsExp_ f where
f (HsVar name) = tell $ Seq.singleton name
f e = traverseHsExp_ f e
| hvr/jhc | src/FrontEnd/Syn/Traverse.hs | mit | 15,784 | 0 | 14 | 4,574 | 5,432 | 2,674 | 2,758 | -1 | -1 |
module B where
data Value = Value String Int
| sdiehl/ghc | testsuite/tests/ghci/T16525a/B.hs | bsd-3-clause | 46 | 0 | 6 | 10 | 15 | 9 | 6 | 2 | 0 |
-- |
-- Module : Data.Text.Internal.Read
-- Copyright : (c) 2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Common internal functions for reading textual data.
module Data.Text.Internal.Read
(
IReader
, IParser(..)
, T(..)
, digitToInt
, hexDigitToInt
, perhaps
) where
import Control.Applicative (Applicative(..))
import Control.Arrow (first)
import Control.Monad (ap)
import Data.Char (ord)
type IReader t a = t -> Either String (a,t)
newtype IParser t a = P {
runP :: IReader t a
}
instance Functor (IParser t) where
fmap f m = P $ fmap (first f) . runP m
instance Applicative (IParser t) where
pure a = P $ \t -> Right (a,t)
{-# INLINE pure #-}
(<*>) = ap
instance Monad (IParser t) where
return = pure
m >>= k = P $ \t -> case runP m t of
Left err -> Left err
Right (a,t') -> runP (k a) t'
{-# INLINE (>>=) #-}
fail msg = P $ \_ -> Left msg
data T = T !Integer !Int
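-- | Run a parser; if it fails, succeed with the supplied default value
-- instead of propagating the failure.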
perhaps :: a -> IParser t a -> IParser t a
perhaps def m = P $ \t -> case runP m t of
Left _ -> Right (def,t)
r@(Right _) -> r
hexDigitToInt :: Char -> Int
hexDigitToInt c
| c >= '0' && c <= '9' = ord c - ord '0'
| c >= 'a' && c <= 'f' = ord c - (ord 'a' - 10)
| otherwise = ord c - (ord 'A' - 10)
digitToInt :: Char -> Int
digitToInt c = ord c - ord '0'
| spencerjanssen/text | Data/Text/Internal/Read.hs | bsd-2-clause | 1,560 | 0 | 13 | 516 | 565 | 302 | 263 | 44 | 2 |
--- Pretty printing for the P functor
module HsPatPretty () where
import HsPatStruct
import HsIdentPretty
import HsFieldsPretty()
import HsLiteralPretty()
import PrettyPrint
instance (PrintableOp i,Printable p) => Printable (PI i p) where
ppi (HsPId n) = ppcon wrap n
ppi (HsPLit s l) = lit l
ppi (HsPNeg s l) = kw "-" <> lit l
ppi (HsPSucc s n l) = parenBinOp n (kw "+") l
ppi (HsPInfixApp x op y) = parenBinOp x (conop (ppiOp op)) y
ppi (HsPApp n ps) = con (wrap n) <+> (fsep $ map wrap ps)
ppi (HsPTuple s ps) = ppiFTuple ps
ppi (HsPList s ps) = ppiList ps
ppi (HsPParen p) = wrap p
ppi (HsPRec n fs) = con (wrap n) <> braces fs
ppi (HsPAsPat n p) = wrap n <> kw "@" <> wrap p
ppi (HsPWildCard) = kw '_'
ppi (HsPIrrPat p) = kw "~" <> wrap p
wrap p =
case p of
HsPId n -> ppcon wrap n
HsPLit s l -> lit l
HsPApp n [] -> con (wrap n)
HsPTuple s ps -> ppi p
HsPList s ps -> ppi p
HsPParen p -> parens p
HsPAsPat n _ -> ppi p
HsPWildCard -> kw '_'
HsPInfixApp{} -> ppi p
HsPSucc{} -> ppi p
_ -> parens p
| forste/haReFork | tools/base/AST/HsPatPretty.hs | bsd-3-clause | 1,224 | 0 | 11 | 435 | 551 | 264 | 287 | 33 | 0 |
module Qoo where
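-- The annotation below uses an abstract refinement: for any refinement r on
-- Int, 'intid' maps an Int satisfying r to an Int satisfying r, i.e. the
-- identity preserves whatever property its argument already has.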
{-@ intid :: forall <r :: y0: Int -> Bool>. i: Int<r> -> Int<r> @-}
intid :: Int -> Int
intid i = i
| mightymoose/liquidhaskell | tests/todo/intP.hs | bsd-3-clause | 119 | 0 | 5 | 29 | 22 | 13 | 9 | 3 | 1 |