Dataset columns (name, type, observed range):

  code                stringlengths    5 .. 1.03M
  repo_name           stringlengths    5 .. 90
  path                stringlengths    4 .. 158
  license             stringclasses    15 values
  size                int64            5 .. 1.03M
  n_ast_errors        int64            0 .. 53.9k
  ast_max_depth       int64            2 .. 4.17k
  n_whitespaces       int64            0 .. 365k
  n_ast_nodes         int64            3 .. 317k
  n_ast_terminals     int64            1 .. 171k
  n_ast_nonterminals  int64            1 .. 146k
  loc                 int64            -1 .. 37.3k
  cycloplexity        int64            -1 .. 1.31k
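Each record below carries these thirteen fields in the order listed. As a hedged illustration, a Haskell row type mirroring the schema might look like the following; the module, record, and field names plus the Text/Int64 choices are assumptions for illustration, not part of the dataset itself.

-- Hypothetical mirror of one dataset row; names and concrete types are assumed.
module CodeSampleRow where

import Data.Int  (Int64)
import Data.Text (Text)

data CodeSampleRow = CodeSampleRow
  { csCode             :: Text   -- full source file, 5 .. 1.03M characters
  , csRepoName         :: Text
  , csPath             :: Text
  , csLicense          :: Text   -- one of 15 license classes
  , csSize             :: Int64
  , csNAstErrors       :: Int64
  , csAstMaxDepth      :: Int64
  , csNWhitespaces     :: Int64
  , csNAstNodes        :: Int64
  , csNAstTerminals    :: Int64
  , csNAstNonterminals :: Int64
  , csLoc              :: Int64  -- observed values include -1
  , csCycloplexity     :: Int64  -- observed values include -1
  } deriving (Show)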
{-# LANGUAGE OverloadedStrings #-} -- | Module: System.Linux.Cgroups.CpuAcct -- Author: Alexander Vershilov <[email protected]> -- License: BSD -- -- The CPU accounting controller is used to group tasks using cgroups and -- account the CPU usage of these groups of tasks. -- -- The CPU accounting controller supports multi-hierarchy groups. An accounting -- group accumulates the CPU usage of all of its child groups and the tasks -- directly present in its group. -- -- -- cpuacct controller uses percpu_counter interface to collect user and -- system times. This has two side effects: -- -- * It is theoretically possible to see wrong values for user and system times. -- This is because percpu_counter_read() on 32bit systems isn't safe -- against concurrent writes. -- -- * It is possible to see slightly outdated values for user and system times -- due to the batch processing nature of percpu_counter. -- module System.Linux.Cgroups.Subsystem.CpuAcct ( subCpuAcct , CpuStat(..) , CpuUsage(..) , CpuUsagePerCpu(..) ) where import BasicPrelude import Data.Int import Data.Default import System.Linux.Cgroups.Types subCpuAcct = Controller "cpuAcct" type UserHZ = Int64 -- | CPU time obtained by the cgroup into user and system times. Currently -- the following statistics are supported: -- -- user: Time spent by tasks of the cgroup in user mode. -- system: Time spent by tasks of the cgroup in kernel mode. -- data CpuStat = CpuStat { cpuStatUser::UserHZ, cpuStatSystem::UserHZ} deriving (Eq,Show) instance Default CpuStat where def = CpuStat 0 0 instance CgroupValue CpuStat where subsystem _ = subCpuAcct param _ = "stat" instance CgroupRead CpuStat where unprint = fromLines specCpuSet specCpuSet :: [Spec CpuStat] specCpuSet = [ ("user", \x (l:_) -> x{cpuStatUser = read l}) , ("system", \x (l:_) -> x{cpuStatSystem = read l}) ] -- | cpuacct.usage gives the CPU time (in nanoseconds) obtained -- by this group which is essentially the CPU time obtained by all the tasks -- in the system. newtype CpuUsage = CpuUsage {unCpuUsage :: Int64 } deriving (Eq, Show) instance CgroupValue CpuUsage where subsystem _ = subCpuAcct param _ = "usage" instance CgroupRead CpuUsage where unprint = CpuUsage . read newtype CpuUsagePerCpu = CpuUsagePerCpu {unCpuUsagePerCpu :: [Int64]} deriving (Eq, Show) instance CgroupValue CpuUsagePerCpu where subsystem _ = subCpuAcct param _ = "usage_percpu" instance CgroupRead CpuUsagePerCpu where unprint = CpuUsagePerCpu . map read . words
qnikst/cgroups-hs
System/Linux/Cgroups/Subsystem/CpuAcct.hs
bsd-3-clause
2,673
0
10
567
415
252
163
38
1
{-# LANGUAGE CPP, FlexibleInstances, BangPatterns, ViewPatterns #-} #if __GLASGOW_HASKELL__ >= 700 {-# OPTIONS -fllvm #-} #endif module Data.TrieMap.RadixTrie.Zipper () where import Data.TrieMap.RadixTrie.Base #define V(f) f (VVector) (k) #define U(f) f (PVector) (Word) #define LOC(args) !(locView -> Loc args) #define DEEP(args) !(pView -> Deep args) instance TrieKey k => Zippable (TrieMap (VVector k)) where empty = Radix Nothing clear (Hole h) = Radix (clearEdge h) assign a (Hole h) = Radix (Just (assignEdge a h)) instance Zippable (TrieMap (PVector Word)) where empty = WRadix Nothing clear (WHole h) = WRadix (clearEdge h) assign a (WHole h) = WRadix (Just (assignEdge a h)) {-# INLINE assignEdge #-} assignEdge :: (Label v k, Sized a) => a -> EdgeLoc v k a -> Edge v k a assignEdge v LOC(ks ts path) = assignP (edge ks (Just v) ts) path {-# SPECIALIZE assignP :: (TrieKey k, Sized a) => V(Edge) a -> V(Path) a -> V(Edge) a, Sized a => U(Edge) a -> U(Path) a -> U(Edge) a #-} assignP :: (Label v k, Sized a) => Edge v k a -> Path v k a -> Edge v k a assignP e DEEP(path ks v tHole) = assignP (edge ks v (assign e tHole)) path assignP e _ = e {-# SPECIALIZE clearEdge :: (TrieKey k, Sized a) => V(EdgeLoc) a -> V(MEdge) a, Sized a => U(EdgeLoc) a -> U(MEdge) a #-} clearEdge :: (Label v k, Sized a) => EdgeLoc v k a -> MEdge v k a clearEdge LOC(ks ts path) = rebuild (cEdge ks Nothing ts) path where rebuild Nothing DEEP(path ks v tHole) = rebuild (cEdge ks v (clear tHole)) path rebuild Nothing _ = Nothing rebuild (Just e) path = Just $ assignP e path
lowasser/TrieMap
Data/TrieMap/RadixTrie/Zipper.hs
bsd-3-clause
1,613
8
11
340
557
282
275
-1
-1
module Main where

import Geo.Garmin.OsmAustralia
import System.FilePath
import System.Command
import System.Directory
import System.Environment
import Control.Monad

main :: IO ()
main = do
  a <- getArgs
  case a of
    []    -> putStrLn "Usage: osmgarmin <output-dir>" >> exitWith (exitCode 107)
    (o:_) -> do
      e <- gmapsupp [minBound .. maxBound] (\st p ->
             let o' = o </> show st
             in do
               mkdir o'
               copyFile p (o' </> takeFileName p))
      isFailure e `when` exitWith e
tonymorris/osmaustralia
Main.hs
bsd-3-clause
631
0
23
254
186
94
92
17
2
-- | This is the syntax for bkp files which are parsed in 'ghc --backpack' -- mode. This syntax is used purely for testing purposes. module BkpSyn ( -- * Backpack abstract syntax HsUnitId(..), LHsUnitId, HsModuleSubst, LHsModuleSubst, HsModuleId(..), LHsModuleId, HsComponentId(..), LHsUnit, HsUnit(..), LHsUnitDecl, HsUnitDecl(..), HsDeclType(..), IncludeDecl(..), LRenaming, Renaming(..), ) where import HsSyn import RdrName import SrcLoc import Outputable import Module import PackageConfig {- ************************************************************************ * * User syntax * * ************************************************************************ -} data HsComponentId = HsComponentId { hsPackageName :: PackageName, hsComponentId :: ComponentId } instance Outputable HsComponentId where ppr (HsComponentId _pn cid) = ppr cid -- todo debug with pn data HsUnitId n = HsUnitId (Located n) [LHsModuleSubst n] type LHsUnitId n = Located (HsUnitId n) type HsModuleSubst n = (Located ModuleName, LHsModuleId n) type LHsModuleSubst n = Located (HsModuleSubst n) data HsModuleId n = HsModuleVar (Located ModuleName) | HsModuleId (LHsUnitId n) (Located ModuleName) type LHsModuleId n = Located (HsModuleId n) -- | Top level @unit@ declaration in a Backpack file. data HsUnit n = HsUnit { hsunitName :: Located n, hsunitBody :: [LHsUnitDecl n] } type LHsUnit n = Located (HsUnit n) -- | A declaration in a package, e.g. a module or signature definition, -- or an include. data HsDeclType = ModuleD | SignatureD data HsUnitDecl n = DeclD HsDeclType (Located ModuleName) (Maybe (Located (HsModule RdrName))) | IncludeD (IncludeDecl n) type LHsUnitDecl n = Located (HsUnitDecl n) -- | An include of another unit data IncludeDecl n = IncludeDecl { idUnitId :: LHsUnitId n, idModRenaming :: Maybe [ LRenaming ] } -- | Rename a module from one name to another. The identity renaming -- means that the module should be brought into scope. data Renaming = Renaming { renameFrom :: ModuleName, renameTo :: ModuleName } type LRenaming = Located Renaming
snoyberg/ghc
compiler/backpack/BkpSyn.hs
bsd-3-clause
2,371
0
12
614
470
282
188
45
0
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PackageImports #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE UnicodeSyntax #-} {-| [@ISO639-1@] gv [@ISO639-2@] glv [@ISO639-3@] glv [@Native name@] Gaelg [@English name@] Manx -} module Text.Numeral.Language.GLV.TestData (cardinals) where -------------------------------------------------------------------------------- -- Imports -------------------------------------------------------------------------------- import "base" Prelude ( Integral ) import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection ) import "this" Text.Numeral.Test ( TestData ) -------------------------------------------------------------------------------- -- Test data -------------------------------------------------------------------------------- {- Sources: http://www.sf.airnet.ne.jp/~ts/language/number/manx.html http://www.gaelg.iofm.net/LESSONS/P/P19.html http://www.gaelg.iofm.net/LESSONS/mona/Lessons.pdf -} cardinals ∷ (Integral i) ⇒ TestData i cardinals = [ ( "default" , defaultInflection , [ (1, "nane") , (2, "jees") , (3, "tree") , (4, "kiare") , (5, "queig") , (6, "shey") , (7, "shiaght") , (8, "hoght") , (9, "nuy") , (10, "jeih") , (11, "nane-jeig") , (12, "daa-yeig") , (13, "tree-jeig") , (14, "kiare-jeig") , (15, "queig-jeig") , (16, "shey-jeig") , (17, "shiaght-jeig") , (18, "hoght-jeig") , (19, "nuy-jeig") , (20, "feed") , (21, "nane as feed") , (22, "jees as feed") , (23, "tree as feed") , (24, "kiare as feed") , (25, "queig as feed") , (26, "shey as feed") , (27, "shiaght as feed") , (28, "hoght as feed") , (29, "nuy as feed") , (30, "jeih as feed") , (31, "nane-jeig as feed") , (32, "daa-yeig as feed") , (33, "tree-jeig as feed") , (34, "kiare-jeig as feed") , (35, "queig-jeig as feed") , (36, "shey-jeig as feed") , (37, "shiaght-jeig as feed") , (38, "hoght-jeig as feed") , (39, "nuy-jeig as feed") , (40, "daeed") , (41, "nane as daeed") , (42, "jees as daeed") , (43, "tree as daeed") , (44, "kiare as daeed") , (45, "queig as daeed") , (46, "shey as daeed") , (47, "shiaght as daeed") , (48, "hoght as daeed") , (49, "nuy as daeed") , (50, "jeih as daeed") , (51, "nane-jeig as daeed") , (52, "daa-yeig as daeed") , (53, "tree-jeig as daeed") , (54, "kiare-jeig as daeed") , (55, "queig-jeig as daeed") , (56, "shey-jeig as daeed") , (57, "shiaght-jeig as daeed") , (58, "hoght-jeig as daeed") , (59, "nuy-jeig as daeed") , (60, "tree feed") , (61, "tree feed as nane") , (62, "tree feed as jees") , (63, "tree feed as tree") , (64, "tree feed as kiare") , (65, "tree feed as queig") , (66, "tree feed as shey") , (67, "tree feed as shiaght") , (68, "tree feed as hoght") , (69, "tree feed as nuy") , (70, "tree feed as jeih") , (71, "tree feed as nane-jeig") , (72, "tree feed as daa-yeig") , (73, "tree feed as tree-jeig") , (74, "tree feed as kiare-jeig") , (75, "tree feed as queig-jeig") , (76, "tree feed as shey-jeig") , (77, "tree feed as shiaght-jeig") , (78, "tree feed as hoght-jeig") , (79, "tree feed as nuy-jeig") , (80, "kiare feed") , (81, "kiare feed as nane") , (82, "kiare feed as jees") , (83, "kiare feed as tree") , (84, "kiare feed as kiare") , (85, "kiare feed as queig") , (86, "kiare feed as shey") , (87, "kiare feed as shiaght") , (88, "kiare feed as hoght") , (89, "kiare feed as nuy") , (90, "kiare feed as jeih") , (91, "kiare feed as nane-jeig") , (92, "kiare feed as daa-yeig") , (93, "kiare feed as tree-jeig") , (94, "kiare feed as kiare-jeig") , (95, "kiare feed as queig-jeig") , (96, "kiare 
feed as shey-jeig") , (97, "kiare feed as shiaght-jeig") , (98, "kiare feed as hoght-jeig") , (99, "kiare feed as nuy-jeig") , (100, "keead") , (200, "daa cheead") , (300, "tree cheead") , (400, "kiare cheead") , (500, "queig cheead") , (600, "shey cheead") , (700, "shiaght cheead") , (800, "hoght cheead") , (900, "nuy cheead") , (1000, "thousane") -- , (dec 6, "jeih cheead thousane") ] ) ]
telser/numerals
src-test/Text/Numeral/Language/GLV/TestData.hs
bsd-3-clause
4,691
0
8
1,365
1,077
721
356
122
1
{-# LANGUAGE DeriveDataTypeable #-} module Hans.Layer.Tcp.Socket ( -- * Socket Layer Socket() , SocketError(..) , listenPort , acceptSocket , connect , sendSocket , closeSocket , readBytes , readLine ) where import Hans.Address.IP4 import Hans.Channel import Hans.Layer import Hans.Layer.Tcp.Monad import Hans.Message.Tcp (TcpPort(..)) import Network.TCP.LTS.User (tcp_process_user_request) import Network.TCP.Type.Base (IPAddr(..),SocketID,TCPAddr(..)) import Network.TCP.Type.Syscall (SockReq(..),SockRsp(..)) import Control.Exception (throwIO,Exception) import Control.Concurrent (MVar,newMVar,newEmptyMVar,takeMVar,putMVar) import Data.Typeable (Typeable) import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as L -- Socket Layer ---------------------------------------------------------------- data Socket = Socket { socketTcpHandle :: TcpHandle , socketId :: !SocketID , socketBuffer :: MVar L.ByteString } data SocketResult a = SocketResult a | SocketError SocketError data SocketError = ListenError String | AcceptError String | ConnectError String | SendError String | RecvError String | CloseError String deriving (Typeable,Show) instance Exception SocketError -- | Block on a socket operation, waiting for the TCP layer to finish an action. blockResult :: TcpHandle -> (MVar (SocketResult a) -> Tcp ()) -> IO a blockResult tcp k = do var <- newEmptyMVar send tcp (k var) sr <- takeMVar var case sr of SocketResult a -> return a SocketError se -> throwIO se -- | Call @output@ if the @Tcp@ action returns a @Just@. maybeOutput :: Tcp (Maybe (IO ())) -> Tcp () maybeOutput body = do mb <- body case mb of Just m -> output m Nothing -> return () -- | Listen on a port. listenPort :: TcpHandle -> TcpPort -> IO Socket listenPort tcp (TcpPort port) = blockResult tcp $ \ res -> do let mkError = SocketError . ListenError k rsp = case rsp of SockNew sid -> do buf <- newMVar L.empty putMVar res (SocketResult (Socket tcp sid buf)) SockError err -> putMVar res (mkError err) _ -> putMVar res (mkError "Unexpected response") maybeOutput (tcp_process_user_request (SockListen port,k)) -- | Accept a client connection on a @Socket@. acceptSocket :: Socket -> IO Socket acceptSocket sock = blockResult (socketTcpHandle sock) $ \ res -> do let mkError = SocketError . AcceptError k rsp = case rsp of SockNew sid -> do buf <- newMVar L.empty putMVar res (SocketResult (Socket (socketTcpHandle sock) sid buf)) SockError err -> putMVar res (mkError err) _ -> putMVar res (mkError "Unexpected response") maybeOutput (tcp_process_user_request (SockAccept (socketId sock),k)) -- | Connect to a remote server. connect :: TcpHandle -> IP4 -> IP4 -> TcpPort -> IO Socket connect tcp src dst (TcpPort port) = blockResult tcp $ \ res -> do let us = IPAddr (convertToWord32 src) them = TCPAddr (IPAddr (convertToWord32 dst), port) mkError = SocketError . ConnectError k rsp = case rsp of SockNew sid -> do buf <- newMVar L.empty putMVar res (SocketResult (Socket tcp sid buf)) SockError err -> putMVar res (mkError err) _ -> putMVar res (mkError "Unexpected response") maybeOutput (tcp_process_user_request (SockConnect us them,k)) -- | Send on a @Socket@. sendSocket :: Socket -> S.ByteString -> IO () sendSocket sock bytes = blockResult (socketTcpHandle sock) $ \ res -> do let mkError = SocketError . SendError k rsp = putMVar res $! case rsp of SockOK -> SocketResult () SockError err -> mkError err _ -> mkError "Unexpected response" maybeOutput (tcp_process_user_request (SockSend (socketId sock) bytes,k)) -- | Receive from a @Socket@. 
recvSocket :: Socket -> IO S.ByteString recvSocket sock = blockResult (socketTcpHandle sock) $ \ res -> do let mkError = SocketError . RecvError k rsp = putMVar res $! case rsp of SockData bs -> SocketResult bs SockError err -> mkError err _ -> mkError "Unexpected response" maybeOutput (tcp_process_user_request (SockRecv (socketId sock),k)) -- | Close a socket. closeSocket :: Socket -> IO () closeSocket sock = blockResult (socketTcpHandle sock) $ \ res -> do let mkError = SocketError . CloseError k rsp = putMVar res $! case rsp of SockOK -> SocketResult () SockError err -> mkError err _ -> mkError "Unexpected response" maybeOutput (tcp_process_user_request (SockClose (socketId sock),k)) -- Derived Interaction --------------------------------------------------------- -- | Read n bytes from a @Socket@. readBytes :: Socket -> Int -> IO S.ByteString readBytes sock goal = do buf <- takeMVar (socketBuffer sock) loop buf (fromIntegral (L.length buf)) where loop buf len | goal <= len = finish buf | otherwise = do bytes <- recvSocket sock if S.null bytes then finish buf else loop (buf `L.append` L.fromChunks [bytes]) (len + S.length bytes) finish buf = do let (as,bs) = L.splitAt (fromIntegral goal) buf putMVar (socketBuffer sock) bs return (S.concat (L.toChunks as)) -- | Read until a CRLF, LF or CR are read. readLine :: Socket -> IO S.ByteString readLine sock = do buf <- takeMVar (socketBuffer sock) loop False 0 buf where loop cr ix buf | L.null buf = fillBuffer cr ix buf | otherwise = case L.index buf ix of 0x0d -> loop True (ix+1) buf 0x0a -> finish (ix+1) buf _ | cr -> finish ix buf | otherwise -> loop False (ix+1) buf fillBuffer cr ix buf = do bytes <- recvSocket sock if S.null bytes then finish ix buf else loop cr ix (buf `L.append` L.fromChunks [bytes]) finish ix buf = do let (as,bs) = L.splitAt ix buf putMVar (socketBuffer sock) bs return (S.concat (L.toChunks as))
Tener/HaNS
src/Hans/Layer/Tcp/Socket.hs
bsd-3-clause
6,132
0
23
1,524
2,035
1,016
1,019
153
4
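The socket layer in the record above exposes a small blocking API (listenPort, acceptSocket, readLine, sendSocket, closeSocket). Below is a minimal sketch of a one-shot line echo built only from those calls; it assumes TcpHandle can be imported from Hans.Layer.Tcp.Monad (as the module above does), that the handle comes from an already-running HaNS TCP layer, and the port number is purely illustrative.

module EchoOnce (echoOnce) where

import Hans.Layer.Tcp.Monad (TcpHandle)   -- assumed export, as used by the module above
import Hans.Layer.Tcp.Socket
import Hans.Message.Tcp (TcpPort(..))

-- Accept a single client on an illustrative port, echo one line back, then close it.
echoOnce :: TcpHandle -> IO ()
echoOnce tcp = do
  listening <- listenPort tcp (TcpPort 7)
  client    <- acceptSocket listening
  line      <- readLine client
  sendSocket client line
  closeSocket client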
{-# LANGUAGE RecordWildCards, PatternGuards #-} module Text.HTML.TagSoup.Specification(parse) where import Text.HTML.TagSoup.Implementation import Data.Char (isAlpha, isAlphaNum, isDigit, toLower) -- We make some generalisations: -- <!name is a valid tag start closed by > -- <?name is a valid tag start closed by ?> -- </!name> is a valid closing tag -- </?name> is a valid closing tag -- <a "foo"> is a valid tag attibute in ! and ?, i.e missing an attribute name -- We also don't do lowercase conversion -- Entities are handled without a list of known entity names -- We don't have RCData, CData or Escape modes (only effects dat and tagOpen) data TypeTag = TypeNormal -- <foo | TypeXml -- <?foo | TypeDecl -- <!foo | TypeScript -- <script deriving Eq -- 2.4.1 Common parser idioms white x = x `elem` " \t\n\f\r" -- 8.2.4 Tokenization type Parser = S -> [Out] parse :: String -> [Out] parse = dat . state -- 8.2.4.1 Data state dat :: Parser dat S{..} = pos $ case hd of '&' -> charReference tl '<' -> tagOpen tl _ | eof -> [] _ -> hd & dat tl -- 8.2.4.2 Character reference data state charReference s = charRef dat False Nothing s -- 8.2.4.3 Tag open state tagOpen S{..} = case hd of '!' -> markupDeclOpen tl '/' -> closeTagOpen tl _ | isAlpha hd -> Tag & hd & tagName (if isScript s then TypeScript else TypeNormal) tl '>' -> errSeen "<>" & '<' & '>' & dat tl '?' -> neilXmlTagOpen tl -- NEIL _ -> errSeen "<" & '<' & dat s isScript = f "script" where f (c:cs) S{..} = toLower hd == c && f cs tl f [] S{..} = white hd || hd == '/' || hd == '>' || hd == '?' || eof -- seen "<?", emitted [] neilXmlTagOpen S{..} = case hd of _ | isAlpha hd -> Tag & '?' & hd & tagName TypeXml tl _ -> errSeen "<?" & '<' & '?' & dat s -- seen "?", expecting ">" neilXmlTagClose S{..} = pos $ case hd of '>' -> TagEnd & dat tl _ -> errSeen "?" & beforeAttName TypeXml s -- just seen ">" at the end, am given tl neilTagEnd typ S{..} | typ == TypeXml = pos $ errWant "?>" & TagEnd & dat s | typ == TypeScript = pos $ TagEnd & neilScriptBody s | otherwise = pos $ TagEnd & dat s -- Inside a <script> tag, only break on </script neilScriptBody o@S{..} | hd == '<', S{..} <- tl , hd == '/', S{..} <- tl , isScript s = dat o | eof = [] | otherwise = pos $ hd & neilScriptBody tl -- 8.2.4.4 Close tag open state -- Deviation: We ignore the if CDATA/RCDATA bits and tag matching -- Deviation: On </> we output </> to the text -- Deviation: </!name> is a closing tag, not a bogus comment closeTagOpen S{..} = case hd of _ | isAlpha hd || hd `elem` "?!" -> TagShut & hd & tagName TypeNormal tl '>' -> errSeen "</>" & '<' & '/' & '>' & dat tl _ | eof -> '<' & '/' & dat s _ -> errWant "tag name" & bogusComment s -- 8.2.4.5 Tag name state tagName typ S{..} = pos $ case hd of _ | white hd -> beforeAttName typ tl '/' -> selfClosingStartTag typ tl '>' -> neilTagEnd typ tl '?' | typ == TypeXml -> neilXmlTagClose tl _ | isAlpha hd -> hd & tagName typ tl _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> hd & tagName typ tl -- 8.2.4.6 Before attribute name state beforeAttName typ S{..} = pos $ case hd of _ | white hd -> beforeAttName typ tl '/' -> selfClosingStartTag typ tl '>' -> neilTagEnd typ tl '?' 
| typ == TypeXml -> neilXmlTagClose tl _ | typ /= TypeNormal && hd `elem` "\'\"" -> beforeAttValue typ s -- NEIL _ | hd `elem` "\"'<=" -> errSeen [hd] & AttName & hd & attName typ tl _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> AttName & hd & attName typ tl -- 8.2.4.7 Attribute name state attName typ S{..} = pos $ case hd of _ | white hd -> afterAttName typ tl '/' -> selfClosingStartTag typ tl '=' -> beforeAttValue typ tl '>' -> neilTagEnd typ tl '?' | typ == TypeXml -> neilXmlTagClose tl _ | hd `elem` "\"'<" -> errSeen [hd] & def _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> def where def = hd & attName typ tl -- 8.2.4.8 After attribute name state afterAttName typ S{..} = pos $ case hd of _ | white hd -> afterAttName typ tl '/' -> selfClosingStartTag typ tl '=' -> beforeAttValue typ tl '>' -> neilTagEnd typ tl '?' | typ == TypeXml -> neilXmlTagClose tl _ | typ /= TypeNormal && hd `elem` "\"'" -> AttVal & beforeAttValue typ s -- NEIL _ | hd `elem` "\"'<" -> errSeen [hd] & def _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> def where def = AttName & hd & attName typ tl -- 8.2.4.9 Before attribute value state beforeAttValue typ S{..} = pos $ case hd of _ | white hd -> beforeAttValue typ tl '\"' -> AttVal & attValueDQuoted typ tl '&' -> AttVal & attValueUnquoted typ s '\'' -> AttVal & attValueSQuoted typ tl '>' -> errSeen "=" & neilTagEnd typ tl '?' | typ == TypeXml -> neilXmlTagClose tl _ | hd `elem` "<=" -> errSeen [hd] & def _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> def where def = AttVal & hd & attValueUnquoted typ tl -- 8.2.4.10 Attribute value (double-quoted) state attValueDQuoted typ S{..} = pos $ case hd of '\"' -> afterAttValueQuoted typ tl '&' -> charRefAttValue (attValueDQuoted typ) (Just '\"') tl _ | eof -> errWant "\"" & dat s _ -> hd & attValueDQuoted typ tl -- 8.2.4.11 Attribute value (single-quoted) state attValueSQuoted typ S{..} = pos $ case hd of '\'' -> afterAttValueQuoted typ tl '&' -> charRefAttValue (attValueSQuoted typ) (Just '\'') tl _ | eof -> errWant "\'" & dat s _ -> hd & attValueSQuoted typ tl -- 8.2.4.12 Attribute value (unquoted) state attValueUnquoted typ S{..} = pos $ case hd of _ | white hd -> beforeAttName typ tl '&' -> charRefAttValue (attValueUnquoted typ) Nothing tl '>' -> neilTagEnd typ tl '?' | typ == TypeXml -> neilXmlTagClose tl _ | hd `elem` "\"'<=" -> errSeen [hd] & def _ | eof -> errWant (if typ == TypeXml then "?>" else ">") & dat s _ -> def where def = hd & attValueUnquoted typ tl -- 8.2.4.13 Character reference in attribute value state charRefAttValue :: Parser -> Maybe Char -> Parser charRefAttValue resume c s = charRef resume True c s -- 8.2.4.14 After attribute value (quoted) state afterAttValueQuoted typ S{..} = pos $ case hd of _ | white hd -> beforeAttName typ tl '/' -> selfClosingStartTag typ tl '>' -> neilTagEnd typ tl '?' 
| typ == TypeXml -> neilXmlTagClose tl _ | eof -> dat s _ -> errSeen [hd] & beforeAttName typ s -- 8.2.4.15 Self-closing start tag state selfClosingStartTag typ S{..} = pos $ case hd of _ | typ == TypeXml -> errSeen "/" & beforeAttName typ s '>' -> TagEndClose & dat tl _ | eof -> errWant ">" & dat s _ -> errSeen "/" & beforeAttName typ s -- 8.2.4.16 Bogus comment state bogusComment S{..} = Comment & bogusComment1 s bogusComment1 S{..} = pos $ case hd of '>' -> CommentEnd & dat tl _ | eof -> CommentEnd & dat s _ -> hd & bogusComment1 tl -- 8.2.4.17 Markup declaration open state markupDeclOpen S{..} = case hd of _ | Just s <- next "--" -> Comment & commentStart s _ | isAlpha hd -> Tag & '!' & hd & tagName TypeDecl tl -- NEIL _ | Just s <- next "[CDATA[" -> cdataSection s _ -> errWant "tag name" & bogusComment s -- 8.2.4.18 Comment start state commentStart S{..} = pos $ case hd of '-' -> commentStartDash tl '>' -> errSeen "<!-->" & CommentEnd & dat tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> hd & comment tl -- 8.2.4.19 Comment start dash state commentStartDash S{..} = pos $ case hd of '-' -> commentEnd tl '>' -> errSeen "<!--->" & CommentEnd & dat tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> '-' & hd & comment tl -- 8.2.4.20 Comment state comment S{..} = pos $ case hd of '-' -> commentEndDash tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> hd & comment tl -- 8.2.4.21 Comment end dash state commentEndDash S{..} = pos $ case hd of '-' -> commentEnd tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> '-' & hd & comment tl -- 8.2.4.22 Comment end state commentEnd S{..} = pos $ case hd of '>' -> CommentEnd & dat tl '-' -> errWant "-->" & '-' & commentEnd tl _ | white hd -> errSeen "--" & '-' & '-' & hd & commentEndSpace tl '!' -> errSeen "!" & commentEndBang tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> errSeen "--" & '-' & '-' & hd & comment tl -- 8.2.4.23 Comment end bang state commentEndBang S{..} = pos $ case hd of '>' -> CommentEnd & dat tl '-' -> '-' & '-' & '!' & commentEndDash tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> '-' & '-' & '!' 
& hd & comment tl -- 8.2.4.24 Comment end space state commentEndSpace S{..} = pos $ case hd of '>' -> CommentEnd & dat tl '-' -> commentEndDash tl _ | white hd -> hd & commentEndSpace tl _ | eof -> errWant "-->" & CommentEnd & dat s _ -> hd & comment tl -- 8.2.4.38 CDATA section state cdataSection S{..} = pos $ case hd of _ | Just s <- next "]]>" -> dat s _ | eof -> dat s _ | otherwise -> hd & cdataSection tl -- 8.2.4.39 Tokenizing character references -- Change from spec: this is reponsible for writing '&' if nothing is to be written charRef :: Parser -> Bool -> Maybe Char -> S -> [Out] charRef resume att end S{..} = case hd of _ | eof || hd `elem` "\t\n\f <&" || maybe False (== hd) end -> '&' & resume s '#' -> charRefNum resume s tl _ -> charRefAlpha resume att s charRefNum resume o S{..} = case hd of _ | hd `elem` "xX" -> charRefNum2 resume o True tl _ -> charRefNum2 resume o False s charRefNum2 resume o hex S{..} = case hd of _ | hexChar hex hd -> (if hex then EntityHex else EntityNum) & hd & charRefNum3 resume hex tl _ -> errSeen "&" & '&' & resume o charRefNum3 resume hex S{..} = case hd of _ | hexChar hex hd -> hd & charRefNum3 resume hex tl ';' -> EntityEnd True & resume tl _ -> EntityEnd False & errWant ";" & resume s charRefAlpha resume att S{..} = case hd of _ | isAlpha hd -> EntityName & hd & charRefAlpha2 resume att tl _ -> errSeen "&" & '&' & resume s charRefAlpha2 resume att S{..} = case hd of _ | alphaChar hd -> hd & charRefAlpha2 resume att tl ';' -> EntityEnd True & resume tl _ | att -> EntityEnd False & resume s _ -> EntityEnd False & errWant ";" & resume s alphaChar x = isAlphaNum x || x `elem` ":-_" hexChar False x = isDigit x hexChar True x = isDigit x || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')
ndmitchell/tagsoup
src/Text/HTML/TagSoup/Specification.hs
bsd-3-clause
10,765
0
14
2,824
4,171
1,981
2,190
210
10
module Main where

import Test.Hspec

import Data.Dfa.EquivalenceSpec
import Parser.BuildSpec
import Parser.DfaSpec
import ProgramExecutionSpec

main :: IO ()
main = hspec $ do
    rtSpec
    parseSpec
    dfaSpec
    equivalenceSpec
    isomorphismSpec
    dfaTextComparisonSpec
qfjp/csce_dfa_project_test
test/SpecMain.hs
bsd-3-clause
344
0
7
119
65
34
31
15
1
{-# LANGUAGE OverloadedStrings #-}

module Main where

import Turtle
import Turtle.Options.Scale (Scale, optScale, defScaleHelp)
import Turtle.Options.Percentage (optPercentage, defPercentageHelp)
import Turtle.Options.Quality (optQuality, defQualityHelp)
import Turtle.Options.Timecode (Timecode, optTimecode, defTimecodeHelp)

parser :: Parser (Scale, Float, Float, Timecode)
parser = (,,,)
  <$> optScale "scale" 's' defScaleHelp
  <*> optPercentage "percentage" 'p' defPercentageHelp
  <*> optQuality "quality" 'q' defQualityHelp
  <*> optTimecode "timecode" 't' defTimecodeHelp

main :: IO ()
main = do
  (scale, percent, quality, timecode) <- options "Turtle options example" parser
  putStrLn $ "Scale " ++ (show scale)
  putStrLn $ "Percent " ++ (show percent)
  putStrLn $ "Quality " ++ (show quality)
  putStrLn $ "Timecode " ++ (show timecode)
elaye/turtle-options
app/Main.hs
bsd-3-clause
883
0
9
152
251
138
113
19
1
{-# Language PatternGuards #-}

module Blub
   ( f
   ) where

import Ugah.Foo
import Control.Applicative

f :: Int -> Int
f i = i + 3
dan-t/hsimport
tests/goldenFiles/ReplaceCppTest3.hs
bsd-3-clause
136
0
5
35
41
24
17
7
1
module Ant.Influence ( lookupInfluence , makeInfluenceTable , makeDistanceTable , makeInfluenceMap , makeDistanceMap , moveIndex , DistanceTable , InfluenceTable ) where import Ant.Map import Ant.Point import Ant.RegionBuilder import Ant.IO import Ant.Square import Ant.Vector import Data.Bits import Control.Monad import qualified Data.Set as S import qualified Data.Map as M import qualified Data.Vector.Unboxed as U import qualified Data.Vector.Unboxed.Mutable as UM import qualified Data.Vector as V import qualified Data.Vector.Mutable as VM offsetTable :: [(Int, Size)] offsetTable = [ (1, Size (-1) 0) , (2, Size 1 0) , (4, Size 0 (-1)) , (8, Size 0 1) ] type Point' = (Int, Int) type InfluenceTable = V.Vector (U.Vector Point') type DistanceTable = V.Vector (U.Vector (Point', Float)) getPoints :: (Size -> Size -> a) -> Int -> Int -> [a] getPoints f radiusSq bits = [ f p o | p <- circlePoints radiusSq, o <- Size 0 0 : offsets] where offsets = map snd . filter ((> 0) . (bits .&.) . fst) $ offsetTable makeDistanceTable :: Int -> DistanceTable makeDistanceTable radiusSq = V.generate 16 influenceMap where influenceMap i = U.fromList $ M.toList . M.fromListWith min $ points where points = getPoints makePair radiusSq i makePair (Size x y) (Size x' y') = ((x + x', y + y'), sqrt (fromIntegral (x * x + y * y)) ) makeInfluenceTable :: Int -> InfluenceTable makeInfluenceTable radiusSq = V.generate 16 influenceMap where influenceMap i = U.fromList $ S.toList . S.fromList $ points where points = getPoints makePair radiusSq i makePair (Size x y) (Size x' y') = (x + x', y + y') moveIndex :: Map -> Point -> Int moveIndex world p = foldr (.|.) 0 offsets where offsets = map fst . filter (isLand . (world `at`) . (p `addSize`) . snd) $ offsetTable {-# INLINE moveIndex #-} lookupInfluence :: Map -> Point -> V.Vector a -> a lookupInfluence world p table = table `indexV` moveIndex world p {-# INLINE lookupInfluence #-} addOffset :: Point -> Point' -> Point addOffset (Point x y) (x', y') = Point (x + x') (y + y') {-# INLINE addOffset #-} makeInfluenceMap :: Map -> InfluenceTable -> [Point] -> U.Vector Int makeInfluenceMap world table ants = U.create $ do v <- UM.replicate (area (mapSize world)) 0 forM_ ants $ \ant -> do let offsets = lookupInfluence world ant table U.forM_ offsets $ \o -> do let i = mapSize world `wrapIndex` (ant `addOffset` o) n <- readU v i writeU v i (n + 1) return v makeDistanceMap :: Map -> DistanceTable -> [Point] -> U.Vector (Float, PointIndex) makeDistanceMap world table ants = U.create $ do v <- UM.replicate (area (mapSize world)) (1000.0, 0) forM_ ants $ \ant -> do let offsets = lookupInfluence world ant table U.forM_ offsets $ \(o, d) -> do let i = toIndex (ant `addOffset` o) d' <- readU v i writeU v i (min (d, toIndex ant ) d') return v where toIndex = (mapSize world `wrapIndex`)
Saulzar/Ants
Ant/Influence.hs
bsd-3-clause
3,338
0
21
991
1,209
650
559
75
1
{-# LANGUAGE CPP #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE TypeOperators #-} module DTypes.Classes.DApplicative ( DApplicative (..) , dpureTrafo , dliftA1, dliftA2, dliftA3, dliftA4, dliftA5, dliftA6, dliftA7, dliftA8 ) where import DTypes.Classes.DFunctor import DTypes.Compose import DTypes.Trafo #if MIN_VERSION_base(4,8,0) import Control.Applicative (liftA2) #else import Control.Applicative (Applicative (..), (<$>), liftA2) #endif infixl 4 <<*>>, <<*, *>> class DFunctor d => DApplicative (d :: (k -> *) -> *) where -- axioms (analog to applicative axioms): -- dpure idTrafo <<*>> d = r -- pureTrafo o <<*>> u <<*>> v <<*>> w = u <<*>> (v <<*>> w) -- pureTrafo f <<**>> dpure x = dpure (f x) -- u <*> dpure y = dpure ($$ y) <*> u dpure :: (forall (a :: k). f a) -> d f (<<*>>) :: d (f ==>> g) -> d f -> d g (*>>) :: d f -> d g -> d g s *>> t = (wrap1 id <<$ s) <<*>> t (<<*) :: d f -> d g -> d f (<<*) = dliftA2 const instance (Applicative f, DApplicative d) => DApplicative (Compose f d) where dpure x = Compose (pure (dpure x)) Compose f <<*>> Compose x = Compose (liftA2 (<<*>>) f x) dpureTrafo :: DApplicative d => (f ==> g) -> d (f ==>> g) dpureTrafo f = dpure (TrafoComp f) wrap1 :: (f a -> g a) -> (f ==>> g) a wrap1 = TrafoComp wrap2 :: (f a -> g a -> h a) -> (f ==>> g ==>> h) a wrap2 f = TrafoComp (wrap1 <$> f) wrap3 :: (f a -> g a -> h a -> k a) -> (f ==>> g ==>> h ==>> k) a wrap3 f = TrafoComp (wrap2 <$> f) wrap4 :: (f a -> g a -> h a -> i a -> j a) -> (f ==>> g ==>> h ==>> i ==>> j) a wrap4 f = TrafoComp (wrap3 <$> f) wrap5 :: (f a -> g a -> h a -> i a -> j a -> k a) -> (f ==>> g ==>> h ==>> i ==>> j ==>> k) a wrap5 f = TrafoComp (wrap4 <$> f) wrap6 :: (f a -> g a -> h a -> i a -> j a -> k a -> l a) -> (f ==>> g ==>> h ==>> i ==>> j ==>> k ==>> l) a wrap6 f = TrafoComp (wrap5 <$> f) wrap7 :: (f a -> g a -> h a -> i a -> j a -> k a -> l a -> m a) -> (f ==>> g ==>> h ==>> i ==>> j ==>> k ==>> l ==>> m) a wrap7 f = TrafoComp (wrap6 <$> f) dliftA1 :: DFunctor d => (forall a. f a -> g a) -> d f -> d g dliftA1 = dfmap dliftA2 :: DApplicative d => (forall a. f a -> g a -> h a) -> d f -> d g -> d h dliftA2 f s t = (wrap1 <$> f) <<$>> s <<*>> t dliftA3 :: DApplicative d => (forall a. f a -> g a -> h a -> i a) -> d f -> d g -> d h -> d i dliftA3 f s t u = (wrap2 <$> f) <<$>> s <<*>> t <<*>> u dliftA4 :: DApplicative d => (forall a. f a -> g a -> h a -> i a -> j a) -> d f -> d g -> d h -> d i -> d j dliftA4 f s t u v = (wrap3 <$> f) <<$>> s <<*>> t <<*>> u <<*>> v dliftA5 :: DApplicative d => (forall a. f a -> g a -> h a -> i a -> j a -> k a) -> d f -> d g -> d h -> d i -> d j -> d k dliftA5 f s t u v w = (wrap4 <$> f) <<$>> s <<*>> t <<*>> u <<*>> v <<*>> w dliftA6 :: DApplicative d => (forall a. f a -> g a -> h a -> i a -> j a -> k a -> l a) -> d f -> d g -> d h -> d i -> d j -> d k -> d l dliftA6 f s t u v w x = (wrap5 <$> f) <<$>> s <<*>> t <<*>> u <<*>> v <<*>> w <<*>> x dliftA7 :: DApplicative d => (forall a. f a -> g a -> h a -> i a -> j a -> k a -> l a -> m a) -> d f -> d g -> d h -> d i -> d j -> d k -> d l -> d m dliftA7 f s t u v w x y = (wrap6 <$> f) <<$>> s <<*>> t <<*>> u <<*>> v <<*>> w <<*>> x <<*>> y dliftA8 :: DApplicative d => (forall a. f a -> g a -> h a -> i a -> j a -> k a -> l a -> m a -> n a) -> d f -> d g -> d h -> d i -> d j -> d k -> d l -> d m -> d n dliftA8 f s t u v w x y z = (wrap7 <$> f) <<$>> s <<*>> t <<*>> u <<*>> v <<*>> w <<*>> x <<*>> y <<*>> z
timjb/ftypes
src/DTypes/Classes/DApplicative.hs
mit
3,609
0
17
1,068
2,036
996
1,040
96
1
module RegisteredUser where

newtype UserName =
  UserName String

newtype AccountNumber =
  AccountNumber Integer

data User =
    UnregisteredUser
  | RegisteredUser UserName AccountNumber

printUser :: User -> IO ()
printUser UnregisteredUser = putStrLn "UnregisteredUser"
printUser (RegisteredUser (UserName name) (AccountNumber acctNum)) =
  putStrLn $ name ++ " " ++ show acctNum
brodyberg/Notes
ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/RegisteredUser2.hs
mit
429
0
9
107
102
54
48
15
1
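For illustration only, the User type in the record above can be exercised as follows; the name and account number are made up.

module RegisteredUserDemo where

import RegisteredUser

-- Print one unregistered and one registered user.
demo :: IO ()
demo = do
  printUser UnregisteredUser
  printUser (RegisteredUser (UserName "ada") (AccountNumber 1234))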
module Graphics.ArrayObjects where

import Graphics.Rendering.OpenGL

makeArrayObject :: IO VertexArrayObject
makeArrayObject = genObjectName

withArrayObjectBound :: VertexArrayObject -> IO () -> IO ()
withArrayObjectBound vao m = do
    bindVertexArrayObject $= Just vao
    m
    bindVertexArrayObject $= Nothing
sgillis/HaskHull
src/Graphics/ArrayObjects.hs
gpl-3.0
316
0
8
49
78
39
39
9
1
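A small usage sketch of the two helpers in the record above: allocate a vertex array object and run an attribute-setup action while it is bound. The setup action and the surrounding OpenGL context are assumed to exist elsewhere.

module ArrayObjectsDemo (setupGeometry) where

import Graphics.ArrayObjects
import Graphics.Rendering.OpenGL (VertexArrayObject)

-- Create a VAO, run the supplied setup action while it is bound
-- (withArrayObjectBound unbinds it again afterwards), and return it.
setupGeometry :: IO () -> IO VertexArrayObject
setupGeometry configureAttribs = do
  vao <- makeArrayObject
  withArrayObjectBound vao configureAttribs
  return vao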
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.EC2.AllocateAddress -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Acquires an Elastic IP address. -- -- An Elastic IP address is for use either in the EC2-Classic platform or -- in a VPC. For more information, see -- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-ip-addresses-eip.html Elastic IP Addresses> -- in the /Amazon Elastic Compute Cloud User Guide/. -- -- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AllocateAddress.html AWS API Reference> for AllocateAddress. module Network.AWS.EC2.AllocateAddress ( -- * Creating a Request allocateAddress , AllocateAddress -- * Request Lenses , aaDomain , aaDryRun -- * Destructuring the Response , allocateAddressResponse , AllocateAddressResponse -- * Response Lenses , aarsAllocationId , aarsDomain , aarsPublicIP , aarsResponseStatus ) where import Network.AWS.EC2.Types import Network.AWS.EC2.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | /See:/ 'allocateAddress' smart constructor. data AllocateAddress = AllocateAddress' { _aaDomain :: !(Maybe DomainType) , _aaDryRun :: !(Maybe Bool) } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'AllocateAddress' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'aaDomain' -- -- * 'aaDryRun' allocateAddress :: AllocateAddress allocateAddress = AllocateAddress' { _aaDomain = Nothing , _aaDryRun = Nothing } -- | Set to 'vpc' to allocate the address for use with instances in a VPC. -- -- Default: The address is for use with instances in EC2-Classic. aaDomain :: Lens' AllocateAddress (Maybe DomainType) aaDomain = lens _aaDomain (\ s a -> s{_aaDomain = a}); -- | Checks whether you have the required permissions for the action, without -- actually making the request, and provides an error response. If you have -- the required permissions, the error response is 'DryRunOperation'. -- Otherwise, it is 'UnauthorizedOperation'. aaDryRun :: Lens' AllocateAddress (Maybe Bool) aaDryRun = lens _aaDryRun (\ s a -> s{_aaDryRun = a}); instance AWSRequest AllocateAddress where type Rs AllocateAddress = AllocateAddressResponse request = postQuery eC2 response = receiveXML (\ s h x -> AllocateAddressResponse' <$> (x .@? "allocationId") <*> (x .@? "domain") <*> (x .@? "publicIp") <*> (pure (fromEnum s))) instance ToHeaders AllocateAddress where toHeaders = const mempty instance ToPath AllocateAddress where toPath = const "/" instance ToQuery AllocateAddress where toQuery AllocateAddress'{..} = mconcat ["Action" =: ("AllocateAddress" :: ByteString), "Version" =: ("2015-04-15" :: ByteString), "Domain" =: _aaDomain, "DryRun" =: _aaDryRun] -- | /See:/ 'allocateAddressResponse' smart constructor. 
data AllocateAddressResponse = AllocateAddressResponse' { _aarsAllocationId :: !(Maybe Text) , _aarsDomain :: !(Maybe DomainType) , _aarsPublicIP :: !(Maybe Text) , _aarsResponseStatus :: !Int } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'AllocateAddressResponse' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'aarsAllocationId' -- -- * 'aarsDomain' -- -- * 'aarsPublicIP' -- -- * 'aarsResponseStatus' allocateAddressResponse :: Int -- ^ 'aarsResponseStatus' -> AllocateAddressResponse allocateAddressResponse pResponseStatus_ = AllocateAddressResponse' { _aarsAllocationId = Nothing , _aarsDomain = Nothing , _aarsPublicIP = Nothing , _aarsResponseStatus = pResponseStatus_ } -- | [EC2-VPC] The ID that AWS assigns to represent the allocation of the -- Elastic IP address for use with instances in a VPC. aarsAllocationId :: Lens' AllocateAddressResponse (Maybe Text) aarsAllocationId = lens _aarsAllocationId (\ s a -> s{_aarsAllocationId = a}); -- | Indicates whether this Elastic IP address is for use with instances in -- EC2-Classic ('standard') or instances in a VPC ('vpc'). aarsDomain :: Lens' AllocateAddressResponse (Maybe DomainType) aarsDomain = lens _aarsDomain (\ s a -> s{_aarsDomain = a}); -- | The Elastic IP address. aarsPublicIP :: Lens' AllocateAddressResponse (Maybe Text) aarsPublicIP = lens _aarsPublicIP (\ s a -> s{_aarsPublicIP = a}); -- | The response status code. aarsResponseStatus :: Lens' AllocateAddressResponse Int aarsResponseStatus = lens _aarsResponseStatus (\ s a -> s{_aarsResponseStatus = a});
fmapfmapfmap/amazonka
amazonka-ec2/gen/Network/AWS/EC2/AllocateAddress.hs
mpl-2.0
5,494
0
14
1,144
802
480
322
94
1
{- Copyright 2012-2013 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -} module Plush.Run.BuiltIns.Utilities ( -- $types RegularUtility, ShellUtility, SpecialUtility(..), DirectUtility(..), BuiltInUtility(..), unSpecial, unDirect, unBuiltIn, unUtility, ) where import Control.Monad.Trans.Class (lift) import Data.Maybe (fromMaybe) import qualified Data.Text as T import Plush.Run.Posix import Plush.Run.ShellExec import Plush.Run.Types import Plush.Types.CommandSummary -- $types -- Utilities built into the shell can be treated in several different ways. -- The kind of utility determines the environment in which it is executed, and -- how it may, or may not, affect shell state. The actual action for all -- built in utilities is @'ShellUtility' m@. These types wrap that and form -- a promise for how the utility operates. type RegularUtility m = Utility m ExitCode type ShellUtility m = Utility (ShellExec m) ShellStatus -- | Special Built-In Utilities (§2.14) -- These found without PATH search, and execute directly within the shell. -- In particular, variable assignments made with these commands affect the -- shell. newtype SpecialUtility m = SpecialUtility (CommandSummary -> Utility (ShellExec m) ShellStatus) -- | Direct Built-In Utilities (§2.9.1) -- These are executed without PATH search. These commands may have side-effects -- that affect the shell environment (for example cd changes the shells cwd), -- but are invoked as if they were external commands. In particular, variable -- assignments only affect the environment variables available to these commands -- and not the shell's variables. newtype DirectUtility m = DirectUtility (CommandSummary -> Utility (ShellExec m) ExitCode) -- | Regular Built-In Utilities (§2.9.1) -- If PATH search succeeds in finding an executable for these, then the built-in -- version may be executed instead. However, it is run in an environment -- equivalent to one the executable would have run in. These built-ins can't -- affect the shell state. newtype BuiltInUtility m = BuiltInUtility (CommandSummary -> Utility m ExitCode) unSpecial :: (PosixLike m) => String -> SpecialUtility m -> ShellUtility m unSpecial name (SpecialUtility csu) = bindSummary name csu unDirect :: (PosixLike m) => String -> DirectUtility m -> ShellUtility m unDirect name (DirectUtility csu) = wrapReturn $ bindSummary name csu unBuiltIn :: (PosixLike m) => String -> BuiltInUtility m -> ShellUtility m unBuiltIn name (BuiltInUtility csu) = wrapReturn $ bindSummary name csu' where csu' summ = case csu summ of (Utility e a) -> Utility (runLifted e) (runLifted a) runLifted exec args = lift $ exec args unUtility :: (PosixLike m) => String -> BuiltInUtility m -> RegularUtility m unUtility name (BuiltInUtility csu) = bindName name csu bindSummary :: (PosixLike m) => String -> (CommandSummary -> Utility (ShellExec m) e) -> Utility (ShellExec m) e bindSummary name csu = Utility (bind utilExecute) (bind utilAnnotate) where bind f args = getSummary name >>= ($args) . f . csu . 
fromMaybe s0 s0 = CommandSummary (T.pack name) T.empty [] bindName :: (PosixLike m) => String -> (CommandSummary -> Utility m e) -> Utility m e bindName name csu = csu s0 where s0 = CommandSummary (T.pack name) T.empty [] wrapReturn :: (Monad m) => Utility m ExitCode -> Utility m ShellStatus wrapReturn (Utility e a) = Utility (\args -> e args >>= return . StStatus) a
mzero/plush
src/Plush/Run/BuiltIns/Utilities.hs
apache-2.0
3,995
0
12
699
787
426
361
41
1
module ListSpec where

import Control.Applicative
import Language.Haskell.GhcMod
import Test.Hspec
import TestUtils

spec :: Spec
spec = do
    describe "modules" $ do
        it "contains at least `Data.Map'" $ do
            mdls <- runD $ lines <$> modules
            mdls `shouldContain` ["Data.Map"]
cabrera/ghc-mod
test/ListSpec.hs
bsd-3-clause
306
0
15
76
80
43
37
11
1
------------------------------------------------------------------------------
--- Knowledge-based Autonomic Heterogeneous Networks (KAHN)
--- @author Rajesh Krishnan, Cosocket LLC
--- @version September 2014
------------------------------------------------------------------------------
{-
Copyright (c) 2014, Cosocket LLC
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of Cosocket LLC nor the names of its contributors may be
  used to endorse or promote products derived from this software without
  specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-}
module KAHN.TimerManager where

import qualified Data.HashTable.IO as H
import Control.Concurrent.Timer(TimerIO,newTimer,repeatedStart,oneShotStart,stopTimer)
import Control.Concurrent.Suspend.Lifted(msDelay)
import Data.Maybe(isJust,fromJust)
import Data.Int(Int64)

type HashTable k v = H.BasicHashTable k v

type TimerManager = (HashTable String TimerIO)

timerManager :: IO (TimerManager)
timerManager = H.new

printKeys :: TimerManager -> IO ()
printKeys tM = H.toList tM >>= return . (map fst) >>= print

addTimer :: TimerManager -> String -> IO (TimerIO)
addTimer tM tN =
    H.lookup tM tN >>=
    (\t -> if (isJust t)
              then return (fromJust t)
              else newTimer >>= (\x -> H.insert tM tN x >> return x))

cancelTimer :: TimerManager -> String -> IO ()
cancelTimer tM tN =
    H.lookup tM tN >>=
    (\t -> if (isJust t)
              then stopTimer (fromJust t) >> H.delete tM tN
              else return ())

scheduleTimer :: TimerManager -> String -> Bool -> IO () -> Int64 -> IO (Bool)
scheduleTimer tM tN tR tF tD =
    addTimer tM tN >>=
    (\t -> if (tR)
              then repeatedStart t tF (msDelay tD)
              else oneShotStart t (tF >> H.delete tM tN) (msDelay tD))
Cosocket-LLC/kahn
v2.0/src/KAHN/TimerManager.hs
bsd-3-clause
2,976
0
14
527
476
258
218
24
2
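A minimal sketch of driving the timer manager from the record above: create a manager, schedule a repeating action under a name, and cancel it later. The period, wait time, and timer name are illustrative.

module TimerManagerDemo (demo) where

import KAHN.TimerManager
import Control.Concurrent (threadDelay)
import Control.Monad (void)

demo :: IO ()
demo = do
  tm <- timerManager
  -- run the action every 1000 ms under the name "heartbeat"
  void $ scheduleTimer tm "heartbeat" True (putStrLn "tick") 1000
  threadDelay 3500000          -- let a few ticks fire (3.5 s)
  cancelTimer tm "heartbeat"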
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/Data/Ratio/Compat.hs" #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
module Data.Ratio.Compat (
  module Base
) where

import Data.Ratio as Base
phischu/fragnix
tests/packages/scotty/Data.Ratio.Compat.hs
bsd-3-clause
235
0
4
78
23
17
6
6
0
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MagicHash #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE UnboxedTuples #-} {-# LANGUAGE Unsafe #-} {-# OPTIONS_HADDOCK not-home #-} ----------------------------------------------------------------------------- -- | -- Module : GHC.Natural -- Copyright : (C) 2014 Herbert Valerio Riedel, -- (C) 2011 Edward Kmett -- License : see libraries/base/LICENSE -- -- Maintainer : [email protected] -- Stability : internal -- Portability : non-portable (GHC Extensions) -- -- The arbitrary-precision 'Natural' number type. -- -- __Note__: This is an internal GHC module with an API subject to -- change. It's recommended use the "Numeric.Natural" module to import -- the 'Natural' type. -- -- @since 4.8.0.0 ----------------------------------------------------------------------------- module GHC.Natural ( -- * The 'Natural' number type -- -- | __Warning__: The internal implementation of 'Natural' -- (i.e. which constructors are available) depends on the -- 'Integer' backend used! Natural(..) , isValidNatural -- * Conversions , wordToNatural , naturalToWordMaybe -- * Checked subtraction , minusNaturalMaybe -- * Modular arithmetic , powModNatural ) where #include "MachDeps.h" #if defined(MIN_VERSION_integer_gmp) # define HAVE_GMP_BIGNAT MIN_VERSION_integer_gmp(1,0,0) #else # define HAVE_GMP_BIGNAT 0 #endif import GHC.Arr import GHC.Base import GHC.Exception #if HAVE_GMP_BIGNAT import GHC.Integer.GMP.Internals import Data.Word import Data.Int #endif import GHC.Num import GHC.Real import GHC.Read import GHC.Show import GHC.Enum import GHC.List import Data.Bits import Data.Data default () #if HAVE_GMP_BIGNAT -- TODO: if saturated arithmetic is to used, replace 'throw Underflow' by '0' -- | Type representing arbitrary-precision non-negative integers. -- -- Operations whose result would be negative -- @'throw' ('Underflow' :: 'ArithException')@. -- -- @since 4.8.0.0 data Natural = NatS# GmpLimb# -- ^ in @[0, maxBound::Word]@ | NatJ# {-# UNPACK #-} !BigNat -- ^ in @]maxBound::Word, +inf[@ -- -- __Invariant__: 'NatJ#' is used -- /iff/ value doesn't fit in -- 'NatS#' constructor. deriving (Eq,Ord) -- NB: Order of constructors *must* -- coincide with 'Ord' relation -- | Test whether all internal invariants are satisfied by 'Natural' value -- -- This operation is mostly useful for test-suites and/or code which -- constructs 'Integer' values directly. -- -- @since 4.8.0.0 isValidNatural :: Natural -> Bool isValidNatural (NatS# _) = True isValidNatural (NatJ# bn) = isTrue# (isValidBigNat# bn) && I# (sizeofBigNat# bn) > 0 {-# RULES "fromIntegral/Natural->Natural" fromIntegral = id :: Natural -> Natural "fromIntegral/Natural->Integer" fromIntegral = toInteger :: Natural->Integer "fromIntegral/Natural->Word" fromIntegral = naturalToWord "fromIntegral/Natural->Word8" fromIntegral = (fromIntegral :: Word -> Word8) . naturalToWord "fromIntegral/Natural->Word16" fromIntegral = (fromIntegral :: Word -> Word16) . naturalToWord "fromIntegral/Natural->Word32" fromIntegral = (fromIntegral :: Word -> Word32) . naturalToWord "fromIntegral/Natural->Int8" fromIntegral = (fromIntegral :: Int -> Int8) . naturalToInt "fromIntegral/Natural->Int16" fromIntegral = (fromIntegral :: Int -> Int16) . naturalToInt "fromIntegral/Natural->Int32" fromIntegral = (fromIntegral :: Int -> Int32) . naturalToInt #-} {-# RULES "fromIntegral/Word->Natural" fromIntegral = wordToNatural "fromIntegral/Word8->Natural" fromIntegral = wordToNatural . 
(fromIntegral :: Word8 -> Word) "fromIntegral/Word16->Natural" fromIntegral = wordToNatural . (fromIntegral :: Word16 -> Word) "fromIntegral/Word32->Natural" fromIntegral = wordToNatural . (fromIntegral :: Word32 -> Word) "fromIntegral/Int->Natural" fromIntegral = intToNatural "fromIntegral/Int8->Natural" fromIntegral = intToNatural . (fromIntegral :: Int8 -> Int) "fromIntegral/Int16->Natural" fromIntegral = intToNatural . (fromIntegral :: Int16 -> Int) "fromIntegral/Int32->Natural" fromIntegral = intToNatural . (fromIntegral :: Int32 -> Int) #-} #if WORD_SIZE_IN_BITS == 64 -- these RULES are valid for Word==Word64 & Int==Int64 {-# RULES "fromIntegral/Natural->Word64" fromIntegral = (fromIntegral :: Word -> Word64) . naturalToWord "fromIntegral/Natural->Int64" fromIntegral = (fromIntegral :: Int -> Int64) . naturalToInt "fromIntegral/Word64->Natural" fromIntegral = wordToNatural . (fromIntegral :: Word64 -> Word) "fromIntegral/Int64->Natural" fromIntegral = intToNatural . (fromIntegral :: Int64 -> Int) #-} #endif -- | @since 4.8.0.0 instance Show Natural where showsPrec p (NatS# w#) = showsPrec p (W# w#) showsPrec p (NatJ# bn) = showsPrec p (Jp# bn) -- | @since 4.8.0.0 instance Read Natural where readsPrec d = map (\(n, s) -> (fromInteger n, s)) . filter ((>= 0) . (\(x,_)->x)) . readsPrec d -- | @since 4.8.0.0 instance Num Natural where fromInteger (S# i#) | I# i# >= 0 = NatS# (int2Word# i#) fromInteger (Jp# bn) = bigNatToNatural bn fromInteger _ = throw Underflow (+) = plusNatural (*) = timesNatural (-) = minusNatural abs = id signum (NatS# 0##) = NatS# 0## signum _ = NatS# 1## negate (NatS# 0##) = NatS# 0## negate _ = throw Underflow -- | @since 4.8.0.0 instance Real Natural where toRational (NatS# w) = toRational (W# w) toRational (NatJ# bn) = toRational (Jp# bn) #if OPTIMISE_INTEGER_GCD_LCM {-# RULES "gcd/Natural->Natural->Natural" gcd = gcdNatural "lcm/Natural->Natural->Natural" lcm = lcmNatural #-} -- | Compute greatest common divisor. gcdNatural :: Natural -> Natural -> Natural gcdNatural (NatS# 0##) y = y gcdNatural x (NatS# 0##) = x gcdNatural (NatS# 1##) _ = (NatS# 1##) gcdNatural _ (NatS# 1##) = (NatS# 1##) gcdNatural (NatJ# x) (NatJ# y) = bigNatToNatural (gcdBigNat x y) gcdNatural (NatJ# x) (NatS# y) = NatS# (gcdBigNatWord x y) gcdNatural (NatS# x) (NatJ# y) = NatS# (gcdBigNatWord y x) gcdNatural (NatS# x) (NatS# y) = NatS# (gcdWord x y) -- | compute least common multiplier. 
lcmNatural :: Natural -> Natural -> Natural lcmNatural (NatS# 0##) _ = (NatS# 0##) lcmNatural _ (NatS# 0##) = (NatS# 0##) lcmNatural (NatS# 1##) y = y lcmNatural x (NatS# 1##) = x lcmNatural x y = (x `quot` (gcdNatural x y)) * y #endif -- | @since 4.8.0.0 instance Enum Natural where succ n = n `plusNatural` NatS# 1## pred n = n `minusNatural` NatS# 1## toEnum = intToNatural fromEnum (NatS# w) | i >= 0 = i where i = fromIntegral (W# w) fromEnum _ = errorWithoutStackTrace "fromEnum: out of Int range" enumFrom x = enumDeltaNatural x (NatS# 1##) enumFromThen x y | x <= y = enumDeltaNatural x (y-x) | otherwise = enumNegDeltaToNatural x (x-y) (NatS# 0##) enumFromTo x lim = enumDeltaToNatural x (NatS# 1##) lim enumFromThenTo x y lim | x <= y = enumDeltaToNatural x (y-x) lim | otherwise = enumNegDeltaToNatural x (x-y) lim ---------------------------------------------------------------------------- -- Helpers for 'Enum Natural'; TODO: optimise & make fusion work enumDeltaNatural :: Natural -> Natural -> [Natural] enumDeltaNatural !x d = x : enumDeltaNatural (x+d) d enumDeltaToNatural :: Natural -> Natural -> Natural -> [Natural] enumDeltaToNatural x0 delta lim = go x0 where go x | x > lim = [] | otherwise = x : go (x+delta) enumNegDeltaToNatural :: Natural -> Natural -> Natural -> [Natural] enumNegDeltaToNatural x0 ndelta lim = go x0 where go x | x < lim = [] | x >= ndelta = x : go (x-ndelta) | otherwise = [x] ---------------------------------------------------------------------------- -- | @since 4.8.0.0 instance Integral Natural where toInteger (NatS# w) = wordToInteger w toInteger (NatJ# bn) = Jp# bn divMod = quotRem div = quot mod = rem quotRem _ (NatS# 0##) = throw DivideByZero quotRem n (NatS# 1##) = (n,NatS# 0##) quotRem n@(NatS# _) (NatJ# _) = (NatS# 0##, n) quotRem (NatS# n) (NatS# d) = case quotRem (W# n) (W# d) of (q,r) -> (wordToNatural q, wordToNatural r) quotRem (NatJ# n) (NatS# d) = case quotRemBigNatWord n d of (# q,r #) -> (bigNatToNatural q, NatS# r) quotRem (NatJ# n) (NatJ# d) = case quotRemBigNat n d of (# q,r #) -> (bigNatToNatural q, bigNatToNatural r) quot _ (NatS# 0##) = throw DivideByZero quot n (NatS# 1##) = n quot (NatS# _) (NatJ# _) = NatS# 0## quot (NatS# n) (NatS# d) = wordToNatural (quot (W# n) (W# d)) quot (NatJ# n) (NatS# d) = bigNatToNatural (quotBigNatWord n d) quot (NatJ# n) (NatJ# d) = bigNatToNatural (quotBigNat n d) rem _ (NatS# 0##) = throw DivideByZero rem _ (NatS# 1##) = NatS# 0## rem n@(NatS# _) (NatJ# _) = n rem (NatS# n) (NatS# d) = wordToNatural (rem (W# n) (W# d)) rem (NatJ# n) (NatS# d) = NatS# (remBigNatWord n d) rem (NatJ# n) (NatJ# d) = bigNatToNatural (remBigNat n d) -- | @since 4.8.0.0 instance Ix Natural where range (m,n) = [m..n] inRange (m,n) i = m <= i && i <= n unsafeIndex (m,_) i = fromIntegral (i-m) index b i | inRange b i = unsafeIndex b i | otherwise = indexError b i "Natural" -- | @since 4.8.0.0 instance Bits Natural where NatS# n .&. NatS# m = wordToNatural (W# n .&. W# m) NatS# n .&. NatJ# m = wordToNatural (W# n .&. W# (bigNatToWord m)) NatJ# n .&. NatS# m = wordToNatural (W# (bigNatToWord n) .&. W# m) NatJ# n .&. NatJ# m = bigNatToNatural (andBigNat n m) NatS# n .|. NatS# m = wordToNatural (W# n .|. W# m) NatS# n .|. NatJ# m = NatJ# (orBigNat (wordToBigNat n) m) NatJ# n .|. NatS# m = NatJ# (orBigNat n (wordToBigNat m)) NatJ# n .|. 
NatJ# m = NatJ# (orBigNat n m) NatS# n `xor` NatS# m = wordToNatural (W# n `xor` W# m) NatS# n `xor` NatJ# m = NatJ# (xorBigNat (wordToBigNat n) m) NatJ# n `xor` NatS# m = NatJ# (xorBigNat n (wordToBigNat m)) NatJ# n `xor` NatJ# m = bigNatToNatural (xorBigNat n m) complement _ = errorWithoutStackTrace "Bits.complement: Natural complement undefined" bitSizeMaybe _ = Nothing bitSize = errorWithoutStackTrace "Natural: bitSize" isSigned _ = False bit i@(I# i#) | i < finiteBitSize (0::Word) = wordToNatural (bit i) | otherwise = NatJ# (bitBigNat i#) testBit (NatS# w) i = testBit (W# w) i testBit (NatJ# bn) (I# i#) = testBitBigNat bn i# -- TODO: setBit, clearBit, complementBit (needs more primitives) shiftL n 0 = n shiftL (NatS# 0##) _ = NatS# 0## shiftL (NatS# 1##) i = bit i shiftL (NatS# w) (I# i#) = bigNatToNatural $ shiftLBigNat (wordToBigNat w) i# shiftL (NatJ# bn) (I# i#) = bigNatToNatural $ shiftLBigNat bn i# shiftR n 0 = n shiftR (NatS# w) i = wordToNatural $ shiftR (W# w) i shiftR (NatJ# bn) (I# i#) = bigNatToNatural (shiftRBigNat bn i#) rotateL = shiftL rotateR = shiftR popCount (NatS# w) = popCount (W# w) popCount (NatJ# bn) = I# (popCountBigNat bn) zeroBits = NatS# 0## ---------------------------------------------------------------------------- -- | 'Natural' Addition plusNatural :: Natural -> Natural -> Natural plusNatural (NatS# 0##) y = y plusNatural x (NatS# 0##) = x plusNatural (NatS# x) (NatS# y) = case plusWord2# x y of (# 0##, l #) -> NatS# l (# h, l #) -> NatJ# (wordToBigNat2 h l) plusNatural (NatS# x) (NatJ# y) = NatJ# (plusBigNatWord y x) plusNatural (NatJ# x) (NatS# y) = NatJ# (plusBigNatWord x y) plusNatural (NatJ# x) (NatJ# y) = NatJ# (plusBigNat x y) -- | 'Natural' multiplication timesNatural :: Natural -> Natural -> Natural timesNatural _ (NatS# 0##) = NatS# 0## timesNatural (NatS# 0##) _ = NatS# 0## timesNatural x (NatS# 1##) = x timesNatural (NatS# 1##) y = y timesNatural (NatS# x) (NatS# y) = case timesWord2# x y of (# 0##, 0## #) -> NatS# 0## (# 0##, xy #) -> NatS# xy (# h , l #) -> NatJ# $ wordToBigNat2 h l timesNatural (NatS# x) (NatJ# y) = NatJ# $ timesBigNatWord y x timesNatural (NatJ# x) (NatS# y) = NatJ# $ timesBigNatWord x y timesNatural (NatJ# x) (NatJ# y) = NatJ# $ timesBigNat x y -- | 'Natural' subtraction. May @'throw' 'Underflow'@. minusNatural :: Natural -> Natural -> Natural minusNatural x (NatS# 0##) = x minusNatural (NatS# x) (NatS# y) = case subWordC# x y of (# l, 0# #) -> NatS# l _ -> throw Underflow minusNatural (NatS# _) (NatJ# _) = throw Underflow minusNatural (NatJ# x) (NatS# y) = bigNatToNatural $ minusBigNatWord x y minusNatural (NatJ# x) (NatJ# y) = bigNatToNatural $ minusBigNat x y -- | 'Natural' subtraction. Returns 'Nothing's for non-positive results. -- -- @since 4.8.0.0 minusNaturalMaybe :: Natural -> Natural -> Maybe Natural minusNaturalMaybe x (NatS# 0##) = Just x minusNaturalMaybe (NatS# x) (NatS# y) = case subWordC# x y of (# l, 0# #) -> Just (NatS# l) _ -> Nothing where minusNaturalMaybe (NatS# _) (NatJ# _) = Nothing minusNaturalMaybe (NatJ# x) (NatS# y) = Just $ bigNatToNatural $ minusBigNatWord x y minusNaturalMaybe (NatJ# x) (NatJ# y) | isTrue# (isNullBigNat# res) = Nothing | otherwise = Just (bigNatToNatural res) where res = minusBigNat x y -- | Convert 'BigNat' to 'Natural'. -- Throws 'Underflow' if passed a 'nullBigNat'. 
bigNatToNatural :: BigNat -> Natural bigNatToNatural bn | isTrue# (sizeofBigNat# bn ==# 1#) = NatS# (bigNatToWord bn) | isTrue# (isNullBigNat# bn) = throw Underflow | otherwise = NatJ# bn naturalToBigNat :: Natural -> BigNat naturalToBigNat (NatS# w#) = wordToBigNat w# naturalToBigNat (NatJ# bn) = bn -- | Convert 'Int' to 'Natural'. -- Throws 'Underflow' when passed a negative 'Int'. intToNatural :: Int -> Natural intToNatural i | i<0 = throw Underflow intToNatural (I# i#) = NatS# (int2Word# i#) naturalToWord :: Natural -> Word naturalToWord (NatS# w#) = W# w# naturalToWord (NatJ# bn) = W# (bigNatToWord bn) naturalToInt :: Natural -> Int naturalToInt (NatS# w#) = I# (word2Int# w#) naturalToInt (NatJ# bn) = I# (bigNatToInt bn) #else /* !HAVE_GMP_BIGNAT */ ---------------------------------------------------------------------------- -- Use wrapped 'Integer' as fallback; taken from Edward Kmett's nats package -- | Type representing arbitrary-precision non-negative integers. -- -- Operations whose result would be negative -- @'throw' ('Underflow' :: 'ArithException')@. -- -- @since 4.8.0.0 newtype Natural = Natural Integer -- ^ __Invariant__: non-negative 'Integer' deriving (Eq,Ord,Ix) -- | Test whether all internal invariants are satisfied by 'Natural' value -- -- This operation is mostly useful for test-suites and/or code which -- constructs 'Integer' values directly. -- -- @since 4.8.0.0 isValidNatural :: Natural -> Bool isValidNatural (Natural i) = i >= 0 -- | @since 4.8.0.0 instance Read Natural where readsPrec d = map (\(n, s) -> (Natural n, s)) . filter ((>= 0) . (\(x,_)->x)) . readsPrec d -- | @since 4.8.0.0 instance Show Natural where showsPrec d (Natural i) = showsPrec d i -- | @since 4.8.0.0 instance Num Natural where Natural n + Natural m = Natural (n + m) {-# INLINE (+) #-} Natural n * Natural m = Natural (n * m) {-# INLINE (*) #-} Natural n - Natural m | result < 0 = throw Underflow | otherwise = Natural result where result = n - m {-# INLINE (-) #-} abs (Natural n) = Natural n {-# INLINE abs #-} signum (Natural n) = Natural (signum n) {-# INLINE signum #-} fromInteger n | n >= 0 = Natural n | otherwise = throw Underflow {-# INLINE fromInteger #-} -- | 'Natural' subtraction. Returns 'Nothing's for non-positive results. -- -- @since 4.8.0.0 minusNaturalMaybe :: Natural -> Natural -> Maybe Natural minusNaturalMaybe x y | x >= y = Just (x - y) | otherwise = Nothing -- | @since 4.8.0.0 instance Bits Natural where Natural n .&. Natural m = Natural (n .&. m) {-# INLINE (.&.) #-} Natural n .|. Natural m = Natural (n .|. m) {-# INLINE (.|.) #-} xor (Natural n) (Natural m) = Natural (xor n m) {-# INLINE xor #-} complement _ = errorWithoutStackTrace "Bits.complement: Natural complement undefined" {-# INLINE complement #-} shift (Natural n) = Natural . shift n {-# INLINE shift #-} rotate (Natural n) = Natural . rotate n {-# INLINE rotate #-} bit = Natural . bit {-# INLINE bit #-} setBit (Natural n) = Natural . setBit n {-# INLINE setBit #-} clearBit (Natural n) = Natural . clearBit n {-# INLINE clearBit #-} complementBit (Natural n) = Natural . complementBit n {-# INLINE complementBit #-} testBit (Natural n) = testBit n {-# INLINE testBit #-} bitSizeMaybe _ = Nothing {-# INLINE bitSizeMaybe #-} bitSize = errorWithoutStackTrace "Natural: bitSize" {-# INLINE bitSize #-} isSigned _ = False {-# INLINE isSigned #-} shiftL (Natural n) = Natural . shiftL n {-# INLINE shiftL #-} shiftR (Natural n) = Natural . shiftR n {-# INLINE shiftR #-} rotateL (Natural n) = Natural . 
rotateL n {-# INLINE rotateL #-} rotateR (Natural n) = Natural . rotateR n {-# INLINE rotateR #-} popCount (Natural n) = popCount n {-# INLINE popCount #-} zeroBits = Natural 0 -- | @since 4.8.0.0 instance Real Natural where toRational (Natural a) = toRational a {-# INLINE toRational #-} -- | @since 4.8.0.0 instance Enum Natural where pred (Natural 0) = errorWithoutStackTrace "Natural.pred: 0" pred (Natural n) = Natural (pred n) {-# INLINE pred #-} succ (Natural n) = Natural (succ n) {-# INLINE succ #-} fromEnum (Natural n) = fromEnum n {-# INLINE fromEnum #-} toEnum n | n < 0 = errorWithoutStackTrace "Natural.toEnum: negative" | otherwise = Natural (toEnum n) {-# INLINE toEnum #-} enumFrom = coerce (enumFrom :: Integer -> [Integer]) enumFromThen x y | x <= y = coerce (enumFromThen :: Integer -> Integer -> [Integer]) x y | otherwise = enumFromThenTo x y 0 enumFromTo = coerce (enumFromTo :: Integer -> Integer -> [Integer]) enumFromThenTo = coerce (enumFromThenTo :: Integer -> Integer -> Integer -> [Integer]) -- | @since 4.8.0.0 instance Integral Natural where quot (Natural a) (Natural b) = Natural (quot a b) {-# INLINE quot #-} rem (Natural a) (Natural b) = Natural (rem a b) {-# INLINE rem #-} div (Natural a) (Natural b) = Natural (div a b) {-# INLINE div #-} mod (Natural a) (Natural b) = Natural (mod a b) {-# INLINE mod #-} divMod (Natural a) (Natural b) = (Natural q, Natural r) where (q,r) = divMod a b {-# INLINE divMod #-} quotRem (Natural a) (Natural b) = (Natural q, Natural r) where (q,r) = quotRem a b {-# INLINE quotRem #-} toInteger (Natural a) = a {-# INLINE toInteger #-} #endif -- | Construct 'Natural' from 'Word' value. -- -- @since 4.8.0.0 wordToNatural :: Word -> Natural #if HAVE_GMP_BIGNAT wordToNatural (W# w#) = NatS# w# #else wordToNatural w = Natural (fromIntegral w) #endif -- | Try downcasting 'Natural' to 'Word' value. -- Returns 'Nothing' if value doesn't fit in 'Word'. -- -- @since 4.8.0.0 naturalToWordMaybe :: Natural -> Maybe Word #if HAVE_GMP_BIGNAT naturalToWordMaybe (NatS# w#) = Just (W# w#) naturalToWordMaybe (NatJ# _) = Nothing #else naturalToWordMaybe (Natural i) | i <= maxw = Just (fromIntegral i) | otherwise = Nothing where maxw = toInteger (maxBound :: Word) #endif -- This follows the same style as the other integral 'Data' instances -- defined in "Data.Data" naturalType :: DataType naturalType = mkIntType "Numeric.Natural.Natural" -- | @since 4.8.0.0 instance Data Natural where toConstr x = mkIntegralConstr naturalType x gunfold _ z c = case constrRep c of (IntConstr x) -> z (fromIntegral x) _ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c ++ " is not of type Natural" dataTypeOf _ = naturalType -- | \"@'powModNatural' /b/ /e/ /m/@\" computes base @/b/@ raised to -- exponent @/e/@ modulo @/m/@. 
-- -- @since 4.8.0.0 powModNatural :: Natural -> Natural -> Natural -> Natural #if HAVE_GMP_BIGNAT powModNatural _ _ (NatS# 0##) = throw DivideByZero powModNatural _ _ (NatS# 1##) = NatS# 0## powModNatural _ (NatS# 0##) _ = NatS# 1## powModNatural (NatS# 0##) _ _ = NatS# 0## powModNatural (NatS# 1##) _ _ = NatS# 1## powModNatural (NatS# b) (NatS# e) (NatS# m) = NatS# (powModWord b e m) powModNatural b e (NatS# m) = NatS# (powModBigNatWord (naturalToBigNat b) (naturalToBigNat e) m) powModNatural b e (NatJ# m) = bigNatToNatural (powModBigNat (naturalToBigNat b) (naturalToBigNat e) m) #else -- Portable reference fallback implementation powModNatural _ _ 0 = throw DivideByZero powModNatural _ _ 1 = 0 powModNatural _ 0 _ = 1 powModNatural 0 _ _ = 0 powModNatural 1 _ _ = 1 powModNatural b0 e0 m = go b0 e0 1 where go !b e !r | odd e = go b' e' (r*b `mod` m) | e == 0 = r | otherwise = go b' e' r where b' = b*b `mod` m e' = e `unsafeShiftR` 1 -- slightly faster than "e `div` 2" #endif
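-- A brief usage sketch of the operations above (illustrative doctest-style
-- examples only; they assume these definitions are in scope, e.g. via GHC.Natural):
--
-- >>> powModNatural 2 10 1000        -- 2^10 `mod` 1000
-- 24
-- >>> minusNaturalMaybe 3 5          -- subtraction would underflow
-- Nothing
-- >>> naturalToWordMaybe (wordToNatural 42)
-- Just 42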
snoyberg/ghc
libraries/base/GHC/Natural.hs
bsd-3-clause
22,152
0
13
5,388
4,646
2,346
2,300
165
1
{-# LANGUAGE DeriveFunctor, FlexibleInstances, MultiParamTypeClasses, TemplateHaskell, GeneralizedNewtypeDeriving #-} module Graphics.UI.Bottle.Widget ( module Graphics.UI.Bottle.WidgetId , Widget(..), MEnter, R, Size , EnterResult(..), enterResultEvent, enterResultRect , EventHandlers , EventResult(..), eventResultFromCursor , keysEventMap, keysEventMapMovesCursor , eAnimIdMapping, eCursor , wMaybeEnter, wEventMap, wFrame, wFocalArea , wIsFocused, wSize , atWFrameWithSize, atEvents , takesFocus, doesntTakeFocus , backgroundColor, tint, liftView , strongerEvents, weakerEvents , translate, translateBy, scale, scaleDownContent, pad , overlayView ) where import Control.Applicative ((<$>), liftA2) import Control.Lens.Operators import Data.Derive.Monoid (makeMonoid) import Data.DeriveTH (derive) import Data.Monoid (Monoid(..)) import Data.Vector.Vector2 (Vector2) import Graphics.UI.Bottle.Animation (AnimId, R, Size) import Graphics.UI.Bottle.Direction (Direction) import Graphics.UI.Bottle.EventMap (EventMap) import Graphics.UI.Bottle.Rect (Rect(..)) import Graphics.UI.Bottle.View (View) import Graphics.UI.Bottle.WidgetId (Id(..), augmentId, toAnimId, joinId, subId) import qualified Control.Lens as Lens import qualified Data.Monoid as Monoid import qualified Graphics.DrawingCombinators as Draw import qualified Graphics.UI.Bottle.Animation as Anim import qualified Graphics.UI.Bottle.Direction as Direction import qualified Graphics.UI.Bottle.EventMap as EventMap import qualified Graphics.UI.Bottle.Rect as Rect data EventResult = EventResult { _eCursor :: Monoid.Last Id, _eAnimIdMapping :: Monoid.Endo AnimId } derive makeMonoid ''EventResult data EnterResult f = EnterResult { _enterResultRect :: Rect, _enterResultEvent :: f EventResult } type MEnter f = Maybe (Direction -> EnterResult f) type EventHandlers f = EventMap (f EventResult) data Widget f = Widget { _wIsFocused :: Bool , _wSize :: Size , _wFrame :: Anim.Frame , _wMaybeEnter :: MEnter f -- Nothing if we're not enterable , _wEventMap :: EventHandlers f , _wFocalArea :: Rect } Lens.makeLenses ''EnterResult Lens.makeLenses ''EventResult Lens.makeLenses ''Widget eventResultFromCursor :: Id -> EventResult eventResultFromCursor cursor = EventResult { _eCursor = Monoid.Last $ Just cursor , _eAnimIdMapping = mempty } atEvents :: (f EventResult -> g EventResult) -> Widget f -> Widget g atEvents func w = w { _wMaybeEnter = (Lens.mapped . Lens.mapped . enterResultEvent %~ func) $ _wMaybeEnter w , _wEventMap = fmap func $ _wEventMap w } liftView :: Anim.Size -> Anim.Frame -> Widget f liftView sz frame = Widget { _wIsFocused = False , _wSize = sz , _wFocalArea = Rect 0 sz , _wFrame = frame , _wEventMap = mempty , _wMaybeEnter = Nothing } atWFrameWithSize :: (Size -> Anim.Frame -> Anim.Frame) -> Widget f -> Widget f atWFrameWithSize f w = w & wFrame %~ f (w ^. wSize) -- TODO: Would be nicer as (Direction -> Id), but then TextEdit's "f" couldn't be ((,) String).. takesFocus :: Functor f => (Direction -> f Id) -> Widget f -> Widget f takesFocus enter w = w & wMaybeEnter .~ mEnter where mEnter = Just $ EnterResult focalArea . fmap eventResultFromCursor <$> enter focalArea = w ^. 
wFocalArea doesntTakeFocus :: Widget f -> Widget f doesntTakeFocus = wMaybeEnter .~ Nothing -- ^ If doesn't take focus, event map is ignored strongerEvents :: EventHandlers f -> Widget f -> Widget f strongerEvents events = wEventMap %~ (events `mappend`) -- ^ If doesn't take focus, event map is ignored weakerEvents :: EventHandlers f -> Widget f -> Widget f weakerEvents events = wEventMap %~ (`mappend` events) backgroundColor :: Int -> AnimId -> Draw.Color -> Widget f -> Widget f backgroundColor layer animId = atWFrameWithSize . Anim.backgroundColor animId layer tint :: Draw.Color -> Widget f -> Widget f tint color = wFrame %~ Anim.onImages (Draw.tint color) keysEventMap :: Functor f => [EventMap.ModKey] -> EventMap.Doc -> f () -> EventHandlers f keysEventMap keys doc act = (fmap . fmap . const) mempty $ EventMap.keyPresses keys doc act keysEventMapMovesCursor :: Functor f => [EventMap.ModKey] -> EventMap.Doc -> f Id -> EventHandlers f keysEventMapMovesCursor keys doc act = (fmap . fmap) eventResultFromCursor $ EventMap.keyPresses keys doc act -- TODO: This actually makes an incorrect widget because its size -- remains same, but it is now translated away from 0..size translate :: Vector2 R -> Widget f -> Widget f translate pos = (wFrame %~ Anim.translate pos) . (wFocalArea . Rect.topLeft %~ (+pos)) . (wMaybeEnter . Lens.mapped %~ (Lens.mapped . enterResultRect . Rect.topLeft %~ (+ pos)) . (Lens.argument . Direction.coordinates . Rect.topLeft %~ subtract pos)) translateBy :: (Vector2 R -> Vector2 R) -> Widget f -> Widget f translateBy mkPos w = (translate . mkPos . (^. wSize)) w w scale :: Vector2 R -> Widget f -> Widget f scale mult = (wFrame %~ Anim.scale mult) . (wFocalArea . Rect.topLeftAndSize %~ (* mult)) . (wMaybeEnter . Lens.traversed %~ (Lens.mapped . enterResultRect . Rect.topLeftAndSize %~ (*mult)) . (Lens.argument . Direction.coordinates . Rect.topLeftAndSize %~ (/mult))) . (wSize %~ (* mult)) -- | Scale down a widget without affecting its exported size scaleDownContent :: Vector2 R -> Vector2 R -> Widget f -> Widget f scaleDownContent factor align w = w & scale factor & translate ((w ^. wSize) * align * (1 - factor)) & wSize .~ (w ^. wSize) -- Surround a widget with padding pad :: Vector2 R -> Widget f -> Widget f pad p w = w & wSize .~ withPadding & translate p where withPadding = w^.wSize + 2*p overlayView :: View -> Widget f -> Widget f overlayView (size, frame) w = w & wSize %~ liftA2 max size & wFrame %~ mappend frame
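-- A small composition sketch (illustrative only; it assumes the Vector2
-- constructor from Data.Vector.Vector2 is in scope as `Vector2 x y`):
--
-- decorate :: Widget f -> Widget f
-- decorate = pad (Vector2 8 8) . scale (Vector2 2 2)
--
-- i.e. scale the widget's content by 2 on each axis, then surround it with
-- 8 units of padding on every side.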
schell/lamdu
bottlelib/Graphics/UI/Bottle/Widget.hs
gpl-3.0
5,909
0
14
1,090
1,824
1,004
820
-1
-1
{-# LANGUAGE DataKinds #-} module Math.Hclaws.Systems.ShallowWater ( system, solution1, solution2, solution3, solution4, solution5, ) where import Data.Proxy import Math.FTensor.Algebra import Math.FTensor.General as F import Math.Hclaws.Fan import Math.Hclaws.ConservationLaws h :: F.TensorBoxed '[2] Double -> Double h t = F.pIndex t (Proxy::Proxy '[0]) q :: F.TensorBoxed '[2] Double -> Double q t = F.pIndex t (Proxy::Proxy '[1]) v m = (q m) / (h m) rarefaction1 m a = [hn, hn * (a*2/3 + (q m) / (h m))] where hn = (sqrt (h m) - a/3)**2 shock1 m a = [hn, (q m) * hn / h0 - hn * (hn-h0) * sqrt (1/hn + 1/h0) / sqrt 2] where h0 = h m hn = h0 - a speed1 ul ur = (v ul) - hr * sqrt (1/hr + 1/(h ul)) / sqrt 2 where hr = h ur field1 :: CharField 2 field1 = CharField { λ = \m -> (q m) / (h m) - sqrt (h m) , r = \m -> (-2/3) *: [sqrt (h m), (q m) / sqrt (h m) - (h m)] , rarefactionCurve = rarefaction1 , shockCurve = shock1 , shockSpeed = speed1 , linearity = GNL } rarefaction2 m a = [hn, hn * (a*2/3 + (q m) / (h m))] where hn = (sqrt (h m) + a/3)**2 shock2 m a = [hn, (q m) * hn / h0 + hn * (hn-h0) * sqrt (1/hn + 1/h0) / sqrt 2] where h0 = h m hn = h0 + a speed2 ul ur = (v ul) + hr * sqrt (1/hr + 1/(h ul)) / sqrt 2 where hr = h ur field2 :: CharField 2 field2 = CharField { λ = \m -> (q m) / (h m) + sqrt (h m) , r = \m -> (2/3) *: [sqrt (h m), (q m) / sqrt (h m) + (h m)] , rarefactionCurve = rarefaction2 , shockCurve = shock2 , shockSpeed = speed2 , linearity = GNL } newton1 f f' x0 = x0 - (f x0) / (f' x0) newtonN :: (Double -> Double) -> (Double -> Double) -> Double -> Int -> Double newtonN _ _ x0 0 = x0 newtonN f f' x0 n = newtonN f f' (newton1 f f' x0) (n-1) solveRiemann' :: Vector 2 -> Vector 2 -> WaveFan 2 solveRiemann' uL uR | hL <= 0 || hR <= 0 || r2 uR >= r1 uL = -- region V or out of domain error "ShallowWater solveRiemann: out of domain" | uL == uR = Fan [] uL | r1 uR == r1 uL && hR < hL = -- along R1 Fan [(uL, rarefactionWave field1 1 uL uR)] uR | r2 uR == r2 uL && hR > hL = -- along R2 Fan [(uL, rarefactionWave field2 2 uL uR)] uR | s1 uR == s1 uL && hR > hL = -- along S1 Fan [(uL, shockWave field1 1 uL uR)] uR | s2 uR == s2 uL && hR < hL = -- along S2 Fan [(uL, shockWave field2 2 uL uR)] uR | r2 uR < r2 uL && s1 uR > s1 uL = -- region I, S1.R2 let hM = newtonN (f hR hL vR vL) (f' hR hL) ((hL+hR)/2) 50 qM = hM * (vL - (1 / sqrt 2) * (hM-hL) * c hM hL) uM = [hM, qM] in waveFan uM shockWave rarefactionWave | r1 uR > r1 uL && r2 uR > r2 uL = -- region II, R1.R2 let hM = ((2 * (sqrt hR + sqrt hL) - vR + vL) / 4)^(2::Int) qM = (r1 uL) * hM - 2 * hM**1.5 uM = [hM, qM] in waveFan uM rarefactionWave rarefactionWave | r1 uR < r1 uL && s2 uR > s2 uL = -- region III, R1.S2 let -- the eqn that determines hM in region III is the same -- as that for region I, but with the roles of uL, uR -- reversed hM = newtonN (f hL hR vL vR) (f' hL hR) ((hL+hR)/2) 50 qM = (r1 uL) * hM - 2 * hM**1.5 uM = [hM, qM] in waveFan uM rarefactionWave shockWave | s1 uR < s1 uL && s2 uR < s2 uL = -- region IV, S1.S2 let hM = newtonN g g' ((hL+hR)/2) 50 qM = hM * (vL - (hM-hL) * c hM hL / sqrt 2) uM = [hM, qM] in waveFan uM shockWave shockWave | otherwise = error "ShallowWater solveRiemann: can't happen" where waveFan :: Vector 2 -> (CharField 2 -> Int -> Vector 2 -> Vector 2 -> Wave 2) -> (CharField 2 -> Int -> Vector 2 -> Vector 2 -> Wave 2) -> WaveFan 2 waveFan uM waveA waveB = Fan [(uL, waveA field1 1 uL uM), (uM, waveB field2 2 uM uR)] uR hL = h uL qL = q uL hR = h uR vR = v uR vL = v uL c hr hm = sqrt (1/hr + 1/hm) d hr hm = 
(hr - hm) * c hr hm f hr hl vr vl hm = 2 * sqrt 2 * (sqrt hr - sqrt hm) - (hm-hl)*sqrt(1/hm+1/hl) - sqrt 2 * (vr - vl) f' hr hl hm = negate (c hr hm) + (hm - hl) / (2*hm^(2::Int)*c hr hm) - sqrt (2 / hm) g hm = d hR hm + d hL hm - sqrt 2 * (vR - vL) g' hm = (hm-hL) / (2 * hm^(2::Int) * c hL hm) - c hL hm + (hm-hR) / (2 * hm^(2::Int) * c hR hm) - c hR hm r1 pt = (v pt) + 2 * sqrt (h pt) r2 pt = (v pt) - 2 * sqrt (h pt) s1 pt = (q pt) - qL * (h pt) / hL + (h pt) * (h pt - hL) * sqrt (1/(h pt) + 1/hL) / sqrt 2 s2 pt = (q pt) - qL * (h pt) / hL - (h pt) * (h pt - hL) * sqrt (1/(h pt) + 1/hL) / sqrt 2 system :: System 2 system = System { flux = \m -> [q m, (q m)**2 / (h m) + (h m)^(2::Int)/2] , dFlux = \m -> [[0, 1], [(h m) - (v m)^(2::Int), 2 * (v m)]] , fields = [field1, field2] , solveRiemann = solveRiemann' } solution1 :: WaveFan 2 solution1 = Fan [(pt1, SWave Shock {speed = speed', sFamily = 0})] pt2 where pt1 = [1,1] pt2 = shock1 pt1 (-1) speed' = speed1 pt1 pt2 solution2 = solveRiemann system [1,1] [2,2] solution3 = solveRiemann system [1,1] [1,2] solution4 = solveRiemann system [1,1] [0.5,0.5] solution5 = solveRiemann system [1,1] [1,0]
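-- A further Riemann-problem sketch in the same style as solution2..solution5
-- (the left/right states below are arbitrary illustrative values inside the
-- solver's domain, i.e. with positive water height h):
--
-- solution6 :: WaveFan 2
-- solution6 = solveRiemann system [2, 1] [1, 0]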
mikebenfield/hclaws
src/Math/Hclaws/Systems/ShallowWater.hs
isc
5,383
0
19
1,798
3,061
1,583
1,478
142
1
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} module Web.Spock.Rest ( -- * Spock's route definition monad S.spock, S.SpockM, S.SpockCtxM , S.spockT, S.spockLimT, S.SpockT, S.SpockCtxT -- * Defining routes , S.Path, S.root, S.Var, S.var, S.static, (S.<//>) -- * Rendering routes , S.renderRoute -- * Content Encoders , CTypes(..), (:~>)(..), NoReqBody , ContentType(..), ContentWriter(..), ContentReader(..) , JSON(..) -- * Hooking routes , S.subcomponent, S.prehook , get, post, getpost, head, put, delete, patch, wire , S.StdMethod (..) -- * Adding Wai.Middleware , S.middleware -- * Safe actions , S.SafeAction (..) , S.safeActionPath , module Web.Spock.Shared -- * Helper classes and families , AllAre, AllHave ) where import Control.Monad.Trans import Data.Aeson import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BSL import Data.HList.FakePrelude import Data.HList.HCurry import qualified Data.HList.HList as H import Data.HVect hiding (head) import qualified Data.Text as T import GHC.Exts import Network.HTTP.Types.Status import Prelude hiding (curry, head, uncurry) import Web.Routing.SafeRouting hiding (renderRoute, singleton) import qualified Web.Spock as S import Web.Spock.Shared -- | Specify an action that will be run when the HTTP verb 'GET' and the given route match get :: (AllAre NoReqBody reqs, RestCallable reqs resps req resp xs n f ctx m) => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () get = wire S.GET -- | Specify an action that will be run when the HTTP verb 'POST' and the given route match post :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () post = wire S.POST -- | Specify an action that will be run when the HTTP verb 'GET'/'POST' and the given route match getpost :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () getpost r a = wire S.POST r a >> wire S.GET r a -- | Specify an action that will be run when the HTTP verb 'HEAD' and the given route match head :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () head = wire S.HEAD -- | Specify an action that will be run when the HTTP verb 'PUT' and the given route match put :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () put = wire S.PUT -- | Specify an action that will be run when the HTTP verb 'DELETE' and the given route match delete :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () delete = wire S.DELETE -- | Specify an action that will be run when the HTTP verb 'PATCH' and the given route match patch :: RestCallable reqs resps req resp xs n f ctx m => CTypes reqs resps -> Path xs -> f -> S.SpockCtxT ctx m () patch = wire S.PATCH -- | Convert 'HVect' to 'H.HList' vectToHlist :: HVect xs -> H.HList xs vectToHlist HNil = H.HNil vectToHlist (a :&: as) = H.HCons a (vectToHlist as) -- | Length of 'HVect' as 'HNat' type family HVectLenH (ts :: [*]) :: HNat where HVectLenH '[] = 'HZero HVectLenH (t ': ts) = 'HSucc (HVectLenH ts) -- | Type constraints for a rest callable function type RestCallable reqs resps req resp xs n f ctx m = ( 
AllHave ContentType reqs, AllHave ContentType resps , AllHave (ContentReader req) reqs, AllHave (ContentWriter resp) resps , HasRep xs, n ~ HVectLenH (req ': xs) , HCurry' n f (req ': xs) (S.ActionCtxT ctx m resp) , ArityFwd f n, ArityRev f n , MonadIO m ) -- | Proof that all elements in a type level list are equal to the first param type family AllAre (x :: *) (xs :: [*]) :: Constraint where AllAre x '[] = 'True ~ 'True AllAre x (y ': ys) = (x ~ y, AllAre x ys) -- | Proof that all types in a list conform to a constraint type family AllHave (c :: * -> Constraint) (xs :: [*]) :: Constraint where AllHave x '[] = 'True ~ 'True AllHave x (y ': ys) = (x y, AllHave x ys) -- | List that maps request content types to response content types data CTypes (reqs :: [*]) (resps :: [*]) where CtNull :: CTypes '[] '[] (:|:) :: (req :~> resp) -> CTypes as bs -> CTypes (req ': as) (resp ': bs) infixr 5 :|: -- | Type level information indicating that clients will not send any data data NoReqBody -- | Map a request content type to a response content type data (:~>) req resp where -- | Content-Type header must match the requests mimeType and Accept header the responses mimeType (:~>) :: req -> resp -> req :~> resp -- | Accept header must match the responses mimeType Only :: resp -> NoReqBody :~> resp -- | Data that can be parsed and/or serialized to json data JSON = JSON -- | Define a content type class ContentType ct where ctMimeType :: Proxy ct -> T.Text instance ContentType JSON where ctMimeType _ = "application/json" instance ContentType NoReqBody where ctMimeType _ = error "Library error, this should never be called" -- | Parse a value from a request bytestring class ContentReader a ct where crDecode :: Proxy ct -> BS.ByteString -> Either String a instance ContentReader () NoReqBody where crDecode _ _ = Right () -- | Serialize a value to a request bytestring class ContentWriter a ct where cwEncode :: Proxy ct -> a -> BS.ByteString instance ToJSON a => ContentWriter a JSON where cwEncode _ = BSL.toStrict . encode instance FromJSON a => ContentReader a JSON where crDecode _ = eitherDecodeStrict' matchesMimeType :: T.Text -> T.Text -> Bool matchesMimeType t needle = let (mimeTypeStr, _) = T.breakOn ";" t mimeTypes = map (T.toLower . T.strip) $ T.splitOn "," mimeTypeStr firstMatch [] = False firstMatch (x:xs) = x == needle || firstMatch xs in firstMatch mimeTypes -- | Specify an action that will be run when a HTTP verb and the given route match wire :: forall reqs resps req resp xs n f ctx m. ( RestCallable reqs resps req resp xs n f ctx m ) => S.StdMethod -> CTypes reqs resps -> S.Path xs -> f -> S.SpockCtxT ctx m () wire m ctypes path a = let fun :: H.HList (req ': xs) -> S.ActionCtxT ctx m resp fun = hUncurry a matcherLoop :: forall as bs. ( AllHave ContentType as, AllHave ContentType bs , AllHave (ContentReader req) as , AllHave (ContentWriter resp) bs ) => T.Text -> HVect xs -> CTypes as bs -> ActionCtxT ctx m () matcherLoop accept captures cts = case cts of CtNull -> do S.setStatus status500 text "Invalid request: Can not handle required Content-type." 
((rule :: x :~> y) :|: xs) -> let px :: Proxy x px = Proxy py :: Proxy y py = Proxy bodyHandler runCond = do res <- runCond if res then do bsBody <- S.body case crDecode px bsBody of Left errMsg -> do S.setStatus status500 text $ T.pack $ "Invalid JSON: " ++ errMsg Right (req :: req) -> do (result :: resp) <- fun (H.HCons req (vectToHlist captures)) S.setHeader "Content-Type" (ctMimeType py) S.bytes (cwEncode py result) else matcherLoop accept captures xs in case rule of Only _ -> bodyHandler $ return $ accept `matchesMimeType` ctMimeType py (_ :~> _) -> bodyHandler $ do mContentType <- header "content-type" case mContentType of Nothing -> return False Just t -> return $ accept `matchesMimeType` ctMimeType py && t `matchesMimeType` ctMimeType px handler :: HVect xs -> S.ActionCtxT ctx m () handler captures = do mAccept <- header "accept" case mAccept of Nothing -> do S.setStatus status500 text "Missing Accept header!" Just t -> matcherLoop t captures ctypes hook :: HVectElim xs (S.ActionCtxT ctx m ()) hook = curry handler in S.hookRoute m path hook
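-- A hedged sketch of how a client module importing Web.Spock.Rest might wire a
-- route with these combinators (User and lookupUser are hypothetical; User is
-- assumed to have a ToJSON instance so the ContentWriter constraint for JSON holds):
--
-- getUser :: MonadIO m => S.SpockCtxT ctx m ()
-- getUser =
--   get (Only JSON :|: CtNull) ("users" <//> var) $ \() userId ->
--     lookupUser (userId :: Int)
--
-- Because 'get' requires every request content type to be NoReqBody, the
-- handler's first argument is the unit value decoded by the
-- ContentReader () NoReqBody instance.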
agrafix/Spock-rest
src/Web/Spock/Rest.hs
mit
9,564
0
31
3,233
2,500
1,335
1,165
-1
-1
{-# LANGUAGE OverloadedStrings #-} module Main where import Network.Neks.Message (formatRequests, parseResponses) import Network.Neks.NetPack (netWrite, netRead) import Network.Neks.Actions (Request(Set, Get, Delete, Atomic), Reply(Found, NotFound)) import qualified Network as Net import System.IO (Handle) import System.Environment (getArgs) import Data.ByteString.Char8 (pack) import Control.Monad (when) import Control.Concurrent (forkIO) import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar) main = Net.withSocketsDo $ do args <- getArgs case args of (host:port:command) -> do let portID = Net.PortNumber . fromInteger . read $ port server <- Net.connectTo host portID case command of ["--set", k, v] -> set k v server ["--get", k] -> get k server ["--test"] -> test host portID _ -> putStrLn instructions ["--help"] -> putStrLn instructions -- To be explicit _ -> putStrLn instructions request :: Handle -> [Request] -> IO (Either String [Reply]) request server requests = do netWrite server $ formatRequests requests responseData <- netRead server return (responseData >>= parseResponses) set :: String -> String -> Handle -> IO () set k v server = do response <- request server [Set (pack k) (pack v)] case response of Left error -> putStrLn ("Error setting value: " ++ error) Right [] -> putStrLn "Set successful" get :: String -> Handle -> IO () get k server = do response <- request server [Get (pack k)] case response of Left error -> putStrLn ("Error getting value: " ++ error) Right response -> putStrLn ("Response received: " ++ show response) test :: String -> Net.PortID -> IO () test host port = do putStrLn "Spawning 50 threads x 200 requests x 100 transactions" locks <- sequence [newEmptyMVar | _ <- [1..50]] -- 50 threads threads <- sequence [forkIO (testWith lock host port) | lock <- locks] sequence_ [takeMVar lock | lock <- locks] -- Wait for threads to finish putStrLn "Test complete" where testWith lock host port = do server <- Net.connectTo host port sequence . 
replicate 200 $ do let requests = [Set k v | (k, v) <- zip testKeys testValues] ++ [Get k | k <- testKeys] responses <- request server requests when (responses /= Right [Found v | v <- testValues]) (error "Bad response") putMVar lock () testKeys = ["66991fb944", "afe0c0261a", "a4242d5dda", "d10db90845", "4384ecbfe", "a839702a82", "1ed8680b95", "0d2189d279", "f4b0795239", "a24d4e7e87", "28e24e1d51", "9bb0dfbfbd", "9776bad265", "89f79a8c71", "d50de7c1cd", "167a350f93", "36f41a6205", "f5bbd3bc20", "69a3d20bef", "33644bede7", "8744571558", "cd4ab79d3a", "8c26e6936c", "88c1d42e4e", "f31d532d05", "a9ad46aea2", "e9b0aeee64", "dffc6a25af", "90952b9dd", "04a136756e", "31ca38445e", "21c27b172", "5c09e01c46", "9b23b5ef27", "a9fd5ea170", "aa1718e735", "1ce6781a57", "a927b0584e", "e7aea00872", "52223f7078", "e620de282a", "1a4c71def8", "75bd1abc65", "af93442708", "2257127db4", "68ec4b4f7", "9b5473f839", "d453871c0f", "9657631a3d", "95503a22b9"] testValues = ["5e7a195e90", "accdfc69c4", "43be950623", "afed0a6890", "0d23711bcf", "3b3d9b4043", "139ba09036", "a54b56630d", "61a729c150", "34891805ca", "d3dc68c9d3", "e1b4943d72", "8731015486", "f8f626c071", "4262ca1f24", "3c55632f50", "d32b8b30ca", "3311af7221", "29144d27ea", "0e0f97257e", "d6a2e1086", "aae1906c17", "d57f58433f", "9232138b5e", "fd1711214f", "84a66c50ac", "9b65ffc322", "d2d447396e", "6fc6c53265", "5183bca85", "884a5cc1cf", "7914d452ae", "6e2a351fd8", "7fb80954be", "3c3f1bf0cd", "112e60a719", "4917c12e1c", "9aaf5cc6d1", "7ccd97a418", "48c91da08c", "349524f781", "7d248047c", "9bfec0c3a4", "c0de587385", "216dd64a29", "eac5049f63", "133a259613", "843e1f1ee3", "e9c11331c0", "48e720933e"] instructions = "Usage: NeksClient <host> <port> <args>\n" ++ "<args> are \"--test\", \"--get <key>\", or \"--set <key> <value>\""
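-- A minimal programmatic sketch using the helpers above (host name and port
-- number are placeholders):
--
-- singleGet :: IO ()
-- singleGet = Net.withSocketsDo $ do
--   server <- Net.connectTo "localhost" (Net.PortNumber 4050)
--   get "some-key" server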
wyager/Neks
Network/Neks/NeksClient.hs
mit
4,417
0
19
1,095
1,191
654
537
60
6
{-# LANGUAGE OverloadedStrings #-} module Shiva.Server ( runServer, testServer ) where import Paths_shiva (getDataFileName) import Shiva.Config import Shiva.Execute import Shiva.HTML import Shiva.Sources import Control.Monad.Except (runExceptT) import Control.Monad.IO.Class (liftIO) import Control.Monad.Reader (runReaderT) import Data.Text (Text) import Lucid import Network.Wai.Middleware.Static import Shiva.Table.ArticleContent (getRecent, insert) import Web.Scotty ---- Page generation ---- runHtmlGen :: ShivaData -> (a -> Html ()) -> ShivaM a -> IO (Html ()) runHtmlGen d f = flip runReaderT d . runShivaM . catchErrorPage . fmap f generateFeedPage :: ShivaData -> Text -> IO (Html ()) generateFeedPage d t = runHtmlGen d feedPage (loadFeedByTitleCode t) generateContentPage :: ShivaData -> Text -> IO (Html ()) generateContentPage d t = runHtmlGen d articlePage $ do art <- generateResultFromName t art <$ insert [art] generateMainPage :: ShivaData -> IO (Html ()) generateMainPage d = runHtmlGen d mainPage (getRecent 20) ---- Server ---- getStaticPath :: IO FilePath getStaticPath = getDataFileName "static" server :: FilePath -> ShivaData -> IO () server staticPath d = scotty 7777 $ do middleware $ staticPolicy (noDots >-> addBase staticPath) get "/" $ html =<< renderText <$> liftIO (generateMainPage d) get "/sources/:x" $ do x <- param "x" html =<< renderText <$> liftIO (generateFeedPage d x) get "/content/dn/:x" $ do x <- param "x" html =<< renderText <$> liftIO (generateContentPage d x) -- | Run the server (as an installed executable). runServer :: [Source] -> IO () runServer srcs = do path <- getStaticPath mconf <- runExceptT (loadEverything srcs) case mconf of Right conf -> server path conf Left err -> do putStrLn "No valid config file found. Try running 'shiva setup' first." putStrLn $ "Details: " ++ err -- | Run the server (from within ghci). Uses the repo's 'static' path instead of one -- set up by Stack or Cabal. Useful for testing out changes in styling or other static aspects. testServer :: IO () testServer = runIOX (loadEverything sources) >>= server "static" {- get :: RoutePattern -> ActionM () -> ScottyM () param :: Parsable a => Text -> ActionM a params :: ActionM [Param] html :: Text -> ActionM () renderText :: Html a -> Text -}
BlackBrane/shiva
src/Shiva/Server.hs
mit
2,539
0
14
611
657
329
328
54
2
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-} module Control.Monad.Future.Async ( AsyncT, Async , future, future_ , runAsyncT, runAsync , execAsyncT, execAsync , evalAsyncT, evalAsync ) where import Control.Applicative import Control.Arrow import Control.Monad import Control.Monad.Trans import Control.Monad.Future.Class import Data.Future.Event import Data.Future.Result import Data.Monoid -- | We maintain the following invariant: -- `waitFor (asyncEvent x)' is before any use of result of `(asyncAction x)' -- -- Exsamples: -- clExecKernel :: CLMem -> CLEvent -> IO CLEvent -- This function wait for events thene _async_ exec program _immedeately_ -- returning the event. But we don't want to do synchonization explicitly. -- -- execKernel :: AsyncR CLEvent CLMem -> Async () -- execKernel (AsyncR e mem) = future_ $ clExecKernel mem e -- -- and now if want run execKernel sync we just write -- -- _ <- execKernel mem -- -- if we want run execKernel async we write -- -- r <- async $ execKernel mem -- ..do something.. -- await r -- -- or we can even write something like this: -- -- execKernel mem -- -- will be executed sync. -- -- As rule of thumb: if you see MonadIO in signature then there are _maybe_ -- will be synchonization, if not then there are surely no. -- -- Exsamples of async: -- -- aRes <- async m -- bRes <- async m' -- ... -- a <- await aRes -- b <- await bRes -- data AsyncT e m a = AsyncT { runAsyncT :: m (e, a) } -- | 'AsyncT' version with 'IO' on top. type Async e a = AsyncT e IO a evalAsyncT :: Functor f => AsyncT e f a -> f a evalAsyncT = fmap snd . runAsyncT {-# INLINE evalAsyncT #-} execAsyncT :: Functor f => AsyncT e f a -> f e execAsyncT = fmap fst . runAsyncT {-# INLINE execAsyncT #-} runAsync :: Async e a -> IO (e, a) runAsync = runAsyncT {-# INLINE runAsync #-} evalAsync :: Async e a -> IO a evalAsync = evalAsyncT {-# INLINE evalAsync #-} execAsync :: Async e a -> IO e execAsync = execAsyncT {-# INLINE execAsync #-} -- | Resource will be accessible only in future. -- In contrast with lift -- resource should be synchonized. -- Normally this function should be used in only libs and -- but not in lib user code because 'future' is the only -- place there consistency can be violated. -- future :: m (e, a) -> AsyncT e m a future = AsyncT {-# INLINE future #-} future_ :: (Functor m, Event e) => m e -> AsyncT e m () future_ = AsyncT . fmap (flip (,) ()) -- XTupleSections? {-# INLINE future_ #-} -- We surely don't need synchonization here. Exsample: -- fmap (\io -> io >>= \x -> -- peekPtr blah blah x) -- will run only after action event is performed. -- -- prove: fmap id = id -- heh, need we? -- -- fmap id (Async e a) = Async (e (fmap id a)) -- (Async e a) = Async (e (fmap id a)) -- (Async e a) = Async (e a) -- -- prove: fmap f . fmap g = fmap (f . g) -- -- fmap f (fmap g (AsyncT e a)) = fmap (f . g) (AsyncT e a)) -- fmap f (AsyncT e (fmap g a)) = AsyncT e (fmap (f . g) a) -- AsyncT e (fmap f. (fmap g a)) = AsyncT e (fmap (f . g) a) -- AsyncT e (fmap (f . g) a)) = AsyncT e (fmap (f . g) a) -- instance Functor f => Functor (AsyncT e f) where fmap f = AsyncT . fmap (second f) . runAsyncT {-# INLINE fmap #-} instance (Applicative f, Event e, Monoid e) => Applicative (AsyncT e f) where pure = AsyncT . fmap ((,) noWait) . 
pure {-# INLINE pure #-} m <*> m' = AsyncT (g <$> runAsyncT m <*> runAsyncT m') where g (e, f) (e', x) = ((e <> e'), f x) {-# INLINE g #-} {-# INLINE (<*>) #-} ---------------------- A Consistency ------------------------------------------- -- === What is consistency? -- Here are that AsyncT consider as /consistency/: -- Expression of type `Async e a' is consistent iff: -- * either `a' in sync -- * or async and associated with /consistent/ event. -- Event is consistent iff `waitFor event >>' _makes_ future use of -- associated value consistent. -- -- === How relates action with consistency? -- return | lift in async context _pure_ computation which is -- | consistent _yet_. -- -- m >>= \x -> f x | make sure that `x' is consistent and chain the action -- with it. Here we basically have two cases: -- * `x' in m already in sync => `x' is consistent; -- * `x' in m not yet in sync so we should wait for => `x' is consistent. -- -- === How consistency is : consistency propagation. -- Here we'll try to prove that >>=/return always gives consistent AsyncT. -- base case: return --- consistent from definition of return. (already in sync) -- induction: if `m' is consistent and `f' gives a consistent action then -- (m >>= f) is consistent from definition of (>>=). -- `f' gives a consistent action if it takes a consistent value. -- (why? try to prove, but assume it's true for the first time) <TODO:> -- `f' takes a consistent value because of definition of (>>=). -- -- === Future note: 'future' as base case. -- Actually 'future' is the only place which can violate consistency and it -- happens iff event associated with value is not consistent. -- (see definition of consistent event) -- -- === Monad laws <TODO> -- m >>= return = m | return always gives consistent action + def of (>>=) -- return a >>= f = f a | iff `f' gives a consistent value. (see Future Note) -- (m >>= f) >>= g = m >>= (\x -> f x >>= g) -------------------------------------------------------------------------------- -- Return without newtype wrapper. pureRes :: (Monad m, Event e) => a -> m (e, a) pureRes = return . (,) noWait {-# INLINE pureRes #-} instance (MonadIO m, Event e) => Monad (AsyncT e m) where return = AsyncT . pureRes {-# INLINE return #-} m >>= f = AsyncT $ do (e, x) <- runAsyncT m waitForM e runAsyncT (f x) fail = AsyncT . fail -- or maybe: fail = AsyncT . fmap ((,) noWait) . fail -- will it violate consistency? instance (MonadPlus m, MonadIO m, Event e) => MonadPlus (AsyncT e m) where mzero = AsyncT mzero {-# INLINE mzero #-} mplus m m' = AsyncT (runAsyncT m `mplus` runAsyncT m') {-# INLINE mplus #-} -- Maybe MonadIO -> Monad? But it requres to remove Monad constraint -- in MonadFuture instance (MonadIO m, Event e) => MonadFuture (AsyncR e) (AsyncT e m) where async m = AsyncT $ do (e, a) <- runAsyncT m return (noWait, AsyncR e a) {-# INLINE async #-} await (AsyncR e a) = AsyncT (return (e, a)) {-# INLINE await #-} instance (MonadIO m, Event e) => MonadIO (AsyncT e m) where liftIO m = AsyncT (liftIO m >>= pureRes) {-# INLINE liftIO #-} instance Event e => MonadTrans (AsyncT e) where lift m = AsyncT (m >>= pureRes) {-# INLINE lift #-}
pxqr/monad-future
Control/Monad/Future/Async.hs
mit
6,848
0
11
1,629
1,098
634
464
77
1
{-# LANGUAGE OverloadedStrings #-} module Rx.Logger.TestLogLevelParser where import Control.Monad (forM_) import qualified Data.Set as Set import qualified Data.Text as Text import Test.Hspec import Test.HUnit (assertBool, assertFailure) import Rx.Logger.LogLevelParser import Rx.Logger.Types allLogLevels = Set.fromList $ enumFrom NOISY assertLogLevelParser :: Text.Text -> [LogLevel] -> IO () assertLogLevelParser input expected0 = do case parseLogLevel input of Just pred -> do forM_ (Set.toList expected) $ \level-> assertBool ("'" ++ inputS ++ "' should match level " ++ show level) (pred level) forM_ (Set.toList unexpected) $ \level-> assertBool ("'" ++ inputS ++ "' should not match level " ++ show level) (not $ pred level) Nothing -> assertFailure $ "'" ++ inputS ++ "' is an invalid input" where inputS = Text.unpack input expected = Set.fromList expected0 unexpected = Set.difference expected allLogLevels tests :: Spec tests = do describe "parseLogLevel" $ do describe "when only one log level is given" $ it "parses specified log level only" $ do assertLogLevelParser "noisy" [NOISY] assertLogLevelParser "NoIsY" [NOISY] describe "when many log levels are given" $ it "parses every specified log level" $ assertLogLevelParser "info, trace" [INFO, TRACE] describe "when input includes a comparision operator" $ it "parses every LogLevel in the given range" $ assertLogLevelParser "<= loud, >= warning" [NOISY, LOUD, WARNING, SEVERE] describe "when multiple log level and comparision operators" $ it "parses every LogLevel correctly" $ assertLogLevelParser "< loud, >= warning, trace" [NOISY, TRACE, WARNING, SEVERE] describe "when none is given" $ it "no LogLevel should match" $ do assertLogLevelParser "none" [] assertLogLevelParser "none, trace" [] assertLogLevelParser "none, >= trace" []
roman/Haskell-Reactive-Extensions
rx-logger/test/Rx/Logger/TestLogLevelParser.hs
mit
2,158
0
18
608
492
246
246
52
2
{-# htermination intersect :: [(Ratio Int)] -> [(Ratio Int)] -> [(Ratio Int)] #-}
import List
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/List_intersect_9.hs
mit
94
0
3
15
5
3
2
1
0
module ArgsParserSpec (main, spec) where

import Data.List
import Test.Hspec

import ArgsParser

main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "help" $ do
    it "contains correct execution command line" $ do
      help `shouldSatisfy` ("calculator -n:SAMPLE expr1 [expr2..n]" `isInfixOf`)

  describe "parseArgs" $ do
    it "returns HELP page, when no arguments are passed" $ do
      parseArgs [] `shouldBe` Left help

    it "returns HELP page, when not enough arguments are passed" $ do
      parseArgs ["-n:just_this"] `shouldBe` Left help

    it "returns Args with given Notation and expression" $ do
      parseArgs ["-n:X", ""] `shouldBe` Right Args { notation = "X", expressions = [""] }

    it "returns Args with given Notation and all expressions in given order" $ do
      parseArgs ["-n:MAGIC", "C", "A", "B"]
        `shouldBe` Right Args { notation = "MAGIC", expressions = ["C", "A", "B"] }

  describe "dump" $ do
    it "produces some message" $ do
      dump "program name" ["some", "arguments"] `shouldSatisfy` not . null

    it "contains program name in first line" $ do
      dump "AppName.exe" [] `shouldSatisfy` (\x -> "AppName.exe" `isInfixOf` (head . lines) x)
DominikJaniec/LearnHaskell
problems/calculator/test/ArgsParserSpec.hs
mit
1,232
0
19
290
355
185
170
-1
-1
-- MakeLowerCase
-- https://www.codewars.com/kata/57a059d753ba33229500001a
module MakeLower where

import Data.Char (toLower)

makeLowerCase :: String -> String
makeLowerCase = map toLower
gafiatulin/codewars
src/8 kyu/MakeLower.hs
mit
190
0
5
22
33
20
13
4
1
-- Project Euler Problem 5 - Smallest multiple
--
-- Smallest positive number evenly divisible by all of the numbers from 1 to 20
--
import Data.List

-- Note coprimes assumes the input list is sorted
coprimes :: (Integral a) => [a] -> [a]
coprimes [] = []
coprimes [x] = [x]
coprimes (x:xs) = x:(coprimes (xs \\ [2*x, 3*x .. (last xs)]))

factors n = if (n<=1) then [] else [x | x <- coprimes [2 .. n], (n `mod` x) == 0]

factorization n = if (n<=1) then [] else f ++ (factorization (n `div` product(f)))
    where f = factors n

-- multiset union of two lists
union_multi_pair a b =
    if ((a /= []) || (b /= []))
        then c ++ (union_multi_pair (a\\c) (b\\c))
        else []
    where c = union a b

-- multiset union of a list of lists
union_multi x =
    if (x /= [])
        then union_multi_pair (head x) (union_multi (tail x))
        else []

main = do
    print (product (union_multi (map factorization [1..20])))
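-- A one-line cross-check using Prelude's lcm (for 1..20 both approaches yield
-- 232792560):
--
-- mainLcm :: IO ()
-- mainLcm = print (foldr1 lcm [1..20])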
yunwilliamyu/programming-exercises
project_euler/p005_lcm.hs
cc0-1.0
885
2
14
181
411
226
185
13
2
module Configuration where

nnConfiguration :: [Int]
nnConfiguration = [2, 6, 4, 3]

initMagnitude :: Double
initMagnitude = 10

mutationRate :: Double
mutationRate = 0.05

mutationTypeThreshold :: Double
mutationTypeThreshold = 0.9

additiveMutationMagnitude :: Double
additiveMutationMagnitude = 0.1

destructiveMutationMagnitude :: Double
destructiveMutationMagnitude = 10

populationSize :: Int
populationSize = 80

tournamentSelectionK :: Int
tournamentSelectionK = 5

nIterations :: Int
nIterations = 100000

maxError :: Double
maxError = 1e-7
mrlovre/super-memory
Pro7/src/Configuration.hs
gpl-3.0
550
0
5
76
119
73
46
21
1
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings #-} module Ampersand.FSpec.ToFSpec.Calc ( deriveProofs , showProof, showPrf, conjuncts , quadsOfRules ) where import Ampersand.Basics import Ampersand.Classes import Ampersand.ADL1 import Ampersand.Core.ShowAStruct import Ampersand.FSpec.FSpec import Ampersand.FSpec.ToFSpec.NormalForms import Ampersand.Misc (Options(..)) import Data.List hiding (head) import qualified Data.Set as Set import Text.Pandoc.Builder testConfluence :: A_Context -> Blocks testConfluence context = let tcss = [(expr,tcs) | expr<-Set.elems $ expressionsIn context, let tcs=dfProofs expr, length tcs>1] sumt = sum (map (length.snd) tcss) in para ("Confluence analysis statistics from "<>(str.show.length.expressionsIn) context<>" expressions."<>linebreak)<> para ("This script contains "<>linebreak<>(str.show.length) tcss<> " non-confluent expressions "<>linebreak)<> para (linebreak<>"Total number of derived expressions: "<>(str.show) sumt<>linebreak)<> para ("Confluence analysis for "<>(str.name) context)<> mconcat [ para (linebreak<>"expression: "<>(str . showA) expr<>linebreak)<> bulletList [ showProof (para.str.showA) prf | (_,prf)<-tcs ] | (expr,tcs)<-tcss] deriveProofs :: Options -> A_Context -> Blocks deriveProofs opts context = testConfluence context<> para (linebreak<>"--------------"<>linebreak)<> para ("Rules and their conjuncts for "<>(str.name) context)<> bulletList [ para ("rule r: "<>str (name r)<>linebreak<> "formalExpression r: "<>str (showA (formalExpression r))<>linebreak<> "conjNF: "<>str (showA (conjNF opts (formalExpression r)))<>linebreak<> interText linebreak [ " conj: "<>str (showA conj) | conj<-conjuncts opts r ] ) | r<-Set.elems $ allRules context] where -- interText :: (Data.String.IsString a, Data.Monoid.Monoid a) => a -> [a] -> a interText _ [] = "" interText inbetween (xs:xss) = xs<>inbetween<>interText inbetween xss type Proof expr = [(expr,[String],String)] showProof :: (expr->Blocks) -> Proof expr -> Blocks showProof shw [(expr,ss,_)] = shw expr<> para ( str(" { "++intercalate " and " ss++" }")) showProof shw ((expr,ss,equ):prf) = shw expr<> para (if null ss then str equ else if null equ then str (unwords ss) else str equ<>str (" { "++intercalate " and " ss++" }"))<> showProof shw prf --where e'= if null prf then "" else let (expr,_,_):_ = prf in showHS options "" expr showProof _ [] = fromList [] -- showPrf is meant to circumvent Pandoc. For example when a proof needs to be shown in debugging texts. showPrf :: (expr->String) -> Proof expr -> [String] showPrf shw [(expr,_ ,_)] = [ " "++shw expr] showPrf shw ((expr,ss,equ):prf) = [ " "++shw expr] ++ (if null ss then [ equ ] else if null equ then [ unwords ss ] else [ equ++" { "++intercalate " and " ss++" }" ])++ showPrf shw prf showPrf _ [] = [] quadsOfRules :: Options -> Rules -> [Quad] quadsOfRules opts rules = makeAllQuads (converse [ (conj, Set.elems $ rc_orgRules conj) | conj <- makeAllConjs opts rules ]) -- Quads embody the "switchboard" of rules. A quad represents a "proto-rule" with the following meaning: -- whenever relation r is affected (i.e. tuples in r are inserted or deleted), -- the rule may have to be restored using functionality from one of the clauses. makeAllQuads :: [(Rule, [Conjunct])] -> [Quad] makeAllQuads conjsPerRule = [ Quad { qDcl = d , qRule = rule , qConjuncts = conjs } | (rule,conjs) <- conjsPerRule, d <-Set.elems $ bindedRelationsIn rule ]
AmpersandTarski/ampersand
src/Ampersand/FSpec/ToFSpec/Calc.hs
gpl-3.0
4,227
0
21
1,277
1,221
649
572
65
3
----------------------------------------------------------------------
-- |
-- Module      :  Text.TeX.Parser.Core
-- Copyright   :  2015-2017 Mathias Schenner,
--                2015-2016 Language Science Press.
-- License     :  GPL-3
--
-- Maintainer  :  [email protected]
-- Stability   :  experimental
-- Portability :  GHC
--
-- Low-level TeX parsers.
----------------------------------------------------------------------

module Text.TeX.Parser.Core
  ( -- * Parser type
    TeXParser
  , runTeXParser
    -- * Fundamental parser
  , satisfy
  ) where

import Text.Parsec
  (Parsec, ParseError, SourcePos, incSourceColumn, parse, tokenPrim)

import Text.TeX.Lexer.Token (Token)


-------------------- Parser type

-- | Parser for 'Token' input streams,
-- running over Identity monad without user state.
type TeXParser = Parsec [Token] ()

-- | Run a TeX parser on a 'Token' input stream.
runTeXParser :: TeXParser a -> String -> [Token] -> Either ParseError a
runTeXParser = parse


-------------------- Fundamental parsers

-- | Fundamental parser for 'Token' streams.
satisfy :: (Token -> Bool) -> TeXParser Token
satisfy p = tokenPrim show nextpos test
  where
    nextpos pos _ _ = updatePosToken pos
    test t = if p t then Just t else Nothing

-- Increment column for each token, ignore line number.
updatePosToken :: SourcePos -> SourcePos
updatePosToken = flip incSourceColumn 1
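-- A tiny derived combinator as a usage sketch (it assumes the Token type has
-- an Eq instance):
--
-- tok :: Token -> TeXParser Token
-- tok t = satisfy (== t)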
synsem/texhs
src/Text/TeX/Parser/Core.hs
gpl-3.0
1,399
0
8
253
219
131
88
16
2
{-| Module : Craeft.Lexer Description : A Parsec-based lexical analyzer for Craeft. Copyright : (c) Ian Kuehne, 2017 License : GPL-3 Maintainer : [email protected] Stability : experimental -} module Craeft.Lexer where import qualified Data.Char as Char import Control.Monad import Text.Parsec.String (Parser) import Text.Parsec.Char import Text.Parsec import Text.Parsec.Language (emptyDef) import qualified Text.Parsec.Token as Tok lexer :: Tok.TokenParser () lexer = Tok.makeTokenParser style where names = ["fn", "struct", "return", "if", "else"] operators = ["+", "/", "-", "+", "&&", "||", "&", ".", "->", "<:", ":>", "==", "!=", ">=", "<=", ">", "<", "="] style = emptyDef { Tok.commentLine = "//" , Tok.commentStart = "/*" , Tok.commentEnd = "*/" , Tok.nestedComments = True , Tok.identStart = letter <|> char '_' , Tok.identLetter = alphaNum <|> char '_' , Tok.reservedOpNames = operators , Tok.reservedNames = names } braces :: Parser a -> Parser a braces = Tok.braces lexer integer :: Parser Integer integer = Tok.integer lexer unsigned :: Parser Integer unsigned = Tok.natural lexer float :: Parser Double float = Tok.float lexer string :: Parser String string = Tok.stringLiteral lexer parens :: Parser a -> Parser a parens = Tok.parens lexer commaSep :: Parser a -> Parser [a] commaSep = Tok.commaSep lexer semiSep :: Parser a -> Parser [a] semiSep = Tok.semiSep lexer semi :: Parser () semi = void $ Tok.semi lexer identifier :: Parser String identifier = try $ do (x:xs) <- Tok.identifier lexer <?> "identifier" guard $ Char.isLower x return $ x:xs tname :: Parser String tname = try $ do (x:xs) <- Tok.identifier lexer <?> "type name" guard $ Char.isUpper x return $ x:xs reserved :: String -> Parser () reserved = Tok.reserved lexer reservedOp :: String -> Parser () reservedOp = Tok.reservedOp lexer openTemplate :: Parser () openTemplate = void $ reservedOp "<:" closeTemplate :: Parser () closeTemplate = void $ reservedOp ":>" arrow :: Parser () arrow = void $ reservedOp "->" equals :: Parser () equals = void $ reservedOp "=" dot :: Parser () dot = void $ Tok.dot lexer startLexer :: Parser () startLexer = Tok.whiteSpace lexer
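-- A quick interactive sketch (parseTest comes from Text.Parsec, which is
-- already imported above):
--
-- >>> parseTest (parens identifier) "(foo_bar)"
-- "foo_bar"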
ikuehne/craeft-hs
lib/Craeft/Lexer.hs
gpl-3.0
2,489
0
11
676
758
406
352
66
1
myButLast :: [a] -> a
myButLast xs = xs !! (length xs - 2)
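-- Note that the index-based definition is partial for lists with fewer than
-- two elements; a pattern-matching sketch that walks the list instead:
--
-- myButLast' :: [a] -> a
-- myButLast' [x, _] = x
-- myButLast' (_:xs) = myButLast' xs
-- myButLast' _      = error "myButLast': list has fewer than two elements"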
medik/lang-hack
Haskell/Ninety-Nine_Haskell_Problems/2.hs
gpl-3.0
59
0
8
14
35
18
17
2
1
{-# LANGUAGE DeriveDataTypeable #-}
module CFG5 where

import Prelude hiding ((++))
import Control.Monad
import HipSpec

(++) :: [a] -> [a] -> [a]
(x:xs) ++ ys = x:(xs ++ ys)
[]     ++ ys = ys

data E = T :+: E | Term T
  deriving (Typeable,Eq,Ord,Show)

data T = TX | TY
  deriving (Typeable,Eq,Ord,Show)

data Tok = C | D | X | Y | Plus
  deriving (Typeable,Eq,Ord,Show)

lin :: E -> [Tok]
lin (a :+: b) = linTerm a ++ [Plus] ++ lin b
lin (Term t)  = linTerm t

linTerm :: T -> [Tok]
linTerm TX = [X]
linTerm TY = [Y]

unambig u v = lin u =:= lin v ==> u =:= v

unambigTerm u v = linTerm u =:= linTerm v ==> u =:= v

injR u v w = v ++ u =:= w ++ u ==> v =:= w

inj1 x v w = v ++ [x] =:= w ++ [x] ==> v =:= w

injL u v w = u ++ v =:= u ++ w ==> v =:= w

lemma v w s t = lin v ++ s =:= lin w ++ t ==> (v,s) =:= (w,t)

lemmaTerm v w s t = linTerm v ++ s =:= linTerm w ++ t ==> (v,s) =:= (w,t)

instance Arbitrary E where
  arbitrary = sized arb
    where
      arb s = frequency
        [ (1, liftM Term arbitrary)
        , (s, liftM2 (:+:) arbitrary (arb (s-1)))
        ]

instance Arbitrary T where
  arbitrary = elements [TX,TY]

instance Arbitrary Tok where
  arbitrary = elements [C,D,X,Y,Plus]

instance Names E where
  names _ = ["u","v","w"]

instance Names T where
  names _ = ["t","t2","t3"]

instance Names Tok where
  names _ = ["a","b","c"]
danr/hipspec
examples/CFG5.hs
gpl-3.0
1,360
0
15
363
751
404
347
42
1
module TermTest where

import Language.Arith.Eval
import Language.Arith.Syntax
import Test.QuickCheck

instance Arbitrary Term where
  arbitrary = undefined
juanbono/tapl-haskell
arith/test/TermTest.hs
gpl-3.0
159
0
5
22
34
21
13
6
0
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DuplicateRecordFields #-} -- Request and related data types module Web.BitTorrent.Tracker.Types.Request where import qualified Data.Sequence as Sequence import Data.Int (Int32, Int64) import Web.BitTorrent.Tracker.Types.Common data Request = ConnectRequest ConnectRequestInner | AnnounceRequest AnnounceRequestInner | ScrapeRequest ScrapeRequestInner | InvalidRequest deriving (Show, Eq) data ConnectRequestInner = ConnectRequestInner { _transactionID :: !TransactionID } deriving (Show, Eq) data AnnounceRequestInner = AnnounceRequestInner { _connectionID :: !ConnectionID, _transactionID :: !TransactionID, _infoHash :: !InfoHash, _peerID :: !PeerID, _bytesDownloaded :: !BytesDownloaded, _bytesUploaded :: !BytesUploaded, _bytesLeft :: !BytesLeft, _announceEvent :: !AnnounceEvent, _ipAddress :: !IPAddress, _key :: !PeerKey, _peersWanted :: !PeersWanted, _port :: !PeerPort } deriving (Show, Eq) data ScrapeRequestInner = ScrapeRequestInner { _connectionID :: !ConnectionID, _transactionID :: !TransactionID, _infoHashes :: !(Sequence.Seq InfoHash) } deriving (Show, Eq) data AnnounceEvent = AnnounceEventCompleted | AnnounceEventStarted | AnnounceEventStopped | AnnounceEventNone deriving (Show, Eq) newtype BytesDownloaded = BytesDownloaded Int64 deriving (Show, Eq, Ord, Num, Enum, Real, Integral) newtype BytesLeft = BytesLeft Int64 deriving (Show, Eq, Ord, Num, Enum, Real, Integral) newtype BytesUploaded = BytesUploaded Int64 deriving (Show, Eq, Ord, Num, Enum, Real, Integral) newtype PeersWanted = PeersWanted Int32 deriving (Show, Eq, Ord, Num, Enum, Real, Integral)
greatest-ape/hs-bt-tracker
src/Web/BitTorrent/Tracker/Types/Request.hs
gpl-3.0
1,892
0
12
412
438
251
187
81
0
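A small sketch of how code elsewhere in the tracker might branch on the Request constructors defined above; the helper name is hypothetical, and only the constructors and the _infoHashes field shown in this module are assumed.

import qualified Data.Foldable as Foldable
import Web.BitTorrent.Tracker.Types.Request

-- Hypothetical helper: summarise a decoded request, e.g. for logging.
describeRequest :: Request -> String
describeRequest req = case req of
  ConnectRequest  inner -> "connect: "  ++ show inner
  AnnounceRequest inner -> "announce: " ++ show inner
  ScrapeRequest   inner -> "scrape covering "
                           ++ show (Foldable.length (_infoHashes inner))
                           ++ " info hashes"
  InvalidRequest        -> "invalid request"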
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Chat.Media.Download -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Downloads media. Download is supported on the URI -- \`\/v1\/media\/{+name}?alt=media\`. -- -- /See:/ <https://developers.google.com/hangouts/chat Google Chat API Reference> for @chat.media.download@. module Network.Google.Resource.Chat.Media.Download ( -- * REST Resource MediaDownloadResource -- * Creating a Request , mediaDownload , MediaDownload' -- * Request Lenses , mdXgafv , mdUploadProtocol , mdResourceName , mdAccessToken , mdUploadType , mdCallback ) where import Network.Google.Chat.Types import Network.Google.Prelude -- | A resource alias for @chat.media.download@ method which the -- 'MediaDownload'' request conforms to. type MediaDownloadResource = "v1" :> "media" :> Capture "resourceName" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Media :<|> "v1" :> "media" :> Capture "resourceName" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltMedia :> Get '[OctetStream] Stream -- | Downloads media. Download is supported on the URI -- \`\/v1\/media\/{+name}?alt=media\`. -- -- /See:/ 'mediaDownload' smart constructor. data MediaDownload' = MediaDownload'' { _mdXgafv :: !(Maybe Xgafv) , _mdUploadProtocol :: !(Maybe Text) , _mdResourceName :: !Text , _mdAccessToken :: !(Maybe Text) , _mdUploadType :: !(Maybe Text) , _mdCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'MediaDownload' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'mdXgafv' -- -- * 'mdUploadProtocol' -- -- * 'mdResourceName' -- -- * 'mdAccessToken' -- -- * 'mdUploadType' -- -- * 'mdCallback' mediaDownload :: Text -- ^ 'mdResourceName' -> MediaDownload' mediaDownload pMdResourceName_ = MediaDownload'' { _mdXgafv = Nothing , _mdUploadProtocol = Nothing , _mdResourceName = pMdResourceName_ , _mdAccessToken = Nothing , _mdUploadType = Nothing , _mdCallback = Nothing } -- | V1 error format. mdXgafv :: Lens' MediaDownload' (Maybe Xgafv) mdXgafv = lens _mdXgafv (\ s a -> s{_mdXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). mdUploadProtocol :: Lens' MediaDownload' (Maybe Text) mdUploadProtocol = lens _mdUploadProtocol (\ s a -> s{_mdUploadProtocol = a}) -- | Name of the media that is being downloaded. See -- ReadRequest.resource_name. mdResourceName :: Lens' MediaDownload' Text mdResourceName = lens _mdResourceName (\ s a -> s{_mdResourceName = a}) -- | OAuth access token. 
mdAccessToken :: Lens' MediaDownload' (Maybe Text) mdAccessToken = lens _mdAccessToken (\ s a -> s{_mdAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). mdUploadType :: Lens' MediaDownload' (Maybe Text) mdUploadType = lens _mdUploadType (\ s a -> s{_mdUploadType = a}) -- | JSONP mdCallback :: Lens' MediaDownload' (Maybe Text) mdCallback = lens _mdCallback (\ s a -> s{_mdCallback = a}) instance GoogleRequest MediaDownload' where type Rs MediaDownload' = Media type Scopes MediaDownload' = '[] requestClient MediaDownload''{..} = go _mdResourceName _mdXgafv _mdUploadProtocol _mdAccessToken _mdUploadType _mdCallback (Just AltJSON) chatService where go :<|> _ = buildClient (Proxy :: Proxy MediaDownloadResource) mempty instance GoogleRequest (MediaDownload MediaDownload') where type Rs (MediaDownload MediaDownload') = Stream type Scopes (MediaDownload MediaDownload') = Scopes MediaDownload' requestClient (MediaDownload MediaDownload''{..}) = go _mdResourceName _mdXgafv _mdUploadProtocol _mdAccessToken _mdUploadType _mdCallback (Just AltMedia) chatService where _ :<|> go = buildClient (Proxy :: Proxy MediaDownloadResource) mempty
brendanhay/gogol
gogol-chat/gen/Network/Google/Resource/Chat/Media/Download.hs
mpl-2.0
5,468
0
26
1,452
906
508
398
125
1
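For orientation, a rough sketch of how this generated resource is usually invoked through the gogol core API (newEnv, runGoogle, runResourceT and send from Network.Google, following the pattern in gogol's README). Credential discovery, scopes and error handling are elided, the resource name is a placeholder, and this has not been checked against a specific gogol version.

{-# LANGUAGE OverloadedStrings #-}
import Network.Google        -- newEnv, runGoogle, runResourceT, send
import Network.Google.Chat   -- mediaDownload, Media

-- Placeholder resource name; real names come from message attachments.
fetchMediaMetadata :: IO Media
fetchMediaMetadata = do
  env <- newEnv
  runResourceT . runGoogle env $
    send (mediaDownload "spaces/XXXX/attachments/YYYY")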
{-# LANGUAGE DataKinds #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} -- | -- Module : Network.Google.Blogger -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- API for access to the data within Blogger. -- -- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API Reference> module Network.Google.Blogger ( -- * Service Configuration bloggerService -- * OAuth Scopes , bloggerScope , bloggerReadOnlyScope -- * API Declaration , BloggerAPI -- * Resources -- ** blogger.blogUserInfos.get , module Network.Google.Resource.Blogger.BlogUserInfos.Get -- ** blogger.blogs.get , module Network.Google.Resource.Blogger.Blogs.Get -- ** blogger.blogs.getByUrl , module Network.Google.Resource.Blogger.Blogs.GetByURL -- ** blogger.blogs.listByUser , module Network.Google.Resource.Blogger.Blogs.ListByUser -- ** blogger.comments.approve , module Network.Google.Resource.Blogger.Comments.Approve -- ** blogger.comments.delete , module Network.Google.Resource.Blogger.Comments.Delete -- ** blogger.comments.get , module Network.Google.Resource.Blogger.Comments.Get -- ** blogger.comments.list , module Network.Google.Resource.Blogger.Comments.List -- ** blogger.comments.listByBlog , module Network.Google.Resource.Blogger.Comments.ListByBlog -- ** blogger.comments.markAsSpam , module Network.Google.Resource.Blogger.Comments.MarkAsSpam -- ** blogger.comments.removeContent , module Network.Google.Resource.Blogger.Comments.RemoveContent -- ** blogger.pageViews.get , module Network.Google.Resource.Blogger.PageViews.Get -- ** blogger.pages.delete , module Network.Google.Resource.Blogger.Pages.Delete -- ** blogger.pages.get , module Network.Google.Resource.Blogger.Pages.Get -- ** blogger.pages.insert , module Network.Google.Resource.Blogger.Pages.Insert -- ** blogger.pages.list , module Network.Google.Resource.Blogger.Pages.List -- ** blogger.pages.patch , module Network.Google.Resource.Blogger.Pages.Patch -- ** blogger.pages.publish , module Network.Google.Resource.Blogger.Pages.Publish -- ** blogger.pages.revert , module Network.Google.Resource.Blogger.Pages.Revert -- ** blogger.pages.update , module Network.Google.Resource.Blogger.Pages.Update -- ** blogger.postUserInfos.get , module Network.Google.Resource.Blogger.PostUserInfos.Get -- ** blogger.postUserInfos.list , module Network.Google.Resource.Blogger.PostUserInfos.List -- ** blogger.posts.delete , module Network.Google.Resource.Blogger.Posts.Delete -- ** blogger.posts.get , module Network.Google.Resource.Blogger.Posts.Get -- ** blogger.posts.getByPath , module Network.Google.Resource.Blogger.Posts.GetByPath -- ** blogger.posts.insert , module Network.Google.Resource.Blogger.Posts.Insert -- ** blogger.posts.list , module Network.Google.Resource.Blogger.Posts.List -- ** blogger.posts.patch , module Network.Google.Resource.Blogger.Posts.Patch -- ** blogger.posts.publish , module Network.Google.Resource.Blogger.Posts.Publish -- ** blogger.posts.revert , module Network.Google.Resource.Blogger.Posts.Revert -- ** blogger.posts.search , module Network.Google.Resource.Blogger.Posts.Search -- ** blogger.posts.update , module Network.Google.Resource.Blogger.Posts.Update -- ** blogger.users.get , module Network.Google.Resource.Blogger.Users.Get -- * Types -- ** 
PostsListOrderBy , PostsListOrderBy (..) -- ** PostsListView , PostsListView (..) -- ** PageViewsGetRange , PageViewsGetRange (..) -- ** PostUserInfo , PostUserInfo , postUserInfo , puiPostUserInfo , puiPost , puiKind -- ** CommentsListView , CommentsListView (..) -- ** PostAuthorImage , PostAuthorImage , postAuthorImage , paiURL -- ** PostUserInfosListStatus , PostUserInfosListStatus (..) -- ** PostList , PostList , postList , plEtag , plNextPageToken , plKind , plItems -- ** CommentInReplyTo , CommentInReplyTo , commentInReplyTo , cirtId -- ** CommentBlog , CommentBlog , commentBlog , cbId -- ** Pageviews , Pageviews , pageviews , pKind , pCounts , pBlogId -- ** PostLocation , PostLocation , postLocation , plSpan , plLat , plName , plLng -- ** BlogPosts , BlogPosts , blogPosts , bpTotalItems , bpItems , bpSelfLink -- ** PostsGetView , PostsGetView (..) -- ** Post' , Post' , post , posImages , posStatus , posEtag , posReaderComments , posLocation , posKind , posPublished , posURL , posBlog , posCustomMetaData , posContent , posReplies , posSelfLink , posAuthor , posId , posLabels , posUpdated , posTitleLink , posTitle -- ** PostsSearchOrderBy , PostsSearchOrderBy (..) -- ** CommentsListByBlogStatus , CommentsListByBlogStatus (..) -- ** PagesGetView , PagesGetView (..) -- ** PostUserInfosListOrderBy , PostUserInfosListOrderBy (..) -- ** Page , Page , page , pagStatus , pagEtag , pagKind , pagPublished , pagURL , pagBlog , pagContent , pagSelfLink , pagAuthor , pagId , pagUpdated , pagTitle -- ** BlogLocale , BlogLocale , blogLocale , blVariant , blCountry , blLanguage -- ** PageAuthor , PageAuthor , pageAuthor , paImage , paURL , paDisplayName , paId -- ** BlogsGetView , BlogsGetView (..) -- ** Blog , Blog , blog , bStatus , bKind , bPages , bLocale , bPublished , bURL , bCustomMetaData , bSelfLink , bName , bId , bUpdated , bPosts , bDescription -- ** BlogsGetByURLView , BlogsGetByURLView (..) -- ** CommentsListStatus , CommentsListStatus (..) -- ** BlogPages , BlogPages , blogPages , bpsTotalItems , bpsSelfLink -- ** PostBlog , PostBlog , postBlog , pbId -- ** BlogsListByUserStatus , BlogsListByUserStatus (..) -- ** PageList , PageList , pageList , pllEtag , pllNextPageToken , pllKind , pllItems -- ** UserLocale , UserLocale , userLocale , ulVariant , ulCountry , ulLanguage -- ** CommentAuthorImage , CommentAuthorImage , commentAuthorImage , caiURL -- ** User , User , user , uBlogs , uKind , uCreated , uLocale , uURL , uSelfLink , uAbout , uDisplayName , uId -- ** UserBlogs , UserBlogs , userBlogs , ubSelfLink -- ** PostReplies , PostReplies , postReplies , prTotalItems , prItems , prSelfLink -- ** BlogList , BlogList , blogList , blKind , blItems , blBlogUserInfos -- ** PagesListView , PagesListView (..) -- ** PageBlog , PageBlog , pageBlog , pId -- ** PostsListStatus , PostsListStatus (..) -- ** PostAuthor , PostAuthor , postAuthor , paaImage , paaURL , paaDisplayName , paaId -- ** PostPerUserInfo , PostPerUserInfo , postPerUserInfo , ppuiKind , ppuiBlogId , ppuiUserId , ppuiHasEditAccess , ppuiPostId -- ** BlogsListByUserView , BlogsListByUserView (..) -- ** PageviewsCountsItem , PageviewsCountsItem , pageviewsCountsItem , pciTimeRange , pciCount -- ** PostUserInfosListView , PostUserInfosListView (..) -- ** Comment , Comment , comment , cStatus , cPost , cKind , cPublished , cBlog , cContent , cSelfLink , cAuthor , cId , cUpdated , cInReplyTo -- ** CommentsGetView , CommentsGetView (..) 
-- ** CommentPost , CommentPost , commentPost , cpId -- ** PostsGetByPathView , PostsGetByPathView (..) -- ** BlogPerUserInfo , BlogPerUserInfo , blogPerUserInfo , bpuiPhotosAlbumKey , bpuiKind , bpuiBlogId , bpuiUserId , bpuiRole , bpuiHasAdminAccess -- ** PostUserInfosList , PostUserInfosList , postUserInfosList , puilNextPageToken , puilKind , puilItems -- ** PagesListStatus , PagesListStatus (..) -- ** CommentAuthor , CommentAuthor , commentAuthor , caImage , caURL , caDisplayName , caId -- ** BlogsListByUserRole , BlogsListByUserRole (..) -- ** BlogUserInfo , BlogUserInfo , blogUserInfo , buiKind , buiBlog , buiBlogUserInfo -- ** PageAuthorImage , PageAuthorImage , pageAuthorImage , pURL -- ** CommentList , CommentList , commentList , clEtag , clNextPageToken , clKind , clItems , clPrevPageToken -- ** PostImagesItem , PostImagesItem , postImagesItem , piiURL ) where import Network.Google.Blogger.Types import Network.Google.Prelude import Network.Google.Resource.Blogger.Blogs.Get import Network.Google.Resource.Blogger.Blogs.GetByURL import Network.Google.Resource.Blogger.Blogs.ListByUser import Network.Google.Resource.Blogger.BlogUserInfos.Get import Network.Google.Resource.Blogger.Comments.Approve import Network.Google.Resource.Blogger.Comments.Delete import Network.Google.Resource.Blogger.Comments.Get import Network.Google.Resource.Blogger.Comments.List import Network.Google.Resource.Blogger.Comments.ListByBlog import Network.Google.Resource.Blogger.Comments.MarkAsSpam import Network.Google.Resource.Blogger.Comments.RemoveContent import Network.Google.Resource.Blogger.Pages.Delete import Network.Google.Resource.Blogger.Pages.Get import Network.Google.Resource.Blogger.Pages.Insert import Network.Google.Resource.Blogger.Pages.List import Network.Google.Resource.Blogger.Pages.Patch import Network.Google.Resource.Blogger.Pages.Publish import Network.Google.Resource.Blogger.Pages.Revert import Network.Google.Resource.Blogger.Pages.Update import Network.Google.Resource.Blogger.PageViews.Get import Network.Google.Resource.Blogger.Posts.Delete import Network.Google.Resource.Blogger.Posts.Get import Network.Google.Resource.Blogger.Posts.GetByPath import Network.Google.Resource.Blogger.Posts.Insert import Network.Google.Resource.Blogger.Posts.List import Network.Google.Resource.Blogger.Posts.Patch import Network.Google.Resource.Blogger.Posts.Publish import Network.Google.Resource.Blogger.Posts.Revert import Network.Google.Resource.Blogger.Posts.Search import Network.Google.Resource.Blogger.Posts.Update import Network.Google.Resource.Blogger.PostUserInfos.Get import Network.Google.Resource.Blogger.PostUserInfos.List import Network.Google.Resource.Blogger.Users.Get {- $resources TODO -} -- | Represents the entirety of the methods and resources available for the Blogger API service. 
type BloggerAPI = PostUserInfosListResource :<|> PostUserInfosGetResource :<|> UsersGetResource :<|> PageViewsGetResource :<|> BlogsListByUserResource :<|> BlogsGetResource :<|> BlogsGetByURLResource :<|> PagesInsertResource :<|> PagesListResource :<|> PagesPatchResource :<|> PagesGetResource :<|> PagesRevertResource :<|> PagesDeleteResource :<|> PagesUpdateResource :<|> PagesPublishResource :<|> BlogUserInfosGetResource :<|> CommentsListResource :<|> CommentsGetResource :<|> CommentsListByBlogResource :<|> CommentsRemoveContentResource :<|> CommentsApproveResource :<|> CommentsMarkAsSpamResource :<|> CommentsDeleteResource :<|> PostsInsertResource :<|> PostsListResource :<|> PostsPatchResource :<|> PostsGetResource :<|> PostsRevertResource :<|> PostsGetByPathResource :<|> PostsSearchResource :<|> PostsDeleteResource :<|> PostsUpdateResource :<|> PostsPublishResource
rueshyna/gogol
gogol-blogger/gen/Network/Google/Blogger.hs
mpl-2.0
12,901
0
36
3,320
1,707
1,239
468
350
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.IAM.Projects.ServiceAccounts.List -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Lists ServiceAccounts for a project. -- -- /See:/ <https://cloud.google.com/iam/ Google Identity and Access Management (IAM) API Reference> for @iam.projects.serviceAccounts.list@. module Network.Google.Resource.IAM.Projects.ServiceAccounts.List ( -- * REST Resource ProjectsServiceAccountsListResource -- * Creating a Request , projectsServiceAccountsList , ProjectsServiceAccountsList -- * Request Lenses , psalXgafv , psalUploadProtocol , psalPp , psalAccessToken , psalUploadType , psalBearerToken , psalName , psalPageToken , psalPageSize , psalCallback ) where import Network.Google.IAM.Types import Network.Google.Prelude -- | A resource alias for @iam.projects.serviceAccounts.list@ method which the -- 'ProjectsServiceAccountsList' request conforms to. type ProjectsServiceAccountsListResource = "v1" :> Capture "name" Text :> "serviceAccounts" :> QueryParam "$.xgafv" Text :> QueryParam "upload_protocol" Text :> QueryParam "pp" Bool :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "bearer_token" Text :> QueryParam "pageToken" Text :> QueryParam "pageSize" (Textual Int32) :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] ListServiceAccountsResponse -- | Lists ServiceAccounts for a project. -- -- /See:/ 'projectsServiceAccountsList' smart constructor. data ProjectsServiceAccountsList = ProjectsServiceAccountsList' { _psalXgafv :: !(Maybe Text) , _psalUploadProtocol :: !(Maybe Text) , _psalPp :: !Bool , _psalAccessToken :: !(Maybe Text) , _psalUploadType :: !(Maybe Text) , _psalBearerToken :: !(Maybe Text) , _psalName :: !Text , _psalPageToken :: !(Maybe Text) , _psalPageSize :: !(Maybe (Textual Int32)) , _psalCallback :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'ProjectsServiceAccountsList' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'psalXgafv' -- -- * 'psalUploadProtocol' -- -- * 'psalPp' -- -- * 'psalAccessToken' -- -- * 'psalUploadType' -- -- * 'psalBearerToken' -- -- * 'psalName' -- -- * 'psalPageToken' -- -- * 'psalPageSize' -- -- * 'psalCallback' projectsServiceAccountsList :: Text -- ^ 'psalName' -> ProjectsServiceAccountsList projectsServiceAccountsList pPsalName_ = ProjectsServiceAccountsList' { _psalXgafv = Nothing , _psalUploadProtocol = Nothing , _psalPp = True , _psalAccessToken = Nothing , _psalUploadType = Nothing , _psalBearerToken = Nothing , _psalName = pPsalName_ , _psalPageToken = Nothing , _psalPageSize = Nothing , _psalCallback = Nothing } -- | V1 error format. psalXgafv :: Lens' ProjectsServiceAccountsList (Maybe Text) psalXgafv = lens _psalXgafv (\ s a -> s{_psalXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). 
psalUploadProtocol :: Lens' ProjectsServiceAccountsList (Maybe Text) psalUploadProtocol = lens _psalUploadProtocol (\ s a -> s{_psalUploadProtocol = a}) -- | Pretty-print response. psalPp :: Lens' ProjectsServiceAccountsList Bool psalPp = lens _psalPp (\ s a -> s{_psalPp = a}) -- | OAuth access token. psalAccessToken :: Lens' ProjectsServiceAccountsList (Maybe Text) psalAccessToken = lens _psalAccessToken (\ s a -> s{_psalAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). psalUploadType :: Lens' ProjectsServiceAccountsList (Maybe Text) psalUploadType = lens _psalUploadType (\ s a -> s{_psalUploadType = a}) -- | OAuth bearer token. psalBearerToken :: Lens' ProjectsServiceAccountsList (Maybe Text) psalBearerToken = lens _psalBearerToken (\ s a -> s{_psalBearerToken = a}) -- | Required. The resource name of the project associated with the service -- accounts, such as \`projects\/my-project-123\`. psalName :: Lens' ProjectsServiceAccountsList Text psalName = lens _psalName (\ s a -> s{_psalName = a}) -- | Optional pagination token returned in an earlier -- ListServiceAccountsResponse.next_page_token. psalPageToken :: Lens' ProjectsServiceAccountsList (Maybe Text) psalPageToken = lens _psalPageToken (\ s a -> s{_psalPageToken = a}) -- | Optional limit on the number of service accounts to include in the -- response. Further accounts can subsequently be obtained by including the -- ListServiceAccountsResponse.next_page_token in a subsequent request. psalPageSize :: Lens' ProjectsServiceAccountsList (Maybe Int32) psalPageSize = lens _psalPageSize (\ s a -> s{_psalPageSize = a}) . mapping _Coerce -- | JSONP psalCallback :: Lens' ProjectsServiceAccountsList (Maybe Text) psalCallback = lens _psalCallback (\ s a -> s{_psalCallback = a}) instance GoogleRequest ProjectsServiceAccountsList where type Rs ProjectsServiceAccountsList = ListServiceAccountsResponse type Scopes ProjectsServiceAccountsList = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsServiceAccountsList'{..} = go _psalName _psalXgafv _psalUploadProtocol (Just _psalPp) _psalAccessToken _psalUploadType _psalBearerToken _psalPageToken _psalPageSize _psalCallback (Just AltJSON) iAMService where go = buildClient (Proxy :: Proxy ProjectsServiceAccountsListResource) mempty
rueshyna/gogol
gogol-iam/gen/Network/Google/Resource/IAM/Projects/ServiceAccounts/List.hs
mpl-2.0
6,726
0
20
1,623
1,036
598
438
145
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.ToolResults.Projects.Histories.Create -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Creates a History. The returned History will have the id set. May return -- any of the following canonical error codes: - PERMISSION_DENIED - if the -- user is not authorized to write to project - INVALID_ARGUMENT - if the -- request is malformed - NOT_FOUND - if the containing project does not -- exist -- -- /See:/ <https://firebase.google.com/docs/test-lab/ Cloud Tool Results API Reference> for @toolresults.projects.histories.create@. module Network.Google.Resource.ToolResults.Projects.Histories.Create ( -- * REST Resource ProjectsHistoriesCreateResource -- * Creating a Request , projectsHistoriesCreate , ProjectsHistoriesCreate -- * Request Lenses , phcRequestId , phcPayload , phcProjectId ) where import Network.Google.Prelude import Network.Google.ToolResults.Types -- | A resource alias for @toolresults.projects.histories.create@ method which the -- 'ProjectsHistoriesCreate' request conforms to. type ProjectsHistoriesCreateResource = "toolresults" :> "v1beta3" :> "projects" :> Capture "projectId" Text :> "histories" :> QueryParam "requestId" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] History :> Post '[JSON] History -- | Creates a History. The returned History will have the id set. May return -- any of the following canonical error codes: - PERMISSION_DENIED - if the -- user is not authorized to write to project - INVALID_ARGUMENT - if the -- request is malformed - NOT_FOUND - if the containing project does not -- exist -- -- /See:/ 'projectsHistoriesCreate' smart constructor. data ProjectsHistoriesCreate = ProjectsHistoriesCreate' { _phcRequestId :: !(Maybe Text) , _phcPayload :: !History , _phcProjectId :: !Text } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsHistoriesCreate' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'phcRequestId' -- -- * 'phcPayload' -- -- * 'phcProjectId' projectsHistoriesCreate :: History -- ^ 'phcPayload' -> Text -- ^ 'phcProjectId' -> ProjectsHistoriesCreate projectsHistoriesCreate pPhcPayload_ pPhcProjectId_ = ProjectsHistoriesCreate' { _phcRequestId = Nothing , _phcPayload = pPhcPayload_ , _phcProjectId = pPhcProjectId_ } -- | A unique request ID for server to detect duplicated requests. For -- example, a UUID. Optional, but strongly recommended. phcRequestId :: Lens' ProjectsHistoriesCreate (Maybe Text) phcRequestId = lens _phcRequestId (\ s a -> s{_phcRequestId = a}) -- | Multipart request metadata. phcPayload :: Lens' ProjectsHistoriesCreate History phcPayload = lens _phcPayload (\ s a -> s{_phcPayload = a}) -- | A Project id. Required. 
phcProjectId :: Lens' ProjectsHistoriesCreate Text phcProjectId = lens _phcProjectId (\ s a -> s{_phcProjectId = a}) instance GoogleRequest ProjectsHistoriesCreate where type Rs ProjectsHistoriesCreate = History type Scopes ProjectsHistoriesCreate = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsHistoriesCreate'{..} = go _phcProjectId _phcRequestId (Just AltJSON) _phcPayload toolResultsService where go = buildClient (Proxy :: Proxy ProjectsHistoriesCreateResource) mempty
brendanhay/gogol
gogol-toolresults/gen/Network/Google/Resource/ToolResults/Projects/Histories/Create.hs
mpl-2.0
4,314
0
15
939
476
287
189
73
1
{-# LANGUAGE CPP #-} module Utils where import Control.Monad import Control.Arrow import Control.Applicative import Control.Exception import Data.Bool import Data.List import Data.Char import Data.Either import qualified Data.Foldable as F import qualified Data.Traversable as F import qualified Data.ByteString.Lazy as LBS import Data.Bifunctor (Bifunctor, bimap) import Data.Functor.Identity import qualified Data.Map as Map import System.Exit import System.Process hiding (callProcess, system, rawSystem) import System.IO.Temp import System.IO import System.Environment import System.Directory import System.FilePath import System.Posix.User import System.Posix.Files import System.Posix.Types import Prelude system cmd = do (_,_,_,p) <- createProcess (shell cmd) { #if MIN_VERSION_process(1,2,0) delegate_ctlc = True, #endif close_fds = True } waitForProcess p rawSystem cmd args = do (_,_,_,p) <- createProcess (proc cmd args) { #if MIN_VERSION_process(1,2,0) delegate_ctlc = True, #endif close_fds = True } waitForProcess p pro (cmd:args) = do res <- callProcess Nothing cmd args case res of ExitSuccess -> return () ExitFailure rv -> do hPutStrLn stderr $ "command failed '" ++ intercalate " " (map prettyShow $ cmd:args) ++ "' (exit code "++ show rv ++")" exitWith res pro_ (cmd:args) = do res <- callProcess Nothing cmd args case res of ExitSuccess -> return () ExitFailure rv -> hPutStrLn stderr $ "command failed '" ++ intercalate " " (map prettyShow $ cmd:args) ++ "' (exit code "++ show rv ++")" prettyShow x | any isSpace x = show x | otherwise = x callProcess :: Maybe FilePath -> FilePath -> [String] -> IO ExitCode callProcess mwd exe args = do (_, _, _, h) <- createProcess (proc exe args) { cwd = mwd } waitForProcess h writeFile'LBS f c = writeFile''LBS f Nothing c writeFile''LBS f mpe c = do withTempFile (takeDirectory f) (takeFileName f) $ \tf h -> do LBS.hPutStr h c hClose h unlessTesting $ maybe (return ()) (setPerms tf) mpe renameFile tf f writeFile' f c = writeFile'' f Nothing c writeFile'' f mpe c = do withTempFile (takeDirectory f) (takeFileName f) $ \tf h -> do hPutStr h c hClose h unlessTesting $ maybe (return ()) (setPerms tf) mpe renameFile tf f setPerms path (uidgid, mmask) = do let midmod = case uidgid of (Just uid, Just gid) -> Just $ uid ++ ":" ++ gid (Just uid, Nothing) -> Just $ uid (Nothing, Just gid) -> Just $ ":" ++ gid (Nothing, Nothing) -> Nothing maybe (return ()) (\mask -> void $ rawSystem "chmod" [mask, path]) mmask maybe (return ()) (\idmod -> void $ rawSystem "chown" [idmod, path]) midmod amNotTesting :: IO Bool amNotTesting = (`elem` [Nothing, Just ""]) <$> lookupEnv "KIB_TESTING" whenTesting, unlessTesting :: IO () -> IO () whenTesting a = do nt <- amNotTesting when (not nt) a unlessTesting a = do nt <- amNotTesting when nt a linkExists p = flip catch (\(SomeException _) -> return False) $ do getSymbolicLinkStatus p return True readFileMaybe p = do e <- doesFileExist p if e then Just <$> readFile p else return Nothing fst3 (x,_,_) = x snd3 (_,x,_) = x thd3 (_,_,x) = x flip2fst :: (a -> b -> c -> d) -> (b -> a -> c -> d) flip2snd :: (a -> b -> c -> d) -> (a -> c -> b -> d) flip2both :: (a -> b -> c -> d) -> (c -> b -> a -> d) flip2fst f b a c = f a b c flip2snd f a c b = f a b c flip2both f c b a = f a b c fst4 :: (a, b, c, d) -> a snd4 :: (a, b, c, d) -> b thd4 :: (a, b, c, d) -> c fth4 :: (a, b, c, d) -> d fst4 (a, b, c, d) = a snd4 (a, b, c, d) = b thd4 (a, b, c, d) = c fth4 (a, b, c, d) = d class IxFunctor i f | f -> i where imap :: (i -> a -> b) -> f a -> f b class 
IxFoldable i t | t -> i where ifoldr :: (i -> a -> b -> b) -> b -> t a -> b class IxFoldable i t => IxTraversable i t | t -> i where itraverse :: Applicative f => (i -> a -> f b) -> t a -> f (t b) instance IxFunctor k (Map.Map k) where imap = Map.mapWithKey instance IxFoldable k (Map.Map k) where ifoldr = Map.foldrWithKey -- test_stuff = do -- quickCheckWith args $ \(x :: T) -> -- uncurry unmpartition (mpartition x) == x -- quickCheckWith args $ \(x :: T) (ys :: [Int]) -> let -- (mas, bs) = mpartition x -- in -- unmpartition mas (bs ++ ys) == x ++ map Right ys -- quickCheckWith args $ \(xss :: AList Int (NonEmptyList (Int, Int))) -> let -- xss' = nubBy ((==) `on` fst) $ map (second getNonEmpty) xss -- in -- curryAList (uncurryAList xss') == xss' -- [Left 1, Left 2, Right 3, Left 4, Right 5, Right 6] -- -> -- [Just 1, Just 2, Nothing, Just 4, Nothing, Nothing] -- [3, 5, 6] prop_mpartition_append :: [Either Int Int] -> [Int] -> Bool prop_mpartition_append xs ys = let (mas, bs) = mpartition xs in unmpartition mas (bs ++ ys) == xs ++ map Right ys mpartition :: [Either a b] -> ([Maybe a], [b]) mpartition = map (either Just (const Nothing)) &&& rights unmpartition :: [Maybe a] -> [b] -> [Either a b] unmpartition (Just a : mas) bs = Left a : unmpartition mas bs unmpartition (Nothing : mas) (b:bs) = Right b : unmpartition mas bs unmpartition [] bs = map Right bs unmpartition _ [] = [] type AList k v = [(k, v)] uncurryAList :: AList k (AList kk v) -> AList (k, kk) v uncurryAList ass = assert (all (not . null . snd) ass) $ concat $ map (\(k,as) -> map (first (k,)) as ) ass curryAList :: Eq k => AList (k, kk) v -> AList k (AList kk v) curryAList ass = map (second (map (first snd))) $ groupByK (fst . fst) (==) ass prop_curryiso :: AList (Int, Int) Int -> Bool prop_curryiso xs = uncurryAList (curryAList xs) == xs prop_uncurryiso :: AList Int (AList Int Int) -> Bool prop_uncurryiso xs = curryAList (uncurryAList xs) == xs groupByK :: (a -> b) -> (b -> b -> Bool) -> [a] -> [(b, [a])] groupByK _ _ [] = [] groupByK f eq (x:xs) = (f x, (x:ys)) : groupByK f eq zs where (ys,zs) = span (eq (f x) . f) xs alldifferent :: Eq a => [a] -> Bool alldifferent (x:xs) = all (/=x) xs && alldifferent xs alldifferent [] = True prop_alldifferent :: [Int] -> Bool prop_alldifferent xs = alldifferent (uniq xs) where uniq = map head . group . sort unionAList :: Eq k => AList k v -> AList k v -> AList k v unionAList xs ys = checkKeysAList $ xs ++ ys unionsAList :: Eq k => [AList k v] -> AList k v unionsAList = checkKeysAList . concat unionAListWithKey :: Eq k => (k -> v -> v -> v) -> AList k v -> AList k v -> AList k v unionAListWithKey f (x@(k,v):xs) ys | Just v' <- lookup k ys = (k, f k v v') : unionAListWithKey f xs (filter ((/=k) . 
fst) ys) | otherwise = x : unionAListWithKey f xs ys unionAListWithKey f [] ys = ys unionAListWith :: Eq k => (v -> v -> v) -> AList k v -> AList k v -> AList k v unionAListWith f = unionAListWithKey (const f) unionsAListWithKey :: Eq k => (k -> v -> v -> v) -> [AList k v] -> AList k v unionsAListWithKey f = foldr (unionAListWithKey f) [] unionsAListWith :: Eq k => (v -> v -> v) -> [AList k v] -> AList k v unionsAListWith f = foldr (unionAListWith f) [] intersectionAListWith :: Eq k => (v -> v -> v) -> AList k v -> AList k v -> AList k v intersectionAListWith f xs ys = [ (x, f xv yv) | (x, xv) <- xs , (y, yv) <- ys , x == y ] -- unionsAListWith :: Eq k => (v -> v -> v) -> [AList k v] -> AList k v -- unionsAListWith f xss = foldr (unionAListWith f) [] xss checkKeysAList :: Eq a => [(a, b)] -> [(a, b)] checkKeysAList als = assert (alldifferent $ map fst als) als singletonAList :: k -> v -> AList k v singletonAList k v = (:[]) (k,v) both :: Bifunctor p => (c -> d) -> p c c -> p d d both f = bimap f f
DanielG/kvm-in-a-box
src/Utils.hs
agpl-3.0
7,980
0
20
2,115
3,402
1,764
1,638
-1
-1
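A concrete trace of the mpartition/unmpartition pair from the module above; this only restates the invariant already checked by prop_mpartition_append, with illustrative values, and would sit alongside the module.

-- E.g.  mpartition [Left 'a', Right 1, Right 2, Left 'b']
--         == ([Just 'a', Nothing, Nothing, Just 'b'], [1, 2])
-- and unmpartition reassembles the original list from the two halves:
roundTrip :: Bool
roundTrip = uncurry unmpartition (mpartition xs) == xs
  where xs = [Left 'a', Right 1, Right 2, Left 'b'] :: [Either Char Int]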
{- |
Module      : $Header$
Description : This is the main tempuhs file. It is meant to export all the
              functionality you would normally want to use when making a
              tempuhs implementation. The Internal module should not be
              imported in your implementation, but it may make sense to
              import other modules manually instead of importing this big
              meta-module.
Copyright   : (c) plaimi 2014
License     : AGPL-3
Maintainer  : [email protected]
-}
module Tempuhs.Chronology
  ( module X
  ) where

import Tempuhs.Database  as X
import Tempuhs.Functions as X
import Tempuhs.Types     as X
plaimi/tempuhs
src/Tempuhs/Chronology.hs
agpl-3.0
660
0
4
193
34
24
10
5
0
{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Language.Haskell.TokenUtils.DualTree ( renderLayoutTree , layoutTreeToSourceTree , retrieveLinesFromLayoutTree , retrieveLines , renderLines , renderSourceTree , SourceTree , Line(..) , Source(..) , renderLinesFromLayoutTree -- * to enable pretty printing , Alignment(..) , Annot(..) , DeletedSpan(..) , LineOpt(..) , Prim(..) , Transformation(..) , Up(..) ) where -- import qualified GHC as GHC -- import qualified Outputable as GHC import Control.Monad.State import qualified Data.Tree as T import qualified Text.PrettyPrint as P import Language.Haskell.TokenUtils.Types import Language.Haskell.TokenUtils.Utils -- ---------- import Data.Tree.DUAL import Data.Semigroup import Data.Monoid.Action import qualified Data.List.NonEmpty as NE import qualified Data.Tree.DUAL.Internal as I import Debug.Trace -- --------------------------------------------------------------------- data DeletedSpan = DeletedSpan SimpSpan RowOffset SimpPos deriving (Show,Eq) data Transformation = TAbove ColOffset EndOffset (Row,Col) (Row,Col) EndOffset deriving Show {- transform :: Transformation -> Prim -> Prim transform AsIs p = p transform (T _n) (PToks s) = (PToks s) transform (TAbove _co _bo _p1 _p2 _eo) (PToks s) = (PToks s) transform (TDeleted _sspan _ro _p) (PToks s) = (PToks s) transform TAdded (PToks s) = (PToks s) -} -- | The value that bubbles up. This is the Span occupied by the -- subtree, together with a string representation of the subtree. The -- origin of the string is the start of the span. data Up a = Up DtSimpSpan Alignment (NE.NonEmpty (Line a)) [DeletedSpan] | UDeleted [DeletedSpan] deriving Show -- | We need this otherwise the SemiGroup instance for SimpSpan cause problems data DtSimpSpan = Dt SimpSpan deriving (Eq,Show) instance Outputable DtSimpSpan where ppr s = P.parens $ P.text "DtSimpSpan" P.<+> ppr s data Line a = Line Row Col RowOffset Source LineOpt [a] data Alignment = ANone | AVertical deriving (Show,Eq) instance (IsToken a) => Show (Line a) where show (Line r c o f s toks) = "(" ++ show r ++ " " ++ show c ++ " " ++ show o ++ " " ++ show f ++ " " ++ show s -- ++ " " ++ "\"" ++ showTokenStream toks ++ "\")" ++ " " ++ "\"" ++ showFriendlyToks toks ++ "\")" data Source = SOriginal | SAdded | SWasAdded deriving (Show,Eq) data LineOpt = ONone -- | This line needs to be grouped with the next in terms -- of layout, so any column offsets need to be propagated | OGroup deriving (Show,Eq) data Annot = Ann String | ADeleted ForestSpan RowOffset SimpPos | ASubtree ForestSpan deriving Show data Prim a = PToks [a] | PDeleted ForestSpan RowOffset SimpPos deriving Show -- | The main data structure for this module type SourceTree a = DUALTree Transformation (Up a) Annot (Prim a) instance Semigroup DtSimpSpan where Dt (p1,_p2) <> Dt (_q1,q2) = Dt (p1,q2) instance (IsToken a) => Semigroup (Up a) where u1 <> u2 = combineUps u1 u2 instance Semigroup Transformation where (TAbove co1 bo1 p11 _p21 _eo1) <> (TAbove _co2 _bo2 _p12 p22 eo2) = (TAbove co1 bo1 p11 p22 eo2) instance (Action Transformation (Up a)) where act (TAbove _co _bo _p1 _p2 _eo) (Up sspan _a s ds) = (Up sspan a' s' ds) where a' = AVertical s' = NE.map (\(Line r c o ss _f toks) -> (Line r c o ss OGroup toks)) s act (TAbove _co _bo _p1 _p2 _eo) (UDeleted ds) = UDeleted ds -- --------------------------------------------------------------------- renderLayoutTree :: (IsToken a) => LayoutTree a -> String renderLayoutTree = 
renderSourceTree . layoutTreeToSourceTree -- --------------------------------------------------------------------- renderLinesFromLayoutTree :: (IsToken a) => LayoutTree a -> String renderLinesFromLayoutTree = renderLines . retrieveLinesFromLayoutTree -- --------------------------------------------------------------------- retrieveLinesFromLayoutTree :: (IsToken a) => LayoutTree a -> [Line a] retrieveLinesFromLayoutTree = retrieveLines . layoutTreeToSourceTree -- --------------------------------------------------------------------- retrieveLines :: (IsToken a) => SourceTree a -> [Line a] retrieveLines srcTree = case getU srcTree of Nothing -> [] Just (Up _ss _a str _ds) -> NE.toList str Just (UDeleted _) -> [] -- --------------------------------------------------------------------- renderSourceTree :: (IsToken a) => SourceTree a -> String renderSourceTree srcTree = r where r = case getU srcTree of Nothing -> "" Just (Up _ss _a str _ds) -> renderLines $ NE.toList str Just (UDeleted _) -> "" -- --------------------------------------------------------------------- renderLines :: (IsToken a) => [Line a] -> String renderLines ls = res where (_,(_,res)) = runState (go 0 ls) ((1,1),"") go _ [] = do return () go ci ((Line r c _o _f _s str):ls') = do newPos r (c+ci) addString (showTokenStream str) go ci ls' -- State operations ---------------- getRC = do (rc,_) <- get return rc putRC (r,c) = do (_,str) <- get put ((r,c),str) newPos newRow newCol = do (oldRow',oldCol) <- getRC -- Allow for out of order additions that result from additions -- to the tree. Will break the invariant. let oldRow = if oldRow' <= newRow then oldRow' else (newRow - 1) putRC (oldRow,oldCol) if oldRow == newRow then addString (take (newCol - oldCol) $ repeat ' ') else addString ( (take (newRow - oldRow) $ repeat '\n') ++ (take (newCol - 1) $ repeat ' ') ) addString [] = return () addString str = do ((r,c),curr) <- get let ll = (length $ filter (=='\n') str) let c'' = (length $ takeWhile (/='\n') $ reverse str) let (r',c') = case ll of 0 -> (r,c + c'') _ -> (r + ll, c'' + 1) put ((r',c'),curr++str) -- checkInvariant $ "addString" ++ show str addDebugString str = do ((r,c),curr) <- get put ((r,c),curr++str) -- --------------------------------------------------------------------- layoutTreeToSourceTree :: (IsToken a) => LayoutTree a -> SourceTree a -- TODO: simplify by getting rid of PDeleted, and use leafU layoutTreeToSourceTree (T.Node (Deleted sspan pg eg) _) = leaf (UDeleted [(DeletedSpan (fs2s sspan) pg eg)]) (PDeleted sspan pg eg) layoutTreeToSourceTree (T.Node (Entry sspan NoChange []) ts0) = annot (ASubtree sspan) (mconcatl $ map layoutTreeToSourceTree ts0) -- TODO: only apply TAbove if the subs go on to the next line layoutTreeToSourceTree (T.Node (Entry sspan (Above bo p1 p2 eo) []) ts0) = case (numLines ts0) of 0 -> annot (ASubtree sspan) (mconcatl $ map layoutTreeToSourceTree ts0) _ -> annot (ASubtree sspan) (applyD (TAbove co bo p1 p2 eo) subs) where subs = (mconcatl $ map layoutTreeToSourceTree ts0) co = 0 numLines :: [T.Tree (Entry a)] -> Int numLines [] = 0 numLines sts = l - f where ((f,_),_ ) = forestSpanToSimpPos $ treeStartEnd $ head sts (_ ,(l,_)) = forestSpanToSimpPos $ treeStartEnd $ last sts layoutTreeToSourceTree (T.Node (Entry sspan _lay toks) _ts) = leaf (mkUp sspan toks) (PToks toks) -- ------------------------------------- -- We use the foldl version to get a more bushy tree, else the ppr of -- it is very hard to follow mconcatl :: (Monoid a) => [a] -> a mconcatl = foldl mappend mempty -- 
--------------------------------------------------------------------- fs2s :: ForestSpan -> SimpSpan fs2s ss = (sp,ep) where (sp,ep) = forestSpanToSimpPos ss -- --------------------------------------------------------------------- mkUp :: (IsToken a) => ForestSpan -> [a] -> Up a mkUp sspan toks = Up (Dt ss) a ls [] where a = ANone s = if forestSpanVersionSet sspan then SAdded else SOriginal ss = mkSpan sspan -- toksByLine = groupTokensByLine $ reAlignMarked toks toksByLine = groupTokensByLine toks ls = NE.fromList $ concatMap (mkLinesFromToks s) toksByLine -- --------------------------------------------------------------------- -- TODO: What if the toks comprise multiple lines, e.g. in a block comment? mkLinesFromToks :: (IsToken a) => Source -> [a] -> [Line a] mkLinesFromToks _ [] = [] mkLinesFromToks s toks = [Line ro co 0 s f toks'] where f = ONone ro' = tokenRow $ head toks co' = tokenCol $ head toks (ro,co) = srcPosToSimpPos (tokenRow $ head toks, tokenCol $ head toks) toks' = addOffsetToToks (-ro',-co') toks -- --------------------------------------------------------------------- -- | Combine the 'U' annotations as they propagate from the leafs to -- be cached at the root of the tree. This is the heart of the -- DualTree functionality combineUps :: (IsToken a) => Up a -> Up a -> Up a combineUps (UDeleted d1) (UDeleted d2) = UDeleted (d1 <> d2) combineUps (UDeleted d1) (Up sp2 a2 l2 d2) = (Up sp2 a2 l (d1 <> d2)) where l = adjustForDeleted d1 l2 combineUps (Up sp1 a1 l1 d1) (UDeleted d2) = (Up sp1 a1 l1 (d1 <> d2)) combineUps u1@(Up sp1 _a1 l1 d1) u2@(Up sp2 _a2 l2 d2) = -- trace ("combineUps:" ++ show (u1,u2)) (Up (sp1 <> sp2) a l (d1 <> d2)) where a = ANone l2' = adjustForDeleted d1 l2 (Line _ _ o2 _ _ _) = NE.head l2' -- 1 0 l2'' = if o1 == o2 then l2' else NE.fromList $ map (\(Line r c f aa ff s) -> (Line (r + (o1-f)) c (o1-f) aa ff s)) (NE.toList l2') (Line r1 c1 o1 ss1 ff1 s1) = NE.last l1 (Line r2 c2 _o2 ss2 ff2 s2) = NE.head l2'' l = if r1 == r2 then NE.fromList $ (NE.init l1) ++ m ++ ll else NE.fromList $ (NE.toList l1) ++ rest -- PROBLEM: assumes c1 is final addition to the left of the line. -- i.e. tree must be created top down, not bottom up s2' = addOffsetToToks (0,c2 - c1) s2 s1' = s1 ++ s2' ff' = if ff1 == OGroup || ff2 == OGroup then OGroup else ONone m' = [Line r1 c1 o1 ss1 ff' s1'] -- 'o' takes account of any length change due to tokens being -- replaced by others of different length odiff = sum $ map (\t -> (tokenLen t) - (tokenColEnd t - tokenCol t)) $ filter (not . 
isComment) s1 st1 = showTokenStream s1 st2 = showTokenStream (s1 ++ s2') st3 = drop (length st1) st2 st4 = takeWhile (==' ') st3 oo = length (st1++st4) coo = c1 + oo o = coo - c2 (m,ll) = if (ss1 /= ss2) && (length s1 == 1 && (tokenLen $ head s1) == 0) then ([NE.last l1],map (\(Line r c f aa ff s) -> (Line (r+1) (c + o) (f+1) aa ff s)) (NE.toList l2'')) else if ff' == OGroup then (m',addOffsetToGroup o (NE.tail l2'')) else (m', (NE.tail l2'')) -- rest = if ff2 == OGroup rest = if ff2 == OGroup && ff1 == OGroup then addOffsetToGroup odiff (NE.toList l2'') else NE.toList l2'' addOffsetToGroup _off [] = [] addOffsetToGroup _off (ls@((Line _r _c _f _aa ONone _s):_)) = ls addOffsetToGroup off ((Line r c f aa OGroup s):ls) = (Line r (c+off) f aa OGroup s) : addOffsetToGroup off ls {- Should end up with [(Line 1 1 0 SOriginal ONone \"module LayoutIn1 where\"), (Line 3 1 0 SOriginal ONone \"--Layout rule applies after 'where','let','do' and 'of'\"), (Line 5 1 0 SOriginal ONone \"--In this Example: rename 'sq' to 'square'.\"), (Line 7 1 0 SOriginal OGroup \"sumSquares x y= square x + square y where square x= x^pow\"), (Line 8 11 0 SOriginal OGroup \"--There is a comment.\"), (Line 9 43 0 SOriginal OGroup \"pow=2\"), (Line 10 1 0 SOriginal ONone \"\")] But we are getting Up (Span (7,12) (9,40)) ANone ( (7 12 0 SOriginal OGroup "x y= square x + square y where square x= x^pow") :| [ (8 13 0 SOriginal OGroup "--There is a comment."), (9 45 0 SOriginal OGroup "pow=2")]) []) From (Up (Span (7,12) (7,15)) ANone ( (7 12 0 SOriginal ONone "x y") :| []) [], Up (Span (7,15) (9,40)) ANone ( (7 15 0 SOriginal OGroup "= square x + square y where square x= x^pow") :| [ (8 11 0 SOriginal OGroup "--There is a comment."), (9 43 0 SOriginal OGroup "pow=2")]) []) ----------------------------------------------------- ((((36,23),(41,25)),ITblockComment \" ++AZ++ : hsBinds does not re (Up (Span (31, 23) (34, 72)) ANone [(Line 31 23 0 SOriginal ONone \"-- renamed <- getRefactRenamed\"), (Line 32 23 0 SOriginal OGroup \"let renamed = undefined\"), (Line 33 23 0 SOriginal OGroup \"let declsr = hsBinds renamed\"), (Line 34 23 0 SOriginal OGroup \"let (before,parent,after) = divideDecls declsr pn\"), (Line 35 23 0 SOriginal OGroup \"-- error (\"liftToMod:(before,parent,after)=\" ++ (showGhc (before,parent,after))) -- ++AZ++\"), (Line 36 23 0 SOriginal OGroup \"{- ++AZ++ : hsBinds does not return class or instance definitions when (isClassDecl $ ghead \"liftToMod\" parent) $ error \"Sorry, the refactorer cannot lift a definition from a class declaration!\" when (isInstDecl $ ghead \"liftToMod\" parent) $ error \"Sorry, the refactorer cannot lift a definition from an instance declaration!\" -}\")] []) ------------------------ (Up (Span (42, 23) (43, 79)) ANone [(Line 42 23 0 SOriginal OGroup \"let liftedDecls = definingDeclsNames [n] parent True True\"), (Line 43 27 0 SOriginal OGroup \"declaredPns = nub $ concatMap definedPNs liftedDecls\")] []) -} {- (Line r1 = 10 c1 = 3 o1 = 0 ss1 = SOriginal ff1 = ONone s1 = \"g2 <- getCurrentModuleGraph\") (Line r2 = 11 c2 = 3 o2 = 0 ss2 = SOriginal ff2 = OGroup s2 = \"let scc = topSortModuleGraph False g2 Nothing\") --- (Up (Span (9, 3) (11, 47)) ANone [(Line 9 3 0 SOriginal ONone \"-- g <- GHC.getModuleGraph\"), (Line 10 3 0 SOriginal ONone \"g2 <- getCurrentModuleGraph\"), (Line 11 4 0 SOriginal OGroup \"let scc = topSortModuleGraph False g2 Nothing\")] []) ------------------------- Up1 (Up (Span (9, 3) (10, 29)) ANone [(Line 9 3 0 SOriginal ONone \"-- g <- GHC.getModuleGraph\"), (Line 
10 3 0 SOriginal ONone \"g2 <- getCurrentModuleGraph\")] []) Up2 (Up (Span (11, 3) (11, 47)) ANone [(Line 11 3 0 SOriginal OGroup \"let scc = topSortModuleGraph False g2 Nothing\")] []) -} {- ((o,st1,st3)=(0,"x y= sq x + sq y where"," sq x= x^pow")) (Line r1 = 7 c1 = 12 o1 = 0 ss1 = SOriginal ff1 = ONone s1 = \"x y= square x + square y where\") (Line r2 = 7 c2 = 35 o2 = 0 ss2 = SOriginal ff2 = OGroup s2 = \"square x= x^pow\") ------------------------ (Up (Span (7, 12) (9, 40)) ANone [(Line 7 12 0 SOriginal OGroup \"x y= square x + square y where square x= x^pow\"), (Line 8 -5 0 SOriginal OGroup \"--There is a comment.\"), (Line 9 27 0 SOriginal OGroup \"pow=2\")] []) ------------- Up1 (Up (Span (7, 12) (7, 34)) ANone [(Line 7 12 0 SOriginal ONone \"x y= square x + square y where\")] []) Up2 (Up (Span (7, 35) (9, 40)) AVertical [(Line 7 35 0 SOriginal OGroup \"square x= x^pow\"), (Line 8 3 0 SOriginal OGroup \"--There is a comment.\"), (Line 9 35 0 SOriginal OGroup \"pow=2\")] []) -} -- ------------------------------------- adjustForDeleted :: (IsToken a) => [DeletedSpan] -> NE.NonEmpty (Line a) -> NE.NonEmpty (Line a) adjustForDeleted d1 l2 = l where deltaL = calcDelta d1 l = NE.map go l2 go (Line r c o SOriginal f str) = Line (r - deltaL) c o SOriginal f str go (Line r c o SWasAdded f str) = Line (r - deltaL) c o SWasAdded f str go (Line r c o SAdded f str) = Line r c o SWasAdded f str -- ------------------------------------- calcDelta :: [DeletedSpan] -> RowOffset calcDelta d1 = deltaL where deltaL = case d1 of [] -> 0 _ -> (-1) + (sum $ map calcDelta' d1) calcDelta' :: DeletedSpan -> RowOffset calcDelta' (DeletedSpan ((rs,_cs),(re,_ce)) pg (rd,_cd)) = r + 1 where ol = re - rs eg = rd r = (pg + ol + eg) - (max pg eg) -- --------------------------------------------------------------------- mkSpan :: ForestSpan -> SimpSpan mkSpan ss = (s,e) where (s,e) = forestSpanToSimpPos ss -- ---------------------------------------------------------------------
alanz/haskell-token-utils
src/Language/Haskell/TokenUtils/DualTree.hs
unlicense
16,929
4
19
4,301
4,120
2,200
1,920
233
9
-- Copyright 2017 gRPC authors. -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. -------------------------------------------------------------------------------- {-# LANGUAGE OverloadedStrings #-} module Network.Grpc.Lib.ChannelArgs ( enableSensus , disableSensus , enableLoadReporting , disableLoadReporting , maxConcurrentStreams , maxReceiveMessageLength , maxSendMessageLength , http2InitialSequenceNumber , http2StreamLookaheadBytes , http2HpackTableSizeDecoder , http2HpackTableSizeEncoder , http2MaxFrameSize , defaultAuthority , primaryUserAgentString , secondaryUserAgentString , maxReconnectBackoffMs , initialReconnectBackoffMs , sslTargetNameOverrideArg , maxMetadataSize , allowReusePort , disableReusePort ) where import qualified Data.ByteString as B import qualified Data.HashMap.Strict as Map import Network.Grpc.Lib.Core import Network.Grpc.Lib.ChannelArgsStrings -- | Enable census for tracing and stats collection. enableSensus :: ChannelArgs enableSensus = argB grpcArg_EnableSensus True -- | Disable census for tracing and stats collection. disableSensus :: ChannelArgs disableSensus = argB grpcArg_EnableSensus False -- | Enable load reporting. enableLoadReporting :: ChannelArgs enableLoadReporting = argB grpcArg_EnableLoadReporting True -- | Disable load reporting. disableLoadReporting :: ChannelArgs disableLoadReporting = argB grpcArg_EnableLoadReporting False -- | Maximum number of concurrent incoming streams to allow on a http2 -- connection. maxConcurrentStreams :: ArgInt -> ChannelArgs maxConcurrentStreams = argI grpcArg_MaxConcurrentStreams -- | Maximum message length in bytes that the channel can receive. -1 means -- unlimited. maxReceiveMessageLength :: ArgInt -> ChannelArgs maxReceiveMessageLength = argI grpcArg_MaxReceiveMessageLength -- | Maximum message length in bytes that the channel can send. -1 means -- unlimited. maxSendMessageLength :: ArgInt -> ChannelArgs maxSendMessageLength = argI grpcArg_MaxSendMessageLength -- | Initial sequence number for http2 transports. http2InitialSequenceNumber :: ArgInt -> ChannelArgs http2InitialSequenceNumber = argI grpcArg_Http2InitialSequenceNumber -- | Amount of bytes to read ahead on individual streams. Defaults to 64kb, -- larger values can help throughput on high-latency connections. NOTE: at -- some point we'd like to auto-tune this, and this parameter will become a -- no-op. http2StreamLookaheadBytes :: ArgInt -> ChannelArgs http2StreamLookaheadBytes = argI grpcArg_Http2StreamLookaheadBytes -- | How much memory (in bytes) to use for hpack decoding. http2HpackTableSizeDecoder :: ArgInt -> ChannelArgs http2HpackTableSizeDecoder = argI grpcArg_Http2HpackTableSizeDecoder -- | How much memory (in bytes) to use for hpack encoding. http2HpackTableSizeEncoder :: ArgInt -> ChannelArgs http2HpackTableSizeEncoder = argI grpcArg_Http2HpackTableSizeEncoder -- | How big a frame are we willing to receive via HTTP2. Min 16384, max -- 16777215. 
Larger values give lower CPU usage for large messages, but more -- head of line blocking for small messages. http2MaxFrameSize :: ArgInt -> ChannelArgs http2MaxFrameSize = argI grpcArg_Http2MaxFrameSize -- | Default authority to pass if none specified on call construction. defaultAuthority :: B.ByteString -> ChannelArgs defaultAuthority = argS grpcArg_DefaultAuthority -- | Primary user agent: goes at the start of the user-agent metadata sent on -- each request. primaryUserAgentString :: B.ByteString -> ChannelArgs primaryUserAgentString = argS grpcArg_PrimaryUserAgentString -- | Primary user agent: goes at the end of the user-agent metadata sent on -- each request. secondaryUserAgentString :: B.ByteString -> ChannelArgs secondaryUserAgentString = argS grpcArg_SecondaryUserAgentString -- | The maximum time between subsequent connection attempts, in ms. maxReconnectBackoffMs :: ArgInt -> ChannelArgs maxReconnectBackoffMs = argI grpcArg_MaxReconnectBackoffMs -- | The time between the first and second connection attempts, in ms. initialReconnectBackoffMs :: ArgInt -> ChannelArgs initialReconnectBackoffMs = argI grpcArg_InitialReconnectBackoffMs -- | The caller of the 'secure_channel_create' functions may override the -- target name used for SSL host name checking using this channel argument. -- This should be used for testing only. If this argument is not -- specified, the name used for SSL host name checking will be the target -- parameter (assuming that the secure channel is an SSL channel). If this -- parameter is specified and the underlying is not an SSL channel, it will -- just be ignored. sslTargetNameOverrideArg :: B.ByteString -> ChannelArgs sslTargetNameOverrideArg = argS grpcArg_SslTargetNameOverrideArg -- | Maximum metadata size, in bytes. maxMetadataSize :: ArgInt -> ChannelArgs maxMetadataSize = argI grpcArg_MaxMetadataSize -- | Allow the use of SO_REUSEPORT if it's available (default allow). allowReusePort :: ChannelArgs allowReusePort = argB grpcArg_AllowReuseport True -- | Disable the use of SO_REUSEPORT (default allow if available). disableReusePort :: ChannelArgs disableReusePort = argB grpcArg_AllowReuseport False arg :: B.ByteString -> ArgValue -> ChannelArgs arg s v = ChannelArgs (Map.singleton s v) argB :: B.ByteString -> Bool -> ChannelArgs argB s = arg s . ArgI . (\x -> if x then 1 else 0) argI :: B.ByteString -> ArgInt -> ChannelArgs argI s = arg s . ArgI argS :: B.ByteString -> B.ByteString -> ChannelArgs argS s = arg s . ArgS
grpc/grpc-haskell
src/Network/Grpc/Lib/ChannelArgs.hs
apache-2.0
6,078
0
8
913
659
388
271
77
2
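Each builder above produces a single-entry ChannelArgs backed by a HashMap, so several of them are presumably combined monoidally before channel creation. A hedged sketch of that usage, assuming the Monoid/Semigroup instance for ChannelArgs lives in Network.Grpc.Lib.Core (not shown here); the values and name are illustrative.

-- Hypothetical: assemble arguments for a client channel.
clientArgs :: ChannelArgs
clientArgs = mconcat
  [ maxReceiveMessageLength (8 * 1024 * 1024)  -- 8 MiB inbound limit
  , maxSendMessageLength    (8 * 1024 * 1024)
  , primaryUserAgentString  "grpc-haskell-demo/0.1"
  , disableReusePort
  ]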
-- This approach is very straightforward. We just use two separate -- databases. We have one database for production, called -- "production.sqlite", and one database for testing, called -- "testing.sqlite". The database name is specified in the 'main' -- function. {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE InstanceSigs #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} -- This is an unfortunate hack. Used to make the code slightly easier to -- follow. See below for how we could fix it. {-# LANGUAGE UndecidableInstances #-} -- This is another unfortunate hack to make the code simpler and easier to -- understand. Described at the end of this file. {-# OPTIONS_GHC -fno-warn-orphans #-} module Lib (module Lib, EntityField(..)) where import Control.Exception (Exception) import Control.Monad.Catch (catch, throwM) import Control.Monad.Error.Class (throwError) import Control.Monad.IO.Class (liftIO) import Control.Monad.Logger (runStderrLoggingT) import Control.Monad.Trans.Either (EitherT) import Data.Proxy (Proxy(..)) import Database.Persist ( Key, EntityField, ToBackendKey, delete, get, insert, replace ) import Database.Persist.Sqlite ( SqlBackend, SqlPersistT, runMigration, runSqlConn, toSqlKey , withSqliteConn ) import Database.Persist.TH ( mkMigrate, mkPersist, persistLowerCase, share, sqlSettings ) import Data.Text (Text) import Network.Wai.Handler.Warp (run) import Servant ( (:<|>)(..), (:>), Capture, Delete, FromText(..), Get, JSON, Post, Put , ReqBody, ServantErr(..), Server, err404, serve ) ---------------------------------- ---------------------------------- -- Persistent model definitions -- ---------------------------------- ---------------------------------- -- This uses Persistent (a database library) to define a BlogPost data -- type as well as it's corresponding database table. -- -- If you've never used Persistent, you can think of it as defining the -- following data types and sql statement. This is a vast simplification of -- what it is actually doing, but it's good for a start: -- -- data BlogPost = BlogPost { blogPostTitle :: Text -- , blogPostContent :: Text -- } -- -- type Key BlogPost = Int -- -- CREATE TABLE "blog_post" ("id" PRIMARY KEY,"title" VARCHAR,"content" VARCHAR) -- -- It also defines some helper functions to query the "blog_post" table. share [ mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase| BlogPost json title Text content Text deriving Show |] ----------------- ----------------- -- servant api -- ----------------- ----------------- -- | This defines a type which represents the API. A description of the -- API is given in the README.md. If you read the README.md, this should -- be very understandable. type BlogPostApi = "create" :> ReqBody '[JSON] BlogPost :> Post '[JSON] (Key BlogPost) :<|> "read" :> Capture "id" (Key BlogPost) :> Get '[JSON] BlogPost :<|> "update" :> Capture "id" (Key BlogPost) :> ReqBody '[JSON] BlogPost :> Put '[JSON] () :<|> "delete" :> Capture "id" (Key BlogPost) :> Delete '[JSON] () -- | This defines handlers for our API. 
This 'server' function is -- Servant-specific and not too interesting. If you want to learn more -- about it, see the Servant tutorial. -- -- However, there is one interesting thing here: the -- 'createBlogPost', 'readBlogPost', 'updateBlogPost', and 'deleteBlogPost' -- functions. See their documentation for an explanation of what they are -- doing. -- In production, the 'SqlBackend' argument will contain connection -- information to access the production database, while in testing, the -- 'SqlBackend' argument will contain connection information to access -- a testing database. server :: SqlBackend -> Server BlogPostApi server conn = createBlogPost :<|> readBlogPost :<|> updateBlogPost :<|> deleteBlogPost where -- This is the handler for the API call that creates a blog post. -- -- Looking at the type, you can see that we get a 'BlogPost' object as -- input, and we need to return a 'Key' 'BlogPost' (which you can think -- of as an integer that corresponds to a database id). -- -- -- We use the 'runDb' function defined below. createBlogPost :: BlogPost -> EitherT ServantErr IO (Key BlogPost) createBlogPost blogPost = runDb $ insert blogPost -- This is the handler for the API call that fetches a blog post from -- the database. Return a 404 if the blog post can't be found. readBlogPost :: Key BlogPost -> EitherT ServantErr IO BlogPost readBlogPost key = runDb $ do maybeVal <- get key case maybeVal of Just blogPost -> return blogPost Nothing -> throwM err404 -- Similar to 'createBlogPost'. updateBlogPost :: Key BlogPost -> BlogPost -> EitherT ServantErr IO () updateBlogPost key val = runDb $ replace key val -- Similar to 'createBlogPost'. deleteBlogPost :: Key BlogPost -> EitherT ServantErr IO () deleteBlogPost key = runDb $ delete key -- This is a small helper function for running a Persistent database -- action. This is used in the four handlers above. runDb :: SqlPersistT IO a -> EitherT ServantErr IO a runDb query = liftIO (runSqlConn query conn) `catch` \(err::ServantErr) -> throwError err -- | This is another artifact of Servant. See the Servant tutorial or this -- article I wrote about Servant for an overview of what this is: -- <http://functor.tokyo/blog/2015-08-13-servant-type-families> blogPostApiProxy :: Proxy BlogPostApi blogPostApiProxy = Proxy ---------- ---------- -- main -- ---------- ---------- -- This is the main function. It basically does three things. -- -- 1. Open up a connection to the sqlite database "production.sqlite". In -- production this would probably be something like Postgres, MongoDB, -- AWS's DynamoDB, etc. -- 2. Perform migration. This creates the "blog_post" table in the -- database if it doesn't exist. -- 3. Run our 'server' function, which effectively runs the API. defaultMain :: IO () defaultMain = runStderrLoggingT $ withSqliteConn "production.sqlite" $ \conn -> do liftIO $ runSqlConn (runMigration migrateAll) conn liftIO $ putStrLn "\napi running on port 8080..." liftIO . run 8080 . serve blogPostApiProxy $ server conn ----------------- ----------------- -- other stuff -- ----------------- ----------------- -- | XXX: Hack. -- -- Read the comment at the bottom of Lib.hs in the free-monad -- implementation to find out more about this. instance Exception ServantErr -- | XXX: Hack. -- -- Read the comment at the bottom of Lib.hs in the free-monad -- implementation to find out more about this. instance (ToBackendKey SqlBackend a) => FromText (Key a) where fromText :: Text -> Maybe (Key a) fromText text = toSqlKey <$> fromText text
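-- Illustrative usage sketch: once 'defaultMain' is running, the endpoints declared in
-- 'BlogPostApi' above can be exercised with curl along these lines. The JSON field names
-- shown are assumed to follow Persistent's json derivation for 'BlogPost'; adjust if the
-- generated instances differ.
--
--   curl -X POST -H 'Content-Type: application/json' -d '{"title":"hello","content":"world"}' http://localhost:8080/create
--   curl http://localhost:8080/read/1
--   curl -X PUT -H 'Content-Type: application/json' -d '{"title":"hello","content":"again"}' http://localhost:8080/update/1
--   curl -X DELETE http://localhost:8080/delete/1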
cdepillabout/testing-code-that-accesses-db-in-haskell
with-db/testing-db/src/Lib.hs
apache-2.0
7,822
0
19
1,802
999
590
409
84
2
module ListUtils where import Data.Monoid dropLast :: Int -> [a] -> [a] dropLast n = reverse . drop n . reverse takeLast :: Int -> [a] -> [a] takeLast n = reverse . take n . reverse -- vector minus vecminus :: Num a => [a] -> [a] -> [a] vecminus = zipWith (-) -- element-wise min against a scalar vecmin :: Ord a => a -> [a] -> [a] vecmin x = map (min x) fconcat :: [a -> a] -> a -> a fconcat = appEndo . mconcat . fmap Endo
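-- A few illustrative evaluations of the helpers above (worked out by hand):
--
--   dropLast 2 [1,2,3,4,5]   == [1,2,3]
--   takeLast 2 [1,2,3,4,5]   == [4,5]
--   vecminus [5,7,9] [1,2,3] == [4,5,6]
--   vecmin 3 [1,5,2,9]       == [1,3,2,3]
--   fconcat [(+1),(*2)] 3    == 7    -- composes right-to-left: (+1) ((*2) 3)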
epeld/zatacka
old/ListUtils.hs
apache-2.0
376
0
8
81
182
99
83
11
1
module Codewars.Kata.SIGT where import Prelude hiding (read, reads, readsPrec, Integer, fromIntegral, fromInteger, toInteger) import Data.Char stringIntGreaterThan :: String -> String -> Bool stringIntGreaterThan a b = if a == b then False else case ((head a == '-'), (head b == '-')) of (False, False) -> gt a b (False, True) -> True (True, False) -> False (True, True) -> not $ gt (tail a) (tail b) where gt a b = case compare (length a) (length b) of GT -> True LT -> False EQ -> gt2 a b gt2 "" "" = False gt2 a b = case compare (head a) (head b) of GT -> True LT -> False EQ -> gt2 (tail a) (tail b) {- stringIntGreaterThan a b = (strToInt a) > (strToInt b) where strToInt s = if head s == '-' then -(strToInt2 (tail s) 0) else -(strToInt2 s 0) strToInt2 [] acc = acc strToInt2 (x:xs) acc = strToInt2 xs (acc * 10 + (ord x) - (ord '0')) -}
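-- Illustrative expected results for the implementation above:
--
--   stringIntGreaterThan "10"  "9"  == True
--   stringIntGreaterThan "-10" "-9" == False
--   stringIntGreaterThan "5"   "-3" == True
--   stringIntGreaterThan "7"   "7"  == False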
lisphacker/codewars
SIGT.hs
bsd-2-clause
1,425
0
12
749
311
166
145
20
10
-- -- Copyright (c) 2013, Carl Joachim Svenn -- All rights reserved. -- -- Redistribution and use in source and binary forms, with or without -- modification, are permitted provided that the following conditions are met: -- -- 1. Redistributions of source code must retain the above copyright notice, this -- list of conditions and the following disclaimer. -- 2. Redistributions in binary form must reproduce the above copyright notice, -- this list of conditions and the following disclaimer in the documentation -- and/or other materials provided with the distribution. -- -- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -- module MEnv.System.GLFW ( systemHandleFrontBegin, systemHandleFrontEnd, systemHandleBackBegin, systemHandleBackEnd, ) where import MEnv systemHandleFrontBegin :: a -> a -> MEnv res a systemHandleFrontBegin a a' = return a systemHandleFrontEnd :: a -> a -> MEnv res a systemHandleFrontEnd a a' = return a systemHandleBackBegin :: a -> a -> MEnv res a systemHandleBackBegin a a' = return a systemHandleBackEnd :: a -> a -> MEnv res a systemHandleBackEnd a a' = return a {- data Friend data FriendData = meta :: XXX, content :: BS.ByteString friendsEatData :: ([FriendData] -> [FriendData]) -> MEnv res () -}
karamellpelle/MEnv
source/MEnv/System/GLFW.hs
bsd-2-clause
2,128
0
7
468
174
102
72
19
1
{-# LANGUAGE OverloadedStrings #-} import System.IO import Text.XML.Pipe import Network import Network.PeyoTLS.ReadFile import Network.XmlPush.HttpPush.Tls import qualified Data.ByteString.Char8 as BSC import TestPusher main :: IO () main = do ca <- readCertificateStore ["certs/cacert.sample_pem"] k' <- readKey "certs/yoshikuni.sample_key" c' <- readCertificateChain ["certs/yoshikuni.sample_crt"] ch <- connectTo "localhost" $ PortNumber 443 testPusher (undefined :: HttpPushTls Handle) (Two (Just ch) Nothing) (HttpPushTlsArgs (HttpPushArgs (const Nothing) getServerHandle (Just ("localhost", 443, "")) gtPth wntRspns) (tlsArgsCl "localhost" True (const Nothing) ["TLS_RSA_WITH_AES_128_CBC_SHA"] ca [(k', c')]) (tlsArgsSv gtNm (const Nothing) ["TLS_RSA_WITH_AES_128_CBC_SHA"] (Just ca) [(k', c')]) ) getServerHandle :: Maybe (IO Handle) getServerHandle = Just $ do soc <- listenOn $ PortNumber 8080 (h, _, _) <- accept soc return h wntRspns :: XmlNode -> Bool wntRspns (XmlNode (_, "monologue") _ [] []) = False wntRspns _ = True gtPth :: XmlNode -> FilePath gtPth (XmlNode (_, "father") _ [] []) = "family" gtPth _ = "others" gtNm :: XmlNode -> Maybe String gtNm (XmlNode (_, "name") _ _ [XmlCharData n]) = Just $ BSC.unpack n gtNm _ = Nothing
YoshikuniJujo/xml-push
examples/httpPushTlsE.hs
bsd-3-clause
1,286
16
14
206
492
258
234
37
1
{-# LANGUAGE BangPatterns #-} module Lookup(lookupTiles,withTemplate,withTemplates,Score,Val,Count,Hist(..),HandTiles(..)) where import qualified Data.IntMap as M import Data.Array.IArray((!)) --import Data.List(group,sort) import Data.Monoid(Monoid(..)) --import Data.Ix(inRange) import Data.Char (toLower) import Data.List(group) import Dict(Dict(Dict)) import Tiles(valArray) import Template -- imports for testing import Dict(buildInts,Dictionary(..)) import Test.HUnit import Test.QuickCheck import Test.QuickCheck.Batch import Data.List(nub,sort,nubBy,unfoldr) import Data.Maybe(listToMaybe) import Control.Monad(replicateM,foldM,liftM2) -- import Common import Debug.Trace type IsWild = Bool type Val = Int type Count = Int type Score = Int newtype Hist = Hist {unHist :: [(Val,Count)]} deriving Show data HandTiles = HandTiles Count Hist Count -- # wilds, Hist of A..Z, # of tiles including wilds data SM = SM !Int !Int -- First is running total of tiles, the second is multipler hist :: [Val] -> Hist hist = Hist . map hist' . group . sort where hist' xs = (head xs, length xs) fromHist :: Hist -> [Val] fromHist = concatMap (uncurry (flip replicate)) . unHist pullTile :: Hist -> [(Val,Hist)] pullTile = helper id . unHist where helper _ [] = [] helper f (pair@(x,1):xs) = (x,Hist $ f xs) : helper (f . (pair:)) xs helper f (pair@(x,n):xs) = (x,Hist $ f ((x,pred n):xs)) : helper (f . (pair:)) xs addBonus :: (Score,String) -> (Score,String) addBonus (score,str) = (score+50,str) lookupTiles :: Dict -> HandTiles -> [(Score,String)] lookupTiles !t (HandTiles wilds s n) = let scoreBonus | n/=7 = id | otherwise = map scoreBonus' scoreBonus' x@(_,str) | length str == 7 = addBonus x | otherwise = x in scoreBonus $ lookupAll t 0 wilds s withTemplate :: Dict -> HandTiles -> String -> [(Score,String)] withTemplate t (HandTiles wilds s n) sTemplate = let template = parseTemplate sTemplate m = length (filter isBlank template) --msg = show(wilds,s,template,(m,n,m<=n)) scoreBonus | m == 7 = map addBonus | otherwise = id in if m <= n then scoreBonus . mapFst total . lookupTemplate t mempty wilds s $ template else [] withTemplates :: Dict -> HandTiles -> String -> [(Score,String)] withTemplates t (HandTiles wilds s n) sTemplate = let templates = allTemplates (parseTemplate sTemplate) templateLens = map (\template -> (template,length (filter isBlank template))) templates validTemplates = filter (\(_,m)-> 1<=m && m<=n) templateLens oneTemplateLen (template,m) | m==7 = map addBonus . oneTemplate $ template | otherwise = oneTemplate template oneTemplate template = mapFst total . 
lookupTemplate t mempty wilds s $ template in concatMap oneTemplateLen $ validTemplates lookupAll :: Dict -> Score -> Count -> Hist -> [(Score,String)] lookupAll !(Dict here m) !score !wilds !s = if here then (score,[]) : rest else rest where rest :: [(Score,String)] rest = concatMap choose (pullTile s) ++ wild choose :: (Val,Hist) -> [(Score,String)] choose (y,s') = case M.lookup y m of Nothing -> [] Just t -> let c = toEnum y score' = score + valArray!y in mapSnd (c:) (lookupAll t score' wilds s') wild :: [(Score,String)] wild = if wilds>0 then let wilds' = pred wilds f (y,t) = let c = toLower (toEnum y) in mapSnd (c:) (lookupAll t score wilds' s) in concatMap f (M.assocs m) else [] mapFst :: (a1 -> a2) -> [(a1,b)] -> [(a2,b)] mapFst f = go where go [] = [] go ((a,b):rest) = (f a,b) : go rest mapSnd :: (b1 -> b2) -> [(a,b1)] -> [(a,b2)] mapSnd f = go where go [] = [] go ((a,b):rest) = (a,f b) : go rest scoreOf :: Square -> IsWild -> Val -> SM scoreOf square isWild val = case square of Normal -> SM base 1 DoubleLetter -> SM (2*base) 1 TripleLetter -> SM (3*base) 1 DoubleWord -> SM base 2 TripleWord -> SM base 3 where base | isWild = 0 | otherwise = valArray!val instance Monoid SM where mempty = SM 0 1 mappend (SM score1 mult1) (SM score2 mult2) = SM (score1+score2) (mult1 * mult2) addTile :: SM -> Square -> IsWild -> Val -> SM addTile sm s i v = mappend sm (scoreOf s i v) total :: SM -> Score total (SM t m) = m*t lookupTemplate :: Dict -> SM -> Count -> Hist -> Template -> [(SM,String)] lookupTemplate !(Dict here _m) !score !_wilds !_s [] = if here then [(score,[])] else [] lookupTemplate !(Dict _here m) !score !wilds !s (Filled isWild val : template') = case M.lookup val m of Nothing -> [] Just t -> let c = (if isWild then toLower else id) (toEnum val) score' = addTile score Normal isWild val in mapSnd (c:) (lookupTemplate t score' wilds s template') lookupTemplate !(Dict _here m) !score !wilds !s (Blank square:template') = concatMap choose (pullTile s) ++ useWild where choose :: (Val,Hist) -> [(SM,String)] choose (val,s') = case M.lookup val m of Nothing -> [] Just t -> let c = toEnum val score' = addTile score square False val in mapSnd (c:) (lookupTemplate t score' wilds s' template') useWild ::[(SM,String)] useWild | 0 == wilds = [] | otherwise = let wilds' = pred wilds useAs (val,t) = let c = toLower (toEnum val) score' = addTile score square True val in mapSnd (c:) (lookupTemplate t score' wilds' s template') in concatMap useAs (M.assocs m) -- testing instance Arbitrary Hist where arbitrary = do n <- choose (1,7) vals <- replicateM n (choose (fromEnum 'A',fromEnum 'E')) return (hist vals) test_hist_fromHist :: [Val] -> Bool test_hist_fromHist x = sort x == (fromHist (hist x)) test_pullTile :: Hist -> Bool test_pullTile h@(Hist x) = let each = map fst x == nub (sort (map fst (pullTile h))) y0 = (fromHist h) y1 = (sort $ unfoldr (listToMaybe . pullTile) h) y2 = (sort $ unfoldr (listToMaybe . reverse . pullTile) h) in each && (y0==y1) && (y0==y2) instance Arbitrary HandTiles where arbitrary = do w <- choose (0,2) n <- choose (0,7-w) vals <- replicateM n (choose (1,26)) return (HandTiles w (hist vals) (w+n)) test_lookup :: Dictionary -> Bool test_lookup (Dictionary ds) = let dict = buildInts ds isOk :: [Int] -> Bool isOk h = (score==score')&&(sort str==str') where score' = (sum $ map (valArray!) h) + (if length h == 7 then 50 else 0) str' = sort $ map toEnum h (score,str) = last . sort $ lookupTiles dict $ HandTiles 0 (hist h) (length h) in all isOk ds
ChrisKuklewicz/XWords
src/Lookup.hs
bsd-3-clause
7,088
0
18
2,005
2,984
1,575
1,409
163
5
{-# LANGUAGE FlexibleContexts #-} module Rash.IR.Bash2Rough ( translate , convertList ) where import qualified Data.Data import Data.Generics.Uniplate.Data (rewriteBi) import qualified Data.Maybe as Maybe import qualified Data.Typeable as Typeable import qualified Language.Bash.Cond as C import qualified Language.Bash.Parse as BashParse import qualified Language.Bash.Parse.Word import qualified Language.Bash.Pretty as BashPretty import qualified Language.Bash.Syntax as S import qualified Language.Bash.Word as W import Text.Parsec (parse) import Text.Parsec.Error (ParseError) import Rash.IR.Rough import qualified Rash.Debug as Debug die :: (Show a, BashPretty.Pretty a) => String -> a -> b die msg a = Debug.die "B2R" msg a dieT :: (Show a, Typeable.Typeable a, BashPretty.Pretty a) => String -> a -> b dieT msg x = die (msg ++ " [" ++ (show $ Typeable.typeOf x) ++ "]") x fc :: String -> [Expr] -> Expr fc = FunctionCall -- | Lists convertList :: S.List -> Expr -- TODO: ignoring pipeline args convertList (S.List stmts) = listOrExpr [ convertAndOr x | (S.Statement x _) <- stmts ] listOrExpr :: [Expr] -> Expr listOrExpr [] = Nop listOrExpr [e] = e listOrExpr es = List es -- | Pipelines convertAndOr :: S.AndOr -> Expr convertAndOr (S.Last p) = convertPipeline p convertAndOr (S.And p ao) = Binop (convertPipeline p) And (convertAndOr ao) convertAndOr (S.Or p ao) = Binop (convertPipeline p) And (convertAndOr ao) listOrPipe :: [Expr] -> Expr listOrPipe [e] = e listOrPipe es = Pipe es addToPipe :: Expr -> Expr -> Expr addToPipe (Pipe ps) new = Pipe (ps ++ [new]) addToPipe expr1 expr2 = Pipe [expr1, expr2] convertPipeline :: S.Pipeline -> Expr -- ignored timing and inverted. I think this is right. convertPipeline (S.Pipeline _ _ _ cs) = listOrPipe $ map convertCommand cs convertCommand :: S.Command -> Expr convertCommand (S.Command sc rs) = foldl convertRedir (convertShellCommand sc) rs convertRedir :: Expr -> S.Redir -> Expr convertRedir expr (S.Heredoc S.Here _ False doc) = (Stdin (convertWord doc) expr) convertRedir expr (S.Redir {S.redirDesc=Nothing , S.redirOp=S.Append , S.redirTarget=file}) = addToPipe expr (fc "stdout.appendTo" [convertWord file]) convertRedir expr (S.Redir {S.redirDesc=Just(S.IONumber 2) , S.redirOp=S.OutAnd , S.redirTarget=[W.Char '1']}) = addToPipe expr (fc "stderr.intoStdout" []) convertRedir expr (S.Redir {S.redirDesc=Nothing , S.redirOp=S.OutAnd , S.redirTarget=[W.Char '2']}) = addToPipe expr (fc "stderr.replaceStdout" []) convertRedir expr (S.Redir {S.redirDesc=Nothing , S.redirOp=S.Out , S.redirTarget=file}) = addToPipe expr (fc "stdout.writeTo" [convertWord file]) convertRedir expr (S.Redir {S.redirDesc=Just(S.IONumber 2) , S.redirOp=S.Out , S.redirTarget=file}) = addToPipe expr (fc "stderr.writeTo" [convertWord file]) convertRedir _ r = dieT "cr" r -- | Commands convertShellCommand :: S.ShellCommand -> Expr convertShellCommand (S.If cond l1 Nothing) = If (convertList cond) (convertList l1) (listOrExpr [Nop]) -- TODO: is Maybe nicer here? 
convertShellCommand (S.If cond l1 (Just l2)) = If (convertList cond) (convertList l1) (convertList l2) convertShellCommand (S.SimpleCommand as ws) = convertSimpleCommand as ws convertShellCommand (S.AssignBuiltin w es) | w == W.fromString "local" = listOrExpr (map convertAssignOrWord es) | otherwise = dieT "ccscab" w convertShellCommand (S.Cond e) = convertCondExpr e convertShellCommand (S.FunctionDef name cmds) = postProcessFunctionDefs (FunctionDefinition (FuncDef name [] (convertList cmds))) convertShellCommand (S.For v wl cmds) = For (LVar v) (convertWordList wl) (convertList cmds) convertShellCommand (S.While expr cmds) = For AnonVar (convertList expr) (convertList cmds) convertShellCommand (S.Group list) = convertList list convertShellCommand x = dieT "cc" x -- | SimpleCommands (assignments) convertSimpleCommand :: [S.Assign] -> [W.Word] -> Expr convertSimpleCommand as [] = listOrExpr (map convertAssign as) convertSimpleCommand as ws = listOrExpr (map convertAssign as ++ [convertWords ws]) -- TODO: parameter doesn't take subscript -- TODO: assignment doesn't handle += convertAssign :: S.Assign -> Expr convertAssign (S.Assign l S.Equals r) = Assignment (convertAssignLeft l) (convertAssignRight r) convertAssign (S.Assign _ op _) = dieT "another op" op convertAssignLeft :: W.Parameter -> LValue convertAssignLeft (W.Parameter name Nothing) = LVar name convertAssignLeft (W.Parameter name (Just sub)) = LSubscript (Variable name) (convertWord sub) convertAssignRight :: S.RValue -> Expr convertAssignRight (S.RValue r) = convertWord r convertAssignRight (S.RArray r) = convertRArray r convertAssignOrWord :: Either S.Assign W.Word -> Expr convertAssignOrWord = either convertAssign convertWord convertRArray :: [(Maybe W.Word, W.Word)] -> Expr convertRArray v = if isHash v || isArray v then if isHash v then convertHash v else convertArray v else dieT "mixed hash/array" $ S.RArray v isHash :: [(Maybe W.Word, W.Word)] -> Bool isHash v = and $ map (Maybe.isJust . fst) v isArray :: [(Maybe W.Word, W.Word)] -> Bool isArray v = and $ map (Maybe.isNothing . fst) v convertHash :: [(Maybe W.Word, W.Word)] -> Expr convertHash v = dieT "hash" $ S.RArray v convertArray :: [(Maybe W.Word, W.Word)] -> Expr convertArray v = Array $ map (convertWord . snd) v -- | WordLists and Words and Spans convertWordList :: S.WordList -> Expr convertWordList S.Args = Debug "$@" -- TODO convertWordList (S.WordList wl) = listOrExpr (map convertWord wl) convertWords :: [W.Word] -> Expr convertWords ([W.Char '['] : ws) | (convertString . last $ ws) == "]" = convertTest . 
init $ ws | otherwise = dieT "cw" ws convertWords (w:ws) = convertFunctionCall (convertWord w) (map convertWord ws) convertWords ws@[] = dieT "cwEmpty" ws convertWord :: W.Word -> Expr convertWord ss = cConcat [convertSpan s | s <- ss] convertSpan :: W.Span -> Expr convertSpan (W.Char c) = Str [c] convertSpan (W.Double w) = cConcat [convertWord w] convertSpan (W.Single w) = cConcat [convertWord w] convertSpan (W.Escape c) = Str [c] convertSpan (W.CommandSubst c) = parseString c convertSpan (W.ProcessSubst W.ProcessIn w) = addToPipe (Exec w) (fc "sys.procSubst" []) convertSpan (W.ParamSubst W.Brace {W.indirect = False, W.parameter = (W.Parameter p Nothing)}) = Variable p convertSpan (W.ParamSubst W.Length {W.parameter = (W.Parameter p Nothing)}) = fc "string.length" [Variable p] convertSpan (W.ParamSubst W.Bare {W.parameter = (W.Parameter p Nothing)}) = Variable p convertSpan (W.ParamSubst W.Delete {W.indirect = False , W.parameter = (W.Parameter p Nothing) , W.longest = longest , W.deleteDirection = direction , W.pattern = pattern }) = fc ("string." ++ name) args where name = if direction == W.Front then "replaceFront" else "replaceBack" args = [Variable p, convertWord pattern] ++ longestArgs longestArgs = if longest then [] else [Str "--nongreedy"] -- TODO: indirect? convertSpan (W.Backquote w) = parseWord w convertSpan w = dieT "cs" w -- like convertWord but we expect a string convertString :: W.Word -> String convertString w = case convertWord w of Str s -> s _ -> die "not a string" w -- | Functions convertFunctionCall :: Expr -> [Expr] -> Expr convertFunctionCall (Str name) args = fc name args convertFunctionCall fn args = (IndirectFunctionCall fn args) -- | CondExprs convertCondExpr :: C.CondExpr W.Word -> Expr convertCondExpr (C.Not e) = Unop Not (convertCondExpr e) convertCondExpr (C.And a b) = Binop (convertCondExpr a) And (convertCondExpr b) convertCondExpr (C.Or a b) = Binop (convertCondExpr a) Or (convertCondExpr b) convertCondExpr (C.Unary uop w) = Pipe [convertWord w, fc (uop2FunctionName uop) []] convertCondExpr (C.Binary l C.StrEQ r) = Binop (convertWord l) Equals (convertWord r) convertCondExpr (C.Binary l C.ArithEQ r) = Binop (convertWord l) Equals (convertWord r) convertCondExpr (C.Binary l C.StrNE r) = Unop Not (Binop (convertWord l) Equals (convertWord r)) convertCondExpr (C.Binary l C.ArithNE r) = Unop Not (Binop (convertWord l) Equals (convertWord r)) convertCondExpr (C.Binary l C.StrLT r) = Binop (convertWord l) LessThan (convertWord r) convertCondExpr (C.Binary l C.ArithLT r) = Binop (convertWord l) LessThan (convertWord r) convertCondExpr (C.Binary l C.StrGT r) = Binop (convertWord l) GreaterThan (convertWord r) convertCondExpr (C.Binary l C.ArithLE r) = Binop (convertWord l) LessThanOrEquals (convertWord r) convertCondExpr (C.Binary l C.ArithGT r) = Binop (convertWord l) GreaterThan (convertWord r) convertCondExpr (C.Binary l C.ArithGE r) = Binop (convertWord l) GreaterThanOrEquals (convertWord r) convertCondExpr (C.Binary l bop r) = fc (bop2FunctionName bop) [convertWord l, convertWord r] -- | Function names for BinaryOps bop2FunctionName :: C.BinaryOp -> String bop2FunctionName C.SameFile = "file.same?" bop2FunctionName C.NewerThan = "file.newer_than?" bop2FunctionName C.OlderThan = "file.older_than?" 
bop2FunctionName C.StrMatch = "re.matches" bop2FunctionName x = dieT "binop" x -- | Function names for UnaryOps uop2FunctionName :: C.UnaryOp -> String uop2FunctionName C.BlockFile = "file.isBlockFile" uop2FunctionName C.CharacterFile = "file.isCharacterFile" uop2FunctionName C.Directory = "file.is_directory?" uop2FunctionName C.FileExists = "file.exists?" uop2FunctionName C.RegularFile = "file.is_regular_file?" uop2FunctionName C.SetGID = "file.isSetGID" uop2FunctionName C.Sticky = "file.isSticky" uop2FunctionName C.NamedPipe = "file.isNamedPipe" uop2FunctionName C.Readable = "file.isReadable" uop2FunctionName C.FileSize = "file.isFileSize" uop2FunctionName C.Terminal = "file.isTerminal" uop2FunctionName C.SetUID = "file.isSetUID" uop2FunctionName C.Writable = "file.isWritable" uop2FunctionName C.Executable = "file.isExecutable" uop2FunctionName C.GroupOwned = "file.isGroupOwned" uop2FunctionName C.SymbolicLink = "file.isSymbolicLink" uop2FunctionName C.Modified = "file.isModified" uop2FunctionName C.UserOwned = "file.isUserOwned" uop2FunctionName C.Socket = "file.isSocket" uop2FunctionName C.ZeroString = "string.blank?" uop2FunctionName C.NonzeroString = "string.nonblank?" uop2FunctionName a = die "uop2FunctionName" a -- TODO: these ones are a bit odd -- uop2FunctionName C.Optname = -- uop2FunctionName C.Varname = -- | Tests (handles test, '[' and '[[') convertTest :: [W.Word] -> Expr -- convertTest receives a list of words. Semantically, Bash would evaluate many -- of those words (expanding arguments and parameters, etc), because passing it -- to `test`. So semantically, we can't parse this correctly. -- For example, what does `[ "$x" a ]` do? -- parseTestExpr is really designed for run-time use, and doesn't produce parsed -- words. For example [ "x" = "`uname`" ] won't result in a CommandSubst with -- "uname" in it, because Bash doesn't actually do that, semantically. But -- that's what we want! -- Another apporach is to wrap the string in [[. Unfortunately, [[ doesn't actually work the same as [, for example -a doesn't work the same. -- I think the correct approach is to parse it, then reparse the words again. convertTest ws = case condExpr of Left err -> Debug $ "doesn't parse" ++ show err ++ show hacked Right e -> convertCondExpr . fmap parseString2Word $ e where condExpr = C.parseTestExpr strs strs = map W.unquote hacked hacked = hackTestExpr ws -- | break the bash rules to fix up mistakes hackTestExpr :: [W.Word] -> [W.Word] -- [ -a asd ] works, but [ ! -a asd] doesnt because -a is the "and" operator. -e -- does the same though. hackTestExpr ws@(n:a:rest) | n == W.fromString "!" 
&& a == W.fromString "-a" = n : W.fromString "-e" : rest | otherwise = ws hackTestExpr ws = ws -- | Turn lists of Strings or string components into a Str foldStrs :: [Expr] -> [Expr] foldStrs (Str a : Str b : ss) = foldStrs (Str (a ++ b) : ss) foldStrs (s : ss) = s : foldStrs ss foldStrs ss = ss cConcat :: [Expr] -> Expr cConcat es = cConcat0 (foldStrs es) cConcat0 :: [Expr] -> Expr cConcat0 [] = Str "" cConcat0 [e] = e cConcat0 es = Concat es --- Transformations transformFixed :: (Data.Data.Data a, Eq a) => (Expr -> Expr) -> a -> a transformFixed f = rewriteBi g where g x = let y = f x in if x == y then Nothing else Just y -- | Perform transformations across the AST (everywhere) postProcess :: Program -> Program postProcess = transformFixed f where -- | Convert `while read input` into `for $input sys.read()` f (For AnonVar (Assignment v rv) block) = For v rv block -- | Convert `read input` into `input = sys.read()` f (FunctionCall "read" [Str var]) = Assignment (LVar var) (fc "sys.read" []) -- | Convert `type wget` into `sys.onPath wget` f (FunctionCall "type" args) = fc "sys.onPath" args -- | Convert exit and it's arguments f (FunctionCall "exit" args) = fc "sys.exit" (map convertExitArg args) -- | String match and it's arguments f binop@(Binop a Equals (Str b)) = case reverse b of ('*':rest) -> fc "string.matches?" [a, (Str $ (reverse rest) ++ ".*")] _ -> binop -- TODO: convert this into some sort of exception f (FunctionCall "set" [Str "-e"]) = Nop f (FunctionCall "set" [Str "+e"]) = Nop f (FunctionCall "stderr.writeTo" [Str "/dev/null"]) = fc "stderr.ignore" [] f (FunctionCall "stdout.writeTo" [Str "/dev/null"]) = fc "stdout.ignore" [] -- TODO write a simple awk parser f (FunctionCall "awk" [Str "{print $0}"]) = fc "string.column" [Integer 0] f (FunctionCall "awk" [Str "{print $1}"]) = fc "string.column" [Integer 1] f (FunctionCall "awk" [Str "{print $2}"]) = fc "string.column" [Integer 2] f (FunctionCall "awk" [Str "{print $3}"]) = fc "string.column" [Integer 3] f (FunctionCall "awk" [Str "{print $4}"]) = fc "string.column" [Integer 4] f (FunctionCall "awk" [Str "{print $5}"]) = fc "string.column" [Integer 5] f (FunctionCall "awk" [Str "{print $6}"]) = fc "string.column" [Integer 6] f (FunctionCall "awk" [Str "{print $7}"]) = fc "string.column" [Integer 7] f (FunctionCall "awk" [Str "{print $8}"]) = fc "string.column" [Integer 8] f (FunctionCall "awk" [Str "{print $9}"]) = fc "string.column" [Integer 9] f (FunctionCall "awk" [Str "{print $0}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 0]] f (FunctionCall "awk" [Str "{print $1}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 1]] f (FunctionCall "awk" [Str "{print $2}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 2]] f (FunctionCall "awk" [Str "{print $3}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 3]] f (FunctionCall "awk" [Str "{print $4}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 4]] f (FunctionCall "awk" [Str "{print $5}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 5]] f (FunctionCall "awk" [Str "{print $6}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 6]] f (FunctionCall "awk" [Str "{print $7}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 7]] f (FunctionCall "awk" [Str "{print $8}", file]) = Pipe [fc "file.read" [file], fc "string.column" [Integer 8]] f (FunctionCall "awk" [Str "{print $9}", file]) = Pipe [fc "file.read" [file], fc "string.column" 
[Integer 9]] -- regexes use Pipes f (FunctionCall "re.matches" [val, arg]) = Pipe [val, fc "re.matches" [arg]] -- TODO: handle escaping with -e and -E properly f (Pipe (FunctionCall "echo" [Str "-e", str] : rest)) = Pipe (str : rest) f (Pipe (FunctionCall "echo" [Str "-E", str] : rest)) = Pipe (str : rest) f (Pipe (FunctionCall "echo" [Str "-n", str] : rest)) = Pipe (str : rest) -- we drop the implicit \n - I think that's safe f (Pipe ((FunctionCall "echo" [arg]) : rest)) = Pipe $ (arg : rest) -- fold within pipes f (Pipe es) = Pipe $ foldPipe es f x = x foldPipe exprs = foldr merge [] exprs merge (FunctionCall "stdout.ignore" []) ((FunctionCall "stderr.intoStdout" []) : cs) = (fc "stderr.replaceStdout" [] : cs) -- pipes within pipes merge (Pipe as) bs = as ++ bs merge a bs = (a:bs) convertExitArg (Str v) = Integer $ read v convertExitArg v = v postProcessFunctionDefs :: Expr -> Expr postProcessFunctionDefs = transformFixed $ f where f (FunctionDefinition (FuncDef name [] (List (Assignment (LVar lv) (Variable "1"): rest )))) = FunctionDefinition (FuncDef name [FunctionParameter lv] $ List rest) f x = x postProcessGlobals :: Program -> Program postProcessGlobals (Program (List exprs)) = Program $ List (map postProcessGlobalExpr exprs) postProcessGlobals (Program expr) = Program $ postProcessGlobalExpr expr postProcessGlobalExpr :: Expr -> Expr postProcessGlobalExpr fd@(FunctionDefinition _) = fd postProcessGlobalExpr e = transformFixed g $ transformFixed f e where -- | Comparing $# with something should convert to an int f (Binop v@(Variable "#") Equals (Str s)) = Binop v Equals (Integer $ read s) -- | Convert $1, $2, etc to sys.argv[0] etc f (Variable "0") = Variable "sys.command" f (Variable "1") = Subscript (Variable "sys.argv") (Integer 0) f (Variable "2") = Subscript (Variable "sys.argv") (Integer 1) f (Variable "3") = Subscript (Variable "sys.argv") (Integer 2) f (Variable "4") = Subscript (Variable "sys.argv") (Integer 3) f (Variable "5") = Subscript (Variable "sys.argv") (Integer 4) f (Variable "6") = Subscript (Variable "sys.argv") (Integer 5) f (Variable "7") = Subscript (Variable "sys.argv") (Integer 6) f (Variable "8") = Subscript (Variable "sys.argv") (Integer 7) f (Variable "9") = Subscript (Variable "sys.argv") (Integer 8) f (Binop s@(Subscript (Variable "sys.argv") _) op (Str "")) = (Binop s op Null) f x = x -- | Convert $# to sys.argv.length -- | Convert $@ to sys.argv g (Variable "#") = Pipe [Variable "sys.argv", fc "length" []] g (Variable "@") = Variable "sys.argv" g x = x -- || TODO: -- | possibly buggy parsing -- parse DoubleQuoted strings -- heredocs dont remove leading tabs for <<- -- when parsing AssignBuiltin, lists of words wont be handled as lists of words -- return is here as a function name, not a control flow thing -- shell redirection -- | handle builtins -- $# -> sys.argv | length -- process substitution (pipe into sys.procSub) -- echo (-e) -- basename -- printf -- exit (convert to a number) -- $#, $1, etc -- nullglob and dotglob -- getopts -- set -e - how to allow failure to be handled well? Exceptions? We currently -- just rip them out for now -- type -- backticks and $()s -- | replace shell utilities that are tricky to use -- awk -- sed -- cut -- expr -- grep? -- readlink? -- tr -- xargs? 
-- seq -- curl/wget into builtin -- | Handle bash idioms -- when it expects env args (eg, undefined vars being compared to -z, all caps), -- use sys.argv instead of the var itself -- __n=$(cat) - reading from stdin -- | obvious improvements -- keep comments -- convert lists of echos into a single heredoc -- convert globals into params being passed around -- convert assignments of "0" and "1" into real bools -- find BASH_REMATCH and convert it into proper usage -- find duplicated code -- variable variables into hashtables -- turn bash RE into PCRE -- turn nested if/else into switches -- exit code into integer -- if IFS is set, all bets are off parseWord :: W.Word -> Expr parseWord word = parseString (W.unquote word) parseString :: String -> Expr parseString source = case translate "src" source of Left err -> error ("nested parse of " ++ source ++ " failed: " ++ show err) Right (Program expr) -> expr parseString2Word :: String -> W.Word parseString2Word s = case Text.Parsec.parse Language.Bash.Parse.Word.word s s of Left err -> error ("nested parse of " ++ s ++ " failed: " ++ show err) Right word -> word translate :: String -> String -> Either ParseError Program translate name source = do pt <- BashParse.parse name source let _ = Debug.groom "pt" "Bash parse tree" pt Right $ postProcess $ postProcessGlobals $ Program $ convertList pt
pbiggar/rash
src/Rash/IR/Bash2Rough.hs
bsd-3-clause
21,423
0
18
4,700
6,835
3,504
3,331
344
40
{-# LANGUAGE TypeFamilies #-} module FPNLA.Operations.Parameters( -- * Elements Elt(..), -- * Strategies and contexts StratCtx(), -- * Result type -- | In BLAS it's common that operations in higher levels use operations in the lower levels, so, an operation in level three that by its signature manipulates matrices only, internally uses level two operations that manipulates vectors. In order to avoid the /show . read/ problem, the type of the vector (or any other internal data type) must appear in the signature of an operation. -- To solve the problem we use phantom types to pass the internally used types to the Haskell type system. ResM(), ResV(), ResS(), blasResultM, blasResultV, blasResultS, getResultDataM, getResultDataV, getResultDataS, -- * Miscellaneous TransType(..), UnitType(..), TriangType(..), unTransT, unUnitT, unTriangT, elemTrans_m, dimTrans_m, elemSymm, dimTriang, elemUnit_m, dimUnit_m, elemTransUnit_m, dimTransUnit_m, transTrans_m ) where import FPNLA.Matrix(Matrix(..)) import Data.Complex (Complex, conjugate) import Data.Tuple (swap) -- | This class represents the elements that can be used in the BLAS operations. -- The elements in BLAS are real or complex numbers, so we provide default instances for the Haskell 'Double', 'Float' and 'Complex' types. class (Eq e, Floating e) => Elt e where -- | Returns the conjugate of a number. For real numbers it's the identity function and for complex numbers it's the common 'Complex.conjugate' function. getConjugate :: e -> e getConjugate = id instance Elt Double instance Elt Float instance (RealFloat e) => Elt (Complex e) where getConjugate = conjugate -- | This type family is used to represent the /context/ of an operation. -- A particular implementation is a combination of an algorithm and a parallelism technique, and we call it a /strategy/. A particular strategy may need particular information to execute. For example, an operation that computes the matrix-matrix multiplication by splitting the matrices in blocks must require the size of the blocks. -- With this context we allows to pass any additional information that the operation needs to execute as parameters, but maintaining a common signature. -- The /s/ type parameter is the strategy so, there must exist a Haskell data type to represent a particular strategy. type family StratCtx s :: * -- | The 'ResM' data type is used as result of level three BLAS operations and returns a matrix /m/ of elements /e/ and contains the strategy /s/ and vector /v/ as phantom types. data ResM s (v :: * -> *) m e = ResM { unResM :: m e } deriving (Show) -- | The 'ResV' data type is used as result of level two BLAS operations and returns a vector /v/ of elements /e/ and contains the strategy /s/ as phantom types. data ResV s v e = ResV { unResV :: v e } deriving (Show) -- | The 'ResS' data type is used as result of level one BLAS operations and returns an scalar /e/ and contains the strategy /s/ as phantom types. data ResS s e = ResS { unResS :: e } deriving (Show) -- | Wrap a matrix into a 'ResM'. blasResultM :: m e -> ResM s v m e blasResultM = ResM -- | Unwrap a matrix from a 'ResM'. getResultDataM :: ResM s v m e -> m e getResultDataM = unResM -- | Wrap a vector into a 'ResV'. blasResultV :: v e -> ResV s v e blasResultV = ResV -- | Unwrap a vector from a 'ResV'. getResultDataV :: ResV s v e -> v e getResultDataV = unResV -- | Wrap a scalar into a 'ResS'. blasResultS :: e -> ResS s e blasResultS = ResS -- | Unwrap a scalar from a 'ResS'. 
getResultDataS :: ResS s e -> e getResultDataS = unResS -- | Indicates if a matrix must be considered as normal, transposed or transposed conjugated. -- This is part of the common flags in the BLAS operation signatures and it's useful to work with a transposed matrix without really computing the transposed matrix. data TransType m = Trans m | NoTrans m | ConjTrans m deriving (Eq, Show) -- | Indicates if a matrix must be considered as unitary or not. An unitary matrix is a matrix that contains ones in the diagonal. -- This is part of the common flags in the BLAS operation signatures. data UnitType m = Unit m | NoUnit m deriving (Eq, Show) -- | Indicates that a matrix is symmetric and with which triangular part of the matrix the operation is going to work ('Upper' or 'Lower'). -- The operation only will see the indicated part of the matrix and should not try to access the other part. -- This is part of the common flags in the BLAS operation signatures. data TriangType m = Lower m | Upper m deriving (Eq, Show) -- | Given a data type flagged by a TransType, returns a pair containing the TransType constructor and the data type. unTransT :: TransType a -> (b -> TransType b, a) unTransT (Trans a) = (Trans, a) unTransT (NoTrans a) = (NoTrans, a) unTransT (ConjTrans a) = (ConjTrans, a) -- | Given a data type flagged by a UnitType, returns a pair containing the UnitType constructor and the data type. unUnitT :: UnitType a -> (b -> UnitType b, a) unUnitT (Unit a) = (Unit, a) unUnitT (NoUnit a) = (NoUnit, a) -- | Given a data type flagged by a TriangType, returns a pair containing the TriangType constructor and the data type. unTriangT :: TriangType a -> (b -> TriangType b, a) unTriangT (Lower a) = (Lower, a) unTriangT (Upper a) = (Upper, a) -- | Given an /i,j/ position and a TransType flagged matrix, returns the element in that position without computing the transpose. elemTrans_m :: (Elt e, Matrix m e) => Int -> Int -> TransType (m e) -> e elemTrans_m i j (NoTrans m) = elem_m i j m elemTrans_m i j (Trans m) = elem_m j i m elemTrans_m i j (ConjTrans m) = getConjugate $ elem_m j i m -- | Given a TransType flagged matrix, returns the dimension of the matrix without computing the transpose. dimTrans_m :: (Matrix m e) => TransType (m e) -> (Int, Int) dimTrans_m (NoTrans m) = dim_m m dimTrans_m (ConjTrans m) = swap $ dim_m m dimTrans_m (Trans m) = swap $ dim_m m -- | Given an /i,j/ position and a TransType flagged matrix, returns the element in that position only accessing the part indicated by the TransType. elemSymm :: (Matrix m e) => Int -> Int -> TriangType (m e) -> e elemSymm i j (Upper m) | i > j = elem_m j i m | otherwise = elem_m i j m elemSymm i j (Lower m) | i > j = elem_m i j m | otherwise = elem_m j i m -- | Given a TransType flagged matrix, returns the dimension of the matrix. dimTriang :: (Matrix m e) => TriangType (m e) -> (Int, Int) dimTriang = dim_m . snd . unTriangT -- | Given an /i,j/ position and a UnitType flagged matrix, returns the element in that position. If the matrix is flagged as Unit and /i == j/ (the element is in the diagonal) returns one. elemUnit_m :: (Elt e, Matrix m e) => Int -> Int -> UnitType (m e) -> e elemUnit_m i j (Unit m) | i == j = 1 | otherwise = elem_m i j m elemUnit_m i j (NoUnit m) = elem_m i j m -- | Given a UnitType flagged matrix, returns the dimension of the matrix. 
dimUnit_m :: (Matrix m e) => UnitType (m e) -> (Int, Int) dimUnit_m (Unit m) = dim_m m dimUnit_m (NoUnit m) = dim_m m -- | Given an /i,j/ position and a TransType-UnitType flagged matrix, returns the element in that position without computing the transpose. elemTransUnit_m :: (Elt e, Matrix m e) => Int -> Int -> TransType (UnitType (m e)) -> e elemTransUnit_m i j (NoTrans pmA) = elemUnit_m i j pmA elemTransUnit_m i j (Trans pmA) = elemUnit_m j i pmA elemTransUnit_m i j (ConjTrans pmA) = getConjugate $ elemUnit_m j i pmA -- | Given a TransType-UnitType flagged matrix, returns the dimension of the matrix. dimTransUnit_m :: Matrix m e => TransType (UnitType (m e)) -> (Int, Int) dimTransUnit_m = dimUnit_m . snd . unTransT -- | Given a TransType flagged matrix, computes and returns its transpose. transTrans_m :: (Elt e, Matrix m e) => TransType (m e) -> m e transTrans_m (NoTrans m) = m transTrans_m (ConjTrans m) = map_m getConjugate $ transpose_m m transTrans_m (Trans m) = transpose_m m
mauroblanco/fpnla
src/FPNLA/Operations/Parameters.hs
bsd-3-clause
8,055
0
13
1,631
1,667
902
765
111
1
-- http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=ITP1_4_B -- Circle -- input: 2 -- output: 12.566371 12.566371 import Control.Applicative import Data.Fixed main = do r <- (read :: String -> Double) <$> getLine putStrLn $ unwords $ show . toFixedE9 <$> [(pi*r^2), (2*pi*r)] toFixedE9 :: Double -> Fixed E9 toFixedE9 r = realToFrac r :: Fixed E9
ku00/aoj-haskell
src/ITP1_4_B.hs
bsd-3-clause
368
0
11
64
117
63
54
7
1
module Frontend ( ec2Properties , s3Properties , r53Properties , cfProperties , rdsProperties , dbProperties , ecProperties , vpcProperties , lsProperties ) where import Frontend.Properties.EC2 import Frontend.Properties.S3 import Frontend.Properties.R53 import Frontend.Properties.CF import Frontend.Properties.RDS import Frontend.Properties.DDB import Frontend.Properties.EC import Frontend.Properties.VPC import Frontend.Properties.LS
Rizary/awspi
Lib/Frontend.hs
bsd-3-clause
465
0
4
67
87
58
29
20
0
{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} module Visualization.Queue.RealTime (rtQueueVis) where import Visualization.Common import VisualizationData.Thunk import qualified Visualization.Queue.Generic as VQG import qualified VisualizationData.Queue.RealTime as RTQ import Control.Concurrent (threadDelay, forkIO) import Control.DeepSeq (NFData) import Control.Monad (forM_, void) import Data.Maybe (fromMaybe) import Data.Monoid ((<>)) import Data.Typeable (Typeable) import GHC.Generics (Generic) import React.Flux newtype RTQueueVisState = RTQueueVisState { _unRTQueueVisState :: VQG.QueueVisState RTQ.RTQueue } deriving (Typeable) pattern RTQVS nextInt queue hist = RTQueueVisState (VQG.QueueVisState nextInt queue hist) initialState :: RTQueueVisState initialState = RTQueueVisState VQG.initialState data RTQueueAction = SimpleQueueAction VQG.QueueAction | ScheduleStep deriving (Show, Eq, Typeable, Generic, NFData) forceStepFuture :: IO () forceStepFuture = void $ forkIO $ do threadDelay (700*1000) alterStore queueStore ScheduleStep instance StoreData RTQueueVisState where type StoreAction RTQueueVisState = RTQueueAction transform (SimpleQueueAction VQG.Tail) (RTQVS k (RTQ.schedStep -> q) hist) = do forceStepFuture pure $ RTQVS k (fromMaybe q (RTQ.tail' q)) (q:hist) transform (SimpleQueueAction VQG.Snoc) (RTQVS k (RTQ.schedStep -> q) hist) = do forceStepFuture pure $ RTQVS (k+1) (RTQ.snoc' q k) (q:hist) -- remaining two SimpleQueueAction's: back and clear transform (SimpleQueueAction action) (RTQueueVisState state) = RTQueueVisState <$> transform action state transform ScheduleStep (RTQVS k q hist) = pure (RTQVS k (RTQ.schedStep q) hist) queueStore :: ReactStore RTQueueVisState queueStore = mkStore initialState dispatch :: RTQueueAction -> [SomeStoreAction] dispatch a = [SomeStoreAction queueStore a] rtQueueVis :: ReactView () rtQueueVis = defineControllerView "rtqueue-visualization" queueStore $ \(RTQueueVisState queueState@(VQG.QueueVisState _ rtq hist)) _ -> div_ $ do VQG.renderControls queueState (dispatch . SimpleQueueAction) div_ $ renderRTQueue rtq forM_ hist $ div_ . renderRTQueue renderLazyList :: Show a => RTQ.LazyListRef a -> Int -> ReactElementM handler () renderLazyList = go True where schedWrapper schedHtml = cldiv_ "list schedule" $ do clspan_ "len-list-name" "schedule" schedHtml go isToplevel thunk ix = (if ix == 0 || (isToplevel && ix == -1) then schedWrapper else id) $ case readThunk thunk of Left (RTQ.AppendReverseThunk xs rs ys) -> cldiv_ "list thunk" $ do cldiv_ "list" $ go True xs (-10) code_ " ++ reverse " renderList ys code_ " ++ " go True (wrapThunk rs) (-10) Right RTQ.Nil -> if isToplevel then cldiv_ "list empty" mempty else mempty Right (RTQ.Cons x xs) -> (if isToplevel then cldiv_ "list" else id) $ do clspan_ "list-cell" $ clspan_ "item" (elemShow x) go False xs (ix-1) renderRTQueue :: Show a => RTQ.RTQueue a -> ReactElementM handler () renderRTQueue queue@(RTQ.RTQueue front frontL rear rearL _ _) = do cldiv_ "front" $ do clspan_ "len-list-name" "front" cldiv_ "len-list" $ do clspan_ "len-list-length" $ "(length: " <> elemShow frontL <> ")" renderLazyList front (RTQ.scheduleIndex queue) cldiv_ "rear" $ do clspan_ "len-list-name" "rear" renderListWithLen rear rearL
timjb/pfds-visualizations
src/Visualization/Queue/RealTime.hs
bsd-3-clause
3,731
0
17
781
1,127
568
559
97
6
module Patch where import Catalogue import Control.Applicative import Control.Monad import CSV import qualified Data.EdgeLabeledGraph as G -- TMP! import Data.List import Data.Maybe import qualified Data.Set as S import System.Directory import System.Environment import System.FilePath import Data.Tree import Debug.Trace type Patch = (Catalogue, [(ItemId,Op)]) data Op = ReplaceBy [ItemId] | Delete | Resolve | Modify | Add deriving (Eq,Ord,Show,Read) readPatch :: String -> Either ParseError Patch readPatch s = mk . readItemForest . dropWhile isHeader <$> readCSV s where mk ts = ( emptyCat { catAreas = G.unions $ map (G.fromTree (const SubItem) . fixTreeTopicIds . fmap fst) ts } , mapMaybe readOp . filter ((==KU) . itemType . fst) $ concatMap flatten ts ) isHeader [] = True isHeader (x:_) = isNothing $ readItemId' x readOp :: (Item,[Field]) -> Maybe (ItemId,Op) readOp (x, zs) = case zs !!! 1 of Nothing -> Nothing Just "DEL" -> Just (x', Delete) Just "ADD" -> Just (x', Add) Just xs -> Just $ (x', let ys = readItemIds xs in if ys==[itemId x] then Resolve else ReplaceBy ys) where x' = itemId x loadPatch :: FilePath -> IO (Either ParseError Patch) loadPatch f = readPatch <$> readFile f data Diff = Diff { removed :: [ItemId] , added :: [ItemId] , modified :: [ItemId] , resolved :: [ItemId] , replaced :: [(ItemId,ItemId)] } deriving (Eq,Ord,Show) patchDiff :: Catalogue -> Patch -> Diff patchDiff cat (cmp,ops) = Diff { removed = removed , added = added , modified = modified , resolved = resolved , replaced = replaced } where catUnits = knowledgeUnits cat cmpUnits = knowledgeUnits cmp catUnits' = map itemId catUnits cmpUnits' = map itemId cmpUnits removed = [x | (x,Delete) <- ops, x `elem` catUnits'] add = [x | (x,Add) <- ops] replaced = [(x,y) | (x,ReplaceBy ys) <- ops, x `elem` catUnits', y <- ys] replaceTo = nub $ map snd replaced resolved = [x | (x,Resolve) <- ops] -- add a unit if (1) pointed to or (2) not changed, but new added = (nub $ replaceTo ++ add) \\ catUnits' modified = filter (not . fromJust . identicalItems cat cmp) $ ((cmpUnits' `intersect` catUnits') \\ removed) \\ added identicalItems :: Catalogue -> Catalogue -> ItemId -> Maybe Bool identicalItems c1 c2 x = liftA2 (==) (getItemTree c1 x) (getItemTree c2 x) -- where f = fmap (\x -> x { itemRemark = Nothing, -- itemEditors = if itemType x == KT then [] -- else itemEditors x }) patch :: Catalogue -> Patch -> (Catalogue, Diff) patch c p@(cmp,_) = (c6, d) where d = patchDiff c p c2 = removeItems' c $ removed d adds = map fixTreeTopicIds . treesWithItems cmp $ added d ++ modified d c3 = foldl' addItemTree c2 adds overlaps = overlapLinks c3 resolved' = resolved d ++ map fst (replaced d) units = map itemId $ knowledgeUnits cmp overlaps2 = filter (\(x1,x2,_) -> (x1 `elem` resolved' && x2 `elem` units) || (x1 `elem` units && x2 `elem` resolved')) overlaps c4 = foldl' removeLink c3 overlaps2 c5 = combineEditorsAndOverlaps c4 p c6 = foldl' (\c (x1,x2) -> fromMaybe c (replaceItem' c x1 x2)) c5 $ replaced d overlapLinks :: Catalogue -> [(ItemId,ItemId,Link)] overlapLinks = filter (\(_,_,l) -> l==Overlaps) . links' -- for all ReplaceBy targets, take ReplaceBy source editors -- for all ReokaceBy targets, take ReplaceBy -- (remark: inefficient, because diff is computed again) combineEditorsAndOverlaps :: Catalogue -> Patch -> Catalogue combineEditorsAndOverlaps c p@(cmp,_) = c3 where Diff rem add mod ret rep = patchDiff c p rep' = mapMaybe (\(x1,x2) -> liftA2 (,) (getItem c x1) (getItem cmp x2)) rep addEditors x1 x2 = x2 { itemEditors = sort . 
nub $ itemEditors x1 ++ itemEditors x2 } links' = substitute $ overlapLinks c c2 = foldl' (\c (x1,x2) -> modifyItem c (itemId x2) (addEditors x1)) c rep' c3 = fromJust $ addLinks c2 links' substitute xs = [(x1',x2',l) | (x1,x2,l) <- xs, let x1' = fromMaybe x1 $ lookup x1 rep, let x2' = fromMaybe x2 $ lookup x2 rep, x1'/=x2', x1 `elem` map fst rep || x2 `elem` map fst rep ] -- patch is bogus if it operates on a (non-added) unit that does not exist in the -- catalogue or maps to a unit that does not exist in the component nor in the -- catalogue bogusPatch :: Catalogue -> Patch -> Bool bogusPatch c p@(cmp,ops) = (opItems `intersect` catUnits /= opItems) || (replaceTo `intersect` (catUnits ++ cmpUnits) /= replaceTo) where catUnits = map itemId $ knowledgeUnits c cmpUnits = map itemId $ knowledgeUnits cmp opItems = map fst ops \\ added replaceTo = nub $ [y | (_,ReplaceBy ys) <- ops, y <- ys] added = [x | (x,Add) <- ops] patchAndLog :: Catalogue -> FilePath -> IO (Catalogue,CSV) patchAndLog c f = do Right p <- loadPatch f let (c2,d) = patch c p Diff rem add mod res rep = d xs = [[patchFile,showItemId x,"Removed"] | x <- rem] ++ [[patchFile,showItemId x,"Added"] | x <- add] ++ [[patchFile,showItemId x,"Modified"] | x <- mod] ++ [[patchFile,showItemId x,"Declared as non-overlapping (within this component)"] | x <- res] ++ [[patchFile,showItemId x,"Replaced by " ++ showItemId y] | (x,y) <- rep] return (c2,zipWith (\x xs -> show x : xs) [1..] xs) where patchFile = takeFileName f dir = "/home/jan/fer3/fer3-catalogue/data/catalogue/v0.3" catFile = dir </> "FER3-KC-v0.3.0.csv" dirInbox = dir </> "components-csv-resolved" dirOk = dirInbox </> "ok" dirBogus = dirInbox </> "bogus" newCatFile = "FER3-KC-v0.4.0.csv" dirOut = dir </> "resolved" sortPatches = do Right c <- loadCatalogue catFile fs <- filter (".csv" `isSuffixOf`) <$> getDirectoryContents dirInbox forM_ fs $ \f -> do putStr $ f ++ ": " Right p <- loadPatch $ dirInbox </> f if bogusPatch c p then do putStrLn "BOGUS" renameFile (dirInbox </> f) (dirBogus </> f) else do putStrLn "OK" renameFile (dirInbox </> f) (dirOk </> f) applyPatches = do Right c <- loadCatalogue catFile fs <- map (dirOk </>) . filter (".csv" `isSuffixOf`) <$> getDirectoryContents dirOk (cNew,log) <- foldM (\(c,csv) f -> do (c2,csv2) <- patchAndLog c f putStrLn $ f ++ show (length csv2) return (c2,csv++csv2)) (c,[]) fs saveCatalogue (dirOut </> newCatFile) (removeTopicEditors $ addInfoRemark cNew) writeFile (dirOut </> "log.csv") (showCSV log)
jsnajder/fer3-catalogue
src/Patch.hs
bsd-3-clause
6,904
0
17
1,878
2,488
1,337
1,151
142
5
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} -- | <http://www.libtorrent.org/reference-Core.html#block-info block_info> structure for "Libtorrent" module Network.Libtorrent.TorrentHandle.BlockInfo( BlockState(..) , BlockInfo(..) , getBytesProgress , getBlockInfoBlockSize , getBlockInfoNumPeers , getBlockInfoState ) where import Control.Monad.IO.Class (MonadIO, liftIO) import Foreign.C.Types (CInt) import Foreign.ForeignPtr (ForeignPtr, withForeignPtr) import qualified Language.C.Inline as C import qualified Language.C.Inline.Cpp as C import qualified Language.C.Inline.Unsafe as CU import Network.Libtorrent.Inline import Network.Libtorrent.Internal import Network.Libtorrent.Types C.context libtorrentCtx C.include "<libtorrent/torrent_handle.hpp>" C.include "<boost/asio.hpp>" C.include "torrent_handle.hpp" C.using "namespace libtorrent" C.using "namespace std" data BlockState = BlockNone | BlockRequested | BlockWriting | BlockFinished deriving (Show, Enum, Bounded, Eq, Ord) newtype BlockInfo = BlockInfo { unBlockInfo :: ForeignPtr (CType BlockInfo)} instance Show BlockInfo where show _ = "BlockInfo" instance Inlinable BlockInfo where type (CType BlockInfo) = C'BlockInfo instance FromPtr BlockInfo where fromPtr = objFromPtr BlockInfo $ \ptr -> [CU.exp| void { delete $(block_info * ptr); } |] instance WithPtr BlockInfo where withPtr (BlockInfo fptr) = withForeignPtr fptr -- FIXME: get linking error, don't know why -- getPeer :: MonadIO m => BlockInfo -> m (Text, C.CShort) -- getPeer ho = -- liftIO . withPtr ho $ \hoPtr -> do -- addr <- fromPtr [CU.block| string * { -- tcp::endpoint ep = $(block_info * hoPtr)->peer(); -- return new std::string(ep.address().to_string()); -- } -- |] -- port <- [CU.block| short { -- tcp::endpoint ep = $(block_info * hoPtr)->peer(); -- return ep.port(); -- } -- |] -- ( , port) <$> stdStringToText addr getBytesProgress :: MonadIO m => BlockInfo -> m CInt getBytesProgress ho = liftIO . withPtr ho $ \hoPtr -> [CU.exp| int { $(block_info * hoPtr)->bytes_progress } |] getBlockInfoBlockSize :: MonadIO m => BlockInfo -> m CInt getBlockInfoBlockSize ho = liftIO . withPtr ho $ \hoPtr -> [CU.exp| int { $(block_info * hoPtr)->block_size } |] getBlockInfoNumPeers :: MonadIO m => BlockInfo -> m CInt getBlockInfoNumPeers ho = liftIO . withPtr ho $ \hoPtr -> [CU.exp| int { $(block_info * hoPtr)->num_peers } |] getBlockInfoState :: MonadIO m => BlockInfo -> m BlockState getBlockInfoState ho = liftIO . withPtr ho $ \hoPtr -> toEnum . fromIntegral <$> [CU.exp| int { $(block_info * hoPtr)->state } |]
eryx67/haskell-libtorrent
src/Network/Libtorrent/TorrentHandle/BlockInfo.hs
bsd-3-clause
3,265
0
9
955
549
316
233
58
1
module Embot ( module Export ) where import Embot.Action as Export import Embot.Config as Export import Embot.Driver as Export import Embot.Types as Export
Dridus/embot
src/Embot.hs
bsd-3-clause
165
0
4
32
38
27
11
6
0
{-# LANGUAGE TypeOperators , GADTs , KindSignatures , FlexibleContexts , RankNTypes , ScopedTypeVariables #-} module Data.Morphism.Apomorphism where -- import Control.Applicative import Control.Monad hiding (mapM) import Control.Monad.Identity import Data.Annotation import Data.Fixpoint import Data.Identity import Data.Traversable import Prelude hiding (mapM) data CoalgebraA (a :: (* -> *) -> * -> *) (f :: * -> *) (s :: *) where Phi :: (s -> f (Either s (FixBotA a f))) -> CoalgebraA a f s type Coalgebra s f = forall a. CoalgebraA a f s apomorphismMA :: (Monad m, Traversable f, In a f m) => CoalgebraA a f s -> s -> m (FixA a f) apomorphismMA (Phi phi) = return . In <=< inA <=< mapM (apomorphismMA (Phi phi) `either` topIn) . phi apomorphismA :: (Traversable f, In a f Identity) => CoalgebraA a f s -> s -> FixA a f apomorphismA phi = runIdentity . apomorphismMA phi apomorphism :: Traversable f => CoalgebraA Id f s -> s -> Fix f apomorphism phi = fullyOutId . runIdentity . apomorphismMA phi
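-- For intuition, a list-specialised sketch of the apomorphism idea, independent of the
-- 'FixA' / annotation machinery above. 'apoList' and 'insertSorted' are illustrative
-- helper names, not part of this library's API: at each step the coalgebra either keeps
-- unfolding (Left) or splices in an already-finished tail (Right) and stops early.

apoList :: (s -> Maybe (a, Either s [a])) -> s -> [a]
apoList f s = case f s of
  Nothing              -> []
  Just (a, Left s')    -> a : apoList f s'   -- keep unfolding
  Just (a, Right rest) -> a : rest           -- short-circuit with a finished tail

-- Example: insert an element into a sorted list, stopping as soon as it is placed.
insertSorted :: Ord a => a -> [a] -> [a]
insertSorted x = apoList step
  where
    step []     = Just (x, Right [])
    step (y:ys)
      | x <= y    = Just (x, Right (y:ys))
      | otherwise = Just (y, Left ys)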
sebastiaanvisser/fixpoints
src/Data/Morphism/Apomorphism.hs
bsd-3-clause
1,026
0
13
198
376
203
173
-1
-1
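In the apomorphism module above, a coalgebra (Phi) produces one layer of the base functor per step, and every child position either carries a fresh seed (Left, keep unfolding) or an already-built subtree (Right, stop early). A small sketch under assumptions: the ListF base functor and the countdown coalgebra below are invented for illustration, and the plain apomorphism is assumed to run as its Traversable-only type signature suggests:

{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable, RankNTypes #-}
import Data.Morphism.Apomorphism

-- Illustrative list-shaped base functor; any Traversable functor would do.
data ListF a r = NilF | ConsF a r
  deriving (Functor, Foldable, Traversable)

-- Emit the current number and continue from a smaller seed (Left) until zero.
countdown :: Coalgebra Int (ListF Int)
countdown = Phi $ \n -> if n <= 0 then NilF else ConsF n (Left (n - 1))

-- Unfolds the descending list n, n-1, .., 1 as a fixpoint of ListF.
-- The inferred type is Int -> Fix (ListF Int), with Fix being the
-- library's plain (annotation-free) fixpoint type.
countdownFrom n = apomorphism countdown n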
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section[TcType]{Types used in the typechecker} This module provides the Type interface for front-end parts of the compiler. These parts * treat "source types" as opaque: newtypes, and predicates are meaningful. * look through usage types The "tc" prefix is for "TypeChecker", because the type checker is the principal client. -} {-# LANGUAGE CPP #-} module ETA.TypeCheck.TcType ( -------------------------------- -- Types TcType, TcSigmaType, TcRhoType, TcTauType, TcPredType, TcThetaType, TcTyVar, TcTyVarSet, TcKind, TcCoVar, -- TcLevel TcLevel(..), topTcLevel, pushTcLevel, strictlyDeeperThan, sameDepthAs, fskTcLevel, -------------------------------- -- MetaDetails UserTypeCtxt(..), pprUserTypeCtxt, pprSigCtxt, TcTyVarDetails(..), pprTcTyVarDetails, vanillaSkolemTv, superSkolemTv, MetaDetails(Flexi, Indirect), MetaInfo(..), isImmutableTyVar, isSkolemTyVar, isMetaTyVar, isMetaTyVarTy, isTyVarTy, isSigTyVar, isOverlappableTyVar, isTyConableTyVar, isFskTyVar, isFmvTyVar, isFlattenTyVar, isReturnTyVar, isAmbiguousTyVar, metaTvRef, metaTyVarInfo, isFlexi, isIndirect, isRuntimeUnkSkol, isTypeVar, isKindVar, metaTyVarTcLevel, setMetaTyVarTcLevel, metaTyVarTcLevel_maybe, isTouchableMetaTyVar, isTouchableOrFmv, isFloatedTouchableMetaTyVar, canUnifyWithPolyType, -------------------------------- -- Builders mkPhiTy, mkSigmaTy, mkTcEqPred, mkTcReprEqPred, mkTcEqPredRole, -------------------------------- -- Splitters -- These are important because they do not look through newtypes tcView, tcSplitForAllTys, tcSplitPhiTy, tcSplitPredFunTy_maybe, tcSplitFunTy_maybe, tcSplitFunTys, tcFunArgTy, tcFunResultTy, tcSplitFunTysN, tcSplitTyConApp, tcSplitTyConApp_maybe, tcTyConAppTyCon, tcTyConAppArgs, tcSplitAppTy_maybe, tcSplitAppTy, tcSplitAppTys, repSplitAppTy_maybe, tcInstHeadTyNotSynonym, tcInstHeadTyAppAllTyVars, tcGetTyVar_maybe, tcGetTyVar, nextRole, tcSplitSigmaTy, tcDeepSplitSigmaTy_maybe, --------------------------------- -- Predicates. 
-- Again, newtypes are opaque eqType, eqTypes, eqPred, cmpType, cmpTypes, cmpPred, eqTypeX, pickyEqType, tcEqType, tcEqKind, isSigmaTy, isRhoTy, isOverloadedTy, isFloatingTy, isDoubleTy, isFloatTy, isIntTy, isWordTy, isStringTy, isIntegerTy, isBoolTy, isUnitTy, isCharTy, isTauTy, isTauTyCon, tcIsTyVarTy, tcIsForAllTy, isPredTy, isTyVarClassPred, isTyVarExposed, isTyVarUnderDatatype, --------------------------------- -- Misc type manipulators deNoteType, occurCheckExpand, OccCheckResult(..), orphNamesOfType, orphNamesOfDFunHead, orphNamesOfCo, orphNamesOfTypes, orphNamesOfCoCon, getDFunTyKey, evVarPred_maybe, evVarPred, --------------------------------- -- Predicate types mkMinimalBySCs, transSuperClasses, immSuperClasses, -- * Finding type instances tcTyFamInsts, -- * Finding "exact" (non-dead) type variables exactTyVarsOfType, exactTyVarsOfTypes, --------------------------------- -- Foreign import and export isFFIArgumentTy, -- :: DynFlags -> Safety -> Type -> Bool isFFIImportResultTy, -- :: DynFlags -> Type -> Bool isFFIExportResultTy, -- :: Type -> Bool isFFIExternalTy, -- :: Type -> Bool --isFFIDynTy, -- :: Type -> Type -> Bool isFFIPrimArgumentTy, -- :: DynFlags -> Type -> Bool isFFIPrimResultTy, -- :: DynFlags -> Type -> Bool --isFFILabelTy, -- :: Type -> Bool --isFFITy, -- :: Type -> Bool isFunPtrTy, -- :: Type -> Bool tcSplitIOType_maybe, -- :: Type -> Maybe Type tcSplitJavaType_maybe, -- :: Type -> Maybe Type tcSplitExtendsType_maybe, -- :: Type -> Maybe Type tcSplitExtendsType, -- :: Type -> Maybe Type extendsVars, -------------------------------- -- Rexported from Kind Kind, typeKind, unliftedTypeKind, liftedTypeKind, openTypeKind, constraintKind, mkArrowKind, mkArrowKinds, isLiftedTypeKind, isUnliftedTypeKind, isSubOpenTypeKind, tcIsSubKind, splitKindFunTys, defaultKind, -------------------------------- -- Rexported from Type Type, PredType, ThetaType, mkForAllTy, mkForAllTys, mkFunTy, mkFunTys, zipFunTys, mkTyConApp, mkAppTy, mkAppTys, applyTy, applyTys, mkTyVarTy, mkTyVarTys, mkTyConTy, isClassPred, isEqPred, isIPPred, mkClassPred, isDictLikeTy, tcSplitDFunTy, tcSplitDFunHead, mkEqPred, -- Type substitutions TvSubst(..), -- Representation visible to a few friends TvSubstEnv, emptyTvSubst, mkOpenTvSubst, zipOpenTvSubst, zipTopTvSubst, mkTopTvSubst, notElemTvSubst, unionTvSubst, getTvSubstEnv, setTvSubstEnv, getTvInScope, extendTvInScope, Type.lookupTyVar, Type.extendTvSubst, Type.substTyVarBndr, extendTvSubstList, isInScope, mkTvSubst, zipTyEnv, Type.substTy, substTys, substTyWith, substTheta, substTyVar, substTyVars, isUnLiftedType, -- Source types are always lifted isUnboxedTupleType, -- Ditto isPrimitiveType, tyVarsOfType, tyVarsOfTypes, closeOverKinds, tcTyVarsOfType, tcTyVarsOfTypes, pprKind, pprParendKind, pprSigmaType, pprType, pprParendType, pprTypeApp, pprTyThingCategory, pprTheta, pprThetaArrowTy, pprClassPred ) where #include "HsVersions.h" -- friends: import ETA.Types.Kind import ETA.Types.TypeRep import ETA.Types.Class import ETA.BasicTypes.Var import ETA.Prelude.ForeignCall import ETA.BasicTypes.VarSet import ETA.Types.Coercion import ETA.Types.Type import qualified ETA.Types.Type as Type import ETA.Types.TyCon import ETA.Types.CoAxiom -- others: import ETA.Main.DynFlags import ETA.BasicTypes.Name import qualified ETA.BasicTypes.Name as Name -- hiding (varName) -- We use this to make dictionaries for type literals. -- Perhaps there's a better way to do this? 
import ETA.BasicTypes.NameSet import ETA.BasicTypes.VarEnv import ETA.BasicTypes.DataCon import ETA.Prelude.PrelNames import ETA.Prelude.TysWiredIn import ETA.BasicTypes.BasicTypes import ETA.Utils.Util import ETA.Utils.Maybes import ETA.Utils.ListSetOps import ETA.Utils.Outputable import ETA.Utils.FastString import ETA.Main.ErrUtils( Validity(..), isValid ) import Data.IORef import Control.Monad (liftM, ap) {- ************************************************************************ * * \subsection{Types} * * ************************************************************************ The type checker divides the generic Type world into the following more structured beasts: sigma ::= forall tyvars. phi -- A sigma type is a qualified type -- -- Note that even if 'tyvars' is empty, theta -- may not be: e.g. (?x::Int) => Int -- Note that 'sigma' is in prenex form: -- all the foralls are at the front. -- A 'phi' type has no foralls to the right of -- an arrow phi :: theta => rho rho ::= sigma -> rho | tau -- A 'tau' type has no quantification anywhere -- Note that the args of a type constructor must be taus tau ::= tyvar | tycon tau_1 .. tau_n | tau_1 tau_2 | tau_1 -> tau_2 -- In all cases, a (saturated) type synonym application is legal, -- provided it expands to the required form. -} type TcTyVar = TyVar -- Used only during type inference type TcCoVar = CoVar -- Used only during type inference; mutable type TcType = Type -- A TcType can have mutable type variables -- Invariant on ForAllTy in TcTypes: -- forall a. T -- a cannot occur inside a MutTyVar in T; that is, -- T is "flattened" before quantifying over a -- These types do not have boxy type variables in them type TcPredType = PredType type TcThetaType = ThetaType type TcSigmaType = TcType type TcRhoType = TcType -- Note [TcRhoType] type TcTauType = TcType type TcKind = Kind type TcTyVarSet = TyVarSet {- Note [TcRhoType] ~~~~~~~~~~~~~~~~ A TcRhoType has no foralls or contexts at the top, or to the right of an arrow YES (forall a. a->a) -> Int NO forall a. a -> Int NO Eq a => a -> a NO Int -> forall a. a -> Int ************************************************************************ * * \subsection{TyVarDetails} * * ************************************************************************ TyVarDetails gives extra info about type variables, used during type checking. It's attached to mutable type variables only. It's knot-tied back to Var.lhs. There is no reason in principle why Var.lhs shouldn't actually have the definition, but it "belongs" here. Note [Signature skolems] ~~~~~~~~~~~~~~~~~~~~~~~~ Consider this f :: forall a. [a] -> Int f (x::b : xs) = 3 Here 'b' is a lexically scoped type variable, but it turns out to be the same as the skolem 'a'. So we have a special kind of skolem constant, SigTv, which can unify with other SigTvs. They are used *only* for pattern type signatures. Similarly consider data T (a:k1) = MkT (S a) data S (b:k2) = MkS (T b) When doing kind inference on {S,T} we don't want *skolems* for k1,k2, because they end up unifying; we want those SigTvs again. Note [ReturnTv] ~~~~~~~~~~~~~~~ We sometimes want to convert a checking algorithm into an inference algorithm. An easy way to do this is to "check" that a term has a metavariable as a type. But, we must be careful to allow that metavariable to unify with *anything*. (Well, anything that doesn't fail an occurs-check.) This is what ReturnTv means. For example, if we have (undefined :: (forall a. TF1 a ~ TF2 a => a)) x we'll call (tcInfer . 
tcExpr) on the function expression. tcInfer will create a ReturnTv to represent the expression's type. We really need this ReturnTv to become set to (forall a. TF1 a ~ TF2 a => a) despite the fact that this type mentions type families and is a polytype. However, we must also be careful to make sure that the ReturnTvs really always do get unified with something -- we don't want these floating around in the solver. So, we check after running the checker to make sure the ReturnTv is filled. If it's not, we set it to a TauTv. We can't ASSERT that no ReturnTvs hit the solver, because they can if there's, say, a kind error that stops checkTauTvUpdate from working. This happens in test case typecheck/should_fail/T5570, for example. See also the commentary on #9404. -} -- A TyVarDetails is inside a TyVar data TcTyVarDetails = SkolemTv -- A skolem Bool -- True <=> this skolem type variable can be overlapped -- when looking up instances -- See Note [Binding when looking up instances] in InstEnv | FlatSkol -- A flatten-skolem. It stands for the TcType, and zonking TcType -- will replace it by that type. -- See Note [The flattening story] in TcFlatten | RuntimeUnk -- Stands for an as-yet-unknown type in the GHCi -- interactive context | MetaTv { mtv_info :: MetaInfo , mtv_ref :: IORef MetaDetails , mtv_tclvl :: TcLevel } -- See Note [TcLevel and untouchable type variables] vanillaSkolemTv, superSkolemTv :: TcTyVarDetails -- See Note [Binding when looking up instances] in InstEnv vanillaSkolemTv = SkolemTv False -- Might be instantiated superSkolemTv = SkolemTv True -- Treat this as a completely distinct type ----------------------------- data MetaDetails = Flexi -- Flexi type variables unify to become Indirects | Indirect TcType instance Outputable MetaDetails where ppr Flexi = ptext (sLit "Flexi") ppr (Indirect ty) = ptext (sLit "Indirect") <+> ppr ty data MetaInfo = TauTv Bool -- This MetaTv is an ordinary unification variable -- A TauTv is always filled in with a tau-type, which -- never contains any ForAlls. -- The boolean is true when the meta var originates -- from a wildcard. | ReturnTv -- Can unify with *anything*. Used to convert a -- type "checking" algorithm into a type inference algorithm. -- See Note [ReturnTv] | SigTv -- A variant of TauTv, except that it should not be -- unified with a type, only with a type variable -- SigTvs are only distinguished to improve error messages -- see Note [Signature skolems] -- The MetaDetails, if filled in, will -- always be another SigTv or a SkolemTv | FlatMetaTv -- A flatten meta-tyvar -- It is a meta-tyvar, but it is always untouchable, with level 0 -- See Note [The flattening story] in TcFlatten ------------------------------------- -- UserTypeCtxt describes the origin of the polymorphic type -- in the places where we need to an expression has that type data UserTypeCtxt = FunSigCtxt Name -- Function type signature -- Also used for types in SPECIALISE pragmas | InfSigCtxt Name -- Inferred type for function | ExprSigCtxt -- Expression type signature | ConArgCtxt Name -- Data constructor argument | TySynCtxt Name -- RHS of a type synonym decl | PatSigCtxt -- Type sig in pattern -- eg f (x::t) = ... -- or (x::t, y) = e | RuleSigCtxt Name -- LHS of a RULE forall -- RULE "foo" forall (x :: a -> a). f (Just x) = ... | ResSigCtxt -- Result type sig -- f x :: t = .... 
| ForSigCtxt Name -- Foreign import or export signature | DefaultDeclCtxt -- Types in a default declaration | InstDeclCtxt -- An instance declaration | SpecInstCtxt -- SPECIALISE instance pragma | ThBrackCtxt -- Template Haskell type brackets [t| ... |] | GenSigCtxt -- Higher-rank or impredicative situations -- e.g. (f e) where f has a higher-rank type -- We might want to elaborate this | GhciCtxt -- GHCi command :kind <type> | ClassSCCtxt Name -- Superclasses of a class | SigmaCtxt -- Theta part of a normal for-all type -- f :: <S> => a -> a | DataTyCtxt Name -- Theta part of a data decl -- data <S> => T a = MkT a {- -- Notes re TySynCtxt -- We allow type synonyms that aren't types; e.g. type List = [] -- -- If the RHS mentions tyvars that aren't in scope, we'll -- quantify over them: -- e.g. type T = a->a -- will become type T = forall a. a->a -- -- With gla-exts that's right, but for H98 we should complain. ************************************************************************ * * Untoucable type variables * * ************************************************************************ -} newtype TcLevel = TcLevel Int deriving( Eq ) -- See Note [TcLevel and untouchable type variables] for what this Int is {- Note [TcLevel and untouchable type variables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Each unification variable (MetaTv) and each Implication has a level number (of type TcLevel) * INVARIANTS. In a tree of Implications, (ImplicInv) The level number of an Implication is STRICTLY GREATER THAN that of its parent (MetaTvInv) The level number of a unification variable is LESS THAN OR EQUAL TO that of its parent implication * A unification variable is *touchable* if its level number is EQUAL TO that of its immediate parent implication. * INVARIANT (GivenInv) The free variables of the ic_given of an implication are all untouchable; ie their level numbers are LESS THAN the ic_tclvl of the implication Note [Skolem escape prevention] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We only unify touchable unification variables. Because of (MetaTvInv), there can be no occurrences of the variable further out, so the unification can't cause the skolems to escape. Example: data T = forall a. MkT a (a->Int) f x (MkT v f) = length [v,x] We decide (x::alpha), and generate an implication like [1]forall a. (a ~ alpha[0]) But we must not unify alpha:=a, because the skolem would escape. For the cases where we DO want to unify, we rely on floating the equality. Example (with same T) g x (MkT v f) = x && True We decide (x::alpha), and generate an implication like [1]forall a. (Bool ~ alpha[0]) We do NOT unify directly, bur rather float out (if the constraint does not mention 'a') to get (Bool ~ alpha[0]) /\ [1]forall a.() and NOW we can unify alpha. The same idea of only unifying touchables solves another problem. Suppose we had (F Int ~ uf[0]) /\ [1](forall a. C a => F Int ~ beta[1]) In this example, beta is touchable inside the implication. The first solveSimpleWanteds step leaves 'uf' un-unified. Then we move inside the implication where a new constraint uf ~ beta emerges. If we (wrongly) spontaneously solved it to get uf := beta, the whole implication disappears but when we pop out again we are left with (F Int ~ uf) which will be unified by our final zonking stage and uf will get unified *once more* to (F Int). 
-} fskTcLevel :: TcLevel fskTcLevel = TcLevel 0 -- 0 = Outside the outermost level: -- flatten skolems topTcLevel :: TcLevel topTcLevel = TcLevel 1 -- 1 = outermost level pushTcLevel :: TcLevel -> TcLevel pushTcLevel (TcLevel us) = TcLevel (us+1) strictlyDeeperThan :: TcLevel -> TcLevel -> Bool strictlyDeeperThan (TcLevel tv_tclvl) (TcLevel ctxt_tclvl) = tv_tclvl > ctxt_tclvl sameDepthAs :: TcLevel -> TcLevel -> Bool sameDepthAs (TcLevel ctxt_tclvl) (TcLevel tv_tclvl) = ctxt_tclvl == tv_tclvl -- NB: invariant ctxt_tclvl >= tv_tclvl -- So <= would be equivalent checkTcLevelInvariant :: TcLevel -> TcLevel -> Bool -- Checks (MetaTvInv) from Note [TcLevel and untouchable type variables] checkTcLevelInvariant (TcLevel ctxt_tclvl) (TcLevel tv_tclvl) = ctxt_tclvl >= tv_tclvl instance Outputable TcLevel where ppr (TcLevel us) = ppr us {- ************************************************************************ * * Pretty-printing * * ************************************************************************ -} pprTcTyVarDetails :: TcTyVarDetails -> SDoc -- For debugging pprTcTyVarDetails (SkolemTv True) = ptext (sLit "ssk") pprTcTyVarDetails (SkolemTv False) = ptext (sLit "sk") pprTcTyVarDetails (RuntimeUnk {}) = ptext (sLit "rt") pprTcTyVarDetails (FlatSkol {}) = ptext (sLit "fsk") pprTcTyVarDetails (MetaTv { mtv_info = info, mtv_tclvl = tclvl }) = pp_info <> colon <> ppr tclvl where pp_info = case info of ReturnTv -> ptext (sLit "ret") TauTv True -> ptext (sLit "twc") TauTv False -> ptext (sLit "tau") SigTv -> ptext (sLit "sig") FlatMetaTv -> ptext (sLit "fuv") pprUserTypeCtxt :: UserTypeCtxt -> SDoc pprUserTypeCtxt (InfSigCtxt n) = ptext (sLit "the inferred type for") <+> quotes (ppr n) pprUserTypeCtxt (FunSigCtxt n) = ptext (sLit "the type signature for") <+> quotes (ppr n) pprUserTypeCtxt (RuleSigCtxt n) = ptext (sLit "a RULE for") <+> quotes (ppr n) pprUserTypeCtxt ExprSigCtxt = ptext (sLit "an expression type signature") pprUserTypeCtxt (ConArgCtxt c) = ptext (sLit "the type of the constructor") <+> quotes (ppr c) pprUserTypeCtxt (TySynCtxt c) = ptext (sLit "the RHS of the type synonym") <+> quotes (ppr c) pprUserTypeCtxt ThBrackCtxt = ptext (sLit "a Template Haskell quotation [t|...|]") pprUserTypeCtxt PatSigCtxt = ptext (sLit "a pattern type signature") pprUserTypeCtxt ResSigCtxt = ptext (sLit "a result type signature") pprUserTypeCtxt (ForSigCtxt n) = ptext (sLit "the foreign declaration for") <+> quotes (ppr n) pprUserTypeCtxt DefaultDeclCtxt = ptext (sLit "a type in a `default' declaration") pprUserTypeCtxt InstDeclCtxt = ptext (sLit "an instance declaration") pprUserTypeCtxt SpecInstCtxt = ptext (sLit "a SPECIALISE instance pragma") pprUserTypeCtxt GenSigCtxt = ptext (sLit "a type expected by the context") pprUserTypeCtxt GhciCtxt = ptext (sLit "a type in a GHCi command") pprUserTypeCtxt (ClassSCCtxt c) = ptext (sLit "the super-classes of class") <+> quotes (ppr c) pprUserTypeCtxt SigmaCtxt = ptext (sLit "the context of a polymorphic type") pprUserTypeCtxt (DataTyCtxt tc) = ptext (sLit "the context of the data type declaration for") <+> quotes (ppr tc) pprSigCtxt :: UserTypeCtxt -> SDoc -> SDoc -> SDoc -- (pprSigCtxt ctxt <extra> <type>) -- prints In <extra> the type signature for 'f': -- f :: <type> -- The <extra> is either empty or "the ambiguity check for" pprSigCtxt ctxt extra pp_ty = sep [ ptext (sLit "In") <+> extra <+> pprUserTypeCtxt ctxt <> colon , nest 2 (pp_sig ctxt) ] where pp_sig (FunSigCtxt n) = pp_n_colon n pp_sig (ConArgCtxt n) = pp_n_colon n pp_sig (ForSigCtxt n) = 
pp_n_colon n pp_sig _ = pp_ty pp_n_colon n = pprPrefixOcc n <+> dcolon <+> pp_ty {- ************************************************************************ * * Finding type family instances * * ************************************************************************ -} -- | Finds outermost type-family applications occuring in a type, -- after expanding synonyms. tcTyFamInsts :: Type -> [(TyCon, [Type])] tcTyFamInsts ty | Just exp_ty <- tcView ty = tcTyFamInsts exp_ty tcTyFamInsts (TyVarTy _) = [] tcTyFamInsts (TyConApp tc tys) | isTypeFamilyTyCon tc = [(tc, tys)] | otherwise = concat (map tcTyFamInsts tys) tcTyFamInsts (LitTy {}) = [] tcTyFamInsts (FunTy ty1 ty2) = tcTyFamInsts ty1 ++ tcTyFamInsts ty2 tcTyFamInsts (AppTy ty1 ty2) = tcTyFamInsts ty1 ++ tcTyFamInsts ty2 tcTyFamInsts (ForAllTy _ ty) = tcTyFamInsts ty {- ************************************************************************ * * The "exact" free variables of a type * * ************************************************************************ Note [Silly type synonym] ~~~~~~~~~~~~~~~~~~~~~~~~~ Consider type T a = Int What are the free tyvars of (T x)? Empty, of course! Here's the example that Ralf Laemmel showed me: foo :: (forall a. C u a -> C u a) -> u mappend :: Monoid u => u -> u -> u bar :: Monoid u => u bar = foo (\t -> t `mappend` t) We have to generalise at the arg to f, and we don't want to capture the constraint (Monad (C u a)) because it appears to mention a. Pretty silly, but it was useful to him. exactTyVarsOfType is used by the type checker to figure out exactly which type variables are mentioned in a type. It's also used in the smart-app checking code --- see TcExpr.tcIdApp On the other hand, consider a *top-level* definition f = (\x -> x) :: T a -> T a If we don't abstract over 'a' it'll get fixed to GHC.Prim.Any, and then if we have an application like (f "x") we get a confusing error message involving Any. So the conclusion is this: when generalising - at top level use tyVarsOfType - in nested bindings use exactTyVarsOfType See Trac #1813 for example. -} exactTyVarsOfType :: Type -> TyVarSet -- Find the free type variables (of any kind) -- but *expand* type synonyms. See Note [Silly type synonym] above. 
exactTyVarsOfType ty = go ty where go ty | Just ty' <- tcView ty = go ty' -- This is the key line go (TyVarTy tv) = unitVarSet tv go (TyConApp _ tys) = exactTyVarsOfTypes tys go (LitTy {}) = emptyVarSet go (FunTy arg res) = go arg `unionVarSet` go res go (AppTy fun arg) = go fun `unionVarSet` go arg go (ForAllTy tyvar ty) = delVarSet (go ty) tyvar exactTyVarsOfTypes :: [Type] -> TyVarSet exactTyVarsOfTypes = mapUnionVarSet exactTyVarsOfType {- ************************************************************************ * * Predicates * * ************************************************************************ -} isTouchableOrFmv :: TcLevel -> TcTyVar -> Bool isTouchableOrFmv ctxt_tclvl tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_tclvl = tv_tclvl, mtv_info = info } -> ASSERT2( checkTcLevelInvariant ctxt_tclvl tv_tclvl, ppr tv $$ ppr tv_tclvl $$ ppr ctxt_tclvl ) case info of FlatMetaTv -> True _ -> tv_tclvl `sameDepthAs` ctxt_tclvl _ -> False isTouchableMetaTyVar :: TcLevel -> TcTyVar -> Bool isTouchableMetaTyVar ctxt_tclvl tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_tclvl = tv_tclvl } -> ASSERT2( checkTcLevelInvariant ctxt_tclvl tv_tclvl, ppr tv $$ ppr tv_tclvl $$ ppr ctxt_tclvl ) tv_tclvl `sameDepthAs` ctxt_tclvl _ -> False isFloatedTouchableMetaTyVar :: TcLevel -> TcTyVar -> Bool isFloatedTouchableMetaTyVar ctxt_tclvl tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_tclvl = tv_tclvl } -> tv_tclvl `strictlyDeeperThan` ctxt_tclvl _ -> False isImmutableTyVar :: TyVar -> Bool isImmutableTyVar tv | isTcTyVar tv = isSkolemTyVar tv | otherwise = True isTyConableTyVar, isSkolemTyVar, isOverlappableTyVar, isMetaTyVar, isAmbiguousTyVar, isFmvTyVar, isFskTyVar, isFlattenTyVar, isReturnTyVar :: TcTyVar -> Bool isTyConableTyVar tv -- True of a meta-type variable that can be filled in -- with a type constructor application; in particular, -- not a SigTv = ASSERT( isTcTyVar tv) case tcTyVarDetails tv of MetaTv { mtv_info = SigTv } -> False _ -> True isFmvTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_info = FlatMetaTv } -> True _ -> False -- | True of both given and wanted flatten-skolems (fak and usk) isFlattenTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of FlatSkol {} -> True MetaTv { mtv_info = FlatMetaTv } -> True _ -> False -- | True of FlatSkol skolems only isFskTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of FlatSkol {} -> True _ -> False isSkolemTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv {} -> False _other -> True isOverlappableTyVar tv = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of SkolemTv overlappable -> overlappable _ -> False isMetaTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv {} -> True _ -> False isReturnTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_info = ReturnTv } -> True _ -> False -- isAmbiguousTyVar is used only when reporting type errors -- It picks out variables that are unbound, namely meta -- type variables and the RuntimUnk variables created by -- RtClosureInspect.zonkRTTIType. 
These are "ambiguous" in -- the sense that they stand for an as-yet-unknown type isAmbiguousTyVar tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv {} -> True RuntimeUnk {} -> True _ -> False isMetaTyVarTy :: TcType -> Bool isMetaTyVarTy (TyVarTy tv) = isMetaTyVar tv isMetaTyVarTy _ = False metaTyVarInfo :: TcTyVar -> MetaInfo metaTyVarInfo tv = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of MetaTv { mtv_info = info } -> info _ -> pprPanic "metaTyVarInfo" (ppr tv) metaTyVarTcLevel :: TcTyVar -> TcLevel metaTyVarTcLevel tv = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of MetaTv { mtv_tclvl = tclvl } -> tclvl _ -> pprPanic "metaTyVarTcLevel" (ppr tv) metaTyVarTcLevel_maybe :: TcTyVar -> Maybe TcLevel metaTyVarTcLevel_maybe tv = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of MetaTv { mtv_tclvl = tclvl } -> Just tclvl _ -> Nothing setMetaTyVarTcLevel :: TcTyVar -> TcLevel -> TcTyVar setMetaTyVarTcLevel tv tclvl = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of details@(MetaTv {}) -> setTcTyVarDetails tv (details { mtv_tclvl = tclvl }) _ -> pprPanic "metaTyVarTcLevel" (ppr tv) isSigTyVar :: Var -> Bool isSigTyVar tv = ASSERT( isTcTyVar tv ) case tcTyVarDetails tv of MetaTv { mtv_info = SigTv } -> True _ -> False metaTvRef :: TyVar -> IORef MetaDetails metaTvRef tv = ASSERT2( isTcTyVar tv, ppr tv ) case tcTyVarDetails tv of MetaTv { mtv_ref = ref } -> ref _ -> pprPanic "metaTvRef" (ppr tv) isFlexi, isIndirect :: MetaDetails -> Bool isFlexi Flexi = True isFlexi _ = False isIndirect (Indirect _) = True isIndirect _ = False isRuntimeUnkSkol :: TyVar -> Bool -- Called only in TcErrors; see Note [Runtime skolems] there isRuntimeUnkSkol x | isTcTyVar x, RuntimeUnk <- tcTyVarDetails x = True | otherwise = False {- ************************************************************************ * * \subsection{Tau, sigma and rho} * * ************************************************************************ -} mkSigmaTy :: [TyVar] -> [PredType] -> Type -> Type mkSigmaTy tyvars theta tau = mkForAllTys tyvars (mkPhiTy theta tau) mkPhiTy :: [PredType] -> Type -> Type mkPhiTy theta ty = foldr mkFunTy ty theta mkTcEqPred :: TcType -> TcType -> Type -- During type checking we build equalities between -- type variables with OpenKind or ArgKind. Ultimately -- they will all settle, but we want the equality predicate -- itself to have kind '*'. I think. -- -- But for now we call mkTyConApp, not mkEqPred, because the invariants -- of the latter might not be satisfied during type checking. -- Notably when we form an equalty (a : OpenKind) ~ (Int : *) -- -- But this is horribly delicate: what about type variables -- that turn out to be bound to Int#? mkTcEqPred ty1 ty2 = mkTyConApp eqTyCon [k, ty1, ty2] where k = typeKind ty1 -- | Make a representational equality predicate mkTcReprEqPred :: TcType -> TcType -> Type mkTcReprEqPred ty1 ty2 = mkTyConApp coercibleTyCon [k, ty1, ty2] where k = typeKind ty1 -- | Make an equality predicate at a given role. The role must not be Phantom. mkTcEqPredRole :: Role -> TcType -> TcType -> Type mkTcEqPredRole Nominal = mkTcEqPred mkTcEqPredRole Representational = mkTcReprEqPred mkTcEqPredRole Phantom = panic "mkTcEqPredRole Phantom" -- @isTauTy@ tests for nested for-alls. It should not be called on a boxy type. 
isTauTy :: Type -> Bool isTauTy ty | Just ty' <- tcView ty = isTauTy ty' isTauTy (TyVarTy _) = True isTauTy (LitTy {}) = True isTauTy (TyConApp tc tys) = all isTauTy tys && isTauTyCon tc isTauTy (AppTy a b) = isTauTy a && isTauTy b isTauTy (FunTy a b) = isTauTy a && isTauTy b isTauTy (ForAllTy {}) = False isTauTyCon :: TyCon -> Bool -- Returns False for type synonyms whose expansion is a polytype isTauTyCon tc | Just (_, rhs) <- synTyConDefn_maybe tc = isTauTy rhs | otherwise = True --------------- getDFunTyKey :: Type -> OccName -- Get some string from a type, to be used to -- construct a dictionary function name getDFunTyKey ty | Just ty' <- tcView ty = getDFunTyKey ty' getDFunTyKey (TyVarTy tv) = getOccName tv getDFunTyKey (TyConApp tc _) = getOccName tc getDFunTyKey (LitTy x) = getDFunTyLitKey x getDFunTyKey (AppTy fun _) = getDFunTyKey fun getDFunTyKey (FunTy _ _) = getOccName funTyCon getDFunTyKey (ForAllTy _ t) = getDFunTyKey t getDFunTyLitKey :: TyLit -> OccName getDFunTyLitKey (NumTyLit n) = mkOccName Name.varName (show n) getDFunTyLitKey (StrTyLit n) = mkOccName Name.varName (show n) -- hm {- ************************************************************************ * * \subsection{Expanding and splitting} * * ************************************************************************ These tcSplit functions are like their non-Tc analogues, but *) they do not look through newtypes However, they are non-monadic and do not follow through mutable type variables. It's up to you to make sure this doesn't matter. -} tcSplitForAllTys :: Type -> ([TyVar], Type) tcSplitForAllTys ty = split ty ty [] where split orig_ty ty tvs | Just ty' <- tcView ty = split orig_ty ty' tvs split _ (ForAllTy tv ty) tvs = split ty ty (tv:tvs) split orig_ty _ tvs = (reverse tvs, orig_ty) tcIsForAllTy :: Type -> Bool tcIsForAllTy ty | Just ty' <- tcView ty = tcIsForAllTy ty' tcIsForAllTy (ForAllTy {}) = True tcIsForAllTy _ = False tcSplitPredFunTy_maybe :: Type -> Maybe (PredType, Type) -- Split off the first predicate argument from a type tcSplitPredFunTy_maybe ty | Just ty' <- tcView ty = tcSplitPredFunTy_maybe ty' tcSplitPredFunTy_maybe (FunTy arg res) | isPredTy arg = Just (arg, res) tcSplitPredFunTy_maybe _ = Nothing tcSplitPhiTy :: Type -> (ThetaType, Type) tcSplitPhiTy ty = split ty [] where split ty ts = case tcSplitPredFunTy_maybe ty of Just (pred, ty) -> split ty (pred:ts) Nothing -> (reverse ts, ty) tcSplitSigmaTy :: Type -> ([TyVar], ThetaType, Type) tcSplitSigmaTy ty = case tcSplitForAllTys ty of (tvs, rho) -> case tcSplitPhiTy rho of (theta, tau) -> (tvs, theta, tau) ----------------------- tcDeepSplitSigmaTy_maybe :: TcSigmaType -> Maybe ([TcType], [TyVar], ThetaType, TcSigmaType) -- Looks for a *non-trivial* quantified type, under zero or more function arrows -- By "non-trivial" we mean either tyvars or constraints are non-empty tcDeepSplitSigmaTy_maybe ty | Just (arg_ty, res_ty) <- tcSplitFunTy_maybe ty , Just (arg_tys, tvs, theta, rho) <- tcDeepSplitSigmaTy_maybe res_ty = Just (arg_ty:arg_tys, tvs, theta, rho) | (tvs, theta, rho) <- tcSplitSigmaTy ty , not (null tvs && null theta) = Just ([], tvs, theta, rho) | otherwise = Nothing ----------------------- tcTyConAppTyCon :: Type -> TyCon tcTyConAppTyCon ty = case tcSplitTyConApp_maybe ty of Just (tc, _) -> tc Nothing -> pprPanic "tcTyConAppTyCon" (pprType ty) tcTyConAppArgs :: Type -> [Type] tcTyConAppArgs ty = case tcSplitTyConApp_maybe ty of Just (_, args) -> args Nothing -> pprPanic "tcTyConAppArgs" (pprType ty) tcSplitTyConApp :: Type -> 
(TyCon, [Type]) tcSplitTyConApp ty = case tcSplitTyConApp_maybe ty of Just stuff -> stuff Nothing -> pprPanic "tcSplitTyConApp" (pprType ty) tcSplitTyConApp_maybe :: Type -> Maybe (TyCon, [Type]) tcSplitTyConApp_maybe ty | Just ty' <- tcView ty = tcSplitTyConApp_maybe ty' tcSplitTyConApp_maybe (TyConApp tc tys) = Just (tc, tys) tcSplitTyConApp_maybe (FunTy arg res) = Just (funTyCon, [arg,res]) -- Newtypes are opaque, so they may be split -- However, predicates are not treated -- as tycon applications by the type checker tcSplitTyConApp_maybe _ = Nothing ----------------------- tcSplitFunTys :: Type -> ([Type], Type) tcSplitFunTys ty = case tcSplitFunTy_maybe ty of Nothing -> ([], ty) Just (arg,res) -> (arg:args, res') where (args,res') = tcSplitFunTys res tcSplitFunTy_maybe :: Type -> Maybe (Type, Type) tcSplitFunTy_maybe ty | Just ty' <- tcView ty = tcSplitFunTy_maybe ty' tcSplitFunTy_maybe (FunTy arg res) | not (isPredTy arg) = Just (arg, res) tcSplitFunTy_maybe _ = Nothing -- Note the typeKind guard -- Consider (?x::Int) => Bool -- We don't want to treat this as a function type! -- A concrete example is test tc230: -- f :: () -> (?p :: ()) => () -> () -- -- g = f () () tcSplitFunTysN :: TcRhoType -> Arity -- N: Number of desired args -> ([TcSigmaType], -- Arg types (N or fewer) TcSigmaType) -- The rest of the type tcSplitFunTysN ty n_args | n_args == 0 = ([], ty) | Just (arg,res) <- tcSplitFunTy_maybe ty = case tcSplitFunTysN res (n_args - 1) of (args, res) -> (arg:args, res) | otherwise = ([], ty) tcSplitFunTy :: Type -> (Type, Type) tcSplitFunTy ty = expectJust "tcSplitFunTy" (tcSplitFunTy_maybe ty) tcFunArgTy :: Type -> Type tcFunArgTy ty = fst (tcSplitFunTy ty) tcFunResultTy :: Type -> Type tcFunResultTy ty = snd (tcSplitFunTy ty) ----------------------- tcSplitAppTy_maybe :: Type -> Maybe (Type, Type) tcSplitAppTy_maybe ty | Just ty' <- tcView ty = tcSplitAppTy_maybe ty' tcSplitAppTy_maybe ty = repSplitAppTy_maybe ty tcSplitAppTy :: Type -> (Type, Type) tcSplitAppTy ty = case tcSplitAppTy_maybe ty of Just stuff -> stuff Nothing -> pprPanic "tcSplitAppTy" (pprType ty) tcSplitAppTys :: Type -> (Type, [Type]) tcSplitAppTys ty = go ty [] where go ty args = case tcSplitAppTy_maybe ty of Just (ty', arg) -> go ty' (arg:args) Nothing -> (ty,args) ----------------------- tcGetTyVar_maybe :: Type -> Maybe TyVar tcGetTyVar_maybe ty | Just ty' <- tcView ty = tcGetTyVar_maybe ty' tcGetTyVar_maybe (TyVarTy tv) = Just tv tcGetTyVar_maybe _ = Nothing tcGetTyVar :: String -> Type -> TyVar tcGetTyVar msg ty = expectJust msg (tcGetTyVar_maybe ty) tcIsTyVarTy :: Type -> Bool tcIsTyVarTy ty = isJust (tcGetTyVar_maybe ty) ----------------------- tcSplitDFunTy :: Type -> ([TyVar], [Type], Class, [Type]) -- Split the type of a dictionary function -- We don't use tcSplitSigmaTy, because a DFun may (with NDP) -- have non-Pred arguments, such as -- df :: forall m. (forall b. 
Eq b => Eq (m b)) -> C m -- -- Also NB splitFunTys, not tcSplitFunTys; -- the latter specifically stops at PredTy arguments, -- and we don't want to do that here tcSplitDFunTy ty = case tcSplitForAllTys ty of { (tvs, rho) -> case splitFunTys rho of { (theta, tau) -> case tcSplitDFunHead tau of { (clas, tys) -> (tvs, theta, clas, tys) }}} tcSplitDFunHead :: Type -> (Class, [Type]) tcSplitDFunHead = getClassPredTys tcInstHeadTyNotSynonym :: Type -> Bool -- Used in Haskell-98 mode, for the argument types of an instance head -- These must not be type synonyms, but everywhere else type synonyms -- are transparent, so we need a special function here tcInstHeadTyNotSynonym ty = case ty of TyConApp tc _ -> not (isTypeSynonymTyCon tc) _ -> True tcInstHeadTyAppAllTyVars :: Type -> Bool -- Used in Haskell-98 mode, for the argument types of an instance head -- These must be a constructor applied to type variable arguments. -- But we allow kind instantiations. tcInstHeadTyAppAllTyVars ty | Just ty' <- tcView ty -- Look through synonyms = tcInstHeadTyAppAllTyVars ty' | otherwise = case ty of TyConApp _ tys -> ok (filter (not . isKind) tys) -- avoid kinds FunTy arg res -> ok [arg, res] _ -> False where -- Check that all the types are type variables, -- and that each is distinct ok tys = equalLength tvs tys && hasNoDups tvs where tvs = mapMaybe get_tv tys get_tv (TyVarTy tv) = Just tv -- through synonyms get_tv _ = Nothing tcEqKind :: TcKind -> TcKind -> Bool tcEqKind = tcEqType tcEqType :: TcType -> TcType -> Bool -- tcEqType is a proper, sensible type-equality function, that does -- just what you'd expect The function Type.eqType (currently) has a -- grotesque hack that makes OpenKind = *, and that is NOT what we -- want in the type checker! Otherwise, for example, TcCanonical.reOrient -- thinks the LHS and RHS have the same kinds, when they don't, and -- fails to re-orient. That in turn caused Trac #8553. tcEqType ty1 ty2 = go init_env ty1 ty2 where init_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType ty1 `unionVarSet` tyVarsOfType ty2)) go env t1 t2 | Just t1' <- tcView t1 = go env t1' t2 | Just t2' <- tcView t2 = go env t1 t2' go env (TyVarTy tv1) (TyVarTy tv2) = rnOccL env tv1 == rnOccR env tv2 go _ (LitTy lit1) (LitTy lit2) = lit1 == lit2 go env (ForAllTy tv1 t1) (ForAllTy tv2 t2) = go env (tyVarKind tv1) (tyVarKind tv2) && go (rnBndr2 env tv1 tv2) t1 t2 go env (AppTy s1 t1) (AppTy s2 t2) = go env s1 s2 && go env t1 t2 go env (FunTy s1 t1) (FunTy s2 t2) = go env s1 s2 && go env t1 t2 go env (TyConApp tc1 ts1) (TyConApp tc2 ts2) = (tc1 == tc2) && gos env ts1 ts2 go _ _ _ = False gos _ [] [] = True gos env (t1:ts1) (t2:ts2) = go env t1 t2 && gos env ts1 ts2 gos _ _ _ = False pickyEqType :: TcType -> TcType -> Bool -- Check when two types _look_ the same, _including_ synonyms. 
-- So (pickyEqType String [Char]) returns False pickyEqType ty1 ty2 = go init_env ty1 ty2 where init_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType ty1 `unionVarSet` tyVarsOfType ty2)) go env (TyVarTy tv1) (TyVarTy tv2) = rnOccL env tv1 == rnOccR env tv2 go _ (LitTy lit1) (LitTy lit2) = lit1 == lit2 go env (ForAllTy tv1 t1) (ForAllTy tv2 t2) = go env (tyVarKind tv1) (tyVarKind tv2) && go (rnBndr2 env tv1 tv2) t1 t2 go env (AppTy s1 t1) (AppTy s2 t2) = go env s1 s2 && go env t1 t2 go env (FunTy s1 t1) (FunTy s2 t2) = go env s1 s2 && go env t1 t2 go env (TyConApp tc1 ts1) (TyConApp tc2 ts2) = (tc1 == tc2) && gos env ts1 ts2 go _ _ _ = False gos _ [] [] = True gos env (t1:ts1) (t2:ts2) = go env t1 t2 && gos env ts1 ts2 gos _ _ _ = False {- Note [Occurs check expansion] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (occurCheckExpand tv xi) expands synonyms in xi just enough to get rid of occurrences of tv outside type function arguments, if that is possible; otherwise, it returns Nothing. For example, suppose we have type F a b = [a] Then occurCheckExpand b (F Int b) = Just [Int] but occurCheckExpand a (F a Int) = Nothing We don't promise to do the absolute minimum amount of expanding necessary, but we try not to do expansions we don't need to. We prefer doing inner expansions first. For example, type F a b = (a, Int, a, [a]) type G b = Char We have occurCheckExpand b (F (G b)) = F Char even though we could also expand F to get rid of b. See also Note [occurCheckExpand] in TcCanonical -} data OccCheckResult a = OC_OK a | OC_Forall | OC_NonTyVar | OC_Occurs instance Functor OccCheckResult where fmap = liftM instance Applicative OccCheckResult where pure = return (<*>) = ap instance Monad OccCheckResult where return x = OC_OK x OC_OK x >>= k = k x OC_Forall >>= _ = OC_Forall OC_NonTyVar >>= _ = OC_NonTyVar OC_Occurs >>= _ = OC_Occurs occurCheckExpand :: DynFlags -> TcTyVar -> Type -> OccCheckResult Type -- See Note [Occurs check expansion] -- Check whether -- a) the given variable occurs in the given type. -- b) there is a forall in the type (unless we have -XImpredicativeTypes -- or it's a ReturnTv -- c) if it's a SigTv, ty should be a tyvar -- -- We may have needed to do some type synonym unfolding in order to -- get rid of the variable (or forall), so we also return the unfolded -- version of the type, which is guaranteed to be syntactically free -- of the given type variable. If the type is already syntactically -- free of the variable, then the same type is returned. 
occurCheckExpand dflags tv ty | MetaTv { mtv_info = SigTv } <- details = go_sig_tv ty | fast_check ty = return ty | otherwise = go ty where details = ASSERT2( isTcTyVar tv, ppr tv ) tcTyVarDetails tv impredicative = canUnifyWithPolyType dflags details (tyVarKind tv) -- Check 'ty' is a tyvar, or can be expanded into one go_sig_tv ty@(TyVarTy {}) = OC_OK ty go_sig_tv ty | Just ty' <- tcView ty = go_sig_tv ty' go_sig_tv _ = OC_NonTyVar -- True => fine fast_check (LitTy {}) = True fast_check (TyVarTy tv') = tv /= tv' fast_check (TyConApp _ tys) = all fast_check tys fast_check (FunTy arg res) = fast_check arg && fast_check res fast_check (AppTy fun arg) = fast_check fun && fast_check arg fast_check (ForAllTy tv' ty) = impredicative && fast_check (tyVarKind tv') && (tv == tv' || fast_check ty) go t@(TyVarTy tv') | tv == tv' = OC_Occurs | otherwise = return t go ty@(LitTy {}) = return ty go (AppTy ty1 ty2) = do { ty1' <- go ty1 ; ty2' <- go ty2 ; return (mkAppTy ty1' ty2') } go (FunTy ty1 ty2) = do { ty1' <- go ty1 ; ty2' <- go ty2 ; return (mkFunTy ty1' ty2') } go ty@(ForAllTy tv' body_ty) | not impredicative = OC_Forall | not (fast_check (tyVarKind tv')) = OC_Occurs -- Can't expand away the kinds unless we create -- fresh variables which we don't want to do at this point. -- In principle fast_check might fail because of a for-all -- but we don't yet have poly-kinded tyvars so I'm not -- going to worry about that now | tv == tv' = return ty | otherwise = do { body' <- go body_ty ; return (ForAllTy tv' body') } -- For a type constructor application, first try expanding away the -- offending variable from the arguments. If that doesn't work, next -- see if the type constructor is a type synonym, and if so, expand -- it and try again. go ty@(TyConApp tc tys) = case do { tys <- mapM go tys; return (mkTyConApp tc tys) } of OC_OK ty -> return ty -- First try to eliminate the tyvar from the args bad | Just ty' <- tcView ty -> go ty' | otherwise -> bad -- Failing that, try to expand a synonym canUnifyWithPolyType :: DynFlags -> TcTyVarDetails -> TcKind -> Bool canUnifyWithPolyType dflags details kind = case details of MetaTv { mtv_info = ReturnTv } -> True -- See Note [ReturnTv] MetaTv { mtv_info = SigTv } -> False MetaTv { mtv_info = TauTv _ } -> xopt Opt_ImpredicativeTypes dflags || isOpenTypeKind kind -- Note [OpenTypeKind accepts foralls] _other -> True -- We can have non-meta tyvars in given constraints {- Note [OpenTypeKind accepts foralls] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Here is a common paradigm: foo :: (forall a. a -> a) -> Int foo = error "urk" To make this work we need to instantiate 'error' with a polytype. A similar case is bar :: Bool -> (forall a. a->a) -> Int bar True = \x. (x 3) bar False = error "urk" Here we need to instantiate 'error' with a polytype. But 'error' has an OpenTypeKind type variable, precisely so that we can instantiate it with Int#. So we also allow such type variables to be instantiated with foralls. It's a bit of a hack, but seems straightforward. 
************************************************************************ * * \subsection{Predicate types} * * ************************************************************************ Deconstructors and tests on predicate types -} isTyVarClassPred :: PredType -> Bool isTyVarClassPred ty = case getClassPredTys_maybe ty of Just (_, tys) -> all isTyVarTy tys _ -> False evVarPred_maybe :: EvVar -> Maybe PredType evVarPred_maybe v = if isPredTy ty then Just ty else Nothing where ty = varType v evVarPred :: EvVar -> PredType evVarPred var | debugIsOn = case evVarPred_maybe var of Just pred -> pred Nothing -> pprPanic "tcEvVarPred" (ppr var <+> ppr (varType var)) | otherwise = varType var -- Superclasses mkMinimalBySCs :: [PredType] -> [PredType] -- Remove predicates that can be deduced from others by superclasses mkMinimalBySCs ptys = [ ploc | ploc <- ptys , ploc `not_in_preds` rec_scs ] where rec_scs = concatMap trans_super_classes ptys not_in_preds p ps = not (any (eqPred p) ps) trans_super_classes pred -- Superclasses of pred, excluding pred itself = case classifyPredType pred of ClassPred cls tys -> transSuperClasses cls tys TuplePred ts -> concatMap trans_super_classes ts _ -> [] transSuperClasses :: Class -> [Type] -> [PredType] transSuperClasses cls tys -- Superclasses of (cls tys), -- excluding (cls tys) itself = concatMap trans_sc (immSuperClasses cls tys) where trans_sc :: PredType -> [PredType] -- (trans_sc p) returns (p : p's superclasses) trans_sc p = case classifyPredType p of ClassPred cls tys -> p : transSuperClasses cls tys TuplePred ps -> concatMap trans_sc ps _ -> [p] immSuperClasses :: Class -> [Type] -> [PredType] immSuperClasses cls tys = substTheta (zipTopTvSubst tyvars tys) sc_theta where (tyvars,sc_theta,_,_) = classBigSig cls {- ************************************************************************ * * \subsection{Predicates} * * ************************************************************************ -} isSigmaTy :: TcType -> Bool -- isSigmaTy returns true of any qualified type. It doesn't -- *necessarily* have any foralls. E.g -- f :: (?x::Int) => Int -> Int isSigmaTy ty | Just ty' <- tcView ty = isSigmaTy ty' isSigmaTy (ForAllTy _ _) = True isSigmaTy (FunTy a _) = isPredTy a isSigmaTy _ = False isRhoTy :: TcType -> Bool -- True of TcRhoTypes; see Note [TcRhoType] isRhoTy ty | Just ty' <- tcView ty = isRhoTy ty' isRhoTy (ForAllTy {}) = False isRhoTy (FunTy a r) = not (isPredTy a) && isRhoTy r isRhoTy _ = True isOverloadedTy :: Type -> Bool -- Yes for a type of a function that might require evidence-passing -- Used only by bindLocalMethods isOverloadedTy ty | Just ty' <- tcView ty = isOverloadedTy ty' isOverloadedTy (ForAllTy _ ty) = isOverloadedTy ty isOverloadedTy (FunTy a _) = isPredTy a isOverloadedTy _ = False isFloatTy, isDoubleTy, isIntegerTy, isIntTy, isWordTy, isBoolTy, isUnitTy, isCharTy, isAnyTy :: Type -> Bool isFloatTy = is_tc floatTyConKey isDoubleTy = is_tc doubleTyConKey isIntegerTy = is_tc integerTyConKey isIntTy = is_tc intTyConKey isWordTy = is_tc wordTyConKey isBoolTy = is_tc boolTyConKey isUnitTy = is_tc unitTyConKey isCharTy = is_tc charTyConKey isAnyTy = is_tc anyTyConKey -- | Does a type represent a floating-point number? isFloatingTy :: Type -> Bool isFloatingTy ty = isFloatTy ty || isDoubleTy ty -- | Is a type 'String'? 
isStringTy :: Type -> Bool isStringTy ty = case tcSplitTyConApp_maybe ty of Just (tc, [arg_ty]) -> tc == listTyCon && isCharTy arg_ty _ -> False is_tc :: Unique -> Type -> Bool -- Newtypes are opaque to this is_tc uniq ty = case tcSplitTyConApp_maybe ty of Just (tc, _) -> uniq == getUnique tc Nothing -> False -- | Does the given tyvar appear in the given type outside of any -- non-newtypes? Assume we're looking for @a@. Says "yes" for -- @a@, @N a@, @b a@, @a b@, @b (N a)@. Says "no" for -- @[a]@, @Maybe a@, @T a@, where @N@ is a newtype and @T@ is a datatype. isTyVarExposed :: TcTyVar -> TcType -> Bool isTyVarExposed tv (TyVarTy tv') = tv == tv' isTyVarExposed tv (TyConApp tc tys) | isNewTyCon tc = any (isTyVarExposed tv) tys | otherwise = False isTyVarExposed _ (LitTy {}) = False isTyVarExposed _ (FunTy {}) = False isTyVarExposed tv (AppTy fun arg) = isTyVarExposed tv fun || isTyVarExposed tv arg isTyVarExposed _ (ForAllTy {}) = False -- | Does the given tyvar appear under a type generative w.r.t. -- representational equality? See Note [Occurs check error] in -- TcCanonical for the motivation for this function. isTyVarUnderDatatype :: TcTyVar -> TcType -> Bool isTyVarUnderDatatype tv = go False where go under_dt ty | Just ty' <- tcView ty = go under_dt ty' go under_dt (TyVarTy tv') = under_dt && (tv == tv') go under_dt (TyConApp tc tys) = let under_dt' = under_dt || isGenerativeTyCon tc Representational in any (go under_dt') tys go _ (LitTy {}) = False go _ (FunTy arg res) = go True arg || go True res go under_dt (AppTy fun arg) = go under_dt fun || go under_dt arg go under_dt (ForAllTy tv' inner_ty) | tv' == tv = False | otherwise = go under_dt inner_ty {- ************************************************************************ * * \subsection{Misc} * * ************************************************************************ -} deNoteType :: Type -> Type -- Remove all *outermost* type synonyms and other notes deNoteType ty | Just ty' <- tcView ty = deNoteType ty' deNoteType ty = ty tcTyVarsOfType :: Type -> TcTyVarSet -- Just the *TcTyVars* free in the type -- (Types.tyVarsOfTypes finds all free TyVars) tcTyVarsOfType (TyVarTy tv) = if isTcTyVar tv then unitVarSet tv else emptyVarSet tcTyVarsOfType (TyConApp _ tys) = tcTyVarsOfTypes tys tcTyVarsOfType (LitTy {}) = emptyVarSet tcTyVarsOfType (FunTy arg res) = tcTyVarsOfType arg `unionVarSet` tcTyVarsOfType res tcTyVarsOfType (AppTy fun arg) = tcTyVarsOfType fun `unionVarSet` tcTyVarsOfType arg tcTyVarsOfType (ForAllTy tyvar ty) = tcTyVarsOfType ty `delVarSet` tyvar -- We do sometimes quantify over skolem TcTyVars tcTyVarsOfTypes :: [Type] -> TyVarSet tcTyVarsOfTypes = mapUnionVarSet tcTyVarsOfType {- Find the free tycons and classes of a type. This is used in the front end of the compiler. -} orphNamesOfTyCon :: TyCon -> NameSet orphNamesOfTyCon tycon = unitNameSet (getName tycon) `unionNameSet` case tyConClass_maybe tycon of Nothing -> emptyNameSet Just cls -> unitNameSet (getName cls) orphNamesOfType :: Type -> NameSet orphNamesOfType ty | Just ty' <- tcView ty = orphNamesOfType ty' -- Look through type synonyms (Trac #4912) orphNamesOfType (TyVarTy _) = emptyNameSet orphNamesOfType (LitTy {}) = emptyNameSet orphNamesOfType (TyConApp tycon tys) = orphNamesOfTyCon tycon `unionNameSet` orphNamesOfTypes tys orphNamesOfType (FunTy arg res) = orphNamesOfTyCon funTyCon -- NB! 
See Trac #8535 `unionNameSet` orphNamesOfType arg `unionNameSet` orphNamesOfType res orphNamesOfType (AppTy fun arg) = orphNamesOfType fun `unionNameSet` orphNamesOfType arg orphNamesOfType (ForAllTy _ ty) = orphNamesOfType ty orphNamesOfThings :: (a -> NameSet) -> [a] -> NameSet orphNamesOfThings f = foldr (unionNameSet . f) emptyNameSet orphNamesOfTypes :: [Type] -> NameSet orphNamesOfTypes = orphNamesOfThings orphNamesOfType orphNamesOfDFunHead :: Type -> NameSet -- Find the free type constructors and classes -- of the head of the dfun instance type -- The 'dfun_head_type' is because of -- instance Foo a => Baz T where ... -- The decl is an orphan if Baz and T are both not locally defined, -- even if Foo *is* locally defined orphNamesOfDFunHead dfun_ty = case tcSplitSigmaTy dfun_ty of (_, _, head_ty) -> orphNamesOfType head_ty orphNamesOfCo :: Coercion -> NameSet orphNamesOfCo (Refl _ ty) = orphNamesOfType ty orphNamesOfCo (TyConAppCo _ tc cos) = unitNameSet (getName tc) `unionNameSet` orphNamesOfCos cos orphNamesOfCo (AppCo co1 co2) = orphNamesOfCo co1 `unionNameSet` orphNamesOfCo co2 orphNamesOfCo (ForAllCo _ co) = orphNamesOfCo co orphNamesOfCo (CoVarCo _) = emptyNameSet orphNamesOfCo (AxiomInstCo con _ cos) = orphNamesOfCoCon con `unionNameSet` orphNamesOfCos cos orphNamesOfCo (UnivCo _ _ ty1 ty2) = orphNamesOfType ty1 `unionNameSet` orphNamesOfType ty2 orphNamesOfCo (SymCo co) = orphNamesOfCo co orphNamesOfCo (TransCo co1 co2) = orphNamesOfCo co1 `unionNameSet` orphNamesOfCo co2 orphNamesOfCo (NthCo _ co) = orphNamesOfCo co orphNamesOfCo (LRCo _ co) = orphNamesOfCo co orphNamesOfCo (InstCo co ty) = orphNamesOfCo co `unionNameSet` orphNamesOfType ty orphNamesOfCo (SubCo co) = orphNamesOfCo co orphNamesOfCo (AxiomRuleCo _ ts cs) = orphNamesOfTypes ts `unionNameSet` orphNamesOfCos cs orphNamesOfCos :: [Coercion] -> NameSet orphNamesOfCos = orphNamesOfThings orphNamesOfCo orphNamesOfCoCon :: CoAxiom br -> NameSet orphNamesOfCoCon (CoAxiom { co_ax_tc = tc, co_ax_branches = branches }) = orphNamesOfTyCon tc `unionNameSet` orphNamesOfCoAxBranches branches orphNamesOfCoAxBranches :: BranchList CoAxBranch br -> NameSet orphNamesOfCoAxBranches = brListFoldr (unionNameSet . 
orphNamesOfCoAxBranch) emptyNameSet orphNamesOfCoAxBranch :: CoAxBranch -> NameSet orphNamesOfCoAxBranch (CoAxBranch { cab_lhs = lhs, cab_rhs = rhs }) = orphNamesOfTypes lhs `unionNameSet` orphNamesOfType rhs {- ************************************************************************ * * \subsection[TysWiredIn-ext-type]{External types} * * ************************************************************************ The compiler's foreign function interface supports the passing of a restricted set of types as arguments and results (the restricting factor being the ) -} tcSplitIOType_maybe :: Type -> Maybe (TyCon, Type) -- (tcSplitIOType_maybe t) returns Just (IO,t',co) -- if co : t ~ IO t' -- returns Nothing otherwise tcSplitIOType_maybe ty = case tcSplitTyConApp_maybe ty of Just (io_tycon, [io_res_ty]) | io_tycon `hasKey` ioTyConKey -> Just (io_tycon, io_res_ty) _ -> Nothing tcSplitJavaType_maybe :: Type -> Maybe (TyCon, Type, Type) tcSplitJavaType_maybe ty = case tcSplitTyConApp_maybe ty of Just (javaTyCon, [javaTagType, javaResType]) | javaTyCon `hasKey` javaTyConKey -> Just (javaTyCon, javaTagType, javaResType) _ -> Nothing tcSplitExtendsType_maybe :: Type -> Maybe (Type, Type) tcSplitExtendsType_maybe ty = case tcSplitTyConApp_maybe ty of Just (extendsTyCon, [extendsVarType, extendsTagType]) | extendsTyCon `hasKey` extendsClassKey -> Just ( extendsVarType , extendsTagType ) _ -> Nothing tcSplitExtendsType :: Type -> (Type, Type) tcSplitExtendsType ty = expectJust "tcSplitExtendsType" $ tcSplitExtendsType_maybe ty extendsVars :: ThetaType -> VarSet extendsVars = mkVarSet . mapMaybe ( fmap ( getTyVar "extendsVars: Not type variable!" . fst) . tcSplitExtendsType_maybe ) -- isFFITy :: Type -> Bool -- -- True for any TyCon that can possibly be an arg or result of an FFI call -- isFFITy ty = isValid (checkRepTyCon legalFFITyCon ty empty) isFFIArgumentTy :: DynFlags -> Safety -> VarSet -> Type -> Validity -- Checks for valid argument type for a 'foreign import' isFFIArgumentTy dflags safety vs ty | checkValidTyVar vs ty = IsValid | otherwise = checkRepTyCon (legalOutgoingTyCon dflags safety) ty empty isFFIExternalTy :: Type -> Validity -- Types that are allowed as arguments of a 'foreign export' isFFIExternalTy ty = checkRepTyCon legalFEArgTyCon ty empty isFFIImportResultTy :: DynFlags -> Type -> Validity isFFIImportResultTy dflags ty | isTyVarTy ty = IsValid | otherwise = checkRepTyCon (legalFIResultTyCon dflags) ty empty isFFIExportResultTy :: Type -> Validity isFFIExportResultTy ty = checkRepTyCon legalFEResultTyCon ty empty -- isFFIDynTy :: Type -> Type -> Validity -- -- The type in a foreign import dynamic must be Ptr, FunPtr, or a newtype of -- -- either, and the wrapped function type must be equal to the given type. -- -- We assume that all types have been run through normaliseFfiType, so we don't -- -- need to worry about expanding newtypes here. -- isFFIDynTy expected ty -- -- Note [Foreign import dynamic] -- -- In the example below, expected would be 'CInt -> IO ()', while ty would -- -- be 'FunPtr (CDouble -> IO ())'. -- | Just (tc, [ty']) <- splitTyConApp_maybe ty -- , tyConUnique tc `elem` [ptrTyConKey, funPtrTyConKey] -- , eqType ty' expected -- = IsValid -- | otherwise -- = NotValid (vcat [ ptext (sLit "Expected: Ptr/FunPtr") <+> pprParendType expected <> comma -- , ptext (sLit " Actual:") <+> ppr ty ]) -- isFFILabelTy :: Type -> Validity -- -- The type of a foreign label must be Ptr, FunPtr, or a newtype of either. 
-- isFFILabelTy ty = checkRepTyCon ok ty extra -- where -- ok tc _ = tc `hasKey` funPtrTyConKey || tc `hasKey` ptrTyConKey -- extra = ptext (sLit "A foreign-imported address (via &foo) must have type (Ptr a) or (FunPtr a)") isFFIPrimArgumentTy :: DynFlags -> Type -> Validity -- Checks for valid argument type for a 'foreign import prim' -- Currently they must all be simple unlifted types, or the well-known type -- Any, which can be used to pass the address to a Haskell object on the heap to -- the foreign function. isFFIPrimArgumentTy dflags ty | isAnyTy ty = IsValid | otherwise = checkRepTyCon (legalFIPrimArgTyCon dflags) ty empty isFFIPrimResultTy :: DynFlags -> Type -> Validity -- Checks for valid result type for a 'foreign import prim' -- Currently it must be an unlifted type, including unboxed tuples. isFFIPrimResultTy dflags ty = checkRepTyCon (legalFIPrimResultTyCon dflags) ty empty isFunPtrTy :: Type -> Bool isFunPtrTy ty = isValid (checkRepTyCon (\tc _ -> tc `hasKey` funPtrTyConKey) ty empty) -- normaliseFfiType gets run before checkRepTyCon, so we don't -- need to worry about looking through newtypes or type functions -- here; that's already been taken care of. checkRepTyCon :: (TyCon -> Type -> Bool) -> Type -> SDoc -> Validity checkRepTyCon checkTc ty extra = case splitTyConApp_maybe ty of Just (tc, tys) | isNewTyCon tc -> NotValid (hang msg 2 (mk_nt_reason tc tys $$ nt_fix)) | checkTc tc ty -> IsValid | otherwise -> NotValid (msg $$ extra) Nothing -> NotValid (quotes (ppr ty) <+> ptext (sLit "is not a data type") $$ extra) where msg = quotes (ppr ty) <+> ptext (sLit "cannot be marshalled in a foreign call") mk_nt_reason tc tys | null tys = ptext (sLit "because its data construtor is not in scope") | otherwise = ptext (sLit "because the data construtor for") <+> quotes (ppr tc) <+> ptext (sLit "is not in scope") nt_fix = ptext (sLit "Possible fix: import the data constructor to bring it into scope") checkValidTyVar :: VarSet -> Type -> Bool checkValidTyVar vs ty | Just var <- getTyVar_maybe ty , var `elemVarSet` vs = True | otherwise = False {- Note [Foreign import dynamic] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A dynamic stub must be of the form 'FunPtr ft -> ft' where ft is any foreign type. Similarly, a wrapper stub must be of the form 'ft -> IO (FunPtr ft)'. We use isFFIDynTy to check whether a signature is well-formed. For example, given a (illegal) declaration like: foreign import ccall "dynamic" foo :: FunPtr (CDouble -> IO ()) -> CInt -> IO () isFFIDynTy will compare the 'FunPtr' type 'CDouble -> IO ()' with the curried result type 'CInt -> IO ()', and return False, as they are not equal. ---------------------------------------------- These chaps do the work; they are not exported ---------------------------------------------- -} legalFEArgTyCon :: TyCon -> Type -> Bool legalFEArgTyCon tc ty -- It's illegal to make foreign exports that take unboxed -- arguments. The RTS API currently can't invoke such things. 
--SDM 7/2000 = boxedMarshalableTyCon tc ty legalFIResultTyCon :: DynFlags -> TyCon -> Type -> Bool legalFIResultTyCon dflags tc ty | tc == unitTyCon = True | otherwise = marshalableTyCon dflags tc ty legalFEResultTyCon :: TyCon -> Type -> Bool legalFEResultTyCon tc ty | tc == unitTyCon = True | otherwise = boxedMarshalableTyCon tc ty legalOutgoingTyCon :: DynFlags -> Safety -> TyCon -> Type -> Bool -- Checks validity of types going from Haskell -> external world legalOutgoingTyCon dflags _ tc ty = marshalableTyCon dflags tc ty -- legalFFITyCon :: VarSet -> TyCon -> Type -> Bool -- -- True for any TyCon that can possibly be an arg or result of an FFI call -- legalFFITyCon vs tc ty -- | isUnLiftedTyCon tc = True -- | tc == unitTyCon = True -- | otherwise = boxedMarshalableTyCon vs tc ty marshalableTyCon :: DynFlags -> TyCon -> Type -> Bool marshalableTyCon dflags tc ty | (xopt Opt_UnliftedFFITypes dflags && isUnLiftedTyCon tc && not (isUnboxedTupleTyCon tc)) -- && not (isVoidRep (typePrimRep ty))) = True | otherwise = boxedMarshalableTyCon tc ty boxedMarshalableTyCon :: TyCon -> Type -> Bool boxedMarshalableTyCon tc ty | getUnique tc `elem` [ intTyConKey, int8TyConKey, int16TyConKey , int32TyConKey, int64TyConKey , wordTyConKey, word8TyConKey, word16TyConKey , word32TyConKey, word64TyConKey , floatTyConKey, doubleTyConKey , ptrTyConKey, funPtrTyConKey , charTyConKey , stablePtrTyConKey , boolTyConKey , maybeTyConKey , listTyConKey ] = True -- TODO: Optimize this to add just raw key checks like above. -- Can be done once the GHC source in integrated. | Just (_, _, _, [primTy]) <- splitDataProductType_maybe ty , isPrimitiveType primTy = True | otherwise = False legalFIPrimArgTyCon :: DynFlags -> TyCon -> Type -> Bool -- Check args of 'foreign import prim', only allow simple unlifted types. -- Strictly speaking it is unnecessary to ban unboxed tuples here since -- currently they're of the wrong kind to use in function args anyway. legalFIPrimArgTyCon dflags tc _ | xopt Opt_UnliftedFFITypes dflags && isUnLiftedTyCon tc && not (isUnboxedTupleTyCon tc) = True | otherwise = False legalFIPrimResultTyCon :: DynFlags -> TyCon -> Type -> Bool -- Check result type of 'foreign import prim'. Allow simple unlifted -- types and also unboxed tuple result types '... -> (# , , #)' legalFIPrimResultTyCon dflags tc ty | xopt Opt_UnliftedFFITypes dflags && isUnLiftedTyCon tc && (isUnboxedTupleTyCon tc || not (isVoidRep (typePrimRep ty))) = True | otherwise = False {- Note [Marshalling VoidRep] ~~~~~~~~~~~~~~~~~~~~~~~~~~ We don't treat State# (whose PrimRep is VoidRep) as marshalable. In turn that means you can't write foreign import foo :: Int -> State# RealWorld Reason: the back end falls over with panic "primRepHint:VoidRep"; and there is no compelling reason to permit it -}
AlexeyRaga/eta
compiler/ETA/TypeCheck/TcType.hs
bsd-3-clause
70,176
0
15
18,673
12,825
6,728
6,097
-1
-1
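-- A minimal, self-contained sketch (not from the compiler source above): a couple of
-- GHC-style foreign declarations of the shape those validity checks are written to
-- accept. Double, Int, Ptr and FunPtr all appear in boxedMarshalableTyCon, and the
-- "dynamic" import is the situation described in Note [Foreign import dynamic].
-- The module name and C entry points are illustrative assumptions.
{-# LANGUAGE ForeignFunctionInterface #-}
module FFISketch where

import Foreign.Ptr (FunPtr)

-- plain marshalable argument and result types
foreign import ccall unsafe "math.h sin"
  c_sin :: Double -> Double

-- a dynamic import: FunPtr ft -> ft
foreign import ccall "dynamic"
  callIntFun :: FunPtr (Int -> Int) -> (Int -> Int)

main :: IO ()
main = print (c_sin 0.0)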
{- Data/Singletons/Names.hs (c) Richard Eisenberg 2014 [email protected] Defining names and manipulations on names for use in promotion and singling. -} {-# LANGUAGE TemplateHaskell #-} module Data.Singletons.Names where import Data.Singletons import Data.Singletons.SuppressUnusedWarnings import Data.Singletons.Decide import Language.Haskell.TH.Syntax import Language.Haskell.TH.Desugar import GHC.TypeLits ( Nat, Symbol ) import GHC.Exts ( Any, Constraint ) import Data.Typeable ( TypeRep ) import Data.Singletons.Util import Control.Monad anyTypeName, boolName, andName, tyEqName, compareName, minBoundName, maxBoundName, repName, nilName, consName, listName, tyFunName, applyName, natName, symbolName, undefinedName, typeRepName, stringName, eqName, ordName, boundedName, orderingName, singFamilyName, singIName, singMethName, demoteName, singKindClassName, sEqClassName, sEqMethName, sconsName, snilName, sIfName, someSingTypeName, someSingDataName, sListName, sDecideClassName, sDecideMethName, provedName, disprovedName, reflName, toSingName, fromSingName, equalityName, applySingName, suppressClassName, suppressMethodName, thenCmpName, sameKindName, tyFromIntegerName, tyNegateName, sFromIntegerName, sNegateName, errorName, foldlName, cmpEQName, cmpLTName, cmpGTName, singletonsToEnumName, singletonsFromEnumName, enumName, singletonsEnumName, equalsName, constraintName :: Name anyTypeName = ''Any boolName = ''Bool andName = '(&&) compareName = 'compare minBoundName = 'minBound maxBoundName = 'maxBound tyEqName = mk_name_tc "Data.Singletons.Prelude.Eq" ":==" repName = mkName "Rep" -- this is actually defined in client code! nilName = '[] consName = '(:) listName = ''[] tyFunName = ''TyFun applyName = ''Apply symbolName = ''Symbol natName = ''Nat undefinedName = 'undefined typeRepName = ''TypeRep stringName = ''String eqName = ''Eq ordName = ''Ord boundedName = ''Bounded orderingName = ''Ordering singFamilyName = ''Sing singIName = ''SingI singMethName = 'sing toSingName = 'toSing fromSingName = 'fromSing demoteName = ''Demote singKindClassName = ''SingKind sEqClassName = mk_name_tc "Data.Singletons.Prelude.Eq" "SEq" sEqMethName = mk_name_v "Data.Singletons.Prelude.Eq" "%:==" sIfName = mk_name_v "Data.Singletons.Prelude.Bool" "sIf" sconsName = mk_name_d "Data.Singletons.Prelude.Instances" "SCons" snilName = mk_name_d "Data.Singletons.Prelude.Instances" "SNil" someSingTypeName = ''SomeSing someSingDataName = 'SomeSing sListName = mk_name_tc "Data.Singletons.Prelude.Instances" "SList" sDecideClassName = ''SDecide sDecideMethName = '(%~) provedName = 'Proved disprovedName = 'Disproved reflName = 'Refl equalityName = ''(~) applySingName = 'applySing suppressClassName = ''SuppressUnusedWarnings suppressMethodName = 'suppressUnusedWarnings thenCmpName = mk_name_v "Data.Singletons.Prelude.Ord" "thenCmp" sameKindName = ''SameKind tyFromIntegerName = mk_name_tc "Data.Singletons.Prelude.Num" "FromInteger" tyNegateName = mk_name_tc "Data.Singletons.Prelude.Num" "Negate" sFromIntegerName = mk_name_v "Data.Singletons.Prelude.Num" "sFromInteger" sNegateName = mk_name_v "Data.Singletons.Prelude.Num" "sNegate" errorName = 'error foldlName = 'foldl cmpEQName = 'EQ cmpLTName = 'LT cmpGTName = 'GT singletonsToEnumName = mk_name_v "Data.Singletons.Prelude.Enum" "toEnum" singletonsFromEnumName = mk_name_v "Data.Singletons.Prelude.Enum" "fromEnum" enumName = ''Enum singletonsEnumName = mk_name_tc "Data.Singletons.Prelude.Enum" "Enum" equalsName = '(==) constraintName = ''Constraint singPkg :: String singPkg = $( (LitE . 
StringL . loc_package) `liftM` location ) mk_name_tc :: String -> String -> Name mk_name_tc = mkNameG_tc singPkg mk_name_d :: String -> String -> Name mk_name_d = mkNameG_d singPkg mk_name_v :: String -> String -> Name mk_name_v = mkNameG_v singPkg mkTupleTypeName :: Int -> Name mkTupleTypeName n = mk_name_tc "Data.Singletons.Prelude.Instances" $ "STuple" ++ (show n) mkTupleDataName :: Int -> Name mkTupleDataName n = mk_name_d "Data.Singletons.Prelude.Instances" $ "STuple" ++ (show n) -- used when a value name appears in a pattern context -- works only for proper variables (lower-case names) promoteValNameLhs :: Name -> Name promoteValNameLhs = upcase -- like promoteValNameLhs, but adds a prefix to the promoted name promoteValNameLhsPrefix :: (String, String) -> Name -> Name promoteValNameLhsPrefix pres n = mkName $ toUpcaseStr pres n -- used when a value name appears in an expression context -- works for both variables and datacons promoteValRhs :: Name -> DType promoteValRhs name | name == nilName = DConT nilName -- workaround for #21 | otherwise = DConT $ promoteTySym name 0 -- generates type-level symbol for a given name. Int parameter represents -- saturation: 0 - no parameters passed to the symbol, 1 - one parameter -- passed to the symbol, and so on. Works on both promoted and unpromoted -- names. promoteTySym :: Name -> Int -> Name promoteTySym name sat | name == undefinedName = anyTypeName | name == nilName = mkName $ "NilSym" ++ (show sat) -- treat unboxed tuples like tuples | Just degree <- tupleNameDegree_maybe name `mplus` unboxedTupleNameDegree_maybe name = mk_name_tc "Data.Singletons.Prelude.Instances" $ "Tuple" ++ show degree ++ "Sym" ++ (show sat) | otherwise = let capped = toUpcaseStr noPrefix name in if isHsLetter (head capped) then mkName (capped ++ "Sym" ++ (show sat)) else mkName (capped ++ (replicate (sat + 1) '$')) promoteClassName :: Name -> Name promoteClassName = prefixUCName "P" "#" mkTyName :: Quasi q => Name -> q Name mkTyName tmName = do let nameStr = nameBase tmName symbolic = not (isHsLetter (head nameStr)) qNewName (if symbolic then "ty" else nameStr) falseTySym :: DType falseTySym = promoteValRhs falseName trueTySym :: DType trueTySym = promoteValRhs trueName boolKi :: DKind boolKi = DConT boolName andTySym :: DType andTySym = promoteValRhs andName -- Singletons singDataConName :: Name -> Name singDataConName nm | nm == nilName = snilName | nm == consName = sconsName | Just degree <- tupleNameDegree_maybe nm = mkTupleDataName degree | Just degree <- unboxedTupleNameDegree_maybe nm = mkTupleDataName degree | otherwise = prefixUCName "S" ":%" nm singTyConName :: Name -> Name singTyConName name | name == listName = sListName | Just degree <- tupleNameDegree_maybe name = mkTupleTypeName degree | Just degree <- unboxedTupleNameDegree_maybe name = mkTupleTypeName degree | otherwise = prefixUCName "S" ":%" name singClassName :: Name -> Name singClassName = singTyConName singValName :: Name -> Name singValName n | n == undefinedName = undefinedName -- avoid unused variable warnings | head (nameBase n) == '_' = (prefixLCName "_s" "%") $ n | otherwise = (prefixLCName "s" "%") $ upcase n singFamily :: DType singFamily = DConT singFamilyName singKindConstraint :: DKind -> DPred singKindConstraint = DAppPr (DConPr singKindClassName) demote :: DType demote = DConT demoteName apply :: DType -> DType -> DType apply t1 t2 = DAppT (DAppT (DConT applyName) t1) t2 mkListE :: [DExp] -> DExp mkListE = foldr (\h t -> DConE consName `DAppE` h `DAppE` t) (DConE nilName) -- apply 
a type to a list of types using Apply type family -- This is defined here, not in Utils, to avoid cyclic dependencies foldApply :: DType -> [DType] -> DType foldApply = foldl apply -- make and equality predicate mkEqPred :: DType -> DType -> DPred mkEqPred ty1 ty2 = foldl DAppPr (DConPr equalityName) [ty1, ty2]
int-index/singletons
src/Data/Singletons/Names.hs
bsd-3-clause
7,886
1
15
1,439
1,820
1,030
790
-1
-1
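-- A hedged illustration (not from the module above) of the names promoteTySym
-- produces for an ordinary alphabetic identifier. It assumes toUpcaseStr simply
-- capitalises the name, which is its evident role in the definition above; the
-- results are therefore expectations, not tested doctests.
--
-- > nameBase (promoteTySym (mkName "lookup") 0)   -- expected "LookupSym0"
-- > nameBase (promoteTySym (mkName "lookup") 2)   -- expected "LookupSym2"
-- > nameBase (promoteTySym nilName 1)             -- "NilSym1" (special-cased above)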
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE OverloadedStrings #-} module Develop (run) where import Control.Applicative ((<|>)) import Control.Concurrent (threadDelay) import Control.Monad (guard) import Control.Monad.Trans (MonadIO(liftIO)) import qualified Data.HashMap.Strict as HashMap import qualified Data.ByteString.Char8 as BSC import System.Directory import System.FilePath import Snap.Core import Snap.Http.Server import Snap.Util.FileServe import qualified Text.Blaze.Html5 as H import qualified Text.Blaze.Html.Renderer.Utf8 as Blaze import qualified CommandLine.Args as Args import qualified Develop.Compile as Compile import qualified Develop.Generate.Help as Generate import qualified Develop.Generate.Index as Index import qualified Develop.Generate.NotFound as NotFound import qualified Develop.StaticFiles as StaticFiles -- RUN THE DEV SERVER run :: Args.DevFlags -> IO () run (Args.DevFlags maybePort) = -- TODO get `elm reactor` running again if True then putStrLn $ "The reactor is not available in the ALPHA period.\n\ \\n\ \The goal is for package authors to get code updated and give early feedback.\n\ \Professionals and hobbyists should NOT be ugrading at this time!\n" else let port = maybe 8000 id maybePort in do putStrLn (startupMessage port) httpServe (config port) $ serveFiles <|> route [ ("_compile", compile) ] <|> route [ ("_elm/move-to-root", moveToRoot)] <|> route [ ("_elm/create-new-project", createNewProject)] <|> serveDirectoryWith directoryConfig "." <|> serveAssets <|> error404 config :: Int -> Config Snap a config port = defaultConfig # setVerbose False # setPort port # setAccessLog ConfigNoLog # setErrorLog ConfigNoLog (#) :: a -> (a -> b) -> b (#) value func = func value -- HELPERS startupMessage :: Int -> String startupMessage port = "Go to <http://localhost:" ++ show port ++ "> to see your project dashboard." 
directoryConfig :: MonadSnap m => DirectoryConfig m directoryConfig = let customGenerator directory = do project <- liftIO (Index.getProject directory) modifyResponse $ setContentType "text/html; charset=utf-8" writeBuilder (Blaze.renderHtmlBuilder (Index.toHtml project)) in fancyDirectoryConfig { indexFiles = [] , indexGenerator = customGenerator } compile :: Snap () compile = do file <- getSafePath guard =<< liftIO (doesFileExist file) modifyResponse (setContentType "text/javascript") writeBS =<< liftIO (Compile.toJavaScript file) error404 :: Snap () error404 = do modifyResponse $ setResponseStatus 404 "Not Found" modifyResponse $ setContentType "text/html; charset=utf-8" writeBuilder (Blaze.renderHtmlBuilder NotFound.html) -- CREATE NEW PROJECT createNewProject :: Snap () createNewProject = do liftIO $ threadDelay (2 * 1000 * 1000) return () moveToRoot :: Snap () moveToRoot = liftIO Index.moveToRoot -- SERVE FILES serveFiles :: Snap () serveFiles = do file <- getSafePath guard =<< liftIO (doesFileExist file) serveElm file <|> serveFilePretty file serveHtml :: MonadSnap m => H.Html -> m () serveHtml html = do modifyResponse (setContentType "text/html") writeBuilder (Blaze.renderHtmlBuilder html) -- SERVE FILES + CODE HIGHLIGHTING serveFilePretty :: FilePath -> Snap () serveFilePretty file = let possibleExtensions = getSubExts (takeExtensions file) in case mconcat (map lookupMimeType possibleExtensions) of Nothing -> serveCode file Just mimeType -> serveFileAs mimeType file getSubExts :: String -> [String] getSubExts fullExtension = if null fullExtension then [] else fullExtension : getSubExts (takeExtensions (drop 1 fullExtension)) serveCode :: String -> Snap () serveCode file = do code <- liftIO (readFile file) serveHtml $ Generate.makeCodeHtml ('~' : '/' : file) code -- SERVE ELM serveElm :: FilePath -> Snap () serveElm file = do guard (takeExtension file == ".elm") serveHtml (Generate.makeElmHtml file) -- SERVE STATIC ASSETS serveAssets :: Snap () serveAssets = do file <- getSafePath case StaticFiles.lookup file of Nothing -> pass Just (content, mimeType) -> do modifyResponse (setContentType $ BSC.pack (mimeType ++ ";charset=utf-8")) writeBS content -- MIME TYPES lookupMimeType :: FilePath -> Maybe BSC.ByteString lookupMimeType ext = HashMap.lookup ext mimeTypeDict (==>) :: a -> b -> (a,b) (==>) a b = (a, b) mimeTypeDict :: HashMap.HashMap FilePath BSC.ByteString mimeTypeDict = HashMap.fromList [ ".asc" ==> "text/plain" , ".asf" ==> "video/x-ms-asf" , ".asx" ==> "video/x-ms-asf" , ".avi" ==> "video/x-msvideo" , ".bz2" ==> "application/x-bzip" , ".css" ==> "text/css" , ".dtd" ==> "text/xml" , ".dvi" ==> "application/x-dvi" , ".gif" ==> "image/gif" , ".gz" ==> "application/x-gzip" , ".htm" ==> "text/html" , ".html" ==> "text/html" , ".ico" ==> "image/x-icon" , ".jpeg" ==> "image/jpeg" , ".jpg" ==> "image/jpeg" , ".js" ==> "text/javascript" , ".json" ==> "application/json" , ".m3u" ==> "audio/x-mpegurl" , ".mov" ==> "video/quicktime" , ".mp3" ==> "audio/mpeg" , ".mpeg" ==> "video/mpeg" , ".mpg" ==> "video/mpeg" , ".ogg" ==> "application/ogg" , ".pac" ==> "application/x-ns-proxy-autoconfig" , ".pdf" ==> "application/pdf" , ".png" ==> "image/png" , ".qt" ==> "video/quicktime" , ".sig" ==> "application/pgp-signature" , ".spl" ==> "application/futuresplash" , ".svg" ==> "image/svg+xml" , ".swf" ==> "application/x-shockwave-flash" , ".tar" ==> "application/x-tar" , ".tar.bz2" ==> "application/x-bzip-compressed-tar" , ".tar.gz" ==> "application/x-tgz" , ".tbz" ==> 
"application/x-bzip-compressed-tar" , ".text" ==> "text/plain" , ".tgz" ==> "application/x-tgz" , ".ttf" ==> "application/x-font-truetype" , ".txt" ==> "text/plain" , ".wav" ==> "audio/x-wav" , ".wax" ==> "audio/x-ms-wax" , ".wma" ==> "audio/x-ms-wma" , ".wmv" ==> "video/x-ms-wmv" , ".xbm" ==> "image/x-xbitmap" , ".xml" ==> "text/xml" , ".xpm" ==> "image/x-xpixmap" , ".xwd" ==> "image/x-xwindowdump" , ".zip" ==> "application/zip" ]
evancz/cli
src/Develop.hs
bsd-3-clause
6,694
0
19
1,641
1,609
854
755
178
2
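-- A small illustration (not from the module above) of the extension handling used
-- by serveFilePretty, written as a hypothetical GHCi session; the values follow
-- directly from getSubExts and mimeTypeDict as defined above.
--
-- > getSubExts ".tar.gz"
-- [".tar.gz",".gz"]
-- > lookupMimeType ".tar.gz"
-- Just "application/x-tgz"
-- > lookupMimeType ".gz"
-- Just "application/x-gzip"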
-- Example of Multiple Type Binding {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} module MultiTypeBinding where import Control.Applicative (Alternative) import Control.Applicative (Applicative) import Control.Monad (MonadPlus) import Control.Monad.ST (ST) import Control.Monad.ST (runST) import Control.Monad.State (MonadState) import Control.Monad.State (StateT) import Control.Monad.State (evalStateT) import Control.Monad.State (gets) import Control.Monad.State (modify) import Control.Monad.State (runStateT) import Control.Monad.Trans (lift) import Data.Functor.Identity (runIdentity) import Data.IntMap (IntMap) import qualified Data.IntMap as IntMap import Data.STRef (STRef) import Data.STRef (newSTRef) import Data.STRef (readSTRef) import Data.STRef (writeSTRef) -- Interface for variable binding class Monad (m s) => Binding m s v where type Var m s :: * -> * newVar :: v -> (m s) (Var m s v) lookupVar :: Var m s v -> (m s) v updateVar :: Var m s v -> v -> (m s) () -- An implementation of binding with ST monads instance Binding ST s v where type Var ST s = STRef s newVar = newSTRef lookupVar = readSTRef updateVar = writeSTRef -- An implementation of binding with State monad + IntMap type IMVarId = IntMap.Key data IMVar s v = IMVar { imVarId :: IMVarId } deriving (Show, Eq) class IMEnvId e where getId :: e -> IMVarId setId :: e -> IMVarId -> e class IMEnvMap e v where getMap :: e -> IntMap v setMap :: e -> IntMap v -> e newtype IM e m s a = IM { unIM :: StateT e m a } deriving (Functor, Applicative, Alternative, Monad, MonadPlus, MonadState e) instance (Monad m, IMEnvId e , IMEnvMap e v) => Binding (IM e m) s v where type Var (IM e m) s = IMVar s newVar v = do vid <- gets getId modify $ \s -> setId s (vid + 1) let var = IMVar vid updateVar var v return var lookupVar (IMVar vid) = do si <- gets getMap return $ si IntMap.! vid updateVar (IMVar vid) v = do si <- gets getMap modify $ \s -> setMap s $ IntMap.insert vid v si runIM :: (forall s. IM e m s a) -> e -> m (a, e) runIM im = runStateT (unIM im) evalIM :: Monad m => (forall s. IM e m s a) -> e -> m a evalIM im = evalStateT (unIM im) -- Declaration per type (with concrete types) data MyIMEnv = MyIMEnv { imNextId :: IMVarId , imIMap :: IntMap Int , imBMap :: IntMap Bool , imCMap :: IntMap Color , imILMap :: IntMap [Int] , imBLMap :: IntMap [Bool] , imCLMap :: IntMap [Color] } deriving (Show) initialMyIMEnv :: MyIMEnv initialMyIMEnv = MyIMEnv { imNextId = 0 , imIMap = IntMap.empty , imBMap = IntMap.empty , imCMap = IntMap.empty , imILMap = IntMap.empty , imBLMap = IntMap.empty , imCLMap = IntMap.empty } runMyIM0 :: (forall s. IM MyIMEnv m s a) -> m (a, MyIMEnv) runMyIM0 im = runIM im initialMyIMEnv evalMyIM0 :: Monad m => (forall s. 
IM MyIMEnv m s a) -> m a evalMyIM0 im = evalIM im initialMyIMEnv instance IMEnvId MyIMEnv where getId = imNextId setId e vid = e { imNextId = vid } instance IMEnvMap MyIMEnv Int where getMap = imIMap setMap e m = e { imIMap = m } instance IMEnvMap MyIMEnv Bool where getMap = imBMap setMap e m = e { imBMap = m } data Color = Red | Green | Blue deriving (Show) instance IMEnvMap MyIMEnv Color where getMap = imCMap setMap e m = e { imCMap = m } instance IMEnvMap MyIMEnv [Int] where getMap = imILMap setMap e m = e { imILMap = m } instance IMEnvMap MyIMEnv [Bool] where getMap = imBLMap setMap e m = e { imBLMap = m } instance IMEnvMap MyIMEnv [Color] where getMap = imCLMap setMap e m = e { imCLMap = m } -- Declaration per type (with 2 type parameters) data MyIM2Env v1 v2 = MyIM2Env { im2NextId :: IMVarId , im2Map1 :: IntMap v1 , im2Map2 :: IntMap v2 } deriving (Show) initialMyIM2Env :: MyIM2Env v1 v2 initialMyIM2Env = MyIM2Env { im2NextId = 0 , im2Map1 = IntMap.empty , im2Map2 = IntMap.empty } runMyIM20 :: (forall s. IM (MyIM2Env v1 v2) m s a) -> m (a, MyIM2Env v1 v2) runMyIM20 im = runIM im initialMyIM2Env evalMyIM20 :: Monad m => (forall s. IM (MyIM2Env v1 v2) m s a) -> m a evalMyIM20 im = evalIM im initialMyIM2Env instance IMEnvId (MyIM2Env v1 v2) where getId = im2NextId setId e vid = e { im2NextId = vid } instance IMEnvMap (MyIM2Env v1 v2) v1 where getMap = im2Map1 setMap e m = e { im2Map1 = m } instance IMEnvMap (MyIM2Env v1 v2) v2 where getMap = im2Map2 setMap e m = e { im2Map2 = m } -- Example for use prog :: (Binding m s Bool, Binding m s Color) => m s ((Bool, Color), (Bool, Color)) prog = do vb <- newVar True vc <- newVar Red b <- lookupVar vb c <- lookupVar vc updateVar vb False updateVar vc Blue b' <- lookupVar vb c' <- lookupVar vc return ((b, c), (b', c')) {-| >>> testST ((True,Red),(False,Blue)) -} testST :: ((Bool, Color), (Bool, Color)) testST = runST prog {-| >>> testIM (((True,Red),(False,Blue)),MyIMEnv {imNextId = 2, imIMap = fromList [], imBMap = fromList [(0,False)], imCMap = fromList [(1,Blue)]}) -} testIM :: (((Bool, Color), (Bool, Color)), MyIMEnv) testIM = runIdentity $ runMyIM0 prog -- with List monad as inner monad {-| >>> testIML [(True,Red),(True,Green),(True,Blue),(False,Red),(False,Green),(False,Blue)] -} testIML :: [(Bool, Color)] testIML = evalMyIM0 $ do vb <- newVar [True, False] vc <- newVar [Red, Green, Blue] lb <- lookupVar vb lc <- lookupVar vc b <- IM $ lift lb c <- IM $ lift lc return (b, c) -- with predefined instances of class IMEnvId, IMEnvMap progIM2 :: (Binding m e Bool, Binding m e Color) => m e (Bool, Color) progIM2 = do vb <- newVar True vc <- newVar Green b <- lookupVar vb c <- lookupVar vc return (b, c) {-| >>> testIM2 ((True,Green),MyIM2Env {im2NextId = 2, im2Map1 = fromList [(0,True)], im2Map2 = fromList [(1,Green)]}) -} testIM2 :: ((Bool, Color), MyIM2Env Bool Color) testIM2 = runIdentity $ runMyIM20 progIM2 -- Type check with phantom type parameter -- Exporting variable should cause type error -- progIMExportError :: (Binding m s Int) => m s (Var m s Int) -- progIMExportError = newVar (123::Int) -- progIMExportError :: IM MyIMEnv s (Var (IM MyIMEnv) s Int) -- progIMExportError = newVar 123 -- testIMExportError = runMyIM0 (newVar 123) -- Importing variable should cause type error -- progIMImportError :: (Binding m s Int) => Var m s Int -> m s Int -- progIMImportError v = lookupVar v -- progIMImportError :: Var (IM MyIMEnv) s Int -> IM MyIMEnv s Int -- progIMImportError v = lookupVar v -- testIMImportError :: Var (IM MyIMEnv) s Int -> (Int, 
MyIMEnv) -- testIMImportError v = runMyIM0 (lookupVar v)
notae/haskell-exercise
pack/MultiTypeBinding.hs
bsd-3-clause
7,209
0
12
1,844
2,197
1,191
1,006
172
1
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE InstanceSigs #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE UndecidableInstances #-} -- {-# LANGUAGE TypeOperators #-} {- How to GHCI this: stack exec ghci -- -isrc -XFlexibleContexts -XTypeFamilies -XPolyKinds -XDataKinds src/Sky/Compositional/TypeShiet.hs :kind! TOr False True -} module Sky.Compositional.PolyKindCats where class Category hom where ident :: hom a a compose :: hom a b -> hom b c -> hom a c instance Category (->) where ident = id compose = flip (.) -- Natural transformations newtype NatTrans f g = NatTrans { unNatTrans :: (forall a. f a -> g a) } instance Category NatTrans where ident = NatTrans id compose f g = NatTrans (unNatTrans g . unNatTrans f) -- Arbitrary functors class HFunctor hom f where hmap :: hom a b -> hom (f a) (f b) -- Check with Fixpoint and some data structure data ListR r a = Nil | Cons a (r a) deriving (Eq, Show) instance HFunctor (->) r => HFunctor (->) (ListR r) where hmap :: (a -> b) -> (ListR r a -> ListR r b) hmap f (Nil) = Nil hmap f (Cons a r) = Cons (f a) (hmap f r) instance HFunctor (NatTrans) (ListR) where hmap :: forall a b. (NatTrans a b) -> (NatTrans (ListR a) (ListR b)) --hmap :: (forall x. a x -> b x) -> (forall y. List a y -> List b y) hmap (NatTrans f) = NatTrans g where g :: forall y. ListR a y -> ListR b y g (Nil) = Nil g (Cons y ar) = Cons y (f ar) --data Fix (f :: k -> k) = Fix (f (Fix f)) -- Does not work: f (Fix f) :: k is not a type (*) --data Fix (f :: * -> *) = Fix (f (Fix f)) data Fix1 f = Fix1 (f (Fix1 f)) data Fix2 f a = Fix2 (f (Fix2 f) a) instance Show (x (Fix2 x) a) => Show (Fix2 x a) where show (Fix2 x) = "(" ++ show x ++ ")" instance HFunctor (->) (x (Fix2 x)) => HFunctor (->) (Fix2 x) where hmap :: (a -> b) -> (Fix2 x a -> Fix2 x b) hmap f (Fix2 xFix2a) = Fix2 $ hmap f xFix2a type List = Fix2 ListR -- instance HFunctor (->) (List) where -- hmap :: (a -> b) -> (List a -> List b) -- hmap f (Fix2 (Nil)) = Fix2 $ Nil -- hmap f (Fix2 (Cons a xs)) = Fix2 $ Cons (f a) (hmap f xs) intList :: List Int intList = Fix2 $ Cons (-1) $ Fix2 $ Cons 1 $ Fix2 Nil demo :: List Bool demo = hmap (> 0) intList
xicesky/sky-haskell-playground
src/Sky/Compositional/PolyKindCats.hs
bsd-3-clause
2,553
0
11
721
789
417
372
48
1
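-- A minimal sketch (not from the module above, but assuming it is compiled in the
-- same module, so RankNTypes is available): a concrete natural transformation in
-- the NatTrans category, and a composition with the identity arrow.
safeHead :: NatTrans [] Maybe
safeHead = NatTrans (\xs -> case xs of
                              []      -> Nothing
                              (x : _) -> Just x)

-- compose ident safeHead is again just safeHead, per the Category instance above
demoNat :: Maybe Int
demoNat = unNatTrans (compose ident safeHead) [1, 2, 3]   -- Just 1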
----------------------------------------------------------------------------- -- | -- License : BSD-3-Clause -- Maintainer : Oleg Grenrus <[email protected]> -- module GitHub.Data.GitData where import GitHub.Data.Definitions import GitHub.Data.Name (Name) import GitHub.Data.URL (URL) import GitHub.Internal.Prelude import Prelude () import qualified Data.Vector as V -- | The options for querying commits. data CommitQueryOption = CommitQuerySha !Text | CommitQueryPath !Text | CommitQueryAuthor !Text | CommitQuerySince !UTCTime | CommitQueryUntil !UTCTime deriving (Show, Eq, Ord, Generic, Typeable, Data) data Stats = Stats { statsAdditions :: !Int , statsTotal :: !Int , statsDeletions :: !Int } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Stats where rnf = genericRnf instance Binary Stats data Commit = Commit { commitSha :: !(Name Commit) , commitParents :: !(Vector Tree) , commitUrl :: !URL , commitGitCommit :: !GitCommit , commitCommitter :: !(Maybe SimpleUser) , commitAuthor :: !(Maybe SimpleUser) , commitFiles :: !(Vector File) , commitStats :: !(Maybe Stats) } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Commit where rnf = genericRnf instance Binary Commit data Tree = Tree { treeSha :: !(Name Tree) , treeUrl :: !URL , treeGitTrees :: !(Vector GitTree) } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Tree where rnf = genericRnf instance Binary Tree data GitTree = GitTree { gitTreeType :: !Text , gitTreeSha :: !(Name GitTree) -- Can be empty for submodule , gitTreeUrl :: !(Maybe URL) , gitTreeSize :: !(Maybe Int) , gitTreePath :: !Text , gitTreeMode :: !Text } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData GitTree where rnf = genericRnf instance Binary GitTree data GitCommit = GitCommit { gitCommitMessage :: !Text , gitCommitUrl :: !URL , gitCommitCommitter :: !GitUser , gitCommitAuthor :: !GitUser , gitCommitTree :: !Tree , gitCommitSha :: !(Maybe (Name GitCommit)) , gitCommitParents :: !(Vector Tree) } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData GitCommit where rnf = genericRnf instance Binary GitCommit data Blob = Blob { blobUrl :: !URL , blobEncoding :: !Text , blobContent :: !Text , blobSha :: !(Name Blob) , blobSize :: !Int } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Blob where rnf = genericRnf instance Binary Blob data Tag = Tag { tagName :: !Text , tagZipballUrl :: !URL , tagTarballUrl :: !URL , tagCommit :: !BranchCommit } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Tag where rnf = genericRnf instance Binary Tag data Branch = Branch { branchName :: !Text , branchCommit :: !BranchCommit } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Branch where rnf = genericRnf data BranchCommit = BranchCommit { branchCommitSha :: !Text , branchCommitUrl :: !URL } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData BranchCommit where rnf = genericRnf instance Binary BranchCommit data Diff = Diff { diffStatus :: !Text , diffBehindBy :: !Int , diffPatchUrl :: !URL , diffUrl :: !URL , diffBaseCommit :: !Commit , diffCommits :: !(Vector Commit) , diffTotalCommits :: !Int , diffHtmlUrl :: !URL , diffFiles :: !(Vector File) , diffAheadBy :: !Int , diffDiffUrl :: !URL , diffPermalinkUrl :: !URL } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData Diff where rnf = genericRnf instance Binary Diff data NewGitReference = NewGitReference { newGitReferenceRef :: !Text , newGitReferenceSha :: !Text } deriving (Show, 
Data, Typeable, Eq, Ord, Generic) instance NFData NewGitReference where rnf = genericRnf instance Binary NewGitReference data GitReference = GitReference { gitReferenceObject :: !GitObject , gitReferenceUrl :: !URL , gitReferenceRef :: !Text } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData GitReference where rnf = genericRnf instance Binary GitReference data GitObject = GitObject { gitObjectType :: !Text , gitObjectSha :: !Text , gitObjectUrl :: !URL } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData GitObject where rnf = genericRnf instance Binary GitObject data GitUser = GitUser { gitUserName :: !Text , gitUserEmail :: !Text , gitUserDate :: !UTCTime } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData GitUser where rnf = genericRnf instance Binary GitUser data File = File { fileBlobUrl :: !(Maybe URL) , fileStatus :: !Text , fileRawUrl :: !(Maybe URL) , fileAdditions :: !Int , fileSha :: !(Maybe Text) , fileChanges :: !Int , filePatch :: !(Maybe Text) , fileFilename :: !Text , fileDeletions :: !Int } deriving (Show, Data, Typeable, Eq, Ord, Generic) instance NFData File where rnf = genericRnf instance Binary File -- JSON instances instance FromJSON Stats where parseJSON = withObject "Stats" $ \o -> Stats <$> o .: "additions" <*> o .: "total" <*> o .: "deletions" instance FromJSON Commit where parseJSON = withObject "Commit" $ \o -> Commit <$> o .: "sha" <*> o .: "parents" <*> o .: "url" <*> o .: "commit" <*> o .:? "committer" <*> o .:? "author" <*> o .:? "files" .!= V.empty <*> o .:? "stats" instance FromJSON Tree where parseJSON = withObject "Tree" $ \o -> Tree <$> o .: "sha" <*> o .: "url" <*> o .:? "tree" .!= V.empty instance FromJSON GitTree where parseJSON = withObject "GitTree" $ \o -> GitTree <$> o .: "type" <*> o .: "sha" <*> o .:? "url" <*> o .:? "size" <*> o .: "path" <*> o .: "mode" instance FromJSON GitCommit where parseJSON = withObject "GitCommit" $ \o -> GitCommit <$> o .: "message" <*> o .: "url" <*> o .: "committer" <*> o .: "author" <*> o .: "tree" <*> o .:? "sha" <*> o .:? "parents" .!= V.empty instance FromJSON GitUser where parseJSON = withObject "GitUser" $ \o -> GitUser <$> o .: "name" <*> o .: "email" <*> o .: "date" instance FromJSON File where parseJSON = withObject "File" $ \o -> File <$> o .:? "blob_url" <*> o .: "status" <*> o .:? "raw_url" <*> o .: "additions" <*> o .:? "sha" <*> o .: "changes" <*> o .:? "patch" <*> o .: "filename" <*> o .: "deletions" instance ToJSON NewGitReference where toJSON (NewGitReference r s) = object [ "ref" .= r, "sha" .= s ] instance FromJSON GitReference where parseJSON = withObject "GitReference" $ \o -> GitReference <$> o .: "object" <*> o .: "url" <*> o .: "ref" instance FromJSON GitObject where parseJSON = withObject "GitObject" $ \o -> GitObject <$> o .: "type" <*> o .: "sha" <*> o .: "url" instance FromJSON Diff where parseJSON = withObject "Diff" $ \o -> Diff <$> o .: "status" <*> o .: "behind_by" <*> o .: "patch_url" <*> o .: "url" <*> o .: "base_commit" <*> o .:? "commits" .!= V.empty <*> o .: "total_commits" <*> o .: "html_url" <*> o .:? 
"files" .!= V.empty <*> o .: "ahead_by" <*> o .: "diff_url" <*> o .: "permalink_url" instance FromJSON Blob where parseJSON = withObject "Blob" $ \o -> Blob <$> o .: "url" <*> o .: "encoding" <*> o .: "content" <*> o .: "sha" <*> o .: "size" instance FromJSON Tag where parseJSON = withObject "Tag" $ \o -> Tag <$> o .: "name" <*> o .: "zipball_url" <*> o .: "tarball_url" <*> o .: "commit" instance FromJSON Branch where parseJSON = withObject "Branch" $ \o -> Branch <$> o .: "name" <*> o .: "commit" instance FromJSON BranchCommit where parseJSON = withObject "BranchCommit" $ \o -> BranchCommit <$> o .: "sha" <*> o .: "url"
jwiegley/github
src/GitHub/Data/GitData.hs
bsd-3-clause
8,572
0
33
2,458
2,568
1,357
1,211
399
0
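-- A usage sketch (not from the package): exercising the BranchCommit FromJSON
-- instance above with aeson's eitherDecode. The JSON literal is hypothetical, and
-- it assumes the URL FromJSON instance accepts a plain JSON string (as its use
-- with .: above suggests) plus OverloadedStrings for the ByteString literal.
--
-- > eitherDecode "{\"sha\": \"deadbeef\", \"url\": \"https://example.invalid\"}"
-- >     :: Either String BranchCommit
--
-- which should come back as Right with branchCommitSha == "deadbeef".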
{-# LANGUAGE DeriveDataTypeable#-} module Main where import Control.Monad import Data.Generics import Data.List import Iptables import Iptables.Parser import Iptables.Print import Iptables.Types import Iptables.Types.Arbitrary import System.Console.GetOpt import System.Environment import System.Exit import Test.QuickCheck hiding (Result()) import Test.QuickCheck.Property -- GetOpt stuff -------------------------------------- data GOFlag = Version | Help | ParseFile FilePath | Test | Gen deriving (Eq, Ord, Show, Typeable, Data) options = [ Option ['h'] ["help"] (NoArg Help) "Print this help message" , Option [] ["parse"] (ReqArg (\a -> ParseFile a) "<file>") "Parse file. Example: test --parse ./iptables-save.dat" , Option [] ["test"] (NoArg Test) "Run tests" , Option [] ["generate"] (NoArg Gen) "Generate example iptables config in iptables-save -c format" ] ------------------------------------------------------ main :: IO () main = do args <- getArgs let (opts, params, errs) = getOpt RequireOrder options args when (not $ null errs) $ do putStr $ concat $ nub errs exitFailure when (Help `elem` opts) $ do putStrLn "Iptables-helpers testing utility" putStr $ usageInfo "Usage:" options exitSuccess let getParseFile :: GOFlag -> Maybe FilePath getParseFile (ParseFile a) = Just a getParseFile _ = Nothing case everything mplus (mkQ Nothing getParseFile) opts of Just file -> do -- putStrLn $ "Trying to open '" ++ file ++ "' ..." a <- readFile file let b = parseIptables a case b of Left er -> do putStrLn "Decoding failed:" putStrLn $ show er Right res -> do -- putStrLn "Iptables config has been parsed:" putStrLn $ printIptables $ sortIptables res exitSuccess Nothing -> return () when ( Gen `elem` opts) $ do testData <- sample' (arbitrary :: Gen Iptables) putStr $ printIptables $ sortIptables $ testData !! 
6 exitSuccess when (Test `elem` opts) $ do quickCheck tryToParsePrint exitSuccess tryToParsePrint :: Iptables -> Result tryToParsePrint a = case parseIptables $ printIptables $ sortIptables a of Left err -> MkResult (Just False) True (show err ++ "\n" ++ printIptables (sortIptables a)) False False [] [] Right res -> let a' = sortIptables a res' = sortIptables res in if a' == res' then MkResult (Just True) True "" False False [] [] else MkResult (Just False) True ( printIptables a' ++ "\n" ++ printIptables res' ++ iptablesDiff a' res' ) False False [] [] iptablesDiff :: Iptables -> Iptables -> String iptablesDiff ip1 ip2 = if map cName (tFilter ip1) /= map cName (tFilter ip2) then "1: \n" ++ show (map cName $ tFilter ip1) ++ "\n" ++ show (map cName $ tFilter ip2) else "" ++ if map cName (tNat ip1) /= map cName (tNat ip2) then "1: \n" ++ show (map cName $ tNat ip1) ++ "\n" ++ show (map cName $ tNat ip2) else "" ++ if map cName (tMangle ip1) /= map cName (tMangle ip2) then "1: \n" ++ show (map cName $ tMangle ip1) ++ "\n" ++ show (map cName $ tMangle ip2) else "" ++ if map cName (tRaw ip1) /= map cName (tRaw ip2) then "1: \n" ++ show (map cName $ tRaw ip1) ++ "\n" ++ show (map cName $ tRaw ip2) else "" ++ tableDiff (tFilter ip1) (tFilter ip2) ++ tableDiff (tNat ip1) (tNat ip2) ++ tableDiff (tMangle ip1) (tMangle ip2) ++ tableDiff (tRaw ip1) (tRaw ip2) tableDiff :: [Chain] -> [Chain] -> String tableDiff [] (c:cx) = "Table 2 has more chains: " ++ show (map cName (c:cx)) tableDiff (c:cx) [] = "Table 1 has more chains: " ++ show (map cName (c:cx)) tableDiff [] [] = "" tableDiff (c1:cx1) (c2:cx2) = chainDiff c1 c2 ++ tableDiff cx1 cx2 chainDiff :: Chain -> Chain -> String chainDiff c1 c2 = if cName c1 /= cName c2 then "Chains have different names: " ++ cName c1 ++ "/" ++ cName c2 ++ "\n" else if cPolicy c1 /= cPolicy c2 then "Chains nave different policy:\n" ++ (show $ cPolicy c1) ++ "/" ++ (show $ cPolicy c2) ++ "\n" else "" ++ rulesDiff (cRules c1) (cRules c2) rulesDiff :: [Rule] -> [Rule] -> String rulesDiff rs1 rs2 = concat $ zipWith (\ r1 r2 -> let equal = r1 == r2 in if equal then "" else show equal ++ "\n" ++ show r1 ++ "\n" ++ show r2 ) rs1 rs2
etarasov/iptables-helpers
src/Test.hs
bsd-3-clause
5,120
0
18
1,799
1,643
813
830
116
5
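-- The same round-trip check as tryToParsePrint above, reduced to a plain Bool
-- property; a sketch assuming it lives in the same module (parseIptables,
-- printIptables, sortIptables and the Arbitrary instance are the module's own).
prop_roundTrip :: Iptables -> Bool
prop_roundTrip cfg =
    case parseIptables (printIptables cfg') of
      Left _    -> False
      Right res -> sortIptables res == cfg'
  where
    cfg' = sortIptables cfg

-- runnable with: quickCheck prop_roundTrip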
{-# LANGUAGE CPP #-} {-# OPTIONS_HADDOCK hide #-} module Distribution.Compat.CopyFile ( copyFile, filesEqual, copyOrdinaryFile, copyExecutableFile, setFileOrdinary, setFileExecutable, setDirOrdinary, ) where import Control.Applicative ( (<$>), (<*>) ) import Control.Monad ( when ) import Control.Exception ( bracket, bracketOnError, evaluate, throwIO ) import qualified Data.ByteString.Lazy as BSL import Distribution.Compat.Exception ( catchIO ) import System.IO.Error ( ioeSetLocation ) import System.Directory ( renameFile, removeFile ) import Distribution.Compat.TempFile ( openBinaryTempFile ) import System.FilePath ( takeDirectory ) import System.IO ( openBinaryFile, IOMode(ReadMode), hClose, hGetBuf, hPutBuf , withBinaryFile ) import Foreign ( allocaBytes ) #ifndef mingw32_HOST_OS import System.Posix.Internals (withFilePath) import System.Posix.Types ( FileMode ) import System.Posix.Internals ( c_chmod ) import Foreign.C ( throwErrnoPathIfMinus1_ ) #endif /* mingw32_HOST_OS */ copyOrdinaryFile, copyExecutableFile :: FilePath -> FilePath -> IO () copyOrdinaryFile src dest = copyFile src dest >> setFileOrdinary dest copyExecutableFile src dest = copyFile src dest >> setFileExecutable dest setFileOrdinary, setFileExecutable, setDirOrdinary :: FilePath -> IO () #ifndef mingw32_HOST_OS setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x setFileMode :: FilePath -> FileMode -> IO () setFileMode name m = withFilePath name $ \s -> do throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) #else setFileOrdinary _ = return () setFileExecutable _ = return () #endif -- This happens to be true on Unix and currently on Windows too: setDirOrdinary = setFileExecutable copyFile :: FilePath -> FilePath -> IO () copyFile fromFPath toFPath = copy `catchIO` (\ioe -> throwIO (ioeSetLocation ioe "copyFile")) where copy = bracket (openBinaryFile fromFPath ReadMode) hClose $ \hFrom -> bracketOnError openTmp cleanTmp $ \(tmpFPath, hTmp) -> do allocaBytes bufferSize $ copyContents hFrom hTmp hClose hTmp renameFile tmpFPath toFPath openTmp = openBinaryTempFile (takeDirectory toFPath) ".copyFile.tmp" cleanTmp (tmpFPath, hTmp) = do hClose hTmp `catchIO` \_ -> return () removeFile tmpFPath `catchIO` \_ -> return () bufferSize = 4096 copyContents hFrom hTo buffer = do count <- hGetBuf hFrom buffer bufferSize when (count > 0) $ do hPutBuf hTo buffer count copyContents hFrom hTo buffer -- | Checks if two files are byte-identical. -- Returns False if either of the files do not exist. filesEqual :: FilePath -> FilePath -> IO Bool filesEqual f1 f2 = (`catchIO` \ _ -> return False) $ do withBinaryFile f1 ReadMode $ \ h1 -> do withBinaryFile f2 ReadMode $ \ h2 -> do evaluate =<< (==) <$> BSL.hGetContents h1 <*> BSL.hGetContents h2
fpco/cabal
Cabal/Distribution/Compat/CopyFile.hs
bsd-3-clause
3,254
0
19
808
783
428
355
-1
-1
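-- A small usage sketch (not from the module above; file names are hypothetical):
-- copy a file with ordinary permissions, then verify the copy byte-for-byte.
backupSetupConfig :: IO ()
backupSetupConfig = do
  copyOrdinaryFile "dist/setup-config" "dist/setup-config.bak"  -- copyFile, then chmod 644
  same <- filesEqual "dist/setup-config" "dist/setup-config.bak"
  putStrLn (if same then "backup is byte-identical" else "backup differs")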
module ZipTest where --import Base import PreludeIO class Zip a b c where z :: a -> b -> c instance Zip [a] [b] [(a,b)] where z = zip instance Zip [(a,b)] c_d e => Zip [a] [b] (c_d->e) where z as bs c_d = z (z as bs) c_d test = do print ( z [1] [2] ) print ( z [1] [2] [3]) print ( z [1] [2] [3] [4] )
rodrigogribeiro/mptc
src/Libs/ZipTest.hs
bsd-3-clause
334
0
10
106
218
118
100
-1
-1
module Ho.Collected( CollectedHo(..), choDataTable, choClassHierarchy, choTypeSynonyms, choFixities, choAssumps, choRules, choEs, updateChoHo )where import Control.Monad.Identity import Data.List import Data.Monoid import DataConstructors import E.Annotate import E.E import Ho.Type import Info.Types import Name.Name import Util.SetLike import qualified Data.Map as Map import qualified Info.Info as Info choDataTable = hoDataTable . hoBuild . choHo choClassHierarchy = hoClassHierarchy . hoTcInfo . choHo choTypeSynonyms = hoTypeSynonyms . hoTcInfo . choHo choFixities = hoFixities . hoTcInfo . choHo choAssumps = hoAssumps . hoTcInfo . choHo choRules = hoRules . hoBuild . choHo choEs cho = [ (combHead c,combBody c) | c <- values $ choCombinators cho] instance Monoid CollectedHo where mempty = updateChoHo CollectedHo { choExternalNames = mempty, choOrphanRules = mempty, choHoMap = Map.singleton primModule pho, choCombinators = mempty, choHo = error "choHo-a", choVarMap = error "choVarMap-a", choLibDeps = mempty } where pho = mempty { hoBuild = mempty { hoDataTable = dataTablePrims } } a `mappend` b = updateChoHo CollectedHo { choExternalNames = choExternalNames a `mappend` choExternalNames b, choVarMap = error "choVarMap-b", choOrphanRules = choOrphanRules a `mappend` choOrphanRules b, choCombinators = choCombinators a `mergeChoCombinators` choCombinators b, choLibDeps = choLibDeps a `mappend` choLibDeps b, choHo = error "choHo-b", choHoMap = Map.union (choHoMap a) (choHoMap b) } updateChoHo cho = cho { choHo = ho, choVarMap = varMap } where ho = hoBuild_u (hoEs_u f) . mconcat . Map.elems $ choHoMap cho f ds = runIdentity $ annotateDs mmap (\_ -> return) (\_ -> return) (\_ -> return) (map g ds) where mmap = sfilter (\(k,_) -> (k `notElem` (map (tvrIdent . fst) ds))) varMap g (t,e) = case mlookup (tvrIdent t) varMap of Just (Just (EVar t')) -> (t',e) _ -> (t,e) varMap = fmap (\c -> Just (EVar $ combHead c)) $ choCombinators cho -- this will have to merge rules and properties. mergeChoCombinators :: IdMap Comb -> IdMap Comb -> IdMap Comb mergeChoCombinators x y = unionWith f x y where f c1 c2 = combRules_s (combRules c1 `Data.List.union` combRules c2) . combHead_s (merge (combHead c1) (combHead c2)) $ c1 merge ta tb = ta { tvrInfo = minfo' } where minfo = tvrInfo ta `mappend` tvrInfo tb minfo' = dex (undefined :: Properties) $ minfo dex dummy y = g (Info.lookup (tvrInfo tb) `asTypeOf` Just dummy) where g Nothing = y g (Just x) = Info.insertWith mappend x y
dec9ue/jhc_copygc
src/Ho/Collected.hs
gpl-2.0
2,775
0
17
666
941
514
427
64
2
{-# LANGUAGE FlexibleInstances, FlexibleContexts, TypeFamilies, MultiParamTypeClasses, GeneralizedNewtypeDeriving, StandaloneDeriving #-} {-| A pure implementation of MonadLog using MonadWriter -} {- Copyright (C) 2014 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Ganeti.Logging.WriterLog ( WriterLogT , WriterLog , runWriterLogT , runWriterLog , dumpLogSeq , execWriterLogT , execWriterLog ) where import Control.Applicative import Control.Monad import Control.Monad.Base import Control.Monad.IO.Class import Control.Monad.Trans.Control import Control.Monad.Writer import qualified Data.Foldable as F import Data.Functor.Identity import Data.Sequence import Ganeti.Logging -- * The data type of the monad transformer type LogSeq = Seq (Priority, String) type WriterSeq = WriterT LogSeq -- | A monad transformer that adds pure logging capability. newtype WriterLogT m a = WriterLogT { unwrapWriterLogT :: WriterSeq m a } deriving (Functor, Applicative, Alternative, Monad, MonadPlus, MonadIO, MonadTrans) deriving instance (MonadBase IO m) => MonadBase IO (WriterLogT m) type WriterLog = WriterLogT Identity -- Runs a 'WriterLogT', returning the result and accumulated messages. runWriterLogT :: WriterLogT m a -> m (a, LogSeq) runWriterLogT = runWriterT . unwrapWriterLogT -- Runs a 'WriterLog', returning the result and accumulated messages. runWriterLog :: WriterLog a -> (a, LogSeq) runWriterLog = runIdentity . runWriterLogT -- | Runs a 'WriterLogT', and when it finishes, resends all log messages -- to the underlying monad that implements 'MonadLog'. -- -- This can be used to delay logging messages, by accumulating them -- in 'WriterLogT', and resending them at the end to the underlying monad. execWriterLogT :: (MonadLog m) => WriterLogT m a -> m a execWriterLogT k = do (r, msgs) <- runWriterLogT k F.mapM_ (uncurry logAt) msgs return r -- | Sends all log messages to the a monad that implements 'MonadLog'. dumpLogSeq :: (MonadLog m) => LogSeq -> m () dumpLogSeq = F.mapM_ (uncurry logAt) -- | Runs a 'WriterLog', and when it finishes, resends all log messages -- to the a monad that implements 'MonadLog'. 
execWriterLog :: (MonadLog m) => WriterLog a -> m a execWriterLog k = do let (r, msgs) = runWriterLog k dumpLogSeq msgs return r instance (Monad m) => MonadLog (WriterLogT m) where logAt = curry (WriterLogT . tell . singleton) instance MonadTransControl WriterLogT where newtype StT WriterLogT a = StWriterLog { unStWriterLog :: (a, LogSeq) } liftWith f = WriterLogT . WriterT $ liftM (\x -> (x, mempty)) (f $ liftM StWriterLog . runWriterLogT) restoreT = WriterLogT . WriterT . liftM unStWriterLog {-# INLINE liftWith #-} {-# INLINE restoreT #-} instance (MonadBaseControl IO m) => MonadBaseControl IO (WriterLogT m) where newtype StM (WriterLogT m) a = StMWriterLog { runStMWriterLog :: ComposeSt WriterLogT m a } liftBaseWith = defaultLiftBaseWith StMWriterLog restoreM = defaultRestoreM runStMWriterLog {-# INLINE liftBaseWith #-} {-# INLINE restoreM #-}
apyrgio/ganeti
src/Ganeti/Logging/WriterLog.hs
bsd-2-clause
4,387
0
11
795
668
370
298
63
1
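-- A usage sketch (not from the module above): accumulate log messages purely and
-- replay them later. It assumes Ganeti.Logging exports an hslogger-style Priority
-- constructor such as INFO for logAt's first argument, and a MonadLog IO instance
-- for the replay; both are assumptions, not shown above.
demoPureLog :: (Int, LogSeq)
demoPureLog = runWriterLog $ do
  logAt INFO "starting the pure computation"
  let answer = 2 + 2
  logAt INFO ("computed " ++ show answer)
  return answer

-- replaying in IO afterwards:
--   let (r, msgs) = demoPureLog in dumpLogSeq msgs >> print r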
----------------------------------------------------------------------------- -- | -- Module : Data.Set -- Copyright : (c) Daan Leijen 2002 -- License : BSD-style -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- An efficient implementation of sets. -- -- Since many function names (but not the type name) clash with -- "Prelude" names, this module is usually imported @qualified@, e.g. -- -- > import Data.Set (Set) -- > import qualified Data.Set as Set -- -- The implementation of 'Set' is based on /size balanced/ binary trees (or -- trees of /bounded balance/) as described by: -- -- * Stephen Adams, \"/Efficient sets: a balancing act/\", -- Journal of Functional Programming 3(4):553-562, October 1993, -- <http://www.swiss.ai.mit.edu/~adams/BB>. -- -- * J. Nievergelt and E.M. Reingold, -- \"/Binary search trees of bounded balance/\", -- SIAM journal of computing 2(1), March 1973. -- -- Note that the implementation is /left-biased/ -- the elements of a -- first argument are always preferred to the second, for example in -- 'union' or 'insert'. Of course, left-biasing can only be observed -- when equality is an equivalence relation instead of structural -- equality. ----------------------------------------------------------------------------- module Data.Set ( -- * Set type Set -- instance Eq,Ord,Show,Read,Data,Typeable -- * Operators , (\\) -- * Query , null , size , member , notMember , isSubsetOf , isProperSubsetOf -- * Construction , empty , singleton , insert , delete -- * Combine , union, unions , difference , intersection -- * Filter , filter , partition , split , splitMember -- * Map , map , mapMonotonic -- * Fold , fold -- * Min\/Max , findMin , findMax , deleteMin , deleteMax , deleteFindMin , deleteFindMax , maxView , minView -- * Conversion -- ** List , elems , toList , fromList -- ** Ordered list , toAscList , fromAscList , fromDistinctAscList -- * Debugging , showTree , showTreeWith , valid ) where import Prelude hiding (filter,foldr,null,map) import qualified Data.List as List import Data.Monoid (Monoid(..)) import Data.Typeable import Data.Foldable (Foldable(foldMap)) {- -- just for testing import QuickCheck import List (nub,sort) import qualified List -} #if __GLASGOW_HASKELL__ import Text.Read import Data.Generics.Basics import Data.Generics.Instances #endif {-------------------------------------------------------------------- Operators --------------------------------------------------------------------} infixl 9 \\ -- -- | /O(n+m)/. See 'difference'. (\\) :: Ord a => Set a -> Set a -> Set a m1 \\ m2 = difference m1 m2 {-------------------------------------------------------------------- Sets are size balanced trees --------------------------------------------------------------------} -- | A set of values @a@. data Set a = Tip | Bin {-# UNPACK #-} !Size a !(Set a) !(Set a) type Size = Int instance Ord a => Monoid (Set a) where mempty = empty mappend = union mconcat = unions instance Foldable Set where foldMap f Tip = mempty foldMap f (Bin _s k l r) = foldMap f l `mappend` f k `mappend` foldMap f r #if __GLASGOW_HASKELL__ {-------------------------------------------------------------------- A Data instance --------------------------------------------------------------------} -- This instance preserves data abstraction at the cost of inefficiency. -- We omit reflection services for the sake of data abstraction. 
instance (Data a, Ord a) => Data (Set a) where gfoldl f z set = z fromList `f` (toList set) toConstr _ = error "toConstr" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNorepType "Data.Set.Set" dataCast1 f = gcast1 f #endif {-------------------------------------------------------------------- Query --------------------------------------------------------------------} -- | /O(1)/. Is this the empty set? null :: Set a -> Bool null t = case t of Tip -> True Bin sz x l r -> False -- | /O(1)/. The number of elements in the set. size :: Set a -> Int size t = case t of Tip -> 0 Bin sz x l r -> sz -- | /O(log n)/. Is the element in the set? member :: Ord a => a -> Set a -> Bool member x t = case t of Tip -> False Bin sz y l r -> case compare x y of LT -> member x l GT -> member x r EQ -> True -- | /O(log n)/. Is the element not in the set? notMember :: Ord a => a -> Set a -> Bool notMember x t = not $ member x t {-------------------------------------------------------------------- Construction --------------------------------------------------------------------} -- | /O(1)/. The empty set. empty :: Set a empty = Tip -- | /O(1)/. Create a singleton set. singleton :: a -> Set a singleton x = Bin 1 x Tip Tip {-------------------------------------------------------------------- Insertion, Deletion --------------------------------------------------------------------} -- | /O(log n)/. Insert an element in a set. -- If the set already contains an element equal to the given value, -- it is replaced with the new value. insert :: Ord a => a -> Set a -> Set a insert x t = case t of Tip -> singleton x Bin sz y l r -> case compare x y of LT -> balance y (insert x l) r GT -> balance y l (insert x r) EQ -> Bin sz x l r -- | /O(log n)/. Delete an element from a set. delete :: Ord a => a -> Set a -> Set a delete x t = case t of Tip -> Tip Bin sz y l r -> case compare x y of LT -> balance y (delete x l) r GT -> balance y l (delete x r) EQ -> glue l r {-------------------------------------------------------------------- Subset --------------------------------------------------------------------} -- | /O(n+m)/. Is this a proper subset? (ie. a subset but not equal). isProperSubsetOf :: Ord a => Set a -> Set a -> Bool isProperSubsetOf s1 s2 = (size s1 < size s2) && (isSubsetOf s1 s2) -- | /O(n+m)/. Is this a subset? -- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@. isSubsetOf :: Ord a => Set a -> Set a -> Bool isSubsetOf t1 t2 = (size t1 <= size t2) && (isSubsetOfX t1 t2) isSubsetOfX Tip t = True isSubsetOfX t Tip = False isSubsetOfX (Bin _ x l r) t = found && isSubsetOfX l lt && isSubsetOfX r gt where (lt,found,gt) = splitMember x t {-------------------------------------------------------------------- Minimal, Maximal --------------------------------------------------------------------} -- | /O(log n)/. The minimal element of a set. findMin :: Set a -> a findMin (Bin _ x Tip r) = x findMin (Bin _ x l r) = findMin l findMin Tip = error "Set.findMin: empty set has no minimal element" -- | /O(log n)/. The maximal element of a set. findMax :: Set a -> a findMax (Bin _ x l Tip) = x findMax (Bin _ x l r) = findMax r findMax Tip = error "Set.findMax: empty set has no maximal element" -- | /O(log n)/. Delete the minimal element. deleteMin :: Set a -> Set a deleteMin (Bin _ x Tip r) = r deleteMin (Bin _ x l r) = balance x (deleteMin l) r deleteMin Tip = Tip -- | /O(log n)/. Delete the maximal element. 
deleteMax :: Set a -> Set a deleteMax (Bin _ x l Tip) = l deleteMax (Bin _ x l r) = balance x l (deleteMax r) deleteMax Tip = Tip {-------------------------------------------------------------------- Union. --------------------------------------------------------------------} -- | The union of a list of sets: (@'unions' == 'foldl' 'union' 'empty'@). unions :: Ord a => [Set a] -> Set a unions ts = foldlStrict union empty ts -- | /O(n+m)/. The union of two sets, preferring the first set when -- equal elements are encountered. -- The implementation uses the efficient /hedge-union/ algorithm. -- Hedge-union is more efficient on (bigset `union` smallset). union :: Ord a => Set a -> Set a -> Set a union Tip t2 = t2 union t1 Tip = t1 union t1 t2 = hedgeUnion (const LT) (const GT) t1 t2 hedgeUnion cmplo cmphi t1 Tip = t1 hedgeUnion cmplo cmphi Tip (Bin _ x l r) = join x (filterGt cmplo l) (filterLt cmphi r) hedgeUnion cmplo cmphi (Bin _ x l r) t2 = join x (hedgeUnion cmplo cmpx l (trim cmplo cmpx t2)) (hedgeUnion cmpx cmphi r (trim cmpx cmphi t2)) where cmpx y = compare x y {-------------------------------------------------------------------- Difference --------------------------------------------------------------------} -- | /O(n+m)/. Difference of two sets. -- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/. difference :: Ord a => Set a -> Set a -> Set a difference Tip t2 = Tip difference t1 Tip = t1 difference t1 t2 = hedgeDiff (const LT) (const GT) t1 t2 hedgeDiff cmplo cmphi Tip t = Tip hedgeDiff cmplo cmphi (Bin _ x l r) Tip = join x (filterGt cmplo l) (filterLt cmphi r) hedgeDiff cmplo cmphi t (Bin _ x l r) = merge (hedgeDiff cmplo cmpx (trim cmplo cmpx t) l) (hedgeDiff cmpx cmphi (trim cmpx cmphi t) r) where cmpx y = compare x y {-------------------------------------------------------------------- Intersection --------------------------------------------------------------------} -- | /O(n+m)/. The intersection of two sets. -- Elements of the result come from the first set. intersection :: Ord a => Set a -> Set a -> Set a intersection Tip t = Tip intersection t Tip = Tip intersection t1@(Bin s1 x1 l1 r1) t2@(Bin s2 x2 l2 r2) = if s1 >= s2 then let (lt,found,gt) = splitLookup x2 t1 tl = intersection lt l2 tr = intersection gt r2 in case found of Just x -> join x tl tr Nothing -> merge tl tr else let (lt,found,gt) = splitMember x1 t2 tl = intersection l1 lt tr = intersection r1 gt in if found then join x1 tl tr else merge tl tr {-------------------------------------------------------------------- Filter and partition --------------------------------------------------------------------} -- | /O(n)/. Filter all elements that satisfy the predicate. filter :: Ord a => (a -> Bool) -> Set a -> Set a filter p Tip = Tip filter p (Bin _ x l r) | p x = join x (filter p l) (filter p r) | otherwise = merge (filter p l) (filter p r) -- | /O(n)/. Partition the set into two sets, one with all elements that satisfy -- the predicate and one with all elements that don't satisfy the predicate. -- See also 'split'. partition :: Ord a => (a -> Bool) -> Set a -> (Set a,Set a) partition p Tip = (Tip,Tip) partition p (Bin _ x l r) | p x = (join x l1 r1,merge l2 r2) | otherwise = (merge l1 r1,join x l2 r2) where (l1,l2) = partition p l (r1,r2) = partition p r {---------------------------------------------------------------------- Map ----------------------------------------------------------------------} -- | /O(n*log n)/. 
-- @'map' f s@ is the set obtained by applying @f@ to each element of @s@. -- -- It's worth noting that the size of the result may be smaller if, -- for some @(x,y)@, @x \/= y && f x == f y@ map :: (Ord a, Ord b) => (a->b) -> Set a -> Set b map f = fromList . List.map f . toList -- | /O(n)/. The -- -- @'mapMonotonic' f s == 'map' f s@, but works only when @f@ is monotonic. -- /The precondition is not checked./ -- Semi-formally, we have: -- -- > and [x < y ==> f x < f y | x <- ls, y <- ls] -- > ==> mapMonotonic f s == map f s -- > where ls = toList s mapMonotonic :: (a->b) -> Set a -> Set b mapMonotonic f Tip = Tip mapMonotonic f (Bin sz x l r) = Bin sz (f x) (mapMonotonic f l) (mapMonotonic f r) {-------------------------------------------------------------------- Fold --------------------------------------------------------------------} -- | /O(n)/. Fold over the elements of a set in an unspecified order. fold :: (a -> b -> b) -> b -> Set a -> b fold f z s = foldr f z s -- | /O(n)/. Post-order fold. foldr :: (a -> b -> b) -> b -> Set a -> b foldr f z Tip = z foldr f z (Bin _ x l r) = foldr f (f x (foldr f z r)) l {-------------------------------------------------------------------- List variations --------------------------------------------------------------------} -- | /O(n)/. The elements of a set. elems :: Set a -> [a] elems s = toList s {-------------------------------------------------------------------- Lists --------------------------------------------------------------------} -- | /O(n)/. Convert the set to a list of elements. toList :: Set a -> [a] toList s = toAscList s -- | /O(n)/. Convert the set to an ascending list of elements. toAscList :: Set a -> [a] toAscList t = foldr (:) [] t -- | /O(n*log n)/. Create a set from a list of elements. fromList :: Ord a => [a] -> Set a fromList xs = foldlStrict ins empty xs where ins t x = insert x t {-------------------------------------------------------------------- Building trees from ascending/descending lists can be done in linear time. Note that if [xs] is ascending that: fromAscList xs == fromList xs --------------------------------------------------------------------} -- | /O(n)/. Build a set from an ascending list in linear time. -- /The precondition (input list is ascending) is not checked./ fromAscList :: Eq a => [a] -> Set a fromAscList xs = fromDistinctAscList (combineEq xs) where -- [combineEq xs] combines equal elements with [const] in an ordered list [xs] combineEq xs = case xs of [] -> [] [x] -> [x] (x:xx) -> combineEq' x xx combineEq' z [] = [z] combineEq' z (x:xs) | z==x = combineEq' z xs | otherwise = z:combineEq' x xs -- | /O(n)/. Build a set from an ascending list of distinct elements in linear time. -- /The precondition (input list is strictly ascending) is not checked./ fromDistinctAscList :: [a] -> Set a fromDistinctAscList xs = build const (length xs) xs where -- 1) use continutations so that we use heap space instead of stack space. -- 2) special case for n==5 to build bushier trees. build c 0 xs = c Tip xs build c 5 xs = case xs of (x1:x2:x3:x4:x5:xx) -> c (bin x4 (bin x2 (singleton x1) (singleton x3)) (singleton x5)) xx build c n xs = seq nr $ build (buildR nr c) nl xs where nl = n `div` 2 nr = n - nl - 1 buildR n c l (x:ys) = build (buildB l x c) n ys buildB l x c r zs = c (bin x l r) zs {-------------------------------------------------------------------- Eq converts the set to a list. 
In a lazy setting, this actually seems one of the faster methods to compare two trees and it is certainly the simplest :-) --------------------------------------------------------------------} instance Eq a => Eq (Set a) where t1 == t2 = (size t1 == size t2) && (toAscList t1 == toAscList t2) {-------------------------------------------------------------------- Ord --------------------------------------------------------------------} instance Ord a => Ord (Set a) where compare s1 s2 = compare (toAscList s1) (toAscList s2) {-------------------------------------------------------------------- Show --------------------------------------------------------------------} instance Show a => Show (Set a) where showsPrec p xs = showParen (p > 10) $ showString "fromList " . shows (toList xs) showSet :: (Show a) => [a] -> ShowS showSet [] = showString "{}" showSet (x:xs) = showChar '{' . shows x . showTail xs where showTail [] = showChar '}' showTail (x:xs) = showChar ',' . shows x . showTail xs {-------------------------------------------------------------------- Read --------------------------------------------------------------------} instance (Read a, Ord a) => Read (Set a) where #ifdef __GLASGOW_HASKELL__ readPrec = parens $ prec 10 $ do Ident "fromList" <- lexP xs <- readPrec return (fromList xs) readListPrec = readListPrecDefault #else readsPrec p = readParen (p > 10) $ \ r -> do ("fromList",s) <- lex r (xs,t) <- reads s return (fromList xs,t) #endif {-------------------------------------------------------------------- Typeable/Data --------------------------------------------------------------------} #include "Typeable.h" INSTANCE_TYPEABLE1(Set,setTc,"Set") {-------------------------------------------------------------------- Utility functions that return sub-ranges of the original tree. Some functions take a comparison function as argument to allow comparisons against infinite values. A function [cmplo x] should be read as [compare lo x]. [trim cmplo cmphi t] A tree that is either empty or where [cmplo x == LT] and [cmphi x == GT] for the value [x] of the root. [filterGt cmp t] A tree where for all values [k]. [cmp k == LT] [filterLt cmp t] A tree where for all values [k]. [cmp k == GT] [split k t] Returns two trees [l] and [r] where all values in [l] are <[k] and all keys in [r] are >[k]. [splitMember k t] Just like [split] but also returns whether [k] was found in the tree. --------------------------------------------------------------------} {-------------------------------------------------------------------- [trim lo hi t] trims away all subtrees that surely contain no values between the range [lo] to [hi]. The returned tree is either empty or the key of the root is between @lo@ and @hi@. 
--------------------------------------------------------------------} trim :: (a -> Ordering) -> (a -> Ordering) -> Set a -> Set a trim cmplo cmphi Tip = Tip trim cmplo cmphi t@(Bin sx x l r) = case cmplo x of LT -> case cmphi x of GT -> t le -> trim cmplo cmphi l ge -> trim cmplo cmphi r trimMemberLo :: Ord a => a -> (a -> Ordering) -> Set a -> (Bool, Set a) trimMemberLo lo cmphi Tip = (False,Tip) trimMemberLo lo cmphi t@(Bin sx x l r) = case compare lo x of LT -> case cmphi x of GT -> (member lo t, t) le -> trimMemberLo lo cmphi l GT -> trimMemberLo lo cmphi r EQ -> (True,trim (compare lo) cmphi r) {-------------------------------------------------------------------- [filterGt x t] filter all values >[x] from tree [t] [filterLt x t] filter all values <[x] from tree [t] --------------------------------------------------------------------} filterGt :: (a -> Ordering) -> Set a -> Set a filterGt cmp Tip = Tip filterGt cmp (Bin sx x l r) = case cmp x of LT -> join x (filterGt cmp l) r GT -> filterGt cmp r EQ -> r filterLt :: (a -> Ordering) -> Set a -> Set a filterLt cmp Tip = Tip filterLt cmp (Bin sx x l r) = case cmp x of LT -> filterLt cmp l GT -> join x l (filterLt cmp r) EQ -> l {-------------------------------------------------------------------- Split --------------------------------------------------------------------} -- | /O(log n)/. The expression (@'split' x set@) is a pair @(set1,set2)@ -- where all elements in @set1@ are lower than @x@ and all elements in -- @set2@ larger than @x@. @x@ is not found in neither @set1@ nor @set2@. split :: Ord a => a -> Set a -> (Set a,Set a) split x Tip = (Tip,Tip) split x (Bin sy y l r) = case compare x y of LT -> let (lt,gt) = split x l in (lt,join y gt r) GT -> let (lt,gt) = split x r in (join y l lt,gt) EQ -> (l,r) -- | /O(log n)/. Performs a 'split' but also returns whether the pivot -- element was found in the original set. splitMember :: Ord a => a -> Set a -> (Set a,Bool,Set a) splitMember x t = let (l,m,r) = splitLookup x t in (l,maybe False (const True) m,r) -- | /O(log n)/. Performs a 'split' but also returns the pivot -- element that was found in the original set. splitLookup :: Ord a => a -> Set a -> (Set a,Maybe a,Set a) splitLookup x Tip = (Tip,Nothing,Tip) splitLookup x (Bin sy y l r) = case compare x y of LT -> let (lt,found,gt) = splitLookup x l in (lt,found,join y gt r) GT -> let (lt,found,gt) = splitLookup x r in (join y l lt,found,gt) EQ -> (l,Just y,r) {-------------------------------------------------------------------- Utility functions that maintain the balance properties of the tree. All constructors assume that all values in [l] < [x] and all values in [r] > [x], and that [l] and [r] are valid trees. In order of sophistication: [Bin sz x l r] The type constructor. [bin x l r] Maintains the correct size, assumes that both [l] and [r] are balanced with respect to each other. [balance x l r] Restores the balance and size. Assumes that the original tree was balanced and that [l] or [r] has changed by at most one element. [join x l r] Restores balance and size. Furthermore, we can construct a new tree from two trees. Both operations assume that all values in [l] < all values in [r] and that [l] and [r] are valid: [glue l r] Glues [l] and [r] together. Assumes that [l] and [r] are already balanced with respect to each other. [merge l r] Merges two trees and restores balance. Note: in contrast to Adam's paper, we use (<=) comparisons instead of (<) comparisons in [join], [merge] and [balance]. 
Quickcheck (on [difference]) showed that this was necessary in order to maintain the invariants. It is quite unsatisfactory that I haven't been able to find out why this is actually the case! Fortunately, it doesn't hurt to be a bit more conservative. --------------------------------------------------------------------} {-------------------------------------------------------------------- Join --------------------------------------------------------------------} join :: a -> Set a -> Set a -> Set a join x Tip r = insertMin x r join x l Tip = insertMax x l join x l@(Bin sizeL y ly ry) r@(Bin sizeR z lz rz) | delta*sizeL <= sizeR = balance z (join x l lz) rz | delta*sizeR <= sizeL = balance y ly (join x ry r) | otherwise = bin x l r -- insertMin and insertMax don't perform potentially expensive comparisons. insertMax,insertMin :: a -> Set a -> Set a insertMax x t = case t of Tip -> singleton x Bin sz y l r -> balance y l (insertMax x r) insertMin x t = case t of Tip -> singleton x Bin sz y l r -> balance y (insertMin x l) r {-------------------------------------------------------------------- [merge l r]: merges two trees. --------------------------------------------------------------------} merge :: Set a -> Set a -> Set a merge Tip r = r merge l Tip = l merge l@(Bin sizeL x lx rx) r@(Bin sizeR y ly ry) | delta*sizeL <= sizeR = balance y (merge l ly) ry | delta*sizeR <= sizeL = balance x lx (merge rx r) | otherwise = glue l r {-------------------------------------------------------------------- [glue l r]: glues two trees together. Assumes that [l] and [r] are already balanced with respect to each other. --------------------------------------------------------------------} glue :: Set a -> Set a -> Set a glue Tip r = r glue l Tip = l glue l r | size l > size r = let (m,l') = deleteFindMax l in balance m l' r | otherwise = let (m,r') = deleteFindMin r in balance m l r' -- | /O(log n)/. Delete and find the minimal element. -- -- > deleteFindMin set = (findMin set, deleteMin set) deleteFindMin :: Set a -> (a,Set a) deleteFindMin t = case t of Bin _ x Tip r -> (x,r) Bin _ x l r -> let (xm,l') = deleteFindMin l in (xm,balance x l' r) Tip -> (error "Set.deleteFindMin: can not return the minimal element of an empty set", Tip) -- | /O(log n)/. Delete and find the maximal element. -- -- > deleteFindMax set = (findMax set, deleteMax set) deleteFindMax :: Set a -> (a,Set a) deleteFindMax t = case t of Bin _ x l Tip -> (x,l) Bin _ x l r -> let (xm,r') = deleteFindMax r in (xm,balance x l r') Tip -> (error "Set.deleteFindMax: can not return the maximal element of an empty set", Tip) -- | /O(log n)/. Retrieves the minimal key of the set, and the set stripped from that element -- @fail@s (in the monad) when passed an empty set. minView :: Monad m => Set a -> m (Set a, a) minView Tip = fail "Set.minView: empty set" minView x = return (swap $ deleteFindMin x) -- | /O(log n)/. Retrieves the maximal key of the set, and the set stripped from that element -- @fail@s (in the monad) when passed an empty set. maxView :: Monad m => Set a -> m (Set a, a) maxView Tip = fail "Set.maxView: empty set" maxView x = return (swap $ deleteFindMax x) swap (a,b) = (b,a) {-------------------------------------------------------------------- [balance x l r] balances two trees with value x. The sizes of the trees should balance after decreasing the size of one of them. (a rotation). 
[delta] is the maximal relative difference between the sizes of two trees, it corresponds with the [w] in Adams' paper, or equivalently, [1/delta] corresponds with the $\alpha$ in Nievergelt's paper. Adams shows that [delta] should be larger than 3.745 in order to garantee that the rotations can always restore balance. [ratio] is the ratio between an outer and inner sibling of the heavier subtree in an unbalanced setting. It determines whether a double or single rotation should be performed to restore balance. It is correspondes with the inverse of $\alpha$ in Adam's article. Note that: - [delta] should be larger than 4.646 with a [ratio] of 2. - [delta] should be larger than 3.745 with a [ratio] of 1.534. - A lower [delta] leads to a more 'perfectly' balanced tree. - A higher [delta] performs less rebalancing. - Balancing is automatic for random data and a balancing scheme is only necessary to avoid pathological worst cases. Almost any choice will do in practice - Allthough it seems that a rather large [delta] may perform better than smaller one, measurements have shown that the smallest [delta] of 4 is actually the fastest on a wide range of operations. It especially improves performance on worst-case scenarios like a sequence of ordered insertions. Note: in contrast to Adams' paper, we use a ratio of (at least) 2 to decide whether a single or double rotation is needed. Allthough he actually proves that this ratio is needed to maintain the invariants, his implementation uses a (invalid) ratio of 1. He is aware of the problem though since he has put a comment in his original source code that he doesn't care about generating a slightly inbalanced tree since it doesn't seem to matter in practice. However (since we use quickcheck :-) we will stick to strictly balanced trees. --------------------------------------------------------------------} delta,ratio :: Int delta = 4 ratio = 2 balance :: a -> Set a -> Set a -> Set a balance x l r | sizeL + sizeR <= 1 = Bin sizeX x l r | sizeR >= delta*sizeL = rotateL x l r | sizeL >= delta*sizeR = rotateR x l r | otherwise = Bin sizeX x l r where sizeL = size l sizeR = size r sizeX = sizeL + sizeR + 1 -- rotate rotateL x l r@(Bin _ _ ly ry) | size ly < ratio*size ry = singleL x l r | otherwise = doubleL x l r rotateR x l@(Bin _ _ ly ry) r | size ry < ratio*size ly = singleR x l r | otherwise = doubleR x l r -- basic rotations singleL x1 t1 (Bin _ x2 t2 t3) = bin x2 (bin x1 t1 t2) t3 singleR x1 (Bin _ x2 t1 t2) t3 = bin x2 t1 (bin x1 t2 t3) doubleL x1 t1 (Bin _ x2 (Bin _ x3 t2 t3) t4) = bin x3 (bin x1 t1 t2) (bin x2 t3 t4) doubleR x1 (Bin _ x2 t1 (Bin _ x3 t2 t3)) t4 = bin x3 (bin x2 t1 t2) (bin x1 t3 t4) {-------------------------------------------------------------------- The bin constructor maintains the size of the tree --------------------------------------------------------------------} bin :: a -> Set a -> Set a -> Set a bin x l r = Bin (size l + size r + 1) x l r {-------------------------------------------------------------------- Utilities --------------------------------------------------------------------} foldlStrict f z xs = case xs of [] -> z (x:xx) -> let z' = f z x in seq z' (foldlStrict f z' xx) {-------------------------------------------------------------------- Debugging --------------------------------------------------------------------} -- | /O(n)/. Show the tree that implements the set. The tree is shown -- in a compressed, hanging format. showTree :: Show a => Set a -> String showTree s = showTreeWith True False s {- | /O(n)/. 
The expression (@showTreeWith hang wide map@) shows the tree that implements the set. If @hang@ is @True@, a /hanging/ tree is shown otherwise a rotated tree is shown. If @wide@ is 'True', an extra wide version is shown. > Set> putStrLn $ showTreeWith True False $ fromDistinctAscList [1..5] > 4 > +--2 > | +--1 > | +--3 > +--5 > > Set> putStrLn $ showTreeWith True True $ fromDistinctAscList [1..5] > 4 > | > +--2 > | | > | +--1 > | | > | +--3 > | > +--5 > > Set> putStrLn $ showTreeWith False True $ fromDistinctAscList [1..5] > +--5 > | > 4 > | > | +--3 > | | > +--2 > | > +--1 -} showTreeWith :: Show a => Bool -> Bool -> Set a -> String showTreeWith hang wide t | hang = (showsTreeHang wide [] t) "" | otherwise = (showsTree wide [] [] t) "" showsTree :: Show a => Bool -> [String] -> [String] -> Set a -> ShowS showsTree wide lbars rbars t = case t of Tip -> showsBars lbars . showString "|\n" Bin sz x Tip Tip -> showsBars lbars . shows x . showString "\n" Bin sz x l r -> showsTree wide (withBar rbars) (withEmpty rbars) r . showWide wide rbars . showsBars lbars . shows x . showString "\n" . showWide wide lbars . showsTree wide (withEmpty lbars) (withBar lbars) l showsTreeHang :: Show a => Bool -> [String] -> Set a -> ShowS showsTreeHang wide bars t = case t of Tip -> showsBars bars . showString "|\n" Bin sz x Tip Tip -> showsBars bars . shows x . showString "\n" Bin sz x l r -> showsBars bars . shows x . showString "\n" . showWide wide bars . showsTreeHang wide (withBar bars) l . showWide wide bars . showsTreeHang wide (withEmpty bars) r showWide wide bars | wide = showString (concat (reverse bars)) . showString "|\n" | otherwise = id showsBars :: [String] -> ShowS showsBars bars = case bars of [] -> id _ -> showString (concat (reverse (tail bars))) . showString node node = "+--" withBar bars = "| ":bars withEmpty bars = " ":bars {-------------------------------------------------------------------- Assertions --------------------------------------------------------------------} -- | /O(n)/. Test if the internal set structure is valid. 
valid :: Ord a => Set a -> Bool valid t = balanced t && ordered t && validsize t ordered t = bounded (const True) (const True) t where bounded lo hi t = case t of Tip -> True Bin sz x l r -> (lo x) && (hi x) && bounded lo (<x) l && bounded (>x) hi r balanced :: Set a -> Bool balanced t = case t of Tip -> True Bin sz x l r -> (size l + size r <= 1 || (size l <= delta*size r && size r <= delta*size l)) && balanced l && balanced r validsize t = (realsize t == Just (size t)) where realsize t = case t of Tip -> Just 0 Bin sz x l r -> case (realsize l,realsize r) of (Just n,Just m) | n+m+1 == sz -> Just sz other -> Nothing {- {-------------------------------------------------------------------- Testing --------------------------------------------------------------------} testTree :: [Int] -> Set Int testTree xs = fromList xs test1 = testTree [1..20] test2 = testTree [30,29..10] test3 = testTree [1,4,6,89,2323,53,43,234,5,79,12,9,24,9,8,423,8,42,4,8,9,3] {-------------------------------------------------------------------- QuickCheck --------------------------------------------------------------------} qcheck prop = check config prop where config = Config { configMaxTest = 500 , configMaxFail = 5000 , configSize = \n -> (div n 2 + 3) , configEvery = \n args -> let s = show n in s ++ [ '\b' | _ <- s ] } {-------------------------------------------------------------------- Arbitrary, reasonably balanced trees --------------------------------------------------------------------} instance (Enum a) => Arbitrary (Set a) where arbitrary = sized (arbtree 0 maxkey) where maxkey = 10000 arbtree :: (Enum a) => Int -> Int -> Int -> Gen (Set a) arbtree lo hi n | n <= 0 = return Tip | lo >= hi = return Tip | otherwise = do{ i <- choose (lo,hi) ; m <- choose (1,30) ; let (ml,mr) | m==(1::Int)= (1,2) | m==2 = (2,1) | m==3 = (1,1) | otherwise = (2,2) ; l <- arbtree lo (i-1) (n `div` ml) ; r <- arbtree (i+1) hi (n `div` mr) ; return (bin (toEnum i) l r) } {-------------------------------------------------------------------- Valid tree's --------------------------------------------------------------------} forValid :: (Enum a,Show a,Testable b) => (Set a -> b) -> Property forValid f = forAll arbitrary $ \t -> -- classify (balanced t) "balanced" $ classify (size t == 0) "empty" $ classify (size t > 0 && size t <= 10) "small" $ classify (size t > 10 && size t <= 64) "medium" $ classify (size t > 64) "large" $ balanced t ==> f t forValidIntTree :: Testable a => (Set Int -> a) -> Property forValidIntTree f = forValid f forValidUnitTree :: Testable a => (Set Int -> a) -> Property forValidUnitTree f = forValid f prop_Valid = forValidUnitTree $ \t -> valid t {-------------------------------------------------------------------- Single, Insert, Delete --------------------------------------------------------------------} prop_Single :: Int -> Bool prop_Single x = (insert x empty == singleton x) prop_InsertValid :: Int -> Property prop_InsertValid k = forValidUnitTree $ \t -> valid (insert k t) prop_InsertDelete :: Int -> Set Int -> Property prop_InsertDelete k t = not (member k t) ==> delete k (insert k t) == t prop_DeleteValid :: Int -> Property prop_DeleteValid k = forValidUnitTree $ \t -> valid (delete k (insert k t)) {-------------------------------------------------------------------- Balance --------------------------------------------------------------------} prop_Join :: Int -> Property prop_Join x = forValidUnitTree $ \t -> let (l,r) = split x t in valid (join x l r) prop_Merge :: Int -> Property prop_Merge x = 
forValidUnitTree $ \t -> let (l,r) = split x t in valid (merge l r) {-------------------------------------------------------------------- Union --------------------------------------------------------------------} prop_UnionValid :: Property prop_UnionValid = forValidUnitTree $ \t1 -> forValidUnitTree $ \t2 -> valid (union t1 t2) prop_UnionInsert :: Int -> Set Int -> Bool prop_UnionInsert x t = union t (singleton x) == insert x t prop_UnionAssoc :: Set Int -> Set Int -> Set Int -> Bool prop_UnionAssoc t1 t2 t3 = union t1 (union t2 t3) == union (union t1 t2) t3 prop_UnionComm :: Set Int -> Set Int -> Bool prop_UnionComm t1 t2 = (union t1 t2 == union t2 t1) prop_DiffValid = forValidUnitTree $ \t1 -> forValidUnitTree $ \t2 -> valid (difference t1 t2) prop_Diff :: [Int] -> [Int] -> Bool prop_Diff xs ys = toAscList (difference (fromList xs) (fromList ys)) == List.sort ((List.\\) (nub xs) (nub ys)) prop_IntValid = forValidUnitTree $ \t1 -> forValidUnitTree $ \t2 -> valid (intersection t1 t2) prop_Int :: [Int] -> [Int] -> Bool prop_Int xs ys = toAscList (intersection (fromList xs) (fromList ys)) == List.sort (nub ((List.intersect) (xs) (ys))) {-------------------------------------------------------------------- Lists --------------------------------------------------------------------} prop_Ordered = forAll (choose (5,100)) $ \n -> let xs = [0..n::Int] in fromAscList xs == fromList xs prop_List :: [Int] -> Bool prop_List xs = (sort (nub xs) == toList (fromList xs)) -}
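-- A minimal usage sketch, not part of the original file: it exercises the API
-- defined above through the public Data.Set interface. The left-biased
-- 'union' and ascending 'toAscList' behaviour follow the documentation in the
-- module itself; the demo program below is hypothetical.
import qualified Data.Set as Set

main :: IO ()
main = do
  let s1 = Set.fromList [3, 1, 2 :: Int]          -- {1,2,3}
      s2 = Set.insert 4 (Set.delete 2 s1)         -- {1,3,4}
  print (Set.toAscList s2)                        -- [1,3,4]
  print (Set.member 2 s2)                         -- False
  print (Set.toAscList (Set.union s1 s2))         -- [1,2,3,4]
  print (Set.size (Set.intersection s1 s2))       -- 2 (shared elements 1 and 3)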
alekar/hugs
packages/base/Data/Set.hs
bsd-3-clause
38,039
0
18
9,708
7,651
3,856
3,795
-1
-1
{-| Module : Data.STM.PriorityQueue.Internal.THeapPQ Description : STM-based Concurrent Priority Queue data structure class implementation Copyright : (c) Alex Semin, 2015 License : BSD3 Maintainer : [email protected] Stability : experimental Portability : portable An implementation of 'Data.STM.PriorityQueue.Class' based on functional __fine-grained__ binary heap. Heap is implemented as described in <https://www.cs.cmu.edu/~rwh/theses/okasaki.pdf Purely Functional Data Structures>. -} module Data.STM.PriorityQueue.Internal.THeapPQ( THeapPQ ) where import Control.Concurrent.STM import Data.STM.PriorityQueue.Class data Heap k v = Nil | Node {-# UNPACK #-} !Int {-# UNPACK #-} !Int k v (TVar (Heap k v)) (TVar (Heap k v)) data THeapPQ k v = PQ (TVar (Heap k v)) rank :: Heap k v -> Int rank Nil = 0 rank (Node r _ _ _ _ _) = r size :: Heap k v -> Int size Nil = 0 size (Node _ s _ _ _ _) = s union :: Ord k => Heap k v -> Heap k v -> STM (Heap k v) h `union` Nil = return h Nil `union` h = return h union h1@(Node _ _ k1 v1 vl1 vr1) h2@(Node _ _ k2 v2 vl2 vr2) = do r1 <- readTVar vr1 r2 <- readTVar vr2 if k1 < k2 then do nr <- r1 `union` h2 vr' <- newTVar nr mk k1 v1 vl1 vr' else do nr <- r2 `union` h1 vr' <- newTVar nr mk k2 v2 vl2 vr' mk :: Ord k => k -> v -> TVar (Heap k v) -> TVar (Heap k v) -> STM (Heap k v) mk k v vh1 vh2 = do h1 <- readTVar vh1 h2 <- readTVar vh2 let (r1, r2) = both ((+1).rank) (h1, h2) let ss = size h1 + size h2 + 1 return $ if r1 > r2 then Node r1 ss k v vh1 vh2 else Node r2 ss k v vh2 vh1 where both f (a, b) = (f a, f b) pqInsert :: Ord k => THeapPQ k v -> k -> v -> STM () pqInsert (PQ hp) k v = do h <- readTVar hp l <- newTVar Nil r <- newTVar Nil h' <- h `union` Node 1 1 k v l r writeTVar hp h' pqPeekMin :: Ord k => THeapPQ k v -> STM v pqPeekMin (PQ hp) = do h <- readTVar hp case h of Nil -> retry (Node _ _ _ v _ _) -> return v pqDeleteMin :: Ord k => THeapPQ k b -> STM b pqDeleteMin (PQ hp) = do h <- readTVar hp case h of Nil -> retry (Node _ _ _ v vl vr) -> do l <- readTVar vl r <- readTVar vr h' <- l `union` r writeTVar hp h' return v instance PriorityQueue THeapPQ where new = PQ <$> newTVar Nil insert = pqInsert peekMin = pqPeekMin deleteMin = pqDeleteMin
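-- A minimal usage sketch, not part of the original module: it assumes the
-- 'PriorityQueue' class imported above exposes 'new', 'insert' and
-- 'deleteMin' with the shapes given to them by the instance at the end of the
-- file; the demo program itself is hypothetical.
import Control.Concurrent.STM (STM, atomically)
import Data.STM.PriorityQueue.Class
import Data.STM.PriorityQueue.Internal.THeapPQ (THeapPQ)

main :: IO ()
main = do
  (x, y) <- atomically $ do
    pq <- new :: STM (THeapPQ Int String)
    insert pq 3 "three"
    insert pq 1 "one"
    insert pq 2 "two"
    a <- deleteMin pq            -- smallest key first: "one"
    b <- deleteMin pq            -- then "two"
    return (a, b)
  print (x, y)                   -- ("one","two")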
Alllex/stm-data-collection
src/Data/STM/PriorityQueue/Internal/THeapPQ.hs
bsd-3-clause
2,580
0
13
832
1,023
501
522
73
2
-- (c) The University of Glasgow 2006 {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFunctor #-} module Unify ( tcMatchTy, tcMatchTys, tcMatchTyX, tcMatchTysX, tcUnifyTyWithTFs, ruleMatchTyX, -- * Rough matching roughMatchTcs, instanceCantMatch, typesCantMatch, -- Side-effect free unification tcUnifyTy, tcUnifyTys, tcUnifyTysFG, BindFlag(..), UnifyResult, UnifyResultM(..), -- Matching a type against a lifted type (coercion) liftCoMatch ) where #include "HsVersions.h" import Var import VarEnv import VarSet import Kind import Name( Name ) import Type hiding ( getTvSubstEnv ) import Coercion hiding ( getCvSubstEnv ) import TyCon import TyCoRep hiding ( getTvSubstEnv, getCvSubstEnv ) import Util import Pair import Outputable import Control.Monad #if __GLASGOW_HASKELL__ > 710 import qualified Control.Monad.Fail as MonadFail #endif import Control.Applicative hiding ( empty ) import qualified Control.Applicative {- Unification is much tricker than you might think. 1. The substitution we generate binds the *template type variables* which are given to us explicitly. 2. We want to match in the presence of foralls; e.g (forall a. t1) ~ (forall b. t2) That is what the RnEnv2 is for; it does the alpha-renaming that makes it as if a and b were the same variable. Initialising the RnEnv2, so that it can generate a fresh binder when necessary, entails knowing the free variables of both types. 3. We must be careful not to bind a template type variable to a locally bound variable. E.g. (forall a. x) ~ (forall b. b) where x is the template type variable. Then we do not want to bind x to a/b! This is a kind of occurs check. The necessary locals accumulate in the RnEnv2. Note [Kind coercions in Unify] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We wish to match/unify while ignoring casts. But, we can't just ignore them completely, or we'll end up with ill-kinded substitutions. For example, say we're matching `a` with `ty |> co`. If we just drop the cast, we'll return [a |-> ty], but `a` and `ty` might have different kinds. We can't just match/unify their kinds, either, because this might gratuitously fail. After all, `co` is the witness that the kinds are the same -- they may look nothing alike. So, we pass a kind coercion to the match/unify worker. This coercion witnesses the equality between the substed kind of the left-hand type and the substed kind of the right-hand type. To get this coercion, we first have to match/unify the kinds before looking at the types. Happily, we need look only one level up, as all kinds are guaranteed to have kind *. We thought, at one point, that this was all unnecessary: why should casts be in types in the first place? But they do. In dependent/should_compile/KindEqualities2, we see, for example the constraint Num (Int |> (blah ; sym blah)). We naturally want to find a dictionary for that constraint, which requires dealing with coercions in this manner. -} -- | @tcMatchTy t1 t2@ produces a substitution (over fvs(t1)) -- @s@ such that @s(t1)@ equals @t2@. -- The returned substitution might bind coercion variables, -- if the variable is an argument to a GADT constructor. -- -- We don't pass in a set of "template variables" to be bound -- by the match, because tcMatchTy (and similar functions) are -- always used on top-level types, so we can bind any of the -- free variables of the LHS. 
tcMatchTy :: Type -> Type -> Maybe TCvSubst tcMatchTy ty1 ty2 = tcMatchTys [ty1] [ty2] -- | This is similar to 'tcMatchTy', but extends a substitution tcMatchTyX :: TCvSubst -- ^ Substitution to extend -> Type -- ^ Template -> Type -- ^ Target -> Maybe TCvSubst tcMatchTyX subst ty1 ty2 = tcMatchTysX subst [ty1] [ty2] -- | Like 'tcMatchTy' but over a list of types. tcMatchTys :: [Type] -- ^ Template -> [Type] -- ^ Target -> Maybe TCvSubst -- ^ One-shot; in principle the template -- variables could be free in the target tcMatchTys tys1 tys2 = tcMatchTysX (mkEmptyTCvSubst in_scope) tys1 tys2 where in_scope = mkInScopeSet (tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2) -- | Like 'tcMatchTys', but extending a substitution tcMatchTysX :: TCvSubst -- ^ Substitution to extend -> [Type] -- ^ Template -> [Type] -- ^ Target -> Maybe TCvSubst -- ^ One-shot substitution tcMatchTysX (TCvSubst in_scope tv_env cv_env) tys1 tys2 -- See Note [Kind coercions in Unify] = case tc_unify_tys (const BindMe) False -- Matching, not unifying False -- Not an injectivity check (mkRnEnv2 in_scope) tv_env cv_env tys1 tys2 of Unifiable (tv_env', cv_env') -> Just $ TCvSubst in_scope tv_env' cv_env' _ -> Nothing -- | This one is called from the expression matcher, -- which already has a MatchEnv in hand ruleMatchTyX :: TyCoVarSet -- ^ template variables -> RnEnv2 -> TvSubstEnv -- ^ type substitution to extend -> Type -- ^ Template -> Type -- ^ Target -> Maybe TvSubstEnv ruleMatchTyX tmpl_tvs rn_env tenv tmpl target -- See Note [Kind coercions in Unify] = case tc_unify_tys (matchBindFun tmpl_tvs) False False rn_env tenv emptyCvSubstEnv [tmpl] [target] of Unifiable (tenv', _) -> Just tenv' _ -> Nothing matchBindFun :: TyCoVarSet -> TyVar -> BindFlag matchBindFun tvs tv = if tv `elemVarSet` tvs then BindMe else Skolem {- ********************************************************************* * * Rough matching * * ********************************************************************* -} -- See Note [Rough match] field in InstEnv roughMatchTcs :: [Type] -> [Maybe Name] roughMatchTcs tys = map rough tys where rough ty | Just (ty', _) <- splitCastTy_maybe ty = rough ty' | Just (tc,_) <- splitTyConApp_maybe ty = Just (tyConName tc) | otherwise = Nothing instanceCantMatch :: [Maybe Name] -> [Maybe Name] -> Bool -- (instanceCantMatch tcs1 tcs2) returns True if tcs1 cannot -- possibly be instantiated to actual, nor vice versa; -- False is non-committal instanceCantMatch (mt : ts) (ma : as) = itemCantMatch mt ma || instanceCantMatch ts as instanceCantMatch _ _ = False -- Safe itemCantMatch :: Maybe Name -> Maybe Name -> Bool itemCantMatch (Just t) (Just a) = t /= a itemCantMatch _ _ = False {- ************************************************************************ * * GADTs * * ************************************************************************ Note [Pruning dead case alternatives] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider data T a where T1 :: T Int T2 :: T a newtype X = MkX Int newtype Y = MkY Char type family F a type instance F Bool = Int Now consider case x of { T1 -> e1; T2 -> e2 } The question before the house is this: if I know something about the type of x, can I prune away the T1 alternative? Suppose x::T Char. It's impossible to construct a (T Char) using T1, Answer = YES we can prune the T1 branch (clearly) Suppose x::T (F a), where 'a' is in scope. 
Then 'a' might be instantiated to 'Bool', in which case x::T Int, so ANSWER = NO (clearly) We see here that we want precisely the apartness check implemented within tcUnifyTysFG. So that's what we do! Two types cannot match if they are surely apart. Note that since we are simply dropping dead code, a conservative test suffices. -} -- | Given a list of pairs of types, are any two members of a pair surely -- apart, even after arbitrary type function evaluation and substitution? typesCantMatch :: [(Type,Type)] -> Bool -- See Note [Pruning dead case alternatives] typesCantMatch prs = any (uncurry cant_match) prs where cant_match :: Type -> Type -> Bool cant_match t1 t2 = case tcUnifyTysFG (const BindMe) [t1] [t2] of SurelyApart -> True _ -> False {- ************************************************************************ * * Unification * * ************************************************************************ Note [Fine-grained unification] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Do the types (x, x) and ([y], y) unify? The answer is seemingly "no" -- no substitution to finite types makes these match. But, a substitution to *infinite* types can unify these two types: [x |-> [[[...]]], y |-> [[[...]]] ]. Why do we care? Consider these two type family instances: type instance F x x = Int type instance F [y] y = Bool If we also have type instance Looper = [Looper] then the instances potentially overlap. The solution is to use unification over infinite terms. This is possible (see [1] for lots of gory details), but a full algorithm is a little more power than we need. Instead, we make a conservative approximation and just omit the occurs check. [1]: http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/axioms-extended.pdf tcUnifyTys considers an occurs-check problem as the same as general unification failure. tcUnifyTysFG ("fine-grained") returns one of three results: success, occurs-check failure ("MaybeApart"), or general failure ("SurelyApart"). See also Trac #8162. It's worth noting that unification in the presence of infinite types is not complete. This means that, sometimes, a closed type family does not reduce when it should. See test case indexed-types/should_fail/Overlap15 for an example. Note [The substitution in MaybeApart] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The constructor MaybeApart carries data with it, typically a TvSubstEnv. Why? Because consider unifying these: (a, a, Int) ~ (b, [b], Bool) If we go left-to-right, we start with [a |-> b]. Then, on the middle terms, we apply the subst we have so far and discover that we need [b |-> [b]]. Because this fails the occurs check, we say that the types are MaybeApart (see above Note [Fine-grained unification]). But, we can't stop there! Because if we continue, we discover that Int is SurelyApart from Bool, and therefore the types are apart. This has practical consequences for the ability for closed type family applications to reduce. See test case indexed-types/should_compile/Overlap14. Note [Unifying with skolems] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If we discover that two types unify if and only if a skolem variable is substituted, we can't properly unify the types. But, that skolem variable may later be instantiated with a unifyable type. So, we return maybeApart in these cases. Note [Lists of different lengths are MaybeApart] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is unusual to call tcUnifyTys or tcUnifyTysFG with lists of different lengths. 
The place where we know this can happen is from compatibleBranches in FamInstEnv, when checking data family instances. Data family instances may be eta-reduced; see Note [Eta reduction for data family axioms] in TcInstDcls. We wish to say that D :: * -> * -> * axDF1 :: D Int ~ DFInst1 axDF2 :: D Int Bool ~ DFInst2 overlap. If we conclude that lists of different lengths are SurelyApart, then it will look like these do *not* overlap, causing disaster. See Trac #9371. In usages of tcUnifyTys outside of family instances, we always use tcUnifyTys, which can't tell the difference between MaybeApart and SurelyApart, so those usages won't notice this design choice. -} tcUnifyTy :: Type -> Type -- All tyvars are bindable -> Maybe TCvSubst -- A regular one-shot (idempotent) substitution -- Simple unification of two types; all type variables are bindable tcUnifyTy t1 t2 = tcUnifyTys (const BindMe) [t1] [t2] -- | Unify two types, treating type family applications as possibly unifying -- with anything and looking through injective type family applications. tcUnifyTyWithTFs :: Bool -- ^ True <=> do two-way unification; -- False <=> do one-way matching. -- See end of sec 5.2 from the paper -> Type -> Type -> Maybe TCvSubst -- This algorithm is an implementation of the "Algorithm U" presented in -- the paper "Injective type families for Haskell", Figures 2 and 3. -- The code is incorporated with the standard unifier for convenience, but -- its operation should match the specification in the paper. tcUnifyTyWithTFs twoWay t1 t2 = case tc_unify_tys (const BindMe) twoWay True rn_env emptyTvSubstEnv emptyCvSubstEnv [t1] [t2] of Unifiable (subst, _) -> Just $ niFixTCvSubst subst MaybeApart (subst, _) -> Just $ niFixTCvSubst subst -- we want to *succeed* in questionable cases. This is a -- pre-unification algorithm. SurelyApart -> Nothing where rn_env = mkRnEnv2 $ mkInScopeSet $ tyCoVarsOfTypes [t1, t2] ----------------- tcUnifyTys :: (TyCoVar -> BindFlag) -> [Type] -> [Type] -> Maybe TCvSubst -- ^ A regular one-shot (idempotent) substitution -- that unifies the erased types. See comments -- for 'tcUnifyTysFG' -- The two types may have common type variables, and indeed do so in the -- second call to tcUnifyTys in FunDeps.checkClsFD tcUnifyTys bind_fn tys1 tys2 = case tcUnifyTysFG bind_fn tys1 tys2 of Unifiable result -> Just result _ -> Nothing -- This type does double-duty. It is used in the UM (unifier monad) and to -- return the final result. See Note [Fine-grained unification] type UnifyResult = UnifyResultM TCvSubst data UnifyResultM a = Unifiable a -- the subst that unifies the types | MaybeApart a -- the subst has as much as we know -- it must be part of an most general unifier -- See Note [The substitution in MaybeApart] | SurelyApart deriving Functor instance Applicative UnifyResultM where pure = Unifiable (<*>) = ap instance Monad UnifyResultM where SurelyApart >>= _ = SurelyApart MaybeApart x >>= f = case f x of Unifiable y -> MaybeApart y other -> other Unifiable x >>= f = f x instance Alternative UnifyResultM where empty = SurelyApart a@(Unifiable {}) <|> _ = a _ <|> b@(Unifiable {}) = b a@(MaybeApart {}) <|> _ = a _ <|> b@(MaybeApart {}) = b SurelyApart <|> SurelyApart = SurelyApart instance MonadPlus UnifyResultM -- | @tcUnifyTysFG bind_tv tys1 tys2@ attepts to find a substitution @s@ (whose -- domain elements all respond 'BindMe' to @bind_tv@) such that -- @s(tys1)@ and that of @s(tys2)@ are equal, as witnessed by the returned -- Coercions. 
tcUnifyTysFG :: (TyVar -> BindFlag) -> [Type] -> [Type] -> UnifyResult tcUnifyTysFG bind_fn tys1 tys2 = do { (env, _) <- tc_unify_tys bind_fn True False env emptyTvSubstEnv emptyCvSubstEnv tys1 tys2 ; return $ niFixTCvSubst env } where vars = tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2 env = mkRnEnv2 $ mkInScopeSet vars -- | This function is actually the one to call the unifier -- a little -- too general for outside clients, though. tc_unify_tys :: (TyVar -> BindFlag) -> Bool -- ^ True <=> unify; False <=> match -> Bool -- ^ True <=> doing an injectivity check -> RnEnv2 -> TvSubstEnv -- ^ substitution to extend -> CvSubstEnv -> [Type] -> [Type] -> UnifyResultM (TvSubstEnv, CvSubstEnv) tc_unify_tys bind_fn unif inj_check rn_env tv_env cv_env tys1 tys2 = initUM bind_fn unif inj_check rn_env tv_env cv_env $ do { unify_tys kis1 kis2 ; unify_tys tys1 tys2 ; (,) <$> getTvSubstEnv <*> getCvSubstEnv } where kis1 = map typeKind tys1 kis2 = map typeKind tys2 instance Outputable a => Outputable (UnifyResultM a) where ppr SurelyApart = text "SurelyApart" ppr (Unifiable x) = text "Unifiable" <+> ppr x ppr (MaybeApart x) = text "MaybeApart" <+> ppr x {- ************************************************************************ * * Non-idempotent substitution * * ************************************************************************ Note [Non-idempotent substitution] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ During unification we use a TvSubstEnv/CvSubstEnv pair that is (a) non-idempotent (b) loop-free; ie repeatedly applying it yields a fixed point Note [Finding the substitution fixpoint] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Finding the fixpoint of a non-idempotent substitution arising from a unification is harder than it looks, because of kinds. Consider T k (H k (f:k)) ~ T * (g:*) If we unify, we get the substitution [ k -> * , g -> H k (f:k) ] To make it idempotent we don't want to get just [ k -> * , g -> H * (f:k) ] We also want to substitute inside f's kind, to get [ k -> * , g -> H k (f:*) ] If we don't do this, we may apply the substitition to something, and get an ill-formed type, i.e. one where typeKind will fail. This happened, for example, in Trac #9106. This is the reason for extending env with [f:k -> f:*], in the definition of env' in niFixTvSubst -} niFixTCvSubst :: TvSubstEnv -> TCvSubst -- Find the idempotent fixed point of the non-idempotent substitution -- See Note [Finding the substitution fixpoint] -- ToDo: use laziness instead of iteration? niFixTCvSubst tenv = f tenv where f tenv | not_fixpoint = f (mapVarEnv (substTy subst') tenv) | otherwise = subst where not_fixpoint = foldVarSet ((||) . in_domain) False range_tvs in_domain tv = tv `elemVarEnv` tenv range_tvs = foldVarEnv (unionVarSet . tyCoVarsOfType) emptyVarSet tenv subst = mkTvSubst (mkInScopeSet range_tvs) tenv -- env' extends env by replacing any free type with -- that same tyvar with a substituted kind -- See note [Finding the substitution fixpoint] tenv' = extendVarEnvList tenv [ (rtv, mkTyVarTy $ setTyVarKind rtv $ substTy subst $ tyVarKind rtv) | rtv <- varSetElems range_tvs , not (in_domain rtv) ] subst' = mkTvSubst (mkInScopeSet range_tvs) tenv' niSubstTvSet :: TvSubstEnv -> TyCoVarSet -> TyCoVarSet -- Apply the non-idempotent substitution to a set of type variables, -- remembering that the substitution isn't necessarily idempotent -- This is used in the occurs check, before extending the substitution niSubstTvSet tsubst tvs = foldVarSet (unionVarSet . 
get) emptyVarSet tvs where get tv | Just ty <- lookupVarEnv tsubst tv = niSubstTvSet tsubst (tyCoVarsOfType ty) | otherwise = unitVarSet tv {- ************************************************************************ * * The workhorse * * ************************************************************************ Note [Specification of unification] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The algorithm implemented here is rather delicate, and we depend on it to uphold certain properties. This is a summary of these required properties. Any reference to "flattening" refers to the flattening algorithm in FamInstEnv (See Note [Flattening] in FamInstEnv), not the flattening algorithm in the solver. Notation: θ,φ substitutions ξ type-function-free types τ,σ other types τ♭ type τ, flattened ≡ eqType (U1) Soundness. If (unify τ₁ τ₂) = Unifiable θ, then θ(τ₁) ≡ θ(τ₂). θ is a most general unifier for τ₁ and τ₂. (U2) Completeness. If (unify ξ₁ ξ₂) = SurelyApart, then there exists no substitution θ such that θ(ξ₁) ≡ θ(ξ₂). These two properties are stated as Property 11 in the "Closed Type Families" paper (POPL'14). Below, this paper is called [CTF]. (U3) Apartness under substitution. If (unify ξ τ♭) = SurelyApart, then (unify ξ θ(τ)♭) = SurelyApart, for any θ. (Property 12 from [CTF]) (U4) Apart types do not unify. If (unify ξ τ♭) = SurelyApart, then there exists no θ such that θ(ξ) = θ(τ). (Property 13 from [CTF]) THEOREM. Completeness w.r.t ~ If (unify τ₁♭ τ₂♭) = SurelyApart, then there exists no proof that (τ₁ ~ τ₂). PROOF. See appendix of [CTF]. The unification algorithm is used for type family injectivity, as described in the "Injective Type Families" paper (Haskell'15), called [ITF]. When run in this mode, it has the following properties. (I1) If (unify σ τ) = SurelyApart, then σ and τ are not unifiable, even after arbitrary type family reductions. Note that σ and τ are not flattened here. (I2) If (unify σ τ) = MaybeApart θ, and if some φ exists such that φ(σ) ~ φ(τ), then φ extends θ. Furthermore, the RULES matching algorithm requires this property, but only when using this algorithm for matching: (M1) If (match σ τ) succeeds with θ, then all matchable tyvars in σ are bound in θ. Property M1 means that we must extend the substitution with, say (a ↦ a) when appropriate during matching. See also Note [Self-substitution when matching]. (M2) Completeness of matching. If θ(σ) = τ, then (match σ τ) = Unifiable φ, where θ is an extension of φ. Sadly, property M2 and I2 conflict. Consider type family F1 a b where F1 Int Bool = Char F1 Double String = Char Consider now two matching problems: P1. match (F1 a Bool) (F1 Int Bool) P2. match (F1 a Bool) (F1 Double String) In case P1, we must find (a ↦ Int) to satisfy M2. In case P2, we must /not/ find (a ↦ Double), in order to satisfy I2. (Note that the correct mapping for I2 is (a ↦ Int). There is no way to discover this, but we musn't map a to anything else!) We thus must parameterize the algorithm over whether it's being used for an injectivity check (refrain from looking at non-injective arguments to type families) or not (do indeed look at those arguments). (It's all a question of whether or not to include equation (7) from Fig. 2 of [ITF].) This extra parameter is a bit fiddly, perhaps, but seemingly less so than having two separate, almost-identical algorithms. Note [Self-substitution when matching] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ What should happen when we're *matching* (not unifying) a1 with a1? We should get a substitution [a1 |-> a1]. 
A successful match should map all the template variables (except ones that disappear when expanding synonyms). But when unifying, we don't want to do this, because we'll then fall into a loop. This arrangement affects the code in three places: - If we're matching a refined template variable, don't recur. Instead, just check for equality. That is, if we know [a |-> Maybe a] and are matching (a ~? Maybe Int), we want to just fail. - Skip the occurs check when matching. This comes up in two places, because matching against variables is handled separately from matching against full-on types. Note that this arrangement was provoked by a real failure, where the same unique ended up in the template as in the target. (It was a rule firing when compiling Data.List.NonEmpty.) Note [Matching coercion variables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this: type family F a data G a where MkG :: F a ~ Bool => G a type family Foo (x :: G a) :: F a type instance Foo MkG = False We would like that to be accepted. For that to work, we need to introduce a coercion variable on the left an then use it on the right. Accordingly, at use sites of Foo, we need to be able to use matching to figure out the value for the coercion. (See the desugared version: axFoo :: [a :: *, c :: F a ~ Bool]. Foo (MkG c) = False |> (sym c) ) We never want this action to happen during *unification* though, when all bets are off. -} -- See Note [Specification of unification] unify_ty :: Type -> Type -> Coercion -- Types to be unified and a co -- between their kinds -- See Note [Kind coercions in Unify] -> UM () -- Respects newtypes, PredTypes unify_ty ty1 ty2 kco | Just ty1' <- coreView ty1 = unify_ty ty1' ty2 kco | Just ty2' <- coreView ty2 = unify_ty ty1 ty2' kco | CastTy ty1' co <- ty1 = unify_ty ty1' ty2 (co `mkTransCo` kco) | CastTy ty2' co <- ty2 = unify_ty ty1 ty2' (kco `mkTransCo` mkSymCo co) unify_ty (TyVarTy tv1) ty2 kco = uVar tv1 ty2 kco unify_ty ty1 (TyVarTy tv2) kco = do { unif <- amIUnifying ; if unif then umSwapRn $ uVar tv2 ty1 (mkSymCo kco) else surelyApart } -- non-tv on left; tv on right: can't match. unify_ty ty1 ty2 _kco | Just (tc1, tys1) <- splitTyConApp_maybe ty1 , Just (tc2, tys2) <- splitTyConApp_maybe ty2 = if tc1 == tc2 || (isStarKind ty1 && isStarKind ty2) then if isInjectiveTyCon tc1 Nominal then unify_tys tys1 tys2 else do { let inj | isTypeFamilyTyCon tc1 = case familyTyConInjectivityInfo tc1 of NotInjective -> repeat False Injective bs -> bs | otherwise = repeat False (inj_tys1, noninj_tys1) = partitionByList inj tys1 (inj_tys2, noninj_tys2) = partitionByList inj tys2 ; unify_tys inj_tys1 inj_tys2 ; inj_tf <- checkingInjectivity ; unless inj_tf $ -- See (end of) Note [Specification of unification] don'tBeSoSure $ unify_tys noninj_tys1 noninj_tys2 } else -- tc1 /= tc2 if isGenerativeTyCon tc1 Nominal && isGenerativeTyCon tc2 Nominal then surelyApart else maybeApart -- Applications need a bit of care! 
-- They can match FunTy and TyConApp, so use splitAppTy_maybe -- NB: we've already dealt with type variables, -- so if one type is an App the other one jolly well better be too unify_ty (AppTy ty1a ty1b) ty2 _kco | Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2 = unify_ty_app ty1a ty1b ty2a ty2b unify_ty ty1 (AppTy ty2a ty2b) _kco | Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1 = unify_ty_app ty1a ty1b ty2a ty2b unify_ty (LitTy x) (LitTy y) _kco | x == y = return () unify_ty (ForAllTy (Named tv1 _) ty1) (ForAllTy (Named tv2 _) ty2) kco = do { unify_ty (tyVarKind tv1) (tyVarKind tv2) (mkNomReflCo liftedTypeKind) ; umRnBndr2 tv1 tv2 $ unify_ty ty1 ty2 kco } -- See Note [Matching coercion variables] unify_ty (CoercionTy co1) (CoercionTy co2) kco = do { unif <- amIUnifying ; c_subst <- getCvSubstEnv ; case co1 of CoVarCo cv | not unif , not (cv `elemVarEnv` c_subst) -> do { b <- tvBindFlagL cv ; if b == BindMe then do { checkRnEnvRCo co2 ; let [_, _, co_l, co_r] = decomposeCo 4 kco -- cv :: t1 ~ t2 -- co2 :: s1 ~ s2 -- co_l :: t1 ~ s1 -- co_r :: t2 ~ s2 ; extendCvEnv cv (co_l `mkTransCo` co2 `mkTransCo` mkSymCo co_r) } else return () } _ -> return () } unify_ty ty1 _ _ | Just (tc1, _) <- splitTyConApp_maybe ty1 , not (isGenerativeTyCon tc1 Nominal) = maybeApart unify_ty _ ty2 _ | Just (tc2, _) <- splitTyConApp_maybe ty2 , not (isGenerativeTyCon tc2 Nominal) = do { unif <- amIUnifying ; if unif then maybeApart else surelyApart } unify_ty _ _ _ = surelyApart unify_ty_app :: Type -> Type -> Type -> Type -> UM () unify_ty_app ty1a ty1b ty2a ty2b = do { -- TODO (RAE): Remove this exponential behavior. let ki1a = typeKind ty1a ki2a = typeKind ty2a ; unify_ty ki1a ki2a (mkNomReflCo liftedTypeKind) ; let kind_co = mkNomReflCo ki1a ; unify_ty ty1a ty2a kind_co ; unify_ty ty1b ty2b (mkNthCo 0 kind_co) } unify_tys :: [Type] -> [Type] -> UM () unify_tys orig_xs orig_ys = go orig_xs orig_ys where go [] [] = return () go (x:xs) (y:ys) = do { unify_ty x y (mkNomReflCo $ typeKind x) ; go xs ys } go _ _ = maybeApart -- See Note [Lists of different lengths are MaybeApart] --------------------------------- uVar :: TyVar -- Variable to be unified -> Type -- with this Type -> Coercion -- :: kind tv ~N kind ty -> UM () uVar tv1 ty kco = do { -- Check to see whether tv1 is refined by the substitution subst <- getTvSubstEnv ; case (lookupVarEnv subst tv1) of Just ty' -> do { unif <- amIUnifying ; if unif then unify_ty ty' ty kco -- Yes, call back into unify else -- when *matching*, we don't want to just recur here. -- this is because the range of the subst is the target -- type, not the template type. So, just check for -- normal type equality. 
guard ((ty' `mkCastTy` kco) `eqType` ty) } Nothing -> uUnrefined tv1 ty ty kco } -- No, continue uUnrefined :: TyVar -- variable to be unified -> Type -- with this Type -> Type -- (version w/ expanded synonyms) -> Coercion -- :: kind tv ~N kind ty -> UM () -- We know that tv1 isn't refined uUnrefined tv1 ty2 ty2' kco | Just ty2'' <- coreView ty2' = uUnrefined tv1 ty2 ty2'' kco -- Unwrap synonyms -- This is essential, in case we have -- type Foo a = a -- and then unify a ~ Foo a | TyVarTy tv2 <- ty2' = do { tv1' <- umRnOccL tv1 ; tv2' <- umRnOccR tv2 ; unif <- amIUnifying -- See Note [Self-substitution when matching] ; when (tv1' /= tv2' || not unif) $ do { subst <- getTvSubstEnv -- Check to see whether tv2 is refined ; case lookupVarEnv subst tv2 of { Just ty' | unif -> uUnrefined tv1 ty' ty' kco ; _ -> do { -- So both are unrefined -- And then bind one or the other, -- depending on which is bindable ; b1 <- tvBindFlagL tv1 ; b2 <- tvBindFlagR tv2 ; let ty1 = mkTyVarTy tv1 ; case (b1, b2) of (BindMe, _) -> do { checkRnEnvR ty2 -- make sure ty2 is not a local ; extendTvEnv tv1 (ty2 `mkCastTy` mkSymCo kco) } (_, BindMe) | unif -> do { checkRnEnvL ty1 -- ditto for ty1 ; extendTvEnv tv2 (ty1 `mkCastTy` kco) } _ | tv1' == tv2' -> return () -- How could this happen? If we're only matching and if -- we're comparing forall-bound variables. _ -> maybeApart -- See Note [Unification with skolems] }}}} uUnrefined tv1 ty2 ty2' kco -- ty2 is not a type variable = do { occurs <- elemNiSubstSet tv1 (tyCoVarsOfType ty2') ; unif <- amIUnifying ; if unif && occurs -- See Note [Self-substitution when matching] then maybeApart -- Occurs check, see Note [Fine-grained unification] else do bindTv tv1 (ty2 `mkCastTy` mkSymCo kco) } -- Bind tyvar to the synonym if poss elemNiSubstSet :: TyVar -> TyCoVarSet -> UM Bool elemNiSubstSet v set = do { tsubst <- getTvSubstEnv ; return $ v `elemVarSet` niSubstTvSet tsubst set } bindTv :: TyVar -> Type -> UM () bindTv tv ty -- ty is not a variable = do { checkRnEnvR ty -- make sure ty mentions no local variables ; b <- tvBindFlagL tv ; case b of Skolem -> maybeApart -- See Note [Unification with skolems] BindMe -> extendTvEnv tv ty } {- %************************************************************************ %* * Binding decisions * * ************************************************************************ -} data BindFlag = BindMe -- A regular type variable | Skolem -- This type variable is a skolem constant -- Don't bind it; it only matches itself deriving Eq {- ************************************************************************ * * Unification monad * * ************************************************************************ -} data UMEnv = UMEnv { um_bind_fun :: TyVar -> BindFlag -- the user-supplied BindFlag function , um_unif :: Bool -- unification (True) or matching? , um_inj_tf :: Bool -- checking for injectivity? 
-- See (end of) Note [Specification of unification] , um_rn_env :: RnEnv2 } data UMState = UMState { um_tv_env :: TvSubstEnv , um_cv_env :: CvSubstEnv } newtype UM a = UM { unUM :: UMEnv -> UMState -> UnifyResultM (UMState, a) } instance Functor UM where fmap = liftM instance Applicative UM where pure a = UM (\_ s -> pure (s, a)) (<*>) = ap instance Monad UM where fail _ = UM (\_ _ -> SurelyApart) -- failed pattern match m >>= k = UM (\env state -> do { (state', v) <- unUM m env state ; unUM (k v) env state' }) -- need this instance because of a use of 'guard' above instance Alternative UM where empty = UM (\_ _ -> Control.Applicative.empty) m1 <|> m2 = UM (\env state -> unUM m1 env state <|> unUM m2 env state) instance MonadPlus UM #if __GLASGOW_HASKELL__ > 710 instance MonadFail.MonadFail UM where fail _ = UM (\_tvs _subst -> SurelyApart) -- failed pattern match #endif initUM :: (TyVar -> BindFlag) -> Bool -- True <=> unify; False <=> match -> Bool -- True <=> doing an injectivity check -> RnEnv2 -> TvSubstEnv -- subst to extend -> CvSubstEnv -> UM a -> UnifyResultM a initUM badtvs unif inj_tf rn_env subst_env cv_subst_env um = case unUM um env state of Unifiable (_, subst) -> Unifiable subst MaybeApart (_, subst) -> MaybeApart subst SurelyApart -> SurelyApart where env = UMEnv { um_bind_fun = badtvs , um_unif = unif , um_inj_tf = inj_tf , um_rn_env = rn_env } state = UMState { um_tv_env = subst_env , um_cv_env = cv_subst_env } tvBindFlagL :: TyVar -> UM BindFlag tvBindFlagL tv = UM $ \env state -> Unifiable (state, if inRnEnvL (um_rn_env env) tv then Skolem else um_bind_fun env tv) tvBindFlagR :: TyVar -> UM BindFlag tvBindFlagR tv = UM $ \env state -> Unifiable (state, if inRnEnvR (um_rn_env env) tv then Skolem else um_bind_fun env tv) getTvSubstEnv :: UM TvSubstEnv getTvSubstEnv = UM $ \_ state -> Unifiable (state, um_tv_env state) getCvSubstEnv :: UM CvSubstEnv getCvSubstEnv = UM $ \_ state -> Unifiable (state, um_cv_env state) extendTvEnv :: TyVar -> Type -> UM () extendTvEnv tv ty = UM $ \_ state -> Unifiable (state { um_tv_env = extendVarEnv (um_tv_env state) tv ty }, ()) extendCvEnv :: CoVar -> Coercion -> UM () extendCvEnv cv co = UM $ \_ state -> Unifiable (state { um_cv_env = extendVarEnv (um_cv_env state) cv co }, ()) umRnBndr2 :: TyCoVar -> TyCoVar -> UM a -> UM a umRnBndr2 v1 v2 thing = UM $ \env state -> let rn_env' = rnBndr2 (um_rn_env env) v1 v2 in unUM thing (env { um_rn_env = rn_env' }) state checkRnEnv :: (RnEnv2 -> Var -> Bool) -> VarSet -> UM () checkRnEnv inRnEnv varset = UM $ \env state -> if any (inRnEnv (um_rn_env env)) (varSetElems varset) then MaybeApart (state, ()) else Unifiable (state, ()) -- | Converts any SurelyApart to a MaybeApart don'tBeSoSure :: UM () -> UM () don'tBeSoSure um = UM $ \env state -> case unUM um env state of SurelyApart -> MaybeApart (state, ()) other -> other checkRnEnvR :: Type -> UM () checkRnEnvR ty = checkRnEnv inRnEnvR (tyCoVarsOfType ty) checkRnEnvL :: Type -> UM () checkRnEnvL ty = checkRnEnv inRnEnvL (tyCoVarsOfType ty) checkRnEnvRCo :: Coercion -> UM () checkRnEnvRCo co = checkRnEnv inRnEnvR (tyCoVarsOfCo co) umRnOccL :: TyVar -> UM TyVar umRnOccL v = UM $ \env state -> Unifiable (state, rnOccL (um_rn_env env) v) umRnOccR :: TyVar -> UM TyVar umRnOccR v = UM $ \env state -> Unifiable (state, rnOccR (um_rn_env env) v) umSwapRn :: UM a -> UM a umSwapRn thing = UM $ \env state -> let rn_env' = rnSwap (um_rn_env env) in unUM thing (env { um_rn_env = rn_env' }) state amIUnifying :: UM Bool amIUnifying = UM $ \env state -> Unifiable 
(state, um_unif env) checkingInjectivity :: UM Bool checkingInjectivity = UM $ \env state -> Unifiable (state, um_inj_tf env) maybeApart :: UM () maybeApart = UM (\_ state -> MaybeApart (state, ())) surelyApart :: UM a surelyApart = UM (\_ _ -> SurelyApart) {- %************************************************************************ %* * Matching a (lifted) type against a coercion %* * %************************************************************************ This section defines essentially an inverse to liftCoSubst. It is defined here to avoid a dependency from Coercion on this module. -} data MatchEnv = ME { me_tmpls :: TyVarSet , me_env :: RnEnv2 } -- | 'liftCoMatch' is sort of inverse to 'liftCoSubst'. In particular, if -- @liftCoMatch vars ty co == Just s@, then @tyCoSubst s ty == co@, -- where @==@ there means that the result of tyCoSubst has the same -- type as the original co; but may be different under the hood. -- That is, it matches a type against a coercion of the same -- "shape", and returns a lifting substitution which could have been -- used to produce the given coercion from the given type. -- Note that this function is incomplete -- it might return Nothing -- when there does indeed exist a possible lifting context. -- -- This function is incomplete in that it doesn't respect the equality -- in `eqType`. That is, it's possible that this will succeed for t1 and -- fail for t2, even when t1 `eqType` t2. That's because it depends on -- there being a very similar structure between the type and the coercion. -- This incompleteness shouldn't be all that surprising, especially because -- it depends on the structure of the coercion, which is a silly thing to do. -- -- The lifting context produced doesn't have to be exacting in the roles -- of the mappings. This is because any use of the lifting context will -- also require a desired role. Thus, this algorithm prefers mapping to -- nominal coercions where it can do so. liftCoMatch :: TyCoVarSet -> Type -> Coercion -> Maybe LiftingContext liftCoMatch tmpls ty co = do { cenv1 <- ty_co_match menv emptyVarEnv ki ki_co ki_ki_co ki_ki_co ; cenv2 <- ty_co_match menv cenv1 ty co (mkNomReflCo co_lkind) (mkNomReflCo co_rkind) ; return (LC (mkEmptyTCvSubst in_scope) cenv2) } where menv = ME { me_tmpls = tmpls, me_env = mkRnEnv2 in_scope } in_scope = mkInScopeSet (tmpls `unionVarSet` tyCoVarsOfCo co) -- Like tcMatchTy, assume all the interesting variables -- in ty are in tmpls ki = typeKind ty ki_co = promoteCoercion co ki_ki_co = mkNomReflCo liftedTypeKind Pair co_lkind co_rkind = coercionKind ki_co -- | 'ty_co_match' does all the actual work for 'liftCoMatch'. 
ty_co_match :: MatchEnv -- ^ ambient helpful info -> LiftCoEnv -- ^ incoming subst -> Type -- ^ ty, type to match -> Coercion -- ^ co, coercion to match against -> Coercion -- ^ :: kind of L type of substed ty ~N L kind of co -> Coercion -- ^ :: kind of R type of substed ty ~N R kind of co -> Maybe LiftCoEnv ty_co_match menv subst ty co lkco rkco | Just ty' <- coreViewOneStarKind ty = ty_co_match menv subst ty' co lkco rkco -- handle Refl case: | tyCoVarsOfType ty `isNotInDomainOf` subst , Just (ty', _) <- isReflCo_maybe co , ty `eqType` ty' = Just subst where isNotInDomainOf :: VarSet -> VarEnv a -> Bool isNotInDomainOf set env = noneSet (\v -> elemVarEnv v env) set noneSet :: (Var -> Bool) -> VarSet -> Bool noneSet f = foldVarSet (\v rest -> rest && (not $ f v)) True ty_co_match menv subst ty co lkco rkco | CastTy ty' co' <- ty = ty_co_match menv subst ty' co (co' `mkTransCo` lkco) (co' `mkTransCo` rkco) | CoherenceCo co1 co2 <- co = ty_co_match menv subst ty co1 (lkco `mkTransCo` mkSymCo co2) rkco | SymCo co' <- co = swapLiftCoEnv <$> ty_co_match menv (swapLiftCoEnv subst) ty co' rkco lkco -- Match a type variable against a non-refl coercion ty_co_match menv subst (TyVarTy tv1) co lkco rkco | Just co1' <- lookupVarEnv subst tv1' -- tv1' is already bound to co1 = if eqCoercionX (nukeRnEnvL rn_env) co1' co then Just subst else Nothing -- no match since tv1 matches two different coercions | tv1' `elemVarSet` me_tmpls menv -- tv1' is a template var = if any (inRnEnvR rn_env) (tyCoVarsOfCoList co) then Nothing -- occurs check failed else Just $ extendVarEnv subst tv1' $ castCoercionKind co (mkSymCo lkco) (mkSymCo rkco) | otherwise = Nothing where rn_env = me_env menv tv1' = rnOccL rn_env tv1 -- just look through SubCo's. We don't really care about roles here. ty_co_match menv subst ty (SubCo co) lkco rkco = ty_co_match menv subst ty co lkco rkco ty_co_match menv subst (AppTy ty1a ty1b) co _lkco _rkco | Just (co2, arg2) <- splitAppCo_maybe co -- c.f. Unify.match on AppTy = ty_co_match_app menv subst ty1a ty1b co2 arg2 ty_co_match menv subst ty1 (AppCo co2 arg2) _lkco _rkco | Just (ty1a, ty1b) <- repSplitAppTy_maybe ty1 -- yes, the one from Type, not TcType; this is for coercion optimization = ty_co_match_app menv subst ty1a ty1b co2 arg2 ty_co_match menv subst (TyConApp tc1 tys) (TyConAppCo _ tc2 cos) _lkco _rkco = ty_co_match_tc menv subst tc1 tys tc2 cos ty_co_match menv subst (ForAllTy (Anon ty1) ty2) (TyConAppCo _ tc cos) _lkco _rkco = ty_co_match_tc menv subst funTyCon [ty1, ty2] tc cos ty_co_match menv subst (ForAllTy (Named tv1 _) ty1) (ForAllCo tv2 kind_co2 co2) lkco rkco = do { subst1 <- ty_co_match menv subst (tyVarKind tv1) kind_co2 ki_ki_co ki_ki_co ; let rn_env0 = me_env menv rn_env1 = rnBndr2 rn_env0 tv1 tv2 menv' = menv { me_env = rn_env1 } ; ty_co_match menv' subst1 ty1 co2 lkco rkco } where ki_ki_co = mkNomReflCo liftedTypeKind ty_co_match _ subst (CoercionTy {}) _ _ _ = Just subst -- don't inspect coercions ty_co_match menv subst ty co lkco rkco | Just co' <- pushRefl co = ty_co_match menv subst ty co' lkco rkco | otherwise = Nothing ty_co_match_tc :: MatchEnv -> LiftCoEnv -> TyCon -> [Type] -> TyCon -> [Coercion] -> Maybe LiftCoEnv ty_co_match_tc menv subst tc1 tys1 tc2 cos2 = do { guard (tc1 == tc2) ; ty_co_match_args menv subst tys1 cos2 lkcos rkcos } where Pair lkcos rkcos = traverse (fmap mkNomReflCo . 
coercionKind) cos2 ty_co_match_app :: MatchEnv -> LiftCoEnv -> Type -> Type -> Coercion -> Coercion -> Maybe LiftCoEnv ty_co_match_app menv subst ty1a ty1b co2a co2b = do { -- TODO (RAE): Remove this exponential behavior. subst1 <- ty_co_match menv subst ki1a ki2a ki_ki_co ki_ki_co ; let Pair lkco rkco = mkNomReflCo <$> coercionKind ki2a ; subst2 <- ty_co_match menv subst1 ty1a co2a lkco rkco ; ty_co_match menv subst2 ty1b co2b (mkNthCo 0 lkco) (mkNthCo 0 rkco) } where ki1a = typeKind ty1a ki2a = promoteCoercion co2a ki_ki_co = mkNomReflCo liftedTypeKind ty_co_match_args :: MatchEnv -> LiftCoEnv -> [Type] -> [Coercion] -> [Coercion] -> [Coercion] -> Maybe LiftCoEnv ty_co_match_args _ subst [] [] _ _ = Just subst ty_co_match_args menv subst (ty:tys) (arg:args) (lkco:lkcos) (rkco:rkcos) = do { subst' <- ty_co_match menv subst ty arg lkco rkco ; ty_co_match_args menv subst' tys args lkcos rkcos } ty_co_match_args _ _ _ _ _ _ = Nothing pushRefl :: Coercion -> Maybe Coercion pushRefl (Refl Nominal (AppTy ty1 ty2)) = Just (AppCo (Refl Nominal ty1) (mkNomReflCo ty2)) pushRefl (Refl r (ForAllTy (Anon ty1) ty2)) = Just (TyConAppCo r funTyCon [mkReflCo r ty1, mkReflCo r ty2]) pushRefl (Refl r (TyConApp tc tys)) = Just (TyConAppCo r tc (zipWith mkReflCo (tyConRolesX r tc) tys)) pushRefl (Refl r (ForAllTy (Named tv _) ty)) = Just (mkHomoForAllCos_NoRefl [tv] (Refl r ty)) -- NB: NoRefl variant. Otherwise, we get a loop! pushRefl (Refl r (CastTy ty co)) = Just (castCoercionKind (Refl r ty) co co) pushRefl _ = Nothing
oldmanmike/ghc
compiler/types/Unify.hs
bsd-3-clause
47,551
302
19
13,809
7,004
3,838
3,166
549
9
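The Unify.hs record above threads a substitution through its UM monad and reports every comparison as Unifiable, MaybeApart or SurelyApart. As a rough, self-contained restatement of that three-way-result idea, the sketch below matches toy first-order terms; the Term type, the function names and the treatment of already-bound variables are illustrative assumptions, not GHC's actual implementation.

-- Illustrative sketch only: a toy three-way unification result, not GHC code.
module Main where

import qualified Data.Map as M

data UnifyResult a = Unifiable a | MaybeApart a | SurelyApart
  deriving Show

-- Hypothetical first-order terms: variables and constructor applications.
data Term = Var String | Con String [Term]
  deriving (Eq, Show)

type Subst = M.Map String Term

-- Deliberately simple: no occurs check, and a variable already bound to a
-- different term counts as surely apart.  The real unifier produces
-- MaybeApart for things like type-family applications; here the constructor
-- is only threaded through.
unify :: Subst -> Term -> Term -> UnifyResult Subst
unify s (Var v) t =
  case M.lookup v s of
    Just t' | t' == t   -> Unifiable s
            | otherwise -> SurelyApart
    Nothing             -> Unifiable (M.insert v t s)
unify s t (Var v) = unify s (Var v) t
unify s (Con c1 as1) (Con c2 as2)
  | c1 /= c2 || length as1 /= length as2 = SurelyApart
  | otherwise                            = go s as1 as2
  where
    go acc []     []     = Unifiable acc
    go acc (x:xs) (y:ys) =
      case unify acc x y of
        Unifiable acc'  -> go acc' xs ys
        MaybeApart acc' -> MaybeApart acc'
        SurelyApart     -> SurelyApart
    go _   _      _      = SurelyApart

main :: IO ()
main = do
  print (unify M.empty (Con "Maybe" [Var "a"]) (Con "Maybe" [Con "Int" []]))
  print (unify M.empty (Con "Int" []) (Con "Bool" []))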
module Main where

import Data.Version
import Control.Monad
import Data.Monoid
import Prelude

import Language.Futhark.Parser

import Futhark.Version
import Futhark.Passes
import Futhark.Compiler
import Futhark.Util.Options

banner :: String
banner = unlines [ "|// |\\ | |\\ |\\ /"
                 , "|/ | \\ |\\ |\\ |/ /"
                 , "| | \\ |/ | |\\ \\"
                 , "| | \\ | | | \\ \\"
                 ]

main :: IO ()
main = mainWithOptions interpreterConfig [] run
  where run [prog] config = Just $ interpret config prog
        run []     _      = Just repl
        run _      _      = Nothing

repl :: IO ()
repl = do
  putStr banner
  putStrLn $ "Version " ++ showVersion version
  putStrLn "(C) HIPERFIT research centre"
  putStrLn "Department of Computer Science, University of Copenhagen (DIKU)"
  putStrLn ""
  forever $ print =<< parseExpIncrIO "input" mempty

interpret :: FutharkConfig -> FilePath -> IO ()
interpret config = runCompilerOnProgram config standardPipeline interpretAction'

interpreterConfig :: FutharkConfig
interpreterConfig = newFutharkConfig
CulpaBS/wbBach
src/futharki.hs
bsd-3-clause
1,066
0
8
248
259
133
126
34
3
{- Copyright 2012-2015 Vidar Holen This file is part of ShellCheck. http://www.vidarholen.net/contents/shellcheck ShellCheck is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. ShellCheck is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. -} module ShellCheck.AST where import Control.Monad import Control.Monad.Identity import qualified ShellCheck.Regex as Re data Id = Id Int deriving (Show, Eq, Ord) data Quoted = Quoted | Unquoted deriving (Show, Eq) data Dashed = Dashed | Undashed deriving (Show, Eq) data AssignmentMode = Assign | Append deriving (Show, Eq) data FunctionKeyword = FunctionKeyword Bool deriving (Show, Eq) data FunctionParentheses = FunctionParentheses Bool deriving (Show, Eq) data CaseType = CaseBreak | CaseFallThrough | CaseContinue deriving (Show, Eq) data Token = TA_Binary Id String Token Token | TA_Expansion Id [Token] | TA_Index Id Token | TA_Sequence Id [Token] | TA_Trinary Id Token Token Token | TA_Unary Id String Token | TC_And Id ConditionType String Token Token | TC_Binary Id ConditionType String Token Token | TC_Group Id ConditionType Token | TC_Noary Id ConditionType Token | TC_Or Id ConditionType String Token Token | TC_Unary Id ConditionType String Token | T_AND_IF Id | T_AndIf Id (Token) (Token) | T_Arithmetic Id Token | T_Array Id [Token] | T_IndexedElement Id Token Token | T_Assignment Id AssignmentMode String (Maybe Token) Token | T_Backgrounded Id Token | T_Backticked Id [Token] | T_Bang Id | T_Banged Id Token | T_BraceExpansion Id [Token] | T_BraceGroup Id [Token] | T_CLOBBER Id | T_Case Id | T_CaseExpression Id Token [(CaseType, [Token], [Token])] | T_Condition Id ConditionType Token | T_DGREAT Id | T_DLESS Id | T_DLESSDASH Id | T_DSEMI Id | T_Do Id | T_DollarArithmetic Id Token | T_DollarBraced Id Token | T_DollarBracket Id Token | T_DollarDoubleQuoted Id [Token] | T_DollarExpansion Id [Token] | T_DollarSingleQuoted Id String | T_Done Id | T_DoubleQuoted Id [Token] | T_EOF Id | T_Elif Id | T_Else Id | T_Esac Id | T_Extglob Id String [Token] | T_FdRedirect Id String Token | T_Fi Id | T_For Id | T_ForArithmetic Id Token Token Token [Token] | T_ForIn Id String [Token] [Token] | T_Function Id FunctionKeyword FunctionParentheses String Token | T_GREATAND Id | T_Glob Id String | T_Greater Id | T_HereDoc Id Dashed Quoted String [Token] | T_HereString Id Token | T_If Id | T_IfExpression Id [([Token],[Token])] [Token] | T_In Id | T_IoFile Id Token Token | T_LESSAND Id | T_LESSGREAT Id | T_Lbrace Id | T_Less Id | T_Literal Id String | T_Lparen Id | T_NEWLINE Id | T_NormalWord Id [Token] | T_OR_IF Id | T_OrIf Id (Token) (Token) | T_Pipeline Id [Token] [Token] -- [Pipe separators] [Commands] | T_ProcSub Id String [Token] | T_Rbrace Id | T_Redirecting Id [Token] Token | T_Rparen Id | T_Script Id String [Token] | T_Select Id | T_SelectIn Id String [Token] [Token] | T_Semi Id | T_SimpleCommand Id [Token] [Token] | T_SingleQuoted Id String | T_Subshell Id [Token] | T_Then Id | T_Until Id | T_UntilExpression Id [Token] [Token] | T_While Id | T_WhileExpression Id [Token] [Token] | T_Annotation Id 
[Annotation] Token | T_Pipe Id String | T_CoProc Id (Maybe String) Token | T_CoProcBody Id Token deriving (Show) data Annotation = DisableComment Integer deriving (Show, Eq) data ConditionType = DoubleBracket | SingleBracket deriving (Show, Eq) -- This is an abomination. tokenEquals :: Token -> Token -> Bool tokenEquals a b = kludge a == kludge b where kludge s = Re.subRegex (Re.mkRegex "\\(Id [0-9]+\\)") (show s) "(Id 0)" instance Eq Token where (==) = tokenEquals analyze :: Monad m => (Token -> m ()) -> (Token -> m ()) -> (Token -> Token) -> Token -> m Token analyze f g i = round where round t = do f t newT <- delve t g t return . i $ newT roundAll = mapM round roundMaybe Nothing = return Nothing roundMaybe (Just v) = do s <- round v return (Just s) dl l v = do x <- roundAll l return $ v x dll l m v = do x <- roundAll l y <- roundAll m return $ v x m d1 t v = do x <- round t return $ v x d2 t1 t2 v = do x <- round t1 y <- round t2 return $ v x y delve (T_NormalWord id list) = dl list $ T_NormalWord id delve (T_DoubleQuoted id list) = dl list $ T_DoubleQuoted id delve (T_DollarDoubleQuoted id list) = dl list $ T_DollarDoubleQuoted id delve (T_DollarExpansion id list) = dl list $ T_DollarExpansion id delve (T_BraceExpansion id list) = dl list $ T_BraceExpansion id delve (T_Backticked id list) = dl list $ T_Backticked id delve (T_DollarArithmetic id c) = d1 c $ T_DollarArithmetic id delve (T_DollarBracket id c) = d1 c $ T_DollarBracket id delve (T_IoFile id op file) = d2 op file $ T_IoFile id delve (T_HereString id word) = d1 word $ T_HereString id delve (T_FdRedirect id v t) = d1 t $ T_FdRedirect id v delve (T_Assignment id mode var index value) = do a <- roundMaybe index b <- round value return $ T_Assignment id mode var a b delve (T_Array id t) = dl t $ T_Array id delve (T_IndexedElement id t1 t2) = d2 t1 t2 $ T_IndexedElement id delve (T_Redirecting id redirs cmd) = do a <- roundAll redirs b <- round cmd return $ T_Redirecting id a b delve (T_SimpleCommand id vars cmds) = dll vars cmds $ T_SimpleCommand id delve (T_Pipeline id l1 l2) = dll l1 l2 $ T_Pipeline id delve (T_Banged id l) = d1 l $ T_Banged id delve (T_AndIf id t u) = d2 t u $ T_AndIf id delve (T_OrIf id t u) = d2 t u $ T_OrIf id delve (T_Backgrounded id l) = d1 l $ T_Backgrounded id delve (T_Subshell id l) = dl l $ T_Subshell id delve (T_ProcSub id typ l) = dl l $ T_ProcSub id typ delve (T_Arithmetic id c) = d1 c $ T_Arithmetic id delve (T_IfExpression id conditions elses) = do newConds <- mapM (\(c, t) -> do x <- mapM round c y <- mapM round t return (x,y) ) conditions newElses <- roundAll elses return $ T_IfExpression id newConds newElses delve (T_BraceGroup id l) = dl l $ T_BraceGroup id delve (T_WhileExpression id c l) = dll c l $ T_WhileExpression id delve (T_UntilExpression id c l) = dll c l $ T_UntilExpression id delve (T_ForIn id v w l) = dll w l $ T_ForIn id v delve (T_SelectIn id v w l) = dll w l $ T_SelectIn id v delve (T_CaseExpression id word cases) = do newWord <- round word newCases <- mapM (\(o, c, t) -> do x <- mapM round c y <- mapM round t return (o, x,y) ) cases return $ T_CaseExpression id newWord newCases delve (T_ForArithmetic id a b c group) = do x <- round a y <- round b z <- round c list <- mapM round group return $ T_ForArithmetic id x y z list delve (T_Script id s l) = dl l $ T_Script id s delve (T_Function id a b name body) = d1 body $ T_Function id a b name delve (T_Condition id typ token) = d1 token $ T_Condition id typ delve (T_Extglob id str l) = dl l $ T_Extglob id str delve (T_DollarBraced 
id op) = d1 op $ T_DollarBraced id delve (T_HereDoc id d q str l) = dl l $ T_HereDoc id d q str delve (TC_And id typ str t1 t2) = d2 t1 t2 $ TC_And id typ str delve (TC_Or id typ str t1 t2) = d2 t1 t2 $ TC_Or id typ str delve (TC_Group id typ token) = d1 token $ TC_Group id typ delve (TC_Binary id typ op lhs rhs) = d2 lhs rhs $ TC_Binary id typ op delve (TC_Unary id typ op token) = d1 token $ TC_Unary id typ op delve (TC_Noary id typ token) = d1 token $ TC_Noary id typ delve (TA_Binary id op t1 t2) = d2 t1 t2 $ TA_Binary id op delve (TA_Unary id op t1) = d1 t1 $ TA_Unary id op delve (TA_Sequence id l) = dl l $ TA_Sequence id delve (TA_Trinary id t1 t2 t3) = do a <- round t1 b <- round t2 c <- round t3 return $ TA_Trinary id a b c delve (TA_Expansion id t) = dl t $ TA_Expansion id delve (TA_Index id t) = d1 t $ TA_Index id delve (T_Annotation id anns t) = d1 t $ T_Annotation id anns delve (T_CoProc id var body) = d1 body $ T_CoProc id var delve (T_CoProcBody id t) = d1 t $ T_CoProcBody id delve t = return t getId t = case t of T_AND_IF id -> id T_OR_IF id -> id T_DSEMI id -> id T_Semi id -> id T_DLESS id -> id T_DGREAT id -> id T_LESSAND id -> id T_GREATAND id -> id T_LESSGREAT id -> id T_DLESSDASH id -> id T_CLOBBER id -> id T_If id -> id T_Then id -> id T_Else id -> id T_Elif id -> id T_Fi id -> id T_Do id -> id T_Done id -> id T_Case id -> id T_Esac id -> id T_While id -> id T_Until id -> id T_For id -> id T_Select id -> id T_Lbrace id -> id T_Rbrace id -> id T_Lparen id -> id T_Rparen id -> id T_Bang id -> id T_In id -> id T_NEWLINE id -> id T_EOF id -> id T_Less id -> id T_Greater id -> id T_SingleQuoted id _ -> id T_Literal id _ -> id T_NormalWord id _ -> id T_DoubleQuoted id _ -> id T_DollarExpansion id _ -> id T_DollarBraced id _ -> id T_DollarArithmetic id _ -> id T_BraceExpansion id _ -> id T_IoFile id _ _ -> id T_HereDoc id _ _ _ _ -> id T_HereString id _ -> id T_FdRedirect id _ _ -> id T_Assignment id _ _ _ _ -> id T_Array id _ -> id T_IndexedElement id _ _ -> id T_Redirecting id _ _ -> id T_SimpleCommand id _ _ -> id T_Pipeline id _ _ -> id T_Banged id _ -> id T_AndIf id _ _ -> id T_OrIf id _ _ -> id T_Backgrounded id _ -> id T_IfExpression id _ _ -> id T_Subshell id _ -> id T_BraceGroup id _ -> id T_WhileExpression id _ _ -> id T_UntilExpression id _ _ -> id T_ForIn id _ _ _ -> id T_SelectIn id _ _ _ -> id T_CaseExpression id _ _ -> id T_Function id _ _ _ _ -> id T_Arithmetic id _ -> id T_Script id _ _ -> id T_Condition id _ _ -> id T_Extglob id _ _ -> id T_Backticked id _ -> id TC_And id _ _ _ _ -> id TC_Or id _ _ _ _ -> id TC_Group id _ _ -> id TC_Binary id _ _ _ _ -> id TC_Unary id _ _ _ -> id TC_Noary id _ _ -> id TA_Binary id _ _ _ -> id TA_Unary id _ _ -> id TA_Sequence id _ -> id TA_Trinary id _ _ _ -> id TA_Expansion id _ -> id TA_Index id _ -> id T_ProcSub id _ _ -> id T_Glob id _ -> id T_ForArithmetic id _ _ _ _ -> id T_DollarSingleQuoted id _ -> id T_DollarDoubleQuoted id _ -> id T_DollarBracket id _ -> id T_Annotation id _ _ -> id T_Pipe id _ -> id T_CoProc id _ _ -> id T_CoProcBody id _ -> id blank :: Monad m => Token -> m () blank = const $ return () doAnalysis f = analyze f blank id doStackAnalysis startToken endToken = analyze startToken endToken id doTransform i = runIdentity . analyze blank blank i isLoop t = case t of T_WhileExpression {} -> True T_UntilExpression {} -> True T_ForIn {} -> True T_ForArithmetic {} -> True T_SelectIn {} -> True _ -> False
icyfork/shellcheck
ShellCheck/AST.hs
gpl-3.0
12,632
0
16
4,147
4,547
2,230
2,317
327
92
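A possible way to use the traversal helpers from the ShellCheck.AST record above: doAnalysis runs a monadic action at every node of a Token tree, so pairing it with a State monad counts constructs. The hand-built token tree and Ids below are assumptions made for illustration (real trees come from ShellCheck's parser), and the mtl State monad is assumed to be available.

-- Illustrative sketch only; the tiny token tree below is hand-rolled.
import Control.Monad (when)
import Control.Monad.State (execState, modify)
import ShellCheck.AST

countLoops :: Token -> Int
countLoops t = execState (doAnalysis tick t) (0 :: Int)
  where
    tick tok = when (isLoop tok) (modify (+1))

example :: Token
example =
  T_Script (Id 0) "#!/bin/sh"
    [ T_WhileExpression (Id 1)
        [T_Literal (Id 2) "true"]
        [T_Literal (Id 3) "echo hi"]
    ]

main :: IO ()
main = print (countLoops example)   -- expected output: 1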
module WithCli.ArgumentSpec where

import Data.Proxy
import Test.Hspec

import WithCli.Argument

spec :: Spec
spec = do
  describe "Option.Double" $ do
    it "parses doubles" $ do
      parseArgument "1.2" `shouldBe` Just (1.2 :: Double)
    it "renders as NUMBER in help and error output" $ do
      argumentType (Proxy :: Proxy Double) `shouldBe` "NUMBER"
    it "parses doubles that start with a dot" $ do
      parseArgument ".4" `shouldBe` Just (0.4 :: Double)
  describe "Option.Float" $ do
    it "parses floats" $ do
      parseArgument "1.2" `shouldBe` Just (1.2 :: Float)
    it "renders as NUMBER in help and error output" $ do
      argumentType (Proxy :: Proxy Float) `shouldBe` "NUMBER"
kosmikus/getopt-generics
test/WithCli/ArgumentSpec.hs
bsd-3-clause
737
0
16
192
206
102
104
18
1
{-# OPTIONS -XImpredicativeTypes -fno-warn-deprecated-flags -XEmptyDataDecls -XGADTs -XLiberalTypeSynonyms -XFlexibleInstances -XScopedTypeVariables #-}

-- See #1627.  The point is that we should get nice
-- compact code for Foo
-- In GHC 7.0 this fails, and rightly so.

module M(foo) where

import Control.Monad
import Control.Monad.ST
import Data.Array.ST

data E' v m a where
    E :: m a -> E' RValue m a
    V :: m a -> (a -> m ()) -> E' v m a

data LValue
data RValue

type E m a = E' RValue m a
type V m a = E' LValue m a

{-# INLINE runE #-}
runE :: E' v m a -> m a
runE (E t)   = t
runE (V t _) = t

instance Monad m => Functor (E' RValue m) where
    {-# INLINE fmap #-}
    fmap f x = liftM f x

instance Monad m => Applicative (E' RValue m) where
    {-# INLINE pure #-}
    pure x = return x
    {-# INLINE (<*>) #-}
    (<*>) = ap

instance (Monad m) => Monad (E' RValue m) where
    {-# INLINE return #-}
    return x = E $ return x
    {-# INLINE (>>=) #-}
    x >>= f = E $ do x' <- runE x
                     runE (f x')

liftArray :: forall arr m a i . (Ix i, MArray arr a m) =>
             arr i a -> E m (forall v . [E m i] -> E' v m a)
{-# INLINE liftArray #-}
liftArray a = E (do
    let ix :: [E m i] -> m i
        ix [i] = runE i
        {-# INLINE f #-}
        f is = V (ix is >>= readArray a)
                 (\ x -> ix is >>= \ i -> writeArray a i x)
    return f
  )

{-# INLINE liftE2 #-}
liftE2 :: (Monad m) => (a -> b -> c) -> E' va m a -> E' vb m b -> E m c
liftE2 op x y = E $ do
    x' <- runE x
    y' <- runE y
    return (x' `op` y')

{-# INLINE plus #-}
plus :: (Monad m) => E m Int -> E m Int -> E m Int
plus = liftE2 (+)

foo :: forall s . STArray s Int Int -> ST s Int
foo ma = runE $ do
    a <- liftArray ma
    let one :: E (ST t) Int
        one = return 1
    a[one] `plus` a[one]
sdiehl/ghc
testsuite/tests/simplCore/should_compile/simpl017.hs
bsd-3-clause
1,830
0
17
572
777
399
378
-1
-1
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.InstallSymlink -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- Managing installing binaries with symlinks. ----------------------------------------------------------------------------- module Distribution.Client.InstallSymlink ( symlinkBinaries, symlinkBinary, ) where #if mingw32_HOST_OS import Distribution.Package (PackageIdentifier) import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.Setup (InstallFlags) import Distribution.Simple.Setup (ConfigFlags) import Distribution.Simple.Compiler import Distribution.System symlinkBinaries :: Platform -> Compiler -> ConfigFlags -> InstallFlags -> InstallPlan -> IO [(PackageIdentifier, String, FilePath)] symlinkBinaries _ _ _ _ _ = return [] symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows" #else import Distribution.Client.Types ( SourcePackage(..) , GenericReadyPackage(..), ReadyPackage, enableStanzas , ConfiguredPackage(..) , fakeUnitId) import Distribution.Client.Setup ( InstallFlags(installSymlinkBinDir) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Package ( PackageIdentifier, Package(packageId), UnitId(..) ) import Distribution.Compiler ( CompilerId(..) ) import qualified Distribution.PackageDescription as PackageDescription import Distribution.PackageDescription ( PackageDescription ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription ) import Distribution.Simple.Setup ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) import qualified Distribution.Simple.InstallDirs as InstallDirs import Distribution.Simple.Compiler ( Compiler, compilerInfo, CompilerInfo(..) ) import Distribution.System ( Platform ) import System.Posix.Files ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink , removeLink ) import System.Directory ( canonicalizePath ) import System.FilePath ( (</>), splitPath, joinPath, isAbsolute ) import Prelude hiding (ioError) import System.IO.Error ( isDoesNotExistError, ioError ) import Distribution.Compat.Exception ( catchIO ) import Control.Exception ( assert ) import Data.Maybe ( catMaybes ) -- | We would like by default to install binaries into some location that is on -- the user's PATH. For per-user installations on Unix systems that basically -- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ -- directory will be on the user's PATH. However some people are a bit nervous -- about letting a package manager install programs into @~/bin/@. -- -- A compromise solution is that instead of installing binaries directly into -- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ -- and then create symlinks in @~/bin/@. We can be careful when setting up the -- symlinks that we do not overwrite any binary that the user installed. We can -- check if it was a symlink we made because it would point to the private dir -- where we install our binaries. This means we can install normally without -- worrying and in a later phase set up symlinks, and if that fails then we -- report it to the user, but even in this case the package is still in an OK -- installed state. 
-- -- This is an optional feature that users can choose to use or not. It is -- controlled from the config file. Of course it only works on POSIX systems -- with symlinks so is not available to Windows users. -- symlinkBinaries :: Platform -> Compiler -> ConfigFlags -> InstallFlags -> InstallPlan -> IO [(PackageIdentifier, String, FilePath)] symlinkBinaries platform comp configFlags installFlags plan = case flagToMaybe (installSymlinkBinDir installFlags) of Nothing -> return [] Just symlinkBinDir | null exes -> return [] | otherwise -> do publicBinDir <- canonicalizePath symlinkBinDir -- TODO: do we want to do this here? : -- createDirectoryIfMissing True publicBinDir fmap catMaybes $ sequence [ do privateBinDir <- pkgBinDir pkg ipid ok <- symlinkBinary publicBinDir privateBinDir publicExeName privateExeName if ok then return Nothing else return (Just (pkgid, publicExeName, privateBinDir </> privateExeName)) | (ReadyPackage (ConfiguredPackage _ _flags _ _) _, pkg, exe) <- exes , let pkgid = packageId pkg -- This is a bit dodgy; probably won't work for Backpack packages ipid = fakeUnitId pkgid publicExeName = PackageDescription.exeName exe privateExeName = prefix ++ publicExeName ++ suffix prefix = substTemplate pkgid ipid prefixTemplate suffix = substTemplate pkgid ipid suffixTemplate ] where exes = [ (cpkg, pkg, exe) | InstallPlan.Installed cpkg _ _ <- InstallPlan.toList plan , let pkg = pkgDescription cpkg , exe <- PackageDescription.executables pkg , PackageDescription.buildable (PackageDescription.buildInfo exe) ] pkgDescription :: ReadyPackage -> PackageDescription pkgDescription (ReadyPackage (ConfiguredPackage (SourcePackage _ pkg _ _) flags stanzas _) _) = case finalizePackageDescription flags (const True) platform cinfo [] (enableStanzas stanzas pkg) of Left _ -> error "finalizePackageDescription ReadyPackage failed" Right (desc, _) -> desc -- This is sadly rather complicated. We're kind of re-doing part of the -- configuration for the package. :-( pkgBinDir :: PackageDescription -> UnitId -> IO FilePath pkgBinDir pkg ipid = do defaultDirs <- InstallDirs.defaultInstallDirs compilerFlavor (fromFlag (configUserInstall configFlags)) (PackageDescription.hasLibs pkg) let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) absoluteDirs = InstallDirs.absoluteInstallDirs (packageId pkg) ipid cinfo InstallDirs.NoCopyDest platform templateDirs canonicalizePath (InstallDirs.bindir absoluteDirs) substTemplate pkgid ipid = InstallDirs.fromPathTemplate . InstallDirs.substPathTemplate env where env = InstallDirs.initialPathTemplateEnv pkgid ipid cinfo platform fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) cinfo = compilerInfo comp (CompilerId compilerFlavor _) = compilerInfoId cinfo symlinkBinary :: FilePath -- ^ The canonical path of the public bin dir -- eg @/home/user/bin@ -> FilePath -- ^ The canonical path of the private bin dir -- eg @/home/user/.cabal/bin@ -> String -- ^ The name of the executable to go in the public -- bin dir, eg @foo@ -> String -- ^ The name of the executable to in the private bin -- dir, eg @foo-1.0@ -> IO Bool -- ^ If creating the symlink was successful. @False@ -- if there was another file there already that we -- did not own. Other errors like permission errors -- just propagate as exceptions. 
symlinkBinary publicBindir privateBindir publicName privateName = do ok <- targetOkToOverwrite (publicBindir </> publicName) (privateBindir </> privateName) case ok of NotOurFile -> return False NotExists -> mkLink >> return True OkToOverwrite -> rmLink >> mkLink >> return True where relativeBindir = makeRelative publicBindir privateBindir mkLink = createSymbolicLink (relativeBindir </> privateName) (publicBindir </> publicName) rmLink = removeLink (publicBindir </> publicName) -- | Check a file path of a symlink that we would like to create to see if it -- is OK. For it to be OK to overwrite it must either not already exist yet or -- be a symlink to our target (in which case we can assume ownership). -- targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private -- binary that we would like to create -> FilePath -- ^ The canonical path of the private binary. -- Use 'canonicalizePath' to make this. -> IO SymlinkStatus targetOkToOverwrite symlink target = handleNotExist $ do status <- getSymbolicLinkStatus symlink if not (isSymbolicLink status) then return NotOurFile else do target' <- canonicalizePath symlink -- This relies on canonicalizePath handling symlinks if target == target' then return OkToOverwrite else return NotOurFile where handleNotExist action = catchIO action $ \ioexception -> -- If the target doesn't exist then there's no problem overwriting it! if isDoesNotExistError ioexception then return NotExists else ioError ioexception data SymlinkStatus = NotExists -- ^ The file doesn't exist so we can make a symlink. | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll -- have to delete it first before we make a new symlink. | NotOurFile -- ^ A file already exists and it is not one of our existing -- symlinks (either because it is not a symlink or because -- it points somewhere other than our managed space). deriving Show -- | Take two canonical paths and produce a relative path to get from the first -- to the second, even if it means adding @..@ path components. -- makeRelative :: FilePath -> FilePath -> FilePath makeRelative a b = assert (isAbsolute a && isAbsolute b) $ let as = splitPath a bs = splitPath b commonLen = length $ takeWhile id $ zipWith (==) as bs in joinPath $ [ ".." | _ <- drop commonLen as ] ++ drop commonLen bs #endif
tolysz/prepare-ghcjs
spec-lts8/cabal/cabal-install/Distribution/Client/InstallSymlink.hs
bsd-3-clause
11,228
0
12
3,180
188
113
75
152
4
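The InstallSymlink record above ends with a small makeRelative helper that turns two canonical absolute paths into a relative path by inserting ".." components. Below is a standalone restatement of that logic with two sample inputs; the function name is my own and not cabal-install's exported API.

-- Illustrative sketch of the relative-path computation, not the cabal-install code.
import System.FilePath (splitPath, joinPath, isAbsolute)

relativeFromTo :: FilePath -> FilePath -> FilePath
relativeFromTo a b
  | not (isAbsolute a && isAbsolute b) = error "both paths must be absolute"
  | otherwise =
      let as        = splitPath a
          bs        = splitPath b
          commonLen = length (takeWhile id (zipWith (==) as bs))
      in joinPath ([".." | _ <- drop commonLen as] ++ drop commonLen bs)

main :: IO ()
main = do
  -- e.g. a symlink in ~/bin pointing into ~/.cabal/bin
  putStrLn (relativeFromTo "/home/user/bin/" "/home/user/.cabal/bin/")
  putStrLn (relativeFromTo "/home/user/bin/" "/opt/ghc/bin/")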
----------------------------------------------------------------------------- -- | -- Module : Codec.Archive.Tar.Entry -- Copyright : (c) 2007 Bjorn Bringert, -- 2008 Andrea Vezzosi, -- 2008-2009 Duncan Coutts -- License : BSD3 -- -- Maintainer : [email protected] -- Portability : portable -- -- Types and functions to manipulate tar entries. -- -- While the "Codec.Archive.Tar" module provides only the simple high level -- API, this module provides full access to the details of tar entries. This -- lets you inspect all the meta-data, construct entries and handle error cases -- more precisely. -- -- This module uses common names and so is designed to be imported qualified: -- -- > import qualified Codec.Archive.Tar as Tar -- > import qualified Codec.Archive.Tar.Entry as Tar -- ----------------------------------------------------------------------------- module Codec.Archive.Tar.Entry ( -- * Tar entry and associated types Entry(..), --TODO: should be the following with the Entry constructor not exported, -- but haddock cannot document that properly -- see http://trac.haskell.org/haddock/ticket/3 --Entry(filePath, fileMode, ownerId, groupId, fileSize, modTime, -- fileType, linkTarget, headerExt, fileContent), entryPath, EntryContent(..), Ownership(..), FileSize, Permissions, EpochTime, DevMajor, DevMinor, TypeCode, Format(..), -- * Constructing simple entry values simpleEntry, fileEntry, directoryEntry, -- * Standard file permissions -- | For maximum portability when constructing archives use only these file -- permissions. ordinaryFilePermissions, executableFilePermissions, directoryPermissions, -- * Constructing entries from disk files packFileEntry, packDirectoryEntry, getDirectoryContentsRecursive, -- * TarPath type TarPath, toTarPath, fromTarPath, fromTarPathToPosixPath, fromTarPathToWindowsPath, -- * LinkTarget type LinkTarget, toLinkTarget, fromLinkTarget, fromLinkTargetToPosixPath, fromLinkTargetToWindowsPath, ) where import Codec.Archive.Tar.Types import Codec.Archive.Tar.Pack
waldheinz/ads
src/lib/Codec/Archive/Tar/Entry.hs
gpl-3.0
2,201
0
5
409
168
127
41
33
0
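A short usage sketch for the Codec.Archive.Tar.Entry record above, following the qualified-import convention its header comment recommends. It assumes the companion Codec.Archive.Tar module exposes the usual write function for serialising entries; the file name and payload here are invented.

-- Illustrative sketch; assumes Codec.Archive.Tar.write is available alongside
-- the Entry module shown above.
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Data.ByteString.Lazy.Char8 as LBS

main :: IO ()
main =
  case Tar.toTarPath False "hello/world.txt" of     -- False: not a directory
    Left err      -> putStrLn ("bad tar path: " ++ err)
    Right tarPath -> do
      let entry   = Tar.fileEntry tarPath (LBS.pack "hello, tar\n")
          archive = Tar.write [entry]
      LBS.writeFile "example.tar" archive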
{-# LANGUAGE TypeFamilies, EmptyDataDecls, FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances, OverlappingInstances, UndecidableInstances #-}

module NonLinearLHS where

type family E a b
type instance E a a = [a]

foo :: E [Int] (E Int Int) -> Int
foo = sum . concat

data family F a b
data instance F a a = MkF [a]

goo :: F Int Int -> F Bool Bool
goo (MkF xs) = MkF $ map odd xs

-- HList-like type equality
data True; data False;

type family EqTy a b
type instance EqTy a a = True

class EqTyP a b result
instance (EqTy a b ~ isEq, Proxy isEq result) => EqTyP a b result

class Proxy inp out
instance (result ~ True) => Proxy True result
instance (result ~ False) => Proxy notTrue result

testTrue :: EqTyP Int Int r => r
testTrue = undefined

testFalse :: EqTyP Int Bool r => r
testFalse = undefined
frantisekfarka/ghc-dsi
testsuite/tests/indexed-types/should_compile/NonLinearLHS.hs
bsd-3-clause
848
0
8
170
295
160
135
-1
-1
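The NonLinearLHS test above encodes type-level equality with empty True/False data types and an overlapping Proxy class. On later GHCs the same idea is commonly phrased as a closed type family over promoted booleans; the sketch below is my phrasing, not part of the test case, and it also reflects the answer back to a runtime Bool so it can be demonstrated.

-- Illustrative sketch only; not part of the NonLinearLHS test case above.
{-# LANGUAGE TypeFamilies, DataKinds, KindSignatures, ScopedTypeVariables #-}
import Data.Proxy

-- Closed type family: the non-linear first equation wins when both
-- arguments are the same type.
type family EqTy a b :: Bool where
  EqTy a a = 'True
  EqTy a b = 'False

-- Reflect the type-level Bool back to a value, so we can print it.
class KnownBool (b :: Bool) where boolVal :: Proxy b -> Bool
instance KnownBool 'True  where boolVal _ = True
instance KnownBool 'False where boolVal _ = False

sameType :: forall a b. KnownBool (EqTy a b) => Proxy a -> Proxy b -> Bool
sameType _ _ = boolVal (Proxy :: Proxy (EqTy a b))

main :: IO ()
main = do
  print (sameType (Proxy :: Proxy Int) (Proxy :: Proxy Int))   -- True
  print (sameType (Proxy :: Proxy Int) (Proxy :: Proxy Bool))  -- False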
module Main (main) where import Control.Monad.State import Data.Char import Data.List import System.Directory import System.Environment import System.FilePath import BuildInfo import FilenameDescr import Change import Utils import Tar -- TODO: -- * Check installed trees too -- * Check hashbangs sizeChangeThresholds :: [(Integer, -- Theshold only applies if one of -- the files is at least this big Integer)] -- Size changed if the larger file's -- size is at least this %age of the -- smaller file's size sizeChangeThresholds = [( 1000, 150), (50 * 1000, 110)] main :: IO () main = do args <- getArgs (ignoreSizeChanges, p1, p2) <- case args of [p1, p2] -> return (False, p1, p2) ["--ignore-size-changes", p1, p2] -> return (True, p1, p2) _ -> die ["Bad args. Need 2 filepaths."] doFileOrDirectory ignoreSizeChanges p1 p2 doFileOrDirectory :: Bool -> FilePath -> FilePath -> IO () doFileOrDirectory ignoreSizeChanges p1 p2 = do b <- doesDirectoryExist p1 let doit = if b then doDirectory else doFile doit ignoreSizeChanges p1 p2 doDirectory :: Bool -> FilePath -> FilePath -> IO () doDirectory ignoreSizeChanges p1 p2 = do fs1 <- getDirectoryContents p1 fs2 <- getDirectoryContents p2 let isVersionChar c = isDigit c || c == '.' mkFileInfo "." = return [] mkFileInfo ".." = return [] mkFileInfo fp@('g':'h':'c':'-':x:xs) | isDigit x = return [(("ghc-", "VERSION", dropWhile isVersionChar xs), fp)] | otherwise = die ["No version number in " ++ show fp] mkFileInfo fp = do warn ["Unrecognised filename " ++ show fp] return [] fss1' <- mapM mkFileInfo fs1 fss2' <- mapM mkFileInfo fs2 let fs1' = sort $ concat fss1' fs2' = sort $ concat fss2' putBreak = putStrLn "==========" extraFile d fp = do putBreak putStrLn ("Extra file in " ++ show d ++ ": " ++ show fp) doFiles [] [] = do putBreak putStrLn "Done." doFiles ((_, fp) : xs) [] = do extraFile p1 fp doFiles xs [] doFiles [] ((_, fp) : ys) = do extraFile p2 fp doFiles [] ys doFiles xs@((fpc1, fp1) : xs') ys@((fpc2, fp2) : ys') = do case fpc1 `compare` fpc2 of EQ -> do putBreak putStrLn $ unwords ["Doing", show fp1, show fp2] doFile ignoreSizeChanges (p1 </> fp1) (p2 </> fp2) doFiles xs' ys' LT -> do extraFile p1 fp1 doFiles xs' ys GT -> do extraFile p2 fp2 doFiles xs ys' doFiles fs1' fs2' doFile :: Bool -> FilePath -> FilePath -> IO () doFile ignoreSizeChanges bd1 bd2 = do tls1 <- readTarLines bd1 tls2 <- readTarLines bd2 let mWays1 = findWays tls1 mWays2 = findWays tls2 wayDifferences <- case (mWays1, mWays2) of (Nothing, Nothing) -> return [] (Just ways1, Just ways2) -> return $ diffWays ways1 ways2 _ -> die ["One input has ways, but the other doesn't"] (content1, tvm1) <- dieOnErrors $ mkContents mWays1 tls1 (content2, tvm2) <- dieOnErrors $ mkContents mWays2 tls2 let sortedContent1 = sortByFst content1 sortedContent2 = sortByFst content2 (nubProbs1, nubbedContent1) = nubContents sortedContent1 (nubProbs2, nubbedContent2) = nubContents sortedContent2 differences = compareContent mWays1 nubbedContent1 mWays2 nubbedContent2 allProbs = map First nubProbs1 ++ map Second nubProbs2 ++ diffThingVersionMap tvm1 tvm2 ++ wayDifferences ++ differences wantedProbs = if ignoreSizeChanges then filter (not . isSizeChange) allProbs else allProbs mapM_ (putStrLn . pprFileChange) wantedProbs -- *nix bindists have ways. -- Windows "bindists", install trees, and testsuites don't. 
findWays :: [TarLine] -> Maybe Ways findWays tls = msum $ map f tls where f tl = case re regex (tlFileName tl) of Just [dashedWays] -> Just (unSepList '-' dashedWays) _ -> Nothing regex = "/libraries/base/dist-install/build/\\.depend-(.*)\\.haskell" diffWays :: Ways -> Ways -> [FileChange] diffWays ws1 ws2 = f (sort ws1) (sort ws2) where f [] [] = [] f xs [] = map (First . ExtraWay) xs f [] ys = map (Second . ExtraWay) ys f xs@(x : xs') ys@(y : ys') = case x `compare` y of LT -> First (ExtraWay x) : f xs' ys GT -> Second (ExtraWay y) : f xs ys' EQ -> f xs' ys' diffThingVersionMap :: ThingVersionMap -> ThingVersionMap -> [FileChange] diffThingVersionMap tvm1 tvm2 = f (sortByFst tvm1) (sortByFst tvm2) where f [] [] = [] f xs [] = map (First . ExtraThing . fst) xs f [] ys = map (Second . ExtraThing . fst) ys f xs@((xt, xv) : xs') ys@((yt, yv) : ys') = case xt `compare` yt of LT -> First (ExtraThing xt) : f xs' ys GT -> Second (ExtraThing yt) : f xs ys' EQ -> let this = if xv == yv then [] else [Change (ThingVersionChanged xt xv yv)] in this ++ f xs' ys' mkContents :: Maybe Ways -> [TarLine] -> Either Errors ([(FilenameDescr, TarLine)], ThingVersionMap) mkContents mWays tls = case runStateT (mapM f tls) (emptyBuildInfo mWays) of Nothing -> Left ["Can't happen: mkContents: Nothing"] Just (xs, finalBuildInfo) -> case concat $ map (checkContent finalBuildInfo) xs of [] -> Right (xs, biThingVersionMap finalBuildInfo) errs -> Left errs where f tl = do fnd <- mkFilePathDescr (tlFileName tl) return (fnd, tl) nubContents :: [(FilenameDescr, TarLine)] -> ([Change], [(FilenameDescr, TarLine)]) nubContents [] = ([], []) nubContents [x] = ([], [x]) nubContents (x1@(fd1, tl1) : xs@((fd2, _) : _)) | fd1 == fd2 = (DuplicateFile (tlFileName tl1) : ps, xs') | otherwise = (ps, x1 : xs') where (ps, xs') = nubContents xs mkFilePathDescr :: FilePath -> BIMonad FilenameDescr mkFilePathDescr fp | Just [ghcVersion, _, middle, filename] <- re ("^ghc-" ++ versionRE ++ "(/.*)?/([^/]*)$") fp = do haveThingVersion "ghc" ghcVersion middle' <- mkMiddleDescr middle filename' <- mkFileNameDescr filename let fd = FP "ghc-" : VersionOf "ghc" : middle' ++ FP "/" : filename' return $ normaliseDescr fd | otherwise = return [FP fp] mkMiddleDescr :: FilePath -> BIMonad FilenameDescr mkMiddleDescr middle -- haddock docs in a Windows installed tree | Just [thing, thingVersion, _, src] <- re ("^/doc/html/libraries/([^/]*)-" ++ versionRE ++ "(/src)?$") middle = do haveThingVersion thing thingVersion return [FP "/doc/html/libraries/", FP thing, FP "-", VersionOf thing, FP src] `mplus` unchanged -- libraries in a Windows installed tree | Just [thing, thingVersion, _, rest] <- re ("^/lib/([^/]*)-" ++ versionRE ++ "(/.*)?$") middle = do haveThingVersion thing thingVersion return [FP "/lib/", FP thing, FP "-", VersionOf thing, FP rest] `mplus` unchanged -- Windows in-tree gcc | Just [prefix, _, _, gccVersion, _, rest] <- re ("^(/mingw/(lib(exec)?/gcc/mingw32/|share/gcc-))" ++ versionRE ++ "(/.*)?$") middle = do haveThingVersion "gcc" gccVersion return [FP prefix, VersionOf "gcc", FP rest] `mplus` unchanged | otherwise = unchanged where unchanged = return [FP middle] mkFileNameDescr :: FilePath -> BIMonad FilenameDescr mkFileNameDescr filename | Just [prog, ghcVersion, _, exe] <- re ("^(ghc|ghci|ghcii|haddock)-" ++ versionRE ++ "(\\.exe|\\.sh|)$") filename = do haveThingVersion "ghc" ghcVersion return [FP prog, FP "-", VersionOf "ghc", FP exe] `mplus` unchanged | Just [thing, thingVersion, _, ghcVersion, _, soDll] <- re ("^libHS(.*)-" ++ 
versionRE ++ "-ghc" ++ versionRE ++ "\\.(so|dll|dylib)$") filename = do haveThingVersion "ghc" ghcVersion haveThingVersion thing thingVersion return [FP "libHS", FP thing, FP "-", VersionOf thing, FP "-ghc", VersionOf "ghc", FP ".", FP soDll] `mplus` unchanged | Just [way, thingVersion, _, soDll] <- re ("^libHSrts(_.*)?-ghc" ++ versionRE ++ "\\.(so|dll|dylib)$") filename = do haveThingVersion "ghc" thingVersion return [FP "libHSrts", FP way, FP "-ghc", VersionOf "ghc", FP ".", FP soDll] `mplus` unchanged | Just [thingVersion, _, soDll] <- re ("^libHSffi-ghc" ++ versionRE ++ "\\.(so|dll|dylib)$") filename = do haveThingVersion "ghc" thingVersion return [FP "libHSffi-ghc", VersionOf "ghc", FP ".", FP soDll] `mplus` unchanged | Just [thing, thingVersion, _, way] <- re ("^libHS(.*)-" ++ versionRE ++ "(_.*)?\\.a$") filename = do haveThingVersion thing thingVersion return [FP "libHS", FP thing, FP "-", VersionOf thing, FP way, FP ".a"] `mplus` unchanged | Just [thing, thingVersion, _] <- re ("^HS(.*)-" ++ versionRE ++ "\\.o$") filename = do haveThingVersion thing thingVersion return [FP "HS", FP thing, FP "-", VersionOf thing, FP ".o"] `mplus` unchanged | Just [thing, thingVersion, _, thingHash] <- re ("^(.*)-" ++ versionRE ++ "-([0-9a-f]{32})\\.conf$") filename = do haveThingVersion thing thingVersion haveThingHash thing thingHash return [FP thing, FP "-", VersionOf thing, FP "-", HashOf thing, FP ".conf"] `mplus` unchanged | Just [thingVersion, _] <- re ("^mingw32-gcc-" ++ versionRE ++ "\\.exe$") filename = do haveThingVersion "gcc" thingVersion return [FP "mingw32-gcc-", VersionOf "gcc", FP ".exe"] `mplus` unchanged | Just [dashedWays, depType] <- re "^\\.depend-(.*)\\.(haskell|c_asm)" filename = do mWays <- getMaybeWays if Just (unSepList '-' dashedWays) == mWays then return [FP ".depend-", Ways, FP ".", FP depType] else unchanged | otherwise = unchanged where unchanged = return [FP filename] compareContent :: Maybe Ways -> [(FilenameDescr, TarLine)] -> Maybe Ways -> [(FilenameDescr, TarLine)] -> [FileChange] compareContent mWays1 xs1all mWays2 xs2all = f xs1all xs2all where f [] [] = [] f xs [] = concatMap (mkExtraFile mWays1 mWays2 First . tlFileName . snd) xs f [] ys = concatMap (mkExtraFile mWays2 mWays1 Second . tlFileName . 
snd) ys f xs1@((fd1, tl1) : xs1') xs2@((fd2, tl2) : xs2') = case fd1 `compare` fd2 of EQ -> map Change (compareTarLine tl1 tl2) ++ f xs1' xs2' LT -> mkExtraFile mWays1 mWays2 First (tlFileName tl1) ++ f xs1' xs2 GT -> mkExtraFile mWays2 mWays1 Second (tlFileName tl2) ++ f xs1 xs2' mkExtraFile mWaysMe mWaysThem mkFileChange filename = case (findFileWay filename, mWaysMe, mWaysThem) of (Just way, Just waysMe, Just waysThem) | (way `elem` waysMe) && not (way `elem` waysThem) -> [] _ -> [mkFileChange (ExtraFile filename)] findFileWay :: FilePath -> Maybe String findFileWay fp | Just [way] <- re "\\.([a-z_]+)_hi$" fp = Just way | Just [_, _, way] <- re ("libHS.*-" ++ versionRE ++ "_([a-z_]+).a$") fp = Just way | otherwise = Nothing compareTarLine :: TarLine -> TarLine -> [Change] compareTarLine tl1 tl2 = [ PermissionsChanged fn1 fn2 perms1 perms2 | perms1 /= perms2 ] ++ [ FileSizeChanged fn1 fn2 size1 size2 | sizeChanged ] where fn1 = tlFileName tl1 fn2 = tlFileName tl2 perms1 = tlPermissions tl1 perms2 = tlPermissions tl2 size1 = tlSize tl1 size2 = tlSize tl2 sizeMin = size1 `min` size2 sizeMax = size1 `max` size2 sizeChanged = any sizeChangeThresholdReached sizeChangeThresholds sizeChangeThresholdReached (reqSize, percentage) = (sizeMax >= reqSize) && (((100 * sizeMax) `div` sizeMin) >= percentage) versionRE :: String versionRE = "([0-9]+(\\.[0-9]+)*)"
ghc-android/ghc
distrib/compare/compare.hs
bsd-3-clause
13,480
0
20
4,596
4,254
2,149
2,105
289
9
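The compare.hs record above decides whether a file-size difference is worth reporting via sizeChangeThresholds: a change only counts once one of the files reaches a minimum size and the larger file is at least the given percentage of the smaller. The standalone restatement below keeps the same thresholds; the sample sizes are invented.

-- Illustrative sketch of the size-change check, extracted from the logic above.
sizeChangeThresholds :: [(Integer, Integer)]
sizeChangeThresholds = [ (     1000, 150)
                       , (50 * 1000, 110) ]

sizeChanged :: Integer -> Integer -> Bool
sizeChanged size1 size2 = any reached sizeChangeThresholds
  where
    sizeMin = size1 `min` size2
    sizeMax = size1 `max` size2
    reached (reqSize, percentage) =
      sizeMax >= reqSize && (100 * sizeMax) `div` sizeMin >= percentage

main :: IO ()
main = mapM_ (print . uncurry sizeChanged)
  [ (800, 1200)        -- True: over 1000 bytes and grew by 50%
  , (900, 999)         -- False: both files too small to care
  , (60000, 64000)     -- False: big, but under the 110% threshold
  , (60000, 70000)     -- True: big and over 110%
  ]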
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} module Hadoop.Protos.ClientNamenodeProtocolProtos.ConcatResponseProto (ConcatResponseProto(..)) where import Prelude ((+), (/)) import qualified Prelude as Prelude' import qualified Data.Typeable as Prelude' import qualified Data.Data as Prelude' import qualified Text.ProtocolBuffers.Header as P' data ConcatResponseProto = ConcatResponseProto{} deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data) instance P'.Mergeable ConcatResponseProto where mergeAppend ConcatResponseProto ConcatResponseProto = ConcatResponseProto instance P'.Default ConcatResponseProto where defaultValue = ConcatResponseProto instance P'.Wire ConcatResponseProto where wireSize ft' self'@(ConcatResponseProto) = case ft' of 10 -> calc'Size 11 -> P'.prependMessageSize calc'Size _ -> P'.wireSizeErr ft' self' where calc'Size = 0 wirePut ft' self'@(ConcatResponseProto) = case ft' of 10 -> put'Fields 11 -> do P'.putSize (P'.wireSize 10 self') put'Fields _ -> P'.wirePutErr ft' self' where put'Fields = do Prelude'.return () wireGet ft' = case ft' of 10 -> P'.getBareMessageWith update'Self 11 -> P'.getMessageWith update'Self _ -> P'.wireGetErr ft' where update'Self wire'Tag old'Self = case wire'Tag of _ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self instance P'.MessageAPI msg' (msg' -> ConcatResponseProto) ConcatResponseProto where getVal m' f' = f' m' instance P'.GPB ConcatResponseProto instance P'.ReflectDescriptor ConcatResponseProto where getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList []) reflectDescriptorInfo _ = Prelude'.read "DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.ConcatResponseProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientNamenodeProtocolProtos\"], baseName = MName \"ConcatResponseProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientNamenodeProtocolProtos\",\"ConcatResponseProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}" instance P'.TextType ConcatResponseProto where tellT = P'.tellSubMessage getT = P'.getSubMessage instance P'.TextMsg ConcatResponseProto where textPut msg = Prelude'.return () textGet = Prelude'.return P'.defaultValue
alexbiehl/hoop
hadoop-protos/src/Hadoop/Protos/ClientNamenodeProtocolProtos/ConcatResponseProto.hs
mit
2,815
1
16
529
554
291
263
53
0
{-# LANGUAGE CPP #-} module CommandLoop ( newCommandLoopState , startCommandLoop ) where import Control.Monad (when) import Data.IORef import Data.List (find, intercalate) #if __GLASGOW_HASKELL__ < 709 import Data.Traversable (traverse) #endif import MonadUtils (MonadIO, liftIO) import System.Exit (ExitCode(ExitFailure, ExitSuccess)) import qualified ErrUtils import qualified Exception (ExceptionMonad) import qualified GHC import qualified GHC.Paths import qualified Outputable import Types (ClientDirective(..), Command(..)) import Info (getIdentifierInfo, getType) import FindSymbol (findSymbol) type CommandObj = (Command, [String]) type ClientSend = ClientDirective -> IO () data State = State { stateWarningsEnabled :: Bool } newCommandLoopState :: IO (IORef State) newCommandLoopState = do newIORef $ State { stateWarningsEnabled = True } withWarnings :: (MonadIO m, Exception.ExceptionMonad m) => IORef State -> Bool -> m a -> m a withWarnings state warningsValue action = do beforeState <- liftIO $ getWarnings liftIO $ setWarnings warningsValue action `GHC.gfinally` (liftIO $ setWarnings beforeState) where getWarnings :: IO Bool getWarnings = readIORef state >>= return . stateWarningsEnabled setWarnings :: Bool -> IO () setWarnings val = modifyIORef state $ \s -> s { stateWarningsEnabled = val } startCommandLoop :: IORef State -> ClientSend -> IO (Maybe CommandObj) -> [String] -> Maybe Command -> IO () startCommandLoop state clientSend getNextCommand initialGhcOpts mbInitial = do continue <- GHC.runGhc (Just GHC.Paths.libdir) $ do configOk <- GHC.gcatch (configSession state clientSend initialGhcOpts >> return True) handleConfigError if configOk then do doMaybe mbInitial $ \cmd -> sendErrors (runCommand state clientSend cmd) processNextCommand False else processNextCommand True case continue of Nothing -> -- Exit return () Just (cmd, ghcOpts) -> startCommandLoop state clientSend getNextCommand ghcOpts (Just cmd) where processNextCommand :: Bool -> GHC.Ghc (Maybe CommandObj) processNextCommand forceReconfig = do mbNextCmd <- liftIO getNextCommand case mbNextCmd of Nothing -> -- Exit return Nothing Just (cmd, ghcOpts) -> if forceReconfig || (ghcOpts /= initialGhcOpts) then return (Just (cmd, ghcOpts)) else sendErrors (runCommand state clientSend cmd) >> processNextCommand False sendErrors :: GHC.Ghc () -> GHC.Ghc () sendErrors action = GHC.gcatch action (\x -> handleConfigError x >> return ()) handleConfigError :: GHC.GhcException -> GHC.Ghc Bool handleConfigError e = do liftIO $ mapM_ clientSend [ ClientStderr (GHC.showGhcException e "") , ClientExit (ExitFailure 1) ] return False doMaybe :: Monad m => Maybe a -> (a -> m ()) -> m () doMaybe Nothing _ = return () doMaybe (Just x) f = f x configSession :: IORef State -> ClientSend -> [String] -> GHC.Ghc () configSession state clientSend ghcOpts = do initialDynFlags <- GHC.getSessionDynFlags let updatedDynFlags = initialDynFlags { GHC.log_action = logAction state clientSend , GHC.ghcLink = GHC.NoLink , GHC.hscTarget = GHC.HscInterpreted } (finalDynFlags, _, _) <- GHC.parseDynamicFlags updatedDynFlags (map GHC.noLoc ghcOpts) _ <- GHC.setSessionDynFlags finalDynFlags return () runCommand :: IORef State -> ClientSend -> Command -> GHC.Ghc () runCommand _ clientSend (CmdCheck file) = do let noPhase = Nothing target <- GHC.guessTarget file noPhase GHC.setTargets [target] let handler err = GHC.printException err >> return GHC.Failed flag <- GHC.handleSourceError handler (GHC.load GHC.LoadAllTargets) liftIO $ case flag of GHC.Succeeded -> clientSend 
(ClientExit ExitSuccess) GHC.Failed -> clientSend (ClientExit (ExitFailure 1)) runCommand _ clientSend (CmdModuleFile moduleName) = do moduleGraph <- GHC.getModuleGraph case find (moduleSummaryMatchesModuleName moduleName) moduleGraph of Nothing -> liftIO $ mapM_ clientSend [ ClientStderr "Module not found" , ClientExit (ExitFailure 1) ] Just modSummary -> case GHC.ml_hs_file (GHC.ms_location modSummary) of Nothing -> liftIO $ mapM_ clientSend [ ClientStderr "Module does not have a source file" , ClientExit (ExitFailure 1) ] Just file -> liftIO $ mapM_ clientSend [ ClientStdout file , ClientExit ExitSuccess ] where moduleSummaryMatchesModuleName modName modSummary = modName == (GHC.moduleNameString . GHC.moduleName . GHC.ms_mod) modSummary runCommand state clientSend (CmdInfo file identifier) = do result <- withWarnings state False $ getIdentifierInfo file identifier case result of Left err -> liftIO $ mapM_ clientSend [ ClientStderr err , ClientExit (ExitFailure 1) ] Right info -> liftIO $ mapM_ clientSend [ ClientStdout info , ClientExit ExitSuccess ] runCommand state clientSend (CmdType file (line, col)) = do result <- withWarnings state False $ getType file (line, col) case result of Left err -> liftIO $ mapM_ clientSend [ ClientStderr err , ClientExit (ExitFailure 1) ] Right types -> liftIO $ do mapM_ (clientSend . ClientStdout . formatType) types clientSend (ClientExit ExitSuccess) where formatType :: ((Int, Int, Int, Int), String) -> String formatType ((startLine, startCol, endLine, endCol), t) = concat [ show startLine , " " , show startCol , " " , show endLine , " " , show endCol , " " , "\"", t, "\"" ] runCommand state clientSend (CmdFindSymbol symbol files) = do result <- withWarnings state False $ findSymbol symbol files case result of [] -> liftIO $ mapM_ clientSend [ ClientStderr $ "Couldn't find modules containing '" ++ symbol ++ "'" , ClientExit (ExitFailure 1) ] modules -> liftIO $ mapM_ clientSend [ ClientStdout (formatModules modules) , ClientExit ExitSuccess ] where formatModules = intercalate "\n" #if __GLASGOW_HASKELL__ >= 706 logAction :: IORef State -> ClientSend -> GHC.DynFlags -> GHC.Severity -> GHC.SrcSpan -> Outputable.PprStyle -> ErrUtils.MsgDoc -> IO () logAction state clientSend dflags severity srcspan style msg = let out = Outputable.renderWithStyle dflags fullMsg style _ = severity in logActionSend state clientSend severity out where fullMsg = ErrUtils.mkLocMessage severity srcspan msg #else logAction :: IORef State -> ClientSend -> GHC.Severity -> GHC.SrcSpan -> Outputable.PprStyle -> ErrUtils.Message -> IO () logAction state clientSend severity srcspan style msg = let out = Outputable.renderWithStyle fullMsg style _ = severity in logActionSend state clientSend severity out where fullMsg = ErrUtils.mkLocMessage srcspan msg #endif logActionSend :: IORef State -> ClientSend -> GHC.Severity -> String -> IO () logActionSend state clientSend severity out = do currentState <- readIORef state when (not (isWarning severity) || stateWarningsEnabled currentState) $ clientSend (ClientStdout out) where isWarning :: GHC.Severity -> Bool isWarning GHC.SevWarning = True isWarning _ = False
dan-t/hdevtools
src/CommandLoop.hs
mit
8,173
0
18
2,398
2,208
1,107
1,101
164
7
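The CommandLoop record above brackets GHC actions with withWarnings, which temporarily flips a flag held in an IORef and restores the previous value even when the action throws (via gfinally). The same shape with plain IO exceptions looks like the sketch below; the names are invented for illustration.

-- Illustrative sketch of the save/set/restore bracket, stripped of GHC types.
import Control.Exception (finally)
import Data.IORef

withFlag :: IORef Bool -> Bool -> IO a -> IO a
withFlag ref newValue action = do
  old <- readIORef ref
  writeIORef ref newValue
  action `finally` writeIORef ref old

main :: IO ()
main = do
  warningsEnabled <- newIORef True
  withFlag warningsEnabled False $ do
    v <- readIORef warningsEnabled
    putStrLn ("inside the bracket, flag is " ++ show v)   -- False
  v <- readIORef warningsEnabled
  putStrLn ("after the bracket, flag is " ++ show v)      -- True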
import Crypto

import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString as B
import qualified Data.LargeWord as LGW
import qualified Data.Word as W

crack :: LGW.Word128 -> W.Word64 -> B.ByteString -> B.ByteString
crack key nonce ct = aesCTR key nonce ct

main :: IO ()
main = do
  print $ crack (aesKey . C8.pack $ "YELLOW SUBMARINE") 0 (fromBase64 "L77na/nrFsKvynd6HzOoG7GHTLXsTVu9qvY/2syLXzhPweyyMTJULu/6/kXX0KSvoOLSFQ==")
asib/cryptopals-haskell
s3ch18.hs
mit
446
0
12
63
139
75
64
10
1
{-# LANGUAGE OverloadedStrings #-} module Database.LEsqueleto.LSql where import Control.Applicative import Data.Attoparsec.Text import qualified Data.Char as Char import qualified Data.Text as Text -- | Represent the AST for lsql statements. data Command = Command { commandSelect :: Select , commandTerms :: Terms , commandTables :: Tables , commandWhere :: Maybe Where , commandOrderBy :: Maybe OrderBy , commandLimit :: Maybe Limit , commandOffset :: Maybe Offset } deriving (Show) data Select = Select | PSelect deriving (Eq, Show) --data Terms = Term Term | Terms Terms Term | TermsAll data Terms = Terms [Term] | TermsAll deriving (Show) data Tables = Table String | Tables Tables Join String BExpr deriving (Show) data Join = InnerJoin | LeftOuterJoin | RightOuterJoin | FullOuterJoin -- | CrossJoin deriving (Show) data Where = Where BExpr deriving (Show) data OrderBy = OrderBy [Order] deriving (Show) data Order = OrderAsc Term | OrderDesc Term deriving (Show) data Limit = Limit Integer deriving (Show) data Offset = Offset Integer deriving (Show) data Term = TermTF String TermField | TermF TermField deriving (Show) data TermField = Field String | FieldAll data BExpr = BExprAnd BExpr BExpr | BExprOr BExpr BExpr | BExprBinOp B BinOp B | BExprNull Term | BExprNotNull Term | BExprNot BExpr deriving (Show) data BinOp = BinEq | BinNEq | BinGE | BinG | BinLE | BinL deriving (Show) data B = BTerm Term | BAnti String | BConst C deriving (Show) data C = CBool Bool | CString String | CInt Integer | CDouble Double deriving (Show) instance Show TermField where show (Field s) = s show (FieldAll) = "*" parseCommand :: Parser Command parseCommand = do select <- parseSelect terms <- parseTerms _ <- parseFrom tables <- parseTables whereM <- parseWhere orderByM <- parseOrderBy limitM <- parseLimit offsetM <- parseOffset parseComma return $ Command select terms tables whereM orderByM limitM offsetM where takeNonSpace = takeWhile1 (not . Char.isSpace) takeAlphaNum = takeWhile1 Char.isAlphaNum takeUpperAlphaNum = do an <- takeAlphaNum return $ Text.map Char.toUpper an parseComma = do skipSpace -- Check for optional comma. commaM <- peekChar case commaM of Nothing -> return () Just c -> if c == ';' then return () else do rest <- takeText error $ "Error parsing from: `" ++ (Text.unpack rest) ++ "`" -- maybe (return ()) -- try (char ';' >> skipSpace) -- end <- atEnd -- unless end $ do -- rest <- takeText -- error $ "Error parsing from: `"++ (Text.unpack rest) ++"`" parseSelect = do skipSpace select <- takeUpperAlphaNum case select of "SELECT" -> return Select "PSELECT" -> return PSelect _ -> fail $ "Unknown keywork `" ++ (Text.unpack select) ++ "`. Use `SELECT` or `PSELECT`." parseTerms = let parseTerms' = do head <- parseTerm tail <- (do skipSpace _ <- char ',' parseTerms' ) <|> (return []) return $ head:tail in (skipSpace >> char '*' >> (return TermsAll)) <|> (parseTerms' >>= return . Terms) parseFrom = do skipSpace asciiCI "FROM" parseTerm = do skipSpace ( do table <- takeAlphaNum _ <- char '.' field <- parseField return $ TermTF (Text.unpack table) field ) <|> (parseField >>= (return . TermF)) -- Does not skip spaces!! parseField = (char '*' >> (return FieldAll)) <|> ( takeAlphaNum >>= (return . Field. 
Text.unpack)) parseTables = let parseTables' acc = ( do skipSpace join <- ( asciiCI "INNER JOIN" >> (return InnerJoin)) <|> ( asciiCI "OUTER JOIN" >> (return LeftOuterJoin)) <|> ( asciiCI "LEFT OUTER JOIN" >> (return LeftOuterJoin)) <|> ( asciiCI "RIGHT OUTER JOIN" >> (return RightOuterJoin)) <|> ( asciiCI "FULL OUTER JOIN" >> (return FullOuterJoin)) -- <|> --( asciiCI "CROSS JOIN" >> (return CrossJoin)) skipSpace table <- takeAlphaNum skipSpace _ <- asciiCI "ON" bexpr <- parseBExpr parseTables' $ Tables acc join (Text.unpack table) bexpr ) <|> (return acc) in do skipSpace table <- takeAlphaNum parseTables' $ Table $ Text.unpack table parseWhere = ( do skipSpace _ <- asciiCI "WHERE" bexp <- parseBExpr return $ Just $ Where bexp ) <|> (return Nothing) parseOrderBy = ( do skipSpace _ <- asciiCI "ORDER BY" orders <- parseOrders return $ Just $ OrderBy orders ) <|> (return Nothing) where parseOrders = do term <- parseTerm order <- ( skipSpace >> asciiCI "ASC" >> (return OrderAsc)) <|> ( skipSpace >> asciiCI "DESC" >> (return OrderDesc)) <|> ( return OrderAsc) tail <- ( do skipSpace _ <- asciiCI "," parseOrders ) <|> (return []) return $ (order term):tail parseLimit = ( do skipSpace _ <- asciiCI "LIMIT" skipSpace limit <- decimal return $ Just $ Limit limit ) <|> (return Nothing) parseOffset = ( do skipSpace _ <- asciiCI "OFFSET" skipSpace limit <- decimal return $ Just $ Offset limit ) <|> (return Nothing) parseBExpr = do expr1 <- ( do skipSpace _ <- char '(' res <- parseBExpr skipSpace _ <- char ')' return res ) <|> ( do skipSpace _ <- asciiCI "NOT" res <- parseBExpr return $ BExprNot res ) <|> ( do term <- parseTerm skipSpace _ <- asciiCI "IS NULL" return $ BExprNull term ) <|> ( do term <- parseTerm skipSpace _ <- asciiCI "IS NOT NULL" return $ BExprNotNull term ) <|> ( do b1 <- parseB op <- parseBOp b2 <- parseB return $ BExprBinOp b1 op b2 ) ( do skipSpace -- temp <- takeAlphaNum -- when (temp /= "where" && temp /= "and") $ -- error $ "here: " ++ (show expr1) ++ " **** " ++ (Text.unpack temp) constr <- (asciiCI "AND" >> (return BExprAnd)) <|> (asciiCI "OR" >> peekChar >>= (maybe (return BExprOr) $ \c -> if Char.isSpace c then return BExprOr else fail "OR: Some other keyword" )) expr2 <- parseBExpr return $ constr expr1 expr2 ) <|> (return expr1) where parseSQLString = takeWhile1 (/= '\'') parseConst = skipSpace >> ( asciiCI "TRUE" >> return (CBool True)) <|> ( asciiCI "FALSE" >> return (CBool False)) <|> ( do _ <- char '\'' skipSpace str <- parseSQLString skipSpace _ <- char '\'' return $ CString $ Text.unpack str ) <|> ( do int <- signed decimal next <- peekChar case next of Just '.' -> fail "this is a double" _ -> return $ CInt int ) <|> ( double >>= (return . CDouble)) parseB = ( do skipSpace _ <- asciiCI "#{" skipSpace var <- takeWhile1 (/= '}')--takeNonSpace -- TODO: maybe make this into a [String] and stop at '}' skipSpace _ <- char '}' return $ BAnti $ Text.unpack var ) <|> ( do term <- parseTerm return $ BTerm term ) <|> ( do c <- parseConst return $ BConst c ) parseBOp = do skipSpace op <- takeNonSpace return $ case op of "==" -> BinEq "!=" -> BinNEq ">=" -> BinGE ">" -> BinG "<=" -> BinLE "<" -> BinL t -> error $ "Invalid binop `" ++ (Text.unpack t) ++ "`" -- >> ( asciiCI "==" >> return BinEq) <|> -- ( asciiCI ">=" >> return BinGE) <|> -- ( char '>' >> return BinG) <|> -- ( asciiCI "<=" >> return BinLE) <|> -- ( char '<' >> return BinL) <|> -- ( takeAlphaNum >>= \t -> fail $ "Invalid binop `" ++ (Text.unpack t) ++ "`")
jprider63/LMonad-Yesod
src/Database/LEsqueleto/LSql.hs
mit
10,442
0
23
4,895
2,346
1,165
1,181
250
13
--this module provides some utility functions for lists
--author: Tristan Bepler ([email protected])

module Utils where

import Data.List
import qualified Data.Map as Map
import qualified Data.Set as Set

rmdups :: Ord a => [a] -> [a]
rmdups = rmdups' Set.empty
  where rmdups' _ [] = []
        rmdups' a (b:c) = if Set.member b a
                            then rmdups' a c
                            else b : rmdups' (Set.insert b a) c

quicksort :: Ord a => [a] -> [a]
quicksort [] = []
quicksort (p:xs) = (quicksort lesser) ++ [p] ++ (quicksort greater)
  where lesser = filter (< p) xs
        greater = filter (>= p) xs

select :: Ord a => Int -> [a] -> a
select n xs = (quicksort xs) !! n

range :: (Ord a, Num a) => [a] -> a
range xs = (maximum xs) - (minimum xs)

percentile :: (RealFrac a, Ord a) => a -> [a] -> a
percentile p [] = error "Cannot find the percentile of an empty list"
percentile p xs
  | p < (head ranks) = minimum xs
  | p > (last ranks) = maximum xs
  | index >= 0 = select index xs
  | otherwise = vbot + (p - pbot) * (vtop - vbot) / (ptop - pbot)
  where ranks = percentrank xs
        (Just index) = if findIndex (\x-> p == x) ranks /= Nothing
                         then findIndex (\x-> p == x) ranks
                         else Just (-1)
        ptop = minimum $ filter (\x-> x > p) ranks
        pbot = maximum $ filter (\x-> x < p) ranks
        vtop = select n xs where (Just n) = findIndex (\x-> ptop == x) ranks
        vbot = select n xs where (Just n) = findIndex (\x-> pbot == x) ranks

percentrank :: (RealFrac b) => [a] -> [b]
percentrank xs = map (\n-> 100.0 * ((fromIntegral n) - 0.5) / (fromIntegral m)) [1..m]
  where m = length xs

median :: (Fractional a, Ord a) => [a] -> a
median [] = error "Cannot find the median of an empty list"
median [x] = x
median xs = if odd $ length xs then selodd else seleven
  where selodd = select mid xs
        seleven = ((select mid xs) + (select (mid-1) xs)) / 2
        mid = (length xs) `div` 2

iqr :: (RealFrac a, Ord a) => [a] -> a
iqr [] = error "Cannot find the IQR of an empty list"
iqr xs = (percentile 75.0 xs) - (percentile 25.0 xs)

commonPrefix :: Eq a => [[a]] -> [a]
commonPrefix [] = []
commonPrefix l = foldr1 commonPrefix' l
  where commonPrefix' (x:xs) (y:ys) | x == y = x : commonPrefix' xs ys
        commonPrefix' _ _ = []

groupBy :: Ord k => (a -> k) -> [a] -> [(k, [a])]
groupBy fun ys = groupBy' fun ys Map.empty
  where groupBy' :: Ord k => (a -> k) -> [a] -> Map.Map k [a] -> [(k, [a])]
        groupBy' fun [] m = Map.toList m
        groupBy' fun (x:xs) m = if Map.member key m
                                  then groupBy' fun xs (Map.insert key (x : (m Map.! key)) m)
                                  else groupBy' fun xs (Map.insert key [x] m)
          where key = fun x
tbepler/PBM-Analysis
Utils.hs
mit
2,559
8
14
591
1,345
706
639
57
3
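-- A brief usage sketch for the Utils module above (a hypothetical Main, not
-- part of the original repo; it assumes Utils.hs from this record is on the
-- load path). It exercises median, percentile, iqr and the Map-backed groupBy.
module Main where

import Utils

main :: IO ()
main = do
  let xs = [4.0, 1.0, 3.0, 2.0, 5.0] :: [Double]
  print (median xs)                     -- 3.0
  print (percentile 25.0 xs)            -- 1.75 (first quartile, linear interpolation between ranks)
  print (iqr xs)                        -- 2.5
  print (groupBy even [1 :: Int .. 6])  -- [(False,[5,3,1]),(True,[6,4,2])]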
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE BangPatterns #-} module Network.WebSockets.Messaging.Connection where import Network.WebSockets hiding (send, Request, Message) import Control.Concurrent (forkIO) import Control.Concurrent.STM import Control.Applicative import Control.Monad (guard, forever, void, (>=>), mplus) import Control.Monad.IO.Class import Data.Aeson (encode, decode, ToJSON(..), FromJSON(..), fromJSON, Result(..)) import qualified Data.Aeson as Json import Data.Traversable (traverse) import Data.Foldable (traverse_) import Data.Maybe (fromMaybe) import qualified Data.Text as T import Data.IntMap (IntMap) import qualified Data.IntMap as IntMap import Control.Exception (catch) import Prelude hiding (catch) import Network.WebSockets.Messaging.Container import Network.WebSockets.Messaging.Message type Closable c a = c (Maybe a) type Handler r = Json.Value -> STM (IO r) type SubId = Int data Connection = Connection { outbox :: !(Closable TQueue Json.Value) , disconnected :: !(TVar Bool) , subId :: !(TVar SubId) , requestSubs :: !(TVar (IntMap (Handler Json.Value))) , notifySubs :: !(TVar (IntMap (Handler ()))) , reqId :: !(TVar ReqId) , reqMap :: !(TVar (IntMap (TMVar Json.Value))) } newtype Future a = Future (TMVar a) get :: Future a -> STM a get (Future var) = readTMVar var newConnection :: STM Connection newConnection = Connection <$> newTQueue <*> newTVar False <*> newTVar 0 <*> newTVar IntMap.empty <*> newTVar IntMap.empty <*> newTVar 0 <*> newTVar IntMap.empty requestAsync :: (Message req, FromJSON resp) => Connection -> req -> IO (Future resp) requestAsync conn@(Connection {..}) !req = do resp <- newEmptyTMVarIO fut <- newEmptyTMVarIO void $ forkIO $ do rqId <- atomically $ do rqId <- nextReqId conn modifyTVar' reqMap $! IntMap.insert rqId resp send conn $! Request rqId $! msgToJSON req return rqId js <- atomically $ do modifyTVar' reqMap $! IntMap.delete rqId readTMVar resp case fromJSON js of Json.Success dat -> atomically $! putTMVar fut $! dat Json.Error msg -> do atomically $! send conn $! ProtocolError $! T.pack msg error "malformed response" return $ Future fut request :: (Message req, FromJSON resp) => Connection -> req -> IO resp request conn@(Connection {..}) !req = do rqId <- atomically $ do rqId' <- readTVar reqId writeTVar reqId $! rqId' + 1 return rqId' resp <- newEmptyTMVarIO atomically $ do modifyTVar' reqMap $! IntMap.insert rqId resp send conn $! Request rqId $! msgToJSON req js <- atomically $ do modifyTVar' reqMap $! IntMap.delete rqId readTMVar resp case fromJSON js of Json.Success dat -> return dat Json.Error msg -> do atomically $! send conn $! ProtocolError $! T.pack msg error "malformed response" notify :: Message ntfy => Connection -> ntfy -> STM () notify conn = send conn . Notification . msgToJSON nextSubId :: Connection -> STM SubId nextSubId (Connection {..}) = do sId <- readTVar subId writeTVar subId $! sId + 1 return sId nextReqId :: Connection -> STM SubId nextReqId (Connection {..}) = do rqId <- readTVar reqId writeTVar reqId $! rqId + 1 return rqId onRequest :: (Message req, Message resp) => Connection -> (req -> IO resp) -> STM () onRequest conn@(Connection {..}) !handler = do sid <- nextSubId conn modifyTVar' requestSubs (IntMap.insert sid handler') where handler' js = case msgFromJSON js of Json.Success rq -> return $! 
msgToJSON <$> handler rq Error _ -> retry onNotify :: Message req => Connection -> (req -> IO ()) -> STM () onNotify conn@(Connection{..}) !handler = do sid <- nextSubId conn modifyTVar' notifySubs (IntMap.insert sid handler') where handler' js = case msgFromJSON js of Json.Success ntfy -> return $! handler ntfy Error _ -> retry onDisconnect :: Connection -> STM () -> STM () onDisconnect !(Connection {..}) !handler = readTVar disconnected >>= guard >> handler send :: Connection -> Container -> STM () send (Connection {..}) = writeTQueue outbox . Just . toJSON recvJson :: (TextProtocol p, FromJSON a) => WebSockets p (Maybe a) recvJson = decode <$> receiveData sendJson :: TextProtocol p => Json.Value -> WebSockets p () sendJson = sendTextData . encode sinkJson :: TextProtocol p => Sink p -> Json.Value -> IO () sinkJson sink = sendSink sink . DataMessage . Text . encode -- sinkJson sink js = sendSink sink . DataMessage . Text . encode $ (trace (show js) js) untilClosed :: Closable TQueue a -> (a -> STM b) -> (b -> IO c) -> IO () untilClosed chan handler after = loop where loop = atomically (readTQueue chan >>= traverse handler) >>= traverse_ (after >=> const loop) dispatch :: Connection -> Container -> IO () dispatch conn@(Connection {..}) !c = case c of Request rqId js -> do handler <- atomically $ do subs <- readTVar requestSubs let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs fmap Just trySubs `orElse` return Nothing void $ forkIO $ maybe invalidRequest respond handler where invalidRequest = atomically . send conn $ ProtocolError "unrecognized request" respond h = h >>= atomically . send conn . Response rqId Notification js -> do handler <- atomically $ do subs <- readTVar notifySubs let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs fmap Just trySubs `orElse` return Nothing void $ forkIO $ fromMaybe noHandler handler where noHandler = atomically . send conn $ ProtocolDebug "ignored notification" Response rqId js -> atomically $ do h <- IntMap.lookup rqId <$> readTVar reqMap case h of Nothing -> responseIgnored Just var -> putTMVar var js where responseIgnored = send conn $ ProtocolDebug "ignored response" _ -> return () -- TODO: print/log error? onConnect :: TextProtocol p => (Connection -> IO ()) -> WebSockets p () onConnect handler = do conn@(Connection {..}) <- liftIO $ atomically newConnection let replyInvalid = send conn $ ProtocolError "invalid message" handleWriteError (_ :: ConnectionError) = signalDisconnect handleReadError _ = liftIO signalDisconnect signalDisconnect = do atomically $ do writeTQueue outbox Nothing writeTVar disconnected True readLoop = forever $ do recvJson >>= liftIO . maybe (atomically $ replyInvalid) (dispatch conn) sink <- getSink liftIO $ do void . forkIO $ untilClosed outbox return (sinkJson sink) `catch` handleWriteError void . forkIO $ handler conn catchWsError readLoop handleReadError
leonidas/lambda-webdev
lib/Network/WebSockets/Messaging/Connection.hs
mit
7,321
4
20
1,965
2,434
1,191
1,243
183
5
module S01C06Spec where

import Test.Hspec

import S01C06

main :: IO()
main = hspec spec

spec :: Spec
spec = do
  describe "Hamming distance" $ do
    it "should correctly calculate the distance between two strings" $ do
      let fromstr = "this is a test"
      let tostr = "wokka wokka!!!"
      hamming fromstr tostr `shouldBe` 37
blast-hardcheese/cryptopals
test/S01C06Spec.hs
mit
362
0
15
106
92
46
46
12
1
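-- The spec above pins hamming "this is a test" "wokka wokka!!!" to 37, the
-- classic cryptopals set 1 / challenge 6 value. The tested module S01C06 is
-- not included in this record, so the following is only a guess at a minimal
-- implementation that would satisfy the spec: XOR corresponding character
-- codes and count the set bits.
module S01C06 (hamming) where

import Data.Bits (popCount, xor)
import Data.Char (ord)

hamming :: String -> String -> Int
hamming a b = sum (zipWith bitDiff a b)
  where bitDiff x y = popCount (ord x `xor` ord y)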
module LC where

missingNumber :: [Int] -> Int
missingNumber [] = 0
missingNumber [_] = 1
missingNumber (x:y:ys) = if y - x == 1 then missingNumber ys else x + 1
AriaFallah/leetcode
haskell/MissingNumber.hs
mit
162
0
8
33
79
43
36
5
2
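-- The recursive missingNumber above assumes a sorted input and walks adjacent
-- pairs. For the usual statement of the problem (n distinct values drawn from
-- [0..n], in any order) a closed-form version using the sum formula is common;
-- missingNumber' below is a hypothetical alternative sketched for comparison,
-- not part of the original repo.
missingNumber' :: [Int] -> Int
missingNumber' xs = n * (n + 1) `div` 2 - sum xs
  where n = length xs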
doubleMe x = x + x

doubleSmallNumber x = if x > 100
                        then x
                        else x*2

doubleSmallNumber' x = (if x > 100 then x else x*2) + 1
ikemonn/haskellPractice
LearnYouAHaskell/p5.hs
mit
174
1
8
80
71
36
35
5
2
{-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} -- | Module: Capnp.Repr -- Description: Type-level plumbing for wire-representations. -- -- This module provides facilities for working with the wire -- representations of capnproto objects at the type level. The most -- central part of this module is the 'Repr' type. -- -- Recommended reading: https://capnproto.org/encoding.html module Capnp.Repr ( -- * Type-level descriptions of wire representations. Repr(..) , PtrRepr(..) , ListRepr(..) , NormalListRepr(..) , DataSz(..) -- * Mapping representations to value types from "Capnp.Untyped" , Untyped , UntypedData , UntypedPtr , UntypedSomePtr , UntypedList , UntypedSomeList -- * Mapping types to their wire representations. , ReprFor , PtrReprFor -- * Relating the representations of lists & their elements. , Element(..) , ElemRepr , ListReprFor -- * Working with pointers , IsPtrRepr(..) , IsListPtrRepr(..) -- * Working with wire-encoded values , Raw(..) -- * Working with lists , List , length , index , setIndex -- * Allocating values , Allocate(..) -- * Shorthands for types , IsStruct , IsCap , IsPtr ) where import Prelude hiding (length) import qualified Capnp.Message as M import Capnp.Mutability (MaybeMutable(..), Mutability(..)) import Capnp.TraversalLimit (evalLimitT) import Capnp.Untyped ( Allocate(..) , DataSz(..) , ElemRepr , Element(..) , IsListPtrRepr(..) , IsPtrRepr(..) , ListRepr(..) , ListReprFor , MaybePtr(..) , NormalListRepr(..) , PtrRepr(..) , Repr(..) , Untyped , UntypedData , UntypedList , UntypedPtr , UntypedSomeList , UntypedSomePtr , Unwrapped ) import qualified Capnp.Untyped as U import Control.Monad.Primitive (PrimMonad, PrimState) import Data.Default (Default(..)) import Data.Int import Data.Kind (Type) import Data.Maybe (fromJust) import Data.Traversable (for) import Data.Word import GHC.Generics (Generic) -- | @'ReprFor' a@ denotes the Cap'n Proto wire represent of the type @a@. type family ReprFor (a :: Type) :: Repr type instance ReprFor () = 'Data 'Sz0 type instance ReprFor Bool = 'Data 'Sz1 type instance ReprFor Word8 = 'Data 'Sz8 type instance ReprFor Word16 = 'Data 'Sz16 type instance ReprFor Word32 = 'Data 'Sz32 type instance ReprFor Word64 = 'Data 'Sz64 type instance ReprFor Int8 = 'Data 'Sz8 type instance ReprFor Int16 = 'Data 'Sz16 type instance ReprFor Int32 = 'Data 'Sz32 type instance ReprFor Int64 = 'Data 'Sz64 type instance ReprFor Float = 'Data 'Sz32 type instance ReprFor Double = 'Data 'Sz64 type instance ReprFor (U.Struct mut) = 'Ptr ('Just 'Struct) type instance ReprFor (U.Cap mut) = 'Ptr ('Just 'Cap) type instance ReprFor (U.Ptr mut) = 'Ptr 'Nothing type instance ReprFor (U.List mut) = 'Ptr ('Just ('List 'Nothing)) type instance ReprFor (U.ListOf r mut) = 'Ptr ('Just ('List ('Just (ListReprFor r)))) type instance ReprFor (List a) = 'Ptr ('Just ('List ('Just (ListReprFor (ReprFor a))))) -- | @PtrReprFor r@ extracts the pointer represnetation in r; undefined if -- r is not a pointer representation. 
type family PtrReprFor (r :: Repr) :: Maybe PtrRepr where PtrReprFor ('Ptr pr) = pr -- | A @'Raw' mut a@ is an @a@ embedded in a capnproto message with mutability -- @mut@. newtype Raw (a :: Type ) (mut :: Mutability) = Raw { fromRaw :: U.Unwrapped (Untyped (ReprFor a) mut) } deriving instance Show (U.Unwrapped (Untyped (ReprFor a) mut)) => Show (Raw a mut) deriving instance Read (U.Unwrapped (Untyped (ReprFor a) mut)) => Read (Raw a mut) deriving instance Eq (U.Unwrapped (Untyped (ReprFor a) mut)) => Eq (Raw a mut) deriving instance Generic (U.Unwrapped (Untyped (ReprFor a) mut)) => Generic (Raw a mut) -- | A phantom type denoting capnproto lists of type @a@. data List a type ListElem a = ( U.Element (ReprFor a) , U.ListItem (ElemRepr (ListReprFor (ReprFor a))) ) -- | Get the length of a capnproto list. length :: ListElem a => Raw (List a) mut -> Int {-# INLINE length #-} length (Raw l) = U.length l -- | @'index' i list@ gets the @i@th element of the list. index :: forall a m mut. ( U.ReadCtx m mut , U.HasMessage (U.ListOf (ElemRepr (ListReprFor (ReprFor a)))) , ListElem a ) => Int -> Raw (List a) mut -> m (Raw a mut) {-# INLINE index #-} index i (Raw l) = Raw <$> do elt <- U.index i l fromElement @(ReprFor a) @m @mut (U.message @(U.ListOf (ElemRepr (ListReprFor (ReprFor a)))) l) elt -- | @'setIndex' value i list@ sets the @i@th element of @list@ to @value@. setIndex :: forall a m s. ( U.RWCtx m s , U.ListItem (ElemRepr (ListReprFor (ReprFor a))) , U.Element (ReprFor a) ) => Raw a ('Mut s) -> Int -> Raw (List a) ('Mut s) -> m () {-# INLINE setIndex #-} setIndex (Raw v) i (Raw l) = U.setIndex (toElement @(ReprFor a) @('Mut s) v) i l instance U.HasMessage (Untyped (ReprFor a)) => U.HasMessage (Raw a) where message (Raw r) = U.message @(Untyped (ReprFor a)) r instance U.MessageDefault (Untyped (ReprFor a)) => U.MessageDefault (Raw a) where messageDefault msg = Raw <$> U.messageDefault @(Untyped (ReprFor a)) msg instance U.MessageDefault (Raw a) => Default (Raw a 'Const) where def = fromJust $ evalLimitT maxBound $ U.messageDefault @(Raw a) M.empty instance ReprMaybeMutable (ReprFor a) => MaybeMutable (Raw a) where thaw (Raw v) = Raw <$> rThaw @(ReprFor a) v freeze (Raw v) = Raw <$> rFreeze @(ReprFor a) v unsafeThaw (Raw v) = Raw <$> rUnsafeThaw @(ReprFor a) v unsafeFreeze (Raw v) = Raw <$> rUnsafeFreeze @(ReprFor a) v {-# INLINE thaw #-} {-# INLINE freeze #-} {-# INLINE unsafeThaw #-} {-# INLINE unsafeFreeze #-} -- | Like MaybeMutable, but defined on the repr. 
Helper for implementing -- MaybeMutable (Raw a) class ReprMaybeMutable (r :: Repr) where rThaw :: (PrimMonad m, PrimState m ~ s) => Unwrapped (Untyped r 'Const) -> m (Unwrapped (Untyped r ('Mut s))) rUnsafeThaw :: (PrimMonad m, PrimState m ~ s) => Unwrapped (Untyped r 'Const) -> m (Unwrapped (Untyped r ('Mut s))) rFreeze :: (PrimMonad m, PrimState m ~ s) => Unwrapped (Untyped r ('Mut s)) -> m (Unwrapped (Untyped r 'Const)) rUnsafeFreeze :: (PrimMonad m, PrimState m ~ s) => Unwrapped (Untyped r ('Mut s)) -> m (Unwrapped (Untyped r 'Const)) instance ReprMaybeMutable ('Ptr 'Nothing) where rThaw p = do MaybePtr p' <- thaw (MaybePtr p) pure p' rFreeze p = do MaybePtr p' <- freeze (MaybePtr p) pure p' rUnsafeThaw p = do MaybePtr p' <- unsafeThaw (MaybePtr p) pure p' rUnsafeFreeze p = do MaybePtr p' <- unsafeFreeze (MaybePtr p) pure p' do let types = [ [t|'Just 'Struct|] , [t|'Just 'Cap|] , [t|'Just ('List 'Nothing)|] , [t|'Just ('List ('Just 'ListComposite))|] , [t|'Just ('List ('Just ('ListNormal 'NormalListPtr)))|] ] concat <$> for types (\t -> do [d|instance ReprMaybeMutable ('Ptr $t) where rThaw = thaw rFreeze = freeze rUnsafeThaw = thaw rUnsafeFreeze = freeze |]) instance ReprMaybeMutable ('Ptr ('Just ('List ('Just ('ListNormal ('NormalListData sz)))))) where rThaw = thaw rFreeze = freeze rUnsafeThaw = thaw rUnsafeFreeze = freeze instance ReprMaybeMutable ('Data sz) where rThaw = pure rFreeze = pure rUnsafeThaw = pure rUnsafeFreeze = pure -- | Constraint that @a@ is a struct type. type IsStruct a = ReprFor a ~ 'Ptr ('Just 'Struct) -- | Constraint that @a@ is a capability type. type IsCap a = ReprFor a ~ 'Ptr ('Just 'Cap) -- | Constraint that @a@ is a pointer type. type IsPtr a = ( ReprFor a ~ 'Ptr (PtrReprFor (ReprFor a)) , IsPtrRepr (PtrReprFor (ReprFor a)) )
zenhack/haskell-capnp
lib/Capnp/Repr.hs
mit
8,804
0
19
2,300
2,699
1,460
1,239
-1
-1
-- Problems/Problem044.hs
module Problems.Problem044 (p44) where

import Data.Set

main = print p44

p44 :: Int
p44 = head [a-b | a <- pentagonal, b <- takeWhile (<a) pentagonal, isPentagonal (a-b), isPentagonal (b+a)]

isPentagonal :: Int -> Bool
isPentagonal = (`Data.Set.member` Data.Set.fromList pentagonal)

pentagonal :: [Int]
pentagonal = Prelude.map (\x -> x * (3 * x - 1) `div` 2) [1..3000]
Sgoettschkes/learning
haskell/ProjectEuler/src/Problems/Problem044.hs
mit
400
0
12
65
181
102
79
9
1
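-- isPentagonal above does a Set lookup over the first 3000 pentagonal numbers,
-- so the bound 3000 is an implicit assumption of that solution. A bound-free
-- check is also possible; isPentagonal' below is a hypothetical alternative
-- sketch (not part of the original repo): x is pentagonal iff 1 + 24x is a
-- perfect square whose root is congruent to 5 modulo 6. The Double-based
-- square root is fine for Euler-sized inputs; an exact integer square root
-- would be safer for very large x.
isPentagonal' :: Int -> Bool
isPentagonal' x = r * r == 1 + 24 * x && r `mod` 6 == 5
  where r = round (sqrt (fromIntegral (1 + 24 * x)) :: Double)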
{-# LANGUAGE TemplateHaskell #-} module Diagrams.Plots.PlotArea ( P(..) , PlotArea , plotAreaWidth , plotAreaHeight , plotAreaLeft , plotAreaRight , plotAreaTop , plotAreaBottom , plotAreaBackground , plotArea , showPlot , placeOn , (<+) ) where import Diagrams.Prelude hiding (rotation, size) import Control.Lens (makeLenses, (^.)) import Diagrams.Plots.Axis import Diagrams.Plots.Types import Diagrams.Plots.Utils (text') -- | how to align a plot to the plot area data P = BL -- Bottom Left | TL -- Top Left | TR -- Top Right | BR -- Bottom Right data PlotArea = PlotArea { _plotAreaWidth :: !Double , _plotAreaHeight :: !Double , _plotAreaPlots :: ![DiaR2] , _plotAreaLeft :: !Axis -- ^ point map for left axis , _plotAreaTop :: !Axis -- ^ point map for top axis , _plotAreaRight :: !Axis -- ^ point map for right axis , _plotAreaBottom :: !Axis -- ^ point map for bottom axis , _plotAreaBackground :: !DiaR2 } makeLenses ''PlotArea -- | construct a plot area plotArea :: Double -- ^ width -> Double -- ^ height -> (AxisFn, AxisFn, AxisFn, AxisFn) -- ^ axes: left, top, right, bottom -> PlotArea plotArea w h (l, t, r, b) = PlotArea w h [] lAxis tAxis rAxis bAxis background where lAxis = makeAxis l h tAxis = makeAxis t w rAxis = makeAxis r h bAxis = makeAxis b w background = lwL 0 $ moveTo ((w/2) ^& (h/2)) $ rect w h showPlot :: PlotArea -> DiaR2 showPlot (PlotArea w h ps l t r b bgr) = mconcat [ drawAxis 'l' l , translateY h . drawAxis 't' $ t , translateX w . drawAxis 'r' $ r , drawAxis 'b' b , mconcat ps , bgr ] drawAxis :: Char -> Axis -> DiaR2 drawAxis p a | p == 'l' = (reflectX . rotateBy (1/4)) axis' <> mconcat ( flip map labels $ \((x,y), label) -> alignedText 1 0.5 label # rotateBy r # fontSizeO fontsize # font fontfamily # moveTo ((y+dx-0.1) ^& (x+dy)) ) | p == 't' = reflectY axis' <> mconcat ( flip map labels $ \((x,y), label) -> text' fontsize label # rotateBy r # moveTo ((x+dx) ^& (-y-dy)) ) | p == 'r' = rotateBy (1/4) axis' <> mconcat ( flip map labels $ \((x,y), label) -> alignedText 0 0.5 label # rotateBy r # fontSizeO fontsize # font fontfamily # moveTo ((-y-dx) ^& (x+dy)) ) | p == 'b' = axis' <> mconcat ( flip map labels $ \((x,y), label) -> let t | r == 0 = text label | otherwise = alignedText 1 0.5 label in t # rotateBy r # fontSizeO fontsize # font fontfamily # moveTo ((x+dx) ^& (y+dy-0.1)) ) | otherwise = undefined where axis' = a^.axisDiag labels = a^.axisLabels dx = a^.axisLabelOpt^.offsetX dy = a^.axisLabelOpt^.offsetY fontsize = a^.axisLabelOpt^.size fontfamily = a^.axisLabelOpt^.fontFamily r = a^.axisLabelOpt^.rotation {-# INLINE drawAxis #-} placeOn :: (PlotFn, P) -> PlotArea -> PlotArea placeOn (pltFn, p) area = plotAreaPlots %~ (mconcat plt:) $ area where plt = case p of BL -> pltFn bMap lMap TL -> pltFn tMap lMap TR -> pltFn tMap rMap BR -> pltFn bMap rMap lMap = area^.plotAreaLeft^.axisMap bMap = area^.plotAreaBottom^.axisMap tMap = area^.plotAreaTop^.axisMap rMap = area^.plotAreaRight^.axisMap (<+) :: PlotArea -> (PlotFn, P) -> PlotArea infixl 1 <+ (<+) = flip placeOn
kaizhang/haskell-plot
src/Diagrams/Plots/PlotArea.hs
mit
3,918
0
19
1,422
1,262
677
585
117
4
{-# LANGUAGE CPP #-}
module GHCJS.DOM.WheelEvent
       (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
        module GHCJS.DOM.JSFFI.Generated.WheelEvent
#else
        module Graphics.UI.Gtk.WebKit.DOM.WheelEvent
#endif
       ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.WheelEvent
#else
import Graphics.UI.Gtk.WebKit.DOM.WheelEvent
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/WheelEvent.hs
mit
435
0
5
39
33
26
7
4
0
-- stack script

import Prelude (putStrLn, IO)
import Control.Category
import Control.Arrow
import Data.Either
import Control.Applicative

newtype Apply f a b = Apply (f (a -> b))

-- first of all, Apply forms a category (similar to Kleisli category)
--
instance (Applicative f) => Category (Apply f) where
  id = Apply (pure id)
  (Apply f) . (Apply g) = Apply (liftA2 compose f g)
    where compose f' g' x = f' (g' x)

-- then we can see that Apply is also an Arrow
instance (Applicative f) => Arrow (Apply f) where
  arr f = Apply (pure f)
  first (Apply fbc) = Apply (bc2bdcd <$> fbc)
    where bc2bdcd bc (b, d) = (bc b, d)

instance (Applicative f) => ArrowChoice (Apply f) where
  left (Apply fbc) = Apply (bc2ebdecd <$> fbc)
    where bc2ebdecd bc (Left b) = Left (bc b)
          bc2ebdecd _ (Right d) = Right d

main :: IO ()
main = putStrLn "type checks!"
shouya/thinking-dumps
cat-code/Arrow.hs
mit
936
0
10
260
358
187
171
20
1
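-- A small usage sketch for the Apply arrow above (hypothetical, reusing the
-- newtype from that script): with f ~ Maybe, (>>>) composes the wrapped
-- functions through the Applicative. runApply is an assumed unwrapping helper,
-- not defined in the original script.
runApply :: Applicative f => Apply f a b -> f (a -> b)
runApply (Apply f) = f

example :: Maybe Int
example = runApply (Apply (Just (+ 1)) >>> Apply (Just (* 2))) <*> Just 10
-- Just 22, i.e. (10 + 1) * 2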
{-# LANGUAGE OverloadedStrings #-}

module IndexController where

import qualified Web.Scotty as S

import qualified IndexViews
import Session (getSession)

app :: S.ScottyM ()
app = do
    S.get "/" $ getSession >>= IndexViews.index
robertjlooby/scotty-story-board
app/controllers/IndexController.hs
mit
244
0
10
49
60
35
25
8
1
-- -- Copyright (c) 2014 Citrix Systems, Inc. -- -- This program is free software; you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation; either version 2 of the License, or -- (at your option) any later version. -- -- This program is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- -- You should have received a copy of the GNU General Public License -- along with this program; if not, write to the Free Software -- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -- {-# LANGUAGE TypeSynonymInstances,OverlappingInstances,TypeOperators,PatternGuards,ScopedTypeVariables,FlexibleInstances #-} module Vm.Config ( ConfigProperty , amtPtActive -- read / write / check for existence of config properties , readConfigProperty , readConfigPropertyDef , saveConfigProperty , saveOrRmConfigProperty , haveConfigProperty , locateConfigProperty , getConfigPropertyName -- Xenvm config out of database config , getXlConfig , stringifyXlConfig -- list of interesting config properties , vmUuidP, vmName, vmDescription, vmType, vmSlot, vmImagePath, vmPvAddons, vmPvAddonsVersion , vmVirtType , vmStartOnBoot, vmStartOnBootPriority, vmKeepAlive, vmProvidesNetworkBackend, vmTimeOffset , vmAmtPt, vmCryptoUser, vmCryptoKeyDirs, vmStartup , vmNotify, vmHvm, vmPae, vmAcpi, vmApic, vmViridian, vmNx, vmSound, vmMemory, vmHap , vmDisplay, vmBoot, vmCmdLine, vmKernel, vmInitrd, vmAcpiTable, vmVcpus, vmGpu , vmKernelExtract, vmInitrdExtract , vmMemoryStaticMax , vmMemoryMin , vmVideoram, vmHibernated, vmHidden, vmMeasured, vmShutdownPriority, vmProvidesGraphicsFallback , vmPorticaStatus, vmPassthroughMmio, vmPassthroughIo, vmHiddenInUi , vmFlaskLabel, vmInitFlaskLabel, vmStubdomFlaskLabel, vmCoresPerSocket, vmAutoS3Wake, vmSeamlessId, vmStartFromSuspendImage , vmExtraHvms, vmExtraXenvm, vmDisks, vmNics, vmPcis, vmDisk, vmNic, vmPci, vmQemuDmPath, vmQemuDmTimeout , vmTrackDependencies, vmSeamlessMouseLeft, vmSeamlessMouseRight , vmOs, vmControlPlatformPowerState , vmFirewallRules , vmSeamlessTraffic , vmOemAcpiFeatures, vmUsbEnabled, vmUsbAutoPassthrough, vmUsbControl, vmCpuid , vmStubdom, vmStubdomMemory, vmStubdomCmdline , vmUsbGrabDevices , vmGreedyPcibackBind , vmRunPostCreate, vmRunPreDelete, vmRunOnStateChange, vmRunOnAcpiStateChange , vmRunPreBoot , vmRunInsteadofStart , vmDomstoreReadAccess, vmDomstoreWriteAccess , vmShowSwitcher, vmWirelessControl, vmNativeExperience , vmXciCpuidSignature , vmS3Mode , vmS4Mode , vmVsnd, vmVkbd, vmArgo , vmRealm , vmSyncUuid , vmIcbinnPath , vmOvfTransportIso , vmReady , vmProvidesDefaultNetworkBackend , vmRestrictDisplayDepth , vmRestrictDisplayRes , vmPreserveOnReboot , vmBootSentinel , vmHpet, vmHpetDefault , vmTimerMode, vmTimerModeDefault , vmNestedHvm , vmSerial , vmBios , vmSecureboot , vmAuthenforce , vmHdType , vmDisplayHandlerStrict , vmLongForm , vmShortForm , vmTextColor , vmDomainColor , vmBorderWidth , vmBorderHeight , vmMosaicVmEnabled , vmVglassEnabled , vmMosaicMode , vmWindowedX , vmWindowedY , vmWindowedW , vmWindowedH , vmPrimaryDomainColor , vmSecondaryDomainColor ) where import Control.Arrow import Control.Monad hiding (join) import Control.Applicative import Data.Bits import Data.Char import Data.String import Data.List import Data.Maybe import qualified Data.Text.Lazy as TL 
import qualified Data.Map as M import Directory import Text.Printf import System.FilePath.Posix import Tools.Log import Tools.File import Tools.Misc import Tools.Future import Tools.IfM import Rpc.Core import Vm.Types import Vm.Policies import Vm.Pci import XenMgr.Db import XenMgr.Host import XenMgr.Notify import XenMgr.Rpc import XenMgr.Config import Rpc.Autogen.XenmgrConst import Rpc.Autogen.XenmgrVmConst ------------------------ -- Configuration Tree -- ------------------------ type Location = String type Value = String -- UUID as well instance Marshall Uuid where dbRead x = dbReadStr x >>= return . fromString dbWrite x v = dbWriteStr x (show v) -- VM type can be marshalled instance Marshall VmType where dbRead x = dbReadStr x >>= return . fromS where fromS "svm" = Svm fromS "pvm" = Svm fromS tag = ServiceVm tag dbWrite x v = dbWriteStr x (toS v) where toS Svm = "svm" toS (ServiceVm tag) = tag instance Marshall XbDeviceID where dbRead p = XbDeviceID <$> dbRead p dbWrite p (XbDeviceID v) = dbWrite p v instance EnumMarshall VirtType where enumMarshallMap = [ (PVH , eVIRT_TYPE_PVH ) , (HVM , eVIRT_TYPE_HVM ) , (PV , eVIRT_TYPE_PV ) ] instance EnumMarshall DiskDeviceType where enumMarshallMap = [ (DiskDeviceTypeDisk , "disk" ) , (DiskDeviceTypeCdRom, "cdrom") ] instance EnumMarshall DiskMode where enumMarshallMap = [ (Vm.Types.ReadOnly , "r") , (Vm.Types.ReadWrite, "w") ] instance EnumMarshall DiskType where enumMarshallMap = [ (DiskImage , "file") , (PhysicalDevice , "phy" ) , (QemuCopyOnWrite, "qcow") , (ExternalVdi , "vdi" ) , (Aio , "aio" ) , (VirtualHardDisk, "vhd" ) , (Raw , "raw" ) ] instance EnumMarshall DiskSnapshotMode where enumMarshallMap = [ (SnapshotTemporary , "temporary" ) , (SnapshotTemporaryEncrypted, "temporary-encrypted" ) , (SnapshotCoalesce , "coalesce" ) , (SnapshotScripted , "scripted" ) , (SnapshotScriptedAuthor , "scripted-author" ) , (SnapshotScriptedNoSnapshot, "scripted-no-snapshot") ] instance EnumMarshall ManagedDiskType where enumMarshallMap = [ (UnmanagedDisk, eMANAGED_DISKTYPE_NONE) , (SystemDisk, eMANAGED_DISKTYPE_SYSTEM) , (ApplicationDisk, eMANAGED_DISKTYPE_APPLICATION) , (UserDisk, eMANAGED_DISKTYPE_USER) ] instance EnumMarshall S3Mode where enumMarshallMap = [ (S3Pv, eS3_MODE_PV) , (S3Ignore, eS3_MODE_IGNORE) , (S3Restart, eS3_MODE_RESTART) , (S3Snapshot, eS3_MODE_SNAPSHOT) ] instance EnumMarshall S4Mode where enumMarshallMap = [ (S4Pv, eS4_MODE_PV) , (S4Ignore, eS4_MODE_IGNORE) , (S4Restart, eS4_MODE_RESTART) , (S4Snapshot, eS4_MODE_SNAPSHOT) ] instance Marshall VirtType where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall DiskMode where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall DiskType where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall DiskSnapshotMode where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall DiskDeviceType where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall ManagedDiskType where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall S3Mode where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall S4Mode where {dbRead = dbReadEnum; dbWrite = dbWriteEnum} instance Marshall Sha1Sum where dbRead = fmap (read . ("0x" ++)) . dbReadStr dbWrite x = dbWriteStr x . 
printf "%040x" instance Marshall a => Marshall (Maybe a) where dbRead = dbMaybeRead dbWrite = dbMaybeWrite -- Disk definition info can be marshalled instance Marshall Disk where dbRead x = do path <- dbRead (x ++ "/path" ) typ <- dbRead (x ++ "/type" ) mtyp <- dbReadWithDefault UnmanagedDisk (x ++ "/managed-disktype" ) mode <- dbRead (x ++ "/mode" ) dev <- dbRead (x ++ "/device" ) devt <- dbRead (x ++ "/devtype" ) snap <- dbRead (x ++ "/snapshot") sha1Sum <- dbRead (x ++ "/sha1sum" ) shared <- dbReadWithDefault False (x ++ "/shared") enable <- dbReadWithDefault True (x ++ "/enable") return $ Disk { diskPath = path , diskType = typ , diskMode = mode , diskDevice = dev , diskDeviceType = devt , diskSnapshotMode = snap , diskSha1Sum = sha1Sum , diskShared = shared , diskManagedType = mtyp , diskEnabled = enable } dbWrite x v = do current <- dbRead x dbWrite (x ++ "/path" ) (diskPath v) dbWrite (x ++ "/type" ) (diskType v) let mmt UnmanagedDisk = Nothing; mmt x = Just x dbMaybeWrite (x ++ "/managed-disktype") (mmt $ diskManagedType v) dbWrite (x ++ "/mode" ) (diskMode v) dbWrite (x ++ "/device" ) (diskDevice v) dbWrite (x ++ "/devtype" ) (diskDeviceType v) dbWrite (x ++ "/snapshot") (diskSnapshotMode v) dbWrite (x ++ "/sha1sum" ) (diskSha1Sum v) dbWrite (x ++ "/shared" ) (diskShared v) when (diskEnabled v /= diskEnabled current) $ dbWrite (x ++ "/enable") (diskEnabled v) -- NIC definition can be marshalled instance Marshall NicDef where dbRead x = do ids <- dbReadStr (x ++ "/id" ) net <- dbMaybeRead (x ++ "/network" ) uuid <- dbMaybeRead (x ++ "/backend-uuid") bname <- dbMaybeRead (x ++ "/backend-name") enable <- dbReadWithDefault True (x ++ "/enable" ) wifi <- dbReadWithDefault False (x ++ "/wireless-driver") mac <- dbMaybeRead (x ++ "/mac" ) model <- dbMaybeRead (x ++ "/model" ) let nicid = case ids of "" -> 0 s -> read s :: Int return $ NicDef { nicdefId = XbDeviceID nicid , nicdefNetwork = fromMaybe fallbackNetwork (fmap networkFromStr net) , nicdefWirelessDriver = wifi , nicdefBackendUuid = case uuid of Just "" -> Nothing _ -> fmap fromString uuid , nicdefBackendName = case bname of Just "" -> Nothing _ -> fmap id bname , nicdefBackendDomid = Nothing , nicdefEnable = enable , nicdefMac = mac , nicdefModel = model } dbWrite x v = do current <- dbRead x let XbDeviceID nid = nicdefId v dbWriteStr (x ++ "/id") (show nid) when (nicdefEnable v /= nicdefEnable current) $ dbWrite (x ++ "/enable") (nicdefEnable v) dbWrite (x ++ "/network") (networkToStr $ nicdefNetwork v) case nicdefWirelessDriver v of False -> dbRm (x ++ "/wireless-driver") True -> dbWrite (x ++ "/wireless-driver") True case nicdefBackendUuid v of Nothing -> dbRm (x ++ "/backend-uuid") Just id -> dbWrite (x ++ "/backend-uuid") id case nicdefBackendName v of Nothing -> dbRm (x ++ "/backend-name") Just id -> dbWrite (x ++ "/backend-name") id case nicdefMac v of Nothing -> dbRm (x ++ "/mac") Just m -> dbWrite (x ++ "/mac") m case nicdefModel v of Nothing -> dbRm (x ++ "/model") Just m -> dbWrite (x ++ "/model") m -- Portica status is marshallable instance Marshall PorticaStatus where -- But this is dangerous, if the order of elements in PorticaStatus changes. 
dbRead x = PorticaStatus <$> (maybe False id <$> dbMaybeRead (x ++ "/portica-installed")) <*> (maybe False id <$> dbMaybeRead (x ++ "/portica-enabled")) dbWrite x (PorticaStatus installed enabled) = do dbWrite (x ++ "/portica-installed") installed dbWrite (x ++ "/portica-enabled") enabled -- A path to database from given VM dbPath :: Uuid -> Location dbPath uuid = "/vm/" ++ show uuid -- Convert a property path by getting rid of dots convert :: Location -> Location convert = map f where f '.' = '/' f x = x -- Join paths using a separator / join :: [String] -> String join = concat . intersperse "/" data ConfigProperty = ConfigProperty { property_name :: String , property_location :: Uuid -> String } property :: String -> ConfigProperty property name = ConfigProperty { property_name = name , property_location = \uuid -> join [dbPath uuid, convert name] } -- Locate a named property within a VM of given uuid locate :: ConfigProperty -> Uuid -> Location locate p uuid = property_location p uuid locateConfigProperty = property ------------------------ -- Individual properties ------------------------ -- Core Ones vmUuidP = property "uuid" vmName = property "name" vmDescription = property "description" vmSlot = property "slot" vmType = property "type" vmImagePath = property "image_path" vmPvAddons = property "pv-addons-installed" vmPvAddonsVersion = property "pv-addons-version" vmStartOnBoot = property "start_on_boot" vmStartOnBootPriority = property "start_on_boot_priority" vmStartFromSuspendImage = property "start-from-suspend-image" vmShutdownPriority = property "shutdown-priority" vmKeepAlive = property "keep-alive" vmProvidesNetworkBackend = property "provides-network-backend" vmProvidesDefaultNetworkBackend = property "provides-default-network-backend" vmProvidesGraphicsFallback = property "provides-graphics-fallback" vmTimeOffset = property "time-offset" vmAmtPt = property "amt-pt" vmHibernated = property "hibernated" vmHidden = property "hidden" vmHiddenInUi = property "hidden-in-ui" vmAutoS3Wake = property "auto-s3-wake" vmMeasured = property "measured" vmSeamlessId = property "seamless-id" vmTrackDependencies = property "track-dependencies" vmSeamlessMouseLeft = property "seamless-mouse-left" vmSeamlessMouseRight = property "seamless-mouse-right" vmOs = property "os" vmControlPlatformPowerState = property "control-platform-power-state" vmSeamlessTraffic = property "seamless-traffic" vmOemAcpiFeatures = property "oem-acpi-features" vmUsbEnabled = property "usb-enabled" vmUsbAutoPassthrough = property "usb-auto-passthrough" vmUsbControl = property "usb-control" vmUsbGrabDevices = property "usb-grab-devices" vmStubdom = property "stubdom" vmCpuid = property "cpuid" vmXciCpuidSignature = property "xci-cpuid-signature" vmGreedyPcibackBind = property "greedy-pciback-bind" vmRunPostCreate = property "run-post-create" vmRunPreDelete = property "run-pre-delete" vmRunPreBoot = property "run-pre-boot" vmRunInsteadofStart = property "run-insteadof-start" vmRunOnStateChange = property "run-on-state-change" vmRunOnAcpiStateChange = property "run-on-acpi-state-change" vmDomstoreReadAccess = property "domstore-read-access" vmDomstoreWriteAccess = property "domstore-write-access" vmShowSwitcher = property "show-switcher" vmWirelessControl = property "wireless-control" vmNativeExperience = property "native-experience" vmS3Mode = property "s3-mode" vmS4Mode = property "s4-mode" vmRealm = property "realm" vmSyncUuid = property "sync-uuid" vmIcbinnPath = property "icbinn-path" vmOvfTransportIso = property 
"ovf-transport-iso" vmReady = property "ready" vmRestrictDisplayDepth = property "restrict-display-depth" vmRestrictDisplayRes = property "restrict-display-res" vmPreserveOnReboot = property "preserve-on-reboot" vmBootSentinel = property "boot-sentinel" -- this one is stored directly under /vm node as two entries portica-installed and portica-enabled vmPorticaStatus = ConfigProperty { property_name = "portica-status" , property_location = dbPath} -- Crypto Ones vmCryptoUser = property "crypto-user" vmCryptoKeyDirs = property "crypto-key-dirs" -- Ones in CONFIG subtree vmNotify = property "config.notify" vmHvm = property "config.hvm" vmVirtType = property "config.virt-type" vmPae = property "config.pae" vmAcpi = property "config.acpi" vmApic = property "config.apic" vmViridian = property "config.viridian" vmHap = property "config.hap" vmNx = property "config.nx" vmSound = property "config.sound" vmMemory = property "config.memory" vmMemoryStaticMax = property "config.memory-static-max" vmMemoryMin = property "config.memory-min" vmDisplay = property "config.display" vmBoot = property "config.boot" vmCmdLine = property "config.cmdline" vmKernel = property "config.kernel" vmKernelExtract = property "config.kernel-extract" vmInitrd = property "config.initrd" vmInitrdExtract = property "config.initrd-extract" vmAcpiTable = property "config.acpi-table" vmVcpus = property "config.vcpus" vmVideoram = property "config.videoram" vmPassthroughMmio = property "config.passthrough-mmio" vmPassthroughIo = property "config.passthrough-io" vmStartup = property "config.startup" vmFlaskLabel = property "config.flask-label" vmInitFlaskLabel = property "config.init-flask-label" vmStubdomFlaskLabel = property "config.stubdom-flask-label" vmCoresPerSocket = property "config.cores-per-socket" vmQemuDmPath = property "config.qemu-dm-path" vmQemuDmTimeout = property "config.qemu-dm-timeout" vmVsnd = property "config.vsnd" vmVkbd = property "config.vkbd" vmArgo = property "config.argo" vmHpet = property "config.hpet" vmHpetDefault = True vmTimerMode = property "config.timer-mode" vmTimerModeDefault = "no_delay_for_missed_ticks" vmNestedHvm = property "config.nestedhvm" vmSerial = property "config.serial" vmDisplayHandlerStrict = property "config.display-handler-strict" vmLongForm = property "config.long-form" vmShortForm = property "config.short-form" vmTextColor = property "config.text-color" vmDomainColor = property "config.domain-color" vmBorderWidth = property "config.border-width" vmBorderHeight = property "config.border-height" vmMosaicVmEnabled = property "config.mosaic-vm-enabled" vmVglassEnabled = property "config.vglass-enabled" vmMosaicMode = property "config.mosaic-mode" vmWindowedX = property "config.windowed-x" vmWindowedY = property "config.windowed-y" vmWindowedW = property "config.windowed-w" vmWindowedH = property "config.windowed-h" vmPrimaryDomainColor = property "config.domain-color" vmSecondaryDomainColor = property "config.secondary-domain-color" vmStubdomMemory = property "config.stubdom-memory" vmStubdomCmdline = property "config.stubdom-cmdline" vmBios = property "config.bios" vmSecureboot = property "config.secureboot" vmAuthenforce = property "config.authenforce" vmHdType = property "config.hdtype" -- Composite ones and lists vmExtraHvms = property "config.extra-hvm" vmExtraXenvm = property "config.extra-xenvm" vmDisks = property "config.disk" vmNics = property "config.nic" vmPcis = property "config.pci" vmGpu = property "gpu" vmExtraHvm num = property $ "config.extra-hvm." 
++ show num vmDisk num = property $ "config.disk." ++ show num vmNic num = property $ "config.nic." ++ show num vmPci num = property $ "config.pci." ++ show num vmFirewallRules= property "argo-firewall-rules" -- Read and Save a single property -- example usage, to save a list of disks : saveP uuid vmDisks [disk1, disk2, disk3].. -- getConfigPropertyName :: ConfigProperty -> String getConfigPropertyName = property_name -- Check if property exists before reading it readConfigProperty :: (MonadRpc e m, Marshall a) => Uuid -> ConfigProperty -> m (Maybe a) readConfigProperty uuid p = dbMaybeRead (locate p uuid) vmExists :: (MonadRpc e m) => Uuid -> m Bool vmExists uuid = dbExists (dbPath uuid) saveConfigProperty :: (MonadRpc e m, Marshall a) => Uuid -> ConfigProperty -> a -> m () saveConfigProperty uuid p v = whenM (vmExists uuid) $ dbMaybeRead (locate p uuid) >>= maybeSave where -- only save when the value is different, do nothing when it is equal. also save when it does not exist maybeSave Nothing = save maybeSave (Just currentV) | currentV == v = return () | otherwise = save save = do dbWrite (locate p uuid) v notifyVmConfigChanged uuid saveOrRmConfigProperty :: (MonadRpc e m, Marshall a) => Uuid -> ConfigProperty -> Maybe a -> m () saveOrRmConfigProperty uuid p Nothing = dbRm (locate p uuid) saveOrRmConfigProperty uuid p (Just v) = saveConfigProperty uuid p v haveConfigProperty :: (MonadRpc e m) => Uuid -> ConfigProperty -> m Bool haveConfigProperty uuid p = dbExists (locate p uuid) -- Read config property with default value if it doesn't exist readConfigPropertyDef :: (MonadRpc e m, Marshall a) => Uuid -> ConfigProperty -> a -> m a readConfigPropertyDef uuid p def = fromMaybe def <$> readConfigProperty uuid p type Problem = String isHvm :: VmConfig -> Bool isHvm cfg = vmcfgVirtType cfg == HVM ------------------------------------------ -- Create a config file for running Xl ------------------------------------------ -- Xl config is a simple list of strings in the format of key=value newtype XlConfig = XlConfig [ Param ] type Param = String type UserID = String type VhdName = String type DiskSpec = Param type NicSpec = Param type PciSpec = Param amtPtActive :: Uuid -> Rpc Bool amtPtActive uuid = do -- Amt PT is active if a) system amt pt is activated b) vm amt pt is activated (&&) <$> haveSystemAmtPt <*> readConfigPropertyDef uuid vmAmtPt False stringifyXlConfig :: XlConfig -> String stringifyXlConfig (XlConfig params) = unlines params -- Gets a xl config, given domain ID of networking domain. getXlConfig :: VmConfig -> Rpc XlConfig getXlConfig cfg = fmap (XlConfig . concat) . 
mapM (force <=< future) $ [prelude, virtSpecs cfg, diskSpecs cfg, nicSpecs cfg, pciSpecs cfg , miscSpecs cfg] where uuid = vmcfgUuid cfg -- First section of xenvm config file prelude = do Just uuid <- readConfigProperty uuid vmUuidP :: Rpc (Maybe Uuid) name <- readConfigPropertyDef uuid vmName "" hd_type <- readConfigPropertyDef uuid vmHdType "ide" let nameStr = if name == "" then [] else [("name='"++ name ++ "'")] let hdtype = ["hdtype='" ++ hd_type ++ "'"] return $ [ "uuid='" ++ (show uuid) ++ "'" , "vnc=0" , "crypto_key_dir='" ++ (vmcfgCryptoKeyDirs cfg) ++ "'" , "xci_cpuid_signature=" ++ (if vmcfgXciCpuidSignature cfg then "1" else "0") , "pci_permissive=1" , "pci_msitranslate=1" , "pci_seize=1" , "pci_power_mgmt=1" ] ++ nameStr ++ hdtype virtSpecs :: VmConfig -> Rpc [Param] virtSpecs cfg = do let virt = vmcfgVirtType cfg cmd <- readConfigProperty uuid vmCmdLine let builder = ["type='" ++ ( virtStr virt ) ++ "'"] let kernel = case virt of HVM -> [] _ -> maybe [] (\path -> ["kernel='"++path++"'"]) (vmcfgKernelPath cfg) let cmdline = case virt of HVM -> [] _ -> _cmdline cmd let dm_args = case virt of HVM -> ["device_model_version='qemu-xen'"] _ -> [] return $ builder ++ kernel ++ cmdline ++ dm_args where uuid = vmcfgUuid cfg _cmdline _cmd = maybe [] (\cmd -> ["cmdline=" ++ (wrapQuotes cmd)]) _cmd virtStr virt = case virt of HVM -> "hvm" PVH -> "pvh" PV -> "pv" -- Next section: information about disk drives allDisks = vmcfgDisks validDisks = filterM isDiskValid . allDisks isDiskValid :: Disk -> Rpc Bool isDiskValid disk = case diskType disk of VirtualHardDisk -> liftIO . doesFileExist $ diskPath disk _ -> return True --build an xl config style disk list diskSpecs :: VmConfig -> Rpc [DiskSpec] diskSpecs cfg = do disklist <- dSpec bsgList <- bsgSpec cfg return $ ["disk=[" ++ (concat (intersperse "," (disklist ++ bsgList))) ++ "]"] where dSpec = mapM (diskSpec uuid) =<< disks disks = filter diskEnabled <$> validDisks cfg uuid = vmcfgUuid cfg bsgSpec :: VmConfig -> Rpc [Param] bsgSpec cfg = do cdromA <- policyQueryCdAccess uuid cdromR <- policyQueryCdRecording uuid bsgs <- liftIO $ getHostBSGDevices let cdromParams = let bsgList = if cdromA then map cdromParam bsgs else [] in case length bsgList of 0 -> [] _ -> bsgList cdromParam (BSGDevice a b c d) = let bsg_str = "/dev/bsg/" ++ (concat . 
intersperse ":" $ map show [a,b,c,d]) in case (cdromA,cdromR) of -- no cdrom (False, _) -> "" -- full access to cdrom (True, True) -> printf "'%s:%s,raw,atapi-pt,devtype=cdrom,access=rw'" atapiType bsg_str -- readonly access to cdrom (True, False) -> printf "'%s:%s,raw,atapi-pt,devtype=cdrom,access=ro'" atapiType bsg_str atapiType = if (vmcfgStubdom cfg) then "atapi-pt-argo" else "atapi-pt-local" return cdromParams where uuid = vmcfgUuid cfg diskSpec :: Uuid -> Disk -> Rpc DiskSpec diskSpec uuid d = do stubdom <- readConfigPropertyDef uuid vmStubdom False hd_type <- readConfigPropertyDef uuid vmHdType "ide" return $ printf "'%s,%s,%s,%s,%s,%s'" (diskPath d) (fileToRaw (enumMarshall $ diskType d)) (cdType stubdom d) (adjDiskDevice d hd_type) (enumMarshall $ diskMode d) (if ((enumMarshall $ diskDeviceType d) == "cdrom") then (enumMarshall $ diskDeviceType d) else "") where cdType stubdom d = case (enumMarshall $ diskDeviceType d) of "cdrom" -> if stubdom then "backendtype=tap" else "backendtype=phy" _ -> if (enumMarshall $ diskType d) == "phy" then "backendtype=phy" else "backendtype=tap" fileToRaw typ = if typ == "file" || typ == "phy" then "raw" else typ -- convert hdX -> xvdX if hdtype is 'ahci' adjDiskDevice d hd_type = case hd_type of "ahci" -> if ((enumMarshall $ diskDeviceType d) == "cdrom") then (diskDevice d) else ("xvd" ++ [(last $ diskDevice d)]) _ -> diskDevice d -- Next section: information about Network Interfaces nicSpecs :: VmConfig -> Rpc [NicSpec] nicSpecs cfg = do amt <- amtPtActive (vmcfgUuid cfg) maybeHostmac <- liftIO eth0Mac -- Get the configuration file entries ... -- ... for all the nics which are defined & enabled & pass the policy check nics <- filterM policyCheck . filter nicdefEnable $ vmcfgNics cfg let niclist = fmap (\nic -> nicSpec cfg amt maybeHostmac nic (net_domid nic)) nics return $ ["vif=[" ++ (concat (intersperse "," niclist)) ++ "]"] where net_domid nic = fromMaybe 0 (nicdefBackendDomid nic) networks nic = filter (\n -> niHandle n == nicdefNetwork nic) (vmcfgNetworks cfg) isWireless nic = case networks nic of [] -> False (n:_) -> niIsWireless n policyCheck nic | isWireless nic = policyQueryWifiNetworking (vmcfgUuid cfg) | otherwise = policyQueryWiredNetworking (vmcfgUuid cfg) nicSpec :: VmConfig -> Bool -> Maybe Mac -> NicDef -> DomainID -> String nicSpec cfg amt eth0Mac nic networkDomID = let entries = bridge ++ backend ++ wireless ++ vmMac ++ nicType ++ modelType in "'" ++ (concat $ intersperse "," entries) ++ "'" where netinfo :: Maybe NetworkInfo netinfo = case filter (\net -> niHandle net == nicdefNetwork nic) (vmcfgNetworks cfg) of (ni:_) -> Just ni _ -> Nothing -- bridge name, only necessary for emulated net interfaces as qemu manages them bridge = ["bridge=" ++ (TL.unpack $ strObjectPath $ networkObjectPath$ nicdefNetwork nic)] bridgename= niBridgeName `fmap` netinfo -- force backend domid for NIC if specified backend = ["backend=" ++ show networkDomID] -- HACK: don't put device as wireless for linuxes, as we have no pv driver for that wireless | nicdefWirelessDriver nic , vmcfgOs cfg /= Linux = ["wireless=1"] | otherwise = [ ] -- use mac specified in configuration as first priority vmMac | Just mac <- nicdefMac nic = ["mac=" ++ mac] -- otherwise, -- If AMT is active, we set the VM mac to be equal to original eth0 mac (that is, before -- the bits on it got swizzled during boot) | Just mac <- eth0Mac, amt == True = ["mac=" ++ unswizzleMac mac] -- Otherwise we do not touch the VM mac and let xenvm choose | otherwise = [ ] nicType | stubdomNic cfg 
== True = ["type=ioemu"] | otherwise = ["type=vif"] modelType | stubdomNic cfg == False = [] | Just model <- nicdefModel nic = ["model="++model] | otherwise = ["model=e1000"] stubdomNic cfg = isHvm cfg && vmcfgStubdom cfg unswizzleMac :: Mac -> Mac unswizzleMac mac = let bytes = macToBytes mac h = (head bytes) .&. 253 in bytesToMac $ h : tail bytes -- Next section: information about PCI Passthrough Devices pciSpecs :: VmConfig -> Rpc [PciSpec] pciSpecs cfg = do let devices = vmcfgPciPtDevices cfg uuid = vmcfgUuid cfg pciSysfsResFiles = map (++ "/resource") $ map (\(PciPtDev d _ _ _) -> pciSysfsDevPath $ devAddr d) devices pciResources <- liftIO $ mapM pciGetMMIOResources pciSysfsResFiles let mmioHoleAdjusted = 0x100000000 - (pciGetMemHoleBase . pciGetMemHole $ concat pciResources) return $ [ "pci=[" ++ (concat (intersperse "," (map (\dev -> "'" ++ stringAddr dev ++ "'") devices))) ++ "]" , "mmio_hole=" ++ (show . toMB $ mmioHoleAdjusted) ] where stringAddr (PciPtDev d _ _ _) = printf "%04x:%02x:%02x.%x" (pciDomain addr) (pciBus addr) (pciSlot addr) (pciFunc addr) where addr = devAddr d toMB size = div size (1024 * 1024) cpuidResponses :: VmConfig -> [String] cpuidResponses cfg = map option (vmcfgCpuidResponses cfg) where option (CpuidResponse r) = printf "cpuid=%s" r --helper function to wrap config options in quotes (xl specific) wrapQuotes :: String -> String wrapQuotes = (++"'") <$> ("'"++) --helper function to wrap config option in brackets (xl specific) wrapBrackets :: String -> String wrapBrackets = (++"]") <$> ("["++) --helper function to combine all extra_hvm args into one 'extra_hvm' entry combineExtraHvmParams hvmStuff = ["device_model_args=[" ++ concat (intersperse "," (map wrapQuotes hvmStuff)) ++ "]"] -- Additional misc stuff in xenvm config miscSpecs :: VmConfig -> Rpc [Param] miscSpecs cfg = do t <- timeOffset v <- videoram other <- otherXenvmParams let empty = pure [] snd <- ifM (policyQueryAudioAccess uuid) sound empty audioRec <- ifM (policyQueryAudioRecording uuid) empty (pure ["-disable-audio-rec"]) vcpus <- readConfigPropertyDef uuid vmVcpus (1::Int) coresPS <- readConfigPropertyDef uuid vmCoresPerSocket vcpus stubdom_ <- liftIO stubdom usb <- usb_opts hpet_ <- hpet timer_mode_ <- timer_mode nested_ <- nested dm_override_ <- liftRpc dm_override dm_display_ <- liftRpc dm_display vkb_ <- vkb extra_hvms <- readConfigPropertyDef uuid vmExtraHvms [] acpi_table_ <- liftIO $ acpi_table let coresPSpms = if coresPS > 1 then ["cores_per_socket=" ++ show coresPS] else ["cores_per_socket=" ++ show vcpus] return $ t ++ v ++ combineExtraHvmParams (audioRec ++ extra_hvms) ++ ["memory="++show (vmcfgMemoryMib cfg) ] ++ ["maxmem="++show (vmcfgMemoryStaticMaxMib cfg) ] ++ snd ++ coresPSpms ++ stubdom_ ++ cpuidResponses cfg ++ usb ++ platform ++ other ++ hpet_ ++ timer_mode_ ++ nested_ ++ dm_override_ ++ dm_display_ ++ vkb_ ++ acpi_table_ where uuid = vmcfgUuid cfg -- omit if not specified timeOffset = maybeToList . fmap ("rtc_timeoffset="++) <$> readConfigProperty uuid vmTimeOffset -- 16 meg if not specified videoram = do let defaultVideoram = if isHvm cfg then 16 else 0 (\ram -> ["videoram="++ram]) . 
fromMaybe (show defaultVideoram) <$> readConfigProperty uuid vmVideoram hpet = (i <$> readConfigPropertyDef uuid vmHpet vmHpetDefault) >>= \ v -> return ["hpet=" ++ show v] where i True = 1 i _ = 0 timer_mode = do mode <- readConfigPropertyDef uuid vmTimerMode vmTimerModeDefault if isHvm cfg then return ["timer_mode=" ++ (show mode)] else return [] nested = readConfigPropertyDef uuid vmNestedHvm False >>= \ v -> if v then return ["nestedhvm=1"] else return [] acpi_table = do case (vmcfgAcpi cfg) of False -> return [] True -> do exists <- doesFileExist "/sys/firmware/acpi/tables/SLIC" if exists then return [ "acpi_firmware='/sys/firmware/acpi/tables/SLIC'" ] else do info $ "SLIC table missing" return [] -- Activate sound sound = maybeToList . fmap (("soundhw='"++) <$> (++"'")) <$> readConfigProperty uuid vmSound -- Tells xl to use a stubdom or not stubdom | isHvm cfg && vmcfgStubdom cfg = return ["device_model_stubdomain_override=1"] | otherwise = return [] -- Specifies path to qemu binary dm_override | isHvm cfg = return ["device_model_override='" ++ (vmcfgQemuDmPath cfg) ++ "'"] | otherwise = return [] usb_opts | not (vmcfgUsbEnabled cfg) = return ["usb=0"] | otherwise = return [] platform = x (vmcfgRestrictDisplayDepth cfg) "restrictdisplaydepth=" ++ x (vmcfgRestrictDisplayRes cfg) "restrictdisplayres=" where x cond s = if cond then [s++"1"] else [s++"0"] dm_display = do disp <- readConfigPropertyDef uuid vmDisplay "" case disp of "nogfx" -> return ["vga='none'", "nographic=1"] "none" -> return ["vga='stdvga'"] "" -> return ["vga='stdvga'"] d -> return ["vga='stdvga'", "dm_display='" ++ d ++ "'"] vkb = readConfigPropertyDef uuid vmVkbd False >>= \ v -> if v then return ["vkb=['backend-type=linux,feature-abs-pointer=1,height=32768,width=32768']"] else return [] -- Other config keys taken directly from .config subtree which we delegate directly -- to xenvm passToXenvmProperties = [ ("pae" , vmPae) , ("acpi" , vmAcpi) , ("apic" , vmApic) , ("viridian" , vmViridian) --set to 'default' , ("nx" , vmNx) , ("boot" , vmBoot) , ("vcpus" , vmVcpus) , ("hap" , vmHap) , ("seclabel" , vmFlaskLabel) , ("init_seclabel" , vmInitFlaskLabel) , ("device_model_stubdomain_seclabel", vmStubdomFlaskLabel) , ("serial" , vmSerial) , ("stubdom_cmdline" , vmStubdomCmdline) , ("stubdom_memory" , vmStubdomMemory) --OXT-1220: iomem and ioports should be reworked to support specifying multiple , ("iomem" , vmPassthroughMmio) --ranges at a finer granularity. Few ways to implement, likely as a db-node with , ("ioports" , vmPassthroughIo) --each range as an entry beneath it, which is read and parsed during xl cfg generation. , ("bios" , vmBios) , ("secureboot" , vmSecureboot) --set to False , ("authenforce" , vmAuthenforce) --set to True , ("initrd" , vmInitrd) ] --Remove this comment block when implemented. -- xl config handles certain options different than others (eg. quotes, brackets) -- we format them on a case by case basis here before sending them off to xl. otherXenvmParams = concat <$> sequence [ reverse . 
catMaybes <$> mapM g passToXenvmProperties , extra_xenvm ] where g (name,prop) = fmap (\v -> case v of "none" -> [] "true" -> name ++ "=" ++ "1" "false" -> name ++ "=" ++ "0" _ -> case name of "viridian" -> name ++ "=" ++ (wrapBrackets $ wrapQuotes v) "serial" -> name ++ "=" ++ (wrapBrackets $ wrapQuotes v) "iomem" -> name ++ "=" ++ (wrapBrackets $ wrapQuotes v) "ioports" -> name ++ "=" ++ (wrapBrackets $ wrapQuotes v) "seclabel" -> name ++ "=" ++ (wrapQuotes v) "init_seclabel" -> name ++ "=" ++ (wrapQuotes v) "device_model_stubdomain_seclabel" -> name ++ "=" ++ (wrapQuotes v) "boot" -> name ++ "=" ++ (wrapQuotes v) "bios" -> name ++ "=" ++ (wrapQuotes v) "stubdom_cmdline" -> name ++ "=" ++ (wrapQuotes v) "initrd" -> name ++ "=" ++ (wrapQuotes v) _ -> name ++ "=" ++ v) <$> readConfigProperty uuid prop -- additional parameters passed through config/extra-xenvm/... key extra_xenvm :: Rpc [Param] extra_xenvm = readConfigPropertyDef uuid vmExtraXenvm []
OpenXT/manager
xenmgr/Vm/Config.hs
gpl-2.0
41,309
0
27
13,167
9,109
4,781
4,328
746
27