code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
module Git.FastExport.AuthorFilter
( AuthorDB
, personRename
, personRenameFilter
, loadPersonFile
, loadPersonRename
)
where
import Control.Applicative
import Data.Monoid
import Data.Attoparsec.Char8 as A
import Data.Attoparsec.Combinator as A
import Git.FastExport.Types
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Git.FastExport.Parser as P
import qualified Data.Trie as T
import System.IO
-- | Rename table: maps a person's name or email (raw bytes) to the
-- replacement 'Person'.  Populated by 'loadPersonFile'.
newtype AuthorDB = DB (T.Trie Person)
-- | Rewrite the author and committer of a commit header according to the
-- rename table.  Each person is looked up first by name, then by email;
-- when neither key is present the original person is kept unchanged.
personRename :: AuthorDB -> CommitHeader -> CommitHeader
personRename (DB table) hdr@CommitHeader{chAuthor = author, chCommitter = committer} =
    hdr { chAuthor = fmap renameDated author
        , chCommitter = renameDated committer
        }
    where
        renameDated dated@Dated{datedValue = person} =
            dated{datedValue = renamePerson person}
        renamePerson :: Person -> Person
        renamePerson person =
            case T.lookup (personName person) table <|> T.lookup (personEmail person) table of
                Just replacement -> replacement
                Nothing          -> person
-- | Lift 'personRename' to a command filter: commits get their header
-- rewritten, every other command passes through untouched.
personRenameFilter :: AuthorDB -> CmdFilter
personRenameFilter db (GCommit commit) =
    let header' = personRename db (commitHeader commit)
    in [GCommit commit{commitHeader = header'}]
personRenameFilter _ other = [other]
-- | Load a rename table from a file and wrap it as a command filter.
loadPersonRename :: String -> IO CmdFilter
loadPersonRename path = fmap personRenameFilter (loadPersonFile path)
-- | Read a rename file line by line and build the lookup trie.
-- Each line is parsed with 'parseAuthorLine'; lines that fail to parse
-- (including partial parses) are silently skipped.
loadPersonFile :: String -> IO AuthorDB
loadPersonFile s = do
	withFile s ReadMode $ \ f -> do
		let loop ls = do
			eof <- hIsEOF f
			if eof then return ls else do
				l <- B.hGetLine f
				-- feed "" forces attoparsec to resolve a Partial result
				case flip feed "" $ parse parseAuthorLine l of
					Done _ res -> loop (res:ls)
					Fail t ctx err -> do
						--putStrLn $ "Skipped: "++ C.unpack l ++ "(" ++ err ++ ": " ++ C.unpack t ++ ")"
						loop ls -- - $ err ++ " (line: " ++ C.unpack l ++ ")"
					Partial p -> loop ls
		ls <- loop []
		return . DB $ T.fromList ls
-- | Skip horizontal whitespace (spaces and tabs) without consuming newlines.
skipHSpace = skipWhile $ inClass " \t"
-- | Skip a @#@-to-end-of-line comment plus any following whitespace.
-- NOTE(review): not referenced anywhere in this chunk -- possibly dead code
-- or used by callers outside this view.
parseComment = do
	skipHSpace
	char '#'
	skipWhile (/= '\n')
	skipSpace
-- | Parse one @key = person@ line: everything before the first @=@ (with
-- trailing whitespace stripped) is the lookup key; the remainder is parsed
-- as a person via 'P.parsePerson'.
parseAuthorLine = do
	raw <- takeWhile1 (notInClass "=#")
	_ <- char '='
	let (key, _) = B.spanEnd isSpace_w8 raw
	skipHSpace
	person <- P.parsePerson
	skipSpace
	return (key, person)
| lumimies/git-fastexport-filter | src/Git/FastExport/AuthorFilter.hs | bsd-3-clause | 2,237 | 0 | 24 | 617 | 681 | 352 | 329 | -1 | -1 |
module Main where
import Test.Framework (defaultMain)
import qualified Controllers.Tests (tests)
import qualified Views.Tests (tests)
-- | Test-suite entry point: run view tests, then controller tests.
main :: IO ()
main = defaultMain
    [ Views.Tests.tests
    , Controllers.Tests.tests
    ]
| HaskellCNOrg/snap-web | tests/TestSuite.hs | bsd-3-clause | 256 | 0 | 7 | 67 | 66 | 40 | 26 | 7 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Write.Header where
import Data.List.Extra(nubOrd, intercalate)
import Text.InterpolatedString.Perl6
-- | An import line: either a plain import with an explicit name list, or a
-- qualified import with an alias.
data Import = Import ModuleName [String]
            | ImportQualified ModuleName Alias [String]
-- | A LANGUAGE extension, by name.
newtype Extension = Extension String
  deriving (Eq, Ord)
-- | Fully qualified module name, e.g. @"Data.List"@.
type ModuleName = String
-- | Short alias used in a qualified import.
type Alias = String
-- | Render a list of extensions as LANGUAGE pragmas, one per line,
-- with duplicates removed.
writeExtensions :: [Extension] -> String
writeExtensions exts = unlines (map pragma (nubOrd exts))
  where pragma (Extension name) = "{-# LANGUAGE " ++ name ++ " #-}"
-- | Render a list of imports as Haskell import declarations, one per line.
-- NOTE(review): plain imports separate names with @", "@ while qualified
-- imports use @","@ (no space) -- confirm the inconsistency is intended.
writeImports :: [Import] -> String
writeImports = unlines . fmap importDecl
  where importDecl (Import mn is) =
          [qc|import {mn} ({intercalate ", " is})|]
        importDecl (ImportQualified mn alias is) =
          [qc|import qualified {mn} as {alias} ({intercalate "," is})|]
| oldmanmike/vulkan | generate/src/Write/Header.hs | bsd-3-clause | 865 | 0 | 9 | 207 | 220 | 125 | 95 | 20 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -fplugin Brisk.Plugin #-}
{-# OPTIONS_GHC -fplugin-opt Brisk.Plugin:main #-}
module Managed where
import Data.Binary
import GHC.Generics (Generic)
import qualified Data.HashMap.Strict as M
import Control.Distributed.Process hiding (call)
import Control.Distributed.Process.Extras.Time
import Control.Distributed.Process.ManagedProcess
import Control.Distributed.Process.ManagedProcess.Client
-- | Server state for the data node (currently a unit-like placeholder).
data DataNodeState = DNS
-- | The single message the data node understands.
data DataNodeAPI = Bloop
              deriving (Eq, Ord, Show, Generic)
instance Binary DataNodeAPI
-- | The single reply the data node can send.
data DataNodeResponse = OK
              deriving (Eq, Ord, Show, Generic)
instance Binary DataNodeResponse
-- | Initial server state.
initState = DNS
-- | Run the managed-process server loop for the data node.
runDataNode :: Process ()
runDataNode =
  serve initState initializeDataNode dataNodeProcess
-- | Trivial init: accept the given state with no timeout.
initializeDataNode :: DataNodeState -> Process (InitResult DataNodeState)
initializeDataNode s = return $ InitOk s NoDelay
-- | Process definition: default behaviour plus our one call handler.
dataNodeProcess :: ProcessDefinition DataNodeState
dataNodeProcess = defaultProcess {
  apiHandlers = [dataNodeAPIHandler]
  }
-- | Shorthand for the reply type produced by the call handler.
type DataNodeReply = Process (ProcessReply DataNodeResponse DataNodeState)
-- | Dispatcher wrapping the pure handler below.
dataNodeAPIHandler :: Dispatcher DataNodeState
dataNodeAPIHandler = handleCall dataNodeAPIHandler'
-- | Handle a 'Bloop' call: reply 'OK', state unchanged.
dataNodeAPIHandler' :: DataNodeState -> DataNodeAPI -> DataNodeReply
dataNodeAPIHandler' st Bloop
  = reply OK st
-- | Synchronous call sending 'Bloop' to the given process.
foobert :: ProcessId -> Process DataNodeResponse
foobert p = call p Bloop
-- | Spawn a local data node and call it once.
main :: Process DataNodeResponse
main = do server <- spawnLocal runDataNode
          foobert server
| abakst/brisk-prelude | examples/Managed00.hs | bsd-3-clause | 1,522 | 0 | 8 | 232 | 340 | 186 | 154 | 38 | 1 |
-- | Main module
--
module Main
( main
) where
import Control.Applicative ((<$>))
import System.Environment (getArgs, getProgName)
import System.FilePath (replaceExtension)
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Criterion.ToHtml.Html
import Criterion.ToHtml.Result
import Paths_criterion_to_html (getDataFileName)
-- | Convert a criterion CSV report to an HTML page, embedding the bundled
-- JavaScript, and write it to the given output path.
toHtml' :: FilePath -> FilePath -> IO ()
toHtml' csv html = do
    -- the JS asset ships with the package (see Paths_criterion_to_html)
    js <- B.readFile =<< getDataFileName "criterion-to-html.js"
    putStrLn $ "Parsing " ++ csv
    csv' <- parseCriterionCsv <$> readFile csv
    BL.writeFile html $ renderHtml $ report js csv'
    putStrLn $ "Wrote " ++ html
-- | CLI entry point: @prog <csv-file> [out-file]@.  When no output file is
-- given, the CSV path with an @.html@ extension is used.
main :: IO ()
main = do
    arguments <- getArgs
    progName  <- getProgName
    case arguments of
        [csv, html] -> toHtml' csv html
        [csv]       -> toHtml' csv (replaceExtension csv "html")
        _           -> putStrLn ("Usage: " ++ progName ++ " <csv-file> [out-file]")
| jaspervdj/criterion-to-html | src/Criterion/ToHtml.hs | bsd-3-clause | 1,008 | 0 | 12 | 224 | 290 | 157 | 133 | 27 | 3 |
{-# LANGUAGE BangPatterns #-}
module Sound.Ptr
(
module Data.Word
, ElemCount
-- * List-like Ptr
, ptrMap
, ptrMapM
, ptrZip2
, ptrZipM2
, ptrFoldl
, ptrFoldlM
-- * Specialized allocation
-- ** On the stack
, allocaIntArray
, allocaDoubleArray
-- ** On the heap
, mallocForeignPtrIntArray
, mallocForeignPtrDoubleArray
-- * Reexports
-- ** Stack allocation
, allocaArray
, allocaBytes
-- ** Heap allocation
, mallocForeignPtr
, mallocForeignPtrArray
, mallocForeignPtrBytes
-- ** Copying
, copyBytes
, moveBytes
-- ** Types
, castPtr
, castForeignPtr
, nullPtr
, withForeignPtr
, ForeignPtr
, Ptr
, Storable(..)
)
where
import Foreign
(
ForeignPtr
, Ptr
, Storable(..)
, allocaArray
, allocaBytes
, castForeignPtr
, castPtr
, copyBytes
, mallocForeignPtr
, mallocForeignPtrArray
, mallocForeignPtrBytes
, moveBytes
, nullPtr
, withForeignPtr
)
import Data.Word
-- | Element count for a buffer of @a@s (the type parameter is phantom).
type ElemCount a = Int

-- | Map a pure function over the first @n@ elements behind @src@, writing
-- each result to the corresponding slot of @dst@.  Strict in every element.
ptrMap :: (Storable a, Storable b) => (a -> b) -> ElemCount a -> Ptr a -> Ptr b -> IO ()
ptrMap !f !n !src !dst = go 0
  where
    go !i
      | i >= n    = return ()
      | otherwise = do
          !x <- peekElemOff src i
          let !y = f x
          pokeElemOff dst i y
          go (i + 1)
{-# INLINE ptrMap #-}
-- | Strict left fold of a pure function over the first @n@ elements
-- behind a pointer.
ptrFoldl :: (Storable e) => (a -> e -> a) -> a -> ElemCount e -> Ptr e -> IO a
ptrFoldl !step !start !n !src = go 0 start
  where
    go !i !acc
      | i >= n    = return acc
      | otherwise = do
          !e <- peekElemOff src i
          let !acc' = step acc e
          go (i + 1) acc'
{-# INLINE ptrFoldl #-}
-- | Strict monadic left fold over the first @n@ elements behind a pointer.
ptrFoldlM :: (Storable e) => (a -> e -> IO a) -> a -> ElemCount e -> Ptr e -> IO a
ptrFoldlM !step !start !n !src = go 0 start
  where
    go !i !acc
      | i >= n    = return acc
      | otherwise = do
          !e    <- peekElemOff src i
          !acc' <- step acc e
          go (i + 1) acc'
{-# INLINE ptrFoldlM #-}
-- | Combine the first @n@ elements of two buffers with a pure function,
-- writing each result into @dst@.
ptrZip2 :: (Storable a, Storable b, Storable c) => (a -> b -> c) -> ElemCount a -> Ptr a -> Ptr b -> Ptr c -> IO ()
ptrZip2 !f !n !srcA !srcB !dst = go 0
  where
    go !i
      | i >= n    = return ()
      | otherwise = do
          !x <- peekElemOff srcA i
          !y <- peekElemOff srcB i
          let !z = f x y
          pokeElemOff dst i z
          go (i + 1)
{-# INLINE ptrZip2 #-}
-- | Combine the first @n@ elements of two buffers with a monadic action,
-- writing each result into @dst@.
ptrZipM2 :: (Storable a, Storable b, Storable c) => (a -> b -> IO c) -> ElemCount a -> Ptr a -> Ptr b -> Ptr c -> IO ()
ptrZipM2 !step !n !srcA !srcB !dst = go 0
  where
    go !i
      | i >= n    = return ()
      | otherwise = do
          !x <- peekElemOff srcA i
          !y <- peekElemOff srcB i
          !z <- step x y
          pokeElemOff dst i z
          go (i + 1)
{-# INLINE ptrZipM2 #-}
-- | Map a monadic action over the first @n@ elements behind @src@, writing
-- each result to the corresponding slot of @dst@.
ptrMapM :: (Storable a, Storable b) => (a -> IO b) -> ElemCount a -> Ptr a -> Ptr b -> IO ()
ptrMapM !step !n !src !dst = go 0
  where
    go !i
      | i >= n    = return ()
      | otherwise = do
          !x <- peekElemOff src i
          !y <- step x
          pokeElemOff dst i y
          go (i + 1)
{-# INLINE ptrMapM #-}
-- | 'allocaArray' specialised to 'Int' elements.
allocaIntArray :: ElemCount Int -> (Ptr Int -> IO a) -> IO a
allocaIntArray = allocaArray
{-# INLINE allocaIntArray #-}
-- | 'allocaArray' specialised to 'Double' elements.
allocaDoubleArray :: ElemCount Double -> (Ptr Double -> IO a) -> IO a
allocaDoubleArray = allocaArray
{-# INLINE allocaDoubleArray #-}
-- | 'mallocForeignPtrArray' specialised to 'Int' elements.
mallocForeignPtrIntArray :: ElemCount Int -> IO (ForeignPtr Int)
mallocForeignPtrIntArray = mallocForeignPtrArray
{-# INLINE mallocForeignPtrIntArray #-}
-- | 'mallocForeignPtrArray' specialised to 'Double' elements.
mallocForeignPtrDoubleArray :: ElemCount Double -> IO (ForeignPtr Double)
mallocForeignPtrDoubleArray = mallocForeignPtrArray
{-# INLINE mallocForeignPtrDoubleArray #-}
| edom/sound | src/Sound/Ptr.hs | bsd-3-clause | 4,247 | 0 | 13 | 1,734 | 1,288 | 628 | 660 | 131 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Text as T (Text (..), pack)
import Rackspace.MailGun
import System.Environment
-- | Send one test e-mail through MailGun.  Credentials and the recipient
-- come from the environment ('getEnv' raises if a variable is missing).
main :: IO ()
main = do
    domain   <- getEnv "MAILGUN_DOMAIN"
    apiKey   <- getEnv "MAILGUN_SECRET"
    testAddr <- getEnv "MAILGUN_TEST_ADDRESS"
    let message = TextMessage
                    { from      = T.pack ("someone@" ++ domain)
                    , to        = T.pack testAddr
                    , cc        = Nothing
                    , bcc       = Nothing
                    , subject   = Just "Test Message"
                    , text      = "Hello, this is a test message!" }
    res <- sendMessage domain apiKey message
    print res
| AndrewRademacher/mailgun | test/Send.hs | mit | 735 | 0 | 14 | 296 | 166 | 89 | 77 | 19 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE PolyKinds #-}
module DTypes.Combinators
( FApply (..)
, FProd (..)
, FSum (..)
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<*>), (<$>))
#endif
import DTypes.Classes
import DTypes.Compose
import DTypes.Trafo
-- | Apply a fixed index @x@ to the functor argument @f@; the D-class
-- instances below act on that single component.
newtype FApply (x :: k) (f :: k -> *)
  = FApply
  { unFApply :: f x
  }
instance DFunctor (FApply x) where
  dfmap f (FApply x) = FApply (f x)
instance DApplicative (FApply x) where
  dpure x = FApply x
  FApply x <<*>> FApply y = FApply (x $$ y)
instance DTraversable (FApply x) where
  dsequenceA (FApply (Compose x)) = FApply <$> x
-- | Product of two D-structures over the same functor argument; instances
-- operate on both components pointwise.
data FProd (d1 :: k -> *) (d2 :: k -> *) (f :: k)
  = FProd (d1 f) (d2 f)
instance (DFunctor d1, DFunctor d2) => DFunctor (FProd d1 d2) where
  dfmap f (FProd x y) = FProd (dfmap f x) (dfmap f y)
instance (DApplicative d1, DApplicative d2) => DApplicative (FProd d1 d2) where
  dpure x = FProd (dpure x) (dpure x)
  FProd x1 x2 <<*>> FProd y1 y2 = FProd (x1 <<*>> y1) (x2 <<*>> y2)
instance (DTraversable d1, DTraversable d2) => DTraversable (FProd d1 d2) where
  dsequenceA (FProd x y) = FProd <$> dsequenceA x <*> dsequenceA y
  dtraverse f (FProd x y) = FProd <$> dtraverse f x <*> dtraverse f y
-- TODO: implement more?
-- | Sum of two D-structures; instances act on whichever alternative is
-- present and preserve the constructor.
data FSum (d1 :: k -> *) (d2 :: k -> *) (f :: k)
  = FSum1 (d1 f) | FSum2 (d2 f)
instance (DFunctor d1, DFunctor d2) => DFunctor (FSum d1 d2) where
  dfmap f (FSum1 x) = FSum1 (dfmap f x)
  dfmap f (FSum2 y) = FSum2 (dfmap f y)
instance (DTraversable d1, DTraversable d2) => DTraversable (FSum d1 d2) where
  dsequenceA (FSum1 x) = FSum1 <$> dsequenceA x
  dsequenceA (FSum2 y) = FSum2 <$> dsequenceA y
| timjb/frecords | src/DTypes/Combinators.hs | mit | 1,650 | 0 | 10 | 358 | 757 | 401 | 356 | 38 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module WaiAppStatic.Listing
( defaultListing
) where
import qualified Text.Blaze.Html5.Attributes as A
import qualified Text.Blaze.Html5 as H
import Text.Blaze ((!))
import qualified Data.Text as T
import Data.Time
import Data.Time.Clock.POSIX
import WaiAppStatic.Types
import System.Locale (defaultTimeLocale)
import Data.List (sortBy)
import Util
import qualified Text.Blaze.Html.Renderer.Utf8 as HU
-- | Provides a default directory listing, suitable for most apps.
--
-- Code below taken from Happstack: <http://patch-tag.com/r/mae/happstack/snapshot/current/content/pretty/happstack-server/src/Happstack/Server/FileServe/BuildingBlocks.hs>
defaultListing :: Listing
defaultListing pieces (Folder contents) = do
    -- at the root there is no parent folder to link back to
    let isTop = null pieces || map Just pieces == [toPiece ""]
    let fps'' :: [Either FolderName File]
        fps'' = (if isTop then id else (Left (unsafeToPiece "") :)) contents -- FIXME emptyParentFolder feels like a bit of a hack
    return $ HU.renderHtmlBuilder
        $ H.html $ do
            H.head $ do
                let title = T.intercalate "/" $ map fromPiece pieces
                let title' = if T.null title then "root folder" else title
                H.title $ H.toHtml title'
                -- inline stylesheet for the listing page
                H.style $ H.toHtml $ unlines [ "table { margin: 0 auto; width: 760px; border-collapse: collapse; font-family: 'sans-serif'; }"
                                             , "table, th, td { border: 1px solid #353948; }"
                                             , "td.size { text-align: right; font-size: 0.7em; width: 50px }"
                                             , "td.date { text-align: right; font-size: 0.7em; width: 130px }"
                                             , "td { padding-right: 1em; padding-left: 1em; }"
                                             , "th.first { background-color: white; width: 24px }"
                                             , "td.first { padding-right: 0; padding-left: 0; text-align: center }"
                                             , "tr { background-color: white; }"
                                             , "tr.alt { background-color: #A3B5BA}"
                                             , "th { background-color: #3C4569; color: white; font-size: 1.125em; }"
                                             , "h1 { width: 760px; margin: 1em auto; font-size: 1em; font-family: sans-serif }"
                                             , "img { width: 20px }"
                                             , "a { text-decoration: none }"
                                             ]
            H.body $ do
                H.h1 $ showFolder $ filter (not . T.null . fromPiece) pieces
                renderDirectoryContentsTable haskellSrc folderSrc fps''
    where
        -- icons live under the .hidden directory relative to this folder
        image x = T.unpack $ T.concat [(relativeDirFromPieces pieces), ".hidden/", x, ".png"]
        folderSrc = image "folder"
        haskellSrc = image "haskell"
        showName "" = "root"
        showName x = x
        -- breadcrumb trail: every ancestor links back up the tree
        showFolder :: Pieces -> H.Html
        showFolder [] = "/"
        showFolder [x] = H.toHtml $ showName $ fromPiece x
        showFolder (x:xs) = do
            let href = concat $ replicate (length xs) "../" :: String
            H.a ! A.href (H.toValue href) $ H.toHtml $ showName $ fromPiece x
            " / " :: H.Html
            showFolder xs
-- | a function to generate an HTML table showing the contents of a directory on the disk
--
-- This function generates most of the content of the
-- 'renderDirectoryContents' page. If you want to style the page
-- differently, or add google analytics code, etc, you can just create
-- a new page template to wrap around this HTML.
--
-- see also: 'getMetaData', 'renderDirectoryContents'
renderDirectoryContentsTable :: String
                             -> String
                             -> [Either FolderName File]
                             -> H.Html
renderDirectoryContentsTable haskellSrc folderSrc fps =
           H.table $ do H.thead $ do H.th ! (A.class_ "first") $ H.img ! (A.src $ H.toValue haskellSrc)
                                     H.th "Name"
                                     H.th "Modified"
                                     H.th "Size"
                        -- zebra striping: alternate rows get class "alt"
                        H.tbody $ mapM_ mkRow (zip (sortBy sortMD fps) $ cycle [False, True])
    where
      -- folders sort before files; within a group, by name
      sortMD :: Either FolderName File -> Either FolderName File -> Ordering
      sortMD Left{} Right{} = LT
      sortMD Right{} Left{} = GT
      sortMD (Left a) (Left b) = compare a b
      sortMD (Right a) (Right b) = compare (fileName a) (fileName b)
      -- render one table row for a folder or file entry
      mkRow :: (Either FolderName File, Bool) -> H.Html
      mkRow (md, alt) =
          (if alt then (! A.class_ "alt") else id) $
          H.tr $ do
                   H.td ! A.class_ "first"
                        $ case md of
                            Left{} -> H.img ! A.src (H.toValue folderSrc)
                                            ! A.alt "Folder"
                            Right{} -> return ()
                   let name = either id fileName md
                   let isFile = either (const False) (const True) md
                   -- folders get a trailing slash on their link target
                   H.td (H.a ! A.href (H.toValue $ fromPiece name `T.append` if isFile then "" else "/") $ H.toHtml $ fromPiece name)
                   H.td ! A.class_ "date" $ H.toHtml $
                       case md of
                           Right File { fileGetModified = Just t } ->
                                formatCalendarTime defaultTimeLocale "%d-%b-%Y %X" t
                           _ -> ""
                   H.td ! A.class_ "size" $ H.toHtml $
                       case md of
                           Right File { fileGetSize = s } -> prettyShow s
                           Left{} -> ""
-- | Format an epoch timestamp with the given locale and format string.
formatCalendarTime a b c =  formatTime a b $ posixSecondsToUTCTime (realToFrac c :: POSIXTime)
-- | Render a byte count with binary (1024-based) units and thousands
-- separators, e.g. @2 KB@ or @1,004 B@.
prettyShow n
  | n > 1024  = prettyShowK (n `div` 1024)
  | otherwise = addCommas "B" n
prettyShowK n
  | n > 1024  = prettyShowM (n `div` 1024)
  | otherwise = addCommas "KB" n
prettyShowM n
  | n > 1024  = prettyShowG (n `div` 1024)
  | otherwise = addCommas "MB" n
prettyShowG n = addCommas "GB" n
-- | Append a unit and insert a comma after every third decimal digit.
addCommas unit = (++ (' ' : unit)) . reverse . addCommas' . reverse . show
-- | Group reversed digit strings into chunks of three.
addCommas' (a : b : c : d : e) = a : b : c : ',' : addCommas' (d : e)
addCommas' rest = rest
| beni55/wai | wai-app-static/WaiAppStatic/Listing.hs | mit | 6,445 | 0 | 19 | 2,475 | 1,545 | 788 | 757 | 105 | 10 |
import Autolib.Exp
import Autolib.Exp.Inter
import Autolib.NFA
import Autolib.NFA.Shortest (is_accepted)
import Autolib.ToDoc
import System.Environment
import Control.Monad ( guard, when )
import Data.Array
-- | Read a regular expression and a depth from the command line, build the
-- corresponding automaton, and print the quadtree picture it denotes.
main :: IO ()
main = do
    args <- getArgs
    when ( 2 /= length args ) $ error
        $ "example usage : ./Main \"(lu+rd)^*\" 4"
    let [ exp0, dep0 ] = args
    let -- left, right, up, down
        env = std_sigma "lrud"
    print $ vcat
        [ text "standard environment"
        , nest 4 $ toDoc env
        ]
    let exp :: RX Char
        exp = read exp0
    print $ vcat
        [ text "input expression"
        , nest 4 $ toDoc exp
        ]
    let dep :: Int
        dep = read dep0
    print $ vcat
        [ text "paint picture of depth"
        , nest 4 $ toDoc dep
        ]
    -- compile the expression to an NFA over the four direction letters
    let aut :: NFA Char Int
        aut = inter env exp
    print $ vcat
        [ text "automaton"
        , nest 4 $ toDoc aut
        ]
    -- a pixel is black iff its path is accepted by the automaton
    let black :: [ (Path, Point) ]
        black = do
            p <- paths dep
            guard $ is_accepted aut p
            return ( p, position p )
    print $ vcat
        [ text "black pixels"
        , nest 4 $ toDoc black
        ]
    let pic :: Array (Int,Int) Char
        pic = picture aut dep
    print $ vcat
        [ text "picture"
        , nest 4 $ format pic
        ]
-- | A path into the quadtree picture: alternating horizontal ('l'/'r') and
-- vertical ('u'/'d') steps, two characters per level of depth.
type Path = [ Char ]

-- | All @4^d@ paths of depth @d@.  The original version had no clause for
-- negative depths and crashed with a pattern-match failure; negative depths
-- now simply yield no paths.
paths :: Int -> [ Path ]
paths 0 = return []
paths d
    | d > 0 = do
        h <- "lr"
        v <- "ud"
        rest <- paths ( d - 1 )
        return $ h : v : rest
    | otherwise = []

-- | Pixel coordinates in the @2^d x 2^d@ grid.
type Point = ( Int, Int )

-- | Coordinates reached by following a path from the top-left corner; each
-- step pair halves the remaining cell.  Partial on malformed paths (odd
-- length, or letters outside @lrud@), matching the original behaviour.
position :: Path -> Point
position = go (0, 0)
    where
        go pt [] = pt
        go (x, y) (h : v : rest) =
            let dx = case h of 'l' -> 0 ; 'r' -> 1
                dy = case v of 'u' -> 0 ; 'd' -> 1
            in go ( 2 * x + dx, 2 * y + dy ) rest
picture aut dep =
let top = 2^dep - 1
bnd = ((0,0),(top,top))
in array bnd $ do
p <- paths dep
let c = if is_accepted aut p then '*' else '.'
return ( position p, c )
-- | Pretty-print the character grid row by row, one space after each cell.
format :: Array (Int,Int) Char -> Doc
format a = vcat $ do
    let ((u,l),(d,r)) = bounds a
    row <- [ u .. d ]
    return $ hcat $ do
        col <- [ l .. r ]
        return $ text [ a ! (row,col), ' ' ]
| Erdwolf/autotool-bonn | src/misc/pic-lang/Main.hs | gpl-2.0 | 2,042 | 95 | 12 | 684 | 940 | 502 | 438 | 78 | 4 |
{- |
Module : ./SoftFOL/tests/PrintTPTPTests.hs
Copyright : (c) C. Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Main where
import SoftFOL.Sign
import SoftFOL.PrintTPTP
import Common.AS_Annotation
import Common.Id
-- | a more pretty alternative for shows using PrintTPTP
-- | Pretty alternative to 'shows', rendering through 'printTPTP'.
showPretty2 :: PrintTPTP a => a -> ShowS
showPretty2 x = shows (printTPTP x)
-- | Print every test fixture below through the TPTP pretty-printer.
main :: IO ()
main = do
  putStrLn "--- Term-Tests ---"
  putStrLn $ showPretty2 spSimpleTermTest1 "\n"
  putStrLn $ showPretty2 spQuantTermTest1 "\n"
  putStrLn $ showPretty2 spQuantTermTest2 "\n"
  putStrLn $ showPretty2 spQuantTermTest3 "\n"
  putStrLn $ showPretty2 spQuantTermTest4 "\n"
  putStrLn $ showPretty2 spQuantTermTest5 "\n"
  putStrLn "--- Formula-Test ---"
  print $ printFormula SPOriginAxioms spFormulaTest
  putStrLn "\n"
  putStrLn "--- FormulaList-Tests ---"
  putStrLn $ showPretty2 spFormulaListTest1 "\n"
  putStrLn $ showPretty2 spFormulaListTest2 "\n"
  putStrLn $ showPretty2 spFormulaListTest3 "\n"
  putStrLn $ showPretty2 spFormulaListTest4 "\n"
  putStrLn "--- Description-Tests ---"
  putStrLn $ showPretty2 spDescTest1 "\n"
  putStrLn $ showPretty2 spDescTest2 "\n"
  putStrLn "--- Problem-Test ---"
  putStrLn $ showPretty2 spProblemTest "\n"
  putStrLn "--- Declaration-Test ---"
  putStrLn $ showPretty2 spDeclTest "\n"
-- | A bare custom symbol.
spSimpleTermTest1 :: SPSymbol
spSimpleTermTest1 = mkSPCustomSymbol "testsymbol"
-- | forall a . a = a
spQuantTermTest1 :: SPTerm
spQuantTermTest1 = SPQuantTerm {quantSym = SPForall, variableList = [simpTerm (mkSPCustomSymbol "a")], qFormula = SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}}
-- | forall a b . Elem(a) = Elem(b)
spQuantTermTest2 :: SPTerm
spQuantTermTest2 = SPQuantTerm {quantSym = SPForall, variableList = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b")], qFormula = SPComplexTerm {symbol = SPEqual, arguments = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "b")]}
  ]}}
-- | Existential with complex (predicate-restricted) bound variables.
spQuantTermTest3 :: SPTerm
spQuantTermTest3 = SPQuantTerm {quantSym = SPExists, variableList = [SPComplexTerm {symbol = mkSPCustomSymbol "Klein", arguments = [simpTerm (mkSPCustomSymbol "pi")]}, SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]}],
  qFormula = SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "pi"), simpTerm (mkSPCustomSymbol "y")]}}
-- | Universal with a disjunction in the body.
spQuantTermTest4 :: SPTerm
spQuantTermTest4 = SPQuantTerm {quantSym = SPForall, variableList = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]}
  ],
  qFormula = SPComplexTerm {symbol = SPOr, arguments = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]}
  ]}}
-- | Custom quantifier symbol with negation in variables and body.
spQuantTermTest5 :: SPTerm
spQuantTermTest5 = SPQuantTerm {quantSym = SPCustomQuantSym $ mkSimpleId "T", variableList = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]},
  SPComplexTerm {symbol = SPNot, arguments = [simpTerm (mkSPCustomSymbol "blue")]}
  ],
  qFormula =
  SPComplexTerm {symbol = SPEqual, arguments = [
  SPComplexTerm {symbol = SPOr, arguments = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = SPNot, arguments = [simpTerm (mkSPCustomSymbol "blue")]}
  ]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]}
  ]}}
-- | Wrap a term as a named formula fixture.
toTestFormula :: SPTerm -> SPFormula
toTestFormula = makeNamed "testFormula"
-- | a = a, as a formula.
spFormulaTest :: SPFormula
spFormulaTest = toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}
-- | One axiom.
spFormulaListTest1 :: SPFormulaList
spFormulaListTest1 = SPFormulaList {originType = SPOriginAxioms, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}
-- | One conjecture.
spFormulaListTest2 :: SPFormulaList
spFormulaListTest2 = SPFormulaList {originType = SPOriginConjectures, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}
-- | Two axioms.
spFormulaListTest3 :: SPFormulaList
spFormulaListTest3 = SPFormulaList {originType = SPOriginAxioms, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}, toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}
-- | Two conjectures.
spFormulaListTest4 :: SPFormulaList
spFormulaListTest4 = SPFormulaList {originType = SPOriginConjectures, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}, toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}
-- | Description with all optional fields absent.
spDescTest1 :: SPDescription
spDescTest1 = SPDescription {name = "testdesc", author = "testauthor", version = Nothing, logic = Nothing, status = SPStateUnknown, desc = "Just a test.", date = Nothing}
-- | Description with all optional fields present.
spDescTest2 :: SPDescription
spDescTest2 = SPDescription {name = "testdesc", author = "testauthor", version = Just "0.1", logic = Just "logic description", status = SPStateUnknown, desc = "Just a test.", date = Just "today"}
-- | A full problem: description, declarations, one axiom list and one
-- conjecture list, no settings.
spProblemTest :: SPProblem
spProblemTest = SPProblem {identifier = "testproblem", description = descr, logicalPart = logical_part, settings = []}
  where
  descr = SPDescription {name = "testdesc", author = "testauthor", version = Nothing, logic = Nothing, status = SPStateUnknown, desc = "Just a test.", date = Nothing}
  logical_part = emptySPLogicalPart {
    declarationList = Just [spDeclTest, spDeclTest2],
    formulaLists = [SPFormulaList {originType = SPOriginAxioms, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}, SPFormulaList {originType = SPOriginConjectures, formulae = [toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}, toTestFormula SPComplexTerm {symbol = SPEqual, arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "a")]}]}]}
-- | A subsort declaration.
spDeclTest :: SPDeclaration
spDeclTest = SPSubsortDecl
  { sortSymA = mkSimpleId "sortSymA"
  , sortSymB = mkSimpleId "sortSymB" }
-- | A term declaration with bound variables and a disjunctive body.
spDeclTest2 :: SPDeclaration
spDeclTest2 = SPTermDecl {termDeclTermList = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]}
  ],
  termDeclTerm = SPComplexTerm {symbol = SPOr, arguments = [
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "y")]},
  SPComplexTerm {symbol = mkSPCustomSymbol "Elem", arguments = [simpTerm (mkSPCustomSymbol "a"), simpTerm (mkSPCustomSymbol "b"), simpTerm (mkSPCustomSymbol "c")]}
  ]}}
| spechub/Hets | SoftFOL/tests/PrintTPTPTests.hs | gpl-2.0 | 7,898 | 0 | 19 | 1,028 | 2,239 | 1,247 | 992 | 93 | 1 |
{-| Module : CollectFunctionBindings
License : GPL
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Helium.Parser.CollectFunctionBindings where
import Helium.Syntax.UHA_Syntax
import Helium.Syntax.UHA_Utils ()
import Helium.Syntax.UHA_Range
import Helium.Utils.Utils
-- Assumption: each FunctionBindings contains exactly one FunctionBinding
-- | Group the single-equation function bindings of a declaration list:
-- first attach feedback wrappers, then merge adjacent equations for the
-- same function into one 'Declaration_FunctionBindings'.
decls :: Declarations -> Declarations
decls ds = decls' (mergeFeedback ds)
-- | Re-attach 'FunctionBinding_Feedback' wrappers to the function binding
-- that follows them, widening the ranges accordingly.  A feedback wrapper
-- with no following function binding is left as produced by the recursion.
mergeFeedback :: Declarations -> Declarations
mergeFeedback [] = []
mergeFeedback (Declaration_FunctionBindings _ [FunctionBinding_Feedback rfb fb _]:ds) =
    case mergeFeedback ds of
        Declaration_FunctionBindings rdcls (funb : fbs) : mds ->
            Declaration_FunctionBindings
                (mergeRanges rfb rdcls)
                (FunctionBinding_Feedback (mergeRanges rfb $ rangeOfFunctionBinding funb) fb funb : fbs) : mds
        rs -> rs
mergeFeedback (x : xs) = x : mergeFeedback xs
-- | Merge runs of adjacent single-equation bindings that belong to the same
-- function (same 'nameOfDeclaration') into one grouped declaration whose
-- range spans the first to the last equation.
-- Precondition: every FunctionBindings node holds exactly one binding.
decls' :: Declarations -> Declarations
decls' [] = []
decls' (d@(Declaration_FunctionBindings _ [_]):ds) =
    let mn = nameOfDeclaration d
        (same, others) = span ((== mn) . nameOfDeclaration) (d:ds)
        fs = map functionBindingOfDeclaration same
    in Declaration_FunctionBindings
        (mergeRanges (rangeOfFunctionBinding (head fs)) (rangeOfFunctionBinding (last fs)))
        fs
       :
       decls' others
decls' (Declaration_FunctionBindings _ _:_) =
    internalError "CollectFunctionBindings" "decls" "not exactly one function binding in FunctionBindings"
decls' (d:ds) = d : decls' ds
-- | Extract the single binding of a one-equation FunctionBindings node;
-- internal error on any other declaration shape.
functionBindingOfDeclaration :: Declaration -> FunctionBinding
functionBindingOfDeclaration (Declaration_FunctionBindings _ [f]) = f
functionBindingOfDeclaration _ =
    internalError "CollectFunctionBindings" "getFunctionBinding" "unexpected declaration kind"
-- | Source range of a function binding.  Deliberately partial:
-- 'FunctionBinding_Hole' is not supported here.
rangeOfFunctionBinding :: FunctionBinding -> Range
rangeOfFunctionBinding (FunctionBinding_FunctionBinding r _ _) = r
rangeOfFunctionBinding (FunctionBinding_Feedback r _ _) = r
rangeOfFunctionBinding (FunctionBinding_Hole _ _) = error "not supported"
-- | Function name defined by a single-equation binding declaration, looking
-- through feedback wrappers; 'Nothing' for any other declaration.
nameOfDeclaration :: Declaration -> Maybe Name
nameOfDeclaration d =
    case d of
        Declaration_FunctionBindings _ [FunctionBinding_FunctionBinding _ l _] ->
            Just (nameOfLeftHandSide l)
        Declaration_FunctionBindings r [FunctionBinding_Feedback _ _ fb] ->
            nameOfDeclaration (Declaration_FunctionBindings r [fb])
        _ -> Nothing
-- | The function name of a left-hand side, looking through parentheses.
nameOfLeftHandSide :: LeftHandSide -> Name
nameOfLeftHandSide (LeftHandSide_Function _ name _)        = name
nameOfLeftHandSide (LeftHandSide_Infix _ _ name _)         = name
nameOfLeftHandSide (LeftHandSide_Parenthesized _ inner _)  = nameOfLeftHandSide inner
-- | Attach each 'Alternative_Feedback' wrapper to the alternative that
-- follows it in the list.
-- NOTE(review): when a feedback wrapper is the *last* element, the
-- @[] -> []@ case silently drops it entirely -- confirm that is intended.
mergeCaseFeedback :: Alternatives -> Alternatives
mergeCaseFeedback [] = []
mergeCaseFeedback (Alternative_Feedback r v _ : rs) =
    case mergeCaseFeedback rs of
        [] -> []
        (x : xs) -> Alternative_Feedback r v x : xs
mergeCaseFeedback (x : xs) = x : mergeCaseFeedback xs
| roberth/uu-helium | src/Helium/Parser/CollectFunctionBindings.hs | gpl-3.0 | 3,006 | 0 | 15 | 585 | 781 | 397 | 384 | 60 | 3 |
-- | Compose two lenses "through" an 'Applicative' layer: the getter maps
-- @view lens1@ over the @f@ structure, the setter combines container and
-- new values with @liftA2 (set lens2)@.
-- NOTE(review): the getter uses @lens1@ while the setter uses @lens2@ --
-- confirm this asymmetry is intended by callers.
-- The comment trail below is the original point-free derivation.
through :: Applicative f => Lens' s a -> Lens s t a b -> Lens (f s) (f t) (f a) (f b)
through lens1 lens2 =
    lens getBP (flip setBP)
    -- (\sa sbt afb s -> sbt s <$> afb (sa s)) getBP (flip setBP)
    -- (\sbt afb s -> sbt s <$> afb (getBP s)) (flip setBP)
    -- (\afb s -> (flip setBP) s <$> afb (getBP s))
    -- \afb s -> (flip setBP) s <$> afb (getBP s)
    -- \afb s -> (flip $ \x a -> liftA2 (lens2 .~) x a) s <$> afb (getBP s)
    -- \afb s -> (\a x -> liftA2 (lens2 .~) x a) s <$> afb (getBP s)
    -- \afb s -> (\x -> liftA2 (lens2 .~) x s) <$> afb (getBP s)
    -- \afb s -> (\x -> liftA2 (lens2 .~) x s) <$> afb ((^. lens1) <$> s)
    -- \afb s -> (\x -> liftA2 (lens2 .~) x s) <$> afb ((\s -> s ^. lens1) <$> s)
    -- \afb s -> (\x -> liftA2 (lens2 .~) x s) <$> afb ((\s -> getConst (lens1 Const s)) <$> s)
    -- \afb s -> (\x -> liftA2 (\s -> set lens2 s) x s) <$> afb ((\s -> getConst (lens1 Const s)) <$> s)
    -- \afb s -> (\x -> liftA2 (\b -> runIdentity .
    --     lens2 (\_ -> Identity b)) x s) <$> afb ((\s -> getConst (lens1 Const s)) <$> s)
    -- \afb s -> (\x -> liftA2 (\b -> runIdentity . lens2 (\_ -> Identity b)) x s)
    --     <$>
    --     afb ((\s -> getConst (lens1 Const s)) <$> s)
    -- \f s -> (\x -> (\b -> runIdentity . lens2 (const $ Identity b)) <$> x <*> s)
    --     <$>
    --     f ((\s -> getConst (lens1 Const s)) <$> s)
    -- \f s -> (\x -> liftA2 (\a b -> runIdentity $ (lens2 . const . Identity $ b) a) s x)
    --     <$>
    --     f ((getConst . lens1 Const) <$> s)
    -- \f s -> liftA2 ( \a b -> runIdentity (lens2 (const (Identity b)) a) ) s <$> (f ((getConst . lens1 Const) <$> s))
    -- \f s -> liftA2 ( \a -> runIdentity . flip lens2 a . const . Identity ) s <$> (f ((getConst . lens1 Const) <$> s))
    -- \f s -> liftA2 (\a -> runIdentity . (`lens2` a) . const . Identity) s <$> f (getConst <$> lens1 Const <$> s)
    where
        getBP = fmap (view lens1)
        setBP = liftA2 (set lens2)
{-# INLINE through #-}
| FranklinChen/music-score | sketch/old/Through.hs | bsd-3-clause | 1,953 | 0 | 10 | 541 | 151 | 84 | 67 | 6 | 1 |
-- | Evaluation of 64 bit values on 32 bit platforms.
module SPARC.CodeGen.Gen64 (
assignMem_I64Code,
assignReg_I64Code,
iselExpr64
)
where
import GhcPrelude
import {-# SOURCE #-} SPARC.CodeGen.Gen32
import SPARC.CodeGen.Base
import SPARC.CodeGen.Amode
import SPARC.Regs
import SPARC.AddrMode
import SPARC.Imm
import SPARC.Instr
import SPARC.Ppr()
import NCGMonad
import Instruction
import Format
import Reg
import Cmm
import DynFlags
import OrdList
import Outputable
-- | Code to assign a 64 bit value to memory.
--
--   On this 32-bit SPARC target the value is handled as two II32
--   halves: the low half in @rlo@ and the high half in the vreg
--   paired with it via 'getHiVRegFromLo'.
assignMem_I64Code
        :: CmmExpr              -- ^ expr producing the destination address
        -> CmmExpr              -- ^ expr producing the source value.
        -> NatM InstrBlock
assignMem_I64Code addrTree valueTree
 = do
     -- code to compute the value, and the vreg holding its low half
     ChildCode64 vcode rlo     <- iselExpr64 valueTree
     -- code to compute the destination address into a register
     (src, acode)              <- getSomeReg addrTree
     let
         -- the high half lives in the register paired with rlo
         rhi = getHiVRegFromLo rlo
         -- Big-endian store: high word at offset 0, low word at offset 4
         mov_hi = ST II32 rhi (AddrRegImm src (ImmInt 0))
         mov_lo = ST II32 rlo (AddrRegImm src (ImmInt 4))
         -- value code, then address code, then the two stores
         code = vcode `appOL` acode `snocOL` mov_hi `snocOL` mov_lo
     {- pprTrace "assignMem_I64Code"
        (vcat [ text "addrTree:  " <+> ppr addrTree
              , text "valueTree: " <+> ppr valueTree
              , text "vcode:"
              , vcat $ map ppr $ fromOL vcode
              , text ""
              , text "acode:"
              , vcat $ map ppr $ fromOL acode ])
        $ -}
     return code
-- | Code to assign a 64 bit value to a register.
--
--   Only 'CmmLocal' destinations are supported; anything else panics.
assignReg_I64Code
        :: CmmReg               -- ^ the destination register
        -> CmmExpr              -- ^ expr producing the source value
        -> NatM InstrBlock
assignReg_I64Code (CmmLocal (LocalReg u_dst pk)) valueTree
 = do
     ChildCode64 vcode r_src_lo <- iselExpr64 valueTree
     let
         r_dst_lo = RegVirtual $ mkVirtualReg u_dst (cmmTypeFormat pk)
         -- high halves are the registers paired with the low halves
         r_dst_hi = getHiVRegFromLo r_dst_lo
         r_src_hi = getHiVRegFromLo r_src_lo
         mov_lo = mkMOV r_src_lo r_dst_lo
         mov_hi = mkMOV r_src_hi r_dst_hi
         -- register-to-register move synthesised as OR with %g0
         mkMOV sreg dreg = OR False g0 (RIReg sreg) dreg
     return (vcode `snocOL` mov_hi `snocOL` mov_lo)
assignReg_I64Code _ _
   = panic "assignReg_I64Code(sparc): invalid lvalue"
-- | Get the value of an expression into a 64 bit register.
--
--   The result's low half is returned in the 'ChildCode64'; the high
--   half is always the vreg paired with it via 'getHiVRegFromLo'.
iselExpr64 :: CmmExpr -> NatM ChildCode64
-- Load a 64 bit word
iselExpr64 (CmmLoad addrTree ty)
 | isWord64 ty
 = do   Amode amode addr_code <- getAmode addrTree
        let result
                -- reg+reg addressing: materialise the sum into a tmp
                -- register so we can use two reg+imm loads from it
                | AddrRegReg r1 r2      <- amode
                = do    rlo     <- getNewRegNat II32
                        tmp     <- getNewRegNat II32
                        let rhi = getHiVRegFromLo rlo
                        -- high word at offset 0, low word at offset 4
                        return  $ ChildCode64
                                ( addr_code
                                `appOL`  toOL
                                         [ ADD False False r1 (RIReg r2) tmp
                                         , LD II32 (AddrRegImm tmp (ImmInt 0)) rhi
                                         , LD II32 (AddrRegImm tmp (ImmInt 4)) rlo ])
                                rlo
                -- reg+imm addressing: load directly at i and i+4
                | AddrRegImm r1 (ImmInt i) <- amode
                = do    rlo     <- getNewRegNat II32
                        let rhi = getHiVRegFromLo rlo
                        return  $ ChildCode64
                                ( addr_code
                                `appOL`  toOL
                                         [ LD II32 (AddrRegImm r1 (ImmInt $ 0 + i)) rhi
                                         , LD II32 (AddrRegImm r1 (ImmInt $ 4 + i)) rlo ])
                                rlo
                | otherwise
                = panic "SPARC.CodeGen.Gen64: no match"
        result
-- Add a literal to a 64 bit integer
iselExpr64 (CmmMachOp (MO_Add _) [e1, CmmLit (CmmInt i _)])
 = do   ChildCode64 code1 r1_lo <- iselExpr64 e1
        let r1_hi       = getHiVRegFromLo r1_lo
        r_dst_lo        <- getNewRegNat II32
        let r_dst_hi    = getHiVRegFromLo r_dst_lo
        -- NOTE(review): the two Bool flags on ADD appear to select
        -- set-carry / use-carry (addcc then addx) -- confirm against
        -- the ADD constructor in SPARC.Instr
        let code =      code1
                `appOL` toOL
                        [ ADD False True r1_lo (RIImm (ImmInteger i)) r_dst_lo
                        , ADD True  False r1_hi (RIReg g0) r_dst_hi ]
        return  $ ChildCode64 code r_dst_lo
-- Addition of II64
iselExpr64 (CmmMachOp (MO_Add _) [e1, e2])
 = do   ChildCode64 code1 r1_lo <- iselExpr64 e1
        let r1_hi       = getHiVRegFromLo r1_lo
        ChildCode64 code2 r2_lo <- iselExpr64 e2
        let r2_hi       = getHiVRegFromLo r2_lo
        r_dst_lo        <- getNewRegNat II32
        let r_dst_hi    = getHiVRegFromLo r_dst_lo
        -- add the low halves first (propagating carry), then the highs
        let code =      code1
                `appOL` code2
                `appOL` toOL
                        [ ADD False True r1_lo (RIReg r2_lo) r_dst_lo
                        , ADD True  False r1_hi (RIReg r2_hi) r_dst_hi ]
        return  $ ChildCode64 code r_dst_lo
-- Copy a local 64-bit register pair into a fresh pair
iselExpr64 (CmmReg (CmmLocal (LocalReg uq ty)))
 | isWord64 ty
 = do
     r_dst_lo <- getNewRegNat II32
     let r_dst_hi = getHiVRegFromLo r_dst_lo
         r_src_lo = RegVirtual $ mkVirtualReg uq II32
         r_src_hi = getHiVRegFromLo r_src_lo
         mov_lo = mkMOV r_src_lo r_dst_lo
         mov_hi = mkMOV r_src_hi r_dst_hi
         -- reg-to-reg move synthesised as OR with %g0
         mkMOV sreg dreg = OR False g0 (RIReg sreg) dreg
     return (
        ChildCode64 (toOL [mov_hi, mov_lo]) r_dst_lo
        )
-- Convert something into II64
iselExpr64 (CmmMachOp (MO_UU_Conv _ W64) [expr])
 = do
        r_dst_lo        <- getNewRegNat II32
        let r_dst_hi    = getHiVRegFromLo r_dst_lo
        -- compute expr and load it into r_dst_lo
        (a_reg, a_code) <- getSomeReg expr
        dflags <- getDynFlags
        let platform = targetPlatform dflags
            code        = a_code
                `appOL` toOL
                        [ mkRegRegMoveInstr platform g0    r_dst_hi       -- clear high 32 bits
                        , mkRegRegMoveInstr platform a_reg r_dst_lo ]
        return  $ ChildCode64 code r_dst_lo
iselExpr64 expr
   = pprPanic "iselExpr64(sparc)" (ppr expr)
| ezyang/ghc | compiler/nativeGen/SPARC/CodeGen/Gen64.hs | bsd-3-clause | 6,153 | 0 | 25 | 2,381 | 1,420 | 700 | 720 | 129 | 1 |
--------------------------------------------------------------------
-- |
-- Module : Text.Atom.Feed.Export
-- Copyright : (c) Galois, Inc. 2008,
-- (c) Sigbjorn Finne 2009-
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability:: portable
-- Description: Convert from Atom to XML
--
-- Convert from Atom to XML
--
--------------------------------------------------------------------
module Text.Atom.Feed.Export where
import Text.XML.Light as XML
import Text.Atom.Feed
-- | Optional namespace prefix for Atom elements.  'Nothing' makes the
-- Atom namespace the default (unprefixed) namespace of the document.
atom_prefix :: Maybe String
atom_prefix = Nothing -- Just "atom"
-- | Namespace prefix for Atom Threading extension elements.
atom_thr_prefix :: Maybe String
atom_thr_prefix = Just "thr"
-- | The Atom 1.0 XML namespace URI.
atomNS :: String
atomNS = "http://www.w3.org/2005/Atom"
-- | The Atom Threading extension namespace URI.
atomThreadNS :: String
atomThreadNS = "http://purl.org/syndication/thread/1.0"
-- | The @xmlns@ attribute that binds the Atom namespace: a default
-- namespace declaration when 'atom_prefix' is 'Nothing', otherwise a
-- prefixed @xmlns:\<prefix\>@ declaration.
xmlns_atom :: Attr
xmlns_atom = Attr (maybe unprefixed prefixed atom_prefix) atomNS
 where
   -- xmlns="..."  (default namespace declaration)
   unprefixed = QName { qName   = "xmlns"
                      , qURI    = Nothing
                      , qPrefix = Nothing
                      }
   -- xmlns:<s>="..."
   prefixed s = QName { qName   = s
                      , qURI    = Nothing -- XXX: is this ok?
                      , qPrefix = Just "xmlns"
                      }
-- | The @xmlns@ attribute declaring the Atom Threading namespace.
--
-- Bug fix: the original inspected 'atom_prefix' (which is 'Nothing')
-- instead of 'atom_thr_prefix'.  That produced a bare
-- @xmlns="http:\/\/purl.org\/syndication\/thread\/1.0"@, clobbering the
-- default Atom namespace on the element it is attached to, while the
-- @thr@ prefix used by 'atomThreadName' was never declared at all.
-- Inspecting 'atom_thr_prefix' (@Just "thr"@) yields the intended
-- @xmlns:thr=...@ declaration.
xmlns_atom_thread :: Attr
xmlns_atom_thread = Attr qn atomThreadNS
 where
  qn = case atom_thr_prefix of
         Nothing -> QName { qName   = "xmlns"
                          , qURI    = Nothing
                          , qPrefix = Nothing
                          }
         Just s  -> QName { qName   = s
                          , qURI    = Nothing -- XXX: is this ok?
                          , qPrefix = Just "xmlns"
                          }
-- | Qualify a local name with the Atom namespace and 'atom_prefix'.
atomName :: String -> QName
atomName nc = QName { qName  = nc
                    , qURI   = Just atomNS
                    , qPrefix = atom_prefix
                    }
-- | An attribute whose name is qualified with the Atom namespace.
atomAttr :: String -> String -> Attr
atomAttr x y = Attr (atomName x) y
-- | An Atom-namespaced element holding the given child content.
atomNode :: String -> [XML.Content] -> XML.Element
atomNode x xs = blank_element { elName = atomName x, elContent = xs }
-- | An Atom-namespaced element holding a single text node.
atomLeaf :: String -> String -> XML.Element
atomLeaf tag txt = blank_element
                 { elName    = atomName tag
                 , elContent = [ Text blank_cdata { cdData = txt } ]
                 }
-- | Qualify a local name with the Atom Threading namespace and
-- 'atom_thr_prefix'.
atomThreadName :: String -> QName
atomThreadName nc =
  QName { qName  = nc
        , qURI   = Just atomThreadNS
        , qPrefix = atom_thr_prefix
        }
-- | An attribute whose name is qualified with the Threading namespace.
atomThreadAttr :: String -> String -> Attr
atomThreadAttr x y = Attr (atomThreadName x) y
-- | A Threading-namespaced element holding the given child content.
atomThreadNode :: String -> [XML.Content] -> XML.Element
atomThreadNode x xs =
  blank_element { elName = atomThreadName x, elContent = xs }
-- | A Threading-namespaced element holding a single text node.
atomThreadLeaf :: String -> String -> XML.Element
atomThreadLeaf tag txt =
  blank_element { elName = atomThreadName tag
                , elContent = [ Text blank_cdata { cdData = txt } ]
                }
--------------------------------------------------------------------------------
-- | Render a 'Feed' as an XML @\<feed\>@ element.  The Atom namespace
-- declaration ('xmlns_atom') is attached to this root element.
-- Required children (title, id, updated) come first, then the optional
-- and repeatable ones, then any extension elements from 'feedOther'.
xmlFeed :: Feed -> XML.Element
xmlFeed f = ( atomNode "feed"
            $ map Elem
            $ [ xmlTitle (feedTitle f) ]
           ++ [ xmlId (feedId f) ]
           ++ [ xmlUpdated (feedUpdated f) ]
           ++ map xmlLink (feedLinks f)
           ++ map xmlAuthor (feedAuthors f)
           ++ map xmlCategory (feedCategories f)
           ++ map xmlContributor (feedContributors f)
           ++ mb xmlGenerator (feedGenerator f)
           ++ mb xmlIcon (feedIcon f)
           ++ mb xmlLogo (feedLogo f)
           ++ mb xmlRights (feedRights f)
           ++ mb xmlSubtitle (feedSubtitle f)
           ++ map xmlEntry (feedEntries f)
           ++ feedOther f )
             { elAttribs = [xmlns_atom] }
-- | Render an 'Entry' as an XML @\<entry\>@ element, carrying over any
-- extra attributes from 'entryAttrs' and extension elements from
-- 'entryOther'.
xmlEntry :: Entry -> XML.Element
xmlEntry e = ( atomNode "entry"
             $ map Elem
             $ [ xmlId (entryId e) ]
            ++ [ xmlTitle (entryTitle e) ]
            ++ [ xmlUpdated (entryUpdated e) ]
            ++ map xmlAuthor (entryAuthors e)
            ++ map xmlCategory (entryCategories e)
            ++ mb xmlContent (entryContent e)
            ++ map xmlContributor (entryContributor e)
            ++ map xmlLink (entryLinks e)
            ++ mb xmlPublished (entryPublished e)
            ++ mb xmlRights (entryRights e)
            ++ mb xmlSource (entrySource e)
            ++ mb xmlSummary (entrySummary e)
            ++ mb xmlInReplyTo (entryInReplyTo e)
            ++ mb xmlInReplyTotal (entryInReplyTotal e)
            ++ entryOther e )
              { elAttribs = entryAttrs e }
-- | Render entry content as a @\<content\>@ element.  The @type@
-- attribute records which variant was used (text\/html\/xhtml, a caller
-- supplied type, or a media type for out-of-line @src@ content).
xmlContent :: EntryContent -> XML.Element
xmlContent cont = case cont of
  TextContent t -> (atomLeaf "content" t)
                     { elAttribs = [ atomAttr "type" "text" ] }
  HTMLContent t -> (atomLeaf "content" t)
                     { elAttribs = [ atomAttr "type" "html" ] }
  XHTMLContent x -> (atomNode "content" [ Elem x ])
                     { elAttribs = [ atomAttr "type" "xhtml" ] }
  MixedContent mbTy cs -> (atomNode "content" cs)
                            { elAttribs = mb (atomAttr "type") mbTy }
  ExternalContent mbTy src -> (atomNode "content" [])
                                { elAttribs = [ atomAttr "src" src ]
                                            ++ mb (atomAttr "type") mbTy }
-- | Render a 'Category' as a @\<category\>@ element; @term@ is
-- mandatory, @scheme@ and @label@ are emitted only when present.
xmlCategory :: Category -> XML.Element
xmlCategory c = (atomNode "category" (map Elem (catOther c)))
                  { elAttribs = [ atomAttr "term" (catTerm c) ]
                             ++ mb (atomAttr "scheme") (catScheme c)
                             ++ mb (atomAttr "label") (catLabel c)
                  }
-- | Render a 'Link' as a @\<link\>@ element; @href@ is mandatory, the
-- remaining attributes are optional.  'linkRel' is an 'Either' whose
-- both sides carry the rel string, hence @either id id@.
xmlLink :: Link -> XML.Element
xmlLink l = (atomNode "link" (map Elem (linkOther l)))
              { elAttribs = [ atomAttr "href" (linkHref l) ]
                         ++ mb (atomAttr "rel" . either id id) (linkRel l)
                         ++ mb (atomAttr "type") (linkType l)
                         ++ mb (atomAttr "hreflang") (linkHrefLang l)
                         ++ mb (atomAttr "title") (linkTitle l)
                         ++ mb (atomAttr "length") (linkLength l)
                         ++ linkAttrs l
              }
-- | Render source-feed metadata as a @\<source\>@ element; every child
-- is optional or repeatable.
xmlSource :: Source -> Element
xmlSource s = atomNode "source"
            $ map Elem
            $ sourceOther s
           ++ map xmlAuthor (sourceAuthors s)
           ++ map xmlCategory (sourceCategories s)
           ++ mb xmlGenerator (sourceGenerator s)
           ++ mb xmlIcon (sourceIcon s)
           ++ mb xmlId (sourceId s)
           ++ map xmlLink (sourceLinks s)
           ++ mb xmlLogo (sourceLogo s)
           ++ mb xmlRights (sourceRights s)
           ++ mb xmlSubtitle (sourceSubtitle s)
           ++ mb xmlTitle (sourceTitle s)
           ++ mb xmlUpdated (sourceUpdated s)
-- | Render a 'Generator' as a @\<generator\>@ element whose text is the
-- generator name, with optional @uri@ and @version@ attributes.
xmlGenerator :: Generator -> Element
xmlGenerator g = (atomLeaf "generator" (genText g))
                   { elAttribs = mb (atomAttr "uri") (genURI g)
                              ++ mb (atomAttr "version") (genVersion g)
                   }
-- | Render a 'Person' as an @\<author\>@ element.
xmlAuthor :: Person -> XML.Element
xmlAuthor p = atomNode "author" (xmlPerson p)
-- | Render a 'Person' as a @\<contributor\>@ element.
xmlContributor :: Person -> XML.Element
xmlContributor c = atomNode "contributor" (xmlPerson c)
-- | Shared person construct: mandatory @\<name\>@, optional @\<uri\>@
-- and @\<email\>@, plus any extension elements.
xmlPerson :: Person -> [XML.Content]
xmlPerson p = map Elem $
                 [ atomLeaf "name" (personName p) ]
              ++ mb (atomLeaf "uri") (personURI p)
              ++ mb (atomLeaf "email") (personEmail p)
              ++ personOther p
-- | Render an Atom Threading @\<thr:in-reply-to\>@ element.  The @ref@
-- attribute is always present; @href@, @type@ and @source@ only when
-- set.  (The original wrapped the mandatory @ref@ in @mb ... (Just ...)@,
-- which is just a singleton list.)
xmlInReplyTo :: InReplyTo -> XML.Element
xmlInReplyTo irt =
  (atomThreadNode "in-reply-to" (replyToContent irt))
    { elAttribs =
         [ atomThreadAttr "ref" (replyToRef irt) ]
      ++ mb (atomThreadAttr "href")   (replyToHRef irt)
      ++ mb (atomThreadAttr "type")   (replyToType irt)
      ++ mb (atomThreadAttr "source") (replyToSource irt)
      ++ replyToOther irt
    }
-- | Render an Atom Threading @\<thr:total\>@ element holding the reply
-- count as decimal text.
xmlInReplyTotal :: InReplyTotal -> XML.Element
xmlInReplyTotal irt =
 (atomThreadLeaf "total" (show $ replyToTotal irt))
  { elAttribs = replyToTotalOther irt }
-- | @\<id\>@ leaf element.
xmlId :: String -> XML.Element
xmlId i = atomLeaf "id" i
-- | @\<icon\>@ leaf element.
xmlIcon :: URI -> XML.Element
xmlIcon i = atomLeaf "icon" i
-- | @\<logo\>@ leaf element.
xmlLogo :: URI -> XML.Element
xmlLogo l = atomLeaf "logo" l
-- | @\<updated\>@ leaf element.
xmlUpdated :: Date -> XML.Element
xmlUpdated u = atomLeaf "updated" u
-- | @\<published\>@ leaf element.
xmlPublished :: Date -> XML.Element
xmlPublished p = atomLeaf "published" p
-- | @\<rights\>@ text construct.
xmlRights :: TextContent -> XML.Element
xmlRights r = xmlTextContent "rights" r
-- | @\<title\>@ text construct.
xmlTitle :: TextContent -> XML.Element
xmlTitle r = xmlTextContent "title" r
-- | @\<subtitle\>@ text construct.
xmlSubtitle :: TextContent -> XML.Element
xmlSubtitle s = xmlTextContent "subtitle" s
-- | @\<summary\>@ text construct.
xmlSummary :: TextContent -> XML.Element
xmlSummary s = xmlTextContent "summary" s
-- | Render a 'TextContent' under the given tag, marking the variant
-- with an Atom @type@ attribute (text, html or xhtml).
xmlTextContent :: String -> TextContent -> XML.Element
xmlTextContent tg t =
  case t of
    TextString s  -> (atomLeaf tg s) { elAttribs = [atomAttr "type" "text"] }
    HTMLString s  -> (atomLeaf tg s) { elAttribs = [atomAttr "type" "html"] }
    XHTMLString e -> (atomNode tg [XML.Elem e])
                       { elAttribs = [atomAttr "type" "xhtml"] }
--------------------------------------------------------------------------------
-- | Lift an optional value into a list: apply @f@ and yield a
-- singleton on 'Just', or the empty list on 'Nothing'.  Handy for
-- splicing optional children\/attributes into element lists via '++'.
mb :: (a -> b) -> Maybe a -> [b]
mb f = maybe [] (\v -> [f v])
| seereason/feed | Text/Atom/Feed/Export.hs | bsd-3-clause | 9,090 | 8 | 25 | 2,970 | 2,659 | 1,363 | 1,296 | 189 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Toml.Parser.Spec where
import Test.Tasty (TestTree)
import Test.Tasty.Hspec
import Data.HashMap.Strict (fromList)
import Data.Time.Calendar (Day (..))
import Data.Time.Clock (UTCTime (..))
import Text.Toml.Parser
-- | Hspec\/Tasty suite exercising the individual TOML parsers
-- ('tomlDoc', 'headerValue', 'tableHeader', 'tableArrayHeader',
-- 'assignment', and the scalar\/array value parsers).  Each example
-- feeds a literal TOML fragment through 'parseOnly' and checks either
-- the exact parse result ('testParser') or that parsing fails
-- ('testParserFails'); see the helpers in the @where@ clause.
tomlParserSpec :: IO TestTree
tomlParserSpec = testSpec "Parser Hspec suite" $ do
  describe "Parser.tomlDoc generic" $ do
    it "should parse empty input" $
      testParser tomlDoc "" $ fromList []
    it "should parse non-empty tomlDocs that do not end with a newline" $
      testParser tomlDoc "number = 123" $
        fromList [("number", NTValue $ VInteger 123)]
    it "should parse when tomlDoc ends in a comment" $
      testParser tomlDoc "q = 42  # understood?" $
        fromList [("q", NTValue $ VInteger 42)]
    it "should not parse re-assignment of key" $
      testParserFails tomlDoc "q=42\nq=42"
    it "should not parse rubbish" $
      testParserFails tomlDoc "{"
  describe "Parser.tomlDoc (named tables)" $ do
    it "should parse simple named table" $
      testParser tomlDoc "[a]\naa = 108" $
        fromList [("a", NTable (fromList [("aa", NTValue $ VInteger 108)] ))]
    it "should not parse redefined table header (key already exists at scope)" $
      testParser tomlDoc "[a]\n[a]" $ fromList [("a", emptyNTable)]
    it "should parse redefinition of implicit key" $
      testParser tomlDoc "[a.b]\n[a]" $
        fromList [("a", NTable (fromList [("b", emptyNTable)] ))]
    it "should parse redefinition of implicit key, with table contents" $
      testParser tomlDoc "[a.b]\nb=3\n[a]\na=4" $
        fromList [("a", NTable (fromList [("b", NTable (fromList [("b", NTValue $ VInteger 3)])),
                                          ("a", NTValue $ VInteger 4)]))]
    it "should parse redefinition by implicit table header" $
      testParser tomlDoc "[a]\n[a.b]" $
        fromList [("a", NTable (fromList [("b", emptyNTable)] ))]
    it "should not parse redefinition key" $
      testParserFails tomlDoc "[a]\nb=1\n[a.b]"
  describe "Parser.tomlDoc (tables arrays)" $ do
    it "should parse a simple empty table array" $
      testParser tomlDoc "[[a]]\n[[a]]" $
        fromList [("a", NTArray [ fromList []
                                , fromList [] ] )]
    it "should parse a simple table array with content" $
      testParser tomlDoc "[[a]]\na1=1\n[[a]]\na2=2" $
        fromList [("a", NTArray [ fromList [("a1", NTValue $ VInteger 1)]
                                , fromList [("a2", NTValue $ VInteger 2)] ] )]
    it "should not allow a simple table array to be inserted into a non table array" $
      testParserFails tomlDoc "a = [1,2,3]\n[[a]]"
    it "should parse a simple empty nested table array" $
      testParser tomlDoc "[[a.b]]\n[[a.b]]" $
        fromList [("a", NTable (fromList [("b", NTArray [ emptyTable
                                                        , emptyTable ] )] ) )]
    it "should parse a simple non empty table array" $
      testParser tomlDoc "[[a.b]]\na1=1\n[[a.b]]\na2=2" $
        fromList [("a", NTable (fromList [("b", NTArray [ fromList [("a1", NTValue $ VInteger 1)]
                                                        , fromList [("a2", NTValue $ VInteger 2)]
                                                        ] )] ) )]
    it "should parse redefined implicit table header" $
      testParserFails tomlDoc "[[a.b]]\n[[a]]"
    it "should parse redefinition by implicit table header" $
      testParser tomlDoc "[[a]]\n[[a.b]]" $
        fromList [("a", NTArray [ fromList [("b", NTArray [ fromList [] ])] ] )]
  describe "Parser.tomlDoc (mixed named tables and tables arrays)" $ do
    it "should not parse redefinition of key by table header (table array by table)" $
      testParserFails tomlDoc "[[a]]\n[a]"
    it "should not parse redefinition of key by table header (table by table array)" $
      testParserFails tomlDoc "[a]\n[[a]]"
    it "should not parse redefinition implicit table header (table by array)" $
      testParserFails tomlDoc "[a.b]\n[[a]]"
    it "should parse redefined implicit table header (array by table)" $
      testParser tomlDoc "[[a.b]]\n[a]" $
        fromList [("a", NTable (fromList [("b", NTArray [ fromList [] ])] ) )]
    it "should not parse redefined implicit table header (array by table), when keys collide" $
      testParserFails tomlDoc "[[a.b]]\n[a]\nb=1"
    it "should insert sub-key of regular table in most recently defined table array" $
      testParser tomlDoc "[[a]]\ni=0\n[[a]]\ni=1\n[a.b]" $
        fromList [("a", NTArray [ fromList [ ("i", NTValue $ VInteger 0) ]
                                , fromList [ ("b", NTable $ fromList [] )
                                           , ("i", NTValue $ VInteger 1) ]
                                ] )]
    it "should insert sub-key of table array" $
      testParser tomlDoc "[a]\n[[a.b]]" $
        fromList [("a", NTable (fromList [("b", NTArray [fromList []])] ) )]
    it "should insert sub-key (with content) of table array" $
      testParser tomlDoc "[a]\nq=42\n[[a.b]]\ni=0" $
        fromList [("a", NTable (fromList [ ("q", NTValue $ VInteger 42),
                                           ("b", NTArray [
                                              fromList [("i", NTValue $ VInteger 0)]
                                           ]) ]) )]
  describe "Parser.headerValue" $ do
    it "should parse simple table header" $
      testParser headerValue "table" ["table"]
    it "should parse simple nested table header" $
      testParser headerValue "main.sub" ["main", "sub"]
    it "should not parse just a dot (separator)" $
      testParserFails headerValue "."
    it "should not parse an empty most right name" $
      testParserFails headerValue "first."
    it "should not parse an empty most left name" $
      testParserFails headerValue ".second"
    it "should not parse an empty middle name" $
      testParserFails headerValue "first..second"
  describe "Parser.tableHeader" $ do
    it "should not parse an empty table header" $
      testParserFails tableHeader "[]"
    it "should parse simple table header" $
      testParser tableHeader "[item]" ["item"]
    it "should parse simple nested table header" $
      testParser tableHeader "[main.sub]" ["main", "sub"]
  describe "Parser.tableArrayHeader" $ do
    it "should not parse an empty table header" $
      testParserFails tableArrayHeader "[[]]"
    it "should parse simple table array header" $
      testParser tableArrayHeader "[[item]]" ["item"]
    it "should parse simple nested table array header" $
      testParser tableArrayHeader "[[main.sub]]" ["main", "sub"]
  describe "Parser.assignment" $ do
    it "should parse simple example" $
      testParser assignment "country = \"\"" ("country", VString "")
    it "should parse without spacing around the assignment operator" $
      testParser assignment "a=108" ("a", VInteger 108)
    it "should parse when value on next line" $
      testParser assignment "a =\n108" ("a", VInteger 108)
    it "should parse when assignment operator and value are on the next line" $
      testParser assignment "a\n= 108" ("a", VInteger 108)
    it "should parse when key, value and assignment operator are on separate lines" $
      testParser assignment "a\n=\n108" ("a", VInteger 108)
  describe "Parser.boolean" $ do
    it "should parse true" $
      testParser boolean "true" $ VBoolean True
    it "should parse false" $
      testParser boolean "false" $ VBoolean False
    it "should not parse capitalized variant" $
      testParserFails boolean "False"
  describe "Parser.basicStr" $ do
    it "should parse the common escape sequences in basic strings" $
      testParser basicStr "\"123\\b\\t\\n\\f\\r\\\"\\/\\\\\"" $ VString "123\b\t\n\f\r\"/\\"
    it "should parse the simple unicode value from the example" $
      testParser basicStr "\"中国\"" $ VString "中国"
    it "should parse escaped 4 digit unicode values" $
      testParser assignment "special_k = \"\\u0416\"" ("special_k", VString "Ж")
    it "should parse escaped 8 digit unicode values" $
      testParser assignment "g_clef = \"\\U0001D11e\"" ("g_clef", VString "𝄞")
    it "should not parse escaped unicode values with missing digits" $
      testParserFails assignment "g_clef = \"\\U1D11e\""
  describe "Parser.multiBasicStr" $ do
    it "should parse simple example" $
      testParser multiBasicStr "\"\"\"thorrough\"\"\"" $ VString "thorrough"
    it "should parse with newlines" $
      testParser multiBasicStr "\"\"\"One\nTwo\"\"\"" $ VString "One\nTwo"
    it "should parse with escaped newlines" $
      testParser multiBasicStr "\"\"\"One\\\nTwo\"\"\"" $ VString "OneTwo"
    it "should parse newlines, ignoring 1 leading newline" $
      testParser multiBasicStr "\"\"\"\nOne\\\nTwo\"\"\"" $ VString "OneTwo"
    it "should parse with espaced whitespace" $
      testParser multiBasicStr "\"\"\"\\\n\
                               \Quick \\\n\
                               \\\\n\
                               \Jumped \\\n\
                               \Lazy\\\n\
                               \ \"\"\"" $ VString "Quick Jumped Lazy"
  describe "Parser.literalStr" $ do
    it "should parse literally" $
      testParser literalStr "'\"Your\" folder: \\\\User\\new\\tmp\\'" $
        VString "\"Your\" folder: \\\\User\\new\\tmp\\"
    it "has no notion of 'escaped single quotes'" $
      testParserFails tomlDoc "q = 'I don\\'t know.'"  -- string terminates before the "t"
  describe "Parser.multiLiteralStr" $ do
    it "should parse literally" $
      testParser multiLiteralStr
        "'''\nFirst newline is dropped.\n   Other whitespace,\n  is preserved -- isn't it?'''"
        $ VString "First newline is dropped.\n   Other whitespace,\n  is preserved -- isn't it?"
  describe "Parser.datetime" $ do
    it "should parse a JSON formatted datetime string in zulu timezone" $
      testParser datetime "1979-05-27T07:32:00Z" $
        VDatetime $ UTCTime (ModifiedJulianDay 44020) 27120
    it "should not parse only dates" $
      testParserFails datetime "1979-05-27"
    it "should not parse without the Z" $
      testParserFails datetime "1979-05-27T07:32:00"
  describe "Parser.float" $ do
    it "should parse positive floats" $
      testParser float "3.14" $ VFloat 3.14
    it "should parse positive floats with plus sign" $
      testParser float "+3.14" $ VFloat 3.14
    it "should parse negative floats" $
      testParser float "-0.1" $ VFloat (-0.1)
    it "should parse more or less zero float" $
      testParser float "0.0" $ VFloat 0.0
    it "should parse 'scientific notation' ('e'-notation)" $
      testParser float "1.5e6" $ VFloat 1500000.0
    it "should parse 'scientific notation' ('e'-notation) with upper case E" $
      testParser float "1E0" $ VFloat 1.0
    it "should not accept floats starting with a dot" $
      testParserFails float ".5"
    it "should not accept floats without any decimals" $
      testParserFails float "5."
  describe "Parser.integer" $ do
    it "should parse positive integers" $
      testParser integer "108" $ VInteger 108
    it "should parse negative integers" $
      testParser integer "-1" $ VInteger (-1)
    it "should parse zero" $
      testParser integer "0" $ VInteger 0
    it "should parse integers prefixed with a plus" $
      testParser integer "+42" $ VInteger 42
  describe "Parser.tomlDoc arrays" $ do
    it "should parse an empty array" $
      testParser array "[]" $ VArray []
    it "should parse an empty array with whitespace" $
      testParser array "[ ]" $ VArray []
    it "should not parse an empty array with only a terminating comma" $
      testParserFails array "[,]"
    it "should parse an empty array of empty arrays" $
      testParser array "[[],[]]" $ VArray [ VArray [], VArray [] ]
    it "should parse an empty array of empty arrays with whitespace" $
      testParser array "[ \n[ ]\n ,\n [ \n ]  ,\n ]" $ VArray [ VArray [], VArray [] ]
    it "should parse nested arrays" $
      testParser assignment "d = [ ['gamma', 'delta'], [1, 2] ]"
        $ ("d", VArray [ VArray [ VString "gamma"
                                , VString "delta" ]
                       , VArray [ VInteger 1
                                , VInteger 2 ] ])
    it "should allow linebreaks in an array" $
      testParser assignment "hosts = [\n'alpha',\n'omega'\n]"
        $ ("hosts", VArray [VString "alpha", VString "omega"])
    it "should allow some linebreaks in an array" $
      testParser assignment "hosts = ['alpha' ,\n'omega']"
        $ ("hosts", VArray [VString "alpha", VString "omega"])
    it "should allow linebreaks in an array, with comments" $
      testParser assignment "hosts = [\n\
                            \'alpha',  # the first\n\
                            \'omega'   # the last\n\
                            \]"
        $ ("hosts", VArray [VString "alpha", VString "omega"])
    it "should allow linebreaks in an array, with comments, and terminating comma" $
      testParser assignment "hosts = [\n\
                            \'alpha',  # the first\n\
                            \'omega',  # the last\n\
                            \]"
        $ ("hosts", VArray [VString "alpha", VString "omega"])
    it "inside an array, all element should be of the same type" $
      testParserFails array "[1, 2.0]"
    it "inside an array of arrays, this inner arrays may contain values of different types" $
      testParser array "[[1], [2.0], ['a']]" $
        VArray [ VArray [VInteger 1], VArray [VFloat 2.0], VArray [VString "a"] ]
    it "all string variants are of the same type of the same type" $
      testParser assignment "data = [\"a\", \"\"\"b\"\"\", 'c', '''d''']" $
        ("data", VArray [ VString "a", VString "b",
                          VString "c", VString "d" ])
    it "should parse terminating commas in arrays" $
      testParser array "[1, 2, ]" $ VArray [ VInteger 1, VInteger 2 ]
    it "should parse terminating commas in arrays(2)" $
      testParser array "[1,2,]" $ VArray [ VInteger 1, VInteger 2 ]
  where
    -- True iff parser @p@ consumes all of @str@ and yields exactly
    -- @success@.
    testParser p str success = case parseOnly p str of Left  _ -> False
                                                       Right x -> x == success
    -- True iff parser @p@ rejects @str@.
    testParserFails p str = case parseOnly p str of Left  _ -> True
                                                    Right _ -> False
| amitai-hoze/htoml | test/Text/Toml/Parser/Spec.hs | bsd-3-clause | 14,698 | 0 | 26 | 4,331 | 3,071 | 1,423 | 1,648 | 249 | 3 |
module Main ( main ) where
import Data.Set ( Set )
import qualified Data.Set as S
import System.FilePath
import Test.HUnit ( assertEqual )
import LLVM.Analysis
import LLVM.Analysis.CallGraph
import LLVM.Analysis.PointsTo.TrivialFunction
import LLVM.Analysis.CallGraphSCCTraversal
import LLVM.Analysis.Util.Testing
import LLVM.Parse
-- | Run the call-graph SCC traversal-order tests over the fixtures
-- matched by 'cgPattern', comparing each computed order against its
-- @.expected@ file.
-- NOTE(review): the flag list is presumably passed to opt when the
-- bitcode is built -- confirm in LLVM.Analysis.Util.Testing.
main :: IO ()
main = testAgainstExpected ["-mem2reg", "-basicaa"] bcParser testDescriptors
  where
    -- parse each compiled module with default options
    bcParser = parseLLVMFile defaultParserOptions
-- | One test family: glob the C sources with 'cgPattern', map each to
-- its expected-output file via 'expectedMapper', build the result with
-- 'extractTraversalOrder', and compare using HUnit's 'assertEqual'.
testDescriptors :: [TestDescriptor]
testDescriptors = [ TestDescriptor { testPattern = cgPattern
                                   , testExpectedMapping = expectedMapper
                                   , testResultBuilder = extractTraversalOrder
                                   , testResultComparator = assertEqual
                                   }
                  ]
-- | Glob pattern locating the C test inputs for this suite.
cgPattern :: String
cgPattern = "tests/callgraph/order/*.c"
-- | Map a test input path to its expected-output path by appending an
-- @.expected@ extension.
expectedMapper :: FilePath -> FilePath
expectedMapper path = path <.> "expected"
-- | Compute the call-graph SCC traversal order for a module, as the
-- list of per-SCC function-name sets accumulated by 'buildSummary'.
--
-- Both the serial and the parallel traversal are run and must agree,
-- so this also cross-checks 'parallelCallGraphSCCTraversal' against
-- 'callGraphSCCTraversal'.
--
-- Changes from the original: @case b of True\/False@ replaced by
-- guards, and the partial pattern binding @Just mainFunc = findMain m@
-- replaced with an explicit, descriptive error on 'Nothing'.
extractTraversalOrder :: Module -> [Set String]
extractTraversalOrder m
  | res == pres = res
  | otherwise   = error "Mismatch between serial and parallel result"
  where
    mainFunc = case findMain m of
      Just f  -> f
      Nothing -> error "extractTraversalOrder: no main function in module"
    pta = runPointsToAnalysis m
    cg = callGraph m pta [mainFunc]
    res = callGraphSCCTraversal cg buildSummary []
    pres = parallelCallGraphSCCTraversal cg buildSummary []
-- | Summarization function for the call-graph traversals: prepend the
-- set of function names in one SCC onto the running summary list.
-- (Also strips the extraction junk that had been fused onto the last
-- line of the original definition.)
buildSummary :: [Function] -> [Set String] -> [Set String]
buildSummary scc summ = S.fromList fnames : summ
  where
    fnames = map (identifierAsString . functionName) scc
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances #-}
module ShouldFail where
class Foo f a r | f a -> r where
foo::f->a->r
-- These instances are incompatible because we can unify
-- the first two paramters, though it's rather obscure:
-- p -> (a,b)
-- t -> (,) (a,a)
-- c -> (,) a
-- r -> s
--
-- So a constraint which would sow this up is
-- Foo ((Int,Int)->Int)
-- ((Int,Int), (Int,Int))
-- t
-- This matches both. Not easy to spot, and the error
-- message would be improved by giving the unifier, or
-- a witness.
instance Foo (p->s) (t p) (t s)
instance Foo ((a,b)->r) (c a,c b)(c r)
| vTurbine/ghc | testsuite/tests/typecheck/should_fail/tcfail096.hs | bsd-3-clause | 693 | 0 | 8 | 188 | 124 | 73 | 51 | 7 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcPat: Typechecking patterns
-}
{-# LANGUAGE CPP, RankNTypes #-}
module TcPat ( tcLetPat, TcSigFun, TcPragFun
, TcSigInfo(..), TcPatSynInfo(..)
, findScopedTyVars, isPartialSig
, completeSigPolyId, completeSigPolyId_maybe
, LetBndrSpec(..), addInlinePrags, warnPrags
, tcPat, tcPats, newNoSigLetBndr
, addDataConStupidTheta, badFieldCon, polyPatSig ) where
#include "HsVersions.h"
import {-# SOURCE #-} TcExpr( tcSyntaxOp, tcInferRho)
import HsSyn
import TcHsSyn
import TcRnMonad
import Inst
import Id
import Var
import Name
import NameSet
import TcEnv
import TcMType
import TcValidity( arityErr )
import TcType
import TcUnify
import TcHsType
import TysWiredIn
import TcEvidence
import TyCon
import DataCon
import PatSyn
import ConLike
import PrelNames
import BasicTypes hiding (SuccessFlag(..))
import DynFlags
import SrcLoc
import Util
import Outputable
import FastString
import Control.Monad
{-
************************************************************************
* *
External interface
* *
************************************************************************
-}
tcLetPat :: TcSigFun -> LetBndrSpec
-> LPat Name -> TcSigmaType
-> TcM a
-> TcM (LPat TcId, a)
tcLetPat sig_fn no_gen pat pat_ty thing_inside
= tc_lpat pat pat_ty penv thing_inside
where
penv = PE { pe_lazy = True
, pe_ctxt = LetPat sig_fn no_gen }
-----------------
tcPats :: HsMatchContext Name
-> [LPat Name] -- Patterns,
-> [TcSigmaType] -- and their types
-> TcM a -- and the checker for the body
-> TcM ([LPat TcId], a)
-- This is the externally-callable wrapper function
-- Typecheck the patterns, extend the environment to bind the variables,
-- do the thing inside, use any existentially-bound dictionaries to
-- discharge parts of the returning LIE, and deal with pattern type
-- signatures
-- 1. Initialise the PatState
-- 2. Check the patterns
-- 3. Check the body
-- 4. Check that no existentials escape
tcPats ctxt pats pat_tys thing_inside
= tc_lpats penv pats pat_tys thing_inside
where
penv = PE { pe_lazy = False, pe_ctxt = LamPat ctxt }
tcPat :: HsMatchContext Name
-> LPat Name -> TcSigmaType
-> TcM a -- Checker for body, given
-- its result type
-> TcM (LPat TcId, a)
tcPat ctxt pat pat_ty thing_inside
= tc_lpat pat pat_ty penv thing_inside
where
penv = PE { pe_lazy = False, pe_ctxt = LamPat ctxt }
-----------------
data PatEnv
= PE { pe_lazy :: Bool -- True <=> lazy context, so no existentials allowed
, pe_ctxt :: PatCtxt -- Context in which the whole pattern appears
}
data PatCtxt
= LamPat -- Used for lambdas, case etc
(HsMatchContext Name)
| LetPat -- Used only for let(rec) pattern bindings
-- See Note [Typing patterns in pattern bindings]
TcSigFun -- Tells type sig if any
LetBndrSpec -- True <=> no generalisation of this let
data LetBndrSpec
= LetLclBndr -- The binder is just a local one;
-- an AbsBinds will provide the global version
| LetGblBndr TcPragFun -- Generalisation plan is NoGen, so there isn't going
-- to be an AbsBinds; So we must bind the global version
-- of the binder right away.
-- Oh, and here is the inline-pragma information
makeLazy :: PatEnv -> PatEnv
makeLazy penv = penv { pe_lazy = True }
inPatBind :: PatEnv -> Bool
inPatBind (PE { pe_ctxt = LetPat {} }) = True
inPatBind (PE { pe_ctxt = LamPat {} }) = False
---------------
type TcPragFun = Name -> [LSig Name]
type TcSigFun = Name -> Maybe TcSigInfo
data TcSigInfo
= TcSigInfo {
sig_name :: Name, -- The binder name of the type signature. When
-- sig_id = Just id, then sig_name = idName id.
sig_poly_id :: Maybe TcId,
-- Just f <=> the type signature had no wildcards, so the precise,
-- complete polymorphic type is known. In that case,
-- f is the polymorphic Id, with that type
-- Nothing <=> the type signature is partial (i.e. includes one or more
-- wildcards). In this case it doesn't make sense to give
-- the polymorphic Id, because we are going to /infer/ its
-- type, so we can't make the polymorphic Id ab-initio
--
-- See Note [Complete and partial type signatures]
sig_tvs :: [(Maybe Name, TcTyVar)],
-- Instantiated type and kind variables
-- Just n <=> this skolem is lexically in scope with name n
-- See Note [Binding scoped type variables]
sig_nwcs :: [(Name, TcTyVar)],
-- Instantiated wildcard variables
-- If sig_poly_id = Just f, then sig_nwcs must be empty
sig_extra_cts :: Maybe SrcSpan,
-- Just loc <=> An extra-constraints wildcard was present
-- at location loc
-- e.g. f :: (Eq a, _) => a -> a
-- Any extra constraints inferred during
-- type-checking will be added to the sig_theta.
-- If sig_poly_id = Just f, sig_extra_cts must be Nothing
sig_theta :: TcThetaType, -- Instantiated theta
sig_tau :: TcSigmaType, -- Instantiated tau
-- See Note [sig_tau may be polymorphic]
sig_loc :: SrcSpan, -- The location of the signature
sig_warn_redundant :: Bool -- True <=> report redundant constraints
-- when typechecking the value binding
-- for this type signature
-- This is usually True, but False for
-- * Record selectors (not important here)
-- * Class and instance methods. Here the code may legitimately
-- be more polymorphic than the signature generated from the
-- class declaration
}
| TcPatSynInfo TcPatSynInfo
data TcPatSynInfo
= TPSI {
patsig_name :: Name,
patsig_tau :: TcSigmaType,
patsig_ex :: [TcTyVar],
patsig_prov :: TcThetaType,
patsig_univ :: [TcTyVar],
patsig_req :: TcThetaType
}
findScopedTyVars -- See Note [Binding scoped type variables]
:: LHsType Name -- The HsType
-> TcType -- The corresponding Type:
-- uses same Names as the HsType
-> [TcTyVar] -- The instantiated forall variables of the Type
-> [(Maybe Name, TcTyVar)] -- In 1-1 correspondence with the instantiated vars
findScopedTyVars hs_ty sig_ty inst_tvs
= zipWith find sig_tvs inst_tvs
where
find sig_tv inst_tv
| tv_name `elemNameSet` scoped_names = (Just tv_name, inst_tv)
| otherwise = (Nothing, inst_tv)
where
tv_name = tyVarName sig_tv
scoped_names = mkNameSet (hsExplicitTvs hs_ty)
(sig_tvs,_) = tcSplitForAllTys sig_ty
instance NamedThing TcSigInfo where
getName TcSigInfo{ sig_name = name } = name
getName (TcPatSynInfo tpsi) = patsig_name tpsi
instance Outputable TcSigInfo where
ppr (TcSigInfo { sig_name = name, sig_poly_id = mb_poly_id, sig_tvs = tyvars
, sig_theta = theta, sig_tau = tau })
= maybe (ppr name) ppr mb_poly_id <+> dcolon <+>
vcat [ pprSigmaType (mkSigmaTy (map snd tyvars) theta tau)
, ppr (map fst tyvars) ]
ppr (TcPatSynInfo tpsi) = text "TcPatSynInfo" <+> ppr tpsi
instance Outputable TcPatSynInfo where
ppr (TPSI{ patsig_name = name}) = ppr name
isPartialSig :: TcSigInfo -> Bool
isPartialSig (TcSigInfo { sig_poly_id = Nothing }) = True
isPartialSig _ = False
-- Helper for cases when we know for sure we have a complete type
-- signature, e.g. class methods.
completeSigPolyId :: TcSigInfo -> TcId
completeSigPolyId (TcSigInfo { sig_poly_id = Just id }) = id
completeSigPolyId _ = panic "completeSigPolyId"
completeSigPolyId_maybe :: TcSigInfo -> Maybe TcId
completeSigPolyId_maybe (TcSigInfo { sig_poly_id = mb_id }) = mb_id
completeSigPolyId_maybe (TcPatSynInfo {}) = Nothing
{-
Note [Binding scoped type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The type variables *brought into lexical scope* by a type signature may
be a subset of the *quantified type variables* of the signatures, for two reasons:
* With kind polymorphism a signature like
f :: forall f a. f a -> f a
may actually give rise to
f :: forall k. forall (f::k -> *) (a:k). f a -> f a
So the sig_tvs will be [k,f,a], but only f,a are scoped.
NB: the scoped ones are not necessarily the *inital* ones!
* Even aside from kind polymorphism, tere may be more instantiated
type variables than lexically-scoped ones. For example:
type T a = forall b. b -> (a,b)
f :: forall c. T c
Here, the signature for f will have one scoped type variable, c,
but two instantiated type variables, c' and b'.
The function findScopedTyVars takes
* hs_ty: the original HsForAllTy
* sig_ty: the corresponding Type (which is guaranteed to use the same Names
as the HsForAllTy)
* inst_tvs: the skolems instantiated from the forall's in sig_ty
It returns a [(Maybe Name, TcTyVar)], in 1-1 correspondence with inst_tvs
but with a (Just n) for the lexically scoped name of each in-scope tyvar.
Note [sig_tau may be polymorphic]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that "sig_tau" might actually be a polymorphic type,
if the original function had a signature like
forall a. Eq a => forall b. Ord b => ....
But that's ok: tcMatchesFun (called by tcRhs) can deal with that
It happens, too! See Note [Polymorphic methods] in TcClassDcl.
Note [Existential check]
~~~~~~~~~~~~~~~~~~~~~~~~
Lazy patterns can't bind existentials. They arise in two ways:
* Let bindings let { C a b = e } in b
* Twiddle patterns f ~(C a b) = e
The pe_lazy field of PatEnv says whether we are inside a lazy
pattern (perhaps deeply)
If we aren't inside a lazy pattern then we can bind existentials,
but we need to be careful about "extra" tyvars. Consider
(\C x -> d) : pat_ty -> res_ty
When looking for existential escape we must check that the existential
bound by C don't unify with the free variables of pat_ty, OR res_ty
(or of course the environment). Hence we need to keep track of the
res_ty free vars.
Note [Complete and partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A type signature is partial when it contains one or more wildcards
(= type holes). The wildcard can either be:
* A (type) wildcard occurring in sig_theta or sig_tau. These are
stored in sig_nwcs.
f :: Bool -> _
g :: Eq _a => _a -> _a -> Bool
* Or an extra-constraints wildcard, stored in sig_extra_cts:
h :: (Num a, _) => a -> a
A type signature is a complete type signature when there are no
wildcards in the type signature, i.e. iff sig_nwcs is empty and
sig_extra_cts is Nothing.
************************************************************************
* *
Binders
* *
************************************************************************
-}
tcPatBndr :: PatEnv -> Name -> TcSigmaType -> TcM (TcCoercion, TcId)
-- (coi, xp) = tcPatBndr penv x pat_ty
-- Then coi : pat_ty ~ typeof(xp)
--
tcPatBndr (PE { pe_ctxt = LetPat lookup_sig no_gen}) bndr_name pat_ty
          -- See Note [Typing patterns in pattern bindings]
  -- No generalisation and a complete signature for this binder:
  -- bind the polymorphic Id from the signature directly
  | LetGblBndr prags   <- no_gen
  , Just sig <- lookup_sig bndr_name
  , Just poly_id <- sig_poly_id sig
  = do { bndr_id <- addInlinePrags poly_id (prags bndr_name)
       ; traceTc "tcPatBndr(gbl,sig)" (ppr bndr_id $$ ppr (idType bndr_id))
       ; co <- unifyPatType (idType bndr_id) pat_ty
       ; return (co, bndr_id) }

  -- Otherwise make a binder at the type dictated by the pattern
  -- context; a monomorphic clone when generalising (LetLclBndr)
  | otherwise
  = do { bndr_id <- newNoSigLetBndr no_gen bndr_name pat_ty
       ; traceTc "tcPatBndr(no-sig)" (ppr bndr_id $$ ppr (idType bndr_id))
       ; return (mkTcNomReflCo pat_ty, bndr_id) }

-- Lambda or proc pattern: the binder simply gets pat_ty, no coercion needed
tcPatBndr (PE { pe_ctxt = _lam_or_proc }) bndr_name pat_ty
  = return (mkTcNomReflCo pat_ty, mkLocalId bndr_name pat_ty)
------------
newNoSigLetBndr :: LetBndrSpec -> Name -> TcType -> TcM TcId
-- In the polymorphic case (no_gen = LetLclBndr), generate a "monomorphic version"
--    of the Id; the original name will be bound to the polymorphic version
--    by the AbsBinds
-- In the monomorphic case (no_gen = LetGblBndr) there is no AbsBinds, and we
--    use the original name directly
newNoSigLetBndr LetLclBndr name ty
  = do { mono_name <- newLocalName name    -- fresh name for the monomorphic Id
       ; return (mkLocalId mono_name ty) }
newNoSigLetBndr (LetGblBndr prags) name ty
  = addInlinePrags (mkLocalId name ty) (prags name)
----------
addInlinePrags :: TcId -> [LSig Name] -> TcM TcId
-- ^ Attach the INLINE pragma (if any) from the given signatures to the Id.
-- If several INLINE pragmas are present, warn about the duplicates and
-- keep only the first.
addInlinePrags poly_id prags
  = do { traceTc "addInlinePrags" (ppr poly_id $$ ppr prags)
       ; tc_inl inl_sigs }
  where
    inl_sigs = filter isInlineLSig prags
    tc_inl [] = return poly_id
    tc_inl (L loc (InlineSig _ prag) : other_inls)
       = do { unless (null other_inls) (setSrcSpan loc warn_dup_inline)
            ; traceTc "addInlinePrag" (ppr poly_id $$ ppr prag)
            ; return (poly_id `setInlinePragma` prag) }
    tc_inl _ = panic "tc_inl"   -- inl_sigs contains only InlineSigs

    warn_dup_inline = warnPrags poly_id inl_sigs $
                      ptext (sLit "Duplicate INLINE pragmas for")
warnPrags :: Id -> [LSig Name] -> SDoc -> TcM ()
-- ^ Emit a warning naming the Id (after the given herald) and listing
-- the source locations of the offending signatures.
warnPrags id bad_sigs herald
  = addWarnTc (hang (herald <+> quotes (ppr id)) 2 sig_locs)
  where
    sig_locs = vcat [ ppr (getLoc sig) | sig <- bad_sigs ]
{-
Note [Typing patterns in pattern bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are typing a pattern binding
pat = rhs
Then the PatCtxt will be (LetPat sig_fn let_bndr_spec).
There can still be signatures for the binders:
data T = MkT (forall a. a->a) Int
x :: forall a. a->a
y :: Int
MkT x y = <rhs>
Two cases, dealt with by the LetPat case of tcPatBndr
* If we are generalising (generalisation plan is InferGen or
CheckGen), then the let_bndr_spec will be LetLclBndr. In that case
we want to bind a cloned, local version of the variable, with the
type given by the pattern context, *not* by the signature (even if
there is one; see Trac #7268). The mkExport part of the
  generalisation step will do the checking and impedance matching
against the signature.
* If for some reason we are not generalising (plan = NoGen), the
LetBndrSpec will be LetGblBndr. In that case we must bind the
global version of the Id, and do so with precisely the type given
in the signature. (Then we unify with the type from the pattern
  context type.)
************************************************************************
* *
The main worker functions
* *
************************************************************************
Note [Nesting]
~~~~~~~~~~~~~~
tcPat takes a "thing inside" over which the pattern scopes. This is partly
so that tcPat can extend the environment for the thing_inside, but also
so that constraints arising in the thing_inside can be discharged by the
pattern.
This does not work so well for the ErrCtxt carried by the monad: we don't
want the error-context for the pattern to scope over the RHS.
Hence the getErrCtxt/setErrCtxt stuff in tcMultiple
-}
--------------------
-- | A @Checker inp out@ typechecks one pattern-ish thing of type @inp@
-- (producing an @out@) while scoping a "thing inside" computation over
-- the pattern's binders.  See Note [Nesting].
type Checker inp out =  forall r.
                          inp
                       -> PatEnv
                       -> TcM r
                       -> TcM (out, r)
tcMultiple :: Checker inp out -> Checker [inp] [out]
-- ^ Lift a checker over a list, nesting each element's thing_inside
-- in the scope of the previous elements' binders.  The error context
-- is saved up front and restored between elements, so a later pattern
-- does not inherit an earlier pattern's context; see Note [Nesting].
tcMultiple tc_pat args penv thing_inside
  = do  { err_ctxt <- getErrCtxt
        ; let loop _ []
                = do { res <- thing_inside
                     ; return ([], res) }

              loop penv (arg:args)
                = do { (p', (ps', res))
                                <- tc_pat arg penv $
                                   setErrCtxt err_ctxt $
                                   loop penv args
                -- setErrCtxt: restore context before doing the next pattern
                -- See note [Nesting] above
                     ; return (p':ps', res) }

        ; loop penv args }
--------------------
tc_lpat :: LPat Name
        -> TcSigmaType
        -> PatEnv
        -> TcM a
        -> TcM (LPat TcId, a)
-- ^ Typecheck a located pattern: push its source span, check the bare
-- pattern (wrapping an error context around it when that is worthwhile),
-- then re-attach the span to the result.
tc_lpat (L loc pat) pat_ty penv thing_inside
  = setSrcSpan loc $
    do { (pat', res) <- maybeWrapPatCtxt pat (tc_pat penv pat pat_ty)
                                         thing_inside
       ; return (L loc pat', res) }
tc_lpats :: PatEnv
         -> [LPat Name] -> [TcSigmaType]
         -> TcM a
         -> TcM ([LPat TcId], a)
-- ^ Typecheck a list of patterns against a matching list of expected
-- types.  The two lists must have equal length (checked by the ASSERT
-- and again by zipEqual in debug builds).
tc_lpats penv pats tys thing_inside
  = ASSERT2( equalLength pats tys, ppr pats $$ ppr tys )
    tcMultiple (\(p,t) -> tc_lpat p t)
                (zipEqual "tc_lpats" pats tys)
                penv thing_inside
--------------------
-- | The main pattern-typechecking worker: one equation per pattern form.
-- Checks the pattern against the fully refined expected type, extends
-- the environment with the pattern's binders for thing_inside, and
-- returns the elaborated pattern plus thing_inside's result.
tc_pat  :: PatEnv
        -> Pat Name
        -> TcSigmaType  -- Fully refined result type
        -> TcM a                -- Thing inside
        -> TcM (Pat TcId,       -- Translated pattern
                a)              -- Result of thing inside

-- Variable pattern: bind the name and scope thing_inside over it
tc_pat penv (VarPat name) pat_ty thing_inside
  = do  { (co, id) <- tcPatBndr penv name pat_ty
        ; res <- tcExtendIdEnv1 name id thing_inside
        ; return (mkHsWrapPatCo co (VarPat id) pat_ty, res) }

tc_pat penv (ParPat pat) pat_ty thing_inside
  = do  { (pat', res) <- tc_lpat pat pat_ty penv thing_inside
        ; return (ParPat pat', res) }

tc_pat penv (BangPat pat) pat_ty thing_inside
  = do  { (pat', res) <- tc_lpat pat pat_ty penv thing_inside
        ; return (BangPat pat', res) }

tc_pat penv lpat@(LazyPat pat) pat_ty thing_inside
  = do  { (pat', (res, pat_ct))
                <- tc_lpat pat pat_ty (makeLazy penv) $
                   captureConstraints thing_inside
                -- Ignore refined penv', revert to penv

        ; emitConstraints pat_ct
        -- captureConstraints/extendConstraints:
        --   see Note [Hopping the LIE in lazy patterns]

        -- Check there are no unlifted types under the lazy pattern
        ; when (any (isUnLiftedType . idType) $ collectPatBinders pat') $
               lazyUnliftedPatErr lpat

        -- Check that the expected pattern type is itself lifted
        ; pat_ty' <- newFlexiTyVarTy liftedTypeKind
        ; _ <- unifyType pat_ty pat_ty'

        ; return (LazyPat pat', res) }

tc_pat _ (WildPat _) pat_ty thing_inside
  = do  { res <- thing_inside
        ; return (WildPat pat_ty, res) }

tc_pat penv (AsPat (L nm_loc name) pat) pat_ty thing_inside
  = do  { (co, bndr_id) <- setSrcSpan nm_loc (tcPatBndr penv name pat_ty)
        ; (pat', res) <- tcExtendIdEnv1 name bndr_id $
                         tc_lpat pat (idType bndr_id) penv thing_inside
            -- NB: if we do inference on:
            --          \ (y@(x::forall a. a->a)) = e
            -- we'll fail.  The as-pattern infers a monotype for 'y', which then
            -- fails to unify with the polymorphic type for 'x'.  This could
            -- perhaps be fixed, but only with a bit more work.
            --
            -- If you fix it, don't forget the bindInstsOfPatIds!
        ; return (mkHsWrapPatCo co (AsPat (L nm_loc bndr_id) pat') pat_ty, res) }

tc_pat penv (ViewPat expr pat _) overall_pat_ty thing_inside
  = do  {
         -- Morally, expr must have type `forall a1...aN. OPT' -> B`
         -- where overall_pat_ty is an instance of OPT'.
         -- Here, we infer a rho type for it,
         -- which replaces the leading foralls and constraints
         -- with fresh unification variables.
        ; (expr',expr'_inferred) <- tcInferRho expr

         -- next, we check that expr is coercible to `overall_pat_ty -> pat_ty`
         -- NOTE: this forces pat_ty to be a monotype (because we use a unification
         -- variable to find it).  this means that in an example like
         -- (view -> f)    where view :: _ -> forall b. b
         -- we will only be able to use view at one instantation in the
         -- rest of the view
        ; (expr_co, pat_ty) <- tcInfer $ \ pat_ty ->
                unifyType expr'_inferred (mkFunTy overall_pat_ty pat_ty)

         -- pattern must have pat_ty
        ; (pat', res) <- tc_lpat pat pat_ty penv thing_inside

        ; return (ViewPat (mkLHsWrapCo expr_co expr') pat' overall_pat_ty, res) }

-- Type signatures in patterns
-- See Note [Pattern coercions] below
tc_pat penv (SigPatIn pat sig_ty) pat_ty thing_inside
  = do  { (inner_ty, tv_binds, nwc_binds, wrap) <- tcPatSig (inPatBind penv)
                                                            sig_ty pat_ty
        ; (pat', res) <- tcExtendTyVarEnv2 (tv_binds ++ nwc_binds) $
                         tc_lpat pat inner_ty penv thing_inside
        ; return (mkHsWrapPat wrap (SigPatOut pat' inner_ty) pat_ty, res) }

------------------------
-- Lists, tuples, arrays
tc_pat penv (ListPat pats _ Nothing) pat_ty thing_inside
  = do  { (coi, elt_ty) <- matchExpectedPatTy matchExpectedListTy pat_ty
        ; (pats', res) <- tcMultiple (\p -> tc_lpat p elt_ty)
                                     pats penv thing_inside
        ; return (mkHsWrapPat coi (ListPat pats' elt_ty Nothing) pat_ty, res)
        }

-- Overloaded list pattern: the rebindable toList-style function e
-- converts the scrutinee (pat_ty) to an ordinary list type first
tc_pat penv (ListPat pats _ (Just (_,e))) pat_ty thing_inside
  = do  { list_pat_ty <- newFlexiTyVarTy liftedTypeKind
        ; e' <- tcSyntaxOp ListOrigin e (mkFunTy pat_ty list_pat_ty)
        ; (coi, elt_ty) <- matchExpectedPatTy matchExpectedListTy list_pat_ty
        ; (pats', res) <- tcMultiple (\p -> tc_lpat p elt_ty)
                                     pats penv thing_inside
        ; return (mkHsWrapPat coi (ListPat pats' elt_ty (Just (pat_ty,e'))) list_pat_ty, res)
        }

tc_pat penv (PArrPat pats _) pat_ty thing_inside
  = do  { (coi, elt_ty) <- matchExpectedPatTy matchExpectedPArrTy pat_ty
        ; (pats', res) <- tcMultiple (\p -> tc_lpat p elt_ty)
                                     pats penv thing_inside
        ; return (mkHsWrapPat coi (PArrPat pats' elt_ty) pat_ty, res)
        }

tc_pat penv (TuplePat pats boxity _) pat_ty thing_inside
  = do  { let tc = tupleTyCon boxity (length pats)
        ; (coi, arg_tys) <- matchExpectedPatTy (matchExpectedTyConApp tc) pat_ty
        ; (pats', res) <- tc_lpats penv pats arg_tys thing_inside

        ; dflags <- getDynFlags

        -- Under flag control turn a pattern (x,y,z) into ~(x,y,z)
        -- so that we can experiment with lazy tuple-matching.
        -- This is a pretty odd place to make the switch, but
        -- it was easy to do.
        ; let
              unmangled_result = TuplePat pats' boxity arg_tys
                                 -- pat_ty /= pat_ty iff coi /= IdCo
              possibly_mangled_result
                | gopt Opt_IrrefutableTuples dflags &&
                  isBoxed boxity            = LazyPat (noLoc unmangled_result)
                | otherwise                 = unmangled_result

        ; ASSERT( length arg_tys == length pats )      -- Syntactically enforced
          return (mkHsWrapPat coi possibly_mangled_result pat_ty, res)
        }

------------------------
-- Data constructors
tc_pat penv (ConPatIn con arg_pats) pat_ty thing_inside
  = tcConPat penv con pat_ty arg_pats thing_inside

------------------------
-- Literal patterns
tc_pat _ (LitPat simple_lit) pat_ty thing_inside
  = do  { let lit_ty = hsLitType simple_lit
        ; co <- unifyPatType lit_ty pat_ty
                -- coi is of kind: pat_ty ~ lit_ty
        ; res <- thing_inside
        ; return ( mkHsWrapPatCo co (LitPat simple_lit) pat_ty
                 , res) }

------------------------
-- Overloaded patterns: n, and n+k
tc_pat _ (NPat (L l over_lit) mb_neg eq) pat_ty thing_inside
  = do  { let orig = LiteralOrigin over_lit
        ; lit'    <- newOverloadedLit orig over_lit pat_ty
        ; eq'     <- tcSyntaxOp orig eq (mkFunTys [pat_ty, pat_ty] boolTy)
        ; mb_neg' <- case mb_neg of
                        Nothing  -> return Nothing      -- Positive literal
                        Just neg ->     -- Negative literal
                                        -- The 'negate' is re-mappable syntax
                            do { neg' <- tcSyntaxOp orig neg (mkFunTy pat_ty pat_ty)
                               ; return (Just neg') }
        ; res <- thing_inside
        ; return (NPat (L l lit') mb_neg' eq', res) }

tc_pat penv (NPlusKPat (L nm_loc name) (L loc lit) ge minus) pat_ty thing_inside
  = do  { (co, bndr_id) <- setSrcSpan nm_loc (tcPatBndr penv name pat_ty)
        ; let pat_ty' = idType bndr_id
              orig    = LiteralOrigin lit
        ; lit' <- newOverloadedLit orig lit pat_ty'

        -- The '>=' and '-' parts are re-mappable syntax
        ; ge'    <- tcSyntaxOp orig ge    (mkFunTys [pat_ty', pat_ty'] boolTy)
        ; minus' <- tcSyntaxOp orig minus (mkFunTys [pat_ty', pat_ty'] pat_ty')
        ; let pat' = NPlusKPat (L nm_loc bndr_id) (L loc lit') ge' minus'

        -- The Report says that n+k patterns must be in Integral
        -- We may not want this when using re-mappable syntax, though (ToDo?)
        ; icls <- tcLookupClass integralClassName
        ; instStupidTheta orig [mkClassPred icls [pat_ty']]

        ; res <- tcExtendIdEnv1 name bndr_id thing_inside
        ; return (mkHsWrapPatCo co pat' pat_ty, res) }

tc_pat _ _other_pat _ _ = panic "tc_pat"        -- ConPatOut, SigPatOut
----------------
unifyPatType :: TcType -> TcType -> TcM TcCoercion
-- ^ In patterns we want a coercion from the context (expected) type to
-- the actual pattern type.  We still call 'unifyType' with the arguments
-- in actual/expected order, because that controls the actual/expected
-- roles in error messages; we then flip the coercion with 'mkTcSymCo'.
unifyPatType actual_ty expected_ty
  = do { co <- unifyType actual_ty expected_ty
       ; return (mkTcSymCo co) }
{-
Note [Hopping the LIE in lazy patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a lazy pattern, we must *not* discharge constraints from the RHS
from dictionaries bound in the pattern. E.g.
f ~(C x) = 3
We can't discharge the Num constraint from dictionaries bound by
the pattern C!
So we have to make the constraints from thing_inside "hop around"
the pattern. Hence the captureConstraints and emitConstraints.
The same thing ensures that equality constraints in a lazy match
are not made available in the RHS of the match. For example
data T a where { T1 :: Int -> T Int; ... }
f :: T a -> Int -> a
f ~(T1 i) y = y
It's obviously not sound to refine a to Int in the right
hand side, because the argument might not match T1 at all!
Finally, a lazy pattern should not bind any existential type variables
because they won't be in scope when we do the desugaring
************************************************************************
* *
Most of the work for constructors is here
(the rest is in the ConPatIn case of tc_pat)
* *
************************************************************************
[Pattern matching indexed data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following declarations:
data family Map k :: * -> *
data instance Map (a, b) v = MapPair (Map a (Pair b v))
and a case expression
case x :: Map (Int, c) w of MapPair m -> ...
As explained by [Wrappers for data instance tycons] in MkIds.hs, the
worker/wrapper types for MapPair are
$WMapPair :: forall a b v. Map a (Map a b v) -> Map (a, b) v
$wMapPair :: forall a b v. Map a (Map a b v) -> :R123Map a b v
So, the type of the scrutinee is Map (Int, c) w, but the tycon of MapPair is
:R123Map, which means the straight use of boxySplitTyConApp would give a type
error. Hence, the smart wrapper function boxySplitTyConAppWithFamily calls
boxySplitTyConApp with the family tycon Map instead, which gives us the family
type list {(Int, c), w}. To get the correct split for :R123Map, we need to
unify the family type list {(Int, c), w} with the instance types {(a, b), v}
(provided by tyConFamInst_maybe together with the family tycon). This
unification yields the substitution [a -> Int, b -> c, v -> w], which gives us
the split arguments for the representation tycon :R123Map as {Int, c, w}
In other words, boxySplitTyConAppWithFamily implicitly takes the coercion
Co123Map a b v :: {Map (a, b) v ~ :R123Map a b v}
moving between representation and family type into account. To produce type
correct Core, this coercion needs to be used to case the type of the scrutinee
from the family to the representation type. This is achieved by
unwrapFamInstScrutinee using a CoPat around the result pattern.
Now it might seem as if we could have used the previous GADT type
refinement infrastructure of refineAlt and friends instead of the explicit
unification and CoPat generation. However, that would be wrong. Why? The
whole point of GADT refinement is that the refinement is local to the case
alternative. In contrast, the substitution generated by the unification of
the family type list and instance types needs to be propagated to the outside.
Imagine that in the above example, the type of the scrutinee would have been
(Map x w), then we would have unified {x, w} with {(a, b), v}, yielding the
substitution [x -> (a, b), v -> w]. In contrast to GADT matching, the
instantiation of x with (a, b) must be global; ie, it must be valid in *all*
alternatives of the case expression, whereas in the GADT case it might vary
between alternatives.
RIP GADT refinement: refinements have been replaced by the use of explicit
equality constraints that are used in conjunction with implication constraints
to express the local scope of GADT refinements.
-}
-- Running example:
-- MkT :: forall a b c. (a~[b]) => b -> c -> T a
--      with scrutinee of type (T ty)

tcConPat :: PatEnv -> Located Name
         -> TcRhoType           -- Type of the pattern
         -> HsConPatDetails Name -> TcM a
         -> TcM (Pat TcId, a)
-- ^ Typecheck a constructor pattern, dispatching on whether the name
-- refers to a real data constructor or to a pattern synonym.
tcConPat penv lcon@(L _ con_name) pat_ty arg_pats thing_inside
  = do { con_like <- tcLookupConLike con_name
       ; case con_like of
           RealDataCon dc -> tcDataConPat penv lcon dc pat_ty arg_pats thing_inside
           PatSynCon ps   -> tcPatSynPat  penv lcon ps pat_ty arg_pats thing_inside
       }
tcDataConPat :: PatEnv -> Located Name -> DataCon
             -> TcRhoType               -- Type of the pattern
             -> HsConPatDetails Name -> TcM a
             -> TcM (Pat TcId, a)
-- ^ Typecheck a pattern headed by a real data constructor: instantiate
-- the universal tyvars from the pattern type, skolemise the existentials,
-- and (for GADT/existential constructors) collect the evidence bindings
-- for the local constraints while checking the argument patterns.
tcDataConPat penv (L con_span con_name) data_con pat_ty arg_pats thing_inside
  = do  { let tycon = dataConTyCon data_con
                  -- For data families this is the representation tycon
              (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _)
                = dataConFullSig data_con
              header = L con_span (RealDataCon data_con)

          -- Instantiate the constructor type variables [a->ty]
          -- This may involve doing a family-instance coercion,
          -- and building a wrapper
        ; (wrap, ctxt_res_tys) <- matchExpectedPatTy (matchExpectedConTy tycon) pat_ty

          -- Add the stupid theta
        ; setSrcSpan con_span $ addDataConStupidTheta data_con ctxt_res_tys

        ; checkExistentials ex_tvs penv
        ; (tenv, ex_tvs') <- tcInstSuperSkolTyVarsX
                               (zipTopTvSubst univ_tvs ctxt_res_tys) ex_tvs
                     -- Get location from monad, not from ex_tvs

        ; let -- pat_ty' = mkTyConApp tycon ctxt_res_tys
              -- pat_ty' is type of the actual constructor application
              -- pat_ty' /= pat_ty iff coi /= IdCo
              arg_tys' = substTys tenv arg_tys

        ; traceTc "tcConPat" (vcat [ ppr con_name, ppr univ_tvs, ppr ex_tvs, ppr eq_spec
                                   , ppr ex_tvs', ppr ctxt_res_tys, ppr arg_tys' ])
        ; if null ex_tvs && null eq_spec && null theta
          then do { -- The common case; no class bindings etc
                    -- (see Note [Arrows and patterns])
                    (arg_pats', res) <- tcConArgs (RealDataCon data_con) arg_tys'
                                                  arg_pats penv thing_inside
                  ; let res_pat = ConPatOut { pat_con = header,
                                              pat_tvs = [], pat_dicts = [],
                                              pat_binds = emptyTcEvBinds,
                                              pat_args = arg_pats',
                                              pat_arg_tys = ctxt_res_tys,
                                              pat_wrap = idHsWrapper }

                  ; return (mkHsWrapPat wrap res_pat pat_ty, res) }

          else do   -- The general case, with existential,
                    -- and local equality constraints
        { let theta' = substTheta tenv (eqSpecPreds eq_spec ++ theta)
                           -- order is *important* as we generate the list of
                           -- dictionary binders from theta'
              no_equalities = not (any isEqPred theta')
              skol_info = case pe_ctxt penv of
                            LamPat mc -> PatSkol (RealDataCon data_con) mc
                            LetPat {} -> UnkSkol -- Doesn't matter

        ; gadts_on    <- xoptM Opt_GADTs
        ; families_on <- xoptM Opt_TypeFamilies
        ; checkTc (no_equalities || gadts_on || families_on)
                  (text "A pattern match on a GADT requires the" <+>
                   text "GADTs or TypeFamilies language extension")
                  -- Trac #2905 decided that a *pattern-match* of a GADT
                  -- should require the GADT language flag.
                  -- Re TypeFamilies see also #7156

        ; given <- newEvVars theta'
        ; (ev_binds, (arg_pats', res))
             <- checkConstraints skol_info ex_tvs' given $
                tcConArgs (RealDataCon data_con) arg_tys' arg_pats penv thing_inside

        ; let res_pat = ConPatOut { pat_con   = header,
                                    pat_tvs   = ex_tvs',
                                    pat_dicts = given,
                                    pat_binds = ev_binds,
                                    pat_args  = arg_pats',
                                    pat_arg_tys = ctxt_res_tys,
                                    pat_wrap  = idHsWrapper }
        ; return (mkHsWrapPat wrap res_pat pat_ty, res)
        } }
tcPatSynPat :: PatEnv -> Located Name -> PatSyn
            -> TcRhoType               -- Type of the pattern
            -> HsConPatDetails Name -> TcM a
            -> TcM (Pat TcId, a)
-- ^ Typecheck a pattern headed by a pattern synonym.  Analogous to
-- 'tcDataConPat': instantiate the universal tyvars, skolemise the
-- existentials, check the argument patterns under the provided
-- constraints, and instantiate the required constraints at this use site.
tcPatSynPat penv (L con_span _) pat_syn pat_ty arg_pats thing_inside
  = do  { let (univ_tvs, ex_tvs, prov_theta, req_theta, arg_tys, ty) = patSynSig pat_syn

        ; (subst, univ_tvs') <- tcInstTyVars univ_tvs

        ; checkExistentials ex_tvs penv
        ; (tenv, ex_tvs') <- tcInstSuperSkolTyVarsX subst ex_tvs
        ; let ty'         = substTy tenv ty
              arg_tys'    = substTys tenv arg_tys
              prov_theta' = substTheta tenv prov_theta
              req_theta'  = substTheta tenv req_theta

          -- Coerce the synonym's (instantiated) result type to the
          -- expected pattern type
        ; wrap <- coToHsWrapper <$> unifyType ty' pat_ty
        ; traceTc "tcPatSynPat" (ppr pat_syn $$
                                 ppr pat_ty $$
                                 ppr ty' $$
                                 ppr ex_tvs' $$
                                 ppr prov_theta' $$
                                 ppr req_theta' $$
                                 ppr arg_tys')

        ; prov_dicts' <- newEvVars prov_theta'

        ; let skol_info = case pe_ctxt penv of
                            LamPat mc -> PatSkol (PatSynCon pat_syn) mc
                            LetPat {} -> UnkSkol -- Doesn't matter

        ; req_wrap <- instCall PatOrigin (mkTyVarTys univ_tvs') req_theta'
        ; traceTc "instCall" (ppr req_wrap)

        ; traceTc "checkConstraints {" Outputable.empty
        ; (ev_binds, (arg_pats', res))
             <- checkConstraints skol_info ex_tvs' prov_dicts' $
                tcConArgs (PatSynCon pat_syn) arg_tys' arg_pats penv thing_inside

        ; traceTc "checkConstraints }" (ppr ev_binds)
        ; let res_pat = ConPatOut { pat_con   = L con_span $ PatSynCon pat_syn,
                                    pat_tvs   = ex_tvs',
                                    pat_dicts = prov_dicts',
                                    pat_binds = ev_binds,
                                    pat_args  = arg_pats',
                                    pat_arg_tys = mkTyVarTys univ_tvs',
                                    pat_wrap  = req_wrap }
        ; return (mkHsWrapPat wrap res_pat pat_ty, res) }
----------------------------
matchExpectedPatTy :: (TcRhoType -> TcM (TcCoercion, a))
                    -> TcRhoType -> TcM (HsWrapper, a)
-- See Note [Matching polytyped patterns]
-- Returns a wrapper : pat_ty ~ inner_ty
matchExpectedPatTy inner_match pat_ty
  | null tvs && null theta      -- Already a rho-type: just run the matcher
  = do { (co, res) <- inner_match pat_ty
       ; return (coToHsWrapper (mkTcSymCo co), res) }
         -- The Sym is because the inner_match returns a coercion
         -- that is the other way round to matchExpectedPatTy

  | otherwise   -- A polytype: instantiate it and recurse on the body
  = do { (subst, tvs') <- tcInstTyVars tvs
       ; wrap1 <- instCall PatOrigin (mkTyVarTys tvs') (substTheta subst theta)
       ; (wrap2, arg_tys) <- matchExpectedPatTy inner_match (TcType.substTy subst tau)
       ; return (wrap2 <.> wrap1, arg_tys) }
  where
    (tvs, theta, tau) = tcSplitSigmaTy pat_ty
----------------------------
matchExpectedConTy :: TyCon      -- The TyCon that this data
                                 -- constructor actually returns
                   -> TcRhoType  -- The type of the pattern
                   -> TcM (TcCoercion, [TcSigmaType])
-- See Note [Matching constructor patterns]
-- Returns a coercion : T ty1 ... tyn ~ pat_ty
-- This is the same way round as matchExpectedListTy etc
-- but the other way round to matchExpectedPatTy
matchExpectedConTy data_tc pat_ty
  | Just (fam_tc, fam_args, co_tc) <- tyConFamInstSig_maybe data_tc
         -- Comments refer to Note [Matching constructor patterns]
         -- co_tc :: forall a. T [a] ~ T7 a
  = do { (subst, tvs') <- tcInstTyVars (tyConTyVars data_tc)
             -- tys = [ty1,ty2]

       ; traceTc "matchExpectedConTy" (vcat [ppr data_tc,
                                             ppr (tyConTyVars data_tc),
                                             ppr fam_tc, ppr fam_args])
       ; co1 <- unifyType (mkTyConApp fam_tc (substTys subst fam_args)) pat_ty
             -- co1 : T (ty1,ty2) ~ pat_ty

       ; let tys' = mkTyVarTys tvs'
             co2 = mkTcUnbranchedAxInstCo Nominal co_tc tys'
             -- co2 : T (ty1,ty2) ~ T7 ty1 ty2

       ; return (mkTcSymCo co2 `mkTcTransCo` co1, tys') }

  | otherwise   -- Not a data-family representation tycon: match directly
  = matchExpectedTyConApp data_tc pat_ty
             -- coi : T tys ~ pat_ty
{-
Note [Matching constructor patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose (coi, tys) = matchExpectedConTy data_tc pat_ty
* In the simple case, pat_ty = tc tys
* If pat_ty is a polytype, we want to instantiate it
This is like part of a subsumption check. Eg
f :: (forall a. [a]) -> blah
f [] = blah
* In a type family case, suppose we have
data family T a
data instance T (p,q) = A p | B q
Then we'll have internally generated
data T7 p q = A p | B q
axiom coT7 p q :: T (p,q) ~ T7 p q
So if pat_ty = T (ty1,ty2), we return (coi, [ty1,ty2]) such that
coi = coi2 . coi1 : T7 t ~ pat_ty
coi1 : T (ty1,ty2) ~ pat_ty
coi2 : T7 ty1 ty2 ~ T (ty1,ty2)
For families we do all this matching here, not in the unifier,
because we never want a whisper of the data_tycon to appear in
error messages; it's a purely internal thing
-}
tcConArgs :: ConLike -> [TcSigmaType]
          -> Checker (HsConPatDetails Name) (HsConPatDetails Id)
-- ^ Typecheck the argument patterns of a constructor pattern, in
-- prefix, infix, or record syntax.  Arity is checked first in the
-- prefix and infix cases.

tcConArgs con_like arg_tys (PrefixCon arg_pats) penv thing_inside
  = do  { checkTc (con_arity == no_of_args)     -- Check correct arity
                  (arityErr "Constructor" con_like con_arity no_of_args)
        ; let pats_w_tys = zipEqual "tcConArgs" arg_pats arg_tys
        ; (arg_pats', res) <- tcMultiple tcConArg pats_w_tys
                                         penv thing_inside
        ; return (PrefixCon arg_pats', res) }
  where
    con_arity  = conLikeArity con_like
    no_of_args = length arg_pats

tcConArgs con_like arg_tys (InfixCon p1 p2) penv thing_inside
  = do  { checkTc (con_arity == 2)      -- Check correct arity
                  (arityErr "Constructor" con_like con_arity 2)
        ; let [arg_ty1,arg_ty2] = arg_tys       -- This can't fail after the arity check
        ; ([p1',p2'], res) <- tcMultiple tcConArg [(p1,arg_ty1),(p2,arg_ty2)]
                                         penv thing_inside
        ; return (InfixCon p1' p2', res) }
  where
    con_arity  = conLikeArity con_like

tcConArgs con_like arg_tys (RecCon (HsRecFields rpats dd)) penv thing_inside
  = do  { (rpats', res) <- tcMultiple tc_field rpats penv thing_inside
        ; return (RecCon (HsRecFields rpats' dd), res) }
  where
    -- Check one record-field pattern: look up the field's selector and
    -- expected type, then check the sub-pattern against that type
    tc_field :: Checker (LHsRecField FieldLabel (LPat Name))
                        (LHsRecField TcId (LPat TcId))
    tc_field (L l (HsRecField field_lbl pat pun)) penv thing_inside
      = do { (sel_id, pat_ty) <- wrapLocFstM find_field_ty field_lbl
           ; (pat', res) <- tcConArg (pat, pat_ty) penv thing_inside
           ; return (L l (HsRecField sel_id pat' pun), res) }

    find_field_ty :: FieldLabel -> TcM (Id, TcType)
    find_field_ty field_lbl
        = case [ty | (f,ty) <- field_tys, f == field_lbl] of

            -- No matching field; chances are this field label comes from some
            -- other record type (or maybe none).  If this happens, just fail,
            -- otherwise we get crashes later (Trac #8570), and similar:
            --    f (R { foo = (a,b) }) = a+b
            -- If foo isn't one of R's fields, we don't want to crash when
            -- typechecking the "a+b".
            [] -> failWith (badFieldCon con_like field_lbl)

            -- The normal case, when the field comes from the right constructor
            (pat_ty : extras) ->
                ASSERT( null extras )
                do { sel_id <- tcLookupField field_lbl
                   ; return (sel_id, pat_ty) }

    field_tys :: [(FieldLabel, TcType)]
    field_tys = case con_like of
        RealDataCon data_con -> zip (dataConFieldLabels data_con) arg_tys
          -- Don't use zipEqual! If the constructor isn't really a record, then
          -- dataConFieldLabels will be empty (and each field in the pattern
          -- will generate an error below).
        PatSynCon{} -> []
-- | Source arity of a data constructor or pattern synonym.
conLikeArity :: ConLike -> Arity
conLikeArity con = case con of
  RealDataCon data_con -> dataConSourceArity data_con
  PatSynCon pat_syn    -> patSynArity pat_syn
tcConArg :: Checker (LPat Name, TcSigmaType) (LPat Id)
-- ^ Check one constructor argument: simply 'tc_lpat' on the paired
-- pattern and its expected type.
tcConArg (pat, ty) penv thing_inside
  = tc_lpat pat ty penv thing_inside
addDataConStupidTheta :: DataCon -> [TcType] -> TcM ()
-- ^ Instantiate the "stupid theta" (datatype context) of the data
-- constructor at the given instance types and throw the resulting
-- constraints into the constraint set.  A no-op when the constructor
-- has no datatype context.
addDataConStupidTheta data_con inst_tys
  | null stupid_theta = return ()
  | otherwise         = instStupidTheta origin inst_theta
  where
    -- The origin should always report "occurrence of C",
    -- even when C occurs in a pattern
    origin       = OccurrenceOf (dataConName data_con)
    stupid_theta = dataConStupidTheta data_con
    -- NB: inst_tys can be longer than the universal tyvars,
    -- because the constructor might have existentials
    subst        = mkTopTvSubst (dataConUnivTyVars data_con `zip` inst_tys)
    inst_theta   = substTheta subst stupid_theta
{-
Note [Arrows and patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~
(Oct 07) Arrow notation has the odd property that it involves
"holes in the scope". For example:
expr :: Arrow a => a () Int
expr = proc (y,z) -> do
x <- term -< y
expr' -< x
Here the 'proc (y,z)' binding scopes over the arrow tails but not the
arrow body (e.g 'term'). As things stand (bogusly) all the
constraints from the proc body are gathered together, so constraints
from 'term' will be seen by the tcPat for (y,z). But we must *not*
bind constraints from 'term' here, because the desugarer will not make
these bindings scope over 'term'.
The Right Thing is not to confuse these constraints together. But for
now the Easy Thing is to ensure that we do not have existential or
GADT constraints in a 'proc', and to short-cut the constraint
simplification for such vanilla patterns so that it binds no
constraints. Hence the 'fast path' in tcConPat; but it's also a good
plan for ordinary vanilla patterns to bypass the constraint
simplification step.
************************************************************************
* *
Note [Pattern coercions]
* *
************************************************************************
In principle, these programs would be reasonable:
f :: (forall a. a->a) -> Int
f (x :: Int->Int) = x 3
g :: (forall a. [a]) -> Bool
g [] = True
In both cases, the function type signature restricts what arguments can be passed
in a call (to polymorphic ones). The pattern type signature then instantiates this
type. For example, in the first case, (forall a. a->a) <= Int -> Int, and we
generate the translated term
f = \x' :: (forall a. a->a). let x = x' Int in x 3
From a type-system point of view, this is perfectly fine, but it's *very* seldom useful.
And it requires a significant amount of code to implement, because we need to decorate
the translated pattern with coercion functions (generated from the subsumption check
by tcSub).
So for now I'm just insisting on type *equality* in patterns. No subsumption.
Old notes about desugaring, at a time when pattern coercions were handled:
A SigPat is a type coercion and must be handled one at a time.  We can't
combine them unless the type of the pattern inside is identical, and we don't
bother to check for that. For example:
data T = T1 Int | T2 Bool
f :: (forall a. a -> a) -> T -> t
f (g::Int->Int) (T1 i) = T1 (g i)
f (g::Bool->Bool) (T2 b) = T2 (g b)
We desugar this as follows:
f = \ g::(forall a. a->a) t::T ->
let gi = g Int
in case t of { T1 i -> T1 (gi i)
other ->
let gb = g Bool
in case t of { T2 b -> T2 (gb b)
other -> fail }}
Note that we do not treat the first column of patterns as a
column of variables, because the coerced variables (gi, gb)
would be of different types. So we get rather grotty code.
But I don't think this is a common case, and if it was we could
doubtless improve it.
Meanwhile, the strategy is:
* treat each SigPat coercion (always non-identity coercions)
as a separate block
* deal with the stuff inside, and then wrap a binding round
the result to bind the new variable (gi, gb, etc)
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
-}
maybeWrapPatCtxt :: Pat Name -> (TcM a -> TcM b) -> TcM a -> TcM b
-- | Push an "In the pattern: ..." error context around the checker,
-- but only for patterns informative enough to be worth reporting;
-- bare variables, parens and as-patterns add no useful context.
maybeWrapPatCtxt pat tcm thing_inside
  | worth_wrapping pat = addErrCtxt msg $ tcm $ popErrCtxt thing_inside
                         -- Remember to pop before doing thing_inside
  | otherwise          = tcm thing_inside
  where
    worth_wrapping (VarPat {}) = False
    worth_wrapping (ParPat {}) = False
    worth_wrapping (AsPat {})  = False
    worth_wrapping _           = True
    msg = hang (ptext (sLit "In the pattern:")) 2 (ppr pat)
-----------------------------------------------
checkExistentials :: [TyVar] -> PatEnv -> TcM ()
-- See Note [Arrows and patterns]
-- Fails when a constructor pattern binding existential type variables
-- appears in a context that cannot cope with them: a pattern binding
-- (let), a proc-notation pattern, or a lazy (~) pattern.  The first
-- clause lets vanilla constructors (no existentials) through
-- unconditionally; clause order is significant.
checkExistentials [] _ = return ()
checkExistentials _ (PE { pe_ctxt = LetPat {}}) = failWithTc existentialLetPat
checkExistentials _ (PE { pe_ctxt = LamPat ProcExpr }) = failWithTc existentialProcPat
checkExistentials _ (PE { pe_lazy = True }) = failWithTc existentialLazyPat
checkExistentials _ _ = return ()
-- | Error: existential/GADT constructor under a lazy (~) pattern.
existentialLazyPat :: SDoc
existentialLazyPat
  = hang (ptext (sLit "An existential or GADT data constructor cannot be used"))
       2 (ptext (sLit "inside a lazy (~) pattern"))

-- | Error: existential/GADT constructor in a proc pattern.
existentialProcPat :: SDoc
existentialProcPat
  = ptext (sLit "Proc patterns cannot use existential or GADT data constructors")

-- | Error: existential/GADT constructor in a pattern binding.
existentialLetPat :: SDoc
existentialLetPat
  = vcat [text "My brain just exploded",
          text "I can't handle pattern bindings for existential or GADT data constructors.",
          text "Instead, use a case-expression, or do-notation, to unpack the constructor."]

-- | Report that constructor @con@ has no field named @field@.
badFieldCon :: ConLike -> Name -> SDoc
badFieldCon con field
  = hsep [ptext (sLit "Constructor") <+> quotes (ppr con),
          ptext (sLit "does not have field"), quotes (ppr field)]

-- | Report an illegal polymorphic type signature in a pattern.
polyPatSig :: TcType -> SDoc
polyPatSig sig_ty
  = hang (ptext (sLit "Illegal polymorphic type signature in pattern:"))
       2 (ppr sig_ty)

-- | Fail: a lazy (~) pattern may not contain unlifted types.
lazyUnliftedPatErr :: OutputableBndr name => Pat name -> TcM ()
lazyUnliftedPatErr pat
  = failWithTc $
    hang (ptext (sLit "A lazy (~) pattern cannot contain unlifted types:"))
       2 (ppr pat)
| fmthoma/ghc | compiler/typecheck/TcPat.hs | bsd-3-clause | 51,241 | 1 | 18 | 15,797 | 7,968 | 4,199 | 3,769 | -1 | -1 |
{-|
Module : Idris.Core.Typecheck
Description : Idris' type checker.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE DeriveFunctor, FlexibleContexts, FlexibleInstances,
MultiParamTypeClasses, PatternGuards #-}
module Idris.Core.Typecheck where
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.Core.WHNF
import Control.Monad.State
import qualified Data.Vector.Unboxed as V (length)
import Debug.Trace
-- To check conversion, normalise each term wrt the current environment.
-- Since we haven't converted everything to de Bruijn indices yet, we'll have to
-- deal with alpha conversion - we do this by making each inner term de Bruijn
-- indexed with 'finalise'
-- | Check two terms convertible in the given environment, recording
-- universe constraints in the 'UCs' state.  Tries the cheap syntactic
-- check first; only on failure does it normalise both sides (and
-- de Bruijn-index them with 'finalise') and retry, failing with
-- 'CantConvert' if they still differ.  Hole binders are exempt.
convertsC :: Context -> Env -> Term -> Term -> StateT UCs TC ()
convertsC ctxt env x y =
    do let hs = map fstEnv (filter isHole env)
       c1 <- convEq ctxt hs x y
       if c1 then return ()
         else
            do c2 <- convEq ctxt hs (finalise (normalise ctxt env x))
                                    (finalise (normalise ctxt env y))
               if c2 then return ()
                 else lift $ tfail (CantConvert
                             (finalise (normalise ctxt env x))
                             (finalise (normalise ctxt env y)) (errEnv env))
-- | Pure (non-constraint-generating) counterpart of 'convertsC':
-- same cheap-check-then-normalise strategy, using 'convEq''.
converts :: Context -> Env -> Term -> Term -> TC ()
converts ctxt env x y
     = let hs = map fstEnv (filter isHole env) in
       case convEq' ctxt hs x y of
            OK True -> return ()
            _ -> case convEq' ctxt hs (finalise (normalise ctxt env x))
                                      (finalise (normalise ctxt env y)) of
                      OK True -> return ()
                      _ -> tfail (CantConvert
                                  (finalise (normalise ctxt env x))
                                  (finalise (normalise ctxt env y)) (errEnv env))
-- | Is this environment entry a hole binder?  (Holes are exempted
-- from the conversion checks above.)
isHole (n, _, Hole _) = True
isHole _ = False

-- | Strip environment entries down to (name, type) for error reports.
errEnv = map (\(x, _, b) -> (x, binderTy b))
-- | Check that a term normalises to a universe; fail otherwise.
isType :: Context -> Env -> Term -> TC ()
isType ctxt env tm = isType' (normalise ctxt env tm)
    where isType' tm | isUniverse tm = return ()
                     | otherwise = fail (showEnv env tm ++ " is not a Type")
-- | Check that a term is a valid type: either a 'UType', or
-- convertible to 'TType' at a fresh universe variable drawn from the
-- 'UCs' state (the counter is bumped unconditionally).
convType :: String -> Context -> Env -> Term -> StateT UCs TC ()
convType tcns ctxt env tm =
    do (v, cs) <- get
       put (v + 1, cs)
       case normalise ctxt env tm of
            UType _ -> return ()
            _ -> convertsC ctxt env tm (TType (UVar tcns v))
-- | Recheck a fully elaborated term: holes banned, no uniqueness
-- checking, no borrowed names.
recheck :: String -> Context -> Env -> Raw -> Term -> TC (Term, Type, UCs)
recheck = recheck_borrowing False []
-- | Recheck a term with holes banned, optionally enforcing
-- uniqueness-type rules against the given borrowed names.
-- 'IncompleteTerm' failures are re-reported against the original
-- term for a better message.
recheck_borrowing :: Bool -> [Name] -> String -> Context -> Env -> Raw -> Term ->
                     TC (Term, Type, UCs)
recheck_borrowing uniq_check bs tcns ctxt env tm orig
   = let v = next_tvar ctxt in
       case runStateT (check' False tcns ctxt env tm) (v, []) of -- holes banned
            Error (IncompleteTerm _) -> Error $ IncompleteTerm orig
            Error e -> Error e
            OK ((tm, ty), constraints) ->
                 do when uniq_check $ checkUnique bs ctxt env tm
                    return (tm, ty, constraints)
-- | Type check a raw term, permitting unsolved holes.
check :: Context -> Env -> Raw -> TC (Term, Type)
check ctxt env tm
     -- Holes allowed, so constraint namespace doesn't matter
     = evalStateT (check' True [] ctxt env tm) (0, [])
-- | The core type checker.  @check' holes tcns ctxt env tm@ elaborates
-- the raw term @tm@ in environment @env@; @holes@ says whether
-- unsolved holes may remain, and @tcns@ is the namespace used for
-- fresh universe variables.  Universe constraints accumulate in the
-- 'UCs' state; the result also carries the names used (for
-- multiplicity/linearity accounting).
check' :: Bool -> String -> Context -> Env -> Raw -> StateT UCs TC (Term, Type)
check' holes tcns ctxt env top
   = do (tm, ty, _) <- chk Rig1 (TType (UVar tcns (-5))) Nothing env top
        return (tm, ty)
  where
    -- Pick the "more unique" of two kinds; NullType dominates.
    smaller (UType NullType) _ = UType NullType
    smaller _ (UType NullType) = UType NullType
    smaller (UType u) _ = UType u
    smaller _ (UType u) = UType u
    smaller x _ = x

    astate | holes = MaybeHoles
           | otherwise = Complete

    chk :: RigCount -> -- multiplicity (need enough in context to produce this many of the term)
           Type -> -- uniqueness level
           Maybe UExp -> -- universe for kind
           Env -> Raw -> StateT UCs TC (Term, Type, [Name])
    -- Variables: locally bound (with a linearity check) or global.
    chk rigc u lvl env (Var n)
        | Just (i, erig, ty) <- lookupTyEnv n env
             = case rigSafe holes erig rigc n of
                    Nothing -> return (P Bound n ty, ty, used rigc n)
                    Just msg -> lift $ tfail $ Msg msg
        -- If we're elaborating, we don't want the private names; if we're
        -- checking an already elaborated term, we do
        | [P nt n' ty] <- lookupP_all (not holes) True n ctxt
             = return (P nt n' ty, ty, [])
        -- -- If the names are ambiguous, require it to be fully qualified
        -- | [P nt n' ty] <- lookupP_all (not holes) True n ctxt
        --      = return (P nt n' ty, ty, [])
        | otherwise = do lift $ tfail $ NoSuchVariable n
      where rigSafe True _ _ n = Nothing
            rigSafe _ Rig1 RigW n = Just ("Trying to use linear name " ++ show n ++ " in non-linear context")
            rigSafe _ Rig0 RigW n = Just ("Trying to use irrelevant name " ++ show n ++ " in relevant context")
            rigSafe _ _ _ n = Nothing

            used Rig0 n = []
            used _ n = [n]
    -- special case to reduce constraints
    chk rigc u lvl env ap@(RApp f RType) | not holes
        = do (fv, fty, fns) <- chk rigc u Nothing env f
             let fty' = case uniqueBinders (map fstEnv env) (finalise fty) of
                             ty@(Bind x (Pi _ i s k) t) -> ty
                             _ -> uniqueBinders (map fstEnv env)
                                      $ case normalise ctxt env fty of
                                             ty@(Bind x (Pi _ i s k) t) -> ty
                                             _ -> normalise ctxt env fty
             case fty' of
               Bind x (Pi rig i (TType v') k) t ->
                   do (v, cs) <- get
                      put (v+1, ULT (UVar tcns v) v' : cs)
                      let apty = simplify initContext env
                                     (Bind x (Let (TType v') (TType (UVar tcns v))) t)
                      return (App Complete fv (TType (UVar tcns v)), apty, fns)
               Bind x (Pi rig i s k) t ->
                   do (av, aty, _) <- chk rigc u Nothing env RType
                      convertsC ctxt env aty s
                      let apty = simplify initContext env
                                     (Bind x (Let aty av) t)
                      return (App astate fv av, apty, fns)
               t -> lift $ tfail $ NonFunctionType fv fty
    -- General application: argument multiplicity is scaled by the
    -- Pi binder's multiplicity.
    chk rigc u lvl env ap@(RApp f a)
        = do (fv, fty, fns) <- chk rigc u Nothing env f
             let (rigf, fty') =
                    case uniqueBinders (map fstEnv env) (finalise fty) of
                         ty@(Bind x (Pi rig i s k) t) -> (rig, ty)
                         _ -> case normalise ctxt env fty of
                                   ty@(Bind x (Pi rig i s k) t) ->
                                        (rig, uniqueBinders (map fstEnv env) ty)
                                   _ -> (RigW, uniqueBinders (map fstEnv env)
                                                   (normalise ctxt env fty)) -- This is an error, caught below...
             (av, aty, ans) <- chk (rigMult rigc rigf) u Nothing env a
             case fty' of
               Bind x (Pi rig i s k) t ->
                   do convertsC ctxt env aty s
                      let apty = simplify initContext env
                                     (Bind x (Let aty av) t)
                      return (App astate fv av, apty, fns ++ ans)
               t -> lift $ tfail $ NonFunctionType fv fty
    chk rigc u lvl env RType
       | holes = return (TType (UVal 0), TType (UVal 0), [])
       | otherwise = do (v, cs) <- get
                        let c = ULT (UVar tcns v) (UVar tcns (v+1))
                        put (v+2, (c:cs))
                        return (TType (UVar tcns v), TType (UVar tcns (v+1)), [])
    chk rigc u lvl env (RUType un)
       | holes = return (UType un, TType (UVal 0), [])
       | otherwise = do -- TODO! Issue #1715 on the issue tracker.
                        -- https://github.com/idris-lang/Idris-dev/issues/1715
                        -- (v, cs) <- get
                        -- let c = ULT (UVar v) (UVar (v+1))
                        -- put (v+2, (c:cs))
                        -- return (TType (UVar v), TType (UVar (v+1)))
                        return (UType un, TType (UVal 0), [])
    chk rigc u lvl env (RConstant Forgot) = return (Erased, Erased, [])
    chk rigc u lvl env (RConstant c) = return (Constant c, constType c, [])
      where constType (I _) = Constant (AType (ATInt ITNative))
            constType (BI _) = Constant (AType (ATInt ITBig))
            constType (Fl _) = Constant (AType ATFloat)
            constType (Ch _) = Constant (AType (ATInt ITChar))
            constType (Str _) = Constant StrType
            constType (B8 _) = Constant (AType (ATInt (ITFixed IT8)))
            constType (B16 _) = Constant (AType (ATInt (ITFixed IT16)))
            constType (B32 _) = Constant (AType (ATInt (ITFixed IT32)))
            constType (B64 _) = Constant (AType (ATInt (ITFixed IT64)))
            constType TheWorld = Constant WorldType
            constType Forgot = Erased
            constType _ = TType (UVal 0)
    -- Dependent function space: domain, codomain and kind are all
    -- checked at multiplicity Rig0 (types are erased).
    chk rigc u lvl env (RBind n (Pi rig i s k) t)
        = do (sv, st, sns) <- chk Rig0 u Nothing (envZero env) s
             when (rig == RigW) $
                 lift $ linearCheckArg ctxt (normalise ctxt env sv)
             (v, cs) <- get
             (kv, kt, _) <- chk Rig0 u Nothing (envZero env) k -- no need to validate these constraints, they are independent
             put (v+1, cs)
             let maxu = case lvl of
                             Nothing -> UVar tcns v
                             Just v' -> v'
             (tv, tt, tns) <- chk Rig0 st (Just maxu) ((n, Rig0, Pi Rig0 i sv kv) : envZero env) t
             -- convertsC ctxt env st (TType maxu)
             -- convertsC ctxt env tt (TType maxu)
             -- when holes $ put (v, cs)
             -- return (Bind n (Pi i (uniqueBinders (map fst env) sv) (TType maxu))
             --              (pToV n tv), TType maxu)
             case (normalise ctxt env st, normalise ctxt env tt) of
                  (TType su, TType tu) -> do
                       when (not holes) $ do (v, cs) <- get
                                             put (v, ULE su maxu :
                                                     ULE tu maxu : cs)
                       let k' = TType (UVar tcns v) `smaller` st `smaller` kv `smaller` u
                       return (Bind n (Pi rig i (uniqueBinders (map fstEnv env) sv) k')
                                   (pToV n tv), k', sns ++ tns)
                  (un, un') ->
                       let k' = st `smaller` kv `smaller` un `smaller` un' `smaller` u in
                       return (Bind n (Pi rig i (uniqueBinders (map fstEnv env) sv) k')
                                   (pToV n tv), k', sns ++ tns)

      where mkUniquePi kv (Bind n (Pi rig i s k) sc)
                 = let k' = smaller kv k in
                       Bind n (Pi rig i s k') (mkUniquePi k' sc)
            mkUniquePi kv (Bind n (Lam rig t) sc)
                 = Bind n (Lam rig (mkUniquePi kv t)) (mkUniquePi kv sc)
            mkUniquePi kv (Bind n (Let t v) sc)
                 = Bind n (Let (mkUniquePi kv t) v) (mkUniquePi kv sc)
            mkUniquePi kv t = t

            -- Kind of the whole thing is the kind of the most unique thing
            -- in the environment (because uniqueness taints everything...)
            mostUnique [] k = k
            mostUnique (Pi _ _ _ pk : es) k = mostUnique es (smaller pk k)
            mostUnique (_ : es) k = mostUnique es k

    -- All other binders: check the binder, extend the environment,
    -- check the scope, then enforce linear-name usage counts.
    chk rigc u lvl env (RBind n b sc)
        = do (b', bt', bns) <- checkBinder b
             (scv, sct, scns) <- chk rigc (smaller bt' u) Nothing ((n, getCount b, b'):env) sc
             when (getCount b == RigW) $
                 lift $ linearCheckArg ctxt (normalise ctxt env (binderTy b'))
             checkUsageOK (getCount b) scns
             discharge n b' bt' (pToV n scv) (pToV n sct) (bns ++ scns)
      where getCount (Pi rig _ _ _) = rigMult rigc rig
            getCount (PVar rig _) = rigMult rigc rig
            getCount (Lam rig _) = rigMult rigc rig
            getCount _ = rigMult rigc RigW

            -- A Rig1 (linear) binder must be used exactly once in scope.
            checkUsageOK Rig0 _ = return ()
            checkUsageOK RigW _ = return ()
            checkUsageOK Rig1 ns
                  = let used = length (filter (==n) ns) in
                        if used == 1 then return ()
                           else lift $ tfail $ Msg $ "There are " ++ (show used) ++
                                        " uses of linear name " ++ show n

            checkBinder (Lam rig t)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   let tv' = normalise ctxt env tv
                   convType tcns ctxt env tt
                   return (Lam rig tv, tt, [])
            checkBinder (Let t v)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   -- May have multiple uses, check at RigW
                   -- (or rather, like an application of a lambda, multiply)
                   -- (Consider: adding a single use let?)
                   (vv, vt, vns) <- chk (rigMult rigc RigW) u Nothing env v
                   let tv' = normalise ctxt env tv
                   convertsC ctxt env vt tv
                   convType tcns ctxt env tt
                   return (Let tv vv, tt, vns)
            checkBinder (NLet t v)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   (vv, vt, vns) <- chk rigc u Nothing env v
                   let tv' = normalise ctxt env tv
                   convertsC ctxt env vt tv
                   convType tcns ctxt env tt
                   return (NLet tv vv, tt, vns)
            checkBinder (Hole t)
              | not holes = lift $ tfail (IncompleteTerm undefined)
              | otherwise
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   let tv' = normalise ctxt env tv
                   convType tcns ctxt env tt
                   return (Hole tv, tt, [])
            checkBinder (GHole i ns t)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   let tv' = normalise ctxt env tv
                   convType tcns ctxt env tt
                   return (GHole i ns tv, tt, [])
            checkBinder (Guess t v)
              | not holes = lift $ tfail (IncompleteTerm undefined)
              | otherwise
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   (vv, vt, vns) <- chk rigc u Nothing env v
                   let tv' = normalise ctxt env tv
                   convertsC ctxt env vt tv
                   convType tcns ctxt env tt
                   return (Guess tv vv, tt, vns)
            checkBinder (PVar rig t)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   let tv' = normalise ctxt env tv
                   convType tcns ctxt env tt
                   -- Normalised version, for erasure purposes (it's easier
                   -- to tell if it's a collapsible variable)
                   return (PVar rig tv, tt, [])
            checkBinder (PVTy t)
              = do (tv, tt, _) <- chk Rig0 u Nothing (envZero env) t
                   let tv' = normalise ctxt env tv
                   convType tcns ctxt env tt
                   return (PVTy tv, tt, [])

            -- Rebuild the binder around the checked scope/type.
            discharge n (Lam r t) bt scv sct ns
              = return (Bind n (Lam r t) scv, Bind n (Pi r Nothing t bt) sct, ns)
            discharge n (Pi r i t k) bt scv sct ns
              = return (Bind n (Pi r i t k) scv, sct, ns)
            discharge n (Let t v) bt scv sct ns
              = return (Bind n (Let t v) scv, Bind n (Let t v) sct, ns)
            discharge n (NLet t v) bt scv sct ns
              = return (Bind n (NLet t v) scv, Bind n (Let t v) sct, ns)
            discharge n (Hole t) bt scv sct ns
              = return (Bind n (Hole t) scv, sct, ns)
            discharge n (GHole i ns t) bt scv sct uns
              = return (Bind n (GHole i ns t) scv, sct, uns)
            discharge n (Guess t v) bt scv sct ns
              = return (Bind n (Guess t v) scv, sct, ns)
            discharge n (PVar r t) bt scv sct ns
              = return (Bind n (PVar r t) scv, Bind n (PVTy t) sct, ns)
            discharge n (PVTy t) bt scv sct ns
              = return (Bind n (PVTy t) scv, sct, ns)
-- Number of times a name can be used
-- | How many more times a name may be used under uniqueness typing.
data UniqueUse = Never    -- ^ no more times
               | Once     -- ^ at most once more
               | LendOnly -- ^ only under 'lend'
               | Many     -- ^ unlimited
  deriving Eq
-- If any binders are of kind 'UniqueType' or 'AllTypes' and the name appears
-- in the scope more than once, this is an error.
-- | Enforce uniqueness typing: any name whose binder's kind is
-- 'UniqueType' or 'AllTypes' may be consumed at most once in its
-- scope; names in the borrowed list may only be lent, never consumed.
-- The state maps names to their remaining allowance.
checkUnique :: [Name] -> Context -> Env -> Term -> TC ()
checkUnique borrowed ctxt env tm
         = evalStateT (chkBinders env (explicitNames tm)) []
  where
    isVar (P _ _ _) = True
    isVar (V _) = True
    isVar _ = False

    chkBinders :: Env -> Term -> StateT [(Name, (UniqueUse, Universe))] TC ()
    chkBinders env (V i) | length env > i = chkName (fstEnv (env!!i))
    chkBinders env (P _ n _) = chkName n
    -- 'lending' a unique or nulltype variable doesn't count as a use,
    -- but we still can't lend something that's already been used.
    chkBinders env (App _ (App _ (P _ (NS (UN lend) [owner]) _) t) a)
       | isVar a && owner == txt "Ownership" &&
         (lend == txt "lend" || lend == txt "Read")
            = do chkBinders env t -- Check the type normally
                 st <- get
                 -- Remove the 'LendOnly' names from the unusable set
                 put (filter (\(n, (ok, _)) -> ok /= LendOnly) st)
                 chkBinders env a
                 put st -- Reset the old state after checking the argument
    chkBinders env (App _ f a) = do chkBinders env f; chkBinders env a
    chkBinders env (Bind n b t)
       = do chkBinderName env n b
            st <- get
            case b of
                 Let t v -> chkBinders env v
                 _ -> return ()
            chkBinders ((n, Rig0, b) : env) t
    chkBinders env t = return ()

    -- Record the allowance implied by a binder's kind.
    chkBinderName :: Env -> Name -> Binder Term ->
                     StateT [(Name, (UniqueUse, Universe))] TC ()
    chkBinderName env n b
       = do let rawty = forgetEnv (map fstEnv env) (binderTy b)
            (_, kind) <- lift $ check ctxt env rawty
            case kind of
                 UType UniqueType -> do ns <- get
                                        if n `elem` borrowed
                                           then put ((n, (LendOnly, NullType)) : ns)
                                           else put ((n, (Once, UniqueType)) : ns)
                 UType NullType -> do ns <- get
                                      put ((n, (Many, NullType)) : ns)
                 UType AllTypes -> do ns <- get
                                      put ((n, (Once, AllTypes)) : ns)
                 _ -> return ()

    -- A use of a name: decrement its allowance or fail.
    chkName n
       = do ns <- get
            case lookup n ns of
                 Nothing -> return ()
                 Just (Many, k) -> return ()
                 Just (Never, k) -> lift $ tfail (UniqueError k n)
                 Just (LendOnly, k) -> lift $ tfail (UniqueError k n)
                 Just (Once, k) -> put ((n, (Never, k)) :
                                            filter (\x -> fst x /= n) ns)
| Heather/Idris-dev | src/Idris/Core/Typecheck.hs | bsd-3-clause | 19,244 | 0 | 26 | 7,698 | 7,038 | 3,512 | 3,526 | 328 | 64 |
{- |
Module : $Header$
Description : Interface to the OWL Ontology provers.
Copyright : (c) Heng Jiang, Uni Bremen 2004-2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
prover states for pellet and fact++
-}
module OWL2.ProverState where
import Logic.Prover
import OWL2.MS
import OWL2.Morphism
import OWL2.Sign
import OWL2.ManchesterPrint
import OWL2.XMLConversion
import Common.AS_Annotation
-- | State carried through a prover run: the ontology's signature plus
-- the named axioms used as premises (goals are excluded on creation).
data ProverState = ProverState
                 { ontologySign :: Sign,
                   initialState :: [Named Axiom]
                 } deriving Show
-- | Build the initial prover state from a signature and sentence
-- list, keeping only the axioms.  Freeness constraints are ignored.
owlProverState :: Sign -> [Named Axiom]
               -> [FreeDefMorphism Axiom OWLMorphism] -- ^ freeness constraints
               -> ProverState
owlProverState sig oSens _ = ProverState
         { ontologySign = sig,
           initialState = filter isAxiom oSens }
{- |
Inserts a named OWL2 axiom into the prover state.
-}
insertOWLAxiom :: ProverState -- ^ prover state containing initial logical part
               -> Named Axiom -- ^ goal to add
               -> ProverState
-- Note: appends with (++), which is O(n) in the number of axioms.
insertOWLAxiom pps s = pps { initialState = initialState pps ++ [s] }
-- | Render the prover state (signature plus axioms) in the syntax
-- produced by 'mkODoc'.
showOWLProblemS :: ProverState -> String -- ^ formatted output
showOWLProblemS pst =
    let namedSens = initialState pst
        sign = ontologySign pst
    in mkODoc sign (filter isAxiom namedSens)
{- |
Pretty printing OWL goal for pellet or fact++
-}
showOWLProblem :: ProverState -- ^ prover state containing initial logical part
               -> Named Axiom -- ^ goal to print
               -> IO String -- ^ formatted output of the goal
-- Renders the whole problem followed by the goal as an entailment.
-- (Pure computation wrapped in IO to fit the prover interface.)
showOWLProblem pst nGoal =
    let sign = ontologySign pst
    in return $ showOWLProblemS pst
       ++ "\n\nEntailments:\n\n" ++ show (printOWLBasicTheory (sign, [nGoal]))
| keithodulaigh/Hets | OWL2/ProverState.hs | gpl-2.0 | 1,765 | 0 | 12 | 395 | 315 | 171 | 144 | 34 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module PersistTestPetCollarType where
import Data.Aeson
import Data.Text (Text)
import GHC.Generics
import Database.Persist.TH
-- | A pet collar with an identifying tag and whether it has a bell;
-- JSON (de)serialisation comes from the derived 'Generic' instance.
data PetCollar = PetCollar {tag :: Text, bell :: Bool}
    deriving (Generic, Eq, Show)
instance ToJSON PetCollar
instance FromJSON PetCollar
-- Persist 'PetCollar' values as a JSON-encoded database column.
derivePersistFieldJSON "PetCollar"
| yesodweb/persistent | persistent-test/src/PersistTestPetCollarType.hs | mit | 339 | 0 | 8 | 46 | 87 | 49 | 38 | -1 | -1 |
module Csv where
-- container
import Data.Tree (Tree(Node,rootLabel))
-- local imports
import qualified Language.Astview.Language as L
import Language.Astview.DataTree (data2tree)
-- Parsec (CSV Parser)
import Data.Generics hiding (Infix)
import Text.ParserCombinators.Parsec
import Text.Parsec.Combinator
import Text.ParserCombinators.Parsec.Expr
--import Text.Parsec.String
--import Text.Parsec.Char
--import Text.Parsec.Prim
-- | Astview language registration for CSV: matches ".csv" files,
-- has no source-position parser (always 'L.Err'), and builds the
-- displayed tree generically from the parsed @[[String]]@ value.
csv =
  L.Language
    "CSV"
    []
    [".csv"]
    (const $ Left L.Err)
    (data2tree::[[String]] -> Tree String)
    Nothing
    Nothing
-- (parse csvFile "(unknown)")
-- Parsec (Simple CSV)
-- A CSV file is newline-terminated lines of comma-separated cells;
-- cells may be empty, with no quoting or escaping supported.
csvFile = endBy line eol
line = sepBy cell (char ',')
cell = many (noneOf ",\n")

eol :: Parser Char
eol = char '\n'
| RefactoringTools/HaRe | hareview/data/Langs/Csv.hs | bsd-3-clause | 749 | 0 | 9 | 127 | 192 | 115 | 77 | 22 | 1 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
module IRTS.JavaScript.AST where
import Data.Word
import Data.Char (isDigit)
import qualified Data.Text as T
-- | Runtime type tags used by the generated JavaScript.
data JSType = JSIntTy
            | JSStringTy
            | JSIntegerTy
            | JSFloatTy
            | JSCharTy
            | JSPtrTy
            | JSForgotTy
            deriving Eq

-- | Arbitrary-precision integers, with dedicated forms for the
-- shared zero and one constants.
data JSInteger = JSBigZero
               | JSBigOne
               | JSBigInt Integer
               | JSBigIntExpr JS
               deriving Eq

-- | Numeric literals.
data JSNum = JSInt Int
           | JSFloat Double
           | JSInteger JSInteger
           deriving Eq

-- | Fixed-width machine words (rendered as typed arrays / bigInts).
data JSWord = JSWord8 Word8
            | JSWord16 Word16
            | JSWord32 Word32
            | JSWord64 Word64
            deriving Eq

-- | Annotations rendered as JSDoc-style comments.
data JSAnnotation = JSConstructor deriving Eq

instance Show JSAnnotation where
  show JSConstructor = "constructor"

-- | Abstract syntax of the JavaScript fragment emitted by the backend.
data JS = JSRaw String
        | JSIdent String
        | JSFunction [String] JS
        | JSType JSType
        | JSSeq [JS]
        | JSReturn JS
        | JSApp JS [JS]
        | JSNew String [JS]
        | JSError String
        | JSBinOp String JS JS
        | JSPreOp String JS
        | JSPostOp String JS
        | JSProj JS String
        | JSNull
        | JSUndefined
        | JSThis
        | JSTrue
        | JSFalse
        | JSArray [JS]
        | JSString String
        | JSNum JSNum
        | JSWord JSWord
        | JSAssign JS JS
        | JSAlloc String (Maybe JS)
        | JSIndex JS JS
        | JSSwitch JS [(JS, JS)] (Maybe JS)
        | JSCond [(JS, JS)]
        | JSTernary JS JS JS
        | JSParens JS
        | JSWhile JS JS
        | JSFFI String [JS]
        | JSAnnotation JSAnnotation JS
        | JSNoop
        deriving Eq
data FFI = FFICode Char | FFIArg Int | FFIError String

-- | Substitute positional arguments (%0, %1, ...) into a foreign-code
-- template; "%%" renders a literal '%'.  Calls 'error' on a malformed
-- template or an out-of-bounds argument index.
ffi :: String -> [String] -> T.Text
ffi code args =
    case firstError tokens of
      Just err -> error err
      Nothing  -> render tokens args
  where
    tokens = tokenise code

    -- Split the template into literal characters and argument slots.
    tokenise :: String -> [FFI]
    tokenise "" = []
    tokenise ['%'] = [FFIError "FFI - Invalid positional argument"]
    tokenise ('%':'%':rest) = FFICode '%' : tokenise rest
    tokenise ('%':c:rest)
      | isDigit c =
          let (ds, rest') = span isDigit rest
          in FFIArg (read (c : ds)) : tokenise rest'
      | otherwise = [FFIError "FFI - Invalid positional argument"]
    tokenise (c:rest) = FFICode c : tokenise rest

    -- First parse error in the token stream, if any.
    firstError :: [FFI] -> Maybe String
    firstError ts = case [e | FFIError e <- ts] of
                      (e:_) -> Just e
                      []    -> Nothing

    -- Render the tokens, splicing in the argument strings.
    render :: [FFI] -> [String] -> T.Text
    render [] _ = T.empty
    render (FFICode c : rest) as = c `T.cons` render rest as
    render (FFIArg i : rest) as
      | i >= 0 && i < length as =
          T.pack (as !! i) `T.append` render rest as
      | otherwise = error "FFI - Argument index out of bounds"
    render (FFIError e : _) _ = error e  -- unreachable: filtered above
-- | Render a JS AST to text at indentation level 0.
compileJS :: JS -> T.Text
compileJS = compileJS' 0
-- | Render a JS AST node to text.  @compileJS' indent js@ produces
-- code indented by @indent@ spaces wherever block structure requires
-- it (function bodies, branches, switch cases).
compileJS' :: Int -> JS -> T.Text
compileJS' indent JSNoop = ""
compileJS' indent (JSAnnotation annotation js) =
    "/** @"
  `T.append` T.pack (show annotation)
  `T.append` " */\n"
  `T.append` compileJS' indent js
compileJS' indent (JSFFI raw args) =
  ffi raw (map (T.unpack . compileJS' indent) args)
compileJS' indent (JSRaw code) =
  T.pack code
compileJS' indent (JSIdent ident) =
  T.pack ident
compileJS' indent (JSFunction args body) =
     T.replicate indent " " `T.append` "function("
  `T.append` T.intercalate "," (map T.pack args)
  `T.append` "){\n"
  `T.append` compileJS' (indent + 2) body
  `T.append` "\n}\n"
compileJS' indent (JSType ty)
  | JSIntTy     <- ty = "i$Int"
  | JSStringTy  <- ty = "i$String"
  | JSIntegerTy <- ty = "i$Integer"
  | JSFloatTy   <- ty = "i$Float"
  | JSCharTy    <- ty = "i$Char"
  | JSPtrTy     <- ty = "i$Ptr"
  | JSForgotTy  <- ty = "i$Forgot"
compileJS' indent (JSSeq seq) =
  T.intercalate ";\n" (
    map (
      (T.replicate indent " " `T.append`) . (compileJS' indent)
    ) $ filter (/= JSNoop) seq
  ) `T.append` ";"
compileJS' indent (JSReturn val) =
  "return " `T.append` compileJS' indent val
compileJS' indent (JSApp lhs rhs)
  | JSFunction {} <- lhs =
    T.concat ["(", compileJS' indent lhs, ")(", args, ")"]
  | otherwise =
    T.concat [compileJS' indent lhs, "(", args, ")"]
  where args :: T.Text
        args = T.intercalate "," $ map (compileJS' 0) rhs
compileJS' indent (JSNew name args) =
    "new "
  `T.append` T.pack name
  `T.append` "("
  `T.append` T.intercalate "," (map (compileJS' 0) args)
  `T.append` ")"
compileJS' indent (JSError exc) =
  "(function(){throw new Error(\"" `T.append` T.pack exc `T.append` "\")})()"
compileJS' indent (JSBinOp op lhs rhs) =
    compileJS' indent lhs
  `T.append` " "
  `T.append` T.pack op
  `T.append` " "
  `T.append` compileJS' indent rhs
compileJS' indent (JSPreOp op val) =
  T.pack op `T.append` compileJS' indent val
-- Fix: 'JSPostOp' previously had no clause here, so rendering any
-- postfix operator (e.g. x++) crashed with a pattern-match failure.
compileJS' indent (JSPostOp op val) =
  compileJS' indent val `T.append` T.pack op
compileJS' indent (JSProj obj field)
  | JSFunction {} <- obj =
    T.concat ["(", compileJS' indent obj, ").", T.pack field]
  | JSAssign {} <- obj =
    T.concat ["(", compileJS' indent obj, ").", T.pack field]
  | otherwise =
    compileJS' indent obj `T.append` ('.' `T.cons` T.pack field)
compileJS' indent JSNull =
  "null"
compileJS' indent JSUndefined =
  "undefined"
compileJS' indent JSThis =
  "this"
compileJS' indent JSTrue =
  "true"
compileJS' indent JSFalse =
  "false"
compileJS' indent (JSArray elems) =
  "[" `T.append` T.intercalate "," (map (compileJS' 0) elems) `T.append` "]"
compileJS' indent (JSString str) =
  "\"" `T.append` T.pack str `T.append` "\""
compileJS' indent (JSNum num)
  | JSInt i                    <- num = T.pack (show i)
  | JSFloat f                  <- num = T.pack (show f)
  | JSInteger JSBigZero        <- num = T.pack "i$ZERO"
  | JSInteger JSBigOne         <- num = T.pack "i$ONE"
  | JSInteger (JSBigInt i)     <- num = T.pack (show i)
  | JSInteger (JSBigIntExpr e) <- num =
    "i$bigInt(" `T.append` compileJS' indent e `T.append` ")"
compileJS' indent (JSAssign lhs rhs) =
  compileJS' indent lhs `T.append` " = " `T.append` compileJS' indent rhs
-- Top-level allocation of a constructor call gets its own statement.
compileJS' 0 (JSAlloc name (Just val@(JSNew _ _))) =
    "var "
  `T.append` T.pack name
  `T.append` " = "
  `T.append` compileJS' 0 val
  `T.append` ";\n"
compileJS' indent (JSAlloc name val) =
    "var "
  `T.append` T.pack name
  `T.append` maybe "" ((" = " `T.append`) . compileJS' indent) val
compileJS' indent (JSIndex lhs rhs) =
    compileJS' indent lhs
  `T.append` "["
  `T.append` compileJS' indent rhs
  `T.append` "]"
compileJS' indent (JSCond branches) =
  T.intercalate " else " $ map createIfBlock branches
  where
    -- A JSNoop condition is an unconditional (else) block.
    createIfBlock (JSNoop, e@(JSSeq _)) =
         "{\n"
      `T.append` compileJS' (indent + 2) e
      `T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
    createIfBlock (JSNoop, e) =
         "{\n"
      `T.append` compileJS' (indent + 2) e
      `T.append` ";\n" `T.append` T.replicate indent " " `T.append` "}"
    createIfBlock (cond, e@(JSSeq _)) =
         "if (" `T.append` compileJS' indent cond `T.append` ") {\n"
      `T.append` compileJS' (indent + 2) e
      `T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
    createIfBlock (cond, e) =
         "if (" `T.append` compileJS' indent cond `T.append` ") {\n"
      `T.append` T.replicate (indent + 2) " "
      `T.append` compileJS' (indent + 2) e
      `T.append` ";\n"
      `T.append` T.replicate indent " "
      `T.append` "}"
-- A single unconditional branch collapses to its statements.
compileJS' indent (JSSwitch val [(_,JSSeq seq)] Nothing) =
  let (h,t) = splitAt 1 seq in
      (T.concat (map (compileJS' indent) h) `T.append` ";\n")
    `T.append` (
      T.intercalate ";\n" $ map (
        (T.replicate indent " " `T.append`) . compileJS' indent
      ) t
    )
compileJS' indent (JSSwitch val branches def) =
     "switch(" `T.append` compileJS' indent val `T.append` "){\n"
  `T.append` T.concat (map mkBranch branches)
  `T.append` mkDefault def
  `T.append` T.replicate indent " " `T.append` "}"
  where
    mkBranch :: (JS, JS) -> T.Text
    mkBranch (tag, code) =
         T.replicate (indent + 2) " "
      `T.append` "case "
      `T.append` compileJS' indent tag
      `T.append` ":\n"
      `T.append` compileJS' (indent + 4) code
      `T.append` "\n"
      `T.append` (T.replicate (indent + 4) " " `T.append` "break;\n")

    mkDefault :: Maybe JS -> T.Text
    mkDefault Nothing = ""
    mkDefault (Just def) =
         T.replicate (indent + 2) " " `T.append` "default:\n"
      `T.append` compileJS' (indent + 4) def
      `T.append` "\n"
compileJS' indent (JSTernary cond true false) =
  let c = compileJS' indent cond
      t = compileJS' indent true
      f = compileJS' indent false in
      "("
    `T.append` c
    `T.append` ")?("
    `T.append` t
    `T.append` "):("
    `T.append` f
    `T.append` ")"
compileJS' indent (JSParens js) =
  "(" `T.append` compileJS' indent js `T.append` ")"
compileJS' indent (JSWhile cond body) =
     "while (" `T.append` compileJS' indent cond `T.append` ") {\n"
  `T.append` compileJS' (indent + 2) body
  `T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
compileJS' indent (JSWord word)
  | JSWord8 b <- word =
    "new Uint8Array([" `T.append` T.pack (show b) `T.append` "])"
  | JSWord16 b <- word =
    "new Uint16Array([" `T.append` T.pack (show b) `T.append` "])"
  | JSWord32 b <- word =
    "new Uint32Array([" `T.append` T.pack (show b) `T.append` "])"
  | JSWord64 b <- word =
    "i$bigInt(\"" `T.append` T.pack (show b) `T.append` "\")"
-- Smart constructors for frequently used JS expression shapes -------

-- | @obj instanceof cls@
jsInstanceOf :: JS -> String -> JS
jsInstanceOf obj cls = JSBinOp "instanceof" obj (JSIdent cls)

jsOr :: JS -> JS -> JS
jsOr lhs rhs = JSBinOp "||" lhs rhs

jsAnd :: JS -> JS -> JS
jsAnd lhs rhs = JSBinOp "&&" lhs rhs

-- | Method call: @obj.meth(args...)@
jsMeth :: JS -> String -> [JS] -> JS
jsMeth obj meth args = JSApp (JSProj obj meth) args

-- | Plain function call by name.
jsCall :: String -> [JS] -> JS
jsCall fun args = JSApp (JSIdent fun) args

jsTypeOf :: JS -> JS
jsTypeOf js = JSPreOp "typeof " js

-- | Equality test; big integers compare via their @.equals@ method.
jsEq :: JS -> JS -> JS
jsEq lhs@(JSNum (JSInteger _)) rhs = JSApp (JSProj lhs "equals") [rhs]
jsEq lhs rhs@(JSNum (JSInteger _)) = JSApp (JSProj lhs "equals") [rhs]
jsEq lhs rhs = JSBinOp "==" lhs rhs

jsNotEq :: JS -> JS -> JS
jsNotEq lhs rhs = JSBinOp "!=" lhs rhs

jsIsNumber :: JS -> JS
jsIsNumber js = (jsTypeOf js) `jsEq` (JSString "number")

jsIsNull :: JS -> JS
jsIsNull js = JSBinOp "==" js JSNull

-- | Wrap as a big integer, reusing the shared zero/one constants.
jsBigInt :: JS -> JS
jsBigInt (JSString "0") = JSNum (JSInteger JSBigZero)
jsBigInt (JSString "1") = JSNum (JSInteger JSBigOne)
jsBigInt js = JSNum $ JSInteger $ JSBigIntExpr js

-- | Read element 0 of a typed-array-packed word.
jsUnPackBits :: JS -> JS
jsUnPackBits js = JSIndex js $ JSNum (JSInt 0)

-- Pack a value into the appropriately sized typed array.
jsPackUBits8 :: JS -> JS
jsPackUBits8 js = JSNew "Uint8Array" [JSArray [js]]

jsPackUBits16 :: JS -> JS
jsPackUBits16 js = JSNew "Uint16Array" [JSArray [js]]

jsPackUBits32 :: JS -> JS
jsPackUBits32 js = JSNew "Uint32Array" [JSArray [js]]

jsPackSBits8 :: JS -> JS
jsPackSBits8 js = JSNew "Int8Array" [JSArray [js]]

jsPackSBits16 :: JS -> JS
jsPackSBits16 js = JSNew "Int16Array" [JSArray [js]]

jsPackSBits32 :: JS -> JS
jsPackSBits32 js = JSNew "Int32Array" [JSArray [js]]
| BartAdv/Idris-dev | src/IRTS/JavaScript/AST.hs | bsd-3-clause | 11,304 | 0 | 15 | 3,095 | 4,348 | 2,292 | 2,056 | 315 | 10 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Machine
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Data.Machine
( module Data.Machine.Is
, module Data.Machine.Moore
, module Data.Machine.Mealy
, module Data.Machine.Plan
, module Data.Machine.Process
, module Data.Machine.Source
, module Data.Machine.Tee
, module Data.Machine.Type
, module Data.Machine.Wye
) where
import Data.Machine.Is
import Data.Machine.Mealy
import Data.Machine.Moore
import Data.Machine.Plan
import Data.Machine.Process
import Data.Machine.Source
import Data.Machine.Tee
import Data.Machine.Type
import Data.Machine.Wye
| YoEight/machines | src/Data/Machine.hs | bsd-3-clause | 922 | 0 | 5 | 129 | 136 | 97 | 39 | 19 | 0 |
module C1 where
import D1 hiding (main)
-- Sum of x ^ pow over a list ('pow' is imported from module D1).
-- NOTE(review): this looks like a HaRe refactoring-test fixture
-- (AstOut); its exact layout may be significant -- confirm before
-- reformatting.
sumSquares1 ((x : xs)) = (x ^ pow) + (sumSquares1 xs)
sumSquares1 [] = 0
| kmate/HaRe | old/testing/unfoldDef/C1_AstOut.hs | bsd-3-clause | 114 | 0 | 8 | 23 | 58 | 33 | 25 | 4 | 1 |
module WhereIn4 where
--In this Example: duplicate the local definition 'y' with new name 'x' will fail.
-- NOTE(review): HaRe refactoring-test fixture (TokOut = expected
-- token output); exact layout is likely part of the expected result
-- -- confirm before reformatting.
x = 5
foo,bar::Int->Int
-- 'foo' adds 3 to its argument.
foo x= x + 3
--this is comment
-- 'bar' sums its argument with top-level 'x' and the local 'y'.
bar z = x + y + z
        where
        y::Int
        y = 3
-- The inner let shadows the top-level 'x'.
ram = (let fred = (let x = 5 in x) in fred + x) + 1
main = (foo 1, bar x, ram) | kmate/HaRe | old/testing/duplication/WhereIn4_TokOut.hs | bsd-3-clause | 308 | 0 | 15 | 98 | 123 | 68 | 55 | 9 | 1 |
module ComplexParamIn1 where
--The application of a function is replaced by the right-hand side of the definition,
--with actual parameters replacing formals.
--In this example, unfold the first 'sq' in 'sumSquares'
--This example aims to test unfolding a definition with guards.
-- 'sumSquares' is the expected result of unfolding 'sq' (the tuple
-- parameter becomes an explicit case); 'sq' is the original definition.
sumSquares x y = (case (x, y) of
                      (m, n) -> m ^ n)
sq (m,n)=m^n
| kmate/HaRe | old/testing/unfoldDef/ComplexParamIn1_TokOut.hs | bsd-3-clause | 370 | 0 | 9 | 84 | 65 | 39 | 26 | 4 | 1 |
{-# LANGUAGE MagicHash #-}
module Main (main) where
import GHC.Exts (Double(D#), Float(F#), word2Double#, word2Float#)
-- | Exercise the 'word2Double#' and 'word2Float#' primops at zero and
-- at boundary values, printing the boxed results.
main :: IO ()
main = do
    print (D# (word2Double# 0##))
    -- 9007199254740992 is 2^53, which is the largest integer which
    -- can be stored in a 64-bit IEEE floating-point value without
    -- loss of precision.
    -- print (D# (word2Double# 9007199254740992##)) -- disabled, overflows 32 bit Word
    print (D# (word2Double# 4294967295##))
    print (D# (word2Double# 2147483647##))
    print (F# (word2Float# 0##))
    -- 16777216 is 2^24, which is the largest integer which can be
    -- stored in a 32-bit IEEE floating-point value without loss of
    -- precision
    print (F# (word2Float# 16777216##))
| beni55/ghcjs | test/ghc/codeGen/word2Float64.hs | mit | 738 | 0 | 11 | 152 | 149 | 80 | 69 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Tests.Writers.Markdown (tests) where
import Test.Framework
import Text.Pandoc.Builder
import Text.Pandoc
import Tests.Helpers
import Tests.Arbitrary()
-- | Render any 'ToPandoc' value to Markdown using default writer options.
markdown :: (ToString a, ToPandoc a) => a -> String
markdown doc = writeMarkdown def (toPandoc doc)
{-
"my test" =: X =?> Y
is shorthand for
test markdown "my test" $ X =?> Y
which is in turn shorthand for
test markdown "my test" (X,Y)
-}
infix 4 =:
-- | Shorthand test builder: @"name" =: (input, expected)@ runs the
-- 'markdown' writer on @input@ and compares against @expected@
-- (see the explanatory comment block above).
(=:) :: (ToString a, ToPandoc a)
     => String -> (a, String) -> Test
(=:) = test markdown
tests :: [Test]
-- Writer regression cases: each pairs a Builder document with the exact
-- Markdown the writer must emit.
tests = [ "indented code after list"
          =: (orderedList [ para "one" <> para "two" ] <> codeBlock "test")
          =?> "1. one\n\n    two\n\n<!-- -->\n\n    test"
        , "list with tight sublist"
          =: bulletList [ plain "foo" <> bulletList [ plain "bar" ],
                          plain "baz" ]
          =?> "- foo\n    - bar\n- baz\n"
        ] ++ [shortcutLinkRefsTests]
shortcutLinkRefsTests :: Test
-- Uses a local (=:) that renders with 'writerReferenceLinks' enabled,
-- so these cases pin down when a reference link may (or may not) be
-- written in the shortcut @[foo]@ form instead of @[foo][]@.
shortcutLinkRefsTests =
  let infix 4 =:
      (=:) :: (ToString a, ToPandoc a)
           => String -> (a, String) -> Test
      (=:) = test (writeMarkdown (def {writerReferenceLinks = True}) . toPandoc)
  in testGroup "Shortcut reference links"
     [ "Simple link (shortcutable)"
       =: (para (link "/url" "title" "foo"))
       =?> "[foo]\n\n  [foo]: /url \"title\""
     , "Followed by another link (unshortcutable)"
       =: (para ((link "/url1" "title1" "first")
              <> (link "/url2" "title2" "second")))
       =?> unlines [ "[first][][second]"
                   , ""
                   , "  [first]: /url1 \"title1\""
                   , "  [second]: /url2 \"title2\""
                   ]
     , "Followed by space and another link (unshortcutable)"
       =: (para ((link "/url1" "title1" "first") <> " "
              <> (link "/url2" "title2" "second")))
       =?> unlines [ "[first][] [second]"
                   , ""
                   , "  [first]: /url1 \"title1\""
                   , "  [second]: /url2 \"title2\""
                   ]
     , "Reference link is used multiple times (unshortcutable)"
       =: (para ((link "/url1" "" "foo") <> (link "/url2" "" "foo")
              <> (link "/url3" "" "foo")))
       =?> unlines [ "[foo][][foo][1][foo][2]"
                   , ""
                   , "  [foo]: /url1"
                   , "  [1]: /url2"
                   , "  [2]: /url3"
                   ]
     , "Reference link is used multiple times (unshortcutable)"
       =: (para ((link "/url1" "" "foo") <> " " <> (link "/url2" "" "foo")
              <> " " <> (link "/url3" "" "foo")))
       =?> unlines [ "[foo][] [foo][1] [foo][2]"
                   , ""
                   , "  [foo]: /url1"
                   , "  [1]: /url2"
                   , "  [2]: /url3"
                   ]
     , "Reference link is followed by text in brackets"
       =: (para ((link "/url" "" "link") <> "[text in brackets]"))
       =?> unlines [ "[link][]\\[text in brackets\\]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by space and text in brackets"
       =: (para ((link "/url" "" "link") <> " [text in brackets]"))
       =?> unlines [ "[link][] \\[text in brackets\\]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by RawInline"
       =: (para ((link "/url" "" "link") <> rawInline "markdown" "[rawText]"))
       =?> unlines [ "[link][][rawText]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by space and RawInline"
       =: (para ((link "/url" "" "link") <> space <> rawInline "markdown" "[rawText]"))
       =?> unlines [ "[link][] [rawText]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by RawInline with space"
       =: (para ((link "/url" "" "link") <> rawInline "markdown" " [rawText]"))
       =?> unlines [ "[link][] [rawText]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by citation"
       =: (para ((link "/url" "" "link") <> cite [Citation "author" [] [] NormalCitation 0 0] (str "[@author]")))
       =?> unlines [ "[link][][@author]"
                   , ""
                   , "  [link]: /url"
                   ]
     , "Reference link is followed by space and citation"
       =: (para ((link "/url" "" "link") <> space <> cite [Citation "author" [] [] NormalCitation 0 0] (str "[@author]")))
       =?> unlines [ "[link][] [@author]"
                   , ""
                   , "  [link]: /url"
                   ]
     ]
| mindriot101/pandoc | tests/Tests/Writers/Markdown.hs | gpl-2.0 | 5,104 | 0 | 19 | 2,099 | 1,036 | 560 | 476 | 98 | 1 |
-- |
-- Module: BigE.Model
-- Copyright: (c) 2017 Patrik Sandahl
-- Licence: MIT
-- Maintainer: Patrik Sandahl <[email protected]>
-- Stability: experimental
-- Portability: portable
-- Utilities to read Wavefront model files and produce vectors that can be
-- used to create meshes.
module BigE.Model
( vertPFromFile
, vertPNFromFile
, vertPNTxFromFile
) where
import qualified BigE.Attribute.Vert_P as Vert_P
import qualified BigE.Attribute.Vert_P_N as Vert_P_N
import qualified BigE.Attribute.Vert_P_N_Tx as Vert_P_N_Tx
import BigE.Model.Assembler (assembleVertP, assembleVertPN,
assembleVertPNTx)
import BigE.Model.Parser (FilePart, fromFile)
import Control.Monad.IO.Class (MonadIO)
import Data.Vector.Storable (Vector, fromList)
import Foreign (Storable)
import Graphics.GL (GLuint)
-- | Read a model from the given file. Only use the position attribute. All
-- valid model files shall be able to read.
-- Parse or assembly failures are reported as 'Left' with a message.
vertPFromFile :: MonadIO m => FilePath
              -> m (Either String (Vector Vert_P.Vertex, Vector GLuint))
vertPFromFile = readIt assembleVertP
-- | Read a model from the given file. The model file must have the attributes
-- position and normal to be able to load.
vertPNFromFile :: MonadIO m => FilePath
               -> m (Either String (Vector Vert_P_N.Vertex, Vector GLuint))
vertPNFromFile = readIt assembleVertPN
-- | Read a model from the given file. The model file must have the attributes
-- position, normal and texture coordinate to be able to load.
vertPNTxFromFile :: MonadIO m => FilePath
                 -> m (Either String (Vector Vert_P_N_Tx.Vertex, Vector GLuint))
vertPNTxFromFile = readIt assembleVertPNTx
-- | Shared worker: parse the file into 'FilePart's and run the given
-- assembler over them, packing the result into storable vectors.
-- Parser errors and assembler failures both surface as 'Left'.
readIt :: (Storable a, Storable b, MonadIO m)
       => ([FilePart] -> Maybe ([a], [b])) -> FilePath
       -> m (Either String (Vector a, Vector b))
readIt assemble file = do
    eParts <- fromFile file
    return $ do
        parts <- eParts
        case assemble parts of
            Nothing       -> Left "Cannot assemble model parts"
            Just (xs, ys) -> Right (fromList xs, fromList ys)
| psandahl/big-engine | src/BigE/Model.hs | mit | 2,315 | 0 | 16 | 619 | 469 | 260 | 209 | 34 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Model.InitDB
( initDBSpecs
) where
import TestImport
import qualified Data.List as L
-- | Check that a freshly initialised database has no rows in any table.
-- The five per-table cases were copy-pasted; factor them through one
-- helper so descriptions and assertions stay consistent.
initDBSpecs :: Spec
initDBSpecs =
    ydescribe "Initial DB should be empty" $ do
      -- user/article/comment/image/tag tables are empty when initial.
      emptyTable "article" ([] :: [Filter Article])
      emptyTable "comment" ([] :: [Filter Comment])
      emptyTable "image" ([] :: [Filter Image])
      emptyTable "tag" ([] :: [Filter Tag])
      emptyTable "user" ([] :: [Filter User])
  where
    -- Assert that selecting all rows of the named table yields nothing.
    emptyTable name filters =
      yit ("leaves the " ++ name ++ " table empty") $ do
        rows <- runDB $ selectList filters []
        assertEqual (name ++ " table empty") 0 $ L.length rows
| cosmo0920/Ahblog | tests/Model/InitDB.hs | mit | 1,102 | 0 | 16 | 267 | 336 | 161 | 175 | 23 | 1 |
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
{-# LANGUAGE
PartialTypeSignatures
, OverloadedStrings
, RecordWildCards
#-}
module XMonad.Javran.Config
( myConfig
) where
-- TODO: xmonad restarter
import Data.Monoid
import System.IO
import XMonad
import XMonad.Layout.Fullscreen
import XMonad.Hooks.ManageDocks
import XMonad.Util.Run
import Data.Time.Clock
import qualified XMonad.Util.ExtensibleState as XS
import XMonad.Hooks.EwmhDesktops hiding (fullscreenEventHook)
import XMonad.Javran.Config.Workspace
import XMonad.Javran.Config.State
import XMonad.Javran.Config.LogHook
import qualified XMonad.Javran.Config.Keys as ConfKeys
import qualified XMonad.Javran.Config.LayoutHook as LyH
import qualified XMonad.Javran.Config.ManageHook as MgmH
-- TODO: fullscreen without frame?
-- | Assemble the xmonad configuration. The 'Handle' (named dzenHandle,
-- presumably a pipe to a dzen status bar — confirm) is consumed by the
-- log hook.
myConfig :: Handle -> XConfig _
myConfig dzenHandle = def
  { modMask = mod3Mask
  , terminal = "xfce4-terminal"
  , keys = ConfKeys.keys
  , manageHook = fullscreenManageHook <> manageDocks <> MgmH.manageHook
  -- NOTE(review): myEwmhDesktopsEventHook is commented out of the chain
  -- below — confirm whether that is still intentional.
  , handleEventHook = fullscreenEventHook <> docksEventHook -- <> myEwmhDesktopsEventHook
  , layoutHook = LyH.layoutHook
  , logHook = mkLogHook dzenHandle <> ewmhDesktopsLogHook
  , focusedBorderColor = "cyan"
  , workspaces = workspaceIds
  , startupHook = myStartupHook <> ewmhDesktopsStartup
  }
-- | Record the startup timestamp in extensible state (read back by
-- myEwmhDesktopsEventHook) and run the user's startup script.
myStartupHook :: X ()
myStartupHook = do
  now <- liftIO getCurrentTime
  XS.put (StartupTime now)
  safeSpawn "/bin/bash" ["/home/javran/.xmonad/on-startup.sh"]
-- | EWMH event hook with a startup grace period: ignore
-- @_NET_ACTIVE_WINDOW@ requests during the first 5 seconds after
-- startup, otherwise defer to the stock EWMH handler.
myEwmhDesktopsEventHook :: Event -> X All
myEwmhDesktopsEventHook e@ClientMessageEvent{..} = do
  activeWindowAtom <- getAtom "_NET_ACTIVE_WINDOW"
  now <- liftIO getCurrentTime
  StartupTime startupTime <- XS.get
  if ev_message_type == activeWindowAtom && now `diffUTCTime` startupTime <= 5.0
    then pure (All True)
    else ewmhDesktopsEventHook e
myEwmhDesktopsEventHook e = ewmhDesktopsEventHook e
| Javran/xmonad-javran | src/XMonad/Javran/Config.hs | mit | 1,960 | 0 | 11 | 309 | 393 | 231 | 162 | 47 | 2 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{- |
Module : Control.SIArrow
Description : Categories of reversible computations.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : Paweł Nowak <[email protected]>
Stability : experimental
Categories of reversible computations.
-}
module Control.SIArrow (
-- * Arrow.
SIArrow(..),
(^>>), (>>^), (^<<), (<<^),
(#>>), (>>#), (#<<), (<<#),
-- * Functor and applicative.
(/$/), (/*/), (/*), (*/),
-- * Signaling errors.
sifail, (/?/),
-- * Combinators.
sisequence,
sisequence_,
sireplicate,
sireplicate_
) where
import Control.Arrow (Kleisli(..))
import Control.Category
import Control.Category.Structures
import Control.Lens.Cons
import Control.Lens.Empty
import Control.Lens.Iso
import Control.Lens.SemiIso
import Control.Monad
import Data.Semigroupoid.Dual
import Prelude hiding (id, (.))
-- Fixities: composition operators bind loosest (1); the applicative
-- combinators mirror the precedences of (<$>) (4), (<*>) (5), (<|>) (3).
infixr 1 ^>>, ^<<, #>>, #<<
infixr 1 >>^, <<^, >>#, <<#
infixl 4 /$/
infixl 5 /*/, */, /*
infixl 3 /?/
-- | A category equipped with an embedding 'siarr' from @SemiIso@ into @cat@ and some
-- additional structure.
--
-- SIArrow abstracts categories of reversible computations
-- (with reversible side effects).
--
-- The category @cat@ should contain @SemiIso@ as a sort of
-- \"subcategory of pure computations\".
class (Products cat, Coproducts cat, CatPlus cat) => SIArrow cat where
    -- | Allows you to lift a SemiIso into @cat@. The resulting arrow should be
    -- in some sense minimal or \"pure\", similar to 'pure', 'return' and
    -- 'arr' from "Control.Category".
    siarr :: ASemiIso' a b -> cat a b
    siarr = sipure . rev

    -- | Reversed version of 'siarr'.
    --
    -- Use this where you would use 'pure'.
    sipure :: ASemiIso' b a -> cat a b
    sipure = siarr . rev

    -- | Allows a computation to depend on its input value.
    --
    -- I am not sure if this is the right way to get that ArrowApply or Monad
    -- like power. It seems quite easy to break the parser/pretty-printer inverse
    -- guarantee using this. On the other hand we have to be careful only when
    -- constructing the SemiIso using 'iso'/'semiIso' - and with an invalid SemiIso
    -- we could break everything anyway using 'siarr'.
    sibind :: ASemiIso a (cat a b) (cat a b) b -> cat a b

    -- | @sisome v@ repeats @v@ as long as possible, but no less than once.
    sisome :: cat () b -> cat () [b]
    sisome v = _Cons /$/ v /*/ simany v

    -- | @simany v@ repeats @v@ as long as possible.
    simany :: cat () b -> cat () [b]
    simany v = sisome v /+/ sipure _Empty

    {-# MINIMAL (siarr | sipure), sibind #-}
-- | Kleisli categories: a failing SemiIso application becomes a monadic
-- failure via 'fail'.
-- NOTE(review): 'either fail return' requires the monad's 'fail'
-- (MonadFail on modern base) — confirm against supported GHC versions.
instance MonadPlus m => SIArrow (Kleisli m) where
    siarr ai = Kleisli $ either fail return . apply ai
    sibind ai = Kleisli $ \a -> either fail (($ a) . runKleisli) $ apply ai a

-- | The dual category embeds the reversed SemiIso.
instance SIArrow cat => SIArrow (Dual cat) where
    siarr = Dual . sipure
    sibind ai = Dual $ sibind (iso id getDual . rev ai . iso getDual id)

instance SIArrow ReifiedSemiIso' where
    siarr = reifySemiIso
    sibind ai = ReifiedSemiIso' $
        semiIso (\a -> apply ai a >>= flip apply a . runSemiIso)
                (\b -> unapply ai b >>= flip unapply b . runSemiIso)
-- | Composes a SemiIso with an arrow.
(^>>) :: SIArrow cat => ASemiIso' a b -> cat b c -> cat a c
(^>>) f a = a . siarr f

-- | Composes an arrow with a SemiIso.
(>>^) :: SIArrow cat => cat a b -> ASemiIso' b c -> cat a c
(>>^) a f = siarr f . a

-- | Composes a SemiIso with an arrow, backwards.
(^<<) :: SIArrow cat => ASemiIso' b c -> cat a b -> cat a c
(^<<) f a = siarr f . a

-- | Composes an arrow with a SemiIso, backwards.
(<<^) :: SIArrow cat => cat b c -> ASemiIso' a b -> cat a c
(<<^) a f = a . siarr f

-- | Composes a reversed SemiIso with an arrow.
(#>>) :: SIArrow cat => ASemiIso' b a -> cat b c -> cat a c
(#>>) f a = a . sipure f

-- | Composes an arrow with a reversed SemiIso.
(>>#) :: SIArrow cat => cat a b -> ASemiIso' c b -> cat a c
(>>#) a f = sipure f . a

-- | Composes a reversed SemiIso with an arrow, backwards.
(#<<) :: SIArrow cat => ASemiIso' c b -> cat a b -> cat a c
(#<<) f a = sipure f . a

-- | Composes an arrow with a reversed SemiIso, backwards.
(<<#) :: SIArrow cat => cat b c -> ASemiIso' b a -> cat a c
(<<#) a f = a . sipure f

-- | Postcomposes an arrow with a reversed SemiIso.
-- The analogue of '<$>' and synonym for '#<<'.
(/$/) :: SIArrow cat => ASemiIso' b' b -> cat a b -> cat a b'
(/$/) = (#<<)
-- | The product of two arrows with duplicate units removed. Side effects
-- are sequenced from left to right.
--
-- The uncurried analogue of '<*>'.
(/*/) :: SIArrow cat => cat () b -> cat () c -> cat () (b, c)
(/*/) a b = unit ^>> (a *** b)

-- | The product of two arrows, where the second one has no input and no
-- output (but can have side effects), with duplicate units removed.
-- Side effects are sequenced from left to right.
--
-- The uncurried analogue of '<*'.
(/*) :: SIArrow cat => cat () a -> cat () () -> cat () a
(/*) f g = unit /$/ (f /*/ g)

-- | The product of two arrows, where the first one has no input and no
-- output (but can have side effects), with duplicate units removed.
-- Side effects are sequenced from left to right.
--
-- The uncurried analogue of '*>'.
(*/) :: SIArrow cat => cat () () -> cat () a -> cat () a
(*/) f g = (unit . swapped) /$/ (f /*/ g)
-- | An arrow that fails with an error message.
sifail :: SIArrow cat => String -> cat a b
sifail msg = siarr (alwaysFailing msg)

-- | Provides an error message in the case of failure.
(/?/) :: SIArrow cat => cat a b -> String -> cat a b
(/?/) f msg = f /+/ sifail msg
-- | Equivalent of 'sequence': chains the arrows and collects results,
-- ending with the empty-list embedding.
sisequence :: SIArrow cat => [cat () a] -> cat () [a]
sisequence = foldr (\x acc -> _Cons /$/ x /*/ acc) (sipure _Empty)

-- | Equivalent of 'sequence_', restricted to units.
sisequence_ :: SIArrow cat => [cat () ()] -> cat () ()
sisequence_ = foldr (\x acc -> unit /$/ x /*/ acc) (sipure _Empty)
-- | Equivalent of 'replicateM'.
sireplicate :: SIArrow cat => Int -> cat () a -> cat () [a]
sireplicate n = sisequence . replicate n

-- | Equivalent of 'replicateM_', restricted to units.
sireplicate_ :: SIArrow cat => Int -> cat () () -> cat () ()
sireplicate_ n = sisequence_ . replicate n
| pawel-n/semi-iso | Control/SIArrow.hs | mit | 6,380 | 0 | 12 | 1,462 | 1,771 | 940 | 831 | -1 | -1 |
module SimpleNoun where
import ClassyPrelude
import Numeric.Natural
import qualified Urbit.Noun as N
-- | Atoms are arbitrary-precision naturals.
type Atom = Natural
-- | A noun: either an atom or a cell (pair) of nouns.
type Noun = Tree Atom
-- | Binary tree with values at the leaves; both constructors strict.
data Tree a
  = A !a
  | C !(Tree a) !(Tree a)
  deriving (Eq, Ord, Read, Functor, Generic)
instance Hashable a => Hashable (Tree a)
-- | A tree re-associated for display: a right-nested chain of cells
-- becomes one flat list, so it prints as @[a b c]@ (used by 'Show').
data Fern a
  = FernA !a
  | FernF [Fern a]
-- | Flatten a tree's tail-chain: @C h t@ prepends @h@ onto the fern of
-- @t@, starting a fresh two-element fern when @t@ is an atom.
toFern :: Tree a -> Fern a
toFern (A a)   = FernA a
toFern (C h t) =
  case toFern t of
    leaf@FernA{} -> FernF [toFern h, leaf]
    FernF rest   -> FernF (toFern h : rest)
-- | Atoms print bare; sequences print bracketed and space-separated.
instance Show a => Show (Fern a) where
  show (FernA a)  = show a
  show (FernF xs) = "[" <> intercalate " " (map show xs) <> "]"
-- | Show a tree via its flattened 'Fern' form, e.g. @[1 2 3]@.
instance Show a => Show (Tree a) where
  show = show . toFern
-- | Noun truth values: 0 is true (\"yes\"), 1 is false (\"no\").
yes, no :: Noun
yes = A 0
no = A 1
-- | Encode a 'Bool' as a noun: 'True' -> 'yes' (0), 'False' -> 'no' (1).
loob :: Bool -> Noun
loob b = if b then yes else no
-- | Encode 'Text' as a UTF-8 atom. 'N.textToUtf8Atom' is expected to
-- always produce an atom; a cell result aborts.
textToAtom :: Text -> Atom
textToAtom t = case N.textToUtf8Atom t of
  N.A a -> a
  N.C _ _ -> error "textToAtom: nani!?"
-- | Render an atom through the noun 'Show' instance.
showA :: Atom -> String
showA = show . N.A

-- | 'showA' producing 'Text'.
tshowA :: Atom -> Text
tshowA a = pack (showA a)
-- | Tree address
-- (axis 1 is the whole tree; for axis n, 2n is its head and 2n+1 its tail).
type Axis = Atom
-- | One step into a cell: 'L' = head (axis 2), 'R' = tail (axis 3).
data Dir = L | R
  deriving (Eq, Ord, Enum, Read, Show)
-- | An axis unrolled into steps from the root; see 'toPath'/'toAxis'.
type Path = [Dir]
-- some stuff from hoon.hoon

-- | First step of an axis: 2 goes left, 3 goes right, larger axes
-- recurse on the upper bits. Axes 0 and 1 are invalid.
cap :: Axis -> Dir
cap 2 = L
cap 3 = R
cap a
  | a <= 1    = error "cap: bad axis"
  | otherwise = cap (a `div` 2)
-- | Strip the first step off an axis (the remainder after 'cap').
mas :: Axis -> Axis
mas 2 = 1
mas 3 = 1
mas a
  | a <= 1    = error "mas: bad axis"
  | otherwise = a `mod` 2 + 2 * mas (a `div` 2)
-- | 'cap' and 'mas' computed in one pass: first step plus remainder.
capMas :: Axis -> (Dir, Axis)
capMas 2 = (L, 1)
capMas 3 = (R, 1)
capMas a
  | a <= 1    = error "capMas: bad axis"
  | otherwise = (d, a `mod` 2 + 2 * r)
  where
    (d, r) = capMas (a `div` 2)
-- | Compose axes: @peg a b@ addresses @b@ within the subtree at @a@.
peg :: Axis -> Axis -> Axis
peg a 1 = a
peg a 2 = a * 2
peg a 3 = a * 2 + 1
peg a b = b `mod` 2 + 2 * peg a (b `div` 2)
-- | Fetch the subtree at the given axis. Axis 1 is the whole tree;
-- within a cell, 'L' descends into the head and 'R' into the tail.
-- Note: the view pattern forces 'capMas' before the cell pattern is
-- checked, so invalid axes (<= 1) fail inside 'capMas'.
axis :: Axis -> Tree a -> Tree a
axis 1 n = n
axis (capMas -> (d, r)) (C n m) = case d of
  L -> axis r n
  R -> axis r m
axis a _ = error ("bad axis: " ++ show a)
-- | Replace the subtree at the given axis with @v@, rebuilding the
-- spine of cells along the path. Errors when the axis runs off a leaf.
edit :: Axis -> Tree a -> Tree a -> Tree a
edit 1 v n = v
edit (capMas -> (d, r)) v (C n m) = case d of
  L -> C (edit r v n) m
  R -> C n (edit r v m)
edit a _ _ = error ("bad edit: " ++ show a)
-- | Unroll an axis into directions: write the axis in binary, drop the
-- leading 1, then read left to right with 0 = L and 1 = R. For example
-- 5 = 0b101 becomes [L, R].
toPath :: Axis -> Path
toPath 1 = []
toPath a = case capMas a of
  (d, r) -> d : toPath r
-- | Fold a path back into an axis: double for each step, adding 1 for R.
toAxis :: Path -> Axis
toAxis = foldl' descend 1
  where
    descend acc L = 2 * acc
    descend acc R = 2 * acc + 1
| urbit/urbit | pkg/hs/proto/lib/SimpleNoun.hs | mit | 2,478 | 0 | 14 | 741 | 1,249 | 636 | 613 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Engine.Probing where
import Control.Monad.IO.Class
import Data.Carthage.TargetPlatform
import Data.List ( intersect )
import Data.Romefile ( _frameworkPlatforms )
import Types hiding ( version )
import Utils
import qualified Turtle
import System.FilePath ( (</>) )
-- | Probes the engine for every `FrameworkVersion` in turn, restricted
-- to the given target platforms.
probeEngineForFrameworks
  :: MonadIO m
  => FilePath -- ^ The `FilePath` to the engine
  -> CachePrefix -- ^ The top level directory prefix.
  -> InvertedRepositoryMap -- ^ The map used to resolve `FrameworkName`s to `GitRepoName`s.
  -> [FrameworkVersion] -- ^ A list of `FrameworkVersion` to probe for.
  -> [TargetPlatform] -- ^ A list target platforms restricting the scope of this action.
  -> m [FrameworkAvailability]
probeEngineForFrameworks lCacheDir cachePrefix reverseRomeMap frameworkVersions platforms =
  mapM probeOne frameworkVersions
  where
    -- Probe a single framework version against the full platform list.
    probeOne fv = probeEngineForFramework lCacheDir cachePrefix reverseRomeMap fv platforms
-- | Probes the engine to check whether a `FrameworkVersion` exists for
-- each requested `TargetPlatform`, restricted to the platforms the
-- framework declares support for.
probeEngineForFramework
  :: MonadIO m
  => FilePath -- ^ The `FilePath` to the engine
  -> CachePrefix -- ^ The top level directory prefix.
  -> InvertedRepositoryMap -- ^ The map used to resolve `FrameworkName`s to `GitRepoName`s.
  -> FrameworkVersion -- ^ The `FrameworkVersion` to probe for.
  -> [TargetPlatform] -- ^ A list target platforms restricting the scope of this action.
  -> m FrameworkAvailability
probeEngineForFramework lCacheDir cachePrefix reverseRomeMap frameworkVersion platforms = do
  availabilities <- mapM probePlatform supportedPlatforms
  return (FrameworkAvailability frameworkVersion availabilities)
  where
    probePlatform =
      probeEngineForFrameworkOnPlatform lCacheDir cachePrefix reverseRomeMap frameworkVersion
    -- Only probe platforms the framework actually supports.
    supportedPlatforms =
      platforms `intersect` (_frameworkPlatforms . _framework $ frameworkVersion)
-- | Probes the engine to check whether a `FrameworkVersion` exists for
-- a given `TargetPlatform`: runs @engine list <path>@ and treats a zero
-- exit code as \"present\".
probeEngineForFrameworkOnPlatform
  :: MonadIO m
  => FilePath -- ^ The `FilePath` to the engine
  -> CachePrefix -- ^ The top level directory prefix.
  -> InvertedRepositoryMap -- ^ The map used to resolve `FrameworkName`s to `GitRepoName`s.
  -> FrameworkVersion -- ^ The `FrameworkVersion` to probe for.
  -> TargetPlatform -- ^ A target platforms restricting the scope of this action.
  -> m PlatformAvailability
probeEngineForFrameworkOnPlatform enginePath (CachePrefix prefix) reverseRomeMap (FrameworkVersion fwn version) platform
  = do
    exitCode <- Turtle.proc engineCmd
                            ["list", Turtle.fromString (prefix </> remotePath)]
                            (return $ Turtle.unsafeTextToLine "")
    let present = case exitCode of
          Turtle.ExitSuccess   -> True
          Turtle.ExitFailure _ -> False
    return (PlatformAvailability platform present)
  where
    engineCmd  = Turtle.fromString enginePath
    remotePath = remoteFrameworkPath platform reverseRomeMap fwn version
| blender/Rome | src/Engine/Probing.hs | mit | 3,364 | 0 | 12 | 717 | 469 | 254 | 215 | 52 | 2 |
{-# LANGUAGE BangPatterns, RecordWildCards, FlexibleContexts #-}
module AI.Funn.Optimizer.SGD (SGDState, initSGD, extractSGD, updateSGD) where
import Control.Monad
import Data.Foldable
import AI.Funn.Space
-- | Step-size multiplier applied to the (negated) gradient.
type LearningRate = Double
-- | Decay factor applied to the running moment each step.
type Momentum = Double
-- | State for SGD with momentum over parameters @p@ and deltas @d@ in
-- monad @m@. The vector-space operations (scaling and the two
-- additions) are captured in the record so the state is self-contained.
data SGDState m d p = SGDState {
  sgdStepSize :: LearningRate,
  sgdMomentumWeight :: Momentum,
  sgdScale :: Double -> d -> m d,
  sgdAddDP :: d -> p -> m p,
  sgdAddDD :: d -> d -> m d,
  sgdValue :: p,
  sgdMoment :: d
  }
-- | Build an initial optimiser state from the learning rate, momentum
-- weight, delta-to-parameter addition and starting parameters; the
-- moment starts at 'zero'.
initSGD :: (Monad m, VectorSpace m Double d) => LearningRate -> Momentum -> (d -> p -> m p) -> p -> m (SGDState m d p)
initSGD lr mr add x0 = do
  d0 <- zero
  return $ SGDState {
    sgdStepSize = lr,
    sgdMomentumWeight = mr,
    sgdScale = scale,
    sgdAddDP = add,
    sgdAddDD = plus,
    sgdValue = x0,
    sgdMoment = d0
    }
-- | Read the current parameter value out of the optimiser state.
extractSGD :: SGDState m d p -> p
extractSGD = sgdValue
-- | One optimiser step for gradient @grad@:
-- moment' = momentum * moment - lr * grad; value' = value + moment'.
-- Hyperparameters and operations carry over unchanged.
updateSGD :: (Monad m) => d -> SGDState m d p -> m (SGDState m d p)
updateSGD grad st = do
  decayedMoment <- sgdScale st (sgdMomentumWeight st) (sgdMoment st)
  negStep       <- sgdScale st (negate (sgdStepSize st)) grad
  newMoment     <- sgdAddDD st decayedMoment negStep
  newValue      <- sgdAddDP st newMoment (sgdValue st)
  return st { sgdValue = newValue, sgdMoment = newMoment }
| nshepperd/funn | AI/Funn/Optimizer/SGD.hs | mit | 1,355 | 0 | 12 | 309 | 456 | 256 | 200 | 40 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Database.Hasqueue.Store.SimpleSpec ( spec ) where
import Control.Concurrent.STM.Class
import Control.Monad
import Database.Hasqueue
import Data.List (sort)
import Pipes
import qualified Pipes.Prelude as P
import Pipes.Concurrent
import Test.Hspec
spec :: Spec
spec = do
let withSimple :: (Simple -> IO ()) -> IO ()
withSimple f = do
simple <- startService
f simple
stopService simple
describe "listing all buckets" $ do
let buckets = ["bucket-one", "bucket-two", "bucket-three"]
commands = map CreateBucket buckets ++ [ListBuckets]
it "returns a list of buckets" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
forM_ buckets $ \bid -> do
result <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result <- liftSTM $ recv input
case result of
Just (Right (Buckets bids)) -> sort bids `shouldBe` sort buckets
_ -> fail "Could not list buckets."
describe "creating a bucket" $ do
let bid = "test-bucket"
context "when the bucket does not exist" $ do
let commands = [CreateBucket bid, ListBuckets]
it "creates the bucket" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right (Buckets [bid]))
context "when the bucket does exist" $ do
let commands = [CreateBucket bid, CreateBucket bid, ListBuckets]
it "throws an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Left (BucketExists bid))
result'' `shouldBe` Just (Right (Buckets [bid]))
describe "deleting a bucket" $ do
let bid = "sample-bucket"
context "when the bucket does not exist" $ do
let commands = [DeleteBucket bid, ListBuckets]
it "does nothing" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result `shouldBe` Just (Right Empty)
result' `shouldBe` Just (Right (Buckets []))
context "when the bucket does exist" $ do
let commands = [CreateBucket bid, ListBuckets, DeleteBucket bid, ListBuckets]
it "deletes that bucket" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right (Buckets [bid]))
result'' `shouldBe` Just (Right Empty)
result''' `shouldBe` Just (Right (Buckets []))
describe "renaming a bucket" $ do
let old = "old-bid"
new = "new-bid"
context "when the original bucket does not exist" $ do
let commands = [RenameBucket old new]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket old))
context "when the original bucket exists" $ do
context "but the target bucket exists" $ do
let commands = [CreateBucket old, CreateBucket new, RenameBucket old new]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket old))
result' `shouldBe` Just (Right (Bucket new))
result'' `shouldBe` Just (Left (BucketExists new))
context "and the target bucket does not exist" $ do
let value = String "test"
valueID = "test-value-id"
commands = [CreateBucket old, PutValue old valueID value, RenameBucket old new]
it "moves the bucket" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket old))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Bucket new))
describe "listing the contents of a bucket" $ do
let bid = "tmp-bid"
context "when the bucket is does not exist" $ do
let commands = [ListBucket bid]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket bid))
context "when the bucket exists" $ do
let vid = "tmp-vid"
value = Int 1
commands = [CreateBucket bid, PutValue bid vid value, ListBucket bid]
it "lists the keys" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Values [vid]))
describe "accessing a value" $ do
let bid = "my-bid"
vid = "my-vid"
context "when the value's bucket does not exist" $ do
let commands = [GetValue bid vid]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket bid))
context "when the value's bucket exists" $ do
context "when the value does not exist" $ do
let commands = [CreateBucket bid, GetValue bid vid]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Left (NoSuchValue bid vid))
context "when the value does exists" $ do
let value = Double 1.2
commands = [CreateBucket bid, PutValue bid vid value, GetValue bid vid]
it "returns that value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Value value))
describe "deleting a value" $ do
let bid = "sample-bid"
vid = "sample-vid"
context "when the bucket does not exist" $ do
let commands = [DeleteValue bid vid]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket bid))
context "when the bucket does exists" $ do
context "but the value doesn't exist" $ do
let commands = [CreateBucket bid, ListBucket bid, DeleteValue bid vid, ListBucket bid]
it "does nothing" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right (Values []))
result'' `shouldBe` Just (Right Empty)
result''' `shouldBe` Just (Right (Values []))
context "and the value exists" $ do
let value = Null
commands = [CreateBucket bid, PutValue bid vid value, ListBucket bid, DeleteValue bid vid, ListBucket bid]
it "deletes that value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result'''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Values [vid]))
result''' `shouldBe` Just (Right Empty)
result'''' `shouldBe` Just (Right (Values []))
describe "putting a value" $ do
let bid = "put-bucket"
vid = "put-value"
value = Int 2
context "when the bucket does not exist" $ do
let commands = [PutValue bid vid value]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket bid))
context "when the bucket does exist" $ do
context "when the value doesn't exist" $ do
let commands = [ CreateBucket bid
, PutValue bid vid value
, GetValue bid vid
]
it "puts the value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Value value))
context "when the value already exists" $ do
let value' = Null
commands = [ CreateBucket bid
, PutValue bid vid value'
, GetValue bid vid
, PutValue bid vid value
, GetValue bid vid
]
it "puts the value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result'''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket bid))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Value value'))
result''' `shouldBe` Just (Right Empty)
result'''' `shouldBe` Just (Right (Value value))
describe "renaming a value" $ do
let oldBID = "old-bid"
oldVID = "old-vid"
newBID = "new-bid"
newVID = "new-vid"
old = (oldBID, oldVID)
new = (newBID, newVID)
value = String "hello"
context "when the source bucket does not exist" $ do
let commands = [RenameValue old new]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result `shouldBe` Just (Left (NoSuchBucket oldBID))
context "when the source bucket exists" $ do
context "when the source value does not exist" $ do
let commands = [CreateBucket oldBID, RenameValue old new]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket oldBID))
result' `shouldBe` Just (Left (NoSuchValue oldBID oldVID))
context "when the source value exists" $ do
context "when the target bucket does not exist" $ do
let commands = [CreateBucket oldBID, PutValue oldBID oldVID value, RenameValue old new]
it "returns an error" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket oldBID))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Left (NoSuchBucket newBID))
context "when the target bucket exists" $ do
context "when the target value does not exist" $ do
let commands = [ CreateBucket oldBID
, PutValue oldBID oldVID value
, CreateBucket newBID
, RenameValue old new
, uncurry GetValue new
]
it "renames the value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result'''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket oldBID))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Bucket newBID))
result''' `shouldBe` Just (Right Empty)
result'''' `shouldBe` Just (Right (Value value))
context "when the target value exists" $ do
let value' = Null
commands = [ CreateBucket oldBID
, PutValue oldBID oldVID value
, CreateBucket newBID
, PutValue newBID newVID value'
, uncurry GetValue new
, RenameValue old new
, uncurry GetValue new
]
it "clobbers the old value" $ do
(output, input) <- spawn Unbounded
withSimple $ \simple ->
runEffect $ each commands >-> toPipe simple >-> toOutput output
result <- liftSTM $ recv input
result' <- liftSTM $ recv input
result'' <- liftSTM $ recv input
result''' <- liftSTM $ recv input
result'''' <- liftSTM $ recv input
result''''' <- liftSTM $ recv input
result'''''' <- liftSTM $ recv input
result `shouldBe` Just (Right (Bucket oldBID))
result' `shouldBe` Just (Right Empty)
result'' `shouldBe` Just (Right (Bucket newBID))
result''' `shouldBe` Just (Right Empty)
result'''' `shouldBe` Just (Right (Value value'))
result''''' `shouldBe` Just (Right Empty)
result'''''' `shouldBe` Just (Right (Value value))
describe "shutting down" $ do
let commands = [CreateBucket "test-bucket"]
it "does not respond after a shutdown" $ do
pending
(output, input) <- spawn Unbounded
putStrLn "pre start"
simple <- startService :: IO Simple
stopService simple
putStrLn "pre stop"
runEffect $ each commands >-> toPipe simple >-> toOutput output
putStrLn "post stop"
exhausted <- P.null $ fromInput input
unless exhausted $ fail "Output should be closed"
| nahiluhmot/hasqueue | spec/Database/Hasqueue/Store/SimpleSpec.hs | mit | 20,475 | 0 | 30 | 8,909 | 5,491 | 2,573 | 2,918 | 370 | 2 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Auto
import Data.Profunctor
import Data.Serialize
import Data.Traversable
import Debug.Trace
import Linear.Matrix
import Linear.Metric
import Linear.V1
import Linear.V2
import Linear.V3
import Linear.V4
import Linear.Vector
import Prelude hiding ((.), id)
import System.Random
import qualified Data.List as L
-- | A trainable wire: a 'Left' input carries an (input, expected-output)
-- training pair, a 'Right' input is evaluated without learning
-- (see the two clauses of @f@ in 'trainNodeFrom').
type Neural m i o = Auto m (Either (i, o) i) o
-- | An untrainable wire: evaluation only, no training channel.
type UNeural m i o = Auto m i o
-- | An always-training wire: every step also receives the expected output.
type TNeural m i o = Auto m (i, o) o
-- | Use an untrainable wire as a 'Neural' one: a 'Left' training pair is
-- collapsed to just its input, a 'Right' input passes through unchanged.
fromU :: Monad m
      => UNeural m i o
      -> Neural m i o
fromU = lmap (either fst id)
-- | Use an always-training wire as a 'Neural' one: a 'Right' (evaluate-only)
-- input is paired with the zero vector as its "expected" output.
fromT :: (Monad m, Additive o, Num a)
      => TNeural m i (o a)
      -> Neural m i (o a)
fromT = lmap (either id (, zero))
-- | Standard logistic sigmoid with midpoint @x0@ and steepness @k@:
-- logistic x0 k x = 1 / (1 + e^(-k (x - x0))).
logistic :: Floating a => a -> a -> a -> a
logistic x0 k x = recip (1 + exp (negate k * (x - x0)))
-- for weights: outer layer is each output, nested/inner layer is the
-- weights for each input.
-- | Build a single trainable layer whose weights learn by numerical
-- (finite-difference) gradient descent.  On a 'Left' (input, expected)
-- step, every weight is perturbed by 'dw', the change in squared error
-- ('qd') is measured, and the weight is moved against that gradient,
-- scaled by 'wStep'.  On a 'Right' step the current weights are applied
-- and left unchanged.
trainNodeFrom :: forall m vi vo.
                 ( Monad vi
                 , Applicative vi
                 , Metric vi
                 , Additive vi
                 , Traversable vi
                 , Num (vi Double)
                 , Monad vo
                 , Applicative vo
                 , Metric vo
                 , Additive vo
                 , Traversable vo
                 -- , Num (vo Double)
                 , Serialize (vo (vi Double))
                 , Show (vo (vi Double))
                 , Monad m
                 )
              => (vo Double -> vo Double) -- map before exit
              -> vo (vi Double) -- inner: by-input weights
              -- outer: by-output weight sets
              -> Neural m (vi Double) (vo Double)
trainNodeFrom outFunc = mkState f
  where
    -- Perturbation size for the finite-difference derivative estimate.
    dw :: Double
    dw = 0.05
    -- Learning rate applied to the estimated gradient.
    wStep :: Double
    wStep = 1
    -- the types work out :|
    -- nudges, indexed by (output o, input i), presumably holds a matrix
    -- that is dw at position (o,i) and zero elsewhere -- i.e. one
    -- single-weight perturbation per weight; see its use in weights' below.
    nudges :: vo (vi (vo (vi Double)))
    nudges = fmap (outer (scaled (pure dw))) (scaled (pure (pure dw)))
    f :: Either (vi Double, vo Double) (vi Double)
      -> vo (vi Double)
      -> (vo Double, vo (vi Double))
    -- Training step: emit the output of the *updated* weights and store them.
    f (Left (input, expected)) weights =
        -- traceShow weights'
        (outFunc $ weights' !* input, weights')
      where
        -- Baseline output and squared error of the current weights.
        result = outFunc $ weights !* input
        resultErr = result `qd` expected
        weights' :: vo (vi Double)
        weights' = do
          nudgeRow <- nudges :: vo (vi (vo (vi Double)))
          row <- weights :: vo (vi Double)
          return $ do
            -- nudgeEl : matrix with a 1 only at the row of this column
            nudgeEl <- nudgeRow :: vi (vo (vi Double))
            weight <- row :: vi Double
            let nudged = weights !+! nudgeEl
                resNudged = outFunc $ nudged !* input
                nudgedErr = resNudged `qd` expected
                -- Finite-difference estimate of d(error)/d(this weight).
                dErrdW = (nudgedErr - resultErr) / dw
            return (weight - dErrdW * wStep)
    -- Evaluation-only step: apply the current weights, leave them unchanged.
    f (Right input) weights = (outFunc $ weights !* input, weights)
-- | Infinite stream of training pairs: pseudo-random 4-vectors paired with
-- the output of the fixed linear map 'ws' -- the ground truth the network
-- is expected to recover.
-- NOTE(review): the lambda pattern @[a,b,c,d]@ is partial; it only works
-- because 'L.transpose' of exactly four infinite generator streams always
-- yields 4-element rows.
testPoints :: [(V4 Double, V3 Double)]
testPoints = map (\[a,b,c,d] -> (V4 a b c d, ws !* V4 a b c d))
           . L.transpose . map (randoms . mkStdGen)
           $ [25645,45764,1354,75673]
  where
    -- ws = V1 (V4 0.05 0.6 0.2 0.15)
    ws = V3 (V4 0.05 0.6 0.2 0.15)
            (V4 0   0.1 0.2 0.7 )
            (V4 0.4 0.4 0.1 0.1 )
-- | Wrap a node so its output becomes the residual: expected output minus
-- actual output ('Left' steps supply the expected vector via 'snd';
-- evaluation-only 'Right' steps use zero as the target).
asTest :: (Additive vo, Monad m)
       => Neural m (vi Double) (vo Double)
       -> Neural m (vi Double) (vo Double)
asTest = liftA2 (^-^) (arr (either snd (const zero)))
-- | Hand-written 2x3 analogue of 'nudges' (with 1 in place of dw):
-- entry (o,i) is a matrix that is non-zero only at position (o,i).
-- Presumably kept for eyeball-checking 'nudges' in GHCi; nothing calls it.
testNudge :: V2 (V3 (V2 (V3 Double)))
testNudge = V2 (V3 (V2 (V3 1 0 0)
                       (V3 0 0 0))
                   (V2 (V3 0 1 0)
                       (V3 0 0 0))
                   (V2 (V3 0 0 1)
                       (V3 0 0 0)))
               (V3 (V2 (V3 0 0 0)
                       (V3 1 0 0))
                   (V2 (V3 0 0 0)
                       (V3 0 1 0))
                   (V2 (V3 0 0 0)
                       (V3 0 0 1)))
-- | Train a single 4-input/3-output layer from uniform weights on 1000
-- training points and print the squared error ('quadrance' of the residual)
-- of every step; the printed values should trend toward zero as it learns.
main :: IO ()
main = mapM_ print $ streamAuto' (quadrance <$> asTest (trainNodeFrom id w0)) (take 1000 $ map Left testPoints)
  where
    -- w0 = V1 (V4 0.25 0.25 0.25 0.25)
    w0 = V3 (V4 0.25 0.25 0.25 0.25)
            (V4 0.25 0.25 0.25 0.25)
            (V4 0.25 0.25 0.25 0.25)
| mstksg/auto-examples | src/Experimental/Neural.hs | mit | 4,299 | 0 | 19 | 1,678 | 1,573 | 828 | 745 | 106 | 2 |
let q s = putStrLn (s ++ show s) in q "let q s = putStrLn (s ++ show s) in q " | mishadoff/langolier | resources/haskell/quine.hs | epl-1.0 | 78 | 0 | 12 | 22 | 34 | 14 | 20 | -1 | -1 |
module HeelGenerators.SandalsHeel(sandalHeelDebugToFile, sandalHeelStlToFile ) where
import TriCad.MathPolar(
slopeAdjustedForVerticalAngle,
createTopFaces,
createBottomFaces,
radiusAdjustedForZslope,
xyQuadrantAngle,
QuadrantAngle(..),
createCornerPoint,
Slope(..),
Radius(..),
flatXSlope,
flatYSlope,
)
import TriCad.Points(Point(..))
import TriCad.CornerPoints(CornerPoints(..), (++>), (+++), (++++), Faces(..))
import TriCad.StlCornerPoints((+++^))
import TriCad.StlBase (StlShape(..), newStlShape, stlShapeToText)
import TriCad.CornerPointsFaceExtraction ( extractTopFace, extractBottomFrontLine, extractFrontTopLine, extractBackTopLine, extractBottomFace, extractBackBottomLine, extractFrontFace )
import TriCad.CornerPointsFaceConversions(lowerFaceFromUpperFace, backBottomLineFromBottomFrontLine, backTopLineFromFrontTopLine,
frontTopLineFromBackTopLine, upperFaceFromLowerFace, bottomFrontLineFromBackBottomLine)
import TriCad.CornerPointsDebug((+++^?), CubeName(..), CubeDebug(..), CubeDebugs(..))
import TriCad.StlFileWriter(writeStlToFile, writeStlDebugToFile)
-- Entry points: write the debug listing / the final STL shape to disk.
sandalHeelDebugToFile = writeStlDebugToFile strapTopFaceDebug
sandalHeelStlToFile = writeStlToFile sandalToeStlFile
-- NOTE(review): only the strap triangles are currently exported; the
-- commented-out alternative exported the shoe flat+slope layers instead.
sandalToeStlFile = newStlShape "SandalToe" strapTriangles -- $ shoeFlatTriangles ++ shoeSlopeTriangles
-- Polar sampling angles: one segment every 10 degrees around the heel.
angles = [0,10..360]
{--------------------------------------------- strap ---------------------------------------
a flat strap that can be heated and glued to 2 adjoining pieces.
-}
-- Strap dimensions (presumably millimetres, as usual for STL work -- confirm).
strapWidth = 20
strapLength = 30
strapHeight = 1.5
-- Emit every face of the single strap cube as STL triangles.
strapTriangles = concat [
                    [FacesAll]
                   ]
                   +++^
                   [strapCube]
strapTopFaceDebug =
   [CubeName "strapCube" | x <- [1..]]
   +++^?
   [strapCube]
-- The strap is one rectangular cube: bottom face plus top face.
strapCube = strapBtmFace +++ strapTopFace
strapTopFace = strapBackTopLn +++ strapFrontTopLn
strapFrontTopLn = strapF2 +++ strapF3
-- Corner points: F*/B* are the front/back corner constructors from
-- TriCad.CornerPoints; x runs across the width, y along the length.
strapF3 = F3 (Point strapWidth strapLength strapHeight)
strapF2 = F2 (Point 0 strapLength strapHeight)
strapBackTopLn = strapB2 +++ strapB3
strapB3 = B3 (Point strapWidth 0 strapHeight)
strapB2 = B2 (Point 0 0 strapHeight)
strapBtmFrontLnDebug =
   [CubeName "strapBtmFrontLn" | x <- [1..]]
   +++^?
   [strapBtmFrontLn]
strapBtmFrontLn = strapF1 +++ strapF4
strapF4 = F4 (Point strapWidth strapLength 0)
strapF1 = F1 (Point 0 strapLength 0)
strapBtmFaceDebug =
   [CubeName "strapBtmFace" | x <- [1..]]
   +++^?
   [strapBtmFace]
strapBtmFace = strapBackBtmLn +++ strapBtmFrontLn
strapBackBtmLnDebug =
   [CubeName "strapBackBtmLn" | x <- [1..]]
   +++^?
   [strapBackBtmLn]
strapBackBtmLn = strapB1 +++ strapB4
strapB1 = B1 (Point 0 0 0)
strapB4 = B4 (Point strapWidth 0 0)
{------------------------------------------------ brace -----------------------------------------------
The brace that gives ankle support.
Make it without a bottom, so that it can be glued to the
-}
-- Brace runs from z=0 up to z=30.
braceTopOrigin = (Point{x_axis=0, y_axis=(0), z_axis=30})
braceBtmOrigin = (Point{x_axis=0, y_axis=(0), z_axis=0})
-- Faces per 10-degree segment; the two AllBut* entries open the left/right
-- sides of the middle section (segments 13 and 23) and the FacesNada run
-- leaves the span between them unmeshed.
braceTriangles = concat [
                    [FacesBackBottomFrontTop | x <- [1..12]],
                    [FacesAllButRight],
                    [FacesNada | x <- [14..23]],
                    [FacesAllButLeft],
                    [FacesBackBottomFrontTop | x <- [24..36]]
                   ]
                   +++^
                   braceCubes
braceCubesDebug =
   [CubeName "braceCubes" | x <- [1..]]
   +++^?
   braceCubes
braceCubes =
   braceTopFaces
   ++++
   braceBtmFaces
braceTopFacesDebug =
   [CubeName "braceTopFaces" | x <- [1..]]
   +++^?
   braceTopFaces
-- Ring of top faces: collarRadius outside (front), braceRadius inside (back).
braceTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces braceTopOrigin collarRadius angles flatXSlope (PosYSlope 0))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces braceTopOrigin braceRadius angles flatXSlope (PosYSlope 0))
braceBtmFacesDebug =
   [CubeName "braceBtmFaces" | x <- [1..]]
   +++^?
   braceBtmFaces
braceBtmFaces =
  --front line
  map (extractBottomFrontLine) (createBottomFaces braceBtmOrigin collarRadius angles flatXSlope flatYSlope)
  ++++
  --back line
  map (backBottomLineFromBottomFrontLine . extractBottomFrontLine) (createBottomFaces braceBtmOrigin braceRadius angles flatXSlope flatYSlope)
{----------------------------------------------- shoe layer ---------------------------------------------
Fits onto the heel of the shoe.
Has a sloped top.
Has a non-sloped bottom section so keyway will not be tapered for that section.
Bottom is still the shoe radius.
-}
-- Half the heel outline (0..170 degrees); mirrored below to build the full
-- 0..360 profile.
shoeHalfRadius =
  [
   Radius 38,--0
   Radius 38,--1
   Radius 37.5,--2
   Radius 37,--3
   Radius 36,--4
   Radius 35,--5
   Radius 33,--6
   Radius 31.5,--7
   Radius 30.5,--8
   Radius 31,--9
   Radius 31,--10
   Radius 32,--11
   Radius 35,--12
   Radius 38,--13
   Radius 32.5,--14
   Radius 29,--15
   Radius 26.5,--16
   Radius 25 --17
  ]
--the center val is 180 deg
--It is symmetrical, so can be mirrored.
shoeRadius = concat [shoeHalfRadius, [Radius 24.5], reverse shoeHalfRadius]
--make it an extra 5mm radius to account for the fact that the brace has to go around the heel of shoe.
braceRadius = map (\(Radius x) -> (Radius (x + 5))) shoeRadius
-- Collar sits a further 3mm outside the brace profile.
collarRadius = map (\(Radius x) -> (Radius (x + 8))) shoeRadius
-- Sloped section tops out at z=90; the flat section below it at z=70.
shoeSlopeOrigin = (Point{x_axis=0, y_axis=(0), z_axis=90})
shoeFlatOrigin = (Point{x_axis=0, y_axis=(0), z_axis=70})
shoeSlopeTriangles = [FacesBackFrontTop | x <- [1,2..36]]
            +++^
            shoeSlopeCubes
shoeSlopeCubesDebug =
   [CubeName "shoeCubes" | x <- [1..]]
   +++^?
   shoeSlopeCubes
shoeSlopeCubes =
  shoeFlatCubes
  ++++
  shoeSlopeTopFaces
shoeSlopeTopFacesDebug =
   [CubeName "shoeSlopeTopFaces" | x <- [1..]]
   +++^?
   shoeSlopeTopFaces
-- Top of the sloped section: 20-degree negative y-slope, braceRadius outside,
-- half-tread keyway radius inside.
shoeSlopeTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces shoeSlopeOrigin braceRadius angles flatXSlope (NegYSlope 20))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces shoeSlopeOrigin treadInnerRadius angles flatXSlope (NegYSlope 20))
shoeFlatTriangles = [FacesBackBottomFront | x <- [1,2..36]]
            +++^
            shoeFlatCubes
-- NOTE(review): this debug value is labelled "shoeCubes" and debugs
-- shoeFlatTopFaces, while its name says shoeFlatCubes; the sibling debug
-- values all debug the value they are named after.  TODO confirm intent.
shoeFlatCubesDebug =
   [CubeName "shoeCubes" | x <- [1..]]
   +++^?
   shoeFlatTopFaces
shoeFlatCubes =
  riserCubes
  ++++
  shoeFlatTopFaces
shoeFlatTopFacesDebug =
   [CubeName "shoeFlatTopFaces" | x <- [1..]]
   +++^?
   shoeFlatTopFaces
shoeFlatTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces shoeFlatOrigin shoeRadius angles flatXSlope (PosYSlope 0))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces shoeFlatOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
{------------------------------------ riser -----------------------------------------------
Has shoe radius for top and bottom.
-}
-- Riser section tops out at z=60, stacked on the adaptor below it.
riserOrigin = (Point{x_axis=0, y_axis=(0), z_axis=60})
riserTriangles = [FacesBackBottomFrontTop | x <- [1,2..36]]
            +++^
            riserCubes
riserCubesDebug =
   [CubeName "riserCubes" | x <- [1..]]
   +++^?
   riserCubes
riserCubes =
  adaptorCubes
  ++++
  riserTopFaces
riserTopFacesDebug =
   [CubeName "riserTopFaces" | x <- [1..]]
   +++^?
   riserTopFaces
-- NOTE(review): the section comment above says "shoe radius for top and
-- bottom", but the code uses treadRadius for the outer line -- confirm
-- which is intended.
riserTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces riserOrigin treadRadius angles flatXSlope (PosYSlope 0))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces riserOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
{------------------------------------------------ adaptor: tread to riser adaptor layer -------------------------------------------
Adapts from the tread radius to the shoe radius.
Uses the standard half-tread radius for the inner key.
-}
-- Adaptor layer tops out at z=30, stacked directly on the tread cubes.
adaptorOrigin = (Point{x_axis=0, y_axis=(0), z_axis=30})
adaptorTriangles = [FacesBackBottomFrontTop | x <- [1,2..36]]
            +++^
            adaptorCubes
adaptorCubesDebug =
   [CubeName "adaptorCubes" | x <- [1..]]
   +++^?
   adaptorCubes
adaptorCubes =
  treadCubes
  ++++
  adaptorTopFaces
adaptorTopFacesDebug =
   [CubeName "adaptorTopFaces" | x <- [1..]]
   +++^?
   adaptorTopFaces
-- Top ring: braceRadius outside, half-tread keyway radius inside.
adaptorTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces adaptorOrigin braceRadius angles flatXSlope (PosYSlope 0))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces adaptorOrigin treadInnerRadius angles flatXSlope (PosYSlope 0))
{----------------------------------------------- tread layer------------------------------------------------
It is symmetrical, so use half radius.
Goes from tread radius to shoe radius
-}
-- Half the tread outline (0..170 degrees); mirrored below for the full ring.
treadHalfRadius =
  [
   Radius 38,--0
   Radius 38,--1
   Radius 38,--2
   Radius 38.5,--3
   Radius 38,--4
   Radius 37,--5
   Radius 35.5,--6
   Radius 34,--7
   Radius 34,--8
   Radius 33.5,--9
   Radius 34,--10
   Radius 36,--11
   Radius 38.5,--12
   Radius 41,--13
   Radius 35,--14
   Radius 30.5,--15
   Radius 28,--16
   Radius 26--17
  ]
-- Tread runs from z=0 (ground) to z=15.
topTreadOrigin = (Point{x_axis=0, y_axis=(0), z_axis=15})
btmTreadOrigin = (Point{x_axis=0, y_axis=0, z_axis=0})
--the center val is 180 deg
--It is symmetrical, so can be mirrored.
treadRadius = concat [treadHalfRadius, [Radius 26], reverse treadHalfRadius]
-- Inner keyway is half the tread radius at every angle.
treadInnerRadius = map (\(Radius x) -> (Radius (x * 0.5))) treadRadius
treadTriangles = [FacesBackBottomFrontTop | x <- [1,2..36]]
            +++^
            treadCubes
treadCubesDebug =
   [CubeName "treadCubes" | x <- [1..]]
   +++^?
   treadCubes
treadCubes =
  treadTopFaces
  ++++
  treadBtmFaces
treadTopFacesDebug =
   [CubeName "treadTopFaces" | x <- [1..]]
   +++^?
   treadTopFaces
-- Top ring is tilted 18 degrees along y; bottom ring is flat on the ground.
treadTopFaces =
  --front line
  map (extractFrontTopLine) (createTopFaces topTreadOrigin treadRadius angles flatXSlope (PosYSlope 18))
  ++++
  --back line
  map (backTopLineFromFrontTopLine . extractFrontTopLine) (createTopFaces topTreadOrigin treadInnerRadius angles flatXSlope (PosYSlope 18))
treadBtmFacesDebug =
   [CubeName "treadBtmFaces" | x <- [1..]]
   +++^?
   treadBtmFaces
treadBtmFaces =
  --front line
  map (extractBottomFrontLine) (createBottomFaces btmTreadOrigin treadRadius angles flatXSlope flatYSlope)
  ++++
  --back line
  map (backBottomLineFromBottomFrontLine . extractBottomFrontLine) (createBottomFaces btmTreadOrigin treadInnerRadius angles flatXSlope flatYSlope)
| heathweiss/Tricad | src/Examples/ShoeLift/SandalsHeel.hs | gpl-2.0 | 10,335 | 0 | 11 | 1,872 | 2,511 | 1,424 | 1,087 | 252 | 1 |
module Test(
zero, one, two, three, four, succ, mul, true, false, and, or, neg, isZero,
add, mult, par, prim, seg, phi, pred, y, r,
testArithmetic, testLogic, testPairs, testRecurs, testRecursDefs) where
import Lambda
zero = "(Ls.Lz.z)"
one = "(Ls.Lz.sz)"
two = "(Ls.Lz.s(sz))"
three = "(Ls.Lz.s(s(sz)))"
four = "(Ls.Lz.s(s(s(sz))))"
prox = "(Lw.Ly.Lx.y(wyx))"
mul = "(Lx.Ly.Lz.x(yz))"
add = "(Ln.Lm.(n" ++ prox ++ "m))"
mult n m = "(" ++ mul ++ n ++ m ++ ")"
true = "(Lx.Ly.x)"
false = "(Lx.Ly.y)"
land = "(Lx.Ly.xy"++false++")"
lor = "(Lx.Ly.x"++true++"y)"
neg = "(Lx.x"++false++true++")"
isZero = "(Lx.x"++false++neg++false++")"
par a b = "(Lz.z"++a++b++")"
prim par = "("++par++true++")"
seg par = "("++par++false++")"
phi = "(Lp.Lz.z("++prox++(prim "p")++")("++(prim "p")++"))"
prev = "(Ln.n"++phi++(par zero zero)++false++")"
y = "(Ly.(Lx.y(xx))(Lx.y(xx)))"
r = "(Lr.Ln."++isZero++"n"++zero++"(n"++prox++"(r("++prev++"n))))"
testArithmetic =
do
putStrLn ("0 = " ++ zero)
putStrLn ("1 = " ++ one)
putStrLn ("2 = " ++ two)
putStrLn ("3 = " ++ three)
putStrLn ("4 = " ++ four)
putStrLn ("SUCC = " ++ prox)
putStrLn "-----------------------"
putStrLn ("SUCC 3 = " ++ prox ++ three)
evalStr (prox ++ three)
putStrLn "-----------------------"
putStrLn ("ADD = " ++ add)
putStrLn ("ADD 2 3 = " ++ add ++ two ++ three)
evalStr (add ++ two ++ three)
putStrLn "-----------------------"
putStrLn ("MULT = " ++ mul)
putStrLn ("MULT 3 2 = " ++ (mult three two))
evalStr (mult three two)
putStrLn "-----------------------"
testLogic =
do
putStrLn ("TRUE = " ++ true)
putStrLn ("FALSE = " ++ false)
putStrLn ("NOT = " ++ neg)
putStrLn "-----------------------"
putStrLn ("NOT TRUE = " ++ neg ++ true)
evalStr (neg++true)
putStrLn "-----------------------"
putStrLn ("NOT FALSE = " ++ neg ++ false)
evalStr (neg++false)
putStrLn "-----------------------"
putStrLn ("AND = " ++ land)
putStrLn ("OR = " ++ lor)
putStrLn "-----------------------"
putStrLn ("AND TRUE TRUE = " ++ land ++ true ++ true)
evalStr (land ++ true ++ true)
putStrLn "-----------------------"
putStrLn ("AND TRUE FALSE = " ++ land ++ true ++ false)
evalStr (land ++ true ++ false)
putStrLn "-----------------------"
putStrLn ("AND FALSE TRUE = " ++ land ++ false ++ true)
evalStr (land ++ false ++ true)
putStrLn "-----------------------"
putStrLn ("AND FALSE FALSE = " ++ land ++ false ++ false)
evalStr (land ++ false ++ false)
putStrLn "-----------------------"
putStrLn ("OR TRUE TRUE = " ++ lor ++ true ++ true)
evalStr (lor ++ true ++ true)
putStrLn "-----------------------"
putStrLn ("OR TRUE FALSE = " ++ lor ++ true ++ false)
evalStr (lor ++ true ++ false)
putStrLn "-----------------------"
putStrLn ("OR FALSE TRUE = " ++ lor ++ false ++ true)
evalStr (lor ++ false ++ true)
putStrLn "-----------------------"
putStrLn ("OR FALSE FALSE = " ++ lor ++ false ++ false)
evalStr (lor ++ false ++ false)
putStrLn "-----------------------"
putStrLn ("0? = " ++ isZero)
putStrLn ("0? 0 = " ++ isZero ++ zero)
evalStr (isZero ++ zero)
putStrLn "-----------------------"
putStrLn ("0? 3 = " ++ isZero ++ three)
evalStr (isZero ++ three)
putStrLn "-----------------------"
testPairs =
do
putStrLn ("(a,b) = " ++ (par "a" "b"))
putStrLn ("FIRST (a,b) = (a,b)TRUE = " ++ (par "a" "b") ++ true)
evalStr ((par "a" "b") ++ true)
putStrLn "-----------------------"
putStrLn ("SECOND (a,b) = (a,b)FALSE = " ++ (par "a" "b") ++ false)
evalStr ((par "a" "b") ++ false)
putStrLn "-----------------------"
putStrLn ("PHI = " ++ phi)
putStrLn "PHI (n,k) = (n+1,n)"
putStrLn (" => PHI (3,1) = (4,3)")
putStrLn ("(3,1) = " ++ (par three one))
putStrLn ("(4,3) = " ++ (par four three))
putStrLn ("PHI (3,1) = " ++ phi ++ (par three one))
evalStr (phi ++ (par three one))
putStrLn "-----------------------"
putStrLn "PREV = (Ln.n PHI (0,0) FALSE)"
putStrLn ("PREV = " ++ prev)
putStrLn ("PREV 3 = " ++ prev ++ three)
evalStr (prev ++ three)
putStrLn "-----------------------"
testRecursDefs =
do
putStrLn ("Y = " ++ y)
putStrLn ("SUM n = if (?0 n) 0 else n + sum(n-1)")
putStrLn (" implemented with Y combinator")
putStrLn "R = (Lrn.0?n0(ADD n (r(PREV n))))"
putStrLn ("R = " ++ r)
putStrLn "SUM n = YR n"
putStrLn ("SUM 3 = " ++ y ++ r ++ three)
putStrLn "-----------------------"
testRecurs =
do
putStrLn ("SUM 3 = " ++ y ++ r ++ three)
evalStr (y ++ r ++ three)
putStrLn "-----------------------"
| josecastro/lambda | Test.hs | gpl-2.0 | 5,048 | 0 | 12 | 1,400 | 1,679 | 805 | 874 | 130 | 1 |
{-
Given: At most 10 DNA strings in FASTA format (of length at most 1 kbp each).
Return: The ID of the string having the highest GC-content,
followed by the GC-content of that string. Rosalind allows
for a default error of 0.001 in all decimal answers
unless otherwise stated;
please see the note on absolute error below.
-}
t = "AGC\nTATAG"
-- Keep only the G and C bases of a DNA string, preserving their order.
removeGC = filter isGC
  where
    isGC base = base `elem` "GC"
contentGC s = (fromIntegral $ length $ removeGC s) / (fromIntegral $ length s)
-- Pair each FASTA label line with the GC-content of the sequence line that
-- follows it: [label1, seq1, label2, seq2, ...] -> [(label1, gc1), ...].
-- A trailing unpaired line is now ignored; the original match was
-- non-exhaustive and crashed on odd-length input.
formTuple (label : sequence' : rest) = (label, contentGC sequence') : formTuple rest
formTuple _ = []
-- Scan an association list for the entry with the strictly largest value,
-- starting from a given (name, value) candidate; ties keep the earlier entry.
maxInTuple pairs best = foldl pick best pairs
  where
    pick acc@(_, bestVal) (name, val)
      | val > bestVal = (name, val)
      | otherwise     = acc
-- NOTE(review): apparently dead code -- nothing in this file calls it.
-- As written it is also partial ('head xs' crashes on a one-element list),
-- and '$' makes this parse as 'concTwo x (head xs ++ concGC xs)', which is
-- unlikely to be the intended pairwise concatenation.  TODO confirm intent.
concGC [] = []
concGC (x:xs) = concTwo x $ head xs ++ concGC xs
-- Join two lines unless either starts with '>' (a FASTA header line).
-- NOTE(review): partial -- 's!!0' crashes on an empty string.
concTwo s1 s2
  | s1!!0 /= '>' && s2!!0 /= '>' = s1 ++ s2
  | otherwise = []
-- Re-join sequence lines that were wrapped across newlines: a '\n'
-- sandwiched between two DNA bases is dropped, so each FASTA record's
-- sequence ends up on one line.  Everything else passes through intact.
splitStr (a : b : c : rest)
  | isBase a && b == '\n' && isBase c = splitStr (a : c : rest)
  | otherwise                         = a : splitStr (b : c : rest)
  where
    isBase ch = ch `elem` "CGTA"
splitStr short = short
-- Read the FASTA file, re-join wrapped sequence lines, compute GC-content
-- per record, then print the winning record's ID (with its '>' stripped)
-- and its GC-content as a percentage.
main = do
  input <- readFile "rosalind_gc.txt"
  let
    s = splitStr input
    tmp = formTuple $ lines s
    -- 'max' shadows Prelude.max here; it is the winning GC fraction.
    (tmp', max) = maxInTuple tmp ("",0)
  putStrLn $ [x | x<-tmp', x /= '>']
  print $ 100 * max
| forgit/Rosalind | gc.hs | gpl-2.0 | 1,458 | 0 | 13 | 391 | 616 | 318 | 298 | 31 | 1 |
-- P01 Find the last element of a list.
-- Predefined function
-- Last element via the Prelude's own accessor, written in pointed style.
f0 :: [a] -> a
f0 xs = last xs
-- Recursion
-- Last element by explicit recursion, with the work done by a local loop.
f1 :: [a] -> a
f1 = go
  where
    go []         = error "empty list"
    go [final]    = final
    go (_ : rest) = go rest
-- Function application
-- The last element of xs is the head of its reversal (demonstrates '$').
f2 :: [a] -> a
f2 xs = head $ reverse xs
-- Function composition
-- Same idea as 'f2', written point-free with composition.
f3 :: [a] -> a
f3 = head . reverse
-- Folding
-- 'curry snd' is \acc x -> x, so the fold keeps only the last element seen;
-- the error seed is forced only when the list is empty.
f4 :: [a] -> a
f4 = foldl (curry snd) (error "empty list")
-- 'flip const' is \_ y -> y: each foldr1 step keeps the folded remainder,
-- so the rightmost element survives.
f5 :: [a] -> a
f5 = foldr1 (flip const)
-- 'const id' used as a binary function is again \_ y -> y.
f6 :: [a] -> a
f6 = foldr1 (const id)
-- The same combinator as 'f5'/'f6', spelled out as a lambda.
f7 :: [a] -> a
f7 = foldr1 (\_ y -> y)
-- NOTE(review): these are equational laws (book-snippet notation), not
-- compilable Haskell definitions -- presumably the identities relating
-- 'split' to the two projections; kept verbatim.
lambda . bimap destroy id . split = id
rho . bimap id destroy . split = id
module HsPredictor.CSV.Hash where
-- standard
import Data.ByteString.Lazy.Char8 (pack)
-- 3rd party
import Data.Digest.Pure.MD5
{-| Generates an MD5 hash of the file contents. -}
genHash :: String -- ^ file contents
           -> IO String
genHash contents = pure digestText
  where
    -- The MD5 digest is a pure value; 'show' renders it as the usual hex text.
    digestText = show (md5 (pack contents))
{-| Checks if old hash == new hash -}
-- True when the two hashes differ -- presumably hashFile is the freshly
-- computed hash and hashDB the stored one, i.e. True means "file changed".
checkHash :: String -> String -> Bool
checkHash hashFile hashDB = not (hashFile == hashDB)
| jacekm-git/HsPredictor | library/HsPredictor/CSV/Hash.hs | gpl-3.0 | 471 | 0 | 11 | 107 | 104 | 58 | 46 | 10 | 1 |
<?xml version='1.0' encoding='ISO-8859-1'?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>LA-iMageS Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.xml"/>
</maps>
<view mergetype="javax.help.AppendMerge">
<name>TOC</name>
<label>Table of Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view xml:lang="en">
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<presentation default="true">
<name>main window</name>
<size width="1200" height="800" />
<location x="100" y="100"/>
<title>LA-iMageS Help</title>
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction image="homeicon">javax.help.HomeAction</helpaction>
</toolbar>
</presentation>
</helpset> | sing-group/la-images | la-images-aibench/src/main/resources/help/helpset.hs | gpl-3.0 | 1,146 | 89 | 68 | 199 | 479 | 237 | 242 | -1 | -1 |
module System.LogFS.Internal (
runLogFS,
Packet(..)
) where
import qualified Data.ByteString.Char8 as B
(ByteString, pack, length, empty)
import qualified Control.Exception as E
(Exception)
import System.Posix.Files
(ownerWriteMode, ownerReadMode, ownerExecuteMode, groupWriteMode, groupReadMode, groupExecuteMode, otherWriteMode, otherReadMode, otherExecuteMode)
import System.Posix.Types
(FileOffset, ByteCount, FileMode)
import Foreign.C.Error
(Errno)
import System.Posix.IO
(OpenFileFlags)
import System.Fuse
(FileStat(..), EntryType(..), FuseContext(..), FuseOperations(..), FileSystemStats(..), SyncType, OpenMode, defaultFuseOps, fuseCtxUserID, fuseCtxGroupID, fuseRun, unionFileModes, eOK, getFuseContext, defaultFuseOps)
-- | One logical write observed by the filesystem: the path written to and
-- the bytes that were written.
data Packet = Packet {
 _path :: FilePath,
 _payload :: B.ByteString
} deriving (Show)
-- | File-handle type used by the FUSE callbacks; no per-handle state is kept.
type HT = ()
-- | Backing "contents" used only to size regular files in 'fileStat';
-- empty because this filesystem never serves reads.
logString :: B.ByteString
logString = B.pack ""
-- | Mount a write-only FUSE filesystem and run it until unmounted.
-- Every write syscall is packaged as a 'Packet' and handed to @f@; reads
-- always return empty content.  @dirFilter@ decides which paths
-- 'logGetFileStat' reports as directories; @handler@ maps exceptions to
-- errno values for FUSE.  @prog@/@argv@ are passed straight to 'fuseRun'.
runLogFS :: E.Exception e => String -> [String] -> (Packet -> IO ()) -> (String -> Bool) -> (e -> IO Errno) -> IO ()
runLogFS prog argv f dirFilter handler = do
 let
  logFSOps :: FuseOperations HT
  logFSOps =
   defaultFuseOps {
    fuseGetFileSystemStats = logGetFileSystemStats,
    fuseGetFileStat = logGetFileStat,
    fuseAccess = logAccess,
    fuseOpen = logOpen,
    fuseRead = logRead,
    fuseWrite = logWrite,
    fuseFlush = logFlush,
    fuseRelease = logRelease,
    fuseSynchronizeFile = logSynchronizeFile,
    fuseCreateDirectory = logCreateDirectory,
    fuseOpenDirectory = logOpenDirectory,
    fuseReadDirectory = logReadDirectory,
    fuseSetFileSize = logSetFileSize
   }
  -- Fixed, mostly-dummy statistics so tools like `df` get an answer.
  logGetFileSystemStats :: String -> IO (Either Errno FileSystemStats)
  logGetFileSystemStats _ = do
   return $ Right $ FileSystemStats
    { fsStatBlockSize = 512
    , fsStatBlockCount = 1
    , fsStatBlocksFree = 1
    , fsStatBlocksAvailable = 1
    , fsStatFileCount = 5
    , fsStatFilesFree = 10
    , fsStatMaxNameLength = 255
    }
  -- dirFilter decides directory-vs-file; everything "exists".
  logGetFileStat :: FilePath -> IO (Either Errno FileStat)
  logGetFileStat dir = do
   ctx <- getFuseContext
   case (dirFilter dir) of
    True -> return $ Right $ dirStat ctx
    _ -> return $ Right $ fileStat ctx
  logAccess :: FilePath -> Int -> IO Errno
  logAccess _ _ = return eOK
  logCreateDirectory :: FilePath -> FileMode -> IO Errno
  logCreateDirectory _ _ = return eOK
  logOpenDirectory :: FilePath -> IO Errno
  logOpenDirectory _ = return eOK
  -- Directories only ever list "." and "..".
  logReadDirectory :: FilePath -> IO (Either Errno [(FilePath, FileStat)])
  logReadDirectory _ = do
   ctx <- getFuseContext
   return $ Right
    [(".", dirStat ctx)
    ,("..", dirStat ctx)
    ]
  logSetFileSize :: FilePath -> FileOffset -> IO Errno
  logSetFileSize _ _ = return eOK
  logOpen :: FilePath -> OpenMode -> OpenFileFlags -> IO (Either Errno HT)
  logOpen _ _ _ = return (Right ())
  -- Reads always succeed with empty content: the FS is write-only.
  logRead :: FilePath -> HT -> ByteCount -> FileOffset -> IO (Either Errno B.ByteString)
  logRead _ _ _ _ = return $ Right $ B.empty
  -- The only interesting callback: forward the written bytes to the consumer
  -- and report the full byte count as accepted.
  logWrite :: FilePath -> HT -> B.ByteString -> FileOffset -> IO (Either Errno ByteCount)
  logWrite path _ byteString _ = do
   f $ Packet { _path = path, _payload = byteString }
   return $ Right $ fromIntegral $ B.length byteString
  logFlush :: FilePath -> HT -> IO Errno
  logFlush _ _ = return eOK
  logRelease :: FilePath -> HT -> IO ()
  logRelease _ _ = return ()
  logSynchronizeFile :: FilePath -> SyncType -> IO Errno
  logSynchronizeFile _ _ = return eOK
 fuseRun prog argv logFSOps (\e -> print e >> handler e)
-- | Stat entry for directories: mode 0777 (read/write/execute for owner,
-- group and other), owned by the requesting user, conventional size 4096.
dirStat :: FuseContext -> FileStat
dirStat ctx = FileStat {
    statEntryType = Directory
  , statFileMode = foldr1 unionFileModes
    [ ownerReadMode
    , ownerExecuteMode
    , groupReadMode
    , groupExecuteMode
    , otherReadMode
    , otherExecuteMode
    , ownerWriteMode
    , groupWriteMode
    , otherWriteMode
    ]
  , statLinkCount = 2
  , statFileOwner = fuseCtxUserID ctx
  , statFileGroup = fuseCtxGroupID ctx
  , statSpecialDeviceID = 0
  , statFileSize = 4096
  , statBlocks = 1
  , statAccessTime = 0
  , statModificationTime = 0
  , statStatusChangeTime = 0
  }
-- | Stat entry for regular files: mode 0666 (read/write for everyone),
-- owned by the requesting user.  The reported size is the length of
-- 'logString' (zero), matching the empty content served by reads.
fileStat :: FuseContext -> FileStat
fileStat ctx = FileStat
  { statEntryType = RegularFile
  , statFileMode = foldr1 unionFileModes
    [ ownerReadMode
    , groupReadMode
    , otherReadMode
    , ownerWriteMode
    , groupWriteMode
    , otherWriteMode
    ]
  , statLinkCount = 1
  , statFileOwner = fuseCtxUserID ctx
  , statFileGroup = fuseCtxGroupID ctx
  , statSpecialDeviceID = 0
  , statFileSize = fromIntegral $ B.length logString
  , statBlocks = 1
  , statAccessTime = 0
  , statModificationTime = 0
  , statStatusChangeTime = 0
  }
| adarqui/LogFS | src/System/LogFS/Internal.hs | gpl-3.0 | 4,987 | 0 | 17 | 1,344 | 1,358 | 760 | 598 | 129 | 2 |
module UCeuler3
( solve
) where
import Primes
-- https://en.wikipedia.org/wiki/Prime_factor#Perfect_squares
-- https://en.wikipedia.org/wiki/Square_number
--
-- fromIntegral:
-- For example, given an Int value n, one does not simply take
-- its square root by typing sqrt n, since sqrt can only be applied
-- to Floating-point numbers. Instead, one must write
-- sqrt (fromIntegral n) to explicitly convert n to a floating-point number.
--
-- '.'
-- In Haskell, '.' works like the UNIX |, but "backwards".
-- Primes strictly below the rounded square root of n: the only
-- candidate factors the solver inspects ('primes' comes from Primes).
perfect_squares n = takeWhile (< limit) primes
  where limit = round (sqrt (fromIntegral n))
-- Candidate primes that divide n evenly, expressed as a list
-- comprehension instead of 'filter' with an anonymous predicate.
prime_factors n = [p | p <- perfect_squares n, n `mod` p == 0]
-- Largest prime factor of n.  Note 'maximum' would raise on an empty
-- list, i.e. when 'prime_factors' finds no factor below sqrt n.
largest_prime_factor :: Integer -> Integer
largest_prime_factor = maximum . prime_factors
-- | Render the Project Euler #3 answer for the given use-case id.
solve :: Int -> [Char]
solve ucid = "Solved UC "++show(ucid)++": Result is: "++show(largest_prime_factor 600851475143) | tedhag/teuler | haskell/rest-euler/src/UCeuler3.hs | gpl-3.0 | 1,040 | 0 | 11 | 169 | 178 | 102 | 76 | 10 | 1
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTubeReporting.Media.Download
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Method for media download. Download is supported on the URI
-- \`\/v1\/media\/{+name}?alt=media\`.
--
-- /See:/ <https://developers.google.com/youtube/reporting/v1/reports/ YouTube Reporting API Reference> for @youtubereporting.media.download@.
module Network.Google.Resource.YouTubeReporting.Media.Download
(
-- * REST Resource
MediaDownloadResource
-- * Creating a Request
, mediaDownload
, MediaDownload'
-- * Request Lenses
, mdXgafv
, mdUploadProtocol
, mdResourceName
, mdPp
, mdAccessToken
, mdUploadType
, mdBearerToken
, mdCallback
) where
import Network.Google.Prelude
import Network.Google.YouTubeReporting.Types
-- | A resource alias for @youtubereporting.media.download@ method which the
-- 'MediaDownload'' request conforms to.
-- The route is a pair of alternatives over the same path and query
-- parameters: a JSON metadata request, and an @alt=media@ raw download.
type MediaDownloadResource =
     "v1" :>
       "media" :>
         Capture "resourceName" Text :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "pp" Bool :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "bearer_token" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :> Get '[JSON] Media
       -- Streaming variant: identical parameters, octet-stream body.
       :<|>
       "v1" :>
         "media" :>
           Capture "resourceName" Text :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "pp" Bool :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "bearer_token" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltMedia :>
                             Get '[OctetStream] Stream
-- | Method for media download. Download is supported on the URI
-- \`\/v1\/media\/{+name}?alt=media\`.
--
-- /See:/ 'mediaDownload' smart constructor.
data MediaDownload' = MediaDownload''
    { _mdXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _mdUploadProtocol :: !(Maybe Text) -- ^ e.g. \"raw\", \"multipart\".
    , _mdResourceName :: !Text -- ^ Name of the media being downloaded.
    , _mdPp :: !Bool -- ^ Pretty-print response.
    , _mdAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _mdUploadType :: !(Maybe Text) -- ^ Legacy upload protocol.
    , _mdBearerToken :: !(Maybe Text) -- ^ OAuth bearer token.
    , _mdCallback :: !(Maybe Text) -- ^ JSONP callback.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MediaDownload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mdXgafv'
--
-- * 'mdUploadProtocol'
--
-- * 'mdResourceName'
--
-- * 'mdPp'
--
-- * 'mdAccessToken'
--
-- * 'mdUploadType'
--
-- * 'mdBearerToken'
--
-- * 'mdCallback'
mediaDownload
    :: Text -- ^ 'mdResourceName'
    -> MediaDownload'
-- Only the resource name is required; every optional field starts as
-- 'Nothing' and pretty-printing defaults to on.
mediaDownload resourceName_ =
    MediaDownload''
    { _mdXgafv = Nothing
    , _mdUploadProtocol = Nothing
    , _mdResourceName = resourceName_
    , _mdPp = True
    , _mdAccessToken = Nothing
    , _mdUploadType = Nothing
    , _mdBearerToken = Nothing
    , _mdCallback = Nothing
    }
-- | V1 error format.
mdXgafv :: Lens' MediaDownload' (Maybe Xgafv)
mdXgafv = lens _mdXgafv (\record v -> record { _mdXgafv = v })
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
mdUploadProtocol :: Lens' MediaDownload' (Maybe Text)
mdUploadProtocol = lens _mdUploadProtocol (\record v -> record { _mdUploadProtocol = v })
-- | Name of the media that is being downloaded. See
-- ReadRequest.resource_name.
mdResourceName :: Lens' MediaDownload' Text
mdResourceName = lens _mdResourceName (\record v -> record { _mdResourceName = v })
-- | Pretty-print response.
mdPp :: Lens' MediaDownload' Bool
mdPp = lens _mdPp (\record v -> record { _mdPp = v })
-- | OAuth access token.
mdAccessToken :: Lens' MediaDownload' (Maybe Text)
mdAccessToken = lens _mdAccessToken (\record v -> record { _mdAccessToken = v })
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
mdUploadType :: Lens' MediaDownload' (Maybe Text)
mdUploadType = lens _mdUploadType (\record v -> record { _mdUploadType = v })
-- | OAuth bearer token.
mdBearerToken :: Lens' MediaDownload' (Maybe Text)
mdBearerToken = lens _mdBearerToken (\record v -> record { _mdBearerToken = v })
-- | JSONP
mdCallback :: Lens' MediaDownload' (Maybe Text)
mdCallback = lens _mdCallback (\record v -> record { _mdCallback = v })
instance GoogleRequest MediaDownload' where
        type Rs MediaDownload' = Media
        type Scopes MediaDownload' =
             '["https://www.googleapis.com/auth/yt-analytics-monetary.readonly",
               "https://www.googleapis.com/auth/yt-analytics.readonly"]
        -- Metadata (JSON) variant: first alternative of the paired route.
        requestClient MediaDownload''{..}
          = go _mdResourceName _mdXgafv _mdUploadProtocol
              (Just _mdPp)
              _mdAccessToken
              _mdUploadType
              _mdBearerToken
              _mdCallback
              (Just AltJSON)
              youTubeReportingService
          where go :<|> _
                  = buildClient (Proxy :: Proxy MediaDownloadResource)
                      mempty
-- Streaming (alt=media) variant: second alternative of the paired route.
instance GoogleRequest (MediaDownload MediaDownload')
         where
        type Rs (MediaDownload MediaDownload') = Stream
        type Scopes (MediaDownload MediaDownload') =
             Scopes MediaDownload'
        requestClient (MediaDownload MediaDownload''{..})
          = go _mdResourceName _mdXgafv _mdUploadProtocol
              (Just _mdPp)
              _mdAccessToken
              _mdUploadType
              _mdBearerToken
              _mdCallback
              (Just AltMedia)
              youTubeReportingService
          where _ :<|> go
                  = buildClient (Proxy :: Proxy MediaDownloadResource)
                      mempty
| rueshyna/gogol | gogol-youtube-reporting/gen/Network/Google/Resource/YouTubeReporting/Media/Download.hs | mpl-2.0 | 6,564 | 0 | 30 | 1,823 | 1,092 | 611 | 481 | 150 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Glacier.DescribeJob
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation returns information about a job you previously initiated,
-- including the job initiation date, the user who initiated the job, the job
-- status code/message and the Amazon SNS topic to notify after Amazon Glacier
-- completes the job. For more information about initiating a job, see 'InitiateJob'.
--
-- This operation enables you to check the status of your job. However, it is
-- strongly recommended that you set up an Amazon SNS topic and specify it in
-- your initiate job request so that Amazon Glacier can notify the topic after
-- it completes the job.
--
-- A job ID will not expire for at least 24 hours after Amazon Glacier
-- completes the job.
--
-- An AWS account has full permission to perform all operations (actions).
-- However, AWS Identity and Access Management (IAM) users don't have any
-- permissions by default. You must grant them explicit permission to perform
-- specific actions. For more information, see <http://docs.aws.amazon.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html Access Control Using AWS Identityand Access Management (IAM)>.
--
-- For information about the underlying REST API, go to <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-describe-job-get.html Working with Archivesin Amazon Glacier> in the /Amazon Glacier Developer Guide/.
--
-- <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-DescribeJob.html>
module Network.AWS.Glacier.DescribeJob
(
-- * Request
DescribeJob
-- ** Request constructor
, describeJob
-- ** Request lenses
, djAccountId
, djJobId
, djVaultName
-- * Response
, DescribeJobResponse
-- ** Response constructor
, describeJobResponse
-- ** Response lenses
, djrAction
, djrArchiveId
, djrArchiveSHA256TreeHash
, djrArchiveSizeInBytes
, djrCompleted
, djrCompletionDate
, djrCreationDate
, djrInventoryRetrievalParameters
, djrInventorySizeInBytes
, djrJobDescription
, djrJobId
, djrRetrievalByteRange
, djrSHA256TreeHash
, djrSNSTopic
, djrStatusCode
, djrStatusMessage
, djrVaultARN
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestJSON
import Network.AWS.Glacier.Types
import qualified GHC.Exts
data DescribeJob = DescribeJob
    { _djAccountId :: Text -- ^ AWS account ID, or \"-\" for the signing account.
    , _djJobId     :: Text -- ^ ID of the job to describe.
    , _djVaultName :: Text -- ^ Name of the vault.
    } deriving (Eq, Ord, Read, Show)
-- | 'DescribeJob' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'djAccountId' @::@ 'Text'
--
-- * 'djJobId' @::@ 'Text'
--
-- * 'djVaultName' @::@ 'Text'
--
describeJob :: Text -- ^ 'djAccountId'
            -> Text -- ^ 'djVaultName'
            -> Text -- ^ 'djJobId'
            -> DescribeJob
-- Positional smart constructor; note the argument order is
-- account, vault, job.
describeJob accountId vaultName jobId = DescribeJob
    { _djAccountId = accountId
    , _djVaultName = vaultName
    , _djJobId     = jobId
    }
-- | The 'AccountId' is the AWS Account ID. You can specify either the AWS
-- Account ID or a \"-\", in which case Amazon Glacier uses the account
-- associated with the signing credentials. An explicit account ID must not
-- contain hyphens.
djAccountId :: Lens' DescribeJob Text
djAccountId = lens _djAccountId (\record v -> record { _djAccountId = v })
-- | The ID of the job to describe.
djJobId :: Lens' DescribeJob Text
djJobId = lens _djJobId (\record v -> record { _djJobId = v })
-- | The name of the vault.
djVaultName :: Lens' DescribeJob Text
djVaultName = lens _djVaultName (\record v -> record { _djVaultName = v })
data DescribeJobResponse = DescribeJobResponse
    { _djrAction                        :: Maybe ActionCode -- ^ ArchiveRetrieval or InventoryRetrieval.
    , _djrArchiveId                     :: Maybe Text -- ^ Archive ID (null for inventory jobs).
    , _djrArchiveSHA256TreeHash         :: Maybe Text -- ^ Tree hash of the entire archive.
    , _djrArchiveSizeInBytes            :: Maybe Integer -- ^ Archive size (null for inventory jobs).
    , _djrCompleted                     :: Maybe Bool -- ^ Whether the job finished.
    , _djrCompletionDate                :: Maybe Text -- ^ UTC completion time (null while running).
    , _djrCreationDate                  :: Maybe Text -- ^ UTC creation date, ISO 8601.
    , _djrInventoryRetrievalParameters  :: Maybe InventoryRetrievalJobDescription -- ^ Range inventory retrieval parameters.
    , _djrInventorySizeInBytes          :: Maybe Integer -- ^ Inventory size (null for archive jobs).
    , _djrJobDescription                :: Maybe Text -- ^ Description supplied at initiation.
    , _djrJobId                         :: Maybe Text -- ^ Opaque job identifier.
    , _djrRetrievalByteRange            :: Maybe Text -- ^ \"Start-End\" byte range retrieved.
    , _djrSHA256TreeHash                :: Maybe Text -- ^ Tree hash of the requested range.
    , _djrSNSTopic                      :: Maybe Text -- ^ SNS topic notified on completion.
    , _djrStatusCode                    :: Maybe StatusCode -- ^ InProgress, Succeeded, or Failed.
    , _djrStatusMessage                 :: Maybe Text -- ^ Friendly status message.
    , _djrVaultARN                      :: Maybe Text -- ^ ARN of the source vault.
    } deriving (Eq, Read, Show)
-- | 'DescribeJobResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'djrAction' @::@ 'Maybe' 'ActionCode'
--
-- * 'djrArchiveId' @::@ 'Maybe' 'Text'
--
-- * 'djrArchiveSHA256TreeHash' @::@ 'Maybe' 'Text'
--
-- * 'djrArchiveSizeInBytes' @::@ 'Maybe' 'Integer'
--
-- * 'djrCompleted' @::@ 'Maybe' 'Bool'
--
-- * 'djrCompletionDate' @::@ 'Maybe' 'Text'
--
-- * 'djrCreationDate' @::@ 'Maybe' 'Text'
--
-- * 'djrInventoryRetrievalParameters' @::@ 'Maybe' 'InventoryRetrievalJobDescription'
--
-- * 'djrInventorySizeInBytes' @::@ 'Maybe' 'Integer'
--
-- * 'djrJobDescription' @::@ 'Maybe' 'Text'
--
-- * 'djrJobId' @::@ 'Maybe' 'Text'
--
-- * 'djrRetrievalByteRange' @::@ 'Maybe' 'Text'
--
-- * 'djrSHA256TreeHash' @::@ 'Maybe' 'Text'
--
-- * 'djrSNSTopic' @::@ 'Maybe' 'Text'
--
-- * 'djrStatusCode' @::@ 'Maybe' 'StatusCode'
--
-- * 'djrStatusMessage' @::@ 'Maybe' 'Text'
--
-- * 'djrVaultARN' @::@ 'Maybe' 'Text'
--
describeJobResponse :: DescribeJobResponse
-- Every field starts empty; assignments follow the declaration order of
-- the 'DescribeJobResponse' record.
describeJobResponse = DescribeJobResponse
    { _djrAction                       = Nothing
    , _djrArchiveId                    = Nothing
    , _djrArchiveSHA256TreeHash        = Nothing
    , _djrArchiveSizeInBytes           = Nothing
    , _djrCompleted                    = Nothing
    , _djrCompletionDate               = Nothing
    , _djrCreationDate                 = Nothing
    , _djrInventoryRetrievalParameters = Nothing
    , _djrInventorySizeInBytes         = Nothing
    , _djrJobDescription               = Nothing
    , _djrJobId                        = Nothing
    , _djrRetrievalByteRange           = Nothing
    , _djrSHA256TreeHash               = Nothing
    , _djrSNSTopic                     = Nothing
    , _djrStatusCode                   = Nothing
    , _djrStatusMessage                = Nothing
    , _djrVaultARN                     = Nothing
    }
-- | The job type: ArchiveRetrieval or InventoryRetrieval.
djrAction :: Lens' DescribeJobResponse (Maybe ActionCode)
djrAction = lens _djrAction (\record v -> record { _djrAction = v })
-- | For an ArchiveRetrieval job, the archive ID requested for download;
-- null otherwise.
djrArchiveId :: Lens' DescribeJobResponse (Maybe Text)
djrArchiveId = lens _djrArchiveId (\record v -> record { _djrArchiveId = v })
-- | SHA256 tree hash of the entire archive for an archive retrieval;
-- null for inventory retrieval jobs.
djrArchiveSHA256TreeHash :: Lens' DescribeJobResponse (Maybe Text)
djrArchiveSHA256TreeHash = lens _djrArchiveSHA256TreeHash (\record v -> record { _djrArchiveSHA256TreeHash = v })
-- | Size in bytes of the archive being downloaded; null for
-- InventoryRetrieval jobs.
djrArchiveSizeInBytes :: Lens' DescribeJobResponse (Maybe Integer)
djrArchiveSizeInBytes = lens _djrArchiveSizeInBytes (\record v -> record { _djrArchiveSizeInBytes = v })
-- | The job status; when a job is completed you can get its output.
djrCompleted :: Lens' DescribeJobResponse (Maybe Bool)
djrCompleted = lens _djrCompleted (\record v -> record { _djrCompleted = v })
-- | UTC time the retrieval request completed; null while in progress.
djrCompletionDate :: Lens' DescribeJobResponse (Maybe Text)
djrCompletionDate = lens _djrCompletionDate (\record v -> record { _djrCompletionDate = v })
-- | UTC creation date in ISO 8601 format, e.g. \"2012-03-20T17:03:43.221Z\".
djrCreationDate :: Lens' DescribeJobResponse (Maybe Text)
djrCreationDate = lens _djrCreationDate (\record v -> record { _djrCreationDate = v })
-- | Parameters used for range inventory retrieval.
djrInventoryRetrievalParameters :: Lens' DescribeJobResponse (Maybe InventoryRetrievalJobDescription)
djrInventoryRetrievalParameters = lens _djrInventoryRetrievalParameters (\record v -> record { _djrInventoryRetrievalParameters = v })
-- | Size in bytes of the inventory requested for download; null for
-- ArchiveRetrieval jobs.
djrInventorySizeInBytes :: Lens' DescribeJobResponse (Maybe Integer)
djrInventorySizeInBytes = lens _djrInventorySizeInBytes (\record v -> record { _djrInventorySizeInBytes = v })
-- | The job description provided when the job was initiated.
djrJobDescription :: Lens' DescribeJobResponse (Maybe Text)
djrJobDescription = lens _djrJobDescription (\record v -> record { _djrJobDescription = v })
-- | An opaque string that identifies an Amazon Glacier job.
djrJobId :: Lens' DescribeJobResponse (Maybe Text)
djrJobId = lens _djrJobId (\record v -> record { _djrJobId = v })
-- | The retrieved byte range (\"/StartByteValue/-/EndByteValue/\").  If no
-- range was requested the whole archive is covered (0 to size-1).  Null
-- for inventory retrieval jobs.
djrRetrievalByteRange :: Lens' DescribeJobResponse (Maybe Text)
djrRetrievalByteRange = lens _djrRetrievalByteRange (\record v -> record { _djrRetrievalByteRange = v })
-- | SHA256 tree hash of the requested range for an archive retrieval;
-- equals 'djrArchiveSHA256TreeHash' when the whole archive is fetched.
-- Null for non-tree-hash-aligned ranges, whole-archive jobs still in
-- progress, and inventory jobs.
djrSHA256TreeHash :: Lens' DescribeJobResponse (Maybe Text)
djrSHA256TreeHash = lens _djrSHA256TreeHash (\record v -> record { _djrSHA256TreeHash = v })
-- | Amazon SNS topic that receives completion notification.
djrSNSTopic :: Lens' DescribeJobResponse (Maybe Text)
djrSNSTopic = lens _djrSNSTopic (\record v -> record { _djrSNSTopic = v })
-- | Job status: InProgress, Succeeded, or Failed.
djrStatusCode :: Lens' DescribeJobResponse (Maybe StatusCode)
djrStatusCode = lens _djrStatusCode (\record v -> record { _djrStatusCode = v })
-- | A friendly message that describes the job status.
djrStatusMessage :: Lens' DescribeJobResponse (Maybe Text)
djrStatusMessage = lens _djrStatusMessage (\record v -> record { _djrStatusMessage = v })
-- | ARN of the vault from which the archive retrieval was requested.
djrVaultARN :: Lens' DescribeJobResponse (Maybe Text)
djrVaultARN = lens _djrVaultARN (\record v -> record { _djrVaultARN = v })
-- Renders /{account}/vaults/{vault}/jobs/{job}.
instance ToPath DescribeJob where
    toPath DescribeJob{..} = mconcat
        [ "/", toText _djAccountId
        , "/vaults/", toText _djVaultName
        , "/jobs/", toText _djJobId
        ]
instance ToQuery DescribeJob where
        toQuery = const mempty
instance ToHeaders DescribeJob
-- The GET request carries an empty JSON body.
instance ToJSON DescribeJob where
        toJSON = const (toJSON Empty)
instance AWSRequest DescribeJob where
        type Sv DescribeJob = Glacier
        type Rs DescribeJob = DescribeJobResponse
        request = get
        response = jsonResponse
instance FromJSON DescribeJobResponse where
    -- NOTE: the applicative chain must remain in the same order as the
    -- 'DescribeJobResponse' constructor's fields.
    parseJSON = withObject "DescribeJobResponse" $ \o -> DescribeJobResponse
        <$> o .:? "Action"
        <*> o .:? "ArchiveId"
        <*> o .:? "ArchiveSHA256TreeHash"
        <*> o .:? "ArchiveSizeInBytes"
        <*> o .:? "Completed"
        <*> o .:? "CompletionDate"
        <*> o .:? "CreationDate"
        <*> o .:? "InventoryRetrievalParameters"
        <*> o .:? "InventorySizeInBytes"
        <*> o .:? "JobDescription"
        <*> o .:? "JobId"
        <*> o .:? "RetrievalByteRange"
        <*> o .:? "SHA256TreeHash"
        <*> o .:? "SNSTopic"
        <*> o .:? "StatusCode"
        <*> o .:? "StatusMessage"
        <*> o .:? "VaultARN"
| dysinger/amazonka | amazonka-glacier/gen/Network/AWS/Glacier/DescribeJob.hs | mpl-2.0 | 13,737 | 0 | 41 | 3,153 | 1,802 | 1,074 | 728 | 178 | 1 |
{-# LANGUAGE PackageImports #-}
import "yacs" Application (develMain)
import Prelude (IO)
-- | Entry point for @yesod devel@: delegates to the application's 'develMain'.
main :: IO ()
main = develMain
| nek0/yacs | app/devel.hs | agpl-3.0 | 122 | 0 | 6 | 19 | 34 | 20 | 14 | 5 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.Arbitrary () where
import Control.Applicative ((<$>), (<*>))
import Data.Aeson
import Data.CaseInsensitive
import Data.Scientific
import Data.String.Conversions
import Test.QuickCheck
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Vector as Vector
-- | 25 most common adjectives according to the Oxford English
-- Dictionary, used to generate human-readable test strings.
readableStrings :: [String]
readableStrings =
    [ "good", "new", "first", "last", "long", "great", "little"
    , "own", "other", "old", "right", "big", "high", "different"
    , "small", "large", "next", "early", "young", "important"
    , "few", "public", "bad", "same", "able"
    ]
-- Pick one of the readable adjectives and convert it to strict text.
instance Arbitrary ST where
    arbitrary = fmap cs (elements readableStrings)
-- Case-insensitive wrapper around the text generator above.
instance Arbitrary (CI ST) where
    arbitrary = fmap mk arbitrary
-- | Generator for arbitrary JSON 'Value's.  The QuickCheck size is halved
-- at each nesting level so generated documents stay small; scientific
-- exponents are drawn from a small range (0 or @`mod` 12@) to avoid
-- pathologically large numbers.
instance Arbitrary Value where
    arbitrary = sized $ \ size -> let size' = size `div` 2 in oneof
        [ return Null
        , String . cs <$> elements readableStrings
        , Number <$> (scientific <$> arbitrary <*> oneof [return 0, (`mod` 12) <$> arbitrary])
        , Bool <$> arbitrary
        , Array . Vector.fromList <$> resize size' arbitrary
        , object <$> resize size' arbitrary
        ]
    -- Shrink containers element-wise via their list representations;
    -- scalar values are not shrunk.
    shrink (Array v) = Array . Vector.fromList <$> shrink (Vector.toList v)
    shrink (Object m) = Object . HashMap.fromList <$> shrink (HashMap.toList m)
    shrink _ = []
| zerobuzz/configifier | tests/Test/Arbitrary.hs | agpl-3.0 | 1,598 | 0 | 29 | 347 | 453 | 248 | 205 | 36 | 1 |
module TesML.Data.Path (Path
, PathToken (..)
, PathStack
, rootPathStack
, pushPathStack)
where
import TesML.Data.Types
-- | A rendered path such as @\"/a/b/\"@.
type Path = String
-- | A path segment together with the size still available beneath it.
-- NOTE(review): 'Size' comes from TesML.Data.Types (not in view).
data PathToken = PathToken String Size
        deriving Show
-- | Stack of enclosing path tokens, innermost first.
type PathStack = [PathToken]
pathSeparator :: String
pathSeparator = "/"
-- | Initial stack: the root path with an effectively unbounded size.
rootPathStack :: PathStack
rootPathStack = [PathToken pathSeparator (maxBound :: Size)]
-- | Push an entry (name, own size, full size including children) onto the
-- path stack, debiting @ownSz@ from every enclosing token.  Returns
-- 'Left' with a diagnostic when the sizes are inconsistent with the
-- remaining space on the stack.  Guard order below is significant.
pushPathStack :: (String, Size, Size) -> PathStack -> Either String PathStack
pushPathStack (name, ownSz, fullSz) [] = Right $ (PathToken (pathSeparator ++ name ++ pathSeparator) (fullSz - ownSz)) : rootPathStack
pushPathStack tk@(name, ownSz, fullSz) stack@((PathToken parentPath parentSz) : _)
    | ownSz > fullSz = Left $ "Trying to push invalid token to PathStack. OwnSize > FullSize. " ++ msgEnd
    | parentSz < ownSz = Left $ "PathStack underrun. " ++ msgEnd
    | parentSz < fullSz = Left $ "PathStack will be underrun when processing children. Child entries need more\
                                 \ space than parent has. " ++ msgEnd
    -- last child: parent space exhausted, drop exhausted tokens
    | parentSz == ownSz = Right filteredCutStack
    -- childless, has more siblings
    | parentSz > ownSz && ownSz == fullSz = Right cutStack
    -- has children: push a new token carrying the children's budget
    | parentSz > ownSz && ownSz < fullSz = Right $ (PathToken (parentPath ++ name ++ pathSeparator) (fullSz - ownSz)) : cutStack
    | otherwise = Left $ "Unhandled case when pushing PathStack." ++ msgEnd
    where cutStack = fmap (\(PathToken p' s') -> PathToken p' (s' - ownSz)) stack
          filteredCutStack = filter (\(PathToken _ s') -> s' > 0) cutStack
          msgEnd = "Token " ++ show tk ++ ", pushing to: " ++ show stack
| Kromgart/tesML | lib/TesML/Data/Path.hs | agpl-3.0 | 1,710 | 0 | 12 | 434 | 487 | 262 | 225 | 28 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Content
( -- * Content
Content (..)
, emptyContent
, ToContent (..)
-- * Mime types
-- ** Data type
, ContentType
, typeHtml
, typePlain
, typeJson
, typeXml
, typeAtom
, typeRss
, typeJpeg
, typePng
, typeGif
, typeSvg
, typeJavascript
, typeCss
, typeFlv
, typeOgv
, typeOctet
-- * Utilities
, simpleContentType
-- * Representations
, ChooseRep
, HasReps (..)
, defChooseRep
-- ** Specific content types
, RepHtml (..)
, RepJson (..)
, RepHtmlJson (..)
, RepPlain (..)
, RepXml (..)
-- * Utilities
, formatW3
, formatRFC1123
, formatRFC822
) where
import Data.Maybe (mapMaybe)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.Text.Lazy (Text, pack)
import qualified Data.Text as T
import Data.Time
import System.Locale
import qualified Data.Text.Encoding
import qualified Data.Text.Lazy.Encoding
import Blaze.ByteString.Builder (Builder, fromByteString, fromLazyByteString)
import Data.Monoid (mempty)
import Text.Hamlet (Html)
import Text.Blaze.Renderer.Utf8 (renderHtmlBuilder)
import Data.String (IsString (fromString))
import Network.Wai (FilePart)
import Data.Conduit (Source, Flush)
data Content = ContentBuilder Builder (Maybe Int) -- ^ The content and optional content length.
             | ContentSource (Source IO (Flush Builder)) -- ^ Streamed content with explicit flush points.
             | ContentFile FilePath (Maybe FilePart) -- ^ Serve a file, optionally restricted to a part.
-- | Zero-length enumerator: an empty builder with a known length of 0.
emptyContent :: Content
emptyContent = ContentBuilder mempty (Just 0)
-- | String literals become content via 'toContent'.
instance IsString Content where
    fromString = toContent
-- | Anything which can be converted into 'Content'. Most of the time, you will
-- want to use the 'ContentBuilder' constructor. An easier approach will be to use
-- a pre-defined 'toContent' function, such as converting your data into a lazy
-- bytestring and then calling 'toContent' on that.
--
-- Please note that the built-in instances for lazy data structures ('String',
-- lazy 'L.ByteString', lazy 'Text' and 'Html') will not automatically include
-- the content length for the 'ContentBuilder' constructor.
class ToContent a where
    -- | Convert a value into a response body.
    toContent :: a -> Content
instance ToContent Builder where
    toContent builder = ContentBuilder builder Nothing
-- Strict bytestrings know their length, so it is recorded.
instance ToContent B.ByteString where
    toContent bs = ContentBuilder (fromByteString bs) (Just (B.length bs))
instance ToContent L.ByteString where
    toContent lbs = ContentBuilder (fromLazyByteString lbs) Nothing
instance ToContent T.Text where
    toContent t = toContent (Data.Text.Encoding.encodeUtf8 t)
instance ToContent Text where
    toContent t = toContent (Data.Text.Lazy.Encoding.encodeUtf8 t)
instance ToContent String where
    toContent s = toContent (pack s)
instance ToContent Html where
    toContent html = ContentBuilder (renderHtmlBuilder html) Nothing
-- | A function which gives targeted representations of content based on the
-- content-types the user accepts.
type ChooseRep =
    [ContentType] -- ^ list of content-types user accepts, ordered by preference
    -> IO (ContentType, Content)
-- | Any type which can be converted to representations.
class HasReps a where
    -- | Select the best (content type, body) pair for the client's Accept list.
    chooseRep :: a -> ChooseRep
-- | A helper method for generating 'HasReps' instances.
--
-- The argument maps content types to conversion actions.  The first
-- accepted type that appears in the map wins; when nothing matches, the
-- first pair in the map is used as the fallback.
defChooseRep :: [(ContentType, a -> IO Content)] -> a -> ChooseRep
defChooseRep reps a accepted = do
    let matches = [(ct, f) | ct <- accepted, Just f <- [lookup ct reps]]
        (chosenType, render) =
            case matches of
                (m:_) -> m
                [] ->
                    case reps of
                        (r:_) -> r
                        []    -> error "Empty reps to defChooseRep"
    body <- render a
    return (chosenType, body)
instance HasReps ChooseRep where
    chooseRep = id
-- Unit renders as an empty plain-text body.
instance HasReps () where
    chooseRep = defChooseRep [(typePlain, \_ -> return (toContent B.empty))]
-- A ready-made pair is returned unchanged, ignoring the Accept list.
instance HasReps (ContentType, Content) where
    chooseRep rep _ = return rep
-- Pick the first pair whose (simplified) content type appears in the
-- client's (simplified) Accept list; otherwise fall back to the first
-- pair available.
instance HasReps [(ContentType, Content)] where
    chooseRep pairs accepted = return chosen
      where
        wanted = map simpleContentType accepted
        matching = [p | p@(ct, _) <- pairs, simpleContentType ct `elem` wanted]
        chosen = case matching of
            (p:_) -> p
            [] -> case pairs of
                (p:_) -> p
                []    -> error "chooseRep [(ContentType, Content)] of empty"
-- | An HTML response body.
newtype RepHtml = RepHtml Content
instance HasReps RepHtml where
    chooseRep (RepHtml c) _ = return (typeHtml, c)
-- | A JSON response body.
newtype RepJson = RepJson Content
instance HasReps RepJson where
    chooseRep (RepJson c) _ = return (typeJson, c)
-- | A response with both HTML and JSON representations; selection follows
-- the client's content-type preference.
data RepHtmlJson = RepHtmlJson Content Content
instance HasReps RepHtmlJson where
    chooseRep (RepHtmlJson html json) = chooseRep
        [ (typeHtml, html)
        , (typeJson, json)
        ]
-- | A plain-text response body.
newtype RepPlain = RepPlain Content
instance HasReps RepPlain where
    chooseRep (RepPlain c) _ = return (typePlain, c)
-- | An XML response body.
newtype RepXml = RepXml Content
instance HasReps RepXml where
    chooseRep (RepXml c) _ = return (typeXml, c)
-- Common MIME content-type constants; textual types carry a charset.
type ContentType = B.ByteString -- FIXME Text?
typeHtml :: ContentType
typeHtml = "text/html; charset=utf-8"
typePlain :: ContentType
typePlain = "text/plain; charset=utf-8"
typeJson :: ContentType
typeJson = "application/json; charset=utf-8"
typeXml :: ContentType
typeXml = "text/xml"
typeAtom :: ContentType
typeAtom = "application/atom+xml"
typeRss :: ContentType
typeRss = "application/rss+xml"
typeJpeg :: ContentType
typeJpeg = "image/jpeg"
typePng :: ContentType
typePng = "image/png"
typeGif :: ContentType
typeGif = "image/gif"
typeSvg :: ContentType
typeSvg = "image/svg+xml"
typeJavascript :: ContentType
typeJavascript = "text/javascript; charset=utf-8"
typeCss :: ContentType
typeCss = "text/css; charset=utf-8"
typeFlv :: ContentType
typeFlv = "video/x-flv"
typeOgv :: ContentType
typeOgv = "video/ogg"
typeOctet :: ContentType
typeOctet = "application/octet-stream"
-- | Removes \"extra\" information at the end of a content type string. In
-- particular, removes everything after the semicolon, if present.
--
-- For example, \"text/html; charset=utf-8\" is commonly used to specify the
-- character encoding for HTML data. This function would return \"text/html\".
simpleContentType :: ContentType -> ContentType
-- Equivalent to the old @fst . B.breakByte 59@, but avoids the
-- deprecated 'breakByte'.  59 == ';'.
simpleContentType = B.takeWhile (/= 59)
-- | Format a 'UTCTime' in W3 format.
formatW3 :: UTCTime -> T.Text
formatW3 t = T.pack (formatTime defaultTimeLocale "%FT%X-00:00" t)
-- | Format a 'UTCTime' as per RFC 1123.
formatRFC1123 :: UTCTime -> T.Text
formatRFC1123 t = T.pack (formatTime defaultTimeLocale "%a, %d %b %Y %X %Z" t)
-- | Format a 'UTCTime' as per RFC 822.
formatRFC822 :: UTCTime -> T.Text
formatRFC822 t = T.pack (formatTime defaultTimeLocale "%a, %d %b %Y %H:%M:%S %z" t)
| chreekat/yesod | yesod-core/Yesod/Content.hs | bsd-2-clause | 7,002 | 0 | 17 | 1,498 | 1,504 | 863 | 641 | 164 | 3 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, FlexibleContexts,
DeriveDataTypeable, StandaloneDeriving #-}
module HEP.Automation.MadGraph.Model.ZpH where
import Data.Typeable
import Data.Data
import Text.Printf
import Text.Parsec
import Control.Monad.Identity
import Text.StringTemplate
import Text.StringTemplate.Helpers
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Model.Common
-- | Tag type for the horizontal Z' MadGraph model.
data ZpH = ZpH
         deriving (Show, Typeable, Data)
-- | MadGraph4 model declaration for the horizontal Z' model.
instance Model ZpH where
  data ModelParam ZpH = ZpHParam { massZp :: Double, gRZp :: Double }
                      deriving Show
  briefShow ZpH = "Zp"
  madgraphVersion _ = MadGraph4
  modelName ZpH = "zHorizontal_MG"
  modelFromString str = case str of
                          "zHorizontal_MG" -> Just ZpH
                          _ -> Nothing
  paramCard4Model ZpH = "param_card_zHorizontal.dat"
  -- Render the param-card template, substituting mass, coupling / sqrt 2
  -- and the computed width; three newlines are appended to the card.
  paramCardSetup tpath ZpH (ZpHParam m g) = do
    templates <- directoryGroup tpath
    return $ ( renderTemplateGroup
                 templates
                 [ ("masszp" , (printf "%.4e" m :: String))
                 , ("gRoverSqrtTwo" , (printf "%.4e" (g / (sqrt 2.0)) :: String))
                 , ("widthzp" , (printf "%.4e" (gammaWpZp m g) :: String)) ]
                 (paramCard4Model ZpH) ) ++ "\n\n\n"
  briefParamShow (ZpHParam m g) = "M"++show m++"G"++show g
  -- Inverse of 'briefParamShow'; raises 'error' on malformed input.
  interpreteParam str = let r = parse zphparse "" str
                        in case r of
                             Right param -> param
                             Left err -> error (show err)
-- | Parse a brief parameter string of the shape @M<mass>G<coupling>@.
zphparse :: ParsecT String () Identity (ModelParam ZpH)
zphparse = do
  _ <- char 'M'
  massStr <- many1 (oneOf "+-0123456789.")
  _ <- char 'G'
  couplingStr <- many1 (oneOf "+-0123456789.")
  return (ZpHParam (read massStr) (read couplingStr))
-- | Decay width of the Z' as a function of its mass and coupling
-- ('mtop' is supplied by HEP.Automation.MadGraph.Model.Common).
gammaWpZp :: Double -> Double -> Double
gammaWpZp mass coup =
    coup^(2 :: Int) / (16.0 * pi) *mass*( 1.0 - 1.5 * ratio + 0.5 * ratio^(3 :: Int))
  where
    ratio = mtop^(2 :: Int)/ mass^(2 :: Int)
-- | Cached 'TypeRep' for the ZpH model tag type.
-- NOTE(review): 'mkTyCon' is deprecated in later base versions; confirm
-- the project's GHC before upgrading.
zpHTr :: TypeRep
zpHTr = mkTyConApp (mkTyCon "HEP.Automation.MadGraph.Model.ZpH.ZpH") []
-- | Hand-written 'Typeable' for the associated data-family instance.
instance Typeable (ModelParam ZpH) where
  typeOf _ = mkTyConApp modelParamTc [zpHTr]
deriving instance Data (ModelParam ZpH)
| wavewave/madgraph-auto-model | src/HEP/Automation/MadGraph/Model/ZpH.hs | bsd-2-clause | 2,229 | 0 | 19 | 625 | 688 | 360 | 328 | 53 | 1 |
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Dimitri Sabadie
-- License : BSD3
--
-- Maintainer : Dimitri Sabadie <[email protected]>
-- Stability : experimental
-- Portability : portable
----------------------------------------------------------------------------
module Graphics.Luminance.Shader.Program where
import Control.Applicative ( liftA2 )
import Control.Monad.Except ( MonadError(throwError) )
import Control.Monad.IO.Class ( MonadIO(..) )
import Control.Monad.Trans.Resource ( MonadResource, register )
import Data.Foldable ( traverse_ )
import Foreign.C ( peekCString, withCString )
import Foreign.Marshal.Alloc ( alloca )
import Foreign.Marshal.Array ( allocaArray )
import Foreign.Ptr ( castPtr, nullPtr )
import Foreign.Storable ( peek )
import Graphics.Luminance.Shader.Stage ( Stage(..) )
import Graphics.Luminance.Shader.Uniform ( U, Uniform(..) )
import Graphics.GL
import Numeric.Natural ( Natural )
-- | A linked shader program, wrapping the raw OpenGL program object name.
newtype Program = Program { programID :: GLuint }
-- | Errors that can occur while creating a shader 'Program'.
data ProgramError
  = LinkFailed String      -- ^ linking failed; carries the GL info log
  | InactiveUniform String -- ^ the requested uniform is not active
    deriving (Eq,Show)
-- | Error types that can embed a 'ProgramError' (used with 'MonadError').
class HasProgramError a where
  fromProgramError :: ProgramError -> a
-- | Link a shader 'Program' from a list of 'Stage's and build its uniform
-- interface.
--
-- @buildIface@ receives a resolver that looks uniforms up either by name
-- (@Left name@) or by explicit location (@Right location@) and returns the
-- caller-defined interface value @i@.
--
-- On link failure the info log is wrapped in 'LinkFailed' and thrown through
-- 'fromProgramError'; on success the GL object is registered for deletion
-- when the 'MonadResource' scope closes.
createProgram :: (HasProgramError e,MonadError e m,MonadIO m,MonadResource m)
              => [Stage]
              -> ((forall a. (Uniform a) => Either String Natural -> m (U a)) -> m i)
              -> m (Program,i)
createProgram stages buildIface = do
  (pid,linked,cl) <- liftIO $ do
    pid <- glCreateProgram
    -- attach all stages, link, then capture the link status and info log
    traverse_ (glAttachShader pid . stageID) stages
    glLinkProgram pid
    linked <- isLinked pid
    ll <- clogLength pid
    cl <- clog ll pid
    pure (pid,linked,cl)
  if
    | linked -> do
        -- free the GL object when the resource scope is released
        _ <- register $ glDeleteProgram pid
        let prog = Program pid
        iface <- buildIface $ ifaceWith prog
        pure (prog,iface)
    | otherwise -> throwError . fromProgramError $ LinkFailed cl
-- | Variant of 'createProgram' for programs whose uniform interface is not
-- needed: only the linked 'Program' itself is returned.
createProgram_ :: (HasProgramError e,MonadError e m,MonadIO m,MonadResource m)
               => [Stage]
               -> m Program
createProgram_ stages = fst <$> createProgram stages (const (pure ()))
-- | True when the given GL program object linked successfully
-- (queries @GL_LINK_STATUS@).
isLinked :: GLuint -> IO Bool
isLinked pid = do
  ok <- alloca $ liftA2 (*>) (glGetProgramiv pid GL_LINK_STATUS) peek
  pure $ ok == GL_TRUE
-- | Length in bytes of the program's info log (@GL_INFO_LOG_LENGTH@).
clogLength :: GLuint -> IO Int
clogLength pid =
  fmap fromIntegral .
    alloca $ liftA2 (*>) (glGetProgramiv pid GL_INFO_LOG_LENGTH) peek
-- | Fetch the info log of length @l@ for the given program object.
clog :: Int -> GLuint -> IO String
clog l pid =
  allocaArray l $
    liftA2 (*>) (glGetProgramInfoLog pid (fromIntegral l) nullPtr)
      (peekCString . castPtr)
-- | Resolve a uniform of the 'Program', either by name (@Left@) or by an
-- explicit location (@Right@).
--
-- For named lookups a location of @-1@ from @glGetUniformLocation@ means the
-- uniform is inactive, and 'InactiveUniform' is thrown.  An explicit
-- 'Natural' location is accepted as-is: it cannot be negative by
-- construction.
--
-- Fix over the previous code: the old guard @isActive sem@ evaluated the
-- literal @-1@ at type 'Natural', which raises an arithmetic underflow at
-- runtime instead of performing any validation.
ifaceWith :: (HasProgramError e,MonadError e m,MonadIO m,Uniform a)
          => Program
          -> Either String Natural
          -> m (U a)
ifaceWith prog access = case access of
  Left name -> do
    location <- liftIO . withCString name $ glGetUniformLocation pid
    if
      | isActive location -> pure $ toU pid location
      | otherwise -> throwError . fromProgramError $ InactiveUniform name
  Right sem -> pure $ toU pid (fromIntegral sem)
  where
    pid = programID prog
    -- glGetUniformLocation signals an inactive/unknown uniform with -1
    isActive :: (Ord a,Num a) => a -> Bool
    isActive = (> -1)
| apriori/luminance | src/Graphics/Luminance/Shader/Program.hs | bsd-3-clause | 3,276 | 0 | 16 | 726 | 1,035 | 541 | 494 | -1 | -1 |
{-# LANGUAGE CPP #-}
module Text.Search.Sphinx.Types (
module Text.Search.Sphinx.Types
, ByteString ) where
import Data.ByteString.Lazy (ByteString)
import Data.Int (Int64)
import Data.Maybe (Maybe, isJust)
import Data.Text (Text,empty)
-- | Data structure representing one query. It can be sent with 'runQueries'
-- or 'runQueries'' to the server in batch mode.
data Query = Query { queryString :: Text -- ^ The actual query string
                   , queryIndexes :: Text -- ^ The indexes to search; \"*\" means every index
                   , queryComment :: Text -- ^ A comment string attached to the query
                   } deriving (Show)
-- | Search commands understood by searchd.  The wire code is the
-- constructor's 'Enum' index (see 'searchdCommand').
data SearchdCommand = ScSearch
                    | ScExcerpt
                    | ScUpdate
                    | ScKeywords
                    deriving (Show, Enum)
-- | Numeric command code sent over the wire.
searchdCommand :: SearchdCommand -> Int
searchdCommand = fromEnum
-- | Current client-side command implementation versions
data VerCommand = VcSearch
                | VcExcerpt
                | VcUpdate
                | VcKeywords
                deriving (Show)
#ifdef ONE_ONE_BETA
-- | Important! only 1.1 compatible, not 9.9.x
verCommand VcSearch = 0x117
verCommand VcExcerpt = 0x102
#else
-- | Important! 2.0 compatible
verCommand VcSearch = 0x118
verCommand VcExcerpt = 0x103
#endif
-- update/keywords versions are shared by both protocol flavours
verCommand VcUpdate = 0x101
verCommand VcKeywords = 0x100
-- | Searchd status codes
data Status = OK
            | RETRY
            | WARNING
            | ERROR Int
            deriving (Show)
-- | status from an individual query
data QueryStatus = QueryOK
                 | QueryWARNING
                 | QueryERROR Int
                 deriving (Show)
-- Decode the per-query status word.
-- NOTE(review): no type signature; the inferred type is
-- (Eq a, Num a) => a -> QueryStatus.  Code 2 (retry) deliberately errors.
toQueryStatus 0 = QueryOK
toQueryStatus 3 = QueryWARNING
toQueryStatus 2 = error "Didn't think retry was possible"
toQueryStatus n = QueryERROR n
-- Decode the top-level searchd status word.
toStatus 0 = OK
toStatus 2 = RETRY
toStatus 3 = WARNING
toStatus n = ERROR n
-- | Match modes
data MatchMode = All
               | Any
               | Phrase
               | Boolean
               | Extended
               | Fullscan
               | Extended2 -- extended engine V2 (TEMPORARY, WILL BE REMOVED)
               deriving (Show, Enum)
-- | Ranking modes (ext2 only)
data Rank = ProximityBm25 -- default mode, phrase proximity major factor and BM25 minor one
          | Bm25 -- statistical mode, BM25 ranking only (faster but worse quality)
          | None -- no ranking, all matches get a weight of 1
          | WordCount -- simple word-count weighting, rank is a weighted sum of per-field keyword occurrence counts
          | Proximity -- internally used to emulate SPH_MATCH_ALL queries
          | MatchAny -- internally used to emulate SPHINX_MATCH_ANY searching mode
          | Fieldmask -- NOTE(review): semantics undocumented here — confirm against the Sphinx docs
          | Sph04 -- like ProximityBm25, but more weight given to matches at beginning or end of field
          | Total
          deriving (Show, Enum)
-- | Sort modes
data Sort = Relevance
          | AttrDesc
          | AttrAsc
          | TimeSegments
          | SortExtended -- constructor already existed
          | Expr
          deriving (Show, Enum)
-- | Filter types (attribute name plus the accepted values or range)
data Filter = ExclusionFilter Filter
            | FilterValues String [Int64]
            | FilterRange String Int64 Int64
            | FilterFloatRange String Float Float
            deriving (Show)
-- | Shortcut for negating a filter: wraps it in 'ExclusionFilter'.
exclude :: Filter -> Filter
exclude = ExclusionFilter
-- Wire code for each filter kind.
-- NOTE(review): non-exhaustive — 'ExclusionFilter' has no equation here, so
-- applying this to one is a runtime pattern-match failure; confirm callers
-- always unwrap exclusions first.
fromEnumFilter (FilterValues _ _) = 0
fromEnumFilter (FilterRange _ _ _) = 1
fromEnumFilter (FilterFloatRange _ _ _) = 2
-- | Attribute types
data AttrT = AttrTUInt -- unsigned 32-bit integer
           | AttrTTimestamp -- timestamp
           | AttrTStr2Ordinal -- ordinal string number (integer at search time, specially handled at indexing time)
           | AttrTBool -- boolean bit field
           | AttrTFloat -- floating point number (IEEE 32-bit)
           | AttrTBigInt -- signed 64-bit integer
           | AttrTString -- string (binary; in-memory)
           | AttrTWordCount -- string word count (integer at search time,tokenized and counted at indexing time)
           | AttrTMulti AttrT -- multiple values (0 or more)
           deriving (Show)
-- | 'Enum' goes through the Sphinx wire codes, not through constructor
-- positions.
instance Enum AttrT where
  toEnum = toAttrT
  fromEnum = attrT
-- Wire code -> attribute type.
-- NOTE(review): partial — of the multi-value range only 0x40000001
-- (multi of uint) is handled; any other code crashes at runtime.
toAttrT 1 = AttrTUInt
toAttrT 2 = AttrTTimestamp
toAttrT 3 = AttrTStr2Ordinal
toAttrT 4 = AttrTBool
toAttrT 5 = AttrTFloat
toAttrT 6 = AttrTBigInt
toAttrT 7 = AttrTString
toAttrT 8 = AttrTWordCount
toAttrT 0x40000001 = AttrTMulti AttrTUInt
-- Bit flagging a multi-value attribute in the wire format.
attrMultiMask = 0x40000000
-- Attribute type -> wire code (inverse of 'toAttrT'; same multi-value
-- restriction applies).
attrT AttrTUInt = 1
attrT AttrTTimestamp = 2
attrT AttrTStr2Ordinal = 3
attrT AttrTBool = 4
attrT AttrTFloat = 5
attrT AttrTBigInt = 6
attrT AttrTString = 7
attrT AttrTWordCount = 8
attrT (AttrTMulti AttrTUInt) = 0x40000001
-- | Grouping functions (how matches are bucketed when grouping by an
-- attribute)
data GroupByFunction = Day
                     | Week
                     | Month
                     | Year
                     | Attr
                     | AttrPair
                     deriving (Show, Enum)
-- | The result of a query
data QueryResult = QueryResult {
                 -- | The matches
                   matches :: [Match]
                 -- | Total amount of matches retrieved on server by this query.
                 , total :: Int
                 -- | Total amount of matching documents in index.
                 , totalFound :: Int
                 -- | processed words with the number of docs and the number of hits.
                 -- NOTE(review): this field shadows 'Prelude.words'.
                 , words :: [(Text, Int, Int)]
                 -- | List of attribute names returned in the result.
                 -- The 'Match' values carry the attribute values in the same order.
                 , attributeNames :: [ByteString]
                 }
                 deriving Show
-- | a single query result, runQueries returns a list of these
data SingleResult = QueryOk QueryResult
                  | QueryWarning Text QueryResult -- ^ result plus warning text
                  | QueryError Int Text           -- ^ error code and message
                  deriving (Show)
-- | a result returned from searchd
data Result a = Ok a
              | Warning Text a -- ^ payload plus warning text
              | Error Int Text -- ^ error code and message
              | Retry Text
              deriving (Show)
-- | A single document hit.
data Match = Match {
             -- Document ID
               documentId :: Int64
             -- Document weight
             , documentWeight :: Int
             -- Attribute values, ordered as in 'attributeNames'
             , attributeValues :: [Attr]
             }
             deriving Show
-- | Matches compare equal iff their document ids are equal; weight and
-- attributes are ignored.
instance Eq Match where
  d1 == d2 = documentId d1 == documentId d2
-- | A typed attribute value attached to a 'Match'.
data Attr = AttrMulti [Attr]
          | AttrUInt Int
          | AttrBigInt Int64
          | AttrString Text
          | AttrFloat Float
          deriving (Show)
| gregwebs/haskell-sphinx-client | Text/Search/Sphinx/Types.hs | bsd-3-clause | 6,681 | 0 | 10 | 2,279 | 1,064 | 626 | 438 | 147 | 1 |
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE ViewPatterns #-}
module Twilio.Transcription
( -- * Resource
Transcription(..)
, Twilio.Transcription.get
-- * Types
, PriceUnit(..)
, TranscriptionStatus(..)
) where
import Control.Applicative
import Control.Error.Safe
import Control.Monad
import Control.Monad.Catch
import Data.Aeson
import Data.Monoid
import Data.Text (Text)
import Data.Time.Clock
import Network.URI
import Control.Monad.Twilio
import Twilio.Internal.Parser
import Twilio.Internal.Request
import Twilio.Internal.Resource as Resource
import Twilio.Types
{- Resource -}
-- | A transcription resource as decoded from Twilio's REST API (see the
-- 'FromJSON' instance for the exact JSON field mapping).
data Transcription = Transcription
  { sid :: !TranscriptionSID
  , dateCreated :: !UTCTime
  , dateUpdated :: !UTCTime
  , accountSID :: !AccountSID
  , status :: !TranscriptionStatus
  , recordingSID :: !RecordingSID
  , duration :: !(Maybe Int)       -- ^ absent when the API returns no duration
  , transcriptionText :: !Text
  , price :: !(Maybe Double)       -- ^ absent when the API returns no price
  , priceUnit :: !PriceUnit
  , apiVersion :: !APIVersion
  , uri :: !URI
  } deriving (Show, Eq)
-- | Decode a 'Transcription' from Twilio's JSON representation.  Dates are
-- parsed with 'parseDateTime'; @duration@ and @price@ arrive as JSON strings
-- and go through 'readZ'; @uri@ must be a relative URI reference.
-- Non-object values fail with 'mzero'.
instance FromJSON Transcription where
  parseJSON (Object v) = Transcription
    <$> v .: "sid"
    <*> (v .: "date_created" >>= parseDateTime)
    <*> (v .: "date_updated" >>= parseDateTime)
    <*> v .: "account_sid"
    <*> v .: "status"
    <*> v .: "recording_sid"
    <*> (v .: "duration" <&> fmap readZ
          >>= maybeReturn')
    <*> v .: "transcription_text"
    <*> (v .: "price" <&> fmap readZ
          >>= maybeReturn')
    <*> v .: "price_unit"
    <*> v .: "api_version"
    <*> (v .: "uri" <&> parseRelativeReference
          >>= maybeReturn)
  parseJSON _ = mzero
-- | Fetch a single 'Transcription' via @GET /Transcriptions/{sid}.json@.
instance Get1 TranscriptionSID Transcription where
  get1 (getSID -> sid) = request parseJSONFromResponse =<< makeTwilioRequest
    ("/Transcriptions/" <> sid <> ".json")
-- | Get a 'Transcription' by 'TranscriptionSID'.
get :: MonadThrow m => TranscriptionSID -> TwilioT m Transcription
get = Resource.get
{- Types -}
-- | Processing state of a transcription.
data TranscriptionStatus
  = InProgress
  | Completed
  | Failed
  deriving Eq
-- | Renders Twilio's wire format (e.g. @\"in-progress\"@), not a
-- Haskell-readable representation — do not use with 'read'.
instance Show TranscriptionStatus where
  show InProgress = "in-progress"
  show Completed = "completed"
  show Failed = "failed"
-- | Parse Twilio's wire strings; anything else fails with 'mzero'.
instance FromJSON TranscriptionStatus where
  parseJSON (String s) = case s of
    "in-progress" -> return InProgress
    "completed"   -> return Completed
    "failed"      -> return Failed
    _             -> mzero
  parseJSON _ = mzero
| seagreen/twilio-haskell | src/Twilio/Transcription.hs | bsd-3-clause | 2,632 | 0 | 25 | 698 | 611 | 337 | 274 | 98 | 1 |
{-# LANGUAGE GADTs, TypeOperators, PolyKinds, RankNTypes, CPP #-}
#include "macros.h"
LANGUAGE_TRUSTWORTHY
LANGUAGE_AUTODERIVETYPEABLE
{-# OPTIONS_GHC -fno-warn-unused-imports -fno-warn-orphans #-}
-- | Kind-polymorphic functions for manipulating type equality evidence.
--
-- This module is available only if @PolyKinds@ are available (GHC 7.6+).
module Type.Eq.Poly (module Type.Eq, module Type.Eq.Poly) where
import Control.Applicative ((<$>))
import Control.Category ((.)) -- for haddock
import Data.Typeable (Typeable1, typeOf1, Typeable2, typeOf2, Typeable3, typeOf3, Typeable4, typeOf4, Typeable5, typeOf5, Typeable6, typeOf6, Typeable7, typeOf7)
import Type.Eq
import Type.Eq.Higher ((::~::)(..), (:::~:::)(..), OuterEq1(..), InnerEq1(..))
import Type.Eq.Unsafe
import Prelude hiding ((.))
import Unsafe.Coerce
{-
INSTANCE_TYPEABLE(1,:~:,f,g,"Type.Eq",":~:",())
INSTANCE_TYPEABLE(2,:~:,m,n,"Type.Eq",":~:",() ())
INSTANCE_TYPEABLE(3,:~:,x,y,"Type.Eq",":~:",() () ())
INSTANCE_TYPEABLE(4,:~:,x,y,"Type.Eq",":~:",() () () ())
INSTANCE_TYPEABLE(5,:~:,x,y,"Type.Eq",":~:",() () () () ())
INSTANCE_TYPEABLE(6,:~:,x,y,"Type.Eq",":~:",() () () () () ())
INSTANCE_TYPEABLE(7,:~:,x,y,"Type.Eq",":~:",() () () () () () ())
-}
-- | Synonym for @'composeEq'@. Kind-polymorphic, unlike @('.')@.
(|.|) :: b :~: c -> a :~: b -> a :~: c
(|.|) = composeEq
-- | Congruence: equal constructors applied to equal arguments yield equal
-- applications.
applyEq, (|$|) :: f :~: g -> a :~: b -> f a :~: g b
applyEq = withEq (withEq Eq)
(|$|) = applyEq
-- | Type constructors are generative: equality of applications implies
-- equality of the applied constructors.
-- NOTE(review): BUG_5591 comes from macros.h — presumably a workaround for
-- GHC ticket #5591; confirm there before editing.
constructorEq :: f a :~: g b -> f :~: g
constructorEq = withEq BUG_5591(Eq)
-- Macro-generated Typeable-based runtime equality checks for arities 1..7.
-- NOTE(review): DYNAMIC_EQ is defined in macros.h — see there for the
-- expansion.
DYNAMIC_EQ(1,,:~:,f,g,())
DYNAMIC_EQ(2,,:~:,n,m,() ())
DYNAMIC_EQ(3,,:~:,x,y,() () ())
DYNAMIC_EQ(4,,:~:,x,y,() () () ())
DYNAMIC_EQ(5,,:~:,x,y,() () () () ())
DYNAMIC_EQ(6,,:~:,x,y,() () () () () ())
DYNAMIC_EQ(7,,:~:,x,y,() () () () () () ())
-- | Two outer-equality witnesses for the same argument pin down the same
-- constructor.
sameOuterEq :: OuterEq f a -> OuterEq g a -> f :~: g
sameOuterEq OuterEq OuterEq = BUG_5591(Eq)
-- * Compatibility with Type.Eq.Higher
-- | Demote an arity-1 witness to the kind-polymorphic form.
fromEq1 :: f ::~:: g -> f :~: g
fromEq1 Eq1 = Eq
-- | Promote a kind-polymorphic witness to the arity-1 form.
toEq1 :: f :~: g -> f ::~:: g
toEq1 Eq = Eq1
-- | Demote an arity-2 witness to the kind-polymorphic form.
fromEq2 :: n :::~::: m -> n :~: m
fromEq2 Eq2 = Eq
-- | Promote a kind-polymorphic witness to the arity-2 form.
toEq2 :: n :~: m -> n :::~::: m
toEq2 Eq = Eq2
-- | Demote an arity-1 outer-equality witness.
fromOuterEq1 :: OuterEq1 m f -> OuterEq m f
fromOuterEq1 OuterEq1 = BUG_5591(OuterEq)
-- | Promote an outer-equality witness.
toOuterEq1 :: OuterEq m f -> OuterEq1 m f
toOuterEq1 OuterEq = OuterEq1
-- | Demote an arity-1 inner-equality witness.
fromInnerEq1 :: InnerEq1 a f -> InnerEq a f
fromInnerEq1 InnerEq1 = BUG_5591(InnerEq)
-- | Promote an inner-equality witness.
toInnerEq1 :: InnerEq a f -> InnerEq1 a f
toInnerEq1 InnerEq = InnerEq1
| glaebhoerl/type-eq | Type/Eq/Poly.hs | bsd-3-clause | 2,487 | 14 | 8 | 368 | 829 | 457 | 372 | -1 | -1 |
module Database.Algebra.Rewrite.Traversal
( preOrder
, postOrder
, applyToAll
, topologically
, iteratively
, sequenceRewrites
) where
import Control.Monad
import qualified Data.IntMap as M
import qualified Data.Set as S
import qualified Database.Algebra.Dag as Dag
import Database.Algebra.Dag.Common
import Database.Algebra.Rewrite.DagRewrite
import Database.Algebra.Rewrite.Rule
-- | Repeatedly sweep over all nodes of the DAG (in key order), applying the
-- rule set to each.  After a successful rewrite the properties are
-- re-inferred and the sweep resumes at the offset where the match occurred.
-- Returns whether any rewrite fired at all.
applyToAll :: Rewrite o e (NodeMap p) -> RuleSet o p e -> Rewrite o e Bool
applyToAll inferProps rules = iterateRewrites False 0
  where iterateRewrites anyChanges offset = do
          -- drop the first nodes, assuming that we already visited them
          nodes <- drop offset <$> M.keys <$> Dag.nodeMap <$> exposeDag
          -- re-infer properties
          props <- inferProps
          extras <- getExtras
          -- try to apply the rewrites, beginning with node at position offset
          matchedOffset <- traverseNodes offset props extras rules nodes
          case matchedOffset of
            -- A rewrite applied at offset o -> we continue at this offset
            Just o -> iterateRewrites True o
            -- No rewrite applied -> report if any changes occurred at all
            Nothing -> return anyChanges
-- | Walk the node list, applying the rule set to each node in turn.
-- Returns @Just offset@ for the position of the first node where a rule
-- fired, or @Nothing@ when no rule applied to any node.
traverseNodes :: Int -> NodeMap p -> e -> RuleSet o p e -> [AlgNode] -> Rewrite o e (Maybe Int)
traverseNodes _      _     _      _     []       = return Nothing
traverseNodes offset props extras rules (n : ns) = do
  applied <- applyRuleSet extras props rules n
  if applied
    then return (Just offset)
    else traverseNodes (offset + 1) props extras rules ns
-- | Infer properties, then traverse the DAG in preorder fashion and apply the rule set
-- at every node. Properties are re-inferred after every change.
-- The traversal threads a triple of (any change so far, cached properties,
-- visited set); cached properties are invalidated (set to 'Nothing')
-- whenever a rewrite fires.
preOrder :: Dag.Operator o
            => Rewrite o e (NodeMap p)
            -> RuleSet o p e
            -> Rewrite o e Bool
preOrder inferAction rules =
  let traversePre (changedPrev, mProps, visited) q =
        if q `S.member` visited
        then return (changedPrev, mProps, visited)
        else do
          -- reuse cached properties when no rewrite invalidated them
          props <- case mProps of
            Just ps -> return ps
            Nothing -> inferAction
          e <- getExtras
          changedSelf <- applyRuleSet e props rules q
          -- Have to be careful here: With garbage collection, the current node 'q'
          -- might no longer be present after a rewrite.
          mop <- operatorSafe q
          case mop of
            Just op -> do
              -- the node still seems to be around, so we need to look after its children
              let mProps' = if changedSelf then Nothing else Just props
              let cs = Dag.opChildren op
              (changedChild, mProps'', visited') <- foldM descend (changedSelf, mProps', visited) cs
              let visited'' = S.insert q visited'
              if changedChild
                then return (True, Nothing, visited'')
                else return (changedPrev || (changedSelf || changedChild), mProps'', visited'')
            Nothing -> return (True, Nothing, visited) -- The node has been collected -> do nothing
      descend (changedPrev, mProps, visited) c = do
        props <- case mProps of
          Just ps -> return ps
          Nothing -> inferAction
        traversePre (changedPrev, Just props, visited) c
  in do
    pm <- inferAction
    rs <- rootNodes
    (changed, _, _) <- foldM traversePre (False, Just pm, S.empty) rs
    return changed
{- | Map a ruleset over the nodes of a DAG in topological order. This function
   assumes that the structure of the DAG is not changed during the rewrites.
   Properties are only inferred once, up front.
-}
topologically :: Rewrite o e (NodeMap p) -> RuleSet o p e -> Rewrite o e Bool
topologically inferAction rules = do
  topoOrdering <- topsort
  props <- inferAction
  let rewriteNode changedPrev q = do
        e <- getExtras
        changed <- applyRuleSet e props rules q
        -- accumulate whether any node was rewritten
        return $ changed || changedPrev
  foldM rewriteNode False topoOrdering
-- | Infer properties, then traverse the DAG in a postorder fashion and apply the rule set at
-- every node. Properties are re-inferred after every change.
-- Children are rewritten before their parent; cached properties are dropped
-- (set to 'Nothing') whenever a rewrite fires.
postOrder :: Dag.Operator o
             => Rewrite o e (NodeMap p)
             -> RuleSet o p e
             -> Rewrite o e Bool
postOrder inferAction rules =
  let traversePost (changedPrev, props, visited) q =
        if q `S.member` visited
        then return (changedPrev, props, visited)
        else do
          op <- operator q
          let cs = Dag.opChildren op
          (changedChild, mProps, visited') <- foldM descend (False, props, visited) cs
          -- re-infer properties if a child rewrite invalidated the cache
          props' <- case mProps of
            Just ps -> return ps
            Nothing -> inferAction
          e <- getExtras
          -- Check if the current node is still around after its children
          -- have been rewritten. This should not happen regularly, but
          -- better safe than sorry.
          mop <- operatorSafe q
          case mop of
            Just _ -> do
              changedSelf <- applyRuleSet e props' rules q
              let visited'' = S.insert q visited'
              if changedSelf
                then return (True, Nothing, visited'')
                else return (changedChild || changedPrev, Just props', visited'')
            Nothing -> return (True, Nothing, visited)
      descend (changedPrev, mProps, visited) c = do
        props <- case mProps of
          Just ps -> return ps
          Nothing -> inferAction
        traversePost (changedPrev, Just props, visited) c
  in do
    pm <- inferAction
    rs <- rootNodes
    (changed, _, _) <- foldM traversePost (False, Just pm, S.empty) rs
    return changed
-- | Apply a rewrite repeatedly until it reports that nothing changed,
-- returning whether any iteration changed the DAG at all.
iteratively :: Rewrite o e Bool -> Rewrite o e Bool
iteratively rewrite = loop False
  where
    loop everChanged = do
      changedNow <- rewrite
      if changedNow
        then do
          logGeneral ">>> Iterate"
          loop True
        else return everChanged
-- | Run a list of rewrites in order and report whether any of them changed
-- the DAG.
sequenceRewrites :: [Rewrite o e Bool] -> Rewrite o e Bool
sequenceRewrites = fmap or . sequence
| ulricha/algebra-dag | src/Database/Algebra/Rewrite/Traversal.hs | bsd-3-clause | 6,370 | 0 | 22 | 2,021 | 1,545 | 781 | 764 | 121 | 7 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-- | Implementation of the @dhall to-directory-tree@ subcommand
module Dhall.DirectoryTree
( -- * Filesystem
toDirectoryTree
, FilesystemError(..)
) where
import Control.Applicative (empty)
import Control.Exception (Exception)
import Data.Void (Void)
import Dhall.Syntax (Chunks (..), Expr (..), RecordField (..))
import System.FilePath ((</>))
import qualified Control.Exception as Exception
import qualified Data.Foldable as Foldable
import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified Dhall.Map as Map
import qualified Dhall.Pretty
import qualified Dhall.Util as Util
import qualified Prettyprinter.Render.String as Pretty
import qualified System.Directory as Directory
import qualified System.FilePath as FilePath
{-| Attempt to transform a Dhall record into a directory tree where:
* Records are translated into directories
* @Map@s are also translated into directories
* @Text@ values or fields are translated into files
* @Optional@ values are omitted if @None@
For example, the following Dhall record:
> { dir = { `hello.txt` = "Hello\n" }
> , `goodbye.txt`= Some "Goodbye\n"
> , `missing.txt` = None Text
> }
... should translate to this directory tree:
> $ tree result
> result
> ├── dir
> │ └── hello.txt
> └── goodbye.txt
>
> $ cat result/dir/hello.txt
> Hello
>
> $ cat result/goodbye.txt
> Goodbye
Use this in conjunction with the Prelude's support for rendering JSON/YAML
in "pure Dhall" so that you can generate files containing JSON. For
example:
> let JSON =
> https://prelude.dhall-lang.org/v12.0.0/JSON/package.dhall sha256:843783d29e60b558c2de431ce1206ce34bdfde375fcf06de8ec5bf77092fdef7
>
> in { `example.json` =
> JSON.render (JSON.array [ JSON.number 1.0, JSON.bool True ])
> , `example.yaml` =
> JSON.renderYAML
> (JSON.object (toMap { foo = JSON.string "Hello", bar = JSON.null }))
> }
... which would generate:
> $ cat result/example.json
> [ 1.0, true ]
>
> $ cat result/example.yaml
> ! "bar": null
> ! "foo": "Hello"
This utility does not take care of type-checking and normalizing the
provided expression. This will raise a `FilesystemError` exception upon
encountering an expression that cannot be converted as-is.
-}
toDirectoryTree :: FilePath -> Expr Void Void -> IO ()
toDirectoryTree path expression = case expression of
    -- record: each field becomes an entry under 'path'
    RecordLit keyValues ->
        Map.unorderedTraverseWithKey_ process $ recordFieldValue <$> keyValues
    -- empty Map with Text keys: nothing to create
    ListLit (Just (Record [ ("mapKey", recordFieldValue -> Text), ("mapValue", _) ])) [] ->
        return ()
    -- non-empty Map: each mapKey/mapValue pair becomes an entry
    ListLit _ records
        | not (null records)
        , Just keyValues <- extract (Foldable.toList records) ->
            Foldable.traverse_ (uncurry process) keyValues
    -- Text literal (without interpolations): write a file at 'path'
    TextLit (Chunks [] text) ->
        Text.IO.writeFile path text
    -- Some x: unwrap and recurse; None: omit the entry entirely
    Some value ->
        toDirectoryTree path value
    -- union alternative applied to a value: unwrap and recurse
    App (Field (Union _) _) value ->
        toDirectoryTree path value
    App None _ ->
        return ()
    -- anything else cannot be represented as a directory tree
    _ ->
        die
  where
    -- Pull (key, value) pairs out of a list of Map entry records;
    -- 'empty' (Nothing) when any element has the wrong shape.
    extract [] =
        return []
    extract (RecordLit [ ("mapKey", recordFieldValue -> TextLit (Chunks [] key))
                       , ("mapValue", recordFieldValue -> value)] : records) =
        fmap ((key, value) :) (extract records)
    extract _ =
        empty
    -- Create one entry named 'key' under 'path'.  Keys containing the
    -- platform path separator are rejected (they would escape 'path').
    process key value = do
        if Text.isInfixOf (Text.pack [ FilePath.pathSeparator ]) key
            then die
            else return ()
        Directory.createDirectoryIfMissing False path
        toDirectoryTree (path </> Text.unpack key) value
    -- Abort with the whole original expression for context.
    die = Exception.throwIO FilesystemError{..}
      where
        unexpectedExpression = expression
{- | This error indicates that you supplied an invalid Dhall expression to the
    `toDirectoryTree` function. The Dhall expression could not be translated
    to a directory tree.
-}
newtype FilesystemError =
    FilesystemError { unexpectedExpression :: Expr Void Void } -- ^ the offending expression
instance Show FilesystemError where
show FilesystemError{..} =
Pretty.renderString (Dhall.Pretty.layout message)
where
message =
Util._ERROR <> ": Not a valid directory tree expression \n\
\ \n\
\Explanation: Only a subset of Dhall expressions can be converted to a directory \n\
\tree. Specifically, record literals or maps can be converted to directories, \n\
\❰Text❱ literals can be converted to files, and ❰Optional❱ values are included if \n\
\❰Some❱ and omitted if ❰None❱. Values of union types can also be converted if \n\
\they are an alternative which has a non-nullary constructor whose argument is of \n\
\an otherwise convertible type. No other type of value can be translated to a \n\
\directory tree. \n\
\ \n\
\For example, this is a valid expression that can be translated to a directory \n\
\tree: \n\
\ \n\
\ \n\
\ ┌──────────────────────────────────┐ \n\
\ │ { `example.json` = \"[1, true]\" } │ \n\
\ └──────────────────────────────────┘ \n\
\ \n\
\ \n\
\In contrast, the following expression is not allowed due to containing a \n\
\❰Natural❱ field, which cannot be translated in this way: \n\
\ \n\
\ \n\
\ ┌───────────────────────┐ \n\
\ │ { `example.txt` = 1 } │ \n\
\ └───────────────────────┘ \n\
\ \n\
\ \n\
\Note that key names cannot contain path separators: \n\
\ \n\
\ \n\
\ ┌─────────────────────────────────────┐ \n\
\ │ { `directory/example.txt` = \"ABC\" } │ Invalid: Key contains a forward slash\n\
\ └─────────────────────────────────────┘ \n\
\ \n\
\ \n\
\Instead, you need to refactor the expression to use nested records instead: \n\
\ \n\
\ \n\
\ ┌───────────────────────────────────────────┐ \n\
\ │ { directory = { `example.txt` = \"ABC\" } } │ \n\
\ └───────────────────────────────────────────┘ \n\
\ \n\
\ \n\
\You tried to translate the following expression to a directory tree: \n\
\ \n\
\" <> Util.insert unexpectedExpression <> "\n\
\ \n\
\... which is not an expression that can be translated to a directory tree. \n"
-- | Allows 'FilesystemError' to be thrown/caught via "Control.Exception".
instance Exception FilesystemError
| Gabriel439/Haskell-Dhall-Library | dhall/src/Dhall/DirectoryTree.hs | bsd-3-clause | 9,987 | 0 | 17 | 4,473 | 731 | 400 | 331 | 67 | 11 |
-- | Usage: http-bench <url> <min-concurrent> <max-concurrent>
module Main where
import Control.Concurrent
import Control.Monad
import Network.HTTP
import Text.Printf
import System.Exit
import System.Environment
import Control.Exception as E
import Debug.Trace
-- | Hit counters: 'hits' for the current measurement interval, 'prevHits'
-- for the previous one (used to decide whether throughput went up or down).
data Stats = Stats {hits :: Int, prevHits :: Int} deriving (Show)
-- | Currently unused placeholder type.
data Interval = Interval {} deriving (Show)
-- | Length of one measurement interval in microseconds (ten seconds),
-- as consumed by 'threadDelay'.
interval :: Int
interval = 10 * 1000 * 1000
-- | Entry point: parse @\<url\> \<min-concurrent\> \<max-concurrent\>@,
-- start the initial worker pool plus the pool-control thread, and run the
-- statistics loop on the main thread; Ctrl-C is handled by
-- 'handleInterrupts'.
main = do
  args <- getArgs
  when (length args /= 3) (fail usage)
  -- irrefutable pattern is safe: the length was checked just above
  let [url, minConcStr, maxConcStr] = args
  minConc <- readIO minConcStr :: IO Int
  maxConc <- readIO maxConcStr :: IO Int
  currentConcurrent <- newMVar minConc
  stats <- newMVar $ Stats {hits = 0, prevHits = 0}
  flip E.catch (handleInterrupts stats) $ do
    -- create initial set of threads
    threads <- forM [1 .. minConc] $ \_ -> forkIO $ go url stats
    -- spawn thread for pool control
    forkIO $ poolControl threads (go url stats) currentConcurrent
    -- main thread does stat control
    statControl minConc maxConc currentConcurrent stats
-- | On Ctrl-C ('UserInterrupt') print the final statistics and exit;
-- any other async exception is rethrown unchanged.
handleInterrupts stats e | e /= UserInterrupt = E.throwIO e
                         | otherwise = do s <- readMVar stats
                                          putStr "\n\n"
                                          print s
                                          error "Exiting..."
-- | Keep the worker pool at the size requested through the
-- 'currentConcurrent' MVar, re-checking once per 'interval': spawn new
-- workers running 'action' when the pool is too small, kill surplus ones
-- when it is too large.
poolControl :: [ThreadId] -> IO () -> MVar Int -> IO ()
poolControl threads action currentConcurrent = do
  -- maintain a list of type [ThreadId] that represents a threadpool
  threadDelay interval
  let currentThreads = length threads
  wantedThreads <- readMVar currentConcurrent
  -- periodically spawn or kill threads as needed to keep the pool at the size specified by an mvar
  case compare wantedThreads currentThreads of
    GT -> do let newThreads = wantedThreads - currentThreads
             tids <- forM [1 .. newThreads] $ \_ -> forkIO action
             poolControl (tids ++ threads) action currentConcurrent
    LT -> do let removeThreads = currentThreads - wantedThreads
                 (remove, keep) = splitAt removeThreads threads
             forM_ remove $ \tid -> killThread tid
             poolControl keep action currentConcurrent
    EQ -> poolControl threads action currentConcurrent
-- | Once per 'interval': compare this interval's hit count with the
-- previous one and steer the wanted concurrency — raise it (up to
-- 'maxConc') while throughput is still growing, lower it (down to
-- 'minConc') when throughput dropped — then print the numbers and reset
-- the counters.
statControl :: Int -> Int -> MVar Int -> MVar Stats -> IO ()
statControl minConc maxConc currentConcurrent statsRef = forever $ do
  threadDelay interval
  -- read current stats information
  stats <- readMVar statsRef
  conc <- readMVar currentConcurrent
  -- use information from stats to update concurrency level, if necessary
  -- LT: hits grew since last interval; GT: hits shrank
  let wanted = case (prevHits stats `compare` hits stats) of
        EQ -> conc
        LT -> min maxConc (conc + 1)
        GT -> max minConc (conc - 1)
  printf "Hits: %i - Concurrent: %i\n" (hits stats) wanted
  -- reset stats for current interval
  modifyMVar_ statsRef (return . reset)
  modifyMVar_ currentConcurrent (return . const wanted)
  return ()
-- | Start a new measurement interval: the current hit count becomes the
-- previous one and the live counter restarts from zero.
reset :: Stats -> Stats
reset (Stats current _) = Stats { hits = 0, prevHits = current }
-- | Worker loop: request @url@ forever, counting a hit only when the server
-- answered with HTTP status 200.
--
-- Fix over the previous code: @success@ was computed but never used, so
-- failed requests were counted as hits too, skewing the concurrency
-- control in 'statControl'.
go :: String -> MVar Stats -> IO ()
go url stats = forever $ do
  result <- simpleHTTP (getRequest url)
  let success = case result of
        (Right response) | rspCode response == (2, 0, 0) -> True
        _ -> False
  when success $
    modifyMVar_ stats $ \s -> return s {hits = hits s + 1}
-- | Help text printed when the argument list is malformed.
usage :: String
usage = "\n\
\Usage: http-bench <url> <min-concurrent> <max-concurrent>\n\
\ Benchmark a website by requesting a URL many times concurrently.\n\
\ http-bench will begin at the minimum number of concurrent requests,\n\
\ and slowly scale to the speed of your webserver, or the upper\n\
\ concurrency limit parameter.\n"
| headprogrammingczar/http-bench | Main.hs | bsd-3-clause | 3,858 | 1 | 18 | 1,034 | 993 | 487 | 506 | 66 | 3 |
{--
Another instance of Applicative is (->) r, so functions. They are rarely used with the applicative style outside of code golf, but they're still interesting as applicatives, so let's take a look at how the function instance is implemented.
instance Applicative ((->) r) where
pure x = (\_ -> x)
f <*> g = \x -> f x (g x)
When we wrap a value into an applicative functor with pure, the result it yields always has to be that value. A minimal default context that still yields that value as a result.
That's why in the function instance implementation, pure takes a value and creates a function that ignores its parameter and always returns that value.
If we look at the type for pure, but specialized for the (->) r instance:
it's pure :: a -> (r -> a).
ghci> (pure 3) "blah"
3
Because of currying, function application is left-associative, so we can omit the parentheses.
ghci> pure 3 "blah"
3
ghci> :t (+) <$> (+3) <*> (*100)
(+) <$> (+3) <*> (*100) :: (Num a) => a -> a
ghci> (+) <$> (+3) <*> (*100) $ 5
508
When we do (+) <$> (+3) <*> (*100), we're making a function that will use + on the results of (+3) and (*100) and return that.
To demonstrate on a real example, when we did (+) <$> (+3) <*> (*100) $ 5, the 5 first got applied to (+3) and (*100), resulting in 8 and 500. Then, + gets called with 8 and 500, resulting in 508.
--}
{-- review of ((->) r) in <*>, <$>
we know fmap f g :: (a -> b) -> z a -> z b
so f :: (a ->b), g :: z a
and in the ((->) r) higher-kinded type this is defined as:
z a :: ((->) r) a
fmap f g :: (a -> b) -> ((->) r) a -> ((->) r) b as infix:
(a -> b) -> (r -> a) -> (r -> b)
Note that f a must be concrete type! so it is ((->) r) a
Then rewrite it as infix operator: r -> a
<$> :: (functor f) => (a -> b) -> f a -> f b
f <$> g = fmap f g
so output of <$> for ((->) r) is as:
((->) r) b which is equivalent to ((->) r) (f (g r)) , rewrite it to lambda:
r -> (f a) == r -> z b
then:
fmap f g = \r -> f (g r) or as \x -> f (g x)
<*> :: f (a -> b) -> f a -> f b the same as:
((-> r) a -> b) -> ((-> r) a) -> ((-> r) b) ==
(r -> (a -> b)) -> (r -> a) -> (r -> b)
f <*> g => then we know f r == (a -> b) so
f <*> g = \x -> f x (g x), since here x or r is the same type
  Here we can find that f should be a "binary" higher-kinded type, ex: (+), (-)
  Here we can find that g should be a "unary" higher-kinded type, ex: (+ 3), (- 2)
so (+) <$> (+3) <*> (*100) $ 5
=> fmap (+) (+3) ...
=> (\x -> (+ ((+) 3 x))) <*> (* 100) ...
^ as z
=> (\r -> z r (* 100 r)) $ 5
=> z 5 (* 100 5)
=> z 5 (500)
=> (+ ((+) 3 5)) 500
=> (+ 8) 500
=> 508
pure f <*> x <*> y ... == fmap f x <*> y ... == f <$> x <*> y ...
--}
{--
by type signature. we might have different impl for [], ex:
[(+3),(*2)] <*> [1,2] could result to [4,5,2,4] or [1 + 3, 2 * 2]
In order to distinguish this, haskell provide a ZipList applicative functor:
instance Applicative ZipList where
pure x = ZipList (repeat x)
ZipList fs <*> ZipList xs = ZipList (zipWith (\f x -> f x) fs xs)
for all cases like [1 + 3, 2 * 2]
So how do zip lists work in an applicative style?
Let's see. Oh, the ZipList a type doesn't have a Show instance, so we have to use the "getZipList" function to extract a raw list out of a zip list.
--}
getZipList $ (,,) <$> ZipList "dog" <*> ZipList "cat" <*> ZipList "rat"
-- [('d','c','r'),('o','a','a'),('g','t','t')]
{--
The (,,) function is the same as \x y z -> (x,y,z). Also, the (,) function is the same as \x y -> (x,y).
Control.Applicative defines a function that's called liftA2, which has a type of
liftA2 :: (Applicative f) => (a -> b -> c) -> f a -> f b -> f c
It's defined like this:
liftA2 :: (Applicative f) => (a -> b -> c) -> f a -> f b -> f c
liftA2 f a b = f <$> a <*> b
It's also interesting to look at this function's type as:
(a -> b -> c) -> (f a -> f b -> f c)
When we look at it like this, we can say that liftA2 takes a normal binary function and promotes it to a function that operates on two functors.
ghci> liftA2 (:) (Just 3) (Just [4])
Just [3,4]
ghci> (:) <$> Just 3 <*> Just [4]
Just [3,4]
It seems that we can combine any amount of applicatives into one applicative that has a list of the results of those applicatives inside it.
Let's try implementing a function that takes a list of applicatives and returns an applicative that has a list as its result value. We'll call it sequenceA.
sequenceA :: (Applicative f) => [f a] -> f [a]
sequenceA [] = pure []
sequenceA (x:xs) = (:) <$> x <*> sequenceA xs
Another way to implement sequenceA is with a fold. Remember, pretty much any function where we go over a list element by element and accumulate a result along the way can be implemented with a fold.
sequenceA :: (Applicative f) => [f a] -> f [a]
sequenceA = foldr (liftA2 (:)) (pure [])
ghci> sequenceA [Just 3, Just 2, Just 1]
Just [3,2,1]
ghci> sequenceA [Just 3, Nothing, Just 1]
Nothing
ghci> sequenceA [(+3),(+2),(+1)] 3
[6,5,4]
ghci> sequenceA [[1,2,3],[4,5,6]]
[[1,4],[1,5],[1,6],[2,4],[2,5],[2,6],[3,4],[3,5],[3,6]]
ghci> sequenceA [[1,2,3],[4,5,6],[3,4,4],[]]
[]
sequenceA [[1], []]
=> (:) <$> [1] <*> sequenceA [[]]
=> (:) <$> [1] <*> ((:) <$> [] <*> sequenceA [])
=> (:) <$> [1] <*> ((:) <$> [] <*> [[]])
=> (:) <$> [1] <*> []
=> [((:) 1)] <*> []
=> fmap ((:) 1) []
  => by definition we know fmap f [] = []
=> []
=> or
=> [f x | f <- ((:) 1), x <- []] by fs <*> xs = [f x | f <- fs, x <- xs]
=> or
=> do f <- fs
x <- xs
f x
=> fs >>= (\f -> xs >>= (\x -> f x))
=> since [] >>= (\x -> [(+ 1) x]) = []
=> fs >>= (\f -> [])
=> and xs >>= k = join (fmap k xs)
=> join (fmap (\f -> []) fs)
=> []
instance Functor [] where
fmap = map
-- https://hackage.haskell.org/package/base-4.10.0.0/docs/src/GHC.Base.html#fmap
map _ [] = []
map f (x:xs) = f x : map f xs
-- http://hackage.haskell.org/package/base-4.10.0.0/docs/src/GHC.Base.html#map
--}
{--
ghci> map (\f -> f 7) [(>4),(<10),odd]
[True,True,True]
ghci> and $ map (\f -> f 7) [(>4),(<10),odd]
True
ghci> sequenceA [(>4),(<10),odd] 7
[True,True,True]
ghci> and $ sequenceA [(>4),(<10),odd] 7
True
--}
| jamesyang124/haskell-playground | src/Chp111.hs | bsd-3-clause | 6,133 | 0 | 9 | 1,410 | 39 | 21 | 18 | -1 | -1 |
-- |
-- Functions for constructing and parsing Atom feeds for use in the
-- request and response bodies of the various web methods.
--
module Network.TableStorage.Atom (
atomNamespace, dataServicesNamespace, metadataNamespace,
qualifyAtom, qualifyDataServices, qualifyMetadata,
atomElement, atomAttr, wrapContent
) where
import Network.TableStorage.XML
( qualify, cDataText, namespaceAttr )
import Network.TableStorage.Format ( atomDate )
import Text.XML.Light
( Element(elAttribs, elContent, elName),
Content(Elem),
QName, CDataKind(..), Content(..), CData(..),
Attr(..),
blank_element,
unqual )
import Data.Maybe (fromMaybe)
-- | XML namespace URI for Atom 1.0 elements.
atomNamespace :: String
atomNamespace = "http://www.w3.org/2005/Atom"
-- | XML namespace URI for ADO.NET Data Services property elements.
dataServicesNamespace :: String
dataServicesNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices"
-- | XML namespace URI for ADO.NET Data Services metadata attributes.
metadataNamespace :: String
metadataNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices/metadata"
-- | Qualify a local name with the Atom namespace (no prefix).
qualifyAtom :: String -> QName
qualifyAtom = qualify (Just atomNamespace) Nothing
-- | Qualify a local name with the data services namespace, prefix @d@.
qualifyDataServices :: String -> QName
qualifyDataServices = qualify (Just dataServicesNamespace) (Just "d")
-- | Qualify a local name with the metadata namespace, prefix @m@.
qualifyMetadata :: String -> QName
qualifyMetadata = qualify (Just metadataNamespace) (Just "m")
-- |
-- An element in the Atom namespace with the provided attributes and child elements
--
atomElement :: String -> Maybe String -> [Attr] -> [Element] -> Element
atomElement name content attrs els =
  let childNodes = map Elem els
      -- An optional trailing text node built from the content string.
      textNodes  = case content of
                     Nothing  -> []
                     Just txt -> cDataText txt
  in blank_element { elName    = qualifyAtom name
                   , elAttribs = attrs
                   , elContent = childNodes ++ textNodes
                   }
-- |
-- An attribute in the Atom namespace
--
-- Build an attribute whose key lives in the Atom namespace.
atomAttr :: String -> String -> Attr
atomAttr name value =
  Attr { attrVal = value
       , attrKey = qualifyAtom name
       }
-- |
-- Create an Atom entry using the specified element as the content element
--
-- 'wrapContent' runs in IO only to stamp the entry with the current time
-- via 'atomDate'; everything else is pure XML construction.
wrapContent :: Maybe String -> Element -> IO Element
wrapContent entityID content = do
  date <- atomDate
  return $
    atomElement "entry" Nothing
      -- Declare the default (Atom), "d" and "m" namespaces on the entry.
      [ Attr { attrKey = unqual "xmlns", attrVal = atomNamespace }
      , namespaceAttr "d" dataServicesNamespace
      , namespaceAttr "m" metadataNamespace
      ]
      -- NOTE(review): the category term "clio.cookies" is hard-coded here;
      -- confirm whether table-specific entries should parameterise it.
      [ atomElement "category" Nothing
        [ atomAttr "scheme" "http://schemas.microsoft.com/ado/2007/08/dataservices/scheme"
        , atomAttr "term" "clio.cookies"
        ] []
      , atomElement "title" Nothing [] []
      , atomElement "author" Nothing []
        [ atomElement "name" Nothing [] [] ]
      , atomElement "updated" (Just date) [] []
      -- The id element is emitted verbatim (CDataRaw) so the caller-supplied
      -- entity id is not re-escaped; absent ids become the empty string.
      , blank_element
        { elName = qualifyAtom "id"
        , elAttribs = []
        , elContent = [Text CData { cdVerbatim = CDataRaw, cdData = fromMaybe "" entityID, cdLine = Nothing }]
        }
      , atomElement "content" Nothing
        [ atomAttr "type" "application/xml" ]
        [ content ]
      ]
] | paf31/tablestorage | src/Network/TableStorage/Atom.hs | bsd-3-clause | 2,878 | 0 | 16 | 618 | 660 | 376 | 284 | 59 | 1 |
-- | State monad for the linear register allocator.
-- Here we keep all the state that the register allocator keeps track
-- of as it walks the instructions in a basic block.
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module RegAlloc.Linear.State (
RA_State(..),
RegM,
runR,
spillR,
loadR,
getFreeRegsR,
setFreeRegsR,
getAssigR,
setAssigR,
getBlockAssigR,
setBlockAssigR,
setDeltaR,
getDeltaR,
getUniqueR,
recordSpill
)
where
import RegAlloc.Linear.Stats
import RegAlloc.Linear.StackMap
import RegAlloc.Linear.Base
import RegAlloc.Liveness
import Instruction
import Reg
import Platform
import Unique
import UniqSupply
-- | The RegM Monad
-- State is threaded through unboxed pairs (# s, a #) for speed; the inner
-- `s` in the case alternative deliberately shadows the lambda-bound one.
instance Monad (RegM freeRegs) where
  m >>= k = RegM $ \s -> case unReg m s of { (# s, a #) -> unReg (k a) s }
  return a = RegM $ \s -> (# s, a #)
-- | Run a computation in the RegM register allocator monad.
runR :: BlockAssignment freeRegs
        -> freeRegs
        -> RegMap Loc
        -> StackMap
        -> UniqSupply
        -> RegM freeRegs a
        -> (BlockAssignment freeRegs, StackMap, RegAllocStats, a)
runR block_assig freeregs assig stack us thing =
  -- Build the initial allocator state, run the computation, then pull the
  -- final block assignment and stack map (plus stats) out of the result.
  case unReg thing
        (RA_State
                { ra_blockassig = block_assig
                , ra_freeregs = freeregs
                , ra_assig = assig
                , ra_delta = 0{-???-}
                , ra_stack = stack
                , ra_us = us
                -- no spills recorded yet
                , ra_spills = [] })
   of
        (# state'@RA_State
                { ra_blockassig = block_assig
                , ra_stack = stack' }
         , returned_thing #)
         -> (block_assig, stack', makeRAStats state', returned_thing)
-- | Make register allocator stats from its final state.
-- | Make register allocator stats from its final state.
-- Only the recorded spill reasons feed into the stats.
makeRAStats :: RA_State freeRegs -> RegAllocStats
makeRAStats state
        = RegAllocStats
        { ra_spillInstrs = binSpillReasons (ra_spills state) }
-- | Allocate a stack slot for the given temporary and produce the spill
-- instruction that stores @reg@ into it, updating the stack map in state.
spillR :: Instruction instr
        => Platform -> Reg -> Unique -> RegM freeRegs (instr, Int)
spillR platform reg temp = RegM $ \ s@RA_State{ra_delta=delta, ra_stack=stack} ->
  let (stack',slot) = getStackSlotFor stack temp
      instr = mkSpillInstr platform reg delta slot
  in
  (# s{ra_stack=stack'}, (instr,slot) #)
-- | Produce the instruction that reloads @reg@ from the given stack slot.
-- Reads only the current stack delta; leaves the state unchanged.
loadR :: Instruction instr
      => Platform -> Reg -> Int -> RegM freeRegs instr
loadR platform reg slot = RegM $ \ s@RA_State{ra_delta=delta} ->
  (# s, mkLoadInstr platform reg delta slot #)
-- | Read the current set of free registers.
getFreeRegsR :: RegM freeRegs freeRegs
getFreeRegsR = RegM $ \ s@RA_State{ra_freeregs = freeregs} ->
  (# s, freeregs #)
-- | Replace the set of free registers.
setFreeRegsR :: freeRegs -> RegM freeRegs ()
setFreeRegsR regs = RegM $ \ s ->
  (# s{ra_freeregs = regs}, () #)
-- | Read the current virtual-register-to-location assignment.
getAssigR :: RegM freeRegs (RegMap Loc)
getAssigR = RegM $ \ s@RA_State{ra_assig = assig} ->
  (# s, assig #)
-- | Replace the virtual-register-to-location assignment.
setAssigR :: RegMap Loc -> RegM freeRegs ()
setAssigR assig = RegM $ \ s ->
  (# s{ra_assig=assig}, () #)
-- | Read the per-block assignment map.
getBlockAssigR :: RegM freeRegs (BlockAssignment freeRegs)
getBlockAssigR = RegM $ \ s@RA_State{ra_blockassig = assig} ->
  (# s, assig #)
-- | Replace the per-block assignment map.
setBlockAssigR :: BlockAssignment freeRegs -> RegM freeRegs ()
setBlockAssigR assig = RegM $ \ s ->
  (# s{ra_blockassig = assig}, () #)
-- | Set the current stack delta.
setDeltaR :: Int -> RegM freeRegs ()
setDeltaR n = RegM $ \ s ->
  (# s{ra_delta = n}, () #)
-- | Read the current stack delta.
getDeltaR :: RegM freeRegs Int
getDeltaR = RegM $ \s -> (# s, ra_delta s #)
-- | Draw a fresh unique from the supply held in the state.
getUniqueR :: RegM freeRegs Unique
getUniqueR = RegM $ \s ->
  case takeUniqFromSupply (ra_us s) of
    (uniq, us) -> (# s{ra_us = us}, uniq #)
-- | Record that a spill instruction was inserted, for profiling.
recordSpill :: SpillReason -> RegM freeRegs ()
recordSpill spill
        = RegM $ \s -> (# s { ra_spills = spill : ra_spills s}, () #)
| nomeata/ghc | compiler/nativeGen/RegAlloc/Linear/State.hs | bsd-3-clause | 3,775 | 149 | 20 | 754 | 1,164 | 649 | 515 | -1 | -1 |
module Data.Persist.Compile (compile) where
import Data.Persist.AST
import Language.Haskell.Exts.Syntax
import Data.Either (partitionEithers)
-- | Compile parsed model declarations and relationships into a complete
-- @Model@ module, including the language pragmas and imports the
-- generated code needs.
compile :: [Either Decl Relationship] -> Module
compile input = Module noLoc (ModuleName "Model") pragmas Nothing Nothing imports (compileDecls input)
  where imports = map mkImport ["Data.Persist.Interface", "Generics.Regular"]
        pragmas = [LanguagePragma noLoc $ map Ident ["TemplateHaskell", "EmptyDataDecls", "TypeFamilies"]]
        -- A plain, unqualified import with no import list.
        mkImport nm = ImportDecl noLoc (ModuleName nm) False False Nothing Nothing Nothing
-- | Generate the body of the model module: the original data declarations,
-- Regular-generics derivations, relationship values, per-entity create
-- methods (which see relationships in both directions), and the schema
-- creation action.
compileDecls :: [Either Decl Relationship] -> [Decl]
compileDecls input = let (decls, relationships) = partitionEithers input
                         relationshipsBothDirections = relationships ++ (map reverseRelationship relationships)
                         dbClass = UnQual (Ident "Persistent")
                     in concat [ decls
                               , concatMap derivingRegular decls
                               , concatMap (compileRelationship decls) relationships
                               , concatMap (createMethod dbClass relationshipsBothDirections) decls
                               , createSchema dbClass decls relationships
                               ]
-- | Generate a top-level value for one relationship: a signature of type
-- @Relation from to@ plus a binding @name = Relation "name"@.
-- The declaration list argument is currently unused.
compileRelationship :: [Decl] -> Relationship -> [Decl]
compileRelationship _ r = [ TypeSig noLoc [funName] (relType `TyApp` from `TyApp` to)
                          , FunBind [Match noLoc funName [] Nothing rhs (BDecls [])]
                          ]
  where funName = Ident $ relName r
        rhs = UnGuardedRhs $ Con (UnQual (Ident "Relation")) `App` (Lit $ String $ relName r)
        from = TyCon (UnQual (Ident (relFromName r)))
        to = TyCon (UnQual (Ident (relToName r)))
-- | Generate a @createX@ helper for one entity declaration: a type
-- signature plus a binding.  The generated function takes the entity
-- value followed by one @Ref@ argument per to-one relationship that
-- starts at this entity, creates the entity, links every relation, and
-- returns the new reference.
createMethod :: QName -> [Relationship] -> Decl -> [Decl]
createMethod dbClass rs d =
  [ TypeSig noLoc [funName] (TyForall Nothing ctx $ TyFun (typ d) (f (TyApp monad (TyApp refType (typ d)))))
  , FunBind [Match noLoc funName ((PVar (Ident "value")):(map (PVar . Ident . snd) relArgs)) Nothing (UnGuardedRhs (rhs relArgs)) (BDecls [])]
  ]
 where ctx          = [ClassA dbClass [monad]]
       monad        = TyVar (Ident "db")
       funName      = Ident $ "create" ++ datatypeName
       -- Pair each involved relationship with a fresh argument name x1, x2, ...
       relArgs      = zip rels (map relVarName [1..])
       relVarName x = "x" ++ show x
       datatypeName = name d
       rels         = involvedRelationships datatypeName rs
       f            = relationshipsToFun rels
       -- Body: create the entity, add one relation per argument, return
       -- the fresh reference.  (Parameter renamed from relArgs to avoid
       -- shadowing the outer binding.)
       rhs args = Do $ concat
         [ [ Generator noLoc (PVar (Ident "i")) (App (var "create_") (var "value")) ]
         , concatMap relCreate args
         , [ Qualifier $ App (var "return") (var "i") ]
         ]
        where relCreate :: (Relationship, String) -> [Stmt]
              relCreate (r,s) = [ addRelation r s ]
              -- Forward relations link i -> argument; reversed relations
              -- link argument -> i and must name the relation explicitly.
              addRelation r s | not (reversed r) = Qualifier $ App (App (var "addRelation") (var "i")) (var s)
                              | otherwise = Qualifier $ var "addRelation" `App` var s `App` var "i" `App` var (relName r)
-- | Generate the @createSchema@ action: a signature @createSchema :: db ()@
-- (constrained by the persistence class) plus a binding whose body creates
-- one schema entry per entity declaration and one per relationship.
createSchema :: QName -> [Decl] -> [Relationship] -> [Decl]
createSchema dbClass decls rels =
  [ TypeSig noLoc [funName] (TyForall Nothing ctx $ monad `TyApp` unit_tycon)
  , FunBind [Match noLoc funName [] Nothing (UnGuardedRhs schemaRhs) (BDecls []) ]
  ]
 where ctx     = [ClassA dbClass [monad]]
       monad   = TyVar (Ident "db")
       -- Uses the outer decls/rels directly; the previous local parameters
       -- merely shadowed them.
       schemaRhs = Do (map entSchema decls ++ map relSchema rels)
       funName = Ident "createSchema"
       -- The entity's type is attached via a signature on `undefined` so
       -- createSchemaEntity_ can dispatch on it.
       entSchema ent = Qualifier $ App (var "createSchemaEntity_") (ExpTypeSig noLoc (var "undefined") (TyCon (UnQual (Ident $ name ent))))
       relSchema rel = Qualifier $ App (var "createSchemaRelationship_") (var $ relName rel)
-- | Build an unqualified variable expression from a plain name.
var :: String -> Exp
var nm = Var (UnQual (Ident nm))
-- | Turn a list of relationships into a type transformer that prepends
-- one @Ref target ->@ argument per relationship to a result type.
relationshipsToFun :: [Relationship] -> (Type -> Type)
relationshipsToFun [] = id
relationshipsToFun (x:xs) = TyFun (TyApp refType $ TyCon (UnQual (Ident (relToName x)))) . relationshipsToFun xs
-- | Generate the Regular-generics boilerplate for one data declaration:
-- a Template Haskell splice @$(deriveAll ''T "PFT")@ and the associated
-- type-family instance @type instance PF T = PFT@.
derivingRegular :: Decl -> [Decl]
derivingRegular x = [ SpliceDecl noLoc $ SpliceExp $ ParenSplice $ var "deriveAll" `App` TypQuote typeName `App` (Lit $ String pfName)
                    , TypeInsDecl noLoc (TyCon pFType `TyApp` TyCon typeName) (TyCon $ UnQual $ Ident pfName)
                    ]
  where nm = name x
        pfName = "PF" ++ nm
        typeName = UnQual $ Ident nm
-- | The to-one relationships whose source entity matches the given name.
involvedRelationships :: String -> [Relationship] -> [Relationship]
involvedRelationships d rs = [ r | r <- rs, relFromName r == d, isToOne r ]
-- | The type constructor named by a data declaration.
-- Partial: errors on anything but a 'DataDecl'.
typ :: Decl -> Type
typ (DataDecl _ _ _ nm _ _ _) = TyCon (UnQual nm)
typ _ = error "Compile.typ"
-- | The @PF@ type-family constructor used by Regular generics.
pFType :: QName
pFType = (UnQual (Ident "PF"))
-- | The @Relation@ type constructor.
relType :: Type
relType = TyCon (UnQual (Ident "Relation"))
-- | The @Ref@ type constructor.
refType :: Type
refType = TyCon (UnQual (Ident "Ref"))
-- | The name of a data declaration.
-- Partial: errors (with the offending declaration) on anything else.
name :: Decl -> String
name (DataDecl _ _ _ (Ident nm) _ _ _) = nm
name s = error $ "Compile.name" ++ show s
-- | A dummy source location for generated syntax.
noLoc :: SrcLoc
noLoc = SrcLoc "" 0 0
| chriseidhof/persist | src/Data/Persist/Compile.hs | bsd-3-clause | 4,882 | 0 | 17 | 1,266 | 1,804 | 932 | 872 | 82 | 1 |
{-- snippet all --}
import System.IO
import System.Directory(getTemporaryDirectory, removeFile)
import System.IO.Error(catch)
import Control.Exception(finally)
-- The main entry point. Work with a temp file in myAction.
-- | Entry point: run 'myAction' against a fresh temporary file whose
-- name is derived from the pattern "mytemp.txt".
main :: IO ()
main = withTempFile "mytemp.txt" myAction
{- The guts of the program. Called with the path and handle of a temporary
file. When this function exits, that file will be closed and deleted
because myAction was called from withTempFile. -}
-- | Demonstration body: write a list to the temp file, then seek back and
-- read it out again, reporting handle positions along the way.
myAction :: FilePath -> Handle -> IO ()
myAction tempname temph =
    do -- Start by displaying a greeting on the terminal
       putStrLn "Welcome to tempfile.hs"
       putStrLn $ "I have a temporary file at " ++ tempname
       -- Let's see what the initial position is
       pos <- hTell temph
       putStrLn $ "My initial position is " ++ show pos
       -- Now, write some data to the temporary file
       let tempdata = show [1..10]
       putStrLn $ "Writing one line containing " ++
                  show (length tempdata) ++ " bytes: " ++
                  tempdata
       hPutStrLn temph tempdata
       -- Get our new position. This doesn't actually modify pos
       -- in memory, but makes the name "pos" correspond to a different
       -- value for the remainder of the "do" block.
       pos <- hTell temph
       putStrLn $ "After writing, my new position is " ++ show pos
       -- Seek to the beginning of the file and display it
       putStrLn $ "The file content is: "
       hSeek temph AbsoluteSeek 0
       -- hGetContents performs a lazy read of the entire file
       -- (NOTE(review): after this the handle is owned by the lazy read;
       -- further explicit reads from temph would be unsafe.)
       c <- hGetContents temph
       -- Copy the file byte-for-byte to stdout, followed by \n
       putStrLn c
       -- Let's also display it as a Haskell literal
       putStrLn $ "Which could be expressed as this Haskell literal:"
       print c
{- This function takes two parameters: a filename pattern and another
function. It will create a temporary file, and pass the name and Handle
of that file to the given function.
The temporary file is created with openTempFile. The directory is the one
indicated by getTemporaryDirectory, or, if the system has no notion of
a temporary directory, "." is used. The given pattern is passed to
openTempFile.
After the given function terminates, even if it terminates due to an
exception, the Handle is closed and the file is deleted. -}
withTempFile :: String -> (FilePath -> Handle -> IO a) -> IO a
withTempFile pattern func =
    do -- The library ref says that getTemporaryDirectory may raise on
       -- exception on systems that have no notion of a temporary directory.
       -- So, we run getTemporaryDirectory under catch. catch takes
       -- two functions: one to run, and a different one to run if the
       -- first raised an exception. If getTemporaryDirectory raised an
       -- exception, just use "." (the current working directory).
       -- NOTE(review): this is System.IO.Error.catch, which handles
       -- IOErrors only (and is deprecated in later base versions) --
       -- confirm against the project's supported GHC range.
       tempdir <- catch (getTemporaryDirectory) (\_ -> return ".")
       (tempfile, temph) <- openTempFile tempdir pattern
       -- Call (func tempfile temph) to perform the action on the temporary
       -- file. finally takes two actions. The first is the action to run.
       -- The second is an action to run after the first, regardless of
       -- whether the first action raised an exception. This way, we ensure
       -- the temporary file is always deleted. The return value from finally
       -- is the first action's return value.
       finally (func tempfile temph)
               (do hClose temph
                   removeFile tempfile)
{-- /snippet all --}
| binesiyu/ifl | examples/ch07/tempfile.hs | mit | 3,603 | 0 | 12 | 950 | 394 | 199 | 195 | 34 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
<title>Import Urls | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | secdec/zap-extensions | addOns/importurls/src/main/javahelp/org/zaproxy/zap/extension/importurls/resources/help_sr_SP/helpset_sr_SP.hs | apache-2.0 | 972 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ur-PK">
<title>Export Report | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/exportreport/src/main/javahelp/org/zaproxy/zap/extension/exportreport/resources/help_ur_PK/helpset_ur_PK.hs | apache-2.0 | 975 | 80 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.EwmhDesktops
-- Copyright : (c) 2007, 2008 Joachim Breitner <[email protected]>
-- License : BSD
--
-- Maintainer : Joachim Breitner <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- Makes xmonad use the EWMH hints to tell panel applications about its
-- workspaces and the windows therein. It also allows the user to interact
-- with xmonad by clicking on panels and window lists.
-----------------------------------------------------------------------------
module XMonad.Hooks.EwmhDesktops (
-- * Usage
-- $usage
ewmh,
ewmhDesktopsStartup,
ewmhDesktopsLogHook,
ewmhDesktopsLogHookCustom,
ewmhDesktopsEventHook,
ewmhDesktopsEventHookCustom,
fullscreenEventHook
) where
import Codec.Binary.UTF8.String (encode)
import Data.List
import Data.Maybe
import Data.Monoid
import XMonad
import Control.Monad
import qualified XMonad.StackSet as W
import XMonad.Hooks.SetWMName
import XMonad.Util.XUtils (fi)
import XMonad.Util.WorkspaceCompare
import XMonad.Util.WindowProperties (getProp32)
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad
-- > import XMonad.Hooks.EwmhDesktops
-- >
-- > main = xmonad $ ewmh def{ handleEventHook =
-- > handleEventHook def <+> fullscreenEventHook }
--
-- You may also be interested in 'avoidStruts' from "XMonad.Hooks.ManageDocks".
-- | Add EWMH functionality to the given config. See above for an example.
-- | Add EWMH functionality to the given config. See above for an example.
-- Each EWMH hook is appended (monoidally) after the user's existing hook.
ewmh :: XConfig a -> XConfig a
ewmh c = c
    { startupHook     = startupHook c     `mappend` ewmhDesktopsStartup
    , handleEventHook = handleEventHook c `mappend` ewmhDesktopsEventHook
    , logHook         = logHook c         `mappend` ewmhDesktopsLogHook
    }
-- |
-- Initializes EwmhDesktops and advertises EWMH support to the X
-- server
ewmhDesktopsStartup :: X ()
-- Advertising support amounts to publishing _NET_SUPPORTED on the root.
ewmhDesktopsStartup = setSupported
-- |
-- Notifies pagers and window lists, such as those in the gnome-panel
-- of the current state of workspaces and windows.
ewmhDesktopsLogHook :: X ()
-- The default log hook uses the workspace list unmodified.
ewmhDesktopsLogHook = ewmhDesktopsLogHookCustom id
-- |
-- Generalized version of ewmhDesktopsLogHook that allows an arbitrary
-- user-specified function to transform the workspace list (post-sorting)
-- | Publish workspace count/names, the client list, the current desktop
-- index, and a per-window desktop index as EWMH root-window properties.
-- @f@ transforms the sorted workspace list before anything is published.
ewmhDesktopsLogHookCustom :: ([WindowSpace] -> [WindowSpace]) -> X ()
ewmhDesktopsLogHookCustom f = withWindowSet $ \s -> do
    sort' <- getSortByIndex
    let ws = f $ sort' $ W.workspaces s
    -- Number of Workspaces
    setNumberOfDesktops (length ws)
    -- Names thereof
    setDesktopNames (map W.tag ws)
    -- all windows, with focused windows last
    let wins = nub . concatMap (maybe [] (\(W.Stack x l r)-> reverse l ++ r ++ [x]) . W.stack) $ ws
    setClientList wins
    -- Current desktop (skipped entirely if the current tag was filtered
    -- out of ws by f)
    case (elemIndex (W.currentTag s) $ map W.tag ws) of
      Nothing -> return ()
      Just curr -> do
          setCurrentDesktop curr
          -- Per window Desktop
          -- To make gnome-panel accept our xinerama stuff, we display
          -- all visible windows on the current desktop.
          forM_ (W.current s : W.visible s) $ \x ->
              forM_ (W.integrate' (W.stack (W.workspace x))) $ \win -> do
                  setWindowDesktop win curr
    -- Windows on hidden workspaces get their real workspace index.
    forM_ (W.hidden s) $ \w ->
        case elemIndex (W.tag w) (map W.tag ws) of
          Nothing -> return ()
          Just wn -> forM_ (W.integrate' (W.stack w)) $ \win -> do
                         setWindowDesktop win wn
    setActiveWindow
    return ()
-- |
-- Intercepts messages from pagers and similar applications and reacts on them.
-- Currently supports:
--
-- * _NET_CURRENT_DESKTOP (switching desktops)
--
-- * _NET_WM_DESKTOP (move windows to other desktops)
--
-- * _NET_ACTIVE_WINDOW (activate another window, changing workspace if needed)
ewmhDesktopsEventHook :: Event -> X All
-- The default event hook uses the workspace list unmodified.
ewmhDesktopsEventHook = ewmhDesktopsEventHookCustom id
-- |
-- Generalized version of ewmhDesktopsEventHook that allows an arbitrary
-- user-specified function to transform the workspace list (post-sorting)
ewmhDesktopsEventHookCustom :: ([WindowSpace] -> [WindowSpace]) -> Event -> X All
-- Always returns (All True) so later event hooks still run.
ewmhDesktopsEventHookCustom f e = handle f e >> return (All True)
-- Dispatch on the message-type atom of a ClientMessage event: switch
-- desktop, move a window to a desktop, focus a window, or close one.
-- Non-ClientMessage events fall through to the final equation.
handle :: ([WindowSpace] -> [WindowSpace]) -> Event -> X ()
handle f (ClientMessageEvent {
               ev_window = w,
               ev_message_type = mt,
               ev_data = d
       }) = withWindowSet $ \s -> do
       sort' <- getSortByIndex
       let ws = f $ sort' $ W.workspaces s
       a_cd <- getAtom "_NET_CURRENT_DESKTOP"
       a_d <- getAtom "_NET_WM_DESKTOP"
       a_aw <- getAtom "_NET_ACTIVE_WINDOW"
       a_cw <- getAtom "_NET_CLOSE_WINDOW"
       a_ignore <- mapM getAtom ["XMONAD_TIMER"]
       -- NOTE(review): `head d` assumes the message carries at least one
       -- data word; a malformed client message with empty data would
       -- crash here. Confirm whether the X layer guarantees 5 data words.
       if mt == a_cd then do
               let n = head d
               if 0 <= n && fi n < length ws then
                       windows $ W.view (W.tag (ws !! fi n))
                 else trace $ "Bad _NET_CURRENT_DESKTOP with data[0]="++show n
        else if mt == a_d then do
               let n = head d
               if 0 <= n && fi n < length ws then
                       windows $ W.shiftWin (W.tag (ws !! fi n)) w
                 else trace $ "Bad _NET_DESKTOP with data[0]="++show n
        else if mt == a_aw then do
               windows $ W.focusWindow w
        else if mt == a_cw then do
               killWindow w
        else if mt `elem` a_ignore then do
               return ()
        else do
               -- The Message is unknown to us, but that is ok, not all are meant
               -- to be handled by the window manager
               return ()
handle _ _ = return ()
-- |
-- An event hook to handle applications that wish to fullscreen using the
-- _NET_WM_STATE protocol. This includes users of the gtk_window_fullscreen()
-- function, such as Totem, Evince and OpenOffice.org.
--
-- Note this is not included in 'ewmh'.
-- Handles _NET_WM_STATE client messages for the fullscreen state: adds or
-- removes _NET_WM_STATE_FULLSCREEN on the window and floats/sinks it
-- accordingly. Always lets subsequent hooks run (All True).
fullscreenEventHook :: Event -> X All
fullscreenEventHook (ClientMessageEvent _ _ _ dpy win typ (action:dats)) = do
  wmstate <- getAtom "_NET_WM_STATE"
  fullsc <- getAtom "_NET_WM_STATE_FULLSCREEN"
  wstate <- fromMaybe [] `fmap` getProp32 wmstate win
  let isFull = fromIntegral fullsc `elem` wstate
      -- Constants for the _NET_WM_STATE protocol:
      remove = 0
      add = 1
      toggle = 2
      ptype = 4 -- The atom property type for changeProperty
      chWstate f = io $ changeProperty32 dpy win wmstate ptype propModeReplace (f wstate)
  -- Only act when the message really is _NET_WM_STATE and one of its two
  -- state atoms is the fullscreen atom.
  when (typ == wmstate && fi fullsc `elem` dats) $ do
    when (action == add || (action == toggle && not isFull)) $ do
      chWstate (fi fullsc:)
      -- Float the window over the whole screen.
      windows $ W.float win $ W.RationalRect 0 0 1 1
    when (action == remove || (action == toggle && isFull)) $ do
      chWstate $ delete (fi fullsc)
      windows $ W.sink win
  return $ All True
fullscreenEventHook _ = return $ All True
-- Publish the workspace count on the root window as
-- _NET_NUMBER_OF_DESKTOPS (type CARDINAL).
setNumberOfDesktops :: (Integral a) => a -> X ()
setNumberOfDesktops n = withDisplay $ \dpy -> do
    root <- asks theRoot
    prop <- getAtom "_NET_NUMBER_OF_DESKTOPS"
    card <- getAtom "CARDINAL"
    io $ changeProperty32 dpy root prop card propModeReplace [fromIntegral n]
-- Publish the current desktop index on the root window as
-- _NET_CURRENT_DESKTOP (type CARDINAL).
setCurrentDesktop :: (Integral a) => a -> X ()
setCurrentDesktop i = withDisplay $ \dpy -> do
    root <- asks theRoot
    prop <- getAtom "_NET_CURRENT_DESKTOP"
    card <- getAtom "CARDINAL"
    io $ changeProperty32 dpy root prop card propModeReplace [fromIntegral i]
-- Publish workspace names as _NET_DESKTOP_NAMES: a UTF8_STRING property
-- holding each name UTF-8 encoded and NUL-terminated.
setDesktopNames :: [String] -> X ()
setDesktopNames names = withDisplay $ \dpy -> do
    -- Names thereof
    r <- asks theRoot
    a <- getAtom "_NET_DESKTOP_NAMES"
    c <- getAtom "UTF8_STRING"
    let names' = map fromIntegral $ concatMap ((++[0]) . encode) names
    io $ changeProperty8 dpy r a c propModeReplace names'
-- Publish the managed-window list on the root window, in the same order
-- for both _NET_CLIENT_LIST and _NET_CLIENT_LIST_STACKING.
setClientList :: [Window] -> X ()
setClientList wins = withDisplay $ \dpy -> do
    -- (What order do we really need? Something about age and stacking)
    r <- asks theRoot
    c <- getAtom "WINDOW"
    a <- getAtom "_NET_CLIENT_LIST"
    io $ changeProperty32 dpy r a c propModeReplace (fmap fromIntegral wins)
    a' <- getAtom "_NET_CLIENT_LIST_STACKING"
    io $ changeProperty32 dpy r a' c propModeReplace (fmap fromIntegral wins)
-- Tag one window with its desktop index via the _NET_WM_DESKTOP
-- property (type CARDINAL) on that window.
setWindowDesktop :: (Integral a) => Window -> a -> X ()
setWindowDesktop win i = withDisplay $ \dpy -> do
    prop <- getAtom "_NET_WM_DESKTOP"
    card <- getAtom "CARDINAL"
    io $ changeProperty32 dpy win prop card propModeReplace [fromIntegral i]
-- Advertise the EWMH hints this module implements via _NET_SUPPORTED on
-- the root window, and set the WM name to "xmonad".
setSupported :: X ()
setSupported = withDisplay $ \dpy -> do
    r <- asks theRoot
    a <- getAtom "_NET_SUPPORTED"
    c <- getAtom "ATOM"
    supp <- mapM getAtom ["_NET_WM_STATE_HIDDEN"
                         ,"_NET_NUMBER_OF_DESKTOPS"
                         ,"_NET_CLIENT_LIST"
                         ,"_NET_CLIENT_LIST_STACKING"
                         ,"_NET_CURRENT_DESKTOP"
                         ,"_NET_DESKTOP_NAMES"
                         ,"_NET_ACTIVE_WINDOW"
                         ,"_NET_WM_DESKTOP"
                         ,"_NET_WM_STRUT"
                         ]
    io $ changeProperty32 dpy r a c propModeReplace (fmap fromIntegral supp)
    setWMName "xmonad"
-- Publish the focused window (or `none` when no window is focused) as
-- _NET_ACTIVE_WINDOW on the root window.
setActiveWindow :: X ()
setActiveWindow = withWindowSet $ \s -> withDisplay $ \dpy -> do
    let w = fromMaybe none (W.peek s)
    r <- asks theRoot
    a <- getAtom "_NET_ACTIVE_WINDOW"
    c <- getAtom "WINDOW"
    io $ changeProperty32 dpy r a c propModeReplace [fromIntegral w]
| eb-gh-cr/XMonadContrib1 | XMonad/Hooks/EwmhDesktops.hs | bsd-3-clause | 9,444 | 0 | 24 | 2,420 | 2,344 | 1,180 | 1,164 | 161 | 8 |
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes (module PointTypes
,decodePTID
,fetchPointType
,pt0
,pt1
,pt2
,pt3
,pt4
,pt5
,pt6
,pt7
,pt8
,pt9
,pt10
,pt12
,pt13
,pt14
,pt15
,pt16
,pt17
,pt18
,pt19
,pt20
,pt21
,pt40
,pt41
,pt42
,pt43
,pt44
,pt45
,pt46
,pt47
,pt48
,pt52
,pt53
,pt54
,pt55
,pt56
,pt57
,pt58
,pt59
,pt80
,pt81
,pt85
,pt86
,pt88
,pt89
,pt93
,pt94
,pt98
,pt117
,pt118
,pt120
,pt121
,pt122
,pt172
,pt173
,pt174
,pt175
,pt176
,pt177
,PointTypes (..)
) where
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Lazy.Char8 as C8
import Data.Word
import Data.Binary.Get
import Protocol.ROC.PointTypes.PointType0 as PointTypes
import Protocol.ROC.PointTypes.PointType1 as PointTypes
import Protocol.ROC.PointTypes.PointType2 as PointTYpes
import Protocol.ROC.PointTypes.PointType3 as PointTYpes
import Protocol.ROC.PointTypes.PointType4 as PointTYpes
import Protocol.ROC.PointTypes.PointType5 as PointTypes
import Protocol.ROC.PointTypes.PointType6 as PointTypes
import Protocol.ROC.PointTypes.PointType7 as PointTypes
import Protocol.ROC.PointTypes.PointType8 as PointTypes
import Protocol.ROC.PointTypes.PointType9 as PointTypes
import Protocol.ROC.PointTypes.PointType10 as PointTypes
import Protocol.ROC.PointTypes.PointType12 as PointTYpes
import Protocol.ROC.PointTypes.PointType13 as PointTYpes
import Protocol.ROC.PointTypes.PointType14 as PointTypes
import Protocol.ROC.PointTypes.PointType15 as PointTypes
import Protocol.ROC.PointTypes.PointType16 as PointTypes
import Protocol.ROC.PointTypes.PointType17 as PointTypes
import Protocol.ROC.PointTypes.PointType18 as PointTypes
import Protocol.ROC.PointTypes.PointType19 as PointTypes
import Protocol.ROC.PointTypes.PointType20 as PointTYpes
import Protocol.ROC.PointTypes.PointType21 as PointTYpes
import Protocol.ROC.PointTypes.PointType40 as PointTYpes
import Protocol.ROC.PointTypes.PointType41 as PointTypes
import Protocol.ROC.PointTypes.PointType42 as PointTypes
import Protocol.ROC.PointTypes.PointType43 as PointTypes
import Protocol.ROC.PointTypes.PointType44 as PointTYpes
import Protocol.ROC.PointTypes.PointType45 as PointTypes
import Protocol.ROC.PointTypes.PointType46 as PointTypes
import Protocol.ROC.PointTypes.PointType47 as PointTypes
import Protocol.ROC.PointTypes.PointType48 as PointTYpes
import Protocol.ROC.PointTypes.PointType52 as PointTypes
import Protocol.ROC.PointTypes.PointType53 as PointTypes
import Protocol.ROC.PointTypes.PointType54 as PointTypes
import Protocol.ROC.PointTypes.PointType55 as PointTypes
import Protocol.ROC.PointTypes.PointType56 as PointTypes
import Protocol.ROC.PointTypes.PointType57 as PointTypes
import Protocol.ROC.PointTypes.PointType58 as PointTypes
import Protocol.ROC.PointTypes.PointType59 as PointTypes
import Protocol.ROC.PointTypes.PointType80 as PointTypes
import Protocol.ROC.PointTypes.PointType81 as PointTYpes
import Protocol.ROC.PointTypes.PointType85 as PointTypes
import Protocol.ROC.PointTypes.PointType86 as PointTypes
import Protocol.ROC.PointTypes.PointType88 as PointTypes
import Protocol.ROC.PointTypes.PointType89 as PointTypes
import Protocol.ROC.PointTypes.PointType93 as PointTypes
import Protocol.ROC.PointTypes.PointType94 as PointTypes
import Protocol.ROC.PointTypes.PointType98 as PointTypes
import Protocol.ROC.PointTypes.PointType117 as PointTypes
import Protocol.ROC.PointTypes.PointType118 as PointTypes
import Protocol.ROC.PointTypes.PointType120 as PointTypes
import Protocol.ROC.PointTypes.PointType121 as PointTypes
import Protocol.ROC.PointTypes.PointType122 as PointTypes
import Protocol.ROC.PointTypes.PointType172 as PointTypes
import Protocol.ROC.PointTypes.PointType173 as PointTypes
import Protocol.ROC.PointTypes.PointType174 as PointTypes
import Protocol.ROC.PointTypes.PointType175 as PointTypes
import Protocol.ROC.PointTypes.PointType176 as PointTypes
import Protocol.ROC.PointTypes.PointType177 as PointTypes
-- | One constructor per supported ROC point type.  Each @PTIDn@ wraps
-- @Either a PointTypen@: 'Left' carries a placeholder or error value of
-- type @a@ (e.g. @()@ before any data is fetched, or an error message
-- after a failed parse), 'Right' carries the decoded point-type record.
-- The numeric suffix is the wire-level point-type id (see 'decodePTID').
data PointTypes a = PTID0 (Either a PointType0)
                  | PTID1 (Either a PointType1)
                  | PTID2 (Either a PointType2)
                  | PTID3 (Either a PointType3)
                  | PTID4 (Either a PointType4)
                  | PTID5 (Either a PointType5)
                  | PTID6 (Either a PointType6)
                  | PTID7 (Either a PointType7)
                  | PTID8 (Either a PointType8)
                  | PTID9 (Either a PointType9)
                  | PTID10 (Either a PointType10)
                  | PTID12 (Either a PointType12)
                  | PTID13 (Either a PointType13)
                  | PTID14 (Either a PointType14)
                  | PTID15 (Either a PointType15)
                  | PTID16 (Either a PointType16)
                  | PTID17 (Either a PointType17)
                  | PTID18 (Either a PointType18)
                  | PTID19 (Either a PointType19)
                  | PTID20 (Either a PointType20)
                  | PTID21 (Either a PointType21)
                  | PTID40 (Either a PointType40)
                  | PTID41 (Either a PointType41)
                  | PTID42 (Either a PointType42)
                  | PTID43 (Either a PointType43)
                  | PTID44 (Either a PointType44)
                  | PTID45 (Either a PointType45)
                  | PTID46 (Either a PointType46)
                  | PTID47 (Either a PointType47)
                  | PTID48 (Either a PointType48)
                  | PTID52 (Either a PointType52)
                  | PTID53 (Either a PointType53)
                  | PTID54 (Either a PointType54)
                  | PTID55 (Either a PointType55)
                  | PTID56 (Either a PointType56)
                  | PTID57 (Either a PointType57)
                  | PTID58 (Either a PointType58)
                  | PTID59 (Either a PointType59)
                  | PTID80 (Either a PointType80)
                  | PTID81 (Either a PointType81)
                  | PTID85 (Either a PointType85)
                  | PTID86 (Either a PointType86)
                  | PTID88 (Either a PointType88)
                  | PTID89 (Either a PointType89)
                  | PTID93 (Either a PointType93)
                  | PTID94 (Either a PointType94)
                  | PTID98 (Either a PointType98)
                  | PTID117 (Either a PointType117)
                  | PTID118 (Either a PointType118)
                  | PTID120 (Either a PointType120)
                  | PTID121 (Either a PointType121)
                  | PTID122 (Either a PointType122)
                  | PTID172 (Either a PointType172)
                  | PTID173 (Either a PointType173)
                  | PTID174 (Either a PointType174)
                  | PTID175 (Either a PointType175)
                  | PTID176 (Either a PointType176)
                  | PTID177 (Either a PointType177)
                  deriving (Eq,Show)
-- | Placeholder selectors: @ptN@ is the 'PointTypes' tag for point type N
-- with no data fetched yet (@'Left' ()@).  Pass one of these to
-- 'fetchPointType' to select which binary parser to run.
pt0 :: PointTypes ()
pt0 = PTID0 $ Left ()
pt1 :: PointTypes ()
pt1 = PTID1 $ Left ()
pt2 :: PointTypes ()
pt2 = PTID2 $ Left ()
pt3 :: PointTypes ()
pt3 = PTID3 $ Left ()
pt4 :: PointTypes ()
pt4 = PTID4 $ Left ()
pt5 :: PointTypes ()
pt5 = PTID5 $ Left ()
pt6 :: PointTypes ()
pt6 = PTID6 $ Left ()
pt7 :: PointTypes ()
pt7 = PTID7 $ Left ()
pt8 :: PointTypes ()
pt8 = PTID8 $ Left ()
pt9 :: PointTypes ()
pt9 = PTID9 $ Left ()
pt10 :: PointTypes ()
pt10 = PTID10 $ Left ()
pt12 :: PointTypes ()
pt12 = PTID12 $ Left ()
pt13 :: PointTypes ()
pt13 = PTID13 $ Left ()
pt14 :: PointTypes ()
pt14 = PTID14 $ Left ()
pt15 :: PointTypes ()
pt15 = PTID15 $ Left ()
pt16 :: PointTypes ()
pt16 = PTID16 $ Left ()
pt17 :: PointTypes ()
pt17 = PTID17 $ Left ()
pt18 :: PointTypes ()
pt18 = PTID18 $ Left ()
pt19 :: PointTypes ()
pt19 = PTID19 $ Left ()
pt20 :: PointTypes ()
pt20 = PTID20 $ Left ()
pt21 :: PointTypes ()
pt21 = PTID21 $ Left ()
pt40 :: PointTypes ()
pt40 = PTID40 $ Left ()
pt41 :: PointTypes ()
pt41 = PTID41 $ Left ()
pt42 :: PointTypes ()
pt42 = PTID42 $ Left ()
pt43 :: PointTypes ()
pt43 = PTID43 $ Left ()
pt44 :: PointTypes ()
pt44 = PTID44 $ Left ()
pt45 :: PointTypes ()
pt45 = PTID45 $ Left ()
pt46 :: PointTypes ()
pt46 = PTID46 $ Left ()
pt47 :: PointTypes ()
pt47 = PTID47 $ Left ()
pt48 :: PointTypes ()
pt48 = PTID48 $ Left ()
pt52 :: PointTypes ()
pt52 = PTID52 $ Left ()
pt53 :: PointTypes ()
pt53 = PTID53 $ Left ()
pt54 :: PointTypes ()
pt54 = PTID54 $ Left ()
pt55 :: PointTypes ()
pt55 = PTID55 $ Left ()
pt56 :: PointTypes ()
pt56 = PTID56 $ Left ()
pt57 :: PointTypes ()
pt57 = PTID57 $ Left ()
pt58 :: PointTypes ()
pt58 = PTID58 $ Left ()
pt59 :: PointTypes ()
pt59 = PTID59 $ Left ()
pt80 :: PointTypes ()
pt80 = PTID80 $ Left ()
pt81 :: PointTypes ()
pt81 = PTID81 $ Left ()
pt85 :: PointTypes ()
pt85 = PTID85 $ Left ()
pt86 :: PointTypes ()
pt86 = PTID86 $ Left ()
pt88 :: PointTypes ()
pt88 = PTID88 $ Left ()
pt89 :: PointTypes ()
pt89 = PTID89 $ Left ()
pt93 :: PointTypes ()
pt93 = PTID93 $ Left ()
pt94 :: PointTypes ()
pt94 = PTID94 $ Left ()
pt98 :: PointTypes ()
pt98 = PTID98 $ Left ()
pt117 :: PointTypes ()
pt117 = PTID117 $ Left ()
pt118 :: PointTypes ()
pt118 = PTID118 $ Left ()
pt120 :: PointTypes ()
pt120 = PTID120 $ Left ()
pt121 :: PointTypes ()
pt121 = PTID121 $ Left ()
pt122 :: PointTypes ()
pt122 = PTID122 $ Left ()
pt172 :: PointTypes ()
pt172 = PTID172 $ Left ()
pt173 :: PointTypes ()
pt173 = PTID173 $ Left ()
pt174 :: PointTypes ()
pt174 = PTID174 $ Left ()
pt175 :: PointTypes ()
pt175 = PTID175 $ Left ()
pt176 :: PointTypes ()
pt176 = PTID176 $ Left ()
pt177 :: PointTypes ()
pt177 = PTID177 $ Left ()
-- | Wire-level numeric id for a 'PointTypes' tag.  Total over all
-- constructors; the wrapped payload is ignored.
decodePTID :: PointTypes a -> Word8
decodePTID (PTID0 _) = 0
decodePTID (PTID1 _) = 1
decodePTID (PTID2 _) = 2
decodePTID (PTID3 _) = 3
decodePTID (PTID4 _) = 4
decodePTID (PTID5 _) = 5
decodePTID (PTID6 _) = 6
decodePTID (PTID7 _) = 7
decodePTID (PTID8 _) = 8
decodePTID (PTID9 _) = 9
decodePTID (PTID10 _) = 10
decodePTID (PTID12 _) = 12
decodePTID (PTID13 _) = 13
decodePTID (PTID14 _) = 14
decodePTID (PTID15 _) = 15
decodePTID (PTID16 _) = 16
decodePTID (PTID17 _) = 17
decodePTID (PTID18 _) = 18
decodePTID (PTID19 _) = 19
decodePTID (PTID20 _) = 20
decodePTID (PTID21 _) = 21
decodePTID (PTID40 _) = 40
decodePTID (PTID41 _) = 41
decodePTID (PTID42 _) = 42
decodePTID (PTID43 _) = 43
decodePTID (PTID44 _) = 44
decodePTID (PTID45 _) = 45
decodePTID (PTID46 _) = 46
decodePTID (PTID47 _) = 47
decodePTID (PTID48 _) = 48
decodePTID (PTID52 _) = 52
decodePTID (PTID53 _) = 53
decodePTID (PTID54 _) = 54
decodePTID (PTID55 _) = 55
decodePTID (PTID56 _) = 56
decodePTID (PTID57 _) = 57
decodePTID (PTID58 _) = 58
decodePTID (PTID59 _) = 59
decodePTID (PTID80 _) = 80
decodePTID (PTID81 _) = 81
decodePTID (PTID85 _) = 85
decodePTID (PTID86 _) = 86
decodePTID (PTID88 _) = 88
decodePTID (PTID89 _) = 89
decodePTID (PTID93 _) = 93
decodePTID (PTID94 _) = 94
decodePTID (PTID98 _) = 98
decodePTID (PTID117 _) = 117
decodePTID (PTID118 _) = 118
decodePTID (PTID120 _) = 120
decodePTID (PTID121 _) = 121
decodePTID (PTID122 _) = 122
decodePTID (PTID172 _) = 172
decodePTID (PTID173 _) = 173
decodePTID (PTID174 _) = 174
decodePTID (PTID175 _) = 175
decodePTID (PTID176 _) = 176
decodePTID (PTID177 _) = 177
-----------------------------------------------------------------------------------
--data PointTypeTest = PointTypeTest {
--pointTypeTestLowRead :: !PointTypeTestLowRead
--}
--type PointTypeTestLowRead = Float
--pointTypeTestParser :: Get PointTypeTestLowRead
--pointTypeTestParser = get
--fetchPointTypeTest :: LB.ByteString -> Decoder PointTypeTestLowRead
--fetchPointTypeTest bs = runGetIncremental pointTypeTestParser `pushChunks` bs
------------------------------------------------------------------------------------
-- | Run the binary parser matching the given point-type tag over the
-- supplied lazy bytestring and wrap the outcome back in the same
-- constructor: 'Right' the decoded record on success, 'Left' an error
-- message otherwise (see 'decodeToEither').  The incoming payload of the
-- tag is ignored; only the constructor selects the parser.
fetchPointType :: PointTypes a -> LB.ByteString -> PointTypes LB.ByteString
fetchPointType (PTID0 _ ) bs = PTID0 $ decodeToEither $ runGetIncremental pointType0Parser `pushChunks` bs
fetchPointType (PTID1 _ ) bs = PTID1 $ decodeToEither $ runGetIncremental pointType1Parser `pushChunks` bs
fetchPointType (PTID2 _ ) bs = PTID2 $ decodeToEither $ runGetIncremental pointType2Parser `pushChunks` bs
fetchPointType (PTID3 _ ) bs = PTID3 $ decodeToEither $ runGetIncremental pointType3Parser `pushChunks` bs
fetchPointType (PTID4 _ ) bs = PTID4 $ decodeToEither $ runGetIncremental pointType4Parser `pushChunks` bs
fetchPointType (PTID5 _ ) bs = PTID5 $ decodeToEither $ runGetIncremental pointType5Parser `pushChunks` bs
fetchPointType (PTID6 _ ) bs = PTID6 $ decodeToEither $ runGetIncremental pointType6Parser `pushChunks` bs
fetchPointType (PTID7 _ ) bs = PTID7 $ decodeToEither $ runGetIncremental pointType7Parser `pushChunks` bs
fetchPointType (PTID8 _ ) bs = PTID8 $ decodeToEither $ runGetIncremental pointType8Parser `pushChunks` bs
fetchPointType (PTID9 _ ) bs = PTID9 $ decodeToEither $ runGetIncremental pointType9Parser `pushChunks` bs
fetchPointType (PTID10 _ ) bs = PTID10 $ decodeToEither $ runGetIncremental pointType10Parser `pushChunks` bs
fetchPointType (PTID12 _ ) bs = PTID12 $ decodeToEither $ runGetIncremental pointType12Parser `pushChunks` bs
fetchPointType (PTID13 _ ) bs = PTID13 $ decodeToEither $ runGetIncremental pointType13Parser `pushChunks` bs
fetchPointType (PTID14 _ ) bs = PTID14 $ decodeToEither $ runGetIncremental pointType14Parser `pushChunks` bs
fetchPointType (PTID15 _ ) bs = PTID15 $ decodeToEither $ runGetIncremental pointType15Parser `pushChunks` bs
fetchPointType (PTID16 _ ) bs = PTID16 $ decodeToEither $ runGetIncremental pointType16Parser `pushChunks` bs
fetchPointType (PTID17 _ ) bs = PTID17 $ decodeToEither $ runGetIncremental pointType17Parser `pushChunks` bs
fetchPointType (PTID18 _ ) bs = PTID18 $ decodeToEither $ runGetIncremental pointType18Parser `pushChunks` bs
fetchPointType (PTID19 _ ) bs = PTID19 $ decodeToEither $ runGetIncremental pointType19Parser `pushChunks` bs
fetchPointType (PTID20 _ ) bs = PTID20 $ decodeToEither $ runGetIncremental pointType20Parser `pushChunks` bs
fetchPointType (PTID21 _ ) bs = PTID21 $ decodeToEither $ runGetIncremental pointType21Parser `pushChunks` bs
fetchPointType (PTID40 _ ) bs = PTID40 $ decodeToEither $ runGetIncremental pointType40Parser `pushChunks` bs
fetchPointType (PTID41 _ ) bs = PTID41 $ decodeToEither $ runGetIncremental pointType41Parser `pushChunks` bs
fetchPointType (PTID42 _ ) bs = PTID42 $ decodeToEither $ runGetIncremental pointType42Parser `pushChunks` bs
fetchPointType (PTID43 _ ) bs = PTID43 $ decodeToEither $ runGetIncremental pointType43Parser `pushChunks` bs
fetchPointType (PTID44 _ ) bs = PTID44 $ decodeToEither $ runGetIncremental pointType44Parser `pushChunks` bs
fetchPointType (PTID45 _ ) bs = PTID45 $ decodeToEither $ runGetIncremental pointType45Parser `pushChunks` bs
fetchPointType (PTID46 _ ) bs = PTID46 $ decodeToEither $ runGetIncremental pointType46Parser `pushChunks` bs
fetchPointType (PTID47 _ ) bs = PTID47 $ decodeToEither $ runGetIncremental pointType47Parser `pushChunks` bs
fetchPointType (PTID48 _ ) bs = PTID48 $ decodeToEither $ runGetIncremental pointType48Parser `pushChunks` bs
fetchPointType (PTID52 _ ) bs = PTID52 $ decodeToEither $ runGetIncremental pointType52Parser `pushChunks` bs
fetchPointType (PTID53 _ ) bs = PTID53 $ decodeToEither $ runGetIncremental pointType53Parser `pushChunks` bs
fetchPointType (PTID54 _ ) bs = PTID54 $ decodeToEither $ runGetIncremental pointType54Parser `pushChunks` bs
fetchPointType (PTID55 _ ) bs = PTID55 $ decodeToEither $ runGetIncremental pointType55Parser `pushChunks` bs
fetchPointType (PTID56 _ ) bs = PTID56 $ decodeToEither $ runGetIncremental pointType56Parser `pushChunks` bs
fetchPointType (PTID57 _ ) bs = PTID57 $ decodeToEither $ runGetIncremental pointType57Parser `pushChunks` bs
fetchPointType (PTID58 _ ) bs = PTID58 $ decodeToEither $ runGetIncremental pointType58Parser `pushChunks` bs
fetchPointType (PTID59 _ ) bs = PTID59 $ decodeToEither $ runGetIncremental pointType59Parser `pushChunks` bs
fetchPointType (PTID80 _ ) bs = PTID80 $ decodeToEither $ runGetIncremental pointType80Parser `pushChunks` bs
fetchPointType (PTID81 _ ) bs = PTID81 $ decodeToEither $ runGetIncremental pointType81Parser `pushChunks` bs
fetchPointType (PTID85 _ ) bs = PTID85 $ decodeToEither $ runGetIncremental pointType85Parser `pushChunks` bs
fetchPointType (PTID86 _ ) bs = PTID86 $ decodeToEither $ runGetIncremental pointType86Parser `pushChunks` bs
fetchPointType (PTID88 _ ) bs = PTID88 $ decodeToEither $ runGetIncremental pointType88Parser `pushChunks` bs
fetchPointType (PTID89 _ ) bs = PTID89 $ decodeToEither $ runGetIncremental pointType89Parser `pushChunks` bs
fetchPointType (PTID93 _ ) bs = PTID93 $ decodeToEither $ runGetIncremental pointType93Parser `pushChunks` bs
fetchPointType (PTID94 _ ) bs = PTID94 $ decodeToEither $ runGetIncremental pointType94Parser `pushChunks` bs
fetchPointType (PTID98 _ ) bs = PTID98 $ decodeToEither $ runGetIncremental pointType98Parser `pushChunks` bs
fetchPointType (PTID117 _ ) bs = PTID117 $ decodeToEither $ runGetIncremental pointType117Parser `pushChunks` bs
fetchPointType (PTID118 _ ) bs = PTID118 $ decodeToEither $ runGetIncremental pointType118Parser `pushChunks` bs
fetchPointType (PTID120 _ ) bs = PTID120 $ decodeToEither $ runGetIncremental pointType120Parser `pushChunks` bs
fetchPointType (PTID121 _ ) bs = PTID121 $ decodeToEither $ runGetIncremental pointType121Parser `pushChunks` bs
fetchPointType (PTID122 _ ) bs = PTID122 $ decodeToEither $ runGetIncremental pointType122Parser `pushChunks` bs
fetchPointType (PTID172 _ ) bs = PTID172 $ decodeToEither $ runGetIncremental pointType172Parser `pushChunks` bs
fetchPointType (PTID173 _ ) bs = PTID173 $ decodeToEither $ runGetIncremental pointType173Parser `pushChunks` bs
fetchPointType (PTID174 _ ) bs = PTID174 $ decodeToEither $ runGetIncremental pointType174Parser `pushChunks` bs
fetchPointType (PTID175 _ ) bs = PTID175 $ decodeToEither $ runGetIncremental pointType175Parser `pushChunks` bs
fetchPointType (PTID176 _ ) bs = PTID176 $ decodeToEither $ runGetIncremental pointType176Parser `pushChunks` bs
fetchPointType (PTID177 _ ) bs = PTID177 $ decodeToEither $ runGetIncremental pointType177Parser `pushChunks` bs
-- | Convert a finished binary 'Decoder' into an 'Either': a parse
-- failure maps to 'Left' with the decoder's message, success to 'Right'.
-- The catch-all covers a 'Partial' decoder, which should be unreachable
-- here because the entire input is pushed before inspection.
--
-- Fix: the failure message previously glued the prefix directly onto the
-- decoder's detail ("decoder Failed withnot enough bytes"); a ": "
-- separator is now inserted.
decodeToEither :: (Show a) => Decoder a -> Either LB.ByteString a
decodeToEither (Fail _ _ s) = Left $ C8.append "decoder Failed with: " (C8.pack s)
decodeToEither (Done _ _ a) = Right a
decodeToEither _ = Left "incomplete parsing SHOULD NOT HAPPEN!"
--debugDecoderPointType :: (Show a) => Decoder a -> IO ()
--debugDecoderPointType (Fail _ _ s) = print $ "decoder Failed with" ++ s
--debugDecoderPointType (Done _ _ pt) = print "Point type finished" >> print pt
--debugDecoderPointType _ = print "incomplete parsing SHOULD NOT HAPPEN!" | jqpeterson/roc-translator | src/Protocol/ROC/PointTypes.hs | bsd-3-clause | 21,145 | 0 | 9 | 5,836 | 5,787 | 3,144 | 2,643 | 423 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
module Modal.Code where
import Prelude hiding (readFile, sequence, mapM, foldr1, concat, concatMap)
import Control.Applicative
import Control.Monad.Except hiding (mapM, sequence)
import Control.Monad.State hiding (mapM, sequence, state)
import Data.Map (Map)
import Data.Maybe (mapMaybe, maybeToList)
import Data.Monoid ((<>))
import Data.Foldable
import Data.Traversable
import Modal.CompilerBase hiding (main)
import Modal.Display
import Modal.Formulas (ModalFormula, (%^), (%|))
import Modal.Parser hiding (main)
import Modal.Programming
import Modal.Statement hiding (main)
import Modal.Utilities
import Text.Parsec hiding ((<|>), optional, many, State)
import Text.Parsec.Expr
import Text.Parsec.Text (Parser)
import Text.Printf (printf)
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Modal.Formulas as F
-------------------------------------------------------------------------------
-- | Surface-syntax keywords for the two value domains, letting callers
-- rename them (e.g. \"action\"/\"actions\" vs. \"outcome\"/\"utilities\")
-- without changing the parser.
data CodeConfig = CodeConfig
  { actionKw :: String   -- ^ keyword introducing a single action variable
  , actionsKw :: String  -- ^ keyword naming the full action range
  , outcomeKw :: String  -- ^ keyword introducing a single outcome variable
  , outcomesKw :: String -- ^ keyword naming the full outcome range
  } deriving (Eq, Ord, Read, Show)
-------------------------------------------------------------------------------
-- | Arithmetic expressions over integer literals and named references,
-- used on the right-hand side of @let@ bindings in agent code.
data SimpleExpr
  = Num (Ref Int)
  | Add SimpleExpr SimpleExpr
  | Sub SimpleExpr SimpleExpr
  | Mul SimpleExpr SimpleExpr
  | Exp SimpleExpr SimpleExpr
  deriving Eq
-- | Render the expression with infix operators and no parentheses,
-- mirroring the concrete syntax accepted by the parser.
instance Show SimpleExpr where
  show expr = case expr of
    Num v   -> show v
    Add l r -> binop "+" l r
    Sub l r -> binop "-" l r
    Mul l r -> binop "*" l r
    Exp l r -> binop "^" l r
    where binop op l r = show l ++ op ++ show r
-- | Expression parser built from an operator table.  All four operators
-- are right-associative here; terms are parenthesised sub-expressions or
-- integer references.
instance Parsable SimpleExpr where
  parser = buildExpressionParser lTable term where
    -- Listed from loosest to tightest binding, per buildExpressionParser.
    lTable =
      [ [Infix (try $ symbol "+" $> Add) AssocRight]
      , [Infix (try $ symbol "-" $> Sub) AssocRight]
      , [Infix (try $ symbol "*" $> Mul) AssocRight]
      , [Infix (try $ symbol "^" $> Exp) AssocRight] ]
    term
      = parens parser
      <|> try (Num <$> (parser :: Parser (Ref Int)))
      <?> "a math expression"
-- | Evaluate an expression in the compile-time environment, resolving
-- references via 'lookupN'.
--
-- NOTE(review): 'Sub' can go negative and '(^)' raises an error on a
-- negative exponent — presumably inputs are non-negative here; confirm
-- against callers.
compileExpr :: MonadCompile m => SimpleExpr -> m Int
compileExpr (Num v) = lookupN v
compileExpr (Add x y) = (+) <$> compileExpr x <*> compileExpr y
compileExpr (Sub x y) = (-) <$> compileExpr x <*> compileExpr y
compileExpr (Mul x y) = (*) <$> compileExpr x <*> compileExpr y
compileExpr (Exp x y) = (^) <$> compileExpr x <*> compileExpr y
-------------------------------------------------------------------------------
-- | A range of values: an enumeration with optional end and step, an
-- explicit list, or the entire domain ('TotalRange').
data Range x
  = EnumRange (Ref x) (Maybe (Ref x)) (Maybe (Ref Int))
  | ListRange [Ref x]
  | TotalRange
  deriving Eq
-- | Render a range in the same concrete syntax the parser accepts, e.g.
-- @1..3 by 2@, @[1, 2, &three]@, or @[...]@.
instance Show x => Show (Range x) where
  show (EnumRange sta msto mste) = printf "%s..%s%s" (show sta) x y where
    x = maybe ("" :: String) show msto
    y = maybe ("" :: String) (printf " by %s" . show) mste
  show (ListRange xs) = printf "[%s]" (List.intercalate ", " $ map show xs)
  show TotalRange = "[...]"
-- | Default range parser: the total range is spelled @[...]@.
instance Parsable x => Parsable (Range x) where
  parser = rangeParser "[...]" parser
-- | Parse a range given the keyword for the total range and a parser for
-- the element type.  Tries enumeration, then list, then total.
rangeParser :: String -> Parser x -> Parser (Range x)
rangeParser allname x = try rEnum <|> try rList <|> try rAll <?> "a range" where
  rEnum = EnumRange <$>
    (symbol "[" *> refParser x <* symbol "..") <*>
    (optional (refParser x) <* symbol "]") <*>
    optional (try $ keyword "by" *> parser)
  rList = ListRange <$> listParser (refParser x)
  rAll = keyword allname $> TotalRange
-- | Self-test for 'parser' at type @Range Int@ (run from 'main').
_testRangeParser :: IO ()
_testRangeParser = do
  let succeeds = verifyParser (parser :: Parser (Range Int))
  let fails = verifyParserFails (parser :: Parser (Range Int))
  succeeds "[1..]" (EnumRange (Lit 1) Nothing Nothing)
  succeeds "[ 1 ..]" (EnumRange (Lit 1) Nothing Nothing)
  succeeds "[ 1 .. 2 ]" (EnumRange (Lit 1) (Just (Lit 2)) Nothing)
  succeeds "[&n..]" (EnumRange (Ref "n") Nothing Nothing)
  succeeds "[&n..3]" (EnumRange (Ref "n") (Just (Lit 3)) Nothing)
  succeeds "[&n..3] by 2" (EnumRange (Ref "n") (Just (Lit 3)) (Just (Lit 2)))
  fails "[1..2..3]"
  succeeds "[1, 2, &three]" (ListRange [Lit 1, Lit 2, Ref "three"])
  succeeds "[...]" TotalRange
  succeeds "[ ]" (ListRange [])
  fails "[ "
-- | Like 'rangeParser' but requires an explicit upper bound and forbids
-- the total range — used where enumeration must terminate (e.g. numeric
-- for-loops).
boundedRange :: Parsable x => Parser (Range x)
boundedRange = try rBoundedEnum <|> try rList <?> "a bounded range" where
  rBoundedEnum = EnumRange <$>
    (symbol "[" *> parser <* symbol "..") <*>
    (Just <$> parser <* symbol "]") <*>
    optional (try $ keyword "by" *> parser)
  rList = ListRange <$> parser
-- | Self-test for 'boundedRange' (run from 'main'); unbounded and total
-- ranges must be rejected.
_testBoundedRangeParser :: IO ()
_testBoundedRangeParser = do
  let succeeds = verifyParser (boundedRange :: Parser (Range Int))
  let fails = verifyParserFails (boundedRange :: Parser (Range Int))
  fails "[1..]"
  succeeds "[1 .. 2]" (EnumRange (Lit 1) (Just (Lit 2)) Nothing)
  succeeds "[&n .. 2] by 10" (EnumRange (Ref "n") (Just (Lit 2)) (Just (Lit 10)))
  succeeds "[1, 2, &three]" (ListRange [Lit 1, Lit 2, Ref "three"])
  fails "[...]"
-- | All literal values appearing in a range; references and the total
-- range contribute nothing.
rangeLitValues :: Range x -> [x]
rangeLitValues r = case r of
  EnumRange sta msto _ -> mapMaybe lit (sta : maybeToList msto)
  ListRange refs       -> mapMaybe lit refs
  TotalRange           -> []
-- | Resolve a range to a concrete list of values.  @getXs@ supplies the
-- full domain (in order); @getX@ resolves a single reference.
-- An enumeration is cut out of the domain list: drop until the start,
-- optionally take until the end, optionally keep every n-th element.
compileRange :: (Eq x, MonadCompile m) => m [x] -> (Ref x -> m x) -> Range x -> m [x]
compileRange getXs _ TotalRange = getXs
compileRange _ getX (ListRange xs) = mapM getX xs
compileRange getXs getX (EnumRange sta msto mste) = renum msto mste where
  renum Nothing Nothing = dropWhile . (/=) <$> getX sta <*> getXs
  renum (Just sto) Nothing = takeWhile . (/=) <$> getX sto <*> renum Nothing Nothing
  renum _ (Just ste) = every <$> lookupN ste <*> renum msto Nothing
-------------------------------------------------------------------------------
-- | One statement of agent code: loops over actions/outcomes/numbers, a
-- numeric let-binding, conditionals, an (optionally valued) return, or a
-- no-op.
data CodeFragment
  = For ClaimType Name (Range Value) [CodeFragment]
  | ForN Name (Range Int) [CodeFragment]
  | LetN Name SimpleExpr
  | If Statement [CodeFragment]
  | IfElse Statement [CodeFragment] [CodeFragment]
  | Return (Maybe (Ref Value))
  | Pass
  deriving Eq
-- | Pretty-print fragments as indented source-like blocks; nested
-- fragments are rendered one indent level deeper.
instance Blockable CodeFragment where
  blockLines (For t n r cs) =
    [(0, Text.pack $ printf "for %s %s in %s" (show t) n (show r))] <>
    increaseIndent (concatMap blockLines cs)
  blockLines (ForN n r cs) =
    [(0, Text.pack $ printf "for number %s in %s" n (show r))] <>
    increaseIndent (concatMap blockLines cs)
  blockLines (LetN n x) =
    [(0, Text.pack $ printf "let %s = %s" n (show x))]
  blockLines (If s xs) =
    [(0, Text.pack $ printf "if %s" $ show s)] <>
    increaseIndent (concatMap blockLines xs)
  blockLines (IfElse s xs ys) =
    [(0, Text.pack $ printf "if %s" $ show s)] <>
    increaseIndent (concatMap blockLines xs) <>
    [(0, "else")] <>
    increaseIndent (concatMap blockLines ys)
  blockLines (Return Nothing) = [(0, "return")]
  blockLines (Return (Just x)) = [(0, Text.pack $ printf "return %s" (show x))]
  blockLines (Pass) = [(0, "pass")]
-- | Fragments display as their rendered block form.
instance Show CodeFragment where
  show = Text.unpack . renderBlock

-- | Parser state for fragments: the surface keywords plus the current
-- tab-indentation depth (incremented for each nested block).
data CodeFragConfig = CodeFragConfig
  { indentLevel :: Int
  , codeConfig :: CodeConfig
  } deriving (Eq, Ord, Read, Show)
-- | Consume exactly the configured number of leading tab characters,
-- reporting the expected count on failure.
eatIndent :: CodeFragConfig -> Parser ()
eatIndent conf = tabs <?> printf "%d tabs" n where
  n = indentLevel conf
  tabs = void $ count n (char '\t')
-- | Parse one code fragment at the configured indentation depth.  Nested
-- blocks ('pBlock') recurse with the indent level increased by one, so
-- layout is significant (tabs only).
codeFragmentParser :: CodeFragConfig -> Parser CodeFragment
codeFragmentParser conf = try indent *> pFrag where
  -- Skip blank/comment lines, then require the exact tab depth.
  indent = (many $ try ignoredLine) *> eatIndent conf
  pFrag = try pForA
      <|> try pForO
      <|> try pForN
      <|> try pLetN
      <|> try pIfElse
      <|> try pIf
      <|> try pReturn
      <|> try pPass
  pForA = pFor ActionT action actions
  pForO = pFor OutcomeT outcome outcomes
  pFor t x xs = For t
    <$> (keyword "for" *> keyword x *> varname)
    <*> (keyword "in" *> rangeParser xs parser <* w <* endOfLine)
    <*> pBlock
  pForN = ForN
    <$> (keyword "for" *> keyword "number" *> varname)
    <*> (keyword "in" *> boundedRange <* w <* endOfLine)
    <*> pBlock
  pLetN = LetN
    <$> (keyword "let" *> varname <* symbol "=")
    <*> parser <* eols
  pIf = If
    <$> (keyword "if" *> parser <* w <* endOfLine)
    <*> pBlock
  pIfElse = IfElse
    <$> (keyword "if" *> parser <* w <* endOfLine)
    <*> pBlock
    <*> (indent *> keyword "else" *> w *> endOfLine *> pBlock)
  -- many1 guarantees every nested block is non-empty (compileCodeFragment
  -- relies on this when it uses foldr1).
  pBlock = many1 $ try $ codeFragmentParser conf{indentLevel=succ $ indentLevel conf}
  pPass = symbol "pass" $> Pass <* w <* eol
  pReturn = try returnThing <|> returnNothing <?> "a return statement"
  -- NOTE(review): this signature for 'returnNothing' sits above the
  -- definition of 'returnThing'; harmless, since binding order inside a
  -- where-clause is irrelevant.
  returnNothing :: Parser CodeFragment
  returnThing = symbol "return " *> (Return . Just <$> parser) <* w <* eol
  returnNothing = symbol "return" $> Return Nothing <* w <* eol
  action = actionKw $ codeConfig conf
  outcome = outcomeKw $ codeConfig conf
  actions = actionsKw $ codeConfig conf
  outcomes = outcomesKw $ codeConfig conf
  varname = char '&' *> name
-- | Compile one fragment into a partial modal program — a transformer
-- that, given the continuation program, yields a formula per action.
--
-- NOTE(review): 'foldr1' below is partial on empty lists.  Blocks built
-- by 'codeFragmentParser' are non-empty (many1), and 'If' delegates to
-- 'IfElse' with a [Pass] else-branch, so this looks unreachable via the
-- parser — confirm before calling with hand-built fragments.
compileCodeFragment :: MonadCompile m =>
  CodeFragment -> m (PartialProgram Value CompiledClaim)
compileCodeFragment code = case code of
  For ActionT n r x -> loop (withA n) x =<< compileRange (gets actionList) lookupA r
  For OutcomeT n r x -> loop (withO n) x =<< compileRange (gets outcomeList) lookupO r
  ForN n r x -> loop (withN n) x =<< compileRange (return [0..]) lookupN r
  LetN n x -> compileExpr x >>= modify . withN n >> return id
  If s block -> compileCodeFragment (IfElse s block [Pass])
  IfElse s tblock eblock -> do
    cond <- compileStatement compileClaim s
    thens <- mapM compileCodeFragment tblock
    elses <- mapM compileCodeFragment eblock
    let yes = foldr1 (.) thens
    let no = foldr1 (.) elses
    -- Branch on the compiled condition: condition holds and then-branch,
    -- or its negation and else-branch.
    return (\continue act ->
      (cond %^ yes continue act) %| (F.Neg cond %^ no continue act))
  Return (Just v) -> (\a -> const $ F.Val . (a ==)) <$> lookupA v
  Return Nothing -> (\a -> const $ F.Val . (a ==)) <$> defaultAction
  Pass -> return id
  where loop update block xs
          | null xs = return id
          | otherwise = foldr1 (.) . concat <$> mapM doFragment xs
          where doFragment x = modify (update x) >> mapM compileCodeFragment block
-------------------------------------------------------------------------------
-- | A whole agent body: either a sequence of fragments or a direct map
-- from each action to the statement under which it is taken.
data Code
  = Code [CodeFragment]
  | ActionMap (Map Value Statement)
  deriving Eq

-- | Render either form as an indented block.
instance Blockable Code where
  blockLines (Code frags) = concatMap blockLines frags
  blockLines (ActionMap a2s) = [
    (0, Text.pack $ printf "%s ↔ %s" (show a) (show s)) | (a, s) <- Map.toList a2s]

instance Show Code where
  show = Text.unpack . renderBlock
-- | Parse a fragment-style agent body: one or more fragments, each
-- indented by a single tab.
codeParser :: CodeConfig -> Parser Code
codeParser conf = Code <$> many1 (codeFragmentParser $ CodeFragConfig 1 conf)
-- | Self-test for 'codeParser' (run from 'main').  Samples 1-3 must
-- parse to the given ASTs; samples 4-7 exercise rejection of unbounded
-- numeric ranges and wrong indentation (missing, space-based, or too
-- deep).  Fixture strings are significant byte-for-byte.
_testCodeParser :: IO ()
_testCodeParser = testAllSamples where
  sample1 = Text.unlines
    ["\tlet &step = 0"
    ,"\tfor action &a in actions"
    ,"\t\t-- This is a comment about the inner loop."
    ,"\t\tfor outcome &u in utilities"
    ,"\t\t\tif [&step][A()=&a -> U()=&u]"
    ,"\t\t\t\treturn &a"
    ,"\t\t\tlet &step = &step + 1"
    ,"\treturn"]
  sample2 = Text.unlines
    ["\tif {- IGNORE THIS COMMENT -} [][Them(Me)=C]"
    ,"\t\treturn C -- Ignore this one too."
    ,""
    ,"\telse"
    ,"\t\treturn D"]
  sample3 = Text.unlines
    [" -- Sample 3:"
    ,"\tfor number &n in [0, 1, 2, 3]"
    ,"\t\tif Possible(&n)[Them(Me)=C]"
    ,"\t\t\treturn C"
    ," \t "
    ,""
    ,"\treturn D"]
  sample4 = Text.unlines
    ["\tfor number &n in [...]"
    ,"\t\treturn &n"]
  sample5 = Text.unlines
    ["\tif ⊤"
    ,"\treturn 0"]
  sample6 = Text.unlines
    ["\tif ⊤"
    ,"\t return 0"]
  sample7 = Text.unlines
    ["\tif ⊤"
    ,"\t\t\treturn 0"]
  conf = CodeConfig "action" "actions" "outcome" "utilities"
  testAllSamples = do
    verifyParser (codeParser conf) sample1 (Code
      [ LetN "step" (Num (Lit 0))
      , For ActionT "a" TotalRange
        [ For OutcomeT "u" TotalRange
          [ If (Provable (Ref "step")
              (Imp
                (Var $ ParsedClaim "A" Nothing (Equals (Ref "a")))
                (Var $ ParsedClaim "U" Nothing (Equals (Ref "u")))))
            [ Return (Just (Ref "a")) ]
          , LetN "step" (Add (Num $ Ref "step") (Num $ Lit 1)) ] ]
      , Return Nothing ])
    verifyParser (codeParser conf) sample2 (Code
      [ IfElse (Provable (Lit 0)
          (Var $ ParsedClaim "Them"
            (Just $ Call "Me" [] Map.empty [] [])
            (Equals $ Lit "C")))
        [ Return (Just (Lit "C")) ]
        [ Return (Just (Lit "D")) ] ])
    verifyParser (codeParser conf) sample3 (Code
      [ ForN "n" (ListRange [Lit 0, Lit 1, Lit 2, Lit 3])
        [ If (Possible (Ref "n")
            (Var $ ParsedClaim "Them"
              (Just $ Call "Me" [] Map.empty [] [])
              (Equals $ Lit "C")))
          [ Return (Just (Lit "C")) ] ]
      , Return (Just (Lit "D")) ])
    verifyParserFails (codeParser conf) sample4
    verifyParserFails (codeParser conf) sample5
    verifyParserFails (codeParser conf) sample6
    verifyParserFails (codeParser conf) sample7
-- | Parse an action-map agent body: one tab-indented @action ↔ statement@
-- line per action ("↔", "<->" and "iff" all accepted).
codeMapParser :: Parser Code
codeMapParser = ActionMap . Map.fromList <$> many1 assignment where
  indent = (many (w *> endOfLine)) *> char '\t'
  iffParsers = [symbol "↔", symbol "<->", keyword "iff"]
  pIff = void $ choice $ map try iffParsers
  assignment = (,) <$> (indent *> parser <* pIff) <*> (parser <* eols)
-- | Self-test for 'codeMapParser' (run from 'main').  Samples 3 and 4
-- must fail on wrong indentation (extra tab, space-based).
_testCodeMapParser :: IO ()
_testCodeMapParser = testAllSamples where
  sample1 = Text.unlines
    ["\tC ↔ [][Them(Me)=C]"
    ,"\tD ↔ ~[][Them(Me)=C]"]
  sample2 = Text.unlines
    ["\tCD iff A1()=C and A2()=D"
    ,"\tCC iff A1()=C and A2()=C"
    ,"\tDD iff A1()=D and A2()=D"
    ,"\tDC iff A1()=D and A2()=C"]
  sample3 = Text.unlines
    ["\tC ↔ [][Them(Me)=C]"
    ,"\t\tD ↔ ~[][Them(Me)=C]"]
  sample4 = Text.unlines
    ["\tC ↔ [][Them(Me)=C]"
    ," D ↔ ~[][Them(Me)=C]"]
  testAllSamples = do
    verifyParser codeMapParser sample1 (ActionMap $ Map.fromList
      [ ("C", Provable (Lit 0) (Var $ ParsedClaim "Them"
          (Just $ Call "Me" [] Map.empty [] [])
          (Equals $ Lit "C")))
      , ("D", Neg $ Provable (Lit 0) (Var $ ParsedClaim "Them"
          (Just $ Call "Me" [] Map.empty [] [])
          (Equals $ Lit "C"))) ])
    verifyParser codeMapParser sample2 (ActionMap $ Map.fromList
      [ ("CD", (And
          (Var $ ParsedClaim "A1" Nothing (Equals $ Lit "C"))
          (Var $ ParsedClaim "A2" Nothing (Equals $ Lit "D"))))
      , ("CC", (And
          (Var $ ParsedClaim "A1" Nothing (Equals $ Lit "C"))
          (Var $ ParsedClaim "A2" Nothing (Equals $ Lit "C"))))
      , ("DD", (And
          (Var $ ParsedClaim "A1" Nothing (Equals $ Lit "D"))
          (Var $ ParsedClaim "A2" Nothing (Equals $ Lit "D"))))
      , ("DC", (And
          (Var $ ParsedClaim "A1" Nothing (Equals $ Lit "D"))
          (Var $ ParsedClaim "A2" Nothing (Equals $ Lit "C")))) ])
    verifyParserFails codeMapParser sample3
    verifyParserFails codeMapParser sample4
-- | Compile an agent body into a modal program (a formula per action).
--
-- For a fragment sequence, the fragments' partial programs are composed
-- left-to-right and terminated with \"return the default action\".  For
-- an action map, each action's statement is compiled independently.
compileCode :: MonadCompile m => Code -> m (ModalProgram Value CompiledClaim)
compileCode (Code frags) = do
  prog <- foldM (\f c -> (f .) <$> compileCodeFragment c) id frags
  dflt <- defaultAction
  return $ prog (F.Val . (dflt ==))
compileCode (ActionMap a2smap) = do
  let a2slist = Map.toList a2smap
  formulas <- mapM (compileStatement compileClaim . snd) a2slist
  let a2flist = zip (map fst a2slist) formulas
  -- Fix: the original used an irrefutable `let Just f = List.lookup ...`,
  -- which crashes with an opaque pattern-match error when the program is
  -- queried for an action missing from the map; fail with an explicit
  -- diagnostic instead.
  return $ \a -> case List.lookup a a2flist of
    Just f -> f
    Nothing -> error "compileCode: action missing from action map"
-- Note: Code not dead; just not yet used.
-- | Every action literal the code can mention: the keys of an action
-- map, or the literals occurring in action-typed for-ranges and in
-- return statements of a fragment body.
actionsMentioned :: Code -> [Value]
actionsMentioned (ActionMap m) = Map.keys m
actionsMentioned (Code frags) = foldMap go frags where
  go frag = case frag of
    For ActionT _ range fs -> rangeLitValues range ++ foldMap go fs
    For OutcomeT _ _ fs    -> foldMap go fs
    ForN _ _ fs            -> foldMap go fs
    If _ fs                -> foldMap go fs
    IfElse _ fs gs         -> foldMap go fs ++ foldMap go gs
    Return (Just v)        -> maybeToList (lit v)
    Return Nothing         -> []
    LetN _ _               -> []
    Pass                   -> []
-- Note: Code not dead; just not yet used.
-- | Every outcome literal the code can mention: only literals occurring
-- in outcome-typed for-ranges; action maps mention no outcomes.
outcomesMentioned :: Code -> [Value]
outcomesMentioned (ActionMap _) = []
outcomesMentioned (Code frags) = foldMap go frags where
  go frag = case frag of
    For OutcomeT _ range fs -> rangeLitValues range ++ foldMap go fs
    For ActionT _ _ fs      -> foldMap go fs
    ForN _ _ fs             -> foldMap go fs
    If _ fs                 -> foldMap go fs
    IfElse _ fs gs          -> foldMap go fs ++ foldMap go gs
    Return _                -> []
    LetN _ _                -> []
    Pass                    -> []
-- Note: Code not dead; just not yet used.
-- | Every claim appearing in the code's statements: the action-map
-- statements, or the conditions of if/if-else fragments (recursively).
claimsMade :: Code -> [ParsedClaim]
claimsMade (ActionMap m) = foldMap claimsParsed (Map.elems m)
claimsMade (Code frags) = foldMap go frags where
  go frag = case frag of
    For _ _ _ fs   -> foldMap go fs
    ForN _ _ fs    -> foldMap go fs
    If s fs        -> claimsParsed s ++ foldMap go fs
    IfElse s fs gs -> claimsParsed s ++ foldMap go fs ++ foldMap go gs
    LetN _ _       -> []
    Return _       -> []
    Pass           -> []
-------------------------------------------------------------------------------
-- | A compiled agent: one modal formula per action value.
type CompiledAgent = Map Value (ModalFormula CompiledClaim)

-- | Run the compiler over an agent body in the given context, producing
-- the per-action formula table.  Compilation failures surface through
-- 'MonadError'.
codeToProgram :: MonadError CompileError m =>
  CompileContext -> Code -> m CompiledAgent
codeToProgram context code = do
  (prog, state) <- runStateT (compileCode code) context
  return $ Map.fromList [(a, prog a) | a <- actionList state]
-------------------------------------------------------------------------------
-- Testing
-- | Run all parser self-tests in this module.
main :: IO ()
main = do
  _testRangeParser
  _testBoundedRangeParser
  _testCodeParser
  _testCodeMapParser
  putStrLn ""
| daniel-ziegler/provability | src/Modal/Code.hs | bsd-3-clause | 17,712 | 0 | 28 | 3,934 | 6,598 | 3,337 | 3,261 | 414 | 9 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.Response (
sendResponse
, sanitizeHeaderValue -- for testing
, warpVersion
, hasBody
, replaceHeader
) where
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
#ifndef MIN_VERSION_http_types
#define MIN_VERSION_http_types(x,y,z) 1
#endif
import Blaze.ByteString.Builder.HTTP (chunkedTransferEncoding, chunkedTransferTerminator)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
import qualified Control.Exception as E
import Control.Monad (unless, when)
import Data.Array ((!))
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.ByteString.Builder (byteString, Builder)
import Data.ByteString.Builder.Extra (flush)
import qualified Data.CaseInsensitive as CI
import Data.Function (on)
import Data.List (deleteBy)
import Data.Maybe
#if MIN_VERSION_base(4,5,0)
# if __GLASGOW_HASKELL__ < 709
import Data.Monoid (mempty)
# endif
import Data.Monoid ((<>))
#else
import Data.Monoid (mappend, mempty)
#endif
import Data.Streaming.Blaze (newBlazeRecv, reuseBufferStrategy)
import Data.Version (showVersion)
import Data.Word8 (_cr, _lf)
import qualified Network.HTTP.Types as H
#if MIN_VERSION_http_types(0,9,0)
import qualified Network.HTTP.Types.Header as H
#endif
import Network.Wai
import Network.Wai.Handler.Warp.Buffer (toBuilderBuffer)
import qualified Network.Wai.Handler.Warp.Date as D
import Network.Wai.Handler.Warp.File
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.IO (toBufIOWith)
import Network.Wai.Handler.Warp.ResponseHeader
import Network.Wai.Handler.Warp.Settings
import qualified Network.Wai.Handler.Warp.Timeout as T
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import qualified Paths_warp
#if !MIN_VERSION_base(4,5,0)
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
-- $setup
-- >>> :set -XOverloadedStrings
----------------------------------------------------------------
-- | Sending a HTTP response to 'Connection' according to 'Response'.
--
-- Applications/middlewares MUST provide a proper 'H.ResponseHeaders'.
-- so that inconsistency does not happen.
-- No header is deleted by this function.
--
-- Especially, Applications/middlewares MUST provide a proper
-- Content-Type. They MUST NOT provide
-- Content-Length, Content-Range, and Transfer-Encoding
-- because they are inserted, when necessary,
-- regardless they already exist.
-- This function does not insert Content-Encoding. It's middleware's
-- responsibility.
--
-- The Date and Server header is added if not exist
-- in HTTP response header.
--
-- There are three basic APIs to create 'Response':
--
-- ['responseBuilder' :: 'H.Status' -> 'H.ResponseHeaders' -> 'Builder' -> 'Response']
-- HTTP response body is created from 'Builder'.
-- Transfer-Encoding: chunked is used in HTTP/1.1.
--
-- ['responseStream' :: 'H.Status' -> 'H.ResponseHeaders' -> 'StreamingBody' -> 'Response']
-- HTTP response body is created from 'Builder'.
-- Transfer-Encoding: chunked is used in HTTP/1.1.
--
-- ['responseRaw' :: ('IO' 'ByteString' -> ('ByteString' -> 'IO' ()) -> 'IO' ()) -> 'Response' -> 'Response']
-- No header is added and no Transfer-Encoding: is applied.
--
-- ['responseFile' :: 'H.Status' -> 'H.ResponseHeaders' -> 'FilePath' -> 'Maybe' 'FilePart' -> 'Response']
-- HTTP response body is sent (by sendfile(), if possible) for GET method.
-- HTTP response body is not sent by HEAD method.
-- Content-Length and Content-Range are automatically
-- added into the HTTP response header if necessary.
-- If Content-Length and Content-Range exist in the HTTP response header,
-- they would cause inconsistency.
-- \"Accept-Ranges: bytes\" is also inserted.
--
-- Applications are categorized into simple and sophisticated.
-- Sophisticated applications should specify 'Just' to
-- 'Maybe' 'FilePart'. They should treat the conditional request
-- by themselves. A proper 'Status' (200 or 206) must be provided.
--
-- Simple applications should specify 'Nothing' to
-- 'Maybe' 'FilePart'. The size of the specified file is obtained
-- by disk access or from the file info cache.
-- If-Modified-Since, If-Unmodified-Since, If-Range and Range
-- are processed. Since a proper status is chosen, 'Status' is
-- ignored. Last-Modified is inserted.
sendResponse :: Settings
             -> Connection
             -> InternalInfo
             -> Request -- ^ HTTP request.
             -> IndexedHeader -- ^ Indexed header of HTTP request.
             -> IO ByteString -- ^ source from client, for raw response
             -> Response -- ^ HTTP response including status code and response header.
             -> IO Bool -- ^ Returning True if the connection is persistent.
sendResponse settings conn ii req reqidxhdr src response = do
    hs <- addServerAndDate hs0
    if hasBody s then do
        -- The response to HEAD does not have body.
        -- But to handle the conditional requests defined RFC 7232 and
        -- to generate appropriate content-length, content-range,
        -- and status, the response to HEAD is processed here.
        --
        -- See definition of rsp below for proper body stripping.
        (ms, mlen) <- sendRsp conn ii ver s hs rsp
        case ms of
            Nothing -> return ()
            Just realStatus -> logger req realStatus mlen
        T.tickle th
        return ret
      else do
        _ <- sendRsp conn ii ver s hs RspNoBody
        logger req s Nothing
        T.tickle th
        return isPersist
  where
    defServer = settingsServerName settings
    logger = settingsLogger settings
    ver = httpVersion req
    s = responseStatus response
    -- Header values are sanitized (CR/LF rewriting) before being used.
    hs0 = sanitizeHeaders $ responseHeaders response
    rspidxhdr = indexResponseHeader hs0
    th = threadHandle ii
    getdate = getDate ii
    -- Date and Server are only added when the application did not set them.
    addServerAndDate = addDate getdate rspidxhdr . addServer defServer rspidxhdr
    (isPersist,isChunked0) = infoFromRequest req reqidxhdr
    -- A HEAD response never carries a body, so never chunk it.
    isChunked = not isHead && isChunked0
    (isKeepAlive, needsChunked) = infoFromResponse rspidxhdr (isPersist,isChunked)
    isHead = requestMethod req == H.methodHead
    -- Body stripping for HEAD happens here: builder/stream bodies are
    -- replaced by RspNoBody, after the header logic above has run.
    rsp = case response of
        ResponseFile _ _ path mPart -> RspFile path mPart reqidxhdr isHead (T.tickle th)
        ResponseBuilder _ _ b
          | isHead -> RspNoBody
          | otherwise -> RspBuilder b needsChunked
        ResponseStream _ _ fb
          | isHead -> RspNoBody
          | otherwise -> RspStream fb needsChunked th
        ResponseRaw raw _ -> RspRaw raw src (T.tickle th)
    -- Whether the connection can be reused, by response type.
    ret = case response of
        ResponseFile {} -> isPersist
        ResponseBuilder {} -> isKeepAlive
        ResponseStream {} -> isKeepAlive
        ResponseRaw {} -> False
----------------------------------------------------------------
-- | Run 'sanitizeHeaderValue' over every header value, taking the fast
-- path (no allocation) for the common case of values without CR/LF.
sanitizeHeaders :: H.ResponseHeaders -> H.ResponseHeaders
sanitizeHeaders = map (fmap cleanse)
  where
    cleanse v
      | containsNewlines v = sanitizeHeaderValue v -- needs rewriting
      | otherwise          = v                     -- already safe

{-# INLINE containsNewlines #-}
-- | Does the value contain a carriage return (13) or line feed (10)?
containsNewlines :: ByteString -> Bool
containsNewlines = S.any (\w -> w == 13 || w == 10)
{-# INLINE sanitizeHeaderValue #-}
-- | Sanitize a header value against response splitting: strip CRs, then
-- re-join the remaining lines with CRLF, forcing every continuation
-- line to begin with linear whitespace (space or tab).
sanitizeHeaderValue :: ByteString -> ByteString
sanitizeHeaderValue v =
    case S8.lines (S8.filter (/= '\r') v) of
        []         -> S8.empty
        (l : rest) -> S8.intercalate crlf (l : mapMaybe continuation rest)
  where
    crlf = S8.pack "\r\n"
    -- Drop empty continuation lines; prefix a space when the line does
    -- not already start with space or tab.
    continuation line =
        case S8.uncons line of
            Nothing -> Nothing
            Just (c, _)
                | c == ' ' || c == '\t' -> Just line
                | otherwise             -> Just (S8.pack " " `S8.append` line)
----------------------------------------------------------------
-- | Internal representation of how the response body will be sent.
data Rsp = RspNoBody -- ^ header only (HEAD, 204, ...)
         | RspFile FilePath (Maybe FilePart) IndexedHeader Bool (IO ()) -- ^ file, optional explicit part, request header index, isHead, timeout tickle
         | RspBuilder Builder Bool -- ^ in-memory body, needs chunking?
         | RspStream StreamingBody Bool T.Handle -- ^ streamed body, needs chunking?, timeout handle
         | RspRaw (IO ByteString -> (ByteString -> IO ()) -> IO ()) (IO ByteString) (IO ()) -- ^ raw application, client source, tickle
----------------------------------------------------------------
-- | Send a response of the given 'Rsp' shape. Returns the status that
-- was actually sent (Nothing for raw responses, which bypass logging)
-- and the body byte count when it is known (file responses only).
sendRsp :: Connection
        -> InternalInfo
        -> H.HttpVersion
        -> H.Status
        -> H.ResponseHeaders
        -> Rsp
        -> IO (Maybe H.Status, Maybe Integer)
----------------------------------------------------------------
-- Bodiless response: just the header block.
sendRsp conn _ ver s hs RspNoBody = do
    -- Not adding Content-Length.
    -- User agents treats it as Content-Length: 0.
    composeHeader ver s hs >>= connSendAll conn
    return (Just s, Nothing)
----------------------------------------------------------------
-- In-memory body: header and body are flushed through the connection's
-- write buffer in one pass.
sendRsp conn _ ver s hs (RspBuilder body needsChunked) = do
    header <- composeHeaderBuilder ver s hs needsChunked
    let hdrBdy
         | needsChunked = header <> chunkedTransferEncoding body
                                 <> chunkedTransferTerminator
         | otherwise = header <> body
        buffer = connWriteBuffer conn
        size = connBufferSize conn
    toBufIOWith buffer size (connSendAll conn) hdrBdy
    return (Just s, Nothing) -- fixme: can we tell the actual sent bytes?
----------------------------------------------------------------
-- Streaming body: the application pushes builders via sendChunk; each
-- filled buffer is sent as a fragment (with timeout bookkeeping).
sendRsp conn _ ver s hs (RspStream streamingBody needsChunked th) = do
    header <- composeHeaderBuilder ver s hs needsChunked
    (recv, finish) <- newBlazeRecv $ reuseBufferStrategy
                    $ toBuilderBuffer (connWriteBuffer conn) (connBufferSize conn)
    let send builder = do
            popper <- recv builder
            let loop = do
                    bs <- popper
                    unless (S.null bs) $ do
                        sendFragment conn th bs
                        loop
            loop
        sendChunk
            | needsChunked = send . chunkedTransferEncoding
            | otherwise = send
    send header
    streamingBody sendChunk (sendChunk flush)
    when needsChunked $ send chunkedTransferTerminator
    -- Flush whatever is left in the recycled buffer.
    mbs <- finish
    maybe (return ()) (sendFragment conn th) mbs
    return (Just s, Nothing) -- fixme: can we tell the actual sent bytes?
----------------------------------------------------------------
-- Raw response: hand the socket over to the application. No headers,
-- no transfer encoding, no logging (hence (Nothing, Nothing)).
sendRsp conn _ _ _ _ (RspRaw withApp src tickle) = do
    withApp recv send
    return (Nothing, Nothing)
  where
    recv = do
        bs <- src
        unless (S.null bs) tickle
        return bs
    send bs = connSendAll conn bs >> tickle
----------------------------------------------------------------
-- Sophisticated WAI applications.
-- We respect s0. s0 MUST be a proper value.
sendRsp conn ii ver s0 hs0 (RspFile path (Just part) _ isHead hook) =
    sendRspFile2XX conn ii ver s0 hs path beg len isHead hook
  where
    beg = filePartOffset part
    len = filePartByteCount part
    hs = addContentHeadersForFilePart hs0 part
----------------------------------------------------------------
-- Simple WAI applications.
-- Status is ignored
sendRsp conn ii ver _ hs0 (RspFile path Nothing idxhdr isHead hook) = do
    -- A missing/unreadable file is reported as 404, not as an exception.
    efinfo <- E.try $ getFileInfo ii path
    case efinfo of
        Left (_ex :: E.IOException) ->
#ifdef WARP_DEBUG
          print _ex >>
#endif
          sendRspFile404 conn ii ver hs0
        Right finfo -> case conditionalRequest finfo hs0 idxhdr of
            WithoutBody s -> sendRsp conn ii ver s hs0 RspNoBody
            WithBody s hs beg len -> sendRspFile2XX conn ii ver s hs path beg len isHead hook
----------------------------------------------------------------
-- | Send a 2xx file response (200 or 206) for the byte range
-- [beg, beg+len). For HEAD only the header is sent; otherwise the body
-- goes out via 'connSendFile' (sendfile() when available).
sendRspFile2XX :: Connection
               -> InternalInfo
               -> H.HttpVersion
               -> H.Status
               -> H.ResponseHeaders
               -> FilePath
               -> Integer -- ^ offset of the first byte to send
               -> Integer -- ^ number of bytes to send
               -> Bool -- ^ is this a HEAD request?
               -> IO () -- ^ hook run during sending (timeout tickle)
               -> IO (Maybe H.Status, Maybe Integer)
sendRspFile2XX conn ii ver s hs path beg len isHead hook
  | isHead = sendRsp conn ii ver s hs RspNoBody
  | otherwise = do
      lheader <- composeHeader ver s hs
      -- The fd comes from the fd cache; 'fresher' keeps it alive.
      (mfd, fresher) <- getFd ii path
      let fid = FileId path mfd
          hook' = hook >> fresher
      connSendFile conn fid beg len hook' [lheader]
      return (Just s, Just len)
-- | Respond with a plain-text 404 when the requested file is missing
-- or cannot be read. The original headers are kept, except that
-- Content-Type is forced to text/plain.
sendRspFile404 :: Connection
               -> InternalInfo
               -> H.HttpVersion
               -> H.ResponseHeaders
               -> IO (Maybe H.Status, Maybe Integer)
sendRspFile404 conn ii ver hs0 =
    sendRsp conn ii ver H.notFound404 hs (RspBuilder body True)
  where
    hs = replaceHeader H.hContentType "text/plain; charset=utf-8" hs0
    body = byteString "File not found"
----------------------------------------------------------------
----------------------------------------------------------------
-- | Use 'connSendAll' to send this data while respecting timeout rules.
-- The timeout is resumed only for the duration of the blocking send.
sendFragment :: Connection -> T.Handle -> ByteString -> IO ()
sendFragment Connection { connSendAll = send } th bs = do
    T.resume th
    send bs
    T.pause th
    -- We pause timeouts before passing control back to user code. This ensures
    -- that a timeout will only ever be executed when Warp is in control. We
    -- also make sure to resume the timeout after the completion of user code
    -- so that we can kill idle connections.
----------------------------------------------------------------
-- | Connection-management facts derived from the request alone:
-- does the client allow a persistent connection, and does it speak a
-- version where chunked transfer encoding is available?
infoFromRequest :: Request -> IndexedHeader -> (Bool -- isPersist
                                               ,Bool) -- isChunked
infoFromRequest req reqidxhdr = (checkPersist req reqidxhdr, checkChunk req)
-- | Should the connection be kept open after this request?
--
-- HTTP/1.1 is persistent unless the client sent \"Connection: close\";
-- HTTP/1.0 is non-persistent unless it sent \"Connection: keep-alive\".
-- The Connection header is compared case-insensitively.
checkPersist :: Request -> IndexedHeader -> Bool
checkPersist req reqidxhdr
  | httpVersion req == H.http11 = connHdr /= Just "close"
  | otherwise                   = connHdr == Just "keep-alive"
  where
    connHdr = fmap CI.foldCase (reqidxhdr ! fromEnum ReqConnection)
-- | Chunked transfer encoding is only available from HTTP/1.1 on.
checkChunk :: Request -> Bool
checkChunk = (== H.http11) . httpVersion
----------------------------------------------------------------
-- Used for ResponseBuilder and ResponseSource.
-- Don't use this for ResponseFile since this logic does not fit
-- for ResponseFile. For instance, isKeepAlive should be True in some cases
-- even if the response header does not have Content-Length.
--
-- Content-Length is specified by a reverse proxy.
-- Note that CGI does not specify Content-Length.
-- | Combine request facts with the response header:
-- keep-alive requires the client to allow it AND a way to delimit the
-- body (either chunking or an explicit Content-Length); chunking is
-- only needed when there is no Content-Length.
infoFromResponse :: IndexedHeader -> (Bool,Bool) -> (Bool,Bool)
infoFromResponse rspidxhdr (isPersist,isChunked) = (isKeepAlive, needsChunked)
  where
    needsChunked = isChunked && not hasLength
    isKeepAlive = isPersist && (isChunked || hasLength)
    hasLength = isJust $ rspidxhdr ! fromEnum ResContentLength
----------------------------------------------------------------
-- | May a response with this status carry a message body?
-- Bodiless statuses are 1xx (informational, < 200), 204 No Content and
-- 304 Not Modified.
hasBody :: H.Status -> Bool
hasBody status = code >= 200 && code /= 204 && code /= 304
  where
    code = H.statusCode status
----------------------------------------------------------------
-- | Prepend \"Transfer-Encoding: chunked\". The CPP split only selects
-- the header-name constant, which http-types exports from 0.9 on.
addTransferEncoding :: H.ResponseHeaders -> H.ResponseHeaders
#if MIN_VERSION_http_types(0,9,0)
addTransferEncoding hdrs = (H.hTransferEncoding, "chunked") : hdrs
#else
addTransferEncoding hdrs = ("transfer-encoding", "chunked") : hdrs
#endif
-- | Add a Date header, but only when the application did not set one;
-- the date is fetched lazily from the (cached) date supplier.
addDate :: IO D.GMTDate -> IndexedHeader -> H.ResponseHeaders -> IO H.ResponseHeaders
addDate getdate rspidxhdr hdrs = case rspidxhdr ! fromEnum ResDate of
    Nothing -> do
        gmtdate <- getdate
        return $ (H.hDate, gmtdate) : hdrs
    Just _ -> return hdrs
----------------------------------------------------------------
-- | The version of Warp, rendered from the cabal package version
-- (via the generated @Paths_warp@ module).
warpVersion :: String
warpVersion = showVersion Paths_warp.version
-- | Add a Server header with the given name, unless the application
-- already set one.
addServer :: HeaderValue -> IndexedHeader -> H.ResponseHeaders -> H.ResponseHeaders
addServer serverName rspidxhdr hdrs
  | isJust (rspidxhdr ! fromEnum ResServer) = hdrs
  | otherwise                               = (H.hServer, serverName) : hdrs
----------------------------------------------------------------
-- |
--
-- >>> replaceHeader "Content-Type" "new" [("content-type","old")]
-- [("Content-Type","new")]
-- | Prepend the given header, removing the first existing header with
-- the same (case-insensitive) name, if any. Later duplicates are kept,
-- matching 'deleteBy' semantics.
replaceHeader :: H.HeaderName -> HeaderValue -> H.ResponseHeaders -> H.ResponseHeaders
replaceHeader k v hdrs = (k, v) : deleteBy sameName (k, v) hdrs
  where
    sameName a b = fst a == fst b
----------------------------------------------------------------
-- | Render the response header as a 'Builder', inserting
-- \"Transfer-Encoding: chunked\" when the body will be chunked.
composeHeaderBuilder :: H.HttpVersion -> H.Status -> H.ResponseHeaders -> Bool -> IO Builder
composeHeaderBuilder ver s hs chunked =
    byteString <$> composeHeader ver s hs'
  where
    hs' | chunked   = addTransferEncoding hs
        | otherwise = hs
| utdemir/wai | warp/Network/Wai/Handler/Warp/Response.hs | mit | 16,931 | 0 | 21 | 3,929 | 3,381 | 1,785 | 1,596 | 260 | 9 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>DOM XSS Active Scan Rule | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/domxss/src/main/javahelp/org/zaproxy/zap/extension/domxss/resources/help_zh_CN/helpset_zh_CN.hs | apache-2.0 | 985 | 78 | 66 | 162 | 419 | 212 | 207 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Technology detection | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/wappalyzer/src/main/javahelp/org/zaproxy/zap/extension/wappalyzer/resources/help_hi_IN/helpset_hi_IN.hs | apache-2.0 | 981 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
module Qualified2 where
-- import qualified Control.Parallel.Strategies as T
import qualified Control.Parallel.Strategies as S
-- should fail, as there are two possible qualifiers...
fib n
| n <= 1 = 1
| otherwise = n1_2 + n2_2 + 1
where
n1 = fib (n-1)
n2 = fib (n-2)
(n1_2, n2_2)
= S.runEval
(do n1_2 <- S.rpar n1
n2_2 <- S.rpar n2
return (n1_2, n2_2))
n1_2 = "bob"
| RefactoringTools/HaRe | old/testing/evalAddEvalMon/Qualified2_TokOut.hs | bsd-3-clause | 487 | 0 | 13 | 187 | 143 | 75 | 68 | 13 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ur-PK">
<title>AJAX Spider | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | ccgreen13/zap-extensions | src/org/zaproxy/zap/extension/spiderAjax/resources/help_ur_PK/helpset_ur_PK.hs | apache-2.0 | 974 | 80 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
-- This one elicited a bug in the simplifier
-- that produces a Lint out-of-scope error
module T4345 where
-- | Dummy action; only its type matters for this regression test.
isNull :: IO Bool
isNull = error "urk"
-- | Regression fixture (T4345): keep the exact shape of the let-bound
-- 'loop' and the 'undefined' binds — "cleaning up" this code would
-- defeat the purpose of the test, which is to exercise the simplifier.
wrapMatchAll :: IO (Maybe ())
wrapMatchAll = do
  nsub <- undefined
  let loop True = do atEnd <- isNull
                     return Nothing
      loop False = loop False
  result <- undefined
  loop undefined
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/simplCore/should_compile/T4345.hs | bsd-3-clause | 365 | 0 | 12 | 102 | 99 | 47 | 52 | 11 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Cric.PackagesSpec (
test
) where
import Test.Hspec
import SpecHelpers
import Cric
import Cric.Packages
-- | Specs for 'Cric.Packages': package-manager detection plus install
-- and remove behaviour, all exercised against a mocked SSH session
-- ('mockCommand' maps a command prefix to an (exit code, output) pair).
test :: Spec
test = do
  describe "getPackageManager" $ do
    -- Detection probes for each manager binary via `which`.
    it "detects RPM" $ do
      let sshMock = mockCommand "which rpm" (0, "/bin/rpm") defaultSshMock
      result <- testCricWith sshMock getPackageManager
      result `shouldBe` RPM
    it "detects Yum" $ do
      let sshMock = mockCommand "which yum" (0, "/bin/yum") defaultSshMock
      result <- testCricWith sshMock getPackageManager
      result `shouldBe` Yum
    it "detects APT" $ do
      let sshMock = mockCommand "which apt-get" (0, "/bin/apt-get") defaultSshMock
      result <- testCricWith sshMock getPackageManager
      result `shouldBe` APT
    it "returns UnknownPackageManager if it can't find anything" $ do
      result <- testCric getPackageManager
      result `shouldBe` UnknownPackageManager
  describe "installPackage" $ do
    it "uses the package manager found" $ do
      let mock = mockCommand "which apt-get" (0, "/bin/apt-get")
                 . mockCommand "apt-get install" (0, "apt-get called")
                 $ defaultSshMock
      result <- testCricWith mock $ installPackage ("haskell-platform" :: String)
      result `shouldBe` Right "apt-get called"
    context "when it can't find a package manager" $ do
      it "returns a NoPackageManagerFound error" $ do
        result <- testCric $ installPackage ("haskell-platform" :: String)
        result `shouldBe` Left NoPackageManagerFound
    context "when the installation fails" $ do
      it "returns an error" $ do
        let mock = mockCommand "which rpm" (0, "/bin/rpm")
                   . mockCommand "rpm -i" (1, "installation failed")
                   $ defaultSshMock
        result <- testCricWith mock $ installPackage ("haskell-platform" :: String)
        result `shouldBe`
          Left (UnknownPkgManagerError $ Failure 1 "installation failed" "")
  describe "removePackage" $ do
    it "uses the package manager found" $ do
      let mock = mockCommand "which apt-get" (0, "/bin/apt-get")
                 . mockCommand "apt-get remove" (0, "apt-get called")
                 $ defaultSshMock
      result <- testCricWith mock $ removePackage ("haskell-platform" :: String)
      result `shouldBe` Right "apt-get called"
    context "when it can't find a package manager" $ do
      it "returns a NoPackageManagerFound error" $ do
        result <- testCric $ removePackage ("haskell-platform" :: String)
        result `shouldBe` Left NoPackageManagerFound
    context "when the removal fails" $ do
      it "returns an error" $ do
        let mock = mockCommand "which rpm" (0, "/bin/rpm")
                   . mockCommand "rpm -e" (1, "removal failed")
                   $ defaultSshMock
        result <- testCricWith mock $ removePackage ("haskell-platform" :: String)
        result `shouldBe`
          Left (UnknownPkgManagerError $ Failure 1 "removal failed" "")
| thoferon/cric | tests/Cric/PackagesSpec.hs | mit | 3,007 | 0 | 22 | 781 | 744 | 358 | 386 | 63 | 1 |
module Main where
import Control.Monad
import Control.AutoUpdate
-- | Demonstrate 'mkAutoUpdate': request the action as fast as possible
-- and let auto-update rate-limit the actual "hello, world" prints.
main :: IO ()
main = do
  update <- mkAutoUpdate (putStrLn "hello, world")
  forever update
| arianvp/ghcjs-auto-update | example/Main.hs | mit | 155 | 0 | 11 | 33 | 42 | 21 | 21 | 7 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGPathSegCurvetoQuadraticAbs
(setX, getX, setY, getY, setX1, getX1, setY1, getY1,
SVGPathSegCurvetoQuadraticAbs(..),
gTypeSVGPathSegCurvetoQuadraticAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- NOTE(review): machine-generated jsaddle-dom bindings (getters/setters
-- proxying the JS properties x, y, x1, y1); fix the generator rather
-- than hand-editing this file.
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.x Mozilla SVGPathSegCurvetoQuadraticAbs.x documentation>
setX ::
     (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> Float -> m ()
setX self val = liftDOM (self ^. jss "x" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.x Mozilla SVGPathSegCurvetoQuadraticAbs.x documentation>
getX :: (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> m Float
getX self
  = liftDOM (realToFrac <$> ((self ^. js "x") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.y Mozilla SVGPathSegCurvetoQuadraticAbs.y documentation>
setY ::
     (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> Float -> m ()
setY self val = liftDOM (self ^. jss "y" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.y Mozilla SVGPathSegCurvetoQuadraticAbs.y documentation>
getY :: (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> m Float
getY self
  = liftDOM (realToFrac <$> ((self ^. js "y") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.x1 Mozilla SVGPathSegCurvetoQuadraticAbs.x1 documentation>
setX1 ::
      (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> Float -> m ()
setX1 self val = liftDOM (self ^. jss "x1" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.x1 Mozilla SVGPathSegCurvetoQuadraticAbs.x1 documentation>
getX1 :: (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> m Float
getX1 self
  = liftDOM (realToFrac <$> ((self ^. js "x1") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.y1 Mozilla SVGPathSegCurvetoQuadraticAbs.y1 documentation>
setY1 ::
      (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> Float -> m ()
setY1 self val = liftDOM (self ^. jss "y1" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegCurvetoQuadraticAbs.y1 Mozilla SVGPathSegCurvetoQuadraticAbs.y1 documentation>
getY1 :: (MonadDOM m) => SVGPathSegCurvetoQuadraticAbs -> m Float
getY1 self
  = liftDOM (realToFrac <$> ((self ^. js "y1") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGPathSegCurvetoQuadraticAbs.hs | mit | 3,385 | 0 | 12 | 405 | 780 | 452 | 328 | 44 | 1 |
module ZoomHub.Web.Types.EmbedConstraint
( EmbedConstraint (..),
)
where
import Data.Bifunctor (first)
import qualified Data.Text as T
import Servant (FromHttpApiData, parseUrlPiece)
import ZoomHub.Web.Types.OpenSeadragonViewerConfig (Constraint)
import qualified ZoomHub.Web.Types.OpenSeadragonViewerConfig as Constraint
-- Type
-- | Embed query parameter wrapping the OpenSeadragon viewer
-- 'Constraint' (accepted values: \"zoom\" and \"full\"; see 'parse').
newtype EmbedConstraint = EmbedConstraint {unEmbedConstraint :: Constraint}
  deriving (Eq, Show)
-- | Parse the embed constraint value. Only \"zoom\" and \"full\" are
-- accepted; anything else yields a descriptive 'Left'.
parse :: String -> Either String EmbedConstraint
parse "zoom" = Right (EmbedConstraint Constraint.Zoom)
parse "full" = Right (EmbedConstraint Constraint.Full)
parse other  = Left ("Invalid value: " <> other)
-- Text
-- | Delegates to 'parse', converting the URL piece from 'T.Text' to
-- 'String' and mapping the error message back to 'T.Text'.
instance FromHttpApiData EmbedConstraint where
  parseUrlPiece p = first T.pack $ parse . T.unpack $ p
| zoomhub/zoomhub | src/ZoomHub/Web/Types/EmbedConstraint.hs | mit | 773 | 0 | 10 | 116 | 200 | 116 | 84 | 17 | 3 |
{- provides 'setupGUI' the main gui initialization
function. (using the module Language.Astview.Menu to build the menu bar)
-
-}
module Language.Astview.Gui.Init(setupGUI) where
import Language.Astview.Gui.Types
import Language.Astview.Gui.Actions
import Language.Astview.Gui.Menu
import Language.Astview.Languages(languages)
import Control.Monad.Trans (liftIO)
import Data.IORef
import System.FilePath ((</>))
import Graphics.UI.Gtk hiding (Language)
import Graphics.UI.Gtk.SourceView
import Paths_astview (getDataFileName)
-- | Builds the initial 'GUI' record from the builder file. The widget
-- ids \"mainWindow\", \"treeview\" and \"swSource\" must exist in the
-- loaded XML; the scrolled window is populated with a fresh source view.
builderToGui :: Builder -> IO GUI
builderToGui builder = do
  win <- builderGetObjectStr builder castToWindow "mainWindow"
  treeview <- builderGetObjectStr builder castToTreeView "treeview"
  tb <- buildSourceView =<< builderGetObjectStr builder castToScrolledWindow "swSource"
  return $ GUI win treeview tb
-- | Create the initial program state — default values with the known
-- languages filled in, plus the GUI built from the builder — and wrap
-- it in an 'IORef'.
buildState :: Builder -> IO (IORef AstState)
buildState builder = do
  gui <- builderToGui builder
  let initialState = defaultValue { knownLanguages = languages }
  newIORef (AstState initialState gui defaultValue)
-- | Initialises GTK, loads the UI description, wires up the menu and
-- event hooks, and returns the initial program state.
setupGUI :: IO (IORef AstState)
setupGUI = do
  initGUI
  builder <- builderNew
  -- The UI layout ships as a data file with the package.
  builderAddFromFile builder =<< getDataFileName ("data" </> "astview.xml")
  r <- buildState builder
  initMenu builder r
  hooks r
  return r
-- | Set up the GtkSourceView (syntax highlighting, line numbers,
-- current-line highlight, default font) and add it to the scroll pane.
-- Returns the underlying text buffer.
buildSourceView :: ScrolledWindow -> IO SourceBuffer
buildSourceView sw = do
  sourceBuffer <- sourceBufferNew Nothing
  sourceBufferSetHighlightSyntax sourceBuffer True
  sourceView <- sourceViewNewWithBuffer sourceBuffer
  sourceViewSetShowLineNumbers sourceView True
  sourceViewSetHighlightCurrentLine sourceView True
  -- Font name and size come from the default settings record.
  srcfont <- fontDescriptionFromString $ font defaultValue ++" "++show (fsize defaultValue)
  widgetModifyFont sourceView (Just srcfont)
  containerAdd sw sourceView
  return sourceBuffer
-- ** hooks
-- | Connects the event handlers below to the widgets stored in 'GUI':
-- the source buffer, the tree view and the main window.
hooks :: AstAction (ConnectId Window)
hooks ref = do
  textbuffer <- getSourceBuffer ref
  storeLastActiveTextPosition textbuffer ref
  tree <- getTreeView ref
  storeLastActiveTreePosition tree ref
  win <- getWindow ref
  closeAstviewOnWindowClosed win ref
  close win ref
-- | A hook attaches a signal handler to a widget, in the 'AstAction'
-- context, returning the connection id.
type Hook a = a -> AstAction (ConnectId a)
-- |stores the last active cursor position in text to the program state
-- (runs on every buffer change, after the generic change action)
storeLastActiveTextPosition :: Hook SourceBuffer
storeLastActiveTextPosition buffer ref = buffer `on` bufferChanged $ do
    actionBufferChanged ref
    cp <- getCursorPosition ref
    setCursor cp ref
-- |stores the path to the last selected tree cell to the program state
storeLastActiveTreePosition :: Hook TreeView
storeLastActiveTreePosition tree ref =
  tree `on` cursorChanged $ do
    (p,_) <- treeViewGetCursor tree
    setTreePath p ref
-- |softly terminate application on main window closed
-- (runs the quit action, which may e.g. prompt for unsaved changes)
closeAstviewOnWindowClosed :: Hook Window
closeAstviewOnWindowClosed w ref =
  w `on` deleteEvent $ tryEvent $ liftIO $ actionQuit ref
-- |terminate application on main window destroyed
close :: Hook Window
close w _ = w `on` objectDestroy $ mainQuit
| jokusi/Astview | src/gui/Language/Astview/Gui/Init.hs | mit | 3,295 | 0 | 11 | 519 | 740 | 366 | 374 | 68 | 1 |
{-# LANGUAGE
StandaloneDeriving,
DeriveFunctor,
FlexibleInstances,
KindSignatures,
ConstraintKinds,
MultiParamTypeClasses,
NoImplicitPrelude
#-}
module OctoTactics.Util.ImprovedPrelude (
module Prelude,
module OctoTactics.Util.ImprovedPrelude,
module OctoTactics.Util.Combinators
) where
import Prelude hiding ((<$>))
import Data.Set (Set)
import qualified Data.Set as Set
import OctoTactics.Util.Combinators
import OctoTactics.Util.Class
-- | 'Set.map' needs an 'Ord' constraint on the result element, hence
-- this overlapping, result-constrained instance.
instance {-# OVERLAPPING #-} Ord b => Functor' Set a b where
    fmap' = Set.map
| Solonarv/OctoTactics | OctoTactics/Util/ImprovedPrelude.hs | mit | 573 | 0 | 6 | 109 | 96 | 62 | 34 | 19 | 0 |
module Main where
import Control.Monad.Reader (runReaderT)
import qualified Data.Set as Set
import Web.Twitter.Conduit (newManager, tlsManagerSettings)
import Control.Monad.Base (MonadBase, liftBase)
import Web.Twitter.PleaseCaption.Config (getTWInfo)
import qualified Web.Twitter.PleaseCaption.Client as Client
main :: IO ()
main = do
  putStrLn "pleasecaption is pruning follow list"
  mgr <- newManager tlsManagerSettings
  twinfo <- getTWInfo
  let client = Client.Client { Client.twInfo = twinfo, Client.manager = mgr }
  flip runReaderT client $ do
    uid <- Client.getUserId
    following <- Set.fromList <$> Client.getFollowees uid
    followers <- Set.fromList <$> Client.getFollowers uid
    -- Unfollow exactly the accounts we follow that don't follow back.
    let toUnfollow = Set.toList $ following `Set.difference` followers
    liftBase $ putStrLn $ "unfollowing " ++ show (length toUnfollow) ++ " users"
    mapM_ Client.unfollowUser toUnfollow
  putStrLn "all done"
| stillinbeta/pleasecaption | exe/Main-unfollow.hs | mit | 917 | 0 | 15 | 145 | 264 | 139 | 125 | 21 | 1 |
module Options ( ColourSpace(..),
Settings(..),
getSettings ) where
-- This has been cribbed from:
-- http://www.haskell.org/haskellwiki/High-level_option_handling_with_GetOpt
-- At the current stage of my Haskell development, this is basically
-- black magic to me :P I'll figure it out eventually!
import Data.Ratio
import System.Console.GetOpt
import System.Environment (getArgs)
import System.Exit
import System.FilePath (replaceExtension)
import System.IO (hPutStrLn, stderr)
-- | Output colour handling: follow the input (Adaptive), or force
-- greyscale/colour via the @--grey@ / @--colour@ flags.
data ColourSpace = Adaptive | Greyscale | Colour
-- | Everything gathered from the command line.
data Settings = Settings { inputFile :: FilePath,         -- ^ image to convert
                           paletteFile :: Maybe FilePath, -- ^ optional palette description file
                           dpiScale :: Rational,          -- ^ output-DPI / input-DPI ratio (see 'options')
                           outputFile :: FilePath,        -- ^ where to write the result
                           colourSpace :: ColourSpace }   -- ^ colour handling mode
-- | Print the usage text (derived from 'options') and terminate the
-- program with the given exit code.
settingsHelp :: ExitCode -> IO a
settingsHelp status =
  putStrLn (usageInfo "Usage: tapestry [OPTIONS] FILENAME\n" options) >> exitWith status
-- | Command-line option table. Each option is an action that updates
-- the accumulated 'Settings'; dpi-in divides and dpi-out multiplies
-- 'dpiScale', so applying both yields the output/input DPI ratio.
-- NOTE(review): @read x :: Integer@ on the DPI arguments is partial and
-- will crash on non-numeric input — consider 'Text.Read.readMaybe'.
options :: [OptDescr (Settings -> IO Settings)]
options =
  [ Option "p" ["palette"]
      (ReqArg (\x i -> return i { paletteFile = Just x }) "FILENAME")
      "Palette description file",
    Option "a" ["dpi-in"]
      (ReqArg (\x i -> let scale = dpiScale i in
                         return i { dpiScale = scale * (1 % (read x :: Integer)) }) "DPI")
      "Input resolution",
    Option "b" ["dpi-out"]
      (ReqArg (\x i -> let scale = dpiScale i in
                         return i { dpiScale = scale * ((read x :: Integer) % 1) }) "DPI")
      "Output resolution",
    Option "o" ["output"]
      (ReqArg (\x i -> return i { outputFile = x }) "FILENAME")
      "Output file",
    Option [] ["grey"]
      (NoArg (\i -> return i { colourSpace = Greyscale }))
      "Force greyscale",
    Option [] ["colour"]
      (NoArg (\i -> return i { colourSpace = Colour }))
      "Force colour",
    Option "h" ["help"]
      (NoArg (\_ -> settingsHelp ExitSuccess))
      "Show this help...useful, huh?" ]
-- | Parse the command line into a 'Settings' record.
--
-- Fix: the error component of 'getOpt' was previously discarded, so
-- malformed options were silently ignored. They are now printed to
-- stderr before the usage text is shown. Exits with failure when no
-- input file is given or any option is invalid.
getSettings :: IO Settings
getSettings = do
  args <- getArgs
  let (actions, inputFiles, errors) = getOpt Permute options args
  if not (null errors) then do
    -- Report every bad option, then show usage and exit non-zero.
    mapM_ (hPutStrLn stderr) errors
    settingsHelp $ ExitFailure 1
  else if null inputFiles then
    settingsHelp $ ExitFailure 1
  else do
    let filename = head inputFiles
        defaults = Settings { inputFile = filename,
                              paletteFile = Nothing,
                              dpiScale = 1,
                              outputFile = replaceExtension filename "html",
                              colourSpace = Adaptive }
    -- Thread the settings record through each option action, in order.
    foldl (>>=) (return defaults) actions
| Xophmeister/tapestry | Options.hs | mit | 2,544 | 0 | 20 | 810 | 686 | 375 | 311 | 57 | 2 |
{-# htermination (fmapMaybe :: (a -> b) -> Maybe a -> Maybe b) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Maybe a = Nothing | Just a ;
-- | Functor map over the locally defined 'Maybe': apply the function
-- under 'Just'; propagate 'Nothing' unchanged.
fmapMaybe :: (c -> b) -> Maybe c -> Maybe b
fmapMaybe _ Nothing  = Nothing
fmapMaybe g (Just v) = Just (g v)
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/fmap_1.hs | mit | 322 | 0 | 8 | 85 | 114 | 63 | 51 | 7 | 1 |
{-# LANGUAGE BangPatterns #-}
module Maze (renderGrid, genMaze) where
import Control.Monad.State
import Linear
import System.Random
-- | Alias for the maze generator
type MazeGen a = State (StdGen, Grid) a
-- | The grid is a collection of all non-wall pieces
type Grid = [Point]
-- | A position inside the grid type
type Point = V2 Integer
-- | The grid size
type Size = V2 Integer
-- | Render the grid to a list of lines
-- | Render the grid as text, one output row per y coordinate.  The ranges
-- run from -1 to the size inclusive, so a one-cell wall border surrounds
-- the maze.
renderGrid :: String -- ^ What 'String' to use to render a wall
           -> String -- ^ What 'String' to use to render an empty space/path
           -> Size -- ^ The grid size
           -> Grid -- ^ Input grid
           -> [String] -- ^ Output lines
renderGrid wall empty (V2 sx sy) grid = map row [-1..sy]
    where
        -- One rendered text row for a fixed y.
        row y = concatMap (cell . flip V2 y) [-1..sx]
        -- Wall or path marker for a single position.
        cell p
            | p `elem` grid = empty
            | otherwise     = wall
-- | Generat a maze of the given size
-- | Generate a maze of the given size.  Starts from the centre cell and
-- recursively carves passages in a randomly shuffled order (a
-- recursive-backtracker style walk over the 'State' of generator + grid).
genMaze :: Size -- ^ The grid size
        -> StdGen -- ^ Random number generator
        -> Grid -- ^ Generated grid
genMaze size g = snd $ execState (let p = (round <$> (fromIntegral <$> size) / 2) in iter p p) (g,[])
    where
        -- Carve from wall cell p' into cell p, then recurse in a freshly
        -- shuffled direction order.  Each step adds both the intermediate
        -- cell and the target cell to the grid of open cells.
        iter :: Point -> Point -> MazeGen ()
        iter p' p = do
            (gen, !grid) <- get
            when (valid grid p) $ do
                let (ndirs, gen') = shuffle gen dirs
                put (gen', p:p':grid)
                forM_ ndirs $ \d -> iter (p+d) (p+d*2)
        -- | All four walkable directions
        dirs :: [Point]
        dirs = [ V2 1 0
               , V2 (-1) 0
               , V2 0 1
               , V2 0 (-1)
               ]
        -- | Check if this point is inside the grid and not already used
        -- (note: 'notElem' on a list makes this O(n) per query).
        valid :: Grid -> Point -> Bool
        valid grid p = inside p && p `notElem` grid
        -- | Check if this Point lies inside the grid
        inside :: Point -> Bool
        inside (V2 x y) = let V2 sx sy = size
                          in and [ x < sx, x >= 0
                                 , y < sy, y >= 0 ]
-- | shuffle a list
-- | Produce a random permutation of the list, returning the shuffled list
-- together with the advanced generator.  Works by repeatedly extracting a
-- uniformly chosen element and shuffling the remainder.
shuffle :: RandomGen g => g -> [a] -> ([a], g)
shuffle g [] = ([], g)
shuffle g xs =
    let (idx, g')            = randomR (0, length xs - 1) g
        (front, pick : back) = splitAt idx xs
        (shuffled, g'')      = shuffle g' (front ++ back)
    in (pick : shuffled, g'')
| TomSmeets/maze | src/Maze.hs | mit | 2,276 | 0 | 17 | 771 | 742 | 404 | 338 | 46 | 2 |
import Data.List (foldl')
import Utils
-- | Project Euler style search: pair every number in the interval with the
-- sum of the factorials of its digits, and keep the numbers equal to that
-- sum.  The result is the list of matching (n, n) pairs.
answer = goodPairs
  where interval = [3..10000000]
        asDigits = map numberToDigits interval
        -- Strict left fold: the original lazy 'foldl' accumulated one
        -- thunk per digit across a ten-million-number search space.
        sumFactorials = foldl' (\acc d -> acc + factorial d) 0
        sums = map sumFactorials asDigits
        pairs = zip interval sums
        goodPairs = filter (\(a,b) -> a==b) pairs
| arekfu/project_euler | p0034/p0034.hs | mit | 338 | 0 | 13 | 121 | 112 | 61 | 51 | 8 | 1 |
module Account5 where
import System.IO
import Control.Concurrent.STM
-- | The irrevocable side effect of this example: write to stdout.
launchMissiles :: IO ()
launchMissiles = putStr "Zzzing!"
-- | Create two transactional variables and launch the missiles when the
-- first holds a larger value than the second.
--
-- The comparison runs inside a single STM transaction; the 'IO' action is
-- performed only after the transaction commits.  The original code called
-- 'launchMissiles' (an 'IO' action) *inside* 'atomically', which does not
-- typecheck: 'STM' deliberately forbids irrevocable I/O, since a
-- transaction may be retried.
main :: IO ()
main = do
    xv <- atomically (newTVar 2)
    yv <- atomically (newTVar 1)
    shouldLaunch <- atomically (do x <- readTVar xv
                                   y <- readTVar yv
                                   return (x > y))
    if shouldLaunch then launchMissiles
                    else return ()
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TupleSections #-}
module Main where
import Debian.Control.ByteString
import Debian.Relation
import Data.Graph.Inductive
import Data.Tree
import Data.Set (fromList, member)
import Data.List (find, intercalate, sortOn)
import Data.Maybe
import Data.Either
import System.IO
import System.Console.CmdArgs.Implicit
import qualified Data.ByteString.Char8 as B
#if !MIN_VERSION_base(4,10,0)
fromRight :: b -> Either a b -> b
fromRight d = either (const d) id
#endif
type Package = Paragraph
type FieldValue = B.ByteString
type PackageName = FieldValue
-- | Output mode: print only the roots of the dependency forest, or draw
-- the entire forest.
data Style = Roots | Forest
             deriving (Show, Data, Typeable)
-- | Command-line options: which dpkg status file to read, and the chosen
-- output style.
data Options = Options
    { statusFile :: String
    , style :: Style }
    deriving (Show, Data, Typeable)
-- | cmdargs specification: one positional STATUSFILE argument (defaulting
-- to the system dpkg database) plus the alternative --roots/--forest
-- style flags, Roots being the default.
options :: Options
options = Options
  { statusFile = def &= typ "STATUSFILE" &= argPos 0 &= opt "/var/lib/dpkg/status"
  , style = enum [Roots &= help "Show dependency roots (default)", Forest &= help "Show dependency forest"]
      &= groupname "Options" }
  &= program "DependencyRoots"
  &= summary "DependencyRoots v0.5"
  &= details ["STATUSFILE defaults to /var/lib/dpkg/status"]
-- | Parse the dpkg status file named on the command line and print either
-- the dependency roots or the full dependency forest, depending on the
-- selected style.  Parse failures are reported on stderr.
main :: IO ()
main = do
  args <- cmdArgs options
  parseControlFromFile (statusFile args)
    >>= either (putErr "Parse error") (putDeps (style args) . packageDeps)
  where putDeps style = case style of
          Roots -> putRoots graphRoots showAlts
          Forest -> putRoots graphForest showTree
        showTree = drawTree
        -- Alternative roots are flattened onto one line, '|'-separated.
        showAlts = intercalate "|" . flatten
-- | Report an error on stderr in the form "&lt;msg&gt;: &lt;detail&gt;".
putErr :: Show e => String -> e -> IO ()
putErr msg err = hPutStrLn stderr (msg ++ ": " ++ show err)
-- | Build the dependency graph from the per-package name lists, extract a
-- forest with the supplied function, render each tree with the supplied
-- formatter, and print the results sorted by root label.
putRoots :: (Gr String () -> Forest String) -> (Tree String -> String) -> [[String]] -> IO ()
putRoots fRoots fShow = mapM_ (putStrLn . fShow) . sortForest . fRoots . makeGraph
  where sortForest = sortOn rootLabel
-- | For each root of a depth-first forest (taken in topological order),
-- the tree of its ancestors obtained by a reverse depth-first search --
-- i.e. the alternative roots that can reach it.  Nodes are replaced by
-- their labels.
-- NOTE(review): 'fromJust' and 'head' are safe only while every node id
-- produced by the searches is present in g, which dff/rdff guarantee here.
graphRoots :: Gr a b -> Forest a
graphRoots g = map labelAlts alternates
  where forest = dff (topsort g) g
        alternates = map (ancestors . rootLabel) forest
        ancestors n = head $ rdff [n] g
        labelAlts = fmap (fromJust . lab g)
-- | A depth-first spanning forest of the whole graph (roots in
-- topological order), with every node id replaced by its label.
graphForest :: Gr a b -> Forest a
graphForest g = map relabel (dff (topsort g) g)
  where relabel = fmap (fromJust . lab g)  -- total: every id comes from g
-- | Build a graph whose nodes are the package names (the head of each
-- list) with an edge from a package to each of its listed dependencies.
-- NOTE(review): both 'head' and the 'mkEdges' fallback assume every inner
-- list is non-empty; an empty list aborts via 'error'.
makeGraph :: [[String]] -> Gr String ()
makeGraph deps = fst $ mkMapGraph nodes edges
  where nodes = map head deps
        edges = concatMap mkEdges deps
        mkEdges (n : sucs) = map (n,, ()) sucs
        mkEdges _ = error "Empty deps"
-- | For every installed package: its (possibly architecture-qualified)
-- name followed by the names of those of its dependencies that are
-- themselves installed.
packageDeps :: Control -> [[String]]
packageDeps c = map mkDeps pkgs
  where pkgs = filter pkgIsInstalled . unControl $ c
        names = fromList . map extName $ pkgs
        mkDeps p = extName p : filter installed (pkgDeps p)
        installed name = name `member` names
        -- Qualify the name with ":<arch>" unless the package is arch
        -- "all" or matches the base architecture.
        extName p = if a /= baseArch && a /= "all" then n ++ ':' : a else n
          where n = pkgName p
                a = pkgArch p
        -- The base architecture is read off the "base-files" package;
        -- falls back to "" when that package is absent.
        baseArch = maybe "" pkgArch $ find (\p -> pkgName p == "base-files") pkgs
-- | The stanza's "Package" field, or "Unnamed" when the field is absent.
pkgName :: Package -> String
pkgName p = maybe "Unnamed" B.unpack (fieldValue "Package" p)
-- | The stanza's "Architecture" field, or the empty string when absent.
pkgArch :: Package -> String
pkgArch p = maybe "" B.unpack (fieldValue "Architecture" p)
-- | True when the stanza's "Status" field marks the package as installed.
-- NOTE(review): assumes the dpkg Status value has at least three
-- space-separated words (e.g. "install ok installed"); '!! 2' is partial
-- and will crash on a malformed field.
pkgIsInstalled :: Package -> Bool
pkgIsInstalled = maybe False isInstalled . fieldValue "Status"
  where isInstalled v = parseStatus v !! 2 == B.pack "installed"
        parseStatus = B.split ' ' . stripWS
#if !MIN_VERSION_debian(3,64,0)
unBinPkgName = id
#elif !MIN_VERSION_debian(3,69,0)
unBinPkgName_ = unPkgName . unBinPkgName
#define unBinPkgName unBinPkgName_
#endif
-- | Names of a package's direct Depends and Recommends.  Only the first
-- alternative of each relation is kept; unparsable fields yield no names.
-- NOTE(review): 'head' assumes every parsed relation has at least one
-- alternative, which the Debian relation grammar provides.
pkgDeps :: Package -> [String]
pkgDeps p = names "Depends" ++ names "Recommends"
  where field = B.unpack . fromMaybe B.empty . flip fieldValue p
        rels = fromRight [] . parseRelations . field
        names = map (relName . head) . rels
        relName (Rel name _ _) = unBinPkgName name
| neilmayhew/RepoExplorer | DependencyRoots.hs | mit | 3,900 | 0 | 14 | 902 | 1,284 | 663 | 621 | 90 | 2 |
{-
Copyright (c) 2008
Russell O'Connor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
-- |Standard illuminants defined by the International Commission on
-- Illumination (CIE).
module Data.Colour.CIE.Illuminant where
import Data.Colour.CIE.Chromaticity
-- The (x, y) pairs below are the chromaticity coordinates of each CIE
-- standard illuminant (presumably for the CIE 1931 2-degree observer --
-- confirm against the colour package documentation).
-- |Incandescent \/ Tungsten
a :: (Fractional a) => Chromaticity a
a = mkChromaticity 0.44757 0.40745
-- |{obsolete} Direct sunlight at noon
b :: (Fractional a) => Chromaticity a
b = mkChromaticity 0.34842 0.35161
-- |{obsolete} Average \/ North sky Daylight
c :: (Fractional a) => Chromaticity a
c = mkChromaticity 0.31006 0.31616
-- |Horizon Light. ICC profile PCS
d50 :: (Fractional a) => Chromaticity a
d50 = mkChromaticity 0.34567 0.35850
-- |Mid-morning \/ Mid-afternoon Daylight
d55 :: (Fractional a) => Chromaticity a
d55 = mkChromaticity 0.33242 0.34743
-- |Noon Daylight: Television, sRGB color space
d65 :: (Fractional a) => Chromaticity a
d65 = mkChromaticity 0.31271 0.32902
-- |North sky Daylight
d75 :: (Fractional a) => Chromaticity a
d75 = mkChromaticity 0.29902 0.31485
-- |Equal energy
e :: (Fractional a) => Chromaticity a
e = mkChromaticity (1/3) (1/3)
-- |Daylight Fluorescent
f1 :: (Fractional a) => Chromaticity a
f1 = mkChromaticity 0.31310 0.33727
-- |Cool White Fluorescent
f2 :: (Fractional a) => Chromaticity a
f2 = mkChromaticity 0.37208 0.37529
-- |White Fluorescent
f3 :: (Fractional a) => Chromaticity a
f3 = mkChromaticity 0.40910 0.39430
-- |Warm White Fluorescent
f4 :: (Fractional a) => Chromaticity a
f4 = mkChromaticity 0.44018 0.40329
-- |Daylight Fluorescent
f5 :: (Fractional a) => Chromaticity a
f5 = mkChromaticity 0.31379 0.34531
-- |Lite White Fluorescent
f6 :: (Fractional a) => Chromaticity a
f6 = mkChromaticity 0.37790 0.38835
-- |D65 simulator, Daylight simulator
f7 :: (Fractional a) => Chromaticity a
f7 = mkChromaticity 0.31292 0.32933
-- |D50 simulator, Sylvania F40 Design 50
f8 :: (Fractional a) => Chromaticity a
f8 = mkChromaticity 0.34588 0.35875
-- |Cool White Deluxe Fluorescent
f9 :: (Fractional a) => Chromaticity a
f9 = mkChromaticity 0.37417 0.37281
-- |Philips TL85, Ultralume 50
f10 :: (Fractional a) => Chromaticity a
f10 = mkChromaticity 0.34609 0.35986
-- |Philips TL84, Ultralume 40
f11 :: (Fractional a) => Chromaticity a
f11 = mkChromaticity 0.38052 0.37713
-- |Philips TL83, Ultralume 30
f12 :: (Fractional a) => Chromaticity a
f12 = mkChromaticity 0.43695 0.40441
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -fdefer-type-errors #-}
module Handler.GameJson where
import Import
import Control.Lens (makeLenses, (^.), (^?), (.~), (&))
import qualified Data.Aeson.Lens as AL (key, _Bool, _Integer)
import Data.Aeson.TH (deriveJSON, defaultOptions, fieldLabelModifier)
import qualified Data.List as L (foldl)
import Data.Time.LocalTime (zonedTimeToUTC)
import Jabara.Persist.Util (toKey, toRecord)
import Jabara.Util (omittedFirstCharLower)
import Minibas.Util (buildGameData)
import ModelDef (Quarter(..), mapQuartersM)
-- | GET endpoint: every game, ordered by date, with its league, both
-- teams and all score rows assembled into 'GameData' values.
getGameIndexR :: Handler [GameData]
getGameIndexR = runDB $ do
    games::[Entity Game] <- selectList [] [Asc GameDate]
    -- Bulk-load the related rows in three queries rather than one lookup
    -- per game.
    leagues::[Entity League] <- selectList [LeagueId <-. map (_gameLeague.toRecord) games] []
    scores::[Entity Score] <- selectList [ScoreGame <-. map toKey games] []
    teams::[Entity Team] <- selectList [TeamId <-. (map (_gameTeamA.toRecord) games)++(map (_gameTeamB.toRecord) games)] []
    mapM (buildGameData leagues teams scores) games
-- | JSON request body for creating a game: league name, game name and
-- place, and the two team names.  JSON field names drop the "_put" prefix
-- and lower-case the first letter (see 'omittedFirstCharLower' below).
data Put = Put {
    _putLeagueName :: Text
    , _putGameName :: Text
    , _putGamePlace :: Text
    , _putTeamAName :: Text
    , _putTeamBName :: Text
    } deriving (Show, Eq, Read, Generic)
makeLenses ''Put
$(deriveJSON defaultOptions {
    fieldLabelModifier = omittedFirstCharLower "_put"
    } ''Put)
-- | PUT endpoint: create a game from a 'Put' body.  The league and both
-- teams are created on demand (looked up by their unique names), an empty
-- score row is inserted for every quarter, and the response is a 201
-- Created pointing at the new game's UI route.
putGameIndexR :: Handler ()
putGameIndexR = do
    put::Put <- requireJsonBody
    gameId <- runDB $ do
        leagueId <- insertIfNotExists (UniqueLeague $ put^.putLeagueName)
                                      (League $ put^.putLeagueName)
        teamAId <- insertIfNotExists (UniqueTeam $ put^.putTeamAName)
                                     (Team $ put^.putTeamAName)
        teamBId <- insertIfNotExists (UniqueTeam $ put^.putTeamBName)
                                     (Team $ put^.putTeamBName)
        now <- liftIO $ getCurrentTime
        game <- pure $ Game { _gameLeague = leagueId
                            , _gameName = put^.putGameName
                            , _gamePlace = put^.putGamePlace
                            , _gameTeamA = teamAId
                            , _gameTeamB = teamBId
                            , _gameDate = now
                            }
        -- This inner gameId deliberately shadows nothing outside the
        -- transaction; it is returned as the runDB result.
        gameId <- insert game
        _ <- mapQuartersM (\q -> insert $ emptyScore gameId q)
        pure gameId
    sendResponseCreated $ GameUiR gameId
-- | A fresh, unlocked 0-0 score row for one quarter of the given game.
emptyScore :: GameId -> Quarter -> Score
emptyScore g q = Score
    { _scoreGame       = g
    , _scoreQuarter    = q
    , _scoreTeamAPoint = 0
    , _scoreTeamBPoint = 0
    , _scoreLock       = False
    }
-- | Fetch the entity matching the unique key, inserting @creator@ when no
-- such record exists yet; either way, return the record's key.
insertIfNotExists :: (MonadIO m, PersistUnique (PersistEntityBackend val),
                      PersistEntity val) =>
                     Unique val -> val -> ReaderT (PersistEntityBackend val) m (Key val)
insertIfNotExists unique creator =
    getBy unique >>= maybe (insert creator) (pure . toKey)
-- | GET endpoint for a single game: 404 when the id is unknown, otherwise
-- the game with its league, both teams and all score rows.
getGameR :: GameId -> Handler GameData
getGameR gameId = runDB $ do
    game <- get404 gameId
    leagues <- selectList [LeagueId ==. game^.gameLeague] []
    teams <- selectList [TeamId <-. [game^.gameTeamA, game^.gameTeamB]] []
    scores <- selectList [ScoreGame ==. gameId] []
    buildGameData leagues teams scores (Entity gameId game)
-- | POST endpoint: overwrite an existing game and each of its score rows
-- with the values carried in the 'GameData' body.
postGameR :: GameId -> Handler ()
postGameR gameId = do
    req::GameData <- requireJsonBody
    runDB $ do
        replace gameId $ toGame req
        mapM_ (\score -> replace (score^.scoreDataId) (toScore score)) $ req^.gameDataScoreList
    where
        -- Convert the wire representation back into a database row; the
        -- zoned date is normalised to UTC for storage.
        toGame :: GameData -> Game
        toGame g = Game {
            _gameLeague = toKey $ g^.gameDataLeague
            , _gameName = g^.gameDataName
            , _gamePlace = g^.gameDataPlace
            , _gameTeamA = toKey $ g^.gameDataTeamA
            , _gameTeamB = toKey $ g^.gameDataTeamB
            , _gameDate = zonedTimeToUTC $ g^.gameDataDate
        }
        -- Score rows are re-parented onto the URL's gameId, not whatever
        -- game the body might claim.
        toScore :: ScoreData -> Score
        toScore s = Score {
            _scoreGame = gameId
            , _scoreQuarter = s^.scoreDataQuarter
            , _scoreTeamAPoint = s^.scoreDataTeamAPoint
            , _scoreTeamBPoint = s^.scoreDataTeamBPoint
            , _scoreLock = s^.scoreDataLock
        }
-- | DELETE endpoint: remove a game together with all of its per-quarter
-- score rows, in a single transaction.
deleteGameR :: GameId -> Handler ()
deleteGameR gameId =
    runDB (deleteWhere [ScoreGame ==. gameId] >> delete gameId)
-- NOTE(review): 'QuarterIndex' appears unused in this module.
type QuarterIndex = Int
-- PATCH endpoints, one route per quarter; all delegate to 'patchGameScore'.
patchGameScoreFirstR :: GameId -> Handler (Entity Score)
patchGameScoreFirstR gameId = patchGameScore gameId First
patchGameScoreSecondR :: GameId -> Handler (Entity Score)
patchGameScoreSecondR gameId = patchGameScore gameId Second
patchGameScoreThirdR :: GameId -> Handler (Entity Score)
patchGameScoreThirdR gameId = patchGameScore gameId Third
patchGameScoreFourthR :: GameId -> Handler (Entity Score)
patchGameScoreFourthR gameId = patchGameScore gameId Fourth
patchGameScoreExtraR :: GameId -> Handler (Entity Score)
patchGameScoreExtraR gameId = patchGameScore gameId Extra
-- | Partially update one quarter's score row.  The JSON body must be an
-- object (anything else answers 400 with the unmodified row); the keys
-- "lock", "teamAPoint" and "teamBPoint" are each applied only when
-- present, and the updated row is returned.
patchGameScore :: GameId -> Quarter -> Handler (Entity Score)
patchGameScore gameId quarter = do
    req::Value <- requireJsonBody
    eScore@(Entity key score) <- runDB $ getBy404 $ UniqueScore gameId quarter
    _ <- case req of
        Object _ -> pure req
        _ -> sendResponseStatus badRequest400 eScore
    -- One partial-update function per recognised key; missing keys leave
    -- the score unchanged.
    let ps::[(Score -> Score)] = [
              (\sc -> case req ^? AL.key "lock" . AL._Bool of
                  Nothing -> sc
                  Just b -> sc&scoreLock .~ b
              )
            , (\sc -> case req ^? AL.key "teamAPoint" . AL._Integer of
                  Nothing -> sc
                  Just p -> sc&scoreTeamAPoint .~ (fromInteger p)
              )
            , (\sc -> case req ^? AL.key "teamBPoint" . AL._Integer of
                  Nothing -> sc
                  Just p -> sc&scoreTeamBPoint .~ (fromInteger p)
              )
            ]
        score' = L.foldl (\sc f -> f sc) score ps
    _ <- runDB $ replace key score'
    pure $ Entity key score'
| jabaraster/minibas-web | Handler/GameJson.hs | mit | 6,539 | 0 | 18 | 2,178 | 1,833 | 951 | 882 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html
module Stratosphere.Resources.KMSKey where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for KMSKey. See 'kmsKey' for a more convenient
-- constructor.  'Maybe' fields are optional CloudFormation properties;
-- only the key policy is required.
data KMSKey =
  KMSKey
  { _kMSKeyDescription :: Maybe (Val Text)
  , _kMSKeyEnableKeyRotation :: Maybe (Val Bool)
  , _kMSKeyEnabled :: Maybe (Val Bool)
  , _kMSKeyKeyPolicy :: Object
  , _kMSKeyKeyUsage :: Maybe (Val Text)
  , _kMSKeyPendingWindowInDays :: Maybe (Val Integer)
  , _kMSKeyTags :: Maybe [Tag]
  } deriving (Show, Eq)
-- Serialise to an "AWS::KMS::Key" resource; 'catMaybes' drops every
-- optional property that was left unset, while "KeyPolicy" is always
-- emitted.
instance ToResourceProperties KMSKey where
  toResourceProperties KMSKey{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::KMS::Key"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("Description",) . toJSON) _kMSKeyDescription
        , fmap (("EnableKeyRotation",) . toJSON) _kMSKeyEnableKeyRotation
        , fmap (("Enabled",) . toJSON) _kMSKeyEnabled
        , (Just . ("KeyPolicy",) . toJSON) _kMSKeyKeyPolicy
        , fmap (("KeyUsage",) . toJSON) _kMSKeyKeyUsage
        , fmap (("PendingWindowInDays",) . toJSON) _kMSKeyPendingWindowInDays
        , fmap (("Tags",) . toJSON) _kMSKeyTags
        ]
    }
-- | Constructor for 'KMSKey' containing required fields as arguments.
-- Only the key policy is mandatory; every optional property starts as
-- 'Nothing' and can be set through the lenses below.
kmsKey
  :: Object -- ^ 'kmskKeyPolicy'
  -> KMSKey
kmsKey keyPolicyarg =
  KMSKey
  { _kMSKeyDescription = Nothing
  , _kMSKeyEnableKeyRotation = Nothing
  , _kMSKeyEnabled = Nothing
  , _kMSKeyKeyPolicy = keyPolicyarg
  , _kMSKeyKeyUsage = Nothing
  , _kMSKeyPendingWindowInDays = Nothing
  , _kMSKeyTags = Nothing
  }
-- Generated lens accessors: each lens focuses the record field of the
-- same name; the linked page documents the CloudFormation property.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-description
kmskDescription :: Lens' KMSKey (Maybe (Val Text))
kmskDescription = lens _kMSKeyDescription (\s a -> s { _kMSKeyDescription = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-enablekeyrotation
kmskEnableKeyRotation :: Lens' KMSKey (Maybe (Val Bool))
kmskEnableKeyRotation = lens _kMSKeyEnableKeyRotation (\s a -> s { _kMSKeyEnableKeyRotation = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-enabled
kmskEnabled :: Lens' KMSKey (Maybe (Val Bool))
kmskEnabled = lens _kMSKeyEnabled (\s a -> s { _kMSKeyEnabled = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-keypolicy
kmskKeyPolicy :: Lens' KMSKey Object
kmskKeyPolicy = lens _kMSKeyKeyPolicy (\s a -> s { _kMSKeyKeyPolicy = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-keyusage
kmskKeyUsage :: Lens' KMSKey (Maybe (Val Text))
kmskKeyUsage = lens _kMSKeyKeyUsage (\s a -> s { _kMSKeyKeyUsage = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-pendingwindowindays
kmskPendingWindowInDays :: Lens' KMSKey (Maybe (Val Integer))
kmskPendingWindowInDays = lens _kMSKeyPendingWindowInDays (\s a -> s { _kMSKeyPendingWindowInDays = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-kms-key.html#cfn-kms-key-tags
kmskTags :: Lens' KMSKey (Maybe [Tag])
kmskTags = lens _kMSKeyTags (\s a -> s { _kMSKeyTags = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/KMSKey.hs | mit | 3,564 | 0 | 15 | 504 | 721 | 411 | 310 | 56 | 1 |
-- | Day-of-month numbers for a common year: 1..31, 1..28, ... for each
-- month in calendar order (365 elements).
year :: [Int]
year = monthDayNumbers 28

-- | Day-of-month numbers for a leap year: February has 29 days
-- (366 elements).
leapYear :: [Int]
leapYear = monthDayNumbers 29

-- | Shared helper: build the day sequence given February's length,
-- removing the duplicated twelve-month literal of the original.
monthDayNumbers :: Int -> [Int]
monthDayNumbers feb =
    concatMap (enumFromTo 1) [31, feb, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
-- | Endless stream of day-of-month numbers for the repeating four-year
-- block: three common years followed by one leap year.
dates =
    cycle (year ++ year ++ year ++ leapYear)
-- | Weekday labels repeating forever, starting from Tuesday so that the
-- stream lines up with the first day covered by 'dates'.
daysOfWeek =
    cycle week
  where week = ["Tu", "W", "Th", "F", "Sa", "Su", "M"]
-- | Count the months whose first day is a Sunday: zip each day-of-month
-- number with its weekday label and count (1, "Su") pairs over the first
-- 36525 days (25 four-year blocks of 366+365+365+365 days).
-- NOTE(review): correctness relies on the streams starting on a Tuesday
-- that is the 1st of a month, three years before a leap year (cf.
-- 'daysOfWeek' starting at "Tu" and 'dates' cycling year,year,year,
-- leapYear) -- presumably 1 Jan 1901 for Project Euler 19; confirm.
solution =
    length $ filter ((1, "Su") ==) $ take daysInCentury $ zip dates daysOfWeek
    where daysInCentury = 25 * (366 + 365 + 365 + 365)
main = print solution
| drcabana/euler-fp | source/hs/P19.hs | epl-1.0 | 559 | 0 | 11 | 119 | 334 | 198 | 136 | 14 | 1 |
module T where
import Prelude hiding ( id )
import Tests.ModelCheckerBasis
-- Some initial states.
-- A circuit (ADHOC arrow DSL) whose single latch x is initialised
-- non-deterministically: unsafeNonDetAC combines the xor feedback with a
-- constant-false alternative.  (Arrow 'proc' notation with banana-bracket
-- command combinators.)
c = proc () ->
    do rec x <- (| unsafeNonDetAC (\x -> xorA -< x) (falseA -< ()) |)
       returnA -< x
-- NOTE(review): partial top-level pattern bind -- fails at evaluation
-- time if 'isConstructive' returns Nothing for this circuit.
Just (m, (a, b)) = isConstructive c
ctlM = mkCTLModel m
-- Forcing ctlM proves model construction succeeds.
test_model = ctlM `seq` True
-- The two initial-state bits are expected to differ (exclusive or).
test_nondet_init = isOK (mc ctlM (prop a `xor` prop b))
| peteg/ADHOC | Tests/07_Nondeterminism/030_init_nondet_pairs.hs | gpl-2.0 | 345 | 3 | 15 | 75 | 157 | 85 | 72 | -1 | -1 |
module Cube where
import Graphics.UI.GLUT
-- | Emit a single OpenGL vertex from an (x, y, z) triple of 'GLfloat's.
vertex3f :: (GLfloat, GLfloat, GLfloat) -> IO ()
vertex3f (x, y, z) = vertex (Vertex3 x y z)
-- | Draw a solid axis-aligned cube of half-width w centred at the origin,
-- as six quads (one face per source line, identified by the coordinate
-- held constant).
cube :: GLfloat -> IO ()
cube w = renderPrimitive Quads $ mapM_ vertex3f
  [ ( w, w, w), ( w, w,-w), ( w,-w,-w), ( w,-w, w),   -- +X face
    ( w, w, w), ( w, w,-w), (-w, w,-w), (-w, w, w),   -- +Y face
    ( w, w, w), ( w,-w, w), (-w,-w, w), (-w, w, w),   -- +Z face
    (-w, w, w), (-w, w,-w), (-w,-w,-w), (-w,-w, w),   -- -X face
    ( w,-w, w), ( w,-w,-w), (-w,-w,-w), (-w,-w, w),   -- -Y face
    ( w, w,-w), ( w,-w,-w), (-w,-w,-w), (-w, w,-w) ]  -- -Z face
-- | Draw the wireframe of the same cube: 24 vertices consumed pairwise by
-- the Lines primitive, i.e. the cube's 12 edges, each listed as a pair of
-- endpoints.
cubeFrame :: GLfloat -> IO ()
cubeFrame w = renderPrimitive Lines $ mapM_ vertex3f
  [ ( w,-w, w), ( w, w, w),  ( w, w, w), (-w, w, w),
    (-w, w, w), (-w,-w, w),  (-w,-w, w), ( w,-w, w),
    ( w,-w, w), ( w,-w,-w),  ( w, w, w), ( w, w,-w),
    (-w, w, w), (-w, w,-w),  (-w,-w, w), (-w,-w,-w),
    ( w,-w,-w), ( w, w,-w),  ( w, w,-w), (-w, w,-w),
    (-w, w,-w), (-w,-w,-w),  (-w,-w,-w), ( w,-w,-w) ]
-- | A peg is identified by its name.
type Peg = String
-- | A single move: (source peg, destination peg).
type Move = (Peg, Peg)
--------------------------------------------------------------------------------
---------------------------------Exercise 5-------------------------------------
--------------------------------------------------------------------------------
-- | Classic three-peg Towers of Hanoi.  @hanoi n from to via@ lists the
-- moves that transfer a stack of n discs (only one disc moved at a time,
-- never a larger disc onto a smaller one) from @from@ to @to@, using
-- @via@ as temporary storage.  Scheme: move the top n-1 discs out of the
-- way, move the largest disc directly, then stack the n-1 discs back on
-- top of it.
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi 0 _ _ _       = []
hanoi 1 from to _   = [(from, to)]
hanoi n from to via =
    concat [ hanoi (n - 1) from via to
           , [(from, to)]
           , hanoi (n - 1) via to from
           ]
--------------------------------------------------------------------------------
---------------------------------Exercise 6-------------------------------------
--------------------------------------------------------------------------------
-- | Four-peg variant solved with the Frame-Stewart scheme: choose a split
-- size k, park the top k discs on a spare peg with all four pegs
-- available, transfer the remaining n-k discs with the three-peg
-- algorithm (the parked peg is off limits), then bring the k parked
-- discs across.  The split k = n - round (sqrt (2n + 1)) + 1 gives an
-- optimal move count for four pegs.
hanoi4 :: Integer -> Peg -> Peg -> Peg -> Peg -> [Move]
hanoi4 0 _ _ _ _           = []
hanoi4 1 from to _ _       = [(from, to)]
hanoi4 n from to via1 via2 =
    let split = n - round (sqrt (fromIntegral (2 * n + 1))) + 1
    in concat [ hanoi4 split from via1 to via2
              , hanoi (n - split) from to via2
              , hanoi4 split via1 to from via2
              ]
| mdipirro/functional-languages-homeworks | Week 3/hanoi.hs | gpl-3.0 | 2,831 | 0 | 16 | 546 | 327 | 176 | 151 | 11 | 1 |
module Web.Crew (
startScotty,
module C
) where
import Web.Crew.Templates as C
import Web.Crew.Session as C
import Web.Crew.User as C
--import Data.Configurator
import Control.Exception
import Web.Scotty
import Data.Acid
import Data.Acid.Local
-- | Open the local acid-state store of 'Users', run a Scotty server on
-- the given port with the caller-supplied routes plus the built-in user
-- routes, and guarantee -- via 'bracket' -- that a checkpoint is written
-- and the store closed even if the server dies with an exception.
startScotty :: Int -> (AcidState Users -> ScottyM ()) -> IO ()
startScotty port f = do
    --(cfg, _) <- autoReload autoConfig [Required "btci.config"]
    --users <- lookupDefault "initialUsers" cfg []
    bracket (openLocalState initialUsers)
            createCheckpointAndClose
            (\acid -> scotty port $ do
                f acid
                userRoutes acid)
| schell/scottys-crew | Web/Crew.hs | gpl-3.0 | 631 | 0 | 13 | 152 | 149 | 84 | 65 | 17 | 1 |
Subsets and Splits