| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..1.03M | stringlengths 5..90 | stringlengths 4..158 | stringclasses 15 values | int64 5..1.03M | int64 0..53.9k | int64 2..4.17k | int64 0..365k | int64 3..317k | int64 1..171k | int64 1..146k | int64 -1..37.3k | int64 -1..1.31k |
-- | The endpoints on the server
module Development.Shake.Internal.History.Cloud(
Cloud, newCloud, addCloud, lookupCloud
) where
import Development.Shake.Internal.Value
import Development.Shake.Internal.Core.Database
import Development.Shake.Internal.History.Types
import Development.Shake.Internal.History.Network
import Development.Shake.Internal.History.Server
import Development.Shake.Internal.History.Bloom
import Control.Concurrent.Extra
import System.Time.Extra
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Class
import General.Fence
import qualified Data.HashMap.Strict as Map
import Data.Typeable
import Data.Either.Extra
import General.Binary
import General.Extra
import General.Wait
type Initial = Map.HashMap Key (Ver, [Key], Bloom [BS_Identity])
data Cloud = Cloud Server (Locked () -> IO ()) (Fence Locked Initial)
newLaterFence :: (Locked () -> IO ()) -> Seconds -> a -> IO a -> IO (Fence Locked a)
newLaterFence relock maxTime def act = do
fence <- newFence
forkFinally (timeout maxTime act) $ \res -> relock $ signalFence fence $ case res of
Right (Just v) -> v
_ -> def
pure fence
laterFence :: MonadIO m => Fence m a -> Wait m a
laterFence fence = do
res <- liftIO $ testFence fence
case res of
Just v -> pure v
Nothing -> Later $ waitFence fence
newCloud :: (Locked () -> IO ()) -> Map.HashMap TypeRep (BinaryOp Key) -> Ver -> [(TypeRep, Ver)] -> [String] -> Maybe (IO Cloud)
newCloud relock binop globalVer ruleVer urls = flip fmap (if null urls then Nothing else connect $ last urls) $ \conn -> do
conn <- conn
server <- newServer conn binop globalVer
fence <- newLaterFence relock 10 Map.empty $ do
xs <- serverAllKeys server ruleVer
pure $ Map.fromList [(k,(v,ds,test)) | (k,v,ds,test) <- xs]
pure $ Cloud server relock fence
addCloud :: Cloud -> Key -> Ver -> Ver -> [[(Key, BS_Identity)]] -> BS_Store -> [FilePath] -> IO ()
addCloud (Cloud server _ _) x1 x2 x3 x4 x5 x6 = void $ forkIO $ serverUpload server x1 x2 x3 x4 x5 x6
lookupCloud :: Cloud -> (Key -> Wait Locked (Maybe BS_Identity)) -> Key -> Ver -> Ver -> Wait Locked (Maybe (BS_Store, [[Key]], IO ()))
lookupCloud (Cloud server relock initial) ask key builtinVer userVer = runMaybeT $ do
mp <- lift $ laterFence initial
Just (ver, deps, bloom) <- pure $ Map.lookup key mp
unless (ver == userVer) $ fail ""
Right vs <- lift $ firstLeftWaitUnordered (fmap (maybeToEither ()) . ask) deps
unless (bloomTest bloom vs) $ fail ""
fence <- liftIO $ newLaterFence relock 10 mempty $ serverOneKey server key builtinVer userVer $ zip deps vs
tree <- lift $ laterFence fence
f [deps] tree
where
f :: [[Key]] -> BuildTree Key -> MaybeT (Wait Locked) (BS_Store, [[Key]], IO ())
f ks (Done store xs) = pure (store, reverse ks, serverDownloadFiles server key xs)
f ks (Depend deps trees) = do
Right vs <- lift $ firstLeftWaitUnordered (fmap (maybeToEither ()) . ask) deps
Just tree <- pure $ lookup vs trees
f (deps:ks) tree
| ndmitchell/shake | src/Development/Shake/Internal/History/Cloud.hs | bsd-3-clause | 3,161 | 0 | 18 | 657 | 1,261 | 650 | 611 | 62 | 2 |
module BFS where
-- Creating the tree data structure
data Tree a = Empty | Node a (Tree a) (Tree a) deriving (Show)
-- BFS traversal
traverseBFS :: Tree a -> [a]
traverseBFS tree = tbf [tree]
where
tbf [] = []
tbf xs = map nodeValue xs ++ tbf (concat (map leftAndRightNodes xs))
nodeValue (Node a _ _) = a
leftAndRightNodes (Node _ Empty Empty) = []
leftAndRightNodes (Node _ Empty b) = [b]
leftAndRightNodes (Node _ a Empty) = [a]
leftAndRightNodes (Node _ a b) = [a,b]
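-- A usage sketch (the example tree below is an assumption for illustration,
-- not part of the original module):
--
-- >>> let t = Node 1 (Node 2 Empty Empty) (Node 3 Empty Empty)
-- >>> traverseBFS t
-- [1,2,3]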
| Cnidarias/al-go-rithms | graphsearch/breadth-first-search/haskell/BFS.hs | mit | 487 | 0 | 12 | 128 | 223 | 117 | 106 | 11 | 5 |
import Control.Monad
----------------------------------
-- Getting our feet wet with Maybe
----------------------------------
applyMaybe :: Maybe a -> (a -> Maybe b) -> Maybe b
applyMaybe Nothing f = Nothing
applyMaybe (Just x) f = f x
-----------------------
-- The Monad type class
-----------------------
--instance Monad Maybe where
-- return x = Just x
-- Nothing >>= f = Nothing
-- Just x >>= f = f x
-- fail _ = Nothing
----------------
-- Walk the line
----------------
type Birds = Int
type Pole = (Birds,Birds)
--landLeft :: Birds -> Pole -> Pole
--landLeft n (left,right) = (left + n,right)
--landRight :: Birds -> Pole -> Pole
--landRight n (left,right) = (left,right + n)
x -: f = f x
landLeft :: Birds -> Pole -> Maybe Pole
landLeft n (left,right)
| abs ((left + n) - right) < 4 = Just (left + n, right)
| otherwise = Nothing
landRight :: Birds -> Pole -> Maybe Pole
landRight n (left,right)
| abs (left - (right + n)) < 4 = Just (left, right + n)
| otherwise = Nothing
banana :: Pole -> Maybe Pole
banana _ = Nothing
--Without Maybe we would have to write a lot of tedious case analysis by hand
--routine :: Maybe Pole
--routine = case landLeft 1 (0,0) of
-- Nothing -> Nothing
-- Just pole1 -> case landRight 4 pole1 of
-- Nothing -> Nothing
-- Just pole2 -> case landLeft 2 pole2 of
-- Nothing -> Nothing
-- Just pole3 -> landLeft 1 pole3
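--With the Maybe monad the same pipeline collapses into a single >>= chain;
--an illustrative sketch mirroring the commented-out routine above:
--
-- >>> return (0,0) >>= landLeft 2 >>= landRight 2 >>= landLeft 1
-- Just (3,2)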
--------------
-- do notation
--------------
--foo :: Maybe String
--foo = Just 3 >>= (\x ->
-- Just "!" >>= (\y ->
-- Just (show x ++ y)))
foo :: Maybe String
foo = do
x <- Just 3
y <- Just "!"
Just (show x ++ y)
marySue :: Maybe Bool
marySue = do
x <- Just 9
Just (x > 8)
--Nested lambda expressions:     Just 3 >>= (\x -> Just "!" >>= (\y -> Just (show x ++ y)))
--Sequential lambda expressions: Just 3 >>= (\x -> Just "!") >>= (\y -> Just (show 3 ++ y))
--Combining the two:             Just 3 >>= (\x -> Just "!" >>= (\y -> Just (show x ++ y))) >>= (\z -> Just (z ++ "!"))
--test = Just 3 >>= (\x -> Just "!" >>= (\y -> Just (show x ++ y))) >>= (\z -> Just (z ++ "!"))
test :: Maybe String
test = do
x <- Just 3
y <- Just "!"
z <- Just (show x ++ y)
Just (z ++ "!")
--routine :: Maybe Pole
--routine = do
-- start <- return (0,0)
-- first <- landLeft 2 start
-- second <- landRight 2 first
-- landLeft 1 second
--routine :: Maybe Pole
--routine =
-- case Just (0,0) of
-- Nothing -> Nothing
-- Just start -> case landLeft 2 start of
-- Nothing -> Nothing
-- Just first -> case landRight 2 first of
-- Nothing -> Nothing
-- Just second -> landLeft 1 second
routine :: Maybe Pole
routine = do
start <- return (0,0)
first <- landLeft 2 start
Nothing
second <- landRight 2 first
landLeft 1 second
justH :: Maybe Char
justH = do
(x:xs) <- Just "hello"
return x
wopwop :: Maybe Char
wopwop = do
(x:xs) <- Just ""
return x
-----------------
-- The list monad
-----------------
--instance Monad [] where
-- return x = [x]
-- xs >>= f = concat (map f xs)
-- fail _ = []
listOfTuples :: [(Int,Char)]
listOfTuples = do
n <- [1,2]
ch <- ['a','b']
return (n,ch)
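--The same computation written with >>= directly (equivalent to the do block above):
--
-- >>> [1,2] >>= \n -> ['a','b'] >>= \ch -> return (n,ch)
-- [(1,'a'),(1,'b'),(2,'a'),(2,'b')]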
--guard :: (MonadPlus m) => Bool -> m ()
--guard True = return ()
--guard False = mzero
sevensOnly :: [Int]
sevensOnly = do
x <- [1..50]
guard ('7' `elem` show x)
return x
type KnightPos = (Int,Int)
moveKnight :: KnightPos -> [KnightPos]
moveKnight (c,r) = do
(c',r') <- [(c+2,r-1),(c+2,r+1),(c-2,r-1),(c-2,r+1)
,(c+1,r-2),(c+1,r+2),(c-1,r-2),(c-1,r+2)
]
guard (c' `elem` [1..8] && r' `elem` [1..8])
return (c',r')
--moveKnight :: KnightPos -> [KnightPos]
--moveKnight (c,r) = filter onBoard
-- [(c+2,r-1),(c+2,r+1),(c-2,r-1),(c-2,r+1)
-- ,(c+1,r-2),(c+1,r+2),(c-1,r-2),(c-1,r+2)
-- ]
-- where onBoard (c,r) = c `elem` [1..8] && r `elem` [1..8]
in3 :: KnightPos -> [KnightPos]
in3 start = do
first <- moveKnight start
second <- moveKnight first
moveKnight second
--in3 start = return start >>= moveKnight >>= moveKnight >>= moveKnight
canReachIn3 :: KnightPos -> KnightPos -> Bool
canReachIn3 start end = end `elem` in3 start
-------------
-- Monad laws
-------------
--Left identity
--return x >>= f is the same damn thing as f x
--Right identity
--m >>= return is no different than just m
--Associativity
--Doing (m >>= f) >>= g is just like doing m >>= (\x -> f x >>= g)
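--A quick concrete check of the three laws in the Maybe monad (illustrative only):
--
-- >>> let f x = Just (x + 1)
-- >>> return 3 >>= f        -- left identity: same as f 3
-- Just 4
-- >>> Just 3 >>= return     -- right identity: same as Just 3
-- Just 3
-- >>> (Just 3 >>= f) >>= f  -- associativity: same as Just 3 >>= (\x -> f x >>= f)
-- Just 5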
| zxl20zxl/learnyouahaskell | A-Fistful-of-Monads.hs | mit | 4,580 | 0 | 13 | 1,181 | 1,062 | 583 | 479 | 71 | 1 |
module Lab3 where
import Prelude
-----------------------------------------------------------------------------------------------------------------------------
-- LIST COMPREHENSIONS
------------------------------------------------------------------------------------------------------------------------------
-- ===================================
-- Ex. 0 - 2
-- ===================================
{-
Using a list comprehension, define a function that selects all the even numbers from a list.
Example: evens [2, 5, 6, 13, 32] = [2, 6, 32]
Test with: evens [827305 .. 927104]
Then copy the outcome and paste it after 'sumevens'
Gives answer: 43772529500
Note:
sum . evens $ [827305 .. 927104] is equivalent to:
(sum . evens) [827305 .. 927104]
whereas
sum . evens [827305 .. 927104] is equivalent to:
sum . (evens [827305 .. 927104])
Now the types don't line up.
Another (correct) option would be to write it as follows:
sum (evens [827305 .. 927104])
-}
evens :: [Integer] -> [Integer]
-- WRONG evens n = [x | x <- [1..10], n `mod` 2 == 0]
evens ints = filter even (ints)
{- Return sum of even integers from list. See page 63 and 71 text -}
sumevens :: [Integer] -> Integer
sumevens ns = sum(filter even ns)
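{- For example, matching the answer quoted above:
sumevens [827305 .. 927104] = 43772529500
-}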
-- ===================================
-- Ex. 3 - 4
-- ===================================
{-
Using a list comprehension, define a function squares that
takes a non-bottom Integer n >= 0 as its argument and returns
a list of the numbers [1..n] squared.
Example:
squares 4 = [1*1, 2*2, 3*3, 4*4]
squares 0 = []
-}
-- complete the following line with the correct type signature for this function
{- Squares each element of an array -}
{-
squares :: [Int] -> [Int]
squares (x:xs) = x * x : squares xs
squares [] = []
-}
-- WRONG squares :: Num -> [Num]
-- WRONG squares :: Integer a => a -> [a]
-- WRONG squares :: a -> [a]
-- RIGHT
squares :: Integer -> [Integer]
squares n = [n^2 | n <- [1..n]]
-- Example:
-- *Lab3> sumSquares 50
-- Result:
-- 42925
sumSquares :: Integer -> Integer
sumSquares n = sum (squares n)
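{- For example, matching the result quoted above:
sumSquares 50 = 42925
-}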
-- ===================================
-- Ex. 5 - 7
-- ===================================
-- complete the following line with the correct type signature for this function
{-
Modify the previous definition of squares such that it now takes two non-bottom
Integer arguments, m >= 0 and n >= 0 and returns a list of the m square numbers
that come after the first n square numbers.
Example:
squares' 4 2 = [3*3, 4*4, 5*5, 6*6]
squares' 2 0 = [1*1, 2*2]
squares' 0 2 = []
squares' 0 0 = []
-}
squares' :: Integer -> Integer -> [Integer]
squares' m n = [m^2 | m <- [(n+1)..(m+n)]]
{-
Example:
sum $ squares' 10 0
-- is equivalent to:
*Lab3> sum (squares' 10 0)
-- Returns:
385
-}
{-
Example:
sum $ squares' 0 10
-- is equivalent to:
*Lab3> sum (squares' 0 10)
-- Returns:
0
-}
sumSquares' :: Integer -> Integer
sumSquares' x = sum . uncurry squares' $ (x, x)
-- ===================================
-- Ex. 8
-- ===================================
{-
Using a list comprehension, define a function
coords :: Integer -> Integer -> [(Integer, Integer)]
that returns a list of all coordinate pairs on an
[0..m] × [0..n] rectangular grid, where m and n are non-bottom Integers >= 0.
Example:
coords 1 1 = [(0,0), (0,1), (1,0), (1,1)]
coords 1 2 = [(0,0), (0,1), (0,2), (1,0), (1, 1), (1, 2)]
Test: What is the value of: foldr (-) 0 . map (uncurry (*)) $ coords 5 7
*Lab3> (foldr (-) 0 . map (uncurry (*))) (coords 5 7)
Answer: -60
-}
-- referencing the pyths example in Chapter 5
coords :: Integer -> Integer -> [(Integer,Integer)]
coords m n = [(x,y) | x <- [0..m],
y <- [0..n]]
{-
After watching the jam session about Church Numerals,
what could be a possible implementation for exponentiation?
-}
-- cExp :: CNat -> CNat -> CNat
-- cExp (CNat a) (CNat b) = CNat (a ^ b)
-- cExp (CNat a) (CNat b) = CNat (a b)
-- cExp (CNat a) (CNat b) = CNat (b a)
-- cExp (CNat a) (CNat b) = CNat (a . b)
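-- Note: Church-numeral exponentiation is conventionally \m n -> n m (apply the
-- exponent to the base), so assuming CNat simply wraps a Church-encoded numeral,
-- the candidate 'cExp (CNat a) (CNat b) = CNat (b a)' is the one that matches
-- that definition.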
| ltfschoen/HelloHaskell | src/Chapter2/Section2/lab3.hs | mit | 4,301 | 0 | 10 | 1,072 | 338 | 204 | 134 | 17 | 1 |
module InfixAmbiguous where
infix +++
main = 3 +++ 4 +++ 4
(+++) x y = x
| roberth/uu-helium | test/parser/InfixAmbiguous.hs | gpl-3.0 | 75 | 0 | 6 | 19 | 33 | 19 | 14 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.ElastiCache.ModifyCacheSubnetGroup
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The /ModifyCacheSubnetGroup/ action modifies an existing cache subnet
-- group.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyCacheSubnetGroup.html AWS API Reference> for ModifyCacheSubnetGroup.
module Network.AWS.ElastiCache.ModifyCacheSubnetGroup
(
-- * Creating a Request
modifyCacheSubnetGroup
, ModifyCacheSubnetGroup
-- * Request Lenses
, mcsgSubnetIds
, mcsgCacheSubnetGroupDescription
, mcsgCacheSubnetGroupName
-- * Destructuring the Response
, modifyCacheSubnetGroupResponse
, ModifyCacheSubnetGroupResponse
-- * Response Lenses
, mcsgrsCacheSubnetGroup
, mcsgrsResponseStatus
) where
import Network.AWS.ElastiCache.Types
import Network.AWS.ElastiCache.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input of a /ModifyCacheSubnetGroup/ action.
--
-- /See:/ 'modifyCacheSubnetGroup' smart constructor.
data ModifyCacheSubnetGroup = ModifyCacheSubnetGroup'
{ _mcsgSubnetIds :: !(Maybe [Text])
, _mcsgCacheSubnetGroupDescription :: !(Maybe Text)
, _mcsgCacheSubnetGroupName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ModifyCacheSubnetGroup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcsgSubnetIds'
--
-- * 'mcsgCacheSubnetGroupDescription'
--
-- * 'mcsgCacheSubnetGroupName'
modifyCacheSubnetGroup
:: Text -- ^ 'mcsgCacheSubnetGroupName'
-> ModifyCacheSubnetGroup
modifyCacheSubnetGroup pCacheSubnetGroupName_ =
ModifyCacheSubnetGroup'
{ _mcsgSubnetIds = Nothing
, _mcsgCacheSubnetGroupDescription = Nothing
, _mcsgCacheSubnetGroupName = pCacheSubnetGroupName_
}
-- | The EC2 subnet IDs for the cache subnet group.
mcsgSubnetIds :: Lens' ModifyCacheSubnetGroup [Text]
mcsgSubnetIds = lens _mcsgSubnetIds (\ s a -> s{_mcsgSubnetIds = a}) . _Default . _Coerce;
-- | A description for the cache subnet group.
mcsgCacheSubnetGroupDescription :: Lens' ModifyCacheSubnetGroup (Maybe Text)
mcsgCacheSubnetGroupDescription = lens _mcsgCacheSubnetGroupDescription (\ s a -> s{_mcsgCacheSubnetGroupDescription = a});
-- | The name for the cache subnet group. This value is stored as a lowercase
-- string.
--
-- Constraints: Must contain no more than 255 alphanumeric characters or
-- hyphens.
--
-- Example: 'mysubnetgroup'
mcsgCacheSubnetGroupName :: Lens' ModifyCacheSubnetGroup Text
mcsgCacheSubnetGroupName = lens _mcsgCacheSubnetGroupName (\ s a -> s{_mcsgCacheSubnetGroupName = a});
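-- A request-construction sketch. It assumes the lens operators ('&', '.~', '?~')
-- are in scope (e.g. from Control.Lens or the AWS prelude), and the subnet ID and
-- description below are placeholder values:
--
-- > modifyCacheSubnetGroup "mysubnetgroup"
-- >     & mcsgCacheSubnetGroupDescription ?~ "internal cache subnets"
-- >     & mcsgSubnetIds .~ ["subnet-0123456789abcdef0"]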
instance AWSRequest ModifyCacheSubnetGroup where
type Rs ModifyCacheSubnetGroup =
ModifyCacheSubnetGroupResponse
request = postQuery elastiCache
response
= receiveXMLWrapper "ModifyCacheSubnetGroupResult"
(\ s h x ->
ModifyCacheSubnetGroupResponse' <$>
(x .@? "CacheSubnetGroup") <*> (pure (fromEnum s)))
instance ToHeaders ModifyCacheSubnetGroup where
toHeaders = const mempty
instance ToPath ModifyCacheSubnetGroup where
toPath = const "/"
instance ToQuery ModifyCacheSubnetGroup where
toQuery ModifyCacheSubnetGroup'{..}
= mconcat
["Action" =:
("ModifyCacheSubnetGroup" :: ByteString),
"Version" =: ("2015-02-02" :: ByteString),
"SubnetIds" =:
toQuery
(toQueryList "SubnetIdentifier" <$> _mcsgSubnetIds),
"CacheSubnetGroupDescription" =:
_mcsgCacheSubnetGroupDescription,
"CacheSubnetGroupName" =: _mcsgCacheSubnetGroupName]
-- | /See:/ 'modifyCacheSubnetGroupResponse' smart constructor.
data ModifyCacheSubnetGroupResponse = ModifyCacheSubnetGroupResponse'
{ _mcsgrsCacheSubnetGroup :: !(Maybe CacheSubnetGroup)
, _mcsgrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ModifyCacheSubnetGroupResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcsgrsCacheSubnetGroup'
--
-- * 'mcsgrsResponseStatus'
modifyCacheSubnetGroupResponse
:: Int -- ^ 'mcsgrsResponseStatus'
-> ModifyCacheSubnetGroupResponse
modifyCacheSubnetGroupResponse pResponseStatus_ =
ModifyCacheSubnetGroupResponse'
{ _mcsgrsCacheSubnetGroup = Nothing
, _mcsgrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
mcsgrsCacheSubnetGroup :: Lens' ModifyCacheSubnetGroupResponse (Maybe CacheSubnetGroup)
mcsgrsCacheSubnetGroup = lens _mcsgrsCacheSubnetGroup (\ s a -> s{_mcsgrsCacheSubnetGroup = a});
-- | The response status code.
mcsgrsResponseStatus :: Lens' ModifyCacheSubnetGroupResponse Int
mcsgrsResponseStatus = lens _mcsgrsResponseStatus (\ s a -> s{_mcsgrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-elasticache/gen/Network/AWS/ElastiCache/ModifyCacheSubnetGroup.hs | mpl-2.0 | 5,731 | 0 | 13 | 1,107 | 733 | 438 | 295 | 93 | 1 |
{-
Author : shelarcy 2004
Advised by: Sean Seefried
Adapted from: BezCurve.hs
By: (c) Sven Panne 2003 <[email protected]>
"BezCurve.hs (adapted from fog.c which is (c) Silicon Graphics, Inc)
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE"
This program renders a lighted, filled Bezier surface, using two-dimensional
evaluators.
-}
-- ghci -package wx -package OpenGL
module Main
where
import Data.List ( transpose )
import Graphics.UI.WX
import Graphics.UI.WXCore
import Graphics.Rendering.OpenGL
-- Many names and types are ambiguous, so we must qualify them.
import qualified Graphics.UI.WX as WX
import qualified Graphics.Rendering.OpenGL as GL
main :: IO()
main = start gui
defaultWidth = 320
defaultHeight = 200
gui = do
f <- frame [ text := "Simple OpenGL" ]
glCanvas <- glCanvasCreateEx f 0 (Rect 0 0 defaultWidth defaultHeight) 0 "GLCanvas" [GL_RGBA] nullPalette
glContext <- glContextCreateFromNull glCanvas
glCanvasSetCurrent glCanvas glContext
let glWidgetLayout = fill $ widget glCanvas
WX.set f [ layout := glWidgetLayout
-- you have to use the paintRaw event. Otherwise the OpenGL window won't
-- show anything!
, on paintRaw := paintGL glCanvas
]
repaint f
convWG (WX.Size w h) = (GL.Size (convInt32 w) (convInt32 h))
convInt32 = fromInteger . toInteger
-- This paint function gets the current glCanvas for knowing where to draw in.
-- It is possible to have multiple GL windows in your application.
paintGL :: GLCanvas a -> DC b -> WX.Rect -> [WX.Rect]-> IO ()
paintGL glWindow dc myrect _ = do
myInit
reshape $ convWG $ rectSize myrect
display
glCanvasSwapBuffers glWindow
return ()
ctrlPoints :: [[GL.Vertex3 GL.GLfloat]]
ctrlPoints = [
[ GL.Vertex3 (-1.5) (-1.5) 4.0, GL.Vertex3 (-0.5) (-1.5) 2.0,
GL.Vertex3 0.5 (-1.5) (-1.0), GL.Vertex3 1.5 (-1.5) 2.0 ],
[ GL.Vertex3 (-1.5) (-0.5) 1.0, GL.Vertex3 (-0.5) (-0.5) 3.0,
GL.Vertex3 0.5 (-0.5) 0.0, GL.Vertex3 1.5 (-0.5) (-1.0) ],
[ GL.Vertex3 (-1.5) 0.5 4.0, GL.Vertex3 (-0.5) 0.5 0.0,
GL.Vertex3 0.5 0.5 3.0, GL.Vertex3 1.5 0.5 4.0 ],
[ GL.Vertex3 (-1.5) 1.5 (-2.0), GL.Vertex3 (-0.5) 1.5 (-2.0),
GL.Vertex3 0.5 1.5 0.0, GL.Vertex3 1.5 1.5 (-1.0) ]]
initlights :: IO ()
initlights = do
GL.lighting GL.$= GL.Enabled
GL.light (GL.Light 0) GL.$= GL.Enabled
GL.ambient (GL.Light 0) GL.$= GL.Color4 0.2 0.2 0.2 1.0
GL.position (GL.Light 0) GL.$= GL.Vertex4 0 0 2 1
GL.materialDiffuse GL.Front GL.$= GL.Color4 0.6 0.6 0.6 1.0
GL.materialSpecular GL.Front GL.$= GL.Color4 1.0 1.0 1.0 1.0
GL.materialShininess GL.Front GL.$= 50
myInit :: IO ()
myInit = do
GL.clearColor GL.$= GL.Color4 0.1 0.1 0.6 0
GL.depthFunc GL.$= Just GL.Less
m <- GL.newMap2 (0, 1) (0, 1) (transpose ctrlPoints)
GL.map2 GL.$= Just (m :: GLmap2 GL.Vertex3 GL.GLfloat)
GL.autoNormal GL.$= GL.Enabled
mapGrid2 GL.$= ((20, (0, 1)), (20, (0, 1 :: GL.GLfloat)))
initlights -- for lighted version only
display = do
GL.clear [ GL.ColorBuffer, GL.DepthBuffer ]
GL.preservingMatrix $ do
GL.rotate (85 :: GL.GLfloat) (GL.Vector3 1 1 1)
evalMesh2 Fill (0, 20) (0, 20)
GL.flush
reshape mysize@(GL.Size w h) = do
GL.viewport GL.$= (GL.Position 0 0, mysize)
GL.matrixMode GL.$= GL.Projection
GL.loadIdentity
let wf = fromIntegral w
hf = fromIntegral h
if w <= h
then GL.ortho (-4.0) 4.0 (-4.0*hf/wf) (4.0*hf/wf) (-4.0) 4.0
else GL.ortho (-4.0*wf/hf) (4.0*wf/hf) (-4.0) 4.0 (-4.0) 4.0
GL.matrixMode GL.$= GL.Modelview 0
GL.loadIdentity
| sherwoodwang/wxHaskell | samples/contrib/GLCanvas.hs | lgpl-2.1 | 3,862 | 0 | 13 | 964 | 1,353 | 692 | 661 | 74 | 2 |
module Propellor.Property.Cron where
import Propellor
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Apt as Apt
import Utility.SafeCommand
import Utility.FileMode
import Data.Char
-- | When to run a cron job.
--
-- The Daily, Monthly, and Weekly options allow the cron job to be run
-- by anacron, which is useful for non-servers.
data Times
= Times String -- ^ formatted as in crontab(5)
| Daily
| Weekly
| Monthly
-- | Installs a cron job, that will run as a specified user in a particular
-- directory. Note that the Desc must be unique, as it is used for the
-- cron job filename.
--
-- Only one instance of the cron job is allowed to run at a time, no matter
-- how long it runs. This is accomplished using flock locking of the cron
-- job file.
--
-- The cron job's output will only be emailed if it exits nonzero.
job :: Desc -> Times -> UserName -> FilePath -> String -> Property NoInfo
job desc times user cddir command = combineProperties ("cronned " ++ desc)
[ cronjobfile `File.hasContent`
[ case times of
Times _ -> ""
_ -> "#!/bin/sh\nset -e"
, "# Generated by propellor"
, ""
, "SHELL=/bin/sh"
, "PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin"
, ""
, case times of
Times t -> t ++ "\t" ++ user ++ "\tchronic " ++ shellEscape scriptfile
_ -> case user of
"root" -> "chronic " ++ shellEscape scriptfile
_ -> "chronic su " ++ user ++ " -c " ++ shellEscape scriptfile
]
, case times of
Times _ -> doNothing
_ -> cronjobfile `File.mode` combineModes (readModes ++ executeModes)
-- Use a separate script because it makes the cron job name
-- prettier in emails, and also allows running the job manually.
, scriptfile `File.hasContent`
[ "#!/bin/sh"
, "# Generated by propellor"
, "set -e"
, "flock -n " ++ shellEscape cronjobfile
++ " sh -c " ++ shellEscape cmdline
]
, scriptfile `File.mode` combineModes (readModes ++ executeModes)
]
`requires` Apt.serviceInstalledRunning "cron"
`requires` Apt.installed ["util-linux", "moreutils"]
where
cmdline = "cd " ++ cddir ++ " && ( " ++ command ++ " )"
cronjobfile = "/etc" </> cronjobdir </> name
cronjobdir = case times of
Times _ -> "cron.d"
Daily -> "cron.daily"
Weekly -> "cron.weekly"
Monthly -> "cron.monthly"
scriptfile = "/usr/local/bin/" ++ name ++ "_cronjob"
name = map sanitize desc
sanitize c
| isAlphaNum c = c
| otherwise = '_'
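-- A usage sketch; the description, user, directory and command below are
-- placeholder values, and the property is attached to a host with propellor's
-- usual (&) combinator:
--
-- > & Cron.job "compact git repositories" Cron.Daily "root" "/srv/git" "git gc"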
-- | Installs a cron job, and runs it niced and ioniced.
niceJob :: Desc -> Times -> UserName -> FilePath -> String -> Property NoInfo
niceJob desc times user cddir command = job desc times user cddir
("nice ionice -c 3 sh -c " ++ shellEscape command)
-- | Installs a cron job to run propellor.
runPropellor :: Times -> Property NoInfo
runPropellor times = niceJob "propellor" times "root" localdir "./propellor"
| avengerpenguin/propellor | src/Propellor/Property/Cron.hs | bsd-2-clause | 2,862 | 20 | 19 | 567 | 629 | 342 | 287 | 57 | 8 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
@DsMonad@: monadery used in desugaring
-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} -- instance MonadThings is necessarily an orphan
module DsMonad (
DsM, mapM, mapAndUnzipM,
initDs, initDsTc, initTcDsForSolver, fixDs,
foldlM, foldrM, whenGOptM, unsetGOptM, unsetWOptM,
Applicative(..),(<$>),
newLocalName,
duplicateLocalDs, newSysLocalDs, newSysLocalsDs, newUniqueId,
newFailLocalDs, newPredVarDs,
getSrcSpanDs, putSrcSpanDs,
mkPrintUnqualifiedDs,
newUnique,
UniqSupply, newUniqueSupply,
getGhcModeDs, dsGetFamInstEnvs, dsGetStaticBindsVar,
dsLookupGlobal, dsLookupGlobalId, dsDPHBuiltin, dsLookupTyCon, dsLookupDataCon,
PArrBuiltin(..),
dsLookupDPHRdrEnv, dsLookupDPHRdrEnv_maybe,
dsInitPArrBuiltin,
DsMetaEnv, DsMetaVal(..), dsGetMetaEnv, dsLookupMetaEnv, dsExtendMetaEnv,
-- Getting and setting EvVars and term constraints in local environment
getDictsDs, addDictsDs, getTmCsDs, addTmCsDs,
-- Warnings
DsWarning, warnDs, failWithDs, discardWarningsDs,
-- Data types
DsMatchContext(..),
EquationInfo(..), MatchResult(..), DsWrapper, idDsWrapper,
CanItFail(..), orFail
) where
import TcRnMonad
import FamInstEnv
import CoreSyn
import HsSyn
import TcIface
import LoadIface
import Finder
import PrelNames
import RnNames
import RdrName
import HscTypes
import Bag
import DataCon
import TyCon
import PmExpr
import Id
import Module
import Outputable
import SrcLoc
import Type
import UniqSupply
import Name
import NameEnv
import DynFlags
import ErrUtils
import FastString
import Maybes
import Var (EvVar)
import GHC.Fingerprint
import qualified GHC.LanguageExtensions as LangExt
import Data.IORef
import Control.Monad
{-
************************************************************************
* *
Data types for the desugarer
* *
************************************************************************
-}
data DsMatchContext
= DsMatchContext (HsMatchContext Name) SrcSpan
deriving ()
data EquationInfo
= EqnInfo { eqn_pats :: [Pat Id], -- The patterns for an eqn
eqn_rhs :: MatchResult } -- What to do after match
instance Outputable EquationInfo where
ppr (EqnInfo pats _) = ppr pats
type DsWrapper = CoreExpr -> CoreExpr
idDsWrapper :: DsWrapper
idDsWrapper e = e
-- The semantics of (match vs (EqnInfo wrap pats rhs)) is the MatchResult
-- \fail. wrap (case vs of { pats -> rhs fail })
-- where vs are not bound by wrap
-- A MatchResult is an expression with a hole in it
data MatchResult
= MatchResult
CanItFail -- Tells whether the failure expression is used
(CoreExpr -> DsM CoreExpr)
-- Takes a expression to plug in at the
-- failure point(s). The expression should
-- be duplicatable!
data CanItFail = CanFail | CantFail
orFail :: CanItFail -> CanItFail -> CanItFail
orFail CantFail CantFail = CantFail
orFail _ _ = CanFail
{-
************************************************************************
* *
Monad functions
* *
************************************************************************
-}
-- Compatibility functions
fixDs :: (a -> DsM a) -> DsM a
fixDs = fixM
type DsWarning = (SrcSpan, SDoc)
-- Not quite the same as a WarnMsg, we have an SDoc here
-- and we'll do the print_unqual stuff later on to turn it
-- into a Doc.
initDs :: HscEnv
-> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv
-> DsM a
-> IO (Messages, Maybe a)
-- Print errors and warnings, if any arise
initDs hsc_env mod rdr_env type_env fam_inst_env thing_inside
= do { msg_var <- newIORef (emptyBag, emptyBag)
; static_binds_var <- newIORef []
; let dflags = hsc_dflags hsc_env
(ds_gbl_env, ds_lcl_env) = mkDsEnvs dflags mod rdr_env type_env
fam_inst_env msg_var
static_binds_var
; either_res <- initTcRnIf 'd' hsc_env ds_gbl_env ds_lcl_env $
loadDAP $
initDPHBuiltins $
tryM thing_inside -- Catch exceptions (= errors during desugaring)
-- Display any errors and warnings
-- Note: if -Werror is used, we don't signal an error here.
; msgs <- readIORef msg_var
; let final_res | errorsFound dflags msgs = Nothing
| otherwise = case either_res of
Right res -> Just res
Left exn -> pprPanic "initDs" (text (show exn))
-- The (Left exn) case happens when the thing_inside throws
-- a UserError exception. Then it should have put an error
-- message in msg_var, so we just discard the exception
; return (msgs, final_res)
}
where
-- Extend the global environment with a 'GlobalRdrEnv' containing the exported entities of
-- * 'Data.Array.Parallel' iff '-XParallelArrays' specified (see also 'checkLoadDAP').
-- * 'Data.Array.Parallel.Prim' iff '-fvectorise' specified.
loadDAP thing_inside
= do { dapEnv <- loadOneModule dATA_ARRAY_PARALLEL_NAME checkLoadDAP paErr
; dappEnv <- loadOneModule dATA_ARRAY_PARALLEL_PRIM_NAME (goptM Opt_Vectorise) veErr
; updGblEnv (\env -> env {ds_dph_env = dapEnv `plusOccEnv` dappEnv }) thing_inside
}
where
loadOneModule :: ModuleName -- the module to load
-> DsM Bool -- under which condition
-> MsgDoc -- error message if module not found
-> DsM GlobalRdrEnv -- empty if condition 'False'
loadOneModule modname check err
= do { doLoad <- check
; if not doLoad
then return emptyGlobalRdrEnv
else do {
; result <- liftIO $ findImportedModule hsc_env modname Nothing
; case result of
Found _ mod -> loadModule err mod
_ -> pprPgmError "Unable to use Data Parallel Haskell (DPH):" err
} }
paErr = text "To use ParallelArrays," <+> specBackend $$ hint1 $$ hint2
veErr = text "To use -fvectorise," <+> specBackend $$ hint1 $$ hint2
specBackend = text "you must specify a DPH backend package"
hint1 = text "Look for packages named 'dph-lifted-*' with 'ghc-pkg'"
hint2 = text "You may need to install them with 'cabal install dph-examples'"
initDPHBuiltins thing_inside
= do { -- If '-XParallelArrays' given, we populate the builtin table for desugaring those
; doInitBuiltins <- checkLoadDAP
; if doInitBuiltins
then dsInitPArrBuiltin thing_inside
else thing_inside
}
checkLoadDAP = do { paEnabled <- xoptM LangExt.ParallelArrays
; return $ paEnabled &&
mod /= gHC_PARR' &&
moduleName mod /= dATA_ARRAY_PARALLEL_NAME
}
-- do not load 'Data.Array.Parallel' iff compiling 'base:GHC.PArr' or a
-- module called 'dATA_ARRAY_PARALLEL_NAME'; see also the comments at the top
-- of 'base:GHC.PArr' and 'Data.Array.Parallel' in the DPH libraries
initDsTc :: DsM a -> TcM a
initDsTc thing_inside
= do { this_mod <- getModule
; tcg_env <- getGblEnv
; msg_var <- getErrsVar
; dflags <- getDynFlags
; static_binds_var <- liftIO $ newIORef []
; let type_env = tcg_type_env tcg_env
rdr_env = tcg_rdr_env tcg_env
fam_inst_env = tcg_fam_inst_env tcg_env
ds_envs = mkDsEnvs dflags this_mod rdr_env type_env fam_inst_env
msg_var static_binds_var
; setEnvs ds_envs thing_inside
}
initTcDsForSolver :: TcM a -> DsM (Messages, Maybe a)
-- Spin up a TcM context so that we can run the constraint solver
-- Returns any error messages generated by the constraint solver
-- and (Just res) if no error happened; Nothing if an error happened
--
-- Simon says: I'm not very happy about this. We spin up a complete TcM monad
-- only to immediately refine it to a TcS monad.
-- Better perhaps to make TcS into its own monad, rather than building on TcS
-- But that may in turn interact with plugins
initTcDsForSolver thing_inside
= do { (gbl, lcl) <- getEnvs
; hsc_env <- getTopEnv
; let DsGblEnv { ds_mod = mod
, ds_fam_inst_env = fam_inst_env } = gbl
DsLclEnv { dsl_loc = loc } = lcl
; liftIO $ initTc hsc_env HsSrcFile False mod loc $
updGblEnv (\tc_gbl -> tc_gbl { tcg_fam_inst_env = fam_inst_env }) $
thing_inside }
mkDsEnvs :: DynFlags -> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv
-> IORef Messages -> IORef [(Fingerprint, (Id, CoreExpr))]
-> (DsGblEnv, DsLclEnv)
mkDsEnvs dflags mod rdr_env type_env fam_inst_env msg_var static_binds_var
= let if_genv = IfGblEnv { if_rec_types = Just (mod, return type_env) }
if_lenv = mkIfLclEnv mod (text "GHC error in desugarer lookup in" <+> ppr mod)
real_span = realSrcLocSpan (mkRealSrcLoc (moduleNameFS (moduleName mod)) 1 1)
gbl_env = DsGblEnv { ds_mod = mod
, ds_fam_inst_env = fam_inst_env
, ds_if_env = (if_genv, if_lenv)
, ds_unqual = mkPrintUnqualified dflags rdr_env
, ds_msgs = msg_var
, ds_dph_env = emptyGlobalRdrEnv
, ds_parr_bi = panic "DsMonad: uninitialised ds_parr_bi"
, ds_static_binds = static_binds_var
}
lcl_env = DsLclEnv { dsl_meta = emptyNameEnv
, dsl_loc = real_span
, dsl_dicts = emptyBag
, dsl_tm_cs = emptyBag
}
in (gbl_env, lcl_env)
-- Attempt to load the given module and return its exported entities if successful.
--
loadModule :: SDoc -> Module -> DsM GlobalRdrEnv
loadModule doc mod
= do { env <- getGblEnv
; setEnvs (ds_if_env env) $ do
{ iface <- loadInterface doc mod ImportBySystem
; case iface of
Failed err -> pprPanic "DsMonad.loadModule: failed to load" (err $$ doc)
Succeeded iface -> return $ mkGlobalRdrEnv . gresFromAvails prov . mi_exports $ iface
} }
where
prov = Just (ImpSpec { is_decl = imp_spec, is_item = ImpAll })
imp_spec = ImpDeclSpec { is_mod = name, is_qual = True,
is_dloc = wiredInSrcSpan, is_as = name }
name = moduleName mod
{-
************************************************************************
* *
Operations in the monad
* *
************************************************************************
And all this mysterious stuff is so we can occasionally reach out and
grab one or more names. @newLocalDs@ isn't exported---exported
functions are defined with it. The difference in name-strings makes
it easier to read debugging output.
-}
-- Make a new Id with the same print name, but different type, and new unique
newUniqueId :: Id -> Type -> DsM Id
newUniqueId id = mkSysLocalOrCoVarM (occNameFS (nameOccName (idName id)))
duplicateLocalDs :: Id -> DsM Id
duplicateLocalDs old_local
= do { uniq <- newUnique
; return (setIdUnique old_local uniq) }
newPredVarDs :: PredType -> DsM Var
newPredVarDs pred
= newSysLocalDs pred
newSysLocalDs, newFailLocalDs :: Type -> DsM Id
newSysLocalDs = mkSysLocalOrCoVarM (fsLit "ds")
newFailLocalDs = mkSysLocalOrCoVarM (fsLit "fail")
newSysLocalsDs :: [Type] -> DsM [Id]
newSysLocalsDs tys = mapM newSysLocalDs tys
{-
We can also reach out and either set/grab location information from
the @SrcSpan@ being carried around.
-}
getGhcModeDs :: DsM GhcMode
getGhcModeDs = getDynFlags >>= return . ghcMode
-- | Get in-scope type constraints (pm check)
getDictsDs :: DsM (Bag EvVar)
getDictsDs = do { env <- getLclEnv; return (dsl_dicts env) }
-- | Add in-scope type constraints (pm check)
addDictsDs :: Bag EvVar -> DsM a -> DsM a
addDictsDs ev_vars
= updLclEnv (\env -> env { dsl_dicts = unionBags ev_vars (dsl_dicts env) })
-- | Get in-scope term constraints (pm check)
getTmCsDs :: DsM (Bag SimpleEq)
getTmCsDs = do { env <- getLclEnv; return (dsl_tm_cs env) }
-- | Add in-scope term constraints (pm check)
addTmCsDs :: Bag SimpleEq -> DsM a -> DsM a
addTmCsDs tm_cs
= updLclEnv (\env -> env { dsl_tm_cs = unionBags tm_cs (dsl_tm_cs env) })
getSrcSpanDs :: DsM SrcSpan
getSrcSpanDs = do { env <- getLclEnv
; return (RealSrcSpan (dsl_loc env)) }
putSrcSpanDs :: SrcSpan -> DsM a -> DsM a
putSrcSpanDs (UnhelpfulSpan {}) thing_inside
= thing_inside
putSrcSpanDs (RealSrcSpan real_span) thing_inside
= updLclEnv (\ env -> env {dsl_loc = real_span}) thing_inside
warnDs :: SDoc -> DsM ()
warnDs warn = do { env <- getGblEnv
; loc <- getSrcSpanDs
; dflags <- getDynFlags
; let msg = mkWarnMsg dflags loc (ds_unqual env) warn
; updMutVar (ds_msgs env) (\ (w,e) -> (w `snocBag` msg, e)) }
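-- For example, a desugaring pass would typically wrap a warning in the source
-- span of the construct being processed (illustrative sketch; the message text
-- is made up):
--
-- > putSrcSpanDs (getLoc expr) $ warnDs (text "pattern match is redundant")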
failWithDs :: SDoc -> DsM a
failWithDs err
= do { env <- getGblEnv
; loc <- getSrcSpanDs
; dflags <- getDynFlags
; let msg = mkErrMsg dflags loc (ds_unqual env) err
; updMutVar (ds_msgs env) (\ (w,e) -> (w, e `snocBag` msg))
; failM }
mkPrintUnqualifiedDs :: DsM PrintUnqualified
mkPrintUnqualifiedDs = ds_unqual <$> getGblEnv
instance MonadThings (IOEnv (Env DsGblEnv DsLclEnv)) where
lookupThing = dsLookupGlobal
dsLookupGlobal :: Name -> DsM TyThing
-- Very like TcEnv.tcLookupGlobal
dsLookupGlobal name
= do { env <- getGblEnv
; setEnvs (ds_if_env env)
(tcIfaceGlobal name) }
dsLookupGlobalId :: Name -> DsM Id
dsLookupGlobalId name
= tyThingId <$> dsLookupGlobal name
-- |Get a name from "Data.Array.Parallel" for the desugarer, from the 'ds_parr_bi' component of the
-- global desugarer environment.
--
dsDPHBuiltin :: (PArrBuiltin -> a) -> DsM a
dsDPHBuiltin sel = (sel . ds_parr_bi) <$> getGblEnv
dsLookupTyCon :: Name -> DsM TyCon
dsLookupTyCon name
= tyThingTyCon <$> dsLookupGlobal name
dsLookupDataCon :: Name -> DsM DataCon
dsLookupDataCon name
= tyThingDataCon <$> dsLookupGlobal name
-- |Lookup a name exported by 'Data.Array.Parallel.Prim' or 'Data.Array.Parallel.Prim'.
-- Panic if there isn't one, or if it is defined multiple times.
dsLookupDPHRdrEnv :: OccName -> DsM Name
dsLookupDPHRdrEnv occ
= liftM (fromMaybe (pprPanic nameNotFound (ppr occ)))
$ dsLookupDPHRdrEnv_maybe occ
where nameNotFound = "Name not found in 'Data.Array.Parallel' or 'Data.Array.Parallel.Prim':"
-- |Lookup a name exported by 'Data.Array.Parallel.Prim' or 'Data.Array.Parallel.Prim',
-- returning `Nothing` if it's not defined. Panic if it's defined multiple times.
dsLookupDPHRdrEnv_maybe :: OccName -> DsM (Maybe Name)
dsLookupDPHRdrEnv_maybe occ
= do { env <- ds_dph_env <$> getGblEnv
; let gres = lookupGlobalRdrEnv env occ
; case gres of
[] -> return $ Nothing
[gre] -> return $ Just $ gre_name gre
_ -> pprPanic multipleNames (ppr occ)
}
where multipleNames = "Multiple definitions in 'Data.Array.Parallel' and 'Data.Array.Parallel.Prim':"
-- Populate 'ds_parr_bi' from 'ds_dph_env'.
--
dsInitPArrBuiltin :: DsM a -> DsM a
dsInitPArrBuiltin thing_inside
= do { lengthPVar <- externalVar (fsLit "lengthP")
; replicatePVar <- externalVar (fsLit "replicateP")
; singletonPVar <- externalVar (fsLit "singletonP")
; mapPVar <- externalVar (fsLit "mapP")
; filterPVar <- externalVar (fsLit "filterP")
; zipPVar <- externalVar (fsLit "zipP")
; crossMapPVar <- externalVar (fsLit "crossMapP")
; indexPVar <- externalVar (fsLit "!:")
; emptyPVar <- externalVar (fsLit "emptyP")
; appPVar <- externalVar (fsLit "+:+")
-- ; enumFromToPVar <- externalVar (fsLit "enumFromToP")
-- ; enumFromThenToPVar <- externalVar (fsLit "enumFromThenToP")
; enumFromToPVar <- return arithErr
; enumFromThenToPVar <- return arithErr
; updGblEnv (\env -> env {ds_parr_bi = PArrBuiltin
{ lengthPVar = lengthPVar
, replicatePVar = replicatePVar
, singletonPVar = singletonPVar
, mapPVar = mapPVar
, filterPVar = filterPVar
, zipPVar = zipPVar
, crossMapPVar = crossMapPVar
, indexPVar = indexPVar
, emptyPVar = emptyPVar
, appPVar = appPVar
, enumFromToPVar = enumFromToPVar
, enumFromThenToPVar = enumFromThenToPVar
} })
thing_inside
}
where
externalVar :: FastString -> DsM Var
externalVar fs = dsLookupDPHRdrEnv (mkVarOccFS fs) >>= dsLookupGlobalId
arithErr = panic "Arithmetic sequences have to wait until we support type classes"
dsGetFamInstEnvs :: DsM FamInstEnvs
-- Gets both the external-package inst-env
-- and the home-pkg inst env (includes module being compiled)
dsGetFamInstEnvs
= do { eps <- getEps; env <- getGblEnv
; return (eps_fam_inst_env eps, ds_fam_inst_env env) }
dsGetMetaEnv :: DsM (NameEnv DsMetaVal)
dsGetMetaEnv = do { env <- getLclEnv; return (dsl_meta env) }
dsLookupMetaEnv :: Name -> DsM (Maybe DsMetaVal)
dsLookupMetaEnv name = do { env <- getLclEnv; return (lookupNameEnv (dsl_meta env) name) }
dsExtendMetaEnv :: DsMetaEnv -> DsM a -> DsM a
dsExtendMetaEnv menv thing_inside
= updLclEnv (\env -> env { dsl_meta = dsl_meta env `plusNameEnv` menv }) thing_inside
-- | Gets a reference to the SPT entries created so far.
dsGetStaticBindsVar :: DsM (IORef [(Fingerprint, (Id,CoreExpr))])
dsGetStaticBindsVar = fmap ds_static_binds getGblEnv
discardWarningsDs :: DsM a -> DsM a
-- Ignore warnings inside the thing inside;
-- used to ignore inaccessible cases etc. inside generated code
discardWarningsDs thing_inside
= do { env <- getGblEnv
; old_msgs <- readTcRef (ds_msgs env)
; result <- thing_inside
-- Revert messages to old_msgs
; writeTcRef (ds_msgs env) old_msgs
; return result }
| nushio3/ghc | compiler/deSugar/DsMonad.hs | bsd-3-clause | 20,093 | 2 | 18 | 6,416 | 3,759 | 2,027 | 1,732 | 320 | 5 |
{-
Teak synthesiser for the Balsa language
Copyright (C) 2007-2010 The University of Manchester
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Andrew Bardsley <[email protected]> (and others, see AUTHORS)
School of Computer Science, The University of Manchester
Oxford Road, MANCHESTER, M13 9PL, UK
-}
module Gen (
genPartToGateNetlist,
GenPartToGateOption (..),
mappingNetlistToTechMapping,
-- mappingNetlistToCosts,
genMakeComp,
genTechMap,
fullBundles2,
genMakeGatesFile,
TechMapping,
GateCosts
) where
import Misc
import NetParts
import ParseTree
import Data.List
import qualified Data.Map as DM
import Gates
import Type
import Data.Bits
import Bits
import Data.Maybe
import Control.Monad
import Data.Char
import Numeric
import System.IO
import System.Time
data RelDir = Forward | Reverse
deriving (Show, Read, Eq)
invertDir :: Direction -> Direction
invertDir Input = Output
invertDir Output = Input
data Portion = R0 | R1 | A | N | R | G
deriving (Show, Read)
bundle :: String -> Int -> Portion -> String
bundle name index A = name ++ "_" ++ show index ++ "a"
bundle name index R0 = name ++ "_" ++ show index ++ "r0"
bundle name index R1 = name ++ "_" ++ show index ++ "r1"
bundle name index R = name ++ "_" ++ show index ++ "r"
bundle name index N = name ++ "_" ++ show index
bundle name _ G = name
fullBundles :: String -> Int -> Int -> [(String, Int, RelDir)]
fullBundles name width index
| width == 0 = [(portion R, 1, Forward), (portion A, 1, Reverse)]
| otherwise = [(portion R0, width, Forward), (portion R1, width, Forward), (portion A, 1, Reverse)]
where portion = bundle name index
fullBundles2 :: String -> Int -> Int -> [(String, Int, Bool)] -- True = Forward, False = Reverse
fullBundles2 name width index
| width == 0 = [(portion R, 1, True), (portion A, 1, False)]
| otherwise = [(portion R0, width, True), (portion R1, width, True), (portion A, 1, False)]
where portion = bundle name index
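-- For example, a 2-bit dual-rail bundle at index 0:
-- fullBundles2 "inp" 2 0 = [("inp_0r0",2,True),("inp_0r1",2,True),("inp_0a",1,False)]
-- and a 0-width (control-only) bundle collapses to a request/acknowledge pair:
-- fullBundles2 "go" 0 0 = [("go_0r",1,True),("go_0a",1,False)]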
relDirXDirToPortDir :: RelDir -> Direction -> Direction
relDirXDirToPortDir Forward = id
relDirXDirToPortDir Reverse = invertDir
mkPorts :: String -> Direction -> Int -> [Int] -> [GatePort]
mkPorts name dir count widths = map mkPort $ concatMap bundle $ zip widths [0..count - 1]
where
mkPort (name, width, relDir) = GatePort name (relDirXDirToPortDir relDir dir) width
bundle (width, i) = fullBundles name width i
nets :: String -> Int -> Int -> [GateElem]
nets _ _ 0 = []
nets name count width = map (\i -> GateNet (bundle name i N) width) [0..count - 1]
netsW :: String -> Int -> [Int] -> [GateElem]
netsW name count widths = map (\(i, width) -> GateNet (bundle name i N) width) $ zip [0..count - 1] widths
netsIW :: String -> [Int] -> [Int] -> [GateElem]
netsIW name indices widths = map (\(i, width) -> GateNet (bundle name i N) width) $ zip indices widths
conn :: Portion -> String -> Int -> Slice Int -> [GateConn]
conn portion name index slice = [GateConn (bundle name index portion) slice]
connSlice :: Slice Int -> [GateConn] -> [GateConn]
connSlice slice conns
| retLength == 0 = error $ "connSlice: no slices: " ++ show conns ++ " " ++ verilogShowSlice slice ""
| otherwise = ret
where
retLength = length ret
ret = body 0 (sliceWidth slice) conns
topBit = sliceHigh slice
offset = sliceOffset slice
body _ 0 [] = []
body _ _ [] = error $ "connSlice: not enough bits: " ++ show conns ++ " " ++ verilogShowSlice slice ""
body thisOffset remainingWidth ((GateConn name connSlice):cs)
| nextOffset <= offset = next remainingWidth cs
| beyondSlice && remainingWidth /= 0 = error "connSlice: not enough bits"
| beyondSlice = []
| otherwise = (GateConn name ((connOffset + newConnShift) +:
(min remainingWidth (connWidth - newConnShift)))) : next (remainingWidth - newConnWidth) cs
where
connWidth = sliceWidth connSlice
connOffset = sliceOffset connSlice
beyondSlice = thisOffset > topBit
newConnShift = max 0 (offset - thisOffset)
newConnWidth = min remainingWidth (connWidth - newConnShift)
nextOffset = thisOffset + connWidth
next = body nextOffset
connWidth :: [GateConn] -> Int
connWidth conns = sum $ map gateConnWidth conns
smash :: [GateConn] -> [GateConn]
smash conns = concatMap smashConn conns
where smashConn (GateConn name slice) = map makeGate $ sliceIndices slice
where makeGate i = GateConn name (i +: 1)
smashSplit :: [GateConn] -> [[GateConn]]
smashSplit = map (:[]) . smash
gateConnWidth :: GateConn -> Int
gateConnWidth (GateConn _ slice) = sliceWidth slice
gate :: String -> [[GateConn]] -> [GateElem]
gate _ [] = []
gate gateName connss
| length widths > 0 && any (/= width) widths = error $ "bad widths in `" ++ gateName ++ "': " ++ show connss
| otherwise = map makeGate [0..width-1]
where
makeGate i = GateInstance gateName (map (connSlice (i +: 1)) connss)
widths = map connWidth connss
width = head widths
singleGate :: String -> [[GateConn]] -> [GateElem]
singleGate gateName connss = [GateInstance gateName connss]
singleGateParam :: String -> [GateParam] -> [[GateConn]] -> [GateElem]
singleGateParam gateName params connss = [GateInstanceParam gateName params connss]
gateSome :: String -> [Some [GateConn]] -> [GateElem]
gateSome gateName connss = gate gateName $ flattenSome $ Some connss
each :: Int -> Portion -> String -> Slice Int -> [[GateConn]]
each count portion name slice = map
(\index -> conn portion name index slice) [0..count-1]
eachW :: Int -> Portion -> String -> Int -> [Int] -> [[GateConn]]
eachW count portion name offset widths = map
(\(index, width) -> conn portion name index (offset +: width)) $ zip [0..count-1] widths
eachIW :: Portion -> String -> Int -> [Int] -> [Int] -> [[GateConn]]
eachIW portion name offset indices widths = map
(\(index, width) -> conn portion name index (offset +: width)) $ zip indices widths
dupEach :: Int -> [[GateConn]] -> [[GateConn]]
dupEach w connss = map (concat . replicate w) $ connss
-- dup w connss = map (replicate w . concat) $ connss
dupEachW :: [Int] -> [[GateConn]] -> [[GateConn]]
dupEachW ws connss = map (\(w, cons) -> concat (replicate w cons)) $ zip ws connss
makeGateTree :: String -> String -> Int -> Bool -> [GateConn] -> [GateConn] -> [GateElem]
makeGateTree prefix gateType maxFanIn useInvGates output input =
gateTree (1 :: Int) invOutputIfLast firstStageGate (smashSplit input)
where
fullGateInverts = gateInverts gateType
invOutputIfLast = if useInvGates then not fullGateInverts else fullGateInverts
invGateType = if fullGateInverts then gateType else invertGateOutput gateType
firstStageGate = if useInvGates then invGateType else invertGateOutput invGateType
gateTree _ _ _ [] = error "makeGateTree: zero inputs"
gateTree level invOutputIfLast stageGate inputs
| inputCount <= maxFanIn = gateSome lastGate [One output, Many inputs]
-- | stageOutputWidth /= length groupedInputs = error "!!!!"
| otherwise = stageNets ++ stageGates ++ rest
where
inputCount = length inputs
lastGate = gateMap inputCount $ if invOutputIfLast
then invertGateOutput stageGate
else stageGate
stageOutputName = prefix ++ show level
stageOutputWidth = (inputCount + maxFanIn - 1) `div` maxFanIn
stageNets = nets stageOutputName 1 stageOutputWidth
stageOutputConn = conn N stageOutputName 0 (0 +: stageOutputWidth)
stageOutputs = smashSplit stageOutputConn
groupedInputs = mapN maxFanIn id inputs
stageGates = map makeGate $ zip stageOutputs groupedInputs
makeGate (output, inputs) = head $ gateSome mappedGateName [One output, Many inputs]
where mappedGateName = gateMap (length inputs) stageGate
rest = if useInvGates
then gateTree (level + 1) (not invOutputIfLast) (deMorgansOpposite stageGate) stageOutputs
else gateTree (level + 1) invOutputIfLast stageGate stageOutputs
deMorgansOpposite :: String -> String
deMorgansOpposite gateType = case gateType of
"and" -> "or"
"buff" -> "buff"
"c" -> "c"
"inv" -> "inv"
"nand" -> "nor"
"nc" -> "nc"
"nor" -> "nand"
"or" -> "and"
_ -> error $ "deMorgansOpposite: unrecognised gate `" ++ gateType ++ "'"
invertGateOutput :: String -> String
invertGateOutput gateType = case gateType of
"and" -> "nand"
"buff" -> "inv"
"c" -> "nc"
"inv" -> "buff"
"nand" -> "and"
"nc" -> "c"
"nor" -> "or"
"or" -> "nor"
_ -> error $ "invertGateOutput: unrecognised gate `" ++ gateType ++ "'"
gateInverts :: String -> Bool
gateInverts gateType = case gateType of
"and" -> False
"buff" -> False
"c" -> False
"inv" -> True
"nand" -> True
"nc" -> True
"nor" -> True
"or" -> False
_ -> error $ "gateInverts: unrecognised gate `" ++ gateType ++ "'"
gateMap :: Int -> String -> String
gateMap 1 gate = if gateInverts gate then "inv" else "buff"
gateMap width gate = gate ++ show width
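-- For example: gateMap 3 "nand" = "nand3" and gateMap 3 "and" = "and3", while a
-- single-input gate degenerates to an inverter or buffer: gateMap 1 "nand" = "inv",
-- gateMap 1 "and" = "buff".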
drCompletion :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
drCompletion tempName connF connT connResult = concat [
nets tempName 1 width,
gate "or" [temp, connF, connT],
gateSome "c" [One connResult, Many (smashSplit temp)] ]
where
width = connWidth connF
temp = conn N tempName 0 (0 +: width)
drCompletions :: String -> [[GateConn]] -> [[GateConn]] -> [[GateConn]] -> [GateElem]
drCompletions tempName d0s d1s outputs = concatMap makeCompletion $ zip4 ([0..] :: [Int]) d0s d1s outputs
where makeCompletion (i, d0, d1, output) = drCompletion (tempName ++ show i) d0 d1 output
reset :: [GateConn]
reset = conn G "reset" 0 (0 +: 1)
pipeLatch :: String -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
pipeLatch tempName inpF inpT inpA outF outT outA = concat [
nets nackName 1 1,
gate "c2r1" [outF, inpF, dupWidth nack, dupWidth reset],
gate "c2r1" [outT, inpT, dupWidth nack, dupWidth reset],
gate "inv" [nack, outA],
drCompletion (tempName ++ "comp") outF outT inpA ]
where
nackName = tempName ++ "na"
width = connWidth inpF
nack = conn N nackName 0 (0 +: 1)
dupWidth conn = concat (dupEach width [conn])
pipeLatch0 :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
pipeLatch0 tempName inpR inpA outR outA = concat [
nets tempName 1 1,
gate "c2r1" [outR, inpR, temp, reset],
gate "inv" [temp, outA],
gateSome "connect" [One outR, Many [inpA]] ]
where temp = conn N tempName 0 (0 +: 1)
pipeLatchN :: String -> Int -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
pipeLatchN _ 0 inpF inpT inpA outF outT outA = concat [
gateSome "connect" [One inpF, Many [outF]],
gateSome "connect" [One inpT, Many [outT]],
gateSome "connect" [One outA, Many [inpA]] ]
pipeLatchN tempName 1 inpF inpT inpA outF outT outA = pipeLatch tempName inpF inpT inpA outF outT outA
pipeLatchN tempName depth inpF inpT inpA outF outT outA = concat [
nets tempFName 1 width,
nets tempTName 1 width,
nets tempAName 1 1,
pipeLatch (tempName ++ "b") inpF inpT inpA tempF tempT tempA,
pipeLatchN (tempName ++ "o") (depth - 1) tempF tempT tempA outF outT outA ]
where
width = connWidth inpF
tempFName = tempName ++ "f"
tempTName = tempName ++ "t"
tempAName = tempName ++ "a"
tempF = conn N tempFName 0 (0 +: width)
tempT = conn N tempTName 0 (0 +: width)
tempA = conn N tempAName 0 (0 +: 1)
pipeLatch0N :: String -> Int -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
pipeLatch0N _ 0 inpR inpA outR outA = concat [
gateSome "connect" [One inpR, Many [outR]],
gateSome "connect" [One outA, Many [inpA]] ]
pipeLatch0N tempName 1 inpR inpA outR outA = pipeLatch0 tempName inpR inpA outR outA
pipeLatch0N tempName depth inpR inpA outR outA = concat [
nets tempRName 1 1,
nets tempAName 1 1,
pipeLatch0 (tempName ++ "b") inpR inpA tempR tempA,
pipeLatch0N (tempName ++ "o") (depth - 1) tempR tempA outR outA ]
where
tempRName = tempName ++ "f"
tempAName = tempName ++ "a"
tempR = conn N tempRName 0 (0 +: 1)
tempA = conn N tempAName 0 (0 +: 1)
bundlesAtIndicesW :: [Int] -> Portion -> String -> Int -> [Int] -> [[GateConn]]
bundlesAtIndicesW indices portion name offset widths = map
(\index -> conn portion name index (offset +: (widths !! index))) indices
bundlesAtIndices :: [Int] -> Portion -> String -> Slice Int -> [[GateConn]]
bundlesAtIndices indices portion name slice = map
(\index -> conn portion name index slice) indices
connectBundleSlices :: [Slice Int] -> [GateConn] -> [GateConn] -> Portion -> String -> [GateElem]
connectBundleSlices slices complete input outputPortion outputName = elems
where
elems = concatMap makeBundle $ zip [0..] slices
inpWidth = connWidth input
makeBundle (index, slice)
| isEmptySlice slice = []
| sliceWidth slice == inpWidth = gateSome "connect" [One (connSlice slice input),
Many [conn outputPortion outputName index (0 +: sliceWidth slice)]]
| otherwise = concat [
gate "c" [conn outputPortion outputName index (0 +: 1),
connSlice (sliceOffset slice +: 1) input, complete],
if sliceWidth slice > 1
then gateSome "connect"
[One (connSlice ((sliceOffset slice + 1) +: (sliceWidth slice - 1)) input),
Many [conn outputPortion outputName index (1 +: (sliceWidth slice - 1))]]
else []
]
drLatch :: String -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
drLatch tempName inpF inpT en inpA outF outT reset = concat [
nets gfName 1 width,
nets gtName 1 width,
gate "and" [gf, inpF, en],
gate "and" [gt, inpT, en],
gate "nor" [outF, outT, gt],
gate "nor" [outT, outF, gf, concat (dupEach width [reset])],
gate "ao22" [inpA, gf, outF, gt, outT] ]
where
gfName = tempName ++ "gf"
gtName = tempName ++ "gt"
gf = conn N gfName 0 (0 +: width)
gt = conn N gtName 0 (0 +: width)
width = connWidth inpF
-- findInputBits : from write offsets and widths give a list of input bits which form part of this
-- output. Returns (inpIndex, inpBit) pairs
findInputBits :: [Slice Int] -> Int -> [(Int, Int)]
findInputBits slices index = concatMap isAtThisBit $ zip [0..] slices
where
isAtThisBit (inpNo, slice)
| sliceInRange slice index = [(inpNo, sliceIndex slice index)]
| otherwise = []
connsAtBit :: [[GateConn]] -> [(Int, Int)] -> [[GateConn]]
connsAtBit inps bitSelection = map makeSlice bitSelection
where makeSlice (inpNo, inpBit) = connSlice (inpBit +: 1) (inps !! inpNo)
connectWrites :: String -> [Int] -> [[GateConn]] -> [[GateConn]] ->
[GateConn] -> [GateConn] -> [GateConn] -> [[GateConn]] -> [[GateConn]] ->
[GateConn] -> [GateConn] -> [GateElem]
connectWrites tempName writeOffsets wgFs wgTs wF wT anyRead writeAcks inpCompletes bitEns bitAcks = concat [
nets igcName count 1,
nets igcanwName 1 1,
netsW gitName count widths,
netsW gifName count widths,
nets igName count 1,
gate "and" [concat gif, concat wgFs,
concat (dupEachW widths (each count N igName (0 +: 1)))],
gate "and" [concat git, concat wgTs,
concat (dupEachW widths (each count N igName (0 +: 1)))],
gateSome "connect" [One (concat inpCompletes), Many [concat (each count N igcName (0 +: 1))]],
gate "c1u1" [concat (each count N igName (0 +: 1)),
concat (each count N igcName (0 +: 1)),
concat (dupEach count [conn N igcanwName 0 (0 +: 1)])],
gateSome "nor" [One (conn N igcanwName 0 (0 +: 1)), Many (anyRead : each count N igName (0 +: 1))],
concatMap muxForBit [0..width - 1],
concatMap completeInput $ zip4 writeOffsets widths writeAcks (each count N igName (0 +: 1))
]
where
gif = eachW count N gifName 0 widths
git = eachW count N gitName 0 widths
-- allInpCompletes = concat inpCompletes
muxForBit i = concat [
gateSome "or" [One (connSlice (i +: 1) wF), Many (connsAtBit gif inputBits)],
gateSome "or" [One (connSlice (i +: 1) wT), Many (connsAtBit git inputBits)],
-- or together inpCompletes for each bit to form enable to latches
gateSome "or" [One (connSlice (i +: 1) bitEns), Many (map ((inpCompletes !!) . fst) inputBits)]
]
where inputBits = findInputBits (zipWith (+:) writeOffsets widths) i
completeInput (offset, width, inpAck, ig) = gateSome "c" [One inpAck, One ig, Many
(map (\i -> connSlice (i +: 1) bitAcks) [offset..offset + width - 1])]
width = connWidth wF
widths = map connWidth wgFs
igcName = tempName ++ "igc"
igcanwName = tempName ++ "igcanw"
gitName = tempName ++ "git"
gifName = tempName ++ "gif"
igName = tempName ++ "ig"
count = length writeOffsets
-- FIXME
handleBuiltinWrites :: String -> [Int] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
handleBuiltinWrites name offsets bitEns wF wT dT = concatMap makeReref offsets
where
makeReref offset = singleGateParam "tkr_builtin_var_write_reref"
[GateParamString name, GateParamInt width, GateParamInt offset]
[bitEns, dT, wF, wT]
width = connWidth dT
handleBuiltinReads :: String -> [Int] -> [Int] -> [[GateConn]] -> [GateConn] -> [GateElem]
handleBuiltinReads name builtinOffsets readOffsets readGos dT = concatMap makeReref $ zip readOffsets readGos
where
makeReref (offset, readGo)
| offset `elem` builtinOffsets = singleGateParam "tkr_builtin_var_read_reref"
[GateParamString name, GateParamInt width, GateParamInt offset]
[dT, readGo]
| otherwise = []
width = connWidth dT
connectReads :: String -> [Slice Int] -> [GateConn] -> [[GateConn]] -> [[GateConn]] -> [GateElem]
connectReads andGate slices inp gos outs = concatMap makeOutputGates $
zip4 ([0..] :: [Int]) slices gos outs
where
makeOutputGates (_, slice, go, out)
| not (isEmptySlice slice) =
gate andGate [out, connSlice slice inp, concat (dupEach (sliceWidth slice) [go])]
| otherwise = []
steerMatches :: String -> Slice Int -> [[Implicant]] -> [GateConn] -> [GateConn] -> [[GateConn]] -> [GateElem]
steerMatches tempName slice impss inpF inpT sels = concatMap steerMatch $ zip3 ([0..] :: [Int]) impss sels
where
steerMatch (i, imps, sel) = concat [
nets orImpName 1 impCount,
gateSome "or" [One sel, Many (smashSplit (conn N orImpName 0 (0 +: impCount)))],
concatMap (\(i, imp) -> gateSome "c" [One (conn N orImpName 0 (i +: 1)), Many
(implicantMatchConns imp)]) $ zip [0..] imps
]
where
orImpName = tempName ++ show i
impCount = length imps
-- width = connWidth inpF
implicantMatchConns (Imp value dcs) = mapMaybe bitMatch [0..sliceWidth slice - 1]
where
bitMatch i
| not (testBit dcs i) = Just $ connSlice ((sliceOffset slice + i) +: 1)
(if testBit value i then inpT else inpF)
| otherwise = Nothing
oTermNeedsGo :: TeakOTerm -> Bool
oTermNeedsGo (TeakOConstant {}) = True
oTermNeedsGo (TeakOBuiltin {}) = True
oTermNeedsGo _ = False
minTerms2 :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
minTerms2 termName lF lT hF hT = concat [
nets termName 1 4,
gate "c" [conn N termName 0 (0 +: 4), concat [hF, hF, hT, hT], concat [lF, lT, lF, lT]] ]
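-- A note on the encoding (read off the wiring above): minterm i is asserted
-- when the low input carries bit 0 of i and the high input carries bit 1 of
-- i, i.e. 0 = lF&hF, 1 = lT&hF, 2 = lF&hT, 3 = lT&hT. The drBin and
-- halfAdder definitions below just list which minterms drive each dual-rail
-- output, e.g. drOr = drBin [0] [1,2,3]: false only when both inputs are false.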
type DRBin = String -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
drBin :: [Int] -> [Int] -> DRBin
drBin minTermsF minTermsT outName outF outT aF aT bF bT = concat [
minTerms2 outName aF aT bF bT,
gateSome "or" [One outF, Many (map minTerm minTermsF)],
gateSome "or" [One outT, Many (map minTerm minTermsT)] ]
where minTerm i = conn N outName 0 (i +: 1)
drOr :: DRBin
drOr = drBin [0] [1,2,3]
drAnd :: DRBin
drAnd = drBin [0,1,2] [3]
drXor :: DRBin
drXor = drBin [0,3] [1,2]
{-
drNor :: DRBin
drNor = drBin [1,2,3] [0]
drNand :: DRBin
drNand = drBin [3] [0,1,2]
-}
drXnor :: DRBin
drXnor = drBin [1,2] [0,3]
drTree :: DRBin -> String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
drTree moduleFunc prefix outf outt inpf inpt = binModuleTree moduleFunc' prefix [outf,outt] [inpf,inpt]
where
moduleFunc' p [outf,outt] [lf,lt] [hf,ht] = moduleFunc p outf outt lf lt hf ht
moduleFunc' _ _ _ _ = error "moduleFunc': bad args"
-- binModuleTree : make a tree which pairwise combines the nth elements of `inputs' into the nth elements
-- of `outputs', using modules created by applying `moduleFunc'
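-- For example (hypothetical 5-bit-wide inputs): rank one pairs input bits
-- (0,1) and (2,3) and aliases bit 4 through to the next rank; rank two pairs
-- the two combined bits and aliases the carried bit again; rank three pairs
-- the remaining two bits directly into `outputs'. That is four module
-- instances in three ranks.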
binModuleTree :: (String -> [[GateConn]] -> [[GateConn]] -> [[GateConn]] -> [GateElem]) ->
String -> [[GateConn]] -> [[GateConn]] -> [GateElem]
binModuleTree moduleFunc prefix outputs inputs
| inputWidth == 0 = error "binModuleTree: inputs must be wider than 0 bits"
| inputWidth == 1 = concatMap connect $ zip outputs inputs
| otherwise = concat [
newOutputNets,
nextRank,
if odd then topBitAlias else [],
concatMap (\i ->
moduleFunc (prefix ++ show i)
(map (connSlice (i +: 1)) newOutputs)
(map (connSlice ((i * 2) +: 1)) inputs)
(map (connSlice ((1 + i * 2) +: 1)) inputs)
) [0..halfWidth-1] ]
where
(newOutputNets, newOutputs, nextRank) = if inputWidth == 2
then ([], outputs, [])
else (nets newOutputName count newOutputWidth, each count N newOutputName (0 +: newOutputWidth),
binModuleTree moduleFunc (prefix ++ "r") outputs newOutputs)
topBitAlias = concatMap connect $ zip (map topBitOut newOutputs) (map topBitInp inputs)
topBitOut = connSlice ((newOutputWidth - 1) +: 1)
topBitInp = connSlice ((inputWidth - 1) +: 1)
newOutputName = prefix ++ "o"
newOutputWidth = inputWidth - halfWidth
connect (out, inp) = gateSome "connect" [One inp, Many [out]]
inputWidth = connWidth (head inputs)
halfWidth = inputWidth `div` 2
odd = inputWidth `mod` 2 == 1
count = length inputs
combineELG :: String -> [[GateConn]] -> [[GateConn]] -> [[GateConn]] -> [GateElem]
combineELG prefix [oeq,olt,ogt] [leq,llt,lgt] [heq,hlt,hgt] = concat [
nets ltint 1 1,
nets gtint 1 1,
gate "c" [oeq, leq, heq],
gate "c" [conn N ltint 0 (0 +: 1), llt, heq],
gate "c" [conn N gtint 0 (0 +: 1), lgt, heq],
gate "or" [olt, conn N ltint 0 (0 +: 1), hlt],
gate "or" [ogt, conn N gtint 0 (0 +: 1), hgt] ]
where
ltint = prefix ++ "ltint"
gtint = prefix ++ "gtint"
combineELG _ _ _ _ = error "combineELG: can't happen"
halfAdder0 :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
halfAdder0 prefix outF outT coF coT lF lT rF rT = concat [
minTerms2 prefix lF lT rF rT,
gate "or" [coF, minTerm 0, minTerm 1, minTerm 2],
gate "or" [coT, minTerm 3],
gate "or" [outF, minTerm 0, minTerm 3],
gate "or" [outT, minTerm 1, minTerm 2] ]
where minTerm i = conn N prefix 0 (i +: 1)
halfAdder1 :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
halfAdder1 prefix outF outT coF coT lF lT rF rT = concat [
minTerms2 prefix lF lT rF rT,
gate "or" [coF, minTerm 0],
gate "or" [coT, minTerm 1, minTerm 2, minTerm 3],
gate "or" [outF, minTerm 1, minTerm 2],
gate "or" [outT, minTerm 0, minTerm 3] ]
where minTerm i = conn N prefix 0 (i +: 1)
fullAdder :: String -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] ->
[GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateConn] -> [GateElem]
fullAdder prefix outF outT coF coT lF lT rF rT ciF ciT = concat [
nets min 1 8,
gate "c" [conn N min 0 (0 +: 8),
concat (concat (replicate 1 ((replicate 4 ciF) ++ (replicate 4 ciT)))),
concat (concat (replicate 2 ((replicate 2 rF) ++ (replicate 2 rT)))),
concat (concat (replicate 4 ((replicate 1 lF) ++ (replicate 1 lT)))) ],
gate "or" [outF, conn N min 0 (0 +: 1), conn N min 0 (3 +: 1), conn N min 0 (5 +: 1), conn N min 0 (6 +: 1)],
gate "or" [outT, conn N min 0 (1 +: 1), conn N min 0 (2 +: 1), conn N min 0 (4 +: 1), conn N min 0 (7 +: 1)],
gate "ao222" [coT, lT, rT, lT, ciT, rT, ciT],
gate "ao222" [coF, lF, rF, lF, ciF, rF, ciF] ]
where min = prefix ++ "min"
handleOBuiltin :: String -> String -> Int -> [TeakParam] -> [GateConn] ->
[GateConn] -> [GateConn] -> [[GateConn]] -> [[GateConn]] -> [GateElem]
handleOBuiltin _ name _ params go outF outT inFs inTs = body name where
body "String" = concat [
singleGateParam "tkr_string" [GateParamString string] [go, outF, outT]
]
-- where [ExprFuncActual _ (ValueExpr _ _ (StringValue string))] = params
where [TeakParamString string] = params
body "tWriteMessage" = concat [
-- nets done 1 1,
singleGate "tkr_print" [inFs !! 0, inTs !! 0] -- , conn N done 0 0 1]
]
body "ToString" = concat [
singleGateParam "tkr_to_string" [GateParamInt (widthOfType [] typ)] [inFs !! 0, inTs !! 0,
outF, outT]
]
-- where [TypeFuncActual typ] = params
where [TeakParamType typ] = params
body "NumberToString" = concat [
singleGateParam "tkr_number_to_string" [GateParamInt (widthOfType [] typ)] [inFs !! 0, inTs !! 0,
inFs !! 1, inTs !! 1,
inFs !! 2, inTs !! 2,
inFs !! 3, inTs !! 3,
outF, outT]
]
-- where [TypeFuncActual typ] = params
where [TeakParamType typ] = params
body "StringAppend" = concat [
singleGate "tkr_string_append" [inFs !! 0, inTs !! 0, inFs !! 1, inTs !! 1, outF, outT]
]
body "Chr" = concat [
singleGate "tkr_chr" [inFs !! 0, inTs !! 0, outF, outT]
]
body "BalsaSimulationStop" = concat [
singleGate "tkr_stop" [go],
gate "gnd" [outF],
gate "gnd" [outT]
]
body name = error $ "Don't recognise builtin " ++ name
makeOTerms :: [(Int, TeakOTerm)] -> [GateConn] -> Maybe [GateConn] -> [[GateConn]] -> [[GateConn]] -> [GateElem]
makeOTerms terms go done termF termT = concatMap makeOTerm terms
where
oneIndices width value = filter (testBit value) [0..width - 1]
termIndexMapping = (0, 0) : zip (map fst terms) [1..]
findTermIndex i
| isNothing termPos = error $ "makeOTerms: bad term index " ++ show i
| otherwise = fromJust $ termPos
where termPos = lookup i termIndexMapping
osliceConn termConns (i, slice) = connSlice slice (termConns !! findTermIndex i)
makeOTerm (i, term) = case term of
TeakOConstant width value -> concat [
gateSome "connect" [One go, Many (filterBits outT value)],
gateSome "gnd" [Many (filterBits outF value)],
gateSome "connect" [One go, Many (filterBits outF notValue)],
gateSome "gnd" [Many (filterBits outT notValue)] ]
where
filterBits conn value = map (smashSplit conn !!) (oneIndices width value)
notValue = (bit width) - (1 + value)
TeakOAppend count slices -> concatMap connect (map (*slicesWidth) [0..count - 1])
where
slicesWidth = sum $ map oSliceWidth slices
connect offset = concat [
gateSome "connect" [One (concatMap (osliceConn termF) slices),
Many [connSlice (offset +: slicesWidth) outF]],
gateSome "connect" [One (concatMap (osliceConn termT) slices),
Many [connSlice (offset +: slicesWidth) outT]]
]
TeakOBuiltin name width params slices -> concat [
-- nets iComp iCount 1,
-- nets termGo 1 1,
-- gateSome "c" [One (conn N termGo 0 0 1), Many (each iCount N iComp 0 1), One go],
-- drCompletions ("comp" ++ show i) inF inT (each iCount N iComp 0 1),
handleOBuiltin ("b" ++ show i)
name width params {- (conn N termGo 0 0 1) -} go outF outT
(map (osliceConn termF) slices)
(map (osliceConn termT) slices),
-- FIXME, need to thread go/done
if isJust done
then gateSome "connect" [One go, Many [fromJust done]]
else []
]
TeakOp TeakOpAdd [l, r] -> add True l r
TeakOp TeakOpSub [l, r] -> add False l r
TeakOp TeakOpOr [l, r] -> bin drOr l r
TeakOp TeakOpAnd [l, r] -> bin drAnd l r
TeakOp TeakOpXor [l, r] -> bin drXor l r
TeakOp TeakOpNot [r] -> concatMap (\i -> concat [
gateSome "connect" [One (connSlice (i +: 1) (osliceConn termF r)), Many [connSlice (i +: 1) outT]],
gateSome "connect" [One (connSlice (i +: 1) (osliceConn termT r)), Many [connSlice (i +: 1) outF]] ]
) [0..width - 1]
where width = oSliceWidth r
TeakOp op [l, r]
| op == TeakOpEqual -> concat [eqNets, bitEqs, drTree drAnd comb outF outT eqConnF eqConnT]
| op == TeakOpNotEqual -> concat [eqNets, bitEqs, drTree drAnd comb outT outF eqConnF eqConnT]
where
eqNets = nets eqF 1 width ++ nets eqT 1 width
bitEqs = binChooseOut eqConnF eqConnT drXnor l r
eqF = "xf" ++ show i
eqT = "xt" ++ show i
eqConnF = conn N eqF 0 (0 +: width)
eqConnT = conn N eqT 0 (0 +: width)
comb = "c" ++ show i
width = oSliceWidth r
TeakOp op [l, r]
| op `elem` compares -> concat [
nets bitEqs 1 width,
nets bitGts 1 width,
nets bitLts 1 width,
nets outEq 1 1,
nets outGt 1 1,
nets outLt 1 1,
nets mt0 1 width,
nets mt3 1 width,
gate "c" [conn N mt0 0 (0 +: width), lConnF, rConnF],
gate "c" [conn N mt3 0 (0 +: width), lConnT, rConnT],
gate "c" [conn N bitLts 0 (0 +: width), lConnF, rConnT],
gate "c" [conn N bitGts 0 (0 +: width), lConnT, rConnF],
gate "or" [conn N bitEqs 0 (0 +: width), conn N mt0 0 (0 +: width), conn N mt3 0 (0 +: width)],
binModuleTree combineELG ("comb" ++ show i)
[outEqConn, outLtConn, outGtConn]
[conn N bitEqs 0 (0 +: width), conn N bitLts 0 (0 +: width), conn N bitGts 0 (0 +: width)],
case op of
TeakOpUnsignedGT -> concat [
gate "or" [outF, outLtConn, outEqConn],
gateSome "connect" [One outGtConn, Many [outT]] ]
TeakOpUnsignedGE -> concat [
gateSome "connect" [One outLtConn, Many [outF]],
gate "or" [outT, outGtConn, outEqConn] ]
_ -> error "FIXME signed comparisons"
-- FIXME, signed comparisons
]
where
mt0 = "mt0_" ++ show i
mt3 = "mt3_" ++ show i
bitEqs = "eq" ++ show i
bitLts = "lt" ++ show i
bitGts = "gt" ++ show i
outEq = "oeq" ++ show i
outLt = "olt" ++ show i
outGt = "ogt" ++ show i
outEqConn = conn N outEq 0 (0 +: 1)
outLtConn = conn N outLt 0 (0 +: 1)
outGtConn = conn N outGt 0 (0 +: 1)
compares = [TeakOpUnsignedGT, TeakOpUnsignedGE]
lConnF = osliceConn termF l
lConnT = osliceConn termT l
rConnF = osliceConn termF r
rConnT = osliceConn termT r
width = oSliceWidth r
TeakOMux spec (selSlice:slices) -> concat [
nets gintF c w,
nets gintT c w,
nets selcomp c 1,
nets sel c 1,
nets selg c 1,
nets icomplete 1 1,
nets scomplete 1 1,
drCompletions ("comp" ++ show i)
(map (osliceConn termF) slices)
(map (osliceConn termT) slices)
(each c N selcomp (0 +: 1)),
drCompletion ("dcomp" ++ show i) selF selT (conn N scomplete 0 (0 +: 1)),
gateSome "c" [One (conn N icomplete 0 (0 +: 1)),
Many ((conn N scomplete 0 (0 +: 1)):(each c N selcomp (0 +: 1)))],
gateSome "or" [One outF, Many (each c N gintF (0 +: w))],
gateSome "or" [One outT, Many (each c N gintT (0 +: w))],
gate "c2r1" [concat (each c N sel (0 +: 1)), concat (each c N selg (0 +: 1)),
concat (dupEach c [conn N icomplete 0 (0 +: 1)]), cReset],
-- FIXME, C elements, can use AND gates?
gate "c2r1" [concat (each c N gintF (0 +: w)),
concat (dupEach w (each c N sel (0 +: 1))), concatMap (osliceConn termF) slices, wcReset],
gate "c2r1" [concat (each c N gintT (0 +: w)),
concat (dupEach w (each c N sel (0 +: 1))), concatMap (osliceConn termT) slices, wcReset],
-- FIXME, don't do full completion on inputs to start with
-- steerMatches would need to generate true and complement to do this
steerMatches match (0 +: selWidth) spec selF selT (each c N selg (0 +: 1))
]
where
gintF = "gfint" ++ show i
gintT = "gtint" ++ show i
c = length spec
w = oSliceWidth $ head slices
selWidth = oSliceWidth selSlice
sel = "sel" ++ show i
selg = "selg" ++ show i
selcomp = "selcomp" ++ show i
icomplete = "icomplete" ++ show i
scomplete = "scomplete" ++ show i
selF = osliceConn termF selSlice
selT = osliceConn termT selSlice
match = "match" ++ show i
wcReset = concat $ dupEach (w * c) [reset]
cReset = concat $ dupEach c [reset]
op -> error $ "Gen.hs.makeOTerms: FIXME Unhandled TeakOp " ++ show op ++ " " ++ showNameOTerm op
where
binChooseOut outF outT opFunc l r = concat [
concatMap (\i -> opFunc (op ++ show i)
(connSlice (i +: 1) outF) (connSlice (i +: 1) outT)
(connSlice (i +: 1) lConnF) (connSlice (i +: 1) lConnT)
(connSlice (i +: 1) rConnF) (connSlice (i +: 1) rConnT)
) [0..width - 1] ]
where
op = "op" ++ show i ++ "_"
lConnF = osliceConn termF l
lConnT = osliceConn termT l
rConnF = osliceConn termF r
rConnT = osliceConn termT r
width = oSliceWidth l
bin = binChooseOut outF outT
add addNsub l r = concat [
nets cf 1 width,
nets ct 1 width,
(if addNsub then halfAdder0 else halfAdder1) ha
(connSlice (0 +: 1) outF) (connSlice (0 +: 1) outT)
(conn N cf 0 (0 +: 1)) (conn N ct 0 (0 +: 1))
(connSlice (0 +: 1) lConnF) (connSlice (0 +: 1) lConnT)
(rF 0) (rT 0),
concatMap (\i -> fullAdder (fa ++ show i)
(connSlice (i +: 1) outF) (connSlice (i +: 1) outT)
(conn N cf 0 (i +: 1)) (conn N ct 0 (i +: 1))
(connSlice (i +: 1) lConnF) (connSlice (i +: 1) lConnT)
(rF i) (rT i)
(conn N cf 0 ((i - 1) +: 1)) (conn N ct 0 ((i - 1) +: 1))
) [1..width - 1] ]
where
fa = "fa" ++ show i ++ "_"
ha = "ha" ++ show i ++ "_"
cf = "cf" ++ show i ++ "_"
ct = "ct" ++ show i ++ "_"
lConnF = osliceConn termF l
lConnT = osliceConn termT l
rConnF = osliceConn termF r
rConnT = osliceConn termT r
width = oSliceWidth l
rF i = connSlice (i +: 1) $ if addNsub then rConnF else rConnT
rT i = connSlice (i +: 1) $ if addNsub then rConnT else rConnF
outF = termF !! (findTermIndex i)
outT = termT !! (findTermIndex i)
genMake :: TeakCompType -> [Some Int] -> GateNetlist
genMake typ@TeakJ widths@[Many inWidths, One outWidth]
| outWidth /= (sum inWidths) = error $ "J: bad widths: " ++ show widths
-- Just token links
| outWidth == 0 = netlist [
gateSome "c" [One (conn R "o" 0 (0 +: 1)), Many (each c R "i" (0 +: 1))],
gateSome "connect" [One (conn A "o" 0 (0 +: 1)), Many (each c A "i" (0 +: 1))] ]
-- Single input, just flow through
| inWidths == [outWidth] = netlist [
gateSome "connect" [One (conn R0 "i" 0 (0 +: 1)), One (conn R0 "o" 0 (0 +: 1))],
gateSome "connect" [One (conn R1 "i" 0 (0 +: 1)), One (conn R1 "o" 0 (0 +: 1))],
gateSome "connect" [One (conn A "o" 0 (0 +: 1)), Many (each c A "i" (0 +: 1))]
]
-- Must be at least two inputs, so the bottom output bit needs guarding
| otherwise = netlist [
nets "icomplete" 1 1,
nets "joinf" 1 outWidth,
nets "joint" 1 outWidth,
gateSome "connect" [One (concat (bundlesAtIndicesW nonZeroIndices R0 "i" 0 inWidths)),
Many [conn N "joinf" 0 (0 +: outWidth)]],
gateSome "connect" [One (concat (bundlesAtIndicesW nonZeroIndices R1 "i" 0 inWidths)),
Many [conn N "joint" 0 (0 +: outWidth)]],
-- Guard with only token inputs, or from data
if nonZeroCount >= 2
then concat [
nets "dcomplete" (nonZeroCount - 1) 1,
gateSome "or" [
One (concat (each (nonZeroCount - 1) N "dcomplete" (0 +: 1))),
One (concat (eachIW R0 "i" 0 (tail nonZeroIndices) (repeat 1))),
One (concat (eachIW R1 "i" 0 (tail nonZeroIndices) (repeat 1))) ],
gateSome "c" [One icomplete, Many zeroRequests,
Many (each (nonZeroCount - 1) N "dcomplete" (0 +: 1))]
]
else -- Only token guards. There must be at least one here
gateSome "c" [One icomplete, Many zeroRequests],
gate "c" [conn R0 "o" 0 (0 +: 1), conn N "joinf" 0 (0 +: 1), icomplete],
gate "c" [conn R1 "o" 0 (0 +: 1), conn N "joint" 0 (0 +: 1), icomplete],
if outWidth > 1
then concat [
gateSome "connect" [One (conn N "joinf" 0 (1 +: (outWidth - 1))),
Many [conn R0 "o" 0 (1 +: (outWidth - 1))]],
gateSome "connect" [One (conn N "joint" 0 (1 +: (outWidth - 1))),
Many [conn R1 "o" 0 (1 +: (outWidth - 1))]]
]
else [],
gateSome "connect" [One (conn A "o" 0 (0 +: 1)), Many (each c A "i" (0 +: 1))] ]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
c = length inWidths
icomplete = conn N "icomplete" 0 (0 +: 1)
ports = mkPorts "i" Input c inWidths ++ mkPorts "o" Output 1 [outWidth]
zeroIndices = findIndices (== 0) inWidths
nonZeroIndices = findIndices (/= 0) inWidths
zeroRequests = bundlesAtIndices zeroIndices R "i" (0 +: 1)
nonZeroCount = length nonZeroIndices
genMake typ@TeakM widths@[Many inWidths, One outWidth]
| any (/= w) inWidths = error $ "M: bad widths: " ++ show widths
| w == 0 = netlist [
nets "nchosen" 1 1,
nets "choice" c 1,
gate "c2r1" [concat (each c N "choice" (0 +: 1)), concat (each c R "i" (0 +: 1)), -- 1
dupC (conn N "nchosen" 0 (0 +: 1)), dupC reset],
gate "nor" [conn N "nchosen" 0 (0 +: 1), conn R "o" 0 (0 +: 1), conn A "o" 0 (0 +: 1)], -- 2
gateSome "or" [One $ conn R "o" 0 (0 +: 1), Many $ each c N "choice" (0 +: 1)], -- 3
gate "c2r1" [concat (each c A "i" (0 +: 1)), concat (each c N "choice" (0 +: 1)), -- 4
dupC (conn A "o" 0 (0 +: 1)), dupC reset] ]
| otherwise = netlist [
nets "gfint" c w,
nets "gtint" c w,
nets "choice" c 1,
nets "anychoice" 1 1,
nets "icomp" c 1,
nets "nchosen" 1 1,
gateSome "or" [One (conn R0 "o" 0 (0 +: w)), Many (each c N "gfint" (0 +: w))], -- 1
gateSome "or" [One (conn R1 "o" 0 (0 +: w)), Many (each c N "gtint" (0 +: w))], -- 2
gate "and" [concat (each c N "gtint" (0 +: w)), -- 3
concat (dupEach w (each c N "choice" (0 +: 1))), concat (each c R1 "i" (0 +: w))],
gate "and" [concat (each c N "gfint" (0 +: w)), -- 4
concat (dupEach w (each c N "choice" (0 +: 1))), concat (each c R0 "i" (0 +: w))],
drCompletions "comp" (each c R0 "i" (0 +: w)) (each c R1 "i" (0 +: w)) (each c N "icomp" (0 +: 1)), -- 5
gate "c2r1" [concat (each c N "choice" (0 +: 1)), concat (each c N "icomp" (0 +: 1)), -- 6
dupC (conn N "nchosen" 0 (0 +: 1)), dupC reset],
gateSome "or" [One (conn N "anychoice" 0 (0 +: 1)), Many (each c N "choice" (0 +: 1))], -- 7
gate "nor" [conn N "nchosen" 0 (0 +: 1), conn N "anychoice" 0 (0 +: 1), conn A "o" 0 (0 +: 1)], -- 8
gate "c2r1" [concat (each c A "i" (0 +: 1)), concat (each c N "choice" (0 +: 1)), -- 9
dupC (conn A "o" 0 (0 +: 1)), dupC reset] ]
where
w = outWidth
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
c = length inWidths
ports = mkPorts "i" Input c inWidths ++ mkPorts "o" Output 1 [outWidth]
dupC conn = concat (dupEach c [conn])
-- FIXME, need to make sure that input is complete before allowing outputs to become complete
-- use bottom bit as usual? Carry bottom bit complete to lowest output bit for each output
-- which isn't 0-index-based
genMake typ@(TeakF offsets) widths@[One inWidth, Many outWidths]
| any (> inWidth) (map (uncurry (+)) $ zip outWidths offsets) = error $ "F: bad widths: " ++ show widths
| inWidth == 0 = netlist [
gateSome "connect" [One (conn R "i" 0 (0 +: 1)), Many (each c R "o" (0 +: 1))],
gateSome "c" [One (conn A "i" 0 (0 +: 1)), Many (each c A "o" (0 +: 1))] ]
| otherwise = netlist [
nets "acomplete" 1 1,
nets "icomplete" 1 1,
if strict
then concat [
drCompletion "comp"
(conn R0 "i" 0 (0 +: inWidth))
(conn R1 "i" 0 (0 +: inWidth))
icomplete,
gateSome "connect" [One icomplete, One acomplete]
]
else concat [
gate "or" [ icomplete, conn R0 "i" 0 (0 +: 1), conn R1 "i" 0 (0 +: 1) ],
if null unusedSlices
then gateSome "connect" [One icomplete, One acomplete]
else concat [
nets "ucomplete" 1 1,
drCompletion "comp"
(concatMap (conn R0 "i" 0) unusedSlices)
(concatMap (conn R1 "i" 0) unusedSlices)
ucomplete,
gate "c" [acomplete, ucomplete, icomplete]
]
],
connectBundleSlices slices icomplete (conn R0 "i" 0 (0 +: inWidth)) R0 "o",
connectBundleSlices slices icomplete (conn R1 "i" 0 (0 +: inWidth)) R1 "o",
gateSome "connect" [One icomplete, Many (bundlesAtIndices zeroIndices R "o" (0 +: 1))],
gateSome "c" [One (conn A "i" 0 (0 +: 1)), One acomplete, Many (each c A "o" (0 +: 1))] ]
where
strict = False -- FIXME, this must be an option/style
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
c = length outWidths
icomplete = conn N "icomplete" 0 (0 +: 1)
acomplete = conn N "acomplete" 0 (0 +: 1)
ucomplete = conn N "ucomplete" 0 (0 +: 1)
ports = mkPorts "i" Input 1 [inWidth] ++ mkPorts "o" Output c outWidths
slices = zipWith (+:) offsets outWidths
zeroIndices = findIndices ((== 0) . sliceWidth) slices
usedBitmask = foldl' (.|.) (0 :: Integer) $ map sliceToBitmask slices
unusedSlices = bitmaskToIntervals (bitNot inWidth usedBitmask :: Integer)
genMake typ@(TeakS selSlice specs) widths@[One inWidth, Many outWidths]
| any (> inWidth) outWidths = error $ "S: bad widths: " ++ show widths
| otherwise = netlist [
nets "icomplete" 1 1,
nets "sel" c 1,
nets "gsel" c 1,
nets "oack" 1 1,
steerMatches "match" selSlice impss
(conn R0 "i" 0 (0 +: inWidth)) (conn R1 "i" 0 (0 +: inWidth)) (each c N "sel" (0 +: 1)),
gate "c" [concat (each c N "gsel" (0 +: 1)), concat (each c N "sel" (0 +: 1)),
concat (dupEach c [conn N "icomplete" 0 (0 +: 1)])],
drCompletion "comp" (conn R0 "i" 0 (0 +: inWidth)) (conn R1 "i" 0 (0 +: inWidth))
(conn N "icomplete" 0 (0 +: 1)),
connectReads "c" outSlices (conn R0 "i" 0 (0 +: inWidth)) (each c N "gsel" (0 +: 1))
(eachW c R0 "o" 0 outWidths),
connectReads "c" outSlices (conn R1 "i" 0 (0 +: inWidth)) (each c N "gsel" (0 +: 1))
(eachW c R1 "o" 0 outWidths),
gateSome "connect" [One (concat (bundlesAtIndices zeroWidthIndices N "gsel" (0 +: 1))),
Many [concat (bundlesAtIndices zeroWidthIndices R "o" (0 +: 1))]],
gateSome "or" [One (conn N "oack" 0 (0 +: 1)), Many (each c A "o" (0 +: 1))],
gate "c" [conn A "i" 0 (0 +: 1), conn N "oack" 0 (0 +: 1), conn N "icomplete" 0 (0 +: 1)]
]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
c = length outWidths
ports = mkPorts "i" Input 1 [inWidth] ++ mkPorts "o" Output c outWidths
(impss, outOffsets) = unzip specs
outSlices = zipWith (+:) outOffsets outWidths
zeroWidthIndices = findIndices (== 0) outWidths
genMake typ@(TeakO terms) widths@[One inWidth, One outWidth] = netlist [
if needGo && inWidth /= 0 then concat [
nets "go" 1 1,
-- gate "or" [conn N "go" 0 0 1, conn R0 "i" 0 0 1, conn R1 "i" 0 0 1]
drCompletion "gocomp" (conn R0 "i" 0 (0 +: inWidth)) (conn R1 "i" 0 (0 +: inWidth))
(conn N "go" 0 (0 +: 1))
] else [],
netsIW "termf" termIndices termWidths,
netsIW "termt" termIndices termWidths,
-- FIXME, zero width in/out
makeOTerms terms go (if outWidth == 0 then Just (conn R "o" 0 (0 +: 1)) else Nothing)
(conn R0 "i" 0 (0 +: inWidth) :
eachIW N "termf" 0 termIndices termWidths ++ [conn R0 "o" 0 (0 +: outWidth)])
(conn R1 "i" 0 (0 +: inWidth) :
eachIW N "termt" 0 termIndices termWidths ++ [conn R1 "o" 0 (0 +: outWidth)]),
gateSome "connect" [One (conn A "o" 0 (0 +: 1)), One (conn A "i" 0 (0 +: 1))]
]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
go = if inWidth /= 0 then conn N "go" 0 (0 +: 1) else conn R "i" 0 (0 +: 1)
needGo = or (map (oTermNeedsGo . snd) terms)
termWidths = map (oTermResultWidth . snd) $ init terms
termIndices = map fst $ init terms
ports = mkPorts "i" Input 1 [inWidth] ++ mkPorts "o" Output 1 [outWidth]
genMake typ@(TeakV name width bs ws rs) widths@[Many wgWidths, Many wdWidths, Many rgWidths, Many rdWidths]
| any (/= 0) wdWidths || any (/= 0) rgWidths ||
any (> width) wgWidths || any (> width) rdWidths ||
length wgWidths /= length wdWidths || length rgWidths /= length rdWidths ||
any (>= width) ws || any (>= width) rs =
error $ "V: bad widths: " ++ show widths
| otherwise = netlist [
-- nets "reset" 1 1,
nets "wf" 1 width,
nets "wt" 1 width,
nets "df" 1 width,
nets "dt" 1 width,
nets "wc" wc 1,
nets "wacks" 1 width,
nets "wenr" 1 width,
nets "wen" 1 width,
nets "anyread" 1 1,
nets "nreset" 1 1,
gateSome "inv" [One (conn N "nreset" 0 (0 +: 1)), One reset],
gateSome "and" [One (conn N "wen" 0 (0 +: width)), Many [conn N "wenr" 0 (0 +: width),
concat (dupEach width [conn N "nreset" 0 (0 +: 1)])]],
drLatch "drl" (conn N "wf" 0 (0 +: width)) (conn N "wt" 0 (0 +: width))
(conn N "wen" 0 (0 +: width)) (conn N "wacks" 0 (0 +: width))
(conn N "df" 0 (0 +: width)) (conn N "dt" 0 (0 +: width))
reset,
drCompletions "comp" (eachW wc R0 "wg" 0 wgWidths) (eachW wc R1 "wg" 0 wgWidths)
(each wc N "wc" (0 +: 1)),
connectWrites "conw" ws
(eachW wc R0 "wg" 0 wgWidths) (eachW wc R1 "wg" 0 wgWidths)
(conn N "wf" 0 (0 +: width)) (conn N "wt" 0 (0 +: width))
(conn N "anyread" 0 (0 +: 1))
(each wc R "wd" (0 +: 1))
(each wc N "wc" (0 +: 1))
(conn N "wenr" 0 (0 +: width))
(conn N "wacks" 0 (0 +: width)),
handleBuiltinWrites name bs (conn N "wen" 0 (0 +: width))
(conn N "wf" 0 (0 +: width)) (conn N "wt" 0 (0 +: width))
(conn N "dt" 0 (0 +: width)),
handleBuiltinReads name bs rs (each rc R "rg" (0 +: 1)) (conn N "dt" 0 (0 +: width)),
connectReads "and" readSlices (conn N "df" 0 (0 +: width))
(each rc R "rg" (0 +: 1)) (eachW rc R0 "rd" 0 rdWidths),
connectReads "and" readSlices (conn N "dt" 0 (0 +: width))
(each rc R "rg" (0 +: 1)) (eachW rc R1 "rd" 0 rdWidths),
gateSome "or" [One (conn N "anyread" 0 (0 +: 1)), Many (each rc R "rg" (0 +: 1)),
Many (each rc A "rg" (0 +: 1))],
gateSome "connect" [One (concat (each wc A "wd" (0 +: 1))), Many [concat (each wc A "wg" (0 +: 1))]],
gateSome "connect" [One (concat (each rc A "rd" (0 +: 1))), Many [concat (each rc A "rg" (0 +: 1))]]
]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
wc = length wgWidths
rc = length rdWidths
readSlices = zipWith (+:) rs rdWidths
ports = mkPorts "wg" Input wc wgWidths ++ mkPorts "wd" Output wc (repeat 0) ++
mkPorts "rg" Input rc (repeat 0) ++ mkPorts "rd" Output rc rdWidths
genMake typ@TeakA widths@[Many inWidths, One outWidth]
| any (/= outWidth) inWidths = error $ "A: bad widths: " ++ show widths
| c /= 2 = error $ "A: must be exactly two inputs: " ++ show widths
| outWidth == 0 = netlist [
nets "sel" 1 c,
gateSome "or" [One (conn R "o" 0 (0 +: 1)), Many (smashSplit (conn N "sel" 0 (0 +: c)))],
gateSome "connect" [One (concat (each c R "i" (0 +: 1))), Many [conn N "sel" 0 (0 +: c)]],
gate "c2r1" [concat (each c A "i" (0 +: 1)), concat (each c N "sel" (0 +: 1)),
concat (each c A "o" (0 +: 1)), concat (dupEach c [reset])] ]
| otherwise = netlist [
nets "sel" 2 1,
nets "gsel" 2 1,
nets "nia" 2 1,
nets "selcomp" c 1,
nets "gfint" c outWidth,
nets "gtint" c outWidth,
-- input completion
drCompletions "comp" (each c R0 "i" (0 +: outWidth)) (each c R1 "i" (0 +: outWidth))
(each c N "selcomp" (0 +: 1)),
-- generate sels
gate "inv" [concat (each c N "nia" (0 +: 1)), concat (each c A "i" (0 +: 1))],
gate "and" [concat (each c N "sel" (0 +: 1)), concat (reverse (each c N "nia" (0 +: 1))),
concat (each c N "gsel" (0 +: 1))],
gate "mutex" [conn N "selcomp" 0 (0 +: 1), conn N "selcomp" 1 (0 +: 1),
conn N "gsel" 0 (0 +: 1), conn N "gsel" 1 (0 +: 1)],
-- multiplexing
gateSome "or" [One (conn R0 "o" 0 (0 +: outWidth)), Many (each c N "gfint" (0 +: outWidth))],
gateSome "or" [One (conn R1 "o" 0 (0 +: outWidth)), Many (each c N "gtint" (0 +: outWidth))],
gate "and" [concat (each c N "gtint" (0 +: outWidth)),
concat (dupEach outWidth (each c N "sel" (0 +: 1))), concat (each c R1 "i" (0 +: outWidth))],
gate "and" [concat (each c N "gfint" (0 +: outWidth)),
concat (dupEach outWidth (each c N "sel" (0 +: 1))), concat (each c R0 "i" (0 +: outWidth))],
-- ack steering
gate "c2r1" [concat (each c A "i" (0 +: 1)), concat (each c N "sel" (0 +: 1)),
concat (dupEach c [conn A "o" 0 (0 +: 1)]), concat (dupEach c [reset])] ]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
c = length inWidths
ports = mkPorts "i" Input c inWidths ++ mkPorts "o" Output 1 [outWidth]
genMake typ@TeakI widths = netlist [
nets "nreset" 1 1,
nets "firsthsa" 1 1,
nets "nfirsthsa" 1 1,
nets "firsthsd" 1 1,
nets "noa" 1 1,
gate "inv" [conn N "nreset" 0 (0 +: 1), conn G "reset" 0 (0 +: 1)],
gate "inv" [conn N "nfirsthsa" 0 (0 +: 1), conn N "firsthsa" 0 (0 +: 1)],
gate "inv" [conn N "noa" 0 (0 +: 1), conn A "o" 0 (0 +: 1)],
gate "ao22" [conn R "o" 0 (0 +: 1), conn N "nreset" 0 (0 +: 1),
conn N "nfirsthsa" 0 (0 +: 1), conn R "i" 0 (0 +: 1), conn N "firsthsd" 0 (0 +: 1)],
gate "c1u1" [conn N "firsthsa" 0 (0 +: 1), conn N "nreset" 0 (0 +: 1), conn A "o" 0 (0 +: 1)],
gate "c1u1" [conn N "firsthsd" 0 (0 +: 1), conn N "firsthsa" 0 (0 +: 1), conn N "noa" 0 (0 +: 1)],
gate "and" [conn A "i" 0 (0 +: 1), conn A "o" 0 (0 +: 1), conn N "firsthsd" 0 (0 +: 1)] ]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
ports = mkPorts "i" Input 1 [0] ++ mkPorts "o" Output 1 [0]
genMake typ@TeakR widths = netlist [
nets "fb1" 1 1,
nets "fb2" 1 1,
gate "nor" [conn N "fb1" 0 (0 +: 1), conn G "reset" 0 (0 +: 1), conn N "fb2" 0 (0 +: 1)],
gate "nor" [conn N "fb2" 0 (0 +: 1), conn A "o" 0 (0 +: 1), conn N "fb1" 0 (0 +: 1)],
gate "nor" [conn R "o" 0 (0 +: 1), conn G "reset" 0 (0 +: 1), conn N "fb1" 0 (0 +: 1)] ]
where
netlist = GateNetlist (genTeakName typ widths) ports [] . concat
ports = mkPorts "o" Output 1 [0]
genMake _ _ = error "genMake: can't happen"
latchName :: Int -> Int -> String
latchName width depth = "tkl" ++ show width ++ "x" ++ show depth
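-- For example, latchName 8 2 == "tkl8x2": the module name for an 8 bit wide, depth 2 latch.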
makeLatch :: TechMapping -> Int -> Int -> GateNetlist
makeLatch mapping width depth = addGlobalPorts $ genTechMap mapping $ body width
where
body 0 = netlist [
pipeLatch0N "b" depth (conn R "i" 0 (0 +: 1)) (conn A "i" 0 (0 +: 1))
(conn R "o" 0 (0 +: 1)) (conn A "o" 0 (0 +: 1)) ]
body _ = netlist [
pipeLatchN "b" depth (conn R0 "i" 0 (0 +: width)) (conn R1 "i" 0 (0 +: width)) (conn A "i" 0 (0 +: 1))
(conn R0 "o" 0 (0 +: width)) (conn R1 "o" 0 (0 +: width)) (conn A "o" 0 (0 +: 1)) ]
netlist = GateNetlist (latchName width depth) ports [] . concat
ports = mkPorts "i" Input 1 [width] ++ mkPorts "o" Output 1 [width]
nwEscapeName :: String -> String
nwEscapeName name = concatMap escChar name
where
escChar '[' = ['_']
escChar ']' = []
escChar chr
| isAlphaNum chr = [chr]
| otherwise = []
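-- For example (hypothetical names): nwEscapeName "v[3]" == "v_3" and
-- nwEscapeName "my var.x" == "myvarx"; '[' becomes '_', ']' is dropped and
-- every other non-alphanumeric character is removed.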
genShowImp :: Implicant -> String
genShowImp (Imp value 0) = showHex value ""
genShowImp (Imp value dcs) = showHex value "" ++ "c" ++ showHex dcs ""
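-- For example, genShowImp (Imp 10 0) == "a" and genShowImp (Imp 10 3) == "ac3":
-- the value and the don't-care mask in hex, separated by a 'c'.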
showNameOTerm :: TeakOTerm -> String
showNameOTerm (TeakOConstant width value) = "nm" ++ show width ++ "b" ++ showHex value ""
showNameOTerm (TeakOAppend 1 slices) = "ap" ++ concatMap showNameOSlice slices
showNameOTerm (TeakOp op slices) = fst (teakOOpNames op) ++ concatMap showNameOSlice slices
showNameOTerm (TeakOAppend count slices) = "ap" ++ show count ++ "x" ++ concatMap showNameOSlice slices
showNameOTerm (TeakOBuiltin name width params slices) = "bi" ++ name ++ "_" ++ show width ++
"_" ++ nwEscapeName (show params) ++ "_" ++ concatMap showNameOSlice slices
showNameOTerm (TeakOMux spec slices) = "mx" ++
joinWith "_" (map (joinWith "o" . map showNameImp) spec) ++ "_" ++ concatMap showNameOSlice slices
-- showNameOTerm term = nwEscapeName (show term)
showNameImp :: Implicant -> String
showNameImp (Imp val 0) = show val
showNameImp (Imp val dcs) = show val ++ "m" ++ show dcs
showNameOSlice :: TeakOSlice -> String
showNameOSlice (0, slice) = "i" ++ show (sliceOffset slice) ++ "w" ++ show (sliceWidth slice) ++ "b"
showNameOSlice (term, slice) = "t" ++ show term ++ "o" ++ show (sliceOffset slice) ++
"w" ++ show (sliceWidth slice) ++ "b"
showSlice :: Slice Int -> String
showSlice slice = "o" ++ show (sliceOffset slice) ++ "w" ++ show (sliceWidth slice)
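-- For example, assuming `+:' pairs an offset with a width,
-- showSlice (3 +: 8) == "o3w8".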
genTeakName :: TeakCompType -> [Some Int] -> String
genTeakName TeakJ [Many inWidths, One outWidth] =
"tkj" ++ show outWidth ++ "m" ++ joinWith "_" (map show inWidths)
genTeakName TeakM [Many inWidths, One outWidth] =
"tkm" ++ show c ++ "x" ++ show outWidth ++ "b"
where c = length inWidths
genTeakName (TeakF offsets) [One inWidth, Many outWidths] =
"tkf" ++ show inWidth ++ "m" ++ joinWith "_" (map showSlice (zipWith (+:) offsets outWidths))
genTeakName (TeakS selSlice specs) [One inWidth, Many outWidths] =
"tks" ++ show inWidth ++ "_" ++ showSlice selSlice ++ "_" ++
joinWith "_" (map showSpec (zip specs outWidths))
where showSpec ((imps, offset), width) = joinWith "m" (map genShowImp imps) ++ showSlice (offset +: width)
genTeakName (TeakO terms) [One inWidth, One outWidth] =
"tko" ++ show inWidth ++ "m" ++ show outWidth ++
concatMap (\(i, term) -> "_" ++ show i ++ showNameOTerm term) terms
genTeakName (TeakV vName width bs ws rs) [Many wgWidths, Many _, Many _, Many rdWidths] =
"tkv" ++ name ++ show width ++ (if (null bs)
then ""
else "_b" ++ concatMap show bs) ++
"_w" ++ concatMap showSlice (zipWith (+:) ws wgWidths) ++
"_r" ++ concatMap showSlice (zipWith (+:) rs rdWidths)
where name = nwEscapeName vName
genTeakName TeakA [Many inWidths, One outWidth] =
"tka" ++ show c ++ "x" ++ show outWidth ++ "b"
where c = length inWidths
genTeakName TeakI _ = "tki"
genTeakName TeakR _ = "tkr"
genTeakName _ _ = error "genTeakName: can't happen"
treeGates :: [String]
treeGates = ["and", "or", "nand", "nor"]
nonInvTreeGates :: [String]
nonInvTreeGates = ["c", "nc"]
type TechMapping = String -> GateElem -> [GateElem]
genToSimpleGates :: String -> GateElem -> [GateElem]
genToSimpleGates _ (GateInstance "connect" (from:tos)) = map makeGate tos
where makeGate to = GateInstance "buff" [to, from]
genToSimpleGates _ (GateInstance "gnd" tos) = map makeGate tos
where makeGate to = GateInstance "gnd" [to]
genToSimpleGates prefix (GateInstance name (to:froms))
| name `elem` treeGates = makeGateTree prefix name 3 True to (concat froms)
| name `elem` nonInvTreeGates = makeGateTree prefix name 3 False to (concat froms)
genToSimpleGates _ elem = [elem]
globalPorts :: [GatePort]
globalPorts = [GatePort "reset" Input 1]
globalConns :: [[GateConn]]
globalConns = [reset]
genTechMap :: TechMapping -> GateNetlist -> GateNetlist
genTechMap mapping (GateNetlist name ports props elems) = GateNetlist name ports props simpleElems
where
n0 = [0..] :: [Int]
simpleElems = concatMap makeTech $ zip n0 $ concatMap makeSimple $ zip n0 elems
makeTech (i, elem) = mapping ("tech" ++ show i ++ "_") elem
makeSimple (i, elem) = genToSimpleGates ("simp" ++ show i) elem
addGlobalPorts :: GateNetlist -> GateNetlist
addGlobalPorts (GateNetlist name ports props elems) = GateNetlist name (ports ++ globalPorts) props elems
genMakeComp :: TechMapping -> TeakCompType -> [Some Int] -> GateNetlist
genMakeComp mapping typ widths = addGlobalPorts $ genTechMap mapping $ genMake typ widths
data GenPartToGateOption = GenPartToGateProtocolTest
deriving (Show, Eq)
genPartToGateNetlist :: NetworkIF network => [GenPartToGateOption] -> Part network ->
(GateNetlist, [(String, TeakCompType, [Some Int])], [(Int, Int)])
genPartToGateNetlist options (Part partName ports body) = tryNetwork body $ do
(names, gateNetss, netCompss, latchesToMakes) <- liftM unzip4 $ nwMapLinks makeLink
let linkNames = DM.fromList names
(gateComps, modulesToMake) <- liftM unzip $ nwMapComps $ makeComp linkNames
-- gateNetss <- nwMapLinks (makeNets linkNames)
testComps <- if GenPartToGateProtocolTest `elem` options
then liftM concat $ nwMapLinks $ makeProtocolTester linkNames
else return []
let
netlist = GateNetlist gateName
(gatePorts ++ globalPorts)
[]
-- (portAliasComps ++ concat gateNetss ++ gateComps ++ testComps)
(concat gateNetss ++ gateComps ++ concat netCompss ++ testComps)
return (netlist, catMaybes modulesToMake, nub $ concat latchesToMakes)
where
makeLink link = do
let ref = refLink link
Just pas <- nwGetLinkUsage Passive ref
Just act <- nwGetLinkUsage Active ref
width <- nwGetLinkWidth ref
depth <- liftM latchingDepth $ nwGetLinkLatching ref
let
defName = defaultLinkName ref
nets name = map makeNet $ fullBundles name width 0
conn name = map makeConn $ fullBundles name width 0
portName accessRef = nwEscapeName $ nwPortName port
where Just port = nwFindPortByRef ports accessRef
latch from to = ([GateInstance name (conn from ++ conn to ++ globalConns)],
[(width, depth)])
where name = latchName width depth
alias from to = (concat [
concatMap alias $ zip forwardFrom forwardTo,
concatMap alias $ zip reverseTo reverseFrom ], [])
where
(forwardFrom, reverseFrom) = partition isForward $ fullBundles from width 0
(forwardTo, reverseTo) = partition isForward $ fullBundles to width 0
isForward (_, _, Forward) = True
isForward _ = False
alias ((fromName, width, _), (toName, _, _)) = gateSome "connect"
[One [GateConn fromName (0 +: width)], Many [[GateConn toName (0 +: width)]]]
noConnect _ _ = ([], [])
(pasName, actName, netDecls, connectFunc) = case (pas, act) of
(LinkComp {}, LinkComp {})
| depth == 0 -> (defName, defName, nets defName, noConnect)
| otherwise -> (pasDefName, actDefName, nets pasDefName ++ nets actDefName, latch)
where
pasDefName = defName ++ "P"
actDefName = defName ++ "A"
(LinkComp {}, LinkAccess from _)
| depth == 0 -> (fromPortName, fromPortName, [], noConnect)
| otherwise -> (defName, fromPortName, nets defName, latch)
where fromPortName = portName from
(LinkAccess to _, LinkComp {})
| depth == 0 -> (toPortName, toPortName, [], noConnect)
| otherwise -> (toPortName, defName, nets defName, latch)
where toPortName = portName to
(LinkAccess to _, LinkAccess from _) -> (toPortName, fromPortName, [],
if depth == 0 then alias else latch)
where
fromPortName = portName from
toPortName = portName to
_ -> (defName, defName, nets defName, noConnect)
(connectInsts, latchesReqd) = connectFunc actName pasName
return ((ref, (pasName, actName)), netDecls, connectInsts, latchesReqd)
-- (name, ports, nw) = netlistThingSplitPartNetlist part
gateName = "teak_" ++ partName
defaultLinkName link = show link
makeProtocolTester linkNames link
| pasName == actName = return [makeTester actName "L"]
| otherwise = return [makeTester pasName "P", makeTester actName "A"]
where
(pasName, actName) = linkNames DM.! (refLink link)
width = nwLinkWidth link
makeTester name end = case width of
0 -> GateInstanceParam "tkr_ra_monitor" [GateParamString site] conns
_ -> GateInstanceParam "tkr_dr_monitor" [GateParamString site, GateParamInt width] conns
where
conns = map makeConn $ fullBundles name width 0
site = partName ++ "." ++ defaultLinkName (refLink link) ++ "." ++ end
makeNet (name, width, _) = GateNet name width
makeComp linkNames comp = do
let
links = nwCompLinks comp
senses = nwCompPortSenses comp
flatten (One link) sense = [(link, sense)]
flatten (Many links) sense = zip links $ repeat sense
flatten (Some links) sense = concat $ zipWith flatten links $ repeat sense
let
flatLinks = concat $ zipWith flatten links senses
linkWidths <- mapM nwGetLinkWidth $ map fst flatLinks
let linkConns = concat $ zipWith (makeLinkConns linkNames) flatLinks linkWidths
case comp of
TeakComp _ typ _ _ -> do
let
(Some someWidths, []) = mapOverSome (\_ w -> w) (Some links) linkWidths
name = genTeakName typ someWidths
return (GateInstance name (linkConns ++ globalConns), Just (name, typ, someWidths))
InstanceComp _ name _ _ _ -> do
return (GateInstance ("teak_" ++ name) (linkConns ++ globalConns), Nothing)
makeLinkConns linkNames (link, sense) width = map makeConn $ fullBundles name width 0
where
(pasName, actName) = linkNames DM.! link
name = case sense of
Passive -> pasName
Active -> actName
makeConn (name, width, _) = [GateConn name (0 +: width)]
gatePorts = concatMap makePort ports
makePort (NetworkPort name dir width _) = mkPorts (nwEscapeName name) dir 1 [width]
-- gateNets = concatMap makeChan
-- simpleGateNames : map of all valid gate names that may be defined in mapping files with tkg_ prefixes
simpleGateNames :: DM.Map String ()
simpleGateNames = DM.fromList $ map (\name -> (name,())) [ "and2", "and3",
"ao22", "ao222",
"buff",
"c1u1", "c2", "c2r1", "c3",
"gnd",
"inv",
"mutex",
"nand2", "nand3",
"nor2", "nor3",
"or2", "or3" ]
type GateCosts = DM.Map String (DM.Map String Int)
{-
mappingNetlistToCosts :: [GateNetlist] -> GateCosts
mappingNetlistToCosts netlists = DM.fromList $ map findCost netlists
where
findCost (GateNetlist name _ props _) = fromMaybe (name, 0) $ do
cost <- lookup "cost" props
(costVal, _) <- listToMaybe $ reads cost
return (name, costVal)
-}
-- mappingNetlistToTechMapping : make a TechMapping function from a set of netlists which
-- map tkg_... gates into technology specific gates
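-- For illustration only (hypothetical cell and net names, not from any real
-- library): a mapping netlist such as
--
--   GateNetlist "tkg_and2"
--     [GatePort "o" Output 1, GatePort "i0" Input 1, GatePort "i1" Input 1] []
--     [GateInstance "AND2X1"
--       [[GateConn "i0" (0 +: 1)], [GateConn "i1" (0 +: 1)], [GateConn "o" (0 +: 1)]]]
--
-- produces a TechMapping that rewrites every and2 instance (after the tkg_
-- prefix is added by topMappingFunc below) into
-- GateInstance "AND2X1" [i0conns, i1conns, oconns], recreating any local nets
-- of the mapping module under fresh, prefix-derived names.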
mappingNetlistToTechMapping :: [GateNetlist] -> TechMapping
mappingNetlistToTechMapping netlists = topMappingFunc
where
netlistToMapping (GateNetlist name ports _ elems) = (name, (map netName nets, map makeInstMapping insts))
-- (instCell, map connsToIndex instConns))
where
nets = filter isGateNet elems
insts = filter isGateInstance elems
makeInstMapping (GateInstance instCell instConns) = (instCell, map connsToIndex instConns)
makeInstMapping _ = error "not an instance"
netName (GateNet name _) = name
netName _ = error "not a net"
portName (GatePort name _ _) = name
portIndices :: DM.Map String Int
portIndices = DM.fromList $ zip (map portName ports) [0..]
netIndices :: DM.Map String Int
netIndices = DM.fromList $ zip (map netName nets) [0..]
connsToIndex conns@[GateConn connName slice]
| slice /= (0 +: 1) = error $ "Bad mapping conns `" ++ show conns ++
"', in module `" ++ name ++ "'"
| isJust portNo = (True, fromJust portNo)
| isJust netNo = (False, fromJust netNo)
where
portNo = DM.lookup connName portIndices
netNo = DM.lookup connName netIndices
connsToIndex conns = error $ "Bad mapping conns `" ++ show conns ++
"', in module `" ++ name ++ "'"
-- netlistToMapping netlist = error $ "Bad mapping netlist `" ++ show netlist ++ "'"
-- Module name -> (local net names, [(cell name, cell connection indices)])
mappings :: DM.Map String ([String], [(String, [(Bool, Int)])])
mappings = DM.fromList $ map netlistToMapping netlists
-- Tack tkg_ onto simple gates, i.e. instances whose names appear in simpleGateNames rather than starting with tkr_
topMappingFunc prefix (GateInstance name conss)
| isJust (DM.lookup name simpleGateNames) = mappingFunc prefix $ GateInstance ("tkg_" ++ name) conss
topMappingFunc prefix inst = mappingFunc prefix inst
mappingFunc prefix inst@(GateInstance name connss)
| isJust mapping = map makeNet localNetNames' ++ mappedInsts
| otherwise = [inst]
where
n0 = [0..] :: [Int]
mapping = DM.lookup name mappings
Just (localNetNames, instMappings) = mapping
localNetNames' = map (prefix ++) localNetNames
makeInst (instCell, connIndices) = GateInstance instCell $ map mapConn connIndices
where
mapConn (True, portNo) = connss !! portNo
mapConn (False, netNo) = [GateConn (localNetNames' !! netNo) (0 +: 1)]
makeNet name = GateNet name 1
insts = map makeInst instMappings
mappedInsts = concatMap (\(i, inst) -> mappingFunc (prefix ++ show i ++ "_") inst) $
zip n0 insts
mappingFunc _ elem = [elem]
netlistCost :: GateNetlist -> GateCosts -> (String, DM.Map String Int)
netlistCost (GateNetlist name _ _ elems) costs = (name,
foldl (\ret elem -> DM.unionWith (+) ret $ findCost elem) DM.empty elems)
where
findCost (GateInstance name _) = fromMaybe (DM.fromList [(name, 1)]) $ DM.lookup name costs
findCost _ = DM.empty
genMakeGatesFile :: NetworkIF network => Bool -> [GenPartToGateOption] -> String -> TechMapping ->
FilePath -> [Part network] -> IO ()
genMakeGatesFile verbose options flatArgs mapping filename parts = do
let
partToGate = genPartToGateNetlist options
(procedureNetlists, modulesToMakeByProc, latchesToMakes) = unzip3 $ map partToGate parts
modulesToMake = nub $ concat modulesToMakeByProc
latchesToMake = nub $ concat latchesToMakes
makeProcNetlist (_, typ, widths) = genMakeComp mapping typ widths
lineFormat handle comment str = do
hPutStrLn handle $ comment ++ " " ++ head lines
mapM_ (\l -> hPutStrLn handle $ comment ++ " " ++ l) $ tail lines
where lines = filter (not . (all isSpace)) $
concatMap (mapN 100 id) $ splitWith "\n" str
timeStamp f = do
time <- getClockTime
hPutStrLn f $ "// Generated on: " ++ show time
clipNames = False
file <- openFile filename WriteMode
hPutStrLn file $ "//"
when (not (null flatArgs)) $ do
hPutStrLn file $ "// " ++ flatArgs
hPutStrLn file $ "//"
timeStamp file
hPutStrLn file $ "//\n"
hPutStrLn file "\n`timescale 1ns/1ps\n"
compCosts <- mapM (\(name, typ, widths) -> do
when verbose $ do
lineFormat file "//" $ name ++ " " ++ show typ ++ " " ++ show widths
lineFormat stdout "--" $ name ++ "\n" ++ show typ ++ "\n" ++ showSomeHaskell show (Some widths)
hFlush stdout
let netlist = makeProcNetlist (name, typ, widths)
hPutStrLn file $ showVerilog clipNames netlist ""
return $ netlistCost netlist DM.empty
) modulesToMake
latchCosts <- mapM (\(width, depth) -> do
let name = latchName width depth
when verbose $ do
lineFormat file "//" $ "latch " ++ name ++ " width = " ++ show width ++ ", depth = " ++ show depth
lineFormat stdout "--" $ "latch " ++ name ++ " width = " ++ show width ++ ", depth = " ++ show depth
hFlush stdout
let netlist = makeLatch mapping width depth
hPutStrLn file $ showVerilog clipNames netlist ""
return $ netlistCost netlist DM.empty
) latchesToMake
let
mappedNetlists = map (genTechMap mapping) procedureNetlists
costs' = DM.union (DM.fromList compCosts) (DM.fromList latchCosts)
netlistCosts = foldl (\costs netlist -> let
(name, cost) = netlistCost netlist costs
in DM.insert name cost costs) costs' mappedNetlists
showCostPair (name, count) = name ++ "*" ++ show count
mapM_ (\nl -> do
hPutStrLn file $ showVerilog clipNames nl ""
) mappedNetlists
hPutStrLn file "// Netlist costs:"
mapM_ (\(name, cost) -> do
hPutStrLn file $ "// " ++ name ++ ": " ++ joinWith " " (map showCostPair $ DM.toList cost)) $
DM.toList netlistCosts
hClose file
| balangs/eTeak | src/Gen.hs | bsd-3-clause | 82,718 | 0 | 22 | 29,996 | 28,203 | 14,617 | 13,586 | 1,275 | 19 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, MagicHash
, UnboxedTuples
, UnliftedFFITypes
#-}
{-# OPTIONS_HADDOCK not-home #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.TopHandler
-- Copyright : (c) The University of Glasgow, 2001-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer  :  [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Support for catching exceptions raised during top-level computations
-- (e.g. @Main.main@, 'Control.Concurrent.forkIO', and foreign exports)
--
-----------------------------------------------------------------------------
module GHC.TopHandler (
runMainIO, runIO, runIOFastExit, runNonIO,
topHandler, topHandlerFastExit,
reportStackOverflow, reportError,
flushStdHandles
) where
#include "HsBaseConfig.h"
import Control.Exception
import Data.Maybe
import Foreign
import Foreign.C
import GHC.Base
import GHC.Conc hiding (throwTo)
import GHC.Real
import GHC.IO
import GHC.IO.Handle.FD
import GHC.IO.Handle
import GHC.IO.Exception
import GHC.Weak
#if defined(mingw32_HOST_OS)
import GHC.ConsoleHandler
#else
import Data.Dynamic (toDyn)
#endif
-- Note [rts_setMainThread must be called unsafely]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- rts_setMainThread must be called as unsafe, because it
-- dereferences the Weak# and manipulates the raw Haskell value
-- behind it. Therefore, it must not race with a garbage collection.
-- Note [rts_setMainThread has an unsound type]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- 'rts_setMainThread' is imported with type Weak# ThreadId -> IO (),
-- but this is an unsound type for it: it grabs the /key/ of the
-- 'Weak#' object, which isn't tracked by the type at all.
-- That this works at all is a consequence of the fact that
-- 'mkWeakThreadId' produces a 'Weak#' with a 'ThreadId#' as the key
-- This is fairly robust, in that 'mkWeakThreadId' wouldn't work
-- otherwise, but it still is sufficiently non-trivial to justify an
-- ASSERT in rts/TopHandler.c.
-- see Note [rts_setMainThread must be called unsafely] and
-- Note [rts_setMainThread has an unsound type]
foreign import ccall unsafe "rts_setMainThread"
setMainThread :: Weak# ThreadId -> IO ()
-- | 'runMainIO' is wrapped around 'Main.main' (or whatever main is
-- called in the program). It catches otherwise uncaught exceptions,
-- and also flushes stdout\/stderr before exiting.
runMainIO :: IO a -> IO a
runMainIO main =
do
main_thread_id <- myThreadId
weak_tid <- mkWeakThreadId main_thread_id
case weak_tid of (Weak w) -> setMainThread w
install_interrupt_handler $ do
m <- deRefWeak weak_tid
case m of
Nothing -> return ()
Just tid -> throwTo tid (toException UserInterrupt)
main -- hs_exit() will flush
`catch`
topHandler
install_interrupt_handler :: IO () -> IO ()
#if defined(mingw32_HOST_OS)
install_interrupt_handler handler = do
_ <- GHC.ConsoleHandler.installHandler $
Catch $ \event ->
case event of
ControlC -> handler
Break -> handler
Close -> handler
_ -> return ()
return ()
#else
#include "rts/Signals.h"
-- specialised version of System.Posix.Signals.installHandler, which
-- isn't available here.
install_interrupt_handler handler = do
let sig = CONST_SIGINT :: CInt
_ <- setHandler sig (Just (const handler, toDyn handler))
_ <- stg_sig_install sig STG_SIG_RST nullPtr
-- STG_SIG_RST: the second ^C kills us for real, just in case the
-- RTS or program is unresponsive.
return ()
foreign import ccall unsafe
stg_sig_install
:: CInt -- sig no.
-> CInt -- action code (STG_SIG_HAN etc.)
-> Ptr () -- (in, out) blocked
-> IO CInt -- (ret) old action code
#endif
-- | 'runIO' is wrapped around every @foreign export@ and @foreign
-- import \"wrapper\"@ to mop up any uncaught exceptions. Thus, the
-- result of running 'System.Exit.exitWith' in a foreign-exported
-- function is the same as in the main thread: it terminates the
-- program.
--
runIO :: IO a -> IO a
runIO main = catch main topHandler
-- | Like 'runIO', but in the event of an exception that causes an exit,
-- we don't shut down the system cleanly, we just exit. This is
-- useful in some cases, because the safe exit version will give other
-- threads a chance to clean up first, which might shut down the
-- system in a different way. For example, try
--
-- main = forkIO (runIO (exitWith (ExitFailure 1))) >> threadDelay 10000
--
-- This will sometimes exit with "interrupted" and code 0, because the
-- main thread is given a chance to shut down when the child thread calls
-- safeExit. There is a race to shut down between the main and child threads.
--
runIOFastExit :: IO a -> IO a
runIOFastExit main = catch main topHandlerFastExit
-- NB. this is used by the testsuite driver
-- | The same as 'runIO', but for non-IO computations. Used for
-- wrapping @foreign export@ and @foreign import \"wrapper\"@ when these
-- are used to export Haskell functions with non-IO types.
--
runNonIO :: a -> IO a
runNonIO a = catch (a `seq` return a) topHandler
topHandler :: SomeException -> IO a
topHandler err = catch (real_handler safeExit err) topHandler
topHandlerFastExit :: SomeException -> IO a
topHandlerFastExit err =
catchException (real_handler fastExit err) topHandlerFastExit
-- Make sure we handle errors while reporting the error!
-- (e.g. evaluating the string passed to 'error' might generate
-- another error, etc.)
--
real_handler :: (Int -> IO a) -> SomeException -> IO a
real_handler exit se = do
flushStdHandles -- before any error output
case fromException se of
Just StackOverflow -> do
reportStackOverflow
exit 2
Just UserInterrupt -> exitInterrupted
Just HeapOverflow -> do
reportHeapOverflow
exit 251
_ -> case fromException se of
-- only the main thread gets ExitException exceptions
Just ExitSuccess -> exit 0
Just (ExitFailure n) -> exit n
-- EPIPE errors received for stdout are ignored (#2699)
_ -> catch (case fromException se of
Just IOError{ ioe_type = ResourceVanished,
ioe_errno = Just ioe,
ioe_handle = Just hdl }
| Errno ioe == ePIPE, hdl == stdout -> exit 0
_ -> do reportError se
exit 1
) (disasterHandler exit) -- See Note [Disaster with iconv]
-- don't use errorBelch() directly, because we cannot call varargs functions
-- using the FFI.
foreign import ccall unsafe "HsBase.h errorBelch2"
errorBelch :: CString -> CString -> IO ()
disasterHandler :: (Int -> IO a) -> IOError -> IO a
disasterHandler exit _ =
withCAString "%s" $ \fmt ->
withCAString msgStr $ \msg ->
errorBelch fmt msg >> exit 1
where
msgStr =
"encountered an exception while trying to report an exception.\n" ++
"One possible reason for this is that we failed while trying to " ++
"encode an error message. Check that your locale is configured " ++
"properly."
{- Note [Disaster with iconv]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When using iconv, it's possible for things like iconv_open to fail in
restricted environments (like an initram or restricted container), but
when this happens the error raised inevitably calls `peekCString`,
which depends on the users locale, which depends on using
`iconv_open`... which causes an infinite loop.
This occurrence is also known as tickets #10298 and #7695. So to work
around it we just set _another_ error handler and bail directly by
calling the RTS, without iconv at all.
-}
-- try to flush stdout/stderr, but don't worry if we fail
-- (these handles might have errors, and we don't want to go into
-- an infinite loop).
flushStdHandles :: IO ()
flushStdHandles = do
hFlush stdout `catchAny` \_ -> return ()
hFlush stderr `catchAny` \_ -> return ()
safeExit, fastExit :: Int -> IO a
safeExit = exitHelper useSafeExit
fastExit = exitHelper useFastExit
unreachable :: IO a
unreachable = failIO "If you can read this, shutdownHaskellAndExit did not exit."
exitHelper :: CInt -> Int -> IO a
#if defined(mingw32_HOST_OS)
exitHelper exitKind r =
shutdownHaskellAndExit (fromIntegral r) exitKind >> unreachable
#else
-- On Unix we use an encoding for the ExitCode:
-- 0 -- 255 normal exit code
-- -127 -- -1 exit by signal
-- For any invalid encoding we just use a replacement (0xff).
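-- Worked examples of the encoding (illustrative only):
-- r = 3 (e.g. from exitWith (ExitFailure 3)) calls shutdownHaskellAndExit 3;
-- r = -2 means "terminated by signal 2" and calls shutdownHaskellAndSignal 2;
-- r = 1000 is out of range and exits with the replacement code 0xff.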
exitHelper exitKind r
| r >= 0 && r <= 255
= shutdownHaskellAndExit (fromIntegral r) exitKind >> unreachable
| r >= -127 && r <= -1
= shutdownHaskellAndSignal (fromIntegral (-r)) exitKind >> unreachable
| otherwise
= shutdownHaskellAndExit 0xff exitKind >> unreachable
foreign import ccall "shutdownHaskellAndSignal"
shutdownHaskellAndSignal :: CInt -> CInt -> IO ()
#endif
exitInterrupted :: IO a
exitInterrupted =
#if defined(mingw32_HOST_OS)
safeExit 252
#else
-- we must exit via the default action for SIGINT, so that the
-- parent of this process can take appropriate action (see #2301)
safeExit (-CONST_SIGINT)
#endif
-- NOTE: shutdownHaskellAndExit must be called "safe", because it *can*
-- re-enter Haskell land through finalizers.
foreign import ccall "Rts.h shutdownHaskellAndExit"
shutdownHaskellAndExit :: CInt -> CInt -> IO ()
useFastExit, useSafeExit :: CInt
useFastExit = 1
useSafeExit = 0
| sdiehl/ghc | libraries/base/GHC/TopHandler.hs | bsd-3-clause | 9,882 | 0 | 22 | 2,251 | 1,166 | 626 | 540 | 125 | 7 |
module Graphics.Gnuplot.Private.Graph3D where
import qualified Graphics.Gnuplot.Private.FrameOptionSet as OptionSet
import qualified Graphics.Gnuplot.Private.FrameOption as Option
import qualified Graphics.Gnuplot.Private.LineSpecification as LineSpec
import qualified Graphics.Gnuplot.Private.Graph3DType as GraphType
import qualified Graphics.Gnuplot.Private.Graph as Graph
import qualified Graphics.Gnuplot.Value.Atom as Atom
import qualified Data.Map as Map
import Graphics.Gnuplot.Private.Graph2D (Columns, columnToString, )
import Prelude hiding (lines, )
data T x y z =
Cons {
column_ :: Columns,
type_ :: Type,
lineSpec_ :: LineSpec.T
}
type Type = String
toString :: T x y z -> String
toString (Cons c t l) =
"using " ++ columnToString c ++
" with " ++ t ++
" " ++ LineSpec.toString l
type AxisOption x y z a =
OptionSet.T (T x y z) -> Atom.OptionSet a
defltOptions :: (Atom.C x, Atom.C y, Atom.C z) => OptionSet.T (T x y z)
defltOptions =
let mk ::
Option.T -> Option.T ->
Atom.OptionSet a ->
[(Option.T, [String])]
mk optData optFormat opts =
(optData, Atom.optData opts) :
(optFormat, Atom.optFormat opts) :
Atom.optOthers opts
result ::
Atom.OptionSet x ->
Atom.OptionSet y ->
Atom.OptionSet z ->
OptionSet.T (T x y z)
result optX optY optZ =
OptionSet.Cons $
flip Map.union OptionSet.deflt $
Map.fromList $
mk Option.xData Option.xFormat optX ++
mk Option.yData Option.yFormat optY ++
             mk Option.zData Option.zFormat optZ ++
[]
in result Atom.options Atom.options Atom.options
instance (Atom.C x, Atom.C y, Atom.C z) => Graph.C (T x y z) where
command _ = "splot"
toString = toString
defltOptions = defltOptions
pm3d :: T x y z
pm3d = Cons (1:2:3:[]) "pm3d" LineSpec.deflt
deflt :: GraphType.T x y z a -> Columns -> T x y z
deflt t c = Cons c (GraphType.toString t) LineSpec.deflt
typ :: Type -> T x y z -> T x y z
typ t gr = gr{type_ = t}
{-
For 3D plots, not all line attributes are supported; e.g. the following are not:
pointsize
pointtype
pm3d and impulses allow:
linestyle
linewidth
linecolor
linetype
title
FIXME:
Do we need a separate LineSpec3D type or a type parameter for LineSpec?
-}
lineSpec :: LineSpec.T -> T x y z -> T x y z
lineSpec ls gr = gr{lineSpec_ = ls}
| wavewave/gnuplot | src/Graphics/Gnuplot/Private/Graph3D.hs | bsd-3-clause | 2,456 | 0 | 16 | 635 | 784 | 427 | 357 | 59 | 1 |
module Test.Hspec.Formatters (module Test.Hspec.Core.Formatters) where
import Test.Hspec.Core.Formatters
| hspec/hspec | src/Test/Hspec/Formatters.hs | mit | 115 | 0 | 5 | 17 | 24 | 17 | 7 | 2 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
-- | A sourcemap maps a package name to how it should be built,
-- including source code, flags, options, etc. This module contains
-- various stages of source map construction. See the
-- @build_overview.md@ doc for details on these stages.
module Stack.Types.SourceMap
( -- * Different source map types
SMWanted (..)
, SMActual (..)
, Target (..)
, PackageType (..)
, SMTargets (..)
, SourceMap (..)
-- * Helper types
, FromSnapshot (..)
, DepPackage (..)
, ProjectPackage (..)
, CommonPackage (..)
, GlobalPackageVersion (..)
, GlobalPackage (..)
, isReplacedGlobal
, SourceMapHash (..)
, smRelDir
) where
import qualified Data.Text as T
import qualified Pantry.SHA256 as SHA256
import Path
import Stack.Prelude
import Stack.Types.Compiler
import Stack.Types.NamedComponent
import Distribution.PackageDescription (GenericPackageDescription)
-- | Common settings for both dependency and project package.
data CommonPackage = CommonPackage
{ cpGPD :: !(IO GenericPackageDescription)
, cpName :: !PackageName
, cpFlags :: !(Map FlagName Bool)
-- ^ overrides default flags
, cpGhcOptions :: ![Text] -- also lets us know if we're doing profiling
, cpCabalConfigOpts :: ![Text]
, cpHaddocks :: !Bool
}
-- | Flag showing if package comes from a snapshot
-- needed to ignore dependency bounds between such packages
data FromSnapshot
= FromSnapshot
| NotFromSnapshot
deriving (Show)
-- | A view of a dependency package, specified in stack.yaml
data DepPackage = DepPackage
{ dpCommon :: !CommonPackage
, dpLocation :: !PackageLocation
, dpHidden :: !Bool
-- ^ Should the package be hidden after registering?
-- Affects the script interpreter's module name import parser.
, dpFromSnapshot :: !FromSnapshot
-- ^ Needed to ignore bounds between snapshot packages
-- See https://github.com/commercialhaskell/stackage/issues/3185
}
-- | A view of a project package needed for resolving components
data ProjectPackage = ProjectPackage
{ ppCommon :: !CommonPackage
, ppCabalFP :: !(Path Abs File)
, ppResolvedDir :: !(ResolvedPath Dir)
}
-- | A view of a package installed in the global package database also
-- could include marker for a replaced global package (could be replaced
-- because of a replaced dependency)
data GlobalPackage
= GlobalPackage !Version
| ReplacedGlobalPackage ![PackageName]
deriving Eq
isReplacedGlobal :: GlobalPackage -> Bool
isReplacedGlobal (ReplacedGlobalPackage _) = True
isReplacedGlobal (GlobalPackage _) = False
-- | A source map with information on the wanted (but not actual)
-- compiler. This is derived by parsing the @stack.yaml@ file for
-- @packages@, @extra-deps@, their configuration (e.g., flags and
-- options), and parsing the snapshot it refers to. It does not
-- include global packages or any information from the command line.
--
-- Invariant: a @PackageName@ appears in either 'smwProject' or
-- 'smwDeps', but not both.
data SMWanted = SMWanted
{ smwCompiler :: !WantedCompiler
, smwProject :: !(Map PackageName ProjectPackage)
, smwDeps :: !(Map PackageName DepPackage)
, smwSnapshotLocation :: !RawSnapshotLocation
-- ^ Where this snapshot is loaded from.
}
-- | Adds in actual compiler information to 'SMWanted', in particular
-- the contents of the global package database.
--
-- Invariant: a @PackageName@ appears in only one of the @Map@s.
data SMActual global = SMActual
{ smaCompiler :: !ActualCompiler
, smaProject :: !(Map PackageName ProjectPackage)
, smaDeps :: !(Map PackageName DepPackage)
, smaGlobal :: !(Map PackageName global)
}
newtype GlobalPackageVersion = GlobalPackageVersion Version
-- | How a package is intended to be built
data Target
= TargetAll !PackageType
-- ^ Build all of the default components.
| TargetComps !(Set NamedComponent)
-- ^ Only build specific components
data PackageType = PTProject | PTDependency
deriving (Eq, Show)
-- | Builds on an 'SMActual' by resolving the targets specified on the
-- command line, potentially adding in new dependency packages in the
-- process.
data SMTargets = SMTargets
{ smtTargets :: !(Map PackageName Target)
, smtDeps :: !(Map PackageName DepPackage)
}
-- | The final source map, taking an 'SMTargets' and applying all
-- command line flags and GHC options.
data SourceMap = SourceMap
{ smTargets :: !SMTargets
-- ^ Doesn't need to be included in the hash, does not affect the
-- source map.
, smCompiler :: !ActualCompiler
-- ^ Need to hash the compiler version _and_ its installation
-- path. Ideally there would be some kind of output from GHC
-- telling us some unique ID for the compiler itself.
, smProject :: !(Map PackageName ProjectPackage)
-- ^ Doesn't need to be included in hash, doesn't affect any of
-- the packages that get stored in the snapshot database.
, smDeps :: !(Map PackageName DepPackage)
-- ^ Need to hash all of the immutable dependencies, can ignore
-- the mutable dependencies.
, smGlobal :: !(Map PackageName GlobalPackage)
-- ^ Doesn't actually need to be hashed, implicitly captured by
-- smCompiler. Can be broken if someone installs new global
-- packages. We can document that as not supported, _or_ we could
-- actually include all of this in the hash and make Stack more
-- resilient.
}
-- | A unique hash for the immutable portions of a 'SourceMap'.
newtype SourceMapHash = SourceMapHash SHA256
-- | Returns relative directory name with source map's hash
smRelDir :: (MonadThrow m) => SourceMapHash -> m (Path Rel Dir)
smRelDir (SourceMapHash smh) = parseRelDir $ T.unpack $ SHA256.toHexText smh
| juhp/stack | src/Stack/Types/SourceMap.hs | bsd-3-clause | 5,716 | 0 | 11 | 1,070 | 777 | 473 | 304 | 144 | 1 |
module GA4 where
failBoundPrecon :: IO [String]
failBoundPrecon = do
fileName <- getLine
file <- readFile fileName
return $ lines file
failOrderPrecon :: IO (Int,Int)
failOrderPrecon = do
b <- getB
a <- getA
return (a,b)
getA = return 1
getB = return 2
| RefactoringTools/HaRe | test/testdata/GenApplicative/GA4.hs | bsd-3-clause | 270 | 0 | 8 | 61 | 108 | 54 | 54 | 13 | 1 |
module RefacDupTrans where
import System.IO.Unsafe
import PosSyntax hiding (ModuleName, HsName, SN)
import SourceNames
import ScopeModule
import UniqueNames hiding (srcLoc)
import HsName
import HsLexerPass1
import PNT
import TiPNT
import SimpleGraphs(reverseGraph,reachable)
import HsTokens
import PrettyPrint
import RefacTypeSyn
import RefacLocUtils
import Data.Char
import GHC.Unicode
import AbstractIO
import Data.Maybe
import Data.List
import Data.Function
import RefacUtils
import LocalSettings (classTransformPath,answerFilePath)
import DuplicateCode (foldDo)
type NameToCall = String
type NameToReplace = String
type Module = String
type FileName = String
refacDupTrans args
= do
AbstractIO.putStrLn "refacDupTrans"
{- let fileName = ghead "fileName'" args
beginRow = read (args!!1)::Int
beginCol = read (args!!2)::Int
endRow = read (args!!3)::Int
endCol = read (args!!4)::Int
-- collect the answers...
(inscps, exps, mod, tokList)<-parseSourceFileOld fileName
let subExp = locToExp (beginRow, beginCol) (endRow, endCol) tokList mod
expression <- AbstractIO.readFile transFilePath
let expressions = (read expression)::([ [(HsExpP, String)] ])
let clonedExps = concat (filter (subExp `myElem`) expressions)
groupedClones = groupClones clonedExps -}
cloneCall <- AbstractIO.readFile classTransformPath
answers <- AbstractIO.readFile answerFilePath
let clonedExps = pruneCloneClass (read cloneCall::[(HsExpP, String, String)]) (filter (isAlpha) answers)
groupedClones = groupClones clonedExps
if clonedExps == []
          then error "Please use 'introduce a new definition' instead; the selected expression is not a member of a clone class!"
else do
-- make sure all the files we need to modify
-- are added to the project. Otherwise we run into "module not found"
-- issues...
let names = nub $ concatMap (map snd) groupedClones
mods <- (mapM parseSourceFile names)
destMods <- mapM fileNameToModName names
-- addFile names
res <- createParameters (makeFullASTList mods) 1 (createAbs (map fst clonedExps))
let pns = nub $ concatMap definedPNs [(fromJust res)]
results <- callFindSafeModules destMods (myZip mods names groupedClones) pns res
writeRefactoredFiles False results
AbstractIO.putStrLn "Clone Transformation Completed.\n"
makeFullASTList gf= [mod | (inscps, exps, mod, tokList) <- gf]
pruneCloneClass :: [ (HsExpP, String, String) ] -> String -> [ (HsExpP, String) ]
pruneCloneClass ((x,y,z):xs) ('y':ys) = (x,y) : pruneCloneClass xs ys
pruneCloneClass (x:xs) ('n':ys) = pruneCloneClass xs ys
pruneCloneClass _ _ = []
retainLocs [] _ = []
retainLocs _ [] = []
retainLocs ((i,e,_,_):ms) (((_,_),(t,m)):ts)
= (i,e,m,t) : retainLocs ms ts
myZip :: [(a,b,c,d)] -> [e] -> [f] -> [(a,b,c,d,e,f)]
myZip ((a,b,c,d):as) (e:bs) (f:fs) = (a,b,c,d,e,f) : myZip as bs fs
myZip _ _ _ = []
-- find safe Module
-- this function checks to see whether there is a safe place
-- to put the abstraction.
--
-- given the list of the files that we are transforming,
-- we would introduce a cyclic import if the module in which
-- we want to introduce the abstraction already imports
-- one of the modules being transformed. Therefore the chosen module
-- cannot import any of the modules from our transformation set.
-- findSafeModule :: Term t => [ ModuleName ] -> t -> [Bool]
findSafeModule destMods mod
= not $ or $ map (flip elem mod) (map fst destMods)
findSafeModules destmods [] _ = -- basically we can't do imports, everywhere needs to define abstraction!
return Nothing
findSafeModules destmods ((inscps, exps, mod, tokList, fileName, clonedExps@(c:cs)):mods) res
= do
modName <- fileNameToModName fileName
modules <- serverModsAndFiles modName
if findSafeModule modules destmods
then
do AbstractIO.putStrLn $ show fileName
-- add the abstraction in this module...
-- we also need to make sure all other modules import this module...
((f,m), (newToks, newMod)) <- applyRefac (addAbstraction res (map fst clonedExps))
(Just (inscps, exps, mod, tokList)) fileName
return (Just (((f,m), (newToks, newMod)), mod, fileName))
else do rest <- findSafeModules destmods mods res
return rest
callFindSafeModules destMods mods pns res
= do
-- mod is the module that everything has to import...
result <- findSafeModules destMods mods res
if result == Nothing
then error "All will introduce cyclic inclusion!"
else do let (transformation, m, fName) = fromJust result
fFile <- fileNameToModName fName
transformation' <- addImports mods res m pns fName fFile
-- we also need to transform the module we are left with
-- i.e. the module where we have added the abstraction...
return (transformation:transformation')
addImports [] _ _ _ _ _ = return []
addImports (t@(inscps, exps, mod, tokList, fileName, clonedExps@(c:cs)):ms) res m pns fFile f
| mod == m = do
rest <- addImports ms res m pns fFile f
return rest
| otherwise = do modified <- addImports' t res pns fFile f
rest <- addImports ms res m pns fFile f
return (modified: rest)
addImports' (inscps, exps2, mod, tokList, fileName, clonedExps@(c:cs)) res pns fFile f
= do
((f,m), (newToks, newMod)) <- applyRefac (addImport'' res pns fFile f (map fst clonedExps)) (Just (inscps, exps2, mod, tokList)) fileName
return ((f,m), (newToks, newMod))
addImport'' res pns fFile f exps (_,_,t)
= do
replacedT <- replaceOccurrences t res exps
t' <- addImport fFile f pns replacedT
return t'
-- transformClones takes a list of [(HsExpP, String)],
-- parses the module in the file named by the String, performs an
-- "addAbstraction" over that module and writes the
-- refactored files.
--
-- transformClones should be called from a mapM_ to allow
-- the monadic effect to be preserved.
transformClones destMods res f clonedExps@(c:cs)
= do
let fileName = snd c
currentMod <- fileNameToModName fileName
origFile <- fileNameToModName f
let pns = nub $ concatMap definedPNs [(fromJust res)]
(inscps, exps, mod, tokList)<-parseSourceFileOld fileName
((f2,m), (newToks, newMod)) <- applyRefac (extractExpression res pns fileName origFile currentMod (map fst clonedExps))
(Just (inscps, exps, mod, tokList)) fileName
return ((f2,m), (newToks, newMod))
-- group the expressions by their defining module.
-- groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupClones :: [ (HsExpP, String) ] -> [ [(HsExpP, String)] ]
groupClones clones
= groupBy checkFile clones
where
checkFile :: Eq b => (a,b) -> (a,b) -> Bool
checkFile (a,b) (c,d) = b == d
myElem :: HsExpP -> [(HsExpP, String)] -> Bool
myElem _ [] = False
myElem e ((x,y):xs)
| toRelativeLocs e == toRelativeLocs x = True
| otherwise = myElem e xs
extractExpression decs pns fFile f fileName exps (_,_,t)
| f /= fileName
= do
-- find the expressions in t that are associated
-- with exps. Replace these expressions with a call
-- to the abstactions.
replacedT <- replaceOccurrences t decs exps
return replacedT
| otherwise = do replacedT <- replaceOccurrences t decs exps
return replacedT
--add a definition name to the import. If the module is not imported yet, then create a new import decl.
-- addImport::String->HsName.ModuleName->[PName]->HsModuleP->HsModuleP
addImport destFileName destModName pns mod@(HsModule _ _ _ imp _)
  =if itemIsImportedByDefault destModName mod -- Are items from this module already imported by default?
then return mod -- Yes. Do nothing.
else if itemsAreExplicitlyImported destModName mod --Is the module imported and some of its items are explicitly imported?
then addVarItemInImport1 destModName pns mod -- Yes. Then add the definition name to the list.
else addImportDecl mod (modNameToStr destModName) False Nothing False (map pNtoName pns)
--addImportDecl mod (mkImportDecl destFileName destModName False (map pNtoName pns)) --Create a new import decl.
where
    {- Compose an import declaration which imports the module specified by 'modName',
       and only imports the definition specified by 'e'.
-}
itemsAreExplicitlyImported serverModName (HsModule _ _ _ imps _)
= any (isExplicitlyImported serverModName) imps
where
isExplicitlyImported serverModName ((HsImportDecl _ (SN modName _) _ _ h)::HsImportDeclP)
= serverModName == modName && isJust h && not (fst (fromJust h))
-- are items defined in the serverModName imported by the current module by default?
itemIsImportedByDefault serverModName (HsModule _ _ _ imps _)
= any (isImportedByDefault' serverModName) imps
where
isImportedByDefault' serverModName ((HsImportDecl _ (SN modName _) _ _ h)::HsImportDeclP)
= serverModName == modName && ( isNothing h || (isJust h && fst(fromJust h)))
addVarItemInImport1 serverModName pns mod
= applyTP ((once_tdTP (failTP `adhocTP` inImport)) `choiceTP` idTP) mod
where
inImport (imp@(HsImportDecl loc@(SrcLoc fileName _ row col) (SN modName l) qual as (Just (b,ents))):: HsImportDeclP)
| serverModName == modName && not b
=
addItemsToImport serverModName Nothing (Left (map pNtoName pns)) imp
inImport x = mzero
addAbstraction decs exps (_,_,t)
= do
replacedT <- replaceOccurrences t decs exps
t' <- addDecl replacedT Nothing (maybeToList decs, Nothing) True
t'' <- addItemsToExport t' Nothing False (Left (map declToName (maybeToList decs)))
return t''
-- replaceOccurrences :: (Term t, MonadPlus m, Monad m) => t -> [ HsDeclP ] -> [ [HsExpP] ] -> m t
-- replaceOccurrences t [] _ = return t
replaceOccurrences t _ [] = return t
replaceOccurrences t dec exps
= do
res <- repOcc dec exps t
-- rest <- replaceOccurrences res dec expss
return res
repOcc Nothing _ t = return t
repOcc (Just dec) es t
= do
-- get the expression on the RHS of Abstraction,
-- and abstraction name for the call.
let (name, rhs) = getNameAndRHS dec
newT <- repOcc' name rhs es t
return newT
where
getNameAndRHS (Dec (HsFunBind _ [HsMatch _ name _ (HsBody e) _]))
= (name, e)
repOcc' _ _ [] t = return t
repOcc' name rhs (x:xs) t
= do
res <- applyTP (once_tdTP (failTP `adhocTP` inExp)) t
-- error $ show x
rest <- repOcc' name rhs xs res
return rest
where
inExp e@(Exp (HsDo e1))
= do
let new = foldAgainstAbs [] x rhs
-- we need to actually find the bit to update...
las <- (getStmtList2 rhs)
-- error $ show las
if las == []
then mzero
else do
let (p, newStmts) = foldStmt' (head las) [] (createFunc name (rmAllLocs new)) e1 x
if p
then
do
-- error $ show (e1, rhs)
-- new' = (render.ppi) new
-- n' <- update e (Exp (HsDo (HsLast (createFunc name new)))) e
-- n' <- RefacUtils.delete e e
lift $ AbstractIO.putStrLn $ show new
n'' <- update e (Exp (HsDo newStmts)) e
return n''
else mzero
inExp e@(Exp (HsParen e1))
| sameOccurrence e x
= do
let new = foldAgainstAbs [] e rhs
new' = (render.ppi) new
e' <- update e (Exp (HsParen (createFunc name new))) e
return e'
-- | otherwise = mzero
inExp (e::HsExpP)
| sameOccurrence e x
= do
let new = foldAgainstAbs [] e rhs
new' = (render.ppi) new
e' <- update e (createFunc name new) e
return e'
inExp e =
mzero
getStmtList2 (Exp (HsDo s))
= return [last $ getStmtList s]
getStmtList2 _ = return []
foldStmt' :: HsStmtAtomP -> [HsPatP] -> HsExpP -> HsStmtP -> HsExpP -> (Bool, HsStmtP)
foldStmt' ss p e s (Exp (HsDo s1)) = foldStmt ss p e s s1
foldStmt' ss p e s1 _ = (False, s1)
foldStmt :: HsStmtAtomP -> [HsPatP] -> HsExpP -> HsStmtP -> HsStmtP -> (Bool, HsStmtP)
foldStmt ss p e s1@(HsGenerator _ p1 e1 s0) s2@(HsGenerator _ p2 e2 s3)
| p1 == p2 && sameOccurrence e1 e2 = foldStmt ss (p++[p1]) e s0 s3
| otherwise = (False, s1)
foldStmt (HsLastAtom ee) p e s1@(HsQualifier e1 s3) (HsLast e2)
| isReturn ee && e2 == defaultExp = (True, newStmt)
| sameOccurrence e1 e2 = (True, s1)
where
newStmt = (HsGenerator loc0 (Pat (HsPTuple loc0 p)) e s3)
foldStmt ee p e s1@(HsQualifier e1 s0) s2@(HsQualifier e2 s3)
| sameOccurrence e1 e2 = foldStmt ee p e s0 s3
| otherwise = (False, s1)
foldStmt (HsLastAtom ee) p e s1@(HsLast e1) s2@(HsLast e2)
| isReturn ee && e2 == defaultExp = (True, newStmt)
| e2 == defaultExp = (True, s1)
| sameOccurrence e1 e2 && isReturn ee = (True, newStmt)
| sameOccurrence e1 e2 = (True, s1)
where
newStmt = (HsGenerator loc0 (Pat (HsPTuple loc0 p)) e s1)
foldStmt ee p e s1 s2 = (False, s1)
isReturn e = (render.ppi) (head (flatternApp e)) == "return"
flatternApp :: HsExpP -> [HsExpP]
flatternApp (Exp (HsApp e1 e2)) = flatternApp e1 ++ flatternApp e2
flatternApp (Exp (HsParen e)) = flatternApp e
flatternApp x = [x]
grabPNT :: PNT -> [PNT] -> PNT
grabPNT x [] = x
grabPNT x (y:ys)
| defineLoc x == defineLoc y = y
| otherwise = grabPNT x ys
checkPNTInPat :: [HsPatP] -> PNT -> Bool
checkPNTInPat [] _ = False
checkPNTInPat (p:ps) i
| defineLoc i == (SrcLoc "__unknown__" 0 0 0) = False
| defineLoc i == defineLoc (patToPNT p) = True
| otherwise = checkPNTInPat ps i
foldAgainstAbs :: [HsPatP] -> HsExpP -> HsExpP -> [ HsExpP ]
foldAgainstAbs _ e1 e2
| e1 == defaultExp || e2 == defaultExp = []
foldAgainstAbs pats e@(Exp (HsId (HsVar x))) (Exp (HsId (HsVar y)))
 | x == y && isTopLevelPNT x = []
| checkPNTInPat pats x = []
| otherwise = [e]
foldAgainstAbs pats e@(Exp (HsId (HsCon x))) (Exp (HsId (HsCon y)))
| x == y = []
| otherwise = [e]
foldAgainstAbs pats e@(Exp (HsLit s l1)) (Exp (HsLit s2 l2))
| l1 == l2 = []
| otherwise = [(Exp (HsLit loc0 l1))]
foldAgainstAbs pats e@(Exp (HsInfixApp e1 o1 e2)) (Exp (HsInfixApp e3 o2 e4))
= (e1' ++ o1' ++ e2')
where
e1' = foldAgainstAbs pats e1 e3
o1'
| o1 == o2 = []
| otherwise = [Exp (HsId o1)]
e2' = foldAgainstAbs pats e2 e4
foldAgainstAbs pats e@(Exp (HsApp e1 e2)) (Exp (HsApp e3 e4))
= (foldAgainstAbs pats e1 e3) ++ (foldAgainstAbs pats e2 e4)
foldAgainstAbs pats e@(Exp (HsNegApp s1 e1)) (Exp (HsNegApp s2 e2))
= (foldAgainstAbs pats e1 e2)
foldAgainstAbs pats1 e@(Exp (HsLambda pats e1)) (Exp (HsLambda pats2 e2))
= []
foldAgainstAbs pats e@(Exp (HsLet decs e1)) (Exp (HsLet decs2 e2))
= (foldAgainstAbs pats e1 e2)
foldAgainstAbs pats e@(Exp (HsIf e1 e2 e3)) (Exp (HsIf e4 e5 e6))
= (e1' ++ e2' ++ e3')
where
e1' = foldAgainstAbs pats e1 e4
e2' = foldAgainstAbs pats e2 e5
e3' = foldAgainstAbs pats e3 e6
foldAgainstAbs pats e@(Exp (HsCase e1 alts1)) (Exp (HsCase e2 alts2))
= []
foldAgainstAbs pats e@(Exp (HsParen e1)) (Exp (HsParen e2))
= (foldAgainstAbs pats e1 e2)
foldAgainstAbs pats (Exp (HsParen e1)) e2
= foldAgainstAbs pats e1 e2
foldAgainstAbs pats e1 (Exp (HsParen e2))
= foldAgainstAbs pats e1 e2
foldAgainstAbs pats (Exp (HsTuple es1)) (Exp (HsTuple es2))
= concat (foldAgainstAbs' pats es1 es2)
foldAgainstAbs pats (Exp (HsList es1)) (Exp (HsList es2))
= concat (foldAgainstAbs' pats es1 es2)
foldAgainstAbs pats (Exp (HsLeftSection e1 o1)) (Exp (HsLeftSection e2 o2))
= e1' ++ o1'
where
e1' = foldAgainstAbs pats e1 e2
o1'
| o1 == o2 = []
| otherwise = [Exp (HsId o1)]
foldAgainstAbs pats (Exp (HsRightSection o1 e1)) (Exp (HsRightSection o2 e2))
= o1' ++ e1'
where
e1' = foldAgainstAbs pats e1 e2
o1'
| o1 == o2 = []
| otherwise = [Exp (HsId o1)]
foldAgainstAbs pats (Exp (HsEnumFrom e1)) (Exp (HsEnumFrom e2))
= foldAgainstAbs pats e1 e2
foldAgainstAbs pats (Exp (HsEnumFromTo e1 e2)) (Exp (HsEnumFromTo e3 e4))
= foldAgainstAbs pats e1 e2 ++ foldAgainstAbs pats e3 e4
foldAgainstAbs pats (Exp (HsEnumFromThen e1 e2)) (Exp (HsEnumFromThen e3 e4))
= foldAgainstAbs pats e1 e2 ++ foldAgainstAbs pats e3 e4
foldAgainstAbs pats (Exp (HsEnumFromThenTo e1 e2 e3)) (Exp (HsEnumFromThenTo e4 e5 e6))
= foldAgainstAbs pats e1 e4 ++ foldAgainstAbs pats e2 e5 ++ foldAgainstAbs pats e3 e6
foldAgainstAbs pats (Exp (HsAsPat i1 e1)) (Exp (HsAsPat i2 e2))
= i1' ++ e1'
where
e1' = foldAgainstAbs pats e1 e2
i1'
| i1 == i2 = []
| otherwise = [Exp (HsId (HsVar i1))]
foldAgainstAbs pats (Exp (HsIrrPat e1)) (Exp (HsIrrPat e2))
= foldAgainstAbs pats e1 e2
foldAgainstAbs pats (Exp (HsDo e1)) (Exp (HsDo e2))
= foldAgainstAbsStmts pats e1 e2
foldAgainstAbs pats e1 e2 = [e1]
foldAgainstAbsAlt pats as _ = as
foldAgainstAbs' pats [] [] = []
foldAgainstAbs' p x [] = []
foldAgainstAbs' p [] x = []
foldAgainstAbs' p (e:es) (x:xs) = foldAgainstAbs p e x : foldAgainstAbs' p es xs
foldAgainstAbsStmts pats a@(HsGenerator _ p1 e1 s1) b@(HsGenerator _ p2 e2 s2)
= foldAgainstAbs (pats++[p1]++[p2]) e1 e2 ++ foldAgainstAbsStmts (pats++[p1]++[p2]) s1 s2
foldAgainstAbsStmts pats (HsQualifier e1 s1) (HsQualifier e2 s2)
= foldAgainstAbs pats e1 e2 ++ foldAgainstAbsStmts pats s1 s2
foldAgainstAbsStmts pats (HsLast e1) (HsLast e2)
= foldAgainstAbs pats e1 e2
foldAgainstAbsStmts pats (HsLast e1) (HsQualifier e2 s1)
= foldAgainstAbs pats e1 e2
-- createParameters :: HsExpP -> HsDeclP
-- createParameters mod n [] = return []
createParameters mods n Nothing = return Nothing
createParameters mods n (Just e)
= do
numParams <- countParams e
let nameParams = mkNewNames (length numParams) e []
e' <- renameNormals e nameParams
let newDec = createDec (transformBindings e') nameParams
return (Just newDec)
where
transformBindings (Exp (HsDo stmts))
= (Exp (HsDo (transformBind [] stmts)))
transformBindings e = e
transformBind pats (HsGenerator s p1 e1 stmts)
= (HsGenerator s p1 e1 (transformBind (pats++[p1]) stmts))
transformBind pats (HsQualifier e stmts)
= (HsQualifier e (transformBind pats stmts))
transformBind pats (HsLast e)
| e == defaultExp
= (HsLast (Exp (HsApp (nameToExp "return") (Exp (HsTuple (getPats pats))))))
| pats /= [] = (HsQualifier e (HsLast (Exp (HsApp (nameToExp "return") (Exp (HsTuple (getPats pats)))))))
| otherwise = (HsLast e)
getPats [] = []
getPats (p:ps) = nameToExp (pNTtoName (patToPNT p)) : getPats ps
createDec e' nameParams
= Dec (HsFunBind loc0 [HsMatch loc0 (nameToPNT newName)
(map nameToPat nameParams)
(HsBody e')
[] ])
newName = mkNewName "abs" (map pNTtoName (hsPNTs mods)) n
renameNormals e [] = return e
renameNormals e (x:xs)
= do
e' <- renameANorm e x
res <- renameNormals e' xs
return res
where
renameANorm e x
= applyTP (once_tdTP (failTP `adhocTP` (inPNT x))) e
inPNT x (p::PNT)
| pNTtoName p == "$I" = return (nameToPNT x)
inPNT _ _ = mzero
countParams t
= applyTU (full_tdTU (constTU [] `adhocTU` inPNT)) t
inPNT (p::PNT)
| pNTtoName p == "$I" = return [p]
inPNT x = return []
mkNewNames :: Int -> HsExpP -> [String] -> [String]
mkNewNames 0 e names = names
mkNewNames n e names
= mkNewNames (n-1) e (result : names)
where
result = mkNewName "p" (oldNames ++ names) n
oldNames = map pNTtoName (hsPNTs e)
posToExp :: (Term t) => [PosToken] -> t -> [(SimpPos, SimpPos)] -> [HsExpP]
posToExp _ _ [] = []
posToExp toks mod ((x,y):xs)
= locToExp x y toks mod : posToExp toks mod xs
-- compareExp takes a clone class and tries to
-- figure out which parts can stay in the abstraction and which cannot:
-- the two expressions are compared structurally; the parts that match
-- are kept, while the parts that differ are replaced by the
-- placeholder "$I".
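-- A rough illustration (hypothetical clones, not taken from a real run):
-- comparing the clones (1 + y) and (2 + y), where y is a locally bound
-- variable, yields the candidate body ($I + $I); createParameters then
-- turns each "$I" into a fresh parameter of the generated definition.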
createAbs' :: [ HsExpP ] -> [ HsExpP ]
createAbs' [] = []
createAbs' [x] = [compareExp [] x x]
createAbs' (x:y:es)
= int : createAbs' (y:es)
where
int = compareExp [] x y
createAbs'' :: [ HsExpP ] -> Maybe HsExpP
createAbs'' [] = Nothing
createAbs'' [x] = Just x
createAbs'' (x:y:xs)
= createAbs'' ((compareExp [] x y):xs)
createAbs :: [ HsExpP ] -> Maybe HsExpP
createAbs list
= let f = createAbs' list in (createAbs'' f)
compareExp :: [HsPatP] -> HsExpP -> HsExpP -> HsExpP
compareExp pats e@(Exp (HsId (HsVar x))) (Exp (HsId (HsVar y)))
| x == y && isTopLevelPNT x = e
| checkPNTInPat pats x = e
| otherwise = (Exp (HsId (HsVar (nameToPNT "$I"))))
compareExp pats e@(Exp (HsId (HsCon x))) (Exp (HsId (HsCon y)))
| x == y = e
| otherwise = (Exp (HsId (HsVar (nameToPNT "$I"))))
compareExp pats e@(Exp (HsLit s l1)) (Exp (HsLit s2 l2))
| l1 == l2 = e
| otherwise = (Exp (HsId (HsVar (nameToPNT "$I"))))
compareExp pats e@(Exp (HsInfixApp e1 o1 e2)) (Exp (HsInfixApp e3 o2 e4))
= Exp (HsInfixApp e1' o1' e2')
where
e1' = compareExp pats e1 e3
o1'
| o1 == o2 = o1
| otherwise = HsVar (nameToPNT "$I")
e2' = compareExp pats e2 e4
compareExp pats e@(Exp (HsApp e1 e2)) (Exp (HsApp e3 e4))
= Exp (HsApp (compareExp pats e1 e3) (compareExp pats e2 e4))
compareExp pats e@(Exp (HsNegApp s1 e1)) (Exp (HsNegApp s2 e2))
= Exp (HsNegApp s1 (compareExp pats e1 e2))
compareExp pats1 e@(Exp (HsLambda pats e1)) (Exp (HsLambda pats2 e2))
= Exp (HsLambda pats (compareExp pats1 e1 e2))
compareExp pats e@(Exp (HsLet decs e1)) (Exp (HsLet decs2 e2))
= Exp (HsLet decs (compareExp pats e1 e2))
compareExp pats e@(Exp (HsIf e1 e2 e3)) (Exp (HsIf e4 e5 e6))
= Exp (HsIf e1' e2' e3')
where
e1' = compareExp pats e1 e4
e2' = compareExp pats e2 e5
e3' = compareExp pats e3 e6
compareExp pats e@(Exp (HsCase e1 alts1)) (Exp (HsCase e2 alts2))
= Exp (HsCase (compareExp pats e1 e2) (compareAlt pats alts1 alts2))
compareExp pats e@(Exp (HsParen e1)) (Exp (HsParen e2))
= Exp (HsParen (compareExp pats e1 e2))
compareExp pats (Exp (HsParen e1)) e2
= compareExp pats e1 e2
compareExp pats e1 (Exp (HsParen e2))
= compareExp pats e1 e2
compareExp pats (Exp (HsList es1)) (Exp (HsList es2))
= Exp (HsList (compareExp' pats es1 es2))
compareExp pats (Exp (HsTuple es1)) (Exp (HsTuple es2))
= Exp (HsTuple (compareExp' pats es1 es2))
compareExp pats (Exp (HsLeftSection e1 i1)) (Exp (HsLeftSection e2 i2))
= Exp (HsLeftSection (compareExp pats e1 e2) i2')
where
i2'
| i1 == i2 = i1
| otherwise = HsVar (nameToPNT "$I")
compareExp pats (Exp (HsRightSection i1 e1)) (Exp (HsRightSection i2 e2))
= Exp (HsRightSection i1' (compareExp pats e1 e2))
where
i1'
| i1 == i2 = i1
| otherwise = HsVar (nameToPNT "$I" )
compareExp pats (Exp (HsEnumFrom e1)) (Exp (HsEnumFrom e2))
= Exp (HsEnumFrom (compareExp pats e1 e2))
compareExp pats (Exp (HsEnumFromTo e1 e2)) (Exp (HsEnumFromTo e3 e4))
= Exp (HsEnumFromTo (compareExp pats e1 e3) (compareExp pats e2 e4))
compareExp pats (Exp (HsEnumFromThen e1 e2)) (Exp (HsEnumFromThen e3 e4))
= Exp (HsEnumFromThen (compareExp pats e1 e3) (compareExp pats e2 e4))
compareExp pats (Exp (HsEnumFromThenTo e1 e2 e3)) (Exp (HsEnumFromThenTo e4 e5 e6))
= Exp (HsEnumFromThenTo (compareExp pats e1 e4) (compareExp pats e2 e5) (compareExp pats e3 e6))
compareExp pats (Exp (HsAsPat i1 e1)) (Exp (HsAsPat i2 e2))
= Exp (HsAsPat i1' (compareExp pats e1 e2))
where
i1'
| i1 == i2 = i1
| otherwise = (nameToPNT "$I")
compareExp pats (Exp (HsIrrPat e1)) (Exp (HsIrrPat e2))
= Exp (HsIrrPat (compareExp pats e1 e2))
compareExp pats e1@(Exp (HsDo stmts1)) e2@(Exp (HsDo stmts2))
= Exp (HsDo (compareStmts pats stmts1 stmts2))
compareExp pats e1 e2 = (Exp (HsId (HsVar (nameToPNT "$I"))))
compareAlt pats as _ = as
compareStmts pats (HsGenerator s p1 e1 s1) (HsGenerator _ p2 e2 s2)
= HsGenerator s p1 (compareExp (pats++[p1]++[p2]) e1 e2) (compareStmts (pats++[p1]++[p2]) s1 s2)
compareStmts pats (HsQualifier e1 stmts1) (HsQualifier e2 stmts2)
= HsQualifier (compareExp pats e1 e2) (compareStmts pats stmts1 stmts2)
compareStmts pats (HsLast e1) (HsLast e2)
= HsLast (compareExp pats e1 e2)
compareStmts pats s1 s2 = s1
compareExp' p [] [] = []
compareExp' p _ [] = []
compareExp' p [] _ = []
compareExp' p (e:es) (x:xs) = compareExp p e x : compareExp' p es xs
catPositions :: String -> [ String ]
catPositions [] = []
catPositions ('[':ps)
= grabbed : catPositions ps'
where
(grabbed, ps') = (grabPositions ps, dropPositions ps)
grabPositions [] = []
grabPositions (']':xs) = []
grabPositions ('<':'&':'>':ps)
= ":" ++ grabPositions ps
grabPositions (x:xs) = x : grabPositions xs
dropPositions [] = []
dropPositions (']':xs) = xs
dropPositions (x:xs) = dropPositions xs
prunePositions :: String -> [String] -> [ [(SimpPos, SimpPos)] ]
prunePositions [] [] = []
prunePositions [] _ = []
prunePositions _ [] = []
prunePositions as (p:ps)
= createSet as p : prunePositions remAnswers ps
where
remAnswers = drop ((length (filter (==':') p)) + 1) as
createSet :: String -> String -> [ (SimpPos, SimpPos) ]
createSet [] [] = []
createSet [] _ = []
createSet _ [] = []
createSet ('y':xs) ps
= ((read p2)::(SimpPos, SimpPos)) : createSet xs ps'
where
p2 = getPos ps
ps' = dropPos ps
getPos [] = []
getPos (':':xs) = []
getPos (x:xs) = x : getPos xs
dropPos [] = []
dropPos (':':xs) = xs
dropPos (x:xs) = dropPos xs
createSet ('n':xs) ps
= createSet xs ps'
where
ps' = dropPos ps
dropPos [] = []
dropPos (':':xs) = xs
dropPos (x:xs) = dropPos xs
createSet _ _ = []
| SAdams601/HaRe | old/refactorer/RefacDupTrans.hs | bsd-3-clause | 27,405 | 0 | 21 | 7,489 | 10,220 | 5,178 | 5,042 | -1 | -1 |
module Network.XmlRpc.Base64 (
encode,
decode
) where
import Data.ByteString
import qualified Data.ByteString.Base64 as B64
encode :: ByteString -> ByteString
encode = B64.encode
decode :: ByteString -> ByteString
decode = B64.decodeLenient
| laurencer/confluence-sync | vendor/haxr/Network/XmlRpc/Base64.hs | bsd-3-clause | 262 | 0 | 5 | 51 | 61 | 38 | 23 | 9 | 1 |
{-|
Module : Text.Parsec
Copyright : (c) Daan Leijen 1999-2001, (c) Paolo Martini 2007
License : BSD-style (see the LICENSE file)
Maintainer : [email protected]
Stability : provisional
Portability : portable
This module includes everything you need to get started writing a
parser.
By default this module is set up to parse character data. If you'd like
to parse the result of your own tokenizer you should start with the following
imports:
@
import Text.Parsec.Prim
import Text.Parsec.Combinator
@
Then you can implement your own version of 'satisfy' on top of the 'tokenPrim'
primitive.
-}
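-- A minimal sketch of such a 'satisfy' for a 'Char' token stream (an
-- illustration only, not a definition exported here; 'updatePosChar' comes
-- from "Text.Parsec.Pos"):
--
-- > satisfy f = tokenPrim (\c -> show [c])
-- >                       (\pos c _cs -> updatePosChar pos c)
-- >                       (\c -> if f c then Just c else Nothing)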
module Text.Parsec
( -- * Parsers
ParsecT
, Parsec
, token
, tokens
, runParserT
, runParser
, parse
, parseTest
, getPosition
, getInput
, getState
, putState
, modifyState
-- * Combinators
, (<|>)
, (<?>)
, label
, labels
, try
, unexpected
, choice
, many
, many1
, skipMany
, skipMany1
, count
, between
, option
, optionMaybe
, optional
, sepBy
, sepBy1
, endBy
, endBy1
, sepEndBy
, sepEndBy1
, chainl
, chainl1
, chainr
, chainr1
, eof
, notFollowedBy
, manyTill
, lookAhead
, anyToken
-- * Character Parsing
, module Text.Parsec.Char
-- * Error messages
, ParseError
, errorPos
-- * Position
, SourcePos
, SourceName, Line, Column
, sourceName, sourceLine, sourceColumn
, incSourceLine, incSourceColumn
, setSourceLine, setSourceColumn, setSourceName
-- * Low-level operations
, manyAccum
, tokenPrim
, tokenPrimEx
, runPT
, unknownError
, sysUnExpectError
, mergeErrorReply
, getParserState
, setParserState
, updateParserState
, Stream (..)
, runParsecT
, mkPT
, runP
, Consumed (..)
, Reply (..)
, State (..)
, setPosition
, setInput
-- * Other stuff
, setState
, updateState
, parsecMap
, parserReturn
, parserBind
, parserFail
, parserZero
, parserPlus
) where
import Text.Parsec.Pos
import Text.Parsec.Error
import Text.Parsec.Prim
import Text.Parsec.Char
import Text.Parsec.Combinator
| 23Skidoo/parsec | Text/Parsec.hs | bsd-2-clause | 2,279 | 0 | 5 | 688 | 328 | 224 | 104 | 86 | 0 |
module CSV () where
-- | Using LiquidHaskell for CSV lists
-- c.f. http://www.reddit.com/r/scala/comments/1nhzi2/using_shapelesss_sized_type_to_eliminate_real/
data CSV = Csv { headers :: [String]
, rows :: [[String]]
}
{-@ data CSV = Csv { headers :: [String]
, rows :: [{v:[String] | (len v) = (len headers)}]
}
@-}
-- Eeks, we missed the column name.
csvBad1 = Csv ["Date"]
[ ["Mon", "1"]
, ["Tue", "2"]
, ["Wed", "3"]
]
-- Eeks, we missed a column.
csvBad2 = Csv ["Name", "Age"]
[ ["Alice", "32"]
, ["Bob" ]
, ["Cris" , "29"]
]
-- All is well!
csvGood = Csv ["Id", "Name", "Days"]
[ ["1", "Jan", "31"]
, ["2", "Feb", "28"]
, ["3", "Mar", "31"]
, ["4", "Apr", "30"]
]
| abakst/liquidhaskell | tests/neg/csv.hs | bsd-3-clause | 973 | 0 | 10 | 420 | 190 | 123 | 67 | 16 | 1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE ExistentialQuantification, NoImplicitPrelude #-}
module GHC.Event.Internal
(
-- * Event back end
Backend
, backend
, delete
, poll
, modifyFd
-- * Event type
, Event
, evtRead
, evtWrite
, evtClose
, eventIs
-- * Timeout type
, Timeout(..)
-- * Helpers
, throwErrnoIfMinus1NoRetry
) where
import Data.Bits ((.|.), (.&.))
import Data.List (foldl', intercalate)
import Data.Monoid (Monoid(..))
import Foreign.C.Error (eINTR, getErrno, throwErrno)
import System.Posix.Types (Fd)
import GHC.Base
import GHC.Num (Num(..))
import GHC.Show (Show(..))
import GHC.List (filter, null)
-- | An I\/O event.
newtype Event = Event Int
deriving (Eq)
evtNothing :: Event
evtNothing = Event 0
{-# INLINE evtNothing #-}
-- | Data is available to be read.
evtRead :: Event
evtRead = Event 1
{-# INLINE evtRead #-}
-- | The file descriptor is ready to accept a write.
evtWrite :: Event
evtWrite = Event 2
{-# INLINE evtWrite #-}
-- | Another thread closed the file descriptor.
evtClose :: Event
evtClose = Event 4
{-# INLINE evtClose #-}
eventIs :: Event -> Event -> Bool
eventIs (Event a) (Event b) = a .&. b /= 0
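-- Events form a bit set, so (for illustration) interest in several
-- conditions is combined with 'mappend' and queried with 'eventIs':
--
-- > let ev = evtRead `mappend` evtWrite
-- > ev `eventIs` evtRead   -- True
-- > ev `eventIs` evtClose  -- False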
instance Show Event where
show e = '[' : (intercalate "," . filter (not . null) $
[evtRead `so` "evtRead",
evtWrite `so` "evtWrite",
evtClose `so` "evtClose"]) ++ "]"
where ev `so` disp | e `eventIs` ev = disp
| otherwise = ""
instance Monoid Event where
mempty = evtNothing
mappend = evtCombine
mconcat = evtConcat
evtCombine :: Event -> Event -> Event
evtCombine (Event a) (Event b) = Event (a .|. b)
{-# INLINE evtCombine #-}
evtConcat :: [Event] -> Event
evtConcat = foldl' evtCombine evtNothing
{-# INLINE evtConcat #-}
-- | A type alias for timeouts, specified in seconds.
data Timeout = Timeout {-# UNPACK #-} !Double
| Forever
deriving (Show)
-- | Event notification backend.
data Backend = forall a. Backend {
_beState :: !a
-- | Poll backend for new events. The provided callback is called
-- once per file descriptor with new events.
, _bePoll :: a -- backend state
-> Timeout -- timeout in milliseconds
-> (Fd -> Event -> IO ()) -- I/O callback
-> IO ()
-- | Register, modify, or unregister interest in the given events
-- on the given file descriptor.
, _beModifyFd :: a
-> Fd -- file descriptor
-> Event -- old events to watch for ('mempty' for new)
-> Event -- new events to watch for ('mempty' to delete)
-> IO ()
, _beDelete :: a -> IO ()
}
backend :: (a -> Timeout -> (Fd -> Event -> IO ()) -> IO ())
-> (a -> Fd -> Event -> Event -> IO ())
-> (a -> IO ())
-> a
-> Backend
backend bPoll bModifyFd bDelete state = Backend state bPoll bModifyFd bDelete
{-# INLINE backend #-}
poll :: Backend -> Timeout -> (Fd -> Event -> IO ()) -> IO ()
poll (Backend bState bPoll _ _) = bPoll bState
{-# INLINE poll #-}
modifyFd :: Backend -> Fd -> Event -> Event -> IO ()
modifyFd (Backend bState _ bModifyFd _) = bModifyFd bState
{-# INLINE modifyFd #-}
delete :: Backend -> IO ()
delete (Backend bState _ _ bDelete) = bDelete bState
{-# INLINE delete #-}
-- | Throw an 'IOError' corresponding to the current value of
-- 'getErrno' if the result value of the 'IO' action is -1 and
-- 'getErrno' is not 'eINTR'. If the result value is -1 and
-- 'getErrno' returns 'eINTR' 0 is returned. Otherwise the result
-- value is returned.
throwErrnoIfMinus1NoRetry :: (Eq a, Num a) => String -> IO a -> IO a
throwErrnoIfMinus1NoRetry loc f = do
res <- f
if res == -1
then do
err <- getErrno
if err == eINTR then return 0 else throwErrno loc
else return res
| beni55/haste-compiler | libraries/ghc-7.8/base/GHC/Event/Internal.hs | bsd-3-clause | 4,024 | 0 | 16 | 1,175 | 1,000 | 557 | 443 | 99 | 3 |
{-# LANGUAGE GADTs #-}
module ShouldCompile where
data T a where
T :: b -> (b->Int) -> a -> T a
f (T b f a) = a
| siddhanathan/ghc | testsuite/tests/gadt/gadt6.hs | bsd-3-clause | 118 | 0 | 9 | 34 | 55 | 31 | 24 | 5 | 1 |
{-# LANGUAGE QuasiQuotes #-}
import HarmLang.Interpreter
import HarmLang.Types
import HarmLang.InitialBasis
import HarmLang.QuasiQuoter
import HarmLang.IO
progression = [hl|[CM C7 F7 C7 G7 F7 G#7]|]
main :: IO ()
main =
do
putStrLn "Welcome to the Blues Buddy!"
putStrLn "Original 12 bar blues in C."
putStrLn . show $ progression
putStrLn "Please enter the key to which you wish to transpose."
newKey <- fmap interpretPitchClass getLine
let newchords = transpose progression (intervalAB [hl|'C'|] newKey)
outputToMidi newchords "blues.mid"
let timedchords = map (\c -> (TimedChord c (Time 8 8))) newchords
let arpeggios = arpeggiate timedchords
outputToMidi arpeggios "arpeggio.mid"
writeMidi [makeTrack arpeggios, makeTrack (transpose timedchords (Interval (-12)))] "jazz.mid"
putStrLn $ "Transposed 12 bar blues, to " ++ (show newKey) ++ " and output to blues.mid"
putStrLn $ "Arpegiatted transposed blues to arpeggio.mid"
putStrLn $ "Together in jazz.mid"
| lrassaby/harmlang | examples/bluesmachine.hs | mit | 1,029 | 0 | 16 | 202 | 258 | 126 | 132 | 24 | 1 |
addOneTo i = i + 1
alwaysEven a b = let isEven x = if even x
then x
else x - 1
in (isEven a, isEven b)
alwaysEven' a b = (isEven a, isEven b)
    where isEven x = if even x then x else x - 1
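-- Both versions behave the same; for example:
--   alwaysEven  3 4 == (2,4)
--   alwaysEven' 3 4 == (2,4)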
| betoesquivel/haskell | tut1.hs | mit | 290 | 0 | 10 | 153 | 111 | 55 | 56 | 7 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Streaming.NetworkSpec where
import Control.Concurrent.Async (withAsync)
import Control.Exception (bracket)
import Control.Monad (forever, replicateM_)
import Data.Array.Unboxed (elems)
import qualified Data.ByteString.Char8 as S8
import Data.Char (toUpper)
import Data.Streaming.Network
import Network.Socket (close)
import Test.Hspec
import Test.Hspec.QuickCheck
spec :: Spec
spec = do
describe "getDefaultReadBufferSize" $ do
it "sanity" $ do
getReadBufferSize (clientSettingsTCP 8080 "localhost") >= 4096 `shouldBe` True
describe "getUnassignedPort" $ do
it "sanity" $ replicateM_ 100000 $ do
port <- getUnassignedPort
(port `elem` elems unassignedPorts) `shouldBe` True
describe "bindRandomPortTCP" $ do
modifyMaxSuccess (const 5) $ prop "sanity" $ \content -> bracket
(bindRandomPortTCP "*4")
(close . snd)
$ \(port, socket) -> do
let server ad = forever $ appRead ad >>= appWrite ad . S8.map toUpper
client ad = do
appWrite ad bs
appRead ad >>= (`shouldBe` S8.map toUpper bs)
bs
| null content = "hello"
| otherwise = S8.pack $ take 1000 content
withAsync (runTCPServer (serverSettingsTCPSocket socket) server) $ \_ -> do
runTCPClient (clientSettingsTCP port "localhost") client
| fpco/streaming-commons | test/Data/Streaming/NetworkSpec.hs | mit | 1,673 | 0 | 24 | 600 | 432 | 221 | 211 | 35 | 1 |
module PatternGuard where
import Prelude hiding (take)
checkNum :: Int -> Bool
checkNum 2 = True
checkNum _ = False
-- Inline data structures, so that this test case
-- works independent of inter-module data flow
data Pairing a b = Pair a b
data List a = Nil | Cons a (List a)
-- Reverse order of arguments to make the termination checker happy
take :: List a -> Int -> List a
take _ n | n <= 0 = Nil
take Nil _ = Nil
take (Cons x xs) n = Cons x (take xs (n-1))
take2 :: List a -> Int -> List a
take2 x n = case Pair n x of
Pair n _ | n <= 0 -> Nil
Pair _ Nil -> Nil
Pair n (Cons x xs) -> Cons x (take2 xs (n-1))
| antalsz/hs-to-coq | examples/base-tests/PatternGuard.hs | mit | 689 | 0 | 12 | 218 | 271 | 138 | 133 | 16 | 3 |
import Data.List.Split
import Control.Monad.State
data Dir = N | E | S | W
deriving Show
data Turn = L | R
deriving (Show, Read)
data Move = Move { dir :: Turn
, step :: Integer
} deriving Show
mkMove :: String -> Move
mkMove [] = undefined
mkMove (x:xs) = Move { dir = read [x], step = read xs }
data Me = Me { heading :: Dir
, pos :: (Integer, Integer)
} deriving Show
moveStep :: Me -> Move -> Me
moveStep m mv = newMe
where newMe = Me { heading = d, pos = p }
d = turn (heading m) (dir mv)
p = move d (pos m) (step mv)
turn N L = W
turn N R = E
turn E L = N
turn E R = S
turn S L = E
turn S R = W
turn W L = S
turn W R = N
move N (x,y) n = (x, y + n)
move E (x,y) n = (x + n, y)
move S (x,y) n = (x, y - n)
move W (x,y) n = (x - n, y)
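-- For example (illustration only):
--   moveStep (Me {heading = N, pos = (0,0)}) (mkMove "R2")
--     ~>  Me {heading = E, pos = (2,0)}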
type Pos = (Integer, Integer)
type GameState = (Me, Pos)
startState :: GameState
startState = (startMe, (0,0))
where startMe = Me { heading = N, pos = (0,0) }
nextState :: [Move] -> State GameState Pos
nextState [] = do
(_, d) <- get
return d
nextState (x:xs) = do
(me, _) <- get
let newMe = moveStep me x
put (newMe, pos newMe)
nextState xs
main :: IO ()
main = do
moves <- map mkMove . splitOn ", " <$> readFile "input"
let lastState = evalState (nextState moves) startState
print $ (\(x,y) -> abs x + abs y) lastState
| wizzup/advent_of_code | 2016/1/part1.hs | mit | 1,476 | 0 | 12 | 511 | 721 | 393 | 328 | 51 | 11 |
module Yesod.Hunt.Routes where
import Control.Applicative
import Hunt.Interpreter.Interpreter
import Yesod
-- | wrapper type for index environment
data HuntS = HuntS { getHunt :: DefHuntEnv }
-- | helper for easy initiation
initHuntS :: IO HuntS
initHuntS = HuntS <$> initHunt
-- | class that has to be implemented for yesod master application
class Yesod master => YesodHunt master where
-- | TemplateHaskell magic: create Types for routes with
-- that small QQ-Dsl then generate Yesod Dispatch
mkYesodSubData "HuntS" [parseRoutes|
/search/#String HSearch GET
/search/#String/#Int/#Int HPagedSearch GET
/completion/#String HCompletion GET
|]
| hunt-framework/yesod-hunt | src/Yesod/Hunt/Routes.hs | mit | 715 | 0 | 8 | 161 | 86 | 51 | 35 | -1 | -1 |
-------------------------------------------------------------------------
--
-- Haskell: The Craft of Functional Programming, 3e
-- Simon Thompson
-- (c) Addison-Wesley, 1996-2011.
--
-- Chapter 7
--
-------------------------------------------------------------------------
module Chapter7 where
-- Defining functions over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- For pedagogical reasons, this chapter repeats many of the definitions in the
-- standard Prelude. They are repeated in this file, and so the original
-- definitions have to be hidden when the Prelude is imported:
import Prelude hiding (Word,id,head,tail,null,sum,concat,(++),zip,take,getLine)
import qualified Prelude
import Chapter5 (digits,isEven)
import Test.QuickCheck
-- Pattern matching revisited
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^
-- An example function using guards ...
mystery :: Integer -> Integer -> Integer
mystery x y
| x==0 = y
| otherwise = x
-- ... or pattern matching
mystery' :: Integer -> Integer -> Integer
mystery' 0 y = y
mystery' x _ = x
-- To join two strings
joinStrings :: (String,String) -> String
joinStrings (st1,st2) = st1 ++ "\t" ++ st2
-- Lists and list patterns
-- ^^^^^^^^^^^^^^^^^^^^^^^
-- From the Prelude ...
head :: [a] -> a
head (x:_) = x
tail :: [a] -> [a]
tail (_:xs) = xs
null :: [a] -> Bool
null [] = True
null (_:_) = False
-- The case construction
-- ^^^^^^^^^^^^^^^^^^^^^
-- Return the first digit in a string.
firstDigit :: String -> Char
firstDigit st
= case (digits st) of
[] -> '\0'
(x:_) -> x
-- Primitive recursion over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- The sum of a list of Ints.
sum :: [Integer] -> Integer
sum [] = 0
sum (x:xs) = x + sum xs
-- Property to test the re-implementation of sum
-- against the version in the prelude.
prop_sum :: [Integer] -> Bool
prop_sum xs = sum xs == Prelude.sum xs
-- Finding primitive recursive definitions
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Concatenating a list of lists.
concat :: [[a]] -> [a]
concat [] = []
concat (x:xs) = x ++ concat xs
-- Joining two lists
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x:(xs++ys)
-- Testing whether something is a member of a list.
-- Renamed to elem' as we use the elem from Prelude
-- elsewhere in the file.
elem' :: Integer -> [Integer] -> Bool
elem' x [] = False
elem' x (y:ys) = (x==y) || (elem' x ys)
-- To double every element of an integer list
doubleAll :: [Integer] -> [Integer]
doubleAll xs = [ 2*x | x<-xs ]
doubleAll' [] = []
doubleAll' (x:xs) = 2*x : doubleAll' xs
-- To select the even elements from an integer list.
selectEven :: [Integer] -> [Integer]
selectEven xs = [ x | x<-xs , isEven x ]
selectEven' [] = []
selectEven' (x:xs)
| isEven x = x : selectEven' xs
| otherwise = selectEven' xs
-- To sort a list of numbers into ascending order.
iSort :: [Integer] -> [Integer]
iSort [] = []
iSort (x:xs) = ins x (iSort xs)
-- To insert an element at the right place into a sorted list.
ins :: Integer -> [Integer] -> [Integer]
ins x [] = [x]
ins x (y:ys)
| x <= y = x:(y:ys)
| otherwise = y : ins x ys
-- General recursions over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- Zipping together two lists.
zip :: [a] -> [b] -> [(a,b)]
zip (x:xs) (y:ys) = (x,y) : zip xs ys
zip (x:xs) [] = []
zip [] zs = []
-- Taking a given number of elements from a list.
take :: Int -> [a] -> [a]
take 0 _ = []
take _ [] = []
take n (x:xs)
| n>0 = x : take (n-1) xs
take _ _ = error "PreludeList.take: negative argument"
-- Quicksort over lists.
qSort :: [Integer] -> [Integer]
qSort [] = []
qSort (x:xs)
= qSort [ y | y<-xs , y<=x] ++ [x] ++ qSort [ y | y<-xs , y>x]
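-- For example, qSort [3,1,2] = [1,2,3].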
-- Example: Text Processing
-- ^^^^^^^^^^^^^^^^^^^^^^^^
-- The `whitespace' characters.
whitespace :: String
whitespace = ['\n','\t',' ']
-- Get a word from the front of a string.
getWord :: String -> String
getWord [] = []
getWord (x:xs)
| elem x whitespace = []
| otherwise = x : getWord xs
-- In a similar way, the first word of a string can be dropped.
dropWord :: String -> String
dropWord [] = []
dropWord (x:xs)
| elem x whitespace = (x:xs)
| otherwise = dropWord xs
-- To remove the whitespace character(s) from the front of a string.
dropSpace :: String -> String
dropSpace [] = []
dropSpace (x:xs)
| elem x whitespace = dropSpace xs
| otherwise = (x:xs)
-- A word is a string.
type Word = String
-- Splitting a string into words.
splitWords :: String -> [Word]
splitWords st = split (dropSpace st)
split :: String -> [Word]
split [] = []
split st
= (getWord st) : split (dropSpace (dropWord st))
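-- For example, splitWords "  dog cat\n" = ["dog","cat"].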
-- Splitting into lines of length at most lineLen
lineLen :: Int
lineLen = 80
-- A line is a list of words.
type Line = [Word]
-- Getting a line from a list of words.
getLine :: Int -> [Word] -> Line
getLine len [] = []
getLine len (w:ws)
| length w <= len = w : restOfLine
| otherwise = []
where
newlen = len - (length w + 1)
restOfLine = getLine newlen ws
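-- For example, getLine 20 ["Mary","had","a","little","lamb"]
-- = ["Mary","had","a","little"].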
-- Dropping the first line from a list of words.
dropLine :: Int -> [Word] -> Line
dropLine = dropLine -- DUMMY DEFINITION
-- Splitting into lines.
splitLines :: [Word] -> [Line]
splitLines [] = []
splitLines ws
= getLine lineLen ws
: splitLines (dropLine lineLen ws)
-- To fill a text string into lines, we write
fill :: String -> [Line]
fill = splitLines . splitWords
| tonyfloatersu/solution-haskell-craft-of-FP | Chapter7.hs | mit | 5,637 | 1 | 10 | 1,381 | 1,779 | 965 | 814 | 115 | 2 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
module DB.Users where
import Prelude hiding (readFile, putStrLn)
import Data.String
import Control.Monad
import Control.Monad.Writer
import Database.SQLite.Simple
import Database.SQLite.Simple.FromRow
import System.Random
import Control.Monad.Error
import Control.Exception
import DB0
-- | change the user mail
migrate :: Env -> Login -> Mail -> String -> ConnectionMonad ()
migrate e l m href = checkingLogin e l $ \(CheckLogin i m' _) -> do
eexecute e "update users set email=? where id=?" (m,i)
tell . return $ EvSendMail m (Migration m' l) href
-- | compute a new 30 digits login key
mkLogin :: ConnectionMonad Login
mkLogin = liftIO $ show <$> foldr (\n m -> m *10 + n) 1 <$> forM [0..30] (const $ randomRIO (0,9::Integer))
-- | insert a new user by mail
inviteUser :: Env -> Login -> Mail -> String -> (UserId -> UserId -> ConnectionMonad ()) -> ConnectionMonad ()
inviteUser e l m' href f = checkingLogin e l $ \(CheckLogin i m _) -> etransaction e $ do
r <- equery e "select inviter from users where email=?" (Only m')
l' <- mkLogin
let newuser = do
eexecute e "insert into users values (null,?,?,?,0)" (m',l',i)
r <- equery e "select last_insert_rowid()" ()
case (r :: [Only UserId]) of
[Only ui'] -> f i ui'
_ -> throwError $ DatabaseError "last rowid lost"
tell . return $ EvSendMail m' (Invitation m l') href
case (r :: [Only (Maybe UserId)]) of
[] -> newuser
[Only (Just ((==i) -> True))] -> newuser
[Only (Just ((==i) -> False))] -> throwError UserInvitedBySomeoneElse
_ -> throwError $ DatabaseError "user multiple email inconsistence"
-- | change the user login
logout :: Env -> Login -> String -> ConnectionMonad ()
logout e l href = checkingLogin e l $ \(CheckLogin ui m _) -> do
l' <- mkLogin
eexecute e "update users set login=? where id=?" (l',ui)
tell . return $ EvSendMail m (LogginOut l') href
reminder :: Env -> Mail -> String -> ConnectionMonad ()
reminder e m href = do
r <- equery e "select login from users where email=?" (Only m)
case (r :: [Only Login]) of
[Only l] -> tell . return $ EvSendMail m (Reminding l) href
_ -> throwError UnknownUser
getLogins :: Env -> ConnectionMonad [Login]
getLogins e = map fromOnly <$> equery e "select login from users" ()
boot :: Env -> Mail -> String -> ConnectionMonad ()
boot e m href = do
l <- mkLogin
r <- equery e "select id from users" ()
case (r :: [Only UserId]) of
[] -> do
eexecute e "insert into users values (null,?,?,null,0)" (m,l)
tell . return $ EvSendMail m (Booting l) href
_ -> throwError AlreadyBooted
getLogin :: Env -> Login -> ConnectionMonad String
getLogin e l = checkingLogin e l $ \(CheckLogin i m _) -> return m
| paolino/mootzoo | DB/Users.hs | mit | 3,241 | 0 | 19 | 962 | 1,049 | 532 | 517 | 62 | 5 |
module Parser (
parseExpr
) where
import Text.Parsec
import Text.Parsec.String (Parser)
import qualified Text.Parsec.Expr as Ex
import Lexer
import Pretty
import Syntax
-------------------------------------------------------------------------------
-- Expression
-------------------------------------------------------------------------------
variable :: Parser Expr
variable = do
x <- identifier
return (Var x)
number :: Parser Expr
number = do
n <- natural
return (Lit (LInt (fromIntegral n)))
addition :: Parser Expr
addition = do
reservedOp "+"
e <- term
e' <- term
return (Add e e')
lambda :: Parser Expr
lambda = do
reservedOp "\\"
x <- identifier
reservedOp ":"
t <- type'
reservedOp "."
e <- expr
return (Lam x t e)
derive' :: Expr -> Expr
derive' (Var name) = Var ("d" ++ name)
derive' (Add x y) = Add (Add x y) (Add (derive' x) (derive' y))
derive' (App x y) = App (App (derive' x) y) (derive' y)
derive' (Lam n t e) = (Lam n t (Lam ("d" ++ n) t (derive' e))) -- todo: derive type t
derive' x = x
-- Lift non-derivative terms to the top of the lambda stack
liftNonDerivativeTerms' :: Expr -> Maybe String -> [Expr] -> [Expr] -> Expr
liftNonDerivativeTerms' x@(Lam n t e) ms ts dts =
case ms of
Just prevN | ("d" ++ prevN) == n -> liftNonDerivativeTerms' e Nothing ts (dts ++ [x])
_ -> liftNonDerivativeTerms' e (Just n) (ts ++ [x]) dts
liftNonDerivativeTerms' e _ ts dts = reconstructLambdas ts dts e
-- term lambdas, derivate term lambdas, final expression
reconstructLambdas :: [Expr] -> [Expr] -> Expr -> Expr
reconstructLambdas ((Lam n t _):ds) dts e = (Lam n t (reconstructLambdas ds dts e))
reconstructLambdas [] ((Lam n t _):dts) e = (Lam n t (reconstructLambdas [] dts e))
reconstructLambdas [] [] e = e
liftNonDerivativeTerms :: Expr -> Expr
liftNonDerivativeTerms e = liftNonDerivativeTerms' e Nothing [] []
derive :: Parser Expr
derive = do
reservedOp "derive"
e <- expr
return (liftNonDerivativeTerms $ derive' e)
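-- A small, informal illustration of the intended behaviour (assuming the
-- surface syntax accepted by 'lambda' and 'derive' above): parsing
--   derive \x:Int. x
-- yields the same term as parsing
--   \x:Int. \dx:Int. dx
-- i.e. every binder x gains a companion binder dx, and the derivative of a
-- variable is the corresponding d-variable.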
bool :: Parser Expr
bool = (reserved "True" >> return (Lit (LBool True)))
<|> (reserved "False" >> return (Lit (LBool False)))
term :: Parser Expr
term = parens expr
<|> bool
<|> number
<|> variable
<|> addition
<|> lambda
<|> derive
expr :: Parser Expr
expr = do
es <- many1 term
return (foldl1 App es)
-------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
tyatom :: Parser Type
tyatom = tylit <|> (parens type')
tylit :: Parser Type
tylit = (reservedOp "Bool" >> return TBool)
<|> (reservedOp "Int" >> return TInt)
<|> (reservedOp "+" >> (return $ TArr (TArr TInt TInt) TInt))
type' :: Parser Type
type' = Ex.buildExpressionParser tyops tyatom
where
infixOp x f = Ex.Infix (reservedOp x >> return f)
tyops = [
[infixOp "->" TArr Ex.AssocRight]
]
-------------------------------------------------------------------------------
-- Toplevel
-------------------------------------------------------------------------------
parseExpr :: String -> Either ParseError Expr
parseExpr input = parse (contents expr) "<stdin>" input
| JoshuaGross/STILC | Parser.hs | mit | 3,257 | 0 | 13 | 647 | 1,148 | 577 | 571 | 82 | 2 |
-- Template Haskell
-- ref: https://wiki.haskell.org/A_practical_Template_Haskell_Tutorial
-- ref: https://wiki.haskell.org/Template_Haskell | Airtnp/Freshman_Simple_Haskell_Lib | Incomplete/Template-Haskell.hs | mit | 140 | 0 | 2 | 8 | 5 | 4 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module GeoChat.WebsocketServer
(wsApplication) where
import Control.Concurrent (MVar, newMVar, modifyMVar_, readMVar)
import Control.Monad (forM_, liftM)
import qualified Data.Map as M
import Data.Monoid (mappend)
import Data.Text (Text)
import Control.OldException
import Control.Exception (fromException)
import qualified Network.WebSockets as WS
import GeoChat.Types
import GeoChat.JSONInstances
import GeoChat.EventProcessor
import Data.Text.Lazy.Encoding as E
import Data.Aeson
import Database.PostgreSQL.Simple (Connection)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy.IO as TL
import Control.Monad.IO.Class (liftIO)
type Bounds = (LatLng,LatLng)
type ClientSink = (ClientId, (Maybe Bounds, WS.Sink WS.Hybi10))
type ServerState = M.Map ClientId (Maybe Bounds, WS.Sink WS.Hybi10)
addClientSink :: ClientSink -> ServerState -> ServerState
addClientSink cs@(cid, (_,sink)) s = M.insert cid (Nothing,sink) s
removeClientSink :: ClientId -> ServerState -> ServerState
removeClientSink cid s = M.delete cid s
updateClientSinkBounds :: Client -> LatLng -> LatLng -> ServerState -> ServerState
updateClientSinkBounds c sw ne s =
M.update f (clientId c) s
where f (_, sink) = Just (Just (sw, ne), sink)
-- Inefficient: every broadcast walks the full client map; replace with a keyed/spatial lookup (see the TODO on sendMessageIfClientInBounds).
broadcast :: [MessageFromServer] -> MVar ServerState -> IO ()
broadcast ms state = do
clientSinks <- readMVar state
forM_ (M.toList clientSinks) $ \c -> mapM (sendMessageIfClientInBounds state c) ms
return ()
singlecast :: [MessageFromServer] -> WS.Sink WS.Hybi10 -> IO ()
singlecast ms sink =
WS.sendSink sink $ WS.textData $ encode ms
sendEncoded :: WS.Sink WS.Hybi10 -> MessageFromServer -> IO ()
sendEncoded sink message = do
-- putStrLn $ "Sending " ++ (show message)
WS.sendSink sink $ WS.textData $ encode message
inBounds :: Bounds -> LatLng -> Bool
inBounds ((swlat,swlng), (nelat,nelng)) (lat, lng) =
  lat > swlat && lat < nelat && lng > swlng && lng < nelng
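-- Hedged example (added; assumes LatLng is a pair of numeric coordinates, as
-- the comparisons above suggest): a point inside and a point outside a 10x10 box.
_inBoundsExample :: (Bool, Bool)
_inBoundsExample =
  ( inBounds ((0, 0), (10, 10)) (5, 5)    -- True
  , inBounds ((0, 0), (10, 10)) (11, 5) ) -- False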
refuseSend :: ClientId -> MessageFromServer -> Bounds -> IO ()
refuseSend cid m bounds = do
putStrLn $ "Client " ++ (show cid) ++ " is out of bounds for message " ++ (show m)
return ()
-- TODO change this to use faster lookup by key and calculate target clients with PostGIS
sendMessageIfClientInBounds :: MVar ServerState -> ClientSink -> MessageFromServer -> IO ()
sendMessageIfClientInBounds state (cid, (Just bounds, sink)) m = do
r <- try (
case m of
UpdatedRoom latLng _ _ ->
if (inBounds bounds latLng) then sendEncoded sink m else refuseSend cid m bounds
Broadcast latLng _ _ _ ->
if (inBounds bounds latLng) then sendEncoded sink m else refuseSend cid m bounds
otherwise -> sendEncoded sink m)
case r of
Left e -> do
liftIO $ modifyMVar_ state $ \s -> do
let s' = removeClientSink cid s
putStrLn $ "Removed client sink " ++ (show cid)
return s'
putStrLn $ "Error sending sink for client " ++ (show cid) ++ ". Removing sink."
Right _ -> do
putStrLn $ "Successfully sent " ++ (show m) ++ " to client " ++ (show cid)
return ()
sendMessageIfClientInBounds state (cid,(Nothing,_)) _ = putStrLn $ "No sendMessage; client " ++ (show cid) ++ " has no latLng"
wsApplication :: MVar ServerState -> WS.Request -> WS.WebSockets WS.Hybi10 ()
wsApplication state rq = do
WS.acceptRequest rq
WS.getVersion >>= liftIO . putStrLn . ("Client version: " ++)
WS.spawnPingThread 30 :: WS.WebSockets WS.Hybi10 ()
sink <- WS.getSink
sinks <- liftIO $ readMVar state
conn <- liftIO GeoChat.EventProcessor.dbconn
client <- liftIO $ createClient conn
liftIO $ putStrLn $ "Created client " `mappend` (show $ clientId client)
liftIO $ modifyMVar_ state $ \s -> do
let s' = addClientSink (clientId client, (Nothing, sink)) s
WS.sendSink sink $ WS.textData $ encode $ Handshake $ clientId client
return s'
receiveMessage state conn client sink
receiveMessage :: WS.Protocol p => MVar ServerState -> Connection -> Client -> WS.Sink WS.Hybi10 -> WS.WebSockets p ()
receiveMessage state conn client sink = flip WS.catchWsError catchDisconnect $ do
rawMsg <- WS.receiveData
liftIO $ putStrLn $ "receiveData: " ++ (show rawMsg)
case (decode rawMsg :: Maybe MessageFromClient) of
Just (MapBoundsUpdated sw ne) -> do
liftIO $ modifyMVar_ state $ \s -> do
let s' = updateClientSinkBounds client sw ne s
return s'
Just m@(ListActiveRooms sw ne) -> do
msgsFromServer <- liftIO $ processMsg conn client m
liftIO $ singlecast msgsFromServer sink
Just clientMessage -> do
msgsFromServer <- liftIO $ processMsg conn client clientMessage
liftIO $ putStrLn $ "about to broadcast: " ++ (show msgsFromServer)
liftIO $ broadcast msgsFromServer state
return ()
Nothing -> do
let errMsg = (E.decodeUtf8 rawMsg)
liftIO $ TL.putStrLn $ "Failed to decode: " `mappend` errMsg
return ()
receiveMessage state conn client sink
where
catchDisconnect e = case fromException e of
Just WS.ConnectionClosed -> do
liftIO $ modifyMVar_ state $ \s -> do
let s' = removeClientSink (clientId client) s
putStrLn $ "Connection closed by client " ++ (show . clientId $ client)
putStrLn $ "Removed client sink " ++ (show $ clientId client)
putStrLn $ "Sinks left: " ++ ((show . M.size) s')
return s'
msgsFromServer <- liftIO $ processMsg conn client Leave
liftIO $ broadcast msgsFromServer state
_ -> do
liftIO $ putStrLn "Uncaught Error"
return ()
| danchoi/geochat | src/WebsocketServer.hs | mit | 5,862 | 0 | 21 | 1,433 | 1,916 | 959 | 957 | 119 | 6 |
-- | Physics for bead bouncing.
module Collide where
import World
import Actor
import Graphics.Gloss.Data.Point
import Graphics.Gloss.Data.Vector
import Graphics.Gloss.Geometry
-- Config -----------------------------------------------------------------------------------------
-- How bouncy the beads are
--   at 0.2 they look like melting plastic.
--   at 0.8 they look like bouncy rubber balls.
--   above 1 they gain energy with each bounce and escape the box.
--
beadBeadLoss, beadWallLoss :: Float
beadBeadLoss = 0.95
beadWallLoss = 0.8
-- | Move a bead which is in contact with a wall.
collideBeadWall
:: Actor -- ^ the bead
        -> Actor        -- ^ the wall that the bead is in contact with
-> Actor -- ^ the new bead
collideBeadWall
bead@(Bead ix _ radius pBead vIn@(velX, velY))
wall@(Wall _ pWall1 pWall2)
= let -- Take the collision point as being the point on the wall which is
-- closest to the bead's center.
pCollision = closestPointOnLine pWall1 pWall2 pBead
-- then do a static, non energy transfering collision.
in collideBeadPoint_static
bead
pCollision
beadWallLoss
-- | Move two beads which have bounced into each other.
collideBeadBead_elastic
:: Actor -> Actor
-> (Actor, Actor)
collideBeadBead_elastic
bead1@(Bead ix1 mode1 r1 p1 v1)
bead2@(Bead ix2 mode2 r2 p2 v2)
= let mass1 = 1
mass2 = 1
-- the axis of collision (towards p2)
vCollision@(cX, cY) = normaliseV (p2 - p1)
vCollisionR = (cY, -cX)
-- the velocity component of each bead along the axis of collision
s1 = dotV v1 vCollision
s2 = dotV v2 vCollision
-- work out new velocities along the collision
s1' = (s1 * (mass1 - mass2) + 2 * mass2 * s2) / (mass1 + mass2)
s2' = (s2 * (mass2 - mass1) + 2 * mass1 * s1) / (mass1 + mass2)
-- the velocity components at right angles to the collision
-- there is no friction in the collision so these don't change
k1 = dotV v1 vCollisionR
k2 = dotV v2 vCollisionR
-- new bead velocities
v1' = mulSV s1' vCollision + mulSV k1 vCollisionR
v2' = mulSV s2' vCollision + mulSV k2 vCollisionR
v1_slow = mulSV beadBeadLoss v1'
v2_slow = mulSV beadBeadLoss v2'
-- work out the point of collision
u1 = r1 / (r1 + r2)
u2 = r2 / (r1 + r2)
pCollision
= p1 + mulSV u1 (p2 - p1)
-- place the beads just next to each other so they are no longer overlapping.
p1' = pCollision - (r1 + 0.001) `mulSV` vCollision
p2' = pCollision + (r2 + 0.001) `mulSV` vCollision
bead1' = Bead ix1 mode1 r1 p1' v1_slow
bead2' = Bead ix2 mode2 r2 p2' v2_slow
in (bead1', bead2')
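-- Worked note (added): with mass1 == mass2 the formulas above reduce to
-- s1' = s2 and s2' = s1, i.e. the beads simply swap their velocity components
-- along the collision axis, after which both are scaled by beadBeadLoss.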
collideBeadBead_static
:: Actor -> Actor
-> Actor
collideBeadBead_static
bead1@(Bead ix1 _ radius1 pBead1 _)
bead2@(Bead ix2 _ radius2 pBead2 _)
= let -- Take the collision point as being between the center's of the two beads.
-- For beads which have the same radius the collision point is half way between
-- their centers and u == 0.5
u = radius1 / (radius1 + radius2)
pCollision = pBead1 + mulSV u (pBead2 - pBead1)
bead1' = collideBeadPoint_static
bead1
pCollision
beadBeadLoss
in bead1'
-- | Move a bead which has collided with something.
collideBeadPoint_static
:: Actor -- ^ the bead which collided with something
-> Point -- ^ the point of collision (should be near the bead's surface)
-> Float -- ^ velocity scaling factor (how much to slow the bead down after the collision)
-> Actor
collideBeadPoint_static
bead@(Bead ix mode radius pBead vIn)
pCollision
velLoss
= let
-- take a normal vector from the wall to the bead.
-- this vector is at a right angle to the wall.
vNormal = normaliseV (pBead - pCollision)
-- the bead at pBead is overlapping with what it collided with, but we don't want that.
-- place the bead so it's surface is just next to the point of collision.
pBead_new = pCollision + (radius + 0.01) `mulSV` vNormal
-- work out the angle of incidence for the bounce.
-- this is the angle between the surface normal and
-- the direction of travel for the bead.
aInc = angleVV vNormal (negate vIn)
-- aInc2 is the angle between the wall /surface/ and
-- the direction of travel.
aInc2 = (pi / 2) - aInc
-- take the determinant between the surface normal and the direction of travel.
-- This will tell us what direction the bead hit the wall.
-- The diagram shows the sign of the determinant for the four possiblities.
--
-- \ +ve -ve /
-- \ /
-- \/ \/
-- pWall1 ---------- pWall2 pWall1 ---------- pWall2
-- /\ /\
-- / \
-- / -ve +ve \
--
determinant = detV vIn vNormal
-- Use the determinant to rotate the bead's velocity vector for the bounce.
vOut
| determinant > 0 = rotateV (2 * aInc2) vIn
| otherwise = rotateV (negate (2 * aInc2)) vIn
-- Slow down the bead when it hits the wall
vSlow = velLoss `mulSV` vOut
bead1_new = Bead ix mode radius pBead_new vSlow
in bead1_new
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss-examples/picture/Styrene/Collide.hs | mit | 5,094 | 148 | 16 | 1,312 | 1,085 | 601 | 484 | 85 | 1 |
module Main where
import Criterion.Main
import Data.CReal.Internal
main :: IO ()
main = defaultMain [ bgroup "pi" [ bench "0" $ whnf (pi `atPrecision`) 0
, bench "4" $ whnf (pi `atPrecision`) 4
, bench "16" $ whnf (pi `atPrecision`) 16
, bench "64" $ whnf (pi `atPrecision`) 64
, bench "256" $ whnf (pi `atPrecision`) 256
, bench "1024" $ whnf (pi `atPrecision`) 1024
]
, bgroup "sin 1" [ bench "0" $ whnf (sin 1 `atPrecision`) 0
, bench "4" $ whnf (sin 1 `atPrecision`) 4
, bench "16" $ whnf (sin 1 `atPrecision`) 16
, bench "64" $ whnf (sin 1 `atPrecision`) 64
, bench "256" $ whnf (sin 1 `atPrecision`) 256
, bench "1024" $ whnf (sin 1 `atPrecision`) 1024
]
, bgroup "sin (π/4)" [ bench "0" $ whnf (sin (pi/4) `atPrecision`) 0
, bench "4" $ whnf (sin (pi/4) `atPrecision`) 4
, bench "16" $ whnf (sin (pi/4) `atPrecision`) 16
, bench "64" $ whnf (sin (pi/4) `atPrecision`) 64
, bench "256" $ whnf (sin (pi/4) `atPrecision`) 256
, bench "1024" $ whnf (sin (pi/4) `atPrecision`) 1024
]
, bgroup "asin (π/4)" [ bench "0" $ whnf (asin (pi/4) `atPrecision`) 0
, bench "4" $ whnf (asin (pi/4) `atPrecision`) 4
, bench "16" $ whnf (asin (pi/4) `atPrecision`) 16
, bench "64" $ whnf (asin (pi/4) `atPrecision`) 64
, bench "256" $ whnf (asin (pi/4) `atPrecision`) 256
, bench "1024" $ whnf (asin (pi/4) `atPrecision`) 1024
]
]
| expipiplus1/exact-real | bench/Bench.hs | mit | 2,341 | 0 | 14 | 1,263 | 707 | 389 | 318 | 28 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ConstraintKinds #-}
module EndToEndSpec (spec) where
import Prelude hiding (writeFile)
import qualified Prelude
import Helper
import Test.HUnit
import System.Directory (canonicalizePath, createDirectory)
import Data.Maybe
import Data.List
import Data.String.Interpolate
import Data.String.Interpolate.Util
import Data.Version (showVersion)
import qualified Hpack.Render as Hpack
import Hpack.Config (packageConfig, readPackageConfig, DecodeOptions(..), DecodeResult(..), defaultDecodeOptions)
import Hpack.Render.Hints (FormattingHints(..), sniffFormattingHints)
import qualified Paths_hpack as Hpack (version)
writeFile :: FilePath -> String -> IO ()
writeFile file c = touch file >> Prelude.writeFile file c
spec :: Spec
spec = around_ (inTempDirectoryNamed "foo") $ do
describe "hpack" $ do
it "ignores fields that start with an underscore" $ do
[i|
_foo:
bar: 23
library: {}
|] `shouldRenderTo` library [i|
other-modules:
Paths_foo
|]
describe "tested-with" $ do
it "accepts a string" $ do
[i|
tested-with: GHC == 7.0.4
|] `shouldRenderTo` package [i|
tested-with:
GHC == 7.0.4
|]
it "accepts a list" $ do
[i|
tested-with:
- GHC == 7.0.4
- GHC == 7.2.2
- GHC == 7.4.2
|] `shouldRenderTo` package [i|
tested-with:
GHC == 7.0.4
, GHC == 7.2.2
, GHC == 7.4.2
|]
it "warns on duplicate fields" $ do
[i|
name: foo
name: foo
|] `shouldWarn` [
"package.yaml: Duplicate field $.name"
]
describe "handling of Paths_ module" $ do
it "adds Paths_ to other-modules" $ do
[i|
library: {}
|] `shouldRenderTo` library [i|
other-modules:
Paths_foo
|]
context "when cabal-version is >= 2" $ do
it "adds Paths_ to autogen-modules" $ do
[i|
verbatim:
cabal-version: 2.0
library: {}
|] `shouldRenderTo` (library [i|
other-modules:
Paths_foo
autogen-modules:
Paths_foo
|]) { packageCabalVersion = "2.0" }
context "when Paths_ module is listed explicitly under generated-other-modules" $ do
it "adds Paths_ to autogen-modules only once" $ do
[i|
verbatim:
cabal-version: 2.0
library:
generated-other-modules: Paths_foo
|] `shouldRenderTo` (library [i|
other-modules:
Paths_foo
autogen-modules:
Paths_foo
|]) { packageCabalVersion = "2.0" }
context "when Paths_ module is listed explicitly under generated-exposed-modules" $ do
it "adds Paths_ to autogen-modules only once" $ do
[i|
verbatim:
cabal-version: 2.0
library:
generated-exposed-modules: Paths_foo
|] `shouldRenderTo` (library [i|
exposed-modules:
Paths_foo
autogen-modules:
Paths_foo
|]) { packageCabalVersion = "2.0" }
context "when Paths_ is mentioned in a conditional that is always false" $ do
it "does not add Paths_" $ do
[i|
library:
when:
- condition: false
other-modules: Paths_foo
|] `shouldRenderTo` library [i|
|]
context "with RebindableSyntax and OverloadedStrings or OverloadedStrings" $ do
it "infers cabal-version 2.2" $ do
[i|
default-extensions: [RebindableSyntax, OverloadedStrings]
library: {}
|] `shouldRenderTo` (library [i|
default-extensions:
RebindableSyntax
OverloadedStrings
other-modules:
Paths_foo
autogen-modules:
Paths_foo
|]) {packageCabalVersion = "2.2"}
context "when Paths_ is mentioned in a conditional that is always false" $ do
it "does not infer cabal-version 2.2" $ do
[i|
default-extensions: [RebindableSyntax, OverloadedStrings]
library:
when:
- condition: false
other-modules: Paths_foo
|] `shouldRenderTo` (library [i|
default-extensions:
RebindableSyntax
OverloadedStrings
|])
describe "spec-version" $ do
it "accepts spec-version" $ do
[i|
spec-version: 0.29.5
|] `shouldRenderTo` package [i|
|]
it "fails on malformed spec-version" $ do
[i|
spec-version: foo
|] `shouldFailWith` "package.yaml: Error while parsing $.spec-version - invalid value \"foo\""
it "fails on unsupported spec-version" $ do
[i|
spec-version: 25.0
|] `shouldFailWith` ("The file package.yaml requires version 25.0 of the Hpack package specification, however this version of hpack only supports versions up to " ++ showVersion Hpack.version ++ ". Upgrading to the latest version of hpack may resolve this issue.")
it "fails on unsupported spec-version from defaults" $ do
let file = joinPath ["defaults", "sol", "hpack-template", "2017", "defaults.yaml"]
writeFile file [i|
spec-version: 25.0
|]
[i|
defaults:
github: sol/hpack-template
path: defaults.yaml
ref: "2017"
library: {}
|] `shouldFailWith` ("The file " ++ file ++ " requires version 25.0 of the Hpack package specification, however this version of hpack only supports versions up to " ++ showVersion Hpack.version ++ ". Upgrading to the latest version of hpack may resolve this issue.")
describe "data-files" $ do
it "accepts data-files" $ do
touch "data/foo/index.html"
touch "data/bar/index.html"
[i|
data-files:
- data/**/*.html
|] `shouldRenderTo` package [i|
data-files:
data/bar/index.html
data/foo/index.html
|]
describe "data-dir" $ do
it "accepts data-dir" $ do
touch "data/foo.html"
touch "data/bar.html"
[i|
data-dir: data
data-files:
- "*.html"
|] `shouldRenderTo` package [i|
data-files:
bar.html
foo.html
data-dir: data
|]
describe "github" $ do
it "accepts owner/repo" $ do
[i|
github: sol/hpack
|] `shouldRenderTo` package [i|
homepage: https://github.com/sol/hpack#readme
bug-reports: https://github.com/sol/hpack/issues
source-repository head
type: git
location: https://github.com/sol/hpack
|]
it "accepts owner/repo/path" $ do
[i|
github: hspec/hspec/hspec-core
|] `shouldRenderTo` package [i|
homepage: https://github.com/hspec/hspec#readme
bug-reports: https://github.com/hspec/hspec/issues
source-repository head
type: git
location: https://github.com/hspec/hspec
subdir: hspec-core
|]
it "rejects URLs" $ do
[i|
github: https://github.com/sol/hpack/issues/365
|] `shouldFailWith` "package.yaml: Error while parsing $.github - expected owner/repo or owner/repo/subdir, but encountered \"https://github.com/sol/hpack/issues/365\""
describe "homepage" $ do
it "accepts homepage URL" $ do
[i|
homepage: https://example.com/
|] `shouldRenderTo` package [i|
homepage: https://example.com/
|]
context "with github" $ do
it "gives homepage URL precedence" $ do
[i|
github: hspec/hspec
homepage: https://example.com/
|] `shouldRenderTo` package [i|
homepage: https://example.com/
bug-reports: https://github.com/hspec/hspec/issues
source-repository head
type: git
location: https://github.com/hspec/hspec
|]
it "omits homepage URL if it is null" $ do
[i|
github: hspec/hspec
homepage: null
|] `shouldRenderTo` package [i|
bug-reports: https://github.com/hspec/hspec/issues
source-repository head
type: git
location: https://github.com/hspec/hspec
|]
describe "bug-reports" $ do
it "accepts bug-reports URL" $ do
[i|
bug-reports: https://example.com/
|] `shouldRenderTo` package [i|
bug-reports: https://example.com/
|]
context "with github" $ do
it "gives bug-reports URL precedence" $ do
[i|
github: hspec/hspec
bug-reports: https://example.com/
|] `shouldRenderTo` package [i|
homepage: https://github.com/hspec/hspec#readme
bug-reports: https://example.com/
source-repository head
type: git
location: https://github.com/hspec/hspec
|]
it "omits bug-reports URL if it is null" $ do
[i|
github: hspec/hspec
bug-reports: null
|] `shouldRenderTo` package [i|
homepage: https://github.com/hspec/hspec#readme
source-repository head
type: git
location: https://github.com/hspec/hspec
|]
describe "defaults" $ do
it "accepts global defaults" $ do
writeFile "defaults/sol/hpack-template/2017/defaults.yaml" [i|
default-extensions:
- RecordWildCards
- DeriveFunctor
|]
[i|
defaults:
github: sol/hpack-template
path: defaults.yaml
ref: "2017"
library: {}
|] `shouldRenderTo` library_ [i|
default-extensions:
RecordWildCards
DeriveFunctor
|]
it "accepts library defaults" $ do
writeFile "defaults/sol/hpack-template/2017/defaults.yaml" [i|
exposed-modules: Foo
|]
[i|
library:
defaults:
github: sol/hpack-template
path: defaults.yaml
ref: "2017"
|] `shouldRenderTo` library [i|
exposed-modules:
Foo
other-modules:
Paths_foo
|]
it "accepts a list of defaults" $ do
writeFile "defaults/foo/bar/v1/.hpack/defaults.yaml" "default-extensions: RecordWildCards"
writeFile "defaults/foo/bar/v2/.hpack/defaults.yaml" "default-extensions: DeriveFunctor"
[i|
defaults:
- foo/bar@v1
- foo/bar@v2
library: {}
|] `shouldRenderTo` library_ [i|
default-extensions:
RecordWildCards
DeriveFunctor
|]
it "accepts defaults recursively" $ do
writeFile "defaults/foo/bar/v1/.hpack/defaults.yaml" "defaults: foo/bar@v2"
writeFile "defaults/foo/bar/v2/.hpack/defaults.yaml" "default-extensions: DeriveFunctor"
[i|
defaults: foo/bar@v1
library: {}
|] `shouldRenderTo` library_ [i|
default-extensions:
DeriveFunctor
|]
it "fails on cyclic defaults" $ do
let
file1 = "defaults/foo/bar/v1/.hpack/defaults.yaml"
file2 = "defaults/foo/bar/v2/.hpack/defaults.yaml"
writeFile file1 "defaults: foo/bar@v2"
writeFile file2 "defaults: foo/bar@v1"
canonic1 <- canonicalizePath file1
canonic2 <- canonicalizePath file2
[i|
defaults: foo/bar@v1
library: {}
|] `shouldFailWith` [i|cycle in defaults (#{canonic1} -> #{canonic2} -> #{canonic1})|]
it "fails if defaults don't exist" $ do
pending
[i|
defaults:
github: sol/foo
ref: bar
library: {}
|] `shouldFailWith` "Invalid value for \"defaults\"! File https://raw.githubusercontent.com/sol/foo/bar/.hpack/defaults.yaml does not exist!"
it "fails on parse error" $ do
let file = joinPath ["defaults", "sol", "hpack-template", "2017", "defaults.yaml"]
writeFile file "[]"
[i|
defaults:
github: sol/hpack-template
path: defaults.yaml
ref: "2017"
library: {}
|] `shouldFailWith` (file ++ ": Error while parsing $ - expected Object, but encountered Array")
it "warns on unknown fields" $ do
let file = joinPath ["defaults", "sol", "hpack-template", "2017", "defaults.yaml"]
writeFile file "foo: bar"
[i|
name: foo
defaults:
github: sol/hpack-template
path: defaults.yaml
ref: "2017"
bar: baz
library: {}
|] `shouldWarn` [
"package.yaml: Ignoring unrecognized field $.defaults.bar"
, file ++ ": Ignoring unrecognized field $.foo"
]
it "accepts defaults from local files" $ do
writeFile "defaults/foo.yaml" [i|
defaults:
local: bar.yaml
|]
writeFile "defaults/bar.yaml" [i|
default-extensions:
- RecordWildCards
- DeriveFunctor
|]
[i|
defaults:
local: defaults/foo.yaml
library: {}
|] `shouldRenderTo` library [i|
other-modules:
Paths_foo
default-extensions:
RecordWildCards
DeriveFunctor
|]
describe "version" $ do
it "accepts string" $ do
[i|
version: 0.1.0
|] `shouldRenderTo` (package "") {packageVersion = "0.1.0"}
it "accepts number" $ do
[i|
version: 0.1
|] `shouldRenderTo` (package [i|
|]) {packageVersion = "0.1"}
it "rejects other values" $ do
[i|
version: {}
|] `shouldFailWith` "package.yaml: Error while parsing $.version - expected Number or String, but encountered Object"
describe "license" $ do
it "accepts cabal-style licenses" $ do
[i|
license: BSD3
|] `shouldRenderTo` (package [i|
license: BSD3
|])
it "accepts SPDX licenses" $ do
[i|
license: BSD-3-Clause
|] `shouldRenderTo` (package [i|
license: BSD-3-Clause
|]) {packageCabalVersion = "2.2"}
context "with an ambiguous license" $ do
it "treats it as a cabal-style license" $ do
[i|
license: MIT
|] `shouldRenderTo` (package [i|
license: MIT
|])
context "when cabal-version >= 2.2" $ do
it "maps license to SPDX license identifier" $ do
[i|
license: BSD3
library:
cxx-options: -Wall
|] `shouldRenderTo` (package [i|
license: BSD-3-Clause
library
other-modules:
Paths_foo
autogen-modules:
Paths_foo
cxx-options: -Wall
default-language: Haskell2010
|]) {packageCabalVersion = "2.2"}
it "doesn't touch unknown licenses" $ do
[i|
license: some-license
library:
cxx-options: -Wall
|] `shouldRenderTo` (package [i|
license: some-license
library
other-modules:
Paths_foo
autogen-modules:
Paths_foo
cxx-options: -Wall
default-language: Haskell2010
|]) {packageCabalVersion = "2.2"}
context "with a LICENSE file" $ do
before_ (writeFile "LICENSE" license) $ do
it "infers license" $ do
[i|
|] `shouldRenderTo` (package [i|
license-file: LICENSE
license: MIT
|])
context "when license can not be inferred" $ do
it "warns" $ do
writeFile "LICENSE" "some-licenese"
[i|
name: foo
|] `shouldWarn` ["Inferring license from file LICENSE failed!"]
context "when license is null" $ do
it "does not infer license" $ do
[i|
license: null
|] `shouldRenderTo` (package [i|
license-file: LICENSE
|])
describe "build-type" $ do
it "accept Simple" $ do
[i|
build-type: Simple
|] `shouldRenderTo` (package "") {packageBuildType = "Simple"}
it "accept Configure" $ do
[i|
build-type: Configure
|] `shouldRenderTo` (package "") {packageBuildType = "Configure"}
it "accept Make" $ do
[i|
build-type: Make
|] `shouldRenderTo` (package "") {packageBuildType = "Make"}
it "accept Custom" $ do
[i|
build-type: Custom
|] `shouldRenderTo` (package "") {packageBuildType = "Custom"}
it "rejects invalid values" $ do
[i|
build-type: foo
|] `shouldFailWith` "package.yaml: Error while parsing $.build-type - expected one of Simple, Configure, Make, or Custom"
describe "extra-doc-files" $ do
it "accepts a list of files" $ do
touch "CHANGES.markdown"
touch "README.markdown"
[i|
extra-doc-files:
- CHANGES.markdown
- README.markdown
|] `shouldRenderTo` (package [i|
extra-doc-files:
CHANGES.markdown
README.markdown
|]) {packageCabalVersion = "1.18"}
it "accepts glob patterns" $ do
touch "CHANGES.markdown"
touch "README.markdown"
[i|
extra-doc-files:
- "*.markdown"
|] `shouldRenderTo` (package [i|
extra-doc-files:
CHANGES.markdown
README.markdown
|]) {packageCabalVersion = "1.18"}
it "warns if a glob pattern does not match anything" $ do
[i|
name: foo
extra-doc-files:
- "*.markdown"
|] `shouldWarn` ["Specified pattern \"*.markdown\" for extra-doc-files does not match any files"]
describe "build-tools" $ do
it "adds known build tools to build-tools" $ do
[i|
executable:
build-tools:
alex == 0.1.0
|] `shouldRenderTo` executable_ "foo" [i|
build-tools:
alex ==0.1.0
|]
it "adds other build tools to build-tool-depends" $ do
[i|
executable:
build-tools:
hspec-discover: 0.1.0
|] `shouldRenderTo` (executable_ "foo" [i|
build-tool-depends:
hspec-discover:hspec-discover ==0.1.0
|]) {
-- NOTE: We do not set this to 2.0 on purpose, so that the .cabal
-- file is compatible with a wider range of Cabal versions!
packageCabalVersion = "1.12"
}
context "when the name of a build tool matches an executable from the same package" $ do
it "adds it to build-tools" $ do
[i|
executables:
bar:
build-tools:
- bar
|] `shouldRenderTo` executable_ "bar" [i|
build-tools:
bar
|]
it "gives per-section unqualified names precedence over global qualified names" $ do
[i|
build-tools:
- foo:bar == 0.1.0
executables:
bar:
build-tools:
- bar == 0.2.0
|] `shouldRenderTo` executable_ "bar" [i|
build-tools:
bar ==0.2.0
|]
it "gives per-section qualified names precedence over global unqualified names" $ do
[i|
build-tools:
- bar == 0.1.0
executables:
bar:
build-tools:
- foo:bar == 0.2.0
|] `shouldRenderTo` executable_ "bar" [i|
build-tools:
bar ==0.2.0
|]
context "when the name of a build tool matches a legacy system build tool" $ do
it "adds it to build-tools" $ do
[i|
executable:
build-tools:
ghc >= 7.10
|] `shouldRenderTo` (executable_ "foo" [i|
build-tools:
ghc >=7.10
|]) { packageWarnings = ["Listing \"ghc\" under build-tools is deperecated! Please list system executables under system-build-tools instead!"] }
describe "system-build-tools" $ do
it "adds system build tools to build-tools" $ do
[i|
executable:
system-build-tools:
ghc >= 7.10
|] `shouldRenderTo` executable_ "foo" [i|
build-tools:
ghc >=7.10
|]
context "with hpc" $ do
it "infers cabal-version 1.14" $ do
[i|
executable:
system-build-tools:
hpc
|] `shouldRenderTo` (executable_ "foo" [i|
build-tools:
hpc
|]) {packageCabalVersion = "1.14"}
context "with ghcjs" $ do
it "infers cabal-version 1.22" $ do
[i|
executable:
system-build-tools:
ghcjs
|] `shouldRenderTo` (executable_ "foo" [i|
build-tools:
ghcjs
|]) {packageCabalVersion = "1.22"}
context "with an unknown system build tool" $ do
it "infers cabal-version 2.0" $ do
[i|
executable:
system-build-tools:
g++ >= 5.4.0
|] `shouldRenderTo` (executable_ "foo" [i|
autogen-modules:
Paths_foo
build-tools:
g++ >=5.4.0
|]) {packageCabalVersion = "2.0"}
describe "dependencies" $ do
it "accepts single dependency" $ do
[i|
executable:
dependencies: base
|] `shouldRenderTo` executable_ "foo" [i|
build-depends:
base
|]
it "accepts dependencies with subcomponents" $ do
[i|
executable:
dependencies: foo:bar
|] `shouldRenderTo` (executable_ "foo" [i|
autogen-modules:
Paths_foo
build-depends:
foo:bar
|]) {packageCabalVersion = "3.0"}
it "accepts list of dependencies" $ do
[i|
executable:
dependencies:
- base
- transformers
|] `shouldRenderTo` executable_ "foo" [i|
build-depends:
base
, transformers
|]
context "with both global and section specific dependencies" $ do
it "combines dependencies" $ do
[i|
dependencies:
- base
executable:
dependencies: hspec
|] `shouldRenderTo` executable_ "foo" [i|
build-depends:
base
, hspec
|]
it "gives section specific dependencies precedence" $ do
[i|
dependencies:
- base
executable:
dependencies: base >= 2
|] `shouldRenderTo` executable_ "foo" [i|
build-depends:
base >=2
|]
describe "pkg-config-dependencies" $ do
it "accepts pkg-config-dependencies" $ do
[i|
pkg-config-dependencies:
- QtWebKit
- weston
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
pkgconfig-depends:
QtWebKit
, weston
|]
describe "include-dirs" $ do
it "accepts include-dirs" $ do
[i|
include-dirs:
- foo
- bar
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
include-dirs:
foo
bar
|]
describe "install-includes" $ do
it "accepts install-includes" $ do
[i|
install-includes:
- foo.h
- bar.h
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
install-includes:
foo.h
bar.h
|]
describe "js-sources" $ before_ (touch "foo.js" >> touch "jsbits/bar.js") $ do
it "accepts js-sources" $ do
[i|
executable:
js-sources:
- foo.js
- jsbits/*.js
|] `shouldRenderTo` executable_ "foo" [i|
js-sources:
foo.js
jsbits/bar.js
|]
it "accepts global js-sources" $ do
[i|
js-sources:
- foo.js
- jsbits/*.js
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
js-sources:
foo.js
jsbits/bar.js
|]
describe "cxx-options" $ do
it "accepts cxx-options" $ do
[i|
executable:
cxx-options: -Wall
|] `shouldRenderTo` (executable_ "foo" [i|
autogen-modules:
Paths_foo
cxx-options: -Wall
|]) {packageCabalVersion = "2.2"}
context "when used inside a nested conditional" $ do
it "infers correct cabal-version" $ do
[i|
executable:
when:
condition: True
when:
condition: True
when:
condition: True
cxx-options: -Wall
|] `shouldRenderTo` (executable_ "foo" [i|
autogen-modules:
Paths_foo
if true
if true
if true
cxx-options: -Wall
|]) {packageCabalVersion = "2.2"}
describe "cxx-sources" $ before_ (touch "foo.cc" >> touch "cxxbits/bar.cc") $ do
it "accepts cxx-sources" $ do
[i|
executable:
cxx-sources:
- foo.cc
- cxxbits/*.cc
|] `shouldRenderTo` (executable_ "foo" [i|
autogen-modules:
Paths_foo
cxx-sources:
foo.cc
cxxbits/bar.cc
|]) {packageCabalVersion = "2.2"}
describe "extra-lib-dirs" $ do
it "accepts extra-lib-dirs" $ do
[i|
extra-lib-dirs:
- foo
- bar
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
extra-lib-dirs:
foo
bar
|]
describe "extra-libraries" $ do
it "accepts extra-libraries" $ do
[i|
extra-libraries:
- foo
- bar
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
extra-libraries:
foo
bar
|]
describe "extra-frameworks-dirs" $ do
it "accepts extra-frameworks-dirs" $ do
[i|
extra-frameworks-dirs:
- foo
- bar
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
extra-frameworks-dirs:
foo
bar
|]
describe "frameworks" $ do
it "accepts frameworks" $ do
[i|
frameworks:
- foo
- bar
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
frameworks:
foo
bar
|]
describe "c-sources" $ before_ (touch "cbits/foo.c" >> touch "cbits/bar.c" >> touch "cbits/baz.c") $ do
it "keeps declaration order" $ do
-- IMPORTANT: This is crucial as a workaround for https://ghc.haskell.org/trac/ghc/ticket/13786
[i|
library:
c-sources:
- cbits/foo.c
- cbits/bar.c
- cbits/baz.c
|] `shouldRenderTo` library_ [i|
c-sources:
cbits/foo.c
cbits/bar.c
cbits/baz.c
|]
it "accepts glob patterns" $ do
[i|
library:
c-sources: cbits/*.c
|] `shouldRenderTo` library_ [i|
c-sources:
cbits/bar.c
cbits/baz.c
cbits/foo.c
|]
it "warns when a glob pattern does not match any files" $ do
[i|
name: foo
library:
c-sources: foo/*.c
|] `shouldWarn` pure "Specified pattern \"foo/*.c\" for c-sources does not match any files"
it "quotes filenames with special characters" $ do
touch "cbits/foo bar.c"
[i|
library:
c-sources:
- cbits/foo bar.c
|] `shouldRenderTo` library_ [i|
c-sources:
"cbits/foo bar.c"
|]
describe "custom-setup" $ do
it "warns on unknown fields" $ do
[i|
name: foo
custom-setup:
foo: 1
bar: 2
|] `shouldWarn` [
"package.yaml: Ignoring unrecognized field $.custom-setup.bar"
, "package.yaml: Ignoring unrecognized field $.custom-setup.foo"
]
it "accepts dependencies" $ do
[i|
custom-setup:
dependencies:
- base
|] `shouldRenderTo` customSetup [i|
setup-depends:
base
|]
it "leaves build-type alone, if it exists" $ do
[i|
build-type: Make
custom-setup:
dependencies:
- base
|] `shouldRenderTo` (customSetup [i|
setup-depends:
base
|]) {packageBuildType = "Make"}
describe "library" $ do
it "accepts reexported-modules" $ do
[i|
library:
reexported-modules: Baz
|] `shouldRenderTo` (library_ [i|
reexported-modules:
Baz
|]) {packageCabalVersion = "1.22"}
it "accepts signatures" $ do
[i|
library:
signatures: Foo
|] `shouldRenderTo` (library_ [i|
autogen-modules:
Paths_foo
signatures:
Foo
|]) {packageCabalVersion = "2.0"}
context "when package.yaml contains duplicate modules" $ do
it "generates a cabal file with duplicate modules" $ do
-- garbage in, garbage out
[i|
library:
exposed-modules: Foo
other-modules: Foo
|] `shouldRenderTo` library [i|
exposed-modules:
Foo
other-modules:
Foo
|]
context "with mixins" $ do
it "infers cabal-version 2.0" $ do
[i|
library:
dependencies:
foo:
mixin:
- (Blah as Etc)
|] `shouldRenderTo` (library [i|
other-modules:
Paths_foo
autogen-modules:
Paths_foo
build-depends:
foo
mixins:
foo (Blah as Etc)
|]) {packageCabalVersion = "2.0"}
describe "internal-libraries" $ do
it "accepts internal-libraries" $ do
touch "src/Foo.hs"
[i|
internal-libraries:
bar:
source-dirs: src
|] `shouldRenderTo` internalLibrary "bar" [i|
exposed-modules:
Foo
other-modules:
Paths_foo
autogen-modules:
Paths_foo
hs-source-dirs:
src
|]
it "warns on unknown fields" $ do
[i|
name: foo
internal-libraries:
bar:
baz: 42
|] `shouldWarn` pure "package.yaml: Ignoring unrecognized field $.internal-libraries.bar.baz"
it "warns on missing source-dirs" $ do
[i|
name: foo
internal-libraries:
bar:
source-dirs: src
|] `shouldWarn` pure "Specified source-dir \"src\" does not exist"
it "accepts visibility" $ do
[i|
internal-libraries:
bar:
visibility: public
|] `shouldRenderTo` (internalLibrary "bar" [i|
visibility: public
other-modules:
Paths_foo
autogen-modules:
Paths_foo
|]) {packageCabalVersion = "3.0"}
context "when inferring modules" $ do
context "with a library" $ do
it "ignores duplicate source directories" $ do
touch "src/Foo.hs"
[i|
source-dirs: src
library:
source-dirs: src
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Foo
other-modules:
Paths_foo
|]
it "ignores duplicate modules" $ do
touch "src/Foo.hs"
touch "src/Foo.x"
[i|
library:
source-dirs: src
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Foo
other-modules:
Paths_foo
|]
context "with exposed-modules" $ do
it "infers other-modules" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
library:
source-dirs: src
exposed-modules: Foo
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Foo
other-modules:
Bar
Paths_foo
|]
context "with other-modules" $ do
it "infers exposed-modules" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
library:
source-dirs: src
other-modules: Bar
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Foo
other-modules:
Bar
|]
context "with both exposed-modules and other-modules" $ do
it "doesn't infer any modules" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
library:
source-dirs: src
exposed-modules: Foo
other-modules: Bar
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Foo
other-modules:
Bar
|]
context "with neither exposed-modules nor other-modules" $ do
it "infers exposed-modules" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
library:
source-dirs: src
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
exposed-modules:
Bar
Foo
other-modules:
Paths_foo
|]
context "with a conditional" $ do
it "doesn't infer any modules mentioned in that conditional" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
library:
source-dirs: src
when:
condition: os(windows)
exposed-modules:
- Foo
- Paths_foo
|] `shouldRenderTo` library [i|
hs-source-dirs:
src
if os(windows)
exposed-modules:
Foo
Paths_foo
exposed-modules:
Bar
|]
context "with a source-dir inside the conditional" $ do
it "infers other-modules" $ do
touch "windows/Foo.hs"
[i|
library:
when:
condition: os(windows)
source-dirs: windows
|] `shouldRenderTo` library [i|
other-modules:
Paths_foo
if os(windows)
other-modules:
Foo
hs-source-dirs:
windows
|]
it "does not infer outer modules" $ do
touch "windows/Foo.hs"
touch "unix/Foo.hs"
[i|
library:
exposed-modules: Foo
when:
condition: os(windows)
then:
source-dirs: windows/
else:
source-dirs: unix/
|] `shouldRenderTo` library [i|
exposed-modules:
Foo
other-modules:
Paths_foo
if os(windows)
hs-source-dirs:
windows/
else
hs-source-dirs:
unix/
|]
context "with generated modules" $ do
it "includes generated modules in autogen-modules" $ do
[i|
library:
generated-exposed-modules: Foo
generated-other-modules: Bar
|] `shouldRenderTo` (library [i|
exposed-modules:
Foo
other-modules:
Paths_foo
Bar
autogen-modules:
Paths_foo
Foo
Bar
|]) {packageCabalVersion = "2.0"}
it "does not infer any mentioned generated modules" $ do
touch "src/Exposed.hs"
touch "src/Other.hs"
[i|
library:
source-dirs: src
generated-exposed-modules: Exposed
generated-other-modules: Other
|] `shouldRenderTo` (library [i|
hs-source-dirs:
src
exposed-modules:
Exposed
other-modules:
Paths_foo
Other
autogen-modules:
Paths_foo
Exposed
Other
|]) {packageCabalVersion = "2.0"}
it "does not infer any generated modules mentioned inside conditionals" $ do
touch "src/Exposed.hs"
touch "src/Other.hs"
[i|
library:
source-dirs: src
when:
condition: os(windows)
generated-exposed-modules: Exposed
generated-other-modules: Other
|] `shouldRenderTo` (library [i|
other-modules:
Paths_foo
autogen-modules:
Paths_foo
hs-source-dirs:
src
if os(windows)
exposed-modules:
Exposed
other-modules:
Other
autogen-modules:
Other
Exposed
|]) {packageCabalVersion = "2.0"}
context "with an executable" $ do
it "infers other-modules" $ do
touch "src/Main.hs"
touch "src/Foo.hs"
[i|
executables:
foo:
main: Main.hs
source-dirs: src
|] `shouldRenderTo` executable "foo" [i|
main-is: Main.hs
hs-source-dirs:
src
other-modules:
Foo
Paths_foo
|]
it "allows to specify other-modules" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
executables:
foo:
main: Main.hs
source-dirs: src
other-modules: Baz
|] `shouldRenderTo` executable "foo" [i|
main-is: Main.hs
hs-source-dirs:
src
other-modules:
Baz
|]
it "does not infer any mentioned generated modules" $ do
touch "src/Foo.hs"
[i|
executables:
foo:
main: Main.hs
source-dirs: src
generated-other-modules: Foo
|] `shouldRenderTo` (executable "foo" [i|
main-is: Main.hs
hs-source-dirs:
src
other-modules:
Paths_foo
Foo
autogen-modules:
Paths_foo
Foo
|]) {packageCabalVersion = "2.0"}
context "with a conditional" $ do
it "doesn't infer any modules mentioned in that conditional" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
executables:
foo:
source-dirs: src
when:
condition: os(windows)
other-modules: Foo
|] `shouldRenderTo` executable "foo" [i|
other-modules:
Bar
Paths_foo
hs-source-dirs:
src
if os(windows)
other-modules:
Foo
|]
it "infers other-modules" $ do
touch "src/Foo.hs"
touch "windows/Bar.hs"
[i|
executables:
foo:
source-dirs: src
when:
condition: os(windows)
source-dirs: windows
|] `shouldRenderTo` executable "foo" [i|
other-modules:
Foo
Paths_foo
hs-source-dirs:
src
if os(windows)
other-modules:
Bar
hs-source-dirs:
windows
|]
describe "executables" $ do
it "accepts arbitrary entry points as main" $ do
touch "src/Foo.hs"
touch "src/Bar.hs"
[i|
executables:
foo:
source-dirs: src
main: Foo
|] `shouldRenderTo` executable "foo" [i|
main-is: Foo.hs
ghc-options: -main-is Foo
hs-source-dirs:
src
other-modules:
Bar
Paths_foo
|]
context "with a conditional" $ do
it "does not apply global options" $ do
-- related bug: https://github.com/sol/hpack/issues/214
[i|
ghc-options: -Wall
executables:
foo:
when:
condition: os(windows)
main: Foo.hs
|] `shouldRenderTo` executable_ "foo" [i|
ghc-options: -Wall
if os(windows)
main-is: Foo.hs
|]
it "accepts executable-specific fields" $ do
[i|
executables:
foo:
when:
condition: os(windows)
main: Foo
|] `shouldRenderTo` executable_ "foo" [i|
if os(windows)
main-is: Foo.hs
ghc-options: -main-is Foo
|]
describe "when" $ do
it "accepts conditionals" $ do
[i|
when:
condition: os(windows)
dependencies: Win32
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
if os(windows)
build-depends:
Win32
|]
it "warns on unknown fields" $ do
[i|
name: foo
foo: 23
when:
- condition: os(windows)
bar: 23
when:
condition: os(windows)
bar2: 23
- condition: os(windows)
baz: 23
|] `shouldWarn` [
"package.yaml: Ignoring unrecognized field $.foo"
, "package.yaml: Ignoring unrecognized field $.when[0].bar"
, "package.yaml: Ignoring unrecognized field $.when[0].when.bar2"
, "package.yaml: Ignoring unrecognized field $.when[1].baz"
]
context "when parsing conditionals with else-branch" $ do
it "accepts conditionals with else-branch" $ do
[i|
when:
condition: os(windows)
then:
dependencies: Win32
else:
dependencies: unix
executable: {}
|] `shouldRenderTo` executable_ "foo" [i|
if os(windows)
build-depends:
Win32
else
build-depends:
unix
|]
context "with empty then-branch" $ do
it "provides a hint" $ do
[i|
when:
condition: os(windows)
then: {}
else:
dependencies: unix
executable: {}
|] `shouldFailWith` unlines [
"package.yaml: Error while parsing $.when - an empty \"then\" section is not allowed, try the following instead:"
, ""
, "when:"
, " condition: '!(os(windows))'"
, " dependencies: unix"
]
context "with empty else-branch" $ do
it "provides a hint" $ do
[i|
when:
condition: os(windows)
then:
dependencies: Win32
else: {}
executable: {}
|] `shouldFailWith` unlines [
"package.yaml: Error while parsing $.when - an empty \"else\" section is not allowed, try the following instead:"
, ""
, "when:"
, " condition: os(windows)"
, " dependencies: Win32"
]
it "rejects invalid conditionals" $ do
[i|
when:
condition: os(windows)
then:
dependencies: Win32
else: null
|] `shouldFailWith` "package.yaml: Error while parsing $.when.else - expected Object, but encountered Null"
it "rejects invalid conditionals" $ do
[i|
dependencies:
- foo
- 23
|] `shouldFailWith` "package.yaml: Error while parsing $.dependencies[1] - expected Object or String, but encountered Number"
it "warns on unknown fields" $ do
[i|
name: foo
when:
condition: os(windows)
foo: null
then:
bar: null
else:
when:
condition: os(windows)
then:
dependencies: foo
else:
baz: null
|] `shouldWarn` [
"package.yaml: Ignoring unrecognized field $.when.foo"
, "package.yaml: Ignoring unrecognized field $.when.then.bar"
, "package.yaml: Ignoring unrecognized field $.when.else.when.else.baz"
]
describe "verbatim" $ do
it "accepts strings" $ do
[i|
library:
verbatim: |
foo: 23
bar: 42
|] `shouldRenderTo` package [i|
library
other-modules:
Paths_foo
default-language: Haskell2010
foo: 23
bar: 42
|]
it "accepts multi-line strings as field values" $ do
[i|
library:
verbatim:
build-depneds: |
foo
bar
baz
|] `shouldRenderTo` package [i|
library
other-modules:
Paths_foo
default-language: Haskell2010
build-depneds:
foo
bar
baz
|]
it "allows to null out existing fields" $ do
[i|
library:
verbatim:
default-language: null
|] `shouldRenderTo` package [i|
library
other-modules:
Paths_foo
|]
context "when specified globally" $ do
it "overrides header fields" $ do
[i|
verbatim:
cabal-version: foo
|] `shouldRenderTo` (package "") {packageCabalVersion = "foo"}
it "overrides other fields" $ do
touch "foo"
[i|
extra-source-files: foo
verbatim:
extra-source-files: bar
|] `shouldRenderTo` package [i|
extra-source-files: bar
|]
it "is not propagated into sections" $ do
[i|
verbatim:
foo: 23
library: {}
|] `shouldRenderTo` package [i|
foo: 23
library
other-modules:
Paths_foo
default-language: Haskell2010
|]
context "within a section" $ do
it "overrides section fields" $ do
[i|
tests:
spec:
verbatim:
type: detailed-0.9
|] `shouldRenderTo` package [i|
test-suite spec
type: detailed-0.9
other-modules:
Paths_foo
default-language: Haskell2010
|]
describe "default value of maintainer" $ do
it "gives maintainer precedence" $ do
[i|
author: John Doe
maintainer: Jane Doe
|] `shouldRenderTo` package [i|
author: John Doe
maintainer: Jane Doe
|]
context "with author" $ do
it "uses author if maintainer is not specified" $ do
[i|
author: John Doe
|] `shouldRenderTo` package [i|
author: John Doe
maintainer: John Doe
|]
it "omits maintainer if it is null" $ do
[i|
author: John Doe
maintainer: null
|] `shouldRenderTo` package [i|
author: John Doe
|]
run :: HasCallStack => FilePath -> FilePath -> String -> IO ([String], String)
run userDataDir c old = run_ userDataDir c old >>= either assertFailure return
run_ :: FilePath -> FilePath -> String -> IO (Either String ([String], String))
run_ userDataDir c old = do
mPackage <- readPackageConfig defaultDecodeOptions {decodeOptionsTarget = c, decodeOptionsUserDataDir = Just userDataDir}
return $ case mPackage of
Right (DecodeResult pkg cabalVersion _ warnings) ->
let
FormattingHints{..} = sniffFormattingHints (lines old)
alignment = fromMaybe 0 formattingHintsAlignment
settings = formattingHintsRenderSettings
output = cabalVersion ++ Hpack.renderPackageWith settings alignment formattingHintsFieldOrder formattingHintsSectionsFieldOrder pkg
in
Right (warnings, output)
Left err -> Left err
data RenderResult = RenderResult [String] String
deriving Eq
instance Show RenderResult where
show (RenderResult warnings output) = unlines (map ("WARNING: " ++) warnings) ++ output
shouldRenderTo :: HasCallStack => String -> Package -> Expectation
shouldRenderTo input p = do
writeFile packageConfig ("name: foo\n" ++ unindent input)
let currentDirectory = ".working-directory"
createDirectory currentDirectory
withCurrentDirectory currentDirectory $ do
(warnings, output) <- run ".." (".." </> packageConfig) expected
RenderResult warnings (dropEmptyLines output) `shouldBe` RenderResult (packageWarnings p) expected
where
expected = dropEmptyLines (renderPackage p)
dropEmptyLines = unlines . filter (not . null) . lines
shouldWarn :: HasCallStack => String -> [String] -> Expectation
shouldWarn input expected = do
writeFile packageConfig input
(warnings, _) <- run "" packageConfig ""
sort warnings `shouldBe` sort expected
shouldFailWith :: HasCallStack => String -> String -> Expectation
shouldFailWith input expected = do
writeFile packageConfig input
run_ "" packageConfig "" `shouldReturn` Left expected
customSetup :: String -> Package
customSetup a = (package content) {packageCabalVersion = "1.24", packageBuildType = "Custom"}
where
content = [i|
custom-setup
#{indentBy 2 $ unindent a}
|]
library_ :: String -> Package
library_ l = package content
where
content = [i|
library
other-modules:
Paths_foo
#{indentBy 2 $ unindent l}
default-language: Haskell2010
|]
library :: String -> Package
library l = package content
where
content = [i|
library
#{indentBy 2 $ unindent l}
default-language: Haskell2010
|]
internalLibrary :: String -> String -> Package
internalLibrary name e = (package content) {packageCabalVersion = "2.0"}
where
content = [i|
library #{name}
#{indentBy 2 $ unindent e}
default-language: Haskell2010
|]
executable_ :: String -> String -> Package
executable_ name e = package content
where
content = [i|
executable #{name}
other-modules:
Paths_foo
#{indentBy 2 $ unindent e}
default-language: Haskell2010
|]
executable :: String -> String -> Package
executable name e = package content
where
content = [i|
executable #{name}
#{indentBy 2 $ unindent e}
default-language: Haskell2010
|]
package :: String -> Package
package c = Package "foo" "0.0.0" "Simple" "1.12" c []
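-- Illustrative note (added): 'package ""' is the baseline expectation, which
-- 'renderPackage' below turns into
--   cabal-version: 1.12
--   name: foo
--   version: 0.0.0
--   build-type: Simple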
data Package = Package {
packageName :: String
, packageVersion :: String
, packageBuildType :: String
, packageCabalVersion :: String
, packageContent :: String
, packageWarnings :: [String]
}
renderPackage :: Package -> String
renderPackage Package{..} = unindent [i|
cabal-version: #{packageCabalVersion}
name: #{packageName}
version: #{packageVersion}
build-type: #{packageBuildType}
#{unindent packageContent}
|]
indentBy :: Int -> String -> String
indentBy n = unlines . map (replicate n ' ' ++) . lines
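-- Hedged example (added): indentBy 2 "foo\nbar" == "  foo\n  bar\n".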
license :: String
license = [i|
Copyright (c) 2014-2018 Simon Hengel <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
|]
| sol/hpack | test/EndToEndSpec.hs | mit | 54,809 | 0 | 27 | 21,715 | 7,020 | 3,859 | 3,161 | -1 | -1 |
module Main where
import Zelus
import Optimize
import Plot
import Data.List( nub, sortOn )
import Test.QuickCheck
import Test.QuickCheck.Modifiers
--------------------------------------------------------------------------------
-- heater + controller
type Level = Double -- pump level
type Temp = Double -- temperature
-- computing the weighted average
weigh :: Fractional a => [(a,a)] -> a
weigh axs = sum [ a*x | (a,x) <- axs ] / sum [ a | (a,_) <- axs ]
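-- Hedged example (added; not part of the original): the weighted average of
-- 10 and 20 with weights 1 and 3 is (1*10 + 3*20) / (1 + 3) = 17.5.
_weighExample :: Double
_weighExample = weigh [(1, 10), (3, 20)]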
-- the plant
plant :: S Level -> S Temp
plant pump = roomTemp
where
startTemp = outsideTemp
boilerTemp = 90
heaterCoeff = 0.1
outsideTemp = -5 -- (-5)
outsideCoeff = 0.05
-- the heater temperature is influenced by how much hot water is pumped into it
-- and the room temperature
heaterTemp = startTemp |-> weigh [ (1-pump, heaterTemp)
, (pump, boilerTemp)
, (heaterCoeff, roomTemp)
]
-- the room temperature is influenced by the heater temperature and the outside
-- temperature
roomTemp = startTemp |-> weigh [ (1, roomTemp)
, (heaterCoeff, heaterTemp)
, (outsideCoeff, outsideTemp)
]
-- controller
type Control = (Double, Double, Double)
controller :: Control -> S Temp -> S Temp -> S Level
controller (k_p,k_i,k_d) goalTemp roomTemp =
(pump' `mn` 1) `mx` 0
where
err = goalTemp - roomTemp
pump' = val k_p * err
+ val k_i * integral ((0 |> pre pump') >? 1 ? (0, err))
+ val k_d * deriv err
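-- Note (added): 'controller' is a PID controller clamped to the pump range
-- [0,1]: k_p, k_i and k_d are the proportional, integral and derivative gains,
-- and the (pre pump' >? 1) guard stops the integral term from accumulating
-- while the unclamped output is already above the upper limit (anti-windup).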
controlleR :: Control -> S Temp -> S Temp -> S Level
controlleR (k_p,k_i,k_d) goalTemp roomTemp =
(pump' >=? 0) ? ((1 >=? pump') ? (pump', 1), 0)
where
err = goalTemp - roomTemp
pump' = val k_p * err
+ val k_i * integ (err `in1t` 0 `reset` (0 `when` changeGoalTemp))
+ val k_d * deriv err
changeGoalTemp = abs (goalTemp - pre goalTemp) >? 1
cgood :: Control
--cgood = (3.997591176733649e-3,8.194771741046325e-5,5.618398605936785e-3)
cgood = (5.0e-3,1.1446889636416996e-4,5.0e-3)
--------------------------------------------------------------------------------
-- properties
main = quickCheck prop_ReactFast
prop_ReactFast (GoalTemp _ goalTemp) =
whenFail (plot "failed" 300
[ ("bad", graph (ok ? (0,5)))
, ("goal",graph goalTemp)
, ("room",graph roomTemp)
-- , ("stable", stableFor)
, ("diff", graph $ let d = 10 * abs (roomTemp - goalTemp) in d `mn` 50)
]) $
for tot $
ok
where
tot = 1000
--ok = errTemp <? ((200 / stableFor) `mx` 1)
ok = (stableFor >=? 150) ? (errTemp <=? 1, val True)
errTemp = abs (goalTemp - roomTemp)
roomTemp = plant pump
pump = controller cgood goalTemp roomTemp
stableFor = n
where
--n = 1 |> (goalTemp ==? pre goalTemp ? (pre n+1,1))
n = integ (1 `in1t` 1 `reset` (1 `when` (goalTemp /=? pre goalTemp)))
data GoalTemp = GoalTemp [(Int,Temp)] (S Temp)
deriving ( Eq, Ord )
goalTemp :: [(Int,Temp)] -> GoalTemp
goalTemp ds = GoalTemp ds (interp undefined ds)
where
interp t [] = repeat t
interp _ ((n,t):nts) = replicate n t ++ interp t nts
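-- Hedged example (added): goalTemp [(60, 20), (40, 15)] holds 20 degrees for
-- 60 steps, then 15 degrees for the next 40 steps, and stays at 15 afterwards.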
instance Show GoalTemp where
show (GoalTemp ds xs) = show ds
instance Arbitrary GoalTemp where
arbitrary =
do ds <- listOf (do t <- choose (10,30)
n <- choose (0,100)
return (n,t)) `suchThat` (not . null)
return (goalTemp ds)
shrink (GoalTemp ds _) =
[ goalTemp ds'
| ds' <- shrinkList (\_ -> []) ds ++ erase ds ++ smaller ds
, not (null ds')
, all (\(_,t) -> 10 <= t && t <= 30) ds'
]
where
erase [] = []
erase (d@(n,t1):ds) =
concat
[ nub [ (n+m,t1):ds', (n+m,t2):ds' ]
| (m,t2):ds' <- [ds]
] ++
[ d : ds' | ds' <- erase ds ]
smaller [] = []
smaller ((n,t):ds) =
[ (n',t):ds | n' <- shrink n ] ++
[ (n,t'):ds | t' <- shrinkFloat t, 10 <= t' ] ++
[ (n,t):ds' | ds' <- smaller ds ]
prop_Shrink (Fixed g@(GoalTemp _ _)) =
g `notElem` take 1000 (xs ++ concatMap shrink xs)
where
xs = take 100 $ shrink g
--------------------------------------------------------------------------------
-- show a given controller
display :: String -> (S Temp -> S Level) -> IO ()
display name controller =
plot name 300
[ ("room", graph roomTemp)
, ("pump", graph (fmap (50*) pump))
]
where
roomTemp = plant pump
pump = controller roomTemp
--------------------------------------------------------------------------------
-- search
analyze :: Control -> (Integer, Double)
analyze c = stable 0 0 0 roomTemp
where
roomTemp = plant pump
pump = controller c 20 roomTemp
stable m n k (t:ts)
| k >= 100 = (n, m)
| n >= 10000 = (n+1, m)
| abs (t - 20) <= 0.01 = stable (m `max` t) n (k+1) ts
| otherwise = stable (m `max` t) (n+k+1) 0 ts
fit :: Control -> Double
fit c = fromInteger n / 100 + m
where
(n,m) = analyze c
cbest = (a,b,c)
where
[a,b,c] = optiVec (\[a,b,c] -> -fit (a,b,c)) (v0,v1)
v0 = [0,0,0]
v1 = [e,e,e]
e = 0.01
--------------------------------------------------------------------------------
-- main
main1 :: IO ()
main1 =
do putStrLn "-- a good controller --"
print cgood
print (analyze cgood)
print (fit cgood)
display "good" (controller cgood goalTemp)
putStrLn "-- the best (?) controller --"
print cbest
print (analyze cbest)
print (fit cbest)
display "best" (controller cbest goalTemp)
where
goalTemp = repeat 20 -- replicate 60 20 ++ repeat 15
--------------------------------------------------------------------------------
| koengit/cyphy | src/Heater.hs | mit | 5,830 | 1 | 20 | 1,650 | 2,154 | 1,163 | 991 | 130 | 2 |
module Agda.Convert where
import Render ( Block(..), Inlines, renderATop, Render(..) )
import Agda.IR (FromAgda (..))
import qualified Agda.IR as IR
import Agda.Interaction.Base
import Agda.Interaction.BasicOps as B
import Agda.Interaction.EmacsCommand (Lisp)
import Agda.Interaction.Highlighting.Common (chooseHighlightingMethod, toAtoms)
import Agda.Interaction.Highlighting.Precise (Aspects (..), DefinitionSite (..), HighlightingInfo, TokenBased (..))
import qualified Agda.Interaction.Highlighting.Range as Highlighting
import Agda.Interaction.InteractionTop (localStateCommandM)
import Agda.Interaction.Response as R
import Agda.Syntax.Abstract as A
import Agda.Syntax.Abstract.Pretty (prettyATop)
import Agda.Syntax.Common
import Agda.Syntax.Concrete as C
import Agda.Syntax.Internal (alwaysUnblock)
import Agda.Syntax.Position (HasRange (getRange), Range, noRange)
import Agda.Syntax.Scope.Base
import Agda.TypeChecking.Errors (getAllWarningsOfTCErr, prettyError)
import Agda.TypeChecking.Monad hiding (Function)
import Agda.TypeChecking.Pretty (prettyTCM)
import qualified Agda.TypeChecking.Pretty as TCP
import Agda.TypeChecking.Pretty.Warning (filterTCWarnings, prettyTCWarnings, prettyTCWarnings')
import Agda.TypeChecking.Warnings (WarningsAndNonFatalErrors (..))
import Agda.Utils.FileName (filePath)
import Agda.Utils.Function (applyWhen)
import Agda.Utils.IO.TempFile (writeToTempFile)
import Agda.Utils.Impossible (__IMPOSSIBLE__)
import Agda.Utils.Maybe (catMaybes)
import Agda.Utils.Null (empty)
import Agda.Utils.Pretty hiding (render)
import Agda.Utils.RangeMap ( IsBasicRangeMap(toList) )
import Agda.Utils.String (delimiter)
import Agda.Utils.Time (CPUTime)
import Agda.VersionCommit (versionWithCommitInfo)
import Control.Monad.State hiding (state)
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Lazy.Char8 as BS8
import qualified Data.List as List
import qualified Data.Map as Map
import Data.String (IsString)
import qualified Render
responseAbbr :: IsString a => Response -> a
responseAbbr res = case res of
Resp_HighlightingInfo {} -> "Resp_HighlightingInfo"
Resp_Status {} -> "Resp_Status"
Resp_JumpToError {} -> "Resp_JumpToError"
Resp_InteractionPoints {} -> "Resp_InteractionPoints"
Resp_GiveAction {} -> "Resp_GiveAction"
Resp_MakeCase {} -> "Resp_MakeCase"
Resp_SolveAll {} -> "Resp_SolveAll"
Resp_DisplayInfo {} -> "Resp_DisplayInfo"
Resp_RunningInfo {} -> "Resp_RunningInfo"
Resp_ClearRunningInfo {} -> "Resp_ClearRunningInfo"
Resp_ClearHighlighting {} -> "Resp_ClearHighlighting"
Resp_DoneAborting {} -> "Resp_DoneAborting"
Resp_DoneExiting {} -> "Resp_DoneExiting"
----------------------------------
serialize :: Lisp String -> String
serialize = show . pretty
fromResponse :: Response -> TCM IR.Response
fromResponse (Resp_HighlightingInfo info remove method modFile) =
fromHighlightingInfo info remove method modFile
fromResponse (Resp_DisplayInfo info) = IR.ResponseDisplayInfo <$> fromDisplayInfo info
fromResponse (Resp_ClearHighlighting TokenBased) = return IR.ResponseClearHighlightingTokenBased
fromResponse (Resp_ClearHighlighting NotOnlyTokenBased) = return IR.ResponseClearHighlightingNotOnlyTokenBased
fromResponse Resp_DoneAborting = return IR.ResponseDoneAborting
fromResponse Resp_DoneExiting = return IR.ResponseDoneExiting
fromResponse Resp_ClearRunningInfo = return IR.ResponseClearRunningInfo
fromResponse (Resp_RunningInfo n s) = return $ IR.ResponseRunningInfo n s
fromResponse (Resp_Status s) = return $ IR.ResponseStatus (sChecked s) (sShowImplicitArguments s)
fromResponse (Resp_JumpToError f p) = return $ IR.ResponseJumpToError f (fromIntegral p)
fromResponse (Resp_InteractionPoints is) =
return $ IR.ResponseInteractionPoints (fmap interactionId is)
fromResponse (Resp_GiveAction (InteractionId i) giveAction) =
return $ IR.ResponseGiveAction i (fromAgda giveAction)
fromResponse (Resp_MakeCase _ Function pcs) = return $ IR.ResponseMakeCaseFunction pcs
fromResponse (Resp_MakeCase _ ExtendedLambda pcs) = return $ IR.ResponseMakeCaseExtendedLambda pcs
fromResponse (Resp_SolveAll ps) = return $ IR.ResponseSolveAll (fmap prn ps)
where
prn (InteractionId i, e) = (i, prettyShow e)
fromHighlightingInfo ::
HighlightingInfo ->
RemoveTokenBasedHighlighting ->
HighlightingMethod ->
ModuleToSource ->
TCM IR.Response
fromHighlightingInfo h remove method modFile =
case chooseHighlightingMethod h method of
Direct -> return $ IR.ResponseHighlightingInfoDirect info
Indirect -> IR.ResponseHighlightingInfoIndirect <$> indirect
where
fromAspects ::
(Highlighting.Range, Aspects) ->
IR.HighlightingInfo
fromAspects (range, aspects) =
IR.HighlightingInfo
(Highlighting.from range)
(Highlighting.to range)
(toAtoms aspects)
(tokenBased aspects == TokenBased)
(note aspects)
(defSite <$> definitionSite aspects)
where
defSite (DefinitionSite moduleName offset _ _) =
(filePath (Map.findWithDefault __IMPOSSIBLE__ moduleName modFile), offset)
infos :: [IR.HighlightingInfo]
infos = fmap fromAspects (toList h)
keepHighlighting :: Bool
keepHighlighting =
case remove of
RemoveHighlighting -> False
KeepHighlighting -> True
info :: IR.HighlightingInfos
info = IR.HighlightingInfos keepHighlighting infos
indirect :: TCM FilePath
indirect = liftIO $ writeToTempFile (BS8.unpack (JSON.encode info))
fromDisplayInfo :: DisplayInfo -> TCM IR.DisplayInfo
fromDisplayInfo = \case
Info_CompilationOk _ ws -> do
-- filter
let filteredWarnings = filterTCWarnings (tcWarnings ws)
let filteredErrors = filterTCWarnings (nonFatalErrors ws)
-- serializes
warnings <- mapM prettyTCM filteredWarnings
errors <- mapM prettyTCM filteredErrors
return $ IR.DisplayInfoCompilationOk (fmap show warnings) (fmap show errors)
Info_Constraints s -> do
-- constraints <- forM s $ \e -> do
-- rendered <- renderTCM e
-- let raw = show (pretty e)
-- return $ Unlabeled rendered (Just raw)
-- return $ IR.DisplayInfoGeneric "Constraints" constraints
return $ IR.DisplayInfoGeneric "Constraints" [Unlabeled (Render.text $ show $ vcat $ fmap pretty s) Nothing Nothing]
Info_AllGoalsWarnings (ims, hms) ws -> do
-- visible metas (goals)
goals <- mapM convertGoal ims
-- hidden (unsolved) metas
metas <- mapM convertHiddenMeta hms
-- errors / warnings
-- filter
let filteredWarnings = filterTCWarnings (tcWarnings ws)
let filteredErrors = filterTCWarnings (nonFatalErrors ws)
-- serializes
warnings <- mapM prettyTCM filteredWarnings
errors <- mapM prettyTCM filteredErrors
let isG = not (null goals && null metas)
let isW = not $ null warnings
let isE = not $ null errors
let title =
List.intercalate "," $
catMaybes
[ " Goals" <$ guard isG,
" Errors" <$ guard isE,
" Warnings" <$ guard isW,
" Done" <$ guard (not (isG || isW || isE))
]
return $ IR.DisplayInfoAllGoalsWarnings ("*All" ++ title ++ "*") goals metas (fmap show warnings) (fmap show errors)
where
convertHiddenMeta :: OutputConstraint A.Expr NamedMeta -> TCM Block
convertHiddenMeta m = do
let i = nmid $ namedMetaOf m
-- output constraint
meta <- withMetaId i $ renderATop m
serialized <- show <$> withMetaId i (prettyATop m)
-- range
range <- getMetaRange i
return $ Unlabeled meta (Just serialized) (Just range)
convertGoal :: OutputConstraint A.Expr InteractionId -> TCM Block
convertGoal i = do
-- output constraint
goal <-
withInteractionId (outputFormId $ OutputForm noRange [] alwaysUnblock i) $
renderATop i
serialized <-
withInteractionId (outputFormId $ OutputForm noRange [] alwaysUnblock i) $
prettyATop i
return $ Unlabeled goal (Just $ show serialized) Nothing
Info_Auto s -> return $ IR.DisplayInfoAuto s
Info_Error err -> do
s <- showInfoError err
return $ IR.DisplayInfoError s
Info_Time s ->
return $ IR.DisplayInfoTime (show (prettyTimed s))
Info_NormalForm state cmode time expr -> do
exprDoc <- evalStateT prettyExpr state
let doc = maybe empty prettyTimed time $$ exprDoc
return $ IR.DisplayInfoNormalForm (show doc)
where
prettyExpr =
localStateCommandM $
lift $
B.atTopLevel $
allowNonTerminatingReductions $
(if computeIgnoreAbstract cmode then ignoreAbstractMode else inConcreteMode) $
B.showComputed cmode expr
Info_InferredType state time expr -> do
renderedExpr <-
flip evalStateT state $
localStateCommandM $
lift $
B.atTopLevel $
Render.renderA expr
let rendered = case time of
Nothing -> renderedExpr
-- TODO: handle this newline
Just t -> "Time:" Render.<+> Render.render t Render.<+> "\n" Render.<+> renderedExpr
exprDoc <-
flip evalStateT state $
localStateCommandM $
lift $
B.atTopLevel $
TCP.prettyA expr
let raw = show $ maybe empty prettyTimed time $$ exprDoc
return $ IR.DisplayInfoGeneric "Inferred Type" [Unlabeled rendered (Just raw) Nothing]
Info_ModuleContents modules tel types -> do
doc <- localTCState $ do
typeDocs <- addContext tel $
forM types $ \(x, t) -> do
doc <- prettyTCM t
return (prettyShow x, ":" <+> doc)
return $
vcat
[ "Modules",
nest 2 $ vcat $ fmap pretty modules,
"Names",
nest 2 $ align 10 typeDocs
]
return $ IR.DisplayInfoGeneric "Module contents" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Info_SearchAbout hits names -> do
hitDocs <- forM hits $ \(x, t) -> do
doc <- prettyTCM t
return (prettyShow x, ":" <+> doc)
let doc =
"Definitions about"
<+> text (List.intercalate ", " $ words names) $$ nest 2 (align 10 hitDocs)
return $ IR.DisplayInfoGeneric "Search About" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Info_WhyInScope s cwd v xs ms -> do
doc <- explainWhyInScope s cwd v xs ms
return $ IR.DisplayInfoGeneric "Scope Info" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Info_Context ii ctx -> do
doc <- localTCState (prettyResponseContexts ii False ctx)
return $ IR.DisplayInfoGeneric "Context" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Info_Intro_NotFound ->
return $ IR.DisplayInfoGeneric "Intro" [Unlabeled (Render.text "No introduction forms found.") Nothing Nothing]
Info_Intro_ConstructorUnknown ss -> do
let doc =
sep
[ "Don't know which constructor to introduce of",
let mkOr [] = []
mkOr [x, y] = [text x <+> "or" <+> text y]
mkOr (x : xs) = text x : mkOr xs
in nest 2 $ fsep $ punctuate comma (mkOr ss)
]
return $ IR.DisplayInfoGeneric "Intro" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Info_Version ->
return $ IR.DisplayInfoGeneric "Agda Version" [Unlabeled (Render.text $ "Agda version " ++ versionWithCommitInfo) Nothing Nothing]
Info_GoalSpecific ii kind -> lispifyGoalSpecificDisplayInfo ii kind
lispifyGoalSpecificDisplayInfo :: InteractionId -> GoalDisplayInfo -> TCM IR.DisplayInfo
lispifyGoalSpecificDisplayInfo ii kind = localTCState $
B.withInteractionId ii $
case kind of
Goal_HelperFunction helperType -> do
doc <- inTopContext $ prettyATop helperType
return $ IR.DisplayInfoGeneric "Helper function" [Unlabeled (Render.text $ show doc ++ "\n") Nothing Nothing]
Goal_NormalForm cmode expr -> do
doc <- showComputed cmode expr
return $ IR.DisplayInfoGeneric "Normal Form" [Unlabeled (Render.text $ show doc) Nothing Nothing]
Goal_GoalType norm aux resCtxs boundaries constraints -> do
goalSect <- do
(rendered, raw) <- prettyTypeOfMeta norm ii
return [Labeled rendered (Just raw) Nothing "Goal" "special"]
auxSect <- case aux of
GoalOnly -> return []
GoalAndHave expr -> do
rendered <- renderATop expr
raw <- show <$> prettyATop expr
return [Labeled rendered (Just raw) Nothing "Have" "special"]
GoalAndElaboration term -> do
let rendered = render term
raw <- show <$> TCP.prettyTCM term
return [Labeled rendered (Just raw) Nothing "Elaborates to" "special"]
let boundarySect =
if null boundaries
then []
else
Header "Boundary" :
fmap (\boundary -> Unlabeled (render boundary) (Just $ show $ pretty boundary) Nothing) boundaries
contextSect <- reverse . concat <$> mapM (renderResponseContext ii) resCtxs
let constraintSect =
if null constraints
then []
else
Header "Constraints" :
fmap (\constraint -> Unlabeled (render constraint) (Just $ show $ pretty constraint) Nothing) constraints
return $
IR.DisplayInfoGeneric "Goal type etc" $ goalSect ++ auxSect ++ boundarySect ++ contextSect ++ constraintSect
Goal_CurrentGoal norm -> do
(rendered, raw) <- prettyTypeOfMeta norm ii
return $ IR.DisplayInfoCurrentGoal (Unlabeled rendered (Just raw) Nothing)
Goal_InferredType expr -> do
rendered <- renderATop expr
raw <- show <$> prettyATop expr
return $ IR.DisplayInfoInferredType (Unlabeled rendered (Just raw) Nothing)
-- -- | Format responses of DisplayInfo
-- formatPrim :: Bool -> [Block] -> String -> TCM IR.DisplayInfo
-- formatPrim _copy items header = return $ IR.DisplayInfoGeneric header items
-- -- | Format responses of DisplayInfo ("agda2-info-action")
-- format :: [Block] -> String -> TCM IR.DisplayInfo
-- format = formatPrim False
-- -- | Format responses of DisplayInfo ("agda2-info-action-copy")
-- formatAndCopy :: [Block] -> String -> TCM IR.DisplayInfo
-- formatAndCopy = formatPrim True
--------------------------------------------------------------------------------
-- | Serializing Info_Error
showInfoError :: Info_Error -> TCM String
showInfoError (Info_GenericError err) = do
e <- prettyError err
w <- prettyTCWarnings' =<< getAllWarningsOfTCErr err
let errorMsg =
if null w
then e
else delimiter "Error" ++ "\n" ++ e
let warningMsg =
List.intercalate "\n" $
delimiter "Warning(s)" :
filter (not . null) w
return $
if null w
then errorMsg
else errorMsg ++ "\n\n" ++ warningMsg
showInfoError (Info_CompilationError warnings) = do
s <- prettyTCWarnings warnings
return $
unlines
[ "You need to fix the following errors before you can compile",
"the module:",
"",
s
]
showInfoError (Info_HighlightingParseError ii) =
return $ "Highlighting failed to parse expression in " ++ show ii
showInfoError (Info_HighlightingScopeCheckError ii) =
return $ "Highlighting failed to scope check expression in " ++ show ii
explainWhyInScope ::
FilePath ->
String ->
Maybe LocalVar ->
[AbstractName] ->
[AbstractModule] ->
TCM Doc
explainWhyInScope s _ Nothing [] [] = TCP.text (s ++ " is not in scope.")
explainWhyInScope s _ v xs ms =
TCP.vcat
[ TCP.text (s ++ " is in scope as"),
TCP.nest 2 $ TCP.vcat [variable v xs, modules ms]
]
where
-- variable :: Maybe _ -> [_] -> TCM Doc
variable Nothing vs = names vs
variable (Just x) vs
| null vs = asVar
| otherwise =
TCP.vcat
[ TCP.sep [asVar, TCP.nest 2 $ shadowing x],
TCP.nest 2 $ names vs
]
where
asVar :: TCM Doc
asVar =
"* a variable bound at" TCP.<+> TCP.prettyTCM (nameBindingSite $ localVar x)
shadowing :: LocalVar -> TCM Doc
shadowing (LocalVar _ _ []) = "shadowing"
shadowing _ = "in conflict with"
names = TCP.vcat . fmap pName
modules = TCP.vcat . fmap pMod
pKind = \case
AxiomName -> "postulate"
ConName -> "constructor"
CoConName -> "coinductive constructor"
DataName -> "data type"
DisallowedGeneralizeName -> "generalizable variable from let open"
FldName -> "record field"
FunName -> "defined name"
GeneralizeName -> "generalizable variable"
MacroName -> "macro name"
PatternSynName -> "pattern synonym"
PrimName -> "primitive function"
QuotableName -> "quotable name"
-- previously DefName:
RecName -> "record type"
OtherDefName -> "defined name"
pName :: AbstractName -> TCM Doc
pName a =
TCP.sep
[ "* a"
TCP.<+> pKind (anameKind a)
TCP.<+> TCP.text (prettyShow $ anameName a),
TCP.nest 2 "brought into scope by"
]
TCP.$$ TCP.nest 2 (pWhy (nameBindingSite $ qnameName $ anameName a) (anameLineage a))
pMod :: AbstractModule -> TCM Doc
pMod a =
TCP.sep
[ "* a module" TCP.<+> TCP.text (prettyShow $ amodName a),
TCP.nest 2 "brought into scope by"
]
TCP.$$ TCP.nest 2 (pWhy (nameBindingSite $ qnameName $ mnameToQName $ amodName a) (amodLineage a))
pWhy :: Range -> WhyInScope -> TCM Doc
pWhy r Defined = "- its definition at" TCP.<+> TCP.prettyTCM r
pWhy r (Opened (C.QName x) w) | isNoName x = pWhy r w
pWhy r (Opened m w) =
"- the opening of"
TCP.<+> TCP.prettyTCM m
TCP.<+> "at"
TCP.<+> TCP.prettyTCM (getRange m)
TCP.$$ pWhy r w
pWhy r (Applied m w) =
"- the application of"
TCP.<+> TCP.prettyTCM m
TCP.<+> "at"
TCP.<+> TCP.prettyTCM (getRange m)
TCP.$$ pWhy r w
-- | Pretty-prints the context of the given meta-variable.
prettyResponseContexts ::
-- | Context of this meta-variable.
InteractionId ->
-- | Print the elements in reverse order?
Bool ->
[ResponseContextEntry] ->
TCM Doc
prettyResponseContexts ii rev ctxs = do
rows <- mapM (prettyResponseContext ii) ctxs
return $ align 10 $ concat $ applyWhen rev reverse rows
-- | Pretty-prints the context of the given meta-variable.
prettyResponseContext ::
-- | Context of this meta-variable.
InteractionId ->
ResponseContextEntry ->
TCM [(String, Doc)]
prettyResponseContext ii (ResponseContextEntry n x (Arg ai expr) letv nis) = withInteractionId ii $ do
modality <- asksTC getModality
do
let prettyCtxName :: String
prettyCtxName
| n == x = prettyShow x
| isInScope n == InScope = prettyShow n ++ " = " ++ prettyShow x
| otherwise = prettyShow x
-- Some attributes are useful to report whenever they are not
-- in the default state.
attribute :: String
attribute = c ++ if null c then "" else " "
where
c = prettyShow (getCohesion ai)
extras :: [Doc]
extras =
concat
[ ["not in scope" | isInScope nis == C.NotInScope],
-- Print erased if hypothesis is erased but goal is non-erased.
["erased" | not $ getQuantity ai `moreQuantity` getQuantity modality],
-- Print irrelevant if hypothesis is strictly less relevant than goal.
["irrelevant" | not $ getRelevance ai `moreRelevant` getRelevance modality],
-- Print instance if variable is considered by instance search
["instance" | isInstance ai]
]
ty <- prettyATop expr
letv' <- case letv of
Nothing -> return []
Just val -> do
val' <- prettyATop val
return [(prettyShow x, "=" <+> val')]
return $
(attribute ++ prettyCtxName, ":" <+> ty <+> parenSep extras) : letv'
where
parenSep :: [Doc] -> Doc
parenSep docs
| null docs = empty
| otherwise = (" " <+>) $ parens $ fsep $ punctuate comma docs
-- | Render the context of the given meta-variable.
renderResponseContext ::
-- | Context of this meta-variable.
InteractionId ->
ResponseContextEntry ->
TCM [Block]
renderResponseContext ii (ResponseContextEntry n x (Arg ai expr) letv nis) = withInteractionId ii $ do
modality <- asksTC getModality
do
let
rawCtxName :: String
rawCtxName
| n == x = prettyShow x
| isInScope n == InScope = prettyShow n ++ " = " ++ prettyShow x
| otherwise = prettyShow x
renderedCtxName :: Inlines
renderedCtxName
| n == x = render x
| isInScope n == InScope = render n Render.<+> "=" Render.<+> render x
| otherwise = render x
-- Some attributes are useful to report whenever they are not
-- in the default state.
rawAttribute :: String
rawAttribute = c ++ if null c then "" else " "
where
c = prettyShow (getCohesion ai)
renderedAttribute :: Inlines
renderedAttribute = c <> if null (show c) then "" else " "
where
c = render (getCohesion ai)
extras :: IsString a => [a]
extras =
concat
[ ["not in scope" | isInScope nis == C.NotInScope],
-- Print erased if hypothesis is erased but goal is non-erased.
["erased" | not $ getQuantity ai `moreQuantity` getQuantity modality],
-- Print irrelevant if hypothesis is strictly less relevant than goal.
["irrelevant" | not $ getRelevance ai `moreRelevant` getRelevance modality],
-- Print instance if variable is considered by instance search
["instance" | isInstance ai]
]
extras2 :: [Inlines]
extras2 =
concat
[ ["not in scope" | isInScope nis == C.NotInScope],
-- Print erased if hypothesis is erased but goal is non-erased.
["erased" | not $ getQuantity ai `moreQuantity` getQuantity modality],
-- Print irrelevant if hypothesis is strictly less relevant than goal.
["irrelevant" | not $ getRelevance ai `moreRelevant` getRelevance modality],
-- Print instance if variable is considered by instance search
["instance" | isInstance ai]
]
-- raw
rawExpr <- prettyATop expr
let rawType = show $ align 10 [(rawAttribute ++ rawCtxName, ":" <+> rawExpr <+> parenSep extras)]
-- rendered
renderedExpr <- renderATop expr
let renderedType = (renderedCtxName <> renderedAttribute) Render.<+> ":" Render.<+> renderedExpr Render.<+> parenSep2 extras2
-- (Render.fsep $ Render.punctuate "," extras)
-- result
let typeItem = Unlabeled renderedType (Just rawType) Nothing
valueItem <- case letv of
Nothing -> return []
Just val -> do
valText <- renderATop val
valString <- prettyATop val
let renderedValue = Render.render x Render.<+> "=" Render.<+> valText
let rawValue = show $ align 10 [(prettyShow x, "=" <+> valString)]
return
[ Unlabeled renderedValue (Just rawValue) Nothing
]
return $ typeItem : valueItem
where
parenSep :: [Doc] -> Doc
parenSep docs
| null docs = empty
| otherwise = (" " <+>) $ parens $ fsep $ punctuate comma docs
parenSep2 :: [Inlines] -> Inlines
parenSep2 docs
| null docs = mempty
| otherwise = (" " Render.<+>) $ Render.parens $ Render.fsep $ Render.punctuate "," docs
-- | Pretty-prints the type of the meta-variable.
prettyTypeOfMeta :: Rewrite -> InteractionId -> TCM (Inlines, String)
prettyTypeOfMeta norm ii = do
form <- B.typeOfMeta norm ii
case form of
OfType _ e -> do
rendered <- renderATop e
raw <- show <$> prettyATop e
return (rendered, raw)
_ -> do
rendered <- renderATop form
raw <- show <$> prettyATop form
return (rendered, raw)
-- | Prefix prettified CPUTime with "Time:"
prettyTimed :: CPUTime -> Doc
prettyTimed time = "Time:" <+> pretty time
| banacorn/agda-language-server | src/Agda/Convert.hs | mit | 24,621 | 0 | 23 | 6,457 | 6,639 | 3,313 | 3,326 | -1 | -1 |
{-
- Copyright (c) 2016 Christopher Wells <[email protected]>
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
-}
{-|
Module : CLI
Description : Contains functions for the command line interface.
Copyright : (c) Christopher Wells, 2016
License : MIT
Maintainer : [email protected]
-}
module CLI where
import BinarySearchTree
import Command
{-|
Prompts the user for a command and returns the given command.
>>> promptForCommand
Enter a command (i, c, in, pre, post, or q):
i
-}
promptForCommand :: IO String
promptForCommand = do
putStrLn "Enter a command (i, c, in, pre, post, or q):"
getLine
{-|
Prints the contents of the given tree with the given show function.
-}
printTree :: BST -> (BST -> String) -> IO BST
printTree b f = do
let result = f b
let resultStr = if null result then "The tree has no nodes." else result
putStrLn resultStr
return b
{-|
Prints a String representing whether or not the given object is contained
within the given Binary Search Tree.
-}
printContains :: BST -> String -> IO BST
printContains b item = do
let result = if containsBST b item then item ++ " is contained in the tree." else item ++ " is not contained in the tree."
putStrLn result
return b
{-|
Performs an action based on the user-entered command.
-}
action :: BST -> IO ()
action b = do
commandString <- promptForCommand
let command = toCommand commandString
bs <- runCommand b command
case command of
Just Quit -> return ()
Nothing -> do
printInvalid commandString
action bs
_ -> action bs
{-|
Runs the given command on the given Binary Search Tree, and returns the resulting Binary Search Tree.
-}
runCommand :: BST -> Maybe Command -> IO BST
runCommand b command = case command of
Just (Insert item) -> return (insertBST b item)
Just (Contains item) -> printContains b item
Just InOrder -> printTree b showBST
Just PreOrder -> printTree b showPreBST
Just PostOrder -> printTree b showPostBST
Just Quit -> return b
_ -> return b
{-|
Prints that an invalid command was entered.
>>> printInvalid "test Alice"
Invalid command -- test Alice
-}
printInvalid :: String -> IO ()
printInvalid s = putStrLn ("Invalid command -- " ++ s)
| ExcaliburZero/binary-search-tree-haskell | src/CLI.hs | mit | 3,334 | 0 | 12 | 740 | 460 | 217 | 243 | 40 | 7 |
--
-- Copyright (c) 2013 Bonelli Nicola <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module CGrep.Output (Output(),
mkOutput,
putPrettyHeader,
putPrettyFooter,
prettyOutput,
showFile) where
import qualified Data.ByteString.Char8 as C
import System.Console.ANSI
#ifdef ENABLE_HINT
import Language.Haskell.Interpreter
#endif
import Data.Maybe
import Data.List
import Data.List.Split
import Data.Function
import CGrep.Types
import CGrep.Token
import Safe
import Options
data Output = Output FilePath Int Text8 [Token]
deriving (Show)
getOffsetsLines :: Text8 -> [Int]
getOffsetsLines = C.elemIndices '\n'
getOffset2d :: [OffsetLine] -> Offset -> Offset2d
getOffset2d idx off = let prc = fst $ partition (< off) idx in
case prc of
[] -> (0, off)
_ -> (length prc, off - last prc - 1)
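-- A small worked example (illustrative sketch): for the text "ab\ncd\ne" the
-- newline offsets are [2,5], so
--   getOffset2d [2,5] 1 == (0,1)   -- 'b': first line, column 1
--   getOffset2d [2,5] 4 == (1,1)   -- 'd': second line, column 1
-- i.e. the row is the number of preceding newlines and the column is the
-- distance from the last newline.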
mkOutput :: Options -> FilePath -> Text8 -> Text8 -> [Token] -> [Output]
mkOutput Options { invert_match = invert } f text multi ts
| invert = map (\(n, xs) -> Output f n (ls !! (n-1)) xs) . invertMatchLines (length ls) $ mkMatchLines multi ts
| otherwise = map (\(n, xs) -> Output f n (ls !! (n-1)) xs) $ mkMatchLines multi ts
where ls = C.lines text
mkMatchLines :: Text8 -> [Token] -> [MatchLine]
mkMatchLines _ [] = []
mkMatchLines text ts = map mergeGroup $ groupBy ((==) `on` fst) $
sortBy (compare `on` fst) $ map (\t -> let (r,c) = getOffset2d ols (fst t) in (1 + r, [(c, snd t)])) ts
where mergeGroup ls = (fst $ head ls, foldl (\l m -> l ++ snd m) [] ls)
ols = getOffsetsLines text
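-- For instance (an illustrative sketch), with the text "foo\nbar" and the
-- tokens [(0,"foo"), (4,"bar")] the offsets map to lines 1 and 2, giving
--   mkMatchLines "foo\nbar" [(0,"foo"), (4,"bar")]
--     == [(1, [(0,"foo")]), (2, [(0,"bar")])]
-- where each entry pairs a 1-based line number with the (column, token) hits
-- found on that line.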
invertMatchLines :: Int -> [MatchLine] -> [MatchLine]
invertMatchLines n xs = filter (\(i,_) -> i `notElem` idx ) $ take n [ (i, []) | i <- [1..]]
where idx = map fst xs
putPrettyHeader :: Options -> IO ()
putPrettyHeader opt =
case () of
_ | json opt -> putStrLn "["
| xml opt -> putStrLn "<?xml version=\"1.0\"?>" >> putStrLn "<cgrep>"
| otherwise -> return ()
putPrettyFooter :: Options -> IO ()
putPrettyFooter opt =
case () of
_ | json opt -> putStrLn "]"
| xml opt -> putStrLn "</cgrep>"
| otherwise -> return ()
prettyOutput :: Options -> [Output] -> IO [String]
prettyOutput opt out
#ifdef ENABLE_HINT
| isJust $ hint opt = hintOputput opt out
#endif
| isJust $ format opt = return $ map (formatOutput opt) out
| json opt = return $ jsonOutput opt out
| xml opt = return $ xmlOutput opt out
| otherwise = return $ defaultOutput opt out
defaultOutput :: Options -> [Output] -> [String]
defaultOutput opt@Options{ no_filename = False, no_linenumber = False , count = False } xs =
map (\(Output f n l ts) -> showFile opt f ++ ":" ++ show n ++ ":" ++ showTokens opt ts ++ showLine opt ts l) xs
defaultOutput opt@Options{ no_filename = False, no_linenumber = True , count = False } xs =
map (\(Output f _ l ts) -> showFile opt f ++ ":" ++ showTokens opt ts ++ showLine opt ts l) xs
defaultOutput opt@Options{ no_filename = True , no_linenumber = False , count = False } xs =
map (\(Output _ n l ts) -> show n ++ ":" ++ showTokens opt ts ++ showLine opt ts l) xs
defaultOutput opt@Options{ no_filename = True , no_linenumber = True , count = False } xs =
map (\(Output _ _ l ts) -> showTokens opt ts ++ showLine opt ts l) xs
defaultOutput opt@Options{ count = True } xs =
let gs = groupBy (\(Output f1 _ _ _) (Output f2 _ _ _) -> f1 == f2) xs
in map (\ys@(y:_) -> showFile opt (outputFilename y) ++ ":" ++ show (length ys)) gs
where outputFilename (Output f _ _ _) = f
jsonOutput :: Options -> [Output] -> [String]
jsonOutput _ outs =
[" { \"file\": " ++ show fname ++ ", \"matches\": ["] ++
[ intercalate "," (foldl mkMatch [] outs) ] ++
["] }"]
where fname | (Output f _ _ _) <- head outs = f
mkToken (n, xs) = "{ \"col\": " ++ show n ++ ", \"token\": " ++ show xs ++ " }"
mkMatch xs (Output _ n l ts) = xs ++ [ "{ \"row\": " ++ show n ++ ", \"tokens\": [" ++ intercalate "," (map mkToken ts) ++ "], \"line\":" ++ show l ++ "}" ]
xmlOutput :: Options -> [Output] -> [String]
xmlOutput _ outs =
["<file name=" ++ show fname ++ ">" ] ++
["<matches>" ] ++
[foldl mkMatch "" outs] ++
["</matches>"] ++
["</file>"]
where fname | (Output f _ _ _) <- head outs = f
mkToken (n, xs) = "<token col=\"" ++ show n ++ "\" >" ++ xs ++ "</token>"
mkMatch xs (Output _ n l ts) = xs ++ "<match line=" ++ show l ++ " row=\"" ++ show n ++ "\">" ++
unwords (map mkToken ts) ++
"</match>"
formatOutput :: Options -> Output -> String
formatOutput opt (Output f n l ts) =
foldl trans (fromJust $ format opt)
[
("#f", showFile opt f),
("#n", show n),
("#l", showLine opt ts l),
("#t", show ts'),
("##", unwords ts'),
("#,", intercalate "," ts'),
("#;", intercalate ";" ts'),
("#0", atDef "" ts' 0),
("#1", atDef "" ts' 1),
("#2", atDef "" ts' 2),
("#3", atDef "" ts' 3),
("#4", atDef "" ts' 4),
("#5", atDef "" ts' 5),
("#6", atDef "" ts' 6),
("#7", atDef "" ts' 7),
("#8", atDef "" ts' 8),
("#9", atDef "" ts' 9)
]
where trans str (old, new) = replace old new str
ts' = map snd ts
replace :: Eq a => [a] -> [a] -> [a] -> [a]
replace old new = intercalate new . splitOn old
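-- For example (illustrative): replace "#n" "42" "line #n of #n" == "line 42 of 42",
-- which is how the "#f", "#n", ... placeholders above are substituted in formatOutput.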
#ifdef ENABLE_HINT
hintOputput :: Options -> [Output] -> IO [String]
hintOputput opt outs = do
let cmds = map mkCmd outs
out <- runInterpreter $ setImports ["Prelude", "Data.List"] >> mapM (`interpret` (as :: String)) cmds
return $ either ((:[]) . show) id out
where mkCmd (Output f n l ts) = "let a # b = a !! b " ++
"; file = " ++ show (showFile opt f) ++
"; row = " ++ show n ++
"; line = " ++ show (showLine opt ts l) ++
"; tokens = " ++ show (map snd ts) ++ " in " ++
(fromJust $ hint opt)
#endif
blue, bold, resetTerm :: String
blue = setSGRCode [SetColor Foreground Vivid Blue]
bold = setSGRCode [SetConsoleIntensity BoldIntensity]
resetTerm = setSGRCode []
showTokens :: Options -> [Token] -> String
showTokens Options { show_match = st } xs
| st = show (map snd xs)
| otherwise = ""
showFile :: Options -> String -> String
showFile Options { color = c } f
| c = bold ++ blue ++ f ++ resetTerm
| otherwise = f
showLine :: Options -> [Token] -> Line8 -> String
showLine Options { color = c } ts l
| c = hilightLine (sortBy (flip compare `on` (length . snd )) ts) (C.unpack l)
| otherwise = C.unpack l
hilightLine :: [Token] -> String -> String
hilightLine ts = hilightLine' (hilightIndicies ts, 0)
where hilightLine' :: ([Int],Int) -> String -> String
hilightLine' _ [] = []
hilightLine' (ns,n) (x:xs) = (if n `elem` ns then bold ++ [x] ++ resetTerm
else [x]) ++ hilightLine' (ns, n+1) xs
hilightIndicies :: [Token] -> [Int]
hilightIndicies = concatMap (\(o, s) -> take (length s) [o..])
| YelaSeamless/cgrep | src/CGrep/Output.hs | gpl-2.0 | 8,335 | 0 | 18 | 2,565 | 3,074 | 1,611 | 1,463 | 141 | 3 |
{-# language TemplateHaskell #-}
{-# language DeriveDataTypeable #-}
module BDD.Quiz where
import Expression.Op
import Boolean.Op
import qualified Boolean.BDD
import qualified OBDD as O
import Inter.Types hiding ( Var )
import Autolib.TES.Term
import qualified Autolib.TES.Binu as B
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Reporter
import Data.Function ( on )
import Data.List ( minimumBy )
import Data.Typeable
import System.Random
data Config =
Config { formula_size :: Int
, operators :: B.Binu (Op Bool)
, variables :: [ Identifier ]
, bdd_size :: Int
, bdd_candidates :: Int
}
deriving ( Typeable )
config0 :: Config
config0 = Config { formula_size = 12
, variables = read "[p,q,r,s]"
, operators = B.Binu
{ B.binary = [ read "&&", read "||" , read "<->" ]
, B.unary = [ read "not" ]
, B.nullary = [] -- [ read "true", read "false" ]
}
, bdd_size = 12
, bdd_candidates = 1000
}
derives [makeReader, makeToDoc] [ ''Config ]
roll_formula conf = do
fs <- forM [ 1 .. bdd_candidates conf ] $ \ i -> do
f <- roll_term (operators conf) (variables conf) (formula_size conf)
return $ case result $ Boolean.BDD.evaluate f of
Nothing -> []
Just s -> [ (f, abs $ O.size s - bdd_size conf) ]
return $ fst $ minimumBy ( compare `on` snd ) $ concat fs
roll_term ops vars s =
if s <= 1 then do
pick $ map ( \f -> Node f []) (B.nullary ops) ++ map Var vars
else do
op <- pick $ map Left (B.unary ops) ++ map Right (B.binary ops)
case op of
Left u -> do
arg <- roll_term ops vars $ s - 1
return $ Node u [ arg ]
Right b -> do
sl <- randomRIO (1, s-1) ; let sr = s - 1 - sl
arg1 <- roll_term ops vars sl
arg2 <- roll_term ops vars sr
return $ Node b [ arg1, arg2 ]
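-- An illustrative sketch (the exact shape depends on the configured operators
-- and variables): with the operators from config0 and variables [p,q],
-- roll_term might produce a size-3 formula such as
--   Node (read "&&") [ Var p, Node (read "not") [ Var q ] ]
-- i.e. a binary node splits the remaining size randomly between its subtrees.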
pick xs = do
i <- randomRIO ( 0, length xs - 1)
return $ xs !! i
| marcellussiegburg/autotool | collection/src/BDD/Quiz.hs | gpl-2.0 | 2,206 | 0 | 21 | 809 | 736 | 390 | 346 | 58 | 3 |
--------------------------------------------------------------------------------
-- Copyright (C) 1997, 1998, 2008 Joern Dinkla, www.dinkla.net
--------------------------------------------------------------------------------
--
-- see
-- Joern Dinkla, Geometrische Algorithmen in Haskell, Diploma Thesis,
-- University of Bonn, Germany, 1998.
--
module Main ( main ) where
import DelaunayDAG
import Applications.NearestPoint
import RBox ( readPoints2 )
import MetaPost
import System.Environment ( getArgs )
import System.Exit ( ExitCode (ExitFailure), exitWith )
import Line ( Line ( Segment ) )
import Point2 ( Point (mapP), P2 )
import Basics.DoubleEps
data Mode = N | DE deriving (Eq, Read, Show)
examine :: [String] -> IO (Mode, Double, String, String)
examine [mode, scale, file1, file2]
= return (read mode, read scale, file1, file2)
examine _
= do putStrLn "Synopsis: nearest {N|DE} scale file1 file2\n"
exitWith (ExitFailure 1)
main :: IO ()
main = do
args <- getArgs
(m, sc, f1, f2) <- examine args
(_, _, ps) <- readPoints2 f1
(_, _, qs) <- readPoints2 f2
putStrLn ("beginfig(1);\npicture p;" ++ pen 3)
putMP [Scaled sc] ps
putMP [Scaled sc, red] qs
putStrLn (pen 0.5)
if m == N
then do let dag = delaunay ps
-- putMP [Scaled sc] (map MP_Triangle (triangles dag))
putMP [Scaled sc] (triangles dag)
putStrLn (pen 1)
putMP [Scaled sc, green]
-- [MP_Line (Segment p (nearestPoint dag p)) | p <- qs ]
[Segment p (nearestPoint dag p) | p <- qs ]
else do let dag = delaunay (dbl ps)
--putMP [Scaled sc] (map MP_Triangle (triangles dag))
putMP [Scaled sc] (triangles dag)
putStrLn (pen 1)
putMP [Scaled sc, green] [Segment p (nearestPoint dag p) | p <- (dbl qs) ]
--[MP_Line (Segment p (nearestPoint dag p)) | p <- (dbl qs) ]
putStrLn ("endfig;\nend")
dbl :: [P2 Double] -> [P2 DoubleEps]
dbl = map (mapP DoubleEps)
| smoothdeveloper/GeoAlgLib | src/Tests/Nearest.hs | gpl-3.0 | 2,072 | 8 | 15 | 539 | 608 | 319 | 289 | 40 | 2 |
module Language.Mulang.Transform.Renamer (rename) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Language.Mulang.Ast
import Language.Mulang.Ast.Visitor
import Data.Maybe (catMaybes)
import Control.Monad.State
type RenameState a = State ReferencesMap a
data ReferencesMap = ReferencesMap {
variables :: Map String String,
parameters :: Map String String
} deriving Show
emptyReferencesMap :: ReferencesMap
emptyReferencesMap = ReferencesMap (Map.empty) (Map.empty)
rename :: Expression -> Expression
rename e = evalState (renameState e) emptyReferencesMap
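-- A quick sketch of the effect: local bindings get positional, collision-free
-- names, e.g.
--   rename (Variable "x" (Reference "x"))
--     == Variable "mulang_var_n0" (Reference "mulang_var_n0")
-- (parameters are renamed analogously to "mulang_param_n0", "mulang_param_n1", ...).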
renameState :: Expression -> RenameState Expression
renameState (Reference r) = renameReference r
renameState (Variable n e) = renameVariable n e
renameState e@(Exist _ _) = return e
renameState f@(Fact _ _) = return f
renameState f@(Findall _ _ _) = return f
renameState f@(Forall _ _) = return f
renameState n@(Not _) = return n
--
renameState (For stms e1) = do { stms' <- mapM renameStatement stms; e1' <- renameState e1; return $ For stms' e1' }
renameState (ForLoop i c a b) = do { [i', c', a', b'] <- mapM renameState [i, c, a, b]; return $ ForLoop i' c' a' b' }
renameState (Lambda ps e2) = do { e2' <- renameState e2; return $ Lambda ps e2' }
renameState (Match e1 eqs) = do { e1' <- renameState e1; eqs' <- renameEquations eqs; return $ Match e1' eqs' }
renameState (Send r e es) = do { (r':e':es') <- mapM renameState (r:e:es); return $ Send r' e' es' }
renameState (Switch v cs d) = do { v' <- renameState v; cs' <- renameSwitchCases cs; d' <- renameState d; return $ Switch v' cs' d' }
renameState (Try t cs f) = do { t' <- renameState t; cs' <- renameTryCases cs; f' <- renameState f; return $ Try t' cs' f' }
renameState a@(Assert _ _) = return a
renameState r@(Rule _ _ _) = return r
--
renameState (ExpressionAndExpressionsList e es c) = do { (e':es') <- mapM renameState (e:es); return $ c e' es' }
renameState (SingleEquationsList eqs c) = do { eqs' <- renameEquations eqs; return $ c eqs' }
renameState (SingleExpression e c) = do { e' <- renameState e; return $ c e' }
renameState (SingleExpressionsList es c) = do { es' <- mapM renameState es; return $ c es' }
renameState (ThreeExpressions e1 e2 e3 c) = do { [e1', e2', e3'] <- mapM renameState [e1, e2, e3]; return $ c e1' e2' e3' }
renameState (TwoExpressions e1 e2 c) = do { e1' <- renameState e1; e2' <- renameState e2; return $ c e1' e2' }
renameState e@(SinglePatternsList _ _) = return e
renameState e@Terminal = return e
renameTryCases = mapM (\(p, e) -> do { e' <- renameState e; return (p, e') })
renameSwitchCases = mapM (\(e1, e2) -> do { e1' <- renameState e1; e2' <- renameState e2; return (e1', e2') })
renameStatement :: Statement -> RenameState Statement
renameStatement (Generator p e) = do { p' <- renameParameter p; e' <- renameState e; return $ Generator p' e' }
renameStatement (Guard e) = do { e' <- renameState e; return $ Guard e' }
renameEquations :: [Equation] -> RenameState [Equation]
renameEquations equations = do
m <- get
equations' <- mapM renameEquation equations
put m
return equations'
renameEquation :: Equation -> RenameState Equation
renameEquation (Equation ps b) = do
ps' <- mapM renameParameter ps
b' <- renameEquationBody b
return $ Equation ps' b'
renameParameter :: Pattern -> RenameState Pattern
renameParameter (VariablePattern n) = fmap VariablePattern . createParameter $ n
renameParameter e = return e
renameEquationBody (UnguardedBody e) = fmap UnguardedBody . renameState $ e
renameEquationBody (GuardedBody es) = fmap GuardedBody . mapM renameGuard $ es
where
renameGuard (e1, e2) = do
e1' <- renameState e1
e2' <- renameState e2
return (e1', e2')
renameVariable :: String -> Expression -> RenameState Expression
renameVariable n e = do
n' <- createVariable n
e1' <- renameState e
return $ Variable n' e1'
renameReference :: String -> RenameState Expression
renameReference n = do
m <- get
return . Reference . head . catMaybes $ [lookupVariable n m, lookupParameter n m, Just n]
createVariable :: String -> RenameState String
createVariable n = do
m <- get
let n' = makeRef "mulang_var_n" variables m
put (m { variables = insertRef n n' variables m })
return n'
createParameter :: String -> RenameState String
createParameter n = do
m <- get
let n' = makeRef "mulang_param_n" parameters m
put (m { parameters = insertRef n n' parameters m })
return n'
makeRef :: String -> (ReferencesMap -> Map String String) -> ReferencesMap -> String
makeRef kind f = (kind++) . show . length . f
insertRef :: String -> String -> (ReferencesMap -> Map String String) -> ReferencesMap -> Map String String
insertRef n n' f = Map.insert n n' . f
lookupVariable :: String -> ReferencesMap -> (Maybe String)
lookupVariable n m = Map.lookup n (variables m)
lookupParameter :: String -> ReferencesMap -> (Maybe String)
lookupParameter n m = Map.lookup n (parameters m)
| mumuki/mulang | src/Language/Mulang/Transform/Renamer.hs | gpl-3.0 | 5,247 | 0 | 11 | 1,197 | 2,079 | 1,040 | 1,039 | 95 | 1 |
{-
This file is part of evolutionary-algorithms-sandbox.
evolutionary-algorithms-sandbox is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
evolutionary-algorithms-sandbox is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along with
evolutionary-algorithms-sandbox. If not, see <http://www.gnu.org/licenses/>.
-}
import System.Random
{- (1+1)-EA bitflip
- Jendrik Poloczek <[email protected]> -}
type Bit = Bool
type Probability = Float
onemax :: [Bit] -> Int
onemax bits = sum (map convert bits)
  where convert True  = 1
        convert False = 0
flips :: Probability -> [Probability] -> [Bit]
flips alpha randoms = map (cut alpha) randoms
where cut alpha p
| p < alpha = False
| otherwise = True
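-- Example (illustrative): with alpha = 0.25,
--   flips 0.25 [0.1, 0.3, 0.9, 0.2] == [False, True, True, False]
-- i.e. a position is marked False exactly when its random draw falls below alpha.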
recurse :: [Bit] -> [Float] -> Float -> [[Bit]]
recurse bits rnds alpha
| onemax bits == length bits = [bits]
| otherwise = bits : continue bits rnds alpha
mutate :: [Bit] -> [Bit] -> [Bit]
mutate bits flips = map flipit (zip bits flips)
where flipit (x,y) = (not x) && y
continue :: [Bit] -> [Float] -> Float -> [[Bit]]
continue bits rnds alpha = recurse mutated rnds' alpha
where
mutated = mutate bits (flips alpha randomfloats)
rnds' = drop (length bits) rnds
randomfloats = take (length bits) rnds
main :: IO ()
main = do
putStrLn (show (recurse example randomlist alpha))
where
alpha = 0.25
example = [False, False, False, False]
randomlist = ((randoms (mkStdGen 42)) :: [Float])
| jpzk/ea-sandbox | bitflip/bitflip.hs | gpl-3.0 | 1,996 | 0 | 11 | 468 | 511 | 267 | 244 | 31 | 1 |
{-# LANGUAGE ConstraintKinds #-}
module Constraints.Vector
( SomeVector
) where
import Linear
import Data.Functor.Rep
-- Constraint for vector type variables.
type SomeVector v =
( Applicative v
, Traversable v
, Metric v
, Representable v
, Ord (v Int)
)
| MatthiasHu/4d-labyrinth | src/Constraints/Vector.hs | gpl-3.0 | 276 | 0 | 8 | 61 | 65 | 38 | 27 | 11 | 0 |
module Main (main) where
import qualified Utility.TSLogAnalyzer
main :: IO ()
main = Utility.TSLogAnalyzer.main
| taktoa/TSLogAnalyzer | executable/Main.hs | gpl-3.0 | 114 | 0 | 6 | 16 | 33 | 20 | 13 | 4 | 1 |
module FormalLanguage.GrammarProduct.Op.Add where
import Control.Lens hiding (outside,indices)
import Control.Lens.Fold
import "newtype" Control.Newtype
import Data.List (genericReplicate)
import Data.Monoid hiding ((<>))
import Data.Semigroup
import qualified Data.Set as S
import Text.Printf
import Data.Default
import FormalLanguage.CFG.Grammar
import FormalLanguage.GrammarProduct.Op.Common
-- |
add :: Grammar -> Grammar -> Grammar
add l r = runAdd $ Add l <> Add r
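-- A conceptual sketch (not tied to concrete syntax): if grammar g has the
-- rules {S -> a S, S -> eps} and grammar h has {S -> b S, S -> eps}, then
-- add g h contains the union of both rule sets, with the shared rule
-- S -> eps appearing only once.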
-- | Add two grammars. Implemented as the union of production rules without any
-- renaming.
newtype Add a = Add {runAdd :: a}
-- | Note that the semigroup on Add will create a new rule S_gh -> S_g | S_h in
-- case two start symbols with different rhs exist (If S_g, S_h are the same,
-- there is no problem).
instance Semigroup (Add Grammar) where
(Add l) <> (Add r)
| Left err <- opCompatible l r = error err
| otherwise = Add $ Grammar (l^.synvars <> r^.synvars)
(l^.synterms <> r^.synterms) -- TODO add the newly created symbol to the non-terminals (or maybe just run ``fix T+N 's from the rules?'')
(l^.termvars <> r^.termvars)
(l^.outside)
(l^.rules <> r^.rules) --
s
(l^.params <> r^.params)
(l^.indices <> r^.indices)
(l^.grammarName <> r^.grammarName)
False
where s | l^.start == r^.start = l^.start
| l^.start /= mempty && r^.start /= mempty = l^.start -- error "add new start symbol" -- TODO to be discussed ?!
| l^.start == mempty = r^.start
| r^.start == mempty = l^.start
instance Monoid (Add Grammar) where
mempty = Add def
mappend = (<>)
-- idempotency is not made explicit here
| choener/GrammarProducts | FormalLanguage/GrammarProduct/Op/Add.hs | gpl-3.0 | 1,971 | 0 | 15 | 654 | 491 | 268 | 223 | -1 | -1 |
{-# language PatternSignatures #-}
{-# language DeriveDataTypeable #-}
module Game where
import Prelude hiding ( catch )
import Spieler
import Wurf
import State
import Bank
import Registrar
import Call
import Rating
import qualified Data.Map as M
import Data.Typeable
import System.IO
import Control.Monad ( when, void, forM, forM_ )
import System.Random
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import qualified System.Timeout
import Network.XmlRpc.Client
import Data.Acid ( update )
-- | choose a subset of players (with at least two)
-- have them play a game, record the result
game :: Server -> IO ()
game server = void $ do
xs <- select_players server
Control.Exception.catch ( do
message server $ Game xs
verify_callbacks server xs
winner <- play_game server xs
message server $ Game_Won_By winner
( forM xs $ \ y -> ignore_errors server
( logged1 server y "Player.game_won_by" ( name winner ) :: IO Bool ) )
process_regular_game_result server xs winner
) $ \ ( e :: SomeException ) -> do
process_offenses server
select_players server = do
xs <- atomically $ do
m <- readTVar $ registry server
check $ M.size m >= 2
return $ M.elems m
ys <- permute xs
n <- randomRIO ( 2, length ys )
return $ take n ys
permute :: [a] -> IO [a]
permute [] = return []
permute xs = do
k <- randomRIO ( 0, length xs - 1 )
let (pre, this : post ) = splitAt k xs
rest <- permute $ pre ++ post
return $ this : rest
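-- A short sketch of the shuffle: each step draws one element uniformly at
-- random and puts it in front of a recursive shuffle of the remainder, so e.g.
--   permute [1,2,3]
-- may evaluate to any of the six orderings with equal probability.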
------------------------------------------------------
verify_callbacks :: Server -> [ Spieler ] -> IO ()
verify_callbacks server xs = forM_ xs $ \ x -> do
res <- logged0 server x "Player.who_are_you"
when ( name x /= res ) $ do
message server $ Callback_Mismatch x res
add_offender server x
throwIO $ ProtocolE x
-- | Result: the winner (all the others are out)
play_game :: Server -> [ Spieler ] -> IO Spieler
play_game server ys = bracket_
( forM ys $ \ y -> ignore_errors server ( logged0 server y "Player.begin_game" :: IO Bool ) )
( forM ys $ \ y -> ignore_errors server ( logged0 server y "Player.end_game" :: IO Bool ) ) $ do
continue_game server ys
continue_game server ys = case ys of
[] -> error "play_game []"
[winner] -> return winner
_ -> do
( loser, rest ) <- play_round server ys
continue_game server rest
-- | Result: the loser and the rest (who may keep playing)
play_round :: Server
-> [ Spieler ] -> IO (Spieler, [Spieler])
play_round server (s : ss) = bracket_
( forM (s:ss) $ \ y -> ignore_errors server ( logged0 server y "Player.begin_round" :: IO Bool ) )
( forM (s:ss) $ \ y -> ignore_errors server ( logged0 server y "Player.end_round" :: IO Bool ) ) $ do
message server $ Round (s:ss)
w <- roll
w' <- logged1 server s "Player.say" w
(loser, rest) <- continue_round server (ss ++ [s]) (w, w')
message server $ Round_Lost_By loser
( forM (s:ss) $ \ y -> ignore_errors server
( logged1 server y "Player.round_lost_by" ( name loser ) :: IO Bool ) )
return (loser, rest)
continue_round server (s : ss) (echt, ansage) = do
message server $ Bid ansage
threadDelay $ 10^6
forM ss $ \ s' -> ( logged1 server s' "Player.other" ansage :: IO Bool )
a <- logged1 server s "Player.accept" ansage
if a
then do -- keep playing
echt' <- Wurf.roll
ansage' <- logged1 server s "Player.say" echt'
if ansage' <= ansage
then return (s, ss) -- verloren
else continue_round server (ss ++ [s]) ( echt', ansage' )
else do -- reveal (call the bluff)
if echt >= ansage
then return ( s, ss )
else return ( last ss , s : init ss )
--------------------------------------------------------------------
| jwaldmann/mex | src/Game.hs | gpl-3.0 | 4,147 | 0 | 20 | 1,236 | 1,334 | 668 | 666 | 97 | 4 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module TestUtil where
import Test.SmallCheck
import Test.SmallCheck.Series
import Control.Applicative
import Music.Muse.Pitch
instance Monad m => Serial m PitchClass where
series = cons0 PC0 \/ cons0 PC1 \/ cons0 PC2 \/ cons0 PC3
\/ cons0 PC4 \/ cons0 PC5 \/ cons0 PC6 \/ cons0 PC7
\/ cons0 PC8 \/ cons0 PC9 \/ cons0 PC10 \/ cons0 PC11
instance Monad m => Serial m Pitch where
series = cons2 Pitch
suchThat :: Monad m => Series m a -> (a -> Bool) -> Series m a
suchThat s p = s >>= \x -> if p x then pure x else empty
nonZero :: (Serial m a, Monad m, Eq a, Num a) => Series m a
nonZero = series `suchThat` (/= 0)
naturalTill :: (Serial m a, Monad m, Eq a, Ord a, Num a) => a -> Series m a
naturalTill n = series `suchThat` (\x -> x >= 0 && x < n)
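-- A brief sketch of the intended use: 'nonZero' never yields 0 and
-- 'naturalTill 3' only yields values from {0, 1, 2}; both simply filter the
-- underlying 'series' with 'suchThat'.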
| alxgnon/muse | test/TestUtil.hs | gpl-3.0 | 843 | 0 | 17 | 197 | 361 | 184 | 177 | 18 | 2 |
import Rsa
import Data.Char
import Data.List
import Data.List.Split
import System.Environment
import System.IO
import System.IO.Error
padd :: [String]->[Int]
padd x = map (foldl (\a b -> 1000 * a + b) 0) enc_blocks
where enc_blocks = map (map ord) x
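-- A worked example (illustrative): padd ["AB"] == [65066], since
-- ord 'A' = 65 and ord 'B' = 66, and the fold packs them as 65*1000 + 66.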
main = do
args <- getArgs
input <- readFile (head args)
keys <- readFile ((args !! 1) ++ "_pub.key")
let blocks = splitEvery 6 input
padded = padd blocks
key = splitOn ";" keys
enc_blocks = map (\m -> cypher m ((read(key!!0)::Integer),(read(key!!1)::Integer))) (map toInteger padded)
enc_file = foldl (\a b -> a ++ (show b) ++ ";") "" enc_blocks
writeFile (args !! 2) enc_file
return ()
| h3nnn4n/rsa-haskell | encrypt.hs | gpl-3.0 | 705 | 0 | 18 | 173 | 320 | 167 | 153 | 21 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Compute.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Compute.Types
(
-- * Service Configuration
computeService
-- * OAuth Scopes
, computeScope
, cloudPlatformScope
, storageReadOnlyScope
, storageReadWriteScope
, computeReadOnlyScope
, storageFullControlScope
-- * TargetHTTPSProxyList
, TargetHTTPSProxyList
, targetHTTPSProxyList
, thplNextPageToken
, thplKind
, thplItems
, thplSelfLink
, thplId
-- * RoutersScopedList
, RoutersScopedList
, routersScopedList
, rslRouters
, rslWarning
-- * RouterStatusResponse
, RouterStatusResponse
, routerStatusResponse
, rsrKind
, rsrResult
-- * RegionInstanceGroupManagersDeleteInstancesRequest
, RegionInstanceGroupManagersDeleteInstancesRequest
, regionInstanceGroupManagersDeleteInstancesRequest
, rigmdirInstances
-- * AddressesScopedList
, AddressesScopedList
, addressesScopedList
, aslAddresses
, aslWarning
-- * OperationWarningsItemDataItem
, OperationWarningsItemDataItem
, operationWarningsItemDataItem
, owidiValue
, owidiKey
-- * SchedulingOnHostMaintenance
, SchedulingOnHostMaintenance (..)
-- * RegionInstanceGroupsListInstancesRequest
, RegionInstanceGroupsListInstancesRequest
, regionInstanceGroupsListInstancesRequest
, riglirInstanceState
, riglirPortName
-- * AutoscalingPolicyCustomMetricUtilizationUtilizationTargetType
, AutoscalingPolicyCustomMetricUtilizationUtilizationTargetType (..)
-- * InstanceGroupManagersAbandonInstancesRequest
, InstanceGroupManagersAbandonInstancesRequest
, instanceGroupManagersAbandonInstancesRequest
, igmairInstances
-- * MachineTypeAggregatedListItems
, MachineTypeAggregatedListItems
, machineTypeAggregatedListItems
, mtaliAddtional
-- * DiskTypeAggregatedListItems
, DiskTypeAggregatedListItems
, diskTypeAggregatedListItems
, dtaliAddtional
-- * RouterAggregatedList
, RouterAggregatedList
, routerAggregatedList
, ralNextPageToken
, ralKind
, ralItems
, ralSelfLink
, ralId
-- * FirewallList
, FirewallList
, firewallList
, flNextPageToken
, flKind
, flItems
, flSelfLink
, flId
-- * InstancesScopedListWarning
, InstancesScopedListWarning
, instancesScopedListWarning
, islwData
, islwCode
, islwMessage
-- * RegionInstanceGroupManagersRecreateRequest
, RegionInstanceGroupManagersRecreateRequest
, regionInstanceGroupManagersRecreateRequest
, rigmrrInstances
-- * BackendServicesScopedListWarning
, BackendServicesScopedListWarning
, backendServicesScopedListWarning
, bsslwData
, bsslwCode
, bsslwMessage
-- * InstanceGroupList
, InstanceGroupList
, instanceGroupList
, iglNextPageToken
, iglKind
, iglItems
, iglSelfLink
, iglId
-- * InstancesSetMachineTypeRequest
, InstancesSetMachineTypeRequest
, instancesSetMachineTypeRequest
, ismtrMachineType
-- * CustomerEncryptionKey
, CustomerEncryptionKey
, customerEncryptionKey
, cekSha256
, cekRawKey
-- * AutoscalerAggregatedListItems
, AutoscalerAggregatedListItems
, autoscalerAggregatedListItems
, aaliAddtional
-- * InstanceGroupManagersSetInstanceTemplateRequest
, InstanceGroupManagersSetInstanceTemplateRequest
, instanceGroupManagersSetInstanceTemplateRequest
, igmsitrInstanceTemplate
-- * DeprecationStatus
, DeprecationStatus
, deprecationStatus
, dsState
, dsDeleted
, dsReplacement
, dsObsolete
, dsDeprecated
-- * OperationWarningsItemCode
, OperationWarningsItemCode (..)
-- * Snapshot
, Snapshot
, snapshot
, sStorageBytesStatus
, sStatus
, sDiskSizeGb
, sSourceDiskId
, sKind
, sSourceDiskEncryptionKey
, sStorageBytes
, sSelfLink
, sSnapshotEncryptionKey
, sName
, sCreationTimestamp
, sId
, sLicenses
, sSourceDisk
, sDescription
-- * RouterStatus
, RouterStatus
, routerStatus
, rsBGPPeerStatus
, rsNetwork
, rsBestRoutes
-- * AutoscalingPolicyCustomMetricUtilization
, AutoscalingPolicyCustomMetricUtilization
, autoscalingPolicyCustomMetricUtilization
, apcmuUtilizationTarget
, apcmuMetric
, apcmuUtilizationTargetType
-- * ForwardingRuleList
, ForwardingRuleList
, forwardingRuleList
, frlNextPageToken
, frlKind
, frlItems
, frlSelfLink
, frlId
-- * VPNTunnelsScopedList
, VPNTunnelsScopedList
, vpnTunnelsScopedList
, vtslVPNTunnels
, vtslWarning
-- * BackendServiceProtocol
, BackendServiceProtocol (..)
-- * InstanceGroupsSetNamedPortsRequest
, InstanceGroupsSetNamedPortsRequest
, instanceGroupsSetNamedPortsRequest
, igsnprFingerprint
, igsnprNamedPorts
-- * OperationList
, OperationList
, operationList
, olNextPageToken
, olKind
, olItems
, olSelfLink
, olId
-- * DiskList
, DiskList
, diskList
, dlNextPageToken
, dlKind
, dlItems
, dlSelfLink
, dlId
-- * TargetPoolsAddInstanceRequest
, TargetPoolsAddInstanceRequest
, targetPoolsAddInstanceRequest
, tpairInstances
-- * RegionAutoscalerList
, RegionAutoscalerList
, regionAutoscalerList
, rNextPageToken
, rKind
, rItems
, rSelfLink
, rId
-- * InstanceGroupsAddInstancesRequest
, InstanceGroupsAddInstancesRequest
, instanceGroupsAddInstancesRequest
, igairInstances
-- * InstanceGroupManagerList
, InstanceGroupManagerList
, instanceGroupManagerList
, igmlNextPageToken
, igmlKind
, igmlItems
, igmlSelfLink
, igmlId
-- * SubnetworksScopedListWarning
, SubnetworksScopedListWarning
, subnetworksScopedListWarning
, sslwData
, sslwCode
, sslwMessage
-- * AttachedDiskType
, AttachedDiskType (..)
-- * Image
, Image
, image
, iStatus
, iImageEncryptionKey
, iDiskSizeGb
, iSourceType
, iSourceDiskId
, iKind
, iSourceDiskEncryptionKey
, iGuestOSFeatures
, iArchiveSizeBytes
, iFamily
, iRawDisk
, iSelfLink
, iName
, iCreationTimestamp
, iId
, iLicenses
, iSourceDisk
, iDescription
, iDeprecated
-- * URLMap
, URLMap
, urlMap
, umTests
, umKind
, umFingerprint
, umDefaultService
, umSelfLink
, umName
, umCreationTimestamp
, umPathMatchers
, umId
, umHostRules
, umDescription
-- * InstanceGroupAggregatedListItems
, InstanceGroupAggregatedListItems
, instanceGroupAggregatedListItems
, igaliAddtional
-- * TargetPoolList
, TargetPoolList
, targetPoolList
, tplNextPageToken
, tplKind
, tplItems
, tplSelfLink
, tplId
-- * TargetInstanceAggregatedList
, TargetInstanceAggregatedList
, targetInstanceAggregatedList
, tialNextPageToken
, tialKind
, tialItems
, tialSelfLink
, tialId
-- * DisksScopedList
, DisksScopedList
, disksScopedList
, dslWarning
, dslDisks
-- * InstanceGroupManagersScopedList
, InstanceGroupManagersScopedList
, instanceGroupManagersScopedList
, igmslWarning
, igmslInstanceGroupManagers
-- * HealthCheck
, HealthCheck
, healthCheck
, hcHealthyThreshold
, hcTCPHealthCheck
, hcKind
, hcSSLHealthCheck
, hcSelfLink
, hcCheckIntervalSec
, hcName
, hcCreationTimestamp
, hcHTTPHealthCheck
, hcId
, hcType
, hcTimeoutSec
, hcDescription
, hcUnhealthyThreshold
, hcHTTPSHealthCheck
-- * TargetSSLProxyProxyHeader
, TargetSSLProxyProxyHeader (..)
-- * DiskAggregatedList
, DiskAggregatedList
, diskAggregatedList
, dalNextPageToken
, dalKind
, dalItems
, dalSelfLink
, dalId
-- * InstanceWithNamedPorts
, InstanceWithNamedPorts
, instanceWithNamedPorts
, iwnpStatus
, iwnpNamedPorts
, iwnpInstance
-- * ForwardingRulesScopedList
, ForwardingRulesScopedList
, forwardingRulesScopedList
, frslWarning
, frslForwardingRules
-- * InstanceReference
, InstanceReference
, instanceReference
, iInstance
-- * OperationAggregatedList
, OperationAggregatedList
, operationAggregatedList
, oalNextPageToken
, oalKind
, oalItems
, oalSelfLink
, oalId
-- * OperationsScopedList
, OperationsScopedList
, operationsScopedList
, oslWarning
, oslOperations
-- * NamedPort
, NamedPort
, namedPort
, npName
, npPort
-- * RegionInstanceGroupsListInstancesRequestInstanceState
, RegionInstanceGroupsListInstancesRequestInstanceState (..)
-- * TargetInstanceList
, TargetInstanceList
, targetInstanceList
, tilNextPageToken
, tilKind
, tilItems
, tilSelfLink
, tilId
-- * InstanceGroupManagerAggregatedList
, InstanceGroupManagerAggregatedList
, instanceGroupManagerAggregatedList
, igmalNextPageToken
, igmalKind
, igmalItems
, igmalSelfLink
, igmalId
-- * ImageSourceType
, ImageSourceType (..)
-- * TargetPoolsScopedList
, TargetPoolsScopedList
, targetPoolsScopedList
, tpslWarning
, tpslTargetPools
-- * ForwardingRuleAggregatedList
, ForwardingRuleAggregatedList
, forwardingRuleAggregatedList
, fralNextPageToken
, fralKind
, fralItems
, fralSelfLink
, fralId
-- * TargetReference
, TargetReference
, targetReference
, trTarget
-- * TargetPoolAggregatedList
, TargetPoolAggregatedList
, targetPoolAggregatedList
, tpalNextPageToken
, tpalKind
, tpalItems
, tpalSelfLink
, tpalId
-- * OperationsScopedListWarningDataItem
, OperationsScopedListWarningDataItem
, operationsScopedListWarningDataItem
, oslwdiValue
, oslwdiKey
-- * BackendServiceSessionAffinity
, BackendServiceSessionAffinity (..)
-- * TargetPool
, TargetPool
, targetPool
, tpSessionAffinity
, tpBackupPool
, tpKind
, tpSelfLink
, tpName
, tpCreationTimestamp
, tpInstances
, tpId
, tpFailoverRatio
, tpRegion
, tpDescription
, tpHealthChecks
-- * ImageList
, ImageList
, imageList
, ilNextPageToken
, ilKind
, ilItems
, ilSelfLink
, ilId
-- * VPNTunnelsScopedListWarning
, VPNTunnelsScopedListWarning
, vpnTunnelsScopedListWarning
, vtslwData
, vtslwCode
, vtslwMessage
-- * ForwardingRulesScopedListWarningCode
, ForwardingRulesScopedListWarningCode (..)
-- * OperationsScopedListWarningCode
, OperationsScopedListWarningCode (..)
-- * TargetSSLProxiesSetBackendServiceRequest
, TargetSSLProxiesSetBackendServiceRequest
, targetSSLProxiesSetBackendServiceRequest
, tspsbsrService
-- * ForwardingRule
, ForwardingRule
, forwardingRule
, frIPAddress
, frLoadBalancingScheme
, frKind
, frNetwork
, frPortRange
, frSelfLink
, frName
, frIPProtocol
, frCreationTimestamp
, frSubnetwork
, frPorts
, frId
, frRegion
, frDescription
, frTarget
, frBackendService
-- * URLMapList
, URLMapList
, urlMapList
, umlNextPageToken
, umlKind
, umlItems
, umlSelfLink
, umlId
-- * ForwardingRulesScopedListWarningDataItem
, ForwardingRulesScopedListWarningDataItem
, forwardingRulesScopedListWarningDataItem
, frslwdiValue
, frslwdiKey
-- * InstanceGroupManagersScopedListWarningDataItem
, InstanceGroupManagersScopedListWarningDataItem
, instanceGroupManagersScopedListWarningDataItem
, igmslwdiValue
, igmslwdiKey
-- * SubnetworksScopedList
, SubnetworksScopedList
, subnetworksScopedList
, sslSubnetworks
, sslWarning
-- * DisksScopedListWarningCode
, DisksScopedListWarningCode (..)
-- * Project
, Project
, project
, pKind
, pUsageExportLocation
, pSelfLink
, pName
, pDefaultServiceAccount
, pCreationTimestamp
, pEnabledFeatures
, pQuotas
, pId
, pDescription
, pCommonInstanceMetadata
-- * RegionInstanceGroupManagersListInstancesResponse
, RegionInstanceGroupManagersListInstancesResponse
, regionInstanceGroupManagersListInstancesResponse
, rigmlirManagedInstances
-- * Operation
, Operation
, operation
, oTargetId
, oStatus
, oInsertTime
, oProgress
, oStartTime
, oKind
, oError
, oHTTPErrorMessage
, oZone
, oWarnings
, oHTTPErrorStatusCode
, oUser
, oSelfLink
, oName
, oStatusMessage
, oCreationTimestamp
, oEndTime
, oId
, oOperationType
, oRegion
, oDescription
, oTargetLink
, oClientOperationId
-- * DisksScopedListWarningDataItem
, DisksScopedListWarningDataItem
, disksScopedListWarningDataItem
, dslwdiValue
, dslwdiKey
-- * InstanceGroupManagersScopedListWarningCode
, InstanceGroupManagersScopedListWarningCode (..)
-- * Disk
, Disk
, disk
, dStatus
, dSourceSnapshotId
, dLastAttachTimestamp
, dUsers
, dSourceImage
, dDiskEncryptionKey
, dSizeGb
, dKind
, dLastDetachTimestamp
, dZone
, dSelfLink
, dName
, dSourceImageId
, dCreationTimestamp
, dSourceImageEncryptionKey
, dId
, dLicenses
, dOptions
, dType
, dDescription
, dSourceSnapshotEncryptionKey
, dSourceSnapshot
-- * DiskMoveRequest
, DiskMoveRequest
, diskMoveRequest
, dmrTargetDisk
, dmrDestinationZone
-- * AutoscalingPolicyLoadBalancingUtilization
, AutoscalingPolicyLoadBalancingUtilization
, autoscalingPolicyLoadBalancingUtilization
, aplbuUtilizationTarget
-- * TargetPoolsScopedListWarningDataItem
, TargetPoolsScopedListWarningDataItem
, targetPoolsScopedListWarningDataItem
, tpslwdiValue
, tpslwdiKey
-- * InstanceGroupManager
, InstanceGroupManager
, instanceGroupManager
, igmKind
, igmFingerprint
, igmBaseInstanceName
, igmZone
, igmInstanceTemplate
, igmTargetSize
, igmSelfLink
, igmCurrentActions
, igmName
, igmCreationTimestamp
, igmId
, igmRegion
, igmTargetPools
, igmDescription
, igmInstanceGroup
, igmNamedPorts
-- * RegionInstanceGroupsListInstances
, RegionInstanceGroupsListInstances
, regionInstanceGroupsListInstances
, rigliNextPageToken
, rigliKind
, rigliItems
, rigliSelfLink
, rigliId
-- * TCPHealthCheck
, TCPHealthCheck
, tcpHealthCheck
, thcResponse
, thcProxyHeader
, thcPortName
, thcPort
, thcRequest
-- * TargetPoolsScopedListWarningCode
, TargetPoolsScopedListWarningCode (..)
-- * SSLHealthCheckProxyHeader
, SSLHealthCheckProxyHeader (..)
-- * TargetVPNGatewayStatus
, TargetVPNGatewayStatus (..)
-- * InstanceGroupsRemoveInstancesRequest
, InstanceGroupsRemoveInstancesRequest
, instanceGroupsRemoveInstancesRequest
, igrirInstances
-- * SnapshotStatus
, SnapshotStatus (..)
-- * RouteWarningsItemDataItem
, RouteWarningsItemDataItem
, routeWarningsItemDataItem
, rwidiValue
, rwidiKey
-- * TargetInstancesScopedListWarningCode
, TargetInstancesScopedListWarningCode (..)
-- * BackendServiceAggregatedListItems
, BackendServiceAggregatedListItems
, backendServiceAggregatedListItems
, bsaliAddtional
-- * InstanceAggregatedListItems
, InstanceAggregatedListItems
, instanceAggregatedListItems
, ialiAddtional
-- * AutoscalersScopedListWarning
, AutoscalersScopedListWarning
, autoscalersScopedListWarning
, aslwData
, aslwCode
, aslwMessage
-- * HealthCheckList
, HealthCheckList
, healthCheckList
, hclNextPageToken
, hclKind
, hclItems
, hclSelfLink
, hclId
-- * ManagedInstanceLastAttemptErrors
, ManagedInstanceLastAttemptErrors
, managedInstanceLastAttemptErrors
, milaeErrors
-- * GuestOSFeatureType
, GuestOSFeatureType (..)
-- * RouteWarningsItemCode
, RouteWarningsItemCode (..)
-- * TargetPoolsRemoveInstanceRequest
, TargetPoolsRemoveInstanceRequest
, targetPoolsRemoveInstanceRequest
, tprirInstances
-- * TargetInstancesScopedListWarningDataItem
, TargetInstancesScopedListWarningDataItem
, targetInstancesScopedListWarningDataItem
, tislwdiValue
, tislwdiKey
-- * MachineTypesScopedListWarning
, MachineTypesScopedListWarning
, machineTypesScopedListWarning
, mtslwData
, mtslwCode
, mtslwMessage
-- * TargetInstance
, TargetInstance
, targetInstance
, tiKind
, tiNATPolicy
, tiZone
, tiSelfLink
, tiName
, tiCreationTimestamp
, tiId
, tiDescription
, tiInstance
-- * TargetPoolInstanceHealth
, TargetPoolInstanceHealth
, targetPoolInstanceHealth
, tpihKind
, tpihHealthStatus
-- * SnapshotStorageBytesStatus
, SnapshotStorageBytesStatus (..)
-- * InstanceGroupManagersListManagedInstancesResponse
, InstanceGroupManagersListManagedInstancesResponse
, instanceGroupManagersListManagedInstancesResponse
, igmlmirManagedInstances
-- * InstanceProperties
, InstanceProperties
, instanceProperties
, ipServiceAccounts
, ipNetworkInterfaces
, ipMachineType
, ipMetadata
, ipScheduling
, ipDisks
, ipCanIPForward
, ipDescription
, ipTags
-- * DiskTypesScopedListWarning
, DiskTypesScopedListWarning
, diskTypesScopedListWarning
, dtslwData
, dtslwCode
, dtslwMessage
-- * AddressesScopedListWarningCode
, AddressesScopedListWarningCode (..)
-- * AttachedDiskInitializeParams
, AttachedDiskInitializeParams
, attachedDiskInitializeParams
, adipSourceImage
, adipDiskSizeGb
, adipDiskName
, adipSourceImageEncryptionKey
, adipDiskType
-- * AddressesScopedListWarningDataItem
, AddressesScopedListWarningDataItem
, addressesScopedListWarningDataItem
, aslwdiValue
, aslwdiKey
-- * ImageStatus
, ImageStatus (..)
-- * NetworkInterface
, NetworkInterface
, networkInterface
, niKind
, niNetwork
, niName
, niNetworkIP
, niSubnetwork
, niAccessConfigs
-- * TargetPoolsRemoveHealthCheckRequest
, TargetPoolsRemoveHealthCheckRequest
, targetPoolsRemoveHealthCheckRequest
, tprhcrHealthChecks
-- * RegionInstanceGroupManagersSetTargetPoolsRequest
, RegionInstanceGroupManagersSetTargetPoolsRequest
, regionInstanceGroupManagersSetTargetPoolsRequest
, rigmstprFingerprint
, rigmstprTargetPools
-- * TargetSSLProxyList
, TargetSSLProxyList
, targetSSLProxyList
, tsplNextPageToken
, tsplKind
, tsplItems
, tsplSelfLink
, tsplId
-- * CustomerEncryptionKeyProtectedDisk
, CustomerEncryptionKeyProtectedDisk
, customerEncryptionKeyProtectedDisk
, cekpdDiskEncryptionKey
, cekpdSource
-- * HealthStatusHealthState
, HealthStatusHealthState (..)
-- * InstanceTemplateList
, InstanceTemplateList
, instanceTemplateList
, itlNextPageToken
, itlKind
, itlItems
, itlSelfLink
, itlId
-- * RouteList
, RouteList
, routeList
, rlNextPageToken
, rlKind
, rlItems
, rlSelfLink
, rlId
-- * DeprecationStatusState
, DeprecationStatusState (..)
-- * Router
, Router
, router
, rouBGPPeers
, rouBGP
, rouKind
, rouNetwork
, rouInterfaces
, rouSelfLink
, rouName
, rouCreationTimestamp
, rouId
, rouRegion
, rouDescription
-- * RoutersScopedListWarningCode
, RoutersScopedListWarningCode (..)
-- * RoutersScopedListWarningDataItem
, RoutersScopedListWarningDataItem
, routersScopedListWarningDataItem
, rslwdiValue
, rslwdiKey
-- * ManagedInstanceCurrentAction
, ManagedInstanceCurrentAction (..)
-- * TargetVPNGatewayList
, TargetVPNGatewayList
, targetVPNGatewayList
, tvglNextPageToken
, tvglKind
, tvglItems
, tvglSelfLink
, tvglId
-- * TargetInstanceNATPolicy
, TargetInstanceNATPolicy (..)
-- * SSLCertificateList
, SSLCertificateList
, sslCertificateList
, sclNextPageToken
, sclKind
, sclItems
, sclSelfLink
, sclId
-- * FirewallAllowedItem
, FirewallAllowedItem
, firewallAllowedItem
, faiIPProtocol
, faiPorts
-- * BackendServiceAggregatedList
, BackendServiceAggregatedList
, backendServiceAggregatedList
, bsalNextPageToken
, bsalKind
, bsalItems
, bsalSelfLink
, bsalId
-- * Network
, Network
, network
, nAutoCreateSubnetworks
, nKind
, nSubnetworks
, nIPv4Range
, nSelfLink
, nName
, nCreationTimestamp
, nId
, nGatewayIPv4
, nDescription
-- * RoutersScopedListWarning
, RoutersScopedListWarning
, routersScopedListWarning
, rslwData
, rslwCode
, rslwMessage
-- * AccessConfigType
, AccessConfigType (..)
-- * AddressesScopedListWarning
, AddressesScopedListWarning
, addressesScopedListWarning
, aData
, aCode
, aMessage
-- * ImageRawDisk
, ImageRawDisk
, imageRawDisk
, irdContainerType
, irdSource
, irdSha1Checksum
-- * InstanceAggregatedList
, InstanceAggregatedList
, instanceAggregatedList
, ialNextPageToken
, ialKind
, ialItems
, ialSelfLink
, ialId
-- * SSLHealthCheck
, SSLHealthCheck
, sslHealthCheck
, shcResponse
, shcProxyHeader
, shcPortName
, shcPort
, shcRequest
-- * Address
, Address
, address
, aStatus
, aUsers
, aKind
, aAddress
, aSelfLink
, aName
, aCreationTimestamp
, aId
, aRegion
, aDescription
-- * Zone
, Zone
, zone
, zStatus
, zKind
, zSelfLink
, zName
, zCreationTimestamp
, zId
, zRegion
, zDescription
, zDeprecated
-- * RouterBGP
, RouterBGP
, routerBGP
, rbASN
-- * BackendServicesScopedList
, BackendServicesScopedList
, backendServicesScopedList
, bsslWarning
, bsslBackendServices
-- * InstanceGroupManagersRecreateInstancesRequest
, InstanceGroupManagersRecreateInstancesRequest
, instanceGroupManagersRecreateInstancesRequest
, igmrirInstances
-- * TargetSSLProxiesSetSSLCertificatesRequest
, TargetSSLProxiesSetSSLCertificatesRequest
, targetSSLProxiesSetSSLCertificatesRequest
, tspsscrSSLCertificates
-- * InstancesScopedList
, InstancesScopedList
, instancesScopedList
, islWarning
, islInstances
-- * BackendServiceLoadBalancingScheme
, BackendServiceLoadBalancingScheme (..)
-- * HealthCheckReference
, HealthCheckReference
, healthCheckReference
, hcrHealthCheck
-- * TargetInstanceAggregatedListItems
, TargetInstanceAggregatedListItems
, targetInstanceAggregatedListItems
, tialiAddtional
-- * InstanceGroupAggregatedList
, InstanceGroupAggregatedList
, instanceGroupAggregatedList
, igalNextPageToken
, igalKind
, igalItems
, igalSelfLink
, igalId
-- * OperationStatus
, OperationStatus (..)
-- * Route
, Route
, route
, rrPriority
, rrKind
, rrNextHopGateway
, rrNextHopNetwork
, rrNetwork
, rrWarnings
, rrNextHopIP
, rrDestRange
, rrSelfLink
, rrName
, rrCreationTimestamp
, rrId
, rrNextHopVPNTunnel
, rrDescription
, rrTags
, rrNextHopInstance
-- * TargetVPNGatewaysScopedListWarningDataItem
, TargetVPNGatewaysScopedListWarningDataItem
, targetVPNGatewaysScopedListWarningDataItem
, tvgslwdiValue
, tvgslwdiKey
-- * TargetVPNGatewaysScopedListWarningCode
, TargetVPNGatewaysScopedListWarningCode (..)
-- * TargetHTTPSProxiesSetSSLCertificatesRequest
, TargetHTTPSProxiesSetSSLCertificatesRequest
, targetHTTPSProxiesSetSSLCertificatesRequest
, thpsscrSSLCertificates
-- * InstanceTemplate
, InstanceTemplate
, instanceTemplate
, itKind
, itSelfLink
, itName
, itCreationTimestamp
, itId
, itDescription
, itProperties
-- * RouterList
, RouterList
, routerList
, rllNextPageToken
, rllKind
, rllItems
, rllSelfLink
, rllId
-- * TargetSSLProxy
, TargetSSLProxy
, targetSSLProxy
, tspSSLCertificates
, tspService
, tspKind
, tspSelfLink
, tspName
, tspCreationTimestamp
, tspId
, tspProxyHeader
, tspDescription
-- * TargetVPNGateway
, TargetVPNGateway
, targetVPNGateway
, tvgStatus
, tvgKind
, tvgNetwork
, tvgSelfLink
, tvgName
, tvgCreationTimestamp
, tvgId
, tvgRegion
, tvgTunnels
, tvgDescription
, tvgForwardingRules
-- * DiskStatus
, DiskStatus (..)
-- * ManagedInstanceInstanceStatus
, ManagedInstanceInstanceStatus (..)
-- * HTTPHealthCheckProxyHeader
, HTTPHealthCheckProxyHeader (..)
-- * URLMapsValidateResponse
, URLMapsValidateResponse
, urlMapsValidateResponse
, umvrResult
-- * SSLCertificate
, SSLCertificate
, sslCertificate
, scPrivateKey
, scKind
, scSelfLink
, scName
, scCreationTimestamp
, scId
, scCertificate
, scDescription
-- * RouterStatusBGPPeerStatus
, RouterStatusBGPPeerStatus
, routerStatusBGPPeerStatus
, rsbpsStatus
, rsbpsIPAddress
, rsbpsState
, rsbpsPeerIPAddress
, rsbpsUptime
, rsbpsNumLearnedRoutes
, rsbpsName
, rsbpsUptimeSeconds
, rsbpsAdvertisedRoutes
, rsbpsLinkedVPNTunnel
-- * URLMapReference
, URLMapReference
, urlMapReference
, umrURLMap
-- * AttachedDiskMode
, AttachedDiskMode (..)
-- * TargetPoolsAddHealthCheckRequest
, TargetPoolsAddHealthCheckRequest
, targetPoolsAddHealthCheckRequest
, tpahcrHealthChecks
-- * DiskAggregatedListItems
, DiskAggregatedListItems
, diskAggregatedListItems
, daliAddtional
-- * UsageExportLocation
, UsageExportLocation
, usageExportLocation
, uelReportNamePrefix
, uelBucketName
-- * ZoneList
, ZoneList
, zoneList
, zlNextPageToken
, zlKind
, zlItems
, zlSelfLink
, zlId
-- * RegionStatus
, RegionStatus (..)
-- * RouterBGPPeer
, RouterBGPPeer
, routerBGPPeer
, rbpIPAddress
, rbpInterfaceName
, rbpPeerIPAddress
, rbpAdvertisedRoutePriority
, rbpPeerASN
, rbpName
-- * SubnetworksExpandIPCIdRRangeRequest
, SubnetworksExpandIPCIdRRangeRequest
, subnetworksExpandIPCIdRRangeRequest
, seicirrrIPCIdRRange
-- * ManagedInstance
, ManagedInstance
, managedInstance
, miLastAttempt
, miCurrentAction
, miId
, miInstanceStatus
, miInstance
-- * InstanceGroupManagerAggregatedListItems
, InstanceGroupManagerAggregatedListItems
, instanceGroupManagerAggregatedListItems
, igmaliAddtional
-- * InstanceGroupManagersDeleteInstancesRequest
, InstanceGroupManagersDeleteInstancesRequest
, instanceGroupManagersDeleteInstancesRequest
, igmdirInstances
-- * Backend
, Backend
, backend
, bGroup
, bBalancingMode
, bMaxUtilization
, bMaxRate
, bMaxConnections
, bMaxConnectionsPerInstance
, bMaxRatePerInstance
, bDescription
, bCapacityScaler
-- * TargetVPNGatewaysScopedListWarning
, TargetVPNGatewaysScopedListWarning
, targetVPNGatewaysScopedListWarning
, tvgslwData
, tvgslwCode
, tvgslwMessage
-- * TargetSSLProxiesSetProxyHeaderRequestProxyHeader
, TargetSSLProxiesSetProxyHeaderRequestProxyHeader (..)
-- * AddressList
, AddressList
, addressList
, alNextPageToken
, alKind
, alItems
, alSelfLink
, alId
-- * ForwardingRuleAggregatedListItems
, ForwardingRuleAggregatedListItems
, forwardingRuleAggregatedListItems
, fraliAddtional
-- * OperationAggregatedListItems
, OperationAggregatedListItems
, operationAggregatedListItems
, oaliAddtional
-- * InstanceGroupManagerActionsSummary
, InstanceGroupManagerActionsSummary
, instanceGroupManagerActionsSummary
, igmasDeleting
, igmasRestarting
, igmasNone
, igmasCreating
, igmasRefreshing
, igmasCreatingWithoutRetries
, igmasRecreating
, igmasAbandoning
-- * VPNTunnelStatus
, VPNTunnelStatus (..)
-- * ServiceAccount
, ServiceAccount
, serviceAccount
, saEmail
, saScopes
-- * RegionInstanceGroupManagersAbandonInstancesRequest
, RegionInstanceGroupManagersAbandonInstancesRequest
, regionInstanceGroupManagersAbandonInstancesRequest
, rigmairInstances
-- * NetworkList
, NetworkList
, networkList
, nlNextPageToken
, nlKind
, nlItems
, nlSelfLink
, nlId
-- * InstanceGroupsListInstancesRequest
, InstanceGroupsListInstancesRequest
, instanceGroupsListInstancesRequest
, iglirInstanceState
-- * BackendBalancingMode
, BackendBalancingMode (..)
-- * RegionInstanceGroupList
, RegionInstanceGroupList
, regionInstanceGroupList
, riglNextPageToken
, riglKind
, riglItems
, riglSelfLink
, riglId
-- * TargetPoolAggregatedListItems
, TargetPoolAggregatedListItems
, targetPoolAggregatedListItems
, tpaliAddtional
-- * TargetInstancesScopedList
, TargetInstancesScopedList
, targetInstancesScopedList
, tislWarning
, tislTargetInstances
-- * AddressAggregatedListItems
, AddressAggregatedListItems
, addressAggregatedListItems
, aAddtional
-- * AutoscalerList
, AutoscalerList
, autoscalerList
, autNextPageToken
, autKind
, autItems
, autSelfLink
, autId
-- * TargetSSLProxiesSetProxyHeaderRequest
, TargetSSLProxiesSetProxyHeaderRequest
, targetSSLProxiesSetProxyHeaderRequest
, tspsphrProxyHeader
-- * VPNTunnelAggregatedList
, VPNTunnelAggregatedList
, vpnTunnelAggregatedList
, vtalNextPageToken
, vtalKind
, vtalItems
, vtalSelfLink
, vtalId
-- * AttachedDisk
, AttachedDisk
, attachedDisk
, adDiskEncryptionKey
, adKind
, adMode
, adBoot
, adAutoDelete
, adInitializeParams
, adDeviceName
, adInterface
, adSource
, adLicenses
, adType
, adIndex
-- * DiskTypeList
, DiskTypeList
, diskTypeList
, dtlNextPageToken
, dtlKind
, dtlItems
, dtlSelfLink
, dtlId
-- * RegionInstanceGroupsSetNamedPortsRequest
, RegionInstanceGroupsSetNamedPortsRequest
, regionInstanceGroupsSetNamedPortsRequest
, rigsnprFingerprint
, rigsnprNamedPorts
-- * MachineTypeList
, MachineTypeList
, machineTypeList
, mtlNextPageToken
, mtlKind
, mtlItems
, mtlSelfLink
, mtlId
-- * TargetHTTPProxyList
, TargetHTTPProxyList
, targetHTTPProxyList
, thttpplNextPageToken
, thttpplKind
, thttpplItems
, thttpplSelfLink
, thttpplId
-- * RegionInstanceGroupManagerList
, RegionInstanceGroupManagerList
, regionInstanceGroupManagerList
, rigmlNextPageToken
, rigmlKind
, rigmlItems
, rigmlSelfLink
, rigmlId
-- * ForwardingRuleIPProtocol
, ForwardingRuleIPProtocol (..)
-- * DiskTypesScopedList
, DiskTypesScopedList
, diskTypesScopedList
, dtslDiskTypes
, dtslWarning
-- * AddressStatus
, AddressStatus (..)
-- * DiskTypeAggregatedList
, DiskTypeAggregatedList
, diskTypeAggregatedList
, dtalNextPageToken
, dtalKind
, dtalItems
, dtalSelfLink
, dtalId
-- * HTTPHealthCheck
, HTTPHealthCheck
, hTTPHealthCheck
, httphcRequestPath
, httphcHost
, httphcProxyHeader
, httphcPortName
, httphcPort
-- * BackendServiceGroupHealth
, BackendServiceGroupHealth
, backendServiceGroupHealth
, bsghKind
, bsghHealthStatus
-- * InstanceGroupsListInstancesRequestInstanceState
, InstanceGroupsListInstancesRequestInstanceState (..)
-- * AutoscalersScopedList
, AutoscalersScopedList
, autoscalersScopedList
, aAutoscalers
, aWarning
-- * AutoscalerAggregatedList
, AutoscalerAggregatedList
, autoscalerAggregatedList
, aalNextPageToken
, aalKind
, aalItems
, aalSelfLink
, aalId
-- * RouterAggregatedListItems
, RouterAggregatedListItems
, routerAggregatedListItems
, raliAddtional
-- * HTTPSHealthCheckProxyHeader
, HTTPSHealthCheckProxyHeader (..)
-- * AutoscalingPolicy
, AutoscalingPolicy
, autoscalingPolicy
, apCustomMetricUtilizations
, apMaxNumReplicas
, apCPUUtilization
, apLoadBalancingUtilization
, apMinNumReplicas
, apCoolDownPeriodSec
-- * RegionList
, RegionList
, regionList
, regNextPageToken
, regKind
, regItems
, regSelfLink
, regId
-- * AttachedDiskInterface
, AttachedDiskInterface (..)
-- * HealthCheckType
, HealthCheckType (..)
-- * ZoneStatus
, ZoneStatus (..)
-- * VPNTunnelList
, VPNTunnelList
, vpnTunnelList
, vtlNextPageToken
, vtlKind
, vtlItems
, vtlSelfLink
, vtlId
-- * MachineTypeScratchDisksItem
, MachineTypeScratchDisksItem
, machineTypeScratchDisksItem
, mtsdiDiskGb
-- * SubnetworksScopedListWarningDataItem
, SubnetworksScopedListWarningDataItem
, subnetworksScopedListWarningDataItem
, sslwdiValue
, sslwdiKey
-- * MachineTypesScopedList
, MachineTypesScopedList
, machineTypesScopedList
, mtslMachineTypes
, mtslWarning
-- * SubnetworksScopedListWarningCode
, SubnetworksScopedListWarningCode (..)
-- * Subnetwork
, Subnetwork
, subnetwork
, subKind
, subNetwork
, subGatewayAddress
, subSelfLink
, subName
, subCreationTimestamp
, subIPCIdRRange
, subId
, subRegion
, subDescription
-- * MachineTypeAggregatedList
, MachineTypeAggregatedList
, machineTypeAggregatedList
, mtalNextPageToken
, mtalKind
, mtalItems
, mtalSelfLink
, mtalId
-- * QuotaMetric
, QuotaMetric (..)
-- * DiskType
, DiskType
, diskType
, dtKind
, dtZone
, dtSelfLink
, dtName
, dtCreationTimestamp
, dtId
, dtValidDiskSize
, dtDescription
, dtDefaultDiskSizeGb
, dtDeprecated
-- * URLMapValidationResult
, URLMapValidationResult
, urlMapValidationResult
, umvrLoadErrors
, umvrLoadSucceeded
, umvrTestPassed
, umvrTestFailures
-- * Metadata
, Metadata
, metadata
, mKind
, mFingerprint
, mItems
-- * RouteWarningsItem
, RouteWarningsItem
, routeWarningsItem
, rwiData
, rwiCode
, rwiMessage
-- * MachineTypesScopedListWarningDataItem
, MachineTypesScopedListWarningDataItem
, machineTypesScopedListWarningDataItem
, mtslwdiValue
, mtslwdiKey
-- * InstanceStatus
, InstanceStatus (..)
-- * MachineTypesScopedListWarningCode
, MachineTypesScopedListWarningCode (..)
-- * InstancesSetServiceAccountRequest
, InstancesSetServiceAccountRequest
, instancesSetServiceAccountRequest
, issarEmail
, issarScopes
-- * DiskTypesScopedListWarningDataItem
, DiskTypesScopedListWarningDataItem
, diskTypesScopedListWarningDataItem
, dtslwdiValue
, dtslwdiKey
-- * TargetHTTPProxy
, TargetHTTPProxy
, targetHTTPProxy
, thttppURLMap
, thttppKind
, thttppSelfLink
, thttppName
, thttppCreationTimestamp
, thttppId
, thttppDescription
-- * MachineType
, MachineType
, machineType
, mtIsSharedCPU
, mtKind
, mtImageSpaceGb
, mtZone
, mtSelfLink
, mtName
, mtCreationTimestamp
, mtScratchDisks
, mtId
, mtGuestCPUs
, mtMaximumPersistentDisksSizeGb
, mtMaximumPersistentDisks
, mtMemoryMb
, mtDescription
, mtDeprecated
-- * DiskTypesScopedListWarningCode
, DiskTypesScopedListWarningCode (..)
-- * OperationError
, OperationError
, operationError
, oeErrors
-- * TargetInstancesScopedListWarning
, TargetInstancesScopedListWarning
, targetInstancesScopedListWarning
, tislwData
, tislwCode
, tislwMessage
-- * SubnetworkAggregatedList
, SubnetworkAggregatedList
, subnetworkAggregatedList
, salNextPageToken
, salKind
, salItems
, salSelfLink
, salId
-- * DisksResizeRequest
, DisksResizeRequest
, disksResizeRequest
, drrSizeGb
-- * AutoscalersScopedListWarningDataItem
, AutoscalersScopedListWarningDataItem
, autoscalersScopedListWarningDataItem
, aValue
, aKey
-- * AutoscalersScopedListWarningCode
, AutoscalersScopedListWarningCode (..)
-- * ForwardingRuleLoadBalancingScheme
, ForwardingRuleLoadBalancingScheme (..)
-- * RegionInstanceGroupManagersSetTemplateRequest
, RegionInstanceGroupManagersSetTemplateRequest
, regionInstanceGroupManagersSetTemplateRequest
, rigmstrInstanceTemplate
-- * InstanceGroupsListInstances
, InstanceGroupsListInstances
, instanceGroupsListInstances
, igliNextPageToken
, igliKind
, igliItems
, igliSelfLink
, igliId
-- * Autoscaler
, Autoscaler
, autoscaler
, aaKind
, aaZone
, aaSelfLink
, aaName
, aaCreationTimestamp
, aaAutoscalingPolicy
, aaId
, aaRegion
, aaDescription
, aaTarget
-- * DisksScopedListWarning
, DisksScopedListWarning
, disksScopedListWarning
, dslwData
, dslwCode
, dslwMessage
-- * TargetVPNGatewayAggregatedListItems
, TargetVPNGatewayAggregatedListItems
, targetVPNGatewayAggregatedListItems
, tvgaliAddtional
-- * RouterStatusBGPPeerStatusStatus
, RouterStatusBGPPeerStatusStatus (..)
-- * TargetPoolsScopedListWarning
, TargetPoolsScopedListWarning
, targetPoolsScopedListWarning
, tpslwData
, tpslwCode
, tpslwMessage
-- * HealthStatus
, HealthStatus
, healthStatus
, hsIPAddress
, hsHealthState
, hsPort
, hsInstance
-- * Region
, Region
, region
, regeStatus
, regeZones
, regeKind
, regeSelfLink
, regeName
, regeCreationTimestamp
, regeQuotas
, regeId
, regeDescription
, regeDeprecated
-- * GuestOSFeature
, GuestOSFeature
, guestOSFeature
, gofType
-- * VPNTunnel
, VPNTunnel
, vpnTunnel
, vtDetailedStatus
, vtStatus
, vtLocalTrafficSelector
, vtKind
, vtPeerIP
, vtRouter
, vtTargetVPNGateway
, vtRemoteTrafficSelector
, vtSelfLink
, vtSharedSecret
, vtName
, vtCreationTimestamp
, vtSharedSecretHash
, vtId
, vtIkeVersion
, vtRegion
, vtDescription
-- * VPNTunnelsScopedListWarningCode
, VPNTunnelsScopedListWarningCode (..)
-- * OperationsScopedListWarning
, OperationsScopedListWarning
, operationsScopedListWarning
, oslwData
, oslwCode
, oslwMessage
-- * Scheduling
, Scheduling
, scheduling
, sAutomaticRestart
, sOnHostMaintenance
, sPreemptible
-- * VPNTunnelsScopedListWarningDataItem
, VPNTunnelsScopedListWarningDataItem
, vpnTunnelsScopedListWarningDataItem
, vtslwdiValue
, vtslwdiKey
-- * SubnetworkList
, SubnetworkList
, subnetworkList
, slNextPageToken
, slKind
, slItems
, slSelfLink
, slId
-- * ForwardingRulesScopedListWarning
, ForwardingRulesScopedListWarning
, forwardingRulesScopedListWarning
, frslwData
, frslwCode
, frslwMessage
-- * HTTPHealthCheckList
, HTTPHealthCheckList
, hTTPHealthCheckList
, httphclNextPageToken
, httphclKind
, httphclItems
, httphclSelfLink
, httphclId
-- * InstanceGroupManagersScopedListWarning
, InstanceGroupManagersScopedListWarning
, instanceGroupManagersScopedListWarning
, igmslwData
, igmslwCode
, igmslwMessage
-- * URLMapsValidateRequest
, URLMapsValidateRequest
, urlMapsValidateRequest
, umvrResource
-- * InstanceGroupManagersSetTargetPoolsRequest
, InstanceGroupManagersSetTargetPoolsRequest
, instanceGroupManagersSetTargetPoolsRequest
, igmstprFingerprint
, igmstprTargetPools
-- * HTTPSHealthCheckList
, HTTPSHealthCheckList
, httpsHealthCheckList
, hhclNextPageToken
, hhclKind
, hhclItems
, hhclSelfLink
, hhclId
-- * OperationErrorErrorsItem
, OperationErrorErrorsItem
, operationErrorErrorsItem
, oeeiLocation
, oeeiCode
, oeeiMessage
-- * License
, License
, license
, lChargesUseFee
, lKind
, lSelfLink
, lName
-- * PathRule
, PathRule
, pathRule
, prService
, prPaths
-- * InstanceList
, InstanceList
, instanceList
, insNextPageToken
, insKind
, insItems
, insSelfLink
, insId
-- * SubnetworkAggregatedListItems
, SubnetworkAggregatedListItems
, subnetworkAggregatedListItems
, saliAddtional
-- * ManagedInstanceLastAttempt
, ManagedInstanceLastAttempt
, managedInstanceLastAttempt
, milaErrors
-- * BackendServiceList
, BackendServiceList
, backendServiceList
, bslNextPageToken
, bslKind
, bslItems
, bslSelfLink
, bslId
-- * InstanceGroupsScopedListWarning
, InstanceGroupsScopedListWarning
, instanceGroupsScopedListWarning
, igslwData
, igslwCode
, igslwMessage
-- * AutoscalingPolicyCPUUtilization
, AutoscalingPolicyCPUUtilization
, autoscalingPolicyCPUUtilization
, apcuUtilizationTarget
-- * InstanceGroupsScopedListWarningCode
, InstanceGroupsScopedListWarningCode (..)
-- * InstanceGroupsScopedListWarningDataItem
, InstanceGroupsScopedListWarningDataItem
, instanceGroupsScopedListWarningDataItem
, igslwdiValue
, igslwdiKey
-- * ResourceGroupReference
, ResourceGroupReference
, resourceGroupReference
, rgrGroup
-- * Firewall
, Firewall
, firewall
, fSourceTags
, fKind
, fTargetTags
, fNetwork
, fSourceRanges
, fSelfLink
, fName
, fCreationTimestamp
, fId
, fAllowed
, fDescription
-- * HostRule
, HostRule
, hostRule
, hrHosts
, hrDescription
, hrPathMatcher
-- * Quota
, Quota
, quota
, qMetric
, qLimit
, qUsage
-- * InstanceGroup
, InstanceGroup
, instanceGroup
, iiSize
, iiKind
, iiFingerprint
, iiNetwork
, iiZone
, iiSelfLink
, iiName
, iiCreationTimestamp
, iiSubnetwork
, iiId
, iiRegion
, iiDescription
, iiNamedPorts
-- * RouterInterface
, RouterInterface
, routerInterface
, riName
, riIPRange
, riLinkedVPNTunnel
-- * InstanceWithNamedPortsStatus
, InstanceWithNamedPortsStatus (..)
-- * TCPHealthCheckProxyHeader
, TCPHealthCheckProxyHeader (..)
-- * SnapshotList
, SnapshotList
, snapshotList
, snaNextPageToken
, snaKind
, snaItems
, snaSelfLink
, snaId
-- * TestFailure
, TestFailure
, testFailure
, tfPath
, tfExpectedService
, tfHost
, tfActualService
-- * SerialPortOutput
, SerialPortOutput
, serialPortOutput
, spoNext
, spoContents
, spoKind
, spoStart
, spoSelfLink
-- * TargetVPNGatewayAggregatedList
, TargetVPNGatewayAggregatedList
, targetVPNGatewayAggregatedList
, tvgalNextPageToken
, tvgalKind
, tvgalItems
, tvgalSelfLink
, tvgalId
-- * MetadataItemsItem
, MetadataItemsItem
, metadataItemsItem
, miiValue
, miiKey
-- * TargetHTTPSProxy
, TargetHTTPSProxy
, targetHTTPSProxy
, thpURLMap
, thpSSLCertificates
, thpKind
, thpSelfLink
, thpName
, thpCreationTimestamp
, thpId
, thpDescription
-- * ConnectionDraining
, ConnectionDraining
, connectionDraining
, cdDrainingTimeoutSec
-- * CacheInvalidationRule
, CacheInvalidationRule
, cacheInvalidationRule
, cirPath
, cirHost
-- * TargetVPNGatewaysScopedList
, TargetVPNGatewaysScopedList
, targetVPNGatewaysScopedList
, tvgslTargetVPNGateways
, tvgslWarning
-- * AccessConfig
, AccessConfig
, accessConfig
, acKind
, acName
, acNATIP
, acType
-- * ManagedInstanceLastAttemptErrorsErrorsItem
, ManagedInstanceLastAttemptErrorsErrorsItem
, managedInstanceLastAttemptErrorsErrorsItem
, milaeeiLocation
, milaeeiCode
, milaeeiMessage
-- * InstancesScopedListWarningCode
, InstancesScopedListWarningCode (..)
-- * InstancesScopedListWarningDataItem
, InstancesScopedListWarningDataItem
, instancesScopedListWarningDataItem
, islwdiValue
, islwdiKey
-- * BackendServicesScopedListWarningDataItem
, BackendServicesScopedListWarningDataItem
, backendServicesScopedListWarningDataItem
, bsslwdiValue
, bsslwdiKey
-- * BackendService
, BackendService
, backendService
, bsSessionAffinity
, bsBackends
, bsAffinityCookieTtlSec
, bsLoadBalancingScheme
, bsKind
, bsEnableCDN
, bsFingerprint
, bsProtocol
, bsSelfLink
, bsName
, bsCreationTimestamp
, bsId
, bsRegion
, bsConnectionDraining
, bsTimeoutSec
, bsDescription
, bsPortName
, bsHealthChecks
, bsPort
-- * InstanceMoveRequest
, InstanceMoveRequest
, instanceMoveRequest
, imrTargetInstance
, imrDestinationZone
-- * BackendServicesScopedListWarningCode
, BackendServicesScopedListWarningCode (..)
-- * TargetPoolSessionAffinity
, TargetPoolSessionAffinity (..)
-- * InstanceGroupsScopedList
, InstanceGroupsScopedList
, instanceGroupsScopedList
, igslWarning
, igslInstanceGroups
-- * InstancesStartWithEncryptionKeyRequest
, InstancesStartWithEncryptionKeyRequest
, instancesStartWithEncryptionKeyRequest
, iswekrDisks
-- * HTTPSHealthCheck
, HTTPSHealthCheck
, httpsHealthCheck
, hhcRequestPath
, hhcHost
, hhcProxyHeader
, hhcPortName
, hhcPort
-- * ImageRawDiskContainerType
, ImageRawDiskContainerType (..)
-- * VPNTunnelAggregatedListItems
, VPNTunnelAggregatedListItems
, vpnTunnelAggregatedListItems
, vtaliAddtional
-- * Tags
, Tags
, tags
, tFingerprint
, tItems
-- * AddressAggregatedList
, AddressAggregatedList
, addressAggregatedList
, addNextPageToken
, addKind
, addItems
, addSelfLink
, addId
-- * OperationWarningsItem
, OperationWarningsItem
, operationWarningsItem
, owiData
, owiCode
, owiMessage
-- * URLMapTest
, URLMapTest
, urlMapTest
, umtPath
, umtService
, umtHost
, umtDescription
-- * RoutersPreviewResponse
, RoutersPreviewResponse
, routersPreviewResponse
, rprResource
-- * Instance
, Instance
, instance'
, insnStatus
, insnServiceAccounts
, insnNetworkInterfaces
, insnKind
, insnZone
, insnCPUPlatform
, insnSelfLink
, insnName
, insnStatusMessage
, insnCreationTimestamp
, insnMachineType
, insnMetadata
, insnId
, insnScheduling
, insnDisks
, insnCanIPForward
, insnDescription
, insnTags
-- * PathMatcher
, PathMatcher
, pathMatcher
, pmDefaultService
, pmName
, pmPathRules
, pmDescription
) where
import Network.Google.Compute.Types.Product
import Network.Google.Compute.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the Compute Engine API. This contains the host and root path used as a starting point for constructing service requests.
computeService :: ServiceConfig
computeService
= defaultService (ServiceId "compute:v1")
"www.googleapis.com"
-- | View and manage your Google Compute Engine resources
computeScope :: Proxy '["https://www.googleapis.com/auth/compute"]
computeScope = Proxy;
-- | View and manage your data across Google Cloud Platform services
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy;
-- | View your data in Google Cloud Storage
storageReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/devstorage.read_only"]
storageReadOnlyScope = Proxy;
-- | Manage your data in Google Cloud Storage
storageReadWriteScope :: Proxy '["https://www.googleapis.com/auth/devstorage.read_write"]
storageReadWriteScope = Proxy;
-- | View your Google Compute Engine resources
computeReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/compute.readonly"]
computeReadOnlyScope = Proxy;
-- | Manage your data and permissions in Google Cloud Storage
storageFullControlScope :: Proxy '["https://www.googleapis.com/auth/devstorage.full_control"]
storageFullControlScope = Proxy;
| rueshyna/gogol | gogol-compute/gen/Network/Google/Compute/Types.hs | mpl-2.0 | 50,592 | 0 | 7 | 12,903 | 5,747 | 3,955 | 1,792 | 1,689 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Controller
( withFoundation
, withDevelApp
) where
import Foundation
import Settings
import Yesod.Helpers.Static
import Yesod.Helpers.Auth
import Database.Persist.GenericSql
import Data.ByteString (ByteString)
import Data.Dynamic (Dynamic, toDyn)
import Store.File
import Store.Base
import Control.Exception hiding (Handler)
import Data.Enumerator (enumEOF, ($=))
import qualified Data.Enumerator.List as EL
import Blaze.ByteString.Builder (fromByteString)
import Data.Aeson (toJSON, ToJSON)
import Yesod.Json
-- Import all relevant handler modules here.
import Handler.Root
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see
-- the comments there for more details.
mkYesodDispatch "Foundation" resourcesFoundation
-- Some default handlers that ship with the Yesod site template. You will
-- very rarely need to modify this.
getFaviconR :: Handler ()
getFaviconR = sendFile "image/x-icon" "config/favicon.ico"
getRobotsR :: Handler RepPlain
getRobotsR = return $ RepPlain $ toContent ("User-agent: *" :: ByteString)
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
withFoundation :: (Application -> IO a) -> IO a
withFoundation f = Settings.withConnectionPool $ \p -> do
runConnectionPool (runMigration migrateAll) p
let h = Foundation s p
toWaiApp h >>= f
where
s = static Settings.staticdir
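-- A hypothetical invocation (not part of this module), assuming the standard
-- Warp runner from Network.Wai.Handler.Warp:
--
-- > main :: IO ()
-- > main = withFoundation (run 3000)
--
-- The connection pool is allocated and migrations are run before the WAI
-- application is handed to the runner.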
withDevelApp :: Dynamic
withDevelApp = toDyn (withFoundation :: (Application -> IO ()) -> IO ())
-- our code starts here
echo :: ToJSON a => a -> GHandler sub master RepJson
echo = jsonToRepJson . toJSON
getBuildR = wrapStoreAction build
getNewR = wrapStoreAction newRepo
getFindR = wrapStoreAction findRepos
getGetR = wrapStoreAction getRepo
wrapStoreAction f arg = do
result <- liftIO $ f defaultFileStore arg
echo result
getExportR name = return (("application/x-tar"::ContentType), ContentEnum $ export defaultFileStore name $= EL.map fromByteString)
defaultFileStore = fileStore "./stores/default"
| tehgeekmeister/apters-web | Controller.hs | agpl-3.0 | 2,436 | 0 | 12 | 371 | 488 | 269 | 219 | 48 | 1 |
module TestTree23(test23) where
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.QuickCheck
import Data.FixFile
import Data.FixFile.Tree23 as Tree23
import Data.List hiding (null)
import Data.Maybe
import Data.Monoid
import Prelude hiding (null)
empty23 :: Fix (Tree23 d)
empty23 = empty
prop_SetInsert :: [Int] -> Bool
prop_SetInsert xs = allIns where
fullSet = foldr insertSet empty23 xs
allIns = all (flip lookupSet fullSet) xs
prop_SetDelete :: [Int] -> [Int] -> Bool
prop_SetDelete xs ys = allDels where
fullSet = foldr insertSet empty23 xs
delSet = foldr deleteSet fullSet ys
allDels = all (not . flip lookupSet delSet) ys
prop_SetDeleteAll :: [Int] -> Bool
prop_SetDeleteAll xs = allDeleted where
fullSet = foldr insertSet empty23 xs
delSet = foldr deleteSet fullSet xs
allDeleted = [] == toListSet delSet
prop_SetPartition :: [Int] -> Int -> Bool
prop_SetPartition xs i = parted where
fullSet = fromListSet xs :: Fix (Tree23 (Set Int))
(ltSet', gteSet') = partitionSet i fullSet
ltSet = toListSet ltSet'
gteSet = toListSet gteSet'
parted = all (< i) ltSet && all (>= i) gteSet
prop_SetMinMax :: [Int] -> Int -> Bool
prop_SetMinMax xs' i = minMax where
xs = i:xs'
minxs = minimum xs
maxxs = maximum xs
fullSet = fromListSet xs :: Fix (Tree23 (Set Int))
Just minxs' = minSet fullSet
Just maxxs' = maxSet fullSet
minMax = minxs == minxs' && maxxs == maxxs'
prop_SetFoldable :: [Int] -> Bool
prop_SetFoldable xs = setSum == listSum where
fullSet = fromListSet xs :: Fix (Tree23 (Set Int))
setSum = getSum $ foldMapF Sum fullSet
listSum = getSum $ foldMap Sum (nub xs)
prop_MapInsert :: [(Int,String)] -> Bool
prop_MapInsert xs = allIns where
empt = empty :: Fix (Tree23 (Map Int String))
fullSet = foldr (uncurry insertMap) empt xs
allIns = all (isJust . flip lookupMap fullSet) $ fmap fst xs
prop_MapDelete :: [(Int,String)] -> [Int] -> Bool
prop_MapDelete ins dels = allDels where
fullMap :: Fix (Tree23 (Map Int String))
fullMap = fromListMap ins
delSet = foldr deleteMap fullMap dels
allDels = all (isNothing . flip lookupMap delSet) dels
prop_MapReplace :: [(Int,String)] -> Int -> String -> String -> Bool
prop_MapReplace ins rk rv rv' = replTest where
fullMap :: Fix (Tree23 (Map Int String))
fullMap = foldr (uncurry insertMap) empty23 ins
replMap = insertMap rk rv' $ insertMap rk rv fullMap
replTest = Just rv' == lookupMap rk replMap
prop_MapDeleteAll :: [(Int,String)] -> Bool
prop_MapDeleteAll xs = allDeleted where
fullMap :: Fix (Tree23 (Map Int String))
fullMap = fromListMap xs
delSet = foldr deleteMap fullMap $ fmap fst xs
allDeleted = [] == toListMap delSet
prop_MapPartition :: [(Int, String)] -> Int -> Bool
prop_MapPartition xs i = parted where
fullMap = fromListMap xs :: Fix (Tree23 (Map Int String))
(ltMap', gteMap') = partitionMap i fullMap
ltMap = fmap fst $ toListMap ltMap'
gteMap = fmap fst $ toListMap gteMap'
parted = all (< i) ltMap && all (>= i) gteMap
prop_MapFunctor :: [(Int,String)] -> String -> Bool
prop_MapFunctor xs pre = allMap where
fullMap :: Fix (Tree23 (Map Int String))
fullMap = foldr (uncurry insertMap) empty23 xs
pl = length pre
mapped :: Fix (Tree23 (Map Int String))
mapped = fmapF (pre ++) fullMap
keys = fmap fst xs
allMap = all ((Just pre ==) . fmap (take pl) . flip lookupMap mapped) keys
prop_MapMinMax :: [(Int, String)] -> (Int, String) -> Bool
prop_MapMinMax xs'' i = minMax where
xs' = i:xs''
fullMap = fromListMap xs' :: Fix (Tree23 (Map Int String))
xs = toListMap fullMap
minxs = minimum xs
maxxs = maximum xs
Just minxs' = minMap fullMap
Just maxxs' = maxMap fullMap
minMax = minxs == minxs' && maxxs == maxxs'
prop_MapFoldable :: [Int] -> Bool
prop_MapFoldable xs = mapSum == listSum where
fullMap = fromListMap (zip [1..] xs) :: Fix (Tree23 (Map Int Int))
mapSum = getSum $ foldMapF Sum fullMap
listSum = getSum $ foldMap Sum xs
prop_MapTraversable :: [Int] -> Bool
prop_MapTraversable xs = testEvens evens' && testOdds odds' where
evens :: Fix (Tree23 (Map Int Int))
evens = fromListMap (zip [1..] $ filter even xs)
odds :: Fix (Tree23 (Map Int Int))
odds = fromListMap (zip [1..] $ filter odd xs)
f x = if even x then Nothing else Just x
evens' = traverseF' f evens
odds' = traverseF' f odds
testEvens Nothing = True
testEvens (Just ev) = null ev
testOdds Nothing = False
testOdds _ = True
test23 = testGroup "Tree23"
[
testGroup "Set"
[
testProperty "Set Insert" prop_SetInsert
,testProperty "Set Delete" prop_SetDelete
,testProperty "Set Delete All" prop_SetDeleteAll
,testProperty "Set Partition" prop_SetPartition
,testProperty "Set Min/Max" prop_SetMinMax
,testProperty "Set Foldable" prop_SetFoldable
]
,testGroup "Map"
[
testProperty "Map Insert" prop_MapInsert
,testProperty "Map Delete" prop_MapDelete
,testProperty "Map Replace" prop_MapReplace
,testProperty "Map Delete All" prop_MapDeleteAll
,testProperty "Map Partition" prop_MapPartition
,testProperty "Map Foldable" prop_MapFoldable
,testProperty "Map Functor" prop_MapFunctor
,testProperty "Map Traversable" prop_MapTraversable
]
]
| revnull/fixfile | tests/TestTree23.hs | lgpl-3.0 | 5,516 | 0 | 13 | 1,323 | 1,902 | 985 | 917 | 134 | 4 |
module Network.Haskoin.Wallet.Tests (tests) where
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Data.Aeson (FromJSON, ToJSON, encode, decode)
import Data.HashMap.Strict (singleton)
import Network.Haskoin.Wallet.Arbitrary ()
import Network.Haskoin.Wallet
tests :: [Test]
tests =
[ testGroup "Serialize & de-serialize types to JSON"
[ testProperty "AccountType" (metaID :: AccountType -> Bool)
, testProperty "NodeAction" (metaID :: NodeAction -> Bool)
]
]
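-- metaID round-trips a value through JSON inside a one-entry object; wrapping
-- keeps the top-level document an object (which older aeson 'decode' versions
-- insist on) while still exercising the wrapped type's ToJSON/FromJSON
-- instances.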
metaID :: (FromJSON a, ToJSON a, Eq a) => a -> Bool
metaID x = (decode . encode) (singleton ("object" :: String) x) ==
Just (singleton ("object" :: String) x)
| plaprade/haskoin | haskoin-wallet/test/Network/Haskoin/Wallet/Tests.hs | unlicense | 716 | 0 | 10 | 128 | 224 | 131 | 93 | 15 | 1 |
{- Copyright 2014 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Dated where
import Data.Time.Clock (UTCTime)
data Dated a = a :@ UTCTime deriving (Eq, Ord)
instance Show a => Show (Dated a) where
show (x :@ _) = "Dated (" ++ show x ++ ")"
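-- Usage sketch (illustrative): a value is timestamped with the infix
-- constructor, e.g. (readings :@ now); the derived Ord instance compares the
-- payload first and falls back to the timestamp.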
| shockkolate/arata | src/Dated.hs | apache-2.0 | 796 | 0 | 8 | 151 | 86 | 47 | 39 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Page
( Message(..)
, PageConfig(..)
, defaultPageConfig
, render
) where
--import Control.Monad.Trans (liftIO)
import Data.Monoid ((<>))
import qualified Data.Text as T
import Text.Blaze.Internal (textValue)
import Text.Blaze.Html5 (Html, (!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.Text (renderHtml)
import qualified Web.Scotty as W
import App (Action)
import Config.Config (staticURL)
import Model.User (User(..))
import Model.Plan (Plan(..))
import qualified Actions.Main.Url as Main
import qualified Actions.Register.Url as Register
import qualified Actions.LoginScreen.Login.Url as Login
import qualified Actions.Logout.Url as Logout
import qualified Actions.EditProfile.Url as EditProfile
import qualified Actions.ManagePlans.Url as ManagePlans
import qualified Bridge as B
{-# ANN module ("HLint: ignore Redundant do" :: String) #-}
{-# ANN module ("HLint: ignore Use camelCase" :: String) #-}
data Message = InfoMessage Html | ErrorMessage Html | NoMessage
data PageConfig = PageConfig
{ pc_isMain :: Bool
, pc_mUser :: Maybe User
, pc_mPlan :: Maybe Plan
}
defaultPageConfig :: PageConfig
defaultPageConfig = PageConfig { pc_isMain = False, pc_mUser = Nothing, pc_mPlan = Nothing }
render :: Html -> PageConfig -> Action
render page pConfig = W.html $ renderHtml $
H.docTypeHtml ! A.class_ "no-js" ! A.lang "" $ do
renderHead
H.body $
H.div ! A.id "container" $ do
renderLogin (pc_mUser pConfig)
H.div ! A.class_ "banner-bar" $ do
renderBanner
if pc_isMain pConfig then renderControlPanel pConfig else mempty
renderMessageBars
H.div ! A.class_ "inside" $
page
H.div ! A.id "overlay" ! A.class_ "overlay" $ H.div "overlay"
renderFooter
renderAcknowledgement
--if pc_isMain pConfig then
H.script ! A.src (textValue $ T.pack staticURL <> "js/main.js") $ mempty
--else
-- mempty
H.script ! A.src (textValue $ T.pack staticURL <> "js/analytics.js") $ mempty
renderHead :: Html
renderHead = H.head $ do
H.meta ! A.charset "utf-8"
H.meta ! A.httpEquiv "X-UA-Compatible" ! A.content "IE=edge"
H.title "DS Wizard"
H.meta ! A.name "viewport" ! A.content "width=device-width, initial-scale=1"
H.link ! A.rel "stylesheet" ! A.href (textValue $ T.pack staticURL <> "css/normalize.min.css")
H.link ! A.rel "stylesheet" ! A.href ( textValue $ T.pack staticURL <> "css/main.css")
H.script ! A.src (textValue $ T.pack staticURL <> "js/vendor/jquery-3.1.1.min.js") $ mempty
H.script ! A.src (textValue $ T.pack staticURL <> "js/vendor/js.cookie-2.1.4.min.js") $ mempty
renderLogin :: Maybe User -> Html
renderLogin mUser = H.div ! A.class_ "login-box" $ do
case mUser of
Just user -> do
H.span $ H.a ! A.href (textValue $ T.pack EditProfile.url) $ H.toHtml $ u_name user
_ <- " | "
H.a ! A.href (textValue $ T.pack Logout.url) $ "Logout"
Nothing -> do
H.a ! A.href (textValue $ T.pack Login.url) $ "Login"
_ <- " | "
H.a ! A.href (textValue $ T.pack Register.url) $ "Register"
renderBanner :: Html
renderBanner = H.div ! A.id "banner" ! A.class_ "banner" $ do
H.div ! A.class_ "banner-element" $
H.a ! A.href (textValue $ T.pack Main.url) $
H.img ! A.class_ "dsplogo" ! A.src (textValue $ T.pack staticURL <> "img/DSP-logo.png") ! A.alt "DSP logo"
H.div ! A.class_ "banner-element" $ do
H.h1 ! A.class_ "title" $ do
_ <- "Data Stewardship Wizard"
H.span ! A.class_ "version" $ " v0.10.1, "
H.span ! A.class_ "version" $ " KM: 2017-08-21"
H.div ! A.class_ "subtitle" $ "Data Management Plans for FAIR Open Science"
renderControlPanel :: PageConfig -> Html
renderControlPanel pConfig = case pc_mUser pConfig of
Nothing -> mempty
Just _ -> do
H.div ! A.class_ "control-panel" $ do
case pc_mPlan pConfig of
Just plan -> do
H.div ! A.class_ "control-panel-label" $ do
_ <- "Plan: "
H.a ! A.href (textValue $ T.pack ManagePlans.url) $ H.toHtml $ p_name plan
H.button ! A.class_ "action-button" ! A.onclick (textValue $ T.pack $ B.call0 B.SavePlan) $
H.img ! A.class_ "action-icon" ! A.src (textValue $ T.pack staticURL <> "img/save.png") ! A.alt "Save the plan"
Nothing -> do
H.div ! A.class_ "control-panel-label no-plan" $ "No plan opened"
H.button ! A.class_ "action-button action-button-disabled" $
H.img ! A.class_ "action-icon action-icon-disabled" ! A.src (textValue $ T.pack staticURL <> "img/save.png") ! A.alt "Save the plan"
H.a ! A.class_ "action-button" ! A.href (textValue $ T.pack ManagePlans.url) $
H.img ! A.class_ "action-icon" ! A.src (textValue $ T.pack staticURL <> "img/manage.png") ! A.alt "Manage plans"
renderMessageBars :: Html
renderMessageBars = do
H.div ! A.id (textValue $ T.pack B.infoBar) ! A.class_ "bar-fixed info" $ mempty
H.div ! A.id (textValue $ T.pack B.warningBar) ! A.class_ "bar-fixed warning" $ mempty
H.div ! A.id (textValue $ T.pack B.errorBar) ! A.class_ "bar-fixed error" $ mempty
renderFooter :: Html
renderFooter = H.div ! A.id "footer" ! A.class_ "stripe" $
H.table ! A.class_ "footer-table" $ H.tbody $
H.tr $ do
H.td $ do
H.h3 "Technical contacts"
H.a ! A.href "mailto:[email protected]" $ "Robert Pergl"
H.br
H.a ! A.href "mailto:[email protected]" $ "Marek Suchánek"
H.td ! A.style "text-align: center; " $ do
H.h3 "Data stewardship action team"
H.a ! A.href "http://elixir-czech.cz" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-elixir-cz.jpg") ! A.class_ "logo" ! A.alt "ELIXIR-CZ logo"
H.a ! A.href "https://www.uochb.cz" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-uochb.png") ! A.class_ "logo" ! A.alt "FIT logo"
H.a ! A.href "http://ccmi.fit.cvut.cz/en" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-ccmi.png") ! A.class_ "logo" ! A.alt "CCMi logo"
H.a ! A.href "http://fit.cvut.cz/en" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-fit.png") ! A.class_ "logo" ! A.alt "FIT logo"
H.a ! A.href "http://www.dtls.nl/elixir-nl/elixir-nl-2/" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-elixir-nl.png") ! A.class_ "logo" ! A.alt "ELIXIR-NL logo"
H.a ! A.href "http://www.dtls.nl/" $ H.img ! A.src (textValue $ T.pack staticURL <> "img/logo-dtl.png") ! A.class_ "logo" ! A.alt "DTL logo"
H.td $ do
H.h3 "Links"
H.a ! A.href "http://www.elixir-europe.org/" $ "ELIXIR Europe"
H.br
H.a ! A.href "http://www.elixir-europe.org/about/elixir-nodes" $ "ELIXIR Nodes"
renderAcknowledgement :: Html
renderAcknowledgement =
H.div ! A.class_ "colophon-box" $ do
H.p ! A.class_ "colophon-line" $ do
H.span ! A.class_ "colophon-text" $ "Crafted with "
H.a ! A.href "https://www.haskell.org/ghc/" ! A.class_ "colophon-text" $ "GHC"
H.span ! A.class_ "colophon-text" $ " & "
H.a ! A.href "http://haste-lang.org/" ! A.class_ "colophon-text" $ "Haste"
H.span ! A.class_ "colophon-text" $ ", powered by "
H.a ! A.href "http://hackage.haskell.org/package/scotty" ! A.class_ "colophon-text" $ "Scotty"
H.img ! A.src (textValue $ T.pack staticURL <> "img/haskell.png") ! A.alt "Haskell logo" ! A.class_ "logo"
| DataStewardshipPortal/ds-wizard | DSServer/app/Page.hs | apache-2.0 | 7,597 | 0 | 28 | 1,654 | 2,582 | 1,259 | 1,323 | 140 | 3 |
{-# OPTIONS_GHC -cpp #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Either
-- Copyright : (C) 2008 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Removes the Error restriction that prevents a natural encoding of
-- apomorphisms. Because this module defines its own 'Monad' instance for
-- 'Either', it is incompatible with Control.Monad.Error.
----------------------------------------------------------------------------
module Control.Monad.Either
( Either(Left,Right)
, EitherT(EitherT,runEitherT)
) where
import Control.Functor.Pointed
import Control.Applicative
import Control.Monad.Fix
#if __BROKEN_EITHER__
import Prelude hiding (Either(Left,Right))
#endif
-- we have to define our own because the Control.Monad.Error instance is
-- baked into the prelude on old versions.
#if __BROKEN_EITHER__
data Either a b = Left a | Right b
instance Functor (Either e) where
fmap _ (Left a) = Left a
fmap f (Right a) = Right (f a)
#endif
newtype EitherT a m b = EitherT { runEitherT :: m (Either a b) }
-- defined in Control.Functor.Pointed
--instance Pointed (Either e) where
-- point = Right
instance Monad (Either e) where
return = Right
Right m >>= k = k m
Left e >>= _ = Left e
instance Applicative (Either e) where
pure = Right
a <*> b = do x <- a; y <- b; return (x y)
instance MonadFix (Either e) where
mfix f = let
a = f $ case a of
Right r -> r
_ -> error "empty mfix argument"
in a
instance Functor f => Functor (EitherT a f) where
fmap f = EitherT . fmap (fmap f) . runEitherT
instance Pointed f => Pointed (EitherT a f) where
point = EitherT . point . Right
instance Monad m => Monad (EitherT a m) where
return = EitherT . return . return
m >>= k = EitherT $ do
a <- runEitherT m
case a of
Left l -> return (Left l)
Right r -> runEitherT (k r)
instance MonadFix m => MonadFix (EitherT a m) where
mfix f = EitherT $ mfix $ \a -> runEitherT $ f $ case a of
Right r -> r
_ -> error "empty mfix argument"
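{- A minimal usage sketch (illustrative only, not part of the original module):
'EitherT' layers short-circuiting failure over any monad, with no 'Error'
constraint on the failure type.

> example :: EitherT String IO Int
> example = do
>   x <- EitherT (return (Right 2))
>   y <- EitherT (return (Left "boom"))
>   return (x + y)

'runEitherT example' evaluates to 'Left "boom"'; the final 'return' never runs.
-}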
| urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav | Control/Monad/Either.hs | apache-2.0 | 2,283 | 4 | 14 | 545 | 620 | 328 | 292 | 41 | 0 |
{-# LANGUAGE FlexibleContexts #-}
import AdventOfCode (readInputFile)
import Control.Monad (forM, forM_, when)
import Data.Array.IArray (elems)
import Data.Array.MArray (newListArray, readArray, writeArray)
import Data.Array.ST (runSTUArray)
import Data.Bits (shiftL)
import Data.Int (Int8)
import Data.Maybe (catMaybes)
gameOfLife :: [[Bool]] -> Int -> Bool -> [Int8]
gameOfLife lights n cornersStuck = elems $ runSTUArray $ do
let len = length lights
let maxXY = len - 1
a <- newListArray ((0, 0), (maxXY, maxXY)) (pack lights)
when cornersStuck $
forM_ [(y, x) | y <- [0, maxXY], x <- [0, maxXY]] $ \(y, x) ->
writeArray a (y, x) 1
let allCells = [(y, x) | y <- [0..maxXY], x <- [0..maxXY]]
-- Initial setup: Count neighbours.
forM_ allCells $ \(y, x) -> do
neighbourVals <- mapM (readArray a) (neighbours maxXY (y, x))
val <- readArray a (y, x)
writeArray a (y, x) (val + shiftL (fromIntegral (countAlive neighbourVals)) 1)
forM_ [1..n] $ \_ -> do
changed <- forM allCells $ \(y, x) -> do
val <- readArray a (y, x)
let isCorner = (x == 0 || x == maxXY) && (y == 0 || y == maxXY)
isAlive = cornersStuck && isCorner || 5 <= val && val <= 7
if isAlive /= alive val
then return (Just (y, x))
else return Nothing
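    -- Flipping a cell adjusts every neighbour's packed byte by +/-2 (one live
    -- neighbour more or fewer in bits 1+), while the cell itself nets +/-1
    -- after the final correction, i.e. only its alive bit (bit 0) toggles.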
forM_ (catMaybes changed) $ \(y, x) -> do
val <- readArray a (y, x)
let delta = if alive val then -2 else 2
forM_ (neighboursAndSelf maxXY (y, x)) $ \(ny, nx) -> do
nval <- readArray a (ny, nx)
writeArray a (ny, nx) (nval + delta)
val' <- readArray a (y, x)
writeArray a (y, x) (val' - (delta `div` 2))
return a
neighbours :: Int -> (Int, Int) -> [(Int, Int)]
neighbours maxXY (y, x) =
[(ny, nx) | ny <- yrange, nx <- xrange, (ny, nx) /= (y, x)]
where (yrange, xrange) = ranges maxXY y x
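-- For example, neighbours 2 (0, 0) clips at the border and yields only the
-- three in-bounds cells (0,1), (1,0) and (1,1).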
-- The duplication is regrettable, but does save some time.
neighboursAndSelf :: Int -> (Int, Int) -> [(Int, Int)]
neighboursAndSelf maxXY (y, x) = [(ny, nx) | ny <- yrange, nx <- xrange]
where (yrange, xrange) = ranges maxXY y x
ranges :: Int -> Int -> Int -> ([Int], [Int])
ranges maxXY y x = ([yMin..yMax], [xMin..xMax])
where yMin = max (y - 1) 0
yMax = min (y + 1) maxXY
xMin = max (x - 1) 0
xMax = min (x + 1) maxXY
-- Packs lights into their byte form:
-- bit 0 is alive/dead
-- bits 1+ are neighbour count
pack :: [[Bool]] -> [Int8]
pack = concatMap (map (\b -> if b then 1 else 0))
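-- For example, a live cell with 3 live neighbours is stored as 1 + 2*3 = 7 and
-- a dead cell with 2 live neighbours as 2*2 = 4; the survival test
-- (5 <= val && val <= 7) in gameOfLife thus encodes the usual Conway rules.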
alive :: Int8 -> Bool
alive = odd
countAlive :: [Int8] -> Int
countAlive = length . filter alive
toLight :: Char -> Bool
toLight '#' = True
toLight '.' = False
toLight c = error (c : ": invalid character")
main :: IO ()
main = do
s <- readInputFile
let rows = map (map toLight) (lines s)
print (countAlive (gameOfLife rows 100 False))
print (countAlive (gameOfLife rows 100 True))
| petertseng/adventofcode-hs | bin/18_game_of_life.hs | apache-2.0 | 2,867 | 0 | 24 | 698 | 1,354 | 729 | 625 | 68 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module Etch.Types.Analysis where
import qualified Data.HashMap.Lazy as HM
import Data.Text
import Etch.Types.SemanticTree
type Scope = HM.HashMap Text Term
data Term = Term Type Scope
deriving Show
data AnalysisState = AnalysisState { _analysisStateNextID :: Integer
, _analysisStateScope :: Scope
} deriving Show
defaultAnalysisState :: AnalysisState
defaultAnalysisState = AnalysisState { _analysisStateNextID = 1
, _analysisStateScope = HM.empty
}
| shockkolate/etch | src/Etch/Types/Analysis.hs | apache-2.0 | 649 | 0 | 8 | 224 | 105 | 66 | 39 | 14 | 1 |
module Main where
import Life.Engine.QTree
import Life.Types
--import Life.Scenes
-- or
import Life.Formations
-- Libraries for testing
import qualified Life.Engine.Hutton as Hutton -- Needed to test correctness with QuickCheck
import Test.QuickCheck -- For correctness tests
import Data.List (sort)
import Criterion.Main -- For performance tests
-- Runs the Life (without display) for the specified number of generations
life :: Int -> Config -> Scene -> Board
life x c = (runLife x) . (scene c)
lifeHutton :: Int -> Config -> Scene -> Hutton.Board
lifeHutton x c = (runLife x) . (scene c)
-- QuickCheck test of the source-code engine vs. the HERMIT-converted engine
testHermit x c b = sort (alive (life x c b)) == sort (alive (lifeHutton x c b))
-- Tests conversion against original for correctness and performance
main :: IO ()
main = do
quickCheck $ testHermit 1000 ((20,20),True) $ glider (0,0)
quickCheck $ testHermit 1000 ((50,50),False) $ gliderGunL (0,0)
quickCheck $ testHermit 1000 ((50,50),False) $ acorn (0,0)
{- defaultMain
[ bench "QTree-G-20x20" $ whnf (life 1000 ((20,20),True)) $ glider (0,0)
, bench "QTree-GG-50x50" $ whnf (life 1000 ((50,50),False)) $ gliderGunL (0,0)
, bench "QTree-A-50x50" $ whnf (life 1000 ((50,50),False)) $ acorn (0,0)
]
-}
| ku-fpg/better-life | examples/engines/QTreeEngine.hs | bsd-2-clause | 1,289 | 0 | 11 | 227 | 327 | 181 | 146 | 18 | 1 |
{-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
{-# OPTIONS_HADDOCK not-home #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Text.Internal.Builder
-- Copyright : (c) 2013 Bryan O'Sullivan
-- (c) 2010 Johan Tibell
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Johan Tibell <[email protected]>
-- Stability : experimental
-- Portability : portable to Hugs and GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Efficient construction of lazy @Text@ values. The principal
-- operations on a @Builder@ are @singleton@, @fromText@, and
-- @fromLazyText@, which construct new builders, and 'mappend', which
-- concatenates two builders.
--
-- To get maximum performance when building lazy @Text@ values using a
-- builder, associate @mappend@ calls to the right. For example,
-- prefer
--
-- > singleton 'a' `mappend` (singleton 'b' `mappend` singleton 'c')
--
-- to
--
-- > singleton 'a' `mappend` singleton 'b' `mappend` singleton 'c'
--
-- as the latter associates @mappend@ to the left.
--
-----------------------------------------------------------------------------
module Data.Text.Internal.Builder
( -- * Public API
-- ** The Builder type
Builder
, toLazyText
, toLazyTextWith
-- ** Constructing Builders
, singleton
, fromText
, fromLazyText
, fromString
-- ** Flushing the buffer state
, flush
-- * Internal functions
, append'
, ensureFree
, writeN
) where
import Control.Monad.ST (ST, runST)
import Data.Monoid (Monoid(..))
#if !MIN_VERSION_base(4,11,0) && MIN_VERSION_base(4,9,0)
import Data.Semigroup (Semigroup(..))
#endif
import Data.Text.Internal (Text(..))
import Data.Text.Internal.Lazy (smallChunkSize)
import Data.Text.Unsafe (inlineInterleaveST)
import Data.Text.Internal.Unsafe.Char (unsafeWrite)
import Prelude hiding (map, putChar)
import qualified Data.String as String
import qualified Data.Text as S
import qualified Data.Text.Array as A
import qualified Data.Text.Lazy as L
------------------------------------------------------------------------
-- | A @Builder@ is an efficient way to build lazy @Text@ values.
-- There are several functions for constructing builders, but only one
-- to inspect them: to extract any data, you have to turn them into
-- lazy @Text@ values using @toLazyText@.
--
-- Internally, a builder constructs a lazy @Text@ by filling arrays
-- piece by piece. As each buffer is filled, it is \'popped\' off, to
-- become a new chunk of the resulting lazy @Text@. All this is
-- hidden from the user of the @Builder@.
newtype Builder = Builder {
-- Invariant (from Data.Text.Lazy):
-- The lists include no null Texts.
runBuilder :: forall s. (Buffer s -> ST s [S.Text])
-> Buffer s
-> ST s [S.Text]
}
#if MIN_VERSION_base(4,9,0)
instance Semigroup Builder where
(<>) = append
{-# INLINE (<>) #-}
#endif
instance Monoid Builder where
mempty = empty
{-# INLINE mempty #-}
#if MIN_VERSION_base(4,9,0)
mappend = (<>) -- future-proof definition
#else
mappend = append
#endif
{-# INLINE mappend #-}
mconcat = foldr mappend Data.Monoid.mempty
{-# INLINE mconcat #-}
instance String.IsString Builder where
fromString = fromString
{-# INLINE fromString #-}
instance Show Builder where
show = show . toLazyText
instance Eq Builder where
a == b = toLazyText a == toLazyText b
instance Ord Builder where
a <= b = toLazyText a <= toLazyText b
------------------------------------------------------------------------
-- | /O(1)./ The empty @Builder@, satisfying
--
-- * @'toLazyText' 'empty' = 'L.empty'@
--
empty :: Builder
empty = Builder (\ k buf -> k buf)
{-# INLINE empty #-}
-- | /O(1)./ A @Builder@ taking a single character, satisfying
--
-- * @'toLazyText' ('singleton' c) = 'L.singleton' c@
--
singleton :: Char -> Builder
singleton c = writeAtMost 2 $ \ marr o -> unsafeWrite marr o c
{-# INLINE singleton #-}
------------------------------------------------------------------------
-- | /O(1)./ The concatenation of two builders, an associative
-- operation with identity 'empty', satisfying
--
-- * @'toLazyText' ('append' x y) = 'L.append' ('toLazyText' x) ('toLazyText' y)@
--
append :: Builder -> Builder -> Builder
append (Builder f) (Builder g) = Builder (f . g)
{-# INLINE [0] append #-}
-- TODO: Experiment to find the right threshold.
copyLimit :: Int
copyLimit = 128
-- This function attempts to merge small @Text@ values instead of
-- treating each value as its own chunk. We may not always want this.
-- | /O(1)./ A @Builder@ taking a 'S.Text', satisfying
--
-- * @'toLazyText' ('fromText' t) = 'L.fromChunks' [t]@
--
fromText :: S.Text -> Builder
fromText t@(Text arr off l)
| S.null t = empty
| l <= copyLimit = writeN l $ \marr o -> A.copyI marr o arr off (l+o)
| otherwise = flush `append` mapBuilder (t :)
{-# INLINE [1] fromText #-}
{-# RULES
"fromText/pack" forall s .
fromText (S.pack s) = fromString s
#-}
-- | /O(1)./ A Builder taking a @String@, satisfying
--
-- * @'toLazyText' ('fromString' s) = 'L.fromChunks' [S.pack s]@
--
fromString :: String -> Builder
fromString str = Builder $ \k (Buffer p0 o0 u0 l0) ->
let loop !marr !o !u !l [] = k (Buffer marr o u l)
loop marr o u l s@(c:cs)
| l <= 1 = do
arr <- A.unsafeFreeze marr
let !t = Text arr o u
marr' <- A.new chunkSize
ts <- inlineInterleaveST (loop marr' 0 0 chunkSize s)
return $ t : ts
| otherwise = do
n <- unsafeWrite marr (o+u) c
loop marr o (u+n) (l-n) cs
in loop p0 o0 u0 l0 str
where
chunkSize = smallChunkSize
{-# INLINE fromString #-}
-- | /O(1)./ A @Builder@ taking a lazy @Text@, satisfying
--
-- * @'toLazyText' ('fromLazyText' t) = t@
--
fromLazyText :: L.Text -> Builder
fromLazyText ts = flush `append` mapBuilder (L.toChunks ts ++)
{-# INLINE fromLazyText #-}
------------------------------------------------------------------------
-- Our internal buffer type
data Buffer s = Buffer {-# UNPACK #-} !(A.MArray s)
{-# UNPACK #-} !Int -- offset
{-# UNPACK #-} !Int -- used units
{-# UNPACK #-} !Int -- length left
------------------------------------------------------------------------
-- | /O(n)./ Extract a lazy @Text@ from a @Builder@ with a default
-- buffer size. The construction work takes place if and when the
-- relevant part of the lazy @Text@ is demanded.
toLazyText :: Builder -> L.Text
toLazyText = toLazyTextWith smallChunkSize
-- | /O(n)./ Extract a lazy @Text@ from a @Builder@, using the given
-- size for the initial buffer. The construction work takes place if
-- and when the relevant part of the lazy @Text@ is demanded.
--
-- If the initial buffer is too small to hold all data, subsequent
-- buffers will be the default buffer size.
toLazyTextWith :: Int -> Builder -> L.Text
toLazyTextWith chunkSize m = L.fromChunks (runST $
newBuffer chunkSize >>= runBuilder (m `append` flush) (const (return [])))
-- | /O(1)./ Pop the strict @Text@ we have constructed so far, if any,
-- yielding a new chunk in the result lazy @Text@.
flush :: Builder
flush = Builder $ \ k buf@(Buffer p o u l) ->
if u == 0
then k buf
else do arr <- A.unsafeFreeze p
let !b = Buffer p (o+u) 0 l
!t = Text arr o u
ts <- inlineInterleaveST (k b)
return $! t : ts
{-# INLINE [1] flush #-}
-- defer inlining so that flush/flush rule may fire.
------------------------------------------------------------------------
-- | Sequence an ST operation on the buffer
withBuffer :: (forall s. Buffer s -> ST s (Buffer s)) -> Builder
withBuffer f = Builder $ \k buf -> f buf >>= k
{-# INLINE withBuffer #-}
-- | Get the size of the buffer
withSize :: (Int -> Builder) -> Builder
withSize f = Builder $ \ k buf@(Buffer _ _ _ l) ->
runBuilder (f l) k buf
{-# INLINE withSize #-}
-- | Map the resulting list of texts.
mapBuilder :: ([S.Text] -> [S.Text]) -> Builder
mapBuilder f = Builder (fmap f .)
------------------------------------------------------------------------
-- | Ensure that there are at least @n@ many elements available.
ensureFree :: Int -> Builder
ensureFree !n = withSize $ \ l ->
if n <= l
then empty
else flush `append'` withBuffer (const (newBuffer (max n smallChunkSize)))
{-# INLINE [0] ensureFree #-}
writeAtMost :: Int -> (forall s. A.MArray s -> Int -> ST s Int) -> Builder
writeAtMost n f = ensureFree n `append'` withBuffer (writeBuffer f)
{-# INLINE [0] writeAtMost #-}
-- | Ensure that @n@ many elements are available, and then use @f@ to
-- write some elements into the memory.
writeN :: Int -> (forall s. A.MArray s -> Int -> ST s ()) -> Builder
writeN n f = writeAtMost n (\ p o -> f p o >> return n)
{-# INLINE writeN #-}
writeBuffer :: (A.MArray s -> Int -> ST s Int) -> Buffer s -> ST s (Buffer s)
writeBuffer f (Buffer p o u l) = do
n <- f p (o+u)
return $! Buffer p o (u+n) (l-n)
{-# INLINE writeBuffer #-}
newBuffer :: Int -> ST s (Buffer s)
newBuffer size = do
arr <- A.new size
return $! Buffer arr 0 0 size
{-# INLINE newBuffer #-}
------------------------------------------------------------------------
-- Some nice rules for Builder
-- This function makes GHC understand that 'writeN' and 'ensureFree'
-- are *not* recursive in the precense of the rewrite rules below.
-- This is not needed with GHC 7+.
append' :: Builder -> Builder -> Builder
append' (Builder f) (Builder g) = Builder (f . g)
{-# INLINE append' #-}
{-# RULES
"append/writeAtMost" forall a b (f::forall s. A.MArray s -> Int -> ST s Int)
(g::forall s. A.MArray s -> Int -> ST s Int) ws.
append (writeAtMost a f) (append (writeAtMost b g) ws) =
append (writeAtMost (a+b) (\marr o -> f marr o >>= \ n ->
g marr (o+n) >>= \ m ->
let s = n+m in s `seq` return s)) ws
"writeAtMost/writeAtMost" forall a b (f::forall s. A.MArray s -> Int -> ST s Int)
(g::forall s. A.MArray s -> Int -> ST s Int).
append (writeAtMost a f) (writeAtMost b g) =
writeAtMost (a+b) (\marr o -> f marr o >>= \ n ->
g marr (o+n) >>= \ m ->
let s = n+m in s `seq` return s)
"ensureFree/ensureFree" forall a b .
append (ensureFree a) (ensureFree b) = ensureFree (max a b)
"flush/flush"
append flush flush = flush
#-}
| bgamari/text | src/Data/Text/Internal/Builder.hs | bsd-2-clause | 10,887 | 0 | 17 | 2,506 | 1,917 | 1,066 | 851 | 155 | 2 |
{-|
Module : $Header$
Copyright : (c) Petr Penzin, 2015
License : BSD2
Maintainer : [email protected]
Stability : stable
Portability : cross-platform
Test reading and writing build configuration
-}
-- module Junta.TestProjectConfiguration where
import Junta.ProjectConfiguration
import Control.Applicative
import Control.Monad
import Data.ByteString.Char8 as B
import Data.Map
import Data.Yaml.YamlLight
import Test.Tasty
import Test.Tasty.SmallCheck as SC
-- We are trying to test what values the return and fail methods are called with,
-- so we can use this type
data Verifier a = Value a | Failure String
deriving (Show, Eq)
instance Monad Verifier where
v >>= f = case v of
Failure s -> Failure s
Value x -> f x
return = Value
fail = Failure
instance Applicative Verifier where
pure = return
(<*>) = ap
instance Functor Verifier where
fmap = liftM
data TestStatus = Pass | Fail | Error
deriving (Show, Eq)
main = defaultMain tests
tests = testGroup "Test Build configuration"
[ SC.testProperty "Banal read YAML test"
((parseConfigYaml (YMap (fromList [(YStr $ B.pack "name", YStr $ B.pack "a"), (YStr $ B.pack "version", YStr $ B.pack "1")])) :: Verifier ProjectConfiguration) == Value (ProjectConfiguration "a" "1"))
, SC.testProperty "Missing \"version\" field"
((parseConfigYaml (YMap (fromList [(YStr $ B.pack "name", YStr $ B.pack "a")])) :: Verifier ProjectConfiguration) == Failure "Field \"version\" not found")
, SC.testProperty "Missing \"name\" field"
((parseConfigYaml (YMap (fromList [(YStr $ B.pack "version", YStr $ B.pack "a")])) :: Verifier ProjectConfiguration) == Failure "Field \"name\" not found")
, SC.testProperty "Empty input"
((parseConfigYaml (YMap (fromList [])) :: Verifier ProjectConfiguration) == Failure "Field \"name\" not found")
]
| ppenzin/junta | src/test/haskell/Junta/TestBuildConfiguration.hs | bsd-2-clause | 1,909 | 0 | 20 | 394 | 504 | 268 | 236 | 33 | 1 |
{-# LANGUAGE GADTs #-}
import Control.Applicative
import Control.Monad.Trans
import Control.Monad.Operational
type Stack s = [s]
data StackI s a where
Push :: s -> StackI s ()
Pop :: StackI s s
type StackP s m a = ProgramT (StackI s) m a
push :: (Monad m) => s -> StackP s m ()
push = singleton . Push
pop :: (Monad m) => StackP s m s
pop = singleton Pop
interpret :: (Monad m) => StackP s m a -> Stack s -> m (Stack s)
interpret is stack = (\v -> eval v stack) =<< (viewT is)
where
eval :: (Monad m) => ProgramViewT (StackI s) m a -> Stack s -> m (Stack s)
eval (Push x :>>= is) stack = interpret (is ()) (x:stack)
eval (Pop :>>= is) (x:stack) = interpret (is x) stack
eval (Return _) stack = return stack
add :: (Monad m) => StackP Int m ()
add = do
x <- pop
y <- pop
push (x + y)
addInput :: StackP Int IO ()
addInput = do
push =<< read <$> liftIO getLine
push =<< read <$> liftIO getLine
add
main :: IO ()
main = do
(putStrLn . show) =<< interpret addInput [1, 2]
| fujiyan/toriaezuzakki | haskell/operational/StackT.hs | bsd-2-clause | 1,054 | 0 | 12 | 291 | 510 | 263 | 247 | 32 | 3 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
module Snap.Snaplet.HTTPAuth.Types.AuthHeader (
AuthHeaderWrapper (..),
parseBasicAuthHeader,
parserToAHW,
parseAuthorizationHeader
) where
import Snap.Snaplet.HTTPAuth.Types.AuthHeader.Base
import Snap.Snaplet.HTTPAuth.Types.AuthHeader.Basic
| anchor/snaplet-httpauth | lib/Snap/Snaplet/HTTPAuth/Types/AuthHeader.hs | bsd-3-clause | 519 | 0 | 5 | 119 | 51 | 39 | 12 | 13 | 0 |
module Graphics.GChart.ChartItems.Basics where
import Graphics.GChart.Types
import Graphics.GChart.ChartItems.Util
-- Chart Size
instance ChartItem ChartSize where
set size = updateChart $ \chart -> chart { chartSize = Just size }
encode (Size width height) = asList ("chs", widthStr ++ show height) where
widthStr | width == 0 = ""
| otherwise = show width ++ "x"
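-- For example (illustrative values): @encode (Size 400 300)@ produces the
-- parameter @("chs", "400x300")@, while @encode (Size 0 300)@ omits the
-- width and produces @("chs", "300")@.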
-- Chart Type
instance ChartItem ChartType where
set cType = updateChart $ \chart -> chart { chartType = cType }
encode cType = asList ("cht",t)
where t = case cType of
Line -> "lc"
LineXY -> "lxy"
Sparklines -> "ls"
Pie -> "p"
Pie3D -> "p3"
PieConcentric -> "pc"
BarHorizontalStacked -> "bhs"
BarVerticalStacked -> "bvs"
BarHorizontalGrouped -> "bhg"
BarVerticalGrouped -> "bvg"
Venn -> "v"
ScatterPlot -> "s"
Radar -> "r"
RadarCurvedLines -> "rs"
GoogleOMeter -> "gom"
Formula -> "tx"
QRCode -> "qr"
| deepakjois/hs-gchart | Graphics/GChart/ChartItems/Basics.hs | bsd-3-clause | 1,763 | 0 | 11 | 1,032 | 287 | 151 | 136 | 29 | 0 |
module SLM.DataTypes where
import Data.List.Split
import qualified Data.ByteString.Lazy.Char8 as LC
type PredictorName = LC.ByteString
data PredictorType = FactorType
| MeasureType
| InvalidType
deriving (Show)
data PredictorDefinition = PredictorDefinition !PredictorName !PredictorType
deriving (Show)
isFactorVariable :: PredictorDefinition -> Bool
isFactorVariable (PredictorDefinition _ FactorType) = True
isFactorVariable _ = False
data PredictorValue = FactorValue String
| MeasureValue Double
| NA
deriving (Show)
data Predictor = Predictor PredictorName PredictorValue
deriving (Show)
type Alt = String
data ChoiceSetWithChosenAlt = ChoiceSetWithChosenAlt {
chosen :: Alt
, allAlts :: [Alt]
} deriving Show
makePredictorDefinition :: LC.ByteString -> PredictorDefinition
makePredictorDefinition = processTokenisedPredictorDefinition . (splitOn ":") . LC.unpack
processTokenisedPredictorDefinition :: [String] -> PredictorDefinition
processTokenisedPredictorDefinition (name:[]) = PredictorDefinition (LC.pack name) FactorType
processTokenisedPredictorDefinition (name:"F":[]) = PredictorDefinition (LC.pack name) FactorType
processTokenisedPredictorDefinition (name:"N":[]) = PredictorDefinition (LC.pack name) MeasureType
processTokenisedPredictorDefinition (name:_) = PredictorDefinition (LC.pack name) FactorType
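-- For example (illustrative): @makePredictorDefinition (LC.pack "age:N")@
-- yields a 'MeasureType' definition named "age", while @"city"@ or @"city:F"@
-- yield 'FactorType' definitions; any other suffix falls back to 'FactorType'.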
makePredictor :: PredictorDefinition -> String -> Predictor
makePredictor (PredictorDefinition name FactorType) = Predictor name . FactorValue
makePredictor (PredictorDefinition name MeasureType) = Predictor name . MeasureValue . read
data Observation = Observation {
y :: Double
, xs :: [Predictor]
}
data TrainingSet = TrainingSet {
observations :: [Observation]
}
| timveitch/Gobble | src/SLM/DataTypes.hs | bsd-3-clause | 1,820 | 0 | 9 | 309 | 453 | 249 | 204 | 43 | 1 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, DataKinds #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Geometry.Transform.SpaceTransform
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <[email protected]>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <[email protected]>
-- Stability : Experimental
--
-- Space transform definition
--
-----------------------------------------------------------------------------
module Data.Geometry.Transform.SpaceTransform where
import GHC.TypeLits
import Data.Geometry.VectorMath
#if defined(ghcjs_HOST_OS)
import Data.Geometry.Quaternion
#endif
-- | SpaceTransform separates space transformations (such as rotation, scaling, and others) from actual points.
-- This means objects inside the SpaceTransform monad normally stay untouched until transformations are applied.
-- This is useful, for instance, when working with OpenGL: one can submit the transform and the coordinates to the GPU separately.
-- Final behavior is similar to OpenGL's push and pop matrices:
-- > translate (Vector3 1 0 0) x >>= scale 2 >>= rotateX pi
-- The code above means: first translate, then scale, then rotate; if transforms were just matrices, @>>=@ would be matrix multiplication.
-- Important: these transforms are applied inside, not outside - i.e. translate in the example above is the outermost matrix.
class Monad s => SpaceTransform s (n :: Nat) t | s -> n, s -> t where
-- | Create rotation transform
rotate :: (Eq t, Floating t, Real t) => Vector n t -> t -> x -> s x
-- | Create transform by uniform scaling
scale :: (Num t) => t -> x -> s x
-- | Create transform by translating
translate :: (Num t) => Vector n t -> x -> s x
-- | Get bare data without applying transform
unwrap :: s x -> x
-- | Wrap data into existing transform discarding transform content
wrap :: (Num t) => x -> s y -> s x
-- | Lift transform into Functor's inside
liftTransform :: (Functor f) => s (f x) -> f (s x)
-- | Transform another STransform using this one. Multitype analogue of `<*>`
mergeSecond :: (SpaceTransform z n t) => z (x -> y) -> s x -> z y
-- | Transform this STransform using another one. Multitype analogue of `<*>`
mergeFirst :: (SpaceTransform z n t) => s (x -> y) -> z x -> z y
-- | Inverse of the transform that should satisfy
-- >>> return x == inverseTransform s >> s
inverseTransform :: s x -> s x
-- | Apply transform to nD vector
applyV :: (Eq t, Floating t) => s (Vector n t) -> Vector n t
-- | Apply transform to homogeneous point in (n+1)D (corresponding nD Euclidian space)
applyVH :: (Eq t, Floating t) => s (Vector (n+1) t) -> Vector (n+1) t
-- | Create transform from transformation matrix
transformM :: (Eq t, Floating t) => Matrix n t -> x -> s x
-- | Create transform from transformation matrix -- homogeneous coordinates
transformMH :: (Eq t, Floating t) => Matrix (n+1) t -> x -> s x
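-- A minimal usage sketch (hypothetical: assumes a concrete 'SpaceTransform'
-- instance @s@ and a @vec3@ constructor for @Vector 3 Double@ are in scope):
--
-- > moved :: SpaceTransform s 3 Double => s (Vector 3 Double)
-- > moved = translate (vec3 1 0 0) (vec3 0 1 0) >>= scale 2
--
-- Applying 'applyV' to @moved@ then yields the transformed point, following
-- the push/pop-matrix reading described above (translate first, then scale).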
{-# RULES
"mergeSecond/any" mergeSecond = (<*>) :: (SpaceTransform z n t) => z (x -> y) -> z x -> z y
"mergeFirst/any" mergeFirst = (<*>) :: (SpaceTransform z n t) => z (x -> y) -> z x -> z y
#-}
#if defined(ghcjs_HOST_OS)
class ( Monad s
, SpaceTransform s 3 t
, Quaternion q t
, Floating t
) => Space3DTransform s t q | s -> t, q -> t, t -> q where
-- | Create rotation transform by rotating w.r.t. X axis
rotateX :: t -> x -> s x
-- | Create rotation transform by rotating w.r.t. Y axis
rotateY :: t -> x -> s x
-- | Create rotation transform by rotating w.r.t. Z axis
rotateZ :: t -> x -> s x
-- | Create transform from quaternion
rotateScale :: q -> x -> s x
#endif
-- | Kind of object that can be transformed
class Transformable x n t | x -> t where
-- | Apply wrapping transform on the object inside
transform :: (SpaceTransform s n t, Floating t, Eq t) => s x -> x
instance Transformable (Vector 2 t) 2 t where
transform = applyV
instance Transformable (Vector 3 t) 3 t where
transform = applyV
instance Transformable (Vector 4 t) 4 t where
transform = applyV
instance Transformable (Vector 5 t) 5 t where
transform = applyV
instance Transformable (Vector 6 t) 6 t where
transform = applyV
instance Transformable (Vector 7 t) 7 t where
transform = applyV
instance Transformable (Vector 8 t) 8 t where
transform = applyV
instance Transformable (Vector 3 t) 2 t where
transform = applyVH
instance Transformable (Vector 4 t) 3 t where
transform = applyVH
instance Transformable (Vector 5 t) 4 t where
transform = applyVH
instance Transformable (Vector 6 t) 5 t where
transform = applyVH
instance Transformable (Vector 7 t) 6 t where
transform = applyVH
instance Transformable (Vector 8 t) 7 t where
transform = applyVH
--instance ( Functor f
-- , Transformable b n t
-- ) => Transformable (f b) n t where
-- transform = fmap transform . liftTransform
-- | Apply transform on each point within Functor
ftransform :: ( SpaceTransform s n t
, Functor f
, Transformable b n t
, Floating t, Eq t)
=> s (f b) -> f b
ftransform = fmap transform . liftTransform
-- -- | return the overall rotation and scale
-- getRotationScale :: s x -> Quaternion t
-- -- | return the overall translation of the transform
-- getTranslation :: s x -> Vector 3 t
| achirkin/fastvec | src/Data/Geometry/Transform/SpaceTransform.hs | bsd-3-clause | 5,730 | 0 | 13 | 1,346 | 1,152 | 626 | 526 | 61 | 1 |
import Data.List (sort)
fibonacciTail :: [Int]
fibonacciTail = 1 : 1 : zipWith (\x y -> (x + y) `mod` 1000000000) fibonacciTail (tail fibonacciTail)
fibonacciHead :: [Int]
fibonacciHead = [ gen n | n <- [1 .. ] ] where
logPhi = log ((1 + sqrt 5) / 2) / (log 10) :: Double
logSqrt5 = (log (sqrt 5)) / (log 10) :: Double
gen n = (round $ 10 ** (t - (fromIntegral (floor t)) + 10)) `div` 100
where t = logPhi * n - logSqrt5 :: Double
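-- The head digits follow from Binet's formula (a sketch of the reasoning):
-- F(n) ~ phi^n / sqrt 5, so log10 (F n) ~ n * log10 phi - log10 (sqrt 5).
-- Keeping only the fractional part of that logarithm and raising 10 to it
-- recovers the leading digits without computing F(n) exactly.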
main = print $ solve fibonacciHead fibonacciTail 1 where
pandigital xs = (sort xs) == "123456789"
check = pandigital . show
solve (x:xs) (y:ys) index
| (check x) && (check y) = index
| otherwise = solve xs ys (index + 1)
| foreverbell/project-euler-solutions | src/104.hs | bsd-3-clause | 704 | 5 | 16 | 184 | 360 | 188 | 172 | 15 | 1 |
{-
Inspired by the article "Evaluating Cellular Automata is Comonadic"
by Dan Piponi.
See: http://blog.sigfpe.com/2006/12/evaluating-cellular-automata-is.html
-}
module Lets.Cellular where
-- define a Zipper for type x
data U x = U [x] x [x]
-- deriving Show
-- move one step left
left :: U x -> U x
left (U (l:ls) v r) = U ls l (v:r)
-- move one step right
right :: U x -> U x
right (U l v (r:rs)) = U (v:l) r rs
-- implement Functor for U
instance Functor U where
fmap f (U l v r) = U (fmap f l) (f v) (fmap f r)
-- define Comonad
class Functor w => Comonad w where
extend :: (w a -> b) -> w a -> w b
extract :: w a -> a
duplicate :: w a -> w (w a)
-- implement Comonad for U
instance Comonad U where
extract = extractU
extend = extendU
duplicate = duplicateU
-- extract returns the current focus value 'v' from the comonad
-- a.k.a. coreturn, or copure
extractU (U _ v _) = v
-- duplicate constructs a "universe" of all possible shifts of a
-- a.k.a. cojoin
duplicateU :: U x -> U (U x)
duplicateU a = U (tail $ iterate left a) a (tail $ iterate right a)
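-- A small sketch: with u = U [0,-1..] 1 [2..],
-- extract (left u) == 0, extract (right u) == 2, and
-- extract (duplicate u) is u itself (the focus is unchanged).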
extendU :: (U x -> b) -> U x -> U b
extendU = \f -> fmap f . duplicateU
-- infix versions of extend
(<<=) = extendU
infixr 1 <<=
-- and flipped extend, a.k.a. cobind
(=>>) :: U x -> (U x -> b) -> U b
(=>>) = flip extendU
infixr 1 =>>
--
-- test
-- simple rule from the article
rule :: U Bool -> Bool
rule (U (l:_) v (r:_)) = not (l && v && not r || (l==v))
-- shift by i steps to the left (negative) or right (positive)
shift :: Int -> U x -> U x
shift i u = (iterate (if i<0 then left else right) u) !! abs i
toList i j u = take (j-i) $ half $ shift i u where
half (U _ b c) = [b] ++ c
rtest rule n =
let u = U (repeat False) True (repeat False)
in putStr $
unlines $
take n $
fmap (fmap (\x -> if x then '*' else ' ') . toList (-n) n) $
iterate (=>> rule) u
test = rtest rule 20
-- trivial initial state
u = U (repeat False) True (repeat False)
-- rule 110:
-- current pattern 111 110 101 100 011 010 001 000
-- new state for center cell 0 1 1 0 1 1 1 0
--
-- See: https://en.wikipedia.org/wiki/Rule_110
rule110 :: U Bool -> Bool
rule110 (U (True:_) True (False:_)) = True
rule110 (U (True:_) False (True:_)) = True
rule110 (U (False:_) True (True:_)) = True
rule110 (U (False:_) True (False:_)) = True
rule110 (U (False:_) False (True:_)) = True
rule110 _ = False
r110 = rtest rule110
--
-- rule 30:
-- current pattern 111 110 101 100 011 010 001 000
-- new state for center cell 0 0 0 1 1 1 1 0
--
-- See: https://en.wikipedia.org/wiki/Rule_30
rule30 :: U Bool -> Bool
rule30 (U (True:_) False (False:_)) = True
rule30 (U (False:_) True (True:_)) = True
rule30 (U (False:_) True (False:_)) = True
rule30 (U (False:_) False (True:_)) = True
rule30 _ = False
r30 = rtest rule30
--
-- rule 90: (XOR)
--
-- current pattern 111 110 101 100 011 010 001 000
-- new state for center cell 0 1 0 1 1 0 1 0
--
-- See: https://en.wikipedia.org/wiki/Rule_90
xor :: Bool -> Bool -> Bool
xor True False = True
xor False True = True
xor _ _ = False
rule90 :: U Bool -> Bool
rule90 (U (l:_) _ (r:_)) = l `xor` r
-- when u = U (F..) T (F..), rule90 produces a Sierpinski triangle
r90 = rtest rule90
--
-- rule 184:
--
-- current pattern 111 110 101 100 011 010 001 000
-- new state for center cell 1 0 1 1 1 0 0 0
--
-- See: https://en.wikipedia.org/wiki/Rule_184
rule184 :: U Bool -> Bool
rule184 (U (True:_) True (True:_)) = True
rule184 (U (True:_) False (True:_)) = True
rule184 (U (True:_) False (False:_)) = True
rule184 (U (False:_) True (True:_)) = True
rule184 _ = False
r184 = rtest rule184
| peterson/lets-haskell | src/Lets/Cellular.hs | bsd-3-clause | 3,835 | 0 | 15 | 1,028 | 1,460 | 776 | 684 | 70 | 2 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Quantity.RO.Rules
( rules
) where
import Data.String
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (isPositive)
import Duckling.Numeral.Types (NumeralData(..))
import Duckling.Quantity.Helpers
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.Numeral.Types as TNumeral
import qualified Duckling.Quantity.Types as TQuantity
ruleNumeralUnits :: Rule
ruleNumeralUnits = Rule
{ name = "<number> <units>"
, pattern =
[ Predicate isPositive
, regex "(de )?livr(a|e|ă)"
]
, prod = \case
(Token Numeral NumeralData {TNumeral.value = v}:_) ->
Just . Token Quantity $ quantity TQuantity.Pound v
_ -> Nothing
}
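-- For example (illustrative): a positive numeral token followed by "livre",
-- as in "2 livre" or "2 de livre", resolves to a Pound quantity of value 2.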
ruleQuantityOfProduct :: Rule
ruleQuantityOfProduct = Rule
{ name = "<quantity> of product"
, pattern =
[ dimension Quantity
, regex "de (carne|can[aă]|zah[aă]r|mamaliga)"
]
, prod = \case
(Token Quantity qd:
Token RegexMatch (GroupMatch (product:_)):
_) -> Just . Token Quantity $ withProduct product qd
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleNumeralUnits
, ruleQuantityOfProduct
]
| facebookincubator/duckling | Duckling/Quantity/RO/Rules.hs | bsd-3-clause | 1,463 | 0 | 17 | 288 | 318 | 188 | 130 | 40 | 2 |
module Language.Xi.Base.Data where
| fizruk/xi-base | src/Language/Xi/Base/Data.hs | bsd-3-clause | 35 | 0 | 3 | 3 | 8 | 6 | 2 | 1 | 0 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns, StandaloneDeriving,
MagicHash, UnboxedTuples #-}
{-# OPTIONS_HADDOCK hide #-}
#include "MachDeps.h"
#if SIZEOF_HSWORD == 4
#define DIGITS 9
#define BASE 1000000000
#elif SIZEOF_HSWORD == 8
#define DIGITS 18
#define BASE 1000000000000000000
#else
#error Please define DIGITS and BASE
-- DIGITS should be the largest integer such that
-- 10^DIGITS < 2^(SIZEOF_HSWORD * 8 - 1)
-- BASE should be 10^DIGITS. Note that ^ is not available yet.
#endif
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Show
-- Copyright : (c) The University of Glasgow, 1992-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The 'Show' class, and related operations.
--
-----------------------------------------------------------------------------
module GHC.Show
(
Show(..), ShowS,
-- Instances for Show: (), [], Bool, Ordering, Int, Char
-- Show support code
shows, showChar, showString, showMultiLineString,
showParen, showList__, showCommaSpace, showSpace,
showLitChar, showLitString, protectEsc,
intToDigit, showSignedInt,
appPrec, appPrec1,
-- Character operations
asciiTab,
)
where
import GHC.Base
import GHC.List ((!!), foldr1, break)
import GHC.Num
import GHC.Stack.Types
-- | The @shows@ functions return a function that prepends the
-- output 'String' to an existing 'String'. This allows constant-time
-- concatenation of results using function composition.
type ShowS = String -> String
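-- A small illustration (not part of the original comments): 'ShowS' values
-- are composed with @(.)@ rather than concatenated, e.g.
--
-- > greet :: ShowS
-- > greet = showString "x = " . shows (42 :: Int) . showChar '!'
-- > -- greet "" evaluates to "x = 42!"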
-- | Conversion of values to readable 'String's.
--
-- Derived instances of 'Show' have the following properties, which
-- are compatible with derived instances of 'Text.Read.Read':
--
-- * The result of 'show' is a syntactically correct Haskell
-- expression containing only constants, given the fixity
-- declarations in force at the point where the type is declared.
-- It contains only the constructor names defined in the data type,
-- parentheses, and spaces. When labelled constructor fields are
-- used, braces, commas, field names, and equal signs are also used.
--
-- * If the constructor is defined to be an infix operator, then
-- 'showsPrec' will produce infix applications of the constructor.
--
-- * the representation will be enclosed in parentheses if the
-- precedence of the top-level constructor in @x@ is less than @d@
-- (associativity is ignored). Thus, if @d@ is @0@ then the result
-- is never surrounded in parentheses; if @d@ is @11@ it is always
-- surrounded in parentheses, unless it is an atomic expression.
--
-- * If the constructor is defined using record syntax, then 'show'
-- will produce the record-syntax form, with the fields given in the
-- same order as the original declaration.
--
-- For example, given the declarations
--
-- > infixr 5 :^:
-- > data Tree a = Leaf a | Tree a :^: Tree a
--
-- the derived instance of 'Show' is equivalent to
--
-- > instance (Show a) => Show (Tree a) where
-- >
-- > showsPrec d (Leaf m) = showParen (d > app_prec) $
-- > showString "Leaf " . showsPrec (app_prec+1) m
-- > where app_prec = 10
-- >
-- > showsPrec d (u :^: v) = showParen (d > up_prec) $
-- > showsPrec (up_prec+1) u .
-- > showString " :^: " .
-- > showsPrec (up_prec+1) v
-- > where up_prec = 5
--
-- Note that right-associativity of @:^:@ is ignored. For example,
--
-- * @'show' (Leaf 1 :^: Leaf 2 :^: Leaf 3)@ produces the string
-- @\"Leaf 1 :^: (Leaf 2 :^: Leaf 3)\"@.
class Show a where
{-# MINIMAL showsPrec | show #-}
-- | Convert a value to a readable 'String'.
--
-- 'showsPrec' should satisfy the law
--
-- > showsPrec d x r ++ s == showsPrec d x (r ++ s)
--
-- Derived instances of 'Text.Read.Read' and 'Show' satisfy the following:
--
-- * @(x,\"\")@ is an element of
-- @('Text.Read.readsPrec' d ('showsPrec' d x \"\"))@.
--
-- That is, 'Text.Read.readsPrec' parses the string produced by
-- 'showsPrec', and delivers the value that 'showsPrec' started with.
showsPrec :: Int -- ^ the operator precedence of the enclosing
-- context (a number from @0@ to @11@).
-- Function application has precedence @10@.
-> a -- ^ the value to be converted to a 'String'
-> ShowS
-- | A specialised variant of 'showsPrec', using precedence context
-- zero, and returning an ordinary 'String'.
show :: a -> String
-- | The method 'showList' is provided to allow the programmer to
-- give a specialised way of showing lists of values.
-- For example, this is used by the predefined 'Show' instance of
-- the 'Char' type, where values of type 'String' should be shown
-- in double quotes, rather than between square brackets.
showList :: [a] -> ShowS
showsPrec _ x s = show x ++ s
show x = shows x ""
showList ls s = showList__ shows ls s
showList__ :: (a -> ShowS) -> [a] -> ShowS
showList__ _ [] s = "[]" ++ s
showList__ showx (x:xs) s = '[' : showx x (showl xs)
where
showl [] = ']' : s
showl (y:ys) = ',' : showx y (showl ys)
appPrec, appPrec1 :: Int
-- Use unboxed stuff because we don't have overloaded numerics yet
appPrec = I# 10# -- Precedence of application:
-- one more than the maximum operator precedence of 9
appPrec1 = I# 11# -- appPrec + 1
--------------------------------------------------------------
-- Simple Instances
--------------------------------------------------------------
deriving instance Show ()
-- | @since 2.01
instance Show a => Show [a] where
{-# SPECIALISE instance Show [String] #-}
{-# SPECIALISE instance Show [Char] #-}
{-# SPECIALISE instance Show [Int] #-}
showsPrec _ = showList
deriving instance Show Bool
deriving instance Show Ordering
-- | @since 2.01
instance Show Char where
showsPrec _ '\'' = showString "'\\''"
showsPrec _ c = showChar '\'' . showLitChar c . showChar '\''
showList cs = showChar '"' . showLitString cs . showChar '"'
-- | @since 2.01
instance Show Int where
showsPrec = showSignedInt
-- | @since 2.01
instance Show Word where
showsPrec _ (W# w) = showWord w
showWord :: Word# -> ShowS
showWord w# cs
| isTrue# (w# `ltWord#` 10##) = C# (chr# (ord# '0'# +# word2Int# w#)) : cs
| otherwise = case chr# (ord# '0'# +# word2Int# (w# `remWord#` 10##)) of
c# ->
showWord (w# `quotWord#` 10##) (C# c# : cs)
deriving instance Show a => Show (Maybe a)
deriving instance Show a => Show (NonEmpty a)
-- | @since 2.01
instance Show TyCon where
showsPrec p (TyCon _ _ _ tc_name _ _) = showsPrec p tc_name
-- | @since 4.9.0.0
instance Show TrName where
showsPrec _ (TrNameS s) = showString (unpackCString# s)
showsPrec _ (TrNameD s) = showString s
-- | @since 4.9.0.0
instance Show Module where
showsPrec _ (Module p m) = shows p . (':' :) . shows m
-- | @since 4.9.0.0
instance Show CallStack where
showsPrec _ = shows . getCallStack
deriving instance Show SrcLoc
--------------------------------------------------------------
-- Show instances for the first few tuples
--------------------------------------------------------------
-- The explicit 's' parameters are important
-- Otherwise GHC thinks that "shows x" might take a lot of work to compute
-- and generates defns like
-- showsPrec _ (x,y) = let sx = shows x; sy = shows y in
-- \s -> showChar '(' (sx (showChar ',' (sy (showChar ')' s))))
-- | @since 2.01
instance (Show a, Show b) => Show (a,b) where
showsPrec _ (a,b) s = show_tuple [shows a, shows b] s
-- | @since 2.01
instance (Show a, Show b, Show c) => Show (a, b, c) where
showsPrec _ (a,b,c) s = show_tuple [shows a, shows b, shows c] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d) => Show (a, b, c, d) where
showsPrec _ (a,b,c,d) s = show_tuple [shows a, shows b, shows c, shows d] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e) => Show (a, b, c, d, e) where
showsPrec _ (a,b,c,d,e) s = show_tuple [shows a, shows b, shows c, shows d, shows e] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f) => Show (a,b,c,d,e,f) where
showsPrec _ (a,b,c,d,e,f) s = show_tuple [shows a, shows b, shows c, shows d, shows e, shows f] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g)
=> Show (a,b,c,d,e,f,g) where
showsPrec _ (a,b,c,d,e,f,g) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h)
=> Show (a,b,c,d,e,f,g,h) where
showsPrec _ (a,b,c,d,e,f,g,h) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i)
=> Show (a,b,c,d,e,f,g,h,i) where
showsPrec _ (a,b,c,d,e,f,g,h,i) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j)
=> Show (a,b,c,d,e,f,g,h,i,j) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j, Show k)
=> Show (a,b,c,d,e,f,g,h,i,j,k) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j,k) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j, shows k] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j, Show k,
Show l)
=> Show (a,b,c,d,e,f,g,h,i,j,k,l) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j,k,l) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j, shows k, shows l] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j, Show k,
Show l, Show m)
=> Show (a,b,c,d,e,f,g,h,i,j,k,l,m) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j,k,l,m) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j, shows k, shows l, shows m] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j, Show k,
Show l, Show m, Show n)
=> Show (a,b,c,d,e,f,g,h,i,j,k,l,m,n) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j,k,l,m,n) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j, shows k, shows l, shows m, shows n] s
-- | @since 2.01
instance (Show a, Show b, Show c, Show d, Show e, Show f, Show g, Show h, Show i, Show j, Show k,
Show l, Show m, Show n, Show o)
=> Show (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o) where
showsPrec _ (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o) s
= show_tuple [shows a, shows b, shows c, shows d, shows e, shows f, shows g, shows h,
shows i, shows j, shows k, shows l, shows m, shows n, shows o] s
show_tuple :: [ShowS] -> ShowS
show_tuple ss = showChar '('
. foldr1 (\s r -> s . showChar ',' . r) ss
. showChar ')'
--------------------------------------------------------------
-- Support code for Show
--------------------------------------------------------------
-- | equivalent to 'showsPrec' with a precedence of 0.
shows :: (Show a) => a -> ShowS
shows = showsPrec 0
-- | utility function converting a 'Char' to a show function that
-- simply prepends the character unchanged.
showChar :: Char -> ShowS
showChar = (:)
-- | utility function converting a 'String' to a show function that
-- simply prepends the string unchanged.
showString :: String -> ShowS
showString = (++)
-- | utility function that surrounds the inner show function with
-- parentheses when the 'Bool' parameter is 'True'.
showParen :: Bool -> ShowS -> ShowS
showParen b p = if b then showChar '(' . p . showChar ')' else p
showSpace :: ShowS
showSpace = {-showChar ' '-} \ xs -> ' ' : xs
showCommaSpace :: ShowS
showCommaSpace = showString ", "
-- Code specific for characters
-- | Convert a character to a string using only printable characters,
-- using Haskell source-language escape conventions. For example:
--
-- > showLitChar '\n' s = "\\n" ++ s
--
showLitChar :: Char -> ShowS
showLitChar c s | c > '\DEL' = showChar '\\' (protectEsc isDec (shows (ord c)) s)
showLitChar '\DEL' s = showString "\\DEL" s
showLitChar '\\' s = showString "\\\\" s
showLitChar c s | c >= ' ' = showChar c s
showLitChar '\a' s = showString "\\a" s
showLitChar '\b' s = showString "\\b" s
showLitChar '\f' s = showString "\\f" s
showLitChar '\n' s = showString "\\n" s
showLitChar '\r' s = showString "\\r" s
showLitChar '\t' s = showString "\\t" s
showLitChar '\v' s = showString "\\v" s
showLitChar '\SO' s = protectEsc (== 'H') (showString "\\SO") s
showLitChar c s = showString ('\\' : asciiTab!!ord c) s
-- I've done manual eta-expansion here, because otherwise it's
-- impossible to stop (asciiTab!!ord) getting floated out as an MFE
showLitString :: String -> ShowS
-- | Same as 'showLitChar', but for strings
-- It converts the string to a string using Haskell escape conventions
-- for non-printable characters. Does not add double-quotes around the
-- whole thing; the caller should do that.
-- The main difference from showLitChar (apart from the fact that the
-- argument is a string not a list) is that we must escape double-quotes
showLitString [] s = s
showLitString ('"' : cs) s = showString "\\\"" (showLitString cs s)
showLitString (c : cs) s = showLitChar c (showLitString cs s)
-- Making 's' an explicit parameter makes it clear to GHC that
-- showLitString has arity 2, which avoids it allocating an extra lambda
-- The sticking point is the recursive call to (showLitString cs), which
-- it can't figure out would be ok with arity 2.
showMultiLineString :: String -> [String]
-- | Like 'showLitString' (expand escape characters using Haskell
-- escape conventions), but
-- * break the string into multiple lines
-- * wrap the entire thing in double quotes
-- Example: @showMultiLineString "hello\ngoodbye\nblah"@
-- returns @["\"hello\\n\\", "\\goodbye\n\\", "\\blah\""]@
showMultiLineString str
= go '\"' str
where
go ch s = case break (== '\n') s of
(l, _:s'@(_:_)) -> (ch : showLitString l "\\n\\") : go '\\' s'
(l, "\n") -> [ch : showLitString l "\\n\""]
(l, _) -> [ch : showLitString l "\""]
isDec :: Char -> Bool
isDec c = c >= '0' && c <= '9'
protectEsc :: (Char -> Bool) -> ShowS -> ShowS
protectEsc p f = f . cont
where cont s@(c:_) | p c = "\\&" ++ s
cont s = s
asciiTab :: [String]
asciiTab = -- Using an array drags in the array module. listArray ('\NUL', ' ')
["NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
"BS", "HT", "LF", "VT", "FF", "CR", "SO", "SI",
"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
"CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US",
"SP"]
-- Code specific for Ints.
-- | Convert an 'Int' in the range @0@..@15@ to the corresponding single
-- digit 'Char'. This function fails on other inputs, and generates
-- lower-case hexadecimal digits.
intToDigit :: Int -> Char
intToDigit (I# i)
| isTrue# (i >=# 0#) && isTrue# (i <=# 9#) = unsafeChr (ord '0' + I# i)
| isTrue# (i >=# 10#) && isTrue# (i <=# 15#) = unsafeChr (ord 'a' + I# i - 10)
| otherwise = errorWithoutStackTrace ("Char.intToDigit: not a digit " ++ show (I# i))
showSignedInt :: Int -> Int -> ShowS
showSignedInt (I# p) (I# n) r
| isTrue# (n <# 0#) && isTrue# (p ># 6#) = '(' : itos n (')' : r)
| otherwise = itos n r
itos :: Int# -> String -> String
itos n# cs
| isTrue# (n# <# 0#) =
let !(I# minInt#) = minInt in
if isTrue# (n# ==# minInt#)
-- negateInt# minInt overflows, so we can't do that:
then '-' : (case n# `quotRemInt#` 10# of
(# q, r #) ->
itos' (negateInt# q) (itos' (negateInt# r) cs))
else '-' : itos' (negateInt# n#) cs
| otherwise = itos' n# cs
where
itos' :: Int# -> String -> String
itos' x# cs'
| isTrue# (x# <# 10#) = C# (chr# (ord# '0'# +# x#)) : cs'
| otherwise = case x# `quotRemInt#` 10# of
(# q, r #) ->
case chr# (ord# '0'# +# r) of
c# ->
itos' q (C# c# : cs')
--------------------------------------------------------------
-- The Integer instances for Show
--------------------------------------------------------------
-- | @since 2.01
instance Show Integer where
showsPrec p n r
| p > 6 && n < 0 = '(' : integerToString n (')' : r)
-- Minor point: testing p first gives better code
-- in the not-uncommon case where the p argument
-- is a constant
| otherwise = integerToString n r
showList = showList__ (showsPrec 0)
-- Divide and conquer implementation of string conversion
integerToString :: Integer -> String -> String
integerToString n0 cs0
| n0 < 0 = '-' : integerToString' (- n0) cs0
| otherwise = integerToString' n0 cs0
where
integerToString' :: Integer -> String -> String
integerToString' n cs
| n < BASE = jhead (fromInteger n) cs
| otherwise = jprinth (jsplitf (BASE*BASE) n) cs
-- Split n into digits in base p. We first split n into digits
-- in base p*p and then split each of these digits into two.
-- Note that the first 'digit' modulo p*p may have a leading zero
-- in base p that we need to drop - this is what jsplith takes care of.
    -- jsplitb then handles the remaining digits.
jsplitf :: Integer -> Integer -> [Integer]
jsplitf p n
| p > n = [n]
| otherwise = jsplith p (jsplitf (p*p) n)
jsplith :: Integer -> [Integer] -> [Integer]
jsplith p (n:ns) =
case n `quotRemInteger` p of
(# q, r #) ->
if q > 0 then q : r : jsplitb p ns
else r : jsplitb p ns
jsplith _ [] = errorWithoutStackTrace "jsplith: []"
jsplitb :: Integer -> [Integer] -> [Integer]
jsplitb _ [] = []
jsplitb p (n:ns) = case n `quotRemInteger` p of
(# q, r #) ->
q : r : jsplitb p ns
    -- Convert a number that has been split into digits in base BASE^2.
    -- This includes a last splitting step and then conversion of digits
-- that all fit into a machine word.
jprinth :: [Integer] -> String -> String
jprinth (n:ns) cs =
case n `quotRemInteger` BASE of
(# q', r' #) ->
let q = fromInteger q'
r = fromInteger r'
in if q > 0 then jhead q $ jblock r $ jprintb ns cs
else jhead r $ jprintb ns cs
jprinth [] _ = errorWithoutStackTrace "jprinth []"
jprintb :: [Integer] -> String -> String
jprintb [] cs = cs
jprintb (n:ns) cs = case n `quotRemInteger` BASE of
(# q', r' #) ->
let q = fromInteger q'
r = fromInteger r'
in jblock q $ jblock r $ jprintb ns cs
-- Convert an integer that fits into a machine word. Again, we have two
-- functions, one that drops leading zeros (jhead) and one that doesn't
-- (jblock)
jhead :: Int -> String -> String
jhead n cs
| n < 10 = case unsafeChr (ord '0' + n) of
c@(C# _) -> c : cs
| otherwise = case unsafeChr (ord '0' + r) of
c@(C# _) -> jhead q (c : cs)
where
(q, r) = n `quotRemInt` 10
jblock = jblock' {- ' -} DIGITS
jblock' :: Int -> Int -> String -> String
jblock' d n cs
| d == 1 = case unsafeChr (ord '0' + n) of
c@(C# _) -> c : cs
| otherwise = case unsafeChr (ord '0' + r) of
c@(C# _) -> jblock' (d - 1) q (c : cs)
where
(q, r) = n `quotRemInt` 10
| ezyang/ghc | libraries/base/GHC/Show.hs | bsd-3-clause | 21,363 | 0 | 18 | 6,006 | 6,054 | 3,283 | 2,771 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE RecordWildCards #-}
module Duckling.Ranking.Train
( makeClassifiers
) where
import Data.HashSet (HashSet)
import Prelude
import qualified Data.HashMap.Strict as HashMap
import qualified Data.HashSet as HashSet
import qualified Data.List as List
import Duckling.Engine
import Duckling.Ranking.Extraction
import Duckling.Ranking.Types
import Duckling.Resolve
import Duckling.Testing.Types
import Duckling.Types
-- -----------------------------------------------------------------
-- Probabilistic layer
-- Naive Bayes classifier with Laplace smoothing
-- Train one classifier per rule, based on the test corpus.
makeClassifiers :: [Rule] -> Corpus -> Classifiers
makeClassifiers rules corpus = HashMap.map train $ makeDataset rules corpus
-- | Train a classifier for a single rule
train :: [Datum] -> Classifier
train datums = Classifier {okData = okClass, koData = koClass}
where
total = List.length datums
(ok, ko) = List.partition snd datums
merge :: [BagOfFeatures] -> BagOfFeatures -> BagOfFeatures
merge xs m = List.foldl' (HashMap.unionWith (+)) m xs
okCounts = merge (map fst ok) HashMap.empty
koCounts = merge (map fst ko) HashMap.empty
vocSize = HashMap.size $ HashMap.union okCounts koCounts
okClass = makeClass okCounts total (List.length ok) vocSize
koClass = makeClass koCounts total (List.length ko) vocSize
-- | Compute prior and likelihoods log-probabilities for one class.
makeClass :: BagOfFeatures -> Int -> Int -> Int -> ClassData
makeClass feats total classTotal vocSize = ClassData
{ prior = prior
, unseen = unseen
, likelihoods = likelihoods
, n = classTotal
}
where
prior = log $ fromIntegral classTotal / fromIntegral total
denum = vocSize + sum (HashMap.elems feats)
unseen = log $ 1.0 / (fromIntegral denum + 1.0)
likelihoods = HashMap.map (\x ->
log $ (fromIntegral x + 1.0) / fromIntegral denum
) feats
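-- A worked sketch of the smoothing above (illustrative numbers only): with
-- vocSize = 3, feats = fromList [("f", 2)], classTotal = 2 and total = 4,
-- prior = log (2 / 4)
-- denum = 3 + 2 = 5
-- unseen = log (1 / 6)
-- likelihood of "f" = log ((2 + 1) / 5)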
-- | Augment the dataset with one example.
-- | Add all the nodes contributing to the resolutions of the input sentence.
-- | Classes:
-- | 1) True (node contributed to a token passing test predicate)
-- | 2) False (node didn't contribute to any passing tokens)
makeDataset1 :: [Rule] -> Context -> Options -> Dataset -> Example -> Dataset
makeDataset1 rules context options dataset (sentence, predicate) = dataset'
where
tokens = parseAndResolve rules sentence context options
(ok, ko) = List.partition (predicate context) tokens
subnodes :: Node -> HashSet Node
subnodes node@(Node{..}) = case children of
[] -> HashSet.empty
cs -> HashSet.unions $ HashSet.singleton node:map subnodes cs
nodesOK = HashSet.unions $ map (subnodes . node) ok
nodesKO = HashSet.difference
(HashSet.unions $ map (subnodes . node) ko) nodesOK
updateDataset :: Class -> HashSet Node -> Dataset -> Dataset
updateDataset klass nodes dataset =
HashSet.foldl' (\dataset node@Node {..} ->
case rule of
Just rule -> HashMap.insertWith (++) rule
[(extractFeatures node, klass)] dataset
Nothing -> dataset
) dataset nodes
dataset' = updateDataset False nodesKO $ updateDataset True nodesOK dataset
-- | Build a dataset (rule name -> datums)
makeDataset :: [Rule] -> Corpus -> Dataset
makeDataset rules (context, options, examples) =
List.foldl' (makeDataset1 rules context options) HashMap.empty examples
| facebookincubator/duckling | exe/Duckling/Ranking/Train.hs | bsd-3-clause | 3,632 | 0 | 16 | 705 | 919 | 500 | 419 | 62 | 3 |
--------------------------------------------------------------
--------------------------------------------------------------
--
-- HaskellEngine
-- MyForteIsTimeTravel
--
--------------------------------------------------------------
--------------------------------------------------------------
module Window (fps, width, height, window, background, bottomLeftX, bottomY, onScreen) where
-------------------------------------
-- Window Data
-------------------------------------
import Graphics.Gloss
import Linear
-------------------------------------
-- | window parameters
-------------------------------------
fps :: Int; fps = 60 -- | frame rate
width :: Int; width = 1400 -- | width of render window
height :: Int; height = 800 -- | height of render window
-------------------------------------
-- | render target
-------------------------------------
window :: Display; window = InWindow "HaskellEngine" (width, height) (10, 10)
-------------------------------------
-- | clear colour
-------------------------------------
background :: Color; background = dark white
-------------------------------------
-- | useful positions
-------------------------------------
bottomLeftX :: Float
bottomLeftX = (fromIntegral width) * (-0.5)
bottomY :: Float
bottomY = (fromIntegral height) * (-0.5)
-------------------------------------
-- | says if a point is on the screen
-------------------------------------
onScreen :: Vector2D -> Bool
onScreen point =
if (x point) > fromIntegral (width) * 0.5 then False else
if (x point) < fromIntegral (width) * (-0.5) then False else
if (y point) > fromIntegral (height) * 0.5 then False else
if (y point) < fromIntegral (height) * (-0.5) then False else
    True
| MyForteIsTimeTravel/HaskellEngine | src/Window.hs | bsd-3-clause | 1753 | 0 | 12 | 238 | 333 | 204 | 129 | 19 | 5
module Observable.MCMC.NUTS (nuts) where
import Control.Monad
import Control.Monad.Primitive
import Control.Monad.Trans
import Control.Monad.Trans.State.Strict
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Vector.Unboxed (Vector, Unbox)
import qualified Data.Vector.Unboxed as V
import Observable.Core
import Observable.Types
import Debug.Trace
type Parameters = Vector Double
type Gradient = Parameters -> Parameters
type Particle = (Parameters, Parameters)
getStepSize :: Maybe Double -> OptionalStore -> Double
getStepSize (Just e) _ = e
getStepSize Nothing store = e where
(ODouble e) = HashMap.lookupDefault (ODouble 0.1) NUTS store
updateStepSize :: Double -> OptionalStore -> OptionalStore
updateStepSize e = HashMap.insert NUTS (ODouble e)
-- | The NUTS transition kernel.
nuts :: PrimMonad m => Transition m Double
nuts = do
Chain t target _ store <- get
r0 <- V.replicateM (V.length t) (lift $ normal 0 1)
z0 <- lift $ exponential 1
let logu = log (auxilliaryTarget lTarget t r0) - z0
lTarget = logObjective target
glTarget = handleGradient $ gradient target
e = getStepSize Nothing store
let go (tn, tp, rn, rp, tm, j, n, s)
| s == 1 = do
vj <- lift $ categorical [-1, 1]
z <- lift unit
(tnn, rnn, tpp, rpp, t1, n1, s1) <-
if vj == -1
then do
(tnn', rnn', _, _, t1', n1', s1') <-
buildTree lTarget glTarget tn rn logu vj j e
return (tnn', rnn', tp, rp, t1', n1', s1')
else do
(_, _, tpp', rpp', t1', n1', s1') <-
buildTree lTarget glTarget tp rp logu vj j e
return (tn, rn, tpp', rpp', t1', n1', s1')
let accept = s1 == 1 && (min 1 (fi n1 / fi n :: Double)) > z
n2 = n + n1
s2 = s1 * stopCriterion tnn tpp rnn rpp
j1 = succ j
t2 | accept = t1
| otherwise = tm
go (tnn, tpp, rnn, rpp, t2, j1, n2, s2)
| otherwise = do
put $ Chain tm target (lTarget tm) (updateStepSize e store)
return tm
go (t, t, r0, r0, t, 0, 1, 1)
buildTree lTarget glTarget t r logu v 0 e = do
let (t0, r0) = leapfrog glTarget (t, r) (v * e)
joint = log $ auxilliaryTarget lTarget t0 r0
n = indicate (logu < joint)
s = indicate (logu - 1000 < joint)
return (t0, r0, t0, r0, t0, n, s)
buildTree lTarget glTarget t r logu v j e = do
z <- lift unit
(tn, rn, tp, rp, t0, n0, s0) <-
buildTree lTarget glTarget t r logu v (pred j) e
if s0 == 1
then do
(tnn, rnn, tpp, rpp, t1, n1, s1) <-
if v == -1
then do
(tnn', rnn', _, _, t1', n1', s1') <-
buildTree lTarget glTarget tn rn logu v (pred j) e
return (tnn', rnn', tp, rp, t1', n1', s1')
else do
(_, _, tpp', rpp', t1', n1', s1') <-
buildTree lTarget glTarget tp rp logu v (pred j) e
return (tn, rn, tpp', rpp', t1', n1', s1')
let accept = (fi n1 / max (fi (n0 + n1)) 1) > (z :: Double)
n2 = n0 + n1
s2 = s0 * s1 * stopCriterion tnn tpp rnn rpp
t2 | accept = t1
| otherwise = t0
return (tnn, rnn, tpp, rpp, t2, n2, s2)
else return (tn, rn, tp, rp, t0, n0, s0)
-- | Determine whether or not to stop doubling the tree of candidate states.
stopCriterion :: (Integral a, Num b, Ord b, Unbox b)
=> Vector b -> Vector b -> Vector b -> Vector b -> a
stopCriterion tn tp rn rp =
indicate (positionDifference `innerProduct` rn >= 0)
* indicate (positionDifference `innerProduct` rp >= 0)
where
positionDifference = tp .- tn
-- | Simulate a single step of Hamiltonian dynamics.
leapfrog :: Gradient -> Particle -> Double -> Particle
leapfrog glTarget (t, r) e = (tf, rf) where
rm = adjustMomentum glTarget e t r
tf = adjustPosition e rm t
rf = adjustMomentum glTarget e tf rm
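-- Spelled out, the half-step scheme above is (a sketch):
-- r_half = r + (e/2) * glTarget t
-- t'     = t + e * r_half
-- r'     = r_half + (e/2) * glTarget t'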
-- | Adjust momentum.
adjustMomentum :: (Fractional c, Unbox c)
=> (t -> Vector c) -> c -> t -> Vector c -> Vector c
adjustMomentum glTarget e t r = r .+ ((e / 2) .* glTarget t)
-- | Adjust position.
adjustPosition :: (Num c, Unbox c) => c -> Vector c -> Vector c -> Vector c
adjustPosition e r t = t .+ (e .* r)
-- | The MH acceptance ratio for a given proposal.
acceptanceRatio :: (Floating a, Unbox a)
=> (t -> a) -> t -> t -> Vector a -> Vector a -> a
acceptanceRatio lTarget t0 t1 r0 r1 = auxilliaryTarget lTarget t1 r1
/ auxilliaryTarget lTarget t0 r0
-- | The negative potential.
auxilliaryTarget :: (Floating a, Unbox a) => (t -> a) -> t -> Vector a -> a
auxilliaryTarget lTarget t r = exp (lTarget t - 0.5 * innerProduct r r)
-- | Simple inner product.
innerProduct :: (Num a, Unbox a) => Vector a -> Vector a -> a
innerProduct xs ys = V.sum $ V.zipWith (*) xs ys
-- | Vectorized multiplication.
(.*) :: (Num a, Unbox a) => a -> Vector a -> Vector a
z .* xs = V.map (* z) xs
-- | Vectorized subtraction.
(.-) :: (Num a, Unbox a) => Vector a -> Vector a -> Vector a
xs .- ys = V.zipWith (-) xs ys
-- | Vectorized addition.
(.+) :: (Num a, Unbox a) => Vector a -> Vector a -> Vector a
xs .+ ys = V.zipWith (+) xs ys
-- | Indicator function.
indicate :: Integral a => Bool -> a
indicate True = 1
indicate False = 0
-- | Alias for fromIntegral.
fi :: (Integral a, Num b) => a -> b
fi = fromIntegral
| jtobin/deprecated-observable | src/Observable/MCMC/NUTS.hs | bsd-3-clause | 5,611 | 0 | 22 | 1,723 | 2,255 | 1,201 | 1,054 | 119 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
{-# LANGUAGE PatternGuards, BangPatterns #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.HTTP2.Worker (
Responder
, response
, worker
) where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception (Exception, SomeException(..), AsyncException(..))
import qualified Control.Exception as E
import Control.Monad (void, when)
import Data.Typeable
import qualified Network.HTTP.Types as H
import Network.HTTP2
import Network.HTTP2.Priority
import Network.Wai
import Network.Wai.Handler.Warp.HTTP2.EncodeFrame
import Network.Wai.Handler.Warp.HTTP2.Manager
import Network.Wai.Handler.Warp.HTTP2.Types
import Network.Wai.Handler.Warp.IORef
import qualified Network.Wai.Handler.Warp.Response as R
import qualified Network.Wai.Handler.Warp.Settings as S
import qualified Network.Wai.Handler.Warp.Timeout as T
import Network.Wai.Internal (Response(..), ResponseReceived(..))
----------------------------------------------------------------
-- | The wai definition is 'type Application = Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived'.
-- This type implements the second argument (Response -> IO ResponseReceived)
-- with extra arguments.
type Responder = ThreadContinue -> T.Handle -> Stream -> Priority -> Request ->
Response -> IO ResponseReceived
-- | This function is passed to workers.
-- They also pass 'Response's from 'Application's to this function.
-- This function enqueues commands for the HTTP/2 sender.
response :: Context -> Manager -> Responder
response Context{outputQ} mgr tconf th strm pri req rsp = do
case rsp of
ResponseStream _ _ strmbdy -> do
-- We must not exit this WAI application.
          -- If the application exits, streaming would also be closed.
-- So, this work occupies this thread.
--
-- We need to increase the number of workers.
myThreadId >>= replaceWithAction mgr
          -- After this work, this thread stops in order to decrease
-- the number of workers.
setThreadContinue tconf False
          -- Since 'StreamingBody' is a loop, we cannot control it.
-- So, let's serialize 'Builder' with a designated queue.
sq <- newTBQueueIO 10 -- fixme: hard coding: 10
tvar <- newTVarIO SyncNone
enqueue outputQ (OResponse strm rsp (Persist sq tvar)) pri
let push b = do
atomically $ writeTBQueue sq (SBuilder b)
T.tickle th
flush = atomically $ writeTBQueue sq SFlush
-- Since we must not enqueue an empty queue to the priority
-- queue, we spawn a thread to ensure that the designated
-- queue is not empty.
void $ forkIO $ waiter tvar sq (enqueue outputQ) strm pri
strmbdy push flush
atomically $ writeTBQueue sq SFinish
_ -> do
setThreadContinue tconf True
let hasBody = requestMethod req /= H.methodHead
&& R.hasBody (responseStatus rsp)
enqueue outputQ (OResponse strm rsp (Oneshot hasBody)) pri
return ResponseReceived
data Break = Break deriving (Show, Typeable)
instance Exception Break
worker :: Context -> S.Settings -> T.Manager -> Application -> Responder -> IO ()
worker ctx@Context{inputQ,outputQ} set tm app responder = do
tid <- myThreadId
sinfo <- newStreamInfo
tcont <- newThreadContinue
let setup = T.register tm $ E.throwTo tid Break
E.bracket setup T.cancel $ go sinfo tcont
where
go sinfo tcont th = do
setThreadContinue tcont True
ex <- E.try $ do
T.pause th
Input strm req pri <- atomically $ readTQueue inputQ
setStreamInfo sinfo strm req
T.resume th
T.tickle th
app req $ responder tcont th strm pri req
cont1 <- case ex of
Right ResponseReceived -> return True
Left e@(SomeException _)
| Just Break <- E.fromException e -> do
cleanup sinfo Nothing
return True
-- killed by the sender
| Just ThreadKilled <- E.fromException e -> do
cleanup sinfo Nothing
return False
| otherwise -> do
cleanup sinfo (Just e)
return True
cont2 <- getThreadContinue tcont
when (cont1 && cont2) $ go sinfo tcont th
cleanup sinfo me = do
m <- getStreamInfo sinfo
case m of
Nothing -> return ()
Just (strm,req) -> do
closed ctx strm Killed
let frame = resetFrame InternalError (streamNumber strm)
enqueue outputQ (OFrame frame) highestPriority
case me of
Nothing -> return ()
Just e -> S.settingsOnException set (Just req) e
clearStreamInfo sinfo
waiter :: TVar Sync -> TBQueue Sequence
-> (Output -> Priority -> IO ()) -> Stream -> Priority
-> IO ()
waiter tvar sq enq strm pri = do
mx <- atomically $ do
mout <- readTVar tvar
case mout of
SyncNone -> retry
SyncNext nxt -> do
writeTVar tvar SyncNone
return $ Just nxt
SyncFinish -> return Nothing
case mx of
Nothing -> return ()
Just next -> do
atomically $ do
isEmpty <- isEmptyTBQueue sq
when isEmpty retry
enq (ONext strm next) pri
waiter tvar sq enq strm pri
----------------------------------------------------------------
-- | It would be nice if responders could return values to workers.
-- Unfortunately, 'ResponseReceived' is already defined in WAI 2.0.
-- It is not wise to change this type.
-- So, a reference is shared by a responder and its worker.
-- The reference refers to a value of this type as a return value.
-- If 'True', the worker continues to serve requests.
-- Otherwise, the worker finishes.
newtype ThreadContinue = ThreadContinue (IORef Bool)
newThreadContinue :: IO ThreadContinue
newThreadContinue = ThreadContinue <$> newIORef True
setThreadContinue :: ThreadContinue -> Bool -> IO ()
setThreadContinue (ThreadContinue ref) x = writeIORef ref x
getThreadContinue :: ThreadContinue -> IO Bool
getThreadContinue (ThreadContinue ref) = readIORef ref
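-- A small illustration (not part of Warp's public API): a responder flips
-- this flag to tell its worker whether to keep serving requests after the
-- current one, as 'response' does for streaming bodies above.
--
-- > tc <- newThreadContinue      -- starts as True
-- > setThreadContinue tc False   -- e.g. when handing off to a stream
-- > cont <- getThreadContinue tc -- False: the worker loop ends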
----------------------------------------------------------------
-- | The type to store enough information for 'settingsOnException'.
newtype StreamInfo = StreamInfo (IORef (Maybe (Stream,Request)))
newStreamInfo :: IO StreamInfo
newStreamInfo = StreamInfo <$> newIORef Nothing
clearStreamInfo :: StreamInfo -> IO ()
clearStreamInfo (StreamInfo ref) = writeIORef ref Nothing
setStreamInfo :: StreamInfo -> Stream -> Request -> IO ()
setStreamInfo (StreamInfo ref) strm req = writeIORef ref $ Just (strm,req)
getStreamInfo :: StreamInfo -> IO (Maybe (Stream, Request))
getStreamInfo (StreamInfo ref) = readIORef ref
| mfine/wai | warp/Network/Wai/Handler/Warp/HTTP2/Worker.hs | mit | 7,312 | 0 | 20 | 2,045 | 1,668 | 839 | 829 | 131 | 4 |
module Main where
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
-- from cassava
import Data.Csv
-- a simple type alias for data
type BaseballStats = (BL.ByteString, Int, BL.ByteString, Int)
main :: IO ()
main = do
csvData <- BL.readFile "batting.csv"
let v = decode NoHeader csvData :: Either String (V.Vector BaseballStats)
let summed = fmap (V.foldr summer 0) v
putStrLn $ "Total atBats was: " ++ (show summed)
where summer (name, year, team, atBats) n = n + atBats | dawsonc/bassbull | src/Main.hs | mit | 512 | 0 | 13 | 94 | 176 | 97 | 79 | 12 | 1 |
data A = A
data B = B deriving Show
main = show A
| roberth/uu-helium | test/typeerrors/Examples/NoInstance2.hs | gpl-3.0 | 51 | 4 | 4 | 15 | 33 | 14 | 19 | 3 | 1 |
-- https://projecteuler.net/problem=8
import Data.Char
adjacentWithGreatestProduct :: Int -> String -> Int
adjacentWithGreatestProduct nAdjacent inputStr = maximum [ product (adjacentNumbers startingIndex) | startingIndex <- [0..(inputLength-nAdjacent)]]
where
inputLength = length inputStr
adjacentNumbers fromIndex = map digitToInt $ take nAdjacent $ drop fromIndex inputStr
taskInputStr = "73167176531330624919225119674426574742355349194934\
\96983520312774506326239578318016984801869478851843\
\85861560789112949495459501737958331952853208805511\
\12540698747158523863050715693290963295227443043557\
\66896648950445244523161731856403098711121722383113\
\62229893423380308135336276614282806444486645238749\
\30358907296290491560440772390713810515859307960866\
\70172427121883998797908792274921901699720888093776\
\65727333001053367881220235421809751254540594752243\
\52584907711670556013604839586446706324415722155397\
\53697817977846174064955149290862569321978468622482\
\83972241375657056057490261407972968652414535100474\
\82166370484403199890008895243450658541227588666881\
\16427171479924442928230863465674813919123162824586\
\17866458359124566529476545682848912883142607690042\
\24219022671055626321111109370544217506941658960408\
\07198403850962455444362981230987879927244284909188\
\84580156166097919133875499200524063689912560717606\
\05886116467109405077541002256983155200055935729725\
\71636269561882670428252483600823257530420752963450"
--adjacentWithGreatestProduct 13 taskInputStr
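-- A minimal entry point (an added sketch; the original was presumably run
-- from GHCi as in the comment above):
main :: IO ()
main = print (adjacentWithGreatestProduct 13 taskInputStr)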
| nothiphop/project-euler | 008/solution.hs | apache-2.0 | 1,512 | 1 | 11 | 80 | 112 | 55 | 57 | 6 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module      : Distribution.Client.BuildReports.Storage
-- Copyright : (c) David Waern 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Anonymous build report data structure, printing and parsing
--
-----------------------------------------------------------------------------
module Distribution.Client.BuildReports.Storage (
-- * Storing and retrieving build reports
storeAnonymous,
storeLocal,
-- retrieve,
-- * 'InstallPlan' support
fromInstallPlan,
fromPlanningFailure,
) where
import qualified Distribution.Client.BuildReports.Anonymous as BuildReport
import Distribution.Client.BuildReports.Anonymous (BuildReport)
import Distribution.Client.Types
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan
( InstallPlan )
import Distribution.Package
( PackageId, packageId )
import Distribution.PackageDescription
( FlagAssignment )
import Distribution.Simple.InstallDirs
( PathTemplate, fromPathTemplate
, initialPathTemplateEnv, substPathTemplate )
import Distribution.System
( Platform(Platform) )
import Distribution.Compiler
( CompilerId(..), CompilerInfo(..) )
import Distribution.Simple.Utils
( comparing, equating )
import Data.List
( groupBy, sortBy )
import Data.Maybe
( catMaybes )
import System.FilePath
( (</>), takeDirectory )
import System.Directory
( createDirectoryIfMissing )
storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO ()
storeAnonymous reports = sequence_
[ appendFile file (concatMap format reports')
| (repo, reports') <- separate reports
, let file = repoLocalDir repo </> "build-reports.log" ]
--TODO: make this concurrency safe, either lock the report file or make sure
-- the writes for each report are atomic (under 4k and flush at boundaries)
where
format r = '\n' : BuildReport.show r ++ "\n"
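    -- i.e. build-reports.log ends up as a sequence of 'BuildReport.show'
    -- blocks separated by blank lines, one block per report.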
separate :: [(BuildReport, Maybe Repo)]
-> [(Repo, [BuildReport])]
separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ]))
. map concat
. groupBy (equating (repoName . head))
. sortBy (comparing (repoName . head))
. groupBy (equating repoName)
. onlyRemote
repoName (_,_,rrepo) = remoteRepoName rrepo
onlyRemote :: [(BuildReport, Maybe Repo)] -> [(BuildReport, Repo, RemoteRepo)]
onlyRemote rs =
[ (report, repo, remoteRepo)
| (report, Just repo@Repo { repoKind = Left remoteRepo }) <- rs ]
storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)]
-> Platform -> IO ()
storeLocal cinfo templates reports platform = sequence_
[ do createDirectoryIfMissing True (takeDirectory file)
appendFile file output
--TODO: make this concurrency safe, either lock the report file or make
-- sure the writes for each report are atomic
| (file, reports') <- groupByFileName
[ (reportFileName template report, report)
| template <- templates
, (report, _repo) <- reports ]
, let output = concatMap format reports'
]
where
format r = '\n' : BuildReport.show r ++ "\n"
reportFileName template report =
fromPathTemplate (substPathTemplate env template)
where env = initialPathTemplateEnv
(BuildReport.package report)
-- ToDo: In principle, we can support $pkgkey, but only
-- if the configure step succeeds. So add a Maybe field
-- to the build report, and either use that or make up
-- a fake identifier if it's not available.
(error "storeLocal: package key not available")
cinfo
platform
groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp))
. groupBy (equating fst)
. sortBy (comparing fst)
-- ------------------------------------------------------------
-- * InstallPlan support
-- ------------------------------------------------------------
fromInstallPlan :: InstallPlan -> [(BuildReport, Maybe Repo)]
fromInstallPlan plan = catMaybes
. map (fromPlanPackage platform comp)
. InstallPlan.toList
$ plan
where platform = InstallPlan.planPlatform plan
comp = compilerInfoId (InstallPlan.planCompiler plan)
fromPlanPackage :: Platform -> CompilerId
-> InstallPlan.PlanPackage
-> Maybe (BuildReport, Maybe Repo)
fromPlanPackage (Platform arch os) comp planPackage = case planPackage of
InstallPlan.Installed (ReadyPackage srcPkg flags _ deps) result
-> Just $ ( BuildReport.new os arch comp
(packageId srcPkg) flags (map packageId deps)
(Right result)
, extractRepo srcPkg)
InstallPlan.Failed (ConfiguredPackage srcPkg flags _ deps) result
-> Just $ ( BuildReport.new os arch comp
(packageId srcPkg) flags (map confSrcId deps)
(Left result)
, extractRepo srcPkg )
_ -> Nothing
where
extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) = Just repo
extractRepo _ = Nothing
fromPlanningFailure :: Platform -> CompilerId
-> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)]
fromPlanningFailure (Platform arch os) comp pkgids flags =
[ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing)
| pkgid <- pkgids ]
| seereason/cabal | cabal-install/Distribution/Client/BuildReports/Storage.hs | bsd-3-clause | 5,883 | 0 | 19 | 1,585 | 1,338 | 739 | 599 | 101 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Arrow
import qualified Data.Aeson as Json
import Data.Aeson ((.=))
import qualified Data.Text.Lazy.Encoding as T
import qualified Data.Text.Lazy.IO as T
import qualified Distribution.PackageDescription as PD
import Distribution.PackageDescription.Parse
import Distribution.ModuleName (components)
import qualified System.Environment as Environment
data CabalInfo = CabalInfo {
cabalLibrary :: Maybe CabalLibrary,
cabalExecutables :: [CabalExecutable],
cabalTests :: [CabalTest] }
deriving (Show)
instance Json.ToJSON CabalInfo where
toJSON info = Json.object [
"library" .= cabalLibrary info,
"executables" .= cabalExecutables info,
"tests" .= cabalTests info]
data CabalLibrary = CabalLibrary {
libraryModules :: [[String]],
libraryBuildInfo :: Info }
deriving (Show)
instance Json.ToJSON CabalLibrary where
toJSON lib = Json.object [
"modules" .= libraryModules lib,
"info" .= libraryBuildInfo lib]
data CabalExecutable = CabalExecutable {
executableName :: String,
executablePath :: FilePath,
executableBuildInfo :: Info }
deriving (Show)
instance Json.ToJSON CabalExecutable where
toJSON exe = Json.object [
"name" .= executableName exe,
"path" .= executablePath exe,
"info" .= executableBuildInfo exe]
data CabalTest = CabalTest {
testName :: String,
testEnabled :: Bool,
testBuildInfo :: Info }
deriving (Show)
instance Json.ToJSON CabalTest where
toJSON tst = Json.object [
"name" .= testName tst,
"enabled" .= testEnabled tst,
"info" .= testBuildInfo tst]
data Info = Info {
infoSourceDirs :: [FilePath] }
deriving (Show)
instance Json.ToJSON Info where
toJSON i = Json.object [
"source-dirs" .= infoSourceDirs i]
analyzeCabal :: String -> Either String CabalInfo
analyzeCabal source = case parsePackageDescription source of
ParseOk _ r -> Right CabalInfo {
cabalLibrary = fmap (toLibrary . PD.condTreeData) $ PD.condLibrary r,
cabalExecutables = fmap (toExecutable . second PD.condTreeData) $ PD.condExecutables r,
cabalTests = fmap (toTest . second PD.condTreeData) $ PD.condTestSuites r }
ParseFailed e -> Left $ "Parse failed: " ++ show e
where
toLibrary (PD.Library exposeds _ info) = CabalLibrary (map components exposeds) (toInfo info)
toExecutable (name, PD.Executable _ path info) = CabalExecutable name path (toInfo info)
toTest (name, PD.TestSuite _ _ info enabled) = CabalTest name enabled (toInfo info)
toInfo info = Info {
infoSourceDirs = PD.hsSourceDirs info }
main :: IO ()
main = do
programName <- Environment.getProgName
args <- Environment.getArgs
case args of
[filename] -> do
source <- readFile filename
let
output = case analyzeCabal source of
Left excuse -> Json.toJSON $ Json.object ["error" .= excuse]
Right info -> Json.toJSON info
T.putStrLn . T.decodeUtf8 . Json.encode $ output
_ -> putStrLn ("Usage: " ++ programName ++ " FILENAME")
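-- Example invocation (hypothetical project; the JSON layout follows the
-- ToJSON instances above, with the exact contents and key order depending
-- on the .cabal file and the aeson encoder):
--
-- $ runhaskell CabalInspector.hs my-project.cabal
-- {"library":{"modules":[["My","Module"]],"info":{"source-dirs":["src"]}},"executables":[],"tests":[]}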
| kgadek/SublimeHaskell | CabalInspector.hs | mit | 3,258 | 0 | 21 | 801 | 932 | 494 | 438 | 80 | 3 |
{-# LANGUAGE CPP #-}
module DataFamilies.Properties (tests) where
import Prelude ()
import Prelude.Compat
import DataFamilies.Encoders
import DataFamilies.Instances ()
import Properties hiding (tests)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
--------------------------------------------------------------------------------
tests :: Test
tests = testGroup "data families" [
testGroup "template-haskell" [
testGroup "toJSON" [
testGroup "Nullary" [
testProperty "string" (isString . thNullaryToJSONString)
, testProperty "2ElemArray" (is2ElemArray . thNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (isNullaryTaggedObject . thNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . thNullaryToJSONObjectWithSingleField)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON thNullaryParseJSONString thNullaryToJSONString)
, testProperty "2ElemArray" (toParseJSON thNullaryParseJSON2ElemArray thNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON thNullaryParseJSONTaggedObject thNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON thNullaryParseJSONObjectWithSingleField thNullaryToJSONObjectWithSingleField)
]
]
, testGroup "SomeType" [
testProperty "2ElemArray" (is2ElemArray . thSomeTypeToJSON2ElemArray)
, testProperty "TaggedObject" (isTaggedObject . thSomeTypeToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . thSomeTypeToJSONObjectWithSingleField)
, testGroup "roundTrip" [
testProperty "2ElemArray" (toParseJSON thSomeTypeParseJSON2ElemArray thSomeTypeToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON thSomeTypeParseJSONTaggedObject thSomeTypeToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON thSomeTypeParseJSONObjectWithSingleField thSomeTypeToJSONObjectWithSingleField)
]
]
, testGroup "Approx" [
testProperty "string" (isString . thApproxToJSONUnwrap)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . thApproxToJSONDefault)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON thApproxParseJSONUnwrap thApproxToJSONUnwrap)
, testProperty "ObjectWithSingleField" (toParseJSON thApproxParseJSONDefault thApproxToJSONDefault)
]
]
, testGroup "GADT" [
testProperty "string" (isString . thGADTToJSONUnwrap)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . thGADTToJSONDefault)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON thGADTParseJSONUnwrap thGADTToJSONUnwrap)
, testProperty "ObjectWithSingleField" (toParseJSON thGADTParseJSONDefault thGADTToJSONDefault)
]
]
]
, testGroup "toEncoding" [
testProperty "NullaryString" $
thNullaryToJSONString `sameAs` thNullaryToEncodingString
, testProperty "Nullary2ElemArray" $
thNullaryToJSON2ElemArray `sameAs` thNullaryToEncoding2ElemArray
, testProperty "NullaryTaggedObject" $
thNullaryToJSONTaggedObject `sameAs` thNullaryToEncodingTaggedObject
, testProperty "NullaryObjectWithSingleField" $
thNullaryToJSONObjectWithSingleField `sameAs`
thNullaryToEncodingObjectWithSingleField
, testProperty "ApproxUnwrap" $
thApproxToJSONUnwrap `sameAs` thApproxToEncodingUnwrap
, testProperty "ApproxDefault" $
thApproxToJSONDefault `sameAs` thApproxToEncodingDefault
, testProperty "SomeType2ElemArray" $
thSomeTypeToJSON2ElemArray `sameAs` thSomeTypeToEncoding2ElemArray
, testProperty "SomeTypeTaggedObject" $
thSomeTypeToJSONTaggedObject `sameAs` thSomeTypeToEncodingTaggedObject
, testProperty "SomeTypeObjectWithSingleField" $
thSomeTypeToJSONObjectWithSingleField `sameAs`
thSomeTypeToEncodingObjectWithSingleField
]
]
, testGroup "generics" [
testGroup "toJSON" [
testGroup "Nullary" [
testProperty "string" (isString . gNullaryToJSONString)
, testProperty "2ElemArray" (is2ElemArray . gNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (isNullaryTaggedObject . gNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . gNullaryToJSONObjectWithSingleField)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON gNullaryParseJSONString gNullaryToJSONString)
, testProperty "2ElemArray" (toParseJSON gNullaryParseJSON2ElemArray gNullaryToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON gNullaryParseJSONTaggedObject gNullaryToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON gNullaryParseJSONObjectWithSingleField gNullaryToJSONObjectWithSingleField)
]
]
, testGroup "SomeType" [
testProperty "2ElemArray" (is2ElemArray . gSomeTypeToJSON2ElemArray)
, testProperty "TaggedObject" (isTaggedObject . gSomeTypeToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . gSomeTypeToJSONObjectWithSingleField)
, testGroup "roundTrip" [
testProperty "2ElemArray" (toParseJSON gSomeTypeParseJSON2ElemArray gSomeTypeToJSON2ElemArray)
, testProperty "TaggedObject" (toParseJSON gSomeTypeParseJSONTaggedObject gSomeTypeToJSONTaggedObject)
, testProperty "ObjectWithSingleField" (toParseJSON gSomeTypeParseJSONObjectWithSingleField gSomeTypeToJSONObjectWithSingleField)
]
]
, testGroup "Approx" [
testProperty "string" (isString . gApproxToJSONUnwrap)
, testProperty "ObjectWithSingleField" (isObjectWithSingleField . gApproxToJSONDefault)
, testGroup "roundTrip" [
testProperty "string" (toParseJSON gApproxParseJSONUnwrap gApproxToJSONUnwrap)
, testProperty "ObjectWithSingleField" (toParseJSON gApproxParseJSONDefault gApproxToJSONDefault)
]
]
]
, testGroup "toEncoding" [
testProperty "NullaryString" $
gNullaryToJSONString `sameAs` gNullaryToEncodingString
, testProperty "Nullary2ElemArray" $
gNullaryToJSON2ElemArray `sameAs` gNullaryToEncoding2ElemArray
, testProperty "NullaryTaggedObject" $
gNullaryToJSONTaggedObject `sameAs` gNullaryToEncodingTaggedObject
, testProperty "NullaryObjectWithSingleField" $
gNullaryToJSONObjectWithSingleField `sameAs`
gNullaryToEncodingObjectWithSingleField
, testProperty "ApproxUnwrap" $
gApproxToJSONUnwrap `sameAs` gApproxToEncodingUnwrap
, testProperty "ApproxDefault" $
gApproxToJSONDefault `sameAs` gApproxToEncodingDefault
, testProperty "SomeType2ElemArray" $
gSomeTypeToJSON2ElemArray `sameAs` gSomeTypeToEncoding2ElemArray
, testProperty "SomeTypeTaggedObject" $
gSomeTypeToJSONTaggedObject `sameAs` gSomeTypeToEncodingTaggedObject
, testProperty "SomeTypeObjectWithSingleField" $
gSomeTypeToJSONObjectWithSingleField `sameAs`
gSomeTypeToEncodingObjectWithSingleField
]
]
]
| sol/aeson | tests/DataFamilies/Properties.hs | bsd-3-clause | 7,598 | 0 | 17 | 1,634 | 1,154 | 616 | 538 | 111 | 1 |
{-|
Module : Idris.Elab.Clause
Description : Code to elaborate clauses.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.Elab.Clause where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Core.CaseTree
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.Execute
import Idris.Core.TT
import Idris.Core.Typecheck
import Idris.Core.WHNF
import Idris.Coverage
import Idris.DataOpts
import Idris.DeepSeq
import Idris.Delaborate
import Idris.Docstrings hiding (Unchecked)
import Idris.DSL
import Idris.Elab.AsPat
import Idris.Elab.Term
import Idris.Elab.Transform
import Idris.Elab.Type
import Idris.Elab.Utils
import Idris.Error
import Idris.Imports
import Idris.Inliner
import Idris.Options
import Idris.Output (iRenderResult, iWarn, iputStrLn, pshow, sendHighlighting)
import Idris.PartialEval
import Idris.Primitives
import Idris.Providers
import Idris.Termination
import Idris.Transforms
import IRTS.Lang
import Util.Pretty hiding ((<$>))
import Util.Pretty (pretty, text)
import Prelude hiding (id, (.))
import Control.Applicative hiding (Const)
import Control.Category
import Control.DeepSeq
import Control.Monad
import qualified Control.Monad.State.Lazy as LState
import Control.Monad.State.Strict as State
import Data.Char (isLetter, toLower)
import Data.List
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Word
import Debug.Trace
import Numeric
-- | Elaborate a collection of left-hand and right-hand pairs - that is, a
-- top-level definition.
elabClauses :: ElabInfo -> FC -> FnOpts -> Name -> [PClause] -> Idris ()
elabClauses info' fc opts n_in cs =
do let n = liftname info n_in
info = info' { elabFC = Just fc }
ctxt <- getContext
ist <- getIState
optimise <- getOptimise
let petrans = PETransform `elem` optimise
inacc <- map fst <$> fgetState (opt_inaccessible . ist_optimisation n)
-- Check n actually exists, with no definition yet
let tys = lookupTy n ctxt
let reflect = Reflection `elem` opts
when (reflect && FCReflection `notElem` idris_language_extensions ist) $
ierror $ At fc (Msg "You must turn on the FirstClassReflection extension to use %reflection")
checkUndefined n ctxt
unless (length tys > 1) $ do
fty <- case tys of
[] -> -- TODO: turn into a CAF if there's no arguments
-- question: CAFs in where blocks?
tclift $ tfail $ At fc (NoTypeDecl n)
[ty] -> return ty
let atys_in = map snd (getArgTys (normalise ctxt [] fty))
let atys = map (\x -> (x, isCanonical x ctxt)) atys_in
cs_elab <- mapM (elabClause info opts)
(zip [0..] cs)
ctxt <- getContext
-- pats_raw is the basic type checked version, no PE or forcing
let optinfo = idris_optimisation ist
let (pats_in, cs_full) = unzip cs_elab
let pats_raw = map (simple_lhs ctxt) pats_in
-- We'll apply forcing to the left hand side here, so that we don't
-- do any unnecessary case splits
let pats_forced = map (force_lhs optinfo) pats_raw
logElab 3 $ "Elaborated patterns:\n" ++ show pats_raw
logElab 5 $ "Forced patterns:\n" ++ show pats_forced
solveDeferred fc n
-- just ensure that the structure exists
fmodifyState (ist_optimisation n) id
addIBC (IBCOpt n)
ist <- getIState
ctxt <- getContext
-- Don't apply rules if this is a partial evaluation definition,
-- or we'll make something that just runs itself!
let tpats = case specNames opts of
Nothing -> transformPats ist pats_in
_ -> pats_in
-- If the definition is specialisable, this reduces the
-- RHS
pe_tm <- doPartialEval ist tpats
let pats_pe = if petrans
then map (force_lhs optinfo . simple_lhs ctxt) pe_tm
else pats_forced
let tcase = opt_typecase (idris_options ist)
-- Look for 'static' names and generate new specialised
-- definitions for them, as well as generating rewrite rules
-- for partially evaluated definitions
newrules <- if petrans
then mapM (\ e -> case e of
Left _ -> return []
Right (l, r) -> elabPE info fc n r) pats_pe
else return []
-- Redo transforms with the newly generated transformations, so
-- that the specialised application we've just made gets
-- used in place of the general one
ist <- getIState
let pats_transformed = if petrans
then transformPats ist pats_pe
else pats_pe
-- Summary of what's about to happen: Definitions go:
--
-- pats_in -> pats -> pdef -> pdef'
-- addCaseDef builds case trees from <pdef> and <pdef'>
-- pdef is the compile-time pattern definition, after forcing
-- optimisation applied to LHS
let pdef = map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) $
map debind pats_forced
-- pdef_cov is the pattern definition without forcing, which
-- we feed to the coverage checker (we need to know what the
-- programmer wrote before forcing erasure)
let pdef_cov
= map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) $
map debind pats_raw
-- pdef_pe is the one which will get further optimised
-- for run-time, with no forcing optimisation of the LHS because
           -- that affects erasure. Also, it's partially evaluated
let pdef_pe = map debind pats_transformed
logElab 5 $ "Initial typechecked patterns:\n" ++ show pats_raw
logElab 5 $ "Initial typechecked pattern def:\n" ++ show pdef
-- NOTE: Need to store original definition so that proofs which
-- rely on its structure aren't affected by any changes to the
-- inliner. Just use the inlined version to generate pdef' and to
-- help with later inlinings.
ist <- getIState
let pdef_inl = inlineDef ist pdef
numArgs <- tclift $ sameLength pdef
case specNames opts of
Just _ ->
do logElab 3 $ "Partially evaluated:\n" ++ show pats_pe
_ -> return ()
logElab 3 $ "Transformed:\n" ++ show pats_transformed
erInfo <- getErasureInfo <$> getIState
tree@(CaseDef scargs sc _) <- tclift $
simpleCase tcase (UnmatchedCase "Error") reflect CompileTime fc inacc atys pdef erInfo
cov <- coverage
pmissing <-
if cov && not (hasDefault pats_raw)
then do -- Generate clauses from the given possible cases
missing <- genClauses fc n
(map (\ (ns,tm,_) -> (ns, tm)) pdef)
cs_full
-- missing <- genMissing n scargs sc
missing' <- checkPossibles info fc True n missing
-- Filter out the ones which match one of the
-- given cases (including impossible ones)
let clhs = map getLHS pdef
logElab 2 $ "Must be unreachable (" ++ show (length missing') ++ "):\n" ++
showSep "\n" (map showTmImpls missing') ++
"\nAgainst: " ++
showSep "\n" (map (\t -> showTmImpls (delab ist t)) (map getLHS pdef))
-- filter out anything in missing' which is
-- matched by any of clhs. This might happen since
-- unification may force a variable to take a
-- particular form, rather than force a case
-- to be impossible.
return missing' -- (filter (noMatch ist clhs) missing')
else return []
let pcover = null pmissing
-- pdef' is the version that gets compiled for run-time,
-- so we start from the partially evaluated version
pdef_in' <- applyOpts $ map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) pdef_pe
ctxt <- getContext
let pdef' = map (simple_rt ctxt) pdef_in'
logElab 5 $ "After data structure transformations:\n" ++ show pdef'
ist <- getIState
let tot | pcover = Unchecked -- finish later
| AssertTotal `elem` opts = Total []
| PEGenerated `elem` opts = Generated
| otherwise = Partial NotCovering -- already know it's not total
case tree of
CaseDef _ _ [] -> return ()
CaseDef _ _ xs -> mapM_ (\x ->
iputStrLn $ show fc ++
":warning - Unreachable case: " ++
show (delab ist x)) xs
let knowncovering = (pcover && cov) || AssertTotal `elem` opts
let defaultcase = if knowncovering
then STerm Erased
else UnmatchedCase $ "*** " ++
show fc ++
":unmatched case in " ++ show n ++
" ***"
tree' <- tclift $ simpleCase tcase defaultcase reflect
RunTime fc inacc atys pdef' erInfo
logElab 3 $ "Unoptimised " ++ show n ++ ": " ++ show tree
logElab 3 $ "Optimised: " ++ show tree'
ctxt <- getContext
ist <- getIState
let opt = idris_optimisation ist
putIState (ist { idris_patdefs = addDef n (force pdef_pe, force pmissing)
(idris_patdefs ist) })
let caseInfo = CaseInfo (inlinable opts) (inlinable opts) (dictionary opts)
case lookupTyExact n ctxt of
Just ty ->
do ctxt' <- do ctxt <- getContext
tclift $
addCasedef n erInfo caseInfo
tcase defaultcase
reflect
(AssertTotal `elem` opts)
atys
inacc
pats_forced
pdef
pdef' ty
ctxt
setContext ctxt'
addIBC (IBCDef n)
addDefinedName n
setTotality n tot
when (not reflect && PEGenerated `notElem` opts) $
do totcheck (fc, n)
defer_totcheck (fc, n)
when (tot /= Unchecked) $ addIBC (IBCTotal n tot)
i <- getIState
ctxt <- getContext
case lookupDef n ctxt of
(CaseOp _ _ _ _ _ cd : _) ->
let (scargs, sc) = cases_compiletime cd in
do let calls = map fst $ findCalls sc scargs
-- let scg = buildSCG i sc scargs
-- add SCG later, when checking totality
logElab 2 $ "Called names: " ++ show calls
-- if the definition is public, make sure
-- it only uses public names
nvis <- getFromHideList n
case nvis of
Just Public -> mapM_ (checkVisibility fc n Public Public) calls
_ -> return ()
addCalls n calls
let rig = if linearArg (whnfArgs ctxt [] ty)
then Rig1
else RigW
updateContext (setRigCount n (minRig ctxt rig calls))
addIBC (IBCCG n)
_ -> return ()
return ()
-- addIBC (IBCTotal n tot)
_ -> return ()
-- Check it's covering, if 'covering' option is used. Chase
-- all called functions, and fail if any of them are also
-- 'Partial NotCovering'
when (CoveringFn `elem` opts) $ checkAllCovering fc [] n n
-- Add the 'AllGuarded' flag if it's guaranteed that every
-- 'Inf' argument will be guarded by constructors in the result
-- (allows productivity check to go under this function)
checkIfGuarded n
-- If this has %static arguments, cache the names of functions
-- it calls for partial evaluation later
ist <- getIState
let statics = case lookupCtxtExact n (idris_statics ist) of
Just ns -> ns
Nothing -> []
when (or statics) $ do getAllNames n
return ()
where
noMatch i cs tm = all (\x -> case trim_matchClause i (delab' i x True True) tm of
Right _ -> False
Left miss -> True) cs
where
trim_matchClause i (PApp fcl fl ls) (PApp fcr fr rs)
= let args = min (length ls) (length rs) in
matchClause i (PApp fcl fl (take args ls))
(PApp fcr fr (take args rs))
checkUndefined n ctxt = case lookupDef n ctxt of
[] -> return ()
[TyDecl _ _] -> return ()
_ -> tclift $ tfail (At fc (AlreadyDefined n))
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
depat acc (Bind n (PVar rig t) sc) = depat ((n, t) : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
getPVs (Bind x (PVar rig _) tm) = let (vs, tm') = getPVs tm
in (x:vs, tm')
getPVs tm = ([], tm)
isPatVar vs (P Bound n _) = n `elem` vs
isPatVar _ _ = False
hasDefault cs | (Right (lhs, rhs) : _) <- reverse cs
, (pvs, tm) <- getPVs (explicitNames lhs)
, (f, args) <- unApply tm = all (isPatVar pvs) args
hasDefault _ = False
getLHS (_, l, _) = l
-- Simplify the left hand side of a definition, to remove any lets
-- that may have arisen during elaboration
simple_lhs ctxt (Right (x, y))
= Right (Idris.Core.Evaluate.simplify ctxt [] x, y)
simple_lhs ctxt t = t
force_lhs opts (Right (x, y)) = Right (forceWith opts x, y)
force_lhs opts t = t
simple_rt ctxt (p, x, y) = (p, x, force (uniqueBinders p
(rt_simplify ctxt [] y)))
specNames [] = Nothing
specNames (Specialise ns : _) = Just ns
specNames (_ : xs) = specNames xs
sameLength ((_, x, _) : xs)
= do l <- sameLength xs
let (f, as) = unApply x
if (null xs || l == length as) then return (length as)
else tfail (At fc (Msg "Clauses have differing numbers of arguments "))
sameLength [] = return 0
-- Partially evaluate, if the definition is marked as specialisable
doPartialEval ist pats =
case specNames opts of
Nothing -> return pats
Just ns -> case partial_eval (tt_ctxt ist) ns pats of
Just t -> return t
Nothing -> ierror (At fc (Msg "No specialisation achieved"))
minRig :: Context -> RigCount -> [Name] -> RigCount
minRig c minr [] = minr
minRig c minr (r : rs) = case lookupRigCountExact r c of
Nothing -> minRig c minr rs
Just rc -> minRig c (min minr rc) rs
forceWith :: Ctxt OptInfo -> Term -> Term
forceWith opts lhs = -- trace (show lhs ++ "\n==>\n" ++ show (force lhs) ++ "\n----") $
force lhs
where
-- If there's forced arguments, erase them
force ap@(App _ _ _)
| (fn@(P _ c _), args) <- unApply ap,
Just copt <- lookupCtxtExact c opts
= let args' = eraseArg 0 (forceable copt) args in
mkApp fn (map force args')
force (App t f a)
= App t (force f) (force a)
-- We might have pat bindings, so go under them
force (Bind n b sc) = Bind n b (force sc)
-- Everything else, leave it alone
force t = t
eraseArg i fs (n : ns) | i `elem` fs = Erased : eraseArg (i + 1) fs ns
| otherwise = n : eraseArg (i + 1) fs ns
eraseArg i _ [] = []
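    -- A worked example (a, b, c stand for arbitrary argument terms; argument
    -- index 1 is marked forceable, so only its slot is erased):
    --   eraseArg 0 [1] [a, b, c] == [a, Erased, c]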
-- | Find 'static' applications in a term and partially evaluate them.
-- Return any new transformation rules
elabPE :: ElabInfo -> FC -> Name -> Term -> Idris [(Term, Term)]
-- Don't go deeper than 5 nested partially evaluated definitions in one go
-- (make this configurable? It's a good limit for most cases, certainly for
-- interfaces and polymorphic definitions, but maybe not for DSLs and
-- interpreters in complicated cases.
-- Possibly only worry about the limit if we've specialised the same function
-- a number of times in one go.)
elabPE info fc caller r | pe_depth info > 5 = return []
elabPE info fc caller r =
do ist <- getIState
let sa = filter (\ap -> fst ap /= caller) $ getSpecApps ist [] r
rules <- mapM mkSpecialised sa
return $ concat rules
where
-- Make a specialised version of the application, and
-- add a PTerm level transformation rule, which is basically the
-- new definition in reverse (before specialising it).
-- RHS => LHS where implicit arguments are left blank in the
-- transformation.
-- Transformation rules are applied after every PClause elaboration
mkSpecialised :: (Name, [(PEArgType, Term)]) -> Idris [(Term, Term)]
mkSpecialised specapp_in = do
ist <- getIState
ctxt <- getContext
(specTy, specapp) <- getSpecTy ist specapp_in
let (n, newnm, specdecl) = getSpecClause ist specapp specTy
let lhs = pe_app specdecl
let rhs = pe_def specdecl
let undef = case lookupDefExact newnm ctxt of
Nothing -> True
_ -> False
logElab 5 $ show (newnm, undef, map (concreteArg ist) (snd specapp))
idrisCatch
(if (undef && all (concreteArg ist) (snd specapp)) then do
cgns <- getAllNames n
-- on the RHS of the new definition, we should reduce
-- everything that's not itself static (because we'll
-- want to be a PE version of those next)
let cgns' = filter (\x -> x /= n &&
notStatic ist x) cgns
-- set small reduction limit on partial/productive things
let maxred = case lookupTotal n ctxt of
[Total _] -> 65536
[Productive] -> 16
_ -> 1
let specnames = mapMaybe (specName (pe_simple specdecl))
(snd specapp)
descs <- mapM getStaticsFrom (map fst specnames)
let opts = [Specialise ((if pe_simple specdecl
then map (\x -> (x, Nothing)) cgns'
else []) ++
(n, Just maxred) : specnames ++
concat descs)]
logElab 3 $ "Specialising application: " ++ show specapp
++ "\n in \n" ++ show caller ++
"\n with \n" ++ show opts
++ "\nCalling: " ++ show cgns
logElab 3 $ "New name: " ++ show newnm
logElab 3 $ "PE definition type : " ++ (show specTy)
++ "\n" ++ show opts
logElab 2 $ "PE definition " ++ show newnm ++ ":\n" ++
showSep "\n"
(map (\ (lhs, rhs) ->
(showTmImpls lhs ++ " = " ++
showTmImpls rhs)) (pe_clauses specdecl))
logElab 5 $ show n ++ " transformation rule: " ++
showTmImpls rhs ++ " ==> " ++ showTmImpls lhs
elabType info defaultSyntax emptyDocstring [] fc opts newnm NoFC specTy
let def = map (\(lhs, rhs) ->
let lhs' = mapPT hiddenToPH $ stripUnmatchable ist lhs in
PClause fc newnm lhs' [] rhs [])
(pe_clauses specdecl)
trans <- elabTransform info fc False rhs lhs
elabClauses (info {pe_depth = pe_depth info + 1}) fc
(PEGenerated:opts) newnm def
return [trans]
else return [])
-- if it doesn't work, just don't specialise. Could happen for lots
-- of valid reasons (e.g. local variables in scope which can't be
-- lifted out).
(\e -> do logElab 5 $ "Couldn't specialise: " ++ (pshow ist e)
return [])
hiddenToPH (PHidden _) = Placeholder
hiddenToPH x = x
specName simpl (ImplicitS _, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl (ExplicitS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl (ConstraintS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl _ = Nothing
-- get the descendants of the name 'n' which are marked %static
-- Marking a function %static essentially means it's used to construct
-- programs, so should be evaluated by the partial evaluator
getStaticsFrom :: Name -> Idris [(Name, Maybe Int)]
getStaticsFrom n = do ns <- getAllNames n
i <- getIState
let statics = filter (staticFn i) ns
return (map (\n -> (n, Nothing)) statics)
staticFn :: IState -> Name -> Bool
staticFn i n = case lookupCtxt n (idris_flags i) of
[opts] -> elem StaticFn opts
_ -> False
notStatic ist n = case lookupCtxtExact n (idris_statics ist) of
Just s -> not (or s)
_ -> True
concreteArg ist (ImplicitS _, tm) = concreteTm ist tm
concreteArg ist (ExplicitS, tm) = concreteTm ist tm
concreteArg ist _ = True
concreteTm ist tm | (P _ n _, _) <- unApply tm =
case lookupTy n (tt_ctxt ist) of
[] -> False
_ -> True
concreteTm ist (Constant _) = True
concreteTm ist (Bind n (Lam _ _) sc) = True
concreteTm ist (Bind n (Pi _ _ _ _) sc) = True
concreteTm ist (Bind n (Let _ _) sc) = concreteTm ist sc
concreteTm ist _ = False
-- get the type of a specialised application
getSpecTy ist (n, args)
= case lookupTy n (tt_ctxt ist) of
[ty] -> let (specty_in, args') = specType args (explicitNames ty)
specty = normalise (tt_ctxt ist) [] (finalise specty_in)
t = mkPE_TyDecl ist args' (explicitNames specty) in
return (t, (n, args'))
-- (normalise (tt_ctxt ist) [] (specType args ty))
_ -> ifail $ "Ambiguous name " ++ show n ++ " (getSpecTy)"
-- get the clause of a specialised application
getSpecClause ist (n, args) specTy
= let newnm = sUN ("PE_" ++ show (nsroot n) ++ "_" ++
qhash 5381 (showSep "_" (map showArg args))) in
-- UN (show n ++ show (map snd args)) in
(n, newnm, mkPE_TermDecl ist newnm n specTy args)
where showArg (ExplicitS, n) = qshow n
showArg (ImplicitS _, n) = qshow n
showArg _ = ""
qshow (Bind _ _ _) = "fn"
qshow (App _ f a) = qshow f ++ qshow a
qshow (P _ n _) = show n
qshow (Constant c) = show c
qshow _ = ""
-- Simple but effective string hashing...
-- Keep it to 32 bits for readability/debuggability
qhash :: Word64 -> String -> String
qhash hash [] = showHex (abs hash `mod` 0xffffffff) ""
qhash hash (x:xs) = qhash (hash * 33 + fromIntegral(fromEnum x)) xs
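    -- So a specialised definition gets a name of the shape PE_<fn>_<hex>,
    -- e.g. something like "PE_interp_2f6a91c3" (the digits here are
    -- hypothetical; they depend on how the static arguments print).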
-- | Checks if the clause is a possible left hand side.
-- NOTE: A lot of this is repeated for reflected definitions in Idris.Elab.Term.
-- One day, these should be merged, but until then remember that if you edit
-- this you might need to edit the other version...
checkPossible :: ElabInfo -> FC -> Bool -> Name -> PTerm -> Idris (Maybe PTerm)
checkPossible info fc tcgen fname lhs_in
= do ctxt <- getContext
i <- getIState
let lhs = addImplPat i lhs_in
logElab 10 $ "Trying missing case: " ++ showTmImpls lhs
-- if the LHS type checks, it is possible
case elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "patLHS") infP initEState
(erun fc (buildTC i info EImpossible [] fname
(allNamesIn lhs_in)
(infTerm lhs))) of
OK (ElabResult lhs' _ _ ctxt' newDecls highlights newGName, _) ->
do setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
let lhs_tm = normalise ctxt [] (orderPats (getInferTerm lhs'))
let emptyPat = hasEmptyPat ctxt (idris_datatypes i) lhs_tm
if emptyPat then
do logElab 10 $ "Empty type in pattern "
return Nothing
else
case recheck (constraintNS info) ctxt' [] (forget lhs_tm) lhs_tm of
OK (tm, _, _) ->
do logElab 10 $ "Valid " ++ show tm ++ "\n"
++ " from " ++ show lhs
return (Just (delab' i tm True True))
err -> do logElab 10 $ "Conversion failure"
return Nothing
-- if it's a recoverable error, the case may become possible
Error err -> do logLvl 10 $ "Impossible case " ++ (pshow i err)
++ "\n" ++ show (recoverableCoverage ctxt err,
validCoverageCase ctxt err)
-- tcgen means that it was generated by genClauses,
-- so only looking for an error. Otherwise, it
-- needs to be the right kind of error (a type mismatch
-- in the same family).
if tcgen then returnTm i err (recoverableCoverage ctxt err)
else returnTm i err (validCoverageCase ctxt err ||
recoverableCoverage ctxt err)
where returnTm i err True = do logLvl 10 $ "Possibly resolvable error on " ++
pshow i (fmap (normalise (tt_ctxt i) []) err)
++ " on " ++ showTmImpls lhs_in
return $ Just lhs_in
returnTm i err False = return $ Nothing
-- Filter out the terms which are not well type left hand sides. Whenever we
-- eliminate one, also eliminate later ones which match it without checking,
-- because they're obviously going to have the same result
checkPossibles :: ElabInfo -> FC -> Bool -> Name -> [PTerm] -> Idris [PTerm]
checkPossibles info fc tcgen fname (lhs : rest)
= do ok <- checkPossible info fc tcgen fname lhs
i <- getIState
-- Hypothesis: any we can remove will be within the next few, because
-- leftmost patterns tend to change less
-- Since the match could take a while if there's a lot of cases to
-- check, just remove from the next batch
let rest' = filter (\x -> not (qmatch x lhs)) (take 200 rest) ++ drop 200 rest
restpos <- checkPossibles info fc tcgen fname rest'
case ok of
Nothing -> return restpos
Just lhstm -> return (lhstm : restpos)
where
qmatch _ Placeholder = True
qmatch (PApp _ f args) (PApp _ f' args')
| length args == length args'
= qmatch f f' && and (zipWith qmatch (map getTm args)
(map getTm args'))
qmatch (PRef _ _ n) (PRef _ _ n') = n == n'
qmatch (PPair _ _ _ l r) (PPair _ _ _ l' r') = qmatch l l' && qmatch r r'
qmatch (PDPair _ _ _ l t r) (PDPair _ _ _ l' t' r')
= qmatch l l' && qmatch t t' && qmatch r r'
qmatch x y = x == y
checkPossibles _ _ _ _ [] = return []
findUnique :: Context -> Env -> Term -> [Name]
findUnique ctxt env (Bind n b sc)
= let rawTy = forgetEnv (map fstEnv env) (binderTy b)
uniq = case check ctxt env rawTy of
OK (_, UType UniqueType) -> True
OK (_, UType NullType) -> True
OK (_, UType AllTypes) -> True
_ -> False in
if uniq then n : findUnique ctxt ((n, RigW, b) : env) sc
else findUnique ctxt ((n, RigW, b) : env) sc
findUnique _ _ _ = []
-- | Return the elaborated LHS/RHS, and the original LHS with implicits added
elabClause :: ElabInfo -> FnOpts -> (Int, PClause) ->
Idris (Either Term (Term, Term), PTerm)
elabClause info opts (_, PClause fc fname lhs_in [] PImpossible [])
= do let tcgen = Dictionary `elem` opts
i <- get
let lhs = addImpl [] i lhs_in
b <- checkPossible info fc tcgen fname lhs_in
case b of
Just _ -> tclift $ tfail (At fc
(Msg $ show lhs_in ++ " is a valid case"))
Nothing -> do ptm <- mkPatTm lhs_in
logElab 5 $ "Elaborated impossible case " ++ showTmImpls lhs ++
"\n" ++ show ptm
return (Left ptm, lhs)
elabClause info opts (cnum, PClause fc fname lhs_in_as withs rhs_in_as whereblock)
= do let tcgen = Dictionary `elem` opts
push_estack fname False
ctxt <- getContext
let (lhs_in, rhs_in) = desugarAs lhs_in_as rhs_in_as
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
inf <- isTyInferred fname
-- Check if we have "with" patterns outside of "with" block
when (isOutsideWith lhs_in && (not $ null withs)) $
ierror (At fc (Elaborating "left hand side of " fname Nothing
(Msg "unexpected patterns outside of \"with\" block")))
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname ctxt of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let norm_ty = normalise ctxt [] fn_ty
let params = getParamsInType i [] fn_is norm_ty
let tcparams = getTCParamsInType i [] fn_is norm_ty
let lhs = mkLHSapp $ stripLinear i $ stripUnmatchable i $
propagateParams i params norm_ty (allNamesIn lhs_in) (addImplPat i lhs_in)
-- let lhs = mkLHSapp $
-- propagateParams i params fn_ty (addImplPat i lhs_in)
logElab 10 (show (params, fn_ty) ++ " " ++ showTmImpls (addImplPat i lhs_in))
logElab 5 ("LHS: " ++ show opts ++ "\n" ++ show fc ++ " " ++ showTmImpls lhs)
logElab 4 ("Fixed parameters: " ++ show params ++ " from " ++ showTmImpls lhs_in ++
"\n" ++ show (fn_ty, fn_is))
((ElabResult lhs' dlhs [] ctxt' newDecls highlights newGName, probs, inj), _) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "patLHS") infP initEState
(do res <- errAt "left hand side of " fname Nothing
(erun fc (buildTC i info ELHS opts fname
(allNamesIn lhs_in)
(infTerm lhs)))
probs <- get_probs
inj <- get_inj
return (res, probs, inj))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let static_names = getStaticNames i lhs_tm
logElab 3 ("Elaborated: " ++ show lhs_tm)
logElab 3 ("Elaborated type: " ++ show lhs_ty)
logElab 5 ("Injective: " ++ show fname ++ " " ++ show inj)
-- If we're inferring metavariables in the type, don't recheck,
-- because we're only doing this to try to work out those metavariables
ctxt <- getContext
(clhs_c, clhsty) <- if not inf
then recheckC_borrowing False (PEGenerated `notElem` opts)
[] (constraintNS info) fc id [] lhs_tm
else return (lhs_tm, lhs_ty)
let clhs = normalise ctxt [] clhs_c
let borrowed = borrowedNames [] clhs
-- These are the names we're not allowed to use on the RHS, because
-- they're UniqueTypes and borrowed from another function.
when (not (null borrowed)) $
logElab 5 ("Borrowed names on LHS: " ++ show borrowed)
logElab 3 ("Normalised LHS: " ++ showTmImpls (delabMV i clhs))
rep <- useREPL
when rep $ do
addInternalApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs) -- TODO: Should use span instead of line and filename?
addIBC (IBCLineApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs))
logElab 5 ("Checked " ++ show clhs ++ "\n" ++ show clhsty)
-- Elaborate where block
ist <- getIState
ctxt <- getContext
windex <- getName
let decls = nub (concatMap declared whereblock)
let defs = nub (decls ++ concatMap defined whereblock)
let newargs_all = pvars ist lhs_tm
-- Unique arguments must be passed to the where block explicitly
-- (since we can't control "usage" easlily otherwise). Remove them
-- from newargs here
let uniqargs = findUnique ctxt [] lhs_tm
let newargs = filter (\(n,_) -> n `notElem` uniqargs) newargs_all
let winfo = (pinfo info newargs defs windex) { elabFC = Just fc }
let wb = map (mkStatic static_names) $
map (expandImplementationScope ist decorate newargs defs) $
map (expandParamsD False ist decorate newargs defs) whereblock
-- Split the where block into declarations with a type, and those
-- without
-- Elaborate those with a type *before* RHS, those without *after*
let (wbefore, wafter) = sepBlocks wb
logElab 5 $ "Where block:\n " ++ show wbefore ++ "\n" ++ show wafter
mapM_ (rec_elabDecl info EAll winfo) wbefore
-- Now build the RHS, using the type of the LHS as the goal.
i <- getIState -- new implicits from where block
logElab 5 (showTmImpls (expandParams decorate newargs defs (defs \\ decls) rhs_in))
let rhs = rhs_trans info $
addImplBoundInf i (map fst newargs_all) (defs \\ decls)
(expandParams decorate newargs defs (defs \\ decls) rhs_in)
logElab 2 $ "RHS: " ++ show (map fst newargs_all) ++ " " ++ showTmImpls rhs
ctxt <- getContext -- new context with where block added
logElab 5 "STARTING CHECK"
((rhsElab, defer, holes, is, probs, ctxt', newDecls, highlights, newGName), _) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "patRHS") clhsty initEState
(do pbinds ist lhs_tm
-- proof search can use explicitly written names
mapM_ addPSname (allNamesIn lhs_in)
ulog <- getUnifyLog
traceWhen ulog ("Setting injective: " ++ show (nub (tcparams ++ inj))) $
mapM_ setinj (nub (tcparams ++ inj))
setNextName
(ElabResult _ _ is ctxt' newDecls highlights newGName) <-
errAt "right hand side of " fname (Just clhsty)
(erun fc (build i winfo ERHS opts fname rhs))
errAt "right hand side of " fname (Just clhsty)
(erun fc $ psolve lhs_tm)
tt <- get_term
aux <- getAux
let (tm, ds) = runState (collectDeferred (Just fname)
(map fst $ case_decls aux) ctxt tt) []
probs <- get_probs
hs <- get_holes
return (tm, ds, hs, is, probs, ctxt', newDecls, highlights, newGName))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
logElab 3 "DONE CHECK"
logElab 3 $ "---> " ++ show rhsElab
ctxt <- getContext
let rhs' = rhsElab
when (not (null defer)) $ logElab 2 $ "DEFERRED " ++
show (map (\ (n, (_,_,t,_)) -> (n, t)) defer)
-- If there's holes, set the metavariables as undefinable
def' <- checkDef info fc (\n -> Elaborating "deferred type of " n Nothing) (null holes) defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False, null holes))) def'
addDeferred def''
mapM_ (\(n, _) -> addIBC (IBCDef n)) def''
when (not (null def')) $ do
mapM_ defer_totcheck (map (\x -> (fc, fst x)) def'')
-- Now the remaining deferred (i.e. no type declarations) clauses
-- from the where block
mapM_ (rec_elabDecl info EAll winfo) wafter
mapM_ (elabCaseBlock winfo opts) is
ctxt <- getContext
logElab 5 "Rechecking"
logElab 6 $ " ==> " ++ show (forget rhs')
(crhs, crhsty) -- if there's holes && deferred things, it's okay
-- but we'll need to freeze the definition and not
-- allow the deferred things to be definable
-- (this is just to allow users to inspect intermediate
-- things)
<- if (null holes || null def') && not inf
then recheckC_borrowing True (PEGenerated `notElem` opts)
borrowed (constraintNS info) fc id [] rhs'
else return (rhs', clhsty)
logElab 6 $ " ==> " ++ showEnvDbg [] crhsty ++ " against " ++ showEnvDbg [] clhsty
-- If there's holes, make sure this definition is frozen
when (not (null holes)) $ do
logElab 5 $ "Making " ++ show fname ++ " frozen due to " ++ show holes
setAccessibility fname Frozen
ctxt <- getContext
let constv = next_tvar ctxt
tit <- typeInType
case LState.runStateT (convertsC ctxt [] crhsty clhsty) (constv, []) of
OK (_, cs) -> when (PEGenerated `notElem` opts && not tit) $ do
addConstraints fc cs
mapM_ (\c -> addIBC (IBCConstraint fc c)) (snd cs)
logElab 6 $ "CONSTRAINTS ADDED: " ++ show cs ++ "\n" ++ show (clhsty, crhsty)
return ()
Error e -> ierror (At fc (CantUnify False (clhsty, Nothing) (crhsty, Nothing) e [] 0))
i <- getIState
checkInferred fc (delab' i crhs True True) rhs
-- if the function is declared '%error_reverse',
-- then we'll try running it in reverse to improve error messages
-- Also if the type is '%error_reverse' and the LHS is smaller than
-- the RHS
let (ret_fam, _) = unApply (getRetTy crhsty)
rev <- case ret_fam of
P _ rfamn _ ->
case lookupCtxt rfamn (idris_datatypes i) of
[TI _ _ dopts _ _ _] ->
return (DataErrRev `elem` dopts &&
size clhs <= size crhs)
_ -> return False
_ -> return False
when (rev || ErrorReverse `elem` opts) $ do
addIBC (IBCErrRev (crhs, clhs))
addErrRev (crhs, clhs)
when (rev || ErrorReduce `elem` opts) $ do
addIBC (IBCErrReduce fname)
addErrReduce fname
pop_estack
return (Right (clhs, crhs), lhs)
where
pinfo :: ElabInfo -> [(Name, PTerm)] -> [Name] -> Int -> ElabInfo
pinfo info ns ds i
= let newps = params info ++ ns
dsParams = map (\n -> (n, map fst newps)) ds
newb = addAlist dsParams (inblock info)
l = liftname info in
info { params = newps,
inblock = newb,
liftname = id -- (\n -> case lookupCtxt n newb of
-- Nothing -> n
-- _ -> MN i (show n)) . l
}
-- Find the variable names which appear under a 'Ownership.Read' so that
-- we know they can't be used on the RHS
borrowedNames :: [Name] -> Term -> [Name]
borrowedNames env (App _ (App _ (P _ (NS (UN lend) [owner]) _) _) arg)
| owner == txt "Ownership" &&
(lend == txt "lend" || lend == txt "Read") = getVs arg
where
getVs (V i) = [env!!i]
getVs (App _ f a) = nub $ getVs f ++ getVs a
getVs _ = []
borrowedNames env (App _ f a) = nub $ borrowedNames env f ++ borrowedNames env a
borrowedNames env (Bind n b sc) = nub $ borrowedB b ++ borrowedNames (n:env) sc
where borrowedB (Let t v) = nub $ borrowedNames env t ++ borrowedNames env v
borrowedB b = borrowedNames env (binderTy b)
borrowedNames _ _ = []
mkLHSapp t@(PRef _ _ _) = PApp fc t []
mkLHSapp t = t
decorate (NS x ns)
= NS (SN (WhereN cnum fname x)) ns
decorate x
= SN (WhereN cnum fname x)
sepBlocks bs = sepBlocks' [] bs where
sepBlocks' ns (d@(PTy _ _ _ _ _ n _ t) : bs)
= let (bf, af) = sepBlocks' (n : ns) bs in
(d : bf, af)
sepBlocks' ns (d@(PClauses _ _ n _) : bs)
| not (n `elem` ns) = let (bf, af) = sepBlocks' ns bs in
(bf, d : af)
sepBlocks' ns (b : bs) = let (bf, af) = sepBlocks' ns bs in
(b : bf, af)
sepBlocks' ns [] = ([], [])
-- term is not within "with" block
isOutsideWith :: PTerm -> Bool
isOutsideWith (PApp _ (PRef _ _ (SN (WithN _ _))) _) = False
isOutsideWith _ = True
elabClause info opts (_, PWith fc fname lhs_in withs wval_in pn_in withblock)
= do let tcgen = Dictionary `elem` opts
ctxt <- getContext
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname ctxt of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let params = getParamsInType i [] fn_is (normalise ctxt [] fn_ty)
let lhs = stripLinear i $ stripUnmatchable i $
propagateParams i params fn_ty (allNamesIn lhs_in)
(addImplPat i lhs_in)
logElab 2 ("LHS: " ++ show lhs)
(ElabResult lhs' dlhs [] ctxt' newDecls highlights newGName, _) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "patLHS") infP initEState
(errAt "left hand side of with in " fname Nothing
(erun fc (buildTC i info ELHS opts fname
(allNamesIn lhs_in)
(infTerm lhs))) )
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
ctxt <- getContext
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let ret_ty = getRetTy (explicitNames (normalise ctxt [] lhs_ty))
let static_names = getStaticNames i lhs_tm
logElab 5 (show lhs_tm ++ "\n" ++ show static_names)
(clhs_c, clhsty) <- recheckC (constraintNS info) fc id [] lhs_tm
let clhs = normalise ctxt [] clhs_c
logElab 5 ("Checked " ++ show clhs)
let bargs = getPBtys (explicitNames (normalise ctxt [] lhs_tm))
wval <- case wval_in of
Placeholder -> ierror $ At fc $
Msg "No expression for the with block to inspect.\nYou need to replace the _ with an expression."
_ -> return $
rhs_trans info $
addImplBound i (map fst bargs) wval_in
logElab 5 ("Checking " ++ showTmImpls wval)
-- Elaborate wval in this context
((wvalElab, defer, is, ctxt', newDecls, highlights, newGName), _) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "withRHS")
(bindTyArgs PVTy bargs infP) initEState
(do pbinds i lhs_tm
-- proof search can use explicitly written names
mapM_ addPSname (allNamesIn lhs_in)
setNextName
                     -- TODO: may want where here - see winfo above
(ElabResult _ d is ctxt' newDecls highlights newGName) <- errAt "with value in " fname Nothing
(erun fc (build i info ERHS opts fname (infTerm wval)))
erun fc $ psolve lhs_tm
tt <- get_term
return (tt, d, is, ctxt', newDecls, highlights, newGName))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
def' <- checkDef info fc iderr True defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False, True))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
let wval' = wvalElab
logElab 5 ("Checked wval " ++ show wval')
ctxt <- getContext
(cwval, cwvalty) <- recheckC (constraintNS info) fc id [] (getInferTerm wval')
let cwvaltyN = explicitNames (normalise ctxt [] cwvalty)
let cwvalN = explicitNames (normalise ctxt [] cwval)
logElab 3 ("With type " ++ show cwvalty ++ "\nRet type " ++ show ret_ty)
-- We're going to assume the with type is not a function shortly,
-- so report an error if it is (you can't match on a function anyway
-- so this doesn't lose anything)
case getArgTys cwvaltyN of
[] -> return ()
(_:_) -> ierror $ At fc (WithFnType cwvalty)
let pvars = map fst (getPBtys cwvalty)
-- we need the unelaborated term to get the names it depends on
-- rather than a de Bruijn index.
let pdeps = usedNamesIn pvars i (delab i cwvalty)
let (bargs_pre, bargs_post) = split pdeps bargs []
let mpn = case pn_in of
Nothing -> Nothing
Just (n, nfc) -> Just (uniqueName n (map fst bargs))
-- Highlight explicit proofs
sendHighlighting [(fc, AnnBoundName n False) | (n, fc) <- maybeToList pn_in]
logElab 10 ("With type " ++ show (getRetTy cwvaltyN) ++
" depends on " ++ show pdeps ++ " from " ++ show pvars)
logElab 10 ("Pre " ++ show bargs_pre ++ "\nPost " ++ show bargs_post)
windex <- getName
-- build a type declaration for the new function:
-- (ps : Xs) -> (withval : cwvalty) -> (ps' : Xs') -> ret_ty
let wargval = getRetTy cwvalN
let wargtype = getRetTy cwvaltyN
let wargname = sMN windex "warg"
logElab 5 ("Abstract over " ++ show wargval ++ " in " ++ show wargtype)
let wtype = bindTyArgs (flip (Pi RigW Nothing) (TType (UVar [] 0))) (bargs_pre ++
(wargname, wargtype) :
map (abstract wargname wargval wargtype) bargs_post ++
case mpn of
Just pn -> [(pn, mkApp (P Ref eqTy Erased)
[wargtype, wargtype,
P Bound wargname Erased, wargval])]
Nothing -> [])
(substTerm wargval (P Bound wargname wargtype) ret_ty)
logElab 3 ("New function type " ++ show wtype)
let wname = SN (WithN windex fname)
let imps = getImps wtype -- add to implicits context
putIState (i { idris_implicits = addDef wname imps (idris_implicits i) })
let statics = getStatics static_names wtype
logElab 5 ("Static positions " ++ show statics)
i <- getIState
putIState (i { idris_statics = addDef wname statics (idris_statics i) })
addIBC (IBCDef wname)
addIBC (IBCImp wname)
addIBC (IBCStatic wname)
def' <- checkDef info fc iderr True [(wname, (-1, Nothing, wtype, []))]
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False, True))) def'
addDeferred def''
-- in the subdecls, lhs becomes:
-- fname pats | wpat [rest]
-- ==> fname' ps wpat [rest], match pats against toplevel for ps
wb <- mapM (mkAuxC mpn wname lhs (map fst bargs_pre) (map fst bargs_post))
withblock
logElab 3 ("with block " ++ show wb)
setFlags wname [Inlinable]
when (AssertTotal `elem` opts) $
setFlags wname [Inlinable, AssertTotal]
i <- getIState
let rhstrans' = updateWithTerm i mpn wname lhs (map fst bargs_pre) (map fst (bargs_post))
. rhs_trans info
mapM_ (rec_elabDecl info EAll (info { rhs_trans = rhstrans' })) wb
-- rhs becomes: fname' ps_pre wval ps_post Refl
let rhs = PApp fc (PRef fc [] wname)
(map (pexp . (PRef fc []) . fst) bargs_pre ++
pexp wval :
(map (pexp . (PRef fc []) . fst) bargs_post) ++
case mpn of
Nothing -> []
Just _ -> [pexp (PApp NoFC (PRef NoFC [] eqCon)
[ pimp (sUN "A") Placeholder False
, pimp (sUN "x") Placeholder False
])])
logElab 5 ("New RHS " ++ showTmImpls rhs)
ctxt <- getContext -- New context with block added
i <- getIState
((rhsElab, defer, is, ctxt', newDecls, highlights, newGName), _) <-
tclift $ elaborate (constraintNS info) ctxt (idris_datatypes i) (idris_name i) (sMN 0 "wpatRHS") clhsty initEState
(do pbinds i lhs_tm
setNextName
(ElabResult _ d is ctxt' newDecls highlights newGName) <-
erun fc (build i info ERHS opts fname rhs)
psolve lhs_tm
tt <- get_term
return (tt, d, is, ctxt', newDecls, highlights, newGName))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
updateIState $ \i -> i { idris_name = newGName }
ctxt <- getContext
let rhs' = rhsElab
def' <- checkDef info fc iderr True defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False, True))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
logElab 5 ("Checked RHS " ++ show rhs')
(crhs, crhsty) <- recheckC (constraintNS info) fc id [] rhs'
return (Right (clhs, crhs), lhs)
where
getImps (Bind n (Pi _ _ _ _) t) = pexp Placeholder : getImps t
getImps _ = []
mkAuxC pn wname lhs ns ns' (PClauses fc o n cs)
= do cs' <- mapM (mkAux pn wname lhs ns ns') cs
return $ PClauses fc o wname cs'
mkAuxC pn wname lhs ns ns' d = return d
mkAux pn wname toplhs ns ns' (PClause fc n tm_in (w:ws) rhs wheres)
= do i <- getIState
let tm = addImplPat i tm_in
logElab 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
case matchClause i toplhs tm of
Left (a,b) -> ifail $ show fc ++ ":with clause does not match top level"
Right mvars ->
do logElab 3 ("Match vars : " ++ show mvars)
lhs <- updateLHS n pn wname mvars ns ns' (fullApp tm) w
return $ PClause fc wname lhs ws rhs wheres
mkAux pn wname toplhs ns ns' (PWith fc n tm_in (w:ws) wval pn' withs)
= do i <- getIState
let tm = addImplPat i tm_in
logElab 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
withs' <- mapM (mkAuxC pn wname toplhs ns ns') withs
case matchClause i toplhs tm of
Left (a,b) -> trace ("matchClause: " ++ show a ++ " =/= " ++ show b) (ifail $ show fc ++ "with clause does not match top level")
Right mvars ->
do lhs <- updateLHS n pn wname mvars ns ns' (fullApp tm) w
return $ PWith fc wname lhs ws wval pn' withs'
mkAux pn wname toplhs ns ns' c
= ifail $ show fc ++ ":badly formed with clause"
addArg (PApp fc f args) w = PApp fc f (args ++ [pexp w])
addArg (PRef fc hls f) w = PApp fc (PRef fc hls f) [pexp w]
-- ns, arguments which don't depend on the with argument
-- ns', arguments which do
updateLHS n pn wname mvars ns_in ns_in' (PApp fc (PRef fc' hls' n') args) w
= let ns = map (keepMvar (map fst mvars) fc') ns_in
ns' = map (keepMvar (map fst mvars) fc') ns_in' in
return $ substMatches mvars $
PApp fc (PRef fc' [] wname)
(map pexp ns ++ pexp w : (map pexp ns') ++
case pn of
Nothing -> []
Just pnm -> [pexp (PRef fc [] pnm)])
updateLHS n pn wname mvars ns_in ns_in' tm w
= updateLHS n pn wname mvars ns_in ns_in' (PApp fc tm []) w
-- Only keep a var as a pattern variable in the with block if it's
-- matched in the top level pattern
keepMvar mvs fc v | v `elem` mvs = PRef fc [] v
| otherwise = Placeholder
updateWithTerm :: IState -> Maybe Name -> Name -> PTerm -> [Name] -> [Name] -> PTerm -> PTerm
updateWithTerm ist pn wname toplhs ns_in ns_in' tm
= mapPT updateApp tm
where
arity (PApp _ _ as) = length as
arity _ = 0
lhs_arity = arity toplhs
currentFn fname (PAlternative _ _ as)
| Just tm <- getApp as = tm
where getApp (tm@(PApp _ (PRef _ _ f) _) : as)
| f == fname = Just tm
getApp (_ : as) = getApp as
getApp [] = Nothing
currentFn _ tm = tm
updateApp wtm@(PWithApp fcw tm_in warg) =
let tm = currentFn fname tm_in in
case matchClause ist toplhs tm of
Left _ -> PElabError (Msg (show fc ++ ":with application does not match top level "))
Right mvars ->
let ns = map (keepMvar (map fst mvars) fcw) ns_in
ns' = map (keepMvar (map fst mvars) fcw) ns_in'
wty = lookupTyExact wname (tt_ctxt ist)
res = substMatches mvars $
PApp fcw (PRef fcw [] wname)
(map pexp ns ++ pexp warg : (map pexp ns') ++
case pn of
Nothing -> []
Just pnm -> [pexp (PRef fc [] pnm)]) in
case wty of
Nothing -> res -- can't happen!
Just ty -> addResolves ty res
updateApp tm = tm
addResolves ty (PApp fc f args) = PApp fc f (addResolvesArgs fc ty args)
addResolves ty tm = tm
-- if an argument's type is an interface, and is otherwise to
-- be inferred, then resolve it with implementation search
-- This is something of a hack, because matching on the top level
-- application won't find this information for us
addResolvesArgs :: FC -> Term -> [PArg] -> [PArg]
addResolvesArgs fc (Bind n (Pi _ _ ty _) sc) (a : args)
| (P _ cn _, _) <- unApply ty,
getTm a == Placeholder
= case lookupCtxtExact cn (idris_interfaces ist) of
Just _ -> a { getTm = PResolveTC fc } : addResolvesArgs fc sc args
Nothing -> a : addResolvesArgs fc sc args
addResolvesArgs fc (Bind n (Pi _ _ ty _) sc) (a : args)
= a : addResolvesArgs fc sc args
addResolvesArgs fc _ args = args
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
split [] rest pre = (reverse pre, rest)
split deps ((n, ty) : rest) pre
| n `elem` deps = split (deps \\ [n]) rest ((n, ty) : pre)
| otherwise = split deps rest ((n, ty) : pre)
split deps [] pre = (reverse pre, [])
abstract wn wv wty (n, argty) = (n, substTerm wv (P Bound wn wty) argty)
-- | Apply a transformation to all RHSes and nested RHSs
mapRHS :: (PTerm -> PTerm) -> PClause -> PClause
mapRHS f (PClause fc n lhs args rhs ws)
= PClause fc n lhs args (f rhs) (map (mapRHSdecl f) ws)
mapRHS f (PWith fc n lhs args warg prf ws)
= PWith fc n lhs args (f warg) prf (map (mapRHSdecl f) ws)
mapRHS f (PClauseR fc args rhs ws)
= PClauseR fc args (f rhs) (map (mapRHSdecl f) ws)
mapRHS f (PWithR fc args warg prf ws)
= PWithR fc args (f warg) prf (map (mapRHSdecl f) ws)
mapRHSdecl :: (PTerm -> PTerm) -> PDecl -> PDecl
mapRHSdecl f (PClauses fc opt n cs)
= PClauses fc opt n (map (mapRHS f) cs)
mapRHSdecl f t = t
| mpkh/Idris-dev | src/Idris/Elab/Clause.hs | bsd-3-clause | 61,872 | 9 | 30 | 24,648 | 17,385 | 8,614 | 8,771 | 956 | 52 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Graphics.UI.Gtk
import Data.List ( findIndex )
import Control.Monad.IO.Class (MonadIO(..))
import qualified Data.Text as T
main = do
initGUI
win <- windowNew
on win deleteEvent $ liftIO mainQuit >> return False
combo <- comboBoxNewWithEntry
comboBoxSetModelText combo
mapM_ (comboBoxAppendText combo)
(T.words "ice-cream turkey pasta sandwich steak")
-- select the first item
comboBoxSetActive combo 0
-- Get the entry widget that the ComboBoxEntry uses.
(Just w) <- binGetChild combo
let entry = castToEntry w
-- Whenever the user has completed editing the text, append the new
-- text to the store unless it's already in there.
on entry entryActivated $ do
str <- entryGetText entry
store <- comboBoxGetModelText combo
elems <- listStoreToList store
comboBoxSetActive combo (-1)
idx <- case (findIndex ((==) str) elems) of
Just idx -> return idx
Nothing -> listStoreAppend store str
comboBoxSetActive combo idx
return ()
containerAdd win combo
widgetShowAll win
mainGUI
| k0001/gtk2hs | gtk/demo/menu/ComboDemo.hs | gpl-3.0 | 1,120 | 0 | 15 | 238 | 295 | 140 | 155 | 30 | 2 |
-- | Benchmarks various pure functions from the Text library
--
-- Tested in this benchmark:
--
-- * Most pure functions defined the string types
--
{-# LANGUAGE BangPatterns, CPP, GADTs, MagicHash #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Benchmarks.Pure
( benchmark
) where
import Control.DeepSeq (NFData (..))
import Control.Exception (evaluate)
import Criterion (Benchmark, bgroup, bench, nf)
import Data.Char (toLower, toUpper)
import Data.Monoid (mappend, mempty)
import GHC.Base (Char (..), Int (..), chr#, ord#, (+#))
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.List as L
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TB
import qualified Data.Text.Lazy.Encoding as TL
benchmark :: String -> FilePath -> IO Benchmark
benchmark kind fp = do
  -- Evaluate stuff before actually running the benchmark; we don't want to
  -- count it here.
-- ByteString A
bsa <- BS.readFile fp
-- Text A/B, LazyText A/B
ta <- evaluate $ T.decodeUtf8 bsa
tb <- evaluate $ T.toUpper ta
tla <- evaluate $ TL.fromChunks (T.chunksOf 16376 ta)
tlb <- evaluate $ TL.fromChunks (T.chunksOf 16376 tb)
-- ByteString B, LazyByteString A/B
bsb <- evaluate $ T.encodeUtf8 tb
bla <- evaluate $ BL.fromChunks (chunksOf 16376 bsa)
blb <- evaluate $ BL.fromChunks (chunksOf 16376 bsb)
-- String A/B
sa <- evaluate $ UTF8.toString bsa
sb <- evaluate $ T.unpack tb
-- Lengths
bsa_len <- evaluate $ BS.length bsa
ta_len <- evaluate $ T.length ta
bla_len <- evaluate $ BL.length bla
tla_len <- evaluate $ TL.length tla
sa_len <- evaluate $ L.length sa
-- Lines
bsl <- evaluate $ BS.lines bsa
bll <- evaluate $ BL.lines bla
tl <- evaluate $ T.lines ta
tll <- evaluate $ TL.lines tla
sl <- evaluate $ L.lines sa
return $ bgroup "Pure"
[ bgroup "append"
[ benchT $ nf (T.append tb) ta
, benchTL $ nf (TL.append tlb) tla
, benchBS $ nf (BS.append bsb) bsa
, benchBSL $ nf (BL.append blb) bla
, benchS $ nf ((++) sb) sa
]
, bgroup "concat"
[ benchT $ nf T.concat tl
, benchTL $ nf TL.concat tll
, benchBS $ nf BS.concat bsl
, benchBSL $ nf BL.concat bll
, benchS $ nf L.concat sl
]
, bgroup "cons"
[ benchT $ nf (T.cons c) ta
, benchTL $ nf (TL.cons c) tla
, benchBS $ nf (BS.cons c) bsa
, benchBSL $ nf (BL.cons c) bla
, benchS $ nf (c:) sa
]
, bgroup "concatMap"
[ benchT $ nf (T.concatMap (T.replicate 3 . T.singleton)) ta
, benchTL $ nf (TL.concatMap (TL.replicate 3 . TL.singleton)) tla
, benchBS $ nf (BS.concatMap (BS.replicate 3)) bsa
, benchBSL $ nf (BL.concatMap (BL.replicate 3)) bla
, benchS $ nf (L.concatMap (L.replicate 3 . (:[]))) sa
]
, bgroup "decode"
[ benchT $ nf T.decodeUtf8 bsa
, benchTL $ nf TL.decodeUtf8 bla
, benchBS $ nf BS.unpack bsa
, benchBSL $ nf BL.unpack bla
, benchS $ nf UTF8.toString bsa
]
, bgroup "decode'"
[ benchT $ nf T.decodeUtf8' bsa
, benchTL $ nf TL.decodeUtf8' bla
]
, bgroup "drop"
[ benchT $ nf (T.drop (ta_len `div` 3)) ta
, benchTL $ nf (TL.drop (tla_len `div` 3)) tla
, benchBS $ nf (BS.drop (bsa_len `div` 3)) bsa
, benchBSL $ nf (BL.drop (bla_len `div` 3)) bla
, benchS $ nf (L.drop (sa_len `div` 3)) sa
]
, bgroup "encode"
[ benchT $ nf T.encodeUtf8 ta
, benchTL $ nf TL.encodeUtf8 tla
, benchBS $ nf BS.pack sa
, benchBSL $ nf BL.pack sa
, benchS $ nf UTF8.fromString sa
]
, bgroup "filter"
[ benchT $ nf (T.filter p0) ta
, benchTL $ nf (TL.filter p0) tla
, benchBS $ nf (BS.filter p0) bsa
, benchBSL $ nf (BL.filter p0) bla
, benchS $ nf (L.filter p0) sa
]
, bgroup "filter.filter"
[ benchT $ nf (T.filter p1 . T.filter p0) ta
, benchTL $ nf (TL.filter p1 . TL.filter p0) tla
, benchBS $ nf (BS.filter p1 . BS.filter p0) bsa
, benchBSL $ nf (BL.filter p1 . BL.filter p0) bla
, benchS $ nf (L.filter p1 . L.filter p0) sa
]
, bgroup "foldl'"
[ benchT $ nf (T.foldl' len 0) ta
, benchTL $ nf (TL.foldl' len 0) tla
, benchBS $ nf (BS.foldl' len 0) bsa
, benchBSL $ nf (BL.foldl' len 0) bla
, benchS $ nf (L.foldl' len 0) sa
]
, bgroup "foldr"
[ benchT $ nf (L.length . T.foldr (:) []) ta
, benchTL $ nf (L.length . TL.foldr (:) []) tla
, benchBS $ nf (L.length . BS.foldr (:) []) bsa
, benchBSL $ nf (L.length . BL.foldr (:) []) bla
, benchS $ nf (L.length . L.foldr (:) []) sa
]
, bgroup "head"
[ benchT $ nf T.head ta
, benchTL $ nf TL.head tla
, benchBS $ nf BS.head bsa
, benchBSL $ nf BL.head bla
, benchS $ nf L.head sa
]
, bgroup "init"
[ benchT $ nf T.init ta
, benchTL $ nf TL.init tla
, benchBS $ nf BS.init bsa
, benchBSL $ nf BL.init bla
, benchS $ nf L.init sa
]
, bgroup "intercalate"
[ benchT $ nf (T.intercalate tsw) tl
, benchTL $ nf (TL.intercalate tlw) tll
, benchBS $ nf (BS.intercalate bsw) bsl
, benchBSL $ nf (BL.intercalate blw) bll
, benchS $ nf (L.intercalate lw) sl
]
, bgroup "intersperse"
[ benchT $ nf (T.intersperse c) ta
, benchTL $ nf (TL.intersperse c) tla
, benchBS $ nf (BS.intersperse c) bsa
, benchBSL $ nf (BL.intersperse c) bla
, benchS $ nf (L.intersperse c) sa
]
, bgroup "isInfixOf"
[ benchT $ nf (T.isInfixOf tsw) ta
, benchTL $ nf (TL.isInfixOf tlw) tla
, benchBS $ nf (BS.isInfixOf bsw) bsa
-- no isInfixOf for lazy bytestrings
, benchS $ nf (L.isInfixOf lw) sa
]
, bgroup "last"
[ benchT $ nf T.last ta
, benchTL $ nf TL.last tla
, benchBS $ nf BS.last bsa
, benchBSL $ nf BL.last bla
, benchS $ nf L.last sa
]
, bgroup "map"
[ benchT $ nf (T.map f) ta
, benchTL $ nf (TL.map f) tla
, benchBS $ nf (BS.map f) bsa
, benchBSL $ nf (BL.map f) bla
, benchS $ nf (L.map f) sa
]
, bgroup "mapAccumL"
[ benchT $ nf (T.mapAccumL g 0) ta
, benchTL $ nf (TL.mapAccumL g 0) tla
, benchBS $ nf (BS.mapAccumL g 0) bsa
, benchBSL $ nf (BL.mapAccumL g 0) bla
, benchS $ nf (L.mapAccumL g 0) sa
]
, bgroup "mapAccumR"
[ benchT $ nf (T.mapAccumR g 0) ta
, benchTL $ nf (TL.mapAccumR g 0) tla
, benchBS $ nf (BS.mapAccumR g 0) bsa
, benchBSL $ nf (BL.mapAccumR g 0) bla
, benchS $ nf (L.mapAccumR g 0) sa
]
, bgroup "map.map"
[ benchT $ nf (T.map f . T.map f) ta
, benchTL $ nf (TL.map f . TL.map f) tla
, benchBS $ nf (BS.map f . BS.map f) bsa
, benchBSL $ nf (BL.map f . BL.map f) bla
, benchS $ nf (L.map f . L.map f) sa
]
, bgroup "replicate char"
[ benchT $ nf (T.replicate bsa_len) (T.singleton c)
, benchTL $ nf (TL.replicate (fromIntegral bsa_len)) (TL.singleton c)
, benchBS $ nf (BS.replicate bsa_len) c
, benchBSL $ nf (BL.replicate (fromIntegral bsa_len)) c
, benchS $ nf (L.replicate bsa_len) c
]
, bgroup "replicate string"
[ benchT $ nf (T.replicate (bsa_len `div` T.length tsw)) tsw
, benchTL $ nf (TL.replicate (fromIntegral bsa_len `div` TL.length tlw)) tlw
, benchS $ nf (replicat (bsa_len `div` T.length tsw)) lw
]
, bgroup "reverse"
[ benchT $ nf T.reverse ta
, benchTL $ nf TL.reverse tla
, benchBS $ nf BS.reverse bsa
, benchBSL $ nf BL.reverse bla
, benchS $ nf L.reverse sa
]
, bgroup "take"
[ benchT $ nf (T.take (ta_len `div` 3)) ta
, benchTL $ nf (TL.take (tla_len `div` 3)) tla
, benchBS $ nf (BS.take (bsa_len `div` 3)) bsa
, benchBSL $ nf (BL.take (bla_len `div` 3)) bla
, benchS $ nf (L.take (sa_len `div` 3)) sa
]
, bgroup "tail"
[ benchT $ nf T.tail ta
, benchTL $ nf TL.tail tla
, benchBS $ nf BS.tail bsa
, benchBSL $ nf BL.tail bla
, benchS $ nf L.tail sa
]
, bgroup "toLower"
[ benchT $ nf T.toLower ta
, benchTL $ nf TL.toLower tla
, benchBS $ nf (BS.map toLower) bsa
, benchBSL $ nf (BL.map toLower) bla
, benchS $ nf (L.map toLower) sa
]
, bgroup "toUpper"
[ benchT $ nf T.toUpper ta
, benchTL $ nf TL.toUpper tla
, benchBS $ nf (BS.map toUpper) bsa
, benchBSL $ nf (BL.map toUpper) bla
, benchS $ nf (L.map toUpper) sa
]
, bgroup "words"
[ benchT $ nf T.words ta
, benchTL $ nf TL.words tla
, benchBS $ nf BS.words bsa
, benchBSL $ nf BL.words bla
, benchS $ nf L.words sa
]
, bgroup "zipWith"
[ benchT $ nf (T.zipWith min tb) ta
, benchTL $ nf (TL.zipWith min tlb) tla
, benchBS $ nf (BS.zipWith min bsb) bsa
, benchBSL $ nf (BL.zipWith min blb) bla
, benchS $ nf (L.zipWith min sb) sa
]
, bgroup "length"
[ bgroup "cons"
[ benchT $ nf (T.length . T.cons c) ta
, benchTL $ nf (TL.length . TL.cons c) tla
, benchBS $ nf (BS.length . BS.cons c) bsa
, benchBSL $ nf (BL.length . BL.cons c) bla
, benchS $ nf (L.length . (:) c) sa
]
, bgroup "decode"
[ benchT $ nf (T.length . T.decodeUtf8) bsa
, benchTL $ nf (TL.length . TL.decodeUtf8) bla
, benchBS $ nf (L.length . BS.unpack) bsa
, benchBSL $ nf (L.length . BL.unpack) bla
, bench "StringUTF8" $ nf (L.length . UTF8.toString) bsa
]
, bgroup "drop"
[ benchT $ nf (T.length . T.drop (ta_len `div` 3)) ta
, benchTL $ nf (TL.length . TL.drop (tla_len `div` 3)) tla
, benchBS $ nf (BS.length . BS.drop (bsa_len `div` 3)) bsa
, benchBSL $ nf (BL.length . BL.drop (bla_len `div` 3)) bla
, benchS $ nf (L.length . L.drop (sa_len `div` 3)) sa
]
, bgroup "filter"
[ benchT $ nf (T.length . T.filter p0) ta
, benchTL $ nf (TL.length . TL.filter p0) tla
, benchBS $ nf (BS.length . BS.filter p0) bsa
, benchBSL $ nf (BL.length . BL.filter p0) bla
, benchS $ nf (L.length . L.filter p0) sa
]
, bgroup "filter.filter"
[ benchT $ nf (T.length . T.filter p1 . T.filter p0) ta
, benchTL $ nf (TL.length . TL.filter p1 . TL.filter p0) tla
, benchBS $ nf (BS.length . BS.filter p1 . BS.filter p0) bsa
, benchBSL $ nf (BL.length . BL.filter p1 . BL.filter p0) bla
, benchS $ nf (L.length . L.filter p1 . L.filter p0) sa
]
, bgroup "init"
[ benchT $ nf (T.length . T.init) ta
, benchTL $ nf (TL.length . TL.init) tla
, benchBS $ nf (BS.length . BS.init) bsa
, benchBSL $ nf (BL.length . BL.init) bla
, benchS $ nf (L.length . L.init) sa
]
, bgroup "intercalate"
[ benchT $ nf (T.length . T.intercalate tsw) tl
, benchTL $ nf (TL.length . TL.intercalate tlw) tll
, benchBS $ nf (BS.length . BS.intercalate bsw) bsl
, benchBSL $ nf (BL.length . BL.intercalate blw) bll
, benchS $ nf (L.length . L.intercalate lw) sl
]
, bgroup "intersperse"
[ benchT $ nf (T.length . T.intersperse c) ta
, benchTL $ nf (TL.length . TL.intersperse c) tla
, benchBS $ nf (BS.length . BS.intersperse c) bsa
, benchBSL $ nf (BL.length . BL.intersperse c) bla
, benchS $ nf (L.length . L.intersperse c) sa
]
, bgroup "map"
[ benchT $ nf (T.length . T.map f) ta
, benchTL $ nf (TL.length . TL.map f) tla
, benchBS $ nf (BS.length . BS.map f) bsa
, benchBSL $ nf (BL.length . BL.map f) bla
, benchS $ nf (L.length . L.map f) sa
]
, bgroup "map.map"
[ benchT $ nf (T.length . T.map f . T.map f) ta
, benchTL $ nf (TL.length . TL.map f . TL.map f) tla
, benchBS $ nf (BS.length . BS.map f . BS.map f) bsa
, benchS $ nf (L.length . L.map f . L.map f) sa
]
, bgroup "replicate char"
[ benchT $ nf (T.length . T.replicate bsa_len) (T.singleton c)
, benchTL $ nf (TL.length . TL.replicate (fromIntegral bsa_len)) (TL.singleton c)
, benchBS $ nf (BS.length . BS.replicate bsa_len) c
, benchBSL $ nf (BL.length . BL.replicate (fromIntegral bsa_len)) c
, benchS $ nf (L.length . L.replicate bsa_len) c
]
, bgroup "replicate string"
[ benchT $ nf (T.length . T.replicate (bsa_len `div` T.length tsw)) tsw
, benchTL $ nf (TL.length . TL.replicate (fromIntegral bsa_len `div` TL.length tlw)) tlw
, benchS $ nf (L.length . replicat (bsa_len `div` T.length tsw)) lw
]
, bgroup "take"
[ benchT $ nf (T.length . T.take (ta_len `div` 3)) ta
, benchTL $ nf (TL.length . TL.take (tla_len `div` 3)) tla
, benchBS $ nf (BS.length . BS.take (bsa_len `div` 3)) bsa
, benchBSL $ nf (BL.length . BL.take (bla_len `div` 3)) bla
, benchS $ nf (L.length . L.take (sa_len `div` 3)) sa
]
, bgroup "tail"
[ benchT $ nf (T.length . T.tail) ta
, benchTL $ nf (TL.length . TL.tail) tla
, benchBS $ nf (BS.length . BS.tail) bsa
, benchBSL $ nf (BL.length . BL.tail) bla
, benchS $ nf (L.length . L.tail) sa
]
, bgroup "toLower"
[ benchT $ nf (T.length . T.toLower) ta
, benchTL $ nf (TL.length . TL.toLower) tla
, benchBS $ nf (BS.length . BS.map toLower) bsa
, benchBSL $ nf (BL.length . BL.map toLower) bla
, benchS $ nf (L.length . L.map toLower) sa
]
, bgroup "toUpper"
[ benchT $ nf (T.length . T.toUpper) ta
, benchTL $ nf (TL.length . TL.toUpper) tla
, benchBS $ nf (BS.length . BS.map toUpper) bsa
, benchBSL $ nf (BL.length . BL.map toUpper) bla
, benchS $ nf (L.length . L.map toUpper) sa
]
, bgroup "words"
[ benchT $ nf (L.length . T.words) ta
, benchTL $ nf (L.length . TL.words) tla
, benchBS $ nf (L.length . BS.words) bsa
, benchBSL $ nf (L.length . BL.words) bla
, benchS $ nf (L.length . L.words) sa
]
, bgroup "zipWith"
[ benchT $ nf (T.length . T.zipWith min tb) ta
, benchTL $ nf (TL.length . TL.zipWith min tlb) tla
, benchBS $ nf (L.length . BS.zipWith min bsb) bsa
, benchBSL $ nf (L.length . BL.zipWith min blb) bla
, benchS $ nf (L.length . L.zipWith min sb) sa
]
]
, bgroup "Builder"
[ bench "mappend char" $ nf (TL.length . TB.toLazyText . mappendNChar 'a') 10000
, bench "mappend 8 char" $ nf (TL.length . TB.toLazyText . mappend8Char) 'a'
, bench "mappend text" $ nf (TL.length . TB.toLazyText . mappendNText short) 10000
]
]
where
benchS = bench ("String+" ++ kind)
benchT = bench ("Text+" ++ kind)
benchTL = bench ("LazyText+" ++ kind)
benchBS = bench ("ByteString+" ++ kind)
benchBSL = bench ("LazyByteString+" ++ kind)
c = 'й'
p0 = (== c)
p1 = (/= 'д')
lw = "право"
bsw = UTF8.fromString lw
blw = BL.fromChunks [bsw]
tsw = T.pack lw
tlw = TL.fromChunks [tsw]
f (C# c#) = C# (chr# (ord# c# +# 1#))
g (I# i#) (C# c#) = (I# (i# +# 1#), C# (chr# (ord# c# +# i#)))
len l _ = l + (1::Int)
replicat n = concat . L.replicate n
short = T.pack "short"
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData BS.ByteString
instance NFData BL.ByteString where
rnf BL.Empty = ()
rnf (BL.Chunk _ ts) = rnf ts
#endif
data B where
B :: NFData a => a -> B
instance NFData B where
rnf (B b) = rnf b
-- | Split a bytestring in chunks
--
chunksOf :: Int -> BS.ByteString -> [BS.ByteString]
chunksOf k = go
where
go t = case BS.splitAt k t of
(a,b) | BS.null a -> []
| otherwise -> a : go b
-- | Append a character n times
--
mappendNChar :: Char -> Int -> TB.Builder
mappendNChar c n = go 0
where
go i
| i < n = TB.singleton c `mappend` go (i+1)
| otherwise = mempty
-- | Gives more opportunity for inlining and elimination of unnecessary
-- bounds checks.
--
mappend8Char :: Char -> TB.Builder
mappend8Char c = TB.singleton c `mappend` TB.singleton c `mappend`
TB.singleton c `mappend` TB.singleton c `mappend`
TB.singleton c `mappend` TB.singleton c `mappend`
TB.singleton c `mappend` TB.singleton c
-- | Append a text N times
--
mappendNText :: T.Text -> Int -> TB.Builder
mappendNText t n = go 0
where
go i
| i < n = TB.fromText t `mappend` go (i+1)
| otherwise = mempty
| beni55/text | benchmarks/haskell/Benchmarks/Pure.hs | bsd-2-clause | 19,718 | 0 | 22 | 7,932 | 7,376 | 3,773 | 3,603 | 378 | 1 |
module HAD.Y2014.M04.D09.Solution where
import Data.List (sortBy)
import Data.Monoid (mconcat, (<>))
import Data.Ord (comparing)
-- $setup
-- >>> import Data.List
data Foo = Foo {x :: Int, y :: String, z :: String}
deriving (Read, Show, Eq)
{- | orderXYZ
Order Foo by x then by y and then by z
prop> sort xs == (map x . orderXYZ . map (\v -> Foo v "y" "z")) xs
prop> sort xs == (map y . orderXYZ . map (\v -> Foo 42 v "z")) xs
prop> sort xs == (map z . orderXYZ . map (\v -> Foo 42 "y" v )) xs
-}
orderXYZ :: [Foo] -> [Foo]
orderXYZ = sortBy $ comparing x <> comparing y <> comparing z
orderXYZ' :: [Foo] -> [Foo]
orderXYZ' = sortBy $ mconcat [comparing x, comparing y, comparing z]
| 1HaskellADay/1HAD | exercises/HAD/Y2014/M04/D09/Solution.hs | mit | 710 | 0 | 8 | 163 | 175 | 102 | 73 | 10 | 1 |
module Foo where
-- TODO: Expressions inside applications of type and predicate aliases.
{-@ predicate Rng Lo V Hi = (Lo <= V && V < Hi) @-}
{-@ bog :: {v:Int | (Rng 0 v 10)} @-}
bog :: Int
bog = 5
| ssaavedra/liquidhaskell | tests/todo/aliasConst.hs | bsd-3-clause | 201 | 0 | 4 | 47 | 17 | 12 | 5 | 3 | 1 |
module Boilerplater where
import Test.Framework.Providers.QuickCheck2
import Language.Haskell.TH
testProperties :: [Name] -> Q Exp
testProperties nms = fmap ListE $ sequence [[| testProperty $(stringE prop_name) $(varE nm) |]
| nm <- nms
, Just prop_name <- [stripPrefix_maybe "prop_" (nameBase nm)]]
-- This nice clean solution doesn't quite work since I need to use lexically-scoped type
-- variables, which aren't supported by Template Haskell. Argh!
-- testProperties :: Q [Dec] -> Q Exp
-- testProperties mdecs = do
-- decs <- mdecs
-- property_exprs <- sequence [[| testProperty "$prop_name" $(return $ VarE nm) |]
-- | FunD nm _clauses <- decs
-- , Just prop_name <- [stripPrefix_maybe "prop_" (nameBase nm)]]
-- return $ LetE decs (ListE property_exprs)
stripPrefix_maybe :: String -> String -> Maybe String
stripPrefix_maybe prefix what
| what_start == prefix = Just what_end
| otherwise = Nothing
where (what_start, what_end) = splitAt (length prefix) what
| dolio/vector | tests/Boilerplater.hs | bsd-3-clause | 1,150 | 0 | 13 | 328 | 171 | 93 | 78 | -1 | -1 |
/* { dg-options "-I. -Winvalid-pch -g" } */
extern int x;
| SanDisk-Open-Source/SSD_Dashboard | uefi/gcc/gcc-4.6.3/gcc/testsuite/gcc.dg/pch/valid-1b.hs | gpl-2.0 | 59 | 4 | 5 | 12 | 26 | 14 | 12 | -1 | -1 |
{-# LANGUAGE GADTs, DataKinds, KindSignatures, TypeFamilies, PolyKinds #-}
module T7386 where
data Nat = Zero | Succ Nat
data family Sing (a :: k)
data instance Sing (a :: Nat) where
SZero :: Sing Zero
SSucc :: Sing n -> Sing (Succ n)
| ghc-android/ghc | testsuite/tests/ghci.debugger/scripts/T7386.hs | bsd-3-clause | 244 | 0 | 9 | 52 | 72 | 42 | 30 | 7 | 0 |
module Parser where
import Expense
import Errors
import Data.Maybe
import Data.Time.Calendar
import Data.Time.Format
import System.Locale
parseExpenseList :: String -> [Either Expense ErrString]
parseExpenseList s = map parseExpense $ lines s
parseExpense :: String -> Either Expense ErrString
parseExpense es | pExpLen == 0 = Right ParserNothing
| pExpLen > 3 = Right $ ParserTooMany es
| pExpLen < 3 = Right $ ParserTooLittle es
| isNothing pAmnt = Right $ ParserErrAmount (pExp !! 0)
| isNothing pDate = Right $ ParserErrDate (pExp !! 1)
| isNothing pTags = Right $ ParserErrTags (pExp !! 2)
| otherwise = Left $ Expense {amountOf = pAmntUnp,
dateOf = pDateUnp,
tagsOf = pTagsUnp}
where pExp = words es
pExpLen = length pExp
pAmnt = parseAmount $ pExp !! 0
pDate = parseDate $ pExp !! 1
pTags = parseTags $ pExp !! 2
pAmntUnp = (maybeToList pAmnt) !! 0
pDateUnp = (maybeToList pDate) !! 0
pTagsUnp = (maybeToList pTags) !! 0
parseAmount :: String -> Maybe Double
parseAmount s | parsedS == [] = Nothing
| otherwise = Just $ fst $ parsedS !! 0
where parsedS = reads s :: [(Double, String)]
parseYear :: String -> Maybe Day
parseYear = parseTime defaultTimeLocale "%Y"
parseMonth :: String -> Maybe Day
parseMonth = parseTime defaultTimeLocale "%Y-%m"
parseDate :: String -> Maybe Day
parseDate = parseTime defaultTimeLocale "%Y-%m-%d"
parseTags :: String -> Maybe [String]
parseTags s | length res == 0 = Nothing
| otherwise = Just res
where res = filter (/= "") $ __parseTags s "" []
__parseTags :: String -> String -> [String] -> [String]
__parseTags [] ct tl = tl ++ [ct]
__parseTags (x:xs) ct tl | x /= ',' = __parseTags xs (ct ++ [x]) tl
| otherwise = __parseTags xs "" (tl ++ [ct])
| fredmorcos/attic | projects/pet/archive/pet_haskell_master_complete/Parser.hs | isc | 2,072 | 0 | 9 | 676 | 687 | 348 | 339 | 45 | 1 |
{-# LANGUAGE MagicHash #-}
module Physics.Contact.Circle.Benchmark where
import Physics.Contact.Circle
import Physics.Linear
test =
contact
(Circle (P2 $ V2 (-1.0##) 0.0##) 1.1)
(Circle (P2 $ V2 1.0## 0.0##) 1.1)
| ublubu/shapes | shapes/bench/Physics/Contact/Circle/Benchmark.hs | mit | 226 | 0 | 12 | 41 | 75 | 42 | 33 | 8 | 1 |
module TicTacToe.StateSpec where
import Test.Hspec
import Control.Lens ((^.))
import qualified TicTacToe.Board as Board
import TicTacToe.Player (Player(..))
import TicTacToe.State (board, player)
import qualified TicTacToe.State as State
spec :: Spec
spec = describe "TicTacToe.State" $
describe "new" $ do
let state = State.new
it "should initialise the player to Crosses" $
(state^.player) `shouldBe` X
it "should initialise the board to an empty board" $
(state^.board) `shouldBe` Board.empty
| tomphp/haskell-tictactoe | test/TicTacToe/StateSpec.hs | mit | 551 | 0 | 11 | 120 | 149 | 86 | 63 | 15 | 1 |
-- Strings Mix
-- http://www.codewars.com/kata/5629db57620258aa9d000014
module Codewars.G964.Mixin where
import Data.Map.Lazy (fromList, unionWith, mapWithKey, elems)
import Data.List (group, sort, sortBy, intercalate)
import Data.Char (isLower)
mix :: String -> String -> String
mix s1 s2 = intercalate "/" (sortBy h (elems $ mapWithKey g (unionWith f (fun s1 "1") (fun s2 "2"))))
where fun str id = fromList
. map (\x -> (head x, (length x, id)))
. filter (\x -> length x > 1)
. group . sort
$ filter isLower str
f x y = case compare (fst x) (fst y)
of LT -> y
GT -> x
EQ -> (fst x, "=")
g k v = snd v ++ ":" ++ replicate (fst v) k
h l r = case compare (length r) (length l)
of EQ -> compare l r
x -> x
| gafiatulin/codewars | src/4 kyu/Mixin.hs | mit | 931 | 0 | 17 | 361 | 354 | 185 | 169 | 19 | 4 |
import Test.Hspec
-- Problem 3
-- Find the K'th element of a list. The first element in the list is number 1.
elementAt :: [a] -> Int -> a
elementAt ls i = ls !! index
where index = i - 1
main :: IO()
main = hspec $
describe "99-exercises.3 = k th element of a list" $
it "returns the element in defined position" $ do
elementAt [1..10] 8 `shouldBe` (8 :: Int)
elementAt ['a'..'z'] 4 `shouldBe` 'd'
| baldore/haskell-99-exercises | 3.hs | mit | 421 | 1 | 10 | 102 | 130 | 67 | 63 | 10 | 1 |
{-# LANGUAGE UndecidableInstances, TypeSynonymInstances, FlexibleInstances,
GeneralizedNewtypeDeriving #-}
-- this module is part of the risc386 simulator (IntelMain)
module Risc386Clone.FrameIntel where
-- import Data.List -- reverse
import Data.Map (Map)
import qualified Data.Map as Map
import Text.PrettyPrint
import Risc386Clone.Util -- splitmap
import Risc386Clone.GenSym
import Risc386Clone.Frame
import Risc386Clone.Intel
-- import TreePrinter (Pretty(..))
type IntelFrame = Frame [Instr]
-- data IntelFrame = IntelFrame String [Acc] [Instr]
instance Pretty IntelFrame where
ppr (Frame f dat il) =
nest 8 (text ".global" <+> nest 8 (text f))
$+$ nest 8 (text ".type" <+> nest 8 (text f <> comma <+> text "@function"))
$+$ text f <> colon
$+$ nest 8 (text "#args" <+> (hsep $ punctuate comma $ map ppr (parameters dat)))
$+$ ppr il
instance Pretty [IntelFrame] where
ppr fs = nest 8 (text ".intel_syntax")
$+$ (vcat $ map ppr fs)
{- The code of each function is split up into a collection of blocks,
beginning with a LABEL, ending with a JMP or RET.
No LABEL within a block.
-}
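{- For illustration, an assumed sketch (not from the original source) that uses
   only the LABEL/JMP/RET constructors referenced below:

     insertIBlocks [LABEL "f", JMP "L1", LABEL "L1", RET] Map.empty

   yields a map with two blocks: "f" |-> [JMP "L1"] and "L1" |-> [RET].
-}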
type IBlock = [Instr]
type IBlockMap = Map Label IBlock
data IBlockFrame = IBlockFrame
String -- name of the function
FrameData -- list of parameters
IBlockMap -- map from labels to blocks (contains at least first label)
type FrameMap = Map Label IBlockFrame
instance Pretty IBlockFrame where
ppr (IBlockFrame f dat blocks)
| Just (b1, rest) <- splitMap f blocks =
nest 8 (text ".global" <+> nest 8 (text f))
$+$ nest 8 (text ".type" <+> nest 8 (text f <> comma <+> text "@function"))
$+$ text f <> colon
$+$ nest 8 (text "#args" <+> (hsep $ punctuate comma $ map ppr (parameters dat)))
$+$ ppr b1
$+$ ppr (Map.foldrWithKey (\ k rs ss -> LABEL k : rs ++ ss) [] rest)
instance Pretty [IBlockFrame] where
ppr fs = vcat $ map ppr fs
-- divide a frame into blocks;
-- the first block inherits its label from the frame
iBlocksFrame :: IntelFrame -> IBlockFrame
iBlocksFrame (Frame f dat ss) =
(IBlockFrame f dat (insertIBlocks (LABEL f : ss) Map.empty))
-- insertBlocks (ss, map_acc) = map
insertIBlocks :: [Instr] -> IBlockMap -> IBlockMap
insertIBlocks [] acc = acc
insertIBlocks (LABEL l : ss) acc =
let (block, rest) = chopOffIBlock ss
in insertIBlocks rest (Map.insert l block acc)
insertIBlocks ss acc = insertIBlocks (LABEL ("Ldummy" ++ show (length ss)) : ss) acc
-- chopOffIBlock ss = (block, rest)
-- Precondition: ss is the instruction stream following a LABEL; block is the
-- prefix up to and including its terminating jump/ret (an artificial JMP is
-- inserted if the next LABEL is reached first), rest is what remains.
chopOffIBlock :: [Instr] -> ([Instr], [Instr])
chopOffIBlock ss =
let (block, rest) = chopOffIBlock' ss []
in (reverse block, rest)
-- chopOffIBlock' ss block_acc = (rev_block, ss_rest)
chopOffIBlock' :: [Instr] -> [Instr] -> ([Instr], [Instr])
chopOffIBlock' [] acc = error "Reached the end of a block that does not end with a label or a jump. The last instruction must be a jump, typically 'ret'."
chopOffIBlock' ss@(LABEL l : _) acc = (JMP l : acc, ss) -- insert artificial jump
chopOffIBlock' (s@RET : ss) acc = (s : acc, ss)
chopOffIBlock' (s@(JMP _) : ss) acc = (s : acc, ss)
-- conditional jump not the end of a block
-- chopOffIBlock' (s@(J _ _) : ss) acc = (s : acc, ss)
chopOffIBlock' (s : ss) acc = chopOffIBlock' ss (s : acc)
| cirquit/hjc | src/Risc386Clone/FrameIntel.hs | mit | 3,356 | 0 | 20 | 723 | 1,003 | 523 | 480 | 58 | 1 |
{--
Copyright (c) 2014 Gorka Suárez García
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--}
{- ***************************************************************
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
*************************************************************** -}
module Problem0010 (main) where
squareRoot :: (Integral a) => a -> a
squareRoot x = truncate $ sqrt $ fromIntegral x
multipleOf :: (Integral a) => a -> a -> Bool
multipleOf a b = (mod a b) == 0
isPrime :: (Integral a) => a -> Bool
isPrime 2 = True
isPrime n = not $ or [multipleOf n x | x <- 2:[3,5..upperLimit]]
where upperLimit = squareRoot n + 1
sumPrimes :: Integer -> Integer
sumPrimes limit = sum $ takeWhile (< limit) primesList
where primesList = [x | x <- [2..], isPrime x]
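-- A quick check of the example in the header comment (usage sketch, not part
-- of the original solution): sumPrimes 10 evaluates to 17, i.e. 2 + 3 + 5 + 7.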
main = do putStr "The sum of all the primes below two million "
putStrLn $ "is " ++ show (sumPrimes 2000000) ++ "." | gorkinovich/Haskell | Problems/Problem0010a.hs | mit | 1,943 | 0 | 11 | 359 | 268 | 140 | 128 | 14 | 1 |
module Absyn.ValueOccursCheck where
import Absyn.Base
import Absyn.Meta
import Typing.Env
import Typing.TypeError
import Util.Error
import Control.Monad (mapM_, when)
class ValueOccursCheck b where
valueOccursCheck :: String -> AST b String c -> Tc Bool
instance ValueOccursCheck BaseExpr where
-- TRIVIAL
valueOccursCheck var (_ :< expr) = do
valueOccursCheckExpr var expr
return False
valueOccursCheckExpr :: String -> ASTNode BaseExpr String c -> Tc ()
valueOccursCheckExpr _ VoidExpr = return ()
valueOccursCheckExpr _ (Literal _) = return ()
valueOccursCheckExpr _ (FnExpr _) = return ()
valueOccursCheckExpr var (ParenthesizedExpr x) = do
valueOccursCheck var x
return ()
valueOccursCheckExpr var (Ident names) =
check var (last names)
valueOccursCheckExpr var (Match { expr, cases }) = do
valueOccursCheck var expr
mapM_ (valueOccursCheckCase var) cases
valueOccursCheckExpr var (If { ifCond, ifBody, ifElseBody }) = do
valueOccursCheck var ifCond
_ <- valueOccursCheck var ifBody
_ <- valueOccursCheck var ifElseBody
return ()
valueOccursCheckExpr var (Call { callee, args }) = do
valueOccursCheck var callee
_ <- mapValueOccursCheck var args
return ()
valueOccursCheckExpr var (BinOp { lhs, op, rhs }) = do
check op var
valueOccursCheck var lhs
valueOccursCheck var rhs
return ()
valueOccursCheckExpr var (Record x) = do
_ <- mapValueOccursCheck var $ map snd x
return ()
valueOccursCheckExpr var (List _ items) = do
_ <- mapValueOccursCheck var items
return ()
valueOccursCheckExpr var (FieldAccess obj _) = do
valueOccursCheck var obj
return ()
valueOccursCheckExpr var (TypeCall callee _) = do
valueOccursCheck var callee
return ()
valueOccursCheckExpr var (Negate _ expr) = do
valueOccursCheck var expr
return ()
instance ValueOccursCheck BaseStmt where
valueOccursCheck var (_ :< Decl x) =
valueOccursCheck var x
valueOccursCheck var (_ :< Expr x) =
valueOccursCheck var x
instance ValueOccursCheck BaseCodeBlock where
valueOccursCheck v (_ :< CodeBlock stmts) =
mapValueOccursCheck v stmts
instance ValueOccursCheck BaseDecl where
valueOccursCheck var (_ :< FnStmt (_ :< Function { name })) =
return (var == name)
valueOccursCheck var (_ :< Enum name _ _ ) =
return (var == name)
valueOccursCheck var (_ :< Class { className }) =
return (var == className)
valueOccursCheck var (_ :< Operator { opName }) =
return (var == opName)
valueOccursCheck var (_ :< Interface { intfName }) =
return (var == intfName)
valueOccursCheck var (_ :< Let (name, _) expr) =
if var == name
then return True
else valueOccursCheck var expr
valueOccursCheck _ (_ :< Implementation {}) =
return False
valueOccursCheck _ (_ :< TypeAlias {}) =
return False
valueOccursCheckCase :: String -> AST BaseCase String c -> Tc ()
valueOccursCheckCase var (_ :< Case { pattern, caseBody }) = do
shadowed <- valueOccursCheckPattern var pattern
if not shadowed
then valueOccursCheck var caseBody >> return ()
else return ()
valueOccursCheckPattern :: String -> AST BasePattern String c -> Tc Bool
valueOccursCheckPattern _ (_ :< PatDefault) = return False
valueOccursCheckPattern _ (_ :< PatLiteral _) = return False
valueOccursCheckPattern var (_ :< PatVar x) =
return (var == x)
valueOccursCheckPattern var (_ :< PatRecord x) =
or <$> mapM (valueOccursCheckPattern var . snd) x
valueOccursCheckPattern var (_ :< PatList items rest) = do
sItems <- or <$> mapM (valueOccursCheckPattern var) items
sRest <- valueOccursCheckPatternRest var rest
return (sItems || sRest)
valueOccursCheckPattern var (_ :< PatCtor _ args) =
or <$> mapM (valueOccursCheckPattern var) args
valueOccursCheckPatternRest :: String -> PatternRest -> Tc Bool
valueOccursCheckPatternRest _ NoRest = return False
valueOccursCheckPatternRest _ DiscardRest = return False
valueOccursCheckPatternRest var (NamedRest rest) =
return (var == rest)
mapValueOccursCheck :: ValueOccursCheck e => String -> [AST e String c] -> Tc Bool
mapValueOccursCheck _ [] =
return False
mapValueOccursCheck var (x : xs) = do
shadowed <- valueOccursCheck var x
if shadowed
then return True
else mapValueOccursCheck var xs
check :: String -> String -> Tc ()
check v1 v2 =
when (v1 == v2) $ throwError (VariableUsedDuringInitialization v1)
| tadeuzagallo/verve-lang | src/Absyn/ValueOccursCheck.hs | mit | 4,395 | 0 | 13 | 852 | 1,579 | 761 | 818 | -1 | -1 |
module Zwerg.UI.Port where
import Zwerg.Prelude
import Zwerg.Data.Position
import Zwerg.UI.Menu
import qualified Data.List.NonEmpty as NE (repeat, zip)
data Port
= MainScreen
| MainMenu (Menu ())
| ChooseTarget
| LoadingScreen
| ViewEquipment
| ViewInventory (MenuGroupSelect UUID)
| ExamineTiles Position
| DeathScreen Text
| ExitScreen
deriving stock Generic
deriving anyclass Binary
instance ZDefault Port where
zDefault = initMainMenu
type Portal = NonEmpty Port
initMainMenu :: Port
initMainMenu =
MainMenu $ makeMenu $
NE.zip ("new game" :| ["load game", "options", "about", "exit"]) $ NE.repeat ()
| zmeadows/zwerg | lib/Zwerg/UI/Port.hs | mit | 660 | 0 | 10 | 136 | 179 | 106 | 73 | -1 | -1 |
{-# htermination (esEsTup0 :: Tup0 -> Tup0 -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup0 = Tup0 ;
esEsTup0 :: Tup0 -> Tup0 -> MyBool
esEsTup0 Tup0 Tup0 = MyTrue;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/EQEQ_1.hs | mit | 247 | 0 | 8 | 60 | 74 | 41 | 33 | 6 | 1 |
{-|
Module : MillerRabin
Description : Module is a wrapper for all functions required for proper MillerRabin primality test.
Copyright : (c) Grzegorz Jasinski, Wojtek Chmielarz
License : MIT
The module provides a service for checking whether a given number is prime.
-}
module MillerRabin
(
isPrime
)
where
import System.Random
import System.IO.Unsafe
-- |The 'isPrime' function is a wrapper for Miller-Rabin
isPrime :: Integer -> Bool
isPrime n = unsafePerformIO (testMillerRabin 100 n)
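-- A minimal usage sketch (assumed, not part of the original module); for
-- numbers this small the witness set chosen below is deterministic:
--
-- >>> isPrime 7
-- True
-- >>> isPrime 15
-- False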
-- |The 'testMillerRabin' function tests the primality of a number using the Miller-Rabin algorithm
testMillerRabin :: Int -> Integer -> IO Bool
testMillerRabin k n
| even n = return (n == 2)
| otherwise = do ws <- witnesses k n
return $ and [test n (pred n) evens (head odds) a | a <- ws]
where
(evens,odds) = span even (iterate (`div` 2) (pred n))
-- |The 'test' function runs a single Miller-Rabin round; it returns True when the witness does not prove the number composite
test :: Integral nat => nat -> nat -> [nat] -> nat -> nat -> Bool
test n n_1 evens d a = x `elem` [1,n_1] || n_1 `elem` powers
where
x = powerMod n a d
powers = map (powerMod n a) evens
-- |The 'witnesses' function returns witnesses for a number which are sufficient to check its primality
witnesses :: (Num a, Ord a, Random a) => Int -> a -> IO [a]
witnesses k n
| n < 2047 = return [2]
| n < 1373653 = return [2,3]
| n < 9080191 = return [31,73]
| n < 25326001 = return [2,3,5]
| n < 3215031751 = return [2,3,5,7]
| n < 4759123141 = return [2,7,61]
| n < 1122004669633 = return [2,13,21,1662803]
| n < 2152302898747 = return [2,3,5,6,11]
| n < 3474749660383 = return [2,3,5,7,11,13]
| n < 341550071728321 = return [2,3,5,7,11,13,17]
| otherwise = do g <- newStdGen
return $ take k (randomRs (2, n - 1) g)
-- |The 'powerMod' function computes x^n `mod` m
powerMod :: Integral nat => nat -> nat -> nat -> nat
powerMod m x n = f (n - 1) x x `rem` m
where
f d a y = if d==0 then y else g d a y
g i b y | even i = g (i `quot` 2) (b*b `rem` m) y
| otherwise = f (i-1) b (b*y `rem` m)
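-- Worked example (sketch): powerMod 15 2 7 == 8, since 2^7 = 128 and
-- 128 `mod` 15 == 8.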
| gjasinski/rsa-haskell | src/MillerRabin.hs | mit | 2,165 | 0 | 13 | 632 | 847 | 443 | 404 | 36 | 2 |