Columns (name: type, value range):
  code: string (length 5 to 1.03M)
  repo_name: string (length 5 to 90)
  path: string (length 4 to 158)
  license: string (15 distinct values)
  size: int64 (5 to 1.03M)
  n_ast_errors: int64 (0 to 53.9k)
  ast_max_depth: int64 (2 to 4.17k)
  n_whitespaces: int64 (0 to 365k)
  n_ast_nodes: int64 (3 to 317k)
  n_ast_terminals: int64 (1 to 171k)
  n_ast_nonterminals: int64 (1 to 146k)
  loc: int64 (-1 to 37.3k)
  cycloplexity: int64 (-1 to 1.31k)
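Each record that follows lists the code field first, then the remaining twelve fields in the column order above. As a reading aid, here is a minimal Haskell sketch of a record type mirroring these columns; the field names and the concrete String/Int types are illustrative assumptions, not part of the dataset itself:

-- Illustrative only: a plain record mirroring the thirteen columns above.
-- Field names and concrete types (String/Int) are assumptions.
module CodeRecord where

data CodeRecord = CodeRecord
  { code             :: String  -- full source text of the sample
  , repoName         :: String  -- e.g. "flocknroll/haskell_cis194"
  , path             :: String  -- file path within the repository
  , license          :: String  -- one of 15 license classes, e.g. "mit"
  , size             :: Int
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int     -- equals n_ast_terminals + n_ast_nonterminals in the rows shown
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int     -- -1 in some records
  , cycloplexity     :: Int     -- -1 in some records
  } deriving (Show, Eq)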
import Data.Bool fun1 :: [Integer] -> Integer fun1 [] = 1 fun1 (x:xs) | even x = (x - 2) * fun1 xs | otherwise = fun1 xs fun1' :: [Integer] -> Integer fun1' l = product . map (\x -> x-2) . filter even $ l -------------------------------------------------------------------- fun2 :: Integer -> Integer fun2 1 = 0 fun2 n | even n = n + fun2 (n `div` 2) | otherwise = fun2 (3 * n + 1) calcNext :: Integer -> Integer calcNext n = if even n then n `div` 2 else 3 * n + 1 fun2' :: Integer -> Integer fun2' n = sum . filter even . takeWhile (>1) . iterate calcNext $ n -------------------------------------------------------------------- data Tree a = Leaf | Node Integer (Tree a) a (Tree a) deriving (Show, Eq) getHeight :: Tree a -> Integer getHeight Leaf = -1 getHeight (Node h _ _ _) = h insert :: Tree a -> a -> Tree a insert Leaf x = Node 0 Leaf x Leaf insert (Node _ l v r) x | hl < hr = let tl = insert l x nh = (if getHeight tl > hr then getHeight tl else hr) + 1 in Node nh tl v r | otherwise = let tr = insert r x nh = (if getHeight tr > hl then getHeight tr else hl) + 1 in Node nh l v tr where hl = getHeight l hr = getHeight r foldTree :: [a] -> Tree a foldTree = foldl insert Leaf ----------------------------------------------------------------------------------------- xor :: [Bool] -> Bool xor = foldl (\x a -> (x && not a) || (not x && a)) False ----------------------------------------------------------------------------------------- map' :: (a -> b) -> [a] -> [b] map' f = foldr (\x a -> f x : a) [] ----------------------------------------------------------------------------------------- myFoldl :: (a -> b -> a) -> a -> [b] -> a myFoldl f base xs = foldr (\a b -> f b a) base (reverse xs) ----------------------------------------------------------------------------------------- cartProd :: [a] -> [b] -> [(a, b)] cartProd xs ys = [(x,y) | x <- xs, y <- ys] excl :: Integer -> [Integer] excl n = let calc i j = i + j + 2 * i * j in map (\(i,j) -> calc i j) . filter (\(i,j) -> calc i j <= n) $ cartProd [1..floor (sqrt (fromIntegral n / 2))] [1..n] sieveSundaram :: Integer -> [Integer] sieveSundaram n = let ex = excl n in map (\x -> 2 * x + 1) . filter (\i -> not (i `elem` ex)) $ [1..n]
repo_name: flocknroll/haskell_cis194
path: H4/h4.hs
license: mit
size: 2,555
n_ast_errors: 0
ast_max_depth: 15
n_whitespaces: 779
n_ast_nodes: 1,090
n_ast_terminals: 563
n_ast_nonterminals: 527
loc: 51
cycloplexity: 3
module Zwerg.Prelude ( show , module EXPORTED ) where import Prelude as EXPORTED hiding (id, (.), show, Monoid(..)) import qualified Prelude as P (show) import Zwerg.Data.UUID as EXPORTED import Zwerg.Data.ZColor as EXPORTED import Zwerg.Prelude.Class as EXPORTED import Zwerg.Prelude.Primitives as EXPORTED import Control.Arrow as EXPORTED hiding ((<+>)) import Control.Category as EXPORTED import Control.Monad.Random.Class as EXPORTED hiding (fromList) import Control.Monad.Reader as EXPORTED import Control.Monad.State.Strict as EXPORTED import Data.Bifunctor as EXPORTED (bimap) import Data.Binary as EXPORTED (Binary) import Data.List.NonEmpty as EXPORTED (NonEmpty(..),) import Data.Maybe as EXPORTED (catMaybes, mapMaybe) import Data.Semigroup as EXPORTED import Data.String.Conv (StringConv, toS) import Data.Text as EXPORTED (Text, pack, unpack, singleton, append) import Data.Traversable as EXPORTED (forM) import Data.Tuple.Sequence as EXPORTED (sequenceT) import GHC.Exts as EXPORTED (IsList(..)) import GHC.Generics as EXPORTED (Generic) import GHC.Stack as EXPORTED (HasCallStack, CallStack, callStack, prettyCallStack) {-# SPECIALIZE show :: Show a => a -> Text #-} {-# SPECIALIZE show :: Show a => a -> String #-} show :: (Show a, StringConv String b) => a -> b show x = toS (P.show x)
repo_name: zmeadows/zwerg
path: lib/Zwerg/Prelude.hs
license: mit
size: 1,318
n_ast_errors: 0
ast_max_depth: 8
n_whitespaces: 184
n_ast_nodes: 361
n_ast_terminals: 245
n_ast_nonterminals: 116
loc: -1
cycloplexity: -1
{-# htermination (>) :: () -> () -> Bool #-}
repo_name: ComputationWithBoundedResources/ara-inference
path: doc/tpdb_trs/Haskell/full_haskell/Prelude_GT_2.hs
license: mit
size: 45
n_ast_errors: 0
ast_max_depth: 2
n_whitespaces: 10
n_ast_nodes: 3
n_ast_terminals: 2
n_ast_nonterminals: 1
loc: 1
cycloplexity: 0
----------------------------------------------------------------------------- -- | -- Module : InsertPlaces -- Copyright : (c) Artem Chirkin -- License : MIT -- -- Maintainer : Artem Chirkin <[email protected]> -- Stability : experimental -- -- Import places from a tab-separated text file provided by GeoNames -- ----------------------------------------------------------------------------- module InsertPlaces ( insertPlaces ) where import Prelude hiding (id) import Data.Conduit import qualified Data.Conduit.List as CL import Database.Persist import Control.Monad.Trans.Reader (ReaderT) import Control.Monad.IO.Class (liftIO) import Data.Text (Text) import qualified Data.Text as Text import Data.Map (Map) import qualified Data.Map as Map -- The data format is tab-delimited text in utf8 encoding. -- -- I plan to load one of these: -- allCountries.zip : all countries combined in one file, see 'geoname' table for columns -- cities1000.zip : all cities with a population > 1000 or seats of adm div (ca 80.000), see 'geoname' table for columns -- cities5000.zip : all cities with a population > 5000 or PPLA (ca 40.000), see 'geoname' table for columns -- cities15000.zip : all cities with a population > 15000 or capitals (ca 20.000), see 'geoname' table for columns import Model type DBEnv = ReaderT (PersistEntityBackend Place) IO insertPlaces :: Map Text CountryId -> Source DBEnv Text -> DBEnv () insertPlaces countries src = do -- clean table first deleteWhere ([] :: [Filter Place]) -- run line-by-line processing, discarding all values src $$ dst where fetchPlace = parsePlace (lookupCountry countries) react :: Either String Place -> DBEnv () react (Left err) = liftIO $ putStrLn err react (Right place) = insert_ place processLine :: Text -> DBEnv () processLine = react . fetchPlace dst :: Sink Text DBEnv () dst = CL.mapM_ processLine parsePlace :: (Text -> Either String CountryId) -> Text -> Either String Place parsePlace fetchCountry t = if n /= 19 then Left $ "A row has incorrect number of fields " ++ "(" ++ show n ++ ")" ++ ":\n\t" ++ Text.unpack t else case tcountryId of Right countryId -> Right Place { placeGeonameId = read . Text.unpack $ fields !! 0 , placeCountry = countryId , placeName = fields !! 1 , placeAsciiName = fields !! 2 , placeAlternateNames = fields !! 3 , placeLatitude = read . Text.unpack $ fields !! 4 , placeLongitude = read . Text.unpack $ fields !! 5 } Left err -> Left $ err ++ "\n\t" ++ Text.unpack t where fields = Text.split ('\t' ==) t n = length fields tcountryId = fetchCountry $ fields !! 
8 ---- | The main 'geoname' table has the following fields : --header :: [Text] --header = -- [ "geonameid" -- 0 integer id of record in geonames database -- , "name" -- 1 name of geographical point (utf8) varchar(200) -- , "asciiname" -- 2 name of geographical point in plain ascii characters, varchar(200) -- , "alternatenames" -- 3 alternatenames, comma separated, ascii names automatically transliterated, convenience attribute from alternatename table, varchar(10000) -- , "latitude" -- 4 latitude in decimal degrees (wgs84) -- , "longitude" -- 5 longitude in decimal degrees (wgs84) -- , "feature class" -- 6 see http://www.geonames.org/export/codes.html, char(1) -- , "feature code" -- 7 see http://www.geonames.org/export/codes.html, varchar(10) -- , "country code" -- 8 ISO-3166 2-letter country code, 2 characters -- , "cc2" -- 9 alternate country codes, comma separated, ISO-3166 2-letter country code, 200 characters -- , "admin1 code" -- 10 fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20) -- , "admin2 code" -- 11 code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80) -- , "admin3 code" -- 12 code for third level administrative division, varchar(20) -- , "admin4 code" -- 13 code for fourth level administrative division, varchar(20) -- , "population" -- 14 bigint (8 byte int) -- , "elevation" -- 15 in meters, integer -- , "dem" -- 16 digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat. -- , "timezone" -- 17 the timezone id (see file timeZone.txt) varchar(40) -- , "modification date" -- 18 date of last modification in yyyy-MM-dd format -- ] lookupCountry :: Map Text CountryId -> Text -> Either String CountryId lookupCountry m t = case Map.lookup t m of Nothing -> Left $ "Failed to lookup ISO country code: " ++ Text.unpack t Just c -> Right c
repo_name: achirkin/mooc-images
path: locations/InsertPlaces.hs
license: mit
size: 5,041
n_ast_errors: 0
ast_max_depth: 15
n_whitespaces: 1,200
n_ast_nodes: 684
n_ast_terminals: 383
n_ast_nonterminals: 301
loc: 52
cycloplexity: 3
-- | -- Module: BigE.TextRenderer.Text -- Copyright: (c) 2017 Patrik Sandahl -- Licence: MIT -- Maintainer: Patrik Sandahl <[email protected]> -- Stability: experimental -- Portability: portable module BigE.TextRenderer.Text ( Text (..) , init , update , enable , disable , delete , render ) where import BigE.Attribute.Vert_P_Tx (Vertex (..)) import BigE.Mesh (Mesh) import qualified BigE.Mesh as Mesh import BigE.TextRenderer.Font (Font (..)) import BigE.TextRenderer.Types (Character (..), Common (..)) import BigE.Types (BufferUsage (..), Primitive (..)) import Control.Monad.IO.Class (MonadIO) import Data.Char (ord) import qualified Data.HashMap.Strict as HashMap import Data.List (foldl') import Data.Vector.Storable (Vector) import qualified Data.Vector.Storable as Vector import Graphics.GL (GLfloat, GLuint) import Linear (V2 (..), V3 (..)) import Prelude hiding (init) -- | Representation of a drawable piece of text. data Text = Text { font :: !Font , mesh :: !Mesh , gridWidth :: !GLfloat , string :: !String } deriving Show type Cursor = GLfloat type PixToCoord = Int -> GLfloat -- | Initialize the 'Text' using a 'Font' and a string. init :: MonadIO m => Font -> String -> m Text init fnt str = do let verts = mkCharacterBoxVertices fnt str indices = mkIndices $ length str gridWidth' = getGridWidth verts mesh' <- Mesh.fromVector DynamicDraw verts indices return Text { font = fnt, mesh = mesh', gridWidth = gridWidth', string = str } -- | Update the 'Text' with a new string. -- NOTE: The new string must not be longer than the original string. update :: MonadIO m => String -> Text -> m Text update str text = do let verts = mkCharacterBoxVertices (font text) str indices = mkIndices $ length str gridWidth' = getGridWidth verts mesh' <- Mesh.update verts indices (mesh text) return text { mesh = mesh', gridWidth = gridWidth', string = str } -- | Enable the 'Text'. I.e. enable the VAO for the text's mesh. enable :: MonadIO m => Text -> m () enable = Mesh.enable . mesh -- | Disable the 'Text'. I.e. disable the currently bound VAO disable :: MonadIO m => m () disable = Mesh.disable -- | Delete the 'Text'. I.e. delete the text's mesh. delete :: MonadIO m => Text -> m () delete = Mesh.delete . mesh -- | Render the 'Text'. render :: MonadIO m => Text -> m () render = Mesh.render Triangles . mesh mkCharacterBoxVertices :: Font -> String -> Vector Vertex mkCharacterBoxVertices fnt str = let vert = pixToCoord (lineHeight $ common fnt) tex = pixToCoord (scaleW $ common fnt) -- Assume square texture. chars = characters fnt in snd $ foldl' (\(cursor, vec) ascii -> case HashMap.lookup (ord ascii) chars of -- Character found. Make a box and advance the -- the cursor. Just char -> let box = mkCharacterBox cursor vert tex char cursor' = cursor + vert (xAdvance char) vec' = Vector.concat [vec, box] in (cursor', vec') -- No valid character found. Just skip. Nothing -> (cursor, vec) ) (0, Vector.empty) str -- | Construct one single character box. mkCharacterBox :: Cursor -> PixToCoord -> PixToCoord -> Character -> Vector Vertex mkCharacterBox cursor vert tex char = -- Start calculate the coordinates for the square surrounding the -- character. Vertice coords are normalized to the line height which -- is interpreted as the length one. let xStart = cursor + vert (xOffset char) xStop = xStart + vert (width char) yTop = negate (vert (yOffset char)) yBottom = yTop - vert (height char) -- Then calculate the texture coordinates. Texture coords are normalized -- to the dimensions of the texture atlas for the font. 
xStartTex = tex (x char) xStopTex = xStartTex + tex (width char) yTopTex = tex (y char) yBottomTex = yTopTex + tex (height char) in Vector.fromList [ -- Upper right corner. Vertex { position = V3 xStop yTop 0 , texCoord = V2 xStopTex yTopTex } -- Upper left corner. , Vertex { position = V3 xStart yTop 0 , texCoord = V2 xStartTex yTopTex } -- Lower left corner. , Vertex { position = V3 xStart yBottom 0 , texCoord = V2 xStartTex yBottomTex } -- Lower right corner. , Vertex { position = V3 xStop yBottom 0 , texCoord = V2 xStopTex yBottomTex } ] -- | Make vertex indices for the specified number of boxes. mkIndices :: Int -> Vector GLuint mkIndices 0 = Vector.empty mkIndices num = Vector.fromList $ concatMap indicesFor [b * 4 | b <- [0 .. fromIntegral num - 1]] where indicesFor b = [ b, b + 1, b + 2, b, b + 2, b + 3 ] -- | Transform a pixel length to a coordinate. To help a unit value, which -- represents the coordinate length of 1, is provided. pixToCoord :: Int -> Int -> GLfloat pixToCoord unit len = fromIntegral len / fromIntegral unit -- | Get the biggest x-value from the last character box. It represents the -- grid width of the text. getGridWidth :: Vector Vertex -> GLfloat getGridWidth vec | not (Vector.null vec) = let Vertex {position = V3 outmostX _ _} = Vector.last vec in outmostX | otherwise = 0
repo_name: psandahl/big-engine
path: src/BigE/TextRenderer/Text.hs
license: mit
size: 6,019
n_ast_errors: 0
ast_max_depth: 21
n_whitespaces: 1,991
n_ast_nodes: 1,413
n_ast_terminals: 773
n_ast_nonterminals: 640
loc: 108
cycloplexity: 2
{-# LANGUAGE OverloadedStrings #-} import qualified Data.Text as T import qualified Blaze.ByteString.Builder as Blaze import Control.Exception (finally) import Data.Array.IO import qualified Data.ByteString.UTF8 as BU import Data.Heap (singleton, MinPrioHeap, view, insert) import qualified Control.Concurrent as CC import qualified Text.Read as TR import Network.Wai import Network.Wai.Handler.Warp import qualified Network.HTTP.Types as HTTP import qualified Network.WebSockets as WS import qualified Network.Wai.Handler.WebSockets as WWS import qualified System.Environment as Env main :: IO () main = do env <- Env.getEnvironment let port = maybe 9000 read $ lookup "PORT" env putStrLn $ "Listening on port " ++ show port page <- readFile "qs.html" let app _ respond = respond $ wrap page app' = WWS.websocketsOr WS.defaultConnectionOptions ws app run port app' wrap :: String -> Response wrap page = responseBuilder HTTP.status200 [ ("Content-Type", "text/html") ] $ Blaze.copyByteString $ BU.fromString page ws :: WS.ServerApp ws pending = do conn <- WS.acceptRequest pending print ("accept connection: " ++ host pending) msg <- WS.receiveData conn case TR.readMaybe (T.unpack msg) of Just xs -> do arr <- newListArray (0, length xs - 1) xs let disconnect = print ("disconnect: " ++ host pending) -- todo: deallocate array manually if possible pushWS t = WS.sendTextData conn (T.pack t) flip finally disconnect $ qs pushWS arr Nothing -> return () host :: WS.PendingConnection -> String host pending = maybe "???" BU.toString $ lookup "Host" headers where headers = WS.requestHeaders $ WS.pendingRequest pending type Json = String pivotJson :: Int -> Int -> Int -> Json pivotJson pivot st end = "{\"type\": \"partition\", \"pivot\": " ++ show pivot ++ ", \"st\": " ++ show st ++ ", \"end\": " ++ show end ++ "}" swapJson :: Int -> Int -> Json swapJson i j = "{\"type\": \"swap\", \"i\": " ++ show i ++ ", \"j\": " ++ show j ++ "}" type Interval = (Int, Int) type PrintF = String -> IO () qs :: PrintF -> IOArray Int Int -> IO () qs pf arr = do (st, end) <- getBounds arr let heap = singleton (end - st, (st, end)) :: MinPrioHeap Int Interval loop heap where loop h = case view h of (Just ((_, interval), h')) -> do intervals <- partition pf arr interval let h'' = foldl insert' h' intervals loop h'' Nothing -> return () insert' h (a,b) = insert (b-a, (a,b)) h partition :: PrintF -> IOArray Int Int -> Interval -> IO [Interval] partition pf arr (st, end) = do pivot <- readArray arr st pf $ pivotJson st st end loop (st + 1) (st + 1) pivot where swap i0 i1 | i0 /= i1 = do CC.threadDelay (20 * 1000) pf $ swapJson i0 i1 v0 <- readArray arr i0 v1 <- readArray arr i1 writeArray arr i0 v1 writeArray arr i1 v0 | otherwise = return () loop i j p | i <= end = do x <- readArray arr i if x < p then swap i j >> loop (i+1) (j+1) p else loop (i+1) j p | otherwise = do swap st (j - 1) return $ filter (uncurry (<)) [(st, j - 2), (j, end)]
repo_name: pkinsky/qs-ws
path: ws.hs
license: mit
size: 3,546
n_ast_errors: 0
ast_max_depth: 18
n_whitespaces: 1,126
n_ast_nodes: 1,244
n_ast_terminals: 630
n_ast_nonterminals: 614
loc: 79
cycloplexity: 2
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE DeriveGeneric #-} module Codec.Xlsx.Types.Internal.SharedStringTable ( -- * Main types SharedStringTable(..) , sstConstruct , sstLookupText , sstLookupRich , sstItem , sstEmpty ) where import Control.Monad import qualified Data.Map as Map import Data.Maybe (mapMaybe) import qualified Data.Set as Set import Data.Text (Text) import Data.Vector (Vector) import qualified Data.Vector as V import GHC.Generics (Generic) import Numeric.Search.Range (searchFromTo) import Safe (fromJustNote) import Text.XML import Text.XML.Cursor import Codec.Xlsx.Parser.Internal import Codec.Xlsx.Types import Codec.Xlsx.Writer.Internal -- | Shared string table -- -- A workbook can contain thousands of cells containing string (non-numeric) -- data. Furthermore this data is very likely to be repeated across many rows or -- columns. The goal of implementing a single string table that is shared across -- the workbook is to improve performance in opening and saving the file by only -- reading and writing the repetitive information once. -- -- Relevant parts of the EMCA standard (2nd edition, part 1, -- <http://www.ecma-international.org/publications/standards/Ecma-376.htm>), -- page numbers refer to the page in the PDF rather than the page number as -- printed on the page): -- -- * Section 18.4, "Shared String Table" (p. 1712) -- in particular subsection 18.4.9, "sst (Shared String Table)" (p. 1726) -- -- TODO: The @extLst@ child element is currently unsupported. newtype SharedStringTable = SharedStringTable { sstTable :: Vector XlsxText } deriving (Eq, Ord, Show, Generic) sstEmpty :: SharedStringTable sstEmpty = SharedStringTable V.empty {------------------------------------------------------------------------------- Rendering -------------------------------------------------------------------------------} instance ToDocument SharedStringTable where toDocument = documentFromElement "Shared string table generated by xlsx" . toElement "sst" -- | See @CT_Sst@, p. 3902. -- -- TODO: The @count@ and @uniqCount@ attributes are currently unsupported. instance ToElement SharedStringTable where toElement nm SharedStringTable{..} = Element { elementName = nm , elementAttributes = Map.empty , elementNodes = map (NodeElement . toElement "si") $ V.toList sstTable } {------------------------------------------------------------------------------- Parsing -------------------------------------------------------------------------------} -- | See @CT_Sst@, p. 3902 -- -- The optional attributes @count@ and @uniqCount@ are being ignored at least currently instance FromCursor SharedStringTable where fromCursor cur = do let items = cur $/ element (n_ "si") >=> fromCursor return (SharedStringTable (V.fromList items)) {------------------------------------------------------------------------------- Extract shared strings -------------------------------------------------------------------------------} -- | Construct the 'SharedStringsTable' from an existing document sstConstruct :: [Worksheet] -> SharedStringTable sstConstruct = SharedStringTable . V.fromList . uniq . concatMap goSheet where goSheet :: Worksheet -> [XlsxText] goSheet = mapMaybe (_cellValue >=> sstEntry) . Map.elems . _wsCells sstEntry :: CellValue -> Maybe XlsxText sstEntry (CellText text) = Just $ XlsxText text sstEntry (CellRich rich) = Just $ XlsxRichText rich sstEntry _ = Nothing uniq :: Ord a => [a] -> [a] uniq = Set.elems . 
Set.fromList sstLookupText :: SharedStringTable -> Text -> Int sstLookupText sst = sstLookup sst . XlsxText sstLookupRich :: SharedStringTable -> [RichTextRun] -> Int sstLookupRich sst = sstLookup sst . XlsxRichText -- | Internal generalization used by 'sstLookupText' and 'sstLookupRich' sstLookup :: SharedStringTable -> XlsxText -> Int sstLookup SharedStringTable{sstTable = shared} si = fromJustNote ("SST entry for " ++ show si ++ " not found") $ searchFromTo (\p -> shared V.! p >= si) 0 (V.length shared - 1) sstItem :: SharedStringTable -> Int -> Maybe XlsxText sstItem (SharedStringTable shared) = (V.!?) shared
repo_name: qrilka/xlsx
path: src/Codec/Xlsx/Types/Internal/SharedStringTable.hs
license: mit
size: 4,305
n_ast_errors: 0
ast_max_depth: 15
n_whitespaces: 719
n_ast_nodes: 743
n_ast_terminals: 420
n_ast_nonterminals: 323
loc: 65
cycloplexity: 3
-- | The Dreaded Monomorphism Restriction. module DMR where import Data.List plus = (+) -- If you don't use plus in functions @myInt@ and @myDouble@ below, the type -- will default to: -- -- > Integer -> Integer -> Integer -- -- Instead of what we would expect: -- -- > Num a => a -> a -> a myInt :: Double myInt = plus 3 2 myDouble :: Double myDouble = plus 3.5 2.7 -- | A first approximation of the restriction can be stated as "you cannot -- overload a function unless you provide a type signature". -- | This will compile, and show type: -- -- > f0 :: Show a => a -> String f0 x = show x -- | This won't compile: -- -- > f1 = \x -> show x -- -- > Ambiguous type variable ‘a0’ arising from a use of ‘show’ -- > prevents the constraint ‘(Show a0)’ from being solved. -- -- Adding the type signature solves the problem. f1 :: (Show a) => a -> String f1 = \x -> show x -- | The same applies to this definition: f2 :: (Show a) => a -> String f2 = show -- | However if you define @f1@ and @f2@ in @ghci@ the will compile, but show type: -- -- > () -> String -- -- What's going on? -- * Avoiding repeated computations by means of rule 1: lenLen xs = (len, len) where len = genericLength xs -- If you ask the type of the functions above it will give: -- -- > :t lenLen -- > lenLen :: Num t => [a] -> (t, t) -- > genericLength :: Num a => [b] -> a -- However the type of @lenLen@ could have been more general: lenLen' :: (Num b, Num c) => [a] -> (b, c) lenLen' xs = (genericLength xs, genericLength xs) lenLen'' :: (Num b, Num c) => [a] -> (b, c) lenLen'' xs = (len, len) where len :: Num a => a -- This is needed to make the code compile! Otherwise the -- monomorphic restriction rules will infer @len :: b@ len = genericLength xs -- if you comment the type of @len@ in @lenLen''@ above, then it won't compile -- because the @len@ will be given type @b@. It seems that by making @len :: b@ -- we're avoiding computing len twice, since @len :: Num a => a@ means that -- @len@ is no longer a constant but a function (Num a) is the implicit -- parameter. -- * Preventing ambiguitiy by means of rule 1. myParse t = n where [(n, s)] = reads t -- * Not defautable constraints -- | @mSort@ without a default type will result in the error: -- -- > Ambiguous type variable ‘a0’ arising from a use of ‘sort’ -- prevents the constraint ‘(Ord a0)’ from being solved. mSort :: (Ord a) => [a] -> [a] mSort = sort
repo_name: capitanbatata/functional-systems-in-haskell
path: fsh-exercises/src/DMR.hs
license: mit
size: 2,492
n_ast_errors: 0
ast_max_depth: 8
n_whitespaces: 571
n_ast_nodes: 362
n_ast_terminals: 222
n_ast_nonterminals: 140
loc: 24
cycloplexity: 1
{-# LANGUAGE FlexibleContexts, FlexibleInstances, UndecidableInstances #-} -- Pure Haskell implementation of standard BFGS algorithm with cubic -- line search, with the BFGS step exposed to provide access to -- intermediate Hessian estimates. -- Author: Ian Ross, for OpenBrain Ltd module Caret.BFGS ( BFGSOpts (..) , LineSearchOpts (..) , BFGS (..) , bfgs, bfgsWith , bfgsInit , bfgsStep, bfgsStepWith ) where import Numeric.LinearAlgebra import Numeric.LinearAlgebra.Data import Numeric.LinearAlgebra.Devel import Data.Default import Debug.Trace -- Type synonyms mostly just for documentation purposes... type Point = Vector Double type Gradient = Vector Double type Direction = Vector Double type Fn = Point -> Double type GradFn = Point -> Vector Double type Hessian = Matrix Double -------------------------------------------------------------------------------- -- -- NaN CHECKING -- class Nanable a where hasnan :: a -> Bool instance Nanable Double where hasnan = isNaN instance (Nanable a, Container c a) => Nanable (c a) where hasnan = not . null . (find hasnan) -------------------------------------------------------------------------------- -- -- BFGS -- -- Options for BFGS solver: point tolerance, gradient tolerance, -- maximum iterations. -- data BFGSOpts = BFGSOpts { ptol :: Double , gtol :: Double , maxiters :: Int } deriving Show instance Default BFGSOpts where def = BFGSOpts 1.0E-7 1.0E-7 200 epsilon :: Double epsilon = 3.0E-8 -- BFGS solver data to carry between steps: current point, function -- value at point, gradient at point, current direction, current -- Hessian estimate, maximum line search step. -- data BFGS = BFGS { p :: Point , fp :: Double , g :: Gradient , xi :: Direction , h :: Matrix Double , stpmax :: Double } deriving Show -- Main solver interface with default options. -- bfgs :: Fn -> GradFn -> Point -> Either String (Point, Hessian) bfgs f df p0 = bfgsWith def f df p0 (ident $ size p0) {- Collection solver state into an infinite list: useful as "take n $ -- bfgsCollect f df b0" -- bfgsCollect :: Fn -> GradFn -> BFGS -> [BFGS] bfgsCollect f df b0 = map snd $ iterate step (False,b0) where step (cvg,b) = if cvg then b else bfgsStep f df b -} -- Utility function to set up initial BFGS state for bfgsCollect. -- bfgsInit :: Fn -> GradFn -> Point -> Either String BFGS bfgsInit f df p0 = case (hasnan f0, hasnan g0) of (False, False) -> Right $ BFGS p0 f0 g0 (-g0) (ident n) (maxStep p0) errs -> Left $ nanMsg p0 (Just f0) (Just g0) where n = size p0 ; f0 = f p0 ; g0 = df p0 -- Main iteration routine: sets up initial BFGS state, then steps -- until converged or maximum iterations exceeded. -- bfgsWith :: BFGSOpts -> Fn -> GradFn -> Point -> Hessian -> Either String (Point, Hessian) bfgsWith opt@(BFGSOpts _ _ maxiters) f df p0 h0 = case (hasnan f0, hasnan g0) of (False, False) -> go 0 b0 errs -> Left $ nanMsg p0 (Just f0) (Just g0) where go iters b = if iters > maxiters then Left "maximum iterations exceeded in bfgs" else case bfgsStepWith opt f df b of Left err -> Left err Right (True, b') -> Right (p b', h b') Right (False, b') -> go (iters+1) b' f0 = f p0 ; g0 = df p0 b0 = BFGS p0 f0 g0 (-g0) h0 (maxStep p0) -- Do a BFGS step with default parameters. -- bfgsStep :: Fn -> GradFn -> BFGS -> Either String (Bool, BFGS) bfgsStep = bfgsStepWith def -- Main BFGS step routine. This is a more or less verbatim -- translation of the description in Section 10.7 of Numerical Recipes -- in C, 2nd ed. 
-- bfgsStepWith :: BFGSOpts -> Fn -> GradFn -> BFGS -> Either String (Bool, BFGS) bfgsStepWith (BFGSOpts ptol gtol _) f df (BFGS p fp g xi h stpmax) = case lineSearch f p fp g xi stpmax of Left err -> Left err Right (pn, fpn) -> if hasnan gn then Left $ nanMsg pn Nothing (Just gn) else if cvg then Right (True, BFGS pn fpn gn xi h stpmax) else Right (False, BFGS pn fpn gn xin hn stpmax) where gn = df pn ; dp = pn - p ; dg = gn - g ; hdg = h #> dg dpdg = dp `dot` dg ; dghdg = dg `dot` hdg hn = h + ((dpdg + dghdg) / dpdg^2) `scale` (dp `outer` dp) - (1/dpdg) `scale` (h <> (dg `outer` dp) + (dp `outer` dg) <> h) xin = -hn #> gn cvg = maxabsratio dp p < ptol || maxabsratio' (fpn `max` 1) gn p < gtol -- Generate error messages for NaN production in function and gradient -- calculations. -- nanMsg :: Point -> Maybe Double -> Maybe Gradient -> String nanMsg p fval grad = "NaNs produced: p = " ++ show p ++ maybe "" ((" fval = " ++) . show) fval ++ maybe "" ((" grad = " ++) . show) grad -------------------------------------------------------------------------------- -- -- LINE SEARCH -- -- Options for line search routine: point tolerance, "acceptable -- decrease" parameter. -- data LineSearchOpts = LineSearchOpts { xtol :: Double , alpha :: Double } deriving Show instance Default LineSearchOpts where def = LineSearchOpts 1.0E-7 1.0E-4 -- Maximum line search step length. -- maxStep :: Point -> Double maxStep p0 = (100 * (norm_2 p0 `max` n)) where n = fromIntegral (size p0) -- Line search with default parameters. -- lineSearch :: Fn -> Point -> Double -> Gradient -> Direction -> Double -> Either String (Point, Double) lineSearch = lineSearchWith def -- Main line search routine. This is kind of nasty to translate into -- functional form because of the switching about between the -- quadratic and cubic approximations. It works, but it could be -- prettier. -- lineSearchWith :: LineSearchOpts -> Fn -> Point -> Double -> Gradient -> Direction -> Double -> Either String (Point, Double) lineSearchWith (LineSearchOpts xtol alpha) func xold fold g pin stpmax = go 1.0 Nothing where p = if pinnorm > stpmax then (stpmax/pinnorm) `scale` pin else pin pinnorm = norm_2 pin slope = g `dot` p lammin = xtol / (maxabsratio p xold) go :: Double -> Maybe (Double,Double) -> Either String (Point,Double) go lam pass = if hasnan fnew then Left $ nanMsg xnew (Just fnew) Nothing else case check xnew fnew of Just xandf -> Right xandf Nothing -> case pass of -- First time. Nothing -> go (lambound $ quadlam fnew) $ Just (lam,fnew) -- Subsequent times. Just val2 -> case cubiclam fnew val2 of Right newlam -> go (lambound newlam) $ Just (lam,fnew) Left err -> Left err where xnew = xold + lam `scale` p fnew = func xnew -- Check for convergence or a "sufficiently large" step. check :: Vector Double -> Double -> Maybe (Vector Double,Double) check x f = if lam < lammin then Just (xold,fold) else if f <= fold + alpha * lam * slope then Just (x,f) else Nothing -- Keep step length within bounds. lambound lam' = max (0.1 * lam) (min lam' (0.5 * lam)) -- Quadratic and cubic approximations to better step -- value. 
quadlam fnew = -slope / (2 * (fnew - fold - slope)) cubiclam fnew (lam2,f2) = if a == 0 then Right (-slope / (2 * b)) else if disc < 0 then Left "Roundoff problem in lineSearch" else Right $ (-b + sqrt disc) / (3 * a) where rhs1 = fnew - fold - lam * slope rhs2 = f2 - fold - lam2 * slope a = (rhs1 / lam^2 - rhs2 / lam2^2) / (lam - lam2) b = (-lam2 * rhs1 / lam^2 + lam * rhs2 / lam2^2) / (lam - lam2) disc = b^2 - 3 * a * slope -- Utility functions for ratio testing. -- absratio :: Double -> Double -> Double absratio n d = abs n / (abs d `max` 1) absratio' :: Double -> Double -> Double -> Double absratio' scale n d = abs n / (abs d `max` 1) / scale maxabsratio :: Vector Double -> Vector Double -> Double maxabsratio n d = maxElement $ zipVectorWith absratio n d maxabsratio' :: Double -> Vector Double -> Vector Double -> Double maxabsratio' scale n d = maxElement $ zipVectorWith (absratio' scale) n d -------------------------------------------------------------------------------- -- -- TEST FUNCTIONS -- -- Simple test function. Minimum at (3,4): -- -- *BFGS> bfgs tstf1 tstgrad1 (fromList [-10,-10]) -- Right (fromList [3.0,4.0]) -- tstf1 :: Fn tstf1 p = (x-3)^2 + (y-4)^2 where [x,y] = toList p tstgrad1 :: GradFn tstgrad1 p = fromList [2*(x-3),2*(y-4)] where [x,y] = toList p -- Rosenbrock's function. Minimum at (1,1): -- -- *BFGS> bfgs tstf2 tstgrad2 (fromList [-10,-10]) -- Right (fromList [0.9999999992103538,0.9999999985219549]) tstf2 :: Fn tstf2 p = (1-x)^2 + 100*(y-x^2)^2 where [x,y] = toList p tstgrad2 :: GradFn tstgrad2 p = fromList [2*(x-1) - 400*x*(y-x^2), 200*(y-x^2)] where [x,y] = toList p -- Test function from Numerical Recipes. Minimum at (-2.0,0.89442719): -- -- *BFGS> bfgs tstfnr tstgradnr nrp0 -- Right (fromList [-1.9999999564447526,0.8944271925873616]) tstfnr :: Fn tstfnr p = 10*(y^2*(3-x)-x^2*(3+x))^2+(2+x)^2/(1+(2+x)^2) where [x,y] = toList p tstgradnr :: GradFn tstgradnr p = fromList [20*(y^2*x3m-x^2*x3p)*(-y^2-6*x-3*x^2)+ 2*x2p/(1+x2p^2)-2*x2p^3/(1+x2p^2)^2, 40*(y^2*x3m-x^2*x3p)*y*x3m] where [x,y] = toList p x3m = 3 - x x3p = 3 + x x2p = 2 + x nrp0 :: Point nrp0 = fromList [0.1,4.2] -- Test function to check NaN handling. -- -- *BFGS> bfgs nantstf nantstgrad (fromList [-10,-10]) -- Left "function application returned NaN" nantstf :: Fn nantstf p = log x + (x-3)^2 + (y-4)^2 where [x,y] = toList p nantstgrad :: GradFn nantstgrad p = fromList [1/x+2*(x-3),2*(y-4)] where [x,y] = toList p
repo_name: BeautifulDestinations/caret
path: lib/Caret/BFGS.hs
license: mit
size: 10,548
n_ast_errors: 0
ast_max_depth: 23
n_whitespaces: 3,147
n_ast_nodes: 3,256
n_ast_terminals: 1,769
n_ast_nonterminals: 1,487
loc: 171
cycloplexity: 10
module Math ( fromDegrees, toDegrees ) where fromDegrees :: Double -> Double fromDegrees angle = angle * pi / 180 toDegrees :: Double -> Double toDegrees angle = angle * 180 / pi
repo_name: bitc/crap-chopper-game
path: Math.hs
license: mit
size: 182
n_ast_errors: 0
ast_max_depth: 6
n_whitespaces: 36
n_ast_nodes: 62
n_ast_terminals: 34
n_ast_nonterminals: 28
loc: 8
cycloplexity: 1
-- Problems/Problem042Spec.hs module Problems.Problem042Spec (main, spec) where import Test.Hspec import Problems.Problem042 main :: IO() main = hspec spec spec :: Spec spec = describe "Problem 42" $ it "Should evaluate to 162" $ p42 `shouldBe` 162
repo_name: Sgoettschkes/learning
path: haskell/ProjectEuler/tests/Problems/Problem042Spec.hs
license: mit
size: 264
n_ast_errors: 0
ast_max_depth: 8
n_whitespaces: 51
n_ast_nodes: 73
n_ast_terminals: 41
n_ast_nonterminals: 32
loc: 9
cycloplexity: 1
{-# LANGUAGE OverloadedStrings #-} {- This file is part of the Haskell package thetvdb. It is subject to the license terms in the LICENSE file found in the top-level directory of this distribution and at git://pmade.com/thetvdb/LICENSE. No part of themoviedb package, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the LICENSE file. -} module Network.API.TheTVDB.Fetch (Network.API.TheTVDB.Fetch.fetch, fetchErr) where import Data.Maybe (fromMaybe) import Network.API.TheTVDB.Types.Context (posterBaseURL) import Network.API.TheTVDB.Types.Episode (Episode(..)) import Network.API.TheTVDB.Types.Season (Season(..)) import Network.API.TheTVDB.Types.Series (Series(..)) import qualified Data.Map as M import qualified Data.Text as T import qualified Network.API.TheTVDB.Types.API as API import qualified Text.XML as X import Network.API.TheTVDB.XML (content, convert, nullWrap, maybeDate) import Control.Monad (liftM) import Network.API.TheTVDB.Types.API (API(..), Query(..), Result, UniqueID, Disposition) import Text.XML.Cursor (Cursor, fromDocument, element, parent, ($|), ($/), (&/)) newtype Fetch = Fetch {fetchSeriesID :: UniqueID} instance Query Fetch where path q key lang = T.concat ["/api/", T.pack key, "/series/", T.pack . show $ fetchSeriesID q, "/all/", lang, ".xml"] params _ = [] fetch :: (API api) => api -> UniqueID -> IO Series fetch api sid = do result <- fetchErr api sid either (fail . show) return result fetchErr :: (API api) => api -> UniqueID -> IO (Result Series) fetchErr api sid = API.fetch api (Fetch sid) parse parse :: Disposition Series parse = do doc <- X.sinkDoc X.def let cursor = fromDocument doc seriesNodes = cursor $/ element "Series" if length seriesNodes == 1 then return $ parseSeries (head seriesNodes) else error "WTF" -- FIXME: parseSeries :: Cursor -> Series parseSeries c = Series { seriesID = convert c 0 "id" , seriesIMDB = content c "IMDB_ID" , seriesName = content c "SeriesName" , seriesOverview = content c "Overview" , seriesPosterURL = liftM (T.append posterBaseURL) $ nullWrap c "poster" , seasonList = parseSeasons c } parseSeasons :: Cursor -> [Season] parseSeasons c = M.elems $ foldr collapse M.empty episodes where episodes = map parseEpisode (c $| parent &/ element "Episode") collapse (s, e) m = insert (find s m) e m find s m = fromMaybe s $ M.lookup (seasonNumber s) m insert s e = M.insert (seasonNumber s) (cons s e) cons s e = s {episodeList = e : episodeList s} parseEpisode :: Cursor -> (Season, Episode) parseEpisode c = (season, episode) where season = Season { seasonID = convert c 0 "seasonid" , seasonNumber = convert c 0 "SeasonNumber" , episodeList = [] -- filled in later } episode = Episode { episodeID = convert c 0 "id" , episodeNumber = convert c 0 "EpisodeNumber" , episodeName = content c "EpisodeName" , episodeOverview = content c "Overview" , episodeDate = maybeDate c "FirstAired" }
repo_name: pjones/thetvdb
path: Network/API/TheTVDB/Fetch.hs
license: mit
size: 3,450
n_ast_errors: 0
ast_max_depth: 11
n_whitespaces: 951
n_ast_nodes: 941
n_ast_terminals: 531
n_ast_nonterminals: 410
loc: 61
cycloplexity: 2
module Prolog.Parser where import Prelude hiding (exp) import Text.Megaparsec import Text.Megaparsec.String import qualified Text.Megaparsec.Lexer as L type Prolog = [Clause] data Clause = Clause Pexp Pexps deriving (Show, Eq) type Pexps = [Pexp] data Pexp = PExp Exp | PIs Exp Exp | PLess Exp Exp | PGreat Exp Exp | PLessEq Exp Exp | PGreatEq Exp Exp | PEq Exp Exp | PNotEq Exp Exp deriving (Show, Eq) data Exp = ETerm Term | EPlus Exp Term | EMinus Exp Term deriving (Show, Eq) data Term = TFactor Factor | TMult Term Factor | TDiv Term Factor deriving (Show, Eq) data Factor = FVar Var | FNum Integer | FId Id | FPred Id Pexps | FNeg Factor | FGroup Exp deriving (Show, Eq) type Var = String type Id = String program :: Parser Prolog program = some clause clause :: Parser Clause clause = do p <- pexp space ps <- option [] (space >> string ":-" >> space >> pexps) space string "." space return (Clause p ps) pexps :: Parser Pexps pexps = sepBy1 pexp (space >> string "," >> space) pexp :: Parser Pexp pexp = try isPexp <|> try lessPexp <|> try greatPexp <|> try lessEqPexp <|> try greatEqPexp <|> try eqPexp <|> try notEqPexp <|> PExp <$> exp pexpOp :: String -> (Exp -> Exp -> Pexp) -> Parser Pexp pexpOp op con = do e1 <- exp space string op space e2 <- exp return (con e1 e2) isPexp = pexpOp "is" PIs lessPexp = pexpOp "<" PLess greatPexp = pexpOp ">" PGreat lessEqPexp = pexpOp "<=" PLessEq greatEqPexp = pexpOp ">=" PGreatEq eqPexp = pexpOp "=" PEq notEqPexp = pexpOp "/=" PNotEq exp :: Parser Exp exp = try plusExp <|> try minusExp <|> ETerm <$> term expOp :: String -> (Exp -> Term -> Exp) -> Parser Exp expOp op con = do e <- exp space string op space t <- term space return (con e t) plusExp = expOp "+" EPlus minusExp = expOp "-" EMinus term :: Parser Term term = try multTerm <|> try divTerm <|> TFactor <$> factor termOp :: String -> (Term -> Factor -> Term) -> Parser Term termOp op con = do t <- term space string op space f <- factor space return (con t f) multTerm = termOp "*" TMult divTerm = termOp "/" TDiv factor :: Parser Factor factor = try var <|> try num <|> try pterm <|> try pterm0 <|> try (FNeg <$> (string "-" *> space *> factor)) <|> try group parens = between (string "(") (string ")") num = FNum <$> L.integer -- vars start with an uppercase letter or an _ var = do s <- try upperChar <|> try (char '_') rest <- many alphaNumChar return $ FVar (s : rest) -- ids start with a lowercase letter ident = do s <- lowerChar rest <- many alphaNumChar return $ (s : rest) pterm = FPred <$> ident <*> parens pexps pterm0 = FId <$> ident group = FGroup <$> parens exp
repo_name: sdemos/prolog
path: src/Prolog/Parser.hs
license: mit
size: 3,049
n_ast_errors: 0
ast_max_depth: 13
n_whitespaces: 966
n_ast_nodes: 1,110
n_ast_terminals: 556
n_ast_nonterminals: 554
loc: 122
cycloplexity: 1
module Writer (writer, toTex, toPdf) where import Paths_ppp (version) import Emb (emb) import qualified Data.ByteString.Lazy.Char8 as BS import Data.Text (Text) import Data.Version (showVersion) import Skylighting.Styles (tango) import System.Exit (exitFailure) import System.IO (stderr) import Text.Pandoc.Class (runIOorExplode) import Text.Pandoc.Definition (Pandoc(..), MetaValue(..), lookupMeta, nullMeta) import Text.Pandoc.Options (WriterOptions(..), TopLevelDivision(..), def) import Text.Pandoc.PDF (makePDF) import Text.Pandoc.Writers.LaTeX (writeLaTeX) writer :: WriterOptions writer = def { writerTemplate = fmap (BS.unpack . BS.fromStrict) . lookup "template.tex" $ emb , writerHighlightStyle = Just tango , writerVariables = [ ("ppp-version", showVersion version) ] } extendWriterOptions :: WriterOptions -> Pandoc -> WriterOptions extendWriterOptions opts (Pandoc meta _) = let dc = lookupMeta "documentclass" meta tl = if dc == (Just $ MetaString "scrartcl") then TopLevelSection else TopLevelChapter in opts { writerTopLevelDivision = tl } toTex :: Pandoc -> IO Text toTex doc = runIOorExplode $ writeLaTeX (extendWriterOptions writer doc) doc toPdf :: Text -> IO BS.ByteString toPdf tex = do let doc = Pandoc nullMeta [] pdf <- runIOorExplode $ makePDF "xelatex" [] (\_ _ -> return tex) writer doc case pdf of Left err -> do BS.hPutStrLn stderr err exitFailure Right bs -> return bs
repo_name: Thhethssmuz/ppp
path: src/Writer.hs
license: mit
size: 1,537
n_ast_errors: 0
ast_max_depth: 13
n_whitespaces: 322
n_ast_nodes: 483
n_ast_terminals: 269
n_ast_nonterminals: 214
loc: 38
cycloplexity: 2
module CheckPalindrome where isPalindrome :: (Eq a) => [a] -> Bool isPalindrome x = x == y where y = reverse x
repo_name: rasheedja/HaskellFromFirstPrinciples
path: Chapter4/checkPalindrome.hs
license: mit
size: 112
n_ast_errors: 0
ast_max_depth: 7
n_whitespaces: 23
n_ast_nodes: 47
n_ast_terminals: 26
n_ast_nonterminals: 21
loc: 3
cycloplexity: 1
module Parser(parseExpr, parseMain, parseSequenceNoBrackets, parseWhile, parseListGen, parseUserFunc, parseFunc) where import Text.ParserCombinators.Parsec hiding (spaces) import qualified Text.ParserCombinators.Parsec as P(spaces) import Types spaces :: Parser () spaces = skipMany1 (oneOf " ,") symbol = oneOf "!@#$%^&*-+/<>=:" uSymbols = oneOf "!@#$%^&*-+/<>" functionSeparators = skipMany1 (oneOf " ") listSeparators = skipMany1 (oneOf ",;") sequenceSeparators = skipMany1 (oneOf " \n;") --sequenceSkips = skipMany (oneOf "\n ") whiteskips = skipMany (oneOf " \n\t") -- separator = string ", " parseNumber :: Parser SExpr parseNumber = do neg <- char '-' <|> (return ' ') x <- many1 digit d <- (char '.') <|> (return ' ') y <- (many1 digit) <|> (return "") return $ Number $ read $ if d == ' ' then (neg:x) else (neg:x) ++ "." ++ y --return $ Number $ read x parseString :: Parser SExpr parseString = do char '"' x <- (many $ noneOf "\"") char '"' return $ String x parseInfixFunc :: Parser SExpr parseInfixFunc = do arg1 <- (parseFuncArgs (fail "noparams")) char ' ' <|> (return ' ') funcName <- many1 symbol char ' ' <|> (return ' ') arg2 <- (parseFuncArgs (fail "noparams")) return $ ExecFunc funcName [arg1, arg2] parseAtom :: Parser SExpr parseAtom = do funcName <- many letter case funcName of "true" -> return $ Boolean True "false" -> return $ Boolean False otherwise -> return $ if (length funcName) > 0 then Atom funcName else Error "" parseFunc :: Parser SExpr parseFunc = do funcName <- many (letter <|> symbol) char ' ' args <- sepBy1 (parseFuncArgs (fail "noparams")) functionSeparators -- Previously was sepBy let args' = removeEmpties args --error $ "poes; " ++ (show $ length args') ++ "; " -- ++ (show args) if args' == [] then fail "noargs" else return $ ExecFunc funcName args' parseAtom' :: Parser SExpr parseAtom' = do --char '`' first <- letter rest <- many (letter) let atom = first:rest case atom of "true" -> return $ Boolean True "false" -> return $ Boolean False "let" -> fail "wrong turn" otherwise -> return $ Atom atom parseList :: Parser SExpr parseList = do char '[' x <- sepBy parseExpr listSeparators char ']' return $ List x parseIf :: Parser SExpr parseIf = do string "if" cond <- parseExpr whiteskips string "then" conseq <- parseExpr whiteskips string "else" alter <- parseExpr return $ If cond conseq alter parseWhile :: Parser SExpr parseWhile = do string "while" cond <- parseExpr whiteskips string "do" body <- parseExpr whiteskips string "where" whiteskips varName <- many letter whiteskips char '=' whiteskips initial <- parseExpr return $ WhileDo varName initial cond body parseSequence :: Parser SExpr parseSequence = do char '{' whiteskips seqi <- parseSequenceNoBrackets whiteskips char '}' return seqi parseSequenceNoBrackets :: Parser SExpr -- Mainly used by runeOnce to parse a main file parseSequenceNoBrackets = do --sequenceSkips <|> (return ()) x <- endBy parseExpr sequenceSeparators --sequenceSkips <|> (return ()) return $ Sequence x parseMain :: Parser [SExpr] -- Mainly used by runeOnce to parse a main file parseMain = do whiteskips --sequenceSkips <|> (return ()) x <- (try $ endBy parseExpr sequenceSeparators) <|> (return []) y <- parseExpr <|> (return Empty) let res = x ++ (removeEmpties [y]) --sequenceSkips <|> (return ()) whiteskips return res parseComment :: Parser () parseComment = do string "--" many (noneOf "\n") return () parseNothing :: Parser SExpr parseNothing = do string "" return $ Empty parseListGen :: Parser SExpr parseListGen = do char '[' start <- parseExpr 
whiteskips string ".." whiteskips end <- parseExpr <|> (return Empty) whiteskips char ';' <|> (return ' ') inc <- parseExpr <|> (return $ Number 1) char ']' return $ BindListGen start end inc parseUserFunc :: Parser SExpr parseUserFunc = do string "let" char ' ' funcName <- many (letter <|> uSymbols) char ' ' args <- getArgs' string "=" char ' ' <|> (return ' ') func <- parseExpr let args' = (removeEmptyStrings args) return $ case (length args') of 0 -> BindLet funcName func otherwise -> BindFunc funcName args' func --parseLet :: Parser SExpr --parseLet = do -- string "let" -- char ' ' -- funcName <- many letter -- string " = " -- func <- parseExpr -- return $ BindLet funcName func removeEmpties = filter (\x -> x /= Empty) removeEmptyStrings :: [String] -> [String] removeEmptyStrings [] = [] removeEmptyStrings (x:xs) | (x==" ") || (x=="") = removeEmptyStrings xs | otherwise = x:(removeEmptyStrings xs) getArgs' :: Parser [String] getArgs' = do sepBy getArg (skipMany1 (oneOf " ")) getArg :: Parser [Char] getArg = do --x <- letter xs <- many (letter) --(char ' ') <|> (return ' ') return xs parseExpr = do P.spaces --skipMany (oneOf " ") a <- do ( (try parseInfixFunc) <|> parseExpr') --P.spaces return a parseExpr' = do parseFuncArgs (try parseFunc) parseFuncArgs' add = do (try parseInfixFunc) <|> parseFuncArgs (try add) parseFuncArgs add = do P.spaces (try parseString) <|> (try parseNumber) <|> (try parseListGen) <|> (try parseList) <|> (try parseSequence) <|> (try parseUserFunc) <|> (try parseIf) <|> (try parseWhile) <|> add <|> parseAtom' -- <|> (try parseNothing)
repo_name: stefan-j/slang
path: parser.hs
license: gpl-2.0
size: 5,395
n_ast_errors: 51
ast_max_depth: 14
n_whitespaces: 1,095
n_ast_nodes: 1,903
n_ast_terminals: 915
n_ast_nonterminals: 988
loc: 188
cycloplexity: 4
module Grid where import Control.Monad -- Interval has two bounds -- data Interval a = Interval { min :: a, max :: a } type Interval a = (a, a) -- Interval for each dimension, creates a n-dimensional cube. type Domain a = [Interval a] -- A cutter cuts an interval in many type Cutter a = Interval a -> [Interval a] -- How to decide if I want to cut a domain or not type CutCondition a = Domain a -> Bool -- A grid is a number of non overlapping domains type Grid a = [Domain a] -- Creates a grid from a domain cutDomain :: Cutter a -> Domain a -> Grid a cutDomain cut dom = sequence $ map cut dom
repo_name: Vetii/Komposition
path: Grid.hs
license: gpl-2.0
size: 605
n_ast_errors: 0
ast_max_depth: 7
n_whitespaces: 133
n_ast_nodes: 130
n_ast_terminals: 76
n_ast_nonterminals: 54
loc: 9
cycloplexity: 1
{- | Module : $Header$ Description : qualify all names in the nodes of development graphs Copyright : (c) Igor Stassiy, C.Maeder DFKI Bremen 2008 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : provisional Portability : non-portable(Logic) qualify and disambiguate all names in the nodes of a development graph for OMDoc output or for writing out multiple theories for Isabelle or VSE. Note however that signature will be always be complete, i.e. imported entities will be repeated. -} module Proofs.QualifyNames (qualifyLibEnv) where import Logic.Coerce import Logic.Comorphism import Logic.ExtSign import Logic.Grothendieck import Logic.Logic import Logic.Prover import Static.DevGraph import Static.DgUtils import Static.GTheory import Static.History import Static.ComputeTheory import Common.DocUtils import Common.ExtSign import Common.Id import Common.LibName import Common.Result import Data.Graph.Inductive.Graph import Data.List import Data.Maybe import qualified Data.Map as Map import qualified Data.Set as Set import Control.Monad qualifyLibEnv :: LibEnv -> Result LibEnv qualifyLibEnv libEnv = fmap fst $ foldM (\ (le, m) ln -> do dg0 <- updateRefNodes (le, m) $ lookupDGraph ln le (dg, trm) <- qualifyDGraph ln dg0 return ( Map.insert ln (computeDGraphTheories le dg) le , Map.insert ln trm m)) (libEnv, Map.empty) $ getTopsortedLibs libEnv type RenameMap = Map.Map Int (GMorphism, GMorphism) qualifyDGraph :: LibName -> DGraph -> Result (DGraph, RenameMap) qualifyDGraph ln dg = addErrorDiag "qualification failed for" (getLibId ln) $ do let es = map (\ (_, _, lb) -> dgl_id lb) $ labEdgesDG dg unless (Set.size (Set.fromList es) == length es) $ fail $ "inkonsistent graph for library " ++ showDoc ln "" (dg1, trm) <- foldM (qualifyLabNode ln) (dg, Map.empty) $ topsortedNodes dg return (groupHistory dg (DGRule "Qualified-Names") dg1, trm) {- consider that loops are part of innDG and outDG that should not be handled twice -} properEdge :: LEdge a -> Bool properEdge (x, y, _) = x /= y properInEdges :: DGraph -> Node -> [LEdge DGLinkLab] properInEdges dg n = let pes = filter properEdge $ innDG dg n (gs, rs) = partition (liftE isGlobalDef) pes in gs ++ rs constructUnion :: Logic lid sublogics basic_spec sentence symb_items symb_map_items sign morphism symbol raw_symbol proof_tree => lid -> morphism -> [morphism] -> morphism constructUnion lid hd l = case l of [] -> hd sd : tl -> case maybeResult $ morphism_union lid hd sd of Just m -> case maybeResult $ inverse m of Just _ -> constructUnion lid m tl Nothing -> constructUnion lid sd tl Nothing -> constructUnion lid sd tl updateRefNodes :: (LibEnv, Map.Map LibName RenameMap) -> DGraph -> Result DGraph updateRefNodes (le, trm) dgraph = foldM (\ dg (n, lb) -> if isDGRef lb then do let refLn = dgn_libname lb refNode = dgn_node lb gp = Map.findWithDefault (error "updateRefNodes2") refNode $ Map.findWithDefault (error "updateRefNodes1") refLn trm refGr = lookupDGraph refLn le gth = dgn_theory $ labDG refGr refNode newlb = lb { dgn_theory = createGThWith gth startSigId startThId } (ds, is) <- createChanges dg n (properInEdges dg n) gp return $ changesDGH dg $ ds ++ SetNodeLab lb (n, newlb) : is else return dg) dgraph $ labNodesDG dgraph createChanges :: DGraph -> Node -> [LEdge DGLinkLab] -> (GMorphism, GMorphism) -> Result ([DGChange], [DGChange]) createChanges dg n inss (gm1, grm) = do let allOuts = outDG dg n nAllouts <- mapM (composeWithMorphism False gm1 grm) allOuts let (nouts, nloops) = partition properEdge nAllouts 
nAllinss <- mapM (composeWithMorphism True gm1 grm) $ nloops ++ inss return (map DeleteEdge $ allOuts ++ inss, map InsertEdge $ nAllinss ++ nouts) qualifyLabNode :: LibName -> (DGraph, RenameMap) -> LNode DGNodeLab -> Result (DGraph, RenameMap) qualifyLabNode ln (dg, mormap) (n, lb) = if isDGRef lb then return (dg, mormap) else case dgn_theory lb of G_theory lid (ExtSign sig _) _ sens _ -> do let inss = properInEdges dg n hins <- foldM (\ l (GMorphism cid _ _ mor _) -> if isIdComorphism (Comorphism cid) && language_name lid == language_name (targetLogic cid) then do hmor <- coerceMorphism (targetLogic cid) lid "qualifyLabNode" mor return $ hmor : l else return l) [] $ map (\ (_, _, ld) -> dgl_morphism ld) inss let revHins = mapMaybe (maybeResult . inverse) hins m = case revHins of [] -> ide sig hd : tl -> constructUnion lid hd tl (m1, osens) <- qualify lid (mkSimpleId $ getDGNodeName lb) (getLibId ln) m sig rm <- inverse m1 nThSens <- mapThSensValueM (map_sen lid m1) $ joinSens sens $ toThSens osens let nlb = lb { dgn_theory = G_theory lid (makeExtSign lid (cod m1)) startSigId nThSens startThId } gp = ( gEmbed $ G_morphism lid m1 startMorId , gEmbed $ G_morphism lid rm startMorId) (ds, is) <- createChanges dg n inss gp return ( changesDGH dg $ ds ++ SetNodeLab lb (n, nlb) : is , Map.insert n gp mormap) {- consider that hiding definition links have a reverse morphism and hiding theorems are also special -} composeWithMorphism :: Bool -> GMorphism -> GMorphism -> LEdge DGLinkLab -> Result (LEdge DGLinkLab) composeWithMorphism dir mor rmor (s, t, lb) = do let lmor = dgl_morphism lb inmor = comp lmor mor outmor = comp rmor lmor nlb <- addErrorDiag ((if dir then "in" else "out") ++ "-edge " ++ show (s, t, dgl_id lb)) () $ case dgl_type lb of HidingDefLink -> do nmor <- if dir then outmor else inmor return lb { dgl_morphism = nmor } HidingFreeOrCofreeThm {} -> -- adjusting the morphisms here is more tricky and omitted for now return lb _ -> do nmor <- if dir then inmor else outmor return lb { dgl_morphism = nmor } return (s, t, nlb)
repo_name: nevrenato/Hets_Fork
path: Proofs/QualifyNames.hs
license: gpl-2.0
size: 6,396
n_ast_errors: 0
ast_max_depth: 22
n_whitespaces: 1,698
n_ast_nodes: 1,997
n_ast_terminals: 1,016
n_ast_nonterminals: 981
loc: 130
cycloplexity: 6
{- emacs2nix - Generate Nix expressions for Emacs packages Copyright (C) 2018 Thomas Tuegel This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. -} {-# LANGUAGE OverloadedStrings #-} module Distribution.Bzr ( revision ) where import qualified Data.Char as Char import Data.Text ( Text ) import qualified Data.Text as Text import qualified System.IO.Streams as Stream import Exceptions import Process ( runInteractiveProcess ) -- | Find the latest revision in a Bazaar repository. revision :: FilePath -> IO Text revision tmp = do let args = [ "log", "-l1", tmp ] runInteractiveProcess "bzr" args Nothing Nothing $ \out -> do revs <- Stream.mapMaybe revno =<< (Stream.lines out >>= Stream.decodeUtf8) maybe (throwM NoRevision) pure =<< Stream.read revs where revno = (Text.takeWhile Char.isDigit . Text.strip <$>) . Text.stripPrefix "revno:"
repo_name: ttuegel/emacs2nix
path: src/Distribution/Bzr.hs
license: gpl-3.0
size: 1,436
n_ast_errors: 0
ast_max_depth: 16
n_whitespaces: 255
n_ast_nodes: 205
n_ast_terminals: 113
n_ast_nonterminals: 92
loc: 16
cycloplexity: 1
module OhBool.Reducer where import OhBool.Common import OhBool.Evaluation import OhBool.Utils import qualified Data.Map as M reduce :: Expression -> Expression reduce ex = ex where table = constructTruthTable ex eval = M.toAscList $ evaluation table grayEval = grayify eval trues = filter snd grayEval
repo_name: RomainGehrig/OhBool
path: src/OhBool/Reducer.hs
license: gpl-3.0
size: 332
n_ast_errors: 0
ast_max_depth: 8
n_whitespaces: 73
n_ast_nodes: 88
n_ast_terminals: 49
n_ast_nonterminals: 39
loc: 11
cycloplexity: 1
-- | -- Copyright: (C) 2015 Siddhanathan Shanmugam -- License: GPL (see LICENSE) -- Maintainer: [email protected] -- Portability: very -- -- Concise Brainfuck Interpreter using ListZippers -- module Main where import Control.Applicative ((<$>)) import Data.Char (ord, chr) import Data.List.Zipper (Zipper, fromList, left, right, cursor, replace, beginp, endp) import System.Environment (getArgs) data Direction = L | R deriving Eq loopTraverse :: Direction -> Zipper Char -> Zipper Char loopTraverse d z = l d z 0 where l :: Direction -> Zipper Char -> Int -> Zipper Char l d z n = case (d, cursor z) of (L, ']') -> l d (left z) (n+1) (L, '[') -> if n == 0 then z else l d (left z) (n-1) (L, _ ) -> l d (left z) n (R, '[') -> l d (right z) (n+1) (R, ']') -> if n == 0 then right z else l d (right z) (n-1) (R, _ ) -> l d (right z) n f :: Zipper Int -> Zipper Char -> IO () f zi zc | endp zc = return () | cursor zc == '>' = f (right zi) (right zc) | cursor zc == '<' = f (left zi) (right zc) | cursor zc == '+' = f (replace (cursor zi + 1) zi) (right zc) | cursor zc == '-' = f (replace (cursor zi - 1) zi) (right zc) | cursor zc == '.' = putChar (chr $ cursor zi) >> f zi (right zc) | cursor zc == ',' = getChar >>= \x -> f (replace (ord x) zi) (right zc) | cursor zc == '[' && cursor zi /= 0 = f zi (right zc) | cursor zc == '[' && cursor zi == 0 = f zi (loopTraverse R (right zc)) | cursor zc == ']' = f zi (loopTraverse L (left zc)) | otherwise = f zi (right zc) main :: IO () main = head <$> getArgs >>= readFile >>= \x -> f (fromList $ replicate 30000 0) (fromList x)
repo_name: siddhanathan/brainfuck
path: src/Main.hs
license: gpl-3.0
size: 1,859
n_ast_errors: 0
ast_max_depth: 13
n_whitespaces: 619
n_ast_nodes: 875
n_ast_terminals: 441
n_ast_nonterminals: 434
loc: 31
cycloplexity: 8
import Parser import Stundenplan import GHC.Exts (sortWith) main = do seminar <- leseSeminar "jena/" print seminar
repo_name: turion/hasched
path: TestParser.hs
license: gpl-3.0
size: 125
n_ast_errors: 0
ast_max_depth: 8
n_whitespaces: 26
n_ast_nodes: 38
n_ast_terminals: 19
n_ast_nonterminals: 19
loc: 6
cycloplexity: 1
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE DeriveDataTypeable, BangPatterns #-} {-# OPTIONS_GHC -funbox-strict-fields #-} ----------------------------------------------------------------------------- -- | -- Module : Control.Concurrent.QSem -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : non-portable (concurrency) -- -- Simple quantity semaphores. -- ----------------------------------------------------------------------------- module Control.Concurrent.QSem ( -- * Simple Quantity Semaphores QSem, -- abstract newQSem, -- :: Int -> IO QSem waitQSem, -- :: QSem -> IO () signalQSem -- :: QSem -> IO () ) where import Control.Concurrent.MVar ( MVar, newEmptyMVar, takeMVar, tryTakeMVar , putMVar, newMVar, tryPutMVar) import Control.Exception import Data.Maybe -- | 'QSem' is a quantity semaphore in which the resource is aqcuired -- and released in units of one. It provides guaranteed FIFO ordering -- for satisfying blocked `waitQSem` calls. -- -- The pattern -- -- > bracket_ waitQSem signalQSem (...) -- -- is safe; it never loses a unit of the resource. -- data QSem = QSem !(MVar (Int, [MVar ()], [MVar ()])) -- The semaphore state (i, xs, ys): -- -- i is the current resource value -- -- (xs,ys) is the queue of blocked threads, where the queue is -- given by xs ++ reverse ys. We can enqueue new blocked threads -- by consing onto ys, and dequeue by removing from the head of xs. -- -- A blocked thread is represented by an empty (MVar ()). To unblock -- the thread, we put () into the MVar. -- -- A thread can dequeue itself by also putting () into the MVar, which -- it must do if it receives an exception while blocked in waitQSem. -- This means that when unblocking a thread in signalQSem we must -- first check whether the MVar is already full; the MVar lock on the -- semaphore itself resolves race conditions between signalQSem and a -- thread attempting to dequeue itself. -- |Build a new 'QSem' with a supplied initial quantity. -- The initial quantity must be at least 0. newQSem :: Int -> IO QSem newQSem initial | initial < 0 = fail "newQSem: Initial quantity must be non-negative" | otherwise = do sem <- newMVar (initial, [], []) return (QSem sem) -- |Wait for a unit to become available waitQSem :: QSem -> IO () waitQSem (QSem m) = mask_ $ do (i,b1,b2) <- takeMVar m if i == 0 then do b <- newEmptyMVar putMVar m (i, b1, b:b2) wait b else do let !z = i-1 putMVar m (z, b1, b2) return () where wait b = takeMVar b `onException` do (uninterruptibleMask_ $ do -- Note [signal uninterruptible] (i,b1,b2) <- takeMVar m r <- tryTakeMVar b r' <- if isJust r then signal (i,b1,b2) else do putMVar b (); return (i,b1,b2) putMVar m r') -- |Signal that a unit of the 'QSem' is available signalQSem :: QSem -> IO () signalQSem (QSem m) = uninterruptibleMask_ $ do -- Note [signal uninterruptible] r <- takeMVar m r' <- signal r putMVar m r' -- Note [signal uninterruptible] -- -- If we have -- -- bracket waitQSem signalQSem (...) -- -- and an exception arrives at the signalQSem, then we must not lose -- the resource. The signalQSem is masked by bracket, but taking -- the MVar might block, and so it would be interruptible. Hence we -- need an uninterruptibleMask here. -- -- This isn't ideal: during high contention, some threads won't be -- interruptible. The QSemSTM implementation has better behaviour -- here, but it performs much worse than this one in some -- benchmarks. 
signal :: (Int,[MVar ()],[MVar ()]) -> IO (Int,[MVar ()],[MVar ()]) signal (i,a1,a2) = if i == 0 then loop a1 a2 else let !z = i+1 in return (z, a1, a2) where loop [] [] = return (1, [], []) loop [] b2 = loop (reverse b2) [] loop (b:bs) b2 = do r <- tryPutMVar b () if r then return (0, bs, b2) else loop bs b2
jwiegley/ghc-release
libraries/base/Control/Concurrent/QSem.hs
gpl-3.0
4,346
0
19
1,156
810
448
362
60
5
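The QSem module above recommends the bracket_ waitQSem signalQSem pattern in its documentation. Below is a minimal sketch of that pattern used to bound how many of a list of IO actions run at once; the async dependency and the boundedConcurrently name are illustrative assumptions, not part of the module above.

import Control.Concurrent.QSem
import Control.Concurrent.Async (mapConcurrently_)
import Control.Exception (bracket_)

-- Run every action, but let at most n of them hold a semaphore unit
-- (and therefore execute) at the same time.
boundedConcurrently :: Int -> [IO ()] -> IO ()
boundedConcurrently n actions = do
  sem <- newQSem n
  mapConcurrently_ (bracket_ (waitQSem sem) (signalQSem sem)) actions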
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Logging.Organizations.Exclusions.Patch -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Changes one or more properties of an existing exclusion. -- -- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.organizations.exclusions.patch@. module Network.Google.Resource.Logging.Organizations.Exclusions.Patch ( -- * REST Resource OrganizationsExclusionsPatchResource -- * Creating a Request , organizationsExclusionsPatch , OrganizationsExclusionsPatch -- * Request Lenses , oepXgafv , oepUploadProtocol , oepUpdateMask , oepAccessToken , oepUploadType , oepPayload , oepName , oepCallback ) where import Network.Google.Logging.Types import Network.Google.Prelude -- | A resource alias for @logging.organizations.exclusions.patch@ method which the -- 'OrganizationsExclusionsPatch' request conforms to. type OrganizationsExclusionsPatchResource = "v2" :> Capture "name" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "updateMask" GFieldMask :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] LogExclusion :> Patch '[JSON] LogExclusion -- | Changes one or more properties of an existing exclusion. -- -- /See:/ 'organizationsExclusionsPatch' smart constructor. data OrganizationsExclusionsPatch = OrganizationsExclusionsPatch' { _oepXgafv :: !(Maybe Xgafv) , _oepUploadProtocol :: !(Maybe Text) , _oepUpdateMask :: !(Maybe GFieldMask) , _oepAccessToken :: !(Maybe Text) , _oepUploadType :: !(Maybe Text) , _oepPayload :: !LogExclusion , _oepName :: !Text , _oepCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'OrganizationsExclusionsPatch' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'oepXgafv' -- -- * 'oepUploadProtocol' -- -- * 'oepUpdateMask' -- -- * 'oepAccessToken' -- -- * 'oepUploadType' -- -- * 'oepPayload' -- -- * 'oepName' -- -- * 'oepCallback' organizationsExclusionsPatch :: LogExclusion -- ^ 'oepPayload' -> Text -- ^ 'oepName' -> OrganizationsExclusionsPatch organizationsExclusionsPatch pOepPayload_ pOepName_ = OrganizationsExclusionsPatch' { _oepXgafv = Nothing , _oepUploadProtocol = Nothing , _oepUpdateMask = Nothing , _oepAccessToken = Nothing , _oepUploadType = Nothing , _oepPayload = pOepPayload_ , _oepName = pOepName_ , _oepCallback = Nothing } -- | V1 error format. oepXgafv :: Lens' OrganizationsExclusionsPatch (Maybe Xgafv) oepXgafv = lens _oepXgafv (\ s a -> s{_oepXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). oepUploadProtocol :: Lens' OrganizationsExclusionsPatch (Maybe Text) oepUploadProtocol = lens _oepUploadProtocol (\ s a -> s{_oepUploadProtocol = a}) -- | Required. A non-empty list of fields to change in the existing -- exclusion. 
New values for the fields are taken from the corresponding -- fields in the LogExclusion included in this request. Fields not -- mentioned in update_mask are not changed and are ignored in the -- request.For example, to change the filter and description of an -- exclusion, specify an update_mask of \"filter,description\". oepUpdateMask :: Lens' OrganizationsExclusionsPatch (Maybe GFieldMask) oepUpdateMask = lens _oepUpdateMask (\ s a -> s{_oepUpdateMask = a}) -- | OAuth access token. oepAccessToken :: Lens' OrganizationsExclusionsPatch (Maybe Text) oepAccessToken = lens _oepAccessToken (\ s a -> s{_oepAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). oepUploadType :: Lens' OrganizationsExclusionsPatch (Maybe Text) oepUploadType = lens _oepUploadType (\ s a -> s{_oepUploadType = a}) -- | Multipart request metadata. oepPayload :: Lens' OrganizationsExclusionsPatch LogExclusion oepPayload = lens _oepPayload (\ s a -> s{_oepPayload = a}) -- | Required. The resource name of the exclusion to update: -- \"projects\/[PROJECT_ID]\/exclusions\/[EXCLUSION_ID]\" -- \"organizations\/[ORGANIZATION_ID]\/exclusions\/[EXCLUSION_ID]\" -- \"billingAccounts\/[BILLING_ACCOUNT_ID]\/exclusions\/[EXCLUSION_ID]\" -- \"folders\/[FOLDER_ID]\/exclusions\/[EXCLUSION_ID]\" Example: -- \"projects\/my-project-id\/exclusions\/my-exclusion-id\". oepName :: Lens' OrganizationsExclusionsPatch Text oepName = lens _oepName (\ s a -> s{_oepName = a}) -- | JSONP oepCallback :: Lens' OrganizationsExclusionsPatch (Maybe Text) oepCallback = lens _oepCallback (\ s a -> s{_oepCallback = a}) instance GoogleRequest OrganizationsExclusionsPatch where type Rs OrganizationsExclusionsPatch = LogExclusion type Scopes OrganizationsExclusionsPatch = '["https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/logging.admin"] requestClient OrganizationsExclusionsPatch'{..} = go _oepName _oepXgafv _oepUploadProtocol _oepUpdateMask _oepAccessToken _oepUploadType _oepCallback (Just AltJSON) _oepPayload loggingService where go = buildClient (Proxy :: Proxy OrganizationsExclusionsPatchResource) mempty
brendanhay/gogol
gogol-logging/gen/Network/Google/Resource/Logging/Organizations/Exclusions/Patch.hs
mpl-2.0
6,362
0
17
1,355
868
509
359
124
1
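The generated gogol module above is normally driven by building the request value with its smart constructor and filling optional query parameters through the generated lenses. A hedged sketch of that shape, assuming Control.Lens, OverloadedStrings, and a LogExclusion payload built elsewhere; the resource name and callback string are made-up placeholders.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))
import Network.Google.Logging.Types (LogExclusion)
import Network.Google.Resource.Logging.Organizations.Exclusions.Patch

-- Patch a hypothetical exclusion, asking for a JSONP callback on the response.
examplePatch :: LogExclusion -> OrganizationsExclusionsPatch
examplePatch payload =
  organizationsExclusionsPatch payload "organizations/12345/exclusions/my-exclusion"
    & oepCallback ?~ "myCallback"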
{-# LANGUAGE DataKinds #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} -- | -- Module : Network.Google.Datastore -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Accesses the schemaless NoSQL database to provide fully managed, robust, -- scalable storage for your application. -- -- /See:/ <https://cloud.google.com/datastore/ Google Cloud Datastore API Reference> module Network.Google.Datastore ( -- * Service Configuration datastoreService -- * OAuth Scopes , cloudPlatformScope , datastoreScope -- * API Declaration , DatastoreAPI -- * Resources -- ** datastore.projects.allocateIds , module Network.Google.Resource.Datastore.Projects.AllocateIds -- ** datastore.projects.beginTransaction , module Network.Google.Resource.Datastore.Projects.BeginTransaction -- ** datastore.projects.commit , module Network.Google.Resource.Datastore.Projects.Commit -- ** datastore.projects.lookup , module Network.Google.Resource.Datastore.Projects.Lookup -- ** datastore.projects.rollback , module Network.Google.Resource.Datastore.Projects.Rollback -- ** datastore.projects.runQuery , module Network.Google.Resource.Datastore.Projects.RunQuery -- * Types -- ** LatLng , LatLng , latLng , llLatitude , llLongitude -- ** PropertyOrderDirection , PropertyOrderDirection (..) -- ** RollbackRequest , RollbackRequest , rollbackRequest , rrTransaction -- ** PartitionId , PartitionId , partitionId , piNamespaceId , piProjectId -- ** QueryResultBatch , QueryResultBatch , queryResultBatch , qrbSkippedResults , qrbSkippedCursor , qrbEntityResultType , qrbSnapshotVersion , qrbEntityResults , qrbMoreResults , qrbEndCursor -- ** CompositeFilterOp , CompositeFilterOp (..) -- ** EntityProperties , EntityProperties , entityProperties , epAddtional -- ** BeginTransactionRequest , BeginTransactionRequest , beginTransactionRequest -- ** RunQueryRequest , RunQueryRequest , runQueryRequest , rqrPartitionId , rqrGqlQuery , rqrQuery , rqrReadOptions -- ** AllocateIdsRequest , AllocateIdsRequest , allocateIdsRequest , airKeys -- ** QueryResultBatchEntityResultType , QueryResultBatchEntityResultType (..) -- ** CompositeFilter , CompositeFilter , compositeFilter , cfOp , cfFilters -- ** QueryResultBatchMoreResults , QueryResultBatchMoreResults (..) -- ** BeginTransactionResponse , BeginTransactionResponse , beginTransactionResponse , btrTransaction -- ** MutationResult , MutationResult , mutationResult , mrConflictDetected , mrKey , mrVersion -- ** AllocateIdsResponse , AllocateIdsResponse , allocateIdsResponse , aKeys -- ** GqlQuery , GqlQuery , gqlQuery , gqPositionalBindings , gqNamedBindings , gqQueryString , gqAllowLiterals -- ** RunQueryResponse , RunQueryResponse , runQueryResponse , rBatch , rQuery -- ** Value , Value , value , vKeyValue , vGeoPointValue , vIntegerValue , vTimestampValue , vEntityValue , vExcludeFromIndexes , vDoubleValue , vStringValue , vBooleanValue , vMeaning , vArrayValue , vNullValue , vBlobValue -- ** ValueNullValue , ValueNullValue (..) -- ** LookupRequest , LookupRequest , lookupRequest , lrKeys , lrReadOptions -- ** ReadOptionsReadConsistency , ReadOptionsReadConsistency (..) 
-- ** Mutation , Mutation , mutation , mBaseVersion , mInsert , mUpsert , mDelete , mUpdate -- ** GqlQueryNamedBindings , GqlQueryNamedBindings , gqlQueryNamedBindings , gqnbAddtional -- ** PropertyReference , PropertyReference , propertyReference , prName -- ** Key , Key , key , kPartitionId , kPath -- ** PropertyFilter , PropertyFilter , propertyFilter , pfProperty , pfOp , pfValue -- ** Query , Query , query , qStartCursor , qOffSet , qKind , qDistinctOn , qEndCursor , qLimit , qProjection , qFilter , qOrder -- ** ArrayValue , ArrayValue , arrayValue , avValues -- ** EntityResult , EntityResult , entityResult , erCursor , erVersion , erEntity -- ** Xgafv , Xgafv (..) -- ** CommitResponse , CommitResponse , commitResponse , crIndexUpdates , crMutationResults -- ** KindExpression , KindExpression , kindExpression , keName -- ** ReadOptions , ReadOptions , readOptions , roReadConsistency , roTransaction -- ** RollbackResponse , RollbackResponse , rollbackResponse -- ** Projection , Projection , projection , pProperty -- ** Filter , Filter , filter' , fCompositeFilter , fPropertyFilter -- ** PropertyFilterOp , PropertyFilterOp (..) -- ** CommitRequest , CommitRequest , commitRequest , crMutations , crMode , crTransaction -- ** CommitRequestMode , CommitRequestMode (..) -- ** PathElement , PathElement , pathElement , peKind , peName , peId -- ** Entity , Entity , entity , eKey , eProperties -- ** LookupResponse , LookupResponse , lookupResponse , lrDeferred , lrFound , lrMissing -- ** PropertyOrder , PropertyOrder , propertyOrder , poProperty , poDirection -- ** GqlQueryParameter , GqlQueryParameter , gqlQueryParameter , gqpCursor , gqpValue ) where import Network.Google.Datastore.Types import Network.Google.Prelude import Network.Google.Resource.Datastore.Projects.AllocateIds import Network.Google.Resource.Datastore.Projects.BeginTransaction import Network.Google.Resource.Datastore.Projects.Commit import Network.Google.Resource.Datastore.Projects.Lookup import Network.Google.Resource.Datastore.Projects.Rollback import Network.Google.Resource.Datastore.Projects.RunQuery {- $resources TODO -} -- | Represents the entirety of the methods and resources available for the Google Cloud Datastore API service. type DatastoreAPI = ProjectsBeginTransactionResource :<|> ProjectsAllocateIdsResource :<|> ProjectsRunQueryResource :<|> ProjectsRollbackResource :<|> ProjectsLookupResource :<|> ProjectsCommitResource
rueshyna/gogol
gogol-datastore/gen/Network/Google/Datastore.hs
mpl-2.0
7,023
0
9
1,879
810
579
231
207
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Partners.ClientMessages.Log -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Logs a generic message from the client, such as \`Failed to render -- component\`, \`Profile page is running slow\`, \`More than 500 users -- have accessed this result.\`, etc. -- -- /See:/ <https://developers.google.com/partners/ Google Partners API Reference> for @partners.clientMessages.log@. module Network.Google.Resource.Partners.ClientMessages.Log ( -- * REST Resource ClientMessagesLogResource -- * Creating a Request , clientMessagesLog , ClientMessagesLog -- * Request Lenses , cmlXgafv , cmlUploadProtocol , cmlPp , cmlAccessToken , cmlUploadType , cmlPayload , cmlBearerToken , cmlCallback ) where import Network.Google.Partners.Types import Network.Google.Prelude -- | A resource alias for @partners.clientMessages.log@ method which the -- 'ClientMessagesLog' request conforms to. type ClientMessagesLogResource = "v2" :> "clientMessages:log" :> QueryParam "$.xgafv" Text :> QueryParam "upload_protocol" Text :> QueryParam "pp" Bool :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "bearer_token" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] LogMessageRequest :> Post '[JSON] LogMessageResponse -- | Logs a generic message from the client, such as \`Failed to render -- component\`, \`Profile page is running slow\`, \`More than 500 users -- have accessed this result.\`, etc. -- -- /See:/ 'clientMessagesLog' smart constructor. data ClientMessagesLog = ClientMessagesLog' { _cmlXgafv :: !(Maybe Text) , _cmlUploadProtocol :: !(Maybe Text) , _cmlPp :: !Bool , _cmlAccessToken :: !(Maybe Text) , _cmlUploadType :: !(Maybe Text) , _cmlPayload :: !LogMessageRequest , _cmlBearerToken :: !(Maybe Text) , _cmlCallback :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'ClientMessagesLog' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'cmlXgafv' -- -- * 'cmlUploadProtocol' -- -- * 'cmlPp' -- -- * 'cmlAccessToken' -- -- * 'cmlUploadType' -- -- * 'cmlPayload' -- -- * 'cmlBearerToken' -- -- * 'cmlCallback' clientMessagesLog :: LogMessageRequest -- ^ 'cmlPayload' -> ClientMessagesLog clientMessagesLog pCmlPayload_ = ClientMessagesLog' { _cmlXgafv = Nothing , _cmlUploadProtocol = Nothing , _cmlPp = True , _cmlAccessToken = Nothing , _cmlUploadType = Nothing , _cmlPayload = pCmlPayload_ , _cmlBearerToken = Nothing , _cmlCallback = Nothing } -- | V1 error format. cmlXgafv :: Lens' ClientMessagesLog (Maybe Text) cmlXgafv = lens _cmlXgafv (\ s a -> s{_cmlXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). cmlUploadProtocol :: Lens' ClientMessagesLog (Maybe Text) cmlUploadProtocol = lens _cmlUploadProtocol (\ s a -> s{_cmlUploadProtocol = a}) -- | Pretty-print response. 
cmlPp :: Lens' ClientMessagesLog Bool cmlPp = lens _cmlPp (\ s a -> s{_cmlPp = a}) -- | OAuth access token. cmlAccessToken :: Lens' ClientMessagesLog (Maybe Text) cmlAccessToken = lens _cmlAccessToken (\ s a -> s{_cmlAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). cmlUploadType :: Lens' ClientMessagesLog (Maybe Text) cmlUploadType = lens _cmlUploadType (\ s a -> s{_cmlUploadType = a}) -- | Multipart request metadata. cmlPayload :: Lens' ClientMessagesLog LogMessageRequest cmlPayload = lens _cmlPayload (\ s a -> s{_cmlPayload = a}) -- | OAuth bearer token. cmlBearerToken :: Lens' ClientMessagesLog (Maybe Text) cmlBearerToken = lens _cmlBearerToken (\ s a -> s{_cmlBearerToken = a}) -- | JSONP cmlCallback :: Lens' ClientMessagesLog (Maybe Text) cmlCallback = lens _cmlCallback (\ s a -> s{_cmlCallback = a}) instance GoogleRequest ClientMessagesLog where type Rs ClientMessagesLog = LogMessageResponse type Scopes ClientMessagesLog = '[] requestClient ClientMessagesLog'{..} = go _cmlXgafv _cmlUploadProtocol (Just _cmlPp) _cmlAccessToken _cmlUploadType _cmlBearerToken _cmlCallback (Just AltJSON) _cmlPayload partnersService where go = buildClient (Proxy :: Proxy ClientMessagesLogResource) mempty
rueshyna/gogol
gogol-partners/gen/Network/Google/Resource/Partners/ClientMessages/Log.hs
mpl-2.0
5,480
0
18
1,354
860
500
360
121
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.ElasticTranscoder.UpdatePipelineStatus -- Copyright : (c) 2013-2014 Brendan Hay <[email protected]> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | The UpdatePipelineStatus operation pauses or reactivates a pipeline, so that -- the pipeline stops or restarts the processing of jobs. -- -- Changing the pipeline status is useful if you want to cancel one or more -- jobs. You can't cancel jobs after Elastic Transcoder has started processing -- them; if you pause the pipeline to which you submitted the jobs, you have -- more time to get the job IDs for the jobs that you want to cancel, and to -- send a 'CancelJob' request. -- -- <http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/UpdatePipelineStatus.html> module Network.AWS.ElasticTranscoder.UpdatePipelineStatus ( -- * Request UpdatePipelineStatus -- ** Request constructor , updatePipelineStatus -- ** Request lenses , upsId , upsStatus -- * Response , UpdatePipelineStatusResponse -- ** Response constructor , updatePipelineStatusResponse -- ** Response lenses , upsrPipeline ) where import Network.AWS.Prelude import Network.AWS.Request.RestJSON import Network.AWS.ElasticTranscoder.Types import qualified GHC.Exts data UpdatePipelineStatus = UpdatePipelineStatus { _upsId :: Text , _upsStatus :: Text } deriving (Eq, Ord, Read, Show) -- | 'UpdatePipelineStatus' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'upsId' @::@ 'Text' -- -- * 'upsStatus' @::@ 'Text' -- updatePipelineStatus :: Text -- ^ 'upsId' -> Text -- ^ 'upsStatus' -> UpdatePipelineStatus updatePipelineStatus p1 p2 = UpdatePipelineStatus { _upsId = p1 , _upsStatus = p2 } -- | The identifier of the pipeline to update. upsId :: Lens' UpdatePipelineStatus Text upsId = lens _upsId (\s a -> s { _upsId = a }) -- | The desired status of the pipeline: -- -- 'Active': The pipeline is processing jobs. 'Paused': The pipeline is not -- currently processing jobs. upsStatus :: Lens' UpdatePipelineStatus Text upsStatus = lens _upsStatus (\s a -> s { _upsStatus = a }) newtype UpdatePipelineStatusResponse = UpdatePipelineStatusResponse { _upsrPipeline :: Maybe Pipeline } deriving (Eq, Read, Show) -- | 'UpdatePipelineStatusResponse' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'upsrPipeline' @::@ 'Maybe' 'Pipeline' -- updatePipelineStatusResponse :: UpdatePipelineStatusResponse updatePipelineStatusResponse = UpdatePipelineStatusResponse { _upsrPipeline = Nothing } -- | A section of the response body that provides information about the pipeline. 
upsrPipeline :: Lens' UpdatePipelineStatusResponse (Maybe Pipeline) upsrPipeline = lens _upsrPipeline (\s a -> s { _upsrPipeline = a }) instance ToPath UpdatePipelineStatus where toPath UpdatePipelineStatus{..} = mconcat [ "/2012-09-25/pipelines/" , toText _upsId , "/status" ] instance ToQuery UpdatePipelineStatus where toQuery = const mempty instance ToHeaders UpdatePipelineStatus instance ToJSON UpdatePipelineStatus where toJSON UpdatePipelineStatus{..} = object [ "Status" .= _upsStatus ] instance AWSRequest UpdatePipelineStatus where type Sv UpdatePipelineStatus = ElasticTranscoder type Rs UpdatePipelineStatus = UpdatePipelineStatusResponse request = post response = jsonResponse instance FromJSON UpdatePipelineStatusResponse where parseJSON = withObject "UpdatePipelineStatusResponse" $ \o -> UpdatePipelineStatusResponse <$> o .:? "Pipeline"
dysinger/amazonka
amazonka-elastictranscoder/gen/Network/AWS/ElasticTranscoder/UpdatePipelineStatus.hs
mpl-2.0
4,585
0
9
980
523
320
203
64
1
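The amazonka module above has the same constructor-plus-lenses shape. A small sketch of building its requests, assuming Control.Lens and OverloadedStrings; the pipeline identifier is a made-up placeholder.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (.~))
import Network.AWS.ElasticTranscoder.UpdatePipelineStatus

-- Pause a hypothetical pipeline, then derive the reactivating request by
-- overwriting the status through its generated lens.
pauseRequest :: UpdatePipelineStatus
pauseRequest = updatePipelineStatus "1111111111111-abcd11" "Paused"

reactivateRequest :: UpdatePipelineStatus
reactivateRequest = pauseRequest & upsStatus .~ "Active"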
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Spanner.Projects.Instances.Backups.SetIAMPolicy -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Sets the access control policy on a database or backup resource. -- Replaces any existing policy. Authorization requires -- \`spanner.databases.setIamPolicy\` permission on resource. For backups, -- authorization requires \`spanner.backups.setIamPolicy\` permission on -- resource. -- -- /See:/ <https://cloud.google.com/spanner/ Cloud Spanner API Reference> for @spanner.projects.instances.backups.setIamPolicy@. module Network.Google.Resource.Spanner.Projects.Instances.Backups.SetIAMPolicy ( -- * REST Resource ProjectsInstancesBackupsSetIAMPolicyResource -- * Creating a Request , projectsInstancesBackupsSetIAMPolicy , ProjectsInstancesBackupsSetIAMPolicy -- * Request Lenses , pibsipXgafv , pibsipUploadProtocol , pibsipAccessToken , pibsipUploadType , pibsipPayload , pibsipResource , pibsipCallback ) where import Network.Google.Prelude import Network.Google.Spanner.Types -- | A resource alias for @spanner.projects.instances.backups.setIamPolicy@ method which the -- 'ProjectsInstancesBackupsSetIAMPolicy' request conforms to. type ProjectsInstancesBackupsSetIAMPolicyResource = "v1" :> CaptureMode "resource" "setIamPolicy" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] SetIAMPolicyRequest :> Post '[JSON] Policy -- | Sets the access control policy on a database or backup resource. -- Replaces any existing policy. Authorization requires -- \`spanner.databases.setIamPolicy\` permission on resource. For backups, -- authorization requires \`spanner.backups.setIamPolicy\` permission on -- resource. -- -- /See:/ 'projectsInstancesBackupsSetIAMPolicy' smart constructor. data ProjectsInstancesBackupsSetIAMPolicy = ProjectsInstancesBackupsSetIAMPolicy' { _pibsipXgafv :: !(Maybe Xgafv) , _pibsipUploadProtocol :: !(Maybe Text) , _pibsipAccessToken :: !(Maybe Text) , _pibsipUploadType :: !(Maybe Text) , _pibsipPayload :: !SetIAMPolicyRequest , _pibsipResource :: !Text , _pibsipCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsInstancesBackupsSetIAMPolicy' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pibsipXgafv' -- -- * 'pibsipUploadProtocol' -- -- * 'pibsipAccessToken' -- -- * 'pibsipUploadType' -- -- * 'pibsipPayload' -- -- * 'pibsipResource' -- -- * 'pibsipCallback' projectsInstancesBackupsSetIAMPolicy :: SetIAMPolicyRequest -- ^ 'pibsipPayload' -> Text -- ^ 'pibsipResource' -> ProjectsInstancesBackupsSetIAMPolicy projectsInstancesBackupsSetIAMPolicy pPibsipPayload_ pPibsipResource_ = ProjectsInstancesBackupsSetIAMPolicy' { _pibsipXgafv = Nothing , _pibsipUploadProtocol = Nothing , _pibsipAccessToken = Nothing , _pibsipUploadType = Nothing , _pibsipPayload = pPibsipPayload_ , _pibsipResource = pPibsipResource_ , _pibsipCallback = Nothing } -- | V1 error format. pibsipXgafv :: Lens' ProjectsInstancesBackupsSetIAMPolicy (Maybe Xgafv) pibsipXgafv = lens _pibsipXgafv (\ s a -> s{_pibsipXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pibsipUploadProtocol :: Lens' ProjectsInstancesBackupsSetIAMPolicy (Maybe Text) pibsipUploadProtocol = lens _pibsipUploadProtocol (\ s a -> s{_pibsipUploadProtocol = a}) -- | OAuth access token. pibsipAccessToken :: Lens' ProjectsInstancesBackupsSetIAMPolicy (Maybe Text) pibsipAccessToken = lens _pibsipAccessToken (\ s a -> s{_pibsipAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pibsipUploadType :: Lens' ProjectsInstancesBackupsSetIAMPolicy (Maybe Text) pibsipUploadType = lens _pibsipUploadType (\ s a -> s{_pibsipUploadType = a}) -- | Multipart request metadata. pibsipPayload :: Lens' ProjectsInstancesBackupsSetIAMPolicy SetIAMPolicyRequest pibsipPayload = lens _pibsipPayload (\ s a -> s{_pibsipPayload = a}) -- | REQUIRED: The Cloud Spanner resource for which the policy is being set. -- The format is \`projects\/\/instances\/\` for instance resources and -- \`projects\/\/instances\/\/databases\/\` for databases resources. pibsipResource :: Lens' ProjectsInstancesBackupsSetIAMPolicy Text pibsipResource = lens _pibsipResource (\ s a -> s{_pibsipResource = a}) -- | JSONP pibsipCallback :: Lens' ProjectsInstancesBackupsSetIAMPolicy (Maybe Text) pibsipCallback = lens _pibsipCallback (\ s a -> s{_pibsipCallback = a}) instance GoogleRequest ProjectsInstancesBackupsSetIAMPolicy where type Rs ProjectsInstancesBackupsSetIAMPolicy = Policy type Scopes ProjectsInstancesBackupsSetIAMPolicy = '["https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/spanner.admin"] requestClient ProjectsInstancesBackupsSetIAMPolicy'{..} = go _pibsipResource _pibsipXgafv _pibsipUploadProtocol _pibsipAccessToken _pibsipUploadType _pibsipCallback (Just AltJSON) _pibsipPayload spannerService where go = buildClient (Proxy :: Proxy ProjectsInstancesBackupsSetIAMPolicyResource) mempty
brendanhay/gogol
gogol-spanner/gen/Network/Google/Resource/Spanner/Projects/Instances/Backups/SetIAMPolicy.hs
mpl-2.0
6,484
0
16
1,329
791
466
325
122
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.Glacier.GetVaultNotifications -- Copyright : (c) 2013-2014 Brendan Hay <[email protected]> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | This operation retrieves the 'notification-configuration' subresource of the -- specified vault. -- -- For information about setting a notification configuration on a vault, see 'SetVaultNotifications'. If a notification configuration for a vault is not set, the operation -- returns a '404 Not Found' error. For more information about vault -- notifications, see <http://docs.aws.amazon.com/amazonglacier/latest/dev/configuring-notifications.html Configuring Vault Notifications in Amazon Glacier>. -- -- An AWS account has full permission to perform all operations (actions). -- However, AWS Identity and Access Management (IAM) users don't have any -- permissions by default. You must grant them explicit permission to perform -- specific actions. For more information, see <http://docs.aws.amazon.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html Access Control Using AWS Identityand Access Management (IAM)>. -- -- For conceptual information and underlying REST API, go to <http://docs.aws.amazon.com/amazonglacier/latest/dev/configuring-notifications.html Configuring VaultNotifications in Amazon Glacier> and <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-vault-notifications-get.html Get Vault Notification Configuration > in -- the /Amazon Glacier Developer Guide/. -- -- <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-GetVaultNotifications.html> module Network.AWS.Glacier.GetVaultNotifications ( -- * Request GetVaultNotifications -- ** Request constructor , getVaultNotifications -- ** Request lenses , gvnAccountId , gvnVaultName -- * Response , GetVaultNotificationsResponse -- ** Response constructor , getVaultNotificationsResponse -- ** Response lenses , gvnrVaultNotificationConfig ) where import Network.AWS.Prelude import Network.AWS.Request.RestJSON import Network.AWS.Glacier.Types import qualified GHC.Exts data GetVaultNotifications = GetVaultNotifications { _gvnAccountId :: Text , _gvnVaultName :: Text } deriving (Eq, Ord, Read, Show) -- | 'GetVaultNotifications' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'gvnAccountId' @::@ 'Text' -- -- * 'gvnVaultName' @::@ 'Text' -- getVaultNotifications :: Text -- ^ 'gvnAccountId' -> Text -- ^ 'gvnVaultName' -> GetVaultNotifications getVaultNotifications p1 p2 = GetVaultNotifications { _gvnAccountId = p1 , _gvnVaultName = p2 } -- | The 'AccountId' is the AWS Account ID. You can specify either the AWS Account -- ID or optionally a '-', in which case Amazon Glacier uses the AWS Account ID -- associated with the credentials used to sign the request. If you specify your -- Account ID, do not include hyphens in it. 
gvnAccountId :: Lens' GetVaultNotifications Text gvnAccountId = lens _gvnAccountId (\s a -> s { _gvnAccountId = a }) -- | The name of the vault. gvnVaultName :: Lens' GetVaultNotifications Text gvnVaultName = lens _gvnVaultName (\s a -> s { _gvnVaultName = a }) newtype GetVaultNotificationsResponse = GetVaultNotificationsResponse { _gvnrVaultNotificationConfig :: Maybe VaultNotificationConfig } deriving (Eq, Read, Show) -- | 'GetVaultNotificationsResponse' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'gvnrVaultNotificationConfig' @::@ 'Maybe' 'VaultNotificationConfig' -- getVaultNotificationsResponse :: GetVaultNotificationsResponse getVaultNotificationsResponse = GetVaultNotificationsResponse { _gvnrVaultNotificationConfig = Nothing } -- | Returns the notification configuration set on the vault. gvnrVaultNotificationConfig :: Lens' GetVaultNotificationsResponse (Maybe VaultNotificationConfig) gvnrVaultNotificationConfig = lens _gvnrVaultNotificationConfig (\s a -> s { _gvnrVaultNotificationConfig = a }) instance ToPath GetVaultNotifications where toPath GetVaultNotifications{..} = mconcat [ "/" , toText _gvnAccountId , "/vaults/" , toText _gvnVaultName , "/notification-configuration" ] instance ToQuery GetVaultNotifications where toQuery = const mempty instance ToHeaders GetVaultNotifications instance ToJSON GetVaultNotifications where toJSON = const (toJSON Empty) instance AWSRequest GetVaultNotifications where type Sv GetVaultNotifications = Glacier type Rs GetVaultNotifications = GetVaultNotificationsResponse request = get response = jsonResponse instance FromJSON GetVaultNotificationsResponse where parseJSON = withObject "GetVaultNotificationsResponse" $ \o -> GetVaultNotificationsResponse <$> o .:? "vaultNotificationConfig"
dysinger/amazonka
amazonka-glacier/gen/Network/AWS/Glacier/GetVaultNotifications.hs
mpl-2.0
5,769
0
9
1,067
528
326
202
67
1
{- Copyright (C) 2009 Andrejs Sisojevs <[email protected]> All rights reserved. For license and copyright information, see the file COPYRIGHT -} -------------------------------------------------------------------------- -------------------------------------------------------------------------- {-# OPTIONS_HADDOCK hide #-} module Text.PCLT.Template__ where import qualified Data.ByteString.Lazy.UTF8.Unified as Lazy (ByteString) import qualified Data.ByteString.Lazy.UTF8.Unified as B hiding (ByteString) import qualified Data.Map as M import Data.Map (Map, (!)) import Text.PCLT.SH__ import Text.PCLT.ShowAsPCSI__ import Text.PCLT.SDL import Text.PCLT.SDL__ import Text.PCLT.Template ------------------------------------------------------------------------------ data PCLT_ShowDetalizationLevel_PCSIWrapper = PCLT_ShowDetalizationLevel_PCSIWrapper PCLT_ShowDetalizationLevel instance ShowAsPCSI PCLT_ShowDetalizationLevel_PCSIWrapper where showAsPCSI (PCLT_ShowDetalizationLevel_PCSIWrapper psdl) = thePCSI "PCLT_PSDL" [("psdl", PCSI_PV $ showAsPCSI psdl)] instance ShowAsPCSI PCLT_ShowDetalizationLevel where showAsPCSI psdl = case psdl of PCLT_SDL sdl -> thePCSI "PCLT_PSDL_SDL" [("sdl", PCSI_PV $ showAsPCSI sdl)] PCLT_SDL_ToTemplateLink tpl_id -> thePCSI "PCLT_PSDL_CMPST" [("tpl_id", PlainText_PV tpl_id)] PCLT_SDL_ToParamCompositeLink p_name -> thePCSI "PCLT_PSDL_PARAMCMPST" [("param_name", PlainText_PV p_name)] PCLT_SDL_Errornous err -> thePCSI "PCLT_PSDL_ERR" [("err_details", PCSI_PV $ showAsPCSI err)] instance HasStaticRawPCLTs PCLT_ShowDetalizationLevel where getStaticRawPCLTs inner_cfg _ = flip (,) [] $ PCLT_RawCatalogData $ M.fromList [ ("PCLT_PSDL", (M.fromList [("rus", B.pack "##|PCLT_PSDL_PREFIX##| @@|psdl@@|"), ("eng", B.pack "##|PCLT_PSDL_PREFIX##| @@|psdl@@|")], str2PCLT_SDL Required_SDLM "##|PCLT_SDL##|" inner_cfg)) , ("PCLT_PSDL_PREFIX", (M.fromList [("rus", B.pack "Уровень детализации отображения сообщения из шаблона (определённый при шаблоне, как минимальный необходимый для отображения сообщения): "), ("eng", B.pack "Show detalization level from a message template (specified by the template, as a required minimum for message representation):")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) , ("PCLT_PSDL_SDL", (M.fromList [("rus", B.pack "@@|sdl@@|"), ("eng", B.pack "@@|sdl@@|")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) , ("PCLT_PSDL_CMPST", (M.fromList [("rus", B.pack "такой же, как для шаблона '@@|tpl_id@@|'"), ("eng", B.pack "the same, as specified for template '@@|tpl_id@@|'")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) , ("PCLT_PSDL_PARAMCMPST", (M.fromList [("rus", B.pack "такой же, как для шаблона, который должен быть под параметром '@@|param_name@@|'"), ("eng", B.pack "the same, as specified for template, that is to be put under parameter '@@|param_name@@|'")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) , ("PCLT_PSDL_ERR", (M.fromList [("rus", B.pack "ошибка определения уровня ('@@|err_details@@|')"), ("eng", B.pack "SDL specification error ('@@|err_details@@|')")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) ] ------------------------------------------------------------------------------ instance ShowAsPCSI PCLT_ErrornousSDL where showAsPCSI (UnreadableSDL_ESDL sdlm raw_input) = addToPCSI [showAsPCSI sdlm] (thePCSI "E_PCLT_ESDL" [("sdle_raw_inp", PlainText_PV raw_input)]) instance HasStaticRawPCLTs PCLT_ErrornousSDL where getStaticRawPCLTs inner_cfg _ = flip (,) [] $ PCLT_RawCatalogData $ M.fromList [ 
("PCLT_ESDL", (M.fromList [("rus", B.pack "##|PCLT_ESDL_PREFIX##|@@|pclt_esdl_details@@|."), ("eng", B.pack "##|PCLT_ESDL_PREFIX##|@@|pclt_esdl_details@@|.")], str2PCLT_SDL Required_SDLM "##|PCLT_PSDL##|" inner_cfg)) , ("PCLT_ESDL_PREFIX", (M.fromList [("rus", B.pack "Причина ошибочности данного уровня детализации отображения сообщения: "), ("eng", B.pack "Error in definition of level of detalization of representation:")], str2PCLT_SDL Required_SDLM "##|PCLT_ESDL##|" inner_cfg)) , ("PCLT_ESDL_UNREAD", (M.fromList [("rus", B.pack "задан нечитаемый уровень. ##|PCLT_SDLM##| Текст заданого: \"@@|sdle_raw_inp@@|\" "), ("eng", B.pack "unreadable level specified. ##|PCLT_SDLM##| Input: \"@@|sdle_raw_inp@@|\"")], str2PCLT_SDL Required_SDLM "##|PCLT_ESDL##|" inner_cfg)) ] ------------------------------------------------------------------------------ instance ShowAsPCSI PCS_SpecificMarkings where showAsPCSI pcs_sm = case pcs_sm of PlainText_LngTplM -> empPCSI "PCLT_MARKER_PLAINTXT" Parameter_LngTplM -> empPCSI "PCLT_MARKER_PARAM" Composite_LngTplM -> empPCSI "PCLT_MARKER_COMPOSITE" Unsupported_LngTplM ssm -> thePCSI "PCLT_MARKER_UNSUP" [("ssm", PCSI_PV $ showAsPCSI ssm)] data PCS_SpecificMarkings_PCSIWrapped = PCS_SpecificMarkings_PCSIWrapped PCS_SpecificMarkings instance ShowAsPCSI PCS_SpecificMarkings_PCSIWrapped where showAsPCSI (PCS_SpecificMarkings_PCSIWrapped pcs_sm) = thePCSI "PCLT_MARKER" [("pclt_marker_details", PCSI_PV $ showAsPCSI pcs_sm)] instance HasStaticRawPCLTs PCS_SpecificMarkings where getStaticRawPCLTs inner_cfg _ = flip (,) [] $ PCLT_RawCatalogData $ M.fromList [ ("PCLT_MARKER", (M.fromList [("rus", B.pack "##|PCLT_MARKER_PREFIX##|@@|pclt_marker_details@@|."), ("eng", B.pack "##|PCLT_MARKER_PREFIX##|@@|pclt_marker_details@@|.")], str2PCLT_SDL Required_SDLM "##|PARSER_SSM##|" inner_cfg)) , ("PCLT_MARKER_PREFIX", (M.fromList [("rus", B.pack "PCLT маркер:"), ("eng", B.pack "PCLT marker: ")], str2PCLT_SDL Required_SDLM "##|PCLT_MARKER##|" inner_cfg)) , ("PCLT_MARKER_PLAINTXT", (M.fromList [("rus", B.pack "обычный текст"), ("eng", B.pack "plain text")], str2PCLT_SDL Required_SDLM "##|PCLT_MARKER##|" inner_cfg)) , ("PCLT_MARKER_PARAM", (M.fromList [("rus", B.pack "параметр"), ("eng", B.pack "parameter")], str2PCLT_SDL Required_SDLM "##|PCLT_MARKER##|" inner_cfg)) , ("PCLT_MARKER_COMPOSITE", (M.fromList [("rus", B.pack "композит"), ("eng", B.pack "composite")], str2PCLT_SDL Required_SDLM "##|PCLT_MARKER##|" inner_cfg)) , ("PCLT_MARKER_UNSUP", (M.fromList [("rus", B.pack "неподдерживаемый маркер \"@@|ssm@@|\""), ("eng", B.pack "unsupported marker \"@@|ssm@@|\"")], str2PCLT_SDL Required_SDLM "##|PCLT_MARKER##|" inner_cfg)) ] ------------------------------------------------------------------------------ instance ShowAsPCSI PCLT_ParserLowLevelFailure where showAsPCSI pclt_pllf = case pclt_pllf of UnexpectedParserResult_PLLF_PCLT parse_result_str -> thePCSI "E_PCLT_PLLF_UNEXP" [("parse_result_str", PlainText_PV parse_result_str)] BadMarker_PLLF_PCLT ssm s chunk_idx -> thePCSI "E_PCLT_PLLF_BADSSM" [("ssm", PCSI_PV $ showAsPCSI ssm), ("chunk_idx", PlainText_PV $ show chunk_idx)] data PCLT_ParserLowLevelFailure_PCSIWrapped = PCLT_ParserLowLevelFailure_PCSIWrapped PCLT_ParserLowLevelFailure instance ShowAsPCSI PCLT_ParserLowLevelFailure_PCSIWrapped where showAsPCSI (PCLT_ParserLowLevelFailure_PCSIWrapped pclt_pllf) = thePCSI "E_PCLT_PLLF" [("pclt_pllf_details", PCSI_PV $ showAsPCSI pclt_pllf)] instance HasStaticRawPCLTs PCLT_ParserLowLevelFailure where getStaticRawPCLTs inner_cfg _ = flip (,) [] 
$ PCLT_RawCatalogData $ M.fromList [ ("E_PCLT_PLLF", (M.fromList [("rus", B.pack "##|E_PCLT_PLLF_PREFIX##|@@|pclt_pllf_details@@|."), ("eng", B.pack "##|E_PCLT_PLLF_PREFIX##|@@|pclt_pllf_details@@|.")], str2PCLT_SDL Required_SDLM "##|LLEXCPT##|" inner_cfg)) , ("E_PCLT_PLLF_PREFIX", (M.fromList [("rus", B.pack "Произошла ошибка в результате применения парсера, который разделяет обычный текст, параметры и композиты:"), ("eng", B.pack "An error occurred when applying parser, that separates plain text, parameters and composites: ")], str2PCLT_SDL Required_SDLM "##|E_PCLT_PLLF##|" inner_cfg)) , ("E_PCLT_PLLF_UNEXP", (M.fromList [("rus", B.pack "парсер вернул ненормальное состояние \"@@|parse_result_str@@|\""), ("eng", B.pack "parser returned an unexpected state \"@@|parse_result_str@@|\"")], str2PCLT_SDL Required_SDLM "##|E_PCLT_PLLF##|" inner_cfg)) , ("E_PCLT_PLLF_BADSSM", (M.fromList [("rus", B.pack "плохой маркер у куска текста (позиция: @@|chunk_idx@@|): @@|ssm@@|"), ("eng", B.pack "bad chunk marker (position: @@|chunk_idx@@|): @@|ssm@@|")], str2PCLT_SDL Required_SDLM "##|E_PCLT_PLLF##|" inner_cfg)) ] ------------------------------------------------------------------------------ data PCLTRawCatalog__Text_PCLT_Template = PCLTRawCatalog__Text_PCLT_Template instance HasStaticRawPCLTs PCLTRawCatalog__Text_PCLT_Template where widenessOfStaticRawPCLTsSet _ = Module_RPSW getStaticRawPCLTs inner_cfg _ = mergeRawCatalogDataSets2 True [ getStaticRawPCLTs inner_cfg (undefined :: PCLT_ErrornousSDL) , getStaticRawPCLTs inner_cfg (undefined :: PCS_SpecificMarkings) , getStaticRawPCLTs inner_cfg (undefined :: PCLT_ParserLowLevelFailure) , getStaticRawPCLTs inner_cfg (undefined :: PCLT_ShowDetalizationLevel) ]-- i wish i knew an easy way how to achieve a smaller code of this by using a "map (getStaticRawPCLTs inner_cfg) [...]" function
Andrey-Sisoyev/haskell-PCLT
Text/PCLT/Template__.hs
lgpl-2.1
10,969
0
15
2,278
1,922
1,057
865
92
0
-- Basic Input and Output main2 = do putStrLn "wangyixiang" place <- getLine let distance = length place * 10 putStrLn $ "The distance to " ++ place ++ " is " ++ show distance
wangyixiang/beginninghaskell
chapter9/src/Chapter9/wangyixiang.hs
unlicense
186
0
12
52
61
28
33
5
1
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving #-} module Database where import Data.Aeson ((.:), (.:?), decode, FromJSON(..), Value(..), ToJSON(..), (.=), object) import Control.Applicative import qualified Data.Map as M import qualified Data.ByteString.Lazy.Char8 as B import Types addTodoItem :: String -> (Result, WebM ()) addTodoItem json = let parsed = parseJsonToTodo json in case parsed of Left error -> (Failure error, modify id) Right t -> let newState = modify $ \st -> let nextUid = nextId st + 1 todoWithId = Todo (text t) (Just nextUid) in st { nextId = nextUid, todos = M.insert nextUid todoWithId (todos st) } in (Success, newState) parseJsonToTodo :: String -> Either String Todo parseJsonToTodo json = let parsed = decode (B.pack json) :: Maybe Todo in case parsed of Nothing -> Left "Cannot parse JSON to Todo" Just todo -> Right todo findTodoById :: Int -> WebM (Maybe Todo) findTodoById uid = gets (\st -> let todoItems = (todos st) in M.lookup uid todoItems) getTodos :: WebM [Todo] getTodos = gets (\st -> M.elems (todos st))
karun012/scotty-todo-sample
src/Database.hs
unlicense
1,436
0
22
544
425
229
196
25
2
-- Define all quicktests that are used to test the additive properties of expresssions module QuickTests.Addition (tests) where import Test.QuickCheck (quickCheck, counterexample) import CAS import QuickTests.Arbitrary (arbitrary) -- If one wants a look at the generated expressions in any quickCheck simply replace the call with 'verboseCheck'. This is a good debugging strategy. -- We use 'counterexample' to attach a label to each test which will be printed if the test fails. This will let us know at a glance what went wrong. tests = do quickCheck $ counterexample "Adding zero to an expression" prop_Add_0 quickCheck $ counterexample "Adding an expression with itself" prop_Add_equal quickCheck $ counterexample "Add a constant times an expression with another contant times its expressions" prop_Add_equal2 quickCheck $ counterexample "Subtract an expression from itself" prop_Sub_equal quickCheck $ counterexample "Additive Commutation between two expressions" prop_Add_Commute -- Any function that starts with "prop_" is considered a property by QuickCheck -- Define properties that test the additive features of expressions prop_Add_0 :: Expr Int -> Bool -- A property of expressions is that adding zero to an expression should result in the same expression prop_Add_0 e = e + 0 == e where types = e::(Expr Int) prop_Add_equal :: Expr Int -> Bool prop_Add_equal e = e + e == (2 * e) where types = e :: (Expr Int) prop_Add_equal2 :: Expr Int -> Bool prop_Add_equal2 e = (2 * e) + (5 * e) == (7 * e) where types = e :: (Expr Int) prop_Sub_equal :: Expr Int -> Bool prop_Sub_equal e = (e - e) == 0 where types = e :: (Expr Int) prop_Add_Commute :: Expr Int -> Expr Int -> Bool prop_Add_Commute e1 e2 = e1 + e2 == e2 + e1 where types = (e1 :: Expr Int, e2 :: Expr Int)
abid-mujtaba/haskell-cas
test/QuickTests/Addition.hs
apache-2.0
1,878
0
8
402
381
201
180
25
1
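The property module above exposes a single tests action. A minimal driver sketch, assuming the module is importable under the path shown; nothing here comes from the original repository.

module Main (main) where

import qualified QuickTests.Addition as Addition

-- Run the additive property group; per the module's own comments, swapping
-- quickCheck for verboseCheck inside it prints the generated expressions.
main :: IO ()
main = Addition.tests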
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="id-ID"> <title>Context Alert Filters | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
0xkasun/security-tools
src/org/zaproxy/zap/extension/alertFilters/resources/help_id_ID/helpset_id_ID.hs
apache-2.0
983
80
66
161
417
211
206
-1
-1
{- | Module : Camfort.Specification.Units.Analysis.Criticals Description : Critical-units analysis. Copyright : (c) 2017, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish License : Apache-2.0 Maintainer : [email protected] Stability : experimental This module defines an analysis for finding the 'critical' variables in a program. These critical variables form a set of variables that, when given unit annotations, can be used to infer the unit types of all other variables in the program. -} module Camfort.Specification.Units.Analysis.Criticals ( inferCriticalVariables ) where import Control.Monad.State (get) import Control.Monad.Reader (asks, lift) import Data.Generics.Uniplate.Operations import qualified Data.Map.Strict as M import Data.Maybe (fromMaybe) import Camfort.Analysis import Camfort.Analysis.Annotations import Camfort.Analysis.ModFile (withCombinedModuleMap) import Camfort.Specification.Units.InferenceBackendSBV (criticalVariables) -- Provides the types and data accessors used in this module import Camfort.Specification.Units.Analysis (UnitAnalysis, runInference) import qualified Camfort.Specification.Units.Annotation as UA import Camfort.Specification.Units.Environment import Camfort.Specification.Units.Monad import Camfort.Specification.Units.MonadTypes import qualified Language.Fortran.AST as F import qualified Language.Fortran.Analysis as FA import qualified Language.Fortran.Analysis.Renaming as FAR import Language.Fortran.Util.ModFile import qualified Language.Fortran.Util.Position as FU -- | An inference of variables that must be provided with -- unit annotations before units for all variables can be -- resolved. data Criticals = Criticals { -- | 'ProgramFile' analysis was performed upon. criticalsPf :: F.ProgramFile Annotation -- | The inferred critical variables. , criticalsVariables :: [UnitInfo] -- | Map of all declarations. , criticalsDeclarations :: M.Map F.Name (DeclContext, FU.SrcSpan) -- | Map of unique names. , criticalsUniqMap :: M.Map F.Name F.Name -- | Location of criticals. , criticalsFromWhere :: M.Map F.Name FilePath } instance Show Criticals where show crits = case vars of [] -> concat ["\n", fname, ": No additional annotations are necessary.\n"] _ -> concat ["\n", fname, ": ", show numVars , " variable declarations suggested to be given a specification:\n" , unlines [ " " ++ declReport d | d <- M.toList dmapSlice ]] where fname = F.pfGetFilename . criticalsPf $ crits dmap = criticalsDeclarations crits uniqnameMap = criticalsUniqMap crits fromWhereMap = criticalsFromWhere crits vars = criticalsVariables crits unitVarName (UnitVar (v, _)) = v unitVarName (UnitParamVarUse (_, (v, _), _)) = v unitVarName _ = "<bad>" varNames = map unitVarName vars dmapSlice = M.filterWithKey (\ k _ -> k `elem` varNames) dmap numVars = M.size dmapSlice declReport (v, (_, ss)) = vfilename ++ " (" ++ showSpanStart ss ++ ") " ++ fromMaybe v (M.lookup v uniqnameMap) where vfilename = fromMaybe fname $ M.lookup v fromWhereMap showSpanStart (FU.SrcSpan l _) = show l instance Describe Criticals -- | Return a list of critical variables as UnitInfo list (most likely -- to be of the UnitVar constructor). runCriticalVariables :: UnitSolver [UnitInfo] runCriticalVariables = do cons <- usConstraints `fmap` get return $ criticalVariables cons -- | Infer one possible set of critical variables for a program. 
inferCriticalVariables :: UnitAnalysis Criticals inferCriticalVariables = do pf <- asks unitProgramFile mfs <- lift analysisModFiles (eVars, _) <- runInference runCriticalVariables let -- Use the module map derived from all of the included Camfort Mod files. (pfRenamed, mmap) = withCombinedModuleMap mfs . FA.initAnalysis . fmap UA.mkUnitAnnotation $ pf -- unique name -> src name across modules -- Map of all declarations dmap = extractDeclMap pfRenamed `M.union` combinedDeclMap mfs uniqnameMap = M.fromList [ (FA.varName e, FA.srcName e) | e@(F.ExpValue _ _ F.ValVariable{}) <- universeBi pfRenamed :: [F.Expression UA] -- going to ignore intrinsics here ] `M.union` (M.unions . map (M.fromList . map (\ (a, (b, _)) -> (b, a)) . M.toList) $ M.elems mmap) fromWhereMap = genUniqNameToFilenameMap mfs pure Criticals { criticalsPf = pf , criticalsVariables = eVars , criticalsDeclarations = dmap , criticalsUniqMap = uniqnameMap , criticalsFromWhere = fromWhereMap }
dorchard/camfort
src/Camfort/Specification/Units/Analysis/Criticals.hs
apache-2.0
5,103
0
22
1,333
980
558
422
71
1
module Analysis.Types.AnnotationTests where import Analysis.Types.Annotation import qualified Analysis.Types.Sorts as S import qualified Analysis.Types.Common as C import qualified Analysis.Types.CommonTests as CT import Test.QuickCheck.Gen import Test.QuickCheck import Control.Applicative import qualified Data.Map as M import Control.Monad.State import qualified Data.Set as D -- | Type that denotes that both of its arguments should -- have the same normal form data Equiv = Equiv Annotation Annotation deriving Show -- | Given an environment of variables and a sort, produces a -- randomly generated Annotation of the input sort arbitraryWithGammaAndSort :: M.Map Int S.Sort -> S.Sort -> Gen Annotation arbitraryWithGammaAndSort gamma' sort' = evalStateT (arbitrary' (0 :: Int) gamma' sort') 0 where arbitrary' pUn gamma sort = do sz <- get put (sz + 1) if sz > CT.maxTermSize then return $ C.emptyG sort else arbitrary'' pUn gamma sort arbitrary'' pUn gamma sort = do p <- lift $ choose ((0,99) :: (Int,Int)) let varRange = lift $ elements $ [1..3] lbl = lift $ elements $ map show [1..100] var <- case filter ((== sort) . snd) $ M.toList gamma of [] -> return Nothing vs -> Just . fst <$> lift (elements vs) ann' <- case sort of S.Eff -> error "Annotations cannot have effect in the Sort" S.Ann | p `mod` 2 < 1 -> return Empty S.Ann -> Label <$> lbl S.Arr a1 a2 -> do v <- varRange ann <- arbitrary' pUn (M.insert v a1 gamma) a2 return $ Abs (S.Var v a1) ann pOver <- (lift (choose (0,99) :: Gen Int)) ann' <- case var of _ | pOver `mod` 10 < 1 -> do ann1 <- arbitrary' pUn gamma (S.Arr S.Ann sort) ann2 <- arbitrary' pUn gamma sort return $ App ann1 ann2 Just v | pOver `mod` 3 > 0 -> return $ Var v _ -> return ann' pUn' <- lift $ choose (1,pUn + 7) let mkUnion = do u1 <- arbitrary' (pUn + 1) gamma' sort u2 <- arbitrary' (pUn + 1) gamma' sort return $ Union (Union u1 u2) ann' if pUn' < 5 then mkUnion else return ann' instance Arbitrary Annotation where arbitrary = arbitraryWithGammaAndSort M.empty S.Ann shrink x = case x of Empty -> [] Union a1 a2 -> shrinkMerge Union a1 a2 App a1 a2 -> shrinkMerge App a1 a2 Abs v e -> [Empty] ++ map (Abs v) (shrink e) _ -> [Empty] where shrinkMerge c a1 a2 = [Empty,a1,a2] ++ [c a1' a2' | (a1',a2') <- shrink (a1,a2)] instance Arbitrary Equiv where arbitrary = do e <- arbitrary Equiv e <$> randomRewrite e shrink x = [] randomReplace v a = do (ann, s) <- runStateT (foldAnnM (CT.randomReplaceAlg v) a) Nothing case s of Nothing -> return Nothing Just s' -> return $ Just (ann, s') betaEq a = do mRep <- randomReplace var a case mRep of Just (a', exp) -> return $ App (Abs (S.Var var S.Ann) a') exp Nothing -> return a where var = 1 + (maximum $ 0 : (D.toList $ D.map fst $ vars a)) randomRewrite ann = evalStateT (randomRewrite' ann) 1 where randomRewrite' e'' = do p <- get put (p + 1) e <- lift $ CT.maybeRuleProb (0,p) betaEq e'' case e of e'@(Union _ _) -> do e'' <- lift $ CT.unionEq p e' case e'' of Union a b -> Union <$> randomRewrite' a <*> randomRewrite' b -- The rewrite resulted in an empty set (is possible) _ -> return e'' (App (Abs v e1) e2) -> do (\x -> App (Abs v x)) <$> randomRewrite' e1 <*> randomRewrite' e2 Abs v e1 -> Abs v <$> randomRewrite' e1 App a b -> App <$> randomRewrite' a <*> randomRewrite' b Empty -> lift $ CT.maybeRuleProb (0,p) CT.identEq Empty a -> return a -- | Property that requires that two equivalent terms -- (upto the equality rules defined in the paper) are -- equal after normalization normalizeEquivalent (Equiv a b) = normalize a == normalize b
netogallo/polyvariant
test/Analysis/Types/AnnotationTests.hs
bsd-3-clause
4,239
0
20
1,348
1,533
764
769
93
9
{-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE OverloadedStrings #-} module Text.Scalar.Types ( URI , VersionURI , mkVersionURI , unVersionURI , PathID , mkPathID , unPathID , Path , PathResourceURI , PathBodyURI , PathTargetURI , PathComponent(..) , Page(..) , ScalarM , runScalarM , Scalar(..) , ScalarError(..) , PageOrderStrategy(..) , ScalarOptions(..) ) where import Text.Pandoc.Error (PandocError(..)) import Data.RDF (ParseFailure) import Data.Default import Data.Map (Map) import Control.Monad.Except import Control.Monad.Writer.Strict import qualified Data.Text as T type URI = T.Text newtype VersionURI = VersionURI { unVersionURI :: URI } deriving (Eq, Show, Ord) mkVersionURI :: URI -> VersionURI mkVersionURI uri = VersionURI preFragment where preFragment = T.takeWhile (/= '#') uri newtype PathID = PathID { unPathID :: T.Text } deriving (Eq, Show, Ord) mkPathID :: T.Text -> PathID mkPathID = PathID type Path = [VersionURI] data PathComponent = PathComponent { pathIndex :: Int , pathVersionURI :: VersionURI } deriving (Eq, Show, Ord) type PathResourceURI = URI type PathBodyURI = URI type PathTargetURI = URI data Page = Page { pageTitle :: T.Text , pageContent :: T.Text } deriving (Eq, Show) data PageOrderStrategy = IndexPath | Path URI | None deriving (Eq, Show) type ScalarM = ExceptT ScalarError (Writer String) data ScalarOptions = ScalarOptions { orderPagesBy :: PageOrderStrategy } deriving (Eq, Show) instance Default ScalarOptions where def = ScalarOptions { orderPagesBy = IndexPath } deriving instance Eq PandocError data ScalarError = ScalarError String | RdfError ParseFailure | FromPandoc PandocError deriving (Eq, Show) data Scalar = Scalar { scalarOptions :: ScalarOptions , scalarPaths :: Map PathID Path , scalarPages :: Map VersionURI Page } deriving (Eq, Show) runScalarM :: ScalarM a -> (Either ScalarError a, String) runScalarM = runWriter . runExceptT
corajr/scalar-convert
src/Text/Scalar/Types.hs
bsd-3-clause
2,568
0
9
943
580
349
231
72
1
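The ScalarM stack above is ExceptT over Writer, so a thrown ScalarError short-circuits the computation while the accumulated log survives runScalarM. A small sketch of that behaviour, assuming the module is importable as Text.Scalar.Types; the log text is a made-up placeholder.

import Control.Monad.Except (throwError)
import Control.Monad.Writer.Strict (tell)
import Text.Scalar.Types (ScalarError (..), runScalarM)

demo :: (Either ScalarError (), String)
demo = runScalarM $ do
  tell "parsed index page\n"      -- log entries accumulate in the Writer layer
  throwError (ScalarError "stop") -- ExceptT aborts, but the log is kept
-- demo == (Left (ScalarError "stop"), "parsed index page\n")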
module Text.Highlighter.Lexers.Felix (lexer) where import Text.Regex.PCRE.Light import Text.Highlighter.Types lexer :: Lexer lexer = Lexer { lName = "Felix" , lAliases = ["felix", "flx"] , lExtensions = [".flx", ".flxh"] , lMimetypes = ["text/x-felix"] , lStart = root' , lFlags = [multiline] } comment' :: TokenMatcher comment' = [ tok "//(.*?)\\n" (Arbitrary "Comment" :. Arbitrary "Single") , tokNext "/[*]" (Arbitrary "Comment" :. Arbitrary "Multiline") (GoTo comment2') ] modulename2' :: TokenMatcher modulename2' = [ anyOf whitespace' , tokNext "([a-zA-Z_]\\w*)" (Arbitrary "Name" :. Arbitrary "Namespace") (PopNum 2) ] modulename' :: TokenMatcher modulename' = [ anyOf whitespace' , tokNext "\\[" (Arbitrary "Punctuation") (DoAll [(GoTo modulename2'), (GoTo tvarlist')]) , tokNext "" (Arbitrary "Error") (GoTo modulename2') ] if0' :: TokenMatcher if0' = [ tokNext "^\\s*#if.*?(?<!\\\\)\\n" (Arbitrary "Comment") Push , tokNext "^\\s*#endif.*?(?<!\\\\)\\n" (Arbitrary "Comment") Pop , tok ".*?\\n" (Arbitrary "Comment") ] whitespace' :: TokenMatcher whitespace' = [ tok "\\n" (Arbitrary "Text") , tok "\\s+" (Arbitrary "Text") , anyOf comment' , tokNext "#\\s*if\\s+0" (Arbitrary "Comment" :. Arbitrary "Preproc") (GoTo if0') , tokNext "#" (Arbitrary "Comment" :. Arbitrary "Preproc") (GoTo macro') ] dqs' :: TokenMatcher dqs' = [ tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") Pop , tok "\\\\\\\\|\\\\\"|\\\\\\n" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape") , anyOf strings' ] sqs' :: TokenMatcher sqs' = [ tokNext "'" (Arbitrary "Literal" :. Arbitrary "String") Pop , tok "\\\\\\\\|\\\\'|\\\\\\n" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape") , anyOf strings' ] macro' :: TokenMatcher macro' = [ anyOf comment' , tokNext "(import|include)(\\s+)(<[^>]*?>)" (ByGroups [(Arbitrary "Comment" :. Arbitrary "Preproc"), (Arbitrary "Text"), (Arbitrary "Literal" :. Arbitrary "String")]) Pop , tokNext "(import|include)(\\s+)(\"[^\"]*?\")" (ByGroups [(Arbitrary "Comment" :. Arbitrary "Preproc"), (Arbitrary "Text"), (Arbitrary "Literal" :. Arbitrary "String")]) Pop , tokNext "(import|include)(\\s+)('[^']*?')" (ByGroups [(Arbitrary "Comment" :. Arbitrary "Preproc"), (Arbitrary "Text"), (Arbitrary "Literal" :. Arbitrary "String")]) Pop , tok "[^/\\n]+" (Arbitrary "Comment" :. Arbitrary "Preproc") , tok "/" (Arbitrary "Comment" :. Arbitrary "Preproc") , tok "(?<=\\\\)\\n" (Arbitrary "Comment" :. Arbitrary "Preproc") , tokNext "\\n" (Arbitrary "Comment" :. Arbitrary "Preproc") Pop ] operators' :: TokenMatcher operators' = [ tok "(and|not|in|is|isin|or|xor)\\b" (Arbitrary "Operator" :. Arbitrary "Word") , tok "!=|==|<<|>>|\\|\\||&&|[-\126+/*%=<>&^|.$]" (Arbitrary "Operator") ] tsqs' :: TokenMatcher tsqs' = [ tokNext "'''" (Arbitrary "Literal" :. Arbitrary "String") Pop , anyOf strings' , anyOf nl' ] comment2' :: TokenMatcher comment2' = [ tok "[^\\/*]" (Arbitrary "Comment" :. Arbitrary "Multiline") , tokNext "/[*]" (Arbitrary "Comment" :. Arbitrary "Multiline") Push , tokNext "[*]/" (Arbitrary "Comment" :. Arbitrary "Multiline") Pop , tok "[\\/*]" (Arbitrary "Comment" :. Arbitrary "Multiline") ] classname' :: TokenMatcher classname' = [ anyOf whitespace' , tokNext "[a-zA-Z_]\\w*" (Arbitrary "Name" :. Arbitrary "Class") Pop , tokNext "(?=\\{)" (Arbitrary "Text") Pop ] stringescape' :: TokenMatcher stringescape' = [ tok "\\\\([\\\\abfnrtv\"\\']|\\n|N{.*?}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})" (Arbitrary "Literal" :. Arbitrary "String" :. 
Arbitrary "Escape") ] tdqs' :: TokenMatcher tdqs' = [ tokNext "\"\"\"" (Arbitrary "Literal" :. Arbitrary "String") Pop , anyOf strings' , anyOf nl' ] nl' :: TokenMatcher nl' = [ tok "\\n" (Arbitrary "Literal" :. Arbitrary "String") ] funcname' :: TokenMatcher funcname' = [ anyOf whitespace' , tokNext "[a-zA-Z_]\\w*" (Arbitrary "Name" :. Arbitrary "Function") Pop , tokNext "(?=\\()" (Arbitrary "Text") Pop ] root' :: TokenMatcher root' = [ anyOf whitespace' , tokNext "(axiom|ctor|fun|gen|proc|reduce|union)\\b" (Arbitrary "Keyword") (GoTo funcname') , tokNext "(class|cclass|cstruct|obj|struct)\\b" (Arbitrary "Keyword") (GoTo classname') , tokNext "(instance|module|typeclass)\\b" (Arbitrary "Keyword") (GoTo modulename') , tok "(_|_deref|all|as|assert|attempt|call|callback|case|caseno|cclass|code|compound|ctypes|do|done|downto|elif|else|endattempt|endcase|endif|endmatch|enum|except|exceptions|expect|finally|for|forall|forget|fork|functor|goto|ident|if|incomplete|inherit|instance|interface|jump|lambda|loop|match|module|namespace|new|noexpand|nonterm|obj|of|open|parse|raise|regexp|reglex|regmatch|rename|return|the|then|to|type|typecase|typedef|typematch|typeof|upto|when|whilst|with|yield)\\b" (Arbitrary "Keyword") , tok "(_gc_pointer|_gc_type|body|comment|const|export|header|inline|lval|macro|noinline|noreturn|package|private|pod|property|public|publish|requires|todo|virtual|use)\\b" (Arbitrary "Name" :. Arbitrary "Decorator") , tok "(def|let|ref|val|var)\\b" (Arbitrary "Keyword" :. Arbitrary "Declaration") , tok "(unit|void|any|bool|byte|offset|address|caddress|cvaddress|vaddress|tiny|short|int|long|vlong|utiny|ushort|vshort|uint|ulong|uvlong|int8|int16|int32|int64|uint8|uint16|uint32|uint64|float|double|ldouble|complex|dcomplex|lcomplex|imaginary|dimaginary|limaginary|char|wchar|uchar|charp|charcp|ucharp|ucharcp|string|wstring|ustring|cont|array|varray|list|lvalue|opt|slice)\\b" (Arbitrary "Keyword" :. Arbitrary "Type") , tok "(false|true)\\b" (Arbitrary "Keyword" :. Arbitrary "Constant") , anyOf operators' , tok "0[xX]([0-9a-fA-F_]*\\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)[pP][+\\-]?[0-9_]+[lLfFdD]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float") , tok "[0-9_]+(\\.[0-9_]+[eE][+\\-]?[0-9_]+|\\.[0-9_]*|[eE][+\\-]?[0-9_]+)[lLfFdD]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float") , tok "\\.(0|[1-9][0-9_]*)([eE][+\\-]?[0-9_]+)?[lLfFdD]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float") , tok "0[Bb][01_]+([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?" (Arbitrary "Literal" :. Arbitrary "Number") , tok "0[0-7_]+([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Oct") , tok "0[xX][0-9a-fA-F_]+([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Hex") , tok "(0|[1-9][0-9_]*)([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer") , tokNext "([rR][cC]?|[cC][rR])\"\"\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo tdqs') , tokNext "([rR][cC]?|[cC][rR])'''" (Arbitrary "Literal" :. Arbitrary "String") (GoTo tsqs') , tokNext "([rR][cC]?|[cC][rR])\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo dqs') , tokNext "([rR][cC]?|[cC][rR])'" (Arbitrary "Literal" :. Arbitrary "String") (GoTo sqs') , tokNext "[cCfFqQwWuU]?\"\"\"" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', tdqs']) , tokNext "[cCfFqQwWuU]?'''" (Arbitrary "Literal" :. 
Arbitrary "String") (Combined [stringescape', tsqs']) , tokNext "[cCfFqQwWuU]?\"" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', dqs']) , tokNext "[cCfFqQwWuU]?'" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', sqs']) , tok "[\\[\\]{}:(),;?]" (Arbitrary "Punctuation") , tok "[a-zA-Z_]\\w*:>" (Arbitrary "Name" :. Arbitrary "Label") , tok "(_svc|while)\\b" (Arbitrary "Name" :. Arbitrary "Builtin") , tok "(root|self|this)\\b" (Arbitrary "Name" :. Arbitrary "Builtin" :. Arbitrary "Pseudo") , tok "[a-zA-Z_]\\w*" (Arbitrary "Name") ] strings' :: TokenMatcher strings' = [ tok "%(\\([a-zA-Z0-9]+\\))?[-#0 +]*([0-9]+|[*])?(\\.([0-9]+|[*]))?[hlL]?[diouxXeEfFgGcrs%]" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Interpol") , tok "[^\\\\\\'\"%\\n]+" (Arbitrary "Literal" :. Arbitrary "String") , tok "[\\'\"\\\\]" (Arbitrary "Literal" :. Arbitrary "String") , tok "%" (Arbitrary "Literal" :. Arbitrary "String") ] tvarlist' :: TokenMatcher tvarlist' = [ anyOf whitespace' , anyOf operators' , tokNext "\\[" (Arbitrary "Punctuation") Push , tokNext "\\]" (Arbitrary "Punctuation") Pop , tok "," (Arbitrary "Punctuation") , tok "(with|where)\\b" (Arbitrary "Keyword") , tok "[a-zA-Z_]\\w*" (Arbitrary "Name") ]
chemist/highlighter
src/Text/Highlighter/Lexers/Felix.hs
bsd-3-clause
8,770
0
12
1,335
2,178
1,099
1,079
139
1
module Aws.S3.Commands.Multipart where import Aws.Aws import Aws.Core import Aws.S3.Core import Control.Applicative import Control.Arrow (second) import Control.Monad.Trans.Resource import Crypto.Hash import qualified Crypto.Hash.MD5 as MD5 import Data.ByteString.Char8 ({- IsString -}) import Data.Conduit import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL import Data.Maybe import Text.XML.Cursor (($/)) import qualified Data.ByteString.Char8 as B8 import qualified Data.ByteString.Lazy as BL import qualified Data.CaseInsensitive as CI import qualified Data.Map as M import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Network.HTTP.Conduit as HTTP import qualified Network.HTTP.Types as HTTP import Text.Printf(printf) import qualified Text.XML as XML {- Aws supports following 6 api for Multipart-Upload. Currently this code does not support number 3 and 6. 1. Initiate Multipart Upload 2. Upload Part 3. Upload Part - Copy 4. Complete Multipart Upload 5. Abort Multipart Upload 6. List Parts -} data InitiateMultipartUpload = InitiateMultipartUpload { imuBucket :: Bucket , imuObjectName :: Object , imuCacheControl :: Maybe T.Text , imuContentDisposition :: Maybe T.Text , imuContentEncoding :: Maybe T.Text , imuContentType :: Maybe T.Text , imuExpires :: Maybe Int , imuMetadata :: [(T.Text,T.Text)] , imuStorageClass :: Maybe StorageClass , imuWebsiteRedirectLocation :: Maybe T.Text , imuAcl :: Maybe CannedAcl , imuServerSideEncryption :: Maybe ServerSideEncryption , imuAutoMakeBucket :: Bool -- ^ Internet Archive S3 nonstandard extension } deriving (Show) postInitiateMultipartUpload :: Bucket -> T.Text -> InitiateMultipartUpload postInitiateMultipartUpload b o = InitiateMultipartUpload b o Nothing Nothing Nothing Nothing Nothing [] Nothing Nothing Nothing Nothing False data InitiateMultipartUploadResponse = InitiateMultipartUploadResponse { imurBucket :: Bucket , imurKey :: T.Text , imurUploadId :: T.Text } -- | ServiceConfiguration: 'S3Configuration' instance SignQuery InitiateMultipartUpload where type ServiceConfiguration InitiateMultipartUpload = S3Configuration signQuery InitiateMultipartUpload {..} = s3SignQuery S3Query { s3QMethod = Post , s3QBucket = Just $ T.encodeUtf8 imuBucket , s3QObject = Just $ T.encodeUtf8 $ imuObjectName , s3QSubresources = HTTP.toQuery[ ("uploads" :: B8.ByteString , Nothing :: Maybe B8.ByteString)] , s3QQuery = [] , s3QContentType = T.encodeUtf8 <$> imuContentType , s3QContentMd5 = Nothing , s3QAmzHeaders = map (second T.encodeUtf8) $ catMaybes [ ("x-amz-acl",) <$> writeCannedAcl <$> imuAcl , ("x-amz-storage-class",) <$> writeStorageClass <$> imuStorageClass , ("x-amz-website-redirect-location",) <$> imuWebsiteRedirectLocation , ("x-amz-server-side-encryption",) <$> writeServerSideEncryption <$> imuServerSideEncryption , if imuAutoMakeBucket then Just ("x-amz-auto-make-bucket", "1") else Nothing ] ++ map( \x -> (CI.mk . T.encodeUtf8 $ T.concat ["x-amz-meta-", fst x], snd x)) imuMetadata , s3QOtherHeaders = map (second T.encodeUtf8) $ catMaybes [ ("Expires",) . T.pack . 
show <$> imuExpires , ("Cache-Control",) <$> imuCacheControl , ("Content-Disposition",) <$> imuContentDisposition , ("Content-Encoding",) <$> imuContentEncoding ] , s3QRequestBody = Nothing } instance ResponseConsumer r InitiateMultipartUploadResponse where type ResponseMetadata InitiateMultipartUploadResponse = S3Metadata responseConsumer _ = s3XmlResponseConsumer parse where parse cursor = do bucket <- force "Missing Bucket Name" $ cursor $/ elContent "Bucket" key <- force "Missing Key" $ cursor $/ elContent "Key" uploadId <- force "Missing UploadID" $ cursor $/ elContent "UploadId" return InitiateMultipartUploadResponse{ imurBucket = bucket , imurKey = key , imurUploadId = uploadId } instance Transaction InitiateMultipartUpload InitiateMultipartUploadResponse instance AsMemoryResponse InitiateMultipartUploadResponse where type MemoryResponse InitiateMultipartUploadResponse = InitiateMultipartUploadResponse loadToMemory = return ---------------------------------- data UploadPart = UploadPart { upObjectName :: T.Text , upBucket :: Bucket , upPartNumber :: Integer , upUploadId :: T.Text , upContentType :: Maybe B8.ByteString , upContentMD5 :: Maybe (Digest MD5) , upServerSideEncryption :: Maybe ServerSideEncryption , upRequestBody :: HTTP.RequestBody } uploadPart :: Bucket -> T.Text -> Integer -> T.Text -> HTTP.RequestBody -> UploadPart uploadPart bucket obj p i body = UploadPart obj bucket p i Nothing Nothing Nothing body data UploadPartResponse = UploadPartResponse { uprVersionId :: Maybe T.Text } deriving (Show) -- | ServiceConfiguration: 'S3Configuration' instance SignQuery UploadPart where type ServiceConfiguration UploadPart = S3Configuration signQuery UploadPart {..} = s3SignQuery S3Query { s3QMethod = Put , s3QBucket = Just $ T.encodeUtf8 upBucket , s3QObject = Just $ T.encodeUtf8 upObjectName , s3QSubresources = HTTP.toQuery[ ("partNumber" :: B8.ByteString , Just (T.pack (show upPartNumber)) :: Maybe T.Text) , ("uploadId" :: B8.ByteString, Just upUploadId :: Maybe T.Text) ] , s3QQuery = [] , s3QContentType = upContentType , s3QContentMd5 = upContentMD5 , s3QAmzHeaders = map (second T.encodeUtf8) $ catMaybes [ ("x-amz-server-side-encryption",) <$> writeServerSideEncryption <$> upServerSideEncryption ] , s3QOtherHeaders = [] , s3QRequestBody = Just upRequestBody } instance ResponseConsumer UploadPart UploadPartResponse where type ResponseMetadata UploadPartResponse = S3Metadata responseConsumer _ = s3ResponseConsumer $ \resp -> do let vid = T.decodeUtf8 `fmap` lookup "x-amz-version-id" (HTTP.responseHeaders resp) return $ UploadPartResponse vid instance Transaction UploadPart UploadPartResponse instance AsMemoryResponse UploadPartResponse where type MemoryResponse UploadPartResponse = UploadPartResponse loadToMemory = return ---------------------------- data CompleteMultipartUpload = CompleteMultipartUpload { cmuBucket :: Bucket , cmuObjectName :: Object , cmuUploadId :: T.Text , cmuPartNumberAndEtags :: [(Integer,T.Text)] , cmuExpiration :: Maybe T.Text , cmuServerSideEncryption :: Maybe T.Text , cmuServerSideEncryptionCustomerAlgorithm :: Maybe T.Text , cmuVersionId :: Maybe T.Text } deriving (Show) postCompleteMultipartUpload :: Bucket -> T.Text -> T.Text -> [(Integer,T.Text)]-> CompleteMultipartUpload postCompleteMultipartUpload b o i p = CompleteMultipartUpload b o i p Nothing Nothing Nothing Nothing data CompleteMultipartUploadResponse = CompleteMultipartUploadResponse { cmurLocation :: T.Text , cmurBucket :: Bucket , cmurKey :: T.Text , cmurETag :: T.Text } -- | 
ServiceConfiguration: 'S3Configuration' instance SignQuery CompleteMultipartUpload where type ServiceConfiguration CompleteMultipartUpload = S3Configuration signQuery CompleteMultipartUpload {..} = s3SignQuery S3Query { s3QMethod = Post , s3QBucket = Just $ T.encodeUtf8 cmuBucket , s3QObject = Just $ T.encodeUtf8 cmuObjectName , s3QSubresources = HTTP.toQuery[ ("uploadId" :: B8.ByteString, Just cmuUploadId :: Maybe T.Text) ] , s3QQuery = [] , s3QContentType = Nothing , s3QContentMd5 = Nothing , s3QAmzHeaders = catMaybes [ ("x-amz-expiration",) <$> (T.encodeUtf8 <$> cmuExpiration) , ("x-amz-server-side-encryption",) <$> (T.encodeUtf8 <$> cmuServerSideEncryption) , ("x-amz-server-side-encryption-customer-algorithm",) <$> (T.encodeUtf8 <$> cmuServerSideEncryptionCustomerAlgorithm) , ("x-amz-version-id",) <$> (T.encodeUtf8 <$> cmuVersionId) ] , s3QOtherHeaders = [] , s3QRequestBody = Just $ HTTP.RequestBodyLBS reqBody } where reqBody = XML.renderLBS XML.def XML.Document { XML.documentPrologue = XML.Prologue [] Nothing [] , XML.documentRoot = root , XML.documentEpilogue = [] } root = XML.Element { XML.elementName = "CompleteMultipartUpload" , XML.elementAttributes = M.empty , XML.elementNodes = (partNode <$> cmuPartNumberAndEtags) } partNode (partNumber, etag) = XML.NodeElement XML.Element { XML.elementName = "Part" , XML.elementAttributes = M.empty , XML.elementNodes = [keyNode (T.pack (show partNumber)),etagNode etag] } etagNode = toNode "ETag" keyNode = toNode "PartNumber" toNode name content = XML.NodeElement XML.Element { XML.elementName = name , XML.elementAttributes = M.empty , XML.elementNodes = [XML.NodeContent content] } instance ResponseConsumer r CompleteMultipartUploadResponse where type ResponseMetadata CompleteMultipartUploadResponse = S3Metadata responseConsumer _ = s3XmlResponseConsumer parse where parse cursor = do location <- force "Missing Location" $ cursor $/ elContent "Location" bucket <- force "Missing Bucket Name" $ cursor $/ elContent "Bucket" key <- force "Missing Key" $ cursor $/ elContent "Key" etag <- force "Missing ETag" $ cursor $/ elContent "ETag" return CompleteMultipartUploadResponse{ cmurLocation = location , cmurBucket = bucket , cmurKey = key , cmurETag = etag } instance Transaction CompleteMultipartUpload CompleteMultipartUploadResponse instance AsMemoryResponse CompleteMultipartUploadResponse where type MemoryResponse CompleteMultipartUploadResponse = CompleteMultipartUploadResponse loadToMemory = return ---------------------------- data AbortMultipartUpload = AbortMultipartUpload { amuBucket :: Bucket , amuObjectName :: Object , amuUploadId :: T.Text } deriving (Show) postAbortMultipartUpload :: Bucket -> T.Text -> T.Text -> AbortMultipartUpload postAbortMultipartUpload b o i = AbortMultipartUpload b o i data AbortMultipartUploadResponse = AbortMultipartUploadResponse { } -- | ServiceConfiguration: 'S3Configuration' instance SignQuery AbortMultipartUpload where type ServiceConfiguration AbortMultipartUpload = S3Configuration signQuery AbortMultipartUpload {..} = s3SignQuery S3Query { s3QMethod = Delete , s3QBucket = Just $ T.encodeUtf8 amuBucket , s3QObject = Just $ T.encodeUtf8 amuObjectName , s3QSubresources = HTTP.toQuery[ ("uploadId" :: B8.ByteString, Just amuUploadId :: Maybe T.Text) ] , s3QQuery = [] , s3QContentType = Nothing , s3QContentMd5 = Nothing , s3QAmzHeaders = [] , s3QOtherHeaders = [] , s3QRequestBody = Nothing } instance ResponseConsumer r AbortMultipartUploadResponse where type ResponseMetadata AbortMultipartUploadResponse = S3Metadata 
responseConsumer _ = s3XmlResponseConsumer parse where parse _cursor = return AbortMultipartUploadResponse {} instance Transaction AbortMultipartUpload AbortMultipartUploadResponse instance AsMemoryResponse AbortMultipartUploadResponse where type MemoryResponse AbortMultipartUploadResponse = AbortMultipartUploadResponse loadToMemory = return ---------------------------- getUploadId :: Configuration -> S3Configuration NormalQuery -> HTTP.Manager -> T.Text -> T.Text -> ResourceT IO T.Text getUploadId cfg s3cfg mgr bucket object = do InitiateMultipartUploadResponse { imurBucket = _bucket , imurKey = _object' , imurUploadId = uploadId } <- pureAws cfg s3cfg mgr $ postInitiateMultipartUpload bucket object return uploadId sendEtag :: Configuration -> S3Configuration NormalQuery -> HTTP.Manager -> T.Text -> T.Text -> T.Text -> [String] -> ResourceT IO () sendEtag cfg s3cfg mgr bucket object uploadId etags = do _ <- pureAws cfg s3cfg mgr $ postCompleteMultipartUpload bucket object uploadId (zip [1..] (map T.pack etags)) return () bstr2str :: B8.ByteString -> String bstr2str bstr = foldr1 (++) $ map toHex $ B8.unpack bstr where toHex :: Char -> String toHex chr = printf "%02x" chr putConduit :: MonadResource m => Configuration -> S3Configuration NormalQuery -> HTTP.Manager -> T.Text -> T.Text -> T.Text -> Conduit B8.ByteString m String putConduit cfg s3cfg mgr bucket object uploadId = loop 1 where loop n = do v' <- await case v' of Just v -> do let str= (BL.fromStrict v) _ <- liftResourceT $ pureAws cfg s3cfg mgr $ uploadPart bucket object n uploadId (HTTP.requestBodySource (BL.length str) (CB.sourceLbs str) ) let etag= bstr2str $ MD5.hash v yield etag loop (n+1) Nothing -> return () chunkedConduit :: (MonadResource m) => Integer -> Conduit B8.ByteString m B8.ByteString chunkedConduit size = do loop 0 "" where loop cnt str = do line' <- await case line' of Nothing -> do yield str return () Just line -> do let len = (B8.length line)+cnt let newStr = B8.concat [str, line] if len >= (fromIntegral size) then do yield newStr loop 0 "" else loop len newStr multipartUpload :: Configuration -> S3Configuration NormalQuery -> HTTP.Manager -> T.Text -> T.Text -> Conduit () (ResourceT IO) B8.ByteString -> Integer -> ResourceT IO () multipartUpload cfg s3cfg mgr bucket object src chunkSize = do uploadId <- getUploadId cfg s3cfg mgr bucket object etags <- src $= chunkedConduit chunkSize $= putConduit cfg s3cfg mgr bucket object uploadId $$ CL.consume sendEtag cfg s3cfg mgr bucket object uploadId etags
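-- Added usage sketch (not part of the original file): a minimal driver for the
-- multipartUpload function above. The bucket, object key, file path and part
-- size are made-up example values; credentials are assumed to come from the
-- environment via Aws.baseConfiguration.
{-# LANGUAGE OverloadedStrings #-}
import qualified Aws
import Aws.S3.Commands.Multipart (multipartUpload)
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.Conduit.Binary as CB
import qualified Network.HTTP.Conduit as HTTP

main :: IO ()
main = do
    cfg <- Aws.baseConfiguration
    mgr <- HTTP.newManager HTTP.tlsManagerSettings
    runResourceT $
        multipartUpload cfg Aws.defServiceConfig mgr
                        "my-bucket" "backups/big-file.bin"
                        (CB.sourceFile "./big-file.bin")
                        (8 * 1024 * 1024)  -- 8 MiB per part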
fpco/aws
Aws/S3/Commands/Multipart.hs
bsd-3-clause
15,895
1
21
4,753
3,541
1,911
1,630
-1
-1
module Hrpg.Framework.Chance
    ( PercentChance (..)
    , roll
    ) where

import Control.Monad.Random

newtype PercentChance = PercentChance Int deriving Show

instance Num PercentChance where
    PercentChance x + PercentChance y = PercentChance (x + y)
    PercentChance x - PercentChance y = PercentChance (x - y)
    PercentChance x * PercentChance y = PercentChance (x * y)
    abs (PercentChance p) = PercentChance (abs p)
    signum (PercentChance p) = PercentChance (signum p)
    fromInteger i = PercentChance (fromInteger i)

roll :: MonadRandom m => PercentChance -> m Bool
roll (PercentChance chance) = do
    v <- getRandomR (0, 100)
    return (v <= chance)
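-- Added usage sketch (not part of the original file): draws 1000 rolls at a
-- 30% chance using evalRandIO from the MonadRandom package; the counts are
-- made-up example values.
import Control.Monad (replicateM)
import Control.Monad.Random (evalRandIO)
import Hrpg.Framework.Chance

main :: IO ()
main = do
    hits <- evalRandIO $ replicateM 1000 (roll (PercentChance 30))
    putStrLn $ "successes out of 1000: " ++ show (length (filter id hits))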
cwmunn/hrpg
src/Hrpg/Framework/Chance.hs
bsd-3-clause
709
0
9
176
250
124
126
16
1
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack41 #-}
module Games.Chaos2010.Database.Creating_new_game_table where

import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout

type Creating_new_game_table =
    Record (HCons (LVPair Creating_new_game (Expr Bool)) HNil)

creating_new_game_table :: Table Creating_new_game_table
creating_new_game_table = baseTable "creating_new_game_table"
JakeWheat/Chaos-2010
Games/Chaos2010/Database/Creating_new_game_table.hs
bsd-3-clause
446
0
11
43
70
41
29
9
1
{-# LANGUAGE TupleSections #-} import CoreSyn import CoreUtils import Id import Type import MkCore import CallArity (callArityRHS) import MkId import SysTools import DynFlags import ErrUtils import Outputable import TysWiredIn import Literal import GHC import Control.Monad import Control.Monad.IO.Class import System.Environment( getArgs ) import VarSet import PprCore import Unique import UniqSet import CoreLint import FastString -- Build IDs. use mkTemplateLocal, more predictable than proper uniques go, go2, x, d, n, y, z, scrutf, scruta :: Id [go, go2, x,d, n, y, z, scrutf, scruta, f] = mkTestIds (words "go go2 x d n y z scrutf scruta f") [ mkVisFunTys [intTy, intTy] intTy , mkVisFunTys [intTy, intTy] intTy , intTy , mkVisFunTys [intTy] intTy , mkVisFunTys [intTy] intTy , intTy , intTy , mkVisFunTys [boolTy] boolTy , boolTy , mkVisFunTys [intTy, intTy] intTy -- protoypical external function ] exprs :: [(String, CoreExpr)] exprs = [ ("go2",) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x]) $ go `mkLApps` [0, 0] , ("nested_go2",) $ mkRFun go [x] (mkLetNonRec n (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y)) $ mkACase (Var n) $ mkFun go2 [y] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x] )$ Var go2 `mkApps` [mkLit 1] ) $ go `mkLApps` [0, 0] , ("d0 (go 2 would be bad)",) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var f `mkApps` [ Var d `mkVarApps` [x], Var d `mkVarApps` [x] ]) $ go `mkLApps` [0, 0] , ("go2 (in case crut)",) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x]) $ Case (go `mkLApps` [0, 0]) z intTy [(DEFAULT, [], Var f `mkVarApps` [z,z])] , ("go2 (in function call)",) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x]) $ f `mkLApps` [0] `mkApps` [go `mkLApps` [0, 0]] , ("go2 (using surrounding interesting let)",) $ mkLetNonRec n (f `mkLApps` [0]) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x]) $ Var f `mkApps` [n `mkLApps` [0], go `mkLApps` [0, 0]] , ("go2 (using surrounding boring let)",) $ mkLetNonRec z (mkLit 0) $ mkRFun go [x] (mkLetNonRec d (mkACase (Var go `mkVarApps` [x]) (mkLams [y] $ Var y) ) $ mkLams [z] $ Var d `mkVarApps` [x]) $ Var f `mkApps` [Var z, go `mkLApps` [0, 0]] , ("two calls, one from let and from body (d 1 would be bad)",) $ mkLetNonRec d (mkACase (mkLams [y] $ mkLit 0) (mkLams [y] $ mkLit 0)) $ mkFun go [x,y] (mkVarApps (Var d) [x]) $ mkApps (Var d) [mkLApps go [1,2]] , ("a thunk in a recursion (d 1 would be bad)",) $ mkRLet n (mkACase (mkLams [y] $ mkLit 0) (Var n)) $ mkRLet d (mkACase (mkLams [y] $ mkLit 0) (Var d)) $ Var n `mkApps` [d `mkLApps` [0]] , ("two thunks, one called multiple times (both arity 1 would be bad!)",) $ mkLetNonRec n (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $ mkLetNonRec d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $ Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]] , ("two functions, not thunks",) $ mkLetNonRec go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $ mkLetNonRec go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $ Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0] , ("a thunk, called multiple times via a forking recursion (d 1 
would be bad!)",) $ mkLetNonRec d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $ mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (Var d))) $ go2 `mkLApps` [0,1] , ("a function, one called multiple times via a forking recursion",) $ mkLetNonRec go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $ mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (go `mkLApps` [0]))) $ go2 `mkLApps` [0,1] , ("two functions (recursive)",) $ mkRLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x]))) $ mkRLet go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x]))) $ Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0] , ("mutual recursion (thunks), called multiple times (both arity 1 would be bad!)",) $ Let (Rec [ (n, mkACase (mkLams [y] $ mkLit 0) (Var d)) , (d, mkACase (mkLams [y] $ mkLit 0) (Var n))]) $ Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]] , ("mutual recursion (functions), but no thunks",) $ Let (Rec [ (go, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x]))) , (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $ Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0] , ("mutual recursion (functions), one boring (d 1 would be bad)",) $ mkLetNonRec d (f `mkLApps` [0]) $ Let (Rec [ (go, mkLams [x, y] (Var d `mkApps` [go2 `mkLApps` [1,2]])) , (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $ Var d `mkApps` [go2 `mkLApps` [0,1]] , ("a thunk (non-function-type), called twice, still calls once",) $ mkLetNonRec d (f `mkLApps` [0]) $ mkLetNonRec x (d `mkLApps` [1]) $ Var f `mkVarApps` [x, x] , ("a thunk (function type), called multiple times, still calls once",) $ mkLetNonRec d (f `mkLApps` [0]) $ mkLetNonRec n (Var f `mkApps` [d `mkLApps` [1]]) $ mkLams [x] $ Var n `mkVarApps` [x] , ("a thunk (non-function-type), in mutual recursion, still calls once (d 1 would be good)",) $ mkLetNonRec d (f `mkLApps` [0]) $ Let (Rec [ (x, Var d `mkApps` [go `mkLApps` [1,2]]) , (go, mkLams [x] $ mkACase (mkLams [z] $ Var x) (Var go `mkVarApps` [x]) ) ]) $ Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]] , ("a thunk (non-function-type), in mutual recursion, causes many calls (d 1 would be bad)",) $ mkLetNonRec d (f `mkLApps` [0]) $ Let (Rec [ (x, Var go `mkApps` [go `mkLApps` [1,2], go `mkLApps` [1,2]]) , (go, mkLams [x] $ mkACase (Var d) (Var go `mkVarApps` [x]) ) ]) $ Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]] , ("a thunk (function type), in mutual recursion, still calls once (d 1 would be good)",) $ mkLetNonRec d (f `mkLApps` [0]) $ Let (Rec [ (n, Var go `mkApps` [d `mkLApps` [1]]) , (go, mkLams [x] $ mkACase (Var n) (Var go `mkApps` [Var n `mkVarApps` [x]]) ) ]) $ Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]] , ("a thunk (non-function-type) co-calls with the body (d 1 would be bad)",) $ mkLetNonRec d (f `mkLApps` [0]) $ mkLetNonRec x (d `mkLApps` [1]) $ Var d `mkVarApps` [x] ] main = do [libdir] <- getArgs runGhc (Just libdir) $ do getSessionDynFlags >>= setSessionDynFlags . 
flip gopt_set Opt_SuppressUniques dflags <- getSessionDynFlags liftIO $ forM_ exprs $ \(n,e) -> do case lintExpr dflags [f,scrutf,scruta] e of Just msg -> putMsg dflags (msg $$ text "in" <+> text n) Nothing -> return () putMsg dflags (text n Outputable.<> char ':') -- liftIO $ putMsg dflags (ppr e) let e' = callArityRHS e let bndrs = nonDetEltsUniqSet (allBoundIds e') -- It should be OK to use nonDetEltsUniqSet here, if it becomes a -- problem we should use DVarSet -- liftIO $ putMsg dflags (ppr e') forM_ bndrs $ \v -> putMsg dflags $ nest 4 $ ppr v <+> ppr (idCallArity v) -- Utilities mkLApps :: Id -> [Integer] -> CoreExpr mkLApps v = mkApps (Var v) . map mkLit mkACase = mkIfThenElse (mkVarApps (Var scrutf) [scruta]) mkTestId :: Int -> String -> Type -> Id mkTestId i s ty = mkSysLocal (mkFastString s) (mkBuiltinUnique i) ty mkTestIds :: [String] -> [Type] -> [Id] mkTestIds ns tys = zipWith3 mkTestId [0..] ns tys mkRLet :: Id -> CoreExpr -> CoreExpr -> CoreExpr mkRLet v rhs body = mkLetRec [(v, rhs)] body mkFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr mkFun v xs rhs body = mkLetNonRec v (mkLams xs rhs) body mkRFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr mkRFun v xs rhs body = mkRLet v (mkLams xs rhs) body mkLit :: Integer -> CoreExpr mkLit i = Lit (mkLitInteger i intTy) -- Collects all let-bound IDs allBoundIds :: CoreExpr -> VarSet allBoundIds (Let (NonRec v rhs) body) = allBoundIds rhs `unionVarSet` allBoundIds body `extendVarSet` v allBoundIds (Let (Rec binds) body) = allBoundIds body `unionVarSet` unionVarSets [ allBoundIds rhs `extendVarSet` v | (v, rhs) <- binds ] allBoundIds (App e1 e2) = allBoundIds e1 `unionVarSet` allBoundIds e2 allBoundIds (Case scrut _ _ alts) = allBoundIds scrut `unionVarSet` unionVarSets [ allBoundIds e | (_, _ , e) <- alts ] allBoundIds (Lam _ e) = allBoundIds e allBoundIds (Tick _ e) = allBoundIds e allBoundIds (Cast e _) = allBoundIds e allBoundIds _ = emptyVarSet
sdiehl/ghc
testsuite/tests/callarity/unittest/CallArity1.hs
bsd-3-clause
10,034
0
25
2,954
4,302
2,379
1,923
200
2
module Formats
    ( defaultFormat
    , colorizedFormat
    , bibtexFormat
    , RefFormatter
    ) where

import BibTex (Reference(..))
import Search (Filter)
import System.Console.ANSI

type RefFormatter = [Filter] -> Reference -> [String]

defaultFormat :: RefFormatter
defaultFormat fs r = key : fields
    where key = getName r ++ ":"
          fields = map fieldFormat $ getFields r
          fieldFormat f = "  " ++ fst f ++ ": " ++ snd f

colorizedFormat :: RefFormatter
colorizedFormat fs r = key : fields
    where key = colorize Green $ getName r ++ ":"
          fields = map fieldFormat $ getFields r
          fieldFormat f = "  " ++ (colorize Red $ fst f ++ ": ") ++ snd f

bibtexFormat :: RefFormatter
bibtexFormat fs r = header : body ++ ["}"]
    where header = "@" ++ getType r ++ "{" ++ getName r ++ ","
          -- every field except the last gets a trailing comma; the last is
          -- kept as-is (the original `map (++ ",") $ init body'` silently
          -- dropped the final field)
          body | null body' = []
               | otherwise  = map (++ ",") (init body') ++ [last body']
          body' = map fieldFormat $ getFields r
          fieldFormat (k,v) = "  " ++ k ++ " = {" ++ v ++ "}"

colorize :: Color -> String -> String
colorize c s = colorCode ++ s ++ reset
    where colorCode = setSGRCode [SetColor Foreground Vivid c]
          reset = setSGRCode [Reset]
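-- Added usage sketch (not part of the original file): prints a BibTeX entry
-- for a made-up reference. The Reference construction is an assumption: it
-- presumes Reference is a plain record whose fields are the
-- getType/getName/getFields accessors used above.
import BibTex (Reference(..))
import Formats (bibtexFormat)

main :: IO ()
main = mapM_ putStrLn (bibtexFormat [] ref)
  where
    ref = Reference { getType   = "article"            -- assumed field names
                    , getName   = "knuth1984"
                    , getFields = [ ("author", "Donald E. Knuth")
                                  , ("year", "1984")
                                  ]
                    }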
palaga/HsBible
Formats.hs
bsd-3-clause
1,232
0
12
395
410
214
196
29
1
-- | Take a single bitcode file and pump it through opt -S and parse
-- out the named type definitions (structs, unions, and classes).
-- Sort them and print them.
--
-- Then use llvm-data-interop to parse the bitcode and print its
-- sorted type list.  It will be apparent if types were properly
-- unified or not.
module Main ( main ) where

import Control.Monad ( forM_ )
import Data.Attoparsec.ByteString.Char8
import Data.Conduit
import Data.Conduit.Attoparsec
import Data.Conduit.Process as P
import Data.List ( sort )
import Data.Monoid
import Data.ByteString.Char8 ( ByteString )
import qualified Data.ByteString.Char8 as BS
import System.Environment ( getArgs )
import Text.Printf

import LLVM.Analysis
-- import LLVM.Analysis.Util.Environment
import LLVM.Parse

llvmTypesParser :: Parser [ByteString]
llvmTypesParser = do
  skipWhile (/='%')
  res <- many1 typeLineParser
  skipWhile (const True)
  return res

typePrefix :: Parser ByteString
typePrefix = choice [ string (BS.pack "%struct")
                    , string (BS.pack "%union")
                    , string (BS.pack "%class")
                    ]

typeLineParser :: Parser ByteString
typeLineParser = do
  pfx <- typePrefix
  suffix <- takeTill (=='\n')
  endOfLine
  return $ pfx `mappend` suffix

main :: IO ()
main = do
  [bcfile] <- getArgs
  -- First, get the output of opt.  This uses process-conduit and
  -- attoparsec-conduit to parse the output of opt in constant space.
  typeLines <- runResourceT $ do
    sourceProcess (P.proc "opt" ["-S", bcfile]) $$ sinkParser llvmTypesParser
  let typeDefs = sort typeLines
  _ <- printf "opt types: %d\n" (length typeDefs)
  forM_ typeDefs $ \td -> BS.putStrLn (BS.pack " " `mappend` td)
  m <- parseLLVMFile defaultParserOptions bcfile
  let ts = sort $ map show $ filter isStructType (moduleRetainedTypes m)
  _ <- printf "unified types: %d\n" (length ts)
  forM_ ts $ \td -> putStrLn (" " ++ td)

isStructType :: Type -> Bool
isStructType t =
  case t of
    TypeStruct (Right _) _ _ -> True
    _ -> False
travitch/llvm-tools
tools/TypeUnificationCheck.hs
bsd-3-clause
2,040
0
15
421
538
281
257
47
2
module Utils.Feeds where

import Hakyll

myFeedConfiguration :: FeedConfiguration
myFeedConfiguration = FeedConfiguration
    { feedTitle       = "自由研究帳"
    , feedDescription = "Haskellや読んだ論文のまとめ"
    , feedAuthorName  = "Kiripon"
    , feedAuthorEmail = ""
    , feedRoot        = "http://kiripon.net"
    }
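-- Added usage sketch (not part of the original file): the shape in which a
-- FeedConfiguration is typically consumed in a Hakyll site. The "posts/*"
-- pattern and the "content" snapshot name are made-up and assume posts were
-- previously stored with saveSnapshot "content".
{-# LANGUAGE OverloadedStrings #-}
import Hakyll
import Utils.Feeds (myFeedConfiguration)

feedRules :: Rules ()
feedRules =
    create ["atom.xml"] $ do
        route idRoute
        compile $ do
            let feedCtx = defaultContext `mappend` bodyField "description"
            posts <- recentFirst =<< loadAllSnapshots "posts/*" "content"
            renderAtom myFeedConfiguration feedCtx posts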
kiripon/gh-pages
Utils/Feeds.hs
bsd-3-clause
312
0
6
44
51
33
18
9
1
{-# LANGUAGE CPP #-} #ifdef DEBUG_TRACETREE {-# LANGUAGE FlexibleInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} #endif module Distribution.Solver.Modular.Solver ( SolverConfig(..) , solve ) where import Data.Map as M import Data.List as L import Data.Set as S import Distribution.Verbosity import Distribution.Version import Distribution.Compiler (CompilerInfo) import Distribution.Solver.Types.PackagePath import Distribution.Solver.Types.PackagePreferences import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.Settings import Distribution.Solver.Types.Variable import Distribution.Solver.Modular.Assignment import Distribution.Solver.Modular.Builder import Distribution.Solver.Modular.Cycles import Distribution.Solver.Modular.Dependency import Distribution.Solver.Modular.Explore import Distribution.Solver.Modular.Index import Distribution.Solver.Modular.Log import Distribution.Solver.Modular.Message import Distribution.Solver.Modular.Package import qualified Distribution.Solver.Modular.Preference as P import Distribution.Solver.Modular.Validate import Distribution.Solver.Modular.Linking import Distribution.Solver.Modular.PSQ (PSQ) import Distribution.Solver.Modular.Tree import qualified Distribution.Solver.Modular.PSQ as PSQ import Distribution.Simple.Setup (BooleanFlag(..)) #ifdef DEBUG_TRACETREE import qualified Distribution.Solver.Modular.ConflictSet as CS import qualified Distribution.Solver.Modular.WeightedPSQ as W import qualified Distribution.Text as T import Debug.Trace.Tree (gtraceJson) import Debug.Trace.Tree.Simple import Debug.Trace.Tree.Generic import Debug.Trace.Tree.Assoc (Assoc(..)) #endif -- | Various options for the modular solver. data SolverConfig = SolverConfig { reorderGoals :: ReorderGoals, countConflicts :: CountConflicts, independentGoals :: IndependentGoals, avoidReinstalls :: AvoidReinstalls, shadowPkgs :: ShadowPkgs, strongFlags :: StrongFlags, allowBootLibInstalls :: AllowBootLibInstalls, maxBackjumps :: Maybe Int, enableBackjumping :: EnableBackjumping, solveExecutables :: SolveExecutables, goalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), solverVerbosity :: Verbosity } -- | Run all solver phases. -- -- In principle, we have a valid tree after 'validationPhase', which -- means that every 'Done' node should correspond to valid solution. -- -- There is one exception, though, and that is cycle detection, which -- has been added relatively recently. Cycles are only removed directly -- before exploration. -- solve :: SolverConfig -- ^ solver parameters -> CompilerInfo -> Index -- ^ all available packages as an index -> PkgConfigDb -- ^ available pkg-config pkgs -> (PN -> PackagePreferences) -- ^ preferences -> Map PN [LabeledPackageConstraint] -- ^ global constraints -> Set PN -- ^ global goals -> Log Message (Assignment, RevDepMap) solve sc cinfo idx pkgConfigDB userPrefs userConstraints userGoals = explorePhase $ detectCycles $ heuristicsPhase $ preferencesPhase $ validationPhase $ prunePhase $ buildPhase where explorePhase = backjumpAndExplore (enableBackjumping sc) (countConflicts sc) detectCycles = traceTree "cycles.json" id . detectCyclesPhase heuristicsPhase = let heuristicsTree = traceTree "heuristics.json" id in case goalOrder sc of Nothing -> goalChoiceHeuristics . heuristicsTree . P.deferSetupChoices . P.deferWeakFlagChoices . P.preferBaseGoalChoice Just order -> P.firstGoal . heuristicsTree . P.sortGoals order preferencesPhase = P.preferLinked . 
P.preferPackagePreferences userPrefs validationPhase = traceTree "validated.json" id . P.enforcePackageConstraints userConstraints . P.enforceManualFlags userConstraints . P.enforceSingleInstanceRestriction . validateLinking idx . validateTree cinfo idx pkgConfigDB prunePhase = (if asBool (avoidReinstalls sc) then P.avoidReinstalls (const True) else id) . (if asBool (allowBootLibInstalls sc) then id else P.requireInstalled (`elem` nonInstallable)) buildPhase = traceTree "build.json" id $ buildTree idx (independentGoals sc) (S.toList userGoals) -- packages that can never be installed or upgraded -- If you change this enumeration, make sure to update the list in -- "Distribution.Client.Dependency" as well nonInstallable :: [PackageName] nonInstallable = L.map mkPackageName [ "base" , "ghc-prim" , "integer-gmp" , "integer-simple" , "template-haskell" ] -- When --reorder-goals is set, we use preferReallyEasyGoalChoices, which -- prefers (keeps) goals only if the have 0 or 1 enabled choice. -- -- In the past, we furthermore used P.firstGoal to trim down the goal choice nodes -- to just a single option. This was a way to work around a space leak that was -- unnecessary and is now fixed, so we no longer do it. -- -- If --count-conflicts is active, it will then choose among the remaining goals -- the one that has been responsible for the most conflicts so far. -- -- Otherwise, we simply choose the first remaining goal. -- goalChoiceHeuristics | asBool (reorderGoals sc) = P.preferReallyEasyGoalChoices | otherwise = id {- P.firstGoal -} -- | Dump solver tree to a file (in debugging mode) -- -- This only does something if the @debug-tracetree@ configure argument was -- given; otherwise this is just the identity function. traceTree :: #ifdef DEBUG_TRACETREE GSimpleTree a => #endif FilePath -- ^ Output file -> (a -> a) -- ^ Function to summarize the tree before dumping -> a -> a #ifdef DEBUG_TRACETREE traceTree = gtraceJson #else traceTree _ _ = id #endif #ifdef DEBUG_TRACETREE instance GSimpleTree (Tree d c) where fromGeneric = go where go :: Tree d c -> SimpleTree go (PChoice qpn _ _ psq) = Node "P" $ Assoc $ L.map (uncurry (goP qpn)) $ psqToList psq go (FChoice _ _ _ _ _ _ psq) = Node "F" $ Assoc $ L.map (uncurry goFS) $ psqToList psq go (SChoice _ _ _ _ psq) = Node "S" $ Assoc $ L.map (uncurry goFS) $ psqToList psq go (GoalChoice _ psq) = Node "G" $ Assoc $ L.map (uncurry goG) $ PSQ.toList psq go (Done _rdm _s) = Node "D" $ Assoc [] go (Fail cs _reason) = Node "X" $ Assoc [("CS", Leaf $ goCS cs)] psqToList :: W.WeightedPSQ w k v -> [(k, v)] psqToList = L.map (\(_, k, v) -> (k, v)) . 
W.toList -- Show package choice goP :: QPN -> POption -> Tree d c -> (String, SimpleTree) goP _ (POption (I ver _loc) Nothing) subtree = (T.display ver, go subtree) goP (Q _ pn) (POption _ (Just pp)) subtree = (showQPN (Q pp pn), go subtree) -- Show flag or stanza choice goFS :: Bool -> Tree d c -> (String, SimpleTree) goFS val subtree = (show val, go subtree) -- Show goal choice goG :: Goal QPN -> Tree d c -> (String, SimpleTree) goG (Goal var gr) subtree = (showVar var ++ " (" ++ shortGR gr ++ ")", go subtree) -- Variation on 'showGR' that produces shorter strings -- (Actually, QGoalReason records more info than necessary: we only need -- to know the variable that introduced the goal, not the value assigned -- to that variable) shortGR :: QGoalReason -> String shortGR UserGoal = "user" shortGR (DependencyGoal dr) = showDependencyReason (\(PI nm _) -> showQPN nm) dr -- Show conflict set goCS :: ConflictSet -> String goCS cs = "{" ++ (intercalate "," . L.map showVar . CS.toList $ cs) ++ "}" #endif -- | Replace all goal reasons with a dummy goal reason in the tree -- -- This is useful for debugging (when experimenting with the impact of GRs) _removeGR :: Tree d c -> Tree d QGoalReason _removeGR = trav go where go :: TreeF d c (Tree d QGoalReason) -> TreeF d QGoalReason (Tree d QGoalReason) go (PChoiceF qpn rdm _ psq) = PChoiceF qpn rdm dummy psq go (FChoiceF qfn rdm _ a b d psq) = FChoiceF qfn rdm dummy a b d psq go (SChoiceF qsn rdm _ a psq) = SChoiceF qsn rdm dummy a psq go (GoalChoiceF rdm psq) = GoalChoiceF rdm (goG psq) go (DoneF rdm s) = DoneF rdm s go (FailF cs reason) = FailF cs reason goG :: PSQ (Goal QPN) (Tree d QGoalReason) -> PSQ (Goal QPN) (Tree d QGoalReason) goG = PSQ.fromList . L.map (\(Goal var _, subtree) -> (Goal var dummy, subtree)) . PSQ.toList dummy :: QGoalReason dummy = DependencyGoal $ DependencyReason (PI (Q (PackagePath DefaultNamespace QualToplevel) (mkPackageName "$")) (I (mkVersion [1]) InRepo)) [] []
themoritz/cabal
cabal-install/Distribution/Solver/Modular/Solver.hs
bsd-3-clause
9,575
0
15
2,598
2,114
1,166
948
124
6
-- Copyright (c) 2008 Stephen C. Harris. -- See COPYING file at the root of this distribution for copyright information. module HMQ.Metadata.PostgreSQL where import Control.Monad import Control.Exception import Text.Regex import Data.Maybe import Database.HDBC import qualified Database.HDBC.PostgreSQL as PG import HMQ.Metadata.TableMetadata(TableIdentifier(..),FieldMetadata(..),ForeignKeyConstraint(..),fieldName,FieldName,SchemaName) import qualified HMQ.Metadata.TableMetadata as TMD import HMQ.Utils.Strings instance TMD.IConnectionEx PG.Connection where getTableIdentifiers = getTableIdentifiers getFieldMetadatas = getFieldMetadatas getForeignKeyConstraints = getForeignKeyConstraints getPrimaryKeyFieldNames = getPrimaryKeyFieldNames getTableIdentifiers :: PG.Connection -> Maybe SchemaName -> IO [TableIdentifier] getTableIdentifiers conn mSchema = do stmt <- prepare conn qry execute stmt params tableIdRows <- fetchAllRows stmt return $ map (\[schema,table] -> TableIdentifier (fromSql schema) (fromSql table)) tableIdRows where qry = "SELECT ns.nspname, cl.relname FROM pg_class cl INNER JOIN pg_namespace ns ON (ns.oid = cl.relnamespace)\n" ++ "WHERE cl.relkind = 'r'" ++ (if isJust mSchema then " AND ns.nspname = ?" else "") params = if isJust mSchema then [toSql $ fromJust mSchema] else [] getFieldMetadatas :: PG.Connection -> TableIdentifier -> IO [FieldMetadata] getFieldMetadatas conn tableId = do stmt <- prepare conn qry execute stmt params rows <- fetchAllRows stmt return $ map makeFieldMdForRow rows where tableName = TMD.tableName tableId mSchema = TMD.tableSchema tableId qry = unlines ["SELECT a.attname, a.atttypid, a.attlen, format_type(a.atttypid, a.atttypmod), a.attnotnull", "FROM pg_attribute a", "INNER JOIN pg_class cl ON (a.attrelid = cl.oid)", "INNER JOIN pg_namespace ns ON (cl.relnamespace = ns.oid)", "WHERE a.attnum > 0 and a.attisdropped IS FALSE", "AND cl.relname = ?" ++ (if isJust mSchema then " AND ns.nspname = ?" 
else ""), "ORDER BY a.attnum"] params = toSql tableName : if isJust mSchema then [toSql $ fromJust mSchema] else [] makeFieldMdForRow :: [SqlValue] -> FieldMetadata makeFieldMdForRow [attname, atttypid, attlen, formattedtype, attnotnull] = let coltype = atttypeidToSqlTypeId (fromSql atttypid) size = case fromSql attlen of -1 -> maybeExtractFirstParenthesizedNumber (fromSql formattedtype) x -> Just x decDigs = if coltype == SqlNumericT then maybeExtractSecondParenthesizedNumber (fromSql formattedtype) else Nothing in FieldMetadata { fieldName = fromSql attname, fieldSqlColDesc = SqlColDesc { colType = coltype, colSize = size, colOctetLength = Nothing, -- not available in postgres colDecDigits = decDigs, colNullable = Just (fromSql attnotnull == 'f') } } maybeExtractFirstParenthesizedNumber :: String -> Maybe Int maybeExtractFirstParenthesizedNumber formattedType = let matches = matchRegexAll (mkRegex "\\( *([0-9]+) *[,)]") formattedType in case matches of Just (_,_,_, n:_) -> Just $ read n _ -> Nothing maybeExtractSecondParenthesizedNumber :: String -> Maybe Int maybeExtractSecondParenthesizedNumber formattedType = let matches = matchRegexAll (mkRegex "\\( *[0-9]+ *, *([0-9]+) *[,)]") formattedType in case matches of Just (_,_,_, n:_) -> Just $ read n _ -> Nothing atttypeidToSqlTypeId :: Integer -> SqlTypeId atttypeidToSqlTypeId oid = if oid == pgtype_CHAR then SqlCharT else if oid == pgtype_CHAR2 then SqlCharT else if oid == pgtype_CHAR4 then SqlCharT else if oid == pgtype_CHAR8 then SqlCharT else if oid == pgtype_NAME then SqlVarCharT else if oid == pgtype_BPCHAR then SqlCharT else if oid == pgtype_VARCHAR then SqlVarCharT else if oid == pgtype_TEXT then SqlVarCharT else if oid == pgtype_BYTEA then SqlVarBinaryT else if oid == pgtype_INT2 then SqlSmallIntT else if oid == pgtype_OID then SqlIntegerT else if oid == pgtype_XID then SqlIntegerT else if oid == pgtype_INT4 then SqlBigIntT else if oid == pgtype_INT8 then SqlBigIntT else if oid == pgtype_NUMERIC then SqlNumericT else if oid == pgtype_FLOAT4 then SqlRealT else if oid == pgtype_FLOAT8 then SqlFloatT else if oid == pgtype_DATE then SqlDateT else if oid == pgtype_ABSTIME then SqlTimestampT else if oid == pgtype_DATETIME then SqlTimestampT else if oid == pgtype_TIMESTAMP_NO_TMZONE then SqlTimestampT else if oid == pgtype_TIMESTAMP then SqlTimestampT else if oid == pgtype_TIME then SqlTimeT else if oid == pgtype_TIME_WITH_TMZONE then SqlTimeT else if oid == pgtype_TINTERVAL then SqlIntervalT SqlIntervalMonthT -- SqlIntervalMonthT chosen arbitrarily in these two. PG allows any parts else if oid == pgtype_RELTIME then SqlIntervalT SqlIntervalMonthT -- of an interval (microsecond to millennium) to be specified together. 
else if oid == pgtype_BOOL then SqlBitT else SqlUnknownT (show oid) where pgtype_BOOL = 16 pgtype_BYTEA = 17 pgtype_CHAR = 18 pgtype_NAME = 19 pgtype_INT8 = 20 pgtype_INT2 = 21 pgtype_INT2VECTOR = 22 pgtype_INT4 = 23 pgtype_REGPROC = 24 pgtype_TEXT = 25 pgtype_OID = 26 pgtype_TID = 27 pgtype_XID = 28 pgtype_CID = 29 pgtype_OIDVECTOR = 30 pgtype_SET = 32 pgtype_CHAR2 = 409 pgtype_CHAR4 = 410 pgtype_CHAR8 = 411 pgtype_POINT = 600 pgtype_LSEG = 601 pgtype_PATH = 602 pgtype_BOX = 603 pgtype_POLYGON = 604 pgtype_FILENAME = 605 pgtype_FLOAT4 = 700 pgtype_FLOAT8 = 701 pgtype_ABSTIME = 702 pgtype_RELTIME = 703 pgtype_TINTERVAL = 704 pgtype_UNKNOWN = 705 pgtype_MONEY = 790 pgtype_OIDINT2 = 810 pgtype_OIDINT4 = 910 pgtype_OIDNAME = 911 pgtype_BPCHAR = 1042 pgtype_VARCHAR = 1043 pgtype_DATE = 1082 pgtype_TIME = 1083 pgtype_TIMESTAMP_NO_TMZONE = 1114 pgtype_DATETIME = 1184 pgtype_TIME_WITH_TMZONE = 1266 pgtype_TIMESTAMP = 1296 pgtype_NUMERIC = 1700 getForeignKeyConstraints :: PG.Connection -> TableIdentifier -> IO [ForeignKeyConstraint] getForeignKeyConstraints conn tableId = do stmt <- prepare conn qry execute stmt params rows <- fetchAllRows stmt mapM getFKCForRow rows where qry = unlines $ [ "SELECT con.conrelid AS srctableid, con.conkey AS srcfieldnums, ", "tgtns.nspname AS tgtns, tgt.relname AS tgttable, con.confrelid AS tgttableid, con.confkey AS tgtfieldnums", "FROM pg_constraint con", "INNER JOIN pg_class src ON (con.conrelid = src.oid)", "INNER JOIN pg_class tgt ON (con.confrelid = tgt.oid)", "INNER JOIN pg_namespace srcns ON (src.relnamespace = srcns.oid)", "INNER JOIN pg_namespace tgtns ON (tgt.relnamespace = tgtns.oid)", "WHERE con.contype = 'f'", "AND src.relname = ?" ++ (if isJust mSchema then " AND srcns.nspname = ?" else "") ] params = toSql tableName : if isJust mSchema then [toSql $ fromJust mSchema] else [] getFKCForRow :: [SqlValue] -> IO ForeignKeyConstraint getFKCForRow (srcTableOid : srcAttNumsStr : tgtSchema : tgtTableName : tgtTableOid : tgtAttNumsStr : []) = do srcFieldNames <- getFieldNames conn (fromSql srcTableOid) (pgsqlIntArrayStringToList $ fromSql srcAttNumsStr) tgtFieldNames <- getFieldNames conn (fromSql tgtTableOid) (pgsqlIntArrayStringToList $ fromSql tgtAttNumsStr) return $ ForeignKeyConstraint tableId (TableIdentifier (fromSql tgtSchema) (fromSql tgtTableName)) (zip srcFieldNames tgtFieldNames) tableName = TMD.tableName tableId mSchema = TMD.tableSchema tableId getPrimaryKeyFieldNames :: PG.Connection -> TableIdentifier -> IO [FieldName] getPrimaryKeyFieldNames conn tableId = do stmt <- prepare conn qry execute stmt params rows <- fetchAllRows stmt if not $ null rows then getPKForRow (head rows) -- we're only interested in one pk (which may be multiple fields) else return [] where qry = unlines $ [ "SELECT con.conrelid AS tableid, con.conkey AS fieldnums", "FROM pg_constraint con", "INNER JOIN pg_class cl ON (con.conrelid = cl.oid)", "INNER JOIN pg_namespace ns ON (cl.relnamespace = ns.oid)", "WHERE con.contype = 'p'", "AND cl.relname = ?" ++ (if isJust mSchema then " AND ns.nspname = ?" 
else "") ] params = toSql tableName : if isJust mSchema then [toSql $ fromJust mSchema] else [] getPKForRow :: [SqlValue] -> IO [FieldName] getPKForRow (tableOid : fieldNumsStr : []) = getFieldNames conn (fromSql tableOid) (pgsqlIntArrayStringToList $ fromSql fieldNumsStr) tableName = TMD.tableName tableId mSchema = TMD.tableSchema tableId pgsqlIntArrayStringToList :: String -> [Int] pgsqlIntArrayStringToList s = let listStr = map (\c -> case c of '{' -> '['; '}' -> ']'; _ -> c) s in read listStr getFieldNames :: PG.Connection -> Integer -> [Int] -> IO [String] getFieldNames conn srcTableOid attNums = do attNames <- quickQuery conn attsQry [] return $ map (fromSql . head) attNames where attsQry = "SELECT attname FROM pg_attribute WHERE attrelid = " ++ show srcTableOid ++ " AND attnum IN (" ++ listAsString show "," attNums ++ ")" -- For testing testConnStr = "dbname='testdb' user='sharris' password='anna'" printMds = catchDyn (do conn <- PG.connectPostgreSQL testConnStr TMD.printTableMetadatas conn ) printSqlError where printSqlError :: SqlError -> IO () printSqlError se = do { print se } describe table = catchDyn doQry printSqlError where doQry = do conn <- PG.connectPostgreSQL testConnStr describeTable conn table printSqlError :: SqlError -> IO [(String,SqlColDesc)] printSqlError se = do { print se; return [] }
scharris/hmq
Metadata/PostgreSQL.hs
bsd-3-clause
12,507
0
36
4,669
2,327
1,246
1,081
226
34
import Control.Lens
import Control.Monad hiding (forM_, mapM_)
import Control.Monad.State (execState)
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Foldable (forM_, mapM_)
import Data.IORef
import Data.Maybe
import Graphics.UI.GLUT
import Prelude hiding (mapM_)

import Gvty.Bindings
import Gvty.GraphicsCache
import Gvty.Serialization
import Gvty.World

main :: IO ()
main = do
    (name, args) <- getArgsAndInitialize
    let filename = listToMaybe args
    world <- newIORef newWorld
    graphicsCache <- newIORef newGraphicsCache
    forM_ filename $ \file -> do
        json <- L.readFile $ fromJust filename
        let w = decode json :: Maybe World
        mapM_ (world $=) w
    s <- get world
    let (w, h) = over both fromIntegral $ s^.worldWindowSize
    initialWindowSize $= Size w h
    initialDisplayMode $= [ DoubleBuffered ]
    window <- createWindow "gvty"
    reshapeCallback $= Just (onReshape world)
    depthFunc $= Just Less
    displayCallback $= onDisplay world graphicsCache
    idleCallback $= Just (onIdle world)
    motionCallback $= Just (onMotion world)
    keyboardMouseCallback $= Just (onInput world)
    mainLoop
coffeecup-winner/gvty
gvty.hs
bsd-3-clause
1,221
0
14
276
385
192
193
36
1
----------------------------------------------------------------------------- -- | -- Module : Berp.Base.HashSet -- Copyright : (c) 2011 Bernie Pope -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : ghc -- -- Mutable hashset for the implementation of Python's sets. -- ----------------------------------------------------------------------------- module Berp.Base.HashSet ( empty , insert , lookup , delete , fromList , elements , sizeIO ) where import Control.Monad.Trans (liftIO) import Prelude hiding (lookup) import Control.Applicative ((<$>)) import qualified Data.IntMap as IntMap import Data.List (genericLength) import Control.Monad (foldM) import Berp.Base.SemanticTypes (Object (..), Eval, HashSet) import Berp.Base.Object (objectEquality) import Berp.Base.HashTable (hashObject) import Berp.Base.LiftedIO (MonadIO, readIORef, writeIORef, newIORef) elementsIO :: HashSet -> IO [Object] elementsIO hashSet = concat <$> IntMap.elems <$> readIORef hashSet elements :: HashSet -> Eval [Object] elements = liftIO . elementsIO sizeIO :: HashSet -> IO Integer sizeIO hashSet = genericLength <$> elementsIO hashSet empty :: MonadIO m => m HashSet empty = newIORef IntMap.empty fromList :: [Object] -> Eval HashSet fromList objs = do elementsVals <- mapM toElement objs newIORef $ IntMap.fromListWith (++) elementsVals where toElement :: Object -> Eval (Int, [Object]) toElement obj = do hashValue <- hashObject obj return (hashValue, [obj]) insert :: Object -> HashSet -> Eval () insert element hashSet = do table <- readIORef hashSet hashValue <- hashObject element case IntMap.lookup hashValue table of Nothing -> do let newTable = IntMap.insert hashValue [element] table writeIORef hashSet newTable Just matches -> do newMatches <- linearInsert element matches let newTable = IntMap.insert hashValue newMatches table writeIORef hashSet newTable where linearInsert :: Object -> [Object] -> Eval [Object] linearInsert obj [] = return [obj] linearInsert obj1 list@(obj2:rest) = do areEqual <- objectEquality obj1 obj2 if areEqual then return list else (obj2 :) <$> linearInsert obj1 rest lookup :: Object -> HashSet -> Eval Bool lookup element hashSet = do table <- readIORef hashSet hashValue <- hashObject element case IntMap.lookup hashValue table of Nothing -> return False Just matches -> linearSearch element matches where linearSearch :: Object -> [Object] -> Eval Bool linearSearch _ [] = return False linearSearch obj1 (obj2:rest) = do areEqual <- objectEquality obj1 obj2 if areEqual then return True else linearSearch obj1 rest linearFilter :: Object -> [Object] -> Eval [Object] linearFilter object matches = foldM collectNotEquals [] matches where collectNotEquals :: [Object] -> Object -> Eval [Object] collectNotEquals acc next = do areEqual <- objectEquality object next return $ if areEqual then acc else object:acc delete :: Object -> HashSet -> Eval () delete element hashSet = do table <- readIORef hashSet hashValue <- hashObject element case IntMap.lookup hashValue table of Nothing -> return () Just matches -> do newMatches <- linearFilter element matches let newTable = IntMap.adjust (const newMatches) hashValue table writeIORef hashSet newTable
bjpop/berp
libs/src/Berp/Base/HashSet.hs
bsd-3-clause
3,545
0
17
762
1,028
526
502
83
4
-- | Provides required graph tools.
module Database.Algebra.SQL.Materialization.Graph
    ( Graph
    , Vertex
    , mkGraph
    , parents
    , children
    , node
    , topSort
    , vertices
    , reachable
    ) where

import qualified Data.Graph.Inductive.Graph as G
import qualified Data.Graph.Inductive.PatriciaTree as P
import qualified Data.Graph.Inductive.Query.DFS as D

-- | The vertex type.
type Vertex = G.Node

-- | The graph type.
newtype Graph label = Graph { graph :: P.Gr label () }

-- | Constructs a graph from the given out-adjacency list.
mkGraph :: [(label, Vertex, [Vertex])] -> Graph label
mkGraph outAdjacencyList = Graph $ G.mkGraph ns es
  where
    ns = map nf outAdjacencyList
    nf (n, k, _) = (k, n)
    ef (_, k, ks) = map (tf k) ks
    es = concatMap ef outAdjacencyList
    tf a b = (a, b, ())

-- | Fetches the parents of a vertex.
parents :: Vertex -> Graph label -> [Vertex]
parents v g = G.pre (graph g) v

-- | Fetches the children of a vertex.
children :: Vertex -> Graph label -> [Vertex]
children v g = G.suc (graph g) v

-- | Fetches the label of a vertex.
node :: Vertex -> Graph label -> Maybe label
node v g = G.lab (graph g) v

-- | Sorts the vertices topologically.
topSort :: Graph label -> [Vertex]
topSort = D.topsort . graph

-- | Gets all vertices from a given graph.
vertices :: Graph label -> [Vertex]
vertices = G.nodes . graph

reachable :: Vertex -> Graph label -> [Vertex]
reachable v = D.reachable v . graph
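-- Added usage sketch (not part of the original file): a small diamond-shaped
-- graph with made-up string labels. Printed orders may vary; the comments
-- show one possible result.
import Database.Algebra.SQL.Materialization.Graph

main :: IO ()
main = do
    let g = mkGraph [ ("a", 0, [1, 2])
                    , ("b", 1, [3])
                    , ("c", 2, [3])
                    , ("d", 3, [])
                    ]
    print (topSort g)      -- e.g. [0,1,2,3]
    print (children 0 g)   -- e.g. [1,2]
    print (parents 3 g)    -- e.g. [1,2]
    print (node 3 g)       -- Just "d"
    print (reachable 1 g)  -- e.g. [1,3]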
ulricha/algebra-sql
src/Database/Algebra/SQL/Materialization/Graph.hs
bsd-3-clause
1,551
0
9
408
463
264
199
35
1
module Module1.Task12 where

sum'n'count :: Integer -> (Integer, Integer)
sum'n'count x | x == 0    = (0, 1)
              | x < 0     = iter 0 0 (-x)
              | otherwise = iter 0 0 x
    where iter sum count 0 = (sum, count)
          iter sum count x = let (x', d) = divMod x 10
                             in iter (sum + d) (count + 1) x'
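-- Added usage sketch (not part of the original file): a few sample
-- evaluations of sum'n'count (digit sum and digit count).
import Module1.Task12 (sum'n'count)

main :: IO ()
main = mapM_ (print . sum'n'count) [64, -39, 0]
-- prints (10,2), (12,2) and (0,1)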
dstarcev/stepic-haskell
src/Module1/Task12.hs
bsd-3-clause
313
0
11
106
162
83
79
10
2
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE StandaloneDeriving #-} module Network.ABCI.Types ( -- * Type-safe 'Request' / 'Response' GADTs Request (..) , Response (..) -- * Code types , CodeType(..) , codeTypeOK , codeTypeEncodingError , codeTypeBadNonce , codeTypeUnauthorized , codeTypeBadOption -- * An ABCI application type , App (..) -- * 'MsgType' kind and associated types. Used to tag -- with a phantom type the 'Request' and associated 'Response' -- types , MsgType , Echo , Flush , Info , SetOption , DeliverTx , CheckTx , Commit , Query , InitChain , BeginBlock , EndBlock -- * protobuf/GADT 'Request'/'Response' conversion , toProtoResponse , withProtoRequest -- * Re-exports , def -- * Protobuf safe types re-exports , Proto.BlockID () , Proto.ConsensusParams , Proto.Header () , Proto.PartSetHeader () , Proto.Validator () ) where import qualified Proto.Types as Proto import qualified Proto.Types_Fields as Proto import Proto.Vendored.Tendermint.Tendermint.Crypto.Merkle.Merkle (Proof) import Proto.Vendored.Tendermint.Tendermint.Libs.Common.Types (KVPair) import Data.ByteString (ByteString) import Data.Default (Default (def)) import Data.Int (Int64) import Data.ProtoLens (defMessage) import Data.ProtoLens.Prism (( # )) import Data.Text (Text) import Data.Word (Word32) import Lens.Micro import Debug.Trace -- | An 'App' is a monadic function from 'Request' to 'Response'. -- We tag both with the 'MsgType' to enforce at the type-level that -- the 'Request'/'Response' types match newtype App m = App (forall (t :: MsgType). Request t -> m (Response t)) -- | Type-level "tags" representing the type of 'Request'/'Response' pairs data MsgType = Echo | Flush | Info | SetOption | DeliverTx | CheckTx | Commit | Query | InitChain | BeginBlock | EndBlock -- We create type synonyms for the promoted constructors so our users don't -- need to enable the 'DataKinds' GHC extension type Echo = 'Echo type Flush = 'Flush type Info = 'Info type SetOption = 'SetOption type DeliverTx = 'DeliverTx type CheckTx = 'CheckTx type Commit = 'Commit type Query = 'Query type InitChain = 'InitChain type BeginBlock = 'BeginBlock type EndBlock = 'EndBlock -- | A type-safe 'Request' GADT tagged with the 'MsgType' data Request (t :: MsgType) where RequestEcho :: { requestEcho'message :: !Text } -> Request Echo RequestFlush :: Request Flush RequestInfo :: { requestInfo'version :: !Text } -> Request Info RequestSetOption :: { requestSetOption'key :: !Text , requestSetOption'value :: !Text } -> Request SetOption RequestDeliverTx :: { requestDeliverTx'tx :: !ByteString } -> Request DeliverTx RequestCheckTx :: { requestCheckTx'tx :: !ByteString } -> Request CheckTx RequestCommit :: Request Commit RequestQuery :: { requestQuery'data :: !ByteString , requestQuery'path :: !Text , requestQuery'height :: !Int64 , requestQuery'prove :: !Bool } -> Request Query RequestInitChain :: { requestInitChain'validators :: ![Proto.ValidatorUpdate] , requestInitChain'consensusParams :: !(Maybe Proto.ConsensusParams) } -> Request InitChain RequestBeginBlock :: { requestInitBlock'hash :: !ByteString , requestInitBlock'header :: !(Maybe Proto.Header) -- , requestInitBlock'absent_validators :: ![Int32] , requestInitBlock'byzantine_validators :: ![Proto.Evidence] } -> Request BeginBlock RequestEndBlock :: { requestEndBlock'height :: !Int64 } -> Request EndBlock deriving instance Show (Request t) -- | A 
type-safe 'Response' GADT tagged with the 'MsgType' data Response (t :: MsgType) where ResponseException :: { responseException'error :: !Text } -> Response t ResponseEcho :: { responseEcho'message :: !Text } -> Response Echo ResponseFlush :: Response Flush ResponseInfo :: { responseInfo'data :: !Text , responseInfo'version :: !Text , responseInfo'lastBlockHeight :: !Int64 , responseInfo'lastBlockAppHash :: !ByteString } -> Response Info ResponseSetOption :: { responseSetOption'code :: !CodeType , responseSetOption'log :: !Text } -> Response SetOption ResponseDeliverTx :: { responseDeliverTx'code :: !CodeType , responseDeliverTx'data :: !ByteString , responseDeliverTx'log :: !Text , responseDeliverTx'tags :: ![KVPair] } -> Response DeliverTx ResponseCheckTx :: { responseCheckTx'code :: !CodeType , responseCheckTx'data :: !ByteString , responseCheckTx'log :: !Text , responseCheckTx'gas :: !Int64 , responseCheckTx'fee :: !Int64 } -> Response CheckTx ResponseCommit :: { responseCommit'data :: !ByteString } -> Response Commit ResponseQuery :: { responseQuery'code :: !CodeType , responseQuery'index :: !Int64 , responseQuery'key :: !ByteString , responseQuery'value :: !ByteString , responseQuery'proof :: !Proof , responseQuery'height :: !Int64 , responseQuery'log :: !Text } -> Response Query ResponseInitChain :: { responseInitChain'Validators :: [Proto.ValidatorUpdate] , responseInitChain'ConsensusParams :: Maybe Proto.ConsensusParams } -> Response InitChain ResponseBeginBlock :: Response BeginBlock ResponseEndBlock :: { responseEndBlock'diffs :: ![Proto.ValidatorUpdate] , responseEndBlock'consensus :: !(Maybe Proto.ConsensusParams) } -> Response EndBlock deriving instance Show (Response t) instance Default (Response Flush) where def = ResponseFlush instance Default (Response Info) where def = ResponseInfo "" "" 0 "" instance Default (Response SetOption) where def = ResponseSetOption def "" instance Default (Response Query) where def = ResponseQuery def def "" "" defMessage 0 "" instance Default (Response InitChain) where def = ResponseInitChain def def instance Default (Response BeginBlock) where def = ResponseBeginBlock instance Default (Response EndBlock) where def = ResponseEndBlock [] Nothing ------------------------------------ -- code types newtype CodeType = CodeType { unCodeType :: Word32 } deriving (Show, Eq, Ord) instance Default CodeType where def = CodeType 0 codeTypeOK, codeTypeEncodingError, codeTypeBadNonce, codeTypeUnauthorized, codeTypeBadOption :: CodeType codeTypeOK = CodeType 0 codeTypeEncodingError = CodeType 1 codeTypeBadNonce = CodeType 2 codeTypeUnauthorized = CodeType 3 codeTypeBadOption = CodeType 101 {- from file: https://github.com/tendermint/abci/blob/master/example/code/code.go CodeTypeOK uint32 = 0 CodeTypeEncodingError uint32 = 1 CodeTypeBadNonce uint32 = 2 CodeTypeUnauthorized uint32 = 3 CodeTypeBadOption uint32 = 101 -} ------------------------------------ -- | Translates type-safe 'Response' GADT to the unsafe -- auto-generated 'Proto.Response' toProtoResponse :: Response t -> Proto.Response toProtoResponse (ResponseException error') = defMessage & Proto.maybe'value ?~ Proto._Response'Exception # (defMessage & Proto.error .~ error') toProtoResponse (ResponseEcho msg) = defMessage & Proto.maybe'value ?~ Proto._Response'Echo # (defMessage & Proto.message .~ msg) toProtoResponse ResponseFlush = defMessage & Proto.maybe'value ?~ Proto._Response'Flush # defMessage toProtoResponse (ResponseInfo d v h ah) = defMessage & Proto.maybe'value ?~ Proto._Response'Info # 
(defMessage & Proto.data' .~ d & Proto.version .~ v & Proto.lastBlockHeight .~ h & Proto.lastBlockAppHash .~ ah) toProtoResponse (ResponseSetOption (CodeType code') log') = defMessage & Proto.maybe'value ?~ Proto._Response'SetOption # (defMessage & Proto.code .~ code' & Proto.log .~ log') toProtoResponse (ResponseDeliverTx (CodeType code) data'' log' tags') = defMessage & Proto.maybe'value ?~ Proto._Response'DeliverTx # (defMessage & Proto.code .~ code & Proto.data' .~ data'' & Proto.log .~ log' & Proto.tags .~ tags') toProtoResponse (ResponseCheckTx (CodeType code) data'' log' _ _) = defMessage & Proto.maybe'value ?~ Proto._Response'CheckTx # (defMessage & Proto.code .~ code & Proto.data' .~ data'' & Proto.log .~ log') toProtoResponse (ResponseCommit data'') = defMessage & Proto.maybe'value ?~ Proto._Response'Commit # (defMessage & Proto.data' .~ data'') toProtoResponse (ResponseQuery (CodeType c) i k v p h l) = defMessage & Proto.maybe'value ?~ Proto._Response'Query # (defMessage & Proto.code .~ c & Proto.index .~ i & Proto.key .~ k & Proto.value .~ v & Proto.proof .~ p & Proto.height .~ h & Proto.log .~ l) toProtoResponse (ResponseInitChain vs cps) = defMessage & Proto.maybe'value ?~ Proto._Response'InitChain # (defMessage & Proto.validators .~ vs & Proto.maybe'consensusParams .~ cps ) toProtoResponse ResponseBeginBlock = defMessage & Proto.maybe'value ?~ Proto._Response'BeginBlock # defMessage toProtoResponse (ResponseEndBlock vs consensus) = defMessage & Proto.maybe'value ?~ Proto._Response'EndBlock # (defMessage & Proto.validatorUpdates .~ vs & Proto.maybe'consensusParamUpdates .~ consensus) -- | Translates the unsafe auto-generated 'Proto.Request' to a type-safe -- 'Request GADT so users can safely pattern-match on it -- (ie: the compiler will warn if any case is not covered) -- -- Note that we need to use a rank-n-types continuation since the -- 'Request' GADT carries a phantom-type 'MsgType' "tag" and Haskell -- does not allow a polymorphic return type on a "normal" function -- (only those belonging to a type classes) withProtoRequest :: Proto.Request -> (forall (t :: MsgType). Maybe (Request t) -> a) -> a withProtoRequest r f | Just echo <- r^.Proto.maybe'echo = f (Just (RequestEcho $ echo ^. Proto.message)) | Just info <- r^.Proto.maybe'info = f (Just (RequestInfo $ info ^. Proto.version)) | Just setOption <- r^.Proto.maybe'setOption = f (Just (RequestSetOption (setOption ^. Proto.key) (setOption ^. Proto.value))) | Just deliverTx <- r^.Proto.maybe'deliverTx = f (Just (RequestDeliverTx $ deliverTx ^. Proto.tx)) | Just requestTx <- r^.Proto.maybe'checkTx = f (Just (RequestCheckTx $ requestTx ^. Proto.tx)) | Just query <- r^.Proto.maybe'query = f (Just (RequestQuery (query ^. Proto.data') (query ^. Proto.path) (query ^. Proto.height) (query ^. Proto.prove))) | Just initChain <- r^.Proto.maybe'initChain = f (Just (RequestInitChain (initChain ^. Proto.validators) (initChain ^. Proto.maybe'consensusParams))) | Just beginBlock <- r^.Proto.maybe'beginBlock = f (Just (RequestBeginBlock (beginBlock ^. Proto.hash) (beginBlock ^. Proto.maybe'header) (beginBlock ^. Proto.byzantineValidators))) | Just endBlock <- r^.Proto.maybe'endBlock = f (Just (RequestEndBlock $ endBlock ^. Proto.height)) | Just _ <- r^.Proto.maybe'commit = f (Just RequestCommit) | Just _ <- r^.Proto.maybe'flush = f (Just RequestFlush) | otherwise = traceShow r $ f Nothing
albertov/hs-abci
src/Network/ABCI/Types.hs
bsd-3-clause
13,086
0
21
3,968
2,732
1,493
1,239
343
1
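-- Hedged usage sketch (not part of the repository above): how the 'App'
-- newtype and the tagged 'Request'/'Response' GADTs are meant to be used.
-- Pattern matching on the 'MsgType'-indexed 'Request' forces each branch to
-- return the matching 'Response'; 'ResponseException' is polymorphic in the
-- tag, so it can serve as a catch-all.
{-# LANGUAGE OverloadedStrings #-}
module Example.EchoApp where

import Network.ABCI.Types

echoApp :: Monad m => App m
echoApp = App $ \req -> case req of
  RequestEcho msg -> pure (ResponseEcho msg)   -- must return a Response Echo
  RequestFlush    -> pure ResponseFlush        -- must return a Response Flush
  _               -> pure (ResponseException "not implemented")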
{-# LANGUAGE TemplateHaskell #-} module Lightray.Types.Camera where import Linear.V3 (V3) import Control.Lens data ViewPlane = ViewPlane { _viewPlaneWidth :: Int , _viewPlaneHeight :: Int , _viewPlaneSize :: Double , _viewPlaneGamma :: Double } data Camera = OrthogonalCamera { _camViewPlane :: ViewPlane , _camPosition :: V3 Double , _camLookPoint :: V3 Double , _camUpVector :: V3 Double } | PerspectiveCamera { _camViewPlane :: ViewPlane , _camPosition :: V3 Double , _camLookPoint :: V3 Double , _camUpVector :: V3 Double , _camViewPlaneDistance :: Double } makeLenses ''ViewPlane makeLenses ''Camera
alandao/lightray
src/Lightray/Types/Camera.hs
bsd-3-clause
1,002
0
9
484
158
92
66
19
0
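-- Hedged usage sketch (not from the lightray repo): constructing a camera and
-- reading/updating fields through the TH-generated lenses above. The concrete
-- numbers are arbitrary.
module Example.CameraUsage where

import Control.Lens ((^.), (&), (.~))
import Linear.V3 (V3 (..))

import Lightray.Types.Camera

defaultCamera :: Camera
defaultCamera = PerspectiveCamera
  { _camViewPlane         = ViewPlane 800 600 1.0 2.2
  , _camPosition          = V3 0 0 5
  , _camLookPoint         = V3 0 0 0
  , _camUpVector          = V3 0 1 0
  , _camViewPlaneDistance = 1.0
  }

-- Read the view-plane width through composed lenses.
planeWidth :: Int
planeWidth = defaultCamera ^. camViewPlane . viewPlaneWidth

-- Move the camera without touching any other field.
movedCamera :: Camera
movedCamera = defaultCamera & camPosition .~ V3 2 0 5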
module DB.Order
    ( create
    ) where

import Import
import DB

import qualified Database.Persist.Sqlite as P

create :: MonadIO m => String -> Int -> m (Key SqlPersist Order)
create name price = runDB $ P.insert $ Order name price
fujimura/persistent-hspec-example
DB/Order.hs
bsd-3-clause
261
0
11
54
95
53
42
10
1
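-- Hedged sketch of the call shape of 'create' above: any MonadIO computation
-- can call it according to its signature, though what 'runDB' needs beyond
-- that depends on the surrounding project. The order name and price below are
-- made up for illustration.
module Example.CreateOrder where

import Control.Monad.IO.Class (MonadIO)

import DB.Order (create)

orderExample :: MonadIO m => m ()
orderExample = do
  _orderKey <- create "Widget" 100   -- key is ignored here
  return ()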
{-# LANGUAGE RankNTypes #-} -- | Queries and operations on values. Useful for the interpreter and -- constant folding. module Futhark.Representation.AST.Attributes.Values ( valueType , valueShape -- * Rearranging , permuteArray , rotateArray , concatArrays , splitArray ) where import Data.Array import Data.List import Prelude import Futhark.Representation.AST.Syntax import Futhark.Representation.AST.Attributes.Constants import Futhark.Representation.AST.Attributes.Rearrange import Futhark.Util (chunk) -- | Return the type of the given value. valueType :: Value -> Type valueType (PrimVal v) = Prim $ primValueType v valueType (ArrayVal _ et shape) = Array et (Shape $ map constant shape) NoUniqueness -- | Return a list of the sizes of an array (the shape, in other -- terms). For non-arrays, this is the empty list. A two-dimensional -- array with five rows and three columns would return the list @[5, -- 3]@. If an array has @n@ dimensions, the result is always a list -- of @n@ elements. valueShape :: Value -> [Int] valueShape (ArrayVal _ _ shape) = shape valueShape _ = [] -- | Permute the dimensions of an array value. If the given value is -- not an array, it is returned unchanged. The length of the -- permutation must be equal to the rank of the value. permuteArray :: [Int] -> Value -> Value permuteArray perm (ArrayVal inarr et oldshape) = let newshape = move oldshape idx is shape = sum (zipWith (*) is (map product $ drop 1 (tails shape))) in ArrayVal (listArray (bounds inarr) [ inarr ! idx (invmove is) oldshape | is <- map reverse $ picks $ reverse newshape ]) et newshape where move = rearrangeShape perm invmove = rearrangeShape $ rearrangeInverse perm picks [] = [] picks [n] = map (:[]) [0..n-1] picks (n:ns) = [ i:is | is <- picks ns, i <- [0..n-1] ] permuteArray _ v = v -- | Rotate the elements of an array as per the 'Rotate' PrimOp. rotateArray :: [Int] -> Value -> Value rotateArray ks (ArrayVal inarr et shape) = ArrayVal (listArray (bounds inarr) $ rotate ks shape $ elems inarr) et shape rotateArray _ v = v rotate :: [Int] -> [Int] -> [a] -> [a] rotate (k:ks) (d:ds) xs = -- (0) Split xs into rows. -- (1) Recursively rotate every row. -- (2) Then rotate the order of rows. let rows = chunk (product ds) xs xs_rotated = map (rotate ks ds) rows new_rows | k > 0 = drop k xs_rotated ++ take k xs_rotated | otherwise = drop (d+k) xs_rotated ++ take (d+k) xs_rotated in concat new_rows rotate _ _ xs = xs -- | Concatenate two arrays as per the 'Concat' PrimOp. concatArrays :: Int -> Value -> Value -> Value concatArrays i (ArrayVal arr1 et shape1) (ArrayVal arr2 _ shape2) = ArrayVal (listArray (0,product shape3-1) $ concatenate xcs xs ycs ys) et shape3 where xcs = product $ drop i shape1 xs = elems arr1 ycs = product $ drop i shape2 ys = elems arr2 shape3 = zipWith3 update shape1 shape2 [0..] 
update x y j | i == j = x + y | otherwise = x concatArrays _ x _ = x concatenate :: Int -> [a] -> Int -> [a] -> [a] concatenate xcs xs ycs ys = let xs' = chunk xcs xs ys' = chunk ycs ys in concat $ zipWith (++) xs' ys' splitArray :: Int -> [Int] -> Value -> [Value] splitArray i splits (ArrayVal arr et shape) = [ ArrayVal (listArray (0, product splitshape-1) splitarr) et splitshape | (splitarr, splitshape) <- split i splits shape (elems arr) ] splitArray _ _ v = [v] split :: Int -> [Int] -> [Int] -> [a] -> [([a],[Int])] split 0 ss (_:ds) xs = let rows = chunk (product ds) xs mkSplit n m = (concat $ take (m-n) $ drop n rows, (m-n) : ds) in zipWith mkSplit (scanl (+) 0 ss) (scanl1 (+) ss) split i ss (d:ds) xs = let rows = chunk (product ds) xs in case map (split (i-1) ss ds) rows of [] -> [] r:rs -> let (splits, shapes) = unzip $ foldl combine r rs in zip splits $ map (d:) shapes where combine :: [([a],[Int])] -> [([a],[Int])] -> [([a],[Int])] combine splits_and_shapes1 splits_and_shapes2 = let (splits1, shapes1) = unzip splits_and_shapes1 (splits2, _) = unzip splits_and_shapes2 in zip (zipWith (++) splits1 splits2) shapes1 split _ _ ds xs = [(xs, ds)]
mrakgr/futhark
src/Futhark/Representation/AST/Attributes/Values.hs
bsd-3-clause
4,398
0
16
1,123
1,587
836
751
90
3
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Test.Jetski.Arbitrary where import qualified Data.List as List import qualified Data.Text as T import Data.Word (Word) import Disorder.Corpus import Foreign.Ptr (IntPtr) import P import Test.QuickCheck ------------------------------------------------------------------------ newtype Name = Name { unName :: Text } deriving (Eq, Ord, Show) data Value = Double Name Double | Int32 Name Int32 | VoidPtr Name IntPtr deriving (Eq, Ord, Show) newtype Values = Values { getArguments :: [Value] } deriving (Eq, Ord, Show) nameOfArgument :: Value -> Text nameOfArgument = \case Double n _ -> unName n Int32 n _ -> unName n VoidPtr n _ -> unName n ------------------------------------------------------------------------ instance Arbitrary Values where arbitrary = Values . List.nubBy ((==) `on` nameOfArgument) <$> arbitrary instance Arbitrary Value where arbitrary = oneof [ Double <$> arbitrary <*> arbitrary , Int32 <$> arbitrary <*> arbitrary , VoidPtr <$> arbitrary <*> (fromInteger <$> arbitrary) ] instance Arbitrary Name where arbitrary = do name <- elements muppets (i :: Word) <- (`mod` 100) <$> arbitrary return (Name (name <> T.pack (show i)))
ambiata/jetski
test/Test/Jetski/Arbitrary.hs
bsd-3-clause
1,469
0
15
327
390
219
171
40
3
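-- Hedged sketch (not part of the test suite above): sampling the Arbitrary
-- instances from a small driver, e.g. while debugging the generators in GHCi.
module Example.SampleValues where

import Test.QuickCheck (Gen, arbitrary, sample')

import Test.Jetski.Arbitrary (Values)

printSampleValues :: IO ()
printSampleValues = do
  vs <- sample' (arbitrary :: Gen Values)   -- a handful of generated Values
  mapM_ print vs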
{-# LANGUAGE FlexibleInstances #-} module Data.TrieMap.WordMap.Subset () where import Control.Monad import Control.Monad.Option import Data.TrieMap.TrieKey import Data.Word import Data.TrieMap.WordMap.Base import Data.TrieMap.WordMap.Searchable () import Prelude hiding (lookup) instance Subset SNode where (<=?) = subMap where t1 `subMap` t2 = case (node t1, node t2) of (Bin p1 m1 l1 r1, Bin p2 m2 l2 r2) | shorter m1 m2 -> False | shorter m2 m1 -> match p1 p2 m2 && (if mask0 p1 m2 then t1 `subMap` l2 else t1 `subMap` r2) | otherwise -> (p1==p2) && l1 `subMap` l2 && r1 `subMap` r2 (Bin{}, _) -> False (Tip k x, _) -> isSome (mfilter (x <?=) (lookup k t2)) (Nil, _) -> True instance Subset (TrieMap Word) where WordMap m1 <=? WordMap m2 = m1 <=? m2
lowasser/TrieMap
Data/TrieMap/WordMap/Subset.hs
bsd-3-clause
806
12
15
180
340
190
150
22
0
module B1.Program.Chart.Vbo ( Vbo , VboSpec(..) , createVbo , renderVbo , deleteVbo ) where import Control.Concurrent import Control.Concurrent.MVar import Control.Monad import Data.Array.Storable import Debug.Trace import Foreign.Marshal.Array import Foreign.Ptr import Foreign.Storable import Graphics.Rendering.OpenGL import B1.Control.TaskManager import B1.Graphics.Rendering.OpenGL.BufferManager import B1.Program.Chart.Resources type NumElements = Int data Vbo = Vbo { bufferObject :: BufferObject , unmapMVar :: MVar Bool , primitiveGroups :: [(PrimitiveMode, NumElements)] } type ArraySize = Int data VboSpec = VboSpec PrimitiveMode ArraySize [GLfloat] createVbo :: Resources -> [VboSpec] -> IO Vbo createVbo resources vboSpecs = do bufferObject <- getOrCreateBindedBuffer (bufferManager resources) numBytes maybePtr <- mapBuffer ArrayBuffer WriteOnly bindBuffer ArrayBuffer $= Nothing unmapMVar <- newEmptyMVar case maybePtr of Just ptr -> do addTask (taskManager resources) $ do let allElements = concat $ map (\(VboSpec _ _ elements) -> elements) vboSpecs pokeArray ptr allElements putMVar unmapMVar True putTraceMsg $ "VBO Estimated size: " ++ show numBytes ++ " Real size: " ++ show (length allElements * 4) _ -> error "Couldn't map buffer..." return Vbo { bufferObject = bufferObject , unmapMVar = unmapMVar , primitiveGroups = primitiveGroups } where totalSize = sum $ map (\(VboSpec _ size _) -> size) vboSpecs numBytes = toEnum $ totalSize * 4 primitiveGroups = map (\(VboSpec primitiveMode size _) -> (primitiveMode, size `div` 5)) vboSpecs deleteVbo :: Resources -> Vbo -> IO () deleteVbo resources Vbo { bufferObject = bufferObject , unmapMVar = unmapMVar } = do unmap <- takeMVar unmapMVar unmapIfNecessary unmap bufferObject recycleBuffer (bufferManager resources) bufferObject unmapIfNecessary :: Bool -> BufferObject -> IO () unmapIfNecessary unmap bufferObject = do bindBuffer ArrayBuffer $= Just bufferObject when unmap $ do unmapBuffer ArrayBuffer return () bindBuffer ArrayBuffer $= Nothing renderVbo :: Vbo -> IO Bool renderVbo Vbo { bufferObject = bufferObject , unmapMVar = unmapMVar , primitiveGroups = primitiveGroups } = do maybeUnmap <- tryTakeMVar unmapMVar case maybeUnmap of Just unmap -> do tryPutMVar unmapMVar False unmapIfNecessary unmap bufferObject bindBuffer ArrayBuffer $= Just bufferObject arrayPointer VertexArray $= vertexArrayDescriptor arrayPointer ColorArray $= colorArrayDescriptor renderPrimitiveGroups primitiveGroups bindBuffer ArrayBuffer $= Nothing return True _ -> return False where vertexArrayDescriptor = VertexArrayDescriptor 2 Float 20 $ offset 0 colorArrayDescriptor = VertexArrayDescriptor 3 Float 20 $ offset 8 renderPrimitiveGroups :: [(PrimitiveMode, NumElements)] -> IO () renderPrimitiveGroups primitiveGroups = mapM_ (\(offset, (primitiveMode, numElements)) -> drawArrays primitiveMode (fromIntegral offset) (fromIntegral numElements) ) (zip offsets primitiveGroups) where offsets = scanl (+) 0 $ map snd primitiveGroups offset x = plusPtr nullPtr x
btmura/b1
src/B1/Program/Chart/Vbo.hs
bsd-3-clause
3,359
0
23
733
939
478
461
95
2
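-- Hedged sketch (not from the b1 repo): the vertex layout 'createVbo' above
-- appears to assume is 5 GLfloats per vertex (x, y, r, g, b), which is why
-- 'primitiveGroups' divides each VboSpec size by 5 and the array stride is
-- 20 bytes. A hypothetical single red triangle:
module Example.TriangleSpec where

import Graphics.Rendering.OpenGL (GLfloat, PrimitiveMode (Triangles))

import B1.Program.Chart.Vbo (VboSpec (..))

triangleSpec :: VboSpec
triangleSpec = VboSpec Triangles 15   -- 3 vertices * 5 floats
  [ 0, 0,  1, 0, 0   -- vertex (0, 0), red
  , 1, 0,  1, 0, 0   -- vertex (1, 0), red
  , 0, 1,  1, 0, 0   -- vertex (0, 1), red
  ]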
{-# OPTIONS_GHC -fno-warn-orphans #-} module Application ( getApplicationDev , appMain , develMain , makeFoundation , makeLogWare , handler , db ) where import Control.Monad.Logger (liftLoc, runLoggingT) import Database.Persist.Sqlite (createSqlitePool, runSqlPool, sqlDatabase, sqlPoolSize) import Import import Instrument (requestDuration, instrumentApp) import Language.Haskell.TH.Syntax (qLocation) import Network.Wai (Middleware) import Network.Wai.Handler.Warp (Settings, defaultSettings, defaultShouldDisplayException, runSettings, setHost, setOnException, setPort) import Network.Wai.Handler.WarpTLS (tlsSettingsChain, runTLS) import Network.Wai.Middleware.RequestLogger (Destination (Logger), IPAddrSource (..), OutputFormat (..), destination, mkRequestLogger, outputFormat) import Prometheus import Prometheus.Metric.GHC (ghcMetrics) import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet, toLogStr) -- Import all relevant handler modules here. -- Don't forget to add new modules to your cabal file! import Handler.Common import Handler.Home import Handler.UCD -- This line actually creates our YesodDispatch instance. It is the second half -- of the call to mkYesodData which occurs in Foundation.hs. Please see the -- comments there for more details. mkYesodDispatch "App" resourcesApp -- | This function allocates resources (such as a database connection pool), -- performs initialization and return a foundation datatype value. This is also -- the place to put your migrate statements to have automatic database -- migrations handled by Yesod. makeFoundation :: AppSettings -> IO App makeFoundation appSettings = do -- Some basic initializations: HTTP connection manager, logger, and static -- subsite. appHttpManager <- newManager appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger -- We need a log function to create a connection pool. We need a connection -- pool to create our foundation. And we need our foundation to get a -- logging function. To get out of this loop, we initially create a -- temporary foundation without a real connection pool, get a log function -- from there, and then create the real foundation. let mkFoundation appConnPool = App {..} tempFoundation = mkFoundation $ error "connPool forced in tempFoundation" logFunc = messageLoggerSource tempFoundation appLogger -- Create the database connection pool pool <- flip runLoggingT logFunc $ createSqlitePool (sqlDatabase $ appDatabaseConf appSettings) (sqlPoolSize $ appDatabaseConf appSettings) -- Perform database migration using our application's logging settings. runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc -- Return the foundation return $ mkFoundation pool -- | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and -- applyng some additional middlewares. 
makeApplication :: App -> IO Application makeApplication foundation = do logWare <- makeLogWare foundation -- Create the WAI application and apply middlewares appPlain <- toWaiAppPlain foundation return $ logWare $ defaultMiddlewaresNoLogging appPlain makeLogWare :: App -> IO Middleware makeLogWare foundation = do requests <- Prometheus.registerIO requestDuration void $ Prometheus.register ghcMetrics logger <- mkRequestLogger def { outputFormat = if appDetailedRequestLogging $ appSettings foundation then Detailed True else Apache (if appIpFromHeader $ appSettings foundation then FromFallback else FromSocket) , destination = Logger $ loggerSet $ appLogger foundation } let instrument = instrumentApp requests "ucdapi" return $ logger . instrument -- | Warp settings for the given foundation value. warpSettings :: App -> Settings warpSettings foundation = setPort (appPort $ appSettings foundation) $ setHost (appHost $ appSettings foundation) $ setOnException (\_req e -> when (defaultShouldDisplayException e) $ messageLoggerSource foundation (appLogger foundation) $(qLocation >>= liftLoc) "yesod" LevelError (toLogStr $ "Exception from Warp: " ++ show e)) defaultSettings -- | For yesod devel, return the Warp settings and WAI Application. getApplicationDev :: IO (Settings, Application) getApplicationDev = do settings <- getAppSettings foundation <- makeFoundation settings wsettings <- getDevSettings $ warpSettings foundation app <- makeApplication foundation return (wsettings, app) getAppSettings :: IO AppSettings getAppSettings = loadYamlSettings [] [configSettingsYmlValue] useEnv -- | main function for use by yesod devel develMain :: IO () develMain = develMainHelper getApplicationDev -- | The @main@ function for an executable running this site. appMain :: IO () appMain = do -- Get the settings from all relevant sources settings <- loadYamlSettingsArgs -- fall back to compile-time values, set to [] to require values at runtime [configSettingsYmlValue] -- allow environment variables to override useEnv -- Generate the foundation from the settings foundation <- makeFoundation settings -- Generate a WAI Application from the foundation app <- makeApplication foundation -- Run the application with Warp let wsettings = warpSettings foundation if appTls (appSettings foundation) then let base = "/etc/letsencrypt/live" </> unpack (appTlsHost (appSettings foundation)) tlsSettings = tlsSettingsChain (base </> "cert.pem") [base </> "chain.pem"] (base </> "privkey.pem") in runTLS tlsSettings wsettings app else runSettings wsettings app --------------------------------------------- -- Functions for use in development with GHCi --------------------------------------------- -- | Run a handler handler :: Handler a -> IO a handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h -- | Run DB queries db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a db = handler . runDB
mithrandi/ucd-api
Application.hs
mit
6,675
0
17
1,689
1,087
579
508
-1
-1
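-- Hedged sketch: how the GHCi helpers above ('handler', 'db', 'getAppSettings')
-- are typically used during development. Written as if it were added to this
-- Application module, so 'migrateAll', 'appPort' and friends are already in
-- scope the same way they are in the functions above.
devCheck :: IO ()
devCheck = do
  db $ runMigration migrateAll   -- run pending migrations against the dev DB
  settings <- getAppSettings
  print (appPort settings)       -- confirm which port the app will bind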
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings, ExistentialQuantification, ScopedTypeVariables, ExtendedDefaultRules, FlexibleContexts, TemplateHaskell, MultiParamTypeClasses, OverloadedLabels, TypeOperators, DataKinds, DeriveGeneric, FlexibleInstances #-} import Youido.Serve import Youido.Types import Youido.Dashdo import Lucid import Lucid.Bootstrap import Lucid.Rdash import Numeric.Datasets.Gapminder import Numeric.Datasets import Control.Monad.Reader import Control.Monad.State.Strict import Control.Concurrent.STM.TVar import Control.Concurrent.STM import Data.List (nub) import Network.Wai import Data.Text (Text, pack, unpack) import Data.Maybe (fromMaybe) import qualified Data.Text as T import Data.Monoid import Lens.Micro.Platform --import Graphics.Plotly.Lucid.hs import Dashdo.Elements import Dashdo.FlexibleInput import Dashdo.Types hiding (FormField) import Data.Proxy import GHC.Generics import Text.Digestive.View (View) import qualified Data.Text.IO as TIO import qualified Text.Digestive as D import qualified Text.Digestive.Lucid.Html5 as DL data TodoR = ListTodos | EditTodoList | UpdateTodoList (Form TodoList) deriving (Show, Generic) instance MonadIO m => FromRequest m TodoR instance ToURL TodoR data TodoList = TodoList { title :: Text, items :: [Todo], category :: Text } deriving (Show, Generic) instance MonadIO m => FromForm m TodoList data TodoTag = TodoTag { tag :: Text } deriving (Show, Generic) instance Monad m => FromForm m TodoTag newtype Assignee = Assignee Int deriving (Generic, Show, Eq, Num) instance MonadIO m => FormField m Assignee where fromFormField def = D.monadic $ liftIO $ do employees <- getEmployees return $ D.choice employees def renderField _ fieldName label view = div_ [class_ "form-group"] $ do DL.label fieldName view (toHtml label) with (DL.inputSelect fieldName (toHtml <$> view)) -- DL.inputWithType typ_ attrs fieldName view) [class_ "form-control", autofocus_] DL.errorList fieldName (toHtml <$> view) data Todo = TodoItem { todoID :: Int, todo :: Text, assignee :: Assignee, done :: Bool, tags :: [TodoTag] } deriving (Show, Generic) instance MonadIO m => FromForm m Todo getEmployees :: IO [(Assignee, Text)] -- in the IO monad to simulate a database call getEmployees = return [(1, "Jim"), (2, "Sam"), (3, "Lisa")] -------------------------------------------------- data Countries = Countries | Country Text deriving (Show, Generic) instance MonadIO m => FromRequest m Countries instance ToURL Countries -------------------------------------------------- type ExampleM = ReaderT (TVar ExampleState) IO data ExampleState = ExampleState { todoState :: TodoList } -- readTodoState :: ExampleM TodoList readTodoState = ask >>= fmap todoState . liftIO . readTVarIO -------------------------------------------------- countryH :: [Gapminder] -> Countries -> HtmlT ExampleM () countryH gapM Countries = do let countries = nub $ map country gapM ul_ $ forM_ countries $ \c -> li_ $ a_ [href_ $ toURL $ Country c] $ (toHtml c) countryH gapM (Country c) = do let entries = filter ((==c) . 
country) gapM ul_ $ forM_ entries $ \e -> li_ $ "year: " <> toHtml (show $ year e) <> " population: "<> toHtml (show $ pop e) data BubblesDD = BubblesDD { _selYear :: Int} deriving Show makeLenses ''BubblesDD bubblesDD gapM = do let years = nub $ map year gapM selYear <<~ select (map showOpt years) h2_ "hello world" BubblesDD y <- getValue p_ (toHtml $ show $ y) clickAttrs <- onClickDo $ \dd -> do liftIO $ putStrLn $ "current state: "++show dd return Reset button_ (clickAttrs) "Print state" -------------------------------------------------- todoListEditForm :: MonadIO m => View Text -> HtmlT m () todoListEditForm view = container_ $ do form_ [method_ "post", action_ (toURL $ UpdateTodoList FormLink)] $ do renderForm (Proxy :: Proxy TodoList) view button_ [type_ "submit"] "Save" todoH :: TodoR -> HtmlT ExampleM () todoH ListTodos = container_ $ do TodoList titleT todosT _ <- readTodoState employees <- liftIO $ getEmployees br_ [] h4_ (toHtml titleT) a_ [type_ "button", class_ "btn btn-primary", href_ . toURL $ EditTodoList] "Edit List" widget_ . widgetBody_ $ forM_ todosT $ \(TodoItem idT nameT assignT doneT tags) -> do let employee = fromMaybe "unknown" $ lookup assignT employees container_ $ do div_ $ do toHtml $ show idT <> ". " <> (if doneT then "DONE: " else "TODO: ") <> unpack nameT <> " (" <> unpack employee <> ") " <> unpack (if length tags == 0 then "" else " (" <> T.intercalate ", " (map tag tags) <> ")") todoH EditTodoList = do tdos <- readTodoState todoListEditForm =<< (getView (Just tdos)) todoH (UpdateTodoList (Form tdos)) = do atom <- ask liftIO . atomically $ modifyTVar atom (\st -> st { todoState = tdos }) todoH ListTodos todoH (UpdateTodoList (FormError v)) = do liftIO . putStrLn $ "UpdateTodoList error: " <> show (FormError v) todoListEditForm v initialTodos = TodoList "My todos" [ TodoItem 1 "Make todo app" 1 False [TodoTag "dev", TodoTag "work"] , TodoItem 2 "Have lunch" 2 False [TodoTag "personal"] , TodoItem 3 "Buy bread" 3 True []] "A field after a subform" sidebar = rdashSidebar "Youido Example" (return ()) [ ("Bubbles", "fas") *~ #bubbles :/ Initial , ("Counties", "fas") *~ Countries , ("Todos", "fas") *~ ListTodos ] inHeader :: Text -> Html () inHeader js = do script_ [] js main :: IO () main = do gapM <- getDataset gapminder js <- TIO.readFile "form-repeat.js" atom <- newTVarIO $ ExampleState initialTodos let runIt :: Bool -> ExampleM a -> IO a runIt _ todoM = runReaderT todoM atom serveY runIt $ do dashdoGlobal dashdo #bubbles $ Dashdo (BubblesDD 1980) (bubblesDD gapM) port .= 3101 lookupUser .= \_ _ _ -> return $ Just True wrapper .= \_ -> rdashWrapper "Youido Example" (inHeader js) sidebar hHtmlT $ countryH gapM hHtmlT todoH
diffusionkinetics/open
youido/examples/Example.hs
mit
6,165
0
26
1,243
2,008
1,024
984
156
3
-- default desktop configuration for Fedora import System.Posix.Env (getEnv, putEnv) import Data.Maybe (maybe) import XMonad import XMonad.Config.Desktop import XMonad.Config.Gnome import XMonad.Config.Kde import XMonad.Config.Xfce main = do session <- getEnv "DESKTOP_SESSION" putEnv "_JAVA_AWT_WM_NONREPARENTING=1" xmonad $ maybe desktopConfig desktop session desktop "gnome" = gnomeConfig desktop "kde" = kde4Config desktop "xfce" = xfceConfig desktop "xmonad-mate" = gnomeConfig desktop _ = desktopConfig
jiangtao9999/MagicSPECS
SPECS.x/xmonad/xmonad.hs
gpl-2.0
529
0
8
78
128
69
59
16
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.EC2.DescribeSnapshots -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Describes one or more of the EBS snapshots available to you. Available -- snapshots include public snapshots available for any AWS account to -- launch, private snapshots that you own, and private snapshots owned by -- another AWS account but for which you\'ve been given explicit create -- volume permissions. -- -- The create volume permissions fall into the following categories: -- -- - /public/: The owner of the snapshot granted create volume -- permissions for the snapshot to the 'all' group. All AWS accounts -- have create volume permissions for these snapshots. -- - /explicit/: The owner of the snapshot granted create volume -- permissions to a specific AWS account. -- - /implicit/: An AWS account has implicit create volume permissions -- for all snapshots it owns. -- -- The list of snapshots returned can be modified by specifying snapshot -- IDs, snapshot owners, or AWS accounts with create volume permissions. If -- no options are specified, Amazon EC2 returns all snapshots for which you -- have create volume permissions. -- -- If you specify one or more snapshot IDs, only snapshots that have the -- specified IDs are returned. If you specify an invalid snapshot ID, an -- error is returned. If you specify a snapshot ID for which you do not -- have access, it is not included in the returned results. -- -- If you specify one or more snapshot owners, only snapshots from the -- specified owners and for which you have access are returned. The results -- can include the AWS account IDs of the specified owners, 'amazon' for -- snapshots owned by Amazon, or 'self' for snapshots that you own. -- -- If you specify a list of restorable users, only snapshots with create -- snapshot permissions for those users are returned. You can specify AWS -- account IDs (if you own the snapshots), 'self' for snapshots for which -- you own or have explicit permissions, or 'all' for public snapshots. -- -- If you are describing a long list of snapshots, you can paginate the -- output to make the list more manageable. The 'MaxResults' parameter sets -- the maximum number of results returned in a single page. If the list of -- results exceeds your 'MaxResults' value, then that number of results is -- returned along with a 'NextToken' value that can be passed to a -- subsequent 'DescribeSnapshots' request to retrieve the remaining -- results. -- -- For more information about EBS snapshots, see -- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSSnapshots.html Amazon EBS Snapshots> -- in the /Amazon Elastic Compute Cloud User Guide/. -- -- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSnapshots.html AWS API Reference> for DescribeSnapshots. -- -- This operation returns paginated results. 
module Network.AWS.EC2.DescribeSnapshots ( -- * Creating a Request describeSnapshots , DescribeSnapshots -- * Request Lenses , dssOwnerIds , dssFilters , dssNextToken , dssSnapshotIds , dssRestorableByUserIds , dssDryRun , dssMaxResults -- * Destructuring the Response , describeSnapshotsResponse , DescribeSnapshotsResponse -- * Response Lenses , dssrsNextToken , dssrsSnapshots , dssrsResponseStatus ) where import Network.AWS.EC2.Types import Network.AWS.EC2.Types.Product import Network.AWS.Pager import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | /See:/ 'describeSnapshots' smart constructor. data DescribeSnapshots = DescribeSnapshots' { _dssOwnerIds :: !(Maybe [Text]) , _dssFilters :: !(Maybe [Filter]) , _dssNextToken :: !(Maybe Text) , _dssSnapshotIds :: !(Maybe [Text]) , _dssRestorableByUserIds :: !(Maybe [Text]) , _dssDryRun :: !(Maybe Bool) , _dssMaxResults :: !(Maybe Int) } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'DescribeSnapshots' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'dssOwnerIds' -- -- * 'dssFilters' -- -- * 'dssNextToken' -- -- * 'dssSnapshotIds' -- -- * 'dssRestorableByUserIds' -- -- * 'dssDryRun' -- -- * 'dssMaxResults' describeSnapshots :: DescribeSnapshots describeSnapshots = DescribeSnapshots' { _dssOwnerIds = Nothing , _dssFilters = Nothing , _dssNextToken = Nothing , _dssSnapshotIds = Nothing , _dssRestorableByUserIds = Nothing , _dssDryRun = Nothing , _dssMaxResults = Nothing } -- | Returns the snapshots owned by the specified owner. Multiple owners can -- be specified. dssOwnerIds :: Lens' DescribeSnapshots [Text] dssOwnerIds = lens _dssOwnerIds (\ s a -> s{_dssOwnerIds = a}) . _Default . _Coerce; -- | One or more filters. -- -- - 'description' - A description of the snapshot. -- -- - 'owner-alias' - The AWS account alias (for example, 'amazon') that -- owns the snapshot. -- -- - 'owner-id' - The ID of the AWS account that owns the snapshot. -- -- - 'progress' - The progress of the snapshot, as a percentage (for -- example, 80%). -- -- - 'snapshot-id' - The snapshot ID. -- -- - 'start-time' - The time stamp when the snapshot was initiated. -- -- - 'status' - The status of the snapshot ('pending' | 'completed' | -- 'error'). -- -- - 'tag':/key/=/value/ - The key\/value combination of a tag assigned -- to the resource. -- -- - 'tag-key' - The key of a tag assigned to the resource. This filter -- is independent of the 'tag-value' filter. For example, if you use -- both the filter \"tag-key=Purpose\" and the filter \"tag-value=X\", -- you get any resources assigned both the tag key Purpose (regardless -- of what the tag\'s value is), and the tag value X (regardless of -- what the tag\'s key is). If you want to list only resources where -- Purpose is X, see the 'tag':/key/=/value/ filter. -- -- - 'tag-value' - The value of a tag assigned to the resource. This -- filter is independent of the 'tag-key' filter. -- -- - 'volume-id' - The ID of the volume the snapshot is for. -- -- - 'volume-size' - The size of the volume, in GiB. -- dssFilters :: Lens' DescribeSnapshots [Filter] dssFilters = lens _dssFilters (\ s a -> s{_dssFilters = a}) . _Default . _Coerce; -- | The 'NextToken' value returned from a previous paginated -- 'DescribeSnapshots' request where 'MaxResults' was used and the results -- exceeded the value of that parameter. 
Pagination continues from the end -- of the previous results that returned the 'NextToken' value. This value -- is 'null' when there are no more results to return. dssNextToken :: Lens' DescribeSnapshots (Maybe Text) dssNextToken = lens _dssNextToken (\ s a -> s{_dssNextToken = a}); -- | One or more snapshot IDs. -- -- Default: Describes snapshots for which you have launch permissions. dssSnapshotIds :: Lens' DescribeSnapshots [Text] dssSnapshotIds = lens _dssSnapshotIds (\ s a -> s{_dssSnapshotIds = a}) . _Default . _Coerce; -- | One or more AWS accounts IDs that can create volumes from the snapshot. dssRestorableByUserIds :: Lens' DescribeSnapshots [Text] dssRestorableByUserIds = lens _dssRestorableByUserIds (\ s a -> s{_dssRestorableByUserIds = a}) . _Default . _Coerce; -- | Checks whether you have the required permissions for the action, without -- actually making the request, and provides an error response. If you have -- the required permissions, the error response is 'DryRunOperation'. -- Otherwise, it is 'UnauthorizedOperation'. dssDryRun :: Lens' DescribeSnapshots (Maybe Bool) dssDryRun = lens _dssDryRun (\ s a -> s{_dssDryRun = a}); -- | The maximum number of snapshot results returned by 'DescribeSnapshots' -- in paginated output. When this parameter is used, 'DescribeSnapshots' -- only returns 'MaxResults' results in a single page along with a -- 'NextToken' response element. The remaining results of the initial -- request can be seen by sending another 'DescribeSnapshots' request with -- the returned 'NextToken' value. This value can be between 5 and 1000; if -- 'MaxResults' is given a value larger than 1000, only 1000 results are -- returned. If this parameter is not used, then 'DescribeSnapshots' -- returns all results. You cannot specify this parameter and the snapshot -- IDs parameter in the same request. dssMaxResults :: Lens' DescribeSnapshots (Maybe Int) dssMaxResults = lens _dssMaxResults (\ s a -> s{_dssMaxResults = a}); instance AWSPager DescribeSnapshots where page rq rs | stop (rs ^. dssrsNextToken) = Nothing | stop (rs ^. dssrsSnapshots) = Nothing | otherwise = Just $ rq & dssNextToken .~ rs ^. dssrsNextToken instance AWSRequest DescribeSnapshots where type Rs DescribeSnapshots = DescribeSnapshotsResponse request = postQuery eC2 response = receiveXML (\ s h x -> DescribeSnapshotsResponse' <$> (x .@? "nextToken") <*> (x .@? "snapshotSet" .!@ mempty >>= may (parseXMLList "item")) <*> (pure (fromEnum s))) instance ToHeaders DescribeSnapshots where toHeaders = const mempty instance ToPath DescribeSnapshots where toPath = const "/" instance ToQuery DescribeSnapshots where toQuery DescribeSnapshots'{..} = mconcat ["Action" =: ("DescribeSnapshots" :: ByteString), "Version" =: ("2015-04-15" :: ByteString), toQuery (toQueryList "Owner" <$> _dssOwnerIds), toQuery (toQueryList "Filter" <$> _dssFilters), "NextToken" =: _dssNextToken, toQuery (toQueryList "SnapshotId" <$> _dssSnapshotIds), toQuery (toQueryList "RestorableBy" <$> _dssRestorableByUserIds), "DryRun" =: _dssDryRun, "MaxResults" =: _dssMaxResults] -- | /See:/ 'describeSnapshotsResponse' smart constructor. data DescribeSnapshotsResponse = DescribeSnapshotsResponse' { _dssrsNextToken :: !(Maybe Text) , _dssrsSnapshots :: !(Maybe [Snapshot]) , _dssrsResponseStatus :: !Int } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'DescribeSnapshotsResponse' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'dssrsNextToken' -- -- * 'dssrsSnapshots' -- -- * 'dssrsResponseStatus' describeSnapshotsResponse :: Int -- ^ 'dssrsResponseStatus' -> DescribeSnapshotsResponse describeSnapshotsResponse pResponseStatus_ = DescribeSnapshotsResponse' { _dssrsNextToken = Nothing , _dssrsSnapshots = Nothing , _dssrsResponseStatus = pResponseStatus_ } -- | The 'NextToken' value to include in a future 'DescribeSnapshots' -- request. When the results of a 'DescribeSnapshots' request exceed -- 'MaxResults', this value can be used to retrieve the next page of -- results. This value is 'null' when there are no more results to return. dssrsNextToken :: Lens' DescribeSnapshotsResponse (Maybe Text) dssrsNextToken = lens _dssrsNextToken (\ s a -> s{_dssrsNextToken = a}); -- | Information about the snapshots. dssrsSnapshots :: Lens' DescribeSnapshotsResponse [Snapshot] dssrsSnapshots = lens _dssrsSnapshots (\ s a -> s{_dssrsSnapshots = a}) . _Default . _Coerce; -- | The response status code. dssrsResponseStatus :: Lens' DescribeSnapshotsResponse Int dssrsResponseStatus = lens _dssrsResponseStatus (\ s a -> s{_dssrsResponseStatus = a});
olorin/amazonka
amazonka-ec2/gen/Network/AWS/EC2/DescribeSnapshots.hs
mpl-2.0
12,422
0
15
2,607
1,377
842
535
139
1
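-- Hedged usage sketch (not part of the generated module above): building a
-- request with the smart constructor and lenses. Sending it would go through
-- the usual amazonka pipeline (environment, credentials, region), which is
-- elided here; the owner filter and page size are just examples.
{-# LANGUAGE OverloadedStrings #-}
module Example.ListSnapshots where

import Control.Lens ((&), (.~), (?~))

import Network.AWS.EC2.DescribeSnapshots

mySnapshots :: DescribeSnapshots
mySnapshots = describeSnapshots
  & dssOwnerIds   .~ ["self"]   -- only snapshots owned by the calling account
  & dssMaxResults ?~ 100        -- page size for the paginated response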
module Widgets.Markdown where import Import import qualified Data.Text as T snowdriftMarkdownField :: (Monad m, HandlerSite m ~ App) => Field m Markdown snowdriftMarkdownField = Field { fieldParse = parseHelper $ Right . Markdown . T.filter (/= '\r') , fieldView = \theId name attrs value _isReq -> do render <- getUrlRender let tutorial = render MarkdownTutorialR in [whamlet| <div .markdown_label> Use <a href=#{tutorial} target="_blank"> Markdown syntax <div .markdown_label> Remember to follow the <a href=@{WikiR "snowdrift" LangEn "conduct"}> Code of Conduct <div .markdown_wrapper> <textarea id=#{theId} name=#{name} :_isReq:required *{attrs}>#{either id unMarkdown value} |] , fieldEnctype = UrlEncoded }
chreekat/snowdrift
Widgets/Markdown.hs
agpl-3.0
1,028
0
14
410
134
75
59
-1
-1
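-- Hedged sketch (not from the Snowdrift codebase): how a custom yesod-form
-- Field such as 'snowdriftMarkdownField' is normally wired into a form.
-- 'areq' and 'renderDivs' are standard yesod-form functions; the form name and
-- field label are hypothetical, Handler/Widget are the site's usual synonyms,
-- and OverloadedStrings is assumed for the FieldSettings literal.
descriptionForm :: Html -> MForm Handler (FormResult Markdown, Widget)
descriptionForm = renderDivs $
    areq snowdriftMarkdownField "Description" Nothing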
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="it-IT"> <title>SOAP Scanner | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
veggiespam/zap-extensions
addOns/soap/src/main/javahelp/org/zaproxy/zap/extension/soap/resources/help_it_IT/helpset_it_IT.hs
apache-2.0
974
80
66
160
415
210
205
-1
-1
module Blub ( blub , foo , bar ) where import Data.Text (Text) import Data.Maybe (Maybe(Just))
jystic/hsimport
tests/goldenFiles/SymbolTest24.hs
bsd-3-clause
107
0
6
28
39
25
14
6
0
{- Author: George Karachalias <[email protected]> Pattern Matching Coverage Checking. -} {-# LANGUAGE CPP, GADTs, DataKinds, KindSignatures #-} {-# LANGUAGE TupleSections #-} module Check ( -- Checking and printing checkSingle, checkMatches, isAnyPmCheckEnabled, -- See Note [Type and Term Equality Propagation] genCaseTmCs1, genCaseTmCs2, -- Pattern-match-specific type operations pmIsClosedType, pmTopNormaliseType_maybe ) where #include "HsVersions.h" import GhcPrelude import TmOracle import Unify( tcMatchTy ) import BasicTypes import DynFlags import HsSyn import TcHsSyn import Id import ConLike import Name import FamInstEnv import TysPrim (tYPETyCon) import TysWiredIn import TyCon import SrcLoc import Util import Outputable import FastString import DataCon import HscTypes (CompleteMatch(..)) import DsMonad import TcSimplify (tcCheckSatisfiability) import TcType (toTcType, isStringTy, isIntTy, isWordTy) import Bag import ErrUtils import Var (EvVar) import TyCoRep import Type import UniqSupply import DsGRHSs (isTrueLHsExpr) import Maybes ( expectJust ) import Data.List (find) import Data.Maybe (isJust, fromMaybe) import Control.Monad (forM, when, forM_) import Coercion import TcEvidence import IOEnv import qualified Data.Semigroup as Semi import ListT (ListT(..), fold, select) {- This module checks pattern matches for: \begin{enumerate} \item Equations that are redundant \item Equations with inaccessible right-hand-side \item Exhaustiveness \end{enumerate} The algorithm is based on the paper: "GADTs Meet Their Match: Pattern-matching Warnings That Account for GADTs, Guards, and Laziness" http://people.cs.kuleuven.be/~george.karachalias/papers/p424-karachalias.pdf %************************************************************************ %* * Pattern Match Check Types %* * %************************************************************************ -} -- We use the non-determinism monad to apply the algorithm to several -- possible sets of constructors. Users can specify complete sets of -- constructors by using COMPLETE pragmas. -- The algorithm only picks out constructor -- sets deep in the bowels which makes a simpler `mapM` more difficult to -- implement. The non-determinism is only used in one place, see the ConVar -- case in `pmCheckHd`. type PmM a = ListT DsM a liftD :: DsM a -> PmM a liftD m = ListT $ \sk fk -> m >>= \a -> sk a fk -- Pick the first match complete covered match or otherwise the "best" match. -- The best match is the one with the least uncovered clauses, ties broken -- by the number of inaccessible clauses followed by number of redundant -- clauses. -- -- This is specified in the -- "Disambiguating between multiple ``COMPLETE`` pragmas" section of the -- users' guide. If you update the implementation of this function, make sure -- to update that section of the users' guide as well. 
getResult :: PmM PmResult -> DsM PmResult getResult ls = do { res <- fold ls goM (pure Nothing) ; case res of Nothing -> panic "getResult is empty" Just a -> return a } where goM :: PmResult -> DsM (Maybe PmResult) -> DsM (Maybe PmResult) goM mpm dpm = do { pmr <- dpm ; return $ Just $ go pmr mpm } -- Careful not to force unecessary results go :: Maybe PmResult -> PmResult -> PmResult go Nothing rs = rs go (Just old@(PmResult prov rs (UncoveredPatterns us) is)) new | null us && null rs && null is = old | otherwise = let PmResult prov' rs' (UncoveredPatterns us') is' = new in case compareLength us us' `mappend` (compareLength is is') `mappend` (compareLength rs rs') `mappend` (compare prov prov') of GT -> new EQ -> new LT -> old go (Just (PmResult _ _ (TypeOfUncovered _) _)) _new = panic "getResult: No inhabitation candidates" data PatTy = PAT | VA -- Used only as a kind, to index PmPat -- The *arity* of a PatVec [p1,..,pn] is -- the number of p1..pn that are not Guards data PmPat :: PatTy -> * where PmCon :: { pm_con_con :: ConLike , pm_con_arg_tys :: [Type] , pm_con_tvs :: [TyVar] , pm_con_dicts :: [EvVar] , pm_con_args :: [PmPat t] } -> PmPat t -- For PmCon arguments' meaning see @ConPatOut@ in hsSyn/HsPat.hs PmVar :: { pm_var_id :: Id } -> PmPat t PmLit :: { pm_lit_lit :: PmLit } -> PmPat t -- See Note [Literals in PmPat] PmNLit :: { pm_lit_id :: Id , pm_lit_not :: [PmLit] } -> PmPat 'VA PmGrd :: { pm_grd_pv :: PatVec , pm_grd_expr :: PmExpr } -> PmPat 'PAT -- data T a where -- MkT :: forall p q. (Eq p, Ord q) => p -> q -> T [p] -- or MkT :: forall p q r. (Eq p, Ord q, [p] ~ r) => p -> q -> T r type Pattern = PmPat 'PAT -- ^ Patterns type ValAbs = PmPat 'VA -- ^ Value Abstractions type PatVec = [Pattern] -- ^ Pattern Vectors data ValVec = ValVec [ValAbs] Delta -- ^ Value Vector Abstractions -- | Term and type constraints to accompany each value vector abstraction. -- For efficiency, we store the term oracle state instead of the term -- constraints. TODO: Do the same for the type constraints? data Delta = MkDelta { delta_ty_cs :: Bag EvVar , delta_tm_cs :: TmState } type ValSetAbs = [ValVec] -- ^ Value Set Abstractions type Uncovered = ValSetAbs -- Instead of keeping the whole sets in memory, we keep a boolean for both the -- covered and the divergent set (we store the uncovered set though, since we -- want to print it). For both the covered and the divergent we have: -- -- True <=> The set is non-empty -- -- hence: -- C = True ==> Useful clause (no warning) -- C = False, D = True ==> Clause with inaccessible RHS -- C = False, D = False ==> Redundant clause data Covered = Covered | NotCovered deriving Show instance Outputable Covered where ppr (Covered) = text "Covered" ppr (NotCovered) = text "NotCovered" -- Like the or monoid for booleans -- Covered = True, Uncovered = False instance Semi.Semigroup Covered where Covered <> _ = Covered _ <> Covered = Covered NotCovered <> NotCovered = NotCovered instance Monoid Covered where mempty = NotCovered mappend = (Semi.<>) data Diverged = Diverged | NotDiverged deriving Show instance Outputable Diverged where ppr Diverged = text "Diverged" ppr NotDiverged = text "NotDiverged" instance Semi.Semigroup Diverged where Diverged <> _ = Diverged _ <> Diverged = Diverged NotDiverged <> NotDiverged = NotDiverged instance Monoid Diverged where mempty = NotDiverged mappend = (Semi.<>) -- | When we learned that a given match group is complete data Provenance = FromBuiltin -- ^ From the original definition of the type -- constructor. 
| FromComplete -- ^ From a user-provided @COMPLETE@ pragma deriving (Show, Eq, Ord) instance Outputable Provenance where ppr = text . show instance Semi.Semigroup Provenance where FromComplete <> _ = FromComplete _ <> FromComplete = FromComplete _ <> _ = FromBuiltin instance Monoid Provenance where mempty = FromBuiltin mappend = (Semi.<>) data PartialResult = PartialResult { presultProvenance :: Provenance -- keep track of provenance because we don't want -- to warn about redundant matches if the result -- is contaminated with a COMPLETE pragma , presultCovered :: Covered , presultUncovered :: Uncovered , presultDivergent :: Diverged } instance Outputable PartialResult where ppr (PartialResult prov c vsa d) = text "PartialResult" <+> ppr prov <+> ppr c <+> ppr d <+> ppr vsa instance Semi.Semigroup PartialResult where (PartialResult prov1 cs1 vsa1 ds1) <> (PartialResult prov2 cs2 vsa2 ds2) = PartialResult (prov1 Semi.<> prov2) (cs1 Semi.<> cs2) (vsa1 Semi.<> vsa2) (ds1 Semi.<> ds2) instance Monoid PartialResult where mempty = PartialResult mempty mempty [] mempty mappend = (Semi.<>) -- newtype ChoiceOf a = ChoiceOf [a] -- | Pattern check result -- -- * Redundant clauses -- * Not-covered clauses (or their type, if no pattern is available) -- * Clauses with inaccessible RHS -- -- More details about the classification of clauses into useful, redundant -- and with inaccessible right hand side can be found here: -- -- https://ghc.haskell.org/trac/ghc/wiki/PatternMatchCheck -- data PmResult = PmResult { pmresultProvenance :: Provenance , pmresultRedundant :: [Located [LPat GhcTc]] , pmresultUncovered :: UncoveredCandidates , pmresultInaccessible :: [Located [LPat GhcTc]] } -- | Either a list of patterns that are not covered, or their type, in case we -- have no patterns at hand. Not having patterns at hand can arise when -- handling EmptyCase expressions, in two cases: -- -- * The type of the scrutinee is a trivially inhabited type (like Int or Char) -- * The type of the scrutinee cannot be reduced to WHNF. -- -- In both these cases we have no inhabitation candidates for the type at hand, -- but we don't want to issue just a wildcard as missing. Instead, we print a -- type annotated wildcard, so that the user knows what kind of patterns is -- expected (e.g. (_ :: Int), or (_ :: F Int), where F Int does not reduce). 
data UncoveredCandidates = UncoveredPatterns Uncovered | TypeOfUncovered Type -- | The empty pattern check result emptyPmResult :: PmResult emptyPmResult = PmResult FromBuiltin [] (UncoveredPatterns []) [] -- | Non-exhaustive empty case with unknown/trivial inhabitants uncoveredWithTy :: Type -> PmResult uncoveredWithTy ty = PmResult FromBuiltin [] (TypeOfUncovered ty) [] {- %************************************************************************ %* * Entry points to the checker: checkSingle and checkMatches %* * %************************************************************************ -} -- | Check a single pattern binding (let) checkSingle :: DynFlags -> DsMatchContext -> Id -> Pat GhcTc -> DsM () checkSingle dflags ctxt@(DsMatchContext _ locn) var p = do tracePmD "checkSingle" (vcat [ppr ctxt, ppr var, ppr p]) mb_pm_res <- tryM (getResult (checkSingle' locn var p)) case mb_pm_res of Left _ -> warnPmIters dflags ctxt Right res -> dsPmWarn dflags ctxt res -- | Check a single pattern binding (let) checkSingle' :: SrcSpan -> Id -> Pat GhcTc -> PmM PmResult checkSingle' locn var p = do liftD resetPmIterDs -- set the iter-no to zero fam_insts <- liftD dsGetFamInstEnvs clause <- liftD $ translatePat fam_insts p missing <- mkInitialUncovered [var] tracePm "checkSingle: missing" (vcat (map pprValVecDebug missing)) -- no guards PartialResult prov cs us ds <- runMany (pmcheckI clause []) missing let us' = UncoveredPatterns us return $ case (cs,ds) of (Covered, _ ) -> PmResult prov [] us' [] -- useful (NotCovered, NotDiverged) -> PmResult prov m us' [] -- redundant (NotCovered, Diverged ) -> PmResult prov [] us' m -- inaccessible rhs where m = [L locn [L locn p]] -- | Check a matchgroup (case, functions, etc.) checkMatches :: DynFlags -> DsMatchContext -> [Id] -> [LMatch GhcTc (LHsExpr GhcTc)] -> DsM () checkMatches dflags ctxt vars matches = do tracePmD "checkMatches" (hang (vcat [ppr ctxt , ppr vars , text "Matches:"]) 2 (vcat (map ppr matches))) mb_pm_res <- tryM $ getResult $ case matches of -- Check EmptyCase separately -- See Note [Checking EmptyCase Expressions] [] | [var] <- vars -> checkEmptyCase' var _normal_match -> checkMatches' vars matches case mb_pm_res of Left _ -> warnPmIters dflags ctxt Right res -> dsPmWarn dflags ctxt res -- | Check a matchgroup (case, functions, etc.). To be called on a non-empty -- list of matches. For empty case expressions, use checkEmptyCase' instead. 
checkMatches' :: [Id] -> [LMatch GhcTc (LHsExpr GhcTc)] -> PmM PmResult checkMatches' vars matches | null matches = panic "checkMatches': EmptyCase" | otherwise = do liftD resetPmIterDs -- set the iter-no to zero missing <- mkInitialUncovered vars tracePm "checkMatches: missing" (vcat (map pprValVecDebug missing)) (prov, rs,us,ds) <- go matches missing return $ PmResult { pmresultProvenance = prov , pmresultRedundant = map hsLMatchToLPats rs , pmresultUncovered = UncoveredPatterns us , pmresultInaccessible = map hsLMatchToLPats ds } where go :: [LMatch GhcTc (LHsExpr GhcTc)] -> Uncovered -> PmM (Provenance , [LMatch GhcTc (LHsExpr GhcTc)] , Uncovered , [LMatch GhcTc (LHsExpr GhcTc)]) go [] missing = return (mempty, [], missing, []) go (m:ms) missing = do tracePm "checMatches': go" (ppr m $$ ppr missing) fam_insts <- liftD dsGetFamInstEnvs (clause, guards) <- liftD $ translateMatch fam_insts m r@(PartialResult prov cs missing' ds) <- runMany (pmcheckI clause guards) missing tracePm "checMatches': go: res" (ppr r) (ms_prov, rs, final_u, is) <- go ms missing' let final_prov = prov `mappend` ms_prov return $ case (cs, ds) of -- useful (Covered, _ ) -> (final_prov, rs, final_u, is) -- redundant (NotCovered, NotDiverged) -> (final_prov, m:rs, final_u,is) -- inaccessible (NotCovered, Diverged ) -> (final_prov, rs, final_u, m:is) hsLMatchToLPats :: LMatch id body -> Located [LPat id] hsLMatchToLPats (L l (Match { m_pats = pats })) = L l pats -- | Check an empty case expression. Since there are no clauses to process, we -- only compute the uncovered set. See Note [Checking EmptyCase Expressions] -- for details. checkEmptyCase' :: Id -> PmM PmResult checkEmptyCase' var = do tm_css <- map toComplex . bagToList <$> liftD getTmCsDs case tmOracle initialTmState tm_css of Just tm_state -> do ty_css <- liftD getDictsDs fam_insts <- liftD dsGetFamInstEnvs mb_candidates <- inhabitationCandidates fam_insts (idType var) case mb_candidates of -- Inhabitation checking failed / the type is trivially inhabited Left ty -> return (uncoveredWithTy ty) -- A list of inhabitant candidates is available: Check for each -- one for the satisfiability of the constraints it gives rise to. Right candidates -> do missing_m <- flip concatMapM candidates $ \(va,tm_ct,ty_cs) -> do let all_ty_cs = unionBags ty_cs ty_css sat_ty <- tyOracle all_ty_cs return $ case (sat_ty, tmOracle tm_state (tm_ct:tm_css)) of (True, Just tm_state') -> [(va, all_ty_cs, tm_state')] _non_sat -> [] let mkValVec (va,all_ty_cs,tm_state') = ValVec [va] (MkDelta all_ty_cs tm_state') uncovered = UncoveredPatterns (map mkValVec missing_m) return $ if null missing_m then emptyPmResult else PmResult FromBuiltin [] uncovered [] Nothing -> return emptyPmResult -- | Returns 'True' if the argument 'Type' is a fully saturated application of -- a closed type constructor. -- -- Closed type constructors are those with a fixed right hand side, as -- opposed to e.g. associated types. These are of particular interest for -- pattern-match coverage checking, because GHC can exhaustively consider all -- possible forms that values of a closed type can take on. -- -- Note that this function is intended to be used to check types of value-level -- patterns, so as a consequence, the 'Type' supplied as an argument to this -- function should be of kind @Type@. 
pmIsClosedType :: Type -> Bool pmIsClosedType ty = case splitTyConApp_maybe ty of Just (tc, ty_args) | is_algebraic_like tc && not (isFamilyTyCon tc) -> ASSERT2( ty_args `lengthIs` tyConArity tc, ppr ty ) True _other -> False where -- This returns True for TyCons which /act like/ algebraic types. -- (See "Type#type_classification" for what an algebraic type is.) -- -- This is qualified with \"like\" because of a particular special -- case: TYPE (the underlyind kind behind Type, among others). TYPE -- is conceptually a datatype (and thus algebraic), but in practice it is -- a primitive builtin type, so we must check for it specially. -- -- NB: it makes sense to think of TYPE as a closed type in a value-level, -- pattern-matching context. However, at the kind level, TYPE is certainly -- not closed! Since this function is specifically tailored towards pattern -- matching, however, it's OK to label TYPE as closed. is_algebraic_like :: TyCon -> Bool is_algebraic_like tc = isAlgTyCon tc || tc == tYPETyCon pmTopNormaliseType_maybe :: FamInstEnvs -> Type -> Maybe (Type, [DataCon], Type) -- ^ Get rid of *outermost* (or toplevel) -- * type function redex -- * data family redex -- * newtypes -- -- Behaves exactly like `topNormaliseType_maybe`, but instead of returning a -- coercion, it returns useful information for issuing pattern matching -- warnings. See Note [Type normalisation for EmptyCase] for details. pmTopNormaliseType_maybe env typ = do ((ty_f,tm_f), ty) <- topNormaliseTypeX stepper comb typ return (eq_src_ty ty (typ : ty_f [ty]), tm_f [], ty) where -- Find the first type in the sequence of rewrites that is a data type, -- newtype, or a data family application (not the representation tycon!). -- This is the one that is equal (in source Haskell) to the initial type. -- If none is found in the list, then all of them are type family -- applications, so we simply return the last one, which is the *simplest*. eq_src_ty :: Type -> [Type] -> Type eq_src_ty ty tys = maybe ty id (find is_closed_or_data_family tys) is_closed_or_data_family :: Type -> Bool is_closed_or_data_family ty = pmIsClosedType ty || isDataFamilyAppType ty -- For efficiency, represent both lists as difference lists. -- comb performs the concatenation, for both lists. comb (tyf1, tmf1) (tyf2, tmf2) = (tyf1 . tyf2, tmf1 . tmf2) stepper = newTypeStepper `composeSteppers` tyFamStepper -- A 'NormaliseStepper' that unwraps newtypes, careful not to fall into -- a loop. If it would fall into a loop, it produces 'NS_Abort'. newTypeStepper :: NormaliseStepper ([Type] -> [Type],[DataCon] -> [DataCon]) newTypeStepper rec_nts tc tys | Just (ty', _co) <- instNewTyCon_maybe tc tys = case checkRecTc rec_nts tc of Just rec_nts' -> let tyf = ((TyConApp tc tys):) tmf = ((tyConSingleDataCon tc):) in NS_Step rec_nts' ty' (tyf, tmf) Nothing -> NS_Abort | otherwise = NS_Done tyFamStepper :: NormaliseStepper ([Type] -> [Type], [DataCon] -> [DataCon]) tyFamStepper rec_nts tc tys -- Try to step a type/data family = let (_args_co, ntys) = normaliseTcArgs env Representational tc tys in -- NB: It's OK to use normaliseTcArgs here instead of -- normalise_tc_args (which takes the LiftingContext described -- in Note [Normalising types]) because the reduceTyFamApp below -- works only at top level. We'll never recur in this function -- after reducing the kind of a bound tyvar. 
case reduceTyFamApp_maybe env Representational tc ntys of Just (_co, rhs) -> NS_Step rec_nts rhs ((rhs:), id) _ -> NS_Done {- Note [Type normalisation for EmptyCase] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ EmptyCase is an exception for pattern matching, since it is strict. This means that it boils down to checking whether the type of the scrutinee is inhabited. Function pmTopNormaliseType_maybe gets rid of the outermost type function/data family redex and newtypes, in search of an algebraic type constructor, which is easier to check for inhabitation. It returns 3 results instead of one, because there are 2 subtle points: 1. Newtypes are isomorphic to the underlying type in core but not in the source language, 2. The representational data family tycon is used internally but should not be shown to the user Hence, if pmTopNormaliseType_maybe env ty = Just (src_ty, dcs, core_ty), then (a) src_ty is the rewritten type which we can show to the user. That is, the type we get if we rewrite type families but not data families or newtypes. (b) dcs is the list of data constructors "skipped", every time we normalise a newtype to it's core representation, we keep track of the source data constructor. (c) core_ty is the rewritten type. That is, pmTopNormaliseType_maybe env ty = Just (src_ty, dcs, core_ty) implies topNormaliseType_maybe env ty = Just (co, core_ty) for some coercion co. To see how all cases come into play, consider the following example: data family T a :: * data instance T Int = T1 | T2 Bool -- Which gives rise to FC: -- data T a -- data R:TInt = T1 | T2 Bool -- axiom ax_ti : T Int ~R R:TInt newtype G1 = MkG1 (T Int) newtype G2 = MkG2 G1 type instance F Int = F Char type instance F Char = G2 In this case pmTopNormaliseType_maybe env (F Int) results in Just (G2, [MkG2,MkG1], R:TInt) Which means that in source Haskell: - G2 is equivalent to F Int (in contrast, G1 isn't). - if (x : R:TInt) then (MkG2 (MkG1 x) : F Int). -} -- | Generate all inhabitation candidates for a given type. The result is -- either (Left ty), if the type cannot be reduced to a closed algebraic type -- (or if it's one trivially inhabited, like Int), or (Right candidates), if it -- can. In this case, the candidates are the signature of the tycon, each one -- accompanied by the term- and type- constraints it gives rise to. -- See also Note [Checking EmptyCase Expressions] inhabitationCandidates :: FamInstEnvs -> Type -> PmM (Either Type [(ValAbs, ComplexEq, Bag EvVar)]) inhabitationCandidates fam_insts ty = case pmTopNormaliseType_maybe fam_insts ty of Just (src_ty, dcs, core_ty) -> alts_to_check src_ty core_ty dcs Nothing -> alts_to_check ty ty [] where -- All these types are trivially inhabited trivially_inhabited = [ charTyCon, doubleTyCon, floatTyCon , intTyCon, wordTyCon, word8TyCon ] -- Note: At the moment we leave all the typing and constraint fields of -- PmCon empty, since we know that they are not gonna be used. Is the -- right-thing-to-do to actually create them, even if they are never used? 
    build_tm :: ValAbs -> [DataCon] -> ValAbs
    build_tm = foldr (\dc e -> PmCon (RealDataCon dc) [] [] [] [e])

    -- Inhabitation candidates, using the result of pmTopNormaliseType_maybe
    alts_to_check :: Type -> Type -> [DataCon]
                  -> PmM (Either Type [(ValAbs, ComplexEq, Bag EvVar)])
    alts_to_check src_ty core_ty dcs = case splitTyConApp_maybe core_ty of
      Just (tc, _)
        | tc `elem` trivially_inhabited -> case dcs of
            []    -> return (Left src_ty)
            (_:_) -> do var <- liftD $ mkPmId (toTcType core_ty)
                        let va = build_tm (PmVar var) dcs
                        return $ Right [(va, mkIdEq var, emptyBag)]
        | pmIsClosedType core_ty -> liftD $ do
            var  <- mkPmId (toTcType core_ty) -- it would be wrong to unify x
            alts <- mapM (mkOneConFull var . RealDataCon) (tyConDataCons tc)
            return $ Right [(build_tm va dcs, eq, cs) | (va, eq, cs) <- alts]
      -- For other types conservatively assume that they are inhabited.
      _other -> return (Left src_ty)

{- Note [Checking EmptyCase Expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Empty case expressions are strict on the scrutinee. That is, `case x of {}`
will force argument `x`. Hence, `checkMatches` is not sufficient for checking
empty cases, because it assumes that the match is not strict (which is true
for all other cases, apart from EmptyCase). This gave rise to #10746. Instead,
we do the following:

1. We normalise the outermost type family redex, data family redex or newtype,
   using pmTopNormaliseType_maybe (in types/FamInstEnv.hs). This computes 3
   things:
   (a) A normalised type src_ty, which is equal to the type of the scrutinee
       in source Haskell (does not normalise newtypes or data families)
   (b) The actual normalised type core_ty, which coincides with the result of
       topNormaliseType_maybe. This type is not necessarily equal to the input
       type in source Haskell. And this is precisely the reason we compute (a)
       and (c): the reasoning happens with the underlying types, but both the
       patterns and types we print should respect newtypes and also show the
       family type constructors and not the representation constructors.
   (c) A list of all newtype data constructors dcs, each one corresponding to
       a newtype rewrite performed in (b).
   For an example see also Note [Type normalisation for EmptyCase]
   in types/FamInstEnv.hs.

2. Function checkEmptyCase' performs the check:
   - If core_ty is not an algebraic type, then we cannot check for
     inhabitation, so we emit (_ :: src_ty) as missing, conservatively
     assuming that the type is inhabited.
   - If core_ty is an algebraic type, then we unfold the scrutinee to all
     possible constructor patterns, using inhabitationCandidates, and then
     check each one for constraint satisfiability, same as we do for normal
     pattern match checking.
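To illustrate, here is a small made-up example (the module and names below are
not part of GHC; assume -XEmptyCase and -Wincomplete-patterns are enabled):

    data Void                    -- no constructors

    absurd :: Void -> a
    absurd x = case x of {}      -- no inhabitation candidates: no warning

    partial :: Bool -> a
    partial x = case x of {}     -- candidates False and True are both
                                 -- satisfiable: warn that False and True
                                 -- are not matched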
%************************************************************************ %* * Transform source syntax to *our* syntax %* * %************************************************************************ -} -- ----------------------------------------------------------------------- -- * Utilities nullaryConPattern :: ConLike -> Pattern -- Nullary data constructor and nullary type constructor nullaryConPattern con = PmCon { pm_con_con = con, pm_con_arg_tys = [] , pm_con_tvs = [], pm_con_dicts = [], pm_con_args = [] } {-# INLINE nullaryConPattern #-} truePattern :: Pattern truePattern = nullaryConPattern (RealDataCon trueDataCon) {-# INLINE truePattern #-} -- | A fake guard pattern (True <- _) used to represent cases we cannot handle fake_pat :: Pattern fake_pat = PmGrd { pm_grd_pv = [truePattern] , pm_grd_expr = PmExprOther EWildPat } {-# INLINE fake_pat #-} -- | Check whether a guard pattern is generated by the checker (unhandled) isFakeGuard :: [Pattern] -> PmExpr -> Bool isFakeGuard [PmCon { pm_con_con = RealDataCon c }] (PmExprOther EWildPat) | c == trueDataCon = True | otherwise = False isFakeGuard _pats _e = False -- | Generate a `canFail` pattern vector of a specific type mkCanFailPmPat :: Type -> DsM PatVec mkCanFailPmPat ty = do var <- mkPmVar ty return [var, fake_pat] vanillaConPattern :: ConLike -> [Type] -> PatVec -> Pattern -- ADT constructor pattern => no existentials, no local constraints vanillaConPattern con arg_tys args = PmCon { pm_con_con = con, pm_con_arg_tys = arg_tys , pm_con_tvs = [], pm_con_dicts = [], pm_con_args = args } {-# INLINE vanillaConPattern #-} -- | Create an empty list pattern of a given type nilPattern :: Type -> Pattern nilPattern ty = PmCon { pm_con_con = RealDataCon nilDataCon, pm_con_arg_tys = [ty] , pm_con_tvs = [], pm_con_dicts = [] , pm_con_args = [] } {-# INLINE nilPattern #-} mkListPatVec :: Type -> PatVec -> PatVec -> PatVec mkListPatVec ty xs ys = [PmCon { pm_con_con = RealDataCon consDataCon , pm_con_arg_tys = [ty] , pm_con_tvs = [], pm_con_dicts = [] , pm_con_args = xs++ys }] {-# INLINE mkListPatVec #-} -- | Create a (non-overloaded) literal pattern mkLitPattern :: HsLit GhcTc -> Pattern mkLitPattern lit = PmLit { pm_lit_lit = PmSLit lit } {-# INLINE mkLitPattern #-} -- ----------------------------------------------------------------------- -- * Transform (Pat Id) into of (PmPat Id) translatePat :: FamInstEnvs -> Pat GhcTc -> DsM PatVec translatePat fam_insts pat = case pat of WildPat ty -> mkPmVars [ty] VarPat id -> return [PmVar (unLoc id)] ParPat p -> translatePat fam_insts (unLoc p) LazyPat _ -> mkPmVars [hsPatType pat] -- like a variable -- ignore strictness annotations for now BangPat p -> translatePat fam_insts (unLoc p) AsPat lid p -> do -- Note [Translating As Patterns] ps <- translatePat fam_insts (unLoc p) let [e] = map vaToPmExpr (coercePatVec ps) g = PmGrd [PmVar (unLoc lid)] e return (ps ++ [g]) SigPatOut p _ty -> translatePat fam_insts (unLoc p) -- See Note [Translate CoPats] CoPat wrapper p ty | isIdHsWrapper wrapper -> translatePat fam_insts p | WpCast co <- wrapper, isReflexiveCo co -> translatePat fam_insts p | otherwise -> do ps <- translatePat fam_insts p (xp,xe) <- mkPmId2Forms ty let g = mkGuard ps (mkHsWrap wrapper (unLoc xe)) return [xp,g] -- (n + k) ===> x (True <- x >= k) (n <- x-k) NPlusKPat (L _ _n) _k1 _k2 _ge _minus ty -> mkCanFailPmPat ty -- (fun -> pat) ===> x (pat <- fun x) ViewPat lexpr lpat arg_ty -> do ps <- translatePat fam_insts (unLoc lpat) -- See Note [Guards and Approximation] case all cantFailPattern ps 
of True -> do (xp,xe) <- mkPmId2Forms arg_ty let g = mkGuard ps (HsApp lexpr xe) return [xp,g] False -> mkCanFailPmPat arg_ty -- list ListPat ps ty Nothing -> do foldr (mkListPatVec ty) [nilPattern ty] <$> translatePatVec fam_insts (map unLoc ps) -- overloaded list ListPat lpats elem_ty (Just (pat_ty, _to_list)) | Just e_ty <- splitListTyConApp_maybe pat_ty , (_, norm_elem_ty) <- normaliseType fam_insts Nominal elem_ty -- elem_ty is frequently something like -- `Item [Int]`, but we prefer `Int` , norm_elem_ty `eqType` e_ty -> -- We have to ensure that the element types are exactly the same. -- Otherwise, one may give an instance IsList [Int] (more specific than -- the default IsList [a]) with a different implementation for `toList' translatePat fam_insts (ListPat lpats e_ty Nothing) -- See Note [Guards and Approximation] | otherwise -> mkCanFailPmPat pat_ty ConPatOut { pat_con = L _ con , pat_arg_tys = arg_tys , pat_tvs = ex_tvs , pat_dicts = dicts , pat_args = ps } -> do groups <- allCompleteMatches con arg_tys case groups of [] -> mkCanFailPmPat (conLikeResTy con arg_tys) _ -> do args <- translateConPatVec fam_insts arg_tys ex_tvs con ps return [PmCon { pm_con_con = con , pm_con_arg_tys = arg_tys , pm_con_tvs = ex_tvs , pm_con_dicts = dicts , pm_con_args = args }] NPat (L _ ol) mb_neg _eq ty -> translateNPat fam_insts ol mb_neg ty LitPat lit -- If it is a string then convert it to a list of characters | HsString src s <- lit -> foldr (mkListPatVec charTy) [nilPattern charTy] <$> translatePatVec fam_insts (map (LitPat . HsChar src) (unpackFS s)) | otherwise -> return [mkLitPattern lit] PArrPat ps ty -> do tidy_ps <- translatePatVec fam_insts (map unLoc ps) let fake_con = RealDataCon (parrFakeCon (length ps)) return [vanillaConPattern fake_con [ty] (concat tidy_ps)] TuplePat ps boxity tys -> do tidy_ps <- translatePatVec fam_insts (map unLoc ps) let tuple_con = RealDataCon (tupleDataCon boxity (length ps)) return [vanillaConPattern tuple_con tys (concat tidy_ps)] SumPat p alt arity ty -> do tidy_p <- translatePat fam_insts (unLoc p) let sum_con = RealDataCon (sumDataCon alt arity) return [vanillaConPattern sum_con ty tidy_p] -- -------------------------------------------------------------------------- -- Not supposed to happen ConPatIn {} -> panic "Check.translatePat: ConPatIn" SplicePat {} -> panic "Check.translatePat: SplicePat" SigPatIn {} -> panic "Check.translatePat: SigPatIn" -- | Translate an overloaded literal (see `tidyNPat' in deSugar/MatchLit.hs) translateNPat :: FamInstEnvs -> HsOverLit GhcTc -> Maybe (SyntaxExpr GhcTc) -> Type -> DsM PatVec translateNPat fam_insts (OverLit val False _ ty) mb_neg outer_ty | not type_change, isStringTy ty, HsIsString src s <- val, Nothing <- mb_neg = translatePat fam_insts (LitPat (HsString src s)) | not type_change, isIntTy ty, HsIntegral i <- val = translatePat fam_insts (LitPat $ case mb_neg of Nothing -> HsInt def i Just _ -> HsInt def (negateIntegralLit i)) | not type_change, isWordTy ty, HsIntegral i <- val = translatePat fam_insts (LitPat $ case mb_neg of Nothing -> HsWordPrim (il_text i) (il_value i) Just _ -> let ni = negateIntegralLit i in HsWordPrim (il_text ni) (il_value ni)) where type_change = not (outer_ty `eqType` ty) translateNPat _ ol mb_neg _ = return [PmLit { pm_lit_lit = PmOLit (isJust mb_neg) ol }] -- | Translate a list of patterns (Note: each pattern is translated -- to a pattern vector but we do not concatenate the results). 
translatePatVec :: FamInstEnvs -> [Pat GhcTc] -> DsM [PatVec] translatePatVec fam_insts pats = mapM (translatePat fam_insts) pats -- | Translate a constructor pattern translateConPatVec :: FamInstEnvs -> [Type] -> [TyVar] -> ConLike -> HsConPatDetails GhcTc -> DsM PatVec translateConPatVec fam_insts _univ_tys _ex_tvs _ (PrefixCon ps) = concat <$> translatePatVec fam_insts (map unLoc ps) translateConPatVec fam_insts _univ_tys _ex_tvs _ (InfixCon p1 p2) = concat <$> translatePatVec fam_insts (map unLoc [p1,p2]) translateConPatVec fam_insts univ_tys ex_tvs c (RecCon (HsRecFields fs _)) -- Nothing matched. Make up some fresh term variables | null fs = mkPmVars arg_tys -- The data constructor was not defined using record syntax. For the -- pattern to be in record syntax it should be empty (e.g. Just {}). -- So just like the previous case. | null orig_lbls = ASSERT(null matched_lbls) mkPmVars arg_tys -- Some of the fields appear, in the original order (there may be holes). -- Generate a simple constructor pattern and make up fresh variables for -- the rest of the fields | matched_lbls `subsetOf` orig_lbls = ASSERT(orig_lbls `equalLength` arg_tys) let translateOne (lbl, ty) = case lookup lbl matched_pats of Just p -> translatePat fam_insts p Nothing -> mkPmVars [ty] in concatMapM translateOne (zip orig_lbls arg_tys) -- The fields that appear are not in the correct order. Make up fresh -- variables for all fields and add guards after matching, to force the -- evaluation in the correct order. | otherwise = do arg_var_pats <- mkPmVars arg_tys translated_pats <- forM matched_pats $ \(x,pat) -> do pvec <- translatePat fam_insts pat return (x, pvec) let zipped = zip orig_lbls [ x | PmVar x <- arg_var_pats ] guards = map (\(name,pvec) -> case lookup name zipped of Just x -> PmGrd pvec (PmExprVar (idName x)) Nothing -> panic "translateConPatVec: lookup") translated_pats return (arg_var_pats ++ guards) where -- The actual argument types (instantiated) arg_tys = conLikeInstOrigArgTys c (univ_tys ++ mkTyVarTys ex_tvs) -- Some label information orig_lbls = map flSelector $ conLikeFieldLabels c matched_pats = [ (getName (unLoc (hsRecFieldId x)), unLoc (hsRecFieldArg x)) | L _ x <- fs] matched_lbls = [ name | (name, _pat) <- matched_pats ] subsetOf :: Eq a => [a] -> [a] -> Bool subsetOf [] _ = True subsetOf (_:_) [] = False subsetOf (x:xs) (y:ys) | x == y = subsetOf xs ys | otherwise = subsetOf (x:xs) ys -- Translate a single match translateMatch :: FamInstEnvs -> LMatch GhcTc (LHsExpr GhcTc) -> DsM (PatVec,[PatVec]) translateMatch fam_insts (L _ (Match { m_pats = lpats, m_grhss = grhss })) = do pats' <- concat <$> translatePatVec fam_insts pats guards' <- mapM (translateGuards fam_insts) guards return (pats', guards') where extractGuards :: LGRHS GhcTc (LHsExpr GhcTc) -> [GuardStmt GhcTc] extractGuards (L _ (GRHS gs _)) = map unLoc gs pats = map unLoc lpats guards = map extractGuards (grhssGRHSs grhss) -- ----------------------------------------------------------------------- -- * Transform source guards (GuardStmt Id) to PmPats (Pattern) -- | Translate a list of guard statements to a pattern vector translateGuards :: FamInstEnvs -> [GuardStmt GhcTc] -> DsM PatVec translateGuards fam_insts guards = do all_guards <- concat <$> mapM (translateGuard fam_insts) guards return (replace_unhandled all_guards) -- It should have been (return all_guards) but it is too expressive. 
-- Since the term oracle does not handle all constraints we generate, -- we (hackily) replace all constraints the oracle cannot handle with a -- single one (we need to know if there is a possibility of falure). -- See Note [Guards and Approximation] for all guard-related approximations -- we implement. where replace_unhandled :: PatVec -> PatVec replace_unhandled gv | any_unhandled gv = fake_pat : [ p | p <- gv, shouldKeep p ] | otherwise = gv any_unhandled :: PatVec -> Bool any_unhandled gv = any (not . shouldKeep) gv shouldKeep :: Pattern -> Bool shouldKeep p | PmVar {} <- p = True | PmCon {} <- p = singleConstructor (pm_con_con p) && all shouldKeep (pm_con_args p) shouldKeep (PmGrd pv e) | all shouldKeep pv = True | isNotPmExprOther e = True -- expensive but we want it shouldKeep _other_pat = False -- let the rest.. -- | Check whether a pattern can fail to match cantFailPattern :: Pattern -> Bool cantFailPattern p | PmVar {} <- p = True | PmCon {} <- p = singleConstructor (pm_con_con p) && all cantFailPattern (pm_con_args p) cantFailPattern (PmGrd pv _e) = all cantFailPattern pv cantFailPattern _ = False -- | Translate a guard statement to Pattern translateGuard :: FamInstEnvs -> GuardStmt GhcTc -> DsM PatVec translateGuard fam_insts guard = case guard of BodyStmt e _ _ _ -> translateBoolGuard e LetStmt binds -> translateLet (unLoc binds) BindStmt p e _ _ _ -> translateBind fam_insts p e LastStmt {} -> panic "translateGuard LastStmt" ParStmt {} -> panic "translateGuard ParStmt" TransStmt {} -> panic "translateGuard TransStmt" RecStmt {} -> panic "translateGuard RecStmt" ApplicativeStmt {} -> panic "translateGuard ApplicativeLastStmt" -- | Translate let-bindings translateLet :: HsLocalBinds GhcTc -> DsM PatVec translateLet _binds = return [] -- | Translate a pattern guard translateBind :: FamInstEnvs -> LPat GhcTc -> LHsExpr GhcTc -> DsM PatVec translateBind fam_insts (L _ p) e = do ps <- translatePat fam_insts p return [mkGuard ps (unLoc e)] -- | Translate a boolean guard translateBoolGuard :: LHsExpr GhcTc -> DsM PatVec translateBoolGuard e | isJust (isTrueLHsExpr e) = return [] -- The formal thing to do would be to generate (True <- True) -- but it is trivial to solve so instead we give back an empty -- PatVec for efficiency | otherwise = return [mkGuard [truePattern] (unLoc e)] {- Note [Guards and Approximation] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Even if the algorithm is really expressive, the term oracle we use is not. Hence, several features are not translated *properly* but we approximate. The list includes: 1. View Patterns ---------------- A view pattern @(f -> p)@ should be translated to @x (p <- f x)@. The term oracle does not handle function applications so we know that the generated constraints will not be handled at the end. Hence, we distinguish between two cases: a) Pattern @p@ cannot fail. Then this is just a binding and we do the *right thing*. b) Pattern @p@ can fail. This means that when checking the guard, we will generate several cases, with no useful information. E.g.: h (f -> [a,b]) = ... h x ([a,b] <- f x) = ... uncovered set = { [x |> { False ~ (f x ~ []) }] , [x |> { False ~ (f x ~ (t1:[])) }] , [x |> { False ~ (f x ~ (t1:t2:t3:t4)) }] } So we have two problems: 1) Since we do not print the constraints in the general case (they may be too many), the warning will look like this: Pattern match(es) are non-exhaustive In an equation for `h': Patterns not matched: _ _ _ Which is not short and not more useful than a single underscore. 
2) The size of the uncovered set increases a lot, without gaining more expressivity in our warnings. Hence, in this case, we replace the guard @([a,b] <- f x)@ with a *dummy* @fake_pat@: @True <- _@. That is, we record that there is a possibility of failure but we minimize it to a True/False. This generates a single warning and much smaller uncovered sets. 2. Overloaded Lists ------------------- An overloaded list @[...]@ should be translated to @x ([...] <- toList x)@. The problem is exactly like above, as its solution. For future reference, the code below is the *right thing to do*: ListPat lpats elem_ty (Just (pat_ty, to_list)) otherwise -> do (xp, xe) <- mkPmId2Forms pat_ty ps <- translatePatVec (map unLoc lpats) let pats = foldr (mkListPatVec elem_ty) [nilPattern elem_ty] ps g = mkGuard pats (HsApp (noLoc to_list) xe) return [xp,g] 3. Overloaded Literals ---------------------- The case with literals is a bit different. a literal @l@ should be translated to @x (True <- x == from l)@. Since we want to have better warnings for overloaded literals as it is a very common feature, we treat them differently. They are mainly covered in Note [Undecidable Equality on Overloaded Literals] in PmExpr. 4. N+K Patterns & Pattern Synonyms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ An n+k pattern (n+k) should be translated to @x (True <- x >= k) (n <- x-k)@. Since the only pattern of the three that causes failure is guard @(n <- x-k)@, and has two possible outcomes. Hence, there is no benefit in using a dummy and we implement the proper thing. Pattern synonyms are simply not implemented yet. Hence, to be conservative, we generate a dummy pattern, assuming that the pattern can fail. 5. Actual Guards ---------------- During translation, boolean guards and pattern guards are translated properly. Let bindings though are omitted by function @translateLet@. Since they are lazy bindings, we do not actually want to generate a (strict) equality (like we do in the pattern bind case). Hence, we safely drop them. Additionally, top-level guard translation (performed by @translateGuards@) replaces guards that cannot be reasoned about (like the ones we described in 1-4) with a single @fake_pat@ to record the possibility of failure to match. Note [Translate CoPats] ~~~~~~~~~~~~~~~~~~~~~~~ The pattern match checker did not know how to handle coerced patterns `CoPat` efficiently, which gave rise to #11276. The original approach translated `CoPat`s: pat |> co ===> x (pat <- (e |> co)) Instead, we now check whether the coercion is a hole or if it is just refl, in which case we can drop it. Unfortunately, data families generate useful coercions so guards are still generated in these cases and checking data families is not really efficient. %************************************************************************ %* * Utilities for Pattern Match Checking %* * %************************************************************************ -} -- ---------------------------------------------------------------------------- -- * Basic utilities -- | Get the type out of a PmPat. For guard patterns (ps <- e) we use the type -- of the first (or the single -WHEREVER IT IS- valid to use?) 
pattern pmPatType :: PmPat p -> Type pmPatType (PmCon { pm_con_con = con, pm_con_arg_tys = tys }) = conLikeResTy con tys pmPatType (PmVar { pm_var_id = x }) = idType x pmPatType (PmLit { pm_lit_lit = l }) = pmLitType l pmPatType (PmNLit { pm_lit_id = x }) = idType x pmPatType (PmGrd { pm_grd_pv = pv }) = ASSERT(patVecArity pv == 1) (pmPatType p) where Just p = find ((==1) . patternArity) pv -- | Generate a value abstraction for a given constructor (generate -- fresh variables of the appropriate type for arguments) mkOneConFull :: Id -> ConLike -> DsM (ValAbs, ComplexEq, Bag EvVar) -- * x :: T tys, where T is an algebraic data type -- NB: in the case of a data family, T is the *representation* TyCon -- e.g. data instance T (a,b) = T1 a b -- leads to -- data TPair a b = T1 a b -- The "representation" type -- It is TPair, not T, that is given to mkOneConFull -- -- * 'con' K is a constructor of data type T -- -- After instantiating the universal tyvars of K we get -- K tys :: forall bs. Q => s1 .. sn -> T tys -- -- Results: ValAbs: K (y1::s1) .. (yn::sn) -- ComplexEq: x ~ K y1..yn -- [EvVar]: Q mkOneConFull x con = do let res_ty = idType x (univ_tvs, ex_tvs, eq_spec, thetas, _req_theta , arg_tys, con_res_ty) = conLikeFullSig con tc_args = tyConAppArgs res_ty subst1 = case con of RealDataCon {} -> zipTvSubst univ_tvs tc_args PatSynCon {} -> expectJust "mkOneConFull" (tcMatchTy con_res_ty res_ty) -- See Note [Pattern synonym result type] in PatSyn (subst, ex_tvs') <- cloneTyVarBndrs subst1 ex_tvs <$> getUniqueSupplyM -- Fresh term variables (VAs) as arguments to the constructor arguments <- mapM mkPmVar (substTys subst arg_tys) -- All constraints bound by the constructor (alpha-renamed) let theta_cs = substTheta subst (eqSpecPreds eq_spec ++ thetas) evvars <- mapM (nameType "pm") theta_cs let con_abs = PmCon { pm_con_con = con , pm_con_arg_tys = tc_args , pm_con_tvs = ex_tvs' , pm_con_dicts = evvars , pm_con_args = arguments } return (con_abs, (PmExprVar (idName x), vaToPmExpr con_abs), listToBag evvars) -- ---------------------------------------------------------------------------- -- * More smart constructors and fresh variable generation -- | Create a guard pattern mkGuard :: PatVec -> HsExpr GhcTc -> Pattern mkGuard pv e | all cantFailPattern pv = PmGrd pv expr | PmExprOther {} <- expr = fake_pat | otherwise = PmGrd pv expr where expr = hsExprToPmExpr e -- | Create a term equality of the form: `(False ~ (x ~ lit))` mkNegEq :: Id -> PmLit -> ComplexEq mkNegEq x l = (falsePmExpr, PmExprVar (idName x) `PmExprEq` PmExprLit l) {-# INLINE mkNegEq #-} -- | Create a term equality of the form: `(x ~ lit)` mkPosEq :: Id -> PmLit -> ComplexEq mkPosEq x l = (PmExprVar (idName x), PmExprLit l) {-# INLINE mkPosEq #-} -- | Create a term equality of the form: `(x ~ x)` -- (always discharged by the term oracle) mkIdEq :: Id -> ComplexEq mkIdEq x = (PmExprVar name, PmExprVar name) where name = idName x {-# INLINE mkIdEq #-} -- | Generate a variable pattern of a given type mkPmVar :: Type -> DsM (PmPat p) mkPmVar ty = PmVar <$> mkPmId ty {-# INLINE mkPmVar #-} -- | Generate many variable patterns, given a list of types mkPmVars :: [Type] -> DsM PatVec mkPmVars tys = mapM mkPmVar tys {-# INLINE mkPmVars #-} -- | Generate a fresh `Id` of a given type mkPmId :: Type -> DsM Id mkPmId ty = getUniqueM >>= \unique -> let occname = mkVarOccFS $ fsLit "$pm" name = mkInternalName unique occname noSrcSpan in return (mkLocalId name ty) -- | Generate a fresh term variable of a given and return it in two forms: -- * A 
variable pattern -- * A variable expression mkPmId2Forms :: Type -> DsM (Pattern, LHsExpr GhcTc) mkPmId2Forms ty = do x <- mkPmId ty return (PmVar x, noLoc (HsVar (noLoc x))) -- ---------------------------------------------------------------------------- -- * Converting between Value Abstractions, Patterns and PmExpr -- | Convert a value abstraction an expression vaToPmExpr :: ValAbs -> PmExpr vaToPmExpr (PmCon { pm_con_con = c, pm_con_args = ps }) = PmExprCon c (map vaToPmExpr ps) vaToPmExpr (PmVar { pm_var_id = x }) = PmExprVar (idName x) vaToPmExpr (PmLit { pm_lit_lit = l }) = PmExprLit l vaToPmExpr (PmNLit { pm_lit_id = x }) = PmExprVar (idName x) -- | Convert a pattern vector to a list of value abstractions by dropping the -- guards (See Note [Translating As Patterns]) coercePatVec :: PatVec -> [ValAbs] coercePatVec pv = concatMap coercePmPat pv -- | Convert a pattern to a list of value abstractions (will be either an empty -- list if the pattern is a guard pattern, or a singleton list in all other -- cases) by dropping the guards (See Note [Translating As Patterns]) coercePmPat :: Pattern -> [ValAbs] coercePmPat (PmVar { pm_var_id = x }) = [PmVar { pm_var_id = x }] coercePmPat (PmLit { pm_lit_lit = l }) = [PmLit { pm_lit_lit = l }] coercePmPat (PmCon { pm_con_con = con, pm_con_arg_tys = arg_tys , pm_con_tvs = tvs, pm_con_dicts = dicts , pm_con_args = args }) = [PmCon { pm_con_con = con, pm_con_arg_tys = arg_tys , pm_con_tvs = tvs, pm_con_dicts = dicts , pm_con_args = coercePatVec args }] coercePmPat (PmGrd {}) = [] -- drop the guards -- | Check whether a data constructor is the only way to construct -- a data type. singleConstructor :: ConLike -> Bool singleConstructor (RealDataCon dc) = case tyConDataCons (dataConTyCon dc) of [_] -> True _ -> False singleConstructor _ = False -- | For a given conlike, finds all the sets of patterns which could -- be relevant to that conlike by consulting the result type. -- -- These come from two places. -- 1. From data constructors defined with the result type constructor. -- 2. From `COMPLETE` pragmas which have the same type as the result -- type constructor. Note that we only use `COMPLETE` pragmas -- *all* of whose pattern types match. See #14135 allCompleteMatches :: ConLike -> [Type] -> DsM [(Provenance, [ConLike])] allCompleteMatches cl tys = do let fam = case cl of RealDataCon dc -> [(FromBuiltin, map RealDataCon (tyConDataCons (dataConTyCon dc)))] PatSynCon _ -> [] ty = conLikeResTy cl tys pragmas <- case splitTyConApp_maybe ty of Just (tc, _) -> dsGetCompleteMatches tc Nothing -> return [] let fams cm = (FromComplete,) <$> mapM dsLookupConLike (completeMatchConLikes cm) from_pragma <- filter (\(_,m) -> isValidCompleteMatch ty m) <$> mapM fams pragmas let final_groups = fam ++ from_pragma return final_groups where -- Check that all the pattern types in a `COMPLETE` -- pragma subsume the type we're matching. See #14135. isValidCompleteMatch :: Type -> [ConLike] -> Bool isValidCompleteMatch ty = isJust . mapM (flip tcMatchTy ty . resTy . 
conLikeFullSig) where resTy (_, _, _, _, _, _, res_ty) = res_ty -- ----------------------------------------------------------------------- -- * Types and constraints newEvVar :: Name -> Type -> EvVar newEvVar name ty = mkLocalId name (toTcType ty) nameType :: String -> Type -> DsM EvVar nameType name ty = do unique <- getUniqueM let occname = mkVarOccFS (fsLit (name++"_"++show unique)) idname = mkInternalName unique occname noSrcSpan return (newEvVar idname ty) {- %************************************************************************ %* * The type oracle %* * %************************************************************************ -} -- | Check whether a set of type constraints is satisfiable. tyOracle :: Bag EvVar -> PmM Bool tyOracle evs = liftD $ do { ((_warns, errs), res) <- initTcDsForSolver $ tcCheckSatisfiability evs ; case res of Just sat -> return sat Nothing -> pprPanic "tyOracle" (vcat $ pprErrMsgBagWithLoc errs) } {- %************************************************************************ %* * Sanity Checks %* * %************************************************************************ -} -- | The arity of a pattern/pattern vector is the -- number of top-level patterns that are not guards type PmArity = Int -- | Compute the arity of a pattern vector patVecArity :: PatVec -> PmArity patVecArity = sum . map patternArity -- | Compute the arity of a pattern patternArity :: Pattern -> PmArity patternArity (PmGrd {}) = 0 patternArity _other_pat = 1 {- %************************************************************************ %* * Heart of the algorithm: Function pmcheck %* * %************************************************************************ Main functions are: * mkInitialUncovered :: [Id] -> PmM Uncovered Generates the initial uncovered set. Term and type constraints in scope are checked, if they are inconsistent, the set is empty, otherwise, the set contains only a vector of variables with the constraints in scope. * pmcheck :: PatVec -> [PatVec] -> ValVec -> PmM PartialResult Checks redundancy, coverage and inaccessibility, using auxilary functions `pmcheckGuards` and `pmcheckHd`. Mainly handles the guard case which is common in all three checks (see paper) and calls `pmcheckGuards` when the whole clause is checked, or `pmcheckHd` when the pattern vector does not start with a guard. * pmcheckGuards :: [PatVec] -> ValVec -> PmM PartialResult Processes the guards. * pmcheckHd :: Pattern -> PatVec -> [PatVec] -> ValAbs -> ValVec -> PmM PartialResult Worker: This function implements functions `covered`, `uncovered` and `divergent` from the paper at once. Slightly different from the paper because it does not even produce the covered and uncovered sets. Since we only care about whether a clause covers SOMETHING or if it may forces ANY argument, we only store a boolean in both cases, for efficiency. -} -- | Lift a pattern matching action from a single value vector abstration to a -- value set abstraction, but calling it on every vector and the combining the -- results. runMany :: (ValVec -> PmM PartialResult) -> (Uncovered -> PmM PartialResult) runMany _ [] = return mempty runMany pm (m:ms) = mappend <$> pm m <*> runMany pm ms -- | Generate the initial uncovered set. It initializes the -- delta with all term and type constraints in scope. mkInitialUncovered :: [Id] -> PmM Uncovered mkInitialUncovered vars = do ty_cs <- liftD getDictsDs tm_cs <- map toComplex . 
bagToList <$> liftD getTmCsDs sat_ty <- tyOracle ty_cs let initTyCs = if sat_ty then ty_cs else emptyBag initTmState = fromMaybe initialTmState (tmOracle initialTmState tm_cs) patterns = map PmVar vars -- If any of the term/type constraints are non -- satisfiable then return with the initialTmState. See #12957 return [ValVec patterns (MkDelta initTyCs initTmState)] -- | Increase the counter for elapsed algorithm iterations, check that the -- limit is not exceeded and call `pmcheck` pmcheckI :: PatVec -> [PatVec] -> ValVec -> PmM PartialResult pmcheckI ps guards vva = do n <- liftD incrCheckPmIterDs tracePm "pmCheck" (ppr n <> colon <+> pprPatVec ps $$ hang (text "guards:") 2 (vcat (map pprPatVec guards)) $$ pprValVecDebug vva) res <- pmcheck ps guards vva tracePm "pmCheckResult:" (ppr res) return res {-# INLINE pmcheckI #-} -- | Increase the counter for elapsed algorithm iterations, check that the -- limit is not exceeded and call `pmcheckGuards` pmcheckGuardsI :: [PatVec] -> ValVec -> PmM PartialResult pmcheckGuardsI gvs vva = liftD incrCheckPmIterDs >> pmcheckGuards gvs vva {-# INLINE pmcheckGuardsI #-} -- | Increase the counter for elapsed algorithm iterations, check that the -- limit is not exceeded and call `pmcheckHd` pmcheckHdI :: Pattern -> PatVec -> [PatVec] -> ValAbs -> ValVec -> PmM PartialResult pmcheckHdI p ps guards va vva = do n <- liftD incrCheckPmIterDs tracePm "pmCheckHdI" (ppr n <> colon <+> pprPmPatDebug p $$ pprPatVec ps $$ hang (text "guards:") 2 (vcat (map pprPatVec guards)) $$ pprPmPatDebug va $$ pprValVecDebug vva) res <- pmcheckHd p ps guards va vva tracePm "pmCheckHdI: res" (ppr res) return res {-# INLINE pmcheckHdI #-} -- | Matching function: Check simultaneously a clause (takes separately the -- patterns and the list of guards) for exhaustiveness, redundancy and -- inaccessibility. pmcheck :: PatVec -> [PatVec] -> ValVec -> PmM PartialResult pmcheck [] guards vva@(ValVec [] _) | null guards = return $ mempty { presultCovered = Covered } | otherwise = pmcheckGuardsI guards vva -- Guard pmcheck (p@(PmGrd pv e) : ps) guards vva@(ValVec vas delta) -- short-circuit if the guard pattern is useless. -- we just have two possible outcomes: fail here or match and recurse -- none of the two contains any useful information about the failure -- though. So just have these two cases but do not do all the boilerplate | isFakeGuard pv e = forces . 
mkCons vva <$> pmcheckI ps guards vva | otherwise = do y <- liftD $ mkPmId (pmPatType p) let tm_state = extendSubst y e (delta_tm_cs delta) delta' = delta { delta_tm_cs = tm_state } utail <$> pmcheckI (pv ++ ps) guards (ValVec (PmVar y : vas) delta') pmcheck [] _ (ValVec (_:_) _) = panic "pmcheck: nil-cons" pmcheck (_:_) _ (ValVec [] _) = panic "pmcheck: cons-nil" pmcheck (p:ps) guards (ValVec (va:vva) delta) = pmcheckHdI p ps guards va (ValVec vva delta) -- | Check the list of guards pmcheckGuards :: [PatVec] -> ValVec -> PmM PartialResult pmcheckGuards [] vva = return (usimple [vva]) pmcheckGuards (gv:gvs) vva = do (PartialResult prov1 cs vsa ds) <- pmcheckI gv [] vva (PartialResult prov2 css vsas dss) <- runMany (pmcheckGuardsI gvs) vsa return $ PartialResult (prov1 `mappend` prov2) (cs `mappend` css) vsas (ds `mappend` dss) -- | Worker function: Implements all cases described in the paper for all three -- functions (`covered`, `uncovered` and `divergent`) apart from the `Guard` -- cases which are handled by `pmcheck` pmcheckHd :: Pattern -> PatVec -> [PatVec] -> ValAbs -> ValVec -> PmM PartialResult -- Var pmcheckHd (PmVar x) ps guards va (ValVec vva delta) | Just tm_state <- solveOneEq (delta_tm_cs delta) (PmExprVar (idName x), vaToPmExpr va) = ucon va <$> pmcheckI ps guards (ValVec vva (delta {delta_tm_cs = tm_state})) | otherwise = return mempty -- ConCon pmcheckHd ( p@(PmCon {pm_con_con = c1, pm_con_args = args1})) ps guards (va@(PmCon {pm_con_con = c2, pm_con_args = args2})) (ValVec vva delta) | c1 /= c2 = return (usimple [ValVec (va:vva) delta]) | otherwise = kcon c1 (pm_con_arg_tys p) (pm_con_tvs p) (pm_con_dicts p) <$> pmcheckI (args1 ++ ps) guards (ValVec (args2 ++ vva) delta) -- LitLit pmcheckHd (PmLit l1) ps guards (va@(PmLit l2)) vva = case eqPmLit l1 l2 of True -> ucon va <$> pmcheckI ps guards vva False -> return $ ucon va (usimple [vva]) -- ConVar pmcheckHd (p@(PmCon { pm_con_con = con, pm_con_arg_tys = tys })) ps guards (PmVar x) (ValVec vva delta) = do (prov, complete_match) <- select =<< liftD (allCompleteMatches con tys) cons_cs <- mapM (liftD . mkOneConFull x) complete_match inst_vsa <- flip concatMapM cons_cs $ \(va, tm_ct, ty_cs) -> do let ty_state = ty_cs `unionBags` delta_ty_cs delta -- not actually a state sat_ty <- if isEmptyBag ty_cs then return True else tyOracle ty_state return $ case (sat_ty, solveOneEq (delta_tm_cs delta) tm_ct) of (True, Just tm_state) -> [ValVec (va:vva) (MkDelta ty_state tm_state)] _ty_or_tm_failed -> [] set_provenance prov . force_if (canDiverge (idName x) (delta_tm_cs delta)) <$> runMany (pmcheckI (p:ps) guards) inst_vsa -- LitVar pmcheckHd (p@(PmLit l)) ps guards (PmVar x) (ValVec vva delta) = force_if (canDiverge (idName x) (delta_tm_cs delta)) <$> mkUnion non_matched <$> case solveOneEq (delta_tm_cs delta) (mkPosEq x l) of Just tm_state -> pmcheckHdI p ps guards (PmLit l) $ ValVec vva (delta {delta_tm_cs = tm_state}) Nothing -> return mempty where us | Just tm_state <- solveOneEq (delta_tm_cs delta) (mkNegEq x l) = [ValVec (PmNLit x [l] : vva) (delta { delta_tm_cs = tm_state })] | otherwise = [] non_matched = usimple us -- LitNLit pmcheckHd (p@(PmLit l)) ps guards (PmNLit { pm_lit_id = x, pm_lit_not = lits }) (ValVec vva delta) | all (not . eqPmLit l) lits , Just tm_state <- solveOneEq (delta_tm_cs delta) (mkPosEq x l) -- Both guards check the same so it would be sufficient to have only -- the second one. 
Nevertheless, it is much cheaper to check whether -- the literal is in the list so we check it first, to avoid calling -- the term oracle (`solveOneEq`) if possible = mkUnion non_matched <$> pmcheckHdI p ps guards (PmLit l) (ValVec vva (delta { delta_tm_cs = tm_state })) | otherwise = return non_matched where us | Just tm_state <- solveOneEq (delta_tm_cs delta) (mkNegEq x l) = [ValVec (PmNLit x (l:lits) : vva) (delta { delta_tm_cs = tm_state })] | otherwise = [] non_matched = usimple us -- ---------------------------------------------------------------------------- -- The following three can happen only in cases like #322 where constructors -- and overloaded literals appear in the same match. The general strategy is -- to replace the literal (positive/negative) by a variable and recurse. The -- fact that the variable is equal to the literal is recorded in `delta` so -- no information is lost -- LitCon pmcheckHd (PmLit l) ps guards (va@(PmCon {})) (ValVec vva delta) = do y <- liftD $ mkPmId (pmPatType va) let tm_state = extendSubst y (PmExprLit l) (delta_tm_cs delta) delta' = delta { delta_tm_cs = tm_state } pmcheckHdI (PmVar y) ps guards va (ValVec vva delta') -- ConLit pmcheckHd (p@(PmCon {})) ps guards (PmLit l) (ValVec vva delta) = do y <- liftD $ mkPmId (pmPatType p) let tm_state = extendSubst y (PmExprLit l) (delta_tm_cs delta) delta' = delta { delta_tm_cs = tm_state } pmcheckHdI p ps guards (PmVar y) (ValVec vva delta') -- ConNLit pmcheckHd (p@(PmCon {})) ps guards (PmNLit { pm_lit_id = x }) vva = pmcheckHdI p ps guards (PmVar x) vva -- Impossible: handled by pmcheck pmcheckHd (PmGrd {}) _ _ _ _ = panic "pmcheckHd: Guard" -- ---------------------------------------------------------------------------- -- * Utilities for main checking updateVsa :: (ValSetAbs -> ValSetAbs) -> (PartialResult -> PartialResult) updateVsa f p@(PartialResult { presultUncovered = old }) = p { presultUncovered = f old } -- | Initialise with default values for covering and divergent information. usimple :: ValSetAbs -> PartialResult usimple vsa = mempty { presultUncovered = vsa } -- | Take the tail of all value vector abstractions in the uncovered set utail :: PartialResult -> PartialResult utail = updateVsa upd where upd vsa = [ ValVec vva delta | ValVec (_:vva) delta <- vsa ] -- | Prepend a value abstraction to all value vector abstractions in the -- uncovered set ucon :: ValAbs -> PartialResult -> PartialResult ucon va = updateVsa upd where upd vsa = [ ValVec (va:vva) delta | ValVec vva delta <- vsa ] -- | Given a data constructor of arity `a` and an uncovered set containing -- value vector abstractions of length `(a+n)`, pass the first `n` value -- abstractions to the constructor (Hence, the resulting value vector -- abstractions will have length `n+1`) kcon :: ConLike -> [Type] -> [TyVar] -> [EvVar] -> PartialResult -> PartialResult kcon con arg_tys ex_tvs dicts = let n = conLikeArity con upd vsa = [ ValVec (va:vva) delta | ValVec vva' delta <- vsa , let (args, vva) = splitAt n vva' , let va = PmCon { pm_con_con = con , pm_con_arg_tys = arg_tys , pm_con_tvs = ex_tvs , pm_con_dicts = dicts , pm_con_args = args } ] in updateVsa upd -- | Get the union of two covered, uncovered and divergent value set -- abstractions. Since the covered and divergent sets are represented by a -- boolean, union means computing the logical or (at least one of the two is -- non-empty). 
mkUnion :: PartialResult -> PartialResult -> PartialResult mkUnion = mappend -- | Add a value vector abstraction to a value set abstraction (uncovered). mkCons :: ValVec -> PartialResult -> PartialResult mkCons vva = updateVsa (vva:) -- | Set the divergent set to not empty forces :: PartialResult -> PartialResult forces pres = pres { presultDivergent = Diverged } -- | Set the divergent set to non-empty if the flag is `True` force_if :: Bool -> PartialResult -> PartialResult force_if True pres = forces pres force_if False pres = pres set_provenance :: Provenance -> PartialResult -> PartialResult set_provenance prov pr = pr { presultProvenance = prov } -- ---------------------------------------------------------------------------- -- * Propagation of term constraints inwards when checking nested matches {- Note [Type and Term Equality Propagation] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When checking a match it would be great to have all type and term information available so we can get more precise results. For this reason we have functions `addDictsDs' and `addTmCsDs' in PmMonad that store in the environment type and term constraints (respectively) as we go deeper. The type constraints we propagate inwards are collected by `collectEvVarsPats' in HsPat.hs. This handles bug #4139 ( see example https://ghc.haskell.org/trac/ghc/attachment/ticket/4139/GADTbug.hs ) where this is needed. For term equalities we do less, we just generate equalities for HsCase. For example we accurately give 2 redundancy warnings for the marked cases: f :: [a] -> Bool f x = case x of [] -> case x of -- brings (x ~ []) in scope [] -> True (_:_) -> False -- can't happen (_:_) -> case x of -- brings (x ~ (_:_)) in scope (_:_) -> True [] -> False -- can't happen Functions `genCaseTmCs1' and `genCaseTmCs2' are responsible for generating these constraints. -} -- | Generate equalities when checking a case expression: -- case x of { p1 -> e1; ... pn -> en } -- When we go deeper to check e.g. e1 we record two equalities: -- (x ~ y), where y is the initial uncovered when checking (p1; .. ; pn) -- and (x ~ p1). genCaseTmCs2 :: Maybe (LHsExpr GhcTc) -- Scrutinee -> [Pat GhcTc] -- LHS (should have length 1) -> [Id] -- MatchVars (should have length 1) -> DsM (Bag SimpleEq) genCaseTmCs2 Nothing _ _ = return emptyBag genCaseTmCs2 (Just scr) [p] [var] = do fam_insts <- dsGetFamInstEnvs [e] <- map vaToPmExpr . coercePatVec <$> translatePat fam_insts p let scr_e = lhsExprToPmExpr scr return $ listToBag [(var, e), (var, scr_e)] genCaseTmCs2 _ _ _ = panic "genCaseTmCs2: HsCase" -- | Generate a simple equality when checking a case expression: -- case x of { matches } -- When checking matches we record that (x ~ y) where y is the initial -- uncovered. All matches will have to satisfy this equality. genCaseTmCs1 :: Maybe (LHsExpr GhcTc) -> [Id] -> Bag SimpleEq genCaseTmCs1 Nothing _ = emptyBag genCaseTmCs1 (Just scr) [var] = unitBag (var, lhsExprToPmExpr scr) genCaseTmCs1 _ _ = panic "genCaseTmCs1: HsCase" {- Note [Literals in PmPat] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Instead of translating a literal to a variable accompanied with a guard, we treat them like constructor patterns. The following example from "./libraries/base/GHC/IO/Encoding.hs" shows why: mkTextEncoding' :: CodingFailureMode -> String -> IO TextEncoding mkTextEncoding' cfm enc = case [toUpper c | c <- enc, c /= '-'] of "UTF8" -> return $ UTF8.mkUTF8 cfm "UTF16" -> return $ UTF16.mkUTF16 cfm "UTF16LE" -> return $ UTF16.mkUTF16le cfm ... 
Each clause gets translated to a list of variables with an equal number of guards. For every guard we generate two cases (equals True/equals False) which means that we generate 2^n cases to feed the oracle with, where n is the sum of the length of all strings that appear in the patterns. For this particular example this means over 2^40 cases. Instead, by representing them like with constructor we get the following: 1. We exploit the common prefix with our representation of VSAs 2. We prune immediately non-reachable cases (e.g. False == (x == "U"), True == (x == "U")) Note [Translating As Patterns] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Instead of translating x@p as: x (p <- x) we instead translate it as: p (x <- coercePattern p) for performance reasons. For example: f x@True = 1 f y@False = 2 Gives the following with the first translation: x |> {x == False, x == y, y == True} If we use the second translation we get an empty set, independently of the oracle. Since the pattern `p' may contain guard patterns though, it cannot be used as an expression. That's why we call `coercePatVec' to drop the guard and `vaToPmExpr' to transform the value abstraction to an expression in the guard pattern (value abstractions are a subset of expressions). We keep the guards in the first pattern `p' though. %************************************************************************ %* * Pretty printing of exhaustiveness/redundancy check warnings %* * %************************************************************************ -} -- | Check whether any part of pattern match checking is enabled (does not -- matter whether it is the redundancy check or the exhaustiveness check). isAnyPmCheckEnabled :: DynFlags -> DsMatchContext -> Bool isAnyPmCheckEnabled dflags (DsMatchContext kind _loc) = wopt Opt_WarnOverlappingPatterns dflags || exhaustive dflags kind instance Outputable ValVec where ppr (ValVec vva delta) = let (residual_eqs, subst) = wrapUpTmState (delta_tm_cs delta) vector = substInValAbs subst vva in ppr_uncovered (vector, residual_eqs) -- | Apply a term substitution to a value vector abstraction. All VAs are -- transformed to PmExpr (used only before pretty printing). substInValAbs :: PmVarEnv -> [ValAbs] -> [PmExpr] substInValAbs subst = map (exprDeepLookup subst . vaToPmExpr) -- | Wrap up the term oracle's state once solving is complete. Drop any -- information about unhandled constraints (involving HsExprs) and flatten -- (height 1) the substitution. 
wrapUpTmState :: TmState -> ([ComplexEq], PmVarEnv) wrapUpTmState (residual, (_, subst)) = (residual, flattenPmVarEnv subst) -- | Issue all the warnings (coverage, exhaustiveness, inaccessibility) dsPmWarn :: DynFlags -> DsMatchContext -> PmResult -> DsM () dsPmWarn dflags ctx@(DsMatchContext kind loc) pm_result = when (flag_i || flag_u) $ do let exists_r = flag_i && notNull redundant && onlyBuiltin exists_i = flag_i && notNull inaccessible && onlyBuiltin && not is_rec_upd exists_u = flag_u && (case uncovered of TypeOfUncovered _ -> True UncoveredPatterns u -> notNull u) when exists_r $ forM_ redundant $ \(L l q) -> do putSrcSpanDs l (warnDs (Reason Opt_WarnOverlappingPatterns) (pprEqn q "is redundant")) when exists_i $ forM_ inaccessible $ \(L l q) -> do putSrcSpanDs l (warnDs (Reason Opt_WarnOverlappingPatterns) (pprEqn q "has inaccessible right hand side")) when exists_u $ putSrcSpanDs loc $ warnDs flag_u_reason $ case uncovered of TypeOfUncovered ty -> warnEmptyCase ty UncoveredPatterns candidates -> pprEqns candidates where PmResult { pmresultProvenance = prov , pmresultRedundant = redundant , pmresultUncovered = uncovered , pmresultInaccessible = inaccessible } = pm_result flag_i = wopt Opt_WarnOverlappingPatterns dflags flag_u = exhaustive dflags kind flag_u_reason = maybe NoReason Reason (exhaustiveWarningFlag kind) is_rec_upd = case kind of { RecUpd -> True; _ -> False } -- See Note [Inaccessible warnings for record updates] onlyBuiltin = prov == FromBuiltin maxPatterns = maxUncoveredPatterns dflags -- Print a single clause (for redundant/with-inaccessible-rhs) pprEqn q txt = pp_context True ctx (text txt) $ \f -> ppr_eqn f kind q -- Print several clauses (for uncovered clauses) pprEqns qs = pp_context False ctx (text "are non-exhaustive") $ \_ -> case qs of -- See #11245 [ValVec [] _] -> text "Guards do not cover entire pattern space" _missing -> let us = map ppr qs in hang (text "Patterns not matched:") 4 (vcat (take maxPatterns us) $$ dots maxPatterns us) -- Print a type-annotated wildcard (for non-exhaustive `EmptyCase`s for -- which we only know the type and have no inhabitants at hand) warnEmptyCase ty = pp_context False ctx (text "are non-exhaustive") $ \_ -> hang (text "Patterns not matched:") 4 (underscore <+> dcolon <+> ppr ty) {- Note [Inaccessible warnings for record updates] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider (Trac #12957) data T a where T1 :: { x :: Int } -> T Bool T2 :: { x :: Int } -> T a T3 :: T a f :: T Char -> T a f r = r { x = 3 } The desugarer will (conservatively generate a case for T1 even though it's impossible: f r = case r of T1 x -> T1 3 -- Inaccessible branch T2 x -> T2 3 _ -> error "Missing" We don't want to warn about the inaccessible branch because the programmer didn't put it there! So we filter out the warning here. 
-}

-- | Issue a warning when the predefined number of iterations is exceeded
-- for the pattern match checker
warnPmIters :: DynFlags -> DsMatchContext -> DsM ()
warnPmIters dflags (DsMatchContext kind loc)
  = when (flag_i || flag_u) $ do
      iters <- maxPmCheckIterations <$> getDynFlags
      putSrcSpanDs loc (warnDs NoReason (msg iters))
  where
    ctxt   = pprMatchContext kind
    msg is = fsep [ text "Pattern match checker exceeded"
                  , parens (ppr is), text "iterations in", ctxt <> dot
                  , text "(Use -fmax-pmcheck-iterations=n"
                  , text "to set the maximum number of iterations to n)" ]

    flag_i = wopt Opt_WarnOverlappingPatterns dflags
    flag_u = exhaustive dflags kind

dots :: Int -> [a] -> SDoc
dots maxPatterns qs
    | qs `lengthExceeds` maxPatterns = text "..."
    | otherwise                      = empty

-- | Check whether the exhaustiveness checker should run (exhaustiveness only)
exhaustive :: DynFlags -> HsMatchContext id -> Bool
exhaustive dflags = maybe False (`wopt` dflags) . exhaustiveWarningFlag

-- | Denotes whether an exhaustiveness check is supported, and if so,
-- via which 'WarningFlag' it's controlled.
-- Returns 'Nothing' if check is not supported.
exhaustiveWarningFlag :: HsMatchContext id -> Maybe WarningFlag
exhaustiveWarningFlag (FunRhs {})   = Just Opt_WarnIncompletePatterns
exhaustiveWarningFlag CaseAlt       = Just Opt_WarnIncompletePatterns
exhaustiveWarningFlag IfAlt         = Nothing
exhaustiveWarningFlag LambdaExpr    = Just Opt_WarnIncompleteUniPatterns
exhaustiveWarningFlag PatBindRhs    = Just Opt_WarnIncompleteUniPatterns
exhaustiveWarningFlag ProcExpr      = Just Opt_WarnIncompleteUniPatterns
exhaustiveWarningFlag RecUpd        = Just Opt_WarnIncompletePatternsRecUpd
exhaustiveWarningFlag ThPatSplice   = Nothing
exhaustiveWarningFlag PatSyn        = Nothing
exhaustiveWarningFlag ThPatQuote    = Nothing
exhaustiveWarningFlag (StmtCtxt {}) = Nothing -- Don't warn about incomplete patterns
                                              -- in list comprehensions, pattern guards
                                              -- etc. They are often *supposed* to be
                                              -- incomplete

-- True <==> singular
pp_context :: Bool -> DsMatchContext -> SDoc -> ((SDoc -> SDoc) -> SDoc) -> SDoc
pp_context singular (DsMatchContext kind _loc) msg rest_of_msg_fun
  = vcat [text txt <+> msg,
          sep [ text "In" <+> ppr_match <> char ':'
              , nest 4 (rest_of_msg_fun pref)]]
  where
    txt | singular  = "Pattern match"
        | otherwise = "Pattern match(es)"

    (ppr_match, pref)
        = case kind of
             FunRhs { mc_fun = L _ fun }
                  -> (pprMatchContext kind, \ pp -> ppr fun <+> pp)
             _    -> (pprMatchContext kind, \ pp -> pp)

ppr_pats :: HsMatchContext Name -> [Pat GhcTc] -> SDoc
ppr_pats kind pats
  = sep [sep (map ppr pats), matchSeparator kind, text "..."]

ppr_eqn :: (SDoc -> SDoc) -> HsMatchContext Name -> [LPat GhcTc] -> SDoc
ppr_eqn prefixF kind eqn = prefixF (ppr_pats kind (map unLoc eqn))

ppr_constraint :: (SDoc,[PmLit]) -> SDoc
ppr_constraint (var, lits) = var <+> text "is not one of"
                                 <+> braces (pprWithCommas ppr lits)

ppr_uncovered :: ([PmExpr], [ComplexEq]) -> SDoc
ppr_uncovered (expr_vec, complex)
  | null cs   = fsep vec -- there are no literal constraints
  | otherwise = hang (fsep vec) 4 $
                  text "where" <+> vcat (map ppr_constraint cs)
  where
    sdoc_vec = mapM pprPmExprWithParens expr_vec
    (vec,cs) = runPmPprM sdoc_vec (filterComplex complex)

{- Note [Representation of Term Equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the paper, term constraints always take the form (x ~ e). Of course, a more
general constraint of the form (e1 ~ e2) can always be transformed to an
equivalent set of the former constraints, by introducing a fresh, intermediate
variable: { y ~ e1, y ~ e2 }.
Yet, implementing this representation gave rise to #11160 (incredibly bad performance for literal pattern matching). Two are the main sources of this problem (the actual problem is how these two interact with each other): 1. Pattern matching on literals generates twice as many constraints as needed. Consider the following (tests/ghci/should_run/ghcirun004): foo :: Int -> Int foo 1 = 0 ... foo 5000 = 4999 The covered and uncovered set *should* look like: U0 = { x |> {} } C1 = { 1 |> { x ~ 1 } } U1 = { x |> { False ~ (x ~ 1) } } ... C10 = { 10 |> { False ~ (x ~ 1), .., False ~ (x ~ 9), x ~ 10 } } U10 = { x |> { False ~ (x ~ 1), .., False ~ (x ~ 9), False ~ (x ~ 10) } } ... If we replace { False ~ (x ~ 1) } with { y ~ False, y ~ (x ~ 1) } we get twice as many constraints. Also note that half of them are just the substitution [x |-> False]. 2. The term oracle (`tmOracle` in deSugar/TmOracle) uses equalities of the form (x ~ e) as substitutions [x |-> e]. More specifically, function `extendSubstAndSolve` applies such substitutions in the residual constraints and partitions them in the affected and non-affected ones, which are the new worklist. Essentially, this gives quadradic behaviour on the number of the residual constraints. (This would not be the case if the term oracle used mutable variables but, since we use it to handle disjunctions on value set abstractions (`Union` case), we chose a pure, incremental interface). Now the problem becomes apparent (e.g. for clause 300): * Set U300 contains 300 substituting constraints [y_i |-> False] and 300 constraints that we know that will not reduce (stay in the worklist). * To check for consistency, we apply the substituting constraints ONE BY ONE (since `tmOracle` is called incrementally, it does not have all of them available at once). Hence, we go through the (non-progressing) constraints over and over, achieving over-quadradic behaviour. If instead we allow constraints of the form (e ~ e), * All uncovered sets Ui contain no substituting constraints and i non-progressing constraints of the form (False ~ (x ~ lit)) so the oracle behaves linearly. * All covered sets Ci contain exactly (i-1) non-progressing constraints and a single substituting constraint. So the term oracle goes through the constraints only once. The performance improvement becomes even more important when more arguments are involved. -} -- Debugging Infrastructre tracePm :: String -> SDoc -> PmM () tracePm herald doc = liftD $ tracePmD herald doc tracePmD :: String -> SDoc -> DsM () tracePmD herald doc = do dflags <- getDynFlags printer <- mkPrintUnqualifiedDs liftIO $ dumpIfSet_dyn_printer printer dflags Opt_D_dump_ec_trace (text herald $$ (nest 2 doc)) pprPmPatDebug :: PmPat a -> SDoc pprPmPatDebug (PmCon cc _arg_tys _con_tvs _con_dicts con_args) = hsep [text "PmCon", ppr cc, hsep (map pprPmPatDebug con_args)] pprPmPatDebug (PmVar vid) = text "PmVar" <+> ppr vid pprPmPatDebug (PmLit li) = text "PmLit" <+> ppr li pprPmPatDebug (PmNLit i nl) = text "PmNLit" <+> ppr i <+> ppr nl pprPmPatDebug (PmGrd pv ge) = text "PmGrd" <+> hsep (map pprPmPatDebug pv) <+> ppr ge pprPatVec :: PatVec -> SDoc pprPatVec ps = hang (text "Pattern:") 2 (brackets $ sep $ punctuate (comma <> char '\n') (map pprPmPatDebug ps)) pprValAbs :: [ValAbs] -> SDoc pprValAbs ps = hang (text "ValAbs:") 2 (brackets $ sep $ punctuate (comma) (map pprPmPatDebug ps)) pprValVecDebug :: ValVec -> SDoc pprValVecDebug (ValVec vas _d) = text "ValVec" <+> parens (pprValAbs vas)
shlevy/ghc
compiler/deSugar/Check.hs
bsd-3-clause
85,282
59
28
21,677
15,858
8,192
7,666
-1
-1
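The checker in the record above reports two kinds of problems: redundant clauses (via Opt_WarnOverlappingPatterns) and non-exhaustive matches (via the Opt_WarnIncomplete* flags). The following is a minimal, hypothetical module — not part of the GHC sources, module and function names invented for illustration — that would trigger both messages ("Pattern match is redundant" and "Pattern match(es) are non-exhaustive") when compiled with the corresponding warning flags:

{-# OPTIONS_GHC -Woverlapping-patterns -Wincomplete-patterns #-}
module PmWarnDemo where

data Colour = Red | Green | Blue

-- Non-exhaustive: Blue is never matched, so the checker reports
-- "Pattern match(es) are non-exhaustive" with "Patterns not matched: Blue".
describe :: Colour -> String
describe Red   = "red"
describe Green = "green"

-- Redundant: the first clause already matches everything, so the second
-- clause is reported as "Pattern match is redundant".
isRed :: Colour -> Bool
isRed _   = False
isRed Red = True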
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..)) import System.Exit (ExitCode(..), exitWith) import Luhn (checkDigit, addends, checksum, isValid, create) exitProperly :: IO Counts -> IO () exitProperly m = do counts <- m exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess testCase :: String -> Assertion -> Test testCase label assertion = TestLabel label (TestCase assertion) main :: IO () main = exitProperly $ runTestTT $ TestList [ TestList luhnTests ] int :: Integer -> Integer int = id ints :: [Integer] -> [Integer] ints = id luhnTests :: [Test] luhnTests = [ testCase "checkDigit" $ do int 7 @=? checkDigit 34567 int 0 @=? checkDigit 91370 , testCase "addends" $ do ints [1, 4, 1, 4, 1] @=? addends 12121 ints [7, 6, 6, 1] @=? addends 8631 , testCase "checksum" $ do -- NOTE: this differs from the ruby and js, the checksum really should -- be mod 10 like we are testing here. int 2 @=? checksum 4913 int 1 @=? checksum 201773 , testCase "isValid" $ do False @=? isValid (int 738) True @=? isValid (int 8739567) , testCase "create" $ do int 1230 @=? create 123 int 8739567 @=? create 873956 int 8372637564 @=? create 837263756 ]
pminten/xhaskell
luhn/luhn_test.hs
mit
1,285
0
12
296
479
243
236
34
2
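The test suite above fixes the expected behaviour of checkDigit, addends, checksum, isValid and create (names taken from its import of the Luhn module). Below is a minimal sketch of one possible implementation that satisfies those tests; it is an assumed solution for illustration, not the exercise's reference implementation:

module Luhn (checkDigit, addends, checksum, isValid, create) where

import Data.Char (digitToInt)

-- The check digit is the last digit of the number.
checkDigit :: Integer -> Integer
checkDigit = (`mod` 10)

-- Digits of the number, with every second digit (counting from the right)
-- doubled; doubled values above 9 have 9 subtracted.
addends :: Integer -> [Integer]
addends = reverse . zipWith ($) (cycle [id, double]) . reverse . toDigits
  where
    toDigits = map (fromIntegral . digitToInt) . show
    double d = let d' = 2 * d in if d' > 9 then d' - 9 else d'

-- Sum of the addends, modulo 10 (matching the note in the test above).
checksum :: Integer -> Integer
checksum = (`mod` 10) . sum . addends

-- A number is valid exactly when its checksum is zero.
isValid :: Integer -> Bool
isValid = (== 0) . checksum

-- Append the check digit that makes the whole number valid.
create :: Integer -> Integer
create n = head [m | d <- [0 .. 9], let m = n * 10 + d, isValid m]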
{-# language ViewPatterns, NamedFieldPuns, ScopedTypeVariables, MultiParamTypeClasses, DeriveDataTypeable #-} module Sorts.Tiles ( sorts, isTileSort, tileShapeAttributes, mergeTiles, cacheTiles, ) where import Safe import Data.Abelian import Data.Data import Data.List import Data.Maybe import qualified Data.Indexable as I import Data.Indexable ((>:)) import Text.Parsec import Control.Monad import System.FilePath import Graphics.Qt as Qt import Physics.Chipmunk as CM import Utils import Base import Sorts.Tiles.Baking import qualified Sorts.StoryMode import qualified Sorts.DeathStones (laserAnimationFrameTime) -- * Tile configuration -- set to laser end pieces, is not individually configurable right now. -- (the only animated public tiles are laser end pieces) defaultFrameTime :: Seconds = Sorts.DeathStones.laserAnimationFrameTime -- all loaded tiles with offset and size names :: [(String, Qt.Position Int, Size Double)] names = ("tiles/black-standard", Position 1 1, Size 64 64) : ("tiles/white-standard", Position 1 1, Size 64 64) : ("tiles/yellow-standard", Position 1 1, Size 64 64) : ("tiles/green-standard", Position 1 1, Size 64 64) : ("tiles/aqua-standard", Position 1 1, Size 64 64) : ("tiles/blue-standard", Position 1 1, Size 64 64) : ("tiles/pink-standard", Position 1 1, Size 64 64) : ("tiles/red-standard", Position 1 1, Size 64 64) : ("tiles/black-small", Position 1 1, Size 32 32) : ("tiles/white-small", Position 1 1, Size 32 32) : ("tiles/yellow-small", Position 1 1, Size 32 32) : ("tiles/green-small", Position 1 1, Size 32 32) : ("tiles/aqua-small", Position 1 1, Size 32 32) : ("tiles/blue-small", Position 1 1, Size 32 32) : ("tiles/pink-small", Position 1 1, Size 32 32) : ("tiles/red-small", Position 1 1, Size 32 32) : ("terminals/terminal-standard-bottom-end", Position 1 1, Size 192 32) : ("tutorial/data-terminal-background", Position 1 1, Size 128 192) : ("deathstones/lasers/laser-up", split 1, fmap fromUber $ Size 15 4) : ("deathstones/lasers/laser-down", Position 1 5, fmap fromUber $ Size 15 4) : ("deathstones/lasers/laser-left", split 1, fmap fromUber $ Size 4 15) : ("deathstones/lasers/laser-right", Position 5 1, fmap fromUber $ Size 4 15) : [] -- | points are moved by this distance to avoid sticky edges tileMergingEpsilon = 1 -- * Tile loading sorts :: [RM (Maybe Sort_)] sorts = mkFreeSorts ++ mkStoryModeSorts mkFreeSorts = map (\ (a, b, c) -> mkSort False a b c defaultFrameTime Nothing) names mkStoryModeSorts = map (\ (a, b, c, frameTime, frameOrder) -> mkSort True a b c frameTime frameOrder) Sorts.StoryMode.tiles -- | returns Nothing if a story mode tile is not available mkSort :: Bool -> String -> Offset Int -> Size Double -> Seconds -> Maybe [Int] -> RM (Maybe Sort_) mkSort storyMode name offset size frameDuration frameOrder = do mPngFiles <- getFrameFileNames storyMode name case mPngFiles of Nothing -> return Nothing Just pngFiles -> do when (null pngFiles) $ fail ("no png files found for tile: " ++ name) let sortID = if storyMode then ("story-mode/" ++ name) else name frames <- reorderFrames <$> mapM mkTilePixmap pngFiles return $ Just $ Sort_ $ TSort sortID (mkAnimation frames [frameDuration]) where mkTilePixmap file = loadPixmap (fmap fromIntegral offset) size file reorderFrames :: [Pixmap] -> [Pixmap] reorderFrames pixmaps = maybe pixmaps (map (\ i -> atNote (note i) pixmaps i)) frameOrder note i = name ++ " has no frame with number " ++ show i -- | Returns the list of filenames for all the frames with the given name -- Returns Nothing in case a story mode tile is 
not available. getFrameFileNames :: Bool -> String -> RM (Maybe [FilePath]) getFrameFileNames storyMode name = do -- paths of all pngs in the corresponding directory mAbsolutePaths <- getPngFiles storyMode name case mAbsolutePaths of Nothing -> return Nothing Just absolutePaths -> do -- making them relative again let relativePaths = map ((takeDirectory name </>) . takeFileName) absolutePaths files <- mapM (getPngFileName storyMode) $ map (pngDir </>) $ map third $ sortBy (compare `on` snd3) $ filter (\ (candidateName, _, _) -> on (==) splitDirectories name candidateName) $ map parsePath relativePaths return $ Just $ catMaybes files where parsePath :: String -> (String, Maybe Int, FilePath) parsePath path = case parse parseTileName "" path of Right (a, b) -> (a, b, path) x -> error ("unparseable filename: " ++ path) parseTileName :: Parsec String () (String, Maybe Int) parseTileName = do n <- parseName i <- parseFrameNumber ignore $ string ".png" eof return (n, i) parseName = do a <- name r <- many namePart return (a ++ concat r) where name = many1 (noneOf ['_', '.']) namePart = try $ do ignore $ char '_' a <- letter r <- name return ('_' : a : r) parseFrameNumber :: Parsec String () (Maybe Int) parseFrameNumber = optionMaybe $ do ignore $ char '_' s <- many1 digit return $ readNote "frameNumber" s -- | returns all png files in the directory where the tile pngs should be. -- Returns Nothing in case a storymode tile is loaded, but the story mode is not available. getPngFiles :: Bool -> String -> RM (Maybe [FilePath]) getPngFiles False name = Just <$> getDataFiles (pngDir </> takeDirectory name) (Just ".png") getPngFiles True name = io $ getStoryModeDataFiles (pngDir </> takeDirectory name) (Just ".png") getPngFileName :: Bool -> FilePath -> RM (Maybe FilePath) getPngFileName False file = Just <$> getDataFileName file getPngFileName True file = io $ getStoryModeDataFileName file data TSort = TSort { name :: String, animation :: Animation Pixmap } deriving (Show, Typeable) isTileSort :: Sort s o => s -> Bool isTileSort (cast -> Just _ :: Maybe TSort) = True isTileSort (cast -> Just (Sort_ inner) :: Maybe Sort_) = isTileSort inner isTileSort _ = False data Tile = Tile { tchipmunk :: Chipmunk } deriving (Show, Typeable) instance Sort TSort Tile where sortId TSort{name} = SortId name size (TSort _ animation) = pixmapSize $ head $ ftoList animation renderIconified sort ptr = renderPixmapSimple ptr $ head $ ftoList $ animation sort initialize _ _ _ = es "initialize: use AllTiles" immutableCopy = es "immutableCopy: use AllTiles" chipmunks = es "chipmunks: use AllTiles" isUpdating = es "isUpdating: use AllTiles" renderObject _ _ (Tile (ImmutableChipmunk position _ _ _)) sort _ offset now = return $ return $ RenderPixmap pix position Nothing where pix = pickAnimationFrame (animation sort) now -- before initializing the scene, all tiles in the physics scene are being merged -- (in Top.Initialisation), resulting in an AllTiles object. -- This is a workaround for merging tiles. It relies on the following things: -- 1. Tiles are static -- 2. 
Tiles are being rendered above everything else in the physics layer unwrapTSort :: Sort_ -> Maybe TSort unwrapTSort (Sort_ s) = cast s unwrapTSortEditorObject :: EditorObject Sort_ -> Maybe (EditorObject TSort) unwrapTSortEditorObject (EditorObject sort pos oem) = case unwrapTSort sort of Just tsort -> Just $ EditorObject tsort pos oem Nothing -> Nothing data AllTilesSort = AllTilesSort [EditorObject TSort] deriving (Show, Typeable) data AllTiles = AllPhysicTiles { chipmunks_ :: Chipmunk, renderables :: [(Animation Pixmap, Qt.Position Double)] } | AllMultilayerTiles { renderables :: [(Animation Pixmap, Qt.Position Double)] } deriving (Show, Typeable) mergeTiles :: I.Indexable (EditorObject Sort_) -> I.Indexable (EditorObject Sort_) mergeTiles ixs = otherObjects >: Sorts.Tiles.mkAllTiles (I.toList ixs) where otherObjects = I.filter (not . isTileSort . editorSort) ixs mkAllTiles :: [EditorObject Sort_] -> EditorObject Sort_ mkAllTiles tiles = EditorObject (Sort_ (AllTilesSort (catMaybes (fmap unwrapTSortEditorObject tiles)))) zero Nothing instance Sort AllTilesSort AllTiles where sortId _ = SortId "allTiles" freeSort = error "freeSort: not in use for AllTiles" size = error "size: not in use for AllTiles" renderIconified = error "renderIconified: not in use for AllTiles" initialize app _ Nothing (AllTilesSort editorObjects) (EditorPosition 0 0) Nothing _ = io $ AllMultilayerTiles <$> bakeTiles (map toAnimation editorObjects) where toAnimation (EditorObject sort ep Nothing) = (animation sort, epToPosition (size sort) ep) initialize app _ (Just space) (AllTilesSort editorObjects) (EditorPosition 0 0) Nothing cachedTiles = io $ do renderables <- bakeTiles $ map mkRenderable editorObjects chipmunks <- initChipmunks space cachedTiles editorObjects return $ AllPhysicTiles chipmunks renderables immutableCopy (AllPhysicTiles c x) = do c' <- CM.immutableCopy c return $ AllPhysicTiles c' x immutableCopy x = return x chipmunks (AllPhysicTiles c _) = [c] chipmunks AllMultilayerTiles{} = [] isUpdating = const False renderObject _ _ allTiles sort _ _ now = return $ fmap inner $ renderables allTiles where inner (animation, pos) = RenderPixmap (pickAnimationFrame animation now) pos Nothing mkRenderable :: EditorObject TSort -> (Animation Pixmap, Qt.Position Double) mkRenderable (EditorObject sort ep Nothing) = (animation sort, epToPosition (size sort) ep) initChipmunks :: Space -> CachedTiles -> [EditorObject TSort] -> IO Chipmunk initChipmunks space cachedTiles objects = initShapes space $ mkAbsoluteShapes cachedTiles objects -- * polygon logick -- | creates ShapeTypes with absolute coordinates -- here the actual merging of Tiles takes place mkAbsoluteShapes :: CachedTiles -> [EditorObject TSort] -> [ShapeType] mkAbsoluteShapes Nothing = map mkAbsoluteShape >>> removeStickyEdges tileMergingEpsilon mkAbsoluteShapes (Just x) = const x mkAbsoluteShape :: EditorObject TSort -> ShapeType mkAbsoluteShape (EditorObject sort ep Nothing) = mapVectors (+~ chipmunkPosition) $ mkRectFromPositions (negateAbelian halfSizeVector) halfSizeVector where halfSizeVector = size2vector $ fmap (/ 2) $ size sort baryCenterOffset = halfSizeVector chipmunkPosition = position2vector (epToPosition (size sort) ep) +~ baryCenterOffset -- * caching cacheTiles :: I.Indexable (EditorObject Sort_) -> [ShapeType] cacheTiles ixs = let tiles = catMaybes $ I.toList $ fmap unwrapTSortEditorObject ixs in mkAbsoluteShapes Nothing tiles -- * chipmunk stuff initShapes :: Space -> [ShapeType] -> IO Chipmunk initShapes space shapeTypes = do let 
shapesWithAttributes = map (mkShapeDescription tileShapeAttributes) shapeTypes initChipmunk space (bodyAttributes zero) shapesWithAttributes zero bodyAttributes :: Vector -> BodyAttributes bodyAttributes pos = StaticBodyAttributes { CM.position = pos } tileShapeAttributes :: ShapeAttributes tileShapeAttributes = ShapeAttributes { elasticity = 0.5, friction = 0.95, CM.collisionType = TileCT }
changlinli/nikki
src/Sorts/Tiles.hs
lgpl-3.0
11,829
0
28
2,809
3,357
1,717
1,640
240
3
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE UndecidableInstances #-}

module T12522 where

foo = f (Just 'c')

data D1 x
data D2

type family TF x = t | t -> x
type instance TF (D1 x, a) = Maybe (TF (x, a))
type instance TF (D2, ()) = Char

f :: TF (x, a) -> ()
f _ = ()

foo1 = f_good (Just 'c')
foo2 = f_bad (Just 'c')

type family TF2 x y = t | t -> x y
type instance TF2 Int Float = Char

type family TF_Good x y = t | t -> x y
type instance TF_Good a (Maybe x) = Maybe (TF2 a x)

f_good :: TF_Good a x -> ()
f_good _ = ()

type family TF_Bad x y = t | t -> x y
type instance TF_Bad (Maybe x) a = Maybe (TF2 a x)

f_bad :: TF_Bad x a -> ()
f_bad _ = ()

{-
Maybe Char ~ TF (xx, aa)

Model [D] s_aF4 ~ Maybe Char

[W] TF (x_aDY, a_aJn) ~ s_aF4
  FunEq --> {aJn = aJp}
[W] TF (x_aDY, a_aJp) ~ s_aF4
  FunEq --> {new derived equalities}
        [D] x_aDY ~ D1 x_aJq
        [D] a_aJp ~ a_aJR
-}
sdiehl/ghc
testsuite/tests/indexed-types/should_compile/T12522.hs
bsd-3-clause
993
0
8
260
324
189
135
-1
-1
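The test file above exercises TypeFamilyDependencies, i.e. injectivity annotations on type families. A minimal, hypothetical illustration of what the "| t -> x" part buys (module, family and function names invented here; not part of the GHC test suite): because the result of the family determines its argument, GHC can infer the argument type from a use site of the result.

{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module InjDemo where

-- An injective family: the result type r determines the argument a.
type family Id a = r | r -> a
type instance Id Int  = Int
type instance Id Bool = Bool

idFam :: Id a -> Id a
idFam x = x

-- The "| r -> a" annotation lets GHC improve the wanted constraint
-- (Id a ~ Bool) to (a ~ Bool); without the annotation this call would be
-- rejected as ambiguous.
demo :: Bool
demo = idFam True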
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="sq-AL"> <title>SOAP Support Add-on</title> <maps> <homeID>soap</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
kingthorin/zap-extensions
addOns/soap/src/main/javahelp/org/zaproxy/zap/extension/soap/resources/help_sq_AL/helpset_sq_AL.hs
apache-2.0
965
77
67
157
413
209
204
-1
-1
{-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE Strict #-} {-# LANGUAGE Trustworthy #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} -- | The type checker checks whether the program is type-consistent. module Futhark.TypeCheck ( -- * Interface checkProg, TypeError (..), ErrorCase (..), -- * Extensionality TypeM, bad, context, message, Checkable (..), CheckableOp (..), lookupVar, lookupAliases, checkOpWith, -- * Checkers require, requireI, requirePrimExp, checkSubExp, checkCerts, checkExp, checkStms, checkStm, checkType, checkExtType, matchExtPat, matchExtBranchType, argType, argAliases, noArgAliases, checkArg, checkSOACArrayArgs, checkLambda, checkBody, consume, consumeOnlyParams, binding, alternative, ) where import Control.Monad.Reader import Control.Monad.State.Strict import Control.Parallel.Strategies import Data.Bifunctor (second) import Data.List (find, intercalate, isPrefixOf, sort) import qualified Data.Map.Strict as M import Data.Maybe import qualified Data.Set as S import Futhark.Analysis.PrimExp import Futhark.Construct (instantiateShapes) import Futhark.IR.Aliases hiding (lookupAliases) import Futhark.Util import Futhark.Util.Pretty (Pretty, align, indent, ppr, prettyDoc, text, (<+>), (</>)) -- | Information about an error during type checking. The 'Show' -- instance for this type produces a human-readable description. data ErrorCase rep = TypeError String | UnexpectedType (Exp rep) Type [Type] | ReturnTypeError Name [ExtType] [ExtType] | DupDefinitionError Name | DupParamError Name VName | DupPatError VName | InvalidPatError (Pat (Aliases rep)) [ExtType] (Maybe String) | UnknownVariableError VName | UnknownFunctionError Name | ParameterMismatch (Maybe Name) [Type] [Type] | SlicingError Int Int | BadAnnotation String Type Type | ReturnAliased Name VName | UniqueReturnAliased Name | NotAnArray VName Type | PermutationError [Int] Int (Maybe VName) instance Checkable rep => Show (ErrorCase rep) where show (TypeError msg) = "Type error:\n" ++ msg show (UnexpectedType e _ []) = "Type of expression\n" ++ prettyDoc 160 (indent 2 $ ppr e) ++ "\ncannot have any type - possibly a bug in the type checker." show (UnexpectedType e t ts) = "Type of expression\n" ++ prettyDoc 160 (indent 2 $ ppr e) ++ "\nmust be one of " ++ intercalate ", " (map pretty ts) ++ ", but is " ++ pretty t ++ "." show (ReturnTypeError fname rettype bodytype) = "Declaration of function " ++ nameToString fname ++ " declares return type\n " ++ prettyTuple rettype ++ "\nBut body has type\n " ++ prettyTuple bodytype show (DupDefinitionError name) = "Duplicate definition of function " ++ nameToString name ++ "" show (DupParamError funname paramname) = "Parameter " ++ pretty paramname ++ " mentioned multiple times in argument list of function " ++ nameToString funname ++ "." show (DupPatError name) = "Variable " ++ pretty name ++ " bound twice in pattern." show (InvalidPatError pat t desc) = "Pat\n" ++ pretty pat ++ "\ncannot match value of type\n" ++ prettyTupleLines t ++ end where end = case desc of Nothing -> "." Just desc' -> ":\n" ++ desc' show (UnknownVariableError name) = "Use of unknown variable " ++ pretty name ++ "." show (UnknownFunctionError fname) = "Call of unknown function " ++ nameToString fname ++ "." 
show (ParameterMismatch fname expected got) = "In call of " ++ fname' ++ ":\n" ++ "expecting " ++ show nexpected ++ " arguments of type(s)\n" ++ intercalate ", " (map pretty expected) ++ "\nGot " ++ show ngot ++ " arguments of types\n" ++ intercalate ", " (map pretty got) where nexpected = length expected ngot = length got fname' = maybe "anonymous function" (("function " ++) . nameToString) fname show (SlicingError dims got) = show got ++ " indices given, but type of indexee has " ++ show dims ++ " dimension(s)." show (BadAnnotation desc expected got) = "Annotation of \"" ++ desc ++ "\" type of expression is " ++ pretty expected ++ ", but derived to be " ++ pretty got ++ "." show (ReturnAliased fname name) = "Unique return value of function " ++ nameToString fname ++ " is aliased to " ++ pretty name ++ ", which is not consumed." show (UniqueReturnAliased fname) = "A unique tuple element of return value of function " ++ nameToString fname ++ " is aliased to some other tuple component." show (NotAnArray e t) = "The expression " ++ pretty e ++ " is expected to be an array, but is " ++ pretty t ++ "." show (PermutationError perm rank name) = "The permutation (" ++ intercalate ", " (map show perm) ++ ") is not valid for array " ++ name' ++ "of rank " ++ show rank ++ "." where name' = maybe "" ((++ " ") . pretty) name -- | A type error. data TypeError rep = Error [String] (ErrorCase rep) instance Checkable rep => Show (TypeError rep) where show (Error [] err) = show err show (Error msgs err) = intercalate "\n" msgs ++ "\n" ++ show err -- | A tuple of a return type and a list of parameters, possibly -- named. type FunBinding rep = ([RetType (Aliases rep)], [FParam (Aliases rep)]) type VarBinding rep = NameInfo (Aliases rep) data Usage = Consumed | Observed deriving (Eq, Ord, Show) data Occurence = Occurence { observed :: Names, consumed :: Names } deriving (Eq, Show) observation :: Names -> Occurence observation = flip Occurence mempty consumption :: Names -> Occurence consumption = Occurence mempty nullOccurence :: Occurence -> Bool nullOccurence occ = observed occ == mempty && consumed occ == mempty type Occurences = [Occurence] allConsumed :: Occurences -> Names allConsumed = mconcat . map consumed seqOccurences :: Occurences -> Occurences -> Occurences seqOccurences occurs1 occurs2 = filter (not . nullOccurence) (map filt occurs1) ++ occurs2 where filt occ = occ {observed = observed occ `namesSubtract` postcons} postcons = allConsumed occurs2 altOccurences :: Occurences -> Occurences -> Occurences altOccurences occurs1 occurs2 = filter (not . nullOccurence) (map filt occurs1) ++ occurs2 where filt occ = occ { consumed = consumed occ `namesSubtract` postcons, observed = observed occ `namesSubtract` postcons } postcons = allConsumed occurs2 unOccur :: Names -> Occurences -> Occurences unOccur to_be_removed = filter (not . nullOccurence) . map unOccur' where unOccur' occ = occ { observed = observed occ `namesSubtract` to_be_removed, consumed = consumed occ `namesSubtract` to_be_removed } -- | The 'Consumption' data structure is used to keep track of which -- variables have been consumed, as well as whether a violation has been detected. 
data Consumption = ConsumptionError String | Consumption Occurences deriving (Show) instance Semigroup Consumption where ConsumptionError e <> _ = ConsumptionError e _ <> ConsumptionError e = ConsumptionError e Consumption o1 <> Consumption o2 | v : _ <- namesToList $ consumed_in_o1 `namesIntersection` used_in_o2 = ConsumptionError $ "Variable " <> pretty v <> " referenced after being consumed." | otherwise = Consumption $ o1 `seqOccurences` o2 where consumed_in_o1 = mconcat $ map consumed o1 used_in_o2 = mconcat $ map consumed o2 <> map observed o2 instance Monoid Consumption where mempty = Consumption mempty -- | The environment contains a variable table and a function table. -- Type checking happens with access to this environment. The -- function table is only initialised at the very beginning, but the -- variable table will be extended during type-checking when -- let-expressions are encountered. data Env rep = Env { envVtable :: M.Map VName (VarBinding rep), envFtable :: M.Map Name (FunBinding rep), envCheckOp :: OpWithAliases (Op rep) -> TypeM rep (), envContext :: [String] } data TState = TState { stateNames :: Names, stateCons :: Consumption } -- | The type checker runs in this monad. newtype TypeM rep a = TypeM ( ReaderT (Env rep) (StateT TState (Either (TypeError rep))) a ) deriving ( Monad, Functor, Applicative, MonadReader (Env rep), MonadState TState ) instance Checkable rep => HasScope (Aliases rep) (TypeM rep) where lookupType = fmap typeOf . lookupVar askScope = asks $ M.fromList . mapMaybe varType . M.toList . envVtable where varType (name, dec) = Just (name, dec) runTypeM :: Env rep -> TypeM rep a -> Either (TypeError rep) (a, Consumption) runTypeM env (TypeM m) = second stateCons <$> runStateT (runReaderT m env) (TState mempty mempty) bad :: ErrorCase rep -> TypeM rep a bad e = do messages <- asks envContext TypeM $ lift $ lift $ Left $ Error (reverse messages) e tell :: Consumption -> TypeM rep () tell cons = modify $ \s -> s {stateCons = stateCons s <> cons} -- | Add information about what is being type-checked to the current -- context. Liberal use of this combinator makes it easier to track -- type errors, as the strings are added to type errors signalled via -- 'bad'. context :: String -> TypeM rep a -> TypeM rep a context s = local $ \env -> env {envContext = s : envContext env} message :: Pretty a => String -> a -> String message s x = prettyDoc 80 $ text s <+> align (ppr x) -- | Mark a name as bound. If the name has been bound previously in -- the program, report a type error. bound :: VName -> TypeM rep () bound name = do already_seen <- gets $ nameIn name . stateNames when already_seen $ bad $ TypeError $ "Name " ++ pretty name ++ " bound twice" modify $ \s -> s {stateNames = oneName name <> stateNames s} occur :: Occurences -> TypeM rep () occur = tell . Consumption . filter (not . nullOccurence) -- | Proclaim that we have made read-only use of the given variable. -- No-op unless the variable is array-typed. observe :: Checkable rep => VName -> TypeM rep () observe name = do dec <- lookupVar name unless (primType $ typeOf dec) $ occur [observation $ oneName name <> aliases dec] -- | Proclaim that we have written to the given variables. consume :: Checkable rep => Names -> TypeM rep () consume als = do scope <- askScope let isArray = maybe False (not . primType . typeOf) . 
(`M.lookup` scope) occur [consumption $ namesFromList $ filter isArray $ namesToList als] collectOccurences :: TypeM rep a -> TypeM rep (a, Occurences) collectOccurences m = do old <- gets stateCons modify $ \s -> s {stateCons = mempty} x <- m new <- gets stateCons modify $ \s -> s {stateCons = old} o <- checkConsumption new pure (x, o) checkOpWith :: (OpWithAliases (Op rep) -> TypeM rep ()) -> TypeM rep a -> TypeM rep a checkOpWith checker = local $ \env -> env {envCheckOp = checker} checkConsumption :: Consumption -> TypeM rep Occurences checkConsumption (ConsumptionError e) = bad $ TypeError e checkConsumption (Consumption os) = return os alternative :: TypeM rep a -> TypeM rep b -> TypeM rep (a, b) alternative m1 m2 = do (x, os1) <- collectOccurences m1 (y, os2) <- collectOccurences m2 tell $ Consumption $ os1 `altOccurences` os2 pure (x, y) -- | Permit consumption of only the specified names. If one of these -- names is consumed, the consumption will be rewritten to be a -- consumption of the corresponding alias set. Consumption of -- anything else will result in a type error. consumeOnlyParams :: [(VName, Names)] -> TypeM rep a -> TypeM rep a consumeOnlyParams consumable m = do (x, os) <- collectOccurences m tell . Consumption =<< mapM inspect os return x where inspect o = do new_consumed <- mconcat <$> mapM wasConsumed (namesToList $ consumed o) return o {consumed = new_consumed} wasConsumed v | Just als <- lookup v consumable = return als | otherwise = bad $ TypeError $ unlines [ pretty v ++ " was invalidly consumed.", what ++ " can be consumed here." ] what | null consumable = "Nothing" | otherwise = "Only " ++ intercalate ", " (map (pretty . fst) consumable) -- | Given the immediate aliases, compute the full transitive alias -- set (including the immediate aliases). expandAliases :: Names -> Env rep -> Names expandAliases names env = names <> aliasesOfAliases where aliasesOfAliases = mconcat . map look . namesToList $ names look k = case M.lookup k $ envVtable env of Just (LetName (als, _)) -> unAliases als _ -> mempty binding :: Checkable rep => Scope (Aliases rep) -> TypeM rep a -> TypeM rep a binding stms = check . local (`bindVars` stms) where bindVars = M.foldlWithKey' bindVar boundnames = M.keys stms bindVar env name (LetName (AliasDec als, dec)) = let als' | primType (typeOf dec) = mempty | otherwise = expandAliases als env in env { envVtable = M.insert name (LetName (AliasDec als', dec)) $ envVtable env } bindVar env name dec = env {envVtable = M.insert name dec $ envVtable env} -- Check whether the bound variables have been used correctly -- within their scope. check m = do mapM_ bound $ M.keys stms (a, os) <- collectOccurences m tell $ Consumption $ unOccur (namesFromList boundnames) os return a lookupVar :: VName -> TypeM rep (NameInfo (Aliases rep)) lookupVar name = do stm <- asks $ M.lookup name . envVtable case stm of Nothing -> bad $ UnknownVariableError name Just dec -> return dec lookupAliases :: Checkable rep => VName -> TypeM rep Names lookupAliases name = do info <- lookupVar name return $ if primType $ typeOf info then mempty else oneName name <> aliases info aliases :: NameInfo (Aliases rep) -> Names aliases (LetName (als, _)) = unAliases als aliases _ = mempty subExpAliasesM :: Checkable rep => SubExp -> TypeM rep Names subExpAliasesM Constant {} = return mempty subExpAliasesM (Var v) = lookupAliases v lookupFun :: Checkable rep => Name -> [SubExp] -> TypeM rep ([RetType rep], [DeclType]) lookupFun fname args = do stm <- asks $ M.lookup fname . 
envFtable case stm of Nothing -> bad $ UnknownFunctionError fname Just (ftype, params) -> do argts <- mapM subExpType args case applyRetType ftype params $ zip args argts of Nothing -> bad $ ParameterMismatch (Just fname) (map paramType params) argts Just rt -> return (rt, map paramDeclType params) -- | @checkAnnotation loc s t1 t2@ checks if @t2@ is equal to -- @t1@. If not, a 'BadAnnotation' is raised. checkAnnotation :: String -> Type -> Type -> TypeM rep () checkAnnotation desc t1 t2 | t2 == t1 = return () | otherwise = bad $ BadAnnotation desc t1 t2 -- | @require ts se@ causes a '(TypeError vn)' if the type of @se@ is -- not a subtype of one of the types in @ts@. require :: Checkable rep => [Type] -> SubExp -> TypeM rep () require ts se = do t <- checkSubExp se unless (t `elem` ts) $ bad $ UnexpectedType (BasicOp $ SubExp se) t ts -- | Variant of 'require' working on variable names. requireI :: Checkable rep => [Type] -> VName -> TypeM rep () requireI ts ident = require ts $ Var ident checkArrIdent :: Checkable rep => VName -> TypeM rep Type checkArrIdent v = do t <- lookupType v case t of Array {} -> return t _ -> bad $ NotAnArray v t checkAccIdent :: Checkable rep => VName -> TypeM rep (Shape, [Type]) checkAccIdent v = do t <- lookupType v case t of Acc _ ispace ts _ -> pure (ispace, ts) _ -> bad . TypeError $ pretty v ++ " should be an accumulator but is of type " ++ pretty t -- | Type check a program containing arbitrary type information, -- yielding either a type error or a program with complete type -- information. checkProg :: Checkable rep => Prog (Aliases rep) -> Either (TypeError rep) () checkProg (Prog consts funs) = do let typeenv = Env { envVtable = M.empty, envFtable = mempty, envContext = [], envCheckOp = checkOp } let onFunction ftable vtable fun = fmap fst $ runTypeM typeenv $ local (\env -> env {envFtable = ftable, envVtable = vtable}) $ checkFun fun (ftable, _) <- runTypeM typeenv buildFtable (vtable, _) <- runTypeM typeenv {envFtable = ftable} $ checkStms consts $ asks envVtable sequence_ $ parMap rpar (onFunction ftable vtable) funs where buildFtable = do table <- initialFtable foldM expand table funs expand ftable (FunDef _ _ name ret params _) | M.member name ftable = bad $ DupDefinitionError name | otherwise = return $ M.insert name (ret, params) ftable initialFtable :: Checkable rep => TypeM rep (M.Map Name (FunBinding rep)) initialFtable = fmap M.fromList $ mapM addBuiltin $ M.toList builtInFunctions where addBuiltin (fname, (t, ts)) = do ps <- mapM (primFParam name) ts return (fname, ([primRetType t], ps)) name = VName (nameFromString "x") 0 checkFun :: Checkable rep => FunDef (Aliases rep) -> TypeM rep () checkFun (FunDef _ _ fname rettype params body) = context ("In function " ++ nameToString fname) $ checkFun' ( fname, map declExtTypeOf rettype, funParamsToNameInfos params ) (Just consumable) $ do checkFunParams params checkRetType rettype context "When checking function body" $ checkFunBody rettype body where consumable = [ (paramName param, mempty) | param <- params, unique $ paramDeclType param ] funParamsToNameInfos :: [FParam rep] -> [(VName, NameInfo (Aliases rep))] funParamsToNameInfos = map nameTypeAndDec where nameTypeAndDec fparam = ( paramName fparam, FParamName $ paramDec fparam ) checkFunParams :: Checkable rep => [FParam rep] -> TypeM rep () checkFunParams = mapM_ $ \param -> context ("In function parameter " ++ pretty param) $ checkFParamDec (paramName param) (paramDec param) checkLambdaParams :: Checkable rep => [LParam rep] -> TypeM rep 
() checkLambdaParams = mapM_ $ \param -> context ("In lambda parameter " ++ pretty param) $ checkLParamDec (paramName param) (paramDec param) checkFun' :: Checkable rep => ( Name, [DeclExtType], [(VName, NameInfo (Aliases rep))] ) -> Maybe [(VName, Names)] -> TypeM rep [Names] -> TypeM rep () checkFun' (fname, rettype, params) consumable check = do checkNoDuplicateParams binding (M.fromList params) $ maybe id consumeOnlyParams consumable $ do body_aliases <- check scope <- askScope let isArray = maybe False ((> 0) . arrayRank . typeOf) . (`M.lookup` scope) context ( "When checking the body aliases: " ++ pretty (map namesToList body_aliases) ) $ checkReturnAlias $ map (namesFromList . filter isArray . namesToList) body_aliases where param_names = map fst params checkNoDuplicateParams = foldM_ expand [] param_names expand seen pname | Just _ <- find (== pname) seen = bad $ DupParamError fname pname | otherwise = return $ pname : seen checkReturnAlias = foldM_ checkReturnAlias' mempty . returnAliasing rettype checkReturnAlias' seen (Unique, names) | any (`S.member` S.map fst seen) $ namesToList names = bad $ UniqueReturnAliased fname | otherwise = do consume names return $ seen <> tag Unique names checkReturnAlias' seen (Nonunique, names) | any (`S.member` seen) $ tag Unique names = bad $ UniqueReturnAliased fname | otherwise = return $ seen <> tag Nonunique names tag u = S.fromList . map (,u) . namesToList returnAliasing expected got = reverse $ zip (reverse (map uniqueness expected) ++ repeat Nonunique) $ reverse got checkSubExp :: Checkable rep => SubExp -> TypeM rep Type checkSubExp (Constant val) = return $ Prim $ primValueType val checkSubExp (Var ident) = context ("In subexp " ++ pretty ident) $ do observe ident lookupType ident checkCerts :: Checkable rep => Certs -> TypeM rep () checkCerts (Certs cs) = mapM_ (requireI [Prim Unit]) cs checkSubExpRes :: Checkable rep => SubExpRes -> TypeM rep Type checkSubExpRes (SubExpRes cs se) = do checkCerts cs checkSubExp se checkStms :: Checkable rep => Stms (Aliases rep) -> TypeM rep a -> TypeM rep a checkStms origstms m = delve $ stmsToList origstms where delve (stm@(Let pat _ e) : stms) = do context (pretty $ "In expression of statement" </> indent 2 (ppr pat)) $ checkExp e checkStm stm $ delve stms delve [] = m checkResult :: Checkable rep => Result -> TypeM rep () checkResult = mapM_ checkSubExpRes checkFunBody :: Checkable rep => [RetType rep] -> Body (Aliases rep) -> TypeM rep [Names] checkFunBody rt (Body (_, rep) stms res) = do checkBodyDec rep checkStms stms $ do context "When checking body result" $ checkResult res context "When matching declared return type to result of body" $ matchReturnType rt res map (`namesSubtract` bound_here) <$> mapM (subExpAliasesM . resSubExp) res where bound_here = namesFromList $ M.keys $ scopeOf stms checkLambdaBody :: Checkable rep => [Type] -> Body (Aliases rep) -> TypeM rep [Names] checkLambdaBody ret (Body (_, rep) stms res) = do checkBodyDec rep checkStms stms $ do checkLambdaResult ret res map (`namesSubtract` bound_here) <$> mapM (subExpAliasesM . 
resSubExp) res where bound_here = namesFromList $ M.keys $ scopeOf stms checkLambdaResult :: Checkable rep => [Type] -> Result -> TypeM rep () checkLambdaResult ts es | length ts /= length es = bad $ TypeError $ "Lambda has return type " ++ prettyTuple ts ++ " describing " ++ show (length ts) ++ " values, but body returns " ++ show (length es) ++ " values: " ++ prettyTuple es | otherwise = forM_ (zip ts es) $ \(t, e) -> do et <- checkSubExpRes e unless (et == t) $ bad $ TypeError $ "Subexpression " ++ pretty e ++ " has type " ++ pretty et ++ " but expected " ++ pretty t checkBody :: Checkable rep => Body (Aliases rep) -> TypeM rep [Names] checkBody (Body (_, rep) stms res) = do checkBodyDec rep checkStms stms $ do checkResult res map (`namesSubtract` bound_here) <$> mapM (subExpAliasesM . resSubExp) res where bound_here = namesFromList $ M.keys $ scopeOf stms checkBasicOp :: Checkable rep => BasicOp -> TypeM rep () checkBasicOp (SubExp es) = void $ checkSubExp es checkBasicOp (Opaque _ es) = void $ checkSubExp es checkBasicOp (ArrayLit [] _) = return () checkBasicOp (ArrayLit (e : es') t) = do let check elemt eleme = do elemet <- checkSubExp eleme unless (elemet == elemt) $ bad $ TypeError $ pretty elemet ++ " is not of expected type " ++ pretty elemt ++ "." et <- checkSubExp e -- Compare that type with the one given for the array literal. checkAnnotation "array-element" t et mapM_ (check et) es' checkBasicOp (UnOp op e) = require [Prim $ unOpType op] e checkBasicOp (BinOp op e1 e2) = checkBinOpArgs (binOpType op) e1 e2 checkBasicOp (CmpOp op e1 e2) = checkCmpOp op e1 e2 checkBasicOp (ConvOp op e) = require [Prim $ fst $ convOpType op] e checkBasicOp (Index ident (Slice idxes)) = do vt <- lookupType ident observe ident when (arrayRank vt /= length idxes) $ bad $ SlicingError (arrayRank vt) (length idxes) mapM_ checkDimIndex idxes checkBasicOp (Update _ src (Slice idxes) se) = do src_t <- checkArrIdent src when (arrayRank src_t /= length idxes) $ bad $ SlicingError (arrayRank src_t) (length idxes) se_aliases <- subExpAliasesM se when (src `nameIn` se_aliases) $ bad $ TypeError "The target of an Update must not alias the value to be written." mapM_ checkDimIndex idxes require [arrayOf (Prim (elemType src_t)) (Shape (sliceDims (Slice idxes))) NoUniqueness] se consume =<< lookupAliases src checkBasicOp (FlatIndex ident slice) = do vt <- lookupType ident observe ident when (arrayRank vt /= 1) $ bad $ SlicingError (arrayRank vt) 1 checkFlatSlice slice checkBasicOp (FlatUpdate src slice v) = do src_t <- checkArrIdent src when (arrayRank src_t /= 1) $ bad $ SlicingError (arrayRank src_t) 1 v_aliases <- lookupAliases v when (src `nameIn` v_aliases) $ bad $ TypeError "The target of an Update must not alias the value to be written." checkFlatSlice slice requireI [arrayOf (Prim (elemType src_t)) (Shape (flatSliceDims slice)) NoUniqueness] v consume =<< lookupAliases src checkBasicOp (Iota e x s et) = do require [Prim int64] e require [Prim $ IntType et] x require [Prim $ IntType et] s checkBasicOp (Replicate (Shape dims) valexp) = do mapM_ (require [Prim int64]) dims void $ checkSubExp valexp checkBasicOp (Scratch _ shape) = mapM_ checkSubExp shape checkBasicOp (Reshape newshape arrexp) = do rank <- arrayRank <$> checkArrIdent arrexp mapM_ (require [Prim int64] . newDim) newshape zipWithM_ (checkDimChange rank) newshape [0 ..] 
where checkDimChange _ (DimNew _) _ = return () checkDimChange rank (DimCoercion se) i | i >= rank = bad $ TypeError $ "Asked to coerce dimension " ++ show i ++ " to " ++ pretty se ++ ", but array " ++ pretty arrexp ++ " has only " ++ pretty rank ++ " dimensions" | otherwise = return () checkBasicOp (Rearrange perm arr) = do arrt <- lookupType arr let rank = arrayRank arrt when (length perm /= rank || sort perm /= [0 .. rank -1]) $ bad $ PermutationError perm rank $ Just arr checkBasicOp (Rotate rots arr) = do arrt <- lookupType arr let rank = arrayRank arrt mapM_ (require [Prim int64]) rots when (length rots /= rank) $ bad $ TypeError $ "Cannot rotate " ++ show (length rots) ++ " dimensions of " ++ show rank ++ "-dimensional array." checkBasicOp (Concat i arr1exp arr2exps ressize) = do arr1t <- checkArrIdent arr1exp arr2ts <- mapM checkArrIdent arr2exps let success = all ( (== dropAt i 1 (arrayDims arr1t)) . dropAt i 1 . arrayDims ) arr2ts unless success $ bad $ TypeError $ "Types of arguments to concat do not match. Got " ++ pretty arr1t ++ " and " ++ intercalate ", " (map pretty arr2ts) require [Prim int64] ressize checkBasicOp (Copy e) = void $ checkArrIdent e checkBasicOp (Manifest perm arr) = checkBasicOp $ Rearrange perm arr -- Basically same thing! checkBasicOp (Assert e (ErrorMsg parts) _) = do require [Prim Bool] e mapM_ checkPart parts where checkPart ErrorString {} = return () checkPart (ErrorVal t x) = require [Prim t] x checkBasicOp (UpdateAcc acc is ses) = do (shape, ts) <- checkAccIdent acc unless (length ses == length ts) $ bad $ TypeError $ "Accumulator requires " ++ show (length ts) ++ " values, but " ++ show (length ses) ++ " provided." unless (length is == shapeRank shape) $ bad $ TypeError $ "Accumulator requires " ++ show (shapeRank shape) ++ " indices, but " ++ show (length is) ++ " provided." zipWithM_ require (map pure ts) ses consume =<< lookupAliases acc matchLoopResultExt :: Checkable rep => [Param DeclType] -> Result -> TypeM rep () matchLoopResultExt merge loopres = do let rettype_ext = existentialiseExtTypes (map paramName merge) $ staticShapes $ map typeOf merge bodyt <- mapM subExpResType loopres case instantiateShapes (fmap resSubExp . (`maybeNth` loopres)) rettype_ext of Nothing -> bad $ ReturnTypeError (nameFromString "<loop body>") rettype_ext (staticShapes bodyt) Just rettype' -> unless (bodyt `subtypesOf` rettype') $ bad $ ReturnTypeError (nameFromString "<loop body>") (staticShapes rettype') (staticShapes bodyt) checkExp :: Checkable rep => Exp (Aliases rep) -> TypeM rep () checkExp (BasicOp op) = checkBasicOp op checkExp (If e1 e2 e3 info) = do require [Prim Bool] e1 _ <- context "in true branch" (checkBody e2) `alternative` context "in false branch" (checkBody e3) context "in true branch" $ matchBranchType (ifReturns info) e2 context "in false branch" $ matchBranchType (ifReturns info) e3 checkExp (Apply fname args rettype_annot _) = do (rettype_derived, paramtypes) <- lookupFun fname $ map fst args argflows <- mapM (checkArg . fst) args when (rettype_derived /= rettype_annot) $ bad . TypeError . 
pretty $ "Expected apply result type:" </> indent 2 (ppr rettype_derived) </> "But annotation is:" </> indent 2 (ppr rettype_annot) consumeArgs paramtypes argflows checkExp (DoLoop merge form loopbody) = do let (mergepat, mergeexps) = unzip merge mergeargs <- mapM checkArg mergeexps checkLoopArgs binding (scopeOf form) $ do form_consumable <- checkForm mergeargs form let rettype = map paramDeclType mergepat consumable = [ (paramName param, mempty) | param <- mergepat, unique $ paramDeclType param ] ++ form_consumable context "Inside the loop body" $ checkFun' ( nameFromString "<loop body>", staticShapes rettype, funParamsToNameInfos mergepat ) (Just consumable) $ do checkFunParams mergepat checkBodyDec $ snd $ bodyDec loopbody checkStms (bodyStms loopbody) $ do context "In loop body result" $ checkResult $ bodyResult loopbody context "When matching result of body with loop parameters" $ matchLoopResult (map fst merge) $ bodyResult loopbody let bound_here = namesFromList $ M.keys $ scopeOf $ bodyStms loopbody map (`namesSubtract` bound_here) <$> mapM (subExpAliasesM . resSubExp) (bodyResult loopbody) where checkLoopVar (p, a) = do a_t <- lookupType a observe a case peelArray 1 a_t of Just a_t_r -> do checkLParamDec (paramName p) $ paramDec p unless (a_t_r `subtypeOf` typeOf (paramDec p)) $ bad $ TypeError $ "Loop parameter " ++ pretty p ++ " not valid for element of " ++ pretty a ++ ", which has row type " ++ pretty a_t_r als <- lookupAliases a pure (paramName p, als) _ -> bad $ TypeError $ "Cannot loop over " ++ pretty a ++ " of type " ++ pretty a_t checkForm mergeargs (ForLoop loopvar it boundexp loopvars) = do iparam <- primFParam loopvar $ IntType it let mergepat = map fst merge funparams = iparam : mergepat paramts = map paramDeclType funparams consumable <- mapM checkLoopVar loopvars boundarg <- checkArg boundexp checkFuncall Nothing paramts $ boundarg : mergeargs pure consumable checkForm mergeargs (WhileLoop cond) = do case find ((== cond) . paramName . fst) merge of Just (condparam, _) -> unless (paramType condparam == Prim Bool) $ bad $ TypeError $ "Conditional '" ++ pretty cond ++ "' of while-loop is not boolean, but " ++ pretty (paramType condparam) ++ "." Nothing -> bad $ TypeError $ "Conditional '" ++ pretty cond ++ "' of while-loop is not a merge variable." let mergepat = map fst merge funparams = mergepat paramts = map paramDeclType funparams checkFuncall Nothing paramts mergeargs pure mempty checkLoopArgs = do let (params, args) = unzip merge argtypes <- mapM subExpType args let expected = expectedTypes (map paramName params) params args unless (expected == argtypes) . bad . TypeError . pretty $ "Loop parameters" </> indent 2 (ppTuple' params) </> "cannot accept initial values" </> indent 2 (ppTuple' args) </> "of types" </> indent 2 (ppTuple' argtypes) checkExp (WithAcc inputs lam) = do unless (length (lambdaParams lam) == 2 * num_accs) $ bad . TypeError $ show (length (lambdaParams lam)) ++ " parameters, but " ++ show num_accs ++ " accumulators." let cert_params = take num_accs $ lambdaParams lam acc_args <- forM (zip inputs cert_params) $ \((shape, arrs, op), p) -> do mapM_ (require [Prim int64]) (shapeDims shape) elem_ts <- forM arrs $ \arr -> do arr_t <- lookupType arr unless (shapeDims shape `isPrefixOf` arrayDims arr_t) $ bad . 
TypeError $ pretty arr <> " is not an array of outer shape " <> pretty shape consume =<< lookupAliases arr pure $ stripArray (shapeRank shape) arr_t case op of Just (op_lam, nes) -> do let mkArrArg t = (t, mempty) nes_ts <- mapM checkSubExp nes unless (nes_ts == lambdaReturnType op_lam) $ bad $ TypeError $ unlines [ "Accumulator operator return type: " ++ pretty (lambdaReturnType op_lam), "Type of neutral elements: " ++ pretty nes_ts ] checkLambda op_lam $ replicate (shapeRank shape) (Prim int64, mempty) ++ map mkArrArg (elem_ts ++ elem_ts) Nothing -> return () pure (Acc (paramName p) shape elem_ts NoUniqueness, mempty) checkAnyLambda False lam $ replicate num_accs (Prim Unit, mempty) ++ acc_args where num_accs = length inputs checkExp (Op op) = do checker <- asks envCheckOp checker op checkSOACArrayArgs :: Checkable rep => SubExp -> [VName] -> TypeM rep [Arg] checkSOACArrayArgs width = mapM checkSOACArrayArg where checkSOACArrayArg v = do (t, als) <- checkArg $ Var v case t of Acc {} -> pure (t, als) Array {} -> do let argSize = arraySize 0 t unless (argSize == width) $ bad . TypeError $ "SOAC argument " ++ pretty v ++ " has outer size " ++ pretty argSize ++ ", but width of SOAC is " ++ pretty width pure (rowType t, als) _ -> bad . TypeError $ "SOAC argument " ++ pretty v ++ " is not an array" checkType :: Checkable rep => TypeBase Shape u -> TypeM rep () checkType (Mem (ScalarSpace d _)) = mapM_ (require [Prim int64]) d checkType (Acc cert shape ts _) = do requireI [Prim Unit] cert mapM_ (require [Prim int64]) $ shapeDims shape mapM_ checkType ts checkType t = mapM_ checkSubExp $ arrayDims t checkExtType :: Checkable rep => TypeBase ExtShape u -> TypeM rep () checkExtType = mapM_ checkExtDim . shapeDims . arrayShape where checkExtDim (Free se) = void $ checkSubExp se checkExtDim (Ext _) = return () checkCmpOp :: Checkable rep => CmpOp -> SubExp -> SubExp -> TypeM rep () checkCmpOp (CmpEq t) x y = do require [Prim t] x require [Prim t] y checkCmpOp (CmpUlt t) x y = checkBinOpArgs (IntType t) x y checkCmpOp (CmpUle t) x y = checkBinOpArgs (IntType t) x y checkCmpOp (CmpSlt t) x y = checkBinOpArgs (IntType t) x y checkCmpOp (CmpSle t) x y = checkBinOpArgs (IntType t) x y checkCmpOp (FCmpLt t) x y = checkBinOpArgs (FloatType t) x y checkCmpOp (FCmpLe t) x y = checkBinOpArgs (FloatType t) x y checkCmpOp CmpLlt x y = checkBinOpArgs Bool x y checkCmpOp CmpLle x y = checkBinOpArgs Bool x y checkBinOpArgs :: Checkable rep => PrimType -> SubExp -> SubExp -> TypeM rep () checkBinOpArgs t e1 e2 = do require [Prim t] e1 require [Prim t] e2 checkPatElem :: Checkable rep => PatElemT (LetDec rep) -> TypeM rep () checkPatElem (PatElem name dec) = context ("When checking pattern element " ++ pretty name) $ checkLetBoundDec name dec checkFlatDimIndex :: Checkable rep => FlatDimIndex SubExp -> TypeM rep () checkFlatDimIndex (FlatDimIndex n s) = mapM_ (require [Prim int64]) [n, s] checkFlatSlice :: Checkable rep => FlatSlice SubExp -> TypeM rep () checkFlatSlice (FlatSlice offset idxs) = do require [Prim int64] offset mapM_ checkFlatDimIndex idxs checkDimIndex :: Checkable rep => DimIndex SubExp -> TypeM rep () checkDimIndex (DimFix i) = require [Prim int64] i checkDimIndex (DimSlice i n s) = mapM_ (require [Prim int64]) [i, n, s] checkStm :: Checkable rep => Stm (Aliases rep) -> TypeM rep a -> TypeM rep a checkStm stm@(Let pat (StmAux (Certs cs) _ (_, dec)) e) m = do context "When checking certificates" $ mapM_ (requireI [Prim Unit]) cs context "When checking expression annotation" $ checkExpDec dec context 
("When matching\n" ++ message " " pat ++ "\nwith\n" ++ message " " e) $ matchPat pat e binding (maybeWithoutAliases $ scopeOf stm) $ do mapM_ checkPatElem (patElems $ removePatAliases pat) m where -- FIXME: this is wrong. However, the core language type system -- is not strong enough to fully capture the aliases we want (see -- issue #803). Since we eventually inline everything anyway, and -- our intra-procedural alias analysis is much simpler and -- correct, I could not justify spending time on improving the -- inter-procedural alias analysis. If we ever stop inlining -- everything, probably we need to go back and refine this. maybeWithoutAliases = case stmExp stm of Apply {} -> M.map withoutAliases _ -> id withoutAliases (LetName (_, ldec)) = LetName (mempty, ldec) withoutAliases info = info matchExtPat :: Checkable rep => Pat (Aliases rep) -> [ExtType] -> TypeM rep () matchExtPat pat ts = unless (expExtTypesFromPat pat == ts) $ bad $ InvalidPatError pat ts Nothing matchExtReturnType :: Checkable rep => [ExtType] -> Result -> TypeM rep () matchExtReturnType rettype res = do ts <- mapM subExpResType res matchExtReturns rettype res ts matchExtBranchType :: Checkable rep => [ExtType] -> Body (Aliases rep) -> TypeM rep () matchExtBranchType rettype (Body _ stms res) = do ts <- extendedScope (traverse subExpResType res) stmscope matchExtReturns rettype res ts where stmscope = scopeOf stms matchExtReturns :: [ExtType] -> Result -> [Type] -> TypeM rep () matchExtReturns rettype res ts = do let problem :: TypeM rep a problem = bad $ TypeError $ unlines [ "Type annotation is", " " ++ prettyTuple rettype, "But result returns type", " " ++ prettyTuple ts ] unless (length res == length rettype) problem let ctx_vals = zip res ts instantiateExt i = case maybeNth i ctx_vals of Just (SubExpRes _ se, Prim (IntType Int64)) -> return se _ -> problem rettype' <- instantiateShapes instantiateExt rettype unless (rettype' == ts) problem validApply :: ArrayShape shape => [TypeBase shape Uniqueness] -> [TypeBase shape NoUniqueness] -> Bool validApply expected got = length got == length expected && and ( zipWith subtypeOf (map rankShaped got) (map (fromDecl . rankShaped) expected) ) type Arg = (Type, Names) argType :: Arg -> Type argType (t, _) = t -- | Remove all aliases from the 'Arg'. argAliases :: Arg -> Names argAliases (_, als) = als noArgAliases :: Arg -> Arg noArgAliases (t, _) = (t, mempty) checkArg :: Checkable rep => SubExp -> TypeM rep Arg checkArg arg = do argt <- checkSubExp arg als <- subExpAliasesM arg return (argt, als) checkFuncall :: Maybe Name -> [DeclType] -> [Arg] -> TypeM rep () checkFuncall fname paramts args = do let argts = map argType args unless (validApply paramts argts) $ bad $ ParameterMismatch fname (map fromDecl paramts) $ map argType args consumeArgs paramts args consumeArgs :: [DeclType] -> [Arg] -> TypeM rep () consumeArgs paramts args = forM_ (zip (map diet paramts) args) $ \(d, (_, als)) -> occur [consumption (consumeArg als d)] where consumeArg als Consume = als consumeArg _ _ = mempty -- The boolean indicates whether we only allow consumption of -- parameters. checkAnyLambda :: Checkable rep => Bool -> Lambda (Aliases rep) -> [Arg] -> TypeM rep () checkAnyLambda soac (Lambda params body rettype) args = do let fname = nameFromString "<anonymous>" if length params == length args then do -- Consumption for this is done explicitly elsewhere. checkFuncall Nothing (map ((`toDecl` Nonunique) . 
paramType) params) $ map noArgAliases args let consumable = if soac then Just $ zip (map paramName params) (map argAliases args) else Nothing checkFun' ( fname, staticShapes $ map (`toDecl` Nonunique) rettype, [ ( paramName param, LParamName $ paramDec param ) | param <- params ] ) consumable $ do checkLambdaParams params mapM_ checkType rettype checkLambdaBody rettype body else bad $ TypeError $ "Anonymous function defined with " ++ show (length params) ++ " parameters:\n" ++ pretty params ++ "\nbut expected to take " ++ show (length args) ++ " arguments." checkLambda :: Checkable rep => Lambda (Aliases rep) -> [Arg] -> TypeM rep () checkLambda = checkAnyLambda True checkPrimExp :: Checkable rep => PrimExp VName -> TypeM rep () checkPrimExp ValueExp {} = return () checkPrimExp (LeafExp v pt) = requireI [Prim pt] v checkPrimExp (BinOpExp op x y) = do requirePrimExp (binOpType op) x requirePrimExp (binOpType op) y checkPrimExp (CmpOpExp op x y) = do requirePrimExp (cmpOpType op) x requirePrimExp (cmpOpType op) y checkPrimExp (UnOpExp op x) = requirePrimExp (unOpType op) x checkPrimExp (ConvOpExp op x) = requirePrimExp (fst $ convOpType op) x checkPrimExp (FunExp h args t) = do (h_ts, h_ret, _) <- maybe (bad $ TypeError $ "Unknown function: " ++ h) return $ M.lookup h primFuns when (length h_ts /= length args) $ bad $ TypeError $ "Function expects " ++ show (length h_ts) ++ " parameters, but given " ++ show (length args) ++ " arguments." when (h_ret /= t) $ bad $ TypeError $ "Function return annotation is " ++ pretty t ++ ", but expected " ++ pretty h_ret zipWithM_ requirePrimExp h_ts args requirePrimExp :: Checkable rep => PrimType -> PrimExp VName -> TypeM rep () requirePrimExp t e = context ("in PrimExp " ++ pretty e) $ do checkPrimExp e unless (primExpType e == t) $ bad $ TypeError $ pretty e ++ " must have type " ++ pretty t class ASTRep rep => CheckableOp rep where checkOp :: OpWithAliases (Op rep) -> TypeM rep () -- ^ Used at top level; can be locally changed with 'checkOpWith'. -- | The class of representations that can be type-checked. class (ASTRep rep, CanBeAliased (Op rep), CheckableOp rep) => Checkable rep where checkExpDec :: ExpDec rep -> TypeM rep () checkBodyDec :: BodyDec rep -> TypeM rep () checkFParamDec :: VName -> FParamInfo rep -> TypeM rep () checkLParamDec :: VName -> LParamInfo rep -> TypeM rep () checkLetBoundDec :: VName -> LetDec rep -> TypeM rep () checkRetType :: [RetType rep] -> TypeM rep () matchPat :: Pat (Aliases rep) -> Exp (Aliases rep) -> TypeM rep () primFParam :: VName -> PrimType -> TypeM rep (FParam (Aliases rep)) matchReturnType :: [RetType rep] -> Result -> TypeM rep () matchBranchType :: [BranchType rep] -> Body (Aliases rep) -> TypeM rep () matchLoopResult :: [FParam (Aliases rep)] -> Result -> TypeM rep () default checkExpDec :: ExpDec rep ~ () => ExpDec rep -> TypeM rep () checkExpDec = return default checkBodyDec :: BodyDec rep ~ () => BodyDec rep -> TypeM rep () checkBodyDec = return default checkFParamDec :: FParamInfo rep ~ DeclType => VName -> FParamInfo rep -> TypeM rep () checkFParamDec _ = checkType default checkLParamDec :: LParamInfo rep ~ Type => VName -> LParamInfo rep -> TypeM rep () checkLParamDec _ = checkType default checkLetBoundDec :: LetDec rep ~ Type => VName -> LetDec rep -> TypeM rep () checkLetBoundDec _ = checkType default checkRetType :: RetType rep ~ DeclExtType => [RetType rep] -> TypeM rep () checkRetType = mapM_ $ checkExtType . 
declExtTypeOf default matchPat :: Pat (Aliases rep) -> Exp (Aliases rep) -> TypeM rep () matchPat pat = matchExtPat pat <=< expExtType default primFParam :: FParamInfo rep ~ DeclType => VName -> PrimType -> TypeM rep (FParam (Aliases rep)) primFParam name t = return $ Param mempty name (Prim t) default matchReturnType :: RetType rep ~ DeclExtType => [RetType rep] -> Result -> TypeM rep () matchReturnType = matchExtReturnType . map fromDecl default matchBranchType :: BranchType rep ~ ExtType => [BranchType rep] -> Body (Aliases rep) -> TypeM rep () matchBranchType = matchExtBranchType default matchLoopResult :: FParamInfo rep ~ DeclType => [FParam (Aliases rep)] -> Result -> TypeM rep () matchLoopResult = matchLoopResultExt
HIPERFIT/futhark
src/Futhark/TypeCheck.hs
isc
46,997
0
27
12,645
15,206
7,422
7,784
1,274
5
{-# LANGUAGE CPP #-}
module GHCJS.DOM.PerformanceEntryList (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.PerformanceEntryList
#else
#endif
  ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.PerformanceEntryList
#else
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/PerformanceEntryList.hs
mit
373
0
5
33
33
26
7
4
0
-- | -- Module : Control.Concurrent.Actor.Internal -- Copyright : (c) 2014 Forkk -- License : MIT -- Maintainer : [email protected] -- Stability : experimental -- Portability : GHC only (requires throwTo) -- -- Module exposing more of hactor's internals. Use with caution. -- module Control.Concurrent.Actor.Internal ( -- * Types ActorHandle (..) , ActorMessage , MonadActor , ActorM -- * Sending Messages , send -- * Receiving Messages , receive , receiveMaybe , receiveSTM -- * Spawning Actors , runActorM , wrapActor , spawnActor , runActor -- * Getting Information , self , actorThread -- * Internals , ActorContext (..) , MailBox , getContext , getMailBox ) where import Control.Applicative import Control.Concurrent import Control.Concurrent.STM import Control.Monad.Base import Control.Monad.Reader import Control.Monad.Trans () import Control.Monad.Trans.Control import Control.Monad.Trans.Resource -- {{{ Types -- {{{ Message -- | The @ActorMessage@ class must be implemented by any type that will be sent -- as a message to actors. -- Any given type of actor will have one @ActorMessage@ type that is sent to -- that actor. This ensures type safety. -- Currently this is simply a dummy class with nothing in it, but things may be -- added in the future. class ActorMessage msg -- Allow actors that don't take messages. instance ActorMessage () -- }}} -- {{{ Handle and context -- | An @ActorHandle@ acts as a reference to a specific actor. data ActorMessage msg => ActorHandle msg = ActorHandle { ahContext :: ActorContext msg -- Context for this handle's actor. , ahThread :: ThreadId -- The actor's thread ID. } -- | The @ActorContext@ holds shared information about a given actor. -- This is information such as the actor's mail box, the list of actors it's -- linked to, etc. data ActorMessage msg => ActorContext msg = ActorContext { acMailBox :: MailBox msg -- Channel for the actor's messages. } -- | The type for the actor's mail box. type MailBox msg = TChan msg -- }}} -- }}} type MonadActorSuper m = (Functor m, Applicative m, Monad m, MonadIO m, MonadThrow m) -- | The `MonadActor` typeclass. This provides the `actorCtx` function, which -- all of the actor monad's functionality is based on. class (ActorMessage msg, MonadActorSuper m) => MonadActor msg m where actorCtx :: m (ActorContext msg) -- | The base actor monad. newtype ActorM msg a = A { unA :: ReaderT (ActorContext msg) IO a } deriving (Functor, Applicative, Monad, MonadIO, MonadThrow) -- {{{ MonadActor instances instance (ActorMessage msg) => MonadActor msg (ActorM msg) where actorCtx = A $ ask instance (ActorMessage msg, MonadActor msg m, MonadTrans t, MonadActorSuper (t m)) => MonadActor msg (t m) where actorCtx = lift actorCtx -- }}} -- | Runs the given `ActorM` in the IO monad with the given context. runActorM :: (ActorMessage msg) => ActorM msg a -> ActorContext msg -> IO a runActorM act ctx = runReaderT (unA act) ctx -- {{{ Stupid MonadBase nonsense for MonadResource support. instance (ActorMessage msg) => MonadBase IO (ActorM msg) where liftBase = A . liftBase instance (ActorMessage msg) => MonadBaseControl IO (ActorM msg) where newtype StM (ActorM msg) a = StMA { unStMA :: StM (ReaderT (ActorContext msg) IO) a } liftBaseWith f = A . liftBaseWith $ \runInBase -> f $ liftM StMA . runInBase . unA restoreM = A . restoreM . unStMA -- }}} -- {{{ Get info -- | Gets a handle to the current actor. 
self :: (ActorMessage msg, MonadActor msg m) => m (ActorHandle msg) self = do context <- actorCtx thread <- liftIO $ myThreadId return $ ActorHandle context thread -- | Retrieves the mail box for the current actor. -- This is an internal function and may be dangerous. Use with caution. getMailBox :: (ActorMessage msg, MonadActor msg m) => m (MailBox msg) getMailBox = acMailBox <$> actorCtx -- | Gets the internal context object for the current actor. -- This is an internal function and may be dangerous. Use with caution. getContext :: (ActorMessage msg, MonadActor msg m) => m (ActorContext msg) getContext = actorCtx -- }}} -- {{{ Receiving -- | Reads a message from the actor's mail box. -- If there are no messages, blocks until one is received. If you don't want -- this, use @receiveMaybe@ instead. receive :: (ActorMessage msg, MonadActor msg m) => m (msg) receive = do chan <- getMailBox -- Read from the channel, retrying if there is nothing to read. liftIO $ atomically $ readTChan chan -- | Reads a message from the actor's mail box. -- If there are no messages, returns @Nothing@. receiveMaybe :: (ActorMessage msg, MonadActor msg m) => m (Maybe msg) receiveMaybe = do chan <- getMailBox liftIO $ atomically $ tryReadTChan chan -- | An @ActorM@ action which returns an @STM@ action to receive a message. receiveSTM :: (ActorMessage msg, MonadActor msg m) => m (STM msg) receiveSTM = do chan <- getMailBox return $ readTChan chan -- }}} -- {{{ Sending -- | Sends a message to the given actor handle. send :: (MonadIO m, ActorMessage msg) => ActorHandle msg -> msg -> m () send hand msg = liftIO $ atomically $ writeTChan mailBox $ msg where mailBox = handleMailBox hand -- }}} -- {{{ Spawning -- | Internal function for starting actors. -- This takes an @ActorM@ action, makes a channel for it, wraps it in exception -- handling stuff, and turns it into an IO monad. The function returns a tuple -- containing the actor's context and the IO action to execute the actor. wrapActor :: ActorMessage msg => ActorM msg () -> IO (IO (), ActorContext msg) wrapActor actorAction = do -- TODO: Exception handling. -- First, create a channel for the actor. chan <- atomically newTChan -- Next, create the context and run the ReaderT action. let context = ActorContext chan ioAction = runActorM actorAction context -- Return the information. return (ioAction, context) -- | Spawns the given actor on another thread and returns a handle to it. spawnActor :: ActorMessage msg => ActorM msg () -> IO (ActorHandle msg) spawnActor actorAction = do -- Wrap the actor action. (ioAction, context) <- wrapActor actorAction -- Fork the actor's IO action to another thread. thread <- forkIO ioAction -- Return the handle. return $ ActorHandle context thread -- | Runs the given actor on the current thread. -- This function effectively turns the current thread into the actor's thread. -- Obviously, this means that this function will block until the actor exits. -- You probably want to use this for your "main" actor. runActor :: ActorMessage msg => ActorM msg () -> IO () runActor actorAction = do -- Wrap the actor action. We discard the context, because we won't be -- returning a handle to this actor. (ioAction, _) <- wrapActor actorAction -- Execute the IO action on the current thread. ioAction -- }}} -- {{{ Utility functions -- | Gets the mail box for the given handle. handleMailBox :: ActorMessage msg => ActorHandle msg -> MailBox msg handleMailBox = acMailBox . ahContext -- | Gets the thread ID for the given actor handle. 
actorThread :: ActorMessage msg => ActorHandle msg -> ThreadId actorThread = ahThread -- }}}
Forkk/hactor
Control/Concurrent/Actor/Internal.hs
mit
7,435
0
12
1,600
1,380
759
621
-1
-1
{-# LANGUAGE CPP #-}
module GHCJS.DOM.XMLHttpRequestProgressEvent (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.XMLHttpRequestProgressEvent
#else
#endif
  ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.XMLHttpRequestProgressEvent
#else
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/XMLHttpRequestProgressEvent.hs
mit
394
0
5
33
33
26
7
4
0
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
--         For email, run on linux (perl v5.8.5):
--         perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
--
-- Naive algorithm for now
--
findSubStr :: String -> String -> Bool
findSubStr ns hs
  | length ns > length hs       = False
  | (take (length ns) hs) == ns = True
  | otherwise                   = findSubStr ns (drop 1 hs)

runTestCase :: Int -> IO ()
runTestCase 0 = return ()
runTestCase n = do
  haystack <- getLine
  needle   <- getLine
  let ans = findSubStr needle haystack
  putStrLn $ if ans == True then "YES" else "NO"
  runTestCase (n-1)

main :: IO ()
main = do
  tc <- getLine
  runTestCase (read tc)
cbrghostrider/Hacking
HackerRank/FunctionalProgramming/FunctionalStructures/subStrSearchNaive.hs
mit
934
0
12
205
226
110
116
17
2
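-- Editor's usage sketch (not part of the original cbrghostrider/Hacking entry
-- above): GHCi-style checks for the naive findSubStr defined in that file.
--
--   findSubStr "lo wo" "hello world"  == True
--   findSubStr "worlds" "hello world" == False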
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}

import Criterion.Main
import Text.Ogmarkup
import Data.Text (Text)
import Data.FileEmbed

main = defaultMain [ bench "nf ogmarkup text -> text" $ nf (ogmarkup inputText) ConfText
                   , bench "whnf ogmarkup text -> text" $ whnf (ogmarkup inputText) ConfText
                   , bench "nf ogmarkup string -> string" $ nf (ogmarkup inputString) ConfString
                   , bench "whnf ogmarkup string -> string" $ whnf (ogmarkup inputString) ConfString
                   ]

inputString :: String
inputString = $(embedFile "bench/test.up")

inputText :: Text
inputText = $(embedFile "bench/test.up")

data ConfText = ConfText

instance GenConf ConfText Text where
  printSpace _ _ = " "

data ConfString = ConfString

instance GenConf ConfString String where
  printSpace _ _ = " "
ogma-project/ogmarkup
bench/Bench.hs
mit
982
0
10
215
211
110
101
23
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeSynonymInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module GhostLang.Stringify where import Control.Monad (forM_) import Control.Monad.Writer (execWriter, tell) import Data.Char (toLower) import Data.List (intercalate) import Data.Maybe (fromJust, isJust) import GhostLang.CommonGenerators () import GhostLang.Interpreter (IntrinsicSet (..)) import GhostLang.Types ( Label , GhostModule (..) , ModuleDecl (..) , ImportDecl (..) , TimeUnit (..) , Payload (..) , Pace (..) , Method (..) , Content (..) , Value (..) , Pattern (..) , Procedure (..) , Operation (..) ) import Text.Printf (printf) import qualified Data.Text as T -- | Typeclass to "stringify", i.e. convert a data structure to real -- syntax. class Stringify a where stringify :: a -> String instance Stringify Label where stringify = T.unpack instance Stringify a => Stringify (GhostModule a) where stringify (GhostModule modDecl impDecls patterns procs) = execWriter $ do tell $ printf "%s\n\n" (stringify modDecl) forM_ impDecls $ \impDecl -> tell $ printf "%s\n" (stringify impDecl) forM_ patterns $ \pattern -> tell $ printf "%s\n" (stringify pattern) forM_ procs $ \proc -> tell $ printf "%s\n" (stringify proc) instance Stringify ModuleDecl where stringify (ModuleDecl _ segs) = printf "module %s" (str segs) where str = intercalate "." instance Stringify ImportDecl where stringify (ImportDecl segs) = printf "import %s" (str segs) where str = intercalate "." instance Stringify Value where stringify (Literal v) = printf "literal(%ld)" v stringify (Stored v) = printf "%s" (T.unpack v) stringify (Gaussian v1 v2) = printf "gaussian(%ld, %ld)" v1 v2 stringify (Uniform v1 v2) = printf "uniform(%ld, %ld)" v1 v2 instance Stringify TimeUnit where stringify (USec v) = printf "%s usec" (stringify v) stringify (MSec v) = printf "%s msec" (stringify v) stringify (Sec v) = printf "%s sec" (stringify v) instance Stringify Payload where stringify (B v) = printf "%s B" (stringify v) stringify (KB v) = printf "%s KB" (stringify v) stringify (MB v) = printf "%s MB" (stringify v) stringify (GB v) = printf "%s GB" (stringify v) instance Stringify Pace where stringify (Bps v) = printf "%s bps" (stringify v) stringify (Kbps v) = printf "%s kbps" (stringify v) stringify (Mbps v) = printf "%s mbps" (stringify v) stringify (Gbps v) = printf "%s gbps" (stringify v) instance Stringify Method where stringify m = show m instance Stringify Content where stringify c = map toLower $ show c instance Stringify IntrinsicSet where stringify (Delay t) = printf "Delay %s" (stringify t) stringify (Http m cs s p) = let str = printf "Http %s [%s] %s" (stringify m) (toCommaStr cs) (stringify s) in if isJust p then str ++ printf " %s" (stringify $ fromJust p) else str instance Stringify a => Stringify (Pattern a) where stringify (Pattern _ n w ops) = printf "pattern %s with weight %ld { %s }" (T.unpack n) w (stringify ops) instance Stringify a => Stringify (Procedure a) where stringify (Procedure n ps ops) = printf "procedure %s(%s) { %s }" (T.unpack n) (toCommaStr ps) (stringify ops) instance Stringify a => Stringify (Operation a) where stringify (Invoke i) = stringify i stringify (Loop c is) = printf "loop %s { %s }" (stringify c) (stringify is) stringify (Concurrently is) = printf "concurrently { %s }" (stringify is) stringify (Unresolved _ n vs) = printf "%s (%s)" (T.unpack n) (toCommaStr vs) stringify (Call _ _) = error "Not used" instance Stringify a => Stringify [a] where stringify [] = "" stringify xs = let y:ys = map 
stringify xs ys' = map (',':) ys in unlines (y:ys') toCommaStr :: Stringify a => [a] -> String toCommaStr [] = "" toCommaStr xs = let y:ys = map stringify xs ys' = concatMap (',':) ys in y ++ ys'
kosmoskatten/ghost-lang
ghost-lang/test/GhostLang/Stringify.hs
mit
4,701
0
14
1,559
1,486
757
729
105
1
import Control.Monad
import Data.List.Extra
import Data.Maybe
import qualified Data.Char as C
import qualified Data.Map as Map
import qualified Data.Set as Set

------

iread :: String -> Int
iread = read

do2 f g x = (f x, g x)

answer :: (Show a) => (String -> a) -> IO ()
answer f = interact $ (++"\n") . show . f

ord0 c = C.ord c - C.ord 'a'
chr0 i = C.chr (i + C.ord 'a')
incletter c i = chr0 ((ord0 c + i) `mod` 26)

splitOn1 a b = fromJust $ stripInfix a b
rsplitOn1 a b = fromJust $ stripInfixEnd a b

-- pull out every part of a String that can be read in
-- for some Read a and ignore the rest
readOut :: Read a => String -> [a]
readOut "" = []
readOut s = case reads s of
  []         -> readOut $ tail s
  [(x, s')]  -> x : readOut s'
  _          -> error "ambiguous parse"

ireadOut :: String -> [Int]
ireadOut = readOut

--------

main = answer $ map ireadOut . lines
msullivan/advent-of-code
2017/Template.hs
mit
866
0
10
197
377
201
176
25
3
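-- Editor's usage sketch (not part of the original msullivan/advent-of-code
-- template above): how readOut/ireadOut behave, assuming the definitions in
-- that entry. reads skips one non-parsing character at a time, so
--
--   ireadOut "x=3, y=-12" == [3,-12]
--   ireadOut "no digits"  == []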
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}

{- |

Description : command-line option parsing class
Copyright   : (c) Martyn J. Pearce 2014, 2015
License     : BSD
Maintainer  : [email protected]

a class for cmdline parseable data types

 -}

module Console.Getopt.CmdlineParseable
  ( CmdlineParseable(..), FileRO, getHandle )
where

-- base --------------------------------

import System.IO  ( Handle, IOMode( ReadMode ), openFile )

-- deepseq -----------------------------

import Control.DeepSeq  ( NFData )

--------------------------------------------------------------------------------

{- | A type that may be parsed from the cmdline.  The purpose of this class is
     to provide an overridable default for how to parse, including any IO
     required.  That default is @return . read@
 -}

class CmdlineParseable a where
  enactOpt :: String -> IO a

-- FileRO ----------------------------------------------------------------------

-- | a CmdlineParseable file, in which a file path provided on the cmdline is
--   returned as a Handle which is opened RO (and thus fails at options-parsing
--   time if the open fails)

newtype FileRO = FRO { getHandle :: Handle -- ^ the encapsulated IO handle
                     }

instance Show FileRO where
  show fro = "FRO: " ++ show (getHandle fro)

instance NFData FileRO where

instance CmdlineParseable FileRO where
  enactOpt = fmap FRO . flip openFile ReadMode

instance (Read a) => CmdlineParseable a where
  enactOpt = return . read
sixears/getopt
src/Console/Getopt/CmdlineParseable.hs
mit
1,626
0
9
316
188
111
77
18
0
module Language.MSH.StateDecl (
    module Language.MSH.MethodTable,
    StateMod(..),
    StateObjCtr(..),
    StateMemberDecl(..),
    StateDecl(..),
    isBaseClass,
    isAbstractClass,
    isFinalClass,
    ctrsForClass
) where

import Language.Haskell.TH
import Language.Haskell.TH.Syntax

import Language.MSH.MethodTable

data StateMod = Abstract | Final deriving Show

data StateObjCtr = DataCtr | StartCtr | MiddleCtr | EndCtr deriving Show

data StateMemberDecl = StateDataDecl {
    stateDataName :: String,
    stateDataExpr :: Maybe String,
    stateDataType :: String
} deriving Show

data StateDecl = StateDecl {
    stateMod     :: Maybe StateMod,
    stateName    :: String,
    stateParams  :: [String],
    stateParentN :: Maybe String,
    stateParent  :: Maybe StateDecl,
    stateData    :: [StateMemberDecl],
    stateBody    :: [Dec],
    stateMethods :: MethodTable
} deriving Show

isBaseClass :: StateDecl -> Bool
isBaseClass (StateDecl { stateParentN = Nothing } ) = True
isBaseClass _                                       = False

isAbstractClass :: StateDecl -> Bool
isAbstractClass (StateDecl { stateMod = Just Abstract }) = True
isAbstractClass _                                        = False

isFinalClass :: StateDecl -> Bool
isFinalClass (StateDecl { stateMod = Just Final }) = True
isFinalClass _                                     = False

--isOverriden :: String -> StateDecl -> Bool
--isOverriden name (StateDecl { stateMethods = ms }) =

-- | `ctrsForClass dec' returns a list of object states for the state class
--   described by `dec'
ctrsForClass :: StateDecl -> [StateObjCtr]
ctrsForClass (StateDecl { stateParentN = p, stateMod = m }) = case p of
    Nothing  -> case m of
        Nothing         -> [DataCtr, StartCtr]
        (Just Abstract) -> [StartCtr]
        (Just Final)    -> [DataCtr]
    (Just _) -> case m of
        Nothing         -> [DataCtr, StartCtr, MiddleCtr, EndCtr]
        (Just Abstract) -> [StartCtr, MiddleCtr]
        (Just Final)    -> [DataCtr, EndCtr]
mbg/monadic-state-hierarchies
Language/MSH/StateDecl.hs
mit
1,926
0
12
439
503
297
206
49
6
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE NoMonomorphismRestriction #-}

module Fun.Direct.Config
    ( module Fun.Direct.Config, module Fun.Matrix, mktafel2, mkmatrix )
    where

import Fun.Type
import Fun.Matrix
import Fun.Table

import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter

import Data.Typeable
import Data.Array
import Data.List

data Primrec_2D = Primrec_2D deriving ( Typeable )

data Config = Config
    { table :: Matrix Integer
    , properties :: [ Property ]
    } deriving Typeable

example :: Config
example = Config
    { Fun.Direct.Config.table = examplematrix 10
    , properties = [ Builtins [] ]
    }

$(derives [makeReader, makeToDoc] [''Primrec_2D])
$(derives [makeReader, makeToDoc] [''Config])

-- local variables:
-- mode: haskell
-- end:
florianpilz/autotool
src/Fun/Direct/Config.hs
gpl-2.0
808
2
9
156
209
125
84
27
1
{- | Module : $Header$ Description : static basic analysis for FPL Copyright : (c) Christian Maeder, DFKI GmbH 2011 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : provisional Portability : portable basic static analysis for FPL -} module Fpl.StatAna ( basicFplAnalysis , minFplTerm , simplifyTermExt ) where import Fpl.As import Fpl.Sign import CASL.Sign import CASL.MixfixParser import CASL.StaticAna import CASL.AS_Basic_CASL import CASL.ShowMixfix import CASL.Overload import CASL.Quantification import CASL.SimplifySen import Common.AS_Annotation import Common.DocUtils import Common.ExtSign import Common.GlobalAnnotations import Common.Id import Common.Lib.State import Common.Result import Common.Utils import qualified Common.Lib.MapSet as MapSet import Control.Monad import qualified Data.Set as Set import qualified Data.Map as Map import Data.Maybe basicFplAnalysis :: (FplBasicSpec, FplSign, GlobalAnnos) -> Result (FplBasicSpec, ExtSign FplSign Symbol, [Named FplForm]) basicFplAnalysis (b, s, ga) = fmap (\ (r, ExtSign t syms, sens) -> (r, ExtSign (delBuiltins t) syms, sens)) $ basicAnalysis minFplTerm anaFplExt (const return) mixFplAna (b, addBuiltins s, ga) mixFplAna :: Mix FplExt () TermExt SignExt mixFplAna = emptyMix { getBaseIds = fplIds , putParen = mapTermExt , mixResolve = resolveTermExt } fplIds :: FplExt -> IdSets fplIds fe = case fe of FplSortItems sis _ -> unite $ map (fplSortIds . item) sis FplOpItems ois _ -> unite $ map (fplOpIds . item) ois fplSortIds :: FplSortItem -> IdSets fplSortIds si = case si of FreeType dt -> (ids_DATATYPE_DECL dt, Set.empty) CaslSortItem _ -> emptyIdSets fplOpIds :: FplOpItem -> IdSets fplOpIds oi = let e = Set.empty in case oi of FunOp (FunDef i (Op_head _ vs _ _) _ _) -> let s = Set.singleton i in (if null vs then (s, e) else (e, s), e) CaslOpItem o -> (ids_OP_ITEM o, e) -- | put parens around terms mapTermExt :: TermExt -> TermExt mapTermExt te = let rec = mapTerm mapTermExt in case te of FixDef fd -> FixDef $ mapFunDef fd Case o l r -> Case (rec o) (map (\ (p, t) -> (rec p, rec t)) l) r Let fd t r -> Let (mapFunDef fd) (rec t) r IfThenElse i t e r -> IfThenElse (rec i) (rec t) (rec e) r EqTerm t e r -> EqTerm (rec t) (rec e) r BoolTerm t -> BoolTerm (rec t) -- | put parens around final term mapFunDef :: FunDef -> FunDef mapFunDef (FunDef o h at r) = FunDef o h (fmap (mapTerm mapTermExt) at) r {- | The is the plugin function for the mixfix analysis. Due to patterns there may be unknown simple identifiers that are turned to constants and later by the overload resolution to variables. Obviously, such variables cannot be fed into the mixfix analysis like all other known variables. 
-} resolveTermExt :: MixResolve TermExt resolveTermExt ga ids te = let recAux = resolveMixTrm mapTermExt resolveTermExt ga rec = recAux ids in case te of FixDef fd -> fmap FixDef $ resolveFunDef ga ids fd Case o l r -> do ro <- rec o -- CHECK: consider pattern variables rl <- mapM (\ (p, t) -> liftM2 (,) (rec p) $ rec t) l return $ Case ro rl r Let fd@(FunDef o _ _ _) t r -> do rfd <- resolveFunDef ga ids fd rt <- recAux (addIdToRules o ids) t return $ Let rfd rt r IfThenElse i t e r -> do ri <- rec i rt <- rec t re <- rec e return $ IfThenElse ri rt re r EqTerm t e r -> do rt <- rec t re <- rec e return $ EqTerm rt re r BoolTerm t -> fmap BoolTerm $ rec t -- | resolve overloading in rhs and assume function to be in the signature resolveFunDef :: MixResolve FunDef resolveFunDef ga ids (FunDef o h@(Op_head _ vs _ _) at r) = do nt <- resolveMixTrm mapTermExt resolveTermExt ga (addIdToRules o $ extendRules (varDeclTokens vs) ids) $ item at return $ FunDef o h at { item = nt } r -- | get constructors for input sort getConstrs :: FplSign -> SORT -> OpMap getConstrs sign resSort = MapSet.mapSet (Set.filter $ leqSort sign resSort . opRes) $ constr $ extendedInfo sign {- | This functions tries to recognize variables in case-patterns (application terms) after overload resolution. A current limitation is that a unique sort is needed as input that is taken from the term between @case@ and @of@. -} resolvePattern :: FplSign -> (SORT, FplTerm) -> Result ([VAR_DECL], FplTerm) resolvePattern sign (resSort, term) = let err msg = fail $ msg ++ " " ++ showDoc term "" in case term of Application opSym args p -> let ide@(Id ts _ _) = opSymbName opSym in case filter ( \ oTy -> length (opArgs oTy) == length args && case opSym of Qual_op_name _ symTy _ -> leqF sign oTy $ toOpType symTy _ -> True ) $ Set.toList $ MapSet.lookup ide $ getConstrs sign resSort of [] -> if null args && isSimpleId ide then let v = Var_decl [head ts] resSort $ posOfId ide in return ([v], toQualVar v) else err "unresolved pattern" [OpType k as r] -> do l <- mapM (resolvePattern sign) $ zip as args return (concatMap fst l, Application (Qual_op_name ide (Op_type k as r p) p) (map snd l) p) _ -> err "ambiguous pattern" Qual_var v s r -> if leqSort sign s resSort then return ([Var_decl [v] s r], term) else err "wrong type of pattern variable" Sorted_term t s r -> if leqSort sign s resSort then do (vs, nt) <- resolvePattern sign (s, t) return (vs, Sorted_term nt s r) else err "wrong typed pattern" _ -> err "unexpected pattern" addFunToSign :: FunDef -> State FplSign () addFunToSign (FunDef o h _ _) = maybe (return ()) (\ ty -> addOp (emptyAnno o) (toOpType ty) o) $ headToType h letVars :: FunDef -> [VAR_DECL] letVars (FunDef o (Op_head _ vs ms _) at ps) = [ Var_decl [idToSimpleId o] (sortOfTerm $ item at) ps | isSimpleId o && isNothing ms && null vs ] addFunVar :: FunDef -> State FplSign () addFunVar = mapM_ addVars . letVars {- | perform overload resolution after mixfix analysis. The type of patterns is deduced from the top term. Overlapping or exhaustive patterns are not recognized yet. -} minFplTerm :: Min TermExt SignExt minFplTerm sig te = case te of FixDef fd -> fmap FixDef $ minFunDef sig fd Case o l r -> do ro <- oneExpTerm minFplTerm sig o -- assume unique type of top-level term for now let s = sortOfTerm ro rl <- mapM (\ (p, t) -> do (vs, np) <- resolvePattern sig (s, p) appendDiags $ checkUniqueness . 
map fst $ flatVAR_DECLs vs let newSign = execState (mapM_ addVars vs) sig rt <- minExpTerm minFplTerm newSign t return (np, rt)) l let (ps, tts) = unzip rl cSupers tl = case tl of [] -> True hd : rt -> all (haveCommonSupersorts True sig (sortOfTerm hd) . sortOfTerm) rt && cSupers rt nts <- isUnambiguous (globAnnos sig) (map snd l) (map (filter cSupers . combine) $ combine tts) r let nl = zip ps nts minSort sl = if Set.null sl then Set.empty else let (hd, rt) = Set.deleteFindMin sl in Set.unions . map (Set.fromList . minimalSupers sig hd) . Set.toList $ Set.insert hd $ minSort rt mSort = minSort . Set.fromList $ map sortOfTerm nts case Set.toList mSort of [tSort] -> do fl <- mapM (\ (p, t) -> do let pvs = freeTermVars sig p tvs = freeTermVars sig t unused = Set.difference pvs tvs unless (Set.null unused) $ appendDiags $ map (mkDiag Warning "unused pattern variables") $ Set.toList unused return (p, mkSorted sig t tSort r)) nl return $ Case ro fl r sl -> mkError ("no common supersort for case terms: " ++ show sl) r Let fd t r -> do let newSign = execState (addFunToSign fd) sig rfd <- minFunDef newSign fd let sign2 = execState (addFunVar rfd) newSign rt <- oneExpTerm minFplTerm sign2 t return $ Let rfd rt r IfThenElse i t e r -> do ri <- oneExpTerm minFplTerm sig $ Sorted_term i boolSort r Strong_equation rt re _ <- minExpFORMULAeq minFplTerm sig Strong_equation t e r return $ IfThenElse ri rt re r EqTerm t e r -> do Strong_equation rt re _ <- minExpFORMULAeq minFplTerm sig Strong_equation t e r return $ EqTerm rt re r BoolTerm t -> fmap BoolTerm $ oneExpTerm minFplTerm sig t -- | type check rhs and assume function to be in the signature minFunDef :: Sign TermExt SignExt -> FunDef -> Result FunDef minFunDef sig fd@(FunDef o h@(Op_head _ vs ms _) at r) = do let newSign = execState (mapM_ addVars vs >> addFunToSign fd) sig varSign = execState (mapM_ addVars vs) $ emptySign emptyFplSign t = item at nt <- oneExpTerm minFplTerm newSign $ maybe t (\ s -> Sorted_term t s r) ms appendDiags $ warnUnusedVars " function " varSign $ freeTermVars newSign nt return $ FunDef o h at { item = nt } r getDDSorts :: [Annoted FplSortItem] -> [SORT] getDDSorts = foldl (\ l si -> case item si of FreeType (Datatype_decl s _ _) -> s : l CaslSortItem _ -> l) [] anaFplExt :: Ana FplExt FplExt () TermExt SignExt anaFplExt mix fe = case fe of FplSortItems ais r -> do mapM_ (\ s -> addSort NonEmptySorts (emptyAnno s) s) $ getDDSorts ais ns <- mapAnM (anaFplSortItem mix) ais closeSubsortRel return $ FplSortItems ns r FplOpItems ais r -> do ns <- mapAnM (anaFplOpItem mix) ais return $ FplOpItems ns r anaFplSortItem :: Ana FplSortItem FplExt () TermExt SignExt anaFplSortItem mix si = case si of FreeType dt@(Datatype_decl s aalts _) -> do ana_DATATYPE_DECL Free dt sign <- get let cm = getConstrs sign s updateExtInfo $ \ cs -> foldM (\ e aa -> let a = item aa in if isConsAlt a then do let (c, ty, _) = getConsType s a unless (MapSet.null cm) $ if Set.member (mkPartial ty) $ makePartial $ MapSet.lookup c cm then appendDiags [mkDiag Warning "repeated constructor" c] else mkError "illegal new constructor" c return e { constr = addOpTo c ty $ constr e } else mkError "unexpected subsort embedding" a) cs aalts return si CaslSortItem s -> fmap (CaslSortItem . 
item) $ ana_SORT_ITEM minFplTerm mix NonEmptySorts $ emptyAnno s anaFplOpItem :: Ana FplOpItem FplExt () TermExt SignExt anaFplOpItem mix oi = case oi of FunOp fd@(FunDef i oh@(Op_head _ vs r _) at ps) -> do let mty = headToType oh lb = getRLabel at addFunToSign fd e <- get -- save put e { varMap = Map.empty } mapM_ addVars vs sign <- get put e -- restore let Result ds mt = anaTerm minFplTerm mix sign r ps $ item at addDiags ds case mt of Nothing -> return $ maybe oi (\ ty -> CaslOpItem $ Op_decl [i] ty [] ps) mty Just (resT, anaT) -> do addSentences [(makeNamed lb $ ExtFORMULA $ FixDef $ FunDef i oh at { item = anaT } ps) { isAxiom = notImplied at, isDef = True }] return $ FunOp $ FunDef i oh at { item = resT } ps CaslOpItem o -> fmap (CaslOpItem . item) $ ana_OP_ITEM minFplTerm mix (emptyAnno o) freeFunDefVars :: Sign TermExt e -> FunDef -> VarSet freeFunDefVars s (FunDef _ (Op_head _ vs _ _) at _) = Set.difference (freeTermVars s $ item at) $ Set.fromList $ flatVAR_DECLs vs instance TermExtension TermExt where freeVarsOfExt s te = case te of FixDef fd -> freeFunDefVars s fd Case o l _ -> Set.unions $ freeTermVars s o : map (\ (p, t) -> Set.difference (freeTermVars s t) $ freeTermVars s p) l Let fd t _ -> Set.difference (Set.union (freeFunDefVars s fd) $ freeTermVars s t) $ Set.fromList $ flatVAR_DECLs $ letVars fd IfThenElse f t e _ -> Set.unions $ map (freeTermVars s) [f, t, e] EqTerm t e _ -> Set.unions $ map (freeTermVars s) [t, e] BoolTerm t -> freeTermVars s t optTermSort te = case te of Case _ ((_, t) : _) _ -> optTermSort t Let _ t _ -> optTermSort t IfThenElse _ t _ _ -> optTermSort t EqTerm _ _ _ -> Just boolSort BoolTerm t -> optTermSort t _ -> Nothing -- all others are formulas termToFormula t = let s = sortOfTerm t in if s == boolSort then return $ ExtFORMULA $ BoolTerm t else fail $ "expected boolean term but found sort: " ++ show s simplifyTermExt :: FplSign -> TermExt -> TermExt simplifyTermExt s te = let rec = simplifyTerm minFplTerm simplifyTermExt in case te of FixDef fd -> FixDef $ simplifyFunDef s fd Case o l r -> Case (rec s o) (map (\ (p, t) -> let vs = freeTermVars s p newSign = execState (mapM_ (uncurry $ flip addVar) $ Set.toList vs) s in (rec newSign p, rec newSign t)) l) r Let fd t r -> let newSign = execState (addFunToSign fd) s sign2 = execState (addFunVar fd) newSign in Let (simplifyFunDef newSign fd) (rec sign2 t) r IfThenElse f t e r -> IfThenElse (rec s f) (rec s t) (rec s e) r EqTerm t e r -> EqTerm (rec s t) (rec s e) r BoolTerm t -> BoolTerm (rec s t) simplifyFunDef :: FplSign -> FunDef -> FunDef simplifyFunDef sig fd@(FunDef o h@(Op_head _ vs _ _) at r) = let newSign = execState (mapM_ addVars vs >> addFunToSign fd) sig in FunDef o h (fmap (simplifyTerm minFplTerm simplifyTermExt newSign) at) r
nevrenato/Hets_Fork
Fpl/StatAna.hs
gpl-2.0
13,684
0
27
3,754
5,114
2,491
2,623
304
10
{-# OPTIONS -fglasgow-exts -fth #-} ---------------------------------------------------------------------------- -- | -- Module : Text.XML.Serializer.Deriver -- Copyright : (c) Simon Foster 2005 -- License : GPL version 2 (see COPYING) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : non-portable (ghc >= 6 only) -- -- A Simple TH XMLData(+Data,Typeable) deriver for Algebraic Data-types. -- -- @This file is part of HAIFA.@ -- -- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the -- GNU General Public License as published by the Free Software Foundation; either version 2 -- of the License, or (at your option) any later version.@ -- -- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without -- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details.@ -- -- @You should have received a copy of the GNU General Public License along with HAIFA; if not, -- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@ ---------------------------------------------------------------------------- module Text.XML.Serializer.Derive where import Language.Haskell.TH import Data.List import Data.Char import Data.Generics2 import Control.Monad import Data.Maybe import Text.XML.Serializer.Core import Text.XML.Serializer.Datatypes import Control.Monad import Network.URI data XCFilter = Decap -- Derive Data, Typeable and XMLData for the given data-type. Rather primitive at the moment, but should work for most data-types. xmlifyPrim :: [ExpQ] -> Name -> Q [Dec] xmlifyPrim funs name = do info' <- reify name let ftr = foldr (\x -> \y -> [|(.)|] `appE` x `appE` y) [| id |] funs case info' of TyConI d -> do (name, param, ca, terms) <- typeInfo ((return d) :: Q Dec) let typeQParams = map varT param let context = cxt [conT ''Data `appT` (conT ''DictXMLData `appT` (varT (mkName "hk"))) `appT` (foldl1 appT ([conT name] ++ typeQParams))] funcs <- [d| toXMLConstr q x = $ftr $ deriveXMLConstr q x |] sequence [instanceD context (conT ''XMLData `appT` varT (mkName "hk") `appT` (foldl1 appT ([conT name] ++ typeQParams))) (map return funcs)] _ -> do error "xmlify: Can only derive XMLData for data-type declarations" qualify :: String -> Name -> Q [Dec] qualify ns name = do info' <- reify name case info' of TyConI d -> do (name, param, ca, terms) <- typeInfo ((return d) :: Q Dec) let typeQParams = map varT param let context = cxt [] funcs <- [d| namespaceURI _ = parseURI ns |] sequence [instanceD context (conT ''XMLNamespace `appT` (foldl1 appT ([conT name] ++ typeQParams))) (map return funcs)] _ -> do error "qualify: Can only derive XMLNamespace for data-type declarations" qualifyP :: [Name] -> String -> String -> Q [Dec] qualifyP names ns p = mapM qual names >>= return . concat where qual name = do info' <- reify name case info' of TyConI d -> do (name, param, ca, terms) <- typeInfo ((return d) :: Q Dec) let typeQParams = map varT param let context = cxt [] funcs <- [d| namespaceURI _ = parseURI ns; defaultPrefix _ = p |] sequence [instanceD context (conT ''XMLNamespace `appT` (foldl1 appT ([conT name] ++ typeQParams))) (map return funcs)] _ -> do error "qualify: Can only derive XMLNamespace for data-type declarations" xmlify :: [Name] -> [ExpQ] -> Q [Dec] xmlify names f = do x <- mapM (xmlifyPrim f) names >>= return . 
concat d <- derive names return $ d++x xmlifyQ :: [Name] -> [ExpQ] -> String -> Q [Dec] xmlifyQ names f ns = do x <- mapM (xmlifyPrim f) names >>= return . concat y <- mapM (qualify ns) names >>= return . concat s <- derive names return $ x++y++s {-decapE = [| let dc (h:t) = toLower h:t in \x -> x{ elementNames = map dc (elementNames x) , attributeNames = map dc (attributeNames x) , defaultProp = case (defaultProp x) of Just (Elem n ns) -> Just (Elem (dc n) ns) Just (Attr n ns) -> Just (Attr (dc n) ns) x -> x } |]-} decapE = [| decap |]
twopoint718/haifa
src/Text/XML/Serializer/Derive.hs
gpl-2.0
5,060
0
24
1,719
1,074
567
507
-1
-1
module Keyboard ( keyDownEvents, Key( .. )) where

--import FRP.Helm.Keyboard (Key (..))
--import FRP.Elerea.Param
import Data.List (elemIndices)
import Foreign (alloca, peekArray, peek, Word8, Ptr)
import Foreign.C.Types
import Control.Applicative
import Data.Traversable (sequenceA)

foreign import ccall unsafe "SDL_GetKeyboardState"
  sdlGetKeyState :: Ptr CInt -> IO (Ptr Word8)

--TODO: Real definitions for these:
data Key = UpKey | DownKey | LeftKey | RightKey

keyDownEvents = undefined

-- getKBState :: IO [Int]
getKBState = alloca $ \numkeysPtr -> do
  keysPtr <- sdlGetKeyState numkeysPtr
  numkeys <- peek numkeysPtr
  (map fromIntegral . elemIndices 1) <$> peekArray (fromIntegral numkeys) keysPtr

{-
--isDown :: Key -> SignalGen p (Signal Bool)
isDown k = effectful $ elem (fromEnum k) <$> getKBState

keyDownEvents1 :: Key -> SignalGen p (Signal [Key])
keyDownEvents1 k = (liftA2.liftA2) f (isDown k) (wasUp k)
  where
    f True True = [k]
    f _ _ = []
    wasUp k = isDown k >>= delay True . fmap not

keyDownEvents :: [Key] -> SignalGen p (Signal [Key])
keyDownEvents ks = do
  k <- sequenceA $ fmap keyDownEvents1 ks
  return $ fmap concat $ sequenceA k
-}
ZSarver/DungeonDash
src/Keyboard.hs
gpl-3.0
1,176
0
12
206
199
112
87
14
1
module Experiment.GenExperiment where

import Prelude
import Data.Char

data Frnt = CDSL | QDSL
  deriving Show

lower :: String -> String
lower = fmap toLower

main :: IO ()
main = sequence_ $ flip map
  ([(e,f,s) | e <- ["CRC","FFT","IPBW","IPGray","Windowing"]
            , f <- [CDSL,QDSL]
            , s <- [False,True]])
  (\ (e,f,s) ->
     let ff = "import Prelude\n" ++
              "import Experiment.Common\n"++
              "import QFeldspar." ++ show f ++ "\n" ++
              "import Examples." ++e++ "." ++ show f ++ "\n" ++
              "main :: IO ()\n" ++
              "main = writeFile \"Experiment/" ++ e ++ show f ++ show s ++".c\"" ++
              " (header++(compileF True "++show s++" "++lower e ++")++loader"++e++")"
     in writeFile ("Experiment/run"++e++show f++show s++".hs") ff
  )
shayan-najd/QFeldspar
Experiment/GenExperiment.hs
gpl-3.0
886
0
34
306
300
159
141
24
1
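-- Editor's note (not part of the original shayan-najd/QFeldspar entry above):
-- tracing the string template, for e = "CRC", f = CDSL, s = False the generator
-- writes "Experiment/runCRCCDSLFalse.hs" containing
--
--   import Prelude
--   import Experiment.Common
--   import QFeldspar.CDSL
--   import Examples.CRC.CDSL
--   main :: IO ()
--   main = writeFile "Experiment/CRCCDSLFalse.c" (header++(compileF True False crc)++loaderCRC)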
module GtkUtils where import Graphics.UI.Gtk import Control.Applicative import Data.Maybe import Monad2 import Data.ByteString (ByteString) scrollIt, scrollItV :: WidgetClass widget => widget -> PolicyType -> PolicyType -> IO ScrolledWindow scrollIt widget pol1 pol2 = do scroll <- scrolledWindowNew Nothing Nothing scrolledWindowSetPolicy scroll pol1 pol2 containerAdd scroll widget return scroll scrollItV widget pol1 pol2 = do scroll <- scrolledWindowNew Nothing Nothing scrolledWindowSetPolicy scroll pol1 pol2 scrolledWindowAddWithViewport scroll widget vp <- binGetChild scroll whenJust vp $ \vp2 -> set (castToViewport vp2) [ viewportShadowType := ShadowNone ] set scroll [ scrolledWindowShadowType := ShadowNone ] return scroll class GeneralTreeView c where getElementPath :: TreeModelClass (a b) => c a b -> [Int] -> IO b getElementIter :: TreeModelClass (a b) => c a b -> TreeIter -> IO b getStore :: TreeModelClass (a b) => c a b -> a b getView :: c a b -> TreeView instance GeneralTreeView GenSimple where getView (GenSimple _ view) = view getElementIter (GenSimple store _) = treeModelGetRow store getElementPath (GenSimple store _) path = treeModelGetRow store =<< getIterUnsafe store path getStore (GenSimple store _) = store instance GeneralTreeView GenFilterSort where getStore (GenFilterSort store _ _ _) = store getView (GenFilterSort _ _ _ view) = view getElementPath g@(GenFilterSort _ _ sorted _) path = getElementIter g =<< getIterUnsafe sorted path getElementIter (GenFilterSort store filtered sorted _) it = treeModelSortConvertIterToChildIter sorted it >>= treeModelFilterConvertIterToChildIter filtered >>= treeModelGetRow store data GenCellRend i = RendText2 ByteString (i -> [AttrOp CellRendererText]) | RendMarkup ByteString (i -> [AttrOp CellRendererText]) | RendPixbuf2 (i -> [AttrOp CellRendererPixbuf]) data GenSimple store a where GenSimple :: (TypedTreeModelClass store, TreeModelClass (store a)) => !(store a) -> !TreeView -> GenSimple store a data GenFilterSort store a where GenFilterSort :: ( TreeModelClass (store a), TypedTreeModelClass store , TreeModelClass filter, TreeModelFilterClass filter , TreeModelClass sort, TreeModelSortClass sort, TreeSortableClass sort) => !(store a) -> !filter -> !sort -> !TreeView -> GenFilterSort store a newGenSimple :: (TypedTreeModelClass store, TreeModelClass (store a)) => store a -> IO (GenSimple store a) newGenSimple store = do view <- treeViewNewWithModel store return (GenSimple store view) newGenFilterSort :: (TypedTreeModelClass store, TreeModelClass (store a)) => store a -> IO (GenFilterSort store a) newGenFilterSort store = do filtered <- treeModelFilterNew store [] sorted <- treeModelSortNewWithModel filtered view <- treeViewNewWithModel sorted treeSortableSetDefaultSortFunc sorted Nothing return (GenFilterSort store filtered sorted view) addColumn :: GenSimple a e -> String -> Bool -> [AttrOp CellRendererText] -> (CellRendererText -> e -> IO ()) -> IO Int addColumn gen@(GenSimple store view) title expand rendOpts f = do col <- treeViewColumnNew set col [ treeViewColumnTitle := title , treeViewColumnExpand := expand ] rend <- fastCellTextRenderer set rend rendOpts cellLayoutPackStart col rend True cellLayoutSetAttributeFunc col rend store $ \iter -> do item <- getElementIter gen iter f rend item treeViewAppendColumn view col addColumnFS :: CellRendererClass rend => GenFilterSort a e -> String -> Bool -> Maybe (e -> e -> Ordering) -> (TreeViewColumn -> IO ()) -> IO rend -> [AttrOp rend] -> (rend -> e -> IO ()) -> IO () addColumnFS 
gen@(GenFilterSort store filtered sorted view) title expand sortf action mkRend rendOpts f = do col <- treeViewColumnNew set col [ treeViewColumnTitle := title , treeViewColumnExpand := expand ] afterColClicked col (action col) rend <- mkRend set rend rendOpts cellLayoutPackStart col rend True cellLayoutSetAttributeFunc col rend sorted $ \iter -> do item <- getElementIter gen iter f rend item n <- pred <$> treeViewAppendColumn view col whenJust sortf $ \g -> do treeViewColumnSetSortColumnId col n treeSortableSetSortFunc sorted n $ \it1 it2 -> do rit1 <- treeModelFilterConvertIterToChildIter filtered it1 rit2 <- treeModelFilterConvertIterToChildIter filtered it2 g <$> treeModelGetRow store rit1 <*> treeModelGetRow store rit2 fastCellTextRenderer :: IO CellRendererText fastCellTextRenderer = do rend <- cellRendererTextNew cellRendererTextSetFixedHeightFromFont rend 1 return rend getIterUnsafe :: TreeModelClass self => self -> TreePath -> IO TreeIter getIterUnsafe model path = fromMaybe (error "getElement: Imposssible error") <$> treeModelGetIter model path gtkPopup :: MessageType -> Window -> String -> IO () gtkPopup what win str = do a <- messageDialogNew Nothing [DialogDestroyWithParent, DialogModal] what ButtonsOk str windowSetPosition a WinPosCenterOnParent windowSetTransientFor a win dialogRun a widgetDestroy a gtkWarn, gtkError :: Window -> String -> IO () gtkWarn = gtkPopup MessageWarning gtkError = gtkPopup MessageError
Cadynum/Apelsin
src/GtkUtils.hs
gpl-3.0
5,219
146
14
913
1,765
870
895
-1
-1
{-
Implement the functions distance, which computes the distance between two
points with real-valued coordinates, and manhDistance, which computes the
Manhattan distance between two points with integer coordinates.
-}
module Demo where

data Coord a = Coord a a deriving (Show)

distance :: Coord Double -> Coord Double -> Double
distance (Coord x1 y1) (Coord x2 y2) = sqrt ((x1-x2)^2 + (y1-y2)^2)

manhDistance :: Coord Int -> Coord Int -> Int
manhDistance (Coord x1 y1) (Coord x2 y2) = (abs (x1-x2)) + (abs (y1-y2))
devtype-blogspot-com/Haskell-Examples
Distance/Demo.hs
gpl-3.0
509
0
11
83
176
92
84
6
1
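-- Editor's usage sketch (not part of the original devtype-blogspot-com entry
-- above): the two metrics from that Demo module on a 3-4-5 example.
--
--   distance     (Coord 0.0 0.0) (Coord 3.0 4.0) == 5.0
--   manhDistance (Coord 1 2)     (Coord 4 6)     == 7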
{-| Module : Interface.Nasm Description : Copyright : 2014, Jonas Cleve 2015, Tay Phuong Ho 2016, Philip Schmiel License : GPL-3 -} module Interface.Nasm ( NasmCode (..), Register (..), FRegister (..), Location (..), Operand (..), Immediate (..), Instruction (..), locationIsRegister, locationIsStackLocation, operandIsImmediate, operandIsImmediateDouble, operandIsLocation, operandIsRegister, operandIsFRegister, operandIsStackLocation, mov, add, sub, imul, imul', idiv, cmp, neg, push, pop, shl, sar, instr, mov', fmov, fadd, fsub, fmul, fdiv, fcmp, fneg ) where import Prelude ( Eq, Show, Ord, String, Int, Bool (..), Maybe (..), show, otherwise, not, (++), (*), (||), (&&), Double, Char, error, ($) ) import Data.Int ( Int64 ) import Data.Char ( toLower ) import Data.Functor ( (<$>) ) import Data.List ( intercalate ) -- | A class for nasm code. class NasmCode c where toCode :: c -> String -- | The sixteen registers available on 64bit x86 processors minus the two stack -- registers RBP and RSP. data Register = RAX | RBX | RCX | RDX | RSI | RDI | R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15 deriving (Eq, Ord, Show) -- | Show instance NasmCode Register where toCode r = toLower <$> show r -- $| The 16 floating point registers available on 64bit x86 processors. data FRegister = XMM0 | XMM1 | XMM2 | XMM3 | XMM4 | XMM5 | XMM6 | XMM7 | XMM8 | XMM9 | XMM10 | XMM11 | XMM12 | XMM13 | XMM14 | XMM15 deriving (Eq, Ord, Show) -- $| Show instance NasmCode FRegister where toCode r = toLower <$> show r -- | A data location (either register or memory)- data Location = Register Register -- ^ A processor register | FRegister FRegister -- $ A floating point register | StackLocation Int -- ^ A memory location on the stack relative to the base -- pointer. deriving (Eq, Ord, Show) -- | Print the location so it can be used directly in the code. instance NasmCode Location where toCode (Register r) = toCode r toCode (FRegister r) = toCode r -- $ added toCode (StackLocation i) = "[rbp-" ++ show (i*8) ++ "]" -- | Immediate Values as operands. data Immediate = ImmediateInt Int64 -- $ modified | ImmediateDouble Double -- $ added | ImmediateChar Char | ImmediateReference String deriving (Eq, Ord, Show) -- $| Show the immediate. instance NasmCode Immediate where toCode (ImmediateInt i) = show i -- $ modified toCode (ImmediateDouble i) = show i -- $ added toCode (ImmediateChar c) = show c toCode (ImmediateReference s) = s -- | General operands for assembly instructions. data Operand = Location Location | Immediate Immediate deriving (Eq, Ord, Show) -- | Use the respective 'toCode' functions. instance NasmCode Operand where toCode (Location l) = toCode l toCode (Immediate i) = toCode i -- | Returns whether a given location is a register. locationIsRegister :: Location -> Bool locationIsRegister (Register _) = True locationIsRegister (FRegister _) = True -- $ added locationIsRegister _ = False -- | Returns whether a given location is a stack location. locationIsStackLocation :: Location -> Bool locationIsStackLocation (StackLocation _) = True locationIsStackLocation _ = False -- | Returns whether a given operand is a register. operandIsRegister :: Operand -> Bool operandIsRegister (Location (Register _)) = True operandIsRegister (Location (FRegister _)) = True -- $ added operandIsRegister _ = False -- $| Returns whether a given operand is a floating point register. 
operandIsFRegister :: Operand -> Bool operandIsFRegister (Location (FRegister _)) = True operandIsFRegister _ = False -- | Returns whether a given operand is a stack location. operandIsStackLocation :: Operand -> Bool operandIsStackLocation (Location (StackLocation _)) = True operandIsStackLocation _ = False -- | Returns whether a given operand is an immediate value. operandIsImmediate :: Operand -> Bool operandIsImmediate (Immediate _) = True operandIsImmediate _ = False -- $| Returns whether a given operand is an immediate double value. operandIsImmediateDouble :: Operand -> Bool operandIsImmediateDouble (Immediate (ImmediateDouble _)) = True operandIsImmediateDouble _ = False -- | Returns whether a given operand is a location type. operandIsLocation :: Operand -> Bool operandIsLocation (Location _) = True operandIsLocation _ = False data Instruction = Mov Operand Operand | Mov' Operand -- $ added | FMov Operand Operand -- $ added | Add Operand Operand | FAdd Operand Operand -- $ added | Sub Operand Operand | FSub Operand Operand -- $ added | IMul Operand Operand | IMul' Operand Operand Operand | FMul Operand Operand -- $ added | IDiv Operand | FDiv Operand Operand -- $ added | Cmp Operand Operand | FCmp Operand Operand -- $ added | Neg Operand | FNeg Operand -- $ added | Push Operand | Pop Operand | Shl Operand Operand | Sar Operand Operand | Instr String | Call String | DATA Operand | Solve Operand Operand String | MCall Operand Operand String instance NasmCode Instruction where toCode (Mov o1 o2) | isFR o1 || isFR o2 = "movq " ++ toCode o1 ++ ", " ++ toCode o2 -- $ added | isM o1 && not (isR o2) = "mov QWORD " ++ toCode o1 ++ ", " ++ toCode o2 | otherwise = format "mov" [o1, o2] toCode (Mov' o1) -- $ added | not (isD o1) = "mov rax, __float64__(" ++ toCode o1 ++ ".0)" | otherwise = "mov rax, __float64__(" ++ toCode o1 ++ ")" toCode (FMov o1 o2) = "movsd " ++ toCode o1 ++ ", " ++ toCode o2 -- $ added toCode (Add o1 o2) = format "add" [o1, o2] toCode (FAdd o1 o2) = format "addsd" [o1, o2] -- $ added toCode (Sub o1 o2) = format "sub" [o1, o2] toCode (FSub o1 o2) = format "subsd" [o1, o2] -- $ added toCode (IMul o1 o2) = format "imul" [o1, o2] toCode (IMul' o1 o2 o3) = format "imul" [o1, o2, o3] toCode (FMul o1 o2) = format "mulsd" [o1, o2] -- $ added toCode (IDiv o1) | isM o1 = "idiv QWORD " ++ toCode o1 | otherwise = format "idiv" [o1] toCode (FDiv o1 o2) = format "divsd" [o1, o2] -- $ added toCode (Cmp o1 o2) = format "cmp" [o1, o2] toCode (FCmp o1 o2) = format "comisd" [o1, o2] -- $ added toCode (Neg o1) | isM o1 = "neg QWORD " ++ toCode o1 | otherwise = format "neg" [o1] toCode (FNeg o1) = "movsd xmm0, [sign_mask]\npxor " ++ toCode o1 ++ ", xmm0" -- $ added toCode (Push o1) -- $ modified | isM o1 = "push QWORD " ++ toCode o1 | otherwise = "push " ++ toCode o1 toCode (Pop o1) -- $ modified | isM o1 = "pop QWORD " ++ toCode o1 | otherwise = "pop " ++ toCode o1 toCode (Shl o1 o2) | isM o1 = format "shl QWORD" [o1, o2] | otherwise = format "shl" [o1, o2] toCode (Sar o1 o2) | isM o1 = format "sar QWORD" [o1, o2] | otherwise = format "sar" [o1, o2] toCode (Instr s) = s toCode (Call s) = "call " ++ s toCode (DATA o) = "dq "++ toCode o toCode (Solve oTo oFrom label) = "multipush rbx, r8, r9, r10, r11\n"++ ";get Index\n"++ "mov RBX, "++toCode oFrom++"\n"++ "mov R8, [RBX]\n"++ "mov R9, [R8 +8]\n"++ "mov R10, ["++label++" + 16]\n"++ "mov R11, [R9 +8 + R10*8]\n"++ "; TODO check R11 == "++label++"\n"++ "mov R9, [R8 + 16]\n"++ "mov R11, [R9+8+R10 *8]\n"++ "mov RAX, [RBX + R11]\n"++ "multipop rbx, 
r8, r9, r10, r11\n"++ "mov "++toCode oTo++", RAX\n" toCode (MCall oTo oFrom label) = "multipush r8, r9, r10, r11\n"++ "mov r8, ["++ toCode oFrom++"]\n"++ "add r8, 24\n"++ "mov r9, [r8]\n"++ "mov r11, "++label++"\n"++ "add r11,16\n"++ "mov r10, [r11]\n"++ "imul r10, 8\n"++ "add r10,8\n"++ "mov r11, r9\n"++ "add r11, r10\n"++ "cmp QWORD [r11], "++label++"\n"++ "jne alloc_error ; TODO change error prompt\n"++ "add r8, 32\n"++ "mov r9, [r8]\n"++ "mov r11, r9\n"++ "add r11, r10\n"++ "mov r8, [r11]\n"++ "call r8\n"++ "pop rax\n"++ "multipop r8,r9,r10,r11\n"++ "mov "++ toCode oTo++", rax" format :: String -> [Operand] -> String format ins ops = ins ++ " " ++ intercalate ", " (toCode <$> ops) isL, isM, isR, isFR, isI, isD :: Operand -> Bool isL = operandIsLocation isM = operandIsStackLocation isR = operandIsRegister isFR= operandIsFRegister -- $ added isI = operandIsImmediate isD = operandIsImmediateDouble -- $ added imul' :: Operand -> Operand -> Operand -> Maybe Instruction mov, fmov, add, fadd, sub, fsub, imul, fmul, fdiv, cmp, fcmp, shl, sar :: Operand -> Operand -> Maybe Instruction mov', idiv, push, pop, neg, fneg :: Operand -> Maybe Instruction instr :: String -> Maybe Instruction mov o1 o2 | isL o1 && not (isM o2) || isR o1 = Just (Mov o1 o2) | otherwise = error $ "mov "++show o1++", "++show o2++" is not supported" mov' o1 -- $ added | isI o1 = Just (Mov' o1) | otherwise = error $ "mov' "++show o1++" is not supported" fmov o1 o2 -- $ added | isL o1 && isR o2 || isR o1 && isL o2 = Just (FMov o1 o2) | otherwise = error $ "fmov "++show o1 ++ " "++show o2++ " is not supported" add o1 o2 | isL o1 && not (isM o2) || isR o1 = Just (Add o1 o2) | otherwise = error $ "add "++show o1++" "++show o2++ " is not supported" fadd o1 o2 -- $ added | isR o1 && isR o2 = Just (FAdd o1 o2) | otherwise = error $ "fadd "++ show o1 ++ " "++ show o2++ " is not supported" sub o1 o2 | isL o1 && not (isM o2) || isR o1 = Just (Sub o1 o2) | otherwise = error $ "sub "++ show o1 ++ " " ++ show o2++ " is not supported" fsub o1 o2 -- $ added | isR o1 && isR o2 = Just (FSub o1 o2) | otherwise = error $ "fsub "++ show o1 ++" "++ show o2 ++ " is not supported" imul o1 o2 | isR o1 && isL o2 = Just (IMul o1 o2) | otherwise = error $ "imul "++ show o1 ++ " "++ show o2++ " is not supported" imul' o1 o2 o3 | isR o1 && isL o2 && isI o3 = Just (IMul' o1 o2 o3) | otherwise = error $ "imul' "++ show o1 ++ " "++ show o2++ " " ++ show o3++ " is not supported" fmul o1 o2 -- $ added | isR o1 && isR o2 = Just (FMul o1 o2) | otherwise = error $ "fmul "++ show o1 ++ " " ++ show o2 ++ " is not supported" idiv o1 | isL o1 = Just (IDiv o1) | otherwise = error $ "idiv "++ show o1++ " is not supported" fdiv o1 o2 -- $ added | isR o1 && isR o2 = Just (FDiv o1 o2) | otherwise = error $ "fdiv "++ show o1 ++ " "++ show o2++ " is not supported" cmp o1 o2 | isR o1 || isL o1 && not (isM o2) = Just (Cmp o1 o2) | otherwise = error $ "cmp "++ show o1 ++ " "++ show o2++ " is not supported" fcmp o1 o2 -- $ added | isR o1 && isL o2 = Just (FCmp o1 o2) | otherwise = error $ "fcmp "++ show o1 ++ " "++show o2++" is not supported" neg o1 | isL o1 = Just (Neg o1) | otherwise = error $"neg "++ show o1++" is not supported" fneg o1 -- $ added | isFR o1 = Just (FNeg o1) | otherwise = error $ "fneg "++ show o1++" is not supported" push o1 | not (isFR o1) = Just (Push o1) | otherwise = error $ "push "++ show o1++ "is not supported" pop o1 | isL o1 && not (isFR o1) = Just (Pop o1) -- $ modified | otherwise = error $ "pop "++ show o1 ++" is not supported" shl o1 o2 | isL 
o1 && isI o2 = Just (Shl o1 o2) | otherwise = error $ "shl "++ show o1 ++" "++ show o2++" is not supported" sar o1 o2 | isL o1 && isI o2 = Just (Sar o1 o2) | otherwise =error $ "sar "++show o1 ++" "++show o2++ "is not supported" instr i = Just (Instr i) {- Argument types for the various assembler instructions: MOV 1 = 2 reg64/mem64 , reg64 reg64 , reg64/mem64 reg64 , imm64 reg64/mem64 , imm32 ADD 1 = 1 + 2 reg64/mem64 , imm32 reg64/mem64 , imm8 reg64/mem64 , reg64 reg64 , reg64/mem64 SUB 1 = 1 - 2 reg64/mem64 , imm32 reg64/mem64 , imm8 reg64/mem64 , reg64 reg64 , reg64/mem64 IMUL 1 = 1 * 2 reg64 , reg64/mem64 1 = 2 * 3 reg64 , reg64/mem64 , imm32 IDIV RAX = RDX:RAX / 1 ; RDX = RDX:RAX % 1 reg64/mem64 CMP reg64/mem64 , imm32 reg64/mem64 , reg64 reg64 , reg64/mem64 -------------------------------------------------------------------------------- 18 POSSIBLE COMBINATIONS FROM TAC R = R + R R = R + M R = R + I R = M + R R = M + M R = M + I R = I + R R = I + M M = R + R M = R + M M = R + I M = M + R M = M + M M = M + I M = I + R M = I + M REMOVED BY PRIOR OPTIMIZATION: R = I + I M = I + I -}
Potregon/while
src/Interface/Nasm.hs
gpl-3.0
13,716
44
34
4,378
4,055
2,068
1,987
293
1
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.

import Test.Hspec

main :: IO ()
main = hspec $ do
  describe "Testing framework" $ do
    it "runs" $ do
      True `shouldBe` True
tel/hotep
test/Spec.hs
mpl-2.0
337
0
14
74
57
29
28
6
1
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}

{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}

-- |
-- Module      : Network.Google.Resource.Books.MyConfig.UpdateUserSettings
-- Copyright   : (c) 2015-2016 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <[email protected]>
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the settings for the user. If a sub-object is specified, it will
-- overwrite the existing sub-object stored in the server. Unspecified
-- sub-objects will retain the existing value.
--
-- /See:/ <https://code.google.com/apis/books/docs/v1/getting_started.html Books API Reference> for @books.myconfig.updateUserSettings@.
module Network.Google.Resource.Books.MyConfig.UpdateUserSettings
    (
    -- * REST Resource
      MyConfigUpdateUserSettingsResource

    -- * Creating a Request
    , myConfigUpdateUserSettings
    , MyConfigUpdateUserSettings

    -- * Request Lenses
    , mcuusXgafv
    , mcuusUploadProtocol
    , mcuusAccessToken
    , mcuusUploadType
    , mcuusPayload
    , mcuusCallback
    ) where

import Network.Google.Books.Types
import Network.Google.Prelude

-- | A resource alias for @books.myconfig.updateUserSettings@ method which the
-- 'MyConfigUpdateUserSettings' request conforms to.
type MyConfigUpdateUserSettingsResource =
     "books" :>
       "v1" :>
         "myconfig" :>
           "updateUserSettings" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] UserSettings :>
                           Post '[JSON] UserSettings

-- | Sets the settings for the user. If a sub-object is specified, it will
-- overwrite the existing sub-object stored in the server. Unspecified
-- sub-objects will retain the existing value.
--
-- /See:/ 'myConfigUpdateUserSettings' smart constructor.
data MyConfigUpdateUserSettings =
  MyConfigUpdateUserSettings'
    { _mcuusXgafv :: !(Maybe Xgafv)
    , _mcuusUploadProtocol :: !(Maybe Text)
    , _mcuusAccessToken :: !(Maybe Text)
    , _mcuusUploadType :: !(Maybe Text)
    , _mcuusPayload :: !UserSettings
    , _mcuusCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'MyConfigUpdateUserSettings' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcuusXgafv'
--
-- * 'mcuusUploadProtocol'
--
-- * 'mcuusAccessToken'
--
-- * 'mcuusUploadType'
--
-- * 'mcuusPayload'
--
-- * 'mcuusCallback'
myConfigUpdateUserSettings
    :: UserSettings -- ^ 'mcuusPayload'
    -> MyConfigUpdateUserSettings
myConfigUpdateUserSettings pMcuusPayload_ =
  MyConfigUpdateUserSettings'
    { _mcuusXgafv = Nothing
    , _mcuusUploadProtocol = Nothing
    , _mcuusAccessToken = Nothing
    , _mcuusUploadType = Nothing
    , _mcuusPayload = pMcuusPayload_
    , _mcuusCallback = Nothing
    }

-- | V1 error format.
mcuusXgafv :: Lens' MyConfigUpdateUserSettings (Maybe Xgafv)
mcuusXgafv
  = lens _mcuusXgafv (\ s a -> s{_mcuusXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
mcuusUploadProtocol :: Lens' MyConfigUpdateUserSettings (Maybe Text)
mcuusUploadProtocol
  = lens _mcuusUploadProtocol
      (\ s a -> s{_mcuusUploadProtocol = a})

-- | OAuth access token.
mcuusAccessToken :: Lens' MyConfigUpdateUserSettings (Maybe Text)
mcuusAccessToken
  = lens _mcuusAccessToken
      (\ s a -> s{_mcuusAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
mcuusUploadType :: Lens' MyConfigUpdateUserSettings (Maybe Text)
mcuusUploadType
  = lens _mcuusUploadType
      (\ s a -> s{_mcuusUploadType = a})

-- | Multipart request metadata.
mcuusPayload :: Lens' MyConfigUpdateUserSettings UserSettings
mcuusPayload
  = lens _mcuusPayload (\ s a -> s{_mcuusPayload = a})

-- | JSONP
mcuusCallback :: Lens' MyConfigUpdateUserSettings (Maybe Text)
mcuusCallback
  = lens _mcuusCallback
      (\ s a -> s{_mcuusCallback = a})

instance GoogleRequest MyConfigUpdateUserSettings where
        type Rs MyConfigUpdateUserSettings = UserSettings
        type Scopes MyConfigUpdateUserSettings =
             '["https://www.googleapis.com/auth/books"]
        requestClient MyConfigUpdateUserSettings'{..}
          = go _mcuusXgafv _mcuusUploadProtocol
              _mcuusAccessToken
              _mcuusUploadType
              _mcuusCallback
              (Just AltJSON)
              _mcuusPayload
              booksService
          where go
                  = buildClient
                      (Proxy :: Proxy MyConfigUpdateUserSettingsResource)
                      mempty
brendanhay/gogol
gogol-books/gen/Network/Google/Resource/Books/MyConfig/UpdateUserSettings.hs
mpl-2.0
5,311
0
18
1,203
715
418
297
108
1
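The module above is generated servant-style plumbing and does not show how a request is issued. A hedged usage sketch with the gogol core library: the environment setup follows the usual gogol pattern, but the exact scope value name exported by gogol-books is an assumption and may differ:

-- Sketch only, not from the original repository.
-- 'newEnv', 'envScopes', 'runGoogle', and 'send' are the standard gogol
-- core entry points; 'booksScope' is an assumed name for the generated
-- '["https://www.googleapis.com/auth/books"] scope value.
import Control.Lens ((.~), (<&>))
import Control.Monad.Trans.Resource (runResourceT)
import Network.Google
import Network.Google.Books

updateSettings :: UserSettings -> IO UserSettings
updateSettings settings = do
  env <- newEnv <&> (envScopes .~ booksScope)
  runResourceT . runGoogle env $
    send (myConfigUpdateUserSettings settings)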
-- http://hackage.haskell.org/package/base-4.7.0.0/docs/System-IO-Unsafe.html#v:unsafePerformIO
{-# OPTIONS_GHC -fno-cse -fno-full-laziness #-}

{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE BangPatterns #-}

module Data.Function.Decorator.Memoizer.Unsafe where

import Prelude hiding (lookup)

import Control.Applicative
import Data.IORef
import qualified Data.Map.Strict as Map
import Data.Proxy
import System.IO.Unsafe

import Data.Function.Decorator.Curry
import Data.Function.Decorator.Memoizer
import Data.Function.Decorator.Unsafe

----------------------------------------------------------------

{-# NOINLINE unsafeMemoize #-}
unsafeMemoize ::
  ( UnsafePurifiable n t
  , Ord (Args n t)
  ) => Proxy n -> t -> t
unsafeMemoize p = unsafePurify p makeDecorator
  where
    makeDecorator = do
      cacheRef <- newIORef Map.empty
      let lookup args = Map.lookup args <$> readIORef cacheRef
          insert args ret = modifyIORef' cacheRef (Map.insert args ret)
      return $ simpleMemoize lookup insert

----------------------------------------------------------------

-- Original version not using 'unsafePurify'.
--
-- Might be easier to understand ...

{-# NOINLINE unsafeMemoize' #-}
unsafeMemoize' :: forall n t.
  ( CurryUncurry n t
  , UncurryCurry n (Args n t) (IO (Ret n t))
  , UncurryMCurry (Args n t) IO (Ret n t)
  , Ord (Args n t)
  ) => Proxy n -> t -> t
unsafeMemoize' p f = unsafePerformIO $ do
  cacheRef <- newIORef Map.empty
  let lookup args = Map.lookup args <$> readIORef cacheRef
      insert args ret = modifyIORef' cacheRef (Map.insert args ret)
  return $
    compose p unsafePerformIO' .
    simpleMemoize lookup insert $
    compose p return' f
  where
    -- Polymorphism confuses 'compose'.
    return' :: Ret n t -> IO (Ret n t)
    unsafePerformIO' :: IO (Ret n t) -> Ret n t
    -- Subtle: make our 'return' strict to ensure that effects in
    -- recursive calls wrapped in 'unsafePerformIO' are correctly
    -- sequenced.
    return' !x = return x
    unsafePerformIO' = unsafePerformIO
ntc2/haskell-call-trace
src/Data/Function/Decorator/Memoizer/Unsafe.hs
mpl-2.0
2,221
0
15
452
503
265
238
52
1
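The module does not show how the memoizer is applied. A hedged usage sketch: the arity-index Proxy and the knot-tying recursion pattern below are assumptions based on the Args/Ret constraint names, not code from the package:

-- Sketch only; assumes the Proxy argument names the function's arity
-- (here 1) as a type-level number, which the 'Args n t' / 'Ret n t'
-- families suggest.
{-# LANGUAGE DataKinds #-}
import Data.Proxy (Proxy(..))
import Data.Function.Decorator.Memoizer.Unsafe (unsafeMemoize)

slowFib :: Integer -> Integer
slowFib 0 = 0
slowFib 1 = 1
slowFib n = fib (n - 1) + fib (n - 2)

-- Tie the knot through the memoized version so recursive calls hit the cache.
fib :: Integer -> Integer
fib = unsafeMemoize (Proxy :: Proxy 1) slowFib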
func
  :: Int -- basic indentation amount
  -> Int -- currently used width in current line (after indent)
         -- used to accurately calc placing of the current-line
  -> LayoutDesc
  -> Int
lspitzner/brittany
data/Test275.hs
agpl-3.0
195
0
7
48
21
12
9
5
0
func =
  [ (thing, take 10 alts) --TODO: select best ones
  | (thing, _got, alts@(_ : _)) <- nosuchFooThing
  , gast <- award
  ]
lspitzner/brittany
data/Test280.hs
agpl-3.0
153
0
11
55
56
32
24
4
1
isDivisibleByAll :: [Integer] -> Integer -> Bool
isDivisibleByAll divisors num = all ((==0) . rem num) divisors

main = do
    let divisors = [1..20]
    print $ head $ filter (isDivisibleByAll divisors) [1..]
ulikoehler/ProjectEuler
Euler5.hs
apache-2.0
209
0
10
40
90
46
44
5
1
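The brute-force search above scans every positive integer. The same answer can be computed directly with the Prelude's lcm; this alternative is an illustration, not part of the original solution file:

-- Least common multiple of 1..20, folded with the Prelude's lcm.
main :: IO ()
main = print (foldr1 lcm [1 .. 20 :: Integer])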
{--

Copyright (c) 2014-2020, Clockwork Dev Studio
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

--}

{-# LANGUAGE CPP #-}

module CompilerData where

import ParserData
import SemanticsData
import DWARF

import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Sequence as Seq

data AsmBucket = AsmBucket
  {
    asmBucketContents :: Seq.Seq String
  } deriving (Show)

data Asm = Asm
  {
    asmCurrentBucket :: String,
    asmBucketMap :: Map.Map String AsmBucket
  } deriving (Show)

arrayMaxDimensionality :: Int
arrayMaxDimensionality = 16

data Register = Register
  {
    registerName :: String,
    registerAllocations :: Int,
    registerReservedForFunctionCall :: Bool,
    registerExcluded :: Bool
  }
  | NO_REGISTER
    deriving (Show, Eq)

data CPUContext = CPUContext
  {
    cpuContextCurrentRegister :: Register,
    cpuContextSuggestedRegisters :: [Register],
    cpuContextNumFloatRegisters :: Int,
    cpuContextPool :: [Register],
    cpuContextOffset :: Int,
    cpuContextAligned :: Bool,
    cpuContextDataType :: VariableType,
    cpuContextFunctionCallContexts :: [FunctionCallContext]
  } deriving (Show)

data FunctionCallContext = FunctionCallContext
  {
    functionCallContextFloatOffset :: Int,
    functionCallContextLeakyOffset :: Int
  } deriving (Show)

{--
getDWARFAbbreviation :: [Char] -> DWARFAbbreviation
getDWARFAbbreviation key = fromJust (Map.lookup key idlewildLangAbbreviations)
--}

rax = Register "rax" 0 False False
rbx = Register "rbx" 0 False False
rbp = Register "rbp" 0 False False
r12 = Register "r12" 0 False False
r13 = Register "r13" 0 False False
r14 = Register "r14" 0 False False
r15 = Register "r15" 0 False False
r8 = Register "r8" 0 False False
r9 = Register "r9" 0 False False
rcx = Register "rcx" 0 False False
rdx = Register "rdx" 0 False False
rsi = Register "rsi" 0 False False
rdi = Register "rdi" 0 False False
r10 = Register "r10" 0 False False
r11 = Register "r11" 0 False False

#if LINUX==1 || MAC_OS==1
functionCallRegisters = [rdi, rsi, rdx, rcx, r8, r9]
scratchRegisters = [rcx, rdx, rsi, rdi, r8, r9, r10, r11]
preservedRegisters = [rbx, rbp, r12, r13, r14, r15]
numPreservedRegisters :: Int
numPreservedRegisters = 6
#elif WINDOWS==1
functionCallRegisters = [rcx, rdx, r8, r9]
scratchRegisters = [rcx, rdx, r8, r9, r10, r11]
preservedRegisters = [rbx, rsi, rdi, rbp, r12, r13, r14, r15]
numPreservedRegisters :: Int
numPreservedRegisters = 8
#endif

multimediaRegisterNames = ["xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"]

#if LINUX==1 || MAC_OS==1
numFunctionCallGPRs :: Int
numFunctionCallGPRs = 6
numFunctionCallMMRs :: Int
numFunctionCallMMRs = 8
#elif WINDOWS==1
numFunctionCallGPRs :: Int
numFunctionCallGPRs = 4
numFunctionCallMMRs :: Int
numFunctionCallMMRs = 4
#endif

numFPRs :: Int
numFPRs = 8

#if LINUX==1 || MAC_OS==1
allRegisters = [rbx, r12, r13, r14, r15, r9, r8, rcx, rdx, rsi, rdi, r10, r11]
#elif WINDOWS==1
allRegisters = [rbx, rsi, rdi, r12, r13, r14, r15, r9, r8, rdx, rcx, r10, r11]
#endif

sizeOfShadowSpace :: Int
#if LINUX == 1 || MAC_OS == 1
sizeOfShadowSpace = 0
#elif WINDOWS == 1
sizeOfShadowSpace = 4
#endif

id_ = "a id"
directives_ = "b directives"
globals_ = "c globals"
code_ = "d code"
functions_ = "e functions"
data_ = "f data"
debug_info_ = "g debug_info"
debug_abbrev_ = "h debug_abbrev"
debug_line_ = "i debug_line"
debug_str_ = "j debug_str"
debug_frame_ = "k debug_frame"

data TempStackSpace = TempStackSpace
  {
    tempStackSpaceSize :: Int,
    tempStackSpaceOffset :: Int
  } deriving (Show)

runtimeErrorNullPointerException = "\"Null pointer exception.\""
runtimeErrorOnGotoRangeException = "\"On... Goto index out of range.\""
runtimeErrorDivisionByZeroException = "\"Attempted division by zero.\""
clockworkdevstudio/Idlewild-Lang
CompilerData.hs
bsd-2-clause
5,129
0
10
980
775
464
311
81
1
-- | Benchmarks simple file reading
--
-- Tested in this benchmark:
--
-- * Reading a file from the disk
--
{-# LANGUAGE CPP #-}
module Benchmarks.FileRead
    ( benchmark
    ) where

#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Test.Tasty.Bench (Benchmark, bgroup, bench, whnfIO)
import qualified Data.ByteString as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import qualified Data.Text.Lazy.IO as LT

benchmark :: FilePath -> Benchmark
benchmark p = bgroup "FileRead"
    [ bench "Text" $ whnfIO $ T.length <$> T.readFile p
    , bench "LazyText" $ whnfIO $ LT.length <$> LT.readFile p
    , bench "TextByteString" $ whnfIO $
        (T.length . T.decodeUtf8) <$> SB.readFile p
    , bench "LazyTextByteString" $ whnfIO $
        (LT.length . LT.decodeUtf8) <$> LB.readFile p
    ]
bos/text
benchmarks/haskell/Benchmarks/FileRead.hs
bsd-2-clause
1,016
0
11
187
264
162
102
21
1
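The module only exports the benchmark group; the harness that supplies the file path lives elsewhere in the repository. A minimal standalone runner sketch using tasty-bench's defaultMain; the file path here is a placeholder, not the project's actual test data:

-- Sketch only; 'defaultMain' is tasty-bench's entry point.
import Test.Tasty.Bench (defaultMain)
import qualified Benchmarks.FileRead as FileRead

main :: IO ()
main = defaultMain [FileRead.benchmark "path/to/sample.txt"]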
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}

module HERMIT.Shell.ShellEffect
    ( ShellEffect(..)
    , ShellEffectBox(..)
    , performShellEffect
    , dumpT
    , dump
    ) where

import Control.Monad.Error.Class (MonadError(..))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Reader (ask)
import Control.Monad.State (MonadState(..), gets)

import Data.Typeable

import HERMIT.External
import HERMIT.Kure
import HERMIT.PrettyPrinter.Common

import HERMIT.Plugin.Renderer
import HERMIT.Plugin.Types

import HERMIT.Shell.Types

import System.IO

----------------------------------------------------------------------------------

data ShellEffect :: * -> * where
    Abort      :: ShellEffect ()
    CLSModify  :: CLT IO a -> ShellEffect a
    PluginComp :: PluginM () -> ShellEffect ()
    Continue   :: ShellEffect ()
    Resume     :: ShellEffect ()
    FmapShellEffect :: (a -> b) -> ShellEffect a -> ShellEffect b
  deriving Typeable

instance Functor ShellEffect where
    fmap = FmapShellEffect

data ShellEffectBox where
    ShellEffectBox :: Typeable a => ShellEffect a -> ShellEffectBox
  deriving Typeable

instance Typeable a => Extern (ShellEffect a) where
    type Box (ShellEffect a) = ShellEffectBox
    box = ShellEffectBox
    unbox (ShellEffectBox i) =
        case cast i of
            Just res -> res
            Nothing -> error "Extern -- unbox: casting of shell effect failed."

----------------------------------------------------------------------------------

performShellEffect :: (Functor m, -- TODO: RM when at 7.10
                       MonadCatch m, CLMonad m)
                   => ShellEffect a -> m a
performShellEffect Abort    = abort
performShellEffect Resume   = announceUnprovens >> gets cl_cursor >>= resume
performShellEffect Continue = announceUnprovens >> get >>= continue

performShellEffect (CLSModify m) = clm2clt m

performShellEffect (PluginComp m) = pluginM m

performShellEffect (FmapShellEffect f s) = fmap f (performShellEffect s)

dumpT :: FilePath -> PrettyPrinter -> String -> Int -> TransformH DocH ()
dumpT fileName pp renderer width = do
    case lookup renderer shellRenderers of
      Just r -> do
            doc <- idR
            liftIO $ do
                h <- openFile fileName WriteMode
                r h ((pOptions pp) { po_width = width }) (Right doc)
                hClose h
      _ -> fail "dump: bad renderer option"

dump :: FilePath -> PrettyPrinter -> String -> Int -> CLT IO ()
dump fileName pp renderer width = do
    st <- get
    env <- ask
    let st' = setPrettyOpts (setPretty st pp) $ (cl_pretty_opts st) { po_width = width }
    (er, _st'') <- runCLT env st' $ do
        pluginM (changeRenderer renderer)
        h <- liftIO $ openFile fileName WriteMode
        printWindowAlways (Just h)
        liftIO $ hClose h
    either throwError return er
beni55/hermit
src/HERMIT/Shell/ShellEffect.hs
bsd-2-clause
3,178
1
19
733
829
431
398
75
2
{-# language DataKinds #-}
{-# language TypeFamilies #-}
{-# language GADTs #-}
{-# language MultiParamTypeClasses #-}
{-# language DeriveDataTypeable #-}
{-# language StandaloneDeriving #-}
{-# language FlexibleInstances #-}
{-# language FlexibleContexts #-}
{-# language UndecidableInstances #-}
{-# language Rank2Types #-}
{-# language TemplateHaskell #-}
{-# language ScopedTypeVariables #-}
{-# language ConstraintKinds #-}
{-# language DeriveAnyClass #-}
{-# language OverloadedStrings #-}
{-# language OverloadedLists #-}
{-# language RecursiveDo #-}
{-# language QuasiQuotes #-}
{-# language TypeInType #-}
{-# language ViewPatterns #-}
{-# language OverloadedLists #-}

-- https://youtu.be/btyhpyJTyXg?list=RDG8yEe55gq2c
--
module UI.DateInput where

import Data.Dependent.Map (DMap,DSum((:=>)), singleton)
import qualified Data.Dependent.Map as DMap
import Data.GADT.Compare (GCompare)
import Data.GADT.Compare.TH
import UI.Lib -- (MS,ES,DS, Reason, domMorph, EitherG(LeftG,RightG), rightG,leftG, Cable,sselect)
import Reflex.Dom hiding (Delete, Insert, Link)
import Data.Bifunctor
import Control.Lens hiding (dropping)
import Data.Data.Lens
import Data.Data
import Data.Typeable
import Control.Lens.TH
import System.Random
import qualified Data.Map as M
import Status
import World
import Data.Text (Text,pack,unpack)
import Data.String.Here
import Data.String
import Control.Monad
import Data.Maybe
import Data.Monoid
import Control.Monad.Trans
import Data.Either
import Text.Read (readMaybe)
import Text.Printf
import UI.Constraints
import UI.ValueInput
import Data.Time ()
import Control.Monad.Reader
import Instance.Date
import UI.Lib

data Selection a = Selected a | Back

selectDay :: (MonadReader (DS Bool) m, MS m) => m (ES (Selection Day))
selectDay = divClass "select-day" $ el "ul" $ do
  bs <- forM [minBound .. maxBound] $ \d -> el "li" $ do
    fmap (Selected d <$) $ button $ pack $ show d
  b <- fmap (Back <$) $ floater $ icon ["arrow-left","2x"] "back"
  return $ leftmost (b:bs)

times = map (\(x,y) -> Delta (ATime x) (ATime y)) $ zip <*> tail $ [7,7.5..22]

selectATime :: (MonadReader (DS Bool) m,MS m) => m (ES (Selection Delta))
selectATime = divClass "select-time" $ el "ul" $ do
  bs <- forM times $ \d -> el "li" $ do
    fmap (Selected d <$) $ button $ pack $ show d
  b <- fmap (Back <$) $ floater $ icon ["arrow-left","2x"] "back"
  return $ leftmost (b:bs)

data Stage = Closed | Daying | Delting Day | Picked Date

picked (Picked x) = Just x
picked _ = Nothing

pickDate' ::(MonadReader (DS Bool) m, MS m) => (Bool,Stage) -> m (ES Stage)
pickDate' (False, Closed) = (Daying <$) <$> icon ["calendar","3x"] "pick a date"
pickDate' (True, Closed) = (Daying <$) <$> icon ["calendar-times-o","3x"] "change the date"
pickDate' (_,Daying) = do
  let f Back = Closed
      f (Selected x) = Delting x
  (f <$>) <$> selectDay
pickDate' (_,Delting x) = do
  let f Back = Daying
      f (Selected y) = Picked (Date x y)
  (f <$>) <$> selectATime
pickDate' (r,_) = pickDate' (r,Closed)

instance (MonadReader (DS Bool) m, MS m) => HasInput m Date where
  getInput = divClass "pick-a-date" $ do
    rec domMorph (\d -> never <$ maybe (return ()) (divClass "picked-date" . text . pack . show) d) d
        r <- holdDyn Closed e
        e <- domMorph pickDate' $ (,) <$> (isJust <$> d) <*> r
        d <- holdDyn Nothing $ Just <$> fmapMaybe picked e
    return d
paolino/book-a-visit
client/UI/DateInput.hs
bsd-3-clause
3,401
0
20
588
1,145
625
520
90
3
module Model.State where

type State = String
redelmann/e-zimod-server
Model/State.hs
bsd-3-clause
47
0
4
9
12
8
4
2
0
{-# LANGUAGE RankNTypes, GADTs, PolyKinds, UndecidableInstances, DataKinds, KindSignatures, TypeFamilies, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, TypeSynonymInstances, StandaloneDeriving #-} module Elf.Types where import Control.Applicative import Control.Monad import Control.Monad.Reader import Control.Monad.Trans import Control.Concurrent import Control.Monad.Catch as Catch import System.IO import Foreign.Storable import Elf.Constants import Elf.ElfHeaders import Data.Word import Data.Int import qualified Data.Bits as B import Foreign.C.Types data Bits = B32 | B64 | Bx deriving (Show, Read, Eq, Ord) data ElfTypes = ElfAddr | ElfOff | ElfSection | ElfVerSym | ElfByte | ElfHalf | ElfSword | ElfWord | ElfSxword | ElfXword | ElfString deriving (Show, Read, Eq) data ElfClass = ClassInvalid | Class32 | Class64 | ClassUnknown Word8 deriving (Show, Read,Eq) data ElfData = DataUnknown | Data2LSB | Data2MSB | DataOther Word8 deriving (Show, Read,Eq) data ElfVersion = VersionInvalid | VersionCurrent | VersionOther Word8 deriving (Show, Read, Eq) data ElfAbi = AbiNone | AbiSysV | AbiHPUX | AbiNetBSD | AbiLinux | AbiSolaris | AbiIrix | AbiFreeBSD | AbiTru64 | AbiArm | AbiStandAlone | AbiUnknown Word8 deriving (Show, Eq, Read) type family ElfType (b :: Bits) (c :: ElfTypes) type instance ElfType B64 ElfAddr = Word64 type instance ElfType B64 ElfOff = Word64 type instance ElfType b ElfSection = Word16 type instance ElfType b ElfVerSym = Word16 type instance ElfType b ElfByte = Word8 type instance ElfType b ElfHalf = Word16 type instance ElfType b ElfSword = Int32 type instance ElfType b ElfWord = Word32 type instance ElfType b ElfSxword = Int64 type instance ElfType b ElfXword = Word64 type instance ElfType b ElfString = [ElfType b ElfByte] type family NumBits x :: Bits type instance NumBits (Ehdr B64 n ) = B64 data MachineType = X86_64 | I386 deriving (Show, Eq, Read) type family BitsExtract c :: Bits data Machine = KnownMachine MachineType | OtherMachine Word16 deriving instance Show Machine deriving instance Eq Machine data FileType = Rel | Exec | Dyn | Core | UnknownType | OtherType Word16 deriving (Show, Eq, Read) -- class was: -- class Convertible s c (b :: Bits) (n :: ElfByte) | s -> c, s -> b, c -> n -- class Eq c => Convertible s c where type From (b :: Bits) c fromElf :: s -> From (NumBits s) c -> c toElf :: s -> c -> From (NumBits s) c fromStructure :: s -> (s -> From (NumBits s) c) -> c fromCoStructure :: s -> (s -> c) -> From (NumBits s) c fromStructure s f = fromElf s (f s) fromCoStructure s f = toElf s (f s) instance Convertible s [CUChar] where type From b [CUChar] = [ElfType b ElfByte] fromElf s = fmap ( fromElf s) toElf s = fmap (toElf s) instance Convertible s CUChar where type From b CUChar = Word8 fromElf _ = CUChar toElf _ (CUChar w) = w instance Convertible s ElfData where type From b ElfData = ElfType b ElfByte fromElf _ x | x == c'ELFDATANONE = DataUnknown | x == c'ELFDATA2LSB = Data2LSB | x == c'ELFDATA2MSB = Data2MSB | otherwise = DataOther $ fromIntegral x toElf _ DataUnknown = c'ELFDATANONE toElf _ Data2LSB = c'ELFDATA2LSB toElf _ Data2MSB = c'ELFDATA2MSB toElf _ (DataOther x) = fromIntegral x instance Convertible s ElfAbi where type From b ElfAbi = ElfType b ElfByte fromElf _ x | x == c'ELFOSABI_NONE = AbiNone | x == c'ELFOSABI_SYSV = AbiSysV | x == c'ELFOSABI_HPUX = AbiHPUX | x == c'ELFOSABI_NETBSD = AbiNetBSD | x == c'ELFOSABI_LINUX = AbiLinux | x == c'ELFOSABI_SOLARIS = AbiSolaris | x == c'ELFOSABI_IRIX = AbiIrix | x == c'ELFOSABI_FREEBSD = 
AbiFreeBSD | x == c'ELFOSABI_TRU64 = AbiTru64 | x == c'ELFOSABI_ARM = AbiArm | x == c'ELFOSABI_STANDALONE = AbiStandAlone | otherwise = AbiUnknown x toElf _ AbiNone = c'ELFOSABI_NONE toElf _ AbiSysV = c'ELFOSABI_SYSV toElf _ AbiHPUX = c'ELFOSABI_HPUX toElf _ AbiNetBSD = c'ELFOSABI_NETBSD toElf _ AbiLinux = c'ELFOSABI_LINUX toElf _ AbiSolaris = c'ELFOSABI_SOLARIS toElf _ AbiIrix = c'ELFOSABI_IRIX toElf _ AbiTru64 = c'ELFOSABI_TRU64 toElf _ AbiArm = c'ELFOSABI_ARM toElf _ AbiStandAlone = c'ELFOSABI_STANDALONE toElf _ (AbiUnknown x) = x instance Convertible s FileType where type From b FileType = ElfType B64 ElfHalf fromElf _ x | x == c'ET_NONE = UnknownType | x == c'ET_REL = Rel | x == c'ET_EXEC = Exec | x == c'ET_DYN = Dyn | x == c'ET_CORE = Core toElf _ UnknownType = c'ET_NONE toElf _ Rel = c'ET_REL toElf _ Exec = c'ET_EXEC toElf _ Dyn = c'ET_DYN toElf _ Core = c'ET_CORE instance Convertible s ElfVersion where type From b ElfVersion = ElfType b ElfByte fromElf _ x | x == c'EV_NONE = VersionInvalid | x == c'EV_CURRENT = VersionCurrent | otherwise = VersionOther x toElf _ VersionInvalid = c'EV_NONE toElf _ VersionCurrent = c'EV_CURRENT toElf _ (VersionOther x) = x instance Convertible s ElfClass where type From b ElfClass = ElfType b ElfByte fromElf _ x | x == c'ELFCLASSNONE = ClassInvalid | x == c'ELFCLASS32 = Class32 | x == c'ELFCLASS64 = Class64 | otherwise = ClassUnknown x toElf _ ClassInvalid = c'ELFCLASSNONE toElf _ Class32 = c'ELFCLASS32 toElf _ Class64 = c'ELFCLASS64 toElf _ (ClassUnknown x) = x fromStructure s f = fromElf s (f s) fromCoStructure s f = toElf s (f s) instance Convertible s Word32 where type From b Word32 = ElfType b ElfWord fromElf _ n = undefined toElf _ = undefined instance Convertible s Int32 where type From b Int32 = ElfType b ElfSword fromElf _ s = s toElf _ s = s instance Convertible s Word64 where type From b Word64 = ElfType b ElfXword fromElf _ = id toElf _ = id instance Convertible s Int64 where type From b Int64 = ElfType b ElfSxword fromElf _ = id toElf _ = id instance Convertible s Word16 where type From b Word16 = ElfType b ElfHalf fromElf _ = id toElf _ = id instance Convertible s Word8 where type From b Word8 = ElfType b ElfByte fromElf _ = id toElf _ = id instance Convertible s CUInt where type From b CUInt = ElfType b ElfWord fromElf _ = CUInt toElf _ (CUInt x) = x instance Convertible s Machine where type From b Machine = ElfType b ElfHalf fromElf _ x | x == c'EM_X86_64 = KnownMachine X86_64 | x == c'EM_386 = KnownMachine I386 | otherwise = OtherMachine $ fromIntegral x toElf _ (KnownMachine X86_64) = c'EM_X86_64 toElf _ (KnownMachine I386) = c'EM_386 toElf _ (OtherMachine x) = fromIntegral x class EhdrStructGetter (b :: Bits) (c :: MachineType) where data Ehdr b c :: * e_ident :: Ehdr b c -> ElfType b ElfString e_type :: Ehdr b c -> FileType e_machine :: Ehdr b c -> Machine e_version :: Ehdr b c -> ElfType b ElfWord e_entry :: Ehdr b c -> ElfType b ElfAddr e_phoff :: Ehdr b c -> ElfType b ElfOff e_shoff :: Ehdr b c -> ElfType b ElfOff e_flags :: Ehdr b c -> ElfType b ElfWord e_ehsize :: Ehdr b c -> ElfType b ElfHalf e_phentsize :: Ehdr b c -> ElfType b ElfHalf e_phnum :: Ehdr b c -> ElfType b ElfHalf e_shentsize :: Ehdr b c -> ElfType b ElfHalf e_shnum :: Ehdr b c -> ElfType b ElfHalf e_shstrndx :: Ehdr b c -> ElfType b ElfHalf -- further break down e_class :: Ehdr b c -> ElfClass e_magic_valid :: Ehdr b c -> Bool e_data :: Ehdr b c -> ElfData e_elf_version :: Ehdr b c -> ElfVersion e_abi :: Ehdr b c -> ElfAbi e_sections :: ShdrStructGetter b c => 
Ehdr b c -> [Shdr b c] instance EhdrStructGetter B64 X86_64 where data Ehdr B64 X86_64 = X8664 C'Elf64_Ehdr e_ident (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_ident e_type (X8664 p) = fromStructure p c'Elf64_Ehdr'e_type e_machine (X8664 p) = fromStructure p c'Elf64_Ehdr'e_machine e_version(X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_version e_entry (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_entry e_phoff (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_phoff e_shoff (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_shoff e_flags (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_flags e_ehsize (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_ehsize e_phentsize (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_phentsize e_phnum (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_phnum e_shentsize (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_shentsize e_shnum (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_shnum e_shstrndx (X8664 p) = fromCoStructure p c'Elf64_Ehdr'e_shstrndx e_class x = fromElf x ( e_ident x !! c'EI_CLASS) e_magic_valid x = b0 == 0x7f && b1 == 0x45 && b2 == 0x4c && b3 == 0x46 where (b0, b1,b2,b3) = let xs = e_ident x in (xs !! c'EI_MAG0, xs !! c'EI_MAG1, xs !! c'EI_MAG2, xs !! c'EI_MAG3) e_data x = fromElf x (e_ident x !! c'EI_DATA) e_elf_version x = fromElf x (e_ident x !! c'EI_VERSION) e_abi x = fromElf x (e_ident x !! c'EI_OSABI) class ShdrStructGetter (b :: Bits) (m :: MachineType) where data Shdr b m :: * -- Elf monad class ElfOp (m :: * -> * -> *) where withHandle :: (Handle -> m r a) -> m r a loadElf :: FilePath -> m r () runElf :: MonadIO t => m r a -> t a instance ElfOp (ElfMonad) where loadElf fp = undefined runElf = undefined withHandle f = do m <- asks filehandle Catch.bracket (liftIO $ takeMVar m) (liftIO . putMVar m) f data ElfEnv = ElfEnv { filehandle :: MVar Handle } newtype ElfMonad s a = ElfMonad { runElfMonad :: ReaderT ElfEnv IO a } instance Functor (ElfMonad s) where fmap f = ElfMonad . fmap f . runElfMonad instance Applicative (ElfMonad s) where pure = return (<*>) = ap instance Alternative (ElfMonad s) where empty = mzero (<|>) = mplus instance Monad (ElfMonad s) where return = ElfMonad . return (>>=) m f = ElfMonad (runElfMonad m >>= (runElfMonad . f)) instance MonadPlus (ElfMonad s) where mzero = ElfMonad mzero mplus m n = ElfMonad $ runElfMonad m `mplus` runElfMonad n instance MonadIO (ElfMonad s) where liftIO = ElfMonad . liftIO instance MonadReader ElfEnv (ElfMonad s) where ask = ElfMonad ask local f = ElfMonad . local f . runElfMonad instance MonadThrow (ElfMonad s) where throwM = ElfMonad . throwM instance MonadCatch (ElfMonad s) where catch m f = ElfMonad (catch (runElfMonad m) (runElfMonad . f)) instance MonadMask (ElfMonad s) where mask f = ElfMonad (mask $ \u -> runElfMonad (f ( ElfMonad . u . runElfMonad))) uninterruptibleMask f = ElfMonad (uninterruptibleMask $ \u -> runElfMonad (f ( ElfMonad . u . runElfMonad)))
edgarklerks/elf-bindings
src/Elf/Types.hs
bsd-3-clause
11,748
0
15
3,484
3,748
1,917
1,831
291
0