Dataset schema (the `code` column is rendered as the source listing shown above each metadata row):

| column | type | range |
|---|---|---|
| code | string | lengths 5 to 1.03M |
| repo_name | string | lengths 5 to 90 |
| path | string | lengths 4 to 158 |
| license | string | 15 distinct values |
| size | int64 | 5 to 1.03M |
| n_ast_errors | int64 | 0 to 53.9k |
| ast_max_depth | int64 | 2 to 4.17k |
| n_whitespaces | int64 | 0 to 365k |
| n_ast_nodes | int64 | 3 to 317k |
| n_ast_terminals | int64 | 1 to 171k |
| n_ast_nonterminals | int64 | 1 to 146k |
| loc | int64 | -1 to 37.3k |
| cycloplexity | int64 | -1 to 1.31k |

module RunLength (decode, encode) where
import Data.Char (isDigit)
decode :: String -> String
decode [] = []
decode xs = (replicate n c) ++ decode xs'
where
n | isDigit . head $ xs = read . takeWhile isDigit $ xs
| otherwise = 1
c = head . dropWhile isDigit $ xs
xs' | isDigit . head $ xs = tail . dropWhile isDigit $ xs
| otherwise = tail xs
encode :: String -> String
encode [] = []
encode xs = (str n) ++ [c] ++ encode xs'
where
n = length . takeWhile (==head xs) $ xs
c = head xs
xs' = dropWhile (==head xs) $ xs
str 1 = ""
str n = show n
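-- A hedged usage sketch (not part of the original exercise solution): round-trip
-- a short string through encode and decode.
demoRunLength :: (String, String, Bool)
demoRunLength = (encoded, decoded, decoded == original)
  where
    original = "aaabbbc"
    encoded  = encode original  -- "3a3bc"
    decoded  = decode encoded   -- back to "aaabbbc"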
| vaibhav276/exercism_haskell | run-length-encoding/src/RunLength.hs | mit | 704 | 0 | 11 | 282 | 285 | 143 | 142 | 18 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Collab.App
( app
) where
import Control.Applicative ((<$>))
import Control.Exception (finally)
import Control.Monad (forever, when)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (decode)
import Data.Text (Text)
import Network.HTTP.Types.URI (decodePathSegments)
import qualified Network.WebSockets as WS
import qualified Collab.Api as Api
import Collab.Json
import Collab.State (State)
import Collab.Client
import Collab.Parse (parseMessage)
import Collab.Util (textToByteString, generateID)
-- | The main application. It accepts every request
-- with at least one path segment. The path segment
-- is used as room name.
app :: State -> WS.ServerApp
app state pending = do
when (length pathSegments /= 1) $ error "Connection failed"
let room = head pathSegments
conn <- WS.acceptRequest pending
id <- generateID
let client = Client id id room conn
liftIO $ Api.join state client
flip finally (Api.leave state client) $ do
forever $ do
(event, _, message) <- parseMessage <$> WS.receiveData conn
liftIO $ hub state client event message
where
req = WS.pendingRequest pending
pathSegments = decodePathSegments (WS.requestPath req)
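-- Illustrative note (assumed URL shape, not taken from the original module):
-- a client connecting to ws://host/lobby gives pathSegments == ["lobby"], so it
-- joins the room "lobby"; a path with more segments, e.g. /a/b, fails the
-- length check above and the connection is rejected.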
-- | The message hub. It distributes the messages to the
-- corresponding actions.
hub :: State -> Client -> Text -> Text -> IO ()
hub state sender event message = case event of
"code" -> Api.code state sender m
"cursor" -> Api.cursor state sender m
"change-nick" -> maybeDo Api.changeNick (decode m :: Maybe ChangeNick)
"members" -> Api.members state sender
"message" -> Api.message state sender m
_ -> putStrLn $ "Unknown message: " ++ show event
where maybeDo f = maybe (return ()) (f state sender)
m = textToByteString message
| dennis84/collab-haskell | src/Collab/App.hs | mit | 1,847 | 0 | 16 | 386 | 543 | 284 | 259 | 41 | 6 |
-- file: ch08/ElfMagic.hs
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as LC
-- We will check whether a file is an ELF object file: the format used for executables on almost all modern Unix-like systems.
-- This simple question can be answered by looking at the first four bytes of the file and checking whether they match a specific byte sequence. A byte sequence that identifies a file type is commonly called a magic number.
hasElfMagic :: L.ByteString -> Bool
hasElfMagic content = L.take 4 content == elfMagic
where elfMagic = L.pack [0x7f, 0x45, 0x4c, 0x46]
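-- A hedged sanity check (illustrative, not part of the original file): the ELF
-- magic is the byte 0x7f followed by the ASCII letters "ELF".
elfMagicDemo :: Bool
elfMagicDemo =
  hasElfMagic (L.pack [0x7f, 0x45, 0x4c, 0x46, 0x02, 0x01])  -- starts with the magic
    && not (hasElfMagic (LC.pack "#!/bin/sh\n"))             -- a shell script does not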
isElfFile :: FilePath -> IO Bool
isElfFile path = do
inf <- L.readFile path
return $ hasElfMagic inf
geneFile :: FilePath -> IO ()
geneFile path = do
  writeFile path . unlines . map ((++ ".00") . show) $ [1..10]
highestClosingFrom :: FilePath -> IO ()
highestClosingFrom path = do
contents <- L.readFile path
print $ highestClosing contents
closing = readPrice . (!!0) . LC.split ','
readPrice :: LC.ByteString -> Maybe Int
readPrice str =
case LC.readInt str of
Nothing -> Nothing
Just (yuan, rest) ->
case LC.readInt $ L.tail rest of
Nothing -> Nothing
Just (cents, more) ->
Just (yuan * 100 + cents)
highestClosing = maximum . (Nothing:) . map closing . LC.lines
| Numberartificial/workflow | haskell-first-principles/src/RW/CH8/ElfMageic.hs | mit | 1,316 | 3 | 14 | 219 | 385 | 196 | 189 | 27 | 3 |
commonWords :: Int -> [Char] -> [Char]
type Text = [Char]
type Word = [Char]
-- words :: [Char] -> [[Char]]
words :: Text -> [Word]
map :: (a -> b) -> [a] -> [b]
sortWords :: [Word] -> [Word]
countRuns :: [Word] -> [(Int, Word)]
sortRuns :: [(Int, Word)] -> [(Int, Word)]
take :: Int -> [a] -> [a]
showRun :: (Int, Word) -> String
toLower :: Char -> Char
map showRun :: [(Int, Word)] -> [String]
map toLower :: String -> String
concat :: [[a]] -> [a]
commonWords :: Int -> Text -> String
-- commonWords n :: String -> String
-- Technically speaking, Haskell functions take only one argument.
-- max :: (Ord a) => a -> a -> a
-- max :: (Ord a) => a -> (a -> a)
-- max 4 5
-- (max 4) 5
-- (f . g) x = f ( g x )
-- f g x = ( f g ) x
commonWords n = concat
. map showRun
. take n
. sortRuns
. countRuns
. sortWords
. words
. map toLower
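-- A hedged trace of the pipeline (sortWords, countRuns, sortRuns and showRun
-- are only declared above, not defined here, so the intermediate shapes and the
-- final format are assumptions):
--
--   commonWords 2 "The the THE cat"
--     map toLower --> "the the the cat"
--     words       --> ["the","the","the","cat"]
--     sortWords   --> ["cat","the","the","the"]
--     countRuns   --> [(1,"cat"),(3,"the")]
--     sortRuns    --> [(3,"the"),(1,"cat")]
--     take 2      --> [(3,"the"),(1,"cat")]
--     map showRun --> e.g. ["the 3\n","cat 1\n"]
--     concat      --> "the 3\ncat 1\n"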
-- This makes much more sense when you read it from right to left:
-- given a String input `text`
-- convert the text to lowercase by mapping each character to a lowercase version of it
-- and returning the transformed array,
-- then make it an array of words,
-- then sort that array,
-- then create an array of tuples,
-- then sort those tuples
-- then take the first n from the top as an array
-- then convert those array of tuples to an array of strings
-- then concatenate the array into a joint string.
--
-- map toLower thePhrase --> words --> sort --> countRuns --> sort --> take n --> map showRun --> concat
| v0lkan/learning-haskell | common-words.hs | mit | 1,545 | 7 | 7 | 406 | 351 | 199 | 152 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
import Reflex
import Reflex.Dom
import Data.FileEmbed
import qualified Data.Map as Map
main = mainWidgetWithCss $(embedFile "style.css") $ do
elAttr "div" ("id" =: "page-wrap") $ do
el "header" $ do
el "h1" $ text "External CSS Demo"
el "ul" $ do
el "li" $ text "use TemplateHaskell"
el "li" $ text "import Data.FileEmbed"
el "li" $ text "call 'mainWidgetWithCss'"
el "li" $ do
text "with '$(embedFile "
el "em" $ text "yourCssFileName.css"
text ")'"
el "li" $ text "And the widget, just like 'mainWidget'"
| Ninju/reflex-dom-demos | external-css/source.hs | mit | 613 | 1 | 18 | 160 | 184 | 79 | 105 | 18 | 1 |
module Specs.P4Spec(main, spec) where
import P4
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "p4" $ do
it "" $
p4 [] `shouldBe` 0
it "" $
p4 [1,2,3,4,5] `shouldBe` 5
it "" $
p4 "Hello" `shouldBe` 5
describe "p4'" $ do
it "" $
p4' [] `shouldBe` 0
it "" $
p4' [1,2,3,4,5] `shouldBe` 5
it "" $
p4' "Hello" `shouldBe` 5
describe "p4''" $ do
it "" $
p4'' [] `shouldBe` 0
it "" $
p4'' [1,2,3,4,5] `shouldBe` 5
it "" $
p4'' "Hello" `shouldBe` 5
| nosoosso/nnh | test/Specs/P4Spec.hs | mit | 624 | 0 | 13 | 246 | 302 | 155 | 147 | 28 | 1 |
module Test.Handlers.PayReq where
import qualified PayProto as P
import PayProto.API
import Test.Constants
import Test.Util
import qualified Network.Haskoin.Constants as HCC
import Data.ProtoLens
import Servant
import Data.Time.Clock.POSIX
import Data.Time.Clock
import Servant.Common.BaseUrl
-- | Respond with PaymentRequest
payRequestH :: Maybe String -> Handler (BinaryContent P.PaymentRequest)
payRequestH hostM = do
payReq <- maybe (userErr "Missing Host HTTP header") mkSpec hostM >>= mkPayRequest
return $ binaryHeader payReq
where
mkPayUrl h = BaseUrl Https (cs h) 443 "/pay_deliver"
mkSpec host = return $ P.PayReqSpec
[ (testAddress, testOutValue), (testAddress, testOutValue) ]
(mkPayUrl host)
(round $ 10 * 3600)
testMerchantMemo
testMerchantData
mkPayRequest :: P.PayReqSpec -> Handler P.PaymentRequest
mkPayRequest spec = do
now <- liftIO getCurrentTime
return $ P.mkPayRequestT now spec
| runeksvendsen/bitcoin-payment-protocol | test/Test/Handlers/PayReq.hs | cc0-1.0 | 1,025 | 0 | 11 | 235 | 263 | 142 | 121 | 26 | 1 |
{-# OPTIONS -fglasgow-exts #-}
{-# LANGUAGE TemplateHaskell #-}
module Baum.RedBlack.Type where
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data RedBlackColor = Red | Black
deriving ( Eq, Typeable )
data RedBlackTree a = Empty
| RedBlackTree RedBlackColor ( RedBlackTree a ) a ( RedBlackTree a )
deriving ( Eq, Typeable )
$(derives [makeReader, makeToDoc] [''RedBlackTree])
$(derives [makeReader, makeToDoc] [''RedBlackColor])
instance Functor RedBlackTree where
fmap f = foldt ( \ color left key right -> RedBlackTree color left ( f key ) right ) Empty
isLeaf :: RedBlackTree a -> Bool
isLeaf Empty = True
isLeaf _ = False
foldt redblacktree empty Empty = empty
foldt redblacktree empty ( RedBlackTree color left key right ) = redblacktree color ( foldt redblacktree empty left ) key ( foldt redblacktree empty right )
| florianpilz/autotool | src/Baum/RedBlack/Type.hs | gpl-2.0 | 866 | 0 | 11 | 155 | 276 | 146 | 130 | 20 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module PL.Interpretation where
import PL.Struktur
import PL.Signatur
import Autolib.Set
import Autolib.Size
import Autolib.FiniteMap
import Autolib.TES.Identifier
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter
import qualified Autolib.Reporter.Set as S
import Data.Typeable
data Ord u => Interpretation u =
Interpretation { struktur :: Struktur u
, belegung :: FiniteMap Identifier u
}
deriving ( Typeable )
instance Ord u => Size ( Interpretation u ) where
size i = size ( struktur i ) + sizeFM ( belegung i )
instance ( Ord u, ToDoc u ) => Signed ( Interpretation u ) where
check sig i = do
check sig $ struktur i
check_variablen ( freie_variablen sig ) ( belegung i )
check_variablen fvs bel = do
silent $ S.eq ( text "Variablenmenge aus Signatur", fvs )
( text "Variablen aus Belegung", mkSet $ keysFM bel )
empty :: Ord u
=> Signatur
-> Set u
-> Interpretation u
empty sig uni = Interpretation
{ struktur = PL.Struktur.empty sig uni
, belegung = leere_belegung sig uni
}
leere_belegung sig uni = listToFM
$ zip ( setToList $ freie_variablen sig )
$ concat $ repeat $ setToList uni
$(derives [makeReader, makeToDoc] [''Interpretation])
-- local variables:
-- mode: haskell
-- end:
| marcellussiegburg/autotool | collection/src/PL/Interpretation.hs | gpl-2.0 | 1,377 | 35 | 9 | 307 | 396 | 220 | 176 | 38 | 1 |
module Types
( LispExp(..)
, showType )
where
data LispExp = LInt Integer
| LBool Bool
| LString String
| LList [LispExp]
| LSymbol String
| LFunction String
| LBind String
| LNil
deriving (Eq)
instance Show LispExp where
show (LInt x) = show x
show (LBool True) = "#t"
show (LBool False) = "#f"
show (LList x) = "(" ++ (unwords $ map show x) ++ ")"
show (LString x) = x
show _ = ""
showType :: LispExp -> String
showType (LInt _) = "Int"
showType (LBool _) = "Bool"
showType (LString _) = "String"
showType (LList _) = "List"
showType (LSymbol _) = "Symbol"
showType (LFunction _) = "Function"
showType (LNil) = "Nil"
showType (LBind _) = "SpecialFunction"
| felixsch/simplelisp | src/Types.hs | gpl-2.0 | 832 | 0 | 10 | 295 | 295 | 156 | 139 | 28 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
-- {-# LANGUAGE Strict #-}
{-|
Module : Hypercube.Chunk
Description : Chunk generation and rendering
Copyright : (c) Jaro Reinders, 2017
License : GPL-3
Maintainer : [email protected]
This module contains all code involving Chunks in Hypercube (except for the type declaration).
-}
module Hypercube.Chunk
( newChunk
, renderChunk
, startChunkManager
) where
import Hypercube.Chunk.Faces
import Hypercube.Config (chunkSize, generatingF)
import Hypercube.Types
import Hypercube.Util
import qualified Data.Vector as V
import qualified Data.Vector.Storable as VS
import Graphics.Rendering.OpenGL hiding (get)
import Linear
import Control.Monad
import Control.Concurrent
import Control.Lens
import Data.Int (Int8)
import Control.Monad.Trans.State
import Control.Concurrent.STM.TChan
import Control.Concurrent.STM
import Control.Applicative (liftA3)
import Control.Monad.IO.Class (liftIO)
import Data.IORef
import Control.DeepSeq (deepseq)
startChunkManager
:: IORef [V3 Int]
-> TChan (V3 Int, Chunk, VS.Vector (V4 Int8))
-> IO ()
startChunkManager todo chan = void $ forkIO $ forever $
atomicModifyIORef' todo (maybe ([],Nothing) (\(h,t) -> (t,Just h)) . uncons)
>>= maybe (return ()) (\pos -> do
-- atomicModifyIORef' busy (\a -> (pos:a,())
--putStrLn ("chunkMan: " ++ show pos)
chunk <- newChunk pos
atomically $ writeTChan chan (pos, chunk, extractSurface pos (chunk ^. chunkBlk)))
toPos :: V3 Int -> Int
toPos (V3 x y z) = x + chunkSize * y + chunkSize * chunkSize * z
fromPos :: Int -> V3 Int
fromPos n =
let (n',x) = n `quotRem` chunkSize
(z,y) = n' `quotRem` chunkSize
in V3 x y z
newChunk :: V3 Int -> IO Chunk
newChunk pos = do
let blk = V.generate (chunkSize ^ (3 :: Int)) (generatingF . (+ chunkSize *^ pos) . fromPos)
blk `deepseq` return (Chunk blk undefined undefined 0 True)
data Direction = North | East | South | West | Top | Bottom
deriving (Show, Eq, Enum)
dir :: Direction -> V3 Int -> V3 Int
dir = \case
East -> _x +~ 1
West -> _x -~ 1
Top -> _y +~ 1
Bottom -> _y -~ 1
North -> _z +~ 1
South -> _z -~ 1
face :: Direction -> [V4 Int8]
face = \case
East -> eastFace
West -> westFace
Top -> topFace
Bottom -> bottomFace
North -> northFace
South -> southFace
extractSurface :: V3 Int -> V.Vector Block -> VS.Vector (V4 Int8)
extractSurface pos blk = VS.fromList $ do
v <- liftA3 V3 [0..chunkSize - 1] [0..chunkSize - 1] [0..chunkSize - 1]
d <- [North .. Bottom]
let v' = dir d v
guard (blk V.! toPos v /= Air)
guard $ (Air ==) $ if all (\x -> 0 <= x && x < chunkSize) v'
then blk V.! toPos v'
else generatingF ((chunkSize *^ pos) + v')
face d & traverse +~ (0 & _xyz .~ fmap fromIntegral v
& _w .~ if d `elem` [Top,Bottom] then 0 else 1)
renderChunk :: V3 Int -> UniformLocation -> StateT Chunk IO ()
renderChunk pos modelLoc = do
n <- use chunkElements
when (n > 0) $ do
use chunkVao >>= (bindVertexArrayObject $=) . Just
liftIO $ do
model <- toGLmatrix $ identity & translation +~
(fromIntegral <$> chunkSize *^ pos)
uniform modelLoc $= model
drawArrays Triangles 0 $ fromIntegral n
| noughtmare/hypercube | src/Hypercube/Chunk.hs | gpl-3.0 | 3,395 | 0 | 17 | 798 | 1,170 | 621 | 549 | 86 | 6 |
module Problem039 (answer) where
import Data.List (maximumBy)
import Data.Function (on)
answer :: Int
answer = fst $ maximumBy (compare `on` snd) solutionSpace
where
solutionSpace = zip [3..1000] $ fmap (length . rightTriangles) [3..1000]
rightTriangles :: Int -> [(Int, Int, Int)]
rightTriangles perimeter = [(a, b, truncate c) |
a <- [1..perimeter-1],
b <- [a..perimeter],
let c = sqrt . fromIntegral $ a*a + b*b,
isInt c,
a + b + truncate c == perimeter
]
isInt :: Float -> Bool
isInt a = fromIntegral(round a) == a
| geekingfrog/project-euler | Problem039.hs | gpl-3.0 | 541 | 0 | 14 | 109 | 253 | 137 | 116 | 15 | 1 |
module Vdv.Text(extractDelimited,readUnsafeInt) where
import ClassyPrelude
import Data.Text(breakOn)
import Data.Text.Read(decimal)
readUnsafeInt :: Text -> Int
readUnsafeInt x = either (error $ "invalid decimal \"" <> unpack x <> "\"") fst (decimal x)
data BreakMode = BreakWithDelimiter
| BreakWithoutDelimiter
breakOnSafe :: BreakMode -> Text -> Text -> Maybe (Text,Text)
breakOnSafe bm del t =
case bm of
BreakWithoutDelimiter ->
case breakOn del t of
(_,"") -> Nothing
(x,y) -> return (x,y)
BreakWithDelimiter ->
case breakOn del t of
(_,"") -> Nothing
(x,y) -> return (x ++ del,drop (length del) y)
extractDelimited :: Text -> Text -> Text -> Maybe (Text,Text)
extractDelimited begin end t = do
(_,rest) <- breakOnSafe BreakWithoutDelimiter begin t
(daae,rest') <- breakOnSafe BreakWithDelimiter end rest
return (daae,rest')
| pmiddend/vdvanalyze | src/Vdv/Text.hs | gpl-3.0 | 911 | 0 | 15 | 198 | 339 | 181 | 158 | 24 | 4 |
module Jumpie.UnitType where
import Jumpie.Types
type UnitType = Real
| pmiddend/jumpie | lib/Jumpie/UnitType.hs | gpl-3.0 | 82 | 0 | 4 | 21 | 17 | 11 | 6 | 3 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.SetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the access control policy on the specified resource. Replaces any
-- existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and
-- PERMISSION_DENIED errors.
--
-- /See:/ <https://cloud.google.com/dataproc/ Cloud Dataproc API Reference> for @dataproc.projects.regions.workflowTemplates.setIamPolicy@.
module Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.SetIAMPolicy
(
-- * REST Resource
ProjectsRegionsWorkflowTemplatesSetIAMPolicyResource
-- * Creating a Request
, projectsRegionsWorkflowTemplatesSetIAMPolicy
, ProjectsRegionsWorkflowTemplatesSetIAMPolicy
-- * Request Lenses
, prwtsipXgafv
, prwtsipUploadProtocol
, prwtsipAccessToken
, prwtsipUploadType
, prwtsipPayload
, prwtsipResource
, prwtsipCallback
) where
import Network.Google.Dataproc.Types
import Network.Google.Prelude
-- | A resource alias for @dataproc.projects.regions.workflowTemplates.setIamPolicy@ method which the
-- 'ProjectsRegionsWorkflowTemplatesSetIAMPolicy' request conforms to.
type ProjectsRegionsWorkflowTemplatesSetIAMPolicyResource
=
"v1" :>
CaptureMode "resource" "setIamPolicy" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] SetIAMPolicyRequest :>
Post '[JSON] Policy
-- | Sets the access control policy on the specified resource. Replaces any
-- existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and
-- PERMISSION_DENIED errors.
--
-- /See:/ 'projectsRegionsWorkflowTemplatesSetIAMPolicy' smart constructor.
data ProjectsRegionsWorkflowTemplatesSetIAMPolicy =
ProjectsRegionsWorkflowTemplatesSetIAMPolicy'
{ _prwtsipXgafv :: !(Maybe Xgafv)
, _prwtsipUploadProtocol :: !(Maybe Text)
, _prwtsipAccessToken :: !(Maybe Text)
, _prwtsipUploadType :: !(Maybe Text)
, _prwtsipPayload :: !SetIAMPolicyRequest
, _prwtsipResource :: !Text
, _prwtsipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsRegionsWorkflowTemplatesSetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prwtsipXgafv'
--
-- * 'prwtsipUploadProtocol'
--
-- * 'prwtsipAccessToken'
--
-- * 'prwtsipUploadType'
--
-- * 'prwtsipPayload'
--
-- * 'prwtsipResource'
--
-- * 'prwtsipCallback'
projectsRegionsWorkflowTemplatesSetIAMPolicy
:: SetIAMPolicyRequest -- ^ 'prwtsipPayload'
-> Text -- ^ 'prwtsipResource'
-> ProjectsRegionsWorkflowTemplatesSetIAMPolicy
projectsRegionsWorkflowTemplatesSetIAMPolicy pPrwtsipPayload_ pPrwtsipResource_ =
ProjectsRegionsWorkflowTemplatesSetIAMPolicy'
{ _prwtsipXgafv = Nothing
, _prwtsipUploadProtocol = Nothing
, _prwtsipAccessToken = Nothing
, _prwtsipUploadType = Nothing
, _prwtsipPayload = pPrwtsipPayload_
, _prwtsipResource = pPrwtsipResource_
, _prwtsipCallback = Nothing
}
-- | V1 error format.
prwtsipXgafv :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy (Maybe Xgafv)
prwtsipXgafv
= lens _prwtsipXgafv (\ s a -> s{_prwtsipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prwtsipUploadProtocol :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy (Maybe Text)
prwtsipUploadProtocol
= lens _prwtsipUploadProtocol
(\ s a -> s{_prwtsipUploadProtocol = a})
-- | OAuth access token.
prwtsipAccessToken :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy (Maybe Text)
prwtsipAccessToken
= lens _prwtsipAccessToken
(\ s a -> s{_prwtsipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prwtsipUploadType :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy (Maybe Text)
prwtsipUploadType
= lens _prwtsipUploadType
(\ s a -> s{_prwtsipUploadType = a})
-- | Multipart request metadata.
prwtsipPayload :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy SetIAMPolicyRequest
prwtsipPayload
= lens _prwtsipPayload
(\ s a -> s{_prwtsipPayload = a})
-- | REQUIRED: The resource for which the policy is being specified. See the
-- operation documentation for the appropriate value for this field.
prwtsipResource :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy Text
prwtsipResource
= lens _prwtsipResource
(\ s a -> s{_prwtsipResource = a})
-- | JSONP
prwtsipCallback :: Lens' ProjectsRegionsWorkflowTemplatesSetIAMPolicy (Maybe Text)
prwtsipCallback
= lens _prwtsipCallback
(\ s a -> s{_prwtsipCallback = a})
instance GoogleRequest
ProjectsRegionsWorkflowTemplatesSetIAMPolicy
where
type Rs ProjectsRegionsWorkflowTemplatesSetIAMPolicy
= Policy
type Scopes
ProjectsRegionsWorkflowTemplatesSetIAMPolicy
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsRegionsWorkflowTemplatesSetIAMPolicy'{..}
= go _prwtsipResource _prwtsipXgafv
_prwtsipUploadProtocol
_prwtsipAccessToken
_prwtsipUploadType
_prwtsipCallback
(Just AltJSON)
_prwtsipPayload
dataprocService
where go
= buildClient
(Proxy ::
Proxy
ProjectsRegionsWorkflowTemplatesSetIAMPolicyResource)
mempty
| brendanhay/gogol | gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/WorkflowTemplates/SetIAMPolicy.hs | mpl-2.0 | 6,479 | 0 | 16 | 1,345 | 783 | 459 | 324 | 124 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DLP.Projects.Locations.StoredInfoTypes.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a stored infoType. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ <https://cloud.google.com/dlp/docs/ Cloud Data Loss Prevention (DLP) API Reference> for @dlp.projects.locations.storedInfoTypes.delete@.
module Network.Google.Resource.DLP.Projects.Locations.StoredInfoTypes.Delete
(
-- * REST Resource
ProjectsLocationsStoredInfoTypesDeleteResource
-- * Creating a Request
, projectsLocationsStoredInfoTypesDelete
, ProjectsLocationsStoredInfoTypesDelete
-- * Request Lenses
, plsitdXgafv
, plsitdUploadProtocol
, plsitdAccessToken
, plsitdUploadType
, plsitdName
, plsitdCallback
) where
import Network.Google.DLP.Types
import Network.Google.Prelude
-- | A resource alias for @dlp.projects.locations.storedInfoTypes.delete@ method which the
-- 'ProjectsLocationsStoredInfoTypesDelete' request conforms to.
type ProjectsLocationsStoredInfoTypesDeleteResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Delete '[JSON] GoogleProtobufEmpty
-- | Deletes a stored infoType. See
-- https:\/\/cloud.google.com\/dlp\/docs\/creating-stored-infotypes to
-- learn more.
--
-- /See:/ 'projectsLocationsStoredInfoTypesDelete' smart constructor.
data ProjectsLocationsStoredInfoTypesDelete =
ProjectsLocationsStoredInfoTypesDelete'
{ _plsitdXgafv :: !(Maybe Xgafv)
, _plsitdUploadProtocol :: !(Maybe Text)
, _plsitdAccessToken :: !(Maybe Text)
, _plsitdUploadType :: !(Maybe Text)
, _plsitdName :: !Text
, _plsitdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsStoredInfoTypesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plsitdXgafv'
--
-- * 'plsitdUploadProtocol'
--
-- * 'plsitdAccessToken'
--
-- * 'plsitdUploadType'
--
-- * 'plsitdName'
--
-- * 'plsitdCallback'
projectsLocationsStoredInfoTypesDelete
:: Text -- ^ 'plsitdName'
-> ProjectsLocationsStoredInfoTypesDelete
projectsLocationsStoredInfoTypesDelete pPlsitdName_ =
ProjectsLocationsStoredInfoTypesDelete'
{ _plsitdXgafv = Nothing
, _plsitdUploadProtocol = Nothing
, _plsitdAccessToken = Nothing
, _plsitdUploadType = Nothing
, _plsitdName = pPlsitdName_
, _plsitdCallback = Nothing
}
-- | V1 error format.
plsitdXgafv :: Lens' ProjectsLocationsStoredInfoTypesDelete (Maybe Xgafv)
plsitdXgafv
= lens _plsitdXgafv (\ s a -> s{_plsitdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plsitdUploadProtocol :: Lens' ProjectsLocationsStoredInfoTypesDelete (Maybe Text)
plsitdUploadProtocol
= lens _plsitdUploadProtocol
(\ s a -> s{_plsitdUploadProtocol = a})
-- | OAuth access token.
plsitdAccessToken :: Lens' ProjectsLocationsStoredInfoTypesDelete (Maybe Text)
plsitdAccessToken
= lens _plsitdAccessToken
(\ s a -> s{_plsitdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plsitdUploadType :: Lens' ProjectsLocationsStoredInfoTypesDelete (Maybe Text)
plsitdUploadType
= lens _plsitdUploadType
(\ s a -> s{_plsitdUploadType = a})
-- | Required. Resource name of the organization and storedInfoType to be
-- deleted, for example
-- \`organizations\/433245324\/storedInfoTypes\/432452342\` or
-- projects\/project-id\/storedInfoTypes\/432452342.
plsitdName :: Lens' ProjectsLocationsStoredInfoTypesDelete Text
plsitdName
= lens _plsitdName (\ s a -> s{_plsitdName = a})
-- | JSONP
plsitdCallback :: Lens' ProjectsLocationsStoredInfoTypesDelete (Maybe Text)
plsitdCallback
= lens _plsitdCallback
(\ s a -> s{_plsitdCallback = a})
instance GoogleRequest
ProjectsLocationsStoredInfoTypesDelete
where
type Rs ProjectsLocationsStoredInfoTypesDelete =
GoogleProtobufEmpty
type Scopes ProjectsLocationsStoredInfoTypesDelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsStoredInfoTypesDelete'{..}
= go _plsitdName _plsitdXgafv _plsitdUploadProtocol
_plsitdAccessToken
_plsitdUploadType
_plsitdCallback
(Just AltJSON)
dLPService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsStoredInfoTypesDeleteResource)
mempty
| brendanhay/gogol | gogol-dlp/gen/Network/Google/Resource/DLP/Projects/Locations/StoredInfoTypes/Delete.hs | mpl-2.0 | 5,614 | 0 | 15 | 1,149 | 703 | 414 | 289 | 108 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (ask)
import Database.PostgreSQL.Migrations
import Database.PostgreSQL.Simple
up :: Connection -> IO ()
up = migrate $ do
conn <- ask
liftIO $ execute_ conn "create extension if not exists pgcrypto"
add_column "blog" "password_digest"
"text NOT NULL DEFAULT crypt(md5(random()::text), gen_salt('bf'))"
down :: Connection -> IO ()
down = migrate $ do
drop_column "blog" "password_digest"
conn <- ask
liftIO $ execute_ conn "drop extension if exists pgcrypto"
main :: IO ()
main = defaultMain up down
| alevy/mappend | db/migrations/20151124163354_blog_password.hs | agpl-3.0 | 622 | 0 | 9 | 103 | 159 | 81 | 78 | 18 | 1 |
{-# LANGUAGE ViewPatterns #-}
import Test.QuickCheck
import Test.QuickCheck.Function
-- 16.9
functorIdentity :: (Functor f, Eq (f a)) =>
f a -> Bool
functorIdentity f = fmap id f == f
functorCompose :: (Eq (f c), Functor f) =>
(a -> b)
-> (b -> c)
-> f a
-> Bool
functorCompose f g x = (fmap g (fmap f x)) == (fmap (g . f) x)
functorCompose' :: (Eq (f c), Functor f) =>
f a
-> Fun a b
-> Fun b c
-> Bool
functorCompose' x (Fun _ f) (Fun _ g) =
(fmap (g . f) x) == (fmap g . fmap f $ x)
-- 1.
newtype Identity a = Identity a deriving (Eq, Show)
instance Functor Identity where
fmap f (Identity a) = Identity (f a)
genId :: Arbitrary a => Gen (Identity a)
genId = do
x <- arbitrary
return $ Identity x
instance Arbitrary a => Arbitrary (Identity a) where
arbitrary = genId
-- 2.
data Pair a = Pair a a deriving (Eq, Show)
instance Functor Pair where
fmap f (Pair a b) = Pair (f a) (f b)
genPair :: Arbitrary a => Gen (Pair a)
genPair = do
x <- arbitrary
y <- arbitrary
return $ Pair x y
instance Arbitrary a => Arbitrary (Pair a) where
arbitrary = genPair
-- 3.
data Two a b = Two a b deriving (Eq, Show)
instance Functor (Two a) where
fmap f (Two a b) = Two a (f b)
genTwo :: (Arbitrary a, Arbitrary b) => Gen (Two a b)
genTwo = do
x <- arbitrary
y <- arbitrary
return $ Two x y
instance (Arbitrary a, Arbitrary b) =>
Arbitrary (Two a b) where
arbitrary = genTwo
-- 4.
data Three a b c = Three a b c deriving (Eq, Show)
instance Functor (Three a b) where
fmap f (Three a b c) = Three a b (f c)
genThree :: (Arbitrary a, Arbitrary b, Arbitrary c) =>
Gen (Three a b c)
genThree = do
a <- arbitrary
b <- arbitrary
c <- arbitrary
return $ Three a b c
instance (Arbitrary a, Arbitrary b, Arbitrary c) =>
Arbitrary (Three a b c) where
arbitrary = genThree
-- 5.
data Three' a b = Three' a b b deriving (Eq, Show)
instance Functor (Three' a) where
fmap f (Three' a b c) = Three' a (f b) (f c)
genThree' :: (Arbitrary a, Arbitrary b) =>
Gen (Three' a b)
genThree' = do
a <- arbitrary
b <- arbitrary
c <- arbitrary
return $ Three' a b c
instance (Arbitrary a, Arbitrary b) =>
Arbitrary (Three' a b) where
arbitrary = genThree'
-- 6.
data Four a b c d = Four a b c d deriving (Eq, Show)
instance Functor (Four a b c) where
fmap f (Four a b c d) = Four a b c (f d)
instance (Arbitrary a, Arbitrary b,
Arbitrary c, Arbitrary d) =>
Arbitrary (Four a b c d) where
arbitrary = genFour
genFour :: (Arbitrary a, Arbitrary b,
Arbitrary c, Arbitrary d) =>
Gen (Four a b c d)
genFour = do
a <- arbitrary
b <- arbitrary
c <- arbitrary
d <- arbitrary
return $ Four a b c d
-- 7.
data Four' a b = Four' a a a b deriving (Eq, Show)
instance Functor (Four' a) where
fmap f (Four' a b c d) = Four' a b c (f d)
instance (Arbitrary a, Arbitrary b) =>
Arbitrary (Four' a b) where
arbitrary = genFour'
genFour' :: (Arbitrary a, Arbitrary b) =>
Gen (Four' a b)
genFour' = do
a <- arbitrary
b <- arbitrary
c <- arbitrary
d <- arbitrary
return $ Four' a b c d
-- main
type IntToInt = Fun Int Int
type IntFC = [Int] -> IntToInt -> IntToInt -> Bool
type IdFC = Identity Int -> IntToInt -> IntToInt -> Bool
type PairFC = Pair Int -> IntToInt -> IntToInt -> Bool
type TwoFC = Two Int Int -> IntToInt -> IntToInt -> Bool
type ThreeFC = Three Int Int Int -> IntToInt -> IntToInt -> Bool
type ThreeFC' = Three' Int Int -> IntToInt -> IntToInt -> Bool
type FourFC = Four Char Char Char Int -> IntToInt -> IntToInt -> Bool
type FourFC' = Four' Char Int -> IntToInt -> IntToInt -> Bool
main :: IO ()
main = do
putStrLn "\n [Int]"
quickCheck (functorIdentity :: [Int] -> Bool)
quickCheck (functorCompose' :: IntFC)
putStrLn "\n Identity"
quickCheck (functorIdentity :: Identity Int -> Bool)
quickCheck (functorCompose' :: IdFC)
putStrLn "\n Pair"
quickCheck (functorIdentity :: Pair Int -> Bool)
quickCheck (functorCompose' :: PairFC)
putStrLn "\n Two"
quickCheck (functorIdentity :: Two Int Int -> Bool)
quickCheck (functorCompose' :: TwoFC)
putStrLn "\n Three"
quickCheck (functorIdentity :: Three Int Int Int -> Bool)
quickCheck (functorCompose' :: ThreeFC)
putStrLn "\n Three'"
quickCheck (functorIdentity :: Three' Int Int -> Bool)
quickCheck (functorCompose' :: ThreeFC')
putStrLn "\n Four"
quickCheck (functorIdentity :: Four Char Char Char Char -> Bool)
quickCheck (functorCompose' :: FourFC)
putStrLn "\n Four'"
quickCheck (functorIdentity :: Four' Char Int -> Bool)
quickCheck (functorCompose' :: FourFC')
| dmvianna/haskellbook | src/Ch16-FunctorQuickCheck.hs | unlicense | 4,814 | 0 | 10 | 1,294 | 2,055 | 1,035 | 1,020 | 140 | 1 |
module Provided.ExprT where
data ExprT = Lit Integer
| Add ExprT ExprT
| Mul ExprT ExprT
deriving (Show, Eq)
| nilthehuman/cis194 | Provided/ExprT.hs | unlicense | 134 | 0 | 6 | 44 | 40 | 23 | 17 | 5 | 0 |
import Data.Char
f :: Show a => (a, b) -> IO (a, b)
f t@(a, _) = do
print a
return t
isSubsequenceOf :: (Eq a) => [a] -> [a] -> Bool
isSubsequenceOf sub par =
case sub of
[] -> True
x:xs -> x `elem` par && isSubsequenceOf xs par
-- isLetter :: Char -> Bool
-- isLetter = flip elem (['a'..'z'] ++ ['A'..'Z'])
splitWords :: String -> [String]
splitWords str =
go str [] True
where
go [] acc _ = reverse acc
go (x:xs) [] True
| isLetter x = go xs [[x]] False
| otherwise = go xs [] True
go (x:xs) acc@(w:ws) startWord
| (not $ isLetter x) && startWord = go xs acc startWord
| (isLetter x && startWord) = go xs ([x]:acc) False
| (not $ isLetter x) && not startWord = go xs acc True
| (isLetter x) && not startWord = go xs ((w ++ [x]):ws) startWord
capitalizeWords :: String -> [(String, String)]
capitalizeWords str = map (\x -> (x, capitalizeWord x)) $ splitWords str
capitalizeWord :: String -> String
capitalizeWord [] = []
capitalizeWord (x:xs) = toUpper x:xs
capitalizeParagraph :: String -> String
capitalizeParagraph s =
go s [] True
where
go [] acc _ = acc
go (x:xs) acc period
| x == '.' = go xs (acc ++ [x]) True
| x `elem` ['a'..'z'] && period =
go xs (acc ++ [toUpper x]) False
| x `elem` ['A'..'Z'] && period =
go xs (acc ++ [x]) False
| otherwise = go xs (acc ++ [x]) period
-- Phone excercise
data DaPhone = Sommat
convo :: [String]
convo = ["Wanna play 20 questions",
"Ya",
"U 1st haha",
"Lol ok. Have u ever tasted alcohol lol",
"Lol ya",
"Wow ur cool haha. Ur turn",
"Ok. Do u think I am pretty Lol",
"Lol ya",
"Haha thanks just making sure rofl ur turn"]
-- validButtons = "1234567890*#"
type Digit = Char
-- valid presses = [1..4]
type Presses = Int
cellPhonesDead :: DaPhone -> String -> [(Digit, Presses)]
cellPhonesDead = undefined
| dmvianna/haskellbook | src/Ch11Ex-asPatterns.hs | unlicense | 2,103 | 0 | 13 | 692 | 807 | 423 | 384 | 53 | 3 |
module Main where
import System.Environment (getArgs)
import Blockchain.Node.RestApi.Server (bootstrap)
import Blockchain.Node.Config (BlockchainConfig(..), defaultConfig)
config :: BlockchainConfig
config = defaultConfig
main :: IO ()
main = getArgs >>= parse
parse :: [String] -> IO ()
parse [] = bootstrap config
parse (x:_) = bootstrap config {httpPort = read x :: Int}
| carbolymer/blockchain | blockchain-node/app/Main.hs | apache-2.0 | 382 | 0 | 8 | 58 | 135 | 77 | 58 | 11 | 1 |
-- Copyright 2019-2021 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuantifiedConstraints #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-- | An approximation of a dependent pair type.
module Data.Ten.Sigma
( (:**)(..), overFragment, lmapFragment, eqKey
, OpCostar(..), caseFragment
) where
import Data.Functor.Contravariant (Contravariant(..))
import Data.Maybe (fromMaybe, isJust)
import Data.Type.Equality ((:~:)(Refl), TestEquality(..))
import Control.DeepSeq (NFData(..))
import Data.GADT.Compare (GEq(..))
import Data.Portray (Portray(..), Portrayal(..), infixr_)
import Data.Portray.Diff (Diff(..), diffVs)
import Data.Ten.Entails ((:!:), Entails, withEntailment)
import Data.Ten.Foldable (Foldable10(..))
import Data.Ten.Foldable.WithIndex (Foldable10WithIndex(..))
import Data.Ten.Functor (Functor10(..))
import Data.Ten.Functor.WithIndex (Index10, Functor10WithIndex(..))
import Data.Ten.Representable (Representable10(..))
import Data.Ten.Traversable (Traversable10(..))
import Data.Ten.Traversable.WithIndex (Traversable10WithIndex(..))
import Data.Ten.Update (Update10, overRep10)
infixr 5 :**
-- | A pair of @k a@ and @m a@ for any (existential) @a@.
--
-- This is a lot like a dependent pair, in that it contains a left-hand-side
-- value that's meant to identify a type, and a right-hand-side parameterized
-- by that type. For example, the true dependent pair type (in e.g. Idris)
-- @(n :: Nat ** Vec n Bool)@ could be approximated in Haskell as
-- @SInt :** Ap10 Bool Vec@.
--
-- This can be used to represent one field of a 'Representable10', where @k@ is
-- set to @Rep10 f@. The @k a@ identifies which field (and locks down its
-- type), and the @m a@ provides its value.
data k :** m = forall a. k a :** m a
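-- A minimal construction sketch (the key GADT below is hypothetical, not part
-- of this module):
--
-- > data Key a where
-- >   KeyAge  :: Key Int
-- >   KeyName :: Key String
-- >
-- > agePair :: Key :** Identity
-- > agePair = KeyAge :** Identity 42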
instance (forall a. NFData (k a), Entails k (NFData :!: m))
=> NFData (k :** m) where
rnf (k :** m) = withEntailment @(NFData :!: m) k $ rnf k `seq` rnf m
instance (GEq k, Entails k (Eq :!: m))
=> Eq (k :** m) where
(kl :** ml) == (kr :** mr) = case geq kl kr of
Nothing -> False
Just Refl -> withEntailment @(Eq :!: m) kl $ ml == mr
{-
instance ( Arbitrary (Exists k)
, Representable10 rec, Entails k (Arbitrary :!: m)
)
=> Arbitrary (k :** m) where
arbitrary = do
Exists k <- arbitrary
a <- withEntailment @(Arbitrary :!: m) k arbitrary
return $ k :** a
-}
instance (forall a. Show (k a), Entails k (Show :!: m))
=> Show (k :** m) where
-- We have to write this by hand because the derived version doesn't know how
-- to call into 'Constrained10' to find a 'Show' instance for @a@ based on
-- @ka@.
showsPrec p (ka :** ma) = showParen (p > prec) $
showsPrec (1+prec) ka .
showString " :** " .
withEntailment @(Show :!: m) ka (showsPrec (1+prec) ma)
where
prec = 5
instance (forall a. Portray (k a), Entails k (Portray :!: m))
=> Portray (k :** m) where
portray (ka :** ma) = withEntailment @(Portray :!: m) ka $
Binop ":**" (infixr_ 5) (portray ka) (portray ma)
instance ( TestEquality k, forall a. Portray (k a), forall a. Diff (k a)
, Entails k (Portray :!: m), Entails k (Diff :!: m)
)
=> Diff (k :** m) where
diff (ka :** ma) (kb :** mb) = case testEquality ka kb of
Just Refl -> withEntailment @(Diff :!: m) ka $
case (diff ka kb, diff ma mb) of
(Nothing, Nothing) -> Nothing
(dk, dm) ->
Just $ Binop ":**" (infixr_ 5)
(fromMaybe (portray ka) dk)
(fromMaybe (Opaque "_") dm)
Nothing -> Just $
withEntailment @(Portray :!: m) ka (portray (ka :** ma)) `diffVs`
withEntailment @(Portray :!: m) kb (portray (kb :** mb))
instance Functor10 ((:**) k) where fmap10 f (k :** m) = k :** f m
instance Foldable10 ((:**) k) where foldMap10 f (_ :** m) = f m
instance Traversable10 ((:**) k) where
mapTraverse10 r f (k :** m) = r . (k :**) <$> f m
type instance Index10 ((:**) k) = k
instance Functor10WithIndex ((:**) k) where imap10 f (k :** m) = k :** f k m
instance Foldable10WithIndex ((:**) k) where ifoldMap10 f (k :** m) = f k m
instance Traversable10WithIndex ((:**) k) where
imapTraverse10 r f (k :** m) = r . (k :**) <$> f k m
-- | Check if two pairs have the same key.
eqKey :: GEq k => k :** m -> k :** n -> Bool
eqKey (kl :** _) (kr :** _) = isJust (geq kl kr)
-- | "Zip" a single field of a record with a (':**').
--
-- Since we're only operating on a single field, the @n@ type can't vary like
-- in a traditional zip function.
overFragment
:: Update10 rec
=> (forall a. m a -> n a -> n a) -> Rep10 rec :** m -> rec n -> rec n
overFragment f (k :** x) = overRep10 k (f x)
-- | Newtype used in implementing contravariant conversion of Fragments. See
-- 'lmapFragment'. Only exported because it's used in the type of
-- 'lmapFragment', but it can be largely ignored, like the many "ALens" etc.
-- types in "lens".
newtype OpCostar f r a = OpCostar { getOpCostar :: f a -> r }
instance Functor f => Contravariant (OpCostar f r) where
contramap f (OpCostar g) = OpCostar (g . fmap f)
-- | Simulate a case statement on a (':**') with a record of functions.
--
-- @
-- caseFragment (MyRecord1 (OpCostar isJust) (OpCostar isNothing)) x
-- @
--
-- Is analogous to (pseudo-code):
--
-- @
-- case x of { (_mr1A :** mx) -> isJust mx; (_mr1B :** mx) -> isNothing mx }
-- @
--
-- This is just the action of `Representable10` (whereby @f m@ is isomorphic to
-- @forall a. Rep10 f a -> m a@) plus some newtyping:
--
-- @
-- f (OpCostar m r) ~= (by Representable10)
-- forall a. Rep10 f a -> OpCostar m r a ~= (by newtype)
-- forall a. Rep10 f a -> f a -> r ~= (by GADT constructor)
-- Rep10 f :** m -> r
-- @
caseFragment
:: Representable10 f
=> f (OpCostar m r)
-> Rep10 f :** m -> r
caseFragment fco (k :** v) = getOpCostar (fco `index10` k) v
-- | Convert a (':**') to a different key type contravariantly.
--
-- Example usage:
--
-- data MyRecord1 m = MyRecord1 { _mr1A :: Ap10 Int m, _mr1B :: Ap10 Int m }
-- data MyRecord2 m = MyRecord2 { _mr2A :: Ap10 Int m }
--
-- -- Collapse both fields _mr1A and _mr1B onto _mr2A.
-- example
-- :: Rep10 MyRecord1 :** Identity
-- -> Rep10 MyRecord2 :** Identity
-- example = lmapFragment $ \MyRecord2{..} -> MyRecord1
-- { _mr1A = _mr2A
-- , _mr1B = _mr2A
-- }
--
-- It looks weird that the argument converts from @recB@ to @recA@ in order
-- to convert (':**') the other way, so it merits some explanation: first,
-- note that, by @'Representable10' recA@, we know that @recA m@ is
-- isomorphic to @forall a. 'Rep10' recA a -> m a@. That is, @Rep10 recA@
-- effectively appears in negative position in @recA m@. So, a function from
-- @recB@ to @recA@ hand-wavingly contains a function in the opposite
-- direction from @Rep10 recA@ to @Rep10 recB@.
--
-- With the intuition out of the way, here's how we actually accomplish the
-- conversion: start off with a record @recB@ where each field is a function
-- that trivially rebuilds the corresponding @(:**)@ in each field with
-- @k :: Rep10 recB@ we literally just put @(k :**)@ with the appropriate
-- newtype constructors. Then, apply the user's contravariant conversion
-- function, to turn our @recB@ of @recB@-pair-builders into an
-- @recA@ of @recB@-pair-builders. If the user-provided conversion
-- function involves changing any field types, it must have done so by
-- @contramap@ping the pair-builders: instead of a function that just
-- directly applies @(k :=)@ to its argument, they will now contain functions
-- equivalent to @\ma -> k := _f ma@. Finally, unpack the @recA@ pair
-- and use its @k@ to fetch that field's @recB@-pair-builder (potentially
-- with a conversion inserted at the front), and apply it to the payload.
--
-- Usage will typically involve applying contramap to some number of fields and
-- leaving the rest unchanged. If you have a type-changing
-- 'Control.Lens.Setter' at hand, it's probably easier to use
-- 'Data.Ten.Lens.fragmented'.
lmapFragment
:: forall recA recB m f
. ( Representable10 recA, Representable10 recB
, f ~ OpCostar m (Rep10 recB :** m)
)
=> (recB f -> recA f)
-> Rep10 recA :** m -> Rep10 recB :** m
lmapFragment f = caseFragment fragmentBuilders
where
fragmentBuilders :: recA (OpCostar m (Rep10 recB :** m))
fragmentBuilders = f (tabulate10 (\k' -> OpCostar (k' :**)))
| google/hs-ten | ten/src/Data/Ten/Sigma.hs | apache-2.0 | 9,453 | 0 | 18 | 2,002 | 1,947 | 1,115 | 832 | -1 | -1 |
module AlgabraicDataTypes where
-- Data type with multiple constructor options. Enum type
data Thing = Shoe
| Ship
| SealingWax
| Cabbage
| King
deriving Show
shoe :: Thing
shoe = Shoe
listO'Things :: [Thing]
listO'Things = [Shoe, SealingWax, King, Cabbage, King]
-- write function by pattern matching
isSmall :: Thing -> Bool
isSmall Shoe = True
isSmall Ship = False
isSmall SealingWax = True
isSmall Cabbage = True
isSmall King = False
-- better pattern matching
isSmall2 :: Thing -> Bool
isSmall2 Shoe = True
isSmall2 Cabbage = True
isSmall2 SealingWax = True
isSmall2 _ = False
-- beyond enumerations
data FailableDouble = Failure
| OK Double
deriving Show
ex01 = Failure
ex02 = OK 0.34
safeDiv :: Double -> Double -> FailableDouble
safeDiv _ 0 = Failure
safeDiv x y = OK (x / y)
failureToZero :: FailableDouble -> Double
failureToZero Failure = 0
failureToZero (OK d) = d
--- Person
data Person = Person String Int Thing
deriving Show
richard = Person "richard" 32 Ship
stan = Person "Stan" 15 King
getAge ::Person -> Int
getAge (Person _ x _) = x
-- get the whole p with the pattern matched data
baz :: Person -> String
baz p@(Person n _ _) = "The name field of (" ++ show p ++ ") is " ++ n
checkFav :: Person -> String
checkFav (Person n _ Ship) = n ++ ": Best thing ever"
checkFav (Person n _ _) = n ++ ": Whatevs."
-- use case expressions
failureToZero' :: FailableDouble -> Double
failureToZero' x = case x of
Failure -> 0
OK d -> d
-- polymorphic data types
example_a :: Maybe Int -> Int
example_a (Just n) = n
example_a Nothing = -1
data LogMessage = LogMessage Int String
example_b :: LogMessage -> Maybe String
example_b (LogMessage severity string) | severity >= 50 = Just string
example_b _ = Nothing
-- recursive data types
data List t = Empty | Cons t (List t)
deriving Show
lst1 :: List Int
lst1 = Cons 1 (Cons 2 (Cons 3 Empty))
intListProd :: List Int -> Int
intListProd (Cons x l) = x * intListProd l
intListProd Empty = 1
-- binary tree
data BinaryTree t = Nil
| Leaf t
| Node (BinaryTree t) t (BinaryTree t)
deriving Show
| markmandel/cis194 | src/week3/lecture/AlgabraicDataTypes.hs | apache-2.0 | 2,208 | 0 | 9 | 546 | 701 | 374 | 327 | 66 | 2 |
{-|
Copyright 2015 Michael Krolikowski
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module CouchDump.Convert (convert) where
import Control.Monad
import CouchDump.Misc
import Data.Aeson
import Data.ByteString.Lazy
import Data.HashMap.Strict
import GHC.Generics
newtype Row = Row {doc :: Object} deriving (Show, Generic, FromJSON, ToJSON)
newtype ExportDocument = ExportDocument {rows :: [Row]} deriving (Show, Generic, FromJSON, ToJSON)
newtype ImportDocument = ImportDocument {docs :: [Object]} deriving (Show, Generic, FromJSON, ToJSON)
exportToImport :: ExportDocument -> ImportDocument
exportToImport (ExportDocument rows) = ImportDocument $ liftM (delete "_rev" . doc) rows
decodeImportDocument :: Monad m => ByteString -> m ImportDocument
decodeImportDocument content = case decode content :: Maybe ExportDocument of
Just r -> return $ exportToImport r
Nothing -> decodeWithFailure content
convert :: Monad m => ByteString -> m ByteString
convert i = liftM encode $ decodeImportDocument i
| mkroli/couchdump | src/CouchDump/Convert.hs | apache-2.0 | 1,654 | 0 | 9 | 297 | 293 | 160 | 133 | 21 | 2 |
-- Copyright 2015 Google Inc. All Rights Reserved.
--
-- Licensed under the Apache License, Version 2.0 (the "License")--
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Parser (parseModule) where
import Datatypes
import Text.Parsec
import Text.Parsec.String
import Text.Parsec.Language
import qualified Text.Parsec.Token as P
language = javaStyle
{ P.reservedNames = ["CONSTRUCTORS", "FUNCTIONS", "RULES"]
, P.reservedOpNames = ["=", "->", ":", "*", "!", "+", "\\"]
}
lexer = P.makeTokenParser language -- use the customised definition above (reserved names/operators)
parens = P.parens lexer
identifier = P.identifier lexer
colon = P.colon lexer
comma = P.comma lexer
lexeme = P.lexeme lexer
whiteSpace = P.whiteSpace lexer
equals = P.reservedOp lexer "="
arrow = P.reservedOp lexer "->"
pipe = P.reservedOp lexer "|"
star = P.reservedOp lexer "*"
bang = P.reservedOp lexer "!"
plus = P.reserved lexer "+"
minus = P.reserved lexer "\\"
constructorsKw = P.reserved lexer "CONSTRUCTORS"
functionsKw = P.reserved lexer "FUNCTIONS"
rulesKw = P.reserved lexer "RULES"
funName = FunName <$> identifier
varName = VarName <$> identifier
typeName = TypeName <$> identifier
termSum :: Parser Term
termSum = mkPlus <$> termCompl `sepBy1` plus
where mkPlus [t] = t
mkPlus ts = foldr1 Plus ts
termCompl :: Parser Term
termCompl = try (mkCompl <$> term <*> minus <*> term)
<|> term
where mkCompl t1 _ t2 = Compl t1 t2
term :: Parser Term
term = try (Appl <$> funName <*> parens (termSum `sepBy` comma))
<|> mkAnti <$> bang <*> term
<|> Var <$> varName
<|> parens termSum
where mkAnti _ t = Anti t
rule :: Parser Rule
rule = mkRule <$> termSum <*> arrow <*> term
where mkRule lhs _ rhs = Rule lhs rhs
rules :: Parser [Rule]
rules = many rule
funType :: Parser ([TypeName], TypeName)
funType = try (mkType <$> (typeName `sepBy` star) <*> arrow <*> typeName)
<|> mkEmptyType <$> typeName
where
mkType domain _ range = (domain, range)
mkEmptyType range = ([], range)
decl :: Parser Decl
decl = mkDecl <$> funName <*> colon <*> funType
where mkDecl f _ (domain, range) = Decl f domain range
decls :: Parser [Decl]
decls = many (try decl)
modul :: Parser Module
modul = mkModule <$> constructorsKw <*> decls <*> functionsKw <*> decls <*> rulesKw <*> rules
where mkModule _ ctors _ funs _ rules = Module (Signature ctors funs) rules
parseModule :: String -> String -> Either ParseError Module
parseModule sourceName input = parse (whiteSpace *> modul <* eof) sourceName input
| polux/subsume | Parser.hs | apache-2.0 | 2,937 | 0 | 16 | 563 | 854 | 459 | 395 | 63 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ExistentialQuantification #-}
module Yesod.Form.Types
( -- * Helpers
Enctype (..)
, FormResult (..)
, FormMessage (..)
, Env
, FileEnv
, Ints (..)
-- * Form
, MForm
, AForm (..)
-- * Build forms
, Field (..)
, FieldSettings (..)
, FieldView (..)
) where
import Control.Monad.Trans.RWS (RWST)
import Yesod.Request (FileInfo)
import Data.Text (Text)
import Data.Monoid (Monoid (..))
import Text.Blaze (Html, ToHtml (toHtml))
import Control.Applicative ((<$>), Applicative (..))
import Control.Monad (liftM)
import Data.String (IsString (..))
import Yesod.Core (GHandler, GWidget, SomeMessage)
import qualified Data.Map as Map
-- | A form can produce three different results: there was no data available,
-- the data was invalid, or there was a successful parse.
--
-- The 'Applicative' instance will concatenate the failure messages in two
-- 'FormResult's.
data FormResult a = FormMissing
| FormFailure [Text]
| FormSuccess a
deriving Show
instance Functor FormResult where
fmap _ FormMissing = FormMissing
fmap _ (FormFailure errs) = FormFailure errs
fmap f (FormSuccess a) = FormSuccess $ f a
instance Applicative FormResult where
pure = FormSuccess
(FormSuccess f) <*> (FormSuccess g) = FormSuccess $ f g
(FormFailure x) <*> (FormFailure y) = FormFailure $ x ++ y
(FormFailure x) <*> _ = FormFailure x
_ <*> (FormFailure y) = FormFailure y
_ <*> _ = FormMissing
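-- A few concrete cases, which follow directly from the equations above:
--
-- > FormSuccess (+1) <*> FormSuccess 2                  -- FormSuccess 3
-- > FormFailure ["no name"] <*> FormFailure ["no age"]  -- FormFailure ["no name","no age"]
-- > FormMissing <*> FormSuccess 2                       -- FormMissing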
instance Monoid m => Monoid (FormResult m) where
mempty = pure mempty
mappend x y = mappend <$> x <*> y
-- | The encoding type required by a form. The 'ToHtml' instance produces values
-- that can be inserted directly into HTML.
data Enctype = UrlEncoded | Multipart
deriving (Eq, Enum, Bounded)
instance ToHtml Enctype where
toHtml UrlEncoded = "application/x-www-form-urlencoded"
toHtml Multipart = "multipart/form-data"
instance Monoid Enctype where
mempty = UrlEncoded
mappend UrlEncoded UrlEncoded = UrlEncoded
mappend _ _ = Multipart
data Ints = IntCons Int Ints | IntSingle Int
instance Show Ints where
show (IntSingle i) = show i
show (IntCons i is) = show i ++ ('-' : show is)
type Env = Map.Map Text [Text]
type FileEnv = Map.Map Text FileInfo
type Lang = Text
type MForm sub master a = RWST (Maybe (Env, FileEnv), master, [Lang]) Enctype Ints (GHandler sub master) a
newtype AForm sub master a = AForm
{ unAForm :: (master, [Text]) -> Maybe (Env, FileEnv) -> Ints -> GHandler sub master (FormResult a, [FieldView sub master] -> [FieldView sub master], Ints, Enctype)
}
instance Functor (AForm sub master) where
fmap f (AForm a) =
AForm $ \x y z -> liftM go $ a x y z
where
go (w, x, y, z) = (fmap f w, x, y, z)
instance Applicative (AForm sub master) where
pure x = AForm $ const $ const $ \ints -> return (FormSuccess x, mempty, ints, mempty)
(AForm f) <*> (AForm g) = AForm $ \mr env ints -> do
(a, b, ints', c) <- f mr env ints
(x, y, ints'', z) <- g mr env ints'
return (a <*> x, b `mappend` y, ints'', c `mappend` z)
instance Monoid a => Monoid (AForm sub master a) where
mempty = pure mempty
mappend a b = mappend <$> a <*> b
data FieldSettings msg = FieldSettings
{ fsLabel :: msg -- FIXME switch to SomeMessage?
, fsTooltip :: Maybe msg
, fsId :: Maybe Text
, fsName :: Maybe Text
, fsClass :: [Text]
}
instance (a ~ Text) => IsString (FieldSettings a) where
fromString s = FieldSettings (fromString s) Nothing Nothing Nothing []
data FieldView sub master = FieldView
{ fvLabel :: Html
, fvTooltip :: Maybe Html
, fvId :: Text
, fvInput :: GWidget sub master ()
, fvErrors :: Maybe Html
, fvRequired :: Bool
}
data Field sub master a = Field
{ fieldParse :: [Text] -> GHandler sub master (Either (SomeMessage master) (Maybe a))
-- | ID, name, class, (invalid text OR legimiate result), required?
, fieldView :: Text
-> Text
-> [Text]
-> Either Text a
-> Bool
-> GWidget sub master ()
}
data FormMessage = MsgInvalidInteger Text
| MsgInvalidNumber Text
| MsgInvalidEntry Text
| MsgInvalidUrl Text
| MsgInvalidEmail Text
| MsgInvalidTimeFormat
| MsgInvalidHour Text
| MsgInvalidMinute Text
| MsgInvalidSecond Text
| MsgInvalidDay
| MsgCsrfWarning
| MsgValueRequired
| MsgInputNotFound Text
| MsgSelectNone
| MsgInvalidBool Text
| MsgBoolYes
| MsgBoolNo
| MsgDelete
| chreekat/yesod | yesod-form/Yesod/Form/Types.hs | bsd-2-clause | 4,933 | 0 | 15 | 1,423 | 1,472 | 824 | 648 | 117 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module WebNewsAPI where
import Network.Curl
import Data.String.Utils (join) --MissingH
import Network.HTTP
import Data.Aeson (decode, encode)
import Data.Aeson.TH (deriveJSON)
import qualified Data.ByteString.Lazy.Char8 as BL (pack, unpack)
import Data.Maybe (Maybe, fromJust)
import Control.Monad.Trans (liftIO)
import WebNewsAPITypes
opts = [CurlHttpHeaders ["Accept: application/json"]]
baseURL = "https://webnews-dev.csh.rit.edu/" --Temporary, production environment doesn't have API access yet
loginEncapsulation url apiKey = baseURL ++ url ++ parameterize [("api_key", apiKey), ("api_agent", "Clockfort's Mobile WebNews")]
-- queryList should look like [ (param1, value1), (param2, value2) ]
parameterize queryList = '?' : join "&" ( map (\ x -> concat[fst x, "=", urlEncode (snd x)]) queryList )
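-- A hedged example of 'parameterize' (illustrative values only, not a real key):
demoQueryString :: String
demoQueryString = parameterize [("api_key", "abc123"), ("api_agent", "My App")]
-- expected: "?api_key=abc123&api_agent=My%20App"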
$(deriveJSON id ''Preferences)
$(deriveJSON id ''UserObj)
$(deriveJSON id ''User)
$(deriveJSON id ''Unread)
$(deriveJSON id ''UnreadCounts)
$(deriveJSON id ''Newsgroup)
$(deriveJSON id ''Newsgroups)
getJSON url apiKey = withCurlDo $ do
curl <- initialize
setopts curl opts
response <- do_curl_ curl (loginEncapsulation url apiKey) method_GET :: IO CurlResponse
let json = respBody response
return json
-- These will probably eventually be glommed into one function that also takes in "what you want" and
-- looks up the url for it in a mapping somewhere (or vicey-versey), but seperate for now is easier to debug.
getUnread apiKey = do
json <- getJSON "unread_counts" apiKey
let req = decode (BL.pack json) :: Maybe Unread
return req
getUser apiKey = do
json <- getJSON "user" apiKey
let req = decode (BL.pack json) :: Maybe User
return req
getPreferences apiKey = do
json <- getJSON "preferences" apiKey
let req = decode (BL.pack json) :: Maybe Preferences
return req
getNewsgroups apiKey = do
json <- getJSON "newsgroups" apiKey
let req = decode (BL.pack json) :: Maybe Newsgroups
return req
| clockfort/mobile-webnews | WebNewsAPI.hs | bsd-3-clause | 2,042 | 11 | 15 | 352 | 605 | 304 | 301 | 45 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Language.Parser.Errors where
import Control.Monad ( mapM )
import Control.Monad.State ( State(..)
, gets
, put
, evalState
)
import GHC.Generics
import Data.Void
import qualified Data.List.NonEmpty as NE
import Data.Aeson
import Text.Megaparsec hiding ( State )
import Text.Megaparsec.Pos ( unPos
, SourcePos(..)
)
import Text.Megaparsec.Error ( parseErrorTextPretty
, errorOffset
)
import Text.Megaparsec.Stream ( reachOffsetNoLine )
type ParserError = ParseErrorBundle String Void
data ImprovizCodeError = ImprovizCodeError
{ line :: Int
, column :: Int
, message :: String
} deriving (Generic, Show, Eq)
instance ToJSON ImprovizCodeError where
toEncoding = genericToEncoding defaultOptions
parseErrorToIError
:: ParseError String Void -> State (PosState String) ImprovizCodeError
parseErrorToIError e = do
(epos, pst') <- gets (reachOffsetNoLine (errorOffset e))
put pst'
let lineNum = unPos $ sourceLine epos
let colNum = unPos $ sourceColumn epos
let msg = parseErrorTextPretty e
return $ ImprovizCodeError lineNum colNum msg
parseErrorsOut :: ParseErrorBundle String Void -> [ImprovizCodeError]
parseErrorsOut pBundle =
let errors = mapM parseErrorToIError (bundleErrors pBundle)
in NE.toList $ evalState errors (bundlePosState pBundle)
prettyPrintErrors :: ParseErrorBundle String Void -> String
prettyPrintErrors = errorBundlePretty
| rumblesan/proviz | src/Language/Parser/Errors.hs | bsd-3-clause | 1,973 | 0 | 12 | 770 | 397 | 216 | 181 | 40 | 1 |
module GHC.Hs.Type (module HsTypes) where
import HsTypes | google/ghc-source-gen | compat/GHC/Hs/Type.hs | bsd-3-clause | 56 | 0 | 4 | 6 | 16 | 11 | 5 | 2 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Scheme where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC -- needed by pattern match
import Prelude hiding (lookup)
data Object = SDouble Double
| SInteger Integer
| SString ByteString
| SBool Bool
deriving (Show)
type Variable = ByteString
data Expression = EObject Object
| EVariable Variable
| EList [Expression]
deriving (Show)
isVariable :: Expression -> Bool
isVariable (EVariable _) = True
isVariable _ = False
data StackValue = SV -- ??
deriving (Show)
data AssemblyCode = AHalt
| ARefer Variable AssemblyCode
| AConstant Expression AssemblyCode
| AClose [Variable] AssemblyCode AssemblyCode
| ATest AssemblyCode AssemblyCode
| AAssign Variable AssemblyCode
| AConti AssemblyCode
| ANuate [StackValue] Variable
| AFrame AssemblyCode AssemblyCode
| AArgument AssemblyCode
| Apply
| Return
deriving (Show)
isTail :: AssemblyCode -> Bool
isTail Return = True
isTail _ = False
compile :: Expression -> AssemblyCode -> AssemblyCode
compile o@(EObject _) next = AConstant o next
compile (EVariable v) next = ARefer v next
compile (EList []) next = error "should not be here"
compile (EList [EVariable "quote", o]) next = AConstant o next
compile (EList [EVariable "lambda", EList vs, body]) next
| all isVariable vs = AClose (map (\(EVariable v) -> v) vs) (compile body Return) next
compile (EList [EVariable "if", test, tcase, fcase]) next = compile test $ ATest (compile tcase next) (compile fcase next)
compile (EList [EVariable "set!", EVariable v]) next = AAssign v next
compile (EList [EVariable "call/cc", x]) next =
if isTail next
then c
else AFrame next c
where
c = AConti . AArgument $ compile x Apply
compile (EList (x:xs)) next = loop xs $ compile x Apply
where
loop :: [Expression] -> AssemblyCode -> AssemblyCode
loop [] c = if isTail next
then c
else AFrame next c
loop (a:as) c = loop as $ compile a $ AArgument c
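-- A small illustration of what `compile` produces, hand-evaluated from the
-- equations above with AHalt as the continuation (not checked by any test
-- suite, just a sketch of the expected output):
--
-- >>> compile (EList [EVariable "if", EVariable "x", EObject (SInteger 1), EObject (SInteger 2)]) AHalt
-- ARefer "x" (ATest (AConstant (EObject (SInteger 1)) AHalt) (AConstant (EObject (SInteger 2)) AHalt))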
data Environment = Env -- ??
deriving (Show)
type Rib = [Expression]
-- a: the accumulator
-- x: the next expression
-- e: the current environment
-- r: the current value rib
-- s: the current stack
vm :: Expression -> AssemblyCode -> Environment -> Rib -> [StackValue] -> Expression
vm a x e r s =
case x of
AHalt -> a
ARefer var x' -> vm a' x' e r s
where a' = lookup var e
AConstant obj x' -> vm obj x' e r s
AClose vars body x' -> vm a' x' e r s
where a' = closure body e vars
ATest tcase fcase -> vm a x' e r s
where x' = sif a tcase fcase
AAssign var x' -> vm a x' e' r s
where e' = insert var e a
    AConti x'           -> vm a' x' e r s
where a' = continuation s
ANuate s' var -> vm a' x' e r s'
where a' = lookup var e
x' = Return
AFrame ret x' -> vm a x' e r' s'
where r' = []
s' = callFrame ret e r s
AArgument x' -> vm a x' e (a:r) s
    -- In the 3imp machine, Apply would take the closure held in the
    -- accumulator apart into its body, captured environment and variables,
    -- and then jump into that body with an extended environment.  The
    -- closure representation here is still a stub, so this transition is
    -- left undefined for now.
    Apply               -> undefined
    Return              -> undefined
lookup :: Variable -> Environment -> Expression
lookup = undefined
insert :: Variable -> Environment -> Expression -> Environment
insert = undefined
closure :: AssemblyCode -> Environment -> [Variable] -> Expression
closure = undefined
sif :: Expression -> a -> a -> a
sif a tcase fcase = if isTrueOfScheme a
then tcase
else fcase
isTrueOfScheme :: Expression -> Bool
isTrueOfScheme = undefined
continuation :: [StackValue] -> Expression
continuation = undefined
callFrame :: AssemblyCode -> Environment -> Rib -> [StackValue] -> [StackValue]
callFrame = undefined
| masaedw/3imp | src/Scheme.hs | bsd-3-clause | 3,987 | 0 | 15 | 1,217 | 1,315 | 686 | 629 | 100 | 12 |
{-
Welcome to your custom Prelude
Export here everything that should always be in your library scope
For more info on what is exported by Protolude check:
https://github.com/sdiehl/protolude/blob/master/Symbols.md
-}
module PgRecorder.Prelude
( module Exports
, id
) where
import Protolude as Exports
import Data.Function (id)
| diogob/pg-recorder | src/PgRecorder/Prelude.hs | bsd-3-clause | 340 | 0 | 5 | 58 | 31 | 21 | 10 | 5 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Data.Concurrent.Queue.Roq.QueueWatch
(
-- * API
queue_watch
-- * Types
, Metric (..)
-- * Debug
-- , ping
-- * Remote Table
-- , Data.Concurrent.Queue.Roq.QueueWatchServer.__remoteTable
) where
import Control.Distributed.Process hiding (call)
import Control.Distributed.Process.Closure
import Control.Distributed.Process.Platform hiding (__remoteTable,monitor)
import Control.Distributed.Process.Platform.ManagedProcess hiding (runProcess)
import Control.Distributed.Process.Platform.Time
import Control.Distributed.Process.Serializable()
import Control.Exception hiding (try,catch)
import Data.Binary
import Data.Concurrent.Queue.Roq.Hroq
import Data.Concurrent.Queue.Roq.Groups
import Data.Concurrent.Queue.Roq.Logger
import Data.Concurrent.Queue.Roq.StatsGatherer
-- import Data.Time.Calendar
import Data.Thyme.Calendar
-- import Data.Time.Clock
import Data.Thyme.Clock
import Data.Typeable (Typeable)
import GHC.Generics
import System.Environment
import Text.Regex.Base
import Text.Regex.TDFA
import qualified Data.Map as Map
-- ---------------------------------------------------------------------
data Metric = Size | Enq | Deq
deriving Show
-- ---------------------------------------------------------------------
{-
-module(eroq_queue_watch).
-include("eroq.hrl").
-export([
queue_watch/1
]).
-define(EROQ_QUEUE_WATCH_USE_SIZE, size).
-define(EROQ_QUEUE_WATCH_USE_ENQ, enq).
-define(EROQ_QUEUE_WATCH_USE_DEQ, deq).
-}
-- ---------------------------------------------------------------------
{-
queue_watch(QueueWatchConstructs)->
case preprocess_constructs(QueueWatchConstructs, []) of
{error, Reason} ->
{error, Reason};
{ok, PreprocessedCons} ->
Queues = eroq_groups:queues(),
TS = make_timestamp(),
FinalDict = do_queue_watch_queues(Queues, dict:new(), PreprocessedCons),
{ok, TS ++ contruct_queue_watch_string(FinalDict, PreprocessedCons)}
end.
-}
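-- Usage sketch, mirroring the commented-out Erlang test at the bottom of
-- this module (the stats-gatherer ProcessId is assumed to be available from
-- the surrounding application):
--
-- > line <- queue_watch statsPid
-- >           [ ("FEQ",  "^feq_[0-9]+$",     Size)
-- >           , ("FENQ", "^feq_[0-9]+$",     Enq)
-- >           , ("FDLQ", "^feq_[0-9]+_dlq$", Size)
-- >           ]
--
-- The result is a timestamp followed by one " <label> <count>" pair per
-- construct, e.g. "... FEQ 0 FENQ 3 FDLQ 2".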
queue_watch :: ProcessId -> [(String,String,Metric)] -> Process String
queue_watch statsPid queueWatchConstructs = do
logt $ "HroqQueueWatch.queue_watch entered"
let handler :: SomeException -> Process [(String,Regex,Metric)]
handler e = (logm $ "HRoqQueueWatch.handler e =" ++ show (e))
>> return []
preprocessedCons <- catch (preprocess_constructs queueWatchConstructs []) handler
qs <- queues
ts <- make_timestamp
finalDict <- do_queue_watch_queues statsPid qs Map.empty preprocessedCons
return $ ts ++ (construct_queue_watch_string finalDict preprocessedCons)
{-
%Worker piggies
make_timestamp()->
{Year, Month, Day} = erlang:date(),
{Hour, Minute, Second} = erlang:time(),
lists:flatten(io_lib:format("[~4.4.0w-~2.2.0w-~2.2.0w ~2.2.0w:~2.2.0w:~2.2.0w]", [Year, Month, Day, Hour, Minute, Second])).
-}
make_timestamp :: Process String
make_timestamp = do
now <- liftIO $ getCurrentTime
return (show now)
-- ---------------------------------------------------------------------
{-
preprocess_constructs([], PreProcessedConstructs)->
{ok, PreProcessedConstructs};
preprocess_constructs([{Label, AppInfoRegExp, Metric} | T], PreProcessedConstructs)->
case re:compile(AppInfoRegExp) of
{ok, CompiledRegexp} ->
case Metric of
"size" ->
preprocess_constructs(T, PreProcessedConstructs ++ [{Label, CompiledRegexp, size}]);
"enq" ->
preprocess_constructs(T, PreProcessedConstructs ++ [{Label, CompiledRegexp, enq}]);
"deq" ->
preprocess_constructs(T, PreProcessedConstructs ++ [{Label, CompiledRegexp, deq}]);
_ ->
{error, {metric, Label, Metric}}
end;
_ ->
{error, {regexp, Label, AppInfoRegExp}}
end.
-}
preprocess_constructs
:: [(String, String, Metric)] -> [(String, Regex, Metric)]
-> Process [(String, Regex, Metric)]
preprocess_constructs [] preProcessedConstructs = return preProcessedConstructs
preprocess_constructs ((label,appInfoRegExp,metric):t) preProcessedConstructs = do
let
mkr :: String -> Process Regex
mkr reg = makeRegexM reg
compiledRegex <- mkr appInfoRegExp
preprocess_constructs t (preProcessedConstructs ++ [(label,compiledRegex,metric)])
-- ---------------------------------------------------------------------
{-
contruct_queue_watch_string(_, [])->
"";
contruct_queue_watch_string(Dict, [{Label, _, _} | T])->
case dict:find(Label, Dict) of
{ok, Value} ->
Tag = " " ++ Label ++ " " ++ integer_to_list(Value);
_ ->
Tag = " " ++ Label ++ " 0"
end,
Tag ++ contruct_queue_watch_string(Dict, T).
-}
construct_queue_watch_string :: Map.Map String Integer -> [(String,Regex,Metric)] -> String
construct_queue_watch_string _ [] = ""
construct_queue_watch_string dict ((label,_,_):t)
= tag ++ construct_queue_watch_string dict t
where
tag = case Map.lookup label dict of
Just v -> " " ++ label ++ " " ++ show v
Nothing -> " " ++ label ++ " 0"
-- ---------------------------------------------------------------------
{-
increment_dict_value(Label, IncValue, Dict)->
case dict:find(Label, Dict) of
{ok, Value} ->
NewValue = Value + IncValue;
_ ->
NewValue = IncValue
end,
dict:store(Label, NewValue, Dict).
-}
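-- A doctest-style illustration of the helper defined below (the labels and
-- counts are made up):
--
-- >>> increment_dict_value "FEQ" 2 (Map.fromList [("FEQ", 3)])
-- fromList [("FEQ",5)]
-- >>> increment_dict_value "BEQ" 2 Map.empty
-- fromList [("BEQ",2)]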
increment_dict_value ::
(Num a, Ord k) => k -> a -> Map.Map k a -> Map.Map k a
increment_dict_value label incValue dict =
case Map.lookup label dict of
Just v -> Map.insert label (v + incValue) dict
Nothing -> Map.insert label incValue dict
-- ---------------------------------------------------------------------
{-
process_queue(_, Dict, [])->
Dict;
process_queue({AppInfo, Size, Enq, Deq}, Dict, [{Label, Regexp, Metric} | T])->
case re:run(AppInfo, Regexp, [{capture, first}]) of
{match, _} ->
case Metric of
size ->
NewDict = increment_dict_value(Label, Size, Dict);
enq ->
NewDict = increment_dict_value(Label, Enq, Dict);
deq ->
NewDict = increment_dict_value(Label, Deq, Dict)
end;
_ ->
NewDict = Dict
end,
process_queue({AppInfo, Size, Enq, Deq}, NewDict, T).
-}
process_queue
:: QStats
-> Map.Map String Integer
-> [(String,Regex,Metric)]
-> Map.Map String Integer
process_queue _ dict [] = dict
process_queue qs@(QStats appInfo size enq deq) dict ((label,regexp,metric):t)
= process_queue qs newDict t
where
newDict = if matchTest regexp appInfo
then case metric of
Size -> increment_dict_value label size dict
Enq -> increment_dict_value label enq dict
Deq -> increment_dict_value label deq dict
else dict
-- ---------------------------------------------------------------------
{-
do_queue_watch_queues([], Dict, _)->
Dict;
do_queue_watch_queues([QueueName | T], Dict, QueueWatchConstructs)->
NewDict =
case catch(eroq_stats_gatherer:get_queue_stats(QueueName)) of
{ok, {AppInfo, Size, Enq, Deq}} ->
process_queue({AppInfo, Size, Enq, Deq}, Dict, QueueWatchConstructs);
_ ->
Dict
end,
do_queue_watch_queues(T, NewDict, QueueWatchConstructs).
-}
do_queue_watch_queues
:: ProcessId
-> [QName] -> Map.Map String Integer-> [(String, Regex, Metric)]
-> Process (Map.Map String Integer)
do_queue_watch_queues _ [] dict _ = return dict
do_queue_watch_queues statsPid (queueName:t) dict queueWatchConstructs = do
mQueueStats <- get_queue_stats statsPid queueName
let newDict = case mQueueStats of
ReplyQStats queueStats -> process_queue queueStats dict queueWatchConstructs
ReplyQStatsNotFound -> dict
do_queue_watch_queues statsPid t newDict queueWatchConstructs
-- ---------------------------------------------------------------------
{-
%-ifdef('TEST').
%-include_lib("eunit/include/eunit.hrl").
%
%
%eroq_queue_watch_all_test()->
%
% mnesia:stop(),
%
% mnesia:delete_schema([node()]),
%
% mnesia:create_schema([node()]),
%
% mnesia:start(),
%
% eroq_groups:start_link(),
%
% eroq_log_dumper:start_link(),
%
% eroq_queue:start_link(feq1, "feq_1", true),
% eroq_queue:start_link(feq1_dlq, "feq_1_dlq", true),
%
% eroq_queue:start_link(feq2, "feq_2", true),
% eroq_queue:start_link(feq2_dlq, "feq_2_dlq", true),
%
% eroq_queue:start_link(beq1, "beq_1", true),
% eroq_queue:start_link(beq1_dlq, "beq_1_dlq", true),
%
% eroq_queue:start_link(beq2, "beq_2", true),
% eroq_queue:start_link(beq2_dlq, "beq_2_dlq", true),
%
% eroq_consumer:start_link(feq1_cons, "feq_1_cons", feq1, unset, ?MODULE, dequeue_fail, [], paused, true),
% eroq_consumer:start_link(beq1_cons, "beq_1_cons", beq1, unset, ?MODULE, dequeue_fail, [], paused, true),
%
% ok = eroq_queue:enqueue(feq1, "Test Message"),
% ok = eroq_queue:enqueue(feq1, "Test Message"),
% ok = eroq_queue:enqueue(feq2, "Test Message"),
%
% ok = eroq_queue:enqueue(feq1_dlq, "Test Message"),
% ok = eroq_queue:enqueue(feq1_dlq, "Test Message"),
% ok = eroq_queue:enqueue(feq2_dlq, "Test Message"),
% ok = eroq_queue:enqueue(feq2_dlq, "Test Message"),
%
% ok = eroq_queue:enqueue(beq1, "Test Message"),
% ok = eroq_queue:enqueue(beq1, "Test Message"),
% ok = eroq_queue:enqueue(beq2, "Test Message"),
% ok = eroq_queue:enqueue(beq2, "Test Message"),
%
% ok = eroq_queue:enqueue(beq1_dlq, "Test Message"),
% ok = eroq_queue:enqueue(beq1_dlq, "Test Message"),
% ok = eroq_queue:enqueue(beq2_dlq, "Test Message"),
% ok = eroq_queue:enqueue(beq2_dlq, "Test Message"),
% ok = eroq_queue:enqueue(beq2_dlq, "Test Message"),
%
% ok = eroq_queue:dequeue(feq1, ?MODULE, dequeue, []),
% ok = eroq_queue:dequeue(feq2, ?MODULE, dequeue, []),
%
%
% ok = eroq_queue:dequeue(feq1_dlq, ?MODULE, dequeue, []),
% ok = eroq_queue:dequeue(feq2_dlq, ?MODULE, dequeue, []),
%
% ok = eroq_queue:dequeue(beq1, ?MODULE, dequeue, []),
% ok = eroq_queue:dequeue(beq2, ?MODULE, dequeue, []),
%
% ok = eroq_queue:dequeue(beq1_dlq, ?MODULE, dequeue, []),
% ok = eroq_queue:dequeue(beq2_dlq, ?MODULE, dequeue, []),
%
% eroq_consumer:resume(feq1_cons),
% eroq_consumer:resume(beq1_cons),
%
% timer:sleep(2000),
%
% {ok, Qw} = eroq_queue_watch:queue_watch( [
%
% {"FEQ", "^feq_[0-9]+$", "size"},
% {"FENQ", "^feq_[0-9]+$", "enq"},
% {"FDEQ", "^feq_[0-9]+$", "deq"},
% {"FDLQ", "^feq_[0-9]+_dlq$", "size"},
%
% {"BEQ", "^beq_[0-9]+$", "size"},
% {"BENQ", "^beq_[0-9]+$", "enq"},
% {"BDEQ", "^beq_[0-9]+$", "deq"},
% {"BDLQ", "^beq_[0-9]+_dlq$", "size"}
%
% ]),
%
% {match, _} = re:run(Qw, "^.* FEQ 0 FENQ 3 FDEQ 3 FDLQ 2 BEQ 1 BENQ 4 BDEQ 3 BDLQ 3$", [{capture, first}]),
%
% eroq_consumer:stop(beq1_cons),
%
% eroq_consumer:stop(feq1_cons),
%
% timer:sleep(2000),
%
% eroq_queue:stop(beq2_dlq),
% eroq_queue:stop(beq2),
%
% eroq_queue:stop(beq1_dlq),
% eroq_queue:stop(beq1),
%
% eroq_queue:stop(feq2_dlq),
% eroq_queue:stop(feq2),
%
% eroq_queue:stop(feq1_dlq),
% eroq_queue:stop(feq1),
%
% timer:sleep(2000),
%
% eroq_groups:stop(),
%
% timer:sleep(2000),
%
% mnesia:stop(),
%
% mnesia:delete_schema([node()]),
%
% ok.
%
%
%
%-endif.
%EOF
-}
| alanz/hroq | src/Data/Concurrent/Queue/Roq/QueueWatch.hs | bsd-3-clause | 11,915 | 0 | 14 | 2,601 | 1,178 | 649 | 529 | 92 | 4 |
module Main where
import qualified AI.Tests as LogicHelpers
import qualified AI.VersionSpaceTests as VersionSpaces
import Test.Framework ( defaultMain )
main :: IO ()
main = defaultMain [ LogicHelpers.tests
, VersionSpaces.tests
]
| creswick/HaVSA | tests/Main.hs | bsd-3-clause | 272 | 0 | 7 | 71 | 58 | 36 | 22 | 7 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PolyKinds #-}
module Fragment.IsoRec.Rules.Type (
IsoRecNormalizeConstraint
, isoRecNormalizeRules
) where
import Bound (Bound)
import Control.Lens (review, preview)
import Ast.Type
import Data.Bitransversable
import Rules.Type
import Fragment.IsoRec.Ast.Type
type IsoRecNormalizeConstraint ki ty a =
( AsTyIsoRec ki ty
, Bound ki
, Bitransversable ki
)
normalizeRec :: IsoRecNormalizeConstraint ki ty a
=> (forall b. Type ki ty b -> Type ki ty b)
-> Type ki ty a
-> Maybe (Type ki ty a)
normalizeRec normalizeFn ty = do
s <- preview _TyRec ty
return $ review _TyRec (scopeAppTy normalizeFn s)
isoRecNormalizeRules :: IsoRecNormalizeConstraint ki ty a
=> NormalizeInput ki ty a
isoRecNormalizeRules =
NormalizeInput
[ NormalizeTypeRecurse normalizeRec ]
| dalaing/type-systems | src/Fragment/IsoRec/Rules/Type.hs | bsd-3-clause | 1,111 | 0 | 10 | 242 | 242 | 131 | 111 | 29 | 1 |
module Y15.D01 where
char2move :: Char -> Int
char2move = \case
'(' -> 1
')' -> -1
x -> error $ "Unexpected character: " ++ [x]
solve1 :: String -> Int
solve1 = sum . map char2move
solve2 :: String -> Int
solve2 =
fst
. head
. dropWhile (\(_, x) -> x >= 0)
. zip [0..]
. scanl (+) 0
. map char2move
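-- Worked examples from the puzzle statement, hand-checked against the
-- definitions above:
--
-- >>> solve1 "(()(()("
-- 3
-- >>> solve2 "()())"
-- 5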
| oshyshko/adventofcode | src/Y15/D01.hs | bsd-3-clause | 344 | 0 | 12 | 110 | 145 | 78 | 67 | -1 | -1 |
module RHM.EnumeratorT
( enumerate
, enumHandle
, enumTextFile ) where
import RHM.Input
import RHM.Intern.Trans
import Control.Monad.IO.Class
import System.IO
type EnumeratorT e m a = IterateeT e m a -> IterateeT e m a
enumerate :: Monad m => [e] -> EnumeratorT e m a
enumerate (x:xs) m = IterateeT $ do
iter <- runIterateeT m
case iter of
ContM k -> runIterateeT . enumerate xs $ k (Elem x)
_ -> return iter
enumerate _ m = m
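-- Usage sketch for the enumerator above (hedged: ContM/Elem and runIterateeT
-- come from RHM.Intern.Trans and RHM.Input, and the concrete iteratee named
-- here is assumed to exist):
--
-- > runIterateeT (enumerate [1, 2, 3] sumIteratee)
--
-- feeds the list elements one by one for as long as the iteratee keeps
-- answering with ContM, and leaves it untouched once it stops.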
enumHandle :: Handle -> EnumeratorT String IO a
enumHandle handle m = do
eof <- liftIO $ hIsEOF handle
if eof
then m
else IterateeT $ do
iter <- runIterateeT m
case iter of
ContM k -> hGetLine handle >>= runIterateeT . enumHandle handle . k . Elem
_ -> return iter
enumTextFile :: FilePath -> EnumeratorT String IO a
enumTextFile fn m = IterateeT $ withFile fn ReadMode $ runIterateeT . flip enumHandle m
| afwlehmann/rhm-io-iter | src/RHM/EnumeratorT.hs | bsd-3-clause | 934 | 0 | 18 | 255 | 339 | 169 | 170 | 28 | 3 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.MN.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.AmountOfMoney.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale MN Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (simple MNT 1)
[ "1 төг"
, "нэг төгрөг"
, "1 ₮"
, "1 төгрөг"
]
, examples (simple MNT 10)
[ "10 төгрөг"
, "₮ 10"
, "10₮"
, "10MNT"
, "10төг"
, "10 төгрөг"
, "төгрөг 10"
, "10 төгрөгийн"
]
, examples (simple Dollar 1)
[ "$1"
, "нэг доллар"
]
, examples (simple Dollar 10)
[ "$10"
, "$ 10"
, "10$"
, "10 доллар"
, "арван доллар"
]
, examples (simple Cent 10)
[ "10 цент"
, "арван пени"
, "арван цент"
, "10 c"
, "10¢"
]
, examples (simple EUR 20)
[ "20€"
, "20 €ur"
, "20 евро"
, "Евро 20"
]
, examples (simple Pound 10)
[ "\x00a3\&10"
, "арван фунт"
]
, examples (simple INR 20)
[ "20Rs"
, "Rs20"
]
, examples (simple GBP 3.01)
[ "GBP3.01"
, "GBP 3.01"
, "3.01 Английн фунт"
]
, examples (under MNT 10)
[ "10₮-c бага"
]
, examples (above MNT 20)
[ "20MNT-c их"
]
, examples (between MNT (5, 10))
[ "5-c MNT10 хүртэл"
, "5-c ₮10 хүртэл"
, "5-c 10 MNT хүртэл"
, "MNT5-c 10 хүртэл"
, "MNT5-c 10₮ хооронд"
, "5-c ₮10-н хооронд"
, "MNT5-c 10₮-н хүртэл"
]
]
| facebookincubator/duckling | Duckling/AmountOfMoney/MN/Corpus.hs | bsd-3-clause | 2,513 | 0 | 10 | 1,044 | 427 | 250 | 177 | 69 | 1 |
--------------------------------------------------------------------------------
-- | Internally used compiler module
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Hakyll.Core.Compiler.Internal
( -- * Types
CompilerRead (..)
, CompilerWrite (..)
, CompilerResult (..)
, Compiler (..)
, runCompiler
-- * Core operations
, compilerTell
, compilerAsk
, compilerThrow
, compilerCatch
, compilerResult
, compilerUnsafeIO
-- * Utilities
, compilerTellDependencies
, compilerTellCacheHits
) where
--------------------------------------------------------------------------------
import Control.Applicative (Alternative (..),
Applicative (..), (<$>))
import Control.Exception (SomeException, handle)
import Control.Monad (forM_)
import Control.Monad.Error (MonadError (..))
import Data.Monoid (Monoid (..))
import Data.Set (Set)
import qualified Data.Set as S
--------------------------------------------------------------------------------
import Hakyll.Core.Configuration
import Hakyll.Core.Dependencies
import Hakyll.Core.Identifier
import Hakyll.Core.Identifier.Pattern
import Hakyll.Core.Logger (Logger)
import qualified Hakyll.Core.Logger as Logger
import Hakyll.Core.Metadata
import Hakyll.Core.Provider
import Hakyll.Core.Routes
import Hakyll.Core.Store
--------------------------------------------------------------------------------
-- | Environment in which a compiler runs
data CompilerRead = CompilerRead
{ -- | Main configuration
compilerConfig :: Configuration
, -- | Underlying identifier
compilerUnderlying :: Identifier
, -- | Resource provider
compilerProvider :: Provider
, -- | List of all known identifiers
compilerUniverse :: Set Identifier
, -- | Site routes
compilerRoutes :: Routes
, -- | Compiler store
compilerStore :: Store
, -- | Logger
compilerLogger :: Logger
}
--------------------------------------------------------------------------------
data CompilerWrite = CompilerWrite
{ compilerDependencies :: [Dependency]
, compilerCacheHits :: Int
} deriving (Show)
--------------------------------------------------------------------------------
instance Monoid CompilerWrite where
mempty = CompilerWrite [] 0
mappend (CompilerWrite d1 h1) (CompilerWrite d2 h2) =
CompilerWrite (d1 ++ d2) (h1 + h2)
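-- For example (with two hypothetical dependencies d1 and d2), combining the
-- writes of two compiler steps concatenates their dependency lists and sums
-- their cache hits:
--
-- > CompilerWrite [d1] 1 `mappend` CompilerWrite [d2] 2
-- >   -- = CompilerWrite [d1, d2] 3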
--------------------------------------------------------------------------------
data CompilerResult a where
CompilerDone :: a -> CompilerWrite -> CompilerResult a
CompilerError :: [String] -> CompilerResult a
CompilerRequire :: Identifier -> Compiler a -> CompilerResult a
--------------------------------------------------------------------------------
-- | A monad which lets you compile items and takes care of dependency tracking
-- for you.
newtype Compiler a = Compiler
{ unCompiler :: CompilerRead -> IO (CompilerResult a)
}
--------------------------------------------------------------------------------
instance Functor Compiler where
fmap f (Compiler c) = Compiler $ \r -> do
res <- c r
return $ case res of
CompilerDone x w -> CompilerDone (f x) w
CompilerError e -> CompilerError e
CompilerRequire i c' -> CompilerRequire i (fmap f c')
{-# INLINE fmap #-}
--------------------------------------------------------------------------------
instance Monad Compiler where
return x = Compiler $ \_ -> return $ CompilerDone x mempty
{-# INLINE return #-}
Compiler c >>= f = Compiler $ \r -> do
res <- c r
case res of
CompilerDone x w -> do
res' <- unCompiler (f x) r
return $ case res' of
CompilerDone y w' -> CompilerDone y (w `mappend` w')
CompilerError e -> CompilerError e
CompilerRequire i c' -> CompilerRequire i $ do
compilerTell w -- Save dependencies!
c'
CompilerError e -> return $ CompilerError e
CompilerRequire i c' -> return $ CompilerRequire i $ c' >>= f
{-# INLINE (>>=) #-}
fail = compilerThrow . return
{-# INLINE fail #-}
--------------------------------------------------------------------------------
instance Applicative Compiler where
pure x = return x
{-# INLINE pure #-}
f <*> x = f >>= \f' -> fmap f' x
{-# INLINE (<*>) #-}
--------------------------------------------------------------------------------
instance MonadMetadata Compiler where
getMetadata = compilerGetMetadata
getMatches = compilerGetMatches
--------------------------------------------------------------------------------
instance MonadError [String] Compiler where
throwError = compilerThrow
catchError = compilerCatch
--------------------------------------------------------------------------------
runCompiler :: Compiler a -> CompilerRead -> IO (CompilerResult a)
runCompiler compiler read' = handle handler $ unCompiler compiler read'
where
handler :: SomeException -> IO (CompilerResult a)
handler e = return $ CompilerError [show e]
--------------------------------------------------------------------------------
instance Alternative Compiler where
empty = compilerThrow []
x <|> y = compilerCatch x $ \es -> do
logger <- compilerLogger <$> compilerAsk
forM_ es $ \e -> compilerUnsafeIO $ Logger.debug logger $
"Hakyll.Core.Compiler.Internal: Alternative failed: " ++ e
y
{-# INLINE (<|>) #-}
--------------------------------------------------------------------------------
compilerAsk :: Compiler CompilerRead
compilerAsk = Compiler $ \r -> return $ CompilerDone r mempty
{-# INLINE compilerAsk #-}
--------------------------------------------------------------------------------
compilerTell :: CompilerWrite -> Compiler ()
compilerTell deps = Compiler $ \_ -> return $ CompilerDone () deps
{-# INLINE compilerTell #-}
--------------------------------------------------------------------------------
compilerThrow :: [String] -> Compiler a
compilerThrow es = Compiler $ \_ -> return $ CompilerError es
{-# INLINE compilerThrow #-}
--------------------------------------------------------------------------------
compilerCatch :: Compiler a -> ([String] -> Compiler a) -> Compiler a
compilerCatch (Compiler x) f = Compiler $ \r -> do
res <- x r
case res of
CompilerDone res' w -> return (CompilerDone res' w)
CompilerError e -> unCompiler (f e) r
CompilerRequire i c -> return (CompilerRequire i (compilerCatch c f))
{-# INLINE compilerCatch #-}
--------------------------------------------------------------------------------
-- | Put the result back in a compiler
compilerResult :: CompilerResult a -> Compiler a
compilerResult x = Compiler $ \_ -> return x
{-# INLINE compilerResult #-}
--------------------------------------------------------------------------------
compilerUnsafeIO :: IO a -> Compiler a
compilerUnsafeIO io = Compiler $ \_ -> do
x <- io
return $ CompilerDone x mempty
{-# INLINE compilerUnsafeIO #-}
--------------------------------------------------------------------------------
compilerTellDependencies :: [Dependency] -> Compiler ()
compilerTellDependencies ds = do
logger <- compilerLogger <$> compilerAsk
forM_ ds $ \d -> compilerUnsafeIO $ Logger.debug logger $
"Hakyll.Core.Compiler.Internal: Adding dependency: " ++ show d
compilerTell mempty {compilerDependencies = ds}
{-# INLINE compilerTellDependencies #-}
--------------------------------------------------------------------------------
compilerTellCacheHits :: Int -> Compiler ()
compilerTellCacheHits ch = compilerTell mempty {compilerCacheHits = ch}
{-# INLINE compilerTellCacheHits #-}
--------------------------------------------------------------------------------
compilerGetMetadata :: Identifier -> Compiler Metadata
compilerGetMetadata identifier = do
provider <- compilerProvider <$> compilerAsk
compilerTellDependencies [IdentifierDependency identifier]
compilerUnsafeIO $ resourceMetadata provider identifier
--------------------------------------------------------------------------------
compilerGetMatches :: Pattern -> Compiler [Identifier]
compilerGetMatches pattern = do
universe <- compilerUniverse <$> compilerAsk
let matching = filterMatches pattern $ S.toList universe
compilerTellDependencies [PatternDependency pattern matching]
return matching
| freizl/freizl.github.com-old | src/Hakyll/Core/Compiler/Internal.hs | bsd-3-clause | 9,115 | 0 | 22 | 2,020 | 1,715 | 915 | 800 | 155 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Http.Repositories.Pipelines.GetConfig
-- Copyright : (C) 2018 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Http.Repositories.Pipelines.GetConfig (
-- * Functions
getPipelinesConfig
) where
import Irreverent.Bitbucket.Http.Common
import Irreverent.Bitbucket.Http.Error
import Irreverent.Bitbucket.Core.Control (BitbucketT(..), getAuth)
import Irreverent.Bitbucket.Core.Data.Auth (Auth(..))
import Irreverent.Bitbucket.Core.Data.Common (RepoName(..), Username(..))
import Irreverent.Bitbucket.Core.Data.Pipelines.Config (PipelinesConfig(..))
import Irreverent.Bitbucket.Json.Pipelines.Config (pipelineConfigFromJson)
import Ultra.Control.Monad.Trans.Either (EitherT, left)
import Ultra.Control.Monad.Catch (MonadCatch(..))
import Ultra.Control.Lens ((?~))
import Ultra.Data.Aeson (eitherDecode)
import qualified Ultra.Data.Text as T
import qualified Ultra.Data.Text.Encoding as T
import qualified Ultra.Network.HTTP.Client as H
import qualified Data.ByteString.Lazy as BSL
import qualified Network.Wreq as W
import qualified Network.Wreq.Session as S
import Preamble
getPipelinesConfig
:: (MonadCatch m, MonadIO m)
=> S.Session
-> Username
-> RepoName
-> EitherT BitbucketAPIError (BitbucketT m) (Maybe PipelinesConfig)
getPipelinesConfig sess (Username owner) (RepoName repo) =
let
initReq :: T.Text
initReq = T.concat [baseV2URL, "/repositories/", owner, "/" , repo, "/pipelines_config"]
httpHandler :: (Monad m) => H.HttpException -> EitherT BitbucketAPIError m a
httpHandler = left . HttpError
in do
auth <- lift getAuth
opts <- case auth of
(Basic username password) -> pure (W.auth ?~ W.basicAuth (T.encodeUtf8 username) (T.encodeUtf8 password) $ baseReq)
resp <- fmap H.optionalHttpClientResponse ((liftIO $ S.getWith opts sess (T.unpack initReq)) `catch` httpHandler)
forM resp $ \case
H.HttpNotOk r -> left $ NotOkHttpResponse (BSL.toStrict <$> r)
H.HttpOk r -> either (left . JsonError . T.pack) (pure . pipelineConfigFromJson) . eitherDecode . H.responseBody $ r
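-- Usage sketch (hedged: the wreq session and the surrounding auth/run
-- plumbing are assumed to be provided by the caller):
--
-- > getPipelinesConfig sess (Username "some-team") (RepoName "some-repo")
--
-- runs in @EitherT BitbucketAPIError (BitbucketT m)@ and, judging from
-- optionalHttpClientResponse above, yields Nothing when the endpoint reports
-- no pipelines configuration for the repository.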
| irreverent-pixel-feats/bitbucket | bitbucket-http-client/src/Irreverent/Bitbucket/Http/Repositories/Pipelines/GetConfig.hs | bsd-3-clause | 2,496 | 0 | 20 | 353 | 612 | 365 | 247 | 45 | 2 |
module Import.Semantic where
import Import
-- | Use SemanticUI to render a form.
renderSemantic :: Monad m => FormRender m a
renderSemantic aform fragment = do
(res, views') <- aFormToForm aform
let views = views' []
let widget = [whamlet|
$newline never
\#{fragment}
$forall view <- views
<div .ui .field :fvRequired view:.required :not $ fvRequired view:.optional>
<label for=#{fvId view}>#{fvLabel view}
$maybe tt <- fvTooltip view
<div .tooltip>#{tt}
^{fvInput view}
$maybe err <- fvErrors view
<div .errors>#{err}
|]
return (res, widget)
| sulami/hGM | Import/Semantic.hs | bsd-3-clause | 632 | 0 | 11 | 169 | 92 | 48 | 44 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module Messages.Formatter.HumanReadable (format) where
import Messages.Formatter.Format
import Messages.Types
import CommandLine.Helpers (showErrors)
format :: InfoFormatterF a -> IO a
format infoFormatter =
case infoFormatter of
OnInfo info next ->
renderInfo info
*> return next
renderInfo :: InfoMessage -> IO ()
renderInfo (ProcessingFiles files) =
case files of
[file] ->
putStrLn $ "Processing file " ++ file
_ ->
putStrLn "Processing multiple files..."
renderInfo (FileWouldChange file) =
putStrLn $ "File would be changed " ++ file
renderInfo (ParseError inputFile inputText errs) =
showErrors inputFile inputText errs
| nukisman/elm-format-short | src/Messages/Formatter/HumanReadable.hs | bsd-3-clause | 753 | 0 | 9 | 188 | 187 | 96 | 91 | 22 | 2 |
module Language.Modelica.Test.Lexer (test) where
import Language.Modelica.Syntax.ToString (toString)
import qualified Language.Modelica.Parser.Lexer as Lexer
import Language.Modelica.Test.Utility (testparser)
import Text.ParserCombinators.Parsec (eof)
import Text.Printf (printf)
import Control.Applicative ((<*), Applicative)
tests_unsigned_number :: [(String, Double)]
tests_unsigned_number =
("123", 123) :
("123 ", 123) :
("123.567", 123.567) :
("123.", 123) :
("123e0", 123) :
("123.e10", 1.23e12) :
("123.5e10", 1.235e12) :
("123e+10", 1.23e12) :
("123.e+10", 1.23e12) :
("123.5e+10", 1.235e12) :
("123E-10", 1.23e-8) :
("123.E-10", 1.23e-8) :
("123.5e-10", 1.235e-8) :
[]
tests_unicode_string :: [String]
tests_unicode_string =
"\"def\"" :
"\"äüß³¼æðſđilha\"" :
"\" ֆդսիոասԱԴՍՁՅ՞՞ՉՋ1Ձյձ \"" :
[]
tests_ident :: [String]
tests_ident =
"x" :
"a23aa2" :
"'bla'" :
[]
test :: IO [Bool]
test = do
let f (str, res) = (str, testparser (Lexer.unsigned_number <* eof) str == res)
h str = (str, printf "\"%s\"" (testparser (Lexer.unicode_string <* eof) str) == str)
i str = (str, toString (testparser (Lexer.ident <* eof) str) == str)
as = map f tests_unsigned_number
cs = map h tests_unicode_string
ds = map i tests_ident
pr str res = do
print str
print res
putStrLn "-----------------------------------------------------------"
let res = as ++ cs ++ ds
mapM_ (uncurry pr) res
return (map snd res)
| xie-dongping/modelicaparser | test/Language/Modelica/Test/Lexer.hs | bsd-3-clause | 1,558 | 0 | 18 | 319 | 527 | 295 | 232 | 50 | 1 |
module Data.Arib.CRC (CRC32(..)) where
import Data.Arib.CRC.Internal (CRC32(..))
| philopon/arib | src/Data/Arib/CRC.hs | bsd-3-clause | 82 | 0 | 6 | 8 | 32 | 22 | 10 | 2 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.RO.Rules
( rules ) where
import Control.Monad (liftM2)
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Regex.Types
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
import Duckling.Types
ruleAcum :: Rule
ruleAcum = Rule
{ name = "acum"
, pattern =
[ regex "(chiar)? ?acum|imediat"
]
, prod = \_ -> tt $ cycleNth TG.Second 0
}
ruleNamedday :: Rule
ruleNamedday = Rule
{ name = "named-day"
, pattern =
[ regex "lu(n(ea|i)?)?"
]
, prod = \_ -> tt $ dayOfWeek 1
}
ruleDupamiaza :: Rule
ruleDupamiaza = Rule
{ name = "dupamiaza"
, pattern =
[ regex "dupamiaz(a|\x0103)|dup(a|\x0103) amiaz(a|\x0103)"
]
, prod = \_ -> Token Time . mkLatent . partOfDay <$>
interval TTime.Open (hour False 12) (hour False 19)
}
ruleNamedmonth12 :: Rule
ruleNamedmonth12 = Rule
{ name = "named-month"
, pattern =
[ regex "dec(embrie)?"
]
, prod = \_ -> tt $ month 12
}
ruleNamedday2 :: Rule
ruleNamedday2 = Rule
{ name = "named-day"
, pattern =
[ regex "ma(r((t|\x021b)(ea|i))?)?"
]
, prod = \_ -> tt $ dayOfWeek 2
}
ruleValentinesDay :: Rule
ruleValentinesDay = Rule
{ name = "valentine's day"
, pattern =
[ regex "sf\\.?((a|\x00e2)ntul)? Valentin"
]
, prod = \_ -> tt $ monthDay 2 14
}
ruleSinceTimeofday :: Rule
ruleSinceTimeofday = Rule
{ name = "since <time-of-day>"
, pattern =
[ regex "de|din"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleNewYearsDay :: Rule
ruleNewYearsDay = Rule
{ name = "new year's day"
, pattern =
[ regex "(siua de )? an(ul)? nou"
]
, prod = \_ -> tt $ monthDay 1 1
}
ruleNamedday6 :: Rule
ruleNamedday6 = Rule
{ name = "named-day"
, pattern =
[ regex "s(a|\x00e2)mb(a|\x0103)t(a|\x0103)|s(a|\x00e2)m|s(a|\x00e2)"
]
, prod = \_ -> tt $ dayOfWeek 6
}
ruleNamedmonth7 :: Rule
ruleNamedmonth7 = Rule
{ name = "named-month"
, pattern =
[ regex "iul(ie)?"
]
, prod = \_ -> tt $ month 7
}
ruleOrdinalTrimestruYear :: Rule
ruleOrdinalTrimestruYear = Rule
{ name = "<ordinal> trimestru <year>"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
, dimension Time
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> do
n <- getIntValue token
tt $ cycleNthAfter False TG.Quarter (n - 1) td
_ -> Nothing
}
ruleInNamedmonth :: Rule
ruleInNamedmonth = Rule
{ name = "in <named-month>"
, pattern =
[ regex "(i|\x00ee)n"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleLastCycleOfTime :: Rule
ruleLastCycleOfTime = Rule
{ name = "last <cycle> of <time>"
, pattern =
[ regex "ultim(ul|a)"
, dimension TimeGrain
, regex "din"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_:Token Time td:_) ->
tt $ cycleLastOf grain td
_ -> Nothing
}
ruleIntroNamedday :: Rule
ruleIntroNamedday = Rule
{ name = "intr-o <named-day>"
, pattern =
[ regex "((i|\x00ee)n(tr)?(\\-?o)?)"
, Predicate isADayOfWeek
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleMonthDdddInterval :: Rule
ruleMonthDdddInterval = Rule
{ name = "<month> dd-dd (interval)"
, pattern =
[ Predicate isAMonth
, regex "(3[01]|[12]\\d|0?[1-9])"
, regex "\\-"
, regex "(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (m1:_)):
_:
Token RegexMatch (GroupMatch (m2:_)):
_) -> do
d1 <- parseInt m1
d2 <- parseInt m2
dom1 <- intersect (dayOfMonth d1) td
dom2 <- intersect (dayOfMonth d2) td
Token Time <$> interval TTime.Closed dom1 dom2
_ -> Nothing
}
ruleNamedday4 :: Rule
ruleNamedday4 = Rule
{ name = "named-day"
, pattern =
[ regex "jo(ia?)?"
]
, prod = \_ -> tt $ dayOfWeek 4
}
ruleInDuration :: Rule
ruleInDuration = Rule
{ name = "in <duration>"
, pattern =
[ regex "(i|\x00ee)n"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) -> tt $ inDuration dd
_ -> Nothing
}
ruleIeri :: Rule
ruleIeri = Rule
{ name = "ieri"
, pattern =
[ regex "ieri"
]
, prod = \_ -> tt . cycleNth TG.Day $ - 1
}
ruleCycleAcesta :: Rule
ruleCycleAcesta = Rule
{ name = "<cycle> acesta"
, pattern =
[ regex "aceasta|acest|(a|\x0103)sta"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) -> tt $ cycleNth grain 0
_ -> Nothing
}
ruleYearLatent2 :: Rule
ruleYearLatent2 = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween 2101 10000
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ year v
_ -> Nothing
}
ruleAzi :: Rule
ruleAzi = Rule
{ name = "azi"
, pattern =
[ regex "a(st(a|\x0103))?zi"
]
, prod = \_ -> tt $ cycleNth TG.Day 0
}
ruleNoon :: Rule
ruleNoon = Rule
{ name = "noon"
, pattern =
[ regex "noon"
]
, prod = \_ -> tt $ hour False 12
}
ruleTimeTrecuta :: Rule
ruleTimeTrecuta = Rule
{ name = "<time> trecut[aă]?"
, pattern =
[ dimension Time
, regex "(trecut(a|\x0103)?)"
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ predNth (-1) False td
_ -> Nothing
}
ruleThisnextDayofweek :: Rule
ruleThisnextDayofweek = Rule
{ name = "this|next <day-of-week>"
, pattern =
[ Predicate isADayOfWeek
, regex "aceasta|(a|\x0103)sta|urm(a|\x0103)toare"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ predNth 0 True td
_ -> Nothing
}
ruleBetweenTimeofdayAndTimeofdayInterval :: Rule
ruleBetweenTimeofdayAndTimeofdayInterval = Rule
{ name = "between <time-of-day> and <time-of-day> (interval)"
, pattern =
[ regex "(i|\x00ee)ntre"
, Predicate isATimeOfDay
, regex "(s|\x0219)i"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleNamedmonth :: Rule
ruleNamedmonth = Rule
{ name = "named-month"
, pattern =
[ regex "ian(uarie)?"
]
, prod = \_ -> tt $ month 1
}
ruleUrmatoareaCycle :: Rule
ruleUrmatoareaCycle = Rule
{ name = "urmatoarea <cycle>"
, pattern =
[ regex "(urm(a|\x0103)to(area|rul)|viito(are|r))"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) -> tt $ cycleNth grain 1
_ -> Nothing
}
ruleCycleAcesta2 :: Rule
ruleCycleAcesta2 = Rule
{ name = "<cycle> acesta"
, pattern =
[ dimension TimeGrain
, regex "aceasta|acest|(a|\x0103)sta|curent(a|\x0103)"
]
, prod = \tokens -> case tokens of
(Token TimeGrain grain:_) -> tt $ cycleNth grain 0
_ -> Nothing
}
ruleNamedmonth3 :: Rule
ruleNamedmonth3 = Rule
{ name = "named-month"
, pattern =
[ regex "martie|mar"
]
, prod = \_ -> tt $ month 3
}
ruleCraciun :: Rule
ruleCraciun = Rule
{ name = "craciun"
, pattern =
[ regex "(ziua de )?cr(a|\x0103)ciun"
]
, prod = \_ -> tt $ monthDay 12 25
}
ruleTrimestruNumeralYear :: Rule
ruleTrimestruNumeralYear = Rule
{ name = "trimestru <number> <year>"
, pattern =
[ Predicate $ isGrain TG.Quarter
, dimension Numeral
, dimension Time
]
, prod = \tokens -> case tokens of
(_:token:Token Time td:_) -> do
v <- getIntValue token
tt $ cycleNthAfter False TG.Quarter (v - 1) td
_ -> Nothing
}
ruleDdmm :: Rule
ruleDdmm = Rule
{ name = "dd/mm"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])/(0?[1-9]|1[0-2])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:_)):_) -> do
m <- parseInt mm
d <- parseInt dd
tt $ monthDay m d
_ -> Nothing
}
ruleLaTimeofday :: Rule
ruleLaTimeofday = Rule
{ name = "la <time-of-day>"
, pattern =
[ regex "la|@ (ora)?"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleNamedmonth4 :: Rule
ruleNamedmonth4 = Rule
{ name = "named-month"
, pattern =
[ regex "apr(ilie)?"
]
, prod = \_ -> tt $ month 4
}
ruleBlackFriday :: Rule
ruleBlackFriday = Rule
{ name = "black friday"
, pattern =
[ regex "black frid?day"
]
, prod = \_ -> tt $ nthDOWOfMonth 4 5 11
}
ruleChristmasEve :: Rule
ruleChristmasEve = Rule
{ name = "christmas eve"
, pattern =
[ regex "ajun(ul)? (de )?cr(a|\x0103)ciun"
]
, prod = \_ -> tt $ monthDay 12 24
}
rulePentruDuration :: Rule
rulePentruDuration = Rule
{ name = "pentru <duration>"
, pattern =
[ regex "pentru"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) -> tt $ inDuration dd
_ -> Nothing
}
ruleHourofdayIntegerAsRelativeMinutes :: Rule
ruleHourofdayIntegerAsRelativeMinutes = Rule
{ name = "<hour-of-day> <integer> (as relative minutes)"
, pattern =
[ Predicate isAnHourOfDay
, Predicate $ isIntegerBetween 1 59
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
token:
_) -> do
n <- getIntValue token
tt $ hourMinute is12H hours n
_ -> Nothing
}
ruleHourofdaySfert :: Rule
ruleHourofdaySfert = Rule
{ name = "<hour-of-day> sfert"
, pattern =
[ Predicate isAnHourOfDay
, regex "((s|\x0219)i )?(un )?sfert"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_) -> tt $ hourMinute is12H hours 15
_ -> Nothing
}
ruleHourofdayJumatate :: Rule
ruleHourofdayJumatate = Rule
{ name = "<hour-of-day> sfert"
, pattern =
[ Predicate isAnHourOfDay
, regex "((s|\x0219)i )?jum(a|\x0103)tate|jumate"
]
, prod = \tokens -> case tokens of
(Token Time TimeData {TTime.form = Just (TTime.TimeOfDay (Just hours) is12H)}:
_) -> tt $ hourMinute is12H hours 30
_ -> Nothing
}
ruleNamedday5 :: Rule
ruleNamedday5 = Rule
{ name = "named-day"
, pattern =
[ regex "vi(n(er(ea|i))?)?"
]
, prod = \_ -> tt $ dayOfWeek 5
}
ruleDiseara :: Rule
ruleDiseara = Rule
{ name = "diseara"
, pattern =
[ regex "disear(a|\x0103)|((i|\x00ee)n aceas(a|\x0103) )?sear(a|\x0103)"
]
, prod = \_ -> do
let td1 = cycleNth TG.Day 0
td2 <- interval TTime.Open (hour False 18) (hour False 0)
Token Time . partOfDay <$> intersect td1 td2
}
ruleIntersectBy :: Rule
ruleIntersectBy = Rule
{ name = "intersect by \",\""
, pattern =
[ Predicate isNotLatent
, regex ","
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:_:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleDayofmonthNumeral :: Rule
ruleDayofmonthNumeral = Rule
{ name = "<day-of-month> (number)"
, pattern =
[ Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ dayOfMonth v
_ -> Nothing
}
ruleNthTimeAfterTime :: Rule
ruleNthTimeAfterTime = Rule
{ name = "nth <time> after <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "dup(a|\x0103)"
, dimension Time
]
, prod = \tokens -> case tokens of
(token:Token Time td1:_:Token Time td2:_) -> do
v <- getIntValue token
tt $ predNthAfter (v - 1) td1 td2
_ -> Nothing
}
ruleMmdd :: Rule
ruleMmdd = Rule
{ name = "mm/dd"
, pattern =
[ regex "(0?[1-9]|1[0-2])/(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:_)):_) -> do
m <- parseInt mm
d <- parseInt dd
tt $ monthDay m d
_ -> Nothing
}
ruleTimeofdayLatent :: Rule
ruleTimeofdayLatent = Rule
{ name = "time-of-day (latent)"
, pattern =
[ Predicate $ isIntegerBetween 0 23
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ hour True v
_ -> Nothing
}
ruleFromTimeofdayTimeofdayInterval :: Rule
ruleFromTimeofdayTimeofdayInterval = Rule
{ name = "from <time-of-day> - <time-of-day> (interval)"
, pattern =
[ regex "(dup(a|\x0103)|(i|\x00ee)ncep(a|\x00e2)nd cu)"
, Predicate isATimeOfDay
, regex "(dar |(s|\x0219)i )?((i|\x00ee)nainte|p(a|\x00e2)n(a|\x0103) la( de)?)"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleNamedmonth2 :: Rule
ruleNamedmonth2 = Rule
{ name = "named-month"
, pattern =
[ regex "feb(ruarie)?"
]
, prod = \_ -> tt $ month 2
}
ruleSeason3 :: Rule
ruleSeason3 = Rule
{ name = "season"
, pattern =
[ regex "primavar(a|\x0103)"
]
, prod = \_ -> Token Time <$>
interval TTime.Open (monthDay 3 20) (monthDay 6 21)
}
ruleUrmatoareleNCycle :: Rule
ruleUrmatoareleNCycle = Rule
{ name = "urmatoarele n <cycle>"
, pattern =
[ regex "urm(a|\x0103)to(arele|rii|area)"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain v
_ -> Nothing
}
ruleSeason :: Rule
ruleSeason = Rule
{ name = "season"
, pattern =
[ regex "toamn(a|\x0103)"
]
, prod = \_ -> Token Time <$>
interval TTime.Open (monthDay 9 23) (monthDay 12 21)
}
ruleDupaDuration :: Rule
ruleDupaDuration = Rule
{ name = "dupa <duration>"
, pattern =
[ regex "dup(a|\x0103)"
, dimension Duration
]
, prod = \tokens -> case tokens of
(_:Token Duration dd:_) ->
tt . withDirection TTime.After $ inDuration dd
_ -> Nothing
}
ruleNewYearsEve :: Rule
ruleNewYearsEve = Rule
{ name = "new year's eve"
, pattern =
[ regex "(ajun(ul)? )?(de )?an(ul)? nou"
]
, prod = \_ -> tt $ monthDay 12 31
}
ruleByTheEndOfTime :: Rule
ruleByTheEndOfTime = Rule
{ name = "by the end of <time>"
, pattern =
[ regex "p(a|\x00ee)n(a|\x0103) ((i|\x00ee)n|la)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
Token Time <$> interval TTime.Closed (cycleNth TG.Second 0) td
_ -> Nothing
}
ruleNameddayPeDayofmonthNumeral :: Rule
ruleNameddayPeDayofmonthNumeral = Rule
{ name = "<named-day> pe <day-of-month> (number)"
, pattern =
[ Predicate isADayOfWeek
, regex "pe"
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:_:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleDayofmonthNonOrdinalNamedmonth :: Rule
ruleDayofmonthNonOrdinalNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) <named-month>"
, pattern =
[ Predicate isDOMInteger
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleIntersect :: Rule
ruleIntersect = Rule
{ name = "intersect"
, pattern =
[ Predicate isNotLatent
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleAboutTimeofday :: Rule
ruleAboutTimeofday = Rule
{ name = "about <time-of-day>"
, pattern =
[ regex "(cam|aproximativ|(i|\x00ee)n jur de)"
, Predicate isATimeOfDay
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleUntilTimeofday :: Rule
ruleUntilTimeofday = Rule
{ name = "until <time-of-day>"
, pattern =
[ regex "p(a|\x00ee)n(a|\x0103) ((i|\x00ee)n|la)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) ->
tt $ withDirection TTime.Before td
_ -> Nothing
}
ruleDayofmonthnumberNamedmonth :: Rule
ruleDayofmonthnumberNamedmonth = Rule
{ name = "<day-of-month>(number) <named-month>"
, pattern =
[ Predicate isDOMInteger
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleNamedmonth6 :: Rule
ruleNamedmonth6 = Rule
{ name = "named-month"
, pattern =
[ regex "iun(ie)?"
]
, prod = \_ -> tt $ month 6
}
ruleIntreDatetimeSiDatetimeInterval :: Rule
ruleIntreDatetimeSiDatetimeInterval = Rule
{ name = "intre <datetime> si <datetime> (interval)"
, pattern =
[ regex "(i|\x00ee)nre"
, dimension Time
, regex "(s|\x0219)i"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
Token Time <$> interval TTime.Closed td1 td2
_ -> Nothing
}
ruleNthTimeOfTime :: Rule
ruleNthTimeOfTime = Rule
{ name = "nth <time> of <time>"
, pattern =
[ dimension Ordinal
, dimension Time
, regex "din"
, dimension Time
]
, prod = \tokens -> case tokens of
(token:Token Time td1:_:Token Time td2:_) -> do
v <- getIntValue token
Token Time . predNth (v - 1) False <$> intersect td2 td1
_ -> Nothing
}
ruleNamedmonth8 :: Rule
ruleNamedmonth8 = Rule
{ name = "named-month"
, pattern =
[ regex "aug(ust)?"
]
, prod = \_ -> tt $ month 8
}
ruleTimePartofday :: Rule
ruleTimePartofday = Rule
{ name = "<time> <part-of-day>"
, pattern =
[ dimension Time
, Predicate isAPartOfDay
]
, prod = \tokens -> case tokens of
(Token Time td1:Token Time td2:_) ->
Token Time <$> intersect td1 td2
_ -> Nothing
}
ruleWeekend :: Rule
ruleWeekend = Rule
{ name = "week-end"
, pattern =
[ regex "(week(\\s|\\-)?end|wkend)"
]
, prod = \_ -> do
fri <- intersect (dayOfWeek 5) (hour False 18)
mon <- intersect (dayOfWeek 1) (hour False 0)
Token Time <$> interval TTime.Open fri mon
}
rulePeDayofmonthNonOrdinal :: Rule
rulePeDayofmonthNonOrdinal = Rule
{ name = "pe <day-of-month> (non ordinal)"
, pattern =
[ regex "pe"
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(_:token:_) -> do
v <- getIntValue token
tt . mkLatent $ dayOfMonth v
_ -> Nothing
}
ruleTimeAceastaacestaasta :: Rule
ruleTimeAceastaacestaasta = Rule
{ name = "<time> (aceasta|acesta|[aă]sta)"
, pattern =
[ dimension Time
, regex "aceasta|(a|\x0103)sta|urm(a|\x0103)toare"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
tt $ predNth 0 False td
_ -> Nothing
}
ruleEomendOfMonth :: Rule
ruleEomendOfMonth = Rule
{ name = "EOM|End of month"
, pattern =
[ regex "sf(a|\x00e2)r(s|\x0219)itul lunii"
]
, prod = \_ -> tt $ cycleNth TG.Month 1
}
rulePartofdayAsta :: Rule
rulePartofdayAsta = Rule
{ name = "<part-of-day> asta"
, pattern =
[ Predicate isAPartOfDay
, regex "asta"
]
, prod = \tokens -> case tokens of
(Token Time td:_) ->
Token Time . partOfDay <$> intersect (cycleNth TG.Day 0) td
_ -> Nothing
}
ruleUltimaCycle :: Rule
ruleUltimaCycle = Rule
{ name = "ultima <cycle>"
, pattern =
[ regex "ultim(a|ul)"
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) -> tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleCycleUrmatoare :: Rule
ruleCycleUrmatoare = Rule
{ name = "<cycle> urmatoare"
, pattern =
[ dimension TimeGrain
, regex "(urm(a|\x0103)to(are|r)|viito(are|r))"
]
, prod = \tokens -> case tokens of
(_:Token TimeGrain grain:_) -> tt $ cycleNth grain 1
_ -> Nothing
}
ruleTheDayofmonthNumeral :: Rule
ruleTheDayofmonthNumeral = Rule
{ name = "the <day-of-month> (number)"
, pattern =
[ regex "pe"
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(_:token:_) -> do
v <- getIntValue token
tt $ dayOfMonth v
_ -> Nothing
}
ruleYyyymmdd :: Rule
ruleYyyymmdd = Rule
{ name = "yyyy-mm-dd"
, pattern =
[ regex "(\\d{2,4})-(0?[1-9]|1[0-2])-(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (yy:mm:dd:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleCycleTrecut :: Rule
ruleCycleTrecut = Rule
{ name = "<cycle> trecut"
, pattern =
[ dimension TimeGrain
, regex "trecut(a|\x0103)?"
]
, prod = \tokens -> case tokens of
(Token TimeGrain grain:_) ->
tt . cycleNth grain $ - 1
_ -> Nothing
}
ruleDayofmonthNumeralOfNamedmonth :: Rule
ruleDayofmonthNumeralOfNamedmonth = Rule
{ name = "<day-of-month> (number) of <named-month>"
, pattern =
[ Predicate isDOMInteger
, regex "din"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleDurationInainteDeTime :: Rule
ruleDurationInainteDeTime = Rule
{ name = "<duration> inainte de <time>"
, pattern =
[ dimension Duration
, regex "(i|\x00ee)nainte de"
, dimension Time
]
, prod = \tokens -> case tokens of
(Token Duration dd:_:Token Time td:_) ->
tt $ durationBefore dd td
_ -> Nothing
}
ruleDayofmonthNonOrdinalOfNamedmonth :: Rule
ruleDayofmonthNonOrdinalOfNamedmonth = Rule
{ name = "<day-of-month> (non ordinal) of <named-month>"
, pattern =
[ Predicate isDOMInteger
, regex "din"
, Predicate isAMonth
]
, prod = \tokens -> case tokens of
(token:_:Token Time td:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleDurationInUrma :: Rule
ruleDurationInUrma = Rule
{ name = "<duration> in urma"
, pattern =
[ dimension Duration
, regex "(i|\x00ee)n urm(a|\x0103)"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) -> tt $ durationAgo dd
_ -> Nothing
}
ruleDurationDeAcum :: Rule
ruleDurationDeAcum = Rule
{ name = "<duration> de acum"
, pattern =
[ dimension Duration
, regex "de (acum|azi)"
]
, prod = \tokens -> case tokens of
(Token Duration dd:_) -> tt $ inDuration dd
_ -> Nothing
}
ruleSezonAnotimp :: Rule
ruleSezonAnotimp = Rule
{ name = "sezon anotimp"
, pattern =
[ regex "var(a|\x0103)"
]
, prod = \_ ->
Token Time <$> interval TTime.Open (monthDay 6 21) (monthDay 9 23)
}
ruleSearaNoapte :: Rule
ruleSearaNoapte = Rule
{ name = "sear[aă] noapte"
, pattern =
[ regex "sear(a|\x0103)|noapte"
]
, prod = \_ -> Token Time . mkLatent . partOfDay <$>
interval TTime.Open (hour False 18) (hour False 0)
}
ruleYearLatent :: Rule
ruleYearLatent = Rule
{ name = "year (latent)"
, pattern =
[ Predicate $ isIntegerBetween (- 10000) 999
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt . mkLatent $ year v
_ -> Nothing
}
ruleSeason2 :: Rule
ruleSeason2 = Rule
{ name = "season"
, pattern =
[ regex "iarn(a|\x0103)"
]
, prod = \_ ->
Token Time <$> interval TTime.Open (monthDay 12 21) (monthDay 3 20)
}
ruleUltimeleNCycle :: Rule
ruleUltimeleNCycle = Rule
{ name = "ultimele n <cycle>"
, pattern =
[ regex "ultim(ele|ii|a)"
, Predicate $ isIntegerBetween 1 9999
, dimension TimeGrain
]
, prod = \tokens -> case tokens of
(_:token:Token TimeGrain grain:_) -> do
v <- getIntValue token
tt $ cycleN True grain (- v)
_ -> Nothing
}
ruleAfterTimeofday :: Rule
ruleAfterTimeofday = Rule
{ name = "after <time-of-day>"
, pattern =
[ regex "dup(a|\x0103)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ withDirection TTime.After td
_ -> Nothing
}
ruleDimineata :: Rule
ruleDimineata = Rule
{ name = "diminea[tț][aă]"
, pattern =
[ regex "diminea(t|\x021b)(a|\x0103)"
]
, prod = \_ -> Token Time . mkLatent . partOfDay <$>
interval TTime.Open (hour False 4) (hour False 12)
}
ruleTimeUrmatoarer :: Rule
ruleTimeUrmatoarer = Rule
{ name = "<time> urm[aă]to(are|r)"
, pattern =
[ regex "urm(a|\x0103)to(are|r)"
, Predicate isNotLatent
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> tt $ predNth 0 True td
_ -> Nothing
}
ruleNamedmonth5 :: Rule
ruleNamedmonth5 = Rule
{ name = "named-month"
, pattern =
[ regex "mai"
]
, prod = \_ -> tt $ month 5
}
ruleTimeofdayFix :: Rule
ruleTimeofdayFix = Rule
{ name = "<time-of-day> fix"
, pattern =
[ Predicate isATimeOfDay
, regex "(fix|exact)"
]
, prod = \tokens -> case tokens of
(Token Time td:_) -> tt $ notLatent td
_ -> Nothing
}
ruleNamedday7 :: Rule
ruleNamedday7 = Rule
{ name = "named-day"
, pattern =
[ regex "du(m(inic(a|\x0103))?)?"
]
, prod = \_ -> tt $ dayOfWeek 7
}
ruleHhmm :: Rule
ruleHhmm = Rule
{ name = "hh:mm"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))[:.]([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (hh:mm:_)):_) -> do
h <- parseInt hh
m <- parseInt mm
tt $ hourMinute True h m
_ -> Nothing
}
ruleMaine :: Rule
ruleMaine = Rule
{ name = "maine"
, pattern =
[ regex "m(a|\x00e2)ine"
]
, prod = \_ -> tt $ cycleNth TG.Day 1
}
ruleTimezone :: Rule
ruleTimezone = Rule
{ name = "<time> timezone"
, pattern =
[ Predicate $ liftM2 (&&) isATimeOfDay isNotLatent
, regex "\\b(YEKT|YEKST|YAKT|YAKST|WITA|WIT|WIB|WGT|WGST|WFT|WET|WEST|WAT|WAST|VUT|VLAT|VLAST|VET|UZT|UYT|UYST|UTC|ULAT|TVT|TMT|TLT|TKT|TJT|TFT|TAHT|SST|SRT|SGT|SCT|SBT|SAST|SAMT|RET|PYT|PYST|PWT|PST|PONT|PMST|PMDT|PKT|PHT|PHOT|PGT|PETT|PETST|PET|PDT|OMST|OMSST|NZST|NZDT|NUT|NST|NPT|NOVT|NOVST|NFT|NDT|NCT|MYT|MVT|MUT|MST|MSK|MSD|MMT|MHT|MDT|MAWT|MART|MAGT|MAGST|LINT|LHST|LHDT|KUYT|KST|KRAT|KRAST|KGT|JST|IST|IRST|IRKT|IRKST|IRDT|IOT|IDT|ICT|HOVT|HKT|GYT|GST|GMT|GILT|GFT|GET|GAMT|GALT|FNT|FKT|FKST|FJT|FJST|EST|EGT|EGST|EET|EEST|EDT|ECT|EAT|EAST|EASST|DAVT|ChST|CXT|CVT|CST|COT|CLT|CLST|CKT|CHAST|CHADT|CET|CEST|CDT|CCT|CAT|CAST|BTT|BST|BRT|BRST|BOT|BNT|AZT|AZST|AZOT|AZOST|AWST|AWDT|AST|ART|AQTT|ANAT|ANAST|AMT|AMST|ALMT|AKST|AKDT|AFT|AEST|AEDT|ADT|ACST|ACDT)\\b"
]
, prod = \tokens -> case tokens of
(Token Time td:
Token RegexMatch (GroupMatch (tz:_)):
_) -> Token Time <$> inTimezone tz td
_ -> Nothing
}
ruleYear :: Rule
ruleYear = Rule
{ name = "year"
, pattern =
[ Predicate $ isIntegerBetween 1000 2100
]
, prod = \tokens -> case tokens of
(token:_) -> do
v <- getIntValue token
tt $ year v
_ -> Nothing
}
ruleNamedmonth10 :: Rule
ruleNamedmonth10 = Rule
{ name = "named-month"
, pattern =
[ regex "oct(ombrie)?"
]
, prod = \_ -> tt $ month 10
}
ruleHalloweenDay :: Rule
ruleHalloweenDay = Rule
{ name = "halloween day"
, pattern =
[ regex "hall?owe?en"
]
, prod = \_ -> tt $ monthDay 10 31
}
ruleNamedmonthDayofmonthNonOrdinal :: Rule
ruleNamedmonthDayofmonthNonOrdinal = Rule
{ name = "<named-month> <day-of-month> (non ordinal)"
, pattern =
[ Predicate isAMonth
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
rulePeDate :: Rule
rulePeDate = Rule
{ name = "pe <date>"
, pattern =
[ regex "pe"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:x:_) -> Just x
_ -> Nothing
}
ruleDayofmonthnumberNamedmonthYear :: Rule
ruleDayofmonthnumberNamedmonthYear = Rule
{ name = "<day-of-month>(number) <named-month> year"
, pattern =
[ Predicate isDOMInteger
, Predicate isAMonth
, regex "(\\d{2,4})"
]
, prod = \tokens -> case tokens of
(token:
Token Time td:
Token RegexMatch (GroupMatch (match:_)):
_) -> do
v <- parseInt match
dom <- intersectDOM td token
Token Time <$> intersect dom (year v)
_ -> Nothing
}
ruleAbsorptionOfAfterNamedDay :: Rule
ruleAbsorptionOfAfterNamedDay = Rule
{ name = "absorption of , after named day"
, pattern =
[ Predicate isADayOfWeek
, regex ","
]
, prod = \tokens -> case tokens of
(x:_) -> Just x
_ -> Nothing
}
ruleLastDayofweekOfTime :: Rule
ruleLastDayofweekOfTime = Rule
{ name = "last <day-of-week> of <time>"
, pattern =
[ regex "ultima"
, Predicate isADayOfWeek
, regex "din"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td1:_:Token Time td2:_) ->
tt $ predLastOf td1 td2
_ -> Nothing
}
ruleOrdinalTrimestru :: Rule
ruleOrdinalTrimestru = Rule
{ name = "<ordinal> trimestru"
, pattern =
[ dimension Ordinal
, Predicate $ isGrain TG.Quarter
]
, prod = \tokens -> case tokens of
(token:_) -> do
n <- getIntValue token
tt . cycleNthAfter True TG.Quarter (n - 1) $
cycleNth TG.Year 0
_ -> Nothing
}
ruleByTime :: Rule
ruleByTime = Rule
{ name = "by <time>"
, pattern =
[ regex "p(a|\x00e2)n(a|\x0103) (la|(i|\x00ee)n)"
, dimension Time
]
, prod = \tokens -> case tokens of
(_:Token Time td:_) -> Token Time <$>
interval TTime.Open (cycleNth TG.Second 0) td
_ -> Nothing
}
ruleDdmmyyyy :: Rule
ruleDdmmyyyy = Rule
{ name = "dd/mm/yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])[-/](0?[1-9]|1[0-2])[/-](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:yy:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleNamedmonth11 :: Rule
ruleNamedmonth11 = Rule
{ name = "named-month"
, pattern =
[ regex "noi(embrie)?"
]
, prod = \_ -> tt $ month 11
}
ruleNamedday3 :: Rule
ruleNamedday3 = Rule
{ name = "named-day"
, pattern =
[ regex "mi(e(rcur(ea|i))?)?"
]
, prod = \_ -> tt $ dayOfWeek 3
}
ruleMmddyyyy :: Rule
ruleMmddyyyy = Rule
{ name = "mm/dd/yyyy"
, pattern =
[ regex "(0?[1-9]|1[0-2])[/-](3[01]|[12]\\d|0?[1-9])[-/](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:yy:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
ruleEoyendOfYear :: Rule
ruleEoyendOfYear = Rule
{ name = "EOY|End of year"
, pattern =
[ regex "sf(a|\x00e2)r(s|\x0219)itul anului"
]
, prod = \_ -> tt $ cycleNth TG.Year 1
}
ruleMothersDay :: Rule
ruleMothersDay = Rule
{ name = "Mother's Day"
, pattern =
[ regex "ziua (mamei|memeii)"
]
, prod = \_ -> tt $ nthDOWOfMonth 1 7 5
}
ruleNameddayDayofmonthNumeral :: Rule
ruleNameddayDayofmonthNumeral = Rule
{ name = "<named-day> <day-of-month> (number)"
, pattern =
[ Predicate isADayOfWeek
, Predicate isDOMInteger
]
, prod = \tokens -> case tokens of
(Token Time td:token:_) -> Token Time <$> intersectDOM td token
_ -> Nothing
}
ruleNamedmonth9 :: Rule
ruleNamedmonth9 = Rule
{ name = "named-month"
, pattern =
[ regex "sept(embrie)?"
]
, prod = \_ -> tt $ month 9
}
ruleHhmmss :: Rule
ruleHhmmss = Rule
{ name = "hh:mm:ss"
, pattern =
[ regex "((?:[01]?\\d)|(?:2[0-3]))[:.]([0-5]\\d)[:.]([0-5]\\d)"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (hh:mm:ss:_)):_) -> do
h <- parseInt hh
m <- parseInt mm
s <- parseInt ss
tt $ hourMinuteSecond True h m s
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleAboutTimeofday
, ruleAbsorptionOfAfterNamedDay
, ruleAcum
, ruleAfterTimeofday
, ruleAzi
, ruleBetweenTimeofdayAndTimeofdayInterval
, ruleBlackFriday
, ruleByTheEndOfTime
, ruleByTime
, ruleChristmasEve
, ruleCraciun
, ruleCycleAcesta
, ruleCycleAcesta2
, ruleCycleTrecut
, ruleCycleUrmatoare
, ruleDayofmonthNonOrdinalNamedmonth
, ruleDayofmonthNonOrdinalOfNamedmonth
, ruleDayofmonthNumeral
, ruleDayofmonthNumeralOfNamedmonth
, ruleDayofmonthnumberNamedmonth
, ruleDayofmonthnumberNamedmonthYear
, ruleDdmm
, ruleDdmmyyyy
, ruleDimineata
, ruleDiseara
, ruleDupaDuration
, ruleDupamiaza
, ruleDurationDeAcum
, ruleDurationInUrma
, ruleDurationInainteDeTime
, ruleEomendOfMonth
, ruleEoyendOfYear
, ruleFromTimeofdayTimeofdayInterval
, ruleHalloweenDay
, ruleHhmm
, ruleHhmmss
, ruleHourofdayIntegerAsRelativeMinutes
, ruleHourofdayJumatate
, ruleHourofdaySfert
, ruleIeri
, ruleInDuration
, ruleInNamedmonth
, ruleIntersect
, ruleIntersectBy
, ruleIntreDatetimeSiDatetimeInterval
, ruleIntroNamedday
, ruleLaTimeofday
, ruleLastCycleOfTime
, ruleLastDayofweekOfTime
, ruleMaine
, ruleMmdd
, ruleMmddyyyy
, ruleMonthDdddInterval
, ruleMothersDay
, ruleNamedday
, ruleNamedday2
, ruleNamedday3
, ruleNamedday4
, ruleNamedday5
, ruleNamedday6
, ruleNamedday7
, ruleNameddayDayofmonthNumeral
, ruleNameddayPeDayofmonthNumeral
, ruleNamedmonth
, ruleNamedmonth10
, ruleNamedmonth11
, ruleNamedmonth12
, ruleNamedmonth2
, ruleNamedmonth3
, ruleNamedmonth4
, ruleNamedmonth5
, ruleNamedmonth6
, ruleNamedmonth7
, ruleNamedmonth8
, ruleNamedmonth9
, ruleNamedmonthDayofmonthNonOrdinal
, ruleNewYearsDay
, ruleNewYearsEve
, ruleNoon
, ruleNthTimeAfterTime
, ruleNthTimeOfTime
, ruleOrdinalTrimestru
, ruleOrdinalTrimestruYear
, rulePartofdayAsta
, rulePeDate
, rulePeDayofmonthNonOrdinal
, rulePentruDuration
, ruleSearaNoapte
, ruleSeason
, ruleSeason2
, ruleSeason3
, ruleSezonAnotimp
, ruleSinceTimeofday
, ruleTheDayofmonthNumeral
, ruleThisnextDayofweek
, ruleTimeAceastaacestaasta
, ruleTimePartofday
, ruleTimezone
, ruleTimeTrecuta
, ruleTimeUrmatoarer
, ruleTimeofdayFix
, ruleTimeofdayLatent
, ruleTrimestruNumeralYear
, ruleUltimaCycle
, ruleUltimeleNCycle
, ruleUntilTimeofday
, ruleUrmatoareaCycle
, ruleUrmatoareleNCycle
, ruleValentinesDay
, ruleWeekend
, ruleYear
, ruleYearLatent
, ruleYearLatent2
, ruleYyyymmdd
]
| rfranek/duckling | Duckling/Time/RO/Rules.hs | bsd-3-clause | 35,577 | 0 | 21 | 9,250 | 10,309 | 5,627 | 4,682 | 1,148 | 2 |
module Data.XCB.Utils where
-- random utility functions
import Data.Char
import Control.Applicative
ensureUpper :: String -> String
ensureUpper [] = []
ensureUpper (x:xs) = (toUpper x) : xs
-- |Like mapMaybe, but for any Alternative.
-- Never returns 'empty', instead returns 'pure []'
mapAlt :: Alternative f => (a -> f b) -> [a] -> f [b]
mapAlt f xs = go xs
where go [] = pure []
go (y:ys) = pure (:) <*> f y <*> go ys
<|> go ys
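-- A small usage sketch (illustrative only, nothing above requires it):
-- at the 'Maybe' instance, elements on which the function fails are simply
-- skipped instead of failing the whole traversal, so 'evensOnly' below
-- evaluates to Just [2,4]; on an empty list the result is always 'pure []'.
evensOnly :: Maybe [Int]
evensOnly = mapAlt (\x -> if even x then Just x else Nothing) [1, 2, 3, 4]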
| aslatter/xcb-types | Data/XCB/Utils.hs | bsd-3-clause | 458 | 0 | 10 | 109 | 176 | 92 | 84 | 11 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP, DeriveDataTypeable, ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
-- | Abstract Haskell syntax for expressions.
module HsExpr where
#include "HsVersions.h"
-- friends:
import HsDecls
import HsPat
import HsLit
import PlaceHolder ( PostTc,PostRn,DataId )
import HsTypes
import HsBinds
-- others:
import TcEvidence
import CoreSyn
import Var
import RdrName
import Name
import BasicTypes
import DataCon
import SrcLoc
import Util
import StaticFlags( opt_PprStyle_Debug )
import Outputable
import FastString
import Type
-- libraries:
import Data.Data hiding (Fixity)
{-
************************************************************************
* *
\subsection{Expressions proper}
* *
************************************************************************
-}
-- * Expressions proper
type LHsExpr id = Located (HsExpr id)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnComma' when
-- in a list
-------------------------
-- | PostTcExpr is an evidence expression attached to the syntax tree by the
-- type checker (c.f. postTcType).
type PostTcExpr = HsExpr Id
-- | We use a PostTcTable where there are a bunch of pieces of evidence, more
-- than is convenient to keep individually.
type PostTcTable = [(Name, PostTcExpr)]
noPostTcExpr :: PostTcExpr
noPostTcExpr = HsLit (HsString "" (fsLit "noPostTcExpr"))
noPostTcTable :: PostTcTable
noPostTcTable = []
-------------------------
-- | SyntaxExpr is like 'PostTcExpr', but it's filled in a little earlier,
-- by the renamer. It's used for rebindable syntax.
--
-- E.g. @(>>=)@ is filled in before the renamer by the appropriate 'Name' for
-- @(>>=)@, and then instantiated by the type checker with its type args
-- etc
type SyntaxExpr id = HsExpr id
noSyntaxExpr :: SyntaxExpr id -- Before renaming, and sometimes after,
-- (if the syntax slot makes no sense)
noSyntaxExpr = HsLit (HsString "" (fsLit "noSyntaxExpr"))
type CmdSyntaxTable id = [(Name, SyntaxExpr id)]
-- See Note [CmdSyntaxTable]
{-
Note [CmdSyntaxTable]
~~~~~~~~~~~~~~~~~~~~~
Used only for arrow-syntax stuff (HsCmdTop), the CmdSyntaxTable keeps
track of the methods needed for a Cmd.
* Before the renamer, this list is an empty list
* After the renamer, it takes the form @[(std_name, HsVar actual_name)]@
For example, for the 'arr' method
* normal case: (GHC.Control.Arrow.arr, HsVar GHC.Control.Arrow.arr)
* with rebindable syntax: (GHC.Control.Arrow.arr, arr_22)
where @arr_22@ is whatever 'arr' is in scope
* After the type checker, it takes the form [(std_name, <expression>)]
where <expression> is the evidence for the method. This evidence is
instantiated with the class, but is still polymorphic in everything
else. For example, in the case of 'arr', the evidence has type
forall b c. (b->c) -> a b c
where 'a' is the ambient type of the arrow. This polymorphism is
important because the desugarer uses the same evidence at multiple
different types.
This is Less Cool than what we normally do for rebindable syntax, which is to
make fully-instantiated piece of evidence at every use site. The Cmd way
is Less Cool because
* The renamer has to predict which methods are needed.
See the tedious RnExpr.methodNamesCmd.
* The desugarer has to know the polymorphic type of the instantiated
method. This is checked by Inst.tcSyntaxName, but is less flexible
than the rest of rebindable syntax, where the type is less
pre-ordained. (And this flexibility is useful; for example we can
typecheck do-notation with (>>=) :: m1 a -> (a -> m2 b) -> m2 b.)
-}
-- | A Haskell expression.
data HsExpr id
= HsVar id -- ^ Variable
| HsIPVar HsIPName -- ^ Implicit parameter
| HsOverLit (HsOverLit id) -- ^ Overloaded literals
| HsLit HsLit -- ^ Simple (non-overloaded) literals
| HsLam (MatchGroup id (LHsExpr id)) -- ^ Lambda abstraction. Currently always a single match
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnLam',
-- 'ApiAnnotation.AnnRarrow',
| HsLamCase (PostTc id Type) (MatchGroup id (LHsExpr id)) -- ^ Lambda-case
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnLam',
-- 'ApiAnnotation.AnnCase','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| HsApp (LHsExpr id) (LHsExpr id) -- ^ Application
-- | Operator applications:
-- NB Bracketed ops such as (+) come out as Vars.
-- NB We need an expr for the operator in an OpApp/Section since
-- the typechecker may need to apply the operator to a few types.
| OpApp (LHsExpr id) -- left operand
(LHsExpr id) -- operator
(PostRn id Fixity) -- Renamer adds fixity; bottom until then
(LHsExpr id) -- right operand
-- | Negation operator. Contains the negated expression and the name
-- of 'negate'
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnMinus'
| NegApp (LHsExpr id)
(SyntaxExpr id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
-- - Note: if 'ApiAnnotation.AnnVal' is present this is actually an
-- inactive 'HsSCC'
-- - Note: if multiple 'ApiAnnotation.AnnVal' are
-- present this is actually an inactive 'HsTickPragma'
| HsPar (LHsExpr id) -- ^ Parenthesised expr; see Note [Parens in HsSyn]
| SectionL (LHsExpr id) -- operand; see Note [Sections in HsSyn]
(LHsExpr id) -- operator
| SectionR (LHsExpr id) -- operator; see Note [Sections in HsSyn]
(LHsExpr id) -- operand
-- | Used for explicit tuples and sections thereof
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| ExplicitTuple
[LHsTupArg id]
Boxity
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnCase',
-- 'ApiAnnotation.AnnOf','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| HsCase (LHsExpr id)
(MatchGroup id (LHsExpr id))
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnIf',
-- 'ApiAnnotation.AnnSemi',
-- 'ApiAnnotation.AnnThen','ApiAnnotation.AnnSemi2',
-- 'ApiAnnotation.AnnElse',
| HsIf (Maybe (SyntaxExpr id)) -- cond function
-- Nothing => use the built-in 'if'
-- See Note [Rebindable if]
(LHsExpr id) -- predicate
(LHsExpr id) -- then part
(LHsExpr id) -- else part
-- | Multi-way if
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnIf'
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
| HsMultiIf (PostTc id Type) [LGRHS id (LHsExpr id)]
-- | let(rec)
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnLet',
-- 'ApiAnnotation.AnnIn','ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| HsLet (HsLocalBinds id)
(LHsExpr id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDo',
-- 'ApiAnnotation.AnnOpen', 'ApiAnnotation.AnnSemi',
-- 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnClose'
| HsDo (HsStmtContext Name) -- The parameterisation is unimportant
-- because in this context we never use
-- the PatGuard or ParStmt variant
[ExprLStmt id] -- "do":one or more stmts
(PostTc id Type) -- Type of the whole expression
-- | Syntactic list: [a,b,c,...]
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| ExplicitList
(PostTc id Type) -- Gives type of components of list
(Maybe (SyntaxExpr id)) -- For OverloadedLists, the fromListN witness
[LHsExpr id]
-- | Syntactic parallel array: [:e1, ..., en:]
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnComma',
-- 'ApiAnnotation.AnnVbar'
-- 'ApiAnnotation.AnnClose'
| ExplicitPArr
(PostTc id Type) -- type of elements of the parallel array
[LHsExpr id]
-- | Record construction
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnClose'
| RecordCon (Located id) -- The constructor. After type checking
-- it's the dataConWrapId of the constructor
PostTcExpr -- Data con Id applied to type args
(HsRecordBinds id)
-- | Record update
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnClose'
| RecordUpd (LHsExpr id)
(HsRecordBinds id)
-- (HsMatchGroup Id) -- Filled in by the type checker to be
-- -- a match that does the job
[DataCon] -- Filled in by the type checker to the
-- _non-empty_ list of DataCons that have
-- all the upd'd fields
[PostTc id Type] -- Argument types of *input* record type
[PostTc id Type] -- and *output* record type
-- For a type family, the arg types are of the *instance* tycon,
-- not the family tycon
-- | Expression with an explicit type signature. @e :: type@
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnDcolon'
| ExprWithTySig
(LHsExpr id)
(LHsType id)
(PostRn id [Name]) -- After renaming, the list of Names
-- contains the named and unnamed
-- wildcards brought in scope by the
-- signature
| ExprWithTySigOut -- TRANSLATION
(LHsExpr id)
(LHsType Name) -- Retain the signature for
-- round-tripping purposes
-- | Arithmetic sequence
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnDotdot','ApiAnnotation.AnnComma',
-- 'ApiAnnotation.AnnClose'
| ArithSeq
PostTcExpr
(Maybe (SyntaxExpr id)) -- For OverloadedLists, the fromList witness
(ArithSeqInfo id)
-- | Arithmetic sequence for parallel array
| PArrSeq
PostTcExpr -- [:e1..e2:] or [:e1, e2..e3:]
(ArithSeqInfo id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnVal', 'ApiAnnotation.AnnClose'
| HsSCC FastString -- "set cost centre" SCC pragma
(LHsExpr id) -- expr whose cost is to be measured
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnVal', 'ApiAnnotation.AnnClose'
| HsCoreAnn FastString -- hdaume: core annotation
(LHsExpr id)
-----------------------------------------------------------
-- MetaHaskell Extensions
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose',
-- 'ApiAnnotation.AnnClose'
| HsBracket (HsBracket id)
-- See Note [Pending Splices]
| HsRnBracketOut
(HsBracket Name) -- Output of the renamer is the *original* renamed
-- expression, plus
[PendingRnSplice] -- _renamed_ splices to be type checked
| HsTcBracketOut
(HsBracket Name) -- Output of the type checker is the *original*
-- renamed expression, plus
[PendingTcSplice] -- _typechecked_ splices to be
-- pasted back in by the desugarer
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| HsSpliceE Bool -- True <=> typed splice
(HsSplice id) -- False <=> untyped
| HsQuasiQuoteE (HsQuasiQuote id)
-- See Note [Quasi-quote overview] in TcSplice
-----------------------------------------------------------
-- Arrow notation extension
-- | @proc@ notation for Arrows
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnProc',
-- 'ApiAnnotation.AnnRarrow'
| HsProc (LPat id) -- arrow abstraction, proc
(LHsCmdTop id) -- body of the abstraction
-- always has an empty stack
---------------------------------------
-- static pointers extension
| HsStatic (LHsExpr id)
---------------------------------------
-- The following are commands, not expressions proper
-- They are only used in the parsing stage and are removed
-- immediately in parser.RdrHsSyn.checkCommand
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.Annlarrowtail',
-- 'ApiAnnotation.Annrarrowtail','ApiAnnotation.AnnLarrowtail',
-- 'ApiAnnotation.AnnRarrowtail'
| HsArrApp -- Arrow tail, or arrow application (f -< arg)
(LHsExpr id) -- arrow expression, f
(LHsExpr id) -- input expression, arg
(PostTc id Type) -- type of the arrow expressions f,
-- of the form a t t', where arg :: t
HsArrAppType -- higher-order (-<<) or first-order (-<)
Bool -- True => right-to-left (f -< arg)
-- False => left-to-right (arg >- f)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose'
| HsArrForm -- Command formation, (| e cmd1 .. cmdn |)
(LHsExpr id) -- the operator
-- after type-checking, a type abstraction to be
-- applied to the type of the local environment tuple
(Maybe Fixity) -- fixity (filled in by the renamer), for forms that
-- were converted from OpApp's by the renamer
[LHsCmdTop id] -- argument commands
---------------------------------------
-- Haskell program coverage (Hpc) Support
| HsTick
(Tickish id)
(LHsExpr id) -- sub-expression
| HsBinTick
Int -- module-local tick number for True
Int -- module-local tick number for False
(LHsExpr id) -- sub-expression
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnVal','ApiAnnotation.AnnVal2',
-- 'ApiAnnotation.AnnColon','ApiAnnotation.AnnVal3',
-- 'ApiAnnotation.AnnMinus',
-- 'ApiAnnotation.AnnVal4','ApiAnnotation.AnnColon2',
-- 'ApiAnnotation.AnnVal5',
-- 'ApiAnnotation.AnnClose'
| HsTickPragma -- A pragma introduced tick
(FastString,(Int,Int),(Int,Int)) -- external span for this tick
(LHsExpr id)
---------------------------------------
-- These constructors only appear temporarily in the parser.
-- The renamer translates them into the Right Thing.
| EWildPat -- wildcard
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnAt'
| EAsPat (Located id) -- as pattern
(LHsExpr id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnRarrow'
| EViewPat (LHsExpr id) -- view pattern
(LHsExpr id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnTilde'
| ELazyPat (LHsExpr id) -- ~ pattern
| HsType (LHsType id) -- Explicit type argument; e.g f {| Int |} x y
---------------------------------------
-- Finally, HsWrap appears only in typechecker output
| HsWrap HsWrapper -- TRANSLATION
(HsExpr id)
| HsUnboundVar RdrName
deriving (Typeable)
deriving instance (DataId id) => Data (HsExpr id)
-- | HsTupArg is used for tuple sections
-- (,a,) is represented by ExplicitTuple [Missing ty1, Present a, Missing ty3]
-- Which in turn stands for (\x:ty1 \y:ty2. (x,a,y))
type LHsTupArg id = Located (HsTupArg id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnComma'
data HsTupArg id
= Present (LHsExpr id) -- ^ The argument
| Missing (PostTc id Type) -- ^ The argument is missing, but this is its type
deriving (Typeable)
deriving instance (DataId id) => Data (HsTupArg id)
tupArgPresent :: LHsTupArg id -> Bool
tupArgPresent (L _ (Present {})) = True
tupArgPresent (L _ (Missing {})) = False
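-- A small illustrative helper (a sketch; nothing else in this file uses it):
-- the Missing slots of a tuple section are exactly the arguments of the
-- lambda the section stands for, so its arity is the number of Missing slots,
-- e.g. 2 for the section (,a,) described above.
tupSectionArity :: [LHsTupArg id] -> Int
tupSectionArity = length . filter (not . tupArgPresent)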
{-
Note [Parens in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~
HsPar (and ParPat in patterns, HsParTy in types) is used as follows
* Generally HsPar is optional; the pretty printer adds parens where
necessary. Eg (HsApp f (HsApp g x)) is fine, and prints 'f (g x)'
* HsPars are pretty printed as '( .. )' regardless of whether
  or not they are strictly necessary
* HsPars are respected when rearranging operator fixities.
So a * (b + c) means what it says (where the parens are an HsPar)
Note [Sections in HsSyn]
~~~~~~~~~~~~~~~~~~~~~~~~
Sections should always appear wrapped in an HsPar, thus
HsPar (SectionR ...)
The parser parses sections in a wider variety of situations
(See Note [Parsing sections]), but the renamer checks for those
parens. This invariant makes pretty-printing easier; we don't need
a special case for adding the parens round sections.
Note [Rebindable if]
~~~~~~~~~~~~~~~~~~~~
The rebindable syntax for 'if' is a bit special, because when
rebindable syntax is *off* we do not want to treat
(if c then t else e)
as if it was an application (ifThenElse c t e). Why not?
Because we allow an 'if' to return *unboxed* results, thus
if blah then 3# else 4#
whereas that would not be possible using a call to a polymorphic function
(because you can't call a polymorphic function at an unboxed type).
So we use Nothing to mean "use the old built-in typing rule".
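To illustrate (roughly): without RebindableSyntax the renamer leaves the
slot empty,
    if c then t else e   ==>   HsIf Nothing c t e
whereas with RebindableSyntax on it fills in whatever 'ifThenElse' is in
scope,
    if c then t else e   ==>   HsIf (Just ifThenElse) c t e
and the typechecker then checks that application-style form.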
-}
instance OutputableBndr id => Outputable (HsExpr id) where
ppr expr = pprExpr expr
-----------------------
-- pprExpr, pprLExpr, pprBinds call pprDeeper;
-- the underscore versions do not
pprLExpr :: OutputableBndr id => LHsExpr id -> SDoc
pprLExpr (L _ e) = pprExpr e
pprExpr :: OutputableBndr id => HsExpr id -> SDoc
pprExpr e | isAtomicHsExpr e || isQuietHsExpr e = ppr_expr e
| otherwise = pprDeeper (ppr_expr e)
isQuietHsExpr :: HsExpr id -> Bool
-- Parentheses do display something, but they give little info, and
-- if we go deeper once we are inside them we get ugly things
-- like (...)
isQuietHsExpr (HsPar _) = True
-- applications don't display anything themselves
isQuietHsExpr (HsApp _ _) = True
isQuietHsExpr (OpApp _ _ _ _) = True
isQuietHsExpr _ = False
pprBinds :: (OutputableBndr idL, OutputableBndr idR)
=> HsLocalBindsLR idL idR -> SDoc
pprBinds b = pprDeeper (ppr b)
-----------------------
ppr_lexpr :: OutputableBndr id => LHsExpr id -> SDoc
ppr_lexpr e = ppr_expr (unLoc e)
ppr_expr :: forall id. OutputableBndr id => HsExpr id -> SDoc
ppr_expr (HsVar v) = pprPrefixOcc v
ppr_expr (HsIPVar v) = ppr v
ppr_expr (HsLit lit) = ppr lit
ppr_expr (HsOverLit lit) = ppr lit
ppr_expr (HsPar e) = parens (ppr_lexpr e)
ppr_expr (HsCoreAnn s e)
= vcat [ptext (sLit "HsCoreAnn") <+> ftext s, ppr_lexpr e]
ppr_expr (HsApp e1 e2)
= let (fun, args) = collect_args e1 [e2] in
hang (ppr_lexpr fun) 2 (sep (map pprParendExpr args))
where
collect_args (L _ (HsApp fun arg)) args = collect_args fun (arg:args)
collect_args fun args = (fun, args)
ppr_expr (OpApp e1 op _ e2)
= case unLoc op of
HsVar v -> pp_infixly v
_ -> pp_prefixly
where
pp_e1 = pprDebugParendExpr e1 -- In debug mode, add parens
pp_e2 = pprDebugParendExpr e2 -- to make precedence clear
pp_prefixly
= hang (ppr op) 2 (sep [pp_e1, pp_e2])
pp_infixly v
= sep [pp_e1, sep [pprInfixOcc v, nest 2 pp_e2]]
ppr_expr (NegApp e _) = char '-' <+> pprDebugParendExpr e
ppr_expr (SectionL expr op)
= case unLoc op of
HsVar v -> pp_infixly v
_ -> pp_prefixly
where
pp_expr = pprDebugParendExpr expr
    pp_prefixly = hang (hsep [text "( \\ x_ ->", ppr op])
4 (hsep [pp_expr, ptext (sLit "x_ )")])
pp_infixly v = (sep [pp_expr, pprInfixOcc v])
ppr_expr (SectionR op expr)
= case unLoc op of
HsVar v -> pp_infixly v
_ -> pp_prefixly
where
pp_expr = pprDebugParendExpr expr
pp_prefixly = hang (hsep [text "( \\ x_ ->", ppr op, ptext (sLit "x_")])
4 (pp_expr <> rparen)
pp_infixly v = sep [pprInfixOcc v, pp_expr]
ppr_expr (ExplicitTuple exprs boxity)
= tupleParens (boxityNormalTupleSort boxity)
(fcat (ppr_tup_args $ map unLoc exprs))
where
ppr_tup_args [] = []
ppr_tup_args (Present e : es) = (ppr_lexpr e <> punc es) : ppr_tup_args es
ppr_tup_args (Missing _ : es) = punc es : ppr_tup_args es
punc (Present {} : _) = comma <> space
punc (Missing {} : _) = comma
punc [] = empty
--avoid using PatternSignatures for stage1 code portability
ppr_expr (HsLam matches)
= pprMatches (LambdaExpr :: HsMatchContext id) matches
ppr_expr (HsLamCase _ matches)
= sep [ sep [ptext (sLit "\\case {")],
nest 2 (pprMatches (CaseAlt :: HsMatchContext id) matches <+> char '}') ]
ppr_expr (HsCase expr matches)
= sep [ sep [ptext (sLit "case"), nest 4 (ppr expr), ptext (sLit "of {")],
nest 2 (pprMatches (CaseAlt :: HsMatchContext id) matches <+> char '}') ]
ppr_expr (HsIf _ e1 e2 e3)
= sep [hsep [ptext (sLit "if"), nest 2 (ppr e1), ptext (sLit "then")],
nest 4 (ppr e2),
ptext (sLit "else"),
nest 4 (ppr e3)]
ppr_expr (HsMultiIf _ alts)
= sep $ ptext (sLit "if") : map ppr_alt alts
where ppr_alt (L _ (GRHS guards expr)) =
sep [ char '|' <+> interpp'SP guards
, ptext (sLit "->") <+> pprDeeper (ppr expr) ]
-- special case: let ... in let ...
ppr_expr (HsLet binds expr@(L _ (HsLet _ _)))
= sep [hang (ptext (sLit "let")) 2 (hsep [pprBinds binds, ptext (sLit "in")]),
ppr_lexpr expr]
ppr_expr (HsLet binds expr)
= sep [hang (ptext (sLit "let")) 2 (pprBinds binds),
hang (ptext (sLit "in")) 2 (ppr expr)]
ppr_expr (HsDo do_or_list_comp stmts _) = pprDo do_or_list_comp stmts
ppr_expr (ExplicitList _ _ exprs)
= brackets (pprDeeperList fsep (punctuate comma (map ppr_lexpr exprs)))
ppr_expr (ExplicitPArr _ exprs)
= paBrackets (pprDeeperList fsep (punctuate comma (map ppr_lexpr exprs)))
ppr_expr (RecordCon con_id _ rbinds)
= hang (ppr con_id) 2 (ppr rbinds)
ppr_expr (RecordUpd aexp rbinds _ _ _)
= hang (pprParendExpr aexp) 2 (ppr rbinds)
ppr_expr (ExprWithTySig expr sig _)
= hang (nest 2 (ppr_lexpr expr) <+> dcolon)
4 (ppr sig)
ppr_expr (ExprWithTySigOut expr sig)
= hang (nest 2 (ppr_lexpr expr) <+> dcolon)
4 (ppr sig)
ppr_expr (ArithSeq _ _ info) = brackets (ppr info)
ppr_expr (PArrSeq _ info) = paBrackets (ppr info)
ppr_expr EWildPat = char '_'
ppr_expr (ELazyPat e) = char '~' <> pprParendExpr e
ppr_expr (EAsPat v e) = ppr v <> char '@' <> pprParendExpr e
ppr_expr (EViewPat p e) = ppr p <+> ptext (sLit "->") <+> ppr e
ppr_expr (HsSCC lbl expr)
= sep [ ptext (sLit "{-# SCC") <+> doubleQuotes (ftext lbl) <+> ptext (sLit "#-}"),
pprParendExpr expr ]
ppr_expr (HsWrap co_fn e) = pprHsWrapper (pprExpr e) co_fn
ppr_expr (HsType id) = ppr id
ppr_expr (HsSpliceE t s) = pprSplice t s
ppr_expr (HsBracket b) = pprHsBracket b
ppr_expr (HsRnBracketOut e []) = ppr e
ppr_expr (HsRnBracketOut e ps) = ppr e $$ ptext (sLit "pending(rn)") <+> ppr ps
ppr_expr (HsTcBracketOut e []) = ppr e
ppr_expr (HsTcBracketOut e ps) = ppr e $$ ptext (sLit "pending(tc)") <+> ppr ps
ppr_expr (HsQuasiQuoteE qq) = ppr qq
ppr_expr (HsProc pat (L _ (HsCmdTop cmd _ _ _)))
= hsep [ptext (sLit "proc"), ppr pat, ptext (sLit "->"), ppr cmd]
ppr_expr (HsStatic e)
= hsep [ptext (sLit "static"), pprParendExpr e]
ppr_expr (HsTick tickish exp)
= pprTicks (ppr exp) $
ppr tickish <+> ppr exp
ppr_expr (HsBinTick tickIdTrue tickIdFalse exp)
= pprTicks (ppr exp) $
hcat [ptext (sLit "bintick<"),
ppr tickIdTrue,
ptext (sLit ","),
ppr tickIdFalse,
ptext (sLit ">("),
ppr exp,ptext (sLit ")")]
ppr_expr (HsTickPragma externalSrcLoc exp)
= pprTicks (ppr exp) $
hcat [ptext (sLit "tickpragma<"),
ppr externalSrcLoc,
ptext (sLit ">("),
ppr exp,
ptext (sLit ")")]
ppr_expr (HsArrApp arrow arg _ HsFirstOrderApp True)
= hsep [ppr_lexpr arrow, larrowt, ppr_lexpr arg]
ppr_expr (HsArrApp arrow arg _ HsFirstOrderApp False)
= hsep [ppr_lexpr arg, arrowt, ppr_lexpr arrow]
ppr_expr (HsArrApp arrow arg _ HsHigherOrderApp True)
= hsep [ppr_lexpr arrow, larrowtt, ppr_lexpr arg]
ppr_expr (HsArrApp arrow arg _ HsHigherOrderApp False)
= hsep [ppr_lexpr arg, arrowtt, ppr_lexpr arrow]
ppr_expr (HsArrForm (L _ (HsVar v)) (Just _) [arg1, arg2])
= sep [pprCmdArg (unLoc arg1), hsep [pprInfixOcc v, pprCmdArg (unLoc arg2)]]
ppr_expr (HsArrForm op _ args)
= hang (ptext (sLit "(|") <+> ppr_lexpr op)
4 (sep (map (pprCmdArg.unLoc) args) <+> ptext (sLit "|)"))
ppr_expr (HsUnboundVar nm)
= ppr nm
{-
HsSyn records exactly where the user put parens, with HsPar.
So generally speaking we print without adding any parens.
However, some code is internally generated, and in some places
parens are absolutely required; so for these places we use
pprParendExpr (but don't print double parens of course).
For operator applications we don't add parens, because the operator
fixities should do the job, except in debug mode (-dppr-debug) so we
can see the structure of the parse tree.
-}
pprDebugParendExpr :: OutputableBndr id => LHsExpr id -> SDoc
pprDebugParendExpr expr
= getPprStyle (\sty ->
if debugStyle sty then pprParendExpr expr
else pprLExpr expr)
pprParendExpr :: OutputableBndr id => LHsExpr id -> SDoc
pprParendExpr expr
| hsExprNeedsParens (unLoc expr) = parens (pprLExpr expr)
| otherwise = pprLExpr expr
-- Using pprLExpr makes sure that we go 'deeper'
-- I think that is usually (always?) right
hsExprNeedsParens :: HsExpr id -> Bool
-- True of expressions for which '(e)' and 'e'
-- mean the same thing
hsExprNeedsParens (ArithSeq {}) = False
hsExprNeedsParens (PArrSeq {}) = False
hsExprNeedsParens (HsLit {}) = False
hsExprNeedsParens (HsOverLit {}) = False
hsExprNeedsParens (HsVar {}) = False
hsExprNeedsParens (HsUnboundVar {}) = False
hsExprNeedsParens (HsIPVar {}) = False
hsExprNeedsParens (ExplicitTuple {}) = False
hsExprNeedsParens (ExplicitList {}) = False
hsExprNeedsParens (ExplicitPArr {}) = False
hsExprNeedsParens (HsPar {}) = False
hsExprNeedsParens (HsBracket {}) = False
hsExprNeedsParens (HsRnBracketOut {}) = False
hsExprNeedsParens (HsTcBracketOut {}) = False
hsExprNeedsParens (HsDo sc _ _)
| isListCompExpr sc = False
hsExprNeedsParens _ = True
isAtomicHsExpr :: HsExpr id -> Bool
-- True of a single token
isAtomicHsExpr (HsVar {}) = True
isAtomicHsExpr (HsLit {}) = True
isAtomicHsExpr (HsOverLit {}) = True
isAtomicHsExpr (HsIPVar {}) = True
isAtomicHsExpr (HsUnboundVar {}) = True
isAtomicHsExpr (HsWrap _ e) = isAtomicHsExpr e
isAtomicHsExpr (HsPar e) = isAtomicHsExpr (unLoc e)
isAtomicHsExpr _ = False
{-
************************************************************************
* *
\subsection{Commands (in arrow abstractions)}
* *
************************************************************************
We re-use HsExpr to represent these.
-}
type LHsCmd id = Located (HsCmd id)
data HsCmd id
= HsCmdArrApp -- Arrow tail, or arrow application (f -< arg)
(LHsExpr id) -- arrow expression, f
(LHsExpr id) -- input expression, arg
(PostTc id Type) -- type of the arrow expressions f,
-- of the form a t t', where arg :: t
HsArrAppType -- higher-order (-<<) or first-order (-<)
Bool -- True => right-to-left (f -< arg)
-- False => left-to-right (arg >- f)
| HsCmdArrForm -- Command formation, (| e cmd1 .. cmdn |)
(LHsExpr id) -- the operator
-- after type-checking, a type abstraction to be
-- applied to the type of the local environment tuple
(Maybe Fixity) -- fixity (filled in by the renamer), for forms that
-- were converted from OpApp's by the renamer
[LHsCmdTop id] -- argument commands
| HsCmdApp (LHsCmd id)
(LHsExpr id)
| HsCmdLam (MatchGroup id (LHsCmd id)) -- kappa
| HsCmdPar (LHsCmd id) -- parenthesised command
| HsCmdCase (LHsExpr id)
(MatchGroup id (LHsCmd id)) -- bodies are HsCmd's
| HsCmdIf (Maybe (SyntaxExpr id)) -- cond function
(LHsExpr id) -- predicate
(LHsCmd id) -- then part
(LHsCmd id) -- else part
| HsCmdLet (HsLocalBinds id) -- let(rec)
(LHsCmd id)
| HsCmdDo [CmdLStmt id]
(PostTc id Type) -- Type of the whole expression
| HsCmdCast TcCoercion -- A simpler version of HsWrap in HsExpr
(HsCmd id) -- If cmd :: arg1 --> res
-- co :: arg1 ~ arg2
-- Then (HsCmdCast co cmd) :: arg2 --> res
deriving (Typeable)
deriving instance (DataId id) => Data (HsCmd id)
data HsArrAppType = HsHigherOrderApp | HsFirstOrderApp
deriving (Data, Typeable)
{-
Top-level command, introducing a new arrow.
This may occur inside a proc (where the stack is empty) or as an
argument of a command-forming operator.
-}
type LHsCmdTop id = Located (HsCmdTop id)
data HsCmdTop id
= HsCmdTop (LHsCmd id)
(PostTc id Type) -- Nested tuple of inputs on the command's stack
(PostTc id Type) -- return type of the command
(CmdSyntaxTable id) -- See Note [CmdSyntaxTable]
deriving (Typeable)
deriving instance (DataId id) => Data (HsCmdTop id)
instance OutputableBndr id => Outputable (HsCmd id) where
ppr cmd = pprCmd cmd
-----------------------
-- pprCmd and pprLCmd call pprDeeper;
-- the underscore versions do not
pprLCmd :: OutputableBndr id => LHsCmd id -> SDoc
pprLCmd (L _ c) = pprCmd c
pprCmd :: OutputableBndr id => HsCmd id -> SDoc
pprCmd c | isQuietHsCmd c = ppr_cmd c
| otherwise = pprDeeper (ppr_cmd c)
isQuietHsCmd :: HsCmd id -> Bool
-- Parentheses do display something, but they give little info, and
-- if we go deeper once we are inside them we get ugly things
-- like (...)
isQuietHsCmd (HsCmdPar _) = True
-- applications don't display anything themselves
isQuietHsCmd (HsCmdApp _ _) = True
isQuietHsCmd _ = False
-----------------------
ppr_lcmd :: OutputableBndr id => LHsCmd id -> SDoc
ppr_lcmd c = ppr_cmd (unLoc c)
ppr_cmd :: forall id. OutputableBndr id => HsCmd id -> SDoc
ppr_cmd (HsCmdPar c) = parens (ppr_lcmd c)
ppr_cmd (HsCmdApp c e)
= let (fun, args) = collect_args c [e] in
hang (ppr_lcmd fun) 2 (sep (map pprParendExpr args))
where
collect_args (L _ (HsCmdApp fun arg)) args = collect_args fun (arg:args)
collect_args fun args = (fun, args)
--avoid using PatternSignatures for stage1 code portability
ppr_cmd (HsCmdLam matches)
= pprMatches (LambdaExpr :: HsMatchContext id) matches
ppr_cmd (HsCmdCase expr matches)
= sep [ sep [ptext (sLit "case"), nest 4 (ppr expr), ptext (sLit "of {")],
nest 2 (pprMatches (CaseAlt :: HsMatchContext id) matches <+> char '}') ]
ppr_cmd (HsCmdIf _ e ct ce)
= sep [hsep [ptext (sLit "if"), nest 2 (ppr e), ptext (sLit "then")],
nest 4 (ppr ct),
ptext (sLit "else"),
nest 4 (ppr ce)]
-- special case: let ... in let ...
ppr_cmd (HsCmdLet binds cmd@(L _ (HsCmdLet _ _)))
= sep [hang (ptext (sLit "let")) 2 (hsep [pprBinds binds, ptext (sLit "in")]),
ppr_lcmd cmd]
ppr_cmd (HsCmdLet binds cmd)
= sep [hang (ptext (sLit "let")) 2 (pprBinds binds),
hang (ptext (sLit "in")) 2 (ppr cmd)]
ppr_cmd (HsCmdDo stmts _) = pprDo ArrowExpr stmts
ppr_cmd (HsCmdCast co cmd) = sep [ ppr_cmd cmd
, ptext (sLit "|>") <+> ppr co ]
ppr_cmd (HsCmdArrApp arrow arg _ HsFirstOrderApp True)
= hsep [ppr_lexpr arrow, larrowt, ppr_lexpr arg]
ppr_cmd (HsCmdArrApp arrow arg _ HsFirstOrderApp False)
= hsep [ppr_lexpr arg, arrowt, ppr_lexpr arrow]
ppr_cmd (HsCmdArrApp arrow arg _ HsHigherOrderApp True)
= hsep [ppr_lexpr arrow, larrowtt, ppr_lexpr arg]
ppr_cmd (HsCmdArrApp arrow arg _ HsHigherOrderApp False)
= hsep [ppr_lexpr arg, arrowtt, ppr_lexpr arrow]
ppr_cmd (HsCmdArrForm (L _ (HsVar v)) (Just _) [arg1, arg2])
= sep [pprCmdArg (unLoc arg1), hsep [pprInfixOcc v, pprCmdArg (unLoc arg2)]]
ppr_cmd (HsCmdArrForm op _ args)
= hang (ptext (sLit "(|") <> ppr_lexpr op)
4 (sep (map (pprCmdArg.unLoc) args) <> ptext (sLit "|)"))
pprCmdArg :: OutputableBndr id => HsCmdTop id -> SDoc
pprCmdArg (HsCmdTop cmd@(L _ (HsCmdArrForm _ Nothing [])) _ _ _)
= ppr_lcmd cmd
pprCmdArg (HsCmdTop cmd _ _ _)
= parens (ppr_lcmd cmd)
instance OutputableBndr id => Outputable (HsCmdTop id) where
ppr = pprCmdArg
{-
************************************************************************
* *
\subsection{Record binds}
* *
************************************************************************
-}
type HsRecordBinds id = HsRecFields id (LHsExpr id)
{-
************************************************************************
* *
\subsection{@Match@, @GRHSs@, and @GRHS@ datatypes}
* *
************************************************************************
@Match@es are sets of pattern bindings and right hand sides for
functions, patterns or case branches. For example, if a function @g@
is defined as:
\begin{verbatim}
g (x,y) = y
g ((x:ys),y) = y+1
\end{verbatim}
then \tr{g} has two @Match@es: @(x,y) = y@ and @((x:ys),y) = y+1@.
It is always the case that each element of an @[Match]@ list has the
same number of @pats@s inside it. This corresponds to saying that
a function defined by pattern matching must have the same number of
patterns in each equation.
-}
data MatchGroup id body
= MG { mg_alts :: [LMatch id body] -- The alternatives
, mg_arg_tys :: [PostTc id Type] -- Types of the arguments, t1..tn
, mg_res_ty :: PostTc id Type -- Type of the result, tr
, mg_origin :: Origin }
-- The type is the type of the entire group
-- t1 -> ... -> tn -> tr
-- where there are n patterns
deriving (Typeable)
deriving instance (Data body,DataId id) => Data (MatchGroup id body)
type LMatch id body = Located (Match id body)
-- ^ May have 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnSemi' when in a
-- list
data Match id body
= Match
[LPat id] -- The patterns
(Maybe (LHsType id)) -- A type signature for the result of the match
-- Nothing after typechecking
(GRHSs id body)
deriving (Typeable)
deriving instance (Data body,DataId id) => Data (Match id body)
isEmptyMatchGroup :: MatchGroup id body -> Bool
isEmptyMatchGroup (MG { mg_alts = ms }) = null ms
matchGroupArity :: MatchGroup id body -> Arity
-- Precondition: MatchGroup is non-empty
-- This is called before type checking, when mg_arg_tys is not set
matchGroupArity (MG { mg_alts = alts })
| (alt1:_) <- alts = length (hsLMatchPats alt1)
| otherwise = panic "matchGroupArity"
hsLMatchPats :: LMatch id body -> [LPat id]
hsLMatchPats (L _ (Match pats _ _)) = pats
-- | GRHSs are used both for pattern bindings and for Matches
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnEqual','ApiAnnotation.AnnWhere',
-- 'ApiAnnotation.AnnOpen','ApiAnnotation.AnnClose'
-- 'ApiAnnotation.AnnRarrow','ApiAnnotation.AnnSemi'
data GRHSs id body
= GRHSs {
grhssGRHSs :: [LGRHS id body], -- ^ Guarded RHSs
grhssLocalBinds :: (HsLocalBinds id) -- ^ The where clause
} deriving (Typeable)
deriving instance (Data body,DataId id) => Data (GRHSs id body)
type LGRHS id body = Located (GRHS id body)
-- | Guarded Right Hand Side.
data GRHS id body = GRHS [GuardLStmt id] -- Guards
body -- Right hand side
deriving (Typeable)
deriving instance (Data body,DataId id) => Data (GRHS id body)
-- We know the list must have at least one @Match@ in it.
pprMatches :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> MatchGroup idR body -> SDoc
pprMatches ctxt (MG { mg_alts = matches })
= vcat (map (pprMatch ctxt) (map unLoc matches))
-- Don't print the type; it's only a place-holder before typechecking
-- Exported to HsBinds, which can't see the defn of HsMatchContext
pprFunBind :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> idL -> Bool -> MatchGroup idR body -> SDoc
pprFunBind fun inf matches = pprMatches (FunRhs fun inf) matches
-- Exported to HsBinds, which can't see the defn of HsMatchContext
pprPatBind :: forall bndr id body. (OutputableBndr bndr, OutputableBndr id, Outputable body)
=> LPat bndr -> GRHSs id body -> SDoc
pprPatBind pat (grhss)
= sep [ppr pat, nest 2 (pprGRHSs (PatBindRhs :: HsMatchContext id) grhss)]
pprMatch :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> Match idR body -> SDoc
pprMatch ctxt (Match pats maybe_ty grhss)
= sep [ sep (herald : map (nest 2 . pprParendLPat) other_pats)
, nest 2 ppr_maybe_ty
, nest 2 (pprGRHSs ctxt grhss) ]
where
(herald, other_pats)
= case ctxt of
FunRhs fun is_infix
| not is_infix -> (pprPrefixOcc fun, pats)
-- f x y z = e
-- Not pprBndr; the AbsBinds will
-- have printed the signature
| null pats2 -> (pp_infix, [])
-- x &&& y = e
| otherwise -> (parens pp_infix, pats2)
-- (x &&& y) z = e
where
pp_infix = pprParendLPat pat1 <+> pprInfixOcc fun <+> pprParendLPat pat2
LambdaExpr -> (char '\\', pats)
_ -> ASSERT( null pats1 )
(ppr pat1, []) -- No parens around the single pat
(pat1:pats1) = pats
(pat2:pats2) = pats1
ppr_maybe_ty = case maybe_ty of
Just ty -> dcolon <+> ppr ty
Nothing -> empty
pprGRHSs :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> GRHSs idR body -> SDoc
pprGRHSs ctxt (GRHSs grhss binds)
= vcat (map (pprGRHS ctxt . unLoc) grhss)
$$ ppUnless (isEmptyLocalBinds binds)
(text "where" $$ nest 4 (pprBinds binds))
pprGRHS :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> GRHS idR body -> SDoc
pprGRHS ctxt (GRHS [] body)
= pp_rhs ctxt body
pprGRHS ctxt (GRHS guards body)
= sep [char '|' <+> interpp'SP guards, pp_rhs ctxt body]
pp_rhs :: Outputable body => HsMatchContext idL -> body -> SDoc
pp_rhs ctxt rhs = matchSeparator ctxt <+> pprDeeper (ppr rhs)
{-
************************************************************************
* *
\subsection{Do stmts and list comprehensions}
* *
************************************************************************
-}
type LStmt id body = Located (StmtLR id id body)
type LStmtLR idL idR body = Located (StmtLR idL idR body)
type Stmt id body = StmtLR id id body
type CmdLStmt id = LStmt id (LHsCmd id)
type CmdStmt id = Stmt id (LHsCmd id)
type ExprLStmt id = LStmt id (LHsExpr id)
type ExprStmt id = Stmt id (LHsExpr id)
type GuardLStmt id = LStmt id (LHsExpr id)
type GuardStmt id = Stmt id (LHsExpr id)
type GhciLStmt id = LStmt id (LHsExpr id)
type GhciStmt id = Stmt id (LHsExpr id)
-- The SyntaxExprs in here are used *only* for do-notation and monad
-- comprehensions, which have rebindable syntax. Otherwise they are unused.
-- | API Annotations when in qualifier lists or guards
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnVbar',
-- 'ApiAnnotation.AnnComma','ApiAnnotation.AnnThen',
-- 'ApiAnnotation.AnnBy','ApiAnnotation.AnnBy',
-- 'ApiAnnotation.AnnGroup','ApiAnnotation.AnnUsing'
data StmtLR idL idR body -- body should always be (LHs**** idR)
= LastStmt -- Always the last Stmt in ListComp, MonadComp, PArrComp,
-- and (after the renamer) DoExpr, MDoExpr
-- Not used for GhciStmtCtxt, PatGuard, which scope over other stuff
body
(SyntaxExpr idR) -- The return operator, used only for MonadComp
-- For ListComp, PArrComp, we use the baked-in 'return'
                             -- For DoExpr, MDoExpr, we don't apply a 'return' at all
-- See Note [Monad Comprehensions]
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnLarrow'
| BindStmt (LPat idL)
body
             (SyntaxExpr idR) -- The (>>=) operator; see Note [The type of bind in Stmts]
(SyntaxExpr idR) -- The fail operator
-- The fail operator is noSyntaxExpr
-- if the pattern match can't fail
| BodyStmt body -- See Note [BodyStmt]
(SyntaxExpr idR) -- The (>>) operator
(SyntaxExpr idR) -- The `guard` operator; used only in MonadComp
-- See notes [Monad Comprehensions]
(PostTc idR Type) -- Element type of the RHS (used for arrows)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnLet'
| LetStmt (HsLocalBindsLR idL idR)
-- ParStmts only occur in a list/monad comprehension
| ParStmt [ParStmtBlock idL idR]
(SyntaxExpr idR) -- Polymorphic `mzip` for monad comprehensions
(SyntaxExpr idR) -- The `>>=` operator
-- See notes [Monad Comprehensions]
-- After renaming, the ids are the binders
            -- bound by the stmts and used after them
| TransStmt {
trS_form :: TransForm,
trS_stmts :: [ExprLStmt idL], -- Stmts to the *left* of the 'group'
-- which generates the tuples to be grouped
trS_bndrs :: [(idR, idR)], -- See Note [TransStmt binder map]
trS_using :: LHsExpr idR,
trS_by :: Maybe (LHsExpr idR), -- "by e" (optional)
-- Invariant: if trS_form = GroupBy, then grp_by = Just e
trS_ret :: SyntaxExpr idR, -- The monomorphic 'return' function for
-- the inner monad comprehensions
trS_bind :: SyntaxExpr idR, -- The '(>>=)' operator
trS_fmap :: SyntaxExpr idR -- The polymorphic 'fmap' function for desugaring
-- Only for 'group' forms
} -- See Note [Monad Comprehensions]
-- Recursive statement (see Note [How RecStmt works] below)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnRec'
| RecStmt
{ recS_stmts :: [LStmtLR idL idR body]
-- The next two fields are only valid after renaming
, recS_later_ids :: [idR] -- The ids are a subset of the variables bound by the
-- stmts that are used in stmts that follow the RecStmt
, recS_rec_ids :: [idR] -- Ditto, but these variables are the "recursive" ones,
-- that are used before they are bound in the stmts of
-- the RecStmt.
-- An Id can be in both groups
-- Both sets of Ids are (now) treated monomorphically
-- See Note [How RecStmt works] for why they are separate
-- Rebindable syntax
, recS_bind_fn :: SyntaxExpr idR -- The bind function
, recS_ret_fn :: SyntaxExpr idR -- The return function
, recS_mfix_fn :: SyntaxExpr idR -- The mfix function
-- These fields are only valid after typechecking
, recS_later_rets :: [PostTcExpr] -- (only used in the arrow version)
, recS_rec_rets :: [PostTcExpr] -- These expressions correspond 1-to-1
-- with recS_later_ids and recS_rec_ids,
-- and are the expressions that should be
-- returned by the recursion.
-- They may not quite be the Ids themselves,
-- because the Id may be *polymorphic*, but
-- the returned thing has to be *monomorphic*,
-- so they may be type applications
, recS_ret_ty :: PostTc idR Type -- The type of
-- do { stmts; return (a,b,c) }
-- With rebindable syntax the type might not
-- be quite as simple as (m (tya, tyb, tyc)).
}
deriving (Typeable)
deriving instance (Data body, DataId idL, DataId idR)
=> Data (StmtLR idL idR body)
data TransForm -- The 'f' below is the 'using' function, 'e' is the by function
= ThenForm -- then f or then f by e (depending on trS_by)
| GroupForm -- then group using f or then group by e using f (depending on trS_by)
deriving (Data, Typeable)
data ParStmtBlock idL idR
= ParStmtBlock
[ExprLStmt idL]
[idR] -- The variables to be returned
(SyntaxExpr idR) -- The return operator
deriving( Typeable )
deriving instance (DataId idL, DataId idR) => Data (ParStmtBlock idL idR)
{-
Note [The type of bind in Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some Stmts, notably BindStmt, keep the (>>=) bind operator.
We do NOT assume that it has type
(>>=) :: m a -> (a -> m b) -> m b
In some cases (see Trac #303, #1537) it might have a more
exotic type, such as
(>>=) :: m i j a -> (a -> m j k b) -> m i k b
So we must be careful not to make assumptions about the type.
In particular, the monad may not be uniform throughout.
Note [TransStmt binder map]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The [(idR,idR)] in a TransStmt behaves as follows:
* Before renaming: []
* After renaming:
[ (x27,x27), ..., (z35,z35) ]
These are the variables
bound by the stmts to the left of the 'group'
and used either in the 'by' clause,
or in the stmts following the 'group'
Each item is a pair of identical variables.
* After typechecking:
[ (x27:Int, x27:[Int]), ..., (z35:Bool, z35:[Bool]) ]
Each pair has the same unique, but different *types*.
Note [BodyStmt]
~~~~~~~~~~~~~~~
BodyStmts are a bit tricky, because what they mean
depends on the context. Consider the following contexts:
A do expression of type (m res_ty)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* BodyStmt E any_ty: do { ....; E; ... }
E :: m any_ty
Translation: E >> ...
A list comprehension of type [elt_ty]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* BodyStmt E Bool: [ .. | .... E ]
[ .. | ..., E, ... ]
[ .. | .... | ..., E | ... ]
E :: Bool
Translation: if E then fail else ...
A guard list, guarding a RHS of type rhs_ty
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        * BodyStmt E Bool:   f x | ..., E, ... = ...rhs...
E :: Bool
Translation: if E then fail else ...
A monad comprehension of type (m res_ty)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* BodyStmt E Bool: [ .. | .... E ]
E :: Bool
Translation: guard E >> ...
Array comprehensions are handled like list comprehensions.
Note [How RecStmt works]
~~~~~~~~~~~~~~~~~~~~~~~~
Example:
HsDo [ BindStmt x ex
, RecStmt { recS_rec_ids = [a, c]
, recS_stmts = [ BindStmt b (return (a,c))
, LetStmt a = ...b...
, BindStmt c ec ]
, recS_later_ids = [a, b]
, return (a b) ]
Here, the RecStmt binds a,b,c; but
- Only a,b are used in the stmts *following* the RecStmt,
- Only a,c are used in the stmts *inside* the RecStmt
*before* their bindings
Why do we need *both* rec_ids and later_ids? For monads they could be
combined into a single set of variables, but not for arrows. That
follows from the types of the respective feedback operators:
mfix :: MonadFix m => (a -> m a) -> m a
loop :: ArrowLoop a => a (b,d) (c,d) -> a b c
* For mfix, the 'a' covers the union of the later_ids and the rec_ids
* For 'loop', 'c' is the later_ids and 'd' is the rec_ids
Note [Typing a RecStmt]
~~~~~~~~~~~~~~~~~~~~~~~
A (RecStmt stmts) types as if you had written
(v1,..,vn, _, ..., _) <- mfix (\~(_, ..., _, r1, ..., rm) ->
do { stmts
; return (v1,..vn, r1, ..., rm) })
where v1..vn are the later_ids
r1..rm are the rec_ids
Note [Monad Comprehensions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Monad comprehensions require separate functions like 'return' and
'>>=' for desugaring. These functions are stored in the statements
used in monad comprehensions. For example, the 'return' of the 'LastStmt'
expression is used to lift the body of the monad comprehension:
[ body | stmts ]
=>
stmts >>= \bndrs -> return body
In transform and grouping statements ('then ..' and 'then group ..') the
'return' function is required for nested monad comprehensions, for example:
[ body | stmts, then f, rest ]
=>
f [ env | stmts ] >>= \bndrs -> [ body | rest ]
BodyStmts require the 'Control.Monad.guard' function for boolean
expressions:
[ body | exp, stmts ]
=>
guard exp >> [ body | stmts ]
Parallel statements require the 'Control.Monad.Zip.mzip' function:
[ body | stmts1 | stmts2 | .. ]
=>
mzip stmts1 (mzip stmts2 (..)) >>= \(bndrs1, (bndrs2, ..)) -> return body
In any other context than 'MonadComp', the fields for most of these
'SyntaxExpr's stay bottom.
-}
instance (OutputableBndr idL, OutputableBndr idR)
=> Outputable (ParStmtBlock idL idR) where
ppr (ParStmtBlock stmts _ _) = interpp'SP stmts
instance (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> Outputable (StmtLR idL idR body) where
ppr stmt = pprStmt stmt
pprStmt :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> (StmtLR idL idR body) -> SDoc
pprStmt (LastStmt expr _) = ifPprDebug (ptext (sLit "[last]")) <+> ppr expr
pprStmt (BindStmt pat expr _ _) = hsep [ppr pat, larrow, ppr expr]
pprStmt (LetStmt binds) = hsep [ptext (sLit "let"), pprBinds binds]
pprStmt (BodyStmt expr _ _ _) = ppr expr
pprStmt (ParStmt stmtss _ _) = sep (punctuate (ptext (sLit " | ")) (map ppr stmtss))
pprStmt (TransStmt { trS_stmts = stmts, trS_by = by, trS_using = using, trS_form = form })
= sep $ punctuate comma (map ppr stmts ++ [pprTransStmt by using form])
pprStmt (RecStmt { recS_stmts = segment, recS_rec_ids = rec_ids
, recS_later_ids = later_ids })
= ptext (sLit "rec") <+>
vcat [ ppr_do_stmts segment
, ifPprDebug (vcat [ ptext (sLit "rec_ids=") <> ppr rec_ids
, ptext (sLit "later_ids=") <> ppr later_ids])]
pprTransformStmt :: OutputableBndr id => [id] -> LHsExpr id -> Maybe (LHsExpr id) -> SDoc
pprTransformStmt bndrs using by
= sep [ ptext (sLit "then") <+> ifPprDebug (braces (ppr bndrs))
, nest 2 (ppr using)
, nest 2 (pprBy by)]
pprTransStmt :: Outputable body => Maybe body -> body -> TransForm -> SDoc
pprTransStmt by using ThenForm
= sep [ ptext (sLit "then"), nest 2 (ppr using), nest 2 (pprBy by)]
pprTransStmt by using GroupForm
= sep [ ptext (sLit "then group"), nest 2 (pprBy by), nest 2 (ptext (sLit "using") <+> ppr using)]
pprBy :: Outputable body => Maybe body -> SDoc
pprBy Nothing = empty
pprBy (Just e) = ptext (sLit "by") <+> ppr e
pprDo :: (OutputableBndr id, Outputable body)
=> HsStmtContext any -> [LStmt id body] -> SDoc
pprDo DoExpr stmts = ptext (sLit "do") <+> ppr_do_stmts stmts
pprDo GhciStmtCtxt stmts = ptext (sLit "do") <+> ppr_do_stmts stmts
pprDo ArrowExpr stmts = ptext (sLit "do") <+> ppr_do_stmts stmts
pprDo MDoExpr stmts = ptext (sLit "mdo") <+> ppr_do_stmts stmts
pprDo ListComp stmts = brackets $ pprComp stmts
pprDo PArrComp stmts = paBrackets $ pprComp stmts
pprDo MonadComp stmts = brackets $ pprComp stmts
pprDo _ _ = panic "pprDo" -- PatGuard, ParStmtCxt
ppr_do_stmts :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> [LStmtLR idL idR body] -> SDoc
-- Print a bunch of do stmts, with explicit braces and semicolons,
-- so that we are not vulnerable to layout bugs
ppr_do_stmts stmts
= lbrace <+> pprDeeperList vcat (punctuate semi (map ppr stmts))
<+> rbrace
pprComp :: (OutputableBndr id, Outputable body)
=> [LStmt id body] -> SDoc
pprComp quals -- Prints: body | qual1, ..., qualn
| not (null quals)
, L _ (LastStmt body _) <- last quals
= hang (ppr body <+> char '|') 2 (pprQuals (dropTail 1 quals))
| otherwise
= pprPanic "pprComp" (pprQuals quals)
pprQuals :: (OutputableBndr id, Outputable body)
=> [LStmt id body] -> SDoc
-- Show list comprehension qualifiers separated by commas
pprQuals quals = interpp'SP quals
{-
************************************************************************
* *
Template Haskell quotation brackets
* *
************************************************************************
-}
data HsSplice id
= HsSplice -- $z or $(f 4)
id -- A unique name to identify this splice point
(LHsExpr id) -- See Note [Pending Splices]
deriving (Typeable )
-- See Note [Pending Splices]
data PendingSplice id
= PendSplice Name (LHsExpr id)
deriving( Typeable )
-- It'd be convenient to re-use HsSplice, but the splice-name
-- really is a Name, never an Id. Using (PostRn id Name) is
-- nearly OK, but annoyingly we can't pretty-print it.
data PendingRnSplice
= PendingRnExpSplice (PendingSplice Name)
| PendingRnPatSplice (PendingSplice Name)
| PendingRnTypeSplice (PendingSplice Name)
| PendingRnDeclSplice (PendingSplice Name)
| PendingRnCrossStageSplice Name
deriving (Data, Typeable)
type PendingTcSplice = PendingSplice Id
deriving instance (DataId id) => Data (HsSplice id)
deriving instance (DataId id) => Data (PendingSplice id)
{-
Note [Pending Splices]
~~~~~~~~~~~~~~~~~~~~~~
When we rename an untyped bracket, we name and lift out all the nested
splices, so that when the typechecker hits the bracket, it can
typecheck those nested splices without having to walk over the untyped
bracket code. So for example
[| f $(g x) |]
looks like
HsBracket (HsApp (HsVar "f") (HsSpliceE _ (g x)))
which the renamer rewrites to
HsRnBracketOut (HsApp (HsVar f) (HsSpliceE sn (g x)))
[PendingRnExpSplice (HsSplice sn (g x))]
* The 'sn' is the Name of the splice point.
* The PendingRnExpSplice gives the splice that splice-point name maps to;
and the typechecker can now conveniently find these sub-expressions
* The other copy of the splice, in the second argument of HsSpliceE
in the renamed first arg of HsRnBracketOut
is used only for pretty printing
There are four varieties of pending splices generated by the renamer:
* Pending expression splices (PendingRnExpSplice), e.g.,
[|$(f x) + 2|]
* Pending pattern splices (PendingRnPatSplice), e.g.,
[|\ $(f x) -> x|]
* Pending type splices (PendingRnTypeSplice), e.g.,
[|f :: $(g x)|]
* Pending cross-stage splices (PendingRnCrossStageSplice), e.g.,
\x -> [| x |]
There is a fifth variety of pending splice, which is generated by the type
checker:
* Pending *typed* expression splices, (PendingTcSplice), e.g.,
[||1 + $$(f 2)||]
It would be possible to eliminate HsRnBracketOut and use HsBracketOut for the
output of the renamer. However, when pretty printing the output of the renamer,
e.g., in a type error message, we *do not* want to print out the pending
splices. In contrast, when pretty printing the output of the type checker, we
*do* want to print the pending splices. So splitting them up seems to make
sense, although I hate to add another constructor to HsExpr.
-}
instance OutputableBndr id => Outputable (HsSplice id) where
ppr (HsSplice n e) = angleBrackets (ppr n <> comma <+> ppr e)
instance OutputableBndr id => Outputable (PendingSplice id) where
ppr (PendSplice n e) = angleBrackets (ppr n <> comma <+> ppr e)
pprUntypedSplice :: OutputableBndr id => HsSplice id -> SDoc
pprUntypedSplice = pprSplice False
pprTypedSplice :: OutputableBndr id => HsSplice id -> SDoc
pprTypedSplice = pprSplice True
pprSplice :: OutputableBndr id => Bool -> HsSplice id -> SDoc
pprSplice is_typed (HsSplice n e)
= (if is_typed then ptext (sLit "$$") else char '$')
<> ifPprDebug (brackets (ppr n)) <> eDoc
where
-- We use pprLExpr to match pprParendExpr:
-- Using pprLExpr makes sure that we go 'deeper'
-- I think that is usually (always?) right
pp_as_was = pprLExpr e
eDoc = case unLoc e of
HsPar _ -> pp_as_was
HsVar _ -> pp_as_was
_ -> parens pp_as_was
data HsBracket id = ExpBr (LHsExpr id) -- [| expr |]
| PatBr (LPat id) -- [p| pat |]
| DecBrL [LHsDecl id] -- [d| decls |]; result of parser
| DecBrG (HsGroup id) -- [d| decls |]; result of renamer
| TypBr (LHsType id) -- [t| type |]
| VarBr Bool id -- True: 'x, False: ''T
-- (The Bool flag is used only in pprHsBracket)
| TExpBr (LHsExpr id) -- [|| expr ||]
deriving (Typeable)
deriving instance (DataId id) => Data (HsBracket id)
isTypedBracket :: HsBracket id -> Bool
isTypedBracket (TExpBr {}) = True
isTypedBracket _ = False
instance OutputableBndr id => Outputable (HsBracket id) where
ppr = pprHsBracket
pprHsBracket :: OutputableBndr id => HsBracket id -> SDoc
pprHsBracket (ExpBr e) = thBrackets empty (ppr e)
pprHsBracket (PatBr p) = thBrackets (char 'p') (ppr p)
pprHsBracket (DecBrG gp) = thBrackets (char 'd') (ppr gp)
pprHsBracket (DecBrL ds) = thBrackets (char 'd') (vcat (map ppr ds))
pprHsBracket (TypBr t) = thBrackets (char 't') (ppr t)
pprHsBracket (VarBr True n) = char '\'' <> ppr n
pprHsBracket (VarBr False n) = ptext (sLit "''") <> ppr n
pprHsBracket (TExpBr e) = thTyBrackets (ppr e)
thBrackets :: SDoc -> SDoc -> SDoc
thBrackets pp_kind pp_body = char '[' <> pp_kind <> char '|' <+>
pp_body <+> ptext (sLit "|]")
thTyBrackets :: SDoc -> SDoc
thTyBrackets pp_body = ptext (sLit "[||") <+> pp_body <+> ptext (sLit "||]")
instance Outputable PendingRnSplice where
ppr (PendingRnExpSplice s) = ppr s
ppr (PendingRnPatSplice s) = ppr s
ppr (PendingRnTypeSplice s) = ppr s
ppr (PendingRnDeclSplice s) = ppr s
ppr (PendingRnCrossStageSplice name) = ppr name
{-
************************************************************************
* *
\subsection{Enumerations and list comprehensions}
* *
************************************************************************
-}
data ArithSeqInfo id
= From (LHsExpr id)
| FromThen (LHsExpr id)
(LHsExpr id)
| FromTo (LHsExpr id)
(LHsExpr id)
| FromThenTo (LHsExpr id)
(LHsExpr id)
(LHsExpr id)
deriving (Typeable)
deriving instance (DataId id) => Data (ArithSeqInfo id)
instance OutputableBndr id => Outputable (ArithSeqInfo id) where
ppr (From e1) = hcat [ppr e1, pp_dotdot]
ppr (FromThen e1 e2) = hcat [ppr e1, comma, space, ppr e2, pp_dotdot]
ppr (FromTo e1 e3) = hcat [ppr e1, pp_dotdot, ppr e3]
ppr (FromThenTo e1 e2 e3)
= hcat [ppr e1, comma, space, ppr e2, pp_dotdot, ppr e3]
pp_dotdot :: SDoc
pp_dotdot = ptext (sLit " .. ")
{-
************************************************************************
* *
\subsection{HsMatchCtxt}
* *
************************************************************************
-}
data HsMatchContext id -- Context of a Match
= FunRhs id Bool -- Function binding for f; True <=> written infix
| LambdaExpr -- Patterns of a lambda
| CaseAlt -- Patterns and guards on a case alternative
| IfAlt -- Guards of a multi-way if alternative
| ProcExpr -- Patterns of a proc
| PatBindRhs -- A pattern binding eg [y] <- e = e
| RecUpd -- Record update [used only in DsExpr to
-- tell matchWrapper what sort of
-- runtime error message to generate]
| StmtCtxt (HsStmtContext id) -- Pattern of a do-stmt, list comprehension,
-- pattern guard, etc
| ThPatSplice -- A Template Haskell pattern splice
| ThPatQuote -- A Template Haskell pattern quotation [p| (a,b) |]
| PatSyn -- A pattern synonym declaration
deriving (Data, Typeable)
data HsStmtContext id
= ListComp
| MonadComp
| PArrComp -- Parallel array comprehension
| DoExpr -- do { ... }
| MDoExpr -- mdo { ... } ie recursive do-expression
| ArrowExpr -- do-notation in an arrow-command context
| GhciStmtCtxt -- A command-line Stmt in GHCi pat <- rhs
| PatGuard (HsMatchContext id) -- Pattern guard for specified thing
| ParStmtCtxt (HsStmtContext id) -- A branch of a parallel stmt
| TransStmtCtxt (HsStmtContext id) -- A branch of a transform stmt
deriving (Data, Typeable)
isListCompExpr :: HsStmtContext id -> Bool
-- Uses syntax [ e | quals ]
isListCompExpr ListComp = True
isListCompExpr PArrComp = True
isListCompExpr MonadComp = True
isListCompExpr (ParStmtCtxt c) = isListCompExpr c
isListCompExpr (TransStmtCtxt c) = isListCompExpr c
isListCompExpr _ = False
isMonadCompExpr :: HsStmtContext id -> Bool
isMonadCompExpr MonadComp = True
isMonadCompExpr (ParStmtCtxt ctxt) = isMonadCompExpr ctxt
isMonadCompExpr (TransStmtCtxt ctxt) = isMonadCompExpr ctxt
isMonadCompExpr _ = False
matchSeparator :: HsMatchContext id -> SDoc
matchSeparator (FunRhs {}) = ptext (sLit "=")
matchSeparator CaseAlt = ptext (sLit "->")
matchSeparator IfAlt = ptext (sLit "->")
matchSeparator LambdaExpr = ptext (sLit "->")
matchSeparator ProcExpr = ptext (sLit "->")
matchSeparator PatBindRhs = ptext (sLit "=")
matchSeparator (StmtCtxt _) = ptext (sLit "<-")
matchSeparator RecUpd = panic "unused"
matchSeparator ThPatSplice = panic "unused"
matchSeparator ThPatQuote = panic "unused"
matchSeparator PatSyn = panic "unused"
pprMatchContext :: Outputable id => HsMatchContext id -> SDoc
pprMatchContext ctxt
| want_an ctxt = ptext (sLit "an") <+> pprMatchContextNoun ctxt
| otherwise = ptext (sLit "a") <+> pprMatchContextNoun ctxt
where
want_an (FunRhs {}) = True -- Use "an" in front
want_an ProcExpr = True
want_an _ = False
pprMatchContextNoun :: Outputable id => HsMatchContext id -> SDoc
pprMatchContextNoun (FunRhs fun _) = ptext (sLit "equation for")
<+> quotes (ppr fun)
pprMatchContextNoun CaseAlt = ptext (sLit "case alternative")
pprMatchContextNoun IfAlt = ptext (sLit "multi-way if alternative")
pprMatchContextNoun RecUpd = ptext (sLit "record-update construct")
pprMatchContextNoun ThPatSplice = ptext (sLit "Template Haskell pattern splice")
pprMatchContextNoun ThPatQuote = ptext (sLit "Template Haskell pattern quotation")
pprMatchContextNoun PatBindRhs = ptext (sLit "pattern binding")
pprMatchContextNoun LambdaExpr = ptext (sLit "lambda abstraction")
pprMatchContextNoun ProcExpr = ptext (sLit "arrow abstraction")
pprMatchContextNoun (StmtCtxt ctxt) = ptext (sLit "pattern binding in")
$$ pprStmtContext ctxt
pprMatchContextNoun PatSyn = ptext (sLit "pattern synonym declaration")
-----------------
pprAStmtContext, pprStmtContext :: Outputable id => HsStmtContext id -> SDoc
pprAStmtContext ctxt = article <+> pprStmtContext ctxt
where
pp_an = ptext (sLit "an")
pp_a = ptext (sLit "a")
article = case ctxt of
MDoExpr -> pp_an
PArrComp -> pp_an
GhciStmtCtxt -> pp_an
_ -> pp_a
-----------------
pprStmtContext GhciStmtCtxt = ptext (sLit "interactive GHCi command")
pprStmtContext DoExpr = ptext (sLit "'do' block")
pprStmtContext MDoExpr = ptext (sLit "'mdo' block")
pprStmtContext ArrowExpr = ptext (sLit "'do' block in an arrow command")
pprStmtContext ListComp = ptext (sLit "list comprehension")
pprStmtContext MonadComp = ptext (sLit "monad comprehension")
pprStmtContext PArrComp = ptext (sLit "array comprehension")
pprStmtContext (PatGuard ctxt) = ptext (sLit "pattern guard for") $$ pprMatchContext ctxt
-- Drop the inner contexts when reporting errors, else we get
-- Unexpected transform statement
-- in a transformed branch of
-- transformed branch of
-- transformed branch of monad comprehension
pprStmtContext (ParStmtCtxt c)
| opt_PprStyle_Debug = sep [ptext (sLit "parallel branch of"), pprAStmtContext c]
| otherwise = pprStmtContext c
pprStmtContext (TransStmtCtxt c)
| opt_PprStyle_Debug = sep [ptext (sLit "transformed branch of"), pprAStmtContext c]
| otherwise = pprStmtContext c
-- Used to generate the string for a *runtime* error message
matchContextErrString :: Outputable id => HsMatchContext id -> SDoc
matchContextErrString (FunRhs fun _) = ptext (sLit "function") <+> ppr fun
matchContextErrString CaseAlt = ptext (sLit "case")
matchContextErrString IfAlt = ptext (sLit "multi-way if")
matchContextErrString PatBindRhs = ptext (sLit "pattern binding")
matchContextErrString RecUpd = ptext (sLit "record update")
matchContextErrString LambdaExpr = ptext (sLit "lambda")
matchContextErrString ProcExpr = ptext (sLit "proc")
matchContextErrString ThPatSplice = panic "matchContextErrString" -- Not used at runtime
matchContextErrString ThPatQuote = panic "matchContextErrString" -- Not used at runtime
matchContextErrString PatSyn = panic "matchContextErrString" -- Not used at runtime
matchContextErrString (StmtCtxt (ParStmtCtxt c)) = matchContextErrString (StmtCtxt c)
matchContextErrString (StmtCtxt (TransStmtCtxt c)) = matchContextErrString (StmtCtxt c)
matchContextErrString (StmtCtxt (PatGuard _)) = ptext (sLit "pattern guard")
matchContextErrString (StmtCtxt GhciStmtCtxt) = ptext (sLit "interactive GHCi command")
matchContextErrString (StmtCtxt DoExpr) = ptext (sLit "'do' block")
matchContextErrString (StmtCtxt ArrowExpr) = ptext (sLit "'do' block")
matchContextErrString (StmtCtxt MDoExpr) = ptext (sLit "'mdo' block")
matchContextErrString (StmtCtxt ListComp) = ptext (sLit "list comprehension")
matchContextErrString (StmtCtxt MonadComp) = ptext (sLit "monad comprehension")
matchContextErrString (StmtCtxt PArrComp) = ptext (sLit "array comprehension")
pprMatchInCtxt :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsMatchContext idL -> Match idR body -> SDoc
pprMatchInCtxt ctxt match = hang (ptext (sLit "In") <+> pprMatchContext ctxt <> colon)
4 (pprMatch ctxt match)
pprStmtInCtxt :: (OutputableBndr idL, OutputableBndr idR, Outputable body)
=> HsStmtContext idL -> StmtLR idL idR body -> SDoc
pprStmtInCtxt ctxt (LastStmt e _)
| isListCompExpr ctxt -- For [ e | .. ], do not mutter about "stmts"
= hang (ptext (sLit "In the expression:")) 2 (ppr e)
pprStmtInCtxt ctxt stmt
= hang (ptext (sLit "In a stmt of") <+> pprAStmtContext ctxt <> colon)
2 (ppr_stmt stmt)
where
-- For Group and Transform Stmts, don't print the nested stmts!
ppr_stmt (TransStmt { trS_by = by, trS_using = using
, trS_form = form }) = pprTransStmt by using form
ppr_stmt stmt = pprStmt stmt
| bitemyapp/ghc | compiler/hsSyn/HsExpr.hs | bsd-3-clause | 72,144 | 0 | 15 | 20,636 | 13,642 | 7,110 | 6,532 | 871 | 9 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Scripts.ReviewCommon where
import ClassyPrelude
import Appian
import Appian.Types
import Appian.Instances
import Appian.Lens
import Appian.Client
import Data.Aeson
import Data.Aeson.Lens
import Control.Lens
import Control.Lens.Action.Reified
import Scripts.Common
import Appian.Internal.Arbitrary
import Control.Retry
import qualified Streaming.Prelude as S
import qualified Data.Foldable as F
import Control.Monad.Time
import qualified Data.Csv as Csv
import Data.Random (MonadRandom)
import Control.Monad.Except
import Appian.Internal.Updates
data ReviewBaseConf = ReviewBaseConf
{ reviewType :: ReviewType
, reviewerType :: ReviewerType
, fundingYear :: FundingYear
} deriving Show
adminInitial2017 :: ReviewBaseConf
adminInitial2017 = ReviewBaseConf RevAdminCorrection RevInitial FY2017
adminFinal2017 :: ReviewBaseConf
adminFinal2017 = ReviewBaseConf RevAdminCorrection RevFinal FY2017
adminSolix2017 :: ReviewBaseConf
adminSolix2017 = ReviewBaseConf RevAdminCorrection RevSolix FY2017
adminUsac2017 :: ReviewBaseConf
adminUsac2017 = ReviewBaseConf RevAdminCorrection RevUsac FY2017
appealInitial2017 :: ReviewBaseConf
appealInitial2017 = ReviewBaseConf RevAppeals RevInitial FY2017
appealFinal2017 :: ReviewBaseConf
appealFinal2017 = ReviewBaseConf RevAppeals RevFinal FY2017
appealSolix2017 :: ReviewBaseConf
appealSolix2017 = ReviewBaseConf RevAppeals RevSolix FY2017
appealUsac2017 :: ReviewBaseConf
appealUsac2017 = ReviewBaseConf RevAppeals RevUsac FY2017
spinInitial2017 :: ReviewBaseConf
spinInitial2017 = ReviewBaseConf RevSpinChange RevInitial FY2017
spinFinal2017 :: ReviewBaseConf
spinFinal2017 = ReviewBaseConf RevSpinChange RevFinal FY2017
spinSolix2017 :: ReviewBaseConf
spinSolix2017 = ReviewBaseConf RevSpinChange RevSolix FY2017
spinUsac2017 :: ReviewBaseConf
spinUsac2017 = ReviewBaseConf RevSpinChange RevUsac FY2017
spinInitial2016 :: ReviewBaseConf
spinInitial2016 = ReviewBaseConf RevSpinChange RevInitial FY2016
spinFinal2016 :: ReviewBaseConf
spinFinal2016 = ReviewBaseConf RevSpinChange RevFinal FY2016
spinSolix2016 :: ReviewBaseConf
spinSolix2016 = ReviewBaseConf RevSpinChange RevSolix FY2016
spinUsac2016 :: ReviewBaseConf
spinUsac2016 = ReviewBaseConf RevSpinChange RevUsac FY2016
form486Initial2017 :: ReviewBaseConf
form486Initial2017 = ReviewBaseConf RevForm486 RevInitial FY2017
form486Final2017 :: ReviewBaseConf
form486Final2017 = ReviewBaseConf RevForm486 RevFinal FY2017
form486Solix2017 :: ReviewBaseConf
form486Solix2017 = ReviewBaseConf RevForm486 RevSolix FY2017
form486Usac2017 :: ReviewBaseConf
form486Usac2017 = ReviewBaseConf RevForm486 RevUsac FY2017
servSubInitial2017 :: ReviewBaseConf
servSubInitial2017 = ReviewBaseConf RevServSub RevInitial FY2017
servSubFinal2017 :: ReviewBaseConf
servSubFinal2017 = ReviewBaseConf RevServSub RevFinal FY2017
servSubSolix2017 :: ReviewBaseConf
servSubSolix2017 = ReviewBaseConf RevServSub RevSolix FY2017
servSubUsac2017 :: ReviewBaseConf
servSubUsac2017 = ReviewBaseConf RevServSub RevUsac FY2017
form500Initial2017 :: ReviewBaseConf
form500Initial2017 = ReviewBaseConf RevForm500 RevInitial FY2017
form500Final2017 :: ReviewBaseConf
form500Final2017 = ReviewBaseConf RevForm500 RevFinal FY2017
form500Solix2017 :: ReviewBaseConf
form500Solix2017 = ReviewBaseConf RevForm500 RevSolix FY2017
form500Usac2017 :: ReviewBaseConf
form500Usac2017 = ReviewBaseConf RevForm500 RevUsac FY2017
form500Initial2016 :: ReviewBaseConf
form500Initial2016 = ReviewBaseConf RevForm500 RevInitial FY2016
form500Final2016 :: ReviewBaseConf
form500Final2016 = ReviewBaseConf RevForm500 RevFinal FY2016
form500Solix2016 :: ReviewBaseConf
form500Solix2016 = ReviewBaseConf RevForm500 RevSolix FY2016
form500Usac2016 :: ReviewBaseConf
form500Usac2016 = ReviewBaseConf RevForm500 RevUsac FY2016
comadInitial2016 :: ReviewBaseConf
comadInitial2016 = ReviewBaseConf RevCOMAD RevInitial FY2016
comadFinal2016 :: ReviewBaseConf
comadFinal2016 = ReviewBaseConf RevCOMAD RevFinal FY2016
comadSolix2016 :: ReviewBaseConf
comadSolix2016 = ReviewBaseConf RevCOMAD RevSolix FY2016
comadUsac2016 :: ReviewBaseConf
comadUsac2016 = ReviewBaseConf RevCOMAD RevUsac FY2016
comadInitial2017 :: ReviewBaseConf
comadInitial2017 = ReviewBaseConf RevCOMAD RevInitial FY2017
comadFinal2017 :: ReviewBaseConf
comadFinal2017 = ReviewBaseConf RevCOMAD RevFinal FY2017
comadSolix2017 :: ReviewBaseConf
comadSolix2017 = ReviewBaseConf RevCOMAD RevSolix FY2017
comadUsac2017 :: ReviewBaseConf
comadUsac2017 = ReviewBaseConf RevCOMAD RevUsac FY2017
data ReviewType
= RevSelect
| RevAppeals
| RevForm486
| RevCOMAD
| RevForm500
| RevSpinChange
| RevServSub
| RevAdminCorrection
| RevSRCSpinChange
| RevBulkSpinChange
| RevOther Text
deriving (Show, Eq, Read)
instance Parseable ReviewType where
parseElement "-- Select a Review Type --" = pure RevSelect
parseElement "Appeals" = pure RevAppeals
parseElement "FCC Form 486/CIPA" = pure RevForm486
parseElement "COMAD" = pure RevCOMAD
parseElement "FCC Form 500" = pure RevForm500
parseElement "SPIN Change" = pure RevSpinChange
parseElement "Service Substitution" = pure RevServSub
parseElement "Administrative Correction" = pure RevAdminCorrection
parseElement "Bulk SPIN" = pure RevBulkSpinChange
parseElement "SRC SPIN" = pure RevSRCSpinChange
parseElement s = pure $ RevOther s -- throwM $ ParseException $ tshow s <> " is not a recognized Review Type."
data ReviewerType
= ReviewerSelect
| RevInitial
| RevFinal
| RevSolix
| RevUsac
| RevHSInit
| RevHSFinal
deriving (Show, Eq, Read)
instance Parseable ReviewerType where
parseElement "-- Select a Reviewer Type --" = pure ReviewerSelect
parseElement "Initial Review" = pure RevInitial
parseElement "Final Review" = pure RevFinal
parseElement "Solix QA Review" = pure RevSolix
parseElement "USAC QA Review" = pure RevUsac
parseElement "Heightened Scrutiny Initial Review" = pure RevHSInit
parseElement "Heightened Scrutiny Final Review" = pure RevHSFinal
parseElement s = Left $ tshow s <> " is not a recognized Reviewer Type." -- throwM $ ParseException $ tshow s <> " is not a recognized Reviewer Type."
data ReviewConf = ReviewConf
{ revTaskVar :: TVar DistributeTask
, revChan :: TChan (ThreadControl RecordRef)
}
newReviewConf :: MonadIO m => m ReviewConf
newReviewConf = ReviewConf <$> atomically (newTVar Produce) <*> atomically newTChan
-- delaySecs :: (MonadDelay m, MonadThreadId m) => Int -> m ()
-- delaySecs n = threadDelay $ n * 1000000
retryIt :: MonadIO m => m (Either SomeException a) -> m (Either SomeException a)
retryIt act = retrying reviewRetryPolicy shouldRetry (const act)
where
shouldRetry status (Left _) = do
putStrLn $ "Retrying with a " <> tshow (rsCumulativeDelay status) <> " delay"
pure True
shouldRetry _ (Right _) = pure False
reviewRetryPolicy :: Monad m => RetryPolicyM m
reviewRetryPolicy = exponentialBackoff 1000000 `mappend` limitRetries 1
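-- With this policy a failing action is retried at most once, after a
-- backoff that starts at 1,000,000 microseconds (about one second).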
distributeLinks_ :: (RapidFire m, MonadGen m, MonadIO m) => (RecordRef -> AppianT m Value) -> ReportId -> ReviewConf -> Value -> AppianT m Value
distributeLinks_ action rid conf v = do
gf <- handleMissing "FRN Case Grid" v $ v ^? getGridFieldCell . traverse
mVal <- distributeTasks (revTaskVar conf) (revChan conf) (getAllLinks rid v) action
handleMissing "Could not select FRN!" v mVal
distributeLinks :: (RapidFire m, MonadGen m, MonadIO m) => Text -> ReportId -> ReviewConf -> Value -> AppianT m Value
distributeLinks actionName rid conf v = distributeLinks_ (viewRelatedActions v >=> uncurry (executeRelatedAction actionName)) rid conf v
getAllLinks :: (RapidFire m, MonadGen m) => ReportId -> Value -> S.Stream (S.Of (ThreadControl RecordRef)) (AppianT m) ()
getAllLinks rid v = do
mIdents <- lift $ foldGridFieldPagesReport rid (MonadicFold $ getGridFieldCell . traverse) (accumLinks v) (Just mempty) v
case mIdents of
Nothing -> throwError $ MissingComponentError ("There are no identifiers!", v)
Just idents -> S.each $ fmap Item idents <> pure Finished
accumLinks :: Monad m => Value -> Maybe (Vector RecordRef) -> GridField GridFieldCell -> AppianT m (Maybe (Vector RecordRef), Value)
accumLinks val l gf = return (l', val)
where
l' = (<>) <$> l <*> (gf ^? gfColumns . at "Application/Request Number" . traverse . _TextCellLink . _2)
addNotes :: (RapidFire m, MonadGen m) => Value -> AppianT m Value
addNotes val = foldGridFieldPages (MonadicFold $ getGridFieldCell . traverse) makeNotes val val
makeNotes :: (RapidFire m, MonadGen m) => Value -> GridField GridFieldCell -> AppianT m (Value, Value)
makeNotes val gf = do
v <- foldGridField' makeNote val gf
return (v, v)
makeNote :: (RapidFire m, MonadGen m) => Value -> GridFieldIdent -> AppianT m Value
makeNote val ident = do
gf <- handleMissing "FRN Note Grid" val $ val ^? getGridFieldCell . traverse
assign appianValue val
sendUpdates1 "Notes: Select FRN Checkbox" (MonadicFold (failing (to (const (selectCheckbox ident gf)) . to toUpdate . to Right) (to $ const $ Left "Unable to make the gridfield update")))
isEdit <- usesValue (has $ getButton "Edit Note")
case isEdit of
False -> do
sendUpdates1 "Click Add Note" (buttonUpdateF "Add Note")
sendUpdates1 "Enter Note Text" (paragraphArbitraryUpdate "Note Text" 10000)
sendUpdates1 "Submit New Note" (buttonUpdateF "Submit New Note")
True -> do
sendUpdates1 "Edit Note" (buttonUpdateF "Edit Note")
sendUpdates1 "Enter Note Text" (paragraphArbitraryUpdate "Note Text" 10000)
sendUpdates1 "Submit Note Change" (buttonUpdateF "Submit Note Change")
use appianValue
-- val' <- sendUpdates "Notes: Select FRN Checkbox" (MonadicFold (failing (to (const (selectCheckbox ident gf)) . to toUpdate . to Right) (to $ const $ Left "Unable to make the gridfield update"))
-- ) val
-- case val' ^? getButton "Edit Note" of
-- Nothing ->
-- sendUpdates "Click Add Note" (MonadicFold (to $ buttonUpdate "Add Note")) val'
-- >>= sendUpdates "Enter Note Text" (paragraphArbitraryUpdate "Note Text" 10000
-- <|> MonadicFold (to $ buttonUpdate "Submit New Note")
-- )
-- Just _ -> sendUpdates "Edit Note" (MonadicFold (to $ buttonUpdate "Edit Note")) val'
-- >>= sendUpdates "Enter Note Text" (paragraphArbitraryUpdate "Note Text" 10000
-- <|> MonadicFold (to $ buttonUpdate "Submit Note Change")
-- )
newtype CaseNumber = CaseNumber
{ _caseNumber :: Int
} deriving (Show, Eq, Num, Csv.FromField)
makeLenses ''CaseNumber
data ReviewConf' = ReviewConf'
{ _frnCaseNumber :: CaseNumber
, _reviewer :: Login
} deriving (Show, Eq)
makeLenses ''ReviewConf'
instance Csv.FromNamedRecord ReviewConf' where
parseNamedRecord r = ReviewConf'
<$> r Csv..: "Case Id"
<*> Csv.parseNamedRecord r
instance HasLogin ReviewConf' where
getLogin conf = conf ^. reviewer
-- This supersedes the old myAssignedReport implementation, which is kept
-- commented out at the end of this module.
myAssignedReport :: (RapidFire m, MonadGen m) => Maybe CaseNumber -> ReviewBaseConf -> AppianT m (ReportId, Value)
myAssignedReport mCaseNum conf = do
(rid, v) <- openReport "My Assigned Post-Commit Assignments"
let filterCase v = case mCaseNum of
Nothing -> return v
Just caseNum -> sendReportUpdates rid "Application/Request Number" (MonadicFold $ to $ textUpdate "Application/Request Number" (caseNum ^. caseNumber . to tshow)) v
res <- sendReportUpdates rid "Select Review Type" (dropdownUpdateF' "Review Type" (reviewType conf)) v
>>= sendReportUpdates rid "Select Reviewer Type" (dropdownUpdateF' "Reviewer Type" (reviewerType conf))
>>= sendReportUpdates rid "Select Funding Year" (dropdownUpdateF' "Funding Year" (fundingYear conf))
>>= filterCase
>>= sendReportUpdates rid "Click Apply Filters" (MonadicFold (to (buttonUpdate "Apply Filters")))
return (rid, res)
-- -- Old implementation, superseded by the myAssignedReport above (which
-- -- additionally takes a Maybe CaseNumber):
-- myAssignedReport :: (RunClient m, MonadTime m, MonadThrow m, MonadLogger m, MonadCatch m, MonadDelay m, MonadThreadId m, MonadRandom m, MonadError ServantError m) => ReviewBaseConf -> AppianT m (ReportId, Value)
-- myAssignedReport = myAssignedReportTemp Nothing
| limaner2002/EPC-tools | USACScripts/src/Scripts/ReviewCommon.hs | bsd-3-clause | 12,604 | 0 | 19 | 2,101 | 2,757 | 1,416 | 1,341 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Stack.FileWatch
( fileWatch
, fileWatchPoll
, printExceptionStderr
) where
import Blaze.ByteString.Builder (toLazyByteString, copyByteString)
import Blaze.ByteString.Builder.Char.Utf8 (fromShow)
import Control.Concurrent.Async (race_)
import Control.Concurrent.STM
import Control.Exception (Exception, fromException)
import Control.Exception.Enclosed (tryAny)
import Control.Monad (forever, unless, when)
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String (fromString)
import Data.Traversable (forM)
import GHC.IO.Handle (hIsTerminalDevice)
import Ignore
import Path
import System.Console.ANSI
import System.Exit
import System.FSNotify
import System.IO (stdout, stderr)
-- | Print an exception to stderr
printExceptionStderr :: Exception e => e -> IO ()
printExceptionStderr e =
L.hPut stderr $ toLazyByteString $ fromShow e <> copyByteString "\n"
fileWatch :: IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatch = fileWatchConf defaultConfig
fileWatchPoll :: IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatchPoll = fileWatchConf $ defaultConfig { confUsePolling = True }
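-- A hypothetical usage sketch ('getProjectRoot', 'discoverSourceFiles'
-- and 'buildProject' are assumed helpers, not part of this module):
--
-- > fileWatch getProjectRoot $ \setWatched -> do
-- >   files <- discoverSourceFiles
-- >   setWatched (Set.fromList files)
-- >   buildProject
--
-- The callback records which files to watch; whenever any of them
-- changes, the whole action is run again.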
-- | Run an action, watching for file changes
--
-- The action provided takes a callback that is used to set the files to be
-- watched. When any of those files are changed, we rerun the action again.
fileWatchConf :: WatchConfig
-> IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatchConf cfg getProjectRoot inner = withManagerConf cfg $ \manager -> do
allFiles <- newTVarIO Set.empty
dirtyVar <- newTVarIO True
watchVar <- newTVarIO Map.empty
projRoot <- getProjectRoot
mChecker <- findIgnoreFiles [VCSGit, VCSMercurial, VCSDarcs] projRoot >>= buildChecker
(FileIgnoredChecker isFileIgnored) <-
case mChecker of
Left err ->
do putStrLn $ "Failed to parse VCS's ignore file: " ++ err
return $ FileIgnoredChecker (const False)
Right chk -> return chk
let onChange event = atomically $ do
files <- readTVar allFiles
when (eventPath event `Set.member` files) (writeTVar dirtyVar True)
setWatched :: Set (Path Abs File) -> IO ()
setWatched files = do
atomically $ writeTVar allFiles $ Set.map toFilePath files
watch0 <- readTVarIO watchVar
let actions = Map.mergeWithKey
keepListening
stopListening
startListening
watch0
newDirs
watch1 <- forM (Map.toList actions) $ \(k, mmv) -> do
mv <- mmv
return $
case mv of
Nothing -> Map.empty
Just v -> Map.singleton k v
atomically $ writeTVar watchVar $ Map.unions watch1
where
newDirs = Map.fromList $ map (, ())
$ Set.toList
$ Set.map parent files
keepListening _dir listen () = Just $ return $ Just listen
stopListening = Map.map $ \f -> do
() <- f
return Nothing
startListening = Map.mapWithKey $ \dir () -> do
let dir' = fromString $ toFilePath dir
listen <- watchDir manager dir' (not . isFileIgnored . eventPath) onChange
return $ Just listen
let watchInput = do
line <- getLine
unless (line == "quit") $ do
case line of
"help" -> do
putStrLn ""
putStrLn "help: display this help"
putStrLn "quit: exit"
putStrLn "build: force a rebuild"
putStrLn "watched: display watched directories"
"build" -> atomically $ writeTVar dirtyVar True
"watched" -> do
watch <- readTVarIO watchVar
mapM_ (putStrLn . toFilePath) (Map.keys watch)
"" -> atomically $ writeTVar dirtyVar True
_ -> putStrLn $ concat
[ "Unknown command: "
, show line
, ". Try 'help'"
]
watchInput
race_ watchInput $ forever $ do
atomically $ do
dirty <- readTVar dirtyVar
check dirty
eres <- tryAny $ inner setWatched
-- Clear dirtiness flag after the build to avoid an infinite
-- loop caused by the build itself triggering dirtiness. This
-- could be viewed as a bug, since files changed during the
-- build will not trigger an extra rebuild, but overall seems
-- like better behavior. See
-- https://github.com/commercialhaskell/stack/issues/822
atomically $ writeTVar dirtyVar False
let withColor color action = do
outputIsTerminal <- hIsTerminalDevice stdout
if outputIsTerminal
then do
setSGR [SetColor Foreground Dull color]
action
setSGR [Reset]
else action
case eres of
Left e -> do
let color = case fromException e of
Just ExitSuccess -> Green
_ -> Red
withColor color $ printExceptionStderr e
_ -> withColor Green $
putStrLn "Success! Waiting for next file change."
putStrLn "Type help for available commands. Press enter to force a rebuild."
| meiersi-11ce/stack | src/Stack/FileWatch.hs | bsd-3-clause | 6,011 | 0 | 25 | 2,171 | 1,441 | 714 | 727 | 128 | 10 |
{-|
Module : Numeric.AERN.NumericOrder.Extrema
Description : types that have least and greatest elements
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Types that have least and greatest elements.
This module is hidden and reexported via its parent NumericOrder.
-}
module Numeric.AERN.NumericOrder.Extrema where
{-|
A type with extrema.
-}
class (HasLeast t, HasGreatest t) => HasExtrema t
{-|
A type with a least element.
-}
class HasLeast t where
least :: t -> t
{-|
A type with a greatest element.
-}
class HasGreatest t where
greatest :: t -> t
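-- An illustrative (hypothetical) set of instances for 'Int'; the
-- argument of 'least'/'greatest' is ignored and only pins down the type:
--
-- > instance HasLeast Int where least _ = minBound
-- > instance HasGreatest Int where greatest _ = maxBound
-- > instance HasExtrema Int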
| michalkonecny/aern | aern-order/src/Numeric/AERN/NumericOrder/Extrema.hs | bsd-3-clause | 729 | 0 | 7 | 197 | 70 | 39 | 31 | -1 | -1 |
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
module Genotype.Comparison
( ReferenceComparison (..)
, PhaseKnowledge (..)
, compareToRef
, printCompResult
, headsTails
, firstCertain
) where
import Control.Monad (sequence)
import Data.Text (Text)
import Genotype.Types
data ReferenceComparison
= BothMatch
| FirstMatch
| LastMatch
| NoMatch
| CannotCompare
deriving (Eq, Show)
data PhaseKnowledge
= Known
| Unknown
deriving (Eq, Show)
compareToRef :: BasePair -> (Datum, Datum) -> ReferenceComparison
compareToRef ref datums =
case getBasePairs datums of
Just (bp1, bp2)
| bp1 == ref && bp2 == ref -> BothMatch
| bp1 == ref -> FirstMatch
| bp2 == ref -> LastMatch
| otherwise -> NoMatch
_ -> CannotCompare
getBasePair :: Datum -> Maybe BasePair
getBasePair = \case
Certain bp -> Just bp
Estimated bp -> Just bp
_ -> Nothing
getBasePairs :: (Datum, Datum) -> Maybe (BasePair, BasePair)
getBasePairs (d1,d2) = do
bp1 <- getBasePair d1
bp2 <- getBasePair d2
return (bp1,bp2)
printCompResult :: PhaseKnowledge -> ReferenceComparison -> Text
printCompResult Known = \case
BothMatch -> "0"
FirstMatch -> "1"
LastMatch -> "2"
NoMatch -> "3"
CannotCompare -> "-9"
printCompResult Unknown = \case
BothMatch -> "0"
FirstMatch -> "1"
LastMatch -> "1"
NoMatch -> "2"
CannotCompare -> "-9"
headsTails :: [[a]] -> Maybe ([a],[[a]])
headsTails ll = do
hts <- sequence $ map headTail ll
return (map fst hts, map snd hts)
headTail :: [a] -> Maybe (a,[a])
headTail (x:xs) = Just (x,xs)
headTail _ = Nothing
firstCertain :: [(Datum, Datum)] -> BasePair
firstCertain (d:ds) =
case d of
((Certain bp), _) -> bp
(_, (Certain bp)) -> bp
_ -> firstCertain ds
firstCertain [] = error "can't find reference BasePair"
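-- Illustrative behaviour (a sketch; 'A' and 'T' stand in for whatever
-- constructors Genotype.Types actually provides for BasePair):
--
-- > compareToRef A (Certain A, Estimated T) == FirstMatch
-- > printCompResult Unknown FirstMatch == "1"
-- > printCompResult Known LastMatch == "2"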
| Jonplussed/genotype-parser | src/Genotype/Comparison.hs | bsd-3-clause | 1,856 | 0 | 13 | 429 | 663 | 352 | 311 | 68 | 9 |
module Test.Pos.Util.Golden where
import Universum
import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode)
import Data.Aeson.Encode.Pretty (Config (..), Indent (..),
NumberFormat (..), encodePretty', keyOrder)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LB
import Data.FileEmbed (embedStringFile)
import qualified Data.List as List
import Data.SafeCopy (SafeCopy, safeGet, safePut)
import Data.Serialize (runGetLazy, runPutLazy)
import qualified Data.Text.Lazy as LT (unpack)
import Data.Text.Lazy.Builder (toLazyText)
import qualified Data.Yaml as Y
import Formatting.Buildable (build)
import Hedgehog (Gen, Group, Property, PropertyT, TestLimit,
discoverPrefix, forAll, property, withTests, (===))
import Hedgehog.Internal.Property (failWith)
import Hedgehog.Internal.TH (TExpQ)
import Language.Haskell.TH (ExpQ, Q, loc_filename, runIO)
import Language.Haskell.TH.Syntax (qLocation)
import System.Directory (canonicalizePath)
import System.FilePath (takeDirectory, (</>))
import qualified Text.JSON.Canonical as Canonical
import Pos.Util.Json.Canonical (SchemaError (..))
discoverGolden :: TExpQ Group
discoverGolden = discoverPrefix "golden_"
eachOf :: (Show a) => TestLimit -> Gen a -> (a -> PropertyT IO ()) -> Property
eachOf testLimit things hasProperty =
withTests testLimit . property $ forAll things >>= hasProperty
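-- Example usage ('genAddress' and 'roundTripsAesonShow' are assumed to
-- be defined elsewhere in the test-suite):
--
-- > prop_roundtrip :: Property
-- > prop_roundtrip = eachOf 1000 genAddress roundTripsAesonShow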
-- | A handy shortcut for embedding golden testing files
embedGoldenTest :: FilePath -> ExpQ
embedGoldenTest path =
makeRelativeToTestDir ("golden/" <> path) >>= embedStringFile
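-- Example (hypothetical file name; the path is resolved relative to the
-- package's test directory, under "golden/"):
--
-- > goldenAddress :: Text
-- > goldenAddress = $(embedGoldenTest "Address")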
-- | Test if prettified JSON and unformatted JSON are equivalent.
goldenValueEquiv :: (Eq a)
=> Either String a -> Either String a -> Either String Bool
goldenValueEquiv prettified unformatted = do
p <- prettified
u <- unformatted
pure (p == u)
-- | Test if prettified canonical JSON and unformatted canonical
-- JSON are equivalent.
goldenFileCanonicalEquiv :: HasCallStack => FilePath -> FilePath -> Property
goldenFileCanonicalEquiv pPath uPath = withFrozenCallStack $ do
withTests 1 . property $ do
pStr <- liftIO $ readFile pPath
uBs <- liftIO $ LB.readFile uPath
case Canonical.parseCanonicalJSON uBs of
Left err -> failWith Nothing $ "could not decode: " <> show err
Right jsVal -> (toText $ Canonical.prettyCanonicalJSON jsVal) === pStr
goldenTestJSON :: (Eq a, FromJSON a, HasCallStack, Show a, ToJSON a)
=> a -> FilePath -> Property
goldenTestJSON x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (LB.readFile path)
encode x === bs
case eitherDecode bs of
Left err -> failWith Nothing $ "could not decode: " <> show err
Right x' -> x === x'
goldenTestJSONPretty :: (Eq a, FromJSON a, HasCallStack, Show a, ToJSON a)
=> a -> FilePath -> Property
goldenTestJSONPretty x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (LB.readFile path)
-- Sort keys by their order of appearance in the argument list
-- of `keyOrder`. Keys not in the argument list are moved to the
-- end, while their order is preserved.
let defConfig' = Config { confIndent = Spaces 4
, confCompare = keyOrder ["file", "hash"]
, confNumFormat = Generic
, confTrailingNewline = False }
encodePretty' defConfig' x === bs
case eitherDecode bs of
Left err -> failWith Nothing $ "could not decode: " <> show err
Right x' -> x === x'
-- | Only check that the datatype equals the decoding of the file
goldenTestJSONDec :: (Eq a, FromJSON a, HasCallStack, Show a)
=> a -> FilePath -> Property
goldenTestJSONDec x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO $ LB.readFile path
case eitherDecode bs of
Left err -> failWith Nothing $ "could not decode: " <> show err
Right x' -> x === x'
goldenTestCanonicalJSONDec
:: ( Eq a
, Canonical.FromJSON (Either SchemaError) a
, HasCallStack
, Show a
)
=> a
-> FilePath
-> Property
goldenTestCanonicalJSONDec x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (LB.readFile path)
case Canonical.parseCanonicalJSON bs of
Left err -> failWith Nothing $ "could not parse: " <> show err
Right jsv -> case Canonical.fromJSON jsv of
Left (schErr :: SchemaError) ->
failWith Nothing $ LT.unpack $ toLazyText $ build schErr
Right x' -> x === x'
goldenTestSafeCopy :: (Eq a, SafeCopy a, HasCallStack, Show a)
=> a -> FilePath -> Property
goldenTestSafeCopy x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (LB.readFile path)
runPutLazy (safePut x) === bs
case runGetLazy safeGet bs of
Left err -> failWith Nothing $ "could not safeGet: " <> show err
Right x' -> x === x'
goldenTestSafeCopyDec :: (Eq a, SafeCopy a, HasCallStack, Show a)
=> a -> FilePath -> Property
goldenTestSafeCopyDec x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (LB.readFile path)
case runGetLazy safeGet bs of
Left err -> failWith Nothing $ "could not safeGet: " <> show err
Right x' -> x === x'
makeRelativeToTestDir :: FilePath -> Q FilePath
makeRelativeToTestDir rel = do
loc <- qLocation
fp <- runIO $ canonicalizePath $ loc_filename loc
case findTestDir fp of
Nothing ->
error $ "Couldn't find directory 'test' in path: " <> toText fp
Just testDir -> pure $ testDir </> rel
where
findTestDir f =
let dir = takeDirectory f
in if dir == f
then Nothing
else if "/test" `List.isSuffixOf` dir
then Just dir
else findTestDir dir
--------------------------------------------------------------------------------
-- YAML golden testing
--------------------------------------------------------------------------------
-- | Test if a datatype's Yaml encoding matches the encoding in the file,
-- and if the decoding of the file contents matches the datatype
goldenTestYaml :: (Eq a, FromJSON a, HasCallStack, Show a, ToJSON a)
=> a -> FilePath -> Property
goldenTestYaml x path = withFrozenCallStack $ do
withTests 1 . property $ do
bs <- liftIO (BS.readFile path)
Y.encode x === bs
case Y.decodeEither' bs of
Left err -> failWith Nothing $ "could not decode: " <> show err
Right x' -> x === x'
| input-output-hk/pos-haskell-prototype | util/test/Test/Pos/Util/Golden.hs | mit | 7,067 | 0 | 20 | 2,015 | 1,900 | 970 | 930 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2013-15 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Succinct.Dictionary.Rank9
( Rank9(..)
, rank9
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Data.Bits
import qualified Data.Vector.Primitive as P
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Storable as S
import Data.Vector.Internal.Check as Ck
import Data.Word
import Data.Proxy
import Succinct.Dictionary.Builder
import Succinct.Dictionary.Class
import Succinct.Internal.Bit as B
import Succinct.Internal.StorableBitVector as SBV
import Succinct.Internal.PopCount
#define BOUNDS_CHECK(f) Ck.f __FILE__ __LINE__ Ck.Bounds
data Rank9 v = Rank9 {-# UNPACK #-} !Int !(v Word64) !(v Word64)
deriving instance Eq (v Word64) => Eq (Rank9 v)
deriving instance Ord (v Word64) => Ord (Rank9 v)
deriving instance Show (v Word64) => Show (Rank9 v)
instance G.Vector v Word64 => Access Bool (Rank9 v) where
size (Rank9 n _ _) = n
{-# INLINE size #-}
(!) (Rank9 n bs _) i
= BOUNDS_CHECK(checkIndex) "Rank9.!" i n
$ testBit (G.unsafeIndex bs $ wd i) (bt i)
{-# INLINE (!) #-}
instance Bitwise (Rank9 P.Vector) B.Vector where
bitwise (Rank9 n v _) = V_Bit n v
{-# INLINE bitwise #-}
instance Bitwise (Rank9 S.Vector) SBV.Vector where
bitwise (Rank9 n v _) = SBV.SV_Bit n v
{-# INLINE bitwise #-}
instance (G.Vector v Word64, G.Vector v Bool) => Dictionary Bool (Rank9 v)
instance (G.Vector v Word64, G.Vector v Bool) => Select0 (Rank9 v)
instance (G.Vector v Word64, G.Vector v Bool) => Select1 (Rank9 v)
instance (G.Vector v Word64, G.Vector v Bool) => Ranked (Rank9 v) where
rank1 t@(Rank9 n _ _) i =
BOUNDS_CHECK(checkIndex) "rank" i (n+1) $
unsafeRank1 t i
{-# INLINE rank1 #-}
unsafeRank1 (Rank9 _ ws ps) i = result
where
wi = wd i
block = wi `shiftR` 3 `shiftL` 1
base = G.unsafeIndex ps block
t = wi .&. 7 - 1
s = G.unsafeIndex ps (block + 1)
sShift = (t + t `shiftR` 60 .&. 8) * 9
count9 = s `unsafeShiftR` sShift .&. 0x1FF
-- If we just used 'wi' here, we would index out of 'ws' when
-- i == n and n `mod` 64 == 0. But, whenever i `mod` 64 == 0 we
-- .&. with 0, so the value read from the ws is effectively
-- ignored.
--
-- The following is a branchless work-around for this: we look
-- at the previous word whenever i `mod` 64 == 0, except when i
-- == 0.
--
-- TODO(klao): Is this needed? How to handle this better?
-- Abstract it out into Internal!
wi' = wd (i - 1) - (i - 1) `unsafeShiftR` 63
rest = popCountWord64 $ (G.unsafeIndex ws wi') .&. (unsafeBit (bt i) - 1)
result = fromIntegral (base + count9) + rest
{-# INLINE unsafeRank1 #-}
rank9 :: forall v t.
(G.Vector (Packed v) Word64, G.Vector v Word64, Bitwise t v, PackedBits v)
=> Proxy v -> t -> Rank9 (Packed v)
rank9 _ t = case bitwise t :: v Bit of
v -> Rank9 n (packedBits v) ps
where
-- Because we are building word-by-word and not bit-by-bit, we
-- sometimes build a bigger structure than is strictly
-- necessary. (In the expression below (n+63) should have been
-- simply n.)
k = ((n + 63) `shiftR` 9 + 1) `shiftL` 1
n = G.length v
ps = buildWithFoldlM foldlMPadded (r9Builder $ vectorSized k) v
{-# INLINE [0] rank9 #-}
-- {-# RULES "rank9" rank9 = id #-}
data Build9 a = Build9
{-# UNPACK #-} !Int -- word count `mod` 8
{-# UNPACK #-} !Word64 -- current rank
{-# UNPACK #-} !Word64 -- rank within the current block
{-# UNPACK #-} !Word64 -- current "rank9" word
!a -- rank vector builder
r9Builder :: G.Vector v Word64
=> Builder Word64 (v Word64) -> Builder Word64 (v Word64)
r9Builder vectorBuilder = Builder $ case vectorBuilder of
Builder (Building kr hr zr) -> Building stop step start
where
start = Build9 0 0 0 0 <$> zr
step (Build9 n tr br r9 rs) w
| n == 7 = Build9 0 tr' 0 0 <$> stepRank rs tr r9
| otherwise = return $ Build9 (n + 1) tr br' r9' rs
where
tr' = tr + br'
br' = br + fromIntegral (popCountWord64 w)
r9' = r9 .|. br' `unsafeShiftL` (9 * n)
stepRank rs tr r9 = hr rs tr >>= (`hr` r9)
stop (Build9 _n tr _br r9 rs)
= stepRank rs tr r9 >>= kr
{-# INLINE r9Builder #-}
rank9WordBuilder :: G.Vector v Word64 => Builder Word64 (Rank9 v)
rank9WordBuilder = f <$> vector <*> r9Builder vector
where
f ws rs = Rank9 (G.length ws `shiftL` 6) ws rs
{-# INLINE f #-}
{-# INLINE rank9WordBuilder #-}
instance G.Vector v Word64 => Buildable Bool (Rank9 v) where
builder = Builder $ case rank9WordBuilder of
Builder r9wb -> wordToBitBuilding r9wb fixSize
where
fixSize n (Rank9 _ ws rs) = return $ Rank9 n ws rs
{-# INLINE builder #-}
| Gabriel439/succinct | src/Succinct/Dictionary/Rank9.hs | bsd-2-clause | 5,443 | 0 | 16 | 1,308 | 1,560 | 838 | 722 | 107 | 1 |
-- | Organ definitions.
module Content.ItemKindOrgan ( organs ) where
import qualified Data.EnumMap.Strict as EM
import Game.LambdaHack.Common.Ability
import Game.LambdaHack.Common.Color
import Game.LambdaHack.Common.Dice
import Game.LambdaHack.Common.Flavour
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.Msg
import Game.LambdaHack.Content.ItemKind
organs :: [ItemKind]
organs =
[fist, foot, claw, smallClaw, snout, smallJaw, jaw, largeJaw, tooth, horn, tentacle, lash, noseTip, lip, torsionRight, torsionLeft, thorn, boilingFissure, arsenicFissure, sulfurFissure, beeSting, sting, venomTooth, venomFang, screechingBeak, largeTail, pupil, armoredSkin, eye2, eye3, eye4, eye5, eye6, eye7, eye8, vision4, vision6, vision8, vision10, vision12, vision14, vision16, nostril, insectMortality, sapientBrain, animalBrain, speedGland2, speedGland4, speedGland6, speedGland8, speedGland10, scentGland, boilingVent, arsenicVent, sulfurVent, bonusHP]
fist, foot, claw, smallClaw, snout, smallJaw, jaw, largeJaw, tooth, horn, tentacle, lash, noseTip, lip, torsionRight, torsionLeft, thorn, boilingFissure, arsenicFissure, sulfurFissure, beeSting, sting, venomTooth, venomFang, screechingBeak, largeTail, pupil, armoredSkin, eye2, eye3, eye4, eye5, eye6, eye7, eye8, vision4, vision6, vision8, vision10, vision12, vision14, vision16, nostril, insectMortality, sapientBrain, animalBrain, speedGland2, speedGland4, speedGland6, speedGland8, speedGland10, scentGland, boilingVent, arsenicVent, sulfurVent, bonusHP :: ItemKind
-- Weapons
-- * Human weapon organs
fist = ItemKind
{ isymbol = '%'
, iname = "fist"
, ifreq = [("fist", 100)]
, iflavour = zipPlain [Red]
, icount = 2
, irarity = [(1, 1)]
, iverbHit = "punch"
, iweight = 2000
, iaspects = []
, ieffects = [Hurt (4 * d 1)]
, ifeature = [Durable, Identified]
, idesc = ""
, ikit = []
}
foot = fist
{ iname = "foot"
, ifreq = [("foot", 50)]
, icount = 2
, iverbHit = "kick"
, ieffects = [Hurt (4 * d 1)]
, idesc = ""
}
-- * Universal weapon organs
claw = fist
{ iname = "claw"
, ifreq = [("claw", 50)]
, icount = 2 -- even if more, only the fore claws used for fighting
, iverbHit = "hook"
, iaspects = [Timeout $ 4 + d 4]
, ieffects = [Hurt (2 * d 1), Recharging (toOrganGameTurn "slow 10" 2)]
, idesc = ""
}
smallClaw = fist
{ iname = "small claw"
, ifreq = [("small claw", 50)]
, icount = 2
, iverbHit = "slash"
, ieffects = [Hurt (2 * d 1)]
, idesc = ""
}
snout = fist
{ iname = "snout"
, ifreq = [("snout", 10)]
, iverbHit = "bite"
, ieffects = [Hurt (2 * d 1)]
, idesc = ""
}
smallJaw = fist
{ iname = "small jaw"
, ifreq = [("small jaw", 20)]
, icount = 1
, iverbHit = "rip"
, ieffects = [Hurt (3 * d 1)]
, idesc = ""
}
jaw = fist
{ iname = "jaw"
, ifreq = [("jaw", 20)]
, icount = 1
, iverbHit = "rip"
, ieffects = [Hurt (5 * d 1)]
, idesc = ""
}
largeJaw = fist
{ iname = "large jaw"
, ifreq = [("large jaw", 100)]
, icount = 1
, iverbHit = "crush"
, ieffects = [Hurt (12 * d 1)]
, idesc = ""
}
tooth = fist
{ iname = "tooth"
, ifreq = [("tooth", 20)]
, icount = 3
, iverbHit = "nail"
, ieffects = [Hurt (2 * d 1)]
, idesc = ""
}
horn = fist
{ iname = "horn"
, ifreq = [("horn", 20)]
, icount = 2
, iverbHit = "impale"
, ieffects = [Hurt (8 * d 1)]
, idesc = ""
}
-- * Monster weapon organs
tentacle = fist
{ iname = "tentacle"
, ifreq = [("tentacle", 50)]
, icount = 4
, iverbHit = "slap"
, ieffects = [Hurt (4 * d 1)]
, idesc = ""
}
lash = fist
{ iname = "lash"
, ifreq = [("lash", 100)]
, icount = 1
, iverbHit = "lash"
, iaspects = []
, ieffects = [Hurt (3 * d 1)]
, idesc = ""
}
noseTip = fist
{ iname = "tip"
, ifreq = [("nose tip", 50)]
, icount = 1
, iverbHit = "poke"
, ieffects = [Hurt (2 * d 1)]
, idesc = ""
}
lip = fist
{ iname = "lip"
, ifreq = [("lip", 10)]
, icount = 1
, iverbHit = "lap"
, iaspects = [Timeout $ 3 + d 3]
, ieffects = [ Hurt (1 * d 1)
, Recharging (toOrganGameTurn "weakened" (2 + d 2)) ]
, idesc = ""
}
torsionRight = fist
{ iname = "right torsion"
, ifreq = [("right torsion", 100)]
, icount = 1
, iverbHit = "twist"
, iaspects = [Timeout $ 5 + d 5]
, ieffects = [ Hurt (17 * d 1)
, Recharging (toOrganGameTurn "slow 10" (3 + d 3)) ]
, idesc = ""
}
torsionLeft = fist
{ iname = "left torsion"
, ifreq = [("left torsion", 100)]
, icount = 1
, iverbHit = "twist"
, iaspects = [Timeout $ 5 + d 5]
, ieffects = [ Hurt (17 * d 1)
, Recharging (toOrganGameTurn "weakened" (3 + d 3)) ]
, idesc = ""
}
-- * Special weapon organs
thorn = fist
{ iname = "thorn"
, ifreq = [("thorn", 100)]
, icount = 2 + d 3
, iverbHit = "impale"
, ieffects = [Hurt (2 * d 1)]
, ifeature = [Identified] -- not Durable
, idesc = ""
}
boilingFissure = fist
{ iname = "fissure"
, ifreq = [("boiling fissure", 100)]
, icount = 5 + d 5
, iverbHit = "hiss at"
, ieffects = [Burn $ 1 * d 1]
, ifeature = [Identified] -- not Durable
, idesc = ""
}
arsenicFissure = boilingFissure
{ iname = "fissure"
, ifreq = [("arsenic fissure", 100)]
, icount = 2 + d 2
, ieffects = [Burn $ 1 * d 1, toOrganGameTurn "weakened" (2 + d 2)]
}
sulfurFissure = boilingFissure
{ iname = "fissure"
, ifreq = [("sulfur fissure", 100)]
, icount = 2 + d 2
, ieffects = [Burn $ 1 * d 1, RefillHP 6]
}
beeSting = fist
{ iname = "bee sting"
, ifreq = [("bee sting", 100)]
, icount = 1
, iverbHit = "sting"
, iaspects = [AddArmorMelee 90, AddArmorRanged 90]
, ieffects = [Burn $ 2 * d 1, Paralyze 3, RefillHP 5]
, ifeature = [Identified] -- not Durable
, idesc = "Painful, but beneficial."
}
sting = fist
{ iname = "sting"
, ifreq = [("sting", 100)]
, icount = 1
, iverbHit = "sting"
, iaspects = [Timeout $ 1 + d 5]
, ieffects = [Burn $ 2 * d 1, Recharging (Paralyze 2)]
, idesc = "Painful, debilitating and harmful."
}
venomTooth = fist
{ iname = "venom tooth"
, ifreq = [("venom tooth", 100)]
, icount = 2
, iverbHit = "bite"
, iaspects = [Timeout $ 5 + d 3]
, ieffects = [ Hurt (2 * d 1)
, Recharging (toOrganGameTurn "slow 10" (3 + d 3)) ]
, idesc = ""
}
-- TODO: should also confer poison resistance, but current implementation
-- is too costly (poison removal each turn)
venomFang = fist
{ iname = "venom fang"
, ifreq = [("venom fang", 100)]
, icount = 2
, iverbHit = "bite"
, iaspects = [Timeout $ 7 + d 5]
, ieffects = [ Hurt (2 * d 1)
, Recharging (toOrganNone "poisoned") ]
, idesc = ""
}
screechingBeak = armoredSkin
{ iname = "screeching beak"
, ifreq = [("screeching beak", 100)]
, icount = 1
, iverbHit = "peck"
, iaspects = [Timeout $ 5 + d 5]
, ieffects = [ Recharging (Summon [("scavenger", 1)] $ 1 + dl 2)
, Hurt (2 * d 1) ]
, idesc = ""
}
largeTail = fist
{ iname = "large tail"
, ifreq = [("large tail", 50)]
, icount = 1
, iverbHit = "knock"
, iaspects = [Timeout $ 1 + d 3]
, ieffects = [Hurt (8 * d 1), Recharging (PushActor (ThrowMod 400 25))]
, idesc = ""
}
pupil = fist
{ iname = "pupil"
, ifreq = [("pupil", 100)]
, icount = 1
, iverbHit = "gaze at"
, iaspects = [AddSight 10, Timeout $ 5 + d 5]
, ieffects = [ Hurt (1 * d 1)
, Recharging (DropItem COrgan "temporary conditions" True)
, Recharging $ RefillHP (-2)
]
, idesc = ""
}
-- Non-weapons
-- * Armor organs
armoredSkin = ItemKind
{ isymbol = '%'
, iname = "armored skin"
, ifreq = [("armored skin", 100)]
, iflavour = zipPlain [Red]
, icount = 1
, irarity = [(1, 1)]
, iverbHit = "bash"
, iweight = 2000
, iaspects = [AddArmorMelee 30, AddArmorRanged 30]
, ieffects = []
, ifeature = [Durable, Identified]
, idesc = ""
, ikit = []
}
-- * Sense organs
eye :: Int -> ItemKind
eye n = armoredSkin
{ iname = "eye"
, ifreq = [(toGroupName $ "eye" <+> tshow n, 100)]
, icount = 2
, iverbHit = "glare at"
, iaspects = [AddSight (intToDice n)]
, idesc = ""
}
eye2 = eye 2
eye3 = eye 3
eye4 = eye 4
eye5 = eye 5
eye6 = eye 6
eye7 = eye 7
eye8 = eye 8
vision :: Int -> ItemKind
vision n = armoredSkin
{ iname = "vision"
, ifreq = [(toGroupName $ "vision" <+> tshow n, 100)]
, icount = 1
, iverbHit = "visualize"
, iaspects = [AddSight (intToDice n)]
, idesc = ""
}
vision4 = vision 4
vision6 = vision 6
vision8 = vision 8
vision10 = vision 10
vision12 = vision 12
vision14 = vision 14
vision16 = vision 16
nostril = armoredSkin
{ iname = "nostril"
, ifreq = [("nostril", 100)]
, icount = 2
, iverbHit = "snuff"
, iaspects = [AddSmell 1] -- times 2, from icount
, idesc = ""
}
-- * Assorted
insectMortality = fist
{ iname = "insect mortality"
, ifreq = [("insect mortality", 100)]
, icount = 1
, iverbHit = "age"
, iaspects = [Periodic, Timeout $ 40 + d 10]
, ieffects = [Recharging (RefillHP (-1))]
, idesc = ""
}
sapientBrain = armoredSkin
{ iname = "sapient brain"
, ifreq = [("sapient brain", 100)]
, icount = 1
, iverbHit = "outbrain"
, iaspects = [AddSkills unitSkills]
, idesc = ""
}
animalBrain = armoredSkin
{ iname = "animal brain"
, ifreq = [("animal brain", 100)]
, icount = 1
, iverbHit = "blank"
, iaspects = [let absNo = [AbDisplace, AbMoveItem, AbProject, AbApply]
sk = EM.fromList $ zip absNo [-1, -1..]
in AddSkills $ addSkills unitSkills sk]
, idesc = ""
}
speedGland :: Int -> ItemKind
speedGland n = armoredSkin
{ iname = "speed gland"
, ifreq = [(toGroupName $ "speed gland" <+> tshow n, 100)]
, icount = 1
, iverbHit = "spit at"
, iaspects = [ AddSpeed $ intToDice n
, Periodic
, Timeout $ intToDice $ 100 `div` n ]
, ieffects = [Recharging (RefillHP 1)]
, idesc = ""
}
speedGland2 = speedGland 2
speedGland4 = speedGland 4
speedGland6 = speedGland 6
speedGland8 = speedGland 8
speedGland10 = speedGland 10
scentGland = armoredSkin -- TODO: cone attack, 3m away, project? apply?
{ iname = "scent gland"
, ifreq = [("scent gland", 100)]
, icount = 1
, iverbHit = "spray at"
, iaspects = [Periodic, Timeout $ 10 + d 2 |*| 5 ]
, ieffects = [ Recharging (Explode "distressing odor")
, Recharging ApplyPerfume ]
, idesc = ""
}
boilingVent = armoredSkin
{ iname = "vent"
, ifreq = [("boiling vent", 100)]
, iflavour = zipPlain [Blue]
, icount = 1
, iverbHit = "menace"
, iaspects = [Periodic, Timeout $ 2 + d 2 |*| 5]
, ieffects = [Recharging (Explode "boiling water")]
, idesc = ""
}
arsenicVent = boilingVent
{ iname = "vent"
, ifreq = [("arsenic vent", 100)]
, iflavour = zipPlain [Cyan]
, iaspects = [Periodic, Timeout $ 2 + d 2 |*| 5]
, ieffects = [Recharging (Explode "weakness mist")]
}
sulfurVent = boilingVent
{ iname = "vent"
, ifreq = [("sulfur vent", 100)]
, iflavour = zipPlain [BrYellow]
, iaspects = [Periodic, Timeout $ 2 + d 2 |*| 5]
, ieffects = [Recharging (Explode "strength mist")]
}
bonusHP = armoredSkin
{ iname = "bonus HP"
, ifreq = [("bonus HP", 100)]
, icount = 1
, iverbHit = "intimidate"
, iweight = 0
, iaspects = [AddMaxHP 1]
, idesc = ""
}
| Concomitant/LambdaHack | GameDefinition/Content/ItemKindOrgan.hs | bsd-3-clause | 11,806 | 0 | 15 | 3,359 | 4,121 | 2,529 | 1,592 | 356 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Database.DSH.VSL.Opt.Properties.Empty where
import Control.Monad
import qualified Data.Sequence as S
import Database.DSH.Common.VectorLang
import Database.DSH.VSL.Lang
import Database.DSH.VSL.Opt.Properties.Common
import Database.DSH.VSL.Opt.Properties.Types
unp :: Show a => VectorProp a -> Either String a
unp = unpack "Properties.Empty"
mapUnp :: Show a => VectorProp a
-> VectorProp a
-> (a -> a -> VectorProp a)
-> Either String (VectorProp a)
mapUnp = mapUnpack "Properties.Empty"
inferEmptyNullOp :: NullOp -> Either String (VectorProp Bool)
inferEmptyNullOp op =
case op of
Lit (_, ss) ->
case ss of
UnitSeg sd -> Right $ VProp $ S.null sd
Segs sds -> Right $ VProp $ any S.null sds
TableRef{} -> Right $ VProp False
inferEmptyUnOp :: VectorProp Bool -> UnOp r e -> Either String (VectorProp Bool)
inferEmptyUnOp e op =
case op of
WinFun _ -> Right e
Distinct -> Right e
MergeMap -> Right e
Segment -> Right e
Unsegment -> Right e
Reverse -> let ue = unp e in liftM2 VPropPair ue ue
Project _ -> Right e
Select _ -> let ue = unp e in liftM2 VPropPair ue ue
Sort _ -> let ue = unp e in liftM2 VPropPair ue ue
Group _ -> let ue = unp e in liftM3 VPropTriple ue ue ue
-- FIXME think about it: what happens if we feed an empty vector into the aggr operator?
GroupAggr (_, _) -> Right $ VProp False
Number -> Right e
Fold _ -> return $ VProp False
UpdateUnit -> return e
UnitMap -> return e
R1 ->
case e of
VProp _ -> Left "Properties.Empty: not a pair/triple"
VPropPair b _ -> Right $ VProp b
VPropTriple b _ _ -> Right $ VProp b
R2 ->
case e of
VProp _ -> Left "Properties.Empty: not a pair/triple"
VPropPair _ b -> Right $ VProp b
VPropTriple _ b _ -> Right $ VProp b
R3 ->
case e of
VPropTriple _ _ b -> Right $ VProp b
p -> Left ("Properties.Empty: not a triple" ++ show p)
inferEmptyBinOp :: VectorProp Bool -> VectorProp Bool -> BinOp e -> Either String (VectorProp Bool)
inferEmptyBinOp e1 e2 op =
case op of
ReplicateSeg -> mapUnp e1 e2 (\ue1 ue2 -> VPropPair (ue1 || ue2) (ue1 || ue2))
ReplicateScalar -> mapUnp e1 e2 (\_ ue2 -> VPropPair ue2 ue2)
UnboxSng -> mapUnp e1 e2 (\ue1 ue2 -> VPropPair (ue1 || ue2) (ue1 || ue2))
UnboxDefault _ -> mapUnp e1 e2 (\ue1 ue2 -> VPropPair (ue1 || ue2) (ue1 || ue2))
Append -> mapUnp e1 e2 (\ue1 ue2 -> VPropTriple (ue1 && ue2) ue1 ue2)
Align -> mapUnp e1 e2 (\ue1 ue2 -> VProp (ue1 || ue2))
UpdateMap -> mapUnp e1 e2 (\ue1 ue2 -> VProp (ue1 || ue2))
Zip -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropTriple p p p) (ue1 || ue2))
CartProduct -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropTriple p p p) (ue1 || ue2))
GroupJoin _ -> VProp <$> unp e1
ThetaJoin _ -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropTriple p p p) (ue1 || ue2))
NestJoin _ -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropTriple p p p) (ue1 || ue2))
SemiJoin _ -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropPair p p) (ue1 || ue2))
AntiJoin _ -> mapUnp e1 e2 (\ue1 _ -> (\p -> VPropPair p p) ue1)
Materialize -> mapUnp e1 e2 (\ue1 ue2 -> (\p -> VPropPair p p) (ue1 || ue2))
inferEmptyTerOp :: VectorProp Bool -> VectorProp Bool -> VectorProp Bool -> TerOp -> Either String (VectorProp Bool)
inferEmptyTerOp _ e2 e3 op =
case op of
Combine -> let ue2 = unp e2
ue3 = unp e3
in liftM3 VPropTriple (liftM2 (&&) ue2 ue3) ue2 ue3
| ulricha/dsh | src/Database/DSH/VSL/Opt/Properties/Empty.hs | bsd-3-clause | 3,749 | 0 | 14 | 1,117 | 1,497 | 740 | 757 | 79 | 23 |
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Tests.Language.Salt(tests) where
import Test.HUnitPlus.Base
import qualified Tests.Language.Salt.Core as Core
import qualified Tests.Language.Salt.Surface as Surface
tests :: Test
tests = "Salt" ~: [Core.tests, Surface.tests]
| emc2/saltlang | test/library/Tests/Language/Salt.hs | bsd-3-clause | 1,793 | 0 | 7 | 295 | 91 | 71 | 20 | 6 | 1 |
module Main where
import Lib
main :: IO ()
main = exec
| hherman1/CatanServ | app/Main.hs | bsd-3-clause | 57 | 0 | 6 | 14 | 22 | 13 | 9 | 4 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
module IHaskell.Display.Widgets.String.Latex (
-- * The Latex Widget
LatexWidget,
-- * Constructor
mkLatexWidget) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Control.Monad (when, join)
import Data.Aeson
import Data.IORef (newIORef)
import Data.Text (Text)
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
-- | A 'LatexWidget' represents a Latex widget from IPython.html.widgets.
type LatexWidget = IPythonWidget LatexType
-- | Create a new Latex widget
mkLatexWidget :: IO LatexWidget
mkLatexWidget = do
-- Default properties, with a random uuid
uuid <- U.random
let widgetState = WidgetState $ defaultStringWidget "LatexView"
stateIO <- newIORef widgetState
let widget = IPythonWidget uuid stateIO
initData = object ["model_name" .= str "WidgetModel", "widget_class" .= str "IPython.Latex"]
-- Open a comm for this widget, and store it in the kernel state
widgetSendOpen widget initData $ toJSON widgetState
-- Return the widget
return widget
instance IHaskellDisplay LatexWidget where
display b = do
widgetSendView b
return $ Display []
instance IHaskellWidget LatexWidget where
getCommUUID = uuid
| FranklinChen/IHaskell | ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/String/Latex.hs | mit | 1,580 | 0 | 13 | 360 | 283 | 158 | 125 | 33 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Sadržaj</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Traži</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriti</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/sse/resources/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 980 | 80 | 67 | 160 | 425 | 214 | 211 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | Versions for packages.
module Stack.Types.Version
(Version
,Cabal.VersionRange -- TODO in the future should have a newtype wrapper
,VersionCheck(..)
,versionParser
,parseVersion
,parseVersionFromString
,versionString
,versionText
,toCabalVersion
,fromCabalVersion
,mkVersion
,versionRangeText
,withinRange
,Stack.Types.Version.intersectVersionRanges
,toMajorVersion
,checkVersion)
where
import Control.Applicative
import Control.DeepSeq
import Control.Monad.Catch
import Data.Aeson.Extended
import Data.Attoparsec.ByteString.Char8
import Data.Binary.VersionTagged (Binary, HasStructuralInfo)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Data
import Data.Hashable
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import Data.Vector.Binary ()
import Data.Vector.Unboxed (Vector)
import qualified Data.Vector.Unboxed as V
import Data.Word
import Distribution.Text (disp)
import qualified Distribution.Version as Cabal
import GHC.Generics
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Prelude -- Fix warning: Word in Prelude from base-4.8.
import Text.PrettyPrint (render)
-- | A parse fail.
data VersionParseFail =
VersionParseFail ByteString
deriving (Typeable)
instance Exception VersionParseFail
instance Show VersionParseFail where
show (VersionParseFail bs) = "Invalid version: " ++ show bs
-- | A package version.
newtype Version =
Version {unVersion :: Vector Word}
deriving (Eq,Ord,Typeable,Data,Generic,Binary,NFData)
instance HasStructuralInfo Version
instance Hashable Version where
hashWithSalt i = hashWithSalt i . V.toList . unVersion
instance Lift Version where
lift (Version n) =
appE (conE 'Version)
(appE (varE 'V.fromList)
(listE (map (litE . IntegerL . fromIntegral)
(V.toList n))))
instance Show Version where
show (Version v) =
intercalate "."
(map show (V.toList v))
instance ToJSON Version where
toJSON = toJSON . versionText
instance FromJSON Version where
parseJSON j =
do s <- parseJSON j
case parseVersionFromString s of
Nothing ->
fail ("Couldn't parse package version: " ++ s)
Just ver -> return ver
instance FromJSON a => FromJSON (Map Version a) where
parseJSON val = do
m <- parseJSON val
fmap Map.fromList $ mapM go $ Map.toList m
where
go (k, v) = do
k' <- either (fail . show) return $ parseVersionFromString k
return (k', v)
-- | Attoparsec parser for a package version from bytestring.
versionParser :: Parser Version
versionParser =
do ls <- ((:) <$> num <*> many num')
let !v = V.fromList ls
return (Version v)
where num = decimal
num' = point *> num
point = satisfy (== '.')
-- | Convenient way to parse a package version from a bytestring.
parseVersion :: MonadThrow m => ByteString -> m Version
parseVersion x = go x
where go =
either (const (throwM (VersionParseFail x))) return .
parseOnly (versionParser <* endOfInput)
-- | Migration function.
parseVersionFromString :: MonadThrow m => String -> m Version
parseVersionFromString =
parseVersion . S8.pack
-- | Get a string representation of a package version.
versionString :: Version -> String
versionString (Version v) =
intercalate "."
(map show (V.toList v))
-- | Get a string representation of a package version.
versionText :: Version -> Text
versionText (Version v) =
T.intercalate
"."
(map (T.pack . show)
(V.toList v))
-- | Convert to a Cabal version.
toCabalVersion :: Version -> Cabal.Version
toCabalVersion (Version v) =
Cabal.Version (map fromIntegral (V.toList v)) []
-- | Convert from a Cabal version.
fromCabalVersion :: Cabal.Version -> Version
fromCabalVersion (Cabal.Version vs _) =
let !v = V.fromList (map fromIntegral vs)
in Version v
-- | Make a package version.
mkVersion :: String -> Q Exp
mkVersion s =
case parseVersionFromString s of
Nothing -> error ("Invalid package version: " ++ show s)
Just pn -> [|pn|]
-- | Display a version range
versionRangeText :: Cabal.VersionRange -> Text
versionRangeText = T.pack . render . disp
-- | Check if a version is within a version range.
withinRange :: Version -> Cabal.VersionRange -> Bool
withinRange v r = toCabalVersion v `Cabal.withinRange` r
-- | A modified intersection which also simplifies, for better display.
intersectVersionRanges :: Cabal.VersionRange -> Cabal.VersionRange -> Cabal.VersionRange
intersectVersionRanges x y = Cabal.simplifyVersionRange $ Cabal.intersectVersionRanges x y
-- | Returns the first two components, defaulting to 0 if not present
toMajorVersion :: Version -> Version
toMajorVersion (Version v) =
case V.length v of
0 -> Version (V.fromList [0, 0])
1 -> Version (V.fromList [V.head v, 0])
_ -> Version (V.fromList [V.head v, v V.! 1])
data VersionCheck
= MatchMinor
| MatchExact
| NewerMinor
deriving (Show, Eq, Ord)
instance ToJSON VersionCheck where
toJSON MatchMinor = String "match-minor"
toJSON MatchExact = String "match-exact"
toJSON NewerMinor = String "newer-minor"
instance FromJSON VersionCheck where
parseJSON = withText expected $ \t ->
case t of
"match-minor" -> return MatchMinor
"match-exact" -> return MatchExact
"newer-minor" -> return NewerMinor
_ -> fail ("Expected " ++ expected ++ ", but got " ++ show t)
where
expected = "VersionCheck value (match-minor, match-exact, or newer-minor)"
checkVersion :: VersionCheck -> Version -> Version -> Bool
checkVersion check (Version wanted) (Version actual) =
case check of
MatchMinor -> V.and (V.take 3 matching)
MatchExact -> V.length wanted == V.length actual && V.and matching
NewerMinor -> V.and (V.take 2 matching) && newerMinor
where
matching = V.zipWith (==) wanted actual
newerMinor =
case (wanted V.!? 2, actual V.!? 2) of
(Nothing, _) -> True
(Just _, Nothing) -> False
(Just w, Just a) -> a >= w
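-- | A small usage sketch (not part of the original module, added for
-- illustration): check whether an actual version 1.2.3 satisfies a wanted
-- version 1.2 under 'MatchMinor', using only 'parseVersionFromString' and
-- 'checkVersion' from above. This should evaluate to @Just True@.
_exampleMatchMinor :: Maybe Bool
_exampleMatchMinor =
    checkVersion MatchMinor <$> parseVersionFromString "1.2"
                            <*> parseVersionFromString "1.2.3"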
| tedkornish/stack | src/Stack/Types/Version.hs | bsd-3-clause | 6,754 | 0 | 15 | 1,630 | 1,804 | 954 | 850 | 167 | 5 |
{-# LANGUAGE OverloadedStrings, RecordWildCards, ViewPatterns #-}
module GameEngine.Graphics.BSP
( addBSP
, addGPUBSP
, uploadBSP
, GPUBSP(..)
, BSPInstance(..)
) where
import Control.Monad
import Data.Maybe
import Data.HashSet (HashSet)
import Data.Map (Map)
import Data.Set (Set)
import Data.Vector (Vector)
import qualified Data.HashSet as HashSet
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import qualified Data.ByteString as SB
import qualified Data.ByteString.Char8 as SB8
import Text.Printf
import Foreign
import Codec.Picture
import LambdaCube.GL
import GameEngine.Data.BSP
import GameEngine.Graphics.Storage
import GameEngine.Graphics.BezierSurface
data GPUBSP
= GPUBSP
{ gpubspVertexBuffer :: Buffer
, gpubspIndexBuffer :: Buffer
, gpubspLightmaps :: Vector TextureData
, gpubspSurfaces :: [(String,Primitive,IndexStream Buffer,Map String (Stream Buffer),Maybe TextureData)]
, gpubspShaders :: HashSet String
, gpubspBSPLevel :: BSPLevel
}
data BSPInstance
= BSPInstance
{ bspinstanceBSPLevel :: BSPLevel
, bspinstanceSurfaces :: Vector [Object]
}
data BSPSurface
= BSPTriangleSoup Int Int Int Int -- lightmapIndex, firstVertex, firstIndex, indexCount
| BSPPatch Int (Vector DrawVertex) (Vector Int) -- lightmapIndex
{-
TODO:
- build index buffer
 - build vertex buffer: bsp vertices + patch vertices
- build lightmap atlas
- update lightmap uv coordinates
-}
batchBSP shaderMap BSPLevel{..} = (result 0 (Map.toList surfaces) indices,V.concat indices) where
result _ [] [] = []
result offset ((name,l):xs) (v:ys) = (lightmapIndex l,0,V.length blDrawVertices,offset,V.length v,TriangleList,name) : result (offset + V.length v) xs ys
indices = map join $ Map.elems surfaces
lightmapIndex ((BSPTriangleSoup i _ _ _):_) = i
lightmapIndex ((BSPPatch i _ _ ):_) = i
  lightmapIndex _ = 0 :: Int -- TODO
join l = V.concat [(firstVertex +) <$> V.slice firstIndex indexCount blDrawIndices | BSPTriangleSoup lightmap firstVertex firstIndex indexCount <- l]
surfaces = foldl (\m s -> Map.insertWith (++) (let name = shName $ blShaders V.! srShaderNum s in if Set.member (SB8.unpack name) shaderMap then name else "missing shader") (bspSurface s) m) mempty blSurfaces
bspSurface s@Surface{..} = if noDraw then [] else case srSurfaceType of
Patch -> [BSPPatch srLightmapNum v i] where (v,i) = tessellatePatch blDrawVertices s 5
Flare -> []
_ -> [BSPTriangleSoup srLightmapNum srFirstVertex srFirstIndex srNumIndices]
where
surfaceFlags = shSurfaceFlags $ blShaders V.! srShaderNum
noDraw = surfaceFlags .&. 0x80 /= 0
uploadBSP :: Set String -> BSPLevel -> IO GPUBSP
uploadBSP shaderMap bsp@BSPLevel{..} = do
-- construct vertex and index buffer
let convertSurface (objs,lenV,arrV,lenI,arrI) s@Surface{..} = if noDraw then skip else case srSurfaceType of
Planar -> objs'
TriangleSoup -> objs'
          -- tessellate, concatenate vertex and index data into the fixed vertex and index buffers
Patch -> ((srLightmapNum, lenV, lenV', lenI, lenI', TriangleStrip, name):objs, lenV+lenV', v:arrV, lenI+lenI', i:arrI)
where
(v,i) = tessellatePatch blDrawVertices s 5
lenV' = V.length v
lenI' = V.length i
Flare -> skip
where
skip = ((srLightmapNum,srFirstVertex, srNumVertices, srFirstIndex, 0, TriangleList, name):objs, lenV, arrV, lenI, arrI)
objs' = ((srLightmapNum,srFirstVertex, srNumVertices, srFirstIndex, srNumIndices, TriangleList, name):objs, lenV, arrV, lenI, arrI)
Shader name sfFlags _ = blShaders V.! srShaderNum
noDraw = sfFlags .&. 0x80 /= 0
(objs,_,drawVl,_,drawIl) = V.foldl' convertSurface ([],V.length blDrawVertices,[blDrawVertices],V.length blDrawIndices,[blDrawIndices]) blSurfaces
drawV' = V.concat $ reverse drawVl
drawI' = V.concat $ reverse drawIl
{-
(objs,drawI') = batchBSP shaderMap bsp
drawV' = blDrawVertices
-}
withV w a f = w a (\p -> f $ castPtr p)
attribute f = withV SV.unsafeWith $ SV.convert $ V.map f drawV'
indices = SV.convert $ V.map fromIntegral drawI' :: SV.Vector Word32
vertexCount = V.length drawV'
vertexBuffer <- compileBuffer $
[ Array ArrFloat (3 * vertexCount) $ attribute dvPosition
, Array ArrFloat (2 * vertexCount) $ attribute dvDiffuseUV
, Array ArrFloat (2 * vertexCount) $ attribute dvLightmaptUV
, Array ArrFloat (3 * vertexCount) $ attribute dvNormal
, Array ArrFloat (4 * vertexCount) $ attribute dvColor
]
indexBuffer <- compileBuffer [Array ArrWord32 (SV.length indices) $ withV SV.unsafeWith indices]
-- upload light maps
let byteStringToVector = SV.fromList . SB.unpack :: SB.ByteString -> SV.Vector Word8
lightMapTextures <- fmap V.fromList $ forM (V.toList blLightmaps) $ \(Lightmap d) ->
uploadTexture2DToGPU' True False True True $ ImageRGB8 $ Image 128 128 $ byteStringToVector d
let gpuSurface (lmIdx,startV,countV,startI,countI,prim,SB8.unpack -> name) = (name,prim,index,attrs,lightmap) where
attrs = Map.fromList
[ ("position", Stream Attribute_V3F vertexBuffer 0 startV countV)
, ("diffuseUV", Stream Attribute_V2F vertexBuffer 1 startV countV)
, ("lightmapUV", Stream Attribute_V2F vertexBuffer 2 startV countV)
, ("normal", Stream Attribute_V3F vertexBuffer 3 startV countV)
, ("color", Stream Attribute_V4F vertexBuffer 4 startV countV)
]
index = IndexStream indexBuffer 0 startI countI
lightmap = lightMapTextures V.!? lmIdx
surfaces = map gpuSurface $ reverse objs
return $ GPUBSP
{ gpubspVertexBuffer = vertexBuffer
, gpubspIndexBuffer = indexBuffer
, gpubspLightmaps = lightMapTextures
, gpubspSurfaces = surfaces
, gpubspShaders = HashSet.fromList [name | (name,_,_,_,_) <- surfaces]
, gpubspBSPLevel = bsp
}
addGPUBSP :: TextureData -> GLStorage -> GPUBSP -> IO BSPInstance
addGPUBSP whiteTexture storage GPUBSP{..} = do
-- add to storage
let obj surfaceIdx (name,prim,index,attrs,lightmap) = do
let objUnis = ["LightMap","worldMat"]
o <- addObjectWithMaterial storage name prim (Just index) attrs objUnis
o1 <- addObject storage "LightMapOnly" prim (Just index) attrs objUnis
{-
#define LIGHTMAP_2D -4 // shader is for 2D rendering
#define LIGHTMAP_BY_VERTEX -3 // pre-lit triangle models
#define LIGHTMAP_WHITEIMAGE -2
#define LIGHTMAP_NONE -1
-}
forM_ [o,o1] $ \b -> uniformFTexture2D "LightMap" (objectUniformSetter b) $ fromMaybe whiteTexture lightmap
return [o,o1]
surfaceVector = V.fromList gpubspSurfaces
putStrLn $ printf "add %d bsp surfaces to storage" $ V.length surfaceVector
BSPInstance gpubspBSPLevel <$> V.imapM obj surfaceVector
addBSP :: Set String -> GLStorage -> BSPLevel -> IO BSPInstance
addBSP shaderMap storage bsp = do
whiteTexture <- uploadTexture2DToGPU' False False False False $ ImageRGB8 $ generateImage (\_ _ -> PixelRGB8 255 255 255) 1 1
addGPUBSP whiteTexture storage =<< uploadBSP shaderMap bsp
| csabahruska/quake3 | game-engine/GameEngine/Graphics/BSP.hs | bsd-3-clause | 7,526 | 0 | 18 | 1,742 | 2,229 | 1,203 | 1,026 | 120 | 8 |
{-# LANGUAGE BangPatterns, CPP, GeneralizedNewtypeDeriving #-}
-- |
-- Module : Data.Text.Foreign
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Support for using 'Text' data with native code via the Haskell
-- foreign function interface.
module Data.Text.Foreign
(
-- * Interoperability with native code
-- $interop
I16
-- * Safe conversion functions
, fromPtr
, useAsPtr
, asForeignPtr
-- ** Encoding as UTF-8
, peekCStringLen
, withCStringLen
-- * Unsafe conversion code
, lengthWord16
, unsafeCopyToPtr
-- * Low-level manipulation
-- $lowlevel
, dropWord16
, takeWord16
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
#if __GLASGOW_HASKELL__ >= 702
import Control.Monad.ST.Unsafe (unsafeIOToST)
#else
import Control.Monad.ST (unsafeIOToST)
#endif
import Data.ByteString.Unsafe (unsafePackCStringLen, unsafeUseAsCStringLen)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Text.Internal (Text(..), empty)
import Data.Text.Unsafe (lengthWord16)
import Data.Word (Word16)
import Foreign.C.String (CStringLen)
import Foreign.ForeignPtr (ForeignPtr, mallocForeignPtrArray, withForeignPtr)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (peek, poke)
import qualified Data.Text.Array as A
-- $interop
--
-- The 'Text' type is implemented using arrays that are not guaranteed
-- to have a fixed address in the Haskell heap. All communication with
-- native code must thus occur by copying data back and forth.
--
-- The 'Text' type's internal representation is UTF-16, using the
-- platform's native endianness. This makes copied data suitable for
-- use with native libraries that use a similar representation, such
-- as ICU. To interoperate with native libraries that use different
-- internal representations, such as UTF-8 or UTF-32, consider using
-- the functions in the 'Data.Text.Encoding' module.
-- | A type representing a number of UTF-16 code units.
newtype I16 = I16 Int
deriving (Bounded, Enum, Eq, Integral, Num, Ord, Read, Real, Show)
-- | /O(n)/ Create a new 'Text' from a 'Ptr' 'Word16' by copying the
-- contents of the array.
fromPtr :: Ptr Word16 -- ^ source array
-> I16 -- ^ length of source array (in 'Word16' units)
-> IO Text
fromPtr _ (I16 0) = return empty
fromPtr ptr (I16 len) =
#if defined(ASSERTS)
assert (len > 0) $
#endif
return $! Text arr 0 len
where
arr = A.run (A.new len >>= copy)
copy marr = loop ptr 0
where
loop !p !i | i == len = return marr
| otherwise = do
A.unsafeWrite marr i =<< unsafeIOToST (peek p)
loop (p `plusPtr` 2) (i + 1)
-- $lowlevel
--
-- Foreign functions that use UTF-16 internally may return indices in
-- units of 'Word16' instead of characters. These functions may
-- safely be used with such indices, as they will adjust offsets if
-- necessary to preserve the validity of a Unicode string.
-- | /O(1)/ Return the prefix of the 'Text' of @n@ 'Word16' units in
-- length.
--
-- If @n@ would cause the 'Text' to end inside a surrogate pair, the
-- end of the prefix will be advanced by one additional 'Word16' unit
-- to maintain its validity.
takeWord16 :: I16 -> Text -> Text
takeWord16 (I16 n) t@(Text arr off len)
| n <= 0 = empty
| n >= len || m >= len = t
| otherwise = Text arr off m
where
    m | w < 0xD800 || w > 0xDBFF = n
| otherwise = n+1
w = A.unsafeIndex arr (off+n-1)
-- | /O(1)/ Return the suffix of the 'Text', with @n@ 'Word16' units
-- dropped from its beginning.
--
-- If @n@ would cause the 'Text' to begin inside a surrogate pair, the
-- beginning of the suffix will be advanced by one additional 'Word16'
-- unit to maintain its validity.
dropWord16 :: I16 -> Text -> Text
dropWord16 (I16 n) t@(Text arr off len)
| n <= 0 = t
| n >= len || m >= len = empty
| otherwise = Text arr (off+m) (len-m)
where
m | w < 0xD800 || w > 0xDBFF = n
| otherwise = n+1
w = A.unsafeIndex arr (off+n-1)
-- | /O(n)/ Copy a 'Text' to an array. The array is assumed to be big
-- enough to hold the contents of the entire 'Text'.
unsafeCopyToPtr :: Text -> Ptr Word16 -> IO ()
unsafeCopyToPtr (Text arr off len) ptr = loop ptr off
where
end = off + len
loop !p !i | i == end = return ()
| otherwise = do
poke p (A.unsafeIndex arr i)
loop (p `plusPtr` 2) (i + 1)
-- | /O(n)/ Perform an action on a temporary, mutable copy of a
-- 'Text'. The copy is freed as soon as the action returns.
useAsPtr :: Text -> (Ptr Word16 -> I16 -> IO a) -> IO a
useAsPtr t@(Text _arr _off len) action =
allocaBytes (len * 2) $ \buf -> do
unsafeCopyToPtr t buf
action (castPtr buf) (fromIntegral len)
-- | /O(n)/ Make a mutable copy of a 'Text'.
asForeignPtr :: Text -> IO (ForeignPtr Word16, I16)
asForeignPtr t@(Text _arr _off len) = do
fp <- mallocForeignPtrArray len
withForeignPtr fp $ unsafeCopyToPtr t
return (fp, I16 len)
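-- | A minimal round-trip sketch (not part of the original API, added for
-- illustration): copy a 'Text' into temporary native memory with 'useAsPtr'
-- and rebuild it with 'fromPtr'. The result should be equal to the input.
-- The name '_roundTripViaPtr' is illustrative only.
_roundTripViaPtr :: Text -> IO Text
_roundTripViaPtr t = useAsPtr t $ \ptr len -> fromPtr ptr len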
-- | /O(n)/ Decode a C string with explicit length, which is assumed
-- to have been encoded as UTF-8. If decoding fails, a
-- 'UnicodeException' is thrown.
peekCStringLen :: CStringLen -> IO Text
peekCStringLen cs = do
bs <- unsafePackCStringLen cs
return $! decodeUtf8 bs
-- | Marshal a 'Text' into a C string encoded as UTF-8 in temporary
-- storage, with explicit length information. The encoded string may
-- contain NUL bytes, and is not followed by a trailing NUL byte.
--
-- The temporary storage is freed when the subcomputation terminates
-- (either normally or via an exception), so the pointer to the
-- temporary storage must /not/ be used after this function returns.
withCStringLen :: Text -> (CStringLen -> IO a) -> IO a
withCStringLen t act = unsafeUseAsCStringLen (encodeUtf8 t) act
| beni55/text | Data/Text/Foreign.hs | bsd-2-clause | 6,114 | 0 | 15 | 1,412 | 1,245 | 674 | 571 | 78 | 1 |
module ClassIn2 where
--Any class/instance name declared in this module can be renamed
--Rename instance name 'myreverse' to 'reversable'
class Reversable a where
reversable :: a -> a
reversable _ = undefined
instance Reversable [a] where
reversable = reverse
data Foo = Boo | Moo
instance Eq Foo where
Boo == Boo = True
Moo == Moo = True
_ == _ = False
main = reversable [1,2,3]
| mpickering/HaRe | old/testing/renaming/ClassIn2_TokOut.hs | bsd-3-clause | 410 | 0 | 7 | 98 | 116 | 62 | 54 | 12 | 1 |
module HAD.Y2114.M03.D27.Exercise where
-- | compact Compact a list of values with the following rules:
-- - Nothing values are removed
-- - If two remaining consecutive values are equal,
-- they are replaced by one value equal to the successor of those values
-- - the previous rule is not recursive
-- - Other values are kept
--
-- Examples
--
-- >>> compact [Just 1, Nothing, Just 1, Just 2, Just 4, Just 3, Just 3]
-- [2,2,4,4]
--
-- >>> compact [Nothing, Nothing, Nothing]
-- []
--
-- >>> compact []
-- []
--
-- >>> compact $ map Just "hello"
-- "hemo"
--
-- prop> [succ x] == (compact . replicate 2 . Just) (x :: Int)
-- prop> [succ x, x] == (compact . replicate 3 . Just) (x :: Int)
-- prop> replicate 2 (succ x) == (compact . replicate 4 . Just) (x :: Int)
--
compact :: (Enum a, Eq a) => [Maybe a] -> [a]
compact = undefined
| 1HaskellADay/1HAD | exercises/HAD/Y2014/M03/D27/Exercise.hs | mit | 831 | 0 | 8 | 171 | 72 | 53 | 19 | 3 | 1 |
{-
Generate unicode tables used in shims, src/string.js
using the same encoding as the metadata and static data
and a simple RLE scheme that compresses sequences with the
same value and also alternating sequences
-}
module Main where
import Data.List.Split
import Data.Bits
import Data.Char
import Data.List (group)
import Gen2.Compactor
maxEnc = 737187
longEnc n = [n `shiftR` 16, n .&. 0xffff]
-- use one bit for more efficient encoding of stretches of length one
combineRanges :: [Int] -> [Int]
combineRanges xs | n > 1 = 0 : n : combineRanges (drop (2*n) xs) -- alternating pattern, zero
where n = (length $ takeWhile (==1) xs) `div` 2
combineRanges (1:x:xs)
| 2*x+2 > maxEnc = 1 : longEnc x ++ combineRanges xs -- length 1, odd number
| otherwise = 2*x+1 : combineRanges xs
combineRanges (x:xs)
| 2*x+1 > maxEnc = 2 : longEnc x ++ combineRanges xs -- other length, even number
| otherwise = 2*x+2 : combineRanges xs
combineRanges [] = []
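-- A small sanity sketch (not part of the original generator, added for
-- illustration): four runs of length one collapse to the alternating-pattern
-- marker 0 followed by the number of pairs.
_combineExample :: Bool
_combineExample = combineRanges [1, 1, 1, 1] == [0, 2]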
{-
encode ranges, starts at -1, the first range is always False,
-}
encodeRLE :: [Bool] -> String
encodeRLE xs = encodeStr . combineRanges . map length . dropTrailingFalse . group $ False:xs
where
dropTrailingFalse ys
| not (head $ last ys) = init ys
| otherwise = ys
combineMappings :: [(Int,Int)] -> [Int]
combineMappings xs@((1,v1):(1,v2):_) -- alternating pattern, zero
| n >= 1 = [0,n,v1,v2] ++ combineMappings (drop (2*n) xs)
where n = length (takeWhile (==[(1,v1),(1,v2)]) (chunksOf 2 xs))
combineMappings ((1,v):xs)
| 2*v+1 <= maxEnc = [2*v+1] ++ combineMappings xs -- length 1 combined with value: odd number
combineMappings ((n,v):xs)
| 2*n+2 > maxEnc = 2 : longEnc n ++ [v] ++ combineMappings xs -- other length: even number
| otherwise = [2*n+2,v] ++ combineMappings xs
combineMappings [] = []
{-
encode mapping, starts at 0
-}
encodeMapping :: [Int] -> String
encodeMapping xs = encodeStr . combineMappings $ map (\ys -> (length ys, head ys)) (group xs)
-- we can assume that the encoder does not generate characters that have to be
-- escaped or are outside ASCII range as long as it's double quoted
assignDat :: String -> String -> String
assignDat var val = "var h$" ++ var ++ " = \"" ++ val ++ "\";"
mkRanges :: (Char -> Bool) -> String
mkRanges p = encodeRLE $ map p listChars
toAbs :: Int -> Int
toAbs x | x < 0 = 2 * (abs x) - 1
| otherwise = 2 * x
-- skip unassigned planes 3-13
listChars = map toEnum ([0..0x2FFFF] ++ [0xE0000..0x10FFFF])
-- must map to a nonnegative int
mkMapping :: (Char -> Int) -> String
mkMapping f = encodeMapping $ map f listChars
-- map the biggest categories to zero. PrivateUse is fixed and
-- the ranges are hardcoded in string.js
catNum :: GeneralCategory -> Int
catNum PrivateUse = 0
catNum NotAssigned = 0
catNum x = fromEnum x + 1
main = putStrLn . unlines $
"// Unicode tables generated by ghcjs/utils/genUnicode.hs" :
map (\(v,p) -> assignDat v (mkRanges p))
[ ("printRanges", isPrint), ("alnumRanges", isAlphaNum)
, ("lowerRanges", isLower), ("upperRanges", isUpper)
, ("alphaRanges", isAlpha)] ++
map (\(v,m) -> assignDat v (mkMapping m))
[ ("toLowerMapping", (\c -> toAbs (ord (toLower c) - (ord c))))
, ("toUpperMapping", (\c -> toAbs (ord (toUpper c) - (ord c))))
, ("catMapping", catNum . generalCategory)
]
| seereason/ghcjs | utils/genUnicode.hs | mit | 3,555 | 0 | 16 | 896 | 1,228 | 655 | 573 | 59 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module T10598_fail6 where
newtype F x = F ([x], Maybe x) deriving Functor
| olsner/ghc | testsuite/tests/deriving/should_fail/T10598_fail6.hs | bsd-3-clause | 151 | 0 | 7 | 22 | 30 | 20 | 10 | 4 | 0 |
{-# LANGUAGE PatternGuards #-}
module Idris.Transforms(transformPats,
transformPatsWith,
applyTransRulesWith,
applyTransRules) where
import Idris.AbsSyntax
import Idris.Core.CaseTree
import Idris.Core.TT
import Debug.Trace
transformPats :: IState -> [Either Term (Term, Term)] ->
[Either Term (Term, Term)]
transformPats ist ps = map tClause ps where
tClause (Left t) = Left t -- not a clause, leave it alone
tClause (Right (lhs, rhs)) -- apply transforms on RHS
= let rhs' = applyTransRules ist rhs in
Right (lhs, rhs')
transformPatsWith :: [(Term, Term)] -> [Either Term (Term, Term)] ->
[Either Term (Term, Term)]
transformPatsWith rs ps = map tClause ps where
tClause (Left t) = Left t -- not a clause, leave it alone
tClause (Right (lhs, rhs)) -- apply transforms on RHS
= let rhs' = applyTransRulesWith rs rhs in
Right (lhs, rhs')
-- Work on explicitly named terms, so we don't have to manipulate
-- de Bruijn indices
applyTransRules :: IState -> Term -> Term
applyTransRules ist tm = finalise $ applyAll [] (idris_transforms ist) (vToP tm)
-- Work on explicitly named terms, so we don't have to manipulate
-- de Bruijn indices
applyTransRulesWith :: [(Term, Term)] -> Term -> Term
applyTransRulesWith rules tm
= finalise $ applyAll rules emptyContext (vToP tm)
applyAll :: [(Term, Term)] -> Ctxt [(Term, Term)] -> Term -> Term
applyAll extra ts ap@(App s f a)
| (P _ fn ty, args) <- unApply ap
= let rules = case lookupCtxtExact fn ts of
Just r -> extra ++ r
Nothing -> extra
ap' = App s (applyAll extra ts f) (applyAll extra ts a) in
case rules of
[] -> ap'
rs -> case applyFnRules rs ap of
Just tm'@(App s f' a') ->
App s (applyAll extra ts f')
(applyAll extra ts a')
Just tm' -> tm'
_ -> App s (applyAll extra ts f)
(applyAll extra ts a)
applyAll extra ts (Bind n b sc) = Bind n (fmap (applyAll extra ts) b)
(applyAll extra ts sc)
applyAll extra ts t = t
applyFnRules :: [(Term, Term)] -> Term -> Maybe Term
applyFnRules [] tm = Nothing
applyFnRules (r : rs) tm | Just tm' <- applyRule r tm = Just tm'
| otherwise = applyFnRules rs tm
applyRule :: (Term, Term) -> Term -> Maybe Term
applyRule (lhs, rhs) tm
| Just ms <- matchTerm lhs tm
-- = trace ("SUCCESS " ++ show ms ++ "\n FROM\n" ++ show lhs ++
-- "\n" ++ show rhs
-- ++ "\n" ++ show tm ++ " GIVES\n" ++ show (depat ms rhs)) $
= Just $ depat ms rhs
| otherwise = Nothing
-- ASSUMPTION: The names in the transformation rule bindings cannot occur
-- in the term being transformed.
-- (In general, this would not be true, but when we elaborate transformation
-- rules we mangle the names so that it is true. While this feels a bit
-- hacky, it's much easier to think about than mangling de Bruijn indices).
where depat ms (Bind n (PVar t) sc)
= case lookup n ms of
Just tm -> depat ms (subst n tm sc)
_ -> depat ms sc -- no occurrence? Shouldn't happen
depat ms tm = tm
matchTerm :: Term -> Term -> Maybe [(Name, Term)]
matchTerm lhs tm = matchVars [] lhs tm
where
matchVars acc (Bind n (PVar t) sc) tm
= matchVars (n : acc) (instantiate (P Bound n t) sc) tm
matchVars acc sc tm
= -- trace (show acc ++ ": " ++ show (sc, tm)) $
doMatch acc sc tm
doMatch :: [Name] -> Term -> Term -> Maybe [(Name, Term)]
doMatch ns (P _ n _) tm
| n `elem` ns = return [(n, tm)]
doMatch ns (App _ f a) (App _ f' a')
= do fm <- doMatch ns f f'
am <- doMatch ns a a'
return (fm ++ am)
doMatch ns x y | vToP x == vToP y = return []
| otherwise = Nothing
| mrmonday/Idris-dev | src/Idris/Transforms.hs | bsd-3-clause | 4,279 | 0 | 17 | 1,536 | 1,315 | 672 | 643 | 77 | 5 |
-- int32 to IPv4
-- http://www.codewars.com/kata/52e88b39ffb6ac53a400022e/
-- Note: Wrong type in tests, should be Word32 instead of Int32.
module IPv4 where
import Data.Word (Word32)
import Data.Bits ((.&.), shiftR)
import Data.List (unfoldr, intercalate)
type IPString = String
int32ToIP :: Word32 -> IPString
int32ToIP n = intercalate "." . map show . reverse . take 4 . (++ repeat 0) . unfoldr f $ n
where f 0 = Nothing
f x = Just ((.&.) x 0xff, shiftR x 0x8) | gafiatulin/codewars | src/5 kyu/IPv4.hs | mit | 482 | 0 | 11 | 96 | 150 | 83 | 67 | 9 | 2 |
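-- A quick sanity sketch (not part of the kata solution, added for
-- illustration): the loopback address 0x7F000001 should pretty-print as
-- "127.0.0.1". The name '_loopbackOk' is illustrative only.
_loopbackOk :: Bool
_loopbackOk = int32ToIP 0x7F000001 == "127.0.0.1"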
--
-- Config.hs
-- Copyright (C) 2015 soud <[email protected]>
--
-- Distributed under terms of the MIT license.
--
module Config
(
server
, sPort
, nickname
, channel
) where
server = "irc.freenode.org"
sPort = 6667
nickname = "evalbot"
channel = "#nekoproject"
| soudy/evalbot | src/Config.hs | mit | 300 | 0 | 4 | 78 | 44 | 30 | 14 | 10 | 1 |
module ImageMapTests
( withWrongDimensions
, withRightDimensions
, reportingSize
, findingElements
, elementToRGB
, elementToRGBA
) where
import Test.HUnit
import BigE.ImageMap (ImageElement (..), VectorSpec (..), PixelRGB8 (..), elementAt,
fromVector, imageSize, toRGB, toRGBA)
import qualified Data.Vector as Vector
import Linear (V3 (..), V4 (..))
withWrongDimensions :: Assertion
withWrongDimensions =
case fromVector (Raw16Vector (1, 1) $ Vector.fromList [1, 2, 3]) of
Right _ -> assertBool "Shall fail" False
Left _ -> assertBool "Shall fail" True
withRightDimensions :: Assertion
withRightDimensions =
case fromVector (Raw16Vector (2, 2) $ Vector.fromList [1, 2, 3, 4]) of
Right _ -> assertBool "Shall succeed" True
Left _ -> assertBool "Shall succeed" False
reportingSize :: Assertion
reportingSize = do
let Right imgMap = fromVector (Raw16Vector (2, 2) $ Vector.fromList [1, 2, 3, 4])
(2, 2) @=? imageSize imgMap
findingElements :: Assertion
findingElements = do
let Right imgMap = fromVector (Raw16Vector (3, 2) $ Vector.fromList [1, 2, 3, 4, 5, 6])
Raw 1 @=? elementAt 0 0 imgMap
Raw 2 @=? elementAt 1 0 imgMap
Raw 3 @=? elementAt 2 0 imgMap
Raw 4 @=? elementAt 0 1 imgMap
Raw 5 @=? elementAt 1 1 imgMap
Raw 6 @=? elementAt 2 1 imgMap
elementToRGB :: Assertion
elementToRGB = do
let col1 = V3 0 0 0 :: V3 Float
col2 = V3 1 1 1 :: V3 Float
col3 = V3 1 0 0 :: V3 Float
col4 = V3 0 1 0 :: V3 Float
col5 = V3 0 0 1 :: V3 Float
col1 @=? toRGB (Raw 0)
col1 @=? toRGB (RGB $ PixelRGB8 0 0 0)
col2 @=? toRGB (Raw maxBound)
col2 @=? toRGB (RGB $ PixelRGB8 maxBound maxBound maxBound)
col3 @=? toRGB (RGB $ PixelRGB8 maxBound 0 0)
col4 @=? toRGB (RGB $ PixelRGB8 0 maxBound 0)
col5 @=? toRGB (RGB $ PixelRGB8 0 0 maxBound)
elementToRGBA :: Assertion
elementToRGBA = do
let col1 = V4 0 0 0 1 :: V4 Float
col2 = V4 1 1 1 1 :: V4 Float
col3 = V4 1 0 0 1 :: V4 Float
col4 = V4 0 1 0 1 :: V4 Float
col5 = V4 0 0 1 1 :: V4 Float
col1 @=? toRGBA (Raw 0)
col1 @=? toRGBA (RGB $ PixelRGB8 0 0 0)
col2 @=? toRGBA (Raw maxBound)
col2 @=? toRGBA (RGB $ PixelRGB8 maxBound maxBound maxBound)
col3 @=? toRGBA (RGB $ PixelRGB8 maxBound 0 0)
col4 @=? toRGBA (RGB $ PixelRGB8 0 maxBound 0)
col5 @=? toRGBA (RGB $ PixelRGB8 0 0 maxBound)
| psandahl/big-engine | test/ImageMapTests.hs | mit | 2,524 | 0 | 14 | 714 | 1,016 | 508 | 508 | 63 | 2 |
-- | Module for operating on vectors and matrices. It includes some functions
-- for using bra-ket notation.
module DiracLists where
import Data.Complex
import Data.List (transpose)
-- This can be used to hide the standard operators.
-- import Prelude hiding ((+))
-- import qualified Prelude
-- | Function 'ket' provides basic functionality for producing vectors from the
-- canonical basis. Vectors and matrices are represented as lists and are stored
-- in column-order (as in Mathematica). Operations on vectors and matrices have
-- to be defined using new operators.
ket :: Int -> Int -> [Complex Double]
ket d i = (take i zeros) ++ [1] ++ (take (d-1-i) zeros)
where zeros = replicate (d-1) 0
-- | The 'norm' function calculates the Euclidean norm of a vector.
norm :: [Complex Double] -> Double
norm a = sqrt $ realPart $ sum [ x * conjugate x | x <- a ]
-- | The 'proj' function builds d-dimensional operator |i><j|.
proj :: Int -> Int -> Int -> [Complex Double]
proj d i j = outerWith (*) (ket d i) (ket d j)
-- | The 'outer' function defines the form of the outer product, resulting in pairs of elements.
outer :: [a] -> [b] -> [(a,b)]
outer a b = [ (x,y) | x<-a, y<-b ]
-- | General form of the outer product resulting in pairs of elements.
outerWith :: (a -> b -> c) -> [a] -> [b] -> [c]
outerWith f a b = [ f x y | x<-a, y<-b ]
-- | Kronecker product is defined in terms of the outer product.
kron :: Num a => [a] -> [a] -> [a]
kron a b = outerWith (*) a b
-- | Function 'overlap' for calculating the inner product of two vectors. In
-- Dirac notation this is represented as <v|w>.
overlap :: Num a => [a] -> [a] -> a
overlap vec1 vec2 = sum $ zipWith (*) vec1 vec2
-- | The scalar product is defined to be consistent with the other operators. It
-- is identical to the 'overlap' function and is supposed to resemble the
-- bra-ket.
(<>) :: [Complex Double] -> [Complex Double] -> Complex Double
(<>) = overlap
-- | Definition of the vector addition for two complex vectors. The standard '+'
-- operator does not work, and it is not a good idea to hide it.
(+>) :: [Complex Double] -> [Complex Double] -> [Complex Double]
(+>) = zipWith (+)
-- | The multiplication of a vector by a scalar.
(.*) :: Complex Double -> [Complex Double] -> [Complex Double]
(.*) c v = [ c*x | x<-v ]
-- | Matrix-vector multiplication.
(#>) :: [[Complex Double]] -> [Complex Double] -> [Complex Double]
(#>) m v = [ overlap w v | w<-tm ]
where tm = transpose m
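-- | A usage sketch (not part of the original module, added for
-- illustration): the balanced superposition (|0> + |1>)/sqrt 2 in two
-- dimensions, built with the operators above. Its norm should be 1 (up to
-- floating-point error). The name '_plusState' is illustrative only.
_plusState :: [Complex Double]
_plusState = (1 / sqrt 2 :+ 0) .* (ket 2 0 +> ket 2 1)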
| jmiszczak/hoqus | alternative/DiracLists.hs | mit | 2,444 | 0 | 10 | 486 | 683 | 381 | 302 | 27 | 1 |
module Data.Hiper.Types
( Value(..)
, Configurable (toValue, fromValue)
) where
import Data.Hiper.Types.Internal
| ivanjovanovic/hiper | src/Data/Hiper/Types.hs | mit | 145 | 0 | 5 | 44 | 32 | 23 | 9 | 6 | 0 |
module Chapter_7_my_note where
import Prelude hiding (Word, getLine)
import Data.Char
import Test.QuickCheck
head' :: [a] -> a
head' (x : _) = x
tail' :: [a] -> [a]
tail' (_ : xs) = xs
null' :: [a] -> Bool
null' [] = True
null' (_ : _) = False
digits :: String -> String
digits st = [ch | ch <- st, isDigit ch]
firstDigit :: String -> Char
firstDigit st = case digits st of
[] -> '\0'
(x : _) -> x
firstIntPlus :: [Integer] -> Integer
firstIntPlus (x : _) = x + 1
firstIntPlus [] = 0
twoIntegerAddition :: [Integer] -> Integer
twoIntegerAddition (x : y : _) = x + y
twoIntegerAddition [x] = x
twoIntegerAddition [] = 0
twoIntegerAddition' :: [Integer] -> Integer
twoIntegerAddition' :: [Integer] -> Integer
twoIntegerAddition' ls = case length ls of
                             0 -> 0
                             1 -> head ls
                             _ -> (ls !! 0) + (ls !! 1)
product' :: [Integer] -> Integer
product' (x : xs) = x * product' xs
product' [] = 1
prop_product :: [Integer] -> Bool
prop_product ls = product ls == product' ls
check_product_result :: IO ()
check_product_result = quickCheck prop_product
and' :: [Bool] -> Bool
and' (x : xs) = x && and' xs
and' [] = True
prop_and :: [Bool] -> Bool
prop_and ls = and ls == and' ls
check_and_result :: IO ()
check_and_result = quickCheck prop_and
or' :: [Bool] -> Bool
or' (x : xs) = x || or' xs
or' [] = False
prop_or :: [Bool] -> Bool
prop_or ls = or ls == or' ls
check_or_result :: IO ()
check_or_result = quickCheck prop_or
ins :: Integer -> [Integer] -> [Integer]
ins x [] = [x]
ins x (y : ys)
| x <= y = x : y : ys
| otherwise = y : ins x ys
iSort :: [Integer] -> [Integer]
iSort [] = []
iSort (x : xs) = ins x (iSort xs)
elemNum :: Integer -> [Integer] -> Integer
elemNum _ [] = 0
elemNum target (x : xs)
| target == x = 1 + elemNum target xs
| otherwise = elemNum target xs
elemNum' :: Integer -> [Integer] -> Integer
elemNum' target storage = toInteger (length [x | x <- storage, x == target])
unique :: [Integer] -> [Integer]
unique ls = [x | x <- ls, elemNum x ls == 1]
unique' :: [Integer] -> [Integer]
unique' [] = []
unique' (x : xs)
| elemNum x (x : xs) == 1 = x : unique' xs
| otherwise = unique' (deleteAll x xs)
where
deleteAll :: Integer -> [Integer] -> [Integer]
deleteAll target ls = [_x | _x <- ls, _x /= target]
reverse' :: [a] -> [a]
reverse' [] = []
reverse' _list = reverse' (drop 1 _list) ++ [head _list]
unzip' :: [(a, b)] -> ([a], [b])
unzip' [] = ([], [])
unzip' list = (fst (head list) : fst (unzip' (tail list)),
snd (head list) : snd (unzip' (tail list)))
minAndMax :: [Integer] -> (Integer, Integer)
minAndMax ls = (head (iSort ls), (iSort ls) !! (length ls - 1))
minAndMax' :: [Integer] -> (Integer, Integer)
minAndMax' [] = (0, 0)
minAndMax' [x] = (x, x)
minAndMax' (x : xs) = (if x < fst (minAndMax' xs) then x else fst (minAndMax' xs),
if x > snd (minAndMax' xs) then x else snd (minAndMax' xs))
isSorted :: [Integer] -> Bool
isSorted [] = True
isSorted [_] = True
isSorted (x : xs) = x <= (head xs) && isSorted xs
qSort :: [Integer] -> [Integer]
qSort [] = []
qSort (x : xs) = qSort [lft | lft <- xs, lft <= x] ++ [x] ++ qSort [rht | rht <- xs, rht > x]
qSort_prop :: [Integer] -> Bool
qSort_prop = isSorted . qSort
dicSort :: [(Integer, Integer)] -> [(Integer, Integer)]
dicSort [] = []
dicSort [x] = [x]
dicSort (x : xs) = insDic x (dicSort xs)
where
insDic :: (Integer, Integer) -> [(Integer, Integer)] -> [(Integer, Integer)]
insDic _x [] = [_x]
insDic _x (ele : eles)
| fst _x > fst ele = ele : insDic _x eles
| fst _x < fst ele = _x : ele : eles
| fst _x == fst ele && snd _x > snd ele = ele : insDic _x eles
| fst _x == fst ele && snd _x <= snd ele = _x : ele : eles
dicSortIsSorted :: [(Integer, Integer)] -> Bool
dicSortIsSorted [] = True
dicSortIsSorted [_] = True
dicSortIsSorted (x : xs)
| (fst . head) xs - fst x > 0 = dicSortIsSorted xs
| (fst . head) xs - fst x == 0 = (snd . head) xs - snd x >= 0 && dicSortIsSorted xs
| (fst . head) xs - fst x < 0 = False
dicSortProp :: [(Integer, Integer)] -> Bool
dicSortProp = dicSortIsSorted . dicSort
zip' :: [a] -> [b] -> [(a, b)]
zip' (x : xs) (y : ys) = (x, y) : zip' xs ys
zip' _ _ = []
take' :: Int -> [a] -> [a]
take' 0 _ = []
take' _ [] = []
take' n (x : xs)
| n > 0 = x : take' (n - 1) xs
take' _ _ = error "negative argument"
splitAt' :: Int -> [a] -> ([a], [a])
splitAt' n ls = (take' n ls, drop' n ls)
drop' :: Int -> [a] -> [a]
drop' 0 xs = xs
drop' _ [] = []
drop' n (_ : xs)
| n > 0 = drop' (n - 1) xs
drop' _ _ = error "negative argument"
zip'' :: ([a], [b]) -> [(a, b)]
zip'' (xs, ys) = zip xs ys
prop_zip''_unzip :: Eq a => Eq b => ([a], [b]) -> Property
prop_zip''_unzip (xs, ys) = (length xs == length ys)
==> (xs, ys) == (unzip . zip'') (xs, ys)
another_prop_zip''_unzip :: Eq a => Eq b => ([a], [b]) -> Bool
another_prop_zip''_unzip (xs, ys)
| length xs == length ys = (xs, ys) == (unzip . zip'') (xs, ys)
| otherwise = True
zip3' :: ([a], [b], [c]) -> [(a, b, c)]
zip3' (x : xs, y : ys, z : zs) = (x, y, z) : zip3' (xs, ys, zs)
zip3' _ = []
zip3'' :: ([a], [b], [c]) -> [(a, b, c)]
zip3'' (xs, ys, zs) = [(xs !! i, ys !! i, zs !! i)
| i <- [0 .. (minThree (length xs - 1) (length ys - 1) (length zs - 1))]]
where
minThree :: Int -> Int -> Int -> Int
minThree x y z
| x >= y && y >= z = z
| y >= x = x
| otherwise = y
subseq :: Eq a => [a] -> [a] -> Bool
subseq target base = or [and [target !! (j - i) == base !! j | j <- [i .. i + length target - 1]]
| i <- [0 .. length base - length target]]
subseqRec :: Eq a => [a] -> [a] -> Bool
subseqRec [] _ = True
subseqRec _ [] = False
subseqRec (x : xs) (y : ys)
| x /= y = subseqRec (x : xs) ys
| x == y = subseqRec xs ys || subseqRec (x : xs) ys
sublist :: Eq a => [a] -> [a] -> Bool
sublist [] _ = True
sublist _ [] = False
sublist (x : xs) (y : ys)
| x == y = sublist xs ys
| x /= y = sublist (x : xs) ys
whiteSpace :: String
whiteSpace = ['\n', ' ', '\t', '\0']
getWord :: String -> String
getWord [] = []
getWord (x : xs)
| x `elem` whiteSpace = []
| otherwise = x : getWord xs
dropWord :: String -> String
dropWord [] = []
dropWord (x : xs)
| x `elem` whiteSpace = x : xs
| otherwise = dropWord xs
dropHeadSpace :: String -> String
dropHeadSpace [] = []
dropHeadSpace (x : xs)
| x `elem` whiteSpace = dropHeadSpace xs
| otherwise = x : xs
type Word = String
splitWord :: String -> [Word]
splitWord str = if str /= []
then (getWord . dropHeadSpace) str : splitWord ((dropWord . dropHeadSpace) str)
else []
lineWidth :: Int
lineWidth = 80
type Line = [Word]
getLine :: Int -> [Word] -> Line
getLine _ [] = []
getLine len (wd : wds)
| length wd <= len = wd : getLine (len - (length wd + 1)) wds
| otherwise = []
dropLine :: Int -> [Word] -> [Word]
dropLine _ [] = []
dropLine len (wd : wds)
| length wd <= len = dropLine (len - (length wd + 1)) wds
| otherwise = wd : wds
splitLines :: [Word] -> [Line]
splitLines [] = []
splitLines ws = getLine lineWidth ws : splitLines (dropLine lineWidth ws)
joinLine :: Line -> String
joinLine [] = []
joinLine [wd] = wd
joinLine (wd : wds) = wd ++ " " ++ joinLine wds
joinLines :: [Line] -> String
joinLines [] = []
joinLines (_line : _lines) = joinLine _line ++ "\n" ++ joinLines _lines
joinLine2lineLen :: Line -> Int -> String
joinLine2lineLen [] _ = []
joinLine2lineLen [wd] _ = wd
joinLine2lineLen (wd : wds) lineLen = if lineLen < (length . joinLine) (wd : wds)
then joinLine (wd : wds)
else wd ++ blanks
++ joinLine2lineLen wds (lineLen - length wd - length blanks)
where
blank_total_len :: Line -> Int -> Int
blank_total_len (wd' : wds') lineLen' = lineLen' - sum [length _wd | _wd <- wd' : wds']
blank_num :: Line -> Int
blank_num (wd' : wds') = length (wd' : wds') - 1
const_blank_total_len :: Int
const_blank_total_len = blank_total_len (wd : wds) lineLen
const_blank_num :: Int
const_blank_num = blank_num (wd : wds)
blanks :: String
blanks = concat [" " | _ <- [1 .. div const_blank_total_len const_blank_num
+ if mod const_blank_total_len const_blank_num == 0 then 0 else 1]]
wc :: String -> (Int, Int, Int)
wc [] = (0, 0, 0)
wc str' = (length str, (length . splitWord) str, (length . splitLines . splitWord) str)
where
str :: String
str = strSplit str'
strSplit :: String -> String
strSplit [] = []
strSplit (c : cs)
| c == '\n' = ""
| otherwise = c : strSplit cs
isPalin :: String -> Bool
isPalin [] = True
isPalin [_] = True
isPalin str = head orig == last orig && isPalin (tail (init orig))
where
orig :: String
orig = [toLower c | c <- str, c /= ' ', c /= '\'']
subst :: String -> String -> String -> String
subst oldSub newSub st = if subseqRec oldSub st
then replacer oldSub newSub st (subseqMarker_max oldSub st)
else st
-- there is a bug here that I need to fix:
-- check the case when the arguments are "change" and "change"
subseqMarker :: String -> String -> [Int]
subseqMarker [] [] = [0]
subseqMarker _ [] = [-1]
subseqMarker [] remain = [length remain]
subseqMarker (x : xs) (y : ys)
| x /= y = subseqMarker (x : xs) ys
| x == y = subseqMarker xs ys ++ subseqMarker (x : xs) ys
subseqMarker_max :: String -> String -> Int
subseqMarker_max xs ys = maximum (subseqMarker xs ys)
replacer :: String -> String -> String -> Int -> String
replacer oldone newone base place = fst (splitAt (length base - place - length oldone) base)
++ newone ++ snd (splitAt (length base - place) base)
| tonyfloatersu/solution-haskell-craft-of-FP | Chapter_7_my_note.hs | mit | 11,015 | 0 | 15 | 3,836 | 4,944 | 2,578 | 2,366 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Dispatch ()
import Foundation
import Web.Notebook.App
import Web.YahooPortfolioManager.App
import Yesod
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.HTTP.Conduit (newManager)
import Database.Persist.Sqlite
(runMigration, createSqlitePool, runSqlPersistMPool)
import Control.Monad.Logger (runStdoutLoggingT)
main :: IO ()
main = do
-- create a new pool
pool <- runStdoutLoggingT $ createSqlitePool "notebook.db" 10
-- perform any necessary migration
runSqlPersistMPool (runMigration migrateNotebook) pool
-- create a new HTTP manager
manager <- newManager tlsManagerSettings
warp 3000 $ Intranet YahooPortfolioManagerSite Notebook pool manager
| lhoghu/intranet | app/Main.hs | mit | 750 | 0 | 9 | 118 | 154 | 86 | 68 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Web.Twitter.Conduit.Parameters
import Web.Twitter.Conduit hiding (inReplyToStatusId)
import Web.Twitter.Types.Lens
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Char8 as B8
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Data.Text (Text)
import Web.Authenticate.OAuth (OAuth(..), Credential(..))
import System.Environment (getEnv)
import Data.Aeson.Types
import Data.Monoid
import Paren
getOAuthTokens :: IO (OAuth, Credential)
getOAuthTokens = do
consumerKey <- getEnv' "OAUTH_CONSUMER_KEY"
consumerSecret <- getEnv' "OAUTH_CONSUMER_SECRET"
accessToken <- getEnv' "OAUTH_ACCESS_TOKEN"
accessSecret <- getEnv' "OAUTH_ACCESS_SECRET"
let oauth = twitterOAuth
{ oauthConsumerKey = consumerKey
, oauthConsumerSecret = consumerSecret
}
cred = Credential
[ ("oauth_token", accessToken)
, ("oauth_token_secret", accessSecret)
]
return (oauth, cred)
where
getEnv' = (B8.pack <$>) . getEnv
getTWInfoFromEnv :: IO TWInfo
getTWInfoFromEnv = do
(oa, cred) <- getOAuthTokens
return $ (setCredential oa cred def) { twProxy = Nothing }
main :: IO ()
main = do
twInfo <- getTWInfoFromEnv
putStrLn "Starting"
mgr <- newManager tlsManagerSettings
runResourceT $ do
src <- stream twInfo mgr userstream
liftIO $ putStrLn "User stream connected"
src C.$$+- CL.mapM_ (liftIO . takeAction)
takeAction :: StreamingAPI -> IO ()
takeAction (SStatus st) = case matchParenT stText of
Just t -> callRequest $ replyTo st (tweet t)
Nothing -> return ()
where stText = st ^. statusText
crop = T.take (140 - length face - 1)
tweet t = crop t <> T.pack face
face = "○( ̄□ ̄○)"
takeAction (SEvent e)
| e ^. evEvent == "follow" =
do let target = e ^. evTarget ^. to screenname
let source = e ^. evSource ^. to screenname
if target == "parenbot"
then do callRequest $ followUser source
putStrLn $ "Followed " <> T.unpack source
else return ()
| otherwise = return ()
where screenname :: EventTarget -> Text
screenname target = case target ^? _ETUser of
Just u -> u ^. userScreenName
_ -> ""
takeAction _ = return ()
callRequest :: (FromJSON b) => APIRequest a b -> IO ()
callRequest req = do
twInfo <- getTWInfoFromEnv
mgr <- newManager tlsManagerSettings
_ <- call twInfo mgr req
return ()
replyTo :: Status -> T.Text -> APIRequest StatusesUpdate Status
replyTo t txt = update tweetText & inReplyToStatusId ?~ (t ^. statusId)
where tweetText = "@" <> t ^. statusUser ^. userScreenName <>
" " <> txt
followUser :: Text -> APIRequest FriendshipsCreate User
followUser name = friendshipsCreate (ScreenNameParam $ T.unpack name)
favTweet :: Status -> APIRequest FavoritesCreate Status
favTweet t = favoritesCreate (t ^. statusId)
matchParenT :: Text -> Maybe Text
matchParenT = fmap T.pack . matchParen . T.unpack
| shouya/parenbot | Main.hs | mit | 3,316 | 0 | 13 | 802 | 992 | 512 | 480 | 85 | 4 |
{-# LANGUAGE Rank2Types #-}
module Extra.Field.Optics (
setMay,
viewMay,
overMay,
(^.?),
(%~?),
(.~?),
by,
useMay,
) where
import Control.Applicative (Const (Const), getConst)
import qualified Data.Map as M
import Extra.Field.Optics.Internal
import Control.Monad.State (MonadState, gets)
--------------------------------------------------------------------------------
-- set :: ((a -> Identity b) -> s -> Identity t) -> b -> s -> t
setMay :: ((a -> Maybe b) -> s -> Maybe t) -> b -> s -> Maybe t
setMay l b s = l (const $ Just b) s
-- view :: ((a -> Const a a) -> s -> Const a s) -> s -> a
viewMay :: ((a -> Const (Maybe a) a) -> s -> Const (Maybe a) s) -> s -> Maybe a
viewMay l s = getConst $ l (Const . Just) s
-- over :: ((a -> Identity b) -> s -> Identity t) -> (a -> b) -> s -> t
overMay :: ((a -> Maybe b) -> s -> Maybe t) -> (a -> b) -> s -> Maybe t
overMay l f s = l (Just . f) s
useMay :: MonadState s m =>
((a -> Const (Maybe a) a) -> s -> Const (Maybe a) s) -> m (Maybe a)
useMay l = gets (viewMay l)
-- same as the Control.Lens fixities for (^.), (%~), (.~) respectively.
infixl 8 ^.?
infixr 4 %~?
infixr 4 .~?
(^.?) = flip viewMay
(%~?) = overMay
(.~?) = setMay
by :: Ord k => k -> Binoculars' (M.Map k v) v
by k f m = case M.lookup k m of
Just v -> fmap (\z -> M.insert k z m) (f v)
_ -> nothing
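-- A small sketch (not part of the original module, added for illustration):
-- a concrete lens-shaped accessor for the first component of a pair, usable
-- with the Maybe-flavoured combinators above. The names '_1May' and
-- '_viewFst' are illustrative only.
_1May :: Functor f => (a -> f a) -> (a, b) -> f (a, b)
_1May f (a, b) = fmap (\a' -> (a', b)) (f a)

-- viewMay through the accessor: this should evaluate to Just 1.
_viewFst :: Maybe Int
_viewFst = (1 :: Int, "x") ^.? _1May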
| tempname11/field-optics | src/Extra/Field/Optics.hs | mit | 1,350 | 0 | 13 | 314 | 540 | 293 | 247 | 33 | 2 |
module Main
where
import Parser
import Processor
import Printer
import Data.ByteString hiding (unzip, map, concatMap, length, putStr, hPutStr, putStrLn, head)
import System.Environment (getArgs)
import System.IO (utf8, IOMode(..), Handle, openFile, hSetEncoding, hPutStr, withFile)
getUtf8Handle :: FilePath -> IOMode -> IO Handle
getUtf8Handle fp iom = do
file <- openFile fp iom
hSetEncoding file utf8
return file
readUtf8 :: FilePath -> IO ByteString
readUtf8 fp = do
file <- getUtf8Handle fp ReadMode
hGetContents file
getRawLines :: FilePath -> IO [[(RawLine, Int)]]
getRawLines fp = do
fileContents <- readUtf8 fp
case parseToRawLines fp fileContents of
Left parseError -> print parseError >> return [[]]
Right rawLines -> return rawLines
writeLinesToFile :: FilePath -> [[(RawLine, Int)]] -> IO ()
writeLinesToFile fp rawLines = do
-- handle <- getUtf8Handle fp WriteMode
let (ls, warnings) = processRawLineBatches rawLines
let outputString = showLines ls
withFile fp WriteMode (\handle -> do
hSetEncoding handle utf8
hPutStr handle outputString )
putStr . showWarnings $ warnings
main :: IO ()
main = do
args <- getArgs
if length args < 2
then putStrLn "not enough arguments given"
else writeLinesToFile (args !! 1) =<< getRawLines (head args)
| lukaramu/ircparse | src/Main.hs | mit | 1,367 | 0 | 13 | 299 | 450 | 229 | 221 | 36 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Algebra.Graph.Export
-- Copyright : (c) Andrey Mokhov 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer : [email protected]
-- Stability : experimental
--
-- __Alga__ is a library for algebraic construction and manipulation of graphs
-- in Haskell. See <https://github.com/snowleopard/alga-paper this paper> for the
-- motivation behind the library, the underlying theory, and implementation details.
--
-- This module defines basic functionality for exporting graphs in textual and
-- binary formats. "Algebra.Graph.Export.Dot" provides DOT-specific functions.
-----------------------------------------------------------------------------
module Algebra.Graph.Export (
-- * Constructing and exporting documents
Doc, isEmpty, literal, render,
-- * Common combinators for text documents
(<+>), brackets, doubleQuotes, indent, unlines,
-- * Generic graph export
export
) where
import Data.Foldable (fold)
import Data.String hiding (unlines)
import Prelude hiding (unlines)
import Algebra.Graph.ToGraph (ToGraph, ToVertex, toAdjacencyMap)
import Algebra.Graph.AdjacencyMap (vertexList, edgeList)
import Algebra.Graph.Internal
-- | An abstract document data type with /O(1)/ time concatenation (the current
-- implementation uses difference lists). Here @s@ is the type of abstract
-- symbols or strings (text or binary). 'Doc' @s@ is a 'Monoid', therefore
-- 'mempty' corresponds to the /empty document/ and two documents can be
-- concatenated with 'mappend' (or operator 'Data.Monoid.<>'). Documents
-- comprising a single symbol or string can be constructed using the function
-- 'literal'. Alternatively, you can construct documents as string literals,
-- e.g. simply as @"alga"@, by using the @OverloadedStrings@ GHC extension. To
-- extract the document contents use the function 'render'.
--
-- Note that the document comprising a single empty string is considered to be
-- different from the empty document. This design choice is motivated by the
-- desire to support string types @s@ that have no 'Eq' instance, such as
-- "Data.ByteString.Builder", for which there is no way to check whether a
-- string is empty or not. As a consequence, the 'Eq' and 'Ord' instances are
-- defined as follows:
--
-- @
-- 'mempty' /= 'literal' ""
-- 'mempty' < 'literal' ""
-- @
newtype Doc s = Doc (List s) deriving (Monoid, Semigroup)
instance (Monoid s, Show s) => Show (Doc s) where
show = show . render
instance (Monoid s, Eq s) => Eq (Doc s) where
x == y | isEmpty x = isEmpty y
| isEmpty y = False
| otherwise = render x == render y
-- | The empty document is smallest.
instance (Monoid s, Ord s) => Ord (Doc s) where
compare x y | isEmpty x = if isEmpty y then EQ else LT
| isEmpty y = GT
| otherwise = compare (render x) (render y)
instance IsString s => IsString (Doc s) where
fromString = literal . fromString
-- | Check if a document is empty. The result is the same as when comparing the
-- given document to 'mempty', but this function does not require the 'Eq' @s@
-- constraint. Note that the document comprising a single empty string is
-- considered to be different from the empty document.
--
-- @
-- isEmpty 'mempty' == True
-- isEmpty ('literal' \"\") == False
-- isEmpty x == (x == 'mempty')
-- @
isEmpty :: Doc s -> Bool
isEmpty (Doc xs) = null xs
-- | Construct a document comprising a single symbol or string. If @s@ is an
-- instance of class 'IsString', then documents of type 'Doc' @s@ can be
-- constructed directly from string literals (see the second example below).
--
-- @
-- literal "Hello, " 'Data.Monoid.<>' literal "World!" == literal "Hello, World!"
-- literal "I am just a string literal" == "I am just a string literal"
-- 'render' . literal == 'id'
-- @
literal :: s -> Doc s
literal = Doc . pure
-- | Render the document as a single string. An inverse of the function 'literal'.
--
-- @
-- render ('literal' "al" 'Data.Monoid.<>' 'literal' "ga") :: ('IsString' s, 'Monoid' s) => s
-- render ('literal' "al" 'Data.Monoid.<>' 'literal' "ga") == "alga"
-- render 'mempty' == 'mempty'
-- render . 'literal' == 'id'
-- @
render :: Monoid s => Doc s -> s
render (Doc x) = fold x
-- | Concatenate two documents, separated by a single space, unless one of the
-- documents is empty. The operator \<+\> is associative with identity 'mempty'.
--
-- @
-- x \<+\> 'mempty' == x
-- 'mempty' \<+\> x == x
-- x \<+\> (y \<+\> z) == (x \<+\> y) \<+\> z
-- "name" \<+\> "surname" == "name surname"
-- @
(<+>) :: IsString s => Doc s -> Doc s -> Doc s
x <+> y | isEmpty x = y
| isEmpty y = x
| otherwise = x <> " " <> y
infixl 7 <+>
-- | Wrap a document in square brackets.
--
-- @
-- brackets "i" == "[i]"
-- brackets 'mempty' == "[]"
-- @
brackets :: IsString s => Doc s -> Doc s
brackets x = "[" <> x <> "]"
-- | Wrap a document into double quotes.
--
-- @
-- doubleQuotes "\/path\/with spaces" == "\\"\/path\/with spaces\\""
-- doubleQuotes (doubleQuotes 'mempty') == "\\"\\"\\"\\""
-- @
doubleQuotes :: IsString s => Doc s -> Doc s
doubleQuotes x = "\"" <> x <> "\""
-- | Prepend a given number of spaces to a document.
--
-- @
-- indent 0 == 'id'
-- indent 1 'mempty' == " "
-- @
indent :: IsString s => Int -> Doc s -> Doc s
indent spaces x = fromString (replicate spaces ' ') <> x
-- | Concatenate documents after appending a terminating newline symbol to each.
--
-- @
-- unlines [] == 'mempty'
-- unlines ['mempty'] == "\\n"
-- unlines ["title", "subtitle"] == "title\\nsubtitle\\n"
-- @
unlines :: IsString s => [Doc s] -> Doc s
unlines [] = mempty
unlines (x:xs) = x <> "\n" <> unlines xs
-- TODO: Avoid round-trip graph conversion if g :: AdjacencyMap a.
-- | Export a graph into a document given two functions that construct documents
-- for individual vertices and edges. The order of export is: vertices, sorted
-- by 'Ord' @a@, and then edges, sorted by 'Ord' @(a, a)@.
--
-- For example:
--
-- @
-- vDoc x = 'literal' ('show' x) <> "\\n"
-- eDoc x y = 'literal' ('show' x) <> " -> " <> 'literal' ('show' y) <> "\\n"
-- > putStrLn $ 'render' $ export vDoc eDoc (1 + 2 * (3 + 4) :: 'Algebra.Graph.Graph' Int)
--
-- 1
-- 2
-- 3
-- 4
-- 2 -> 3
-- 2 -> 4
-- @
export :: (Ord a, ToGraph g, ToVertex g ~ a) => (a -> Doc s) -> (a -> a -> Doc s) -> g -> Doc s
export v e g = vDoc <> eDoc
where
vDoc = mconcat $ map v (vertexList adjMap)
eDoc = mconcat $ map (uncurry e) (edgeList adjMap)
adjMap = toAdjacencyMap g
| snowleopard/alga | src/Algebra/Graph/Export.hs | mit | 6,815 | 0 | 11 | 1,489 | 1,004 | 567 | 437 | 49 | 1 |
import Control.Applicative
import Test.QuickCheck
import Squarify
import Debug.Trace
instance Arbitrary Rectangle where
arbitrary = do
x <- abs <$> arbitrary
y <- abs <$> arbitrary
w <- (\v -> 1 + abs v) <$> arbitrary
h <- (\v -> 1 + abs v) <$> arbitrary
return $ Rectangle x y w h
rectArea (Rectangle _ _ w h) = w * h
nearlyEqual a b = 1e-6 > abs (a - b)
notLessThan a b = a - b > -1e-6
contains (Rectangle x1 y1 w1 h1) (Rectangle x2 y2 w2 h2) =
let (x1', y1') = (x1+w1, y1+h1)
(x2', y2') = (x2+w2, y2+h2) in
and [
x2 `notLessThan` x1,
y2 `notLessThan` y1,
x1' `notLessThan` x2',
y1' `notLessThan` y2'
]
disjoint (Rectangle x1 y1 w1 h1) (Rectangle x2 y2 w2 h2) =
let (x1', y1') = (x1+w1, y1+h1)
(x2', y2') = (x2+w2, y2+h2) in
or [
x2 `notLessThan` x1',
x1 `notLessThan` x2',
y2 `notLessThan` y1',
y1 `notLessThan` y2'
]
data RectAndAreas = RectAndAreas { rectangle::Rectangle, areas::[Area] } deriving Show
instance Arbitrary RectAndAreas where
arbitrary = do
rect <- arbitrary
let totalArea = rectArea rect
areas <- ((map abs) <$> arbitrary) `suchThat` \areas -> totalArea >= sum areas
return $ RectAndAreas rect areas
checkSplitRectangle1 (RectAndAreas r0 areas) =
((rectArea r1) + (rectArea r2)) `nearlyEqual` (rectArea r0) &&
r0 `contains` r1 &&
r0 `contains` r2 &&
r1 `disjoint` r2
where (r1, r2) = splitRect r0 areas
checkSplitRectangle2 (RectAndAreas rect areas) = (rectArea r1) `nearlyEqual` sum areas
where (r1, _) = splitRect rect areas
checkSquarify (RectAndAreas rect areas) =
and [a `nearlyEqual` rectArea r | (a,r) <- zip areas rs] &&
and [(rs !! i) `disjoint` (rs !! j) |
let n = length rs,
i <- [0 .. n-1],
j <- [i+1 .. n-1]] &&
and [rect `contains` r | r <- rs]
where rs = squarify rect areas []
| specify/TreeMap | CheckSquarify.hs | gpl-2.0 | 1,874 | 0 | 15 | 468 | 881 | 471 | 410 | 53 | 1 |
{-
Copyright (C) 2005 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module Types where
data FileRec = FileRec {frname :: String,
frsize :: Integer,
frmd5 :: String,
frmime :: String}
| jgoerzen/media-index | Types.hs | gpl-2.0 | 930 | 0 | 8 | 222 | 38 | 25 | 13 | 5 | 0 |
answer :: Int
answer = squareOfSum - sumOfSquare
where squareOfSum = (sum [1..100]) ^ 2
sumOfSquare = sum . map (^2) $ [1..100]
| rodgzilla/project-euler | problem_006/problem.hs | gpl-3.0 | 138 | 1 | 10 | 33 | 65 | 34 | 31 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Snap.Core (Snap, Method(GET), route, method, writeBS)
import Snap.Http.Server (httpServe, defaultConfig, setPort)
import Control.Monad.IO.Class (liftIO)
import Data.ByteString.Char8 (pack)
import Data.Char (toUpper)
import Test.QuickCheck.Gen (generate)
import Testimonial (testimonial)
-- | Path to file containing the bullshit
bs_file = "./dist/resources/bs.csv"
-- | The main function
main :: IO ()
main = do
putStrLn "resource path: /"
httpServe (setPort 8181 defaultConfig) site
-- | Main entry point, defining all routes
site :: Snap ()
site = route [ ("", method GET $ bsHandler) ]
-- | Handler for producing the bullshit response
bsHandler :: Snap ()
bsHandler = do
phrase <- liftIO $ generate testimonial
writeBS . pack $ capitalize phrase ++ "."
where
capitalize (head:tail) = toUpper head : tail
| 941design/bs-gen | src/Main.hs | gpl-3.0 | 890 | 0 | 9 | 148 | 251 | 141 | 110 | 21 | 1 |
module HEP.Automation.MadGraph.Dataset.Set20110717set2TEV where
import HEP.Storage.WebDAV.Type
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Model.SChanC8V
import HEP.Automation.MadGraph.Dataset.Processes
import HEP.Automation.JobQueue.JobType
processSetup :: ProcessSetup SChanC8V
processSetup = PS {
model = SChanC8V
, process = preDefProcess TTBar0or1J
, processBrief = "TTBar0or1J"
, workname = "717_SChanC8V_TTBar0or1J_TEV"
}
paramSet :: [ModelParam SChanC8V]
paramSet = [ SChanC8VParam { mnp = m, gnpqR = g12, gnpqL = -g12,
gnpbR = g3, gnpbL = -g3,
gnptR = g3, gnptL = -g3 }
| (m,g12,g3) <- [ (1800,-0.3,1)
, (2200,-0.3,1) ] ]
sets :: [Int]
sets = [1..50]
ucut :: UserCut
ucut = UserCut {
uc_metcut = 15.0
, uc_etacutlep = 2.7
, uc_etcutlep = 18.0
, uc_etacutjet = 2.7
, uc_etcutjet = 15.0
}
eventsets :: [EventSet]
eventsets =
[ EventSet processSetup
(RS { param = p
, numevent = 100000
, machine = TeVatron
, rgrun = Fixed
, rgscale = 200.0
, match = MLM
, cut = DefCut
, pythia = RunPYTHIA
, usercut = UserCutDef ucut -- NoUserCutDef --
, pgs = RunPGS
, jetalgo = Cone 0.4
, uploadhep = NoUploadHEP
, setnum = num
})
| p <- paramSet , num <- sets ]
webdavdir :: WebDAVRemoteDir
webdavdir = WebDAVRemoteDir "paper3/ttbar_TEV_schanc8v_big"
| wavewave/madgraph-auto-dataset | src/HEP/Automation/MadGraph/Dataset/Set20110717set2TEV.hs | gpl-3.0 | 1,822 | 0 | 10 | 634 | 423 | 268 | 155 | 49 | 1 |
module Main where
main :: IO()
main = do
let list = [x | x <- [1..2017], x `rem` 400 == 0 || (x `rem` 4 == 0 && x `rem` 100 /= 0)]
print (list)
| llscm0202/BIGDATA2017 | ATIVIDADE1/exerciciosBasicos/ex8.hs | gpl-3.0 | 151 | 0 | 18 | 43 | 98 | 54 | 44 | 5 | 1 |
import Control.Applicative
import Control.Concurrent
import Control.Monad
import Control.Monad.IfElse
import Data.IORef
import Debug.Trace
import FRP.Yampa as Yampa
import Text.Printf
import Game
import Display
import Input
import Graphics.UI.Extra.SDL
-- TODO: Use MaybeT or ErrorT to report errors
main :: IO ()
main = do
initializeDisplay
timeRef <- initializeTimeRef
controllerRef <- initializeInputDevices
res <- loadResources
initGraphs res
reactimate (senseInput controllerRef)
(\_ -> do
-- Get clock and new input
mInput <- senseInput controllerRef
dtSecs <- senseTime timeRef mInput
-- trace ("Time : " ++ printf "%.5f" dtSecs) $
return (if controllerPause mInput then 0 else dtSecs, Just mInput)
)
(\_ (e,c) -> do render res e
-- putStrLn "*********************************************"
return (controllerExit c)
)
(wholeGame &&& arr id)
type MonadicT m a b = a -> m b
senseTime :: IORef Int -> MonadicT IO Controller DTime
senseTime timeRef = \mInput ->
let tt = if controllerSlow mInput then (/10) else id
tt1 = if controllerSuperSlow mInput then (/100) else tt
tt2 = if controllerFast mInput then (*10) else tt1
in (tt2 . milisecsToSecs) <$> senseTimeRef timeRef
| keera-studios/pang-a-lambda | Experiments/collisions/Main.hs | gpl-3.0 | 1,431 | 0 | 15 | 428 | 357 | 191 | 166 | 34 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module CC.ShellCheck.ShellScript (
-- * Validation
isShellScript
, hasShellExtension
, hasValidInterpretter
-- * Retrieval
, findShellScripts
) where
import Control.Monad.Extra
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as Char8
import Data.List
import Data.Shebang (Shebang(..), Interpretter(..), Argument(..))
import qualified Data.Shebang as Shebang
import System.Directory
import System.FilePath.Glob
import System.FilePath.Posix
--------------------------------------------------------------------------------
-- | List of shells the engine should be able to handle.
validShells :: [BS.ByteString]
validShells = ["sh", "ash", "dash", "bash", "ksh"]
--------------------------------------------------------------------------------
-- | List of valid shell file extensions.
validShellExtensions :: [BS.ByteString]
validShellExtensions = ("." <>) <$> validShells
--------------------------------------------------------------------------------
-- | Checks to see if file has correct extension.
hasShellExtension :: FilePath -> Bool
hasShellExtension path = extension `elem` validShellExtensions
where
extension :: BS.ByteString
extension = Char8.pack $ takeExtension path
--------------------------------------------------------------------------------
-- | Checks to see if script has a valid interpretter.
hasValidInterpretter :: Shebang -> Bool
hasValidInterpretter (Shebang (Interpretter int) maybeArgument) =
if BS.isSuffixOf "env" int
then case maybeArgument of
Nothing -> False
Just (Argument arg) -> any (`BS.isPrefixOf` arg) validShells
else any (`BS.isSuffixOf` int) validShells
--------------------------------------------------------------------------------
-- | Determines whether a file is a valid shell script.
isShellScript :: FilePath -> IO Bool
isShellScript path =
if hasExtension path
then return $! hasShellExtension path
else do
header <- Shebang.readFirstLine path
case Shebang.decode header of
Just shebang -> return $! hasValidInterpretter shebang
Nothing -> return False
--------------------------------------------------------------------------------
-- | Retrieve shell scripts for a list of paths.
findShellScripts :: [FilePath] -> IO [FilePath]
findShellScripts paths = do
dotShFiles <- concat <$> globDir patterns "."
allScripts <- filterM validateScript $! dotShFiles ++ otherFiles
return $ fmap clean allScripts
where
dirs :: [FilePath]; otherFiles :: [FilePath]
(dirs, otherFiles) = partition hasTrailingPathSeparator paths
clean :: String -> String
clean ('.':'/':x) = x
clean x = x
patterns :: [Pattern]
patterns = fmap (compile . (++ "**/*")) dirs
validateScript :: FilePath -> IO Bool
validateScript x = doesFileExist x &&^ isShellScript x
| filib/codeclimate-shellcheck | src/CC/ShellCheck/ShellScript.hs | gpl-3.0 | 2,972 | 0 | 12 | 536 | 603 | 340 | 263 | 53 | 3 |
module Util(recurseMonad,maybeDefault,trim) where
import Data.Text(strip,pack,unpack)
--TODO is there a generic method to do this? reminds me of fold... is it related?
recurseMonad :: (Monad b) => [a] -> (a -> b x) -> b ()
recurseMonad [] _ = return ()
recurseMonad (a : as) f = (f a) >> (recurseMonad as f)
maybeDefault :: Maybe m -> m -> m
maybeDefault Nothing def = def
maybeDefault (Just v) _ = v
trim :: String -> String
trim s = (unpack (strip (pack s)))
| redfish64/MpvLangLearn | Util.hs | gpl-3.0 | 468 | 0 | 10 | 91 | 201 | 107 | 94 | 10 | 1 |
module Util where
import qualified Data.ByteString as BS
chunk n = map (take n) . takeWhile (not.null) . iterate (drop n)
zipEither :: (b -> c -> d) -> [Either a b] -> [c] -> [Either a d]
zipEither f (Left x : xs) ys = Left x : zipEither f xs ys
zipEither f (Right x : xs) (y:ys) = Right (f x y) : zipEither f xs ys
zipEither _ _ _ = []
unhexlify :: String -> BS.ByteString
unhexlify = BS.pack . map (read . ("0x"++)) . chunk 2
| maugier/cctk | src/CCTK/Util.hs | gpl-3.0 | 435 | 0 | 10 | 99 | 245 | 126 | 119 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Surveys.Surveys.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a survey.
--
-- /See:/ <https://developers.google.com/surveys/ Surveys API Reference> for @surveys.surveys.insert@.
module Network.Google.Resource.Surveys.Surveys.Insert
(
-- * REST Resource
SurveysInsertResource
-- * Creating a Request
, surveysInsert
, SurveysInsert
-- * Request Lenses
, siPayload
) where
import Network.Google.Prelude
import Network.Google.Surveys.Types
-- | A resource alias for @surveys.surveys.insert@ method which the
-- 'SurveysInsert' request conforms to.
type SurveysInsertResource =
"surveys" :>
"v2" :>
"surveys" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Survey :> Post '[JSON] Survey
-- | Creates a survey.
--
-- /See:/ 'surveysInsert' smart constructor.
newtype SurveysInsert =
SurveysInsert'
{ _siPayload :: Survey
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SurveysInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'siPayload'
surveysInsert
:: Survey -- ^ 'siPayload'
-> SurveysInsert
surveysInsert pSiPayload_ = SurveysInsert' {_siPayload = pSiPayload_}
-- | Multipart request metadata.
siPayload :: Lens' SurveysInsert Survey
siPayload
= lens _siPayload (\ s a -> s{_siPayload = a})
instance GoogleRequest SurveysInsert where
type Rs SurveysInsert = Survey
type Scopes SurveysInsert =
'["https://www.googleapis.com/auth/surveys",
"https://www.googleapis.com/auth/userinfo.email"]
requestClient SurveysInsert'{..}
= go (Just AltJSON) _siPayload surveysService
where go
= buildClient (Proxy :: Proxy SurveysInsertResource)
mempty
| brendanhay/gogol | gogol-surveys/gen/Network/Google/Resource/Surveys/Surveys/Insert.hs | mpl-2.0 | 2,594 | 0 | 12 | 573 | 307 | 189 | 118 | 47 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CognitoIdentity.UpdateIdentityPool
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Updates a user pool.
--
-- <http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_UpdateIdentityPool.html>
module Network.AWS.CognitoIdentity.UpdateIdentityPool
(
-- * Request
UpdateIdentityPool
-- ** Request constructor
, updateIdentityPool
-- ** Request lenses
, uipAllowUnauthenticatedIdentities
, uipDeveloperProviderName
, uipIdentityPoolId
, uipIdentityPoolName
, uipOpenIdConnectProviderARNs
, uipSupportedLoginProviders
-- * Response
, UpdateIdentityPoolResponse
-- ** Response constructor
, updateIdentityPoolResponse
-- ** Response lenses
, uiprAllowUnauthenticatedIdentities
, uiprDeveloperProviderName
, uiprIdentityPoolId
, uiprIdentityPoolName
, uiprOpenIdConnectProviderARNs
, uiprSupportedLoginProviders
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CognitoIdentity.Types
import qualified GHC.Exts
data UpdateIdentityPool = UpdateIdentityPool
{ _uipAllowUnauthenticatedIdentities :: Bool
, _uipDeveloperProviderName :: Maybe Text
, _uipIdentityPoolId :: Text
, _uipIdentityPoolName :: Text
, _uipOpenIdConnectProviderARNs :: List "OpenIdConnectProviderARNs" Text
, _uipSupportedLoginProviders :: Map Text Text
} deriving (Eq, Read, Show)
-- | 'UpdateIdentityPool' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uipAllowUnauthenticatedIdentities' @::@ 'Bool'
--
-- * 'uipDeveloperProviderName' @::@ 'Maybe' 'Text'
--
-- * 'uipIdentityPoolId' @::@ 'Text'
--
-- * 'uipIdentityPoolName' @::@ 'Text'
--
-- * 'uipOpenIdConnectProviderARNs' @::@ ['Text']
--
-- * 'uipSupportedLoginProviders' @::@ 'HashMap' 'Text' 'Text'
--
updateIdentityPool :: Text -- ^ 'uipIdentityPoolId'
-> Text -- ^ 'uipIdentityPoolName'
-> Bool -- ^ 'uipAllowUnauthenticatedIdentities'
-> UpdateIdentityPool
updateIdentityPool p1 p2 p3 = UpdateIdentityPool
{ _uipIdentityPoolId = p1
, _uipIdentityPoolName = p2
, _uipAllowUnauthenticatedIdentities = p3
, _uipSupportedLoginProviders = mempty
, _uipDeveloperProviderName = Nothing
, _uipOpenIdConnectProviderARNs = mempty
}
-- | TRUE if the identity pool supports unauthenticated logins.
uipAllowUnauthenticatedIdentities :: Lens' UpdateIdentityPool Bool
uipAllowUnauthenticatedIdentities =
lens _uipAllowUnauthenticatedIdentities
(\s a -> s { _uipAllowUnauthenticatedIdentities = a })
-- | The "domain" by which Cognito will refer to your users.
uipDeveloperProviderName :: Lens' UpdateIdentityPool (Maybe Text)
uipDeveloperProviderName =
lens _uipDeveloperProviderName
(\s a -> s { _uipDeveloperProviderName = a })
-- | An identity pool ID in the format REGION:GUID.
uipIdentityPoolId :: Lens' UpdateIdentityPool Text
uipIdentityPoolId =
lens _uipIdentityPoolId (\s a -> s { _uipIdentityPoolId = a })
-- | A string that you provide.
uipIdentityPoolName :: Lens' UpdateIdentityPool Text
uipIdentityPoolName =
lens _uipIdentityPoolName (\s a -> s { _uipIdentityPoolName = a })
-- | A list of OpendID Connect provider ARNs.
uipOpenIdConnectProviderARNs :: Lens' UpdateIdentityPool [Text]
uipOpenIdConnectProviderARNs =
lens _uipOpenIdConnectProviderARNs
(\s a -> s { _uipOpenIdConnectProviderARNs = a })
. _List
-- | Optional key:value pairs mapping provider names to provider app IDs.
uipSupportedLoginProviders :: Lens' UpdateIdentityPool (HashMap Text Text)
uipSupportedLoginProviders =
lens _uipSupportedLoginProviders
(\s a -> s { _uipSupportedLoginProviders = a })
. _Map
data UpdateIdentityPoolResponse = UpdateIdentityPoolResponse
{ _uiprAllowUnauthenticatedIdentities :: Bool
, _uiprDeveloperProviderName :: Maybe Text
, _uiprIdentityPoolId :: Text
, _uiprIdentityPoolName :: Text
, _uiprOpenIdConnectProviderARNs :: List "OpenIdConnectProviderARNs" Text
, _uiprSupportedLoginProviders :: Map Text Text
} deriving (Eq, Read, Show)
-- | 'UpdateIdentityPoolResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uiprAllowUnauthenticatedIdentities' @::@ 'Bool'
--
-- * 'uiprDeveloperProviderName' @::@ 'Maybe' 'Text'
--
-- * 'uiprIdentityPoolId' @::@ 'Text'
--
-- * 'uiprIdentityPoolName' @::@ 'Text'
--
-- * 'uiprOpenIdConnectProviderARNs' @::@ ['Text']
--
-- * 'uiprSupportedLoginProviders' @::@ 'HashMap' 'Text' 'Text'
--
updateIdentityPoolResponse :: Text -- ^ 'uiprIdentityPoolId'
-> Text -- ^ 'uiprIdentityPoolName'
-> Bool -- ^ 'uiprAllowUnauthenticatedIdentities'
-> UpdateIdentityPoolResponse
updateIdentityPoolResponse p1 p2 p3 = UpdateIdentityPoolResponse
{ _uiprIdentityPoolId = p1
, _uiprIdentityPoolName = p2
, _uiprAllowUnauthenticatedIdentities = p3
, _uiprSupportedLoginProviders = mempty
, _uiprDeveloperProviderName = Nothing
, _uiprOpenIdConnectProviderARNs = mempty
}
-- | TRUE if the identity pool supports unauthenticated logins.
uiprAllowUnauthenticatedIdentities :: Lens' UpdateIdentityPoolResponse Bool
uiprAllowUnauthenticatedIdentities =
lens _uiprAllowUnauthenticatedIdentities
(\s a -> s { _uiprAllowUnauthenticatedIdentities = a })
-- | The "domain" by which Cognito will refer to your users.
uiprDeveloperProviderName :: Lens' UpdateIdentityPoolResponse (Maybe Text)
uiprDeveloperProviderName =
lens _uiprDeveloperProviderName
(\s a -> s { _uiprDeveloperProviderName = a })
-- | An identity pool ID in the format REGION:GUID.
uiprIdentityPoolId :: Lens' UpdateIdentityPoolResponse Text
uiprIdentityPoolId =
lens _uiprIdentityPoolId (\s a -> s { _uiprIdentityPoolId = a })
-- | A string that you provide.
uiprIdentityPoolName :: Lens' UpdateIdentityPoolResponse Text
uiprIdentityPoolName =
lens _uiprIdentityPoolName (\s a -> s { _uiprIdentityPoolName = a })
-- | A list of OpendID Connect provider ARNs.
uiprOpenIdConnectProviderARNs :: Lens' UpdateIdentityPoolResponse [Text]
uiprOpenIdConnectProviderARNs =
lens _uiprOpenIdConnectProviderARNs
(\s a -> s { _uiprOpenIdConnectProviderARNs = a })
. _List
-- | Optional key:value pairs mapping provider names to provider app IDs.
uiprSupportedLoginProviders :: Lens' UpdateIdentityPoolResponse (HashMap Text Text)
uiprSupportedLoginProviders =
lens _uiprSupportedLoginProviders
(\s a -> s { _uiprSupportedLoginProviders = a })
. _Map
instance ToPath UpdateIdentityPool where
toPath = const "/"
instance ToQuery UpdateIdentityPool where
toQuery = const mempty
instance ToHeaders UpdateIdentityPool
instance ToJSON UpdateIdentityPool where
toJSON UpdateIdentityPool{..} = object
[ "IdentityPoolId" .= _uipIdentityPoolId
, "IdentityPoolName" .= _uipIdentityPoolName
, "AllowUnauthenticatedIdentities" .= _uipAllowUnauthenticatedIdentities
, "SupportedLoginProviders" .= _uipSupportedLoginProviders
, "DeveloperProviderName" .= _uipDeveloperProviderName
, "OpenIdConnectProviderARNs" .= _uipOpenIdConnectProviderARNs
]
instance AWSRequest UpdateIdentityPool where
type Sv UpdateIdentityPool = CognitoIdentity
type Rs UpdateIdentityPool = UpdateIdentityPoolResponse
request = post "UpdateIdentityPool"
response = jsonResponse
instance FromJSON UpdateIdentityPoolResponse where
parseJSON = withObject "UpdateIdentityPoolResponse" $ \o -> UpdateIdentityPoolResponse
<$> o .: "AllowUnauthenticatedIdentities"
<*> o .:? "DeveloperProviderName"
<*> o .: "IdentityPoolId"
<*> o .: "IdentityPoolName"
<*> o .:? "OpenIdConnectProviderARNs" .!= mempty
<*> o .:? "SupportedLoginProviders" .!= mempty
| dysinger/amazonka | amazonka-cognito-identity/gen/Network/AWS/CognitoIdentity/UpdateIdentityPool.hs | mpl-2.0 | 9,211 | 0 | 21 | 2,033 | 1,176 | 696 | 480 | 144 | 1 |
-- Chapter Exercises
-- 1.
data Constant a b = Constant b
instance Foldable (Constant a) where
foldr f i (Constant a) = f a i
-- 2.
data Two a b = Two a b
instance Foldable (Two a) where
foldr f i (Two _ b) = f b i
-- 3.
data Three a b c = Three a b c
instance Foldable (Three a b) where
foldr f i (Three _ _ c) = f c i
-- 4.
data Three' a b = Three' a b b
instance Foldable (Three' a) where
foldr f i (Three' _ b1 b2) = f b2 (f b1 i)
-- 5.
data Four' a b = Four' a b b b
instance Foldable (Four' a) where
foldr f i (Four' _ b1 b2 b3) = f b3 $ f b2 $ f b1 i
-- Thinking cap time.
-- But I cheated: https://github.com/dmvianna/haskellbook/blob/master/src/Ch20-Foldable.hs#L97
--
-- (g a) will be a Bool which determines if the element has been found. Then we just need
-- to lift a using pure or return mempty for the monoid return which is of type (Monoid (f a))
filterF :: (Applicative f, Foldable t, Monoid (f a)) => (a -> Bool) -> t a -> f a
filterF g ta = foldMap (\a -> if g a then pure a else mempty) ta
| dmp1ce/Haskell-Programming-Exercises | Chapter 20/Chapter Exercises.hs | unlicense | 1,033 | 0 | 9 | 251 | 399 | 210 | 189 | 17 | 2 |
{-# LANGUAGE BangPatterns, UnicodeSyntax #-}
import Math.NumberTheory.Primes.Sieve
import Math.NumberTheory.Primes.Testing
import Data.Set (Set)
import qualified Data.Set as Set
p2 x = x*x
p3 x = x*x*x
p4 x = x*x*x*x
main = do
let limit = 50000000
-- Reduce infinite set of primes to initial subset
let !relevantPrimes = takeWhile (< limit) primes :: [Integer]
-- Reduce set of primes and pre-square/cube/^4
-- Only those with ^n < limit could yield a sum < limit
let !rp2 = filter (< limit) $ map p2 relevantPrimes
let !rp3 = filter (< limit) $ map p3 relevantPrimes
let !rp4 = filter (< limit) $ map p4 relevantPrimes
-- Generate unique list of results
let generated = [x + y + z | x <- rp2, y <- rp3, z <- rp4]
let !genSet = filter (< limit) generated
let !nubbedGenSet = Set.fromList genSet
print $ Set.size nubbedGenSet | ulikoehler/ProjectEuler | Euler87.hs | apache-2.0 | 878 | 0 | 12 | 197 | 299 | 149 | 150 | 18 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
-- |
module VK.App.AppSpec (main, spec) where
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class (liftIO)
import Data.List (find)
import qualified Data.Text as T
import System.Directory (getCurrentDirectory)
-- import Test.Hspec
import Test.Hspec.WebDriver hiding (BrowserDefaults, Chrome)
import qualified Test.WebDriver as W
import TestSettings
main :: IO ()
main = hspec spec
data Browsers = Chrome
deriving (Show, Eq, Bounded, Enum)
instance TestCapabilities Browsers where
newCaps Chrome =
let copts = ["--disable-web-security"]
in
return W.defaultCaps{W.browser = W.chrome{W.chromeOptions = copts} }
instance Using Browsers where
type UsingList Browsers = [Browsers]
using d s = ([d], s)
instance Using [Browsers] where
type UsingList [Browsers] = [Browsers]
using d s = (d, s)
spec :: Spec
spec = session "VK application tests" $ using Chrome $ do
it "login to Vkontakte with user credentials" $ runWD $ do
dir <- liftIO getCurrentDirectory
openPage $ "file://" ++ dir ++ "/example/vk.html"
cw <- getCurrentWindow
findElem (ByCSS "div.authorization > div.panel-body > a.btn") >>= click
liftIO $ threadDelay 3000000
ws <- windows
length ws `shouldBe` 2
let Just vkW = find (/= cw) ws
focusWindow vkW
findElem (ByName "email") >>= sendKeys userName
findElem (ByName "pass") >>= sendKeys userPass
findElem (ByCSS "form input.button") >>= click
authUrl <- getCurrentURL
closeWindow vkW
focusWindow cw
findElem (ByCSS "input.form-control") >>= sendKeys (T.pack authUrl)
liftIO $ threadDelay 3000000
findElem (ByCSS "button") >>= click
liftIO $ threadDelay 3000000
it "selects \"AnyAudio\"" $ runWD $ do
findElem (ByCSS "a[href=\"#/audios/set-audio-selector/any-audio\"]") >>= click
liftIO $ threadDelay 3000000
pagerEls <- findElems (ByCSS "a[href^=\"#/audios/get-audio/\"]")
length pagerEls `shouldBe` 11
activeEls <- findElems (ByCSS "li.active a[href=\"#\"]")
length activeEls `shouldBe` 1
-- inspectSession
| eryx67/vk-api-example | test/VK/App/AppSpec.hs | bsd-3-clause | 2,348 | 0 | 15 | 560 | 655 | 330 | 325 | 56 | 1 |
{-# LANGUAGE ScopedTypeVariables, TypeSynonymInstances, FlexibleInstances #-}
module Math.FormalFFT where
import Control.Applicative ((<$>))
import Control.Arrow ((***))
import Control.Monad (join)
import Data.Monoid ((<>))
import Data.SBV
import Data.Tuple (swap)
import Prelude hiding (length)
import Test.QuickCheck
type Complex = (SReal, SReal)
cadd,cmul,csub :: Complex -> Complex -> Complex
(a,b) `cadd` (a', b') = (a + a', b + b')
(a,b) `csub` (a', b') = (a - a', b - b')
(a,b) `cmul` (a', b') = (a*a' - b*b', b*a' + a*b')
invmul :: SInteger -> Complex -> Complex
invmul k (a,b) = let k' = 1/toSReal k in (k' * a, -k' * b)
invmulk :: SInteger -> Complex -> Complex
invmulk k (a,b) = let k' = 1/toSReal k in (k' * a, k' * b)
pow :: Complex -> SInteger -> Complex
pow c n = ite (n.== 0) (1, 0) $ ite (n .== 1) c $ c `cmul` pow c (n - 1)
constrainPrimitiveUnitRoot :: Complex -> SInteger -> Symbolic ()
constrainPrimitiveUnitRoot c n = do
constrain $ pow c n .== (1, 0)
go c (n - 1)
where
go _ 0 = return ()
go c n' = constrain (pow c n' ./= (1, 0)) >> go c (n' - 1)
constrainTransform :: Complex -> Int -> Symbolic SBool
constrainTransform c np = do
work1 <- mkFreeVars np
work2 <- mkFreeVars np
let work = zip work1 work2
shared = dft false c work
dft' = dft true c shared
fft' = fft true c (fft false c work)
mixed = fft true c shared
return $ dft' .== work &&& mixed .== work &&& fft' .== work
proveTransform :: Int -> IO ThmResult
proveTransform np = do
let n = literal $ fromIntegral np
prove $ do
[real, imag] <- {- mkExistVars 2 -} symbolics ["c_re", "c_im"]
let c = (real, imag)
constrainPrimitiveUnitRoot c n >> constrainTransform c np
testTransform :: Int -> IO ()
testTransform np = do
let n = literal $ fromIntegral np
res <- sat $ do
[real, imag] <- mkExistVars 2
constrainPrimitiveUnitRoot (real,imag) n
return $ (literal true :: SBool)
let (Just (r1, r2)) = extractModel res
r1' = literal r1
r2' = literal r2
putStrLn $ "* " <> show np <> ", running with root: " <> show (r1', r2')
print $ isConcrete r1'
print $ isConcrete r2'
quickCheck $ constrainTransform (r1', r2') np
dft :: SBool -> Complex -> [Complex] -> [Complex]
dft invert root x = go x 0
where
powers c n = ite (n .== inputLength) [] (pow c n : powers c (n + 1))
powers' = powers root 0
inputLength = length x
go [] _ = []
go (_:rest) row = y_i : go rest (row + 1)
where
rowPowers = map (\c -> ite invert (inputLength `invmul` pow c row) (pow c row)) powers'
y_i = x `dot` rowPowers
length :: [a] -> SInteger
length = foldr (const (+1)) 0
dot :: [Complex] -> [Complex] -> Complex
dot [] [] = (0, 0)
dot (a:at) (b:bt) = (a `cmul` b) `cadd` dot at bt
fft :: SBool -> Complex -> [Complex] -> [Complex]
fft inverse root x = map (invmulk scaling) $ go root x
where
scaling = ite inverse (length x) 1
exponent = ite inverse (-2) 2
go :: Complex -> [Complex] -> [Complex]
go _ (a:[]) = [a]
go c a = uncurry (<>) $ unzip combined
where
(subresult_1, subresult_2) = join (***) (go $ pow c exponent) $ divideEvenOdd a
rootPowers = iterate (`cmul` c) (1, 0)
combined = zipWith3 (\y_i y_i' z -> (y_i `cadd` (z `cmul` y_i'), y_i `csub` (z `cmul` y_i')))
subresult_1 subresult_2 (ite inverse (map (invmul 1) rootPowers) rootPowers)
divideEvenOdd :: [a] -> ([a], [a])
divideEvenOdd = swap . foldr (\e (l1, l2) -> (l2, e:l1)) ([], [])
| davnils/formal-fft | src/Math/FormalFFT.hs | bsd-3-clause | 3,496 | 0 | 15 | 828 | 1,731 | 922 | 809 | 85 | 2 |
module Utils.Exception where
data Exception = IllformedLetPattern
| VarNotInCtx
| Nonlinearity
| NonlocallyClosed
| NonEmptyCtx
| TypeErrorLetNotTop
| TypeErrorLetNotTensor
| IllformedPromote
| TypeErrorPromoteNotBang
| TypeErrorTypesNotEqual
| TypeErrorAppNotImp
| NonLinearCtx
| TypeErrorDuplicatedFreeVar
deriving Show | heades/Agda-LLS | Source/ALL/Utils/Exception.hs | bsd-3-clause | 503 | 0 | 5 | 214 | 52 | 34 | 18 | 15 | 0 |
{-# LANGUAGE CPP #-}
import Control.Shell
import Data.Bits
import System.Info (os)
import System.Environment (getArgs)
import System.Exit
inBuildDir :: [String] -> Shell a -> Shell a
inBuildDir args act = do
srcdir <- pwd
isdir <- isDirectory "_build"
when (isdir && not ("no-rebuild" `elem` args)) $ rmdir "_build"
mkdir True "_build"
inDirectory "_build" $ do
unless ("no-rebuild" `elem` args) $ run_ "git" ["clone", srcdir] ""
inDirectory "haste-compiler" act
-- Packages will end up in ghc-$GHC_MAJOR.$GHC_MINOR. If the directory does
-- not exist, it is created. If the package already exists in that directory,
-- it is overwritten.
main = shell_ $ do
let args = fixAllArg cmdline
when (null args) $ do
echo $ "Usage: runghc build-release.hs [no-rebuild|in-place] formats\n"
echo $ "Supported formats: deb, tarball, 7z, all\n"
echo $ "no-rebuild\n Repackage whatever is already in the " ++
"_build directory\n instead of rebuilding from scratch."
echo $ "in-place\n Build package in current directory.\n" ++
" Packages end up in ghc-$GHC_MAJOR.$GHC_MINOR."
exit
when ("--debghcdeps" `elem` args) $ do
echo "ghc"
exit
let inplace = "in-place" `elem` args
chdir = if inplace then id else inBuildDir args
chdir $ do
(ver, ghcver) <- if ("no-rebuild" `elem` args)
then do
getVersions
else do
vers <- buildPortable
bootPortable
return vers
let (major, '.':rest) = break (== '.') ghcver
(minor, _) = break (== '.') rest
outdir
| inplace = "ghc-" ++ major ++ "." ++ minor
| otherwise = ".." </> ".." </> ("ghc-" ++ major ++ "." ++ minor)
mkdir True outdir
when ("tarball" `elem` args) $ do
tar <- buildBinaryTarball ver ghcver
mv tar (outdir </> tar)
when ("7z" `elem` args) $ do
f <- buildBinary7z ver ghcver
mv f (outdir </> f)
when ("deb" `elem` args) $ do
deb <- buildDebianPackage ver ghcver
mv (".." </> deb) (outdir </> deb)
where
fixAllArg args | "all" `elem` args = "deb" : "tarball" : "7z" : args
| otherwise = args
buildPortable = do
-- Build compiler
run_ "cabal" ["configure", "-f", "portable", "-f", "static"] ""
run_ "cabal" ["haddock"] ""
run_ "dist/setup/setup" ["build"] ""
-- Copy docs and build manpages
cpdir "dist/doc/html/haste-compiler" "haste-compiler/docs"
buildManPages
-- Strip symbols
case os of
"mingw32" -> do
-- windows
run_ "strip" ["-s", "haste-compiler\\bin\\haste-pkg.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\hastec.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\haste-cat.exe"] ""
"linux" -> do
-- linux
run_ "strip" ["-s", "haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["-s", "haste-compiler/bin/hastec"] ""
run_ "strip" ["-s", "haste-compiler/bin/haste-cat"] ""
_ -> do
-- darwin
run_ "strip" ["haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["haste-compiler/bin/hastec"] ""
run_ "strip" ["haste-compiler/bin/haste-cat"] ""
-- Get versions
getVersions
getVersions = do
ver <- fmap init $ run "haste-compiler/bin/hastec" ["--version"] ""
ghcver <- fmap init $ run "ghc" ["--numeric-version"] ""
return (ver, ghcver)
bootPortable = do
-- Build libs
run_ "haste-compiler/bin/haste-boot" ["--force", "--initial"] ""
-- Remove unnecessary binaries
case os of
"mingw32" -> do
-- windows
rm "haste-compiler\\bin\\haste-boot.exe"
_ -> do
-- linux/darwin
rm "haste-compiler/bin/haste-boot"
forEachFile "haste-compiler" $ \f -> do
when ((f `hasExt` ".o") || (f `hasExt` ".a")) $ rm f
where
f `hasExt` e = takeExtension f == e
buildManPages = do
mkdir True "man"
buildManPage "hastec"
buildManPage "haste-cat"
where
buildManPage inf = run_ "pandoc" ["-s", "-o", outf, inf'] ""
where
ext | os == "mingw32" = "html"
| otherwise = "1"
outf = "man" </> inf <.> ext
inf' = "doc" </> inf <.> "1.md"
buildBinaryTarball ver ghcver = do
-- Copy manpages
mkdir True "haste-compiler/man"
cp "man/hastec.1" "haste-compiler/man/hastec.1"
cp "man/haste-cat.1" "haste-compiler/man/haste-cat.1"
-- Get versions and create binary tarball
cp "install.sh" "haste-compiler/install.sh"
cp "uninstall.sh" "haste-compiler/uninstall.sh"
cp "doc/readme-portable-linux.txt" "haste-compiler/readme.txt"
run_ "tar" ["-cjf", tarball, "haste-compiler"] ""
mapM_ rm ["haste-compiler/install.sh",
"haste-compiler/uninstall.sh",
"haste-compiler/readme.txt"]
rmdir "haste-compiler/man"
return tarball
where
tarball =
concat ["haste-compiler-",ver,"_ghc-",ghcver,"-",os,".tar.bz2"]
buildBinary7z ver ghcver = do
-- Copy HTML "manpages"
mkdir True "haste-compiler/man"
cp "man/hastec.html" "haste-compiler/man/hastec.html"
cp "man/haste-cat.html" "haste-compiler/man/haste-cat.html"
-- Get versions and create binary tarball
run_ "7z" ["a", "-i!haste-compiler", name] ""
return $ name
where
name =
concat ["haste-compiler-",ver,"_ghc-",ghcver,"-",os,".7z"]
arch :: String
arch = "amd64" -- only amd64 supported
-- Debian packaging based on https://wiki.debian.org/IntroDebianPackaging.
-- Requires build-essential, devscripts and debhelper.
buildDebianPackage ver ghcver = do
run_ "debuild" ["-e", "LD_LIBRARY_PATH=haste-compiler/haste-cabal",
"-us", "-uc", "-b"] ""
return $ "haste-compiler_" ++ ver ++ "_" ++ arch ++ ".deb"
| nyson/haste-compiler | build-release.hs | bsd-3-clause | 5,957 | 1 | 19 | 1,607 | 1,516 | 754 | 762 | 127 | 3 |
{-|
Module : Data.Number.MPFR.Mutable.Misc
Description : Miscellaneous functions
Copyright : (c) Aleš Bizjak
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
For documentation on particular functions see
<http://www.mpfr.org/mpfr-current/mpfr.html#Miscellaneous-Functions>.
-}
{-# INCLUDE <mpfr.h> #-}
{-# INCLUDE <chsmpfr.h> #-}
module Data.Number.MPFR.Mutable.Misc where
import Data.Number.MPFR.Mutable.Internal
import Control.Monad.ST(ST)
{-TODO
nextToward :: MMPFR s -> MMPFR s -> ST s ()
nextToward = withMutableMPFRSNR mpfr_nexttoward
-}
nextAbove :: MMPFR s -> ST s ()
nextAbove = withMutableMPFRSNRNR mpfr_nextabove
nextbelow :: MMPFR s -> ST s ()
nextbelow = withMutableMPFRSNRNR mpfr_nextbelow
max :: MMPFR s -> MMPFR s -> MMPFR s -> RoundMode -> ST s Int
max = withMutableMPFRBA mpfr_max
min :: MMPFR s -> MMPFR s -> MMPFR s -> RoundMode -> ST s Int
min = withMutableMPFRBA mpfr_min
getExp :: MMPFR s -> ST s Exp
getExp m = unsafeFreeze m >>= return . \(MP _ _ e _) -> e
{- TODO
setExp :: MPFR s -> Exp -> ST s Int
setExp m e = do m' <- unsafeReadMMPFR m
uns
-}
getPrec :: MMPFR s -> ST s Precision
getPrec m = unsafeFreeze m >>= return . \(MP p _ _ _) -> fromIntegral p
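-- A hedged usage sketch (not part of the exported API): read the exponent and
-- precision of a mutable value in a single 'ST' action, using only functions
-- defined above; 'expAndPrec' is a hypothetical name.
--
-- > expAndPrec :: MMPFR s -> ST s (Exp, Precision)
-- > expAndPrec m = do
-- >     e <- getExp m
-- >     p <- getPrec m
-- >     return (e, p)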
| ekmett/hmpfr | src/Data/Number/MPFR/Mutable/Misc.hs | bsd-3-clause | 1,324 | 0 | 9 | 286 | 282 | 144 | 138 | 15 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ViewPatterns #-}
-- | Build-specific types.
module Stack.Types.Build
(StackBuildException(..)
,FlagSource(..)
,UnusedFlags(..)
,InstallLocation(..)
,ModTime
,modTime
,Installed(..)
,PackageInstallInfo(..)
,Task(..)
,taskLocation
,LocalPackage(..)
,BaseConfigOpts(..)
,Plan(..)
,TestOpts(..)
,BenchmarkOpts(..)
,BuildOpts(..)
,BuildSubset(..)
,defaultBuildOpts
,TaskType(..)
,TaskConfigOpts(..)
,ConfigCache(..)
,ConstructPlanException(..)
,configureOpts
,BadDependency(..)
,wantedLocalPackages
,FileCacheInfo (..))
where
import Control.DeepSeq
import Control.Exception
import Data.Binary.VersionTagged
import qualified Data.ByteString as S
import Data.Char (isSpace)
import Data.Data
import Data.Hashable
import Data.List (dropWhileEnd, nub, intercalate)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Data.Time.Calendar
import Data.Time.Clock
import Distribution.System (Arch)
import Distribution.Text (display)
import GHC.Generics
import Path (Path, Abs, File, Dir, mkRelDir, toFilePath, parseRelDir, (</>))
import Prelude
import Stack.Types.FlagName
import Stack.Types.GhcPkgId
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import System.Exit (ExitCode)
import System.FilePath (dropTrailingPathSeparator, pathSeparator)
----------------------------------------------
-- Exceptions
data StackBuildException
= Couldn'tFindPkgId PackageName
| CompilerVersionMismatch
(Maybe (CompilerVersion, Arch))
(CompilerVersion, Arch)
VersionCheck
        (Maybe (Path Abs File)) -- path to the stack.yaml file
        Text -- recommended resolution
| Couldn'tParseTargets [Text]
| UnknownTargets
(Set PackageName) -- no known version
(Map PackageName Version) -- not in snapshot, here's the most recent version in the index
(Path Abs File) -- stack.yaml
| TestSuiteFailure PackageIdentifier (Map Text (Maybe ExitCode)) (Maybe (Path Abs File)) S.ByteString
| ConstructPlanExceptions
[ConstructPlanException]
(Path Abs File) -- stack.yaml
| CabalExitedUnsuccessfully
ExitCode
PackageIdentifier
(Path Abs File) -- cabal Executable
[String] -- cabal arguments
(Maybe (Path Abs File)) -- logfiles location
S.ByteString -- log contents
| ExecutionFailure [SomeException]
| LocalPackageDoesn'tMatchTarget
PackageName
Version -- local version
Version -- version specified on command line
| NoSetupHsFound (Path Abs Dir)
| InvalidFlagSpecification (Set UnusedFlags)
| TargetParseException [Text]
deriving Typeable
data FlagSource = FSCommandLine | FSStackYaml
deriving (Show, Eq, Ord)
data UnusedFlags = UFNoPackage FlagSource PackageName
| UFFlagsNotDefined FlagSource Package (Set FlagName)
| UFSnapshot PackageName
deriving (Show, Eq, Ord)
instance Show StackBuildException where
show (Couldn'tFindPkgId name) =
("After installing " <> packageNameString name <>
", the package id couldn't be found " <> "(via ghc-pkg describe " <>
packageNameString name <> "). This shouldn't happen, " <>
"please report as a bug")
show (CompilerVersionMismatch mactual (expected, earch) check mstack resolution) = concat
[ case mactual of
Nothing -> "No compiler found, expected "
Just (actual, arch) -> concat
[ "Compiler version mismatched, found "
, T.unpack (compilerVersionName actual)
, " ("
, display arch
, ")"
, ", but expected "
]
, case check of
MatchMinor -> "minor version match with "
MatchExact -> "exact version "
NewerMinor -> "minor version match or newer with "
, T.unpack (compilerVersionName expected)
, " ("
, display earch
, ") (based on "
, case mstack of
Nothing -> "command line arguments"
Just stack -> "resolver setting in " ++ toFilePath stack
, "). "
, T.unpack resolution
]
show (Couldn'tParseTargets targets) = unlines
$ "The following targets could not be parsed as package names or directories:"
: map T.unpack targets
show (UnknownTargets noKnown notInSnapshot stackYaml) =
unlines $ noKnown' ++ notInSnapshot'
where
noKnown'
| Set.null noKnown = []
| otherwise = return $
"The following target packages were not found: " ++
intercalate ", " (map packageNameString $ Set.toList noKnown)
notInSnapshot'
| Map.null notInSnapshot = []
| otherwise =
"The following packages are not in your snapshot, but exist"
: "in your package index. Recommended action: add them to your"
: ("extra-deps in " ++ toFilePath stackYaml)
: "(Note: these are the most recent versions,"
: "but there's no guarantee that they'll build together)."
: ""
: map
(\(name, version) -> "- " ++ packageIdentifierString
(PackageIdentifier name version))
(Map.toList notInSnapshot)
show (TestSuiteFailure ident codes mlogFile bs) = unlines $ concat
[ ["Test suite failure for package " ++ packageIdentifierString ident]
, flip map (Map.toList codes) $ \(name, mcode) -> concat
[ " "
, T.unpack name
, ": "
, case mcode of
Nothing -> " executable not found"
Just ec -> " exited with: " ++ show ec
]
, return $ case mlogFile of
Nothing -> "Logs printed to console"
-- TODO Should we load up the full error output and print it here?
Just logFile -> "Full log available at " ++ toFilePath logFile
, if S.null bs
then []
else ["", "", doubleIndent $ T.unpack $ decodeUtf8With lenientDecode bs]
]
where
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
doubleIndent = indent . indent
show (ConstructPlanExceptions exceptions stackYaml) =
"While constructing the BuildPlan the following exceptions were encountered:" ++
appendExceptions exceptions' ++
if Map.null extras then "" else (unlines
$ ("\n\nRecommended action: try adding the following to your extra-deps in "
++ toFilePath stackYaml)
: map (\(name, version) -> concat
[ "- "
, packageNameString name
, "-"
, versionString version
]) (Map.toList extras)
++ ["", "You may also want to try the 'stack solver' command"]
)
where
exceptions' = removeDuplicates exceptions
appendExceptions = foldr (\e -> (++) ("\n\n--" ++ show e)) ""
removeDuplicates = nub
extras = Map.unions $ map getExtras exceptions'
getExtras (DependencyCycleDetected _) = Map.empty
getExtras (UnknownPackage _) = Map.empty
getExtras (DependencyPlanFailures _ m) =
Map.unions $ map go $ Map.toList m
where
go (name, (_range, Just version, NotInBuildPlan)) =
Map.singleton name version
go _ = Map.empty
    -- Suppressing duplicate output
show (CabalExitedUnsuccessfully exitCode taskProvides' execName fullArgs logFiles bs) =
let fullCmd = (dropQuotes (toFilePath execName) ++ " " ++ (unwords fullArgs))
logLocations = maybe "" (\fp -> "\n Logs have been written to: " ++ toFilePath fp) logFiles
in "\n-- While building package " ++ dropQuotes (show taskProvides') ++ " using:\n" ++
" " ++ fullCmd ++ "\n" ++
" Process exited with code: " ++ show exitCode ++
logLocations ++
(if S.null bs
then ""
else "\n\n" ++ doubleIndent (T.unpack $ decodeUtf8With lenientDecode bs))
where
-- appendLines = foldr (\pName-> (++) ("\n" ++ show pName)) ""
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
dropQuotes = filter ('\"' /=)
doubleIndent = indent . indent
show (ExecutionFailure es) = intercalate "\n\n" $ map show es
show (LocalPackageDoesn'tMatchTarget name localV requestedV) = concat
[ "Version for local package "
, packageNameString name
, " is "
, versionString localV
, ", but you asked for "
, versionString requestedV
, " on the command line"
]
show (NoSetupHsFound dir) =
"No Setup.hs or Setup.lhs file found in " ++ toFilePath dir
show (InvalidFlagSpecification unused) = unlines
$ "Invalid flag specification:"
: map go (Set.toList unused)
where
showFlagSrc :: FlagSource -> String
showFlagSrc FSCommandLine = " (specified on command line)"
showFlagSrc FSStackYaml = " (specified in stack.yaml)"
go :: UnusedFlags -> String
go (UFNoPackage src name) = concat
[ "- Package '"
, packageNameString name
, "' not found"
, showFlagSrc src
]
go (UFFlagsNotDefined src pkg flags) = concat
[ "- Package '"
, name
, "' does not define the following flags"
, showFlagSrc src
, ":\n"
, intercalate "\n"
(map (\flag -> " " ++ flagNameString flag)
(Set.toList flags))
, "\n- Flags defined by package '" ++ name ++ "':\n"
, intercalate "\n"
(map (\flag -> " " ++ name ++ ":" ++ flagNameString flag)
(Set.toList pkgFlags))
]
where name = packageNameString (packageName pkg)
pkgFlags = packageDefinedFlags pkg
go (UFSnapshot name) = concat
[ "- Attempted to set flag on snapshot package "
, packageNameString name
, ", please add to extra-deps"
]
show (TargetParseException [err]) = "Error parsing targets: " ++ T.unpack err
show (TargetParseException errs) = unlines
$ "The following errors occurred while parsing the build targets:"
: map (("- " ++) . T.unpack) errs
instance Exception StackBuildException
data ConstructPlanException
= DependencyCycleDetected [PackageName]
| DependencyPlanFailures PackageIdentifier (Map PackageName (VersionRange, LatestVersion, BadDependency))
| UnknownPackage PackageName -- TODO perhaps this constructor will be removed, and BadDependency will handle it all
-- ^ Recommend adding to extra-deps, give a helpful version number?
deriving (Typeable, Eq)
-- | For display purposes only, Nothing if package not found
type LatestVersion = Maybe Version
-- | Reason why a dependency was not used
data BadDependency
= NotInBuildPlan
| Couldn'tResolveItsDependencies
| DependencyMismatch Version
deriving (Typeable, Eq)
instance Show ConstructPlanException where
show e =
let details = case e of
(DependencyCycleDetected pNames) ->
"While checking call stack,\n" ++
" dependency cycle detected in packages:" ++ indent (appendLines pNames)
(DependencyPlanFailures pIdent (Map.toList -> pDeps)) ->
"Failure when adding dependencies:" ++ doubleIndent (appendDeps pDeps) ++ "\n" ++
" needed for package: " ++ packageIdentifierString pIdent
(UnknownPackage pName) ->
"While attempting to add dependency,\n" ++
" Could not find package " ++ show pName ++ " in known packages"
in indent details
where
appendLines = foldr (\pName-> (++) ("\n" ++ show pName)) ""
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
doubleIndent = indent . indent
appendDeps = foldr (\dep-> (++) ("\n" ++ showDep dep)) ""
showDep (name, (range, mlatest, badDep)) = concat
[ show name
, ": needed ("
, display range
, ")"
, case mlatest of
Nothing -> ""
Just latest -> ", latest is " ++ versionString latest
, ", but "
, case badDep of
NotInBuildPlan -> "not present in build plan"
Couldn'tResolveItsDependencies -> "couldn't resolve its dependencies"
DependencyMismatch version -> versionString version ++ " found"
]
{- TODO Perhaps change the showDep function to look more like this:
dropQuotes = filter ((/=) '\"')
(VersionOutsideRange pName pIdentifier versionRange) ->
"Exception: Stack.Build.VersionOutsideRange\n" ++
" While adding dependency for package " ++ show pName ++ ",\n" ++
" " ++ dropQuotes (show pIdentifier) ++ " was found to be outside its allowed version range.\n" ++
" Allowed version range is " ++ display versionRange ++ ",\n" ++
" should you correct the version range for " ++ dropQuotes (show pIdentifier) ++ ", found in [extra-deps] in the project's stack.yaml?"
-}
----------------------------------------------
-- | Which subset of packages to build
data BuildSubset
= BSAll
| BSOnlySnapshot
-- ^ Only install packages in the snapshot database, skipping
-- packages intended for the local database.
| BSOnlyDependencies
deriving Show
-- | Configuration for building.
data BuildOpts =
BuildOpts {boptsTargets :: ![Text]
,boptsLibProfile :: !Bool
,boptsExeProfile :: !Bool
,boptsEnableOptimizations :: !(Maybe Bool)
,boptsHaddock :: !Bool
-- ^ Build haddocks?
,boptsHaddockDeps :: !(Maybe Bool)
-- ^ Build haddocks for dependencies?
,boptsDryrun :: !Bool
,boptsGhcOptions :: ![Text]
,boptsFlags :: !(Map (Maybe PackageName) (Map FlagName Bool))
,boptsInstallExes :: !Bool
-- ^ Install executables to user path after building?
,boptsPreFetch :: !Bool
-- ^ Fetch all packages immediately
,boptsBuildSubset :: !BuildSubset
,boptsFileWatch :: !Bool
-- ^ Watch files for changes and automatically rebuild
,boptsKeepGoing :: !(Maybe Bool)
-- ^ Keep building/running after failure
,boptsForceDirty :: !Bool
-- ^ Force treating all local packages as having dirty files
,boptsTests :: !Bool
-- ^ Turn on tests for local targets
,boptsTestOpts :: !TestOpts
-- ^ Additional test arguments
,boptsBenchmarks :: !Bool
-- ^ Turn on benchmarks for local targets
,boptsBenchmarkOpts :: !BenchmarkOpts
              -- ^ Additional benchmark arguments
,boptsExec :: ![(String, [String])]
-- ^ Commands (with arguments) to run after a successful build
,boptsOnlyConfigure :: !Bool
-- ^ Only perform the configure step when building
}
deriving (Show)
defaultBuildOpts :: BuildOpts
defaultBuildOpts = BuildOpts
{ boptsTargets = []
, boptsLibProfile = False
, boptsExeProfile = False
, boptsEnableOptimizations = Nothing
, boptsHaddock = False
, boptsHaddockDeps = Nothing
, boptsDryrun = False
, boptsGhcOptions = []
, boptsFlags = Map.empty
, boptsInstallExes = False
, boptsPreFetch = False
, boptsBuildSubset = BSAll
, boptsFileWatch = False
, boptsKeepGoing = Nothing
, boptsForceDirty = False
, boptsTests = False
, boptsTestOpts = defaultTestOpts
, boptsBenchmarks = False
, boptsBenchmarkOpts = defaultBenchmarkOpts
, boptsExec = []
, boptsOnlyConfigure = False
}
-- | Options for the 'FinalAction' 'DoTests'
data TestOpts =
  TestOpts {toRerunTests :: !Bool -- ^ Whether successful tests will be run again
,toAdditionalArgs :: ![String] -- ^ Arguments passed to the test program
,toCoverage :: !Bool -- ^ Generate a code coverage report
,toDisableRun :: !Bool -- ^ Disable running of tests
} deriving (Eq,Show)
defaultTestOpts :: TestOpts
defaultTestOpts = TestOpts
{ toRerunTests = True
, toAdditionalArgs = []
, toCoverage = False
, toDisableRun = False
}
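-- A hedged usage sketch: a 'BuildOpts' value that enables tests with coverage
-- by overriding the defaults above via record-update syntax; 'testCoverageOpts'
-- is a hypothetical name, not part of this module.
--
-- > testCoverageOpts :: BuildOpts
-- > testCoverageOpts = defaultBuildOpts
-- >     { boptsTests = True
-- >     , boptsTestOpts = defaultTestOpts { toCoverage = True }
-- >     }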
-- | Options for the 'FinalAction' 'DoBenchmarks'
data BenchmarkOpts =
BenchmarkOpts {beoAdditionalArgs :: !(Maybe String) -- ^ Arguments passed to the benchmark program
,beoDisableRun :: !Bool -- ^ Disable running of benchmarks
} deriving (Eq,Show)
defaultBenchmarkOpts :: BenchmarkOpts
defaultBenchmarkOpts = BenchmarkOpts
{ beoAdditionalArgs = Nothing
, beoDisableRun = False
}
-- | Package dependency oracle.
newtype PkgDepsOracle =
PkgDeps PackageName
deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- | Stored on disk to know whether the flags have changed or any
-- files have changed.
data ConfigCache = ConfigCache
{ configCacheOpts :: ![S.ByteString]
-- ^ All options used for this package.
, configCacheDeps :: !(Set GhcPkgId)
-- ^ The GhcPkgIds of all of the dependencies. Since Cabal doesn't take
-- the complete GhcPkgId (only a PackageIdentifier) in the configure
-- options, just using the previous value is insufficient to know if
-- dependencies have changed.
, configCacheComponents :: !(Set S.ByteString)
-- ^ The components to be built. It's a bit of a hack to include this in
-- here, as it's not a configure option (just a build option), but this
-- is a convenient way to force compilation when the components change.
, configCacheHaddock :: !Bool
-- ^ Are haddocks to be built?
}
deriving (Generic,Eq,Show)
instance Binary ConfigCache
instance NFData ConfigCache where
rnf = genericRnf
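-- Note (an inference from the comments above, not a definitive description of
-- Stack's behaviour): the 'ConfigCache' stored on disk is compared with a
-- freshly computed one, and any difference in options, dependency ids,
-- components or the haddock flag signals that the package must be
-- reconfigured and rebuilt.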
-- | A task to perform when building
data Task = Task
{ taskProvides :: !PackageIdentifier -- ^ the package/version to be built
, taskType :: !TaskType -- ^ the task type, telling us how to build this
, taskConfigOpts :: !TaskConfigOpts
, taskPresent :: !(Set GhcPkgId) -- ^ GhcPkgIds of already-installed dependencies
}
deriving Show
-- | Given the IDs of any missing packages, produce the configure options
data TaskConfigOpts = TaskConfigOpts
{ tcoMissing :: !(Set PackageIdentifier)
    -- ^ Dependencies for which we don't yet have a GhcPkgId
, tcoOpts :: !(Set GhcPkgId -> [Text])
-- ^ Produce the list of options given the missing @GhcPkgId@s
}
instance Show TaskConfigOpts where
show (TaskConfigOpts missing f) = concat
[ "Missing: "
, show missing
, ". Without those: "
, show $ f Set.empty
]
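-- A hypothetical helper (not part of Stack) illustrating how 'TaskConfigOpts'
-- is meant to be consumed: once the missing dependencies have been resolved to
-- concrete 'GhcPkgId's, the stored function is applied to them to produce the
-- final configure flags.
resolveTaskConfigOpts :: Set GhcPkgId -> TaskConfigOpts -> [Text]
resolveTaskConfigOpts resolved (TaskConfigOpts _missing mkOpts) = mkOpts resolved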
-- | The type of a task, either building local code or something from the
-- package index (upstream)
data TaskType = TTLocal LocalPackage
| TTUpstream Package InstallLocation
deriving Show
taskLocation :: Task -> InstallLocation
taskLocation task =
case taskType task of
TTLocal _ -> Local
TTUpstream _ loc -> loc
-- | A complete plan of what needs to be built and how to do it
data Plan = Plan
{ planTasks :: !(Map PackageName Task)
, planFinals :: !(Map PackageName (Task, LocalPackageTB))
-- ^ Final actions to be taken (test, benchmark, etc)
, planUnregisterLocal :: !(Map GhcPkgId Text)
-- ^ Text is reason we're unregistering, for display only
, planInstallExes :: !(Map Text InstallLocation)
-- ^ Executables that should be installed after successful building
}
deriving Show
-- | Basic information used to calculate what the configure options are
data BaseConfigOpts = BaseConfigOpts
{ bcoSnapDB :: !(Path Abs Dir)
, bcoLocalDB :: !(Path Abs Dir)
, bcoSnapInstallRoot :: !(Path Abs Dir)
, bcoLocalInstallRoot :: !(Path Abs Dir)
, bcoBuildOpts :: !BuildOpts
}
-- | Render a @BaseConfigOpts@ to an actual list of options
configureOpts :: EnvConfig
-> BaseConfigOpts
-> Set GhcPkgId -- ^ dependencies
-> Bool -- ^ wanted?
-> InstallLocation
-> Package
-> [Text]
configureOpts econfig bco deps wanted loc package = map T.pack $ concat
[ ["--user", "--package-db=clear", "--package-db=global"]
, map (("--package-db=" ++) . toFilePath) $ case loc of
Snap -> [bcoSnapDB bco]
Local -> [bcoSnapDB bco, bcoLocalDB bco]
, depOptions
, [ "--libdir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "lib"))
, "--bindir=" ++ toFilePathNoTrailingSlash (installRoot </> bindirSuffix)
, "--datadir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "share"))
, "--docdir=" ++ toFilePathNoTrailingSlash docDir
, "--htmldir=" ++ toFilePathNoTrailingSlash docDir
, "--haddockdir=" ++ toFilePathNoTrailingSlash docDir]
, ["--enable-library-profiling" | boptsLibProfile bopts || boptsExeProfile bopts]
, ["--enable-executable-profiling" | boptsExeProfile bopts]
, map (\(name,enabled) ->
"-f" <>
(if enabled
then ""
else "-") <>
flagNameString name)
(Map.toList (packageFlags package))
-- FIXME Chris: where does this come from now? , ["--ghc-options=-O2" | gconfigOptimize gconfig]
, concatMap (\x -> ["--ghc-options", T.unpack x]) allGhcOptions
, map (("--extra-include-dirs=" ++) . T.unpack) (Set.toList (configExtraIncludeDirs config))
, map (("--extra-lib-dirs=" ++) . T.unpack) (Set.toList (configExtraLibDirs config))
, if whichCompiler (envConfigCompilerVersion econfig) == Ghcjs
then ["--ghcjs"]
else []
]
where
config = getConfig econfig
bopts = bcoBuildOpts bco
toFilePathNoTrailingSlash = dropTrailingPathSeparator . toFilePath
docDir =
case pkgVerDir of
Nothing -> installRoot </> docDirSuffix
Just dir -> installRoot </> docDirSuffix </> dir
installRoot =
case loc of
Snap -> bcoSnapInstallRoot bco
Local -> bcoLocalInstallRoot bco
pkgVerDir =
parseRelDir (packageIdentifierString (PackageIdentifier (packageName package)
(packageVersion package)) ++
[pathSeparator])
depOptions = map toDepOption $ Set.toList deps
where
toDepOption =
if envConfigCabalVersion econfig >= $(mkVersion "1.22")
then toDepOption1_22
else toDepOption1_18
toDepOption1_22 gid = concat
[ "--dependency="
, packageNameString $ packageIdentifierName $ ghcPkgIdPackageIdentifier gid
, "="
, ghcPkgIdString gid
]
toDepOption1_18 gid = concat
[ "--constraint="
, packageNameString name
, "=="
, versionString version
]
where
PackageIdentifier name version = ghcPkgIdPackageIdentifier gid
ghcOptionsMap = configGhcOptions $ getConfig econfig
allGhcOptions = concat
[ fromMaybe [] $ Map.lookup Nothing ghcOptionsMap
, fromMaybe [] $ Map.lookup (Just $ packageName package) ghcOptionsMap
, if wanted
then boptsGhcOptions bopts
else []
]
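-- Illustration with hypothetical values (not captured from a real Stack run):
-- for a dependency whose 'GhcPkgId' is @text-1.2.0.4-abc123@, Cabal >= 1.22
-- receives
--
-- > --dependency=text=text-1.2.0.4-abc123
--
-- whereas older Cabal versions only receive a version constraint:
--
-- > --constraint=text==1.2.0.4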
-- | Get set of wanted package names from locals.
wantedLocalPackages :: [LocalPackage] -> Set PackageName
wantedLocalPackages = Set.fromList . map (packageName . lpPackage) . filter lpWanted
-- | One-way conversion to serialized time.
modTime :: UTCTime -> ModTime
modTime x =
ModTime
( toModifiedJulianDay
(utctDay x)
, toRational
(utctDayTime x))
data Installed = Library GhcPkgId | Executable PackageIdentifier
deriving (Show, Eq, Ord)
| agrafix/stack | src/Stack/Types/Build.hs | bsd-3-clause | 25,817 | 0 | 19 | 8,071 | 4,927 | 2,714 | 2,213 | 596 | 8 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.Version14
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.Version14 (
-- * Types
GLbitfield,
GLboolean,
GLbyte,
GLclampd,
GLclampf,
GLdouble,
GLenum,
GLfloat,
GLint,
GLshort,
GLsizei,
GLubyte,
GLuint,
GLushort,
GLvoid,
-- * Enums
pattern GL_2D,
pattern GL_2_BYTES,
pattern GL_3D,
pattern GL_3D_COLOR,
pattern GL_3D_COLOR_TEXTURE,
pattern GL_3_BYTES,
pattern GL_4D_COLOR_TEXTURE,
pattern GL_4_BYTES,
pattern GL_ACCUM,
pattern GL_ACCUM_ALPHA_BITS,
pattern GL_ACCUM_BLUE_BITS,
pattern GL_ACCUM_BUFFER_BIT,
pattern GL_ACCUM_CLEAR_VALUE,
pattern GL_ACCUM_GREEN_BITS,
pattern GL_ACCUM_RED_BITS,
pattern GL_ACTIVE_TEXTURE,
pattern GL_ADD,
pattern GL_ADD_SIGNED,
pattern GL_ALIASED_LINE_WIDTH_RANGE,
pattern GL_ALIASED_POINT_SIZE_RANGE,
pattern GL_ALL_ATTRIB_BITS,
pattern GL_ALPHA,
pattern GL_ALPHA12,
pattern GL_ALPHA16,
pattern GL_ALPHA4,
pattern GL_ALPHA8,
pattern GL_ALPHA_BIAS,
pattern GL_ALPHA_BITS,
pattern GL_ALPHA_SCALE,
pattern GL_ALPHA_TEST,
pattern GL_ALPHA_TEST_FUNC,
pattern GL_ALPHA_TEST_REF,
pattern GL_ALWAYS,
pattern GL_AMBIENT,
pattern GL_AMBIENT_AND_DIFFUSE,
pattern GL_AND,
pattern GL_AND_INVERTED,
pattern GL_AND_REVERSE,
pattern GL_ATTRIB_STACK_DEPTH,
pattern GL_AUTO_NORMAL,
pattern GL_AUX0,
pattern GL_AUX1,
pattern GL_AUX2,
pattern GL_AUX3,
pattern GL_AUX_BUFFERS,
pattern GL_BACK,
pattern GL_BACK_LEFT,
pattern GL_BACK_RIGHT,
pattern GL_BGR,
pattern GL_BGRA,
pattern GL_BITMAP,
pattern GL_BITMAP_TOKEN,
pattern GL_BLEND,
pattern GL_BLEND_COLOR,
pattern GL_BLEND_DST,
pattern GL_BLEND_DST_ALPHA,
pattern GL_BLEND_DST_RGB,
pattern GL_BLEND_EQUATION,
pattern GL_BLEND_SRC,
pattern GL_BLEND_SRC_ALPHA,
pattern GL_BLEND_SRC_RGB,
pattern GL_BLUE,
pattern GL_BLUE_BIAS,
pattern GL_BLUE_BITS,
pattern GL_BLUE_SCALE,
pattern GL_BYTE,
pattern GL_C3F_V3F,
pattern GL_C4F_N3F_V3F,
pattern GL_C4UB_V2F,
pattern GL_C4UB_V3F,
pattern GL_CCW,
pattern GL_CLAMP,
pattern GL_CLAMP_TO_BORDER,
pattern GL_CLAMP_TO_EDGE,
pattern GL_CLEAR,
pattern GL_CLIENT_ACTIVE_TEXTURE,
pattern GL_CLIENT_ALL_ATTRIB_BITS,
pattern GL_CLIENT_ATTRIB_STACK_DEPTH,
pattern GL_CLIENT_PIXEL_STORE_BIT,
pattern GL_CLIENT_VERTEX_ARRAY_BIT,
pattern GL_CLIP_PLANE0,
pattern GL_CLIP_PLANE1,
pattern GL_CLIP_PLANE2,
pattern GL_CLIP_PLANE3,
pattern GL_CLIP_PLANE4,
pattern GL_CLIP_PLANE5,
pattern GL_COEFF,
pattern GL_COLOR,
pattern GL_COLOR_ARRAY,
pattern GL_COLOR_ARRAY_POINTER,
pattern GL_COLOR_ARRAY_SIZE,
pattern GL_COLOR_ARRAY_STRIDE,
pattern GL_COLOR_ARRAY_TYPE,
pattern GL_COLOR_BUFFER_BIT,
pattern GL_COLOR_CLEAR_VALUE,
pattern GL_COLOR_INDEX,
pattern GL_COLOR_INDEXES,
pattern GL_COLOR_LOGIC_OP,
pattern GL_COLOR_MATERIAL,
pattern GL_COLOR_MATERIAL_FACE,
pattern GL_COLOR_MATERIAL_PARAMETER,
pattern GL_COLOR_SUM,
pattern GL_COLOR_WRITEMASK,
pattern GL_COMBINE,
pattern GL_COMBINE_ALPHA,
pattern GL_COMBINE_RGB,
pattern GL_COMPARE_R_TO_TEXTURE,
pattern GL_COMPILE,
pattern GL_COMPILE_AND_EXECUTE,
pattern GL_COMPRESSED_ALPHA,
pattern GL_COMPRESSED_INTENSITY,
pattern GL_COMPRESSED_LUMINANCE,
pattern GL_COMPRESSED_LUMINANCE_ALPHA,
pattern GL_COMPRESSED_RGB,
pattern GL_COMPRESSED_RGBA,
pattern GL_COMPRESSED_TEXTURE_FORMATS,
pattern GL_CONSTANT,
pattern GL_CONSTANT_ALPHA,
pattern GL_CONSTANT_ATTENUATION,
pattern GL_CONSTANT_COLOR,
pattern GL_COPY,
pattern GL_COPY_INVERTED,
pattern GL_COPY_PIXEL_TOKEN,
pattern GL_CULL_FACE,
pattern GL_CULL_FACE_MODE,
pattern GL_CURRENT_BIT,
pattern GL_CURRENT_COLOR,
pattern GL_CURRENT_FOG_COORDINATE,
pattern GL_CURRENT_INDEX,
pattern GL_CURRENT_NORMAL,
pattern GL_CURRENT_RASTER_COLOR,
pattern GL_CURRENT_RASTER_DISTANCE,
pattern GL_CURRENT_RASTER_INDEX,
pattern GL_CURRENT_RASTER_POSITION,
pattern GL_CURRENT_RASTER_POSITION_VALID,
pattern GL_CURRENT_RASTER_TEXTURE_COORDS,
pattern GL_CURRENT_SECONDARY_COLOR,
pattern GL_CURRENT_TEXTURE_COORDS,
pattern GL_CW,
pattern GL_DECAL,
pattern GL_DECR,
pattern GL_DECR_WRAP,
pattern GL_DEPTH,
pattern GL_DEPTH_BIAS,
pattern GL_DEPTH_BITS,
pattern GL_DEPTH_BUFFER_BIT,
pattern GL_DEPTH_CLEAR_VALUE,
pattern GL_DEPTH_COMPONENT,
pattern GL_DEPTH_COMPONENT16,
pattern GL_DEPTH_COMPONENT24,
pattern GL_DEPTH_COMPONENT32,
pattern GL_DEPTH_FUNC,
pattern GL_DEPTH_RANGE,
pattern GL_DEPTH_SCALE,
pattern GL_DEPTH_TEST,
pattern GL_DEPTH_TEXTURE_MODE,
pattern GL_DEPTH_WRITEMASK,
pattern GL_DIFFUSE,
pattern GL_DITHER,
pattern GL_DOMAIN,
pattern GL_DONT_CARE,
pattern GL_DOT3_RGB,
pattern GL_DOT3_RGBA,
pattern GL_DOUBLE,
pattern GL_DOUBLEBUFFER,
pattern GL_DRAW_BUFFER,
pattern GL_DRAW_PIXEL_TOKEN,
pattern GL_DST_ALPHA,
pattern GL_DST_COLOR,
pattern GL_EDGE_FLAG,
pattern GL_EDGE_FLAG_ARRAY,
pattern GL_EDGE_FLAG_ARRAY_POINTER,
pattern GL_EDGE_FLAG_ARRAY_STRIDE,
pattern GL_EMISSION,
pattern GL_ENABLE_BIT,
pattern GL_EQUAL,
pattern GL_EQUIV,
pattern GL_EVAL_BIT,
pattern GL_EXP,
pattern GL_EXP2,
pattern GL_EXTENSIONS,
pattern GL_EYE_LINEAR,
pattern GL_EYE_PLANE,
pattern GL_FALSE,
pattern GL_FASTEST,
pattern GL_FEEDBACK,
pattern GL_FEEDBACK_BUFFER_POINTER,
pattern GL_FEEDBACK_BUFFER_SIZE,
pattern GL_FEEDBACK_BUFFER_TYPE,
pattern GL_FILL,
pattern GL_FLAT,
pattern GL_FLOAT,
pattern GL_FOG,
pattern GL_FOG_BIT,
pattern GL_FOG_COLOR,
pattern GL_FOG_COORDINATE,
pattern GL_FOG_COORDINATE_ARRAY,
pattern GL_FOG_COORDINATE_ARRAY_POINTER,
pattern GL_FOG_COORDINATE_ARRAY_STRIDE,
pattern GL_FOG_COORDINATE_ARRAY_TYPE,
pattern GL_FOG_COORDINATE_SOURCE,
pattern GL_FOG_DENSITY,
pattern GL_FOG_END,
pattern GL_FOG_HINT,
pattern GL_FOG_INDEX,
pattern GL_FOG_MODE,
pattern GL_FOG_START,
pattern GL_FRAGMENT_DEPTH,
pattern GL_FRONT,
pattern GL_FRONT_AND_BACK,
pattern GL_FRONT_FACE,
pattern GL_FRONT_LEFT,
pattern GL_FRONT_RIGHT,
pattern GL_FUNC_ADD,
pattern GL_FUNC_REVERSE_SUBTRACT,
pattern GL_FUNC_SUBTRACT,
pattern GL_GENERATE_MIPMAP,
pattern GL_GENERATE_MIPMAP_HINT,
pattern GL_GEQUAL,
pattern GL_GREATER,
pattern GL_GREEN,
pattern GL_GREEN_BIAS,
pattern GL_GREEN_BITS,
pattern GL_GREEN_SCALE,
pattern GL_HINT_BIT,
pattern GL_INCR,
pattern GL_INCR_WRAP,
pattern GL_INDEX_ARRAY,
pattern GL_INDEX_ARRAY_POINTER,
pattern GL_INDEX_ARRAY_STRIDE,
pattern GL_INDEX_ARRAY_TYPE,
pattern GL_INDEX_BITS,
pattern GL_INDEX_CLEAR_VALUE,
pattern GL_INDEX_LOGIC_OP,
pattern GL_INDEX_MODE,
pattern GL_INDEX_OFFSET,
pattern GL_INDEX_SHIFT,
pattern GL_INDEX_WRITEMASK,
pattern GL_INT,
pattern GL_INTENSITY,
pattern GL_INTENSITY12,
pattern GL_INTENSITY16,
pattern GL_INTENSITY4,
pattern GL_INTENSITY8,
pattern GL_INTERPOLATE,
pattern GL_INVALID_ENUM,
pattern GL_INVALID_OPERATION,
pattern GL_INVALID_VALUE,
pattern GL_INVERT,
pattern GL_KEEP,
pattern GL_LEFT,
pattern GL_LEQUAL,
pattern GL_LESS,
pattern GL_LIGHT0,
pattern GL_LIGHT1,
pattern GL_LIGHT2,
pattern GL_LIGHT3,
pattern GL_LIGHT4,
pattern GL_LIGHT5,
pattern GL_LIGHT6,
pattern GL_LIGHT7,
pattern GL_LIGHTING,
pattern GL_LIGHTING_BIT,
pattern GL_LIGHT_MODEL_AMBIENT,
pattern GL_LIGHT_MODEL_COLOR_CONTROL,
pattern GL_LIGHT_MODEL_LOCAL_VIEWER,
pattern GL_LIGHT_MODEL_TWO_SIDE,
pattern GL_LINE,
pattern GL_LINEAR,
pattern GL_LINEAR_ATTENUATION,
pattern GL_LINEAR_MIPMAP_LINEAR,
pattern GL_LINEAR_MIPMAP_NEAREST,
pattern GL_LINES,
pattern GL_LINE_BIT,
pattern GL_LINE_LOOP,
pattern GL_LINE_RESET_TOKEN,
pattern GL_LINE_SMOOTH,
pattern GL_LINE_SMOOTH_HINT,
pattern GL_LINE_STIPPLE,
pattern GL_LINE_STIPPLE_PATTERN,
pattern GL_LINE_STIPPLE_REPEAT,
pattern GL_LINE_STRIP,
pattern GL_LINE_TOKEN,
pattern GL_LINE_WIDTH,
pattern GL_LINE_WIDTH_GRANULARITY,
pattern GL_LINE_WIDTH_RANGE,
pattern GL_LIST_BASE,
pattern GL_LIST_BIT,
pattern GL_LIST_INDEX,
pattern GL_LIST_MODE,
pattern GL_LOAD,
pattern GL_LOGIC_OP,
pattern GL_LOGIC_OP_MODE,
pattern GL_LUMINANCE,
pattern GL_LUMINANCE12,
pattern GL_LUMINANCE12_ALPHA12,
pattern GL_LUMINANCE12_ALPHA4,
pattern GL_LUMINANCE16,
pattern GL_LUMINANCE16_ALPHA16,
pattern GL_LUMINANCE4,
pattern GL_LUMINANCE4_ALPHA4,
pattern GL_LUMINANCE6_ALPHA2,
pattern GL_LUMINANCE8,
pattern GL_LUMINANCE8_ALPHA8,
pattern GL_LUMINANCE_ALPHA,
pattern GL_MAP1_COLOR_4,
pattern GL_MAP1_GRID_DOMAIN,
pattern GL_MAP1_GRID_SEGMENTS,
pattern GL_MAP1_INDEX,
pattern GL_MAP1_NORMAL,
pattern GL_MAP1_TEXTURE_COORD_1,
pattern GL_MAP1_TEXTURE_COORD_2,
pattern GL_MAP1_TEXTURE_COORD_3,
pattern GL_MAP1_TEXTURE_COORD_4,
pattern GL_MAP1_VERTEX_3,
pattern GL_MAP1_VERTEX_4,
pattern GL_MAP2_COLOR_4,
pattern GL_MAP2_GRID_DOMAIN,
pattern GL_MAP2_GRID_SEGMENTS,
pattern GL_MAP2_INDEX,
pattern GL_MAP2_NORMAL,
pattern GL_MAP2_TEXTURE_COORD_1,
pattern GL_MAP2_TEXTURE_COORD_2,
pattern GL_MAP2_TEXTURE_COORD_3,
pattern GL_MAP2_TEXTURE_COORD_4,
pattern GL_MAP2_VERTEX_3,
pattern GL_MAP2_VERTEX_4,
pattern GL_MAP_COLOR,
pattern GL_MAP_STENCIL,
pattern GL_MATRIX_MODE,
pattern GL_MAX,
pattern GL_MAX_3D_TEXTURE_SIZE,
pattern GL_MAX_ATTRIB_STACK_DEPTH,
pattern GL_MAX_CLIENT_ATTRIB_STACK_DEPTH,
pattern GL_MAX_CLIP_PLANES,
pattern GL_MAX_CUBE_MAP_TEXTURE_SIZE,
pattern GL_MAX_ELEMENTS_INDICES,
pattern GL_MAX_ELEMENTS_VERTICES,
pattern GL_MAX_EVAL_ORDER,
pattern GL_MAX_LIGHTS,
pattern GL_MAX_LIST_NESTING,
pattern GL_MAX_MODELVIEW_STACK_DEPTH,
pattern GL_MAX_NAME_STACK_DEPTH,
pattern GL_MAX_PIXEL_MAP_TABLE,
pattern GL_MAX_PROJECTION_STACK_DEPTH,
pattern GL_MAX_TEXTURE_LOD_BIAS,
pattern GL_MAX_TEXTURE_SIZE,
pattern GL_MAX_TEXTURE_STACK_DEPTH,
pattern GL_MAX_TEXTURE_UNITS,
pattern GL_MAX_VIEWPORT_DIMS,
pattern GL_MIN,
pattern GL_MIRRORED_REPEAT,
pattern GL_MODELVIEW,
pattern GL_MODELVIEW_MATRIX,
pattern GL_MODELVIEW_STACK_DEPTH,
pattern GL_MODULATE,
pattern GL_MULT,
pattern GL_MULTISAMPLE,
pattern GL_MULTISAMPLE_BIT,
pattern GL_N3F_V3F,
pattern GL_NAME_STACK_DEPTH,
pattern GL_NAND,
pattern GL_NEAREST,
pattern GL_NEAREST_MIPMAP_LINEAR,
pattern GL_NEAREST_MIPMAP_NEAREST,
pattern GL_NEVER,
pattern GL_NICEST,
pattern GL_NONE,
pattern GL_NOOP,
pattern GL_NOR,
pattern GL_NORMALIZE,
pattern GL_NORMAL_ARRAY,
pattern GL_NORMAL_ARRAY_POINTER,
pattern GL_NORMAL_ARRAY_STRIDE,
pattern GL_NORMAL_ARRAY_TYPE,
pattern GL_NORMAL_MAP,
pattern GL_NOTEQUAL,
pattern GL_NO_ERROR,
pattern GL_NUM_COMPRESSED_TEXTURE_FORMATS,
pattern GL_OBJECT_LINEAR,
pattern GL_OBJECT_PLANE,
pattern GL_ONE,
pattern GL_ONE_MINUS_CONSTANT_ALPHA,
pattern GL_ONE_MINUS_CONSTANT_COLOR,
pattern GL_ONE_MINUS_DST_ALPHA,
pattern GL_ONE_MINUS_DST_COLOR,
pattern GL_ONE_MINUS_SRC_ALPHA,
pattern GL_ONE_MINUS_SRC_COLOR,
pattern GL_OPERAND0_ALPHA,
pattern GL_OPERAND0_RGB,
pattern GL_OPERAND1_ALPHA,
pattern GL_OPERAND1_RGB,
pattern GL_OPERAND2_ALPHA,
pattern GL_OPERAND2_RGB,
pattern GL_OR,
pattern GL_ORDER,
pattern GL_OR_INVERTED,
pattern GL_OR_REVERSE,
pattern GL_OUT_OF_MEMORY,
pattern GL_PACK_ALIGNMENT,
pattern GL_PACK_IMAGE_HEIGHT,
pattern GL_PACK_LSB_FIRST,
pattern GL_PACK_ROW_LENGTH,
pattern GL_PACK_SKIP_IMAGES,
pattern GL_PACK_SKIP_PIXELS,
pattern GL_PACK_SKIP_ROWS,
pattern GL_PACK_SWAP_BYTES,
pattern GL_PASS_THROUGH_TOKEN,
pattern GL_PERSPECTIVE_CORRECTION_HINT,
pattern GL_PIXEL_MAP_A_TO_A,
pattern GL_PIXEL_MAP_A_TO_A_SIZE,
pattern GL_PIXEL_MAP_B_TO_B,
pattern GL_PIXEL_MAP_B_TO_B_SIZE,
pattern GL_PIXEL_MAP_G_TO_G,
pattern GL_PIXEL_MAP_G_TO_G_SIZE,
pattern GL_PIXEL_MAP_I_TO_A,
pattern GL_PIXEL_MAP_I_TO_A_SIZE,
pattern GL_PIXEL_MAP_I_TO_B,
pattern GL_PIXEL_MAP_I_TO_B_SIZE,
pattern GL_PIXEL_MAP_I_TO_G,
pattern GL_PIXEL_MAP_I_TO_G_SIZE,
pattern GL_PIXEL_MAP_I_TO_I,
pattern GL_PIXEL_MAP_I_TO_I_SIZE,
pattern GL_PIXEL_MAP_I_TO_R,
pattern GL_PIXEL_MAP_I_TO_R_SIZE,
pattern GL_PIXEL_MAP_R_TO_R,
pattern GL_PIXEL_MAP_R_TO_R_SIZE,
pattern GL_PIXEL_MAP_S_TO_S,
pattern GL_PIXEL_MAP_S_TO_S_SIZE,
pattern GL_PIXEL_MODE_BIT,
pattern GL_POINT,
pattern GL_POINTS,
pattern GL_POINT_BIT,
pattern GL_POINT_DISTANCE_ATTENUATION,
pattern GL_POINT_FADE_THRESHOLD_SIZE,
pattern GL_POINT_SIZE,
pattern GL_POINT_SIZE_GRANULARITY,
pattern GL_POINT_SIZE_MAX,
pattern GL_POINT_SIZE_MIN,
pattern GL_POINT_SIZE_RANGE,
pattern GL_POINT_SMOOTH,
pattern GL_POINT_SMOOTH_HINT,
pattern GL_POINT_TOKEN,
pattern GL_POLYGON,
pattern GL_POLYGON_BIT,
pattern GL_POLYGON_MODE,
pattern GL_POLYGON_OFFSET_FACTOR,
pattern GL_POLYGON_OFFSET_FILL,
pattern GL_POLYGON_OFFSET_LINE,
pattern GL_POLYGON_OFFSET_POINT,
pattern GL_POLYGON_OFFSET_UNITS,
pattern GL_POLYGON_SMOOTH,
pattern GL_POLYGON_SMOOTH_HINT,
pattern GL_POLYGON_STIPPLE,
pattern GL_POLYGON_STIPPLE_BIT,
pattern GL_POLYGON_TOKEN,
pattern GL_POSITION,
pattern GL_PREVIOUS,
pattern GL_PRIMARY_COLOR,
pattern GL_PROJECTION,
pattern GL_PROJECTION_MATRIX,
pattern GL_PROJECTION_STACK_DEPTH,
pattern GL_PROXY_TEXTURE_1D,
pattern GL_PROXY_TEXTURE_2D,
pattern GL_PROXY_TEXTURE_3D,
pattern GL_PROXY_TEXTURE_CUBE_MAP,
pattern GL_Q,
pattern GL_QUADRATIC_ATTENUATION,
pattern GL_QUADS,
pattern GL_QUAD_STRIP,
pattern GL_R,
pattern GL_R3_G3_B2,
pattern GL_READ_BUFFER,
pattern GL_RED,
pattern GL_RED_BIAS,
pattern GL_RED_BITS,
pattern GL_RED_SCALE,
pattern GL_REFLECTION_MAP,
pattern GL_RENDER,
pattern GL_RENDERER,
pattern GL_RENDER_MODE,
pattern GL_REPEAT,
pattern GL_REPLACE,
pattern GL_RESCALE_NORMAL,
pattern GL_RETURN,
pattern GL_RGB,
pattern GL_RGB10,
pattern GL_RGB10_A2,
pattern GL_RGB12,
pattern GL_RGB16,
pattern GL_RGB4,
pattern GL_RGB5,
pattern GL_RGB5_A1,
pattern GL_RGB8,
pattern GL_RGBA,
pattern GL_RGBA12,
pattern GL_RGBA16,
pattern GL_RGBA2,
pattern GL_RGBA4,
pattern GL_RGBA8,
pattern GL_RGBA_MODE,
pattern GL_RGB_SCALE,
pattern GL_RIGHT,
pattern GL_S,
pattern GL_SAMPLES,
pattern GL_SAMPLE_ALPHA_TO_COVERAGE,
pattern GL_SAMPLE_ALPHA_TO_ONE,
pattern GL_SAMPLE_BUFFERS,
pattern GL_SAMPLE_COVERAGE,
pattern GL_SAMPLE_COVERAGE_INVERT,
pattern GL_SAMPLE_COVERAGE_VALUE,
pattern GL_SCISSOR_BIT,
pattern GL_SCISSOR_BOX,
pattern GL_SCISSOR_TEST,
pattern GL_SECONDARY_COLOR_ARRAY,
pattern GL_SECONDARY_COLOR_ARRAY_POINTER,
pattern GL_SECONDARY_COLOR_ARRAY_SIZE,
pattern GL_SECONDARY_COLOR_ARRAY_STRIDE,
pattern GL_SECONDARY_COLOR_ARRAY_TYPE,
pattern GL_SELECT,
pattern GL_SELECTION_BUFFER_POINTER,
pattern GL_SELECTION_BUFFER_SIZE,
pattern GL_SEPARATE_SPECULAR_COLOR,
pattern GL_SET,
pattern GL_SHADE_MODEL,
pattern GL_SHININESS,
pattern GL_SHORT,
pattern GL_SINGLE_COLOR,
pattern GL_SMOOTH,
pattern GL_SMOOTH_LINE_WIDTH_GRANULARITY,
pattern GL_SMOOTH_LINE_WIDTH_RANGE,
pattern GL_SMOOTH_POINT_SIZE_GRANULARITY,
pattern GL_SMOOTH_POINT_SIZE_RANGE,
pattern GL_SOURCE0_ALPHA,
pattern GL_SOURCE0_RGB,
pattern GL_SOURCE1_ALPHA,
pattern GL_SOURCE1_RGB,
pattern GL_SOURCE2_ALPHA,
pattern GL_SOURCE2_RGB,
pattern GL_SPECULAR,
pattern GL_SPHERE_MAP,
pattern GL_SPOT_CUTOFF,
pattern GL_SPOT_DIRECTION,
pattern GL_SPOT_EXPONENT,
pattern GL_SRC_ALPHA,
pattern GL_SRC_ALPHA_SATURATE,
pattern GL_SRC_COLOR,
pattern GL_STACK_OVERFLOW,
pattern GL_STACK_UNDERFLOW,
pattern GL_STENCIL,
pattern GL_STENCIL_BITS,
pattern GL_STENCIL_BUFFER_BIT,
pattern GL_STENCIL_CLEAR_VALUE,
pattern GL_STENCIL_FAIL,
pattern GL_STENCIL_FUNC,
pattern GL_STENCIL_INDEX,
pattern GL_STENCIL_PASS_DEPTH_FAIL,
pattern GL_STENCIL_PASS_DEPTH_PASS,
pattern GL_STENCIL_REF,
pattern GL_STENCIL_TEST,
pattern GL_STENCIL_VALUE_MASK,
pattern GL_STENCIL_WRITEMASK,
pattern GL_STEREO,
pattern GL_SUBPIXEL_BITS,
pattern GL_SUBTRACT,
pattern GL_T,
pattern GL_T2F_C3F_V3F,
pattern GL_T2F_C4F_N3F_V3F,
pattern GL_T2F_C4UB_V3F,
pattern GL_T2F_N3F_V3F,
pattern GL_T2F_V3F,
pattern GL_T4F_C4F_N3F_V4F,
pattern GL_T4F_V4F,
pattern GL_TEXTURE,
pattern GL_TEXTURE0,
pattern GL_TEXTURE1,
pattern GL_TEXTURE10,
pattern GL_TEXTURE11,
pattern GL_TEXTURE12,
pattern GL_TEXTURE13,
pattern GL_TEXTURE14,
pattern GL_TEXTURE15,
pattern GL_TEXTURE16,
pattern GL_TEXTURE17,
pattern GL_TEXTURE18,
pattern GL_TEXTURE19,
pattern GL_TEXTURE2,
pattern GL_TEXTURE20,
pattern GL_TEXTURE21,
pattern GL_TEXTURE22,
pattern GL_TEXTURE23,
pattern GL_TEXTURE24,
pattern GL_TEXTURE25,
pattern GL_TEXTURE26,
pattern GL_TEXTURE27,
pattern GL_TEXTURE28,
pattern GL_TEXTURE29,
pattern GL_TEXTURE3,
pattern GL_TEXTURE30,
pattern GL_TEXTURE31,
pattern GL_TEXTURE4,
pattern GL_TEXTURE5,
pattern GL_TEXTURE6,
pattern GL_TEXTURE7,
pattern GL_TEXTURE8,
pattern GL_TEXTURE9,
pattern GL_TEXTURE_1D,
pattern GL_TEXTURE_2D,
pattern GL_TEXTURE_3D,
pattern GL_TEXTURE_ALPHA_SIZE,
pattern GL_TEXTURE_BASE_LEVEL,
pattern GL_TEXTURE_BINDING_1D,
pattern GL_TEXTURE_BINDING_2D,
pattern GL_TEXTURE_BINDING_3D,
pattern GL_TEXTURE_BINDING_CUBE_MAP,
pattern GL_TEXTURE_BIT,
pattern GL_TEXTURE_BLUE_SIZE,
pattern GL_TEXTURE_BORDER,
pattern GL_TEXTURE_BORDER_COLOR,
pattern GL_TEXTURE_COMPARE_FUNC,
pattern GL_TEXTURE_COMPARE_MODE,
pattern GL_TEXTURE_COMPONENTS,
pattern GL_TEXTURE_COMPRESSED,
pattern GL_TEXTURE_COMPRESSED_IMAGE_SIZE,
pattern GL_TEXTURE_COMPRESSION_HINT,
pattern GL_TEXTURE_COORD_ARRAY,
pattern GL_TEXTURE_COORD_ARRAY_POINTER,
pattern GL_TEXTURE_COORD_ARRAY_SIZE,
pattern GL_TEXTURE_COORD_ARRAY_STRIDE,
pattern GL_TEXTURE_COORD_ARRAY_TYPE,
pattern GL_TEXTURE_CUBE_MAP,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
pattern GL_TEXTURE_CUBE_MAP_NEGATIVE_Z,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_X,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_Y,
pattern GL_TEXTURE_CUBE_MAP_POSITIVE_Z,
pattern GL_TEXTURE_DEPTH,
pattern GL_TEXTURE_DEPTH_SIZE,
pattern GL_TEXTURE_ENV,
pattern GL_TEXTURE_ENV_COLOR,
pattern GL_TEXTURE_ENV_MODE,
pattern GL_TEXTURE_FILTER_CONTROL,
pattern GL_TEXTURE_GEN_MODE,
pattern GL_TEXTURE_GEN_Q,
pattern GL_TEXTURE_GEN_R,
pattern GL_TEXTURE_GEN_S,
pattern GL_TEXTURE_GEN_T,
pattern GL_TEXTURE_GREEN_SIZE,
pattern GL_TEXTURE_HEIGHT,
pattern GL_TEXTURE_INTENSITY_SIZE,
pattern GL_TEXTURE_INTERNAL_FORMAT,
pattern GL_TEXTURE_LOD_BIAS,
pattern GL_TEXTURE_LUMINANCE_SIZE,
pattern GL_TEXTURE_MAG_FILTER,
pattern GL_TEXTURE_MATRIX,
pattern GL_TEXTURE_MAX_LEVEL,
pattern GL_TEXTURE_MAX_LOD,
pattern GL_TEXTURE_MIN_FILTER,
pattern GL_TEXTURE_MIN_LOD,
pattern GL_TEXTURE_PRIORITY,
pattern GL_TEXTURE_RED_SIZE,
pattern GL_TEXTURE_RESIDENT,
pattern GL_TEXTURE_STACK_DEPTH,
pattern GL_TEXTURE_WIDTH,
pattern GL_TEXTURE_WRAP_R,
pattern GL_TEXTURE_WRAP_S,
pattern GL_TEXTURE_WRAP_T,
pattern GL_TRANSFORM_BIT,
pattern GL_TRANSPOSE_COLOR_MATRIX,
pattern GL_TRANSPOSE_MODELVIEW_MATRIX,
pattern GL_TRANSPOSE_PROJECTION_MATRIX,
pattern GL_TRANSPOSE_TEXTURE_MATRIX,
pattern GL_TRIANGLES,
pattern GL_TRIANGLE_FAN,
pattern GL_TRIANGLE_STRIP,
pattern GL_TRUE,
pattern GL_UNPACK_ALIGNMENT,
pattern GL_UNPACK_IMAGE_HEIGHT,
pattern GL_UNPACK_LSB_FIRST,
pattern GL_UNPACK_ROW_LENGTH,
pattern GL_UNPACK_SKIP_IMAGES,
pattern GL_UNPACK_SKIP_PIXELS,
pattern GL_UNPACK_SKIP_ROWS,
pattern GL_UNPACK_SWAP_BYTES,
pattern GL_UNSIGNED_BYTE,
pattern GL_UNSIGNED_BYTE_2_3_3_REV,
pattern GL_UNSIGNED_BYTE_3_3_2,
pattern GL_UNSIGNED_INT,
pattern GL_UNSIGNED_INT_10_10_10_2,
pattern GL_UNSIGNED_INT_2_10_10_10_REV,
pattern GL_UNSIGNED_INT_8_8_8_8,
pattern GL_UNSIGNED_INT_8_8_8_8_REV,
pattern GL_UNSIGNED_SHORT,
pattern GL_UNSIGNED_SHORT_1_5_5_5_REV,
pattern GL_UNSIGNED_SHORT_4_4_4_4,
pattern GL_UNSIGNED_SHORT_4_4_4_4_REV,
pattern GL_UNSIGNED_SHORT_5_5_5_1,
pattern GL_UNSIGNED_SHORT_5_6_5,
pattern GL_UNSIGNED_SHORT_5_6_5_REV,
pattern GL_V2F,
pattern GL_V3F,
pattern GL_VENDOR,
pattern GL_VERSION,
pattern GL_VERTEX_ARRAY,
pattern GL_VERTEX_ARRAY_POINTER,
pattern GL_VERTEX_ARRAY_SIZE,
pattern GL_VERTEX_ARRAY_STRIDE,
pattern GL_VERTEX_ARRAY_TYPE,
pattern GL_VIEWPORT,
pattern GL_VIEWPORT_BIT,
pattern GL_XOR,
pattern GL_ZERO,
pattern GL_ZOOM_X,
pattern GL_ZOOM_Y,
-- * Functions
glAccum,
glActiveTexture,
glAlphaFunc,
glAreTexturesResident,
glArrayElement,
glBegin,
glBindTexture,
glBitmap,
glBlendColor,
glBlendEquation,
glBlendFunc,
glBlendFuncSeparate,
glCallList,
glCallLists,
glClear,
glClearAccum,
glClearColor,
glClearDepth,
glClearIndex,
glClearStencil,
glClientActiveTexture,
glClipPlane,
glColor3b,
glColor3bv,
glColor3d,
glColor3dv,
glColor3f,
glColor3fv,
glColor3i,
glColor3iv,
glColor3s,
glColor3sv,
glColor3ub,
glColor3ubv,
glColor3ui,
glColor3uiv,
glColor3us,
glColor3usv,
glColor4b,
glColor4bv,
glColor4d,
glColor4dv,
glColor4f,
glColor4fv,
glColor4i,
glColor4iv,
glColor4s,
glColor4sv,
glColor4ub,
glColor4ubv,
glColor4ui,
glColor4uiv,
glColor4us,
glColor4usv,
glColorMask,
glColorMaterial,
glColorPointer,
glCompressedTexImage1D,
glCompressedTexImage2D,
glCompressedTexImage3D,
glCompressedTexSubImage1D,
glCompressedTexSubImage2D,
glCompressedTexSubImage3D,
glCopyPixels,
glCopyTexImage1D,
glCopyTexImage2D,
glCopyTexSubImage1D,
glCopyTexSubImage2D,
glCopyTexSubImage3D,
glCullFace,
glDeleteLists,
glDeleteTextures,
glDepthFunc,
glDepthMask,
glDepthRange,
glDisable,
glDisableClientState,
glDrawArrays,
glDrawBuffer,
glDrawElements,
glDrawPixels,
glDrawRangeElements,
glEdgeFlag,
glEdgeFlagPointer,
glEdgeFlagv,
glEnable,
glEnableClientState,
glEnd,
glEndList,
glEvalCoord1d,
glEvalCoord1dv,
glEvalCoord1f,
glEvalCoord1fv,
glEvalCoord2d,
glEvalCoord2dv,
glEvalCoord2f,
glEvalCoord2fv,
glEvalMesh1,
glEvalMesh2,
glEvalPoint1,
glEvalPoint2,
glFeedbackBuffer,
glFinish,
glFlush,
glFogCoordPointer,
glFogCoordd,
glFogCoorddv,
glFogCoordf,
glFogCoordfv,
glFogf,
glFogfv,
glFogi,
glFogiv,
glFrontFace,
glFrustum,
glGenLists,
glGenTextures,
glGetBooleanv,
glGetClipPlane,
glGetCompressedTexImage,
glGetDoublev,
glGetError,
glGetFloatv,
glGetIntegerv,
glGetLightfv,
glGetLightiv,
glGetMapdv,
glGetMapfv,
glGetMapiv,
glGetMaterialfv,
glGetMaterialiv,
glGetPixelMapfv,
glGetPixelMapuiv,
glGetPixelMapusv,
glGetPointerv,
glGetPolygonStipple,
glGetString,
glGetTexEnvfv,
glGetTexEnviv,
glGetTexGendv,
glGetTexGenfv,
glGetTexGeniv,
glGetTexImage,
glGetTexLevelParameterfv,
glGetTexLevelParameteriv,
glGetTexParameterfv,
glGetTexParameteriv,
glHint,
glIndexMask,
glIndexPointer,
glIndexd,
glIndexdv,
glIndexf,
glIndexfv,
glIndexi,
glIndexiv,
glIndexs,
glIndexsv,
glIndexub,
glIndexubv,
glInitNames,
glInterleavedArrays,
glIsEnabled,
glIsList,
glIsTexture,
glLightModelf,
glLightModelfv,
glLightModeli,
glLightModeliv,
glLightf,
glLightfv,
glLighti,
glLightiv,
glLineStipple,
glLineWidth,
glListBase,
glLoadIdentity,
glLoadMatrixd,
glLoadMatrixf,
glLoadName,
glLoadTransposeMatrixd,
glLoadTransposeMatrixf,
glLogicOp,
glMap1d,
glMap1f,
glMap2d,
glMap2f,
glMapGrid1d,
glMapGrid1f,
glMapGrid2d,
glMapGrid2f,
glMaterialf,
glMaterialfv,
glMateriali,
glMaterialiv,
glMatrixMode,
glMultMatrixd,
glMultMatrixf,
glMultTransposeMatrixd,
glMultTransposeMatrixf,
glMultiDrawArrays,
glMultiDrawElements,
glMultiTexCoord1d,
glMultiTexCoord1dv,
glMultiTexCoord1f,
glMultiTexCoord1fv,
glMultiTexCoord1i,
glMultiTexCoord1iv,
glMultiTexCoord1s,
glMultiTexCoord1sv,
glMultiTexCoord2d,
glMultiTexCoord2dv,
glMultiTexCoord2f,
glMultiTexCoord2fv,
glMultiTexCoord2i,
glMultiTexCoord2iv,
glMultiTexCoord2s,
glMultiTexCoord2sv,
glMultiTexCoord3d,
glMultiTexCoord3dv,
glMultiTexCoord3f,
glMultiTexCoord3fv,
glMultiTexCoord3i,
glMultiTexCoord3iv,
glMultiTexCoord3s,
glMultiTexCoord3sv,
glMultiTexCoord4d,
glMultiTexCoord4dv,
glMultiTexCoord4f,
glMultiTexCoord4fv,
glMultiTexCoord4i,
glMultiTexCoord4iv,
glMultiTexCoord4s,
glMultiTexCoord4sv,
glNewList,
glNormal3b,
glNormal3bv,
glNormal3d,
glNormal3dv,
glNormal3f,
glNormal3fv,
glNormal3i,
glNormal3iv,
glNormal3s,
glNormal3sv,
glNormalPointer,
glOrtho,
glPassThrough,
glPixelMapfv,
glPixelMapuiv,
glPixelMapusv,
glPixelStoref,
glPixelStorei,
glPixelTransferf,
glPixelTransferi,
glPixelZoom,
glPointParameterf,
glPointParameterfv,
glPointParameteri,
glPointParameteriv,
glPointSize,
glPolygonMode,
glPolygonOffset,
glPolygonStipple,
glPopAttrib,
glPopClientAttrib,
glPopMatrix,
glPopName,
glPrioritizeTextures,
glPushAttrib,
glPushClientAttrib,
glPushMatrix,
glPushName,
glRasterPos2d,
glRasterPos2dv,
glRasterPos2f,
glRasterPos2fv,
glRasterPos2i,
glRasterPos2iv,
glRasterPos2s,
glRasterPos2sv,
glRasterPos3d,
glRasterPos3dv,
glRasterPos3f,
glRasterPos3fv,
glRasterPos3i,
glRasterPos3iv,
glRasterPos3s,
glRasterPos3sv,
glRasterPos4d,
glRasterPos4dv,
glRasterPos4f,
glRasterPos4fv,
glRasterPos4i,
glRasterPos4iv,
glRasterPos4s,
glRasterPos4sv,
glReadBuffer,
glReadPixels,
glRectd,
glRectdv,
glRectf,
glRectfv,
glRecti,
glRectiv,
glRects,
glRectsv,
glRenderMode,
glRotated,
glRotatef,
glSampleCoverage,
glScaled,
glScalef,
glScissor,
glSecondaryColor3b,
glSecondaryColor3bv,
glSecondaryColor3d,
glSecondaryColor3dv,
glSecondaryColor3f,
glSecondaryColor3fv,
glSecondaryColor3i,
glSecondaryColor3iv,
glSecondaryColor3s,
glSecondaryColor3sv,
glSecondaryColor3ub,
glSecondaryColor3ubv,
glSecondaryColor3ui,
glSecondaryColor3uiv,
glSecondaryColor3us,
glSecondaryColor3usv,
glSecondaryColorPointer,
glSelectBuffer,
glShadeModel,
glStencilFunc,
glStencilMask,
glStencilOp,
glTexCoord1d,
glTexCoord1dv,
glTexCoord1f,
glTexCoord1fv,
glTexCoord1i,
glTexCoord1iv,
glTexCoord1s,
glTexCoord1sv,
glTexCoord2d,
glTexCoord2dv,
glTexCoord2f,
glTexCoord2fv,
glTexCoord2i,
glTexCoord2iv,
glTexCoord2s,
glTexCoord2sv,
glTexCoord3d,
glTexCoord3dv,
glTexCoord3f,
glTexCoord3fv,
glTexCoord3i,
glTexCoord3iv,
glTexCoord3s,
glTexCoord3sv,
glTexCoord4d,
glTexCoord4dv,
glTexCoord4f,
glTexCoord4fv,
glTexCoord4i,
glTexCoord4iv,
glTexCoord4s,
glTexCoord4sv,
glTexCoordPointer,
glTexEnvf,
glTexEnvfv,
glTexEnvi,
glTexEnviv,
glTexGend,
glTexGendv,
glTexGenf,
glTexGenfv,
glTexGeni,
glTexGeniv,
glTexImage1D,
glTexImage2D,
glTexImage3D,
glTexParameterf,
glTexParameterfv,
glTexParameteri,
glTexParameteriv,
glTexSubImage1D,
glTexSubImage2D,
glTexSubImage3D,
glTranslated,
glTranslatef,
glVertex2d,
glVertex2dv,
glVertex2f,
glVertex2fv,
glVertex2i,
glVertex2iv,
glVertex2s,
glVertex2sv,
glVertex3d,
glVertex3dv,
glVertex3f,
glVertex3fv,
glVertex3i,
glVertex3iv,
glVertex3s,
glVertex3sv,
glVertex4d,
glVertex4dv,
glVertex4f,
glVertex4fv,
glVertex4i,
glVertex4iv,
glVertex4s,
glVertex4sv,
glVertexPointer,
glViewport,
glWindowPos2d,
glWindowPos2dv,
glWindowPos2f,
glWindowPos2fv,
glWindowPos2i,
glWindowPos2iv,
glWindowPos2s,
glWindowPos2sv,
glWindowPos3d,
glWindowPos3dv,
glWindowPos3f,
glWindowPos3fv,
glWindowPos3i,
glWindowPos3iv,
glWindowPos3s,
glWindowPos3sv
) where
import Graphics.GL.Types
import Graphics.GL.Tokens
import Graphics.GL.Functions
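-- A usage sketch (hypothetical helper, not part of this module) assuming a
-- current OpenGL context has already been created elsewhere, e.g. by GLFW or
-- SDL: clear the colour buffer to black at the start of a frame.
clearFrame :: IO ()
clearFrame = do
  glClearColor 0 0 0 1
  glClear GL_COLOR_BUFFER_BIT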
| haskell-opengl/OpenGLRaw | src/Graphics/GL/Version14.hs | bsd-3-clause | 28,339 | 0 | 5 | 4,291 | 4,961 | 3,074 | 1,887 | 1,168 | 0 |
module L5ToBinary.Compile where
import qualified L5.AbsL as L5
import qualified L1Tox64.Compile as L1c
import qualified L2ToL1.Compile as L2c
import qualified L3ToL2.Compile as L3c
import qualified L4ToL3.Compile as L4c
import qualified L5ToL4.Compile as L5c
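-- | Lower an L5 program one intermediate language at a time
-- (L5 -> L4 -> L3 -> L2 -> L1) and assemble the result, yielding either an
-- error message or x64 assembly text.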
translate :: L5.Program -> Either String String
translate = fmap L1c.assembleProgram . L2c.translate . L3c.translate . L4c.translate . L5c.translate
| mhuesch/scheme_compiler | src/L5ToBinary/Compile.hs | bsd-3-clause | 412 | 0 | 10 | 56 | 104 | 65 | 39 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Graphics.QML.Engine
import Reflex
import Reflex.QML.Internal
import qualified Data.Text as T
import qualified Reflex.QML.Prop as Prop
engineConfig :: EngineConfig
engineConfig = defaultEngineConfig
{ initialDocument = fileDocument "examples/Games.qml"
}
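-- | Start the QML event loop on a background thread and block until it is
-- running. The forked thread signals readiness through an 'MVar' from inside
-- the loop and then sleeps forever; if the loop throws an
-- 'EventLoopException' instead, the handler fills the same 'MVar' so the
-- caller is never left waiting.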
startEventLoop :: IO ()
startEventLoop = do
l <- newEmptyMVar
void . forkIO $
catch (runEventLoop . liftIO $ putMVar l () >> forever (threadDelay maxBound)) $ \e ->
(e :: EventLoopException) `seq` putMVar l ()
takeMVar l
main :: IO ()
main = do
startEventLoop
requireEventLoop $ runApp engineConfig $ Prop.namespace "app" $
Prop.namespace "games" $ mdo
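      -- The RecursiveDo knot: each property's current value ('holdDyn') is
      -- built from the change event that defining the property with
      -- 'Prop.mutable' returns, so the Dynamic and its change Event refer to
      -- each other.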
name <- holdDyn ("Example" :: T.Text) nameChanged
nameChanged <- Prop.mutable "name" name
won <- holdDyn (0 :: Int) wonChanged
wonChanged <- Prop.mutable "won" won
lost <- holdDyn 0 lostChanged
lostChanged <- Prop.mutable "lost" lost
total <- Prop.readonly "total" =<< combineDyn (+) won lost
pure ()
| bennofs/reflex-qml | examples/Games.hs | bsd-3-clause | 1,167 | 0 | 14 | 204 | 360 | 185 | 175 | 35 | 1 |