| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..1.03M | stringlengths 5..90 | stringlengths 4..158 | stringclasses 15 values | int64 5..1.03M | int64 0..53.9k | int64 2..4.17k | int64 0..365k | int64 3..317k | int64 1..171k | int64 1..146k | int64 -1..37.3k | int64 -1..1.31k |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
-- | Some types for json logging.
module Pos.Core.JsonLog.LogEvents
( HasJsonLogConfig (..)
, InvReqDataFlowLog (..)
, JLEvent(..)
, JLTxS (..)
, JLTxR (..)
, JLMemPool (..)
, JLBlock (..)
, JLTimedEvent (..)
, JsonLogConfig (..)
, MemPoolModifyReason (..)
, appendJL
, jsonLogConfigFromHandle
, jsonLogDefault
, fromJLSlotId
, fromJLSlotIdUnsafe
) where
import Universum
import Control.Monad.Except (MonadError)
import Data.Aeson (FromJSON, ToJSON, Value (..), encode, object,
parseJSON, toJSON, (.:), (.=))
import Data.Aeson.Options (defaultOptions)
import Data.Aeson.TH (deriveJSON)
import Data.Aeson.Types (typeMismatch)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as HMS
import Pos.Core (EpochIndex (..), SlotCount, SlotId (..),
mkLocalSlotIndex)
import Pos.Core.JsonLog.JsonLogT (JsonLogConfig (..))
import qualified Pos.Core.JsonLog.JsonLogT as JL
import Pos.Util.Util (realTime)
import Pos.Util.Wlog (WithLogger)
type BlockId = Text
type TxId = Text
type JLSlotId = (Word64, Word16)
-- | Json log of one block with corresponding 'BlockId'.
data JLBlock = JLBlock
{ jlHash :: !BlockId
, jlPrevBlock :: !BlockId
, jlTxs :: ![TxId]
, jlSlot :: !JLSlotId
} deriving Show
-- | Json log of one transaction sent from the (light) wallet.
data JLTxS = JLTxS
{ jlsNodeId :: !Text
, jlsTxId :: !Text
, jlsInvReq :: !InvReqDataFlowLog
} deriving Show
-- | Json log of one transaction being received by a node.
data JLTxR = JLTxR
{ jlrTxId :: !Text
, jlrError :: !(Maybe Text)
} deriving Show
-- | Enumeration of all reasons for modifying the mempool.
data MemPoolModifyReason =
-- | Apply a block.
ApplyBlock
-- | Apply a block, with rollback.
| ApplyBlockWithRollback
-- | Include a transaction. It came from this peer.
| ProcessTransaction
deriving Show
-- | Json log of one mempool modification.
data JLMemPool = JLMemPool
{ -- | Reason for modifying the mempool
jlmReason :: !MemPoolModifyReason
-- | Queue length when trying to modify the mempool (not including this
-- modifier, so it could be 0).
, jlmQueueLength :: !Int
-- | Time spent waiting for the lock (microseconds)
, jlmWait :: !Integer
-- | Time spent doing the modification (microseconds, while holding the lock).
, jlmModify :: !Integer
-- | Size of the mempool before the modification.
, jlmSizeBefore :: !Int
-- | Size of the mempool after the modification.
, jlmSizeAfter :: !Int
-- | How much memory was allocated during the modification.
, jlmAllocated :: !Int64
} deriving Show
-- | Json log event.
data JLEvent = JLCreatedBlock !JLBlock
| JLAdoptedBlock !BlockId
| JLTpsStat !Int
| JLTxSent !JLTxS
| JLTxReceived !JLTxR
| JLMemPoolEvent !JLMemPool
deriving (Show, Generic)
-- | A 'JLEvent' paired with the time at which it occurred.
data JLTimedEvent = JLTimedEvent
{ jlTimestamp :: !Integer
, jlEvent :: !JLEvent
} deriving Show
-- -----------------------------------------------------------------------------
-- This type was originally in Pos.Infra.Communication.Relay.Logic but was moved
-- here so the package dependency graph could be re-arranged.
data InvReqDataFlowLog =
InvReqAccepted
{ invReqStart :: !Integer
, invReqReceived :: !Integer
, invReqSent :: !Integer
, invReqClosed :: !Integer
}
| InvReqRejected
{ invReqStart :: !Integer
, invReqReceived :: !Integer
}
| InvReqException !Text
deriving (Eq, Generic, Show)
instance ToJSON InvReqDataFlowLog where
toJSON (InvReqAccepted str rece sen closed) =
object [ "invReqAccepted" .=
object [ "reqStart" .= toJSON str
, "reqReceived" .= toJSON rece
, "reqSent" .= toJSON sen
, "reqClosed" .= toJSON closed
]
]
toJSON (InvReqRejected str rece) =
object [ "invReqRejected" .=
object [ "reqStart" .= toJSON str
, "reqReceived" .= rece
]
]
toJSON (InvReqException exception) =
object [ "invReqException" .= toJSON exception]
instance FromJSON InvReqDataFlowLog where
parseJSON (Object o)
| HMS.member "invReqAccepted" o = do
invReqAccO <- o .: "invReqAccepted"
str <- invReqAccO .: "reqStart"
rece <- invReqAccO .: "reqReceived"
sen <- invReqAccO .: "reqSent"
closed <- invReqAccO .: "reqClosed"
return $ InvReqAccepted str rece sen closed
| HMS.member "invReqRejected" o = do
invReqRecO <- o .: "invReqRejected"
str <- invReqRecO .: "reqStart"
rece <- invReqRecO .: "reqReceived"
return $ InvReqRejected str rece
| HMS.member "invReqException" o =
InvReqException <$> (o .: "invReqException")
| otherwise = fail "Incorrect JSON encoding for InvReqDataFlowLog"
parseJSON invalid = typeMismatch "InvReqDataFlowLog" invalid
$(deriveJSON defaultOptions ''MemPoolModifyReason)
$(deriveJSON defaultOptions ''JLBlock)
$(deriveJSON defaultOptions ''JLEvent)
$(deriveJSON defaultOptions ''JLTimedEvent)
$(deriveJSON defaultOptions ''JLTxS)
$(deriveJSON defaultOptions ''JLTxR)
$(deriveJSON defaultOptions ''JLMemPool)
-- | Get 'SlotId' from 'JLSlotId'.
fromJLSlotId :: MonadError Text m => SlotCount -> JLSlotId -> m SlotId
fromJLSlotId epochSlots (ep, sl) =
SlotId (EpochIndex ep) <$> mkLocalSlotIndex epochSlots sl
-- | Get 'SlotId' from 'JLSlotId', calling 'error' on an invalid slot index.
fromJLSlotIdUnsafe :: SlotCount -> JLSlotId -> SlotId
fromJLSlotIdUnsafe epochSlots x = case fromJLSlotId epochSlots x of
Right y -> y
Left _ -> error "illegal slot id"
-- | Append an event to the log at the given 'FilePath'.
appendJL :: (MonadIO m) => FilePath -> JLEvent -> m ()
appendJL path ev = liftIO $ do
time <- realTime -- TODO: Do we want to mock time in logs?
LBS.appendFile path . encode $ JLTimedEvent (fromIntegral time) ev
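-- A hedged usage sketch (not part of the original module): append a made-up
-- adopted-block event to a hypothetical log file. The path and block hash are
-- placeholders; 'toText' is re-exported by 'Universum'.
_exampleAppendJL :: IO ()
_exampleAppendJL = appendJL "node-events.json" (JLAdoptedBlock (toText "deadbeef"))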
jsonLogConfigFromHandle :: MonadIO m => Handle -> m JsonLogConfig
jsonLogConfigFromHandle h = do
v <- newMVar h
return $ JsonLogConfig v (\_ -> return True)
class HasJsonLogConfig ctx where
jsonLogConfig :: Lens' ctx JsonLogConfig
jsonLogDefault
:: (ToJSON a, MonadReader ctx m, HasJsonLogConfig ctx, MonadCatch m,
MonadIO m, WithLogger m)
=> a -> m ()
jsonLogDefault x = do
jlc <- view jsonLogConfig
JL.jsonLogDefault jlc x
| input-output-hk/pos-haskell-prototype | core/src/Pos/Core/JsonLog/LogEvents.hs | mit | 7,366 | 0 | 12 | 2,067 | 1,495 | 817 | 678 | 218 | 2 |
{-# LANGUAGE FlexibleContexts #-}
module IRC.Commands where
import Control.Applicative
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.ByteString (ByteString)
import IRC.Types
import IRC.Raw.Monad
import Data.String
import qualified IRC.Raw as Raw
commandMatchServerHost :: Cmd -> ([Text] -> Maybe a) -> Raw.Message -> Maybe (Host, a)
commandMatchServerHost cmd params_fn = cnd
where cnd (Raw.Message
_
(Just (Raw.ServerName (Raw.Host host)))
cmd_input
(Raw.Params params))
| case (cmd, cmd_input) of
(N x, Raw.CmdNumber y) -> x == y
(S x, Raw.Command y) -> x == y
(_ , _ ) -> False
, Just rest <- params_fn (map (\(Raw.Param x) -> T.decodeUtf8 x) params)
= Just (T.decodeUtf8 host
,rest)
cnd _ = Nothing
commandMatchUser :: Cmd -> ([Text] -> Maybe a) -> Raw.Message -> Maybe (User , a)
commandMatchUser cmd params_fn = cnd
where cnd (Raw.Message
_
(Just (Raw.Prefix (Raw.Nick nick) (Just (Raw.User ident)) (Just hostp)))
cmd_input
(Raw.Params params))
| case (cmd, cmd_input) of
(N x, Raw.CmdNumber y) -> x == y
(S x, Raw.Command y) -> x == y
(_ , _ ) -> False
, let host = case hostp of
Raw.ValidHost (Raw.Host h) -> h
Raw.InvalidHost h -> h
, Just rest <- params_fn (map (\(Raw.Param x) -> T.decodeUtf8 x) params)
= Just (User (Nick (T.decodeUtf8 nick)) (T.decodeUtf8 ident) (T.decodeUtf8 host)
,rest)
cnd _ = Nothing
commandMatchRaw :: (Cmd -> [Text] -> Maybe a) -> Raw.Message -> Maybe a
commandMatchRaw params_fn = cnd
where cnd (Raw.Message
_
_
cmd_input
(Raw.Params params))
| let cmd = case cmd_input of
Raw.CmdNumber y -> N y
Raw.Command y -> S y
= params_fn cmd (map (\(Raw.Param x) -> T.decodeUtf8 x) params)
join_pattern :: Raw.Message -> Maybe (User , (Channel, [Text]))
join_pattern = commandMatchUser (S "JOIN") f
where f (channel:metadata) = Just (Channel channel, metadata)
f _ = Nothing
pattern JOIN user channel meta <- (join_pattern -> Just (user,(channel,meta)))
part_pattern :: Raw.Message -> Maybe (User , (Channel, Maybe Text))
part_pattern = commandMatchUser (S "PART") f
where f [channel] = Just (Channel channel, Nothing)
f [channel,x] = Just (Channel channel, Just x )
f _ = Nothing
pattern PART user channel msg <- (part_pattern -> Just (user,(channel,msg)))
quit_pattern :: Raw.Message -> Maybe (User, (Channel, Maybe Text))
quit_pattern = commandMatchUser (S "QUIT") f
where f [channel] = Just (Channel channel, Nothing)
f [channel,x] = Just (Channel channel, Just x )
f _ = Nothing
pattern QUIT user channel msg <- (quit_pattern -> Just (user,(channel,msg)))
kick_pattern :: Raw.Message -> Maybe (User, (Channel, Nick, Maybe Text))
kick_pattern = commandMatchUser (S "KICK") f
where f [ch,kicked] = Just (Channel ch, Nick kicked, Nothing )
f [ch,kicked,partmsg] = Just (Channel ch, Nick kicked, Just partmsg )
f _ = Nothing
pattern KICK user channel kicked msg <- (kick_pattern -> Just (user,(channel,kicked,msg)))
channelmsg_pattern :: Raw.Message -> Maybe (User, (Channel, Message))
channelmsg_pattern = commandMatchUser (S "PRIVMSG") f
where f [channel,msg] | Just ('#', _) <- T.uncons channel
= Just (Channel channel , Message msg)
f _ = Nothing
pattern CHMSG user channel msg <- (channelmsg_pattern -> Just (user,(channel,msg)))
privmsg_pattern :: Raw.Message -> Maybe (User, (Target,Message))
privmsg_pattern= commandMatchUser (S "PRIVMSG") f
where f [channel,msg] = Just (Target channel , Message msg)
f _ = Nothing
pattern PRIVMSG user target msg <- (privmsg_pattern -> Just (user, (target,msg)))
nick_pattern :: Raw.Message -> Maybe (User, Nick)
nick_pattern = commandMatchUser (S "NICK") f
where f [new_nick] = Just (Nick new_nick)
f _ = Nothing
pattern NICK user nick <- (nick_pattern -> Just (user, nick))
account_pattern :: Raw.Message -> Maybe (User, Maybe Account)
account_pattern = commandMatchUser (S "ACCOUNT") f
where f ["*"] = Just Nothing
f [acc] = Just (Just (Account acc))
f _ = Nothing
pattern ACCOUNT user acc <- (account_pattern -> Just (user, acc))
raw_pattern :: Raw.Message -> Maybe (Cmd, [Text])
raw_pattern = commandMatchRaw f
where f cmd params = Just (cmd, params)
pattern RAW cmd pms <- (raw_pattern -> Just (cmd,pms))
encode :: [Text] -> Raw.Params
encode ts = Raw.Params (map (Raw.Param . T.encodeUtf8) ts)
cmd :: MonadIRC m => Text -> [Text] -> m ()
cmd cmd params = Raw.irc_send (command cmd params)
command :: Text -> [Text] -> Raw.Message
command cmd params = Raw.Message Nothing Nothing (Raw.Command (T.encodeUtf8 cmd)) (encode params)
msg :: (IRCCommand target, MonadIRC m) => target -> Message -> m ()
msg = privmsg
class IRCCommand target where
privmsg :: MonadIRC m => target -> Message -> m ()
notice :: MonadIRC m => target -> Message -> m ()
instance IRCCommand User where
privmsg (User (Nick target) _ _) (Message msg) = cmd "PRIVMSG" [target, msg]
notice (User (Nick target) _ _) (Message msg) = cmd "NOTICE" [target, msg]
instance IRCCommand Channel where
privmsg (Channel ch) (Message msg) = cmd "PRIVMSG" [ch, msg]
notice (Channel ch) (Message msg) = cmd "NOTICE" [ch, msg]
instance IRCCommand Nick where
privmsg (Nick n) (Message msg) = cmd "PRIVMSG" [n,msg]
notice (Nick n) (Message msg) = cmd "NOTICE" [n,msg]
instance IRCCommand Target where
privmsg (Target t) (Message msg) = cmd "PRIVMSG" [t,msg]
notice (Target t) (Message msg) = cmd "NOTICE" [t,msg]
instance IsString Message where
fromString = Message . fromString
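-- A hedged sketch (not part of the original module): dispatch on an incoming raw
-- message with the pattern synonyms above, echoing channel messages back to the
-- channel they came from. The name 'echo' is hypothetical; returning a 'Maybe'
-- action avoids assuming anything about 'MonadIRC' beyond what the instances use.
echo :: MonadIRC m => Raw.Message -> Maybe (m ())
echo (CHMSG _ channel m) = Just (privmsg channel m)
echo _ = Nothing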
| EXio4/netza | src/IRC/Commands.hs | mit | 6,729 | 0 | 19 | 2,177 | 2,542 | 1,320 | 1,222 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Mealstrom.FSMApi
Description : API for FSMs
Copyright : (c) Max Amanshauser, 2016
License : MIT
Maintainer : [email protected]
This is the interface through which you primarily interact with an FSM
from the rest of your program.
-}
module Mealstrom.FSMApi where
import Control.Concurrent
import Control.Exception
import Control.Monad (void)
import qualified Data.Text as Text
import System.IO
import System.Timeout
import Mealstrom.FSM
import Mealstrom.FSMEngine
import Mealstrom.FSMStore
import Mealstrom.FSMTable
import Mealstrom.WALStore
data FSMHandle st wal k s e a where
FSMHandle :: (Eq s, Eq e, Eq a, FSMStore st k s e a, WALStore wal k, FSMKey k) => {
fsmStore :: st, -- ^ Which backend to use for storing FSMs.
walStore :: wal, -- ^ Which backend to use for the WAL.
fsmTable :: FSMTable s e a, -- ^ A table of transitions and effects.
-- This is not in a typeclass, because you may want to use MVars or similar in effects.
-- See the tests for examples.
effTimeout :: Int, -- ^ How much time to allow for Actions until they are considered failed.
retryCount :: Int -- ^ How often to automatically retry actions.
} -> FSMHandle st wal k s e a
get :: forall st wal k s e a . FSMStore st k s e a => FSMHandle st wal k s e a -> k -> IO(Maybe s)
get FSMHandle{..} k = fsmRead fsmStore k (Proxy :: Proxy k s e a)
-- |Idempotent because keys are generated by the caller.
post :: forall st wal k s e a . FSMStore st k s e a =>
FSMHandle st wal k s e a ->
k ->
s -> IO Bool
post FSMHandle{..} k s0 =
fsmCreate fsmStore (mkInstance k s0 [] :: Instance k s e a) >>= \case
Nothing -> return True
Just s -> hPutStrLn stderr s >> return False
-- |Concurrent updates will be serialised by Postgres.
-- Returns True when the state transition has been successfully computed
-- and actions have been scheduled, now or at any time in the past.
-- Returns False on failure.
patch :: forall st wal k s e a . (FSMStore st k s e a, MealyInstance k s e a, FSMKey k) => FSMHandle st wal k s e a -> k -> [Msg e] -> IO Bool
patch h@FSMHandle{..} k es = do
openTxn walStore k
status <- handle (\(e::SomeException) -> hPutStrLn stderr (show e) >> return MealyError)
(fsmUpdate fsmStore k ((patchPhase1 fsmTable es) :: MachineTransformer s e a))
if status /= MealyError
then recover h k >> return True
else return False
-- |Recovering is the process of asynchronously applying Actions. It is performed
-- immediately after the synchronous part of an update and, on failure, retried until it
-- succeeds or the retry limit is hit.
recover :: forall st wal k s e a . (FSMStore st k s e a, MealyInstance k s e a, FSMKey k) => FSMHandle st wal k s e a -> k -> IO ()
recover h@FSMHandle{..} k
| retryCount == 0 = hPutStrLn stderr $ "Alarma! Recovery retries for " ++ Text.unpack (toText k) ++ " exhausted. Giving up!"
| otherwise =
void $ forkFinally (timeout (effTimeout*10^6) (fsmUpdate fsmStore k (patchPhase2 fsmTable :: MachineTransformer s e a))) -- (patchPhase2 fsmTable))
(\case Left exn -> do -- the damn thing crashed, print log and try again
hPutStrLn stderr $ "Exception occurred while trying to recover " ++ Text.unpack (toText k)
hPrint stderr exn
recover h{retryCount = retryCount - 1} k
Right Nothing -> do -- We hit the timeout. Try again until we hit the retry limit.
hPutStrLn stderr $ "Timeout while trying to recover " ++ Text.unpack (toText k)
recover h{retryCount = retryCount - 1} k
Right (Just Done) -> closeTxn walStore k -- All good.
Right (Just Pending) -> -- Some actions did not complete successfully.
recover h{retryCount = retryCount - 1} k)
-- |During certain long-lasting failures, like network outage, the retry limit of Actions will be exhausted.
-- You should call this function regularly, e.g. every 10 minutes, to clean up those hard cases.
recoverAll :: forall st wal k s e a . (MealyInstance k s e a) => FSMHandle st wal k s e a -> IO ()
recoverAll h@FSMHandle{..} = do
wals <- walScan walStore effTimeout
mapM_ (recover h . walId) wals
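-- A hedged sketch (not part of the original module) of the periodic clean-up the
-- comment above suggests: run 'recoverAll' roughly every 10 minutes, forever.
-- 'threadDelay' comes from the already-imported Control.Concurrent.
recoverLoop :: MealyInstance k s e a => FSMHandle st wal k s e a -> IO ()
recoverLoop h = do
    recoverAll h
    threadDelay (600 * 1000 * 1000)  -- 10 minutes, in microseconds
    recoverLoop h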
-- |A helper that is sometimes useful
upsert :: forall st wal k s e a . MealyInstance k s e a => FSMStore st k s e a =>
FSMHandle st wal k s e a -> k -> s -> [Msg e] -> IO ()
upsert h k s es = do
ms <- get h k
maybe (post h k s >> void (patch h k es))
(\_s -> void $ patch h k es)
ms
| linearray/mealstrom | src/Mealstrom/FSMApi.hs | mit | 5,551 | 0 | 17 | 1,888 | 1,307 | 687 | 620 | -1 | -1 |
{--------------------------------------------------------------------------------
Copyright (c) Daan Leijen 2003
wxWindows License.
Demonstrates:
- many different kinds of controls
- message logging.
--------------------------------------------------------------------------------}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Graphics.UI.WX
import Graphics.UI.WXCore
main :: IO ()
main
= start gui
gui :: IO ()
gui
= do -- main gui elements: frame, panel, text control, and the notebook
f <- frame [text := "Controls"]
p <- panel f []
nb <- notebook p []
textlog <- textCtrl p [enabled := False, wrap := WrapNone]
-- use text control as logger
textCtrlMakeLogActiveTarget textlog
logMessage "logging enabled"
-- set f [on closing :~ \prev -> do logSetActiveTarget oldlog; logDelete log; prev]
-- button page
p1 <- panel nb []
ok <- button p1 [text := "Ok", on command := logMessage "ok button pressed"]
quit <- button p1 [text := "Quit", on command := close f]
-- radio box page
p2 <- panel nb []
let rlabels = ["first", "second", "third"]
r1 <- radioBox p2 Vertical rlabels [text := "radio box", on select ::= logSelect]
r2 <- radioBox p2 Horizontal rlabels [tooltip := "radio group two", on select ::= logSelect]
rb1 <- button p2 [text := "disable", on command ::= onEnable r1]
-- choice
p3 <- panel nb []
let clabels = ["mies","noot","aap"]
c1 <- choice p3 [tooltip := "unsorted choices", on select ::= logSelect, sorted := False, items := clabels]
c2 <- choice p3 [tooltip := "sorted choices", on select ::= logSelect, sorted := True, items := clabels]
cb1 <- button p3 [text := "disable", on command ::= onEnable c1]
-- list box page
p4 <- panel nb []
sl1 <- singleListBox p4
[items := clabels
,tooltip := "unsorted single-selection listbox"
,on select ::= logSelect]
sl2 <- singleListBox p4
[items := clabels
,tooltip := "sorted listbox"
,on select ::= logSelect, sorted := True]
sc1 <- checkBox p4 [text := "enable the listbox", checked := True, on command := set sl1 [enabled :~ not]]
-- slider/gauge page
p5 <- panel nb []
s <- hslider p5 True {- show labels -} 1 100 [selection := 50]
g <- hgauge p5 100 [selection := 50]
set s [on command := do{ i <- get s selection; set g [selection := i]} ]
-- specify layout
set f [layout :=
container p $
column 0
[ tabs nb
[tab "buttons" $
container p1 $ margin 10 $ floatCentre $ row 5 [widget ok, widget quit]
,tab "radio box" $
container p2 $ margin 10 $ column 5 [ hstretch $ widget rb1
, row 0 [floatLeft $ widget r1
,floatRight $ widget r2]]
,tab "choice" $
container p3 $ margin 10 $ column 5 [ hstretch $ widget cb1
, row 0 [floatLeft $ widget c1
,floatRight $ row 5 [label "sorted: ", widget c2]]]
,tab "listbox" $
container p4 $ margin 10 $ column 5 [ hstretch $ dynamic $ widget sc1
, floatLeft $
row 0 [widget sl1, widget sl2]]
,tab "slider" $
container p5 $ margin 10 $ column 5 [ hfill $ widget s
, hfill $ widget g
, glue
]
]
, hfill $ minsize (sz 20 80) $ widget textlog
]
, clientSize := sz 400 300 ]
return ()
where
-- logSelect :: (Selection w, Items w String) => w -> IO ()
logSelect w
= do i <- get w selection
s <- get w (item i)
logMessage ("selected index: " ++ show i ++ ": " ++ s)
onEnable w b
= do set w [enabled :~ not]
enable <- get w enabled
set b [text := (if enable then "disable" else "enable")]
kindof :: WxObject a -> String -> IO ()
kindof obj className
= do classInfo_ <- classInfoFindClass className
if (objectIsNull classInfo_)
then logMessage ("kindof " ++ className ++ ": no such class")
else if (objectIsNull obj)
then logMessage ("kindof " ++ className ++ ": null object")
else do haskind <- objectIsKindOf obj classInfo_
logMessage ("kindof " ++ className ++ ": " ++ show haskind)
| jacekszymanski/wxHaskell | samples/wx/Controls.hs | lgpl-2.1 | 5,198 | 0 | 22 | 2,169 | 1,407 | 685 | 722 | 85 | 3 |
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# OPTIONS -Wall #-}
-- | An 'Annotation' that describes the dependency of the nodes
-- and labels certain group of Manifest nodes
-- that can safely be accessed simultaneously
module Language.Paraiso.Annotation.Dependency (
Direct(..),
Calc(..),
Indirect(..),
KernelWriteGroup(..),
OMWriteGroup(..)
) where
import Data.Dynamic
import qualified Data.Graph.Inductive as FGL
import qualified Data.Set as Set
import Language.Paraiso.Prelude
-- | The list of Manifest or Existing nodes that this node directly depends on.
-- Y directly depends on X if you need to read X in the subroutine in which you calculate Y
newtype Direct
= Direct [FGL.Node]
deriving (Eq, Show, Typeable)
-- | The list of Manifest or Existing nodes that this node indirectly depends on.
-- Y indirectly depends on X if you need to calculate X before you calculate Y
newtype Indirect
= Indirect [FGL.Node]
deriving (Eq, Show, Typeable)
-- | The list of All nodes that this node directly depends on.
-- Y directly depends on X if you need to calculate X in the subroutine in which you calculate Y
newtype Calc
= Calc (Set.Set FGL.Node)
deriving (Eq, Show, Typeable)
-- | Write grouping, continuously numbered from [0 ..] .
-- The numbering starts from 0 for each kernel in an Orthotope Machine.
data KernelWriteGroup
= KernelWriteGroup {getKernelGroupID :: Int}
deriving (Eq, Show, Typeable)
-- | Write grouping, continuously numbered from [0 ..] .
-- The numbering is unique in one Orthotope Machine.
data OMWriteGroup
= OMWriteGroup {getOMGroupID :: Int}
deriving (Eq, Show, Typeable)
| nushio3/Paraiso | Language/Paraiso/Annotation/Dependency.hs | bsd-3-clause | 1,646 | 0 | 8 | 319 | 237 | 151 | 86 | 27 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
#ifdef USE_REFLEX_OPTIMIZER
{-# OPTIONS_GHC -fplugin=Reflex.Optimizer #-}
#endif
-- |
-- Module:
-- Reflex.Adjustable.Class
-- Description:
-- A class for actions that can be "adjusted" over time based on some 'Event'
-- such that, when observed after the firing of any such 'Event', the result
-- is as though the action was originally run with the 'Event's value.
module Reflex.Adjustable.Class
(
-- * The Adjustable typeclass
Adjustable(..)
, sequenceDMapWithAdjust
, sequenceDMapWithAdjustWithMove
, mapMapWithAdjustWithMove
-- * Deprecated aliases
, MonadAdjust
) where
import Control.Monad.Identity
import Control.Monad.Reader
import Data.Dependent.Map (DMap)
import Data.GADT.Compare (GCompare(..))
import qualified Data.Dependent.Map as DMap
import Data.Functor.Constant
import Data.Functor.Misc
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IntMap
import Data.Map (Map)
import Reflex.Class
import Data.Patch.DMapWithMove
-- | A 'Monad' that supports adjustment over time. After an action has been
-- run, if the given events fire, it will adjust itself so that its net effect
-- is as though it had originally been run with the new value. Note that there
-- is some issue here with persistent side-effects: obviously, IO (and some
-- other side-effects) cannot be undone, so it is up to the instance implementer
-- to determine what the best meaning for this class is in such cases.
class (Reflex t, Monad m) => Adjustable t m | m -> t where
runWithReplace
:: m a
-> Event t (m b)
-> m (a, Event t b)
traverseIntMapWithKeyWithAdjust
:: (IntMap.Key -> v -> m v')
-> IntMap v
-> Event t (PatchIntMap v)
-> m (IntMap v', Event t (PatchIntMap v'))
traverseDMapWithKeyWithAdjust
:: GCompare k
=> (forall a. k a -> v a -> m (v' a))
-> DMap k v
-> Event t (PatchDMap k v)
-> m (DMap k v', Event t (PatchDMap k v'))
{-# INLINABLE traverseDMapWithKeyWithAdjust #-}
traverseDMapWithKeyWithAdjust f dm0 dm' = fmap (fmap (fmap fromPatchWithMove)) $
traverseDMapWithKeyWithAdjustWithMove f dm0 $ fmap toPatchWithMove dm'
where
toPatchWithMove (PatchDMap m) = PatchDMapWithMove $ DMap.map toNodeInfoWithMove m
toNodeInfoWithMove = \case
ComposeMaybe (Just v) -> NodeInfo (From_Insert v) $ ComposeMaybe Nothing
ComposeMaybe Nothing -> NodeInfo From_Delete $ ComposeMaybe Nothing
fromPatchWithMove (PatchDMapWithMove m) = PatchDMap $ DMap.map fromNodeInfoWithMove m
fromNodeInfoWithMove (NodeInfo from _) = ComposeMaybe $ case from of
From_Insert v -> Just v
From_Delete -> Nothing
From_Move _ -> error "traverseDMapWithKeyWithAdjust: implementation of traverseDMapWithKeyWithAdjustWithMove inserted spurious move"
traverseDMapWithKeyWithAdjustWithMove
:: GCompare k
=> (forall a. k a -> v a -> m (v' a))
-> DMap k v
-> Event t (PatchDMapWithMove k v)
-> m (DMap k v', Event t (PatchDMapWithMove k v'))
instance Adjustable t m => Adjustable t (ReaderT r m) where
runWithReplace a0 a' = do
r <- ask
lift $ runWithReplace (runReaderT a0 r) $ fmap (`runReaderT` r) a'
traverseIntMapWithKeyWithAdjust f dm0 dm' = do
r <- ask
lift $ traverseIntMapWithKeyWithAdjust (\k v -> runReaderT (f k v) r) dm0 dm'
traverseDMapWithKeyWithAdjust f dm0 dm' = do
r <- ask
lift $ traverseDMapWithKeyWithAdjust (\k v -> runReaderT (f k v) r) dm0 dm'
traverseDMapWithKeyWithAdjustWithMove f dm0 dm' = do
r <- ask
lift $ traverseDMapWithKeyWithAdjustWithMove (\k v -> runReaderT (f k v) r) dm0 dm'
-- | Traverse a 'DMap' of 'Adjustable' actions, running each of them. The provided 'Event' of patches
-- to the 'DMap' can add, remove, or update values.
sequenceDMapWithAdjust
:: (GCompare k, Adjustable t m)
=> DMap k m
-> Event t (PatchDMap k m)
-> m (DMap k Identity, Event t (PatchDMap k Identity))
sequenceDMapWithAdjust = traverseDMapWithKeyWithAdjust $ \_ -> fmap Identity
-- | Traverses a 'DMap' of 'Adjustable' actions, running each of them. The provided 'Event' of patches
-- to the 'DMap' can add, remove, update, move, or swap values.
sequenceDMapWithAdjustWithMove
:: (GCompare k, Adjustable t m)
=> DMap k m
-> Event t (PatchDMapWithMove k m)
-> m (DMap k Identity, Event t (PatchDMapWithMove k Identity))
sequenceDMapWithAdjustWithMove = traverseDMapWithKeyWithAdjustWithMove $ \_ -> fmap Identity
-- | Traverses a 'Map', running the provided 'Adjustable' action. The provided 'Event' of patches to the 'Map'
-- can add, remove, update, move, or swap values.
mapMapWithAdjustWithMove
:: forall t m k v v'. (Adjustable t m, Ord k)
=> (k -> v -> m v')
-> Map k v
-> Event t (PatchMapWithMove k v)
-> m (Map k v', Event t (PatchMapWithMove k v'))
mapMapWithAdjustWithMove f m0 m' = do
(out0 :: DMap (Const2 k v) (Constant v'), out') <- traverseDMapWithKeyWithAdjustWithMove (\(Const2 k) (Identity v) -> Constant <$> f k v) (mapToDMap m0) (const2PatchDMapWithMoveWith Identity <$> m')
return (dmapToMapWith (\(Constant v') -> v') out0, patchDMapWithMoveToPatchMapWithMoveWith (\(Constant v') -> v') <$> out')
--------------------------------------------------------------------------------
-- Deprecated functions
--------------------------------------------------------------------------------
{-# DEPRECATED MonadAdjust "Use Adjustable instead" #-}
-- | Synonym for 'Adjustable'
type MonadAdjust = Adjustable
| reflex-frp/reflex | src/Reflex/Adjustable/Class.hs | bsd-3-clause | 5,753 | 0 | 15 | 1,053 | 1,409 | 735 | 674 | -1 | -1 |
-- | Intel specific ELF additions
-- .
-- ZCA tables
-- ==========
-- .
-- ICC (Intel C Compiler) can generate an optimization report with the
-- opt-report-* family of flags. To make the correspondence between the
-- information in the generated file and the produced binary, a debug section
-- called ".debug_opt_report" is added to the latter.
--
-- In [1], an old version (1.1) of the structure of this section is described
-- in the context of Cilk. In this version, the name of the section was
-- ".itt_notify_tab". Tables found in this section are called ZCA tables.
--
-- As of ICC 16.0.0, the basic header of the structure is the same and the
-- version is now 2.1. The other fields, however, are different: different
-- sizes, order, etc. As we don't have a specification, their meaning has been
-- inferred by observation and may be subject to errors. Please report them to
-- us!
--
-- [1] https://www.cilkplus.org/sites/default/files/open_specifications/LowOverheadAnnotations.pdf
module Haskus.Format.Elf.Intel
( ZCATable (..)
, ZCATableHeader (..)
, ZCATableEntry (..)
, getZCATable
, getZCATableHeader
, getZCATableEntry
, getZCAStringTable
)
where
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Haskus.Format.Text as Text
import Haskus.Format.Text (Text, getTextUtf8)
import Haskus.Format.Binary.Buffer
import Haskus.Format.Binary.Get
import Haskus.Format.Binary.Word
import Haskus.Utils.Flow (when, forM)
-- | ZCA table
data ZCATable = ZCATable
{ zcaHeader :: ZCATableHeader
, zcaEntries :: [ZCATableEntry]
}
deriving (Show)
-- | ZCA table header
data ZCATableHeader = ZCATableHeader
{ zcaVersionMajor :: Word8 -- ^ Major version number
, zcaVersionMinor :: Word8 -- ^ Minor version number
, zcaEntryOffset :: Word16 -- ^ Offset of the entry table
, zcaEntryCount :: Word32 -- ^ Count of entries that follow
, zcaStringsOffset :: Word32 -- ^ Offset in bytes to strings table
, zcaStringsSize :: Word32 -- ^ Size of string table (bytes)
, zcaExprsOffset :: Word32 -- ^ Offset in bytes to expression table
, zcaExprsSize :: Word32 -- ^ Size of expression table (bytes)
, zcaStuff1 :: Word32
}
deriving (Show)
-- | ZCA table name
zcaMagic :: Text
zcaMagic = Text.pack ".itt_notify_tab"
-- | Getter for a ZCA table header
getZCATableHeader :: Get ZCATableHeader
getZCATableHeader = do
magic <- getTextUtf8 (fromIntegral (Text.length zcaMagic))
when (magic /= zcaMagic) $
error "Not a ZCA table (invalid magic number)"
-- skip magic NUL terminal byte
skip 1
versionmaj <- getWord8
versionmin <- getWord8
when ((versionmaj,versionmin) /= (2,1)) $
error "Unsupported ZCA version"
-- read table header
ZCATableHeader versionmaj versionmin
<$> getWord16le
<*> getWord32le
<*> getWord32le
<*> getWord32le
<*> getWord32le
<*> getWord32le
<*> getWord32le
-- | ZCA table entry
data ZCATableEntry = ZCATableEntry
{ zcaIP :: Word64 -- ^ Instruction pointer on entry
, zcaNameIndex :: Word32 -- ^ Offset in bytes into strings table
, zcaName :: Text -- ^ Entry string
, zcaValueIndex :: Word32 -- ^ Offset in bytes into expression table
, zcaValue :: Buffer -- ^ Values
}
deriving (Show)
-- | Getter for a table entry
getZCATableEntry :: Map Int Text -> Get ZCATableEntry
getZCATableEntry strs = do
off <- getWord64le
nidx <- getWord32le
eoff <- getWord32le
let name = strs Map.! fromIntegral nidx
return (ZCATableEntry off nidx name eoff emptyBuffer)
-- | Getter for table entries
getZCATableEntries :: ZCATableHeader -> Buffer -> [ZCATableEntry]
getZCATableEntries hdr bs = es
where
-- extract raw table
raw = bufferTake (fromIntegral $ zcaEntryCount hdr * 16)
$ bufferDrop (fromIntegral $ zcaEntryOffset hdr) bs
-- get strings
strs = getZCAStringTable hdr bs
-- decode entries
getEntries = forM [1..zcaEntryCount hdr] (const $ getZCATableEntry strs)
es = runGetOrFail getEntries raw
-- | Get string table
getZCAStringTable :: ZCATableHeader -> Buffer -> Map Int Text
getZCAStringTable hdr bs = Map.fromList (offs `zip` strs)
where
-- extract raw table
raw = bufferTake (fromIntegral $ zcaStringsSize hdr)
$ bufferDrop (fromIntegral $ zcaStringsOffset hdr) bs
-- decode strings
strs = fmap Text.bufferDecodeUtf8 . bufferSplitOn 0 . bufferInit $ raw
-- add offsets
offs = scanl (+) 0 $ fmap (\s -> Text.length s + 1) strs
-- | Get values
getZCAValues :: ZCATableHeader -> Buffer -> [ZCATableEntry] -> [ZCATableEntry]
getZCAValues hdr bs es = values
where
-- raw table of values
raw = bufferTake (fromIntegral $ zcaExprsSize hdr)
$ bufferDrop (fromIntegral $ zcaExprsOffset hdr) bs
-- offsets
offs = fmap (fromIntegral . zcaValueIndex) es
-- sizes
szs = fmap (uncurry (-)) (offs' `zip` offs)
where offs' = tail offs ++ [fromIntegral $ zcaExprsSize hdr]
-- values: we drop the first byte of the value (value size)
update e sz = e { zcaValue = bufferDrop 1
$ bufferTake sz
$ bufferDrop off raw }
where off = fromIntegral $ zcaValueIndex e
values = fmap (uncurry update) (es `zip` szs)
-- | Get table
getZCATable :: Buffer -> ZCATable
getZCATable bs = zca
where
-- ZCA table header
hdr = runGetOrFail getZCATableHeader bs
-- ZCA table entries
es = getZCATableEntries hdr bs
-- table
zca = ZCATable hdr (getZCAValues hdr bs es)
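-- A hedged usage sketch (not part of the original module): given a 'Buffer'
-- holding the raw contents of the ".debug_opt_report" (a.k.a. ".itt_notify_tab")
-- section, decode the table and list the entry names. Extracting the section
-- bytes from an ELF file is out of scope here.
listZCAEntryNames :: Buffer -> [Text]
listZCAEntryNames = fmap zcaName . zcaEntries . getZCATable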
| hsyl20/ViperVM | haskus-system/src/lib/Haskus/Format/Elf/Intel.hs | bsd-3-clause | 5,837 | 0 | 14 | 1,504 | 1,107 | 625 | 482 | 95 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Constants used throughout the project.
module Stack.Constants
(builtConfigFileFromDir
,builtFileFromDir
,configuredFileFromDir
,defaultShakeThreads
,distDirFromDir
,distRelativeDir
,haskellModuleExts
,imageStagingDir
,projectDockerSandboxDir
,rawGithubUrl
,stackDotYaml
,stackRootEnvVar
,userDocsDir
,configCacheFile
,configCabalMod
,buildCacheFile
,testSuccessFile
,testBuiltFile
,benchBuiltFile
,stackProgName
,wiredInPackages
,cabalPackageName
,implicitGlobalDir
,hpcRelativeDir
,hpcDirFromDir
,dotHpc
,objectInterfaceDir
,templatesDir)
where
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Reader
import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
import Data.Text (Text)
import qualified Data.Text as T
import Path as FL
import Prelude
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-- | Extensions for anything that can be a Haskell module.
haskellModuleExts :: [Text]
haskellModuleExts = haskellFileExts ++ haskellPreprocessorExts
-- | Extensions used for Haskell modules. Excludes preprocessor ones.
haskellFileExts :: [Text]
haskellFileExts = ["hs", "hsc", "lhs"]
-- | Extensions for modules that are preprocessed by common preprocessors.
haskellPreprocessorExts :: [Text]
haskellPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"]
-- | The filename used for completed build indicators.
builtFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
builtFileFromDir fp = do
dist <- distDirFromDir fp
return (dist </> $(mkRelFile "stack.gen"))
-- | The filename used for completed configure indicators.
configuredFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
configuredFileFromDir fp = do
dist <- distDirFromDir fp
return (dist </> $(mkRelFile "setup-config"))
-- | The filename used for completed build indicators.
builtConfigFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
builtConfigFileFromDir fp =
liftM (fp </>) builtConfigRelativeFile
-- | Relative location of completed build indicators.
builtConfigRelativeFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> m (Path Rel File)
builtConfigRelativeFile = do
dist <- distRelativeDir
return (dist </> $(mkRelFile "stack.config"))
-- | Default shake thread count for parallel builds.
defaultShakeThreads :: Int
defaultShakeThreads = 4
-- -- | Hoogle database file.
-- hoogleDatabaseFile :: Path Abs Dir -> Path Abs File
-- hoogleDatabaseFile docLoc =
-- docLoc </>
-- $(mkRelFile "default.hoo")
-- -- | Extension for hoogle databases.
-- hoogleDbExtension :: String
-- hoogleDbExtension = "hoo"
-- -- | Extension of haddock files
-- haddockExtension :: String
-- haddockExtension = "haddock"
-- | User documentation directory.
userDocsDir :: Config -> Path Abs Dir
userDocsDir config = configStackRoot config </> $(mkRelDir "doc/")
-- | Output .o/.hi directory.
objectInterfaceDir :: BuildConfig -> Path Abs Dir
objectInterfaceDir bconfig = bcWorkDir bconfig </> $(mkRelDir "odir/")
-- | The filename used for dirtiness check of source files.
buildCacheFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
buildCacheFile dir = do
liftM
(</> $(mkRelFile "stack-build-cache"))
(distDirFromDir dir)
-- | The filename used to mark tests as having succeeded
testSuccessFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testSuccessFile dir =
liftM
(</> $(mkRelFile "stack-test-success"))
(distDirFromDir dir)
-- | The filename used to mark tests as having built
testBuiltFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testBuiltFile dir =
liftM
(</> $(mkRelFile "stack-test-built"))
(distDirFromDir dir)
-- | The filename used to mark benchmarks as having built
benchBuiltFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
benchBuiltFile dir =
liftM
(</> $(mkRelFile "stack-bench-built"))
(distDirFromDir dir)
-- | The filename used for dirtiness check of config.
configCacheFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCacheFile dir = do
liftM
(</> $(mkRelFile "stack-config-cache"))
(distDirFromDir dir)
-- | The filename used for modification check of .cabal
configCabalMod :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCabalMod dir = do
liftM
(</> $(mkRelFile "stack-cabal-mod"))
(distDirFromDir dir)
-- | Directory for HPC work.
hpcDirFromDir
:: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
hpcDirFromDir fp =
liftM (fp </>) hpcRelativeDir
-- | Relative location of directory for HPC work.
hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> m (Path Rel Dir)
hpcRelativeDir =
liftM (</> $(mkRelDir "hpc")) distRelativeDir
-- | Package's build artifacts directory.
distDirFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
distDirFromDir fp =
liftM (fp </>) distRelativeDir
-- | Directory for project templates.
templatesDir :: Config -> Path Abs Dir
templatesDir config = configStackRoot config </> $(mkRelDir "templates")
-- | Relative location of build artifacts.
distRelativeDir :: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> m (Path Rel Dir)
distRelativeDir = do
cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
platform <- platformRelDir
cabal <-
parseRelDir $
packageIdentifierString
(PackageIdentifier cabalPackageName cabalPkgVer)
return $
workDirRel </>
$(mkRelDir "dist") </>
platform </>
cabal
-- | Get a URL for a raw file on Github
rawGithubUrl :: Text -- ^ user/org name
-> Text -- ^ repo name
-> Text -- ^ branch name
-> Text -- ^ filename
-> Text
rawGithubUrl org repo branch file = T.concat
[ "https://raw.githubusercontent.com/"
, org
, "/"
, repo
, "/"
, branch
, "/"
, file
]
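-- A hypothetical example (not part of the original module) showing how the
-- pieces are concatenated; the repository coordinates are made up:
--
-- >>> rawGithubUrl "commercialhaskell" "stack" "master" "README.md"
-- "https://raw.githubusercontent.com/commercialhaskell/stack/master/README.md"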
-- -- | Hoogle database file.
-- hoogleDatabaseFile :: Path Abs Dir -> Path Abs File
-- hoogleDatabaseFile docLoc =
-- docLoc </>
-- $(mkRelFile "default.hoo")
-- -- | Extension for hoogle databases.
-- hoogleDbExtension :: String
-- hoogleDbExtension = "hoo"
-- -- | Extension of haddock files
-- haddockExtension :: String
-- haddockExtension = "haddock"
-- | Docker sandbox from project root.
projectDockerSandboxDir :: Path Abs Dir -> Path Abs Dir
projectDockerSandboxDir projectRoot = projectRoot </> workDirRel </> $(mkRelDir "docker/")
-- | Image staging dir from project root.
imageStagingDir :: Path Abs Dir -> Path Abs Dir
imageStagingDir p = p </> workDirRel </> $(mkRelDir "image/")
-- | Name of the 'stack' program.
stackProgName :: String
stackProgName = "stack"
-- | The filename used for the stack config file.
stackDotYaml :: Path Rel File
stackDotYaml = $(mkRelFile "stack.yaml")
-- | Environment variable used to override the '~/.stack' location.
stackRootEnvVar :: String
stackRootEnvVar = "STACK_ROOT"
-- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey
wiredInPackages :: HashSet PackageName
wiredInPackages =
maybe (error "Parse error in wiredInPackages") HashSet.fromList mparsed
where
mparsed = sequence $ map parsePackageName
[ "ghc-prim"
, "integer-gmp"
, "integer-simple"
, "base"
, "rts"
, "template-haskell"
, "dph-seq"
, "dph-par"
, "ghc"
, "interactive"
]
-- | Just to avoid repetition and magic strings.
cabalPackageName :: PackageName
cabalPackageName =
$(mkPackageName "Cabal")
-- | Implicit global directory used when outside of a project.
implicitGlobalDir :: Path Abs Dir -- ^ Stack root.
-> Path Abs Dir
implicitGlobalDir p =
p </>
$(mkRelDir "global")
-- | Where .mix files go.
dotHpc :: Path Rel Dir
dotHpc = $(mkRelDir ".hpc")
| duplode/stack | src/Stack/Constants.hs | bsd-3-clause | 9,433 | 0 | 12 | 2,252 | 1,946 | 1,046 | 900 | 197 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeInType #-}
module T14253 where
import GHC.Exts
import Data.Kind
data TypeRep (a :: k) where
Con :: TypeRep (a :: k)
TrFun :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
TypeRep a
-> TypeRep b
-> TypeRep (a -> b)
pattern Fun :: forall k (fun :: k). ()
=> forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(arg :: TYPE r1) (res :: TYPE r2).
(k ~ Type, fun ~~ (arg -> res))
=> TypeRep arg
-> TypeRep res
-> TypeRep fun
pattern Fun arg res <- TrFun arg res
data Dynamic where
Dynamic :: forall a. TypeRep a -> a -> Dynamic
-- Adding this results in failure
{-# COMPLETE Con #-}
dynApply :: Dynamic -> Dynamic -> Maybe Dynamic
-- Changing TrFun to Fun also results in failure
dynApply (Dynamic (Fun ta tr) f) (Dynamic ta' x) = undefined
dynApply _ _ = Nothing
| ezyang/ghc | testsuite/tests/pmcheck/complete_sigs/T14253.hs | bsd-3-clause | 1,082 | 0 | 13 | 340 | 321 | 183 | 138 | -1 | -1 |
-- Copyright (c) 2000 Galois Connections, Inc.
-- All rights reserved. This software is distributed as
-- free software under the license in the file "LICENSE",
-- which is included in the distribution.
-- Modified to use stdout (for testing)
module Illumination
( Object
, Light (..)
, light, pointlight, spotlight
, render
) where
import Data.Array
import Data.Char(chr)
import Data.Maybe
import Geometry
import CSG
import Surface
import Misc
type Object = CSG (SurfaceFn Color Double)
data Cxt = Cxt {ambient::Color, lights::[Light], object::Object, depth::Int}
deriving Show
render :: (Matrix,Matrix) -> Color -> [Light] -> Object -> Int ->
Radian -> Int -> Int -> String -> IO ()
render (m,m') amb ls obj dep fov wid ht file
= do { debugging
; txt <- readFile "galois.sample"
; let vals = read txt
; let rt_vals = showBitmap' wid ht pixels
; if length vals /= length rt_vals
then print ("BAD LENGTH",length vals,length rt_vals)
else do {
; let cmp = sum(zipWith (\ a b -> abs (a - b) * abs (a - b)) vals rt_vals)
; print $ if cmp <= (length vals * 16) then ("GOOD MATCH") else ("BAD MATCH:" ++ show cmp)
}}
where
debugging = return ()
{-
do { putStrLn (show cxt)
; putStrLn (show (width, delta, aspect, left, top))
}
-}
obj' = transform (m',m) obj
ls' = [ transformLight m' l | l <- ls ]
pixelA = listArray ((1,1), (ht,wid))
[ illumination cxt (start,pixel i j)
| j <- take ht [0.5..]
, i <- take wid [0.5..] ]
antiA = pixelA //
[ (ix, superSample ix (pixelA ! ix))
| j <- [2 .. ht - 1], i <- [2 .. wid - 1]
, let ix = (j, i)
, contrast ix pixelA ]
pixels = [ [ illumination cxt (start,pixel i j) | i<- take wid [0.5..] ]
| j <- take ht [0.5..]
]
cxt = Cxt {ambient=amb, lights=ls', object=obj', depth=dep}
start = point 0 0 (-1)
width = 2 * tan (fov/2)
delta = width / fromIntegral wid
aspect = fromIntegral ht / fromIntegral wid
left = - width / 2
top = - left * aspect
pixel i j = vector (left + i*delta) (top - j*delta) 1
superSample (y, x) col = avg $ col:
[ illumination cxt (start, pixel (fromIntegral x - 0.5 + xd) (fromIntegral y - 0.5 + yd))
| (xd, yd) <- [(-0.333, 0.0), (0.333, 0.0), (0.0, -0.333), (0.0, 0.333)]
]
avg cs = divN (fromIntegral (length cs)) (uncolor (sumCC cs))
where divN n (r,g,b) = color (r / n) (g / n) (b / n)
contrast :: (Int, Int) -> Array (Int, Int) Color -> Bool
contrast (x, y) arr = any diffMax [ subCC cur (arr ! (x + xd, y + yd))
| xd <- [-1, 1], yd <- [-1, 1]
]
where cur = arr ! (x, y)
diffMax col = (abs r) > 0.25 || (abs g) > 0.2 || (abs b) > 0.4
where
(r,g,b) = uncolor col
illumination :: Cxt -> Ray -> Color
illumination cxt (r,v)
| depth cxt <= 0 = black
| otherwise = case castRay (r,v) (object cxt) of
Nothing -> black
Just info -> illum (cxt{depth=(depth cxt)-1}) info v
illum :: Cxt -> (Point,Vector,Properties Color Double) -> Vector -> Color
illum cxt (pos,normV,(col,kd,ks,n)) v
= ambTerm `addCC` difTerm `addCC` spcTerm `addCC` recTerm
where
visibleLights = unobscured pos (object cxt) (lights cxt) normV
d = depth cxt
amb = ambient cxt
newV = subVV v (multSV (2 * dot normV v) normV)
ambTerm = multSC kd (multCC amb col)
difTerm = multSC kd (sumCC [multSC (dot normV lj) (multCC intensity col)
|(loc,intensity) <- visibleLights,
let lj = normalize ({- pos `subVV` -} loc)])
-- ZZ might want to avoid the phong, when you can...
spcTerm = multSC ks (sumCC [multSC ((dot normV hj) ** n ) (multCC intensity col)
|(loc,intensity) <- visibleLights,
-- ZZ note this is specific to the light at infinity
let lj = {- pos `subVV` -} normalize loc,
let hj = normalize (lj `subVV` normalize v)])
recTerm = if recCoeff `nearC` black then black else multCC recCoeff recRay
recCoeff = multSC ks col
recRay = illumination cxt (pos,newV)
showBitmapA :: Int -> Int -> Array (Int, Int) Color -> String
showBitmapA wid ht arr
= header ++ concatMap scaleColor (elems arr)
where
scaleColor col = [scalePixel r, scalePixel g, scalePixel b]
where (r,g,b) = uncolor col
header = "P6\n#Galois\n" ++ show wid ++ " " ++ show ht ++ "\n255\n"
showBitmap :: Int -> Int ->[[Color]] -> String
showBitmap wid ht pss
-- type of assert | length pss == ht && all (\ ps -> length ps == wid) pss
= header ++ concat [[scalePixel r,scalePixel g,scalePixel b]
| ps <- pss, (r,g,b) <- map uncolor ps]
where
header = "P6\n#Galois\n" ++ show wid ++ " " ++ show ht ++ "\n255\n"
showBitmap _ _ _ = error "incorrect length of bitmap string"
scalePixel :: Double -> Char
scalePixel p = chr (floor (clampf p * 255))
showBitmap' :: Int -> Int ->[[Color]] -> [Int]
showBitmap' wid ht pss
-- type of assert | length pss == ht && all (\ ps -> length ps == wid) pss
= concat [ concat [ [scalePixel' r,scalePixel' g,scalePixel' b]
| (r,g,b) <- map uncolor ps]
| ps <- pss ]
where
header = "P3\n#Galois\n" ++ show wid ++ " " ++ show ht ++ "\n255\n"
showBitmap' _ _ _ = error "incorrect length of bitmap string"
scalePixel' :: Double -> Int
scalePixel' p = floor (clampf p * 255)
-- Lights
data Light = Light Vector Color
| PointLight Point Color
| SpotLight Point Point Color Radian Double
deriving Show
light :: Coords -> Color -> Light
light (x,y,z) color =
Light (normalize (vector (-x) (-y) (-z))) color
pointlight (x,y,z) color =
PointLight (point x y z) color
spotlight (x,y,z) (p,q,r) col cutoff exp =
SpotLight (point x y z) (point p q r) col cutoff exp
transformLight m (Light v c) = Light (multMV m v) c
transformLight m (PointLight p c) = PointLight (multMP m p) c
transformLight m (SpotLight p q c r d) = SpotLight (multMP m p) (multMP m q) c r d
unobscured :: Point -> Object -> [Light] -> Vector -> [(Vector,Color)]
unobscured pos obj lights normV = catMaybes (map (unobscure pos obj normV) lights)
unobscure :: Point -> Object -> Vector -> Light -> Maybe (Vector,Color)
unobscure pos obj normV (Light vec color)
-- ZZ probably want to make this faster
| vec `dot` normV < 0 = Nothing
| intersects (pos `addPV` (0.0001 `multSV` vec),vec) obj = Nothing
| otherwise = Just (vec,color)
unobscure pos obj normV (PointLight pp color)
| vec `dot` normV < 0 = Nothing
| intersectWithin (pos `addPV` (0.0001 `multSV` (normalize vec)), vec) obj = Nothing
| otherwise = Just (vec,is)
where vec = pp `subPP` pos
is = attenuate vec color
unobscure org obj normV (SpotLight pos at color cutoff exp)
| vec `dot` normV < 0 = Nothing
| intersectWithin (org `addPV` (0.0001 `multSV` (normalize vec)), vec) obj = Nothing
| angle > cutoff = Nothing
| otherwise = Just (vec, is)
where vec = pos `subPP` org
vec' = pos `subPP` at
angle = acos (normalize vec `dot` (normalize vec'))
asp = normalize (at `subPP` pos)
qsp = normalize (org `subPP` pos)
is = attenuate vec (((asp `dot` qsp) ** exp) `multSC` color)
attenuate :: Vector -> Color -> Color
attenuate vec color = (100 / (99 + sq (norm vec))) `multSC` color
--
castRay ray p
= case intersectRayWithObject ray p of
(True, _, _) -> Nothing -- eye is inside
(False, [], _) -> Nothing -- eye is inside
(False, (0, b, _) : _, _) -> Nothing -- eye is inside
(False, (i, False, _) : _, _) -> Nothing -- eye is inside
(False, (t, b, (s, p0)) : _, _) ->
let (v, prop) = surface s p0 in
Just (offsetToPoint ray t, v, prop)
intersects ray p
= case intersectRayWithObject ray p of
(True, _, _) -> False
(False, [], _) -> False
(False, (0, b, _) : _, _) -> False
(False, (i, False, _) : _, _) -> False
(False, (i, b, _) : _, _) -> True
intersectWithin :: Ray -> Object -> Bool
intersectWithin ray p
= case intersectRayWithObject ray p of
(True, _, _) -> False -- eye is inside
(False, [], _) -> False -- eye is inside
(False, (0, b, _) : _, _) -> False -- eye is inside
(False, (i, False, _) : _, _) -> False -- eye is inside
(False, (t, b, _) : _, _) -> t < 1.0
| gridaphobe/hpc | tests/raytrace/Illumination.hs | bsd-3-clause | 8,981 | 11 | 21 | 2,834 | 3,623 | 1,964 | 1,659 | 171 | 5 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>SAML Support</title>
<maps>
<homeID>saml</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| thc202/zap-extensions | addOns/saml/src/main/javahelp/help_tr_TR/helpset_tr_TR.hs | apache-2.0 | 958 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
module MultiMatchesIn1 where
square x y = let sq 0 = 0
sq x = x ^ pow
pow = 2
in sq x + sq y
where
pow' = 56
g = let blah = 42 + blah2
blah2 = 9
in blah
where
blah = 56
| SAdams601/HaRe | old/testing/letToWhere/MultiMatchesIn1_TokOut.hs | bsd-3-clause | 304 | 0 | 9 | 183 | 94 | 48 | 46 | 10 | 2 |
module SPARC.CodeGen.Base (
InstrBlock,
CondCode(..),
ChildCode64(..),
Amode(..),
Register(..),
setSizeOfRegister,
getRegisterReg,
mangleIndexTree
)
where
import SPARC.Instr
import SPARC.Cond
import SPARC.AddrMode
import SPARC.Regs
import Size
import Reg
import CodeGen.Platform
import DynFlags
import Cmm
import PprCmmExpr ()
import Platform
import Outputable
import OrdList
--------------------------------------------------------------------------------
-- | 'InstrBlock's are the insn sequences generated by the insn selectors.
-- They are really trees of insns to facilitate fast appending, where a
-- left-to-right traversal yields the insns in the correct order.
--
type InstrBlock
= OrdList Instr
-- | Condition codes passed up the tree.
--
data CondCode
= CondCode Bool Cond InstrBlock
-- | a.k.a "Register64"
-- Reg is the lower 32-bit temporary which contains the result.
-- Use getHiVRegFromLo to find the other VRegUnique.
--
-- Rules of this simplified insn selection game are therefore that
-- the returned Reg may be modified
--
data ChildCode64
= ChildCode64
InstrBlock
Reg
-- | Holds code that references a memory address.
data Amode
= Amode
-- the AddrMode we can use in the instruction
-- that does the real load\/store.
AddrMode
-- other setup code we have to run first before we can use the
-- above AddrMode.
InstrBlock
--------------------------------------------------------------------------------
-- | Code to produce a result into a register.
-- If the result must go in a specific register, it comes out as Fixed.
-- Otherwise, the parent can decide which register to put it in.
--
data Register
= Fixed Size Reg InstrBlock
| Any Size (Reg -> InstrBlock)
-- | Change the size field in a Register.
setSizeOfRegister
:: Register -> Size -> Register
setSizeOfRegister reg size
= case reg of
Fixed _ reg code -> Fixed size reg code
Any _ codefn -> Any size codefn
--------------------------------------------------------------------------------
-- | Grab the Reg for a CmmReg
getRegisterReg :: Platform -> CmmReg -> Reg
getRegisterReg _ (CmmLocal (LocalReg u pk))
= RegVirtual $ mkVirtualReg u (cmmTypeSize pk)
getRegisterReg platform (CmmGlobal mid)
= case globalRegMaybe platform mid of
Just reg -> RegReal reg
Nothing -> pprPanic
"SPARC.CodeGen.Base.getRegisterReg: global is in memory"
(ppr $ CmmGlobal mid)
-- Expand CmmRegOff. ToDo: should we do it this way around, or convert
-- CmmExprs into CmmRegOff?
mangleIndexTree :: DynFlags -> CmmExpr -> CmmExpr
mangleIndexTree dflags (CmmRegOff reg off)
= CmmMachOp (MO_Add width) [CmmReg reg, CmmLit (CmmInt (fromIntegral off) width)]
where width = typeWidth (cmmRegType dflags reg)
mangleIndexTree _ _
= panic "SPARC.CodeGen.Base.mangleIndexTree: no match"
| forked-upstream-packages-for-ghcjs/ghc | compiler/nativeGen/SPARC/CodeGen/Base.hs | bsd-3-clause | 3,178 | 0 | 11 | 840 | 473 | 267 | 206 | 58 | 2 |
module Main where
import BuggyOpt() -- bug inducer!
import Prelude hiding (lookup)
import Good
import M
mkLin :: Array Int -> Map (Array Int) Int
mkLin mseqs =
(isJust (lookup mseqs empty)) `seq` (insert mseqs 1 empty)
main :: IO ()
main = print $ isGood $ mkLin (array (1,1) [ (1,array (3,3) [(3, 42)]) ]!!!1)
| urbanslug/ghc | testsuite/tests/stranal/should_run/T8425/Main.hs | bsd-3-clause | 318 | 0 | 14 | 63 | 160 | 90 | 70 | 10 | 1 |
-- Tests grouping WITH a by clause
{-# OPTIONS_GHC -XMonadComprehensions -XTransformListComp #-}
module Main where
import GHC.Exts(the, groupWith)
main = putStrLn (show output)
where
output = [ (the dept, sum salary, name)
| (dept, salary, name) <- [("A", 1, "Bob"), ("B", 2, "Fred"), ("A", 5, "Jim"), ("A", 9, "Jim")]
, then group by dept using groupWith]
| urbanslug/ghc | testsuite/tests/deSugar/should_run/mc08.hs | bsd-3-clause | 394 | 0 | 11 | 93 | 132 | 81 | 51 | 7 | 1 |
{-%
# Text.ProseDoc
`Text.ProseDoc` is a tool that reads markdown formatted comments from a
Haskell source file and composes the comments and the associated source
into a HTML document where the prose and source flow side by side in sync.
The concept is blatantly borrowed from the CoffeeScript tool [docco](http://jashkenas.github.com/docco/).
ProseDoc can be seen as an alternative way to write and format literal Haskell
code. However, the main motivation behind writing ProseDoc was simply that it
seemed like an interesting project to tinker with.
The source code repository is located at [https://github.com/shangaslammi/prose-doc](https://github.com/shangaslammi/prose-doc).
-}
module Text.ProseDoc where
import Control.Applicative ((<$>))
import Control.Monad ((<=<), filterM, forM)
import Control.Error
import Data.Default (def)
import Data.Monoid (mempty)
import Data.List (sort, isPrefixOf)
import System.Directory (getDirectoryContents, doesFileExist, doesDirectoryExist)
import System.FilePath ((</>), takeExtension, makeRelative)
import Text.Pandoc.SelfContained
import Text.ProseDoc.Rendering
import Text.ProseDoc.Parser
import Paths_prose_doc
{-%
The current version can generate a document either from a single source file
or a hierarchical module structure.
-}
generatePage :: FilePath -> IO String
generatePage path = do
isFile <- doesFileExist path
if isFile then processSingle path else processDirectory path
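-- A hypothetical usage sketch (the source directory and the output file name
-- below are made up): write the generated page for a project to disk.
_writeExamplePage :: IO ()
_writeExamplePage = generatePage "src" >>= writeFile "prose-doc.html"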
processSingle :: FilePath -> IO String
processSingle path = do
t <- runScript (parseSourceFile path)
{-%
For a single document, we leave out the TOC and simply format the given
module. The [`makeSelfContained`](http://hackage.haskell.org/packages/archive/pandoc/latest/doc/html/Text-Pandoc-SelfContained.html#v:makeSelfContained)
function from [`pandoc`](http://hackage.haskell.org/package/pandoc)
is used to embed the style information from an external css file.
-}
cssPath <- getDataFileName "css/prose.css"
makeSelfContained def
$ renderPage cssPath mempty
$ [moduleToHtml (path, t)]
findModules :: FilePath -> IO [FilePath]
findModules root = do
isFile <- doesFileExist root
if isFile
{-%
For directories, we walk through all subdirectories and gather
all files with the extension `.hs`.
-}
then return $ if takeExtension root == ".hs" then [root] else []
else fmap concat
$ mapM findModules
=<< map (root </>) . filter (not . isPrefixOf ".")
<$> getDirectoryContents root
processDirectory :: FilePath -> IO String
processDirectory path = do
{-%
Currently, the modules are presented in alphabetical order but this should
be user configurable so that more relevant modules can be made to appear
first.
-}
mods <- sort . map (makeRelative path) <$> findModules path
htmls <- forM mods $ \m -> do
t <- runScript $ parseSourceFile (path </> m)
return $ moduleToHtml (m, t)
let toc = htmlTOC mods
cssPath <- getDataFileName "css/prose.css"
makeSelfContained def $ renderPage cssPath toc htmls
|
shangaslammi/prose-doc
|
src/Text/ProseDoc.hs
|
mit
| 3,087 | 0 | 16 | 552 | 513 | 267 | 246 | 42 | 3 |
import System.Log.Logger
import System.IO
import Network.Wreq
import Control.Lens (view)
import qualified Data.ByteString.Lazy as BS
import Control.Concurrent.Async
downloadUrl :: String -> IO ()
downloadUrl url = do
updateGlobalLogger "DownloaderApp" (setLevel INFO)
infoM "DownloaderApp" ("Downloading: " ++ url)
response <- get url
BS.writeFile fileName (view responseBody response)
infoM "DownloaderApp" ("Downloaded " ++ url ++ " to " ++ fileName)
where fileName = "/tmp/lastDownloadedFile"
main = do
mapConcurrently downloadUrl [ "http://www.google.com"
, "http://www.reddit.com"
, "http://www.github.com"
]
|
codygman/haskell-concurrent-downloader-example
|
src/Main.hs
|
mit
| 725 | 0 | 11 | 184 | 173 | 90 | 83 | 18 | 1 |
module System.FSQuery.Util where
import Data.Char (isSpace)
import qualified Data.Char as DChar
trimWhile :: (Char -> Bool) -> String -> String
trimWhile p = f . f
where f = reverse . dropWhile p
toLowerString :: String -> String
toLowerString "" = ""
toLowerString (x:xs) = DChar.toLower x : toLowerString xs
digits = ['0'..'9']
digits1 = tail digits
letters = ['a'..'z'] ++ ['A'..'Z']
doubleQuote :: String -> String
doubleQuote x = q ++ x ++ q where q = "\""
|
qwfy/fsquery
|
src/System/FSQuery/Util.hs
|
mit
| 471 | 0 | 8 | 88 | 184 | 101 | 83 | 14 | 1 |
module Text.Parsec.Utils
( parseWhiteSpaces
, parseUnquotedString
) where
import Control.Applicative ((<|>))
import qualified Text.Parsec as Parsec
parseWhiteSpaces :: Parsec.Parsec String () String
parseWhiteSpaces = Parsec.try (Parsec.many1 Parsec.space) <|> Parsec.many1 Parsec.tab
parseUnquotedString :: Parsec.Parsec String () String
parseUnquotedString = Parsec.many1 (Parsec.noneOf ['"', ' ', '\t', '\n', '\'', '\\', '\r'])
|
r-peck/Rome
|
src/Text/Parsec/Utils.hs
|
mit
| 469 | 0 | 9 | 86 | 137 | 78 | 59 | 9 | 1 |
{-# LANGUAGE GADTs #-}
module Kafkaesque.Request.KafkaRequest
( Request(..)
, Response(..)
, TimeoutMs(..)
, TopicData
, PartitionData
, ProduceResponseTopic
, FetchResponseTopic
, FetchResponsePartition
, FetchRequestPartition
, FetchRequestTopic
, OffsetListResponsePartition
, OffsetListResponseTopic
, Broker(..)
, PartitionMetadata(..)
, TopicMetadata(..)
, OffsetListRequestPartition
, OffsetListRequestTopic
, OffsetListRequestTimestamp(..)
, OffsetCommitPartitionData
, OffsetCommitTopicData
) where
import Data.ByteString (ByteString)
import Data.Int (Int16, Int32, Int64)
import Kafkaesque.KafkaError (KafkaError)
import Kafkaesque.Message (MessageSet)
import Kafkaesque.Protocol.ApiKey
(ApiVersions, Fetch, Metadata, OffsetCommit, OffsetFetch, Offsets,
Produce)
import Kafkaesque.Protocol.ApiVersion (V0, V1)
-- Produce
newtype TimeoutMs =
TimeoutMs Int32
type PartitionData = (Int32, MessageSet)
type TopicData = (String, [PartitionData])
-- Fetch
type FetchRequestPartition = (Int32, Int64, Int32)
type FetchRequestTopic = (String, [FetchRequestPartition])
-- Offsets
data OffsetListRequestTimestamp
= LatestOffset
| EarliestOffset
| OffsetListTimestamp Int64
type OffsetListRequestPartition = (Int32, OffsetListRequestTimestamp, Int32)
type OffsetListRequestTopic = (String, [OffsetListRequestPartition])
-- Metadata
data Broker =
Broker Int32
String
Int32
data PartitionMetadata =
PartitionMetadata KafkaError
Int32
Int32
[Int32]
[Int32]
data TopicMetadata =
TopicMetadata KafkaError
String
[PartitionMetadata]
-- OffsetCommit
type OffsetCommitPartitionData = (Int32, Int64, String)
type OffsetCommitTopicData = (String, [OffsetCommitPartitionData])
data Request k v where
ProduceRequestV0 :: Int16 -> TimeoutMs -> [TopicData] -> Request Produce V0
ProduceRequestV1 :: Int16 -> TimeoutMs -> [TopicData] -> Request Produce V1
FetchRequestV0
:: Int32 -> Int32 -> Int32 -> [FetchRequestTopic] -> Request Fetch V0
OffsetsRequestV0 :: Int32 -> [OffsetListRequestTopic] -> Request Offsets V0
MetadataRequestV0 :: Maybe [String] -> Request Metadata V0
OffsetCommitRequestV0
:: String -> [OffsetCommitTopicData] -> Request OffsetCommit V0
OffsetFetchRequestV0
:: String -> [(String, [Int32])] -> Request OffsetFetch V0
ApiVersionsRequestV0 :: Maybe [Int16] -> Request ApiVersions V0
-- Produce
type ProduceResponsePartition = (Int32, KafkaError, Int64)
type ProduceResponseTopic = (String, [ProduceResponsePartition])
-- Fetch
type PartitionHeader = (Int32, KafkaError, Int64)
type FetchResponsePartition = (PartitionHeader, [(Int64, ByteString)])
type FetchResponseTopic = (String, [FetchResponsePartition])
-- Offsets
type OffsetListResponsePartition = (Int32, KafkaError, Maybe [Int64])
type OffsetListResponseTopic = (String, [OffsetListResponsePartition])
data Response k v where
ProduceResponseV0 :: [ProduceResponseTopic] -> Response Produce V0
ProduceResponseV1 :: [ProduceResponseTopic] -> Int32 -> Response Produce V1
FetchResponseV0 :: [FetchResponseTopic] -> Response Fetch V0
OffsetsResponseV0 :: [OffsetListResponseTopic] -> Response Offsets V0
MetadataResponseV0 :: [Broker] -> [TopicMetadata] -> Response Metadata V0
OffsetCommitResponseV0
:: [(String, [(Int32, KafkaError)])] -> Response OffsetCommit V0
OffsetFetchResponseV0
:: [(String, [(Int32, Int64, String, KafkaError)])]
-> Response OffsetFetch V0
ApiVersionsResponseV0
:: KafkaError -> [(Int16, Int16, Int16)] -> Response ApiVersions V0
|
cjlarose/kafkaesque
|
src/Kafkaesque/Request/KafkaRequest.hs
|
mit
| 3,704 | 0 | 10 | 655 | 903 | 550 | 353 | 90 | 0 |
import Control.Monad
import System.IO
import Text.Printf
fac = product . flip take [1..]
solve x = 1 + sum (map (\t -> x ** fromIntegral t / fac t) [1..9])
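-- solve x sums the Maclaurin series of exp x truncated after the x^9 term:
-- 1 + x + x^2/2! + ... + x^9/9!  (fac t is product [1..t], i.e. t!).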
main :: IO ()
main = do
n_temp <- getLine
let n = read n_temp :: Int
forM_ [1..n] $ \a0 -> do
x_temp <- getLine
let x = read x_temp :: Double
printf "%.4f\n" (solve x)
|
ahavrylyuk/hackerrank
|
haskell/eval-ex.hs
|
mit
| 367 | 0 | 14 | 110 | 176 | 87 | 89 | 13 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
newtype Velocity = Velocity { unVelocity :: Double }
deriving (Eq, Ord)
v :: Velocity
v = Velocity 2.718
x :: Double
x = 6.636
err = v + x
|
riwsky/wiwinwlh
|
src/newtype.hs
|
mit
| 189 | 1 | 6 | 40 | 70 | 36 | 34 | 8 | 1 |
module Rotations where
import Data.Ord (compare)
import Data.List (minimumBy)
rotateWord :: String -> (Int, String)
rotateWord word = minimumBy compareWithIndex allRotations
where
doubleWord = word ++ word
len = length word
allRotations = map (\n -> (n, take len (drop n doubleWord))) [0..(len-1)]
compareWithIndex (_, a) (_, b) = compare a b
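-- For example: rotateWord "bca" == (2, "abc"), i.e. the lexicographically
-- smallest rotation of the word together with the offset that produces it.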
|
tyehle/programming-studio
|
2017-W20/tobin/src/Rotations.hs
|
mit
| 363 | 0 | 13 | 72 | 147 | 82 | 65 | 9 | 1 |
module PinkKellet (kellet) where
kellet :: [Double] -> [Double] -- ^ pinked noise
kellet w = kellet' w 0 0 0 0 0 0 0
where kellet' [] _ _ _ _ _ _ _ = []
kellet' (white:ws) b0 b1 b2 b3 b4 b5 b6 = pink : kellet' ws b0' b1' b2' b3' b4' b5' b6'
where b0' = 0.99886 * b0 + white * 0.0555179;
b1' = 0.99332 * b1 + white * 0.0750759;
b2' = 0.96900 * b2 + white * 0.1538520;
b3' = 0.86650 * b3 + white * 0.3104856;
b4' = 0.55000 * b4 + white * 0.5329522;
b5' = -0.7616 * b5 - white * 0.0168980;
pink = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
b6' = white * 0.115926
|
danplubell/color-noise
|
library/PinkKellet.hs
|
mit
| 735 | 0 | 16 | 304 | 287 | 154 | 133 | 13 | 2 |
module Chip8.Display (initDisplay) where
import Foreign.C.Types
import Linear
import qualified SDL
initDisplay :: IO ()
initDisplay = undefined
|
narrative/chip8
|
src/Chip8/Display.hs
|
mit
| 139 | 0 | 6 | 20 | 39 | 24 | 15 | 6 | 1 |
----------------------------------------------
-- CIS 194, Homework 1
-- Author: Glenn R. Fisher
-- Date: March 29, 2016
----------------------------------------------
----------------------------------------------
-- Part 1: Validating Credit Card Numbers
----------------------------------------------
-- | Convert positive integers to a list of digits. Non-positive integers are
-- converted to an empty list.
--
-- > toDigits 1234 == [1, 2, 3, 4]
-- > toDigits 0 == []
-- > toDigits (-1234) == []
toDigits :: Integer -> [Integer]
toDigits x = reverse (toDigitsRev x)
-- | Convert positive integers to a reversed list of digits. Non-positive
-- integers are converted to an empty list.
--
-- > toDigitsRev 1234 == [4, 3, 2, 1]
-- > toDigitsRev 0 == []
-- > toDigitsRev (-1234) == []
toDigitsRev :: Integer -> [Integer]
toDigitsRev x
| x <= 0 = []
| otherwise = (x `mod` 10) : (toDigitsRev (x `div` 10))
-- | Double every other number in a list, starting from the left. The result is
-- a list of the same length where the second, fourth, etc. numbers are doubled.
--
-- > doubleEveryOtherFromLeft [1, 2, 3] == [1, 4, 3]
-- > doubleEveryOtherFromLeft [1, 2, 3, 4] == [1, 4, 3, 8]
doubleEveryOtherFromLeft :: [Integer] -> [Integer]
doubleEveryOtherFromLeft [] = []
doubleEveryOtherFromLeft (x:[]) = [x]
doubleEveryOtherFromLeft (x:y:xs) = [x, 2*y] ++ doubleEveryOtherFromLeft xs
-- | Double every other number in a list, starting from the right. The result is
-- a list of the same length where the second-to-last, fourth-to-last, etc.
-- numbers are doubled.
--
-- > doubleEveryOtherFromRight [1, 2, 3] == [1, 4, 3]
-- > doubleEveryOtherFromRight [1, 2, 3, 4] == [2, 2, 6, 4]
doubleEveryOtherFromRight :: [Integer] -> [Integer]
doubleEveryOtherFromRight xs = reverse (doubleEveryOtherFromLeft (reverse xs))
-- | Double every other number in a list, starting from the right. The result is
-- a list of the same length where the second-to-last, fourth-to-last, etc.
-- numbers are doubled.
--
-- > doubleEveryOther [1, 2, 3] == [1, 4, 3]
-- > doubleEveryOther [1, 2, 3, 4] == [2, 2, 6, 4]
doubleEveryOther :: [Integer] -> [Integer]
doubleEveryOther = doubleEveryOtherFromRight
-- | Calculate the sum of all digits in the list.
--
-- > sumDigits [1, 2, 3] == 6
-- > sumDigits [10, 11, 12] == 6
sumDigits :: [Integer] -> Integer
sumDigits [] = 0
sumDigits (x:xs) = sum (toDigits x) + sumDigits xs
-- | Validate a credit card number. Returns True if the credit card number is
-- valid, and False otherwise.
--
-- Validation Algorithm:
-- 1. Double the value of every second digit beginning from the right.
-- 2. Add the digits of the doubled values and the undoubled digits from the
-- original number.
-- 3. Calculate the remainder when the sum is divided by 10.
-- 4. Check if the remainder is 0. If so, then the credit card number is valid.
--
-- Example:
-- 1. 1386 -> [2, 3, 16, 6]
-- 2. 2+3+1+6+6 == 18
-- 3. 18 `mod` 10 == 8
-- 4. 8 /= 0, so 1386 is not a valid credit card number.
--
-- > validate 4012888888881881 == True
-- > validate 4012888888881882 == False
validate :: Integer -> Bool
validate x = sumDigits (doubleEveryOther (toDigits x)) `mod` 10 == 0
----------------------------------------------
-- Part 2: The Towers of Hanoi
----------------------------------------------
-- Pegs are identified by a String.
type Peg = String
-- A move transfers the top disk from one peg to another.
type Move = (Peg, Peg)
-- Return a list of moves to solve the Tower of Hanoi puzzle. The moves transfer
-- a given number of stacked discs from the first peg to the second, using the
-- third peg as temporary storage.
--
-- > hanoi 2 "a" "b" "c" == [("a", "c"), ("a", "b"), ("c", "b")]
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi x peg1 peg2 peg3
| x <= 0 = []
| x == 1 = [(peg1, peg2)]
| otherwise =
let moveToTemp = hanoi (x-1) peg1 peg3 peg2 in
let moveLargest = [(peg1, peg2)] in
let moveToSolution = hanoi (x-1) peg3 peg2 peg1 in
moveToTemp ++ moveLargest ++ moveToSolution
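-- A worked example: hanoi 3 "a" "b" "c" produces the expected 2^3 - 1 = 7 moves:
-- [("a","b"),("a","c"),("b","c"),("a","b"),("c","a"),("c","b"),("a","b")]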
|
glennrfisher/cis194-haskell
|
01 Intro/01-intro.hs
|
mit
| 4,079 | 0 | 16 | 780 | 585 | 341 | 244 | 30 | 1 |
--
-- Copyright (c) 2013-2019 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
{-# LANGUAGE RecordWildCards #-}
module Main where
import Data.List ( isSuffixOf, (\\), isInfixOf, nub, sort, union )
import Data.List.Split (chunksOf)
import qualified Data.Map as M
import Data.Maybe ( catMaybes, fromJust )
import Data.Char ( toLower )
import Data.Data()
import Data.IORef ( modifyIORef, newIORef, readIORef )
import Data.Version(showVersion)
import Data.Function ( fix )
import qualified Data.Set as Set
import Paths_cgrep ( version )
import Control.Exception as E ( catch, SomeException )
import Control.Concurrent ( forkIO, setNumCapabilities )
import Control.Concurrent.Async ( mapConcurrently )
import Control.Monad.STM ( atomically )
import Control.Concurrent.STM.TChan
( newTChanIO, readTChan, writeTChan )
import Control.Monad
( when, forM_, forever, replicateM_, unless, void, forM )
import Control.Monad.Trans ( MonadIO(liftIO), MonadTrans(lift) )
import Control.Monad.Trans.Except ( runExceptT, throwE )
import Control.Monad.Trans.Reader ( ReaderT(runReaderT), ask )
import Control.Applicative
( Applicative(liftA2), Alternative((<|>)) )
import System.Console.CmdArgs ( cmdArgsRun )
import System.Directory
( canonicalizePath, doesDirectoryExist, getDirectoryContents )
import System.FilePath ((</>))
import System.Environment ( lookupEnv, withArgs )
import System.PosixCompat.Files as PosixCompat
( getSymbolicLinkStatus, isSymbolicLink )
import System.IO
( stdout, stdin, hIsTerminalDevice, stderr, hPutStrLn )
import System.Exit ( exitSuccess )
import System.Process (readProcess, runProcess, waitForProcess)
import CGrep.CGrep ( sanitizeOptions, isRegexp, runCgrep )
import CGrep.Lang
( Lang, langMap, getFileLang, dumpLangMap, splitLangList )
import CGrep.Output
( Output(outLine, outTokens, outFilePath, outLineNo),
putPrettyHeader,
putPrettyFooter,
prettyOutput,
showFileName )
import CGrep.Common ( takeN, trim8, getTargetName )
import CGrep.Parser.WildCard ( wildCardMap )
import CmdOptions ( options )
import Options ( Options(..) )
import Util ( partitionM, notNull )
import Debug ( putStrLevel1 )
import Config
( Config(Config, configFileLine, configColorMatch, configColorFile,
configPruneDirs, configColors, configLanguages),
getConfig )
import Reader ( OptionT )
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Codec.Binary.UTF8.String as UC
import Data.Tuple.Extra ( (&&&) )
fileFilter :: Options -> [Lang] -> FilePath -> Bool
fileFilter opts langs filename = maybe False (liftA2 (||) (const $ null langs) (`elem` langs)) (getFileLang opts filename)
{-# INLINE fileFilter #-}
getFilesMagic :: [FilePath] -> IO [String]
getFilesMagic filenames = lines <$> readProcess "/usr/bin/file" ("-b" : filenames) []
{-# INLINE getFilesMagic #-}
-- push file names in Chan...
withRecursiveContents :: Options -> FilePath -> [Lang] -> [String] -> Set.Set FilePath -> ([FilePath] -> IO ()) -> IO ()
withRecursiveContents opts dir langs pdirs visited action = do
isDir <- doesDirectoryExist dir
if isDir then do
xs <- getDirectoryContents dir
(dirs,files) <- partitionM doesDirectoryExist [dir </> x | x <- xs, x `notElem` [".", ".."]]
magics <- if null (magic_filter opts) || null files
then return []
else getFilesMagic files
-- filter the list of files
--
let files' = if null magics
then filter (fileFilter opts langs) files
else catMaybes $ zipWith (\f m -> if any (`isInfixOf` m) (magic_filter opts) then Just f else Nothing ) files magics
unless (null files') $
let chunks = chunksOf (Options.chunk opts) files' in
forM_ chunks $ \b -> action b
-- process dirs
--
forM_ dirs $ \path -> do
lstatus <- getSymbolicLinkStatus path
when ( deference_recursive opts || not (PosixCompat.isSymbolicLink lstatus)) $
unless (isPruneableDir path pdirs) $ do -- this is a good directory (unless already visited)!
cpath <- canonicalizePath path
unless (cpath `Set.member` visited) $
withRecursiveContents opts path langs pdirs (Set.insert cpath visited) action
else action [dir]
isPruneableDir:: FilePath -> [FilePath] -> Bool
isPruneableDir dir = any (`isSuffixOf` pdir)
where pdir = mkPrunableDirName dir
{-# INLINE isPruneableDir #-}
mkPrunableDirName :: FilePath -> FilePath
mkPrunableDirName xs | "/" `isSuffixOf` xs = xs
| otherwise = xs ++ "/"
{-# INLINE mkPrunableDirName #-}
-- read patterns from file
readPatternsFromFile :: FilePath -> IO [C.ByteString]
readPatternsFromFile f =
if null f then return []
else map trim8 . C.lines <$> C.readFile f
getFilePaths :: Bool -> -- pattern(s) from file
[String] -> -- list of patterns and files
[String]
getFilePaths False xs = if length xs == 1 then [] else tail xs
getFilePaths True xs = xs
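-- e.g. getFilePaths False ["pattern", "f1", "f2"] == ["f1", "f2"]  (first item is the pattern)
--      getFilePaths True  ["f1", "f2"]            == ["f1", "f2"]  (patterns were read from a file)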
parallelSearch :: [FilePath] -> [C.ByteString] -> [Lang] -> (Bool, Bool) -> OptionT IO ()
parallelSearch paths patterns langs (isTermIn, _) = do
(conf@Config{..}, opts@Options{..}) <- ask
-- create Transactional Chan and Vars...
in_chan <- liftIO newTChanIO
out_chan <- liftIO newTChanIO
-- launch worker threads...
forM_ [1 .. jobs] $ \_ -> liftIO . forkIO $
void $ runExceptT . forever $ do
fs <- lift $ atomically $ readTChan in_chan
lift $
E.catch (
case fs of
[] -> atomically $ writeTChan out_chan []
xs -> void $ (if asynch then flip mapConcurrently
else forM) xs $ \x -> do
out <- fmap (take max_count ) (runReaderT (runCgrep conf opts x patterns) (conf, sanitizeOptions x opts))
unless (null out) $ atomically $ writeTChan out_chan out)
(\e -> let msg = show (e :: SomeException) in
hPutStrLn stderr (showFileName conf opts (getTargetName (head fs))
++ ": exception: " ++ takeN 80 msg))
when (null fs) $ throwE ()
-- push the files to grep for...
_ <- liftIO . forkIO $ do
if recursive || deference_recursive
then forM_ (if null paths then ["."] else paths) $ \p ->
withRecursiveContents opts p langs
(mkPrunableDirName <$> configPruneDirs ++ prune_dir) (Set.singleton p) (atomically . writeTChan in_chan)
else forM_ (if null paths && not isTermIn then [""] else paths) (atomically . writeTChan in_chan . (:[]))
-- enqueue EOF messages:
replicateM_ jobs ((atomically . writeTChan in_chan) [])
-- dump output until workers are done
putPrettyHeader
let stop = jobs
matchingFiles <- liftIO $ newIORef Set.empty
fix (\action n m ->
unless (n == stop) $ do
out <- liftIO $ atomically $ readTChan out_chan
case out of
[] -> action (n+1) m
_ -> do
case () of
_ | json -> when m $ liftIO $ putStrLn ","
| otherwise -> return ()
let out' = map (\p -> p {outTokens = map (\(off, s) -> (length $ UC.decode $ B.unpack $ C.take off $ outLine p, UC.decodeString s)) $ outTokens p}) out
prettyOutput out' >>= mapM_ (liftIO . putStrLn)
liftIO $ when (vim || editor) $
mapM_ (modifyIORef matchingFiles . Set.insert . (outFilePath &&& outLineNo)) out
action n True
) 0 False
putPrettyFooter
-- run editor...
when (vim || editor ) $ liftIO $ do
editor' <- if vim
then return (Just "vim")
else lookupEnv "EDITOR"
files <- Set.toList <$> readIORef matchingFiles
let editFiles = (if fileline || configFileLine
then fmap (\(a,b) -> a ++ ":" ++ show b)
else nub . sort . fmap fst) files
putStrLn $ "cgrep: open files " ++ unwords editFiles ++ "..."
void $ runProcess (fromJust $ editor' <|> Just "vi")
editFiles
Nothing
Nothing
(Just stdin)
(Just stdout)
(Just stderr) >>= waitForProcess
main :: IO ()
main = do
-- check whether this is a terminal device
isTermIn <- hIsTerminalDevice stdin
isTermOut <- hIsTerminalDevice stdout
-- read Cgrep config options
(conf, _) <- getConfig
-- read command-line options
opts <- (if isTermOut
then \o -> o { color = color o || configColors conf }
else id) <$> cmdArgsRun options
-- check for multiple backends...
when (length (catMaybes [
#ifdef ENABLE_HINT
hint opts,
#endif
format opts,
if xml opts then Just "" else Nothing,
if json opts then Just "" else Nothing
]) > 1)
$ error "you can use one back-end at time!"
-- display lang-map and exit...
when (language_map opts) $
dumpLangMap langMap >> exitSuccess
-- check whether the pattern list is empty, display help message if it's the case
when (null (others opts) && isTermIn && null (file opts)) $
withArgs ["--help"] $ void (cmdArgsRun options)
-- load patterns:
patterns <- if null (file opts) then return $ map (C.pack . UC.encodeString) (((:[]).head.others) opts)
else readPatternsFromFile $ file opts
let patterns' = map (if ignore_case opts then ic else id) patterns
where ic | (not . isRegexp) opts && semantic opts = C.unwords . map (\p -> if C.unpack p `elem` wildCardTokens then p else C.map toLower p) . C.words
| otherwise = C.map toLower
where wildCardTokens = "OR" : M.keys wildCardMap -- "OR" is not included in wildCardMap
-- display the configuration in use
-- when (isJust confpath) $
-- hPutStrLn stderr $ showBold opts ("Using '" ++ fromJust confpath ++ "' configuration file...")
-- load files to parse:
let paths = getFilePaths (notNull (file opts)) (others opts)
-- parse cmd line language list:
let (l0, l1, l2) = splitLangList (language_filter opts)
-- language enabled:
let langs = (if null l0 then configLanguages conf else l0 `union` l1) \\ l2
runReaderT (do putStrLevel1 $ "Cgrep " ++ showVersion version ++ "!"
putStrLevel1 $ "options : " ++ show opts
putStrLevel1 $ "config : " ++ show conf
putStrLevel1 $ "languages : " ++ show langs
putStrLevel1 $ "pattern : " ++ show patterns'
putStrLevel1 $ "files : " ++ show paths
putStrLevel1 $ "isTermIn : " ++ show isTermIn
putStrLevel1 $ "isTermOut : " ++ show isTermOut
) (conf, opts)
-- specify number of cores
when (cores opts /= 0) $ setNumCapabilities (cores opts)
-- run search
runReaderT (parallelSearch paths patterns' langs (isTermIn, isTermOut)) (conf, opts)
|
awgn/cgrep
|
src/Main.hs
|
gpl-2.0
| 12,676 | 0 | 35 | 3,960 | 3,361 | 1,794 | 1,567 | 207 | 9 |
{- |
Module : $Header$
Description : parser for CASL architectural specifications
Copyright : (c) Maciek Makowski, Warsaw University 2003-2004, C. Maeder
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (via imports)
Parser for CASL architectural specifications
Follows Sect. II:3.1.4 of the CASL Reference Manual plus refinement
extensions
-}
module Syntax.Parse_AS_Architecture
( unitSpec
, refSpec
, annotedArchSpec
) where
import Logic.Grothendieck (LogicGraph)
import Syntax.AS_Structured
import Syntax.AS_Architecture
import Syntax.Parse_AS_Structured
(hetIRI, annoParser2, groupSpec, parseMapping, translationList)
import Common.AS_Annotation
import Common.AnnoState
import Common.Id
import Common.IRI
import Common.Keywords
import Common.Lexer
import Common.Token
import Text.ParserCombinators.Parsec
-- * Parsing functions
-- | Parse annotated architectural specification
annotedArchSpec :: LogicGraph -> AParser st (Annoted ARCH_SPEC)
annotedArchSpec = annoParser2 . archSpec
{- | Parse architectural specification
@
ARCH-SPEC ::= BASIC-ARCH-SPEC | GROUP-ARCH-SPEC
@ -}
archSpec :: LogicGraph -> AParser st (Annoted ARCH_SPEC)
archSpec l = basicArchSpec l <|> groupArchSpec l
{- | Parse group architectural specification
@
GROUP-ARCH-SPEC ::= { ARCH-SPEC } | ARCH-SPEC-NAME
@ -}
groupArchSpec :: LogicGraph -> AParser st (Annoted ARCH_SPEC)
groupArchSpec l = do
kOpBr <- oBraceT
asp <- annoParser $ archSpec l
kClBr <- cBraceT
return $ replaceAnnoted
(Group_arch_spec (item asp) $ toRange kOpBr [] kClBr) asp
<|> fmap (emptyAnno . Arch_spec_name) hetIRI
{- | Parse basic architectural specification
@
BASIC-ARCH-SPEC ::= unit/units UNIT-DECL-DEFNS
result UNIT-EXPRESSION ;/
@ -}
basicArchSpec :: LogicGraph -> AParser st (Annoted ARCH_SPEC)
basicArchSpec l = do
kUnit <- pluralKeyword unitS
(declDefn, ps) <- auxItemList [resultS] [] (unitDeclDefn l) (,)
kResult <- asKey resultS
expr <- annoParser2 $ unitExpr l
(m, an) <- optSemi
return $ emptyAnno $ Basic_arch_spec declDefn (appendAnno expr an)
$ tokPos kUnit `appRange` ps `appRange` catRange (kResult : m)
{- | Parse unit declaration or definition
@
UNIT-DECL-DEFN ::= UNIT-DECL | UNIT-DEFN
@ -}
unitDeclDefn :: LogicGraph -> AParser st UNIT_DECL_DEFN
unitDeclDefn l = do
name <- hetIRI
do c <- colonT -- unit declaration
decl <- refSpec l
(gs, ps) <- option ([], []) $ do
kGiven <- asKey givenS
(guts, qs) <- groupUnitTerm l `separatedBy` anComma
return (guts, kGiven : qs)
return $ Unit_decl name decl gs $ catRange $ c : ps
<|> -- unit definition
unitDefn' l name
{- | Parse unit declaration
@
UNIT-REF ::= UNIT-NAME : REF-SPEC
@ -}
unitRef :: LogicGraph -> AParser st UNIT_REF
unitRef l = do
name <- hetIRI
sep1 <- asKey toS
usp <- refSpec l
return $ Unit_ref name usp $ tokPos sep1
{- | Parse unit specification
@
UNIT-SPEC ::= GROUP-SPEC
GROUP-SPEC * .. * GROUP-SPEC -> GROUP-SPEC
closed UNIT-SPEC
@ -}
unitSpec :: LogicGraph -> AParser st UNIT_SPEC
unitSpec l =
-- closed unit spec
do kClosed <- asKey closedS
uSpec <- unitSpec l
return $ Closed_unit_spec uSpec $ tokPos kClosed
<|> {- unit type
NOTE: this can also be a spec name. If this is the case, this unit spec
will be converted on the static analysis stage.
See Static.AnalysisArchitecture.ana_UNIT_SPEC. -}
do gps@(gs : gss, _) <- annoParser (groupSpec l) `separatedBy` crossT
let rest = unitRestType l gps
if null gss then
option ( {- case item gs of
Spec_inst sn [] _ -> Spec_name sn -- annotations are lost
_ -> -} Unit_type [] gs nullRange) rest
else rest
unitRestType :: LogicGraph -> ([Annoted SPEC], [Token]) -> AParser st UNIT_SPEC
unitRestType l (gs, ps) = do
a <- asKey funS
g <- annoParser $ groupSpec l
return (Unit_type gs g $ catRange (ps ++ [a]))
refSpec :: LogicGraph -> AParser st REF_SPEC
refSpec l = do
(rs, ps) <- basicRefSpec l `separatedBy` asKey thenS
return $ if isSingle rs then head rs else Compose_ref rs $ catRange ps
{- | Parse refinement specification
@
REF-SPEC ::= UNIT_SPEC
             UNIT_SPEC [behav..] refined [via SYMB-MAP-ITEMS*] to REF-SPEC
arch spec GROUP-ARCH-SPEC
{ UNIT-DECL, ..., UNIT-DECL }
@ -}
basicRefSpec :: LogicGraph -> AParser st REF_SPEC
basicRefSpec l = -- component spec
do o <- oBraceT `followedWith` (simpleId >> asKey toS)
(us, ps) <- unitRef l `separatedBy` anComma
c <- cBraceT
return (Component_ref us $ toRange c ps o)
<|> -- architectural spec
do kArch <- asKey archS
kSpec <- asKey specS
asp <- groupArchSpec l
return (Arch_unit_spec asp (toRange kArch [] kSpec))
<|> -- unit spec
do uSpec <- unitSpec l
refinedRestSpec l uSpec <|> return (Unit_spec uSpec)
refinedRestSpec :: LogicGraph -> UNIT_SPEC -> AParser st REF_SPEC
refinedRestSpec l u = do
b <- asKey behaviourallyS
onlyRefinedRestSpec l (tokPos b) u
<|> onlyRefinedRestSpec l nullRange u
onlyRefinedRestSpec :: LogicGraph -> Range -> UNIT_SPEC -> AParser st REF_SPEC
onlyRefinedRestSpec l b u = do
r <- asKey refinedS
(ms, ps) <- option ([], []) $ do
v <- asKey viaS -- not a keyword
(m, ts) <- parseMapping l
return (m, v : ts)
t <- asKey toS
rsp <- refSpec l
return $ Refinement (isNullRange b) u ms rsp (b `appRange` toRange r ps t)
{- | Parse group unit term
@
GROUP-UNIT-TERM ::= UNIT-NAME
UNIT-NAME FIT-ARG-UNITS
{ UNIT-TERM }
@ -}
groupUnitTerm :: LogicGraph -> AParser st (Annoted UNIT_TERM)
groupUnitTerm l = annoParser $
-- unit name/application
do name <- hetIRI
args <- many (fitArgUnit l)
return (Unit_appl name args nullRange)
<|> -- unit term in brackets
do lbr <- oBraceT
ut <- unitTerm l
rbr <- cBraceT
return (Group_unit_term ut (catRange [lbr, rbr]))
{- | Parse an argument for unit application.
@
FIT-ARG-UNIT ::= [ UNIT-TERM ]
[ UNIT-TERM fit SYMB-MAP-ITEMS-LIST ]
@
The SYMB-MAP-ITEMS-LIST is parsed using parseItemsMap. -}
fitArgUnit :: LogicGraph -> AParser st FIT_ARG_UNIT
fitArgUnit l = do
o <- oBracketT
ut <- unitTerm l
(fargs, qs) <- option ([], []) $ do
kFit <- asKey fitS
(smis, ps) <- parseMapping l
return (smis, kFit : ps)
c <- cBracketT
return $ Fit_arg_unit ut fargs $ toRange o qs c
{- | Parse unit term.
@
UNIT-TERM ::= UNIT-TERM RENAMING
UNIT-TERM RESTRICTION
UNIT-TERM and ... and UNIT-TERM
local UNIT-DEFNS within UNIT-TERM
GROUP-UNIT-TERM
@
This will be done by subsequent functions in order to preserve
the operator precedence; see other 'unitTerm*' functions. -}
unitTerm :: LogicGraph -> AParser st (Annoted UNIT_TERM)
unitTerm = unitTermAmalgamation
{- | Parse unit amalgamation.
@
UNIT-TERM-AMALGAMATION ::= UNIT-TERM-LOCAL and ... and UNIT-TERM-LOCAL
@ -}
unitTermAmalgamation :: LogicGraph -> AParser st (Annoted UNIT_TERM)
unitTermAmalgamation l = do
(uts, toks) <- annoParser2 (unitTermLocal l) `separatedBy` asKey andS
return $ case uts of
[ut] -> ut
_ -> emptyAnno $ Amalgamation uts $ catRange toks
{- | Parse local unit term
@
UNIT-TERM-LOCAL ::= local UNIT-DEFNS within UNIT-TERM-LOCAL
UNIT-TERM-TRANS-RED
@ -}
unitTermLocal :: LogicGraph -> AParser st (Annoted UNIT_TERM)
unitTermLocal l =
-- local unit
do kLocal <- asKey localS
(uDefns, ps) <- auxItemList [withinS] [] (unitDefn l) (,)
kWithin <- asKey withinS
uTerm <- unitTermLocal l
return $ emptyAnno $ Local_unit uDefns uTerm
$ tokPos kLocal `appRange` ps `appRange` tokPos kWithin
<|> -- translation/reduction
unitTermTransRed l
{- | Parse translation or reduction unit term
The original grammar
@
UNIT-TERM-TRANS-RED ::= UNIT-TERM-TRANS-RED RENAMING
UNIT-TERM-TRANS-RED RESTRICTION
GROUP-UNIT-TERM
@ -}
unitTermTransRed :: LogicGraph -> AParser st (Annoted UNIT_TERM)
unitTermTransRed l = groupUnitTerm l >>=
translationList l Unit_translation Unit_reduction
{- | Parse unit expression
@
UNIT-EXPRESSION ::= lambda UNIT-BINDINGS "." UNIT-TERM
UNIT-TERM
@ -}
unitExpr :: LogicGraph -> AParser st (Annoted UNIT_EXPRESSION)
unitExpr l = do
(bindings, poss) <- option ([], nullRange) $ do
kLambda <- asKey lambdaS
(bindings, poss) <- unitBinding l `separatedBy` anSemi
kDot <- dotT
return (bindings, toRange kLambda poss kDot)
ut <- unitTerm l
return $ emptyAnno $ Unit_expression bindings ut poss
{- | Parse unit binding
@
UNIT-BINDING ::= UNIT-NAME : UNIT-SPEC
@ -}
unitBinding :: LogicGraph -> AParser st UNIT_BINDING
unitBinding l = do
name <- hetIRI
kCol <- colonT
usp <- unitSpec l
return $ Unit_binding name usp $ tokPos kCol
{- | Parse a unit definition
@
UNIT-DEFN ::= UNIT-NAME = UNIT-EXPRESSION
@ -}
unitDefn :: LogicGraph -> AParser st UNIT_DECL_DEFN
unitDefn l = hetIRI >>= unitDefn' l
unitDefn' :: LogicGraph -> IRI -> AParser st UNIT_DECL_DEFN
unitDefn' l name = do
kEqu <- equalT
expr <- annoParser2 $ unitExpr l
return $ Unit_defn name (item expr) $ tokPos kEqu
|
nevrenato/Hets_Fork
|
Syntax/Parse_AS_Architecture.hs
|
gpl-2.0
| 9,288 | 0 | 16 | 2,014 | 2,341 | 1,149 | 1,192 | 169 | 2 |
module Main where
import Pythagoras (Triple(..), pythagoreanTripleSequence, perimeter)
import Primes (divides)
main :: IO ()
main =
let
target = 1000
sieve trip = (perimeter trip) `divides` target
match = head $ filter sieve pythagoreanTripleSequence
factor = target `div` (perimeter match)
asList (Triple a b c) = map (*factor) [a, b, c]
result = asList match
triple = show $ result
summed = show $ sum result
answer = show $ product result
in
putStr $ "triple: " ++ triple ++ "\n" ++
"summed: " ++ summed ++ "\n" ++
"answer: " ++ answer ++ "\n"
-- brute force, unused
slowTriples n = [[x,y,z] | x <- [1..n],
y <- [1..n],
z <- [1..n],
x^2 + y^2 == z^2]
|
liefswanson/projectEuler
|
app/p1/q9/Main.hs
|
gpl-2.0
| 865 | 0 | 15 | 330 | 304 | 166 | 138 | 22 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
module NLP.TextClustering
(Algorithm(..),
Distance(..),
clusterDocuments,
clusterDocumentsVerbose)
where
import Math.IDF(buildFeatureVectors)
import NLP.TextClustering.DistanceUtils
import Math.HKMeans.KMeans (kmeans)
import Numeric.LinearAlgebra (Vector, dim)
import Data.Char (toLower)
import NLP.Tokenize (tokenize)
import Data.Ord (comparing)
import Data.List (sortBy)
-- Options for the clustering
data Algorithm a where
KMeans :: Int -> Algorithm Int
data Distance where
Euclidean :: Distance
Cosine :: Distance
deriving instance Show a => Show (Algorithm a)
deriving instance Show Distance
-- Types to make the code cleaner
type Text = String
type Name = String
type Word = String
type Document = (Name, Text)
-- Part Of Speech Tagging
type Tag = Int
partOfSpeechTagging :: [Word] -> [(Tag, Word)]
partOfSpeechTagging = zip [1..] -- Just to make it compile
-- Filter Useless Words
stopWords :: [Word]
stopWords = ["a", "able", "about", "across", "after", "all", "almost", "also", "am", "among", "an", "and", "any", "are", "as", "at", "be", "because", "been", "but", "by", "can", "cannot", "could", "dear", "did", "do", "does", "either", "else", "ever", "every", "for", "from", "get", "got", "had", "has", "have", "he", "her", "hers", "him", "his", "how", "however", "i", "if", "in", "into", "is", "it", "its", "just", "least", "let", "like", "likely", "may", "me", "might", "most", "must", "my", "neither", "no", "nor", "not", "of", "off", "often", "on", "only", "or", "other", "our", "own", "rather", "said", "say", "says", "she", "should", "since", "so", "some", "than", "that", "the", "their", "them", "then", "there", "these", "they", "this", "tis", "to", "too", "twas", "us", "wants", "was", "we", "were", "what", "when", "where", "which", "while", "who", "whom", "why", "will", "with", "would", "yet", "you", "your"]
filterWords :: [(Tag, Word)] -> [(Tag, Word)]
filterWords = filter (\w -> (notElem (snd w) stopWords) && (length . snd) w > 3)
-- Lemmatize
lemmatize :: [(Tag, Word)] -> [Word]
lemmatize = snd . unzip
-- Clusterize
clusterize :: Algorithm a -> Distance -> [Vector Double] -> [Int]
clusterize algorithm distance =
case algorithm of
(KMeans k) -> kmeans d k
where
-- Select the distance to use
d = case distance of
Euclidean -> euclideanDistance
Cosine -> cosineDistance
clusterDocuments :: Algorithm a -> Distance -> [Document] -> [(Name, Int)]
clusterDocuments algorithm distance documents =
let (names, texts) = unzip documents -- Separate names from their respective text
preprocessing = map $ -- Apply preprocessing on texts
lemmatize -- Lemmatize each word to a canonical form
. filterWords -- Filter useless and common words
. partOfSpeechTagging -- Find part of speech tag for each word
. tokenize -- Tokenize the document
. map toLower -- Lower every letters in the document
datas = buildFeatureVectors $ preprocessing texts -- Build feature space
in sortBy (comparing snd) $ zip names $ clusterize algorithm distance datas
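-- A hypothetical usage sketch (document names and texts are made up); it pairs
-- each document name with the index of the cluster it was assigned to.
_exampleClustering :: [(Name, Int)]
_exampleClustering = clusterDocuments (KMeans 2) Cosine
    [ ("a.txt", "alpha beta gamma")
    , ("b.txt", "beta gamma delta")
    , ("c.txt", "something else entirely") ]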
clusterDocumentsVerbose :: Show a => Algorithm a -> Distance -> [Document] -> IO ([(Name, Int)])
clusterDocumentsVerbose algorithm distance documents =
do
let (names, texts) = unzip documents -- Separate names from their respective text
preprocessing = map $ -- Apply preprocessing on texts
lemmatize -- Lemmatize each word to a canonical form
. filterWords -- Filter useless and common words
. partOfSpeechTagging -- Find part of speech tag for each word
. tokenize -- Tokenize the document
. map toLower -- Lower every letters in the document
datas = buildFeatureVectors $ preprocessing texts -- Build feature space
res = sortBy (comparing snd) $ zip names $ clusterize algorithm distance datas
putStrLn $ "Debug trace of clustering"
putStrLn $ "Distance : " ++ (show distance)
putStrLn $ "Clustering : " ++ (show algorithm)
putStrLn $ "Dimension of feature space : " ++ (show $ dim $ head datas)
print $ datas
return res
|
remusao/DocumentClustering
|
NLP/TextClustering.hs
|
gpl-2.0
| 4,468 | 0 | 14 | 1,109 | 1,179 | 694 | 485 | 71 | 2 |
-- | This module provides the functionality to use
-- pandoc's reader for reStructuredText (RST) in Yesod.
-- The code is based heavily on yesod-markdown by
-- Patrick Brisbin, which does the same thing for Markdown.
module Yesod.RST
( RST(..)
-- * Wrappers
, rstToHtml
, rstToHtmlTrusted
, rstFromFile
-- * Conversions
, parseRST
, writePandoc
, writePandocTrusted
-- * Option sets
, yesodDefaultWriterOptions
, yesodDefaultParserState
-- * Form helper
, rstField
)
where
import Yesod.Form (ToField(..), areq, aopt)
import Yesod.Core (RenderMessage, SomeMessage(..))
import Yesod.Form.Types
import Yesod.Widget (toWidget)
import Text.Hamlet (hamlet, Html)
import Database.Persist (PersistField)
import Text.Blaze.Html (preEscapedToMarkup)
import Text.Pandoc
import Text.HTML.SanitizeXSS (sanitizeBalance)
import Data.Monoid (Monoid)
import Data.String (IsString)
import System.Directory (doesFileExist)
import Data.Text (Text, pack, unpack)
newtype RST = RST String
deriving (Eq, Ord, Show, Read, PersistField, IsString, Monoid)
instance ToField RST master where
toField = areq rstField
instance ToField (Maybe RST) master where
toField = aopt rstField
rstField :: RenderMessage master FormMessage => Field sub master RST
rstField = Field
{ fieldParse = \values _ -> (blank $ Right . RST . unlines . lines' . unpack) values
, fieldView = \theId name attrs val _isReq -> toWidget
[hamlet|
<textarea id="#{theId}" name="#{name}" *{attrs}>#{either id unRST val}
|]
, fieldEnctype = UrlEncoded
}
where
unRST :: RST -> Text
unRST (RST s) = pack s
lines' :: String -> [String]
lines' = map (filter (/= '\r')) . lines
blank :: (Monad m, RenderMessage master FormMessage)
=> (Text -> Either FormMessage a) -> [Text] -> m (Either (SomeMessage master) (Maybe a))
blank _ [] = return $ Right Nothing
blank _ ("":_) = return $ Right Nothing
blank f (x:_) = return $ either (Left . SomeMessage) (Right . Just) $ f x
-- | Converts RST to sanitized Html
rstToHtml :: RST -> Html
rstToHtml = writePandoc yesodDefaultWriterOptions
. parseRST yesodDefaultParserState
-- | Converts RST to unsanitized Html
rstToHtmlTrusted :: RST -> Html
rstToHtmlTrusted = writePandocTrusted yesodDefaultWriterOptions
. parseRST yesodDefaultParserState
-- | Reads RST in from the specified file; returns the empty string
-- if the file does not exist
rstFromFile :: FilePath -> IO RST
rstFromFile f = do
exists <- doesFileExist f
content <- do
if exists
then readFile f
else return ""
return $ RST content
-- | Converts the intermediate Pandoc type to Html. Sanitizes HTML.
writePandoc :: WriterOptions -> Pandoc -> Html
writePandoc wo = preEscapedToMarkup . sanitizeBalance . pack . writeHtmlString wo
-- | Skips the sanitization and its required conversion to Text
writePandocTrusted :: WriterOptions -> Pandoc -> Html
writePandocTrusted wo = preEscapedToMarkup . writeHtmlString wo
-- | Parses RST into the intermediate Pandoc type
parseRST :: ParserState -> RST -> Pandoc
parseRST ro (RST m) = readRST ro m
-- | Pandoc defaults, plus Html5, minus WrapText
yesodDefaultWriterOptions :: WriterOptions
yesodDefaultWriterOptions = defaultWriterOptions
{ writerHtml5 = True
, writerWrapText = False
}
-- | Pandoc defaults, plus Smart, plus ParseRaw
yesodDefaultParserState :: ParserState
yesodDefaultParserState = defaultParserState
{ stateSmart = True
, stateParseRaw = True
}
|
pSub/yesod-rst
|
Yesod/RST.hs
|
gpl-2.0
| 3,670 | 0 | 14 | 814 | 866 | 480 | 386 | -1 | -1 |
-- Need this for ghc >= 7
{-# LANGUAGE NoMonoLocalBinds #-}
----------------------------------------------------
-- --
-- HyLoRes.ClauseSet.InUse: --
-- A set of FullClauses classified by the --
-- family of the distinguished formula --
-- --
----------------------------------------------------
{-
Copyright (C) HyLoRes 2002-2007 - See AUTHORS file
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
USA.
-}
module HyLoRes.ClauseSet.InUse(
-- types
InUseClauseSet,
-- constructors
newSet,
-- manipulators
add, remove,
-- observers
allClauses, clausesByNom, clauses, keys,
allClausesByFamily, allClausesIdx, allClausesOpaquedIdx,
-- indexes
unitEqIdx, nonUnitEqIdx, atNegNomIdx,
atPropIdx, atNegPropIdx,
atDiaNomIdx, atDiaNomRevIdx,
atBoxFIdx,
-- show
showPretty,
-- unit tests
unit_tests
)
where
import Test.QuickCheck ( Arbitrary(..), sized, resize )
import HyLo.Test ( UnitTest, runTest )
import Control.Monad ( replicateM )
import Data.Maybe ( fromMaybe )
import Data.List ( sort, nub, foldl' )
import Data.Map ( Map )
import qualified Data.Map as Map
import Text.Read ( Read(..) )
import Text.ParserCombinators.ReadP ( string, skipSpaces )
import Text.ParserCombinators.ReadPrec ( lift )
import HyLoRes.Formula ( NomSym, PropSym, RelSym,
At, Nom, Prop, Neg, Box, Diam, Opaque,
label, relSym, flatten, subf, specialize )
import HyLoRes.Formula.TypeLevel ( Spec(..) )
import HyLo.Signature ( hasInv )
import HyLoRes.Util ( filterFirst )
import HyLoRes.Util.Classify ( classifyBy )
import HyLoRes.Clause ( size )
import HyLoRes.Clause.FullClause ( FullClause, clauseId, distFormula,
opaqueClause,
specialize )
import qualified HyLoRes.Clause.FullClause as FC
data InUseClauseSet = IU {unitEqIdx :: InUseIdx (At Nom) NomSym,
nonUnitEqIdx :: InUseIdx (At Nom) NomSym,
-- reducing type safety. Need to comment, because of ghc >= 7.
-- The next line replaces the commented
-- atNegNomIdx :: InUseIdx (At (Neg Nom)) (Neg NomSym),
atNegNomIdx :: InUseIdx (At (Neg Nom)) NomSym,
atPropIdx :: InUseIdx (At Prop) PropSym,
atNegPropIdx :: InUseIdx (At (Neg Prop)) PropSym,
atBoxFIdx :: InUseIdx (At (Box Opaque)) RelSym,
atDiaNomIdx :: InUseIdx (At (Diam Nom)) RelSym,
atDiaNomRevIdx :: InUseIdx (At (Diam Nom)) RelSym}
data InUseIdx f k =
InUseIdx {asMap :: Map NomSym (Map k [FullClause f]),
nomPicker :: FullClause f -> NomSym,
keyPicker :: FullClause f -> k}
mkIdx :: (FullClause f -> NomSym) -> (FullClause f -> k) -> InUseIdx f k
mkIdx f g = InUseIdx {asMap = Map.empty, nomPicker = f, keyPicker = g}
newSet :: InUseClauseSet
newSet =
IU{unitEqIdx = mkIdx (fst . flatten . distF) (snd . flatten . distF),
nonUnitEqIdx = mkIdx (fst . flatten . distF) (snd . flatten . distF),
atNegNomIdx = mkIdx (fst . flatten . distF) (snd . flatten . distF),
atPropIdx = mkIdx (fst . flatten . distF) (snd . flatten . distF),
atNegPropIdx = mkIdx (fst . flatten . distF) (snd . flatten . distF),
atDiaNomIdx = mkIdx (fst3 . flatten . distF) (snd3 . flatten . distF),
atDiaNomRevIdx = mkIdx (thd3 . flatten . distF) (snd3 . flatten . distF),
atBoxFIdx = mkIdx (label . distF) (relSym . subf . distF)}
where fst3 (x,_,_) = x
snd3 (_,x,_) = x
thd3 (_,_,x) = x
distF = distFormula
addI :: Ord k => FullClause f -> InUseIdx f k -> InUseIdx f k
addI cl idx = idx{asMap = addToMap (nomPicker idx cl)
(keyPicker idx)
(asMap idx)}
where addToMap i f = Map.insertWith (\_ -> classifyBy f cl)
i
(Map.singleton (f cl) [cl])
add :: FullClause (At f) -> InUseClauseSet -> InUseClauseSet
add cl iu =
case specialize cl of
AtNom c -> if size c == 1
then iu{unitEqIdx = addI c (unitEqIdx iu)}
else iu{nonUnitEqIdx = addI c (nonUnitEqIdx iu)}
AtNegNom c -> iu{atNegNomIdx = addI c (atNegNomIdx iu)}
AtProp c -> iu{atPropIdx = addI c (atPropIdx iu)}
AtNegProp c -> iu{atNegPropIdx = addI c (atNegPropIdx iu)}
AtDiamNom c -> if hasInv (relSym . subf $ distFormula c)
then iu{atDiaNomIdx = addI c (atDiaNomIdx iu),
atDiaNomRevIdx = addI c (atDiaNomRevIdx iu)}
else iu{atDiaNomIdx = addI c (atDiaNomIdx iu)}
AtBoxF c -> iu{atBoxFIdx = addI c (atBoxFIdx iu)}
_ -> error $ "InUse.add: can't store " ++ show cl
removeI :: Ord k => FullClause f -> InUseIdx f k -> InUseIdx f k
removeI cl idx = idx{asMap = removeFromMap (nomPicker idx cl)
(keyPicker idx)
(asMap idx)}
where removeFromMap nom f = Map.adjust (removeClause (f cl)) nom
removeClause = Map.adjust (filterFirst ((clId ==) . clauseId))
clId = clauseId cl
remove :: FullClause (At f) -> InUseClauseSet -> InUseClauseSet
remove cl iu =
case specialize cl of
AtNom c -> if size c == 1
then iu{unitEqIdx = removeI c (unitEqIdx iu)}
else iu{nonUnitEqIdx = removeI c (nonUnitEqIdx iu)}
AtNegNom c -> iu{atNegNomIdx = removeI c (atNegNomIdx iu)}
AtProp c -> iu{atPropIdx = removeI c (atPropIdx iu)}
AtNegProp c -> iu{atNegPropIdx = removeI c (atNegPropIdx iu)}
AtDiamNom c -> if hasInv (relSym . subf $ distFormula c)
then iu{atDiaNomIdx = removeI c (atDiaNomIdx iu),
atDiaNomRevIdx = removeI c (atDiaNomRevIdx iu)}
else iu{atDiaNomIdx = removeI c (atDiaNomIdx iu)}
AtBoxF c -> iu{atBoxFIdx = removeI c (atBoxFIdx iu)}
_ -> iu
allClauses :: InUseClauseSet -> [FullClause (At Opaque)]
allClauses iu = concat [allClausesOpaquedIdx . atPropIdx $ iu,
allClausesOpaquedIdx . atNegPropIdx $ iu,
allClausesOpaquedIdx . unitEqIdx $ iu,
allClausesOpaquedIdx . nonUnitEqIdx $ iu,
allClausesOpaquedIdx . atNegNomIdx $ iu,
allClausesOpaquedIdx . atBoxFIdx $ iu,
allClausesOpaquedIdx . atDiaNomIdx $ iu]
allClausesOpaquedIdx :: InUseIdx (At f) k -> [FullClause (At Opaque)]
allClausesOpaquedIdx = map opaqueClause . allClausesIdx
allClausesIdx :: InUseIdx (At f) k -> [FullClause (At f)]
allClausesIdx = concat . concatMap Map.elems . Map.elems . asMap
clausesByNom :: NomSym -> InUseIdx f k -> [FullClause f]
clausesByNom nom = maybe [] (concat . Map.elems) . Map.lookup nom . asMap
clauses :: Ord k => NomSym -> k -> InUseIdx f k -> [FullClause f]
clauses nom k index = fromMaybe [] $ do m <- Map.lookup nom (asMap index)
Map.lookup k m
allClausesByFamily :: InUseClauseSet -> [(String, [FullClause (At Opaque)])]
allClausesByFamily iu = [("AtN" , concat[
allClausesOpaquedIdx . unitEqIdx $ iu,
allClausesOpaquedIdx . nonUnitEqIdx $ iu
]),
("At!N" , allClausesOpaquedIdx . atNegNomIdx $ iu),
("AtP" , allClausesOpaquedIdx . atPropIdx $ iu),
("At!P" , allClausesOpaquedIdx . atNegPropIdx $ iu),
("AtDiaN", allClausesOpaquedIdx . atDiaNomIdx $ iu),
("AtBoxF", allClausesOpaquedIdx . atBoxFIdx $ iu)]
keys :: InUseIdx f k -> [(NomSym, k)]
keys idx = [(i,k) | (i,m) <- Map.toList (asMap idx), k <- Map.keys m]
showPretty :: InUseClauseSet -> String
showPretty iu = unlines . concat $ [["InUse: {"],
map (uncurry showF) (allClausesByFamily iu),
["}"]
]
where showF _ [] = ""
showF f [c] = concat [" ", f, ": { ", show c, "}\n"]
showF f (c:cs) = unlines . concat $ [
[concat [" ", f, ": { ", show c]],
map ((indent ++) . show) cs,
[" }"]
]
indent = " "
-- ----------------------
-- QuickCheck stuff
-- ----------------------
instance Arbitrary InUseClauseSet where
arbitrary = sized $ \n ->
do cs <- replicateM n (simpleClause (n `div` 2))
return $ fromClauseList (filter isSimple cs)
where simpleClause n = do c <- resize n arbitrary
if isSimple c
then (return c)
else simpleClause n
isSimple c = case specialize c of
AtNom{} -> True
AtNegNom{} -> True
AtProp{} -> True
AtNegProp{} -> True
AtDiamNom{} -> True
AtBoxF{} -> True
AtDownF{} -> False
_ -> False
coarbitrary = coarbitrary . allClauses
fromClauseList :: [FullClause (At Opaque)] -> InUseClauseSet
fromClauseList = foldl' (flip add) newSet
instance Eq InUseClauseSet where
iu == iu' = map sort (toList iu) == map sort (toList iu')
where toList x = [allClausesOpaquedIdx $ unitEqIdx x,
allClausesOpaquedIdx $ nonUnitEqIdx x,
allClausesOpaquedIdx $ atNegNomIdx x,
allClausesOpaquedIdx $ atPropIdx x,
allClausesOpaquedIdx $ atNegPropIdx x,
allClausesOpaquedIdx $ atBoxFIdx x,
allClausesOpaquedIdx $ atDiaNomIdx x,
allClausesOpaquedIdx $ atDiaNomRevIdx x]
instance Show InUseClauseSet where
show iu = unlines [
"IU {",
" unitEqIdx = " ++ showIdx unitEqIdx iu,
" nonUnitEqIdx = " ++ showIdx nonUnitEqIdx iu,
" atNegNomIdx = " ++ showIdx atNegNomIdx iu,
" atPropIdx = " ++ showIdx atPropIdx iu,
" atNegPropIdx = " ++ showIdx atNegPropIdx iu,
" atBoxFIdx = " ++ showIdx atBoxFIdx iu,
" atDiaNomIdx = " ++ showIdx atDiaNomIdx iu,
" atDiaNomRevIdx = " ++ showIdx atDiaNomRevIdx iu,
"}"
]
where showIdx idx = show . allClausesIdx . idx
instance Read InUseClauseSet where
readPrec =
do str "IU {"
unit_eqs <- readIdx "unitEqIdx"
non_unit_eqs <- readIdx "nonUnitEqIdx"
neqs <- readIdx "atNegNomIdx"
props <- readIdx "atPropIdx"
negProps <- readIdx "atNegPropIdx"
boxes <- readIdx "atBoxFIdx"
rels <- readIdx "atDiaNomIdx"
relsInv <- readIdx "atDiaNomRevIdx"
str "}"
return . fromClauseList $ concat [unit_eqs,
non_unit_eqs,
neqs,
props,
negProps,
boxes,
nub (rels ++ relsInv)]
where readIdx s = do str s; str "="; r <- readPrec; return r
str s = lift (skipSpaces >> string s >> skipSpaces)
prop_readShow :: InUseClauseSet -> Bool
prop_readShow iu = iu == (read . show $ iu)
prop_unitEqs :: InUseClauseSet -> Bool
prop_unitEqs iu = all (\c -> size c == 1) (allClausesIdx $ unitEqIdx iu) &&
all (\c -> size c > 1) (allClausesIdx $ nonUnitEqIdx iu)
prop_relsAndInvs :: InUseClauseSet -> Bool
prop_relsAndInvs iu = all (match atDiaNomRevIdx) (cls atDiaNomIdx) &&
all (match atDiaNomIdx) (cls atDiaNomRevIdx)
where match idx c
| hasInv (relSym . subf $ distFormula c) = c `elem` (cls idx)
| otherwise = not $ c `elem` (cls idx)
cls idx = allClausesIdx (idx iu)
unit_tests :: UnitTest
unit_tests = [
("read/show - InUseClauseSet", runTest prop_readShow),
("unit eqs where they belong", runTest prop_unitEqs),
("atDiaNomRevIdx has inverses", runTest prop_relsAndInvs)
]
|
nevrenato/HyLoRes_Source
|
src/HyLoRes/ClauseSet/InUse.hs
|
gpl-2.0
| 14,234 | 4 | 15 | 5,544 | 3,718 | 1,970 | 1,748 | 227 | 9 |
module Util.ParallelTest where
import Control.Concurrent.MVar
import Control.Concurrent(forkIO, threadDelay)
import Control.Monad
import Util.TestWrapper
import Test.HUnit
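-- delayTest wraps a test so it sleeps before running; threadDelay takes
-- microseconds, hence the (millis * 1000) conversion from milliseconds.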
delayTest millis = wrapTest $ \test -> threadDelay (millis * 1000) >> test
parallelTests label tests = TestLabel label $ TestCase $ do
forks <- mapM forkTest tests
countsAndLogs <- mapM join forks
forM_ (map snd countsAndLogs) (putStrLn . concat . reverse) -- putStrLn was the best I could come up with..
let counts = map fst countsAndLogs
let total = foldr plus (Counts 0 0 0 0) counts
assertEqual "Errors in parallel tests" 0 (errors total)
assertEqual "Failures in parallel tests" 0 (failures total)
where plus (Counts a1 b1 c1 d1) (Counts a2 b2 c2 d2) = Counts (a1+a2) (b1+b2) (c1+c2) (d1+d2)
forkTest t = forkAction $ runTestText (PutText logToBuffer []) t
forkAction a = do var <- newEmptyMVar
forkIO $ a >>= putMVar var
return var
join = takeMVar
logToBuffer line important lines | important = return (line : lines)
| otherwise = return lines
|
raimohanska/rump
|
src/Util/ParallelTest.hs
|
gpl-3.0
| 1,185 | 0 | 13 | 322 | 398 | 195 | 203 | 23 | 1 |
ka >>= kab = Cont (\hb -> ...)
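-- One standard way to finish this bind, assuming the usual wrapper
-- newtype Cont r a = Cont { runCont :: (a -> r) -> r }, is:
--   ka >>= kab = Cont (\hb -> runCont ka (\a -> runCont (kab a) hb))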
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.5/code/haskell/snippet26.hs
|
gpl-3.0
| 30 | 1 | 7 | 7 | 22 | 11 | 11 | -1 | -1 |
module AST where
-- The syntax is taken from C99 (ISO/IEC 9899:1999), section 6
-- 6.4.2
data Identifier = Identifier String
deriving (Eq,Show)
-- 6.4.4
data Constant = Integer Integer
| Float Double
| Char Char
| Enumeration Identifier
deriving (Eq,Show)
-- 6.4.5
data StringLiteral = StringLiteral String
deriving (Eq,Show)
-- Expressions: 6.5
class ToExpr a where
toExpr :: a -> Expr
data Expression = AssignmentExpression AssignmentExpression
| Comma Expression AssignmentExpression
deriving (Eq,Show)
data AssignmentExpression = ConditionalExpression ConditionalExpression
| NestedAssignmentExpression UnaryExpression AssignmentOperator AssignmentExpression
deriving (Eq,Show)
data AssignmentOperator = SimpleAssign
| MultiplyAssign
| DivideAssign
| ModulusAssign
| PlusAssign
| MinusAssign
| LShiftAssign
| RShiftAssign
| AndAssign
| XorAssign
| OrAssign
deriving (Eq,Show)
data ConditionalExpression = LogicalOrExpression LogicalOrExpression
| Ternary LogicalOrExpression Expression ConditionalExpression
deriving (Eq,Show)
data LogicalOrExpression = LogicalAndExpression LogicalAndExpression
| LogicalOr LogicalOrExpression LogicalAndExpression
deriving (Eq,Show)
data LogicalAndExpression = BitwiseOrExpression BitwiseOrExpression
| LogicalAnd LogicalAndExpression BitwiseOrExpression
deriving (Eq,Show)
data BitwiseOrExpression = BitwiseXorExpression BitwiseXorExpression
| BitwiseOr BitwiseOrExpression BitwiseXorExpression
deriving (Eq,Show)
data BitwiseXorExpression = BitwiseAndExpression BitwiseAndExpression
| BitwiseXor BitwiseXorExpression BitwiseAndExpression
deriving (Eq,Show)
data BitwiseAndExpression = EqualityExpression EqualityExpression
| BitwiseAnd BitwiseAndExpression EqualityExpression
deriving (Eq,Show)
data EqualityExpression = RelationalExpression RelationalExpression
| Equal EqualityExpression RelationalExpression
| UnEqual EqualityExpression RelationalExpression
deriving (Eq,Show)
data RelationalExpression = ShiftExpression ShiftExpression
| Comparison RelationalExpression CompareOperator ShiftExpression
deriving (Eq,Show)
data CompareOperator = LT | GT | LTE | GTE
deriving (Eq,Show)
data ShiftExpression = AdditiveExpression AdditiveExpression
| LShift ShiftExpression AdditiveExpression
| RShift ShiftExpression AdditiveExpression
deriving (Eq,Show)
data AdditiveExpression = MultiplicativeExpression MultiplicativeExpression
| Add AdditiveExpression MultiplicativeExpression
| Subtract AdditiveExpression MultiplicativeExpression
deriving (Eq,Show)
data MultiplicativeExpression = CastExpression CastExpression
| Multiply MultiplicativeExpression CastExpression
| Divide MultiplicativeExpression CastExpression
| Modulus MultiplicativeExpression CastExpression
deriving (Eq,Show)
data CastExpression = UnaryExpression UnaryExpression
| NestedCastExpression Identifier CastExpression
deriving (Eq,Show)
data UnaryExpression = PostfixExpression PostfixExpression
| PreIncrement UnaryExpression
| PreDecrement UnaryExpression
| UnaryOp UnaryOp CastExpression
| ExpressionSize UnaryExpression
| TypeSize Identifier
deriving (Eq,Show)
data UnaryOp = AddressOf | Dereference | UnaryPlus | UnaryMinus | BitwiseNot | LogicalNot
deriving (Eq,Show)
-- Every postfix expression starts with either a '(' (Compound Literals) or a
-- primary expression
data PostfixExpression = PrimaryExpression PrimaryExpression
| ArraySubscript PostfixExpression Expression
| FunctionCall PostfixExpression ArgumentExpressionList
| Member PostfixExpression Identifier
| PointerToMember PostfixExpression Identifier
| PostIncrement PostfixExpression
| PostDecrement PostfixExpression
| CompoundLiteral Identifier InitializerList
deriving (Eq,Show)
data ArgumentExpressionList = ArgListHead AssignmentExpression
| ArgList ArgumentExpressionList AssignmentExpression
deriving (Eq,Show)
data PrimaryExpression = IdentifierExpression Identifier
| Constant Constant
| StringLiteralExpression StringLiteral
| Expr Expression
deriving (Eq,Show)
-- This is a collapsed type representing any expression
data Expr = CommaExpr Expr Expr
| AssignExpr Expr AssignmentOperator Expr
| TernaryExpr Expr Expr Expr
| LogOrExpr Expr Expr
| LogAndExpr Expr Expr
| BitOrExpr Expr Expr
| BitXorExpr Expr Expr
| BitAndExpr Expr Expr
| EqExpr Expr Expr
| DisEqExpr Expr Expr
| RelExpr Expr CompareOperator Expr
| LShiftExpr Expr Expr
| RShiftExpr Expr Expr
| AddExpr Expr Expr
| SubExpr Expr Expr
| MultExpr Expr Expr
| DivExpr Expr Expr
| ModExpr Expr Expr
| CastExpr Identifier Expr
          | PreIncExpr Expr
| PostIncExpr Expr
| PreDecExpr Expr
| PostDecExpr Expr
| UnaryOpExpr UnaryOp Expr
| SizeofExprExpr Expr
| SizeofTypeExpr Identifier
| SubscriptExpr Expr Expr
| FunctionCallExpr Expr [Expr]
| MemberExpr Expr Identifier
| PointerMemberExpr Expr Identifier
          | CompoundLiteralExpr Identifier InitializerList
          deriving (Eq,Show)
-- 6.6 Constant Expressions
-- A constant expression has different semantic constraints, so it should be
-- represented as a different type
data ConstantExpression = ConstantExpression ConditionalExpression
deriving (Eq,Show)
-- §6.7 Declarations
-- The spec calls for init-declarator-list_opt. It then gives a constraint
-- requiring each declaration to have at least one declarator. The
-- reasoning here is not clear, so for now I am just going to require
-- a declarator list
data Declaration = Declaration [DeclarationSpecifier] [InitDeclarator]
deriving (Eq,Show)
data DeclarationSpecifier = StorageClass StorageClass
| TypeSpecifier TypeSpecifier
| TypeQualifier TypeQualifier
| FunctionSpecifier FunctionSpecifier
deriving (Eq,Show)
data InitDeclarator = Declarator Declarator
| InitDeclarator Declarator Initializer
deriving (Eq,Show)
-- §6.7.1 Storage-Class Specifiers
data StorageClass = Typedef | Extern | Static | Auto | Register
deriving (Eq,Show)
-- §6.7.2 Type Specifiers
-- Only a subset of possible combinations are allowed. The data type should
-- reflect this
data TypeSpecifier = VoidT | CharT | ShortT | IntT | LongT | FloatT | DoubleT
| Signed | Unsigned | Bool | Complex | Imaginary
| StructUnionSpecifier StructUnionSpecifier
| EnumSpecifier EnumSpecifier
-- | TypedefName TypedefName
deriving (Eq,Show)
data StructUnionSpecifier = StructLocalDefinition StructUnion (Maybe Identifier) [StructDeclaration]
| StructElseDefinition StructUnion Identifier
deriving (Eq,Show)
data StructUnion = Struct | Union
deriving (Eq,Show)
data StructDeclaration = StructDeclaration [TypeSpecifier] [StructDeclarator]
deriving (Eq,Show)
data StructDeclarator = RegularMember Declarator
| Bitfield (Maybe Declarator) ConstantExpression
deriving (Eq,Show)
data EnumSpecifier = Enum (Maybe Identifier) [Enumerator]
deriving (Eq,Show)
-- There is a constructor for enumeration-constants, but it isn't clear why
-- we don't just use an identifier here.
data Enumerator = Enumerator Identifier
| EnumeratorInit Identifier ConstantExpression
deriving (Eq,Show)
-- §6.7.3 Type Qualifiers
data TypeQualifier = Const | Restrict | Volatile
deriving (Eq,Show)
-- §6.7.4 Function Specifiers
data FunctionSpecifier = Inline
deriving (Eq,Show)
-- §6.7.5 Declarators
data Declarator = DirectDeclarator (Maybe PointerLevel) DirectDeclarator
deriving (Eq,Show)
data DirectDeclarator = IdentifierDDeclarator Identifier
| DeclaratorDDeclarator Declarator
-- The assignment expression is used in conjunction with
-- the static keyword to give a minimum size for an
-- array parameter
| ArrayDDeclarator DirectDeclarator [TypeQualifier] (Maybe AssignmentExpression)
| FunctionDDeclarator DirectDeclarator (Either [FParameter] [Identifier])
deriving (Eq,Show)
data FParameter = Varargs
| ParameterDeclaration ParameterDeclaration
deriving (Eq,Show)
data ParameterDeclaration = ConcreteDeclarationSpecifier [DeclarationSpecifier] Declarator
| AbstractDeclarationSpecifier [DeclarationSpecifier] (Maybe AbstractDeclarator)
deriving (Eq,Show)
-- §6.7.6 Type Names
data TypeName = TypeName [SpecifierQualifier] (Maybe AbstractDeclarator)
deriving (Eq,Show)
data AbstractDeclarator = Pointer PointerLevel
| DirectAbstractDeclarator DirectAbstractDeclarator
deriving (Eq,Show)
data DirectAbstractDeclarator = RecursiveDAD AbstractDeclarator
| FixedArrayDAD (Maybe DirectAbstractDeclarator) (Maybe AssignmentExpression)
| VariableArrayDAD (Maybe DirectAbstractDeclarator)
| FunctionDAD (Maybe DirectAbstractDeclarator) (Maybe [ParameterType])
deriving (Eq,Show)
data SpecifierQualifier = SpecifierQualifier
deriving (Eq,Show)
data ParameterType = ParameterType
deriving (Eq,Show)
-- A pointer is specified as type * {CRV} * {CRV} * {CRV} ... declarator
-- This can be represented as a list of lists of TypeQualifiers. Each list
-- corresponds to a level of pointer indirection, with the Qualifiers in the
-- list indicating what restrictions apply at that level
data PointerLevel = PointerLevel [[TypeQualifier]]
deriving (Eq,Show)
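-- For illustration (one possible reading of the encoding above, not taken from
-- the original source): a declarator such as "* const volatile * x" carries two
-- levels of indirection and could be written as
--   PointerLevel [[Const, Volatile], []]
-- with one inner list of qualifiers per '*'.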
-- 6.7.8
data Initializer = Assignment AssignmentExpression
| IList InitializerList
deriving (Eq,Show)
-- The Initializer list specified in the C standard is left recursive.
newtype InitializerList = InitializerList [((Maybe Designation),Initializer)]
deriving (Eq,Show)
newtype Designation = DesignationList [Designator]
deriving (Eq,Show)
data Designator = SubscriptDesignator ConstantExpression
| MemberDesignator Identifier
deriving (Eq,Show)
|
joelwilliamson/jcc
|
AST.hs
|
gpl-3.0
| 12,482 | 0 | 9 | 4,242 | 1,937 | 1,114 | 823 | 209 | 0 |
module Lecture5NRC (solveAndShowNRC, genProblemNRC, blocksNRC) where
import Data.List
import System.Random
-- | Mainly a direct copy from Lecture 5, but the functions are renamed with an
-- NRC suffix. Furthermore, a new set of blocks, blocksNRC, is introduced. The
-- most challenging bit was figuring out that I had to use both sameblock and
-- sameblockNRC in the prune function, so we check the same-block constraint on
-- the normal Sudoku and also on the NRC blocks.
-- | Refactored code
blocksNRC :: [[Int]]
blocksNRC = [[2..4],[6..8]]
blNRC :: Int -> [Int]
blNRC x = concat $ filter (elem x) blocksNRC
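-- Illustrative reductions of the helper above (derived from the definitions,
-- not part of the original source):
--   blNRC 3 == [2,3,4]
--   blNRC 7 == [6,7,8]
--   blNRC 5 == []        -- row/column 5 lies outside every NRC block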
subGridNRC :: Sudoku -> (Row,Column) -> [Value]
subGridNRC s (r,c) =
[ s (r',c') | r' <- blNRC r, c' <- blNRC c ]
freeInSubgridNRC :: Sudoku -> (Row,Column) -> [Value]
freeInSubgridNRC s (r,c) = freeInSeq (subGridNRC s (r,c))
freeAtPos :: Sudoku -> (Row,Column) -> [Value]
freeAtPos s (r,c) =
(freeInRow s r)
`intersect` (freeInColumn s c)
`intersect` (freeInSubgrid s (r,c))
`intersect` (freeInSubgridNRC s (r,c))
consistent :: Sudoku -> Bool
consistent s = and $
[ rowInjective s r | r <- positions ]
++
[ colInjective s c | c <- positions ]
++
[ subgridInjective s (r,c) |
r <- [1,4,7], c <- [1,4,7]]
++
[ subgridInjectiveNRC s (r,c) |
r <- [2,6], c <- [2,6]]
subgridInjectiveNRC :: Sudoku -> (Row,Column) -> Bool
subgridInjectiveNRC s (r,c) = injective vs where
vs = filter (/= 0) (subGridNRC s (r,c))
-- | Check for same block with and without NRC
prune :: (Row,Column,Value) -> [Constraint] -> [Constraint]
prune _ [] = []
prune (r,c,v) ((x,y,zs):rest)
| r == x = (x,y,zs\\[v]) : prune (r,c,v) rest
| c == y = (x,y,zs\\[v]) : prune (r,c,v) rest
| sameblock (r,c) (x,y) = (x,y,zs\\[v]) : prune (r,c,v) rest
| sameblockNRC (r,c) (x,y) =
(x,y,zs\\[v]) : prune (r,c,v) rest
| otherwise = (x,y,zs) : prune (r,c,v) rest
sameblockNRC :: (Row,Column) -> (Row,Column) -> Bool
sameblockNRC (r,c) (x,y) = -- cells outside every NRC block never share one
  not (null (blNRC r)) && not (null (blNRC c)) &&
  blNRC r == blNRC x && blNRC c == blNRC y
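-- The null guard keeps cells in rows/columns 1, 5 and 9 (which lie outside
-- every NRC block) from ever counting as block mates; for example:
--   sameblockNRC (2,3) (4,4) == True     -- both inside the top-left NRC block
--   sameblockNRC (1,1) (9,9) == False    -- neither cell lies in any NRC block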
solveAndShowNRC :: Grid -> IO[()]
solveAndShowNRC gr = solveShowNs (initNode gr)
genProblemNRC :: IO ()
genProblemNRC = do [r] <- rsolveNs [emptyN]
showNode r
s <- genProblem r
showNode s
-- | Existing code from Lecture5.hs
type Row = Int
type Column = Int
type Value = Int
type Grid = [[Value]]
positions, values :: [Int]
positions = [1..9]
values = [1..9]
blocks :: [[Int]]
blocks = [[1..3],[4..6],[7..9]]
showVal :: Value -> String
showVal 0 = " "
showVal d = show d
showRow :: [Value] -> IO()
showRow [a1,a2,a3,a4,a5,a6,a7,a8,a9] =
do putChar '|' ; putChar ' '
putStr (showVal a1) ; putChar ' '
putStr (showVal a2) ; putChar ' '
putStr (showVal a3) ; putChar ' '
putChar '|' ; putChar ' '
putStr (showVal a4) ; putChar ' '
putStr (showVal a5) ; putChar ' '
putStr (showVal a6) ; putChar ' '
putChar '|' ; putChar ' '
putStr (showVal a7) ; putChar ' '
putStr (showVal a8) ; putChar ' '
putStr (showVal a9) ; putChar ' '
putChar '|' ; putChar '\n'
showGrid :: Grid -> IO()
showGrid [as,bs,cs,ds,es,fs,gs,hs,is] =
do putStrLn ("+-------+-------+-------+")
showRow as; showRow bs; showRow cs
putStrLn ("+-------+-------+-------+")
showRow ds; showRow es; showRow fs
putStrLn ("+-------+-------+-------+")
showRow gs; showRow hs; showRow is
putStrLn ("+-------+-------+-------+")
type Sudoku = (Row,Column) -> Value
sud2grid :: Sudoku -> Grid
sud2grid s =
[ [ s (r,c) | c <- [1..9] ] | r <- [1..9] ]
grid2sud :: Grid -> Sudoku
grid2sud gr = \ (r,c) -> pos gr (r,c)
where
pos :: [[a]] -> (Row,Column) -> a
pos gr (r,c) = (gr !! (r-1)) !! (c-1)
showSudoku :: Sudoku -> IO()
showSudoku = showGrid . sud2grid
bl :: Int -> [Int]
bl x = concat $ filter (elem x) blocks
subGrid :: Sudoku -> (Row,Column) -> [Value]
subGrid s (r,c) =
[ s (r',c') | r' <- bl r, c' <- bl c ]
freeInSeq :: [Value] -> [Value]
freeInSeq seq = values \\ seq
freeInRow :: Sudoku -> Row -> [Value]
freeInRow s r =
freeInSeq [ s (r,i) | i <- positions ]
freeInColumn :: Sudoku -> Column -> [Value]
freeInColumn s c =
freeInSeq [ s (i,c) | i <- positions ]
freeInSubgrid :: Sudoku -> (Row,Column) -> [Value]
freeInSubgrid s (r,c) = freeInSeq (subGrid s (r,c))
injective :: Eq a => [a] -> Bool
injective xs = nub xs == xs
rowInjective :: Sudoku -> Row -> Bool
rowInjective s r = injective vs where
vs = filter (/= 0) [ s (r,i) | i <- positions ]
colInjective :: Sudoku -> Column -> Bool
colInjective s c = injective vs where
vs = filter (/= 0) [ s (i,c) | i <- positions ]
subgridInjective :: Sudoku -> (Row,Column) -> Bool
subgridInjective s (r,c) = injective vs where
vs = filter (/= 0) (subGrid s (r,c))
extend :: Sudoku -> ((Row,Column),Value) -> Sudoku
extend = update
update :: Eq a => (a -> b) -> (a,b) -> a -> b
update f (y,z) x = if x == y then z else f x
type Constraint = (Row,Column,[Value])
type Node = (Sudoku,[Constraint])
showNode :: Node -> IO()
showNode = showSudoku . fst
solved :: Node -> Bool
solved = null . snd
extendNode :: Node -> Constraint -> [Node]
extendNode (s,constraints) (r,c,vs) =
[(extend s ((r,c),v),
sortBy length3rd $
prune (r,c,v) constraints) | v <- vs ]
sameblock :: (Row,Column) -> (Row,Column) -> Bool
sameblock (r,c) (x,y) = bl r == bl x && bl c == bl y
initNode :: Grid -> [Node]
initNode gr = let s = grid2sud gr in
if (not . consistent) s then []
else [(s, constraints s)]
openPositions :: Sudoku -> [(Row,Column)]
openPositions s = [ (r,c) | r <- positions,
c <- positions,
s (r,c) == 0 ]
length3rd :: (a,b,[c]) -> (a,b,[c]) -> Ordering
length3rd (_,_,zs) (_,_,zs') = compare (length zs) (length zs')
constraints :: Sudoku -> [Constraint]
constraints s = sortBy length3rd
[(r,c, freeAtPos s (r,c)) |
(r,c) <- openPositions s ]
data Tree a = T a [Tree a] deriving (Eq,Ord,Show)
exmple1 = T 1 [T 2 [], T 3 []]
exmple2 = T 0 [exmple1,exmple1,exmple1]
grow :: (node -> [node]) -> node -> Tree node
grow step seed = T seed (map (grow step) (step seed))
count :: Tree a -> Int
count (T _ ts) = 1 + sum (map count ts)
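-- Small worked examples (derived from the definitions above):
--   count exmple1 == 3     -- one root plus two leaves
--   count exmple2 == 10    -- one root plus three copies of exmple1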
takeT :: Int -> Tree a -> Tree a
takeT 0 (T x _) = T x []
takeT n (T x ts) = T x $ map (takeT (n-1)) ts
search :: (node -> [node])
-> (node -> Bool) -> [node] -> [node]
search children goal [] = []
search children goal (x:xs)
| goal x = x : search children goal xs
| otherwise = search children goal ((children x) ++ xs)
solveNs :: [Node] -> [Node]
solveNs = search succNode solved
succNode :: Node -> [Node]
succNode (s,[]) = []
succNode (s,p:ps) = extendNode (s,ps) p
solveShowNs :: [Node] -> IO[()]
solveShowNs = sequence . fmap showNode . solveNs
example1 :: Grid
example1 = [[5,3,0,0,7,0,0,0,0],
[6,0,0,1,9,5,0,0,0],
[0,9,8,0,0,0,0,6,0],
[8,0,0,0,6,0,0,0,3],
[4,0,0,8,0,3,0,0,1],
[7,0,0,0,2,0,0,0,6],
[0,6,0,0,0,0,2,8,0],
[0,0,0,4,1,9,0,0,5],
[0,0,0,0,8,0,0,7,9]]
example2 :: Grid
example2 = [[0,3,0,0,7,0,0,0,0],
[6,0,0,1,9,5,0,0,0],
[0,9,8,0,0,0,0,6,0],
[8,0,0,0,6,0,0,0,3],
[4,0,0,8,0,3,0,0,1],
[7,0,0,0,2,0,0,0,6],
[0,6,0,0,0,0,2,8,0],
[0,0,0,4,1,9,0,0,5],
[0,0,0,0,8,0,0,7,9]]
example3 :: Grid
example3 = [[1,0,0,0,3,0,5,0,4],
[0,0,0,0,0,0,0,0,3],
[0,0,2,0,0,5,0,9,8],
[0,0,9,0,0,0,0,3,0],
[2,0,0,0,0,0,0,0,7],
[8,0,3,0,9,1,0,6,0],
[0,5,1,4,7,0,0,0,0],
[0,0,0,3,0,0,0,0,0],
[0,4,0,0,0,9,7,0,0]]
example4 :: Grid
example4 = [[1,2,3,4,5,6,7,8,9],
[2,0,0,0,0,0,0,0,0],
[3,0,0,0,0,0,0,0,0],
[4,0,0,0,0,0,0,0,0],
[5,0,0,0,0,0,0,0,0],
[6,0,0,0,0,0,0,0,0],
[7,0,0,0,0,0,0,0,0],
[8,0,0,0,0,0,0,0,0],
[9,0,0,0,0,0,0,0,0]]
example5 :: Grid
example5 = [[1,0,0,0,0,0,0,0,0],
[0,2,0,0,0,0,0,0,0],
[0,0,3,0,0,0,0,0,0],
[0,0,0,4,0,0,0,0,0],
[0,0,0,0,5,0,0,0,0],
[0,0,0,0,0,6,0,0,0],
[0,0,0,0,0,0,7,0,0],
[0,0,0,0,0,0,0,8,0],
[0,0,0,0,0,0,0,0,9]]
emptyN :: Node
emptyN = (\ _ -> 0,constraints (\ _ -> 0))
getRandomInt :: Int -> IO Int
getRandomInt n = getStdRandom (randomR (0,n))
getRandomItem :: [a] -> IO [a]
getRandomItem [] = return []
getRandomItem xs = do n <- getRandomInt maxi
return [xs !! n]
where maxi = length xs - 1
randomize :: Eq a => [a] -> IO [a]
randomize xs = do y <- getRandomItem xs
if null y
then return []
else do ys <- randomize (xs\\y)
return (head y:ys)
sameLen :: Constraint -> Constraint -> Bool
sameLen (_,_,xs) (_,_,ys) = length xs == length ys
getRandomCnstr :: [Constraint] -> IO [Constraint]
getRandomCnstr cs = getRandomItem (f cs)
where f [] = []
f (x:xs) = takeWhile (sameLen x) (x:xs)
rsuccNode :: Node -> IO [Node]
rsuccNode (s,cs) = do xs <- getRandomCnstr cs
if null xs
then return []
else return
(extendNode (s,cs\\xs) (head xs))
rsolveNs :: [Node] -> IO [Node]
rsolveNs ns = rsearch rsuccNode solved (return ns)
rsearch :: (node -> IO [node])
-> (node -> Bool) -> IO [node] -> IO [node]
rsearch succ goal ionodes =
do xs <- ionodes
if null xs
then return []
else
if goal (head xs)
then return [head xs]
else do ys <- rsearch succ goal (succ (head xs))
if (not . null) ys
then return [head ys]
else if null (tail xs) then return []
else
rsearch
succ goal (return $ tail xs)
genRandomSudoku :: IO Node
genRandomSudoku = do [r] <- rsolveNs [emptyN]
return r
randomS = genRandomSudoku >>= showNode
uniqueSol :: Node -> Bool
uniqueSol node = singleton (solveNs [node]) where
singleton [] = False
singleton [x] = True
singleton (x:y:zs) = False
eraseS :: Sudoku -> (Row,Column) -> Sudoku
eraseS s (r,c) (x,y) | (r,c) == (x,y) = 0
| otherwise = s (x,y)
eraseN :: Node -> (Row,Column) -> Node
eraseN n (r,c) = (s, constraints s)
where s = eraseS (fst n) (r,c)
minimalize :: Node -> [(Row,Column)] -> Node
minimalize n [] = n
minimalize n ((r,c):rcs) | uniqueSol n' = minimalize n' rcs
| otherwise = minimalize n rcs
where n' = eraseN n (r,c)
filledPositions :: Sudoku -> [(Row,Column)]
filledPositions s = [ (r,c) | r <- positions,
c <- positions, s (r,c) /= 0 ]
genProblem :: Node -> IO Node
genProblem n = do ys <- randomize xs
return (minimalize n ys)
where xs = filledPositions (fst n)
-- main :: IO ()
-- main = do [r] <- rsolveNs [emptyN]
-- showNode r
-- s <- genProblem r
-- showNode s
|
vdweegen/UvA-Software_Testing
|
Lab5/Willem/Lecture5NRC.hs
|
gpl-3.0
| 11,492 | 0 | 16 | 3,393 | 6,043 | 3,406 | 2,637 | 296 | 5 |
{-# Language OverloadedStrings #-}
module Equ.PreExpr.Symbols where
import Equ.Syntax
import Equ.Types
import Equ.Theories.AbsName
-- | The zero constant.
natZero :: Constant
natZero = Constant { conRepr = "0"
, conName = Zero
, conTy = TyAtom ATyNat
}
-- | Successor operator.
natSucc :: Operator
natSucc = Operator { opRepr = "succ"
, opName = Succ
, opTy = TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = None
, opNotationTy = NPrefix
                   , opPrec = 23 -- Analyze.
, opGlyphs = []
}
-- | Addition operator.
natSum :: Operator
natSum = Operator { opRepr = "+"
, opName = Sum
, opTy = TyAtom ATyNat :-> TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 21
, opGlyphs = []
}
-- | Product operator.
natProd :: Operator
natProd = Operator { opRepr = "*"
, opName = Prod
, opTy = TyAtom ATyNat :-> TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 22
, opGlyphs = []
}
-- | Division operator.
natDiv :: Operator
natDiv = Operator { opRepr = "//"
, opName = Div
, opTy = TyAtom ATyNat :-> TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 22
, opGlyphs = []
}
-- | Remainder operator.
natMod :: Operator
natMod = Operator { opRepr = "%"
, opName = Mod
, opTy = TyAtom ATyNat :-> TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 22
, opGlyphs = []
}
-- | Predecessor operator
natPred :: Operator
natPred = Operator { opRepr = "pred"
, opName = Pred
, opTy = TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = None
, opNotationTy = NPrefix
                   , opPrec = 23 -- Analyze.
, opGlyphs = []
}
-- | Subtraction operator
natDif :: Operator
natDif = Operator { opRepr = "-"
, opName = Dif
, opTy = TyAtom ATyNat :-> TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 21
, opGlyphs = []
}
-- | Numeric negation operator.
natNegNum :: Operator
natNegNum = Operator { opRepr = "-"
, opName = NegNum
, opTy = TyAtom ATyNat :-> TyAtom ATyNat
, opAssoc = ALeft
, opNotationTy = NPrefix
, opPrec = 21
, opGlyphs = []
}
-- | Constructor for the type of polymorphic lists; the string gives the
-- name of the type variable
tyListVar :: String -> Type
tyListVar = TyList . tyVar
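-- For example (following directly from the definition above): tyListVar "A"
-- is the same as TyList (tyVar "A"), i.e. the type of lists over the type
-- variable A.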
-- | The empty list.
listEmpty :: Constant
listEmpty = Constant { conRepr = "[]"
, conName = Empty
, conTy = tyListVar "B"
}
-- | Extend the list with one element on the left.
listApp :: Operator
listApp = Operator { opRepr = "▹"
, opName = Append
, opTy = tyVar "A" :-> tyListVar "A" :-> tyListVar "A"
, opAssoc = ARight
, opNotationTy = NInfix
, opPrec = 12
, opGlyphs = ["|>"]
}
-- | Take the n-th element of the list.
listIndex :: Operator
listIndex = Operator { opRepr = "!"
, opName = Index
, opTy = tyListVar "A" :-> TyAtom ATyNat :-> tyVar "A"
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 24
, opGlyphs = []
}
-- | List concatenation.
listConcat :: Operator
listConcat = Operator { opRepr = "++"
, opName = Concat
, opTy = tyListVar "A" :-> tyListVar "A" :-> tyListVar "A"
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 10
, opGlyphs = []
}
-- | Length of the list.
listLength :: Operator
listLength = Operator { opRepr = "#"
, opName = Length
, opTy = tyListVar "A" :-> TyAtom ATyNat
, opAssoc = None
, opNotationTy = NPrefix
, opPrec = 10
, opGlyphs = []
}
-- | Takes the first n elements of a list.
listTake :: Operator
listTake = Operator { opRepr = "↑"
, opName = Take
, opTy = tyListVar "A" :-> TyAtom ATyNat :-> tyListVar "A"
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 10
, opGlyphs = ["^"]
}
-- | Drops the first n elements of a list.
listDrop :: Operator
listDrop = Operator { opRepr = "↓"
, opName = Drop
, opTy = tyListVar "A" :-> TyAtom ATyNat :-> tyListVar "A"
, opAssoc = ALeft
, opNotationTy = NInfix
, opPrec = 10
, opGlyphs = ["!"]
}
|
miguelpagano/equ
|
Equ/PreExpr/Symbols.hs
|
gpl-3.0
| 5,931 | 0 | 9 | 2,870 | 1,125 | 679 | 446 | 127 | 1 |
{-# LANGUAGE GADTs #-}
module Parselang where
import Math.Qeatem
import Language
import Data.Char
import Data.Maybe
import Control.Monad
import Control.Applicative ((<$>))
import Text.Parsec
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as H
import qualified Data.Map as M
type MetaParser a = Parsec String GlyphParseState a
data GlyphParseState = GlyphParseState {
stateGlyphData :: GlyphData,
stateFunctions :: HM.HashMap String [TypedVal],
stateDeps :: Deps,
stateInfixFuns :: HM.HashMap String [TypedVal],
stateInfixOps :: HM.HashMap String (Int, [TypedVal])}
makeGlyphData :: [Name] -> HM.HashMap Name TypedVal -> GlyphData
makeGlyphData params vars = GlyphData {
glyphParams = H.fromList params,
glyphVars = vars,
glyphHighlight = M.empty,
glyphPathOperations = [],
glyphEmptyVars = []}
makeParseState :: HM.HashMap String [TypedVal]
               -> HM.HashMap Name TypedVal
               -> HM.HashMap String [TypedVal]
               -> HM.HashMap String (Int, [TypedVal])
               -> [Name] -> GlyphParseState
makeParseState funs vars infixFunctions infixOps params =
GlyphParseState {
stateGlyphData = makeGlyphData params vars,
stateFunctions = funs,
stateInfixFuns = infixFunctions,
stateDeps = emptyDeps,
stateInfixOps = infixOps
}
modifyGlyphData :: (GlyphData -> GlyphData) -> GlyphParseState -> GlyphParseState
modifyGlyphData f state =
state {stateGlyphData = f (stateGlyphData state)}
prependHighlight :: SyntaxToken -> SourcePos -> SourcePos -> GlyphParseState -> GlyphParseState
prependHighlight syntax sp sp2 =
let line = sourceLine sp
col = sourceColumn sp
len = sourceColumn sp2 - col
in modifyGlyphData $
\g -> g {glyphHighlight = M.insert (line, col)
(SourceRange line col len, syntax)
(glyphHighlight g)}
setSyntax :: SourcePos -> SyntaxToken -> MetaParser ()
setSyntax pos syntax = do
sp <- getPosition
void $ modifyState $ prependHighlight syntax pos sp
someSpace :: MetaParser ()
someSpace = void $ many $ oneOf " \t"
spacedAfter :: MetaParser a -> MetaParser a
spacedAfter p = do
r <- p
someSpace
return r
pointDecimal :: MetaParser Double
pointDecimal = do
void $ char '.'
s <- many1 digit
return $ foldr (\d t -> t/10+fromIntegral (digitToInt d)) 0 s / 10
decimal :: MetaParser Int
decimal = do
s <- many1 digit
return $ foldl (\t d -> t*10+fromIntegral (digitToInt d)) 0 s
number :: MetaParser Double
number =
pointDecimal <|> do
a <- decimal
b <- option 0 pointDecimal
return (fromIntegral a + b)
numericLit :: MetaParser Double
numericLit = spacedAfter $ do
pos <- getPosition
minus <- option 1 $ char '-' >> someSpace >> return (-1)
a <- number
b <- option 1 $ try $ someSpace >> char '/' >> someSpace >> number
setSyntax pos (ConstantTok $ minus * a/b)
return (minus * a/b)
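-- Illustrative inputs accepted by numericLit (a sketch derived from the
-- parsers above):
--   "3.14"   parses to 3.14
--   ".25"    parses to 0.25
--   "- 1/2"  parses to -0.5   -- optional leading minus, optional "/ denominator"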
index :: MetaParser Int
index = between (char '[') (char ']') decimal
castType :: Type a -> MetaParser TypedVal -> MetaParser a
castType t p =
do e <- p
case fromType t e of
Nothing -> fail ""
Just v -> return v
<?> typeStr t
numericExpr :: MetaParser DepExp
numericExpr = castType NumType expression
interpolate :: MetaParser (DepExp -> TypedVal)
interpolate = spacedAfter $ do
char '[' >> someSpace
a <- expression
someSpace >> char ',' >> someSpace
b <- expression
void $ someSpace >> char ']'
case (a, b) of
(TypedVal NumType dlA, TypedVal NumType dlB) ->
return $ \x -> addType $ (1-x)*dlA + x*dlB
(TypedVal PairType (Pair dlAx dlAy), TypedVal PairType (Pair dlBx dlBy)) ->
return $ \x -> addType $ Pair ((1-x)*dlAx + x*dlBx) ((1-x)*dlAy + x*dlBy)
_ -> fail "Can only interpolate between numeric or pair"
suffix :: MetaParser Suffix
suffix = (Tag <$> many1 letter) <|>
(SuffixIndex <$> decimal)
varOrApp :: MetaParser TypedVal
varOrApp = spacedAfter $ do
state <- getState
pos <- getPosition
a <- try $ many1 letter
case HM.lookup a (stateFunctions state) of
Just [] -> fail "Unexpected: Empty function."
Just f -> do
setSyntax pos (FunctionTok a)
foldr1 (<|>) $ map (try . parseFun) f
Nothing
| isJust (HM.lookup a $ stateInfixFuns state) -> fail ""
| otherwise -> do
name <- Name a <$> sepBy suffix (optional $ char '.')
if H.member name (glyphParams $ stateGlyphData state)
-- parameter
then do setSyntax pos (ParameterTok name)
return $ TypedVal NumType $ makeConstant $
Parameter $ ParamName name
-- variable
else do setSyntax pos (VariableTok name)
return $ fromMaybe
(TypedVal NumType $ makeVariable $ Var name)
(HM.lookup name $ glyphVars $ stateGlyphData state)
parseFun :: TypedVal -> MetaParser TypedVal
-- a suffix argument can be used without parens, for example penpos1(0, 90)
parseFun fun@(TypedVal (FunType SuffixType a) f) =
do s <- optional (char '.') >>
sepBy1 suffix (optional $ char '.')
someSpace
case a of
FunType _ _ ->
char '(' >> someSpace >>
(parseArgList $ TypedVal a $ f s)
_ -> return $ TypedVal a $ f s
<|> case a of
FunType _ _ ->
someSpace >> char '(' >>
someSpace >> parseArgList fun
_ -> fail ""
<?> "Suffix argument."
parseFun fun@(TypedVal (FunType a b) f) = do
someSpace
case b of
FunType _ _ ->
char '(' >> someSpace >>
parseArgList fun
_ -> (TypedVal b . f) <$> castType a term
parseFun val = return val
parseArgList :: TypedVal -> MetaParser TypedVal
parseArgList (TypedVal (FunType a b) f) =
do e <- castType a term
someSpace
case b of
FunType _ _ -> char ',' >> someSpace
_ -> return ()
parseArgList $ TypedVal b $ f e
parseArgList v =
  (char ')' >> someSpace >> return v) <|> fail "too many arguments."
term :: MetaParser TypedVal
term = spacedAfter $ do
t <- simpleTerm
case t of
TypedVal NumType dl ->
($dl) <$> option (TypedVal NumType) interpolate
_ -> return t
optNegate :: MetaParser TypedVal -> MetaParser TypedVal
optNegate p = try $ do
minus <- optionMaybe $ char '-' >> someSpace
case minus of
Nothing -> p
Just _ -> do
n <- p
(case n of
TypedVal NumType m ->
return $ TypedVal NumType (-m)
TypedVal PairType (Pair x y) ->
return $ TypedVal PairType (Pair (-x) (-y))
_ -> fail ""
) <?> "numeric of pair expression."
simpleTerm :: MetaParser TypedVal
simpleTerm = optNegate varOrApp <|> optNegate subExpr <|> do
a <- numericLit
b <- optionMaybe (subExpr <|> varOrApp)
case b of
Nothing ->
return $ TypedVal NumType $ makeConstant $ Number a
Just (TypedVal NumType e) ->
return $ TypedVal NumType $ (makeConstant $ Number a)*e
Just (TypedVal PairType (Pair x y)) ->
return $ TypedVal PairType $
Pair ((makeConstant $ Number a)*x) ((makeConstant $ Number a)*y)
_ ->
fail "Can only multiply numeric literal with numeric or pair expression."
subExpr :: MetaParser TypedVal
subExpr = spacedAfter $ do
char '(' >> someSpace
TypedVal t e <- expression
someSpace
case t of
NumType ->
(char ')' >> someSpace >> return (TypedVal t e))
<|> do char ',' >> someSpace
e2 <- numericExpr
void $ someSpace >> char ')'
return $ TypedVal PairType $ Pair e e2
_ -> someSpace >> char ')' >> return (TypedVal t e)
-- term with optional infix function application
infixTerm :: MetaParser TypedVal
infixTerm = term >>= infixApp
infixFuns :: MetaParser [TypedVal]
infixFuns = do
pos <- getPosition
state <- getState
a <- many1 letter
case HM.lookup a (stateInfixFuns state) of
Just l -> setSyntax pos (InfixTok a) >> return l
Nothing -> fail ""
-- optionally parse and apply an infix function
infixApp :: TypedVal -> MetaParser TypedVal
infixApp t = do
(do funs <- try infixFuns
when (null funs) (fail "Empty function definition.")
(foldr1 (<|>) $ map (try . appInfix t) funs)
>>= infixApp
) <|> return t
-- apply infix function
appInfix :: TypedVal -> TypedVal -> MetaParser TypedVal
appInfix t (TypedVal (FunType a b) f) = do
case fromType a t of
Just v -> parseFun $ TypedVal b $ f v
Nothing -> fail $ "Infix function expects " ++ typeStr a
appInfix _ _ = fail "Invalid function"
-- find the next operator and check fixity
operator :: Int -> MetaParser (Int, String, [TypedVal])
operator n = spacedAfter $ do
state <- getState
pos <- getPosition
s <- many1 $ oneOf "<=>:|+-/*\\!?#&@^~"
setSyntax pos (InfixTok s)
case HM.lookup s (stateInfixOps state) of
Nothing -> fail ""
Just (m, f)
| m < n -> fail ""
| otherwise -> return (m, s, f)
applyInfixOp :: TypedVal -> TypedVal -> TypedVal -> MetaParser TypedVal
applyInfixOp v1 v2 (TypedVal (FunType t1 (FunType t2 t3)) f) =
case (fromType t1 v1, fromType t2 v2) of
(Just r1, Just r2) -> return $ TypedVal t3 $ f r1 r2
_ -> fail ""
applyInfixOp _ _ _ = fail "Illegal function"
getInfixOp :: Int -> TypedVal -> MetaParser TypedVal
getInfixOp n v@(TypedVal t1 _) =
(do (m, s, funs) <- try $ operator n
next <- term
v2@(TypedVal t2 _) <- getInfixOp (m+1) next
res <- foldr (<|>)
(fail $ typeStr t1 ++ " " ++ s ++ " " ++
typeStr t2 ++ " not defined.") $
map (applyInfixOp v v2) funs
getInfixOp n res
) <|> return v
expression :: MetaParser TypedVal
expression = infixTerm >>= getInfixOp 0
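-- The operator layer above behaves as a precedence-climbing parser (an
-- observation about the code, not an added feature): 'getInfixOp n' only
-- accepts operators of precedence >= n and parses their right operand with
-- 'getInfixOp (m+1)', so with the usual precedences "a + b * c" groups as
-- a + (b * c), and operators of equal precedence associate to the left.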
pairEq :: Pair -> MetaParser ()
pairEq e@(Pair x y) = do
void $ char '='; someSpace
Pair x2 y2 <- castType PairType expression
saveDeps $ (x, y) =&= (x2, y2)
pairEq e <|> return ()
numericEq :: DepExp -> MetaParser ()
numericEq e = do
void $ char '='; someSpace
e2 <- castType NumType expression
saveDeps $ e === e2
numericEq e <|> return ()
saveDeps :: (Deps -> Either (DepError Expression) Deps) -> MetaParser ()
saveDeps dep = do
state <- getState
case dep (stateDeps state) of
Left (UnknownUnary s) ->
fail $ "argument of " ++ s ++ " must be a known variable."
Left (UnknownBinary s) ->
fail $ "argument of " ++ s ++ " must be a known variable."
Left (InconsistentEq a) ->
fail $ "Inconsistent Equation, off by " ++ show a
Left RedundantEq ->
fail "Redundant Equation."
Right deps -> setState $ state {stateDeps = deps}
equations :: MetaParser ()
equations = do
(TypedVal t v) <- expression
case t of
NumType -> numericEq v
PairType -> pairEq v
_ -> fail $ "expected <numeric expression> or <pair expression>."
-- statement = declaration <|> equations <|> pathOp
|
kuribas/metatyper
|
src/Parselang.hs
|
gpl-3.0
| 10,801 | 0 | 23 | 2,793 | 3,958 | 1,925 | 2,033 | 299 | 5 |
module Main where
-- |
-- Module : Acme.CuteGirl
-- Copyright : (c) Mateusz Kowalczyk 2014
-- License : GPL-3
--
-- Maintainer : [email protected]
-- Stability : stable
--
-- Executable module handling "Acme.CuteGirl"
import Acme.CuteGirl
import System.Environment (getArgs)
main :: IO ()
main = do
args <- getArgs
if "-h" `elem` args || "--help" `elem` args
then putStrLn help
else if "-j" `elem` args || "--japanese" `elem` args
then jpnPrintCuteGirl
else printCuteGirl
where
help = concat [ "CuteGirl -- attempts to get a cute girl.\n"
, "\n"
, "Flags:\n"
, "-h, --help Prints this help menu\n"
, "-j, --japanese Prints the possible cute girl natively."
]
|
Fuuzetsu/acme-cutegirl
|
src/Main.hs
|
gpl-3.0
| 825 | 0 | 11 | 268 | 129 | 79 | 50 | 16 | 3 |
module Database.Design.Ampersand.Graphic.ClassDiag2Dot (
classdiagram2dot
)
where
import Data.List
import Database.Design.Ampersand.Basics
import Database.Design.Ampersand.Classes
import Database.Design.Ampersand.ADL1 hiding (Association,Box)
import Database.Design.Ampersand.Misc
import Data.String
import Data.GraphViz.Types.Canonical hiding (attrs)
import Data.GraphViz.Attributes.Complete as GVcomp
import Data.GraphViz.Attributes as GVatt
import Data.GraphViz.Attributes.HTML as Html
import Database.Design.Ampersand.Graphic.ClassDiagram
-- | translate a ClassDiagram to a DotGraph, so it can be used to show it as a picture.
classdiagram2dot :: Options -> ClassDiag -> DotGraph String
classdiagram2dot opts cd
= DotGraph { strictGraph = False
, directedGraph = True
, graphID = Nothing
, graphStatements =
DotStmts
{ attrStmts = [ GraphAttrs [ RankDir FromLeft
, bgColor White]
]
-- ++ [NodeAttrs [ ]]
++ [EdgeAttrs [ FontSize 11
, MinLen 4
] ]
, subGraphs = []
, nodeStmts = allNodes (classes cd) (nodes cd >- nodes (classes cd))
, edgeStmts = (map association2edge (assocs cd)) ++
(map aggregation2edge (aggrs cd)) ++
(concatMap generalization2edges (geners cd))
}
}
where
allNodes :: [Class] -> [String] -> [DotNode String]
allNodes cs others =
map class2node cs ++
map nonClass2node others
class2node :: Class -> DotNode String
class2node cl = DotNode
{ nodeID = name cl
, nodeAttributes = [ Shape PlainText
, GVcomp.Color [WC (X11Color Purple) Nothing]
, Label (HtmlLabel (Table htmlTable))
]
} where
htmlTable = HTable { tableFontAttrs = Nothing
, tableAttrs = [ Html.BGColor (X11Color White)
, Html.Color (X11Color Black) -- the color used for all cellborders
, Html.Border 0 -- 0 = no border
, CellBorder 1
, CellSpacing 0
]
, tableRows = [ Cells -- Header row, containing the name of the class
[ LabelCell
[ Html.BGColor (X11Color Gray10)
, Html.Color (X11Color Black)
]
(Html.Text [ Html.Font [ Html.Color (X11Color White)
]
[Html.Str (fromString (name cl))]
]
)
]
]++
map attrib2row (clAtts cl) ++
map method2row (clMths cl)
}
where
attrib2row a = Cells
[ Html.LabelCell [ Html.Align HLeft
, (Port .PN .fromString) (attNm a)
]
( Html.Text [ Html.Str (fromString (if attOptional a then "o " else "+ "))
, Html.Str (fromString (name a))
, Html.Str (fromString " : ")
, Html.Str (fromString (attTyp a))
]
)
]
method2row m = Cells
[ Html.LabelCell [ Html.Align HLeft]
( Html.Text [ Html.Str (fromString "+ ")
, Html.Str (fromString (show m))
]
)
]
nonClass2node :: String -> DotNode String
nonClass2node str = DotNode { nodeID = str
, nodeAttributes = [ Shape Box3D
, Label (StrLabel (fromString str))
]
}
---- In order to make classes, all relations that are univalent and injective are flipped
---- attRels contains all relations that occur as attributes in classes.
-- attRels = [r |r<-rels, isUni r, not (isInj r)] ++[flp r |r<-rels, not (isUni r), isInj r] ++
-- [r |r<-rels, isUni r, isInj r, isSur r]++[flp r |r<-rels, isUni r , isInj r, not (isSur r)]
---- assRels contains all relations that do not occur as attributes in classes
-- assRels = [r |r<-relsLim, not (isUni r), not (isInj r)]
-- attrs rs = [ OOAttr ((name.head.relsMentionedIn) r) (name (target r)) (not(isTot r))
-- | r<-rs, not (isPropty r)]
-- isPropty r = null([Sym,Asy]>-multiplicities r)
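-- A typical way to render the resulting graph to DOT text (a sketch; assumes
-- 'printDotGraph' from Data.GraphViz.Printing and a lazy Text writer, neither
-- of which is imported by this module):
--   Data.Text.Lazy.IO.writeFile "classes.dot" (printDotGraph (classdiagram2dot opts cd))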
-------------------------------
-- ASSOCIATIONS: --
-------------------------------
association2edge :: Association -> DotEdge String
association2edge ass =
DotEdge { fromNode = assSrc ass
, toNode = assTgt ass
, edgeAttributes = [ ArrowHead (AType [(ArrMod OpenArrow BothSides, NoArrow)]) -- No arrowHead
, HeadLabel (mult2Lable (assrhm ass))
, TailLabel (mult2Lable (asslhm ass))
, Label (StrLabel (fromString (assrhr ass)))
, LabelFloat True
]
++[(TailPort (LabelledPort (PN ((fromString.assSrcPort) ass)) Nothing))]
}
where
mult2Lable = StrLabel . fromString . mult2Str
mult2Str (Mult MinZero MaxOne) = "0-1"
mult2Str (Mult MinZero MaxMany) = "*"
mult2Str (Mult MinOne MaxOne) = "1"
mult2Str (Mult MinOne MaxMany) = "1-*"
-------------------------------
-- AGGREGATIONS: --
-------------------------------
aggregation2edge :: Aggregation -> DotEdge String
aggregation2edge agg =
DotEdge { fromNode = name . aggChild $ agg
, toNode = name . aggParent $ agg
, edgeAttributes = [ ArrowHead (AType [(ArrMod (case aggDel agg of
Open -> OpenArrow
Close -> FilledArrow
) BothSides , Diamond)
])
]
}
-------------------------------
-- GENERALIZATIONS: -- -- Ampersand statements such as "SPEC Dolphin ISA Animal" are called generalization.
-- -- -- Generalizations are represented by a red arrow with a (larger) open triangle as arrowhead
-------------------------------
generalization2edges :: Generalization -> [DotEdge String]
generalization2edges ooGen = sub2edges (genAgen ooGen)
where
sub2edges gen
= [DotEdge { fromNode = name spec
, toNode = name gener
, edgeAttributes
= [ ArrowHead (AType [(ArrMod OpenArrow BothSides, Normal)]) -- Open normal arrowHead
, ArrowSize 2.0
] ++
( if blackWhite opts
then [Style [SItem Dashed []]]
else [GVcomp.Color [WC (X11Color Red) Nothing]]
)
}
| (spec,gener)<-splits gen]
splits gen = case gen of
Isa{} -> [(genspc gen, gengen gen)]
IsE{} -> [(genspc gen, x ) | x<-(genrhs gen)]
class CdNode a where
nodes :: a->[String]
instance CdNode ClassDiag where
nodes cd = nub (concat ( map nodes (classes cd)
++map nodes (assocs cd)
++map nodes (aggrs cd)
++map nodes (geners cd)
) )
instance CdNode Class where
nodes cl = [clName cl]
instance CdNode a => CdNode [a] where
nodes = concatMap nodes
instance CdNode Association where
nodes a = [assSrc a,assTgt a]
instance CdNode Aggregation where
nodes (OOAggr _ s t) = map name [s,t]
instance CdNode Generalization where
nodes g = map name ((concs.genAgen) g)
|
guoy34/ampersand
|
src/Database/Design/Ampersand/Graphic/ClassDiag2Dot.hs
|
gpl-3.0
| 9,989 | 2 | 26 | 5,138 | 1,799 | 963 | 836 | 125 | 8 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE EmptyDataDeriving #-}
module Language.Intentionally.Types where
import Data.Set (Set(..))
import qualified Data.Set as Set
-- User or device
newtype Agent = Agent (Set AState)
deriving (Show, Eq, Ord)
data AState = AP Physics
| AB Body
| AI Instinct
| AS Social
deriving (Show, Eq, Ord)
-- abstraction of Reality based interface categories
data Physics
deriving (Show, Eq, Ord)
data Body = Human | Device
deriving (Show, Eq, Ord)
data Instinct
deriving (Show, Eq, Ord)
data Social
deriving (Show, Eq, Ord)
data Action
deriving (Show, Eq, Ord)
data Intent a where
-- informational
Notify :: String -> Intent String
Inform :: String -> Intent String
-- actible
Suggest :: Action -> Intent Action
Command :: Action -> Intent Action
-- interactable
  Query :: Context -> Intent Context
-- agent oriented
Focus :: Body -> Intent Body
Constrain :: Physics -> Intent Physics
Aware :: Instinct -> Intent Instinct
Relate :: Social -> Intent Social
Learn :: Agent -> Intent Agent
deriving (Show, Eq, Ord)
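-- For illustration (assuming the constructors above): 'Notify "battery low"'
-- has type 'Intent String', while 'Focus Human' has type 'Intent Body'.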
data Noise
deriving (Show, Eq, Ord)
data Context = Context Agent Agent Intent
deriving (Show, Eq, Ord)
data Meaning
deriving (Show, Eq, Ord)
|
mpahrens/Intentionally
|
Language/Intentionally/Types.hs
|
gpl-3.0
| 1,248 | 0 | 7 | 320 | 404 | 225 | 179 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Projects.ServiceAccounts.Keys.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists every ServiceAccountKey for a service account.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.projects.serviceAccounts.keys.list@.
module Network.Google.Resource.IAM.Projects.ServiceAccounts.Keys.List
(
-- * REST Resource
ProjectsServiceAccountsKeysListResource
-- * Creating a Request
, projectsServiceAccountsKeysList
, ProjectsServiceAccountsKeysList
-- * Request Lenses
, psaklKeyTypes
, psaklXgafv
, psaklUploadProtocol
, psaklAccessToken
, psaklUploadType
, psaklName
, psaklCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.projects.serviceAccounts.keys.list@ method which the
-- 'ProjectsServiceAccountsKeysList' request conforms to.
type ProjectsServiceAccountsKeysListResource =
"v1" :>
Capture "name" Text :>
"keys" :>
QueryParams "keyTypes"
ProjectsServiceAccountsKeysListKeyTypes
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListServiceAccountKeysResponse
-- | Lists every ServiceAccountKey for a service account.
--
-- /See:/ 'projectsServiceAccountsKeysList' smart constructor.
data ProjectsServiceAccountsKeysList =
ProjectsServiceAccountsKeysList'
{ _psaklKeyTypes :: !(Maybe [ProjectsServiceAccountsKeysListKeyTypes])
, _psaklXgafv :: !(Maybe Xgafv)
, _psaklUploadProtocol :: !(Maybe Text)
, _psaklAccessToken :: !(Maybe Text)
, _psaklUploadType :: !(Maybe Text)
, _psaklName :: !Text
, _psaklCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsServiceAccountsKeysList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psaklKeyTypes'
--
-- * 'psaklXgafv'
--
-- * 'psaklUploadProtocol'
--
-- * 'psaklAccessToken'
--
-- * 'psaklUploadType'
--
-- * 'psaklName'
--
-- * 'psaklCallback'
projectsServiceAccountsKeysList
:: Text -- ^ 'psaklName'
-> ProjectsServiceAccountsKeysList
projectsServiceAccountsKeysList pPsaklName_ =
ProjectsServiceAccountsKeysList'
{ _psaklKeyTypes = Nothing
, _psaklXgafv = Nothing
, _psaklUploadProtocol = Nothing
, _psaklAccessToken = Nothing
, _psaklUploadType = Nothing
, _psaklName = pPsaklName_
, _psaklCallback = Nothing
}
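-- Example construction (a sketch, not generated documentation): the wildcard
-- project form described under 'psaklName' can be used directly, e.g.
--   projectsServiceAccountsKeysList "projects/-/serviceAccounts/{ACCOUNT}"
-- and the optional fields can then be adjusted through the lenses below.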
-- | Filters the types of keys the user wants to include in the list
-- response. Duplicate key types are not allowed. If no key type is
-- provided, all keys are returned.
psaklKeyTypes :: Lens' ProjectsServiceAccountsKeysList [ProjectsServiceAccountsKeysListKeyTypes]
psaklKeyTypes
= lens _psaklKeyTypes
(\ s a -> s{_psaklKeyTypes = a})
. _Default
. _Coerce
-- | V1 error format.
psaklXgafv :: Lens' ProjectsServiceAccountsKeysList (Maybe Xgafv)
psaklXgafv
= lens _psaklXgafv (\ s a -> s{_psaklXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
psaklUploadProtocol :: Lens' ProjectsServiceAccountsKeysList (Maybe Text)
psaklUploadProtocol
= lens _psaklUploadProtocol
(\ s a -> s{_psaklUploadProtocol = a})
-- | OAuth access token.
psaklAccessToken :: Lens' ProjectsServiceAccountsKeysList (Maybe Text)
psaklAccessToken
= lens _psaklAccessToken
(\ s a -> s{_psaklAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
psaklUploadType :: Lens' ProjectsServiceAccountsKeysList (Maybe Text)
psaklUploadType
= lens _psaklUploadType
(\ s a -> s{_psaklUploadType = a})
-- | Required. The resource name of the service account in the following
-- format: \`projects\/{PROJECT_ID}\/serviceAccounts\/{ACCOUNT}\`. Using
-- \`-\` as a wildcard for the \`PROJECT_ID\`, will infer the project from
-- the account. The \`ACCOUNT\` value can be the \`email\` address or the
-- \`unique_id\` of the service account.
psaklName :: Lens' ProjectsServiceAccountsKeysList Text
psaklName
= lens _psaklName (\ s a -> s{_psaklName = a})
-- | JSONP
psaklCallback :: Lens' ProjectsServiceAccountsKeysList (Maybe Text)
psaklCallback
= lens _psaklCallback
(\ s a -> s{_psaklCallback = a})
instance GoogleRequest
ProjectsServiceAccountsKeysList
where
type Rs ProjectsServiceAccountsKeysList =
ListServiceAccountKeysResponse
type Scopes ProjectsServiceAccountsKeysList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsServiceAccountsKeysList'{..}
= go _psaklName (_psaklKeyTypes ^. _Default)
_psaklXgafv
_psaklUploadProtocol
_psaklAccessToken
_psaklUploadType
_psaklCallback
(Just AltJSON)
iAMService
where go
= buildClient
(Proxy ::
Proxy ProjectsServiceAccountsKeysListResource)
mempty
|
brendanhay/gogol
|
gogol-iam/gen/Network/Google/Resource/IAM/Projects/ServiceAccounts/Keys/List.hs
|
mpl-2.0
| 6,101 | 0 | 17 | 1,372 | 801 | 469 | 332 | 124 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.STS.DecodeAuthorizationMessage
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Decodes additional information about the authorization status of a request
-- from an encoded message returned in response to an AWS request.
--
-- For example, if a user is not authorized to perform an action that he or she
-- has requested, the request returns a 'Client.UnauthorizedOperation' response
-- (an HTTP 403 response). Some AWS actions additionally return an encoded
-- message that can provide details about this authorization failure.
--
-- Only certain AWS actions return an encoded authorization message. The
-- documentation for an individual action indicates whether that action returns
-- an encoded message in addition to returning an HTTP code. The message is
-- encoded because the details of the authorization status can constitute
-- privileged information that the user who requested the action should not see.
-- To decode an authorization status message, a user must be granted permissions
-- via an IAM policy to request the 'DecodeAuthorizationMessage' ('sts:DecodeAuthorizationMessage') action.
--
-- The decoded message includes the following type of information:
--
-- Whether the request was denied due to an explicit deny or due to the
-- absence of an explicit allow. For more information, see <http://docs.aws.amazon.com/IAM/latest/UserGuide/AccessPolicyLanguage_EvaluationLogic.html#policy-eval-denyallow Determining Whether aRequest is Allowed or Denied> in /Using IAM/. The principal who made the
-- request. The requested action. The requested resource. The values of
-- condition keys in the context of the user's request.
--
-- <http://docs.aws.amazon.com/STS/latest/APIReference/API_DecodeAuthorizationMessage.html>
module Network.AWS.STS.DecodeAuthorizationMessage
(
-- * Request
DecodeAuthorizationMessage
-- ** Request constructor
, decodeAuthorizationMessage
-- ** Request lenses
, damEncodedMessage
-- * Response
, DecodeAuthorizationMessageResponse
-- ** Response constructor
, decodeAuthorizationMessageResponse
-- ** Response lenses
, damrDecodedMessage
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.STS.Types
import qualified GHC.Exts
newtype DecodeAuthorizationMessage = DecodeAuthorizationMessage
{ _damEncodedMessage :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DecodeAuthorizationMessage' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'damEncodedMessage' @::@ 'Text'
--
decodeAuthorizationMessage :: Text -- ^ 'damEncodedMessage'
-> DecodeAuthorizationMessage
decodeAuthorizationMessage p1 = DecodeAuthorizationMessage
{ _damEncodedMessage = p1
}
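-- Example use (a sketch, assuming the usual amazonka workflow in which
-- requests are executed with 'Network.AWS.send' inside a configured
-- environment):
--   rs <- send (decodeAuthorizationMessage encodedMsg)
--   print (rs ^. damrDecodedMessage)
-- where 'encodedMsg' is the encoded message returned by a failed request.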
-- | The encoded message that was returned with the response.
damEncodedMessage :: Lens' DecodeAuthorizationMessage Text
damEncodedMessage =
lens _damEncodedMessage (\s a -> s { _damEncodedMessage = a })
newtype DecodeAuthorizationMessageResponse = DecodeAuthorizationMessageResponse
{ _damrDecodedMessage :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'DecodeAuthorizationMessageResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'damrDecodedMessage' @::@ 'Maybe' 'Text'
--
decodeAuthorizationMessageResponse :: DecodeAuthorizationMessageResponse
decodeAuthorizationMessageResponse = DecodeAuthorizationMessageResponse
{ _damrDecodedMessage = Nothing
}
-- | An XML document that contains the decoded message. For more information, see 'DecodeAuthorizationMessage'.
damrDecodedMessage :: Lens' DecodeAuthorizationMessageResponse (Maybe Text)
damrDecodedMessage =
lens _damrDecodedMessage (\s a -> s { _damrDecodedMessage = a })
instance ToPath DecodeAuthorizationMessage where
toPath = const "/"
instance ToQuery DecodeAuthorizationMessage where
toQuery DecodeAuthorizationMessage{..} = mconcat
[ "EncodedMessage" =? _damEncodedMessage
]
instance ToHeaders DecodeAuthorizationMessage
instance AWSRequest DecodeAuthorizationMessage where
type Sv DecodeAuthorizationMessage = STS
type Rs DecodeAuthorizationMessage = DecodeAuthorizationMessageResponse
request = post "DecodeAuthorizationMessage"
response = xmlResponse
instance FromXML DecodeAuthorizationMessageResponse where
parseXML = withElement "DecodeAuthorizationMessageResult" $ \x -> DecodeAuthorizationMessageResponse
<$> x .@? "DecodedMessage"
|
romanb/amazonka
|
amazonka-sts/gen/Network/AWS/STS/DecodeAuthorizationMessage.hs
|
mpl-2.0
| 5,502 | 0 | 9 | 987 | 451 | 283 | 168 | 55 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.LiveChatBans.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Inserts a new resource into this collection.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.liveChatBans.insert@.
module Network.Google.Resource.YouTube.LiveChatBans.Insert
(
-- * REST Resource
LiveChatBansInsertResource
-- * Creating a Request
, liveChatBansInsert
, LiveChatBansInsert
-- * Request Lenses
, lcbiXgafv
, lcbiPart
, lcbiUploadProtocol
, lcbiAccessToken
, lcbiUploadType
, lcbiPayload
, lcbiCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.liveChatBans.insert@ method which the
-- 'LiveChatBansInsert' request conforms to.
type LiveChatBansInsertResource =
"youtube" :>
"v3" :>
"liveChat" :>
"bans" :>
QueryParams "part" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] LiveChatBan :>
Post '[JSON] LiveChatBan
-- | Inserts a new resource into this collection.
--
-- /See:/ 'liveChatBansInsert' smart constructor.
data LiveChatBansInsert =
LiveChatBansInsert'
{ _lcbiXgafv :: !(Maybe Xgafv)
, _lcbiPart :: ![Text]
, _lcbiUploadProtocol :: !(Maybe Text)
, _lcbiAccessToken :: !(Maybe Text)
, _lcbiUploadType :: !(Maybe Text)
, _lcbiPayload :: !LiveChatBan
, _lcbiCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LiveChatBansInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lcbiXgafv'
--
-- * 'lcbiPart'
--
-- * 'lcbiUploadProtocol'
--
-- * 'lcbiAccessToken'
--
-- * 'lcbiUploadType'
--
-- * 'lcbiPayload'
--
-- * 'lcbiCallback'
liveChatBansInsert
:: [Text] -- ^ 'lcbiPart'
-> LiveChatBan -- ^ 'lcbiPayload'
-> LiveChatBansInsert
liveChatBansInsert pLcbiPart_ pLcbiPayload_ =
LiveChatBansInsert'
{ _lcbiXgafv = Nothing
, _lcbiPart = _Coerce # pLcbiPart_
, _lcbiUploadProtocol = Nothing
, _lcbiAccessToken = Nothing
, _lcbiUploadType = Nothing
, _lcbiPayload = pLcbiPayload_
, _lcbiCallback = Nothing
}
-- | V1 error format.
lcbiXgafv :: Lens' LiveChatBansInsert (Maybe Xgafv)
lcbiXgafv
= lens _lcbiXgafv (\ s a -> s{_lcbiXgafv = a})
-- | The *part* parameter serves two purposes in this operation. It
-- identifies the properties that the write operation will set as well as
-- the properties that the API response returns. Set the parameter value to
-- snippet.
lcbiPart :: Lens' LiveChatBansInsert [Text]
lcbiPart
= lens _lcbiPart (\ s a -> s{_lcbiPart = a}) .
_Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
lcbiUploadProtocol :: Lens' LiveChatBansInsert (Maybe Text)
lcbiUploadProtocol
= lens _lcbiUploadProtocol
(\ s a -> s{_lcbiUploadProtocol = a})
-- | OAuth access token.
lcbiAccessToken :: Lens' LiveChatBansInsert (Maybe Text)
lcbiAccessToken
= lens _lcbiAccessToken
(\ s a -> s{_lcbiAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
lcbiUploadType :: Lens' LiveChatBansInsert (Maybe Text)
lcbiUploadType
= lens _lcbiUploadType
(\ s a -> s{_lcbiUploadType = a})
-- | Multipart request metadata.
lcbiPayload :: Lens' LiveChatBansInsert LiveChatBan
lcbiPayload
= lens _lcbiPayload (\ s a -> s{_lcbiPayload = a})
-- | JSONP
lcbiCallback :: Lens' LiveChatBansInsert (Maybe Text)
lcbiCallback
= lens _lcbiCallback (\ s a -> s{_lcbiCallback = a})
instance GoogleRequest LiveChatBansInsert where
type Rs LiveChatBansInsert = LiveChatBan
type Scopes LiveChatBansInsert =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl"]
requestClient LiveChatBansInsert'{..}
= go _lcbiPart _lcbiXgafv _lcbiUploadProtocol
_lcbiAccessToken
_lcbiUploadType
_lcbiCallback
(Just AltJSON)
_lcbiPayload
youTubeService
where go
= buildClient
(Proxy :: Proxy LiveChatBansInsertResource)
mempty
|
brendanhay/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/LiveChatBans/Insert.hs
|
mpl-2.0
| 5,332 | 0 | 19 | 1,299 | 809 | 472 | 337 | 119 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Books.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Books.Types.Sum where
import Network.Google.Prelude
-- | Restrict information returned to a set of selected fields.
data VolumesListProjection
= Full
-- ^ @full@
-- Includes all volume data.
| Lite
-- ^ @lite@
-- Includes a subset of fields in volumeInfo and accessInfo.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListProjection
instance FromHttpApiData VolumesListProjection where
parseQueryParam = \case
"full" -> Right Full
"lite" -> Right Lite
x -> Left ("Unable to parse VolumesListProjection from: " <> x)
instance ToHttpApiData VolumesListProjection where
toQueryParam = \case
Full -> "full"
Lite -> "lite"
instance FromJSON VolumesListProjection where
parseJSON = parseJSONText "VolumesListProjection"
instance ToJSON VolumesListProjection where
toJSON = toJSONText
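-- Illustrative round trips for the sum type above (derived from the instance
-- definitions):
--   parseQueryParam "full" == Right Full
--   toQueryParam Lite == "lite"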
-- | Restrict information returned to a set of selected fields.
data MyLibraryBookshelvesVolumesListProjection
= MLBVLPFull
-- ^ @full@
-- Includes all volume data.
| MLBVLPLite
-- ^ @lite@
-- Includes a subset of fields in volumeInfo and accessInfo.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyLibraryBookshelvesVolumesListProjection
instance FromHttpApiData MyLibraryBookshelvesVolumesListProjection where
parseQueryParam = \case
"full" -> Right MLBVLPFull
"lite" -> Right MLBVLPLite
x -> Left ("Unable to parse MyLibraryBookshelvesVolumesListProjection from: " <> x)
instance ToHttpApiData MyLibraryBookshelvesVolumesListProjection where
toQueryParam = \case
MLBVLPFull -> "full"
MLBVLPLite -> "lite"
instance FromJSON MyLibraryBookshelvesVolumesListProjection where
parseJSON = parseJSONText "MyLibraryBookshelvesVolumesListProjection"
instance ToJSON MyLibraryBookshelvesVolumesListProjection where
toJSON = toJSONText
-- | Sort search results.
data VolumesListOrderBy
= Newest
-- ^ @newest@
-- Most recently published.
| Relevance
-- ^ @relevance@
-- Relevance to search terms.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListOrderBy
instance FromHttpApiData VolumesListOrderBy where
parseQueryParam = \case
"newest" -> Right Newest
"relevance" -> Right Relevance
x -> Left ("Unable to parse VolumesListOrderBy from: " <> x)
instance ToHttpApiData VolumesListOrderBy where
toQueryParam = \case
Newest -> "newest"
Relevance -> "relevance"
instance FromJSON VolumesListOrderBy where
parseJSON = parseJSONText "VolumesListOrderBy"
instance ToJSON VolumesListOrderBy where
toJSON = toJSONText
-- | Restrict search to this user\'s library.
data VolumesListLibraryRestrict
= MyLibrary
-- ^ @my-library@
-- Restrict to the user\'s library, any shelf.
| NoRestrict
-- ^ @no-restrict@
-- Do not restrict based on user\'s library.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListLibraryRestrict
instance FromHttpApiData VolumesListLibraryRestrict where
parseQueryParam = \case
"my-library" -> Right MyLibrary
"no-restrict" -> Right NoRestrict
x -> Left ("Unable to parse VolumesListLibraryRestrict from: " <> x)
instance ToHttpApiData VolumesListLibraryRestrict where
toQueryParam = \case
MyLibrary -> "my-library"
NoRestrict -> "no-restrict"
instance FromJSON VolumesListLibraryRestrict where
parseJSON = parseJSONText "VolumesListLibraryRestrict"
instance ToJSON VolumesListLibraryRestrict where
toJSON = toJSONText
-- | The reason for which the book is removed from the library.
data MyLibraryBookshelvesRemoveVolumeReason
= Onboarding
-- ^ @ONBOARDING@
-- Samples removed from the Onboarding flow.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyLibraryBookshelvesRemoveVolumeReason
instance FromHttpApiData MyLibraryBookshelvesRemoveVolumeReason where
parseQueryParam = \case
"ONBOARDING" -> Right Onboarding
x -> Left ("Unable to parse MyLibraryBookshelvesRemoveVolumeReason from: " <> x)
instance ToHttpApiData MyLibraryBookshelvesRemoveVolumeReason where
toQueryParam = \case
Onboarding -> "ONBOARDING"
instance FromJSON MyLibraryBookshelvesRemoveVolumeReason where
parseJSON = parseJSONText "MyLibraryBookshelvesRemoveVolumeReason"
instance ToJSON MyLibraryBookshelvesRemoveVolumeReason where
toJSON = toJSONText
-- | The maximum allowed maturity rating of returned volumes. Books with a
-- higher maturity rating are filtered out.
data OnboardingListCategoryVolumesMaxAllowedMaturityRating
= Mature
-- ^ @mature@
-- Show books which are rated mature or lower.
| NotMature
-- ^ @not-mature@
-- Show books which are rated not mature.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable OnboardingListCategoryVolumesMaxAllowedMaturityRating
instance FromHttpApiData OnboardingListCategoryVolumesMaxAllowedMaturityRating where
parseQueryParam = \case
"mature" -> Right Mature
"not-mature" -> Right NotMature
x -> Left ("Unable to parse OnboardingListCategoryVolumesMaxAllowedMaturityRating from: " <> x)
instance ToHttpApiData OnboardingListCategoryVolumesMaxAllowedMaturityRating where
toQueryParam = \case
Mature -> "mature"
NotMature -> "not-mature"
instance FromJSON OnboardingListCategoryVolumesMaxAllowedMaturityRating where
parseJSON = parseJSONText "OnboardingListCategoryVolumesMaxAllowedMaturityRating"
instance ToJSON OnboardingListCategoryVolumesMaxAllowedMaturityRating where
toJSON = toJSONText
-- | Restrict to volumes by download availability.
data VolumesListDownload
= Epub
-- ^ @epub@
-- All volumes with epub.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListDownload
instance FromHttpApiData VolumesListDownload where
parseQueryParam = \case
"epub" -> Right Epub
x -> Left ("Unable to parse VolumesListDownload from: " <> x)
instance ToHttpApiData VolumesListDownload where
toQueryParam = \case
Epub -> "epub"
instance FromJSON VolumesListDownload where
parseJSON = parseJSONText "VolumesListDownload"
instance ToJSON VolumesListDownload where
toJSON = toJSONText
-- | Restrict information returned to a set of selected fields.
data VolumesGetProjection
= VGPFull
-- ^ @full@
-- Includes all volume data.
| VGPLite
-- ^ @lite@
-- Includes a subset of fields in volumeInfo and accessInfo.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesGetProjection
instance FromHttpApiData VolumesGetProjection where
parseQueryParam = \case
"full" -> Right VGPFull
"lite" -> Right VGPLite
x -> Left ("Unable to parse VolumesGetProjection from: " <> x)
instance ToHttpApiData VolumesGetProjection where
toQueryParam = \case
VGPFull -> "full"
VGPLite -> "lite"
instance FromJSON VolumesGetProjection where
parseJSON = parseJSONText "VolumesGetProjection"
instance ToJSON VolumesGetProjection where
toJSON = toJSONText
-- | Association type.
data VolumesAssociatedListAssociation
= EndOfSample
-- ^ @end-of-sample@
-- Recommendations for display end-of-sample.
| EndOfVolume
-- ^ @end-of-volume@
-- Recommendations for display end-of-volume.
| RelatedForPlay
-- ^ @related-for-play@
-- Related volumes for Play Store.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesAssociatedListAssociation
instance FromHttpApiData VolumesAssociatedListAssociation where
parseQueryParam = \case
"end-of-sample" -> Right EndOfSample
"end-of-volume" -> Right EndOfVolume
"related-for-play" -> Right RelatedForPlay
x -> Left ("Unable to parse VolumesAssociatedListAssociation from: " <> x)
instance ToHttpApiData VolumesAssociatedListAssociation where
toQueryParam = \case
EndOfSample -> "end-of-sample"
EndOfVolume -> "end-of-volume"
RelatedForPlay -> "related-for-play"
instance FromJSON VolumesAssociatedListAssociation where
parseJSON = parseJSONText "VolumesAssociatedListAssociation"
instance ToJSON VolumesAssociatedListAssociation where
toJSON = toJSONText
-- | Rating to be given to the volume.
data VolumesRecommendedRateRating
= HaveIt
-- ^ @HAVE_IT@
-- Rating indicating a dismissal due to ownership.
| NotInterested
-- ^ @NOT_INTERESTED@
-- Rating indicating a negative dismissal of a volume.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesRecommendedRateRating
instance FromHttpApiData VolumesRecommendedRateRating where
parseQueryParam = \case
"HAVE_IT" -> Right HaveIt
"NOT_INTERESTED" -> Right NotInterested
x -> Left ("Unable to parse VolumesRecommendedRateRating from: " <> x)
instance ToHttpApiData VolumesRecommendedRateRating where
toQueryParam = \case
HaveIt -> "HAVE_IT"
NotInterested -> "NOT_INTERESTED"
instance FromJSON VolumesRecommendedRateRating where
parseJSON = parseJSONText "VolumesRecommendedRateRating"
instance ToJSON VolumesRecommendedRateRating where
toJSON = toJSONText
-- | The processing state of the user uploaded volumes to be returned.
data VolumesUserUploadedListProcessingState
= CompletedFailed
-- ^ @COMPLETED_FAILED@
      -- The volume processing has failed.
| CompletedSuccess
-- ^ @COMPLETED_SUCCESS@
-- The volume processing was completed.
| Running
-- ^ @RUNNING@
-- The volume processing is not completed.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesUserUploadedListProcessingState
instance FromHttpApiData VolumesUserUploadedListProcessingState where
parseQueryParam = \case
"COMPLETED_FAILED" -> Right CompletedFailed
"COMPLETED_SUCCESS" -> Right CompletedSuccess
"RUNNING" -> Right Running
x -> Left ("Unable to parse VolumesUserUploadedListProcessingState from: " <> x)
instance ToHttpApiData VolumesUserUploadedListProcessingState where
toQueryParam = \case
CompletedFailed -> "COMPLETED_FAILED"
CompletedSuccess -> "COMPLETED_SUCCESS"
Running -> "RUNNING"
instance FromJSON VolumesUserUploadedListProcessingState where
parseJSON = parseJSONText "VolumesUserUploadedListProcessingState"
instance ToJSON VolumesUserUploadedListProcessingState where
toJSON = toJSONText
-- | List of features supported by the client, i.e., \'RENTALS\'
data MyConfigSyncVolumeLicensesFeatures
= Rentals
-- ^ @RENTALS@
-- Client supports rentals.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyConfigSyncVolumeLicensesFeatures
instance FromHttpApiData MyConfigSyncVolumeLicensesFeatures where
parseQueryParam = \case
"RENTALS" -> Right Rentals
x -> Left ("Unable to parse MyConfigSyncVolumeLicensesFeatures from: " <> x)
instance ToHttpApiData MyConfigSyncVolumeLicensesFeatures where
toQueryParam = \case
Rentals -> "RENTALS"
instance FromJSON MyConfigSyncVolumeLicensesFeatures where
parseJSON = parseJSONText "MyConfigSyncVolumeLicensesFeatures"
instance ToJSON MyConfigSyncVolumeLicensesFeatures where
toJSON = toJSONText
-- | The maximum allowed maturity rating of returned recommendations. Books
-- with a higher maturity rating are filtered out.
data VolumesAssociatedListMaxAllowedMaturityRating
= VALMAMRMature
-- ^ @mature@
-- Show books which are rated mature or lower.
| VALMAMRNotMature
-- ^ @not-mature@
-- Show books which are rated not mature.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesAssociatedListMaxAllowedMaturityRating
instance FromHttpApiData VolumesAssociatedListMaxAllowedMaturityRating where
parseQueryParam = \case
"mature" -> Right VALMAMRMature
"not-mature" -> Right VALMAMRNotMature
x -> Left ("Unable to parse VolumesAssociatedListMaxAllowedMaturityRating from: " <> x)
instance ToHttpApiData VolumesAssociatedListMaxAllowedMaturityRating where
toQueryParam = \case
VALMAMRMature -> "mature"
VALMAMRNotMature -> "not-mature"
instance FromJSON VolumesAssociatedListMaxAllowedMaturityRating where
parseJSON = parseJSONText "VolumesAssociatedListMaxAllowedMaturityRating"
instance ToJSON VolumesAssociatedListMaxAllowedMaturityRating where
toJSON = toJSONText
-- | Filter search results.
data VolumesListFilter
= VLFEbooks
-- ^ @ebooks@
-- All Google eBooks.
| VLFFreeEbooks
-- ^ @free-ebooks@
-- Google eBook with full volume text viewability.
| VLFFull
-- ^ @full@
-- Public can view entire volume text.
| VLFPaidEbooks
-- ^ @paid-ebooks@
-- Google eBook with a price.
| VLFPartial
-- ^ @partial@
-- Public able to see parts of text.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListFilter
instance FromHttpApiData VolumesListFilter where
parseQueryParam = \case
"ebooks" -> Right VLFEbooks
"free-ebooks" -> Right VLFFreeEbooks
"full" -> Right VLFFull
"paid-ebooks" -> Right VLFPaidEbooks
"partial" -> Right VLFPartial
x -> Left ("Unable to parse VolumesListFilter from: " <> x)
instance ToHttpApiData VolumesListFilter where
toQueryParam = \case
VLFEbooks -> "ebooks"
VLFFreeEbooks -> "free-ebooks"
VLFFull -> "full"
VLFPaidEbooks -> "paid-ebooks"
VLFPartial -> "partial"
instance FromJSON VolumesListFilter where
parseJSON = parseJSONText "VolumesListFilter"
instance ToJSON VolumesListFilter where
toJSON = toJSONText
-- | The maximum allowed maturity rating of returned recommendations. Books
-- with a higher maturity rating are filtered out.
data VolumesRecommendedListMaxAllowedMaturityRating
= VRLMAMRMature
-- ^ @mature@
-- Show books which are rated mature or lower.
| VRLMAMRNotMature
-- ^ @not-mature@
-- Show books which are rated not mature.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesRecommendedListMaxAllowedMaturityRating
instance FromHttpApiData VolumesRecommendedListMaxAllowedMaturityRating where
parseQueryParam = \case
"mature" -> Right VRLMAMRMature
"not-mature" -> Right VRLMAMRNotMature
x -> Left ("Unable to parse VolumesRecommendedListMaxAllowedMaturityRating from: " <> x)
instance ToHttpApiData VolumesRecommendedListMaxAllowedMaturityRating where
toQueryParam = \case
VRLMAMRMature -> "mature"
VRLMAMRNotMature -> "not-mature"
instance FromJSON VolumesRecommendedListMaxAllowedMaturityRating where
parseJSON = parseJSONText "VolumesRecommendedListMaxAllowedMaturityRating"
instance ToJSON VolumesRecommendedListMaxAllowedMaturityRating where
toJSON = toJSONText
-- | The reason for which the book is added to the library.
data MyLibraryBookshelvesAddVolumeReason
= MLBAVRIosPrex
-- ^ @IOS_PREX@
-- Volumes added from the PREX flow on iOS.
| MLBAVRIosSearch
-- ^ @IOS_SEARCH@
-- Volumes added from the Search flow on iOS.
| MLBAVROnboarding
-- ^ @ONBOARDING@
-- Volumes added from the Onboarding flow.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyLibraryBookshelvesAddVolumeReason
instance FromHttpApiData MyLibraryBookshelvesAddVolumeReason where
parseQueryParam = \case
"IOS_PREX" -> Right MLBAVRIosPrex
"IOS_SEARCH" -> Right MLBAVRIosSearch
"ONBOARDING" -> Right MLBAVROnboarding
x -> Left ("Unable to parse MyLibraryBookshelvesAddVolumeReason from: " <> x)
instance ToHttpApiData MyLibraryBookshelvesAddVolumeReason where
toQueryParam = \case
MLBAVRIosPrex -> "IOS_PREX"
MLBAVRIosSearch -> "IOS_SEARCH"
MLBAVROnboarding -> "ONBOARDING"
instance FromJSON MyLibraryBookshelvesAddVolumeReason where
parseJSON = parseJSONText "MyLibraryBookshelvesAddVolumeReason"
instance ToJSON MyLibraryBookshelvesAddVolumeReason where
toJSON = toJSONText
-- | How the book was acquired
data VolumesMybooksListAcquireMethod
= FamilyShared
-- ^ @FAMILY_SHARED@
-- Books acquired via Family Sharing
| PreOrdered
-- ^ @PREORDERED@
-- Preordered books (not yet available)
| PreviouslyRented
-- ^ @PREVIOUSLY_RENTED@
-- User-rented books past their expiration time
| PublicDomain
-- ^ @PUBLIC_DOMAIN@
-- Public domain books
| Purchased
-- ^ @PURCHASED@
-- Purchased books
| Rented
-- ^ @RENTED@
-- User-rented books
| Sample
-- ^ @SAMPLE@
-- Sample books
| Uploaded
-- ^ @UPLOADED@
-- User uploaded books
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesMybooksListAcquireMethod
instance FromHttpApiData VolumesMybooksListAcquireMethod where
parseQueryParam = \case
"FAMILY_SHARED" -> Right FamilyShared
"PREORDERED" -> Right PreOrdered
"PREVIOUSLY_RENTED" -> Right PreviouslyRented
"PUBLIC_DOMAIN" -> Right PublicDomain
"PURCHASED" -> Right Purchased
"RENTED" -> Right Rented
"SAMPLE" -> Right Sample
"UPLOADED" -> Right Uploaded
x -> Left ("Unable to parse VolumesMybooksListAcquireMethod from: " <> x)
instance ToHttpApiData VolumesMybooksListAcquireMethod where
toQueryParam = \case
FamilyShared -> "FAMILY_SHARED"
PreOrdered -> "PREORDERED"
PreviouslyRented -> "PREVIOUSLY_RENTED"
PublicDomain -> "PUBLIC_DOMAIN"
Purchased -> "PURCHASED"
Rented -> "RENTED"
Sample -> "SAMPLE"
Uploaded -> "UPLOADED"
instance FromJSON VolumesMybooksListAcquireMethod where
parseJSON = parseJSONText "VolumesMybooksListAcquireMethod"
instance ToJSON VolumesMybooksListAcquireMethod where
toJSON = toJSONText
-- | The maximum allowed maturity rating of returned recommendations. Books
-- with a higher maturity rating are filtered out.
data PersonalizedstreamGetMaxAllowedMaturityRating
= PGMAMRMature
-- ^ @mature@
-- Show books which are rated mature or lower.
| PGMAMRNotMature
-- ^ @not-mature@
-- Show books which are rated not mature.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable PersonalizedstreamGetMaxAllowedMaturityRating
instance FromHttpApiData PersonalizedstreamGetMaxAllowedMaturityRating where
parseQueryParam = \case
"mature" -> Right PGMAMRMature
"not-mature" -> Right PGMAMRNotMature
x -> Left ("Unable to parse PersonalizedstreamGetMaxAllowedMaturityRating from: " <> x)
instance ToHttpApiData PersonalizedstreamGetMaxAllowedMaturityRating where
toQueryParam = \case
PGMAMRMature -> "mature"
PGMAMRNotMature -> "not-mature"
instance FromJSON PersonalizedstreamGetMaxAllowedMaturityRating where
parseJSON = parseJSONText "PersonalizedstreamGetMaxAllowedMaturityRating"
instance ToJSON PersonalizedstreamGetMaxAllowedMaturityRating where
toJSON = toJSONText
-- | Restrict to books or magazines.
data VolumesListPrintType
= All
-- ^ @all@
-- All volume content types.
| Books
-- ^ @books@
-- Just books.
| Magazines
-- ^ @magazines@
-- Just magazines.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListPrintType
instance FromHttpApiData VolumesListPrintType where
parseQueryParam = \case
"all" -> Right All
"books" -> Right Books
"magazines" -> Right Magazines
x -> Left ("Unable to parse VolumesListPrintType from: " <> x)
instance ToHttpApiData VolumesListPrintType where
toQueryParam = \case
All -> "all"
Books -> "books"
Magazines -> "magazines"
instance FromJSON VolumesListPrintType where
parseJSON = parseJSONText "VolumesListPrintType"
instance ToJSON VolumesListPrintType where
toJSON = toJSONText
-- | The processing state of the user uploaded volumes to be returned.
-- Applicable only if UPLOADED is specified in the acquireMethod.
data VolumesMybooksListProcessingState
= VMLPSCompletedFailed
-- ^ @COMPLETED_FAILED@
      -- The volume processing has failed.
| VMLPSCompletedSuccess
-- ^ @COMPLETED_SUCCESS@
-- The volume processing was completed.
| VMLPSRunning
-- ^ @RUNNING@
-- The volume processing is not completed.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesMybooksListProcessingState
instance FromHttpApiData VolumesMybooksListProcessingState where
parseQueryParam = \case
"COMPLETED_FAILED" -> Right VMLPSCompletedFailed
"COMPLETED_SUCCESS" -> Right VMLPSCompletedSuccess
"RUNNING" -> Right VMLPSRunning
x -> Left ("Unable to parse VolumesMybooksListProcessingState from: " <> x)
instance ToHttpApiData VolumesMybooksListProcessingState where
toQueryParam = \case
VMLPSCompletedFailed -> "COMPLETED_FAILED"
VMLPSCompletedSuccess -> "COMPLETED_SUCCESS"
VMLPSRunning -> "RUNNING"
instance FromJSON VolumesMybooksListProcessingState where
parseJSON = parseJSONText "VolumesMybooksListProcessingState"
instance ToJSON VolumesMybooksListProcessingState where
toJSON = toJSONText
-- | The type of access license to request. If not specified, the default is
-- BOTH.
data MyConfigRequestAccessLicenseTypes
= Both
-- ^ @BOTH@
-- Both concurrent and download licenses.
| Concurrent
-- ^ @CONCURRENT@
-- Concurrent access license.
| Download
-- ^ @DOWNLOAD@
-- Offline download access license.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyConfigRequestAccessLicenseTypes
instance FromHttpApiData MyConfigRequestAccessLicenseTypes where
parseQueryParam = \case
"BOTH" -> Right Both
"CONCURRENT" -> Right Concurrent
"DOWNLOAD" -> Right Download
x -> Left ("Unable to parse MyConfigRequestAccessLicenseTypes from: " <> x)
instance ToHttpApiData MyConfigRequestAccessLicenseTypes where
toQueryParam = \case
Both -> "BOTH"
Concurrent -> "CONCURRENT"
Download -> "DOWNLOAD"
instance FromJSON MyConfigRequestAccessLicenseTypes where
parseJSON = parseJSONText "MyConfigRequestAccessLicenseTypes"
instance ToJSON MyConfigRequestAccessLicenseTypes where
toJSON = toJSONText
-- | Action that caused this reading position to be set.
data MyLibraryReadingPositionsSetPositionAction
= Bookmark
-- ^ @bookmark@
-- User chose bookmark within volume.
| Chapter
-- ^ @chapter@
-- User selected chapter from list.
| NextPage
-- ^ @next-page@
-- Next page event.
| PrevPage
-- ^ @prev-page@
-- Previous page event.
| Scroll
-- ^ @scroll@
-- User navigated to page.
| Search
-- ^ @search@
-- User chose search results within volume.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MyLibraryReadingPositionsSetPositionAction
instance FromHttpApiData MyLibraryReadingPositionsSetPositionAction where
parseQueryParam = \case
"bookmark" -> Right Bookmark
"chapter" -> Right Chapter
"next-page" -> Right NextPage
"prev-page" -> Right PrevPage
"scroll" -> Right Scroll
"search" -> Right Search
x -> Left ("Unable to parse MyLibraryReadingPositionsSetPositionAction from: " <> x)
instance ToHttpApiData MyLibraryReadingPositionsSetPositionAction where
toQueryParam = \case
Bookmark -> "bookmark"
Chapter -> "chapter"
NextPage -> "next-page"
PrevPage -> "prev-page"
Scroll -> "scroll"
Search -> "search"
instance FromJSON MyLibraryReadingPositionsSetPositionAction where
parseJSON = parseJSONText "MyLibraryReadingPositionsSetPositionAction"
instance ToJSON MyLibraryReadingPositionsSetPositionAction where
toJSON = toJSONText
-- | The maximum allowed maturity rating of returned recommendations. Books
-- with a higher maturity rating are filtered out.
data VolumesListMaxAllowedMaturityRating
= VLMAMRMature
-- ^ @mature@
-- Show books which are rated mature or lower.
| VLMAMRNotMature
-- ^ @not-mature@
-- Show books which are rated not mature.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable VolumesListMaxAllowedMaturityRating
instance FromHttpApiData VolumesListMaxAllowedMaturityRating where
parseQueryParam = \case
"mature" -> Right VLMAMRMature
"not-mature" -> Right VLMAMRNotMature
x -> Left ("Unable to parse VolumesListMaxAllowedMaturityRating from: " <> x)
instance ToHttpApiData VolumesListMaxAllowedMaturityRating where
toQueryParam = \case
VLMAMRMature -> "mature"
VLMAMRNotMature -> "not-mature"
instance FromJSON VolumesListMaxAllowedMaturityRating where
parseJSON = parseJSONText "VolumesListMaxAllowedMaturityRating"
instance ToJSON VolumesListMaxAllowedMaturityRating where
toJSON = toJSONText
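-- Editor's note (illustrative, not part of the generated API surface):
-- each sum type above round-trips through its wire representation, e.g.
--
-- >>> toQueryParam VLFFreeEbooks
-- "free-ebooks"
-- >>> parseQueryParam "not-mature" :: Either Text VolumesListMaxAllowedMaturityRating
-- Right VLMAMRNotMature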
|
rueshyna/gogol
|
gogol-books/gen/Network/Google/Books/Types/Sum.hs
|
mpl-2.0
| 26,484 | 0 | 11 | 5,589 | 4,082 | 2,163 | 1,919 | 470 | 0 |
{-# LINE 1 "CCAR/Tests/TestRegex.hsc" #-}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
{-# LINE 2 "CCAR/Tests/TestRegex.hsc" #-}
module CCAR.Tests.TestRegex
where
import Foreign
import Foreign.C.Types
{-# LINE 9 "CCAR/Tests/TestRegex.hsc" #-}
newtype PCREOption = PCREOption {unPCREOption :: CInt}
deriving (Show, Eq)
caseless :: PCREOption
caseless = PCREOption 1
dollar_endonly :: PCREOption
dollar_endonly = PCREOption 32
dotall :: PCREOption
dotall = PCREOption 4
combineOptions :: [PCREOption] -> PCREOption
combineOptions = PCREOption . foldr ((.|.) . unPCREOption) 0
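-- Editor's sketch (added example, not in the original module): options
-- combine by bitwise OR of their underlying CInt values, so caseless (1)
-- and dotall (4) collapse into a single PCREOption wrapping 5.
exampleOptions :: PCREOption
exampleOptions = combineOptions [caseless, dotall]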
|
asm-products/ccar-websockets
|
CCAR/Tests/TestRegex.hs
|
agpl-3.0
| 600 | 2 | 8 | 95 | 123 | 74 | 49 | 14 | 1 |
-- | Utility functions for C code generation
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.GI.CodeGen.JNI.Utils.C where
import Data.List (intercalate)
import Data.Maybe (catMaybes, isNothing, maybeToList)
import Data.String (fromString)
import qualified Data.Text as T (Text, unpack)
import qualified Data.GI.CodeGen.API as GI
import qualified Data.GI.CodeGen.Type as GIType
-- The idea is to use this qualified everywhere except when using it as a DSL
import Language.C.DSL as CDSL
import Data.GI.CodeGen.JNI.Types
import Data.GI.CodeGen.JNI.Utils.Java
import Data.GI.CodeGen.JNI.Utils.Type
jniGetObjectPointerIdent :: String
jniGetObjectPointerIdent = "gobject_from_jobject"
jniTypeDefDecl :: String -> String -> Bool -> Maybe CDSL.CExpr -> CDSL.CDecl
jniTypeDefDecl typ name isPtr =
let
typeSpec = CDSL.CTypeSpec . CDSL.ty . fromString $ typ
doPtr = if isPtr then CDSL.ptr else id
ident = doPtr . fromString $ name
in
CDSL.decl typeSpec ident
emptyCDecl :: CDSL.CDeclr
emptyCDecl = CDSL.CDeclr Nothing [] Nothing [] CDSL.undefNode
makeTypeDecl :: Bool -> CDSL.CDeclr -> CDSL.CTypeSpec -> CDSL.CDecl
makeTypeDecl isPtr ident typ =
let
maybePtr = if isPtr then ptr else id
in
decl (CDSL.CTypeSpec typ) (maybePtr ident) Nothing
typePtrDecl :: CDSL.CTypeSpec -> CDSL.CDecl
typePtrDecl = makeTypeDecl True emptyCDecl
jniNull :: CDSL.CExpr
jniNull =
0 `castTo` typePtrDecl voidSpec
jniEnvIdent :: String
jniEnvIdent = "env"
jniEnvDecl :: Maybe CDSL.CExpr -> CDSL.CDecl
jniEnvDecl = jniTypeDefDecl "JNIEnv" jniEnvIdent True
jniClassIdent :: String
jniClassIdent = "clazz"
jniClassDecl :: Maybe CDSL.CExpr -> CDSL.CDecl
jniClassDecl = jniTypeDefDecl "jclass" jniClassIdent False
jniInstanceIdent :: String
jniInstanceIdent = "thiz"
jniInstanceDecl :: Maybe CDSL.CExpr -> CDSL.CDecl
jniInstanceDecl = jniTypeDefDecl "jobject" jniInstanceIdent False
jniFieldIdent :: String
jniFieldIdent = "field"
jniFieldDecl :: Maybe CDSL.CExpr -> CDSL.CDecl
jniFieldDecl = jniTypeDefDecl "jfieldID" jniFieldIdent False
giCVarPrefix :: String
giCVarPrefix = "c_"
giArgToCIdent :: GI.Arg -> String
giArgToCIdent GI.Arg{..} = giCVarPrefix ++ T.unpack argCName
giInstanceCIdent :: String
giInstanceCIdent = giCVarPrefix ++ jniInstanceIdent
giNameToJNI :: Package -> GI.Name -> T.Text -> String
giNameToJNI packagePrefix giName cls =
intercalate "_" $ ["Java"]
++ giNamespaceToJava packagePrefix giName
++ [T.unpack cls, giMethodNameToJava giName]
jniClassName :: FQClass -> String
jniClassName (pkg, cls) = intercalate "/" (pkg ++ [cls])
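-- Editor's note (illustrative, assuming FQClass is the (package, class)
-- pair used above): jniClassName (["org", "gnome"], "Widget") yields
-- "org/gnome/Widget".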
giArgToJNIIdent :: GI.Arg -> String
giArgToJNIIdent GI.Arg{..} = T.unpack argCName
giArgToJNI :: Info -> GI.Arg -> (Maybe CDSL.CExpr -> CDSL.CDecl)
giArgToJNI info arg@GI.Arg{..} =
let
typ = CDSL.CTypeSpec . giTypeToJNI info $ argType
name = fromString . giArgToJNIIdent $ arg
in
CDSL.decl typ name
genJNIMethod :: Info
-> GI.Name -- ^ API method name
-> T.Text -- ^ Class name
-> Bool -- ^ Is instance method? (else class method)
-> Bool -- ^ Is constructor?
-> T.Text -- ^ Native method name
-> Bool -- ^ Throws error?
-> GI.Callable -- ^ Corresponding GI.Callable
-> CDSL.CExtDecl -- ^ Return C declaration + definition
genJNIMethod info@Info{..} giName cls isInstance isConstr symbol throws callable =
let
retType = GI.returnType callable
retCType = case retType of
Nothing -> CDSL.voidTy
Just typ -> CDSL.CTypeSpec . giTypeToJNI info $ typ
name = giNameToJNI infoPkgPrefix giName cls
args = if isInstance && not isConstr
then genFunctionInstanceArg cls giName : GI.args callable
else GI.args callable
cargs = genFunctionCArgs isInstance . GI.args $ callable
defn = genFunctionCDefn info isConstr symbol throws callable{GI.args = args}
in
export $ fun [retCType] name cargs $ block defn
where
genFunctionInstanceArg :: T.Text -> GI.Name -> GI.Arg
genFunctionInstanceArg cls (GI.Name ns _) =
GI.Arg (fromString jniInstanceIdent)
(GIType.TInterface ns cls)
GI.DirectionIn
False
GI.ScopeTypeInvalid
(-1)
(-1)
False
GI.TransferNothing
genFunctionCArgs :: Bool -> [GI.Arg] -> [Maybe CDSL.CExpr -> CDSL.CDecl]
genFunctionCArgs isInstance args =
let
second = if isInstance
then jniInstanceDecl
else jniClassDecl
in
jniEnvDecl : second : (giArgToJNI info <$> args)
genReturnCIdent :: String
genReturnCIdent = giCVarPrefix ++ "_ret"
genReturnJNIIdent :: String
genReturnJNIIdent = "j_ret"
genErrorCIdent :: String
genErrorCIdent = giCVarPrefix ++ "err"
-- `empty` selects between a named argument declaration (False) and an
-- anonymous, type-only declaration used as the target of a castTo (True).
genArgCDecl :: Info -> Bool -> GI.Arg -> CDSL.CDecl
genArgCDecl info empty arg@GI.Arg{..} =
let
(typ, isPtr) = giTypeToC info argType
ident = if empty
then emptyCDecl
else fromString . giArgToCIdent $ arg
in
makeTypeDecl isPtr ident typ
genReturnCDecl :: Info -> Maybe GIType.Type -> [CDSL.CDecl]
genReturnCDecl info giType =
case giType of
Nothing -> []
Just t -> genReturnCDecl' info t
where
genReturnCDecl' info t =
let
(cType, isPtr) = giTypeToC info t
jType = giTypeToJNI info t
cIdent = fromString genReturnCIdent
jIdent = fromString genReturnJNIIdent
in
[makeTypeDecl isPtr cIdent cType, makeTypeDecl False jIdent jType]
genErrorCDecl :: Info -> Bool -> Maybe CDSL.CDecl
genErrorCDecl info throws =
let
ident = fromString genErrorCIdent
(typ, isPtr) = giTypeToC info GIType.TError
in
if throws
then
Just $ makeTypeDecl isPtr ident typ
else
Nothing
genArgCInitAndCleanup :: Info -> GI.Arg -> (CDSL.CStat, Maybe CDSL.CStat)
genArgCInitAndCleanup info@Info{..} arg@GI.Arg{..} =
let
jniEnv = fromString jniEnvIdent
jniArg = fromString . giArgToJNIIdent $ arg
cVar = fromString . giArgToCIdent $ arg
cType = genArgCDecl info True arg
init = liftE $
if giIsStringType argType
then
cVar `transferAssign` ((star jniEnv &* "GetStringUTFChars")#[jniEnv, jniArg, 0])
-- FIXME: Do an exception check and assert if we have an exception
else if giIsObjectType info argType
then
cVar <-- fromString jniGetObjectPointerIdent#[jniEnv, jniArg] `castTo` cType
else
cVar `transferAssign` (jniArg `castTo` cType)
cleanup =
if giIsStringType argType
then
Just . liftE $
(star jniEnv &* "ReleaseStringUTFChars")#[jniEnv, jniArg, cVar]
else
Nothing
in
(init, cleanup)
where
transferAssign var exp =
case transfer of
GI.TransferNothing -> var <-- exp
GI.TransferContainer -> var <-- exp -- FIXME: what do we do here?
GI.TransferEverything
| giIsStringType argType -> var <-- "g_strdup"#[exp]
| giIsObjectType info argType -> var <-- "g_object_ref"#[exp]
| otherwise -> var <-- exp
genErrorCInit :: Bool -> Maybe CDSL.CStat
genErrorCInit throws =
let
err = fromString genErrorCIdent
in
if throws
then
Just . liftE $ err <-- 0
else
Nothing
genFunctionCCall :: Bool -> T.Text -> Bool -> GI.Callable -> [CDSL.CStat]
genFunctionCCall isConstr symbol throws callable =
let
fn = fromString . T.unpack $ symbol
err = fromString genErrorCIdent
errArg = if throws
then
Just $ Addr `pre` err
else
Nothing
args = (fromString . giArgToCIdent <$> GI.args callable) ++ maybeToList errArg
ret = fromString genReturnCIdent
call = fn # args
callExp
| isNothing . GI.returnType $ callable = liftE call
-- This is a floating ref, sink it
| isConstr &&
GI.returnTransfer callable == GI.TransferNothing = liftE $ ret <-- "g_object_ref_sink" # [call]
| otherwise = liftE $ ret <-- call
-- FIXME: log the error
handleErr = [ cif err $ hBlock [ "g_error_free" # [err] ] | throws ]
in
callExp : handleErr
genFunctionCReturn :: Info -> GI.Callable -> [CStat]
genFunctionCReturn info@Info{..} GI.Callable{..} =
let
cIdent = fromString genReturnCIdent
jIdent = fromString genReturnJNIIdent
in
case returnType of
Nothing -> [cvoidReturn]
Just t -> genFunctionCToJNI info t cIdent jIdent :
[creturn jIdent]
where
retCast t =
makeTypeDecl False emptyCDecl (giTypeToJNI info t)
genFunctionCToJNI info typ cVar jVar =
let
jniEnv = fromString jniEnvIdent
in
if giIsStringType typ
then
cifElse cVar
(hBlock $
(jVar <-- (star jniEnv &* "NewStringUTF") # [jniEnv, cVar]) :
[ "g_free" # [cVar] | returnTransfer /= GI.TransferEverything ]
)
(hBlock [
jVar <-- 0
])
else
-- FIXME: Can't just do a simple assign every time
liftE $ jVar <-- cVar `castTo` retCast typ
genFunctionCDefn :: Info -> Bool -> T.Text -> Bool -> GI.Callable -> [CDSL.CBlockItem]
genFunctionCDefn info@Info{..} isConstr symbol throws callable =
let
retDecl = genReturnCDecl info . GI.returnType $ callable
decls = genArgCDecl info False <$> GI.args callable
errDecl = maybeToList $ genErrorCDecl info throws
ic = genArgCInitAndCleanup info <$> GI.args callable
errInit = genErrorCInit throws
init = (fst <$> ic) ++ maybeToList errInit
cleanup = catMaybes $ snd <$> ic
call = genFunctionCCall isConstr symbol throws callable
ret = genFunctionCReturn info callable
in
(CDSL.intoB <$> retDecl ++ decls ++ errDecl) ++
(CDSL.intoB <$> init ++ call ++ cleanup ++ ret)
|
ford-prefect/gir2jni
|
src/Data/GI/CodeGen/JNI/Utils/C.hs
|
lgpl-2.1
| 11,028 | 0 | 21 | 3,482 | 2,805 | 1,473 | 1,332 | 233 | 16 |
import System.IO
import System.IO.Streams
import Control.Monad
import Data.List
initial_state :: [[Int]]
initial_state = []
find' :: Int -> [[Int]] -> Maybe [Int]
find' x [] = Nothing
find' x (st:sts)
| elem x st = Just st
| otherwise = find' x sts
drop' :: Int -> Int -> [[Int]] -> [[Int]]
drop' _ _ [[]] = [[]]
drop' _ _ [] = []
drop' x y (st:sts)
| elem x st = sts
| elem y st = sts
| otherwise = st : (drop' x y sts)
merge :: Int -> Int -> Maybe [Int] -> Maybe [Int] -> [Int]
merge x y Nothing Nothing = [x,y]
merge x y (Just xx) Nothing = (y:xx)
merge x y Nothing (Just yy) = (x:yy)
merge x y (Just xx) (Just yy) = xx++yy
unite :: Int -> Int -> [[Int]] -> [[Int]]
unite x y state =
let xs = find' x state
ys = find' y state
tmp = drop' x y state
in
(merge x y xs ys):tmp
same :: Int -> Int -> [[Int]] -> String
same x y state =
let set = find' x state
in
if set == Nothing
then "0"
else
let (Just s) = set
in
if elem y s
then "1"
else "0"
action :: Int -> [Int] -> IO [[Int]] -> IO [[Int]]
action max (code:x:y:_) state =
case code of
0 -> do
s <- state
return $ unite x y s
1 -> do
s <- state
putStrLn $ same x y s
return s
map' :: (String -> Int) -> [String] -> [Int]
map' f xs = foldl (\acc x -> acc ++ [f x]) [] xs
--map' f xs = foldr (\x acc -> (f x) : acc) [] xs
-- execute :: Int -> [[Int]] -> IO ()
-- execute max state = do
-- eof <- isEOF
-- if eof
-- then
-- return ()
-- else
-- do
-- c <- getLine
-- let c' = words c
-- cc = map' read c'
-- c0 = cc `seq` cc!!0
-- c1 = cc `seq` cc!!1
-- c2 = cc `seq` cc!!2
-- new_state <- action max c0 c1 c2 state
-- execute max new_state
-- return ()
foldrM' :: (Monad m) => (a -> m b -> m b) -> m b -> [a] -> m b
foldrM' _ z [] = z
foldrM' f z (x:xs) = f x (foldrM' f z xs)
main = do
c1 <- getLine
let l1 = map read $ words c1 :: [Int]
max = l1!!0
state = initial_state
c2 <- getContents
let i0 = map words $ lines c2
i1 = map (map read) i0
foldrM' (action max) (return []) i1
|
a143753/AOJ
|
DSL_1_A.hs
|
apache-2.0
| 2,270 | 0 | 13 | 796 | 1,014 | 524 | 490 | 62 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Cis194.Week12.Risk where
import Control.Monad
import Control.Monad.Random
import Data.List
------------------------------------------------------------
-- Die values
newtype DieValue = DV { unDV :: Int }
deriving (Eq, Ord, Show, Num)
first :: (a -> b) -> (a, c) -> (b, c)
first f (a, c) = (f a, c)
instance Random DieValue where
random = first DV . randomR (1,6)
randomR (low,hi) = first DV . randomR (max 1 (unDV low), min 6 (unDV hi))
die :: Rand StdGen DieValue
die = getRandom
------------------------------------------------------------
-- Risk
type Army = Int
data Battlefield = Battlefield { attackers :: Army, defenders :: Army }
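-- battle rolls up to three dice for the attacker (attackers - 1, capped at
-- 3) and up to two for the defender, pairs the highest rolls, and removes
-- one army from the loser of each pairing; ties favour the defender.
-- (Editor's descriptive comment.)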
battle :: Battlefield -> Rand StdGen Battlefield
battle b = replicateM (a + d) die >>= \ds ->
return $ r $ c $ dr ds
where (a,d) = (min 3 (attackers b - 1), min 2 $ defenders b)
dr ds = (\[x,y] -> (x,y)) $ fmap (reverse . sort) $ (\(x,y) -> [x,y]) $ splitAt a ds
c (ad,dd) = (length t - w, w)
where t = zipWith (>) ad dd
w = length $ filter id t
r (ac,dc) = Battlefield (attackers b - ac) (defenders b - dc)
invade :: Battlefield -> Rand StdGen Battlefield
invade b = battle b >>= f
where f bf@(Battlefield a d) | d == 0 || a < 2 = return bf
| otherwise = invade bf
successProb :: Battlefield -> Rand StdGen Double
successProb b = replicateM 1000 (invade b) >>= \bfs ->
return $ fl (filter isSuccess bfs) / fl bfs
where isSuccess (Battlefield a _) = a > 1
fl = fromIntegral . length
main:: IO()
main = do
b <- evalRandIO (successProb (Battlefield 10 10))
print b
|
gsnewmark/cis194
|
src/Cis194/Week12/Risk.hs
|
apache-2.0
| 1,691 | 0 | 12 | 424 | 737 | 391 | 346 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck as QC
import Data.Attoparsec.ByteString as Atto
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Either
import Data.Word (Word16, Word32)
import Data.Hadoop.Types (FileType(..))
import Chmod
------------------------------------------------------------
main = defaultMain tests
------------------------------------------------------------
tests :: TestTree
tests = testGroup "Tests" [unitTests]
unitTests = testGroup "Unit tests"
[ testChmod "755" [SetOctal 493]
, testChmod "0755" [SetOctal 493]
, testChmod "u=r" [SetEqual Chmod_u [Chmod_r]]
, testChmodShouldFail "kittens"
, testApplyChmod File [SetEqual Chmod_o [Chmod_r]]
(oct 0 0 0) (oct 0 0 4)
, testApplyChmod File [SetEqual Chmod_o [Chmod_r, Chmod_w]]
(oct 0 0 0) (oct 0 0 6)
, testApplyChmod File [SetEqual Chmod_g [Chmod_r]]
(oct 0 0 0) (oct 0 4 0)
, testApplyChmod File [SetPlus Chmod_o [Chmod_w]]
(oct 0 0 4) (oct 0 0 6)
, testApplyChmod File [SetPlus Chmod_o [Chmod_w,Chmod_r]]
(oct 0 0 1) (oct 0 0 7)
, testApplyChmod File [SetMinus Chmod_o [Chmod_w]]
(oct 0 0 6) (oct 0 0 4)
, testApplyChmod File [SetMinus Chmod_o [Chmod_w,Chmod_r]]
(oct 0 0 7) (oct 0 0 1)
, testApplyChmod File [SetEqualWho Chmod_g Chmod_o]
(oct 0 0 1) (oct 0 1 1)
, testApplyChmod File [SetPlusWho Chmod_g Chmod_o]
(oct 0 2 1) (oct 0 3 1)
, testApplyChmod File [SetMinusWho Chmod_g Chmod_o]
(oct 0 3 1) (oct 0 2 1)
, testApplyChmod File [SetMinusWho Chmod_g Chmod_o]
(oct 0 3 1) (oct 0 2 1)
, testApplyChmod File [SetEqual Chmod_a [Chmod_r]]
(oct 0 0 0) (oct 4 4 4)
, testApplyChmod File [SetPlus Chmod_a [Chmod_r]]
(oct 0 1 0) (oct 4 5 4)
, testApplyChmod File [SetMinus Chmod_a [Chmod_x]]
(oct 7 5 5) (oct 6 4 4)
, testApplyChmod File [SetEqualWho Chmod_a Chmod_u]
(oct 6 4 4) (oct 6 6 6)
, testApplyChmod File [SetPlusWho Chmod_a Chmod_u]
(oct 7 5 5) (oct 7 7 7)
, testApplyChmod File [SetMinusWho Chmod_a Chmod_o]
(oct 7 5 5) (oct 2 0 0)
, testApplyChmod File [SetPlus Chmod_o [Chmod_X]]
(oct 0 1 0) (oct 0 1 1)
, testApplyChmod Dir [SetPlus Chmod_o [Chmod_X]]
(oct 0 0 0) (oct 0 0 1)
, testApplyChmod Dir [SetPlus Chmod_o [Chmod_X]]
(oct 0 1 0) (oct 0 1 1)
]
oct u g o = u*64 + g*8 + o
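-- Editor's note: oct packs an octal permission triple, e.g. oct 7 5 5 == 493,
-- which is the same value SetOctal 493 expects for "755" above.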
testChmod :: ByteString -> [Chmod] -> TestTree
testChmod input expected =
testCase (unwords ["Parse chmod", B.unpack input]) $
parseOnly parseChmod input @?= Right expected
testChmodShouldFail :: ByteString -> TestTree
testChmodShouldFail input =
testCase (unwords ["Parse chmod", B.unpack input]) $
assertBool "Failed to catch invalid chmod"
(isLeft $ parseOnly parseChmod input)
testApplyChmod :: FileType -> [Chmod] -> Word16 -> Word16 -> TestTree
testApplyChmod filetype input old expected =
testCase (unwords ["Apply chmod", show input, "to", show old]) $
applyChmod filetype input old @?= expected
|
shyam334/hadoop-tools
|
tests/test.hs
|
apache-2.0
| 3,312 | 0 | 12 | 866 | 1,254 | 654 | 600 | 74 | 1 |
-- -*- coding: utf-8 -*-
-- Copyright (c) 2010-2014, MIT Probabilistic Computing Project
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Main where
import Control.Monad.State
import Data.Random
import Data.RVar (sampleRVar)
import System.Random
import Types
import Haxcat
import TestUtils
bogogen :: RVar (Crosscat Double)
bogogen = do
ds <- bogodata2 300 23
cc <- train ds 15
return cc
bogo_cc :: Crosscat Double
bogo_cc = evalState (sampleRVar bogogen) (mkStdGen 0)
main :: IO ()
main = do
putStrLn $ show bogo_cc
putStrLn "done"
-- Up to the effects of input distribution on performance, satellites is
-- train (bogodata 1167 23) 1500 in 30 minutes
-- Admittedly, bogodata ends up all in one view and all in
-- one cluster, so satellites would be slower (if the thing works)
-- - A column sweep is O(table * num views)
-- - A row sweep is O(table * avg_col num clusters)
-- bogodata2 seems to lead to plenty of views and clusters, at least
-- without having debugged inference.
|
probcomp/haxcat
|
BenchInfer.hs
|
apache-2.0
| 1,537 | 0 | 8 | 292 | 171 | 98 | 73 | 19 | 1 |
import Data.List
import System.IO
-- List
primeNumbers = [3, 5, 7, 11]
morePrimes = primeNumbers ++ [13, 17, 19, 23, 29]
favNums = 2 : 7 : 21 : 66 : []
multlist = [[3, 5, 7], [11, 13, 17]]
morePrimes2 = 2 : morePrimes
lenPrime = length morePrimes2
revPrime = reverse morePrimes2
isListEmpty = null morePrimes2
secondPrime = morePrimes2 !! 1
fifthPrime = morePrimes2 !! 4
firstPrime = head morePrimes2
lastPrime = last morePrimes2
-- return the list without its last element
primeInit = init morePrimes2
-- first three elements
first3Primes = take 3 morePrimes2
-- drop the first three elements
removePrimes = drop 3 morePrimes2
is7InList = 7 `elem` morePrimes2
maxPrime = maximum morePrimes2
minPrime = minimum morePrimes2
newList = [2, 3, 5]
prodPrimes = product newList
zeroToTen = [0..10]
evenList = [2,4..20]
letterList = ['A', 'C'..'Z']
infinPow10 = [10, 20..]
many2s = take 10 (repeat 2)
many3s = replicate 10 3
cycleList = take 10 (cycle [1, 2, 3, 4, 5])
listTimes2 = [x * 2 | x <- [1, 2, 3, 4], x * 2 < 4]
divideBy9N13 = [x | x <- [1..500], x `mod` 13 == 0, x `mod` 9 == 0]
-- Operators
sortedList = sort [1, 3, 5, 7, 9, 2, 4, 6, 8, 10]
sumOfList = zipWith (+) [1, 2, 3, 4, 5] [6, 7, 8, 9, 10]
listBiggerThan5 = filter (>5) morePrimes
evensUpTo20 = takeWhile (<= 20) [2, 4..]
-- fold right to left
multOfListr = foldr (*) 2 [2, 3, 4, 5]
-- fold left to right
multOfListl = foldl (*) 1 [2, 3, 4, 5]
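-- Editor's note: multOfListr evaluates to 2*3*4*5*2 = 240 and multOfListl
-- to 1*2*3*4*5 = 120; with (*) the fold direction itself does not change
-- the result, only the different seed values (2 vs. 1) do.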
powOf3 = [3^n | n <- [1..10]]
multTable = [[x * y | y <- [1..10]] | x <- [1..10]]
-- Tuple
randTuple = (1, "Random Tuple")
bobSmith = ("Bob Smith", 52)
bobsName = fst bobSmith
bobsAge = snd bobSmith
names = ["Bob", "Mary", "Tom"]
addresses = ["123 Main", "234 North", "567 South"]
-- zip :: [a] -> [b] -> [(a, b)]
namesNAddress = zip names addresses
|
Kermit95/Playground
|
lang/haskell/haskell_tut2.hs
|
apache-2.0
| 1,767 | 0 | 9 | 354 | 764 | 448 | 316 | 46 | 1 |
module Main where
import qualified PassThroughService_Client as Client
import PassThrough_Types
import Thrift
import Thrift.Protocol.Binary
import Thrift.Server
import Thrift.Transport
import Thrift.Transport.Handle
import Control.Exception
import Data.Either
import Data.Int
import Data.List
import Data.Maybe
import Data.Time
import Data.Text.Lazy
import Data.Vector
import Network
import System.Exit
import System.Random
import Text.Printf
getRight :: Either left right -> right
getRight (Right x) = x
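-- check compares the DFE output vector against the CPU reference list
-- element by element, counting mismatches (squared difference above
-- 0.00001) and collecting the offending indices together with both values
-- for later reporting. (Editor's descriptive comment.)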
check :: Vector Int32 -> [Int32] -> Int -> Int -> [Int] -> [Int32] -> [Int32] -> (Int, [Int], [Int32], [Int32])
check outDFE [] status iter iterL outDFEL outCPUL = (status, iterL, outDFEL, outCPUL)
check outDFE (outCPU:cpul) status iter iterL outDFEL outCPUL
| fromIntegral(((fromMaybe 0 ((!?) outDFE 0)) - outCPU)) ** 2 > 0.00001 = check (Data.Vector.drop 1 outDFE) cpul (status + 1) (iter + 1) (iterL Data.List.++ (iter:[])) (outDFEL Data.List.++ ((fromMaybe 0 ((!?) outDFE 0)):[])) (outCPUL Data.List.++ (outCPU:[]))
| otherwise = check (Data.Vector.drop 1 outDFE) cpul status (iter + 1) iterL outDFEL outCPUL
printErrors :: [Int] -> [Int32] -> [Int32] -> String -> String
printErrors [] [] [] output = output
printErrors (i:is) (x:xs) (y:ys) output = printErrors is xs ys (output Data.List.++ "Output data @ " Data.List.++ (show i) Data.List.++ " = " Data.List.++ (show x) Data.List.++ " (expected " Data.List.++ (show y) Data.List.++ ")\n")
passThroughCPU :: [Int32] -> [Int32]
passThroughCPU (dataIn : []) = dataIn : []
passThroughCPU (dataIn : l) = dataIn : passThroughCPU (l)
passThroughDFE :: Int64 -> [Int32] -> IO (Vector Int32)
passThroughDFE size dataIn = do
startTime <- getCurrentTime
-- Make socket
transport <- hOpen ("localhost", PortNumber 9090)
-- Wrap in a protocol
let protocol = BinaryProtocol transport
-- Create a client to use the protocol encoder
let client = (protocol, protocol)
stopTime <- getCurrentTime
  putStrLn ("Creating a client and opening connection:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Initialize maxfile
startTime <- getCurrentTime
e <- try (Client.passThrough_init client) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let maxfile = getRight e
stopTime <- getCurrentTime
putStrLn ("Initializing maxfile:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Load DFE
startTime <- getCurrentTime
e <- try (Client.max_load client maxfile (pack "*")) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let engine = getRight e
stopTime <- getCurrentTime
putStrLn ("Loading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Allocate and send input streams to server
startTime <- getCurrentTime
e <- try (Client.malloc_int32_t client size) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let address_dataIn = getRight e
e <- try (Client.send_data_int32_t client address_dataIn (fromList dataIn)) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Sending input data:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Allocate memory for output stream on server
startTime <- getCurrentTime
e <- try (Client.malloc_int32_t client size) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let address_dataOut = getRight e
stopTime <- getCurrentTime
putStrLn ("Allocating memory for output stream on server:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Action default
startTime <- getCurrentTime
let action = PassThrough_actions_t_struct (Just (fromIntegral size)) (Just (fromIntegral address_dataIn)) (Just (fromIntegral address_dataOut))
e <- try (Client.send_PassThrough_actions_t client action) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let address_action = getRight e
e <- try (Client.passThrough_run client engine address_action) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Pass through time:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Unload DFE
startTime <- getCurrentTime
e <- try (Client.max_unload client engine) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Unloading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Get output stream from server
startTime <- getCurrentTime
dataOut <- Client.receive_data_int32_t client address_dataOut size
stopTime <- getCurrentTime
putStrLn ("Getting output stream:\t(size = " Data.List.++ (show (size * 32)) Data.List.++ " bit)\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Free allocated memory for streams on server
startTime <- getCurrentTime
e <- try (Client.free client address_dataIn) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
e <- try (Client.free client address_dataOut) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Freeing allocated memory for streams on server:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Free allocated maxfile data
startTime <- getCurrentTime
e <- try (Client.passThrough_free client) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Freeing allocated maxfile data:\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Close!
startTime <- getCurrentTime
tClose transport
stopTime <- getCurrentTime
putStrLn ("Closing connection:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
return dataOut
main = do
let size = 1024
let dataIn = [fromIntegral(1)..fromIntegral(size)]
-- DFE Output
startTime <- getCurrentTime
dataOutDFE <- passThroughDFE size dataIn
stopTime <- getCurrentTime
  putStrLn ("DFE pass through total time:\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- CPU Output
startTime <- getCurrentTime
let dataOutCPU = passThroughCPU dataIn
stopTime <- getCurrentTime
  putStrLn ("CPU pass through total time:\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Checking results
startTime <- getCurrentTime
let (status, iter, dataErrDFE, dataErrCPU) = check dataOutDFE dataOutCPU 0 0 [] [] []
putStr (printErrors iter dataErrDFE dataErrCPU [])
stopTime <- getCurrentTime
putStrLn ("Checking results:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
if (status == 0)
then putStrLn ("Test successful!")
else do putStrLn ("Test failed " Data.List.++ show status Data.List.++ " times!")
exitWith $ ExitFailure (-1)
|
maxeler/maxskins
|
examples/PassThrough/client/hs/AdvancedStatic/PassThroughClient.hs
|
bsd-2-clause
| 7,987 | 0 | 17 | 1,730 | 2,640 | 1,309 | 1,331 | 144 | 12 |
module Test.Day4 where
import Day4 as D4
import Test.Tasty
import Test.Tasty.HUnit
day4 :: TestTree
day4 = testGroup "The Ideal Stocking Stuffer" [part1, part2]
part1 :: TestTree
part1 = testGroup "Part 1" [p1Tests, p1Puzzle]
p1Tests :: TestTree
p1Tests = testGroup "Test Cases" $
[
testCase "Example 1" $ zeroHashed "abcdef" 5 "609043" @?= True
, testCase "Example 2" $ zeroHashed "pqrstuv" 5 "1048970" @?= True
]
p1Puzzle :: TestTree
p1Puzzle = testGroup "Puzzle"
[
testCase "Puzzle" $ zeroHashed "ckczppom" 5 "117946" @?= True
, testCase "Puzzle" $ answer "ckczppom" 5 @?= "117946"
]
part2 :: TestTree
part2 = testGroup "Part 2" [p2Puzzle]
p2Puzzle :: TestTree
p2Puzzle = testGroup "Puzzle"
[
testCase "Puzzle" $ zeroHashed "ckczppom" 6 "3938038" @?= True
]
|
taylor1791/adventofcode
|
2015/test/Test/Day4.hs
|
bsd-2-clause
| 797 | 0 | 9 | 155 | 232 | 123 | 109 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module OpenRTB.Types.Enum.VideoPlaybackMethodSpec where
import Control.Applicative
import Data.Aeson
import Data.Aeson.TH
import Test.Hspec
import Test.QuickCheck
import Test.Instances
import OpenRTB.Types.Enum.VideoPlaybackMethod
data Mock = Mock { vpm :: VideoPlaybackMethod } deriving (Eq, Show)
$(deriveJSON defaultOptions ''Mock)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "VideoPlaybackMethod" $ do
context "JSON" $ do
it "should convert back and forth" $ property $ do
\m -> (decode . encode) m == Just (m :: Mock)
instance Arbitrary Mock where
arbitrary = Mock <$> arbitrary
|
ankhers/openRTB-hs
|
spec/OpenRTB/Types/Enum/VideoPlaybackMethodSpec.hs
|
bsd-3-clause
| 689 | 0 | 18 | 111 | 195 | 106 | 89 | 21 | 1 |
{-#
LANGUAGE
NoMonomorphismRestriction,
PackageImports,
TemplateHaskell,
FlexibleContexts
#-}
module Language.Granule.ReplParser where
import Prelude
--import Data.List
--import Data.Char
import qualified Data.Text as T
import Text.Parsec
import qualified Text.Parsec.Token as Token
import Text.Parsec.Language
--import Data.Functor.Identity
--import System.FilePath
--import Language.Granule.Syntax.Expr
lexer = haskellStyle {
Token.reservedOpNames = [":", "let"]
}
tokenizer = Token.makeTokenParser lexer
reservedOp = Token.reservedOp tokenizer
ws = Token.whiteSpace tokenizer
symbol = Token.symbol tokenizer
data REPLExpr =
ShowDef String
| DumpState
| LoadFile [FilePath]
| AddModule [FilePath]
| Reload
| CheckType String
| Eval String
| RunParser String
| RunLexer String
| Debuger [FilePath]
deriving Show
replTermCmdParser short long c p = do
symbol ":"
cmd <- many lower
ws
t <- p
eof
if (cmd == long || cmd == short)
then return $ c t
else fail $ "Command \":"<>cmd<>"\" is unrecognized."
replIntCmdParser short long c = do
symbol ":"
cmd <- many lower
eof
if (cmd == long || cmd == short)
then return c
else fail $ "Command \":"<>cmd<>"\" is unrecognized."
replTyCmdParser short long c = do
symbol ":"
cmd <- many lower
ws
term <- many1 anyChar
eof
if (cmd == long || cmd == short)
then return $ c term
else fail $ "Command \":"<>cmd<>"\" is unrecognized."
replTySchCmdParser short long c = do
symbol ":"
cmd <- many (lower <|> char '_')
ws
term <- many1 anyChar
eof
if (cmd == long || cmd == short)
then return $ c term
else fail $ "Command \":"<>cmd<>"\" is unrecognized."
replFileCmdParser short long c = do
symbol ":"
cmd <- many lower
ws
pathUntrimned <- many1 anyChar
eof
if(cmd == long || cmd == short)
then do
let tpath = T.words . T.pack $ pathUntrimned
let fpath = textToFilePath tpath
return $ c fpath
else fail $ "Command \":"<>cmd<>"\" is unrecognized."
evalParser = do
ev <- many anyChar
return $ Eval ev
-- showASTParser = replTermCmdParser "s" "show" ShowAST
-- unfoldTermParser = replTermCmdParser "u" "unfold" Unfold
dumpStateParser = replIntCmdParser "dump" "dump" DumpState
loadFileParser = replFileCmdParser "l" "load" LoadFile
replDebugger = replFileCmdParser "d" "debug" Debuger
addModuleParser = replFileCmdParser "m" "module" AddModule
reloadFileParser = replIntCmdParser "r" "reload" Reload
checkTypeParser = replTyCmdParser "t" "type" CheckType
showAstParser = replTyCmdParser "s" "show" ShowDef
runParserRepl = replTyCmdParser "p" "parse" RunParser
runLexer = replTyCmdParser "x" "lexer" RunLexer
pathParser = do
string "KEY"
string " =" <|> string "="
string "" <|> string " "
path <- manyTill anyChar (string "\n")
return path
pathParser' = endBy pathParser eof
-- lineParser =
lineParser = try dumpStateParser
<|> try loadFileParser
<|> try addModuleParser
<|> try reloadFileParser
<|> try checkTypeParser
<|> try replDebugger
<|> try showAstParser
<|> try runParserRepl
<|> try runLexer
-- <|> try unfoldTermParser5
-- <|> try showASTParser
<|> evalParser
parseLine :: String -> Either String REPLExpr
parseLine s = case (parse lineParser "" s) of
Left msg -> Left $ show msg
Right l -> Right l
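-- Editor's sketch (added examples): commands start with ':', anything
-- else falls through to evaluation, e.g.
--
-- parseLine ":t foo" yields Right (CheckType "foo")
-- parseLine "1 + 1" yields Right (Eval "1 + 1")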
textToFilePath :: [T.Text] -> [FilePath]
textToFilePath [] = []
textToFilePath (x:xs) = do
let spth = T.unpack x
spth : textToFilePath xs
parsePath :: String -> Either String [String]
parsePath s = do
case (parse pathParser' "" s) of
Right l -> Right l
Left msg -> Left $ show msg
|
dorchard/gram_lang
|
repl/app/Language/Granule/ReplParser.hs
|
bsd-3-clause
| 3,867 | 0 | 15 | 969 | 1,119 | 550 | 569 | 118 | 2 |
{-# OPTIONS_GHC -cpp #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/network/LICENSE)
--
-- Maintainer  : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- The "Network" interface is a \"higher-level\" interface to
-- networking facilities, and it is recommended unless you need the
-- lower-level interface in "Network.Socket".
--
-----------------------------------------------------------------------------
module Network (
-- * Basic data types
Socket,
PortID(..),
HostName,
PortNumber, -- instance (Eq, Enum, Num, Real, Integral)
-- * Initialisation
withSocketsDo, -- :: IO a -> IO a
-- * Server-side connections
listenOn, -- :: PortID -> IO Socket
accept, -- :: Socket -> IO (Handle, HostName, PortNumber)
sClose, -- :: Socket -> IO ()
-- * Client-side connections
connectTo, -- :: HostName -> PortID -> IO Handle
-- * Simple sending and receiving
{-$sendrecv-}
sendTo, -- :: HostName -> PortID -> String -> IO ()
recvFrom, -- :: HostName -> PortID -> IO String
-- * Miscellaneous
socketPort, -- :: Socket -> IO PortID
-- * Networking Issues
-- ** Buffering
{-$buffering-}
-- ** Improving I\/O Performance over sockets
{-$performance-}
-- ** @SIGPIPE@
{-$sigpipe-}
) where
import Network.BSD
import Network.Socket hiding ( accept, socketPort, recvFrom, sendTo, PortNumber )
import qualified Network.Socket as Socket ( accept )
import System.IO
import Prelude
import Control.Exception as Exception
-- ---------------------------------------------------------------------------
-- High Level ``Setup'' functions
-- If the @PortID@ specifies a unix family socket and the @Hostname@
-- differs from that returned by @getHostname@ then an error is
-- raised. Alternatively an empty string may be given to @connectTo@
-- signalling that the current hostname applies.
data PortID =
Service String -- Service Name eg "ftp"
| PortNumber PortNumber -- User defined Port Number
#if !defined(mingw32_HOST_OS) && !defined(cygwin32_HOST_OS) && !defined(_WIN32)
| UnixSocket String -- Unix family socket in file system
#endif
-- | Calling 'connectTo' creates a client side socket which is
-- connected to the given host and port. The Protocol and socket type is
-- derived from the given port identifier. If a port number is given
-- then the result is always an internet family 'Stream' socket.
connectTo :: HostName -- Hostname
-> PortID -- Port Identifier
-> IO Handle -- Connected Socket
connectTo hostname (Service serv) = do
proto <- getProtocolNumber "tcp"
Exception.bracketOnError
(socket AF_INET Stream proto)
(sClose) -- only done if there's an error
(\sock -> do
port <- getServicePortNumber serv
he <- getHostByName hostname
connect sock (SockAddrInet port (hostAddress he))
socketToHandle sock ReadWriteMode
)
connectTo hostname (PortNumber port) = do
proto <- getProtocolNumber "tcp"
Exception.bracketOnError
(socket AF_INET Stream proto)
(sClose) -- only done if there's an error
(\sock -> do
he <- getHostByName hostname
connect sock (SockAddrInet port (hostAddress he))
socketToHandle sock ReadWriteMode
)
#if !defined(mingw32_HOST_OS) && !defined(cygwin32_HOST_OS) && !defined(_WIN32)
connectTo _ (UnixSocket path) = do
Exception.bracketOnError
(socket AF_UNIX Stream 0)
(sClose)
(\sock -> do
connect sock (SockAddrUnix path)
socketToHandle sock ReadWriteMode
)
#endif
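-- A minimal client-side sketch (editor's addition; host and port are
-- hypothetical):
--
-- > h <- connectTo "localhost" (PortNumber 4242)
-- > hPutStr h "ping"
-- > hClose h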
-- | Creates the server side socket which has been bound to the
-- specified port.
--
-- NOTE: To avoid the \"Address already in use\"
-- problems popped up several times on the GHC-Users mailing list we
-- set the 'ReuseAddr' socket option on the listening socket. If you
-- don't want this behaviour, please use the lower level
-- 'Network.Socket.listen' instead.
listenOn :: PortID -- ^ Port Identifier
-> IO Socket -- ^ Connected Socket
listenOn (Service serv) = do
proto <- getProtocolNumber "tcp"
Exception.bracketOnError
(socket AF_INET Stream proto)
(sClose)
(\sock -> do
port <- getServicePortNumber serv
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrInet port iNADDR_ANY)
listen sock maxListenQueue
return sock
)
listenOn (PortNumber port) = do
proto <- getProtocolNumber "tcp"
Exception.bracketOnError
(socket AF_INET Stream proto)
(sClose)
(\sock -> do
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrInet port iNADDR_ANY)
listen sock maxListenQueue
return sock
)
#if !defined(mingw32_HOST_OS) && !defined(cygwin32_HOST_OS) && !defined(_WIN32)
listenOn (UnixSocket path) =
Exception.bracketOnError
(socket AF_UNIX Stream 0)
(sClose)
(\sock -> do
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrUnix path)
listen sock maxListenQueue
return sock
)
#endif
-- -----------------------------------------------------------------------------
-- accept
-- | Accept a connection on a socket created by 'listenOn'. Normal
-- I\/O operations (see "System.IO") can be used on the 'Handle'
-- returned to communicate with the client.
-- Notice that although you can pass any Socket to Network.accept, only
-- sockets of either AF_UNIX or AF_INET will work (this shouldn't be a problem,
-- though). When using AF_UNIX, HostName will be set to the path of the socket
-- and PortNumber to -1.
--
accept :: Socket -- ^ Listening Socket
-> IO (Handle,
HostName,
PortNumber) -- ^ Triple of: read\/write 'Handle' for
-- communicating with the client,
-- the 'HostName' of the peer socket, and
-- the 'PortNumber' of the remote connection.
accept sock@(MkSocket _ AF_INET _ _ _) = do
~(sock', (SockAddrInet port haddr)) <- Socket.accept sock
peer <- Exception.catchJust ioErrors
(do
(HostEntry peer _ _ _) <- getHostByAddr AF_INET haddr
return peer
)
(\e -> inet_ntoa haddr)
-- if getHostByAddr fails, we fall back to the IP address
handle <- socketToHandle sock' ReadWriteMode
return (handle, peer, port)
#if !defined(mingw32_HOST_OS) && !defined(cygwin32_HOST_OS) && !defined(_WIN32)
accept sock@(MkSocket _ AF_UNIX _ _ _) = do
~(sock', (SockAddrUnix path)) <- Socket.accept sock
handle <- socketToHandle sock' ReadWriteMode
return (handle, path, -1)
#endif
accept sock@(MkSocket _ family _ _ _) =
error $ "Sorry, address family " ++ (show family) ++ " is not supported!"
-- -----------------------------------------------------------------------------
-- sendTo/recvFrom
{-$sendrecv
Send and receive data from\/to the given host and port number. These
should normally only be used where the socket will not be required for
further calls. Also, note that due to the use of 'hGetContents' in 'recvFrom'
the socket will remain open (i.e. not available) even after the function has
returned. Their use is strongly discouraged except for small test-applications
or invocations from the command line.
-}
sendTo :: HostName -- Hostname
-> PortID -- Port Number
-> String -- Message to send
-> IO ()
sendTo h p msg = do
s <- connectTo h p
hPutStr s msg
hClose s
recvFrom :: HostName -- Hostname
-> PortID -- Port Number
-> IO String -- Received Data
recvFrom host port = do
ip <- getHostByName host
let ipHs = hostAddresses ip
s <- listenOn port
let
waiting = do
~(s', SockAddrInet _ haddr) <- Socket.accept s
he <- getHostByAddr AF_INET haddr
if not (any (`elem` ipHs) (hostAddresses he))
then do
sClose s'
waiting
else do
h <- socketToHandle s' ReadMode
msg <- hGetContents h
return msg
message <- waiting
return message
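-- A minimal usage sketch for the pair above (the host names and the port
-- number are invented for illustration; they are not part of the original
-- API documentation):
--
-- > -- on the receiving machine: listen on port 4242, accepting only the named peer
-- > server = recvFrom "client.example.org" (PortNumber 4242) >>= putStr
-- >
-- > -- on the peer: push a one-off message and close the connection
-- > client = sendTo "server.example.org" (PortNumber 4242) "hello\n"
--
-- Note that 'recvFrom' keeps accepting (and closing) connections until one
-- arrives from the named host, as the loop in its definition shows.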
-- ---------------------------------------------------------------------------
-- Access function returning the port type/id of a socket.
-- | Returns the 'PortID' associated with a given socket.
socketPort :: Socket -> IO PortID
socketPort s = do
sockaddr <- getSocketName s
return (portID sockaddr)
where
portID sa =
case sa of
SockAddrInet port _ -> PortNumber port
#if !defined(mingw32_HOST_OS) && !defined(cygwin32_HOST_OS) && !defined(_WIN32)
SockAddrUnix path -> UnixSocket path
#endif
-----------------------------------------------------------------------------
-- Extra documentation
{-$buffering
The 'Handle' returned by 'connectTo' and 'accept' is block-buffered by
default. For an interactive application you may want to set the
buffering mode on the 'Handle' to
'LineBuffering' or 'NoBuffering', like so:
> h <- connectTo host port
> hSetBuffering h LineBuffering
-}
{-$performance
For really fast I\/O, it might be worth looking at the 'hGetBuf' and
'hPutBuf' family of functions in "System.IO".
-}
{-$sigpipe
On Unix, when writing to a socket and the reading end is
closed by the remote client, the program is normally sent a
@SIGPIPE@ signal by the operating system. The
default behaviour when a @SIGPIPE@ is received is
to terminate the program silently, which can be somewhat confusing
if you haven't encountered this before. The solution is to
specify that @SIGPIPE@ is to be ignored, using
the POSIX library:
> import Posix
> main = do installHandler sigPIPE Ignore Nothing; ...
-}
|
FranklinChen/hugs98-plus-Sep2006
|
packages/network/Network.hs
|
bsd-3-clause
| 9,517 | 83 | 17 | 1,913 | 1,511 | 793 | 718 | 141 | 2 |
-- |
-- Module : FRP.Animas.MergeableRecord
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Framework for record merging.
--
-- Idea:
--
-- MergeableRecord is intended to be a super class for classes providing
-- update operations on records. The ADT induced by such a set of operations
-- can be considered a "mergeable record", which can be merged into larger
-- mergeable records essentially by function composition. Finalization turns
-- a mergeable record into a record.
--
-- Typical use:
--
-- Given
--
-- > data Foo = Foo {l1 :: T1, l2 :: T2}
--
-- one defines a mergeable record type (MR Foo) via the following instance:
--
-- @
-- instance MergeableRecord Foo where
-- mrDefault = Foo {l1 = v1_dflt, l2 = v2_dflt}
-- @
--
-- Typically, one would also provide definitions for setting the fields,
-- possibly (but not necessarily) overloaded:
--
-- @
-- instance HasL1 Foo where
-- setL1 v = mrMake (\foo -> foo {l1 = v})
-- @
--
-- Now Foo records can be created as follows:
--
-- @
-- let foo1 = setL1 v1
-- ...
-- let foo2 = setL2 v2 ~+~ foo1
-- ...
-- let foo<N> = setL1 vN ~+~ foo<N-1>
-- let fooFinal = mrFinalize foo<N>
-- @
module FRP.Animas.MergeableRecord (
MergeableRecord(..),
MR,
mrMake,
(~+~),
mrMerge,
mrFinalize
) where
-- | Typeclass for mergeable records
class MergeableRecord a where
-- | The default value of a record type
mrDefault :: a
-- | Type constructor for mergeable records.
newtype MergeableRecord a => MR a = MR (a -> a)
-- | Construction of a mergeable record.
mrMake :: MergeableRecord a => (a -> a) -> MR a
mrMake f = (MR f)
-- | Merge two mergeable records. Left \"overrides\" in case of conflict.
(~+~) :: MergeableRecord a => MR a -> MR a -> MR a
(MR f1) ~+~ (MR f2) = MR (f1 . f2)
-- | Equivalent to '(~+~)' above.
mrMerge :: MergeableRecord a => MR a -> MR a -> MR a
mrMerge = (~+~)
-- | Finalization: turn a mergeable record into a record.
mrFinalize :: MergeableRecord a => MR a -> a
mrFinalize (MR f) = f mrDefault
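-- A concrete usage sketch tying the pieces above together (the @Config@ type,
-- its fields and the default values are hypothetical, made up purely for
-- illustration):
--
-- > data Config = Config { verbose :: Bool, retries :: Int }
-- >
-- > instance MergeableRecord Config where
-- >     mrDefault = Config { verbose = False, retries = 3 }
-- >
-- > cfg :: Config
-- > cfg = mrFinalize (mrMake (\c -> c {verbose = True})
-- >                   ~+~ mrMake (\c -> c {retries = 5}))
--
-- Here @cfg@ is @Config True 5@; had both updates touched the same field, the
-- left argument of '(~+~)' would win, since its function is applied last.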
|
eamsden/Animas
|
src/FRP/Animas/MergeableRecord.hs
|
bsd-3-clause
| 2,250 | 2 | 8 | 498 | 305 | 187 | 118 | 18 | 1 |
{- Testing definitions in no-role-annots package.
Copyright 2014 Richard Eisenberg
https://github.com/goldfirere/no-role-annots/
-}
{-# LANGUAGE TemplateHaskell, CPP, GeneralizedNewtypeDeriving,
StandaloneDeriving #-}
module Test.Test where
#if __GLASGOW_HASKELL__ < 707
import Language.Haskell.RoleAnnots
#else
import Language.Haskell.TH
#endif
import Test.Defns
import Language.Haskell.RoleAnnots.Check
import System.Exit
class C a where
mymap :: MyMap2 String a
instance C Int where
mymap = mkMap2 "Foo" 3
newtype Age = MkAge Int
deriving C
class D a where
mymap' :: MyMap2 a String
instance D Int where
mymap' = mkMap2 3 "Foo"
-- deriving instance D Age -- shouldn't work
checkRoles ''MyMap1 [RepresentationalR, RepresentationalR]
checkRoles ''MyMap2 [NominalR, RepresentationalR]
checkRoles ''MyPtr1 [PhantomR]
checkRoles ''MyPtr2 [RepresentationalR]
checkRoles ''MyMap3 [NominalR, RepresentationalR]
checkRoles ''MyMap4 [NominalR, RepresentationalR]
-- checkRoles ''MyMap1 [NominalR, RepresentationalR] -- should report error
main :: IO ()
main = do
putStrLn "It compiled!"
let actual = [ $(checkRolesB ''MyMap1 [NominalR, RepresentationalR])
, $(checkRolesB ''MyMap2 [NominalR, RepresentationalR])
, $(checkRolesB ''MyPtr1 [RepresentationalR]) ]
#if __GLASGOW_HASKELL__ < 707
desired = [True, True, True]
#else
desired = [False, True, False]
#endif
putStrLn $ "Actual checkRolesB: " ++ show actual
putStrLn $ "Desired checkRolesB: " ++ show desired
let failed_appropriately =
#if __GLASGOW_HASKELL__ < 707
True
#else
$( do recover [| True |] $
do _ <- checkRoles ''MyMap1 [NominalR, RepresentationalR]
[| False |] )
#endif
putStrLn $ "Did checkRoles fail appropriately? " ++ show failed_appropriately
if actual == desired && failed_appropriately
then exitSuccess
else exitFailure
|
goldfirere/no-role-annots
|
Test/Test.hs
|
bsd-3-clause
| 1,953 | 0 | 14 | 394 | 388 | 205 | 183 | 38 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ImpredicativeTypes #-}
module Parser.PropertyParser where
import Control.Applicative.Interleaved
import Data.Char
import qualified Data.Map as Map
import Data.Maybe
import Text.ParserCombinators.UU
import Text.ParserCombinators.UU.BasicInstances
import Text.ParserCombinators.UU.Interleaved hiding (mkP)
import Text.ParserCombinators.UU.Utils
import Data.DataTreeCSS
import Data.Property
import Parser.BasicCssParser
import Parser.CombinadoresBasicos
-- | display
pDisplay :: Parser Value
pDisplay
= pKeyValues ["inline", "block", "list-item", "none", "inherit"] -- no support for: run-in, inline-block
-- | position
pPosition :: Parser Value
pPosition
= pKeyValues ["static", "relative", "inherit"] -- no support for absolute, fixed
-- | top, bottom, ...
pOffset :: Parser Value
pOffset
= pLength <|> pPercentagePos <|> pKeyValues ["auto", "inherit"]
-- | float
pFloat :: Parser Value
pFloat
= pKeyValues ["none", "inherit"] -- no support for left, right
-- | margin
pMarginWidth :: Parser Value
pMarginWidth
= pLength <|> pPercentagePos <|> pKeyValues ["auto", "inherit"]
pShorthandMargin :: Parser Declaraciones
pShorthandMargin
= buildSimpleShorthandProp
"margin"
["margin-top", "margin-right", "margin-bottom", "margin-left"]
pMarginWidth
-- | padding
pPaddingWidth :: Parser Value
pPaddingWidth
= pLengthPos <|> pPercentagePos <|> pKeyValues ["inherit"]
pShorthandPadding :: Parser Declaraciones
pShorthandPadding
= buildSimpleShorthandProp
"padding"
["padding-top", "padding-right", "padding-bottom", "padding-left"]
pPaddingWidth
-- | border
pBorderWidth :: Parser Value
pBorderWidth
= pBorderWidth' <|> pKeyValues ["inherit"]
pBorderWidth' :: Parser Value
pBorderWidth'
= pLengthPos
pShorthandBorderWidth :: Parser Declaraciones
pShorthandBorderWidth
= buildSimpleShorthandProp
"border-width"
["border-top-width", "border-right-width", "border-bottom-width", "border-left-width"]
pBorderWidth
pBorderColor :: Parser Value
pBorderColor
= pBorderColor' <|> pKeyValues ["inherit"]
pBorderColor' :: Parser Value
pBorderColor'
= pColor
pShorthandBorderColor :: Parser Declaraciones
pShorthandBorderColor
= buildSimpleShorthandProp
"border-color"
["border-top-color", "border-right-color", "border-bottom-color", "border-left-color"]
pBorderColor
pBorderStyle :: Parser Value
pBorderStyle
= pBorderStyle' <|> pKeyValues ["inherit"]
pBorderStyle' :: Parser Value
pBorderStyle'
= pKeyValues ["hidden", "dotted", "dashed", "solid", "none"]
pShorthandBorderStyle :: Parser Declaraciones
pShorthandBorderStyle
= buildSimpleShorthandProp
"border-style"
["border-top-style", "border-right-style", "border-bottom-style", "border-left-style"]
pBorderStyle
pShorthandBorderEdge :: Parser Declaraciones
pShorthandBorderEdge
= let names = ["border-top", "border-right", "border-bottom", "border-left"]
in pAny (\nm -> makeDecl nm) names
where makeDecl nm = pToken nm *> pSimboloAmb ":" *> pValue nm
pValue nm = catMaybes <$> sepBy ((\a b c -> [a,b,c])
<$> (pMaybe $ mkG (pPropBorderWidth nm))
<||> (pMaybe $ mkG (pPropBorderColor nm))
<||> (pMaybe $ mkG (pPropBorderStyle nm))
) pInutil
pPropBorderWidth nm = (\val -> Declaracion (nm ++ "-width") val False) <$> pBorderWidth'
pPropBorderColor nm = (\val -> Declaracion (nm ++ "-color") val False) <$> pBorderColor'
pPropBorderStyle nm = (\val -> Declaracion (nm ++ "-style") val False) <$> pBorderStyle'
pShorthandBorder :: Parser Declaraciones
pShorthandBorder
= let names = ["border-top", "border-right", "border-bottom", "border-left"]
in pToken "border" *> pSimboloAmb ":" *> pValue names
where pValue nms = concat . catMaybes <$> sepBy ((\a b c -> [a,b,c])
<$> (pMaybe $ mkG (pPropBorderWidth nms))
<||> (pMaybe $ mkG (pPropBorderColor nms))
<||> (pMaybe $ mkG (pPropBorderStyle nms))
) pInutil
doProps nms val = map (\nm -> Declaracion nm val False) nms
pPropBorderWidth nms = let pnms = map (++"-width") nms
in doProps pnms <$> pBorderWidth'
pPropBorderColor nms = let pnms = map (++"-color") nms
in doProps pnms <$> pBorderColor'
pPropBorderStyle nms = let pnms = map (++"-style") nms
in doProps pnms <$> pBorderStyle'
-- | fonts
pFontWeight :: Parser Value
pFontWeight
= pKeyValues ["normal", "bold", "inherit"]
pFontStyle :: Parser Value
pFontStyle
= pKeyValues ["normal", "italic", "oblique", "inherit"]
pFontFamily :: Parser Value
pFontFamily
= pFontFamilyList <|> pKeyValues ["inherit"]
pFontSize :: Parser Value
pFontSize
= pFontSizeValue <|> pKeyValues ["inherit"]
pFontSizeValue :: Parser Value
pFontSizeValue
= pAbosoluteFontSize <|> pRelativeFontSize <|> pLength <|> pPercentagePos
pAbosoluteFontSize :: Parser Value
pAbosoluteFontSize
= KeyValue <$> ( pKeyword "xx-small"
<|> pKeyword "x-small"
<|> pKeyword "small"
<|> pKeyword "medium"
<|> pKeyword "large"
<|> pKeyword "x-large"
<|> pKeyword "xx-large"
)
pRelativeFontSize :: Parser Value
pRelativeFontSize
= KeyValue <$> (pKeyword "smaller" <|> pKeyword "larger")
pFontFamilyList :: Parser Value
pFontFamilyList
= ListValue <$> pList1Sep_ng (pSimboloAmb ",") pFontFamilyValue
pFontFamilyValue :: Parser Value
pFontFamilyValue
= pStringValue <|> pGenericFamily
pGenericFamily :: Parser Value
pGenericFamily
= KeyValue <$> (pKeyword "serif" <|> pKeyword "sans-serif" <|> pKeyword "cursive" <|> pKeyword "fantasy" <|> pKeyword "monospace")
-- | color
pColorValue :: Parser Value
pColorValue
= pColor <|> pKeyValues ["inherit"]
-- | dimensions: height and width
pDimentionValue :: Parser Value
pDimentionValue
= pLengthPos <|> pPercentagePos <|> pKeyValues ["auto", "inherit"]
-- | line height
pLineHeight :: Parser Value
pLineHeight
= pLengthPos <|> pPercentagePos <|> pKeyValues ["inherit"]
-- | vertical align
pVerticalAlign :: Parser Value
pVerticalAlign
= pLength <|> pPercentage <|> pKeyValues ["baseline", "sub", "super", "text-top", "text-bottom", "inherit"] -- "top", "bottom", "middle"
-- | generated content, quotes and lists
pContent :: Parser Value
pContent
= pListContent <|> pKeyValues ["normal", "none", "inherit"]
pListContent :: Parser Value
pListContent
= ListValue <$> pList1Sep_ng pInutil (pStringValue <|> pCounter <|> pKeyValues ["open-quote", "close-quote", "no-open-quote", "no-close-quote"])
-- pInutil??? wouldn't pInutil1 be better here?
pCounter :: Parser Value
pCounter
= Counter <$ pKeyword "counter" <* pSimboloAmb "("
<*> pSimpleContent <*> pCounterStyle
<* pSimboloAmb ")"
<|> Counters <$ pKeyword "counters" <* pSimboloAmb "("
<*> pSimpleContent <* pSimboloAmb "," <*> pString <*> pCounterStyle
<* pSimboloAmb ")"
pCounterStyle :: Parser (Maybe Value)
pCounterStyle
= Just <$ pSimboloAmb "," <*> pListStyleType
<|> pReturn Nothing
pCounterReset :: Parser Value
pCounterReset
= pListCounter <|> pKeyValues ["none", "inherit"]
pCounterIncrement :: Parser Value
pCounterIncrement
= pListCounter <|> pKeyValues ["none", "inherit"]
pListCounter :: Parser Value
pListCounter
= ListValue <$> pList1Sep_ng pInutil pCounterValue
pCounterValue :: Parser Value
pCounterValue
= CounterValue <$> pSimpleContent <* pInutil <*> pMaybeInteger
pMaybeInteger :: Parser (Maybe Int)
pMaybeInteger
= Just <$> pEntero --) `opt` Nothing
<|> pReturn Nothing
pQuotes :: Parser Value
pQuotes
= pListQuote <|> pKeyValues ["none", "inherit"]
pListQuote :: Parser Value
pListQuote
= ListValue <$> pList1Sep_ng pInutil pQuoteValue
pQuoteValue :: Parser Value
pQuoteValue
= QuoteValue <$> pString <* pInutil <*> pString
pListStylePositionProp :: Parser Value
pListStylePositionProp
= pKeyValues ["outside","inherit"]
pListStyleTypeProp :: Parser Value
pListStyleTypeProp
= pListStyleType <|> pKeyValues ["inherit"]
pListStyleType :: Parser Value
pListStyleType
= pKeyValues ["disc", "circle", "square", "decimal", "lower-roman", "upper-roman", "none"]
-- | background color
pBackgroundColor :: Parser Value
pBackgroundColor
= pColor <|> pKeyValues ["transparent", "inherit"]
-- | text
pTextIndent :: Parser Value
pTextIndent
= pLength <|> pPercentage <|> pKeyValues ["inherit"]
pTextAlign :: Parser Value
pTextAlign
= pKeyValues ["left", "right", "center", "inherit"] -- no jutify
pTextDecoration :: Parser Value
pTextDecoration
= pListDecoration <|> pKeyValues ["none", "inherit"]
pTextTransform :: Parser Value
pTextTransform
= pKeyValues ["capitalize", "uppercase", "lowercase", "none", "inherit"]
pListDecoration :: Parser Value
pListDecoration
= ListValue <$> pList1Sep_ng pInutil pDecorationValue
pDecorationValue :: Parser Value
pDecorationValue
= pKeyValues ["underline", "overline", "line-through"] -- no blink
-- white space
pWhiteSpace :: Parser Value
pWhiteSpace
= pKeyValues ["normal", "pre", "nowrap", "pre-wrap", "pre-line", "inherit"]
|
carliros/Simple-San-Simon-Functional-Web-Browser
|
src/Parser/PropertyParser.hs
|
bsd-3-clause
| 10,124 | 0 | 18 | 2,558 | 2,277 | 1,222 | 1,055 | 234 | 1 |
module Algebra.Lattice.Levitated (
Levitated(..)
) where
import Algebra.Lattice
--
-- Levitated
--
-- | Graft a distinct top and bottom onto an otherwise unbounded lattice.
-- The top is the absorbing element for the join, and the bottom is the absorbing
-- element for the meet.
data Levitated a = Top
| Levitate a
| Bottom
instance JoinSemiLattice a => JoinSemiLattice (Levitated a) where
Top `join` _ = Top
_ `join` Top = Top
Levitate x `join` Levitate y = Levitate (x `join` y)
Bottom `join` lev_y = lev_y
lev_x `join` Bottom = lev_x
instance MeetSemiLattice a => MeetSemiLattice (Levitated a) where
Top `meet` lev_y = lev_y
lev_x `meet` Top = lev_x
Levitate x `meet` Levitate y = Levitate (x `meet` y)
Bottom `meet` _ = Bottom
_ `meet` Bottom = Bottom
instance Lattice a => Lattice (Levitated a) where
instance JoinSemiLattice a => BoundedJoinSemiLattice (Levitated a) where
bottom = Bottom
instance MeetSemiLattice a => BoundedMeetSemiLattice (Levitated a) where
top = Top
instance BoundedLattice a => BoundedLattice (Levitated a) where
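-- A small illustration of the absorbing behaviour described above (assuming
-- some 'JoinSemiLattice'/'MeetSemiLattice' instance for the element type,
-- e.g. one where join is max and meet is min on Ints):
--
-- > Levitate 3 `join` Top       -- Top
-- > Top `meet` Levitate 3       -- Levitate 3
-- > Levitate 3 `join` Bottom    -- Levitate 3
-- > Bottom `meet` Levitate 3    -- Bottom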
|
batterseapower/lattices
|
Algebra/Lattice/Levitated.hs
|
bsd-3-clause
| 1,240 | 0 | 8 | 367 | 345 | 184 | 161 | 24 | 0 |
module Graphics.Vty.Widgets.Builder.Reader.XML.Namespaces
( coreNS
, widgetNS
, dataNS
)
where
coreNS, widgetNS, dataNS :: String
coreNS = "http://codevine.org/vty-ui-builder/xmlns/core/1.0"
widgetNS = "http://codevine.org/vty-ui-builder/xmlns/widget/1.0"
dataNS = "http://codevine.org/vty-ui-builder/xmlns/data/1.0"
|
jtdaugherty/vty-ui-builder-xml
|
src/Graphics/Vty/Widgets/Builder/Reader/XML/Namespaces.hs
|
bsd-3-clause
| 333 | 0 | 4 | 40 | 46 | 32 | 14 | 8 | 1 |
{-# OPTIONS_GHC -fno-warn-tabs #-}
{- $Id: TestsAccum.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* Y A M P A *
* *
* Module: TestsAccum *
* Purpose: Test cases for accumulators *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* University of Nottingham, 2005 *
* *
******************************************************************************
-}
module TestsAccum (
accum_tr,
accum_trs,
accum_st0,
accum_st0r,
accum_st1,
accum_st1r
) where
import Data.Maybe (fromJust)
import FRP.Yampa
import TestsCommon
------------------------------------------------------------------------------
-- Test cases for accumulators
------------------------------------------------------------------------------
accum_inp1 = (fromJust (head delta_inp), zip (repeat 1.0) (tail delta_inp))
where
delta_inp =
[Just NoEvent, Nothing, Just (Event (+1.0)), Just NoEvent,
Just (Event (+2.0)), Just NoEvent, Nothing, Nothing,
Just (Event (*3.0)), Just (Event (+5.0)), Nothing, Just NoEvent,
Just (Event (/2.0)), Just NoEvent, Nothing, Nothing]
++ repeat Nothing
accum_inp2 = (fromJust (head delta_inp), zip (repeat 1.0) (tail delta_inp))
where
delta_inp =
[Just (Event (+1.0)), Just NoEvent, Nothing, Nothing,
Just (Event (+2.0)), Just NoEvent, Nothing, Nothing,
Just (Event (*3.0)), Just (Event (+5.0)), Nothing, Just NoEvent,
Just (Event (/2.0)), Just NoEvent, Nothing, Nothing]
++ repeat Nothing
accum_inp3 = deltaEncode 1.0 $
[NoEvent, NoEvent, Event 1.0, NoEvent,
Event 2.0, NoEvent, NoEvent, NoEvent,
Event 3.0, Event 5.0, Event 5.0, NoEvent,
Event 0.0, NoEvent, NoEvent, NoEvent]
++ repeat NoEvent
accum_inp4 = deltaEncode 1.0 $
[Event 1.0, NoEvent, NoEvent, NoEvent,
Event 2.0, NoEvent, NoEvent, NoEvent,
Event 3.0, Event 5.0, Event 5.0, NoEvent,
Event 0.0, NoEvent, NoEvent, NoEvent]
++ repeat NoEvent
accum_inp5 = deltaEncode 0.25 (repeat ())
accum_t0 :: [Event Double]
accum_t0 = take 16 $ embed (accum 0.0) accum_inp1
accum_t0r =
[NoEvent, NoEvent, Event 1.0, NoEvent,
Event 3.0, NoEvent, NoEvent, NoEvent,
Event 9.0, Event 14.0, Event 19.0, NoEvent,
Event 9.5, NoEvent, NoEvent, NoEvent]
accum_t1 :: [Event Double]
accum_t1 = take 16 $ embed (accum 0.0) accum_inp2
accum_t1r =
[Event 1.0, NoEvent, NoEvent, NoEvent,
Event 3.0, NoEvent, NoEvent, NoEvent,
Event 9.0, Event 14.0, Event 19.0, NoEvent,
Event 9.5, NoEvent, NoEvent, NoEvent]
accum_t2 :: [Event Int]
accum_t2 = take 16 $ embed (accumBy (\a d -> a + floor d) 0) accum_inp3
accum_t2r :: [Event Int]
accum_t2r =
[NoEvent, NoEvent, Event 1, NoEvent,
Event 3, NoEvent, NoEvent, NoEvent,
Event 6, Event 11, Event 16, NoEvent,
Event 16, NoEvent, NoEvent, NoEvent]
accum_t3 :: [Event Int]
accum_t3 = take 16 $ embed (accumBy (\a d -> a + floor d) 0) accum_inp4
accum_t3r :: [Event Int]
accum_t3r =
[Event 1, NoEvent, NoEvent, NoEvent,
Event 3, NoEvent, NoEvent, NoEvent,
Event 6, Event 11, Event 16, NoEvent,
Event 16, NoEvent, NoEvent, NoEvent]
accum_accFiltFun1 a d =
let a' = a + floor d
in
if even a' then
(a', Just (a' > 10, a'))
else
(a', Nothing)
accum_t4 :: [Event (Bool,Int)]
accum_t4 = take 16 $ embed (accumFilter accum_accFiltFun1 0) accum_inp3
accum_t4r :: [Event (Bool,Int)]
accum_t4r =
[NoEvent, NoEvent, NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent,
Event (False,6), NoEvent, Event (True,16), NoEvent,
Event (True,16), NoEvent, NoEvent, NoEvent]
accum_accFiltFun2 a d =
let a' = a + floor d
in
if odd a' then
(a', Just (a' > 10, a'))
else
(a', Nothing)
accum_t5 :: [Event (Bool,Int)]
accum_t5 = take 16 $ embed (accumFilter accum_accFiltFun2 0) accum_inp4
accum_t5r :: [Event (Bool,Int)]
accum_t5r =
[Event (False,1), NoEvent, NoEvent, NoEvent,
Event (False,3), NoEvent, NoEvent, NoEvent,
NoEvent, Event (True,11), NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent]
-- This can be seen as the definition of accumFilter
accumFilter2 :: (c -> a -> (c, Maybe b)) -> c -> SF (Event a) (Event b)
accumFilter2 f c_init =
switch (never &&& attach c_init) afAux
where
afAux (c, a) =
case f c a of
(c', Nothing) -> switch (never &&& (notYet>>>attach c')) afAux
(c', Just b) -> switch (now b &&& (notYet>>>attach c')) afAux
attach :: b -> SF (Event a) (Event (b, a))
attach c = arr (fmap (\a -> (c, a)))
accum_t6 :: [Event (Bool,Int)]
accum_t6 = take 16 $ embed (accumFilter2 accum_accFiltFun1 0) accum_inp3
accum_t6r = accum_t4 -- Should agree!
accum_t7 :: [Event (Bool,Int)]
accum_t7 = take 16 $ embed (accumFilter2 accum_accFiltFun2 0) accum_inp4
accum_t7r = accum_t5 -- Should agree!
accum_t8 :: [Event Int]
accum_t8 = take 40 $ embed (repeatedly 1.0 1
>>> accumBy (+) 0
>>> accumBy (+) 0)
accum_inp5
accum_t8r :: [Event Int]
accum_t8r = [NoEvent, NoEvent, NoEvent, NoEvent,
Event 1, NoEvent, NoEvent, NoEvent,
Event 3, NoEvent, NoEvent, NoEvent,
Event 6, NoEvent, NoEvent, NoEvent,
Event 10, NoEvent, NoEvent, NoEvent,
Event 15, NoEvent, NoEvent, NoEvent,
Event 21, NoEvent, NoEvent, NoEvent,
Event 28, NoEvent, NoEvent, NoEvent,
Event 36, NoEvent, NoEvent, NoEvent,
Event 45, NoEvent, NoEvent, NoEvent]
accum_t9 :: [Int]
accum_t9 = take 40 $ embed (repeatedly 1.0 1
>>> accumBy (+) 0
>>> accumBy (+) 0
>>> hold 0)
accum_inp5
accum_t9r :: [Int]
accum_t9r = [0,0,0,0,1,1,1,1,3,3,3,3,6,6,6,6,10,10,10,10,15,15,15,15,
21,21,21,21,28,28,28,28,36,36,36,36,45,45,45,45]
accum_t10 :: [Int]
accum_t10 = take 40 $ embed (repeatedly 1.0 1
>>> accumBy (+) 0
>>> accumHoldBy (+) 0)
accum_inp5
accum_t10r :: [Int]
accum_t10r = accum_t9 -- Should agree!
accum_t11 :: [Int]
accum_t11 = take 40 $ embed (repeatedly 1.0 1
>>> accumBy (+) 0
>>> accumBy (+) 0
>>> dHold 0)
accum_inp5
accum_t11r :: [Int]
accum_t11r = [0,0,0,0,0,1,1,1,1,3,3,3,3,6,6,6,6,10,10,10,10,15,15,15,
15,21,21,21,21,28,28,28,28,36,36,36,36,45,45,45]
accum_t12 :: [Int]
accum_t12 = take 40 $ embed (repeatedly 1.0 1
>>> accumBy (+) 0
>>> dAccumHoldBy (+) 0)
accum_inp5
accum_t12r :: [Int]
accum_t12r = accum_t11 -- Should agree!
accum_accFiltFun3 :: Int -> Int -> (Int, Maybe Int)
accum_accFiltFun3 s a =
let s' = s + a
in
if odd s' then
(s', Just s')
else
(s', Nothing)
accum_t13 :: [Event Int]
accum_t13 = take 40 $ embed (repeatedly 1.0 1
>>> accumFilter accum_accFiltFun3 0
>>> accumBy (+) 0
>>> accumBy (+) 0)
accum_inp5
accum_t13r :: [Event Int]
accum_t13r = [NoEvent, NoEvent, NoEvent, NoEvent,
Event 1, NoEvent, NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent,
Event 5, NoEvent, NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent,
Event 14, NoEvent, NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent,
Event 30, NoEvent, NoEvent, NoEvent,
NoEvent, NoEvent, NoEvent, NoEvent,
Event 55, NoEvent, NoEvent, NoEvent]
accum_t14 :: [Int]
accum_t14 = take 40 $ embed (repeatedly 1.0 1
>>> accumFilter accum_accFiltFun3 0
>>> accumBy (+) 0
>>> accumBy (+) 0
>>> hold 0)
accum_inp5
accum_t14r :: [Int]
accum_t14r = [0,0,0,0,1,1,1,1,1,1,1,1,5,5,5,5,5,5,5,5,14,14,14,14,
14,14,14,14,30,30,30,30,30,30,30,30,55,55,55,55]
accum_t15 :: [Int]
accum_t15 = take 40 $ embed (repeatedly 1.0 1
>>> accumFilter accum_accFiltFun3 0
>>> accumBy (+) 0
>>> accumHoldBy (+) 0)
accum_inp5
accum_t15r :: [Int]
accum_t15r = accum_t14 -- Should agree!
accum_t16 :: [Int]
accum_t16 = take 40 $ embed (repeatedly 1.0 1
>>> accumFilter accum_accFiltFun3 0
>>> accumBy (+) 0
>>> accumBy (+) 0
>>> dHold 0)
accum_inp5
accum_t16r :: [Int]
accum_t16r = [0,0,0,0,0,1,1,1,1,1,1,1,1,5,5,5,5,5,5,5,5,14,14,14,
14,14,14,14,14,30,30,30,30,30,30,30,30,55,55,55]
accum_t17 :: [Int]
accum_t17 = take 40 $ embed (repeatedly 1.0 1
>>> accumFilter accum_accFiltFun3 0
>>> accumBy (+) 0
>>> dAccumHoldBy (+) 0)
accum_inp5
accum_t17r :: [Int]
accum_t17r = accum_t16 -- Should agree!
accum_trs =
[ accum_t0 == accum_t0r,
accum_t1 == accum_t1r,
accum_t2 == accum_t2r,
accum_t3 == accum_t3r,
accum_t4 == accum_t4r,
accum_t5 == accum_t5r,
accum_t6 == accum_t6r,
accum_t7 == accum_t7r,
accum_t8 == accum_t8r,
accum_t9 == accum_t9r,
accum_t10 == accum_t10r,
accum_t11 == accum_t11r,
accum_t12 == accum_t12r,
accum_t13 == accum_t13r,
accum_t14 == accum_t14r,
accum_t15 == accum_t15r,
accum_t16 == accum_t16r,
accum_t17 == accum_t17r
]
accum_tr = and accum_trs
accum_st0 :: Double
accum_st0 = testSFSpaceLeak 1000000
(repeatedly 1.0 1.0
>>> accumBy (+) 0.0
>>> hold (-99.99))
accum_st0r = 249999.0
accum_st1 :: Double
accum_st1 = testSFSpaceLeak 1000000
(arr dup
>>> first (repeatedly 1.0 1.0)
>>> arr (\(e,a) -> tag e a)
>>> accumFilter accumFun 0.0
>>> hold (-99.99))
where
accumFun c a | even (floor a) = (c+a, Just (c+a))
| otherwise = (c, Nothing)
accum_st1r = 6.249975e10
|
ivanperez-keera/Yampa
|
yampa/tests/TestsAccum.hs
|
bsd-3-clause
| 11,381 | 28 | 15 | 4,051 | 3,783 | 2,141 | 1,642 | 252 | 2 |
{-# LANGUAGE BangPatterns #-}
module Pygmalion.Metadata
( FileMetadataMap
, FileSet
, FileMetadata (..)
, readMetadata
, checkMetadata
, rehashVersion
, hasBuildCommand
, newSourceEntry
, newInclusionEntry
, getCommandInfoForFile
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Writer
import Data.List (foldl')
import Data.Maybe (mapMaybe)
import qualified Data.IntMap.Strict as Map
import qualified Data.IntSet as Set
import Pygmalion.Core
import Pygmalion.Database.IO
type FileMetadataMap = Map.IntMap FileMetadata
type FileSet = Set.IntSet
-- The 'version hash' is the hash of this file's last mtime combined
-- with the version hashes of all of its inclusions. In this way leaf
-- inclusions have version hashes that depend only on their last
-- mtime, while other inclusions depend on their own last mtime and
-- the last mtimes of all their dependencies.
-- The version hash neatly solves the problem of triggering indexing
-- for dependencies, even in the presence of cyclic dependencies.
-- When we send a DepChanged index request, we compute the version
-- hash we _want_ the file to have. Since we only send DepChanged
-- requests one level down the tree, this is easy. We include that
-- version hash in the DepChanged index request. If the file's version
-- hash doesn't match when we handle the request, then indexing
-- happens and we trigger DepChanged requests for its includers. We
-- also update the version hash so that even if we cycle around to
-- that file again, we now have the correct version hash and nothing happens.
-- Filesystem notifications still use mtime directly as usual.
data FileMetadata = FileMetadata
{ fmFile :: !SourceFile
, fmMTime :: !Time
, fmVersionHash :: !TimeHash
, fmIncluders :: !FileSet
, fmInclusions :: !FileSet
, fmDirty :: !Bool
, fmCommandInfo :: !(Maybe CommandInfo)
} deriving (Eq, Show)
hasBuildCommand :: FileMetadata -> Bool
hasBuildCommand (FileMetadata _ _ _ _ _ _ (Just _)) = True
hasBuildCommand _ = False
newSourceEntry :: CommandInfo -> Time -> FileMetadata
newSourceEntry !ci !mt = FileMetadata (ciSourceFile ci) mt 0 Set.empty Set.empty False (Just ci)
newInclusionEntry :: SourceFile -> Time -> FileMetadata
newInclusionEntry !sf !mt = FileMetadata sf mt 0 Set.empty Set.empty False Nothing
getCommandInfoForFile :: FileMetadataMap -> SourceFileHash -> Maybe CommandInfo
getCommandInfoForFile = go Set.empty
where
go visited files sfHash
| sfHash `Set.member` visited = Nothing
| otherwise =
case Map.lookup sfHash files of
Just m -> fmCommandInfo m
<|> getIncluderCI (sfHash `Set.insert` visited) files (fmIncluders m)
Nothing -> Nothing
getIncluderCI visited fs is = foldr (<|>) Nothing $ map (go visited fs) (Set.toList is)
-- Reads file metadata from the database and creates the data structure
-- we query at runtime. Note that it is not safe to run this at the
-- same time as the database manager since it accesses the database directly.
-- Since this is only executed once, at startup, this shouldn't be a problem.
readMetadata :: IO FileMetadataMap
readMetadata = withDB $ \h -> do
files <- getAllFiles h
foldM (addEntryFromDB h) Map.empty files
addEntryFromDB :: DBHandle -> FileMetadataMap -> (SourceFile, Time, TimeHash)
-> IO FileMetadataMap
addEntryFromDB h files (sf, mtime, versionHash) = do
let sfHash = stableHash sf
-- Gather information about inclusions.
includers <- getDirectIncluderHashes h sfHash
inclusions <- getDirectInclusionHashes h sfHash
-- Look up command info, if any.
mayCI <- getCommandInfo h sfHash
-- Create the complete entry.
let includerSet = Set.fromList includers
inclusionSet = Set.fromList inclusions
newEntry = FileMetadata sf mtime versionHash
includerSet inclusionSet
False mayCI
-- Merge it into the map.
return $ Map.insert sfHash newEntry files
rehashVersion :: FileMetadataMap -> Time -> FileMetadata -> TimeHash
rehashVersion !files !mtime !entry = foldl' stableHashWithSalt (stableHash mtime) (mtimes entry)
where
mTimeHash = stableHash . fmMTime
lookupFile = (`Map.lookup` files)
mtimes = map mTimeHash . mapMaybe lookupFile . Set.toList . fmInclusions
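-- A worked sketch of the computation above (the file names are hypothetical):
-- if A.hs directly includes B.hs and C.hs, then its new version hash is
--
-- > foldl' stableHashWithSalt (stableHash mtimeA)
-- >        [stableHash (fmMTime entryB), stableHash (fmMTime entryC)]
--
-- (in whatever order 'Set.toList' yields the inclusions), so touching B or C
-- changes the hash we expect A to have, which is what drives the DepChanged
-- propagation described at the top of this module.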
-- Returns a list of strings describing any inconsistencies which are
-- found in the given metadata.
checkMetadata :: FileMetadataMap -> [String]
checkMetadata files = execWriter $ go (Map.elems files)
where
go [] = return ()
go (entry : es) = do
let sfHash = stableHash (fmFile entry)
-- Verify includer consistency.
forM_ (Set.elems $ fmIncluders entry) $ \i ->
case Map.lookup i files of
Just includer -> unless (sfHash `Set.member` fmInclusions includer) $
nonSymmetricIncluderErr entry includer
-- checkLoops [entry] (Set.singleton sfHash) includer
Nothing -> nonexistentIncluderErr entry i
-- Verify inclusion consistency.
forM_ (Set.elems $ fmInclusions entry) $ \i ->
case Map.lookup i files of
Just inclusion -> unless (sfHash `Set.member` fmIncluders inclusion) $
nonSymmetricInclusionErr entry inclusion
Nothing -> nonexistentInclusionErr entry i
-- Verify CommandInfo consistency.
case fmCommandInfo entry of
Just ci -> when (ciSourceFile ci /= fmFile entry) $
inconsistentCommandInfoErr entry ci
Nothing -> return ()
go es
-- Commented out since loops turn out to be frequent, but this
-- information is useful for debugging.
{-
checkLoops !path !visited !entry =
if stableHash (fmSourceFile entry) `Set.member` visited
then inclusionLoopErr (entry : path) entry
else let newVisited = stableHash (fmSourceFile entry) `Set.insert` visited
in forM_ (Set.elems $ fmIncluders entry) $ \i ->
maybe (return ()) (checkLoops (entry : path) newVisited)
(Map.lookup i files)
inclusionLoopErr path entry =
tell [entryFile entry ++ " includes itself via path " ++
show (map entryFile path)]
-}
entryFile = unSourceFile . fmFile
nonSymmetricIncluderErr entry includer =
tell [entryFile entry ++ " has includer " ++ entryFile includer ++
" which doesn't list it as an inclusion."]
nonexistentIncluderErr entry includer =
tell [entryFile entry ++ " has nonexistent includer " ++
show includer ++ "."]
nonSymmetricInclusionErr entry inclusion =
tell [entryFile entry ++ " has inclusion " ++ entryFile inclusion ++
" which doesn't list it as an includer."]
nonexistentInclusionErr entry inclusion =
tell [entryFile entry ++ " has nonexistent inclusion " ++
show inclusion ++ "."]
inconsistentCommandInfoErr entry ci =
tell [entryFile entry ++ " has a build command for different file " ++
(unSourceFile $ ciSourceFile ci) ++ "."]
|
sethfowler/pygmalion
|
src/Pygmalion/Metadata.hs
|
bsd-3-clause
| 7,308 | 0 | 18 | 1,838 | 1,355 | 699 | 656 | 121 | 5 |
{-# OPTIONS_GHC -fno-warn-tabs #-}
{- $Id: TestsLoop.hs,v 1.6 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* Y A M P A *
* *
* Module: TestsLoop *
* Purpose: Test cases for loop *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module TestsLoop (loop_trs, loop_tr, loop_st0, loop_st0r,
loop_st1, loop_st1r) where
import FRP.Yampa
import TestsCommon
------------------------------------------------------------------------------
-- Test cases for loop
------------------------------------------------------------------------------
loop_acc :: SF (Double, Double) (Double, Double)
loop_acc = arr (\(x, y)->(x+y, x+y))
loop_t0 :: [Double]
loop_t0 = testSF1 (loop (constant (42.0, 43.0)))
loop_t0r =
[42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0,
42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0, 42.0,
42.0, 42.0, 42.0, 42.0, 42.0]
loop_t1 :: [Double]
loop_t1 = testSF1 (loop identity)
loop_t1r =
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,
10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
20.0, 21.0, 22.0, 23.0, 24.0]
loop_t2 :: [Time]
loop_t2 = testSF1 (loop (first localTime))
loop_t2r =
[0.0, 0.25, 0.5, 0.75, 1.0,
1.25, 1.5, 1.75, 2.0, 2.25,
2.5, 2.75, 3.0, 3.25, 3.5,
3.75, 4.0, 4.25, 4.5, 4.75,
5.0, 5.25, 5.5, 5.75, 6.0]
-- AC, 10-March-2002: I think this is the simplest test that will
-- fail with AltST.
loop_t3 :: [Time]
loop_t3 = testSF1 (loop (second (iPre 0)))
loop_t3r =
[0.0, 1.0, 2.0, 3.0, 4.0,
5.0, 6.0, 7.0, 8.0, 9.0,
10.0, 11.0, 12.0, 13.0, 14.0,
15.0, 16.0, 17.0, 18.0, 19.0,
20.0, 21.0, 22.0, 23.0, 24.0]
loop_t4 :: [Double]
loop_t4 = testSF1 (loop (second (iPre 0) >>> loop_acc))
loop_t4r =
[0.0, 1.0, 3.0, 6.0, 10.0,
15.0, 21.0, 28.0, 36.0, 45.0,
55.0, 66.0, 78.0, 91.0, 105.0,
120.0, 136.0, 153.0, 171.0, 190.0,
210.0, 231.0, 253.0, 276.0, 300.0]
loop_t5 :: [Double]
loop_t5 = testSF2 (loop (second (iPre 0) >>> loop_acc))
loop_t5r =
[0.0, 0.0, 0.0, 0.0, 0.0,
1.0, 2.0, 3.0, 4.0, 5.0,
7.0, 9.0, 11.0, 13.0, 15.0,
18.0, 21.0, 24.0, 27.0, 30.0,
34.0, 38.0, 42.0, 46.0, 50.0]
loop_t6 :: [Double]
loop_t6 = testSF1 (loop (iPre (0,0) >>> first localTime >>> loop_acc))
loop_t6r =
[0.0, 0.25, 0.75, 1.5, 2.5,
3.75, 5.25, 7.0, 9.0, 11.25,
13.75, 16.5, 19.5, 22.75, 26.25,
30.0, 34.0, 38.25, 42.75, 47.5,
52.5, 57.75, 63.25, 69.0, 75.0]
loop_t7 :: [Double]
loop_t7 = testSF1 (loop (loop_acc >>> second (iPre 0)))
loop_t7r = loop_t4r
loop_t8 :: [Double]
loop_t8 = testSF2 (loop (loop_acc >>> second (iPre 0)))
loop_t8r = loop_t5r
loop_t9 :: [Double]
loop_t9 = testSF1 (loop (first localTime >>> loop_acc >>> iPre (0,0)))
loop_t9r =
[0.0, 0.0, 0.25, 0.75, 1.5,
2.5, 3.75, 5.25, 7.0, 9.0,
11.25, 13.75, 16.5, 19.5, 22.75,
26.25, 30.0, 34.0, 38.25, 42.75,
47.5, 52.5, 57.75, 63.25, 69.0]
loop_t10 :: [Double]
loop_t10 = testSF1 (loop (loop_acc >>> second (iPre 0) >>> identity))
loop_t10r = loop_t4r
loop_t11 :: [Double]
loop_t11 = testSF2 (loop (loop_acc >>> second (iPre 0) >>> identity))
loop_t11r = loop_t5r
loop_t12 :: [Double]
loop_t12 = testSF1 (loop (first localTime
>>> loop_acc
>>> iPre (0,0)
>>> identity))
loop_t12r = loop_t9r
-- Computation of approximation to exp 0, exp 1, ..., exp 5 by integration.
-- Values as given by using exp directly:
-- 1.0, 2.71828, 7.38906, 20.0855, 54.5981, 148.413
loop_t13 :: [Double]
loop_t13 =
let
es = embed (loop (second integral >>> arr (\(_, x) -> (x + 1, x + 1))))
(deltaEncode 0.001 (repeat ()))
in
[es!!0, es!!1000, es!!2000, es!!3000, es!!4000, es!!5000]
loop_t13r = [1.0,2.71692, 7.38167, 20.05544, 54.48911, 148.04276]
loop_t14 :: [Double]
loop_t14 =
let
es = embed (loop (arr (\(_, x) -> (x + 1, x + 1)) >>> second integral))
(deltaEncode 0.001 (repeat ()))
in
[es!!0, es!!1000, es!!2000, es!!3000, es!!4000, es!!5000]
loop_t14r = loop_t13r
loop_t15 :: [Double]
loop_t15 =
let
es = embed (loop (arr (\(_, x) -> (x + 1, x + 1))
>>> second integral
>>> identity))
(deltaEncode 0.001 (repeat ()))
in
[es!!0, es!!1000, es!!2000, es!!3000, es!!4000, es!!5000]
loop_t15r = loop_t13r
-- A generator for factorial: The least-fixed point of this function is
-- the factorial function.
factGen f n = if (n==0) then 1 else n*f(n-1)
-- Can we use loop to construct a fixed point?
loop_t16 :: [Int]
loop_t16 = testSF1 (loop $ arr (\ (_,f) -> (f 4,factGen f)))
loop_t16r =
[24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-- A simple loop test taken from MiniYampa:
-- This results in pulling on the fed-back output during evaluation, because
-- switch is strict in its input sample:
loop_t17 :: [Double]
loop_t17 = testSF1 (loop $ second $ (switch identity (const (arr fst))) >>> arr (\x -> (x,noEvent)) >>> (iPre (25, noEvent)))
loop_t17r =
[0.0,1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0,14.0,15.0,
16.0,17.0,18.0,19.0,20.0,21.0,22.0,23.0,24.0]
loop_trs =
[ loop_t0 ~= loop_t0r,
loop_t1 ~= loop_t1r,
loop_t2 ~= loop_t2r,
loop_t3 ~= loop_t3r,
loop_t4 ~= loop_t4r,
loop_t5 ~= loop_t5r,
loop_t6 ~= loop_t6r,
loop_t7 ~= loop_t7r,
loop_t8 ~= loop_t8r,
loop_t9 ~= loop_t9r,
loop_t10 ~= loop_t10r,
loop_t11 ~= loop_t11r,
loop_t12 ~= loop_t12r,
loop_t13 ~= loop_t13r,
loop_t14 ~= loop_t14r,
loop_t15 ~= loop_t15r,
loop_t16 ~= loop_t16r,
loop_t17 ~= loop_t17r
]
loop_tr = and loop_trs
loop_st0 = testSFSpaceLeak 2000000
(loop (second (iPre 0) >>> loop_acc))
loop_st0r = 9.999995e11
-- A simple loop test taken from MiniYampa:
-- This results in pulling on the fed-back output during evaluation, because
-- switch is strict in its input sample:
loop_st1 :: Double
loop_st1 = testSFSpaceLeak 2000000
(loop $ second $ (switch identity (const (arr fst))) >>> arr (\x -> (x + x + x + x + x + x + x,noEvent)) >>> (iPre (25, noEvent)))
loop_st1r = 999999.5
|
ivanperez-keera/Yampa
|
yampa/tests/TestsLoop.hs
|
bsd-3-clause
| 6,914 | 10 | 19 | 1,931 | 2,339 | 1,382 | 957 | 145 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- | Tree interface using the @qtree@ package.
-- An example of usage is provided in the /examples/ directory of
-- the source distribution.
module Text.LaTeX.Packages.Trees.Qtree (
-- * Tree re-export
module Text.LaTeX.Packages.Trees
-- * Qtree package
, qtree
-- * Tree to LaTeX rendering
, tree
, rendertree
) where
import Text.LaTeX.Base
import Text.LaTeX.Base.Class
import Text.LaTeX.Packages.Trees
--
import Data.List (intersperse)
-- | The 'qtree' package.
qtree :: PackageName
qtree = "qtree"
tree_ :: LaTeXC l => (a -> l) -> Tree a -> l
tree_ f (Leaf x) = braces $ f x
tree_ f (Node mx ts) =
mconcat [ "["
, maybe mempty (("." <>) . braces . f) mx
, " "
, mconcat $ intersperse " " $ fmap (tree_ f) ts
, " ]"
]
-- | Given a function from node values to @LaTeX@ values, you can create a
-- @LaTeX@ tree from a Haskell tree. The function specifies how each node value is rendered.
tree :: LaTeXC l => (a -> l) -> Tree a -> l
tree f t = commS "Tree" <> " " <> tree_ f t
-- | Instance defined in "Text.LaTeX.Packages.Trees.Qtree".
instance Texy a => Texy (Tree a) where
texy = tree texy
-- | This function works like 'tree', but uses 'render' as the rendering function.
rendertree :: (Render a, LaTeXC l) => Tree a -> l
rendertree = tree (raw . protectText . render)
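-- A minimal usage sketch (the example tree is invented, and it assumes the
-- usual 'Texy' instance for 'Integer' from HaTeX):
--
-- > example :: LaTeX
-- > example = texy (Node (Just 1) [Leaf 2, Leaf 3] :: Tree Integer)
--
-- which should render roughly as @\Tree [.{1} {2} {3} ]@: a root labelled 1
-- with two leaf children.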
|
dmcclean/HaTeX
|
Text/LaTeX/Packages/Trees/Qtree.hs
|
bsd-3-clause
| 1,374 | 0 | 11 | 332 | 345 | 192 | 153 | 26 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module CSVParsers (
Language
, Dic
, CSVFlashcard (..)
, CSVChapter (..)
, toCSVChapters
, DicOf
, CSVCourseIntro (..)
, CSVCourseIntroFlashcard (..)
, toCSVCourseIntro
, CSVLanguage (..)
, CSVCourseMeta (..)
) where
import qualified Data.Map as M
import Data.List.Split (splitOn, splitWhen)
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as BL
import Data.Csv
import qualified Data.Vector as V
import Data.Either
import Text.Read (readEither)
-- Course Content
type Language = String
type Dic = M.Map Language String
data CSVFlashcard = CSVFlashcard {
csvFCIndex :: Integer
, csvFCDic :: Dic
} deriving Show
data CSVChapter = CSVChapter {
csvChapterIndex :: Integer
, csvChapterDic :: Dic
, csvChapterCards :: [CSVFlashcard]
} deriving Show
csvVectorToMatrix :: V.Vector [String] -> [[String]]
csvVectorToMatrix = V.toList
toCSVChapter :: [[String]] -> Integer -> CSVChapter
toCSVChapter matrix index = CSVChapter index (toDic $ drop 1 $ head matrix) (map toFlashcard $ drop 1 matrix)
toFlashcard :: [String] -> CSVFlashcard
toFlashcard list = CSVFlashcard (read $ head list) (toDic $ drop 1 list)
toDic :: [String] -> M.Map Language String
toDic list = M.fromList $ ["en","ar","fr","de","ru","es"] `zip` list
csvToChapters :: V.Vector [String] -> [CSVChapter]
csvToChapters file =
let csv = csvVectorToMatrix file
schapters = drop 1 $ splitWhen ((== "") . head) csv
in zipWith toCSVChapter schapters [1 ..]
-- exported
toCSVChapters :: BL.ByteString -> Either String [CSVChapter]
toCSVChapters csvData = csvToChapters <$> decode NoHeader csvData
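-- A sketch of the CSV layout the chapter parsers above expect (the cell
-- contents are invented; only the shape follows from the code). Rows whose
-- first cell is empty separate chapters, everything before the first such row
-- is ignored, each chapter starts with a header row, and every remaining row
-- is one flashcard indexed by its first cell, with one column per language in
-- the order en, ar, fr, de, ru, es:
--
-- > some,preamble,rows,that,are,ignored,...
-- > ,,,,,,
-- > chapter-1,title-en,title-ar,title-fr,title-de,title-ru,title-es
-- > 1,hello-en,hello-ar,hello-fr,hello-de,hello-ru,hello-es
-- > 2,bye-en,bye-ar,bye-fr,bye-de,bye-ru,bye-es
-- > ,,,,,,
-- > chapter-2,...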
-- Descriptions
data CSVLanguage = Arabic | English | Spanish | German | French | Russian deriving (Read, Show, Eq, Ord, Enum)
data CSVCourseMeta = CSVCourseMeta {
csvCourseMetaLanguage :: CSVLanguage
, csvCourseMetaId1 :: Integer
, csvCourseMetaId2 :: Integer
} deriving Show
type DicOf a = M.Map Language a
data CSVCourseIntro = CSVCourseIntro {
csvCourseTitle1 :: Dic
, csvCourseTitle2 :: Dic
, csvCourseIntroTitle :: Dic
, csvCourseIntroFC1 :: CSVCourseIntroFlashcard String
, csvCourseIntroFC2 :: CSVCourseIntroFlashcard [String]
, csvCourseMeta :: CSVCourseMeta
} deriving Show
data CSVCourseIntroFlashcard a = CSVCourseIntroFlashcard {
csvCiFcQuestion :: Dic
, csvCiFcShortAns :: Dic
, csvCiFcLongAns :: DicOf a
} deriving Show
emptyCsvCourseIntroFlashcard = CSVCourseIntroFlashcard {
csvCiFcQuestion = M.empty
, csvCiFcShortAns = M.empty
, csvCiFcLongAns = M.empty
}
toCSVCourseMeta :: String -> Either String CSVCourseMeta
toCSVCourseMeta col = do
(id1, id2, lang) <- readEither col :: Either String (Integer, Integer, CSVLanguage)
return CSVCourseMeta {
csvCourseMetaLanguage = lang
, csvCourseMetaId1 = id1
, csvCourseMetaId2 = id2
}
csvVectorToCourseIntro :: [String] -> CSVCourseIntro -> V.Vector [String] -> CSVCourseIntro
csvVectorToCourseIntro = go where
fieldMap v = M.fromList [
("1-title", \ val -> v {csvCourseTitle1 = val} ) -- courseTitle
, ("2-title", \ val -> v {csvCourseTitle2 = val} )
, ("intro_title", \ val -> v {csvCourseIntroTitle = val})
, ("intro_fc-1_q", \ val -> updateFC1 $ \ fc -> fc {csvCiFcQuestion = val})
, ("intro_fc-1_a_short", \ val -> updateFC1 $ \ fc -> fc {csvCiFcShortAns = val})
, ("intro_fc-1_a_long", \ val -> updateFC1 $ \ fc -> fc {csvCiFcLongAns = val})
, ("intro_fc-2_q", \ val -> updateFC2 $ \ fc -> fc {csvCiFcQuestion = val})
, ("intro_fc-2_a_short", \ val -> updateFC2 $ \ fc -> fc {csvCiFcShortAns = val})
, ("intro_fc-2_a_long", \ val -> updateFC2 $ \ fc -> fc {csvCiFcLongAns = M.map (map (T.unpack . T.strip . T.pack) . filter ((>1) . length) . splitOn "*") val })
] where
updateFC1 f =
let fc = csvCourseIntroFC1 v
fc' = f fc
in
v { csvCourseIntroFC1 = fc' }
updateFC2 f =
let fc = csvCourseIntroFC2 v
fc' = f fc
in
v { csvCourseIntroFC2 = fc' }
go langs = V.foldl $ \ intro (xs :: [String]) -> do
let f = M.lookup (xs !! 1) (fieldMap intro)
maybe intro ($ M.fromList $ langs `zip` drop 2 xs) f
-- exported
toCSVCourseIntro :: BL.ByteString -> Either String CSVCourseIntro
toCSVCourseIntro csvData = do
vs <- V.drop 1 <$> decode NoHeader csvData
meta <- toCSVCourseMeta $ V.head vs !! 1 -- first cell is a tuple (curseId, courseId, lang :: CSVLanguage)
let langs = drop 2 (V.head vs) -- list of langs defined in the CSV
let emptyCsvCourseIntro = CSVCourseIntro {
csvCourseTitle1 = M.empty
, csvCourseTitle2 = M.empty
, csvCourseIntroTitle = M.empty
, csvCourseIntroFC1 = emptyCsvCourseIntroFlashcard
, csvCourseIntroFC2 = emptyCsvCourseIntroFlashcard
, csvCourseMeta = meta
}
return $ csvVectorToCourseIntro langs emptyCsvCourseIntro vs
|
homam/babelbay-ma-parseit
|
src/CSVParsers.hs
|
bsd-3-clause
| 4,967 | 0 | 23 | 1,035 | 1,506 | 860 | 646 | 114 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- | Code snippets used by the JS backends.
module Futhark.CodeGen.RTS.JavaScript
( serverJs,
valuesJs,
wrapperclassesJs,
)
where
import Data.FileEmbed
import qualified Data.Text as T
-- | @rts/javascript/server.js@
serverJs :: T.Text
serverJs = $(embedStringFile "rts/javascript/server.js")
-- | @rts/javascript/values.js@
valuesJs :: T.Text
valuesJs = $(embedStringFile "rts/javascript/values.js")
-- | @rts/javascript/wrapperclasses.js@
wrapperclassesJs :: T.Text
wrapperclassesJs = $(embedStringFile "rts/javascript/wrapperclasses.js")
|
HIPERFIT/futhark
|
src/Futhark/CodeGen/RTS/JavaScript.hs
|
isc
| 590 | 0 | 7 | 75 | 95 | 58 | 37 | 13 | 1 |
{-# LANGUAGE NPlusKPatterns #-}
module Chapter2.Section2.Tut9 where
import Prelude
import Data.List(genericLength)
import Data.Char(isDigit)
import Unsafe.Coerce()
import Test.QuickCheck()
--import Test.QuickCheck(arbitrary)
--import Test.QuickCheck.Property ((==>))
--import MonadUtils ((<*>), (<$>))
--import Test.QuickCheck.Arbitrary (Arbitrary)
data Nat = Zero
| Succ Nat
deriving Show
{- Ex.0 - Select all possible total and terminating implementations of a
conversion function natToInteger :: Nat -> Integer that converts
any non-bottom, non-partial, finite natural number (note: 0 is a
natural number according to this definition), into the corresponding
Integer value.
Test with:
natToInteger Zero ==> 0
natToInteger (Succ Zero) ==> 1
natToInteger (Succ (Succ Zero)) ==> 2
etc
-}
natToInteger :: Nat -> Integer
{- RIGHT --}
natToInteger Zero = 0
natToInteger (Succ n) = natToInteger n + 1
{--}
{- RIGHT
natToInteger (Succ n) = natToInteger n + 1
natToInteger Zero = 0
-}
{- WRONG
natToInteger n = natToInteger n
-}
{- RIGHT
natToInteger (Succ n) = 1 + natToInteger n
natToInteger Zero = 0
-}
{- WRONG - converts all to 1
natToInteger Zero = 1
natToInteger (Succ n) = (1 + natToInteger n) - 1
-}
{- RIGHT
natToInteger = head . m
where m Zero = [0]
m (Succ n) = [sum [x | x <- (1 : m n)]]
-}
{- RIGHT
--natToInteger :: Nat -> Integer
natToInteger = \ n -> genericLength [c | c <- show n, c == 'S']
-}
-- WRONG
-- Warning: Does not type-check with Integer, but does with Int
-- https://www.haskell.org/hoogle/?hoogle=length
--natToInteger :: Nat -> Integer
--natToInteger :: Nat -> Int
--natToInteger = \ n -> length [c | c <- show n, c == 'S']
{- Ex.1 - Select all possible total and terminating implementations of
a conversion function integerToNat :: Integer -> Nat that
converts any non-bottom, non-partial, finite Integer value >= 0,
into the corresponding Nat value.
Note: make sure to enable n+k-patterns, if you don't know yet
how to do that, Google is your friend.
    Insert {-# LANGUAGE NPlusKPatterns #-} above the module declaration.
Test with:
-}
integerToNat :: Integer -> Nat
{- RIGHT - non exhaustive pattern matches -}
integerToNat 0 = Zero
integerToNat (n+1) = Succ (integerToNat n)
{- -}
{-
integerToNat 0 = Zero
integerToNat _ = Succ (Succ Zero)
integerToNat (n+1) = Succ (integerToNat n)
-}
{- WRONG - case of 0 should be Zero (to return 0) instead of Succ Zero
integerToNat 0 = Succ Zero
integerToNat n = (Succ (integerToNat n))
-}
--http://www.cse.chalmers.se/~rjmh/QuickCheck/manual.html
{-
integerToNat = n
where n 0 = Succ Zero
n > 0 = (Succ (integerToNat n))
n < 0 = error "positive n only"
_ < 0 = error "you've forgotten to include a pattern match"
-}
--integerToNat n = if n < 0 then error "positive n only" else (Succ (integerToNat n))
-- filter out invalid data before using using ==> implication function
-- if negative integers exist for n, then the first integer n to consider
-- should be 0
-- check if each non-negative integer n is equal to 0
-- but integerToNat should have type Integer -> Nat
-- (so this should not even typecheck, unless we made Nat and instance of Num)
-- i.e. integerToNat 1 == Succ Zero (is not the same as 0 == Zero)
-- To use QuickCheck with Nats, must first make them instances of Arbitrary
-- https://stackoverflow.com/questions/5134259/quickcheck-defining-arbitrary-instance-in-terms-of-other-arbitraries
-- alternatively auto-derive the instances (with TemplateHaskell and auto-deriving)
-- https://stackoverflow.com/questions/16440208/how-to-generate-arbitrary-instances-of-a-simple-type-for-quickcheck
-- Test with: quickCheck (\n -> natToInteger (integerToNat n) == n)
{-
data C = C
instance Arbitrary C
where arbitrary = do a <- 1
b <- 2
return (C a b)
-}
--prop_integerToNat' n = not (n < 0) ==> 0 == n
{- WRONG
integerToNat n
= product [(unsafeCoerce c) :: Integer | c <- show n]
-}
{- WRONG
integerToNat n = integerToNat n
-}
{-
*Chapter2.Section2.Tut9> import Test.QuickCheck
*Chapter2.Section2.Tut9 Test.QuickCheck> quickCheck (\n -> natToInteger (integerToNat n) == n)
(0 tests)
--integerToNat n = integerToNat n
integerToNat n = if n < 0 then error "positive n only" else integerToNat n
-}
{- RIGHT - non exhaustive pattern matches
integerToNat (n+1) = Succ (integerToNat n)
integerToNat 0 = Zero
-}
{- RIGHT - non exhaustive pattern matches
integerToNat (n+1) = let m = integerToNat n in Succ m
integerToNat 0 = Zero
-}
{- WRONG - returns not in scope
integerToNat = head . m
where {
; m 0 = [0]
; m (n + 1) = [sum [x | x <- (1: m n)]]
}
-}
{- WRONG - no instances of Num Nat arising from use of genericLength
--integerToNat :: Integer -> Nat
integerToNat = \ n -> genericLength [c | c <- show n, isDigit c]
-}
{- Ex.2 -
Select all possible total and terminating implementations of an
addition function add :: Nat -> Nat -> Nat that adds two non-bottom,
non-partial, finite natural numbers m and n, such that
natToInteger (add m n) = natToInteger m + natToInteger n.
Test with:
natToInteger(add(Zero) (Zero)) ==> 0
natToInteger(add(Zero) (Succ Zero)) ==> 1
natToInteger(add(Succ Zero) (Zero)) ==> 1
natToInteger(add(Succ (Succ Zero)) (Zero)) ==> 2
natToInteger(add(Zero) (Succ (Succ Zero))) ==> 2
natToInteger(add(Succ (Succ Zero)) (Succ (Succ Zero))) ==> 4
-}
add :: Nat -> Nat -> Nat
{- RIGHT
add Zero n = n
add (Succ m) n = Succ (add n m)
-}
{- RIGHT
add (Succ m) n = Succ (add n m)
add Zero n = n
-}
{- WRONG
add Zero n = Zero
add (Succ m) n = Succ (add m n)
-}
{- WRONG
add (Succ m) n = Succ (add m n)
add Zero n = Zero
-}
{- WRONG
add n Zero = Zero
add n (Succ m) = Succ (add n m)
-}
{- WRONG
add n (Succ m) = Succ (add n m)
add n Zero = Zero
-}
{- RIGHT
add n Zero = n
add n (Succ m) = Succ (add m n)
-}
{- RIGHT -}
add n (Succ m) = Succ (add m n)
add n Zero = n
{- Ex.3 -
Using recursion, and any correct implementation of the function add
from the previous exercise, select from the following options, a total
and terminating multiplication function mult :: Nat -> Nat -> Nat that
multiplies two non-bottom, non-partial, finite natural numbers m and n,
such that natToInteger (mult m n) = natToInteger m * natToInteger n.
Test with:
natToInteger(mult(Zero) (Zero)) ==> 0 * 0 = 0
natToInteger(mult(Zero) (Succ Zero)) ==> 0 * 1 =0
natToInteger(mult(Succ Zero) (Zero)) ==> 1 * 0 = 0
natToInteger(mult(Succ (Succ Zero)) (Zero)) ==> 2 * 0 = 0
natToInteger(mult(Zero) (Succ (Succ Zero))) ==> 0 * 2 = 0
natToInteger(mult(Succ (Succ Zero)) (Succ (Succ Zero))) ==> 2 * 2 = 4
natToInteger(mult(Succ (Succ (Succ Zero))) (Succ (Succ Zero))) ==> 3 * 2 = 6
-}
mult :: Nat -> Nat -> Nat
{- WRONG
mult Zero Zero = Zero
mult m (Succ n) = add m (mult m n)
-}
{- RIGHT -}
mult m Zero = Zero
mult m (Succ n) = add m (mult m n)
{--}
{- WRONG
mult m Zero = Zero
mult m (Succ n) = add n (mult m n)
-}
{- WRONG
mult m Zero = Zero
mult m n = add m (mult m (Succ n))
-}
{- Ex.4 -
The standard library defines the following algebraic data type to represent
the possible comparisons between two values.
together with a function:
compare :: (Ord a) => a -> a -> Ordering
that decides if a value
x :: Ord a => a -- is less than (LT), equal to (EQ), or greater than (GT) another value
y :: Ord a => a
Given the following data type for trees with Integers at the leafs and inside
the nodes:
data Tree = Leaf Integer
| Node Tree Integer Tree
Select all correct implementations of the function
occurs :: Integer -> Tree -> Bool
that decides whether the given Integer occurs in the given Tree. The Tree parameter
is a finite, non-partial, non-bottom binary search tree.
Note: If you haven't encountered case expressions before, Google is your friend.
Example Binary Tree
t :: Tree
t = Node (Node (Leaf 1) 3 (Leaf 4)) 5 (Node (Leaf 6) 7 (Leaf 9))
5
/ \
3 7
/ \ / \
1 4 6 9
Bad Example:
Tree 4 3 5
-- Invalid binary search tree since 3 should be to the left of 4
testtree = Node (Leaf 4) 3 (Leaf 5)
WRONG (not a binary search tree)
t = Node (Node (Node (Leaf 1) 50 (Leaf 2)) 80 (Node (Leaf 3) 51 (Leaf 4))) 100 (Node (Node (Leaf 5) 60 (Leaf 6)) 90 (Node (Leaf 7) 61 (Leaf 8)))
Test with:
occurs 3 (Node (Leaf 3) 4 (Leaf 5))
-}
{-
data Ordering = LT
| EQ
| GT
compare :: (Ord a) => a -> a -> Ordering
-}
{-
data Tree = Leaf Integer
| Node Tree Integer Tree
occurs :: Integer -> Tree -> Bool
-}
{- RIGHT
occurs m (Leaf n) = m == n
occurs m (Node l n r)
= case compare m n of
LT -> occurs m l
EQ -> True
GT -> occurs m r
-}
{- WRONG
occurs m (Leaf n) = m == n
occurs m (Node l n r)
= case compare m n of
LT -> occurs m r
EQ -> True
GT -> occurs m l
-}
{- WRONG
occurs m (Leaf n) = compare m n
occurs m (Node l n r)
= case compare m n of
LT -> occurs m l
EQ -> True
GT -> occurs m r
-}
{- WRONG
occurs m (Leaf n) = m == n
occurs m (Node l n r)
= case compare m n of
LT -> occurs m l
EQ -> False
GT -> occurs m r
-}
{- RIGHT
occurs m (Leaf n) = m == n
occurs m (Node l n r)
| m == n = True
| m < n = occurs m l
| otherwise = occurs m r
-}
{- WRONG (the comparisons are swapped, so values in the wrong subtree are missed)
occurs m (Leaf n) = m == n
occurs m (Node l n r)
| m == n = True
| m > n = occurs m l
| otherwise = occurs m r
-}
{- WRONG
occurs m n = m == n
occurs m (Node l n r)
| m == n = True
| m < n = occurs m l
| otherwise = occurs m r
-}
{- WRONG
occurs m n = m == n
occurs m (Node l n r)
| m == n = False
| m < n = occurs m r
| otherwise = occurs m l
-}
{- Ex.5 -
Consider the following type of binary trees, with only values at the leafs:
data Tree = Leaf Integer
| Node Tree Tree
We say that a tree is balanced if the number of leaves in the left and right
subtree of every node differs by at most one, with leaves themselves being
trivially balanced. Which option correctly implements
balanced :: Tree -> Bool
that decides if a finite, non-partial, non-bottom binary tree is balanced or not?
Test with:
    leaves (Node (Leaf 3) (Leaf 7)) ==> 2 -- with the leaf-counting definition marked RIGHT below
balanced (Node (Leaf 3) (Leaf 5))
balanced (Node (Leaf 3) (Leaf 6))
-}
data Tree = Leaf Integer
| Node Tree Tree deriving Show
balanced :: Tree -> Bool
{-
leaves (Leaf x) = x
leaves (Node l r) = leaves l + leaves r
balanced (Leaf _) = True
balanced (Node l r)
= abs (leaves l - leaves r) <= 1 || balanced l || balanced r
-}
{-
leaves (Leaf _) = True
leaves (Node l r) = leaves l + leaves r
balanced (Leaf _) = True
balanced (Node l r)
= abs (leaves l - leaves r) <= 1
-}
{- WRONG
leaves (Leaf _) = True
leaves (Node l r) = leaves l + leaves r
balanced (Leaf _) = True
balanced (Node l r)
= abs (leaves l + leaves r) <= 1
-}
{- RIGHT -}
leaves (Leaf _) = 1
leaves (Node l r) = leaves l + leaves r
balanced (Leaf _) = True
balanced (Node l r)
= abs (leaves l - leaves r) <= 1 && balanced l && balanced r
{- Ex.6 -
Given the definition of binary trees from the previous exercise,
define a function:
balance :: [Integer] -> Tree
that converts a finite, non-empty, non-partial,
non-bottom list of non-bottom integers into a balanced tree.
Note:
use
data Tree = Leaf Integer | Node Tree Tree deriving Show
where deriving Show makes Tree an instance of Show, which
lets ghci pretty-print expressions.
Test with:
balance [1,2] ==> Node (Leaf 1) (Leaf 2)
-}
balance :: [Integer] -> Tree
{- RIGHT -}
halve xs = splitAt (length xs `div` 2) xs
balance [x] = Leaf x
balance xs = Node (balance ys) (balance zs)
where (ys, zs) = halve xs
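-- Additional check (not in the original file; illustrative):
-- balance [1,2,3] ==> Node (Leaf 1) (Node (Leaf 2) (Leaf 3))
-- since halve [1,2,3] == ([1],[2,3])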
{- WRONG
halve xs = splitAt (length xs / 2) xs
balance [x] = Leaf x
balance xs = Node (balance ys) (balance zs)
where (ys, zs) = halve xs
-}
{- WRONG
-- Couldn't match expected type ‘[Integer]’
with actual type ‘([Integer], [Integer])’
halve xs = splitAt (length xs `div` 2) xs
balance [x] = Leaf x
balance xs = Node ys zs
where (ys, zs) = balance (halve xs)
-}
{- WRONG
halve xs = splitAt (length xs `div` 2) xs
balance x = Leaf x
balance xs = Node (balance ys) (balance zs)
where (ys, zs) = halve xs
-}
{- Ex.9 -
Given the algebraic data type
data Maybe a = Nothing | Just a
, pick the correct instance declaration that shows that the type constructor
"Maybe" is a Monad. Assume that all values of type Maybe a are finite, non-partial,
and non-bottom. You don't have to prove that the Monad laws hold, but use
your common sense when picking the right answer.
-}
{- RIGHT
instance Monad Maybe where
return x = Just x
Nothing >>= _ = Nothing
(Just x) >>= f = f x
-}
{- Ex.10 -
Given the list type from the standard Prelude, pick the correct instance
declaration that shows that the type constructor [] is a Monad. Assume that
all values of type [a] are finite, non-partial, and non-bottom. You don't
have to prove that the Monad laws hold, but use your common sense when
picking the right answer.
Note: :t [] ==> [] :: [t]
:t concat ==> concat :: [[a]] -> [a]
:t map ==> map :: (a -> b) -> [a] -> [b]
-}
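{- Not in the original file: a plausible correct option, sketched from the
   Note's hints about concat and map (the Applicative superclass is omitted,
   as in the Maybe answer above; the quiz's exact wording may differ):
instance Monad [] where
  return x = [x]
  xs >>= f = concat (map f xs)
-}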
{- Ex.11 -
A monoid is an algebraic structure over a type "a" with a single associative
binary operation
(<>) :: Monoid a => a -> a -> a
and a neutral element, assuming bottoms don't exist.
mempty :: Monoid a => a
class Monoid a where
mempty :: a
(<>) :: a -> a -> a
Note: this class declaration does not enforce the fact that (<>) is
associative, nor that mempty is the neutral element.
Complete the following instance declaration, assuming values of type a are
non-bottom. You don't need to prove that (<>) is associative and that mempty
is a neutral element, but use your common sense when picking the correct
implementation.
(<>) is "mappend"
-}
{- RIGHT
instance Monoid [a] where
mempty = []
(<>) = (++)
-}
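-- Illustrative uses of this instance (not in the original file):
-- [1,2] <> [3] ==> [1,2,3]
-- mempty <> xs ==> xs and xs <> mempty ==> xs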
{- Ex.12 -
A functor is a type constructor with an operation
fmap :: Functor f => (a -> b) -> f a -> f b
such that, ignoring the existence of bottoms:
(fmap f) . (fmap g) = fmap (f . g)
fmap id = id
class Functor f where
fmap :: (a -> b) -> f a -> f b
Note: this class declaration does not enforce the fact that fmap satisfies the
two laws mentioned above. A value of type Functor f => f a can be considered as a
"collection" with elements of type "a" and shape "f" (for instance "f" can be [] or Maybe).
Complete the following instance declaration, assuming bottoms don't exist. You don't
need to prove that the two laws hold, but use your common sense when picking the
correct implementation.
See http://en.wikibooks.org/wiki/Haskell/The_Functor_class
-}
{- RIGHT
instance Functor Maybe where
fmap _ Nothing = Nothing
fmap f (Just a) = Just (f a)
-}
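-- Illustrative uses of this instance (not in the original file):
-- fmap (+1) (Just 2) ==> Just 3
-- fmap (+1) Nothing ==> Nothing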
{- Ex.13 -
Given a type constructor "f" such that "Functor f", and an element type "a" such that "Monoid a",
we can define a
function fold :: (Foldable f, Monoid a) => f a -> a
that folds the values in the argument "collection" using the monoid's neutral element "mempty"
and the operation <>.
Assuming that the "collection" is finite, non-partial, and non-bottom, it doesn't matter
from which direction the fold operates because the <> operator is "associative". When the
"collection" is empty the result of folding is the neutral element "mempty".
The module "Data.Foldable" defines the following type class for folding functors with monoid
elements:
class (Functor f) => Foldable f where
fold :: (Monoid m) => f m -> m
Assuming bottom does not exist, complete the following instance declaration for Foldable []:
-}
{- RIGHT
instance Foldable [] where
fold = foldr (<>) mempty
-}
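-- Illustrative use (not in the original file), relying on the list Monoid
-- instance from Ex.11:
-- fold ["ab", "cd", ""] ==> "abcd"
-- fold ([] :: [[Int]]) ==> []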
|
ltfschoen/HelloHaskell
|
src/Chapter2/Section2/Tut9.hs
|
mit
| 17,359 | 1 | 11 | 5,223 | 562 | 336 | 226 | 35 | 1 |
module Sproxy.Application.State
( decode
, encode
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base64 as Base64
import Data.ByteString.Lazy (fromStrict, toStrict)
import Data.Digest.Pure.SHA (bytestringDigest, hmacSha1)
import qualified Data.Serialize as DS
-- FIXME: Compress / decompress ?
encode :: ByteString -> ByteString -> ByteString
encode key payload = Base64.encode . DS.encode $ (payload, digest key payload)
decode :: ByteString -> ByteString -> Either String ByteString
decode key d = do
(payload, dgst) <- DS.decode =<< Base64.decode d
if dgst /= digest key payload
then Left "junk"
else Right payload
digest :: ByteString -> ByteString -> ByteString
digest key payload =
toStrict . bytestringDigest $ hmacSha1 (fromStrict key) (fromStrict payload)
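-- Round-trip sketch (not in the original module; illustrative):
-- decode key (encode key payload) == Right payload,
-- while a tampered or foreign blob yields Left "junk" (or a Base64/decoding error).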
|
ip1981/sproxy2
|
src/Sproxy/Application/State.hs
|
mit
| 819 | 0 | 10 | 131 | 244 | 135 | 109 | 19 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
import Prelude hiding (filter)
import System.Environment (getArgs)
import Vision.Detector.Edge (canny)
import Vision.Image
import Vision.Image.Storage.DevIL (Autodetect (..), load, save)
-- Detects the edges of the image with Canny's edge detector.
--
-- usage: ./canny input.png output.png
main :: IO ()
main = do
[input, output] <- getArgs
-- Loads the image. Automatically infers the format.
io <- load Autodetect input
case io of
Left err -> do
putStrLn "Unable to load the image:"
print err
Right (grey :: Grey) -> do
let blurred, edges :: Grey
-- Applies a Gaussian filter with a 3x3 Double kernel to remove
            -- small noise.
blurred = gaussianBlur 1 (Nothing :: Maybe Double) grey
            -- Applies Canny's algorithm with a 5x5 Sobel kernel (radius
-- = 2).
edges = canny 2 256 1024 blurred
-- Saves the edges image. Automatically infers the output format.
mErr <- save Autodetect output edges
case mErr of
Nothing ->
putStrLn "Success."
Just err -> do
putStrLn "Unable to save the image:"
print err
|
RaphaelJ/friday-examples
|
src/Canny.hs
|
lgpl-3.0
| 1,354 | 0 | 17 | 481 | 249 | 131 | 118 | 25 | 3 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
#ifdef USE_REFLEX_OPTIMIZER
{-# OPTIONS_GHC -fplugin=Reflex.Optimizer #-}
#endif
-- | This module defines the 'WeakBag' type, which represents a mutable
-- collection of items that does not cause the items to be retained in memory.
-- This is useful for situations where a value needs to be inspected or modified
-- if it is still alive, but can be ignored if it is dead.
module Data.WeakBag
( WeakBag
, WeakBagTicket
, empty
, singleton
, insert
, traverse
, traverse_
, remove
-- * Internal functions
-- These will not always be available.
, _weakBag_children --TODO: Don't export this
) where
import Prelude hiding (traverse)
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IntMap
import Data.IORef
import System.Mem.Weak
-- | A 'WeakBag' holds a set of values of type @/a/@, but does not retain them -
-- that is, they can still be garbage-collected. As long as the @/a/@ values remain
-- alive, the 'WeakBag' will continue to refer to them.
data WeakBag a = WeakBag
{ _weakBag_nextId :: {-# UNPACK #-} !(IORef Int) --TODO: what if this wraps around?
, _weakBag_children :: {-# UNPACK #-} !(IORef (IntMap (Weak a))) -- ^ The items referenced by the WeakBag
}
-- | When inserting an item into a 'WeakBag', a 'WeakBagTicket' is returned. If
-- the caller retains the ticket, the item is guaranteed to stay in memory (and
-- thus in the 'WeakBag'). The ticket can also be used to remove the item from
-- the 'WeakBag' prematurely (i.e. while it is still alive), using 'remove'.
data WeakBagTicket = forall a. WeakBagTicket
{ _weakBagTicket_weakItem :: {-# UNPACK #-} !(Weak a)
, _weakBagTicket_item :: {-# NOUNPACK #-} !a
}
-- | Insert an item into a 'WeakBag'.
{-# INLINE insert #-}
insert :: a -- ^ The item
-> WeakBag a -- ^ The 'WeakBag' to insert into
-> IORef (Weak b) -- ^ An arbitrary value to be used in the following
-- callback
-> (b -> IO ()) -- ^ A callback to be invoked when the item is removed
-- (whether automatically by the item being garbage
-- collected or manually via 'remove')
-> IO WeakBagTicket -- ^ Returns a 'WeakBagTicket' that ensures the item
-- is retained and allows the item to be removed.
insert a (WeakBag nextId children) wbRef finalizer = {-# SCC "insert" #-} do
a' <- evaluate a
wbRef' <- evaluate wbRef
myId <- atomicModifyIORef' nextId $ \n -> (succ n, n)
let cleanup = do
wb <- readIORef wbRef'
mb <- deRefWeak wb
forM_ mb $ \b -> do
csWithoutMe <- atomicModifyIORef children $ \cs ->
let !csWithoutMe = IntMap.delete myId cs
in (csWithoutMe, csWithoutMe)
when (IntMap.null csWithoutMe) $ finalizer b
wa <- mkWeakPtr a' $ Just cleanup
atomicModifyIORef' children $ \cs -> (IntMap.insert myId wa cs, ())
return $ WeakBagTicket
{ _weakBagTicket_weakItem = wa
, _weakBagTicket_item = a'
}
-- | Create an empty 'WeakBag'.
{-# INLINE empty #-}
empty :: IO (WeakBag a)
empty = {-# SCC "empty" #-} do
nextId <- newIORef 1
children <- newIORef IntMap.empty
let bag = WeakBag
{ _weakBag_nextId = nextId
, _weakBag_children = children
}
return bag
-- | Create a 'WeakBag' with one item; equivalent to creating the 'WeakBag' with
-- 'empty', then using 'insert'.
{-# INLINE singleton #-}
singleton :: a -> IORef (Weak b) -> (b -> IO ()) -> IO (WeakBag a, WeakBagTicket)
singleton a wbRef finalizer = {-# SCC "singleton" #-} do
bag <- empty
ticket <- insert a bag wbRef finalizer
return (bag, ticket)
{-# INLINE traverse_ #-}
-- | Visit every node in the given list. If new nodes are appended during the
-- traversal, they will not be visited. Every live node that was in the list
-- when the traversal began will be visited exactly once; however, no guarantee
-- is made about the order of the traversal.
traverse_ :: MonadIO m => WeakBag a -> (a -> m ()) -> m ()
traverse_ (WeakBag _ children) f = {-# SCC "traverse" #-} do
cs <- liftIO $ readIORef children
forM_ cs $ \c -> do
ma <- liftIO $ deRefWeak c
mapM_ f ma
{-# DEPRECATED traverse "Use 'traverse_' instead" #-}
traverse :: MonadIO m => WeakBag a -> (a -> m ()) -> m ()
traverse = traverse_
-- | Remove an item from the 'WeakBag'; does nothing if invoked multiple times
-- on the same 'WeakBagTicket'.
{-# INLINE remove #-}
remove :: WeakBagTicket -> IO ()
remove (WeakBagTicket w _) = {-# SCC "remove" #-} finalize w
--TODO: Should 'remove' also drop the reference to the item?
--TODO: can/should we provide a null WeakBagTicket?
|
ryantrinkle/reflex
|
src/Data/WeakBag.hs
|
bsd-3-clause
| 4,839 | 0 | 24 | 1,109 | 884 | 473 | 411 | 78 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Traduisons.Util where
import Control.Monad.Except
import Data.List
import Data.Maybe
import Data.Time.Clock.POSIX
import Network.HTTP.Client as N
import Network.HTTP.Types
import Control.Exception
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Debug.Trace as Trace
import Traduisons.Types
import Traduisons.Resources
trace :: Show a => String -> a -> a
trace s = join (Trace.trace . prefix s . show)
where prefix :: String -> String -> String
prefix "" a = a
prefix p a = p ++ ": " ++ a
trace' :: Show a => a -> a
trace' = trace ""
liftEither :: (Monad m, MonadError e (t e m)) => Either e a -> t e m a
liftEither = either throwError return
currentTime :: IO Seconds
currentTime = fmap round getPOSIXTime
mkReq :: String -> Either TraduisonsError Request
mkReq url = let err = Left $ TErr CurlError ("Failed to parse URL: " ++ show url)
in maybe err Right (parseUrl url)
curl :: (MonadIO m, Functor m) => URL -> StdMethod -> [Header] -> FormData
-> Manager -> ExceptT TraduisonsError m B.ByteString
curl url httpMethod hdrs formData man = do
req'' <- liftEither (mkReq url)
req' <- case httpMethod of
-- FIXME: Make a newtype for this
GET -> return $ setQueryString formData req''
POST -> let f (a, Just b) = (a, b)
f (a, Nothing) = (a, "")
in return $ urlEncodedBody (map f formData) req''
_ -> let err = "Curl doesn't know " ++ show httpMethod
in throwError $ TErr CurlError err
let ua = (hUserAgent, userAgent)
addHeaders h r = r { requestHeaders = ua:h ++ requestHeaders r }
req = addHeaders hdrs req'
-- WTF IS GOING ON HERE?
let trySomeException = try $ httpLbs req man
trySomeException :: IO (Either SomeException (Response BL.ByteString))
resp <- lift . liftIO $ trySomeException
return $ trace (show resp) ()
case resp of
Left err -> throwError $ TErr CurlError (show err)
Right body -> return . BL.toStrict . responseBody $ body
-- Remove the BOM from Unicode string
stripBOM :: String -> String
stripBOM s = fromMaybe s (stripPrefix "\65279" s)
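-- e.g. (illustrative, not in the original module):
-- stripBOM "\65279abc" == "abc" and stripBOM "abc" == "abc"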
|
johntyree/traduisons-hs
|
src/Traduisons/Util.hs
|
bsd-3-clause
| 2,257 | 0 | 17 | 549 | 772 | 399 | 373 | 51 | 5 |
{-# LANGUAGE TemplateHaskell #-}
{-# Language OverloadedStrings #-}
module BitcoinCore.BloomFilter
( Filter(..)
, Tweak(..)
, NFlags(..)
, probability
, maxFilterBytes
, numberHashFunctions
, filterSize
, pDefault
, blankFilter
, updateFilter
, hardcodedTweak
, unroll
, roll
, serializeFilter
, deserializeFilter
, filterLengthBytes
, FilterContext(..)
, tweak
, nHashFunctions
, defaultFilterWithElements
)where
import General.Util (roll, unroll, Endian(..))
import Data.Hash.Murmur (murmur3)
import Data.Word (Word32)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Base16 (encode)
import Data.Bits (setBit)
import Control.Lens (makeLenses, (^.), over)
data Filter = Filter
{ _filterLengthBytes :: Int
, _filterValue :: Integer
} deriving (Eq)
data FilterContext = FilterContext
{ _tweak :: Tweak
, _nHashFunctions :: Int
} deriving (Eq, Show)
-- TODO: Include NFlags?
newtype Tweak = Tweak Int
deriving (Show, Eq)
makeLenses ''Filter
makeLenses ''FilterContext
filterLengthBits :: Filter -> Int
filterLengthBits f = 8 * (f^.filterLengthBytes)
instance Show Filter where
show f =
"Filter { filterLengthBytes = " ++ show (f^.filterLengthBytes)
++ " filterValue = " ++ show (f^.filterValue)
++ " hexEncoded = " ++ (show . encode . serializeFilter) f
++ " } "
newtype Probability = Probability Float
deriving (Show, Eq)
data NFlags
= BLOOM_UPDATE_NONE
| BLOOP_UPDATE_ALL
| BLOOM_UPDATE_P2PUBKEY_ONLY
deriving (Enum, Show, Eq, Bounded)
probability :: Float -> Probability
probability p =
if p >= 0 && p <= 1
then Probability p
else error $ "Can't construct a probability " ++ show p
-- For reference, see: https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki
maxFilterBytes :: Int
maxFilterBytes = 36000
maxHashFuncs :: Int
maxHashFuncs = 50
-- TODO: This should be random, not hardcoded
hardcodedTweak :: Tweak
hardcodedTweak = Tweak 0
pDefault :: Probability
pDefault = probability 0.0001
seed :: Int -> Tweak -> Word32
seed hashNum (Tweak tweak') = fromIntegral $ (hashNum * 0xFBA4C795) + tweak'
-- n: number of elements to be added to the set
-- p: probability of false positive. 1.0 is match everything, 0 is unachievable
-- returns the filter size in bytes
filterSize :: Int -> Probability -> Int
filterSize nElements (Probability p) =
min (floor (numerator / denominator)) maxFilterBytes
where
numerator = (-1) * fromIntegral nElements * log p
denominator = (log 2 ^ 2) * 8
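-- Worked example (not in the original module; approximate figures):
-- filterSize 10 pDefault == 23 bytes, since the numerator is
-- (-1) * 10 * log 0.0001 ~ 92.1 and the denominator is (log 2 ^ 2) * 8 ~ 3.84;
-- numberHashFunctions 23 10 == 12 then follows from floor (23 * 8 * log 2 / 10).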
-- s: filter size (Bytes)
numberHashFunctions :: Int -> Int -> Int
numberHashFunctions s nElements = min calculatedHashFunctions maxHashFuncs
where calculatedHashFunctions = floor $ (fromIntegral s * 8 * (log 2 :: Double)) / fromIntegral nElements
updateFilter :: FilterContext -> ByteString -> Filter -> Filter
updateFilter filterContext hashData fltr =
foldl (\fltr' updateFunc -> updateFunc fltr') fltr updateFuncs
where
updateFuncs = map
(updateFilterStep filterContext hashData)
[0..((filterContext^.nHashFunctions) - 1)]
updateFilterStep :: FilterContext -> ByteString -> Int -> Filter -> Filter
updateFilterStep filterContext hashData hashNum f =
over filterValue (`setBit` index) f
where
index = bloomHash hashNum (filterContext^.tweak) hashData (filterLengthBits f)
-- Performs a single hash and returns the hash value
bloomHash :: Int -> Tweak -> ByteString -> Int -> Int
bloomHash hashNum tweak hashData fLengthBits =
hash `mod` fLengthBits
where
hash = fromIntegral $ murmur3 seedValue hashData
seedValue = seed hashNum tweak
defaultFilterWithElements :: [ByteString] -> (Filter, FilterContext)
defaultFilterWithElements elements = (filter', context)
where
filter' = foldr (updateFilter context) blank elements
(blank, context) = blankFilter nElements pDefault
nElements = max 10 (length elements)
-- in case there are no elements to add to the filter
-- we still use nElements 10, so the filter isn't blank
blankFilter :: Int -> Probability -> (Filter, FilterContext)
blankFilter nElements p = (bloomFilter, context)
where bloomFilter = Filter
{ _filterLengthBytes = size
, _filterValue = 0}
context = FilterContext
{ _nHashFunctions = nHashFunctions'
, _tweak = tweak' }
size = filterSize nElements p
nHashFunctions' = numberHashFunctions size nElements
tweak' = hardcodedTweak
serializeFilter :: Filter -> ByteString
serializeFilter f = unroll LE (f^.filterValue) `B.append` paddingNullBytes
where filterBase = unroll LE (f^.filterValue)
paddingNullBytes = B.replicate ((f^.filterLengthBytes) - B.length filterBase) 0
deserializeFilter :: ByteString -> Filter
deserializeFilter bs = Filter
{_filterLengthBytes = fLength
, _filterValue = fValue
}
where fLength = B.length bs
fValue = roll LE bs
|
clample/lamdabtc
|
backend/src/BitcoinCore/BloomFilter.hs
|
bsd-3-clause
| 4,977 | 0 | 14 | 984 | 1,301 | 724 | 577 | 122 | 2 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import Sieve (primesUpTo)
exitProperly :: IO Counts -> IO ()
exitProperly m = do
counts <- m
exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = exitProperly $ runTestTT $ TestList
[ TestList sieveTests ]
sieveTests :: [Test]
sieveTests =
[ testCase "small primes" $ do
[2, 3, 5, 7] @=? primesUpTo 10
[2, 3, 5, 7, 11] @=? primesUpTo 11
, testCase "primes up to 1000" $
[ 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71
, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151
, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233
, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317
, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419
, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503
, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607
, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701
, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811
, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911
, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997
] @=? primesUpTo 1000
, testCase "first thousand primes" $
1000 @=? length (primesUpTo 7919)
, testCase "edge cases" $ do
[] @=? primesUpTo 1
[2] @=? primesUpTo 2
]
|
pminten/xhaskell
|
sieve/sieve_test.hs
|
mit
| 1,728 | 0 | 12 | 409 | 841 | 513 | 328 | 35 | 2 |
module Text.Peggy (
module Text.Peggy.Prim,
module Text.Peggy.SrcLoc,
module Text.Peggy.Syntax,
module Text.Peggy.Quote,
) where
import Text.Peggy.Prim
import Text.Peggy.SrcLoc
import Text.Peggy.Syntax
import Text.Peggy.Quote
|
tanakh/Peggy
|
Text/Peggy.hs
|
bsd-3-clause
| 237 | 0 | 5 | 32 | 61 | 42 | 19 | 9 | 0 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Lazyfoo.Lesson17 (main) where
import Prelude hiding (foldl1)
import Control.Applicative
import Control.Monad
import Data.Foldable
import Data.Monoid
import Data.Maybe
import Foreign.C.Types
import Linear
import Linear.Affine
import SDL (($=))
import qualified SDL
import Paths_sdl2 (getDataFileName)
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (640, 480)
data Texture = Texture SDL.Texture (V2 CInt)
loadTexture :: SDL.Renderer -> FilePath -> IO Texture
loadTexture r filePath = do
surface <- getDataFileName filePath >>= SDL.loadBMP
size <- SDL.surfaceDimensions surface
format <- SDL.surfaceFormat surface
key <- SDL.mapRGB format (V3 0 maxBound maxBound)
SDL.colorKey surface $= Just key
t <- SDL.createTextureFromSurface r surface
SDL.freeSurface surface
return (Texture t size)
renderTexture :: SDL.Renderer -> Texture -> Point V2 CInt -> Maybe (SDL.Rectangle CInt) -> Maybe CDouble -> Maybe (Point V2 CInt) -> Maybe (V2 Bool) -> IO ()
renderTexture r (Texture t size) xy clip theta center flips =
let dstSize =
maybe size (\(SDL.Rectangle _ size') -> size') clip
in SDL.renderCopyEx r
t
clip
(Just (SDL.Rectangle xy dstSize))
(fromMaybe 0 theta)
center
(fromMaybe (pure False) flips)
data ButtonSprite = MouseOut | MouseOver | MouseDown | MouseUp
data Button = Button (Point V2 CInt) ButtonSprite
buttonSize :: V2 CInt
buttonWidth, buttonHeight :: CInt
buttonSize@(V2 buttonWidth buttonHeight) = V2 300 200
handleEvent :: Point V2 CInt -> SDL.EventPayload -> Button -> Button
handleEvent mousePos e (Button buttonPos _) =
let inside = foldl1 (&&) ((>=) <$> mousePos <*> buttonPos) &&
foldl1 (&&) ((<=) <$> mousePos <*> buttonPos .+^ buttonSize)
sprite
| inside = case e of
SDL.MouseButtonEvent e
| SDL.mouseButtonEventMotion e == SDL.Pressed -> MouseDown
| SDL.mouseButtonEventMotion e == SDL.Released -> MouseUp
| otherwise -> MouseOver
_ -> MouseOver
| otherwise = MouseOut
in Button buttonPos sprite
renderButton :: SDL.Renderer -> Texture -> Button -> IO ()
renderButton r spriteSheet (Button xy sprite) =
renderTexture r spriteSheet xy (Just spriteClipRect) Nothing Nothing Nothing
where
spriteClipRect =
let i = case sprite of
MouseOut -> 0
MouseOver -> 1
MouseDown -> 2
MouseUp -> 3
in SDL.Rectangle (P (V2 0 (i * 200))) (V2 300 200)
main :: IO ()
main = do
SDL.initialize [SDL.InitVideo]
SDL.HintRenderScaleQuality $= SDL.ScaleLinear
do renderQuality <- SDL.get SDL.HintRenderScaleQuality
when (renderQuality /= SDL.ScaleLinear) $
putStrLn "Warning: Linear texture filtering not enabled!"
window <-
SDL.createWindow
"SDL Tutorial"
SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
SDL.showWindow window
renderer <-
SDL.createRenderer
window
(-1)
(SDL.RendererConfig
{ SDL.rendererType = SDL.AcceleratedVSyncRenderer
, SDL.rendererTargetTexture = False
})
SDL.renderDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
buttonSpriteSheet <- loadTexture renderer "examples/lazyfoo/button.bmp"
let loop buttons = do
let collectEvents = do
e <- SDL.pollEvent
case e of
Nothing -> return []
Just e' -> (e' :) <$> collectEvents
events <- collectEvents
mousePos <- SDL.getMouseLocation
let (Any quit, Endo updateButton) =
foldMap (\case
SDL.QuitEvent -> (Any True, mempty)
e -> (mempty, Endo (handleEvent mousePos e))) $
map SDL.eventPayload events
SDL.renderDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
SDL.renderClear renderer
let buttons' = map (\b -> updateButton b) buttons
for_ buttons' (renderButton renderer buttonSpriteSheet)
SDL.renderPresent renderer
unless quit (loop buttons')
loop (let newButton xy = Button xy MouseOut
in [ newButton (P (V2 0 0))
, newButton (P (V2 (screenWidth - buttonWidth) 0))
, newButton (P (V2 0 (screenHeight - buttonHeight)))
, newButton (P (V2 screenWidth screenHeight - buttonSize))
])
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
|
bj4rtmar/sdl2
|
examples/lazyfoo/Lesson17.hs
|
bsd-3-clause
| 4,774 | 0 | 24 | 1,331 | 1,481 | 730 | 751 | 117 | 4 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE CPP #-}
-- The intent is that this Prelude provide the API of
-- the base 4.11 Prelude in a way that is portable for
-- all base versions.
module Prelude
(
module Prelude.Compat
, Semigroup(..)
)
where
import Prelude.Compat
import Data.Semigroup (Semigroup(..)) -- includes (<>)
|
jgm/pandoc-citeproc
|
prelude/Prelude.hs
|
bsd-3-clause
| 365 | 0 | 6 | 62 | 45 | 32 | 13 | 9 | 0 |
#!/usr/bin/env stack
-- stack --install-ghc runghc --package turtle
-- Generate a subset of sinex files to commit into the source tree as test
-- data
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Text.Read as T (decimal)
import Turtle
sinexDir :: T.Text
sinexDir = "/nas/gemd/geodesy_data/gnss/solutions/final/weekly/"
filterSinexFiles :: Integer -> Integer -> [Text] -> IO ()
filterSinexFiles start end files = mapM_ TIO.putStrLn (filter p files)
where
week :: Text -> Integer
week file = case T.decimal (T.drop (T.length sinexDir + 3) file) of
Right (n, _) -> n
Left _ -> -1
p file = let n = week file in (n >= start) && (n <= end)
main :: IO ()
main = getFiles >>= filterSinexFiles 15647 16165
where
getFiles = arguments >>= \case
[] -> T.lines . snd <$> shellStrict ("ls " <> sinexDir <> "*.SNX") empty
as -> return as
|
lbodor/geodesy-domain-model
|
src/test/resources/solutions/final/weekly/sinex-subset.hs
|
bsd-3-clause
| 1,029 | 0 | 15 | 261 | 300 | 161 | 139 | 20 | 2 |
import Distribution.Simple
import System.Process
main = defaultMainWithHooks
simpleUserHooks { preConf = \args confFlags -> do
system "./build-parser.sh"
preConf simpleUserHooks args confFlags }
|
thomasjm/IHaskell
|
ghc-parser/Setup.hs
|
mit
| 247 | 0 | 11 | 74 | 50 | 25 | 25 | 6 | 1 |
{-# LANGUAGE CPP, BangPatterns #-}
module Main where
import Control.Monad
import Data.Int
import Network.Socket
( AddrInfo, AddrInfoFlag (AI_PASSIVE), HostName, ServiceName, Socket
, SocketType (Stream), SocketOption (ReuseAddr)
, accept, addrAddress, addrFlags, addrFamily, bindSocket, defaultProtocol
, defaultHints
, getAddrInfo, listen, setSocketOption, socket, sClose, withSocketsDo )
import System.Environment (getArgs, withArgs)
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import System.IO (withFile, IOMode(..), hPutStrLn, Handle, stderr)
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar)
import qualified Network.Socket as N
import Debug.Trace
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack, unpack)
import qualified Data.ByteString as BS
import qualified Network.Socket.ByteString as NBS
import Data.Time (getCurrentTime, diffUTCTime, NominalDiffTime)
import Data.ByteString.Internal as BSI
import Foreign.Storable (pokeByteOff, peekByteOff)
import Foreign.C (CInt(..))
import Foreign.ForeignPtr (withForeignPtr)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
foreign import ccall unsafe "htonl" htonl :: CInt -> CInt
foreign import ccall unsafe "ntohl" ntohl :: CInt -> CInt
passive :: Maybe AddrInfo
passive = Just (defaultHints { addrFlags = [AI_PASSIVE] })
main = do
[pingsStr] <- getArgs
serverReady <- newEmptyMVar
clientDone <- newEmptyMVar
-- Start the server
forkIO $ do
-- Initialize the server
serverAddr:_ <- getAddrInfo passive Nothing (Just "8080")
sock <- socket (addrFamily serverAddr) Stream defaultProtocol
setSocketOption sock ReuseAddr 1
bindSocket sock (addrAddress serverAddr)
listen sock 1
-- Set up multiplexing channel
multiplexMVar <- newEmptyMVar
-- Wait for incoming connections (pings from the client)
putMVar serverReady ()
(clientSock, pingAddr) <- accept sock
forkIO $ socketToMVar clientSock multiplexMVar
-- Reply to the client
forever $ takeMVar multiplexMVar >>= send clientSock
-- Start the client
forkIO $ do
takeMVar serverReady
serverAddr:_ <- getAddrInfo Nothing (Just "127.0.0.1") (Just "8080")
clientSock <- socket (addrFamily serverAddr) Stream defaultProtocol
N.connect clientSock (addrAddress serverAddr)
ping clientSock (read pingsStr)
putMVar clientDone ()
-- Wait for the client to finish
takeMVar clientDone
socketToMVar :: Socket -> MVar ByteString -> IO ()
socketToMVar sock mvar = go
where
go = do bs <- recv sock
when (BS.length bs > 0) $ do
putMVar mvar bs
go
pingMessage :: ByteString
pingMessage = pack "ping123"
ping :: Socket -> Int -> IO ()
ping sock pings = go pings
where
go :: Int -> IO ()
go 0 = do
putStrLn $ "client did " ++ show pings ++ " pings"
go !i = do
before <- getCurrentTime
send sock pingMessage
bs <- recv sock
after <- getCurrentTime
-- putStrLn $ "client received " ++ unpack bs
let latency = (1e6 :: Double) * realToFrac (diffUTCTime after before)
hPutStrLn stderr $ show i ++ " " ++ show latency
go (i - 1)
-- | Receive a package
recv :: Socket -> IO ByteString
recv sock = do
header <- NBS.recv sock 4
length <- decodeLength header
NBS.recv sock (fromIntegral (length :: Int32))
-- | Send a package
send :: Socket -> ByteString -> IO ()
send sock bs = do
length <- encodeLength (fromIntegral (BS.length bs))
NBS.sendMany sock [length, bs]
-- | Encode length (manual for now)
encodeLength :: Int32 -> IO ByteString
encodeLength i32 =
BSI.create 4 $ \p ->
pokeByteOff p 0 (htonl (fromIntegral i32))
-- | Decode length (manual for now)
decodeLength :: ByteString -> IO Int32
decodeLength bs =
let (fp, _, _) = BSI.toForeignPtr bs in
withForeignPtr fp $ \p -> do
w32 <- peekByteOff p 0
return (fromIntegral (ntohl w32))
|
aycanirican/network-transport-tcp
|
benchmarks/JustPingThroughMVar.hs
|
bsd-3-clause
| 4,012 | 0 | 15 | 819 | 1,225 | 638 | 587 | 94 | 2 |
import Numeric.Natural
import Control.Exception (evaluate)
newtype Mod a = Mod a deriving (Show)
instance Integral a => Num (Mod a) where
Mod a * Mod b = Mod (a * b `mod` 10000000019)
fromInteger n = Mod (fromInteger n `mod` 10000000019)
main :: IO ()
main = do
-- Should not allocate more compared to Integer
-- _ <- evaluate $ product $ map Mod [(1 :: Integer) .. 1000000]
_ <- evaluate $ product $ map Mod [(1 :: Natural) .. 1000000]
return ()
|
sdiehl/ghc
|
libraries/base/tests/T17499.hs
|
bsd-3-clause
| 471 | 0 | 11 | 109 | 171 | 88 | 83 | 10 | 1 |
----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.BluetileCommands
-- Copyright : (c) Jan Vornberger 2009
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : not portable
--
-- This is a list of selected commands that can be made available using
-- "XMonad.Hooks.ServerMode" to allow external programs to control
-- the window manager. Bluetile (<http://projects.haskell.org/bluetile/>)
-- uses this to enable its dock application to do things like changing
-- workspaces and layouts.
--
-----------------------------------------------------------------------------
module XMonad.Actions.BluetileCommands (
-- * Usage
-- $usage
bluetileCommands
) where
import XMonad
import qualified XMonad.StackSet as W
import XMonad.Layout.LayoutCombinators
import System.Exit
-- $usage
--
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Hooks.ServerMode
-- > import XMonad.Actions.BluetileCommands
--
-- Then edit your @handleEventHook@:
--
-- > main = xmonad defaultConfig { handleEventHook = serverModeEventHook' bluetileCommands }
--
-- See the documentation of "XMonad.Hooks.ServerMode" for details on
-- how to actually invoke the commands from external programs.
workspaceCommands :: Int -> X [(String, X ())]
workspaceCommands sid = asks (workspaces . config) >>= \spaces -> return
[(("greedyView" ++ show i),
activateScreen sid >> windows (W.greedyView i))
| i <- spaces ]
layoutCommands :: Int -> [(String, X ())]
layoutCommands sid = [ ("layout floating" , activateScreen sid >>
sendMessage (JumpToLayout "Floating"))
, ("layout tiled1" , activateScreen sid >>
sendMessage (JumpToLayout "Tiled1"))
, ("layout tiled2" , activateScreen sid >>
sendMessage (JumpToLayout "Tiled2"))
, ("layout fullscreen" , activateScreen sid >>
sendMessage (JumpToLayout "Fullscreen"))
]
masterAreaCommands :: Int -> [(String, X ())]
masterAreaCommands sid = [ ("increase master n", activateScreen sid >>
sendMessage (IncMasterN 1))
, ("decrease master n", activateScreen sid >>
sendMessage (IncMasterN (-1)))
]
quitCommands :: [(String, X ())]
quitCommands = [ ("quit bluetile", io (exitWith ExitSuccess))
, ("quit bluetile and start metacity", restart "metacity" False)
]
bluetileCommands :: X [(String, X ())]
bluetileCommands = do
let restartCommand = [ ("restart bluetile", restart "bluetile" True) ]
wscmds0 <- workspaceCommands 0
wscmds1 <- workspaceCommands 1
return $ restartCommand
++ wscmds0 ++ layoutCommands 0 ++ masterAreaCommands 0 ++ quitCommands
++ wscmds1 ++ layoutCommands 1 ++ masterAreaCommands 1 ++ quitCommands
activateScreen :: Int -> X ()
activateScreen sid = screenWorkspace (S sid) >>= flip whenJust (windows . W.view)
|
markus1189/xmonad-contrib-710
|
XMonad/Actions/BluetileCommands.hs
|
bsd-3-clause
| 3,505 | 0 | 15 | 1,055 | 610 | 336 | 274 | 38 | 1 |
{-@ LIQUID "--no-termination" @-}
{-@ LIQUID "--short-names" @-}
{-@ LIQUID "--fullcheck" @-}
{-@ LIQUID "--maxparams=3" @-}
module AlphaConvert (subst) where
import qualified Data.Set as S
import Language.Haskell.Liquid.Prelude
freshS :: S.Set Bndr -> Bndr
alpha :: S.Set Bndr -> Expr -> Expr
subst :: Expr -> Bndr -> Expr -> Expr
free :: Expr -> S.Set Bndr
---------------------------------------------------------------------
-- | Datatype Definition --------------------------------------------
---------------------------------------------------------------------
type Bndr
= Int
data Expr
= Var Bndr
| Abs Bndr Expr
| App Expr Expr
{-@ measure fv :: Expr -> (S.Set Bndr)
fv (Var x) = (Set_sng x)
fv (Abs x e) = (Set_dif (fv e) (Set_sng x))
fv (App e a) = (Set_cup (fv e) (fv a))
@-}
{-@ measure isAbs :: Expr -> Prop
isAbs (Var v) = false
isAbs (Abs v e) = true
isAbs (App e a) = false
@-}
{-@ predicate Elem X Ys = Set_mem X Ys @-}
{-@ predicate NotElem X Ys = not (Elem X Ys) @-}
{-@ predicate AddV E E2 X E1 = fv E = Set_cup (Set_dif (fv E2) (Set_sng X)) (fv E1) @-}
{-@ predicate EqV E1 E2 = fv E1 = fv E2 @-}
{-@ predicate Occ X E = Set_mem X (fv E) @-}
{-@ predicate Subst E E1 X E2 = if (Occ X E2) then (AddV E E2 X E1) else (EqV E E2) @-}
----------------------------------------------------------------------------
-- | Part 5: Capture Avoiding Substitution ---------------------------------
----------------------------------------------------------------------------
{-@ subst :: e1:Expr -> x:Bndr -> e2:Expr -> {e:Expr | Subst e e1 x e2} @-}
----------------------------------------------------------------------------
subst e' x e@(Var y)
| x == y = e'
| otherwise = e
subst e' x (App ea eb) = App ea' eb'
where
ea' = subst e' x ea
eb' = subst e' x eb
subst e1 x e2@(Abs y e)
| x == y = e2
| y `S.member` xs = subst e1 x (alpha xs e2)
| otherwise = Abs y (subst e1 x e)
where
xs = free e1
----------------------------------------------------------------------------
-- | Part 4: Alpha Conversion ----------------------------------------------
----------------------------------------------------------------------------
{-@ alpha :: ys:(S.Set Bndr) -> e:{Expr | isAbs e} -> {v:Expr | EqV v e} @-}
----------------------------------------------------------------------------
alpha ys (Abs x e) = Abs x' (subst (Var x') x e)
where
xs = free e
x' = freshS zs
zs = S.insert x (S.union ys xs)
alpha _ _ = liquidError "never"
----------------------------------------------------------------------------
-- | Part 3: Fresh Variables -----------------------------------------------
----------------------------------------------------------------------------
{-@ freshS :: xs:(S.Set Bndr) -> {v:Bndr | NotElem v xs} @-}
----------------------------------------------------------------------------
freshS xs = undefined
----------------------------------------------------------------------------
-- | Part 2: Free Variables ------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
{-@ free :: e:Expr -> {v : S.Set Bndr | v = fv e} @-}
----------------------------------------------------------------------------
free (Var x) = S.singleton x
free (App e e') = S.union (free e) (free e')
free (Abs x e) = S.delete x (free e)
|
mightymoose/liquidhaskell
|
tests/pos/alphaconvert-Set.hs
|
bsd-3-clause
| 3,870 | 0 | 10 | 959 | 528 | 283 | 245 | 33 | 1 |
{-# LANGUAGE GADTs, ExistentialQuantification, KindSignatures, RankNTypes #-}
-- Fails (needs the (Ord a) in TypeSet
-- c.f. gadt22.hs
module Expr where
import Data.Set (Set)
data Type a where
TypeInt :: Type Int
TypeSet :: {- Ord a => -} Type a -> Type (Set a)
TypeFun :: Type a -> Type b -> Type (a -> b)
data Expr :: * -> * where
Const :: Type a -> a -> Expr a
data DynExpr = forall a. DynExpr (Expr a)
withOrdDynExpr :: DynExpr -> (forall a. Ord a => Expr a -> b) -> Maybe b
withOrdDynExpr (DynExpr e@(Const (TypeSet _) _)) f = Just (f e)
withOrdDynExpr (DynExpr e@(Const TypeInt _)) f = Just (f e)
withOrdDynExpr _ _ = Nothing
|
urbanslug/ghc
|
testsuite/tests/gadt/gadt21.hs
|
bsd-3-clause
| 668 | 0 | 12 | 159 | 257 | 135 | 122 | 14 | 1 |
{-# OPTIONS_GHC -fpackage-trust #-}
module Check05 ( main' ) where
main' = do
let n = 1
print $ n
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/check/Check05.hs
|
bsd-3-clause
| 108 | 0 | 9 | 30 | 31 | 17 | 14 | 5 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Main (main) where
import Control.Monad.IO.Class (liftIO)
import Control.Monad.State
import Control.Monad.Reader
import Data.FileEmbed
import Data.List
import Data.Monoid
import qualified Data.Map as M
import System.FilePath ((<.>), takeDirectory, takeExtension)
import System.Directory (createDirectoryIfMissing)
import System.Console.GetOpt
import System.IO
import System.Exit
import Text.Blaze.Html5 (docTypeHtml)
import Text.Blaze.Html.Renderer.String
import Language.Futhark.TypeChecker (Imports, FileModule(..))
import Language.Futhark.TypeChecker.Monad
import Language.Futhark
import Futhark.Doc.Generator
import Futhark.Compiler (readLibrary, dumpError, newFutharkConfig)
import Futhark.Pipeline (runFutharkM, FutharkM)
import Futhark.Util.Options
import Futhark.Util (directoryContents)
import Language.Futhark.Futlib.Prelude
main :: IO ()
main = mainWithOptions initialDocConfig commandLineOptions f
where f [dir] config = Just $ do
res <- runFutharkM (m config dir) True
case res of
Left err -> liftIO $ do
dumpError newFutharkConfig err
exitWith $ ExitFailure 2
Right () ->
return ()
f _ _ = Nothing
m :: DocConfig -> FilePath -> FutharkM ()
m config dir =
case docOutput config of
Nothing -> liftIO $ do
hPutStrLn stderr "Must specify output directory with -o."
exitWith $ ExitFailure 1
Just outdir -> do
files <- liftIO $ futFiles dir
when (docVerbose config) $ liftIO $ do
mapM_ (hPutStrLn stderr . ("Found source file "<>)) files
hPutStrLn stderr "Reading files..."
(prog, _w, imports, _vns) <-
readLibrary False preludeBasis mempty files
liftIO $ printDecs config outdir (nubBy sameImport imports) $
progDecs prog
sameImport (x, _) (y, _) = x == y
futFiles :: FilePath -> IO [FilePath]
futFiles dir = filter isFut <$> directoryContents dir
where isFut = (==".fut") . takeExtension
type DocEnv = M.Map (Namespace,VName) String
printDecs :: DocConfig -> FilePath -> Imports -> [Dec] -> IO ()
printDecs cfg dir imports decs = do
let to_write = evalState (mapM (f $ render decs) imports) mempty
mapM_ write to_write
write ("index", renderHtml $ indexPage to_write)
write' ("style.css", cssFile)
where f g x = (fst x,) <$> g x
write (name, content) = write' (name <.> "html", content)
write' (name, content) = do let file = dir ++ "/" ++ name
createDirectoryIfMissing True $ takeDirectory file
when (docVerbose cfg) $
hPutStrLn stderr $ "Writing " <> file
writeFile file content
render :: [Dec] -> (String, FileModule) -> State DocEnv String
render decs (name,fm) = runReaderT m (name,fm)
where m = renderHtml . docTypeHtml <$> renderFile decs
cssFile :: String
cssFile = $(embedStringFile "rts/futhark-doc/style.css")
data DocConfig = DocConfig { docOutput :: Maybe FilePath
, docVerbose :: Bool
}
initialDocConfig :: DocConfig
initialDocConfig = DocConfig { docOutput = Nothing
, docVerbose = False
}
type DocOption = OptDescr (Either (IO ()) (DocConfig -> DocConfig))
commandLineOptions :: [DocOption]
commandLineOptions = [ Option "o" ["output-directory"]
(ReqArg (\dirname -> Right $ \config -> config { docOutput = Just dirname })
"DIR")
"Directory in which to put generated documentation."
, Option "v" ["verbose"]
(NoArg $ Right $ \config -> config { docVerbose = True })
"Print status messages on stderr."
]
|
ihc/futhark
|
src/futhark-doc.hs
|
isc
| 4,092 | 0 | 18 | 1,231 | 1,139 | 608 | 531 | 90 | 4 |
import Text.Printf
main = do
line <- getLine
let money = (read line :: Double) * 95.0
putStrLn $ printf "%.2f" $ if money < 300 then money else money * 0.85
|
Voleking/ICPC
|
references/aoapc-book/BeginningAlgorithmContests/haskell/ch1/ex1-7.hs
|
mit
| 164 | 1 | 12 | 39 | 73 | 35 | 38 | 5 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module System.NotifySend (
defaultNotification,
notifySend,
summary,
body,
expireTime,
urgency,
icon,
Command,
commandOptions
) where
import Control.Lens hiding (elements)
import Control.Monad.IO.Class
import Data.Text hiding (map, toLower)
import qualified Data.Text as T
import Shelly
import Test.QuickCheck
data Urgency = Low | Normal | Critical deriving (Show, Eq)
data Command = Command {
_summary :: Text,
_body :: Text,
_expireTime :: Int,
_urgency :: Urgency,
_icon :: Prelude.FilePath
} deriving (Show, Eq)
genText :: Gen Text
genText = fmap pack $ listOf (elements ['a' .. 'z'])
instance Arbitrary Command where
arbitrary = do
s <- genText
b <- genText
e <- arbitrary
u <- elements [Low, Normal, Critical]
i <- fmap show genText
return $ Command s b e u i
$(makeLenses ''Command)
type ShellyOptions = [Text]
commandOptions :: Command -> ShellyOptions
commandOptions c = ["-u", (T.toLower . pack . show) $ c ^. urgency, "-t", (pack . show) $ max 0 (c ^. expireTime), "-i", pack $ c ^. icon]
-- | A default notification, with no message. You can modify the defaults using the Lens API
--
-- >>> notifySend $ defaultNotification & body .~ "hello world"
defaultNotification :: Command
defaultNotification = Command blank blank 1000 Low " "
where blank = " " :: Text
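-- Illustrative (not in the original module):
-- commandOptions defaultNotification == ["-u","low","-t","1000","-i"," "]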
-- | Send the notification via the @notify-send@ command. This no-ops if notify-send is not found.
notifySend :: MonadIO m => Command -> m Text
notifySend send = shelly $ verbosely $ do
notifyPath <- which name
case notifyPath of
Just _ -> notify_send (commandOptions send) (send ^. summary) (send ^. body)
Nothing -> return ""
where notify_send opts s b = run name (opts ++ [s,b])
name = "notify-send"
|
bobjflong/notify-send
|
src/System/NotifySend.hs
|
mit
| 1,959 | 0 | 13 | 499 | 556 | 306 | 250 | 51 | 2 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module PDF.ContentStream
( parseStream
, parseColorSpace
) where
import Data.Char (chr, ord)
import Data.String (fromString)
import Data.List (isPrefixOf, dropWhileEnd)
import Numeric (readOct, readHex)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BSLC (ByteString, pack)
import qualified Data.ByteString.Char8 as BSSC (unpack)
import qualified Data.ByteString.Lazy.UTF8 as BSLU (toString)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Text.Parsec hiding (many, (<|>))
import Text.Parsec.ByteString.Lazy
import Control.Applicative
import Debug.Trace
import PDF.Definition
import PDF.Object
import PDF.Character (pdfcharmap, adobeJapanOneSixMap)
type PSParser a = GenParser Char PSR a
parseContentStream p st = runParser p st ""
parseStream :: PSR -> PDFStream -> PDFStream
parseStream psr pdfstream =
case parseContentStream (T.concat <$> (spaces >> many (try elems <|> skipOther))) psr pdfstream of
Left err -> error $ "Nothing to be parsed: " ++ (show err)
Right str -> BSLC.pack $ BSSC.unpack $ encodeUtf8 str
parseColorSpace :: PSR -> BSLC.ByteString -> [T.Text]
parseColorSpace psr pdfstream =
case parseContentStream (many (choice [ try colorSpace
, try $ T.concat <$> xObject
, (T.empty <$ elems)
])) psr pdfstream of
Left err -> error "Nothing to be parsed"
Right str -> str
-- | Parsers for Content Stream
elems :: PSParser T.Text
elems = choice [ try pdfopBT
, try pdfopTf
, try pdfopTD
, try pdfopTd
, try pdfopTm
, try pdfopTc
, try pdfopTs
, try pdfopTw
, try pdfopTL
, try pdfopTz
, try pdfopTj
, try pdfopTJ
, try pdfopTr
, try pdfQuote
, try pdfDoubleQuote
, try pdfopTast
, try letters <* spaces
, try hexletters <* spaces
, try array <* spaces
, try pdfopGraphics
, try dashPattern
, try $ T.empty <$ xObject
, try graphicState
, try pdfopcm
, try $ T.empty <$ colorSpace
, try $ T.empty <$ renderingIntent
, try pdfopBDC
, try pdfopBMC
, try pdfopEMC
, unknowns
]
pdfopGraphics :: PSParser T.Text
pdfopGraphics = do
spaces
choice [ try $ T.empty <$ oneOf "qQ" <* spaces
, try $ T.empty <$ oneOf "fFbBW" <* (many $ string "*") <* space <* spaces
, try $ T.empty <$ oneOf "nsS" <* spaces
, try $ T.empty <$ (digitParam <* spaces) <* oneOf "jJM" <* space <* spaces
, try $ T.empty <$ (digitParam <* spaces) <* oneOf "dwi" <* spaces
, try $ T.empty <$ (many1 (digitParam <* spaces) <* oneOf "ml" <* space <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* oneOf "vy" <* space <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "re" <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "SCN" <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "scn" <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "SC" <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "sc" <* spaces)
, try $ T.empty <$ (many1 (digitParam <* spaces) <* string "c" <* space <* spaces)
, try $ T.empty <$ oneOf "h" <* spaces
]
return T.empty
graphicState :: PSParser T.Text
graphicState = do
gs <- (++) <$> string "/" <*> manyTill anyChar (try space)
spaces
string "gs"
spaces
return T.empty
colorSpace :: PSParser T.Text
colorSpace = do
gs <- choice [ try $ string "/" *> manyTill anyChar (try space) <* (string "CS" <|> string "cs") <* spaces
, try $ "DeviceRGB" <$ (many1 (digitParam <* spaces) <* string "rg" <* spaces)
, try $ "DeviceRGB" <$ (many1 (digitParam <* spaces) <* string "RG" <* spaces)
, try $ "DeviceGray" <$ (digitParam <* spaces) <* oneOf "gG" <* spaces
, try $ "DeviceCMYK" <$ (many1 (digitParam <* spaces) <* oneOf "kK" <* spaces)
]
updateState (\s -> s {colorspace = gs})
return $ T.pack gs
dashPattern :: PSParser T.Text
dashPattern = do
char '[' >> many digit >> char ']' >> spaces >> many1 digit >> spaces >> string "d"
return T.empty
renderingIntent :: PSParser T.Text
renderingIntent = do
ri <- choice [ try $ string "/" *> manyTill anyChar (try space) <* string "ri" <* spaces
, try $ string "/" *> manyTill anyChar (try space) <* string "Intent" <* spaces
]
return $ T.pack ri
xObject :: PSParser [T.Text]
xObject = do
n <- (++) <$> string "/" <*> manyTill anyChar (try space)
spaces
string "Do"
spaces
st <- getState
let xobjcs = xcolorspaces st
-- updateState (\s -> s {colorspace = xobjcs})
return $ map T.pack xobjcs
pdfopBT :: PSParser T.Text
pdfopBT = do
st <- getState
updateState (\s -> s{text_m = (1,0,0,1,0,0), text_break = False})
string "BT"
spaces
t <- manyTill elems (try $ string "ET")
spaces
return $ T.concat t
-- should be refined according to section 10.5 of the PDF Reference
pdfopBMC :: PSParser T.Text
pdfopBMC = do
n <- (++) <$> string "/" <*> manyTill anyChar (try space)
spaces
string "BMC"
spaces
manyTill elems (try $ string "EMC")
spaces
return T.empty
pdfopBDC :: PSParser T.Text
pdfopBDC = do
n1 <- name *> propertyList
spaces
string "BDC"
spaces
return T.empty
pdfopEMC :: PSParser T.Text
pdfopEMC = do
spaces
string "EMC"
spaces
return T.empty
propertyList :: PSParser T.Text
propertyList = spaces >> choice [try dictionary, try name]
dictionary :: PSParser T.Text
dictionary = T.concat <$> (spaces >> string "<<" >> spaces
*> manyTill dictEntry (try (string ">>" >> (notFollowedBy $ string ">"))))
dictEntry :: PSParser T.Text
dictEntry = choice [ try name
, try letters
, T.pack <$> try hex
, T.pack <$> try (many1 digit)
] <* spaces
where
hex = string "<" >> (manyTill (oneOf "0123456789abcdefABCDEF") (try $ string ">"))
name :: PSParser T.Text
name = T.pack <$> ((++) <$> string "/" <*> (manyTill anyChar (try $ lookAhead $ oneOf "><][)( \n\r/")) <* spaces)
pdfopTj :: PSParser T.Text
pdfopTj = do
spaces
t <- manyTill (letters <|> hexletters <|> array) (try $ string "Tj")
spaces
st <- getState
let needBreak = text_break st
t' = (if needBreak then ("\n":t) else t)
updateState (\s -> s{text_break = False})
return $ T.concat t'
pdfopTJ :: PSParser T.Text
pdfopTJ = do
spaces
t <- manyTill array (try $ string "TJ")
spaces
st <- getState
let needBreak = text_break st
t' = (if needBreak then ("":t) else t)
updateState (\s -> s{text_break = False})
return $ T.concat t'
pdfDoubleQuote :: PSParser T.Text
pdfDoubleQuote = do
spaces
t <- manyTill (letters <|> hexletters <|> array) (try $ string "\"")
spaces
return $ T.concat t
pdfQuote :: PSParser T.Text
pdfQuote = do
spaces
t <- manyTill (letters <|> hexletters <|> array) (try $ string "\'")
spaces
return $ T.concat t
unknowns :: PSParser T.Text
unknowns = do
ps <- manyTill anyChar (try $ oneOf "\r\n")
st <- getState
-- linebreak within (...) is parsed as Tj
return $ case runParser elems st "" $ BSLC.pack ((Data.List.dropWhileEnd (=='\\') ps)++")Tj") of
Right xs -> xs
Left e -> case runParser elems st "" $ BSLC.pack ("("++ps) of
Right xs -> xs
Left e -> case ps of
"" -> ""
otherwise -> T.pack $ "[[[UNKNOWN STREAM:" ++ take 100 (show ps) ++ "]]]"
skipOther :: PSParser T.Text
skipOther = do
a <- manyTill anyChar (try $ oneOf "\r\n")
return ""
array :: PSParser T.Text
array = do
st <- getState
char '['
spaces
str <- manyTill (letters <|> hexletters <|> kern) (try $ char ']')
-- for TJ
let needBreak = text_break st
t' = (if needBreak then "\n":str else str)
updateState (\s -> s{text_break = False})
return $ T.concat t'
letters :: PSParser T.Text
letters = do
char '('
st <- getState
let cmap = fromMaybe [] (lookup (curfont st) (cmaps st))
letterParser = case lookup (curfont st) (fontmaps st) of
Just (Encoding m) -> psletter m
Just (CIDmap s) -> cidletter s
Just (WithCharSet s) -> try $ bytesletter cmap <|> cidletters
Just NullMap -> psletter []
Nothing -> (T.pack) <$> (many1 $ choice [ try $ ')' <$ (string "\\)")
, try $ '(' <$ (string "\\(")
, try $ noneOf ")"
])
lets <- manyTill letterParser $ (try $ char ')')
spaces
return $ T.concat lets
bytesletter :: CMap -> PSParser T.Text
bytesletter cmap = do
txt <- (many1 $ choice [ try $ ')' <$ (string "\\)")
, try $ '(' <$ (string "\\(")
, try $ (chr 10) <$ (string "\\n")
, try $ (chr 13) <$ (string "\\r")
, try $ (chr 8) <$ (string "\\b")
, try $ (chr 9) <$ (string "\\t")
, try $ (chr 12) <$ (string "\\f")
, try $ (chr 92) <$ (string "\\\\")
, try $ (chr 0) <$ (char '\NUL')
, try $ (chr 32) <$ (char ' ')
, try $ chr <$> ((string "\\") *> octnum)
, try $ noneOf ")"
])
return $ byteStringToText cmap txt
where
byteStringToText :: CMap -> String -> T.Text
byteStringToText cmap str = T.concat $ map (toUcs cmap) $ asInt16 $ map ord str
asInt16 :: [Int] -> [Int]
asInt16 [] = []
asInt16 (a:[]) = [a] --error $ "Can not read string "++(show a)
asInt16 (a:b:rest) = (a * 256 + b):(asInt16 rest)
-- for debug
-- myToUcs cmap x = if x == 636 then trace (show cmap) $ toUcs cmap x else toUcs cmap x
hexletters :: PSParser T.Text
hexletters = do
char '<'
lets <- manyTill hexletter (try $ char '>')
spaces
return $ T.concat lets
octletters :: PSParser T.Text
octletters = do
char '('
lets <- manyTill octletter (try $ char ')')
spaces
return $ T.concat lets
adobeOneSix :: Int -> T.Text
adobeOneSix a = case Map.lookup a adobeJapanOneSixMap of
Just cs -> T.pack $ BSLU.toString cs
Nothing -> T.pack $ "[" ++ (show a) ++ "]"
toUcs :: CMap -> Int -> T.Text
toUcs m h = case lookup h m of
Just ucs -> T.pack ucs
Nothing -> if m == [] then adobeOneSix h else T.pack [chr h]
cidletters = choice [try hexletter, try octletter]
hexletter :: PSParser T.Text
hexletter = do
st <- getState
let font = curfont st
cmap = fromMaybe [] (lookup font (cmaps st))
(hexToString cmap . readHex) <$> choice [ try $ count 4 $ oneOf "0123456789ABCDEFabcdef"
, try $ count 2 $ oneOf "0123456789ABCDEFabcdef"
, try $ (:"0") <$> (oneOf "0123456789ABCDEFabcdef")
]
where hexToString m [(h,"")] = toUcs m h
hexToString _ _ = "????"
octletter :: PSParser T.Text
octletter = do
st <- getState
let cmap = fromMaybe [] (lookup (curfont st) (cmaps st))
o <- octnum
return $ toUcs cmap o
psletter :: [(Char,String)] -> PSParser T.Text
psletter fontmap = do
c <- try (char '\\' >> oneOf "\\()")
<|>
try (octToChar . readOct <$> (char '\\' >> (count 3 $ oneOf "01234567")))
<|>
noneOf "\\"
return $ replaceWithDiff fontmap c
where replaceWithDiff m c' = case lookup c' m of
Just s -> replaceWithCharDict s
Nothing -> T.pack [c']
replaceWithCharDict s = case Map.lookup s pdfcharmap of
Just cs -> cs
Nothing -> if "/uni" `isPrefixOf` s
then readUni s
else T.pack s
readUni s = case readHex (drop 4 s) of
[(i,"")] -> T.singleton $ chr i
[(i,x)] -> T.pack (chr i : " ")
_ -> T.pack s
octToChar [(o,"")] = chr o
octToChar _ = '?'
cidletter :: String -> PSParser T.Text
cidletter cidmapName = do
o1 <- octnum
o2 <- octnum
let d = 256 * o1 + o2
return $
if cidmapName == "Adobe-Japan1"
then adobeOneSix d
else error $ "Unknown cidmap" ++ cidmapName
octnum :: PSParser Int
octnum = do
d <- choice [ try $ escapedToDec <$> (char '\\' >> oneOf "nrtbf()\\")
, try $ octToDec . readOct <$> (char '\\' >> (count 3 $ oneOf "01234567"))
, try $ ord <$> noneOf "\\"
]
return $ d
where
octToDec [(o, "")] = o
octToDec _ = error "Unable to take Character in Octet"
escapedToDec 'n' = ord '\n'
escapedToDec 'r' = ord '\r'
escapedToDec 't' = ord '\t'
escapedToDec 'b' = ord '\b'
escapedToDec 'f' = ord '\f'
escapedToDec '\\' = ord '\\'
escapedToDec _ = 0
kern :: PSParser T.Text
kern = do
t <- digitParam
spaces
return $ if t < -60.0 then " " else ""
pdfopTf :: PSParser T.Text
pdfopTf = do
font <- (++) <$> string "/" <*> manyTill anyChar (try space)
spaces
t <- digitParam
spaces
string "Tf"
spaces
st <- getState
let ff = fontfactor st
updateState (\s -> s{ curfont = font
, fontfactor = t
, linex = t
, liney = t})
return ""
pdfopTD :: PSParser T.Text
pdfopTD = do
t1 <- digitParam
spaces
t2 <- digitParam
spaces
string "TD"
spaces
st <- getState
let ax = absolutex st
ay = absolutey st
lx = linex st
ly = liney st
lm = leftmargin st
ff = fontfactor st
(a,b,c,d,tmx,tmy) = text_m st
needBreakByX = a*t1 + c*t2 + tmx < ax
needBreakByY = abs (b*t1 + d*t2 + tmy - ay) > ff
needBreak = (needBreakByX || needBreakByY) && not (text_break st)
updateState (\s -> s { absolutex = if needBreak then 0 else a*t1 + c*t2 + tmx
, absolutey = b*t1 + d*t2 + tmy
, liney = -t2
, text_m = (a,b,c,d, a*t1 + c*t2 + tmx, b*t1 + d*t2 + tmy)
, text_break = needBreak
})
return $ if needBreak
then (desideParagraphBreak t1 t2 lx ly lm ff)
else if a*t1 + c*t2 + tmx > ax + 2*ff
then " " else ""
pdfopTd :: PSParser T.Text
pdfopTd = do
t1 <- digitParam
spaces
t2 <- digitParam
spaces
string "Td"
spaces
st <- getState
let ax = absolutex st
ay = absolutey st
lx = linex st
ly = liney st
lm = leftmargin st
ff = fontfactor st
(a,b,c,d,tmx,tmy) = text_m st
needBreakByX = a*t1 + c*t2 + tmx < ax
needBreakByY = abs (b*t1 + d*t2 + tmy - ay) > ff
needBreak = (needBreakByX || needBreakByY) && not (text_break st)
updateState (\s -> s { absolutex = if needBreak then 0 else a*t1 + c*t2 + tmx
, absolutey = b*t1 + d*t2 + tmy
, linex = lx
, liney = ly
, text_m = (a,b,c,d, a*t1 + c*t2 + tmx, b*t1 + d*t2 + tmy)
, text_break = needBreak
})
return $ if needBreak
then (desideParagraphBreak t1 t2 lx ly lm ff)
else if a*t1 + c*t2 + tmx > ax + 2*ff
then " " else ""
pdfopTw :: PSParser T.Text
pdfopTw = do
tw <- digitParam
spaces
string "Tw"
spaces
st <- getState
let ff = fontfactor st
updateState (\s -> s { fontfactor = tw
})
return $ ""
pdfopTL :: PSParser T.Text
pdfopTL = do
tl <- digitParam
spaces
string "TL"
spaces
st <- getState
let ff = fontfactor st
updateState (\s -> s { liney = ff + tl
})
return $ ""
pdfopTz :: PSParser T.Text
pdfopTz = do
tz <- digitParam
spaces
string "Tz"
spaces
st <- getState
let ff = fontfactor st
updateState (\s -> s { linex = ff + tz
})
return $ ""
pdfopTc :: PSParser T.Text
pdfopTc = do
tc <- digitParam
spaces
string "Tc"
spaces
return $ ""
pdfopTr :: PSParser T.Text
pdfopTr = do
tr <- digitParam
spaces
string "Tr"
spaces
st <- getState
let ff = fontfactor st
return $ ""
pdfopTs :: PSParser T.Text
pdfopTs = do
tc <- digitParam
spaces
string "Ts"
spaces
return $ ""
desideParagraphBreak :: Double -> Double -> Double -> Double -> Double -> Double
-> T.Text
desideParagraphBreak t1 t2 lx ly lm ff = T.pack $
(if abs t2 > 1.8*ly || (lx - t1) < lm
then " "
else "")
pdfopTm :: PSParser T.Text
pdfopTm = do
a <- digitParam
spaces
b <- digitParam
spaces
c <- digitParam
spaces
d <- digitParam
spaces
e <- digitParam
spaces
f <- digitParam
spaces
string "Tm"
spaces
st <- getState
let ax = absolutex st
ay = absolutey st
lx = linex st
ly = liney st
lm = leftmargin st
ff = fontfactor st
(_,_,_,_,tmx,tmy) = text_m st
newff = abs $ (a+d)/2
needBreakByX = a*tmx + c*tmy + e < ax
needBreakByY = abs (b*tmx + d*tmy + f - ay) > ff
needBreak = (needBreakByX || needBreakByY) && not (text_break st)
newst = st { absolutex = e
, absolutey = f
, linex = lx
, liney = ly
, text_lm = (a,b,c,d,e,f)
, text_m = (a,b,c,d,e,f)
, text_break = needBreak
}
putState newst
return $ T.empty
pdfopcm :: PSParser T.Text
pdfopcm = do
a <- digitParam
spaces
b <- digitParam
spaces
c <- digitParam
spaces
d <- digitParam
spaces
e <- digitParam
spaces
f <- digitParam
spaces
string "cm"
spaces
st <- getState
-- What should be the effect on the page text?
let ax = absolutex st
ay = absolutey st
lx = linex st
ly = liney st
lm = leftmargin st
ff = fontfactor st
(_,_,_,_,tmx,tmy) = text_m st
needBreakByX = a*tmx + c*tmy + e < ax
needBreakByY = abs (b*tmx + d*tmy + f - ay) > ff
needBreak = (needBreakByX || needBreakByY) && not (text_break st)
newst = st { absolutex = ax
, absolutey = ay
, linex = lx
, liney = ly
, text_lm = (a,b,c,d,e,f)
, text_m = (a,b,c,d,e,f)
, text_break = needBreak
}
putState newst
return T.empty
pdfopTast :: PSParser T.Text
pdfopTast = do
string "T*"
st <- getState
let ax = absolutex st
ay = absolutey st
lx = linex st
ly = liney st
lm = leftmargin st
ff = fontfactor st
(a,b,c,d,tmx,tmy) = text_m st
needBreakByX = tmx < ax
needBreakByY = d*ly + tmy > ly
needBreak = needBreakByX || needBreakByY
updateState (\s -> s { absolutex = if needBreak then 0 else tmx
, absolutey = tmy + ly
, linex = lx
, liney = ly
, text_m = (a,b,c,d, c*ly + tmx, d*ly + tmy)
, text_break = needBreak
})
return ""
digitParam :: PSParser Double
digitParam = do
sign <- many $ char '-'
num <- ((++) <$> (("0"++) <$> (string ".")) <*> many1 digit)
<|>
((++) <$> (many1 digit) <*> ((++) <$> (many $ char '.') <*> many digit))
return $ read $ sign ++ num
hexParam :: Parser T.Text
hexParam = do
char '<'
lets <- manyTill (oneOf "0123456789abcdefABCDEF") (try $ char '>')
return $ T.pack lets
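-- A rough usage note (a hedged addition, not in the original file): the
-- operators above consume fragments of a PDF text object such as
--   /F1 12 Tf          (select font F1 at size 12; parsed by pdfopTf)
--   14 0 0 14 72 720 Tm (set the text matrix; parsed by pdfopTm)
-- while psletter and octnum decode the bytes inside (...) string literals,
-- including \ooo octal escapes and /uniXXXX-style differences entries.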
|
k16shikano/hpdft
|
src/PDF/ContentStream.hs
|
mit
| 20,250 | 0 | 21 | 6,810 | 7,639 | 3,846 | 3,793 | 585 | 8 |
module Contextifier (
contextify
) where
import qualified Data.Map as Map
import Syntax (Context, Name (..), Term (..))
-- TODO invent a good name here.
newtype TermContexts = TermContexts (Map.Map String Context)
contextify :: Term -> Term
contextify = contextify' emptyContext
contextify' :: TermContexts -> Term -> Term
contextify' contexts term = case term of
Variable (Name name _) -> Variable (Name name (Just $ valueForName contexts name))
Application t1 t2 -> Application (contextify' contexts t1) (contextify' contexts t2)
Abstraction (Name name _) term ->
let
newContexts = shiftContextsForName contexts name
nameContext = valueForName newContexts name
in Abstraction (Name name (Just nameContext)) (contextify' newContexts term)
valueForName :: TermContexts -> String -> Context
valueForName (TermContexts ctxts) name
| Map.notMember name ctxts = 0
| otherwise = ctxts Map.! name
-- TODO: stop assuming context is an integer.
shiftContextsForName :: TermContexts -> String -> TermContexts
shiftContextsForName (TermContexts contexts) name =
TermContexts $ Map.insert name (valueForName (TermContexts contexts) name + 1) contexts
emptyContext :: TermContexts
emptyContext = TermContexts Map.empty
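-- A tiny illustration (a hedged addition, not in the original file): each
-- variable occurrence is tagged with how many enclosing binders of the same
-- name surround it, so the bound "x" below ends up with context 1, while a
-- free variable would keep context 0.
exampleIdentity :: Term
exampleIdentity = contextify (Abstraction (Name "x" Nothing) (Variable (Name "x" Nothing)))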
|
ysukhoverkhov/taplic4
|
src/06_UntypedLambda_Naive/Contextifier.hs
|
mit
| 1,266 | 0 | 14 | 225 | 379 | 195 | 184 | 25 | 3 |
module Main where
import qualified GHCJS.CommonJS as CommonJS
willThrowWithPairNumbers :: Int -> IO ()
willThrowWithPairNumbers n =
if even n
then error "What are you doing!?"
else putStrLn "[haskell] All good"
main :: IO ()
main = CommonJS.exportMain
[ CommonJS.pack ("willThrowWithPairNumbers", willThrowWithPairNumbers)
]
|
beijaflor-io/ghcjs-commonjs
|
examples/failure/Main.hs
|
mit
| 354 | 0 | 8 | 72 | 85 | 46 | 39 | 10 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Data.API.LinkedIn.Facet
( QueryFacet
, Facets(..)
, parseFacets
, Facet(..)
) where
import Control.Applicative ((<$>), (<*>))
import Data.Conduit (MonadThrow, Sink)
import Data.Text (Text, unpack)
import Data.XML.Types (Event)
import Text.XML.Stream.Parse
type QueryFacet = Text --temp
data Facets = Facets
{ totalFacets :: Integer
, allFacets :: [Facet]
} deriving (Show)
parseFacets :: MonadThrow m => Sink Event m (Maybe Facets)
parseFacets = tagName "facets" (requireAttr "total") $ \total -> do
facets <- many $ parseFacet
return $ Facets (read $ unpack total) facets
data Facet = Facet
{ facetCode :: Text
, facetName :: Text
, facetBuckets :: Maybe Buckets
} deriving (Show)
parseFacet :: MonadThrow m => Sink Event m (Maybe Facet)
parseFacet = tagNoAttr "facet" $ Facet
<$> (force "facet must contain a code" $ tagNoAttr "code" content)
<*> (force "facet must contain a name" $ tagNoAttr "name" content)
<*> parseBuckets
data Buckets = Buckets
{ totalBuckets :: Integer
, allBuckets :: [Bucket]
} deriving (Show)
parseBuckets :: MonadThrow m => Sink Event m (Maybe Buckets)
parseBuckets = tagName "buckets" (requireAttr "total") $ \total -> do
buckets <- many $ parseBucket
return $ Buckets (read $ unpack total) buckets
data Bucket = Bucket
{ bucketCode :: Text
, bucketName :: Text
, bucketCount :: Integer
, selected :: Bool
} deriving (Show)
parseBucket :: MonadThrow m => Sink Event m (Maybe Bucket)
parseBucket = tagNoAttr "bucket" $ Bucket
<$> (force "bucket must contain code" $ tagNoAttr "code" content)
<*> (force "bucket must contain name" $ tagNoAttr "name" content)
<*> (fmap (read . unpack) $ force "bucket must contain a count"
$ tagNoAttr "count" content)
<*> (fmap ("true"==) $ force "bucket must contain a selected"
$ tagNoAttr "selected" content)
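-- For reference (a hedged sketch, not from the original file), the parsers
-- above expect XML shaped roughly like:
--   <facets total="1">
--     <facet><code>location</code><name>Location</name>
--       <buckets total="1">
--         <bucket><code>us</code><name>US</name><count>42</count><selected>true</selected></bucket>
--       </buckets>
--     </facet>
--   </facets>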
|
whittle/linkedin-api
|
Data/API/LinkedIn/Facet.hs
|
mit
| 2,207 | 0 | 13 | 674 | 621 | 337 | 284 | 52 | 1 |
module A4com where
run file func = rdnam file >>= return.func -- example: run "task1_out.nam" task1_tcpftp
rdnam::String->IO [[String]]
rdnam fname = readFile fname >>= return.map words.lines
smplpred cmplist wds = and $ map (\(i,w)->wds !! i==w) cmplist
smplfilt cmplist = filter (smplpred cmplist)
sumtr term_index_in_line = sum . map (\wds->(read $ wds !! term_index_in_line)::Int)
task1_tcpftp::[[String]]->Int
task1_tcpftp = sumtr 10 . smplfilt [(0,"r"),(4,"2"),(6,"3"),(16,"1"),(8,"tcp")]
task1_udpcbr = sumtr 10 . smplfilt [(0,"r"),(4,"2"),(6,"3"),(16,"2"),(8,"cbr")]
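-- Usage sketch (a hedged addition; assumes a trace file "task1_out.nam" whose
-- whitespace-separated columns match the indices filtered on above):
exampleTotal :: IO Int
exampleTotal = run "task1_out.nam" task1_tcpftp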
|
ducis/computer-network-labs
|
a4/A4com.hs
|
mit
| 596 | 0 | 11 | 93 | 294 | 166 | 128 | 10 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module Compiler.PreAST.Type.Expression where
import Compiler.Serializable
import Compiler.PreAST.Type.Symbol
--------------------------------------------------------------------------------
-- Expression (with >/>=/=....)
data Expression a = UnaryExpression (SimpleExpression a)
| BinaryExpression (SimpleExpression a) RelOp (SimpleExpression a)
deriving Functor
instance (Serializable a, Sym a) => Serializable (Expression a) where
serialize (UnaryExpression e) = serialize e
serialize (BinaryExpression a o b) = serialize a ++ " " ++ serialize o ++ " " ++ serialize b
--------------------------------------------------------------------------------
-- Simple Expression (with +/-)
data SimpleExpression a = TermSimpleExpression (Term a)
| OpSimpleExpression (SimpleExpression a) AddOp (Term a)
deriving Functor
instance (Serializable a, Sym a) => Serializable (SimpleExpression a) where
serialize (TermSimpleExpression t) = serialize t
serialize (OpSimpleExpression a o b) = serialize a ++ " " ++ serialize o ++ " " ++ serialize b
--------------------------------------------------------------------------------
-- Term (with *//)
data Term a = FactorTerm (Factor a)
| OpTerm (Term a) MulOp (Factor a)
| NegTerm (Factor a)
deriving Functor
instance (Serializable a, Sym a) => Serializable (Term a) where
serialize (FactorTerm f) = serialize f
serialize (OpTerm a o b) = serialize a ++ " " ++ serialize o ++ " " ++ serialize b
serialize (NegTerm f) = "-" ++ serialize f
--------------------------------------------------------------------------------
-- Factor (basic building block)
data Factor a = VariableFactor a
| NumberFactor Value
| InvocationFactor a [Expression a] -- id()
| SubFactor (Expression a) -- (...)
              | NotFactor (Factor a) -- not id
deriving Functor
instance (Serializable a, Sym a) => Serializable (Factor a) where
serialize (VariableFactor sym) = serialize sym
serialize (InvocationFactor sym exprs) = serialize sym ++ "(" ++ exprs' ++ ")"
where exprs' = intercalate' ", " exprs
serialize (NumberFactor s) = serialize s
serialize (SubFactor e) = "(" ++ serialize e ++ ")"
serialize (NotFactor f) = "not " ++ serialize f
--------------------------------------------------------------------------------
-- Operators
data AddOp = Plus | Minus
data MulOp = Mul | Div
data RelOp = S | L | E | NE | SE | LE
instance Serializable AddOp where
serialize Plus = "+"
serialize Minus = "-"
instance Serializable MulOp where
serialize Mul = "*"
serialize Div = "/"
instance Serializable RelOp where
serialize S = "<"
serialize L = ">"
serialize E = "="
serialize NE = "!="
serialize SE = "<="
serialize LE = ">="
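-- Serialization sketch (hedged; NumberFactor's Value type comes from
-- Compiler.PreAST.Type.Symbol and is not shown here). An expression like
-- a + b * c is layered bottom-up as
--   UnaryExpression
--     (OpSimpleExpression (TermSimpleExpression (FactorTerm a'))
--                         Plus
--                         (OpTerm (FactorTerm b') Mul c'))
-- where a', b', c' are Factors for the three operands, and serialize renders
-- it back as "a + b * c".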
|
banacorn/mini-pascal
|
src/Compiler/PreAST/Type/Expression.hs
|
mit
| 3,002 | 0 | 10 | 726 | 804 | 421 | 383 | 53 | 0 |
import Control.Monad
import Control.Applicative
import Data.Char
import System.IO
---- Testing functions --------------------------------
-- tests whether current token is a recognized operator
isOperator :: String -> Bool
isOperator string = elem (head string) "+-*/"
-- tests whether current token is a recognized number
isANumber :: String -> Bool
isANumber string
| null parsed = False
| (snd . head) parsed == "" = True
| otherwise = False
where parsed = reads string :: [(Double, String)]
-------------------------------------------------------
-- passes the appropriate arithmetic function to operate
perform :: String -> [Double] -> [Double]
perform "+" = operate (+)
perform "-" = operate (-)
perform "*" = operate (*)
perform "/" = operate (/)
-- takes a fx and stack and returns stack after fx application
operate :: (Double -> Double -> Double) -> [Double] -> [Double]
operate f stack = f sd fs : rest where (fs:sd:rest) = stack
-- reducer task for the fold operation
reducer :: [Double] -> String -> Either String [Double]
reducer stack currentToken
| isANumber currentToken = Right $ read currentToken : stack
| isOperator currentToken = if (length stack < 2)
then Left "Too many operators\n"
else Right $ perform currentToken stack
| otherwise = Left "Unrecognized input\n"
-- if we got an error string, return the error string
-- if we got back a stack with more than one value, there
-- were too few operators
receiveFinalStack :: Either String [Double] -> Either String Double
receiveFinalStack (Left s) = Left s
receiveFinalStack (Right [x]) = Right x
receiveFinalStack (Right (_:_)) = Left "Too few operators\n"
receiveFinalStack _ = Left ""
-- breaks input string into list of tokens, folds
-- them with the reducer function and processes result
dispatcher :: String -> Either String Double
dispatcher = receiveFinalStack . (foldM reducer []) . words
displayResult :: Either String Double -> IO ()
displayResult v = case v of
Left s -> putStr s
Right d -> print d
main :: IO ()
main = forever $ do
putStr "RPN: "
hFlush stdout
getLine >>= (displayResult . dispatcher)
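-- A few illustrative evaluations (a hedged addition, not in the original
-- file); each pairs an input with the value dispatcher should produce.
exampleRuns :: [(String, Either String Double)]
exampleRuns =
  [ ("3 4 +"  , Right 7.0)
  , ("3 4 + *", Left "Too many operators\n")
  , ("3 4"    , Left "Too few operators\n")
  ]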
|
tonyfischetti/HRPNC
|
RPN.hs
|
mit
| 2,361 | 3 | 11 | 607 | 659 | 328 | 331 | 42 | 2 |
module Physics.Draw where
import Linear.Affine
import Linear.V2
import qualified SDL.Video.Renderer as R
import qualified Physics.Contact as C
import Physics.Transform
import Physics.Linear
import Physics.Geometry
toRenderable :: (Functor f, RealFrac a, Integral b) => f a -> f b
toRenderable = fmap floor
centeredRectangle :: (Fractional a) => P2 a -> V2 a -> R.Rectangle a
centeredRectangle center size = R.Rectangle (center .-^ halfSize) size
where halfSize = fmap (/2) size
viewTransform :: (Floating a) => V2 a -> V2 a -> V2 a -> WorldTransform a
viewTransform window (V2 x y) d = joinTransforms' [ translateTransform window'
, scaleTransform (V2 x (-y))
, translateTransform (-d) ]
where window' = fmap (/2) window
drawLine :: (RealFrac a) => R.Renderer -> P2 a -> P2 a -> IO ()
drawLine r a b = R.drawLine r (toRenderable a) (toRenderable b)
drawLine_ :: (RealFrac a) => R.Renderer -> (P2 a, P2 a) -> IO ()
drawLine_ r = uncurry (drawLine r)
drawPoint :: (RealFrac a) => R.Renderer -> P2 a -> IO ()
drawPoint r p = R.drawPoint r (toRenderable p)
drawThickPoint :: (RealFrac a) => R.Renderer -> P2 a -> IO ()
drawThickPoint r p = R.fillRect r (Just . toRenderable $ centeredRectangle p (V2 4 4))
drawConvexHull :: (RealFrac a) => R.Renderer -> ConvexHull a -> IO ()
drawConvexHull r h = sequence_ (fmap f (vList $ vertices h))
where f v = drawLine r (vertex v) (vertex $ vNext v)
extractDepth :: (Floating a, Ord a) => LocalT a (Overlap a) -> V2 a
extractDepth = wExtract_ . lmap f
where f ovl = fmap (*(-s)) n
where s = overlapDepth ovl
n = iExtract . snd . overlapEdge $ ovl
extractEdge :: (Floating a, Ord a) => LocalT a (Overlap a) -> (P2 a, P2 a)
extractEdge = wExtract_ . lmap f
where f ovl = (g a, g b)
where a = fst (overlapEdge ovl)
b = lmap vNext a
g = wExtract_ . lmap vertex
extractPenetrator :: (Floating a, Ord a) => LocalT a (Overlap a) -> P2 a
extractPenetrator = wExtract_ . lmap f
where f = iExtract . snd . overlapPenetrator
drawOverlap :: (Floating a, RealFrac a, Ord a) => R.Renderer -> LocalT a (Overlap a) -> IO ()
drawOverlap r ovl = do
drawLine r a b
drawLine r c c'
drawThickPoint r pen
where depth = extractDepth ovl
(a, b) = extractEdge ovl
c = center2 a b
c' = c .+^ depth
pen = extractPenetrator ovl
extractContactPoints :: (Floating a) => LocalT a (Contact a) -> Either (P2 a) (P2 a, P2 a)
extractContactPoints cont = either (Left . wExtract_) (Right . wExtract_) $ flipEither (lmap contactPoints' cont)
where flipEither :: LocalT a (Either b c) -> Either (LocalT a b) (LocalT a c)
flipEither (LocalT t (Left x)) = Left (LocalT t x)
flipEither (LocalT t (Right x)) = Right (LocalT t x)
extractContactNormal :: (Floating a) => LocalT a (Contact a) -> V2 a
extractContactNormal = wExtract_ . lmap contactNormal
drawContact' :: (Floating a, RealFrac a, Show a) => R.Renderer -> LocalT a (C.Contact a) -> IO ()
drawContact' r cont = do
drawThickPoint r p
drawLine r p (p .+^ n)
where p = wExtract_ . lmap C.contactPoint $ cont
n = wExtract_ . lmap C.contactNormal $ cont
drawContact :: (Floating a, RealFrac a) => R.Renderer -> LocalT a (Contact a) -> IO ()
drawContact r cont = do
(c, c') <- either f g ps
drawLine r c c'
where f a = do
drawThickPoint r a
return (a, a .+^ n)
g (a, b) = do
drawThickPoint r a
drawThickPoint r b
return (c, c')
where c = center2 a b
c' = c .+^ n
ps = extractContactPoints cont
n = extractContactNormal cont
|
ublubu/shapes-demo
|
src/Physics/Draw.hs
|
mit
| 3,752 | 0 | 11 | 1,005 | 1,634 | 818 | 816 | 81 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.API.Shopify.Request where
import Data.Default (def)
import Data.Text (append, pack, Text)
import Data.Text.Encoding (encodeUtf8)
import Network.API.Shopify.Types (MetafieldId(MetafieldId), APICredential(OAuthCred, BasicCred), ProductId(ProductId), StoreName(StoreName), VariantId(VariantId))
import Network.HTTP.Client (applyBasicAuth, method, Request(host, path, port, requestHeaders, secure))
import Network.HTTP.Types.Method (methodGet, methodPost, methodPut, methodDelete)
-- | default request
defaultRequest :: Request
defaultRequest = def
-- | base url
shopifyHost :: StoreName -> Text
shopifyHost (StoreName storeName) = storeName `append` ".myshopify.com"
-- | base product urls
createProductReq :: StoreName -> Request
createProductReq storeName = defaultRequest { method = methodPost
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/products.json"
, secure = True
, port = 443
}
readProductReq :: StoreName -> ProductId -> Request
readProductReq storeName (ProductId i) = defaultRequest { method = methodGet
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/products/" ++ show i ++ ".json"
, secure = True
, port = 443
}
readProductsReq :: StoreName -> Request
readProductsReq storeName = defaultRequest { method = methodGet
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/products.json"
, secure = True
, port = 443
}
updateProductReq :: StoreName -> ProductId -> Request
updateProductReq storeName (ProductId i) = defaultRequest { method = methodPut
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/products/" ++ show i ++ ".json"
, secure = True
, port = 443
}
deleteProductReq :: StoreName -> ProductId -> Request
deleteProductReq storeName (ProductId i) = defaultRequest { method = methodDelete
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/products/" ++ show i ++ ".json"
, secure = True
, port = 443
}
-- | base metafield urls
createMetafieldReq :: StoreName -> Request
createMetafieldReq storeName = defaultRequest { method = methodPost
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/metafields.json"
, secure = True
, port = 443
}
readMetafieldReq :: StoreName -> MetafieldId -> Request
readMetafieldReq storeName (MetafieldId i) = defaultRequest { method = methodGet
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/metafields/" ++ show i ++ ".json"
, secure = True
, port = 443
}
updateMetafieldReq :: StoreName -> MetafieldId -> Request
updateMetafieldReq storeName (MetafieldId i) = defaultRequest { method = methodPut
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/metafields/" ++ show i ++ ".json"
, secure = True
, port = 443
}
deleteMetafieldReq :: StoreName -> MetafieldId -> Request
deleteMetafieldReq storeName (MetafieldId i) = defaultRequest { method = methodDelete
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/metafields/" ++ show i ++ ".json"
, secure = True
, port = 443
}
-- | base variant urls
createVariantReq :: StoreName -> Request
createVariantReq storeName = defaultRequest { method = methodPost
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/variants.json"
, secure = True
, port = 443
}
readVariantReq :: StoreName -> VariantId -> Request
readVariantReq storeName (VariantId i) = defaultRequest { method = methodGet
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/variants/" ++ show i ++ ".json"
, secure = True
, port = 443
}
updateVariantReq :: StoreName -> VariantId -> Request
updateVariantReq storeName (VariantId i) = defaultRequest { method = methodPut
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/variants/" ++ show i ++ ".json"
, secure = True
, port = 443
}
deleteVariantReq :: StoreName -> VariantId -> Request
deleteVariantReq storeName (VariantId i) = defaultRequest { method = methodDelete
, host = encodeUtf8 $ shopifyHost storeName
, path = encodeUtf8 . pack $ "/admin/variants/" ++ show i ++ ".json"
, secure = True
, port = 443
}
-- | simple method to add the access token header to a request.
authorizeRequest :: APICredential -> Request -> Request
authorizeRequest (OAuthCred token) req = req { requestHeaders = newHeaders }
where newHeaders = ("X-Shopify-Access-Token", encodeUtf8 token) : requestHeaders req
authorizeRequest (BasicCred apiKey password) req = applyBasicAuth apiBytes passBytes req
where apiBytes = encodeUtf8 apiKey
passBytes = encodeUtf8 password
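-- Usage sketch (a hedged addition with hypothetical store name, token, and
-- product id; not part of the original module): build a request and attach
-- credentials before handing it to Network.HTTP.Client.
exampleProductReq :: Request
exampleProductReq =
  authorizeRequest (OAuthCred "example-token")
                   (readProductReq (StoreName "example-store") (ProductId 42))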
|
aaronlevin/haskell-shopify
|
src/Network/API/Shopify/Request.hs
|
mit
| 7,587 | 0 | 10 | 3,608 | 1,329 | 760 | 569 | 96 | 1 |
module Text.Fractions where
import Control.Applicative
import Data.Ratio ((%))
import Text.Trifecta
badFraction = "1/0"
alsoBad = "10"
shouldWork = "1/2"
shouldAlsoWork = "2/1"
parseFraction :: Parser Rational
parseFraction = do
numerator <- decimal
char '/'
denominator <- decimal
return (numerator % denominator)
virtuousFraction :: Parser Rational
virtuousFraction = do
numerator <- decimal
char '/'
denominator <- decimal
case denominator of
0 -> fail "Denominator cannot be zero"
_ -> return (numerator % denominator)
-- Exercise: Try Try
type FracOrInt = Either Rational Integer
parseFracOrInt :: Parser FracOrInt
parseFracOrInt = (Left <$> try virtuousFraction) <|> (Right <$> try integer)
parseDec :: Parser Double
parseDec = do
whole <- decimal
char '.'
dec <- decimal
return (read $ show whole ++ "." ++ show dec :: Double)
type DecOrFrac = Either Double Rational
parseDecOrFrac :: Parser DecOrFrac
parseDecOrFrac = (Left <$> try parseDec) <|> (Right <$> try virtuousFraction)
main :: IO ()
main = do
-- print $ parseString parseFraction mempty badFraction
-- print $ parseString parseFraction mempty shouldWork
-- print $ parseString parseFraction mempty shouldAlsoWork
-- print $ parseString parseFraction mempty alsoBad
print $ parseString virtuousFraction mempty badFraction
print $ parseString virtuousFraction mempty shouldWork
print $ parseString virtuousFraction mempty shouldAlsoWork
print $ parseString virtuousFraction mempty alsoBad
print $ parseString parseFracOrInt mempty "1/2"
print $ parseString parseFracOrInt mempty "42"
print $ parseString parseDecOrFrac mempty "1.5"
print $ parseString parseDecOrFrac mempty "5/2"
|
mudphone/HaskellBook
|
src/Fractions.hs
|
mit
| 1,713 | 0 | 12 | 295 | 450 | 221 | 229 | 44 | 2 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Db.Transaction
( Transaction'(Transaction)
, NewTransaction
, Transaction
, transactionQuery
, allTransactions
, getTransaction
, insertTransaction
, transactionId
, transactionDate
, transactionAmount
, transactionBalance
, transactionType
, transactionPlaceId
, transactionAccountId
) where
import BasePrelude hiding (optional)
import Control.Lens
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Text (Text)
import Data.Time (Day)
import Opaleye
import Db.Internal
data Transaction' a b c d e f g = Transaction
{ _transactionId :: a
, _transactionDate :: b
, _transactionAmount :: c
, _transactionBalance :: d
, _transactionType :: e
, _transactionPlaceId :: f
, _transactionAccountId :: g
} deriving (Eq,Show)
makeLenses ''Transaction'
type Transaction = Transaction' Int Day Double Double Text (Maybe Int) Int
type TransactionColumn = Transaction'
(Column PGInt4)
(Column PGDate)
(Column PGFloat8) -- These should be non-floating point numbers
(Column PGFloat8) -- but Opaleye doesn't support these yet. :(
(Column PGText)
(Column (Nullable PGInt4))
(Column PGInt4)
makeAdaptorAndInstance "pTransaction" ''Transaction'
type NewTransaction = Transaction' (Maybe Int) Day Double Double Text (Maybe Int) Int
type NewTransactionColumn = Transaction'
(Maybe (Column PGInt4))
(Column PGDate)
(Column PGFloat8)
(Column PGFloat8)
(Column PGText)
(Column (Nullable PGInt4))
(Column PGInt4)
transactionTable :: Table NewTransactionColumn TransactionColumn
transactionTable = Table "transaction" $ pTransaction Transaction
{ _transactionId = optional "id"
, _transactionDate = required "date"
, _transactionAmount = required "amount"
, _transactionBalance = required "balance"
, _transactionType = required "type"
, _transactionPlaceId = required "place_id"
, _transactionAccountId = required "account_id"
}
transactionQuery :: Query TransactionColumn
transactionQuery = queryTable transactionTable
allTransactions :: Db [Transaction]
allTransactions = liftQuery transactionQuery
insertTransaction :: NewTransaction -> Db Int
insertTransaction =
liftInsertReturningFirst transactionTable (view transactionId) . packNew
getTransaction :: Int -> Db (Maybe Transaction)
getTransaction i = liftQueryFirst $ proc () -> do
t <- transactionQuery -< ()
restrict -< t^.transactionId .== pgInt4 i
returnA -< t
packNew :: NewTransaction -> NewTransactionColumn
packNew = pTransaction Transaction
{ _transactionId = fmap pgInt4
, _transactionDate = pgDay
, _transactionAmount = pgDouble
, _transactionBalance = pgDouble
, _transactionType = pgStrictText
, _transactionPlaceId = maybeToNullable . fmap pgInt4
, _transactionAccountId = pgInt4
}
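-- Usage sketch (a hedged addition; assumes Db from Db.Internal is a Monad, as
-- its use above suggests): insert a row and immediately read it back.
reinsertTransaction :: NewTransaction -> Db (Maybe Transaction)
reinsertTransaction t = insertTransaction t >>= getTransaction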
|
benkolera/talk-stacking-your-monads
|
code/src/Db/Transaction.hs
|
mit
| 3,123 | 1 | 11 | 620 | 712 | 395 | 317 | 90 | 1 |