code stringlengths 5–1.03M | repo_name stringlengths 5–90 | path stringlengths 4–158 | license stringclasses 15 values | size int64 5–1.03M | n_ast_errors int64 0–53.9k | ast_max_depth int64 2–4.17k | n_whitespaces int64 0–365k | n_ast_nodes int64 3–317k | n_ast_terminals int64 1–171k | n_ast_nonterminals int64 1–146k | loc int64 -1–37.3k | cycloplexity int64 -1–1.31k
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Algebraic.Nested.Type
where
import Autolib.ToDoc hiding ( empty )
import Autolib.Reader
import qualified Autolib.Set as S
import Autolib.Size
import Autolib.Depth
import Data.Typeable
example :: Type Integer
example = read "{ 2, {}, {3, {4}}}"
data Type a = Make ( S.Set ( Item a ))
deriving ( Eq, Ord, Typeable )
instance ( Ord a, ToDoc a ) => ToDoc ( Type a ) where
toDoc ( Make xs )
= braces $ fsep $ punctuate comma
$ map toDoc $ S.setToList xs
instance ( Ord a, Reader a ) => Reader ( Type a ) where
reader = my_braces $ do
xs <- Autolib.Reader.sepBy reader my_comma
return $ Make $ S.mkSet xs
instance Size ( Type a ) where
size ( Make xs ) = sum $ map size $ S.setToList xs
full_size ( Make xs ) = succ $ sum $ map full_item_size $ S.setToList xs
top_length ( Make xs ) = S.cardinality xs
instance Depth ( Type a ) where
depth ( Make xs ) = 1 + maximum ( 0 : map depth ( S.setToList xs ) )
flatten ( Make xs ) = concat $ map flatten_item $ S.setToList xs
-----------------------------------------------------------------------
data Item a = Unit a | Packed ( Type a )
deriving ( Eq, Ord, Typeable )
instance ( Ord a, ToDoc a ) => ToDoc ( Item a ) where
toDoc ( Unit a ) = toDoc a
toDoc ( Packed p ) = toDoc p
instance ( Ord a, Reader a ) => Reader ( Item a ) where
reader
= do n <- reader ; return $ Packed n
<|> do i <- reader ; return $ Unit i
instance Size ( Item a ) where
size ( Unit a ) = 1
size ( Packed t ) = 1 + size t
full_item_size i = case i of
Unit a -> 1
Packed t -> 1 + full_size t
flatten_item i = case i of
Unit a -> [ a ]
Packed t -> flatten t
instance Depth ( Item a ) where
depth ( Unit a ) = 0
depth ( Packed t ) = depth t
| florianpilz/autotool | src/Algebraic/Nested/Type.hs | gpl-2.0 | 1,809 | 0 | 13 | 510 | 763 | 382 | 381 | 47 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Markov.Type where
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Size
import Data.Typeable
newtype Program = Program [ Rule ]
deriving (Eq, Ord, Typeable )
type Rule = ( String,String )
$(derives [makeReader, makeToDoc] [''Program])
example :: Program
example = Program [ ( "01", "10"), ("#", "") ]
instance Size Program where
size (Program rules) = fromIntegral $ length rules
instance Show Program where
show = render . toDoc
| marcellussiegburg/autotool | collection/src/Markov/Type.hs | gpl-2.0 | 682 | 0 | 9 | 113 | 170 | 99 | 71 | 21 | 1 |
--
-- (C) 2011-14 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- The full GNU General Public License is included in this distribution in
-- the file called "COPYING".
import System.IO.Unsafe
import System.Process
import System.Directory
import System.Environment
import System.FilePath
import Control.Monad(void,when,unless,liftM,forM,filterM)
import Control.Applicative
import Text.Regex.Posix
import Data.List
import Data.Maybe
import Data.Function(on)
pfq_omatic_ver,pfq_kcompat,proc_cpuinfo :: String
pfq_symvers :: [String]
pfq_omatic_ver = "4.4"
proc_cpuinfo = "/proc/cpuinfo"
pfq_kcompat = "/usr/include/linux/pf_q-kcompat.h"
pfq_symvers = [ "/lib/modules/" ++ uname_r ++ "/kernel/net/pfq/Module.symvers",
home_dir ++ "/PFQ/kernel/Module.symvers",
"/opt/PFQ/kernel/Module.symvers"
]
getMostRecentFile :: [FilePath] -> IO (Maybe FilePath)
getMostRecentFile xs = do
xs' <- filterM doesFileExist xs >>=
mapM (\f -> liftM (\m -> (f,m)) $ getModificationTime f) >>= \x ->
return $ sortBy (flip compare `on` snd) x
return $ listToMaybe (map fst xs')
main :: IO ()
main = do
args <- getArgs
putStrLn $ "[PFQ] pfq-omatic: v" ++ pfq_omatic_ver
generalChecks
getRecursiveContents "." [".c"] >>= mapM_ tryPatch
symver <- liftM fromJust $ getMostRecentFile pfq_symvers
copyFile symver "Module.symvers"
let cmd = "make KBUILD_EXTRA_SYMBOLS=" ++ symver ++ " -j" ++ show getNumberOfPhyCores ++ " " ++ unwords args
putStrLn $ "[PFQ] compiling: " ++ cmd ++ "..."
void $ system cmd
putStrLn "[PFQ] done."
{-# NOINLINE uname_r #-}
uname_r :: String
uname_r = unsafePerformIO $
head . lines <$> readProcess "/bin/uname" ["-r"] ""
{-# NOINLINE home_dir #-}
home_dir :: String
home_dir = unsafePerformIO getHomeDirectory
regexFunCall :: String -> Int -> String
regexFunCall fun n =
fun ++ "[[:space:]]*" ++ "\\(" ++ args n ++ "\\)"
where args 0 = "[[:space:]]*"
args 1 = "[^,]*"
args x = "[^,]*," ++ args (x-1)
tryPatch :: FilePath -> IO ()
tryPatch file =
readFile file >>= \c ->
when (c =~ (regexFunCall "netif_rx" 1 ++ "|" ++ regexFunCall "netif_receive_skb" 1 ++ "|" ++ regexFunCall "napi_gro_receive" 2)) $
doesFileExist (file ++ ".omatic") >>= \orig ->
if orig
then putStrLn $ "[PFQ] " ++ file ++ " is already patched :)"
else makePatch file
makePatch :: FilePath -> IO ()
makePatch file = do
putStrLn $ "[PFQ] patching " ++ file
src <- readFile file
renameFile file $ file ++ ".omatic"
writeFile file $ "#include " ++ show pfq_kcompat ++ "\n" ++ src
generalChecks :: IO ()
generalChecks = do
doesFileExist pfq_kcompat >>= \kc ->
unless kc $ error "error: could not locate pfq-kcompat header!"
symver <- getMostRecentFile pfq_symvers
unless (isJust symver) $ error "error: could not locate pfq Module.symvers!"
putStrLn $ "[PFQ] using " ++ fromJust symver ++ " file (most recent)"
doesFileExist "Makefile" >>= \mf ->
unless mf $ error "error: Makefile not found!"
type Ext = String
getRecursiveContents :: FilePath -> [Ext] -> IO [FilePath]
getRecursiveContents topdir ext = do
names <- getDirectoryContents topdir
let properNames = filter (`notElem` [".", ".."]) names
paths <- forM properNames $ \fname -> do
let path = topdir </> fname
isDirectory <- doesDirectoryExist path
if isDirectory
then getRecursiveContents path ext
else return [path | takeExtensions path `elem` ext]
return (concat paths)
{-# NOINLINE getNumberOfPhyCores #-}
getNumberOfPhyCores :: Int
getNumberOfPhyCores = unsafePerformIO $
length . filter (isInfixOf "processor") . lines <$> readFile proc_cpuinfo
| Mr-Click/PFQ | user/pfq-omatic/pfq-omatic.hs | gpl-2.0 | 4,571 | 0 | 17 | 1,023 | 1,082 | 553 | 529 | 89 | 3 |
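For reference, a small sketch of what the regex builder in the module above produces when unfolded; netif_rx is one of the calls tryPatch already scans for, and demoRegex is only an illustrative name.
-- Unfolding regexFunCall:
--   regexFunCall "netif_rx" 1  ==  "netif_rx[[:space:]]*\\([^,]*\\)"
-- i.e. the POSIX regex  netif_rx[[:space:]]*\([^,]*\)  matching a
-- one-argument call, with optional whitespace before the parentheses.
demoRegex :: String
demoRegex = regexFunCall "netif_rx" 1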
{-# LANGUAGE OverloadedStrings #-}
module Utils where
import Database.MySQL.Base
(MySQLConn, ConnectInfo(..), connect, defaultConnectInfo)
import System.Environment (lookupEnv)
import qualified Data.ByteString.Char8 as BS
connectToMysql :: IO MySQLConn
connectToMysql = do
mysqlUser <- maybe "" BS.pack <$> lookupEnv "DB_USER"
mysqlPass <- maybe "" BS.pack <$> lookupEnv "DB_PASS"
connect $ defaultConnectInfo
{ ciUser = mysqlUser
, ciDatabase = "retailzen"
, ciPassword = mysqlPass
}
| Southern-Exposure-Seed-Exchange/southernexposure.com | server/scripts/Utils.hs | gpl-3.0 | 552 | 0 | 10 | 127 | 130 | 74 | 56 | 14 | 1 |
import System.Environment
import System.Exit
import System.Process
import Control.Applicative
import Control.Concurrent.Async
import Data.Maybe
import Data.Traversable (Traversable)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
type MachineName = ByteString
-- the Traversable may not be a good choice here, since we cannot do
-- any form of filtering, which is probably what we want.
filterReachable :: Traversable c => c MachineName -> IO (c (Maybe MachineName))
filterReachable = mapConcurrently isOK
where isOK m = checkOK <$> mkProcessForNode "uptime" noInput m
checkOK (RemoteCommandResult m ExitSuccess _ _) = Just m
checkOK _ = Nothing
data RemoteCommandResult = RemoteCommandResult
{ rcr_machine :: MachineName
, rcr_result :: ExitCode
, rcr_stdout :: ByteString
, rcr_stderr :: ByteString
} deriving (Eq, Ord, Show)
mkProcessForNode :: ByteString -> RemoteCommandInput -> MachineName -> IO RemoteCommandResult
mkProcessForNode command input node = (\(c, o, e) -> RemoteCommandResult node c (BC.pack o) (BC.pack e)) <$> readProcessWithExitCode
"ssh" [ "dollar-gate"
, unwords $ [ "ssh"
, "-o ConnectTimeout=5"
, "-o StrictHostKeyChecking=no"
, "-q"
, "-l", "root"
, BC.unpack node
, "'" ++ BC.unpack command ++ "'"
]
] (BC.unpack . input $ node)
type RemoteCommandInput = MachineName -> ByteString
noInput :: RemoteCommandInput
noInput = const BC.empty
mainTest command nodes = do
result <- mapConcurrently (mkProcessForNode command noInput) nodes
return result
mainTestWithInput command input nodes = do
result <- mapConcurrently (mkProcessForNode command input) nodes
return result
mainTestWithScript :: Traversable c => RemoteCommandInput -> c MachineName -> IO (c RemoteCommandResult)
mainTestWithScript = mainTestWithInput "/bin/bash"
script :: RemoteCommandInput
script _ = BC.unlines
[ "cat /proc/meminfo | head -n 1"
, "cat /proc/cpuinfo | grep -c '^processor'"
, "dmidecode -s system-product-name"
]
| mjansen/remote-command | remote-command.hs | gpl-3.0 | 2,156 | 0 | 13 | 471 | 527 | 280 | 247 | 48 | 2 |
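A minimal usage sketch for the helpers above; the host names are placeholders, and the hard-coded "dollar-gate" jump host used by mkProcessForNode is assumed to be reachable.
-- Run the inventory script on two hypothetical machines and print the results.
demo :: IO ()
demo = do
  results <- mainTestWithScript script (map BC.pack ["web01", "web02"])
  mapM_ print results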
module CNBase
where
import CNTypes
import Tridiag
import Vector
hbar :: (Fractional a) => a
hbar = 0.6582119 -- µeV ns
diffMtx :: RealFloat a => VKey -> Operator a
diffMtx = flip fromBand (1,-2,1)
waveEntries :: (RealFrac a) => Interval a -> a -> Int
waveEntries (x0,xe) dx = ceiling $ (xe-x0)/dx + 1
takeSteps2D :: Int -> Waveset2D a -> Waveset2D a
takeSteps2D i (Waveset2D ws dr dt r0) = Waveset2D (take i ws) dr dt r0
takeTil2D :: RealFrac a => a -> Waveset2D a -> Waveset2D a
takeTil2D a wset = takeSteps2D i wset
where i = ceiling $ a/wset2DDt wset + 1
takeSteps :: Int -> Waveset a -> Waveset a
takeSteps i (Waveset ws dx dt x0) = Waveset (take i ws) dx dt x0
takeTil :: RealFrac a => a -> Waveset a -> Waveset a
takeTil a wset = takeSteps i wset
where i = ceiling $ a/wsetDt wset + 1
| KiNaudiz/bachelor | CN/CNBase.hs | gpl-3.0 | 808 | 0 | 9 | 175 | 375 | 190 | 185 | 20 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE LambdaCase #-}
--------------------------------------------------------------------------------
module Main (main) where
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
import Data.Semigroup
import System.Environment
import qualified Utils.Color as Color
import qualified Utils.Icon as Icon
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.SetWMName
import XMonad.Keybindings
import XMonad.Layout.Fullscreen
import XMonad.Layout.LayoutModifier
import XMonad.Util.EZConfig (additionalKeysP)
import XMonad.Workspaces
--------------------------------------------------------------------------------
main :: IO ()
main =
getArgs >>= \case
["--restart"] -> sendRestart
_ ->
launch
=<< statusBar "d12-xmobar" statusBarPP toggleStrutsKey xmonadConfig
--------------------------------------------------------------------------------
type XMonadConfig =
XConfig
( ModifiedLayout AvoidStruts (Choose Tall (Choose (Mirror Tall) Full))
)
xmonadConfig :: XMonadConfig
xmonadConfig =
def
{ -- Use Super instead of Alt
modMask = mod4Mask,
-- Hooks
manageHook = manageDocks <+> manageAppsWorkspace <+> manageHook def,
layoutHook = avoidStruts $ layoutHook def,
handleEventHook = handleEvent <+> handleEventHook def <+> docksEventHook <+> fullscreenEventHook,
-- Java swing applications and xmonad are not friends, so we need to pretend
-- a little bit
startupHook = setWMName "LG3D",
-- Borders
normalBorderColor = Color.backgroundInactive,
focusedBorderColor = Color.backgroundActive,
borderWidth = 1,
-- Workspaces
workspaces =
[ wsCode1,
wsCode2,
wsWeb,
wsChat,
wsMedia,
wsOther
],
terminal = "alacritty"
}
`additionalKeysP` keybindings
--------------------------------------------------------------------------------
statusBarPP :: PP
statusBarPP =
def
{ ppCurrent = Icon.active,
ppHidden = Icon.inactive,
ppWsSep = "",
ppTitle =
xmobarColor Color.textTitleFg Color.textTitleBg
. shorten 120,
ppLayout = \case
"Tall" -> Icon.inactiveThin "\x25E7"
"Mirror Tall" -> Icon.inactiveThin "\x2B12"
"Full" -> Icon.inactiveThin "\x23F9"
l -> l,
ppSep = " "
}
--------------------------------------------------------------------------------
toggleStrutsKey :: XConfig l -> (KeyMask, KeySym)
toggleStrutsKey XConfig {XMonad.modMask = m} = (m, xK_b)
--------------------------------------------------------------------------------
sendRestart :: IO ()
sendRestart = do
dpy <- openDisplay ""
rw <- rootWindow dpy $ defaultScreen dpy
xmonad_restart <- internAtom dpy "D12_XMONAD_RESTART" False
allocaXEvent $ \e -> do
setEventType e clientMessage
setClientMessageEvent e rw xmonad_restart 32 0 currentTime
sendEvent dpy rw False structureNotifyMask e
sync dpy False
--------------------------------------------------------------------------------
handleEvent :: Event -> X All
handleEvent e@ClientMessageEvent {ev_message_type = mt} = do
a <- getAtom "D12_XMONAD_RESTART"
if mt == a
then restart "d12-xmonad" True >> pure (All False)
else broadcastMessage e >> pure (All True)
handleEvent e = broadcastMessage e >> pure (All True)
--------------------------------------------------------------------------------
| d12frosted/environment | xmonad/xmonad/Main.hs | gpl-3.0 | 3,689 | 0 | 13 | 683 | 712 | 391 | 321 | 79 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module ADC.Types.Types
( WBox(..)
, Item(..)
, Auction(..)
, IStats(..)
, WBoxedStats(..)
, ItemS(..)
, ApiKey
, TrackingItems
, Profession(..)
, Slug
, Region(..)
, Realm(..)
, AucFile(..)
, Config(..)
, ReqParams(..)
, DLParams(..)
, oneSecond
, SqlE(..)
)
where
import ADC.Types.Locale
import qualified Data.Sequence as S
import Data.Aeson
import qualified Network.HTTP.Conduit as C
import Control.Concurrent.MVar
import Control.Concurrent.STM.TQueue
import Data.Time.Clock
import qualified Data.Map.Strict as M
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.ToRow
import Database.PostgreSQL.Simple.ToField
import Data.Pool
import Data.Monoid ((<>))
import Control.Exception
import Data.Typeable
data SqlE = SqlError
| ResultError
| QueryError
| FormatError deriving (Eq, Show, Typeable)
instance Exception SqlE
oneSecond :: Int
oneSecond = 1000000
-- Type for whiskers box diagram
data WBox = WBox {ic :: Int -- items count
,minW :: Int
,botW :: Int
,p25 :: Int
,p50 :: Int
,p75 :: Int
,topW :: Int
,maxW :: Int} deriving (Eq, Show)
instance ToRow WBox where
toRow b = map toField $ [ic, minW, botW, p25, p50, p75, topW, maxW] <*> pure b
instance ToField WBox where
toField b = Many $ map toField $ [ic, minW, botW, p25, p50, p75, topW, maxW] <*> pure b
data Item = Item {name :: String
,iid :: Int} deriving (Eq, Show)
data Auction = Auction {bid :: Int
,buyout :: Int
,quantity :: Int
,itemId :: Int} deriving (Eq, Show)
data IStats = IStats {bid' :: S.Seq Int
,buyout' :: S.Seq Int} deriving (Eq, Show)
instance Monoid IStats where
mempty = IStats mempty mempty
s1 `mappend` s2 = IStats (bid' s1 <> bid' s2) (buyout' s1 <> buyout' s2)
data WBoxedStats = WBoxedStats {bbid :: Maybe WBox
,bbuyout :: Maybe WBox} deriving (Eq, Show)
newtype ItemS = ItemS {items :: [Item]} deriving (Eq, Show)
type ApiKey = String
type TrackingItems = [Int]
data Profession = Alchemy
| Engineering
| Leatherworking
| Blacksmith
| EnQueueting
| Inscription
| Tailoring
| Skinning
| Jewelcrafting
| Herbalism
| Mining
| World deriving (Eq, Show)
type Slug = String
data Region = EU | KR | TW | US deriving (Eq, Ord, Show, Read)
data Realm = Realm
{rname :: String
,slug :: Slug
,locale :: Locale
,connectedRealms :: [Slug]} deriving (Eq, Show)
data AucFile = AucFile {url :: String
,lastModified :: Integer} deriving (Eq, Show)
data Config = Config {apiKey :: ApiKey
,region :: Region
,langLocale :: Locale
,filterLocale :: [Locale]
,counter :: MVar Int
,reqQueue :: MVar (S.Seq ReqParams)
,manager :: C.Manager
,dlQueue :: TQueue DLParams
,updatedAt :: MVar (M.Map Slug UTCTime)
,connPool :: Pool Connection}
data ReqParams = ReqAuc Config Realm
| ReqRealms Config
data DLParams = DLAucJson AucFile Realm
instance FromJSON AucFile where
parseJSON = withObject "file" $ \o -> do
url <- o .: "url"
lastModified <- o .: "lastModified"
pure AucFile{..}
instance FromJSON ItemS where
parseJSON = withObject "items" $ \o -> do
items <- o .: "items"
pure ItemS{..}
instance FromJSON Auction where
parseJSON = withObject "aucs" $ \o -> do
bid <- o .: "bid"
buyout <- o .: "buyout"
quantity <- o .: "quantity"
itemId <- o .: "item"
pure Auction{..}
instance FromJSON Item where
parseJSON = withObject "items" $ \o -> do
name <- o .: "name"
iid <- o .: "id"
pure Item{..}
instance FromJSON Realm where
parseJSON = withObject "realm" $ \o -> do
rname <- o .: "name"
slug <- o .: "slug"
locale <- o .: "locale"
connectedRealms <- o .: "connected_realms"
pure Realm{..} | gore-v/AuctionParser | src/ADC/Types/Types.hs | gpl-3.0 | 4,860 | 0 | 12 | 1,849 | 1,344 | 776 | 568 | 135 | 1 |
module RungeKutta where
{- classical 4th order Runge-Kutta for solving differential equations
 - x_n+1 = x_n + h/6*(k1 + 2*k2 + 2*k3 + k4) where
 -   k1 = f(x_n)
 -   k2 = f(x_n + h/2*k1)
 -   k3 = f(x_n + h/2*k2)
 -   k4 = f(x_n + h*k3)
 -}
rungeKutta :: ([Float] -> [Float]) -> Float -> [Float] -> [Float]
rungeKutta stepF h xn = xn' where
k1s = stepF xn
k2s = stepF $ zipWith (\a b -> a + h / 2 * b) xn k1s
k3s = stepF $ zipWith (\a b -> a + h / 2 * b) xn k2s
k4s = stepF $ zipWith (\a b -> a + h * b) xn k3s
k2s' = map (*2) k2s
k3s' = map (*2) k3s
ks = zipWith (+) k4s $ zipWith (+) k3s' $ zipWith (+) k2s' k1s
xn' = zipWith (\a b -> a + h / 6 * b) xn ks
| jrraymond/pendulum | src/RungeKutta.hs | gpl-3.0 | 582 | 0 | 13 | 154 | 271 | 148 | 123 | 11 | 1 |
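A small usage sketch for the stepper above; the harmonic-oscillator system and the step size are illustrative choices, not part of the original module.
-- Harmonic oscillator x'' = -x as a first-order system with state [x, v],
-- so the derivative of [x, v] is [v, -x].
oscillator :: [Float] -> [Float]
oscillator [x, v] = [v, negate x]
oscillator _      = error "oscillator: expected a two-component state"

-- Trajectory from x = 1, v = 0 with step size h = 0.01 (e.g. take 5 trajectory).
trajectory :: [[Float]]
trajectory = iterate (rungeKutta oscillator 0.01) [1, 0]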
{-# Language DataKinds #-}
{-# Language KindSignatures #-}
{-# Language TypeOperators #-}
{-# Language TypeSynonymInstances #-}
{-# Language FlexibleInstances #-}
module Data.SMT.BitBlasting.Types where
import qualified Data.IntSet as IS
import Data.Extensible.Sum
import Data.SMT.Abstract.Types
type TermComponents = [VAR, INT, ADD, MUL, NEG]
type FormulaComponents = [EQUAL, AND, LESSTHAN]
type TermOf = AbstTerm TermComponents
type Term = TermOf :| TermComponents
type FormulaOf = AbstFormula Term FormulaComponents
type Formula = FormulaOf :| FormulaComponents
instance Ppr Term where
ppr = (ppr :: TermOf VAR -> String)
<:| (ppr :: TermOf INT -> String)
<:| (ppr :: TermOf ADD -> String)
<:| (ppr :: TermOf MUL -> String)
<:| (ppr :: TermOf NEG -> String)
<:| exhaust
instance Ppr Formula where
ppr = (ppr :: FormulaOf EQUAL -> String)
<:| (ppr :: FormulaOf AND -> String)
<:| (ppr :: FormulaOf LESSTHAN -> String)
<:| exhaust
instance GetVariables Term where
fv = (fv :: TermOf VAR -> IS.IntSet)
<:| (fv :: TermOf INT -> IS.IntSet)
<:| (fv :: TermOf ADD -> IS.IntSet)
<:| (fv :: TermOf MUL -> IS.IntSet)
<:| (fv :: TermOf NEG -> IS.IntSet)
<:| exhaust
instance GetVariables Formula where
fv = (fv :: FormulaOf EQUAL -> IS.IntSet)
<:| (fv :: FormulaOf AND -> IS.IntSet)
<:| (fv :: FormulaOf LESSTHAN -> IS.IntSet)
<:| exhaust
-- configuration
data Config = Config {
startWidth :: Int
, maxWidth :: Int
}
defaultConfig :: Config
defaultConfig = Config 2 10
| xenophobia/experimental-smt-solver | src/Data/SMT/BitBlasting/Types.hs | gpl-3.0 | 1,608 | 0 | 13 | 379 | 487 | 275 | 212 | 44 | 1 |
{- Event handlers for Lazymail
-
- Copyright 2013 Raúl Benencia <[email protected]>
-
- Licensed under the GNU GPL version 3 or higher
-}
module Lazymail.Handlers where
import Codec.MIME.Parse(parseMIMEMessage)
import Codec.MIME.Type(MIMEValue(..))
import Control.Monad.Reader
import Control.Monad.State
import Data.List(intercalate, stripPrefix, sort)
import System.Directory(getTemporaryDirectory)
import System.Exit(ExitCode(..))
import System.FilePath(FilePath, takeFileName, dropTrailingPathSeparator, (</>))
import System.IO(openFile, IOMode(..), hClose, hSeek, SeekMode(..), hPutStrLn)
import System.Locale(rfc822DateFormat)
import System.Process(runProcess, waitForProcess)
import System.Random(randomR, getStdGen, setStdGen)
import Data.DateTime(parseDateTime, startOfTime, formatDateTime)
import qualified System.IO.UTF8 as UTF8
import qualified System.IO.Strict as Strict
import UI.NCurses(setEcho)
import Lazymail.Email(lookupField, getBody, formatBody)
import Lazymail.Maildir
import Lazymail.Print
import Lazymail.State
import Lazymail.Types
import Lazymail.Utils(drawNotification)
previousMode :: LazymailCurses ()
previousMode = get >>= \st -> previousMode' (mode st)
previousMode' MaildirMode = (=<<) put $ get >>= \st -> return st { exitRequested = True }
previousMode' EmailMode = do
st <- get
if (triggerUpdateIn . indexState $ st)
then advanceMode' MaildirMode >> solveIndexUpdate
else put $ st { mode = IndexMode }
previousMode' IndexMode = do
st <- get
let ist = (indexState st) { selectedRowIn = 0, scrollRowIn = 0 }
put $ st { mode = MaildirMode, indexState = ist }
previousMode' _ = get >>= \st -> put $ st { mode = MaildirMode}
advanceMode :: LazymailCurses ()
advanceMode = get >>= \st -> advanceMode' (mode st)
advanceMode' IndexMode = do
st <- get
let fp = selectedEmailPath . indexState $ st
nfp <- if (isNew fp)
then liftIO $ markAsRead fp
else return fp
when (fp /= nfp) triggerIndexUpdate
st <- get
msg <- liftIO $ UTF8.readFile nfp
let email = parseMIMEMessage msg
let body = getBody $ email
let el = formatBody body $ screenColumns st
let est = (emailState st) { currentEmail = email, emailLines = el, scrollRowEm = 0 }
put $ st { mode = EmailMode, emailState = est }
advanceMode' MaildirMode = do
st <- get
unsortedEmails <- liftIO $ do
freeOldHandlers st
let md = (selectedMD . maildirState) $ st
emails <- getMaildirEmails md
mapM toEmail emails
let selectedEmails' = reverse $ sort unsortedEmails
let scrollRow = scrollRowIn . indexState $ st
let scrRows = screenRows st
let indexState' = (indexState st) {
selectedEmails = selectedEmails'
, currentInLen = length selectedEmails'
, scrollBufferIn = formatIndexModeRows st $ scrollCrop scrollRow scrRows selectedEmails'
}
put $ st { mode = IndexMode, indexState = indexState' }
where
toEmail fp = do
handle <- openFile fp ReadMode
msg <- UTF8.hGetContents handle
let value = parseMIMEMessage msg
let headers = mime_val_headers value
let date = maybe startOfTime id $ parseDateTime rfc822DateFormat $ takeWhile (/= '(') $ lookupField "date" headers
return (Email value date fp handle)
advanceMode' _ = return ()
toComposeMode :: LazymailCurses ()
toComposeMode = get >>= \st -> put $ st { mode = ComposeMode }
freeOldHandlers st = mapM (hClose . emailHandle) $ selectedEmails . indexState $ st
scrollDown :: LazymailCurses ()
scrollDown = get >>= \st -> scrollDown' (mode st)
-- Boilerplate code
scrollDown' IndexMode = do
st <- get
let inSt = indexState st
let selRow = selectedRowIn inSt
let topScrollRow = scrollRowIn inSt
let startScrolling = (div (screenRows st) 4) * 3
let totalRows = currentInLen inSt
if selRow > startScrolling && (topScrollRow <= (totalRows - (screenRows st)))
then do -- Scroll emails
let scrollRowIn' = scrollRowIn inSt + 1
let scrollBufferIn' = formatIndexModeRows st $ scrollCrop scrollRowIn' (screenRows st) $ selectedEmails inSt
let inSt' = inSt { scrollRowIn = scrollRowIn', scrollBufferIn = scrollBufferIn' }
put st { indexState = inSt' }
else -- Move the selected row
put $ incrementSelectedRow st
scrollDown' MaildirMode = do
st <- get
let mdSt = maildirState st
let selRow = selectedRowMD mdSt
let topScrollRow = scrollRowMD mdSt
let startScrolling = (div (screenRows st) 4) * 3
let totalRows = length $ detectedMDs mdSt
if selRow > startScrolling && (topScrollRow <= (totalRows - (screenRows st)))
then do -- Scroll emails
let scrollRowMD' = topScrollRow + 1
let scrollBufferMD' = scrollCrop scrollRowMD' (screenRows st) $ detectedMDs mdSt
let mdSt' = mdSt { scrollRowMD = scrollRowMD', scrollBufferMD = scrollBufferMD' }
put st { maildirState = mdSt' }
else -- Move the selected row
put $ incrementSelectedRow st
{- Down-scrolling in Email mode -}
scrollDown' EmailMode = do
st <- get
let est = emailState st
let cur = scrollRowEm est
let scrRows = screenRows st
let totalRows = length $ emailLines est
let est' = est { scrollRowEm = (cur + 1) }
when ((totalRows - scrRows + (bodyStartRow est) - 1) > (scrollRowEm est)) $
put $ st { emailState = est' }
scrollDown' _ = return ()
scrollUp :: LazymailCurses ()
scrollUp = get >>= \st -> scrollUp' (mode st)
-- More boilerplate code
scrollUp' IndexMode = do
st <- get
let inSt = indexState st
let selRow = selectedRowIn inSt
let startScrolling = (div (screenRows st) 4)
let topScrollRow = scrollRowIn inSt
if topScrollRow > 0 && selRow < startScrolling
then do
let scrollRowIn' = scrollRowIn inSt - 1
let scrollBufferIn' = formatIndexModeRows st $ scrollCrop scrollRowIn' (screenRows st) $ selectedEmails inSt
let inSt' = inSt { scrollRowIn = scrollRowIn', scrollBufferIn = scrollBufferIn' }
put st { indexState = inSt' }
else
put $ decrementSelectedRow st
scrollUp' MaildirMode = do
st <- get
let mdSt = maildirState st
let selRow = selectedRowMD mdSt
let startScrolling = (div (screenRows st) 4)
let topScrollRow = scrollRowMD mdSt
if topScrollRow > 0 && selRow < startScrolling
then do
let scrollRowMD' = scrollRowMD mdSt - 1
let scrollBufferMD' = scrollCrop scrollRowMD' (screenRows st) $ detectedMDs mdSt
let mdSt' = mdSt { scrollRowMD = scrollRowMD', scrollBufferMD = scrollBufferMD' }
put st { maildirState = mdSt' }
else
put $ decrementSelectedRow st
scrollUp' EmailMode = do
st <- get
let est = emailState st
let cur = scrollRowEm est
let scrRows = screenRows st
let totalRows = length $ emailLines est
let est' = est { scrollRowEm = (cur - 1) }
when (cur > 0) $
put $ st { emailState = est' }
scrollUp' _ = return ()
incrementSelectedRow st | (selectedRow st) < limit =
case (mode st) of
MaildirMode ->
let
sr = (selectedRowMD . maildirState) st
maildirState' = (maildirState st) { selectedRowMD = sr + 1 }
in
st { maildirState = maildirState' }
IndexMode ->
let
sr = (selectedRowIn . indexState) st
indexState' = (indexState st) { selectedRowIn = sr + 1 }
in
st { indexState = indexState' }
_ -> st
| otherwise = st
where
scrRows = screenRows st
curInLen = length $ selectedEmails . indexState $ st
curMDLen = length $ detectedMDs . maildirState $ st
limit' = case (mode st) of
MaildirMode -> if curMDLen < scrRows then curMDLen - 1 else scrRows
IndexMode -> if curInLen < scrRows then curInLen - 1 else scrRows
limit = if (statusBar st) && (limit' == scrRows)
then fromIntegral $ limit' - 2
else fromIntegral limit'
decrementSelectedRow st | (selectedRow st) > 0 =
case (mode st) of
MaildirMode ->
let
sr = (selectedRowMD . maildirState) st
maildirState' = (maildirState st) { selectedRowMD = sr - 1 }
in
st { maildirState = maildirState' }
IndexMode ->
let
sr = (selectedRowIn . indexState) st
indexState' = (indexState st) { selectedRowIn = sr - 1 }
in
st { indexState = indexState' }
_ -> st
| otherwise = st
{- Given a list, it returns the elements that will be in the next screen refresh
- TODO: find a better name -}
scrollCrop top rows xs = take rows $ drop top xs
formatIndexModeRows :: LazymailState -> [Email] -> [(FilePath, String)]
formatIndexModeRows st = map formatRow where
formatRow e =
let fp = emailPath e
email = emailValue e
hs = mime_val_headers email
str = normalizeLen (screenColumns st) $ intercalate ppSep $
[ "[" ++ normalizeLen maxFlags (ppFlags . getFlags $ fp) ++ "]"
, formatDateTime "%b %d" $ emailDate e
, normalizeLen fromLen $ ppField $ lookupField "from" hs
, ppField $ lookupField "subject" hs
]
in (fp, str)
formatMaildirModeRows st = mapM formatRow where
formatRow fp = return $ (fp, (concat $ replicate (numPads - 1) pad) ++ name) where
bp = basePath st
str = case (stripPrefix bp fp) of
Nothing -> fp
Just s -> s
name' = takeFileName . dropTrailingPathSeparator $ str
name = takeFileName $ map (\x -> if x `elem` imapSep then '/' else x) name'
pad = " "
numPads = (length $ filter (== '/') str) + (length $ filter (`elem` imapSep) str)
imapSep = ['.'] -- IMAP usually separates its directories with dots
triggerIndexUpdate :: LazymailCurses ()
triggerIndexUpdate = do
st <- get
let ist = indexState st
put $ st { indexState = (ist { triggerUpdateIn = True }) }
solveIndexUpdate :: LazymailCurses ()
solveIndexUpdate = do
st <- get
let ist = indexState st
put $ st { indexState = (ist { triggerUpdateIn = False }) }
triggerMaildirUpdate :: LazymailCurses ()
triggerMaildirUpdate = do
st <- get
let mst = maildirState st
put $ st { maildirState = (mst { triggerUpdateMD = True }) }
solveMaildirUpdate :: LazymailCurses ()
solveMaildirUpdate = do
st <- get
let mst = maildirState st
put $ st { maildirState = (mst { triggerUpdateMD = False }) }
getField :: Maybe String -> LazymailCurses () -> LazymailCurses ()
getField pr postActions = do
st <- get
let is = initialInputState { inputRequested = True
, prompt = pr
, postInputActions = postActions}
put $ st { inputState = is }
updateField :: (ComposeFields -> String -> ComposeFields) -> LazymailCurses ()
updateField f = do
st <- get
let value = currentInput . inputState $ st
let cf = (composeFields . composeState $ st)
let cs = (composeState st) { composeFields = (f cf value) }
put $ st { inputState = initialInputState
, composeState = cs
}
getFrom :: LazymailCurses ()
getFrom = let postActions = updateField $ \cf val -> cf { fromField = Just val }
in getField (Just "From: ") postActions
getTo :: LazymailCurses ()
getTo = let postActions = updateField $ \cf val -> cf { toField = Just val }
in getField (Just "To: ") postActions
getSubject :: LazymailCurses ()
getSubject = let postActions = updateField $ \cf val -> cf { subjectField = Just val }
in getField (Just "Subject: ") postActions
getCc :: LazymailCurses ()
getCc = let postActions = updateField $ \cf val -> cf { ccField = Just val }
in getField (Just "Cc: ") postActions
getBcc :: LazymailCurses ()
getBcc = let postActions = updateField $ \cf val -> cf { bccField = Just val }
in getField (Just "Bcc: ") postActions
getReplyTo :: LazymailCurses ()
getReplyTo = let postActions = updateField $ \cf val -> cf { replyToField = Just val }
in getField (Just "Reply-To: ") postActions
editEmail :: LazymailCurses ()
editEmail = do
st <- get
cfg <- ask
fp <- getFileName
exitStatus <- liftIO $ do
child <- runProcess (textEditor cfg) [fp] Nothing Nothing Nothing Nothing Nothing
waitForProcess child
case exitStatus of
ExitSuccess -> do
st <- get
let cs = (composeState st) { bodyReady = True }
put $ st { composeState = cs }
_ -> drawNotification "The text editor exited abnormally"
-- | Retrieve the current file name. Create a randomized one if it's Nothing.
getFileName :: LazymailCurses FilePath
getFileName = do
st <- get
let cs = composeState st
case bodyFileName cs of
Just fp -> return fp
Nothing -> do
fp <- liftIO $ newFilename
let cs = (composeState st) { bodyFileName = Just fp }
put $ st { composeState = cs }
return fp
where
newFilename = do
tmp <- getTemporaryDirectory
num <- getRandomNumber
return $ (tmp </>) $ ("lazymail-" ++ ) $ show num
getRandomNumber :: IO Int
getRandomNumber = do
r1 <- getStdGen
let (num, r2) = randomR (100000,999999) r1
setStdGen r2
return num
sendEmail :: LazymailCurses ()
sendEmail = do
st <- get
cfg <- ask
let cs = composeState st
if not . readyToSend $ cs
then drawNotification $
"The email is not ready to be sent. Please check that all fields are correct."
else do
exitStatus <- liftIO $ do
emailHandle <- prepareEmail cs
child <- runProcess (head . sendmailCommand $ cfg)
(tail . sendmailCommand $ cfg)
Nothing Nothing (Just emailHandle) Nothing Nothing
e <- waitForProcess child
hClose emailHandle
return e
handleExitStatus exitStatus
where
handleExitStatus ExitSuccess = do
drawNotification $ "The email was successfully sent."
st <- get
put $ st { mode = MaildirMode, composeState = initialComposeState }
      handleExitStatus _ = drawNotification $
        "Could not send the email. Please check the logs of your SMTP client."
prepareEmail cs = do
let fs = composeFields cs
let fileName = (maybe "" id $ bodyFileName cs)
body <- (Strict.hGetContents =<< openFile fileName ReadMode)
emailHandle <- openFile fileName WriteMode
hPutStrLn emailHandle $ (unlines . ppComposeFields True $ fs) ++ body
hClose emailHandle >> openFile fileName ReadMode
readyToSend cs =
let from = maybe False (\_ -> True) $ fromField . composeFields $ cs
to = maybe False (\_ -> True) $ toField . composeFields $ cs
in all id [from, to, bodyReady cs]
| rul/lazymail | src/Lazymail/Handlers.hs | gpl-3.0 | 14,667 | 0 | 18 | 3,728 | 4,783 | 2,416 | 2,367 | 340 | 7 |
-- (C) Copyright Chris Banks 2011-2012
-- This file is part of The Continuous Pi-calculus Workbench (CPiWB).
-- CPiWB is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- CPiWB is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with CPiWB. If not, see <http://www.gnu.org/licenses/>.
module CPi.Plot
(plotTimeSeries,
plotTimeSeriesFiltered,
plotTimeSeriesToFile,
plotTimeSeriesToFileFiltered,
phasePlot2
) where
import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Gtk
import Graphics.Rendering.Chart.Backend.Cairo
import Data.Colour.Names
import Data.Colour.SRGB
import Data.Colour
-- import Data.Accessor
import Data.Default.Class
import Control.Lens
import qualified Control.Exception as X
import qualified Numeric.LinearAlgebra as LA
import Control.Monad
import CPi.Lib
-- Takes data from the ODE solver and plots them
plotTimeSeries :: LA.Vector Double -> LA.Matrix Double -> [Species] -> IO ()
plotTimeSeries ts soln ss
= plot
(LA.toList ts)
(zip (map pretty ss) (map LA.toList (LA.toColumns soln)))
-- Plots the data to a PDF file
plotTimeSeriesToFile :: LA.Vector Double -> LA.Matrix Double -> [Species] -> String -> IO ()
plotTimeSeriesToFile ts soln ss file
= plotToFile
(LA.toList ts)
(zip (map pretty ss) (map LA.toList (LA.toColumns soln)))
file
-- Only plots selected species
plotTimeSeriesFiltered :: LA.Vector Double -> LA.Matrix Double -> [Species] -> [Species]
-> IO ()
plotTimeSeriesFiltered ts soln ss ss'
= plot
(LA.toList ts)
(filter (\(s,_)-> s `elem` (map specName ss'))
(zip (map specName ss) (map LA.toList (LA.toColumns soln))))
-- Only plots selected species to a PDF file
plotTimeSeriesToFileFiltered :: LA.Vector Double -> LA.Matrix Double -> [Species] -> [Species]
-> String -> IO ()
plotTimeSeriesToFileFiltered ts soln ss ss' file
= plotToFile
(LA.toList ts)
(filter (\(s,_)-> s `elem` (map specName ss'))
(zip (map pretty ss) (map LA.toList (LA.toColumns soln))))
file
-- Plots the time series in a GTK window
plot :: [Double] -> [(String,[Double])] -> IO ()
plot ts dims = renderableToWindow (toRenderable (layout ts dims)) 640 480
-- Plots the time series to a file
plotToFile :: [Double] -> [(String,[Double])] -> String -> IO ()
plotToFile ts dims file = void $ renderableToFile (FileOptions (842, 595) PDF) file (toRenderable (layout ts dims))
-- gets a plot layout with plots for each dimension
layout ts dims = layout_plots .~ plots ts (colours (length dims)) dims
$ def
-- layout1_legend ^= Nothing $ {-remove to add legend-}
-- gets the plots for each dimension
-- plots :: [Double] -> [AlphaColour Double] -> [(String,[Double])] ->
-- [Either (Plot Double Double) b]
plots :: [Double] -> [AlphaColour Double] -> [(String,[Double])] ->
[Plot Double Double]
plots _ _ [] = []
plots ts (colour:cs) ((lbl,pts):dims)
= (toPlot
$ plot_lines_style .~ solidLine 1 colour
$ plot_lines_values .~ [zip ts pts]
$ plot_lines_title .~ lbl
$ def
) : plots ts cs dims
plots _ [] _ = X.throw $ CpiException
"CPi.Plot.plots: Run out of colours!"
---------------
-- Phase plots:
---------------
-- a plot of two dimensions:
phasePlot2 :: LA.Vector Double
-> LA.Matrix Double
-> [Species]
-> (Species,Species)
-> IO ()
phasePlot2 ts soln ss ss'
= plotPhase
(filter (\(s,_)-> (s == (specName (fst ss'))) || s == (specName (snd ss')))
(zip (map specName ss) (map LA.toList (LA.toColumns soln))))
plotPhase dims = renderableToWindow (toRenderable (layout2phase dims)) 640 480
plotphase pts
= toPlot
$ plot_lines_values .~ [pts]
$ plot_lines_style .~ solidLine 1 (opaque blue)
$ def
layout2phase dims
= layout_plots .~ [plotphase $ zip (snd (dims!!0)) (snd (dims!!1))]
-- $ layout1_bottom_axis ^: laxis_generate ^= autoScaledLogAxis defaultLogAxis
$ layout_x_axis . laxis_title .~ "["++fst (dims!!0)++"]"
-- $ layout1_left_axis ^: laxis_generate ^= autoScaledLogAxis defaultLogAxis
$ layout_y_axis . laxis_title .~ "["++fst (dims!!1)++"]"
$ def
-------------------
-- gives n visually distinct colours
-- algorithm taken from the MATLAB 'varycolor' function
-- by Daniel Helmick: http://j.mp/xowLV2
colours :: Int -> [AlphaColour Double]
colours n
| n<=0 = []
| n==1 = [clr 0 1 0]
| n==2 = [clr 0 1 0,clr 0 1 1]
| n==3 = [clr 0 1 0,clr 0 1 1,clr 0 0 1]
| n==4 = [clr 0 1 0,clr 0 1 1,clr 0 0 1,clr 1 0 1]
| n==5 = [clr 0 1 0,clr 0 1 1,clr 0 0 1,clr 1 0 1,clr 1 0 0]
| n==6 = [clr 0 1 0,clr 0 1 1,clr 0 0 1,clr 1 0 1,clr 1 0 0,clr 0 0 0]
| otherwise = sec 1 ++ sec 2 ++ sec 3 ++ sec 4 ++ sec 5
where
s = fromIntegral(n `div` 5)
e = fromIntegral(n `mod` 5)
f x y
| x<=y = 1.0
| otherwise = 0.0
g x = [1..(s+(f x e))]
sec x
| x==1 = [clr 0 1 ((m-1)/(s+(f x e)-1)) | m<-g x]
| x==2 = [clr 0 ((s+(f x e)-m)/(s+(f x e))) 1 | m<-[1..(s+(f x e))]]
| x==3 = [clr (m/(s+(f x e))) 0 1 | m<-[1..(s+(f x e))]]
| x==4 = [clr 1 0 ((s+(f x e)-m)/(s+(f x e))) | m<-[1..(s+(f x e))]]
| x==5 = [clr ((s+(f x e)-m)/(s+(f x e))) 0 0 | m<-[1..(s+(f x e))]]
| otherwise = undefined
clr :: Double -> Double -> Double -> AlphaColour Double
clr r g b = opaque(sRGB r g b)
{-- test data
testT = [0.0,0.1..2500.0]::[Double]
testD1 = [0,0.1..2500]::[Double]
testD2 = [x*x|x<-[0,0.1..2500]]::[Double]
testPlot1 = plot_lines_style ^= solidLine 1 (opaque $ sRGB 0.5 0.5 1)
$ plot_lines_values ^= [zip testT testD1]
$ plot_lines_title ^= "test1"
$ defaultPlotLines
testPlot2 = plot_lines_style ^= solidLine 1 (opaque red)
$ plot_lines_values ^= [zip testT testD2]
$ plot_lines_title ^= "test2"
$ defaultPlotLines
testLayout = layout1_title ^= "Test graph!"
$ layout1_plots ^= [Left (toPlot testPlot1),
Left (toPlot testPlot2)]
$ defaultLayout1
testPlot = renderableToWindow (toRenderable testLayout) 640 480
-}
| continuouspi/cpiwb | CPi/Plot.hs | gpl-3.0 | 6,771 | 0 | 23 | 1,701 | 2,247 | 1,175 | 1,072 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Kinesis.GetShardIterator
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Gets a shard iterator. A shard iterator expires five minutes after it is
-- returned to the requester.
--
-- A shard iterator specifies the position in the shard from which to start
-- reading data records sequentially. A shard iterator specifies this position
-- using the sequence number of a data record in a shard. A sequence number is
-- the identifier associated with every record ingested in the Amazon Kinesis
-- stream. The sequence number is assigned when a record is put into the stream.
--
-- You must specify the shard iterator type. For example, you can set the 'ShardIteratorType' parameter to read exactly from the position denoted by a specific sequence
-- number by using the 'AT_SEQUENCE_NUMBER' shard iterator type, or right after
-- the sequence number by using the 'AFTER_SEQUENCE_NUMBER' shard iterator type,
-- using sequence numbers returned by earlier calls to 'PutRecord', 'PutRecords', 'GetRecords', or 'DescribeStream'. You can specify the shard iterator type 'TRIM_HORIZON' in
-- the request to cause 'ShardIterator' to point to the last untrimmed record in
-- the shard in the system, which is the oldest data record in the shard. Or you
-- can point to just after the most recent record in the shard, by using the
-- shard iterator type 'LATEST', so that you always read the most recent data in
-- the shard.
--
-- When you repeatedly read from an Amazon Kinesis stream, use a 'GetShardIterator'
-- request to get the first shard iterator to use in your first 'GetRecords'
-- request and then use the shard iterator returned by the 'GetRecords' request in 'NextShardIterator' for subsequent reads. A new shard iterator is returned by
-- every 'GetRecords' request in 'NextShardIterator', which you use in the 'ShardIterator' parameter of the next 'GetRecords' request.
--
-- If a 'GetShardIterator' request is made too often, you receive a 'ProvisionedThroughputExceededException'. For more information about throughput limits, see 'GetRecords'.
--
-- If the shard is closed, the iterator can't return more data, and 'GetShardIterator' returns 'null' for its 'ShardIterator'. A shard can be closed using 'SplitShard'
-- or 'MergeShards'.
--
-- 'GetShardIterator' has a limit of 5 transactions per second per account per
-- open shard.
--
-- <http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html>
module Network.AWS.Kinesis.GetShardIterator
(
-- * Request
GetShardIterator
-- ** Request constructor
, getShardIterator
-- ** Request lenses
, gsiShardId
, gsiShardIteratorType
, gsiStartingSequenceNumber
, gsiStreamName
-- * Response
, GetShardIteratorResponse
-- ** Response constructor
, getShardIteratorResponse
-- ** Response lenses
, gsirShardIterator
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Kinesis.Types
import qualified GHC.Exts
data GetShardIterator = GetShardIterator
{ _gsiShardId :: Text
, _gsiShardIteratorType :: ShardIteratorType
, _gsiStartingSequenceNumber :: Maybe Text
, _gsiStreamName :: Text
} deriving (Eq, Read, Show)
-- | 'GetShardIterator' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gsiShardId' @::@ 'Text'
--
-- * 'gsiShardIteratorType' @::@ 'ShardIteratorType'
--
-- * 'gsiStartingSequenceNumber' @::@ 'Maybe' 'Text'
--
-- * 'gsiStreamName' @::@ 'Text'
--
getShardIterator :: Text -- ^ 'gsiStreamName'
-> Text -- ^ 'gsiShardId'
-> ShardIteratorType -- ^ 'gsiShardIteratorType'
-> GetShardIterator
getShardIterator p1 p2 p3 = GetShardIterator
{ _gsiStreamName = p1
, _gsiShardId = p2
, _gsiShardIteratorType = p3
, _gsiStartingSequenceNumber = Nothing
}
-- | The shard ID of the shard to get the iterator for.
gsiShardId :: Lens' GetShardIterator Text
gsiShardId = lens _gsiShardId (\s a -> s { _gsiShardId = a })
-- | Determines how the shard iterator is used to start reading data records from
-- the shard.
--
-- The following are the valid shard iterator types:
--
-- AT_SEQUENCE_NUMBER - Start reading exactly from the position denoted by a
-- specific sequence number. AFTER_SEQUENCE_NUMBER - Start reading right after
-- the position denoted by a specific sequence number. TRIM_HORIZON - Start
-- reading at the last untrimmed record in the shard in the system, which is the
-- oldest data record in the shard. LATEST - Start reading just after the most
-- recent record in the shard, so that you always read the most recent data in
-- the shard.
gsiShardIteratorType :: Lens' GetShardIterator ShardIteratorType
gsiShardIteratorType =
lens _gsiShardIteratorType (\s a -> s { _gsiShardIteratorType = a })
-- | The sequence number of the data record in the shard from which to start
-- reading.
gsiStartingSequenceNumber :: Lens' GetShardIterator (Maybe Text)
gsiStartingSequenceNumber =
lens _gsiStartingSequenceNumber
(\s a -> s { _gsiStartingSequenceNumber = a })
-- | The name of the stream.
gsiStreamName :: Lens' GetShardIterator Text
gsiStreamName = lens _gsiStreamName (\s a -> s { _gsiStreamName = a })
newtype GetShardIteratorResponse = GetShardIteratorResponse
{ _gsirShardIterator :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'GetShardIteratorResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gsirShardIterator' @::@ 'Maybe' 'Text'
--
getShardIteratorResponse :: GetShardIteratorResponse
getShardIteratorResponse = GetShardIteratorResponse
{ _gsirShardIterator = Nothing
}
-- | The position in the shard from which to start reading data records
-- sequentially. A shard iterator specifies this position using the sequence
-- number of a data record in a shard.
gsirShardIterator :: Lens' GetShardIteratorResponse (Maybe Text)
gsirShardIterator =
lens _gsirShardIterator (\s a -> s { _gsirShardIterator = a })
instance ToPath GetShardIterator where
toPath = const "/"
instance ToQuery GetShardIterator where
toQuery = const mempty
instance ToHeaders GetShardIterator
instance ToJSON GetShardIterator where
toJSON GetShardIterator{..} = object
[ "StreamName" .= _gsiStreamName
, "ShardId" .= _gsiShardId
, "ShardIteratorType" .= _gsiShardIteratorType
, "StartingSequenceNumber" .= _gsiStartingSequenceNumber
]
instance AWSRequest GetShardIterator where
type Sv GetShardIterator = Kinesis
type Rs GetShardIterator = GetShardIteratorResponse
request = post "GetShardIterator"
response = jsonResponse
instance FromJSON GetShardIteratorResponse where
parseJSON = withObject "GetShardIteratorResponse" $ \o -> GetShardIteratorResponse
<$> o .:? "ShardIterator"
| dysinger/amazonka | amazonka-kinesis/gen/Network/AWS/Kinesis/GetShardIterator.hs | mpl-2.0 | 7,942 | 0 | 9 | 1,616 | 692 | 434 | 258 | 79 | 1 |
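A hedged sketch of building requests with the smart constructor and lenses defined above. The ShardIteratorType constructor names (TrimHorizon, AfterSequenceNumber) and their export from Network.AWS.Kinesis.Types are assumptions about this amazonka generation, and the stream, shard and sequence-number values are placeholders.
{-# LANGUAGE OverloadedStrings #-}

import Control.Lens ((&), (?~))
import Network.AWS.Kinesis.GetShardIterator
import Network.AWS.Kinesis.Types (ShardIteratorType (..))

-- Read a hypothetical shard from its oldest untrimmed record.
fromStart :: GetShardIterator
fromStart = getShardIterator "my-stream" "shardId-000000000000" TrimHorizon

-- Resume right after a previously seen sequence number (placeholder value).
resume :: GetShardIterator
resume = getShardIterator "my-stream" "shardId-000000000000" AfterSequenceNumber
    & gsiStartingSequenceNumber ?~ "<last-seen-sequence-number>"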
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Core.Config where
import Control.Arrow ((***))
import qualified Core.Parser as P
import qualified Data.ByteString.Char8 as BS
import qualified Data.Map.Strict as M
import qualified Language.Haskell.TH.Quote as TQ
import qualified Language.Haskell.TH.Syntax as TS
newtype Config =
Config ![(String, String)]
instance TS.Lift Config where
lift (Config list) = [|M.fromList $ map (BS.pack *** BS.pack) list|]
parseFile :: FilePath -> TS.Q TS.Exp
-- ^ Parse the configuration file
parseFile filePath = do
TS.qAddDependentFile filePath
s <- TS.qRunIO $ readFile filePath
TQ.quoteExp parse s
where
parse =
TQ.QuasiQuoter
{ TQ.quoteExp = quoteExp
, TQ.quotePat = undefined
, TQ.quoteType = undefined
, TQ.quoteDec = undefined
}
quoteExp str =
case P.parseOnly parseData (BS.pack str) of
Right tag -> [|tag|]
Left _ -> undefined
parseData = Config <$> P.many parseLine
parseLine = do
key <- (P.many . P.char) '\n' *> P.noneOf1 " =" <* (P.many . P.char) ' '
value <-
P.char '=' *> (P.many . P.char) ' ' *> P.noneOf1 "\n" <*
(P.many . P.char) '\n'
return (BS.unpack key, BS.unpack value)
| inq/agitpunkt | src/Core/Config.hs | agpl-3.0 | 1,408 | 2 | 16 | 373 | 410 | 226 | 184 | 39 | 2 |
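A sketch of how the splice above might be used; the file name and keys are made up, and per the Lift instance the spliced value is a strict Map of packed key/value pairs.
{-# LANGUAGE TemplateHaskell #-}

import qualified Core.Config as Config
import qualified Data.ByteString.Char8 as BS
import qualified Data.Map.Strict as M

-- For a file containing lines such as
--   host = localhost
--   port = 8080
-- the splice evaluates at compile time to a Map ByteString ByteString.
settings :: M.Map BS.ByteString BS.ByteString
settings = $(Config.parseFile "app.config")

port :: Maybe BS.ByteString
port = M.lookup (BS.pack "port") settings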
module CGTools.CLI (cli) where
import Options.Applicative
import System.IO (hSetBuffering, stdout, BufferMode(NoBuffering))
import CGTools.Types
import CGTools.Install (runInstall)
import CGTools.Validate (runValidate)
import CGTools.Log (runLog)
version :: Parser (a -> a)
version = infoOption "0.1.0"
( long "version"
<> help "Print version information" )
withInfo :: Parser a -> String -> ParserInfo a
withInfo opts desc = info opts $ progDesc desc
parser :: Parser Args
parser = Args <$> commonOpts <*> commandParser
commandParser :: Parser Command
commandParser = hsubparser
$ command "install" (installParser `withInfo` "Installs required git hooks")
<> command "validate" (validateParser `withInfo` "Validate Git Commits")
<> command "logs" (logsParser `withInfo` "Parse Git Log file to create Changelog")
commonOpts :: Parser CommonOpts
commonOpts = CommonOpts
<$> flag Normal Verbose
( long "verbose"
<> short 'v'
<> help "Enable verbose mode" )
installParser :: Parser Command
installParser = Install <$> installOpts
installOpts :: Parser InstallOpts
installOpts = InstallOpts
<$> switch
( long "bash-completion"
<> short 'b'
<> help "Output bash completion script" )
<*> flag Safe Dangerous
( long "overwrite"
<> short 'o'
<> help "Overwrite existing files without prompt" )
validateParser :: Parser Command
validateParser = pure Validate
logsParser :: Parser Command
logsParser = Logs <$> logsOpts
logsOpts :: Parser LogsOpts
logsOpts = LogsOpts
<$> strOption
( long "output"
<> short 'o'
<> metavar "FILE"
<> help "Write output to FILE" )
pinfo :: ParserInfo Args
pinfo = (version <*> helper <*> parser) `withInfo` "Git Tools Command Line Helper"
run :: Args -> IO ()
run (Args cOpts cmd) = case cmd of
Install insOpts -> runInstall cOpts insOpts
Validate -> runValidate
Logs logOpts -> runLog logOpts
cli :: IO ()
cli = do
hSetBuffering stdout NoBuffering
execParser pinfo >>= run
| shanewilson/cgtools | src/CGTools/CLI.hs | apache-2.0 | 2,023 | 0 | 11 | 407 | 577 | 294 | 283 | 60 | 3 |
-- do not need this
filteWhenFirst :: (Num a) => (a -> Bool) -> [a] -> a
filteWhenFirst f [] = 0
filteWhenFirst f (x:xs) = if f x
then x
else filteWhenFirst f xs
sumUntil :: (Num a) => (a -> Bool) -> [a] -> a
sumUntil f (x:y:xs) = if f x
then x
else sumUntil f ((x + y):xs)
isPerfectSquare :: (Num a, Eq a, Ord a, Enum a) => a -> Bool
isPerfectSquare x = if x == (sumUntil (>= x) [1,3..])
then True
else False
main = do
let test0 = 14
let test1 = 16
print $ isPerfectSquare test0
print $ isPerfectSquare test1
| ccqpein/Arithmetic-Exercises | Valid-Perfect-Square/VPS.hs | apache-2.0 | 650 | 0 | 10 | 249 | 279 | 147 | 132 | 18 | 2 |
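The check above leans on the identity that the sum of the first n odd numbers is n^2; a quick sanity sketch of that identity in plain Haskell:
-- True: for every n up to 100, 1 + 3 + ... + (2n - 1) == n^2.
sumOfOddsIsSquare :: Bool
sumOfOddsIsSquare = and [ sum (take n [1, 3 ..]) == n * n | n <- [1 .. 100] ]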
-- Copyright (C) 2016 Fraser Tweedale
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE ScopedTypeVariables #-}
{- |
Advanced Encryption Standard (AES) Key Wrap Algorithm;
<https://tools.ietf.org/html/rfc3394>.
-}
module Crypto.JOSE.AESKW
(
aesKeyWrap
, aesKeyUnwrap
) where
import Control.Monad.State
import Crypto.Cipher.Types
import Data.Bits (xor)
import Data.ByteArray as BA hiding (replicate, xor)
import Data.Memory.Endian (BE(..), toBE)
import Data.Memory.PtrMethods (memCopy)
import Data.Word (Word64)
import Foreign.Ptr (Ptr, plusPtr)
import Foreign.Storable (peek, peekElemOff, poke, pokeElemOff)
import System.IO.Unsafe (unsafePerformIO)
iv :: Word64
iv = 0xA6A6A6A6A6A6A6A6
aesKeyWrapStep
:: BlockCipher128 cipher
=> cipher
-> Ptr Word64 -- ^ register
-> (Int, Int) -- ^ step (t) and offset (i)
-> StateT Word64 IO ()
aesKeyWrapStep cipher p (t, i) = do
a <- get
r_i <- lift $ peekElemOff p i
m :: ScrubbedBytes <-
lift $ alloc 16 $ \p' -> poke p' a >> pokeElemOff p' 1 r_i
let b = ecbEncrypt cipher m
b_hi <- lift $ withByteArray b peek
b_lo <- lift $ withByteArray b (`peekElemOff` 1)
put (b_hi `xor` unBE (toBE (fromIntegral t)))
lift $ pokeElemOff p i b_lo
-- | Wrap a secret.
--
-- Input size must be a multiple of 8 bytes, and at least 16 bytes.
-- Output size is input size plus 8 bytes.
--
aesKeyWrap
:: (ByteArrayAccess m, ByteArray c, BlockCipher128 cipher)
=> cipher
-> m
-> c
aesKeyWrap cipher m = unsafePerformIO $ do
let n = BA.length m
c <- withByteArray m $ \p ->
alloc (n + 8) $ \p' ->
memCopy (p' `plusPtr` 8) p n
withByteArray c $ \p -> do
let coords = zip [1..] (join (replicate 6 [1 .. n `div` 8]))
a <- execStateT (mapM_ (aesKeyWrapStep cipher p) coords) iv
poke p a
return c
aesKeyUnwrapStep
:: BlockCipher128 cipher
=> cipher
-> Ptr Word64 -- ^ register
-> (Int, Int) -- ^ step (t) and offset (i)
-> StateT Word64 IO ()
aesKeyUnwrapStep cipher p (t, i) = do
a <- get
r_i <- lift $ peekElemOff p i
let a_t = a `xor` unBE (toBE (fromIntegral t))
m :: ScrubbedBytes <-
lift $ alloc 16 $ \p' -> poke p' a_t >> pokeElemOff p' 1 r_i
let b = ecbDecrypt cipher m
b_hi <- lift $ withByteArray b peek
b_lo <- lift $ withByteArray b (`peekElemOff` 1)
put b_hi
lift $ pokeElemOff p i b_lo
-- | Unwrap a secret.
--
-- Input size must be a multiple of 8 bytes, and at least 24 bytes.
-- Output size is input size minus 8 bytes.
--
-- Returns 'Nothing' if inherent integrity check fails. Otherwise,
-- the chance that the key data is corrupt is 2 ^ -64.
--
aesKeyUnwrap
:: (ByteArrayAccess c, ByteArray m, BlockCipher128 cipher)
=> cipher
-> c
-> Maybe m
aesKeyUnwrap cipher c = unsafePerformIO $ do
let n = BA.length c - 8
m <- withByteArray c $ \p' ->
alloc n $ \p ->
memCopy p (p' `plusPtr` 8) n
a <- withByteArray c $ \p' -> peek p'
a' <- withByteArray m $ \p -> do
let n' = n `div` 8
let tMax = n' * 6
let coords = zip [tMax,tMax-1..1] (cycle [n'-1,n'-2..0])
execStateT (mapM_ (aesKeyUnwrapStep cipher p) coords) a
return $ if a' == iv then Just m else Nothing
| frasertweedale/hs-jose | src/Crypto/JOSE/AESKW.hs | apache-2.0 | 3,692 | 0 | 21 | 804 | 1,122 | 586 | 536 | 82 | 2 |
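A hedged round-trip sketch for the module above using a cryptonite block cipher; the AES256 and cipherInit names come from cryptonite (assumed available, since the module already builds on Crypto.Cipher.Types), and both keys are dummy constants.
import Crypto.Cipher.AES (AES256)
import Crypto.Cipher.Types (cipherInit)
import Crypto.Error (CryptoFailable (..))
import qualified Data.ByteString as B

import Crypto.JOSE.AESKW (aesKeyWrap, aesKeyUnwrap)

-- Wrap a 32-byte content key under a 32-byte key-encryption key, then unwrap it.
roundTrip :: Maybe B.ByteString
roundTrip =
  case cipherInit (B.replicate 32 0x6b) :: CryptoFailable AES256 of
    CryptoFailed _   -> Nothing
    CryptoPassed aes ->
      let cek     = B.replicate 32 0x01
          wrapped = aesKeyWrap aes cek :: B.ByteString
      in  aesKeyUnwrap aes wrapped          -- Just cek on success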
module Database.VCache.PVar
( PVar
, newPVar
, newPVars
, newPVarIO
, newPVarsIO
, loadRootPVar
, loadRootPVarIO
, readPVar
, readPVarIO
, writePVar
, modifyPVar
, modifyPVar'
, swapPVar
, pvar_space
, unsafePVarAddr
, unsafePVarRefct
) where
import Control.Concurrent.STM
import Database.VCache.Types
import Database.VCache.Alloc ( newPVar, newPVars, newPVarIO, newPVarsIO
, loadRootPVar, loadRootPVarIO)
import Database.VCache.Read (readRefctIO)
-- | Read a PVar as part of a transaction.
readPVar :: PVar a -> VTx a
readPVar pvar =
getVTxSpace >>= \ space ->
if (space /= pvar_space pvar) then fail eBadSpace else
liftSTM $ readTVar (pvar_data pvar) >>= \ rdv ->
case rdv of { (RDV v) -> return v }
{-# INLINABLE readPVar #-}
-- Note that readPVar and readPVarIO must be strict in RDV in order to force
-- the initial, lazy read from the database. This is the only reason for RDV.
-- Without forcing here, a lazy read might return a value from an update.
-- | Read a PVar in the IO monad.
--
-- This is more efficient than a full transaction. It simply peeks at
-- the underlying TVar with readTVarIO. Durability of the value read
-- is not guaranteed.
readPVarIO :: PVar a -> IO a
readPVarIO pv =
readTVarIO (pvar_data pv) >>= \ rdv ->
case rdv of { (RDV v) -> return v }
{-# INLINE readPVarIO #-}
eBadSpace :: String
eBadSpace = "VTx: mismatch between VTx VSpace and PVar VSpace"
-- | Write a PVar as part of a transaction.
writePVar :: PVar a -> a -> VTx ()
writePVar pvar v =
getVTxSpace >>= \ space ->
if (space /= pvar_space pvar) then fail eBadSpace else
markForWrite pvar v >>
liftSTM (writeTVar (pvar_data pvar) (RDV v))
{-# INLINABLE writePVar #-}
-- | Modify a PVar.
modifyPVar :: PVar a -> (a -> a) -> VTx ()
modifyPVar var f = do
x <- readPVar var
writePVar var (f x)
{-# INLINE modifyPVar #-}
-- | Modify a PVar, strictly.
modifyPVar' :: PVar a -> (a -> a) -> VTx ()
modifyPVar' var f = do
x <- readPVar var
writePVar var $! f x
{-# INLINE modifyPVar' #-}
-- | Swap contents of a PVar for a new value.
swapPVar :: PVar a -> a -> VTx a
swapPVar var new = do
old <- readPVar var
writePVar var new
return old
{-# INLINE swapPVar #-}
-- | Each PVar has a stable address in the VCache. This address will
-- be very stable, but is not deterministic and isn't really something
-- you should treat as meaningful information about the PVar. Mostly,
-- this function exists to support hashtables or memoization with
-- PVar keys.
--
-- The Show instance for PVars will also show the address.
unsafePVarAddr :: PVar a -> Address
unsafePVarAddr = pvar_addr
{-# INLINE unsafePVarAddr #-}
-- | This function allows developers to access the reference count
-- for the PVar that is currently recorded in the database. This may
-- be useful for heuristic purposes. However, caveats are needed:
--
-- First, because the VCache writer operates in a background thread,
-- the reference count returned here may be slightly out of date.
--
-- Second, it is possible that VCache will eventually use some other
-- form of garbage collection than reference counting. This function
-- should be considered an unstable element of the API.
--
-- Root PVars start with one root reference.
unsafePVarRefct :: PVar a -> IO Int
unsafePVarRefct var = readRefctIO (pvar_space var) (pvar_addr var)
| bitemyapp/haskell-vcache | hsrc_lib/Database/VCache/PVar.hs | bsd-2-clause | 3,479 | 0 | 14 | 782 | 639 | 349 | 290 | 62 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{- |
   Module      :  Glider.NLP.Language.English.PorterSpec
Copyright : Copyright (C) 2013-2014 Krzysztof Langner
License : BSD3
Maintainer : Krzysztof Langner <[email protected]>
Stability : alpha
Portability : portable
-}
module Glider.NLP.Language.English.PorterSpec (spec) where
import Test.Hspec
import Glider.NLP.Language.English.Porter
spec :: Spec
spec = do
describe "Porter" $ do
it "consign" $ stem "consign" `shouldBe` "consign"
it "class's" $ stem "class's" `shouldBe` "class'"
it "classes" $ stem "classes" `shouldBe` "class"
it "cried" $ stem "cried" `shouldBe` "cri"
it "ties" $ stem "ties" `shouldBe` "ti"
it "gas" $ stem "gas" `shouldBe` "ga"
it "gaps" $ stem "gaps" `shouldBe` "gap"
it "bleed" $ stem "bleed" `shouldBe` "bleed"
it "guaranteed" $ stem "guaranteed" `shouldBe` "guarante"
| klangner/glider-nlp | test-src/Glider/NLP/Language/English/PorterSpec.hs | bsd-2-clause | 878 | 0 | 12 | 156 | 220 | 112 | 108 | 16 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : Types.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:33
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Classes.Types (
Object(..), objectNull, objectIsNull, objectCast, objectFromPtr, objectFromPtr_nf, withObjectPtr, ptrFromObject, objectListFromPtrList, objectListFromPtrList_nf
, QVoid, TQVoid, CQVoid, withQVoidResult
, QMetaObject, TQMetaObject, CQMetaObject, withQMetaObjectResult
, QString, TQString, CQString
, QByteArray, TQByteArray, CQByteArray, withQByteArrayResult
, QResource, TQResource, CQResource, withQResourceResult
, QTransform, TQTransform, CQTransform, withQTransformResult
, Element, TElement, CElement, withElementResult
, PaintContext, TPaintContext, CPaintContext, withPaintContextResult
, ExtraSelection, TExtraSelection, CExtraSelection
, QTextInlineObject, TQTextInlineObject, CQTextInlineObject
, QTextObjectInterface, TQTextObjectInterface, CQTextObjectInterface
, QImageTextKeyLang, TQImageTextKeyLang, CQImageTextKeyLang
, Q_IPV6ADDR, TQ_IPV6ADDR, CQ_IPV6ADDR, withQ_IPV6ADDRResult
, withObjectResult, withObjectRefResult
, intFromBool, boolFromInt
, withQListObject, withPtrPtrObject, withQListString
, withQListObjectResult, withQListObjectRefResult, withPtrPtrObjectResult, withQListStringResult
, withQListDouble, withQListDoubleResult, withQListIntResult, withQListLongResult
, CString, withCString
, withStringResult, withCStringResult
, stringFromPtr, cstringFromPtr
, newCWString, CWString, withCWString
, CDouble, toCDouble, fromCDouble, withDoubleResult
, CInt, toCInt, fromCInt, withIntResult
, CUInt, toCUInt, fromCUInt, withUnsignedIntResult
, CShort, toCShort, fromCShort, withShortResult
, CUShort, toCUShort, fromCUShort, withUnsignedShortResult
, CLong, toCLong, fromCLong, withLongResult
, CULong, toCULong, fromCULong, withUnsignedLongResult
, CLLong, toCLLong, fromCLLong, withLongLongResult
, CULLong, toCULLong, fromCULLong, withUnsignedLongLongResult
, CChar, toCChar, fromCChar, withCharResult
, CWchar, toCWchar
, CBool, toCBool, fromCBool, withBoolResult
, Ptr, nullPtr, ptrNull, ptrIsNull, ptrCast
, ForeignPtr, fptrNull, fptrIsNull, fptrCast
, FunPtr, toCFunPtr, freeHaskellFunPtr, castPtrToFunPtr
, addForeignPtrFinalizer
) where
import Foreign.C.Types
import System.IO.Unsafe (unsafePerformIO)
import Foreign.C
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.ForeignPtr.Unsafe as U
import Qtc.Classes.Object
type QVoid a = Object (CQVoid a)
type TQVoid a = CQVoid a
data CQVoid a = CQVoid
withQVoidResult :: IO (Ptr (TQVoid a)) -> IO (QVoid a)
withQVoidResult f
= withObjectRefResult f
foreign import ccall qtc_QVoid_GetFinalizer :: FunPtr (Ptr (TQVoid a) -> IO ())
type QMetaObject a = Object (CQMetaObject a)
type TQMetaObject a = CQMetaObject a
data CQMetaObject a = CQMetaObject
withQMetaObjectResult :: IO (Ptr (TQMetaObject a)) -> IO (QMetaObject a)
withQMetaObjectResult f
= withObjectRefResult f
foreign import ccall qtc_QMetaObject_GetFinalizer :: FunPtr (Ptr (TQMetaObject a) -> IO ())
type QString a = Object (CQString a)
type TQString a = CQString a
data CQString a = CQString
type QByteArray a = Object (CQByteArray a)
type TQByteArray a = CQByteArray a
data CQByteArray a = CQByteArray
withQByteArrayResult :: IO (Ptr (TQByteArray a)) -> IO (QByteArray a)
withQByteArrayResult f
= withObjectRefResult f
foreign import ccall qtc_QByteArray_GetFinalizer :: FunPtr (Ptr (TQByteArray a) -> IO ())
type QResource a = Object (CQResource a)
type TQResource a = CQResource a
data CQResource a = CQResource
withQResourceResult :: IO (Ptr (TQResource a)) -> IO (QResource a)
withQResourceResult f
= withObjectRefResult f
foreign import ccall qtc_QResource_GetFinalizer :: FunPtr (Ptr (TQResource a) -> IO ())
type Q_IPV6ADDR a = Object (CQ_IPV6ADDR a)
type TQ_IPV6ADDR a = CQ_IPV6ADDR a
data CQ_IPV6ADDR a = CQ_IPV6ADDR
withQ_IPV6ADDRResult :: IO (Ptr (TQ_IPV6ADDR a)) -> IO (Q_IPV6ADDR a)
withQ_IPV6ADDRResult f
= withObjectRefResult f
type QTransform a = Object (CQTransform a)
type TQTransform a = CQTransform a
data CQTransform a = CQTransform
withQTransformResult :: IO (Ptr (TQTransform a)) -> IO (QTransform a)
withQTransformResult f
= withObjectRefResult f
type Element a = Object (CElement a)
type TElement a = CElement a
data CElement a = CElement
withElementResult :: IO (Ptr (TElement a)) -> IO (Element a)
withElementResult f
= withObjectResult qtc_Element_getFinalizer f
foreign import ccall qtc_Element_getFinalizer :: FunPtr (Ptr (TElement a) -> IO ())
type PaintContext a = Object (CPaintContext a)
type TPaintContext a = CPaintContext a
data CPaintContext a = CPaintContext
withPaintContextResult :: IO (Ptr (TPaintContext a)) -> IO (PaintContext a)
withPaintContextResult f
= withObjectResult qtc_PaintContext_getFinalizer f
foreign import ccall qtc_PaintContext_getFinalizer :: FunPtr (Ptr (TPaintContext a) -> IO ())
type ExtraSelection a = Object (CExtraSelection a)
type TExtraSelection a = CExtraSelection a
data CExtraSelection a = CExtraSelection
type QTextInlineObject a = Object (CQTextInlineObject a)
type TQTextInlineObject a = CQTextInlineObject a
data CQTextInlineObject a = CQTextInlineObject
type QTextObjectInterface a = Object (CQTextObjectInterface a)
type TQTextObjectInterface a = CQTextObjectInterface a
data CQTextObjectInterface a = CQTextObjectInterface
type QImageTextKeyLang a = Object (CQImageTextKeyLang a)
type TQImageTextKeyLang a = CQImageTextKeyLang a
data CQImageTextKeyLang a = CQImageTextKeyLang
withObjectResult :: FunPtr (Ptr a -> IO ()) -> IO (Ptr a) -> IO (Object a)
withObjectResult f io
= do
p <- io
objectFromPtr f p
withObjectRefResult :: IO (Ptr a) -> IO (Object a)
withObjectRefResult io
= do
p <- io
objectFromPtr_nf p
withDoubleResult :: IO CDouble -> IO Double
withDoubleResult io
= do
x <- io
return (fromCDouble x)
toCDouble :: Double -> CDouble
toCDouble i = realToFrac i
fromCDouble :: CDouble -> Double
fromCDouble ci = realToFrac ci
withIntResult :: IO CInt -> IO Int
withIntResult io
= do
x <- io
return (fromCInt x)
toCInt :: Int -> CInt
toCInt i = fromIntegral i
fromCInt :: CInt -> Int
fromCInt ci = fromIntegral ci
withUnsignedIntResult :: IO CUInt -> IO Int
withUnsignedIntResult io
= do
x <- io
return (fromCUInt x)
toCUInt :: Int -> CUInt
toCUInt i = fromIntegral i
fromCUInt :: CUInt -> Int
fromCUInt ci = fromIntegral ci
withShortResult :: IO CShort -> IO Int
withShortResult io
= do
x <- io
return (fromCShort x)
toCShort :: Int -> CShort
toCShort i = fromIntegral i
fromCShort :: CShort -> Int
fromCShort ci = fromIntegral ci
withUnsignedShortResult :: IO CUShort -> IO Int
withUnsignedShortResult io
= do
x <- io
return (fromCUShort x)
toCUShort :: Int -> CUShort
toCUShort i = fromIntegral i
fromCUShort :: CUShort -> Int
fromCUShort ci = fromIntegral ci
withLongResult :: IO CLong -> IO Int
withLongResult io
= do
x <- io
return (fromCLong x)
toCLong :: Int -> CLong
toCLong i = fromIntegral i
fromCLong :: CLong -> Int
fromCLong ci = fromIntegral ci
withUnsignedLongResult :: IO CULong -> IO Int
withUnsignedLongResult io
= do
x <- io
return (fromCULong x)
toCULong :: Int -> CULong
toCULong i = fromIntegral i
fromCULong :: CULong -> Int
fromCULong ci = fromIntegral ci
withLongLongResult :: IO CLLong -> IO Int
withLongLongResult io
= do
x <- io
return (fromCLLong x)
toCLLong :: Int -> CLLong
toCLLong i = fromIntegral i
fromCLLong :: CLLong -> Int
fromCLLong ci = fromIntegral ci
withUnsignedLongLongResult :: IO CULLong -> IO Int
withUnsignedLongLongResult io
= do
x <- io
return (fromCULLong x)
toCULLong :: Int -> CULLong
toCULLong i = fromIntegral i
fromCULLong :: CULLong -> Int
fromCULLong ci = fromIntegral ci
type CBool = CInt
toCBool :: Bool -> CBool
toCBool b = toCInt (if b then 1 else 0)
withBoolResult :: IO CBool -> IO Bool
withBoolResult io
= do
x <- io
return (fromCBool x)
fromCBool :: CBool -> Bool
fromCBool cb = (cb /= 0)
intFromBool :: Bool -> Int
intFromBool b = if b then 1 else 0
boolFromInt :: Int -> Bool
boolFromInt i = (i/=0)
toCChar :: Char -> CChar
toCChar = castCharToCChar
withCharResult :: (Num a, Integral a) => IO a -> IO Char
withCharResult io
= do
x <- io
return (fromCWchar x)
fromCChar :: CChar -> Char
fromCChar = castCCharToChar
toCWchar :: (Num a) => Char -> a
toCWchar = fromIntegral . fromEnum
fromCWchar :: (Num a, Integral a) => a -> Char
fromCWchar = toEnum . fromIntegral
withQtWStringResult :: (Ptr CWchar -> IO CInt) -> IO String
withQtWStringResult f
= do
len <- f nullPtr
if (len<=0)
then return ""
else withCWString (replicate (fromCInt len) ' ') $ \cstr ->
do f cstr
peekCWString cstr
withStringResult :: IO (Ptr (TQString a)) -> IO String
withStringResult io
= do
qs <- io
s <- withQtWStringResult (qtc_QString_GetString qs)
qtc_QString_Delete qs
return s
stringFromPtr :: Ptr (TQString a) -> IO String
stringFromPtr qptr
= do s <- withQtWStringResult (qtc_QString_GetString qptr)
qtc_QString_Delete qptr
return s
foreign import ccall "qtc_QString_Delete" qtc_QString_Delete :: Ptr (TQString a) -> IO ()
foreign import ccall "qtc_QString_GetString" qtc_QString_GetString :: Ptr (TQString a) -> Ptr CWchar -> IO CInt
withQtStringResult :: (Ptr CChar -> IO CInt) -> IO String
withQtStringResult f
= do
len <- f nullPtr
if (len<=0)
then return ""
else withCString (replicate (fromCInt len) ' ') $ \cstr ->
do f cstr
peekCString cstr
withCStringResult :: IO (Ptr (TQByteArray a)) -> IO String
withCStringResult io
= do
qba <- io
s <- withQtStringResult (qtc_QByteArray_GetByteArray qba)
qtc_QByteArray_Delete qba
return s
cstringFromPtr :: Ptr (TQByteArray a) -> IO String
cstringFromPtr qptr
= do s <- withQtStringResult (qtc_QByteArray_GetByteArray qptr)
qtc_QByteArray_Delete qptr
return s
foreign import ccall "qtc_QByteArray_Delete" qtc_QByteArray_Delete :: Ptr (TQByteArray a) -> IO ()
foreign import ccall "qtc_QByteArray_GetByteArray" qtc_QByteArray_GetByteArray :: Ptr (TQByteArray a) -> Ptr CChar -> IO CInt
withQListStringResult :: (Ptr (Ptr (TQString a)) -> IO CInt) -> IO [String]
withQListStringResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
arr <- peekArray len carr
mapM stringFromPtr arr
withQListIntResult :: (Ptr CInt -> IO CInt) -> IO [Int]
withQListIntResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
xs <- peekArray len carr
return (map fromCInt xs)
withQListLongResult :: (Ptr CLong -> IO CInt) -> IO [Int]
withQListLongResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
xs <- peekArray len carr
return (map fromCLong xs)
withQListDouble :: [Double] -> (CInt -> Ptr CDouble -> IO b) -> IO b
withQListDouble xs f
= withArray (map toCDouble xs) $ \carr ->
f (toCInt (length xs)) carr
withQListDoubleResult :: (Ptr CDouble -> IO CInt) -> IO [Double]
withQListDoubleResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
xs <- peekArray len carr
return (map fromCDouble xs)
withQListObjectResult :: FunPtr (Ptr a -> IO ()) -> (Ptr (Ptr a) -> IO CInt) -> IO [Object a]
withQListObjectResult ff f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
ps <- peekArray len carr
objectListFromPtrList ff ps
withQListObjectRefResult :: (Ptr (Ptr a) -> IO CInt) -> IO [Object a]
withQListObjectRefResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
ps <- peekArray len carr
objectListFromPtrList_nf ps
withPtrPtrObjectResult :: (Ptr (Ptr ()) -> IO CInt) -> IO [Object ()]
withPtrPtrObjectResult f
= do
clen <- f nullPtr
let len = fromCInt clen
if (len <= 0)
then return []
else allocaArray len $ \carr ->
do f carr
ps <- peekArray len carr
objectListFromPtrList_nf ps
withQListString :: [String] -> (CInt -> Ptr CWString -> IO a) -> IO a
withQListString xs f
= withCWStrings xs [] $ \cxs ->
withArray0 ptrNull cxs $ \carr ->
f (toCInt len) carr
where
len = length xs
withCWStrings [] cxs f
= f (reverse cxs)
withCWStrings (x:xs) cxs f
= withCWString x $ \cx ->
withCWStrings xs (cx:cxs) f
withQListObject :: [Object a] -> (CInt -> Ptr (Ptr a) -> IO b) -> IO b
withQListObject xs f
= withArray0 ptrNull (map ptrFromObject xs) $ \carr ->
f (toCInt (length xs)) carr
withPtrPtrObject :: [Object ()] -> (CInt -> Ptr (Ptr ()) -> IO a) -> IO a
withPtrPtrObject xs f
= withArray0 ptrNull (map ptrFromObject xs) $ \carr ->
f (toCInt (length xs)) carr
toCFunPtr :: FunPtr a -> Ptr a
toCFunPtr fptr = castFunPtrToPtr fptr
ptrNull :: Ptr a
ptrNull = nullPtr
ptrIsNull :: Ptr a -> Bool
ptrIsNull p = (p == ptrNull)
ptrCast :: Ptr a -> Ptr b
ptrCast p = castPtr p
fptrNull :: IO (ForeignPtr a)
fptrNull = newForeignPtr_ nullPtr
fptrIsNull :: ForeignPtr a -> Bool
fptrIsNull fp
= ((U.unsafeForeignPtrToPtr fp) == ptrNull)
fptrCast :: ForeignPtr a -> ForeignPtr b
fptrCast p = castForeignPtr p
| keera-studios/hsQt | Qtc/Classes/Types.hs | bsd-2-clause | 14,345 | 0 | 14 | 3,014 | 4,779 | 2,429 | 2,350 | 399 | 2 |
{-|
This module provides the /Predecessor Estimation/ and the /Predecessor Estimation CP/ processors.
@
|- <pre(S1#) + S2# + S / S1# + W# + W, Q, T#> :f
------------------------------------------------
|- <S1# + S2# + S / W# + W, Q, T#> :f
@
Here @pre(R#)@ is defined as the union of all direct predecessors of all rules in @R#@.
We compute @S1#@ from an 'ExpressionSelector' such that @pre(S1#)@ is a subset of @S2#@, i.e., all predecessors occur
in the strict components.
-}
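-- A hedged usage sketch: the exported strategies below are ordinary
-- 'TrsStrategy' values, so the default instance can be named directly, e.g.
--
-- > peDefault :: TrsStrategy
-- > peDefault = predecessorEstimation (RS.selAllOf RS.selDPs)
--
-- which, given the default of 'selArg', coincides with 'predecessorEstimation''.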
-- MS:
-- the subproof for predecessor estimation cp is currently stored as closed left branch (using assumption)
-- good: normally printed; (partially) certifiable
-- bad: the (generic) proof output is a bit awkward
module Tct.Trs.Processor.DP.DPGraph.PredecessorEstimation
( predecessorEstimationDeclaration
, predecessorEstimation
, predecessorEstimation'
, predecessorEstimationCPDeclaration
, predecessorEstimationCP
, predecessorEstimationCP'
) where
import Control.Applicative ((<|>))
import Control.Monad (guard)
import Data.List (find)
import Data.Maybe (catMaybes)
import Data.Monoid
import qualified Data.Set as S
import qualified Data.Rewriting.Rule as R (Rule)
import qualified Tct.Core.Common.Pretty as PP
import Tct.Core.Common.SemiRing (bigAdd, zero)
import qualified Tct.Core.Common.Xml as Xml
import qualified Tct.Core.Data as T
import Tct.Core.Processor.Assumption (assumeWith)
import Tct.Common.ProofCombinators
import Tct.Trs.Data
import qualified Tct.Trs.Data.ComplexityPair as CP
import Tct.Trs.Data.DependencyGraph
import qualified Tct.Trs.Data.Problem as Prob
import qualified Tct.Trs.Data.RuleSelector as RS
import qualified Tct.Trs.Data.Rules as RS
import qualified Tct.Trs.Processor.ComplexityPair as CP
data Selected = Selected
{ node :: NodeId
, rule :: R.Rule F V
, preds :: [(NodeId,R.Rule F V)]
} deriving Show
data PredecessorEstimation = PredecessorEstimation
{ onSelection :: ExpressionSelector F V }
deriving Show
data PredecessorEstimationProof
= PredecessorEstimationProof
{ wdg_ :: DG F V
, selected_ :: [Selected] }
| PredecessorEstimationCPProof
{ wdg_ :: DG F V
, selected_ :: [Selected]
, cp_ :: ComplexityPair
, cpproof_ :: ComplexityPairProof
, cpcert_ :: T.Certificate }
| PredecessorEstimationFail
deriving Show
instance T.Processor (PredecessorEstimation) where
type ProofObject PredecessorEstimation = ApplicationProof PredecessorEstimationProof
type In PredecessorEstimation = Trs
type Out PredecessorEstimation = Trs
execute p prob =
maybe estimate (\s -> T.abortWith (Inapplicable s :: ApplicationProof PredecessorEstimationProof)) (Prob.isDPProblem' prob)
where
wdg = Prob.dependencyGraph prob
sdps = Prob.strictDPs prob
wdps = Prob.weakDPs prob
estimate
| null candidates = T.abortWith (Applicable PredecessorEstimationFail)
| otherwise = T.succeedWith1 (Applicable proof) T.fromId nprob
where
initialDPs = RS.dpRules $ RS.rsSelect (RS.selFirstAlternative $ onSelection p) prob
candidates = do
(n,cn) <- lnodes wdg
let predss = [ (n1,cn1) | (n1,cn1,_) <- lpredecessors wdg n ]
guard $ isStrict cn && RS.member (theRule cn) initialDPs
guard $ all (\(n1,cn1) -> n1 /= n && isStrict cn1) predss
return $ Selected { node=n, rule=theRule cn, preds=fmap theRule `map` predss }
        -- estimate in a bottom-up way
sort cs = reverse $ catMaybes [find ((n==) . node) cs | n <- topsort wdg]
select [] sel = sel
select (c:cs) sel = select cs sel' where
sel'
| any (c `isPredecessorOf`) sel = sel
| otherwise = c:sel
s1 `isPredecessorOf` s2 = node s2 `elem` reachablesBfs wdg [node s1]
selected = select (sort candidates) []
shiftStrict = RS.fromList [ r | s <- selected , (_,r) <- preds s ]
shiftWeak = RS.fromList [ rule s | s <- selected ]
-- MS: TODO: dpgraph modify isStrict for selected ones
nprob = Prob.sanitiseDPGraph $ prob
{ Prob.strictDPs = (sdps `RS.difference` shiftWeak) `RS.union` shiftStrict
, Prob.weakDPs = (wdps `RS.union` shiftWeak) `RS.difference` shiftStrict }
proof = PredecessorEstimationProof
{ wdg_ = wdg
, selected_ = selected }
data PredecessorEstimationCP = PredecessorEstimationCP
{ onSelectionCP :: ExpressionSelector F V
, withComplexityPair :: ComplexityPair }
deriving Show
instance T.Processor PredecessorEstimationCP where
type ProofObject PredecessorEstimationCP = ApplicationProof PredecessorEstimationProof
type In PredecessorEstimationCP = Trs
type Out PredecessorEstimationCP = Trs
type Forking PredecessorEstimationCP = T.Pair
execute p prob =
maybe (estimate $ withComplexityPair p) (\s -> T.abortWith (Inapplicable s :: ApplicationProof PredecessorEstimationProof)) (Prob.isDPProblem' prob)
where
wdg = Prob.dependencyGraph prob
sdps = Prob.strictDPs prob
wdps = Prob.weakDPs prob
estimate (CP.ComplexityPair cp) = do
let
rs = RS.RuleSelector
{ RS.rsName = "first alternative for predecessorEstimation on " ++ RS.rsName (onSelectionCP p)
, RS.rsSelect = withPredecessors . RS.rsSelect (onSelectionCP p) }
cpproof <- CP.solveComplexityPair cp rs prob
case cpproof of
Left msg -> T.abortWith msg
Right cpp -> mkProof cpp
where
snub = S.toList . S.fromList
withPredecessors (RS.SelectDP d) = RS.BigOr $ RS.SelectDP d : predss
where
predss = case lookupNode wdg DGNode{theRule=d, isStrict=True} <|> lookupNode wdg DGNode{theRule=d,isStrict=False} of
Just n -> [ withPreds n (S.singleton n) ]
Nothing -> []
withPreds n seen = bigAnd (k `fmap` snub [ (n', theRule cn') | (n',cn',_) <- lpredecessors wdg n])
where
k (n',r') = if n' `S.member` seen then RS.SelectDP r' else RS.BigOr [RS.SelectDP r', withPreds n' (n' `S.insert` seen) ]
bigAnd [a] = a
bigAnd as = RS.BigAnd as
withPredecessors (RS.SelectTrs ss) = RS.SelectTrs ss
withPredecessors (RS.BigOr ss) = RS.BigOr (withPredecessors `fmap` ss)
withPredecessors (RS.BigAnd ss) = RS.BigAnd (withPredecessors `fmap` ss)
mkProof cpproof
| RS.null shiftWeak = T.abortWith (Applicable PredecessorEstimationFail)
| otherwise = return $ T.Progress (Applicable proof) bigAdd (T.Pair (subProof, T.Open nprob))
where
(known, propagated) = propagate (CP.removableDPs cpproof) []
propagate seen props
| null newp = (seen, props)
| otherwise = propagate (RS.fromList (rule `fmap` newp) `RS.union` seen) (newp ++ props)
where
newp = do
(n,cn) <- lnodes wdg
guard $ not (theRule cn `RS.member` seen)
let predss = [ (n1,theRule cn1) | (n1,cn1,_) <- lpredecessors wdg n ]
guard $ all (\(_,r) -> r `RS.member` seen) predss
return $ Selected { node=n, rule=theRule cn, preds=predss }
shiftWeak = sdps `RS.intersect` known
nprob = Prob.sanitiseDPGraph $ prob
{ Prob.strictDPs = (sdps `RS.difference` shiftWeak)
, Prob.weakDPs = (wdps `RS.union` shiftWeak) }
subProof = assumeWith (T.timeUBCert zero) (CP.result cpproof)
proof = PredecessorEstimationCPProof
{ wdg_ = wdg
, selected_ = propagated
, cp_ = withComplexityPair p
, cpproof_ = cpproof
, cpcert_ = T.certificate subProof }
--- * instances ------------------------------------------------------------------------------------------------------
description :: [String]
description =
[ "Moves a strict dependency into the weak component, if all predecessors in the dependency graph are strict"
, "and there is no edge from the rule to itself." ]
selArg :: T.Argument 'T.Optional (ExpressionSelector F V)
selArg = RS.selectorArg
`T.withName` "onSelection"
`T.withHelp`
[ "Determines which rules to select."
    , "By default all dependency pairs are selected for knowledge propagation." ]
`T.optional` (RS.selAllOf RS.selDPs)
--- ** Predecessor Estimation ----------------------------------------------------------------------------------------
predecessorEstimationStrategy :: ExpressionSelector F V -> TrsStrategy
predecessorEstimationStrategy rs = T.Apply $ PredecessorEstimation { onSelection = rs }
predecessorEstimationDeclaration :: T.Declaration (
'[ T.Argument 'T.Optional (ExpressionSelector F V) ]
T.:-> TrsStrategy )
predecessorEstimationDeclaration =
T.declare "predecessorEstimation" description (T.OneTuple selArg) predecessorEstimationStrategy
predecessorEstimation :: ExpressionSelector F V -> TrsStrategy
predecessorEstimation = T.declFun predecessorEstimationDeclaration
predecessorEstimation' :: TrsStrategy
predecessorEstimation' = T.deflFun predecessorEstimationDeclaration
--- ** Predecessor Estimation CP -------------------------------------------------------------------------------------
predecessorEstimationCPStrategy :: ExpressionSelector F V -> ComplexityPair -> TrsStrategy
predecessorEstimationCPStrategy rs cp = T.Apply $ PredecessorEstimationCP { onSelectionCP = rs, withComplexityPair = cp }
predecessorEstimationCPDeclaration :: T.Declaration (
'[ T.Argument 'T.Optional (ExpressionSelector F V)
, T.Argument 'T.Required ComplexityPair ]
T.:-> TrsStrategy )
predecessorEstimationCPDeclaration =
T.declare "predecessorEstimationCP" description (selArg, CP.complexityPairArg) predecessorEstimationCPStrategy
predecessorEstimationCP :: ExpressionSelector F V -> ComplexityPair -> TrsStrategy
predecessorEstimationCP = T.declFun predecessorEstimationCPDeclaration
predecessorEstimationCP' :: ComplexityPair -> TrsStrategy
predecessorEstimationCP' = T.deflFun predecessorEstimationCPDeclaration
--- * proof data -----------------------------------------------------------------------------------------------------
instance PP.Pretty PredecessorEstimationProof where
pretty PredecessorEstimationFail = PP.text "Predecessor estimation is not applicable on selected rules."
pretty p@PredecessorEstimationProof{} = PP.vcat
[ PP.text "We estimate the number of application of"
, PP.indent 2 ppEstimated
, PP.text "by application of"
, PP.indent 2 $ PP.text "Pre" <> PP.parens ppEstimated <> PP.text " = " <> ppPredecessors <> PP.dot
, PP.text "Here rules are labelled as follows:"
, PP.indent 2 $ ppRules ]
where
ppRules = PP.listing' [ (n, theRule cn) | (n,cn) <- lnodes (wdg_ p) ]
ppEstimated = PP.set' [ (node s) | s <- selected_ p ]
ppPredecessors = PP.set' [ n | s <- selected_ p, (n,_) <- preds s]
pretty p@PredecessorEstimationCPProof{} = PP.vcat
    [ PP.text $ "We first use the processor " ++ show (cp_ p) ++ " to orient the following rules strictly:"
, PP.indent 2 $ PP.listing' rdps
, PP.indent 2 . PP.pretty $ CP.removableTrs (cpproof_ p)
, if null (selected_ p)
then PP.text "The strictly oriented rules are moved into the weak component."
else PP.vcat
[ PP.text "Consider the set of all dependency pairs"
, PP.indent 2 (PP.listing' ndps)
        , PP.text ("Processor " ++ show (cp_ p) ++ " induces the complexity certificate ")
          <> PP.pretty (cpcert_ p)
          <> PP.text " on application of the dependency pairs"
, PP.indent 2 (PP.set' orientedNodes)
, PP.text "These cover all (indirect) predecessors of dependency pairs"
, PP.indent 2 (PP.set' knownNodes)
, PP.text "their number of applications is equally bounded."
, PP.text "The dependency pairs are shifted into the weak component."] ]
where
remdps = CP.removableDPs (cpproof_ p)
ndps = asNodedRules $ lnodes (wdg_ p)
rdps = filter ((`RS.member` remdps) . snd) ndps
orientedNodes = S.fromList $ fst (unzip rdps)
knownNodes = orientedNodes `S.union` S.fromList (node `fmap` (selected_ p))
instance Xml.Xml PredecessorEstimationProof where
toXml PredecessorEstimationFail = Xml.elt "predecessorEstimation" []
toXml p@PredecessorEstimationProof{} =
Xml.elt "predecessorEstimation"
[ Xml.toXml (wdg_ p)
, Xml.elt "pe" $ concat
[ [ Xml.toXml (node s,rule s)
, Xml.elt "predecessors" [ Xml.toXml (n1,r1) | (n1,r1) <- preds s ]]
| s <- selected_ p]
]
-- MS: TODO:
toXml PredecessorEstimationCPProof{} = Xml.empty
| ComputationWithBoundedResources/tct-trs | src/Tct/Trs/Processor/DP/DPGraph/PredecessorEstimation.hs | bsd-3-clause | 13,374 | 0 | 22 | 3,458 | 3,407 | 1,833 | 1,574 | -1 | -1 |
{-# LANGUAGE BangPatterns, CPP, MagicHash, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fprof-auto-top #-}
-------------------------------------------------------------------------------
--
-- | Main API for compiling plain Haskell source code.
--
-- This module implements compilation of a Haskell source. It is
-- /not/ concerned with preprocessing of source files; this is handled
-- in "DriverPipeline".
--
-- There are various entry points depending on what mode we're in:
-- "batch" mode (@--make@), "one-shot" mode (@-c@, @-S@ etc.), and
-- "interactive" mode (GHCi). There are also entry points for
-- individual passes: parsing, typechecking/renaming, desugaring, and
-- simplification.
--
-- All the functions here take an 'HscEnv' as a parameter, but none of
-- them return a new one: 'HscEnv' is treated as an immutable value
-- from here on in (although it has mutable components, for the
-- caches).
--
-- We use the Hsc monad to deal with warning messages consistently:
-- specifically, while executing within an Hsc monad, warnings are
-- collected. When a Hsc monad returns to an IO monad, the
-- warnings are printed, or compilation aborts if the @-Werror@
-- flag is enabled.
--
-- (c) The GRASP/AQUA Project, Glasgow University, 1993-2000
--
-------------------------------------------------------------------------------
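-- A hedged sketch of driving the individual passes exported below for a
-- single 'ModSummary' (illustrative only; error handling is elided, and
-- 'hscIncrementalCompile' is the driver GHC itself uses):
--
-- > frontendOnly :: HscEnv -> ModSummary -> IO ModGuts
-- > frontendOnly hsc_env summary = do
-- >     parsed      <- hscParse hsc_env summary
-- >     (tc_gbl, _) <- hscTypecheckRename hsc_env summary parsed
-- >     hscDesugar hsc_env summary tc_gbl
--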
module HscMain
(
-- * Making an HscEnv
newHscEnv
-- * Compiling complete source files
, Messager, batchMsg
, HscStatus (..)
, hscIncrementalCompile
, hscCompileCmmFile
, hscGenHardCode
, hscInteractive
-- * Running passes separately
, hscParse
, hscTypecheckRename
, hscDesugar
, makeSimpleDetails
, hscSimplify -- ToDo, shouldn't really export this
-- * Safe Haskell
, hscCheckSafe
, hscGetSafe
-- * Support for interactive evaluation
, hscParseIdentifier
, hscTcRcLookupName
, hscTcRnGetInfo
#ifdef GHCI
, hscIsGHCiMonad
, hscGetModuleInterface
, hscRnImportDecls
, hscTcRnLookupRdrName
, hscStmt, hscStmtWithLocation, hscParsedStmt
, hscDecls, hscDeclsWithLocation
, hscTcExpr, TcRnExprMode(..), hscImport, hscKcType
, hscParseExpr
, hscCompileCoreExpr
-- * Low-level exports for hooks
, hscCompileCoreExpr'
#endif
-- We want to make sure that we export enough to be able to redefine
-- hscFileFrontEnd in client code
, hscParse', hscSimplify', hscDesugar', tcRnModule'
, getHscEnv
, hscSimpleIface', hscNormalIface'
, oneShotMsg
, hscFileFrontEnd, genericHscFrontend, dumpIfaceStats
) where
#ifdef GHCI
import Id
import GHCi.RemoteTypes ( ForeignHValue )
import ByteCodeGen ( byteCodeGen, coreExprToBCOs )
import Linker
import CoreTidy ( tidyExpr )
import Type ( Type )
import {- Kind parts of -} Type ( Kind )
import CoreLint ( lintInteractiveExpr )
import VarEnv ( emptyTidyEnv )
import Panic
import ConLike
import Control.Concurrent
#endif
import THNames ( templateHaskellNames )
import Module
import Packages
import RdrName
import HsSyn
import CoreSyn
import StringBuffer
import Parser
import Lexer
import SrcLoc
import TcRnDriver
import TcIface ( typecheckIface )
import TcRnMonad
import IfaceEnv ( initNameCache )
import LoadIface ( ifaceStats, initExternalPackageState )
import PrelInfo
import MkIface
import Desugar
import SimplCore
import TidyPgm
import CorePrep
import CoreToStg ( coreToStg )
import qualified StgCmm ( codeGen )
import StgSyn
import CostCentre
import ProfInit
import TyCon
import Name
import SimplStg ( stg2stg )
import Cmm
import CmmParse ( parseCmmFile )
import CmmBuildInfoTables
import CmmPipeline
import CmmInfo
import CodeOutput
import InstEnv
import FamInstEnv
import Fingerprint ( Fingerprint )
import Hooks
import Maybes
import DynFlags
import ErrUtils
import Outputable
import UniqFM
import NameEnv
import HscStats ( ppSourceStats )
import HscTypes
import FastString
import UniqSupply
import Bag
import Exception
import qualified Stream
import Stream (Stream)
import Util
import Data.List
import Control.Monad
import Data.IORef
import System.FilePath as FilePath
import System.Directory
import qualified Data.Map as Map
#include "HsVersions.h"
{- **********************************************************************
%* *
Initialisation
%* *
%********************************************************************* -}
newHscEnv :: DynFlags -> IO HscEnv
newHscEnv dflags = do
eps_var <- newIORef initExternalPackageState
us <- mkSplitUniqSupply 'r'
nc_var <- newIORef (initNameCache us allKnownKeyNames)
fc_var <- newIORef emptyModuleEnv
#ifdef GHCI
iserv_mvar <- newMVar Nothing
#endif
return HscEnv { hsc_dflags = dflags
, hsc_targets = []
, hsc_mod_graph = []
, hsc_IC = emptyInteractiveContext dflags
, hsc_HPT = emptyHomePackageTable
, hsc_EPS = eps_var
, hsc_NC = nc_var
, hsc_FC = fc_var
, hsc_type_env_var = Nothing
#ifdef GHCI
, hsc_iserv = iserv_mvar
#endif
}
allKnownKeyNames :: [Name] -- Put here to avoid loops involving DsMeta,
allKnownKeyNames -- where templateHaskellNames are defined
| debugIsOn
, not (isNullUFM badNamesEnv)
= panic ("badAllKnownKeyNames:\n" ++ badNamesStr)
-- NB: We can't use ppr here, because this is sometimes evaluated in a
-- context where there are no DynFlags available, leading to a cryptic
-- "<<details unavailable>>" error. (This seems to happen only in the
-- stage 2 compiler, for reasons I [Richard] have no clue of.)
| otherwise
= all_names
where
all_names = knownKeyNames
++ templateHaskellNames
namesEnv = foldl (\m n -> extendNameEnv_Acc (:) singleton m n n)
emptyUFM all_names
badNamesEnv = filterNameEnv (\ns -> length ns > 1) namesEnv
badNamesPairs = nonDetUFMToList badNamesEnv
-- It's OK to use nonDetUFMToList here because the ordering only affects
-- the message when we get a panic
badNamesStrs = map pairToStr badNamesPairs
badNamesStr = unlines badNamesStrs
pairToStr (uniq, ns) = " " ++
show uniq ++
": [" ++
intercalate ", " (map (occNameString . nameOccName) ns) ++
"]"
-- -----------------------------------------------------------------------------
getWarnings :: Hsc WarningMessages
getWarnings = Hsc $ \_ w -> return (w, w)
clearWarnings :: Hsc ()
clearWarnings = Hsc $ \_ _ -> return ((), emptyBag)
logWarnings :: WarningMessages -> Hsc ()
logWarnings w = Hsc $ \_ w0 -> return ((), w0 `unionBags` w)
getHscEnv :: Hsc HscEnv
getHscEnv = Hsc $ \e w -> return (e, w)
handleWarnings :: Hsc ()
handleWarnings = do
dflags <- getDynFlags
w <- getWarnings
liftIO $ printOrThrowWarnings dflags w
clearWarnings
-- | Log warnings in the monad, and if there are errors then
-- throw a SourceError exception.
logWarningsReportErrors :: Messages -> Hsc ()
logWarningsReportErrors (warns,errs) = do
logWarnings warns
when (not $ isEmptyBag errs) $ throwErrors errs
-- | Throw some errors.
throwErrors :: ErrorMessages -> Hsc a
throwErrors = liftIO . throwIO . mkSrcErr
-- | Deal with errors and warnings returned by a compilation step
--
-- In order to reduce dependencies on other parts of the compiler, functions
-- outside the "main" parts of GHC return warnings and errors as a parameter
-- and signal success by wrapping the result in a 'Maybe' type. This
-- function logs the returned warnings and propagates errors as exceptions
-- (of type 'SourceError').
--
-- This function assumes the following invariants:
--
-- 1. If the second result indicates success (is of the form 'Just x'),
-- there must be no error messages in the first result.
--
-- 2. If there are no error messages, but the second result indicates failure
-- there should be warnings in the first result. That is, if the action
-- failed, it must have been due to the warnings (i.e., @-Werror@).
ioMsgMaybe :: IO (Messages, Maybe a) -> Hsc a
ioMsgMaybe ioA = do
((warns,errs), mb_r) <- liftIO ioA
logWarnings warns
case mb_r of
Nothing -> throwErrors errs
Just r -> ASSERT( isEmptyBag errs ) return r
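-- A typical call site (a sketch mirroring the real uses below, e.g. in
-- tcRnModule'): wrap a typechecker entry point that returns
-- @(Messages, Maybe a)@ so its warnings are logged and a failure is
-- rethrown as a 'SourceError'. The name typecheckParsed is illustrative.
--
-- > typecheckParsed :: HscEnv -> ModSummary -> HsParsedModule -> Hsc TcGblEnv
-- > typecheckParsed hsc_env summary hpm =
-- >     ioMsgMaybe $ tcRnModule hsc_env (ms_hsc_src summary) False hpm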
-- | like ioMsgMaybe, except that we ignore error messages and return
-- 'Nothing' instead.
ioMsgMaybe' :: IO (Messages, Maybe a) -> Hsc (Maybe a)
ioMsgMaybe' ioA = do
((warns,_errs), mb_r) <- liftIO $ ioA
logWarnings warns
return mb_r
-- -----------------------------------------------------------------------------
-- | Lookup things in the compiler's environment
#ifdef GHCI
hscTcRnLookupRdrName :: HscEnv -> Located RdrName -> IO [Name]
hscTcRnLookupRdrName hsc_env0 rdr_name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe $ tcRnLookupRdrName hsc_env rdr_name }
#endif
hscTcRcLookupName :: HscEnv -> Name -> IO (Maybe TyThing)
hscTcRcLookupName hsc_env0 name = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe' $ tcRnLookupName hsc_env name
-- ignore errors: the only error we're likely to get is
-- "name not found", and the Maybe in the return type
-- is used to indicate that.
hscTcRnGetInfo :: HscEnv -> Name -> IO (Maybe (TyThing, Fixity, [ClsInst], [FamInst]))
hscTcRnGetInfo hsc_env0 name
= runInteractiveHsc hsc_env0 $
do { hsc_env <- getHscEnv
; ioMsgMaybe' $ tcRnGetInfo hsc_env name }
#ifdef GHCI
hscIsGHCiMonad :: HscEnv -> String -> IO Name
hscIsGHCiMonad hsc_env name
= runHsc hsc_env $ ioMsgMaybe $ isGHCiMonad hsc_env name
hscGetModuleInterface :: HscEnv -> Module -> IO ModIface
hscGetModuleInterface hsc_env0 mod = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ getModuleInterface hsc_env mod
-- -----------------------------------------------------------------------------
-- | Rename some import declarations
hscRnImportDecls :: HscEnv -> [LImportDecl RdrName] -> IO GlobalRdrEnv
hscRnImportDecls hsc_env0 import_decls = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ioMsgMaybe $ tcRnImportDecls hsc_env import_decls
#endif
-- -----------------------------------------------------------------------------
-- | parse a file, returning the abstract syntax
hscParse :: HscEnv -> ModSummary -> IO HsParsedModule
hscParse hsc_env mod_summary = runHsc hsc_env $ hscParse' mod_summary
-- internal version, that doesn't fail due to -Werror
hscParse' :: ModSummary -> Hsc HsParsedModule
hscParse' mod_summary = {-# SCC "Parser" #-}
withTiming getDynFlags
(text "Parser"<+>brackets (ppr $ ms_mod mod_summary))
(const ()) $ do
dflags <- getDynFlags
let src_filename = ms_hspp_file mod_summary
maybe_src_buf = ms_hspp_buf mod_summary
-------------------------- Parser ----------------
-- sometimes we already have the buffer in memory, perhaps
-- because we needed to parse the imports out of it, or get the
-- module name.
buf <- case maybe_src_buf of
Just b -> return b
Nothing -> liftIO $ hGetStringBuffer src_filename
let loc = mkRealSrcLoc (mkFastString src_filename) 1 1
case unP parseModule (mkPState dflags buf loc) of
PFailed span err ->
liftIO $ throwOneError (mkPlainErrMsg dflags span err)
POk pst rdr_module -> do
logWarningsReportErrors (getMessages pst dflags)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" $
ppr rdr_module
liftIO $ dumpIfSet_dyn dflags Opt_D_source_stats "Source Statistics" $
ppSourceStats False rdr_module
-- To get the list of extra source files, we take the list
-- that the parser gave us,
-- - eliminate files beginning with '<'. gcc likes to use
-- pseudo-filenames like "<built-in>" and "<command-line>"
        -- - normalise them (eliminate differences between ./f and f)
-- - filter out the preprocessed source file
-- - filter out anything beginning with tmpdir
-- - remove duplicates
-- - filter out the .hs/.lhs source filename if we have one
--
let n_hspp = FilePath.normalise src_filename
srcs0 = nub $ filter (not . (tmpDir dflags `isPrefixOf`))
$ filter (not . (== n_hspp))
$ map FilePath.normalise
$ filter (not . (isPrefixOf "<"))
$ map unpackFS
$ srcfiles pst
srcs1 = case ml_hs_file (ms_location mod_summary) of
Just f -> filter (/= FilePath.normalise f) srcs0
Nothing -> srcs0
-- sometimes we see source files from earlier
-- preprocessing stages that cannot be found, so just
-- filter them out:
srcs2 <- liftIO $ filterM doesFileExist srcs1
return HsParsedModule {
hpm_module = rdr_module,
hpm_src_files = srcs2,
hpm_annotations
= (Map.fromListWith (++) $ annotations pst,
Map.fromList $ ((noSrcSpan,comment_q pst)
:(annotations_comments pst)))
}
-- XXX: should this really be a Maybe X? Check under which circumstances this
-- can become a Nothing and decide whether this should instead throw an
-- exception/signal an error.
type RenamedStuff =
(Maybe (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
Maybe LHsDocString))
-- | Rename and typecheck a module, additionally returning the renamed syntax
hscTypecheckRename :: HscEnv -> ModSummary -> HsParsedModule
-> IO (TcGblEnv, RenamedStuff)
hscTypecheckRename hsc_env mod_summary rdr_module = runHsc hsc_env $ do
tc_result <- tcRnModule' hsc_env mod_summary True rdr_module
-- This 'do' is in the Maybe monad!
let rn_info = do decl <- tcg_rn_decls tc_result
let imports = tcg_rn_imports tc_result
exports = tcg_rn_exports tc_result
doc_hdr = tcg_doc_hdr tc_result
return (decl,imports,exports,doc_hdr)
return (tc_result, rn_info)
-- wrapper around tcRnModule to handle safe haskell extras
tcRnModule' :: HscEnv -> ModSummary -> Bool -> HsParsedModule
-> Hsc TcGblEnv
tcRnModule' hsc_env sum save_rn_syntax mod = do
tcg_res <- {-# SCC "Typecheck-Rename" #-}
ioMsgMaybe $
tcRnModule hsc_env (ms_hsc_src sum) save_rn_syntax mod
-- See Note [Safe Haskell Overlapping Instances Implementation]
-- although this is used for more than just that failure case.
(tcSafeOK, whyUnsafe) <- liftIO $ readIORef (tcg_safeInfer tcg_res)
dflags <- getDynFlags
let allSafeOK = safeInferred dflags && tcSafeOK
-- end of the safe haskell line, how to respond to user?
if not (safeHaskellOn dflags) || (safeInferOn dflags && not allSafeOK)
-- if safe Haskell off or safe infer failed, mark unsafe
then markUnsafeInfer tcg_res whyUnsafe
-- module (could be) safe, throw warning if needed
else do
tcg_res' <- hscCheckSafeImports tcg_res
safe <- liftIO $ fst <$> readIORef (tcg_safeInfer tcg_res')
when safe $ do
case wopt Opt_WarnSafe dflags of
True -> (logWarnings $ unitBag $
makeIntoWarning (Reason Opt_WarnSafe) $
mkPlainWarnMsg dflags (warnSafeOnLoc dflags) $
errSafe tcg_res')
False | safeHaskell dflags == Sf_Trustworthy &&
wopt Opt_WarnTrustworthySafe dflags ->
(logWarnings $ unitBag $
makeIntoWarning (Reason Opt_WarnTrustworthySafe) $
mkPlainWarnMsg dflags (trustworthyOnLoc dflags) $
errTwthySafe tcg_res')
False -> return ()
return tcg_res'
where
pprMod t = ppr $ moduleName $ tcg_mod t
errSafe t = quotes (pprMod t) <+> text "has been inferred as safe!"
errTwthySafe t = quotes (pprMod t)
<+> text "is marked as Trustworthy but has been inferred as safe!"
-- | Convert a typechecked module to Core
hscDesugar :: HscEnv -> ModSummary -> TcGblEnv -> IO ModGuts
hscDesugar hsc_env mod_summary tc_result =
runHsc hsc_env $ hscDesugar' (ms_location mod_summary) tc_result
hscDesugar' :: ModLocation -> TcGblEnv -> Hsc ModGuts
hscDesugar' mod_location tc_result = do
hsc_env <- getHscEnv
r <- ioMsgMaybe $
{-# SCC "deSugar" #-}
deSugar hsc_env mod_location tc_result
-- always check -Werror after desugaring, this is the last opportunity for
-- warnings to arise before the backend.
handleWarnings
return r
-- | Make a 'ModDetails' from the results of typechecking. Used when
-- typechecking only, as opposed to full compilation.
makeSimpleDetails :: HscEnv -> TcGblEnv -> IO ModDetails
makeSimpleDetails hsc_env tc_result = mkBootModDetailsTc hsc_env tc_result
{- **********************************************************************
%* *
The main compiler pipeline
%* *
%********************************************************************* -}
{-
--------------------------------
The compilation proper
--------------------------------
It's the task of the compilation proper to compile Haskell, hs-boot and core
files to either byte-code, hard-code (C, asm, LLVM, etc.) or to nothing at all
(the module is still parsed and type-checked. This feature is mostly used by
IDEs and the like). Compilation can happen in either 'one-shot', 'batch',
'nothing', or 'interactive' mode. 'One-shot' mode targets hard-code, 'batch'
mode targets hard-code, 'nothing' mode targets nothing and 'interactive' mode
targets byte-code.
The modes are kept separate because of their different types and meanings:
* In 'one-shot' mode, we're only compiling a single file and can therefore
discard the new ModIface and ModDetails. This is also the reason it only
targets hard-code; compiling to byte-code or nothing doesn't make sense when
we discard the result.
* 'Batch' mode is like 'one-shot' except that we keep the resulting ModIface
and ModDetails. 'Batch' mode doesn't target byte-code since that requires us to
return the newly compiled byte-code.
* 'Nothing' mode has exactly the same type as 'batch' mode but they're still
kept separate. This is because compiling to nothing is fairly special: We
don't output any interface files, we don't run the simplifier and we don't
generate any code.
* 'Interactive' mode is similar to 'batch' mode except that we return the
compiled byte-code together with the ModIface and ModDetails.
Trying to compile a hs-boot file to byte-code will result in a run-time error.
This is the only thing that isn't caught by the type-system.
-}
type Messager = HscEnv -> (Int,Int) -> RecompileRequired -> ModSummary -> IO ()
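-- A minimal 'Messager' sketch (illustrative; 'batchMsg' below is the one
-- actually used by --make): it only reports which module of how many is
-- being processed.
--
-- > quietMsg :: Messager
-- > quietMsg hsc_env mod_index _recomp mod_summary =
-- >     compilationProgressMsg (hsc_dflags hsc_env) $
-- >         showModuleIndex mod_index ++ moduleNameString (moduleName (ms_mod mod_summary))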
-- | This function runs GHC's frontend with recompilation
-- avoidance. Specifically, it checks if recompilation is needed,
-- and if it is, it parses and typechecks the input module.
-- It does not write out the results of typechecking (See
-- compileOne and hscIncrementalCompile).
hscIncrementalFrontend :: Bool -- always do basic recompilation check?
-> Maybe TcGblEnv
-> Maybe Messager
-> ModSummary
-> SourceModified
-> Maybe ModIface -- Old interface, if available
-> (Int,Int) -- (i,n) = module i of n (for msgs)
-> Hsc (Either ModIface (FrontendResult, Maybe Fingerprint))
hscIncrementalFrontend
always_do_basic_recompilation_check m_tc_result
mHscMessage mod_summary source_modified mb_old_iface mod_index
= do
hsc_env <- getHscEnv
let msg what = case mHscMessage of
Just hscMessage -> hscMessage hsc_env mod_index what mod_summary
Nothing -> return ()
skip iface = do
liftIO $ msg UpToDate
return $ Left iface
compile mb_old_hash reason = do
liftIO $ msg reason
result <- genericHscFrontend mod_summary
return $ Right (result, mb_old_hash)
stable = case source_modified of
SourceUnmodifiedAndStable -> True
_ -> False
case m_tc_result of
Just tc_result
| not always_do_basic_recompilation_check ->
return $ Right (FrontendTypecheck tc_result, Nothing)
_ -> do
(recomp_reqd, mb_checked_iface)
<- {-# SCC "checkOldIface" #-}
liftIO $ checkOldIface hsc_env mod_summary
source_modified mb_old_iface
-- save the interface that comes back from checkOldIface.
-- In one-shot mode we don't have the old iface until this
-- point, when checkOldIface reads it from the disk.
let mb_old_hash = fmap mi_iface_hash mb_checked_iface
case mb_checked_iface of
Just iface | not (recompileRequired recomp_reqd) ->
-- If the module used TH splices when it was last
-- compiled, then the recompilation check is not
-- accurate enough (#481) and we must ignore
-- it. However, if the module is stable (none of
-- the modules it depends on, directly or
-- indirectly, changed), then we *can* skip
-- recompilation. This is why the SourceModified
-- type contains SourceUnmodifiedAndStable, and
-- it's pretty important: otherwise ghc --make
-- would always recompile TH modules, even if
-- nothing at all has changed. Stability is just
-- the same check that make is doing for us in
-- one-shot mode.
case m_tc_result of
Nothing
| mi_used_th iface && not stable ->
compile mb_old_hash (RecompBecause "TH")
_ ->
skip iface
_ ->
case m_tc_result of
Nothing -> compile mb_old_hash recomp_reqd
Just tc_result ->
return $ Right (FrontendTypecheck tc_result, mb_old_hash)
genericHscFrontend :: ModSummary -> Hsc FrontendResult
genericHscFrontend mod_summary =
getHooked hscFrontendHook genericHscFrontend' >>= ($ mod_summary)
genericHscFrontend' :: ModSummary -> Hsc FrontendResult
genericHscFrontend' mod_summary
= FrontendTypecheck `fmap` hscFileFrontEnd mod_summary
--------------------------------------------------------------
-- Compilers
--------------------------------------------------------------
-- Compile Haskell/boot in OneShot mode.
hscIncrementalCompile :: Bool
-> Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface
-> (Int,Int)
-- HomeModInfo does not contain linkable, since we haven't
-- code-genned yet
-> IO (HscStatus, HomeModInfo)
hscIncrementalCompile always_do_basic_recompilation_check m_tc_result
mHscMessage hsc_env' mod_summary source_modified mb_old_iface mod_index
= do
-- One-shot mode needs a knot-tying mutable variable for interface
-- files. See TcRnTypes.TcGblEnv.tcg_type_env_var.
type_env_var <- newIORef emptyNameEnv
let mod = ms_mod mod_summary
hsc_env = hsc_env'{ hsc_type_env_var = Just (mod, type_env_var) }
-- NB: enter Hsc monad here so that we don't bail out early with
-- -Werror on typechecker warnings; we also want to run the desugarer
-- to get those warnings too. (But we'll always exit at that point
-- because the desugarer runs ioMsgMaybe.)
runHsc hsc_env $ do
let dflags = hsc_dflags hsc_env
e <- hscIncrementalFrontend always_do_basic_recompilation_check m_tc_result mHscMessage
mod_summary source_modified mb_old_iface mod_index
case e of
Left iface -> do
details <- liftIO $ genModDetails hsc_env iface
return (HscUpToDate, HomeModInfo{
hm_details = details,
hm_iface = iface,
hm_linkable = Nothing
})
Right (FrontendTypecheck tc_result, mb_old_hash) -> do
(status, hmi, no_change) <-
if hscTarget dflags /= HscNothing &&
ms_hsc_src mod_summary == HsSrcFile
then finish hsc_env mod_summary tc_result mb_old_hash
else finishTypecheckOnly hsc_env mod_summary tc_result mb_old_hash
liftIO $ hscMaybeWriteIface dflags (hm_iface hmi) no_change mod_summary
return (status, hmi)
-- Generates and writes out the final interface for a typecheck.
finishTypecheckOnly :: HscEnv
-> ModSummary
-> TcGblEnv
-> Maybe Fingerprint
-> Hsc (HscStatus, HomeModInfo, Bool)
finishTypecheckOnly hsc_env summary tc_result mb_old_hash = do
let dflags = hsc_dflags hsc_env
(iface, changed, details) <- liftIO $ hscSimpleIface hsc_env tc_result mb_old_hash
let hsc_status =
case (hscTarget dflags, ms_hsc_src summary) of
(HscNothing, _) -> HscNotGeneratingCode
(_, HsBootFile) -> HscUpdateBoot
(_, HsigFile) -> HscUpdateSig
_ -> panic "finishTypecheckOnly"
return (hsc_status,
HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = Nothing },
changed)
-- Runs the post-typechecking frontend (desugar and simplify),
-- and then generates and writes out the final interface. We want
-- to write the interface AFTER simplification so we can get
-- as up-to-date and good unfoldings and other info as possible
-- in the interface file. This is only ever run for HsSrcFile,
-- and NOT for HscNothing.
finish :: HscEnv
-> ModSummary
-> TcGblEnv
-> Maybe Fingerprint
-> Hsc (HscStatus, HomeModInfo, Bool)
finish hsc_env summary tc_result mb_old_hash = do
let dflags = hsc_dflags hsc_env
MASSERT( ms_hsc_src summary == HsSrcFile )
MASSERT( hscTarget dflags /= HscNothing )
guts0 <- hscDesugar' (ms_location summary) tc_result
guts <- hscSimplify' guts0
(iface, changed, details, cgguts) <- liftIO $ hscNormalIface hsc_env guts mb_old_hash
return (HscRecomp cgguts summary,
HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = Nothing },
changed)
hscMaybeWriteIface :: DynFlags -> ModIface -> Bool -> ModSummary -> IO ()
hscMaybeWriteIface dflags iface changed summary =
let force_write_interface = gopt Opt_WriteInterface dflags
write_interface = case hscTarget dflags of
HscNothing -> False
HscInterpreted -> False
_ -> True
in when (write_interface || force_write_interface) $
hscWriteIface dflags iface changed summary
--------------------------------------------------------------
-- NoRecomp handlers
--------------------------------------------------------------
genModDetails :: HscEnv -> ModIface -> IO ModDetails
genModDetails hsc_env old_iface
= do
new_details <- {-# SCC "tcRnIface" #-}
initIfaceCheck hsc_env (typecheckIface old_iface)
dumpIfaceStats hsc_env
return new_details
--------------------------------------------------------------
-- Progress displayers.
--------------------------------------------------------------
oneShotMsg :: HscEnv -> RecompileRequired -> IO ()
oneShotMsg hsc_env recomp =
case recomp of
UpToDate ->
compilationProgressMsg (hsc_dflags hsc_env) $
"compilation IS NOT required"
_ ->
return ()
batchMsg :: Messager
batchMsg hsc_env mod_index recomp mod_summary =
case recomp of
MustCompile -> showMsg "Compiling " ""
UpToDate
| verbosity (hsc_dflags hsc_env) >= 2 -> showMsg "Skipping " ""
| otherwise -> return ()
RecompBecause reason -> showMsg "Compiling " (" [" ++ reason ++ "]")
where
dflags = hsc_dflags hsc_env
showMsg msg reason =
compilationProgressMsg dflags $
(showModuleIndex mod_index ++
msg ++ showModMsg dflags (hscTarget dflags)
(recompileRequired recomp) mod_summary)
++ reason
--------------------------------------------------------------
-- FrontEnds
--------------------------------------------------------------
-- | Given a 'ModSummary', parses and typechecks it, returning the
-- 'TcGblEnv' resulting from type-checking.
hscFileFrontEnd :: ModSummary -> Hsc TcGblEnv
hscFileFrontEnd mod_summary = do
hpm <- hscParse' mod_summary
hsc_env <- getHscEnv
tcg_env <- tcRnModule' hsc_env mod_summary False hpm
return tcg_env
--------------------------------------------------------------
-- Safe Haskell
--------------------------------------------------------------
-- Note [Safe Haskell Trust Check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell checks that an import is trusted according to the following
-- rules for an import of module M that resides in Package P:
--
-- * If M is recorded as Safe and all its trust dependencies are OK
-- then M is considered safe.
-- * If M is recorded as Trustworthy and P is considered trusted and
-- all M's trust dependencies are OK then M is considered safe.
--
-- By trust dependencies we mean that the check is transitive. So if
-- a module M that is Safe relies on a module N that is trustworthy,
-- importing module M will first check (according to the second case)
-- that N is trusted before checking M is trusted.
--
-- This is a minimal description, so please refer to the user guide
-- for more details. The user guide is also considered the authoritative
-- source in this matter, not the comments or code.
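-- A small worked example of the rules above (illustrative): suppose module M
-- in package P is marked Trustworthy and safe-imports module N (marked Safe)
-- from package Q. Importing M is accepted only if P is trusted (second case)
-- and the check succeeds recursively for N; N, being Safe, does not require Q
-- to be trusted, only that N's own trust dependencies are OK (first case).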
-- Note [Safe Haskell Inference]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Safe Haskell does Safe inference on modules that don't have any specific
-- safe haskell mode flag. The basic approach to this is:
-- * When deciding if we need to do a Safe language check, treat
-- an unmarked module as having -XSafe mode specified.
-- * For checks, don't throw errors but return them to the caller.
-- * Caller checks if there are errors:
-- * For modules explicitly marked -XSafe, we throw the errors.
-- * For unmarked modules (inference mode), we drop the errors
-- and mark the module as being Unsafe.
--
-- It used to be that we only did safe inference on modules that had no Safe
-- Haskell flags, but now we perform safe inference on all modules as we want
-- to allow users to set the `-Wsafe`, `-Wunsafe` and
-- `-Wtrustworthy-safe` flags on Trustworthy and Unsafe modules so that a
-- user can ensure their assumptions are correct and see reasons for why a
-- module is safe or unsafe.
--
-- This is tricky as we must be careful when we should throw an error compared
-- to just warnings. For checking safe imports we manage it as two steps. First
-- we check any imports that are required to be safe, then we check all other
-- imports to see if we can infer them to be safe.
-- | Check that the safe imports of the module being compiled are valid.
-- If not we either issue a compilation error if the module is explicitly
-- using Safe Haskell, or mark the module as unsafe if we're in safe
-- inference mode.
hscCheckSafeImports :: TcGblEnv -> Hsc TcGblEnv
hscCheckSafeImports tcg_env = do
dflags <- getDynFlags
tcg_env' <- checkSafeImports dflags tcg_env
checkRULES dflags tcg_env'
where
checkRULES dflags tcg_env' = do
case safeLanguageOn dflags of
True -> do
-- XSafe: we nuke user written RULES
logWarnings $ warns dflags (tcg_rules tcg_env')
return tcg_env' { tcg_rules = [] }
False
-- SafeInferred: user defined RULES, so not safe
| safeInferOn dflags && not (null $ tcg_rules tcg_env')
-> markUnsafeInfer tcg_env' $ warns dflags (tcg_rules tcg_env')
-- Trustworthy OR SafeInferred: with no RULES
| otherwise
-> return tcg_env'
warns dflags rules = listToBag $ map (warnRules dflags) rules
warnRules dflags (L loc (HsRule n _ _ _ _ _ _)) =
mkPlainWarnMsg dflags loc $
text "Rule \"" <> ftext (snd $ unLoc n) <> text "\" ignored" $+$
text "User defined rules are disabled under Safe Haskell"
-- | Validate that safe imported modules are actually safe. For modules in the
-- HomePackage (the package in which the module we are compiling resides) this
-- just involves checking its trust type is 'Safe' or 'Trustworthy'. For modules
-- that reside in another package we also must check that the external package
-- is trusted. See the Note [Safe Haskell Trust Check] above for more
-- information.
--
-- The code for this is quite tricky as the whole algorithm is done in a few
-- distinct phases in different parts of the code base. See
-- RnNames.rnImportDecl for where package trust dependencies for a module are
-- collected and unioned. Specifically see the Note [RnNames . Tracking Trust
-- Transitively] and the Note [RnNames . Trust Own Package].
checkSafeImports :: DynFlags -> TcGblEnv -> Hsc TcGblEnv
checkSafeImports dflags tcg_env
= do
imps <- mapM condense imports'
let (safeImps, regImps) = partition (\(_,_,s) -> s) imps
-- We want to use the warning state specifically for detecting if safe
-- inference has failed, so store and clear any existing warnings.
oldErrs <- getWarnings
clearWarnings
-- Check safe imports are correct
safePkgs <- mapM checkSafe safeImps
safeErrs <- getWarnings
clearWarnings
-- Check non-safe imports are correct if inferring safety
-- See the Note [Safe Haskell Inference]
(infErrs, infPkgs) <- case (safeInferOn dflags) of
False -> return (emptyBag, [])
True -> do infPkgs <- mapM checkSafe regImps
infErrs <- getWarnings
clearWarnings
return (infErrs, infPkgs)
-- restore old errors
logWarnings oldErrs
case (isEmptyBag safeErrs) of
-- Failed safe check
False -> liftIO . throwIO . mkSrcErr $ safeErrs
-- Passed safe check
True -> do
let infPassed = isEmptyBag infErrs
tcg_env' <- case (not infPassed) of
True -> markUnsafeInfer tcg_env infErrs
False -> return tcg_env
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgReqs
let newTrust = pkgTrustReqs safePkgs infPkgs infPassed
return tcg_env' { tcg_imports = impInfo `plusImportAvails` newTrust }
where
impInfo = tcg_imports tcg_env -- ImportAvails
imports = imp_mods impInfo -- ImportedMods
imports' = moduleEnvToList imports -- (Module, [ImportedModsVal])
pkgReqs = imp_trust_pkgs impInfo -- [UnitId]
condense :: (Module, [ImportedModsVal]) -> Hsc (Module, SrcSpan, IsSafeImport)
condense (_, []) = panic "HscMain.condense: Pattern match failure!"
condense (m, x:xs) = do imv <- foldlM cond' x xs
return (m, imv_span imv, imv_is_safe imv)
-- ImportedModsVal = (ModuleName, Bool, SrcSpan, IsSafeImport)
cond' :: ImportedModsVal -> ImportedModsVal -> Hsc ImportedModsVal
cond' v1 v2
| imv_is_safe v1 /= imv_is_safe v2
= throwErrors $ unitBag $ mkPlainErrMsg dflags (imv_span v1)
(text "Module" <+> ppr (imv_name v1) <+>
(text $ "is imported both as a safe and unsafe import!"))
| otherwise
= return v1
-- easier interface to work with
checkSafe (m, l, _) = fst `fmap` hscCheckSafe' dflags m l
-- what pkg's to add to our trust requirements
pkgTrustReqs req inf infPassed | safeInferOn dflags
&& safeHaskell dflags == Sf_None && infPassed
= emptyImportAvails {
imp_trust_pkgs = catMaybes req ++ catMaybes inf
}
pkgTrustReqs _ _ _ | safeHaskell dflags == Sf_Unsafe
= emptyImportAvails
pkgTrustReqs req _ _ = emptyImportAvails { imp_trust_pkgs = catMaybes req }
-- | Check that a module is safe to import.
--
-- We return True to indicate the import is safe and False otherwise,
-- although in the False case an exception may be thrown first.
hscCheckSafe :: HscEnv -> Module -> SrcSpan -> IO Bool
hscCheckSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
pkgs <- snd `fmap` hscCheckSafe' dflags m l
when (packageTrustOn dflags) $ checkPkgTrust dflags pkgs
errs <- getWarnings
return $ isEmptyBag errs
-- | Return if a module is trusted and the pkgs it depends on to be trusted.
hscGetSafe :: HscEnv -> Module -> SrcSpan -> IO (Bool, [UnitId])
hscGetSafe hsc_env m l = runHsc hsc_env $ do
dflags <- getDynFlags
(self, pkgs) <- hscCheckSafe' dflags m l
good <- isEmptyBag `fmap` getWarnings
clearWarnings -- don't want them printed...
let pkgs' | Just p <- self = p:pkgs
| otherwise = pkgs
return (good, pkgs')
-- | Is a module trusted? If not, throw or log errors depending on the type.
-- Return (regardless of whether the module is trusted) whether the trust
-- type requires the module's own package to be trusted, and a list of other
-- packages required to be trusted (these latter ones haven't been checked
-- yet, but the own-package trust has been).
hscCheckSafe' :: DynFlags -> Module -> SrcSpan -> Hsc (Maybe UnitId, [UnitId])
hscCheckSafe' dflags m l = do
(tw, pkgs) <- isModSafe m l
case tw of
False -> return (Nothing, pkgs)
True | isHomePkg m -> return (Nothing, pkgs)
| otherwise -> return (Just $ moduleUnitId m, pkgs)
where
isModSafe :: Module -> SrcSpan -> Hsc (Bool, [UnitId])
isModSafe m l = do
iface <- lookup' m
case iface of
-- can't load iface to check trust!
Nothing -> throwErrors $ unitBag $ mkPlainErrMsg dflags l
$ text "Can't load the interface file for" <+> ppr m
<> text ", to check that it can be safely imported"
-- got iface, check trust
Just iface' ->
let trust = getSafeMode $ mi_trust iface'
trust_own_pkg = mi_trust_pkg iface'
-- check module is trusted
safeM = trust `elem` [Sf_Safe, Sf_Trustworthy]
-- check package is trusted
safeP = packageTrusted trust trust_own_pkg m
-- pkg trust reqs
pkgRs = map fst $ filter snd $ dep_pkgs $ mi_deps iface'
-- General errors we throw but Safe errors we log
errs = case (safeM, safeP) of
(True, True ) -> emptyBag
(True, False) -> pkgTrustErr
(False, _ ) -> modTrustErr
in do
logWarnings errs
return (trust == Sf_Trustworthy, pkgRs)
where
pkgTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The package (" <> ppr (moduleUnitId m)
<> text ") the module resides in isn't trusted."
]
modTrustErr = unitBag $ mkErrMsg dflags l (pkgQual dflags) $
sep [ ppr (moduleName m)
<> text ": Can't be safely imported!"
, text "The module itself isn't safe." ]
-- | Check the package a module resides in is trusted. Safe compiled
-- modules are trusted without requiring that their package is trusted. For
-- trustworthy modules, modules in the home package are trusted but
-- otherwise we check the package trust flag.
packageTrusted :: SafeHaskellMode -> Bool -> Module -> Bool
packageTrusted Sf_None _ _ = False -- shouldn't hit these cases
packageTrusted Sf_Unsafe _ _ = False -- prefer for completeness.
packageTrusted _ _ _
| not (packageTrustOn dflags) = True
packageTrusted Sf_Safe False _ = True
packageTrusted _ _ m
| isHomePkg m = True
| otherwise = trusted $ getPackageDetails dflags (moduleUnitId m)
lookup' :: Module -> Hsc (Maybe ModIface)
lookup' m = do
hsc_env <- getHscEnv
hsc_eps <- liftIO $ hscEPS hsc_env
let pkgIfaceT = eps_PIT hsc_eps
homePkgT = hsc_HPT hsc_env
iface = lookupIfaceByModule dflags homePkgT pkgIfaceT m
#ifdef GHCI
-- the 'lookupIfaceByModule' method will always fail when calling from GHCi
-- as the compiler hasn't filled in the various module tables
-- so we need to call 'getModuleInterface' to load from disk
iface' <- case iface of
Just _ -> return iface
Nothing -> snd `fmap` (liftIO $ getModuleInterface hsc_env m)
return iface'
#else
return iface
#endif
isHomePkg :: Module -> Bool
isHomePkg m
| thisPackage dflags == moduleUnitId m = True
| otherwise = False
-- | Check the list of packages are trusted.
checkPkgTrust :: DynFlags -> [UnitId] -> Hsc ()
checkPkgTrust dflags pkgs =
case errors of
[] -> return ()
_ -> (liftIO . throwIO . mkSrcErr . listToBag) errors
where
errors = catMaybes $ map go pkgs
go pkg
| trusted $ getPackageDetails dflags pkg
= Nothing
| otherwise
= Just $ mkErrMsg dflags noSrcSpan (pkgQual dflags)
$ text "The package (" <> ppr pkg <> text ") is required" <>
text " to be trusted but it isn't!"
-- | Set module to unsafe and (potentially) wipe trust information.
--
-- Make sure to call this method to set a module to inferred unsafe, it should
-- be a central and single failure method. We only wipe the trust information
-- when we aren't in a specific Safe Haskell mode.
--
-- While we only use this for recording that a module was inferred unsafe, we
-- may call it on modules using Trustworthy or Unsafe flags so as to allow
-- warning flags for safety to function correctly. See Note [Safe Haskell
-- Inference].
markUnsafeInfer :: TcGblEnv -> WarningMessages -> Hsc TcGblEnv
markUnsafeInfer tcg_env whyUnsafe = do
dflags <- getDynFlags
when (wopt Opt_WarnUnsafe dflags)
(logWarnings $ unitBag $ makeIntoWarning (Reason Opt_WarnUnsafe) $
mkPlainWarnMsg dflags (warnUnsafeOnLoc dflags) (whyUnsafe' dflags))
liftIO $ writeIORef (tcg_safeInfer tcg_env) (False, whyUnsafe)
    -- NOTE: Only wipe trust when not in an explicit Safe Haskell mode. Other
-- times inference may be on but we are in Trustworthy mode -- so we want
-- to record safe-inference failed but not wipe the trust dependencies.
case safeHaskell dflags == Sf_None of
True -> return $ tcg_env { tcg_imports = wiped_trust }
False -> return tcg_env
where
wiped_trust = (tcg_imports tcg_env) { imp_trust_pkgs = [] }
pprMod = ppr $ moduleName $ tcg_mod tcg_env
whyUnsafe' df = vcat [ quotes pprMod <+> text "has been inferred as unsafe!"
, text "Reason:"
, nest 4 $ (vcat $ badFlags df) $+$
(vcat $ pprErrMsgBagWithLoc whyUnsafe) $+$
(vcat $ badInsts $ tcg_insts tcg_env)
]
badFlags df = concat $ map (badFlag df) unsafeFlagsForInfer
badFlag df (str,loc,on,_)
| on df = [mkLocMessage SevOutput (loc df) $
text str <+> text "is not allowed in Safe Haskell"]
| otherwise = []
badInsts insts = concat $ map badInst insts
checkOverlap (NoOverlap _) = False
checkOverlap _ = True
badInst ins | checkOverlap (overlapMode (is_flag ins))
= [mkLocMessage SevOutput (nameSrcSpan $ getName $ is_dfun ins) $
ppr (overlapMode $ is_flag ins) <+>
text "overlap mode isn't allowed in Safe Haskell"]
| otherwise = []
-- | Figure out the final correct safe haskell mode
hscGetSafeMode :: TcGblEnv -> Hsc SafeHaskellMode
hscGetSafeMode tcg_env = do
dflags <- getDynFlags
liftIO $ finalSafeMode dflags tcg_env
--------------------------------------------------------------
-- Simplifiers
--------------------------------------------------------------
hscSimplify :: HscEnv -> ModGuts -> IO ModGuts
hscSimplify hsc_env modguts = runHsc hsc_env $ hscSimplify' modguts
hscSimplify' :: ModGuts -> Hsc ModGuts
hscSimplify' ds_result = do
hsc_env <- getHscEnv
{-# SCC "Core2Core" #-}
liftIO $ core2core hsc_env ds_result
--------------------------------------------------------------
-- Interface generators
--------------------------------------------------------------
hscSimpleIface :: HscEnv
-> TcGblEnv
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails)
hscSimpleIface hsc_env tc_result mb_old_iface
= runHsc hsc_env $ hscSimpleIface' tc_result mb_old_iface
hscSimpleIface' :: TcGblEnv
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails)
hscSimpleIface' tc_result mb_old_iface = do
hsc_env <- getHscEnv
details <- liftIO $ mkBootModDetailsTc hsc_env tc_result
safe_mode <- hscGetSafeMode tc_result
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
liftIO $
mkIfaceTc hsc_env mb_old_iface safe_mode details tc_result
-- And the answer is ...
liftIO $ dumpIfaceStats hsc_env
return (new_iface, no_change, details)
hscNormalIface :: HscEnv
-> ModGuts
-> Maybe Fingerprint
-> IO (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface hsc_env simpl_result mb_old_iface =
runHsc hsc_env $ hscNormalIface' simpl_result mb_old_iface
hscNormalIface' :: ModGuts
-> Maybe Fingerprint
-> Hsc (ModIface, Bool, ModDetails, CgGuts)
hscNormalIface' simpl_result mb_old_iface = do
hsc_env <- getHscEnv
(cg_guts, details) <- {-# SCC "CoreTidy" #-}
liftIO $ tidyProgram hsc_env simpl_result
-- BUILD THE NEW ModIface and ModDetails
-- and emit external core if necessary
-- This has to happen *after* code gen so that the back-end
-- info has been set. Not yet clear if it matters waiting
-- until after code output
(new_iface, no_change)
<- {-# SCC "MkFinalIface" #-}
liftIO $
mkIface hsc_env mb_old_iface details simpl_result
liftIO $ dumpIfaceStats hsc_env
-- Return the prepared code.
return (new_iface, no_change, details, cg_guts)
--------------------------------------------------------------
-- BackEnd combinators
--------------------------------------------------------------
hscWriteIface :: DynFlags -> ModIface -> Bool -> ModSummary -> IO ()
hscWriteIface dflags iface no_change mod_summary = do
let ifaceFile = ml_hi_file (ms_location mod_summary)
unless no_change $
{-# SCC "writeIface" #-}
writeIfaceFile dflags ifaceFile iface
whenGeneratingDynamicToo dflags $ do
-- TODO: We should do a no_change check for the dynamic
-- interface file too
-- TODO: Should handle the dynamic hi filename properly
let dynIfaceFile = replaceExtension ifaceFile (dynHiSuf dflags)
dynIfaceFile' = addBootSuffix_maybe (mi_boot iface) dynIfaceFile
dynDflags = dynamicTooMkDynamicDynFlags dflags
writeIfaceFile dynDflags dynIfaceFile' iface
-- | Compile to hard-code.
hscGenHardCode :: HscEnv -> CgGuts -> ModSummary -> FilePath
-> IO (FilePath, Maybe FilePath) -- ^ @Just f@ <=> _stub.c is f
hscGenHardCode hsc_env cgguts mod_summary output_filename = do
let CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs0,
cg_dep_pkgs = dependencies,
cg_hpc_info = hpc_info } = cgguts
dflags = hsc_dflags hsc_env
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env this_mod location
core_binds data_tycons
----------------- Convert to STG ------------------
(stg_binds, cost_centre_info)
<- {-# SCC "CoreToStg" #-}
myCoreToStg dflags this_mod prepd_binds
let prof_init = profilingInitCode this_mod cost_centre_info
foreign_stubs = foreign_stubs0 `appendStubC` prof_init
------------------ Code generation ------------------
-- The back-end is streamed: each top-level function goes
-- from Stg all the way to asm before dealing with the next
-- top-level function, so showPass isn't very useful here.
-- Hence we have one showPass for the whole backend, the
-- next showPass after this will be "Assembler".
withTiming (pure dflags)
(text "CodeGen"<+>brackets (ppr this_mod))
(const ()) $ do
cmms <- {-# SCC "StgCmm" #-}
doCodeGen hsc_env this_mod data_tycons
cost_centre_info
stg_binds hpc_info
------------------ Code output -----------------------
rawcmms0 <- {-# SCC "cmmToRawCmm" #-}
cmmToRawCmm dflags cmms
let dump a = do dumpIfSet_dyn dflags Opt_D_dump_cmm_raw "Raw Cmm"
(ppr a)
return a
rawcmms1 = Stream.mapM dump rawcmms0
(output_filename, (_stub_h_exists, stub_c_exists))
<- {-# SCC "codeOutput" #-}
codeOutput dflags this_mod output_filename location
foreign_stubs dependencies rawcmms1
return (output_filename, stub_c_exists)
hscInteractive :: HscEnv
-> CgGuts
-> ModSummary
-> IO (Maybe FilePath, CompiledByteCode)
#ifdef GHCI
hscInteractive hsc_env cgguts mod_summary = do
let dflags = hsc_dflags hsc_env
let CgGuts{ -- This is the last use of the ModGuts in a compilation.
-- From now on, we just use the bits we need.
cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_foreign = foreign_stubs,
cg_modBreaks = mod_breaks } = cgguts
location = ms_location mod_summary
data_tycons = filter isDataTyCon tycons
-- cg_tycons includes newtypes, for the benefit of External Core,
-- but we don't generate any code for newtypes
-------------------
-- PREPARE FOR CODE GENERATION
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
corePrepPgm hsc_env this_mod location core_binds data_tycons
----------------- Generate byte code ------------------
comp_bc <- byteCodeGen hsc_env this_mod prepd_binds data_tycons mod_breaks
------------------ Create f-x-dynamic C-side stuff ---
(_istub_h_exists, istub_c_exists)
<- outputForeignStubs dflags this_mod location foreign_stubs
return (istub_c_exists, comp_bc)
#else
hscInteractive _ _ = panic "GHC not compiled with interpreter"
#endif
------------------------------
hscCompileCmmFile :: HscEnv -> FilePath -> FilePath -> IO ()
hscCompileCmmFile hsc_env filename output_filename = runHsc hsc_env $ do
let dflags = hsc_dflags hsc_env
cmm <- ioMsgMaybe $ parseCmmFile dflags filename
liftIO $ do
us <- mkSplitUniqSupply 'S'
let initTopSRT = initUs_ us emptySRT
dumpIfSet_dyn dflags Opt_D_dump_cmm_verbose "Parsed Cmm" (ppr cmm)
(_, cmmgroup) <- cmmPipeline hsc_env initTopSRT cmm
rawCmms <- cmmToRawCmm dflags (Stream.yield cmmgroup)
_ <- codeOutput dflags no_mod output_filename no_loc NoStubs [] rawCmms
return ()
where
no_mod = panic "hscCompileCmmFile: no_mod"
no_loc = ModLocation{ ml_hs_file = Just filename,
ml_hi_file = panic "hscCompileCmmFile: no hi file",
ml_obj_file = panic "hscCompileCmmFile: no obj file" }
-------------------- Stuff for new code gen ---------------------
doCodeGen :: HscEnv -> Module -> [TyCon]
-> CollectedCCs
-> [StgBinding]
-> HpcInfo
-> IO (Stream IO CmmGroup ())
-- Note we produce a 'Stream' of CmmGroups, so that the
-- backend can be run incrementally. Otherwise it generates all
-- the C-- up front, which has a significant space cost.
doCodeGen hsc_env this_mod data_tycons
cost_centre_info stg_binds hpc_info = do
let dflags = hsc_dflags hsc_env
let cmm_stream :: Stream IO CmmGroup ()
cmm_stream = {-# SCC "StgCmm" #-}
StgCmm.codeGen dflags this_mod data_tycons
cost_centre_info stg_binds hpc_info
-- codegen consumes a stream of CmmGroup, and produces a new
-- stream of CmmGroup (not necessarily synchronised: one
-- CmmGroup on input may produce many CmmGroups on output due
-- to proc-point splitting).
let dump1 a = do dumpIfSet_dyn dflags Opt_D_dump_cmm_from_stg
"Cmm produced by codegen" (ppr a)
return a
ppr_stream1 = Stream.mapM dump1 cmm_stream
-- We are building a single SRT for the entire module, so
-- we must thread it through all the procedures as we cps-convert them.
us <- mkSplitUniqSupply 'S'
-- When splitting, we generate one SRT per split chunk, otherwise
-- we generate one SRT for the whole module.
let
pipeline_stream
| gopt Opt_SplitObjs dflags || gopt Opt_SplitSections dflags
= {-# SCC "cmmPipeline" #-}
let run_pipeline us cmmgroup = do
let (topSRT', us') = initUs us emptySRT
(topSRT, cmmgroup) <- cmmPipeline hsc_env topSRT' cmmgroup
let srt | isEmptySRT topSRT = []
| otherwise = srtToData topSRT
return (us', srt ++ cmmgroup)
in do _ <- Stream.mapAccumL run_pipeline us ppr_stream1
return ()
| otherwise
= {-# SCC "cmmPipeline" #-}
let initTopSRT = initUs_ us emptySRT
run_pipeline = cmmPipeline hsc_env
in do topSRT <- Stream.mapAccumL run_pipeline initTopSRT ppr_stream1
Stream.yield (srtToData topSRT)
let
dump2 a = do dumpIfSet_dyn dflags Opt_D_dump_cmm
"Output Cmm" (ppr a)
return a
ppr_stream2 = Stream.mapM dump2 pipeline_stream
return ppr_stream2
myCoreToStg :: DynFlags -> Module -> CoreProgram
-> IO ( [StgBinding] -- output program
, CollectedCCs) -- cost centre info (declared and used)
myCoreToStg dflags this_mod prepd_binds = do
let stg_binds
= {-# SCC "Core2Stg" #-}
coreToStg dflags this_mod prepd_binds
(stg_binds2, cost_centre_info)
<- {-# SCC "Stg2Stg" #-}
stg2stg dflags this_mod stg_binds
return (stg_binds2, cost_centre_info)
{- **********************************************************************
%* *
\subsection{Compiling a do-statement}
%* *
%********************************************************************* -}
{-
When the UnlinkedBCOExpr is linked you get an HValue of type *IO [HValue]*. When
you run it you get a list of HValues that should be the same length as the list
of names; add them to the ClosureEnv.
A naked expression returns a singleton Name [it]. The stmt is lifted into the
IO monad as explained in Note [Interactively-bound Ids in GHCi] in HscTypes
-}
#ifdef GHCI
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmt :: HscEnv -> String -> IO (Maybe ([Id], ForeignHValue, FixityEnv))
hscStmt hsc_env stmt = hscStmtWithLocation hsc_env stmt "<interactive>" 1
-- | Compile a stmt all the way to an HValue, but don't run it
--
-- We return Nothing to indicate an empty statement (or comment only), not a
-- parse error.
hscStmtWithLocation :: HscEnv
-> String -- ^ The statement
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO ( Maybe ([Id]
, ForeignHValue {- IO [HValue] -}
, FixityEnv))
hscStmtWithLocation hsc_env0 stmt source linenumber =
runInteractiveHsc hsc_env0 $ do
maybe_stmt <- hscParseStmtWithLocation source linenumber stmt
case maybe_stmt of
Nothing -> return Nothing
Just parsed_stmt -> do
hsc_env <- getHscEnv
liftIO $ hscParsedStmt hsc_env parsed_stmt
hscParsedStmt :: HscEnv
-> GhciLStmt RdrName -- ^ The parsed statement
-> IO ( Maybe ([Id]
, ForeignHValue {- IO [HValue] -}
, FixityEnv))
hscParsedStmt hsc_env stmt = runInteractiveHsc hsc_env $ do
-- Rename and typecheck it
(ids, tc_expr, fix_env) <- ioMsgMaybe $ tcRnStmt hsc_env stmt
-- Desugar it
ds_expr <- ioMsgMaybe $ deSugarExpr hsc_env tc_expr
liftIO (lintInteractiveExpr "desugar expression" hsc_env ds_expr)
handleWarnings
-- Then code-gen, and link it
-- It's important NOT to have package 'interactive' as thisUnitId
-- for linking, else we try to link 'main' and can't find it.
-- Whereas the linker already knows to ignore 'interactive'
let src_span = srcLocSpan interactiveSrcLoc
hval <- liftIO $ hscCompileCoreExpr hsc_env src_span ds_expr
return $ Just (ids, hval, fix_env)
-- | Compile a list of declarations.
hscDecls :: HscEnv
         -> String -- ^ The declarations
-> IO ([TyThing], InteractiveContext)
hscDecls hsc_env str = hscDeclsWithLocation hsc_env str "<interactive>" 1
-- | Compile a list of declarations.
hscDeclsWithLocation :: HscEnv
                     -> String -- ^ The declarations
-> String -- ^ The source
-> Int -- ^ Starting line
-> IO ([TyThing], InteractiveContext)
hscDeclsWithLocation hsc_env0 str source linenumber =
runInteractiveHsc hsc_env0 $ do
L _ (HsModule{ hsmodDecls = decls }) <-
hscParseThingWithLocation source linenumber parseModule str
{- Rename and typecheck it -}
hsc_env <- getHscEnv
tc_gblenv <- ioMsgMaybe $ tcRnDeclsi hsc_env decls
{- Grab the new instances -}
-- We grab the whole environment because of the overlapping that may have
-- been done. See the notes at the definition of InteractiveContext
-- (ic_instances) for more details.
let defaults = tcg_default tc_gblenv
{- Desugar it -}
-- We use a basically null location for iNTERACTIVE
let iNTERACTIVELoc = ModLocation{ ml_hs_file = Nothing,
ml_hi_file = panic "hsDeclsWithLocation:ml_hi_file",
ml_obj_file = panic "hsDeclsWithLocation:ml_hi_file"}
ds_result <- hscDesugar' iNTERACTIVELoc tc_gblenv
{- Simplify -}
simpl_mg <- liftIO $ hscSimplify hsc_env ds_result
{- Tidy -}
(tidy_cg, mod_details) <- liftIO $ tidyProgram hsc_env simpl_mg
let !CgGuts{ cg_module = this_mod,
cg_binds = core_binds,
cg_tycons = tycons,
cg_modBreaks = mod_breaks } = tidy_cg
!ModDetails { md_insts = cls_insts
, md_fam_insts = fam_insts } = mod_details
-- Get the *tidied* cls_insts and fam_insts
data_tycons = filter isDataTyCon tycons
{- Prepare For Code Generation -}
-- Do saturation and convert to A-normal form
prepd_binds <- {-# SCC "CorePrep" #-}
liftIO $ corePrepPgm hsc_env this_mod iNTERACTIVELoc core_binds data_tycons
{- Generate byte code -}
cbc <- liftIO $ byteCodeGen hsc_env this_mod
prepd_binds data_tycons mod_breaks
let src_span = srcLocSpan interactiveSrcLoc
liftIO $ linkDecls hsc_env src_span cbc
let tcs = filterOut isImplicitTyCon (mg_tcs simpl_mg)
patsyns = mg_patsyns simpl_mg
ext_ids = [ id | id <- bindersOfBinds core_binds
, isExternalName (idName id)
, not (isDFunId id || isImplicitId id) ]
-- We only need to keep around the external bindings
-- (as decided by TidyPgm), since those are the only ones
-- that might later be looked up by name. But we can exclude
-- - DFunIds, which are in 'cls_insts' (see Note [ic_tythings] in HscTypes
-- - Implicit Ids, which are implicit in tcs
-- c.f. TcRnDriver.runTcInteractive, which reconstructs the TypeEnv
new_tythings = map AnId ext_ids ++ map ATyCon tcs ++ map (AConLike . PatSynCon) patsyns
ictxt = hsc_IC hsc_env
-- See Note [Fixity declarations in GHCi]
fix_env = tcg_fix_env tc_gblenv
new_ictxt = extendInteractiveContext ictxt new_tythings cls_insts
fam_insts defaults fix_env
return (new_tythings, new_ictxt)
{-
Note [Fixity declarations in GHCi]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To support fixity declarations on types defined within GHCi (as requested
in #10018) we record the fixity environment in InteractiveContext.
When we want to evaluate something TcRnDriver.runTcInteractive pulls out this
fixity environment and uses it to initialize the global typechecker environment.
After the typechecker has finished its business, an updated fixity environment
(reflecting whatever fixity declarations were present in the statements we
passed it) will be returned from hscParsedStmt. This is passed to
updateFixityEnv, which will stuff it back into InteractiveContext, to be
used in evaluating the next statement.
-}
hscImport :: HscEnv -> String -> IO (ImportDecl RdrName)
hscImport hsc_env str = runInteractiveHsc hsc_env $ do
(L _ (HsModule{hsmodImports=is})) <-
hscParseThing parseModule str
case is of
[L _ i] -> return i
_ -> liftIO $ throwOneError $
mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan $
text "parse error in import declaration"
-- | Typecheck an expression (but don't run it)
hscTcExpr :: HscEnv
-> TcRnExprMode
-> String -- ^ The expression
-> IO Type
hscTcExpr hsc_env0 mode expr = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
parsed_expr <- hscParseExpr expr
ioMsgMaybe $ tcRnExpr hsc_env mode parsed_expr
-- | Find the kind of a type
-- Currently this does *not* generalise the kinds of the type
hscKcType
:: HscEnv
-> Bool -- ^ Normalise the type
-> String -- ^ The type as a string
-> IO (Type, Kind) -- ^ Resulting type (possibly normalised) and kind
hscKcType hsc_env0 normalise str = runInteractiveHsc hsc_env0 $ do
hsc_env <- getHscEnv
ty <- hscParseType str
ioMsgMaybe $ tcRnType hsc_env normalise ty
hscParseExpr :: String -> Hsc (LHsExpr RdrName)
hscParseExpr expr = do
hsc_env <- getHscEnv
maybe_stmt <- hscParseStmt expr
case maybe_stmt of
Just (L _ (BodyStmt expr _ _ _)) -> return expr
_ -> throwErrors $ unitBag $ mkPlainErrMsg (hsc_dflags hsc_env) noSrcSpan
(text "not an expression:" <+> quotes (text expr))
hscParseStmt :: String -> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmt = hscParseThing parseStmt
hscParseStmtWithLocation :: String -> Int -> String
-> Hsc (Maybe (GhciLStmt RdrName))
hscParseStmtWithLocation source linenumber stmt =
hscParseThingWithLocation source linenumber parseStmt stmt
hscParseType :: String -> Hsc (LHsType RdrName)
hscParseType = hscParseThing parseType
#endif
hscParseIdentifier :: HscEnv -> String -> IO (Located RdrName)
hscParseIdentifier hsc_env str =
runInteractiveHsc hsc_env $ hscParseThing parseIdentifier str
hscParseThing :: (Outputable thing) => Lexer.P thing -> String -> Hsc thing
hscParseThing = hscParseThingWithLocation "<interactive>" 1
hscParseThingWithLocation :: (Outputable thing) => String -> Int
-> Lexer.P thing -> String -> Hsc thing
hscParseThingWithLocation source linenumber parser str
= withTiming getDynFlags
(text "Parser [source]")
(const ()) $ {-# SCC "Parser" #-} do
dflags <- getDynFlags
let buf = stringToStringBuffer str
loc = mkRealSrcLoc (fsLit source) linenumber 1
case unP parser (mkPState dflags buf loc) of
PFailed span err -> do
let msg = mkPlainErrMsg dflags span err
throwErrors $ unitBag msg
POk pst thing -> do
logWarningsReportErrors (getMessages pst dflags)
liftIO $ dumpIfSet_dyn dflags Opt_D_dump_parsed "Parser" (ppr thing)
return thing
{- **********************************************************************
%* *
Desugar, simplify, convert to bytecode, and link an expression
%* *
%********************************************************************* -}
#ifdef GHCI
hscCompileCoreExpr :: HscEnv -> SrcSpan -> CoreExpr -> IO ForeignHValue
hscCompileCoreExpr hsc_env =
lookupHook hscCompileCoreExprHook hscCompileCoreExpr' (hsc_dflags hsc_env) hsc_env
hscCompileCoreExpr' :: HscEnv -> SrcSpan -> CoreExpr -> IO ForeignHValue
hscCompileCoreExpr' hsc_env srcspan ds_expr
= do { let dflags = hsc_dflags hsc_env
{- Simplify it -}
; simpl_expr <- simplifyExpr dflags ds_expr
{- Tidy it (temporary, until coreSat does cloning) -}
; let tidy_expr = tidyExpr emptyTidyEnv simpl_expr
{- Prepare for codegen -}
; prepd_expr <- corePrepExpr dflags hsc_env tidy_expr
{- Lint if necessary -}
; lintInteractiveExpr "hscCompileExpr" hsc_env prepd_expr
{- Convert to BCOs -}
; bcos <- coreExprToBCOs hsc_env
(icInteractiveModule (hsc_IC hsc_env)) prepd_expr
{- link it -}
; hval <- linkExpr hsc_env srcspan bcos
; return hval }
#endif
{- **********************************************************************
%* *
Statistics on reading interfaces
%* *
%********************************************************************* -}
dumpIfaceStats :: HscEnv -> IO ()
dumpIfaceStats hsc_env = do
eps <- readIORef (hsc_EPS hsc_env)
dumpIfSet dflags (dump_if_trace || dump_rn_stats)
"Interface statistics"
(ifaceStats eps)
where
dflags = hsc_dflags hsc_env
dump_rn_stats = dopt Opt_D_dump_rn_stats dflags
dump_if_trace = dopt Opt_D_dump_if_trace dflags
{- **********************************************************************
%* *
Progress Messages: Module i of n
%* *
%********************************************************************* -}
showModuleIndex :: (Int, Int) -> String
showModuleIndex (i,n) = "[" ++ padded ++ " of " ++ n_str ++ "] "
where
n_str = show n
i_str = show i
padded = replicate (length n_str - length i_str) ' ' ++ i_str
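-- A small illustration (not part of GHC): the index is padded on the left to
-- the width of the total count, so progress lines stay aligned.
--
-- >>> showModuleIndex (3, 57)
-- "[ 3 of 57] "
-- >>> showModuleIndex (12, 57)
-- "[12 of 57] "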
| sgillespie/ghc | compiler/main/HscMain.hs | bsd-3-clause | 71,286 | 0 | 26 | 21,107 | 11,833 | 6,046 | 5,787 | 827 | 9 |
module PythagTriples
( printTriples
, pythagTriplesOrdered1
, pythagTriplesOrdered2
, pythagTriplesFast
, showTriple
, sortTriple
, Triple
) where
import Data.List (intercalate)
import Data.List.Split (splitOn)
type Triple = (Int, Int, Int)
sortTriple :: Triple -> Triple
sortTriple triple =
case triple of
(a, b, c) | b < a -> (b, a, c)
_ -> triple
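-- Illustration (not part of the original module): the two legs are swapped
-- into ascending order while the hypotenuse stays in the last position.
--
-- >>> sortTriple (4, 3, 5)
-- (3,4,5)
-- >>> sortTriple (3, 4, 5)
-- (3,4,5)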
printTriples :: [Triple] -> IO()
printTriples triples = mapM_ putStrLn (map showTriple triples)
-- | Standard (m, n) parametrisation for generating primitive Pythagorean triples
pythagTriplesFast :: [Triple]
pythagTriplesFast = [ (a, 2*m*n, c) |
m <- [2 ..]
, let nstart = m `mod` 2 + 1
, n <- [nstart, nstart+2 .. m-1]
, let a = m*m - n*n
, let c = m*m + n*n
, gcd a c == 1 ]
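-- The gcd filter keeps only primitive triples; an illustrative check of the
-- first few values produced by the (m, n) enumeration above:
--
-- >>> take 3 pythagTriplesFast
-- [(3,4,5),(5,12,13),(15,8,17)]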
-- | Generate Pythagorean triples, lexicographically ordered
pythagTriplesOrdered1:: [Triple]
pythagTriplesOrdered1 = [ (a, b, c) |
a <- [3 .. ]
, b <- [a+1, a+3 .. ((a*a - 1) `div` 2)]
, gcd b a == 1
, let csqr = a*a + b*b
, isPerfectSquare csqr
, let c = floorSqrt csqr ]
-- | Generate Pythagorean triples, lexicographically ordered
pythagTriplesOrdered2 :: [Triple]
pythagTriplesOrdered2 = [ (a, b, c) |
b <- [4 .. ]
, a <- [(floorSqrt $ 2*b + 1) .. b - 1]
, gcd b a == 1
, let csqr = a*a + b*b
, isPerfectSquare csqr
, let c = floorSqrt csqr ]
-- Utility functions
floorSqrt :: Int -> Int
floorSqrt = floor.sqrt.fromIntegral
isPerfectSquare :: Int -> Bool
isPerfectSquare = \n ->
let m = floorSqrt n
in n == m * m
-- | Render a Pythagorean triple with a space after each comma,
-- | the way Python prints tuples.
showTriple :: Triple -> String
showTriple = (intercalate ", ").(splitOn ",").show
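-- For example (illustrative only):
--
-- >>> showTriple (3, 4, 5)
-- "(3, 4, 5)"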
| grscheller/scheller-linux-archive | grok/Haskell/pythag-triples/src/PythagTriples.hs | bsd-3-clause | 1,730 | 1 | 13 | 428 | 668 | 369 | 299 | 50 | 2 |
{-# LANGUAGE CPP, DefaultSignatures, TypeFamilies #-}
module ST (MonadST (..)) where
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.ST.Safe
import Control.Monad.State.Strict
class (Applicative m, Monad m) => MonadST m where
type World m
liftST :: ST (World m) a -> m a
#ifndef HLINT
default liftST :: (MonadTrans t, MonadST m) => ST (World m) a -> t m a
liftST = lift . liftST
#endif
instance MonadST (ST s) where
type World (ST s) = s
liftST = id
instance MonadST IO where
type World IO = RealWorld
liftST = stToIO
instance MonadST m => MonadST (ReaderT r m) where
type World (ReaderT r m) = World m
instance MonadST m => MonadST (StateT s m) where
type World (StateT s m) = World m
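-- A minimal usage sketch (not part of this module; assumes "Data.STRef" is
-- imported): any 'ST' computation can be lifted into a monad with a
-- 'MonadST' instance, including transformer stacks built with 'ReaderT' or
-- 'StateT'.
--
-- > bumpCounter :: MonadST m => m Int
-- > bumpCounter = liftST $ do
-- >   r <- newSTRef (0 :: Int)
-- >   modifySTRef r (+ 1)
-- >   readSTRef r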
| sonyandy/mlf | src/ST.hs | bsd-3-clause | 739 | 0 | 12 | 150 | 285 | 153 | 132 | 21 | 0 |
{-# LANGUAGE CPP #-}
#define LAZY Strict
#define STRICT Lazy
#include "enumfun.inc"
| liyang/enumfun | Data/EnumFun/Strict.hs | bsd-3-clause | 84 | 0 | 2 | 12 | 6 | 5 | 1 | 1 | 0 |
-- | The type of definitions of screen layout and features.
module Game.LambdaHack.Client.UI.Content.Screen
( ScreenContent(..), emptyScreenContent, makeData
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, emptyScreenContentRaw, validateSingle
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.ByteString as BS
import qualified Data.EnumMap.Strict as EM
import qualified Data.Text as T
import Game.LambdaHack.Content.ItemKind (ItemKind)
import qualified Game.LambdaHack.Content.RuleKind as RK
import Game.LambdaHack.Definition.Defs
-- | Screen layout and features definition.
--
-- Warning: this type is not abstract, but its values should not be
-- created ad hoc, even for unit tests; they should be constructed
-- with @makeData@, which includes validation.
--
-- The @emptyScreenContent@ is one such valid-by-construction value
-- of this type. It's suitable for bootstrapping and for testing.
data ScreenContent = ScreenContent
{ rwidth :: X -- ^ screen width
, rheight :: Y -- ^ screen height
, rwebAddress :: String -- ^ an extra blurb line for the main menu
, rintroScreen :: ([String], [[String]])
-- ^ the intro screen (first help screen) text
-- and the rest of the manual
, rapplyVerbMap :: EM.EnumMap (ContentSymbol ItemKind) T.Text
-- ^ verbs to use for apply actions
, rFontFiles :: [(FilePath, BS.ByteString)]
-- ^ embedded game-supplied font files
}
emptyScreenContentRaw :: ScreenContent
emptyScreenContentRaw = ScreenContent { rwidth = 5
, rheight = 5
, rwebAddress = ""
, rintroScreen = ([], [])
, rapplyVerbMap = EM.empty
, rFontFiles = []
}
emptyScreenContent :: ScreenContent
emptyScreenContent =
assert (null $ validateSingle RK.emptyRuleContent emptyScreenContentRaw)
emptyScreenContentRaw
-- | Catch invalid screen content definitions.
validateSingle :: RK.RuleContent -> ScreenContent -> [Text]
validateSingle corule ScreenContent{..} =
(let tsGt80 = filter ((> 80) . T.length) $ map T.pack [rwebAddress]
in case tsGt80 of
[] -> []
tGt80 : _ -> ["rwebAddress's length is over 80:" <> tGt80])
++ (let tsGt41 = filter ((> 41) . T.length) $ map T.pack $ fst rintroScreen
in case tsGt41 of
[] -> []
tGt41 : _ -> ["intro screen has a line with length over 41:" <> tGt41])
++ (let tsGt80 = filter ((> 80) . T.length) $ map T.pack $ intercalate [""]
$ snd rintroScreen
in case tsGt80 of
[] -> []
tGt80 : _ -> ["manual has a line with length over 80:" <> tGt80])
-- The following reflect the only current UI implementation.
++ [ "rwidth /= RK.rWidthMax" | rwidth /= RK.rWidthMax corule ]
++ [ "rheight /= RK.rHeightMax + 3" | rheight /= RK.rHeightMax corule + 3]
makeData :: RK.RuleContent -> ScreenContent -> ScreenContent
makeData corule sc =
let singleOffenders = validateSingle corule sc
in assert (null singleOffenders
`blame` "Screen Content not valid"
`swith` singleOffenders)
sc
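-- A construction sketch (field values are hypothetical; assumes a matching
-- 'RK.RuleContent' value is in scope). Client code is expected to go through
-- 'makeData' so that validation runs, rather than building 'ScreenContent'
-- records directly:
--
-- > myScreenContent :: RK.RuleContent -> ScreenContent
-- > myScreenContent corule = makeData corule $ emptyScreenContent
-- >   { rwidth = RK.rWidthMax corule
-- >   , rheight = RK.rHeightMax corule + 3
-- >   , rwebAddress = "https://example.org"
-- >   }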
| LambdaHack/LambdaHack | engine-src/Game/LambdaHack/Client/UI/Content/Screen.hs | bsd-3-clause | 3,428 | 0 | 19 | 1,018 | 685 | 398 | 287 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad ( when )
import Data.Foldable ( forM_ )
import Network.URI ( parseRelativeReference )
import Prelude hiding ( mapM_ )
import System.Environment ( getArgs )
import System.Exit ( exitFailure, exitSuccess )
import DocReview.App ( runServer )
import Config ( parseArgs, unUsage, Usage, Action(..) )
import Analyze ( analyze )
import State.Types ( State, ChapterId, CommentId, State
, addChapter
)
import DocReview.Scan ( showAnalysis )
import qualified Report as Report
import qualified Config.Command.Run as Run
import qualified Config.Command.Scan as Scan
import qualified State.Logger as L
showUsage :: Usage -> IO ()
showUsage = putStr . unUsage 78
main :: IO ()
main = do
args <- getArgs
case parseArgs args of
Left usg ->
do showUsage usg
exitFailure
Right (Help usg) ->
do showUsage usg
exitSuccess
Right (Report cfg) ->
putStr . Report.genReport cfg =<< Report.analyzeFiles cfg
Right (RunServer cfg) ->
do st <- maybe return L.wrap (Run.cfgLogTo cfg) =<< Run.cfgStore cfg
-- Scan the content directory (unless requested not to)
when (Run.cfgScanOnStart cfg) $
do chapters <- analyze $ Run.cfgContentDir cfg
storeChapters chapters st
runServer cfg st
Right (Scan cfg) ->
do chapters <- analyze $ Scan.contentDir cfg
case Scan.store cfg of
-- If a store was specified, scan the directory and store
-- the results in the store
Just mk -> storeChapters chapters =<< mk
-- If no store was specified, scan the directory and dump
-- an analysis of the scan results to the console (check
-- for duplicate comment ids)
Nothing -> putStr $ unlines $ showAnalysis chapters
storeChapters :: [(String, [(Maybe ChapterId, [CommentId])])]
-> State -> IO ()
storeChapters files st = do
forM_ files $ \(fn, chapters) -> do
let uri = parseRelativeReference fn
forM_ chapters $ \(mChId, cIds) ->
maybe (return ()) (\chId -> addChapter st chId cIds uri) mChId
| j3h/doc-review | src/Main.hs | bsd-3-clause | 2,494 | 0 | 18 | 880 | 621 | 327 | 294 | 50 | 6 |
{-# LANGUAGE TemplateHaskell #-}
module Network.AuthorizeNet.TH (
module Network.AuthorizeNet.TH,
apply,
parseSchemaType,
schemaTypeToXML
) where
import Network.AuthorizeNet.Types
import Control.Monad
import GHC.Exts
import Language.Haskell.TH
import Language.Haskell.TH.Lift
import Language.Haskell.TH.Syntax
import Text.XML.HaXml hiding (Name, element, literal, x)
import Text.XML.HaXml.Schema.Schema
import System.IO
import Data.Char
import Data.List.Split as L (splitOn)
import qualified Data.Text as T
data Options = Options {
fieldLabelModifier :: String -> String,
constructorTagModifier :: String -> String,
typeTagModifier :: String -> String,
allNullaryToStringTag :: Bool,
namespaceLevel :: XmlNamespaceLevel
}
$(deriveLift ''XmlNamespaceLevel)
dropUntilUnderscore :: String -> String
dropUntilUnderscore name =
case dropWhile (/= '_') name of
[] -> error $ "Network.AuthorizeNet.TH.dropUntilUnderscore: When processing '" ++ name ++ "': No underscore in name or no text after underscore"
xs -> tail xs
lowerFirst :: String -> String
lowerFirst [] = []
lowerFirst (x:xs) = (toLower x):xs
dropHaskellModuleNames :: String -> String
dropHaskellModuleNames xs = last $ L.splitOn "." xs
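-- Behaviour of the three name-mangling helpers above (the field and type
-- names in these examples are made up):
--
-- >>> dropUntilUnderscore "customerProfile_merchantCustomerId"
-- "merchantCustomerId"
-- >>> lowerFirst "CustomerProfile"
-- "customerProfile"
-- >>> dropHaskellModuleNames "Network.AuthorizeNet.Api.CustomerProfile"
-- "CustomerProfile"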
defaultOptions :: Options
defaultOptions = Options {
fieldLabelModifier = dropUntilUnderscore,
constructorTagModifier = dropUntilUnderscore,
typeTagModifier = lowerFirst . dropHaskellModuleNames,
allNullaryToStringTag = True,
namespaceLevel = Namespace_xsd
}
-- | Drops everything up to and including the first underscore, so 'recordType_fieldOne' becomes 'fieldOne'
dropRecordName :: Options
dropRecordName = defaultOptions
choiceType :: Options
choiceType = defaultOptions
enumType :: Options
enumType = defaultOptions
requestOptions :: Options
requestOptions = choiceType {
constructorTagModifier = \(x:xs) -> (toLower x : xs) ++ "Request"
}
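-- For instance (hypothetical constructor name), a constructor
-- @CreateCustomerProfile@ is serialised under the tag
-- @createCustomerProfileRequest@:
--
-- >>> constructorTagModifier requestOptions "CreateCustomerProfile"
-- "createCustomerProfileRequest"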
withType :: Name -> (Name -> [TyVarBndr] -> [Con] -> Q a) -> Q a
withType name f = do
let ns = "Network.AuthorizeNet.TH.withType: "
info <- reify name
case info of
TyConI dec ->
case dec of
DataD _ _ tvbs cons _ -> f name tvbs cons
NewtypeD _ _ tvbs con _ -> f name tvbs [con]
other -> error $ ns ++ "Unsupported type: " ++ show other
_ -> error $ ns ++ "Data constructor " ++ show name ++ " is not from a data or newtype constructor"
data SpecialType = SMaybe | SList | SNone deriving (Show)
specialType :: Type -> SpecialType
specialType (AppT (ConT m) x) | m == ''Maybe = SMaybe
specialType (AppT (ConT m) x) | m == ''ArrayOf = SList
specialType (AppT ListT x) = SList
specialType _ = SNone
type XmlName = String
type ConName = String
deriveIsList :: Name -> DecsQ
deriveIsList name = do
wrappedCon <- getWrappedTypeCon name
let wrappedConName = conName wrappedCon
let outerType = conT name
outerPattern = conP wrappedConName [varP $ mkName "x"]
innerType = return $ unarrayT $ conType wrappedCon
[d|
instance IsList $(outerType) where
type Item $(outerType) = $(innerType)
fromList xs = $(appE (conE wrappedConName) $ appE (conE 'ArrayOf) $ dyn "xs")
toList $(outerPattern) = case $(dyn "x") of
ArrayOf xs -> xs
|]
-- | Fills out Restricts, SchemaType, and SimpleType for a simple NewType wrapper
deriveXmlNewtype :: Options -> Name -> DecsQ
deriveXmlNewtype opts name = do
wrappedConName <- conName <$> getWrappedTypeCon name
wrappedTypeName <- getWrappedTypeName name
let outerType = conT name
innerType = conT wrappedTypeName
vX = varE $ mkName "x"
outerPattern = conP wrappedConName [varP $ mkName "x"]
outerCon = conE wrappedConName
restrictsDec = [d|
instance Restricts $(outerType) $(innerType) where
restricts $(outerPattern) = $(vX)
|]
schemaTypeDec = [d|
instance SchemaType $(outerType) where
parseSchemaType s = do
e <- element [s]
commit $ interior e $ parseSimpleType
schemaTypeToXML s $(outerPattern) =
toXMLElement s [] [toXMLText (simpleTypeText $(vX))]
|]
simpleTypeDec = [d|
instance SimpleType $(outerType) where
acceptingParser = fmap $(outerCon) acceptingParser
simpleTypeText $(outerPattern) = simpleTypeText $(vX)
|]
let standardDecs = [restrictsDec, schemaTypeDec, simpleTypeDec]
decs = case specialType $ ConT wrappedTypeName of
SList -> deriveIsList name : standardDecs
_ -> standardDecs
concat <$> sequence decs
joinExprs :: [ExpQ] -> ExpQ -> ExpQ
joinExprs leaves joiner = do
j <- joiner
(x:xs) <- sequence leaves
return $ foldl (\p a -> InfixE (Just p) j (Just a)) x xs
-- | data X = A | B | C is just a simple enum, so it gets treated as strings
deriveXmlEnum :: Options -> Name -> DecsQ
deriveXmlEnum opts name = withType name $ \name tvbs cons -> do
let doExpr :: (XmlName, ConName) -> Q Exp
doExpr (xmlName, conName) = [| do literal $(return $ LitE $ StringL xmlName) ; return $(return $ ConE $ mkName conName) |]
conInfo :: Con -> (XmlName, ConName)
conInfo con = let cn = showName $ conName con in (constructorTagModifier opts cn, cn)
nameInfos :: [(XmlName, ConName)]
nameInfos = map conInfo cons
acceptingParserBody :: ExpQ
acceptingParserBody = joinExprs (map doExpr nameInfos) (return $ VarE $ mkName "onFail")
nameInfoClause :: (XmlName, ConName) -> Clause
nameInfoClause (xmlName, conName) = Clause [ConP (mkName conName) []] (NormalB $ LitE $ StringL xmlName) []
simpleTypeTextDec :: Dec
simpleTypeTextDec = FunD (mkName "simpleTypeText") (map nameInfoClause nameInfos)
simpleTypeInstanceDec :: Q Dec
simpleTypeInstanceDec = do
acceptingParserB <- acceptingParserBody
let acceptingParserDec = FunD (mkName "acceptingParser") [Clause [] (NormalB $ acceptingParserB) []]
return $ InstanceD [] (AppT (ConT ''SimpleType) $ ConT name) [
acceptingParserDec,
simpleTypeTextDec
]
schemaTypeInstanceDec = [d|
instance SchemaType $(return $ ConT name) where
parseSchemaType s = do
e <- element [s]
commit $ interior e $ parseSimpleType
schemaTypeToXML s x =
toXMLElement s [] [toXMLText (simpleTypeText x)]
|]
(++) <$> schemaTypeInstanceDec <*> (pure <$> simpleTypeInstanceDec)
deriveXmlChoice :: Options -> Name -> DecsQ
deriveXmlChoice opts name = withType name $ \name tvbs cons -> do
let xmlName con = litE $ stringL $ constructorTagModifier opts $ showName $ conName con
let caseTuple con =
let xmlN = xmlName con
parser = [| fmap $(conE $ conName con) (parseSchemaType $(xmlN)) |]
in tupE [ xmlN, parser ]
cases = listE $ map caseTuple cons
splitX = caseE (dyn "x") $ flip map cons $ \con ->
let xmlN = xmlName con
in match (conP (conName con) [varP $ mkName "y"]) (normalB $ appsE [dyn "toXMLElement", dyn "s", listE [], listE [appsE [dyn "schemaTypeToXML", xmlN, dyn "y"]]]) []
parseSchemaTypeDec = [d|
instance SchemaType $(return $ ConT name) where
parseSchemaType s = do
(pos,e) <- posnElement [s]
commit $ interior e $ oneOf' $(cases)
schemaTypeToXML s x = $(splitX)
|]
parseSchemaTypeDec
deriveXmlObject :: Options -> Name -> DecsQ
deriveXmlObject opts name = withType name $ \name tvbs cons -> do
let ns = "Network.AuthorizeNet.TH.deriveXmlObject: Type - " ++ showName name ++ ": "
let context = []
ty = AppT (ConT ''SchemaType) (ConT name)
con = case cons of
[con] -> con
_ -> error $ ns ++ "Expected exactly one constructor on type " ++ showName name
conN = conName con
xV = varE $ mkName "x"
sV = varE $ mkName "s"
let parseOneField :: VarStrictType -> Q Exp
parseOneField (fieldNameRaw, _, ty) = do
let fieldName = showName fieldNameRaw :: String
xmlName = fieldLabelModifier opts fieldName
let parseExpr = [| parseSchemaType $(litE $ stringL xmlName) |]
case specialType ty of
SMaybe -> [| optional $(parseExpr) |]
SList -> parseExpr
SNone -> parseExpr
parseConstructor :: Con -> ExpQ
parseConstructor (RecC name vsts) =
let joinExpr = varE $ mkName "apply"
exprs = [appE (varE $ mkName "return") (conE name) ] ++ map parseOneField vsts
in joinExprs exprs joinExpr
parseConstructor _ = error $ ns ++ "Unsupported constructor for type"
decParseSchemaType :: DecsQ
decParseSchemaType = do
body <- [| do
(pos,e) <- posnElement [$(sV)]
commit $ interior e $ $(parseConstructor con)
|]
return $ pure $ FunD (mkName "parseSchemaType") [Clause [VarP $ mkName "s"] (NormalB body) []]
toXmlOneField :: VarStrictType -> ExpQ
toXmlOneField (fieldName, _, ty) =
let xmlName = fieldLabelModifier opts $ showName fieldName
sttxE = [| schemaTypeToXML $(litE $ stringL xmlName) |]
in case specialType ty of
SMaybe -> [| maybe [] $(sttxE) $ $(appE (varE fieldName) xV) |]
SList -> [| $(sttxE) $ $(appE (varE fieldName) xV) |]
SNone -> [| $(sttxE) $ $(appE (varE fieldName) xV) |]
decSchemaTypeToXml :: DecsQ
decSchemaTypeToXml =
let vsts = case con of
NormalC{} -> error $ ns ++ "You must use record syntax when automatically deriving SchemaType instances"
RecC _ vsts -> vsts
_ -> error $ ns ++ "Unsupported constructor for type"
in do
let exps = map toXmlOneField vsts
body <- [| toXMLElement $(varE $ mkName "s") [] $(listE exps) |]
let clause = Clause [VarP $ mkName "s", AsP (mkName "x") $ RecP conN []] (NormalB body) []
return $ pure $ FunD (mkName "schemaTypeToXML") [clause]
decs <- concat <$> sequence [decParseSchemaType, decSchemaTypeToXml]
  -- If there is exactly one record field and it's a list, automatically derive IsList
let decIsList :: DecsQ
decIsList =
case con of
RecC _ [(fieldName, _, ty)] ->
case specialType ty of
SList -> deriveIsList name
_ -> pure []
_ -> pure []
(++) <$> return [ InstanceD context ty decs ] <*> decIsList
conName :: Con -> Name
conName con = case con of
NormalC x _ -> x
RecC x _ -> x
_ -> error $ "Network.AuthorizeNet.TH.conName: Unsupported constructor type" ++ show con
unarrayT :: Type -> Type
unarrayT ty =
let ns = "Network.AuthorizeNet.TH.unarrayT: "
in case ty of
(AppT outerTy innerTy) | outerTy == ConT ''ArrayOf -> innerTy
_ -> error $ ns ++ "Unexpected outer pattern " ++ show ty
conType :: Con -> Type
conType con =
let ns = "Network.AuthorizeNet.TH.conType: "
in case con of
NormalC _ [(_,x)] -> x
RecC _ [(_,_,x)] -> x
NormalC name xs -> error $ ns ++ "Expected exactly one field for constructor " ++ showName name
RecC name xs -> error $ ns ++ "Expected exactly one field for constructor " ++ showName name
_ -> error $ ns ++ "Unsupported constructor type" ++ show con
isAllNullary :: [Con] -> Bool
isAllNullary cons = flip all cons $ \con ->
case con of
NormalC _ [] -> True
NormalC _ _ -> False
RecC _ [] -> True
RecC _ _ -> False
_ -> error $ "Network.AuthorizeNet.TH.isAllNullary: Unsupported constructor type " ++ show cons
getWrappedTypeCon :: Name -> Q Con
getWrappedTypeCon name = do
let ns = "Network.AuthorizeNet.TH.getWrappedTypeName: Name was " ++ showName name ++ ": "
info <- reify name
case info of
TyConI dec ->
case dec of
NewtypeD _ _ _ con _ -> return con
DataD _ _ _ [con] _ -> return con
_ -> error $ ns ++ "Unexpected declaration"
getWrappedTypeName :: Name -> Q Name
getWrappedTypeName name = do
let ns = "Network.AuthorizeNet.TH.getWrappedTypeName: Name was " ++ showName name ++ ": "
con <- getWrappedTypeCon name
let ty = case con of
RecC _ [(_,_,ty)] -> ty
NormalC _ [(_, ty)] -> ty
_ -> error $ ns ++ "Unexpected constructor"
case ty of
ConT x -> return x
_ -> error $ ns ++ "Unexpected type"
deriveXmlParsable :: Options -> Name -> Q [Dec]
deriveXmlParsable opts name =
[d|
instance XmlParsable $(conT name) where
xmlParsableName _ = $(litE $ stringL $ typeTagModifier opts $ showName name)
xmlNamespaceLevel = const $(lift $ namespaceLevel opts)
|]
-- | The main intended entry point
deriveXml :: Name -> DecsQ
deriveXml = deriveXmlWithOptions defaultOptions
deriveXmlWithOptions :: Options -> Name -> DecsQ
deriveXmlWithOptions opts name = do
let ns = "Network.AuthorizeNet.TH.deriveXml: Name was '" ++ showName name ++ "':"
info <- reify name
decs <- case info of
TyConI dec ->
case dec of
NewtypeD{} -> deriveXmlNewtype opts name
DataD _ _ tvbs cons _ | isAllNullary cons && allNullaryToStringTag opts -> deriveXmlEnum opts name
DataD _ _ tvbs [con] _ -> deriveXmlObject opts name
DataD _ _ tvbs cons _-> deriveXmlChoice opts name
_ -> error $ ns ++ "Only newtypes or data constructors can be derived."
xmlParsableDecs <- deriveXmlParsable opts name
return $ decs ++ xmlParsableDecs
deriveXmlFull :: Name -> DecsQ
deriveXmlFull name = deriveXmlWithOptions (defaultOptions { namespaceLevel = Namespace_full }) name
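-- A typical use site looks like the sketch below (the record type and its
-- fields are hypothetical; only 'deriveXml' and the underscore naming
-- convention handled by 'dropUntilUnderscore' come from this module). The
-- caller needs TemplateHaskell enabled:
--
-- > data CustomerAddress = CustomerAddress
-- >   { customerAddress_firstName :: Maybe T.Text
-- >   , customerAddress_lastName  :: Maybe T.Text
-- >   } deriving (Eq, Show)
-- >
-- > $(deriveXml ''CustomerAddress)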
| MichaelBurge/haskell-authorize-net | src/Network/AuthorizeNet/TH.hs | bsd-3-clause | 13,812 | 185 | 25 | 3,657 | 3,546 | 1,869 | 1,677 | -1 | -1 |
module Tronkell.Game.Types where
import Control.Monad.State.Strict
import Data.Map
import qualified Data.Text as T
import Tronkell.Types
data GameConfig = GameConfig { gameWidth :: Int
, gameHeight :: Int
, gamePlayerSpeed :: Int
, gameTicksPerSecond :: Int
} deriving (Show)
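-- An illustrative configuration value (the numbers are arbitrary):
--
-- > defaultConfig :: GameConfig
-- > defaultConfig = GameConfig { gameWidth = 60
-- >                            , gameHeight = 40
-- >                            , gamePlayerSpeed = 1
-- >                            , gameTicksPerSecond = 4 }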
data Game = Game { gameWinner :: Maybe Player
, gamePlayers :: Map PlayerId Player
, gameStatus :: GameStatus
, gameConfig :: GameConfig
} deriving (Show)
data GameStatus = InProgress | Finished
deriving (Eq, Enum, Show)
newtype PlayerNick = PlayerNick { getPlayerNick :: T.Text }
deriving (Eq, Ord, Show)
newtype PlayerId = PlayerId { getPlayerId :: Int }
deriving (Eq, Ord, Show)
data Player = Player { playerId :: PlayerId
, playerNick :: PlayerNick
, playerStatus :: PlayerStatus
, playerCoordinate :: Coordinate
, playerOrientation :: Orientation
, playerTrail :: Trail
} deriving (Show)
data PlayerStatus = Alive | Dead
deriving (Show, Eq, Enum)
type Trail = [Coordinate]
data InputEvent = Tick
| TurnLeft PlayerId
| TurnRight PlayerId
| PlayerQuit PlayerId
deriving (Show)
data OutEvent = PlayerMoved PlayerId Coordinate Orientation
| PlayerDied PlayerId Coordinate
| GameEnded (Maybe PlayerId)
deriving (Show, Eq, Ord)
-- type GameEngine = [InputEvent] -> Game -> ([OutEvent], Game)
type GameEngine = [InputEvent] -> State Game [OutEvent]
| nilenso/tronkell | src/Tronkell/Game/Types.hs | bsd-3-clause | 1,885 | 0 | 9 | 758 | 386 | 232 | 154 | 41 | 0 |
{-|
Description : ARM Example using Hapstone
Copyright : (c) Garret Wassermann, 2017
License : BSD3
Maintainer : Garret Wassermann <[email protected]>
Stability : experimental
This is example code that shows how to use the Hapstone bindings,
based on an ARM example provided with the python bindings to Capstone.
For more information, see http://www.capstone-engine.org/lang_python.html.
-}
module Main
where
import Data.Word
import Numeric (showHex)
import Hapstone.Capstone
import Hapstone.Internal.Capstone as Capstone
-- use example from Capstone: http://www.capstone-engine.org/lang_python.html
arm_asm_buf = [0xf1, 0x02, 0x03, 0x0e, 0x00, 0x00, 0xa0, 0xe3, 0x02, 0x30, 0xc1, 0xe7, 0x00, 0x00, 0x53, 0xe3] :: [Word8]
myAction :: Capstone.Csh -> Capstone.CsInsn -> IO ()
myAction handle insn = putStrLn ("0x" ++ a ++ ":\t" ++ m ++ "\t" ++ o)
where m = mnemonic insn
o = opStr insn
a = (showHex $ address insn) ""
myDisasm = Disassembler {
arch = Capstone.CsArchArm -- ^ Options: CsArchArm, CsArchArm64, CsArchMips, CsArchX86, CsArchPpc, CsArchSparc, CsArchSysz, CsArchXcore
, modes = [Capstone.CsModeArm] -- ^ Modes (some may be combined by adding to the list): CsModeLittleEndian, CsModeArm, CsMode16 (16-bit x86), CsMode32 (32-bit x86), CsMode64 (64-bit x86-64/amd64 or PPC), CsModeThumb, CsModeMclass, CsModeV8 (ARMv8 A32), CsModeMicro, CsModeMips3, CsModeMips32r6, CsModeMipsGp64, CsModeV9 (SparcV9 mode), CsModeBigEndian, CsModeMips32, CsModeMips64
, buffer = arm_asm_buf -- ^ buffer to disassemble, as [Word8]
, addr = 0x1000 -- ^ address of first byte in the buffer, as Word64
, num = 0 -- ^ number of instructions to disassemble (0 for maximum)
, Hapstone.Capstone.detail = True -- ^ include detailed information? True/False, warning that turning this on may significantly slow computation
, skip = Just (defaultSkipdataStruct) -- ^ setup SKIPDATA options, as Maybe CsSkipdataStruct
, action = myAction -- ^ action to run on each instruction, a function with signature Csh -> CsInsn -> IO a; default is defaultAction
}
-- disasmIO has signature Disassembler a -> IO (Either CsErr [(CsInsn, a)])
main = disasmIO myDisasm
| ibabushkin/hapstone | examples/Test3.hs | bsd-3-clause | 2,211 | 0 | 11 | 389 | 273 | 166 | 107 | 21 | 1 |
module PPrint (
pprint
, pprintType
) where
import Base
import Context
import Data.List (intercalate)
import Text.Printf (printf)
pprint :: Term -> String
pprint = pprintTerm makeEmptyContext
pprintTerm :: Context -> Term -> String
pprintTerm ctx (TermAbs var ty t) = printf "lambda %s:%s. %s" fresh (pprintType ty) (pprintTerm ctx' t)
where
(ctx', fresh) = pickFreshName ctx var ty
pprintTerm ctx (TermIfThenElse t1 t2 t3) = printf "if %s then %s else %s" (pprintTerm ctx t1) (pprintTerm ctx t2) (pprintTerm ctx t3)
pprintTerm ctx t = pprintAppTerm ctx t
pprintAppTerm :: Context -> Term -> String
pprintAppTerm ctx (TermApp t1 t2) = printf "%s %s" (pprintAppTerm ctx t1) (pprintPathTerm ctx t2)
pprintAppTerm ctx t = pprintPathTerm ctx t
pprintPathTerm :: Context -> Term -> String
pprintPathTerm ctx (TermProj t field) = printf "%s.%s" (pprintPathTerm ctx t) field
pprintPathTerm ctx t = pprintAscribeTerm ctx t
pprintAscribeTerm :: Context -> Term -> String
pprintAscribeTerm ctx (TermAscribe t ty) = printf "%s as %s" (pprintAtomicTerm ctx t) (pprintType ty)
pprintAscribeTerm ctx t = pprintAtomicTerm ctx t
pprintAtomicTerm :: Context -> Term -> String
pprintAtomicTerm ctx (TermVar index) = fst $ indexToName ctx index
pprintAtomicTerm _ TermTrue = "true"
pprintAtomicTerm _ TermFalse = "false"
pprintAtomicTerm ctx (TermRecord fields) = printf "{%s}" (pprintFields ctx fields)
pprintAtomicTerm ctx t = printf "(%s)" (pprintTerm ctx t)
pprintFields :: Context -> [(String, Term)] -> String
pprintFields ctx fields = intercalate "," (map (\(f, t) -> f ++ "=" ++ pprintTerm ctx t) fields)
pprintType :: TermType -> String
pprintType = pprintArrowType
pprintArrowType :: TermType -> String
pprintArrowType (TypeArrow ty1 ty2) = printf "%s->%s" (pprintAtomicType ty1) (pprintArrowType ty2)
pprintArrowType ty = pprintAtomicType ty
pprintAtomicType :: TermType -> String
pprintAtomicType TypeTop = "Top"
pprintAtomicType TypeBool = "Bool"
pprintAtomicType (TypeRecord fields) = printf "{%s}" (pprintFieldTypes fields)
pprintAtomicType ty = printf "(%s)" (pprintType ty)
pprintFieldTypes :: [(String, TermType)] -> String
pprintFieldTypes fields = intercalate "," (map (\(f, t) -> f ++ ":" ++ pprintType t) fields)
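-- A few illustrative renderings (the type constructors come from "Base"):
--
-- >>> pprintType (TypeArrow TypeBool TypeBool)
-- "Bool->Bool"
-- >>> pprintType (TypeArrow (TypeArrow TypeBool TypeBool) TypeTop)
-- "(Bool->Bool)->Top"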
| foreverbell/unlimited-plt-toys | tapl/simplesub/PPrint.hs | bsd-3-clause | 2,237 | 0 | 11 | 345 | 781 | 399 | 382 | 43 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.MArray.Extras
-- Copyright : (C) 2011 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : type families, MPTCs
--
-- A higher-order MArray class.
----------------------------------------------------------------------------
module Data.Array.MArray.Extras
( MArray1(..)
) where
import Control.Monad.ST
import Data.Array.Base
import Data.Array.IO
import Foreign.Ptr
import Foreign.StablePtr
-- We could fix a missing fundep in the class hierarchy by adding:
-- class MArray1 a f m | a f -> m where
class Monad m => MArray1 a f m where
getBounds1 :: Ix i => a i (f e) -> m (i, i)
getNumElements1 :: Ix i => a i (f e) -> m Int
newArray1 :: Ix i => (i, i) -> f e -> m (a i (f e))
newArray1_ :: Ix i => (i, i) -> m (a i (f e))
unsafeNewArray1_ :: Ix i => (i, i) -> m (a i (f e))
unsafeRead1 :: Ix i => a i (f e) -> Int -> m (f e)
unsafeWrite1 :: Ix i => a i (f e) -> Int -> f e -> m ()
instance MArray1 IOUArray Ptr IO where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
instance MArray1 IOUArray StablePtr IO where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
instance MArray1 IOUArray FunPtr IO where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
instance MArray1 (STUArray s) Ptr (ST s) where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
instance MArray1 (STUArray s) StablePtr (ST s) where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
instance MArray1 (STUArray s) FunPtr (ST s) where
getBounds1 = getBounds
getNumElements1 = getNumElements
newArray1 = newArray
newArray1_ = newArray_
unsafeNewArray1_ = unsafeNewArray_
unsafeRead1 = unsafeRead
unsafeWrite1 = unsafeWrite
| ekmett/monadic-arrays | Data/Array/MArray/Extras.hs | bsd-3-clause | 2,718 | 0 | 14 | 529 | 671 | 368 | 303 | 64 | 0 |
{-# LANGUAGE OverloadedStrings, StandaloneDeriving #-}
--------------------------------------------------------------------
-- |
-- Module : Text.Atom.Feed
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability:
--
--------------------------------------------------------------------
module Text.Atom.Feed where
import qualified Text.XML as XML
import Data.Text
-- *Core types
-- NOTE: In the future we may want to have more structured
-- types for these.
type URI = Text
type NCName = Text
type Date = Text
type MediaType = Text
data Feed
= Feed
{ feedId :: Text
, feedTitle :: TextContent
, feedUpdated :: Date
, feedAuthors :: [Person]
, feedCategories :: [Category]
, feedContributors :: [Person]
, feedGenerator :: Maybe Generator
, feedIcon :: Maybe URI
, feedLinks :: [Link]
, feedLogo :: Maybe URI
, feedRights :: Maybe TextContent
, feedSubtitle :: Maybe TextContent
, feedEntries :: [Entry]
, feedAttrs :: [(XML.Name,Text)]
, feedOther :: [XML.Element]
}
deriving (Show)
data Entry
= Entry
{ entryId :: Text
, entryTitle :: TextContent
, entryUpdated :: Date
, entryAuthors :: [Person]
, entryCategories :: [Category]
, entryContent :: Maybe EntryContent
, entryContributor :: [Person]
, entryLinks :: [Link]
, entryPublished :: Maybe Date
, entryRights :: Maybe TextContent
, entrySource :: Maybe Source
, entrySummary :: Maybe TextContent
, entryInReplyTo :: Maybe InReplyTo
, entryInReplyTotal :: Maybe InReplyTotal
, entryAttrs :: [(XML.Name,Text)]
, entryOther :: [XML.Element]
}
deriving (Show)
data EntryContent
= TextContent Text
| HTMLContent Text
| XHTMLContent XML.Element
| MixedContent (Maybe Text) [XML.Node]
| ExternalContent (Maybe MediaType) URI
deriving (Show)
data Category
= Category
{ catTerm :: Text -- ^ the tag\/term of the category.
, catScheme :: Maybe URI -- ^ optional URL for identifying the categorization scheme.
, catLabel :: Maybe Text -- ^ human-readable label of the category
, catOther :: [XML.Element] -- ^ unknown elements, for extensibility.
}
deriving (Show)
data Generator
= Generator
{ genURI :: Maybe URI
, genVersion :: Maybe Text
, genText :: Text
}
deriving (Eq, Show, Read)
data Link
= Link
{ linkHref :: URI
-- ToDo: make the switch over to using the Atom.Feed.Link relation type.
, linkRel :: Maybe (Either NCName URI)
, linkType :: Maybe MediaType
, linkHrefLang :: Maybe Text
, linkTitle :: Maybe Text
, linkLength :: Maybe Text
, linkAttrs :: [(XML.Name,Text)]
, linkOther :: [XML.Element]
}
deriving (Show)
data TextContent
= TextText Text
| HTMLText Text
| XHTMLText XML.Element
deriving (Show)
txtToText :: TextContent -> Text
txtToText (TextText s) = s
txtToText (HTMLText s) = s
txtToText (XHTMLText x) = pack $ show x
data Source
= Source
{ sourceAuthors :: [Person]
, sourceCategories :: [Category]
, sourceGenerator :: Maybe Generator
, sourceIcon :: Maybe URI
, sourceId :: Maybe Text
, sourceLinks :: [Link]
, sourceLogo :: Maybe URI
, sourceRights :: Maybe TextContent
, sourceSubtitle :: Maybe TextContent
, sourceTitle :: Maybe TextContent
, sourceUpdated :: Maybe Date
, sourceOther :: [XML.Element]
}
deriving (Show)
-- deriving instance Ord XML.Node
-- deriving instance Ord XML.Element
data Person
= Person
{ personName :: Text
, personURI :: Maybe URI
, personEmail :: Maybe Text
, personOther :: [XML.Element]
}
deriving (Show, Eq) --, Ord)
data InReplyTo
= InReplyTo
{ replyToRef :: URI
, replyToHRef :: Maybe URI
, replyToType :: Maybe MediaType
, replyToSource :: Maybe URI
, replyToOther :: [(XML.Name,Text)]
, replyToContent :: [XML.Node]
}
deriving (Show)
data InReplyTotal
= InReplyTotal
{ replyToTotal :: Integer -- non-negative :)
, replyToTotalOther :: [(XML.Name,Text)]
}
deriving (Show)
-- *Smart Constructors
newCategory :: Text -- ^catTerm
-> Category
newCategory t = Category
{ catTerm = t
, catScheme = Nothing
, catLabel = Just t
, catOther = []
}
nullFeed :: Text -- ^feedId
-> TextContent -- ^feedTitle
-> Date -- ^feedUpdated
-> Feed
nullFeed i t u = Feed
{ feedId = i
, feedTitle = t
, feedUpdated = u
, feedAuthors = []
, feedCategories = []
, feedContributors = []
, feedGenerator = Nothing
, feedIcon = Nothing
, feedLinks = []
, feedLogo = Nothing
, feedRights = Nothing
, feedSubtitle = Nothing
, feedEntries = []
, feedAttrs = []
, feedOther = []
}
nullEntry :: Text -- ^entryId
-> TextContent -- ^entryTitle
-> Date -- ^entryUpdated
-> Entry
nullEntry i t u = Entry
{ entryId = i
, entryTitle = t
, entryUpdated = u
, entryAuthors = []
, entryCategories = []
, entryContent = Nothing
, entryContributor = []
, entryLinks = []
, entryPublished = Nothing
, entryRights = Nothing
, entrySource = Nothing
, entrySummary = Nothing
, entryInReplyTo = Nothing
, entryInReplyTotal = Nothing
, entryAttrs = []
, entryOther = []
}
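-- An illustrative use of the smart constructors above (added; not part of the
-- original module): build a one-entry feed by record-updating the defaults.
--
-- > let entry = nullEntry "urn:example:entry1" (TextText "Hello") "2014-01-01T00:00:00Z"
-- >     feed  = (nullFeed "urn:example:feed" (TextText "Example feed") "2014-01-01T00:00:00Z")
-- >               { feedEntries = [entry] }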
nullGenerator :: Text -- ^genText
-> Generator
nullGenerator t = Generator
{ genURI = Nothing
, genVersion = Nothing
, genText = t
}
nullLink :: URI -- ^linkHref
-> Link
nullLink uri = Link
{ linkHref = uri
, linkRel = Nothing
, linkType = Nothing
, linkHrefLang = Nothing
, linkTitle = Nothing
, linkLength = Nothing
, linkAttrs = []
, linkOther = []
}
nullSource :: Source
nullSource = Source
{ sourceAuthors = []
, sourceCategories = []
, sourceGenerator = Nothing
, sourceIcon = Nothing
, sourceId = Nothing
, sourceLinks = []
, sourceLogo = Nothing
, sourceRights = Nothing
, sourceSubtitle = Nothing
, sourceTitle = Nothing
, sourceUpdated = Nothing
, sourceOther = []
}
nullPerson :: Person
nullPerson = Person
{ personName = ""
, personURI = Nothing
, personEmail = Nothing
, personOther = []
}
| haskell-pkg-janitors/feed | Text/Atom/Feed.hs | bsd-3-clause | 7,214 | 0 | 11 | 2,514 | 1,595 | 973 | 622 | 206 | 1 |
{-# LANGUAGE LambdaCase #-}
module Day06 where
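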
import Control.Monad
import Control.Monad.ST
import Data.Array.MArray hiding (range)
import Data.Array.ST hiding (range)
import Text.Parsec
data Command = Command Action Range
data Action
= TurnOn
| TurnOff
| Toggle
data Range = Range Coord Coord
type Coord = (Int, Int)
coords :: [Coord]
coords = [(x, y) | x <- [0..999], y <- [0..999]]
coordsInRange :: Range -> [Coord]
coordsInRange (Range (lx, ly) (hx, hy)) =
[(x, y) | x <- [lx..hx], y <- [ly..hy]]
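-- For example (added note): coordsInRange (Range (0,0) (1,1))
-- == [(0,0),(0,1),(1,0),(1,1)], i.e. both bounds are inclusive.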
-- Part 1
type Grid s = STUArray s Coord Status
type Status = Bool
countLightsAfterCommands :: String -> Int
countLightsAfterCommands s = runST $ do
let cs = parseCommands s
grid <- createGrid
forM_ cs $ \c -> performCommand grid c
lightsOn grid
createGrid :: ST s (Grid s)
createGrid = newArray ((0, 0), (999,999)) False
performCommand :: Grid s -> Command -> ST s ()
performCommand grid (Command a r) = do
let cs = coordsInRange r
forM_ cs $ \c -> do
b <- readArray grid c
writeArray grid c $ performAction a b
lightsOn :: Grid s -> ST s Int
lightsOn grid = do
es <- getElems grid
return $ length $ filter id es
performAction :: Action -> Status -> Status
performAction = \case
Toggle -> not
TurnOn -> const True
TurnOff -> const False
-- Part 2
type Grid2 s = STUArray s Coord Brightness
type Brightness = Int
measureBrightnessAfterCommands :: String -> Int
measureBrightnessAfterCommands s = runST $ do
let cs = parseCommands s
grid <- createGrid2
forM_ cs $ \c -> performCommand2 grid c
gridBrightness grid
createGrid2 :: ST s (Grid2 s)
createGrid2 = newArray ((0, 0), (999,999)) 0
performCommand2 :: Grid2 s -> Command -> ST s ()
performCommand2 grid (Command a r) = do
let cs = coordsInRange r
forM_ cs $ \c -> do
b <- readArray grid c
writeArray grid c $ performAction2 a b
gridBrightness :: Grid2 s -> ST s Int
gridBrightness grid = sum <$> getElems grid
performAction2 :: Action -> Brightness -> Brightness
performAction2 = curry $ \case
(Toggle, b) -> b + 2
(TurnOn, b) -> b + 1
(TurnOff, 0) -> 0
(TurnOff, b) -> b - 1
-- Parsing commands
parseCommands :: String -> [Command]
parseCommands s = cs
where
Right cs = parse commands "" s
commands = command `sepEndBy` endOfLine
command = Command <$> action <*> (space *> range)
action = try turnOn <|> try turnOff <|> toggle
turnOn = string "turn on" *> return TurnOn
turnOff = string "turn off" *> return TurnOff
toggle = string "toggle" *> return Toggle
range = Range <$> coord <*> (string " through " *> coord)
coord = (,) <$> num <*> (char ',' *> num)
num = read <$> many1 digit | patrickherrmann/advent | src/Day06.hs | bsd-3-clause | 2,647 | 0 | 12 | 592 | 1,076 | 559 | 517 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Development.Shake.Resource(
Resource, newResourceIO, newThrottleIO, acquireResource, releaseResource
) where
import Development.Shake.Errors
import Development.Shake.Util
import Data.Function
import System.IO.Unsafe
import Control.Arrow
import Control.Monad
{-# NOINLINE resourceIds #-}
resourceIds :: Var Int
resourceIds = unsafePerformIO $ newVar 0
resourceId :: IO Int
resourceId = modifyVar resourceIds $ \i -> let j = i + 1 in j `seq` return (j, j)
-- | A type representing an external resource which the build system should respect. There
-- are two ways to create 'Resource's in Shake:
--
-- * 'Development.Shake.newResource' creates a finite resource, stopping too many actions running
-- simultaneously.
--
-- * 'Development.Shake.newThrottle' creates a throttled resource, stopping too many actions running
-- over a short time period.
--
-- These resources are used with 'Development.Shake.withResource' when defining rules. Typically only
-- system commands (such as 'Development.Shake.cmd') should be run inside 'Development.Shake.withResource',
-- not commands such as 'Development.Shake.need'.
--
-- Be careful that the actions run within 'Development.Shake.withResource' do not themselves require further
-- resources, or you may get a \"thread blocked indefinitely in an MVar operation\" exception.
-- If an action requires multiple resources, use 'Development.Shake.withResources' to avoid deadlock.
data Resource = Resource
{resourceOrd :: Int
-- ^ Key used for Eq/Ord operations. To make withResources work, we require newResourceIO < newThrottleIO
,resourceShow :: String
-- ^ String used for Show
,acquireResource :: Int -> IO (Maybe (IO ()))
-- ^ Try to acquire a resource. Returns Nothing to indicate you have acquired with no blocking, or Just act to
-- say after act completes (which will block) then you will have the resource.
,releaseResource :: Int -> IO ()
-- ^ You should only ever releaseResource that you obtained with acquireResource.
}
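-- A usage sketch of this internal API (added for illustration; rule code would
-- normally go through 'Development.Shake.withResource' rather than calling
-- these fields directly):
--
-- > do r  <- newResourceIO "compilers" 2
-- >    mb <- acquireResource r 1
-- >    case mb of
-- >      Nothing   -> return ()   -- acquired immediately
-- >      Just wait -> wait        -- block until the resource is handed over
-- >    -- ... run the guarded action ...
-- >    releaseResource r 1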
instance Show Resource where show = resourceShow
instance Eq Resource where (==) = (==) `on` resourceOrd
instance Ord Resource where compare = compare `on` resourceOrd
---------------------------------------------------------------------
-- FINITE RESOURCES
-- | (number available, queue of people with how much they want and a barrier to signal when it is allocated to them)
type Finite = Var (Int, [(Int,Barrier ())])
-- | A version of 'Development.Shake.newResource' that runs in IO, and can be called before calling 'Development.Shake.shake'.
-- Most people should use 'Development.Shake.newResource' instead.
newResourceIO :: String -> Int -> IO Resource
newResourceIO name mx = do
when (mx < 0) $
error $ "You cannot create a resource named " ++ name ++ " with a negative quantity, you used " ++ show mx
key <- resourceId
var <- newVar (mx, [])
return $ Resource (negate key) shw (acquire var) (release var)
where
shw = "Resource " ++ name
acquire :: Finite -> Int -> IO (Maybe (IO ()))
acquire var want
| want < 0 = error $ "You cannot acquire a negative quantity of " ++ shw ++ ", requested " ++ show want
| want > mx = error $ "You cannot acquire more than " ++ show mx ++ " of " ++ shw ++ ", requested " ++ show want
| otherwise = modifyVar var $ \(available,waiting) ->
if want <= available then
return ((available - want, waiting), Nothing)
else do
bar <- newBarrier
return ((available, waiting ++ [(want,bar)]), Just $ waitBarrier bar)
release :: Finite -> Int -> IO ()
release var i = modifyVar_ var $ \(available,waiting) -> f (available+i) waiting
where
f i ((wi,wa):ws) | wi <= i = signalBarrier wa () >> f (i-wi) ws
| otherwise = do (i,ws) <- f i ws; return (i,(wi,wa):ws)
f i [] = return (i, [])
---------------------------------------------------------------------
-- THROTTLE RESOURCES
data Throttle = Throttle
{throttleLock :: Lock
-- people queue up to grab from replenish, full means no one is queued
,throttleVal :: Var (Either (Barrier ()) [(Time, Int)])
-- either someone waiting for resources, or the time to wait until before N resources become available
-- anyone who puts a Barrier in the Left must be holding the Lock
,throttleTime :: IO Time
}
-- | A version of 'Development.Shake.newThrottle' that runs in IO, and can be called before calling 'Development.Shake.shake'.
-- Most people should use 'Development.Shake.newResource' instead.
newThrottleIO :: String -> Int -> Double -> IO Resource
newThrottleIO name count period = do
when (count < 0) $
error $ "You cannot create a throttle named " ++ name ++ " with a negative quantity, you used " ++ show count
key <- resourceId
lock <- newLock
time <- offsetTime
rep <- newVar $ Right [(0, count)]
let s = Throttle lock rep time
return $ Resource key shw (acquire s) (release s)
where
shw = "Throttle " ++ name
release :: Throttle -> Int -> IO ()
release Throttle{..} n = do
t <- throttleTime
modifyVar_ throttleVal $ \v -> case v of
Left b -> signalBarrier b () >> return (Right [(t+period, n)])
Right ts -> return $ Right $ ts ++ [(t+period, n)]
acquire :: Throttle -> Int -> IO (Maybe (IO ()))
acquire Throttle{..} want
| want < 0 = error $ "You cannot acquire a negative quantity of " ++ shw ++ ", requested " ++ show want
| want > count = error $ "You cannot acquire more than " ++ show count ++ " of " ++ shw ++ ", requested " ++ show want
| otherwise = do
let grab t vs = do
let (a,b) = span ((<= t) . fst) vs
-- renormalise for clock skew, nothing can ever be > t+period away
return (sum $ map snd a, map (first $ min $ t+period) b)
let push i vs = [(0,i) | i > 0] ++ vs
-- attempt to grab without locking
res <- withLockTry throttleLock $ do
modifyVar throttleVal $ \v -> case v of
Right vs -> do
t <- throttleTime
(got,vs) <- grab t vs
if got >= want then
return (Right $ push (got - want) vs, True)
else
return (Right $ push got vs, False)
_ -> return (v, False)
if res == Just True then
return Nothing
else
return $ Just $ withLock throttleLock $ do
-- keep trying to acquire more resources until you have everything you need
let f want = join $ modifyVar throttleVal $ \v -> case v of
Left _ -> err "newThrottle, invariant failed, Left while holding throttleLock"
Right vs -> do
t <- throttleTime
(got,vs) <- grab t vs
case vs of
_ | got >= want -> return (Right $ push (got - want) vs, return ())
[] -> do
b <- newBarrier
return (Left b, waitBarrier b >> f (want - got))
(t2,n):vs -> do
-- be robust to clock skew - only ever sleep for 'period' at most and always mark the next as good.
return $ (,) (Right $ (0,n):vs) $ do
sleep $ min period (t2-t)
f $ want - got
f want
| nh2/shake | Development/Shake/Resource.hs | bsd-3-clause | 8,269 | 0 | 35 | 2,825 | 1,905 | 984 | 921 | 103 | 8 |
module Signal.Wavelet.Repa2Test where
import Control.Arrow ((&&&))
import Data.Array.Repa
import Test.HUnit (Assertion)
import Signal.Wavelet.Repa2
import Signal.Wavelet.Repa.Common (inv, forceS)
import Test.ArbitraryInstances (DwtInputRepa(..))
import Test.Data.Wavelet as DW
import Test.Utils ((=~), (@=~?))
testDwt :: (Array U DIM1 Double, Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testDwt (ls, sig, expected) =
expected @=~? dwtS ls sig
dataDwt :: [(Array U DIM1 Double, Array U DIM1 Double, Array U DIM1 Double)]
dataDwt = Prelude.map (DW.all3 f) DW.dataDwt
testIdwt :: (Array U DIM1 Double, Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testIdwt (ls, sig, expected) =
expected @=~? idwtS ls sig
dataIdwt :: [(Array U DIM1 Double, Array U DIM1 Double, Array U DIM1 Double)]
dataIdwt = Prelude.map (DW.all3 f) DW.dataIdwt
propDWTInvertible :: DwtInputRepa -> Bool
propDWTInvertible (DwtInputRepa (ls, sig)) =
idwtS (computeS $ inv ls) (dwtS ls sig) =~ sig
testLattice :: ((Double, Double), Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testLattice (baseOp, sig, expected) =
expected @=~? forceS (lattice baseOp sig)
dataLattice :: [((Double,Double), Array U DIM1 Double, Array U DIM1 Double)]
dataLattice = Prelude.map (\(a, b, c) -> (a, f b, f c)) DW.dataLattice
propDoubleLatticeIdentity :: DwtInputRepa -> Bool
propDoubleLatticeIdentity (DwtInputRepa (ls, sig)) =
forceS (lattice baseOp (forceS $ lattice baseOp sig)) =~ sig
where
baseOp = (sin &&& cos) $ ls ! (Z :. 0)
testExtendFront :: (Int, Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testExtendFront (ln, sig, expected) =
expected @=~? forceS (extendFront ln sig)
dataExtendFront :: [(Int, Array U DIM1 Double, Array U DIM1 Double)]
dataExtendFront = Prelude.map (\(a, b, c) -> (a, f b, f c)) DW.dataExtendFront
testExtendEnd :: (Int, Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testExtendEnd (ln, sig, expected) =
expected @=~? forceS (extendEnd ln sig)
dataExtendEnd :: [(Int, Array U DIM1 Double, Array U DIM1 Double)]
dataExtendEnd = Prelude.map (\(a, b, c) -> (a, f b, f c)) DW.dataExtendEnd
testTrim :: (Array U DIM1 Double, Array U DIM1 Double)
-> Assertion
testTrim (sig, expected) =
expected @=~? (computeS . trim . delay $ sig)
dataTrim :: [(Array U DIM1 Double, Array U DIM1 Double)]
dataTrim = Prelude.map (DW.all2 f) DW.dataTrim
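-- | Build a 1-D unboxed array from a list (comment added); e.g. @f [1,2,3]@
-- has extent @Z :. 3@.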
f :: [Double] -> Array U DIM1 Double
f xs = fromListUnboxed (Z :. (length xs)) xs
| jstolarek/lattice-structure-hs | tests/Signal/Wavelet/Repa2Test.hs | bsd-3-clause | 2,622 | 0 | 11 | 558 | 1,078 | 589 | 489 | 54 | 1 |
{-# OPTIONS_GHC -Wall #-}
-- | Damped harmonic oscillator
module Main where
import Physics.Learn.RungeKutta
( integrateSystem
)
import Graphics.Gnuplot.Simple
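-- Added note: the state triple threaded through 'integrateSystem' below is
-- (time, capacitor voltage, inductor current) for a source-free parallel RLC
-- circuit; that reading is inferred from the equations
-- (dv/dt = -v/(R*C) - iL/C, diL/dt = v/L) rather than stated in the original.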
dampedOscillator :: Double -> Double -> Double
-> (Double,Double,Double) -> (Double,Double,Double)
dampedOscillator r l c (_t,vc,il)
= (1,-vc / r / c - il / c, vc / l)
theStates :: [(Double,Double,Double)]
theStates = integrateSystem (dampedOscillator 10000 200 0.001) 0.01 (0,1,0)
plot2 :: IO ()
plot2 = plotList [Title "Damped Harmonic Oscillator"
,XLabel "Time (s)"
,YLabel "Voltage (V)"
,Key Nothing
] (map (\(t,x,_) -> (t,x)) $ take 1000 theStates)
main :: IO ()
main = main2
main2 :: IO ()
main2 = plotPath
[Title "Damped Harmonic Oscillator"
,XLabel "Time (s)"
,YLabel "Voltage (V)"
,Key Nothing
,PNG "learn-physics-DHO.png"
] (map (\(t,x,_) -> (t,x)) $ take 1000 theStates)
>> putStrLn "output sent to file learn-physics-DHO.png"
| walck/learn-physics | examples/src/DampedOscillator.hs | bsd-3-clause | 1,058 | 0 | 12 | 293 | 356 | 199 | 157 | 28 | 1 |
module FizzBuzzKata.Day1Spec (spec) where
import Test.Hspec
import FizzBuzzKata.Day1 (fizzbuzz)
spec :: Spec
spec = do
it "returns an empty list when given an empty list"
(fizzbuzz [] == [])
it "returns [\"1\"] when given [1]"
(fizzbuzz [1] == ["1"])
it "returns [\"1\", \"2\"] when given [1,2]"
(fizzbuzz [1, 2] == ["1", "2"])
it "returns [\"fizz!\"] when given [3]"
(fizzbuzz [3] == ["fizz!"])
it "returns [\"fizz!\" \"buzz!\"] when given [3,5]"
(fizzbuzz [3, 5] == ["fizz!", "buzz!"])
it "returns [\"fizz!buzz!\"] when given [15]"
(fizzbuzz [15] == ["fizz!buzz!"])
it "returns [\"fizz!\"] when given [13]"
(fizzbuzz [13] == ["fizz!"])
it "returns [\"buzz!\"] when given [52]"
(fizzbuzz [52] == ["buzz!"])
it "returns [\"fizz!buzz!\"] when given [35]"
(fizzbuzz [35] == ["fizz!buzz!"])
| Alex-Diez/haskell-tdd-kata | old-katas/test/FizzBuzzKata/Day1Spec.hs | bsd-3-clause | 994 | 0 | 11 | 321 | 262 | 136 | 126 | 23 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MagicHash #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Type.Coercion
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : not portable
--
-- Definition of representational equality ('Coercion').
--
-- @since 4.7.0.0
-----------------------------------------------------------------------------
module Data.Type.Coercion
( Coercion(..)
, coerceWith
, gcoerceWith
, sym
, trans
, repr
, TestCoercion(..)
) where
import qualified Data.Type.Equality as Eq
import Data.Maybe
import GHC.Enum
import GHC.Show
import GHC.Read
import GHC.Base
-- | Representational equality. If @Coercion a b@ is inhabited by some terminating
-- value, then the type @a@ has the same underlying representation as the type @b@.
--
-- To use this equality in practice, pattern-match on the @Coercion a b@ to get out
-- the @Coercible a b@ instance, and then use 'coerce' to apply it.
--
-- @since 4.7.0.0
data Coercion a b where
Coercion :: Coercible a b => Coercion a b
-- with credit to Conal Elliott for 'ty', Erik Hesselink & Martijn van
-- Steenbergen for 'type-equality', Edward Kmett for 'eq', and Gabor Greif
-- for 'type-eq'
-- | Type-safe cast, using representational equality
coerceWith :: Coercion a b -> a -> b
coerceWith Coercion x = coerce x
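-- For instance (added illustration): given @newtype Age = Age Int@, GHC solves
-- @Coercible Age Int@ when the constructor is in scope, so
-- @coerceWith (Coercion :: Coercion Age Int) (Age 3)@ evaluates to @3@, and
-- 'sym' gives the cast in the other direction.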
-- | Generalized form of type-safe cast using representational equality
--
-- @since 4.10.0.0
gcoerceWith :: Coercion a b -> (Coercible a b => r) -> r
gcoerceWith Coercion x = x
-- | Symmetry of representational equality
sym :: Coercion a b -> Coercion b a
sym Coercion = Coercion
-- | Transitivity of representational equality
trans :: Coercion a b -> Coercion b c -> Coercion a c
trans Coercion Coercion = Coercion
-- | Convert propositional (nominal) equality to representational equality
repr :: (a Eq.:~: b) -> Coercion a b
repr Eq.Refl = Coercion
-- | @since 4.7.0.0
deriving instance Eq (Coercion a b)
-- | @since 4.7.0.0
deriving instance Show (Coercion a b)
-- | @since 4.7.0.0
deriving instance Ord (Coercion a b)
-- | @since 4.7.0.0
deriving instance Coercible a b => Read (Coercion a b)
-- | @since 4.7.0.0
instance Coercible a b => Enum (Coercion a b) where
toEnum 0 = Coercion
toEnum _ = errorWithoutStackTrace "Data.Type.Coercion.toEnum: bad argument"
fromEnum Coercion = 0
-- | @since 4.7.0.0
deriving instance Coercible a b => Bounded (Coercion a b)
-- | This class contains types where you can learn the equality of two types
-- from information contained in /terms/. Typically, only singleton types should
-- inhabit this class.
class TestCoercion f where
-- | Conditionally prove the representational equality of @a@ and @b@.
testCoercion :: f a -> f b -> Maybe (Coercion a b)
-- | @since 4.7.0.0
instance TestCoercion ((Eq.:~:) a) where
testCoercion Eq.Refl Eq.Refl = Just Coercion
-- | @since 4.10.0.0
instance TestCoercion ((Eq.:~~:) a) where
testCoercion Eq.HRefl Eq.HRefl = Just Coercion
-- | @since 4.7.0.0
instance TestCoercion (Coercion a) where
testCoercion Coercion Coercion = Just Coercion
| sdiehl/ghc | libraries/base/Data/Type/Coercion.hs | bsd-3-clause | 3,576 | 0 | 11 | 674 | 606 | 339 | 267 | 54 | 1 |
{-# LANGUAGE NoMonomorphismRestriction, DeriveDataTypeable, StandaloneDeriving, NamedFieldPuns, ScopedTypeVariables #-}
module Network.N2O.PubSub (
subscribe,
byUnique,
unsubscribe,
newChannel,
Connections(..)
, setState
, SocketId
) where
import Control.Concurrent
import Data.Data (Data, gunfold, toConstr, dataTypeOf)
import Data.IxSet as I
import Data.Maybe
import Network.WebSockets as WS
import Data.Typeable (Typeable)
deriving instance Typeable Connection
unimpl = error . (++ " is unimplemented in PubSub.hs")
instance Data Connection where
gunfold = error "WS.Connection gunfold is unimplemented in PubSub.hs"
toConstr = error "WS.Connection toConstr is unimplemented in PubSub.hs"
dataTypeOf = unimpl "WS.Connection dataTypeOf"
instance Eq Connection where
(==) = unimpl "(==)"
instance Ord Connection where
compare _ _ = EQ
instance Show Connection where
show = const "{WS.Connection}"
newtype SocketId = SocketId Int deriving (Typeable, Show, Ord, Eq, Data)
data Connections a = Connections { coSet :: IxSet a, coId :: SocketId}
initialId :: SocketId
initialId = SocketId 0
nextId :: SocketId -> SocketId
nextId (SocketId a) = SocketId (succ a)
newChannel :: (Indexable a) => Connections a
newChannel = Connections I.empty initialId
subscribe conn emptyEntry Connections {coSet, coId} = (Connections {
coId = nextId coId, coSet = I.insert (emptyEntry coId conn) coSet}, coId)
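-- Usage sketch (added; 'Entry' stands for whatever 'Indexable' entry type the
-- caller stores, so its constructor here is hypothetical):
--
-- > let (chan', sid) = subscribe conn (\sockId c -> Entry sockId c) newChannel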
unsubscribe :: (Indexable a, Ord a, Typeable a) => SocketId -> Connections a -> Connections a
unsubscribe socketId (co @ Connections { coSet }) = co { coSet = I.deleteIx socketId coSet }
setState state socketId modify = modifyMVar_ state $ return . foo where
foo co = co { coSet = mo $ coSet co }
mo s = I.updateIx socketId (modify old) s where
old = fromJust $ getOne $ getEQ socketId s
byUnique state socketId = fromJust . getOne . getEQ socketId . coSet <$> readMVar state
| 5HT/n2o.hs | src/Network/N2O/PubSub.hs | isc | 1,954 | 1 | 10 | 364 | 585 | 315 | 270 | 44 | 1 |
-- | Functions for verifying signatures.
--
-- TODO: the "Pos.Crypto.Signing" hierarchy looks like a mess and should be
-- redesigned. When this is done, we likely won't need this module to be
-- separated from other modules, but right now we do need it in order to
-- avoid circular dependencies. — @neongreen
--
module Pos.Crypto.Signing.Check
( checkSig
, checkSigRaw
, verifyProxyCert
, validateProxySecretKey
, validateProxySignature
) where
import Universum
import qualified Cardano.Crypto.Wallet as CC
import Control.Monad.Except (MonadError, throwError)
import Data.Coerce (coerce)
import Pos.Binary.Class (Bi, Raw)
import qualified Pos.Binary.Class as Bi
import Pos.Crypto.Configuration (ProtocolMagic)
import Pos.Crypto.Signing.Tag (signTag)
import Pos.Crypto.Signing.Types (ProxyCert (..), ProxySecretKey (..),
ProxySignature (..), PublicKey (..), SignTag (..),
Signature (..))
-- CHECK: @checkSig
-- | Verify a signature.
-- #verifyRaw
checkSig ::
(Bi a)
=> ProtocolMagic
-> SignTag
-> PublicKey
-> a
-> Signature a
-> Bool
checkSig pm t k x s = checkSigRaw pm (Just t) k (Bi.serialize' x) (coerce s)
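-- Added note: because 'checkSigRaw' prepends the output of 'signTag' to the
-- verified bytes, a signature made under one 'SignTag' / 'ProtocolMagic' pair
-- will not verify under a different tag or protocol magic.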
-- CHECK: @checkSigRaw
-- | Verify raw 'ByteString'.
checkSigRaw
:: ProtocolMagic
-> Maybe SignTag
-> PublicKey
-> ByteString
-> Signature Raw
-> Bool
checkSigRaw pm mbTag (PublicKey k) x (Signature s) = CC.verify k (tag <> x) s
where
tag = maybe mempty (signTag pm) mbTag
-- | Checks whether a certificate is valid, given the issuer pk, the delegate pk and ω.
verifyProxyCert :: (Bi w) => ProtocolMagic -> PublicKey -> PublicKey -> w -> ProxyCert w -> Bool
verifyProxyCert pm issuerPk (PublicKey delegatePk) o (ProxyCert sig) =
checkSig pm SignProxySK issuerPk
(mconcat ["00", CC.unXPub delegatePk, Bi.serialize' o])
(Signature sig)
-- | Check that the key's certificate is valid, throwing an error otherwise.
validateProxySecretKey
:: (MonadError Text m, Bi w)
=> ProtocolMagic
-> ProxySecretKey w
-> m ()
validateProxySecretKey pm psk =
if verifyProxyCert pm (pskIssuerPk psk) (pskDelegatePk psk)
(pskOmega psk) (pskCert psk)
then pure ()
else throwError "a ProxySecretKey has an invalid signature"
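-- | Validate the 'ProxySecretKey' carried inside a proxy signature. (Doc
-- comment added.)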
validateProxySignature
:: (MonadError Text m, Bi w)
=> ProtocolMagic
-> ProxySignature w a
-> m ()
validateProxySignature pm psig = validateProxySecretKey pm (psigPsk psig)
| input-output-hk/pos-haskell-prototype | crypto/Pos/Crypto/Signing/Check.hs | mit | 2,585 | 0 | 11 | 652 | 618 | 344 | 274 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Hledger.Cli.Commands.Registermatch (
registermatchmode
,registermatch
)
where
import Data.Char (toUpper)
import Data.List
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as TL
import Hledger
import Hledger.Cli.CliOptions
import Hledger.Cli.Commands.Register
registermatchmode = hledgerCommandMode
$(embedFileRelative "Hledger/Cli/Commands/Registermatch.txt")
[]
[generalflagsgroup1]
hiddenflags
([], Just $ argsFlag "DESC")
registermatch :: CliOpts -> Journal -> IO ()
registermatch opts@CliOpts{rawopts_=rawopts,reportspec_=rspec} j =
case listofstringopt "args" rawopts of
[desc] -> do
let ps = [p | (_,_,_,p,_) <- postingsReport rspec j]
case similarPosting ps desc of
Nothing -> putStrLn "no matches found."
Just p -> TL.putStr $ postingsReportAsText opts [pri]
where pri = (Just (postingDate p)
,Nothing
,tdescription <$> ptransaction p
,p
,nullmixedamt)
_ -> putStrLn "please provide one description argument."
-- Identify the closest recent match for this description in the given date-sorted postings.
similarPosting :: [Posting] -> String -> Maybe Posting
similarPosting ps desc =
let matches =
sortBy compareRelevanceAndRecency
$ filter ((> threshold).fst)
[(maybe 0 (\t -> compareDescriptions desc (T.unpack $ tdescription t)) (ptransaction p), p) | p <- ps]
where
compareRelevanceAndRecency (n1,p1) (n2,p2) = compare (n2,postingDate p2) (n1,postingDate p1)
threshold = 0
in case matches of [] -> Nothing
m:_ -> Just $ snd m
-- -- Identify the closest recent match for this description in past transactions.
-- similarTransaction :: Journal -> Query -> String -> Maybe Transaction
-- similarTransaction j q desc =
-- case historymatches = transactionsSimilarTo j q desc of
-- ((,t):_) = Just t
-- [] = Nothing
compareDescriptions :: String -> String -> Double
compareDescriptions s t = compareStrings s' t'
where s' = simplify s
t' = simplify t
simplify = filter (not . (`elem` ("0123456789"::String)))
-- | Return a similarity measure, from 0 to 1, for two strings.
-- This is Simon White's letter pairs algorithm from
-- http://www.catalysoft.com/articles/StrikeAMatch.html
-- with a modification for short strings.
compareStrings :: String -> String -> Double
compareStrings "" "" = 1
compareStrings [_] "" = 0
compareStrings "" [_] = 0
compareStrings [a] [b] = if toUpper a == toUpper b then 1 else 0
compareStrings s1 s2 = 2.0 * fromIntegral i / fromIntegral u
where
i = length $ intersect pairs1 pairs2
u = length pairs1 + length pairs2
pairs1 = wordLetterPairs $ uppercase s1
pairs2 = wordLetterPairs $ uppercase s2
wordLetterPairs = concatMap letterPairs . words
letterPairs (a:b:rest) = [a,b] : letterPairs (b:rest)
letterPairs _ = []
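-- A worked example (added): digits are stripped before comparison, so
-- @compareDescriptions "AMAZON 123" "AMAZON 456"@ is 1.0: both sides reduce to
-- the letter pairs AM,MA,AZ,ZO,ON, giving 2*5/(5+5).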
| adept/hledger | hledger/Hledger/Cli/Commands/Registermatch.hs | gpl-3.0 | 3,165 | 0 | 19 | 813 | 809 | 438 | 371 | 60 | 3 |
module Hangman where
import Control.Monad.Trans.State.Lazy
import Control.Monad.IO.Class
-- | Represents whether a character has been discovered (guessed) yet.
data Letter = Hidden Char | Guessed Char
-- | Represents a word made up of letters.
type AWord = [Letter]
-- | The state of the Hangman game.
data HangmanState = HangmanState AWord (Int,Int) [Char]
-- | The start of the game.
hangman :: String -> Int -> IO ()
hangman word guesses = do
let word' = fmap (\x -> Hidden x) word
(hs, s) <- evalStateT looper (HangmanState word' (0, guesses) [])
showState hs
case s of
True -> putStrLn "You've won!"
False -> putStrLn "You've lost!"
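-- Example (added): @hangman "haskell" 10@ starts a game allowing ten guesses.
-- Note that with stdin's default line buffering the Enter key pressed after a
-- guess is read by the next 'getChar' as a guess of its own; a fuller version
-- might set @hSetBuffering stdin NoBuffering@ first (a suggestion, not part of
-- the original code).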
-- | An iteration of the game.
looper :: StateT HangmanState IO (HangmanState, Bool)
looper = do
hs@(HangmanState word (guess,guesses) guessed) <- get
liftIO $ showState hs
userChar <- liftIO getChar
let word' = fmap (checkGuess userChar) word
let hs' = (HangmanState word' (guess+1,guesses) (userChar:guessed))
case complete word' of
True -> return (hs', True)
False -> case guess == guesses of
True -> return (hs', False)
False -> do
put hs'
looper
-- | Print the state of the game.
showState :: HangmanState -> IO ()
showState (HangmanState word (guess, guesses) guessed) = do
putStrLn $
wordToString word ++
" " ++
(show guess) ++ "/" ++ (show guesses)
putStrLn $
"Guessed:" ++ (show guessed)
wordToString :: AWord -> String
wordToString = (fmap letterToChar)
letterToChar :: Letter -> Char
letterToChar l = case l of
Hidden x -> '_'
Guessed x -> x
-- | Transform a Hidden character into a Guessed character.
checkGuess :: Char -> Letter -> Letter
checkGuess c (Hidden x)
| x == c = Guessed x
checkGuess c x = x
-- | Determine if we've reached the end of the game.
endGame :: AWord -> (Int,Int) -> Bool
endGame word (guess,guesses)
| complete word = True
| guess == guesses = True
| otherwise = False
-- | Determine if the word is completely guessed.
complete :: AWord -> Bool
complete = all isGuessed
isGuessed :: Letter -> Bool
isGuessed l = case l of
Hidden x -> False
Guessed x -> True
| lf94/Hangman | Hangman.hs | gpl-3.0 | 2,144 | 0 | 15 | 491 | 713 | 366 | 347 | 57 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Test.AWS.DynamoDB.Internal
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.DynamoDB.Internal where
import Test.AWS.Prelude
| fmapfmapfmap/amazonka | amazonka-dynamodb/test/Test/AWS/DynamoDB/Internal.hs | mpl-2.0 | 623 | 0 | 4 | 140 | 25 | 21 | 4 | 4 | 0 |
module Main (main) where
import Criterion.Main
import Data.Bits
import qualified Data.Vector.Primitive as P
import Data.Word
import Succinct.Sequence
import System.Random
sampleVec :: Int -> [((Int, String), Int)]
sampleVec elements = zip [(i, "v" ++ show i) | i <- [0..]] $ take elements $ randomRs (1, 10000) (mkStdGen 42)
benchSized :: String -> Int -> Benchmark
benchSized tag elements = v `seq` bgroup tag [
bench "Hu-Tucker" $ whnf (show . huTucker) v
]
where
    v = sampleVec elements
benchmarks :: [Benchmark]
benchmarks = [ benchSized ("2^" ++ show i) (2^i)
| i <- [4,8,12,16,20::Int]]
main :: IO ()
main = defaultMain benchmarks
| Gabriel439/succinct | benchmarks/huTuckerBench.hs | bsd-2-clause | 658 | 0 | 11 | 130 | 281 | 157 | 124 | 18 | 1 |
{-| Implementation of the LUXI loader.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Backend.Luxi
( loadData
, parseData
) where
import qualified Control.Exception as E
import Control.Monad (liftM)
import Text.JSON.Types
import qualified Text.JSON
import Ganeti.BasicTypes
import Ganeti.Errors
import qualified Ganeti.Luxi as L
import qualified Ganeti.Query.Language as Qlang
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.JSON
{-# ANN module "HLint: ignore Eta reduce" #-}
-- * Utility functions
-- | Get values behind \"data\" part of the result.
getData :: (Monad m) => JSValue -> m JSValue
getData (JSObject o) = fromObj (fromJSObject o) "data"
getData x = fail $ "Invalid input, expected dict entry but got " ++ show x
-- | Converts a (status, value) into m value, if possible.
parseQueryField :: (Monad m) => JSValue -> m (JSValue, JSValue)
parseQueryField (JSArray [status, result]) = return (status, result)
parseQueryField o =
fail $ "Invalid query field, expected (status, value) but got " ++ show o
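-- Added note: each field arrives as @JSArray [status, value]@; the status code
-- is checked only later, by 'fromJValWithStatus' via 'Qlang.checkRS', before
-- the value is converted.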
-- | Parse a result row.
parseQueryRow :: (Monad m) => JSValue -> m [(JSValue, JSValue)]
parseQueryRow (JSArray arr) = mapM parseQueryField arr
parseQueryRow o =
fail $ "Invalid query row result, expected array but got " ++ show o
-- | Parse an overall query result and get the [(status, value)] list
-- for each element queried.
parseQueryResult :: (Monad m) => JSValue -> m [[(JSValue, JSValue)]]
parseQueryResult (JSArray arr) = mapM parseQueryRow arr
parseQueryResult o =
fail $ "Invalid query result, expected array but got " ++ show o
-- | Prepare resulting output as parsers expect it.
extractArray :: (Monad m) => JSValue -> m [[(JSValue, JSValue)]]
extractArray v =
getData v >>= parseQueryResult
-- | Testing result status for more verbose error message.
fromJValWithStatus :: (Text.JSON.JSON a, Monad m) => (JSValue, JSValue) -> m a
fromJValWithStatus (st, v) = do
st' <- fromJVal st
Qlang.checkRS st' v >>= fromJVal
annotateConvert :: String -> String -> String -> Result a -> Result a
annotateConvert otype oname oattr =
annotateResult $ otype ++ " '" ++ oname ++
"', error while reading attribute '" ++ oattr ++ "'"
-- | Annotate errors when converting values with owner/attribute for
-- better debugging.
genericConvert :: (Text.JSON.JSON a) =>
String -- ^ The object type
-> String -- ^ The object name
-> String -- ^ The attribute we're trying to convert
-> (JSValue, JSValue) -- ^ The value we're trying to convert
-> Result a -- ^ The annotated result
genericConvert otype oname oattr =
annotateConvert otype oname oattr . fromJValWithStatus
convertArrayMaybe :: (Text.JSON.JSON a) =>
String -- ^ The object type
-> String -- ^ The object name
-> String -- ^ The attribute we're trying to convert
-> (JSValue, JSValue) -- ^ The value we're trying to convert
-> Result [Maybe a] -- ^ The annotated result
convertArrayMaybe otype oname oattr (st, v) = do
st' <- fromJVal st
Qlang.checkRS st' v >>=
annotateConvert otype oname oattr . arrayMaybeFromJVal
-- * Data querying functionality
-- | The input data for node query.
queryNodesMsg :: L.LuxiOp
queryNodesMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRNode)
["name", "mtotal", "mnode", "mfree", "dtotal", "dfree",
"ctotal", "cnos", "offline", "drained", "vm_capable",
"ndp/spindle_count", "group.uuid", "tags",
"ndp/exclusive_storage", "sptotal", "spfree", "ndp/cpu_speed"]
Qlang.EmptyFilter
-- | The input data for instance query.
queryInstancesMsg :: L.LuxiOp
queryInstancesMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRInstance)
["name", "disk_usage", "be/memory", "be/vcpus",
"status", "pnode", "snodes", "tags", "oper_ram",
"be/auto_balance", "disk_template",
"be/spindle_use", "disk.sizes", "disk.spindles",
"forthcoming"] Qlang.EmptyFilter
-- | The input data for cluster query.
queryClusterInfoMsg :: L.LuxiOp
queryClusterInfoMsg = L.QueryClusterInfo
-- | The input data for node group query.
queryGroupsMsg :: L.LuxiOp
queryGroupsMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRGroup)
["uuid", "name", "alloc_policy", "ipolicy", "tags"]
Qlang.EmptyFilter
-- | Wrapper over 'callMethod' doing node query.
queryNodes :: L.Client -> IO (Result JSValue)
queryNodes = liftM errToResult . L.callMethod queryNodesMsg
-- | Wrapper over 'callMethod' doing instance query.
queryInstances :: L.Client -> IO (Result JSValue)
queryInstances = liftM errToResult . L.callMethod queryInstancesMsg
-- | Wrapper over 'callMethod' doing cluster information query.
queryClusterInfo :: L.Client -> IO (Result JSValue)
queryClusterInfo = liftM errToResult . L.callMethod queryClusterInfoMsg
-- | Wrapper over 'callMethod' doing group query.
queryGroups :: L.Client -> IO (Result JSValue)
queryGroups = liftM errToResult . L.callMethod queryGroupsMsg
-- | Parse a instance list in JSON format.
getInstances :: NameAssoc
-> JSValue
-> Result [(String, Instance.Instance)]
getInstances ktn arr = extractArray arr >>= mapM (parseInstance ktn)
-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
-> [(JSValue, JSValue)]
-> Result (String, Instance.Instance)
parseInstance ktn [ name, disk, mem, vcpus
, status, pnode, snodes, tags, oram
, auto_balance, disk_template, su
, dsizes, dspindles, forthcoming ] = do
xname <- annotateResult "Parsing new instance" (fromJValWithStatus name)
let convert a = genericConvert "Instance" xname a
xdisk <- convert "disk_usage" disk
xmem <- case oram of -- FIXME: remove the "guessing"
(_, JSRational _ _) -> convert "oper_ram" oram
_ -> convert "be/memory" mem
xvcpus <- convert "be/vcpus" vcpus
xpnode <- convert "pnode" pnode >>= lookupNode ktn xname
xsnodes <- convert "snodes" snodes::Result [String]
snode <- case xsnodes of
[] -> return Node.noSecondary
x:_ -> lookupNode ktn xname x
xrunning <- convert "status" status
xtags <- convert "tags" tags
xauto_balance <- convert "auto_balance" auto_balance
xdt <- convert "disk_template" disk_template
xsu <- convert "be/spindle_use" su
xdsizes <- convert "disk.sizes" dsizes
xdspindles <- convertArrayMaybe "Instance" xname "disk.spindles" dspindles
xforthcoming <- convert "forthcoming" forthcoming
let disks = zipWith Instance.Disk xdsizes xdspindles
inst = Instance.create xname xmem xdisk disks
xvcpus xrunning xtags xauto_balance xpnode snode xdt xsu []
xforthcoming
return (xname, inst)
parseInstance _ v = fail ("Invalid instance query result: " ++ show v)
-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> JSValue -> Result [(String, Node.Node)]
getNodes ktg arr = extractArray arr >>= mapM (parseNode ktg)
-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> [(JSValue, JSValue)] -> Result (String, Node.Node)
parseNode ktg [ name, mtotal, mnode, mfree, dtotal, dfree
, ctotal, cnos, offline, drained, vm_capable, spindles, g_uuid
, tags, excl_stor, sptotal, spfree, cpu_speed ]
= do
xname <- annotateResult "Parsing new node" (fromJValWithStatus name)
let convert a = genericConvert "Node" xname a
xoffline <- convert "offline" offline
xdrained <- convert "drained" drained
xvm_capable <- convert "vm_capable" vm_capable
xgdx <- convert "group.uuid" g_uuid >>= lookupGroup ktg xname
xtags <- convert "tags" tags
xexcl_stor <- convert "exclusive_storage" excl_stor
xcpu_speed <- convert "cpu_speed" cpu_speed
let live = not xoffline && xvm_capable
lvconvert def n d = eitherLive live def $ convert n d
xsptotal <- if xexcl_stor
then lvconvert 0 "sptotal" sptotal
else convert "spindles" spindles
let xspfree = genericResult (const (0 :: Int)) id
$ lvconvert 0 "spfree" spfree
-- "spfree" might be missing, if sharedfile is the only
-- supported disk template
xmtotal <- lvconvert 0.0 "mtotal" mtotal
xmnode <- lvconvert 0 "mnode" mnode
xmfree <- lvconvert 0 "mfree" mfree
xdtotal <- lvconvert 0.0 "dtotal" dtotal
xdfree <- lvconvert 0 "dfree" dfree
xctotal <- lvconvert 0.0 "ctotal" ctotal
xcnos <- lvconvert 0 "cnos" cnos
let node = flip Node.setCpuSpeed xcpu_speed .
flip Node.setNodeTags xtags $
Node.create xname xmtotal xmnode xmfree xdtotal xdfree
xctotal xcnos (not live || xdrained) xsptotal xspfree
xgdx xexcl_stor
return (xname, node)
parseNode _ v = fail ("Invalid node query result: " ++ show v)
-- | Parses the cluster tags.
getClusterData :: JSValue -> Result ([String], IPolicy, String)
getClusterData (JSObject obj) = do
let errmsg = "Parsing cluster info"
obj' = fromJSObject obj
ctags <- tryFromObj errmsg obj' "tags"
cpol <- tryFromObj errmsg obj' "ipolicy"
master <- tryFromObj errmsg obj' "master"
return (ctags, cpol, master)
getClusterData _ = Bad "Cannot parse cluster info, not a JSON record"
-- | Parses the cluster groups.
getGroups :: JSValue -> Result [(String, Group.Group)]
getGroups jsv = extractArray jsv >>= mapM parseGroup
-- | Parses a given group information.
parseGroup :: [(JSValue, JSValue)] -> Result (String, Group.Group)
parseGroup [uuid, name, apol, ipol, tags] = do
xname <- annotateResult "Parsing new group" (fromJValWithStatus name)
let convert a = genericConvert "Group" xname a
xuuid <- convert "uuid" uuid
xapol <- convert "alloc_policy" apol
xipol <- convert "ipolicy" ipol
xtags <- convert "tags" tags
-- TODO: parse networks to which this group is connected
return (xuuid, Group.create xname xuuid xapol [] xipol xtags)
parseGroup v = fail ("Invalid group query result: " ++ show v)
-- * Main loader functionality
-- | Builds the cluster data by querying a given socket name.
readData :: String -- ^ Unix socket to use as source
-> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
readData master =
E.bracket
(L.getLuxiClient master)
L.closeClient
(\s -> do
nodes <- queryNodes s
instances <- queryInstances s
cinfo <- queryClusterInfo s
groups <- queryGroups s
return (groups, nodes, instances, cinfo)
)
-- | Converts the output of 'readData' into the internal cluster
-- representation.
parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
-> Result ClusterData
parseData (groups, nodes, instances, cinfo) = do
group_data <- groups >>= getGroups
let (group_names, group_idx) = assignIndices group_data
node_data <- nodes >>= getNodes group_names
let (node_names, node_idx) = assignIndices node_data
inst_data <- instances >>= getInstances node_names
let (_, inst_idx) = assignIndices inst_data
(ctags, cpol, master) <- cinfo >>= getClusterData
node_idx' <- setMaster node_names node_idx master
return (ClusterData group_idx node_idx' inst_idx ctags cpol)
-- | Top level function for data loading.
loadData :: String -- ^ Unix socket to use as source
-> IO (Result ClusterData)
loadData = fmap parseData . readData
| apyrgio/ganeti | src/Ganeti/HTools/Backend/Luxi.hs | bsd-2-clause | 12,962 | 0 | 14 | 2,752 | 2,999 | 1,549 | 1,450 | 215 | 3 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-- | This module contains datatypes representing sets of intervals of
-- integers.
module Data.Intervals(
Intervals,
-- * Constructors
allNumbers,
fromIntervals,
fromNormalIntervals,
-- * Deconstructors
intervals,
-- * Utility Functions
span,
distinctValues,
-- ** Pack/Unpack Offsets
packOffsets,
unpackOffsets
) where
import Control.Monad
import Data.Hashable
import Data.List hiding (span)
import Data.Maybe
import Data.Interval(Interval(..))
import Prelude hiding (span)
import Text.XML.Expat.Pickle
import Text.XML.Expat.Tree
import qualified Data.Interval as Interval
-- | A datatype representing a set of intervals.
newtype Intervals n = Intervals { intervals :: [Interval n] }
deriving (Ord, Eq)
lower :: Interval n -> n
lower = fromJust . Interval.lower
upper :: Interval n -> n
upper = fromJust . Interval.upper
normalizeInterval :: Integral n => [Interval n] -> [Interval n]
normalizeInterval =
let
-- Transform Interval n n into Single n
collapse :: Eq n => Interval n -> Interval n
collapse r @ (Interval n m)
| n == m = Single n
| otherwise = r
collapse r = r
-- reverse the order function to effectively reverse the lists
orderInterval :: Ord n => Interval n -> Interval n -> Ordering
orderInterval (Max n1) (Max n2) = compare n2 n1
orderInterval (Max _) _ = GT
orderInterval _ (Max _) = LT
orderInterval r1 r2 = compare (lower r2) (lower r1)
-- The actual normalization function, remember that the list is
-- sorted in reverse order by the lower bound
intervalNorm :: Integral n => [Interval n] -> [Interval n] -> [Interval n]
-- If a min and max are adjacent, then there is either a
-- "forbidden region", or else the integer is totally unbounded
intervalNorm _ (Min minn : Max maxn : _)
| minn > maxn + 1 = [Max maxn, Min minn]
| otherwise = []
-- This rule is necessary to avoid taking the upper bound of Min,
-- which is undefined
intervalNorm accum (_ : Min n : list) = intervalNorm accum (Min n : list)
-- If a minimum overlaps another point, absorb it, otherwise
-- discard all previous results and start over here.
intervalNorm accum (Min n : r : list)
| upper r >= n - 1 = intervalNorm accum (Min (lower r) : list)
| otherwise = intervalNorm [Min n] (r : list)
-- This rule is necessary to avoid taking the lower bound of Min,
-- which is undefined
intervalNorm accum (r : Max n : _) =
intervalNorm (Max n : collapse r : accum) []
-- Put the first Max on the end of the result list, then ignore
-- everything that follows
intervalNorm accum (Max n : _) = intervalNorm (Max n : accum) []
-- Similar to the input list, max-min pairs generate an instant result
intervalNorm (Max maxn : Min minn : _) []
| minn > maxn + 1 = [Max maxn, Min minn]
| otherwise = []
-- Absorb a interval into the Max if it overlaps, otherwise stop
intervalNorm result @ (Max n : r : accum) []
| lower r <= n + 1 =
intervalNorm (Max (max (upper r) n) : accum) []
| otherwise = result
-- The basic input list processing, with no mins or maxes. If the
-- two overlap, combine them.
intervalNorm accum (r1 : r2 : list)
| (lower r1) - 1 <= upper r2 =
intervalNorm accum (Interval (lower r2) (upper r1) : list)
| otherwise = intervalNorm (collapse r1 : accum) (r2 : list)
intervalNorm accum [mono] = mono : accum
-- Result lists that don't contain a Max don't need to be
-- reprocessed
intervalNorm accum [] = accum
in
intervalNorm [] . sortBy orderInterval
-- | Get the lowest and highest possible values of an Intervals object,
-- or 'Nothing' if it is unbounded in either direction.
span :: Intervals n -> Maybe (n, n)
span (Intervals { intervals = [] }) = Nothing
span (Intervals { intervals = is }) =
case (head is, last is) of
(Max _, _) -> Nothing
(_, Min _) -> Nothing
(firsti, lasti) -> Just (lower firsti, upper lasti)
-- | Construct an Intervals object from a list of Interval objects.
-- The list may contain intervals that overlap, or are out of order.
fromIntervals :: Integral n => [Interval n] -> Intervals n
fromIntervals l = Intervals { intervals = normalizeInterval l }
-- | Convert an Intervals object to a sorted, normalized list of
-- Interval objects
fromNormalIntervals :: Intervals n -> [Interval n]
fromNormalIntervals (Intervals { intervals = l }) = l
-- | Get the number of distinct values that this Intervals object
-- represents.
distinctValues :: Integral n => Intervals n -> Maybe n
distinctValues = foldl (liftM2 (+)) (Just 0) . map Interval.size . intervals
-- | The Intervals object representing all numbers.
allNumbers :: Intervals n
allNumbers = Intervals { intervals = [] }
-- | A possible list of (a, b) pairs, so that if x < a then x + b else
-- ... will condense the integer into a single interval of values. This
-- is useful for generating packing code.
packOffsets :: Integral n => Intervals n -> Maybe [(n, n)]
packOffsets =
let
genOffset (avail, Just list) (Single n) =
(avail + 1, Just ((n, avail - n) : list))
genOffset (avail, Just list) (Interval lo hi) =
(avail + (hi - lo) + 1, Just ((hi, avail - lo) : list))
genOffset (avail, _) _ = (avail, Nothing)
in
liftM reverse . snd . foldl genOffset (0, Just []) . intervals
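-- A worked example (added): @packOffsets (fromIntervals [Interval 1 3, Single 7])@
-- is @Just [(3,-1),(7,-4)]@, i.e. the values 1..3 shift down to 0..2 and 7 maps
-- to 3, packing the set into the contiguous range 0..3.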
-- | A possible list of (a, b) pairs, so that if x < a then x + b else
-- ... will expand a condensed integer back out into its original
-- interval of values. This is useful for generating unpacking code.
unpackOffsets :: Integral n => Intervals n -> Maybe [(n, n)]
unpackOffsets =
let
genOffset (avail, Just list) (Single n) =
(avail + 1, Just ((avail, n - avail) : list))
genOffset (avail, Just list) (Interval lo hi) =
(avail + (hi - lo) + 1, Just ((avail, lo - avail) : list))
genOffset (avail, _) _ = (avail, Nothing)
in
liftM reverse . snd . foldl genOffset (0, Just []) . intervals
instance Show n => Show (Intervals n) where
show (Intervals { intervals = [] }) = "-inf to +inf"
show (Intervals { intervals = is }) = show is
instance Hashable n => Hashable (Intervals n) where
hashWithSalt s Intervals { intervals = is } = hashWithSalt s is
instance (GenericXMLString tag, Show tag,
GenericXMLString text, Show text,
Read n, Show n) =>
XmlPickler [NodeG [] tag text] (Intervals n) where
xpickle = xpWrap (Intervals, intervals) (xpList xpickle)
| emc2/compiler-misc | src/Data/Intervals.hs | bsd-3-clause | 8,354 | 9 | 17 | 1,988 | 2,025 | 1,076 | 949 | 108 | 14 |
-- | Encode IRC messages back to bytestrings
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Message.Encode
( encodePrefix
, encode
) where
--------------------------------------------------------------------------------
import Data.ByteString (ByteString)
import Data.ByteString.Char8 ()
import Data.Maybe (fromMaybe, isJust)
import Data.Monoid (Monoid, mempty)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
--------------------------------------------------------------------------------
import NumberSix.Message
--------------------------------------------------------------------------------
encodePrefix :: Prefix -> ByteString
encodePrefix (ServerPrefix s) = ":" <> T.encodeUtf8 s
encodePrefix (NickPrefix n u h) =
":" <> T.encodeUtf8 n <>
fromMaybe "" (fmap (("!" <>) . T.encodeUtf8) u) <>
fromMaybe "" (fmap (("@" <>) . T.encodeUtf8) h)
--------------------------------------------------------------------------------
encodeCommand :: Text -> ByteString
encodeCommand = T.encodeUtf8
--------------------------------------------------------------------------------
encodeParameters :: [Text] -> ByteString
encodeParameters [] = mempty
encodeParameters (x : [])
| hasSpace x || T.null x || T.head x == ':' = " :" <> T.encodeUtf8 x
| otherwise = " " <> T.encodeUtf8 x
where
hasSpace = isJust . T.find (== ' ')
encodeParameters (x : xs) = " " <> T.encodeUtf8 x <> encodeParameters xs
--------------------------------------------------------------------------------
encode :: Message -> ByteString
encode (Message p c ps) =
encodePrefix' p <> encodeCommand c <> encodeParameters ps
where
encodePrefix' = fromMaybe mempty . fmap ((<> " ") . encodePrefix)
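-- For example (added; this assumes the 'Message' constructor from
-- "NumberSix.Message" takes a Maybe prefix, a command and its parameters, as
-- the pattern above suggests):
--
-- > encode (Message Nothing "PRIVMSG" ["#chan", "hello world"])
-- > == "PRIVMSG #chan :hello world"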
| itkovian/number-six | src/NumberSix/Message/Encode.hs | bsd-3-clause | 1,930 | 0 | 12 | 413 | 455 | 246 | 209 | 31 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
-- | Description : mostly reversible conversion between ipynb and lhs
module IHaskell.Convert (convert) where
import IHaskellPrelude
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as CBS
import Control.Monad.Identity (Identity(Identity), unless, when)
import IHaskell.Convert.Args (ConvertSpec(..), fromJustConvertSpec, toConvertSpec)
import IHaskell.Convert.IpynbToLhs (ipynbToLhs)
import IHaskell.Convert.LhsToIpynb (lhsToIpynb)
import IHaskell.Flags (Argument)
import System.Directory (doesFileExist)
import Text.Printf (printf)
-- | used by @IHaskell convert@
convert :: [Argument] -> IO ()
convert args =
case fromJustConvertSpec (toConvertSpec args) of
ConvertSpec
{ convertToIpynb = Identity toIpynb
, convertInput = Identity inputFile
, convertOutput = Identity outputFile
, convertLhsStyle = Identity lhsStyle
, convertOverwriteFiles = force
}
| toIpynb -> do
unless force (failIfExists outputFile)
lhsToIpynb lhsStyle inputFile outputFile
| otherwise -> do
unless force (failIfExists outputFile)
ipynbToLhs lhsStyle inputFile outputFile
-- | Call fail when the named file already exists.
failIfExists :: FilePath -> IO ()
failIfExists file = do
exists <- doesFileExist file
when exists $ fail $
printf "File %s already exists. To force supply --force." file
| artuuge/IHaskell | src/IHaskell/Convert.hs | mit | 1,637 | 0 | 13 | 368 | 358 | 202 | 156 | 35 | 1 |
-- !!! Testing Typeable instances
module Main(main) where
import Data.Dynamic
import Data.Array
import Data.Array.MArray
import Data.Array.ST
import Data.Array.IO
import Data.Array.Unboxed
import Data.Complex
import Data.Int
import Data.Word
import Data.IORef
import System.IO
import Control.Monad.ST
import System.Mem.StableName
import System.Mem.Weak
import Foreign.StablePtr
import Control.Exception
import Foreign.C.Types
main :: IO ()
main = do
print (typeOf (undefined :: [()]))
print (typeOf (undefined :: ()))
print (typeOf (undefined :: ((),())))
print (typeOf (undefined :: ((),(),())))
print (typeOf (undefined :: ((),(),(),())))
print (typeOf (undefined :: ((),(),(),(),())))
print (typeOf (undefined :: (() -> ())))
print (typeOf (undefined :: (Array () ())))
print (typeOf (undefined :: Bool))
print (typeOf (undefined :: Char))
print (typeOf (undefined :: (Complex ())))
print (typeOf (undefined :: Double))
print (typeOf (undefined :: (Either () ())))
print (typeOf (undefined :: Float))
print (typeOf (undefined :: Handle))
print (typeOf (undefined :: Int))
print (typeOf (undefined :: Integer))
print (typeOf (undefined :: IO ()))
print (typeOf (undefined :: (Maybe ())))
print (typeOf (undefined :: Ordering))
print (typeOf (undefined :: Dynamic))
print (typeOf (undefined :: (IORef ())))
print (typeOf (undefined :: Int8))
print (typeOf (undefined :: Int16))
print (typeOf (undefined :: Int32))
print (typeOf (undefined :: Int64))
print (typeOf (undefined :: (ST () ())))
print (typeOf (undefined :: (StableName ())))
print (typeOf (undefined :: (StablePtr ())))
print (typeOf (undefined :: TyCon))
print (typeOf (undefined :: TypeRep))
print (typeOf (undefined :: Word8))
print (typeOf (undefined :: Word16))
print (typeOf (undefined :: Word32))
print (typeOf (undefined :: Word64))
print (typeOf (undefined :: ArithException))
print (typeOf (undefined :: AsyncException))
print (typeOf (undefined :: (IOArray () ())))
print (typeOf (undefined :: (IOUArray () ())))
print (typeOf (undefined :: (STArray () () ())))
print (typeOf (undefined :: (STUArray () () ())))
print (typeOf (undefined :: (StableName ())))
print (typeOf (undefined :: (StablePtr ())))
print (typeOf (undefined :: (UArray () ())))
print (typeOf (undefined :: (Weak ())))
print (typeOf (undefined :: CChar))
print (typeOf (undefined :: CSChar))
print (typeOf (undefined :: CUChar))
print (typeOf (undefined :: CShort))
print (typeOf (undefined :: CUShort))
print (typeOf (undefined :: CInt))
print (typeOf (undefined :: CUInt))
print (typeOf (undefined :: CLong))
print (typeOf (undefined :: CULong))
print (typeOf (undefined :: CLLong))
print (typeOf (undefined :: CULLong))
print (typeOf (undefined :: CFloat))
print (typeOf (undefined :: CDouble))
print (typeOf (undefined :: CPtrdiff))
print (typeOf (undefined :: CSize))
print (typeOf (undefined :: CWchar))
print (typeOf (undefined :: CSigAtomic))
print (typeOf (undefined :: CClock))
print (typeOf (undefined :: CTime))
| beni55/ghcjs | test/pkg/base/dynamic002.hs | mit | 3,165 | 0 | 13 | 596 | 1,531 | 788 | 743 | 84 | 1 |
module Main where
import Data.Typeable
f :: Typeable a => Int -> a -> TypeRep
f 0 a = typeOf a
f n a = f (n-1) [a]
main = print (f 50000 () == f 50001 ())
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/perf/should_run/T9203.hs | bsd-3-clause | 158 | 0 | 9 | 41 | 96 | 49 | 47 | 6 | 1 |
module Main where
import Control.Concurrent
-- example from
-- http://www.haskell.org/pipermail/glasgow-haskell-users/2008-November/015878.html
main = do
m <- newMVar (0 :: Int)
forkIO $ putMVar m 1
yield
r1 <- readMVar m
r2 <- takeMVar m
r3 <- takeMVar m
return ()
| urbanslug/ghc | testsuite/tests/concurrent/should_run/readMVar3.hs | bsd-3-clause | 297 | 0 | 9 | 70 | 85 | 40 | 45 | 10 | 1 |
module Main (main) where
import Control.Monad.Free.Church
import Control.Monad.Trans.RWS.CPS
import qualified Data.ByteString.Builder as ByteString.Builder
import Data.ByteString.Builder (Builder)
import qualified Data.ByteString.Lazy as ByteString
import Data.Monoid
import Data.PCM
import Data.Transmission
import Data.TransmissionParser
import Options.Applicative hiding (Failure, Parser, Success)
import qualified Options.Applicative as Opt
import System.IO
import System.Random
import qualified Text.Megaparsec as Megaparsec
data Options = Options
{ optThrottle :: Bool
, optMinDelay :: Double
, optMaxDelay :: Double
, optNoiseVolume :: Double
, optSampleRate :: SampleRate
}
runTransmission ::
RandomGen g => Options -> TransmissionM Builder a -> g -> Builder
runTransmission opts tr g = snd $ evalRWS (iterM run tr) () g
where
run (Transmit x f) = tell x *> f
run (Noise x f) =
state (pcmNoise (optSampleRate opts) (optNoiseVolume opts) x) >>= f
run (RandDelayTime f) =
state (randomR (optMinDelay opts, optMaxDelay opts)) >>= f
run (Encode x f) = f $ pcmEncodeMessage (optSampleRate opts) x
encodeTransmission :: Options -> IO ()
encodeTransmission opts = do
input <- getContents
case Megaparsec.parse transmission "" input of
Left err -> hPutStrLn stderr $ Megaparsec.parseErrorPretty err
Right x ->
(runTransmission opts x <$> newStdGen) >>=
(write . ByteString.Builder.toLazyByteString)
where
write
| optThrottle opts = writeSamples (throttledPutStr (optSampleRate opts))
| otherwise = writeSamples ByteString.putStr
main :: IO ()
main = execParser opts >>= encodeTransmission
where
opts =
info
(helper <*> options)
(fullDesc <> progDesc desc <>
header "pagerenc - a program for encoding FLEX and POCSAG messages")
desc =
"Reads lines from stdin, outputting POCSAG or FLEX data to stdout\
\ which is decodable by multimon-ng. Additionally, insert delays\
\ between messages to simulate staggered broadcasts."
options :: Opt.Parser Options
options =
Options <$>
switch
(long "throttle" <>
help "Throttle data output to 22050Hz, causing 'realtime' playback.") <*>
option
auto
(long "mindelay" <> help "Set minimum delay between messages in seconds." <>
metavar "NUM" <>
value 1.0 <>
showDefault) <*>
option
auto
(long "maxdelay" <> help "Set maximum delay between messages in seconds." <>
metavar "NUM" <>
value 10.0 <>
showDefault) <*>
option
auto
(long "noisevolume" <> help "Set volume of noise inserted between messages." <>
metavar "NUM" <>
value 0.3 <>
showDefault) <*>
option
(fmap SampleRate auto)
(long "samplerate" <> help "Set sample rate of output data." <>
metavar "INT" <>
value (SampleRate 22050) <>
showDefaultWith (\(SampleRate x) -> show x))
| unknownloner/pagerenc | src/Main.hs | mit | 2,941 | 0 | 15 | 651 | 797 | 409 | 388 | 80 | 4 |
{-# LANGUAGE GADTs #-}
module Text.XML.Direct.SAX (
module Data.XML.Types,
Parser,
newParser,
Callback,
setCallback,
clearCallback,
parsedBeginDocument,
parsedEndDocument,
parsedBeginElement,
parsedEndElement,
parsedCharacters,
parsedComment,
parsedInstruction,
parsedDoctype,
parseBytes,
parseComplete
)
where
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char
import Data.IORef
import Data.XML.Types
data Parser = Parser {
parserErrorHandler :: String -> IO (),
parserFilename :: Maybe String,
parserInputBuffer :: IORef ByteString,
parserBeginDocumentCallback
:: IORef (Maybe (IO Bool)),
parserEndDocumentCallback
:: IORef (Maybe (IO Bool)),
parserBeginElementCallback
:: IORef (Maybe (Name -> [Attribute] -> IO Bool)),
parserEndElementCallback
:: IORef (Maybe (Name -> IO Bool)),
parserCharactersCallback
:: IORef (Maybe (String -> IO Bool)),
parserCommentCallback
:: IORef (Maybe (String -> IO Bool)),
parserInstructionCallback
:: IORef (Maybe (Instruction -> IO Bool)),
parserDoctypeCallback
:: IORef (Maybe (Doctype -> IO Bool))
}
data Callback a where
CallbackBeginDocument :: Callback (IO Bool)
CallbackEndDocument :: Callback (IO Bool)
CallbackBeginElement :: Callback (Name -> [Attribute] -> IO Bool)
CallbackEndElement :: Callback (Name -> IO Bool)
CallbackCharacters :: Callback (String -> IO Bool)
CallbackComment :: Callback (String -> IO Bool)
CallbackInstruction :: Callback (Instruction -> IO Bool)
CallbackDoctype :: Callback (Doctype -> IO Bool)
newParser :: (String -> IO ())
-> Maybe String
-> IO Parser
newParser errorHandler maybeFilename = do
inputBufferIORef <- newIORef BS.empty
beginDocumentCallbackIORef <- newIORef Nothing
endDocumentCallbackIORef <- newIORef Nothing
beginElementCallbackIORef <- newIORef Nothing
endElementCallbackIORef <- newIORef Nothing
charactersCallbackIORef <- newIORef Nothing
commentCallbackIORef <- newIORef Nothing
instructionCallbackIORef <- newIORef Nothing
doctypeCallbackIORef <- newIORef Nothing
return Parser {
parserErrorHandler = errorHandler,
parserFilename = maybeFilename,
parserInputBuffer = inputBufferIORef,
parserBeginDocumentCallback = beginDocumentCallbackIORef,
parserEndDocumentCallback = endDocumentCallbackIORef,
parserBeginElementCallback = beginElementCallbackIORef,
parserEndElementCallback = endElementCallbackIORef,
parserCharactersCallback = charactersCallbackIORef,
parserCommentCallback = commentCallbackIORef,
parserInstructionCallback = instructionCallbackIORef,
parserDoctypeCallback = doctypeCallbackIORef
}
setCallback :: Parser -> Callback a -> a -> IO ()
setCallback parser which callback = do
case which of
CallbackBeginDocument -> do
writeIORef (parserBeginDocumentCallback parser) $ Just callback
CallbackEndDocument -> do
writeIORef (parserEndDocumentCallback parser) $ Just callback
CallbackBeginElement -> do
writeIORef (parserBeginElementCallback parser) $ Just callback
CallbackEndElement -> do
writeIORef (parserEndElementCallback parser) $ Just callback
CallbackCharacters -> do
writeIORef (parserCharactersCallback parser) $ Just callback
CallbackComment -> do
writeIORef (parserCommentCallback parser) $ Just callback
CallbackInstruction -> do
writeIORef (parserInstructionCallback parser) $ Just callback
CallbackDoctype -> do
writeIORef (parserDoctypeCallback parser) $ Just callback
clearCallback :: Parser -> Callback a -> IO ()
clearCallback parser which = do
case which of
CallbackBeginDocument -> do
writeIORef (parserBeginDocumentCallback parser) Nothing
CallbackEndDocument -> do
writeIORef (parserEndDocumentCallback parser) Nothing
CallbackBeginElement -> do
writeIORef (parserBeginElementCallback parser) Nothing
CallbackEndElement -> do
writeIORef (parserEndElementCallback parser) Nothing
CallbackCharacters -> do
writeIORef (parserCharactersCallback parser) Nothing
CallbackComment -> do
writeIORef (parserCommentCallback parser) Nothing
CallbackInstruction -> do
writeIORef (parserInstructionCallback parser) Nothing
CallbackDoctype -> do
writeIORef (parserDoctypeCallback parser) Nothing
parsedBeginDocument :: Callback (IO Bool)
parsedBeginDocument = CallbackBeginDocument
parsedEndDocument :: Callback (IO Bool)
parsedEndDocument = CallbackEndDocument
parsedBeginElement :: Callback (Name -> [Attribute] -> IO Bool)
parsedBeginElement = CallbackBeginElement
parsedEndElement :: Callback (Name -> IO Bool)
parsedEndElement = CallbackEndElement
parsedCharacters :: Callback (String -> IO Bool)
parsedCharacters = CallbackCharacters
parsedComment :: Callback (String -> IO Bool)
parsedComment = CallbackComment
parsedInstruction :: Callback (Instruction -> IO Bool)
parsedInstruction = CallbackInstruction
parsedDoctype :: Callback (Doctype -> IO Bool)
parsedDoctype = CallbackDoctype
isNameStartChar :: Char -> Bool
isNameStartChar c =
let codepoint = ord c
inRange (a, b) = a <= codepoint && codepoint <= b
in isLetter c || c == '_' || any inRange nameStartCharRanges
isNameChar :: Char -> Bool
isNameChar c =
let codepoint = ord c
inRange (a, b) = a <= codepoint && codepoint <= b
in isLetter c
|| isDigit c
|| c == '-'
|| c == '.'
|| c == (chr 0xB7)
|| any inRange nameCharRanges
nameStartCharRanges :: [(Int, Int)]
nameStartCharRanges =
[(0xC0, 0xD6), (0xD8, 0xF6), (0xF8, 0x2FF), (0x370, 0x37D), (0x37F, 0x1FFF),
(0x200C, 0x200D), (0x2070, 0x218F), (0x2C00, 0x2FEF), (0x3001, 0xD7FF),
(0xF900, 0xFDCF), (0xFDF0, 0xFFFD), (0x10000, 0xEFFFF)]
nameCharRanges :: [(Int, Int)]
nameCharRanges =
nameStartCharRanges
++ [(0x0300, 0x036F), (0x203F, 0x2040)]
parseBytes :: Parser -> ByteString -> IO ()
parseBytes parser newBytes = do
let loop :: ByteString -> IO ()
loop bytes = do
case UTF8.uncons bytes of
Nothing -> return ()
Just (c, bytes') -> do
(keepGoing, bytes'')
<- case c of
'<' -> handleThing bytes'
_ -> handleText bytes
if keepGoing
then loop bytes''
else writeIORef (parserInputBuffer parser) bytes''
handleText :: ByteString -> IO (Bool, ByteString)
handleText bytes = do
let (text, _) = UTF8.foldl (\(result, done) c ->
if done
then (result, True)
else if c == '<'
then (result, True)
else (result ++ [c], False))
("", False)
bytes
bytes' = UTF8.drop (length text) bytes
callbackIORef = parserCharactersCallback parser
maybeCallback <- readIORef callbackIORef
keepGoing <- case maybeCallback of
Nothing -> return True
Just callback -> callback text
return (keepGoing, bytes')
handleThing :: ByteString -> IO (Bool, ByteString)
handleThing bytes = do
let (thing, _) = UTF8.foldl (\(result, done) c ->
if done
then (result, True)
else if c == '>'
then (result ++ [c], True)
else (result ++ [c], False))
("", False)
bytes
          complete = not (null thing) && last thing == '>'
bytes' = if complete
then UTF8.drop (length thing) bytes
else bytes
return (complete, bytes')
preexistingBytes <- readIORef $ parserInputBuffer parser
loop $ BS.concat [preexistingBytes, newBytes]
parseComplete :: Parser -> IO ()
parseComplete parser = do
preexistingBytes <- readIORef $ parserInputBuffer parser
if not $ BS.null preexistingBytes
then parserErrorHandler parser $ "Trailing garbage at end of XML."
else return ()
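-- A hypothetical usage sketch (names and input are illustrative only; the
-- error handler and 'stderr' would come from System.IO):
--
-- > main = do
-- >   p <- newParser (hPutStrLn stderr) Nothing
-- >   setCallback p parsedCharacters (\txt -> putStrLn txt >> return True)
-- >   parseBytes p (UTF8.fromString "<greeting>hello</greeting>")
-- >   parseComplete p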
| IreneKnapp/direct-xml-sax | Text/XML/Direct/SAX.hs | mit | 9,105 | 0 | 22 | 2,888 | 2,303 | 1,197 | 1,106 | 210 | 10 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE OverloadedStrings #-}
module NetwireLoop where
-- https://danbst.wordpress.com/2013/01/23/novice-netwire-user/
-- http://jshaskell.blogspot.se/2012/11/breakout-improved-and-with-netwire.html
-- http://hpaste.org/83098
--import Control.Monad.Identity (Identity)
import Control.Wire
import Prelude hiding ((.), id)
import Text.Printf
countFrame :: WireP a Int
countFrame = countFrom 0 <<< 1
timestamp :: WireP a Time
timestamp = timeFrom 10
logFrame :: WireP (Int, Int) String
logFrame = arr (\(f1, f2) t -> printf "[%d %d] - %8.8f" f1 f2 t) <*> time
-- based on http://www.haskell.org/haskellwiki/Netwire
-- arrow do notation
system :: WireP a String
system = proc _ -> do
--time <- timestamp -< ()
frame <- countFrame -< ()
--w <- countFrame . when even <|> 0 -< frame
f2 <- hold 0 ((countFrom 0 <<< 1) . periodically 2) -< ()
logFrame -< (frame, f2)
{-
-- count produces every 2
(countFrom 0 <<< 1) . periodically 2
-- count produces every instance, but periodically produces the count value only every 2
periodically 2 . (countFrom 0 <<< 1)
-}
{-
-- same as system
systemA :: WireP a String
systemA = timestamp &&& countFrame >>> arr (\ (t, f) -> printf "[%d] time: %s" f (show t))
-}
main :: IO ()
main = mainloop system clockSession
mainloop w' session' = do
(mx, w, session) <- stepSessionP w' session' ()
case mx of
Left ex -> putStrLn ("Inhibited: " ++ show ex)
Right x -> putStrLn ("Produced: " ++ show x)
mainloop w session
| MaxDaten/netwire-examples | NetwirePlayground.hs | mit | 1,512 | 1 | 15 | 280 | 333 | 176 | 157 | 25 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Yesod.Routes.TH.Types
( -- * Data types
Resource (..)
, ResourceTree (..)
, Piece (..)
, Dispatch (..)
, CheckOverlap
-- ** Helper functions
, resourceMulti
, resourceTreePieces
, resourceTreeName
) where
import Language.Haskell.TH.Syntax
import Control.Arrow (second)
data ResourceTree typ = ResourceLeaf (Resource typ) | ResourceParent String [(CheckOverlap, Piece typ)] [ResourceTree typ]
resourceTreePieces :: ResourceTree typ -> [(CheckOverlap, Piece typ)]
resourceTreePieces (ResourceLeaf r) = resourcePieces r
resourceTreePieces (ResourceParent _ x _) = x
resourceTreeName :: ResourceTree typ -> String
resourceTreeName (ResourceLeaf r) = resourceName r
resourceTreeName (ResourceParent x _ _) = x
instance Functor ResourceTree where
fmap f (ResourceLeaf r) = ResourceLeaf (fmap f r)
fmap f (ResourceParent a b c) = ResourceParent a (map (second $ fmap f) b) $ map (fmap f) c
instance Lift t => Lift (ResourceTree t) where
lift (ResourceLeaf r) = [|ResourceLeaf $(lift r)|]
lift (ResourceParent a b c) = [|ResourceParent $(lift a) $(lift b) $(lift c)|]
data Resource typ = Resource
{ resourceName :: String
, resourcePieces :: [(CheckOverlap, Piece typ)]
, resourceDispatch :: Dispatch typ
}
deriving Show
type CheckOverlap = Bool
instance Functor Resource where
fmap f (Resource a b c) = Resource a (map (second $ fmap f) b) (fmap f c)
instance Lift t => Lift (Resource t) where
lift (Resource a b c) = [|Resource $(lift a) $(lift b) $(lift c)|]
data Piece typ = Static String | Dynamic typ
deriving Show
instance Functor Piece where
fmap _ (Static s) = (Static s)
fmap f (Dynamic t) = Dynamic (f t)
instance Lift t => Lift (Piece t) where
lift (Static s) = [|Static $(lift s)|]
lift (Dynamic t) = [|Dynamic $(lift t)|]
data Dispatch typ =
Methods
{ methodsMulti :: Maybe typ -- ^ type of the multi piece at the end
, methodsMethods :: [String] -- ^ supported request methods
}
| Subsite
{ subsiteType :: typ
, subsiteFunc :: String
}
deriving Show
instance Functor Dispatch where
fmap f (Methods a b) = Methods (fmap f a) b
fmap f (Subsite a b) = Subsite (f a) b
instance Lift t => Lift (Dispatch t) where
lift (Methods Nothing b) = [|Methods Nothing $(lift b)|]
lift (Methods (Just t) b) = [|Methods (Just $(lift t)) $(lift b)|]
lift (Subsite t b) = [|Subsite $(lift t) $(lift b)|]
resourceMulti :: Resource typ -> Maybe typ
resourceMulti Resource { resourceDispatch = Methods (Just t) _ } = Just t
resourceMulti _ = Nothing
| piyush-kurur/yesod | yesod-routes/Yesod/Routes/TH/Types.hs | mit | 2,684 | 0 | 12 | 619 | 910 | 492 | 418 | 62 | 1 |
module Or where
import Data.Semigroup
import Test.QuickCheck
data Or a b = Fst a | Snd b deriving (Eq, Show)
instance Semigroup (Or a b) where
(Snd x) <> _ = Snd x
_ <> (Fst x) = Fst x
_ <> (Snd x) = Snd x
instance (Arbitrary a, Arbitrary b) => Arbitrary (Or a b) where
arbitrary = do
a <- arbitrary
b <- arbitrary
oneof [return $ Fst a, return $ Snd b]
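-- A sketch of the usual associativity check for this exercise; the names
-- 'semigroupAssoc' and 'OrAssoc' are assumptions, not part of the original.
semigroupAssoc :: (Eq m, Semigroup m) => m -> m -> m -> Bool
semigroupAssoc a b c = (a <> (b <> c)) == ((a <> b) <> c)
type OrAssoc = Or Int String -> Or Int String -> Or Int String -> Bool
-- e.g. in GHCi or a test runner: quickCheck (semigroupAssoc :: OrAssoc)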
| JoshuaGross/haskell-learning-log | Code/Haskellbook/Semigroups/src/Or.hs | mit | 398 | 0 | 11 | 120 | 193 | 97 | 96 | 13 | 0 |
{-# LANGUAGE CPP, OverloadedStrings #-}
module ChangePasswordLogged (changePasswordLoggedSpecs) where
import Yesod.Auth
import Yesod.Test
import Foundation
import qualified Data.Text as T
-- In 9f379bc219bd1fdf008e2c179b03e98a05b36401 (which went into yesod-form-1.3.9)
-- the numbering of fields was changed. We normally wouldn't care because fields
-- can be set via 'byLabel', but hidden fields have no label so we must use the id
-- directly. We temporarily support both versions of yesod form with the following.
f1 :: T.Text
#if MIN_VERSION_yesod_form(1,3,9)
f1 = "f1"
#else
f1 = "f2"
#endif
changePasswordLoggedSpecs :: YesodSpec MyApp
changePasswordLoggedSpecs =
ydescribe "Change Password while logged in tests" $ do
yit "changes a password while logged in" $ do
get' "/auth/page/account/newaccount"
statusIs 200
post'"/auth/page/account/newaccount" $ do
addNonce
byLabel "Username" "aaa"
byLabel "Email" "[email protected]"
byLabel "Password" "xxx"
byLabel "Confirm" "xxx"
statusIs 302
get' "/"
statusIs 200
bodyContains "A confirmation e-mail has been sent to [email protected]"
(username, email, verify) <- lastVerifyEmail
assertEqual "username" username "aaa"
assertEqual "email" email "[email protected]"
-- valid login
get' "/auth/login"
post'"/auth/page/account/login" $ do
byLabel "Username" "aaa"
byLabel "Password" "xxx"
statusIs 200
bodyContains "Your email has not yet been verified"
-- resend verify email
post'"/auth/page/account/resendverifyemail" $ do
addNonce
addPostParam f1 "aaa" -- username is also a hidden field
statusIs 302
get' "/"
bodyContains "A confirmation e-mail has been sent to [email protected]"
(username', email', verify') <- lastVerifyEmail
assertEqual "username" username' "aaa"
assertEqual "email" email' "[email protected]"
assertEqual "verify" True (verify /= verify')
-- verify email
get' verify'
statusIs 302
get' "/"
statusIs 200
bodyContains "You are logged in as aaa"
post $ AuthR LogoutR
statusIs 302
get' "/"
statusIs 200
bodyContains "Please visit the <a href=\"/auth/login\">Login page"
-- valid login
get' "/auth/login"
post'"/auth/page/account/login" $ do
byLabel "Username" "aaa"
byLabel "Password" "xxx"
statusIs 302
get' "/"
bodyContains "You are logged in as aaa"
-- change password while logged in
-- good key
get' "/auth/page/account/newpasswordlgd"
post'"/auth/page/account/setpassword" $ do
addNonce
byLabel "Please fill in your current password" "xxx"
byLabel "New password" "www"
byLabel "Confirm" "www"
addPostParam f1 "aaa"
statusIs 302
get' "/"
statusIs 200
bodyContains "Password updated"
bodyContains "You are logged in as aaa"
post $ AuthR LogoutR
-- check new password
get' "/auth/login"
post'"/auth/page/account/login" $ do
byLabel "Username" "aaa"
byLabel "Password" "www"
statusIs 302
get' "/"
statusIs 200
bodyContains "You are logged in as aaa"
-- logout
post $ AuthR LogoutR
yit "cannot change password while logged out" $ do
get' "/auth/page/account/newpasswordlgd"
statusIs 403
| jasonzoladz/yesod-auth-account-fork | tests/ChangePasswordLogged.hs | mit | 4,025 | 0 | 14 | 1,463 | 635 | 264 | 371 | 85 | 1 |
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Ch19.ParseInt
where
import Data.Functor.Identity (Identity)
import qualified Data.Char as C
import qualified Data.Bits as Bits
import qualified Control.Applicative as A
import qualified Control.Monad.Except as E
import qualified Control.Monad.State as S
import qualified Data.ByteString.Char8 as B
import qualified Data.Foldable as F
data ParseError
= NumericOverflow
| EndOfInput
| Chatty String
deriving (Eq, Ord, Show)
newtype Parser a =
P { runP :: E.ExceptT ParseError (S.State B.ByteString) a }
deriving (Functor, Applicative, Monad, E.MonadError ParseError)
instance A.Alternative Parser where
-- empty :: Parser a
empty =
P $ E.throwError (Chatty "empty")
  -- (<|>) :: Parser a -> Parser a -> Parser a
px <|> py =
E.catchError px tryRecover
where
tryRecover (Chatty _) =
py
tryRecover e =
E.throwError e
-- state monad not exposed -> we want to get a hold of it somehow though
-- liftP' :: S.StateT B.ByteString (E.Except ParseError) a -> Parser' a
liftP :: S.StateT B.ByteString Identity a -> Parser a
liftP m =
P (S.lift m)
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = do
s <- liftP S.get
case B.uncons s of
Nothing ->
E.throwError EndOfInput
Just (c, s')
| p c ->
liftP (S.put s') >> return c
| otherwise ->
E.throwError (Chatty $ "invalid character: '" ++ B.unpack s ++ "'")
runParser :: Parser a -> B.ByteString
-> Either ParseError (a, B.ByteString)
runParser p xs =
case S.runState (E.runExceptT . runP $ p) xs of
(Left err, _) ->
Left err
(Right x, ys) ->
Right (x, ys)
many :: Parser a -> Parser [a]
many p = do
st <- liftP S.get
if B.null st then return [] else (:) <$> p <*> many p
int :: Parser Int
int =
(satisfy (=='-') >> digits (-)) A.<|> digits (+)
where
digits f =
many (satisfy C.isDigit) >>= maybe (E.throwError NumericOverflow) return . toInt f
toInt :: (Int -> Int -> Int) -> [Char] -> Maybe Int
toInt f =
F.foldlM (safeBuildInt f) 0 . fmap fromAscii
fromAscii :: Char -> Int
fromAscii =
(flip (-) 48) . C.ord
safeBuildInt :: (Int -> Int -> Int) -> Int -> Int -> Maybe Int
safeBuildInt f acc x
| acc == 0 =
Just (0 `f` x)
| otherwise =
if sign newAcc == sign acc then Just newAcc else Nothing -- overflow detected
where
newAcc =
(acc * 10) `f` x
-- "Right and left shifts by amounts greater than or equal to the width of the type result in either zero or -1, depending on the sign of the value being shifted" [Haskell2010 Report 18.1]
sign :: Int -> Int
sign =
flip Bits.shiftR 64
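    -- Illustration (assuming a 64-bit 'Int'): 'sign' is 0 for non-negative and
    -- -1 for negative values, so a wrapping step such as
    -- 9223372036854775807 * 10 + 7 flips the sign and safeBuildInt returns Nothing.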
-- Try to swap monads in the monad transformer to simplify running the parser
-- newtype Parser' = S.StateT B.ByteString (E.Except ParseError)
newtype Parser' a =
P' { runP' :: S.StateT B.ByteString (E.Except ParseError) a }
deriving (Functor, Applicative, Monad, E.MonadError ParseError)
runParser' :: Parser' a -> B.ByteString
-> Either ParseError (a, B.ByteString)
runParser' p =
E.runExcept . S.runStateT (runP' p)
instance A.Alternative Parser' where
  -- empty :: Parser' a
empty =
P' $ E.throwError (Chatty "empty")
  -- (<|>) :: Parser' a -> Parser' a -> Parser' a
px <|> py =
E.catchError px tryRecover
where
tryRecover (Chatty _) =
py
tryRecover e =
E.throwError e
liftP' :: S.StateT B.ByteString (E.Except ParseError) a -> Parser' a
liftP' mst =
P' mst
satisfy' :: (Char -> Bool) -> Parser' Char
satisfy' p = do
st <- liftP' S.get
case B.uncons st of
Nothing ->
E.throwError EndOfInput
Just (c, st')
| p c ->
liftP' (S.put st') >> return c
| otherwise ->
E.throwError $ Chatty ("invalid character: " ++ show (B.head st))
many' :: Parser' a -> Parser' [a]
many' p =
do s <- liftP' S.get ; if B.null s then return [] else (:) <$> p <*> many' p
-- TODO: extend ParseError to append multiple errors ?
instance Monoid ParseError where
mempty =
Chatty "empty"
(Chatty "empty") `mappend` pe =
pe
pe `mappend` _ =
pe
-- ƛ: runParser' int' "-9223372036854775808" :: Right (-9223372036854775808,"")
-- ƛ: runParser' int' "-9223372036854775809" :: Left NumericOverflow
-- ƛ: runParser' int' "9223372036854775808" :: Left NumericOverflow
-- ƛ: runParser int "9223372036854775807" :: Right (9223372036854775807,"")
int' :: Parser' Int
int' =
(satisfy' (=='-') >> digits'(-)) A.<|> digits' (+)
where
digits' f =
many' (satisfy' C.isDigit) >>= maybe (E.throwError NumericOverflow) return . toInt f
-- TODO: create a type class that generalises Parser and Parser'
class Monad p => ParserClass m p | p -> m where
  liftp :: S.StateT B.ByteString m a -> p a
-- First unsuccessful try
-- newtype Parser'' s m a =
-- P'' { runP'' :: S.StateT s m a }
-- deriving (Functor, Applicative, Monad)
-- instance ParserClass (Parser'' B.ByteString (E.Except ParseError)) where
-- liftp = liftP''
-- liftP'' :: S.StateT B.ByteString (E.Except ParseError) a
-- -> Parser'' B.ByteString (E.Except ParseError) a
-- liftP'' mst =
-- P'' mst
| futtetennista/IntroductionToFunctionalProgramming | RWH/src/ch19/ParseInt.hs | mit | 5,273 | 0 | 18 | 1,240 | 1,561 | 812 | 749 | 124 | 2 |
module AirType.Helpers where
import AirType.Types
import Data.Char
fromString :: String -> Maybe [Input]
fromString = mapM fromChar
fromInputs :: [Input] -> [[Char]]
fromInputs = fmap fromInput
fromInput :: Input -> [Char]
fromInput L1 = [' ']
fromInput L2 = ['v', 'f', 'r', 'g', 't', 'b']
fromInput L3 = ['c', 'd', 'e']
fromInput L4 = ['x', 's', 'w']
fromInput L5 = ['z', 'a', 'q', '\t']
fromInput R1 = [' ']
fromInput R2 = ['m', 'n', 'j', 'h', 'u', 'y']
fromInput R3 = ['k', 'i', ',']
fromInput R4 = ['.', 'l', 'o']
fromInput R5 = [';', 'p', '/', '\'', '[', ']', '\\']
fromChar :: Char -> Maybe Input
fromChar c = case toLower c of
'a' -> Just L5
'b' -> Just L2
'c' -> Just L3
'd' -> Just L3
'e' -> Just L3
'f' -> Just L2
'g' -> Just L2
'h' -> Just R2
'i' -> Just R3
'j' -> Just R2
'k' -> Just R3
'l' -> Just R4
'm' -> Just R2
'n' -> Just R2
'o' -> Just R4
'p' -> Just R5
'q' -> Just L5
'r' -> Just L2
's' -> Just L4
't' -> Just L2
'u' -> Just R2
'v' -> Just L2
'w' -> Just L4
'x' -> Just L4
'y' -> Just R2
'z' -> Just L5
',' -> Just R3
'.' -> Just R4
'/' -> Just R5
'\''-> Just R5
'[' -> Just R5
']' -> Just R5
'\\'-> Just R5
'\n'-> Just R5
'\t'-> Just L5
' ' -> Just L1
_ -> Nothing
| terrelln/air-types | src/AirType/Helpers.hs | mit | 1,342 | 0 | 8 | 412 | 625 | 318 | 307 | 57 | 37 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module EventProcessor where
import Types
import Data.Text (Text)
import qualified Data.Text as Text
import Control.Monad (forM_, liftM, forM)
import Database.PostgreSQL.Simple
import Data.Time.Clock
import Data.Maybe (fromJust)
import qualified Data.ByteString.Char8 as C
import Data.String
connectInfo :: ConnectInfo
connectInfo = defaultConnectInfo { connectDatabase = "geochat"
, connectUser = "choi" }
dbconn :: IO Connection
dbconn = connect connectInfo
createClient :: Connection -> IO Client
createClient conn = do
let q = "insert into clients (nickname) values ('anon') returning client_id, nickname"
xs@(x:_) :: [(Int, Text)] <- query_ conn q
return (Client { clientId = (fst x)
, nickName = "anon"
, clientRoomId = Nothing
})
refreshClient :: Connection -> Client -> IO Client
refreshClient conn client = do
let q = "select nickname, lat, lng, room_id from clients where client_id = ?"
xs@((nickname, mlat, mlng, mrid):_) :: [(Text, Maybe Double, Maybe Double, Maybe Int)] <- query conn q (Only $ clientId client)
let latLng = case (mlat,mlng) of
(Just lat, Just lng) -> Just (lat, lng)
otherwise -> Nothing
return (client {nickName = nickname, clientRoomId = mrid})
--- this is too much implementation detail
findRoom :: Connection -> RoomId -> IO Room
findRoom conn rid = do
let q = "select rooms.lat, rooms.lng from rooms where room_id = ?"
xs@((lat, lng):_) :: [(Double, Double)] <- query conn q (Only rid)
let q2 = "select client_id, nickname from clients where room_id = ?"
clients :: [(Int, Text)] <- query conn q2 (Only rid)
return (Room { roomId = rid
, latLng = (lat, lng)
, numParticipants = (length clients)
, clients = clients
})
-- tuple client
tupClient :: Client -> Client'
tupClient c = ((clientId c), (nickName c))
refreshRoom :: Connection -> Room -> IO Room
refreshRoom conn room = findRoom conn (roomId room)
makeUpdatedRoom room = UpdatedRoom (latLng room) room
processMsg :: Connection -> Client -> MessageFromClient -> IO [MessageFromServer]
processMsg conn _ (ListActiveRooms (swlat,swlng) (nelat,nelng)) = do
-- make a polygon box from PostGIS; 5 points to make a box
let q = "select rooms.room_id from rooms \
\inner join clients using (room_id) \
\where ST_Intersects( \
\ ST_Transform(ST_MakePolygon(ST_GeomFromText('LINESTRING(' || ? || ' ' || ? || ',' || ? || ' ' || ? || ',' || ? || ' ' || ? || ',' || ? || ' ' || ? || ',' || ? || ' ' || ? || ')', 4269)), 2163), \
\ rooms.geom) \
\group by rooms.room_id order by rooms.created desc "
variables = (swlng,swlat,nelng,swlat,nelng,nelat,swlng,nelat,swlng,swlat)
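      -- corner order implied by the tuple: SW (swlng,swlat) -> SE (nelng,swlat)
      -- -> NE (nelng,nelat) -> NW (swlng,nelat) -> back to SW to close the ring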
-- formatQuery conn q variables >>= C.putStrLn -- uncomment to debug
xs :: [Only Int] <- query conn q variables -- go around the 4 corners and end at start
forM xs (\x -> do
room <- findRoom conn (fromOnly x)
return $ makeUpdatedRoom room InitRoom)
processMsg conn client (ChangeNickname newname) = do
let q = "update clients set nickname = ? where client_id = ?"
execute conn q (newname, clientId client)
client' <- refreshClient conn client
case (clientRoomId client') of
Nothing -> return []
Just (rid) -> do
r <- liftM makeUpdatedRoom $ findRoom conn rid
return [r $ ChangedNickname $ tupClient client]
-- TODO if close to existing live room, Join that room
processMsg conn client (CreateRoom (lat, lng)) = do
let q0 = "select rooms.room_id, rooms.lat, rooms.lng, \
\ST_Distance(ST_Transform(ST_GeomFromText('POINT(' || ? || ' ' || ? || ')', 4326), 2163 ), rooms.geom) dist \
\from rooms where \
\ST_Distance(ST_Transform(ST_GeomFromText('POINT(' || ? || ' ' || ? || ')', 4326), 2163 ), rooms.geom) < 700 \
\and room_id in (select room_id from rooms inner join clients using(room_id) group by room_id) \
\order by dist asc"
xs :: [(Int, Double, Double, Double)] <- query conn q0 (lng, lat, lng, lat)
case xs of
((rid,lat,lng,dist):_) -> do
-- putStrLn $ "Instead of creating, nearby room " ++ (show dist) ++ " meters close, rid " ++ (show rid)
processMsg conn client (JoinRoom rid)
otherwise -> do
-- putStrLn $ "Creating new room at " ++ (show lat) ++ ", " ++ (show lng)
let q = "insert into rooms (lat, lng) values (?, ?) returning room_id"
((Only rid):_) :: [Only Int] <- query conn q (lat, lng)
processMsg conn client (JoinRoom rid)
processMsg conn client (JoinRoom rid) = do
client' <- refreshClient conn client
let r = clientRoomId client'
case r of
Just oldRid -> do
-- client leaves a room and joins one
case (oldRid == rid) of
True -> return [] -- no-op
False -> do
execute conn "update clients set room_id = ? where client_id = ?" (rid, (clientId client))
rleft <- liftM makeUpdatedRoom $ findRoom conn oldRid
rjoined <- liftM makeUpdatedRoom $ findRoom conn rid
return [rleft $ ExitRoom (tupClient client'), rjoined $ EnterRoom (tupClient client')]
Nothing -> do
execute conn "update clients set room_id = ? where client_id = ?" (rid, (clientId client))
r <- liftM makeUpdatedRoom $ findRoom conn rid
return [r $ EnterRoom (tupClient client')]
processMsg conn client Leave = do
client' <- refreshClient conn client
let r = clientRoomId client'
case r of
Just x -> do
execute conn "update clients set room_id = null, exited = now() where client_id = ?" (Only $ clientId client')
r <- liftM makeUpdatedRoom $ findRoom conn x
return $ [r $ ExitRoom $ tupClient client']
Nothing -> do
execute conn "update clients set room_id = null, exited = now() where client_id = ?" (Only $ clientId client')
return $ []
processMsg conn client (PostMessage msg) = do
client' <- refreshClient conn client
let r = clientRoomId client'
case r of
Just rid -> do -- client in a room
r <- findRoom conn rid
let q = "insert into messages (room_id, client_id, client_nick, content) values (?, ?, ?, ?) returning message_id, created"
((mid,time):_) :: [(Int, UTCTime)] <- query conn q (rid, clientId client', nickName client', msg)
return [Broadcast (latLng r) (tupClient client') rid msg]
Nothing -> do
return []
-- catchall
processMsg conn client _ = return []
| danchoi/geochat | src/EventProcessor.hs | mit | 6,639 | 0 | 22 | 1,667 | 1,780 | 909 | 871 | 112 | 7 |
module Data.JSON where
import Data.Text as T
import Foreign.String
builtin builtin_c_json 1 "c_json" "Foreign"
-- Hmm... it doesn't look like we can have a JSON object, just JSON representation, because a JSON object would have to have existential type fields.
data JSON = Array [JSON] | Object [(String,JSON)] | Number Double | Bool Bool | String CPPString | Null
json_to_string (Array x) = "["++intercalate "," (map json_to_string x) ++ "]"
-- we aren't escaping strings here...
-- if we actually build a C++ json object we could print that
json_to_string (Object x) = "{"++ intercalate ", " ["\""++key++"\": "++json_to_string value | (key,value) <- x] ++ "}"
json_to_string (Number x) = show x
json_to_string (Bool True) = "true"
json_to_string (Bool False) = "false"
json_to_string (String s) = "\""++unpack_cpp_string s++"\""
json_to_string (Null) = "null"
is_non_empty_string (c:cs) | is_char c = True
is_non_empty_string _ = False
to_json s@(c:_) | is_char c = String (pack_cpp_string s)
to_json (Text s) = String s
to_json [] = Array []
to_json o@((key,value):kvs) | is_non_empty_string key = Object [(key,to_json value) | (key, value) <- o]
to_json l@(_:_) = Array [to_json x | x <- l]
to_json x | is_double x = Number x
| is_int x = Number x
to_json True = Bool True
to_json False = Bool False
to_json (x,y) = Array [to_json x, to_json y]
to_json (x,y,z) = Array [to_json x, to_json y, to_json z]
to_json (x,y,z,w) = Array [to_json x, to_json y, to_json z, to_json w]
to_json _ = Null
deep_eval_json (Array xs) = c_pair 0 (list_to_vector $ map deep_eval_json xs)
deep_eval_json (Object xs) = c_pair 1 (list_to_vector $ map (\(key,value) -> c_pair (pack_cpp_string key) (deep_eval_json value)) xs)
deep_eval_json (Number n) = c_pair 2 n
deep_eval_json (Bool b) = c_pair 3 b
deep_eval_json (String s) = c_pair 4 s
deep_eval_json Null = c_pair 5 0
c_json = builtin_c_json . deep_eval_json
| bredelings/BAli-Phy | haskell/Data/JSON.hs | gpl-2.0 | 2,060 | 0 | 13 | 469 | 800 | 405 | 395 | -1 | -1 |
import qualified Data.ByteString.Lazy.Char8 as BS
import System.IO
import Data.Int (Int64)
import Control.Monad
import Data.Maybe
import Control.Monad.IO.Class
import Control.Lens
import GHC.Float
data Direction = Direction {
directionStartTime :: Float,
directionNextTime :: Float,
directionPreviousTime :: Float,
directionIndex :: Integer,
directionCommand :: String,
directionValue :: String
} deriving (Show, Read, Eq, Ord)
directionFromTuple :: [Float] -> [String] -> Direction
directionFromTuple f s = Direction (f!!0) (f!!1) (f!!2) (round $ f!!3) (s!!0) (s!!1)
directionFromStr :: BS.ByteString -> Direction
directionFromStr s = directionFromTuple a b
where
a = strToTime $ fst $ splits
b = strToCommands $ snd $ splits
splits = splitAt 4 $ BS.split ' ' s
strToTime :: [BS.ByteString] -> [Float]
strToTime s = map (\x -> read (BS.unpack x) :: Float) $ s
strToCommands :: [BS.ByteString] -> [[Char]]
strToCommands s = map (\x -> BS.unpack x) $ s
splitAtCRLF :: BS.ByteString -> Maybe (BS.ByteString, BS.ByteString)
splitAtCRLF s = case findCRLF s of
Nothing -> Nothing
Just (i,l) -> Just (s1, BS.drop l s2)
where (s1,s2) = BS.splitAt i s
splitAtCRLF_ :: BS.ByteString -> (BS.ByteString, BS.ByteString)
splitAtCRLF_ s = fromMaybe (s, BS.empty) (splitAtCRLF s)
findCRLF :: BS.ByteString -> Maybe (Int64,Int64)
findCRLF s =
case findCRorLF s of
Nothing -> Nothing
Just j | BS.null (BS.drop (j+1) s) -> Just (j,1)
Just j -> case (BS.index s j, BS.index s (j+1)) of
('\n','\r') -> Just (j,2)
('\r','\n') -> Just (j,2)
_ -> Just (j,1)
findCRorLF :: BS.ByteString -> Maybe Int64
findCRorLF = BS.findIndex (\c -> c == '\n' || c == '\r')
streamToList :: (BS.ByteString, BS.ByteString) -> [BS.ByteString]
streamToList (x, xs) = if (xs == BS.empty) then [x] else x : (streamToList $ splitAtCRLF_ xs)
main = do
file <- BS.readFile "/Volumes/LAST_CH_1/CHAPTER1/SECTION1/SCRIPT"
putStrLn $ show $ map directionFromStr (a file)
putStrLn $ show $ length $ a file
where a file = streamToList (splitAtCRLF_ file) | newmana/last-chance-to-see | src/Script.hs | gpl-2.0 | 2,125 | 14 | 15 | 426 | 908 | 485 | 423 | 52 | 5 |
import Debug.Trace
main :: IO()
main = do
let sorted = bubbleSort [6, 5, 3, 1, 8, 7, 2, 4] :: [Integer]
print sorted
bubbleSort :: (Ord a, Show a) => [a] -> [a]
--bubbleSort lst | trace ("sorting: " ++ show lst) False = undefined
bubbleSort [] = []
bubbleSort [x] = [x]
bubbleSort (x:y:rest) =
bubbleSort (init bubbled) ++ [last bubbled]
where
(first, second) = if x > y then (y,x) else (x,y)
bubbled = first : bubbleSort (second:rest)
| BaReinhard/Hacktoberfest-Data-Structure-and-Algorithms | algorithms/bubble_sort/haskell/bubble_sort.hs | gpl-3.0 | 454 | 1 | 11 | 99 | 223 | 122 | 101 | 12 | 2 |
{-|
The Hledger.Data library allows parsing and querying of C++ ledger-style
journal files. It generally provides a compatible subset of C++ ledger's
functionality. This package re-exports all the Hledger.Data.* modules
(except UTF8, which requires an explicit import.)
-}
module Hledger.Data (
module Hledger.Data.Account,
module Hledger.Data.AccountName,
module Hledger.Data.Amount,
module Hledger.Data.Commodity,
module Hledger.Data.Dates,
module Hledger.Data.Journal,
module Hledger.Data.Ledger,
module Hledger.Data.Posting,
module Hledger.Data.RawOptions,
module Hledger.Data.TimeLog,
module Hledger.Data.Transaction,
module Hledger.Data.Types,
tests_Hledger_Data
)
where
import Test.HUnit
import Hledger.Data.Account
import Hledger.Data.AccountName
import Hledger.Data.Amount
import Hledger.Data.Commodity
import Hledger.Data.Dates
import Hledger.Data.Journal
import Hledger.Data.Ledger
import Hledger.Data.Posting
import Hledger.Data.RawOptions
import Hledger.Data.TimeLog
import Hledger.Data.Transaction
import Hledger.Data.Types
tests_Hledger_Data :: Test
tests_Hledger_Data = TestList
[
tests_Hledger_Data_Account
,tests_Hledger_Data_AccountName
,tests_Hledger_Data_Amount
,tests_Hledger_Data_Commodity
,tests_Hledger_Data_Dates
,tests_Hledger_Data_Journal
,tests_Hledger_Data_Ledger
,tests_Hledger_Data_Posting
,tests_Hledger_Data_TimeLog
,tests_Hledger_Data_Transaction
-- ,tests_Hledger_Data_Types
]
| kmels/hledger | hledger-lib/Hledger/Data.hs | gpl-3.0 | 1,682 | 0 | 6 | 383 | 217 | 147 | 70 | 40 | 1 |
module SyntaxTree(Bits,Pattern(Literal,Binding,Wildcard),Expr(LiteralBits,Concat,Bound,Call),Definition(Def)) where
type Bits = [Bool]
data Pattern = Literal Bits | Binding Bits String | Wildcard Bits
data Expr = LiteralBits Bits | Concat Expr Expr | Bound Int | Call String [Expr]
data Definition = Def [Pattern] Expr
| qpliu/esolang | 01_/hs/interp/SyntaxTree.hs | gpl-3.0 | 320 | 0 | 7 | 42 | 115 | 75 | 40 | 11 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.UpdateSnapshotSchedule
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation updates a snapshot schedule configured for a gateway volume.
--
-- The default snapshot schedule for volume is once every 24 hours, starting at
-- the creation time of the volume. You can use this API to change the snapshot
-- schedule configured for the volume.
--
-- In the request you must identify the gateway volume whose snapshot schedule
-- you want to update, and the schedule information, including when you want the
-- snapshot to begin on a day and the frequency (in hours) of snapshots.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_UpdateSnapshotSchedule.html>
module Network.AWS.StorageGateway.UpdateSnapshotSchedule
(
-- * Request
UpdateSnapshotSchedule
-- ** Request constructor
, updateSnapshotSchedule
-- ** Request lenses
, ussDescription
, ussRecurrenceInHours
, ussStartAt
, ussVolumeARN
-- * Response
, UpdateSnapshotScheduleResponse
-- ** Response constructor
, updateSnapshotScheduleResponse
-- ** Response lenses
, ussrVolumeARN
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
data UpdateSnapshotSchedule = UpdateSnapshotSchedule
{ _ussDescription :: Maybe Text
, _ussRecurrenceInHours :: Nat
, _ussStartAt :: Nat
, _ussVolumeARN :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'UpdateSnapshotSchedule' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ussDescription' @::@ 'Maybe' 'Text'
--
-- * 'ussRecurrenceInHours' @::@ 'Natural'
--
-- * 'ussStartAt' @::@ 'Natural'
--
-- * 'ussVolumeARN' @::@ 'Text'
--
updateSnapshotSchedule :: Text -- ^ 'ussVolumeARN'
-> Natural -- ^ 'ussStartAt'
-> Natural -- ^ 'ussRecurrenceInHours'
-> UpdateSnapshotSchedule
updateSnapshotSchedule p1 p2 p3 = UpdateSnapshotSchedule
{ _ussVolumeARN = p1
, _ussStartAt = withIso _Nat (const id) p2
, _ussRecurrenceInHours = withIso _Nat (const id) p3
, _ussDescription = Nothing
}
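-- A usage sketch (hypothetical values; @arn@ stands in for a real volume ARN,
-- and the lens operators are assumed to be in scope via Network.AWS.Prelude):
--
-- > updateSnapshotSchedule arn 0 24 & ussDescription .~ Just "daily snapshot"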
-- | Optional description of the snapshot that overwrites the existing description.
ussDescription :: Lens' UpdateSnapshotSchedule (Maybe Text)
ussDescription = lens _ussDescription (\s a -> s { _ussDescription = a })
-- | Frequency of snapshots. Specify the number of hours between snapshots.
ussRecurrenceInHours :: Lens' UpdateSnapshotSchedule Natural
ussRecurrenceInHours =
lens _ussRecurrenceInHours (\s a -> s { _ussRecurrenceInHours = a })
. _Nat
-- | The hour of the day at which the snapshot schedule begins represented as /hh/,
-- where /hh/ is the hour (0 to 23). The hour of the day is in the time zone of
-- the gateway.
ussStartAt :: Lens' UpdateSnapshotSchedule Natural
ussStartAt = lens _ussStartAt (\s a -> s { _ussStartAt = a }) . _Nat
-- | The Amazon Resource Name (ARN) of the volume. Use the 'ListVolumes' operation
-- to return a list of gateway volumes.
ussVolumeARN :: Lens' UpdateSnapshotSchedule Text
ussVolumeARN = lens _ussVolumeARN (\s a -> s { _ussVolumeARN = a })
newtype UpdateSnapshotScheduleResponse = UpdateSnapshotScheduleResponse
{ _ussrVolumeARN :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'UpdateSnapshotScheduleResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ussrVolumeARN' @::@ 'Maybe' 'Text'
--
updateSnapshotScheduleResponse :: UpdateSnapshotScheduleResponse
updateSnapshotScheduleResponse = UpdateSnapshotScheduleResponse
{ _ussrVolumeARN = Nothing
}
ussrVolumeARN :: Lens' UpdateSnapshotScheduleResponse (Maybe Text)
ussrVolumeARN = lens _ussrVolumeARN (\s a -> s { _ussrVolumeARN = a })
instance ToPath UpdateSnapshotSchedule where
toPath = const "/"
instance ToQuery UpdateSnapshotSchedule where
toQuery = const mempty
instance ToHeaders UpdateSnapshotSchedule
instance ToJSON UpdateSnapshotSchedule where
toJSON UpdateSnapshotSchedule{..} = object
[ "VolumeARN" .= _ussVolumeARN
, "StartAt" .= _ussStartAt
, "RecurrenceInHours" .= _ussRecurrenceInHours
, "Description" .= _ussDescription
]
instance AWSRequest UpdateSnapshotSchedule where
type Sv UpdateSnapshotSchedule = StorageGateway
type Rs UpdateSnapshotSchedule = UpdateSnapshotScheduleResponse
request = post "UpdateSnapshotSchedule"
response = jsonResponse
instance FromJSON UpdateSnapshotScheduleResponse where
parseJSON = withObject "UpdateSnapshotScheduleResponse" $ \o -> UpdateSnapshotScheduleResponse
<$> o .:? "VolumeARN"
| dysinger/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/UpdateSnapshotSchedule.hs | mpl-2.0 | 5,772 | 0 | 10 | 1,237 | 696 | 418 | 278 | 77 | 1 |
func x = x
| lspitzner/brittany | data/Test74.hs | agpl-3.0 | 11 | 0 | 5 | 4 | 9 | 4 | 5 | 1 | 1 |
-- |
-- Module : Network.HTTP.Extras
-- Copyright : (c) Alexandru Scvortov 2008
-- License : LGPL (see LICENSE file)
-- Maintainer : [email protected]
--
module Network.HTTP.Extras
( httpGET
) where
import Network.URI (parseURI)
import Network.HTTP (simpleHTTP, Request(..), RequestMethod(..), Response(..))
import Text.Printf (printf)
-- | perform a GET on the query and return the response string
httpGET :: String -> IO String
httpGET uri = do
case parseURI uri of
Nothing -> error $ printf "httpGET: uri malformed: ``%s''" uri
Just u -> do
resp <- simpleHTTP (Request u GET [] "")
case resp of
        Left err -> error $ printf "httpGET: failed query: ``%s'': %s" uri (show err)
Right (Response _ _ _ body) -> return body
| scvalex/ltorrent | Network/HTTP/Extras.hs | lgpl-3.0 | 811 | 0 | 17 | 222 | 197 | 106 | 91 | 14 | 3 |
module Main where
type Cent = Int
type Muenze = Cent
-- The goal of this exercise is to implement a "money changer":
-- for a given amount (in cents), output a minimal list of coin
-- values whose sum is exactly the given amount.
--
-- Example:
beispiel :: [Muenze]
beispiel = wechsle euroMuenzen 153 -- should yield [100,50,2,1]!
--
-- We use the simple, familiar case here: the euro coin denominations (in cents):
euroMuenzen :: [Muenze]
euroMuenzen = [200, 100, 50, 20, 10, 5, 2, 1]
-- the trick here is to walk through the coins (which should already be
-- sorted in descending order) and to check each time whether the coin's
-- value still "fits" into the remaining amount - depending on that we
-- then recursively change a possibly reduced remaining amount with a
-- possibly reduced set of coins:
wechsle :: [Muenze] -> Cent -> [Muenze]
-- if the remaining amount is 0, we are done
wechsle _ 0 = []
-- otherwise there are hopefully still coins left
wechsle (m:ms) b
  -- the coin still fits into the remaining amount b,
  -- i.e. the output must contain the coin (`m :`)
  -- and a new amount (b-m) remains to be changed;
  -- the coin m may be used again for that
  -- (e.g. 2 EUR within a remaining amount of 4 EUR)
  | m <= b = m : wechsle (m:ms) (b-m)
  -- the coin no longer fits,
  -- so the same remaining amount has to be changed
  -- using only the smaller denominations
  | otherwise = wechsle ms b
-- no coins left but still a remaining amount: we have a problem
wechsle [] b = error ("keine Münzen mehr übrig um " ++ show b ++ " zu wechseln")
-- with this, check should yield "OK" (try it in GHCi!)
check :: String
check = if beispiel == [100,50,2,1] then "OK" else "Sorry"
main :: IO ()
main = do
putStr "welcher Betrag soll gewechselt werden? "
betrag <- readLn
print $ wechsle euroMuenzen betrag
| CarstenKoenig/DOS2015 | CoinChange/CoinChange.hs | unlicense | 1,843 | 0 | 9 | 355 | 295 | 172 | 123 | 20 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Oauth where
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Control.Exception.Lifted
import System.FilePath.Posix
import System.Directory
import Text.Read
import Network.HTTP.Conduit
import Network.HTTP.Types.Status
import Control.Monad.Trans.Resource
import Control.Monad.Except
import Network.OAuth.OAuth2
import Error
import qualified Http as HTTP
import DigipostKey
type URL = BS.ByteString
newtype AuthCode = AuthCode String deriving (Eq, Show)
newtype State = State String
loginUrl :: State -> URL
loginUrl (State state) = authorizationUrl digigpostKey `appendQueryParam` [("state", sToBS state)]
accessToken :: State -> AuthCode -> Manager -> ResourceT IO HTTP.AccessToken
accessToken (State state) (AuthCode code) manager = do
let (url, body) = accessTokenUrl' digigpostKey (sToBS code) (Just "code")
token <- liftIO $ doJSONPostRequest manager digigpostKey url (body ++ [("state", sToBS state)])
case token of
Right (AccessToken at (Just rt) _ _ _) -> return $ HTTP.AccessToken at rt
Right _ -> throwIO NotAuthenticated
Left _ -> throwIO NotAuthenticated
refreshAccessToken :: Manager -> HTTP.AccessToken -> IO HTTP.AccessToken
refreshAccessToken manager oldToken = do
putStrLn $ "trying to refresh token " ++ show oldToken
let oldRt = HTTP.refreshToken oldToken
newToken <- fetchRefreshToken manager digigpostKey oldRt
case newToken of
Right (AccessToken at _ _ _ _) -> return $ HTTP.AccessToken at oldRt
Left _ -> throwIO NotAuthenticated
storeAccessToken :: HTTP.AccessToken -> IO ()
storeAccessToken at = do
userHome <- getHomeDirectory
writeFile (accessTokenFile userHome) $ show at
loadAccessToken :: IO HTTP.AccessToken
loadAccessToken = catch readFileIfExists whenNotFound
where
readFileIfExists = do
userHome <- getHomeDirectory
content <- readFile (accessTokenFile userHome)
case readMaybe content of
Just at -> return at
Nothing -> throwIO NotAuthenticated
whenNotFound :: IOException -> IO HTTP.AccessToken
whenNotFound _ = throwIO NotAuthenticated
handleTokenRefresh :: (Manager -> HTTP.AccessToken -> ResourceT IO a) -> HTTP.AccessToken -> Manager -> ResourceT IO a
handleTokenRefresh accessFunc token manager = catch (accessFunc manager token) handleException
where
handleException e@(HttpExceptionRequest _ (StatusCodeException response _)) =
let
status = responseStatus response
in
if status == status403 then
do
newToken <- liftIO $ refreshAccessToken manager token --TODO: exceptions?
liftIO $ storeAccessToken newToken
accessFunc manager newToken --TODO: retry count??
else if status == status401 then throw NotAuthenticated
else throw $ HttpFailed e
handleException e = throw $ HttpFailed e
removeAccessToken :: IO ()
removeAccessToken = getHomeDirectory >>= removeFile . accessTokenFile
accessTokenFile :: FilePath -> FilePath
accessTokenFile userHome = combine userHome ".digipostarkiv"
sToBS :: String -> BS.ByteString
sToBS = T.encodeUtf8 . T.pack
| froden/digipostarkiv | src/Oauth.hs | apache-2.0 | 3,466 | 0 | 14 | 828 | 914 | 467 | 447 | 72 | 4 |
-- | Defines a type class for clausal forms.
module Akarui.FOL.LiteralSign where
import Akarui.ShowTxt
import Akarui.FOL.Symbols
import Akarui.FOL.PrettyPrint
data LiteralSign = Positive | Negative
deriving (Eq, Ord, Show, Read)
instance ShowTxt LiteralSign where
showTxt Positive = "Positive"
showTxt Negative = "Negative"
instance PrettyPrint LiteralSign where
prettyPrint _ Positive = ""
prettyPrint s Negative = symNot s
| PhDP/Manticore | Akarui/FOL/LiteralSign.hs | apache-2.0 | 439 | 0 | 6 | 70 | 109 | 60 | 49 | 12 | 0 |
module Handler.Settings where
import Import
postToggleFeatureR :: Feature -> Handler RepHtml
postToggleFeatureR feature = do
user <- requireAuth
runDB $ toggleUserFeature feature user
redirect TasksR
| samstokes/yesodoro-reboot | Handler/Settings.hs | bsd-2-clause | 208 | 0 | 8 | 33 | 54 | 26 | 28 | 7 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Admin.ProductVariants where
import Admin.Feedback
import Application
import Control.Applicative
import FormUtil
import Snap.Core
import Snap.Snaplet
import Text.Printf
import qualified Data.ByteString.Char8 as C8
import qualified Data.Map as M
import qualified Data.Text.Encoding as DTE
import qualified ShopData.Variant as V
import qualified ShopData.VariantOption as VO
import Snap.Snaplet.Heist.Compiled
handleNewVariant :: Handler App App ()
handleNewVariant = do
vn <- getFormByteString "variant-name" ""
let v = V.Variant {V.variantId = 0,
V.name = DTE.decodeUtf8 vn,
V.adjustsPrice = False,
V.isSearchable = False,
V.options = []}
with db $ V.saveVariant v
infoRedirect "/admin/product_options" "New Variant Group Added"
handleNewVariantOption :: Handler App App ()
handleNewVariantOption = do
nid <- getFormInt "new-variant-option" 0
nvo <- getFormByteString (C8.pack (printf "variant-option-%d" nid)) ""
if nvo == ""
then danger "Please enter a variant option value."
else do with db $ VO.addVariantOption nid (DTE.decodeUtf8 nvo)
info "New Variant Option Added"
redirect "/admin/product_options"
handleDelVariantOption :: Handler App App ()
handleDelVariantOption = do
voId <- getFormInt "del-variant-option-id" 0
with db $ VO.delVariantOption voId
infoRedirect "/admin/product_options" "Variant Option Deleted"
handleDelVariant :: Handler App App ()
handleDelVariant = do
vid <- getFormInt "del-variant-id" 0
with db $ V.delVariant vid
infoRedirect "/admin/product_options" "Variant Group Deleted"
handleProductVariants :: Handler App App ()
handleProductVariants =
method GET handleProductOptGet <|>
method POST handleProductOptPost
where
handleProductOptGet = render "admin/_variants"
handler p
| "del-variant-id" `M.member` p = handleDelVariant
| "del-variant-option-id" `M.member` p = handleDelVariantOption
| "new-variant" `M.member` p = handleNewVariant
| otherwise = handleNewVariantOption
handleProductOptPost = do
params <- getParams
handler params
| rjohnsondev/haskellshop | src/Admin/ProductVariants.hs | bsd-2-clause | 2,465 | 0 | 14 | 677 | 549 | 281 | 268 | 57 | 2 |
{-| Balancing task of the maintenance daemon.
This module carries out the automated balancing done by the
maintenance daemon. The actual balancing algorithm is imported
from htools.
-}
{-
Copyright (C) 2015 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.MaintD.Balance
( balanceTask
) where
import Control.Exception.Lifted (bracket)
import Control.Monad (liftM, unless, when)
import Control.Monad.IO.Class (liftIO)
import Data.IORef (IORef)
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Maybe (mapMaybe, isJust)
import qualified Data.Traversable as Traversable
import System.IO.Error (tryIOError)
import Text.Printf (printf)
import Ganeti.BasicTypes ( ResultT, mkResultT, mkResultT'
, GenericResult(..), Result)
import Ganeti.Cpu.Types (emptyCPUavgload, CPUavgload(..))
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..), defaultOptions)
import qualified Ganeti.HTools.Backend.MonD as MonD
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.Metrics as Metrics
import qualified Ganeti.HTools.Cluster.Utils as ClusterUtils
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import Ganeti.JQueue (currentTimestamp)
import Ganeti.JQueue.Objects (Timestamp)
import Ganeti.Jobs (submitJobs)
import Ganeti.HTools.Types ( zeroUtil, DynUtil(cpuWeight), addUtil, subUtil
, MoveJob)
import Ganeti.Logging.Lifted (logDebug)
import Ganeti.MaintD.MemoryState ( MemoryState, getEvacuated
, addEvacuated, rmEvacuated)
import Ganeti.MaintD.Utils (annotateOpCode)
import qualified Ganeti.Luxi as L
import Ganeti.OpCodes (MetaOpCode)
import qualified Ganeti.Path as Path
import qualified Ganeti.Query.Language as Qlang
import Ganeti.Types (JobId)
import Ganeti.Utils (logAndBad)
-- * Collection of dynamic load data
data AllReports = AllReports { rTotal :: MonD.Report
, rIndividual :: MonD.Report
}
-- | Empty report. It describes an idle node and can be used as
-- default value for nodes marked as offline.
emptyReports :: AllReports
emptyReports = AllReports (MonD.CPUavgloadReport emptyCPUavgload)
(MonD.InstanceCpuReport Map.empty)
-- | Query a node unless it is offline and return all
-- CPU reports. For offline nodes return the empty report.
queryNode :: Node.Node -> ResultT String IO AllReports
queryNode node = do
let getReport dc = mkResultT
. liftM (maybe (Bad $ "Failed collecting "
++ MonD.dName dc
++ " from " ++ Node.name node) Ok
. MonD.mkReport dc)
$ MonD.fromCurl dc node
if Node.offline node
then return emptyReports
else do
total <- getReport MonD.totalCPUCollector
xeninstances <- getReport MonD.xenCPUCollector
return $ AllReports total xeninstances
-- | Get a map with the CPU live data for all nodes; for offline nodes
-- the empty report is guessed.
queryLoad :: Node.List -> ResultT String IO (Container.Container AllReports)
queryLoad = Traversable.mapM queryNode
-- | Ask luxid about the hypervisors used. As, at the moment, we only
-- have specialised CPU collectors for xen, we're only interested in which
-- instances run under the Xen hypervisor.
getXenInstances :: ResultT String IO (Set.Set String)
getXenInstances = do
let query = L.Query (Qlang.ItemTypeOpCode Qlang.QRInstance)
["name", "hypervisor"] Qlang.EmptyFilter
luxiSocket <- liftIO Path.defaultQuerySocket
raw <- bracket (mkResultT . liftM (either (Bad . show) Ok)
. tryIOError $ L.getLuxiClient luxiSocket)
(liftIO . L.closeClient)
$ mkResultT' . L.callMethod query
answer <- L.extractArray raw >>= mapM (mapM L.fromJValWithStatus)
let getXen [name, hv] | hv `elem` ["xen-pvm", "xen-hvm"] = [name]
getXen _ = []
return $ Set.fromList (answer >>= getXen)
-- | Look for an instance in a given report.
findInstanceLoad :: String -> AllReports -> Maybe Double
findInstanceLoad name r | MonD.InstanceCpuReport m <- rIndividual r =
Map.lookup name m
findInstanceLoad _ _ = Nothing
-- | Update the CPU load of one instance based on the reports.
-- Fail if instance CPU load is not (yet) available. However, do
-- accept missing load data for instances on offline nodes, as well
-- as old load data for recently migrated instances.
updateCPUInstance :: Node.List
-> Container.Container AllReports
-> Set.Set String
-> [String]
-> Instance.Instance
-> Result Instance.Instance
updateCPUInstance nl reports xeninsts evacuated inst =
let name = Instance.name inst
nidx = Instance.pNode inst
in if name `Set.member` xeninsts
then let onNodeLoad = findInstanceLoad name (Container.find nidx reports)
allLoads = mapMaybe (findInstanceLoad name)
$ Container.elems reports
in case () of
_ | Just load <- onNodeLoad ->
return $ inst { Instance.util = zeroUtil { cpuWeight = load } }
_ | (load:_) <- allLoads ->
return $ inst { Instance.util = zeroUtil { cpuWeight = load } }
_ | Node.offline $ Container.find nidx nl ->
return $ inst { Instance.util = zeroUtil }
_ | Instance.name inst `elem` evacuated ->
return $ inst { Instance.util = zeroUtil }
_ -> fail $ "Xen CPU data unavailable for " ++ name
else let rep = rTotal $ Container.find nidx reports
in case rep of MonD.CPUavgloadReport (CPUavgload _ _ ndload) ->
let w = ndload * fromIntegral (Instance.vcpus inst)
/ (fromIntegral . Node.uCpu
$ Container.find nidx nl)
in return $ inst { Instance.util =
zeroUtil { cpuWeight = w }}
_ -> fail $ "CPU data unavailable for node of " ++ name
-- | Update CPU usage data based on the collected reports. That is, get the
-- CPU usage of all instances from the reports and also update the nodes
-- accordingly.
updateCPULoad :: (Node.List, Instance.List)
-> Container.Container AllReports
-> Set.Set String
-> [ String ]
-> Result (Node.List, Instance.List)
updateCPULoad (nl, il) reports xeninsts evacuated = do
il' <- Traversable.mapM (updateCPUInstance nl reports xeninsts evacuated) il
let addNodeUtil n delta = n { Node.utilLoad = addUtil (Node.utilLoad n) delta
, Node.utilLoadForth =
addUtil (Node.utilLoadForth n) delta
}
let updateNodeUtil nnl inst_old inst_new =
let delta = subUtil (Instance.util inst_new) $ Instance.util inst_old
nidx = Instance.pNode inst_old
n = Container.find nidx nnl
n' = addNodeUtil n delta
in Container.add nidx n' nnl
let nl' = foldl (\nnl i -> updateNodeUtil nnl (Container.find i il)
$ Container.find i il') nl $ Container.keys il
return (nl', il')
-- | For an instance, given by name, verify if an individual load report is
-- available again.
cleanUpEvacuation :: IORef MemoryState
-> Instance.List
-> Container.Container AllReports
-> String
-> IO ()
cleanUpEvacuation memstate il reports name = do
let insts = filter ((==) name . Instance.name) $ Container.elems il
case insts of
[] -> do
      logDebug $ "Instance " ++ name ++ " no longer on the cluster"
rmEvacuated memstate name
inst:_ -> do
let nidx = Instance.pNode inst
when (isJust . findInstanceLoad name
$ Container.find nidx reports) $ do
logDebug $ "Load data for " ++ name ++ " available again"
rmEvacuated memstate name
-- * Balancing
-- | Transform an instance move into a submittable job.
moveToJob :: Timestamp -> (Node.List, Instance.List) -> MoveJob -> [MetaOpCode]
moveToJob now (nl, il) (_, idx, move, _) =
let opCodes = Cluster.iMoveToJob nl il idx move
in map (annotateOpCode "auto-balancing the cluster" now) opCodes
-- | Iteratively improve a cluster by iterating over tryBalance.
iterateBalance :: AlgorithmOptions
-> Cluster.Table -- ^ the starting table
-> [MoveJob] -- ^ current command list
-> [MoveJob] -- ^ resulting commands
iterateBalance opts ini_tbl cmds =
let Cluster.Table ini_nl ini_il _ _ = ini_tbl
m_next_tbl = Cluster.tryBalance opts ini_tbl
in case m_next_tbl of
Just next_tbl@(Cluster.Table _ _ _ plc@(curplc:_)) ->
let (idx, _, _, move, _) = curplc
plc_len = length plc
(_, cs) = Cluster.printSolutionLine ini_nl ini_il 1 1 curplc plc_len
afn = Cluster.involvedNodes ini_il curplc
cmds' = (afn, idx, move, cs):cmds
in iterateBalance opts next_tbl cmds'
_ -> cmds
-- | List instances evacuated in a move job, if any.
evacuatedInsts :: (Node.List, Instance.List)
-> MoveJob
-> [String]
evacuatedInsts (nl, il) (_, idx, _, _) =
let inst = Container.find idx il
node = Container.find (Instance.pNode inst) nl
in [Instance.name inst | Node.offline node]
-- | Balance a single group, restricted to the allowed nodes and
-- minimal gain.
balanceGroup :: IORef MemoryState
-> Set.Set String
-> L.Client
-> Set.Set Int
-> Double
-> (Int, (Node.List, Instance.List))
-> ResultT String IO [JobId]
balanceGroup memstate xens client allowedNodes threshold (gidx, (nl, il)) = do
logDebug $ printf "Balancing group %d, %d nodes, %d instances." gidx
(Container.size nl) (Container.size il)
let ini_cv = Metrics.compCV nl
ini_tbl = Cluster.Table nl il ini_cv []
opts = defaultOptions { algAllowedNodes = Just allowedNodes
, algMinGain = threshold
, algMinGainLimit = 10 * threshold
}
cmds = iterateBalance opts ini_tbl []
tasks = take 1 $ Cluster.splitJobs cmds
logDebug $ "First task group: " ++ show tasks
now <- liftIO currentTimestamp
let jobs = tasks >>= map (moveToJob now (nl, il))
evacs = filter (`Set.member` xens)
(concat tasks >>= evacuatedInsts (nl, il))
if null jobs
then return []
else do
unless (null evacs) $ do
logDebug $ "Evacuation of instances " ++ show evacs
liftIO $ addEvacuated memstate evacs
jids <- liftIO $ submitJobs jobs client
case jids of
Bad e -> mkResultT . logAndBad
$ "Failure submitting balancing jobs: " ++ e
Ok jids' -> return jids'
-- * Interface function
-- | Carry out all the needed balancing, based on live CPU data, only touching
-- the available nodes. Only carry out balancing steps where the gain is above
-- the threshold.
balanceTask :: IORef MemoryState
-> (Node.List, Instance.List) -- ^ current cluster configuration
-> Set.Set Int -- ^ node indices on which actions may be taken
-> Double -- ^ threshold for improvement
-> ResultT String IO [JobId] -- ^ jobs submitted
balanceTask memstate (nl, il) okNodes threshold = do
logDebug "Collecting dynamic load values"
evacuated <- getEvacuated memstate
logDebug $ "Not expecting load data from: " ++ show evacuated
reports <- queryLoad nl
xenInstances <- getXenInstances
(nl', il') <- mkResultT . return
$ updateCPULoad (nl, il) reports xenInstances evacuated
liftIO $ mapM_ (cleanUpEvacuation memstate il reports) evacuated
let ngroups = ClusterUtils.splitCluster nl' il'
luxiSocket <- liftIO Path.defaultQuerySocket
bracket (liftIO $ L.getLuxiClient luxiSocket) (liftIO . L.closeClient) $ \c ->
liftM concat $ mapM (balanceGroup memstate xenInstances c okNodes threshold)
ngroups
| bitemyapp/ganeti | src/Ganeti/MaintD/Balance.hs | bsd-2-clause | 13,766 | 0 | 22 | 3,750 | 3,108 | 1,630 | 1,478 | 222 | 7 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGraphicsSceneWheelEvent.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:24
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QGraphicsSceneWheelEvent (
setDelta
,qGraphicsSceneWheelEvent_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Enums.Core.QEvent
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance Qbuttons (QGraphicsSceneWheelEvent a) (()) (IO (MouseButtons)) where
buttons x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_buttons cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_buttons" qtc_QGraphicsSceneWheelEvent_buttons :: Ptr (TQGraphicsSceneWheelEvent a) -> IO CLong
instance Qdelta (QGraphicsSceneWheelEvent a) (()) where
delta x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_delta cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_delta" qtc_QGraphicsSceneWheelEvent_delta :: Ptr (TQGraphicsSceneWheelEvent a) -> IO CInt
instance Qmodifiers (QGraphicsSceneWheelEvent a) (()) where
modifiers x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_modifiers cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_modifiers" qtc_QGraphicsSceneWheelEvent_modifiers :: Ptr (TQGraphicsSceneWheelEvent a) -> IO CLong
instance Qorientation (QGraphicsSceneWheelEvent a) (()) (IO (QtOrientation)) where
orientation x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_orientation cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_orientation" qtc_QGraphicsSceneWheelEvent_orientation :: Ptr (TQGraphicsSceneWheelEvent a) -> IO CLong
instance Qpos (QGraphicsSceneWheelEvent a) (()) (IO (PointF)) where
pos x0 ()
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_pos_qth cobj_x0 cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_pos_qth" qtc_QGraphicsSceneWheelEvent_pos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqpos (QGraphicsSceneWheelEvent a) (()) (IO (QPointF ())) where
qpos x0 ()
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_pos cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_pos" qtc_QGraphicsSceneWheelEvent_pos :: Ptr (TQGraphicsSceneWheelEvent a) -> IO (Ptr (TQPointF ()))
instance QscenePos (QGraphicsSceneWheelEvent a) (()) where
scenePos x0 ()
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_scenePos_qth cobj_x0 cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_scenePos_qth" qtc_QGraphicsSceneWheelEvent_scenePos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr CDouble -> Ptr CDouble -> IO ()
instance QqscenePos (QGraphicsSceneWheelEvent a) (()) where
qscenePos x0 ()
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_scenePos cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_scenePos" qtc_QGraphicsSceneWheelEvent_scenePos :: Ptr (TQGraphicsSceneWheelEvent a) -> IO (Ptr (TQPointF ()))
instance QscreenPos (QGraphicsSceneWheelEvent a) (()) where
screenPos x0 ()
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_screenPos_qth cobj_x0 cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_screenPos_qth" qtc_QGraphicsSceneWheelEvent_screenPos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QqscreenPos (QGraphicsSceneWheelEvent a) (()) where
qscreenPos x0 ()
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_screenPos cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_screenPos" qtc_QGraphicsSceneWheelEvent_screenPos :: Ptr (TQGraphicsSceneWheelEvent a) -> IO (Ptr (TQPoint ()))
instance QsetButtons (QGraphicsSceneWheelEvent a) ((MouseButtons)) where
setButtons x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_setButtons cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setButtons" qtc_QGraphicsSceneWheelEvent_setButtons :: Ptr (TQGraphicsSceneWheelEvent a) -> CLong -> IO ()
setDelta :: QGraphicsSceneWheelEvent a -> ((Int)) -> IO ()
setDelta x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_setDelta cobj_x0 (toCInt x1)
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setDelta" qtc_QGraphicsSceneWheelEvent_setDelta :: Ptr (TQGraphicsSceneWheelEvent a) -> CInt -> IO ()
instance QsetModifiers (QGraphicsSceneWheelEvent a) ((KeyboardModifiers)) where
setModifiers x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_setModifiers cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setModifiers" qtc_QGraphicsSceneWheelEvent_setModifiers :: Ptr (TQGraphicsSceneWheelEvent a) -> CLong -> IO ()
instance QsetOrientation (QGraphicsSceneWheelEvent a) ((QtOrientation)) where
setOrientation x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_setOrientation cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setOrientation" qtc_QGraphicsSceneWheelEvent_setOrientation :: Ptr (TQGraphicsSceneWheelEvent a) -> CLong -> IO ()
instance QsetPos (QGraphicsSceneWheelEvent a) ((PointF)) where
setPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QGraphicsSceneWheelEvent_setPos_qth cobj_x0 cpointf_x1_x cpointf_x1_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setPos_qth" qtc_QGraphicsSceneWheelEvent_setPos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> CDouble -> CDouble -> IO ()
instance QqsetPos (QGraphicsSceneWheelEvent a) ((QPointF t1)) where
qsetPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneWheelEvent_setPos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setPos" qtc_QGraphicsSceneWheelEvent_setPos :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr (TQPointF t1) -> IO ()
instance QsetScenePos (QGraphicsSceneWheelEvent a) ((PointF)) where
setScenePos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QGraphicsSceneWheelEvent_setScenePos_qth cobj_x0 cpointf_x1_x cpointf_x1_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setScenePos_qth" qtc_QGraphicsSceneWheelEvent_setScenePos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> CDouble -> CDouble -> IO ()
instance QqsetScenePos (QGraphicsSceneWheelEvent a) ((QPointF t1)) where
qsetScenePos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneWheelEvent_setScenePos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setScenePos" qtc_QGraphicsSceneWheelEvent_setScenePos :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr (TQPointF t1) -> IO ()
instance QsetScreenPos (QGraphicsSceneWheelEvent a) ((Point)) where
setScreenPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QGraphicsSceneWheelEvent_setScreenPos_qth cobj_x0 cpoint_x1_x cpoint_x1_y
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setScreenPos_qth" qtc_QGraphicsSceneWheelEvent_setScreenPos_qth :: Ptr (TQGraphicsSceneWheelEvent a) -> CInt -> CInt -> IO ()
instance QqsetScreenPos (QGraphicsSceneWheelEvent a) ((QPoint t1)) where
qsetScreenPos x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QGraphicsSceneWheelEvent_setScreenPos cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsSceneWheelEvent_setScreenPos" qtc_QGraphicsSceneWheelEvent_setScreenPos :: Ptr (TQGraphicsSceneWheelEvent a) -> Ptr (TQPoint t1) -> IO ()
qGraphicsSceneWheelEvent_delete :: QGraphicsSceneWheelEvent a -> IO ()
qGraphicsSceneWheelEvent_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QGraphicsSceneWheelEvent_delete cobj_x0
foreign import ccall "qtc_QGraphicsSceneWheelEvent_delete" qtc_QGraphicsSceneWheelEvent_delete :: Ptr (TQGraphicsSceneWheelEvent a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QGraphicsSceneWheelEvent.hs | bsd-2-clause | 8,798 | 0 | 12 | 1,144 | 2,168 | 1,107 | 1,061 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QSettings.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Core.QSettings (
QSettingsFormat, eIniFormat
,Scope, eUserScope, eSystemScope
,Status, eAccessError
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CQSettingsFormat a = CQSettingsFormat a
type QSettingsFormat = QEnum(CQSettingsFormat Int)
ieQSettingsFormat:: Int -> QSettingsFormat
ieQSettingsFormat x = QEnum (CQSettingsFormat x)
instance QEnumC (CQSettingsFormat Int) where
qEnum_toInt (QEnum (CQSettingsFormat x)) = x
qEnum_fromInt x = QEnum (CQSettingsFormat x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QSettingsFormat -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
data CScope a = CScope a
type Scope = QEnum(CScope Int)
ieScope:: Int -> Scope
ieScope x = QEnum (CScope x)
instance QEnumC (CScope Int) where
qEnum_toInt (QEnum (CScope x)) = x
qEnum_fromInt x = QEnum (CScope x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> Scope -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
data CStatus a = CStatus a
type Status = QEnum(CStatus Int)
ieStatus:: Int -> Status
ieStatus x = QEnum (CStatus x)
instance QEnumC (CStatus Int) where
qEnum_toInt (QEnum (CStatus x)) = x
qEnum_fromInt x = QEnum (CStatus x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> Status -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeNativeFormat QSettingsFormat where
eNativeFormat
= ieQSettingsFormat $ 0
eIniFormat :: QSettingsFormat
eIniFormat
= ieQSettingsFormat $ 1
instance QeInvalidFormat QSettingsFormat where
eInvalidFormat
= ieQSettingsFormat $ 16
eUserScope :: Scope
eUserScope
= ieScope $ 0
eSystemScope :: Scope
eSystemScope
= ieScope $ 1
instance QeNoError Status where
eNoError
= ieStatus $ 0
eAccessError :: Status
eAccessError
= ieStatus $ 1
eFormatError :: Status
eFormatError
= ieStatus $ 2
| uduki/hsQt | Qtc/Enums/Core/QSettings.hs | bsd-2-clause | 5,763 | 0 | 18 | 1,378 | 1,658 | 816 | 842 | 145 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
module Language.JsonGrammar.Parser (parseValue) where
import Language.JsonGrammar.Grammar
import Language.JsonGrammar.Util
import Control.Applicative ((<$>))
import Control.Monad ((>=>), unless)
import Data.Aeson (Object, Array, withObject, (.:), withArray)
import Data.Aeson.Types (Parser, typeMismatch)
import Data.Monoid ((<>))
import qualified Data.Vector as V
-- | Convert a 'Grammar' to a JSON 'Parser'.
parseValue :: Grammar Val t1 t2 -> t1 -> Parser t2
parseValue = \case
Id -> return
g1 :. g2 -> parseValue g2 >=> parseValue g1
Empty -> \_ -> fail "empty grammar"
g1 :<> g2 -> parseValue g1 <> parseValue g2
Pure f _ -> f
Many g -> manyM (parseValue g)
Literal val -> \(val' :- t) ->
if val == val'
then return t
else typeMismatch "literal" val'
Label _ g -> parseValue g
Object g -> \(val :- x) ->
withObject "object" (\obj -> parseProperties obj g x) val
Array g -> \(val :- x) -> do
(arr', y) <- withArray "array" (\arr -> parseElements g (arr, x)) val
unless (V.null arr') $ typeMismatch "end of array" (V.head arr')
return y
Coerce _ g -> parseValue g
parseProperties :: Object -> Grammar Obj t1 t2 -> t1 -> Parser t2
parseProperties obj = \case
Id -> return
g1 :. g2 -> parseProperties obj g2 >=> parseProperties obj g1
Empty -> \_ -> fail "empty grammar"
g1 :<> g2 -> parseProperties obj g1 <> parseProperties obj g2
Pure f _ -> f
Many g -> manyM (parseProperties obj g)
Property n g -> \x -> do
val <- obj .: n
parseValue g (val :- x)
parseElements :: Grammar Arr t1 t2 -> (Array, t1) -> Parser (Array, t2)
parseElements = \case
Id -> return
g1 :. g2 -> parseElements g2 >=> parseElements g1
Empty -> \_ -> fail "empty grammar"
g1 :<> g2 -> parseElements g1 <> parseElements g2
Pure f _ -> \(arr, x) -> (arr, ) <$> f x
Many g -> manyM (parseElements g)
Element g -> \(arr, x) ->
if V.null arr
then fail "expected at least one more array element"
else do
y <- parseValue g (V.last arr :- x)
return (V.init arr, y)
| edsko/JsonGrammar2 | src/Language/JsonGrammar/Parser.hs | bsd-3-clause | 2,255 | 0 | 17 | 578 | 850 | 436 | 414 | 58 | 12 |
module MAAM.Instances.AAM where
import FP
import MAAM.Classes.AAM
import MAAM.Instances.Temporal
-- Concrete {{{
data Cμ = Cμ
cμ :: P Cμ
cμ = P
instance AAM Cμ where
type LexicalTemporal Cμ = Cτ
type DynamicTemporal Cμ = Cτ
lexical Cμ = Cτ
dynamic Cμ = Cτ
-- }}}
-- 0CFA {{{
data ZCFAμ = ZCFAμ
zCFAμ :: P ZCFAμ
zCFAμ = P
instance AAM ZCFAμ where
type LexicalTemporal ZCFAμ = Zτ
type DynamicTemporal ZCFAμ = Zτ
lexical ZCFAμ = Zτ
dynamic ZCFAμ = Zτ
-- }}}
-- kCFA {{{
data KCFAμ = KCFAμ Int
kCFAμ :: P KCFAμ
kCFAμ = P
instance AAM KCFAμ where
type LexicalTemporal KCFAμ = Zτ
type DynamicTemporal KCFAμ = Kτ
lexical (KCFAμ _) = Zτ
dynamic (KCFAμ k) = Kτ k
-- }}}
-- k-object-sensitive {{{
data KOSμ = KOSμ Int
kOSμ :: P KOSμ
kOSμ = P
instance AAM KOSμ where
type LexicalTemporal KOSμ = Kτ
type DynamicTemporal KOSμ = Zτ
lexical (KOSμ k) = Kτ k
dynamic (KOSμ _) = Zτ
-- }}}
-- Hybrid k-call-site + k-object-sensitive {{{
data KHybridμ = KHybridμ Int Int
kHybridμ :: P KHybridμ
kHybridμ = P
instance AAM KHybridμ where
type LexicalTemporal KHybridμ = Kτ
type DynamicTemporal KHybridμ = Kτ
lexical (KHybridμ lk _) = Kτ lk
dynamic (KHybridμ _ dk) = Kτ dk
-- }}}
| davdar/quals | src/MAAM/Instances/AAM.hs | bsd-3-clause | 1,287 | 5 | 8 | 279 | 558 | 288 | 270 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts, GADTs #-}
module MateVMRuntime.ClassPool (
getClassInfo,
getClassInfoNoInit,
classLoaded,
getClassFile,
getMethodTable,
getMethodTableNoInit,
getMethodTableReverse,
getObjectSize,
getFieldCount,
getStaticFieldTypes,
getFieldTypes,
getMethodOffset,
getFieldOffset,
getFieldSignatures,
getStaticFieldAddr,
getInterfaceMethodOffset,
addClassPath,
addClassPathJAR
) where
import Data.Int
import Data.Binary
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List
import qualified Data.ByteString.Lazy as B
import Data.String.Utils
import Control.Arrow
import Control.Applicative
import Control.Monad
-- import JVM.Dump
import Foreign.Ptr
import Foreign.C.Types
import Foreign.Storable
import Data.IORef
import System.IO.Unsafe
import System.Directory
import JVM.ClassFile
import JVM.Converter
import Java.ClassPath hiding (Directory)
import Java.JAR
import {-# SOURCE #-} MateVMRuntime.MethodPool
import MateVMRuntime.Types
import MateVMRuntime.Debug
import {-# SOURCE #-} MateVMRuntime.GarbageAlloc
import MateVMRuntime.NativeSizes
import {-# SOURCE #-} MateVMRuntime.ClassHierarchy
getClassInfo :: B.ByteString -> IO ClassInfo
getClassInfo path = do
class_map <- getClassMap
case M.lookup path class_map of
Nothing -> loadAndInitClass path
Just ci -> return ci
getClassInfoNoInit :: B.ByteString -> IO ClassInfo
getClassInfoNoInit path = do
class_map <- getClassMap
case M.lookup path class_map of
Nothing -> loadClassNoInit path
Just ci -> return ci
classLoaded :: B.ByteString -> IO Bool
classLoaded path = do
class_map <- getClassMap
return $ M.member path class_map
getClassFile :: B.ByteString -> IO (Class Direct)
getClassFile path = do
ci <- getClassInfo path
return $ ciFile ci
getStaticFieldOffset :: B.ByteString -> B.ByteString -> IO CPtrdiff
getStaticFieldOffset path field = do
ci <- getClassInfo path
return $ fromIntegral $ ciStaticMap ci M.! field
getFieldOffset :: B.ByteString -> B.ByteString -> IO Int32
getFieldOffset path field = do
ci <- getClassInfo path
return $ ciFieldMap ci M.! field
-- class name, methodname, methodsignature
getMethodOffset :: B.ByteString -> B.ByteString -> B.ByteString -> IO NativeWord
getMethodOffset path method sig = do
ci <- getClassInfo path
-- (+ ptrSize): one slot for "interface-table-ptr"
return $ (+ ptrSize) $ fromIntegral $
ciMethodMap ci M.! (method `B.append` sig)
getMethodTableNoInit :: B.ByteString -> IO NativeWord
getMethodTableNoInit path = do
ci <- getClassInfoNoInit path
return $ ciMethodBase ci
getMethodTable :: B.ByteString -> IO NativeWord
getMethodTable path = do
ci <- getClassInfo path
return $ ciMethodBase ci
getMethodTableReverse :: NativeWord -> IO (Maybe B.ByteString)
getMethodTableReverse mtable = do
class_map <- getClassMap
let f x = ciMethodBase x == mtable
return $ ciName <$> find f (M.elems class_map)
getObjectSize :: B.ByteString -> IO NativeWord
getObjectSize path = do
fsize <- getFieldCount path
-- one slot for "method-table-ptr"
-- one slot for GC-data
return $ (2 + fsize) * ptrSize
getFieldCount :: B.ByteString -> IO NativeWord
getFieldCount path = do
ci <- getClassInfo path
-- TODO(bernhard): correct sizes for different types...
return $ ciFieldLength ci
-- TODO: not implemented yet. will return empty map!
getStaticFieldTypes :: B.ByteString -> IO [(Int32, FieldSignature)]
getStaticFieldTypes path = do
ci <- getClassInfo path
return $ map (second fieldSignature) $ M.toList (ciStaticFieldTypeMap ci)
-- TODO: not very well tested
getFieldTypes :: B.ByteString -> IO [(Int32, FieldSignature)]
getFieldTypes path = do
ci <- getClassInfo path
return $ map (second fieldSignature) $ M.toList (ciFieldTypeMap ci)
getFieldSignatures :: FieldTypeMap -> [(Int32, FieldSignature)]
getFieldSignatures m = map (second fieldSignature) $ M.toList m
getStaticFieldAddr :: CPtrdiff -> IO CPtrdiff
getStaticFieldAddr from = do
trapmap <- getTrapMap
let w32_from = fromIntegral from
let sfi = trapmap M.! w32_from
setTrapMap $ M.delete w32_from trapmap
case sfi of
(StaticField (StaticFieldInfo cls field)) -> getStaticFieldOffset cls field
_ -> error "getFieldAddr: no TrapCause found. abort"
-- interface name, methodname, methodsignature
getInterfaceMethodOffset :: B.ByteString -> B.ByteString -> B.ByteString -> IO NativeWord
getInterfaceMethodOffset ifname meth sig = do
loadInterface ifname
ifmmap <- getInterfaceMethodMap
case M.lookup (ifname `B.append` meth `B.append` sig) ifmmap of
Just w32 -> return w32
Nothing -> error "getInterfaceMethodOffset: no offset set"
readClass :: B.ByteString -> IO ClassInfo
readClass path = do
class_map' <- getClassMap
case M.lookup path class_map' of
Just cm -> return cm
Nothing -> do
cfile <- readClassFile $ toString path
-- TODO(bernhard): hDumpClass
-- dumpClass cfile
-- load all interfaces, which are implemented by this class
sequence_ [ loadInterface i | i <- interfaces cfile ]
superclass <- if path /= "java/lang/Object"
then do
sc <- readClass $ superClass cfile
return $ Just sc
else return Nothing
((staticmap, statictypemap), (fieldmap, fieldtypemap, fsize)) <-
calculateFields cfile superclass
(methodmap, mbase, msize) <- calculateMethodMap cfile superclass
immap <- getInterfaceMethodMap
-- allocate interface offset table for this class
-- TODO(bernhard): we have some duplicates in immap (i.e. some
-- entries have the same offset), so we could
-- save some memory here.
iftable <- mallocClassData $ ptrSize * M.size immap
let wn_iftable = fromIntegral $ ptrToIntPtr iftable :: NativeWord
-- store interface-table at offset 0 in method-table
pokeElemOff (intPtrToPtr $ fromIntegral mbase) 0 wn_iftable
let hexDumpMap :: Integral v => String -> M.Map B.ByteString v -> IO ()
hexDumpMap header mmap = do
let printValue :: B.ByteString -> IO ()
printValue key = printfCp $ printf "\t%-70s: 0x%08x\n" (toString key) val
where val = fromIntegral (mmap M.! key) :: NativeWord
printfCp $ printf "%s\n" header
mapM_ printValue (M.keys mmap)
when mateDEBUG $ do
let strpath = toString path
hexDumpMap ("staticmap @ " ++ strpath) staticmap
hexDumpMap ("fieldmap @ " ++ strpath) fieldmap
hexDumpMap ("methodmap @ " ++ strpath) methodmap
hexDumpMap ("interfacemap @ " ++ strpath) immap
printfCp $ printf "mbase: 0x%08x\n" mbase
printfCp $ printf "iftable: 0x%08x\n" wn_iftable
virtual_map <- getVirtualMap
setVirtualMap $ M.insert mbase path virtual_map
class_map <- getClassMap
let new_ci = ClassInfo
{ ciName = path
, ciFile = cfile
, ciStaticMap = staticmap
, ciStaticFieldTypeMap = statictypemap
, ciFieldMap = fieldmap
, ciFieldTypeMap = fieldtypemap
, ciFieldLength = fsize
, ciMethodMap = methodmap
, ciMethodBase = mbase
, ciMethodLength = msize
, ciInitDone = False
}
setClassMap $ M.insert path new_ci class_map
-- add Class to Hierarchy
super_mtable <- case superclass of
Nothing -> return 0
Just x -> getMethodTable $ ciName x
addClassEntry mbase super_mtable (interfaces cfile)
return new_ci
loadInterface :: B.ByteString -> IO ()
loadInterface path = do
imap <- getInterfaceMap
-- interface already loaded?
case M.lookup path imap of
Just _ -> return ()
Nothing -> do
printfCp $ printf "interface: loading \"%s\"\n" $ toString path
cfile <- readClassFile $ toString path
-- load "superinterfaces" first
sequence_ [ loadInterface i | i <- interfaces cfile ]
immap <- getInterfaceMethodMap
-- load map again, because there could be new entries now
-- due to loading superinterfaces
imap' <- getInterfaceMap
let max_off = fromIntegral $ M.size immap * ptrSize
-- create index of methods by this interface
let mm = zipbase max_off (classMethods cfile)
      -- also create an entry for each method of *every* superinterface,
      -- but reuse the offset it already has in the map
let (ifnames, methodnames) = unzip $ concat
[ zip (repeat ifname) (classMethods $ imap' M.! ifname)
| ifname <- interfaces cfile ]
let sm = zipWith (\x y -> (entry y, immap M.! getname x y)) ifnames methodnames
-- merge all offset tables
setInterfaceMethodMap $ M.fromList sm `M.union` M.fromList mm `M.union` immap
setInterfaceMap $ M.insert path cfile imap'
-- add Interface to Hierarchy
addInterfaceEntry path (interfaces cfile)
where
zipbase base = zipWith (\x y -> (entry y, x + base)) [0,ptrSize..]
entry = getname path
getname p y = p `B.append` methodName y `B.append` encode (methodSignature y)
calculateFields :: Class Direct -> Maybe ClassInfo
-> IO ((FieldMap, FieldTypeMap), (FieldMap, FieldTypeMap, NativeWord))
calculateFields cf superclass = do
-- TODO(bernhard): correct sizes. int only atm
let (sfields, ifields) = partition (S.member ACC_STATIC . fieldAccessFlags) (classFields cf)
let sc_sm = getsupermap superclass ciStaticMap
let sfields_size = fromIntegral $ length sfields
let statictypemap = zipbasetype (fromIntegral sfields_size) sfields
staticbase <- mallocStaticData (sfields_size * ptrSize) statictypemap
let sm = zipbase (fromIntegral $ ptrToIntPtr staticbase) sfields
-- new fields "overwrite" old ones, if they have the same name
let staticmap = sm `M.union` sc_sm
let sc_im = getsupermap superclass ciFieldMap
let sc_imtype = getsupermap superclass ciFieldTypeMap
let sc_size :: Num a => a
sc_size = case superclass of
Just x -> fromIntegral $ ciFieldLength x
Nothing -> 0
  -- "+ (2*ptrSize)" for the method table pointer and GC data
let max_off = (+ (2*ptrSize)) $ sc_size * ptrSize
let im = zipbase max_off ifields
let imtype = zipbasetype max_off ifields
-- new fields "overwrite" old ones, if they have the same name
let fieldmap = im `M.union` sc_im
let fieldtypemap = imtype `M.union` sc_imtype
let fsize = sc_size + fromIntegral (M.size im)
return ((staticmap, statictypemap), (fieldmap, fieldtypemap, fsize))
where
zipbase :: Int32 -> [Field Direct] -> FieldMap
zipbase base = foldr (\(x,y) -> M.insert (fieldName y) (x + base)) M.empty . zip [0,ptrSize..]
zipbasetype :: Int32 -> [Field Direct] -> FieldTypeMap
    zipbasetype base = foldr (\(x,y) -> M.insert (x + base) y) M.empty . zip [0,ptrSize..]
-- helper
getsupermap :: Maybe ClassInfo -> (ClassInfo -> M.Map k v) -> M.Map k v
getsupermap superclass getter = case superclass of Just x -> getter x; Nothing -> M.empty
calculateMethodMap :: Class Direct -> Maybe ClassInfo -> IO (FieldMap, NativeWord, NativeWord)
calculateMethodMap cf superclass = do
let methods = filter
(\x -> (not . S.member ACC_STATIC . methodAccessFlags) x &&
((/=) "<init>" . methodName) x)
(classMethods cf)
let sc_mm = getsupermap superclass ciMethodMap
let sc_size :: Num a => a
sc_size = case superclass of
Just x -> fromIntegral $ ciMethodLength x
Nothing -> 0
let max_off = sc_size * ptrSize
let mm = M.fromList $ zipbase max_off methods
let methodmap = mm `M.union` sc_mm
let size = M.size sc_mm + sc_size
-- (+1): one slot for the interface-table-ptr
methodbase <- mallocClassData $ (size + 1) * ptrSize
return ( methodmap
, fromIntegral $ ptrToIntPtr methodbase
, fromIntegral $ size)
where zipbase base = zipWith (\x y -> (entry y, x + base)) [0,ptrSize..]
where entry y = methodName y `B.append` encode (methodSignature y)
loadClassNoInit :: B.ByteString -> IO ClassInfo
loadClassNoInit path = do
class_map <- getClassMap
ci <- case M.lookup path class_map of
Nothing -> readClass path
Just x -> return x
when (path /= "java/lang/Object") (void $ loadClassNoInit $ superClass $ ciFile ci)
class_map' <- getClassMap
setClassMap $ M.insert path ci class_map'
return ci
loadAndInitClass :: B.ByteString -> IO ClassInfo
loadAndInitClass path = do
class_map <- getClassMap
ci <- case M.lookup path class_map of
Nothing -> readClass path
Just x -> return x
-- first try to execute class initializer of superclass
when (path /= "java/lang/Object") (void $ loadAndInitClass $ superClass $ ciFile ci)
-- execute class initializer
unless (ciInitDone ci) $ case lookupMethod "<clinit>" (ciFile ci) of
Just _ -> do
let mi = MethodInfo "<clinit>" path $ MethodSignature [] ReturnsVoid
entry <- lookupMethodEntry mi
      -- TODO(bernhard): test exception handling in static initializer
printfCp $ printf "executing static initializer from %s now\n" (toString path)
executeFuncPtr (fromIntegral entry)
printfCp $ printf "static initializer from %s done\n" (toString path)
Nothing -> return ()
class_map' <- getClassMap
let new_ci = ci { ciInitDone = True }
setClassMap $ M.insert path new_ci class_map'
return new_ci
readClassFile :: String -> IO (Class Direct)
readClassFile path' = readIORef classPaths >>= rcf
where
path = replace "." "/" path'
rcf :: [MClassPath] -> IO (Class Direct)
rcf [] = readIORef classPaths >>= \cp -> error $ "readClassFile: Class \"" ++ show path ++ "\" not found." ++ "\n" ++ show cp
rcf (Directory pre:xs) = do
let cf = pre ++ path ++ ".class"
printfCp $ printf "rcf: searching @ %s for %s\n" (show pre) (show path)
b <- doesFileExist cf
if b
then parseClassFile cf
else rcf xs
rcf (JAR p:xs) = do
printfCp $ printf "rcf: searching %s in JAR\n" (show path)
entry <- getEntry p path
case entry of
Just (LoadedJAR _ cls) -> return cls
Nothing -> rcf xs
_ -> error $ "readClassFile: Class \"" ++ show path ++ "\" in JAR not found. #1"
data MClassPath =
Directory String |
JAR [Tree CPEntry]
deriving (Show)
classPaths :: IORef [MClassPath]
{-# NOINLINE classPaths #-}
classPaths = unsafePerformIO $ newIORef []
addClassPath :: String -> IO ()
addClassPath x = do
cps <- readIORef classPaths
writeIORef classPaths (Directory x:cps)
addClassPathJAR :: String -> IO ()
addClassPathJAR x = do
cps <- readIORef classPaths
t <- execClassPath $ addJAR x
writeIORef classPaths (JAR t:cps)
| LouisJenkinsCS/Minimal-JVM | MateVMRuntime/ClassPool.hs | bsd-3-clause | 15,075 | 0 | 24 | 3,588 | 4,351 | 2,141 | 2,210 | 319 | 6 |
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import System.IO (stderr, hPutStrLn)
import Text.Read (readMaybe)
import System.Linux.Btrfs
main :: IO ()
main = do
args <- getArgs
case args of
[srcPath, srcOffS, srcLenS, dstPath, dstOffS]
| Just srcOff <- readMaybe srcOffS
, Just srcLen <- readMaybe srcLenS
, Just dstOff <- readMaybe dstOffS ->
cloneRange srcPath srcOff srcLen dstPath dstOff
_ -> do
prog <- getProgName
hPutStrLn stderr "Invalid command line arguments"
hPutStrLn stderr $
"Usage: " ++ prog ++
" SOURCE SOURCE_OFF SOURCE_LEN DEST DEST_OFF"
exitFailure
| redneb/hs-btrfs | examples/btrfs-clone-range.hs | bsd-3-clause | 763 | 0 | 15 | 244 | 200 | 100 | 100 | 21 | 2 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE RankNTypes #-}
module Data.Bitransversable (
Bitransversable(..)
, traverseDefault
) where
import Bound.Scope (Scope, bitransverseScope)
class Bitransversable s where
bitransverse :: Applicative f => (forall a b. (a -> f b) -> t a -> f (u b)) -> (c -> f d) -> s t c -> f (s u d)
instance Bitransversable (Scope b) where
bitransverse = bitransverseScope
traverseDefault :: (Applicative f, Traversable r, Bitransversable t) => (a -> f b) -> t r a -> f (t r b)
traverseDefault = bitransverse traverse
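-- A typical use of 'traverseDefault' (illustrative sketch, the type name is
-- hypothetical): given @instance Bitransversable TyF@, a 'Traversable'
-- instance for @TyF r@ can be written as @traverse = traverseDefault@
-- whenever @r@ is itself 'Traversable'.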
| dalaing/type-systems | src/Data/Bitransversable.hs | bsd-3-clause | 680 | 0 | 15 | 132 | 218 | 114 | 104 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Smoke.Gen.Classes (
generateSmokeClassesModule,
classesModuleImport
) where
import Control.Monad.Reader
import Data.Monoid
import Data.Text ( Text )
import qualified Data.Text as T
import Language.Haskell.Exts.Pretty
import Language.Haskell.Exts.Syntax
import Smoke.C
import Smoke.Gen.Monad
import Smoke.Gen.Util
generateSmokeClassesModule :: SmokeModule -> Gen ()
generateSmokeClassesModule m = do
modName <- classesModuleName
file <- moduleToFilePath modName
let cs = filter (not . skipClass) (smokeModuleClasses m)
decls <- mapM toClassDecl cs
let mname = ModuleName $ T.unpack modName
ptrImport = ImportDecl dummyLoc (ModuleName "Foreign.Ptr") False False Nothing Nothing Nothing
hmod = Module dummyLoc mname [] Nothing Nothing [ptrImport] decls
lift $ writeFile file (prettyPrint hmod)
-- FIXME: If inner classes are a problem here, do not generate them in
-- this module. Just generate them in the module with their
-- definition since external modules will not be relying on them
toClassDecl :: SmokeClass -> Gen Decl
toClassDecl c = do
mangler <- askModuleConf generatorClassNameMangler
let baseName = dropOuterClass $ smokeClassName c
cname = Ident $ T.unpack $ mangler baseName
unpacker = "unpack" `mappend` baseName
tvname = Ident "a"
utype = TyFun (TyVar tvname) (TyApp (TyCon (UnQual (Ident "Ptr"))) unit_tycon)
f = ClsDecl $ TypeSig dummyLoc [Ident (T.unpack unpacker)] utype
return $ ClassDecl dummyLoc [] cname [UnkindedVar tvname] [] [f]
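-- Strip a leading outer-class qualifier; e.g. (illustrative)
-- @dropOuterClass "Outer::Inner" == "Inner"@, while names without "::"
-- are returned unchanged.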
dropOuterClass :: Text -> Text
dropOuterClass s
| T.isInfixOf "::" s = T.dropWhile (==':') $ T.dropWhile (/=':') s
| otherwise = s
dummyLoc :: SrcLoc
dummyLoc = SrcLoc "" 0 0
classesModuleName :: Gen Text
classesModuleName = do
moduleName <- askModuleName
modNameMap <- askModuleConf generatorModuleNameMap
return $ modNameMap moduleName `mappend` "Classes"
classesModuleImport :: Gen ImportDecl
classesModuleImport = do
hmname <- classesModuleName
return $ ImportDecl dummyLoc (ModuleName (T.unpack hmname)) False False Nothing Nothing Nothing
| travitch/humidor | src/Smoke/Gen/Classes.hs | bsd-3-clause | 2,131 | 0 | 18 | 367 | 614 | 314 | 300 | 48 | 1 |
-- http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=ITP1_3_C
-- Swapping Two Numbers
-- input:
-- 3 2
-- 2 2
-- 5 3
-- 0 0
-- output:
-- 2 3
-- 2 2
-- 3 5
import Control.Applicative
import qualified Control.Monad as Monad
main = loopPutNumbers
loopPutNumbers :: IO ()
loopPutNumbers = do
[x,y] <- map (read :: String -> Int) . words <$> getLine
Monad.when (x /= 0 || y /= 0) $ do
putStrLn $ if (x < y)
then unwords $ show <$> [x,y]
else unwords $ show <$> [y,x]
loopPutNumbers
| ku00/aoj-haskell | src/ITP1_3_C.hs | bsd-3-clause | 554 | 0 | 13 | 158 | 159 | 91 | 68 | 11 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Module : Instrument.Measurement
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
-- (c) 2012, Ozgun Ataman
--
-- License : BSD-style
module Instrument.Measurement
( getTime,
time,
time_,
timeEx,
)
where
-------------------------------------------------------------------------------
import Control.Exception (SomeException)
import Control.Exception.Safe (MonadCatch, tryAny)
import Control.Monad.IO.Class
import Data.Time.Clock.POSIX (getPOSIXTime)
-------------------------------------------------------------------------------
-- | Measure how long an action took, in seconds, along with its result
time :: MonadIO m => m a -> m (Double, a)
time act = do
start <- liftIO getTime
!result <- act
end <- liftIO getTime
let !delta = end - start
return (delta, result)
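-- A possible call site for 'time' (sketch only): @(secs, r) <- time someAction@
-- yields the elapsed wall-clock seconds together with the action's result.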
-- | Measure how long an action took, even if it fails
timeEx :: (MonadCatch m, MonadIO m) => m a -> m (Double, Either SomeException a)
timeEx = time . tryAny
-- | Just measure how long an action takes, discarding its result
time_ :: MonadIO m => m a -> m Double
time_ = fmap fst . time
-------------------------------------------------------------------------------
getTime :: IO Double
getTime = realToFrac `fmap` getPOSIXTime
| Soostone/instrument | instrument/src/Instrument/Measurement.hs | bsd-3-clause | 1,346 | 0 | 10 | 234 | 273 | 151 | 122 | 25 | 1 |
{-# LANGUAGE RecordWildCards, PatternGuards, CPP #-}
module Development.Make.All(runMakefile) where
import System.Environment
import Development.Shake
import Development.Shake.FilePath
import Development.Make.Parse
import Development.Make.Env
import Development.Make.Rules
import Development.Make.Type
import qualified System.Directory as IO
import Data.List
import Data.Maybe
import Control.Arrow
import Control.Monad
import System.Cmd
import System.Exit
import Control.Monad.Trans.State.Strict
runMakefile :: FilePath -> [String] -> IO (Rules ())
runMakefile file args = do
env <- defaultEnv
mk <- parse file
rs <- eval env mk
return $ do
defaultRuleFile_
case filter (not . isPrefixOf "." . target) rs of
Ruler x _ _ : _ | null args, '%' `notElem` x -> want_ [x]
_ -> return ()
mapM_ (want_ . return) args
convert rs
data Ruler = Ruler
{target :: String
,prereq :: (Env, Expr) -- Env is the Env at this point
,cmds :: (Env, [Command]) -- Env is the Env at the end
}
eval :: Env -> Makefile -> IO [Ruler]
eval env (Makefile xs) = do
(rs, env) <- runStateT (fmap concat $ mapM f xs) env
return [r{cmds=(env,snd $ cmds r)} | r <- rs]
where
f :: Stmt -> StateT Env IO [Ruler]
f Assign{..} = do
e <- get
e <- liftIO $ addEnv name assign expr e
put e
return []
f Rule{..} = do
e <- get
target <- liftIO $ fmap words $ askEnv e targets
return $ map (\t -> Ruler t (e, prerequisites) (undefined, commands)) target
convert :: [Ruler] -> Rules ()
convert rs = match ??> run
where
match s = any (isJust . check s) rs
check s r = makePattern (target r) s
run target = do
let phony = has False ".PHONY" target
let silent = has True ".SILENT" target
(deps, cmds) <- fmap (first concat . second concat . unzip) $ forM rs $ \r ->
case check target r of
Nothing -> return ([], [])
Just op -> do
let (preEnv,preExp) = prereq r
env <- liftIO $ addEnv "@" Equals (Lit target) preEnv
pre <- liftIO $ askEnv env preExp
vp <- liftIO $ fmap splitSearchPath $ askEnv env $ Var "VPATH"
pre <- mapM (vpath vp) $ words $ op pre
return (pre, [cmds r])
mapM_ (need_ . return) deps
forM_ cmds $ \(env,cmd) -> do
env <- liftIO $ addEnv "@" Equals (Lit target) env
env <- liftIO $ addEnv "^" Equals (Lit $ unwords deps) env
env <- liftIO $ addEnv "<" Equals (Lit $ head $ deps ++ [""]) env
forM_ cmd $ \c ->
case c of
Expr c -> (if silent then quietly else id) $
runCommand =<< liftIO (askEnv env c)
return $ if phony then Phony else NotPhony
has auto name target =
or [(null ws && auto) || target `elem` ws | Ruler t (_,Lit s) _ <- rs, t == name, let ws = words s]
runCommand :: String -> Action ()
runCommand x = do
res <- if "@" `isPrefixOf` x then sys $ drop 1 x
else putNormal x >> sys x
when (res /= ExitSuccess) $
error $ "System command failed: " ++ x
where sys = quietly . traced (unwords $ take 1 $ words x) . system
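-- Match a make-style '%' pattern against a name, returning the substitution
-- function on success. Illustrative example (not from the original source):
-- @makePattern "%.o" "foo.o"@ gives @Just f@ with @f "%.c" == "foo.c"@,
-- whereas a non-matching name gives @Nothing@.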
makePattern :: String -> FilePath -> Maybe (String -> String)
makePattern pat v = case break (== '%') pat of
(pre,'%':post) -> if pre `isPrefixOf` v && post `isSuffixOf` v && rest >= 0
then Just $ concatMap (\x -> if x == '%' then subs else [x])
else Nothing
where rest = length v - (length pre + length post)
subs = take rest $ drop (length pre) v
    _ -> if pat == v then Just id else Nothing
vpath :: [FilePath] -> FilePath -> Action FilePath
vpath [] y = return y
vpath (x:xs) y = do
b <- doesFileExist $ x </> y
if b then return $ x </> y else vpath xs y
defaultEnv :: IO Env
defaultEnv = do
#if __GLASGOW_HASKELL__ >= 706
exePath <- getExecutablePath
#else
exePath <- getProgName
#endif
env <- getEnvironment
cur <- IO.getCurrentDirectory
return $ newEnv $
("EXE",if null exe then "" else "." ++ exe) :
("MAKE",normalise exePath) :
("CURDIR",normalise cur) :
env
| nh2/shake | Development/Make/All.hs | bsd-3-clause | 4,658 | 79 | 16 | 1,677 | 1,690 | 870 | 820 | 106 | 5 |
{-# LANGUAGE TemplateHaskell #-}
module Client.ExplosionT where
import Control.Lens (makeLenses)
import Data.IORef (newIORef)
import Linear (V3(..))
import System.IO.Unsafe (unsafePerformIO)
import Client.EntityT
import Types
makeLenses ''ExplosionT
newExplosionT :: ExplosionT
newExplosionT = ExplosionT
{ _eType = 0
, _eEnt = unsafePerformIO (newIORef newEntityT)
, _eFrames = 0
, _eLight = 0
, _eLightColor = V3 0 0 0
, _eStart = 0
, _eBaseFrame = 0
}
| ksaveljev/hake-2 | src/Client/ExplosionT.hs | bsd-3-clause | 599 | 0 | 9 | 209 | 136 | 82 | 54 | 18 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Core.Data.Group
-- Copyright : (C) 2018 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Core.Data.Group (
-- * Types
GroupV1(..)
) where
import Irreverent.Bitbucket.Core.Data.Common
import Preamble
-- | The Group information
-- that is provided/required by V1 of the bitbucket cloud rest api
-- Note that V1 of the REST API is deprecated and will be gone by Dec 2018
--
data GroupV1 = GroupV1 {
gV1Owner :: !UserV1
, gV1Name :: !GroupName
, gV1Members :: ![UserV1]
, gV1Slug :: !GroupSlug
} deriving (Show, Eq)
| irreverent-pixel-feats/bitbucket | bitbucket-core/src/Irreverent/Bitbucket/Core/Data/Group.hs | bsd-3-clause | 853 | 0 | 10 | 158 | 94 | 64 | 30 | 19 | 0 |
module Main where
import Control.Monad (void)
import HTk.Toplevel.HTk
import Prelude hiding (interact)
import UI.Dialogui
import UI.Dialogui.HTk
type RGB = (Int, Int, Int)
theme :: (HasColour a, HasFont a) => [Config a]
theme = [ font (Courier, Bold, 18::Int)
, background ((0x00, 0x00, 0x7F) :: RGB)
, foreground ((0xFF, 0xFF, 0x00) :: RGB) ]
main :: IO ()
main = runGUIWith opts
(writeLn " -=<[ He110! ]>=-")
$ interact (\s -> "> " ++ s ++ "\n")
where
opts = defaultOptions { prepareWindow = set [ text "Customized!" ]
, prepareInput = set theme
, prepareOutput = set $ theme ++ [ size (20, 10) ]
}
set = (void .) . flip configure
| astynax/dialogui-htk | examples/CustomizedGUI.hs | bsd-3-clause | 760 | 0 | 12 | 240 | 270 | 157 | 113 | 19 | 1 |
module Resolve.DNS.Utils where
import Data.ByteString
import Data.Word
import Data.Bits
safeFromIntegral :: (Integral a, Integral b) => a -> Maybe b
safeFromIntegral x = let y = fromIntegral x
in if (fromIntegral y == x) then Just y
else Nothing
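-- The conversion above fails instead of silently truncating; e.g. (illustrative)
-- @safeFromIntegral (300 :: Int) :: Maybe Word8@ is @Nothing@, while
-- @safeFromIntegral (100 :: Int) :: Maybe Word8@ is @Just 100@.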
toWord16 :: ByteString -> Word16
toWord16 bs = Data.ByteString.foldl (\a b -> a `shift` 8 .|. (fromIntegral b)) 0 bs
fromWord16 :: Word16 -> ByteString
fromWord16 w = cons (fromIntegral (w `shift` (-8) .&. 0xff)) $ cons (fromIntegral (w .&. 0xff)) empty
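-- The two helpers above are big-endian; e.g. (illustrative)
-- @toWord16 (Data.ByteString.pack [0x12, 0x34]) == 0x1234@ and
-- @fromWord16 0x1234@ yields the bytes @[0x12, 0x34]@.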
| riaqn/resolve | src/Resolve/DNS/Utils.hs | bsd-3-clause | 509 | 0 | 13 | 90 | 211 | 113 | 98 | 12 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Servant.Recons.Parser where
import Data.Typeable
import Data.Vinyl
import GHC.TypeLits
import Servant.API
import Data.Text (Text)
import qualified Data.Text as T
import Text.Read
type family APIToFields layout :: [(Symbol, *)] where
APIToFields (QueryParam s a :> b) = '(s, a) ': APIToFields b
APIToFields (a :> b) = APIToFields b
APIToFields (Verb a b c d) = '[]
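-- For illustration (hypothetical API): @APIToFields ("users" :> QueryParam "name" Text :> Get '[JSON] ())@
-- reduces to @'[ '("name", Text) ]@; combinators other than 'QueryParam' are skipped.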
class ReadParam api where
type TParam api :: *
readParam :: Proxy api -> String -> Maybe (TParam api)
instance (KnownSymbol s, Read Text) => ReadParam (QueryParam s Text) where
type TParam (QueryParam s Text) = Text
readParam _ = Just . T.pack
instance (KnownSymbol s, Read String) => ReadParam (QueryParam s String) where
type TParam (QueryParam s String) = String
readParam _ = Just
instance {-# OVERLAPPABLE #-} (KnownSymbol s, Read t) => ReadParam (QueryParam s t) where
type TParam (QueryParam s t) = t
readParam _ s = readMaybe s
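-- With the instances above (illustrative): @readParam (Proxy :: Proxy (QueryParam "age" Int)) "42"@
-- is @Just 42@, while an unparsable value gives @Nothing@.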
class ReconsParser api where
type ParseResult api :: *
parser :: forall proxy . proxy api -> [String] -> Maybe (ParseResult api)
-- FIXME: treats a bad param as Nothing, so the command is still parsed ok in this case.
-- This may cause the API to be called with wrong parameters, although it's harmless.
instance (
FieldRec (APIToFields b) ~ ParseResult b
, ReconsParser b
, KnownSymbol s
, ReadParam (QueryParam s a)
, Read a
) => ReconsParser (QueryParam s a :> b) where
type ParseResult (QueryParam s a :> b) = FieldRec ( '(s, a) ': APIToFields b)
parser _ ss = case ss of
[] -> Nothing
(s : ss') -> (:&)
<$> (fmap Field (readParam qp s) :: Maybe (ElField '(s, a)) )
<*> (parser (Proxy :: Proxy b) ss')
where qp = Proxy :: Proxy (QueryParam s a)
instance (
ReconsParser b
, FieldRec (APIToFields b) ~ ParseResult b
, KnownSymbol s
) => ReconsParser (s :> b) where
type ParseResult (s :> b) = ParseResult b
parser _ ss = case ss of
[] -> Nothing
(s:ss') | s == sv -> parser (Proxy :: Proxy b) ss'
| otherwise -> Nothing
where sv = symbolVal (Proxy :: Proxy s)
instance ReconsParser (Verb (a :: StdMethod) b c d) where
type ParseResult (Verb a b c d) = FieldRec '[]
parser _ [] = Just RNil
parser _ _ = Nothing
class HasPrefix api where
prefix :: Proxy api -> [Maybe String]
instance (HasPrefix rest, KnownSymbol s) => HasPrefix (s :> rest) where
prefix _ = Just ( symbolVal (Proxy :: Proxy s) ) : prefix (Proxy :: Proxy rest)
instance (HasPrefix rest) => HasPrefix (QueryParam s t :> rest) where
prefix _ = Nothing : prefix (Proxy :: Proxy rest)
instance {-# OVERLAPPABLE #-} (HasPrefix rest) => HasPrefix (whatever :> rest) where
prefix _ = prefix (Proxy :: Proxy rest)
instance HasPrefix (Verb a b c d) where
prefix _ = []
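-- A usage sketch under assumptions: 'Hello' is a hypothetical API (not
-- part of the original module) with one static path segment and one query
-- parameter. Parsing the words of a command against it yields a field
-- record holding the "name" parameter, e.g. @parseHello ["hello", "42"]@.
type Hello = "hello" :> QueryParam "name" Int :> Get '[JSON] Int

parseHello :: [String] -> Maybe (ParseResult Hello)
parseHello = parser (Proxy :: Proxy Hello)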
| voidlizard/recons | src/Servant/Recons/Parser.hs | bsd-3-clause | 2,985 | 0 | 16 | 649 | 1,127 | 591 | 536 | 73 | 0 |
{-# LANGUAGE PackageImports #-}
module GHC.IO.Exception (module M) where
import "base" GHC.IO.Exception as M
| silkapp/base-noprelude | src/GHC/IO/Exception.hs | bsd-3-clause | 114 | 0 | 4 | 18 | 23 | 17 | 6 | 3 | 0 |
module LibSpec (main, spec) where
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck
import Lib
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "ok" $ do
prop "test" $ (\a b -> someFunc a b == a + b)
| wass80/CoCaml | test/LibSpec.hs | bsd-3-clause | 240 | 0 | 15 | 53 | 102 | 55 | 47 | 10 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
-- | Update system related functionality in Auxx.
module Command.Update
( vote
, propose
, hashInstaller
) where
import Universum
import qualified Data.ByteString.Lazy as BSL
import Data.Default (def)
import qualified Data.HashMap.Strict as HM
import Data.List ((!!))
import Formatting (sformat, string, (%))
import Pos.Binary (Raw)
import Pos.Chain.Update (SystemTag, UpId, UpdateData (..),
mkUpdateProposalWSign, mkUpdateVoteSafe)
import Pos.Client.KeyStorage (getSecretKeysPlain)
import Pos.Client.Update.Network (submitUpdateProposal, submitVote)
import Pos.Core.Exception (reportFatalError)
import Pos.Crypto (Hash, ProtocolMagic, emptyPassphrase, hash,
hashHexF, unsafeHash, withSafeSigner, withSafeSigners)
import Pos.Infra.Diffusion.Types (Diffusion (..))
import Pos.Network.Update.Download (installerHash)
import Pos.Util.Wlog (WithLogger, logDebug, logError, logInfo)
import Lang.Value (ProposeUpdateParams (..), ProposeUpdateSystem (..))
import Mode (MonadAuxxMode)
import Repl (PrintAction)
----------------------------------------------------------------------------
-- Vote
----------------------------------------------------------------------------
vote
:: MonadAuxxMode m
=> ProtocolMagic
-> Diffusion m
-> Int
-> Bool
-> UpId
-> m ()
vote pm diffusion idx decision upid = do
logDebug $ "Submitting a vote :" <> show (idx, decision, upid)
skey <- (!! idx) <$> getSecretKeysPlain
mbVoteUpd <- withSafeSigner skey (pure emptyPassphrase) $ mapM $ \signer ->
pure $ mkUpdateVoteSafe pm signer upid decision
case mbVoteUpd of
Nothing -> logError "Invalid passphrase"
Just voteUpd -> do
submitVote diffusion voteUpd
logInfo "Submitted vote"
----------------------------------------------------------------------------
-- Propose, hash installer
----------------------------------------------------------------------------
propose
:: MonadAuxxMode m
=> ProtocolMagic
-> Diffusion m
-> ProposeUpdateParams
-> m UpId
propose pm diffusion ProposeUpdateParams{..} = do
logDebug "Proposing update..."
skey <- (!! puSecretKeyIdx) <$> getSecretKeysPlain
updateData <- mapM updateDataElement puUpdates
let udata = HM.fromList updateData
skeys <- if not puVoteAll then pure [skey]
else getSecretKeysPlain
withSafeSigners skeys (pure emptyPassphrase) $ \ss -> do
unless (length skeys == length ss) $
reportFatalError $ "Number of safe signers: " <> show (length ss) <>
", expected " <> show (length skeys)
let publisherSS = ss !! if not puVoteAll then 0 else puSecretKeyIdx
let updateProposal =
mkUpdateProposalWSign
pm
puBlockVersion
puBlockVersionModifier
puSoftwareVersion
udata
def
publisherSS
let upid = hash updateProposal
submitUpdateProposal pm diffusion ss updateProposal
if not puVoteAll then
putText (sformat ("Update proposal submitted, upId: "%hashHexF) upid)
else
putText (sformat ("Update proposal submitted along with votes, upId: "%hashHexF) upid)
return upid
updateDataElement :: MonadAuxxMode m => ProposeUpdateSystem -> m (SystemTag, UpdateData)
updateDataElement ProposeUpdateSystem{..} = do
diffHash <- hashFile pusBinDiffPath
pkgHash <- hashFile pusInstallerPath
pure (pusSystemTag, UpdateData diffHash pkgHash dummyHash dummyHash)
dummyHash :: Hash Raw
dummyHash = unsafeHash (0 :: Integer)
hashFile :: (WithLogger m, MonadIO m) => Maybe FilePath -> m (Hash Raw)
hashFile Nothing = pure dummyHash
hashFile (Just filename) = do
fileData <- liftIO $ BSL.readFile filename
let h = installerHash fileData
  logInfo $ sformat ("Read file "%string%" successfully, its hash: "%hashHexF) filename h
pure h
hashInstaller :: MonadIO m => PrintAction m -> FilePath -> m ()
hashInstaller printAction path = do
h <- installerHash <$> liftIO (BSL.readFile path)
printAction $ sformat ("Hash of installer '"%string%"' is "%hashHexF) path h
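-- A usage sketch (assumption, not part of the original module): given a
-- print action @printer@ of the expected 'PrintAction' type, the hash of
-- an installer file on disk is reported with
--
-- > hashInstaller printer "installer.bin"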
| input-output-hk/pos-haskell-prototype | auxx/src/Command/Update.hs | mit | 4,527 | 0 | 19 | 1,175 | 1,094 | 572 | 522 | 95 | 4 |
module GpgMe
( Error(..)
, ErrorSource(..)
, ErrorCode(..)
, checkVersion
, ctxNew
, setArmor
, Key(..)
, Attr(..)
, keyName
, keyID
, keyFingerprint
, getKeys
, findKeyBy
, keyGetStringAttr
, ImportStatus(..)
, importKeys
, exportKeys
, sign
, SigMode(..)
, SigStat(..)
, SigSummary(..)
, verifyDetach
, verify
, VerifyResult(..)
, setPassphraseCallback
, Engine(..)
, getEngines
, setEngine
, PinentryMode(..)
, setPinentryMode
, GenKeyResult(..)
, genKey
, deleteKey
, editKey
, module Gpg.EditKey
) where
import Control.Applicative
import qualified Control.Exception as Ex
import Data.ByteString (ByteString)
import Data.Maybe
import Bindings
import Gpg.EditKey
import Control.Monad
keyName :: Key -> IO (Maybe ByteString)
keyName k = keyGetStringAttr k AttrName 0
keyID :: Key -> IO (Maybe ByteString)
keyID k = keyGetStringAttr k AttrKeyid 0
keyFingerprint :: Key -> IO (Maybe ByteString)
keyFingerprint k = keyGetStringAttr k AttrFpr 0
findKeyBy :: Eq a => Ctx -> Bool -> (Key -> IO a) -> a -> IO [Key]
findKeyBy ctx secret f x = filterM (fmap (== x) . f) =<< getKeys ctx secret
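-- A usage sketch (assumption, not part of the original API): keys can be
-- filtered by any attribute accessor, e.g. all public keys whose
-- fingerprint equals @fpr@:
--
-- > findKeyBy ctx False keyFingerprint (Just fpr)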
| pontarius/pontarius-gpg | src/GpgMe.hs | mit | 1,397 | 0 | 11 | 480 | 390 | 231 | 159 | 51 | 1 |
{- |
Module : ./HasCASL/ClassAna.hs
Description : analyse kinds using a class map
Copyright : (c) Christian Maeder and Uni Bremen 2003-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
analyse kinds using a class map
-}
module HasCASL.ClassAna where
import HasCASL.As
import HasCASL.AsUtils
import HasCASL.PrintAs ()
import Common.Id
import HasCASL.Le
import qualified Data.Map as Map
import qualified Data.Set as Set
import Common.Lib.State
import Common.Result
import Common.DocUtils
import Common.Utils
-- * analyse kinds
-- | check the kind and compute the raw kind
anaKindM :: Kind -> ClassMap -> Result RawKind
anaKindM k cm = case k of
ClassKind ci -> if k == universe then return rStar
else case Map.lookup ci cm of
Just (ClassInfo rk _) -> return rk
Nothing -> Result [mkDiag Error "not a class" ci] $ Just rStar
FunKind v k1 k2 ps -> do
rk1 <- anaKindM k1 cm
rk2 <- anaKindM k2 cm
return $ FunKind v rk1 rk2 ps
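-- For example (a reading of the first case above, not a change in
-- behaviour): @anaKindM universe cm@ yields @return rStar@ for any class
-- map @cm@, while an unknown class name produces an error diagnosis but
-- still defaults the raw kind to @rStar@.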
-- | get minimal function kinds of (class) kind
getFunKinds :: Monad m => ClassMap -> Kind -> m (Set.Set Kind)
getFunKinds cm k = case k of
FunKind {} -> return $ Set.singleton k
ClassKind c -> case Map.lookup c cm of
Just info -> do
ks <- mapM (getFunKinds cm) $ Set.toList $ classKinds info
return $ keepMinKinds cm ks
_ -> fail $ "not a function kind '" ++ showId c "'"
-- | compute arity from a raw kind
kindArity :: RawKind -> Int
kindArity k = case k of
ClassKind _ -> 0
FunKind _ _ rk _ -> 1 + kindArity rk
-- | check if a class occurs in one of its super kinds
cyclicClassId :: ClassMap -> Id -> Kind -> Bool
cyclicClassId cm ci k = case k of
FunKind _ k1 k2 _ -> cyclicClassId cm ci k1 || cyclicClassId cm ci k2
ClassKind cj -> cj /= universeId &&
(cj == ci || not (Set.null $ Set.filter (cyclicClassId cm ci)
$ classKinds $ Map.findWithDefault (error "cyclicClassId") cj cm))
-- * subkinding
-- | keep only minimal elements according to 'lesserKind'
keepMinKinds :: ClassMap -> [Set.Set Kind] -> Set.Set Kind
keepMinKinds cm = Set.fromDistinctAscList
. keepMins (lesserKind cm) . Set.toList . Set.unions
-- | no kind of the set is lesser than the new kind
newKind :: ClassMap -> Kind -> Set.Set Kind -> Bool
newKind cm k = Set.null . Set.filter (flip (lesserKind cm) k)
-- | add a new kind to a set
addNewKind :: ClassMap -> Kind -> Set.Set Kind -> Set.Set Kind
addNewKind cm k = Set.insert k . Set.filter (not . lesserKind cm k)
lesserVariance :: Variance -> Variance -> Bool
lesserVariance v1 v2 = case v1 of
InVar -> True
_ -> case v2 of
NonVar -> True
_ -> v1 == v2
-- | revert variance
revVariance :: Variance -> Variance
revVariance v = case v of
InVar -> NonVar
CoVar -> ContraVar
ContraVar -> CoVar
NonVar -> InVar
-- | compute the minimal variance
minVariance :: Variance -> Variance -> Variance
minVariance v1 v2 = case v1 of
NonVar -> v2
_ -> case v2 of
NonVar -> v1
_ -> if v1 == v2 then v1 else InVar
-- | check subkinding (kinds with variances are greater)
lesserKind :: ClassMap -> Kind -> Kind -> Bool
lesserKind cm k1 k2 = case k1 of
ClassKind c1 -> (case k2 of
ClassKind c2 -> c1 == c2 || (k1 /= universe && k2 == universe)
_ -> False) ||
case Map.lookup c1 cm of
Just info -> not $ newKind cm k2 $ classKinds info
_ -> False
FunKind v1 a1 r1 _ -> case k2 of
FunKind v2 a2 r2 _ -> lesserVariance v1 v2
&& lesserKind cm r1 r2 && lesserKind cm a2 a1
_ -> False
-- | compare raw kinds
lesserRawKind :: RawKind -> RawKind -> Bool
lesserRawKind k1 k2 = case k1 of
ClassKind _ -> case k2 of
ClassKind _ -> True
_ -> False
FunKind v1 a1 r1 _ -> case k2 of
FunKind v2 a2 r2 _ -> lesserVariance v1 v2
&& lesserRawKind r1 r2 && lesserRawKind a2 a1
_ -> False
minRawKind :: Monad m => String -> RawKind -> RawKind -> m RawKind
minRawKind str k1 k2 = let err = fail $ diffKindString str k1 k2 in case k1 of
ClassKind _ -> case k2 of
ClassKind _ -> return $ ClassKind ()
_ -> err
FunKind v1 a1 r1 ps -> case k2 of
FunKind v2 a2 r2 qs -> do
a3 <- minRawKind str a2 a1
r3 <- minRawKind str r1 r2
return $ FunKind (minVariance v1 v2) a3 r3 $ appRange ps qs
_ -> err
rawToKind :: RawKind -> Kind
rawToKind = mapKind (const universeId)
-- * diagnostic messages
-- | create message for different kinds
diffKindString :: String -> RawKind -> RawKind -> String
diffKindString a k1 k2 = "incompatible kind of: " ++ a ++
expected (rawToKind k1) (rawToKind k2)
-- | create diagnostic for different kinds
diffKindDiag :: (GetRange a, Pretty a) =>
a -> RawKind -> RawKind -> [Diagnosis]
diffKindDiag a k1 k2 =
[Diag Error (diffKindString (showDoc a "") k1 k2) $ getRange a]
-- | check if raw kinds are compatible
checkKinds :: (GetRange a, Pretty a) =>
a -> RawKind -> RawKind -> [Diagnosis]
checkKinds p k1 k2 =
maybe (diffKindDiag p k1 k2) (const []) $ minRawKind "" k1 k2
-- | analyse class decls
anaClassDecls :: ClassDecl -> State Env ClassDecl
anaClassDecls (ClassDecl cls k ps) =
do cm <- gets classMap
let Result ds (Just rk) = anaKindM k cm
addDiags ds
let ak = if null ds then k else universe
mapM_ (addClassDecl rk ak) cls
return $ ClassDecl cls ak ps
-- | store a class
addClassDecl :: RawKind -> Kind -> Id -> State Env ()
-- check with merge
addClassDecl rk kind ci =
if ci == universeId then
addDiags [mkDiag Warning "void universe class declaration" ci]
else do
e <- get
let cm = classMap e
tm = typeMap e
tvs = localTypeVars e
case Map.lookup ci tm of
Just _ -> addDiags [mkDiag Error "class name already a type" ci]
Nothing -> case Map.lookup ci tvs of
Just _ -> addDiags
[mkDiag Error "class name already a type variable" ci]
Nothing -> case Map.lookup ci cm of
Nothing -> do
addSymbol $ idToClassSymbol ci rk
putClassMap $ Map.insert ci
(ClassInfo rk $ Set.singleton kind) cm
Just (ClassInfo ork superClasses) ->
let Result ds mk = minRawKind (showDoc ci "") rk ork
in case mk of
Nothing -> addDiags ds
Just nk ->
if cyclicClassId cm ci kind then
addDiags [mkDiag Error "cyclic class" ci]
else do
addSymbol $ idToClassSymbol ci nk
if newKind cm kind superClasses then do
addDiags [mkDiag Warning "refined class" ci]
putClassMap $ Map.insert ci
(ClassInfo nk $ addNewKind cm kind superClasses) cm
else addDiags [mkDiag Warning "unchanged class" ci]
| spechub/Hets | HasCASL/ClassAna.hs | gpl-2.0 | 7,224 | 0 | 32 | 2,216 | 2,266 | 1,106 | 1,160 | 151 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.DeleteTape
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes the specified virtual tape.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_DeleteTape.html>
module Network.AWS.StorageGateway.DeleteTape
(
-- * Request
DeleteTape
-- ** Request constructor
, deleteTape
-- ** Request lenses
, dt1GatewayARN
, dt1TapeARN
-- * Response
, DeleteTapeResponse
-- ** Response constructor
, deleteTapeResponse
-- ** Response lenses
, dtrTapeARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
data DeleteTape = DeleteTape
{ _dt1GatewayARN :: Text
, _dt1TapeARN :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'DeleteTape' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dt1GatewayARN' @::@ 'Text'
--
-- * 'dt1TapeARN' @::@ 'Text'
--
deleteTape :: Text -- ^ 'dt1GatewayARN'
-> Text -- ^ 'dt1TapeARN'
-> DeleteTape
deleteTape p1 p2 = DeleteTape
{ _dt1GatewayARN = p1
, _dt1TapeARN = p2
}
-- | The unique Amazon Resource Name (ARN) of the gateway that the virtual tape to
-- delete is associated with. Use the 'ListGateways' operation to return a list of
-- gateways for your account and region.
dt1GatewayARN :: Lens' DeleteTape Text
dt1GatewayARN = lens _dt1GatewayARN (\s a -> s { _dt1GatewayARN = a })
-- | The Amazon Resource Name (ARN) of the virtual tape to delete.
dt1TapeARN :: Lens' DeleteTape Text
dt1TapeARN = lens _dt1TapeARN (\s a -> s { _dt1TapeARN = a })
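-- Usage sketch (added illustration, not generated code): a complete
-- request names both the gateway and the tape to delete, e.g.
--
-- > deleteTape "arn:aws:storagegateway:...:gateway/sgw-example"
-- >            "arn:aws:storagegateway:...:tape/TEST-TAPE"
--
-- The ARNs here are placeholders, not real resources.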
newtype DeleteTapeResponse = DeleteTapeResponse
{ _dtrTapeARN :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'DeleteTapeResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtrTapeARN' @::@ 'Maybe' 'Text'
--
deleteTapeResponse :: DeleteTapeResponse
deleteTapeResponse = DeleteTapeResponse
{ _dtrTapeARN = Nothing
}
-- | The Amazon Resource Name (ARN) of the deleted virtual tape.
dtrTapeARN :: Lens' DeleteTapeResponse (Maybe Text)
dtrTapeARN = lens _dtrTapeARN (\s a -> s { _dtrTapeARN = a })
instance ToPath DeleteTape where
toPath = const "/"
instance ToQuery DeleteTape where
toQuery = const mempty
instance ToHeaders DeleteTape
instance ToJSON DeleteTape where
toJSON DeleteTape{..} = object
[ "GatewayARN" .= _dt1GatewayARN
, "TapeARN" .= _dt1TapeARN
]
instance AWSRequest DeleteTape where
type Sv DeleteTape = StorageGateway
type Rs DeleteTape = DeleteTapeResponse
request = post "DeleteTape"
response = jsonResponse
instance FromJSON DeleteTapeResponse where
parseJSON = withObject "DeleteTapeResponse" $ \o -> DeleteTapeResponse
<$> o .:? "TapeARN"
| kim/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/DeleteTape.hs | mpl-2.0 | 3,848 | 0 | 9 | 871 | 523 | 317 | 206 | 63 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Handlers.Seen
( handler
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Arrow (first)
import Control.Monad.Trans (liftIO)
import qualified Database.SQLite.Simple as Sqlite
--------------------------------------------------------------------------------
import NumberSix.Bang
import NumberSix.Irc
import NumberSix.Message
import NumberSix.Util
import NumberSix.Util.Time
--------------------------------------------------------------------------------
handler :: UninitializedHandler
handler = makeHandlerWith "Seen" (map const [storeHook, loadHook]) initialize
--------------------------------------------------------------------------------
initialize :: Irc ()
initialize = withDatabase $ \db -> Sqlite.execute_ db
"CREATE TABLE IF NOT EXISTS seen ( \
\ id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, \
\ host TEXT NOT NULL, \
\ channel TEXT NOT NULL, \
\ sender TEXT NOT NULL, \
\ time TEXT NOT NULL, \
\ text TEXT NOT NULL \
\)"
--------------------------------------------------------------------------------
storeHook :: Irc ()
storeHook = onCommand "PRIVMSG" $ do
host <- getHost
channel <- getChannel
sender <- toLower <$> getSender
IrcTime time <- liftIO getTime
text <- getMessageText
withDatabase $ \db -> do
r <- Sqlite.query db
"SELECT id FROM seen \
\WHERE host = ? AND channel = ? AND sender = ?"
(host, channel, sender)
case r of
-- In the database: update
[Sqlite.Only id'] -> Sqlite.execute db
"UPDATE seen SET time = ?, text = ? WHERE id = ?"
(time, text, id' :: Integer)
-- Not yet in the database: insert
_ -> Sqlite.execute db
"INSERT INTO seen (host, channel, sender, time, text) \
\VALUES (?, ?, ?, ?, ?)"
(host, channel, sender, time, text)
return ()
--------------------------------------------------------------------------------
loadHook :: Irc ()
loadHook = onBangCommand "!seen" $ do
host <- getHost
channel <- getChannel
(sender, _) <- first toLower . breakWord <$> getBangCommandText
r <- withDatabase $ \db -> Sqlite.query db
"SELECT time, text FROM seen \
\WHERE host = ? AND channel = ? AND sender = ?"
(host, channel, sender)
case r of
-- In the database: seen
[(time, text)] -> do
pretty <- liftIO $ prettyTime $ IrcTime time
writeReply $ "I last saw " <> sender <> " " <> pretty
<> " saying: " <> text
-- Not yet in the database: not seen
_ -> writeReply $ "I ain't never seen " <> sender
| itkovian/number-six | src/NumberSix/Handlers/Seen.hs | bsd-3-clause | 3,304 | 0 | 18 | 1,100 | 513 | 275 | 238 | 50 | 2 |
{-
(c) The GRASP Project, Glasgow University, 1994-1998
\section[TysWiredIn]{Wired-in knowledge about {\em non-primitive} types}
-}
{-# LANGUAGE CPP #-}
-- | This module is about types that can be defined in Haskell, but which
-- must be wired into the compiler nonetheless; cf. module TysPrim
module TysWiredIn (
-- * All wired in things
wiredInTyCons, isBuiltInOcc_maybe,
-- * Bool
boolTy, boolTyCon, boolTyCon_RDR, boolTyConName,
trueDataCon, trueDataConId, true_RDR,
falseDataCon, falseDataConId, false_RDR,
promotedBoolTyCon, promotedFalseDataCon, promotedTrueDataCon,
-- * Ordering
ltDataCon, ltDataConId,
eqDataCon, eqDataConId,
gtDataCon, gtDataConId,
promotedOrderingTyCon,
promotedLTDataCon, promotedEQDataCon, promotedGTDataCon,
-- * Char
charTyCon, charDataCon, charTyCon_RDR,
charTy, stringTy, charTyConName,
-- * Double
doubleTyCon, doubleDataCon, doubleTy, doubleTyConName,
-- * Float
floatTyCon, floatDataCon, floatTy, floatTyConName,
-- * Int
intTyCon, intDataCon, intTyCon_RDR, intDataCon_RDR, intTyConName,
intTy,
-- * Word
wordTyCon, wordDataCon, wordTyConName, wordTy,
-- * List
listTyCon, nilDataCon, nilDataConName, consDataCon, consDataConName,
listTyCon_RDR, consDataCon_RDR, listTyConName,
mkListTy, mkPromotedListTy,
-- * Tuples
mkTupleTy, mkBoxedTupleTy,
tupleTyCon, tupleCon,
promotedTupleTyCon, promotedTupleDataCon,
unitTyCon, unitDataCon, unitDataConId, pairTyCon,
unboxedUnitTyCon, unboxedUnitDataCon,
unboxedSingletonTyCon, unboxedSingletonDataCon,
unboxedPairTyCon, unboxedPairDataCon,
-- * Unit
unitTy,
-- * Kinds
typeNatKindCon, typeNatKind, typeSymbolKindCon, typeSymbolKind,
-- * Parallel arrays
mkPArrTy,
parrTyCon, parrFakeCon, isPArrTyCon, isPArrFakeCon,
parrTyCon_RDR, parrTyConName,
-- * Equality predicates
eqTyCon_RDR, eqTyCon, eqTyConName, eqBoxDataCon,
coercibleTyCon, coercibleDataCon, coercibleClass,
mkWiredInTyConName -- This is used in TcTypeNats to define the
-- built-in functions for evaluation.
) where
#include "HsVersions.h"
import {-# SOURCE #-} MkId( mkDataConWorkId )
-- friends:
import PrelNames
import TysPrim
-- others:
import Constants ( mAX_TUPLE_SIZE )
import Module ( Module )
import Type ( mkTyConApp )
import DataCon
import ConLike
import Var
import TyCon
import Class ( Class, mkClass )
import TypeRep
import RdrName
import Name
import BasicTypes ( TupleSort(..), tupleSortBoxity,
Arity, RecFlag(..), Boxity(..) )
import ForeignCall
import Unique ( incrUnique, mkTupleTyConUnique,
mkTupleDataConUnique, mkPArrDataConUnique )
import Data.Array
import FastString
import Outputable
import Util
import BooleanFormula ( mkAnd )
alpha_tyvar :: [TyVar]
alpha_tyvar = [alphaTyVar]
alpha_ty :: [Type]
alpha_ty = [alphaTy]
{-
************************************************************************
* *
\subsection{Wired in type constructors}
* *
************************************************************************
If you change which things are wired in, make sure you change their
names in PrelNames, so they use wTcQual, wDataQual, etc
-}
-- This list is used only to define PrelInfo.wiredInThings. That in turn
-- is used to initialise the name environment carried around by the renamer.
-- This means that if we look up the name of a TyCon (or its implicit binders)
-- that occurs in this list that name will be assigned the wired-in key we
-- define here.
--
-- Because of their infinite nature, this list excludes tuples, Any and implicit
-- parameter TyCons. Instead, we have a hack in lookupOrigNameCache to deal with
-- these names.
--
-- See also Note [Known-key names]
wiredInTyCons :: [TyCon]
wiredInTyCons = [ unitTyCon -- Not treated like other tuples, because
-- it's defined in GHC.Base, and there's only
-- one of it. We put it in wiredInTyCons so
-- that it'll pre-populate the name cache, so
-- the special case in lookupOrigNameCache
-- doesn't need to look out for it
, boolTyCon
, charTyCon
, doubleTyCon
, floatTyCon
, intTyCon
, wordTyCon
, listTyCon
, parrTyCon
, eqTyCon
, coercibleTyCon
, typeNatKindCon
, typeSymbolKindCon
]
mkWiredInTyConName :: BuiltInSyntax -> Module -> FastString -> Unique -> TyCon -> Name
mkWiredInTyConName built_in modu fs unique tycon
= mkWiredInName modu (mkTcOccFS fs) unique
(ATyCon tycon) -- Relevant TyCon
built_in
mkWiredInDataConName :: BuiltInSyntax -> Module -> FastString -> Unique -> DataCon -> Name
mkWiredInDataConName built_in modu fs unique datacon
= mkWiredInName modu (mkDataOccFS fs) unique
(AConLike (RealDataCon datacon)) -- Relevant DataCon
built_in
-- See Note [Kind-changing of (~) and Coercible]
eqTyConName, eqBoxDataConName :: Name
eqTyConName = mkWiredInTyConName BuiltInSyntax gHC_TYPES (fsLit "~") eqTyConKey eqTyCon
eqBoxDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "Eq#") eqBoxDataConKey eqBoxDataCon
-- See Note [Kind-changing of (~) and Coercible]
coercibleTyConName, coercibleDataConName :: Name
coercibleTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Coercible") coercibleTyConKey coercibleTyCon
coercibleDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "MkCoercible") coercibleDataConKey coercibleDataCon
charTyConName, charDataConName, intTyConName, intDataConName :: Name
charTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Char") charTyConKey charTyCon
charDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "C#") charDataConKey charDataCon
intTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Int") intTyConKey intTyCon
intDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "I#") intDataConKey intDataCon
boolTyConName, falseDataConName, trueDataConName :: Name
boolTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Bool") boolTyConKey boolTyCon
falseDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "False") falseDataConKey falseDataCon
trueDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "True") trueDataConKey trueDataCon
listTyConName, nilDataConName, consDataConName :: Name
listTyConName = mkWiredInTyConName BuiltInSyntax gHC_TYPES (fsLit "[]") listTyConKey listTyCon
nilDataConName = mkWiredInDataConName BuiltInSyntax gHC_TYPES (fsLit "[]") nilDataConKey nilDataCon
consDataConName = mkWiredInDataConName BuiltInSyntax gHC_TYPES (fsLit ":") consDataConKey consDataCon
wordTyConName, wordDataConName, floatTyConName, floatDataConName, doubleTyConName, doubleDataConName :: Name
wordTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Word") wordTyConKey wordTyCon
wordDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "W#") wordDataConKey wordDataCon
floatTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Float") floatTyConKey floatTyCon
floatDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "F#") floatDataConKey floatDataCon
doubleTyConName = mkWiredInTyConName UserSyntax gHC_TYPES (fsLit "Double") doubleTyConKey doubleTyCon
doubleDataConName = mkWiredInDataConName UserSyntax gHC_TYPES (fsLit "D#") doubleDataConKey doubleDataCon
-- Kinds
typeNatKindConName, typeSymbolKindConName :: Name
typeNatKindConName = mkWiredInTyConName UserSyntax gHC_TYPELITS (fsLit "Nat") typeNatKindConNameKey typeNatKindCon
typeSymbolKindConName = mkWiredInTyConName UserSyntax gHC_TYPELITS (fsLit "Symbol") typeSymbolKindConNameKey typeSymbolKindCon
parrTyConName, parrDataConName :: Name
parrTyConName = mkWiredInTyConName BuiltInSyntax
gHC_PARR' (fsLit "[::]") parrTyConKey parrTyCon
parrDataConName = mkWiredInDataConName UserSyntax
gHC_PARR' (fsLit "PArr") parrDataConKey parrDataCon
boolTyCon_RDR, false_RDR, true_RDR, intTyCon_RDR, charTyCon_RDR,
intDataCon_RDR, listTyCon_RDR, consDataCon_RDR, parrTyCon_RDR, eqTyCon_RDR :: RdrName
boolTyCon_RDR = nameRdrName boolTyConName
false_RDR = nameRdrName falseDataConName
true_RDR = nameRdrName trueDataConName
intTyCon_RDR = nameRdrName intTyConName
charTyCon_RDR = nameRdrName charTyConName
intDataCon_RDR = nameRdrName intDataConName
listTyCon_RDR = nameRdrName listTyConName
consDataCon_RDR = nameRdrName consDataConName
parrTyCon_RDR = nameRdrName parrTyConName
eqTyCon_RDR = nameRdrName eqTyConName
{-
************************************************************************
* *
\subsection{mkWiredInTyCon}
* *
************************************************************************
-}
pcNonRecDataTyCon :: Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon
-- Not an enumeration, not promotable
pcNonRecDataTyCon = pcTyCon False NonRecursive False
-- This function assumes that the types it creates have all parameters at
-- Representational role!
pcTyCon :: Bool -> RecFlag -> Bool -> Name -> Maybe CType -> [TyVar] -> [DataCon] -> TyCon
pcTyCon is_enum is_rec is_prom name cType tyvars cons
= buildAlgTyCon name
tyvars
(map (const Representational) tyvars)
cType
[] -- No stupid theta
(DataTyCon cons is_enum)
is_rec
is_prom
False -- Not in GADT syntax
NoParentTyCon
pcDataCon :: Name -> [TyVar] -> [Type] -> TyCon -> DataCon
pcDataCon = pcDataConWithFixity False
pcDataConWithFixity :: Bool -> Name -> [TyVar] -> [Type] -> TyCon -> DataCon
pcDataConWithFixity infx n = pcDataConWithFixity' infx n (incrUnique (nameUnique n))
-- The Name's unique is the first of two free uniques;
-- the first is used for the datacon itself,
-- the second is used for the "worker name"
--
-- To support this the mkPreludeDataConUnique function "allocates"
-- one DataCon unique per pair of Ints.
pcDataConWithFixity' :: Bool -> Name -> Unique -> [TyVar] -> [Type] -> TyCon -> DataCon
-- The Name should be in the DataName name space; it's the name
-- of the DataCon itself.
pcDataConWithFixity' declared_infix dc_name wrk_key tyvars arg_tys tycon
= data_con
where
data_con = mkDataCon dc_name declared_infix
(map (const HsNoBang) arg_tys)
[] -- No labelled fields
tyvars
[] -- No existential type variables
[] -- No equality spec
[] -- No theta
arg_tys (mkTyConApp tycon (mkTyVarTys tyvars))
tycon
[] -- No stupid theta
(mkDataConWorkId wrk_name data_con)
NoDataConRep -- Wired-in types are too simple to need wrappers
modu = ASSERT( isExternalName dc_name )
nameModule dc_name
wrk_occ = mkDataConWorkerOcc (nameOccName dc_name)
wrk_name = mkWiredInName modu wrk_occ wrk_key
(AnId (dataConWorkId data_con)) UserSyntax
{-
************************************************************************
* *
Kinds
* *
************************************************************************
-}
typeNatKindCon, typeSymbolKindCon :: TyCon
-- data Nat
-- data Symbol
typeNatKindCon = pcTyCon False NonRecursive True typeNatKindConName Nothing [] []
typeSymbolKindCon = pcTyCon False NonRecursive True typeSymbolKindConName Nothing [] []
typeNatKind, typeSymbolKind :: Kind
typeNatKind = TyConApp (promoteTyCon typeNatKindCon) []
typeSymbolKind = TyConApp (promoteTyCon typeSymbolKindCon) []
{-
************************************************************************
* *
Stuff for dealing with tuples
* *
************************************************************************
Note [How tuples work] See also Note [Known-key names] in PrelNames
~~~~~~~~~~~~~~~~~~~~~~
* There are three families of tuple TyCons and corresponding
DataCons, (boxed, unboxed, and constraint tuples), expressed by the
type BasicTypes.TupleSort.
* DataCons (and workers etc) for BoxedTuple and ConstraintTuple have
- distinct Uniques
- the same OccName
Using the same OccName means (hack!) that a single copy of the
runtime library code (info tables etc) works for both.
* When looking up an OccName in the original-name cache
(IfaceEnv.lookupOrigNameCache), we spot the tuple OccName to make sure
we get the right wired-in name. This guy can't tell the difference
  between BoxedTuple and ConstraintTuple (same OccName!), so tuples
are not serialised into interface files using OccNames at all.
-}
isBuiltInOcc_maybe :: OccName -> Maybe Name
-- Built in syntax isn't "in scope" so these OccNames
-- map to wired-in Names with BuiltInSyntax
isBuiltInOcc_maybe occ
= case occNameString occ of
"[]" -> choose_ns listTyCon nilDataCon
":" -> Just consDataConName
"[::]" -> Just parrTyConName
"(##)" -> choose_ns unboxedUnitTyCon unboxedUnitDataCon
"()" -> choose_ns unitTyCon unitDataCon
'(':'#':',':rest -> parse_tuple UnboxedTuple 2 rest
'(':',':rest -> parse_tuple BoxedTuple 2 rest
_other -> Nothing
where
ns = occNameSpace occ
parse_tuple sort n rest
| (',' : rest2) <- rest = parse_tuple sort (n+1) rest2
| tail_matches sort rest = choose_ns (tupleTyCon sort n)
(tupleCon sort n)
| otherwise = Nothing
tail_matches BoxedTuple ")" = True
tail_matches UnboxedTuple "#)" = True
tail_matches _ _ = False
choose_ns tc dc
| isTcClsNameSpace ns = Just (getName tc)
| isDataConNameSpace ns = Just (getName dc)
| otherwise = Just (getName (dataConWorkId dc))
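-- For example (a reading of the code above): the occurrence "(,,)" in the
-- type-constructor namespace resolves to the boxed 3-tuple TyCon, and in
-- the data-constructor namespace to the 3-tuple DataCon, via 'choose_ns'.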
mkTupleOcc :: NameSpace -> TupleSort -> Arity -> OccName
mkTupleOcc ns sort ar = mkOccName ns str
where
-- No need to cache these, the caching is done in mk_tuple
str = case sort of
UnboxedTuple -> '(' : '#' : commas ++ "#)"
BoxedTuple -> '(' : commas ++ ")"
ConstraintTuple -> '(' : commas ++ ")"
commas = take (ar-1) (repeat ',')
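-- For example (a reading of the code above): @mkTupleOcc tcName BoxedTuple 3@
-- renders as "(,,)" and @mkTupleOcc tcName UnboxedTuple 2@ as "(#,#)".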
-- Cute hack: we reuse the standard tuple OccNames (and hence code)
-- for fact tuples, but give them different Uniques so they are not equal.
--
-- You might think that this will go wrong because isBuiltInOcc_maybe won't
-- be able to tell the difference between boxed tuples and constraint tuples. BUT:
-- 1. Constraint tuples never occur directly in user code, so it doesn't matter
-- that we can't detect them in Orig OccNames originating from the user
-- programs (or those built by setRdrNameSpace used on an Exact tuple Name)
-- 2. Interface files have a special representation for tuple *occurrences*
-- in IfaceTyCons, their workers (in IfaceSyn) and their DataCons (in case
-- alternatives). Thus we don't rely on the OccName to figure out what kind
-- of tuple an occurrence was trying to use in these situations.
-- 3. We *don't* represent tuple data type declarations specially, so those
-- are still turned into wired-in names via isBuiltInOcc_maybe. But that's OK
-- because we don't actually need to declare constraint tuples thanks to this hack.
--
-- So basically any OccName like (,,) flowing to isBuiltInOcc_maybe will always
-- refer to the standard boxed tuple. Cool :-)
tupleTyCon :: TupleSort -> Arity -> TyCon
tupleTyCon sort i | i > mAX_TUPLE_SIZE = fst (mk_tuple sort i) -- Build one specially
tupleTyCon BoxedTuple i = fst (boxedTupleArr ! i)
tupleTyCon UnboxedTuple i = fst (unboxedTupleArr ! i)
tupleTyCon ConstraintTuple i = fst (factTupleArr ! i)
promotedTupleTyCon :: TupleSort -> Arity -> TyCon
promotedTupleTyCon sort i = promoteTyCon (tupleTyCon sort i)
promotedTupleDataCon :: TupleSort -> Arity -> TyCon
promotedTupleDataCon sort i = promoteDataCon (tupleCon sort i)
tupleCon :: TupleSort -> Arity -> DataCon
tupleCon sort i | i > mAX_TUPLE_SIZE = snd (mk_tuple sort i) -- Build one specially
tupleCon BoxedTuple i = snd (boxedTupleArr ! i)
tupleCon UnboxedTuple i = snd (unboxedTupleArr ! i)
tupleCon ConstraintTuple i = snd (factTupleArr ! i)
boxedTupleArr, unboxedTupleArr, factTupleArr :: Array Int (TyCon,DataCon)
boxedTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple BoxedTuple i | i <- [0..mAX_TUPLE_SIZE]]
unboxedTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple UnboxedTuple i | i <- [0..mAX_TUPLE_SIZE]]
factTupleArr = listArray (0,mAX_TUPLE_SIZE) [mk_tuple ConstraintTuple i | i <- [0..mAX_TUPLE_SIZE]]
mk_tuple :: TupleSort -> Int -> (TyCon,DataCon)
mk_tuple sort arity = (tycon, tuple_con)
where
tycon = mkTupleTyCon tc_name tc_kind arity tyvars tuple_con sort prom_tc
prom_tc = case sort of
BoxedTuple -> Just (mkPromotedTyCon tycon (promoteKind tc_kind))
UnboxedTuple -> Nothing
ConstraintTuple -> Nothing
modu = mkTupleModule sort
tc_name = mkWiredInName modu (mkTupleOcc tcName sort arity) tc_uniq
(ATyCon tycon) BuiltInSyntax
tc_kind = mkArrowKinds (map tyVarKind tyvars) res_kind
res_kind = case sort of
BoxedTuple -> liftedTypeKind
UnboxedTuple -> unliftedTypeKind
ConstraintTuple -> constraintKind
tyvars = take arity $ case sort of
BoxedTuple -> alphaTyVars
UnboxedTuple -> openAlphaTyVars
ConstraintTuple -> tyVarList constraintKind
tuple_con = pcDataCon dc_name tyvars tyvar_tys tycon
tyvar_tys = mkTyVarTys tyvars
dc_name = mkWiredInName modu (mkTupleOcc dataName sort arity) dc_uniq
(AConLike (RealDataCon tuple_con)) BuiltInSyntax
tc_uniq = mkTupleTyConUnique sort arity
dc_uniq = mkTupleDataConUnique sort arity
unitTyCon :: TyCon
unitTyCon = tupleTyCon BoxedTuple 0
unitDataCon :: DataCon
unitDataCon = head (tyConDataCons unitTyCon)
unitDataConId :: Id
unitDataConId = dataConWorkId unitDataCon
pairTyCon :: TyCon
pairTyCon = tupleTyCon BoxedTuple 2
unboxedUnitTyCon :: TyCon
unboxedUnitTyCon = tupleTyCon UnboxedTuple 0
unboxedUnitDataCon :: DataCon
unboxedUnitDataCon = tupleCon UnboxedTuple 0
unboxedSingletonTyCon :: TyCon
unboxedSingletonTyCon = tupleTyCon UnboxedTuple 1
unboxedSingletonDataCon :: DataCon
unboxedSingletonDataCon = tupleCon UnboxedTuple 1
unboxedPairTyCon :: TyCon
unboxedPairTyCon = tupleTyCon UnboxedTuple 2
unboxedPairDataCon :: DataCon
unboxedPairDataCon = tupleCon UnboxedTuple 2
{-
************************************************************************
* *
\subsection[TysWiredIn-boxed-prim]{The ``boxed primitive'' types (@Char@, @Int@, etc)}
* *
************************************************************************
-}
eqTyCon :: TyCon
eqTyCon = mkAlgTyCon eqTyConName
(ForAllTy kv $ mkArrowKinds [k, k] constraintKind)
[kv, a, b]
[Nominal, Nominal, Nominal]
Nothing
[] -- No stupid theta
(DataTyCon [eqBoxDataCon] False)
NoParentTyCon
NonRecursive
False
Nothing -- No parent for constraint-kinded types
where
kv = kKiVar
k = mkTyVarTy kv
a:b:_ = tyVarList k
eqBoxDataCon :: DataCon
eqBoxDataCon = pcDataCon eqBoxDataConName args [TyConApp eqPrimTyCon (map mkTyVarTy args)] eqTyCon
where
kv = kKiVar
k = mkTyVarTy kv
a:b:_ = tyVarList k
args = [kv, a, b]
coercibleTyCon :: TyCon
coercibleTyCon = mkClassTyCon
coercibleTyConName kind tvs [Nominal, Representational, Representational]
rhs coercibleClass NonRecursive
where kind = (ForAllTy kv $ mkArrowKinds [k, k] constraintKind)
kv = kKiVar
k = mkTyVarTy kv
a:b:_ = tyVarList k
tvs = [kv, a, b]
rhs = DataTyCon [coercibleDataCon] False
coercibleDataCon :: DataCon
coercibleDataCon = pcDataCon coercibleDataConName args [TyConApp eqReprPrimTyCon (map mkTyVarTy args)] coercibleTyCon
where
kv = kKiVar
k = mkTyVarTy kv
a:b:_ = tyVarList k
args = [kv, a, b]
coercibleClass :: Class
coercibleClass = mkClass (tyConTyVars coercibleTyCon) [] [] [] [] [] (mkAnd []) coercibleTyCon
charTy :: Type
charTy = mkTyConTy charTyCon
charTyCon :: TyCon
charTyCon = pcNonRecDataTyCon charTyConName
(Just (CType Nothing (fsLit "HsChar")))
[] [charDataCon]
charDataCon :: DataCon
charDataCon = pcDataCon charDataConName [] [charPrimTy] charTyCon
stringTy :: Type
stringTy = mkListTy charTy -- convenience only
intTy :: Type
intTy = mkTyConTy intTyCon
intTyCon :: TyCon
intTyCon = pcNonRecDataTyCon intTyConName (Just (CType Nothing (fsLit "HsInt"))) [] [intDataCon]
intDataCon :: DataCon
intDataCon = pcDataCon intDataConName [] [intPrimTy] intTyCon
wordTy :: Type
wordTy = mkTyConTy wordTyCon
wordTyCon :: TyCon
wordTyCon = pcNonRecDataTyCon wordTyConName (Just (CType Nothing (fsLit "HsWord"))) [] [wordDataCon]
wordDataCon :: DataCon
wordDataCon = pcDataCon wordDataConName [] [wordPrimTy] wordTyCon
floatTy :: Type
floatTy = mkTyConTy floatTyCon
floatTyCon :: TyCon
floatTyCon = pcNonRecDataTyCon floatTyConName (Just (CType Nothing (fsLit "HsFloat"))) [] [floatDataCon]
floatDataCon :: DataCon
floatDataCon = pcDataCon floatDataConName [] [floatPrimTy] floatTyCon
doubleTy :: Type
doubleTy = mkTyConTy doubleTyCon
doubleTyCon :: TyCon
doubleTyCon = pcNonRecDataTyCon doubleTyConName (Just (CType Nothing (fsLit "HsDouble"))) [] [doubleDataCon]
doubleDataCon :: DataCon
doubleDataCon = pcDataCon doubleDataConName [] [doublePrimTy] doubleTyCon
{-
************************************************************************
* *
\subsection[TysWiredIn-Bool]{The @Bool@ type}
* *
************************************************************************
An ordinary enumeration type, but deeply wired in. There are no
magical operations on @Bool@ (just the regular Prelude code).
{\em BEGIN IDLE SPECULATION BY SIMON}
This is not the only way to encode @Bool@. A more obvious coding makes
@Bool@ just a boxed up version of @Bool#@, like this:
\begin{verbatim}
type Bool# = Int#
data Bool = MkBool Bool#
\end{verbatim}
Unfortunately, this doesn't correspond to what the Report says @Bool@
looks like! Furthermore, we get slightly less efficient code (I
think) with this coding. @gtInt@ would look like this:
\begin{verbatim}
gtInt :: Int -> Int -> Bool
gtInt x y = case x of I# x# ->
case y of I# y# ->
case (gtIntPrim x# y#) of
b# -> MkBool b#
\end{verbatim}
Notice that the result of the @gtIntPrim@ comparison has to be turned
into an integer (here called @b#@), and returned in a @MkBool@ box.
The @if@ expression would compile to this:
\begin{verbatim}
case (gtInt x y) of
MkBool b# -> case b# of { 1# -> e1; 0# -> e2 }
\end{verbatim}
I think this code is a little less efficient than the previous code,
but I'm not certain. At all events, corresponding with the Report is
important. The interesting thing is that the language is expressive
enough to describe more than one alternative; and that a type doesn't
necessarily need to be a straightforwardly boxed version of its
primitive counterpart.
{\em END IDLE SPECULATION BY SIMON}
-}
boolTy :: Type
boolTy = mkTyConTy boolTyCon
boolTyCon :: TyCon
boolTyCon = pcTyCon True NonRecursive True boolTyConName
(Just (CType Nothing (fsLit "HsBool")))
[] [falseDataCon, trueDataCon]
falseDataCon, trueDataCon :: DataCon
falseDataCon = pcDataCon falseDataConName [] [] boolTyCon
trueDataCon = pcDataCon trueDataConName [] [] boolTyCon
falseDataConId, trueDataConId :: Id
falseDataConId = dataConWorkId falseDataCon
trueDataConId = dataConWorkId trueDataCon
orderingTyCon :: TyCon
orderingTyCon = pcTyCon True NonRecursive True orderingTyConName Nothing
[] [ltDataCon, eqDataCon, gtDataCon]
ltDataCon, eqDataCon, gtDataCon :: DataCon
ltDataCon = pcDataCon ltDataConName [] [] orderingTyCon
eqDataCon = pcDataCon eqDataConName [] [] orderingTyCon
gtDataCon = pcDataCon gtDataConName [] [] orderingTyCon
ltDataConId, eqDataConId, gtDataConId :: Id
ltDataConId = dataConWorkId ltDataCon
eqDataConId = dataConWorkId eqDataCon
gtDataConId = dataConWorkId gtDataCon
{-
************************************************************************
* *
\subsection[TysWiredIn-List]{The @List@ type (incl ``build'' magic)}
* *
************************************************************************
Special syntax, deeply wired in, but otherwise an ordinary algebraic
data types:
\begin{verbatim}
data [] a = [] | a : (List a)
data () = ()
data (,) a b = (,) a b
...
\end{verbatim}
-}
mkListTy :: Type -> Type
mkListTy ty = mkTyConApp listTyCon [ty]
listTyCon :: TyCon
listTyCon = pcTyCon False Recursive True
listTyConName Nothing alpha_tyvar [nilDataCon, consDataCon]
mkPromotedListTy :: Type -> Type
mkPromotedListTy ty = mkTyConApp promotedListTyCon [ty]
promotedListTyCon :: TyCon
promotedListTyCon = promoteTyCon listTyCon
nilDataCon :: DataCon
nilDataCon = pcDataCon nilDataConName alpha_tyvar [] listTyCon
consDataCon :: DataCon
consDataCon = pcDataConWithFixity True {- Declared infix -}
consDataConName
alpha_tyvar [alphaTy, mkTyConApp listTyCon alpha_ty] listTyCon
-- Interesting: polymorphic recursion would help here.
-- We can't use (mkListTy alphaTy) in the defn of consDataCon, else mkListTy
-- gets the over-specific type (Type -> Type)
{-
************************************************************************
* *
\subsection[TysWiredIn-Tuples]{The @Tuple@ types}
* *
************************************************************************
The tuple types are definitely magic, because they form an infinite
family.
\begin{itemize}
\item
They have a special family of type constructors, of type @TyCon@
These contain the tycon arity, but don't require a Unique.
\item
They have a special family of constructors, of type
@Id@. Again these contain their arity but don't need a Unique.
\item
There should be a magic way of generating the info tables and
entry code for all tuples.
But at the moment we just compile a Haskell source
file\srcloc{lib/prelude/...} containing declarations like:
\begin{verbatim}
data Tuple0 = Tup0
data Tuple2 a b = Tup2 a b
data Tuple3 a b c = Tup3 a b c
data Tuple4 a b c d = Tup4 a b c d
...
\end{verbatim}
The print-names associated with the magic @Id@s for tuple constructors
``just happen'' to be the same as those generated by these
declarations.
\item
The instance environment should have a magic way to know
that each tuple type is an instances of classes @Eq@, @Ix@, @Ord@ and
so on. \ToDo{Not implemented yet.}
\item
There should also be a way to generate the appropriate code for each
of these instances, but (like the info tables and entry code) it is
done by enumeration\srcloc{lib/prelude/InTup?.hs}.
\end{itemize}
-}
mkTupleTy :: TupleSort -> [Type] -> Type
-- Special case for *boxed* 1-tuples, which are represented by the type itself
mkTupleTy sort [ty] | Boxed <- tupleSortBoxity sort = ty
mkTupleTy sort tys = mkTyConApp (tupleTyCon sort (length tys)) tys
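-- For example, @mkTupleTy BoxedTuple [intTy]@ is just 'intTy' (the boxed
-- 1-tuple special case above), while @mkTupleTy BoxedTuple [intTy, boolTy]@
-- applies the pair TyCon to both component types.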
-- | Build the type of a small tuple that holds the specified type of thing
mkBoxedTupleTy :: [Type] -> Type
mkBoxedTupleTy tys = mkTupleTy BoxedTuple tys
unitTy :: Type
unitTy = mkTupleTy BoxedTuple []
{-
************************************************************************
* *
\subsection[TysWiredIn-PArr]{The @[::]@ type}
* *
************************************************************************
Special syntax for parallel arrays needs some wired in definitions.
-}
-- | Construct a type representing the application of the parallel array constructor
mkPArrTy :: Type -> Type
mkPArrTy ty = mkTyConApp parrTyCon [ty]
-- | Represents the type constructor of parallel arrays
--
-- * This must match the definition in @PrelPArr@
--
-- NB: Although the constructor is given here, it will not be accessible in
-- user code as it is not in the environment of any compiled module except
-- @PrelPArr@.
--
parrTyCon :: TyCon
parrTyCon = pcNonRecDataTyCon parrTyConName Nothing alpha_tyvar [parrDataCon]
parrDataCon :: DataCon
parrDataCon = pcDataCon
parrDataConName
alpha_tyvar -- forall'ed type variables
[intTy, -- 1st argument: Int
mkTyConApp -- 2nd argument: Array# a
arrayPrimTyCon
alpha_ty]
parrTyCon
-- | Check whether a type constructor is the constructor for parallel arrays
isPArrTyCon :: TyCon -> Bool
isPArrTyCon tc = tyConName tc == parrTyConName
-- | Fake array constructors
--
-- * These constructors are never really used to represent array values;
-- however, they are very convenient during desugaring (and, in particular,
-- in the pattern matching compiler) to treat array pattern just like
-- yet another constructor pattern
--
parrFakeCon :: Arity -> DataCon
parrFakeCon i | i > mAX_TUPLE_SIZE = mkPArrFakeCon i -- build one specially
parrFakeCon i = parrFakeConArr!i
-- pre-defined set of constructors
--
parrFakeConArr :: Array Int DataCon
parrFakeConArr = array (0, mAX_TUPLE_SIZE) [(i, mkPArrFakeCon i)
| i <- [0..mAX_TUPLE_SIZE]]
-- build a fake parallel array constructor for the given arity
--
mkPArrFakeCon :: Int -> DataCon
mkPArrFakeCon arity = data_con
where
data_con = pcDataCon name [tyvar] tyvarTys parrTyCon
tyvar = head alphaTyVars
tyvarTys = replicate arity $ mkTyVarTy tyvar
nameStr = mkFastString ("MkPArr" ++ show arity)
name = mkWiredInName gHC_PARR' (mkDataOccFS nameStr) unique
(AConLike (RealDataCon data_con)) UserSyntax
unique = mkPArrDataConUnique arity
-- | Checks whether a data constructor is a fake constructor for parallel arrays
isPArrFakeCon :: DataCon -> Bool
isPArrFakeCon dcon = dcon == parrFakeCon (dataConSourceArity dcon)
-- Promoted Booleans
promotedBoolTyCon, promotedFalseDataCon, promotedTrueDataCon :: TyCon
promotedBoolTyCon = promoteTyCon boolTyCon
promotedTrueDataCon = promoteDataCon trueDataCon
promotedFalseDataCon = promoteDataCon falseDataCon
-- Promoted Ordering
promotedOrderingTyCon
, promotedLTDataCon
, promotedEQDataCon
, promotedGTDataCon
:: TyCon
promotedOrderingTyCon = promoteTyCon orderingTyCon
promotedLTDataCon = promoteDataCon ltDataCon
promotedEQDataCon = promoteDataCon eqDataCon
promotedGTDataCon = promoteDataCon gtDataCon
| bitemyapp/ghc | compiler/prelude/TysWiredIn.hs | bsd-3-clause | 33,167 | 0 | 14 | 8,457 | 5,145 | 2,820 | 2,325 | 436 | 10 |
{-# LANGUAGE DeriveGeneric #-}
{- |
Module: Network.SoundCloud.App
Copyright: (c) 2012 Sebastián Ramírez Magrí <[email protected]>
License: BSD3
Maintainer: Sebastián Ramírez Magrí <[email protected]>
Stability: experimental
Represents SoundCloud applications as found at <http://soundcloud.com/apps>
-}
module Network.SoundCloud.App where
import Data.Aeson (FromJSON, ToJSON, decode)
import qualified Data.ByteString.Lazy.Char8 as BSL
import GHC.Generics (Generic)
import Text.Printf (printf)
import Network.SoundCloud.Util (scRecursiveGet, scResolve)
-- | JSON representation of applications
data JSON = JSON { id :: Int
, permalink_url :: String
, external_url :: String
, creator :: String
} deriving (Show, Generic)
instance FromJSON JSON
instance ToJSON JSON
-- | Decode a valid JSON string into an application
-- 'JSON' record
decodeJSON :: String -> Maybe JSON
decodeJSON dat = decode (BSL.pack dat) :: Maybe JSON
-- | Get an application 'JSON' record given a public app URL
-- as in <http://soundcloud.com/apps/app_name>
getJSON :: String -> IO (Maybe JSON)
getJSON url =
do dat <- scRecursiveGet =<< scResolve url
case dat of
Nothing -> return Nothing
Just d -> return $ decodeJSON d
-- | Show general information about an application in the
-- standard output
showInfo :: String -> IO ()
showInfo url =
do obj <- getJSON url
case obj of
Nothing -> putStrLn "Unable to get app information"
Just o ->
do let tmp = "%s\n\t%s by %s\n"
printf
tmp
(permalink_url o)
(external_url o)
(creator o)
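-- A usage sketch (the URL is a placeholder, not a real application):
--
-- > showInfo "http://soundcloud.com/apps/some-app"
--
-- resolves the public URL and prints the permalink, external URL and
-- creator of the application, or an error message if resolution fails.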
| sebasmagri/HScD | src/Network/SoundCloud/App.hs | bsd-3-clause | 1,840 | 0 | 14 | 550 | 339 | 181 | 158 | 34 | 2 |