| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cyclomatic_complexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-----------------------------------------------------------------------------
-- |
-- Module : Network.HTTP.Stream
-- Copyright : See LICENSE file
-- License : BSD
--
-- Maintainer : Ganesh Sittampalam <[email protected]>
-- Stability : experimental
-- Portability : non-portable (not tested)
--
-- Transmitting HTTP requests and responses holding @String@ in their payload bodies.
-- This is one of the implementation modules for the "Network.HTTP" interface, representing
-- request and response content as @String@s and transmitting them in non-packed form
-- over 'Stream' handles (cf. "Network.HTTP.HandleStream" and its use of @ByteString@s).
-- It is mostly here for backwards compatibility, representing how requests and responses
-- were transmitted up until the 4.x releases of the HTTP package.
--
-- For more detailed information about what the individual exports do, please consult
-- the documentation for "Network.HTTP". /Notice/ however that the functions here do
-- not perform any kind of normalization prior to transmission (or receipt); you are
-- responsible for doing any such yourself, or, if you prefer, just switch to using
-- "Network.HTTP" function instead.
--
-----------------------------------------------------------------------------
module Network.HTTP.Stream
( module Network.Stream
, simpleHTTP -- :: Request_String -> IO (Result Response_String)
, simpleHTTP_ -- :: Stream s => s -> Request_String -> IO (Result Response_String)
, sendHTTP -- :: Stream s => s -> Request_String -> IO (Result Response_String)
, sendHTTP_notify -- :: Stream s => s -> Request_String -> IO () -> IO (Result Response_String)
, receiveHTTP -- :: Stream s => s -> IO (Result Request_String)
, respondHTTP -- :: Stream s => s -> Response_String -> IO ()
) where
-----------------------------------------------------------------
------------------ Imports --------------------------------------
-----------------------------------------------------------------
import Network.Stream
import Network.StreamDebugger (debugStream)
import Network.TCP (openTCPPort)
import Network.BufferType ( stringBufferOp )
import Network.HTTP.Base
import Network.HTTP.Headers
import Network.HTTP.Utils ( trim )
import Data.Char (toLower)
import Data.Maybe (fromMaybe)
import Control.Exception (onException)
import Control.Monad (when)
-- Turn on to enable HTTP traffic logging
debug :: Bool
debug = False
-- File that HTTP traffic logs go to
httpLogFile :: String
httpLogFile = "http-debug.log"
-----------------------------------------------------------------
------------------ Misc -----------------------------------------
-----------------------------------------------------------------
-- | Simple way to transmit a resource across a non-persistent connection.
simpleHTTP :: Request_String -> IO (Result Response_String)
simpleHTTP r = do
auth <- getAuth r
c <- openTCPPort (host auth) (fromMaybe 80 (port auth))
simpleHTTP_ c r
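-- A hypothetical usage sketch (not part of the original module): it assumes
-- 'Network.URI.parseURI' to build a 'URI' and 'mkRequest' from
-- "Network.HTTP.Base" to build a GET 'Request_String'; the URL is made up.
--
-- > import Network.URI (parseURI)
-- > import Data.Maybe (fromJust)
-- >
-- > exampleGet :: IO ()
-- > exampleGet = do
-- >   let uri = fromJust (parseURI "http://example.com/index.html")
-- >   result <- simpleHTTP (mkRequest GET uri)
-- >   case result of
-- >     Left err  -> putStrLn ("Connection error: " ++ show err)
-- >     Right rsp -> putStrLn (rspBody rsp)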
-- | Like 'simpleHTTP', but acting on an already opened stream.
simpleHTTP_ :: Stream s => s -> Request_String -> IO (Result Response_String)
simpleHTTP_ s r
| not debug = sendHTTP s r
| otherwise = do
s' <- debugStream httpLogFile s
sendHTTP s' r
sendHTTP :: Stream s => s -> Request_String -> IO (Result Response_String)
sendHTTP conn rq = sendHTTP_notify conn rq (return ())
sendHTTP_notify :: Stream s => s -> Request_String -> IO () -> IO (Result Response_String)
sendHTTP_notify conn rq onSendComplete = do
when providedClose $ (closeOnEnd conn True)
onException (sendMain conn rq onSendComplete)
(close conn)
where
providedClose = findConnClose (rqHeaders rq)
-- From RFC 2616, section 8.2.3:
-- 'Because of the presence of older implementations, the protocol allows
-- ambiguous situations in which a client may send "Expect: 100-
-- continue" without receiving either a 417 (Expectation Failed) status
-- or a 100 (Continue) status. Therefore, when a client sends this
-- header field to an origin server (possibly via a proxy) from which it
-- has never seen a 100 (Continue) status, the client SHOULD NOT wait
-- for an indefinite period before sending the request body.'
--
-- Since we would wait forever, I have disabled use of 100-continue for now.
sendMain :: Stream s => s -> Request_String -> IO () -> IO (Result Response_String)
sendMain conn rqst onSendComplete = do
--let str = if null (rqBody rqst)
-- then show rqst
-- else show (insertHeader HdrExpect "100-continue" rqst)
-- TODO review throwing away of result
_ <- writeBlock conn (show rqst)
-- write body immediately, don't wait for 100 CONTINUE
-- TODO review throwing away of result
_ <- writeBlock conn (rqBody rqst)
onSendComplete
rsp <- getResponseHead conn
switchResponse conn True False rsp rqst
-- reads and parses headers
getResponseHead :: Stream s => s -> IO (Result ResponseData)
getResponseHead conn = do
lor <- readTillEmpty1 stringBufferOp (readLine conn)
return $ lor >>= parseResponseHead
-- Hmmm, this could go bad if we keep getting "100 Continue"
-- responses... Except this should never happen according
-- to the RFC.
switchResponse :: Stream s
=> s
-> Bool {- allow retry? -}
-> Bool {- is body sent? -}
-> Result ResponseData
-> Request_String
-> IO (Result Response_String)
switchResponse _ _ _ (Left e) _ = return (Left e)
-- retry on connreset?
-- if we attempt to use the same socket then there is an excellent
-- chance that the socket is not in a completely closed state.
switchResponse conn allow_retry bdy_sent (Right (cd,rn,hdrs)) rqst =
case matchResponse (rqMethod rqst) cd of
Continue
| not bdy_sent -> {- Time to send the body -}
do { val <- writeBlock conn (rqBody rqst)
; case val of
Left e -> return (Left e)
Right _ ->
do { rsp <- getResponseHead conn
; switchResponse conn allow_retry True rsp rqst
}
}
| otherwise -> {- keep waiting -}
do { rsp <- getResponseHead conn
; switchResponse conn allow_retry bdy_sent rsp rqst
}
Retry -> {- Request with "Expect" header failed.
Trouble is the request contains Expects
other than "100-Continue" -}
do { -- TODO review throwing away of result
_ <- writeBlock conn (show rqst ++ rqBody rqst)
; rsp <- getResponseHead conn
; switchResponse conn False bdy_sent rsp rqst
}
Done -> do
when (findConnClose hdrs)
(closeOnEnd conn True)
return (Right $ Response cd rn hdrs "")
DieHorribly str -> do
close conn
return $ responseParseError "sendHTTP" ("Invalid response: " ++ str)
ExpectEntity ->
let tc = lookupHeader HdrTransferEncoding hdrs
cl = lookupHeader HdrContentLength hdrs
in
do { rslt <- case tc of
Nothing ->
case cl of
Just x -> linearTransfer (readBlock conn) (read x :: Int)
Nothing -> hopefulTransfer stringBufferOp {-null (++) []-} (readLine conn) []
Just x ->
case map toLower (trim x) of
"chunked" -> chunkedTransfer stringBufferOp
(readLine conn) (readBlock conn)
_ -> uglyDeathTransfer "sendHTTP"
; case rslt of
Left e -> close conn >> return (Left e)
Right (ftrs,bdy) -> do
when (findConnClose (hdrs++ftrs))
(closeOnEnd conn True)
return (Right (Response cd rn (hdrs++ftrs) bdy))
}
-- | Receive and parse an HTTP request from the given Stream. Should be used
-- for server side interactions.
receiveHTTP :: Stream s => s -> IO (Result Request_String)
receiveHTTP conn = getRequestHead >>= processRequest
where
-- reads and parses headers
getRequestHead :: IO (Result RequestData)
getRequestHead =
do { lor <- readTillEmpty1 stringBufferOp (readLine conn)
; return $ lor >>= parseRequestHead
}
processRequest (Left e) = return $ Left e
processRequest (Right (rm,uri,hdrs)) =
do -- FIXME : Also handle 100-continue.
let tc = lookupHeader HdrTransferEncoding hdrs
cl = lookupHeader HdrContentLength hdrs
rslt <- case tc of
Nothing ->
case cl of
Just x -> linearTransfer (readBlock conn) (read x :: Int)
Nothing -> return (Right ([], "")) -- hopefulTransfer ""
Just x ->
case map toLower (trim x) of
"chunked" -> chunkedTransfer stringBufferOp
(readLine conn) (readBlock conn)
_ -> uglyDeathTransfer "receiveHTTP"
return $ do
(ftrs,bdy) <- rslt
return (Request uri rm (hdrs++ftrs) bdy)
-- | Very simple function: sends an HTTP response over the given stream. This
-- could be improved on to use different transfer types.
respondHTTP :: Stream s => s -> Response_String -> IO ()
respondHTTP conn rsp = do -- TODO review throwing away of result
_ <- writeBlock conn (show rsp)
-- write body immediately, don't wait for 100 CONTINUE
-- TODO review throwing away of result
_ <- writeBlock conn (rspBody rsp)
return ()
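-- A hypothetical server-side sketch (not part of the original module): given
-- an already accepted 'Stream' (for instance a handle obtained via
-- "Network.TCP"), read one request with 'receiveHTTP' and answer it with
-- 'respondHTTP'; the 'Response' constructor comes from "Network.HTTP.Base",
-- which this module already imports.
--
-- > handleOne :: Stream s => s -> IO ()
-- > handleOne conn = do
-- >   req <- receiveHTTP conn
-- >   case req of
-- >     Left err -> putStrLn ("Malformed request: " ++ show err)
-- >     Right _  -> respondHTTP conn (Response (2,0,0) "OK" [] "hello")
-- >   close conn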
| beni55/HTTP | Network/HTTP/Stream.hs | bsd-3-clause | 10,623 | 22 | 29 | 3,480 | 1,797 | 921 | 876 | 134 | 10 |
{-# LANGUAGE RebindableSyntax #-}
-- NOTE: Required for 'zipWith3M'
{-# LANGUAGE ScopedTypeVariables #-}
{-
******************************************************************************
* H M T C *
* *
* Module: TypeChecker *
* Purpose: MiniTriangle Type Checker *
* Authors: Henrik Nilsson *
* *
* Copyright (c) Henrik Nilsson, 2006 - 2015 *
* *
******************************************************************************
-}
-- | MiniTriangle Type Checker.
-- Substantially re-written autumn 2013
module TypeChecker (
typeCheck, -- :: A.AST -> D MTIR
testTypeChecker -- :: String -> [(Name,Type)] -> IO ()
) where
import Prelude hiding ( Monad(..) )
import Control.Monad.Parameterized
-- Standard library imports
import Data.List ((\\), nub, sort, intersperse)
import Data.Maybe (fromJust)
import Control.Monad (mapAndUnzipM, unless)
import Control.Monad.Fix (mfix)
import Debug.Trace (trace, traceShow)
-- HMTC module imports
import SrcPos
import Diagnostics
import Name
import ScopeLevel
import Type
import Symbol
import Env
import MTStdEnv
import qualified AST as A
import MTIR
import PPMTIR
import Parser (parse)
-- NOTE: Required because of RebindableSyntax ==> NoImplicitPrelude
ifThenElse :: Bool -> a -> a -> a
ifThenElse True t _ = t
ifThenElse False _ f = f
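-- Because RebindableSyntax implies NoImplicitPrelude, every if/then/else in
-- this module desugars to a call of the 'ifThenElse' defined above. As an
-- illustrative sketch (not in the original source), the expression used in
-- the declaration rules below,
--
-- > if o then enterOvrldIntTermSym else enterIntTermSym
--
-- is elaborated by GHC to
--
-- > ifThenElse o enterOvrldIntTermSym enterIntTermSym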
-- | Type checks a complete MiniTriangle program in the standard environment
-- and reports any errors. Hence a computation in the diagnostics monad 'D'.
-- Additionally, translates the program into the type-annotated, intermediate
-- representation MTIR, including eliminating implicit dereferencing at the
-- source level by inserting explicit dereferencing operations.
-- See the Coursework Description Part II for an explanation of the concepts
-- and principles behind this type checker, along with the typing rules that
-- define the MiniTriangle type system which this type checker implements.
-- In particular, comments like "T-IF", "env(x) = s" and "env |- e : t"
-- refer to those typing rules, with the following naming conventions:
--
--     Type set typing rule                     ASCII Comments
--     --------------------                     --------------
--     Capital gamma                            "env"
--     turnstile                                "|-"
--     vector notation (e.g. "e" overlined)     plural "s" suffix, e.g. "es"
typeCheck :: A.AST -> D MTIR
typeCheck (A.AST {A.astCmd = c}) = do
c' <- chkCmd mtStdEnv c
return (MTIR {mtirCmd = c'})
------------------------------------------------------------------------------
-- Implementation of main typing rules
------------------------------------------------------------------------------
-- Check that command is well-formed in the given environment:
--
-- env |- c
chkCmd :: Env -> A.Command -> D Command
-- T-ASSIGN
chkCmd env (A.CmdAssign {A.caVar = x, A.caVal = e, A.cmdSrcPos = sp}) = do
(s, x') <- infTpExp env x -- env |- x : s
(t, x'')<- sinks_nonreftype s x' -- sinks(s,t), not reftype(t)
e' <- chkTpExp env e t -- env |- e : t
return (CmdAssign {caVar = x'', caVal = e', cmdSrcPos = sp})
-- T-CALL
chkCmd env (A.CmdCall {A.ccProc = p, A.ccArgs = es, A.cmdSrcPos = sp}) = do
(ss, es') <- mapAndUnzipM (infTpExp env) es
(ts, t, p') <- infArrTpExp env p ss -- env |- p : ts->Void
require (t == Void) sp (notProcMsg t)
es'' <- zipWith3M sources ss ts es' -- env |- es : ts
return (CmdCall {ccProc = p', ccArgs = es'', cmdSrcPos = sp})
where
notProcMsg t = "Not a procedure; return type is " ++ show t
-- T-SEQ (generalized to sequence of any length)
chkCmd env (A.CmdSeq {A.csCmds = cs, A.cmdSrcPos = sp}) = do
cs' <- mapM (chkCmd env) cs -- NOTE: Type annotation not necessary, because the classic mapM is actually used
return (CmdSeq {csCmds = cs', cmdSrcPos = sp})
-- T-IF
chkCmd env (A.CmdIf {A.ciCondThens = ecs, A.ciMbElse = mbce,
A.cmdSrcPos=sp}) = do
ecs' <- mapM (tcCondThen env) ecs -- NOTE: Type annotation not necessary, because the classic mapM is actually used
-- env |- cs
mbce' <- case mbce of
Nothing -> returnM Nothing -- NOTE: Needs manual selection of polymorphic return to match second case
Just ce -> do
ce' <- chkCmd env ce -- env |- ce
return (Just ce')
return (CmdIf {ciCondThens = ecs', ciMbElse = mbce', cmdSrcPos = sp})
where
tcCondThen :: Env -> (A.Expression, A.Command)
-> D (Expression, Command)
tcCondThen env (e, c) = do
e' <- chkTpExp env e Boolean
c' <- chkCmd env c
return (e', c')
-- T-WHILE
chkCmd env (A.CmdWhile {A.cwCond = e, A.cwBody = c, A.cmdSrcPos = sp}) = do
e' <- chkTpExp env e Boolean -- env |- e : Boolean
c' <- chkCmd env c -- env |- c
return (CmdWhile {cwCond = e', cwBody = c', cmdSrcPos = sp})
-- T-REPEAT
chkCmd env (A.CmdRepeat {A.crBody = c, A.crCond = e, A.cmdSrcPos = sp}) = do
c' <- chkCmd env c -- env |- c
e' <- chkTpExp env e Boolean -- env |- e : Boolean
return (CmdRepeat {crBody = c', crCond = e', cmdSrcPos = sp})
-- T-LET
chkCmd env (A.CmdLet {A.clDecls = ds, A.clBody = c, A.cmdSrcPos = sp}) = do
(ds', env') <- mfix $ \ ~(_, env') -> -- env;env'|- ds | env'
chkDeclarations (openMinScope env) env' ds
c' <- chkCmd env' c -- env' |- c
return (CmdLet {clDecls = ds', clBody = c', cmdSrcPos = sp})
-- Check that declarations/definitions are well-typed in given environment
-- and environment for function/procedure bodies and compute extended
-- environment:
--
-- env; envB |- ds | env'
--
-- [For future reference: If user defined types were allowed, envB should
-- perhaps be used in place of env for elaborating types and computing
-- function/procedure types if it is desired to allow (mutually) recursive
-- type definitions.]
chkDeclarations :: Env -> Env -> [A.Declaration] -> D ([Declaration], Env)
-- T-DECLEMPTY
chkDeclarations env envB [] = returnM ([], env) -- NOTE: Need manual selection of polymorphic return.
-- T-DECLCONST
chkDeclarations env envB
(A.DeclConst {A.dcConst = x, A.dcVal = e, A.dcType = t,
A.declSrcPos=sp}
: ds) = do
t' <- chkDclType env t
e' <- chkTpExp env e t' -- env |- e : t
case enterIntTermSym x (Src t') sp env of -- env' = env, x : Src t
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkDeclarations env envB ds
Right (env', x') -> do
wellinit (itmsLvl x') e'
(ds', env'') <- chkDeclarations env' -- env'; envB |- ds | env''
envB
ds
return (DeclConst {dcConst = x', dcVal = e'} : ds', env'')
-- T-DECLVAR
chkDeclarations env envB
(A.DeclVar {A.dvVar = x, A.dvType = t, A.dvMbVal = Nothing,
A.declSrcPos=sp}
: ds) = do
t' <- chkDclType env t
case enterIntTermSym x (Ref t') sp env of -- env' = env, x : Ref t
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkDeclarations env envB ds
Right (env', x') -> do
(ds', env'') <- chkDeclarations env' -- env'; envB |- ds | env''
envB
ds
return (DeclVar {dvVar = x', dvMbVal = Nothing} : ds', env'')
-- T-DECLINITVAR
chkDeclarations env envB
(A.DeclVar {A.dvVar = x, A.dvType = t, A.dvMbVal = Just e,
A.declSrcPos=sp}
: ds) = do
t' <- chkDclType env t
e' <- chkTpExp env e t' -- env |- e : t
case enterIntTermSym x (Ref t') sp env of -- env' = env, x : Ref t
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkDeclarations env envB ds
Right (env', x') -> do
wellinit (itmsLvl x') e'
(ds', env'') <- chkDeclarations env' -- env'; envB |- ds | env''
envB
ds
return (DeclVar {dvVar = x', dvMbVal = Just e'} : ds', env'')
-- T-DECLFUN
chkDeclarations env envB
(A.DeclFun {A.dfFun = f, A.dfOvrld = o,
A.dfArgDecls = as, A.dfType = t, A.dfBody = e,
A.declSrcPos = sp}
: ds) = do
~(as', envB') <- chkArgDecls (openMajScope envB) as -- envB |- as | envB'
tf <- funType env as t
e' <- chkTpExp envB' e (retType tf) -- envB' |- e : t
case enterSym f tf sp env of -- env' = env, f: tf
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkDeclarations env envB ds
Right (env', f') -> do
(ds', env'') <- chkDeclarations env' -- env'; envB |- ds | env''
envB
ds
return (DeclFun {dfFun = f', dfArgs = as', dfBody = e'} : ds',
env'')
where
enterSym = if o then enterOvrldIntTermSym else enterIntTermSym
-- T-DECLPROC
chkDeclarations env envB
(A.DeclProc {A.dpProc = p, A.dpOvrld = o,
A.dpArgDecls = as, A.dpBody = c,
A.declSrcPos = sp}
: ds) = do
~(as', envB') <- chkArgDecls (openMajScope envB) as -- envB |- as | envB'
c' <- chkCmd envB' c -- envB' |- c
tp <- procType env as
case enterSym p tp sp env of -- env' = env, p: tp
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkDeclarations env envB ds
Right (env', p') -> do
(ds', env'') <- chkDeclarations env' -- env'; envB |- ds | env''
envB
ds
return (DeclProc {dpProc = p', dpArgs = as', dpBody = c'} : ds',
env'')
where
enterSym = if o then enterOvrldIntTermSym else enterIntTermSym
-- Check that function/procedure argument declarations are well-typed in
-- given environment and compute extended environment:
--
-- env |- as | env'
chkArgDecls :: Env -> [A.ArgDecl] -> D ([IntTermSym], Env)
-- T-DECLARGEMPTY
chkArgDecls env [] = returnM ([], env) -- NOTE: Need manual selection of polymorphic return.
-- T-DECLARG, T-DECLINARG, T-DECLOUTARG, T-DECLVARARG
chkArgDecls env
(A.ArgDecl {A.adArg = x, A.adArgMode = am, A.adType = td,
A.adSrcPos=sp}
: as) = do
t <- chkDclType env td
case enterIntTermSym x (Src (amType am t)) sp env of -- env' = env, x: ...
Left old -> do
emitErrD sp (redeclaredMsg old) :: D () -- NOTE: Type annotation to select correct Diagnostics instance
chkArgDecls env as
Right (env', x') -> do
(as', env'') <- chkArgDecls env' as -- env' |- as | env''
return (x' : as', env'')
redeclaredMsg :: IntTermSym -> String
redeclaredMsg itms =
"Identifier \""
++ itmsName itms
++ "\" redeclared; already declared at "
++ show (itmsSrcPos itms)
procType :: Env -> [A.ArgDecl] -> D Type
procType env as = do
ts <- chkArgTypes env as
return (Arr ts Void)
funType :: Env -> [A.ArgDecl] -> A.TypeDenoter -> D Type
funType env as td = do
ts <- chkArgTypes env as
t <- chkDclType env td
return (Arr ts t)
chkArgTypes :: Env -> [A.ArgDecl] -> D [Type]
chkArgTypes env [] = returnM [] -- NOTE: Need manual selection of polymorphic return.
chkArgTypes env (A.ArgDecl {A.adArgMode = am, A.adType = td} : as) = do
t <- chkDclType env td
ts <- chkArgTypes env as
return (amType am t : ts)
-- Checks that a given type is defined and translates it into the internal type
-- representation.
chkDclType :: Env -> A.TypeDenoter -> D Type
chkDclType env (A.TDBaseType {A.tdbtName = t, A.tdSrcPos = sp}) =
case lookupTypeSym t env of
Nothing -> do
emitErrD sp ("Undefined type \"" ++ t ++ "\"") :: D () -- NOTE: Type annotation to select correct Diagnostics instance
return SomeType
Just tps ->
returnM (tpsType tps) -- NOTE: Need manual selection of polymorphic return to match first case
chkDclType env (A.TDArray {A.tdaEltType = t, A.tdaSize = s,
A.tdSrcPos = sp}) = do
t' <- chkDclType env t
s' <- toMTInt s sp
return (Ary t' s')
chkDclType env (A.TDRecord {A.tdrFldTypes = fts}) = do
-- Note: Ensures record fields are sorted (invariant of Rcd)
let (xs,ts) = unzip fts
ts' <- mapM (chkDclType env) ts -- NOTE: Type annotation not necessary, because the classic mapM is actually used
return (Rcd (sortRcdFlds (zip xs ts')))
-- Type representation corresponding to given argument mode
amType :: A.ArgMode -> (Type -> Type)
amType A.ByValue = id -- Call-by-value
amType A.ByRefIn = Src -- Call-by-ref input
amType A.ByRefOut = Snk -- Call-by-ref output
amType A.ByRefVar = Ref -- Call-by-ref variable
-- Check that expression has type t in given environment:
--
-- env |- e : t
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is known. Knowing the target type
-- makes it easy to use the rule T-SOURCES.
chkTpExp :: Env -> A.Expression -> Type -> D Expression
-- T-SOURCES
chkTpExp env e t = do
(s, e') <- infTpExp env e -- env |- e : s, sources(s,t)
sources s t e'
-- Check that expression is well-typed in the given environment and
-- infer its type assuming no (top-level) implicit dereferencing:
--
-- env |- e : t
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is not known and leaving the option
-- for further dereferencing open.
infTpExp :: Env -> A.Expression -> D (Type, Expression)
-- T-LITCHR
infTpExp env e@(A.ExpLitChr {A.elcVal = c, A.expSrcPos = sp}) = do
c' <- toMTChr c sp
return (Character, -- env |- c : Character
ExpLitChr {elcVal = c', expType = Character, expSrcPos = sp})
-- T-LITINT
infTpExp env e@(A.ExpLitInt {A.eliVal = n, A.expSrcPos = sp}) = do
n' <- toMTInt n sp
return (Integer, -- env |- n : Integer
ExpLitInt {eliVal = n', expType = Integer, expSrcPos = sp})
-- T-VAR
infTpExp env (A.ExpVar {A.evVar = x, A.expSrcPos = sp}) = do
tms <- case lookupTermSym x env of -- env(x) = t, sources(t,t)
Nothing -> do
emitErrD sp ("Variable \"" ++ x ++ "\" undefined") :: D () -- NOTE: Type annotation to select correct Diagnostics instance
return (dummyTmS x)
Just tms -> returnM tms :: D TermSym -- NOTE: Needs manual selection of polymorphic return to match first case
return (tmsType tms, tmsToExp tms sp)
-- T-APP
infTpExp env (A.ExpApp {A.eaFun = f, A.eaArgs = es, A.expSrcPos = sp}) = do
(ss, es') <- mapAndUnzipM (infTpExp env) es
(ts, t, f') <- infArrTpExp env f ss -- env |- f : ts -> t
es'' <- zipWith3M sources ss ts es' -- env |- es : ts
return (t, ExpApp {eaFun = f', eaArgs = es'', expType = t, expSrcPos = sp})
infTpExp env (A.ExpCond {A.ecCond = e1, A.ecTrue = e2,
A.ecFalse = e3, A.expSrcPos = sp}) = do
e1' <- chkTpExp env e1 Boolean -- Env |- e1 : Boolean
(t, e2') <- infNonRefTpExp env e2 -- Env |- e2 : t
(t', e3') <- infNonRefTpExp env e3 -- Env |- e3 : t'
require (t == t') sp ("Conditional branches have to have the same \
\types; "
++ "got " ++ show t ++ " and " ++ show t')
return (t,
ExpCond {ecCond = e1', ecTrue = e2', ecFalse = e3',
expType = t, expSrcPos = sp})
-- T-ARY, empty case handled specially
infTpExp env (A.ExpAry {A.eaElts = [], A.expSrcPos = sp}) = do
let t = Ary SomeType 0
returnM (t, ExpAry {eaElts = [], expType = t, expSrcPos = sp}) -- NOTE: Needs manual selection of polymorphic return.
infTpExp env (A.ExpAry {A.eaElts = ees@(e:es), A.expSrcPos = sp}) = do
(t, e') <- infNonRefTpExp env e -- env |- e : t, not reftype(t)
es' <- mapM (\e -> chkTpExp env e t) es -- NOTE: Type annotation not necessary, because the classic mapM is actually used
let ta = Ary t (fromIntegral (length ees))
return (ta, ExpAry {eaElts = e':es', expType = ta, expSrcPos = sp})
-- T-IX
infTpExp env (A.ExpIx {A.eiAry = a, A.eiIx = i, A.expSrcPos = sp}) = do
(rat, a') <- infRefAryTpExp env a -- env |- a : R(T[n])
i' <- chkTpExp env i Integer -- env |- i : Integer
let rt = mapRfcdType eltType rat
return (rt, ExpIx {eiAry = a', eiIx = i', expType = rt, expSrcPos = sp})
-- T-RCD
infTpExp env (A.ExpRcd {A.erFldDefs = fds, A.expSrcPos = sp}) = do
-- Note: Ensures record fields are sorted (invariant of ExpRcd and Rcd)
let (xs, es) = unzip (sortRcdFlds fds)
tes' <- mapM (infNonRefTpExp env) es -- NOTE: Type annotation not necessary, because the classic mapM is actually used
require (allDistinct xs) sp (repeatedMsg xs)
let (ts, es') = unzip tes'
let fds' = zip xs es'
let tr = Rcd (zip xs ts)
return (tr, ExpRcd {erFldDefs = fds', expType = tr, expSrcPos = sp})
where
allDistinct xs = xs == nub xs
repeatedMsg xs = "Repeated record field name(s): \""
++ concat (intersperse "\", \"" (nub (xs \\ nub xs)))
++ "\""
-- T-PRJ
infTpExp env (A.ExpPrj {A.epRcd = e, A.epFld = f, A.expSrcPos = sp}) = do
(rrt, e') <- infRefRcdTpExp env e -- env |- e : R({xs:ts})
require (fldExists f (rfcdType rrt)) sp (notAFieldMsg f (rfcdType rrt))
let rt = mapRfcdType (fldType f) rrt
return (rt, ExpPrj {epRcd = e', epFld = f, expType = rt, expSrcPos = sp})
where
notAFieldMsg f rt = "The type \"" ++ show rt
++ "\" does not contain any field \"" ++ f ++ "\""
-- Check that expression is well-typed in the given environment and
-- infer its type assuming it should be a non-reference type:
--
-- env |- e : t, not reftype(t)
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is known to be a non-reference type.
infNonRefTpExp :: Env -> A.Expression -> D (Type, Expression)
infNonRefTpExp env e = do
(t, e') <- infTpExp env e
sources_pred (not . refType) "non-reference type" t e'
-- Check that expression is well-typed in the given environment and
-- infer its type assuming it should be an arrow type and given the
-- inferred but not dereferenced types of the actual arguments:
--
-- env |- e : ts -> t
--
-- The caller is responsible for checking that the type of each actual
-- argument can source the type of the corresponding formal argument and
-- dereference the argument expression as necessary (use "sources").
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is known to be an arrow type.
--
-- The number of actual arguments is used to check the arity of expression
-- type, reporting any mismatch and guaranteeing that the arity of the
-- returned type agrees.
--
-- The number and types of the actual arguments are used for resolving
-- overloading in the case of *manifest* calls (the name of the procedure
-- or function given directly as opposed to a procedure or function being
-- computed).
infArrTpExp :: Env -> A.Expression -> [Type] -> D ([Type], Type, Expression)
infArrTpExp env e@(A.ExpVar {A.evVar = x}) ss
| null tmss = do
emitErrD sp ("No procedure or function called \"" ++ x ++ "\"")
return (ensureArity a [], SomeType, tmsToExp (dummyTmS x) sp)
| arrType (tmsType (head tmss)) =
-- Manifest procedure or function call. Overloading resolution only carried
-- out for manifest calls. A call is considered manifest if at least the
-- first of any overloadings of the applied symbol is a manifest procedure
-- or function (arrow type).
case tmssCA of
[] -> do
emitErrD sp ("No instance of \"" ++ x
++ "\" has expected arity " ++ show a)
return (ensureArity a [], SomeType,
tmsToExp (dummyTmS x) sp)
[tms] ->
case tmsType tms of
Arr ts t -> returnM (ts, t, tmsToExp tms sp) -- NOTE: Needs manual selection of polymorphic return.
_ -> tcErr "infArrTpExp" "Expected arrow type"
_ ->
case tmssCS of
[] -> do
emitErrD sp ("No instance of \"" ++ x
++ "\" has a signature that agrees with \
\the types of the actual arguments: "
++ concat(intersperse ", " (map show ss)))
return (ensureArity a [], SomeType,
tmsToExp (dummyTmS x) sp)
[tms] ->
case tmsType tms of
Arr ts t -> returnM (ts, t, tmsToExp tms sp) -- NOTE: Needs manual selection of polymorphic return.
_ -> tcErr "infArrTpExp"
"Expected arrow type"
(tms : _) -> do
emitWngD sp ("Ambiguous overloading of \"" ++ x
++ "\": more than one instance have \
\a signature that agrees with the \
\types of the actual arguments")
case tmsType tms of
Arr ts t -> return (ts, t, tmsToExp tms sp)
_ -> tcErr "infArrTpExp"
"Expected arrow type"
where
sp = srcPos e
a = length ss -- Expected arity
tmss = lookupOvrldTermSym x env
tmssCA = [ tms -- Tm syms with Correct Arity
| tms <- tmss,
let t = tmsType tms,
arrType t && arity t == a
]
tmssCS = [ tms -- Tm syms with Compatible Sig.
| tms <- tmssCA,
and (zipWith sourcesp ss (argTypes (tmsType tms)))
]
infArrTpExp env e ss = do
-- This is the case of a computation yielding (a reference to) a
-- procedure or function. No overloading resolution in this case.
(s, e') <- infNonRefTpExp env e
case s of
Arr ts t -> do
require (length ts == a) sp
("Bad arity: expected " ++ show (length ts)
++ " arguments, got " ++ show a)
return (ensureArity a ts, t, e')
SomeType -> do
returnM (ensureArity a [], SomeType, e') -- NOTE: Needs manual selection of polymorphic return to match first case
_ -> do
emitErrD sp "Not a function or procedure"
return (ensureArity a [], SomeType, e')
where
sp = srcPos e
a = length ss -- Expected arity
ensureArity :: Int -> [Type] -> [Type]
ensureArity a ts = take a (ts ++ repeat SomeType)
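-- Illustrative examples (not in the original source) of how 'ensureArity'
-- pads with 'SomeType' or truncates so that the result always has the
-- expected arity:
--
-- > ensureArity 3 [Integer]                   == [Integer, SomeType, SomeType]
-- > ensureArity 1 [Integer, Boolean, Boolean] == [Integer]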
-- Check that expression is well-typed in the given environment and
-- infer its type assuming it should be a reference to an array:
--
-- env |- e : R (t[n]), R in {Src, Snk, Ref}
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is known to be a reference to an array.
infRefAryTpExp :: Env -> A.Expression -> D (Type, Expression)
infRefAryTpExp env e = do
(t, e') <- infTpExp env e
sources_pred refAry "reference to array" t e'
where
refAry (Src (Ary _ _)) = True
refAry (Snk (Ary _ _)) = True
refAry (Ref (Ary _ _)) = True
refAry _ = False
-- Check that expression is well-typed in the given environment and
-- infer its type assuming it should be a reference to a record:
--
-- env |- e : R ({a : t, ...}), R in {Src, Snk, Ref}
--
-- This is an algorithmic version of the typing relation for expressions
-- to be used when the desired type is known to be a reference to a record.
infRefRcdTpExp :: Env -> A.Expression -> D (Type, Expression)
infRefRcdTpExp env e = do
(t, e') <- infTpExp env e
sources_pred refRcd "reference to record" t e'
where
refRcd (Src (Rcd _)) = True
refRcd (Snk (Rcd _)) = True
refRcd (Ref (Rcd _)) = True
refRcd _ = False
-- Convert Integer to MTInt (MiniTriangle integer), ensuring it is
-- representable as such.
toMTInt :: Integer -> SrcPos -> D MTInt
toMTInt n sp =
if isMTInt n then
returnM (fromInteger n) -- NOTE: Needs manual selection of polymorphic return.
else do
emitErrD sp ("Integer literal " ++ show n ++ " outside the range of "
++ "representable MiniTriangle integers")
return 0
-- Convert Char to MTChr (MiniTriangle character), ensuring it is
-- representable as such.
toMTChr :: Char -> SrcPos -> D MTChr
toMTChr c sp =
if isMTChr c then
returnM c -- MTChr is currently just a type synonym
-- NOTE: Needs manual selection of polymorphic return.
else do
emitErrD sp ("Character literal " ++ show c ++ " outside the range of "
++ "representable MiniTriangle characters")
return '?'
-- Converts an (internal or external) term symbol into an MTIR expression:
-- a variable for an internal symbol, or a constant/external reference for
-- an external symbol.
tmsToExp :: TermSym -> SrcPos -> Expression
tmsToExp (Left (ExtTermSym {etmsVal = v, etmsType = t})) sp =
case v of
ESVBool b ->
ExpLitBool {elbVal = b, expType = t, expSrcPos = sp}
ESVInt n ->
ExpLitInt {eliVal = n, expType = t, expSrcPos = sp}
ESVChar c ->
ExpLitChr {elcVal = c, expType = t, expSrcPos = sp}
ESVLbl l ->
ExpExtRef {eerVal = l, expType = t, expSrcPos = sp}
tmsToExp (Right itms@(IntTermSym {itmsType = t})) sp =
ExpVar {
evVar = itms,
expType = t,
expSrcPos = sp
}
------------------------------------------------------------------------------
-- Implementation of auxiliary predicates
------------------------------------------------------------------------------
-- Check if the value of an expression of the given type can source a value
-- of the other given type. This is a version of the predicate "sources"
-- from the type system specification turned into a function assuming both
-- types are known.
-- Additionally "coerces" the type of the expression by embedding it in
-- the appropriate number of dereferencing operations.
sources :: Type -> Type -> Expression -> D Expression
sources s t e | s <: t = returnM e -- NOTE: Needs manual selection of polymorphic return.
sources (Ref s) t e = sources s t (deref e)
sources (Src s) t e = sources s t (deref e)
sources s t e = do
emitErrD (srcPos e)
("Expected type \"" ++ show t ++ "\", got \"" ++ show s ++ "\"")
return e
-- Predicate version of the above.
sourcesp :: Type -> Type -> Bool
sourcesp s t | s <: t = True
sourcesp (Ref s) t = sourcesp s t
sourcesp (Src s) t = sourcesp s t
sourcesp _ _ = False
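-- Worked example (illustrative, not in the original source), assuming
-- @Ref Integer@ is not itself a subtype of @Integer@: an expression @e@ of
-- type @Ref Integer@ used where an @Integer@ is expected is accepted after
-- inserting one dereference.
--
-- > sources  (Ref Integer) Integer e   -- succeeds, yielding (deref e)
-- > sourcesp (Ref Integer) Integer     -- == True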
{-
-- Alternative definition without explicit use of subtyping for reference.
-- Somewhat less flexible and a bit more verbose, but adequate for
-- MiniTriangle as it stands, and avoiding subtyping is a simplification.
sources :: Type -> Type -> Expression -> D Expression
sources s t e | s == t = return e
sources (Ref s) (Snk t) e | s == t = return e
sources (Ref s) (Src t) e | s == t = return e
sources (Ref s) t e = sources s t (deref e)
sources (Src s) t e = sources s t (deref e)
sources s t e = do
emitErrD (srcPos e)
("Expected type \"" ++ show t ++ "\", got \"" ++ show s ++ "\"")
return e
-}
-- Check if the value of an expression of the given type can source a type
-- satisfying an additional predicate p. That is, an implementation of the
-- combination:
--
-- sources(s,t), p(t)
--
-- assuming type "s" is given and "t" is to be computed.
-- Additionally "coerces" the type of the expression by embedding it in
-- the appropriate number of dereferencing operations.
sources_pred :: (Type -> Bool) -> String -> Type -> Expression
-> D (Type, Expression)
sources_pred p et t e | p t = returnM (t, e) -- NOTE: Needs manual selection of polymorphic return.
sources_pred p et (Ref t) e = sources_pred p et t (deref e)
sources_pred p et (Src t) e = sources_pred p et t (deref e)
sources_pred p et t e = do
emitErrD (srcPos e)
("Expected " ++ et ++ ", got \"" ++ show t ++ "\"")
return (SomeType, e)
-- Check if the value of an expression of the given type can sink a non-
-- reference type. That is, an implementation of the combination:
--
-- sinks(s,t), not reftype(t)
--
-- assuming type "s" is given and "t" is to be computed.
-- Additionally "coerces" the type of the expression by embedding it in
-- the appropriate number of dereferencing operations.
sinks_nonreftype :: Type -> Expression -> D (Type, Expression)
sinks_nonreftype (Snk t) e
| not (refType t) = returnM (t, e) -- NOTE: Needs manual selection of polymorphic return.
| otherwise = do
emitErrD (srcPos e)
"Cannot assign value of non-reference type to this variable"
return (SomeType, e)
sinks_nonreftype (Src t) e =
sinks_nonreftype t (deref e)
sinks_nonreftype (Ref t) e
| not (refType t) = returnM (t, e) -- NOTE: Needs manual selection of polymorphic return.
| otherwise = sinks_nonreftype t (deref e)
sinks_nonreftype SomeType e =
returnM (SomeType, e) -- NOTE: Needs manual selection of polymorphic return.
sinks_nonreftype _ e = do
emitErrD (srcPos e) "Does not denote an assignable variable"
return (SomeType, e)
-- Embeds an expression in a dereferencing operation.
deref :: Expression -> Expression
deref e =
ExpDeref {
edArg = e,
expType = rfcdType (expType e),
expSrcPos = srcPos e
}
-- Check that an initialiser (expression defining a constant or initialising
-- a variable) is well-initialised; i.e. does not call a function defined
-- at the present scope level as this means it could refer to constants
-- or variables that have not yet been initialised or even allocated.
-- This is defined on MTIR rather than AST, simplifying the definition
-- as the scope level of a variable is directly available, meaning that
-- there is no need to look up variables in the environment.
--
-- [For future reference:
-- Note that this restriction could be relaxed. For example, one could
-- for each function compute the set of constants/variables from the
-- same block to which it refers directly or indirectly, and then
-- flag a problem only if a function referring to constants/variables
-- currently not in scope is called. But, as functions may be mutually
-- recursive, this requires computing strongly connected components or
-- an iterative fixed-point computation.
--
-- Or one could implement initialization in dependency order, combined
-- with a static check that there are no cycles. Or one could reorder
-- declaration lists, moving procedures and functions to the end,
-- either after type checking and then possibly combined with adapted
-- code generation strategies, or before type checking, combined with
-- changed scope rules (bringing all functions and procedures into scope
-- at the start of a block) and possibly with adapted code generation
-- strategies.]
wellinit :: ScopeLvl -> Expression -> D ()
wellinit _ (ExpLitBool {}) = returnM () -- NOTE: Needs manual selection of polymorphic return.
wellinit _ (ExpLitInt {}) = returnM () -- NOTE: Needs manual selection of polymorphic return.
wellinit _ (ExpLitChr {}) = returnM () -- NOTE: Needs manual selection of polymorphic return.
wellinit _ (ExpExtRef {}) = returnM () -- NOTE: Needs manual selection of polymorphic return.
wellinit _ (ExpVar {}) = returnM () -- NOTE: Needs manual selection of polymorphic return.
wellinit l (ExpDeref {edArg = e}) = wellinit l e
wellinit l (ExpApp {eaFun = f, eaArgs = es}) = do
case f of
ExpLitBool {} -> returnM () -- Type error, will have been caught
-- NOTE: Needs manual selection of polymorphic return to match fifth case
ExpLitInt {} -> returnM () -- Type error, will have been caught
-- NOTE: Needs manual selection of polymorphic return to match fifth case
ExpLitChr {} -> returnM () -- Type error, will have been caught
-- NOTE: Needs manual selection of polymorphic return to match fifth case
ExpExtRef {} -> returnM () -- Defined outside present scope
-- NOTE: Needs manual selection of polymorphic return to match fifth case
ExpVar {evVar = IntTermSym {itmsLvl = l', itmsName = n},
expSrcPos = sp}
| l' == l ->
emitErrD sp
("Function \""
++ n
++ "\" may not be called from initializers in the \
\same block in which it is defined.") :: D () -- NOTE: Type annotation to select correct Diagnostics instance
| otherwise -> returnM () -- NOTE: Needs manual selection of polymorphic return to match fifth case
e ->
emitErrD (srcPos e)
"Only known functions may be called in initializers."
mapM_ (wellinit l) es -- NOTE: Type annotation not necessary, because the classic mapM is actually used
wellinit l (ExpCond {ecCond = e1, ecTrue = e2, ecFalse = e3}) =
wellinit l e1 >> wellinit l e2 >> wellinit l e3
wellinit l (ExpAry {eaElts = es}) = mapM_ (wellinit l) es
wellinit l (ExpIx {eiAry = a, eiIx = i}) = wellinit l a >> wellinit l i
wellinit l (ExpRcd {erFldDefs = fds}) = mapM_ (wellinit l . snd) fds
wellinit l (ExpPrj {epRcd = e}) = wellinit l e
------------------------------------------------------------------------------
-- Error reporting utilities
------------------------------------------------------------------------------
-- Report an error unless the condition is true.
require :: Bool -> SrcPos -> String -> D ()
require p sp m = unless p (emitErrD sp m)
------------------------------------------------------------------------------
-- Monadic utilities
------------------------------------------------------------------------------
-- Generalisation of zipWithM
-- NOTE: Problem with typing polymorphic functions, because backwards reasoning is missing.
-- NOTE: Overlapping instances issue arises here:
{-
Overlapping instances for Bind m Identity m
arising from a do statement
Matching instances:
instance [overlap ok] Functor a => Bind a Identity a
-- Defined in ‘Control.Monad.Parameterized’
instance [overlap ok] Bind Identity Identity Identity
-- Defined in ‘Control.Monad.Parameterized’
instance [overlap ok] Functor a => Bind Identity a a
-- Defined in ‘Control.Monad.Parameterized’
instance [overlap ok] Bind MZero Identity MZero
-- Defined in ‘Control.Monad.Parameterized’
instance [overlap ok] Functor a => Bind MZero a MZero
-- Defined in ‘Control.Monad.Parameterized’
(The choice depends on the instantiation of ‘m’
To pick the first instance above, use IncoherentInstances
when compiling the other instance declarations)
In a stmt of a 'do' block: ds <- zipWith3M f as bs cs
-}
-- NOTE: Problem can be solved by adding type signatures and making all polymorphic code monomorphic to 'm'.
-- NOTE: This requires 'ScopedTypeVariables'.
--zipWith3M :: (Bind m m m, Return m) => (a -> b -> c -> m d) -> [a] -> [b] -> [c] -> m [d]
zipWith3M :: forall m a b c d. (Bind m m m, Return m) => (a -> b -> c -> m d) -> [a] -> [b] -> [c] -> m [d]
zipWith3M f (a:as) (b:bs) (c:cs) = do
d <- f a b c
ds <- zipWith3M f as bs cs :: m [d]
returnM (d:ds) :: m [d]
zipWith3M _ _ _ _ = returnM [] -- NOTE: Needs manual selection of polymorphic return.
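-- Usage sketch: exactly as done in 'chkCmd' (T-CALL) and 'infTpExp' (T-APP)
-- above, each actual argument expression is checked against the corresponding
-- formal argument type, inserting dereferences where needed:
--
-- > es'' <- zipWith3M sources ss ts es'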
------------------------------------------------------------------------------
-- Test utilities
------------------------------------------------------------------------------
-- | Test utility. Attempts to parse and then type check the given string
-- input in the standard MT environment extended with any given bindings.
-- If successful, pretty-prints the resulting MTIR representation.
testTypeChecker :: String -> [(Name,Type)] -> IO ()
testTypeChecker s bs = do
putStrLn "Diagnostics:"
mapM_ (putStrLn . ppDMsg) (snd result) -- NOTE: Type annotation not necessary, because the classic mapM is actually used
putStrLn ""
case fst result of
Just mtir -> do
putStrLn "MTIR:"
putStrLn (ppMTIR mtir)
Nothing -> putStrLn "Parsing and typechecking produced no result."
putStrLn ""
where
result :: (Maybe MTIR, [DMsg])
result = runDF (parseCheck s (extend mtStdEnv bs))
extend env [] = env
extend env ((n,t):bs) =
case enterIntTermSym n t NoSrcPos env of
Left _ -> error "Extending MT Standard Environment failed!"
Right (env', _) -> extend env' bs
parseCheck s env = do
ast <- parse s
failIfErrorsD :: DF () -- NOTE: Type annotation to select correct Diagnostics instance
c <- dToDF (chkCmd env (A.astCmd ast))
return (MTIR {mtirCmd = c})
------------------------------------------------------------------------------
-- Internal error reporting
------------------------------------------------------------------------------
tcErr :: String -> String -> a
tcErr = internalError "TypeChecker"
| jbracker/supermonad-plugin | examples/monad/hmtc/monad-param/TypeChecker.hs | bsd-3-clause | 40,435 | 0 | 21 | 12,781 | 8,597 | 4,553 | 4,044 | 510 | 10 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Unpack
-- Copyright : (c) Andrea Vezzosi 2008
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Unpack (
-- * Commands
unpack,
) where
import Distribution.Package
( PackageId, packageId )
import Distribution.Simple.Setup
( fromFlag, fromFlagOrDefault )
import Distribution.Simple.Utils
( notice, die )
import Distribution.Verbosity
( Verbosity )
import Distribution.Text(display)
import Distribution.Client.Setup
( GlobalFlags(..), UnpackFlags(..) )
import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.Dependency
import Distribution.Client.FetchUtils
import qualified Distribution.Client.Tar as Tar (extractTarGzFile)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist )
import Control.Monad
( unless, when )
import Data.Monoid
( mempty )
import System.FilePath
( (</>), addTrailingPathSeparator )
unpack :: Verbosity
-> [Repo]
-> GlobalFlags
-> UnpackFlags
-> [UserTarget]
-> IO ()
unpack verbosity _ _ _ [] =
notice verbosity "No packages requested. Nothing to do."
unpack verbosity repos globalFlags unpackFlags userTargets = do
mapM_ checkTarget userTargets
sourcePkgDb <- getSourcePackages verbosity repos
pkgSpecifiers <- resolveUserTargets verbosity
(fromFlag $ globalWorldFile globalFlags)
(packageIndex sourcePkgDb)
userTargets
pkgs <- either (die . unlines . map show) return $
resolveWithoutDependencies
(resolverParams sourcePkgDb pkgSpecifiers)
unless (null prefix) $
createDirectoryIfMissing True prefix
flip mapM_ pkgs $ \pkg -> do
location <- fetchPackage verbosity (packageSource pkg)
let pkgid = packageId pkg
case location of
LocalTarballPackage tarballPath ->
unpackPackage verbosity prefix pkgid tarballPath
RemoteTarballPackage _tarballURL tarballPath ->
unpackPackage verbosity prefix pkgid tarballPath
RepoTarballPackage _repo _pkgid tarballPath ->
unpackPackage verbosity prefix pkgid tarballPath
LocalUnpackedPackage _ ->
error "Distribution.Client.Unpack.unpack: the impossible happened."
where
resolverParams sourcePkgDb pkgSpecifiers =
--TODO: add commandline constraint and preference args for unpack
standardInstallPolicy mempty sourcePkgDb pkgSpecifiers
prefix = fromFlagOrDefault "" (unpackDestDir unpackFlags)
checkTarget :: UserTarget -> IO ()
checkTarget target = case target of
UserTargetLocalDir dir -> die (notTarball dir)
UserTargetLocalCabalFile file -> die (notTarball file)
_ -> return ()
where
notTarball t =
"The 'unpack' command is for tarball packages. "
++ "The target '" ++ t ++ "' is not a tarball."
unpackPackage :: Verbosity -> FilePath -> PackageId -> FilePath -> IO ()
unpackPackage verbosity prefix pkgid pkgPath = do
let pkgdirname = display pkgid
pkgdir = prefix </> pkgdirname
pkgdir' = addTrailingPathSeparator pkgdir
existsDir <- doesDirectoryExist pkgdir
when existsDir $ die $
"The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking."
existsFile <- doesFileExist pkgdir
when existsFile $ die $
"A file \"" ++ pkgdir ++ "\" is in the way, not unpacking."
notice verbosity $ "Unpacking to " ++ pkgdir'
Tar.extractTarGzFile prefix pkgdirname pkgPath
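-- A hypothetical usage sketch (not part of the original module): with a
-- package whose id displays as @foo-1.0@, a destination prefix of
-- @"dist/unpacked"@ and a fetched tarball path, 'unpackPackage' extracts
-- into @dist/unpacked/foo-1.0/@ and fails if that directory (or a file of
-- the same name) is already present. Here @verbosity@, @pkgid@ and
-- @tarballPath@ are assumed bindings.
--
-- > unpackPackage verbosity "dist/unpacked" pkgid tarballPath
-- >   -- creates dist/unpacked/foo-1.0/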
| IreneKnapp/Faction | faction/Distribution/Client/Unpack.hs | bsd-3-clause | 4,022 | 0 | 14 | 954 | 820 | 426 | 394 | 84 | 4 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "System/Directory.hs" #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Directory
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- System-independent interface to directory manipulation.
--
-----------------------------------------------------------------------------
module System.Directory
(
-- $intro
-- * Actions on directories
createDirectory
, createDirectoryIfMissing
, removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, renameDirectory
, listDirectory
, getDirectoryContents
-- ** Current working directory
, getCurrentDirectory
, setCurrentDirectory
, withCurrentDirectory
-- * Pre-defined directories
, getHomeDirectory
, XdgDirectory(..)
, getXdgDirectory
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
-- * Actions on files
, removeFile
, renameFile
, renamePath
, copyFile
, copyFileWithMetadata
, getFileSize
, canonicalizePath
, makeAbsolute
, makeRelativeToCurrentDirectory
-- * Existence tests
, doesPathExist
, doesFileExist
, doesDirectoryExist
, findExecutable
, findExecutables
, findExecutablesInDirectories
, findFile
, findFiles
, findFileWith
, findFilesWith
, exeExtension
-- * Symbolic links
, createFileLink
, createDirectoryLink
, removeDirectoryLink
, pathIsSymbolicLink
, getSymbolicLinkTarget
-- * Permissions
-- $permissions
, Permissions
, emptyPermissions
, readable
, writable
, executable
, searchable
, setOwnerReadable
, setOwnerWritable
, setOwnerExecutable
, setOwnerSearchable
, getPermissions
, setPermissions
, copyPermissions
-- * Timestamps
, getAccessTime
, getModificationTime
, setAccessTime
, setModificationTime
-- * Deprecated
, isSymbolicLink
) where
import Prelude ()
import System.Directory.Internal
import System.Directory.Internal.Prelude
import System.FilePath
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (POSIXTime, utcTimeToPOSIXSeconds)
import qualified System.Directory.Internal.Config as Cfg
import qualified GHC.Foreign as GHC
import qualified System.Posix as Posix
{- $intro
A directory contains a series of entries, each of which is a named
reference to a file system object (file, directory etc.). Some
entries may be hidden, inaccessible, or have some administrative
function (e.g. @.@ or @..@ under
<http://www.opengroup.org/onlinepubs/009695399 POSIX>), but in
this standard all such entries are considered to form part of the
directory contents. Entries in sub-directories are not, however,
considered to form part of the directory contents.
Each file system object is referenced by a /path/. There is
normally at least one absolute path to each file system object. In
some operating systems, it may also be possible to have paths which
are relative to the current directory.
-}
-- | A generator with side-effects.
newtype ListT m a = ListT (m (Maybe (a, ListT m a)))
listTHead :: Functor m => ListT m a -> m (Maybe a)
listTHead (ListT m) = (fst <$>) <$> m
listTToList :: Monad m => ListT m a -> m [a]
listTToList (ListT m) = do
mx <- m
case mx of
Nothing -> return []
Just (x, m') -> do
xs <- listTToList m'
return (x : xs)
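-- An illustrative sketch (not in the original source): a tiny two-element
-- generator and what the helpers above produce for it.
--
-- > twoElems :: Monad m => ListT m Int
-- > twoElems =
-- >   ListT (return (Just (1, ListT (return (Just (2, ListT (return Nothing)))))))
-- >
-- > -- listTHead twoElems   yields   Just 1
-- > -- listTToList twoElems yields   [1, 2]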
andM :: Monad m => m Bool -> m Bool -> m Bool
andM mx my = do
x <- mx
if x
then my
else return x
-----------------------------------------------------------------------------
-- Permissions
{- $permissions
The 'Permissions' type is used to record whether certain operations are
permissible on a file\/directory. 'getPermissions' and 'setPermissions'
get and set these permissions, respectively. Permissions apply both to
files and directories. For directories, the executable field will be
'False', and for files the searchable field will be 'False'. Note that
directories may be searchable without being readable, if permission has
been given to use them as part of a path, but not to examine the
directory contents.
Note that to change some, but not all, permissions, a construct along the following lines must be used.
> makeReadable f = do
> p <- getPermissions f
> setPermissions f (p {readable = True})
-}
emptyPermissions :: Permissions
emptyPermissions = Permissions {
readable = False,
writable = False,
executable = False,
searchable = False
}
setOwnerReadable :: Bool -> Permissions -> Permissions
setOwnerReadable b p = p { readable = b }
setOwnerWritable :: Bool -> Permissions -> Permissions
setOwnerWritable b p = p { writable = b }
setOwnerExecutable :: Bool -> Permissions -> Permissions
setOwnerExecutable b p = p { executable = b }
setOwnerSearchable :: Bool -> Permissions -> Permissions
setOwnerSearchable b p = p { searchable = b }
-- | Get the permissions of a file or directory.
--
-- On Windows, the 'writable' permission corresponds to the "read-only"
-- attribute. The 'executable' permission is set if the file extension is of
-- an executable file type. The 'readable' permission is always set.
--
-- On POSIX systems, this returns the result of @access@.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to access the
-- permissions, or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
getPermissions :: FilePath -> IO Permissions
getPermissions path =
(`ioeAddLocation` "getPermissions") `modifyIOError` do
getAccessPermissions path
-- | Set the permissions of a file or directory.
--
-- On Windows, this is only capable of changing the 'writable' permission,
-- which corresponds to the "read-only" attribute. Changing the other
-- permissions has no effect.
--
-- On POSIX systems, this sets the /owner/ permissions.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to set the permissions,
-- or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
setPermissions :: FilePath -> Permissions -> IO ()
setPermissions path p =
(`ioeAddLocation` "setPermissions") `modifyIOError` do
setAccessPermissions path p
-- | Copy the permissions of one file to another. This reproduces the
-- permissions more accurately than using 'getPermissions' followed by
-- 'setPermissions'.
--
-- On Windows, this copies only the read-only attribute.
--
-- On POSIX systems, this is equivalent to @stat@ followed by @chmod@.
copyPermissions :: FilePath -> FilePath -> IO ()
copyPermissions src dst =
(`ioeAddLocation` "copyPermissions") `modifyIOError` do
m <- getFileMetadata src
copyPermissionsFromMetadata m dst
copyPermissionsFromMetadata :: Metadata -> FilePath -> IO ()
copyPermissionsFromMetadata m dst = do
-- instead of setFileMode, setFilePermissions is used here
-- this is to retain backward compatibility in copyPermissions
setFilePermissions dst (modeFromMetadata m)
-----------------------------------------------------------------------------
-- Implementation
{- |@'createDirectory' dir@ creates a new directory @dir@ which is
initially empty, or as near to empty as the operating system
allows.
The operation may fail with:
* 'isPermissionError' \/ 'PermissionDenied'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES]@
* 'isAlreadyExistsError' \/ 'AlreadyExists'
The operand refers to a directory that already exists.
@ [EEXIST]@
* 'HardwareFault'
A physical I\/O error has occurred.
@[EIO]@
* 'InvalidArgument'
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'NoSuchThing'
There is no path to the directory.
@[ENOENT, ENOTDIR]@
* 'ResourceExhausted'
Insufficient resources (virtual memory, process file descriptors,
physical disk space, etc.) are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* 'InappropriateType'
The path refers to an existing non-directory object.
@[EEXIST]@
-}
createDirectory :: FilePath -> IO ()
createDirectory path = do
Posix.createDirectory path 0o777
-- | @'createDirectoryIfMissing' parents dir@ creates a new directory
-- @dir@ if it doesn\'t exist. If the first argument is 'True'
-- the function will also create all parent directories if they are missing.
createDirectoryIfMissing :: Bool -- ^ Create its parents too?
-> FilePath -- ^ The path to the directory you want to make
-> IO ()
createDirectoryIfMissing create_parents path0
| create_parents = createDirs (parents path0)
| otherwise = createDirs (take 1 (parents path0))
where
parents = reverse . scanl1 (</>) . splitDirectories . normalise
createDirs [] = return ()
createDirs (dir:[]) = createDir dir ioError
createDirs (dir:dirs) =
createDir dir $ \_ -> do
createDirs dirs
createDir dir ioError
createDir dir notExistHandler = do
r <- tryIOError (createDirectory dir)
case r of
Right () -> return ()
Left e
| isDoesNotExistError e -> notExistHandler e
-- createDirectory (and indeed POSIX mkdir) does not distinguish
-- between a dir already existing and a file already existing. So we
-- check for it here. Unfortunately there is a slight race condition
-- here, but we think it is benign. It could report an exception in
-- the case that the dir did exist but another process deletes the
-- directory and creates a file in its place before we can check
-- that the directory did indeed exist.
-- We also follow this path when we get a permissions error, as
-- trying to create "." when in the root directory on Windows
-- fails with
-- CreateDirectory ".": permission denied (Access is denied.)
-- This caused GHCi to crash when loading a module in the root
-- directory.
| isAlreadyExistsError e
|| isPermissionError e -> do
canIgnore <- pathIsDirectory dir
`catchIOError` \ _ ->
return (isAlreadyExistsError e)
unless canIgnore (ioError e)
| otherwise -> ioError e
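-- Worked example (illustrative, not in the original source): with
-- @create_parents = True@ and @path0 = "a/b/c"@ the helper above computes
--
-- > parents "a/b/c" == ["a/b/c", "a/b", "a"]
--
-- and 'createDirs' then tries @"a/b/c"@ first, falling back to creating the
-- parent directories only when a does-not-exist error is reported.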
{- | @'removeDirectory' dir@ removes an existing directory /dir/. The
implementation may specify additional constraints which must be
satisfied before a directory can be removed (e.g. the directory has to
be empty, or may not be in use by other processes). It is not legal
for an implementation to partially remove a directory unless the
entire directory is removed. A conformant implementation need not
support directory removal in all situations (e.g. removal of the root
directory).
The operation may fail with:
* 'HardwareFault'
A physical I\/O error has occurred.
@[EIO]@
* 'InvalidArgument'
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError' \/ 'NoSuchThing'
The directory does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError' \/ 'PermissionDenied'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'UnsatisfiedConstraints'
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@
* 'UnsupportedOperation'
The implementation does not support removal in this situation.
@[EINVAL]@
* 'InappropriateType'
The operand refers to an existing non-directory object.
@[ENOTDIR]@
-}
removeDirectory :: FilePath -> IO ()
removeDirectory path =
Posix.removeDirectory path
-- | @'removeDirectoryRecursive' dir@ removes an existing directory /dir/
-- together with its contents and subdirectories. Within this directory,
-- symbolic links are removed without affecting their targets.
--
-- On Windows, the operation fails if /dir/ is a directory symbolic link.
removeDirectoryRecursive :: FilePath -> IO ()
removeDirectoryRecursive path =
(`ioeAddLocation` "removeDirectoryRecursive") `modifyIOError` do
m <- getSymbolicLinkMetadata path
case fileTypeFromMetadata m of
Directory ->
removeContentsRecursive path
DirectoryLink ->
ioError (err `ioeSetErrorString` "is a directory symbolic link")
_ ->
ioError (err `ioeSetErrorString` "not a directory")
where err = mkIOError InappropriateType "" Nothing (Just path)
-- | @'removePathRecursive' path@ removes an existing file or directory at
-- /path/ together with its contents and subdirectories. Symbolic links are
-- removed without affecting their targets.
removePathRecursive :: FilePath -> IO ()
removePathRecursive path =
(`ioeAddLocation` "removePathRecursive") `modifyIOError` do
m <- getSymbolicLinkMetadata path
case fileTypeFromMetadata m of
Directory -> removeContentsRecursive path
DirectoryLink -> removeDirectory path
_ -> removeFile path
-- | @'removeContentsRecursive' dir@ removes the contents of the directory
-- /dir/ recursively. Symbolic links are removed without affecting their
-- targets.
removeContentsRecursive :: FilePath -> IO ()
removeContentsRecursive path =
(`ioeAddLocation` "removeContentsRecursive") `modifyIOError` do
cont <- listDirectory path
mapM_ removePathRecursive [path </> x | x <- cont]
removeDirectory path
-- | Removes a file or directory at /path/ together with its contents and
-- subdirectories. Symbolic links are removed without affecting their
-- targets. If the path does not exist, nothing happens.
--
-- Unlike other removal functions, this function will also attempt to delete
-- files marked as read-only or otherwise made unremovable due to permissions.
-- As a result, if the removal is incomplete, the permissions or attributes on
-- the remaining files may be altered. If there are hard links in the
-- directory, then permissions on all related hard links may be altered.
--
-- If an entry within the directory vanishes while @removePathForcibly@ is
-- running, it is silently ignored.
--
-- If an exception occurs while removing an entry, @removePathForcibly@ will
-- still try to remove as many entries as it can before failing with an
-- exception. The first exception that it encounters is re-thrown.
--
-- @since 1.2.7.0
removePathForcibly :: FilePath -> IO ()
removePathForcibly path =
(`ioeAddLocation` "removePathForcibly") `modifyIOError` do
makeRemovable path `catchIOError` \ _ -> return ()
ignoreDoesNotExistError $ do
m <- getSymbolicLinkMetadata path
case fileTypeFromMetadata m of
DirectoryLink -> removeDirectory path
Directory -> do
names <- listDirectory path
sequenceWithIOErrors_ $
[ removePathForcibly (path </> name) | name <- names ] ++
[ removeDirectory path ]
_ -> removeFile path
where
ignoreDoesNotExistError :: IO () -> IO ()
ignoreDoesNotExistError action = do
_ <- tryIOErrorType isDoesNotExistError action
return ()
makeRemovable :: FilePath -> IO ()
makeRemovable p = do
perms <- getPermissions p
setPermissions path perms{ readable = True
, searchable = True
, writable = True }
sequenceWithIOErrors_ :: [IO ()] -> IO ()
sequenceWithIOErrors_ actions = go (Right ()) actions
where
go :: Either IOError () -> [IO ()] -> IO ()
go (Left e) [] = ioError e
go (Right ()) [] = return ()
go s (m : ms) = s `seq` do
r <- tryIOError m
go (thenEither s r) ms
-- equivalent to (*>) for Either, defined here to retain compatibility
-- with base prior to 4.3
thenEither :: Either b a -> Either b a -> Either b a
thenEither x@(Left _) _ = x
thenEither _ y = y
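-- A minimal sketch, not part of the original module, showing the contract of
-- 'sequenceWithIOErrors_' defined above: every action is attempted, and the
-- first 'IOError' raised (if any) is re-thrown only after the remaining
-- actions have run.  The list of paths is purely illustrative.
_exampleRemoveAll :: [FilePath] -> IO ()
_exampleRemoveAll paths =
  sequenceWithIOErrors_ [ removePathForcibly p | p <- paths ]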
{- |'removeFile' /file/ removes the directory entry for an existing file
/file/, where /file/ is not itself a directory. The
implementation may specify additional constraints which must be
satisfied before a file can be removed (e.g. the file may not be in
use by other processes).
The operation may fail with:
* 'HardwareFault'
A physical I\/O error has occurred.
@[EIO]@
* 'InvalidArgument'
The operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError' \/ 'NoSuchThing'
The file does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError' \/ 'PermissionDenied'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'UnsatisfiedConstraints'
Implementation-dependent constraints are not satisfied.
@[EBUSY]@
* 'InappropriateType'
The operand refers to an existing directory.
@[EPERM, EINVAL]@
-}
removeFile :: FilePath -> IO ()
removeFile path =
Posix.removeLink path
{- |@'renameDirectory' old new@ changes the name of an existing
directory from /old/ to /new/. If the /new/ directory
already exists, it is atomically replaced by the /old/ directory.
If the /new/ directory is neither the /old/ directory nor an
alias of the /old/ directory, it is removed as if by
'removeDirectory'. A conformant implementation need not support
renaming directories in all situations (e.g. renaming to an existing
directory, or across different physical devices), but the constraints
must be documented.
On Win32 platforms, @renameDirectory@ fails if the /new/ directory already
exists.
The operation may fail with:
* 'HardwareFault'
A physical I\/O error has occurred.
@[EIO]@
* 'InvalidArgument'
Either operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError' \/ 'NoSuchThing'
The original directory does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@
* 'isPermissionError' \/ 'PermissionDenied'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'ResourceExhausted'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* 'UnsatisfiedConstraints'
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@
* 'UnsupportedOperation'
The implementation does not support renaming in this situation.
@[EINVAL, EXDEV]@
* 'InappropriateType'
Either path refers to an existing non-directory object.
@[ENOTDIR, EISDIR]@
-}
renameDirectory :: FilePath -> FilePath -> IO ()
renameDirectory opath npath =
(`ioeAddLocation` "renameDirectory") `modifyIOError` do
-- XXX this test isn't performed atomically with the following rename
isDir <- pathIsDirectory opath
when (not isDir) $ do
ioError . (`ioeSetErrorString` "not a directory") $
(mkIOError InappropriateType "renameDirectory" Nothing (Just opath))
renamePath opath npath
{- |@'renameFile' old new@ changes the name of an existing file system
object from /old/ to /new/. If the /new/ object already
exists, it is atomically replaced by the /old/ object. Neither
path may refer to an existing directory. A conformant implementation
need not support renaming files in all situations (e.g. renaming
across different physical devices), but the constraints must be
documented.
The operation may fail with:
* 'HardwareFault'
A physical I\/O error has occurred.
@[EIO]@
* 'InvalidArgument'
Either operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError' \/ 'NoSuchThing'
The original file does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@
* 'isPermissionError' \/ 'PermissionDenied'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'ResourceExhausted'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* 'UnsatisfiedConstraints'
Implementation-dependent constraints are not satisfied.
@[EBUSY]@
* 'UnsupportedOperation'
The implementation does not support renaming in this situation.
@[EXDEV]@
* 'InappropriateType'
Either path refers to an existing directory.
@[ENOTDIR, EISDIR, EINVAL, EEXIST, ENOTEMPTY]@
-}
renameFile :: FilePath -> FilePath -> IO ()
renameFile opath npath = (`ioeAddLocation` "renameFile") `modifyIOError` do
-- XXX the tests are not performed atomically with the rename
checkNotDir opath
renamePath opath npath
-- The underlying rename implementation can throw odd exceptions when the
-- destination is a directory. For example, Windows typically throws a
-- permission error, while POSIX systems may throw a resource busy error
-- if one of the paths refers to the current directory. In these cases,
-- we check if the destination is a directory and, if so, throw an
-- InappropriateType error.
`catchIOError` \ err -> do
checkNotDir npath
ioError err
where checkNotDir path = do
m <- tryIOError (getSymbolicLinkMetadata path)
case fileTypeFromMetadata <$> m of
Right Directory -> errIsDir path
Right DirectoryLink -> errIsDir path
_ -> return ()
errIsDir path = ioError . (`ioeSetErrorString` "is a directory") $
mkIOError InappropriateType "" Nothing (Just path)
-- | Rename a file or directory. If the destination path already exists, it
-- is replaced atomically. The destination path must not point to an existing
-- directory. A conformant implementation need not support renaming files in
-- all situations (e.g. renaming across different physical devices), but the
-- constraints must be documented.
--
-- The operation may fail with:
--
-- * 'HardwareFault'
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * 'InvalidArgument'
-- Either operand is not a valid file name.
-- @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError' \/ 'NoSuchThing'
-- The original file does not exist, or there is no path to the target.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError' \/ 'PermissionDenied'
-- The process has insufficient privileges to perform the operation.
-- @[EROFS, EACCES, EPERM]@
--
-- * 'ResourceExhausted'
-- Insufficient resources are available to perform the operation.
-- @[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
--
-- * 'UnsatisfiedConstraints'
-- Implementation-dependent constraints are not satisfied.
-- @[EBUSY]@
--
-- * 'UnsupportedOperation'
-- The implementation does not support renaming in this situation.
-- @[EXDEV]@
--
-- * 'InappropriateType'
-- Either the destination path refers to an existing directory, or one of the
-- parent segments in the destination path is not a directory.
-- @[ENOTDIR, EISDIR, EINVAL, EEXIST, ENOTEMPTY]@
--
-- @since 1.2.7.0
renamePath :: FilePath -- ^ Old path
-> FilePath -- ^ New path
-> IO ()
renamePath opath npath = (`ioeAddLocation` "renamePath") `modifyIOError` do
Posix.rename opath npath
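-- A minimal sketch, not part of the original module: write new contents to a
-- sibling temporary name and then move it over the destination with
-- 'renamePath', so readers never observe a half-written file.  The ".tmp"
-- suffix and the use of Prelude's 'writeFile' are illustrative only; see
-- 'withReplacementFile' below for the more careful variant used internally.
_exampleAtomicWrite :: FilePath -> String -> IO ()
_exampleAtomicWrite dest contents = do
  let tmp = dest <.> "tmp"
  writeFile tmp contents
  renamePath tmp dest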
-- | Copy a file with its permissions. If the destination file already exists,
-- it is replaced atomically. Neither path may refer to an existing
-- directory. No exceptions are thrown if the permissions could not be
-- copied.
copyFile :: FilePath -- ^ Source filename
-> FilePath -- ^ Destination filename
-> IO ()
copyFile fromFPath toFPath =
(`ioeAddLocation` "copyFile") `modifyIOError` do
atomicCopyFileContents fromFPath toFPath
(ignoreIOExceptions . copyPermissions fromFPath)
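-- A minimal sketch, not part of the original module: make a backup copy of a
-- configuration file before editing it.  The ".bak" suffix is illustrative.
_exampleBackup :: FilePath -> IO ()
_exampleBackup cfg = copyFile cfg (cfg <.> "bak")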
-- | Truncate the destination file and then copy the contents of the source
-- file to the destination file. If the destination file already exists, its
-- attributes shall remain unchanged. Otherwise, its attributes are reset to
-- the defaults.
copyFileContents :: FilePath -- ^ Source filename
-> FilePath -- ^ Destination filename
-> IO ()
copyFileContents fromFPath toFPath =
(`ioeAddLocation` "copyFileContents") `modifyIOError` do
withBinaryFile toFPath WriteMode $ \ hTo ->
copyFileToHandle fromFPath hTo
-- | Copy the contents of a source file to a destination file, replacing the
-- destination file atomically via 'withReplacementFile', resetting the
-- attributes of the destination file to the defaults.
atomicCopyFileContents :: FilePath -- ^ Source filename
-> FilePath -- ^ Destination filename
-> (FilePath -> IO ()) -- ^ Post-action
-> IO ()
atomicCopyFileContents fromFPath toFPath postAction =
(`ioeAddLocation` "atomicCopyFileContents") `modifyIOError` do
withReplacementFile toFPath postAction $ \ hTo -> do
copyFileToHandle fromFPath hTo
-- | A helper function useful for replacing files in an atomic manner. The
-- function creates a temporary file in the directory of the destination file,
-- opens it, performs the main action with its handle, closes it, performs the
-- post-action with its path, and finally replaces the destination file with
-- the temporary file. If an error occurs during any step of this process,
-- the temporary file is removed and the destination file remains untouched.
withReplacementFile :: FilePath -- ^ Destination file
-> (FilePath -> IO ()) -- ^ Post-action
-> (Handle -> IO a) -- ^ Main action
-> IO a
withReplacementFile path postAction action =
(`ioeAddLocation` "withReplacementFile") `modifyIOError` do
mask $ \ restore -> do
(tmpFPath, hTmp) <- openBinaryTempFile (takeDirectory path)
".copyFile.tmp"
(`onException` ignoreIOExceptions (removeFile tmpFPath)) $ do
r <- (`onException` ignoreIOExceptions (hClose hTmp)) $ do
restore (action hTmp)
hClose hTmp
restore (postAction tmpFPath)
renameFile tmpFPath path
return r
-- | Attempt to perform the given action, silencing any IO exception thrown by
-- it.
ignoreIOExceptions :: IO () -> IO ()
ignoreIOExceptions io = io `catchIOError` (\_ -> return ())
-- | Copy all data from a file to a handle.
copyFileToHandle :: FilePath -- ^ Source file
-> Handle -- ^ Destination handle
-> IO ()
copyFileToHandle fromFPath hTo =
(`ioeAddLocation` "copyFileToHandle") `modifyIOError` do
withBinaryFile fromFPath ReadMode $ \ hFrom ->
copyHandleData hFrom hTo
-- | Copy data from one handle to another until end of file.
copyHandleData :: Handle -- ^ Source handle
-> Handle -- ^ Destination handle
-> IO ()
copyHandleData hFrom hTo =
(`ioeAddLocation` "copyData") `modifyIOError` do
allocaBytes bufferSize go
where
bufferSize = 131072 -- 128 KiB, as coreutils `cp` uses as of May 2014 (see ioblksize.h)
go buffer = do
count <- hGetBuf hFrom buffer bufferSize
when (count > 0) $ do
hPutBuf hTo buffer count
go buffer
-- | Copy a file with its associated metadata. If the destination file
-- already exists, it is overwritten. There is no guarantee of atomicity in
-- the replacement of the destination file. Neither path may refer to an
-- existing directory. If the source and/or destination are symbolic links,
-- the copy is performed on the targets of the links.
--
-- On Windows, it behaves like the Win32 function
-- <https://msdn.microsoft.com/en-us/library/windows/desktop/aa363851.aspx CopyFile>,
-- which copies various kinds of metadata including file attributes and
-- security resource properties.
--
-- On Unix-like systems, permissions, access time, and modification time are
-- preserved. If possible, the owner and group are also preserved. Note that
-- the very act of copying can change the access time of the source file,
-- hence the access times of the two files may differ after the operation
-- completes.
--
-- @since 1.2.6.0
copyFileWithMetadata :: FilePath -- ^ Source file
-> FilePath -- ^ Destination file
-> IO ()
copyFileWithMetadata src dst =
(`ioeAddLocation` "copyFileWithMetadata") `modifyIOError` doCopy
where
doCopy = do
st <- Posix.getFileStatus src
copyFileContents src dst
copyMetadataFromStatus st dst
copyMetadataFromStatus :: Posix.FileStatus -> FilePath -> IO ()
copyMetadataFromStatus st dst = do
tryCopyOwnerAndGroupFromStatus st dst
copyPermissionsFromMetadata st dst
copyFileTimesFromStatus st dst
tryCopyOwnerAndGroupFromStatus :: Posix.FileStatus -> FilePath -> IO ()
tryCopyOwnerAndGroupFromStatus st dst = do
ignoreIOExceptions (copyOwnerFromStatus st dst)
ignoreIOExceptions (copyGroupFromStatus st dst)
copyOwnerFromStatus :: Posix.FileStatus -> FilePath -> IO ()
copyOwnerFromStatus st dst = do
Posix.setOwnerAndGroup dst (Posix.fileOwner st) (-1)
copyGroupFromStatus :: Posix.FileStatus -> FilePath -> IO ()
copyGroupFromStatus st dst = do
Posix.setOwnerAndGroup dst (-1) (Posix.fileGroup st)
copyFileTimesFromStatus :: Posix.FileStatus -> FilePath -> IO ()
copyFileTimesFromStatus st dst = do
let atime = accessTimeFromMetadata st
let mtime = modificationTimeFromMetadata st
setFileTimes dst (Just atime, Just mtime)
-- | Make a path absolute, 'normalise' the path, and remove as many
-- indirections from it as possible. Any trailing path separators are
-- discarded via 'dropTrailingPathSeparator'. Additionally, on Windows the
-- letter case of the path is canonicalized.
--
-- __Note__: This function is a very big hammer. If you only need an absolute
-- path, 'makeAbsolute' is sufficient for removing dependence on the current
-- working directory.
--
-- Indirections include the two special directories @.@ and @..@, as well as
-- any symbolic links (and junction points on Windows). The input path need
-- not point to an existing file or directory. Canonicalization is performed
-- on the longest prefix of the path that points to an existing file or
-- directory. The remaining portion of the path that does not point to an
-- existing file or directory will still undergo 'normalise', but case
-- canonicalization and indirection removal are skipped as they are impossible
-- to do on a nonexistent path.
--
-- Most programs should not worry about the canonicity of a path. In
-- particular, despite the name, the function does not truly guarantee
-- canonicity of the returned path due to the presence of hard links, mount
-- points, etc.
--
-- If the path points to an existing file or directory, then the output path
-- shall also point to the same file or directory, subject to the condition
-- that the relevant parts of the file system do not change while the function
-- is still running. In other words, the function is definitively not atomic.
-- The results can be utterly wrong if the portions of the path change while
-- this function is running.
--
-- Since some indirections (symbolic links on all systems, @..@ on non-Windows
-- systems, and junction points on Windows) are dependent on the state of the
-- existing filesystem, the function can only make a conservative attempt by
-- removing such indirections from the longest prefix of the path that still
-- points to an existing file or directory.
--
-- Note that on Windows parent directories @..@ are always fully expanded
-- before the symbolic links, as consistent with the rest of the Windows API
-- (such as @GetFullPathName@). In contrast, on POSIX systems parent
-- directories @..@ are expanded alongside symbolic links from left to right.
-- To put this more concretely: if @L@ is a symbolic link for @R/P@, then on
-- Windows @L\\..@ refers to @.@, whereas on other operating systems @L/..@
-- refers to @R@.
--
-- Similar to 'normalise', passing an empty path is equivalent to passing the
-- current directory.
--
-- @canonicalizePath@ can resolve at least 64 indirections in a single path,
-- more than what is supported by most operating systems. Therefore, it may
-- return the fully resolved path even though the operating system itself
-- would have long given up.
--
-- On Windows XP or earlier systems, junction expansion is not performed due
-- to their lack of @GetFinalPathNameByHandle@.
--
-- /Changes since 1.2.3.0:/ The function has been altered to be more robust
-- and has the same exception behavior as 'makeAbsolute'.
--
-- /Changes since 1.3.0.0:/ The function no longer preserves the trailing path
-- separator. File symbolic links that appear in the middle of a path are
-- properly dereferenced. Case canonicalization and symbolic link expansion
-- are now performed on Windows.
--
canonicalizePath :: FilePath -> IO FilePath
canonicalizePath = \ path ->
modifyIOError ((`ioeAddLocation` "canonicalizePath") .
(`ioeSetFileName` path)) $
-- normalise does more stuff, like upper-casing the drive letter
dropTrailingPathSeparator . normalise <$>
(transform =<< prependCurrentDirectory path)
where
transform path = do
encoding <- getFileSystemEncoding
let realpath path' =
GHC.withCString encoding path'
(`withRealpath` GHC.peekCString encoding)
attemptRealpath realpath path
simplify = return
-- allow up to 64 cycles before giving up
attemptRealpath realpath =
attemptRealpathWith (64 :: Int) Nothing realpath <=< simplify
-- n is a counter to make sure we don't run into an infinite loop; we
-- don't try to do any cycle detection here because an adversary could DoS
-- any arbitrarily clever algorithm
attemptRealpathWith n mFallback realpath path =
case mFallback of
-- too many indirections ... giving up.
Just fallback | n <= 0 -> return fallback
-- either mFallback == Nothing (first attempt)
-- or n > 0 (still have some attempts left)
_ -> realpathPrefix (reverse (zip prefixes suffixes))
where
segments = splitDirectories path
prefixes = scanl1 (</>) segments
suffixes = tail (scanr (</>) "" segments)
-- try to call realpath on the largest possible prefix
realpathPrefix candidates =
case candidates of
[] -> return path
(prefix, suffix) : rest -> do
exist <- doesPathExist prefix
if not exist
-- never call realpath on an inaccessible path
-- (to avoid bugs in system realpath implementations)
-- try a smaller prefix instead
then realpathPrefix rest
else do
mp <- tryIOError (realpath prefix)
case mp of
-- realpath failed: try a smaller prefix instead
Left _ -> realpathPrefix rest
-- realpath succeeded: fine-tune the result
Right p -> realpathFurther (p </> suffix) p suffix
-- by now we have a reasonable fallback value that we can use if we
-- run into too many indirections; the fallback value is the same
-- result that we have been returning in versions prior to 1.3.1.0
-- (this is essentially the fix to #64)
realpathFurther fallback p suffix =
case splitDirectories suffix of
[] -> return fallback
next : restSuffix -> do
-- see if the 'next' segment is a symlink
mTarget <- tryIOError (getSymbolicLinkTarget (p </> next))
case mTarget of
Left _ -> return fallback
Right target -> do
-- if so, dereference it and restart the whole cycle
let mFallback' = Just (fromMaybe fallback mFallback)
path' <- simplify (p </> target </> joinPath restSuffix)
attemptRealpathWith (n - 1) mFallback' realpath path'
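-- A minimal sketch, not part of the original module: decide whether two
-- user-supplied paths appear to refer to the same place, up to the
-- indirections that 'canonicalizePath' can resolve (as noted above, hard
-- links and mount points can still defeat this check).
_examplePathsLookEqual :: FilePath -> FilePath -> IO Bool
_examplePathsLookEqual a b = do
  a' <- canonicalizePath a
  b' <- canonicalizePath b
  return (a' == b')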
-- | Convert a path into an absolute path. If the given path is relative, the
-- current directory is prepended and then the combined result is
-- 'normalise'd. If the path is already absolute, the path is simply
-- 'normalise'd. The function preserves the presence or absence of the
-- trailing path separator unless the path refers to the root directory @/@.
--
-- If the path is already absolute, the operation never fails. Otherwise, the
-- operation may fail with the same exceptions as 'getCurrentDirectory'.
--
-- @since 1.2.2.0
--
makeAbsolute :: FilePath -> IO FilePath
makeAbsolute path =
modifyIOError ((`ioeAddLocation` "makeAbsolute") .
(`ioeSetFileName` path)) $
matchTrailingSeparator path . normalise <$> prependCurrentDirectory path
-- | Add or remove the trailing path separator in the second path so as to
-- match its presence in the first path.
--
-- (internal API)
matchTrailingSeparator :: FilePath -> FilePath -> FilePath
matchTrailingSeparator path
| hasTrailingPathSeparator path = addTrailingPathSeparator
| otherwise = dropTrailingPathSeparator
-- | Construct a path relative to the current directory, similar to
-- 'makeRelative'.
--
-- The operation may fail with the same exceptions as 'getCurrentDirectory'.
makeRelativeToCurrentDirectory :: FilePath -> IO FilePath
makeRelativeToCurrentDirectory x = do
cur <- getCurrentDirectory
return $ makeRelative cur x
-- | Given the name or path of an executable file, 'findExecutable' searches
-- for such a file in a list of system-defined locations, which generally
-- includes @PATH@ and possibly more. The full path to the executable is
-- returned if found. For example, @(findExecutable \"ghc\")@ would normally
-- give you the path to GHC.
--
-- The path returned by @'findExecutable' name@ corresponds to the program
-- that would be executed by 'System.Process.createProcess' when passed the
-- same string (as a @RawCommand@, not a @ShellCommand@), provided that @name@
-- is not a relative path with more than one segment.
--
-- On Windows, 'findExecutable' calls the Win32 function
-- @<https://msdn.microsoft.com/en-us/library/aa365527.aspx SearchPath>@,
-- which may search other places before checking the directories in the @PATH@
-- environment variable. Where it actually searches depends on registry
-- settings, but notably includes the directory containing the current
-- executable.
--
-- On non-Windows platforms, the behavior is equivalent to 'findFileWith'
-- using the search directories from the @PATH@ environment variable and
-- testing each file for executable permissions. Details can be found in the
-- documentation of 'findFileWith'.
findExecutable :: String -> IO (Maybe FilePath)
findExecutable binary = do
path <- getPath
findFileWith isExecutable path (binary <.> exeExtension)
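-- A minimal sketch, not part of the original module: look up a program on
-- the system search path and report where it was found.  The "ghc" name is
-- illustrative.
_exampleWhichGhc :: IO ()
_exampleWhichGhc = do
  mpath <- findExecutable "ghc"
  case mpath of
    Nothing -> putStrLn "ghc: not found"
    Just p  -> putStrLn p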
-- | Search for executable files in a list of system-defined locations, which
-- generally includes @PATH@ and possibly more.
--
-- On Windows, this /only returns the first occurrence/, if any. Its behavior
-- is therefore equivalent to 'findExecutable'.
--
-- On non-Windows platforms, the behavior is equivalent to
-- 'findExecutablesInDirectories' using the search directories from the @PATH@
-- environment variable. Details can be found in the documentation of
-- 'findExecutablesInDirectories'.
--
-- @since 1.2.2.0
findExecutables :: String -> IO [FilePath]
findExecutables binary = do
path <- getPath
findExecutablesInDirectories path binary
-- | Get the contents of the @PATH@ environment variable.
getPath :: IO [FilePath]
getPath = do
path <- getEnv "PATH"
return (splitSearchPath path)
-- | Given a name or path, 'findExecutablesInDirectories' appends the
-- 'exeExtension' to the
-- query and searches for executable files in the list of given search
-- directories and returns all occurrences.
--
-- The behavior is equivalent to 'findFileWith' using the given search
-- directories and testing each file for executable permissions. Details can
-- be found in the documentation of 'findFileWith'.
--
-- Unlike other similarly named functions, 'findExecutablesInDirectories' does
-- not use @SearchPath@ from the Win32 API. The behavior of this function on
-- Windows is therefore equivalent to those on non-Windows platforms.
--
-- @since 1.2.4.0
findExecutablesInDirectories :: [FilePath] -> String -> IO [FilePath]
findExecutablesInDirectories path binary =
findFilesWith isExecutable path (binary <.> exeExtension)
-- | Test whether a file has executable permissions.
isExecutable :: FilePath -> IO Bool
isExecutable file = do
perms <- getPermissions file
return (executable perms)
-- | Search through the given list of directories for the given file.
--
-- The behavior is equivalent to 'findFileWith', returning only the first
-- occurrence. Details can be found in the documentation of 'findFileWith'.
findFile :: [FilePath] -> String -> IO (Maybe FilePath)
findFile = findFileWith (\_ -> return True)
-- | Search through the given list of directories for the given file and
-- returns all paths where the given file exists.
--
-- The behavior is equivalent to 'findFilesWith'. Details can be found in the
-- documentation of 'findFilesWith'.
--
-- @since 1.2.1.0
findFiles :: [FilePath] -> String -> IO [FilePath]
findFiles = findFilesWith (\_ -> return True)
-- | Search through a given list of directories for a file that has the given
-- name and satisfies the given predicate and return the path of the first
-- occurrence. The directories are checked in a left-to-right order.
--
-- This is essentially a more performant version of 'findFilesWith' that
-- always returns the first result, if any. Details can be found in the
-- documentation of 'findFilesWith'.
--
-- @since 1.2.6.0
findFileWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO (Maybe FilePath)
findFileWith f ds name = listTHead (findFilesWithLazy f ds name)
-- | @findFilesWith predicate dirs name@ searches through the list of
-- directories (@dirs@) for files that have the given @name@ and satisfy the
-- given @predicate@ and returns the paths of those files. The directories
-- are checked in a left-to-right order and the paths are returned in the same
-- order.
--
-- If the @name@ is a relative path, then for every search directory @dir@,
-- the function checks whether @dir '</>' name@ exists and satisfies the
-- predicate. If so, @dir '</>' name@ is returned as one of the results. In
-- other words, the returned paths can be either relative or absolute
-- depending on which search directories were used. If there are no search
-- directories, no results are ever returned.
--
-- If the @name@ is an absolute path, then the function will return a single
-- result if the file exists and satisfies the predicate and no results
-- otherwise. This is irrespective of what search directories were given.
--
-- @since 1.2.1.0
findFilesWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO [FilePath]
findFilesWith f ds name = listTToList (findFilesWithLazy f ds name)
findFilesWithLazy
:: (FilePath -> IO Bool) -> [FilePath] -> String -> ListT IO FilePath
findFilesWithLazy f dirs path
-- make sure absolute paths are handled properly irrespective of 'dirs'
-- https://github.com/haskell/directory/issues/72
| isAbsolute path = ListT (find [""])
| otherwise = ListT (find dirs)
where
find [] = return Nothing
find (d : ds) = do
let p = d </> path
found <- doesFileExist p `andM` f p
if found
then return (Just (p, ListT (find ds)))
else find ds
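-- A minimal sketch, not part of the original module: search a list of
-- include directories (the directory names are illustrative) for every
-- readable copy of a header file, in search-path order.
_exampleFindHeaders :: String -> IO [FilePath]
_exampleFindHeaders name =
  findFilesWith isReadable ["include", "vendor" </> "include"] name
  where
    isReadable p = readable <$> getPermissions p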
-- | Filename extension for executable files (including the dot if any)
-- (usually @\"\"@ on POSIX systems and @\".exe\"@ on Windows or OS\/2).
--
-- @since 1.2.4.0
exeExtension :: String
exeExtension = Cfg.exeExtension
-- | Similar to 'listDirectory', but always includes the special entries (@.@
-- and @..@). (This applies to Windows as well.)
--
-- The operation may fail with the same exceptions as 'listDirectory'.
getDirectoryContents :: FilePath -> IO [FilePath]
getDirectoryContents path =
modifyIOError ((`ioeSetFileName` path) .
(`ioeAddLocation` "getDirectoryContents")) $ do
bracket
(Posix.openDirStream path)
Posix.closeDirStream
start
where
start dirp =
loop id
where
loop acc = do
e <- Posix.readDirStream dirp
if null e
then return (acc [])
else loop (acc . (e:))
-- | @'listDirectory' dir@ returns a list of /all/ entries in /dir/ without
-- the special entries (@.@ and @..@).
--
-- The operation may fail with:
--
-- * 'HardwareFault'
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * 'InvalidArgument'
-- The operand is not a valid directory name.
-- @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError' \/ 'NoSuchThing'
-- The directory does not exist.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError' \/ 'PermissionDenied'
-- The process has insufficient privileges to perform the operation.
-- @[EACCES]@
--
-- * 'ResourceExhausted'
-- Insufficient resources are available to perform the operation.
-- @[EMFILE, ENFILE]@
--
-- * 'InappropriateType'
-- The path refers to an existing non-directory object.
-- @[ENOTDIR]@
--
-- @since 1.2.5.0
--
listDirectory :: FilePath -> IO [FilePath]
listDirectory path =
(filter f) <$> (getDirectoryContents path)
where f filename = filename /= "." && filename /= ".."
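-- A minimal sketch, not part of the original module: list only the plain
-- files (not subdirectories) sitting directly inside a directory.  Symbolic
-- links to files count as files here, mirroring 'doesFileExist'.
_exampleListFiles :: FilePath -> IO [FilePath]
_exampleListFiles dir = do
  names <- listDirectory dir
  flags <- mapM (\ name -> doesFileExist (dir </> name)) names
  return [ name | (name, True) <- zip names flags ]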
-- | Change the working directory to the given path.
--
-- In a multithreaded program, the current working directory is a global state
-- shared among all threads of the process. Therefore, when performing
-- filesystem operations from multiple threads, it is highly recommended to
-- use absolute rather than relative paths (see: 'makeAbsolute').
--
-- The operation may fail with:
--
-- * 'HardwareFault'
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * 'InvalidArgument'
-- The operand is not a valid directory name.
-- @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError' or 'NoSuchThing'
-- The directory does not exist.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError' or 'PermissionDenied'
-- The process has insufficient privileges to perform the operation.
-- @[EACCES]@
--
-- * 'UnsupportedOperation'
-- The operating system has no notion of current working directory, or the
-- working directory cannot be dynamically changed.
--
-- * 'InappropriateType'
-- The path refers to an existing non-directory object.
-- @[ENOTDIR]@
--
setCurrentDirectory :: FilePath -> IO ()
setCurrentDirectory path = do
Posix.changeWorkingDirectory path
-- | Run an 'IO' action with the given working directory and restore the
-- original working directory afterwards, even if the given action fails due
-- to an exception.
--
-- The operation may fail with the same exceptions as 'getCurrentDirectory'
-- and 'setCurrentDirectory'.
--
-- @since 1.2.3.0
--
withCurrentDirectory :: FilePath -- ^ Directory to execute in
-> IO a -- ^ Action to be executed
-> IO a
withCurrentDirectory dir action =
bracket getCurrentDirectory setCurrentDirectory $ \ _ -> do
setCurrentDirectory dir
action
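-- A minimal sketch, not part of the original module: list the contents of a
-- directory while temporarily making it the working directory, restoring the
-- original working directory afterwards even on exceptions.  As noted above,
-- absolute paths are usually the safer choice in multithreaded programs.
_exampleListHere :: FilePath -> IO [FilePath]
_exampleListHere dir = withCurrentDirectory dir (listDirectory ".")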
-- | Obtain the size of a file in bytes.
--
-- @since 1.2.7.0
getFileSize :: FilePath -> IO Integer
getFileSize path =
(`ioeAddLocation` "getFileSize") `modifyIOError` do
fileSizeFromMetadata <$> getFileMetadata path
-- | Test whether the given path points to an existing filesystem object. If
-- the user lacks necessary permissions to search the parent directories, this
-- function may return false even if the file does actually exist.
--
-- @since 1.2.7.0
doesPathExist :: FilePath -> IO Bool
doesPathExist path = do
(True <$ getFileMetadata path)
`catchIOError` \ _ ->
return False
{- |The operation 'doesDirectoryExist' returns 'True' if the argument file
exists and is either a directory or a symbolic link to a directory,
and 'False' otherwise.
-}
doesDirectoryExist :: FilePath -> IO Bool
doesDirectoryExist path = do
pathIsDirectory path
`catchIOError` \ _ ->
return False
{- |The operation 'doesFileExist' returns 'True'
if the argument file exists and is not a directory, and 'False' otherwise.
-}
doesFileExist :: FilePath -> IO Bool
doesFileExist path = do
(not <$> pathIsDirectory path)
`catchIOError` \ _ ->
return False
pathIsDirectory :: FilePath -> IO Bool
pathIsDirectory path = (`ioeAddLocation` "pathIsDirectory") `modifyIOError` do
m <- getFileMetadata path
case fileTypeFromMetadata m of
Directory -> return True
DirectoryLink -> return True
_ -> return False
-- | Create a /file/ symbolic link. The target path can be either absolute or
-- relative and need not refer to an existing file. The order of arguments
-- follows the POSIX convention.
--
-- To remove an existing file symbolic link, use 'removeFile'.
--
-- Although the distinction between /file/ symbolic links and /directory/
-- symbolic links does not exist on POSIX systems, on Windows this is an
-- intrinsic property of every symbolic link and cannot be changed without
-- recreating the link. A file symbolic link that actually points to a
-- directory will fail to dereference and vice versa. Moreover, creating
-- symbolic links on Windows requires privileges normally unavailable to users
-- outside the Administrators group. Portable programs that use symbolic
-- links should take both into consideration.
--
-- On Windows, the function is implemented using @CreateSymbolicLink@ with
-- @dwFlags@ set to zero. On POSIX, the function uses @symlink@ and
-- is therefore atomic.
--
-- Windows-specific errors: This operation may fail with 'permissionErrorType'
-- if the user lacks the privileges to create symbolic links. It may also
-- fail with 'illegalOperationErrorType' if the file system does not support
-- symbolic links.
--
-- @since 1.3.1.0
createFileLink
:: FilePath -- ^ path to the target file
-> FilePath -- ^ path of the link to be created
-> IO ()
createFileLink target link =
(`ioeAddLocation` "createFileLink") `modifyIOError` do
Posix.createSymbolicLink target link
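-- A minimal sketch, not part of the original module: publish a "current"
-- alias for a versioned file by recreating a symbolic link next to it.  The
-- names are illustrative; on Windows this requires the symbolic-link
-- privileges described above.
_examplePublishCurrent :: FilePath -> IO ()
_examplePublishCurrent versioned = do
  let link = "current"
  removeFile link `catchIOError` \ _ -> return ()  -- ignore a missing link
  createFileLink versioned link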
-- | Create a /directory/ symbolic link. The target path can be either
-- absolute or relative and need not refer to an existing directory. The
-- order of arguments follows the POSIX convention.
--
-- To remove an existing directory symbolic link, use 'removeDirectoryLink'.
--
-- Although the distinction between /file/ symbolic links and /directory/
-- symbolic links does not exist on POSIX systems, on Windows this is an
-- intrinsic property of every symbolic link and cannot be changed without
-- recreating the link. A file symbolic link that actually points to a
-- directory will fail to dereference and vice versa. Moreover, creating
-- symbolic links on Windows requires privileges normally unavailable to users
-- outside the Administrators group. Portable programs that use symbolic
-- links should take both into consideration.
--
-- On Windows, the function is implemented using @CreateSymbolicLink@ with
-- @dwFlags@ set to @SYMBOLIC_LINK_FLAG_DIRECTORY@. On POSIX, this is an
-- alias for 'createFileLink' and is therefore atomic.
--
-- Windows-specific errors: This operation may fail with 'permissionErrorType'
-- if the user lacks the privileges to create symbolic links. It may also
-- fail with 'illegalOperationErrorType' if the file system does not support
-- symbolic links.
--
-- @since 1.3.1.0
createDirectoryLink
:: FilePath -- ^ path to the target directory
-> FilePath -- ^ path of the link to be created
-> IO ()
createDirectoryLink target link =
(`ioeAddLocation` "createDirectoryLink") `modifyIOError` do
createFileLink target link
-- | Remove an existing /directory/ symbolic link.
--
-- On Windows, this is an alias for 'removeDirectory'. On POSIX systems, this
-- is an alias for 'removeFile'.
--
-- See also: 'removeFile', which can remove an existing /file/ symbolic link.
--
-- @since 1.3.1.0
removeDirectoryLink :: FilePath -> IO ()
removeDirectoryLink path =
(`ioeAddLocation` "removeDirectoryLink") `modifyIOError` do
removeFile path
-- | Check whether the path refers to a symbolic link. An exception is thrown
-- if the path does not exist or is inaccessible.
--
-- On Windows, this checks for @FILE_ATTRIBUTE_REPARSE_POINT@. In addition to
-- symbolic links, the function also returns true on junction points. On
-- POSIX systems, this checks for @S_IFLNK@.
--
-- @since 1.3.0.0
pathIsSymbolicLink :: FilePath -> IO Bool
pathIsSymbolicLink path =
((`ioeAddLocation` "pathIsSymbolicLink") .
(`ioeSetFileName` path)) `modifyIOError` do
m <- getSymbolicLinkMetadata path
return $
case fileTypeFromMetadata m of
DirectoryLink -> True
SymbolicLink -> True
_ -> False
{-# DEPRECATED isSymbolicLink "Use 'pathIsSymbolicLink' instead" #-}
isSymbolicLink :: FilePath -> IO Bool
isSymbolicLink = pathIsSymbolicLink
-- | Retrieve the target path of either a file or directory symbolic link.
-- The returned path may not be absolute, may not exist, and may not even be a
-- valid path.
--
-- On Windows systems, this calls @DeviceIoControl@ with
-- @FSCTL_GET_REPARSE_POINT@. In addition to symbolic links, the function
-- also works on junction points. On POSIX systems, this calls `readlink`.
--
-- Windows-specific errors: This operation may fail with
-- 'illegalOperationErrorType' if the file system does not support symbolic
-- links.
--
-- @since 1.3.1.0
getSymbolicLinkTarget :: FilePath -> IO FilePath
getSymbolicLinkTarget path =
(`ioeAddLocation` "getSymbolicLinkTarget") `modifyIOError` do
Posix.readSymbolicLink path
-- | Obtain the time at which the file or directory was last accessed.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to read
-- the access time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Caveat for POSIX systems: This function returns a timestamp with sub-second
-- resolution only if this package is compiled against @unix-2.6.0.0@ or later
-- and the underlying filesystem supports them.
--
-- @since 1.2.3.0
--
getAccessTime :: FilePath -> IO UTCTime
getAccessTime path =
modifyIOError (`ioeAddLocation` "getAccessTime") $ do
accessTimeFromMetadata <$> getFileMetadata path
-- | Obtain the time at which the file or directory was last modified.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to read
-- the modification time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Caveat for POSIX systems: This function returns a timestamp with sub-second
-- resolution only if this package is compiled against @unix-2.6.0.0@ or later
-- and the underlying filesystem supports them.
--
getModificationTime :: FilePath -> IO UTCTime
getModificationTime path =
modifyIOError (`ioeAddLocation` "getModificationTime") $ do
modificationTimeFromMetadata <$> getFileMetadata path
-- | Change the time at which the file or directory was last accessed.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to alter the
-- access time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Some caveats for POSIX systems:
--
-- * Not all systems support @utimensat@, in which case the function can only
-- emulate the behavior by reading the modification time and then setting
-- both the access and modification times together. On systems where
-- @utimensat@ is supported, the access time is set atomically with
-- nanosecond precision.
--
-- * If compiled against a version of @unix@ prior to @2.7.0.0@, the function
-- would not be able to set timestamps with sub-second resolution. In this
-- case, there would also be loss of precision in the modification time.
--
-- @since 1.2.3.0
--
setAccessTime :: FilePath -> UTCTime -> IO ()
setAccessTime path atime =
modifyIOError (`ioeAddLocation` "setAccessTime") $
setFileTimes path (Just atime, Nothing)
-- | Change the time at which the file or directory was last modified.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to alter the
-- modification time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Some caveats for POSIX systems:
--
-- * Not all systems support @utimensat@, in which case the function can only
-- emulate the behavior by reading the access time and then setting both the
-- access and modification times together. On systems where @utimensat@ is
-- supported, the modification time is set atomically with nanosecond
-- precision.
--
-- * If compiled against a version of @unix@ prior to @2.7.0.0@, the function
-- would not be able to set timestamps with sub-second resolution. In this
-- case, there would also be loss of precision in the access time.
--
-- @since 1.2.3.0
--
setModificationTime :: FilePath -> UTCTime -> IO ()
setModificationTime path mtime =
modifyIOError (`ioeAddLocation` "setModificationTime") $
setFileTimes path (Nothing, Just mtime)
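-- A minimal sketch, not part of the original module: make a destination file
-- carry the same access and modification times as a source file, e.g. after
-- copying its contents by some other means.
_exampleCopyTimes :: FilePath -> FilePath -> IO ()
_exampleCopyTimes from to = do
  atime <- getAccessTime from
  mtime <- getModificationTime from
  setAccessTime to atime
  setModificationTime to mtime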
setFileTimes :: FilePath -> (Maybe UTCTime, Maybe UTCTime) -> IO ()
setFileTimes _ (Nothing, Nothing) = return ()
setFileTimes path (atime, mtime) =
modifyIOError (`ioeAddLocation` "setFileTimes") .
modifyIOError (`ioeSetFileName` path) $
setTimes (utcTimeToPOSIXSeconds <$> atime, utcTimeToPOSIXSeconds <$> mtime)
where
path' = normalise path -- handle empty paths
setTimes :: (Maybe POSIXTime, Maybe POSIXTime) -> IO ()
setTimes (atime', mtime') =
withFilePath path' $ \ path'' ->
withArray [ maybe utimeOmit toCTimeSpec atime'
, maybe utimeOmit toCTimeSpec mtime' ] $ \ times ->
throwErrnoPathIfMinus1_ "" path' $
c_utimensat c_AT_FDCWD path'' times 0
{- | Returns the current user's home directory.
The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.
On Unix, 'getHomeDirectory' returns the value of the @HOME@
environment variable. On Windows, the system is queried for a
suitable path; a typical path might be @C:\/Users\//\<user\>/@.
The operation may fail with:
* 'UnsupportedOperation'
The operating system has no notion of home directory.
* 'isDoesNotExistError'
The home directory for the current user does not exist, or
cannot be found.
-}
getHomeDirectory :: IO FilePath
getHomeDirectory = modifyIOError (`ioeAddLocation` "getHomeDirectory") get
where
get = getEnv "HOME"
-- | Special directories for storing user-specific application data,
-- configuration, and cache files, as specified by the
-- <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html XDG Base Directory Specification>.
--
-- Note: On Windows, 'XdgData' and 'XdgConfig' map to the same directory.
--
-- @since 1.2.3.0
data XdgDirectory
= XdgData
-- ^ For data files (e.g. images).
-- Defaults to @~\/.local\/share@ and can be
-- overridden by the @XDG_DATA_HOME@ environment variable.
-- On Windows, it is @%APPDATA%@
-- (e.g. @C:\/Users\//\<user\>/\/AppData\/Roaming@).
-- Can be considered as the user-specific equivalent of @\/usr\/share@.
| XdgConfig
-- ^ For configuration files.
-- Defaults to @~\/.config@ and can be
-- overridden by the @XDG_CONFIG_HOME@ environment variable.
-- On Windows, it is @%APPDATA%@
-- (e.g. @C:\/Users\//\<user\>/\/AppData\/Roaming@).
-- Can be considered as the user-specific equivalent of @\/etc@.
| XdgCache
-- ^ For non-essential files (e.g. cache).
-- Defaults to @~\/.cache@ and can be
-- overridden by the @XDG_CACHE_HOME@ environment variable.
-- On Windows, it is @%LOCALAPPDATA%@
-- (e.g. @C:\/Users\//\<user\>/\/AppData\/Local@).
-- Can be considered as the user-specific equivalent of @\/var\/cache@.
deriving (Bounded, Enum, Eq, Ord, Read, Show)
-- | Obtain the paths to special directories for storing user-specific
-- application data, configuration, and cache files, conforming to the
-- <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html XDG Base Directory Specification>.
-- Compared with 'getAppUserDataDirectory', this function provides a more
-- fine-grained hierarchy as well as greater flexibility for the user.
--
-- It also works on Windows, although in that case 'XdgData' and 'XdgConfig'
-- will map to the same directory.
--
-- The second argument is usually the name of the application. Since it
-- will be integrated into the path, it must consist of valid path
-- characters.
--
-- Note: The directory may not actually exist, in which case you would need
-- to create it with file mode @700@ (i.e. only accessible by the owner).
--
-- @since 1.2.3.0
getXdgDirectory :: XdgDirectory -- ^ which special directory
-> FilePath -- ^ a relative path that is appended
-- to the path; if empty, the base
-- path is returned
-> IO FilePath
getXdgDirectory xdgDir suffix =
modifyIOError (`ioeAddLocation` "getXdgDirectory") $
normalise . (</> suffix) <$>
case xdgDir of
XdgData -> get False "XDG_DATA_HOME" ".local/share"
XdgConfig -> get False "XDG_CONFIG_HOME" ".config"
XdgCache -> get True "XDG_CACHE_HOME" ".cache"
where
get _ name fallback = do
env <- lookupEnv name
case env of
Nothing -> fallback'
Just path | isRelative path -> fallback'
| otherwise -> return path
where fallback' = (</> fallback) <$> getHomeDirectory
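-- A minimal sketch, not part of the original module: compute a per-user
-- configuration directory for a hypothetical application named "myapp" and
-- ensure it exists together with its parents.
_exampleConfigDir :: IO FilePath
_exampleConfigDir = do
  dir <- getXdgDirectory XdgConfig "myapp"
  createDirectoryIfMissing True dir
  return dir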
-- | Return the value of an environment variable, or 'Nothing' if there is no
-- such value. (Equivalent to "lookupEnv" from base-4.6.)
lookupEnv :: String -> IO (Maybe String)
lookupEnv name = do
env <- tryIOErrorType isDoesNotExistError (getEnv name)
case env of
Left _ -> return Nothing
Right value -> return (Just value)
-- | Obtain the path to a special directory for storing user-specific
-- application data (traditional Unix location). Newer applications may
-- prefer the XDG-conformant location provided by 'getXdgDirectory'
-- (<https://github.com/haskell/directory/issues/6#issuecomment-96521020 migration guide>).
--
-- The argument is usually the name of the application. Since it will be
-- integrated into the path, it must consist of valid path characters.
--
-- * On Unix-like systems, the path is @~\/./\<app\>/@.
-- * On Windows, the path is @%APPDATA%\//\<app\>/@
-- (e.g. @C:\/Users\//\<user\>/\/AppData\/Roaming\//\<app\>/@)
--
-- Note: the directory may not actually exist, in which case you would need
-- to create it. It is expected that the parent directory exists and is
-- writable.
--
-- The operation may fail with:
--
-- * 'UnsupportedOperation'
-- The operating system has no notion of application-specific data
-- directory.
--
-- * 'isDoesNotExistError'
-- The home directory for the current user does not exist, or cannot be
-- found.
--
getAppUserDataDirectory :: FilePath -- ^ a relative path that is appended
-- to the path
-> IO FilePath
getAppUserDataDirectory appName = do
modifyIOError (`ioeAddLocation` "getAppUserDataDirectory") $ do
path <- getEnv "HOME"
return (path++'/':'.':appName)
{- | Returns the current user's document directory.
The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.
On Unix, 'getUserDocumentsDirectory' returns the value of the @HOME@
environment variable. On Windows, the system is queried for a
suitable path; a typical path might be @C:\/Users\//\<user\>/\/Documents@.
The operation may fail with:
* 'UnsupportedOperation'
The operating system has no notion of document directory.
* 'isDoesNotExistError'
The document directory for the current user does not exist, or
cannot be found.
-}
getUserDocumentsDirectory :: IO FilePath
getUserDocumentsDirectory = do
modifyIOError (`ioeAddLocation` "getUserDocumentsDirectory") $ do
getEnv "HOME"
{- | Returns the current directory for temporary files.
On Unix, 'getTemporaryDirectory' returns the value of the @TMPDIR@
environment variable or \"\/tmp\" if the variable isn\'t defined.
On Windows, the function checks for the existence of environment variables in
the following order and uses the first path found:
* TMP environment variable.
* TEMP environment variable.
* USERPROFILE environment variable.
* The Windows directory
The operation may fail with:
* 'UnsupportedOperation'
The operating system has no notion of temporary directory.
The function doesn\'t verify whether the path exists.
-}
getTemporaryDirectory :: IO FilePath
getTemporaryDirectory =
getEnv "TMPDIR" `catchIOError` \ err ->
if isDoesNotExistError err then return "/tmp" else ioError err
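-- A minimal sketch, not part of the original module: build a path for a
-- scratch file inside the system temporary directory.  The file name is
-- illustrative, and nothing is created here.
_exampleScratchPath :: IO FilePath
_exampleScratchPath = do
  tmp <- getTemporaryDirectory
  return (tmp </> "myapp-scratch.txt")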
| phischu/fragnix | tests/packages/scotty/System.Directory.hs | bsd-3-clause | 65,984 | 0 | 24 | 13,947 | 7,135 | 4,001 | 3,134 | 597 | 7 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/Aeson/Types/FromJSON.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PolyKinds #-}
-- TODO: Drop this when we remove support for Data.Attoparsec.Number
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Data.Aeson.Types.FromJSON
(
-- * Core JSON classes
FromJSON(..)
-- * Liftings to unary and binary type constructors
, FromJSON1(..)
, parseJSON1
, FromJSON2(..)
, parseJSON2
-- * Generic JSON classes
, GFromJSON(..)
, FromArgs(..)
, genericParseJSON
, genericLiftParseJSON
-- * Classes and types for map keys
, FromJSONKey(..)
, FromJSONKeyFunction(..)
, fromJSONKeyCoerce
, coerceFromJSONKeyFunction
, mapFromJSONKeyFunction
-- * List functions
, listParser
-- * Inspecting @'Value's@
, withObject
, withText
, withArray
, withNumber
, withScientific
, withBool
-- * Functions
, fromJSON
, ifromJSON
, typeMismatch
, parseField
, parseFieldMaybe
, parseFieldMaybe'
, explicitParseField
, explicitParseFieldMaybe
, explicitParseFieldMaybe'
-- ** Operators
, (.:)
, (.:?)
, (.:!)
, (.!=)
-- * Internal
, parseOptionalFieldWith
) where
import Prelude ()
import Prelude.Compat
import Control.Applicative ((<|>), Const(..))
import Control.Monad ((<=<), zipWithM)
import Data.Aeson.Internal.Functions (mapKey)
import Data.Aeson.Types.Generic
import Data.Aeson.Types.Internal
import Data.Attoparsec.Number (Number(..))
import Data.Bits (unsafeShiftR)
import Data.Fixed (Fixed, HasResolution)
import Data.Functor.Compose (Compose(..))
import Data.Functor.Identity (Identity(..))
import Data.Functor.Product (Product(..))
import Data.Functor.Sum (Sum(..))
import Data.Hashable (Hashable(..))
import Data.Int (Int16, Int32, Int64, Int8)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Proxy (Proxy(..))
import Data.Ratio ((%), Ratio)
import Data.Scientific (Scientific)
import Data.Tagged (Tagged(..))
import Data.Text (Text, pack, unpack)
import Data.Time (Day, LocalTime, NominalDiffTime, TimeOfDay, UTCTime, ZonedTime)
import Data.Time.Format (parseTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Data.Traversable as Tr (sequence)
import Data.Vector (Vector)
import Data.Version (Version, parseVersion)
import Data.Word (Word16, Word32, Word64, Word8)
import Foreign.Storable (Storable)
import GHC.Generics
import Numeric.Natural (Natural)
import Text.ParserCombinators.ReadP (readP_to_S)
import Unsafe.Coerce (unsafeCoerce)
import qualified Data.Aeson.Parser.Time as Time
import qualified Data.Attoparsec.ByteString.Char8 as A (endOfInput, parseOnly, scientific)
import qualified Data.DList as DList
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HashSet
import qualified Data.IntMap as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.Map as M
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Semigroup as Semigroup
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Tree as Tree
import qualified Data.UUID.Types as UUID
import qualified Data.Vector as V
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU
import Data.Coerce (Coercible, coerce)
coerce' :: Coercible a b => a -> b
coerce' = coerce
parseIndexedJSON :: (Value -> Parser a) -> Int -> Value -> Parser a
parseIndexedJSON p idx value = p value <?> Index idx
{-# INLINE parseIndexedJSON #-}
parseIndexedJSONPair :: (Value -> Parser a) -> (Value -> Parser b) -> Int -> Value -> Parser (a, b)
parseIndexedJSONPair keyParser valParser idx value = p value <?> Index idx
where
p = withArray "(k,v)" $ \ab ->
let n = V.length ab
in if n == 2
then (,) <$> parseJSONElemAtIndex keyParser 0 ab
<*> parseJSONElemAtIndex valParser 1 ab
else fail $ "cannot unpack array of length " ++
show n ++ " into a pair"
{-# INLINE parseIndexedJSONPair #-}
parseJSONElemAtIndex :: (Value -> Parser a) -> Int -> V.Vector Value -> Parser a
parseJSONElemAtIndex p idx ary = p (V.unsafeIndex ary idx) <?> Index idx
scientificToNumber :: Scientific -> Number
scientificToNumber s
| e < 0 = D $ Scientific.toRealFloat s
| otherwise = I $ c * 10 ^ e
where
e = Scientific.base10Exponent s
c = Scientific.coefficient s
{-# INLINE scientificToNumber #-}
parseRealFloat :: RealFloat a => String -> Value -> Parser a
parseRealFloat _ (Number s) = pure $ Scientific.toRealFloat s
parseRealFloat _ Null = pure (0/0)
parseRealFloat expected v = typeMismatch expected v
{-# INLINE parseRealFloat #-}
parseIntegralFromScientific :: forall a. Integral a => String -> Scientific -> Parser a
parseIntegralFromScientific expected s =
case Scientific.floatingOrInteger s :: Either Double a of
Right x -> pure x
Left _ -> fail $ "Floating number specified for " ++ expected ++ ": " ++ show s
{-# INLINE parseIntegralFromScientific #-}
parseIntegral :: Integral a => String -> Value -> Parser a
parseIntegral expected =
withScientific expected $ parseIntegralFromScientific expected
{-# INLINE parseIntegral #-}
parseBoundedIntegralFromScientific :: (Bounded a, Integral a) => String -> Scientific -> Parser a
parseBoundedIntegralFromScientific expected s = maybe
(fail $ expected ++ " is either floating or will cause over or underflow: " ++ show s)
pure
(Scientific.toBoundedInteger s)
{-# INLINE parseBoundedIntegralFromScientific #-}
parseBoundedIntegral :: (Bounded a, Integral a) => String -> Value -> Parser a
parseBoundedIntegral expected =
withScientific expected $ parseBoundedIntegralFromScientific expected
{-# INLINE parseBoundedIntegral #-}
parseScientificText :: Text -> Parser Scientific
parseScientificText
= either fail pure
. A.parseOnly (A.scientific <* A.endOfInput)
. T.encodeUtf8
parseIntegralText :: Integral a => String -> Text -> Parser a
parseIntegralText expected t =
parseScientificText t >>= parseIntegralFromScientific expected
{-# INLINE parseIntegralText #-}
parseBoundedIntegralText :: (Bounded a, Integral a) => String -> Text -> Parser a
parseBoundedIntegralText expected t =
parseScientificText t >>= parseBoundedIntegralFromScientific expected
parseOptionalFieldWith :: (Value -> Parser (Maybe a))
-> Object -> Text -> Parser (Maybe a)
parseOptionalFieldWith pj obj key =
case H.lookup key obj of
Nothing -> pure Nothing
Just v -> pj v <?> Key key
{-# INLINE parseOptionalFieldWith #-}
-------------------------------------------------------------------------------
-- Generics
-------------------------------------------------------------------------------
-- | Class of generic representation types that can be converted from JSON.
class GFromJSON arity f where
-- | This method (applied to 'defaultOptions') is used as the
-- default generic implementation of 'parseJSON' (if the @arity@ is 'Zero')
-- or 'liftParseJSON' (if the @arity@ is 'One').
gParseJSON :: Options -> FromArgs arity a -> Value -> Parser (f a)
-- | A 'FromArgs' value either stores nothing (for 'FromJSON') or it stores the
-- two function arguments that decode occurrences of the type parameter (for
-- 'FromJSON1').
data FromArgs arity a where
NoFromArgs :: FromArgs Zero a
From1Args :: (Value -> Parser a) -> (Value -> Parser [a]) -> FromArgs One a
-- | A configurable generic JSON decoder. This function applied to
-- 'defaultOptions' is used as the default for 'parseJSON' when the
-- type is an instance of 'Generic'.
genericParseJSON :: (Generic a, GFromJSON Zero (Rep a))
=> Options -> Value -> Parser a
genericParseJSON opts = fmap to . gParseJSON opts NoFromArgs
-- | A configurable generic JSON decoder. This function applied to
-- 'defaultOptions' is used as the default for 'liftParseJSON' when the
-- type is an instance of 'Generic1'.
genericLiftParseJSON :: (Generic1 f, GFromJSON One (Rep1 f))
=> Options -> (Value -> Parser a) -> (Value -> Parser [a])
-> Value -> Parser (f a)
genericLiftParseJSON opts pj pjl = fmap to1 . gParseJSON opts (From1Args pj pjl)
-------------------------------------------------------------------------------
-- Class
-------------------------------------------------------------------------------
-- | A type that can be converted from JSON, with the possibility of
-- failure.
--
-- In many cases, you can get the compiler to generate parsing code
-- for you (see below). To begin, let's cover writing an instance by
-- hand.
--
-- There are various reasons a conversion could fail. For example, an
-- 'Object' could be missing a required key, an 'Array' could be of
-- the wrong size, or a value could be of an incompatible type.
--
-- The basic ways to signal a failed conversion are as follows:
--
-- * 'empty' and 'mzero' work, but are terse and uninformative;
--
-- * 'fail' yields a custom error message;
--
-- * 'typeMismatch' produces an informative message for cases when the
-- value encountered is not of the expected type.
--
-- An example type and instance using 'typeMismatch':
--
-- @
-- \-- Allow ourselves to write 'Text' literals.
-- {-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance 'FromJSON' Coord where
-- 'parseJSON' ('Object' v) = Coord
-- '<$>' v '.:' \"x\"
-- '<*>' v '.:' \"y\"
--
-- \-- We do not expect a non-'Object' value here.
-- \-- We could use 'mzero' to fail, but 'typeMismatch'
-- \-- gives a much more informative error message.
-- 'parseJSON' invalid = 'typeMismatch' \"Coord\" invalid
-- @
--
-- For this common case of only being concerned with a single
-- type of JSON value, the functions 'withObject', 'withNumber', etc.
-- are provided. Their use is to be preferred when possible, since
-- they are more terse. Using 'withObject', we can rewrite the above instance
-- (assuming the same language extension and data type) as:
--
-- @
-- instance 'FromJSON' Coord where
-- 'parseJSON' = 'withObject' \"Coord\" $ \v -> Coord
-- '<$>' v '.:' \"x\"
-- '<*>' v '.:' \"y\"
-- @
--
-- Instead of manually writing your 'FromJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so it will probably be more efficient than the following option.
--
-- * The compiler can provide a default generic implementation for
-- 'parseJSON'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'FromJSON' instance for your datatype without giving
-- a definition for 'parseJSON'.
--
-- For example, the previous example can be simplified to just:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Coord = Coord { x :: Double, y :: Double } deriving 'Generic'
--
-- instance 'FromJSON' Coord
-- @
--
-- The default implementation will be equivalent to
-- @parseJSON = 'genericParseJSON' 'defaultOptions'@; if you need different
-- options, you can customize the generic decoding by defining:
--
-- @
-- customOptions = 'defaultOptions'
-- { 'fieldLabelModifier' = 'map' 'Data.Char.toUpper'
-- }
--
-- instance 'FromJSON' Coord where
-- 'parseJSON' = 'genericParseJSON' customOptions
-- @
class FromJSON a where
parseJSON :: Value -> Parser a
default parseJSON :: (Generic a, GFromJSON Zero (Rep a)) => Value -> Parser a
parseJSON = genericParseJSON defaultOptions
parseJSONList :: Value -> Parser [a]
parseJSONList (Array a)
= zipWithM (parseIndexedJSON parseJSON) [0..]
. V.toList
$ a
parseJSONList v = typeMismatch "[a]" v
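-- A small usage sketch for the generically derived instance described above
-- (assuming the Coord type from the Haddocks also derives 'Show', and using
-- 'decode' from "Data.Aeson" together with @OverloadedStrings@):
--
-- @
-- >>> decode "{\"x\":3.0,\"y\":-1.0}" :: Maybe Coord
-- Just (Coord {x = 3.0, y = -1.0})
-- @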
-------------------------------------------------------------------------------
-- Classes and types for map keys
-------------------------------------------------------------------------------
-- | Read the docs for 'ToJSONKey' first. This class is a conversion
-- in the opposite direction. If you have a newtype wrapper around 'Text',
-- the recommended way to define instances is with generalized newtype deriving:
--
-- > newtype SomeId = SomeId { getSomeId :: Text }
-- > deriving (Eq,Ord,Hashable,FromJSONKey)
--
class FromJSONKey a where
-- | Strategy for parsing the key of a map-like container.
fromJSONKey :: FromJSONKeyFunction a
default fromJSONKey :: FromJSON a => FromJSONKeyFunction a
fromJSONKey = FromJSONKeyValue parseJSON
-- | This is similar in spirit to the 'readList' method of 'Read'.
-- It makes it possible to give 'String' keys special treatment
-- without using @OverlappingInstances@. End users should always
-- be able to use the default implementation of this method.
fromJSONKeyList :: FromJSONKeyFunction [a]
default fromJSONKeyList :: FromJSON a => FromJSONKeyFunction [a]
fromJSONKeyList = FromJSONKeyValue parseJSON
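-- A sketch of the newtype-deriving approach mentioned above, used to decode a
-- map keyed by the wrapper (assumes @GeneralizedNewtypeDeriving@; SomeId is
-- the example newtype from the Haddocks, 'decode' comes from "Data.Aeson",
-- and the ByteString is lazy):
--
-- @
-- newtype SomeId = SomeId { getSomeId :: Text }
--   deriving (Eq, Ord, Hashable, FromJSONKey)
--
-- decodeIds :: ByteString -> Maybe (Map SomeId Int)
-- decodeIds = decode
-- @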
-- | With GHC 7.8+ we carry around a @'Coercible' 'Text' a@ dictionary,
-- which gives us an assurance that the program will not segfault.
-- Unfortunately we cannot enforce that the 'Eq' instances or the
-- 'Hashable' instances for 'Text' and @a@ agree.
--
-- At the moment this type is intentionally not exported. 'FromJSONKeyFunction'
-- can be inspected, but cannot be constructed.
data CoerceText a where
CoerceText :: Coercible Text a => CoerceText a
-- | This type is related to 'ToJSONKeyFunction'. If 'FromJSONKeyValue' is used in the
-- 'FromJSONKey' instance, then 'ToJSONKeyValue' should be used in the 'ToJSONKey'
-- instance. The other three data constructors for this type all correspond to
-- 'ToJSONKeyText'. Strictly speaking, 'FromJSONKeyTextParser' is more powerful than
-- 'FromJSONKeyText', which is in turn more powerful than 'FromJSONKeyCoerce'.
-- For performance reasons, these exist as three options instead of one.
data FromJSONKeyFunction a
= FromJSONKeyCoerce !(CoerceText a)
-- ^ uses 'coerce' ('unsafeCoerce' in older GHCs)
| FromJSONKeyText !(Text -> a)
-- ^ conversion from 'Text' that always succeeds
| FromJSONKeyTextParser !(Text -> Parser a)
-- ^ conversion from 'Text' that may fail
| FromJSONKeyValue !(Value -> Parser a)
-- ^ conversion for non-textual keys
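-- For keys that need actual parsing, 'FromJSONKeyTextParser' is the usual
-- choice; for instance, the 'Day' key instance further below is simply:
--
-- @
-- instance FromJSONKey Day where
--     fromJSONKey = FromJSONKeyTextParser (Time.run Time.day)
-- @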
-- | Only law-abiding up to interpretation: mapping over a 'FromJSONKeyCoerce'
-- necessarily demotes it to a 'FromJSONKeyText'.
instance Functor FromJSONKeyFunction where
fmap h (FromJSONKeyCoerce CoerceText) = FromJSONKeyText (h . coerce')
fmap h (FromJSONKeyText f) = FromJSONKeyText (h . f)
fmap h (FromJSONKeyTextParser f) = FromJSONKeyTextParser (fmap h . f)
fmap h (FromJSONKeyValue f) = FromJSONKeyValue (fmap h . f)
-- | Construct 'FromJSONKeyFunction' for types coercible from 'Text'. This
-- conversion is still unsafe, as the 'Hashable' and 'Eq' instances of @a@ should be
-- compatible with those of 'Text', i.e. hash values should be equal for wrapped values as well.
-- This property will always be maintained if the 'Hashable' and 'Eq' instances
-- are derived with generalized newtype deriving.
--
-- On GHC versions before 7.8 this is an unconstrained function.
fromJSONKeyCoerce ::
Coercible Text a =>
FromJSONKeyFunction a
fromJSONKeyCoerce = FromJSONKeyCoerce CoerceText
-- | Semantically the same as @coerceFromJSONKeyFunction = fmap coerce = coerce@.
--
-- See note on 'fromJSONKeyCoerce'.
coerceFromJSONKeyFunction ::
Coercible a b =>
FromJSONKeyFunction a -> FromJSONKeyFunction b
coerceFromJSONKeyFunction = coerce
{-# RULES
"FromJSONKeyCoerce: fmap id" forall (x :: FromJSONKeyFunction a).
fmap id x = x
#-}
{-# RULES
"FromJSONKeyCoerce: fmap coerce" forall x .
fmap coerce x = coerceFromJSONKeyFunction x
#-}
-- | Same as 'fmap'. Provided for consistency with 'ToJSONKeyFunction'.
mapFromJSONKeyFunction :: (a -> b) -> FromJSONKeyFunction a -> FromJSONKeyFunction b
mapFromJSONKeyFunction = fmap
-------------------------------------------------------------------------------
-- Functions needed for documentation
-------------------------------------------------------------------------------
-- | Fail parsing due to a type mismatch, with a descriptive message.
--
-- Example usage:
--
-- @
-- instance FromJSON Coord where
-- parseJSON ('Object' v) = {- type matches, life is good -}
-- parseJSON wat = 'typeMismatch' \"Coord\" wat
-- @
typeMismatch :: String -- ^ The name of the type you are trying to parse.
-> Value -- ^ The actual value encountered.
-> Parser a
typeMismatch expected actual =
fail $ "expected " ++ expected ++ ", encountered " ++ name
where
name = case actual of
Object _ -> "Object"
Array _ -> "Array"
String _ -> "String"
Number _ -> "Number"
Bool _ -> "Boolean"
Null -> "Null"
-------------------------------------------------------------------------------
-- Liftings of FromJSON and ToJSON to unary and binary type constructors
-------------------------------------------------------------------------------
-- | Lifting of the 'FromJSON' class to unary type constructors.
--
-- Instead of manually writing your 'FromJSON1' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time. The generated instance is optimized for your type
-- so it will probably be more efficient than the following option.
--
-- * The compiler can provide a default generic implementation for
-- 'liftParseJSON'.
--
-- To use the second, simply add a @deriving 'Generic1'@ clause to your
-- datatype and declare a 'FromJSON1' instance for your datatype without giving
-- a definition for 'liftParseJSON'.
--
-- For example:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Pair a b = Pair { pairFst :: a, pairSnd :: b } deriving 'Generic1'
--
-- instance 'FromJSON' a => 'FromJSON1' (Pair a)
-- @
--
-- If the default implementation doesn't give exactly the results you want,
-- you can customize the generic decoding with only a tiny amount of
-- effort, using 'genericLiftParseJSON' with your preferred 'Options':
--
-- @
-- customOptions = 'defaultOptions'
-- { 'fieldLabelModifier' = 'map' 'Data.Char.toUpper'
-- }
--
-- instance 'FromJSON' a => 'FromJSON1' (Pair a) where
-- 'liftParseJSON' = 'genericLiftParseJSON' customOptions
-- @
class FromJSON1 f where
liftParseJSON :: (Value -> Parser a) -> (Value -> Parser [a]) -> Value -> Parser (f a)
default liftParseJSON :: (Generic1 f, GFromJSON One (Rep1 f))
=> (Value -> Parser a) -> (Value -> Parser [a]) -> Value -> Parser (f a)
liftParseJSON = genericLiftParseJSON defaultOptions
liftParseJSONList :: (Value -> Parser a) -> (Value -> Parser [a]) -> Value -> Parser [f a]
liftParseJSONList f g v = listParser (liftParseJSON f g) v
-- | Lift the standard 'parseJSON' function through the type constructor.
parseJSON1 :: (FromJSON1 f, FromJSON a) => Value -> Parser (f a)
parseJSON1 = liftParseJSON parseJSON parseJSONList
{-# INLINE parseJSON1 #-}
-- | Lifting of the 'FromJSON' class to binary type constructors.
--
-- Instead of manually writing your 'FromJSON2' instance, "Data.Aeson.TH"
-- provides Template Haskell functions which will derive an instance at compile time.
-- The compiler cannot provide a default generic implementation for 'liftParseJSON2',
-- unlike 'parseJSON' and 'liftParseJSON'.
class FromJSON2 f where
liftParseJSON2
:: (Value -> Parser a)
-> (Value -> Parser [a])
-> (Value -> Parser b)
-> (Value -> Parser [b])
-> Value -> Parser (f a b)
liftParseJSONList2
:: (Value -> Parser a)
-> (Value -> Parser [a])
-> (Value -> Parser b)
-> (Value -> Parser [b])
-> Value -> Parser [f a b]
liftParseJSONList2 fa ga fb gb v = case v of
Array vals -> fmap V.toList (V.mapM (liftParseJSON2 fa ga fb gb) vals)
_ -> typeMismatch "[a]" v
-- | Lift the standard 'parseJSON' function through the type constructor.
parseJSON2 :: (FromJSON2 f, FromJSON a, FromJSON b) => Value -> Parser (f a b)
parseJSON2 = liftParseJSON2 parseJSON parseJSONList parseJSON parseJSONList
{-# INLINE parseJSON2 #-}
-------------------------------------------------------------------------------
-- List functions
-------------------------------------------------------------------------------
-- | Helper function to use with 'liftParseJSON'. See 'Data.Aeson.ToJSON.listEncoding'.
listParser :: (Value -> Parser a) -> Value -> Parser [a]
listParser f (Array xs) = fmap V.toList (V.mapM f xs)
listParser _ v = typeMismatch "[a]" v
{-# INLINE listParser #-}
-------------------------------------------------------------------------------
-- [] instances
-------------------------------------------------------------------------------
instance FromJSON1 [] where
liftParseJSON _ p' = p'
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON [a] where
parseJSON = parseJSON1
-------------------------------------------------------------------------------
-- Functions
-------------------------------------------------------------------------------
-- | @'withObject' expected f value@ applies @f@ to the 'Object' when @value@
-- is an 'Object' and fails using @'typeMismatch' expected@ otherwise.
withObject :: String -> (Object -> Parser a) -> Value -> Parser a
withObject _ f (Object obj) = f obj
withObject expected _ v = typeMismatch expected v
{-# INLINE withObject #-}
-- | @'withText' expected f value@ applies @f@ to the 'Text' when @value@ is a
-- 'String' and fails using @'typeMismatch' expected@ otherwise.
withText :: String -> (Text -> Parser a) -> Value -> Parser a
withText _ f (String txt) = f txt
withText expected _ v = typeMismatch expected v
{-# INLINE withText #-}
-- | @'withArray' expected f value@ applies @f@ to the 'Array' when @value@ is
-- an 'Array' and fails using @'typeMismatch' expected@ otherwise.
withArray :: String -> (Array -> Parser a) -> Value -> Parser a
withArray _ f (Array arr) = f arr
withArray expected _ v = typeMismatch expected v
{-# INLINE withArray #-}
-- | @'withNumber' expected f value@ applies @f@ to the 'Number' when @value@
-- is a 'Number' and fails using @'typeMismatch' expected@ otherwise.
withNumber :: String -> (Number -> Parser a) -> Value -> Parser a
withNumber expected f = withScientific expected (f . scientificToNumber)
{-# INLINE withNumber #-}
{-# DEPRECATED withNumber "Use withScientific instead" #-}
-- | @'withScientific' expected f value@ applies @f@ to the 'Scientific' number
-- when @value@ is a 'Number' and fails using @'typeMismatch' expected@
-- otherwise.
withScientific :: String -> (Scientific -> Parser a) -> Value -> Parser a
withScientific _ f (Number scientific) = f scientific
withScientific expected _ v = typeMismatch expected v
{-# INLINE withScientific #-}
-- | @'withBool' expected f value@ applies @f@ to the 'Bool' when @value@ is a
-- 'Bool' and fails using @'typeMismatch' expected@ otherwise.
withBool :: String -> (Bool -> Parser a) -> Value -> Parser a
withBool _ f (Bool arr) = f arr
withBool expected _ v = typeMismatch expected v
{-# INLINE withBool #-}
-- | Convert a value from JSON, failing if the types do not match.
fromJSON :: (FromJSON a) => Value -> Result a
fromJSON = parse parseJSON
{-# INLINE fromJSON #-}
-- | Convert a value from JSON, failing if the types do not match.
ifromJSON :: (FromJSON a) => Value -> IResult a
ifromJSON = iparse parseJSON
{-# INLINE ifromJSON #-}
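-- A small example of 'fromJSON' applied directly to a 'Value':
--
-- @
-- >>> fromJSON (Number 3) :: Result Int
-- Success 3
-- @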
-- | Retrieve the value associated with the given key of an 'Object'.
-- The result is 'empty' if the key is not present or the value cannot
-- be converted to the desired type.
--
-- This accessor is appropriate if the key and value /must/ be present
-- in an object for it to be valid. If the key and value are
-- optional, use '.:?' instead.
(.:) :: (FromJSON a) => Object -> Text -> Parser a
(.:) = explicitParseField parseJSON
{-# INLINE (.:) #-}
-- | Retrieve the value associated with the given key of an 'Object'. The
-- result is 'Nothing' if the key is not present or if its value is 'Null',
-- or 'empty' if the value cannot be converted to the desired type.
--
-- This accessor is most useful if the key and value can be absent
-- from an object without affecting its validity. If the key and
-- value are mandatory, use '.:' instead.
(.:?) :: (FromJSON a) => Object -> Text -> Parser (Maybe a)
(.:?) = explicitParseFieldMaybe parseJSON
{-# INLINE (.:?) #-}
-- | Retrieve the value associated with the given key of an 'Object'.
-- The result is 'Nothing' if the key is not present or 'empty' if the
-- value cannot be converted to the desired type.
--
-- This differs from '.:?' by attempting to parse 'Null' the same as any
-- other JSON value, instead of interpreting it as 'Nothing'.
(.:!) :: (FromJSON a) => Object -> Text -> Parser (Maybe a)
(.:!) = explicitParseFieldMaybe' parseJSON
{-# INLINE (.:!) #-}
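-- A sketch contrasting the two optional-field operators on a field whose value
-- is 'Null' (using 'parseMaybe' from "Data.Aeson.Types"; @obj@ stands for an
-- 'Object' equivalent to @{"a": null}@):
--
-- @
-- parseMaybe (.:? "a") obj :: Maybe (Maybe Int)  -- Just Nothing
-- parseMaybe (.:! "a") obj :: Maybe (Maybe Int)  -- Nothing, since Null is not an Int
-- @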
-- | Function variant of '.:'.
parseField :: (FromJSON a) => Object -> Text -> Parser a
parseField = (.:)
{-# INLINE parseField #-}
-- | Function variant of '.:?'.
parseFieldMaybe :: (FromJSON a) => Object -> Text -> Parser (Maybe a)
parseFieldMaybe = (.:?)
{-# INLINE parseFieldMaybe #-}
-- | Function variant of '.:!'.
parseFieldMaybe' :: (FromJSON a) => Object -> Text -> Parser (Maybe a)
parseFieldMaybe' = (.:!)
{-# INLINE parseFieldMaybe' #-}
-- | Variant of '.:' with explicit parser function.
--
-- E.g. @'explicitParseField' 'parseJSON1' :: ('FromJSON1' f, 'FromJSON' a) => 'Object' -> 'Text' -> 'Parser' (f a)@
explicitParseField :: (Value -> Parser a) -> Object -> Text -> Parser a
explicitParseField p obj key = case H.lookup key obj of
Nothing -> fail $ "key " ++ show key ++ " not present"
Just v -> p v <?> Key key
{-# INLINE explicitParseField #-}
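-- A sketch of the usage mentioned above, decoding a field through a
-- container's 'FromJSON1' instance (the Wrapper type here is hypothetical):
--
-- @
-- data Wrapper f = Wrapper (f Int)
--
-- instance FromJSON1 f => FromJSON (Wrapper f) where
--   parseJSON = withObject "Wrapper" $ \o ->
--     Wrapper <$> explicitParseField parseJSON1 o "items"
-- @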
-- | Variant of '.:?' with explicit parser function.
explicitParseFieldMaybe :: (Value -> Parser a) -> Object -> Text -> Parser (Maybe a)
explicitParseFieldMaybe p obj key = case H.lookup key obj of
Nothing -> pure Nothing
Just v -> liftParseJSON p (listParser p) v <?> Key key -- listParser isn't used by maybe instance.
{-# INLINE explicitParseFieldMaybe #-}
-- | Variant of '.:!' with explicit parser function.
explicitParseFieldMaybe' :: (Value -> Parser a) -> Object -> Text -> Parser (Maybe a)
explicitParseFieldMaybe' p obj key = case H.lookup key obj of
Nothing -> pure Nothing
Just v -> Just <$> p v <?> Key key
{-# INLINE explicitParseFieldMaybe' #-}
-- | Helper for use in combination with '.:?' to provide default
-- values for optional JSON object fields.
--
-- This combinator is most useful if the key and value can be absent
-- from an object without affecting its validity and we know a default
-- value to assign in that case. If the key and value are mandatory,
-- use '.:' instead.
--
-- Example usage:
--
-- @ v1 <- o '.:?' \"opt_field_with_dfl\" .!= \"default_val\"
-- v2 <- o '.:' \"mandatory_field\"
-- v3 <- o '.:?' \"opt_field2\"
-- @
(.!=) :: Parser (Maybe a) -> a -> Parser a
pmval .!= val = fromMaybe val <$> pmval
{-# INLINE (.!=) #-}
--------------------------------------------------------------------------------
-- Generic parseJSON
-------------------------------------------------------------------------------
instance {-# OVERLAPPABLE #-} (GFromJSON arity a) => GFromJSON arity (M1 i c a) where
-- Meta-information, which is not handled elsewhere, is just added to the
-- parsed value:
gParseJSON opts fargs = fmap M1 . gParseJSON opts fargs
instance (FromJSON a) => GFromJSON arity (K1 i a) where
-- Constant values are decoded using their FromJSON instance:
gParseJSON _opts _ = fmap K1 . parseJSON
instance GFromJSON One Par1 where
-- Direct occurrences of the last type parameter are decoded with the
-- function passed in as an argument:
gParseJSON _opts (From1Args pj _) = fmap Par1 . pj
instance (FromJSON1 f) => GFromJSON One (Rec1 f) where
-- Recursive occurrences of the last type parameter are decoded using their
-- FromJSON1 instance:
gParseJSON _opts (From1Args pj pjl) = fmap Rec1 . liftParseJSON pj pjl
instance GFromJSON arity U1 where
-- Empty constructors are expected to be encoded as an empty array:
gParseJSON _opts _ v
| isEmptyArray v = pure U1
| otherwise = typeMismatch "unit constructor (U1)" v
instance ( ConsFromJSON arity a
, AllNullary (C1 c a) allNullary
, ParseSum arity (C1 c a) allNullary
) => GFromJSON arity (D1 d (C1 c a)) where
-- The option 'tagSingleConstructors' determines whether to wrap
-- a single-constructor type.
gParseJSON opts fargs
| tagSingleConstructors opts
= fmap M1
. (unTagged :: Tagged allNullary (Parser (C1 c a p)) -> Parser (C1 c a p))
. parseSum opts fargs
| otherwise = fmap M1 . fmap M1 . consParseJSON opts fargs
instance (ConsFromJSON arity a) => GFromJSON arity (C1 c a) where
-- Constructors need to be decoded differently depending on whether they're
-- a record or not. This distinction is made by consParseJSON:
gParseJSON opts fargs = fmap M1 . consParseJSON opts fargs
instance ( FromProduct arity a, FromProduct arity b
, ProductSize a, ProductSize b
) => GFromJSON arity (a :*: b) where
-- Products are expected to be encoded to an array. Here we check whether we
-- got an array of the same size as the product, then parse each of the
-- product's elements using parseProduct:
gParseJSON opts fargs = withArray "product (:*:)" $ \arr ->
let lenArray = V.length arr
lenProduct = (unTagged2 :: Tagged2 (a :*: b) Int -> Int)
productSize in
if lenArray == lenProduct
then parseProduct opts fargs arr 0 lenProduct
else fail $ "When expecting a product of " ++ show lenProduct ++
" values, encountered an Array of " ++ show lenArray ++
" elements instead"
instance ( AllNullary (a :+: b) allNullary
, ParseSum arity (a :+: b) allNullary
) => GFromJSON arity (a :+: b) where
-- If all constructors of a sum datatype are nullary and the
-- 'allNullaryToStringTag' option is set they are expected to be
-- encoded as strings. This distinction is made by 'parseSum':
gParseJSON opts fargs =
(unTagged :: Tagged allNullary (Parser ((a :+: b) d)) ->
Parser ((a :+: b) d))
. parseSum opts fargs
instance (FromJSON1 f, GFromJSON One g) => GFromJSON One (f :.: g) where
-- If an occurrence of the last type parameter is nested inside two
-- composed types, it is decoded by using the outermost type's FromJSON1
-- instance to generically decode the innermost type:
gParseJSON opts fargs =
let gpj = gParseJSON opts fargs in
fmap Comp1 . liftParseJSON gpj (listParser gpj)
--------------------------------------------------------------------------------
class ParseSum arity f allNullary where
parseSum :: Options -> FromArgs arity a
-> Value -> Tagged allNullary (Parser (f a))
instance ( SumFromString f
, FromPair arity f
, FromTaggedObject arity f
, FromUntaggedValue arity f
) => ParseSum arity f True where
parseSum opts fargs
| allNullaryToStringTag opts = Tagged . parseAllNullarySum opts
| otherwise = Tagged . parseNonAllNullarySum opts fargs
instance ( FromPair arity f
, FromTaggedObject arity f
, FromUntaggedValue arity f
) => ParseSum arity f False where
parseSum opts fargs = Tagged . parseNonAllNullarySum opts fargs
--------------------------------------------------------------------------------
parseAllNullarySum :: SumFromString f => Options -> Value -> Parser (f a)
parseAllNullarySum opts = withText "Text" $ \key ->
maybe (notFound key) return $
parseSumFromString opts key
class SumFromString f where
parseSumFromString :: Options -> Text -> Maybe (f a)
instance (SumFromString a, SumFromString b) => SumFromString (a :+: b) where
parseSumFromString opts key = (L1 <$> parseSumFromString opts key) <|>
(R1 <$> parseSumFromString opts key)
instance (Constructor c) => SumFromString (C1 c U1) where
parseSumFromString opts key | key == name = Just $ M1 U1
| otherwise = Nothing
where
name = pack $ constructorTagModifier opts $
conName (undefined :: t c U1 p)
--------------------------------------------------------------------------------
parseNonAllNullarySum :: ( FromPair arity f
, FromTaggedObject arity f
, FromUntaggedValue arity f
) => Options -> FromArgs arity c
-> Value -> Parser (f c)
parseNonAllNullarySum opts fargs =
case sumEncoding opts of
TaggedObject{..} ->
withObject "Object" $ \obj -> do
tag <- obj .: pack tagFieldName
fromMaybe (notFound tag) $
parseFromTaggedObject opts fargs contentsFieldName obj tag
ObjectWithSingleField ->
withObject "Object" $ \obj ->
case H.toList obj of
[pair@(tag, _)] -> fromMaybe (notFound tag) $
parsePair opts fargs pair
_ -> fail "Object doesn't have a single field"
TwoElemArray ->
withArray "Array" $ \arr ->
if V.length arr == 2
then case V.unsafeIndex arr 0 of
String tag -> fromMaybe (notFound tag) $
parsePair opts fargs (tag, V.unsafeIndex arr 1)
_ -> fail "First element is not a String"
else fail "Array doesn't have 2 elements"
UntaggedValue -> parseUntaggedValue opts fargs
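-- For reference, with the default field names the JSON shapes handled by the
-- branches above look roughly like:
--
--   TaggedObject           {"tag": "Ctor", "contents": ...}
--   ObjectWithSingleField   {"Ctor": ...}
--   TwoElemArray            ["Ctor", ...]
--   UntaggedValue           the encoded contents by themselves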
--------------------------------------------------------------------------------
class FromTaggedObject arity f where
parseFromTaggedObject :: Options -> FromArgs arity a
-> String -> Object
-> Text -> Maybe (Parser (f a))
instance ( FromTaggedObject arity a, FromTaggedObject arity b) =>
FromTaggedObject arity (a :+: b) where
parseFromTaggedObject opts fargs contentsFieldName obj tag =
(fmap L1 <$> parseFromTaggedObject opts fargs contentsFieldName obj tag) <|>
(fmap R1 <$> parseFromTaggedObject opts fargs contentsFieldName obj tag)
instance ( FromTaggedObject' arity f
, Constructor c
) => FromTaggedObject arity (C1 c f) where
parseFromTaggedObject opts fargs contentsFieldName obj tag
| tag == name = Just $ M1 <$> parseFromTaggedObject'
opts fargs contentsFieldName obj
| otherwise = Nothing
where
name = pack $ constructorTagModifier opts $
conName (undefined :: t c f p)
--------------------------------------------------------------------------------
class FromTaggedObject' arity f where
parseFromTaggedObject' :: Options -> FromArgs arity a -> String
-> Object -> Parser (f a)
class FromTaggedObject'' arity f isRecord where
parseFromTaggedObject'' :: Options -> FromArgs arity a -> String
-> Object -> Tagged isRecord (Parser (f a))
instance ( IsRecord f isRecord
, FromTaggedObject'' arity f isRecord
) => FromTaggedObject' arity f where
parseFromTaggedObject' opts fargs contentsFieldName =
(unTagged :: Tagged isRecord (Parser (f a)) -> Parser (f a)) .
parseFromTaggedObject'' opts fargs contentsFieldName
instance (FromRecord arity f) => FromTaggedObject'' arity f True where
parseFromTaggedObject'' opts fargs _ =
Tagged . parseRecord opts fargs Nothing
instance (GFromJSON arity f) => FromTaggedObject'' arity f False where
parseFromTaggedObject'' opts fargs contentsFieldName = Tagged .
(gParseJSON opts fargs <=< (.: pack contentsFieldName))
instance {-# OVERLAPPING #-} FromTaggedObject'' arity U1 False where
parseFromTaggedObject'' _ _ _ _ = Tagged (pure U1)
--------------------------------------------------------------------------------
class ConsFromJSON arity f where
consParseJSON :: Options -> FromArgs arity a
-> Value -> Parser (f a)
class ConsFromJSON' arity f isRecord where
consParseJSON' :: Options -> FromArgs arity a
-> Maybe Text -- ^ A dummy label
-- (Nothing to use proper label)
-> Value -> Tagged isRecord (Parser (f a))
instance ( IsRecord f isRecord
, ConsFromJSON' arity f isRecord
) => ConsFromJSON arity f where
consParseJSON opts fargs v = let
(v2,lab) = case (unwrapUnaryRecords opts,isUnary (undefined :: f a)) of
-- use a dummy object with a dummy label
(True,True) -> (object [(pack "dummy",v)], Just $ pack "dummy")
_ ->(v,Nothing)
in (unTagged :: Tagged isRecord (Parser (f a)) -> Parser (f a))
$ consParseJSON' opts fargs lab v2
instance (FromRecord arity f) => ConsFromJSON' arity f True where
consParseJSON' opts fargs mlab = Tagged . withObject "record (:*:)"
(parseRecord opts fargs mlab)
instance (GFromJSON arity f) => ConsFromJSON' arity f False where
consParseJSON' opts fargs _ = Tagged . gParseJSON opts fargs
--------------------------------------------------------------------------------
class FromRecord arity f where
parseRecord :: Options -> FromArgs arity a
-> Maybe Text -- ^ A dummy label
-- (Nothing to use proper label)
-> Object -> Parser (f a)
instance ( FromRecord arity a
, FromRecord arity b
) => FromRecord arity (a :*: b) where
parseRecord opts fargs _ obj =
(:*:) <$> parseRecord opts fargs Nothing obj
<*> parseRecord opts fargs Nothing obj
instance {-# OVERLAPPABLE #-} (Selector s, GFromJSON arity a) =>
FromRecord arity (S1 s a) where
parseRecord opts fargs lab =
(<?> Key label) . gParseJSON opts fargs <=< (.: label)
where
label = fromMaybe defLabel lab
defLabel = pack . fieldLabelModifier opts $
selName (undefined :: t s a p)
instance {-# INCOHERENT #-} (Selector s, FromJSON a) =>
FromRecord arity (S1 s (K1 i (Maybe a))) where
parseRecord _ _ (Just lab) obj = (M1 . K1) <$> obj .:? lab
parseRecord opts _ Nothing obj = (M1 . K1) <$> obj .:? pack label
where
label = fieldLabelModifier opts $
selName (undefined :: t s (K1 i (Maybe a)) p)
--------------------------------------------------------------------------------
class FromProduct arity f where
parseProduct :: Options -> FromArgs arity a
-> Array -> Int -> Int
-> Parser (f a)
instance ( FromProduct arity a
, FromProduct arity b
) => FromProduct arity (a :*: b) where
parseProduct opts fargs arr ix len =
(:*:) <$> parseProduct opts fargs arr ix lenL
<*> parseProduct opts fargs arr ixR lenR
where
lenL = len `unsafeShiftR` 1
ixR = ix + lenL
lenR = len - lenL
instance (GFromJSON arity a) => FromProduct arity (S1 s a) where
parseProduct opts fargs arr ix _ =
gParseJSON opts fargs $ V.unsafeIndex arr ix
--------------------------------------------------------------------------------
class FromPair arity f where
parsePair :: Options -> FromArgs arity a
-> Pair -> Maybe (Parser (f a))
instance ( FromPair arity a
, FromPair arity b
) => FromPair arity (a :+: b) where
parsePair opts fargs pair = (fmap L1 <$> parsePair opts fargs pair) <|>
(fmap R1 <$> parsePair opts fargs pair)
instance ( Constructor c
, GFromJSON arity a
, ConsFromJSON arity a
) => FromPair arity (C1 c a) where
parsePair opts fargs (tag, value)
| tag == tag' = Just $ gParseJSON opts fargs value
| otherwise = Nothing
where
tag' = pack $ constructorTagModifier opts $
conName (undefined :: t c a p)
--------------------------------------------------------------------------------
class FromUntaggedValue arity f where
parseUntaggedValue :: Options -> FromArgs arity a
-> Value -> Parser (f a)
instance
( FromUntaggedValue arity a
, FromUntaggedValue arity b
) => FromUntaggedValue arity (a :+: b)
where
parseUntaggedValue opts fargs value =
L1 <$> parseUntaggedValue opts fargs value <|>
R1 <$> parseUntaggedValue opts fargs value
instance {-# OVERLAPPABLE #-}
( GFromJSON arity a
, ConsFromJSON arity a
) => FromUntaggedValue arity (C1 c a)
where
parseUntaggedValue = gParseJSON
instance {-# OVERLAPPING #-}
( Constructor c )
=> FromUntaggedValue arity (C1 c U1)
where
parseUntaggedValue opts _ (String s)
| s == pack (constructorTagModifier opts (conName (undefined :: t c U1 p))) =
pure $ M1 U1
| otherwise =
fail $ "Invalid tag: " ++ unpack s
parseUntaggedValue _ _ v = typeMismatch (conName (undefined :: t c U1 p)) v
--------------------------------------------------------------------------------
notFound :: Text -> Parser a
notFound key = fail $ "The key \"" ++ unpack key ++ "\" was not found"
{-# INLINE notFound #-}
-------------------------------------------------------------------------------
-- Instances
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- base
-------------------------------------------------------------------------------
instance FromJSON2 Const where
liftParseJSON2 p _ _ _ = fmap Const . p
{-# INLINE liftParseJSON2 #-}
instance FromJSON a => FromJSON1 (Const a) where
liftParseJSON _ _ = fmap Const . parseJSON
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (Const a b) where
{-# INLINE parseJSON #-}
parseJSON = fmap Const . parseJSON
instance FromJSON1 Maybe where
liftParseJSON _ _ Null = pure Nothing
liftParseJSON p _ a = Just <$> p a
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON (Maybe a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSON2 Either where
liftParseJSON2 pA _ pB _ (Object (H.toList -> [(key, value)]))
| key == left = Left <$> pA value <?> Key left
| key == right = Right <$> pB value <?> Key right
where
left, right :: Text
left = "Left"
right = "Right"
liftParseJSON2 _ _ _ _ _ = fail $
"expected an object with a single property " ++
"where the property key should be either " ++
"\"Left\" or \"Right\""
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a) => FromJSON1 (Either a) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b) => FromJSON (Either a b) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance FromJSON Bool where
parseJSON = withBool "Bool" pure
{-# INLINE parseJSON #-}
instance FromJSONKey Bool where
fromJSONKey = FromJSONKeyTextParser $ \t -> case t of
"true" -> pure True
"false" -> pure False
_ -> fail $ "Cannot parse key into Bool: " ++ T.unpack t
instance FromJSON Ordering where
parseJSON = withText "Ordering" $ \s ->
case s of
"LT" -> return LT
"EQ" -> return EQ
"GT" -> return GT
_ -> fail "Parsing Ordering value failed: expected \"LT\", \"EQ\", or \"GT\""
instance FromJSON () where
parseJSON = withArray "()" $ \v ->
if V.null v
then pure ()
else fail "Expected an empty array"
{-# INLINE parseJSON #-}
instance FromJSON Char where
parseJSON = withText "Char" $ \t ->
if T.compareLength t 1 == EQ
then pure $ T.head t
else fail "Expected a string of length 1"
{-# INLINE parseJSON #-}
parseJSONList = withText "String" $ pure . T.unpack
{-# INLINE parseJSONList #-}
instance FromJSON Double where
parseJSON = parseRealFloat "Double"
{-# INLINE parseJSON #-}
instance FromJSONKey Double where
fromJSONKey = FromJSONKeyTextParser $ \t -> case t of
"NaN" -> pure (0/0)
"Infinity" -> pure (1/0)
"-Infinity" -> pure (negate 1/0)
_ -> Scientific.toRealFloat <$> parseScientificText t
instance FromJSON Number where
parseJSON (Number s) = pure $ scientificToNumber s
parseJSON Null = pure (D (0/0))
parseJSON v = typeMismatch "Number" v
{-# INLINE parseJSON #-}
instance FromJSON Float where
parseJSON = parseRealFloat "Float"
{-# INLINE parseJSON #-}
instance FromJSONKey Float where
fromJSONKey = FromJSONKeyTextParser $ \t -> case t of
"NaN" -> pure (0/0)
"Infinity" -> pure (1/0)
"-Infinity" -> pure (negate 1/0)
_ -> Scientific.toRealFloat <$> parseScientificText t
instance (FromJSON a, Integral a) => FromJSON (Ratio a) where
parseJSON = withObject "Rational" $ \obj ->
(%) <$> obj .: "numerator"
<*> obj .: "denominator"
{-# INLINE parseJSON #-}
-- | /WARNING:/ Only parse fixed-precision numbers from trusted input
-- since an attacker could easily fill up the memory of the target
-- system by specifying a scientific number with a big exponent like
-- @1e1000000000@.
instance HasResolution a => FromJSON (Fixed a) where
parseJSON = withScientific "Fixed" $ pure . realToFrac
{-# INLINE parseJSON #-}
instance FromJSON Int where
parseJSON = parseBoundedIntegral "Int"
{-# INLINE parseJSON #-}
instance FromJSONKey Int where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Int"
-- | /WARNING:/ Only parse Integers from trusted input since an
-- attacker could easily fill up the memory of the target system by
-- specifying a scientific number with a big exponent like
-- @1e1000000000@.
instance FromJSON Integer where
parseJSON = parseIntegral "Integer"
{-# INLINE parseJSON #-}
instance FromJSONKey Integer where
fromJSONKey = FromJSONKeyTextParser $ parseIntegralText "Integer"
instance FromJSON Natural where
parseJSON = withScientific "Natural" $ \s ->
if Scientific.coefficient s < 0
then fail $ "Expected a Natural number but got the negative number: " <> show s
else pure $ truncate s
instance FromJSONKey Natural where
fromJSONKey = FromJSONKeyTextParser $ \t -> parseScientificText t >>= \s ->
if Scientific.coefficient s < 0
then fail $ "Expected a Natural number but got the negative number: " <> show s
else pure $ truncate s
instance FromJSON Int8 where
parseJSON = parseBoundedIntegral "Int8"
{-# INLINE parseJSON #-}
instance FromJSONKey Int8 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Int8"
instance FromJSON Int16 where
parseJSON = parseBoundedIntegral "Int16"
{-# INLINE parseJSON #-}
instance FromJSONKey Int16 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Int16"
instance FromJSON Int32 where
parseJSON = parseBoundedIntegral "Int32"
{-# INLINE parseJSON #-}
instance FromJSONKey Int32 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Int32"
instance FromJSON Int64 where
parseJSON = parseBoundedIntegral "Int64"
{-# INLINE parseJSON #-}
instance FromJSONKey Int64 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Int64"
instance FromJSON Word where
parseJSON = parseBoundedIntegral "Word"
{-# INLINE parseJSON #-}
instance FromJSONKey Word where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Word"
instance FromJSON Word8 where
parseJSON = parseBoundedIntegral "Word8"
{-# INLINE parseJSON #-}
instance FromJSONKey Word8 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Word8"
instance FromJSON Word16 where
parseJSON = parseBoundedIntegral "Word16"
{-# INLINE parseJSON #-}
instance FromJSONKey Word16 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Word16"
instance FromJSON Word32 where
parseJSON = parseBoundedIntegral "Word32"
{-# INLINE parseJSON #-}
instance FromJSONKey Word32 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Word32"
instance FromJSON Word64 where
parseJSON = parseBoundedIntegral "Word64"
{-# INLINE parseJSON #-}
instance FromJSONKey Word64 where
fromJSONKey = FromJSONKeyTextParser $ parseBoundedIntegralText "Word64"
instance FromJSON Text where
parseJSON = withText "Text" pure
{-# INLINE parseJSON #-}
instance FromJSONKey Text where
fromJSONKey = fromJSONKeyCoerce
instance FromJSON LT.Text where
parseJSON = withText "Lazy Text" $ pure . LT.fromStrict
{-# INLINE parseJSON #-}
instance FromJSONKey LT.Text where
fromJSONKey = FromJSONKeyText LT.fromStrict
instance FromJSON Version where
parseJSON = withText "Version" parseVersionText
{-# INLINE parseJSON #-}
instance FromJSONKey Version where
fromJSONKey = FromJSONKeyTextParser parseVersionText
parseVersionText :: Text -> Parser Version
parseVersionText = go . readP_to_S parseVersion . unpack
where
go [(v,[])] = return v
go (_ : xs) = go xs
go _ = fail "could not parse Version"
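-- For example, @parseVersionText "1.2.3"@ succeeds with @Version [1,2,3] []@,
-- picking out the single complete parse at the end of the 'readP_to_S' result list.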
-------------------------------------------------------------------------------
-- semigroups NonEmpty
-------------------------------------------------------------------------------
instance FromJSON1 NonEmpty where
liftParseJSON p _ = withArray "NonEmpty a" $
(>>= ne) . Tr.sequence . zipWith (parseIndexedJSON p) [0..] . V.toList
where
ne [] = fail "Expected a NonEmpty but got an empty list"
ne (x:xs) = pure (x :| xs)
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON (NonEmpty a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- scientific
-------------------------------------------------------------------------------
instance FromJSON Scientific where
parseJSON = withScientific "Scientific" pure
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- DList
-------------------------------------------------------------------------------
instance FromJSON1 DList.DList where
liftParseJSON p _ = withArray "DList a" $
fmap DList.fromList .
Tr.sequence . zipWith (parseIndexedJSON p) [0..] . V.toList
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON (DList.DList a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- transformers - Functors
-------------------------------------------------------------------------------
instance FromJSON1 Identity where
liftParseJSON p _ a = Identity <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Identity <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Identity a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance (FromJSONKey a) => FromJSONKey (Identity a) where
fromJSONKey = coerceFromJSONKeyFunction (fromJSONKey :: FromJSONKeyFunction a)
fromJSONKeyList = coerceFromJSONKeyFunction (fromJSONKeyList :: FromJSONKeyFunction [a])
instance (FromJSON1 f, FromJSON1 g) => FromJSON1 (Compose f g) where
liftParseJSON p pl a = Compose <$> liftParseJSON g gl a
where
g = liftParseJSON p pl
gl = liftParseJSONList p pl
{-# INLINE liftParseJSON #-}
liftParseJSONList p pl a = map Compose <$> liftParseJSONList g gl a
where
g = liftParseJSON p pl
gl = liftParseJSONList p pl
{-# INLINE liftParseJSONList #-}
instance (FromJSON1 f, FromJSON1 g, FromJSON a) => FromJSON (Compose f g a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance (FromJSON1 f, FromJSON1 g) => FromJSON1 (Product f g) where
liftParseJSON p pl a = uncurry Pair <$> liftParseJSON2 px pxl py pyl a
where
px = liftParseJSON p pl
pxl = liftParseJSONList p pl
py = liftParseJSON p pl
pyl = liftParseJSONList p pl
{-# INLINE liftParseJSON #-}
instance (FromJSON1 f, FromJSON1 g, FromJSON a) => FromJSON (Product f g a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance (FromJSON1 f, FromJSON1 g) => FromJSON1 (Sum f g) where
liftParseJSON p pl (Object (H.toList -> [(key, value)]))
| key == inl = InL <$> liftParseJSON p pl value <?> Key inl
        | key == inr = InR <$> liftParseJSON p pl value <?> Key inr
where
inl, inr :: Text
inl = "InL"
inr = "InR"
liftParseJSON _ _ _ = fail $
"expected an object with a single property " ++
"where the property key should be either " ++
"\"InL\" or \"InR\""
{-# INLINE liftParseJSON #-}
instance (FromJSON1 f, FromJSON1 g, FromJSON a) => FromJSON (Sum f g a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- containers
-------------------------------------------------------------------------------
instance FromJSON1 Seq.Seq where
liftParseJSON p _ = withArray "Seq a" $
fmap Seq.fromList .
Tr.sequence . zipWith (parseIndexedJSON p) [0..] . V.toList
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON (Seq.Seq a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance (Ord a, FromJSON a) => FromJSON (Set.Set a) where
parseJSON = fmap Set.fromList . parseJSON
{-# INLINE parseJSON #-}
instance FromJSON IntSet.IntSet where
parseJSON = fmap IntSet.fromList . parseJSON
{-# INLINE parseJSON #-}
instance FromJSON1 IntMap.IntMap where
liftParseJSON p pl = fmap IntMap.fromList . liftParseJSON p' pl'
where
p' = liftParseJSON2 parseJSON parseJSONList p pl
pl' = liftParseJSONList2 parseJSON parseJSONList p pl
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (IntMap.IntMap a) where
parseJSON = fmap IntMap.fromList . parseJSON
{-# INLINE parseJSON #-}
instance (FromJSONKey k, Ord k) => FromJSON1 (M.Map k) where
liftParseJSON p _ = case fromJSONKey of
FromJSONKeyCoerce _-> withObject "Map k v" $
fmap (H.foldrWithKey (M.insert . unsafeCoerce) M.empty) . H.traverseWithKey (\k v -> p v <?> Key k)
FromJSONKeyText f -> withObject "Map k v" $
fmap (H.foldrWithKey (M.insert . f) M.empty) . H.traverseWithKey (\k v -> p v <?> Key k)
FromJSONKeyTextParser f -> withObject "Map k v" $
H.foldrWithKey (\k v m -> M.insert <$> f k <?> Key k <*> p v <?> Key k <*> m) (pure M.empty)
FromJSONKeyValue f -> withArray "Map k v" $ \arr ->
M.fromList <$> (Tr.sequence .
zipWith (parseIndexedJSONPair f p) [0..] . V.toList $ arr)
{-# INLINE liftParseJSON #-}
instance (FromJSONKey k, Ord k, FromJSON v) => FromJSON (M.Map k v) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSON1 Tree.Tree where
liftParseJSON p pl = go
where
go v = uncurry Tree.Node <$> liftParseJSON2 p pl p' pl' v
p' = liftParseJSON go (listParser go)
pl'= liftParseJSONList go (listParser go)
instance (FromJSON v) => FromJSON (Tree.Tree v) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- uuid
-------------------------------------------------------------------------------
instance FromJSON UUID.UUID where
parseJSON = withText "UUID" $
maybe (fail "Invalid UUID") pure . UUID.fromText
instance FromJSONKey UUID.UUID where
fromJSONKey = FromJSONKeyTextParser $
maybe (fail "Invalid UUID") pure . UUID.fromText
-------------------------------------------------------------------------------
-- vector
-------------------------------------------------------------------------------
instance FromJSON1 Vector where
liftParseJSON p _ = withArray "Vector a" $
V.mapM (uncurry $ parseIndexedJSON p) . V.indexed
{-# INLINE liftParseJSON #-}
instance (FromJSON a) => FromJSON (Vector a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
vectorParseJSON :: (FromJSON a, VG.Vector w a) => String -> Value -> Parser (w a)
vectorParseJSON s = withArray s $ fmap V.convert . V.mapM (uncurry $ parseIndexedJSON parseJSON) . V.indexed
{-# INLINE vectorParseJSON #-}
instance (Storable a, FromJSON a) => FromJSON (VS.Vector a) where
parseJSON = vectorParseJSON "Data.Vector.Storable.Vector a"
instance (VP.Prim a, FromJSON a) => FromJSON (VP.Vector a) where
parseJSON = vectorParseJSON "Data.Vector.Primitive.Vector a"
{-# INLINE parseJSON #-}
instance (VG.Vector VU.Vector a, FromJSON a) => FromJSON (VU.Vector a) where
parseJSON = vectorParseJSON "Data.Vector.Unboxed.Vector a"
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- unordered-containers
-------------------------------------------------------------------------------
instance (Eq a, Hashable a, FromJSON a) => FromJSON (HashSet.HashSet a) where
parseJSON = fmap HashSet.fromList . parseJSON
{-# INLINE parseJSON #-}
instance (FromJSONKey k, Eq k, Hashable k) => FromJSON1 (H.HashMap k) where
liftParseJSON p _ = case fromJSONKey of
FromJSONKeyCoerce _ -> withObject "HashMap ~Text v" $
uc . H.traverseWithKey (\k v -> p v <?> Key k)
FromJSONKeyText f -> withObject "HashMap k v" $
fmap (mapKey f) . H.traverseWithKey (\k v -> p v <?> Key k)
FromJSONKeyTextParser f -> withObject "HashMap k v" $
H.foldrWithKey (\k v m -> H.insert <$> f k <?> Key k <*> p v <?> Key k <*> m) (pure H.empty)
FromJSONKeyValue f -> withArray "Map k v" $ \arr ->
H.fromList <$> (Tr.sequence .
zipWith (parseIndexedJSONPair f p) [0..] . V.toList $ arr)
where
uc :: Parser (H.HashMap Text v) -> Parser (H.HashMap k v)
uc = unsafeCoerce
instance (FromJSON v, FromJSONKey k, Eq k, Hashable k) => FromJSON (H.HashMap k v) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- aeson
-------------------------------------------------------------------------------
instance FromJSON Value where
parseJSON = pure
{-# INLINE parseJSON #-}
instance FromJSON DotNetTime where
parseJSON = withText "DotNetTime" $ \t ->
let (s,m) = T.splitAt (T.length t - 5) t
t' = T.concat [s,".",m]
in case parseTime defaultTimeLocale "/Date(%s%Q)/" (unpack t') of
Just d -> pure (DotNetTime d)
_ -> fail "could not parse .NET time"
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- time
-------------------------------------------------------------------------------
instance FromJSON Day where
parseJSON = withText "Day" (Time.run Time.day)
instance FromJSONKey Day where
fromJSONKey = FromJSONKeyTextParser (Time.run Time.day)
instance FromJSON TimeOfDay where
parseJSON = withText "TimeOfDay" (Time.run Time.timeOfDay)
instance FromJSONKey TimeOfDay where
fromJSONKey = FromJSONKeyTextParser (Time.run Time.timeOfDay)
instance FromJSON LocalTime where
parseJSON = withText "LocalTime" (Time.run Time.localTime)
instance FromJSONKey LocalTime where
fromJSONKey = FromJSONKeyTextParser (Time.run Time.localTime)
-- | Supported string formats:
--
-- @YYYY-MM-DD HH:MM Z@
-- @YYYY-MM-DD HH:MM:SS Z@
-- @YYYY-MM-DD HH:MM:SS.SSS Z@
--
-- The first space may instead be a @T@, and the second space is
-- optional. The @Z@ represents UTC. The @Z@ may be replaced with a
-- time zone offset of the form @+0000@ or @-08:00@, where the first
-- two digits are hours, the @:@ is optional and the second two digits
-- (also optional) are minutes.
instance FromJSON ZonedTime where
parseJSON = withText "ZonedTime" (Time.run Time.zonedTime)
instance FromJSONKey ZonedTime where
fromJSONKey = FromJSONKeyTextParser (Time.run Time.zonedTime)
instance FromJSON UTCTime where
parseJSON = withText "UTCTime" (Time.run Time.utcTime)
instance FromJSONKey UTCTime where
fromJSONKey = FromJSONKeyTextParser (Time.run Time.utcTime)
-- | /WARNING:/ Only parse lengths of time from trusted input
-- since an attacker could easily fill up the memory of the target
-- system by specifying a scientific number with a big exponent like
-- @1e1000000000@.
instance FromJSON NominalDiffTime where
parseJSON = withScientific "NominalDiffTime" $ pure . realToFrac
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- base Monoid/Semigroup
-------------------------------------------------------------------------------
instance FromJSON1 Monoid.Dual where
liftParseJSON p _ = fmap Monoid.Dual . p
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (Monoid.Dual a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSON1 Monoid.First where
liftParseJSON p p' = fmap Monoid.First . liftParseJSON p p'
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (Monoid.First a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSON1 Monoid.Last where
liftParseJSON p p' = fmap Monoid.Last . liftParseJSON p p'
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (Monoid.Last a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSON1 Semigroup.Min where
liftParseJSON p _ a = Semigroup.Min <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Semigroup.Min <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Semigroup.Min a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance FromJSON1 Semigroup.Max where
liftParseJSON p _ a = Semigroup.Max <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Semigroup.Max <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Semigroup.Max a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance FromJSON1 Semigroup.First where
liftParseJSON p _ a = Semigroup.First <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Semigroup.First <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Semigroup.First a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance FromJSON1 Semigroup.Last where
liftParseJSON p _ a = Semigroup.Last <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Semigroup.Last <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Semigroup.Last a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance FromJSON1 Semigroup.WrappedMonoid where
liftParseJSON p _ a = Semigroup.WrapMonoid <$> p a
{-# INLINE liftParseJSON #-}
liftParseJSONList _ p a = fmap Semigroup.WrapMonoid <$> p a
{-# INLINE liftParseJSONList #-}
instance (FromJSON a) => FromJSON (Semigroup.WrappedMonoid a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
parseJSONList = liftParseJSONList parseJSON parseJSONList
{-# INLINE parseJSONList #-}
instance FromJSON1 Semigroup.Option where
liftParseJSON p p' = fmap Semigroup.Option . liftParseJSON p p'
{-# INLINE liftParseJSON #-}
instance FromJSON a => FromJSON (Semigroup.Option a) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
-------------------------------------------------------------------------------
-- tagged
-------------------------------------------------------------------------------
instance FromJSON1 Proxy where
{-# INLINE liftParseJSON #-}
liftParseJSON _ _ Null = pure Proxy
liftParseJSON _ _ v = typeMismatch "Proxy" v
instance FromJSON (Proxy a) where
{-# INLINE parseJSON #-}
parseJSON Null = pure Proxy
parseJSON v = typeMismatch "Proxy" v
instance FromJSON2 Tagged where
liftParseJSON2 _ _ p _ = fmap Tagged . p
{-# INLINE liftParseJSON2 #-}
instance FromJSON1 (Tagged a) where
liftParseJSON p _ = fmap Tagged . p
{-# INLINE liftParseJSON #-}
instance FromJSON b => FromJSON (Tagged a b) where
parseJSON = parseJSON1
{-# INLINE parseJSON #-}
instance FromJSONKey b => FromJSONKey (Tagged a b) where
fromJSONKey = coerceFromJSONKeyFunction (fromJSONKey :: FromJSONKeyFunction b)
fromJSONKeyList = (fmap . fmap) Tagged fromJSONKeyList
-------------------------------------------------------------------------------
-- Instances for converting from map keys
-------------------------------------------------------------------------------
instance (FromJSON a, FromJSON b) => FromJSONKey (a,b)
instance (FromJSON a, FromJSON b, FromJSON c) => FromJSONKey (a,b,c)
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d) => FromJSONKey (a,b,c,d)
instance FromJSONKey Char where
fromJSONKey = FromJSONKeyTextParser $ \t ->
if T.length t == 1
then return (T.index t 0)
else typeMismatch "Expected Char but String didn't contain exactly one character" (String t)
fromJSONKeyList = FromJSONKeyText T.unpack
instance (FromJSONKey a, FromJSON a) => FromJSONKey [a] where
fromJSONKey = fromJSONKeyList
-------------------------------------------------------------------------------
-- Tuple instances, see tuple-instances-from.hs
-------------------------------------------------------------------------------
instance FromJSON2 (,) where
liftParseJSON2 pA _ pB _ = withArray "(a, b)" $ \t ->
let n = V.length t
in if n == 2
then (,)
<$> parseJSONElemAtIndex pA 0 t
<*> parseJSONElemAtIndex pB 1 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 2"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a) => FromJSON1 ((,) a) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b) => FromJSON (a, b) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a) => FromJSON2 ((,,) a) where
liftParseJSON2 pB _ pC _ = withArray "(a, b, c)" $ \t ->
let n = V.length t
in if n == 3
then (,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex pB 1 t
<*> parseJSONElemAtIndex pC 2 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 3"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b) => FromJSON1 ((,,) a b) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c) => FromJSON (a, b, c) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b) => FromJSON2 ((,,,) a b) where
liftParseJSON2 pC _ pD _ = withArray "(a, b, c, d)" $ \t ->
let n = V.length t
in if n == 4
then (,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex pC 2 t
<*> parseJSONElemAtIndex pD 3 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 4"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c) => FromJSON1 ((,,,) a b c) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d) => FromJSON (a, b, c, d) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c) => FromJSON2 ((,,,,) a b c) where
liftParseJSON2 pD _ pE _ = withArray "(a, b, c, d, e)" $ \t ->
let n = V.length t
in if n == 5
then (,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex pD 3 t
<*> parseJSONElemAtIndex pE 4 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 5"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d) => FromJSON1 ((,,,,) a b c d) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e) => FromJSON (a, b, c, d, e) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d) => FromJSON2 ((,,,,,) a b c d) where
liftParseJSON2 pE _ pF _ = withArray "(a, b, c, d, e, f)" $ \t ->
let n = V.length t
in if n == 6
then (,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex pE 4 t
<*> parseJSONElemAtIndex pF 5 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 6"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e) => FromJSON1 ((,,,,,) a b c d e) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f) => FromJSON (a, b, c, d, e, f) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e) => FromJSON2 ((,,,,,,) a b c d e) where
liftParseJSON2 pF _ pG _ = withArray "(a, b, c, d, e, f, g)" $ \t ->
let n = V.length t
in if n == 7
then (,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex pF 5 t
<*> parseJSONElemAtIndex pG 6 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 7"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f) => FromJSON1 ((,,,,,,) a b c d e f) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g) => FromJSON (a, b, c, d, e, f, g) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f) => FromJSON2 ((,,,,,,,) a b c d e f) where
liftParseJSON2 pG _ pH _ = withArray "(a, b, c, d, e, f, g, h)" $ \t ->
let n = V.length t
in if n == 8
then (,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex pG 6 t
<*> parseJSONElemAtIndex pH 7 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 8"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g) => FromJSON1 ((,,,,,,,) a b c d e f g) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h) => FromJSON (a, b, c, d, e, f, g, h) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g) => FromJSON2 ((,,,,,,,,) a b c d e f g) where
liftParseJSON2 pH _ pI _ = withArray "(a, b, c, d, e, f, g, h, i)" $ \t ->
let n = V.length t
in if n == 9
then (,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex pH 7 t
<*> parseJSONElemAtIndex pI 8 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 9"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h) => FromJSON1 ((,,,,,,,,) a b c d e f g h) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i) => FromJSON (a, b, c, d, e, f, g, h, i) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h) => FromJSON2 ((,,,,,,,,,) a b c d e f g h) where
liftParseJSON2 pI _ pJ _ = withArray "(a, b, c, d, e, f, g, h, i, j)" $ \t ->
let n = V.length t
in if n == 10
then (,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex pI 8 t
<*> parseJSONElemAtIndex pJ 9 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 10"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i) => FromJSON1 ((,,,,,,,,,) a b c d e f g h i) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j) => FromJSON (a, b, c, d, e, f, g, h, i, j) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i) => FromJSON2 ((,,,,,,,,,,) a b c d e f g h i) where
liftParseJSON2 pJ _ pK _ = withArray "(a, b, c, d, e, f, g, h, i, j, k)" $ \t ->
let n = V.length t
in if n == 11
then (,,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex parseJSON 8 t
<*> parseJSONElemAtIndex pJ 9 t
<*> parseJSONElemAtIndex pK 10 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 11"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j) => FromJSON1 ((,,,,,,,,,,) a b c d e f g h i j) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k) => FromJSON (a, b, c, d, e, f, g, h, i, j, k) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j) => FromJSON2 ((,,,,,,,,,,,) a b c d e f g h i j) where
liftParseJSON2 pK _ pL _ = withArray "(a, b, c, d, e, f, g, h, i, j, k, l)" $ \t ->
let n = V.length t
in if n == 12
then (,,,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex parseJSON 8 t
<*> parseJSONElemAtIndex parseJSON 9 t
<*> parseJSONElemAtIndex pK 10 t
<*> parseJSONElemAtIndex pL 11 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 12"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k) => FromJSON1 ((,,,,,,,,,,,) a b c d e f g h i j k) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l) => FromJSON (a, b, c, d, e, f, g, h, i, j, k, l) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k) => FromJSON2 ((,,,,,,,,,,,,) a b c d e f g h i j k) where
liftParseJSON2 pL _ pM _ = withArray "(a, b, c, d, e, f, g, h, i, j, k, l, m)" $ \t ->
let n = V.length t
in if n == 13
then (,,,,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex parseJSON 8 t
<*> parseJSONElemAtIndex parseJSON 9 t
<*> parseJSONElemAtIndex parseJSON 10 t
<*> parseJSONElemAtIndex pL 11 t
<*> parseJSONElemAtIndex pM 12 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 13"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l) => FromJSON1 ((,,,,,,,,,,,,) a b c d e f g h i j k l) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m) => FromJSON (a, b, c, d, e, f, g, h, i, j, k, l, m) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l) => FromJSON2 ((,,,,,,,,,,,,,) a b c d e f g h i j k l) where
liftParseJSON2 pM _ pN _ = withArray "(a, b, c, d, e, f, g, h, i, j, k, l, m, n)" $ \t ->
let n = V.length t
in if n == 14
then (,,,,,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex parseJSON 8 t
<*> parseJSONElemAtIndex parseJSON 9 t
<*> parseJSONElemAtIndex parseJSON 10 t
<*> parseJSONElemAtIndex parseJSON 11 t
<*> parseJSONElemAtIndex pM 12 t
<*> parseJSONElemAtIndex pN 13 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 14"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m) => FromJSON1 ((,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m, FromJSON n) => FromJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m) => FromJSON2 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m) where
liftParseJSON2 pN _ pO _ = withArray "(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)" $ \t ->
let n = V.length t
in if n == 15
then (,,,,,,,,,,,,,,)
<$> parseJSONElemAtIndex parseJSON 0 t
<*> parseJSONElemAtIndex parseJSON 1 t
<*> parseJSONElemAtIndex parseJSON 2 t
<*> parseJSONElemAtIndex parseJSON 3 t
<*> parseJSONElemAtIndex parseJSON 4 t
<*> parseJSONElemAtIndex parseJSON 5 t
<*> parseJSONElemAtIndex parseJSON 6 t
<*> parseJSONElemAtIndex parseJSON 7 t
<*> parseJSONElemAtIndex parseJSON 8 t
<*> parseJSONElemAtIndex parseJSON 9 t
<*> parseJSONElemAtIndex parseJSON 10 t
<*> parseJSONElemAtIndex parseJSON 11 t
<*> parseJSONElemAtIndex parseJSON 12 t
<*> parseJSONElemAtIndex pN 13 t
<*> parseJSONElemAtIndex pO 14 t
else fail $ "cannot unpack array of length " ++ show n ++ " into a tuple of length 15"
{-# INLINE liftParseJSON2 #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m, FromJSON n) => FromJSON1 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m n) where
liftParseJSON = liftParseJSON2 parseJSON parseJSONList
{-# INLINE liftParseJSON #-}
instance (FromJSON a, FromJSON b, FromJSON c, FromJSON d, FromJSON e, FromJSON f, FromJSON g, FromJSON h, FromJSON i, FromJSON j, FromJSON k, FromJSON l, FromJSON m, FromJSON n, FromJSON o) => FromJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where
parseJSON = parseJSON2
{-# INLINE parseJSON #-}
|
phischu/fragnix
|
tests/packages/scotty/Data.Aeson.Types.FromJSON.hs
|
bsd-3-clause
| 87,711 | 0 | 26 | 21,079 | 20,672 | 10,934 | 9,738 | -1 | -1 |
{-# LANGUAGE CPP, TypeFamilies, DeriveDataTypeable #-}
module PGIP.Output.Translations
( formatTranslations
) where
import PGIP.Output.Formatting
import PGIP.Output.Mime
import Logic.Comorphism (AnyComorphism)
import Proofs.AbstractState (G_prover)
import Common.Json (ppJson, asJson)
import Common.ToXml (asXml)
import Common.Utils
import Text.XML.Light (ppTopElement)
import Data.Data
type TranslationsFormatter = [(G_prover, AnyComorphism)] -> (String, String)
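-- | Render the provers' comorphisms in the requested output format:
-- @Just "json"@ selects JSON, any other value falls back to XML. The result
-- pairs a content-type value (@jsonC@ or @xmlC@ from "PGIP.Output.Mime")
-- with the rendered document.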
formatTranslations :: Maybe String -> TranslationsFormatter
formatTranslations format comorphisms = case format of
Just "json" -> formatAsJSON
_ -> formatAsXML
where
convertedTranslations :: Translations
convertedTranslations = Translations
{ translations = nubOrd $ map (showComorph . snd) comorphisms }
formatAsJSON :: (String, String)
formatAsJSON = (jsonC, ppJson $ asJson convertedTranslations)
formatAsXML :: (String, String)
formatAsXML = (xmlC, ppTopElement $ asXml convertedTranslations)
data Translations = Translations
{ translations :: [String]
} deriving (Show, Typeable, Data)
|
keithodulaigh/Hets
|
PGIP/Output/Translations.hs
|
gpl-2.0
| 1,096 | 0 | 12 | 158 | 282 | 165 | 117 | 27 | 2 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeInType #-}
module Foo where
import Data.Typeable (Proxy(..), typeRep)
data family T a
data instance T Int = MkT
main :: IO ()
main = print $ typeRep (Proxy :: Proxy MkT)
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/T13915b.hs
|
bsd-3-clause
| 220 | 0 | 8 | 41 | 71 | 42 | 29 | 8 | 1 |
import Graphics.UI.Gtk
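-- A minimal gtk2hs example: a window containing a single button. Clicking
-- the button relabels it and prints "Hello World"; closing the window quits
-- the main loop.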
hello :: (ButtonClass o) => o -> IO ()
hello b = do set b [buttonLabel := "Hello World" ]
putStrLn "Hello World"
main :: IO ()
main = do initGUI
window <- windowNew
button <- buttonNew
set window [windowDefaultWidth := 200
,windowDefaultHeight := 200
,containerBorderWidth := 10
,containerChild := button]
onClicked button (hello button)
onDestroy window mainQuit
widgetShowAll window
mainGUI
|
gimbo/cabal-macosx
|
examples/gtkHello/src/Main.hs
|
bsd-3-clause
| 564 | 0 | 9 | 211 | 158 | 75 | 83 | 16 | 1 |
module E.TypeCheck(
canBeBox,
eAp,
inferType,
infertype,
typecheck,
match,
sortSortLike,
sortKindLike,
sortTermLike,
sortTypeLike,
typeInfer,
typeInfer'
) where
import Control.Monad.Reader
import Control.Monad.Writer
import qualified Data.Map as Map
import Doc.DocLike
import Doc.PPrint
import Doc.Pretty
import E.E
import E.Eval(strong)
import E.Subst
import GenUtil
import Name.Id
import Name.Name
import Name.Names
import Support.CanType
import Util.ContextMonad
import Util.SetLike
import qualified Util.Seq as Seq
import {-# SOURCE #-} DataConstructors
import {-# SOURCE #-} E.Show
{-@Internals
# Jhc Core Type System
Jhc's core is based on a pure type system. A pure type system (also called a
PTS) is actually a parameterized set of type systems. Jhc's version is
described by the following.
Sorts = (*, !, **, #, (#), ##, □)
Axioms = (*:**, #:##, !:**, **:□, ##:□)
-- sort kind
* is the kind of boxed values
! is the kind of boxed strict values
# is the kind of unboxed values
(#) is the kind of unboxed tuples
-- sort superkind
** is the superkind of all boxed value
## is the superkind of all unboxed values
-- sort box
□ superkinds inhabit this
in addition there exist user defined kinds, which are always of supersort ##
The following Rules table shows what sort of abstractions are allowed; a rule
of the form (A,B,C) means you can have functions from things of sort A to things
of sort B, and the result is something of sort C. _Function_ in this context
subsumes both term and type level abstractions.
Notice that functions are always boxed, but may be strict if they take an
unboxed tuple as an argument. When a function is strict it means that it is
represented by a pointer to code directly; it cannot be a suspended value that
evaluates to a function.
These type system rules apply to lambda abstractions. It is possible that data
constructors might exist that cannot be given a type on their own with these
rules, even though when fully applied they have a well formed type. An example
would be unboxed tuples. This presents no difficulty as one concludes correctly
that it is a type error for these constructors to ever appear when not fully
saturated with arguments.
As a shortcut we will use *# to mean every combination involving * and #, and so forth.
For instance, (*#,*#,*) means the set (*,*,*) (#,*,*) (*,#,*) (#,#,*)
Rules =
(*#!,*#!,*) -- functions from values to values are boxed and lazy
(*#!,(#),*) -- functions from values to unboxed tuples are boxed and lazy
((#),*#!,!) -- functions from unboxed tuples to values are boxed and strict
((#),(#),!) -- functions from unboxed tuples to unboxed tuples are boxed and strict
(**,*,*) -- may have a function from an unboxed type to a value
(**,#,*)
(**,!,*)
(**,**,**) -- we have functions from types to types
(**,##,##) -- MutArray_ :: * -> #
(##,##,##) -- Complex_ :: # -> #
The defining feature of boxed values is
_|_ :: t iff t::*
This PTS is functional but not injective
The PTS can be considered stratified into the following levels
□ - sort box
**,##, - sort superkind
*,#,(#),! - sort kind
Int,Bits32_,Char - sort type
3,True,"bob" - sort value
## On boxed kinds
The boxed kinds (* and !) represent types that have a uniform run time
representation. Due to this, functions may be written that are polymorphic in types of these kinds.
Hence the rules of the form (**,?,?), allowing taking types of boxed kinds as arguments.
The unboxed kind # is inhabited with types that have their own specific run
time representation. Hence you cannot write functions that are polymorphic in
unboxed types.
## On sort box, the unboxed tuple, and friends
Although sort box does not appear in the code, it is useful from a theoretical
point of view to talk about certain types such as the types of unboxed tuples.
Unboxed tuples may have boxed and unboxed arguments, without sort box it would
be impossible to express this since it must be superkind polymorphic. sort box
allows one to express this as (in the case of the unboxed 2-tuple)
∀s1:□ ∀s2:□ ∀k1:s1 ∀k2:s2 ∀t1:k1 ∀t2:k2 . (# t1, t2 #)
However, although this is a valid typing of what it would mean if an unboxed
tuple were not fully applied, since we do not have any rules of form (##,?,?) or
(□,?,?) this type obviously does not typecheck. This is what enforces the
invariant that unboxed tuples are always fully applied, and is also why we do
not need a code representation of sort box.
### Do we need a superbox?
You will notice that if you look at the axioms involving the sorts, you end up
with a disjoint graph
□ - the box
/ \
** ## - superkind
/\ \
* ! # (#) - kind
This is simply due to the fact that nothing is polymorphic in unboxed tuples of
kind (#) so we never need to refer to any super-sorts of them. We can add sorts
(##),(□) and □□ to fill in the gaps, but since these sorts will never appear in
code or discourse, we will ignore them from now on.
□□ - sort superbox
/ \
□ (□) - sort box
/ \ \
** ## (##) - sort superkind
/\ \ |
* ! # (#) - sort kind
-}
ptsAxioms :: Map.Map ESort ESort
ptsAxioms = Map.fromList [
(EStar,EStarStar),
(EBang,EStarStar),
(EHash,EHashHash),
(ETuple,EHashHash)
]
ptsRulesMap :: Map.Map (ESort,ESort) ESort
ptsRulesMap = Map.fromList [ ((a,b),c) | (as,bs,c) <- ptsRules, a <- as, b <- bs ] where
starHashBang = [EStar,EHash,EBang]
ptsRules = [
(starHashBang,ETuple:starHashBang,EStar),
([ETuple],ETuple:starHashBang,EBang),
([EStarStar],starHashBang,EStar),
([EStarStar],[EStarStar],EStarStar),
([EStarStar],[EHashHash],EHashHash),
([EHashHash],[EHashHash],EHashHash)
]
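-- Some consequences of the Rules table above, written as lookups into
-- ptsRulesMap (illustrative only):
--
-- Map.lookup (EStar, EStar) ptsRulesMap == Just EStar -- boxed, lazy function between boxed values
-- Map.lookup (ETuple, EStar) ptsRulesMap == Just EBang -- taking an unboxed tuple makes the function strict
-- Map.lookup (EHashHash, EStarStar) ptsRulesMap == Nothing -- no rule, so such an abstraction is ill-formed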
canBeBox x | getType (getType x) == ESort EStarStar = True
canBeBox _ = False
tBox = mktBox eStar
monadicLookup key m = case Map.lookup key m of
Just x -> return x
Nothing -> fail "Key not found"
-- Fast (and lazy, and perhaps unsafe) typeof
instance CanType E where
type TypeOf E = E
getType (ESort s) = ESort $ getType s
getType (ELit l) = getType l
getType (EVar v) = getType v
getType e@(EPi TVr { tvrType = a } b)
| isUnknown typa || isUnknown typb = Unknown
| otherwise = maybe (error $ "E.TypeCheck.getType: " ++ show (e,getType a,getType b)) ESort $ do
ESort s1 <- return $ getType a
ESort s2 <- return $ getType b
monadicLookup (s1,s2) ptsRulesMap
where typa = getType a; typb = getType b
getType (EAp (ELit LitCons { litType = EPi tvr a }) b) = getType (subst tvr b a)
getType (EAp (ELit lc@LitCons { litAliasFor = Just af }) b) = getType (foldl eAp af (litArgs lc ++ [b]))
getType (EAp (EPi tvr a) b) = getType (subst tvr b a)
getType e@(EAp a b) = ans where
ans = if isUnknown typa then Unknown else if a == tBox || typa == tBox then tBox else (case a of
(ELit LitCons {}) -> error $ "getType: application of type alias " ++ (render $ parens $ ePretty e)
_ -> eAp typa b)
typa = getType a
getType (ELam (TVr { tvrIdent = x, tvrType = a}) b) = EPi (tVr x a) (getType b)
getType (ELetRec _ e) = getType e
getType ECase {eCaseType = ty} = ty
getType (EError _ e) = e
getType (EPrim _ _ t) = t
getType Unknown = Unknown
instance CanType ESort where
type TypeOf ESort = ESort
getType (ESortNamed _) = EHashHash
getType s = case Map.lookup s ptsAxioms of
Just s -> s
Nothing -> error $ "getType: " ++ show s
instance CanType TVr where
type TypeOf TVr = E
getType = tvrType
instance CanType (Lit x t) where
type TypeOf (Lit x t) = t
getType l = litType l
instance CanType e => CanType (Alt e) where
type TypeOf (Alt e) = TypeOf e
getType (Alt _ e) = getType e
sortSortLike (ESort s) = isEHashHash s || isEStarStar s
sortSortLike _ = False
sortKindLike (ESort s) = not (isEHashHash s) && not (isEStarStar s)
sortKindLike e = sortSortLike (getType e)
sortTypeLike ESort {} = False
sortTypeLike e = sortKindLike (getType e)
sortTermLike ESort {} = False
sortTermLike e = sortTypeLike (getType e)
withContextDoc s a = withContext (render s) a
-- | Perform a full typecheck, evaluating type terms as necessary.
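-- The @[(TVr,E)]@ argument supplies let-bound definitions that may be needed
-- to evaluate ('strong') the types encountered; failures are reported through
-- the surrounding 'ContextMonad'.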
inferType :: (ContextMonad m, ContextOf m ~ String) => DataTable -> [(TVr,E)] -> E -> m E
inferType dataTable ds e = rfc e where
inferType' ds e = inferType dataTable ds e
prettyE = ePretty
rfc e = withContextDoc (text "fullCheck:" </> prettyE e) (fc e >>= strong')
rfc' nds e = withContextDoc (text "fullCheck':" </> prettyE e) (inferType' nds e)
strong' e = withContextDoc (parens $ text "Strong:" </> prettyE e) $ strong ds e
fc s@(ESort _) = return $ getType s
fc (ELit lc@LitCons {}) | let lc' = updateLit dataTable lc, litAliasFor lc /= litAliasFor lc' = fail $ "Alias not correct: " ++ show (lc, litAliasFor lc')
fc (ELit LitCons { litName = n, litArgs = es, litType = t}) | nameType n == TypeConstructor, Just _ <- fromUnboxedNameTuple n = do
withContext ("Checking Unboxed Tuple: " ++ show n) $ do
-- we omit kind checking for unboxed tuples
valid t
es' <- mapM rfc es
strong' t
fc e@(ELit LitCons { litName = n, litArgs = es, litType = t}) = do
withContext ("Checking Constructor: " ++ show e) $ do
valid t
es' <- mapM rfc es
t' <- strong' t
let sts = slotTypes dataTable n t
les = length es
lsts = length sts
withContext ("Checking Args: " ++ show (sts,es')) $ do
unless (les == lsts || (les < lsts && isEPi t')) $ do
fail "constructor with wrong number of arguments"
zipWithM_ eq sts es'
return t'
fc e@(ELit _) = let t = getType e in valid t >> return t
fc (EVar (TVr { tvrIdent = eid })) | eid == emptyId = fail "variable with nothing!"
fc (EVar (TVr { tvrType = t})) = valid t >> strong' t
fc (EPi (TVr { tvrIdent = n, tvrType = at}) b) = do
ESort a <- rfc at
ESort b <- rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b
liftM ESort $ monadicLookup (a,b) ptsRulesMap
--valid at >> rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b
--fc (ELam tvr@(TVr n at) b) = valid at >> rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b >>= \b' -> (strong' $ EPi tvr b')
fc (ELam tvr@(TVr { tvrIdent = n, tvrType = at}) b) = do
withContext "Checking Lambda" $ do
valid at
b' <- withContext "Checking Lambda Body" $ rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b
withContext "Checking lambda pi" $ strong' $ EPi tvr b'
fc (EAp (EPi tvr e) b) = rfc (subst tvr b e)
fc (EAp (ELit lc@LitCons { litAliasFor = Just af }) b) = rfc (EAp (foldl eAp af (litArgs lc)) b)
fc (EAp a b) = do
withContextDoc (text "EAp:" </> parens (prettyE a) </> parens (prettyE b)) $ do
a' <- rfc a
if a' == tBox then return tBox else strong' (eAp a' b)
fc (ELetRec vs e) = do
let ck (TVr { tvrIdent = eid },_) | eid == emptyId = fail "binding of empty var"
ck (tv@(TVr { tvrType = t}),e) = withContextDoc (hsep [text "Checking Let: ", parens (pprint tv),text " = ", parens $ prettyE e ]) $ do
when (getType t == eHash && not (isEPi t)) $ fail $ "Let binding unboxed value: " ++ show (tv,e)
valid' nds t
fceq nds e t
nds = vs ++ ds
mapM_ ck vs
when (hasRepeatUnder (tvrIdent . fst) vs) $ fail "Repeat Variable in ELetRec"
inferType' nds e
--et <- inferType' nds e
--strong nds et
fc (EError _ e) = valid e >> (strong' e)
fc (EPrim _ ts t) = mapM_ valid ts >> valid t >> ( strong' t)
fc ec@ECase { eCaseScrutinee = e@ELit {}, eCaseBind = b, eCaseAlts = as, eCaseType = dt } | sortTypeLike e = do -- TODO - this is a hack to get around case of constants.
withContext "Checking typelike pattern binding case" $ do
et <- rfc e
withContext "Checking typelike default binding" $ eq et (getType b)
verifyPats (casePats ec)
-- skip checking alternatives
ps <- mapM (strong' . getType) $ casePats ec
withContext "Checking typelike pattern equality" $ eqAll (et:ps)
strong' dt
fc ec@ECase {eCaseScrutinee = e, eCaseBind = b, eCaseAlts = as, eCaseType = dt } | sortTypeLike e = do -- TODO - we should substitute the tested for value into the default type.
withContext "Checking typelike binding case" $ do
et <- rfc e
withContext "Checking typelike default binding" $ eq et (getType b)
--dt <- rfc d
--bs <- mapM rfc (caseBodies ec) -- these should be specializations of dt
withContext "Checking typelike alternatives" $ mapM_ (calt e) as
--eqAll bs
verifyPats (casePats ec)
ps <- withContext "Getting pattern types" $ mapM (strong' . getType) $ casePats ec
withContext "checking typelike pattern equality" $ eqAll (et:ps)
withContext "Evaluating Case Type" $ strong' dt
fc ec@ECase { eCaseScrutinee =e, eCaseBind = b } = do
withContext "Checking plain case" $ do
et <- rfc e
withContext "Checking default binding" $ eq et (getType b)
bs <- withContext "Checking case bodies" $ mapM rfc (caseBodies ec)
ect <- strong' (eCaseType ec)
withContext "Checking case bodies have equal types" $ eqAll (ect:bs)
verifyPats (casePats ec)
ps <- mapM (strong' . getType) $ casePats ec
withContext "checking pattern equality" $ eqAll (et:ps)
return ect
fc Unknown = return Unknown
--fc e = failDoc $ text "what's this? " </> (prettyE e)
calt (EVar v) (Alt l e) = do
let nv = followAliases undefined (patToLitEE l)
rfc (subst' v nv e)
calt _ (Alt _ e) = rfc e
verifyPats xs = do
mapM_ verifyPats' xs
when (hasRepeatUnder litHead xs) $ fail "Duplicate case alternatives"
verifyPats' LitCons { litArgs = xs } = when (hasRepeatUnder id (filter (/= emptyId) $ map tvrIdent xs)) $ fail "Case pattern is non-linear"
verifyPats' _ = return ()
eqAll ts = withContextDoc (text "eqAll" </> list (map prettyE ts)) $ foldl1M_ eq ts
valid s = valid' ds s
valid' nds ESort {} = return ()
valid' nds s
| Unknown <- s = return ()
| otherwise = withContextDoc (text "valid:" <+> prettyE s) (do t <- inferType' nds s; valid' nds t)
eq box t2 | boxCompat box t2 = return t2
eq t1 box | boxCompat box t1 = return t1
-- box == tBox, canBeBox t2 = return t2
-- eq t1 box | box == tBox, canBeBox t1 = return t1
eq Unknown t2 = return t2
eq t1 Unknown = return t1
eq t1 t2 = eq' ds t1 t2
eq' nds t1 t2 = do
e1 <- strong nds (t1)
e2 <- strong nds (t2)
case typesCompatable e1 e2 of
Just () -> return (e1)
Nothing -> failDoc $ text "eq:" <+> align $ vcat [ prettyE (e1), prettyE (e2) ]
fceq nds e1 t2 = do
withContextDoc (hsep [text "fceq:", align $ vcat [parens $ prettyE e1, parens $ prettyE t2]]) $ do
t1 <- inferType' nds e1
eq' nds t1 t2
boxCompat (ELit (LitCons { litName = n })) t | Just e <- fromConjured modBox n = e == getType t
boxCompat _ _ = False
-- This should perform a full typecheck and may take any extra information needed as an extra parameter
class CanTypeCheck a where
typecheck :: Monad m => DataTable -> a -> m E
infertype :: CanTypeCheck a => DataTable -> a -> E
infertype env a = case typecheck env a of
Left s -> error $ "infertype: " ++ s
Right x -> x
instance CanTypeCheck E where
typecheck dataTable e = case runContextEither $ typeInfer'' dataTable [] e of
Left ss -> fail $ "\n>>> internal error:\n" ++ unlines ss
Right v -> return v
instance CanTypeCheck TVr where
typecheck dt tvr = do
typecheck dt (getType tvr)
return $ getType tvr
instance CanTypeCheck (Lit a E) where
typecheck dt LitCons { litType = t } = typecheck dt t >> return t
typecheck dt LitInt { litType = t } = typecheck dt t >> return t
-- TODO, types might be bound in scrutinization
instance CanTypeCheck (Alt E) where
typecheck dt (Alt l e) = typecheck dt l >> typecheck dt e
-- | Determine type of term using full algorithm with substitutions. This
-- should be used instead of 'typ' when let-bound type variables exist or you
-- wish a more thorough checking of types.
typeInfer :: DataTable -> E -> E
typeInfer dataTable e = case runContextEither $ typeInfer'' dataTable [] e of
Left ss -> error $ "\n>>> internal error:\n" ++ unlines (tail ss)
Right v -> v
typeInfer' :: DataTable -> [(TVr,E)] -> E -> E
typeInfer' dataTable ds e = case runContextEither $ typeInfer'' dataTable ds e of
Left ss -> error $ "\n>>> internal error:\n" ++ unlines (tail ss)
Right v -> v
data TcEnv = TcEnv {
--tcDefns :: [(TVr,E)],
tcContext :: [String]
--tcDataTable :: DataTable
}
tcContext_u f r@TcEnv{tcContext = x} = r{tcContext = f x}
newtype Tc a = Tc (Reader TcEnv a)
deriving(Monad,Functor,MonadReader TcEnv)
instance ContextMonad Tc where
type ContextOf Tc = String
withContext s = local (tcContext_u (s:))
{-
tcE :: E -> Tc E
tcE e = rfc e where
rfc e = withContextDoc (text "tcE:" </> ePretty e) (fc e >>= strong')
strong' e = do
ds <- asks tcDefns
withContextDoc (text "tcE.strong:" </> ePretty e) $ strong ds e
fc s@ESort {} = return $ getType s
fc (ELit LitCons { litType = t }) = strong' t
fc e@ELit {} = strong' (getType e)
fc (EVar TVr { tvrIdent = eid }) | eid == emptyId = fail "variable with nothing!"
fc (EVar TVr { tvrType = t}) = strong' t
fc (EPi TVr { tvrIdent = n, tvrType = at} b) = do
ESort a <- rfc at
ESort b <- local (tcDefns_u (\ds -> [ d | d@(v,_) <- ds, tvrIdent v /= n ])) $ rfc b
liftM ESort $ monadicLookup (a,b) ptsRulesMap
fc (ELam tvr@TVr { tvrIdent = n, tvrType = at} b) = do
at' <- strong' at
b' <- local (tcDefns_u (\ds -> [ d | d@(v,_) <- ds, tvrIdent v /= n ])) $ rfc b
return (EPi (tVr n at') b')
fc (EAp (EPi tvr e) b) = do
b <- strong' b
rfc (subst tvr b e)
fc (EAp (ELit lc@LitCons { litAliasFor = Just af }) b) = fc (EAp (foldl eAp af (litArgs lc)) b)
fc (EAp a b) = do
a' <- rfc a
if a' == tBox then return tBox else strong' (eAp a' b)
fc (ELetRec vs e) = local (tcDefns_u (vs ++)) $ rfc e
fc (EError _ e) = strong' e
fc (EPrim _ ts t) = strong' t
fc ECase { eCaseType = ty } = do
strong' ty
fc Unknown = return Unknown
fc e = failDoc $ text "what's this? " </> (ePretty e)
-}
typeInfer'' :: (ContextMonad m, ContextOf m ~ String) => DataTable -> [(TVr,E)] -> E -> m E
typeInfer'' dataTable ds e = rfc e where
inferType' ds e = typeInfer'' dataTable ds e
rfc e = withContextDoc (text "fullCheck':" </> ePretty e) (fc e >>= strong')
rfc' nds e = withContextDoc (text "fullCheck':" </> ePretty e) (inferType' nds e)
strong' e = withContextDoc (text "Strong':" </> ePretty e) $ strong ds e
fc s@ESort {} = return $ getType s
fc (ELit LitCons { litType = t }) = strong' t
fc e@ELit {} = strong' (getType e)
fc (EVar TVr { tvrIdent = eid }) | eid == emptyId = fail "variable with nothing!"
fc (EVar TVr { tvrType = t}) = strong' t
fc (EPi TVr { tvrIdent = n, tvrType = at} b) = do
ESort a <- rfc at
ESort b <- rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b
liftM ESort $ monadicLookup (a,b) ptsRulesMap
fc (ELam tvr@TVr { tvrIdent = n, tvrType = at} b) = do
at' <- strong' at
b' <- rfc' [ d | d@(v,_) <- ds, tvrIdent v /= n ] b
return (EPi (tVr n at') b')
fc (EAp (EPi tvr e) b) = do
b <- strong' b
rfc (subst tvr b e)
fc (EAp (ELit lc@LitCons { litAliasFor = Just af }) b) = fc (EAp (foldl eAp af (litArgs lc)) b)
fc (EAp a b) = do
a' <- rfc a
if a' == tBox then return tBox else strong' (eAp a' b)
fc (ELetRec vs e) = do
let nds = vs ++ ds
--et <- inferType' nds e
--strong nds et
inferType' nds e
fc (EError _ e) = strong' e
fc (EPrim _ ts t) = strong' t
fc ECase { eCaseType = ty } = do
strong' ty
fc Unknown = return Unknown
--fc e = failDoc $ text "what's this? " </> (ePretty e)
-- | find substitution that will transform the left term into the right one,
-- only substituting for the vars in the list
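-- For example (illustrative): matching the pattern @EAp f (EVar x)@ against
-- @EAp f e@ with @x@ in the substitutable list yields @[(x, e)]@; a variable
-- that is not in the list only matches the very same variable.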
match :: Monad m =>
(Id -> Maybe E) -- ^ function to look up values in the environment
-> [TVr] -- ^ vars which may be substituted
-> E -- ^ pattern to match
-> E -- ^ input expression
-> m [(TVr,E)]
match lup vs = \e1 e2 -> liftM Seq.toList $ execWriterT (un e1 e2 etherealIds) where
bvs :: IdSet
bvs = fromList (map tvrIdent vs)
un (EAp a b) (EAp a' b') c = do
un a a' c
un b b' c
un (ELam va ea) (ELam vb eb) c = lam va ea vb eb c
un (EPi va ea) (EPi vb eb) c = lam va ea vb eb c
un (EPi va ea) (ELit LitCons { litName = ar, litArgs = [x,y], litType = lt}) c | ar == tc_Arrow = do
un (tvrType va) x c
un ea y c
un (EPrim s xs t) (EPrim s' ys t') c | length xs == length ys = do
sequence_ [ un x y c | x <- xs | y <- ys]
un t t' c
un (ESort x) (ESort y) c | x == y = return ()
un (ELit (LitInt x t1)) (ELit (LitInt y t2)) c | x == y = un t1 t2 c
un (ELit LitCons { litName = n, litArgs = xs, litType = t }) (ELit LitCons { litName = n', litArgs = ys, litType = t'}) c | n == n' && length xs == length ys = do
sequence_ [ un x y c | x <- xs | y <- ys]
un t t' c
un (EVar TVr { tvrIdent = i, tvrType = t}) (EVar TVr {tvrIdent = j, tvrType = u}) c | i == j = un t u c
un (EVar TVr { tvrIdent = i, tvrType = t}) (EVar TVr {tvrIdent = j, tvrType = u}) c | isEtherealId i || isEtherealId j = fail "Expressions don't match"
un (EAp a b) (ELit lc@LitCons { litArgs = bas@(_:_), litType = t }) c = do
let (al:as) = reverse bas
un a (ELit lc { litArgs = reverse as, litType = ePi tvr { tvrType = getType al } t }) c
un b al c
un (EAp a b) (EPi TVr { tvrType = a1 } a2) c = do
un a (ELit litCons { litArgs = [a1], litName = tc_Arrow, litType = EPi tvr { tvrType = getType a2 } (getType a1) }) c
un b a2 c
un (EVar tvr@TVr { tvrIdent = i, tvrType = t}) b c
| i `member` bvs = tell (Seq.singleton (tvr,b))
| otherwise = fail $ "Expressions do not unify: " ++ show tvr ++ show b
un a (EVar tvr) c | Just b <- lup (tvrIdent tvr), not $ isEVar b = un a b c
--un a b c | Just a' <- followAlias undefined a = un a' b c
un a b c | Just b' <- followAlias undefined b = un a b' c
un a b _ = fail $ "Expressions do not unify: " ++ show a ++ show b
lam va ea vb eb (c:cs) = do
un (tvrType va) (tvrType vb) (c:cs)
un (subst va (EVar va { tvrIdent = c }) ea) (subst vb (EVar vb { tvrIdent = c }) eb) cs
lam _ _ _ _ _ = error "TypeCheck.match: bad."
|
m-alvarez/jhc
|
src/E/TypeCheck.hs
|
mit
| 23,771 | 2 | 23 | 6,767 | 7,046 | 3,487 | 3,559 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>GraalVM JavaScript</title>
<maps>
<homeID>graaljs</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/graaljs/src/main/javahelp/help_ar_SA/helpset_ar_SA.hs
|
apache-2.0
| 967 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module CmmContFlowOpt
( cmmCfgOpts
, cmmCfgOptsProc
, removeUnreachableBlocksProc
, replaceLabels
)
where
import Hoopl
import BlockId
import Cmm
import CmmUtils
import Maybes
import Panic
import Control.Monad
import Prelude hiding (succ, unzip, zip)
-- Note [What is shortcutting]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider this Cmm code:
--
-- L1: ...
-- goto L2;
-- L2: goto L3;
-- L3: ...
--
-- Here L2 is an empty block and contains only an unconditional branch
-- to L3. In this situation any block that jumps to L2 can jump
-- directly to L3:
--
-- L1: ...
-- goto L3;
-- L2: goto L3;
-- L3: ...
--
-- In this situation we say that we shortcut L2 to L3. One of the
-- consequences of shortcutting is that some blocks of code may become
-- unreachable (in the example above this is true for L2).
-- Note [Control-flow optimisations]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- This optimisation does three things:
--
-- - If a block finishes in an unconditional branch to another block
-- and that is the only jump to that block we concatenate the
-- destination block at the end of the current one.
--
-- - If a block finishes in a call whose continuation block is a
-- goto, then we can shortcut the destination, making the
-- continuation block the destination of the goto - but see Note
-- [Shortcut call returns].
--
-- - For any block that is not a call we try to shortcut the
-- destination(s). Additionally, if a block ends with a
-- conditional branch we try to invert the condition.
--
-- Blocks are processed using postorder DFS traversal. A side effect
-- of determining traversal order with a graph search is elimination
-- of any blocks that are unreachable.
--
-- Transformations are improved by working from the end of the graph
-- towards the beginning, because we may be able to perform many
-- shortcuts in one go.
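--
-- As a small illustration of the first transformation, block concatenation
-- rewrites
--
-- L1: ...
-- goto L2;
-- L2: x = y + 1;
-- goto L3;
--
-- (where L1 is the only jump to L2) into
--
-- L1: ...
-- x = y + 1;
-- goto L3;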
-- Note [Shortcut call returns]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- We are going to maintain the "current" graph (BlockEnv CmmBlock) as
-- we go, and also a mapping from BlockId to BlockId, representing
-- continuation labels that we have renamed. This latter mapping is
-- important because we might shortcut a CmmCall continuation. For
-- example:
--
-- Sp[0] = L
-- call g returns to L
-- L: goto M
-- M: ...
--
-- So when we shortcut the L block, we need to replace not only
-- the continuation of the call, but also references to L in the
-- code (e.g. the assignment Sp[0] = L):
--
-- Sp[0] = M
-- call g returns to M
-- M: ...
--
-- So we keep track of which labels we have renamed and apply the mapping
-- at the end with replaceLabels.
-- Note [Shortcut call returns and proc-points]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Consider this code that you might get from a recursive
-- let-no-escape:
--
-- goto L1
-- L1:
-- if (Hp > HpLim) then L2 else L3
-- L2:
-- call stg_gc_noregs returns to L4
-- L4:
-- goto L1
-- L3:
-- ...
-- goto L1
--
-- Then the control-flow optimiser shortcuts L4. But that turns L1
-- into the call-return proc point, and every iteration of the loop
-- has to shuffle variables to and from the stack. So we must *not*
-- shortcut L4.
--
-- Moreover not shortcutting call returns is probably fine. If L4 can
-- concat with its branch target then it will still do so. And we
-- save some compile time because we don't have to traverse all the
-- code in replaceLabels.
--
-- However, we probably do want to do this if we are splitting proc
-- points, because L1 will be a proc-point anyway, so merging it with
-- L4 reduces the number of proc points. Unfortunately recursive
-- let-no-escapes won't generate very good code with proc-point
-- splitting on - we should probably compile them to explicitly use
-- the native calling convention instead.
cmmCfgOpts :: Bool -> CmmGraph -> CmmGraph
cmmCfgOpts split g = fst (blockConcat split g)
cmmCfgOptsProc :: Bool -> CmmDecl -> CmmDecl
cmmCfgOptsProc split (CmmProc info lbl live g) = CmmProc info' lbl live g'
where (g', env) = blockConcat split g
info' = info{ info_tbls = new_info_tbls }
new_info_tbls = mapFromList (map upd_info (mapToList (info_tbls info)))
-- If we changed any labels, then we have to update the info tables
-- too, except for the top-level info table because that might be
-- referred to by other procs.
upd_info (k,info)
| Just k' <- mapLookup k env
= (k', if k' == g_entry g'
then info
else info{ cit_lbl = infoTblLbl k' })
| otherwise
= (k,info)
cmmCfgOptsProc _ top = top
blockConcat :: Bool -> CmmGraph -> (CmmGraph, BlockEnv BlockId)
blockConcat splitting_procs g@CmmGraph { g_entry = entry_id }
= (replaceLabels shortcut_map $ ofBlockMap new_entry new_blocks, shortcut_map')
where
-- We might be able to shortcut the entry BlockId itself.
-- Remember to update the shortcut_map, since we also have to
-- update the info_tbls mapping now.
(new_entry, shortcut_map')
| Just entry_blk <- mapLookup entry_id new_blocks
, Just dest <- canShortcut entry_blk
= (dest, mapInsert entry_id dest shortcut_map)
| otherwise
= (entry_id, shortcut_map)
-- blocks is a list of blocks in DFS postorder, while blockmap is
-- a map of blocks. We process each element from blocks and update
-- blockmap accordingly
blocks = postorderDfs g
blockmap = foldr addBlock emptyBody blocks
-- Accumulator contains three components:
-- * map of blocks in a graph
-- * map of shortcut labels. See Note [Shortcut call returns]
-- * map containing number of predecessors for each block. We discard
-- it after we process all blocks.
(new_blocks, shortcut_map, _) =
foldr maybe_concat (blockmap, mapEmpty, initialBackEdges) blocks
-- Map of predecessors for the initial graph. We increase the number of
-- predecessors for the entry block by one to denote that it is the
-- target of a jump, even if no block in the current graph jumps
-- to it.
initialBackEdges = incPreds entry_id (predMap blocks)
maybe_concat :: CmmBlock
-> (BlockEnv CmmBlock, BlockEnv BlockId, BlockEnv Int)
-> (BlockEnv CmmBlock, BlockEnv BlockId, BlockEnv Int)
maybe_concat block (blocks, shortcut_map, backEdges)
-- If:
-- (1) current block ends with unconditional branch to b' and
-- (2) it has exactly one predecessor (namely, current block)
--
-- Then:
-- (1) append b' block at the end of current block
-- (2) remove b' from the map of blocks
-- (3) remove information about b' from predecessors map
--
-- Since we know that the block has only one predecessor we call
-- mapDelete directly instead of calling decPreds.
--
-- Note that we always maintain an up-to-date list of predecessors, so
-- we can ignore the contents of shortcut_map
| CmmBranch b' <- last
, hasOnePredecessor b'
, Just blk' <- mapLookup b' blocks
= let bid' = entryLabel blk'
in ( mapDelete bid' $ mapInsert bid (splice head blk') blocks
, shortcut_map
, mapDelete b' backEdges )
-- If:
-- (1) we are splitting proc points (see Note
-- [Shortcut call returns and proc-points]) and
-- (2) current block is a CmmCall or CmmForeignCall with
-- continuation b' and
-- (3) we can shortcut that continuation to dest
-- Then:
-- (1) we change the continuation to point to dest
-- (2) create mapping from b' to dest
-- (3) increase number of predecessors of dest by 1
-- (4) decrease number of predecessors of b' by 1
--
-- Later we will use replaceLabels to substitute all occurrences of b'
-- with dest.
| splitting_procs
, Just b' <- callContinuation_maybe last
, Just blk' <- mapLookup b' blocks
, Just dest <- canShortcut blk'
= ( mapInsert bid (blockJoinTail head (update_cont dest)) blocks
, mapInsert b' dest shortcut_map
, decPreds b' $ incPreds dest backEdges )
-- If:
-- (1) a block does not end with a call
-- Then:
-- (1) if it ends with a conditional attempt to invert the
-- conditional
-- (2) attempt to shortcut all destination blocks
-- (3) if new successors of a block are different from the old ones
-- update the map of predecessors accordingly
--
-- A special case of this is a situation when a block ends with an
-- unconditional jump to a block that can be shortcut.
| Nothing <- callContinuation_maybe last
= let oldSuccs = successors last
newSuccs = successors swapcond_last
in ( mapInsert bid (blockJoinTail head swapcond_last) blocks
, shortcut_map
, if oldSuccs == newSuccs
then backEdges
else foldr incPreds (foldr decPreds backEdges oldSuccs) newSuccs )
-- Otherwise don't do anything
| otherwise
= ( blocks, shortcut_map, backEdges )
where
(head, last) = blockSplitTail block
bid = entryLabel block
-- Changes continuation of a call to a specified label
update_cont dest =
case last of
CmmCall{} -> last { cml_cont = Just dest }
CmmForeignCall{} -> last { succ = dest }
_ -> panic "Can't shortcut continuation."
-- Attempts to shortcut successors of last node
shortcut_last = mapSuccessors shortcut last
where
shortcut l =
case mapLookup l blocks of
Just b | Just dest <- canShortcut b -> dest
_otherwise -> l
-- For a conditional, we invert the conditional if that would make it
-- more likely that the branch-not-taken case becomes a fallthrough.
-- This helps the native codegen a little bit, and probably has no
-- effect on LLVM. It's convenient to do it here, where we have the
-- information about predecessors.
swapcond_last
| CmmCondBranch cond t f <- shortcut_last
, numPreds f > 1
, hasOnePredecessor t
, Just cond' <- maybeInvertCmmExpr cond
= CmmCondBranch cond' f t
| otherwise
= shortcut_last
-- Number of predecessors for a block
numPreds bid = mapLookup bid backEdges `orElse` 0
hasOnePredecessor b = numPreds b == 1
-- Functions for incrementing and decrementing the number of predecessors. If
-- decrementing would set the predecessor count to 0, we remove the entry from
-- the map.
-- Invariant: if a block has no predecessors it should be dropped from the
-- graph because it is unreachable. maybe_concat is constructed to maintain
-- that invariant, but calling replaceLabels may introduce unreachable blocks.
-- We rely on subsequent passes in the Cmm pipeline to remove unreachable
-- blocks.
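-- For instance, @decPreds b (incPreds b mapEmpty)@ yields a map with no entry
-- for @b@ (the count would have dropped to 0), while two @incPreds b@ followed
-- by one @decPreds b@ leave @b@ mapped to 1.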
incPreds, decPreds :: BlockId -> BlockEnv Int -> BlockEnv Int
incPreds bid edges = mapInsertWith (+) bid 1 edges
decPreds bid edges = case mapLookup bid edges of
Just preds | preds > 1 -> mapInsert bid (preds - 1) edges
Just _ -> mapDelete bid edges
_ -> edges
-- Checks if a block consists only of "goto dest". If it does then we return
-- "Just dest". See Note [What is shortcutting]
canShortcut :: CmmBlock -> Maybe BlockId
canShortcut block
| (_, middle, CmmBranch dest) <- blockSplit block
, all dont_care $ blockToList middle
= Just dest
| otherwise
= Nothing
where dont_care CmmComment{} = True
dont_care CmmTick{} = True
dont_care _other = False
-- Concatenates two blocks. First one is assumed to be open on exit, the second
-- is assumed to be closed on entry (i.e. it has a label attached to it, which
-- the splice function removes by calling snd on result of blockSplitHead).
splice :: Block CmmNode C O -> CmmBlock -> CmmBlock
splice head rest = entry `blockJoinHead` code0 `blockAppend` code1
where (CmmEntry lbl sc0, code0) = blockSplitHead head
(CmmEntry _ sc1, code1) = blockSplitHead rest
entry = CmmEntry lbl (combineTickScopes sc0 sc1)
-- If node is a call with continuation call return Just label of that
-- continuation. Otherwise return Nothing.
callContinuation_maybe :: CmmNode O C -> Maybe BlockId
callContinuation_maybe (CmmCall { cml_cont = Just b }) = Just b
callContinuation_maybe (CmmForeignCall { succ = b }) = Just b
callContinuation_maybe _ = Nothing
-- Map over the CmmGraph, replacing each label with its mapping in the
-- supplied BlockEnv.
replaceLabels :: BlockEnv BlockId -> CmmGraph -> CmmGraph
replaceLabels env g
| mapNull env = g
| otherwise = replace_eid $ mapGraphNodes1 txnode g
where
replace_eid g = g {g_entry = lookup (g_entry g)}
lookup id = mapLookup id env `orElse` id
txnode :: CmmNode e x -> CmmNode e x
txnode (CmmBranch bid) = CmmBranch (lookup bid)
txnode (CmmCondBranch p t f) = mkCmmCondBranch (exp p) (lookup t) (lookup f)
txnode (CmmSwitch e arms) = CmmSwitch (exp e) (map (liftM lookup) arms)
txnode (CmmCall t k rg a res r) = CmmCall (exp t) (liftM lookup k) rg a res r
txnode fc@CmmForeignCall{} = fc{ args = map exp (args fc)
, succ = lookup (succ fc) }
txnode other = mapExpDeep exp other
exp :: CmmExpr -> CmmExpr
exp (CmmLit (CmmBlock bid)) = CmmLit (CmmBlock (lookup bid))
exp (CmmStackSlot (Young id) i) = CmmStackSlot (Young (lookup id)) i
exp e = e
mkCmmCondBranch :: CmmExpr -> Label -> Label -> CmmNode O C
mkCmmCondBranch p t f = if t == f then CmmBranch t else CmmCondBranch p t f
-- Build a map from a block to its number of predecessors.
predMap :: [CmmBlock] -> BlockEnv Int
predMap blocks = foldr add_preds mapEmpty blocks
where
add_preds block env = foldr add env (successors block)
where add lbl env = mapInsertWith (+) lbl 1 env
-- Removing unreachable blocks
removeUnreachableBlocksProc :: CmmDecl -> CmmDecl
removeUnreachableBlocksProc proc@(CmmProc info lbl live g)
| length used_blocks < mapSize (toBlockMap g)
= CmmProc info' lbl live g'
| otherwise
= proc
where
g' = ofBlockList (g_entry g) used_blocks
info' = info { info_tbls = keep_used (info_tbls info) }
-- Remove any info_tbls for unreachable blocks
keep_used :: BlockEnv CmmInfoTable -> BlockEnv CmmInfoTable
keep_used bs = mapFoldWithKey keep emptyBlockMap bs
keep :: Label -> CmmInfoTable -> BlockEnv CmmInfoTable -> BlockEnv CmmInfoTable
keep l i env | l `setMember` used_lbls = mapInsert l i env
| otherwise = env
used_blocks :: [CmmBlock]
used_blocks = postorderDfs g
used_lbls :: LabelSet
used_lbls = foldr (setInsert . entryLabel) setEmpty used_blocks
|
green-haskell/ghc
|
compiler/cmm/CmmContFlowOpt.hs
|
bsd-3-clause
| 15,662 | 0 | 18 | 4,436 | 2,495 | 1,341 | 1,154 | 160 | 8 |
module MoveDef.Md2 where
toplevel :: Integer -> Integer
toplevel x = c * x * b
where
b = 3
c,d :: Integer
c = 7
d = 9
-- Pattern bind
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h,t) = head $ zip [1..10] [3..ff]
where
ff :: Int
ff = 15
data D = A | B String | C
ff :: Int -> Int
ff y = y + zz
where
zz = 1
l z =
let
ll = 34
in ll + z
dd q = do
let ss = 5
return (ss + q)
zz1 a = 1 + toplevel a
-- General Comment
-- |haddock comment
tlFunc :: Integer -> Integer
tlFunc x = c * x
-- Comment at end
|
RefactoringTools/HaRe
|
test/testdata/MoveDef/Md2.hs
|
bsd-3-clause
| 540 | 0 | 9 | 177 | 258 | 142 | 116 | 27 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.ToggleHook
-- Copyright : Ben Boeckel <[email protected]>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Ben Boeckel <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- Hook and keybindings for toggling hook behavior.
-----------------------------------------------------------------------------
module XMonad.Hooks.ToggleHook ( -- * Usage
-- $usage
-- * The hook
toggleHook
, toggleHook'
-- * Actions
, hookNext
, toggleHookNext
, hookAllNew
, toggleHookAllNew
-- * Queries
, willHook
, willHookNext
, willHookAllNew
-- * 'DynamicLog' utilities
-- $pp
, willHookNextPP
, willHookAllNewPP
, runLogHook ) where
import Prelude hiding (all)
import XMonad
import qualified XMonad.Util.ExtensibleState as XS
import Control.Monad (join,guard)
import Control.Applicative ((<$>))
import Control.Arrow (first, second)
import Data.Map
{- Helper functions -}
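-- Each hook name maps to a pair of flags @(next, all)@ kept in 'HookState'
-- below: '_set' overwrites one component of that pair, '_toggle' flips it,
-- '_get' projects a value out of it, and '_pp' renders such a projection for
-- use as a 'DynamicLog' extra.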
_set :: String -> ((a -> a) -> (Bool, Bool) -> (Bool, Bool)) -> a -> X ()
_set n f b = modify' n (f $ const b)
_toggle :: String -> ((Bool -> Bool) -> (Bool, Bool) -> (Bool, Bool)) -> X ()
_toggle n f = modify' n (f not)
_get :: String -> ((Bool, Bool) -> a) -> X a
_get n f = XS.gets $ f . (findWithDefault (False, False) n . hooks)
_pp :: String -> ((Bool, Bool) -> Bool) -> String -> (String -> String) -> X (Maybe String)
_pp n f s st = (\b -> guard b >> Just (st s)) <$> _get n f
{- The current state is kept here -}
data HookState = HookState { hooks :: Map String (Bool, Bool) } deriving (Typeable, Read, Show)
instance ExtensionClass HookState where
initialValue = HookState empty
extensionType = PersistentExtension
modify' :: String -> ((Bool, Bool) -> (Bool, Bool)) -> X ()
modify' n f = XS.modify (HookState . setter . hooks)
where
setter m = insert n (f (findWithDefault (False, False) n m)) m
-- $usage
-- This module provides actions (that can be set as keybindings)
-- to cause hooks to occur on a conditional basis.
--
-- You can use it by including the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Hooks.ToggleHook
--
-- and adding 'toggleHook name hook' to your 'ManageHook' where @name@ is the
-- name of the hook and @hook@ is the hook to execute based on the state.
--
-- > myManageHook = toggleHook "float" doFloat <+> manageHook def
--
-- Additionally, toggleHook' is provided to toggle between two hooks (rather
-- than on/off).
--
-- > myManageHook = toggleHook' "oldfocus" (const id) W.focusWindow <+> manageHook def
--
-- The 'hookNext' and 'toggleHookNext' functions can be used in key
-- bindings to set whether the hook is applied or not.
--
-- > , ((modm, xK_e), toggleHookNext "float")
--
-- 'hookAllNew' and 'toggleHookAllNew' are similar but apply the hook
-- to all spawned windows until disabled again.
--
-- > , ((modm, xK_r), toggleHookAllNew "float")
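--
-- A combined sketch of how these pieces might fit together in an @xmonad.hs@
-- (the names @myManageHook@ and @myKeys@ are purely illustrative):
--
-- > myManageHook = toggleHook "float" doFloat <+> manageHook def
-- >
-- > myKeys = [ ((modm, xK_e), toggleHookNext   "float" >> runLogHook)
-- >          , ((modm, xK_r), toggleHookAllNew "float" >> runLogHook) ]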
-- | This 'ManageHook' will selectively apply a hook as set
-- by 'hookNext' and 'hookAllNew'.
toggleHook :: String -> ManageHook -> ManageHook
toggleHook n h = toggleHook' n h idHook
toggleHook' :: String -> ManageHook -> ManageHook -> ManageHook
toggleHook' n th fh = do m <- liftX $ XS.gets hooks
(next, all) <- return $ findWithDefault (False, False) n m
liftX $ XS.put $ HookState $ insert n (False, all) m
if next || all then th else fh
-- | @hookNext name True@ arranges for the next spawned window to
-- have the hook @name@ applied, @hookNext name False@ cancels it.
hookNext :: String -> Bool -> X ()
hookNext n = _set n first
toggleHookNext :: String -> X ()
toggleHookNext n = _toggle n first
-- | @hookAllNew name True@ arranges for new windows to
-- have the hook @name@ applied, @hookAllNew name False@ cancels it
hookAllNew :: String -> Bool -> X ()
hookAllNew n = _set n second
toggleHookAllNew :: String -> X ()
toggleHookAllNew n = _toggle n second
-- | Query what will happen at the next ManageHook call for the hook @name@.
willHook :: String -> X Bool
willHook n = willHookNext n <||> willHookAllNew n
-- | Whether the next window will trigger the hook @name@.
willHookNext :: String -> X Bool
willHookNext n = _get n fst
-- | Whether new windows will trigger the hook @name@.
willHookAllNew :: String -> X Bool
willHookAllNew n = _get n snd
-- $pp
-- The following functions are used to display the current
-- state of 'hookNext' and 'hookAllNew' in your
-- 'XMonad.Hooks.DynamicLog.dynamicLogWithPP'.
-- 'willHookNextPP' and 'willHookAllNewPP' should be added
-- to the 'XMonad.Hooks.DynamicLog.ppExtras' field of your
-- 'XMonad.Hooks.DynamicLog.PP'.
--
-- Use 'runLogHook' to refresh the output of your 'logHook', so
-- that the effects of a 'hookNext'/... will be visible
-- immediately:
--
-- > , ((modm, xK_e), toggleHookNext "float" >> runLogHook)
--
-- The @String -> String@ parameters to 'willHookNextPP' and
-- 'willHookAllNewPP' will be applied to their output, you
-- can use them to set the text color, etc., or you can just
-- pass them 'id'.
willHookNextPP :: String -> (String -> String) -> X (Maybe String)
willHookNextPP n = _pp n fst "Next"
willHookAllNewPP :: String -> (String -> String) -> X (Maybe String)
willHookAllNewPP n = _pp n snd "All"
runLogHook :: X ()
runLogHook = join $ asks $ logHook . config
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Hooks/ToggleHook.hs
|
bsd-2-clause
| 6,014 | 0 | 12 | 1,605 | 1,122 | 629 | 493 | 63 | 2 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file. An additional grant of patent rights can
-- be found in the PATENTS file.
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- | Base types used by all of Haxl.
module Haxl.Core.Types (
-- * Initialization strategies
InitStrategy(..),
-- * Tracing flags
Flags(..),
defaultFlags,
ifTrace,
ifReport,
-- * Statistics
Stats(..),
RoundStats(..),
DataSourceRoundStats(..),
Microseconds,
emptyStats,
numRounds,
numFetches,
ppStats,
ppRoundStats,
ppDataSourceRoundStats,
-- * Data fetching
DataSource(..),
DataSourceName(..),
Request,
BlockedFetch(..),
PerformFetch(..),
-- * Result variables
ResultVar(..),
newEmptyResult,
newResult,
putFailure,
putResult,
putSuccess,
takeResult,
tryReadResult,
tryTakeResult,
-- * Default fetch implementations
asyncFetch, asyncFetchWithDispatch,
stubFetch,
syncFetch,
-- * Utilities
except,
setError,
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Data.Aeson
import Data.Function (on)
import Data.Hashable
import Data.HashMap.Strict (HashMap, toList)
import qualified Data.HashMap.Strict as HashMap
import Data.List (intercalate, sortBy)
import Data.Text (Text, unpack)
import Data.Typeable (Typeable)
#if __GLASGOW_HASKELL__ < 708
import Haxl.Core.Util (tryReadMVar)
#endif
import Haxl.Core.Show1
import Haxl.Core.StateStore
-- | Initialization strategy. 'FullInit' will do as much initialization as
-- possible. 'FastInit' will postpone or omit part of the initialization by
-- sharing more resources. Use 'FastInit' if you want fast initialization but
-- don't care much about performance, for example in an interactive
-- environment.
data InitStrategy
= FullInit
| FastInit
deriving (Enum, Eq, Show)
-- | Flags that control the operation of the engine.
data Flags = Flags
{ trace :: Int
-- ^ Tracing level (0 = quiet, 3 = very verbose).
, report :: Int
-- ^ Report level (0 = quiet, 1 = # of requests, 2 = time, 3 = # of errors)
}
defaultFlags :: Flags
defaultFlags = Flags
{ trace = 0
, report = 1
}
-- | Runs an action if the tracing level is at or above the given threshold.
ifTrace :: (Functor m, Monad m) => Flags -> Int -> m a -> m ()
ifTrace flags i = when (trace flags >= i) . void
-- | Runs an action if the report level is at or above the given threshold.
ifReport :: (Functor m, Monad m) => Flags -> Int -> m a -> m ()
ifReport flags i = when (report flags >= i) . void
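-- A small illustrative use (the message is arbitrary): print a diagnostic only
-- when the tracing level is at least 2.
--
-- > ifTrace flags 2 (putStrLn "dispatching next round")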
type Microseconds = Int
-- | Stats that we collect along the way.
newtype Stats = Stats [RoundStats]
deriving ToJSON
-- | Pretty-print Stats.
ppStats :: Stats -> String
ppStats (Stats rss) =
intercalate "\n" [ "Round: " ++ show i ++ " - " ++ ppRoundStats rs
| (i, rs) <- zip [(1::Int)..] (reverse rss) ]
-- | Maps data source name to the number of requests made in that round.
-- The map only contains entries for sources that made requests in that
-- round.
data RoundStats = RoundStats
{ roundTime :: Microseconds
, roundDataSources :: HashMap Text DataSourceRoundStats
}
-- | Pretty-print RoundStats.
ppRoundStats :: RoundStats -> String
ppRoundStats (RoundStats t dss) =
show t ++ "us\n"
++ unlines [ " " ++ unpack nm ++ ": " ++ ppDataSourceRoundStats dsrs
| (nm, dsrs) <- sortBy (compare `on` fst) (toList dss) ]
instance ToJSON RoundStats where
toJSON RoundStats{..} = object
[ "time" .= roundTime
, "dataSources" .= roundDataSources
]
-- | Detailed stats of each data source in each round.
data DataSourceRoundStats = DataSourceRoundStats
{ dataSourceFetches :: Int
, dataSourceTime :: Maybe Microseconds
}
-- | Pretty-print DataSourceRoundStats
ppDataSourceRoundStats :: DataSourceRoundStats -> String
ppDataSourceRoundStats (DataSourceRoundStats i t) =
maybeTime $ show i ++ " fetches"
where maybeTime = maybe id (\ tm s -> s ++ " (" ++ show tm ++ "us)") t
instance ToJSON DataSourceRoundStats where
toJSON DataSourceRoundStats{..} = object [k .= v | (k, Just v) <-
[ ("fetches", Just dataSourceFetches)
, ("time", dataSourceTime)
]]
fetchesInRound :: RoundStats -> Int
fetchesInRound (RoundStats _ hm) =
sum $ map dataSourceFetches $ HashMap.elems hm
emptyStats :: Stats
emptyStats = Stats []
numRounds :: Stats -> Int
numRounds (Stats rs) = length rs
numFetches :: Stats -> Int
numFetches (Stats rs) = sum (map fetchesInRound rs)
-- | The class of data sources, parameterised over the request type for
-- that data source. Every data source must implement this class.
--
-- A data source keeps track of its state by creating an instance of
-- 'StateKey' to map the request type to its state. In this case, the
-- type of the state should probably be a reference type of some kind,
-- such as 'IORef'.
--
-- For a complete example data source, see
-- <https://github.com/facebook/Haxl/tree/master/example Examples>.
--
class (DataSourceName req, StateKey req, Show1 req) => DataSource u req where
-- | Issues a list of fetches to this 'DataSource'. The 'BlockedFetch'
-- objects contain both the request and the 'MVar's into which to put
-- the results.
fetch
:: State req
-- ^ Current state.
-> Flags
-- ^ Tracing flags.
-> u
-- ^ User environment.
-> [BlockedFetch req]
-- ^ Requests to fetch.
-> PerformFetch
-- ^ Fetch the data; see 'PerformFetch'.
class DataSourceName req where
-- | The name of this 'DataSource', used in tracing and stats. Must
-- take a dummy request.
dataSourceName :: req a -> Text
-- The 'Show1' class is a workaround for the fact that we can't write
-- @'Show' (req a)@ as a superclass of 'DataSource', without also
-- parameterizing 'DataSource' over @a@, which is a pain (I tried
-- it). 'Show1' seems fairly benign, though.
-- | A convenience only: package up 'Eq', 'Hashable', 'Typeable', and 'Show'
-- for requests into a single constraint.
type Request req a =
( Eq (req a)
, Hashable (req a)
, Typeable (req a)
, Show (req a)
, Show a
)
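-- An illustrative use of the synonym (the helper below is hypothetical, not
-- part of this module): any request satisfying 'Request' can be shown, e.g.
-- for logging or for building cache keys.
--
-- > describeRequest :: Request req a => req a -> String
-- > describeRequest = show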
-- | A data source can fetch data in one of two ways.
--
-- * Synchronously ('SyncFetch'): the fetching operation is an
-- @'IO' ()@ that fetches all the data and then returns.
--
-- * Asynchronously ('AsyncFetch'): we can do something else while the
-- data is being fetched. The fetching operation takes an @'IO' ()@ as
-- an argument, which is the operation to perform while the data is
-- being fetched.
--
-- See 'syncFetch' and 'asyncFetch' for example usage.
--
data PerformFetch
= SyncFetch (IO ())
| AsyncFetch (IO () -> IO ())
-- Why does AsyncFetch contain a `IO () -> IO ()` rather than the
-- alternative approach of returning the `IO` action to retrieve the
-- results, which might seem better: `IO (IO ())`? The point is that
-- this allows the data source to acquire resources for the purpose of
-- this fetching round using the standard `bracket` pattern, so it can
-- ensure that the resources acquired are properly released even if
-- other data sources fail.
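-- An illustrative sketch of that bracket pattern; the connection-management
-- functions named here are hypothetical:
--
-- > fetch _state _flags _env blockedFetches = AsyncFetch $ \inner ->
-- >   bracket acquireConnection releaseConnection $ \conn -> do
-- >     mapM_ (startRequest conn) blockedFetches
-- >     inner                      -- run other work while requests are in flight
-- >     mapM_ (collectResult conn) blockedFetches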
-- | A 'BlockedFetch' is a pair of
--
-- * The request to fetch (with result type @a@)
--
-- * An 'MVar' to store either the result or an error
--
-- We often want to collect together multiple requests, but they return
-- different types, and the type system wouldn't let us put them
-- together in a list because all the elements of the list must have the
-- same type. So we wrap up these types inside the 'BlockedFetch' type,
-- so that they all look the same and we can put them in a list.
--
-- When we unpack the 'BlockedFetch' and get the request and the 'MVar'
-- out, the type system knows that the result type of the request
-- matches the type parameter of the 'MVar', so it will let us take the
-- result of the request and store it in the 'MVar'.
--
data BlockedFetch r = forall a. BlockedFetch (r a) (ResultVar a)
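-- An illustrative example (the request type is hypothetical) of how requests
-- with different result types can share one list once wrapped:
--
-- > data ExampleReq a where
-- >   GetName :: Int -> ExampleReq String
-- >   GetAge  :: Int -> ExampleReq Int
-- >
-- > exampleBatch :: ResultVar String -> ResultVar Int -> [BlockedFetch ExampleReq]
-- > exampleBatch nameVar ageVar =
-- >   [BlockedFetch (GetName 1) nameVar, BlockedFetch (GetAge 1) ageVar]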
-- | Function for easily setting a fetch to a particular exception
setError :: (Exception e) => (forall a. r a -> e) -> BlockedFetch r -> IO ()
setError e (BlockedFetch req m) = putFailure m (e req)
except :: (Exception e) => e -> Either SomeException a
except = Left . toException
-- | A sink for the result of a data fetch, used by 'BlockedFetch' and the
-- 'DataCache'. Why do we need an 'MVar' here? The reason is that the cache
-- serves two purposes:
--
-- 1. To cache the results of requests that were submitted in a previous round.
--
-- 2. To remember requests that have been encountered in the current round but
-- are not yet submitted, so that if we see the request again we can make
-- sure that we only submit it once.
--
-- Storing the result as an 'MVar' gives two benefits:
--
-- * We can tell the difference between (1) and (2) by testing whether the
-- 'MVar' is empty. See 'Haxl.Fetch.cached'.
--
-- * In the case of (2), we don't have to update the cache again after the
-- current round, and after the round we can read the result of each request
-- from its 'MVar'. All instances of identical requests will share the same
-- 'MVar' to obtain the result.
--
newtype ResultVar a = ResultVar (MVar (Either SomeException a))
newResult :: a -> IO (ResultVar a)
newResult x = ResultVar <$> newMVar (Right x)
newEmptyResult :: IO (ResultVar a)
newEmptyResult = ResultVar <$> newEmptyMVar
putFailure :: (Exception e) => ResultVar a -> e -> IO ()
putFailure r = putResult r . except
putSuccess :: ResultVar a -> a -> IO ()
putSuccess r = putResult r . Right
putResult :: ResultVar a -> Either SomeException a -> IO ()
putResult (ResultVar var) = putMVar var
takeResult :: ResultVar a -> IO (Either SomeException a)
takeResult (ResultVar var) = takeMVar var
tryReadResult :: ResultVar a -> IO (Maybe (Either SomeException a))
tryReadResult (ResultVar var) = tryReadMVar var
tryTakeResult :: ResultVar a -> IO (Maybe (Either SomeException a))
tryTakeResult (ResultVar var) = tryTakeMVar var
-- Fetch templates
stubFetch
:: (Exception e) => (forall a. r a -> e)
-> State r -> Flags -> u -> [BlockedFetch r] -> PerformFetch
stubFetch e _state _flags _si bfs = SyncFetch $ mapM_ (setError e) bfs
-- | Common implementation templates for 'fetch' of 'DataSource'.
--
-- Example usage:
--
-- > fetch = syncFetch MyDS.withService MyDS.retrieve
-- > $ \service request -> case request of
-- > This x -> MyDS.fetchThis service x
-- > That y -> MyDS.fetchThat service y
--
asyncFetchWithDispatch
:: ((service -> IO ()) -> IO ())
-- ^ Wrapper to perform an action in the context of a service.
-> (service -> IO ())
-- ^ Dispatch all the pending requests
-> (service -> IO ())
-- ^ Wait for the results
-> (forall a. service -> request a -> IO (IO (Either SomeException a)))
-- ^ Enqueue an individual request to the service.
-> State request
-- ^ Currently unused.
-> Flags
-- ^ Currently unused.
-> u
-- ^ Currently unused.
-> [BlockedFetch request]
-- ^ Requests to submit.
-> PerformFetch
asyncFetch, syncFetch
:: ((service -> IO ()) -> IO ())
-- ^ Wrapper to perform an action in the context of a service.
-> (service -> IO ())
-- ^ Dispatch all the pending requests and wait for the results
-> (forall a. service -> request a -> IO (IO (Either SomeException a)))
-- ^ Submits an individual request to the service.
-> State request
-- ^ Currently unused.
-> Flags
-- ^ Currently unused.
-> u
-- ^ Currently unused.
-> [BlockedFetch request]
-- ^ Requests to submit.
-> PerformFetch
asyncFetchWithDispatch
withService dispatch wait enqueue _state _flags _si requests =
AsyncFetch $ \inner -> withService $ \service -> do
getResults <- mapM (submitFetch service enqueue) requests
dispatch service
inner
wait service
sequence_ getResults
asyncFetch withService wait enqueue _state _flags _si requests =
AsyncFetch $ \inner -> withService $ \service -> do
getResults <- mapM (submitFetch service enqueue) requests
inner
wait service
sequence_ getResults
syncFetch withService dispatch enqueue _state _flags _si requests =
SyncFetch . withService $ \service -> do
getResults <- mapM (submitFetch service enqueue) requests
dispatch service
sequence_ getResults
-- | Used by 'asyncFetch' and 'syncFetch' to retrieve the results of
-- requests to a service.
submitFetch
:: service
-> (forall a. service -> request a -> IO (IO (Either SomeException a)))
-> BlockedFetch request
-> IO (IO ())
submitFetch service fetch (BlockedFetch request result)
= (putResult result =<<) <$> fetch service request
|
hiteshsuthar/Haxl
|
Haxl/Core/Types.hs
|
bsd-3-clause
| 13,158 | 0 | 17 | 2,680 | 2,504 | 1,389 | 1,115 | 204 | 1 |
module Char(
isAscii, isLatin1, isControl, isPrint, isSpace, isUpper, isLower,
isAlpha, isDigit, isOctDigit, isHexDigit, isAlphaNum,
digitToInt, intToDigit,
toUpper, toLower,
ord, chr,
readLitChar, showLitChar, lexLitChar,
Char,String
)
where
import Prelude
import PreludeBuiltin
import Numeric(readDec, readOct, lexDigits, readHex)
import Array
import Ix
isAscii, isControl, isPrint, isSpace :: Char -> Bool
isUpper, isLower, isAlpha, isDigit, isAlphanum :: Char -> Bool
isAscii c = c < '\x80'
isLatin1 c = c <= '\xff'
isControl c = c < ' ' || c >= '\DEL' && c <= '\x9f'
isPrint c = c >= ' ' && c <= '~' || c >= '\xa0'
isSpace c = c == ' ' || c == '\t' || c == '\n' || c == '\r' ||
c == '\f' || c == '\v' || c=='\xa0'
isUpper c =
c >= 'A' && c <= 'Z'
isLower c =
c >= 'a' && c <= 'z'
isAlpha c = isUpper c || isLower c
isDigit c = c >= '0' && c <= '9'
isAlphanum c = isAlpha c || isDigit c
isAlphaNum = isAlphanum
toUpper, toLower :: Char -> Char
toUpper c | isLower c = chr (ord c - ord 'a' + ord 'A')
| otherwise = c
toLower c | isUpper c = chr (ord c - ord 'A' + ord 'a')
| otherwise = c
minChar, maxChar :: Char
minChar = chr 0
maxChar = chr 255
isOctDigit c = c >= '0' && c <= '7'
isHexDigit c = isDigit c || c >= 'A' && c <= 'F' ||
c >= 'a' && c <= 'f'
-- Digit conversion operations
digitToInt :: Char -> Int
digitToInt c
| isDigit c = fromEnum c - fromEnum '0'
| c >= 'a' && c <= 'f' = fromEnum c - fromEnum 'a' + 10
| c >= 'A' && c <= 'F' = fromEnum c - fromEnum 'A' + 10
| otherwise = error "Char.digitToInt: not a digit"
intToDigit :: Int -> Char
intToDigit i
| i >= 0 && i <= 9 = toEnum (fromEnum '0' + i)
| i >= 10 && i <= 15 = toEnum (fromEnum 'a' + i - 10)
| otherwise = error "Char.intToDigit: not a digit"
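-- For example: digitToInt 'f' == 15 and intToDigit 15 == 'f'.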
ord c = primCharToInt c
chr n = primIntToChar n
-- Text functions
--readLitChar :: ReadS Char
readLitChar ('\\':s) = readEsc s
where
readEsc ('a':s) = [('\a',s)]
readEsc ('b':s) = [('\b',s)]
readEsc ('f':s) = [('\f',s)]
readEsc ('n':s) = [('\n',s)]
readEsc ('r':s) = [('\r',s)]
readEsc ('t':s) = [('\t',s)]
readEsc ('v':s) = [('\v',s)]
readEsc ('\\':s) = [('\\',s)]
readEsc ('"':s) = [('"',s)]
readEsc ('\'':s) = [('\'',s)]
readEsc ('^':c:s) | c >= '@' && c <= '_'
= [(chr (ord c - ord '@'), s)]
readEsc s@(d:_) | isDigit d
= [(chr n, t) | (n,t) <- readDec s]
readEsc ('o':s) = [(chr n, t) | (n,t) <- readOct s]
readEsc ('x':s) = [(chr n, t) | (n,t) <- readHex s]
readEsc s@(c:_) | isUpper c
= let table = ('\DEL', "DEL") : assocs asciiTab
in case [(c,s') | (c, mne) <- table,
([],s') <- [match mne s]]
of (pr:_) -> [pr]
[] -> []
readEsc _ = []
match :: (Eq a) => [a] -> [a] -> ([a],[a])
match (x:xs) (y:ys) | x == y = match xs ys
match xs ys = (xs,ys)
readLitChar (c:s) = [(c,s)]
--}
showLitChar :: Char -> ShowS
showLitChar c | c > '\DEL' = showChar '\\' .
protectEsc isDigit (shows (ord c))
showLitChar '\DEL' = showString "\\DEL"
showLitChar '\\' = showString "\\\\"
showLitChar c | c >= ' ' = showChar c
showLitChar '\a' = showString "\\a"
showLitChar '\b' = showString "\\b"
showLitChar '\f' = showString "\\f"
showLitChar '\n' = showString "\\n"
showLitChar '\r' = showString "\\r"
showLitChar '\t' = showString "\\t"
showLitChar '\v' = showString "\\v"
showLitChar '\SO' = protectEsc (== 'H') (showString "\\SO")
showLitChar c = showString ('\\' : asciiTab!c)
protectEsc p f = f . cont
where cont s@(c:_) | p c = "\\&" ++ s
cont s = s
asciiTab = listArray ('\NUL', ' ')
["NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
"BS", "HT", "LF", "VT", "FF", "CR", "SO", "SI",
"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
"CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US",
"SP"]
--lexLitChar :: ReadS String
lexLitChar ('\\':s) = [('\\':esc, t) | (esc,t) <- lexEsc s]
where
lexEsc (c:s) | c `elem` "abfnrtv\\\"'" = [([c],s)]
lexEsc s@(d:_) | isDigit d = lexDigits s
lexEsc ('^':c:s) | c >= '@' && c <= '_' = [(['^',c],s)]
-- Very crude approximation to \XYZ. Let readers work this out.
lexEsc s@(c:_) | isUpper c = [span isCharName s]
lexEsc _ = []
isCharName c = isUpper c || isDigit c
lexLitChar (c:s) = [([c],s)]
lexLitChar "" = []
|
forste/haReFork
|
tools/base/tests/HaskellLibraries/Char.hs
|
bsd-3-clause
| 5,379 | 6 | 18 | 2,090 | 2,174 | 1,156 | 1,018 | 114 | 18 |
{-# LANGUAGE RankNTypes, PartialTypeSignatures, NamedWildCards #-}
module NestedNamedExtraConstraintsWildcard where
foo :: Bool -> (Eq a, _a) => a
foo = undefined
|
urbanslug/ghc
|
testsuite/tests/partial-sigs/should_fail/NestedNamedExtraConstraintsWildcard.hs
|
bsd-3-clause
| 164 | 0 | 7 | 22 | 32 | 19 | 13 | 4 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Main where
import Foreign
import GHC.ForeignPtr
{-# INCLUDE "fptrfail01.h" #-}
foreign import ccall "&f" fptr :: FunPtr (Ptr Int -> IO ())
main :: IO ()
main = do
with (33 :: Int) test
where
test p = do
f <- newForeignPtr_ p
addForeignPtrFinalizer fptr f
addForeignPtrConcFinalizer f (putStrLn "Haskell finalizer")
|
urbanslug/ghc
|
testsuite/tests/ffi/should_run/fptrfail01.hs
|
bsd-3-clause
| 421 | 0 | 11 | 113 | 115 | 58 | 57 | 12 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.Neural.Recurrent.Dropout where
-- import Control.Monad
-- import Data.Bifunctor
-- import GHC.TypeLits
import Control.Lens
import Data.Neural.Recurrent
import Data.Neural.Types
import Linear
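-- | Scale every weight of the network by 1 / (1 - d), compensating for
-- dropout that was applied with probability d.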
compensateDO
:: forall i hs o a. Fractional a
=> a
-> Network i hs o a
-> Network i hs o a
compensateDO d = \case
NetOL w -> NetOL w
NetIL l n -> NetIL l (go n)
where
go :: forall h hs'. ()
=> Network h hs' o a
-> Network h hs' o a
go = \case NetOL w -> NetOL (compFLayer w)
NetIL w n -> NetIL (compRLayer w) (go n)
compFLayer
:: forall i' o'. ()
=> FLayer i' o' a
-> FLayer i' o' a
compFLayer = over (tFLayerNodes . traverse . tNodeWeights) (d' *^)
compRLayer
:: forall i' o'. ()
=> RLayer i' o' a
-> RLayer i' o' a
compRLayer = over (tRLayerNodes . traverse . tRNodeWeights) $ \(wI, wS) ->
let wI' = d' *^ wI
wS' = d' *^ wS
in wI' `seq` wS' `seq` (wI', wS')
d' = 1 / (1 - d)
{-# INLINE compensateDO #-}
|
mstksg/neural
|
src/Data/Neural/Recurrent/Dropout.hs
|
mit
| 1,189 | 0 | 13 | 385 | 400 | 218 | 182 | 36 | 3 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls #-}
module SumTypeTest (specs) where
import Database.Persist.Sqlite
import Database.Persist.TH
import Control.Monad.Trans.Resource (runResourceT)
#if WITH_POSTGRESQL
import Database.Persist.Postgresql
#endif
import qualified Data.Text as T
import Init
#if WITH_MONGODB
mkPersist persistSettings [persistLowerCase|
#else
share [mkPersist sqlSettings, mkMigrate "sumTypeMigrate"] [persistLowerCase|
#endif
Bicycle
brand T.Text
Car
make T.Text
model T.Text
+Vehicle
bicycle BicycleId
car CarId
deriving Show Eq
|]
specs :: Spec
specs = describe "sum types" $ do
it "works" $ asIO $ runResourceT $ runConn $ do
#ifndef WITH_MONGODB
_ <- runMigrationSilent sumTypeMigrate
#endif
car1 <- insert $ Car "Ford" "Thunderbird"
car2 <- insert $ Car "Kia" "Rio"
bike1 <- insert $ Bicycle "Shwinn"
vc1 <- insert $ VehicleCarSum car1
vc2 <- insert $ VehicleCarSum car2
vb1 <- insert $ VehicleBicycleSum bike1
x1 <- get vc1
liftIO $ x1 @?= Just (VehicleCarSum car1)
x2 <- get vc2
liftIO $ x2 @?= Just (VehicleCarSum car2)
x3 <- get vb1
liftIO $ x3 @?= Just (VehicleBicycleSum bike1)
asIO :: IO a -> IO a
asIO = id
|
gbwey/persistentold
|
persistent-test/SumTypeTest.hs
|
mit
| 1,516 | 0 | 14 | 339 | 323 | 165 | 158 | 34 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad (mapM_)
import qualified Crypto.Hash
import Data.ByteString (ByteString)
import qualified Data.List as List
import qualified Data.Maybe as Maybe
import qualified Data.Text as Text
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import qualified Data.Text.IO as IO
import qualified Numeric
main = do
doorId <- Text.strip <$> IO.getContents
IO.putStrLn $ crackPassword doorId
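-- For each index i, hash (doorId ++ show i) with MD5, keep digests starting
-- with "00000", read hex digit 6 as a position and character 7 as the value,
-- then assemble the first character found for each of the 8 positions.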
crackPassword doorId =
[0 ..]
|> map (\i -> hashText (doorId `Text.append` (Text.pack $ show i)))
|> filter (\digest -> Text.take 5 digest == "00000")
|> map (\digest -> (fst $ head $ Numeric.readHex [Text.index digest 5], Text.index digest 6))
|> assemble 8
|> Text.pack
assemble n characters =
map (\position -> snd $ Maybe.fromJust $ List.find (\(p, _) -> position == p) characters) [0 .. (n - 1)]
hashText :: Text.Text -> Text.Text
hashText text = Text.pack $ show $ md5 $ encodeUtf8 text
md5 :: ByteString -> Crypto.Hash.Digest Crypto.Hash.MD5
md5 = Crypto.Hash.hash
(|>) = flip ($)
|
SamirTalwar/advent-of-code
|
2016/AOC_05_2.hs
|
mit
| 1,056 | 0 | 18 | 185 | 401 | 224 | 177 | 27 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module PureScript.Ide.Command where
import Control.Monad
import Data.Aeson
import Data.Text (Text)
import Data.Maybe
import PureScript.Ide.Filter
import PureScript.Ide.Matcher
import PureScript.Ide.Types
import PureScript.Ide.CaseSplit
data Command
= Load { loadModules :: [ModuleIdent]
, loadDependencies :: [ModuleIdent]}
| Type { typeSearch :: DeclIdent
, typeFilters :: [Filter]}
| Complete { completeFilters :: [Filter]
, completeMatcher :: Matcher}
| Pursuit { pursuitQuery :: PursuitQuery
, pursuitSearchType :: PursuitSearchType}
| List {listType :: ListType}
| CaseSplit {
caseSplitLine :: Text
, caseSplitBegin :: Int
, caseSplitEnd :: Int
, caseSplitAnnotations :: WildcardAnnotations
, caseSplitType :: Type}
| AddClause {
addClauseLine :: Text
, addClauseAnnotations :: WildcardAnnotations}
| Cwd
| Quit
data ListType = LoadedModules | Imports FilePath | AvailableModules
instance FromJSON ListType where
parseJSON = withObject "ListType" $ \o -> do
(listType' :: String) <- o .: "type"
case listType' of
"import" -> do
fp <- o .: "file"
return (Imports fp)
"loadedModules" -> return LoadedModules
"availableModules" -> return AvailableModules
_ -> mzero
instance FromJSON Command where
parseJSON = withObject "command" $ \o -> do
(command :: String) <- o .: "command"
case command of
"list" -> do
listType' <- o .:? "params"
return $ List (fromMaybe LoadedModules listType')
"cwd" -> return Cwd
"quit" -> return Quit
"load" -> do
params <- o .: "params"
mods <- params .:? "modules"
deps <- params .:? "dependencies"
return $ Load (fromMaybe [] mods) (fromMaybe [] deps)
"type" -> do
params <- o .: "params"
search <- params .: "search"
filters <- params .: "filters"
return $ Type search filters
"complete" -> do
params <- o .: "params"
filters <- params .:? "filters"
matcher <- params .:? "matcher"
return $ Complete (fromMaybe [] filters) (fromMaybe mempty matcher)
"pursuit" -> do
params <- o .: "params"
query <- params .: "query"
queryType <- params .: "type"
return $ Pursuit query queryType
"caseSplit" -> do
params <- o .: "params"
line <- params .: "line"
begin <- params .: "begin"
end <- params .: "end"
annotations <- params .: "annotations"
type' <- params .: "type"
return $ CaseSplit line begin end (if annotations
then explicitAnnotations
else noAnnotations) type'
"addClause" -> do
params <- o .: "params"
line <- params .: "line"
annotations <- params .: "annotations"
return $ AddClause line (if annotations
then explicitAnnotations
else noAnnotations)
_ -> mzero
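-- Illustrative payloads accepted by the parser above (the file path is an
-- arbitrary example):
-- {"command": "load", "params": {"modules": ["Data.Maybe"], "dependencies": []}}
-- {"command": "list", "params": {"type": "import", "file": "src/Main.purs"}}
-- {"command": "cwd"}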
|
kRITZCREEK/psc-ide
|
src/PureScript/Ide/Command.hs
|
mit
| 3,267 | 0 | 19 | 1,112 | 842 | 435 | 407 | 89 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.MediaStreamEvent
(js_getStream, getStream, MediaStreamEvent, castToMediaStreamEvent,
gTypeMediaStreamEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"stream\"]" js_getStream ::
JSRef MediaStreamEvent -> IO (JSRef MediaStream)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamEvent.stream Mozilla MediaStreamEvent.stream documentation>
getStream ::
(MonadIO m) => MediaStreamEvent -> m (Maybe MediaStream)
getStream self
= liftIO ((js_getStream (unMediaStreamEvent self)) >>= fromJSRef)
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/MediaStreamEvent.hs
|
mit
| 1,399 | 6 | 11 | 166 | 377 | 238 | 139 | 24 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-|
Module : Network.WebexTeams.Pipes
Copyright : (c) Naoto Shimazaki 2018
License : MIT (see the file LICENSE)
Maintainer : https://github.com/nshimaza
Stability : experimental
This module provides a Pipes wrapper for the Cisco Webex Teams list APIs.
-}
module Network.WebexTeams.Pipes
(
-- * Functions
streamListWithFilter
, streamTeamList
, streamOrganizationList
, streamRoleList
) where
import Control.Monad (unless)
import Data.Foldable (traverse_)
import Pipes (MonadIO, Producer', liftIO, yield)
import Network.WebexTeams hiding (streamOrganizationList,
streamRoleList, streamTeamList)
{-|
Common worker function for List APIs.
It accesses a List API with the given 'Request', unwraps the result into a list of items, streams them into a Pipes pipe,
and finally, if available, automatically accesses the next page designated via the HTTP Link header.
-}
readerToProducer :: MonadIO m => ListReader i -> Producer' i m ()
readerToProducer reader = go
where
go = do
xs <- liftIO reader
unless (null xs) $ do
traverse_ yield xs
go
-- | Get list of entities with query parameter and stream it into Pipes. It automatically performs pagination.
streamListWithFilter :: (MonadIO m, WebexTeamsFilter filter, WebexTeamsListItem (ToResponse filter))
=> Authorization -- ^ Authorization string against Webex Teams API.
-> WebexTeamsRequest -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
-> filter -- ^ Filter criteria of the request. Type of filter automatically determines
-- item type in response.
-> Producer' (ToResponse filter) m ()
streamListWithFilter auth base param = getListWithFilter auth base param >>= readerToProducer
-- | List of 'Team' and stream it into Pipes. It automatically performs pagination.
streamTeamList :: MonadIO m => Authorization -> WebexTeamsRequest -> Producer' Team m ()
streamTeamList auth base = getTeamList auth base >>= readerToProducer
-- | Filter list of 'Organization' and stream it into Pipes. It automatically performs pagination.
streamOrganizationList :: MonadIO m => Authorization -> WebexTeamsRequest -> Producer' Organization m ()
streamOrganizationList auth base = getOrganizationList auth base >>= readerToProducer
-- | List of 'Role' and stream it into Pipes. It automatically performs pagination.
streamRoleList :: MonadIO m => Authorization -> WebexTeamsRequest -> Producer' Role m ()
streamRoleList auth base = getRoleList auth base >>= readerToProducer
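-- A usage sketch (assuming a 'Show' instance for 'Team' and suitable @auth@
-- and @base@ values) that prints every team:
--
-- > import Pipes ((>->), runEffect)
-- > import qualified Pipes.Prelude as P
-- >
-- > printTeams auth base = runEffect $ streamTeamList auth base >-> P.print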
|
nshimaza/cisco-spark-api
|
webex-teams-pipes/src/Network/WebexTeams/Pipes.hs
|
mit
| 2,751 | 0 | 12 | 640 | 394 | 207 | 187 | 32 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
module Web.Haskbot.Types where
import Data.Aeson (ToJSON, Value (..), (.=), object, toJSON)
import Data.Monoid ((<>))
import Network.URI (URI)
import Web.Scotty.Trans (Parsable)
import qualified Data.Text as ST
class Formatted a where
formatted :: a -> ST.Text
-- outgoing integrations
data SlashCommand = SlashCommand
{ sc_token :: !Token
, sc_teamID :: !TeamID
, sc_chanID :: !ChannelID
, sc_chanName :: !ChannelName
, sc_userID :: !UserID
, sc_userName :: !UserName
, sc_command :: !Command
, sc_optText :: Maybe ST.Text
} deriving (Eq, Show)
data Outgoing = Outgoing
{ out_token :: !Token
, out_teamID :: !TeamID
, out_chanID :: !ChannelID
, out_chanName :: !ChannelName
, out_timestamp :: !Timestamp
, out_userID :: !UserID
, out_userName :: !UserName
, out_text :: !ST.Text
, out_triggerWord :: !TriggerWord
} deriving (Eq, Show)
-- incoming integration
data Incoming = Incoming
{ inc_channel :: !ResponseAddr
, inc_text :: !SlackText
} deriving (Eq, Show)
instance ToJSON Incoming where
toJSON inc = object [ "channel" .= inc_channel inc
, "text" .= inc_text inc
]
data ResponseAddr = DirectMsg !UserName
| Channel !ChannelName
deriving (Eq, Show)
instance ToJSON ResponseAddr where
toJSON (DirectMsg d) = toJSON d
toJSON (Channel c) = toJSON c
data SlackText =
Plain ST.Text SlackText
| URL URI SlackText
| Link URI ST.Text SlackText
| End
deriving (Eq, Show)
instance Formatted SlackText where
formatted slackText = case slackText of
(Plain text next) ->
text <> formatted next
(URL uri next) ->
"<" <> ST.pack (show uri) <> ">" <> formatted next
(Link uri text next) ->
"<" <> ST.pack (show uri) <> "|" <> text <> ">" <> formatted next
End -> ""
instance ToJSON SlackText where
toJSON slackText = String $ formatted slackText
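-- An illustrative value (not part of the original module), assuming 'parseURI'
-- from Network.URI is in scope:
--
-- docsLink :: SlackText
-- docsLink = case parseURI "https://example.com/docs" of
--   Just uri -> Plain "See the " (Link uri "docs" (Plain "." End))
--   Nothing  -> Plain "See the docs." End
--
-- formatted docsLink yields "See the <https://example.com/docs|docs>."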
--data Incoming = Incoming
-- { inc_channel :: !ResponseAddr
-- , inc_text :: !ST.Text
-- , inc_fallback :: !ST.Text
-- , inc_pretext :: !ST.Text
-- , inc_color :: Maybe Color
-- , inc_fields :: [Attachment]
-- , inc_userName :: UserName
-- , inc_icon :: Icon
-- } deriving (Eq, Show)
--data Color = Good
-- | Warning
-- | Danger
-- | Hex HexCode
-- deriving (Eq, Show)
--
--instance ToJSON Color where
-- toJSON Good = String "good"
-- toJSON Warning = String "warning"
-- toJSON Danger = String "danger"
-- toJSON (Hex h) = toJSON h
--
--
--data Attachment = Attachment
-- { at_title :: !ST.Text
-- , at_value :: !ST.Text
-- , at_short :: !Bool
-- } deriving (Eq, Show)
--
--instance ToJSON Attachment where
-- toJSON att = object [ "title" .= at_title att
-- , "value" .= at_value att
-- , "short" .= at_short att
-- ]
--
--
--data Icon = IconURL URI
-- | IconEmoji Emoji
-- deriving (Eq, Show)
--
--instance ToJSON URI where
-- toJSON = String . ST.pack . show
--
---- private functions
--
--iconPair :: Icon -> (ST.Text, Value)
--iconPair (IconURL url) = "icon_url" .= url
--iconPair (IconEmoji emo) = "icon_emoji" .= emo
-- newtypes
newtype Token = Token ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype TeamID = TeamID ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype ChannelID = ChannelID ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype UserID = UserID ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype Timestamp = Timestamp ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype TriggerWord = TriggerWord ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype ChannelName = ChannelName ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype UserName = UserName ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype Command = Command ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype HexCode = HexCode ST.Text deriving (Eq, Show, ToJSON, Parsable)
newtype Emoji = Emoji ST.Text deriving (Eq, Show, ToJSON, Parsable)
instance Formatted ChannelName where
formatted (ChannelName x) = "#" <> x
instance Formatted UserName where
formatted (UserName x) = "@" <> x
instance Formatted Command where
formatted (Command x) = "/" <> x
instance Formatted HexCode where
formatted (HexCode x) = "#" <> x
instance Formatted Emoji where
formatted (Emoji x) = ":" <> x <> ":"
|
bendyworks/haskbot
|
src/Web/Haskbot/Types.hs
|
mit
| 4,604 | 0 | 16 | 1,169 | 1,113 | 623 | 490 | 121 | 0 |
-- by Yue Wang 12.12.2014 coded for ch04 DMA
-- proj01 find the number of r-permutations and of r-combinations
rPermutations :: (Num a, Enum a) => a -> a -> a
rPermutations n r = product (reverse [n-r+1..n])
rCombinations :: (Fractional a, Eq a) => a -> a -> a -- this returns a fractional result and still needs fixing; an integer-valued alternative is sketched below
rCombinations n r = fac n/((fac r) * fac(n-r))
where fac x = if x==0 then 1 else x * fac(x-1)
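-- A possible integer-valued alternative (not required by the exercise),
-- sidestepping the fractional result noted above by dividing the falling
-- factorial by r!:
rCombinations' :: Integral a => a -> a -> a
rCombinations' n r = rPermutations n r `div` product [1..r]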
|
Mooophy/DMA
|
ch04/proj01.hs
|
mit
| 415 | 0 | 11 | 91 | 168 | 88 | 80 | 5 | 2 |
module ImageHandler where
import Codec.Picture
import Codec.Picture.Types
import Data.Either
import Data.Matrix
import qualified Data.Vector as V
getImageMatrix :: FilePath -> IO ( Matrix Int )
getImageMatrix path = do
    img <- readImage path
    case img of
      Left err -> error err
      Right x  -> return $ imageToMatrix x
imageToMatrix :: DynamicImage -> Matrix Int
imageToMatrix (ImageRGB8 img) =
    matrix (imageHeight img) (imageWidth img) $ \(i, j) -> rgb8ToGrey (pixelAt img (j - 1) (i - 1))
    where rgb8ToGrey (PixelRGB8 r g b) =
              round (0.21 * fromIntegral r + 0.72 * fromIntegral g + 0.07 * fromIntegral b :: Double)
imageToMatrix (ImageYCbCr8 img) =
    matrix (imageHeight img) (imageWidth img) $ \(i, j) -> yCbCr8ToGrey (pixelAt img (j - 1) (i - 1))
    where yCbCr8ToGrey (PixelYCbCr8 y _ _) = fromIntegral y
imageToMatrix _ = error "Wrong color format!"
|
omarabdeldayem/Katze
|
ImageHandler.hs
|
mit
| 710 | 18 | 8 | 163 | 266 | 139 | 127 | -1 | -1 |
-- Currying and Partial Application
multThree :: (Num a) => a -> a -> a -> a
multThree x y z = x * y * z
-- By applying fewer arguments than the function expects,
-- you create a partially applied function.
multTwoWithNine = multThree 9
multWithEighteen = multTwoWithNine 2
-- An infix function can be partially applied by writing a
-- section: the operator and one argument surrounded by parentheses.
divideByTen :: (Floating a) => a -> a
divideByTen = (/10)
-- Higher Order Functions
zipWith' :: (a -> b -> c) -> [ a ] -> [ b ] -> [ c ]
zipWith' _ [] _ = []
zipWith' _ _ [] = []
zipWith' f (x : xs) (y : ys) = f x y : zipWith' f xs ys
flip' :: (a -> b -> c) -> (b -> a -> c)
flip' f = g
where g x y = f y x
flip'' :: (a -> b -> c) -> (b -> a -> c)
flip'' f y x = f x y
mapResult = map (+3) [1, 5, 3, 1, 6]
stringMapResult = map (++ "!") [ "BIFF", "BANG", "POW" ]
evenNumbers = filter even [ 1 .. 10 ]
uppercase = filter (`elem` [ 'A' .. 'Z' ]) "i lauGh At You BecAuse u r aLL the Same"
quicksort :: (Ord a) => [ a ] -> [ a ]
quicksort [] = []
quicksort (pivot : xs) =
let lowerBound = quicksort (filter (<= pivot) xs)
upperBound = quicksort (filter (> pivot) xs)
in lowerBound ++ [ pivot ] ++ upperBound
sumResult = sum (takeWhile (< 10000) (filter odd (map (^ 2) [ 1 .. ])))
listOfFuns = map (*) [ 0 .. ]
fourTimesFive = (listOfFuns !! 4) 5
-- Lambdas
chain :: (Integral a) => a -> [ a ]
chain 1 = [ 1 ]
chain n
| even n = n:chain (n `div` 2)
| odd n = n:chain (n * 3 + 1)
numLongChains :: Int
numLongChains = length (filter (\xs -> length xs > 15) (map chain [ 1 .. 100 ]))
flip''' :: (a -> b -> c) -> (b -> a -> c)
flip''' f = \y x -> f x y
-- Folds and Scans
sum' :: (Num a) => [ a ] -> a
sum' xs = foldl (\acc x -> acc + x) 0 xs
sum'' :: (Num a) => [ a ] -> a
sum'' = foldl (+) 0
elem' :: (Eq a) => a -> [ a ] -> Bool
elem' y ys = foldl (\acc x -> if x == y then True else acc) False ys
map' :: (a -> b) -> [ a ] -> [ b ]
map' f xs = foldr(\x acc -> f x : acc) [] xs
maximum' :: (Ord a) => [ a ] -> a
maximum' = foldr1 (\x acc -> if x > acc then x else acc)
reverse' :: [ a ] -> [ a ]
reverse' = foldl (\acc x -> x : acc) []
reverse'' :: [ a ] -> [ a ]
reverse'' = foldl (flip (:)) []
foldWithIntermediateResults = scanl (+) 0 [3, 5, 2, 1]
-- Function Application with '$'
sqrtSums = sum (map sqrt [1 .. 130 ])
anotherSqrtSums = sum $ map sqrt [ 1 .. 130 ]
applicationRightSided = map ($ 3) [ (4+), (10*), (^2), sqrt ]
-- Function Composition with '.'
composition = map (negate . abs) [ 5, -3, -6, 7, -3, 2, -19, 24 ]
oddSquareSum :: Integer
oddSquareSum = sum . takeWhile ( < 10000) . filter odd . map (^ 2) $ [ 1 .. ]
|
afronski/playground-fp
|
books/learn-you-a-haskell-for-great-good/higher-order-functions/higher-order-functions.hs
|
mit
| 2,649 | 1 | 12 | 681 | 1,321 | 733 | 588 | 58 | 2 |
module LMO.Common.Error (warn, Error, ErrorT) where
import Debug.Trace
import Control.Monad.Error
warn = trace
|
firefrorefiddle/liedermappe-online
|
src/LMO/Common/Error.hs
|
gpl-2.0
| 114 | 0 | 4 | 16 | 34 | 22 | 12 | 4 | 1 |
import System.Environment
import System.Directory
import System.IO
import Data.List
dispatch :: [(String, [String] -> IO())]
dispatch = [ ("add", add)
, ("view", view)
, ("remove", remove)
]
main :: IO ()
main = do
(command:args) <- getArgs
let (Just action) = lookup command dispatch
action args
add :: [String] -> IO ()
add [fileName, todoItem] = appendFile fileName $ todoItem ++ "\n"
view :: [String] -> IO ()
view [fileName] = do
contents <- readFile fileName
let todoTasks = lines contents
numberedTasks = zipWith (\n line -> show n ++ " - " ++ line) [0..] todoTasks
putStr $ unlines numberedTasks
remove :: [String] -> IO ()
remove [fileName, numberString] = do
handle <- openFile fileName ReadMode
(tempName, tempHandle) <- openTempFile "." "temp"
contents <- hGetContents handle
let number = read numberString
todoTasks = lines contents
newTodoItems = delete (todoTasks !! number) todoTasks
hPutStr tempHandle $ unlines newTodoItems
hClose handle
hClose tempHandle
removeFile fileName
renameFile tempName fileName
|
friedbrice/Haskell
|
ch09/todo.hs
|
gpl-2.0
| 1,110 | 0 | 15 | 243 | 424 | 211 | 213 | 34 | 1 |
module P36StatsSpec (main,spec) where
import Test.Hspec
import Test.Hspec.QuickCheck
import P36Stats hiding (main)
import Data.Vector (fromList)
import qualified Statistics.Sample as S
stdDevSample = S.stdDev . fromList
stdDevPop = sqrt . S.variance . fromList
statMean = S.mean . fromList
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "avg" $ do
it "correctly returns the mean average of a list" $ do
avg [1,2,3] `shouldBe` 2
prop "is within 0.00000001 of mean from Statistics.Sample library (some rounding errors exist)" $
\xs -> (isNaN $ avg xs) || abs (avg xs - statMean xs) < 0.00000001
describe "sampSD" $ do
prop "is within 0.0000000001 of stdDev from Statistics.Sample library (some rounding errors exist)" $
\xs -> abs (sampSD xs - stdDevSample xs) < 0.0000000001
describe "popSD" $ do
prop "is within 0.0000000001 of sqrt of variance from Statistics.Sample library (some rounding errors exist)" $
\xs -> abs (popSD xs - stdDevPop xs) < 0.0000000001
|
ciderpunx/57-exercises-for-programmers
|
test/P36StatsSpec.hs
|
gpl-3.0
| 1,014 | 0 | 17 | 209 | 277 | 142 | 135 | 24 | 1 |
{- -----------------------------------------------------------------------------
ZDCPU16 is a DCPU-16 emulator.
Copyright (C) 2012 Luis Cabellos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------------------------------------------- -}
module Main where
-- -----------------------------------------------------------------------------
import Control.Concurrent( forkIO, killThread )
import Control.Concurrent.MVar( MVar, newMVar, readMVar )
import qualified Graphics.UI.SDL as SDL( Event(..), pollEvent )
import Network.MessagePackRpc.Server( serve )
import System.IO( stdout, hFlush )
import ZDCpu16.Render( runRender, clearScreen )
import ZDCpu16.ConRPC( serverRPCMethods )
import ZDCpu16.ConRender( RenderState, mkRenderState, renderConsole )
import ZDCpu16.ConState( ConState(..), mkConState )
-- -----------------------------------------------------------------------------
isEnded :: MVar ConState -> IO Bool
isEnded csRef = do
cs <- readMVar csRef
return . csEnd $ cs
-- -----------------------------------------------------------------------------
mainLoop :: RenderState -> MVar ConState -> IO ()
mainLoop rst csRef = do
est <- readMVar csRef
_ <- runRender (clearScreen >> renderConsole est) rst
e <- SDL.pollEvent
case e of
SDL.Quit -> return ()
SDL.NoEvent -> do
quit <- isEnded csRef
if quit
then return ()
else mainLoop rst csRef
_ -> do
mainLoop rst csRef
-- -----------------------------------------------------------------------------
main :: IO ()
main = do
csRef <- newMVar $ mkConState
msgTID <- forkIO $ serve 1234 $ serverRPCMethods csRef
rst <- mkRenderState
putStrLn "console started"
hFlush stdout
mainLoop rst csRef
killThread msgTID
putStrLn "console ended"
-- -----------------------------------------------------------------------------
|
zhensydow/zdcpu16
|
src/zddcpu16_con_main.hs
|
gpl-3.0
| 2,458 | 0 | 14 | 383 | 416 | 215 | 201 | 38 | 4 |
-- Hsort, a haskell implementation of GNU sort.
module Main where
import System.Environment
import System.Console.Terminfo.Base
import Data.List
main :: IO ()
main = do
term <- setupTermFromEnv
args <- getArgs
let options = processArgs args defaultSort
runTermOutput term (termText ("Options: "++(show options)++"\n"))
runTermOutput term (termText (showHelp options))
runTermOutput term (termText (showVersion options))
output <- showOutput options
runTermOutput term (termText (output))
return ()
showOutput :: SortOptions -> IO String
showOutput opts | not ((displayHelp opts) || (displayVersion opts)) = return "" -- <do-stuff-Here>
| otherwise = return ""
showHelp :: SortOptions -> String
showHelp opts | (displayHelp opts) = concat (intersperse "\n" helpText)
| otherwise = ""
showVersion :: SortOptions -> String
showVersion opts | (displayVersion opts) = concat (intersperse "\n" versionText)
| otherwise = ""
processArgs :: [String] -> SortOptions -> SortOptions
processArgs [] opts = opts
processArgs (x:xs) opts = case x of
"--help" -> processArgs xs opts{displayHelp = True}
"--version" -> processArgs xs opts{displayVersion = True}
_ -> processArgs xs opts
stripQuotes :: String -> String
stripQuotes ('"':xs) = if last xs == '"' then init xs else ('"':xs)
stripQuotes xs = xs
defaultSort :: SortOptions
defaultSort = SortOptions False False "" "" [] "" 0 False False False False [] False "" False 0 "" [] 1 False False
data SortOptions = SortOptions
{ displayHelp :: Bool
, displayVersion :: Bool
, sortText :: String
, sortTargets :: String
, ordering :: [SortOrderingOptions]
, randomSource :: String
, batchSize :: Integer
, checkSorting :: Bool
, compressTemporary :: Bool
, debugMode :: Bool
, quietMode :: Bool
, sortKey :: [KeyDef]
, onlyMerge :: Bool
, outputFile :: String
, stabilizeSort :: Bool
, bufferSize :: Integer
, fieldSeperator :: String
, tempDirectories :: [String]
, concurrencyLevel :: Integer
, uniqueEntriesOnly :: Bool
, nullTerminated :: Bool
} deriving (Show, Eq)
data SortOrderingOptions = IgnoreLeadingBlanks | DictionarySorting | IgnoreCase | GeneralNumericSorting | IgnoreUnprintable | MonthSorting | HumanNumericSorting | NumericSorting | RandomSorting | ReverseOrdering | VersionSorting deriving (Show, Eq)
data KeyDef = KeyDef{ fieldNumber :: Integer, characterOffset :: Integer, sortOptions :: [SortOrderingOptions]} deriving (Show, Eq)
--KEYDEF is F[.C][OPTS][,F[.C][OPTS]] for start and stop position, where F is a field number and C a character position in the field; both are origin 1, and the stop position defaults to the line's end
--OPTS is one or more single-letter ordering options [bdfgiMhnRrV], which override global ordering options for that key
--SIZE: b 1, K 1024 (default), and so on for M, G, T, P, E, Z, Y.
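-- A rough sketch (the function below is illustrative, not part of GNU sort) of
-- turning the start portion of a KEYDEF such as "2.3" into the 'KeyDef' record
-- above; ordering-option letters and the ",F[.C][OPTS]" stop portion are
-- ignored, and a field number is assumed to be present as the format requires.
parseKeyStart :: String -> KeyDef
parseKeyStart s = KeyDef (read field) offset []
  where
    (field, rest) = span (`elem` ['0'..'9']) s
    offset = case rest of
               ('.':cs) -> case span (`elem` ['0'..'9']) cs of
                             (d:ds, _) -> read (d:ds)
                             _         -> 1
               _        -> 1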
helpText :: [String]
helpText = [ "Usage: /home/bminerds/x/coreutils/src/sort [OPTION]... [FILE]..."
, " or: /home/bminerds/x/coreutils/src/sort [OPTION]... --files0-from=F"
, "Write sorted concatenation of all FILE(s) to standard output."
, "Mandatory arguments to long options are mandatory for short options too."
, "Ordering options:"
, " -b, --ignore-leading-blanks ignore leading blanks"
, " -d, --dictionary-order consider only blanks and alphanumeric characters"
, " -f, --ignore-case fold lower case to upper case characters"
, " -g, --general-numeric-sort compare according to general numerical value"
, " -i, --ignore-nonprinting consider only printable characters"
, " -M, --month-sort compare (unknown) < 'JAN' < ... < 'DEC'"
, " -h, --human-numeric-sort compare human readable numbers (e.g., 2K 1G)"
, " -n, --numeric-sort compare according to string numerical value"
, " -R, --random-sort sort by random hash of keys"
, " --random-source=FILE get random bytes from FILE"
, " -r, --reverse reverse the result of comparisons"
, " --sort=WORD sort according to WORD: general-numeric -g, human-numeric -h, month -M, numeric -n, random -R, version -V"
, " -V, --version-sort natural sort of (version) numbers within text"
, "Other options:"
, " --batch-size=NMERGE merge at most NMERGE inputs at once; for more use temp files"
, " -c, --check, --check=diagnose-first check for sorted input; do not sort"
, " -C, --check=quiet, --check=silent like -c, but do not report first bad line"
, " --compress-program=PROG compress temporaries with PROG; decompress them with PROG -d"
, " --debug annotate the part of the line used to sort, and warn about questionable usage to stderr"
, " --files0-from=F read input from the files specified by NUL-terminated names in file F; If F is - then read names from standard input"
, " -k, --key=KEYDEF sort via a key; KEYDEF gives location and type"
, " -m, --merge merge already sorted files; do not sort"
, " -o, --output=FILE write result to FILE instead of standard output"
, " -s, --stable stabilize sort by disabling last-resort comparison"
, " -S, --buffer-size=SIZE use SIZE for main memory buffer"
, " -t, --field-separator=SEP use SEP instead of non-blank to blank transition"
, " -T, --temporary-directory=DIR use DIR for temporaries, not $TMPDIR or /tmp; multiple options specify multiple directories"
, " --parallel=N change the number of sorts run concurrently to N"
, " -u, --unique with -c, check for strict ordering; without -c, output only the first of an equal run"
, " -z, --zero-terminated line delimiter is NUL, not newline"
, " --help display this help and exit"
, " --version output version information and exit"
, "KEYDEF is F[.C][OPTS][,F[.C][OPTS]] for start and stop position, where F is a field number and C a character position in the field; both are origin 1, and the stop position defaults to the line's end."
, "If neither -t nor -b is in effect, characters in a field are counted from the beginning of the preceding whitespace."
, "OPTS is one or more single-letter ordering options [bdfgiMhnRrV], which override global ordering options for that key."
, "If no key is given, use the entire line as the key."
, "SIZE may be followed by the following multiplicative suffixes: % 1% of memory, b 1, K 1024 (default), and so on for M, G, T, P, E, Z, Y."
, "With no FILE, or when FILE is -, read standard input."
, "*** WARNING ***"
, "The locale specified by the environment affects sort order."
, "Set LC_ALL=C to get the traditional sort order that uses native byte values."
, "GNU coreutils online help: <http://www.gnu.org/software/coreutils/>"
, "Full documentation at: <http://www.gnu.org/software/coreutils/sort>or available locally via: info '(coreutils) sort invocation'\n"
]
versionText :: [String]
versionText = [ "H<app-name> (Haskell implementation of GNU <app-name>) 1.0"
, "derrived from: sort (GNU coreutils) 8.23.126-99f76"
, "Copyright (C) 2015 Free Software Foundation, Inc."
, "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>."
, "This is free software: you are free to change and redistribute it."
, "There is NO WARRANTY, to the extent permitted by law."
, "Written by Mike Haertel and Paul Eggert."
, "Ported by PuZZleDucK.\n"
]
|
PuZZleDucK/Hls
|
Hsort.hs
|
gpl-3.0
| 8,067 | 0 | 14 | 2,127 | 988 | 557 | 431 | 118 | 3 |
module Main where
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Time.Clock (UTCTime(..), getCurrentTime)
import Data.Time.Calendar (addDays)
import Options.Applicative
import System.Environment.XDG.BaseDir (getUserDataDir, getUserDataFile)
import Hammertime.CLI
import Hammertime.Reports
import Hammertime.Storage
import qualified Hammertime.Storage.File as Store
import qualified Hammertime.Types as Types
eventFile, dataDir :: IO FilePath
eventFile = getUserDataFile "hammertime" "events"
dataDir = getUserDataDir "hammertime"
processAction :: MonadStorage m => Config -> UTCTime -> Action -> m ()
processAction cfg now (Start p n ts) = appendEvent cfg $ Types.Start (Types.Activity (T.pack p) (T.pack n) (map T.pack ts)) now
processAction cfg now (Stop) = appendEvent cfg $ Types.Stop now
processAction cfg now (Report s p n t t') = do
report <- generateReport cfg t' (timeSpanToRange s now) (fmap T.pack p) (fmap T.pack n) (fmap T.pack t)
liftIO $ TIO.putStr report
processAction cfg now (Current) = do
current <- currentActivity cfg now
liftIO $ TIO.putStr current
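-- Turn a symbolic time span into a concrete (start, end) range ending at the
-- given instant; a week reaches back 7 days and a month is approximated as 30
-- days.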
timeSpanToRange :: Types.TimeSpan -> UTCTime -> Types.TimeRange
timeSpanToRange Types.Day now@(UTCTime day dt) = (UTCTime (addDays (-1) day) dt, now)
timeSpanToRange Types.Week now@(UTCTime day dt) = (UTCTime (addDays (-7) day) dt, now)
timeSpanToRange Types.Month now@(UTCTime day dt) = (UTCTime (addDays (-30) day) dt, now)
main :: IO ()
main = do
cfg <- liftM2 Store.Config dataDir eventFile
Store.runStorage $ initStorage cfg
now <- getCurrentTime
act <- customExecParser cliParserPrefs cliParserInfo
Store.runStorage $ processAction cfg now act
|
divarvel/hammertime
|
src/Hammertime/Main.hs
|
gpl-3.0
| 1,752 | 0 | 11 | 271 | 643 | 339 | 304 | 37 | 1 |
{-
Facultad de Ciencias UNAM
Programming Languages 2016-2
Professor: Noé Salomón Hernández Sánchez
Teaching assistant: Albert M. Orozco Camacho
Lab assistant: C. Moisés Vázquez Reyes
-}
{- Natural Numbers -}
data N = Zero | Suc N deriving Show
--Addition of naturals.
suma::N->N->N
suma Zero n = n
suma (Suc n) m = Suc (suma n m)
--Product of naturals.
prod::N->N->N
prod Zero _ = Zero
prod (Suc n) m = suma (prod n m) m
--Exponentiation of naturals.
pot::N->N->N
pot n Zero = Suc Zero
pot n (Suc Zero) = n
pot n (Suc m) = prod (pot n m) n
{- DNat Numbers -}
data DNat = Cero | D DNat | U DNat deriving Show
--Simplify a DNat.
simplDN :: DNat -> DNat
simplDN Cero = Cero
simplDN (U n) = U $ simplDN n
simplDN (D n) = f $ D $ simplDN n where
f (D Cero) = Cero
f (D n) = D $ simplDN n
--Successor of a DNat.
sucDN :: DNat->DNat
sucDN n = simplDN $ suc_aux n where
suc_aux Cero = U Cero
suc_aux (D n) = U n
suc_aux (U n) = D $ sucDN n
--Predecessor of a DNat number.
predDN :: DNat->DNat
predDN n = simplDN $ suc_aux n where
suc_aux Cero = D Cero
suc_aux (U n) = D n
suc_aux (D n) = U $ predDN n
--Representation of a DNat number in the integers.
dNToZ :: DNat->Int
dNToZ Cero = 0
dNToZ (D n) = 2 * dNToZ n
dNToZ (U n) = 2 * (dNToZ n) + 1
--Add two DNat numbers.
sumaDN :: DNat->DNat->DNat
sumaDN n m = simplDN $ f_suma n m where
f_suma n Cero = n
f_suma Cero n = n
f_suma (D n) (U m) = U $ f_suma n m
f_suma (D n) (D m) = D $ f_suma n m
f_suma (U n) (D m) = U $ f_suma n m
f_suma (U n) (U m) = D $ sucDN $ f_suma n m
--Multiply two DNat numbers.
prodDN :: DNat->DNat->DNat
prodDN n m = simplDN $ prod_f n m where
prod_f Cero n = Cero
prod_f n Cero = Cero
prod_f (U Cero) n = n
prod_f n (U Cero) = n
prod_f (D n) (D m) = D $ D $ prodDN n m
prod_f (D n) (U m) = D $ sumaDN(D $ prodDN n m) n
prod_f (U n) (D m) = D $ sumaDN(D $ prodDN n m) m
prod_f (U n) (U m) = U $ sumaDN(sumaDN(D $ prodDN n m) n) m
--Convert a positive integer to its DNat representation.
zToDNat :: Int->DNat
zToDNat 0 = Cero
zToDNat n = sucDN $ zToDNat(n-1)
{- Lists -}
--Remove duplicates from a list.
toSet::Eq a=>[a]->[a]
toSet [] = []
toSet (x:xs) = x:(filter (x/=) (toSet xs))
--Count the number of occurrences of an element in a list.
cuantas::Eq a=>a->[a]->Int
cuantas _ [] = 0
cuantas x (y:ys) = if x == y then 1 + (cuantas x ys)
else cuantas x ys
--Count the occurrences of each element in the list.
frec::Eq a=>[a]->[(a,Int)]
frec [] = []
frec (x:xs) = ((x, cuantas x (x:xs)): frec(filter (x/=) (xs)))
--Gives us the elements that appear only once.
unaVez::Eq a=>[a]->[a]
unaVez [] = []
unaVez (x:xs) = if cuantas x (x:xs) == 1 then x : unaVez (xs)
else unaVez(filter (x/=) (xs))
{- Challenges -}
-- helper: drops everything up to and including the first blank
elimina::String->String
elimina [] = []
elimina (x:xs) = if x /= ' ' then elimina (xs)
else xs
compress1::String->String
compress1 [] = []
compress1 (' ':xs) = if head xs == ' ' then compress1 (elimina(xs))
else [head xs] ++ compress1 (elimina(xs))
compress1 (x:xs) = [x] ++ compress1 (elimina(xs))
--compress2::String->String
{- Tests -}
--Naturals
--Should give: Suc (Suc (Suc (Suc (Suc (Suc (Suc Zero))))))
prueba1 = suma (Suc $ Suc Zero) (suma (Suc $ Suc $ Suc $ Suc Zero) (Suc Zero))
--Should give: Suc (Suc (Suc (Suc (Suc (Suc (Suc (Suc Zero)))))))
--prueba2 = prod (Suc $ Suc Zero) (prod (Suc $ Suc $ Suc $ Suc Zero) (Suc Zero))
--Should give: Suc (Suc (Suc (Suc (Suc (Suc (Suc (Suc Zero)))))))
--prueba3 = pot (suma (Suc Zero) (Suc Zero)) (prod (Suc Zero) (Suc $ Suc $ Suc Zero))
--DNat
--Should give: 31
--prueba4 = dNToZ $ sucDN $ sumaDN (D $ D $ U $ U $ D $ D Cero) (predDN $ zToDNat 19)
--Should give: 5844
--prueba5 = dNToZ $ sucDN $ sucDN $ prodDN (U $ U $ U $ D $ U $ D $ D Cero) (sumaDN (U $ D $ U $ D $ D Cero) (zToDNat 249))
--Should give: 21
--prueba6 = (dNToZ $ sumaDN (U $ U $ D $ D $ D $ D Cero) (U $ U $ D Cero)) + (dNToZ $ sucDN $ D $ U $ U $ U $ D Cero)
--Should give: 38
--prueba7 = dNToZ $ zToDNat $ dNToZ $ sumaDN (U $ U $ D $ D $ D $ U $ D Cero) (U $ U $ D $ D $ D Cero)
-- Lists
-- Should give: [1,2,3,32,4,6,8,5,0]
prueba8 = toSet [1,2,3,1,3,3,32,2,4,6,8,5,8,0,1,2,6,0,0,3,2,4,6,2,32]
-- Should give: 4
prueba9 = cuantas 1 [1,2,3,1,3,3,32,2,4,6,8,5,8,0,1,2,6,0,0,3,2,4,6,2,1]
-- Should give: [(1,3),(2,2),(3,3),(32,1),(6,2),(8,2),(5,1),(0,2)]
prueba10 = frec [1,2,3,1,3,3,32,6,8,5,8,0,1,2,6,0]
-- Should give: [32,5,7]
prueba11 = unaVez [1,2,3,1,3,3,32,6,8,5,8,0,1,2,6,0,7]
-- Challenges
-- Should give: "AinacychaninswstliacaiabH"
prueba12 = compress1 "And its not a cry you can hear at night, its not somebody who's seen the light, its a cold and its a broken Hallelujah"
-- Should give: "acm1pt Sapbeeee!!!"
{- prueba13 = compress2 $ "23"++(replicate 30 'a')++" 100"++(replicate 110 'c')++" 2abm"++" 4mjlo1u"++" 1001"++(replicate 1002 'p')++
" 0tkajdaad"++" 15sdklf"++" 19"++(replicate 23 'S')++" 8ldjjdlphaph"++" 0pksbhds"++" 1kblgh"++" 2ljekz"++" 3mluekshdgfd"++
" 4py7jelh"++" 5ñokgoegss"++" 6magdéj!"++" 0!"++" 0!" -}
|
ArlequinWild/Lenguajes-de-programaci-n
|
Practica0.hs
|
gpl-3.0
| 5,334 | 0 | 13 | 1,325 | 1,936 | 1,039 | 897 | 82 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Partners.Users.CreateCompanyRelation
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a user\'s company relation. Affiliates the user to a company.
--
-- /See:/ <https://developers.google.com/partners/ Google Partners API Reference> for @partners.users.createCompanyRelation@.
module Network.Google.Resource.Partners.Users.CreateCompanyRelation
(
-- * REST Resource
UsersCreateCompanyRelationResource
-- * Creating a Request
, usersCreateCompanyRelation
, UsersCreateCompanyRelation
-- * Request Lenses
, uccrXgafv
, uccrUploadProtocol
, uccrAccessToken
, uccrUploadType
, uccrPayload
, uccrUserId
, uccrRequestMetadataPartnersSessionId
, uccrRequestMetadataLocale
, uccrRequestMetadataExperimentIds
, uccrRequestMetadataUserOverridesIPAddress
, uccrRequestMetadataTrafficSourceTrafficSubId
, uccrRequestMetadataUserOverridesUserId
, uccrRequestMetadataTrafficSourceTrafficSourceId
, uccrCallback
) where
import Network.Google.Partners.Types
import Network.Google.Prelude
-- | A resource alias for @partners.users.createCompanyRelation@ method which the
-- 'UsersCreateCompanyRelation' request conforms to.
type UsersCreateCompanyRelationResource =
"v2" :>
"users" :>
Capture "userId" Text :>
"companyRelation" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "requestMetadata.partnersSessionId" Text
:>
QueryParam "requestMetadata.locale" Text :>
QueryParams "requestMetadata.experimentIds" Text :>
QueryParam "requestMetadata.userOverrides.ipAddress"
Text
:>
QueryParam
"requestMetadata.trafficSource.trafficSubId"
Text
:>
QueryParam "requestMetadata.userOverrides.userId"
Text
:>
QueryParam
"requestMetadata.trafficSource.trafficSourceId"
Text
:>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CompanyRelation :>
Put '[JSON] CompanyRelation
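-- Editorial note (inferred from the type above, not part of the generated
-- source): the alias corresponds to a PUT request against
-- /v2/users/{userId}/companyRelation with a JSON 'CompanyRelation' body,
-- plus the standard query parameters shared by the Partners API methods.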
-- | Creates a user\'s company relation. Affiliates the user to a company.
--
-- /See:/ 'usersCreateCompanyRelation' smart constructor.
data UsersCreateCompanyRelation =
UsersCreateCompanyRelation'
{ _uccrXgafv :: !(Maybe Xgafv)
, _uccrUploadProtocol :: !(Maybe Text)
, _uccrAccessToken :: !(Maybe Text)
, _uccrUploadType :: !(Maybe Text)
, _uccrPayload :: !CompanyRelation
, _uccrUserId :: !Text
, _uccrRequestMetadataPartnersSessionId :: !(Maybe Text)
, _uccrRequestMetadataLocale :: !(Maybe Text)
, _uccrRequestMetadataExperimentIds :: !(Maybe [Text])
, _uccrRequestMetadataUserOverridesIPAddress :: !(Maybe Text)
, _uccrRequestMetadataTrafficSourceTrafficSubId :: !(Maybe Text)
, _uccrRequestMetadataUserOverridesUserId :: !(Maybe Text)
, _uccrRequestMetadataTrafficSourceTrafficSourceId :: !(Maybe Text)
, _uccrCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersCreateCompanyRelation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uccrXgafv'
--
-- * 'uccrUploadProtocol'
--
-- * 'uccrAccessToken'
--
-- * 'uccrUploadType'
--
-- * 'uccrPayload'
--
-- * 'uccrUserId'
--
-- * 'uccrRequestMetadataPartnersSessionId'
--
-- * 'uccrRequestMetadataLocale'
--
-- * 'uccrRequestMetadataExperimentIds'
--
-- * 'uccrRequestMetadataUserOverridesIPAddress'
--
-- * 'uccrRequestMetadataTrafficSourceTrafficSubId'
--
-- * 'uccrRequestMetadataUserOverridesUserId'
--
-- * 'uccrRequestMetadataTrafficSourceTrafficSourceId'
--
-- * 'uccrCallback'
usersCreateCompanyRelation
:: CompanyRelation -- ^ 'uccrPayload'
-> Text -- ^ 'uccrUserId'
-> UsersCreateCompanyRelation
usersCreateCompanyRelation pUccrPayload_ pUccrUserId_ =
UsersCreateCompanyRelation'
{ _uccrXgafv = Nothing
, _uccrUploadProtocol = Nothing
, _uccrAccessToken = Nothing
, _uccrUploadType = Nothing
, _uccrPayload = pUccrPayload_
, _uccrUserId = pUccrUserId_
, _uccrRequestMetadataPartnersSessionId = Nothing
, _uccrRequestMetadataLocale = Nothing
, _uccrRequestMetadataExperimentIds = Nothing
, _uccrRequestMetadataUserOverridesIPAddress = Nothing
, _uccrRequestMetadataTrafficSourceTrafficSubId = Nothing
, _uccrRequestMetadataUserOverridesUserId = Nothing
, _uccrRequestMetadataTrafficSourceTrafficSourceId = Nothing
, _uccrCallback = Nothing
}
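-- | Editorial sketch (not part of the generated module): the minimal request
-- needs only the payload and a user id; \"me\" denotes the currently
-- authenticated user, and the optional fields can be overridden afterwards
-- with the lenses below.
exampleMinimalRequest :: CompanyRelation -> UsersCreateCompanyRelation
exampleMinimalRequest payload = usersCreateCompanyRelation payload "me"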
-- | V1 error format.
uccrXgafv :: Lens' UsersCreateCompanyRelation (Maybe Xgafv)
uccrXgafv
= lens _uccrXgafv (\ s a -> s{_uccrXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uccrUploadProtocol :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrUploadProtocol
= lens _uccrUploadProtocol
(\ s a -> s{_uccrUploadProtocol = a})
-- | OAuth access token.
uccrAccessToken :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrAccessToken
= lens _uccrAccessToken
(\ s a -> s{_uccrAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
uccrUploadType :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrUploadType
= lens _uccrUploadType
(\ s a -> s{_uccrUploadType = a})
-- | Multipart request metadata.
uccrPayload :: Lens' UsersCreateCompanyRelation CompanyRelation
uccrPayload
= lens _uccrPayload (\ s a -> s{_uccrPayload = a})
-- | The ID of the user. Can be set to 'me' to mean the currently
-- authenticated user.
uccrUserId :: Lens' UsersCreateCompanyRelation Text
uccrUserId
= lens _uccrUserId (\ s a -> s{_uccrUserId = a})
-- | Google Partners session ID.
uccrRequestMetadataPartnersSessionId :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataPartnersSessionId
= lens _uccrRequestMetadataPartnersSessionId
(\ s a ->
s{_uccrRequestMetadataPartnersSessionId = a})
-- | Locale to use for the current request.
uccrRequestMetadataLocale :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataLocale
= lens _uccrRequestMetadataLocale
(\ s a -> s{_uccrRequestMetadataLocale = a})
-- | Experiment IDs the current request belongs to.
uccrRequestMetadataExperimentIds :: Lens' UsersCreateCompanyRelation [Text]
uccrRequestMetadataExperimentIds
= lens _uccrRequestMetadataExperimentIds
(\ s a -> s{_uccrRequestMetadataExperimentIds = a})
. _Default
. _Coerce
-- | IP address to use instead of the user\'s geo-located IP address.
uccrRequestMetadataUserOverridesIPAddress :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataUserOverridesIPAddress
= lens _uccrRequestMetadataUserOverridesIPAddress
(\ s a ->
s{_uccrRequestMetadataUserOverridesIPAddress = a})
-- | Second level identifier to indicate where the traffic comes from. An
-- identifier has multiple letters created by a team which redirected the
-- traffic to us.
uccrRequestMetadataTrafficSourceTrafficSubId :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataTrafficSourceTrafficSubId
= lens _uccrRequestMetadataTrafficSourceTrafficSubId
(\ s a ->
s{_uccrRequestMetadataTrafficSourceTrafficSubId = a})
-- | Logged-in user ID to impersonate instead of the user\'s ID.
uccrRequestMetadataUserOverridesUserId :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataUserOverridesUserId
= lens _uccrRequestMetadataUserOverridesUserId
(\ s a ->
s{_uccrRequestMetadataUserOverridesUserId = a})
-- | Identifier to indicate where the traffic comes from. An identifier has
-- multiple letters created by a team which redirected the traffic to us.
uccrRequestMetadataTrafficSourceTrafficSourceId :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrRequestMetadataTrafficSourceTrafficSourceId
= lens
_uccrRequestMetadataTrafficSourceTrafficSourceId
(\ s a ->
s{_uccrRequestMetadataTrafficSourceTrafficSourceId =
a})
-- | JSONP
uccrCallback :: Lens' UsersCreateCompanyRelation (Maybe Text)
uccrCallback
= lens _uccrCallback (\ s a -> s{_uccrCallback = a})
instance GoogleRequest UsersCreateCompanyRelation
where
type Rs UsersCreateCompanyRelation = CompanyRelation
type Scopes UsersCreateCompanyRelation = '[]
requestClient UsersCreateCompanyRelation'{..}
= go _uccrUserId _uccrXgafv _uccrUploadProtocol
_uccrAccessToken
_uccrUploadType
_uccrRequestMetadataPartnersSessionId
_uccrRequestMetadataLocale
(_uccrRequestMetadataExperimentIds ^. _Default)
_uccrRequestMetadataUserOverridesIPAddress
_uccrRequestMetadataTrafficSourceTrafficSubId
_uccrRequestMetadataUserOverridesUserId
_uccrRequestMetadataTrafficSourceTrafficSourceId
_uccrCallback
(Just AltJSON)
_uccrPayload
partnersService
where go
= buildClient
(Proxy :: Proxy UsersCreateCompanyRelationResource)
mempty
|
brendanhay/gogol
|
gogol-partners/gen/Network/Google/Resource/Partners/Users/CreateCompanyRelation.hs
|
mpl-2.0
| 10,553 | 0 | 25 | 2,503 | 1,360 | 782 | 578 | 212 | 1 |
module Main (main) where
import qualified BoTox as B
main = B.runBoTox
|
gordon-quad/botox
|
app/Main.hs
|
agpl-3.0
| 73 | 0 | 5 | 14 | 22 | 15 | 7 | 3 | 1 |
{-# LANGUAGE FlexibleInstances, DeriveGeneric, DeriveAnyClass #-}
module Sprockell.HardwareTypes where
import GHC.Generics
import Control.DeepSeq
import qualified Data.Sequence as Sequence
import qualified Data.Array as Array
import qualified Data.Foldable as Foldable
-- ==========================================================================================================
-- Types and sizes for: data, memory, communication channels
-- ==========================================================================================================
-- Constraints will possibly require conversions -- KEEP IT SIMPLE!
-- * MemAddr (for both local and shared memory) should fit in registers for indirect addressing
-- * Register content should be usable as code address
-- * Possibly that shared memory is bigger than local memory
-- * Etcetera ...
-- ==> Choice: avoid conversions, too complicated. So keep all these the same as much as possible.
-- ==========================================================================================================
type CodeAddr = Int -- * Instruction Address in Assembly Program (Program Counter)
type RegAddr = Int -- * Register Address
type MemAddr = Int -- * Local Memory Address (incl Stack Pointer)
type Value = Int
type SprID = Value
data AddrImmDI = ImmValue Int       -- ImmValue n: is just the constant value n
               | DirAddr MemAddr    -- DirAddr a: a is an address in memory (local or shared)
               | IndAddr RegAddr    -- IndAddr p: p is a register, the content of this register is an address in memory
deriving (Eq,Show,Read)
type LocalMem = Sequence.Seq Value
type RegBank = [Value]
type SharedMem = Sequence.Seq Value
type InstructionMem = Array.Array Int Instruction
type Reply = Maybe Value -- Every clock cycle an input arrives from shared memory, probably most of the time Nothing
data Request = NoRequest -- No request to shared memory
| ReadReq MemAddr -- Request to shared memory to send the value at the given address
| WriteReq Value MemAddr -- Request to write a value to a given address in shared memory
| TestReq MemAddr -- Request to test-and-Set at the given address in shared memory
deriving (Eq,Show,Generic,NFData)
type IndRequests = [(SprID, Request)] -- A list of requests together with the sprockell-IDs of the sender
type IndReplies = [(SprID, Reply)] -- Ibid for replies
type RequestChannel = [Request]
type RequestChannels = [RequestChannel]
type ReplyChannel = [Reply]
type ReplyChannels = [ReplyChannel]
type ParRequests = [Request] -- all requests sent by the Sprockells in parallel (at each clock cycle)
type ParReplies = [Reply] -- ibid for replies
type RequestFifo = [(SprID,Request)] -- Collects all Sprockell requests as input for Shared Memory
-- ==========================================================================================================
-- Memory type class + instances
-- ==========================================================================================================
class Memory m where
fromList :: [a] -> m a
toList :: m a -> [a]
(!) :: m a -> Int -> a -- indexing
(<~) :: m a -> (Int,a) -> m a -- mem <~ (i,x): put value x at address i in mem
(<~!) :: m a -> (Int,a) -> m a -- ibid, but leave address 0 unchanged
xs <~! (i,x) | i == 0 = xs
| otherwise = xs <~ (i,x)
instance Memory [] where
fromList = id
toList = id
xs ! i = xs !! i
[] <~ _ = [] -- silently ignore update after end of list
(x:xs) <~ (0,y) = y:xs
(x:xs) <~ (n,y) = x : (xs <~ (n-1,y))
instance Memory (Array.Array Int) where
fromList xs = Array.listArray (0,length xs) xs
toList = Array.elems
(!) = (Array.!)
xs <~ (i,x) = xs Array.// [(i,x)]
instance Memory Sequence.Seq where
fromList = Sequence.fromList
toList = Foldable.toList
(!) = Sequence.index
xs <~ (i,x) = Sequence.update i x xs
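-- Editorial example (not part of the original module): the instances above
-- agree on indexing and updating; for the list instance,
-- ([10,20,30] <~ (1,99)) ! 1 is 99, while (<~!) leaves address 0 unchanged.
memoryExample :: Bool
memoryExample = (([10,20,30] <~ (1,99)) ! 1) == (99 :: Int)
                && ([10,20,30] <~! (0,99)) == [10,20,30 :: Int]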
-- ==========================================================================================================
-- Internal state for Sprockell and System
-- ==========================================================================================================
data SprockellState = SprState
{ pc :: !CodeAddr -- Program counter
, sp :: !MemAddr -- Stack pointer
, regbank :: !RegBank -- Register bank
, localMem :: !LocalMem -- Local memory
} deriving (Eq,Show) -- Exclamation mark for eager (non-lazy) evaluation
data SystemState = SystemState
{ sprStates :: ![SprockellState] -- list of all Sprockell states
, requestChnls :: ![RequestChannel] -- list of all request channels
, replyChnls :: ![ReplyChannel] -- list of all reply channels
, requestFifo :: !RequestFifo -- request fifo for buffering requests
, sharedMem :: !SharedMem -- shared memory
} deriving (Eq,Show) -- Exclamation mark for eager (non-lazy) evaluation
-- ==========================================================================================================
-- SprIL: Sprockell Instruction Language
-- ==========================================================================================================
data Operator = Add | Sub | Mul -- | Div | Mod -- Computational operations -- No Div, Mod because of hardware complexity
| Equal | NEq | Gt | Lt | GtE | LtE -- Comparison operations
| And | Or | Xor | LShift | RShift -- Logical operations
| Decr | Incr -- Decrement (-1), Increment (+1)
deriving (Eq,Show,Read)
data Instruction = Compute Operator RegAddr RegAddr RegAddr -- Compute op r0 r1 r2: go to "alu",
-- do "op" on regs r0, r1, and put result in reg r2
| ComputeI Operator RegAddr Value RegAddr -- ComputeI op r0 v r2: go to "alu",
-- do "op" on regs r0, value, and put result in reg r2
| Jump Target -- Jump t: jump to target t (absolute, relative, indirect)
| Branch RegAddr Target -- Branch r t: conditional jump, depending on register r
-- if r contains 0: don't jump; otherwise: jump
| Load AddrImmDI RegAddr -- Load (ImmValue n) r: put value n in register r
-- Load (DirAddr a) r : put value on memory address a in r
-- Load (IndAddr p) r : ibid, but memory address is in register p
| Store RegAddr AddrImmDI -- Store r (DirAddr a): from register r to memory address a
-- Store r (IndAddr p): ibid, memory address contained in register p
-- Store r (ImmValue n): undefined
| Push RegAddr -- Push r: put the value from register r on the stack
| Pop RegAddr -- Pop r : put the top of the stack in register r
-- and adapts the stack pointer
| ReadInstr AddrImmDI -- ReadInstr a: Send read request for shMem address a
| Receive RegAddr -- Receive r : Wait for reply and save it in register r
| WriteInstr RegAddr AddrImmDI -- WriteInstr r a: Write content of reg r to shMem address a
| TestAndSet AddrImmDI -- Request a test on address for 0 and sets it to 1 if it is.
-- Reply will contain 1 on success, and 0 on failure.
-- This is an atomic operation; it might therefore be
-- used to implement locks or synchronisation.
-- For ReadInstr, WriteInstr, TestAndSet:
-- address only as DirAddr, IndAddr; not as ImmValue
| EndProg -- end of program, deactivates Sprockell. If all sprockells are at
-- this instruction, the simulation will halt.
| Nop -- Operation "do nothing"
| Debug String -- No real instruction, for debug purposes.
deriving (Eq,Show,Read)
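-- Editorial sketch (not part of the original module): a tiny SprIL program
-- built from the constructors above; register and address numbers are chosen
-- arbitrarily for illustration.
demoProgram :: [Instruction]
demoProgram =
    [ Load (ImmValue 5) 2           -- reg 2 := 5
    , ComputeI Add 2 1 2            -- reg 2 := reg 2 + 1
    , Store 2 (DirAddr 0)           -- local memory address 0 := reg 2
    , EndProg                       -- deactivate this Sprockell
    ]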
-- ==========================================================================================================
-- Data structures for communication within and between Sprockells
-- ==========================================================================================================
data Target = Abs CodeAddr -- Abs n: instruction n
| Rel CodeAddr -- Rel n: increase current program counter with n
| Ind RegAddr -- Ind r: value of new program counter is in register r
deriving (Eq,Show,Read)
data TargetCode = NoJump -- code to indicate in machine code how to jump
| TAbs
| TRel
| TInd
| Waiting
deriving (Eq,Show)
data AguCode = AguDir -- code to tell agu how to calculate the address in memory
| AguInd
| AguPush
| AguPop
deriving (Eq,Show)
data LdCode = LdImm -- code that indicates which value to load in register
| LdAlu
| LdAluI -- PP26: immediate compute
| LdMem
| LdInp
deriving (Eq,Show)
data StCode = StNone -- code to tell which value to put in memory
| StMem
deriving (Eq,Show)
data SPCode = Down -- code that tells how the stack pointer should be changed
| Flat
| Up
deriving (Eq,Show)
data IOCode = IONone -- code to instruct IO-functions
| IORead
| IOWrite
| IOTest
deriving (Eq,Show)
data MachCode = MachCode -- machine code: fields contain codes as described above
{ ldCode :: LdCode
, stCode :: StCode
, aguCode :: AguCode
, branch :: Bool
, tgtCode :: TargetCode
, spCode :: SPCode
, aluCode :: Operator
, ioCode :: IOCode
, immValue :: Value
, regX :: RegAddr -- selects first register
, regY :: RegAddr -- selects second register
, loadReg :: RegAddr -- register to load a value to
, addrImm :: MemAddr -- address for memory
} deriving (Eq,Show)
-- ==========================================================================================================
-- Clock for simulation
-- ==========================================================================================================
data Tick = Tick deriving (Eq,Show)
type Clock = [Tick]
clock = repeat Tick
-- ==========================================================================================================
-- These instances are used by the deepseq in Simulation.systemSim to avoid space-leaks
-- ==========================================================================================================
instance NFData SprockellState where
rnf (SprState pc sp regbank localMem)
= rnf pc
`seq` rnf sp
      `seq` localMem -- specifically only evaluate localMem to WHNF, Sequence should be strict already
`seq` rnf regbank
instance NFData SystemState where
rnf (SystemState sprStates requestChnls replyChnls requestFifo sharedMem)
= rnf sprStates
`seq` rnf requestChnls
`seq` rnf replyChnls
`seq` rnf requestFifo
      `seq` sharedMem -- specifically only evaluate sharedMem to WHNF, Sequence should be strict already
`seq` ()
|
wouwouwou/2017_module_8
|
src/haskell/PP-project-2017/lib/sprockell-2017/src/Sprockell/HardwareTypes.hs
|
apache-2.0
| 14,342 | 0 | 11 | 5,906 | 1,582 | 961 | 621 | 177 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | Convenience functions for specifying colors by name, hexadecimal value,
-- integer value between 0 and 255, and a few other ways.
module Extras.Colors
( -- $intro
painted,
colorNamed,
rgb,
greyed,
withAlpha,
)
where
import Prelude
-------------------------------------------------------------------------------
-- $intro
-- = Extra Colors
--
-- To use the extra features in this module, you must begin your code with this
-- line:
--
-- > import Extras.Colors
-- | The given picture painted with the color corresponding to the given
-- color name, which must be a valid argument of the 'colorNamed' function.
painted :: (Picture, Text) -> Picture
painted (pic, name) = colored (pic, colorNamed (name))
-- | This function allows you to specify color components in the range 0 to 255
-- instead of 0 to 1.
rgb :: (Number, Number, Number) -> Color
rgb (r, g, b) = RGB (r / 255, g / 255, b / 255)
-- | A shade of grey as given by the argument, where @greyed(0)@ is @black@
-- and @greyed(1)@ is @white@.
greyed :: Number -> Color
greyed (g) = RGB (g, g, g)
-- | This function allows you to specify the level of transparency of the
-- given color. Transparency must be given in the range 0 (fully transparent)
-- to 1 (fully opaque).
withAlpha :: (Color, Number) -> Color
withAlpha (RGBA (r, g, b, _), a) = RGBA (r, g, b, a)
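-- Editorial example (not part of the original module): a half-transparent
-- pure red can be written with the helpers above as
--
-- > translucentRed = withAlpha(colorNamed("#ff0000"), 0.5)
--
-- where colorNamed("#ff0000") and rgb(255, 0, 0) describe the same color.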
-- | Convert the name of a color to the actual color. The name of the color
-- can be specified in two ways. First, you can use the official CSS color,
-- as defined in the working document /CSS Color Module Level 4/, which
-- can be found at <https://drafts.csswg.org/css-color/#named-colors>.
-- You can also specify the color as a Text with the pattern "#xxxxxx", where
-- the first character is a hash tag and the rest are 6 hexadecimal digits
-- that correspond to the /hex color/, as usually defined in HTML documents.
--
-- The list of named colors is:
--
-- @aliceblue@, @antiquewhite@, @aqua@, @aquamarine@, @azure@, @beige@,
-- @bisque@, @black@, @blanchedalmond@, @blue@, @blueviolet@, @brown@,
-- @burlywood@, @cadetblue@, @chartreuse@, @chocolate@, @coral@,
-- @cornflowerblue@, @cornsilk@, @crimson@, @cyan@, @darkblue@,
-- @darkcyan@, @darkgoldenrod@, @darkgray@, @darkgreen@, @darkgrey@,
-- @darkkhaki@, @darkmagenta@, @darkolivegreen@, @darkorange@,
-- @darkorchid@, @darkred@, @darksalmon@, @darkseagreen@, @darkslateblue@,
-- @darkslategray@, @darkslategrey@, @darkturquoise@, @darkviolet@,
-- @deeppink@, @deepskyblue@, @dimgray@, @dimgrey@, @dodgerblue@,
-- @firebrick@, @floralwhite@, @forestgreen@, @fuchsia@, @gainsboro@,
-- @ghostwhite@, @gold@, @goldenrod@, @gray@, @green@, @greenyellow@,
-- @grey@, @honeydew@, @hotpink@, @indianred@, @indigo@, @ivory@,
-- @khaki@, @lavender@, @lavenderblush@, @lawngreen@, @lemonchiffon@,
-- @lightblue@, @lightcoral@, @lightcyan@, @lightgoldenrodyellow@,
-- @lightgray@, @lightgreen@, @lightgrey@, @lightpink@, @lightsalmon@,
-- @lightseagreen@, @lightskyblue@, @lightslategray@, @lightslategrey@,
-- @lightsteelblue@, @lightyellow@, @lime@, @limegreen@, @linen@,
-- @magenta@, @maroon@, @mediumaquamarine@, @mediumblue@, @mediumorchid@,
-- @mediumpurple@, @mediumseagreen@, @mediumslateblue@, @mediumspringgreen@,
-- @mediumturquoise@, @mediumvioletred@, @midnightblue@, @mintcream@,
-- @mistyrose@, @moccasin@, @navajowhite@, @navy@, @oldlace@, @olive@,
-- @olivedrab@, @orange@, @orangered@, @orchid@, @palegoldenrod@,
-- @palegreen@, @paleturquoise@, @palevioletred@, @papayawhip@,
-- @peachpuff@, @peru@, @pink@, @plum@, @powderblue@, @purple@,
-- @rebeccapurple@, @red@, @rosybrown@, @royalblue@, @saddlebrown@, @salmon@,
-- @sandybrown@, @seagreen@, @seashell@, @sienna@, @silver@, @skyblue@,
-- @slateblue@, @slategray@, @slategrey@, @snow@, @springgreen@, @steelblue@,
-- @tan@, @teal@, @thistle@, @tomato@, @turquoise@, @violet@, @wheat@, @white@,
-- @whitesmoke@, @yellow@, and @yellowgreen@.
colorNamed :: Text -> Color
colorNamed (name) = colorNamed' (lowercase (substitution (name, " ", "")))
colorNamed' (name)
| head == "#" && length (chars) == 7 = RGB (m r, m g, m b)
| otherwise = byName (name)
where
chars = characters (name)
head = chars # 1
r = rest (chars, 1)
g = rest (r, 2)
b = rest (g, 2)
m x = fromHex (first (x, 2))
fromHex [hi, lo] = (h (hi) * 16 + h (lo)) / 255
where
h ("0") = 0
h ("1") = 1
h ("2") = 2
h ("3") = 3
h ("4") = 4
h ("5") = 5
h ("6") = 6
h ("7") = 7
h ("8") = 8
h ("9") = 9
h ("a") = 10
h ("b") = 11
h ("c") = 12
h ("d") = 13
h ("e") = 14
h ("f") = 15
byName ("aliceblue") = colorNamed' ("#f0f8ff")
byName ("antiquewhite") = colorNamed' ("#faebd7")
byName ("aqua") = colorNamed' ("#00ffff")
byName ("aquamarine") = colorNamed' ("#7fffd4")
byName ("azure") = colorNamed' ("#f0ffff")
byName ("beige") = colorNamed' ("#f5f5dc")
byName ("bisque") = colorNamed' ("#ffe4c4")
byName ("black") = colorNamed' ("#000000")
byName ("blanchedalmond") = colorNamed' ("#ffebcd")
byName ("blue") = colorNamed' ("#0000ff")
byName ("blueviolet") = colorNamed' ("#8a2be2")
byName ("brown") = colorNamed' ("#a52a2a")
byName ("burlywood") = colorNamed' ("#deb887")
byName ("cadetblue") = colorNamed' ("#5f9ea0")
byName ("chartreuse") = colorNamed' ("#7fff00")
byName ("chocolate") = colorNamed' ("#d2691e")
byName ("coral") = colorNamed' ("#ff7f50")
byName ("cornflowerblue") = colorNamed' ("#6495ed")
byName ("cornsilk") = colorNamed' ("#fff8dc")
byName ("crimson") = colorNamed' ("#dc143c")
byName ("cyan") = colorNamed' ("#00ffff")
byName ("darkblue") = colorNamed' ("#00008b")
byName ("darkcyan") = colorNamed' ("#008b8b")
byName ("darkgoldenrod") = colorNamed' ("#b8860b")
byName ("darkgray") = colorNamed' ("#a9a9a9")
byName ("darkgreen") = colorNamed' ("#006400")
byName ("darkgrey") = colorNamed' ("#a9a9a9")
byName ("darkkhaki") = colorNamed' ("#bdb76b")
byName ("darkmagenta") = colorNamed' ("#8b008b")
byName ("darkolivegreen") = colorNamed' ("#556b2f")
byName ("darkorange") = colorNamed' ("#ff8c00")
byName ("darkorchid") = colorNamed' ("#9932cc")
byName ("darkred") = colorNamed' ("#8b0000")
byName ("darksalmon") = colorNamed' ("#e9967a")
byName ("darkseagreen") = colorNamed' ("#8fbc8f")
byName ("darkslateblue") = colorNamed' ("#483d8b")
byName ("darkslategray") = colorNamed' ("#2f4f4f")
byName ("darkslategrey") = colorNamed' ("#2f4f4f")
byName ("darkturquoise") = colorNamed' ("#00ced1")
byName ("darkviolet") = colorNamed' ("#9400d3")
byName ("deeppink") = colorNamed' ("#ff1493")
byName ("deepskyblue") = colorNamed' ("#00bfff")
byName ("dimgray") = colorNamed' ("#696969")
byName ("dimgrey") = colorNamed' ("#696969")
byName ("dodgerblue") = colorNamed' ("#1e90ff")
byName ("firebrick") = colorNamed' ("#b22222")
byName ("floralwhite") = colorNamed' ("#fffaf0")
byName ("forestgreen") = colorNamed' ("#228b22")
byName ("fuchsia") = colorNamed' ("#ff00ff")
byName ("gainsboro") = colorNamed' ("#dcdcdc")
byName ("ghostwhite") = colorNamed' ("#f8f8ff")
byName ("gold") = colorNamed' ("#ffd700")
byName ("goldenrod") = colorNamed' ("#daa520")
byName ("gray") = colorNamed' ("#808080")
byName ("green") = colorNamed' ("#008000")
byName ("greenyellow") = colorNamed' ("#adff2f")
byName ("grey") = colorNamed' ("#808080")
byName ("honeydew") = colorNamed' ("#f0fff0")
byName ("hotpink") = colorNamed' ("#ff69b4")
byName ("indianred") = colorNamed' ("#cd5c5c")
byName ("indigo") = colorNamed' ("#4b0082")
byName ("ivory") = colorNamed' ("#fffff0")
byName ("khaki") = colorNamed' ("#f0e68c")
byName ("lavender") = colorNamed' ("#e6e6fa")
byName ("lavenderblush") = colorNamed' ("#fff0f5")
byName ("lawngreen") = colorNamed' ("#7cfc00")
byName ("lemonchiffon") = colorNamed' ("#fffacd")
byName ("lightblue") = colorNamed' ("#add8e6")
byName ("lightcoral") = colorNamed' ("#f08080")
byName ("lightcyan") = colorNamed' ("#e0ffff")
byName ("lightgoldenrodyellow") = colorNamed' ("#fafad2")
byName ("lightgray") = colorNamed' ("#d3d3d3")
byName ("lightgreen") = colorNamed' ("#90ee90")
byName ("lightgrey") = colorNamed' ("#d3d3d3")
byName ("lightpink") = colorNamed' ("#ffb6c1")
byName ("lightsalmon") = colorNamed' ("#ffa07a")
byName ("lightseagreen") = colorNamed' ("#20b2aa")
byName ("lightskyblue") = colorNamed' ("#87cefa")
byName ("lightslategray") = colorNamed' ("#778899")
byName ("lightslategrey") = colorNamed' ("#778899")
byName ("lightsteelblue") = colorNamed' ("#b0c4de")
byName ("lightyellow") = colorNamed' ("#ffffe0")
byName ("lime") = colorNamed' ("#00ff00")
byName ("limegreen") = colorNamed' ("#32cd32")
byName ("linen") = colorNamed' ("#faf0e6")
byName ("magenta") = colorNamed' ("#ff00ff")
byName ("maroon") = colorNamed' ("#800000")
byName ("mediumaquamarine") = colorNamed' ("#66cdaa")
byName ("mediumblue") = colorNamed' ("#0000cd")
byName ("mediumorchid") = colorNamed' ("#ba55d3")
byName ("mediumpurple") = colorNamed' ("#9370db")
byName ("mediumseagreen") = colorNamed' ("#3cb371")
byName ("mediumslateblue") = colorNamed' ("#7b68ee")
byName ("mediumspringgreen") = colorNamed' ("#00fa9a")
byName ("mediumturquoise") = colorNamed' ("#48d1cc")
byName ("mediumvioletred") = colorNamed' ("#c71585")
byName ("midnightblue") = colorNamed' ("#191970")
byName ("mintcream") = colorNamed' ("#f5fffa")
byName ("mistyrose") = colorNamed' ("#ffe4e1")
byName ("moccasin") = colorNamed' ("#ffe4b5")
byName ("navajowhite") = colorNamed' ("#ffdead")
byName ("navy") = colorNamed' ("#000080")
byName ("oldlace") = colorNamed' ("#fdf5e6")
byName ("olive") = colorNamed' ("#808000")
byName ("olivedrab") = colorNamed' ("#6b8e23")
byName ("orange") = colorNamed' ("#ffa500")
byName ("orangered") = colorNamed' ("#ff4500")
byName ("orchid") = colorNamed' ("#da70d6")
byName ("palegoldenrod") = colorNamed' ("#eee8aa")
byName ("palegreen") = colorNamed' ("#98fb98")
byName ("paleturquoise") = colorNamed' ("#afeeee")
byName ("palevioletred") = colorNamed' ("#db7093")
byName ("papayawhip") = colorNamed' ("#ffefd5")
byName ("peachpuff") = colorNamed' ("#ffdab9")
byName ("peru") = colorNamed' ("#cd853f")
byName ("pink") = colorNamed' ("#ffc0cb")
byName ("plum") = colorNamed' ("#dda0dd")
byName ("powderblue") = colorNamed' ("#b0e0e6")
byName ("purple") = colorNamed' ("#800080")
byName ("rebeccapurple") = colorNamed' ("#663399")
byName ("red") = colorNamed' ("#ff0000")
byName ("rosybrown") = colorNamed' ("#bc8f8f")
byName ("royalblue") = colorNamed' ("#4169e1")
byName ("saddlebrown") = colorNamed' ("#8b4513")
byName ("salmon") = colorNamed' ("#fa8072")
byName ("sandybrown") = colorNamed' ("#f4a460")
byName ("seagreen") = colorNamed' ("#2e8b57")
byName ("seashell") = colorNamed' ("#fff5ee")
byName ("sienna") = colorNamed' ("#a0522d")
byName ("silver") = colorNamed' ("#c0c0c0")
byName ("skyblue") = colorNamed' ("#87ceeb")
byName ("slateblue") = colorNamed' ("#6a5acd")
byName ("slategray") = colorNamed' ("#708090")
byName ("slategrey") = colorNamed' ("#708090")
byName ("snow") = colorNamed' ("#fffafa")
byName ("springgreen") = colorNamed' ("#00ff7f")
byName ("steelblue") = colorNamed' ("#4682b4")
byName ("tan") = colorNamed' ("#d2b48c")
byName ("teal") = colorNamed' ("#008080")
byName ("thistle") = colorNamed' ("#d8bfd8")
byName ("tomato") = colorNamed' ("#ff6347")
byName ("turquoise") = colorNamed' ("#40e0d0")
byName ("violet") = colorNamed' ("#ee82ee")
byName ("wheat") = colorNamed' ("#f5deb3")
byName ("white") = colorNamed' ("#ffffff")
byName ("whitesmoke") = colorNamed' ("#f5f5f5")
byName ("yellow") = colorNamed' ("#ffff00")
byName ("yellowgreen") = colorNamed' ("#9acd32")
byName (name) = error msg
where
msg = joined ["Unknown color named \"", name, "\""]
|
google/codeworld
|
codeworld-base/src/Extras/Colors.hs
|
apache-2.0
| 11,853 | 0 | 11 | 1,582 | 3,256 | 1,770 | 1,486 | 200 | 16 |
module S3E6 where
import FPPrac.Trees
data BinTree = BinLeaf
| BinNode Int BinTree BinTree
ppbin :: BinTree -> RoseTree
ppbin BinLeaf = RoseNode "" []
ppbin (BinNode n t1 t2) = RoseNode (show n) [ppbin t1, ppbin t2]
insertTree :: Int -> BinTree -> BinTree
insertTree i BinLeaf = BinNode i BinLeaf BinLeaf
insertTree i (BinNode n t1 t2) | i < n = BinNode n (insertTree i t1) t2
| otherwise = BinNode n t1 (insertTree i t2)
makeTree :: [Int] -> BinTree
makeTree [] = BinLeaf
makeTree (x:xs) = insertTree x (makeTree xs)
cutOffAt :: Int -> BinTree -> BinTree
cutOffAt 0 _ = BinLeaf
cutOffAt _ BinLeaf = BinLeaf
cutOffAt i (BinNode n t1 t2) = BinNode n (cutOffAt (i-1) t1) (cutOffAt (i-1) t2)
ex6 :: IO()
ex6 = showTree (ppbin (cutOffAt 3 (makeTree [4,3,7,5,6,1,8,11,9,2,4,10])))
|
wouwouwou/module_8
|
src/main/haskell/series3/exercise6.hs
|
apache-2.0
| 826 | 0 | 12 | 188 | 401 | 208 | 193 | 20 | 1 |
import Data.List
import Numeric
main = do
print $ (sort (permutations "0123456789")) !! 999999
|
ulikoehler/ProjectEuler
|
Euler24.hs
|
apache-2.0
| 99 | 0 | 12 | 18 | 37 | 19 | 18 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Web.Endpoints.Tasks (routeTasks) where
import Control.Monad ((<=<))
import Control.Monad.IO.Class (liftIO)
import Data.HVect (HVect, ListContains)
import Data.Maybe (fromJust, listToMaybe, fromMaybe)
import Data.List ((\\))
import Data.Time.Clock ( NominalDiffTime
, addUTCTime, getCurrentTime)
import qualified Database.Esqueleto as E
import Database.Persist hiding (delete, get) -- TODO specify
import qualified Database.Persist as P
import qualified Database.Persist.Sql as PSql
import Formatting ((%), int, sformat)
import Network.HTTP.Types.Status (created201)
import Web.Spock hiding (head)
import Model.CoreTypes (ApiAction, Api, Email)
import qualified Model.SqlTypes as SqlT
import qualified Model.JsonTypes.Task as JsonTask
import qualified Model.JsonTypes.TaskIn as JsonTaskIn
import qualified Model.JsonTypes.Turn as JsonTurn
import Util (eitherJsonBody, emptyResponse)
import Query.Util ( integerKey, runSQL, trySqlGet
, trySqlSelectFirst, trySqlSelectFirst'
)
import Web.Auth (authHook)
routeTasks :: Api (HVect xs)
routeTasks =
prehook authHook $ do
get "tasks" getTasksAction
get ("tasks" <//> var) $
getTaskAction <=< trySqlSelectFirst SqlT.TaskId
delete ("tasks" <//> var) $ \taskId ->
trySqlGet taskId >> deleteTaskAction taskId
post ("tasks" <//> var <//> "finish") $ \taskId ->
trySqlSelectFirst SqlT.TurnTaskId taskId >> finishTaskAction taskId
post ("tasks" <//> "update") updateTurnsAction
put ("tasks" <//> var) $ \taskId ->
eitherJsonBody >>= putTaskAction taskId
post "tasks" $ eitherJsonBody >>= postTasksAction
-- |Helper function to get information about a task and its turns
getTaskInfo :: (JsonTask.Task -> ApiAction ctx a)
-> Entity SqlT.Task
-> ApiAction ctx a
getTaskInfo fun theTask@(Entity taskId _task) = do
users <- map (\(Entity _ taskUser) -> SqlT.taskUserUserId taskUser)
<$> runSQL (P.selectList [SqlT.TaskUserTaskId ==. taskId] [])
turns <- map JsonTurn.jsonTurn
<$> runSQL (P.selectList [SqlT.TurnTaskId ==. taskId, SqlT.TurnFinishedAt ==. Nothing] [Asc SqlT.TurnStartDate])
currentTime <- liftIO getCurrentTime
let splitTurns = case turns of
[] -> (Nothing, [])
(nextTurn:otherTurns) ->
if currentTime > JsonTurn.startDate nextTurn then
(Just nextTurn, otherTurns)
else
(Nothing, otherTurns)
fun $ JsonTask.jsonTask users splitTurns theTask
getTasksAction :: ListContains n Email xs => ApiAction (HVect xs) a
getTasksAction =
json =<< mapM (getTaskInfo return) =<< runSQL (selectList [] [Asc SqlT.TaskId])
getTaskAction :: ListContains n Email xs
=> Entity SqlT.Task -> ApiAction (HVect xs) a
getTaskAction = getTaskInfo json
deleteTaskAction :: ListContains n Email xs
=> SqlT.TaskId -> ApiAction (HVect xs) a
deleteTaskAction taskId = do
runSQL $ P.delete taskId
emptyResponse
finishTaskAction :: ListContains n Email xs
=> SqlT.TaskId -> ApiAction (HVect xs) a
finishTaskAction taskId = do
currentTime <- liftIO getCurrentTime
runSQL $ P.updateWhere
[ SqlT.TurnTaskId ==. taskId
, SqlT.TurnFinishedAt ==. Nothing
, SqlT.TurnStartDate <. currentTime
]
[SqlT.TurnFinishedAt =. Just currentTime]
emptyResponse
putTaskAction :: ListContains n Email xs
=> SqlT.TaskId -> JsonTaskIn.Task -> ApiAction (HVect xs) a
-- TODO combine with postTaskAction
-- TODO maybe to JsonTaskIn.Task with argument pattern matching as well
putTaskAction taskId task = do
_task <- trySqlGet taskId -- we just want to check if it is there TODO maybe think of something better
runSQL $ P.replace taskId SqlT.Task {
SqlT.taskTitle = JsonTaskIn.title task
, SqlT.taskDescription = JsonTaskIn.description task
, SqlT.taskFrequency = JsonTaskIn.frequency task
, SqlT.taskCompletionTime = JsonTaskIn.completionTime task
}
returnNewTask taskId
postTasksAction :: ListContains n Email xs
=> JsonTaskIn.Task -> ApiAction (HVect xs) a
postTasksAction task = do
-- post actual Task
taskId <- runSQL $ insert SqlT.Task {
SqlT.taskTitle = JsonTaskIn.title task
, SqlT.taskDescription = JsonTaskIn.description task
, SqlT.taskFrequency = JsonTaskIn.frequency task
, SqlT.taskCompletionTime = JsonTaskIn.completionTime task
}
-- post new TaskUsers
let users = JsonTaskIn.users task -- non emptiness is assured by JSON parsing
let insertIt userId = runSQL $ insertUnique SqlT.TaskUser { -- TODO check if user exists
SqlT.taskUserTaskId = taskId
, SqlT.taskUserUserId = PSql.toSqlKey . fromInteger $ userId
}
_ <- mapM insertIt users -- TODO check return value including Maybes
updateTurns
returnNewTask taskId
updateTurnsAction :: ListContains n Email xs => ApiAction (HVect xs) a
updateTurnsAction = do
updateTurns
emptyResponse
updateTurns :: ApiAction ctx ()
updateTurns = do
users <- runSQL $ P.selectList [ SqlT.UserDisabled ==. False ] []
let userIds = map (\(Entity key _val) -> key) users
tasks :: [Entity SqlT.Task] <- runSQL $ P.selectList [] []
mapM_ (updateTaskTurns userIds) tasks
-- |If this endpoint is called it will check if a Task is in need of new turns
-- and will create those. This is the case if a Task has no unfinished turns.
updateTaskTurns :: [Key SqlT.User] -> Entity SqlT.Task -> ApiAction ctx a
updateTaskTurns users (Entity taskId task) = do
currentTime <- liftIO getCurrentTime
unfinishedTurns <-
runSQL $ P.selectList [ SqlT.TurnTaskId ==. taskId
, SqlT.TurnFinishedAt ==. Nothing
] []
finishedTurns <-
runSQL $ P.selectList [ SqlT.TurnTaskId ==. taskId
, SqlT.TurnFinishedAt !=. Nothing
]
[ Asc SqlT.TurnStartDate ]
taskUsersValue :: [E.Value (Key SqlT.User)] <-
runSQL $ E.select $
E.from $ \(taskUser `E.InnerJoin` turn) -> do
E.on ( taskUser E.^. SqlT.TaskUserTaskId
E.==. turn E.^. SqlT.TurnTaskId)
E.orderBy [E.desc (turn E.^. SqlT.TurnStartDate)]
return (turn E.^. SqlT.TurnUserId)
let taskUsers :: [Key SqlT.User] = map E.unValue taskUsersValue -- TODO incorporate in line above with fmapping
case nextUser unfinishedTurns taskUsers of
Just nextUser' -> do
_ <- runSQL $ insert SqlT.Turn {
SqlT.turnUserId = nextUser'
, SqlT.turnTaskId = taskId
, SqlT.turnStartDate = startDate currentTime finishedTurns
, SqlT.turnFinishedAt = Nothing
}
return undefined
Nothing -> return undefined -- WARNING should never be evaluated!!!
where
startDate currentTime [] = addUTCTime (1800::NominalDiffTime) currentTime
startDate _currTime (Entity _key lastTurn : _xs) =
addUTCTime (realToFrac $ SqlT.taskFrequency task)
(fromJust $ SqlT.turnFinishedAt lastTurn)
nextUser [] taskUsers =
      -- head should never fail here: if taskUsers is empty, `users \\ taskUsers`
      -- is non-empty, so `head taskUsers` is never reached
Just $ fromMaybe (head taskUsers) $ listToMaybe $ users \\ taskUsers
nextUser _unfinishedTurns _ = Nothing
returnNewTask :: SqlT.TaskId -> ApiAction ctx a
returnNewTask taskId = do
newTask <- trySqlSelectFirst' SqlT.TaskId taskId
setStatus created201
let location = sformat ("/tasks/" % int) (integerKey taskId :: Integer)
setHeader "Location" location
getTaskInfo json newTask
|
flatrapp/core
|
app/Web/Endpoints/Tasks.hs
|
apache-2.0
| 8,487 | 0 | 17 | 2,384 | 2,129 | 1,095 | 1,034 | 158 | 4 |
module Twiddle where
main = let foo@(~(x,y)) = (1,2)
in print foo
|
metaborg/jsglr
|
org.spoofax.jsglr/tests-offside/terms/doaitse/twiddle.hs
|
apache-2.0
| 74 | 0 | 12 | 20 | 43 | 24 | 19 | 3 | 1 |
module NLP.Adict.Brute
( bruteSearch
) where
import Data.Maybe (mapMaybe)
import NLP.Adict.Core
import NLP.Adict.Dist
-- | Find all words within a list whose restricted generalized edit distance
-- from x is less than or equal to k.
bruteSearch :: Cost a -> Double -> Word a
-> [(Word a, b)] -> [(Word a, b, Double)]
bruteSearch cost k x =
mapMaybe check
where
check (y, v)
| dist <= k = Just (y, v, dist)
| otherwise = Nothing
where
dist = editDist cost x y
|
kawu/adict
|
src/NLP/Adict/Brute.hs
|
bsd-2-clause
| 507 | 0 | 11 | 138 | 166 | 91 | 75 | 13 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Arbitrary.Suite where
import Data.Suite
--import Data.Test hiding (fromBuf)
import Arbitrary.TestModule
import Test.QuickCheck
import Prelude hiding (FilePath)
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Lens
import Data.Integrated.TestModule
genSuite :: Int -> Map -> Gen Suite
genSuite 0 mask =
return . Suite $ mask
genSuite num_tests mask = do
(p, tm) <- toGenerated (choose ('a', 'z')) (S.fromList . M.keys $ mask)
-- (p, t) <- testGen (choose ('a', 'z')) (S.fromList . M.keys $ mask)
genSuite (num_tests - 1) $
M.insert (view modpath tm) (p, view properties tm) mask
|
jfeltz/tasty-integrate
|
tests/Arbitrary/Suite.hs
|
bsd-2-clause
| 674 | 0 | 12 | 119 | 194 | 109 | 85 | 17 | 1 |
#!/usr/bin/env stack
{- stack
--resolver lts-7.1
--install-ghc
runghc
-}
import Data.List (sort)
data Var = Var Int Int Int
data Lit = PLit Var
| NLit Var
type Clause = [Lit]
showVar :: Var -> String
showVar (Var a b c) = show a ++ show b ++ show c
showLit :: Lit -> String
showLit (PLit v) = showVar v
showLit (NLit v) = "-" ++ showVar v
showClause :: Clause -> String
showClause = unwords . (++ ["0"]) . map showLit
atLeastOneInEntry :: [Clause]
atLeastOneInEntry = [aloie x y | x <- [1..9], y <- [1..9]]
where aloie a b = [PLit (Var f a b) | f <- [1..9]]
atMostOne :: [Var] -> [Clause]
atMostOne (a:as) = [[NLit a, NLit b] | b <- as] ++ atMostOne as
atMostOne [] = []
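-- Editorial check (not part of the original script): atMostOne is the pairwise
-- encoding, emitting one binary clause per unordered pair of variables,
-- i.e. n*(n-1)/2 clauses for n variables.
atMostOneExample :: Bool
atMostOneExample = length (atMostOne [Var 1 1 d | d <- [1..3]]) == 3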
atMostOneInEntry :: [Clause]
atMostOneInEntry = concat [atMostOne [Var a b c | a <- [1..9]] | b <- [1..9], c <- [1..9]]
atLeastOneInRow :: [Clause]
atLeastOneInRow = [[PLit (Var a b c) | b <- [1..9]] | a <-[1..9], c <- [1..9]]
atLeastOneInColumn :: [Clause]
atLeastOneInColumn = [[PLit (Var a b c) | c <- [1..9]] | a <-[1..9], b <- [1..9]]
atLeastOneInSquare :: [Clause]
atLeastOneInSquare = [[PLit (Var a b c) | (b, c) <- sq] | a <- [1..9], sq <- squares]
squares = [[(a, b) | a <- [1..3], b <- [1..3]]
,[(a, b) | a <- [4..6], b <- [1..3]]
,[(a, b) | a <- [7..9], b <- [1..3]]
,[(a, b) | a <- [1..3], b <- [4..6]]
,[(a, b) | a <- [4..6], b <- [4..6]]
,[(a, b) | a <- [7..9], b <- [4..6]]
,[(a, b) | a <- [1..3], b <- [7..9]]
,[(a, b) | a <- [4..6], b <- [7..9]]
,[(a, b) | a <- [7..9], b <- [7..9]]]
atMostOneInRow :: [Clause]
atMostOneInRow = concat [atMostOne [Var a b c | b <- [1..9]] | a <- [1..9], c <- [1..9]]
atMostOneInColumn :: [Clause]
atMostOneInColumn = concat [atMostOne [Var a b c | c <- [1..9]] | a <- [1..9], b <- [1..9]]
atMostOneInSquare :: [Clause]
atMostOneInSquare = concat [atMostOne [Var a b c | (b, c) <- sq] | a <- [1..9], sq <- squares]
minCoding :: [Clause]
minCoding = atLeastOneInEntry
++ atMostOneInRow
++ atMostOneInColumn
++ atMostOneInSquare
maxCoding :: [Clause]
maxCoding = atMostOneInEntry
++ atLeastOneInRow
++ atLeastOneInColumn
++ atLeastOneInSquare
printSudoku :: String -> IO ()
printSudoku s = printSudoku' s 9 10
printSudoku' :: String -> Int -> Int -> IO ()
printSudoku' [] _ _ = putStrLn ""
printSudoku' (s:ss) a 0 = do
putStrLn ""
printSudoku' (s:ss) (a - 1) 10
printSudoku' (s:ss) a 10 | a == 6 || a == 3 = do
putStrLn "---------------------"
printSudoku' (s:ss) a 9
printSudoku' (s:ss) a 10 = printSudoku' (s:ss) a 9
printSudoku' (s:ss) a b | b == 7 || b == 4 = do
putStr $ s : ' ' : '|' : " "
printSudoku' ss a (b - 1)
printSudoku' (s:ss) a b = do
putStr $ s : " "
printSudoku' ss a (b - 1)
showDIMACS :: Int -> [Clause] -> [String]
showDIMACS nVars cs = ("p cnf " ++ show nVars ++ " " ++ show (length cs)) : map showClause cs
cnfInput :: String -> [Clause]
cnfInput s = [[PLit (Var (read [a]) b c)] | (a, (b, c)) <- zip s [(x, y) | x <-[1..9], y <- [1..9]], a /= '.']
readSolution :: String -> String
readSolution = phrase2 . phrase1 . words . phrase0 . lines
where phrase1 (('-':_):ws) = phrase1 ws
phrase1 ("v":ws) = phrase1 ws
phrase1 ("0":ws) = []
phrase1 ([a,b,c]:ws) = (read [b] :: Int, read [c] :: Int, read [a] :: Int) : phrase1 ws
phrase2 ws = concatMap (show . (\(_, _, a) -> a)) $ sort ws
phrase0 (l@('v':' ':_):_) = l
phrase0 (_:ls) = phrase0 ls
main :: IO ()
main = do
--problems <- (take 10000 . lines) <$> readFile "sudoku-inputs.txt"
--let pn = 5999
--let p1 = problems !! (pn - 1)
--printSudoku p1
--putStrLn ""
--putStrLn . unlines . showDIMACS 999 $ cnfInput p1 ++ minCoding
--mapM_ (uncurry writeFile) . zip (map ((++ ".cnf") . ("maxcoding/" ++) . show) [1..]) $ map (\p -> unlines . showDIMACS 999 $ cnfInput p ++ maxCoding) problems
--mapM_ (uncurry writeFile) . zip (map ((++ ".cnf") . ("mincoding/" ++) . show) [1..]) $ map (\p -> unlines . showDIMACS 999 $ cnfInput p ++ minCoding) problems
--solution <- readFile $ "maxcoding/" ++ show pn ++ ".sol"
--printSudoku $ readSolution solution
--putStrLn ""
--solution <- readFile $ "mincoding/" ++ show pn ++ ".sol"
--printSudoku $ readSolution solution
putStrLn "maxcoding"
times <- (map (read :: String -> Double) . words) <$> readFile "maxtimes"
putStrLn "avg:"
print $ sum times / 10000.0
putStrLn "min:"
print $ minimum times
putStrLn "max:"
print $ maximum times
putStrLn "mincoding"
times <- (map (read :: String -> Double) . words) <$> readFile "mintimes"
putStrLn "avg:"
print $ sum times / 10000.0
putStrLn "min:"
print $ minimum times
putStrLn "max:"
print $ maximum times
|
tomasmcz/discrete-opt
|
misc/sudoku.hs
|
bsd-3-clause
| 4,817 | 6 | 13 | 1,191 | 2,188 | 1,160 | 1,028 | 99 | 5 |
{-# LANGUAGE DataKinds, TemplateHaskell #-}
module HLearn.Optimization.NewtonRaphson
where
import Control.DeepSeq
import Control.Monad
import Control.Monad.Random
import Control.Monad.ST
import Data.List
import Data.List.Extras
import Data.Typeable
import Debug.Trace
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Data.Vector.Algorithms.Intro as Intro
import Numeric.LinearAlgebra hiding ((<>))
import Control.Lens
-- import SubHask.Category.Algebra.HMatrix
import HLearn.Algebra.LinearAlgebra as LA
import HLearn.Algebra
import HLearn.History
import HLearn.Optimization.Common
import qualified HLearn.Optimization.LineMinimization as LineMin
-------------------------------------------------------------------------------
-- data types
data NewtonRaphson a = NewtonRaphson
{ __x1 :: !(Tensor 1 a)
, __fx1 :: !(Tensor 0 a)
, __fx0 :: !(Tensor 0 a)
, __f'x1 :: !(Tensor 1 a)
, __alpha1 :: !(Tensor 0 a)
, __f :: !(Tensor 1 a -> Tensor 0 a)
}
deriving (Typeable)
makeLenses ''NewtonRaphson
type instance Scalar (NewtonRaphson a) = Scalar a
instance (ValidTensor a) => Has_f NewtonRaphson a where flens = _f
instance (ValidTensor a) => Has_x1 NewtonRaphson a where x1 = _x1
instance (ValidTensor a) => Has_fx1 NewtonRaphson a where fx1 = _fx1
instance (ValidTensor a) => Has_fx0 NewtonRaphson a where fx0 = _fx0
instance (ValidTensor a) => Has_f'x1 NewtonRaphson a where f'x1 = _f'x1
instance (ValidTensor a) => Has_stepSize NewtonRaphson a where stepSize = _alpha1
deriving instance (Typeable Matrix)
-------------------------------------------------------------------------------
displayNewtonRaphson :: forall a. Typeable a => a -> DisplayFunction
displayNewtonRaphson _ = mkDisplayFunction go
where
go :: NewtonRaphson a -> String
go _ = "NewtonRaphson"
-- displayNewtonRaphson_fx1 :: DisplayFunction
-- displayNewtonRaphson_fx1 = mkDisplayFunction $ go
-- where
-- go :: NewtonRaphson (VS.Vector Double) -> String
-- go = show . __fx1
-------------------------------------------------------------------------------
newtonRaphson ::
( ValidTensor v
, IsScalar (Scalar v)
, Tensor 2 v ~ LA.Matrix (Scalar v)
, Tensor 1 v ~ LA.Vector (Scalar v)
, Ord (Scalar v)
, Typeable (Scalar v)
, Typeable v
) => (Tensor 1 v -> Tensor 0 v)
-> (Tensor 1 v -> Tensor 1 v)
-> (Tensor 1 v -> Tensor 2 v)
-- -> Tensor 1 v
-> v
-> [NewtonRaphson v -> History Bool]
-> History (NewtonRaphson v)
newtonRaphson f f' f'' x0 = optimize
-- (projectOrthant <=< step_newtonRaphson f f' f'')
(step_newtonRaphson f f' f'')
$ NewtonRaphson
{ __x1 = mkTensor x0
, __fx1 = f $ mkTensor x0
, __fx0 = infinity
, __f'x1 = f' $ mkTensor x0
, __alpha1 = 1
, __f = f
}
-- FIXME: broken by new History
-- projectOrthant opt1 = do
-- mopt0 <- prevValueOfType opt1
-- return $ case mopt0 of
-- Nothing -> opt1
-- Just opt0 -> set x1 (VG.zipWith go (opt0^.x1) (opt1^.x1)) $ opt1
-- where
-- go a0 a1 = if (a1>=0 && a0>=0) || (a1<=0 && a0<=0)
-- then a1
-- else 0
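-- Editorial summary (not part of the original source): a single step below
-- computes the regularised Newton direction dir = -(f''(x0) + reg*I)^-1 * f'(x0)
-- (with reg fixed to 1) and chooses the step size alpha by a bracketed Brent
-- line search along dir, giving x1 = x0 + alpha*dir.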
step_newtonRaphson ::
( ValidTensor v
, IsScalar (Scalar v)
, Tensor 2 v ~ LA.Matrix (Scalar v)
, Tensor 1 v ~ LA.Vector (Scalar v)
, Ord (Scalar v)
, Typeable (Scalar v)
, Typeable v
) => (Tensor 1 v -> Tensor 0 v)
-> (Tensor 1 v -> Tensor 1 v)
-> (Tensor 1 v -> Tensor 2 v)
-> NewtonRaphson v
-> History (NewtonRaphson v)
step_newtonRaphson f f' f'' opt = do
let x0 = opt^.x1
fx0 = opt^.fx1
f'x0 = opt^.f'x1
f''x0 = f'' x0
alpha0 = opt^.stepSize
let reg=1
dir = (-1) .* (LA.inv (f''x0 <> reg .* LA.eye (LA.rows f''x0)) `LA.mul` f'x0)
alpha <- do
let g y = f $ x0 <> y .* dir
bracket <- LineMin.lineBracket g (alpha0/2) (alpha0*2)
brent <- LineMin.brent g bracket
[ lowerBound fx0
, maxIterations 100
, fx1grows
]
return $ LineMin._x brent
let x1 = x0 <> alpha .* dir
return $ NewtonRaphson
{ __x1 = x1
, __fx1 = f x1
, __fx0 = opt^.fx1
, __f'x1 = f' x1
, __alpha1 = alpha
, __f = f
}
-------------------------------------------------------------------------------
step_gradient getdir getstep f f' f'' opt = do
dir <- getdir f f' f'' opt
step <- getstep dir f f' f'' opt
let x = opt^.x1 <> step .* dir
return $ set x1 x opt
quadraticLineSearch dir f f' f'' opt = do
let g y = f $ (opt^.x1) <> y .* dir
bracket <- LineMin.lineBracket g (opt^.stepSize/2) (opt^.stepSize*2)
brent <- LineMin.brent g bracket
[ lowerBound $ opt^.fx1
, maxIterations 100
]
return $ brent^.x1
dir_newtonRaphson f f' f'' x0 = dir_unsafeNewtonRaphson 1 f f' f'' x0
dir_unsafeNewtonRaphson ::
( Tensor 2 v ~ LA.Matrix (Scalar v)
, ValidTensor v
) => Tensor 0 v
-> (Tensor 1 v -> Tensor 0 v)
-> (Tensor 1 v -> Tensor 1 v)
-> (Tensor 1 v -> Tensor 2 v)
-> NewtonRaphson v
-> History (Tensor 1 v)
dir_unsafeNewtonRaphson reg f f' f'' opt = do
let f'x0 = opt^.f'x1
f''x0 = f'' (opt^.x1)
return $ (-1) .* (LA.inv (f''x0 <> reg .* LA.eye (LA.rows f''x0)) `LA.mul` f'x0)
|
ehlemur/HLearn
|
src/HLearn/Optimization/NewtonRaphson.hs
|
bsd-3-clause
| 5,678 | 5 | 19 | 1,491 | 1,744 | 928 | 816 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module HipBot.Naggy.Resources
( remindersResource
, reminderResource
) where
import Control.Applicative
import Control.Lens
import Control.Monad.IO.Class
import Control.Monad.Trans
import qualified Data.Aeson as A
import qualified Data.Aeson.Encode as A
import qualified Data.Aeson.Types as A
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Lazy.UTF8 as LB
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy.Builder as LT
import qualified Data.UUID as UUID
import qualified Data.UUID.V4 as UUID
import qualified Network.Wai as Wai
import Prelude
import Webcrank.Wai
import HipBot
import HipBot.Naggy.API as Naggy
import HipBot.Naggy.Session
import HipBot.Naggy.Types
remindersResource :: NaggyResource
remindersResource = resource
{ allowedMethods = return [ methodGet, methodPost ]
, isAuthorized = lift checkAuthorization
, postAction = liftIO UUID.nextRandom <&>
PostCreate . pure . T.decodeUtf8 . UUID.toASCIIBytes
, contentTypesAccepted = return [("application/json", putReminder)]
, contentTypesProvided = return [("application/json", getReminders)]
}
parseReminder :: ReminderId -> OAuthId -> RoomId -> LB.ByteString -> HaltT NaggyCrank Reminder
parseReminder rid oid room b =
let
parser = A.parseJSON . A.Object .
HashMap.insert "id" (A.toJSON rid) .
HashMap.insert "oauthId" (A.toJSON oid) .
HashMap.insert "roomId" (A.toJSON room)
r = do
v <- A.eitherDecode b
A.parseEither (A.withObject "object" parser) v
failure err = do
writeLBS . LB.fromString $ err
halt badRequest400
in
either failure return r
putReminder :: HaltT NaggyCrank ()
putReminder = withSession $ \(oid, room) -> do
[rid] <- getDispatchPath
r <- parseReminder rid oid room =<< liftIO . Wai.lazyRequestBody =<< view request
lift . lift $ do
insertReminder r
reminder ?= r
putResponseHeader hContentType "application/json"
writeLBS . A.encode $ r
getReminders :: HaltT NaggyCrank Body
getReminders = withSession $ \(oid, _) -> do
rs <- lift . lift $ lookupReminders oid
return . lazyTextBody . LT.toLazyText . A.encodeToTextBuilder . A.toJSON $ rs
-- TODO allow PUT
reminderResource :: T.Text -> NaggyResource
reminderResource rid = resource
{ allowedMethods = return [ methodDelete ]
, isAuthorized = lift checkAuthorization >>= \a -> case a of
Authorized -> withSession $ \(oid, _) -> lift . lift $ do
mr <- lookupReminder oid rid
case mr of
Just r | r ^. oauthId == oid ->
(Authorized <$) . assign reminder . Just $ r
_ -> return . Unauthorized $ "Naggy"
_ -> return a
, contentTypesProvided = return [("application/json", withReminder $ return . A.encode)]
, deleteResource = withSession $ \(oid, _) -> lift . lift $ True <$ deleteReminder oid rid
}
withReminder :: (Reminder -> HaltT NaggyCrank a) -> HaltT NaggyCrank a
withReminder f = maybe (halt notFound404) f =<< (lift . lift . use) reminder
|
purefn/naggy
|
src/HipBot/Naggy/Resources.hs
|
bsd-3-clause
| 3,136 | 0 | 24 | 612 | 961 | 526 | 435 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Aws.DynamoDB.Json.TypeHelper where
import Debug.Trace
import Prelude hiding (lookup, keys)
import Data.Maybe
import Control.Monad
import Control.Applicative
import Data.String
--import Test.QuickCheck.Arbitrary.ToolShed.Test.QuickCheck.Arbitrary.Map
--import ToolShed.Test.QuickCheck.Arbitrary.Map
import qualified Data.Map.Lazy as Map
import qualified Data.HashMap.Lazy as H
import Data.Attoparsec.Number (Number(..))
import Data.Aeson hiding (Value)
--import qualified Data.Aeson.Functions as A (mapHashKey)
import qualified Data.Aeson.Types as A
import qualified Data.Text as T
import qualified Test.QuickCheck as QC
import Safe
import qualified Data.Vector as V
import Aws.DynamoDB.Json.BasicTypes
--------------
decodeValue :: FromJSON a => A.Value -> Map.Map T.Text a
decodeValue (Object o) = Map.fromList . H.toList
$ H.map unRight
$ H.filter isRight
$ H.map (eitherDecode . encode) o
where
isRight(Right _) = True
isRight _ = False
unRight (Right x) = x
decodeValue _ = error "decodeValue mismatch."
parseObjectByName :: (FromJSON a) => A.Value -> T.Text -> A.Parser (Maybe a)
parseObjectByName o t = case o of
(Object v) -> v .:? t
_ -> mzero
lookup :: T.Text -> A.Value -> Maybe A.Value
lookup k (Object v) = H.lookup k v
lookup k x = error $ show x ++ "\n" ++ show k
keys :: A.Value -> Maybe [T.Text]
keys (Object v) = Just . H.keys $ v
keys _ = Nothing
take1 :: [a] -> Maybe a
take1 [] = Nothing
take1 (x:xs) = Just x
deepValue :: A.Value -> [T.Text] -> Maybe A.Value
deepValue a ts = deepObj a ts Nothing
where
deepObj _ [] r = r
deepObj v (x:xs) r = do
o' <- lookup x v
deepObj o' xs (Just o')
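-- Editorial example (not part of the original module): 'deepValue' walks a key
-- path through nested JSON objects, returning the value found at the end.
deepValueExample :: Bool
deepValueExample =
    deepValue (object ["a" .= object ["b" .= True]]) ["a", "b"]
        == Just (Bool True)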
--------------
--------- Code below is derived from aws-elastictranscoding
-------------
data DdbServiceError = DDB { message :: T.Text, type_::T.Text }
deriving (Show,Eq)
instance FromJSON DdbServiceError where
parseJSON (Object v) = DDB <$> v .: "Message" <*> v .: "__type"
parseJSON _ = mzero
instance ToJSON DdbServiceError where
toJSON (DDB msg_ type_) =
object
[ "Message" .= msg_
, "__type" .= type_
]
instance QC.Arbitrary DdbServiceError where
  arbitrary = DDB <$> (T.pack <$> QC.arbitrary) <*> (T.pack <$> QC.arbitrary)
--
-- | 'success'
--
newtype SUCCESS = SUCCESS { _SUCCESS :: Bool }
deriving (Show,Eq)
instance FromJSON SUCCESS where
parseJSON (Object v) = SUCCESS <$> v .: "success"
parseJSON _ = mzero
instance ToJSON SUCCESS where
toJSON = Bool . _SUCCESS
instance QC.Arbitrary SUCCESS where
arbitrary = SUCCESS <$> QC.arbitrary
------------------------------------------------------------------------------
--
-- Parser Toolkit
--
------------------------------------------------------------------------------
json_str_map_p :: Ord a => Map.Map T.Text a -> A.Value -> A.Parser a
json_str_map_p mp = json_string_p $ flip Map.lookup mp
json_string_p :: Ord a => (T.Text->Maybe a) -> A.Value -> A.Parser a
json_string_p p (String t) | Just val <- p t = return val
| otherwise = mzero
json_string_p _ _ = mzero
text_map :: (Ord a,Bounded a,Enum a) => (a->T.Text) -> Map.Map T.Text a
text_map f = Map.fromList [ (f x,x) | x<-[minBound..maxBound] ]
read_p :: Read a => T.Text -> A.Parser a
read_p txt = maybe mzero return $ readMay $ T.unpack txt
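-- A small, self-contained illustration of how 'text_map' and 'json_str_map_p'
-- fit together. 'ExampleColour' is a hypothetical type added purely for this
-- sketch; it is not part of the DynamoDB bindings.
data ExampleColour = ExampleRed | ExampleGreen | ExampleBlue
    deriving (Show, Eq, Ord, Bounded, Enum)

-- | Parses the JSON strings "Red", "Green" and "Blue"; anything else -> mzero.
exampleColour_p :: A.Value -> A.Parser ExampleColour
exampleColour_p = json_str_map_p $ text_map (T.pack . drop 7 . show)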
------------------------------------------------------------------------------
--
-- QC Toolkit
--
------------------------------------------------------------------------------
poss :: QC.Gen a -> QC.Gen (Maybe a)
poss gen = QC.frequency
[ (,) 1 $ QC.elements [Nothing]
, (,) 20 $ Just <$> gen
]
nat_pair :: QC.Gen (Int,Int)
nat_pair = two $ QC.sized $ \n -> QC.choose (0, n)
two :: QC.Gen a -> QC.Gen (a,a)
two gen = (,) <$> gen <*> gen
|
ywata/dynamodb
|
Aws/DynamoDB/Json/TypeHelper.hs
|
bsd-3-clause
| 4,270 | 0 | 11 | 1,183 | 1,338 | 709 | 629 | 85 | 2 |
module Hive.Tile
( Tile(..)
, Cell
, cellOwner
) where
import Mitchell.Prelude
import Hive.Bug
import Hive.Player
import Data.Aeson
-- | A tile is a Bug that belongs to a Player.
data Tile = Tile
{ tilePlayer :: Player
, tileBug :: Bug
} deriving (Eq, Ord, Show)
instance ToJSON Tile where
toJSON Tile{..} = object
[ ("player", toJSON tilePlayer)
, ("bug", toJSON tileBug)
]
instance FromJSON Tile where
parseJSON = withObject "object" $ \o ->
Tile
<$> o .: "player"
<*> o .: "bug"
tilePlayerL :: Lens' Tile Player
tilePlayerL = lens tilePlayer (\x y -> x { tilePlayer = y })
-- A single cell is a stack of tiles, where the head of the list represents the
-- top of the stack. Per the game rules, only a beetle or a mosquito can ever
-- sit on top of another tile.
type Cell = [Tile]
-- | Who "owns" this cell? (Meaning, whose tile is on top?)
cellOwner :: Cell -> Maybe Player
cellOwner = preview (ix 0 . tilePlayerL)
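-- A hedged usage sketch (the constructor names below are assumptions, since
-- 'Hive.Player' and 'Hive.Bug' are not shown here): if 'Player' had constructors
-- White and Black, and 'Bug' had Beetle and Ant, then
--
-- > cellOwner [Tile White Beetle, Tile Black Ant] == Just White
-- > cellOwner []                                  == Nothing
--
-- i.e. whichever tile is at the head of the list (the top of the stack)
-- decides ownership.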
|
mitchellwrosen/hive
|
hive/src/Hive/Tile.hs
|
bsd-3-clause
| 969 | 0 | 11 | 236 | 249 | 141 | 108 | -1 | -1 |
module PrintAudio where
import System.FilePath
import Data.Audio
import Codec.Wav
import Data.Int
import Types.Common
import Data.Array.IArray
printAudio :: FilePath -> IO ()
printAudio fPath = do
file <- importFile fPath :: IO (Either String (AudioFormat))
case file of
Right f -> printAsMatlab $ wavList f
Left e -> error e
printAsMatlab :: [(Double,Float)] -> IO ()
printAsMatlab as =
  mapM_ (\(sample, time) -> putStrLn (show time ++ " " ++ show sample)) as
wavList :: AudioFormat -> [(Double,Float)]
wavList wav = let
l1 = sampleData wav
in zip (take (16384*10) $ elems $ amap toSample l1) [(1/44100),(2/44100)..]
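-- Note (descriptive only): the time axis above assumes a fixed 44.1 kHz sample
-- rate, and the output is truncated to the first 16384*10 = 163840 samples;
-- both constants are hard-coded in 'wavList'.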
|
aedanlombardo/HaskellPS
|
DSP-PBE/Tests/PrintAudio.hs
|
bsd-3-clause
| 597 | 0 | 14 | 109 | 276 | 145 | 131 | 19 | 2 |
{-# OPTIONS_GHC -Wall #-}
module CycleRecentWS where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import XMonad.Actions.CycleRecentWS (unView)
import XMonad.StackSet (view, greedyView, mapLayout)
import Instances
import Utils (tags)
spec :: Spec
spec = do
prop "prop_unView" prop_unView
prop_unView :: T -> Property
prop_unView ss = conjoin
[ counterexample desc (unView ss (state (v t ss)) === state ss)
| t <- tags ss
, (desc, v) <- [("view " <> show t, view), ("greedyView " <> show t, greedyView)] ]
where
state = mapLayout succ
|
xmonad/xmonad-contrib
|
tests/CycleRecentWS.hs
|
bsd-3-clause
| 585 | 0 | 14 | 110 | 201 | 111 | 90 | 18 | 1 |
-- |
-- Module : Data.SExpr
-- Copyright : Robert Atkey 2010
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Simple representation of S-expressions, and pretty printing
-- thereof. Mainly used for generation of Emacs Lisp code.
module Data.SExpr
( SExpr (..)
, ShowSExpr (showSExpr)
, cond
, pprint
)
where
import Text.PrettyPrint
class ShowSExpr a where
showSExpr :: a -> SExpr
data SExpr = Atom String
| IntConst Int
| SExpr [SExpr]
instance ShowSExpr SExpr where
showSExpr = id
cond :: [(SExpr, SExpr)] -> SExpr
cond clauses = SExpr (Atom "cond" : map (\(t,e) -> SExpr [ t, e]) clauses)
pprint :: SExpr -> Doc
pprint (Atom s) = text s
pprint (IntConst i) = int i
pprint (SExpr []) = lparen <> rparen
pprint (SExpr [x]) = parens $ pprint x
pprint (SExpr (Atom "defconst":x:xs))
= parens $ (text "defconst" <+> pprint x)
$$ nest 1 (vcat (map pprint xs))
pprint (SExpr (Atom "defun":Atom x:SExpr args:body))
= parens $ (text "defun" <+> text x <+> parens (hsep $ map pprint args))
$$ nest 1 (vcat (map pprint body))
pprint (SExpr (Atom "let":SExpr l:xs))
= parens $ (text "let" <+> parens (vcat $ map pprint l))
$$ nest 1 (vcat (map pprint xs))
pprint (SExpr (x:xs)) = parens $ pprint x <+> sep (map pprint xs)
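-- A minimal illustration (added for this sketch; not part of the original API):
-- rendering a one-clause 'cond' form with the pretty-printer above. 'render'
-- comes from "Text.PrettyPrint", which is already imported.
exampleCond :: String
exampleCond = render (pprint (cond [(Atom "t", IntConst 1)]))
-- exampleCond == "(cond (t 1))"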
|
bobatkey/Forvie
|
src/Data/SExpr.hs
|
bsd-3-clause
| 1,434 | 0 | 12 | 383 | 542 | 282 | 260 | 32 | 1 |
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ExistentialQuantification #-}
module Feldspar.Compiler.Compiler (
compileToCCore
, compileToCCore'
, defaultOptions
, sicsOptions
, sicsOptions2
, sicsOptions3
, c99PlatformOptions
, c99OpenMpPlatformOptions
, tic64xPlatformOptions
, SplitModule(..)
, CompiledModule(..)
) where
import Data.List (partition)
import Data.Maybe (fromMaybe)
import Feldspar.Core.Constructs (SyntacticFeld)
import Feldspar.Core.Interpretation (defaultFeldOpts, FeldOpts(..), Target(..))
import Feldspar.Compiler.Backend.C.Library
import Feldspar.Compiler.Backend.C.Options
import Feldspar.Compiler.Backend.C.Platforms
import Feldspar.Compiler.Backend.C.CodeGeneration
import Feldspar.Compiler.Backend.C.MachineLowering
import Feldspar.Compiler.Backend.C.Tic64x
import Feldspar.Compiler.Imperative.FromCore
import Feldspar.Compiler.Imperative.Representation
data SplitModule = SplitModule
{ implementation :: CompiledModule
, interface :: CompiledModule
}
data CompiledModule = CompiledModule {
sourceCode :: String,
debugModule :: Module ()
}
-- | Split a module into interface and implementation.
splitModule :: Module () -> (Module (), Module ())
splitModule m = (Module (hdr ++ createProcDecls (entities m)), Module body)
where
(hdr, body) = partition belongsToHeader (entities m)
belongsToHeader :: Entity () -> Bool
belongsToHeader StructDef{} = True
belongsToHeader Proc{..} | Nothing <- procBody = True
belongsToHeader _ = False
    -- TODO These only belong in the header iff the types are used in a
-- function interface
createProcDecls :: [Entity ()] -> [Entity ()]
createProcDecls = concatMap defToDecl
defToDecl :: Entity () -> [Entity ()]
defToDecl (Proc n False inp outp _) = [Proc n False inp outp Nothing]
defToDecl _ = []
compileSplitModule :: Options -> (Module (), Module ()) -> SplitModule
compileSplitModule opts (hmdl, cmdl)
= SplitModule
{ interface = CompiledModule { sourceCode = incls ++ hres
, debugModule = hmdl
}
, implementation = CompiledModule { sourceCode = cres
, debugModule = cmdl
}
}
where
hres = compToCWithInfos opts hmdl
cres = compToCWithInfos opts cmdl
incls = genIncludeLines opts Nothing
-- | Compiler core.
-- Everything should call this function and only do a trivial interface adaptation.
-- Do not duplicate.
compileToCCore :: SyntacticFeld c => String -> Options -> c -> SplitModule
compileToCCore name opts prg = compileToCCore' opts mod
where
mod = fromCore opts (encodeFunctionName name) prg
compileToCCore' :: Options -> Module () -> SplitModule
compileToCCore' opts m = compileSplitModule opts $ splitModule mod
where
mod = adaptTic64x opts $ rename opts False m
genIncludeLines :: Options -> Maybe String -> String
genIncludeLines opts mainHeader = concatMap include incs ++ "\n\n"
where
include [] = ""
include fname@('<':_) = "#include " ++ fname ++ "\n"
include fname = "#include \"" ++ fname ++ "\"\n"
incs = includes (platform opts) ++ [fromMaybe "" mainHeader]
-- | Predefined options
defaultOptions :: Options
defaultOptions
= Options
{ platform = c99
, printHeader = False
, useNativeArrays = False
, useNativeReturns = False
, frontendOpts = defaultFeldOpts
, safetyLimit = 2000
, nestSize = 2
}
c99PlatformOptions :: Options
c99PlatformOptions = defaultOptions
c99OpenMpPlatformOptions :: Options
c99OpenMpPlatformOptions = defaultOptions { platform = c99OpenMp }
tic64xPlatformOptions :: Options
tic64xPlatformOptions = defaultOptions { platform = tic64x }
sicsOptions :: Options
sicsOptions = defaultOptions { frontendOpts = defaultFeldOpts { targets = [SICS,CSE] }}
sicsOptions2 :: Options
sicsOptions2 = defaultOptions { frontendOpts = defaultFeldOpts { targets = [SICS] }}
sicsOptions3 :: Options
sicsOptions3 = defaultOptions { platform = c99Wool, frontendOpts = defaultFeldOpts { targets = [SICS,CSE,Wool] }}
|
emwap/feldspar-compiler
|
lib/Feldspar/Compiler/Compiler.hs
|
bsd-3-clause
| 6,012 | 0 | 11 | 1,325 | 1,064 | 620 | 444 | 91 | 4 |
{- |
SPDY is a protocol intended to speed up the transport of web
content. It allows multiplexing several streams on a single connection
between endpoints, with stream priorities and a notion of flow
control. See <http://dev.chromium.org/spdy> for all the details.
This library supports SPDY version 3, described in the third draft of
the SPDY specification, available at
<http://dev.chromium.org/spdy/spdy-protocol>. -}
module Network.SPDY (
-- * Versions of the protocol
spdyVersion3,
supportedSPDYVersions,
-- * Frame data structures
module Network.SPDY.Frames,
module Network.SPDY.Flags,
-- * Header compression and decompression
module Network.SPDY.Compression,
-- * Converting frames to byte strings
module Network.SPDY.Serialize,
-- * Parsing frames from byte strings
module Network.SPDY.Deserialize) where
import Network.SPDY.Frames
import Network.SPDY.Flags
import Network.SPDY.Compression
import Network.SPDY.Serialize
import Network.SPDY.Deserialize
-- | SPDY version 3
spdyVersion3 :: SPDYVersion
spdyVersion3 = SPDYVersion 3
-- | The list of protocol versions supported by this library, in
-- descending order of preference.
supportedSPDYVersions :: [SPDYVersion]
supportedSPDYVersions = [spdyVersion3]
|
kcharter/spdy-base
|
src/Network/SPDY.hs
|
bsd-3-clause
| 1,246 | 0 | 5 | 182 | 117 | 80 | 37 | 17 | 1 |
-- |
-- Module: Language.KURE.RewriteMonad
-- Copyright: (c) 2006-2008 Andy Gill
-- License: BSD3
--
-- Maintainer: Andy Gill <[email protected]>
-- Stability: unstable
-- Portability: ghc
--
-- This is the definition of the monad inside KURE.
module Language.KURE.RewriteMonad
( RewriteM -- abstract
, RewriteStatusM(..)
, Count(..)
, theCount
, runRewriteM
, failM
, catchM
, chainM
, liftQ
, markM
, transparentlyM
, readEnvM
, mapEnvM
, writeEnvM
) where
import Control.Monad
import Data.Monoid
------------------------------------------------------------------------------
data RewriteM m dec exp =
RewriteM { -- | 'runRewriteM' runs the 'RewriteM' monad, returning a status.
runRewriteM :: dec -> m (RewriteStatusM dec exp)
}
data Count = LoneTransform
| Count !Int
-- | How many transformations have been performed?
theCount :: Count -> Int
theCount (LoneTransform) = 1
theCount (Count n) = n
instance Monoid Count where
mempty = Count 0
mappend (Count 0) other = other
mappend other (Count 0) = other
mappend (Count i1) (Count i2) = Count (i1 + i2)
mappend (LoneTransform) (Count i2) = Count $ succ i2
mappend (Count i1) (LoneTransform) = Count $ succ i1
mappend (LoneTransform) (LoneTransform) = Count $ 2
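-- For example, @theCount (Count 2 `mappend` LoneTransform) == 3@: a lone
-- transform contributes exactly one rewrite step to the running total.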
data RewriteStatusM dec exp
= RewriteReturnM exp !(Maybe dec) !Count -- ^ a regular success
| RewriteFailureM String -- ^ a real failure
-- | RewriteIdM exp -- ^ identity marker on a value
-- TWO possible ways of thinking about rewriting:
-- C1 (e1) => C2 (C1 (e2)) => C3 (C2 (C1 (e3))) -- matches the *writer* like status
-- C1 (e1) => C1 (C2 (e2)) => C1 (C2 (C3 (e3))) -- will require merging??
instance (Monoid dec,Monad m) => Monad (RewriteM m dec) where
return e = RewriteM $ \ _ -> return $ RewriteReturnM e Nothing mempty
(RewriteM m) >>= k = RewriteM $ \ dec -> do
r <- m dec
case r of
RewriteReturnM r1 ds ids -> do
r2 <- runRewriteM (k r1) dec
return $ case r2 of
RewriteReturnM e' ds' ids' -> RewriteReturnM e' (ds' `mappend` ds) (ids' `mappend` ids)
RewriteFailureM msg -> RewriteFailureM msg
RewriteFailureM msg -> return $ RewriteFailureM msg
fail msg = RewriteM $ \ _ -> return $ RewriteFailureM msg
instance (Monoid dec,Monad m) => Functor (RewriteM m dec) where
fmap f m = liftM f m
-- | 'liftQ' lets you tunnel into the inner monad, because 'RewriteM' is actually a monad transformer.
liftQ :: (Monad m,Monoid dec) => m a -> RewriteM m dec a
liftQ m = RewriteM $ \ _ -> do r <- m
return $ RewriteReturnM r mempty mempty
-- | 'failM' is our basic failure, with a String message.
failM :: (Monad m, Monoid dec) => String -> RewriteM m dec a
failM msg = RewriteM $ \ _ -> return $ RewriteFailureM msg
-- | 'catchM' catches failures, and tries a second monadic computation.
catchM :: (Monad m) => RewriteM m dec a -> (String -> RewriteM m dec a) -> RewriteM m dec a
catchM (RewriteM m1) m2 = RewriteM $ \ dec -> do
r <- m1 dec
case r of
RewriteReturnM {} -> return r
RewriteFailureM msg -> runRewriteM (m2 msg) dec
-- | 'chainM' executes the first argument then the second, much like '>>=',
-- except that the second computation can see if the first computation was an identity or not.
-- Used to spot when a rewrite succeeded, but was the identity.
chainM :: (Monoid dec,Monad m)
=> (RewriteM m dec b)
-> (Int -> b -> RewriteM m dec c)
-> RewriteM m dec c
chainM m k = RewriteM $ \ dec -> do
r <- runRewriteM m dec
case r of
RewriteReturnM a ds ids ->
do r2 <- runRewriteM (k (theCount ids) a)
(case ds of
Nothing -> dec
Just ds2 -> ds2 `mappend` dec)
case r2 of
RewriteReturnM a' ds' ids' ->
return $ RewriteReturnM a' (ds' `mappend` ds) (ids' `mappend` ids)
RewriteFailureM msg -> return $ RewriteFailureM msg
RewriteFailureM msg -> return $ RewriteFailureM msg -- and still fail
-- | 'markM' is used to mark a monadic rewrite as a non-identity,
-- unless the congruence flag is set.
markM :: (Monad m) => RewriteM m dec a -> RewriteM m dec a
markM (RewriteM m) = RewriteM $ \ dec -> do
r <- m dec
case r of
RewriteReturnM a ds (Count 0) -> return $ RewriteReturnM a ds LoneTransform
RewriteReturnM a ds (Count n) -> return $ RewriteReturnM a ds (Count $ succ n)
RewriteReturnM a ds (LoneTransform) -> return $ RewriteReturnM a ds (Count 2)
RewriteFailureM msg -> return $ RewriteFailureM msg
-- | 'transparentlyM' sets the congruence flag, such that if the
-- monadic action was identity-preserving, then a 'markM' does
-- not set the non-identity flag.
transparentlyM :: (Monad m) => RewriteM m dec a -> RewriteM m dec a
transparentlyM (RewriteM m) = RewriteM $ \ dec -> do
r <- m dec
case r of
RewriteReturnM a ds LoneTransform -> return $ RewriteReturnM a ds (Count 0)
RewriteReturnM a ds other -> return $ RewriteReturnM a ds other
RewriteFailureM msg -> return $ RewriteFailureM msg
-- | 'readEnvM' reads the local environment.
readEnvM :: (Monad m, Monoid dec) => RewriteM m dec dec
readEnvM = RewriteM $ \ dec -> return $ RewriteReturnM dec mempty mempty
-- | 'mapEnvM' changes the local environment, inside a local monadic invocation.
mapEnvM :: (Monad m, Monoid dec) => (dec -> dec) -> RewriteM m dec a -> RewriteM m dec a
mapEnvM fn (RewriteM m) = RewriteM $ \ dec -> m (fn dec)
-- | 'writeEnvM' writes a value to the writer monad inside the 'RewriteM'.
writeEnvM :: (Monad m,Monoid dec) => dec -> RewriteM m dec ()
writeEnvM dec = RewriteM $ \ _dec -> return $ RewriteReturnM () (Just dec) (Count 0)
|
andygill/kure
|
Language/KURE/RewriteMonad.hs
|
bsd-3-clause
| 6,363 | 10 | 21 | 1,991 | 1,695 | 868 | 827 | 105 | 4 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.C.Error
-- Copyright : (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- C-specific Marshalling support: Handling of C \"errno\" error codes.
--
-----------------------------------------------------------------------------
module Foreign.C.Error (
-- * Haskell representations of @errno@ values
Errno(..),
-- ** Common @errno@ symbols
-- | Different operating systems and\/or C libraries often support
-- different values of @errno@. This module defines the common values,
-- but due to the open definition of 'Errno' users may add definitions
-- which are not predefined.
eOK, e2BIG, eACCES, eADDRINUSE, eADDRNOTAVAIL, eADV, eAFNOSUPPORT, eAGAIN,
eALREADY, eBADF, eBADMSG, eBADRPC, eBUSY, eCHILD, eCOMM, eCONNABORTED,
eCONNREFUSED, eCONNRESET, eDEADLK, eDESTADDRREQ, eDIRTY, eDOM, eDQUOT,
eEXIST, eFAULT, eFBIG, eFTYPE, eHOSTDOWN, eHOSTUNREACH, eIDRM, eILSEQ,
eINPROGRESS, eINTR, eINVAL, eIO, eISCONN, eISDIR, eLOOP, eMFILE, eMLINK,
eMSGSIZE, eMULTIHOP, eNAMETOOLONG, eNETDOWN, eNETRESET, eNETUNREACH,
eNFILE, eNOBUFS, eNODATA, eNODEV, eNOENT, eNOEXEC, eNOLCK, eNOLINK,
eNOMEM, eNOMSG, eNONET, eNOPROTOOPT, eNOSPC, eNOSR, eNOSTR, eNOSYS,
eNOTBLK, eNOTCONN, eNOTDIR, eNOTEMPTY, eNOTSOCK, eNOTSUP, eNOTTY, eNXIO,
eOPNOTSUPP, ePERM, ePFNOSUPPORT, ePIPE, ePROCLIM, ePROCUNAVAIL,
ePROGMISMATCH, ePROGUNAVAIL, ePROTO, ePROTONOSUPPORT, ePROTOTYPE,
eRANGE, eREMCHG, eREMOTE, eROFS, eRPCMISMATCH, eRREMOTE, eSHUTDOWN,
eSOCKTNOSUPPORT, eSPIPE, eSRCH, eSRMNT, eSTALE, eTIME, eTIMEDOUT,
eTOOMANYREFS, eTXTBSY, eUSERS, eWOULDBLOCK, eXDEV,
-- ** 'Errno' functions
isValidErrno,
-- access to the current thread's "errno" value
--
getErrno,
resetErrno,
-- conversion of an "errno" value into IO error
--
errnoToIOError,
-- throw current "errno" value
--
throwErrno,
-- ** Guards for IO operations that may fail
throwErrnoIf,
throwErrnoIf_,
throwErrnoIfRetry,
throwErrnoIfRetry_,
throwErrnoIfMinus1,
throwErrnoIfMinus1_,
throwErrnoIfMinus1Retry,
throwErrnoIfMinus1Retry_,
throwErrnoIfNull,
throwErrnoIfNullRetry,
throwErrnoIfRetryMayBlock,
throwErrnoIfRetryMayBlock_,
throwErrnoIfMinus1RetryMayBlock,
throwErrnoIfMinus1RetryMayBlock_,
throwErrnoIfNullRetryMayBlock,
throwErrnoPath,
throwErrnoPathIf,
throwErrnoPathIf_,
throwErrnoPathIfNull,
throwErrnoPathIfMinus1,
throwErrnoPathIfMinus1_,
) where
-- TODO: Replace signals
#include "Signals.h"
-- this is where we get the CONST_XXX definitions that configure
-- calculated for us
--
import Foreign.Ptr
import Foreign.C.Types
import Foreign.C.String
import Data.Functor ( void )
import Data.Maybe
import GHC.IO
import GHC.IO.Exception
import GHC.IO.Handle.Types
import GHC.Num
import GHC.Base
-- "errno" type
-- ------------
-- | Haskell representation for @errno@ values.
-- The implementation is deliberately exposed, to allow users to add
-- their own definitions of 'Errno' values.
newtype Errno = Errno CInt
instance Eq Errno where
errno1@(Errno no1) == errno2@(Errno no2)
| isValidErrno errno1 && isValidErrno errno2 = no1 == no2
| otherwise = False
-- common "errno" symbols
--
eOK, e2BIG, eACCES, eADDRINUSE, eADDRNOTAVAIL, eADV, eAFNOSUPPORT, eAGAIN,
eALREADY, eBADF, eBADMSG, eBADRPC, eBUSY, eCHILD, eCOMM, eCONNABORTED,
eCONNREFUSED, eCONNRESET, eDEADLK, eDESTADDRREQ, eDIRTY, eDOM, eDQUOT,
eEXIST, eFAULT, eFBIG, eFTYPE, eHOSTDOWN, eHOSTUNREACH, eIDRM, eILSEQ,
eINPROGRESS, eINTR, eINVAL, eIO, eISCONN, eISDIR, eLOOP, eMFILE, eMLINK,
eMSGSIZE, eMULTIHOP, eNAMETOOLONG, eNETDOWN, eNETRESET, eNETUNREACH,
eNFILE, eNOBUFS, eNODATA, eNODEV, eNOENT, eNOEXEC, eNOLCK, eNOLINK,
eNOMEM, eNOMSG, eNONET, eNOPROTOOPT, eNOSPC, eNOSR, eNOSTR, eNOSYS,
eNOTBLK, eNOTCONN, eNOTDIR, eNOTEMPTY, eNOTSOCK, eNOTSUP, eNOTTY, eNXIO,
eOPNOTSUPP, ePERM, ePFNOSUPPORT, ePIPE, ePROCLIM, ePROCUNAVAIL,
ePROGMISMATCH, ePROGUNAVAIL, ePROTO, ePROTONOSUPPORT, ePROTOTYPE,
eRANGE, eREMCHG, eREMOTE, eROFS, eRPCMISMATCH, eRREMOTE, eSHUTDOWN,
eSOCKTNOSUPPORT, eSPIPE, eSRCH, eSRMNT, eSTALE, eTIME, eTIMEDOUT,
eTOOMANYREFS, eTXTBSY, eUSERS, eWOULDBLOCK, eXDEV :: Errno
--
-- the cCONST_XXX identifiers are cpp symbols whose value is computed by
-- configure
--
eOK = Errno 0
e2BIG = Errno (CONST_E2BIG)
eACCES = Errno (CONST_EACCES)
eADDRINUSE = Errno (CONST_EADDRINUSE)
eADDRNOTAVAIL = Errno (CONST_EADDRNOTAVAIL)
eADV = Errno (CONST_EADV)
eAFNOSUPPORT = Errno (CONST_EAFNOSUPPORT)
eAGAIN = Errno (CONST_EAGAIN)
eALREADY = Errno (CONST_EALREADY)
eBADF = Errno (CONST_EBADF)
eBADMSG = Errno (CONST_EBADMSG)
eBADRPC = Errno (CONST_EBADRPC)
eBUSY = Errno (CONST_EBUSY)
eCHILD = Errno (CONST_ECHILD)
eCOMM = Errno (CONST_ECOMM)
eCONNABORTED = Errno (CONST_ECONNABORTED)
eCONNREFUSED = Errno (CONST_ECONNREFUSED)
eCONNRESET = Errno (CONST_ECONNRESET)
eDEADLK = Errno (CONST_EDEADLK)
eDESTADDRREQ = Errno (CONST_EDESTADDRREQ)
eDIRTY = Errno (CONST_EDIRTY)
eDOM = Errno (CONST_EDOM)
eDQUOT = Errno (CONST_EDQUOT)
eEXIST = Errno (CONST_EEXIST)
eFAULT = Errno (CONST_EFAULT)
eFBIG = Errno (CONST_EFBIG)
eFTYPE = Errno (CONST_EFTYPE)
eHOSTDOWN = Errno (CONST_EHOSTDOWN)
eHOSTUNREACH = Errno (CONST_EHOSTUNREACH)
eIDRM = Errno (CONST_EIDRM)
eILSEQ = Errno (CONST_EILSEQ)
eINPROGRESS = Errno (CONST_EINPROGRESS)
eINTR = Errno (CONST_EINTR)
eINVAL = Errno (CONST_EINVAL)
eIO = Errno (CONST_EIO)
eISCONN = Errno (CONST_EISCONN)
eISDIR = Errno (CONST_EISDIR)
eLOOP = Errno (CONST_ELOOP)
eMFILE = Errno (CONST_EMFILE)
eMLINK = Errno (CONST_EMLINK)
eMSGSIZE = Errno (CONST_EMSGSIZE)
eMULTIHOP = Errno (CONST_EMULTIHOP)
eNAMETOOLONG = Errno (CONST_ENAMETOOLONG)
eNETDOWN = Errno (CONST_ENETDOWN)
eNETRESET = Errno (CONST_ENETRESET)
eNETUNREACH = Errno (CONST_ENETUNREACH)
eNFILE = Errno (CONST_ENFILE)
eNOBUFS = Errno (CONST_ENOBUFS)
eNODATA = Errno (CONST_ENODATA)
eNODEV = Errno (CONST_ENODEV)
eNOENT = Errno (CONST_ENOENT)
eNOEXEC = Errno (CONST_ENOEXEC)
eNOLCK = Errno (CONST_ENOLCK)
eNOLINK = Errno (CONST_ENOLINK)
eNOMEM = Errno (CONST_ENOMEM)
eNOMSG = Errno (CONST_ENOMSG)
eNONET = Errno (CONST_ENONET)
eNOPROTOOPT = Errno (CONST_ENOPROTOOPT)
eNOSPC = Errno (CONST_ENOSPC)
eNOSR = Errno (CONST_ENOSR)
eNOSTR = Errno (CONST_ENOSTR)
eNOSYS = Errno (CONST_ENOSYS)
eNOTBLK = Errno (CONST_ENOTBLK)
eNOTCONN = Errno (CONST_ENOTCONN)
eNOTDIR = Errno (CONST_ENOTDIR)
eNOTEMPTY = Errno (CONST_ENOTEMPTY)
eNOTSOCK = Errno (CONST_ENOTSOCK)
eNOTSUP = Errno (CONST_ENOTSUP)
-- ^ @since 4.7.0.0
eNOTTY = Errno (CONST_ENOTTY)
eNXIO = Errno (CONST_ENXIO)
eOPNOTSUPP = Errno (CONST_EOPNOTSUPP)
ePERM = Errno (CONST_EPERM)
ePFNOSUPPORT = Errno (CONST_EPFNOSUPPORT)
ePIPE = Errno (CONST_EPIPE)
ePROCLIM = Errno (CONST_EPROCLIM)
ePROCUNAVAIL = Errno (CONST_EPROCUNAVAIL)
ePROGMISMATCH = Errno (CONST_EPROGMISMATCH)
ePROGUNAVAIL = Errno (CONST_EPROGUNAVAIL)
ePROTO = Errno (CONST_EPROTO)
ePROTONOSUPPORT = Errno (CONST_EPROTONOSUPPORT)
ePROTOTYPE = Errno (CONST_EPROTOTYPE)
eRANGE = Errno (CONST_ERANGE)
eREMCHG = Errno (CONST_EREMCHG)
eREMOTE = Errno (CONST_EREMOTE)
eROFS = Errno (CONST_EROFS)
eRPCMISMATCH = Errno (CONST_ERPCMISMATCH)
eRREMOTE = Errno (CONST_ERREMOTE)
eSHUTDOWN = Errno (CONST_ESHUTDOWN)
eSOCKTNOSUPPORT = Errno (CONST_ESOCKTNOSUPPORT)
eSPIPE = Errno (CONST_ESPIPE)
eSRCH = Errno (CONST_ESRCH)
eSRMNT = Errno (CONST_ESRMNT)
eSTALE = Errno (CONST_ESTALE)
eTIME = Errno (CONST_ETIME)
eTIMEDOUT = Errno (CONST_ETIMEDOUT)
eTOOMANYREFS = Errno (CONST_ETOOMANYREFS)
eTXTBSY = Errno (CONST_ETXTBSY)
eUSERS = Errno (CONST_EUSERS)
eWOULDBLOCK = Errno (CONST_EWOULDBLOCK)
eXDEV = Errno (CONST_EXDEV)
-- | Yield 'True' if the given 'Errno' value is valid on the system.
-- This implies that the 'Eq' instance of 'Errno' is also system dependent
-- as it is only defined for valid values of 'Errno'.
--
isValidErrno :: Errno -> Bool
--
-- the configure script sets all invalid "errno"s to -1
--
isValidErrno (Errno errno) = errno /= -1
-- access to the current thread's "errno" value
-- --------------------------------------------
-- | Get the current value of @errno@ in the current thread.
--
getErrno :: IO Errno
-- We must call a C function to get the value of errno in general. On
-- threaded systems, errno is hidden behind a C macro so that each OS
-- thread gets its own copy.
getErrno = do e <- get_errno; return (Errno e)
-- TODO: Implement!
-- foreign import ccall unsafe "HsBase.h __hscore_get_errno"
get_errno :: IO CInt
get_errno = undefined
-- | Reset the current thread\'s @errno@ value to 'eOK'.
--
resetErrno :: IO ()
-- Again, setting errno has to be done via a C function.
resetErrno = set_errno 0
-- TODO: Implement!
-- foreign import ccall unsafe "HsBase.h __hscore_set_errno"
set_errno :: CInt -> IO ()
set_errno = undefined
-- throw current "errno" value
-- ---------------------------
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'.
--
throwErrno :: String -- ^ textual description of the error location
-> IO a
throwErrno loc =
do
errno <- getErrno
ioError (errnoToIOError loc errno Nothing Nothing)
-- guards for IO operations that may fail
-- --------------------------------------
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'
-- if the result value of the 'IO' action meets the given predicate.
--
throwErrnoIf :: (a -> Bool) -- ^ predicate to apply to the result value
-- of the 'IO' operation
-> String -- ^ textual description of the location
-> IO a -- ^ the 'IO' operation to be executed
-> IO a
throwErrnoIf pred loc f =
do
res <- f
if pred res then throwErrno loc else return res
-- | as 'throwErrnoIf', but discards the result of the 'IO' action after
-- error handling.
--
throwErrnoIf_ :: (a -> Bool) -> String -> IO a -> IO ()
throwErrnoIf_ pred loc f = void $ throwErrnoIf pred loc f
-- | as 'throwErrnoIf', but retry the 'IO' action when it yields the
-- error code 'eINTR' - this amounts to the standard retry loop for
-- interrupted POSIX system calls.
--
throwErrnoIfRetry :: (a -> Bool) -> String -> IO a -> IO a
throwErrnoIfRetry pred loc f =
do
res <- f
if pred res
then do
err <- getErrno
if err == eINTR
then throwErrnoIfRetry pred loc f
else throwErrno loc
else return res
-- | as 'throwErrnoIfRetry', but additionally if the operation
-- yields the error code 'eAGAIN' or 'eWOULDBLOCK', an alternative
-- action is executed before retrying.
--
throwErrnoIfRetryMayBlock
:: (a -> Bool) -- ^ predicate to apply to the result value
-- of the 'IO' operation
-> String -- ^ textual description of the location
-> IO a -- ^ the 'IO' operation to be executed
-> IO b -- ^ action to execute before retrying if
-- an immediate retry would block
-> IO a
throwErrnoIfRetryMayBlock pred loc f on_block =
do
res <- f
if pred res
then do
err <- getErrno
if err == eINTR
then throwErrnoIfRetryMayBlock pred loc f on_block
else if err == eWOULDBLOCK || err == eAGAIN
then do _ <- on_block
throwErrnoIfRetryMayBlock pred loc f on_block
else throwErrno loc
else return res
-- | as 'throwErrnoIfRetry', but discards the result.
--
throwErrnoIfRetry_ :: (a -> Bool) -> String -> IO a -> IO ()
throwErrnoIfRetry_ pred loc f = void $ throwErrnoIfRetry pred loc f
-- | as 'throwErrnoIfRetryMayBlock', but discards the result.
--
throwErrnoIfRetryMayBlock_ :: (a -> Bool) -> String -> IO a -> IO b -> IO ()
throwErrnoIfRetryMayBlock_ pred loc f on_block
= void $ throwErrnoIfRetryMayBlock pred loc f on_block
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'
-- if the 'IO' action returns a result of @-1@.
--
throwErrnoIfMinus1 :: (Eq a, Num a) => String -> IO a -> IO a
throwErrnoIfMinus1 = throwErrnoIf (== -1)
-- | as 'throwErrnoIfMinus1', but discards the result.
--
throwErrnoIfMinus1_ :: (Eq a, Num a) => String -> IO a -> IO ()
throwErrnoIfMinus1_ = throwErrnoIf_ (== -1)
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'
-- if the 'IO' action returns a result of @-1@, but retries in case of
-- an interrupted operation.
--
throwErrnoIfMinus1Retry :: (Eq a, Num a) => String -> IO a -> IO a
throwErrnoIfMinus1Retry = throwErrnoIfRetry (== -1)
-- | as 'throwErrnoIfMinus1', but discards the result.
--
throwErrnoIfMinus1Retry_ :: (Eq a, Num a) => String -> IO a -> IO ()
throwErrnoIfMinus1Retry_ = throwErrnoIfRetry_ (== -1)
-- | as 'throwErrnoIfMinus1Retry', but checks for operations that would block.
--
throwErrnoIfMinus1RetryMayBlock :: (Eq a, Num a)
=> String -> IO a -> IO b -> IO a
throwErrnoIfMinus1RetryMayBlock = throwErrnoIfRetryMayBlock (== -1)
-- | as 'throwErrnoIfMinus1RetryMayBlock', but discards the result.
--
throwErrnoIfMinus1RetryMayBlock_ :: (Eq a, Num a)
=> String -> IO a -> IO b -> IO ()
throwErrnoIfMinus1RetryMayBlock_ = throwErrnoIfRetryMayBlock_ (== -1)
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'
-- if the 'IO' action returns 'nullPtr'.
--
throwErrnoIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwErrnoIfNull = throwErrnoIf (== nullPtr)
-- | Throw an 'IOError' corresponding to the current value of 'getErrno'
-- if the 'IO' action returns 'nullPtr',
-- but retry in case of an interrupted operation.
--
throwErrnoIfNullRetry :: String -> IO (Ptr a) -> IO (Ptr a)
throwErrnoIfNullRetry = throwErrnoIfRetry (== nullPtr)
-- | as 'throwErrnoIfNullRetry', but checks for operations that would block.
--
throwErrnoIfNullRetryMayBlock :: String -> IO (Ptr a) -> IO b -> IO (Ptr a)
throwErrnoIfNullRetryMayBlock = throwErrnoIfRetryMayBlock (== nullPtr)
-- | as 'throwErrno', but exceptions include the given path when appropriate.
--
throwErrnoPath :: String -> FilePath -> IO a
throwErrnoPath loc path =
do
errno <- getErrno
ioError (errnoToIOError loc errno Nothing (Just path))
-- | as 'throwErrnoIf', but exceptions include the given path when
-- appropriate.
--
throwErrnoPathIf :: (a -> Bool) -> String -> FilePath -> IO a -> IO a
throwErrnoPathIf pred loc path f =
do
res <- f
if pred res then throwErrnoPath loc path else return res
-- | as 'throwErrnoIf_', but exceptions include the given path when
-- appropriate.
--
throwErrnoPathIf_ :: (a -> Bool) -> String -> FilePath -> IO a -> IO ()
throwErrnoPathIf_ pred loc path f = void $ throwErrnoPathIf pred loc path f
-- | as 'throwErrnoIfNull', but exceptions include the given path when
-- appropriate.
--
throwErrnoPathIfNull :: String -> FilePath -> IO (Ptr a) -> IO (Ptr a)
throwErrnoPathIfNull = throwErrnoPathIf (== nullPtr)
-- | as 'throwErrnoIfMinus1', but exceptions include the given path when
-- appropriate.
--
throwErrnoPathIfMinus1 :: (Eq a, Num a) => String -> FilePath -> IO a -> IO a
throwErrnoPathIfMinus1 = throwErrnoPathIf (== -1)
-- | as 'throwErrnoIfMinus1_', but exceptions include the given path when
-- appropriate.
--
throwErrnoPathIfMinus1_ :: (Eq a, Num a) => String -> FilePath -> IO a -> IO ()
throwErrnoPathIfMinus1_ = throwErrnoPathIf_ (== -1)
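-- A hedged usage sketch: @c_mkdir@ below is a hypothetical foreign import
-- (not part of this module), shown only to illustrate how these guards are
-- meant to wrap raw C calls.
--
-- > foreign import ccall unsafe "sys/stat.h mkdir"
-- >   c_mkdir :: CString -> CUInt -> IO CInt
-- >
-- > createDirectory :: FilePath -> IO ()
-- > createDirectory path =
-- >   withCString path $ \cPath ->
-- >     throwErrnoPathIfMinus1_ "createDirectory" path (c_mkdir cPath 0o777)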
-- conversion of an "errno" value into IO error
-- --------------------------------------------
-- | Construct an 'IOError' based on the given 'Errno' value.
-- The optional information can be used to improve the accuracy of
-- error messages.
--
errnoToIOError :: String -- ^ the location where the error occurred
-> Errno -- ^ the error number
-> Maybe Handle -- ^ optional handle associated with the error
-> Maybe String -- ^ optional filename associated with the error
-> IOError
errnoToIOError loc errno maybeHdl maybeName = unsafePerformIO $ do
str <- strerror errno >>= peekCString
return (IOError maybeHdl errType loc str (Just errno') maybeName)
where
Errno errno' = errno
errType
| errno == eOK = OtherError
| errno == e2BIG = ResourceExhausted
| errno == eACCES = PermissionDenied
| errno == eADDRINUSE = ResourceBusy
| errno == eADDRNOTAVAIL = UnsupportedOperation
| errno == eADV = OtherError
| errno == eAFNOSUPPORT = UnsupportedOperation
| errno == eAGAIN = ResourceExhausted
| errno == eALREADY = AlreadyExists
| errno == eBADF = InvalidArgument
| errno == eBADMSG = InappropriateType
| errno == eBADRPC = OtherError
| errno == eBUSY = ResourceBusy
| errno == eCHILD = NoSuchThing
| errno == eCOMM = ResourceVanished
| errno == eCONNABORTED = OtherError
| errno == eCONNREFUSED = NoSuchThing
| errno == eCONNRESET = ResourceVanished
| errno == eDEADLK = ResourceBusy
| errno == eDESTADDRREQ = InvalidArgument
| errno == eDIRTY = UnsatisfiedConstraints
| errno == eDOM = InvalidArgument
| errno == eDQUOT = PermissionDenied
| errno == eEXIST = AlreadyExists
| errno == eFAULT = OtherError
| errno == eFBIG = PermissionDenied
| errno == eFTYPE = InappropriateType
| errno == eHOSTDOWN = NoSuchThing
| errno == eHOSTUNREACH = NoSuchThing
| errno == eIDRM = ResourceVanished
| errno == eILSEQ = InvalidArgument
| errno == eINPROGRESS = AlreadyExists
| errno == eINTR = Interrupted
| errno == eINVAL = InvalidArgument
| errno == eIO = HardwareFault
| errno == eISCONN = AlreadyExists
| errno == eISDIR = InappropriateType
| errno == eLOOP = InvalidArgument
| errno == eMFILE = ResourceExhausted
| errno == eMLINK = ResourceExhausted
| errno == eMSGSIZE = ResourceExhausted
| errno == eMULTIHOP = UnsupportedOperation
| errno == eNAMETOOLONG = InvalidArgument
| errno == eNETDOWN = ResourceVanished
| errno == eNETRESET = ResourceVanished
| errno == eNETUNREACH = NoSuchThing
| errno == eNFILE = ResourceExhausted
| errno == eNOBUFS = ResourceExhausted
| errno == eNODATA = NoSuchThing
| errno == eNODEV = UnsupportedOperation
| errno == eNOENT = NoSuchThing
| errno == eNOEXEC = InvalidArgument
| errno == eNOLCK = ResourceExhausted
| errno == eNOLINK = ResourceVanished
| errno == eNOMEM = ResourceExhausted
| errno == eNOMSG = NoSuchThing
| errno == eNONET = NoSuchThing
| errno == eNOPROTOOPT = UnsupportedOperation
| errno == eNOSPC = ResourceExhausted
| errno == eNOSR = ResourceExhausted
| errno == eNOSTR = InvalidArgument
| errno == eNOSYS = UnsupportedOperation
| errno == eNOTBLK = InvalidArgument
| errno == eNOTCONN = InvalidArgument
| errno == eNOTDIR = InappropriateType
| errno == eNOTEMPTY = UnsatisfiedConstraints
| errno == eNOTSOCK = InvalidArgument
| errno == eNOTTY = IllegalOperation
| errno == eNXIO = NoSuchThing
| errno == eOPNOTSUPP = UnsupportedOperation
| errno == ePERM = PermissionDenied
| errno == ePFNOSUPPORT = UnsupportedOperation
| errno == ePIPE = ResourceVanished
| errno == ePROCLIM = PermissionDenied
| errno == ePROCUNAVAIL = UnsupportedOperation
| errno == ePROGMISMATCH = ProtocolError
| errno == ePROGUNAVAIL = UnsupportedOperation
| errno == ePROTO = ProtocolError
| errno == ePROTONOSUPPORT = ProtocolError
| errno == ePROTOTYPE = ProtocolError
| errno == eRANGE = UnsupportedOperation
| errno == eREMCHG = ResourceVanished
| errno == eREMOTE = IllegalOperation
| errno == eROFS = PermissionDenied
| errno == eRPCMISMATCH = ProtocolError
| errno == eRREMOTE = IllegalOperation
| errno == eSHUTDOWN = IllegalOperation
| errno == eSOCKTNOSUPPORT = UnsupportedOperation
| errno == eSPIPE = UnsupportedOperation
| errno == eSRCH = NoSuchThing
| errno == eSRMNT = UnsatisfiedConstraints
| errno == eSTALE = ResourceVanished
| errno == eTIME = TimeExpired
| errno == eTIMEDOUT = TimeExpired
| errno == eTOOMANYREFS = ResourceExhausted
| errno == eTXTBSY = ResourceBusy
| errno == eUSERS = ResourceExhausted
| errno == eWOULDBLOCK = OtherError
| errno == eXDEV = UnsupportedOperation
| otherwise = OtherError
-- TODO: Implement
-- foreign import ccall unsafe "string.h"
strerror :: Errno -> IO (Ptr CChar)
strerror = undefined
|
alexander-at-github/eta
|
libraries/base/Foreign/C/Error.hs
|
bsd-3-clause
| 22,828 | 0 | 14 | 6,190 | 4,845 | 2,666 | 2,179 | 384 | 4 |
module P10 where
import P9 (pack)
encode :: (Eq a) => [a] -> [(Int, a)]
encode xs =
let grouped = pack xs
    in map (\x -> (length x, head x)) grouped
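-- For example, assuming 'pack' (problem 9) groups consecutive equal elements:
-- encode "aaabccd" == [(3,'a'),(1,'b'),(2,'c'),(1,'d')]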
|
bradb/99problems
|
src/p10.hs
|
bsd-3-clause
| 156 | 0 | 12 | 38 | 92 | 51 | 41 | 6 | 1 |
module Stack.Types.CompilerBuild
(CompilerBuild(..)
,compilerBuildName
,compilerBuildSuffix
,parseCompilerBuild
) where
import Control.Monad.IO.Unlift
import Data.Aeson.Extended (FromJSON, parseJSON, withText)
import Data.Text as T
data CompilerBuild
= CompilerBuildStandard
| CompilerBuildSpecialized String
deriving (Show)
instance FromJSON CompilerBuild where
-- Strange structuring is to give consistent error messages
parseJSON =
withText
"CompilerBuild"
(either (fail . show) return . parseCompilerBuild . T.unpack)
-- | Descriptive name for compiler build
compilerBuildName :: CompilerBuild -> String
compilerBuildName CompilerBuildStandard = "standard"
compilerBuildName (CompilerBuildSpecialized s) = s
-- | Suffix to use for filenames/directories constructed with compiler build
compilerBuildSuffix :: CompilerBuild -> String
compilerBuildSuffix CompilerBuildStandard = ""
compilerBuildSuffix (CompilerBuildSpecialized s) = '-' : s
-- | Parse compiler build from a String.
parseCompilerBuild :: (MonadThrow m) => String -> m CompilerBuild
parseCompilerBuild "" = return CompilerBuildStandard
parseCompilerBuild "standard" = return CompilerBuildStandard
parseCompilerBuild name = return (CompilerBuildSpecialized name)
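-- For example: @parseCompilerBuild "gmp4"@ yields @CompilerBuildSpecialized "gmp4"@
-- (suffix @"-gmp4"@), while @""@ and @"standard"@ both yield the standard build
-- (empty suffix).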
|
martin-kolinek/stack
|
src/Stack/Types/CompilerBuild.hs
|
bsd-3-clause
| 1,326 | 0 | 12 | 233 | 248 | 138 | 110 | 27 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-# LANGUAGE OverloadedStrings, RankNTypes, TemplateHaskell #-}
import Control.Exception (throw)
import System.Environment (getArgs)
import System.FilePath
import Control.Monad (liftM2)
import Data.Maybe (isJust, fromJust)
import qualified Data.ByteString as BS
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.List as List
import Data.Char (isSpace)
import Text.Roundtrip
import Text.Roundtrip.Xml
import Test.Framework
import Test.Framework.TestManager
--
-- Specification for expressions
--
data Expr = Var String
| Lit Int
| Plus Expr Expr
deriving (Show, Eq)
$(defineIsomorphisms ''Expr)
xmlVariable :: XmlSyntax d => d String
xmlVariable = xmlElem "var" (xmlAttr "name" textStringIso)
xmlInteger :: XmlSyntax d => d Int
xmlInteger = xmlElem "lit" (xmlAttr "value" readShowTextIso)
pXmlExpr :: XmlSyntax d => d Expr
pXmlExpr = var <$> xmlVariable
<|> lit <$> xmlInteger
<|> plus <$> xmlElem "plus" (pXmlExpr <*> pXmlExpr)
instance Arbitrary Expr where
arbitrary = sized arbExpr
where arbExpr 0 = frequency simpleExprs
arbExpr n = frequency (simpleExprs ++
[(5, liftM2 Plus (arbExpr (n `div` 2))
(arbExpr (n `div` 2)))])
simpleExprs = [(1, do n <- arbitrary
return (Lit n)),
(1, do v <- elements letters
vs <- listOf (elements lettersOrDigits)
return (Var (v:vs)))]
letters = ['a'..'z'] ++ ['A'..'Z']
lettersOrDigits = letters ++ ['0'..'9']
shrink (Var _) = []
shrink (Lit _) = []
shrink (Plus e1 e2) = [e1, e2]
test_exprParser :: IO ()
test_exprParser =
do let epe = runXmlParserString pXmlExpr "<string>" defaultEntityRenderer
"<plus><lit value=\"1\"/><plus><var name=\"foo\"/><lit value=\"2\"/></plus></plus>"
pe <- assertRight epe
assertEqual (Plus (Lit 1) (Plus (Var "foo") (Lit 2))) pe
test_exprPrinter :: IO ()
test_exprPrinter =
do let ms = runXmlPrinterString pXmlExpr (Plus (Lit 1) (Plus (Var "foo") (Lit 2)))
s <- assertJust ms
assertEqual "<plus><lit value=\"1\"/><plus><var name=\"foo\"/><lit value=\"2\"/></plus></plus>" s
prop_exprPrinterDoesNotFail :: Expr -> Bool
prop_exprPrinterDoesNotFail expr = isJust (runXmlPrinterString pXmlExpr expr)
prop_exprPrinterParserInverse :: Expr -> Bool
prop_exprPrinterParserInverse expr =
let code = fromJust (runXmlPrinterString pXmlExpr expr)
in case runXmlParserString pXmlExpr "<string>" defaultEntityRenderer code of
Left err -> error (show err)
Right expr' -> expr == expr'
--
-- Parsing, invalid lookahead, David, 2011-07-23
--
pilSpec1 :: XmlSyntax d => d (Either [Text] [Text])
pilSpec1 =
xmlElem "root"
(xmlElem "list" (left <$> many1 (xmlElem "foo" xmlText)) <||>
xmlElem "list" (right <$> many (xmlElem "bar" xmlText)))
pilSpec2 :: XmlSyntax d => d (Either [Text] [Text])
pilSpec2 =
xmlElem "root"
(xmlElem "list" ((left <$> many1 (xmlElem "foo" xmlText)) <|>
(right <$> many (xmlElem "bar" xmlText))))
prop_pilSpec1Roundtrip :: Either [Text] [Text] -> Property
prop_pilSpec1Roundtrip arg =
(case arg of
Left [] -> False
_ -> True)
==>
checkRoundtrip pilSpec1 arg
prop_pilSpec2Roundtrip :: Either [Text] [Text] -> Property
prop_pilSpec2Roundtrip arg =
(case arg of
Left [] -> False
_ -> True)
==>
checkRoundtrip pilSpec2 arg
test_pil11 =
do x <- parseFromFile (testFile "001.xml") pilSpec1
assertEqual (Right []) x
test_pil12 =
do x <- parseFromFile (testFile "001.xml") pilSpec2
assertEqual (Right []) x
test_pil21 =
do x <- parseFromFile (testFile "002.xml") pilSpec1
assertEqual (Left [""]) x
test_pil22 =
do x <- parseFromFile (testFile "002.xml") pilSpec2
assertEqual (Left [""]) x
test_pil31 =
do x <- parseFromFile (testFile "003.xml") pilSpec1
assertEqual (Right [""]) x
test_pil32 =
do x <- parseFromFile (testFile "003.xml") pilSpec2
assertEqual (Right [""]) x
test_deepLookAhead =
do x <- parseFromFile (testFile "004.xml") spec
assertEqual (Right "you got it!") x
where
spec :: XmlSyntax d => d (Either Text Text)
spec =
left <$> xmlElem "a" (xmlElem "b" (xmlElem "c" (xmlElem "d"
(xmlElem "e" (xmlElem "f" (xmlElem "h" xmlText))))))
<||> right <$> xmlElem "a" (xmlElem "b" (xmlElem "c" (xmlElem "d"
(xmlElem "e" (xmlElem "f" (xmlElem "g" xmlText))))))
--
-- Backtracking inside attributes
--
backtrackingAttrSpec :: XmlSyntax d => d (T.Text, T.Text)
backtrackingAttrSpec =
xmlElem "root"
(xmlElem "x" (xmlAttrValue "foo" <*> xmlAttrValue "bar")) <||>
xmlElem "root"
(xmlElem "x" (xmlAttrValue "foo" <* xmlFixedAttr "baz" "2") <*> xmlElem "bar" xmlText)
test_back1 =
do x <- parseFromFile (testFile "005.xml") backtrackingAttrSpec
assertEqual ("1", "2") x
test_back2 =
do x <- parseFromFile (testFile "006.xml") backtrackingAttrSpec
assertEqual ("1", "2") x
backtrackingAttrSpec2 :: XmlSyntax d => d T.Text
backtrackingAttrSpec2 =
xmlElem "root" (xmlAttrValue "foo" <|> xmlAttrValue "bar")
test_back3 =
do x <- parseFromFile (testFile "007.xml") backtrackingAttrSpec2
assertEqual "1" x
test_back4 =
do x <- parseFromFile (testFile "008.xml") backtrackingAttrSpec2
assertEqual "1" x
--
-- Utils & main
--
instance Arbitrary Text where
arbitrary =
do s <- arbitrary
return $ T.pack $ trim s
where
trim = List.dropWhile isSpace . reverse . List.dropWhile isSpace . reverse
testFile f = "tests" </> f
checkRoundtrip :: (Eq a, Show a) => (forall d . XmlSyntax d => d a) -> a -> Bool
checkRoundtrip spec val =
case runXmlPrinterString spec val of
Nothing -> error ("could not print " ++ show val)
Just t ->
case runXmlParserString spec "<text>" defaultEntityRenderer t of
Right val' ->
if val == val'
then True
else error (show val ++ " /= " ++ show val')
Left err -> error ("Parsing of " ++ show t ++ " failed: " ++ show err)
parseFromFile :: (Eq a, Show a)
=> FilePath -> (forall d . XmlSyntax d => d a) -> IO a
parseFromFile fname p =
do bs <- BS.readFile fname
case runXmlParserByteString p fname defaultEntityRenderer bs of
Right x -> return x
Left err -> fail (show err)
main =
do args <- getArgs
runTestWithArgs args htf_thisModulesTests
|
skogsbaer/roundtrip-xml
|
tests/Tests.hs
|
bsd-3-clause
| 6,882 | 4 | 20 | 1,861 | 2,215 | 1,108 | 1,107 | 163 | 4 |
import Data.List
divisors' :: Integral a => a -> [a]
divisors' n = (1:) $ nub $ concat [ [x, div n x] | x <- [2..limit], rem n x == 0 ]
where limit = (floor.sqrt.fromIntegral) n
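-- For example: divisors' 12 == [1,2,6,3,4] (the result is unordered, always
-- starts with 1, and does not include n itself).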
|
trxeste/wrk
|
haskell/divisors.hs
|
bsd-3-clause
| 146 | 0 | 10 | 34 | 87 | 45 | 42 | 3 | 1 |
module MOO.Network.TCP (
HostName
, PortNumber
, createTCPListener
) where
import Control.Applicative ((<$>))
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.STM (STM, TMVar, newEmptyTMVarIO, atomically,
putTMVar, readTMVar)
import Control.Exception (SomeException, IOException, mask, try, finally,
bracketOnError)
import Control.Monad (forever)
import Data.Maybe (fromMaybe)
import Network.Socket (PortNumber, Socket, SockAddr,
SocketOption(ReuseAddr, KeepAlive),
Family(AF_INET, AF_INET6), SocketType(Stream),
AddrInfo(addrFlags, addrFamily, addrSocketType,
addrProtocol, addrAddress),
AddrInfoFlag(AI_PASSIVE, AI_NUMERICSERV,
AI_ADDRCONFIG, AI_V4MAPPED),
NameInfoFlag(NI_NAMEREQD,
NI_NUMERICHOST, NI_NUMERICSERV),
HostName, ServiceName, maxListenQueue,
defaultHints, getAddrInfo, setSocketOption,
socket, bind, listen, accept, close,
getNameInfo, socketPort)
import Pipes.Network.TCP (fromSocket, toSocket)
import MOO.Connection (ConnectionHandler)
import {-# SOURCE #-} MOO.Network (Point(TCP),
Listener(listenerPoint, listenerCancel))
maxBufferSize :: Int
maxBufferSize = 1024
serverAddrInfo :: Maybe HostName -> PortNumber -> IO [AddrInfo]
serverAddrInfo host port =
let hints6, hints4 :: AddrInfo
hints6 = defaultHints {
addrFlags = [AI_PASSIVE, AI_NUMERICSERV,
AI_ADDRCONFIG, AI_V4MAPPED]
, addrFamily = AF_INET6
, addrSocketType = Stream
}
hints4 = hints6 { addrFamily = AF_INET }
gai :: AddrInfo -> IO [AddrInfo]
gai hints = getAddrInfo (Just hints) host (Just $ show port)
in try (gai hints6) >>=
either (\e -> let _ = e :: IOException in gai hints4) return
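-- (Note on the fallback above: on hosts without IPv6 support, 'getAddrInfo'
-- with the AF_INET6 hints throws an IOException, so the lookup is retried with
-- the plain AF_INET hints instead.)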
createTCPListener :: Listener -> ConnectionHandler -> IO Listener
createTCPListener listener handler = do
let TCP host port = listenerPoint listener
(ai:_) <- serverAddrInfo host port
let mkSocket = socket (addrFamily ai) (addrSocketType ai) (addrProtocol ai)
bracketOnError mkSocket close $ \sock -> do
setSocketOption sock ReuseAddr 1
sock `bind` addrAddress ai
sock `listen` maxListenQueue
boundPort <- socketPort sock
acceptThread <- forkIO $ acceptConnections sock handler
return listener {
listenerPoint = TCP host boundPort
, listenerCancel = killThread acceptThread >> close sock
}
acceptConnections :: Socket -> ConnectionHandler -> IO ()
acceptConnections sock handler =
forever $ mask $ \restore -> do
(conn, addr) <- accept sock
forkIO $ restore (serveConnection conn addr handler) `finally`
(try $ close conn :: IO (Either SomeException ()))
serveConnection :: Socket -> SockAddr -> ConnectionHandler -> IO ()
serveConnection sock peerAddr connectionHandler = do
setSocketOption sock KeepAlive 1
peerName <- addrName peerAddr
localPort <- socketPort sock
let connectionName :: STM String
connectionName = do
peerHost <- hostName peerName
return $ "port " ++ show localPort ++ " from " ++
peerHost ++ ", port " ++ addrPort peerName
input = fromSocket sock maxBufferSize
output = toSocket sock
connectionHandler connectionName (input, output)
data AddrName = AddrName {
addrHostName :: TMVar (Maybe HostName)
, addrNumeric :: HostName
, addrPort :: ServiceName
}
addrName :: SockAddr -> IO AddrName
addrName addr = do
nameVar <- newEmptyTMVarIO
forkIO $ do
maybeHost <- try (fst <$> getNameInfo [NI_NAMEREQD] True False addr) >>=
either (\except -> let _ = except :: SomeException
in return Nothing) return
atomically $ putTMVar nameVar maybeHost
(Just numericHost, Just port) <-
getNameInfo [NI_NUMERICHOST, NI_NUMERICSERV] True True addr
return $ AddrName nameVar numericHost port
hostName :: AddrName -> STM HostName
hostName addr = fromMaybe (addrNumeric addr) <$> readTMVar (addrHostName addr)
|
verement/etamoo
|
src/MOO/Network/TCP.hs
|
bsd-3-clause
| 4,361 | 0 | 18 | 1,224 | 1,183 | 637 | 546 | 94 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Protocol.ROC.PointTypes.PointType56 where
import Data.Binary.Get (getByteString,
getWord8,
getWord16le,
Get)
import Data.ByteString (ByteString)
import Data.Int (Int16)
import Data.Word (Word8,Word16)
import Prelude (($),
return,
Eq,
Float,
Read,
Show)
import Protocol.ROC.Float (getIeeeFloat32)
import Protocol.ROC.Utils (getInt16)
data PointType56 = PointType56 {
pointType56PointTag :: !PointType56PointTag
,pointType56CalibZeroADValue :: !PointType56CalibZeroADValue
,pointType56CalibMdpnt1ADValue :: !PointType56CalibMdpnt1ADValue
,pointType56CalibMdpnt2ADValue :: !PointType56CalibMdpnt2ADValue
,pointType56CalibMdpnt3ADValue :: !PointType56CalibMdpnt3ADValue
,pointType56CalibSpanADValue :: !PointType56CalibSpanADValue
,pointType56CalibZeroEUValue :: !PointType56CalibZeroEUValue
,pointType56CalibMdpnt1EUValue :: !PointType56CalibMdpnt1EUValue
,pointType56CalibMdpnt2EUValue :: !PointType56CalibMdpnt2EUValue
,pointType56CalibMdpnt3EUValue :: !PointType56CalibMdpnt3EUValue
,pointType56CalibSpanEUValue :: !PointType56CalibSpanEUValue
,pointType56Offset :: !PointType56Offset
,pointType56CalibSetEUValue :: !PointType56CalibSetEUValue
,pointType56ManualEU :: !PointType56ManualEU
,pointType56CalibTime :: !PointType56CalibTime
,pointType56CalibMode :: !PointType56CalibMode
,pointType56CalibType :: !PointType56CalibType
} deriving (Read,Eq, Show)
type PointType56PointTag = ByteString
type PointType56CalibZeroADValue = Int16
type PointType56CalibMdpnt1ADValue = Int16
type PointType56CalibMdpnt2ADValue = Int16
type PointType56CalibMdpnt3ADValue = Int16
type PointType56CalibSpanADValue = Int16
type PointType56CalibZeroEUValue = Int16
type PointType56CalibMdpnt1EUValue = Float
type PointType56CalibMdpnt2EUValue = Float
type PointType56CalibMdpnt3EUValue = Float
type PointType56CalibSpanEUValue = Float
type PointType56Offset = Float
type PointType56CalibSetEUValue = Float
type PointType56ManualEU = Float
type PointType56CalibTime = Word16
type PointType56CalibMode = Word8
type PointType56CalibType = Word8
pointType56Parser :: Get PointType56
pointType56Parser = do
pointTag <- getByteString 10
calibZeroADValue <- getInt16
calibMdpnt1ADValue <- getInt16
calibMdpnt2ADValue <- getInt16
calibMdpnt3ADValue <- getInt16
calibSpanADValue <- getInt16
calibZeroEUValue <- getInt16
calibMdpnt1EUValue <- getIeeeFloat32
calibMdpnt2EUValue <- getIeeeFloat32
calibMdpnt3EUValue <- getIeeeFloat32
calibSpanEUValue <- getIeeeFloat32
offset <- getIeeeFloat32
calibSetEUValue <- getIeeeFloat32
manualEU <- getIeeeFloat32
calibTime <- getWord16le
calibMode <- getWord8
calibType <- getWord8
return $ PointType56 pointTag calibZeroADValue calibMdpnt1ADValue calibMdpnt2ADValue calibMdpnt3ADValue calibSpanADValue calibZeroEUValue calibMdpnt1EUValue calibMdpnt2EUValue
calibMdpnt3EUValue calibSpanEUValue offset calibSetEUValue manualEU calibTime calibMode calibType
|
plow-technologies/roc-translator
|
src/Protocol/ROC/PointTypes/PointType56.hs
|
bsd-3-clause
| 5,018 | 0 | 9 | 2,351 | 531 | 299 | 232 | 108 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Data.BitVector.UtilLE
-- Copyright : (c) 2010 Philip Weaver
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability :
-- Portability :
--
-- (Description)
--------------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.BitVector.UtilLE
( module Data.BitVector.Util,
bv_endianness,
bitsToNum, numToBits
) where
import Data.Bit
import Data.Bits
import Data.BitVector.Util
import Data.Endianness
----------------------------------------
bv_endianness :: Endianness
bv_endianness = LittleEndian
----------------------------------------
-- TODO make these work for any Boolean type (Bool, Bit, etc.)
bitsToNum :: forall a . (Bits a, Num a) => [Bit] -> Maybe a
bitsToNum vs = f 0 0 vs
where
f :: Int -> a -> [Bit] -> Maybe a
f _ acc [] = Just acc
f ix acc (x:xs) = case toBool x of
Just b -> let acc' = if b then setBit acc ix else acc
in acc' `seq` f (ix+1) acc' xs
_ -> Nothing
numToBits :: (Bits a, Num a) => Int -> a -> [Bit]
numToBits n i = map (fromBool . testBit i) [0..n-1]
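-- For example (little-endian, least significant bit first): numToBits 4 (5 :: Int)
-- corresponds to the bit pattern [True, False, True, False], wrapped as 'Bit'
-- values via 'fromBool'.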
----------------------------------------
|
pheaver/BitVector
|
Data/BitVector/UtilLE.hs
|
bsd-3-clause
| 1,382 | 0 | 15 | 323 | 316 | 178 | 138 | 22 | 4 |
-- | A helper module to lay out elements in columns or rows
module Graphics.Diagrams.Positioned.Layout
( pboxColumnLayout
, pboxRowLayout
)
where
-- External imports
import Data.List (mapAccumL)
-- Internal imports
import Graphics.Diagrams.Types
import Graphics.Diagrams.Positioned.PositionedDiagram
-- | Arranges boxes in a column
pboxColumnLayout :: Float -- ^ Initial top or base position
-> Float -- ^ Minimum column width
-> HAlign -- ^ Horizontal alignment
-> [PBox] -- ^ Boxes to arrange in column
-> [PBox] -- ^ Arranged boxes
pboxColumnLayout base w align bs =
snd $ mapAccumL (pboxColumnLayoutP boxSep width align) base bs
where width = max w (fst (pboxListSize bs))
-- | Arranges one box in a column, returning the new top and the modified box
pboxColumnLayoutP :: Float -> Float -> HAlign -> Float -> PBox -> (Float, PBox)
pboxColumnLayoutP left maxWidth align top box = (top', box')
where top' = top + boxSep + boxH
box' = box { pboxPosition = (left + newX, top) }
(boxW,boxH) = pboxSize box
-- Align
newX = case align of
HLeft -> 0
HCenter -> (maxWidth - boxW) / 2
HRight -> maxWidth - boxW
-- | Arranges boxes in a row
pboxRowLayout :: VAlign -- ^ Vertical alignment
-> [PBox] -- ^ List of boxes
-> [PBox] -- ^ Arranged boxes
pboxRowLayout align bs = snd $ mapAccumL (pboxRowLayoutP height align) 0 bs
where height = snd $ pboxListSize bs
-- | Arranges one box in a row, returning the new left and the modified box
pboxRowLayoutP :: Float -> VAlign -> Float -> PBox -> (Float, PBox)
pboxRowLayoutP maxHeight align left box = (left', box')
where left' = left + boxSep + 40 + boxW
box' = box { pboxPosition = (left, newY) }
(boxW,boxH) = pboxSize box
-- Align
newY = case align of
VTop -> maxHeight - boxH
VCenter -> (maxHeight - boxH) / 2
VBottom -> 0 -- maxHeight - boxH
|
ivanperez-keera/SoOSiM-ui
|
src/Graphics/Diagrams/Positioned/Layout.hs
|
bsd-3-clause
| 2,088 | 0 | 12 | 620 | 495 | 279 | 216 | 37 | 3 |
module Hans.Address.Mac (
Mac(..)
, showsMac
, macMask
) where
import Hans.Address
import Hans.Utils (showPaddedHex)
import Data.Serialize (Serialize(..))
import Data.Serialize.Get (getWord16be,getWord32be)
import Data.Serialize.Put (putByteString)
import Data.Bits (Bits(shiftR,testBit,complement))
import Data.List (intersperse)
import Data.Word (Word8)
import Numeric (readHex)
import qualified Data.ByteString as S
-- | Mac addresses.
data Mac = Mac
{-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8
deriving ( Eq, Ord )
-- | Show a Mac address.
showsMac :: Mac -> ShowS
showsMac (Mac a b c d e f) = foldl1 (.)
$ intersperse (showChar ':')
$ map showPaddedHex [a,b,c,d,e,f]
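-- For example (assuming 'showPaddedHex' pads to two lowercase hex digits):
-- show (Mac 0x00 0x1A 0x2B 0x3C 0x4D 0x5E) == "00:1a:2b:3c:4d:5e"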
-- | Generates a mask tailored to the given MAC address.
macMask :: Mac -> Mac
macMask (Mac a b c d e f) =
Mac (complement a)
(complement b)
(complement c)
(complement d)
(complement e)
(complement f)
instance Show Mac where
showsPrec _ = showsMac
instance Read Mac where
readsPrec _ = loop 6 []
where
loop :: Int -> [Word8] -> String -> [(Mac,String)]
loop 0 [f,e,d,c,b,a] str = [(Mac a b c d e f,str)]
loop 0 _ _ = []
loop n acc str = case readHex str of
[(a,':':rest)] -> loop (n-1) (a:acc) rest
[(a, rest)] -> loop 0 (a:acc) rest
_ -> []
instance Address Mac where
addrSize _ = 6
toBits (Mac a b c d e f) = concatMap k [a,b,c,d,e,f]
where k i = map (testBit i) [0 .. 7]
instance Serialize Mac where
get = do
n <- getWord32be
m <- getWord16be
let f x d = fromIntegral (x `shiftR` d)
return $! Mac (f n 24) (f n 16) (f n 8) (fromIntegral n)
(f m 8) (fromIntegral m)
put (Mac a b c d e f) = putByteString (S.pack [a,b,c,d,e,f])
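-- A brief usage sketch (comment only; the exact digit padding is whatever
-- 'showPaddedHex' produces):
--
-- > showsMac (Mac 0x00 0x1b 0x21 0xab 0xcd 0xef) ""
-- > -- yields a colon-separated string such as "00:1b:21:ab:cd:ef"
-- > read "00:1b:21:ab:cd:ef" :: Mac
-- > -- parses the same colon-separated format via 'readsPrec'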
|
Tener/HaNS
|
src/Hans/Address/Mac.hs
|
bsd-3-clause
| 1,947 | 0 | 13 | 558 | 827 | 452 | 375 | 57 | 1 |
main = do {putStrLn "Child Process"; putStrLn "Hello2";}
|
ankeshs/numerikell
|
interp/tmp/proc.hs
|
bsd-3-clause
| 56 | 0 | 7 | 7 | 22 | 11 | 11 | 1 | 1 |
module Main (main) where
import qualified Types.BotTypes as BT
import Control.Monad (forever)
import qualified Data.MarkovChain as MC
import Paths_Dikunt
import System.Environment (getArgs)
import System.IO (stdout, stdin, hSetBuffering, BufferMode(..))
import System.Random (newStdGen, StdGen)
import Text.Regex.PCRE ((=~))
import Data.Aeson (decode)
import qualified Data.Text.Lazy.IO as T
import qualified Data.Text.Lazy.Encoding as T
main :: IO ()
main = do
(nick:_) <- getArgs
hSetBuffering stdout LineBuffering
hSetBuffering stdin LineBuffering
trumpFile <- getDataFileName "data/trump.txt"
trumpData <- readFile trumpFile
forever $ do
line <- T.getLine
handleMessage nick trumpData $ (decode . T.encodeUtf8) line
handleMessage :: String -> String -> Maybe BT.ServerMessage -> IO ()
handleMessage nick trumpData (Just (BT.ServerPrivMsg BT.IRCUser{} _ msg))
| str =~ helpPattern = putStrLn $ help nick
| str =~ runPattern = newStdGen >>= \r -> putStrLn (trumpQuote trumpData r)
where
str = BT.getMessage msg
helpPattern = concat ["^", sp, nick, ":", ps, "trump", ps, "help", sp, "$"]
runPattern = concat ["^", sp, nick, ":", ps, "trump", sp, "$"]
sp = "[ \\t]*"
ps = "[ \\t]+"
handleMessage _ _ _ = return ()
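-- The regexes above accept messages of the following shape (a sketch, assuming
-- the hypothetical nick "dikunt"):
--
-- > "dikunt: trump help"   -- matches helpPattern
-- > "dikunt: trump"        -- matches runPattern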
help :: String -> String
help nick = unlines
[ nick ++ ": trump help - Display this help message."
, nick ++ ": trump - Output a markov random trump quote."
]
trumpQuote :: String -> StdGen -> String
trumpQuote trumpData gen =
"Friends, delegates and fellow Americans: " ++ sentence
where
ws = words trumpData
trumpText = MC.run 2 ws 0 gen
trumpText' = drop 1 $ dropWhile (notElem '.') trumpText
sentence = takeWhile ('.' /=) (unwords trumpText') ++ "."
|
bus000/Dikunt
|
plugins/Trump/Main.hs
|
bsd-3-clause
| 1,780 | 0 | 14 | 365 | 588 | 320 | 268 | 43 | 1 |
-- Propositional Logic. Atomic propositions are simply
-- propositional variables.
-- Signature
module Prop
( eval
, atoms
, apply
, distrib
, simplify
, simplify1
, onallvaluations
, dual
, truthtable
, unsatisfiable
, satisfiable
, trivial
, tautology
, occurrences
, subsume
, nnf
, nenf
, simpcnf
, cnf
, purecnf
, purednf
, simpdnf
, dnf
)
where
-- Imports
import Util.Prelude
import qualified Formula as F
import FormulaSyn
import qualified Util.List as List
import qualified Util.ListSet as Set
import Util.ListSet ((∪))
import qualified Util.Print as PP
import qualified Data.Map as Map
import Data.Map(Map)
-- Propositions
class Apply a where
apply :: Map Rel Formula -> a -> a
instance Apply Formula where
apply env = F.onatoms (\p -> case Map.lookup p env of
Just p' -> p'
Nothing -> Atom p)
-- Evaluate a formula in a mapping of variables to truth values.
eval :: Formula -> (Rel -> Bool) -> Bool
eval fm v = case fm of
[form| ⊤ |] -> True
[form| ⊥ |] -> False
[form| ^a |] -> v a
[form| ¬ $p |] -> not (eval p v)
[form| $p ∧ $q |] -> eval p v && eval q v
[form| $p ∨ $q |] -> eval p v || eval q v
[form| $p ⊃ $q |] -> not (eval p v) || (eval q v)
[form| $p ⇔ $q |] -> eval p v == eval q v
_ -> error "quantifier in prop eval"
-- Return all atoms in a formula.
atoms :: Formula -> [Rel]
atoms = List.sort . F.atomUnion (\x -> [x])
-- Valuation combinator.
-- A valuation is a mapping from atoms to truth values.
onallvaluations :: Eq a => ((a -> Bool) -> b) -> (b -> b -> b)
-> (a -> Bool) -> [a] -> b
onallvaluations subfn comb v pvs = case pvs of
[] -> subfn v
p:ps -> let v' t q = if q == p then t else v q in
comb (onallvaluations subfn comb (v' False) ps)
(onallvaluations subfn comb (v' True) ps)
-- Truthtables.
truthtable :: Formula -> String
truthtable fm = PP.render (truthtableDoc fm)
where
truthtableDoc fm' =
let pvs = atoms fm'
width = foldr (max . length . show) 5 pvs + 1
fixw s = s ++ replicate (width - length s) ' '
truthstring p = fixw (if p then "⊤" else "⊥")
separator = replicate (width * length pvs + 9) '-'
row v =
let lis = map (truthstring . v) pvs
ans = truthstring(eval fm' v)
in [lis ++ [ans]]
rows = onallvaluations row (++) (const False) pvs
rowStr r = let (lis, ans)::([String], [String]) = splitAt (length r - 1) r
in (foldr (++) ("| " ++ head ans) lis)
in PP.vcat [ PP.text (foldr (\s t -> fixw(show s) ++ t) "| formula" pvs)
, PP.empty
, PP.text separator
, PP.empty
, PP.vcat (map (PP.text . rowStr) rows)
, PP.text separator
]
-- Tautologies
tautology :: Formula -> Bool
tautology fm = onallvaluations (eval fm) (&&) (const False) (atoms fm)
-- Satisfiability
unsatisfiable :: Formula -> Bool
unsatisfiable = tautology . Not
satisfiable :: Formula -> Bool
satisfiable = not . unsatisfiable
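-- Examples, in the style of the sample queries at the end of this file
-- (they assume the [form| ... |] quasiquoter used throughout this module):
-- tautology [form| p ∨ ¬ p |] evaluates to True
-- satisfiable [form| p ∧ ¬ p |] evaluates to False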
-- Duality
dual :: Formula -> Formula
dual fm = case fm of
[form| ⊥ |] -> (⊤)
[form| ⊤ |] -> (⊥)
[form| ^_ |] -> fm
[form| ¬ $p |] -> (¬) $ dual p
[form| $p ∧ $q |] -> p' ∨ q'
where p' = dual p
q' = dual q
[form| $p ∨ $q |] -> p' ∧ q'
where p' = dual p
q' = dual q
_ -> error "Formula involves connectives ⊃ and ⇔"
-- Simplification
simplify :: Formula -> Formula
simplify fm = case fm of
[form| ¬ $p |] -> simplify1 $ (¬) p'
where p' = simplify p
[form| $p ∧ $q |] -> simplify1 $ p' ∧ q'
where p' = simplify p
q' = simplify q
[form| $p ∨ $q |] -> simplify1 $ p' ∨ q'
where p' = simplify p
q' = simplify q
[form| $p ⊃ $q |] -> simplify1 $ p' ⊃ q'
where p' = simplify p
q' = simplify q
[form| $p ⇔ $q |] -> simplify1 $ p' ⇔ q'
where p' = simplify p
q' = simplify q
_ -> fm
-- The order of the following clauses makes a big difference.
simplify1 :: Formula -> Formula
simplify1 fm = case fm of
[form| ¬ ⊥ |] -> (⊤)
[form| ¬ ⊤ |] -> (⊥)
[form| ¬ ¬ $p |] -> p
[form| ⊥ ∧ _ |] -> (⊥)
[form| _ ∧ ⊥ |] -> (⊥)
[form| ⊤ ∧ $q |] -> q
[form| $p ∧ ⊤ |] -> p
[form| ⊥ ∨ $q |] -> q
[form| $p ∨ ⊥ |] -> p
[form| ⊤ ∨ _ |] -> (⊤)
[form| _ ∨ ⊤ |] -> (⊤)
[form| ⊥ ⊃ _ |] -> (⊤)
[form| _ ⊃ ⊤ |] -> (⊤)
[form| ⊤ ⊃ $q |] -> q
[form| $p ⊃ ⊥ |] -> (¬) p
[form| ⊤ ⇔ $q |] -> q
[form| $p ⇔ ⊤ |] -> p
[form| ⊥ ⇔ ⊥ |] -> (⊤)
[form| ⊥ ⇔ $q |] -> (¬) q
[form| $p ⇔ ⊥ |] -> (¬) p
_ -> fm
-- Negation normal form
nnf :: Formula -> Formula
nnf = nnf' . simplify
nnf' :: Formula -> Formula
nnf' fm = case fm of
[form| $p ∧ $q |] -> p' ∧ q'
where p' = nnf' p
q' = nnf' q
[form| $p ∨ $q |] -> p' ∨ q'
where p' = nnf' p
q' = nnf' q
[form| $p ⊃ $q |] -> np' ∨ q'
where np' = nnf' $ (¬) p
q' = nnf' q
[form| $p ⇔ $q |] -> p' ∧ q' ∨ p'' ∧ q''
where p' = nnf' p
q' = nnf' q
p'' = nnf' $ (¬) p
q'' = nnf' $ (¬) q
[form| ¬ ¬ $p |] -> nnf' p
[form| ¬ ($p ∧ $q) |] -> p' ∨ q'
where p' = nnf' $ (¬) p
q' = nnf' $ (¬) q
[form| ¬ ($p ∨ $q) |] -> p' ∧ q'
where p' = nnf' $ (¬) p
q' = nnf' $ (¬) q
[form| ¬ ($p ⊃ $q) |] -> p' ∧ q'
where p' = nnf' p
q' = nnf' $ (¬) q
[form| ¬ ($p ⇔ $q) |] -> p' ∧ q'' ∨ p'' ∧ q'
where p' = nnf' p
q' = nnf' q
p'' = nnf' $ (¬) p
q'' = nnf' $ (¬) q
_ -> fm
nenf :: Formula -> Formula
nenf = nenf' . simplify
nenf' :: Formula -> Formula
nenf' fm = case fm of
[form| ¬¬$p |] -> nenf' p
[form| ¬($p ∧ $q) |] -> [form| $p' ∨ $q' |]
where p' = nenf' [form| ¬ $p |]
q' = nenf' [form| ¬ $q |]
[form| ¬($p ∨ $q) |] -> [form| $p' ∧ $q' |]
where p' = nenf' [form| ¬ $p |]
q' = nenf' [form| ¬ $q |]
[form| ¬($p ⊃ $q) |] -> [form| $p' ∧ $q' |]
where p' = nenf' p
q' = nenf' [form| ¬ $q |]
[form| ¬($p ⇔ $q) |] -> [form| $p' ⇔ $q' |]
where p' = nenf' p
q' = nenf' [form| ¬ $q |]
[form| $p ∧ $q |] -> [form| $p' ∧ $q' |]
where p' = nenf' p
q' = nenf' q
[form| $p ∨ $q |] -> [form| $p' ∨ $q' |]
where p' = nenf' p
q' = nenf' q
[form| $p ⊃ $q |] -> [form| $p' ∨ $q' |]
where p' = nenf' [form| ¬ $p |]
q' = nenf' q
[form| $p ⇔ $q |] -> [form| $p' ⇔ $q' |]
where p' = nenf' p
q' = nenf' q
_ -> fm
-- Positive and negative occurrences of atoms
occurrences :: Rel -> Formula -> (Bool, Bool)
occurrences x fm = case fm of
[form| ^y |] -> (x == y, False)
[form| ¬ $p |] -> (neg, pos)
where (pos, neg) = occurrences x p
[form| $p ∧ $q |] -> (pos1 || pos2, neg1 || neg2)
where (pos1, neg1) = occurrences x p
(pos2, neg2) = occurrences x q
[form| $p ∨ $q |] -> (pos1 || pos2, neg1 || neg2)
where (pos1, neg1) = occurrences x p
(pos2, neg2) = occurrences x q
[form| $p ⊃ $q |] -> (neg1 || pos2, pos1 || neg2)
where (pos1, neg1) = occurrences x p
(pos2, neg2) = occurrences x q
[form| $p ⇔ $q |] -> if pos1 || pos2 || neg1 || neg2
then (True, True) else (False, False)
where (pos1, neg1) = occurrences x p
(pos2, neg2) = occurrences x q
_ -> (False, False)
-- Distribute clauses
distrib :: [[Formula]] -> [[Formula]] -> [[Formula]]
distrib = List.allPairs Set.union
-- Subsumption
subsume :: [[Formula]] -> [[Formula]]
subsume cls =
filter (\cl -> not(any (\cl' -> Set.psubset cl' cl) cls)) cls
-- Disjunctive normal form
dnf :: Formula -> Formula
dnf f = --trace' "dnf: in" (pPrint f) $
let f' = (F.listDisj . map F.listConj . simpdnf) f in
--trace' "dnf: out" (pPrint f') $ f'
f'
simpdnf :: Formula -> [[Formula]]
simpdnf Bot = []
simpdnf Top = [[]]
simpdnf fm = (subsume . filter (not.trivial) . purednf . nnf) fm
purednf :: Formula -> [[Formula]]
purednf fm = case fm of
And p q -> distrib (purednf p) (purednf q)
Or p q -> purednf p ∪ purednf q
_ -> [[fm]]
trivial :: [Formula] -> Bool
trivial lits =
let (pos, neg) = List.partition F.positive lits in
Set.intersect pos (map F.opp neg) /= []
-- Conjunctive normal form
cnf :: Formula -> Formula
cnf = F.listConj . map F.listDisj . simpcnf
simpcnf :: Formula -> [[Formula]]
simpcnf Bot = [[]]
simpcnf Top = []
simpcnf fm =
let cjs = filter (not . trivial) (purecnf $ nnf fm) in
filter (\c -> not $ any (\c' -> Set.psubset c' c) cjs) cjs
purecnf :: Formula -> [[Formula]]
purecnf = map (map F.opp) . (purednf . nnf . Not)
-- nnf [form| p ⇔ (q ⇔ r) |]
-- cnf [form| p ⇔ (q ⇔ r) |]
-- dnf [form| p ⇔ (q ⇔ r) |]
|
etu-fkti5301-bgu/alt-exam_automated_theorem_proving
|
src/Prop.hs
|
bsd-3-clause
| 9,142 | 0 | 19 | 2,849 | 3,468 | 1,990 | 1,478 | -1 | -1 |
{-- snippet whichFruit --}
data Fruit = Apple | Orange
apple = "apple"
orange = "orange"
whichFruit :: String -> Fruit
whichFruit f = case f of
apple -> Apple
orange -> Orange
{-- /snippet whichFruit --}
{-- snippet equational --}
equational apple = Apple
equational orange = Orange
{-- /snippet equational --}
{-- snippet betterFruit --}
betterFruit f = case f of
"apple" -> Apple
"orange" -> Orange
{-- /snippet betterFruit --}
|
binesiyu/ifl
|
examples/ch03/BogusPattern.hs
|
mit
| 547 | 0 | 7 | 190 | 100 | 55 | 45 | 12 | 2 |
{-# LANGUAGE TypeSynonymInstances, OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Yesod.Core.Json
( -- * Convert from a JSON value
defaultLayoutJson
, jsonToRepJson
, returnJson
, returnJsonEncoding
, provideJson
-- * Convert to a JSON value
, parseJsonBody
, parseCheckJsonBody
, parseJsonBody_
, requireJsonBody
, requireCheckJsonBody
-- * Produce JSON values
, J.Value (..)
, J.ToJSON (..)
, J.FromJSON (..)
, array
, object
, (.=)
, (J..:)
-- * Convenience functions
, jsonOrRedirect
, jsonEncodingOrRedirect
, acceptsJson
) where
import Yesod.Core.Handler (HandlerFor, getRequest, invalidArgs, redirect, selectRep, provideRep, rawRequestBody, ProvidedRep, lookupHeader)
import Control.Monad.Trans.Writer (Writer)
import Data.Monoid (Endo)
import Yesod.Core.Content (TypedContent)
import Yesod.Core.Types (reqAccept)
import Yesod.Core.Class.Yesod (defaultLayout, Yesod)
import Yesod.Core.Class.Handler
import Yesod.Core.Widget (WidgetFor)
import Yesod.Routes.Class
import qualified Data.Aeson as J
import qualified Data.Aeson.Parser as JP
import Data.Aeson ((.=), object)
import Data.Conduit.Attoparsec (sinkParser)
import Data.Text (pack)
import qualified Data.Vector as V
import Data.Conduit
import Data.Conduit.Lift
import qualified Data.ByteString.Char8 as B8
import Data.Maybe (listToMaybe)
import Control.Monad (liftM)
-- | Provide both an HTML and JSON representation for a piece of
-- data, using the default layout for the HTML output
-- ('defaultLayout').
--
-- @since 0.3.0
defaultLayoutJson :: (Yesod site, J.ToJSON a)
=> WidgetFor site () -- ^ HTML
-> HandlerFor site a -- ^ JSON
-> HandlerFor site TypedContent
defaultLayoutJson w json = selectRep $ do
provideRep $ defaultLayout w
provideRep $ fmap J.toEncoding json
-- | Wraps a data type in a 'RepJson'. The data type must
-- support conversion to JSON via 'J.ToJSON'.
--
-- @since 0.3.0
jsonToRepJson :: (Monad m, J.ToJSON a) => a -> m J.Value
jsonToRepJson = return . J.toJSON
{-# DEPRECATED jsonToRepJson "Use returnJson instead" #-}
-- | Convert a value to a JSON representation via aeson\'s 'J.toJSON' function.
--
-- @since 1.2.1
returnJson :: (Monad m, J.ToJSON a) => a -> m J.Value
returnJson = return . J.toJSON
-- | Convert a value to a JSON representation via aeson\'s 'J.toEncoding' function.
--
-- @since 1.4.21
returnJsonEncoding :: (Monad m, J.ToJSON a) => a -> m J.Encoding
returnJsonEncoding = return . J.toEncoding
-- | Provide a JSON representation for usage with 'selectReps', using aeson\'s
-- 'J.toJSON' (aeson >= 0.11: 'J.toEncoding') function to perform the conversion.
--
-- @since 1.2.1
provideJson :: (Monad m, J.ToJSON a) => a -> Writer (Endo [ProvidedRep m]) ()
provideJson = provideRep . return . J.toEncoding
-- | Parse the request body to a data type as a JSON value. The
-- data type must support conversion from JSON via 'J.FromJSON'.
-- If you want the raw JSON value, just ask for a @'J.Result'
-- 'J.Value'@.
--
-- Note that this function will consume the request body. As such, calling it
-- twice will result in a parse error on the second call, since the request
-- body will no longer be available.
--
-- @since 0.3.0
parseJsonBody :: (MonadHandler m, J.FromJSON a) => m (J.Result a)
parseJsonBody = do
eValue <- runConduit $ rawRequestBody .| runCatchC (sinkParser JP.value')
return $ case eValue of
Left e -> J.Error $ show e
Right value -> J.fromJSON value
-- | Same as 'parseJsonBody', but ensures that the mime type indicates
-- JSON content.
parseCheckJsonBody :: (MonadHandler m, J.FromJSON a) => m (J.Result a)
parseCheckJsonBody = do
mct <- lookupHeader "content-type"
case fmap (B8.takeWhile (/= ';')) mct of
Just "application/json" -> parseJsonBody
_ -> return $ J.Error $ "Non-JSON content type: " ++ show mct
-- | Same as 'parseJsonBody', but return an invalid args response on a parse
-- error.
parseJsonBody_ :: (MonadHandler m, J.FromJSON a) => m a
parseJsonBody_ = requireJsonBody
{-# DEPRECATED parseJsonBody_ "Use requireJsonBody instead" #-}
-- | Same as 'parseJsonBody', but return an invalid args response on a parse
-- error.
requireJsonBody :: (MonadHandler m, J.FromJSON a) => m a
requireJsonBody = do
ra <- parseJsonBody
case ra of
J.Error s -> invalidArgs [pack s]
J.Success a -> return a
-- | Same as 'requireJsonBody', but ensures that the mime type
-- indicates JSON content.
requireCheckJsonBody :: (MonadHandler m, J.FromJSON a) => m a
requireCheckJsonBody = do
ra <- parseCheckJsonBody
case ra of
J.Error s -> invalidArgs [pack s]
J.Success a -> return a
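-- A minimal handler sketch (hypothetical: a scaffolded site where 'Handler'
-- and 'Value' are in scope, and a 'Person' type with 'FromJSON'/'ToJSON'
-- instances is defined elsewhere):
--
-- > postPersonR :: Handler Value
-- > postPersonR = do
-- >     person <- requireCheckJsonBody :: Handler Person
-- >     returnJson person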
-- | Convert a list of values to an 'J.Array'.
array :: J.ToJSON a => [a] -> J.Value
array = J.Array . V.fromList . map J.toJSON
-- | jsonOrRedirect simplifies the scenario where a POST handler sends a different
-- response based on Accept headers:
--
-- 1. 200 with JSON data if the client prefers
-- @application\/json@ (e.g. AJAX, see 'acceptsJson').
--
-- 2. 3xx otherwise, following the PRG pattern.
jsonOrRedirect :: (MonadHandler m, J.ToJSON a)
=> Route (HandlerSite m) -- ^ Redirect target
-> a -- ^ Data to send via JSON
-> m J.Value
jsonOrRedirect = jsonOrRedirect' J.toJSON
-- | jsonEncodingOrRedirect simplifies the scenario where a POST handler sends a different
-- response based on Accept headers:
--
-- 1. 200 with JSON data if the client prefers
-- @application\/json@ (e.g. AJAX, see 'acceptsJson').
--
-- 2. 3xx otherwise, following the PRG pattern.
-- @since 1.4.21
jsonEncodingOrRedirect :: (MonadHandler m, J.ToJSON a)
=> Route (HandlerSite m) -- ^ Redirect target
-> a -- ^ Data to send via JSON
-> m J.Encoding
jsonEncodingOrRedirect = jsonOrRedirect' J.toEncoding
jsonOrRedirect' :: MonadHandler m
=> (a -> b)
-> Route (HandlerSite m) -- ^ Redirect target
-> a -- ^ Data to send via JSON
-> m b
jsonOrRedirect' f r j = do
q <- acceptsJson
if q then return (f j)
else redirect r
-- | Returns @True@ if the client prefers @application\/json@ as
-- indicated by the @Accept@ HTTP header.
acceptsJson :: MonadHandler m => m Bool
acceptsJson = (maybe False ((== "application/json") . B8.takeWhile (/= ';'))
. listToMaybe
. reqAccept)
`liftM` getRequest
|
s9gf4ult/yesod
|
yesod-core/Yesod/Core/Json.hs
|
mit
| 6,644 | 0 | 13 | 1,482 | 1,352 | 761 | 591 | 114 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -Wall #-}
{- |
Module : Core
Description : Abstract syntax and pretty printer for Core.
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Zhiyuan Shi <[email protected]>, Haoyuan Zhang <[email protected]>
Stability : experimental
Portability : portable
-}
module Core
( Type(..)
, Expr(..)
, Alt(..)
, TypeContext
, ValueContext
, Index
, Constructor(..)
, DataBind(..)
, Definition(..)
, alphaEq
, mapTVar
, mapVar
, fsubstTT
, fsubstTE
, fsubstEE
, joinType
, tVar
, Core.forall
, var
, lam
, fix
, bLam
, prettyType
, prettyExpr
, javaInt
) where
import qualified Src
import JavaUtils
import PrettyUtils
import Control.Arrow (second)
import Data.List (intersperse)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Language.Java.Pretty (prettyPrint)
import Prelude hiding ((<$>))
import Text.PrettyPrint.ANSI.Leijen
data Type t
= TVar Src.Name t -- a
| JClass ClassName -- C
| Fun (Type t) (Type t) -- t1 -> t2
| Forall Src.Name (t -> Type t) -- forall a. t
| Product [Type t] -- (t1, ..., tn)
| Unit
| Datatype Src.Name [Type t] [Src.Name]
data Definition t e = Def Src.Name Src.Type (Expr t e) (e -> Definition t e)
| DefRec [Src.Name] [(Src.Type, Type t)] ([e] -> [Expr t e]) ([e] -> Definition t e)
| Null
data Expr t e
= Var Src.Name e
| Lit Src.Lit
-- Binders we have: λ, fix, letrec, and Λ
| Lam Src.Name (Type t) (e -> Expr t e)
| Fix Src.Name Src.Name
(e -> e -> Expr t e)
(Type t) -- t1
(Type t) -- t
-- fix x (x1 : t1) : t. e Syntax in the tal-toplas paper
  -- fix (x : t1 -> t). \x1. e Alternative syntax, which is arguably clearer
-- <name>: Fix funcName paraName func paraType returnType
| Let Src.Name (Expr t e) (e -> Expr t e)
| LetRec [Src.Name] -- Names
[Type t] -- Signatures
([e] -> [Expr t e]) -- Bindings
([e] -> Expr t e) -- Body
| BLam Src.Name (t -> Expr t e)
| App (Expr t e) (Expr t e)
| TApp (Expr t e) (Type t)
| If (Expr t e) (Expr t e) (Expr t e)
| PrimOp (Expr t e) Src.Operator (Expr t e)
-- SystemF extension from:
-- https://www.cs.princeton.edu/~dpw/papers/tal-toplas.pdf
-- (no int restriction)
| Tuple [Expr t e] -- Tuple introduction
| Proj Int (Expr t e) -- Tuple elimination
-- Module
| Module (Maybe Src.PackageName) (Definition t e)
-- Java
| JNew ClassName [Expr t e]
| JMethod (Src.JReceiver (Expr t e)) MethodName [Expr t e] ClassName
| JField (Src.JReceiver (Expr t e)) FieldName (Type t)
| Seq [Expr t e]
| Data Src.RecFlag [DataBind t] (Expr t e)
| ConstrOut (Constructor t) [Expr t e]
| Case (Expr t e) [Alt t e]
| Error (Type t) (Expr t e)
data DataBind t = DataBind Src.Name [Src.Name] ([t] -> [Constructor t])
data Alt t e = ConstrAlt (Constructor t) (Expr t e)
| Default (Expr t e)
data Constructor t = Constructor {constrName :: Src.Name, constrParams :: [Type t]}
type TypeContext t = Set.Set t
type ValueContext t e = Map.Map e (Type t)
type Index = Int
alphaEq :: Int -> Type Index -> Type Index -> Bool
alphaEq _ (TVar _ a) (TVar _ b) = a == b
alphaEq _ (JClass c) (JClass d) = c == d
alphaEq i (Fun s1 s2) (Fun t1 t2) = alphaEq i s1 t1 && alphaEq i s2 t2
alphaEq i (Forall _ f) (Forall _ g) = alphaEq (succ i) (f i) (g i)
alphaEq i (Product ss) (Product ts) = length ss == length ts && uncurry (alphaEq i) `all` zip ss ts
alphaEq _ Unit Unit = True
alphaEq _ _ _ = False
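-- A small sanity check (comment only): two universal types that differ only
-- in their printed binder names are alpha-equivalent.
--
-- > alphaEq 0 (Forall "a" (\a -> Fun (tVar a) (tVar a)))
-- >           (Forall "b" (\b -> Fun (tVar b) (tVar b)))
-- > -- ==> True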
mapTVar :: (Src.Name -> t -> Type t) -> Type t -> Type t
mapTVar g (TVar n a) = g n a
mapTVar _ (JClass c) = JClass c
mapTVar g (Fun t1 t2) = Fun (mapTVar g t1) (mapTVar g t2)
mapTVar g (Forall n f) = Forall n (mapTVar g . f)
mapTVar g (Product ts) = Product (map (mapTVar g) ts)
mapTVar _ Unit = Unit
mapTVar g (Datatype n ts ns) = Datatype n (map (mapTVar g) ts) ns
mapVar :: (Src.Name -> e -> Expr t e) -> (Type t -> Type t) -> Expr t e -> Expr t e
mapVar g _ (Var n a) = g n a
mapVar _ _ (Lit n) = Lit n
mapVar g h (Lam n t f) = Lam n (h t) (mapVar g h . f)
mapVar g h (BLam n f) = BLam n (mapVar g h . f)
mapVar g h (Fix n1 n2 f t1 t) = Fix n1 n2 (\x x1 -> mapVar g h (f x x1)) (h t1) (h t)
mapVar g h (Let n b e) = Let n (mapVar g h b) (mapVar g h . e)
mapVar g h (LetRec ns ts bs e) = LetRec ns (map h ts) (map (mapVar g h) . bs) (mapVar g h . e)
mapVar g h (Data rec databinds e) = Data rec (map mapDatabind databinds) (mapVar g h e)
where mapDatabind (DataBind name params ctrs) = DataBind name params (map mapCtr. ctrs)
mapCtr (Constructor n ts) = Constructor n (map h ts)
mapVar g h (ConstrOut (Constructor n ts) es) = ConstrOut c' (map (mapVar g h) es)
where c' = Constructor n (map h ts)
mapVar g h (Case e alts) = Case (mapVar g h e) (map mapAlt alts)
where mapAlt (ConstrAlt (Constructor n ts) e1) = ConstrAlt (Constructor n (map h ts)) (mapVar g h e1)
mapAlt (Default e1) = Default (mapVar g h e1)
mapVar g h (App f e) = App (mapVar g h f) (mapVar g h e)
mapVar g h (TApp f t) = TApp (mapVar g h f) (h t)
mapVar g h (If p b1 b2) = If (mapVar g h p) (mapVar g h b1) (mapVar g h b2)
mapVar g h (PrimOp e1 op e2) = PrimOp (mapVar g h e1) op (mapVar g h e2)
mapVar g h (Tuple es) = Tuple (map (mapVar g h) es)
mapVar g h (Proj i e) = Proj i (mapVar g h e)
mapVar g h (JNew c args) = JNew c (map (mapVar g h) args)
mapVar g h (JMethod callee m args c) = JMethod (fmap (mapVar g h) callee) m (map (mapVar g h) args) c
mapVar g h (JField callee f c) = JField (fmap (mapVar g h) callee) f (h c)
mapVar g h (Seq es) = Seq (map (mapVar g h) es)
mapVar g h (Error ty str) = Error (h ty) (mapVar g h str)
mapVar g h (Module pname defs) = Module pname (mapVarDef defs)
where
-- Necessary?
mapVarDef (Def name typ expr def) = Def name typ (mapVar g h expr) (mapVarDef . def)
mapVarDef (DefRec names types exprs def) =
DefRec names (map (second h) types) (map (mapVar g h) . exprs) (mapVarDef . def)
mapVarDef Null = Null
fsubstTT :: Eq a => a -> Type a -> Type a -> Type a
fsubstTT x r = mapTVar (\n a -> if a == x then r else TVar n a)
fsubstTE :: Eq t => t -> Type t -> Expr t e -> Expr t e
fsubstTE x r = mapVar Var (fsubstTT x r)
fsubstEE :: Eq a => a -> Expr t a -> Expr t a -> Expr t a
fsubstEE x r = mapVar (\n a -> if a == x then r else Var n a) id
joinType :: Type (Type t) -> Type t
joinType (TVar _ a) = a
joinType (JClass c) = JClass c
joinType (Fun t1 t2) = Fun (joinType t1) (joinType t2)
joinType (Forall n g) = Forall n (joinType . g . TVar "_") -- Right?
joinType (Product ts) = Product (map joinType ts)
joinType Unit = Unit
joinType (Datatype n ts ns) = Datatype n (map joinType ts) ns
tVar :: t -> Type t
tVar = TVar "tv"
forall :: (t -> Type t) -> Type t
forall f = Forall "f" f
var :: e -> Expr t e
var = Var "v"
lam :: Type t -> (e -> Expr t e) -> Expr t e
lam = Lam "x"
fix :: (e -> e -> Expr t e) -> Type t -> Type t -> Expr t e
fix = Fix "f" "x"
bLam :: (t -> Expr t e) -> Expr t e
bLam = BLam "X"
-- instance Show (Type Index) where
-- show = show . pretty
-- instance Pretty (Type Index) where
-- pretty = prettyType
prettyType :: Type Index -> Doc
prettyType = prettyType' basePrec 0
prettyType' :: Prec -> Index -> Type Index -> Doc
prettyType' _ _ (TVar n _) = text n
prettyType' p i (Datatype n tvars _) = hsep $ text n : map (prettyType' p i) tvars
prettyType' p i (Fun t1 t2) =
parensIf p 2
(prettyType' (2,PrecPlus) i t1 <+> arrow <+> prettyType' (2,PrecMinus) i t2)
prettyType' p i (Forall n f) =
parensIf p 1
(PrettyUtils.forall <+> text n <> dot <+>
prettyType' (1,PrecMinus) (succ i) (f i))
prettyType' _ i (Product ts) = parens $ hcat (intersperse comma (map (prettyType' basePrec i) ts))
prettyType' _ _ Unit = text "Unit"
prettyType' _ _ (JClass "java.lang.Integer") = text "Int"
prettyType' _ _ (JClass "java.lang.String") = text "String"
prettyType' _ _ (JClass "java.lang.Boolean") = text "Bool"
prettyType' _ _ (JClass "java.lang.Character") = text "Char"
prettyType' _ _ (JClass c) = text c
-- instance Show (Expr Index Index) where
-- show = show . pretty
-- instance Pretty (Expr Index Index) where
-- pretty = prettyExpr
prettyDef :: Prec -> (Index, Index) -> Definition Index Index -> Doc
prettyDef _ (i, j) (Def fname typ e def) =
text fname <+> colon <+> pretty typ <+> equals <+> prettyExpr' basePrec (i, j + 1) e <> semi <$>
prettyDef basePrec (i, j+1) (def j) -- crappy pretty printer
prettyDef p (i, j) (DefRec names sigs binds def) = vcat (intersperse (text "and") pretty_binds) <> semi <$> pretty_body
where
n = length sigs
ids = [i .. (i + n) - 1]
pretty_ids = map text names
pretty_sigs = map (pretty . fst) sigs
pretty_defs = map (prettyExpr' p (i, j + n)) (binds ids)
pretty_binds = zipWith3
(\pretty_id pretty_sig pretty_def ->
pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
pretty_ids
pretty_sigs
pretty_defs
pretty_body = prettyDef p (i, j + n) (def ids)
prettyDef _ _ Null = text ""
prettyExpr :: Expr Index Index -> Doc
prettyExpr = prettyExpr' basePrec (0, 0)
prettyExpr' :: Prec -> (Index, Index) -> Expr Index Index -> Doc
prettyExpr' _ _ (Var n _) = text n
prettyExpr' p (i,j) (Lam n t f)
= parensIf p 2 $ group $ hang 2 $
lambda <+> parens (text n <+> colon <+> prettyType' basePrec i t) <+> text "->" <$>
prettyExpr' (2,PrecMinus) (i, j + 1) (f j)
prettyExpr' p (i,j) (App e1 e2)
= parensIf p 4 $
group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e1 <$> prettyExpr' (4,PrecPlus) (i,j) e2
prettyExpr' p (i,j) (BLam n f) =
parensIf p 2
(biglambda <+> text n <+> text "->" <+>
prettyExpr' (2,PrecMinus) (succ i, j) (f i))
prettyExpr' p (i,j) (TApp e t) =
parensIf p 4
(group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e <$> brackets (prettyType' basePrec i t))
prettyExpr' _ _ (Lit (Src.Int n)) = integer n
prettyExpr' _ _ (Lit (Src.String s)) = dquotes (string s)
prettyExpr' _ _ (Lit (Src.Bool b)) = bool b
prettyExpr' _ _ (Lit (Src.Char c)) = char c
prettyExpr' _ _ (Lit Src.UnitLit) = unit
prettyExpr' p (i,j) (If e1 e2 e3)
= parensIf p prec
(hang 3 (text "if" <+> prettyExpr' (prec,PrecMinus) (i,j) e1 <+>
text "then" <+> prettyExpr' (prec,PrecMinus) (i,j) e2 <+>
text "else" <+> prettyExpr' (prec,PrecMinus) (i,j) e3))
where prec = 3
prettyExpr' p (i,j) (PrimOp e1 op e2)
= parens (prettyExpr' p (i,j) e1 <+> pretty_op <+> prettyExpr' p (i,j) e2)
where
pretty_op = text (Language.Java.Pretty.prettyPrint java_op)
java_op = case op of
Src.Arith op' -> op'
Src.Compare op' -> op'
Src.Logic op' -> op'
prettyExpr' _ (i,j) (Tuple es) = tupled (map (prettyExpr' basePrec (i,j)) es)
prettyExpr' p i (Proj n e) =
parensIf p 5
(prettyExpr' (5,PrecMinus) i e <> dot <> char '_' <> int n)
prettyExpr' p i (Module pname defs) =
maybe empty ((text "package" <+>) . pretty) pname <$> text "module" <> semi <$> prettyDef p i defs
prettyExpr' _ (i,j) (JNew c args) =
parens (text "new" <+> text c <> tupled (map (prettyExpr' basePrec (i,j)) args))
prettyExpr' _ i (JMethod name m args _) = methodStr name <> dot <> text m <> tupled (map (prettyExpr' basePrec i) args)
where
methodStr (Src.Static x) = text x
methodStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' _ i (JField name f _) = fieldStr name <> dot <> text f
where
fieldStr (Src.Static x) = text x
fieldStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' p i (Error _ str) = text "error:" <+> prettyExpr' p i str
prettyExpr' p (i,j) (Seq es) = semiBraces (map (prettyExpr' p (i,j)) es)
prettyExpr' p (i,j) (Fix n1 n2 f t1 t)
= parens $ group $ hang 2 $
text "fix" <+> text n1 <+>
parens (text n2 <+> colon <+> prettyType' p i t1) <+>
colon <+> prettyType' p i t <> dot <$>
prettyExpr' p (i, j + 2) (f j (j + 1))
prettyExpr' _ (i,j) (Let n b e) =
text "let" <+> text n <+> equals <+> prettyExpr' basePrec (i, j + 1) b <$> text "in" <$>
prettyExpr' basePrec (i, j + 1) (e j)
prettyExpr' p (i,j) (LetRec names sigs binds body)
= text "let" <+> text "rec" <$>
vcat (intersperse (text "and") (map (indent 2) pretty_binds)) <$>
text "in" <$>
pretty_body
where
n = length sigs
ids = [i..(i+n-1)]
pretty_ids = map text names
pretty_sigs = map (prettyType' p i) sigs
pretty_defs = map (prettyExpr' p (i, j + n)) (binds ids)
pretty_binds = zipWith3 (\pretty_id pretty_sig pretty_def ->
pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
pretty_ids pretty_sigs pretty_defs
pretty_body = prettyExpr' p (i, j + n) (body ids)
prettyExpr' p (i,j) (Data recflag databinds e) =
text "data" <+> (pretty recflag) <+> (align .vsep) (map prettyDatabind databinds) <$> prettyExpr' p (i,j) e
where prettyCtr i' (Constructor ctrName ctrParams) = (text ctrName) <+> (hsep. map (prettyType' p i') $ ctrParams)
prettyDatabind (DataBind n tvars cons) = hsep (map text $ n:tvars) <+> align
(equals <+> intersperseBar (map (prettyCtr (i+ (length tvars)))$ cons [i..(i-1+(length tvars))]) <$$> semi)
prettyExpr' p (i,j) (ConstrOut c es) = parens $ hsep $ text (constrName c) : map (prettyExpr' p (i,j)) es
prettyExpr' p (i,j) (Case e alts) =
hang 2 $ text "case" <+> prettyExpr' p (i,j) e <+> text "of" <$> align (intersperseBar (map pretty_alt alts))
where pretty_alt (ConstrAlt c e1) =
(text (constrName c) <+> arrow <+> (align $ prettyExpr' p (i, j) e1 ))
pretty_alt (Default e1) =
(text "_" <+> arrow <+> (align $ prettyExpr' p (i, j) e1 ))
javaInt :: Type t
javaInt = JClass "java.lang.Integer"
|
zhiyuanshi/fcore
|
backend/Core.hs
|
bsd-2-clause
| 14,693 | 1 | 21 | 4,134 | 6,737 | 3,447 | 3,290 | 285 | 6 |
{-|
Module : Data.STM.PriorityQueue.Internal.HeapPQ
Description : STM-based Concurrent Priority Queue data structure class implementation
Copyright : (c) Alex Semin, 2015
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
An implementation of 'Data.STM.PriorityQueue.Class' based on a
functional __coarse-grained__ binary heap. The heap is implemented as described
in <https://www.cs.cmu.edu/~rwh/theses/okasaki.pdf Purely Functional Data Structures>.
-}
{-# LANGUAGE BangPatterns #-}
module Data.STM.PriorityQueue.Internal.HeapPQ(
HeapPQ
) where
import Control.Concurrent.STM
import Data.STM.PriorityQueue.Class
data Heap k v = Nil
| Node {-# UNPACK #-} !Int -- rank
{-# UNPACK #-} !Int -- size
!k -- prio
v -- item
!(Heap k v) -- left
!(Heap k v) -- right
data HeapPQ k v = PQ (TVar (Heap k v))
empty :: Heap k v
empty = Nil
leaf :: k -> v -> Heap k v
leaf !k v = Node 1 1 k v empty empty
ins :: Ord k => k -> v -> Heap k v -> Heap k v
ins !k v !h = h `union` leaf k v
union :: Ord k => Heap k v -> Heap k v -> Heap k v
h `union` Nil = h
Nil `union` h = h
h1@(Node _ _ !k1 v1 l1 r1) `union` h2@(Node _ _ !k2 v2 l2 r2) =
if k1 < k2
then mk k1 v1 l1 $ r1 `union` h2
else mk k2 v2 l2 $ r2 `union` h1
mk :: k -> v -> Heap k v -> Heap k v -> Heap k v
mk !k v h1 h2 =
if rk h1 > rk h2 then Node (rk h1 + 1) ss k v h1 h2
else Node (rk h2 + 1) ss k v h2 h1
where
rk Nil = 0
rk (Node !r _ _ _ _ _) = r
sz Nil = 0
sz (Node _ !s _ _ _ _) = s
!ss = sz h1 + sz h2 + 1
pk :: Heap t a -> Maybe a
pk Nil = Nothing
pk (Node _ _ _ v _ _) = Just v
rm :: Ord k => Heap k v -> Maybe (v, Heap k v)
rm Nil = Nothing
rm (Node _ _ _ v l r) = Just (v, l `union` r)
dm :: Ord k => HeapPQ k b -> STM b
dm (PQ hp) = do
h <- readTVar hp
case rm h of
Nothing -> retry
Just (v, h') -> do
writeTVar hp h'
return v
instance PriorityQueue HeapPQ where
new = PQ `fmap` newTVar Nil
insert (PQ hp) k v = modifyTVar hp $ ins k v
peekMin (PQ hp) = pk `fmap` readTVar hp >>= maybe retry return
deleteMin = dm
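-- A minimal usage sketch (comment only; it assumes the class methods carry
-- the STM types suggested by the instance above):
--
-- > example :: IO Int
-- > example = atomically $ do
-- >     pq <- new :: STM (HeapPQ Int Int)
-- >     insert pq 2 20
-- >     insert pq 1 10
-- >     deleteMin pq   -- returns 10, the value stored under the smallest key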
|
Alllex/stm-data-collection
|
src/Data/STM/PriorityQueue/Internal/HeapPQ.hs
|
bsd-3-clause
| 2,318 | 0 | 12 | 755 | 903 | 454 | 449 | 60 | 4 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
{-# LANGUAGE RecursiveDo #-}
module Graphics.UI.Threepenny.Core (
-- * Synopsis
-- | Core functionality of the Threepenny GUI library.
-- * Server
-- $server
Config(..), defaultConfig, startGUI,
loadFile, loadDirectory,
-- * UI monad
-- $ui
UI, runUI, askWindow, liftIOLater,
module Control.Monad.IO.Class,
module Control.Monad.Fix,
-- * Browser Window
Window, title, cookies, getRequestLocation,
-- * DOM elements
-- | Create and manipulate DOM elements.
Element, mkElement, getWindow, delete, (#+), string,
getHead, getBody,
children, text, html, attr, style, value,
getValuesList,
getElementsByTagName, getElementById, getElementsByClassName,
-- * Layout
-- | Combinators for quickly creating layouts.
-- They can be adjusted with CSS later on.
grid, row, column,
-- * Events
-- | For a list of predefined events, see "Graphics.UI.Threepenny.Events".
EventData(..), domEvent, disconnect, on, onEvent, onChanges,
module Reactive.Threepenny,
-- * Attributes
-- | For a list of predefined attributes, see "Graphics.UI.Threepenny.Attributes".
(#), (#.),
Attr, WriteAttr, ReadAttr, ReadWriteAttr(..),
set, sink, get, mkReadWriteAttr, mkWriteAttr, mkReadAttr,
-- * Widgets
Widget(..), element, widget,
-- * JavaScript FFI
-- | Direct interface to JavaScript in the browser window.
debug,
ToJS, FFI, ffi, JSFunction, runFunction, callFunction,
callDeferredFunction, atomic,
-- * Internal and oddball functions
fromProp, toElement,
audioPlay, audioStop,
) where
import Data.Dynamic
import Data.IORef
import qualified Data.Map as Map
import Data.Maybe (listToMaybe)
import Data.Functor
import Data.String (fromString)
import Control.Applicative (Applicative)
import Control.Concurrent.MVar
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import qualified Control.Monad.Trans.RWS.Lazy as Monad
import Network.URI
import qualified Data.Aeson as JSON
import Reactive.Threepenny hiding (onChange)
import qualified Reactive.Threepenny as Reactive
import qualified Graphics.UI.Threepenny.Internal.Driver as Core
import Graphics.UI.Threepenny.Internal.Driver
( getRequestLocation
, callDeferredFunction, atomic, )
import Graphics.UI.Threepenny.Internal.FFI
import Graphics.UI.Threepenny.Internal.Types as Core
( Window, Config, defaultConfig, Events, EventData
, ElementData(..), withElementData,)
import Graphics.UI.Threepenny.Internal.Types as Core
(unprotectedGetElementId, withElementData, ElementData(..))
{-----------------------------------------------------------------------------
Server
------------------------------------------------------------------------------}
{- $server
To display the user interface, you have to start a server using 'startGUI'.
Then, visit the URL <http://localhost:10000/> in your browser
(assuming that you have set the port number to @tpPort=10000@
in the server configuration).
The server is multithreaded;
a separate thread is used to communicate with each browser 'Window'.
However, each window should only be accessed from a single thread,
otherwise the behavior will be undefined,
i.e. you could run an element search and get a click event as a result
if you don't access each window in a single-threaded fashion.
-}
-- | Start server for GUI sessions.
startGUI
:: Config -- ^ Server configuration.
-> (Window -> UI ()) -- ^ Action to run whenever a client browser connects.
-> IO ()
startGUI config handler = Core.serve config (\w -> runUI w $ handler w)
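-- A minimal, self-contained sketch of a complete program, using only
-- functions exported from this module:
--
-- > main :: IO ()
-- > main = startGUI defaultConfig $ \window -> do
-- >     button <- mkElement "button" # set text "Click me"
-- >     getBody window #+ [element button]
-- >     on (domEvent "click") button $ \_ -> debug "clicked"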
-- | Make a local file available as a relative URI.
loadFile
:: String -- ^ MIME type
-> FilePath -- ^ Local path to the file
-> UI String -- ^ Generated URI
loadFile mime path = askWindow >>= \w -> liftIO $
Core.loadFile w (fromString mime) path
-- | Make a local directory available as a relative URI.
loadDirectory :: FilePath -> UI String
loadDirectory path = askWindow >>= \w -> liftIO $
Core.loadDirectory w path
{-----------------------------------------------------------------------------
UI monad
------------------------------------------------------------------------------}
{- |
User interface elements are created and manipulated in the 'UI' monad.
This monad is essentially just a thin wrapper around the familiar 'IO' monad.
Use the 'liftIO' function to access 'IO' operations like reading
and writing from files.
There are several subtle reasons why Threepenny
uses a custom 'UI' monad instead of the standard 'IO' monad:
* More convenience when calling JavaScript.
The monad keeps track of a browser 'Window' context
in which JavaScript function calls are executed.
* Recursion for functional reactive programming.
-}
newtype UI a = UI { unUI :: Monad.RWST Window [IO ()] () IO a }
deriving (Typeable)
instance Functor UI where
fmap f = UI . fmap f . unUI
instance Applicative UI where
pure = return
(<*>) = ap
instance Monad UI where
return = UI . return
m >>= k = UI $ unUI m >>= unUI . k
instance MonadIO UI where
liftIO = UI . liftIO
instance MonadFix UI where
mfix f = UI $ mfix (unUI . f)
-- | Execute an 'UI' action in a particular browser window.
-- Also runs all scheduled 'IO' action.
runUI :: Window -> UI a -> IO a
runUI window m = do
(a, _, actions) <- Monad.runRWST (unUI m) window ()
sequence_ actions
return a
-- | Retrieve current 'Window' context in the 'UI' monad.
askWindow :: UI Window
askWindow = UI Monad.ask
-- | Schedule an 'IO' action to be run later.
liftIOLater :: IO () -> UI ()
liftIOLater x = UI $ Monad.tell [x]
{-----------------------------------------------------------------------------
Browser window
------------------------------------------------------------------------------}
-- | Title of the client window.
title :: WriteAttr Window String
title = mkWriteAttr $ \s _ ->
runFunction $ ffi "document.title = %1;" s
-- | Cookies on the client.
cookies :: ReadAttr Window [(String,String)]
cookies = mkReadAttr (liftIO . Core.getRequestCookies)
{-----------------------------------------------------------------------------
Elements
------------------------------------------------------------------------------}
data Element = Element { eEvents :: Core.Events, toElement :: Core.Element }
deriving (Typeable)
fromElement :: Core.Element -> IO Element
fromElement e = do
events <- Core.withElementData e $ \_ x -> return $ elEvents x
return $ Element events e
instance ToJS Element where
render = render . toElement
-- | Make a new DOM element.
mkElement
:: String -- ^ Tag name
-> UI Element
mkElement tag = mdo
-- create events and initialize them when element becomes Alive
let initializeEvent (name,_,handler) = Core.bind name el handler
events <- liftIO $ newEventsNamed initializeEvent
window <- askWindow
el <- liftIO $ Core.newElement window tag events
return $ Element events el
-- | Retrieve the browser 'Window' in which the element resides.
getWindow :: Element -> IO Window
getWindow e = Core.getWindow (toElement e)
-- | Delete the given element.
delete :: Element -> UI ()
delete = liftIO . Core.delete . toElement
-- | Append DOM elements as children to a given element.
(#+) :: UI Element -> [UI Element] -> UI Element
(#+) mx mys = do
x <- mx
ys <- sequence mys
liftIO $ mapM_ (Core.appendElementTo (toElement x) . toElement) ys
return x
-- | Child elements of a given element.
children :: WriteAttr Element [Element]
children = mkWriteAttr set
where
set xs x = liftIO $ do
Core.emptyEl $ toElement x
mapM_ (Core.appendElementTo (toElement x) . toElement) xs
-- | Child elements of a given element as a HTML string.
html :: WriteAttr Element String
html = mkWriteAttr $ \s el ->
runFunction $ ffi "$(%1).html(%2)" el s
-- | HTML attributes of an element.
attr :: String -> WriteAttr Element String
attr name = mkWriteAttr $ \s el ->
runFunction $ ffi "$(%1).attr(%2,%3)" el name s
-- | Set CSS style of an Element
style :: WriteAttr Element [(String,String)]
style = mkWriteAttr $ \xs el -> forM_ xs $ \(name,val) ->
runFunction $ ffi "%1.style[%2] = %3" el name val
-- | Value attribute of an element.
-- Particularly relevant for control widgets like 'input'.
value :: Attr Element String
value = mkReadWriteAttr get set
where
get el = callFunction $ ffi "$(%1).val()" el
set v el = runFunction $ ffi "$(%1).val(%2)" el v
-- | Get values from inputs. Blocks. This is faster than many 'getValue' invocations.
getValuesList
:: [Element] -- ^ A list of elements to get the values of.
-> UI [String] -- ^ The list of plain text values.
getValuesList = mapM (get value)
-- TODO: improve this to use Core.getValuesList
-- | Text content of an element.
text :: WriteAttr Element String
text = mkWriteAttr $ \s el ->
runFunction $ ffi "$(%1).text(%2)" el s
-- | Make a @span@ element with a given text content.
string :: String -> UI Element
string s = mkElement "span" # set text s
-- | Get the head of the page.
getHead :: Window -> UI Element
getHead w = liftIO $ fromElement =<< Core.getHead w
-- | Get the body of the page.
getBody :: Window -> UI Element
getBody w = liftIO $ fromElement =<< Core.getBody w
-- | Get all elements of the given tag name. Blocks.
getElementsByTagName
:: Window -- ^ Browser window
-> String -- ^ The tag name.
-> UI [Element] -- ^ All elements with that tag name.
getElementsByTagName window name = liftIO $
mapM fromElement =<< Core.getElementsByTagName window name
-- | Get an element by a particular ID. Blocks.
getElementById
:: Window -- ^ Browser window
-> String -- ^ The ID string.
-> UI (Maybe Element) -- ^ Element (if any) with given ID.
getElementById window id = liftIO $
fmap listToMaybe $ mapM fromElement =<< Core.getElementsById window [id]
-- | Get a list of elements by particular class. Blocks.
getElementsByClassName
:: Window -- ^ Browser window
-> String -- ^ The class string.
-> UI [Element] -- ^ Elements with given class.
getElementsByClassName window cls = liftIO $
mapM fromElement =<< Core.getElementsByClassName window cls
{-----------------------------------------------------------------------------
FFI
------------------------------------------------------------------------------}
-- | Run the given JavaScript function and carry on. Doesn't block.
--
-- The client window uses JavaScript's @eval()@ function to run the code.
runFunction :: JSFunction () -> UI ()
runFunction fun = do
window <- askWindow
liftIO $ Core.runFunction window fun
-- | Run the given JavaScript function and wait for results. Blocks.
--
-- The client window uses JavaScript's @eval()@ function to run the code.
callFunction :: JSFunction a -> UI a
callFunction fun = do
window <- askWindow
liftIO $ Core.callFunction window fun
{-----------------------------------------------------------------------------
Oddball
------------------------------------------------------------------------------}
-- | Print a message on the client console if the client has debugging enabled.
debug :: String -> UI ()
debug s = askWindow >>= \w -> liftIO $ Core.debug w s
-- | Invoke the JavaScript expression @audioElement.play();@.
audioPlay :: Element -> UI ()
audioPlay el = runFunction $ ffi "%1.play()" el
-- | Invoke the JavaScript expression @audioElement.stop();@.
audioStop :: Element -> UI ()
audioStop el = runFunction $ ffi "prim_audio_stop(%1)" el
-- Turn a jQuery property @.prop()@ into an attribute.
fromProp :: String -> (JSON.Value -> a) -> (a -> JSON.Value) -> Attr Element a
fromProp name from to = mkReadWriteAttr get set
where
set v el = runFunction $ ffi "$(%1).prop(%2,%3)" el name (to v)
get el = fmap from $ callFunction $ ffi "$(%1).prop(%2)" el name
{-----------------------------------------------------------------------------
Layout
------------------------------------------------------------------------------}
-- | Align given elements in a row. Special case of 'grid'.
row :: [UI Element] -> UI Element
row xs = grid [xs]
-- | Align given elements in a column. Special case of 'grid'.
column :: [UI Element] -> UI Element
column = grid . map (:[])
-- | Align given elements in a rectangular grid.
--
-- Layout is achieved by using the CSS @display:table@ property.
-- The following element tree will be generated
--
-- > <div class="table">
-- > <div class="table-row">
-- > <div class="table-cell"> ... </div>
-- > <div class="table-cell"> ... </div>
-- > </div>
-- > <div class="table-row">
-- > ...
-- > </div>
-- > ...
-- > </div>
--
-- You can customize the actual layout by assigning an @id@ to the element
-- and changing the @.table@, @.table-row@ and @.table-cell@
-- classes in a custom CSS file.
grid :: [[UI Element]] -> UI Element
grid mrows = do
rows0 <- mapM (sequence) mrows
rows <- forM rows0 $ \row0 -> do
row <- forM row0 $ \entry ->
wrap "table-cell" [entry]
wrap "table-row" row
wrap "table" rows
where
wrap c xs = mkElement "div" # set (attr "class") c #+ map element xs
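-- For example, a two-row form layout ('nameInput' and 'mailInput' are
-- hypothetical elements created elsewhere):
--
-- > grid [ [string "Name:",  element nameInput]
-- >      , [string "Email:", element mailInput] ]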
{-----------------------------------------------------------------------------
Events
------------------------------------------------------------------------------}
-- | Obtain DOM event for a given element.
domEvent
:: String
-- ^ Event name. A full list can be found at
-- <http://www.w3schools.com/jsref/dom_obj_event.asp>.
-- Note that the @on@-prefix is not included,
-- the name is @click@ and so on.
-> Element -- ^ Element where the event is to occur.
-> Event EventData
domEvent name (Element events _) = events name
-- | Event that occurs whenever the client has disconnected,
-- be it by closing the browser window or by exception.
--
-- Note: DOM Elements in the browser window that has been closed
-- can no longer be manipulated.
disconnect :: Window -> Event ()
disconnect = Core.disconnect
-- | Convenience function to register 'Event's for 'Element's.
--
-- Example usage.
--
-- > on click element $ \_ -> ...
on :: (element -> Event a) -> element -> (a -> UI void) -> UI ()
on f x = onEvent (f x)
-- | Register an 'UI' action to be executed whenever the 'Event' happens.
--
-- FIXME: Should be unified with 'on'?
onEvent :: Event a -> (a -> UI void) -> UI ()
onEvent e h = do
window <- askWindow
liftIO $ register e (void . runUI window . h)
return ()
-- | Execute a 'UI' action whenever a 'Behavior' changes.
-- Use sparingly, it is recommended that you use 'sink' instead.
onChanges :: Behavior a -> (a -> UI void) -> UI ()
onChanges b f = do
window <- askWindow
liftIO $ Reactive.onChange b (void . runUI window . f)
{-----------------------------------------------------------------------------
Attributes
------------------------------------------------------------------------------}
infixl 8 #
infixl 8 #+
infixl 8 #.
-- | Reverse function application.
-- Allows convenient notation for setting properties.
--
-- Example usage.
--
-- > mkElement "div"
-- > # set style [("color","#CCAABB")]
-- > # set draggable True
-- > # set children otherElements
(#) :: a -> (a -> b) -> b
(#) = flip ($)
-- | Convenient combinator for setting the CSS class on element creation.
(#.) :: UI Element -> String -> UI Element
(#.) mx s = mx # set (attr "class") s
-- | Attributes can be 'set' and 'get'.
type Attr x a = ReadWriteAttr x a a
-- | Attribute that only supports the 'get' operation.
type ReadAttr x o = ReadWriteAttr x () o
-- | Attribute that only supports the 'set' operation.
type WriteAttr x i = ReadWriteAttr x i ()
-- | Generalized attribute with different types for getting and setting.
data ReadWriteAttr x i o = ReadWriteAttr
{ get' :: x -> UI o
, set' :: i -> x -> UI ()
}
-- | Set value of an attribute in the 'UI' monad.
-- Best used in conjunction with '#'.
set :: ReadWriteAttr x i o -> i -> UI x -> UI x
set attr i mx = do { x <- mx; set' attr i x; return x; }
-- | Set the value of an attribute to a 'Behavior', that is a time-varying value.
--
-- Note: For reasons of efficiency, the attribute is only
-- updated when the value changes.
sink :: ReadWriteAttr x i o -> Behavior i -> UI x -> UI x
sink attr bi mx = do
x <- mx
window <- askWindow
liftIOLater $ do
i <- currentValue bi
runUI window $ set' attr i x
Reactive.onChange bi $ \i -> runUI window $ set' attr i x
return x
-- | Get attribute value.
get :: ReadWriteAttr x i o -> x -> UI o
get attr = get' attr
-- | Build an attribute from a getter and a setter.
mkReadWriteAttr
:: (x -> UI o) -- ^ Getter.
-> (i -> x -> UI ()) -- ^ Setter.
-> ReadWriteAttr x i o
mkReadWriteAttr get set = ReadWriteAttr { get' = get, set' = set }
-- | Build attribute from a getter.
mkReadAttr :: (x -> UI o) -> ReadAttr x o
mkReadAttr get = mkReadWriteAttr get (\_ _ -> return ())
-- | Build attribute from a setter.
mkWriteAttr :: (i -> x -> UI ()) -> WriteAttr x i
mkWriteAttr set = mkReadWriteAttr (\_ -> return ()) set
{-----------------------------------------------------------------------------
Widget class
------------------------------------------------------------------------------}
-- | Widgets are data types that have a visual representation.
class Widget w where
getElement :: w -> Element
instance Widget Element where
getElement = id
-- | Convenience synonym for 'return' to make elements work well with 'set'.
-- Also works on 'Widget's.
--
-- Example usage.
--
-- > e <- mkElement "button"
-- > element e # set text "Ok"
element :: MonadIO m => Widget w => w -> m Element
element = return . getElement
-- | Convenience synonym for 'return' to make widgets work well with 'set'.
widget :: Widget w => w -> UI w
widget = return
|
yuvallanger/threepenny-gui
|
src/Graphics/UI/Threepenny/Core.hs
|
bsd-3-clause
| 18,363 | 0 | 16 | 3,865 | 3,654 | 1,991 | 1,663 | -1 | -1 |
module Main where
import System.IO
import Perceptron
import ReadData
main =
do { num_vecs <- hGetLine stdin
; if debug then do { putStr "num_vecs = "; putStrLn (show num_vecs) } else return ()
; num_feats <- hGetLine stdin
; if debug then do { putStr "num_feats = "; putStrLn (show num_feats) } else return ()
; let nvecs = read num_vecs
; let nfeats = read num_feats
; vs <- read_vecs nvecs nfeats
; w <- read_vec' (S (intToNat nfeats))
; let res = inner_perceptron_MCE (intToNat nfeats) vs w
; case res of
      None -> putStrLn "Valid Separator"
      Some ((,) l w) -> do { putStrLn "Invalid Separator"
; putStrLn "Running 1 iteration of Perceptron produces the following misclassifications"
; printQvecL l
; putStrLn "Resulting in the following weight vector"
; printQvec w
; putStrLn ""
}
}
|
tm507211/CoqPerceptron
|
Benchmarks/hsopt/RunValidator.hs
|
bsd-3-clause
| 1,041 | 1 | 15 | 395 | 286 | 139 | 147 | 22 | 4 |
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE FlexibleInstances #-}
{-#LANGUAGE RankNTypes #-}
module Data.Expandable
( ExpandableM (..)
, expand
)
where
import qualified Data.Aeson as JSON
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import qualified Data.Vector as Vector
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Control.Monad.Identity
class ExpandableM t a where
expandM :: forall m. Monad m => (t -> m t) -> a -> m a
expand :: ExpandableM t a => (t -> t) -> a -> a
expand f x = runIdentity $ expandM (return . f) x
instance ExpandableM t t where
expandM f x = f x
instance ExpandableM t a => ExpandableM t (Maybe a) where
expandM _ Nothing = return Nothing
expandM f (Just x) = Just <$> expandM f x
instance ExpandableM t a => ExpandableM t [a] where
expandM f xs = mapM (expandM f) xs
instance ExpandableM Text JSON.Value where
expandM = jsonTextWalk
instance (ExpandableM t a, ExpandableM t b) => ExpandableM t (a, b) where
expandM f (x, y) = (,) <$> expandM f x <*> expandM f y
jsonTextWalk :: Monad m => (Text -> m Text) -> (JSON.Value -> m JSON.Value)
jsonTextWalk f (JSON.String t) = JSON.String <$> f t
jsonTextWalk f (JSON.Array v) = JSON.Array <$> sequence (fmap (expandM f) v)
jsonTextWalk f (JSON.Object o) =
JSON.Object . HashMap.fromList <$> (expandM f . HashMap.toList) o
jsonTextWalk _ x = return x
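-- A small usage sketch (comment only):
--
-- > expand Text.toUpper (JSON.String (Text.pack "hello"))
-- > -- ==> String "HELLO"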
|
tdammers/templar
|
src/Data/Expandable.hs
|
bsd-3-clause
| 1,458 | 0 | 12 | 281 | 579 | 305 | 274 | 35 | 1 |
{- |
Module : $Header$
Description : Common Logic specific OMDoc constants
Copyright : (c) Iulia Ignatov, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
Library of Common Logic specific OMDoc constants.
-}
module CommonLogic.OMDoc where
import OMDoc.DataTypes
clMetaTheory :: OMCD
clMetaTheory =
CD ["commonlogic", "http://cds.omdoc.org/logics/commonlogic.omdoc"]
const_cl :: String -> OMElement
const_cl n = OMS (clMetaTheory, mkSimpleName n)
const_symbol, const_forall, const_exists, const_module, const_textName
, const_moduleExcludes, const_and, const_or
, const_not, const_implies, const_equivalent, const_eq, const_comment
, const_irregular, const_comment_term, const_that :: OMElement
const_symbol = const_cl "symbol"
const_forall = const_cl "forall"
const_exists = const_cl "exists"
const_module = const_cl "module"
const_textName = const_cl "text_name"
const_moduleExcludes = const_cl "module_excludes"
const_and = const_cl "and"
const_or = const_cl "or"
const_not = const_cl "not"
const_implies = const_cl "implies"
const_equivalent = const_cl "equivalent"
const_eq = const_cl "eq"
const_comment = const_cl "comment"
const_irregular = const_cl "irregular"
const_comment_term = const_cl "comment_term"
const_that = const_cl "that"
|
keithodulaigh/Hets
|
CommonLogic/OMDoc.hs
|
gpl-2.0
| 1,380 | 0 | 7 | 189 | 223 | 131 | 92 | 27 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.DescribeDBInstances
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns information about provisioned RDS instances. This API supports
-- pagination.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html>
module Network.AWS.RDS.DescribeDBInstances
(
-- * Request
DescribeDBInstances
-- ** Request constructor
, describeDBInstances
-- ** Request lenses
, ddbi1DBInstanceIdentifier
, ddbi1Filters
, ddbi1Marker
, ddbi1MaxRecords
-- * Response
, DescribeDBInstancesResponse
-- ** Response constructor
, describeDBInstancesResponse
-- ** Response lenses
, ddbirDBInstances
, ddbirMarker
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
data DescribeDBInstances = DescribeDBInstances
{ _ddbi1DBInstanceIdentifier :: Maybe Text
, _ddbi1Filters :: List "member" Filter
, _ddbi1Marker :: Maybe Text
, _ddbi1MaxRecords :: Maybe Int
} deriving (Eq, Read, Show)
-- | 'DescribeDBInstances' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddbi1DBInstanceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'ddbi1Filters' @::@ ['Filter']
--
-- * 'ddbi1Marker' @::@ 'Maybe' 'Text'
--
-- * 'ddbi1MaxRecords' @::@ 'Maybe' 'Int'
--
describeDBInstances :: DescribeDBInstances
describeDBInstances = DescribeDBInstances
{ _ddbi1DBInstanceIdentifier = Nothing
, _ddbi1Filters = mempty
, _ddbi1MaxRecords = Nothing
, _ddbi1Marker = Nothing
}
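-- A usage sketch: restrict the request to one identifier and a page size of
-- 20. The '&' and '?~' operators are the same lens combinators used by the
-- 'AWSPager' instance below; the string literal assumes @OverloadedStrings@.
--
-- > describeDBInstances
-- >     & ddbi1DBInstanceIdentifier ?~ "mydbinstance"
-- >     & ddbi1MaxRecords           ?~ 20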
-- | The user-supplied instance identifier. If this parameter is specified,
-- information from only the specific DB instance is returned. This parameter
-- isn't case sensitive.
--
-- Constraints:
--
-- Must contain from 1 to 63 alphanumeric characters or hyphens First
-- character must be a letter Cannot end with a hyphen or contain two
-- consecutive hyphens
ddbi1DBInstanceIdentifier :: Lens' DescribeDBInstances (Maybe Text)
ddbi1DBInstanceIdentifier =
lens _ddbi1DBInstanceIdentifier
(\s a -> s { _ddbi1DBInstanceIdentifier = a })
-- | This parameter is not currently supported.
ddbi1Filters :: Lens' DescribeDBInstances [Filter]
ddbi1Filters = lens _ddbi1Filters (\s a -> s { _ddbi1Filters = a }) . _List
-- | An optional pagination token provided by a previous DescribeDBInstances
-- request. If this parameter is specified, the response includes only records
-- beyond the marker, up to the value specified by 'MaxRecords' .
ddbi1Marker :: Lens' DescribeDBInstances (Maybe Text)
ddbi1Marker = lens _ddbi1Marker (\s a -> s { _ddbi1Marker = a })
-- | The maximum number of records to include in the response. If more records
-- exist than the specified 'MaxRecords' value, a pagination token called a marker
-- is included in the response so that the remaining results may be retrieved.
--
-- Default: 100
--
-- Constraints: minimum 20, maximum 100
ddbi1MaxRecords :: Lens' DescribeDBInstances (Maybe Int)
ddbi1MaxRecords = lens _ddbi1MaxRecords (\s a -> s { _ddbi1MaxRecords = a })
data DescribeDBInstancesResponse = DescribeDBInstancesResponse
{ _ddbirDBInstances :: List "member" DBInstance
, _ddbirMarker :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeDBInstancesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddbirDBInstances' @::@ ['DBInstance']
--
-- * 'ddbirMarker' @::@ 'Maybe' 'Text'
--
describeDBInstancesResponse :: DescribeDBInstancesResponse
describeDBInstancesResponse = DescribeDBInstancesResponse
{ _ddbirMarker = Nothing
, _ddbirDBInstances = mempty
}
-- | A list of 'DBInstance' instances.
ddbirDBInstances :: Lens' DescribeDBInstancesResponse [DBInstance]
ddbirDBInstances = lens _ddbirDBInstances (\s a -> s { _ddbirDBInstances = a }) . _List
-- | An optional pagination token provided by a previous request. If this
-- parameter is specified, the response includes only records beyond the marker,
-- up to the value specified by 'MaxRecords'.
ddbirMarker :: Lens' DescribeDBInstancesResponse (Maybe Text)
ddbirMarker = lens _ddbirMarker (\s a -> s { _ddbirMarker = a })
instance ToPath DescribeDBInstances where
toPath = const "/"
instance ToQuery DescribeDBInstances where
toQuery DescribeDBInstances{..} = mconcat
[ "DBInstanceIdentifier" =? _ddbi1DBInstanceIdentifier
, "Filters" =? _ddbi1Filters
, "Marker" =? _ddbi1Marker
, "MaxRecords" =? _ddbi1MaxRecords
]
instance ToHeaders DescribeDBInstances
instance AWSRequest DescribeDBInstances where
type Sv DescribeDBInstances = RDS
type Rs DescribeDBInstances = DescribeDBInstancesResponse
request = post "DescribeDBInstances"
response = xmlResponse
instance FromXML DescribeDBInstancesResponse where
parseXML = withElement "DescribeDBInstancesResult" $ \x -> DescribeDBInstancesResponse
<$> x .@? "DBInstances" .!@ mempty
<*> x .@? "Marker"
instance AWSPager DescribeDBInstances where
page rq rs
| stop (rs ^. ddbirMarker) = Nothing
| otherwise = (\x -> rq & ddbi1Marker ?~ x)
<$> (rs ^. ddbirMarker)
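-- Editorial note (not part of the generated binding): a request limited to
-- pages of at most 20 records could be built with the lenses above, e.g.
--
-- > describeDBInstances & ddbi1MaxRecords ?~ 20
--
-- The 'AWSPager' instance then feeds each response's 'ddbirMarker' back into
-- 'ddbi1Marker' of the following request until no marker is returned.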
|
romanb/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/DescribeDBInstances.hs
|
mpl-2.0
| 6,281 | 0 | 12 | 1,340 | 803 | 480 | 323 | 83 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
    <label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/sse/resources/help_it_IT/helpset_it_IT.hs
|
apache-2.0
| 981 | 85 | 53 | 160 | 402 | 212 | 190 | -1 | -1 |
{- thread scheduling
-
- Copyright 2012, 2013 Joey Hess <[email protected]>
- Copyright 2011 Bas van Dijk & Roel van Dijk
-
- License: BSD-2-clause
-}
{-# LANGUAGE CPP #-}
module Utility.ThreadScheduler where
import Control.Monad
import Control.Concurrent
#ifndef mingw32_HOST_OS
import Control.Monad.IfElse
import System.Posix.IO
#endif
#ifndef mingw32_HOST_OS
import System.Posix.Signals
#ifndef __ANDROID__
import System.Posix.Terminal
#endif
#endif
newtype Seconds = Seconds { fromSeconds :: Int }
deriving (Eq, Ord, Show)
type Microseconds = Integer
{- Runs an action repeatedly forever, sleeping at least the specified number
- of seconds in between. -}
runEvery :: Seconds -> IO a -> IO a
runEvery n a = forever $ do
threadDelaySeconds n
a
threadDelaySeconds :: Seconds -> IO ()
threadDelaySeconds (Seconds n) = unboundDelay (fromIntegral n * oneSecond)
{- Like threadDelay, but not bounded by an Int.
-
- There is no guarantee that the thread will be rescheduled promptly when the
- delay has expired, but the thread will never continue to run earlier than
- specified.
-
- Taken from the unbounded-delay package to avoid a dependency for 4 lines
- of code.
-}
unboundDelay :: Microseconds -> IO ()
unboundDelay time = do
let maxWait = min time $ toInteger (maxBound :: Int)
threadDelay $ fromInteger maxWait
when (maxWait /= time) $ unboundDelay (time - maxWait)
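{- Worked example (editorial note, not from the original source): on a 32-bit
 - system maxBound :: Int is 2147483647, so unboundDelay 3000000000 first
 - sleeps 2147483647 microseconds and then recurses with the remaining
 - 852516353 microseconds. -}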
{- Pauses the main thread, letting children run until program termination. -}
waitForTermination :: IO ()
waitForTermination = do
#ifdef mingw32_HOST_OS
forever $ threadDelaySeconds (Seconds 6000)
#else
lock <- newEmptyMVar
let check sig = void $
installHandler sig (CatchOnce $ putMVar lock ()) Nothing
check softwareTermination
#ifndef __ANDROID__
whenM (queryTerminal stdInput) $
check keyboardSignal
#endif
takeMVar lock
#endif
oneSecond :: Microseconds
oneSecond = 1000000
|
avengerpenguin/propellor
|
src/Utility/ThreadScheduler.hs
|
bsd-2-clause
| 1,891 | 2 | 12 | 320 | 305 | 165 | 140 | 33 | 1 |
{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
-- | Utilities to help format error messages for the various CLI commands.
--
module Distribution.Client.CmdErrorMessages (
module Distribution.Client.CmdErrorMessages,
module Distribution.Client.TargetSelector,
) where
import Distribution.Client.ProjectOrchestration
import Distribution.Client.TargetSelector
( componentKind, showTargetSelector )
import Distribution.Package
( packageId, packageName )
import Distribution.Types.ComponentName
( showComponentName )
import Distribution.Solver.Types.OptionalStanza
( OptionalStanza(..) )
import Distribution.Text
( display )
import Data.Maybe (isNothing)
import Data.List (sortBy, groupBy, nub)
import Data.Function (on)
-----------------------
-- Singular or plural
--
-- | A tag used in rendering messages to distinguish singular or plural.
--
data Plural = Singular | Plural
-- | Used to render a singular or plural version of something
--
-- > plural (listPlural theThings) "it is" "they are"
--
plural :: Plural -> a -> a -> a
plural Singular si _pl = si
plural Plural _si pl = pl
-- | Singular for singleton lists and plural otherwise.
--
listPlural :: [a] -> Plural
listPlural [_] = Singular
listPlural _ = Plural
--------------------
-- Rendering lists
--
-- | Render a list of things in the style @foo, bar and baz@
renderListCommaAnd :: [String] -> String
renderListCommaAnd [] = ""
renderListCommaAnd [x] = x
renderListCommaAnd [x,x'] = x ++ " and " ++ x'
renderListCommaAnd (x:xs) = x ++ ", " ++ renderListCommaAnd xs
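-- For example (editorial note):
--
-- > renderListCommaAnd ["foo", "bar", "baz"] == "foo, bar and baz"
-- > renderListCommaAnd ["foo"] == "foo"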
-- | Render a list of things in the style @blah blah; this that; and the other@
renderListSemiAnd :: [String] -> String
renderListSemiAnd [] = ""
renderListSemiAnd [x] = x
renderListSemiAnd [x,x'] = x ++ "; and " ++ x'
renderListSemiAnd (x:xs) = x ++ "; " ++ renderListSemiAnd xs
-- | When rendering lists of things it often reads better to group related
-- things, e.g. grouping components by package name
--
-- > renderListSemiAnd
-- > [ "the package " ++ display pkgname ++ " components "
-- > ++ renderListCommaAnd showComponentName components
-- > | (pkgname, components) <- sortGroupOn packageName allcomponents ]
--
sortGroupOn :: Ord b => (a -> b) -> [a] -> [(b, [a])]
sortGroupOn key = map (\xs@(x:_) -> (key x, xs))
. groupBy ((==) `on` key)
. sortBy (compare `on` key)
----------------------------------------------------
-- Rendering for a few project and package types
--
renderTargetSelector :: TargetSelector PackageId -> String
renderTargetSelector (TargetPackage _ pkgid Nothing) =
"the package " ++ display pkgid
renderTargetSelector (TargetPackage _ pkgid (Just kfilter)) =
"the " ++ renderComponentKind Plural kfilter
++ " in the package " ++ display pkgid
renderTargetSelector (TargetAllPackages Nothing) =
"all the packages in the project"
renderTargetSelector (TargetAllPackages (Just kfilter)) =
"all the " ++ renderComponentKind Plural kfilter
++ " in the project"
renderTargetSelector (TargetComponent pkgid CLibName WholeComponent) =
"the library in the package " ++ display pkgid
renderTargetSelector (TargetComponent _pkgid cname WholeComponent) =
"the " ++ showComponentName cname
renderTargetSelector (TargetComponent _pkgid cname (FileTarget filename)) =
"the file " ++ filename ++ " in the " ++ showComponentName cname
renderTargetSelector (TargetComponent _pkgid cname (ModuleTarget modname)) =
"the module " ++ display modname ++ " in the " ++ showComponentName cname
renderOptionalStanza :: Plural -> OptionalStanza -> String
renderOptionalStanza Singular TestStanzas = "test suite"
renderOptionalStanza Plural TestStanzas = "test suites"
renderOptionalStanza Singular BenchStanzas = "benchmark"
renderOptionalStanza Plural BenchStanzas = "benchmarks"
-- | The optional stanza type (test suite or benchmark), if it is one.
optionalStanza :: ComponentName -> Maybe OptionalStanza
optionalStanza (CTestName _) = Just TestStanzas
optionalStanza (CBenchName _) = Just BenchStanzas
optionalStanza _ = Nothing
-- | Does the 'TargetSelector' potentially refer to one package or many?
--
targetSelectorPluralPkgs :: TargetSelector a -> Plural
targetSelectorPluralPkgs (TargetAllPackages _) = Plural
targetSelectorPluralPkgs (TargetPackage _ _ _) = Singular
targetSelectorPluralPkgs (TargetComponent _ _ _) = Singular
-- | Does the 'TargetSelector' refer to packages as a whole, rather than to
-- individual components within them?
targetSelectorRefersToPkgs :: TargetSelector a -> Bool
targetSelectorRefersToPkgs (TargetAllPackages mkfilter) = isNothing mkfilter
targetSelectorRefersToPkgs (TargetPackage _ _ mkfilter) = isNothing mkfilter
targetSelectorRefersToPkgs (TargetComponent _ _ _) = False
renderComponentKind :: Plural -> ComponentKind -> String
renderComponentKind Singular ckind = case ckind of
LibKind -> "library" -- internal/sub libs?
FLibKind -> "foreign library"
ExeKind -> "executable"
TestKind -> "test suite"
BenchKind -> "benchmark"
renderComponentKind Plural ckind = case ckind of
LibKind -> "libraries" -- internal/sub libs?
FLibKind -> "foreign libraries"
ExeKind -> "executables"
TestKind -> "test suites"
BenchKind -> "benchmarks"
-------------------------------------------------------
-- Rendering error messages for TargetProblemCommon
--
renderTargetProblemCommon :: String -> TargetProblemCommon -> String
renderTargetProblemCommon verb (TargetNotInProject pkgname) =
"Cannot " ++ verb ++ " the package " ++ display pkgname ++ ", it is not "
++ "in this project (either directly or indirectly). If you want to add it "
++ "to the project then edit the cabal.project file."
renderTargetProblemCommon verb (TargetComponentNotProjectLocal pkgid cname _) =
"Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the "
++ "package " ++ display pkgid ++ " is not local to the project, and cabal "
++ "does not currently support building test suites or benchmarks of "
++ "non-local dependencies. To run test suites or benchmarks from "
++ "dependencies you can unpack the package locally and adjust the "
++ "cabal.project file to include that package directory."
renderTargetProblemCommon verb (TargetComponentNotBuildable pkgid cname _) =
"Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because it is "
++ "marked as 'buildable: False' within the '" ++ display (packageName pkgid)
++ ".cabal' file (at least for the current configuration). If you believe it "
++ "should be buildable then check the .cabal file to see if the buildable "
++ "property is conditional on flags. Alternatively you may simply have to "
++ "edit the .cabal file to declare it as buildable and fix any resulting "
++ "build problems."
renderTargetProblemCommon verb (TargetOptionalStanzaDisabledByUser _ cname _) =
"Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because "
++ "building " ++ compkinds ++ " has been explicitly disabled in the "
++ "configuration. You can adjust this configuration in the "
++ "cabal.project{.local} file either for all packages in the project or on "
++ "a per-package basis. Note that if you do not explicitly disable "
++ compkinds ++ " then the solver will merely try to make a plan with "
++ "them available, so you may wish to explicitly enable them which will "
++ "require the solver to find a plan with them available or to fail with an "
++ "explanation."
where
compkinds = renderComponentKind Plural (componentKind cname)
renderTargetProblemCommon verb (TargetOptionalStanzaDisabledBySolver pkgid cname _) =
"Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the "
++ "solver did not find a plan that included the " ++ compkinds
++ " for " ++ display pkgid ++ ". It is probably worth trying again with "
  ++ compkinds ++ " explicitly enabled in the configuration in the "
++ "cabal.project{.local} file. This will ask the solver to find a plan with "
++ "the " ++ compkinds ++ " available. It will either fail with an "
++ "explanation or find a different plan that uses different versions of some "
++ "other packages. Use the '--dry-run' flag to see package versions and "
++ "check that you are happy with the choices."
where
compkinds = renderComponentKind Plural (componentKind cname)
renderTargetProblemCommon verb (TargetProblemNoSuchPackage pkgid) =
"Internal error when trying to " ++ verb ++ " the package "
++ display pkgid ++ ". The package is not in the set of available targets "
++ "for the project plan, which would suggest an inconsistency "
++ "between readTargetSelectors and resolveTargets."
renderTargetProblemCommon verb (TargetProblemNoSuchComponent pkgid cname) =
"Internal error when trying to " ++ verb ++ " the "
++ showComponentName cname ++ " from the package " ++ display pkgid
++ ". The package,component pair is not in the set of available targets "
++ "for the project plan, which would suggest an inconsistency "
++ "between readTargetSelectors and resolveTargets."
------------------------------------------------------------
-- Rendering error messages for TargetProblemNoneEnabled
--
-- | Several commands have a @TargetProblemNoneEnabled@ problem constructor.
-- This renders an error message for those cases.
--
renderTargetProblemNoneEnabled :: String
-> TargetSelector PackageId
-> [AvailableTarget ()]
-> String
renderTargetProblemNoneEnabled verb targetSelector targets =
"Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector
++ " because none of the components are available to build: "
++ renderListSemiAnd
[ case (status, mstanza) of
(TargetDisabledByUser, Just stanza) ->
renderListCommaAnd
[ "the " ++ showComponentName availableTargetComponentName
| AvailableTarget {availableTargetComponentName} <- targets' ]
++ plural (listPlural targets') " is " " are "
              ++ "not available because building "
++ renderOptionalStanza Plural stanza
++ " has been disabled in the configuration"
(TargetDisabledBySolver, Just stanza) ->
renderListCommaAnd
[ "the " ++ showComponentName availableTargetComponentName
| AvailableTarget {availableTargetComponentName} <- targets' ]
++ plural (listPlural targets') " is " " are "
++ "not available because the solver did not find a plan that "
++ "included the " ++ renderOptionalStanza Plural stanza
(TargetNotBuildable, _) ->
renderListCommaAnd
[ "the " ++ showComponentName availableTargetComponentName
| AvailableTarget {availableTargetComponentName} <- targets' ]
++ plural (listPlural targets') " is " " are all "
++ "marked as 'buildable: False'"
(TargetNotLocal, _) ->
renderListCommaAnd
[ "the " ++ showComponentName availableTargetComponentName
| AvailableTarget {availableTargetComponentName} <- targets' ]
++ " cannot be built because cabal does not currently support "
++ "building test suites or benchmarks of non-local dependencies"
(TargetBuildable () TargetNotRequestedByDefault, Just stanza) ->
renderListCommaAnd
[ "the " ++ showComponentName availableTargetComponentName
| AvailableTarget {availableTargetComponentName} <- targets' ]
++ " will not be built because " ++ renderOptionalStanza Plural stanza
++ " are not built by default in the current configuration (but you "
++ "can still build them specifically)" --TODO: say how
_ -> error $ "renderBuildTargetProblem: unexpected status "
++ show (status, mstanza)
| ((status, mstanza), targets') <- sortGroupOn groupingKey targets
]
where
groupingKey t =
( availableTargetStatus t
, case availableTargetStatus t of
TargetNotBuildable -> Nothing
TargetNotLocal -> Nothing
_ -> optionalStanza (availableTargetComponentName t)
)
------------------------------------------------------------
-- Rendering error messages for TargetProblemNoTargets
--
-- | Several commands have a @TargetProblemNoTargets@ problem constructor.
-- This renders an error message for those cases.
--
renderTargetProblemNoTargets :: String -> TargetSelector PackageId -> String
renderTargetProblemNoTargets verb targetSelector =
"Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector
++ " because " ++ reason targetSelector ++ ". "
++ "Check the .cabal "
++ plural (targetSelectorPluralPkgs targetSelector)
"file for the package and make sure that it properly declares "
"files for the packages and make sure that they properly declare "
++ "the components that you expect."
where
reason (TargetPackage _ _ Nothing) =
"it does not contain any components at all"
reason (TargetPackage _ _ (Just kfilter)) =
"it does not contain any " ++ renderComponentKind Plural kfilter
reason (TargetAllPackages Nothing) =
"none of them contain any components at all"
reason (TargetAllPackages (Just kfilter)) =
"none of the packages contain any "
++ renderComponentKind Plural kfilter
reason ts@TargetComponent{} =
error $ "renderTargetProblemNoTargets: " ++ show ts
-----------------------------------------------------------
-- Rendering error messages for CannotPruneDependencies
--
renderCannotPruneDependencies :: CannotPruneDependencies -> String
renderCannotPruneDependencies (CannotPruneDependencies brokenPackages) =
"Cannot select only the dependencies (as requested by the "
++ "'--only-dependencies' flag), "
++ (case pkgids of
[pkgid] -> "the package " ++ display pkgid ++ " is "
_ -> "the packages "
++ renderListCommaAnd (map display pkgids) ++ " are ")
++ "required by a dependency of one of the other targets."
where
-- throw away the details and just list the deps that are needed
pkgids :: [PackageId]
pkgids = nub . map packageId . concatMap snd $ brokenPackages
{-
++ "Syntax:\n"
++ " - build [package]\n"
++ " - build [package:]component\n"
++ " - build [package:][component:]module\n"
++ " - build [package:][component:]file\n"
++ " where\n"
++ " package is a package name, package dir or .cabal file\n\n"
++ "Examples:\n"
++ " - build foo -- package name\n"
++ " - build tests -- component name\n"
++ " (name of library, executable, test-suite or benchmark)\n"
++ " - build Data.Foo -- module name\n"
++ " - build Data/Foo.hsc -- file name\n\n"
  ++ "An ambiguous target can be qualified by package, component\n"
++ "and/or component kind (lib|exe|test|bench|flib)\n"
++ " - build foo:tests -- component qualified by package\n"
++ " - build tests:Data.Foo -- module qualified by component\n"
++ " - build lib:foo -- component qualified by kind"
-}
|
themoritz/cabal
|
cabal-install/Distribution/Client/CmdErrorMessages.hs
|
bsd-3-clause
| 15,419 | 0 | 22 | 3,365 | 2,492 | 1,297 | 1,195 | 224 | 9 |
--------------------------------------------------------------------
-- |
-- Module : MediaWiki.API.Action.ExpandTemplates.Import
-- Description : Serializing ExpandTemplates requests.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability: portable
--
-- Serializing ExpandTemplates requests.
--
--------------------------------------------------------------------
module MediaWiki.API.Action.ExpandTemplates.Import where
--import MediaWiki.API.Types
import MediaWiki.API.Utils
import MediaWiki.API.Action.ExpandTemplates
import Text.XML.Light.Types
import Text.XML.Light.Proc ( strContent )
import Control.Monad
stringXml :: String -> Either (String,[{-Error msg-}String]) ExpandTemplatesResponse
stringXml s = parseDoc xml s
xml :: Element -> Maybe ExpandTemplatesResponse
xml e = do
guard (elName e == nsName "api")
let es1 = children e
p <- pNode "expandtemplates" es1
let xm = fmap strContent $ pNode "parsetree" es1
return emptyExpandTemplatesResponse
{ etExpandedText = strContent p
, etExpandedXml = xm
}
|
neobrain/neobot
|
mediawiki/MediaWiki/API/Action/ExpandTemplates/Import.hs
|
bsd-3-clause
| 1,157 | 0 | 11 | 171 | 205 | 116 | 89 | 17 | 1 |
module ListCompIn3 where
--A definition can be removed if it is not used by other declarations.
--Where a definition is removed, its type signature should also be removed.
--In this example: remove the definition 'xs'. The bar will also be removed.
main = sum [ 4 | let xs=[1,3]]
|
kmate/HaRe
|
old/testing/removeDef/ListCompIn3.hs
|
bsd-3-clause
| 307 | 0 | 11 | 76 | 34 | 21 | 13 | 2 | 1 |
import Test.Cabal.Prelude
import Data.List
-- Test that setup computes different IPIDs when dependencies change
main = setupAndCabalTest $ do
withPackageDb $ do
withDirectory "P1" $ setup "configure" ["--disable-deterministic"]
withDirectory "P2" $ setup "configure" ["--disable-deterministic"]
withDirectory "P1" $ setup "build" []
withDirectory "P1" $ setup "build" [] -- rebuild should work
recordMode DoNotRecord $ do
r1 <- withDirectory "P1" $ setup' "register" ["--print-ipid", "--inplace"]
withDirectory "P2" $ setup "build" []
r2 <- withDirectory "P2" $ setup' "register" ["--print-ipid", "--inplace"]
let exIPID s = takeWhile (/= '\n') $
head . filter (isPrefixOf $ "UniqueIPID-0.1-") $ (tails s)
assertNotEqual "ipid match" (exIPID $ resultOutput r1) (exIPID $ resultOutput r2)
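-- Editorial note: 'exIPID' extracts the installed package ID from the
-- "register --print-ipid" output, e.g. for output containing a line
-- "UniqueIPID-0.1-<somehash>" it returns "UniqueIPID-0.1-<somehash>" (the
-- hash here is a placeholder); the assertion then checks that the two
-- builds (P1 and P2) produced different IPIDs.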
|
mydaum/cabal
|
cabal-testsuite/PackageTests/UniqueIPID/setup.test.hs
|
bsd-3-clause
| 913 | 0 | 21 | 231 | 254 | 119 | 135 | 15 | 1 |
-- | The parser takes an input file and returns an AST.
module Orchid.Parser
( parseInputFile
, parseInput
) where
import Control.Exception (throwIO)
import Data.Function ((&))
import Data.Maybe (catMaybes, fromMaybe, isJust)
import Data.Text (Text)
import qualified Data.Text.IO as TIO
import Text.Parsec ((<|>))
import qualified Text.Parsec as P
import qualified Text.Parsec.Expr as E
import Text.Parsec.Text (GenParser)
import Orchid.Error (ParserException (ParserException))
import Orchid.Lexer (LexerState, ampersandL, andL, arrowL,
assignL, boolL, classL, colonL, commaL,
dedentL, defL, deleteL, dotL, doubleStarL,
elseL, equalL, geL, gtL, ifL, indentL, leL,
lexerState, lparenL, ltL, minusL, nameL,
neL, newL, newlineL, notL, numberL, orL,
passL, percentL, plusL, privateL, publicL,
returnL, rparenL, semicolonL, slashL, starL,
virtualL, whileL)
import qualified Orchid.Token as Tok
import qualified Orchid.Types as OT
type ParserState = LexerState
type Parser = GenParser ParserState
parserState :: ParserState
parserState = lexerState
parseInputFile :: FilePath -> IO OT.Input
parseInputFile fp =
either (throwIO . ParserException) return =<<
parseInput fp <$> TIO.readFile fp
parseInput :: P.SourceName -> Text -> Either P.ParseError OT.Input
parseInput = P.runParser parser parserState
parser :: Parser OT.Input
parser =
OT.Input . catMaybes <$>
(P.many (P.try (Nothing <$ newlineL) <|> (Just <$> parseStmt)) <* P.eof)
parseName :: Parser OT.Identifier
parseName = Tok.getTokName <$> nameL
parseStmt :: Parser OT.Stmt
parseStmt =
P.try (OT.SSimple <$> parseSimpleStmt) <|>
OT.SCompound <$> parseCompoundStmt
parseTypeIdentifier :: Parser OT.TypeIdentifier
parseTypeIdentifier =
makeTypeIdentifier <$> parseName <*> (length <$> P.many starL)
where
makeTypeIdentifier name 0 = OT.TypeIdentifier name
makeTypeIdentifier name n =
OT.PointerTypeIdentifier $ makeTypeIdentifier name (n - 1)
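-- Editorial example: parsing the input @Foo**@ with 'parseTypeIdentifier'
-- would yield
-- @PointerTypeIdentifier (PointerTypeIdentifier (TypeIdentifier "Foo"))@,
-- i.e. one 'PointerTypeIdentifier' wrapper per trailing star.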
parseSimpleStmt :: Parser OT.SimpleStmt
parseSimpleStmt =
OT.SimpleStmt <$>
do s0 <- parseSmallStmt
ss <- P.many (P.try $ semicolonL >> parseSmallStmt)
() <$ P.optional semicolonL
() <$ newlineL
return (s0:ss)
parseSmallStmt :: Parser OT.SmallStmt
parseSmallStmt =
P.choice
[ P.try $ OT.SSDecl <$> parseDeclStmt
, P.try $ OT.SSExpr <$> parseExprStmt
, OT.SSPass <$ passL
, OT.SSFlow <$> parseFlowStmt
, OT.SSNew <$> parseNewStmt
, OT.SSDelete <$> parseDeleteStmt]
parseDeclStmt :: Parser OT.DeclStmt
parseDeclStmt =
OT.DeclStmt <$> parseTypeIdentifier <*> parseName <*>
(assignL >> parseExpr)
parseExprStmt :: Parser OT.ExprStmt
parseExprStmt =
OT.ExprStmt <$> (P.optionMaybe . P.try $ parseExpr <* assignL) <*>
parseExpr
parseFlowStmt :: Parser OT.FlowStmt
parseFlowStmt = OT.FSReturn <$> parseReturnStmt
parseReturnStmt :: Parser OT.ReturnStmt
parseReturnStmt = do
() <$ returnL
OT.ReturnStmt <$> P.optionMaybe (P.try parseExpr)
parseNewStmt :: Parser OT.NewStmt
parseNewStmt = do
() <$ newL
OT.NewStmt <$> parseTypeIdentifier <*> parseName
parseDeleteStmt :: Parser OT.DeleteStmt
parseDeleteStmt = do
() <$ deleteL
OT.DeleteStmt <$> parseName
parseExpr :: Parser OT.Expr
parseExpr = E.buildExpressionParser table parseEAtom
where
binary lexer op = E.Infix (binaryParser lexer op)
binaryParser lexer op = OT.EBinary op <$ lexer
unary lexer op = E.Prefix (unaryParser lexer op)
unaryParser lexer op = OT.EUnary op <$ lexer
n = E.AssocNone
l = E.AssocLeft
r = E.AssocRight
table =
[ [binary doubleStarL OT.BinPower r]
, [unary starL OT.UnaryDeref, unary ampersandL OT.UnaryAddr]
, [unary plusL OT.UnaryPlus, unary minusL OT.UnaryMinus]
, [ binary starL OT.BinMult l
, binary slashL OT.BinDiv l
, binary percentL OT.BinMod l]
, [binary plusL OT.BinPlus l, binary minusL OT.BinMinus l]
, [ binary ltL OT.BinLT n
, binary gtL OT.BinGT n
, binary equalL OT.BinEQ n
, binary leL OT.BinLE n
, binary geL OT.BinGE n
, binary neL OT.BinNE n]
, [unary notL OT.UnaryNot]
, [binary andL OT.BinAnd l]
, [binary orL OT.BinOr l]]
parseEAtom = OT.EAtom <$> parseAtomExpr
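-- Editorial note on the table above: earlier rows bind tighter, so for
-- instance @1 + 2 * 3 ** 4@ parses as @1 + (2 * (3 ** 4))@ and
-- @not a or b@ parses as @(not a) or b@.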
type Trailer = OT.AtomExpr -> OT.AtomExpr
parseAtomExpr :: Parser OT.AtomExpr
parseAtomExpr = do
a <- OT.AEAtom <$> parseAtom
trailers <- P.many $ P.try parseTrailer
return $ foldl (&) a trailers
where
parseTrailer :: Parser Trailer
parseTrailer =
P.choice
[ flip OT.AECall <$> P.between lparenL rparenL parseOptionalArgList
, flip OT.AEAccess <$> (dotL >> parseName)]
parseOptionalArgList =
fromMaybe [] <$> (P.optionMaybe . P.try $ parseArgList)
parseArgList = do
a0 <- parseExpr
as <- P.many . P.try $ commaL >> parseExpr
a0 : as <$ P.optional commaL
parseAtom :: Parser OT.Atom
parseAtom =
P.choice
[ OT.AExpr <$> P.between lparenL rparenL parseExpr
, P.try $ OT.ABool . Tok.getTokBool <$> boolL
, OT.AIdentifier <$> parseName
, OT.ANumber . Tok.getTokNumber <$> numberL]
parseCompoundStmt :: Parser OT.CompoundStmt
parseCompoundStmt =
P.choice
[ OT.CSIf <$> parseIf
, OT.CSWhile <$> parseWhile
, OT.CSFunc <$> parseFuncDef
, OT.CSClass <$> parseClassDef]
parseIf :: Parser OT.IfStmt
parseIf =
OT.IfStmt <$> (ifL >> parseExpr) <*> (colonL >> parseSuite) <*>
(P.optionMaybe . P.try $ elseL >> colonL >> parseSuite)
parseWhile :: Parser OT.WhileStmt
parseWhile = OT.WhileStmt <$> (whileL >> parseExpr) <*> (colonL >> parseSuite)
parseFuncDef :: Parser OT.FuncDef
parseFuncDef =
OT.FuncDef <$> (defL >> parseName) <*>
(P.between lparenL rparenL parseOptionalTypedArgs) <*>
(P.optionMaybe . P.try $ arrowL >> parseTypeIdentifier) <*>
(colonL >> parseSuite)
where
parseOptionalTypedArgs =
fromMaybe [] <$> (P.optionMaybe . P.try $ parseTypedArgs)
parseTypedArgs = do
a <- parseTypedArgument
as <- P.many . P.try $ commaL >> parseTypedArgument
a : as <$ P.optional commaL
parseClassDef :: Parser OT.ClassDef
parseClassDef =
OT.ClassDef <$> (classL >> parseName) <*> parseOptionalParent <*>
(colonL >> parseClassSuite)
where
parseOptionalParent = P.optionMaybe $ P.between lparenL rparenL parseName
parseTypedArgument :: Parser OT.TypedArgument
parseTypedArgument =
OT.TypedArgument <$> parseName <*> (colonL >> parseTypeIdentifier)
parseSuite :: Parser OT.Suite
parseSuite =
P.choice
[ OT.Suite . replicate 1 . OT.SSimple <$> parseSimpleStmt
, OT.Suite <$> (newlineL >> indentL >> P.many1 parseStmt <* dedentL)]
parseClassSuite :: Parser OT.ClassSuite
parseClassSuite =
OT.ClassSuite <$>
(newlineL >> indentL >> P.many1 parseClassStmt <* dedentL)
parseClassStmt :: Parser OT.ClassStmt
parseClassStmt = OT.ClassStmt <$> parseAccessModifier <*> parsePayload
where
parsePayload =
P.choice
[ P.try $
Left <$>
((,) <$> (isJust <$> P.optionMaybe virtualL) <*> parseFuncDef)
, Right <$> (parseDeclStmt <* newlineL)]
parseAccessModifier :: Parser OT.AccessModifier
parseAccessModifier =
P.choice [P.try $ OT.AMPrivate <$ privateL, OT.AMPublic <$ publicL]
|
gromakovsky/Orchid
|
src/Orchid/Parser.hs
|
mit
| 7,869 | 0 | 16 | 2,084 | 2,342 | 1,243 | 1,099 | 193 | 2 |
module OidTable where
import Data.Word
import qualified Data.ByteString.Lazy as LB
import Graphics.PDF
import Control.Monad hiding (forM_)
import Data.Foldable (forM_)
import Data.List.Split
import Text.Printf
import Control.Arrow ((***))
import Codec.Compression.Zlib
import OidCode
import KnownCodes
import Utils
import Types
-- IO technically unnecessary: https://github.com/alpheccar/HPDF/issues/7
oidTable :: Conf -> String -> [(String, Word16)] -> LB.ByteString
oidTable conf title entries | entriesPerPage < 1 = error "OID codes too large to fit on a single page"
| otherwise = pdfByteString docInfo a4rect $ do
-- Replace codes by images
entries' <- forM entries $ \(d,rc) ->
case code2RawCode rc of
Nothing -> return (d, Nothing)
Just c -> do
image <- createPDFRawImageFromByteString imageWidthPx imageHeightPx False FlateDecode $
compressWith defaultCompressParams { compressLevel = defaultCompression } $
genRawPixels imageWidthPx imageHeightPx (cDPI conf) (cPixelSize conf) $
c
return (d, Just image)
let chunks = chunksOf entriesPerPage entries'
let totalPages = length chunks
forM_ (zip [1::Int ..] chunks) $ \(pageNum, thisPage) -> do
page <- addPage Nothing
drawWithPage page $ do
displayFormattedText titleRect NormalParagraph titleFont $ do
setJustification Centered
paragraph $ txt title
displayFormattedText footerRect NormalParagraph footerFont $ do
setJustification LeftJustification
paragraph $ txt $ "Created by tttool-" ++ tttoolVersion
displayFormattedText footerRect NormalParagraph footerFont $ do
setJustification RightJustification
paragraph $ txt $ printf "%d/%d" pageNum totalPages
forM_ (zip thisPage positions) $ \((e,mbi),p) -> do
withNewContext $ do
applyMatrix $ translate p
forM_ mbi $ \i -> withNewContext $ do
applyMatrix $ translate (0 :+ (-imageHeight))
applyMatrix $ scale (1/px) (1/px)
drawXObject i
withNewContext $ do
applyMatrix $ translate (0 :+ (-imageHeight - subtitleSep))
let fontRect = Rectangle (0 :+ (-subtitleHeight)) (imageWidth :+ 0)
addShape fontRect
setAsClipPath
displayFormattedText fontRect NormalParagraph bodyFont $ do
paragraph $ txt e
where
docInfo = standardDocInfo
{ author=toPDFString $ "tttool-" ++ tttoolVersion
, compressed = False
}
-- Configure-dependent dimensions (all in pt)
(imageWidth,imageHeight) = (*mm) *** (*mm) $ fromIntegral *** fromIntegral $cCodeDim conf
-- Static dimensions (all in pt)
-- Page paddings
padTop, padLeft, padBot, padRight :: Double
padTop = 1*cm
padBot = 1*cm
padLeft = 2*cm
padRight = 2*cm
titleHeight = 1*cm
titleSep = 0.5*cm
footerHeight = 0.5*cm
footerSep = 0.5*cm
imageSepH = 0.4*cm
imageSepV = 0.2*cm
subtitleHeight = 0.4*cm
subtitleSep = 0.2*cm
-- Derived dimensions (all in pt)
titleRect = Rectangle
(padLeft :+ (a4h - padTop - titleHeight))
((a4w - padRight) :+ (a4h - padTop))
titleFont = Font (PDFFont Helvetica 12) black black
footerRect = Rectangle
(padLeft :+ padBot)
((a4w - padRight) :+ (padBot + footerHeight))
footerFont = Font (PDFFont Helvetica 8) black black
bodyFont = Font (PDFFont Helvetica 8) black black
bodyWidth = a4w - padLeft - padRight
bodyHeight = a4h - padTop - titleHeight - titleSep - footerSep - footerHeight - padBot
positions = map (+(padLeft :+ (padBot + footerHeight + footerSep))) $
calcPositions bodyWidth bodyHeight
imageWidth (imageHeight + subtitleSep + subtitleHeight)
imageSepH imageSepV
entriesPerPage = length positions
-- Derived dimensions (all in pixels)
imageWidthPx = floor (imageWidth * px)
imageHeightPx = floor (imageHeight * px)
-- config-dependent conversion factors
px :: Double
px = fromIntegral (cDPI conf) / 72
calcPositions
:: Double -- ^ total width
-> Double -- ^ total height
-> Double -- ^ entry width
-> Double -- ^ entry height
-> Double -- ^ pad width
-> Double -- ^ pad height
-> [Point]
calcPositions tw th ew eh pw ph = [ x :+ (th - y) | y <- ys , x <- xs]
where
xs = [0,ew+pw..tw-ew]
ys = [0,eh+ph..th-eh]
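-- Worked example (editorial note): calcPositions 10 10 3 3 2 2 yields
-- xs = [0,5] and ys = [0,5], i.e. the points [0:+10, 5:+10, 0:+5, 5:+5];
-- entries are laid out left to right, top to bottom, with coordinates
-- measured from the bottom-left corner of the body area.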
-- Conversion factors
cm :: Double
cm = 28.3465
mm :: Double
mm = 2.83465
-- A4 dimensions
a4w, a4h :: Double
a4w = 595
a4h = 842
a4rect :: PDFRect
a4rect = PDFRect 0 0 595 842
|
colinba/tip-toi-reveng
|
src/OidTable.hs
|
mit
| 5,048 | 0 | 33 | 1,590 | 1,386 | 730 | 656 | 111 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
module Hpack (
-- | /__NOTE:__/ This module is exposed to allow integration of Hpack into
-- other tools. It is not meant for general use by end users. The following
-- caveats apply:
--
-- * The API is undocumented, consult the source instead.
--
-- * The exposed types and functions primarily serve Hpack's own needs, not
-- that of a public API. Breaking changes can happen as Hpack evolves.
--
-- As an Hpack user you either want to use the @hpack@ executable or a build
-- tool that supports Hpack (e.g. @stack@ or @cabal2nix@).
-- * Version
version
-- * Running Hpack
, hpack
, hpackResult
, printResult
, Result(..)
, Status(..)
-- * Options
, defaultOptions
, setProgramName
, setTarget
, setDecode
, getOptions
, Verbose(..)
, Options(..)
, Force(..)
, GenerateHashStrategy(..)
#ifdef TEST
, hpackResultWithVersion
, header
, renderCabalFile
#endif
) where
import Imports
import Data.Version (Version)
import qualified Data.Version as Version
import System.FilePath
import System.Environment
import System.Exit
import System.IO (stderr)
import Data.Aeson (Value)
import Data.Maybe
import Paths_hpack (version)
import Hpack.Options
import Hpack.Config
import Hpack.Render
import Hpack.Util
import Hpack.Utf8 as Utf8
import Hpack.CabalFile
programVersion :: Maybe Version -> String
programVersion Nothing = "hpack"
programVersion (Just v) = "hpack version " ++ Version.showVersion v
header :: FilePath -> Maybe Version -> (Maybe Hash) -> [String]
header p v hash = [
"-- This file has been generated from " ++ takeFileName p ++ " by " ++ programVersion v ++ "."
, "--"
, "-- see: https://github.com/sol/hpack"
] ++ case hash of
Just h -> ["--" , "-- hash: " ++ h, ""]
Nothing -> [""]
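-- Editorial example: for a target named package.yaml and an hpack version
-- rendering as 0.2.0, @header "package.yaml" (Just v) Nothing@ produces
--
-- > -- This file has been generated from package.yaml by hpack version 0.2.0.
-- > --
-- > -- see: https://github.com/sol/hpack
--
-- followed by a blank line; when a hash is supplied, a separator and a
-- @-- hash: ...@ line are inserted before that final blank line.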
data Options = Options {
optionsDecodeOptions :: DecodeOptions
, optionsForce :: Force
, optionsGenerateHashStrategy :: GenerateHashStrategy
, optionsToStdout :: Bool
}
data GenerateHashStrategy = ForceHash | ForceNoHash | PreferHash | PreferNoHash
deriving (Eq, Show)
getOptions :: FilePath -> [String] -> IO (Maybe (Verbose, Options))
getOptions defaultPackageConfig args = do
result <- parseOptions defaultPackageConfig args
case result of
PrintVersion -> do
putStrLn (programVersion $ Just version)
return Nothing
PrintNumericVersion -> do
putStrLn (Version.showVersion version)
return Nothing
Help -> do
printHelp
return Nothing
Run (ParseOptions verbose force hash toStdout file) -> do
let generateHash = case hash of
Just True -> ForceHash
Just False -> ForceNoHash
Nothing -> PreferNoHash
return $ Just (verbose, Options defaultDecodeOptions {decodeOptionsTarget = file} force generateHash toStdout)
ParseError -> do
printHelp
exitFailure
printHelp :: IO ()
printHelp = do
name <- getProgName
Utf8.hPutStrLn stderr $ unlines [
"Usage: " ++ name ++ " [ --silent ] [ --force | -f ] [ --[no-]hash ] [ PATH ] [ - ]"
, " " ++ name ++ " --version"
, " " ++ name ++ " --numeric-version"
, " " ++ name ++ " --help"
]
hpack :: Verbose -> Options -> IO ()
hpack verbose options = hpackResult options >>= printResult verbose
defaultOptions :: Options
defaultOptions = Options defaultDecodeOptions NoForce PreferNoHash False
setTarget :: FilePath -> Options -> Options
setTarget target options@Options{..} =
options {optionsDecodeOptions = optionsDecodeOptions {decodeOptionsTarget = target}}
setProgramName :: ProgramName -> Options -> Options
setProgramName name options@Options{..} =
options {optionsDecodeOptions = optionsDecodeOptions {decodeOptionsProgramName = name}}
setDecode :: (FilePath -> IO (Either String ([String], Value))) -> Options -> Options
setDecode decode options@Options{..} =
options {optionsDecodeOptions = optionsDecodeOptions {decodeOptionsDecode = decode}}
data Result = Result {
resultWarnings :: [String]
, resultCabalFile :: String
, resultStatus :: Status
} deriving (Eq, Show)
data Status =
Generated
| ExistingCabalFileWasModifiedManually
| AlreadyGeneratedByNewerHpack
| OutputUnchanged
deriving (Eq, Show)
printResult :: Verbose -> Result -> IO ()
printResult verbose r = do
printWarnings (resultWarnings r)
when (verbose == Verbose) $ putStrLn $
case resultStatus r of
Generated -> "generated " ++ resultCabalFile r
OutputUnchanged -> resultCabalFile r ++ " is up-to-date"
AlreadyGeneratedByNewerHpack -> resultCabalFile r ++ " was generated with a newer version of hpack, please upgrade and try again."
ExistingCabalFileWasModifiedManually -> resultCabalFile r ++ " was modified manually, please use --force to overwrite."
case resultStatus r of
Generated -> return ()
OutputUnchanged -> return ()
AlreadyGeneratedByNewerHpack -> exitFailure
ExistingCabalFileWasModifiedManually -> exitFailure
printWarnings :: [String] -> IO ()
printWarnings = mapM_ $ Utf8.hPutStrLn stderr . ("WARNING: " ++)
mkStatus :: CabalFile -> CabalFile -> Status
mkStatus new@(CabalFile _ mNewVersion mNewHash _) existing@(CabalFile _ mExistingVersion _ _)
| new `hasSameContent` existing = OutputUnchanged
| otherwise = case mExistingVersion of
Nothing -> ExistingCabalFileWasModifiedManually
Just _
| mNewVersion < mExistingVersion -> AlreadyGeneratedByNewerHpack
| isJust mNewHash && hashMismatch existing -> ExistingCabalFileWasModifiedManually
| otherwise -> Generated
hasSameContent :: CabalFile -> CabalFile -> Bool
hasSameContent (CabalFile cabalVersionA _ _ a) (CabalFile cabalVersionB _ _ b) = cabalVersionA == cabalVersionB && a == b
hashMismatch :: CabalFile -> Bool
hashMismatch cabalFile = case cabalFileHash cabalFile of
Nothing -> False
Just hash -> hash /= calculateHash cabalFile
calculateHash :: CabalFile -> Hash
calculateHash (CabalFile cabalVersion _ _ body) = sha256 (unlines $ cabalVersion ++ body)
hpackResult :: Options -> IO Result
hpackResult = hpackResultWithVersion version
hpackResultWithVersion :: Version -> Options -> IO Result
hpackResultWithVersion v (Options options force generateHashStrategy toStdout) = do
DecodeResult pkg (lines -> cabalVersion) cabalFileName warnings <- readPackageConfig options >>= either die return
mExistingCabalFile <- readCabalFile cabalFileName
let
newCabalFile = makeCabalFile generateHashStrategy mExistingCabalFile cabalVersion v pkg
status = case force of
Force -> Generated
NoForce -> maybe Generated (mkStatus newCabalFile) mExistingCabalFile
case status of
Generated -> writeCabalFile options toStdout cabalFileName newCabalFile
_ -> return ()
return Result {
resultWarnings = warnings
, resultCabalFile = cabalFileName
, resultStatus = status
}
writeCabalFile :: DecodeOptions -> Bool -> FilePath -> CabalFile -> IO ()
writeCabalFile options toStdout name cabalFile = do
write . unlines $ renderCabalFile (decodeOptionsTarget options) cabalFile
where
write = if toStdout then Utf8.putStr else Utf8.writeFile name
makeCabalFile :: GenerateHashStrategy -> Maybe CabalFile -> [String] -> Version -> Package -> CabalFile
makeCabalFile strategy mExistingCabalFile cabalVersion v pkg = cabalFile
where
cabalFile = CabalFile cabalVersion (Just v) hash body
hash
| shouldGenerateHash mExistingCabalFile strategy = Just $ calculateHash cabalFile
| otherwise = Nothing
body = lines $ renderPackage (maybe [] cabalFileContents mExistingCabalFile) pkg
shouldGenerateHash :: Maybe CabalFile -> GenerateHashStrategy -> Bool
shouldGenerateHash mExistingCabalFile strategy = case (strategy, mExistingCabalFile) of
(ForceHash, _) -> True
(ForceNoHash, _) -> False
(PreferHash, Nothing) -> True
(PreferNoHash, Nothing) -> False
(_, Just CabalFile {cabalFileHash = Nothing}) -> False
(_, Just CabalFile {cabalFileHash = Just _}) -> True
renderCabalFile :: FilePath -> CabalFile -> [String]
renderCabalFile file (CabalFile cabalVersion hpackVersion hash body) = cabalVersion ++ header file hpackVersion hash ++ body
|
sol/hpack
|
src/Hpack.hs
|
mit
| 8,349 | 0 | 18 | 1,674 | 2,126 | 1,115 | 1,011 | 179 | 7 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
module Control.Effect.Trans where
import Control.Effect
import Control.Effect.Trans.TF
class Trans t where
lift :: (Effect (t m), Effect m) => m p a -> t m '(Fst (Unit (t m)),p) a
|
josefs/effect-monad-trans
|
Control/Effect/Trans.hs
|
mit
| 238 | 0 | 16 | 42 | 95 | 52 | 43 | 7 | 0 |
{-# LANGUAGE LambdaCase #-}
module Oden.Infer.Environment (
TypeBinding(..),
TypingEnvironment,
fromPackage,
fromPackages
) where
import Oden.Core.Definition
import Oden.Core.Expr
import Oden.Core.Package
import Oden.Core.ProtocolImplementation
import Oden.Core.Typed
import Oden.Environment hiding (map)
import Oden.Identifier
import Oden.Metadata
import Oden.QualifiedName (QualifiedName (..))
import Oden.SourceInfo
import Oden.Type.Polymorphic
data TypeBinding = PackageBinding (Metadata SourceInfo) Identifier TypingEnvironment
| Local (Metadata SourceInfo) Identifier Scheme
| Type (Metadata SourceInfo) QualifiedName [NameBinding] Type
| QuantifiedType (Metadata SourceInfo) Identifier Type
| ProtocolBinding (Metadata SourceInfo) Identifier Protocol
deriving (Show, Eq)
type TypingEnvironment = Environment TypeBinding (ProtocolImplementation TypedExpr)
fromPackage :: TypedPackage
-> TypingEnvironment
fromPackage (TypedPackage _ _ defs) =
foldl1 merge (map convert defs)
where
convert =
\case
Definition si (FQN _ n) (sc, _) ->
singleton n (Local si n sc)
ForeignDefinition si (FQN _ n) sc ->
singleton n (Local si n sc)
TypeDefinition si qualified@(FQN _ n) bs t ->
singleton n (Type si qualified bs t)
ProtocolDefinition si (FQN _ name) protocol ->
singleton name (ProtocolBinding si name protocol)
Implementation _ impl ->
singletonImplementation impl
fromPackages :: [ImportedPackage TypedPackage] -> TypingEnvironment
fromPackages =
foldl iter empty
where
iter env (ImportedPackage importRef pkgIdentifier pkg) =
env `extend` ( pkgIdentifier
, PackageBinding
(Metadata $ getSourceInfo importRef)
pkgIdentifier
(fromPackage pkg))
|
oden-lang/oden
|
src/Oden/Infer/Environment.hs
|
mit
| 2,098 | 0 | 13 | 649 | 516 | 277 | 239 | 49 | 5 |
-- IOMonad.hs
module IO where
-- import Data.Map as M
-- import System.IO as S
-- flatMap example: [1,7,11] >>= (\x -> [x,x+1])
addM000 mx my =
mx >>= (\x -> my >>= (\y -> return (x + y)))
addM001 mx my = do
x <- mx
y <- my
return (x+y)
people = ["Alice","Bob","Evie"]
items = ["car","puppy"]
missing = do
person <- people
item <- items
return (person ++ " lost a " ++ item)
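-- Editorial examples of evaluating the definitions above:
-- addM001 (Just 2) (Just 3) == Just 5
-- addM000 [1,2] [10] == [11,12]
-- missing == [ "Alice lost a car", "Alice lost a puppy", "Bob lost a car"
-- , "Bob lost a puppy", "Evie lost a car", "Evie lost a puppy" ]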
|
HaskellForCats/HaskellForCats
|
iOMonad.hs
|
mit
| 390 | 4 | 13 | 101 | 154 | 83 | 71 | 13 | 1 |
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import XMonad.Hooks.SetWMName
import System.IO
main = xmonad =<< xmobar wmConfig
wmConfig = def { modMask = mod4Mask
,focusFollowsMouse = False }
|
epickrram/dotfiles
|
xmonad.hs
|
mit
| 320 | 0 | 6 | 50 | 80 | 49 | 31 | 10 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.BatteryManager
(js_getCharging, getCharging, js_getChargingTime, getChargingTime,
js_getDischargingTime, getDischargingTime, js_getLevel, getLevel,
chargingChange, chargingTimeChange, dischargingTimeChange,
levelChange, BatteryManager, castToBatteryManager,
gTypeBatteryManager)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "($1[\"charging\"] ? 1 : 0)"
js_getCharging :: JSRef BatteryManager -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.charging Mozilla BatteryManager.charging documentation>
getCharging :: (MonadIO m) => BatteryManager -> m Bool
getCharging self = liftIO (js_getCharging (unBatteryManager self))
foreign import javascript unsafe "$1[\"chargingTime\"]"
js_getChargingTime :: JSRef BatteryManager -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.chargingTime Mozilla BatteryManager.chargingTime documentation>
getChargingTime :: (MonadIO m) => BatteryManager -> m Double
getChargingTime self
= liftIO (js_getChargingTime (unBatteryManager self))
foreign import javascript unsafe "$1[\"dischargingTime\"]"
js_getDischargingTime :: JSRef BatteryManager -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.dischargingTime Mozilla BatteryManager.dischargingTime documentation>
getDischargingTime :: (MonadIO m) => BatteryManager -> m Double
getDischargingTime self
= liftIO (js_getDischargingTime (unBatteryManager self))
foreign import javascript unsafe "$1[\"level\"]" js_getLevel ::
JSRef BatteryManager -> IO Double
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.level Mozilla BatteryManager.level documentation>
getLevel :: (MonadIO m) => BatteryManager -> m Double
getLevel self = liftIO (js_getLevel (unBatteryManager self))
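-- Editorial usage sketch (@bm@ is an assumed 'BatteryManager' value obtained
-- elsewhere): @getLevel bm@ yields the charge level as a fraction between
-- 0.0 and 1.0, per the Battery Status API.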
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.onchargingchange Mozilla BatteryManager.onchargingchange documentation>
chargingChange :: EventName BatteryManager Event
chargingChange = unsafeEventName (toJSString "chargingchange")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.onchargingtimechange Mozilla BatteryManager.onchargingtimechange documentation>
chargingTimeChange :: EventName BatteryManager Event
chargingTimeChange
= unsafeEventName (toJSString "chargingtimechange")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.ondischargingtimechange Mozilla BatteryManager.ondischargingtimechange documentation>
dischargingTimeChange :: EventName BatteryManager Event
dischargingTimeChange
= unsafeEventName (toJSString "dischargingtimechange")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/BatteryManager.onlevelchange Mozilla BatteryManager.onlevelchange documentation>
levelChange :: EventName BatteryManager Event
levelChange = unsafeEventName (toJSString "levelchange")
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/BatteryManager.hs
|
mit
| 3,776 | 24 | 9 | 406 | 695 | 405 | 290 | 49 | 1 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..))
import Control.Monad (void)
import SpaceAge (Planet(..), ageOn)
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = void $ runTestTT $ TestList
[ TestList ageOnTests ]
roundsTo :: Float -> Float -> Assertion
roundsTo a b = approx a @=? approx b
where approx :: Float -> Int
approx n = round (n * 100)
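-- Editorial note: 'roundsTo' compares values after rounding to two decimal
-- places, so e.g. 31.687 `roundsTo` 31.69 succeeds (both round to 3169).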
ageOnTests :: [Test]
ageOnTests =
[ testCase "age in earth years" $
31.69 `roundsTo` ageOn Earth 1000000000
, testCase "age in mercury years" $ do
let seconds = 2134835688
67.65 `roundsTo` ageOn Earth seconds
280.88 `roundsTo` ageOn Mercury seconds
, testCase "age in venus years" $ do
let seconds = 189839836
6.02 `roundsTo` ageOn Earth seconds
9.78 `roundsTo` ageOn Venus seconds
, testCase "age on mars" $ do
let seconds = 2329871239
73.83 `roundsTo` ageOn Earth seconds
39.25 `roundsTo` ageOn Mars seconds
, testCase "age on jupiter" $ do
let seconds = 901876382
28.58 `roundsTo` ageOn Earth seconds
2.41 `roundsTo` ageOn Jupiter seconds
, testCase "age on saturn" $ do
let seconds = 3000000000
95.06 `roundsTo` ageOn Earth seconds
3.23 `roundsTo` ageOn Saturn seconds
, testCase "age on uranus" $ do
let seconds = 3210123456
101.72 `roundsTo` ageOn Earth seconds
1.21 `roundsTo` ageOn Uranus seconds
, testCase "age on neptune" $ do
let seconds = 8210123456
260.16 `roundsTo` ageOn Earth seconds
1.58 `roundsTo` ageOn Neptune seconds
]
|
tfausak/exercism-solutions
|
haskell/space-age/space-age_test.hs
|
mit
| 1,600 | 2 | 11 | 366 | 543 | 277 | 266 | 44 | 1 |
-- |
-- Module : System.Wlog.Terminal
-- Copyright : (c) Serokell, 2016
-- License : GPL-3 (see the file LICENSE)
-- Maintainer : Serokell <[email protected]>
-- Stability : experimental
-- Portability : POSIX, GHC
--
-- Logging functionality. This module is wrapper over
-- <http://hackage.haskell.org/package/hslogger hslogger>,
-- which allows keeping the logger name in a monadic context.
module System.Wlog.Terminal
( initTerminalLogging
) where
import Universum
import Data.Time (UTCTime)
import System.Wlog.Formatter (stdoutFormatter)
import System.Wlog.IOLogger (rootLoggerName, setHandlers, setLevel, updateGlobalLogger)
import System.Wlog.LogHandler (LogHandler (setFormatter))
import System.Wlog.LogHandler.Simple (streamHandler)
import System.Wlog.Severity (Severities, debugPlus, errorPlus, excludeError)
-- | This function initializes global logging system for terminal output.
-- At a high level, it sets the severity that will be used by all loggers by default,
-- sets default formatters and sets custom severity for given loggers (if any).
--
-- NOTE: you probably don't want to use this function.
-- Consider 'System.Wlog.Launcher.setupLogging'.
--
-- On a lower level it does the following:
-- 1. Removes default handler from root logger, sets two handlers such that:
-- 1.1. All messages are printed to /stdout/.
-- 1.2. Moreover messages with at least `Error` severity are
-- printed to /stderr/.
-- 2. Sets given Severity to root logger, so that it will be used by
-- descendant loggers by default.
-- 3. Applies `setSeverity` to given loggers. It can be done later using
-- `setSeverity` directly.
initTerminalLogging :: MonadIO m
=> (UTCTime -> Text)
-> (Handle -> Text -> IO ())
-> Bool -- ^ Show time?
-> Bool -- ^ Show ThreadId?
-> Maybe Severities
-> Maybe Severities
-> m ()
initTerminalLogging
timeF
customConsoleAction
isShowTime
isShowTid
maybeSevOut
maybeSevErr
= liftIO $ do
lock <- liftIO $ newMVar ()
let (severitiesOut, severitiesErr) =
case (maybeSevOut, maybeSevErr) of
(Nothing, Nothing) -> (excludeError debugPlus, errorPlus)
(Just out, Nothing) -> (out, mempty)
(Nothing, Just err) -> (mempty, err)
(Just out, Just err) -> (out, err)
stdoutHandler <- setStdoutFormatter <$>
streamHandler stdout customConsoleAction lock severitiesOut
stderrHandler <- setStderrFormatter <$>
streamHandler stderr customConsoleAction lock severitiesErr
updateGlobalLogger rootLoggerName $
setHandlers [stderrHandler, stdoutHandler]
updateGlobalLogger rootLoggerName $
setLevel $ severitiesOut <> severitiesErr
where
setStdoutFormatter = (`setFormatter` stdoutFormatter timeF isShowTime isShowTid)
setStderrFormatter = (`setFormatter` stdoutFormatter timeF True isShowTid)
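-- Editorial usage sketch ('myTimeFormatter' and 'myConsoleAction' are
-- placeholder names, not part of this module):
--
-- > initTerminalLogging myTimeFormatter myConsoleAction True False Nothing Nothing
--
-- With these defaults, Error messages go to stderr and all other severities
-- go to stdout, as described in the code above.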
|
serokell/log-warper
|
src/System/Wlog/Terminal.hs
|
mit
| 3,020 | 0 | 15 | 695 | 468 | 269 | 199 | 42 | 4 |
-- C->Haskell Compiler: Marshalling library
--
-- Copyright (c) [1999...2005] Manuel M T Chakravarty
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. The name of the author may not be used to endorse or promote products
-- derived from this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
-- IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-- OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
-- NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-- TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
--- Description ---------------------------------------------------------------
--
-- Language: Haskell 98
--
-- This module provides the marshaling routines for Haskell files produced by
-- C->Haskell for binding to C library interfaces. It exports all of the
-- low-level FFI (language-independent plus the C-specific parts) together
-- with the C->HS-specific higher-level marshalling routines.
--
module Sound.Alsa.C2HS (
-- * Re-export the language-independent component of the FFI
module Foreign,
-- * Re-export the C language component of the FFI
module Foreign.C,
-- * Composite marshalling functions
withCStringLenIntConv, peekCStringLenIntConv, withIntConv, withFloatConv,
peekIntConv, peekFloatConv, withBool, peekBool, withEnum, peekEnum,
-- * Conditional results using 'Maybe'
nothingIf, nothingIfNull,
-- * Bit masks
combineBitMasks, containsBitMask, extractBitMasks,
-- * Conversion between C and Haskell types
cIntConv, cFloatConv, cToBool, cFromBool, cToEnum, cFromEnum
) where
import Foreign
hiding (Word)
-- Should also hide the Foreign.Marshal.Pool exports in
-- compilers that export them
import Foreign.C
import Control.Monad (liftM)
-- Composite marshalling functions
-- -------------------------------
-- Strings with explicit length
--
withCStringLenIntConv s f = withCStringLen s $ \(p, n) -> f (p, cIntConv n)
peekCStringLenIntConv (s, n) = peekCStringLen (s, cIntConv n)
-- Marshalling of numerals
--
withIntConv :: (Storable b, Integral a, Integral b)
=> a -> (Ptr b -> IO c) -> IO c
withIntConv = with . cIntConv
withFloatConv :: (Storable b, RealFloat a, RealFloat b)
=> a -> (Ptr b -> IO c) -> IO c
withFloatConv = with . cFloatConv
peekIntConv :: (Storable a, Integral a, Integral b)
=> Ptr a -> IO b
peekIntConv = liftM cIntConv . peek
peekFloatConv :: (Storable a, RealFloat a, RealFloat b)
=> Ptr a -> IO b
peekFloatConv = liftM cFloatConv . peek
-- Passing Booleans by reference
--
withBool :: (Integral a, Storable a) => Bool -> (Ptr a -> IO b) -> IO b
withBool = with . fromBool
peekBool :: (Integral a, Storable a) => Ptr a -> IO Bool
peekBool = liftM toBool . peek
-- Passing enums by reference
--
withEnum :: (Enum a, Integral b, Storable b) => a -> (Ptr b -> IO c) -> IO c
withEnum = with . cFromEnum
peekEnum :: (Enum a, Integral b, Storable b) => Ptr b -> IO a
peekEnum = liftM cToEnum . peek
-- Storing of 'Maybe' values
-- -------------------------
{-
instance Storable a => Storable (Maybe a) where
sizeOf _ = sizeOf (undefined :: Ptr ())
alignment _ = alignment (undefined :: Ptr ())
peek p = do
ptr <- peek (castPtr p)
if ptr == nullPtr
then return Nothing
else liftM Just $ peek ptr
poke p v = do
ptr <- case v of
Nothing -> return nullPtr
Just v' -> new v'
poke (castPtr p) ptr
-}
-- Conditional results using 'Maybe'
-- ---------------------------------
-- Wrap the result into a 'Maybe' type.
--
-- * the predicate determines when the result is considered to be non-existing,
-- ie, it is represented by `Nothing'
--
-- * the second argument allows mapping a result wrapped into `Just' to some
-- other domain
--
nothingIf :: (a -> Bool) -> (a -> b) -> a -> Maybe b
nothingIf p f x = if p x then Nothing else Just $ f x
-- |Special case of 'nothingIf' for null pointers.
--
nothingIfNull :: (Ptr a -> b) -> Ptr a -> Maybe b
nothingIfNull = nothingIf (== nullPtr)
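-- A hedged illustration (hypothetical helper, not part of the original
-- module): C lookup functions commonly signal failure with a NULL pointer,
-- and 'nothingIfNull' turns that convention into a 'Maybe'.
describeLookup :: Ptr CChar -> String
describeLookup p = case nothingIfNull id p of
  Nothing -> "lookup failed (NULL pointer)"
  Just _  -> "lookup succeeded"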
-- Support for bit masks
-- ---------------------
-- Given a list of enumeration values that represent bit masks, combine these
-- masks using bitwise disjunction.
--
combineBitMasks :: (Enum a, Bits b) => [a] -> b
combineBitMasks = foldl (.|.) 0 . map (fromIntegral . fromEnum)
-- Tests whether the given bit mask is contained in the given bit pattern
-- (i.e., all bits set in the mask are also set in the pattern).
--
containsBitMask :: (Bits a, Enum b) => a -> b -> Bool
bits `containsBitMask` bm = let bm' = fromIntegral . fromEnum $ bm
in
bm' .&. bits == bm'
-- |Given a bit pattern, yield all bit masks that it contains.
--
-- * This does *not* attempt to compute a minimal set of bit masks that, when
--   combined, yields the bit pattern; instead, all contained bit masks are
--   produced.
--
extractBitMasks :: (Bits a, Enum b, Bounded b) => a -> [b]
extractBitMasks bits =
[bm | bm <- [minBound..maxBound], bits `containsBitMask` bm]
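-- A hedged illustration with a hypothetical flag type (not part of this
-- module).  c2hs-generated flag enums carry the C mask values directly in
-- 'fromEnum'; 'combineBitMasks' ORs those values together rather than
-- shifting by a bit index.  ('extractBitMasks' would additionally need a
-- 'Bounded' instance and a sensible 'enumFromTo', which this sketch omits.)
data DemoFlag = DemoRead | DemoWrite | DemoAppend deriving (Show)
instance Enum DemoFlag where
  fromEnum DemoRead   = 1
  fromEnum DemoWrite  = 2
  fromEnum DemoAppend = 4
  toEnum 1 = DemoRead
  toEnum 2 = DemoWrite
  toEnum 4 = DemoAppend
  toEnum n = error ("DemoFlag.toEnum: unexpected value " ++ show n)
demoMask :: CInt
demoMask = combineBitMasks [DemoRead, DemoAppend]    -- 1 .|. 4 == 5
demoContains :: Bool
demoContains = demoMask `containsBitMask` DemoWrite  -- 2 .&. 5 /= 2, hence False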
-- Conversion routines
-- -------------------
-- |Integral conversion
--
cIntConv :: (Integral a, Integral b) => a -> b
cIntConv = fromIntegral
-- |Floating conversion
--
cFloatConv :: (RealFloat a, RealFloat b) => a -> b
cFloatConv = realToFrac
-- As this conversion by default goes via `Rational', it can be very slow...
{-# RULES
"cFloatConv/Float->Float" forall (x::Float). cFloatConv x = x;
"cFloatConv/Double->Double" forall (x::Double). cFloatConv x = x
#-}
-- |Obtain C value from Haskell 'Bool'.
--
cFromBool :: Num a => Bool -> a
cFromBool = fromBool
-- |Obtain Haskell 'Bool' from C value.
--
cToBool :: Num a => a -> Bool
cToBool = toBool
-- |Convert a C enumeration to Haskell.
--
cToEnum :: (Integral i, Enum e) => i -> e
cToEnum = toEnum . cIntConv
-- |Convert a Haskell enumeration to C.
--
cFromEnum :: (Enum e, Integral i) => e -> i
cFromEnum = cIntConv . fromEnum
| yav/alsa-haskell | Sound/Alsa/C2HS.hs | mit | 6,941 | 10 | 10 | 1,436 | 1,155 | 663 | 492 | 61 | 2 |
module Robots3.Config
( Config -- abstract
, c_hull, cluster_of
, showing_hull, show_hull
, breit
, make, make_with_hull, geschichte
, with_targets
, move, remove, addZug
, look, robots, inhalt
, positions, goals
, valid
, bounds, area
)
where
import Robots3.Data
import Robots3.Exact
import Autolib.FiniteMap
import Autolib.Set
import Autolib.ToDoc
import Autolib.Hash
import Autolib.Reader
import Autolib.Size
import Autolib.Xml
import Autolib.Reporter
import Data.List (partition)
import Control.Monad ( guard )
import Data.Maybe ( isJust, maybeToList, fromMaybe )
import Data.Typeable
import Data.Int
data Config = Config { c_hash :: Int
, inhalt :: FiniteMap String Robot
, targets :: Set Position
, breit :: Int
, geschichte :: [ Zug ]
, c_hull :: Set Position
, c_clusters :: FiniteMap Position Int
, show_hull :: Bool
}
deriving ( Typeable )
with_targets c ts = c { targets = mkSet ts }
cluster_of k p = lookupFM ( c_clusters k ) p
make :: [ Robot ] -> [ Position ] -> Config
make rs ts =
let i = listToFM $ do
r <- rs
return ( name r, r )
in hulled $ Config
{ c_hash = hash i
, inhalt = i
, targets = mkSet ts
, breit = maximum $ do
r <- rs
return $ extension $ position r
, geschichte = []
, show_hull = False
}
make_with_hull :: [ Robot ] -> [ Position ] -> Config
make_with_hull rs ts = showing_hull $ make rs ts
-- | recompute hull (from scratch)
hulled k =
let ps = mkSet $ map position $ robots k
fm = listToFM $ do
( i, cl ) <- zip [ 0 .. ] $ clusters ps
p <- setToList cl
return ( p, i )
in k { c_hull = exact_hull_points ps
, c_clusters = fm
}
showing_hull k = k { show_hull = True }
bounds k =
let ps = do r <- robots k ; return $ position r
xs = map x ps ; ys = map y ps
in ( ( minimum xs, minimum ys )
, ( maximum xs, maximum ys )
)
area k =
let ((a,b),(c,d)) = bounds k
in (c-a +1) * (d-b+1)
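-- A small numeric illustration (hypothetical helper, not part of this module)
-- of the same area formula on plain coordinate pairs: the bounding box
-- ((0,0),(3,2)) covers (3-0+1) * (2-0+1) = 12 cells.
boundingArea :: ((Int, Int), (Int, Int)) -> Int
boundingArea ((a, b), (c, d)) = (c - a + 1) * (d - b + 1)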
-- | away with it (send the robot into outer space)
remove :: String -> Config -> Config
remove n k =
let i = delFromFM (inhalt k) n
in hulled $ k { inhalt = i
, c_hash = hash i
}
-- | move to a new position
move :: (String, Position) -> Config -> Config
move (n, p) k =
let i = inhalt k
j = addToFM i n
$ let r = fromMaybe ( error "Robots3.Move.move" ) ( lookupFM i n )
in r { position = p }
in hulled $ k { inhalt = j
, c_hash = hash j
}
addZug :: Zug -> Config -> Config
addZug z k = k { geschichte = z : geschichte k }
instance ToDoc Config where
toDoc k = text "make" <+> toDoc ( robots k ) <+> toDoc ( goals k )
instance Reader Config where
atomic_readerPrec p = do
guard $ p < 9
my_reserved "make"
arg1 <- reader
arg2 <- reader
return $ make arg1 arg2
instance Container Config ([ Robot ],[Position]) where
label _ = "Config"
pack k = (robots k, goals k)
unpack (rs,ts) = make rs ts
instance Hashable Config where hashWithSalt s = c_hash
-- | compare positions only
essence :: Config -> ( Int, Set Position )
essence k =
let rs = robots k
in (hash k, mkSet $ map position rs)
instance Ord Config where
compare k l = compare (essence k) (essence l)
instance Eq Config where
(==) k l = (==) (essence k) (essence l)
instance Size Config where
size = fromIntegral . area
-----------------------------------------------------------------
look :: Config -> String -> Maybe Robot
look c n = lookupFM (inhalt c) n
robots :: Config -> [ Robot ]
robots c = eltsFM (inhalt c)
positions :: Config -> [ Position ]
positions = map position . robots
goals :: Config -> [ Position ]
goals k = setToList $ targets k
valid :: Config -> Reporter ()
valid k = do
let mappe = addListToFM_C (++) emptyFM $ do
r <- robots k
return ( position r, [ name r ] )
let mehrfach = do
( p, rs ) <- fmToList mappe
guard $ length rs > 1
return ( p , rs )
inform $ text "Stehen alle Roboter auf verschiedenen Positionen?"
if ( null mehrfach )
then inform $ text "Ja."
else reject $ text "Nein, diese nicht:" <+> toDoc mehrfach
assert ( not $ null $ goals k )
$ text "Ist wenigstens ein Ziel angegeben?"
| marcellussiegburg/autotool | collection/src/Robots3/Config.hs | gpl-2.0 | 4,352 | 25 | 17 | 1,225 | 1,667 | 873 | 794 | -1 | -1 |
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TupleSections #-}
module FineHeapUnsafe (TheHeap, FineHeapUnsafe, testIt, heapPopWithGap, heapUngap) where
import Control.Applicative
import Control.Concurrent.STM
import Control.Monad
import Data.List hiding (elem)
import Data.Ord
import Prelude hiding (elem)
import Test.QuickCheck (Args (..), Property, quickCheckWith, stdArgs)
import Test.QuickCheck.Monadic (monadicIO, run)
import qualified Test.QuickCheck.Monadic (assert)
import ConcurrentHeap
type TheHeap = FineHeapUnsafe
data FineHeapUnsafe e = Nil | FineHeapUnsafe {
heapSize :: TVar Int
, heapElem :: TVar (Maybe e)
, heapLeft :: FineHeapUnsafe e
, heapRight :: FineHeapUnsafe e
}
readNode :: FineHeapUnsafe e -> STM (Int, Maybe e)
readNode (FineHeapUnsafe size elem _ _) = liftM2 (,) (readTVar size) (readTVar elem)
readNode Nil = return (0, Nothing)
heapPutElem :: (Ord e) => FineHeapUnsafe e -> e -> STM (Maybe e)
heapPutElem root@(FineHeapUnsafe size elem _ _) new = do
(sz, el) <- readNode root
writeTVar size (sz + 1)
case (sz, el) of
(0, Nothing) -> do
-- This is an empty node, we can put our stuff here and finish.
writeTVar elem $ Just new
return Nothing
(_, Nothing) ->
-- This is a gap, we'll wait for someone to get rid of it.
retry
(_, Just el) -> do
-- We swap the element to be inserted if necessary.
if new < el
then writeTVar elem (Just new) >> return (Just el)
else return (Just new)
heapPutElemRec :: (Ord e) => FineHeapUnsafe e -> e -> IO ()
heapPutElemRec (FineHeapUnsafe _ _ left right) new = do
next <- atomically $ do
-- We choose the smaller subtree for insertion, note that we do not
-- care whether the children have gaps -- we drive our decision based
-- on size only.
[lsz, rsz] <- mapM (readTVar . heapSize) [left, right]
let root = if lsz < rsz then left else right
-- Update it.
fmap (root, ) <$> heapPutElem root new
-- And proceed recursively if necessary.
case next of
Nothing -> return ()
Just (root', new') -> heapPutElemRec root' new'
heapPopWithGap :: FineHeapUnsafe e -> STM e
heapPopWithGap root@(FineHeapUnsafe size elem left right) = do
(sz, el) <- readNode root
writeTVar size (sz - 1)
check $ sz > 0
case el of
Nothing ->
-- This is a gap, we'll wait for someone to push it down.
retry
Just el ->
-- We create a gap, we'll deal with it in a moment.
writeTVar elem Nothing >> return el
heapUngap :: (Ord e) => FineHeapUnsafe e -> IO ()
heapUngap root@(FineHeapUnsafe _ elem left right) = do
next <- atomically $ do
-- The size has been decremented by someone who put a gap here.
(sz, el) <- readNode root
case (sz, el) of
(0, _) ->
-- If it's empty then we're done.
return Nothing
(_, Nothing) -> do
-- Otherwise choose a min child.
children <- mapM readNode [left, right]
case children of
-- In these cases we have no choice.
[(0, Nothing), (_, Just rel)] -> replaceGap right rel
[(_, Just lel), (0, Nothing)] -> replaceGap left lel
-- Otherwise, we hold back if any of the nodes is a gap.
[(_, Nothing), _] -> retry
[_, (_, Nothing)] -> retry
-- Finally, we swap with min child
[(lsz, Just lel), (rsz, Just rel)] ->
let (tree, el) = minimumBy (comparing snd) [(left, lel), (right, rel)]
in replaceGap tree el
(_, Just _) ->
        -- This can happen with a one-element queue, when someone overwrites
        -- the empty node we left behind with a new element.
return Nothing
case next of
Nothing -> return ()
Just tree -> heapUngap tree
where
replaceGap tree@(FineHeapUnsafe size' elem' _ _) el = do
writeTVar elem $ Just el
writeTVar elem' Nothing
readTVar size' >>= writeTVar size' . subtract 1
return $ Just tree
-- The user must ensure mutual exclusion between put and pop operations.
instance (Ord e) => ConcurrentHeap (FineHeapUnsafe e) e where
heapNew n
| n > 0 = do
s <- newTVarIO 0
e <- newTVarIO Nothing
let m = n `quot` 2 -- ceil((n - 1) / 2)
[tl, tr] <- replicateM 2 $ heapNew m
return $ FineHeapUnsafe s e tl tr
| otherwise = return Nil
heapPut root@(FineHeapUnsafe {}) new = do
next <- atomically $ heapPutElem root new
case next of
Nothing -> return ()
Just new' -> heapPutElemRec root new'
heapPop root@(FineHeapUnsafe {}) = do
el <- atomically $ heapPopWithGap root
heapUngap root
return el
testSorting :: [Int] -> Property
testSorting xs = monadicIO $ do
sxs <- run $ do
let l = length xs
h <- heapNew l :: IO (FineHeapUnsafe Int)
mapM_ (heapPut h) xs
replicateM l (heapPop h)
Test.QuickCheck.Monadic.assert $ sxs == sort xs
testIt :: IO ()
testIt = quickCheckWith (stdArgs { maxSuccess = 10000 }) testSorting
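-- A hypothetical standalone driver (not part of the original module): build a
-- small heap, insert a handful of keys, and pop them back in ascending order.
-- It relies only on names already in scope in this module.
demoRun :: IO ()
demoRun = do
  h <- heapNew 8 :: IO (FineHeapUnsafe Int)
  mapM_ (heapPut h) [5, 1, 4, 2, 3]
  popped <- replicateM 5 (heapPop h)
  print popped  -- expected output: [1,2,3,4,5]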
| stupaq/stm-heaps | src/FineHeapUnsafe.hs | gpl-3.0 | 5,068 | 0 | 25 | 1,334 | 1,613 | 833 | 780 | 112 | 8 |
module Lamdu.GUI.Expr.HoleEdit
( make
) where
import qualified Control.Lens as Lens
import Hyper
import qualified GUI.Momentu as M
import qualified GUI.Momentu.EventMap as E
import qualified GUI.Momentu.I18N as MomentuTexts
import GUI.Momentu.ModKey (noMods)
import qualified GUI.Momentu.ModKey as ModKey
import GUI.Momentu.Responsive (Responsive)
import qualified GUI.Momentu.Responsive as Responsive
import qualified GUI.Momentu.Widget as Widget
import qualified GUI.Momentu.Widgets.Menu as Menu
import qualified GUI.Momentu.Widgets.Menu.Search as SearchMenu
import qualified Lamdu.Config as Config
import qualified Lamdu.I18N.Navigation as Texts
import Lamdu.GUI.Annotation (maybeAddAnnotationPl)
import qualified Lamdu.GUI.Expr.EventMap as ExprEventMap
import Lamdu.GUI.Expr.OptionEdit
import Lamdu.GUI.Monad (GuiM)
import qualified Lamdu.GUI.Monad as GuiM
import qualified Lamdu.GUI.Types as ExprGui
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import Lamdu.Name (Name)
import qualified Lamdu.Sugar.Types as Sugar
import Lamdu.Prelude
make ::
_ =>
Annotated (ExprGui.Payload i o) # Const (Sugar.Hole Name i o) ->
GuiM env i o (Responsive o)
make hole@(Ann (Const pl) _) =
do
searchTerm <- SearchMenu.readSearchTerm myId
negativeNumberEventMap <-
if searchTerm == "-"
then ExprEventMap.makeLiteralNumberEventMap "-" ?? pl ^. Sugar.plActions . Sugar.setToLiteral
else pure mempty
env <- Lens.view id
let innerHoleEventMap =
-- Make space go to the hole inside a result
E.keysEventMap [noMods ModKey.Key'Space]
(E.toDoc env [has . MomentuTexts.edit, has . Texts.nextEntry]) (pure ())
allowedSearchTerm <- ExprEventMap.allowedSearchTerm
let mkSearchTerm firstRes =
SearchMenu.searchTermEdit myId (pure . allowedSearchTerm) firstRes
<&> if searchTerm == "" then SearchMenu.termEditEventMap .~ mempty else id
(ExprEventMap.add options pl <&> (M.tValue %~))
<*> ((maybeAddAnnotationPl pl <&> (M.tValue %~))
<*> (SearchMenu.make mkSearchTerm (makeResults hole) M.empty myId ?? Menu.AnyPlace))
& local (has . SearchMenu.emptyStrings . Lens.mapped .~ "_")
<&> Responsive.fromWithTextPos
<&> M.weakerEvents innerHoleEventMap
<&> Widget.strongerEvents negativeNumberEventMap
where
myId = WidgetIds.fromExprPayload pl
options =
ExprEventMap.defaultOptions
{ ExprEventMap.addOperatorSetHoleState = Just (pl ^. Sugar.plEntityId)
}
makeResults ::
_ =>
Annotated (ExprGui.Payload i o) # Const (Sugar.Hole Name i o) ->
SearchMenu.ResultsContext ->
GuiM env i o (Menu.OptionList (Menu.Option (GuiM env i o) o))
makeResults (Ann (Const pl) (Const hole)) ctx =
do
c <- Lens.view (has . Config.completion . Config.completionResultCount)
GuiM.im (hole ^. Sugar.holeOptions) <*>
makeQuery ctx
>>= GuiM.im
<&> take c
<&> Lens.mapped %~
makeResult GuiM.makeBinder ctx .
-- Initialize the operator precedence of results.
-- Without this results accept too many operators (and a test fails).
(Sugar.optionExpr . annotation . Sugar.plParenInfo . Sugar.piMinOpPrec .~
pl ^. Sugar.plParenInfo . Sugar.piMinOpPrec)
<&> Menu.OptionList isTruncated
where
-- TODO: Need to check whether we have more options
isTruncated = False
| Peaker/lamdu | src/Lamdu/GUI/Expr/HoleEdit.hs | gpl-3.0 | 3,716 | 0 | 18 | 991 | 931 | 515 | 416 | -1 | -1 |
import XMonad
import XMonad.Config.Azerty
import Graphics.X11.ExtraTypes.XF86
import qualified Data.Map as M
import XMonad.Hooks.DynamicLog
--
import Data.Ratio
import XMonad.Layout.PerWorkspace
import XMonad.Layout.Spacing
import XMonad.Layout.Grid
import XMonad.Layout.IM
main :: IO ()
main = xmonad =<< xmobar myConf
-- Main configuration, override the defaults to your liking.
myConf = defaultConfig
{ modMask = mod4Mask
, terminal = "urxvt"
, layoutHook = myLayout
, workspaces = myWorkspaces
, manageHook = myManageHook <+> manageHook defaultConfig
, keys = myKeys
}
-- | Workspaces redirection
myManageHook = composeAll
[ className =? "Emacs" --> doShift "2:edit"
, className =? "Firefox" --> doShift "3:web"
, className =? "Thunar" --> doShift "4:browse"
, className =? "Pidgin" --> doShift "6:pidgin"
, className =? "Smplayer" --> doFloat
]
-- | Keyboard keys
keysToAdd x =
[ ((mod4Mask, xK_F4 ), kill)
, ((0, xF86XK_Calculator ), spawn "gnome-calculator")
, ((0, xF86XK_WWW ), spawn "firefox")
, ((0, xF86XK_HomePage ), spawn "thunar")
, ((0, xF86XK_Search ), spawn "pidgin")
]
keysToDel x = [((mod4Mask .|. shiftMask), xK_c)] -- to delete the unused keys
myKeys x = foldr M.delete (keysToAdd' x) (keysToDel x)
where
    -- merge the new keys into the existing azerty key map
keysToAdd' x = M.union (keys azertyConfig x) (M.fromList (keysToAdd x))
-- | Workspaces listing
myWorkspaces = ["1:main", "2:edit", "3:web", "4:browse", "5:irc", "6:pidgin"]
-- | Default layout
myLayout = pidgin $ Mirror tiled ||| tiled ||| Full
where
-- pidgin conf
pidgin = onWorkspace "6:pidgin" pidginLayout
pidginLayout = withIM (18/100) (Role "buddy_list") gridLayout
gridLayout = spacing 8 $ Grid
-- default tiling algorithm partitions the screen into two panes
tiled = spacing 2 $ Tall nmaster delta ratio
-- The default number of windows in the master pane
nmaster = 1
-- Default proportion of screen occupied by master pane
ratio = 2/3
-- Percent of screen to increment by when resizing panes
delta = 5/100
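-- A hedged extension sketch (not part of the original config): extra media
-- keys following the same pattern as 'keysToAdd'; the amixer commands are
-- assumptions about the local sound setup.  They could be merged in with
-- `keysToAdd x ++ mediaKeys`.
mediaKeys :: [((KeyMask, KeySym), X ())]
mediaKeys =
  [ ((0, xF86XK_AudioMute),        spawn "amixer -q set Master toggle")
  , ((0, xF86XK_AudioRaiseVolume), spawn "amixer -q set Master 5%+")
  , ((0, xF86XK_AudioLowerVolume), spawn "amixer -q set Master 5%-")
  ]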
--eof
| neliel/config | xmonad/xmonad_bak.hs | gpl-3.0 | 2,495 | 0 | 9 | 805 | 542 | 311 | 231 | 43 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Estuary.Types.ResourceType where
import GHC.Generics
import Data.Aeson
data ResourceType = Audio | Image | Video deriving (Show, Eq, Generic)
instance ToJSON ResourceType where
toEncoding = genericToEncoding defaultOptions
instance FromJSON ResourceType
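-- A hedged sketch (hypothetical helper, not part of the original module):
-- with the generic instances above, aeson's defaults encode this all-nullary
-- sum type as a bare JSON string ("Audio", "Image" or "Video"), so the round
-- trip below is expected to yield Just for every constructor.
roundTripResourceType :: ResourceType -> Maybe ResourceType
roundTripResourceType = decode . encode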
| d0kt0r0/estuary | common/src/Estuary/Types/ResourceType.hs | gpl-3.0 | 298 | 0 | 6 | 40 | 69 | 39 | 30 | 8 | 0 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
module Language.LCC.Simplifier.Inline where
import Control.Lens
import Control.Monad (liftM, liftM3)
import Control.Monad.Reader (MonadReader, runReaderT, asks, local)
import Data.List (find)
import Data.Maybe
import Language.LCC.AST
import Language.LCC.Target
import Language.LCC.TypeChecker
import qualified Language.LCC.Error as Err
type InliningEnv t m = (Target t, MonadReader (CallStack t) m)
type CallStack t = (AnalyzedAST, [CallStackFrame], Int, t)
type CallStackFrame = (AbsolutePath, [BoundParam])
type BoundParam = (Param, AbsExpr)
inline :: (Functor m, Err.ErrorM m, Target t)
=> Int -> t -> AnalyzedAST -> m AnalyzedAST
inline maxStackDepth t ast =
flip runReaderT (ast, [], maxStackDepth, t) $
scopedMapM (trImpl inlineExpr) ast
inlineExpr :: (Err.ErrorM m, ScopedAbs Type m, InliningEnv t m)
=> AbsExpr
-> m AbsExpr
inlineExpr expr = case expr of
Array arr -> liftM Array (mapM inlineExpr arr)
SConcat s -> liftM (concatLits . SConcat) (mapM inlineExpr s)
Cond c t f -> do
let (c',t',f') = over each inlineExpr (c,t,f)
c' >>= \case
BoolL b -> if b then t' else f'
_ -> liftM3 Cond c' t' f'
Funcall fn args -> do
t <- getTarget
inlinedArgs <- mapM inlineExpr args
case fn of
Builtin sig -> inlineBuiltin t sig inlinedArgs
Input _ _ -> return expr
Fn (VAbsolutePath p) -> inlineFuncall (p^.from absolute) inlinedArgs
Fn (VParamName name) -> fromMaybe expr `liftM` findBoundParam name
_ -> return expr
where
concatLits :: AbsExpr -> AbsExpr
concatLits = \case
SConcat [s] -> concatLits s
SConcat (s:ss) ->
case (concatLits s, concatLits (SConcat ss)) of
(StringL s', SConcat ss') -> SConcat $ StringL s' : ss'
(StringL s', StringL ss') -> StringL $ s' ++ ss'
(SConcat s', SConcat ss') -> SConcat $ s' ++ ss'
(s', ss') -> SConcat [s',ss']
expr' -> expr'
findBoundParam :: InliningEnv t m => String -> m (Maybe AbsExpr)
findBoundParam name = do
boundParams <- asks (^.._2.traverse._2.traverse)
return . fmap snd $ find (paramNameEq name . fst) boundParams
inlineFuncall :: (Err.ErrorM m, ScopedAbs Type m, InliningEnv t m)
=> AbsolutePath
-> [AbsExpr]
-> m AbsExpr
inlineFuncall f args = do
let vf = f^.absolute.re _Absolute
noop = return $ Funcall (Fn vf) args
availStack <- getAvailStack
if availStack == 0
then noop
else do
ast <- getAST
tr <- findFunction ast f args
uRec <- isUndRec (tr^.trSig) args
if uRec
then noop
else extendStack (bindParams tr args)
(tr ./> inlineExpr . view trImpl)
isUndRec :: (Err.ErrorM m, ScopedAbs Type m, InliningEnv t m)
=> AnalyzedSignature -> [AbsExpr] -> m Bool
isUndRec callee args = do
caller <- viewS trSig
return $ caller == callee && all (has $ _Funcall._1._Fn._ParamName) args
extendStack :: InliningEnv t m => CallStackFrame -> m a -> m a
extendStack frame = local $ over _2 (frame:)
. over _3 (subtract 1)
bindParams :: AbsTranslation Type -> [AbsExpr] -> CallStackFrame
bindParams tr args = let sig = tr^.trSig
in (sig^.sigPath, zip (sig^.sigParams) args)
getAST :: InliningEnv t m => m AnalyzedAST
getAST = view _1
getAvailStack :: InliningEnv t m => m Int
getAvailStack = view _3
getTarget :: InliningEnv t m => m t
getTarget = view _4
| xcv-/LCC | lib/Language/LCC/Simplifier/Inline.hs | gpl-3.0 | 3,729 | 0 | 16 | 1,051 | 1,342 | 687 | 655 | -1 | -1 |