| column | dtype | range / values |
|---|---|---|
| code | string | lengths 5 to 1.03M |
| repo_name | string | lengths 5 to 90 |
| path | string | lengths 4 to 158 |
| license | string | 15 classes |
| size | int64 | 5 to 1.03M |
| n_ast_errors | int64 | 0 to 53.9k |
| ast_max_depth | int64 | 2 to 4.17k |
| n_whitespaces | int64 | 0 to 365k |
| n_ast_nodes | int64 | 3 to 317k |
| n_ast_terminals | int64 | 1 to 171k |
| n_ast_nonterminals | int64 | 1 to 146k |
| loc | int64 | -1 to 37.3k |
| cycloplexity | int64 | -1 to 1.31k |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module URI.ByteString.Internal where
-------------------------------------------------------------------------------
import Blaze.ByteString.Builder (Builder)
import qualified Blaze.ByteString.Builder as BB
import qualified Blaze.ByteString.Builder.Char.Utf8 as BB
import Control.Applicative
import Control.Monad
import qualified Control.Monad.Fail as F
import Data.Attoparsec.ByteString
import qualified Data.Attoparsec.ByteString as A
import qualified Data.Attoparsec.ByteString.Char8 as A (decimal)
import Data.Bits
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Data.Char (ord, toLower)
import Data.Ix
import Data.List (delete, intersperse,
sortBy, stripPrefix, (\\))
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid as Monoid (mempty)
import Data.Ord (comparing)
import Data.Semigroup as Semigroup
import Data.Word
import Text.Read (readMaybe)
-------------------------------------------------------------------------------
import URI.ByteString.Types
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Strict URI Parser config. Follows RFC3986 as-specified. Use this
-- if you can be certain that your URIs are properly encoded or if you
-- want parsing to fail if they deviate from the spec at all.
strictURIParserOptions :: URIParserOptions
strictURIParserOptions = URIParserOptions {
upoValidQueryChar = validForQuery
}
-------------------------------------------------------------------------------
-- | Lax URI Parser config. Use this if you want to handle common
-- deviations from the spec gracefully.
--
-- * Allows non-encoded [ and ] in query string
laxURIParserOptions :: URIParserOptions
laxURIParserOptions = URIParserOptions {
upoValidQueryChar = validForQueryLax
}
-------------------------------------------------------------------------------
-- | All normalization options disabled
noNormalization :: URINormalizationOptions
noNormalization = URINormalizationOptions False False False False False False False httpDefaultPorts
-------------------------------------------------------------------------------
-- | The set of known default ports to schemes. Currently only
-- contains http\/80 and https\/443. Feel free to extend it if needed
-- with 'unoDefaultPorts'.
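--
-- An illustrative (not shipped with the library) extension adding an ftp/21
-- mapping, applied through the normalization options record:
--
-- > rfc3986Normalization { unoDropDefPort  = True
-- >                      , unoDefaultPorts = M.insert (Scheme "ftp") (Port 21) httpDefaultPorts
-- >                      }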
httpDefaultPorts :: M.Map Scheme Port
httpDefaultPorts = M.fromList [ (Scheme "http", Port 80)
, (Scheme "https", Port 443)
]
-------------------------------------------------------------------------------
-- | Only normalizations deemed appropriate for all protocols by
-- RFC3986 enabled, namely:
--
-- * Downcase Scheme
-- * Downcase Host
-- * Remove Dot Segments
rfc3986Normalization :: URINormalizationOptions
rfc3986Normalization = noNormalization { unoDowncaseScheme = True
, unoDowncaseHost = True
, unoRemoveDotSegments = True
}
-------------------------------------------------------------------------------
-- | The same as 'rfc3986Normalization' but with additional enabled
-- features if you're working with HTTP URIs:
--
-- * Drop Default Port (with 'httpDefaultPorts')
-- * Drop Extra Slashes
httpNormalization :: URINormalizationOptions
httpNormalization = rfc3986Normalization { unoDropDefPort = True
, unoSlashEmptyPath = True
}
-------------------------------------------------------------------------------
-- | All options enabled
aggressiveNormalization :: URINormalizationOptions
aggressiveNormalization = URINormalizationOptions True True True True True True True httpDefaultPorts
-------------------------------------------------------------------------------
-- | @toAbsolute scheme ref@ converts @ref@ to an absolute URI.
-- If @ref@ is already absolute, then it is unchanged.
toAbsolute :: Scheme -> URIRef a -> URIRef Absolute
toAbsolute scheme (RelativeRef {..}) = URI scheme rrAuthority rrPath rrQuery rrFragment
toAbsolute _ uri@(URI {..}) = uri
-------------------------------------------------------------------------------
-- | URI Serializer
-------------------------------------------------------------------------------
-- | Serialize a URI reference into a 'Builder'.
--
-- Example of serializing + converting to a lazy "Data.ByteString.Lazy.ByteString":
--
-- >>> BB.toLazyByteString $ serializeURIRef $ URI {uriScheme = Scheme {schemeBS = "http"}, uriAuthority = Just (Authority {authorityUserInfo = Nothing, authorityHost = Host {hostBS = "www.example.org"}, authorityPort = Nothing}), uriPath = "/foo", uriQuery = Query {queryPairs = [("bar","baz")]}, uriFragment = Just "quux"}
-- "http://www.example.org/foo?bar=baz#quux"
serializeURIRef :: URIRef a -> Builder
serializeURIRef = normalizeURIRef noNormalization
-------------------------------------------------------------------------------
-- | Like 'serializeURIRef', with conversion into a strict 'ByteString'.
serializeURIRef' :: URIRef a -> ByteString
serializeURIRef' = BB.toByteString . serializeURIRef
-------------------------------------------------------------------------------
-- | Serialize a URI into a Builder.
serializeURI :: URIRef Absolute -> Builder
serializeURI = normalizeURIRef noNormalization
{-# DEPRECATED serializeURI "Use 'serializeURIRef' instead" #-}
-------------------------------------------------------------------------------
-- | Similar to 'serializeURIRef' but performs configurable degrees of
-- URI normalization. If your goal is the fastest serialization speed
-- possible, 'serializeURIRef' will be fine. If you intend on
-- comparing URIs (say for caching purposes), you'll want to use this.
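--
-- Illustrative, doctest-style example (downcases the scheme and host and
-- collapses the dot segment under 'rfc3986Normalization'):
--
-- >>> normalizeURIRef' rfc3986Normalization <$> parseURI strictURIParserOptions "HTTP://www.Example.ORG/./foo"
-- Right "http://www.example.org/foo"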
normalizeURIRef :: URINormalizationOptions -> URIRef a -> Builder
normalizeURIRef o uri@(URI {..}) = normalizeURI o uri
normalizeURIRef o uri@(RelativeRef {}) = normalizeRelativeRef o Nothing uri
-------------------------------------------------------------------------------
normalizeURIRef' :: URINormalizationOptions -> URIRef a -> ByteString
normalizeURIRef' o = BB.toByteString . normalizeURIRef o
-------------------------------------------------------------------------------
normalizeURI :: URINormalizationOptions -> URIRef Absolute -> Builder
normalizeURI o@URINormalizationOptions {..} URI {..} =
scheme <> BB.fromString ":" <> normalizeRelativeRef o (Just uriScheme) rr
where
scheme = bs (sCase (schemeBS uriScheme))
sCase
| unoDowncaseScheme = downcaseBS
| otherwise = id
rr = RelativeRef uriAuthority uriPath uriQuery uriFragment
-------------------------------------------------------------------------------
normalizeRelativeRef :: URINormalizationOptions -> Maybe Scheme -> URIRef Relative -> Builder
normalizeRelativeRef o@URINormalizationOptions {..} mScheme RelativeRef {..} =
authority <> path <> query <> fragment
where
path
| unoSlashEmptyPath && BS.null rrPath = BB.fromByteString "/"
| segs == [""] = BB.fromByteString "/"
| otherwise = mconcat (intersperse (c8 '/') (map urlEncodePath segs))
segs = dropSegs (BS.split slash (pathRewrite rrPath))
pathRewrite
| unoRemoveDotSegments = removeDotSegments
| otherwise = id
dropSegs [] = []
dropSegs (h:t)
| unoDropExtraSlashes = h:(filter (not . BS.null) t)
| otherwise = h:t
authority = maybe Monoid.mempty (serializeAuthority o mScheme) rrAuthority
query = serializeQuery o rrQuery
fragment = serializeFragment rrFragment
-------------------------------------------------------------------------------
--TODO: this is probably ripe for benchmarking
-- | Algorithm described in
-- <https://tools.ietf.org/html/rfc3986#section-5.2.4>, reproduced
-- artlessly.
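--
-- Illustrative, doctest-style examples taken from the RFC text:
--
-- >>> removeDotSegments "/a/b/c/./../../g"
-- "/a/g"
--
-- >>> removeDotSegments "mid/content=5/../6"
-- "mid/6"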
removeDotSegments :: ByteString -> ByteString
removeDotSegments path = mconcat (rl2L (go path (RL [])))
where
go inBuf outBuf
-- A. If the input buffer begins with prefix of ../ or ./ then
-- remove the prefix from the input buffer
| BS8.isPrefixOf "../" inBuf = go (BS8.drop 3 inBuf) outBuf
| BS8.isPrefixOf "./" inBuf = go (BS8.drop 2 inBuf) outBuf
-- B. If the input buffer begins with a prefix of "/./" or "/.",
-- where "." is a complete path segment, then replace that
-- prefix with "/" in the input buffer. TODO: I think "a
-- complete path segment" means it's the whole thing?
| BS.isPrefixOf "/./" inBuf = go (BS8.drop 2 inBuf) outBuf
| inBuf == "/." = go "/" outBuf
-- C. If the input buffer begins with a prefix of "/../" or
-- "/..", where ".." is a complete path segment, then replace
-- that prefix with "/" in the input buffer and remove the last
-- segment and its preceding "/" (if any) from the output buffer
| BS.isPrefixOf "/../" inBuf = go (BS8.drop 3 inBuf) (unsnoc (unsnoc outBuf))
| inBuf == "/.." = go "/" (unsnoc (unsnoc outBuf))
-- D. If the input buffer consists only of "." or "..", then
-- remove that from the input buffer
| inBuf == "." = go mempty outBuf
| inBuf == ".." = go mempty outBuf
-- E. Move the first path segment in the input buffer to the end
-- of the output buffer, including the initial "/" character (if
-- any) and any subsequent characters up to, but not including,
-- the next "/" character or the end of the input buffer.
| otherwise = case BS8.uncons inBuf of
Just ('/', rest) ->
let (thisSeg, inBuf') = BS8.span (/= '/') rest
in go inBuf' (outBuf |> "/" |> thisSeg)
Just (_, _) ->
let (thisSeg, inBuf') = BS8.span (/= '/') inBuf
in go inBuf' (outBuf |> thisSeg)
Nothing -> outBuf
-------------------------------------------------------------------------------
-- | Like 'serializeURI', with conversion into a strict 'ByteString'.
serializeURI' :: URIRef Absolute -> ByteString
serializeURI' = BB.toByteString . serializeURI
{-# DEPRECATED serializeURI' "Use 'serializeURIRef'' instead" #-}
-------------------------------------------------------------------------------
-- | Like 'serializeURI', but do not render scheme.
serializeRelativeRef :: URIRef Relative -> Builder
serializeRelativeRef = normalizeRelativeRef noNormalization Nothing
{-# DEPRECATED serializeRelativeRef "Use 'serializeURIRef' instead" #-}
-------------------------------------------------------------------------------
-- | Like 'serializeRelativeRef', with conversion into a strict 'ByteString'.
serializeRelativeRef' :: URIRef Relative -> ByteString
serializeRelativeRef' = BB.toByteString . serializeRelativeRef
{-# DEPRECATED serializeRelativeRef' "Use 'serializeURIRef'' instead" #-}
-------------------------------------------------------------------------------
-- | Serialize the query part of a url
-- @serializeQuery opts mempty = ""@
-- @serializeQuery opts (Query [("a","b"),("c","d")]) = "?a=b&c=d"@
serializeQuery :: URINormalizationOptions -> Query -> Builder
serializeQuery _ (Query []) = mempty
serializeQuery URINormalizationOptions {..} (Query ps) =
c8 '?' <> mconcat (intersperse (c8 '&') (map serializePair ps'))
where
serializePair (k, v) = urlEncodeQuery k <> c8 '=' <> urlEncodeQuery v
ps'
| unoSortParameters = sortBy (comparing fst) ps
| otherwise = ps
serializeQuery' :: URINormalizationOptions -> Query -> ByteString
serializeQuery' opts = BB.toByteString . serializeQuery opts
-------------------------------------------------------------------------------
serializeFragment :: Maybe ByteString -> Builder
serializeFragment = maybe mempty (\s -> c8 '#' <> bs s)
serializeFragment' :: Maybe ByteString -> ByteString
serializeFragment' = BB.toByteString . serializeFragment
-------------------------------------------------------------------------------
serializeAuthority :: URINormalizationOptions -> Maybe Scheme -> Authority -> Builder
serializeAuthority URINormalizationOptions {..} mScheme Authority {..} = BB.fromString "//" <> userinfo <> bs host <> port
where
userinfo = maybe mempty serializeUserInfo authorityUserInfo
host = hCase (hostBS authorityHost)
hCase
| unoDowncaseHost = downcaseBS
| otherwise = id
port = maybe mempty packPort effectivePort
effectivePort = do
p <- authorityPort
dropPort mScheme p
packPort (Port p) = c8 ':' <> BB.fromString (show p)
dropPort Nothing = Just
dropPort (Just scheme)
| unoDropDefPort = dropPort' scheme
| otherwise = Just
dropPort' s p
| M.lookup s unoDefaultPorts == Just p = Nothing
| otherwise = Just p
serializeAuthority' :: URINormalizationOptions -> Maybe Scheme -> Authority -> ByteString
serializeAuthority' opts mScheme = BB.toByteString . serializeAuthority opts mScheme
-------------------------------------------------------------------------------
serializeUserInfo :: UserInfo -> Builder
serializeUserInfo UserInfo {..} = bs uiUsername <> c8 ':' <> bs uiPassword <> c8 '@'
serializeUserInfo' :: UserInfo -> ByteString
serializeUserInfo' = BB.toByteString . serializeUserInfo
-------------------------------------------------------------------------------
bs :: ByteString -> Builder
bs = BB.fromByteString
-------------------------------------------------------------------------------
c8 :: Char -> Builder
c8 = BB.fromChar
-------------------------------------------------------------------------------
-- | Parse a strict ByteString into a URI or an error.
--
-- Example:
--
-- >>> parseURI strictURIParserOptions "http://www.example.org/foo?bar=baz#quux"
-- Right (URI {uriScheme = Scheme {schemeBS = "http"}, uriAuthority = Just (Authority {authorityUserInfo = Nothing, authorityHost = Host {hostBS = "www.example.org"}, authorityPort = Nothing}), uriPath = "/foo", uriQuery = Query {queryPairs = [("bar","baz")]}, uriFragment = Just "quux"})
--
-- >>> parseURI strictURIParserOptions "$$$$://badurl.example.org"
-- Left (MalformedScheme NonAlphaLeading)
--
-- There are some urls that you'll encounter which defy the spec, such
-- as those with square brackets in the query string. If you must be
-- able to parse those, you can use "laxURIParserOptions" or specify your own
--
-- >>> parseURI strictURIParserOptions "http://www.example.org/foo?bar[]=baz"
-- Left MalformedQuery
--
-- >>> parseURI laxURIParserOptions "http://www.example.org/foo?bar[]=baz"
-- Right (URI {uriScheme = Scheme {schemeBS = "http"}, uriAuthority = Just (Authority {authorityUserInfo = Nothing, authorityHost = Host {hostBS = "www.example.org"}, authorityPort = Nothing}), uriPath = "/foo", uriQuery = Query {queryPairs = [("bar[]","baz")]}, uriFragment = Nothing})
--
-- >>> let myLaxOptions = URIParserOptions { upoValidQueryChar = liftA2 (||) (upoValidQueryChar strictURIParserOptions) (inClass "[]")}
-- >>> parseURI myLaxOptions "http://www.example.org/foo?bar[]=baz"
-- Right (URI {uriScheme = Scheme {schemeBS = "http"}, uriAuthority = Just (Authority {authorityUserInfo = Nothing, authorityHost = Host {hostBS = "www.example.org"}, authorityPort = Nothing}), uriPath = "/foo", uriQuery = Query {queryPairs = [("bar[]","baz")]}, uriFragment = Nothing})
parseURI :: URIParserOptions -> ByteString -> Either URIParseError (URIRef Absolute)
parseURI opts = parseOnly' OtherError (uriParser' opts)
-- | Like 'parseURI', but do not parse scheme.
parseRelativeRef :: URIParserOptions -> ByteString -> Either URIParseError (URIRef Relative)
parseRelativeRef opts = parseOnly' OtherError (relativeRefParser' opts)
-------------------------------------------------------------------------------
-- | Convenience alias for a parser that can return URIParseError
type URIParser = Parser' URIParseError
-------------------------------------------------------------------------------
-- | Underlying attoparsec parser. Useful for composing with your own parsers.
uriParser :: URIParserOptions -> Parser (URIRef Absolute)
uriParser = unParser' . uriParser'
-------------------------------------------------------------------------------
-- | Toplevel parser for URIs
uriParser' :: URIParserOptions -> URIParser (URIRef Absolute)
uriParser' opts = do
scheme <- schemeParser
void $ word8 colon `orFailWith` MalformedScheme MissingColon
RelativeRef authority path query fragment <- relativeRefParser' opts
return $ URI scheme authority path query fragment
-------------------------------------------------------------------------------
-- | Underlying attoparsec parser. Useful for composing with your own parsers.
relativeRefParser :: URIParserOptions -> Parser (URIRef Relative)
relativeRefParser = unParser' . relativeRefParser'
-------------------------------------------------------------------------------
-- | Toplevel parser for relative refs
relativeRefParser' :: URIParserOptions -> URIParser (URIRef Relative)
relativeRefParser' opts = do
(authority, path) <- hierPartParser <|> rrPathParser
query <- queryParser opts
frag <- mFragmentParser
case frag of
Just _ -> endOfInput `orFailWith` MalformedFragment
Nothing -> endOfInput `orFailWith` MalformedQuery
return $ RelativeRef authority path query frag
-------------------------------------------------------------------------------
-- | Parser for scheme, e.g. "http", "https", etc.
schemeParser :: URIParser Scheme
schemeParser = do
c <- satisfy isAlpha `orFailWith` MalformedScheme NonAlphaLeading
rest <- A.takeWhile isSchemeValid `orFailWith` MalformedScheme InvalidChars
return $ Scheme $ c `BS.cons` rest
where
isSchemeValid = inClass $ "-+." ++ alphaNum
-------------------------------------------------------------------------------
-- | Hier part immediately follows the schema and encompasses the
-- authority and path sections.
hierPartParser :: URIParser (Maybe Authority, ByteString)
hierPartParser = authWithPathParser <|>
pathAbsoluteParser <|>
pathRootlessParser <|>
pathEmptyParser
-------------------------------------------------------------------------------
-- | Relative references have awkward corner cases. See
-- 'firstRelRefSegmentParser'.
rrPathParser :: URIParser (Maybe Authority, ByteString)
rrPathParser = (Nothing,) <$>
((<>) <$> firstRelRefSegmentParser <*> pathParser)
-------------------------------------------------------------------------------
-- | See the "authority path-abempty" grammar in the RFC
authWithPathParser :: URIParser (Maybe Authority, ByteString)
authWithPathParser = string' "//" *> ((,) <$> mAuthorityParser <*> pathParser)
-------------------------------------------------------------------------------
-- | See the "path-absolute" grammar in the RFC. Essentially a special
-- case of rootless.
pathAbsoluteParser :: URIParser (Maybe Authority, ByteString)
pathAbsoluteParser = string' "/" *> pathRootlessParser
-------------------------------------------------------------------------------
-- | See the "path-rootless" grammar in the RFC.
pathRootlessParser :: URIParser (Maybe Authority, ByteString)
pathRootlessParser = (,) <$> pure Nothing <*> pathParser1
-------------------------------------------------------------------------------
-- | See the "path-empty" grammar in the RFC. Must not be followed
-- with a path-valid char.
pathEmptyParser :: URIParser (Maybe Authority, ByteString)
pathEmptyParser = do
nextChar <- peekWord8 `orFailWith` OtherError "impossible peekWord8 error"
case nextChar of
Just c -> guard (notInClass pchar c) >> return emptyCase
_ -> return emptyCase
where
emptyCase = (Nothing, mempty)
-------------------------------------------------------------------------------
-- | Parses the authority section, if present (wrapped in 'Maybe').
mAuthorityParser :: URIParser (Maybe Authority)
mAuthorityParser = mParse authorityParser
-------------------------------------------------------------------------------
-- | Parses the user info section of a URL (i.e. for HTTP Basic
-- Authentication). Note that this will decode any percent-encoded
-- data.
userInfoParser :: URIParser UserInfo
userInfoParser = (uiTokenParser <* word8 atSym) `orFailWith` MalformedUserInfo
where
atSym = 64
uiTokenParser = do
ui <- A.takeWhile1 validForUserInfo
let (user, passWithColon) = BS.break (== colon) $ urlDecode' ui
let pass = BS.drop 1 passWithColon
return $ UserInfo user pass
validForUserInfo = inClass $ pctEncoded ++ subDelims ++ (':' : unreserved)
-------------------------------------------------------------------------------
-- | Authority consists of host and port
authorityParser :: URIParser Authority
authorityParser = Authority <$> mParse userInfoParser <*> hostParser <*> mPortParser
-------------------------------------------------------------------------------
-- | Parser that can handle IPV6/Future literals, IPV4, and domain names.
hostParser :: URIParser Host
hostParser = (Host <$> parsers) `orFailWith` MalformedHost
where
parsers = ipLiteralParser <|> ipV4Parser <|> regNameParser
ipLiteralParser = word8 oBracket *> (ipVFutureParser <|> ipV6Parser) <* word8 cBracket
-------------------------------------------------------------------------------
-- | Parses IPV6 addresses. See relevant section in RFC.
ipV6Parser :: Parser ByteString
ipV6Parser = do
leading <- h16s
elided <- maybe [] (const [""]) <$> optional (string "::")
trailing <- many (A.takeWhile (/= colon) <* word8 colon)
(finalChunkLen, final) <- finalChunk
let len = length (leading ++ trailing) + finalChunkLen
when (len > 8) $ fail "Too many digits in IPv6 address"
return $ rejoin $ [rejoin leading] ++ elided ++ trailing ++ maybeToList final
where
finalChunk = fromMaybe (0, Nothing) <$> optional (finalIpV4 <|> finalH16)
finalH16 = (1, ) . Just <$> h16
finalIpV4 = (2, ) . Just <$> ipV4Parser
rejoin = BS.intercalate ":"
h16s = h16 `sepBy` word8 colon
h16 = mconcat <$> parseBetween 1 4 (A.takeWhile1 hexDigit)
-------------------------------------------------------------------------------
-- | Parses IPVFuture addresses. See relevant section in RFC.
ipVFutureParser :: Parser ByteString
ipVFutureParser = do
_ <- word8 lowercaseV
ds <- A.takeWhile1 hexDigit
_ <- word8 period
rest <- A.takeWhile1 $ inClass $ subDelims ++ ":" ++ unreserved
return $ "v" <> ds <> "." <> rest
where
lowercaseV = 118
-------------------------------------------------------------------------------
-- | Parses a valid IPV4 address
ipV4Parser :: Parser ByteString
ipV4Parser = mconcat <$> sequence [ decOctet
, dot
, decOctet
, dot
, decOctet
, dot
, decOctet]
where
decOctet :: Parser ByteString
decOctet = do
(s,num) <- A.match A.decimal
let len = BS.length s
guard $ len <= 3
guard $ num >= (1 :: Int) && num <= 255
return s
dot = string "."
-------------------------------------------------------------------------------
-- | This corresponds to the hostname, e.g. www.example.org
regNameParser :: Parser ByteString
regNameParser = urlDecode' <$> A.takeWhile1 (inClass validForRegName)
where
validForRegName = pctEncoded ++ subDelims ++ unreserved
-------------------------------------------------------------------------------
-- | Only parse a port if the colon signifier is there.
mPortParser :: URIParser (Maybe Port)
mPortParser = word8' colon `thenJust` portParser
-------------------------------------------------------------------------------
-- | Parses port number from the hostname. Colon separator must be
-- handled elsewhere.
portParser :: URIParser Port
portParser = (Port <$> A.decimal) `orFailWith` MalformedPort
-------------------------------------------------------------------------------
-- | Path with any number of segments
pathParser :: URIParser ByteString
pathParser = pathParser' A.many'
-------------------------------------------------------------------------------
-- | Path with at least 1 segment
pathParser1 :: URIParser ByteString
pathParser1 = pathParser' A.many1'
-------------------------------------------------------------------------------
-- | Parses the path section of a url. Note that it percent-decodes
-- the path as it parses (via 'urlDecodeQuery').
pathParser' :: (Parser ByteString -> Parser [ByteString]) -> URIParser ByteString
pathParser' repeatParser = (urlDecodeQuery . mconcat <$> repeatParser segmentParser) `orFailWith` MalformedPath
where
segmentParser = mconcat <$> sequence [string "/", A.takeWhile (inClass pchar)]
-------------------------------------------------------------------------------
-- | Parses the first segment of a path section of a relative-path
-- reference. See RFC 3986, Section 4.2.
firstRelRefSegmentParser :: URIParser ByteString
firstRelRefSegmentParser = A.takeWhile (inClass (pchar \\ ":")) `orFailWith` MalformedPath
-------------------------------------------------------------------------------
-- | This parser is being a bit pragmatic. The query section in the
-- spec does not identify the key/value format used in URIs, but that
-- is what most users are expecting to see. One alternative could be
-- to just expose the query string as a string and offer functions on
-- URI to parse a query string to a Query.
queryParser :: URIParserOptions -> URIParser Query
queryParser opts = do
mc <- peekWord8 `orFailWith` OtherError "impossible peekWord8 error"
case mc of
Just c
| c == question -> skip' 1 *> itemsParser
| c == hash -> pure mempty
| otherwise -> fail' MalformedPath
_ -> pure mempty
where
itemsParser = Query . filter neQuery <$> A.sepBy' (queryItemParser opts) (word8' ampersand)
neQuery (k, _) = not (BS.null k)
-------------------------------------------------------------------------------
-- | When parsing a single query item string like "foo=bar", turns it
-- into a key/value pair as per convention, with the value being
-- optional. & separators need to be handled further up.
queryItemParser :: URIParserOptions -> URIParser (ByteString, ByteString)
queryItemParser opts = do
s <- A.takeWhile (upoValidQueryChar opts) `orFailWith` MalformedQuery
if BS.null s
then return (mempty, mempty)
else do
let (k, vWithEquals) = BS.break (== equals) s
let v = BS.drop 1 vWithEquals
return (urlDecodeQuery k, urlDecodeQuery v)
-------------------------------------------------------------------------------
validForQuery :: Word8 -> Bool
validForQuery = inClass ('?':'/':delete '&' pchar)
-------------------------------------------------------------------------------
validForQueryLax :: Word8 -> Bool
validForQueryLax = notInClass "&#"
-------------------------------------------------------------------------------
-- | Only parses a fragment if the # signifier is there
mFragmentParser :: URIParser (Maybe ByteString)
mFragmentParser = mParse $ word8' hash *> fragmentParser
-------------------------------------------------------------------------------
-- | The final piece of a uri, e.g. #fragment, minus the #.
fragmentParser :: URIParser ByteString
fragmentParser = Parser' $ A.takeWhile validFragmentWord
where
validFragmentWord = inClass ('?':'/':pchar)
-------------------------------------------------------------------------------
-- | Grammar Components
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
hexDigit :: Word8 -> Bool
hexDigit = inClass "0-9a-fA-F"
-------------------------------------------------------------------------------
isAlpha :: Word8 -> Bool
isAlpha = inClass alpha
-------------------------------------------------------------------------------
isDigit :: Word8 -> Bool
isDigit = inClass digit
-------------------------------------------------------------------------------
pchar :: String
pchar = pctEncoded ++ subDelims ++ ":@" ++ unreserved
-------------------------------------------------------------------------------
-- Very important! When concatenating this to other strings to make larger
-- character classes, you must put this at the end because the '-' character
-- is treated as a range unless it's at the beginning or end.
unreserved :: String
unreserved = alphaNum ++ "~._-"
-------------------------------------------------------------------------------
unreserved8 :: [Word8]
unreserved8 = map ord8 unreserved
-------------------------------------------------------------------------------
unreservedPath8 :: [Word8]
unreservedPath8 = unreserved8 ++ map ord8 ":@&=+$,"
-------------------------------------------------------------------------------
ord8 :: Char -> Word8
ord8 = fromIntegral . ord
-------------------------------------------------------------------------------
-- | pct-encoded technically is % HEXDIG HEXDIG but that's handled by
-- the previous alphaNum constraint. May need to double back with a
-- parser to ensure pct-encoded never exceeds 2 hexdigs after the %.
pctEncoded :: String
pctEncoded = "%"
-------------------------------------------------------------------------------
subDelims :: String
subDelims = "!$&'()*+,;="
-------------------------------------------------------------------------------
alphaNum :: String
alphaNum = alpha ++ digit
-------------------------------------------------------------------------------
alpha :: String
alpha = "a-zA-Z"
-------------------------------------------------------------------------------
digit :: String
digit = "0-9"
-------------------------------------------------------------------------------
colon :: Word8
colon = 58
-------------------------------------------------------------------------------
oBracket :: Word8
oBracket = 91
-------------------------------------------------------------------------------
cBracket :: Word8
cBracket = 93
-------------------------------------------------------------------------------
equals :: Word8
equals = 61
-------------------------------------------------------------------------------
question :: Word8
question = 63
-------------------------------------------------------------------------------
ampersand :: Word8
ampersand = 38
-------------------------------------------------------------------------------
hash :: Word8
hash = 35
-------------------------------------------------------------------------------
period :: Word8
period = 46
-------------------------------------------------------------------------------
slash :: Word8
slash = 47
-------------------------------------------------------------------------------
-- | ByteString Utilities
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Decoding specifically for the query string, which decodes + as
-- space. Shorthand for @urlDecode True@
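--
-- Illustrative, doctest-style examples ('+' becomes a space here, but not in
-- 'urlDecode''):
--
-- >>> urlDecodeQuery "foo+bar%21"
-- "foo bar!"
--
-- >>> urlDecode' "foo+bar%21"
-- "foo+bar!"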
urlDecodeQuery :: ByteString -> ByteString
urlDecodeQuery = urlDecode plusToSpace
where
plusToSpace = True
-------------------------------------------------------------------------------
-- | Decode any part of the URL besides the query. Unlike
-- 'urlDecodeQuery', this does not decode + as a space.
urlDecode' :: ByteString -> ByteString
urlDecode' = urlDecode plusToSpace
where
plusToSpace = False
-------------------------------------------------------------------------------
-- | Parsing with Strongly-Typed Errors
-------------------------------------------------------------------------------
-- | A parser with a specific error type. Attoparsec unfortunately
-- throws all errors into strings, which cannot be handled well
-- programmatically without doing something silly like parsing error
-- messages. This wrapper attempts to concentrate these errors into
-- one type.
newtype Parser' e a = Parser' { unParser' :: Parser a}
deriving ( Functor
, Applicative
, Alternative
, Monad
, MonadPlus
, Semigroup.Semigroup
, Monoid)
instance F.MonadFail (Parser' e) where
#if MIN_VERSION_attoparsec(0,13,1)
fail e = Parser' (F.fail e)
#else
fail e = Parser' (fail e)
#endif
-------------------------------------------------------------------------------
-- | Use with caution. Catch a parser failing and return Nothing.
mParse :: Parser' e a -> Parser' e (Maybe a)
mParse p = A.option Nothing (Just <$> p)
-------------------------------------------------------------------------------
-- | If the first parser succeeds, discard the result and use the
-- second parser (which may fail). If the first parser fails, return
-- Nothing. This is used to check a benign precondition that indicates
-- the presence of a parsable token, i.e. ? preceding a query.
thenJust :: Parser' e a -> Parser' e b -> Parser' e (Maybe b)
thenJust p1 p2 = p1 *> (Just <$> p2) <|> pure Nothing
-------------------------------------------------------------------------------
-- | Lift a word8 Parser into a strongly error typed parser. This will
-- generate a "stringy" error message if it fails, so you should
-- probably be prepared to exit with a nicer error further up.
word8' :: Word8 -> Parser' e Word8
word8' = Parser' . word8
-------------------------------------------------------------------------------
-- | Skip exactly the given number of bytes. Fails if there isn't
-- enough input left. Generates a "stringy" error.
skip' :: Int -> Parser' e ()
skip' = Parser' . void . A.take
-------------------------------------------------------------------------------
-- | Lifted version of the string token parser. Same caveats about
-- "stringy" errors apply.
string' :: ByteString -> Parser' e ByteString
string' = Parser' . string
-------------------------------------------------------------------------------
-- | Combinator for tunnelling more specific error types through the
-- attoparsec machinery using read/show.
orFailWith :: (Show e) => Parser a -> e -> Parser' e a
orFailWith p e = Parser' p <|> fail' e
-------------------------------------------------------------------------------
-- | Fail with a strongly-typed error. Prefer this over plain 'fail'.
fail' :: (Show e) => e -> Parser' e a
fail' = fail . show
-------------------------------------------------------------------------------
parseBetween :: (Alternative m, Monad m) => Int -> Int -> m a -> m [a]
parseBetween a b f = choice parsers
where
parsers = map (`count` f) $ reverse $ range (a, b)
-------------------------------------------------------------------------------
-- | Stronger-typed variation of 'parseOnly'. Consumes all input.
parseOnly' :: (Read e)
=> (String -> e) -- ^ Fallback if we can't parse a failure message for the sake of totality.
-> Parser' e a
-> ByteString
-> Either e a
parseOnly' noParse (Parser' p) = fmapL readWithFallback . parseOnly p
where
readWithFallback s = fromMaybe (noParse s) (readMaybe . stripAttoparsecGarbage $ s)
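-- A worked example of the round trip (informal): parsing
-- "$$$$://badurl.example.org" makes 'schemeParser' fall through to
-- @fail' (MalformedScheme NonAlphaLeading)@, so attoparsec reports
-- "Failed reading: MalformedScheme NonAlphaLeading". 'parseOnly'' strips the
-- prefix via 'stripAttoparsecGarbage' below and 'readMaybe's the remainder
-- back into @MalformedScheme NonAlphaLeading@, which is why the 'parseURI'
-- example above yields @Left (MalformedScheme NonAlphaLeading)@ rather than a
-- bare error string.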
-------------------------------------------------------------------------------
-- | Our pal Control.Monad.fail is how attoparsec propagates
-- errors. If you throw an error string with fail (your only choice),
-- it will *always* prepend it with "Failed reading: ". At least in
-- this version. That may change to something else and break this workaround.
stripAttoparsecGarbage :: String -> String
stripAttoparsecGarbage = stripPrefix' "Failed reading: "
-------------------------------------------------------------------------------
-- | stripPrefix where it is a noop if the prefix doesn't exist.
stripPrefix' :: Eq a => [a] -> [a] -> [a]
stripPrefix' pfx s = fromMaybe s $ stripPrefix pfx s
-------------------------------------------------------------------------------
fmapL :: (a -> b) -> Either a r -> Either b r
fmapL f = either (Left . f) Right
-------------------------------------------------------------------------------
-- | This function was extracted from the @http-types@ package. The
-- license can be found in licenses/http-types/LICENSE
urlDecode
:: Bool
-- ^ Whether to decode '+' to ' '
-> BS.ByteString
-> BS.ByteString
urlDecode replacePlus z = fst $ BS.unfoldrN (BS.length z) go z
where
go bs' =
case BS.uncons bs' of
Nothing -> Nothing
Just (43, ws) | replacePlus -> Just (32, ws) -- plus to space
Just (37, ws) -> Just $ fromMaybe (37, ws) $ do -- percent
(x, xs) <- BS.uncons ws
x' <- hexVal x
(y, ys) <- BS.uncons xs
y' <- hexVal y
Just (combine x' y', ys)
Just (w, ws) -> Just (w, ws)
hexVal w
| 48 <= w && w <= 57 = Just $ w - 48 -- 0 - 9
| 65 <= w && w <= 70 = Just $ w - 55 -- A - F
| 97 <= w && w <= 102 = Just $ w - 87 -- a - f
| otherwise = Nothing
combine :: Word8 -> Word8 -> Word8
combine a b = shiftL a 4 .|. b
-------------------------------------------------------------------------------
--TODO: keep an eye on perf here. seems like a good use case for a DList. the word8 list could be a set/hashset
-- | Percent-encoding for URLs. Specify a list of additional
-- unreserved characters to permit.
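--
-- Illustrative, doctest-style examples (via 'BB.toByteString'):
--
-- >>> BB.toByteString (urlEncode [] "a b&c")
-- "a%20b%26c"
--
-- >>> BB.toByteString (urlEncode (map ord8 "&") "a b&c")
-- "a%20b&c"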
urlEncode :: [Word8] -> ByteString -> Builder
urlEncode extraUnreserved = mconcat . map encodeChar . BS.unpack
where
encodeChar ch | unreserved' ch = BB.fromWord8 ch
| otherwise = h2 ch
unreserved' ch | ch >= 65 && ch <= 90 = True -- A-Z
| ch >= 97 && ch <= 122 = True -- a-z
| ch >= 48 && ch <= 57 = True -- 0-9
unreserved' c = c `elem` extraUnreserved
h2 v = let (a, b) = v `divMod` 16 in bs $ BS.pack [37, h a, h b] -- percent (%)
h i | i < 10 = 48 + i -- zero (0)
| otherwise = 65 + i - 10 -- 65: A
-------------------------------------------------------------------------------
-- | Encode a ByteString for use in the query section of a URL
urlEncodeQuery :: ByteString -> Builder
urlEncodeQuery = urlEncode unreserved8
-------------------------------------------------------------------------------
-- | Encode a ByteString for use in the path section of a URL
urlEncodePath :: ByteString -> Builder
urlEncodePath = urlEncode unreservedPath8
-------------------------------------------------------------------------------
downcaseBS :: ByteString -> ByteString
downcaseBS = BS8.map toLower
-------------------------------------------------------------------------------
-- | Simple data structure to get O(1) prepends on a list, deferring the O(n)
-- reversal until the list is read back out with 'rl2L'.
newtype RL a = RL [a] deriving (Show)
(|>) :: RL a -> a -> RL a
RL as |> a = RL (a:as)
rl2L :: RL a -> [a]
rl2L (RL as) = reverse as
unsnoc :: RL a -> RL a
unsnoc (RL []) = RL []
unsnoc (RL (_:xs)) = RL xs
| Soostone/uri-bytestring | src/URI/ByteString/Internal.hs | bsd-3-clause | 40,268 | 0 | 16 | 7,340 | 6,513 | 3,480 | 3,033 | 454 | 4 |
{-# LANGUAGE TypeOperators, EmptyDataDecls,
MultiParamTypeClasses, FunctionalDependencies,
FlexibleContexts, FlexibleInstances, UndecidableInstances,
TypeFamilies, IncoherentInstances, OverlappingInstances #-}
module Data.Rope.Annotated.Unsafe
( -- * Annotated 'Rope's
U
, MonoidA, ReducerA, BreakableA
-- * Unpacking 'Ropes'
, null -- :: A s a -> Bool
, head -- :: Unpackable t => A s a -> t
, last -- :: Unpackable t => A s a -> t
, unpack -- :: Unpackable t => A s a -> [t]
-- * Building Annotated 'Rope'
, empty -- :: MonoidA f => U f
, append -- :: MonoidA f => Ann a f -> Ann b f -> U f
, unit -- :: (ReducerA f, Reducer t Rope) => t -> U f
, snoc -- :: (ReducerA f, Reducer t Rope) => t -> Ann a f -> U f
, cons -- :: (ReducerA f, Reducer t Rope) => Ann a f -> t -> U f
-- * Cutting An Annotated 'Rope'
, splitAt -- :: (BreakableA f) => Int -> Ann a f -> (U f, U f)
, drop -- :: (BreakableA f) => Int -> Ann a f -> U f
, take -- :: (BreakableA f) => Int -> Ann a f -> U f
, break -- :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> (U f, U f)
, span -- :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> (U f, U f)
, takeWhile -- :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> U f
, dropWhile -- :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> U f
-- * Inspecting the ends of the 'Rope'
, uncons -- :: (BreakableA f, Unpackable t) => Ann a f -> Maybe (t, U f)
, unsnoc -- :: (BreakableA f, Unpackable t) => Ann a f -> Maybe (U f, t)
, Unsafe
) where
import Prelude hiding (null, head, last, take, drop, span, break, splitAt, takeWhile, dropWhile)
import Data.Monoid
import qualified Data.Rope.Internal as Rope
import Data.Rope.Annotated.Internal (A(..), null, head, last, unpack)
import Data.Rope.Annotated (Ann)
import Data.Rope.Annotation
import Data.Rope.Util.Reducer (Reducer)
import qualified Data.Rope.Util.Reducer as Reducer
import Data.Rope.Internal (Rope(..),Breakable, Unpackable)
data Unsafe
type U f = A Unsafe (f Unsafe)
empty :: MonoidA f => U f
empty = A Rope.empty emptyA
append :: MonoidA f => Ann a f -> Ann b f -> U f
append (A r a) (A s b) = A (r `mappend` s) (appendA r a s b)
unit :: (ReducerA f, Reducer t Rope) => t -> U f
unit t = A r (unitA r)
where r = Reducer.unit t
splitAt :: BreakableA f => Int -> Ann a f -> (U f, U f)
splitAt n (A r a) = (A r b, A r c)
where (b, c) = splitAtA n r a
drop :: BreakableA f => Int -> Ann a f -> U f
drop n (A r a) = A r (dropA n r a)
take :: BreakableA f => Int -> Ann a f -> U f
take n (A r a) = A r (takeA n r a)
snoc :: (ReducerA f, Reducer t Rope) => Ann a f -> t -> U f
snoc (A r a) t = A r' (snocA (Rope.length r' - Rope.length r) r' a)
where r' = Reducer.snoc r t
cons :: (ReducerA f, Reducer t Rope) => t -> Ann a f -> U f
cons t (A r a) = A r' (consA (Rope.length r' - Rope.length r) r' a)
where r' = Reducer.cons t r
break :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> (U f, U f)
break p (A r a) = (A x b, A y c) where
(x,y) = Rope.break p r
(b,c) = splitAtA (Rope.length x) r a
span :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> (U f, U f)
span p (A r a) = (A x b, A y c) where
(x,y) = Rope.span p r
(b,c) = splitAtA (Rope.length x) r a
takeWhile :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> U f
takeWhile p (A r a) = A x b where
x = Rope.takeWhile p r
b = takeA (Rope.length x) r a
dropWhile :: (BreakableA f, Breakable t) => (t -> Bool) -> Ann a f -> U f
dropWhile p (A r a) = A y c where
y = Rope.dropWhile p r
c = dropA (Rope.length r - Rope.length y) r a
uncons :: (BreakableA f, Unpackable t) => Ann a f -> Maybe (t, U f)
uncons (A r a) = case Rope.uncons r of
Just (c,cs) -> Just (c, A cs (dropA (Rope.length r - Rope.length cs) r a))
Nothing -> Nothing
unsnoc :: (BreakableA f, Unpackable t) => Ann a f -> Maybe (U f, t)
unsnoc (A r a) = case Rope.unsnoc r of
Just (cs,c) -> Just (A cs (dropA (Rope.length cs) r a), c)
Nothing -> Nothing
| ekmett/rope | Data/Rope/Annotated/Unsafe.hs | bsd-3-clause | 4,237 | 0 | 16 | 1,189 | 1,576 | 838 | 738 | -1 | -1 |
module Part2.Problem53 where
--
-- Problem 53: Combinatoric selections
--
-- There are exactly ten ways of selecting three from five, 12345:
--
-- 123, 124, 125, 134, 135, 145, 234, 235, 245, and 345
--
-- In combinatorics, we use the notation, 5 C 3 = 10.
--
-- In general,
--
-- n C r = n! / r!(n−r)!
--
-- where r ≤ n, n! = n×(n−1)×...×3×2×1, and 0! = 1.
--
-- It is not until n = 23, that a value exceeds one-million: 23 C 10 = 1144066.
--
-- How many, not necessarily distinct, values of n C r, for 1 ≤ n ≤ 100, are
-- greater than one-million?
problem53 :: Int
problem53 = length
[ (n, r)
| n <- [23..100]
, r <- [1..n-1]
, n `nCr` r > 10^6
]
fac :: Integer -> Integer
fac 0 = 1
fac n = n * fac (n - 1)
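-- Sanity check against the value quoted in the problem statement above
-- (illustrative, doctest-style):
--
-- >>> 23 `nCr` 10
-- 1144066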
nCr :: Integer -> Integer -> Integer
nCr n r | r > n = 0
nCr n r = fac n `div` (fac r * fac (n - r))
| c0deaddict/project-euler | src/Part2/Problem53.hs | bsd-3-clause | 843 | 0 | 10 | 205 | 206 | 118 | 88 | 13 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TypeFamilies #-}
-- |
module Network.Libtorrent.Types.ArrayLike where
import Control.Monad (forM_)
import Foreign.C.Types (CSize)
import Prelude hiding (foldMap)
class ArrayLike a where
type ElemType a :: *
getElem :: a -> CSize -> IO (Maybe (ElemType a))
foldMap :: (ArrayLike a, Monoid m) => (ElemType a -> m) -> a -> IO m
foldMap f ar =
go 0 mempty
where
go ix acc = do
!el <- getElem ar ix
case el of
Nothing -> return acc
Just v -> go (succ ix) $! mappend (f v) acc
fold :: ArrayLike a => (ElemType a -> b -> b) -> b -> a -> IO b
fold f acc ar =
go 0 acc
where
go ix acc' = do
!el <- getElem ar ix
case el of
Nothing -> return acc'
Just v -> go (succ ix) $! f v acc'
toList :: ArrayLike a => a -> IO [ElemType a]
toList ar =
foldMap pure ar
class ArrayLike a => VectorLike a where
newVector :: IO a
addElem :: a -> ElemType a -> IO ()
fromList :: VectorLike a => [ElemType a] -> IO a
fromList as = do
v <- newVector
forM_ as $ addElem v
return v
| eryx67/haskell-libtorrent | src/Network/Libtorrent/Types/ArrayLike.hs | bsd-3-clause | 1,146 | 0 | 15 | 360 | 483 | 235 | 248 | 36 | 2 |
{- |
Module : SAWScript.LLVMBuiltins
Description : Implementations of LLVM-related SAW-Script primitives.
License : BSD3
Maintainer : atomb
Stability : provisional
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE LambdaCase #-}
module SAWScript.LLVMBuiltins where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative hiding (many)
#endif
import Data.String
import Data.Parameterized.Some
import Control.Monad.State (gets)
import qualified Text.LLVM.AST as LLVM
import qualified Data.LLVM.BitCode as LLVM
import qualified Text.LLVM.Parser as LLVM (parseType)
import SAWScript.Value as SV
import qualified SAWScript.Crucible.LLVM.CrucibleLLVM as CL
import qualified SAWScript.Crucible.LLVM.MethodSpecIR as CMS (LLVMModule, loadLLVMModule)
llvm_load_module :: FilePath -> TopLevel (Some CMS.LLVMModule)
llvm_load_module file =
do laxArith <- gets rwLaxArith
debugIntrinsics <- gets rwDebugIntrinsics
let ?transOpts = CL.defaultTranslationOptions
{ CL.laxArith = laxArith
, CL.debugIntrinsics = debugIntrinsics
}
halloc <- getHandleAlloc
io (CMS.loadLLVMModule file halloc) >>= \case
Left err -> fail (LLVM.formatError err)
Right llvm_mod -> return llvm_mod
llvm_type :: String -> TopLevel LLVM.Type
llvm_type str =
case LLVM.parseType str of
Left e -> fail (show e)
Right t -> return t
llvm_int :: Int -> LLVM.Type
llvm_int n = LLVM.PrimType (LLVM.Integer (fromIntegral n))
llvm_float :: LLVM.Type
llvm_float = LLVM.PrimType (LLVM.FloatType LLVM.Float)
llvm_double :: LLVM.Type
llvm_double = LLVM.PrimType (LLVM.FloatType LLVM.Double)
llvm_array :: Int -> LLVM.Type -> LLVM.Type
llvm_array n t = LLVM.Array (fromIntegral n) t
llvm_alias :: String -> LLVM.Type
llvm_alias n = LLVM.Alias (fromString n)
llvm_packed_struct_type :: [LLVM.Type] -> LLVM.Type
llvm_packed_struct_type = LLVM.PackedStruct
llvm_pointer :: LLVM.Type -> LLVM.Type
llvm_pointer = LLVM.PtrTo
llvm_struct_type :: [LLVM.Type] -> LLVM.Type
llvm_struct_type = LLVM.Struct
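-- An illustrative sketch of how these helpers compose (the textual form given
-- to 'llvm_type' is assumed to follow the llvm-pretty type syntax and is shown
-- only as an example):
--
-- > llvm_array 10 (llvm_pointer (llvm_int 32)) -- builds the type [10 x i32*]
-- > llvm_type "[10 x i32*]" -- intended to parse to the same type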
| GaloisInc/saw-script | src/SAWScript/LLVMBuiltins.hs | bsd-3-clause | 2,368 | 0 | 14 | 414 | 547 | 301 | 246 | 54 | 2 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE DeriveGeneric #-}
{- |
Module : Data.Graph.Simple.Util
Description : Utility functions used in many algorithms in this library
Copyright : Stefan Höck
Maintainer : Stefan Höck
Stability : experimental
Functions for internal use. Many are not total, so use with care.
-}
module Data.Graph.Simple.Util (
-- * Monad utility functions
ifM, whenM, unlessM
-- * Operations on sorted lists without duplicates
, unique, sortedUnique, sortedDiff, sortedUnion, sortedSymmDiff
-- * Pretty printing
, rightPad
-- * Mutable vector utility functions
, unsafeReadV, unsafeReadVU
, unsafeWriteV, unsafeWriteVU
, unsafeModU, unsafeMod, unsafeModV, unsafeModVU
, boolMap, partMap
, SetM(..), runM, runMV, setM, getM, modM, visit, visited, unvisited, unvisit
) where
import Control.Monad (unless, when)
import Control.Monad.ST (ST, runST)
import Data.Graph.Simple.Vertex (Vertex, unVertex)
import Data.List (sort)
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as MVU
-- * Monad utility functions
-- | Depending on the 'Bool' returned by the first monadic
-- action, either performs the second (in case of 'True')
-- or the third provided action.
ifM ∷ Monad m ⇒ m Bool → m a → m a → m a
ifM mb m1 m2 = mb >>= (\b → if b then m1 else m2)
-- | Like 'when' but with a monadic 'Bool' as its first argument
whenM ∷ Monad m ⇒ m Bool → m () → m ()
whenM mb mu = mb >>= (\b → when b mu)
-- | Like 'unless' but with a monadic 'Bool' as its first argument
unlessM ∷ Monad m ⇒ m Bool → m () → m ()
unlessM mb mu = mb >>= (\b → unless b mu)
-- * Operations on sorted lists without duplicates
-- | Sorts a list and removes duplicates
sortedUnique ∷ Ord a ⇒ [a] → [a]
sortedUnique = unique . sort
-- | Removes duplicates from a sorted list
unique ∷ Eq a ⇒ [a] → [a]
unique = run [] where
run r (x:y:t) | x == y = run r (y:t)
run r (h:t) = run (h:r) t
run r [] = reverse r
-- | Subtracts the content of the second from the first
-- list in O(m+n). Lists are assumed to be set-like: Sorted and
-- holding each element only once
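--
-- Illustrative, doctest-style example:
--
-- >>> sortedDiff [1,3,5,7] [3,4,5]
-- [1,7]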
sortedDiff ∷ Ord a ⇒ [a] → [a] → [a]
sortedDiff = run []
where run r [] _ = reverse r
run r as [] = reverse r ++ as
run r as@(a:ta) bs@(b:tb) | a == b = run r ta tb
| a < b = run (a:r) ta bs
| otherwise = run r as tb
-- | Combines the content of two lists in O(m+n).
-- Lists are assumed to be set-like: Sorted and
-- holding each element only once
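--
-- Illustrative, doctest-style example:
--
-- >>> sortedUnion [1,3,5] [2,3,6]
-- [1,2,3,5,6]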
sortedUnion ∷ Ord a ⇒ [a] → [a] → [a]
sortedUnion = run []
where run r [] bs = reverse r ++ bs
run r as [] = reverse r ++ as
run r as@(a:ta) bs@(b:tb) | a == b = run (a:r) ta tb
| a < b = run (a:r) ta bs
| otherwise = run (b:r) as tb
-- | Creates the symmetric set difference of to lists.
-- Lists are assumed to be set-like: Sorted and
-- holding each element only once
sortedSymmDiff ∷ Ord a ⇒ [a] → [a] → [a]
sortedSymmDiff = run []
where run r [] e2 = reverse r ++ e2
run r e1 [] = run r [] e1
run r as@(a:ta) bs@(b:tb) | a == b = run r ta tb
| a < b = run (a:r) ta bs
| otherwise = run (b:r) as tb
-- * Pretty printing
rightPad ∷ a → [[a]] → [[a]]
rightPad _ [] = []
rightPad a as = fmap pad as
where pad as' = as' ++ replicate (ml - length as') a
ml = maximum $ fmap length as
-- * Mutable vector utility functions
-- | Modifies a value in a mutable unboxed array without checking
-- the index first
{-# INLINE unsafeModU #-}
unsafeModU ∷ MVU.Unbox a ⇒ MVU.MVector s a → Int → (a → a) → ST s ()
unsafeModU v i f = MVU.unsafeRead v i >>= MVU.unsafeWrite v i . f
-- | Modifies a value in a mutable unboxed array without checking
-- the index first. Uses a Vertex for indexing
{-# INLINE unsafeModVU #-}
unsafeModVU ∷ MVU.Unbox a ⇒ MVU.MVector s a → Vertex → (a → a) → ST s ()
unsafeModVU v i f = unsafeReadVU v i >>= unsafeWriteVU v i . f
-- | Modifies a value in a mutable array without checking
-- the index first
{-# INLINE unsafeMod #-}
unsafeMod ∷ MV.MVector s a → Int → (a → a) → ST s ()
unsafeMod v i f = MV.unsafeRead v i >>= MV.unsafeWrite v i . f
-- | Modifies a value in a mutable array without checking
-- the index first. Uses a Vertex for indexing
{-# INLINE unsafeModV #-}
unsafeModV ∷ MV.MVector s a → Vertex → (a → a) → ST s ()
unsafeModV v i f = unsafeReadV v i >>= unsafeWriteV v i . f
-- | Extracts the value from a mutable array without
-- checking the index first. Uses a Vertex for indexing.
{-# INLINE unsafeReadV #-}
unsafeReadV ∷ MV.MVector s a → Vertex → ST s a
unsafeReadV v = MV.unsafeRead v . unVertex
-- | Extracts the value from a mutable unboxed array without
-- checking the index first. Uses a Vertex for indexing.
{-# INLINE unsafeReadVU #-}
unsafeReadVU ∷ MVU.Unbox a ⇒ MVU.MVector s a → Vertex → ST s a
unsafeReadVU v = MVU.unsafeRead v . unVertex
-- | Writes a value to a mutable array without
-- checking the index first. Uses a Vertex for indexing.
{-# INLINE unsafeWriteV #-}
unsafeWriteV ∷ MV.MVector s a → Vertex → a → ST s ()
unsafeWriteV v i a = MV.unsafeWrite v (unVertex i) a
-- | Writes a value to a mutable unboxed array without
-- checking the index first. Uses a Vertex for indexing.
{-# INLINE unsafeWriteVU #-}
unsafeWriteVU ∷ MVU.Unbox a ⇒ MVU.MVector s a → Vertex → a → ST s ()
unsafeWriteVU v i a = MVU.unsafeWrite v (unVertex i) a
-- | Returns an efficient mapping from index to 'Bool'.
-- Indices given as a list of vertices will be mapped to 'True',
-- all others to 'False'
boolMap ∷ Int → [Vertex] → VU.Vector Bool
boolMap n vs = runMV n False $ mapM_ visit vs
-- | Returns an efficient mapping from index to value.
-- A default value is given together with a list of
-- index value pairs.
partMap ∷ Int → a → [(Int,a)] → V.Vector a
partMap n ini ps = runST $ do v ← MV.replicate n ini
mapM_ (\(i,a) → MV.write v i a) ps
V.unsafeFreeze v
-- Used to mark or count visited vertices in graph algorithms
newtype SetM s u a = SetM { runSetM ∷ MVU.MVector s u → ST s a }
instance MVU.Unbox u ⇒ Functor (SetM s u) where
f `fmap` SetM v = SetM $ fmap f . v
instance MVU.Unbox u ⇒ Applicative (SetM s u) where
pure = SetM . const . return
SetM f <*> SetM a = SetM $ \v → f v <*> a v
instance MVU.Unbox u ⇒ Monad (SetM s u) where
return = pure
SetM v >>= f = SetM $ \s → do x ← v s
runSetM (f x) s
{-# INLINE runM #-}
runM ∷ MVU.Unbox u ⇒ Int → u → (forall s . SetM s u a) → a
runM n ini act = runST $ do v ← MVU.replicate n ini
runSetM act v
{-# INLINE runMV #-}
runMV ∷ MVU.Unbox u ⇒ Int → u → (forall s . SetM s u ()) → VU.Vector u
runMV n ini act = runST $ do v ← MVU.replicate n ini
runSetM act v
VU.unsafeFreeze v
{-# INLINE getM #-}
getM ∷ MVU.Unbox u ⇒ Vertex → SetM s u u
getM v = SetM $ \us → unsafeReadVU us v
{-# INLINE setM #-}
setM ∷ MVU.Unbox u ⇒ u → Vertex → SetM s u ()
setM v u = SetM $ \us → unsafeWriteVU us u v
{-# INLINE modM #-}
modM ∷ MVU.Unbox u ⇒ (u → u) → Vertex → SetM s u ()
modM f v = SetM $ \us → unsafeModVU us v f
{-# INLINE visited #-}
visited ∷ Vertex → SetM s Bool Bool
visited = getM
{-# INLINE unvisited #-}
unvisited ∷ Vertex → SetM s Bool Bool
unvisited = fmap not . visited
{-# INLINE visit #-}
visit ∷ Vertex → SetM s Bool ()
visit = setM True
{-# INLINE unvisit #-}
unvisit ∷ Vertex → SetM s Bool ()
unvisit = setM False
| stefan-hoeck/labeled-graph | Data/Graph/Simple/Util.hs | bsd-3-clause | 8,250 | 0 | 12 | 2,307 | 2,541 | 1,322 | 1,219 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | SMS posting for carma
--
-- Start process:
-- @
-- carma-sms -u user -p password
-- @
--
-- Then send sms:
-- @
-- redis> hmset sms:1 from me phone 70001234567 msg \"test message\"
-- redis> lpush smspost sms:1
-- @
--
-- | SMS object
--
-- SMS object stored in redis has following format:
--
-- * sender - sender
--
-- * phone - receiver, contains only digits
--
-- * msg - message in UTF-8
--
-- * action - filled by process, send or status
--
-- * msgid - filled by process, message id in smsdirect
--
-- * status - filled by process, message status, delivered, sent or send_error
--
-- * lasttry: timestamp of last try
--
-- * tries: number of tries
--
-- When carma-sms fails to send (or get status) sms, it pushes sms to retry-list (smspost:retry by default).
--
-- After that, another thread periodically pushes these SMSes back to the \'smspost\' list until sending succeeds or the number of tries is exceeded
--
module Main (
main
) where
import Prelude hiding (log, catch)
import Control.Concurrent
import Control.Monad.CatchIO
import Data.List
import Data.Maybe (mapMaybe)
import qualified Data.Map as M
import Data.String
import qualified Data.Text as T
import qualified Database.Redis as R
import System.Environment
import System.Log
import CarmaSMS.Action (Action(..))
import CarmaSMS.Process
-- convert arguments to map
-- ["-f1", "value1", "-f2", "value2"] => fromList [("-f1", "value1"), ("-f2", "value2")]
arguments :: [String] -> M.Map String String
arguments = M.fromList . mapMaybe toTuple . splitBy 2 where
toTuple :: [a] -> Maybe (a, a)
toTuple [x, y] = Just (x, y)
toTuple _ = Nothing
splitBy :: Int -> [a] -> [[a]]
splitBy = unfoldr . takedrop
takedrop :: Int -> [a] -> Maybe ([a], [a])
takedrop _ [] = Nothing
takedrop n xs = Just (take n xs, drop n xs)
-- convert arguments to a map, overriding the given default values
args :: [(String, String)] -> [String] -> M.Map String String
args as s = arguments s `M.union` M.fromList as
rules :: String -> Rules
rules r = [
parseRule_ (fromString $ "/: use " ++ r)]
-- | Fork with the ability to wait for completion via the returned 'QSem'
forkW :: IO () -> IO QSem
forkW act = do
q <- newQSem 0
_ <- forkOS $ finally act (signalQSem q)
return q
main :: IO ()
main = do
as <- getArgs
if null as
then printUsage
else main' (args argDecl as)
where
argDecl = [
("-u", error "User not specified"),
("-p", error "Password not specified"),
("-l", "default"),
("-k", "smspost"),
("-r", "smspost:retry"),
("-retries", "10"),
("-d", "60"),
("-mps", "5"),
("-i", "1")]
printUsage = mapM_ putStrLn [
"Usage: carma-sms [flags] where",
" -u <user> - login for smsdirect",
" -p <pass> - password for smsdirect",
" -l <level> - log level, default is 'default', possible values are: trace, debug, default, silent",
" -k <key> - redis key for tasks, default is 'smspost'",
" -r <retry key> - redis key for retries, default is 'smspost:retry'",
" -retries <int> - max retries on sms actions, default is 10",
" -d <seconds> - delta between tries, default is 60",
" -mps <int> - HTTP requests per second, default is 5",
" -i <id> - id of task-processing list, default is '1' (key will be 'smspost:1')",
"",
"Examples:",
" carma-sms -u user -p pass",
" carma-sms -u user -p pass -l trace",
" carma-sms -u user -p pass -l silent -k smspostlist"]
main' flags = do
postCon <- R.connect R.defaultConnectInfo
retryCon <- R.connect R.defaultConnectInfo
l <- newLog (constant (rules $ flag "-l")) [logger text (file "log/carma-sms.log")]
w1 <- forkW $ retry l retryCon conf
w2 <- forkW $ post l postCon conf
mapM_ waitQSem [w1, w2]
where
conf = Action {
actionUser = user,
actionPass = pass,
actionProcessId = fromString $ flag "-i",
actionTaskList = fromString $ flag "-k",
actionTaskId = "",
actionTaskRetry = fromString $ flag "-r",
actionTaskRetries = iflag 10 "-retries",
actionTaskRetryDelta = iflag 60 "-d",
actionMessagesPerSecond = iflag 5 "-mps",
actionData = M.empty
}
flag = (flags M.!)
iflag v = tryRead . flag where
tryRead s = case reads s of
[(i, "")] -> i
_ -> v
user :: T.Text
user = fromString $ flag "-u"
pass :: T.Text
pass = fromString $ flag "-p"
| mvoidex/carma-sms | src/carma-sms.hs | bsd-3-clause | 4,571 | 0 | 15 | 1,201 | 1,029 | 585 | 444 | 95 | 3 |
module Main where
import Control.Concurrent.Async (forConcurrently_)
import Data.Text (Text)
import Stuff (urls, getUrl, printResult)
main :: IO ()
main = printSources urls
printSources :: [Text] -> IO ()
printSources urlList = do
forConcurrently_ urlList fetchAndPrint
putStrLn "All done!"
fetchAndPrint :: Text -> IO ()
fetchAndPrint url = getUrl url >>= printResult url
| toddmohney/concurrency-example | concurrent/Main.hs | bsd-3-clause | 381 | 0 | 7 | 60 | 128 | 67 | 61 | 12 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Budget.Core.Data.ItemTemplate where
import GHC.Generics
import Data.Aeson
import Data.Text (Text)
import qualified Budget.Core.Data.Category as C
data ItemTemplate
= ItemTemplate
{ name :: Text -- ^ Name of item
, category :: C.Category -- ^ Assigned category
} deriving (Generic)
instance Eq ItemTemplate where
x == y = name x == name y && catid x == catid y
where catid = C.categoryId . category
instance FromJSON ItemTemplate
instance ToJSON ItemTemplate
data NewItemTemplateR
= NewItemTemplateR
{ newItemTemplateName :: Text
, newItemTemplateCategoryId :: Int
} deriving (Generic)
instance FromJSON NewItemTemplateR
instance ToJSON NewItemTemplateR
| utky/budget | src/Budget/Core/Data/ItemTemplate.hs | bsd-3-clause | 754 | 0 | 9 | 158 | 183 | 102 | 81 | 23 | 0 |
module Main where
import Kraken.Web
main :: IO ()
main = run
| zalora/kraken | src/Kraken/Web/Main.hs | bsd-3-clause | 76 | 0 | 6 | 27 | 24 | 14 | 10 | 4 | 1 |
module Location where
import Data.IntMap (IntMap)
import qualified Data.IntMap as I
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Foldable as Foldable
import Data.Maybe
type X = Int
type Y = Int
type Position = (X,Y)
type ObjId = Int
data Shape = Rectangle Int Int | Circle Int
newtype Space = Space (IntMap (IntMap (Set (ObjId, Blocking))))
data Blocking = Ghost | Low | Medium | High deriving (Enum, Ord, Eq, Show)
blocks :: Blocking -> Blocking -> Bool
blocks b1 b2 = fromEnum b1 + fromEnum b2 >= fromEnum High
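-- Some worked examples, using the Enum order declared above
-- (Ghost = 0, Low = 1, Medium = 2, High = 3):
--   blocks Low    Low    == False   -- 1 + 1 <  3
--   blocks Low    Medium == True    -- 1 + 2 >= 3
--   blocks Medium Medium == True    -- 2 + 2 >= 3
--   blocks Ghost  b      == (b == High)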
footprint :: Shape -> Position -> Blocking -> Set (Position, Blocking)
footprint (Rectangle w h) (x0,y0) blocking = S.fromList
[((x+x0,y+y0), blocking) | x <- [-w..w-1], y <- [-h..h-1]]
footprint (Circle r) (x0,y0) blocking = S.fromList
  -- x and y are offsets from the centre, so the radius test is on the offsets themselves
  [((x+x0,y+y0), blocking) | x <- [-r..r-1], y <- [-r..r-1], (fromIntegral x+0.5)^2 + (fromIntegral y+0.5)^2 < fromIntegral r^2]
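-- For example:
--   footprint (Rectangle 1 1) (5,5) Low
--     == S.fromList [((4,4),Low),((4,5),Low),((5,4),Low),((5,5),Low)]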
adjustPosition :: ((Set (ObjId, Blocking)) -> (Set (ObjId, Blocking))) -> Position -> Space -> Maybe Space
adjustPosition f (x,y) (Space xMap) = do
yMap <- I.lookup x xMap
pos <- I.lookup y yMap
let yMap' = I.adjust f y yMap
xMap' = I.adjust (const yMap') x xMap
return $ Space xMap'
testPosition :: ((Set (ObjId, Blocking)) -> Bool) -> Position -> Space -> Maybe Bool
testPosition pred (x,y) (Space xMap) = do
yMap <- I.lookup x xMap
pos <- I.lookup y yMap
return $ pred pos
addPositionToSpace :: ObjId -> Position -> Blocking -> Space -> Maybe Space
addPositionToSpace objId position blocking space
| canOccupy space position blocking = adjustPosition (S.insert (objId, blocking)) position space
| otherwise = Nothing
addShapeToSpace :: ObjId -> Shape -> Position -> Blocking -> Space -> Maybe Space
addShapeToSpace objId shape position blocking space = S.foldr f (Just space) positions
where
f :: (Position, Blocking) -> Maybe Space -> Maybe Space
    f (pos, block) = (>>= addPositionToSpace objId pos block)
positions :: Set (Position, Blocking)
positions = footprint shape position blocking
-- only removes a position, not a shape
removePositionFromSpace :: ObjId -> Position -> Blocking -> Space -> Maybe Space
removePositionFromSpace objId position blocking space = adjustPosition (S.filter ((/= objId) . fst)) position space
removeShapeFromSpace :: ObjId -> Shape -> Position -> Blocking -> Space -> Maybe Space
removeShapeFromSpace objId shape position blocking space = S.foldr f (Just space) positions
where
f :: (Position, Blocking) -> Maybe Space -> Maybe Space
    f (pos, block) = (>>= removePositionFromSpace objId pos block)
positions :: Set (Position, Blocking)
positions = footprint shape position blocking
move :: X -> Y -> Position -> Position
move dx dy (x,y) = (x+dx, y+dy)
-- Can a (_, blocking) value be added to position in space?
canOccupy :: Space -> Position -> Blocking -> Bool
canOccupy space position blocking = fromMaybe False $ testPosition pred position space
where
pred = Foldable.all $ not . blocks blocking . snd
-- Fails with Nothing if the move is illegal
moveInSpace :: X -> Y -> ObjId -> Shape -> Position -> Blocking -> Space -> Maybe Space
moveInSpace dx dy objId shape position blocking space = do
space' <- removeShapeFromSpace objId shape position blocking space
let position' = move dx dy position
addShapeToSpace objId shape position' blocking space'
| ojw/taskpals | src/Location.hs | bsd-3-clause | 3,479 | 0 | 16 | 665 | 1,369 | 715 | 654 | 61 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE CPP #-}
module System.Random.SFMT
( -- * Gen
Gen
, initializeFromSeed, create, initialize, initializeFromByteString
, withSystemRandom, createSystemRandom
-- ** Type helpers
, GenIO, GenST
, asGenIO, asGenST
-- * Variates
, Variate(..)
-- * Seed
, Seed
, unsafeFromSeed, unsafeToSeed
, save, restore
) where
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
#include "MachDeps.h"
#endif
import Control.Monad
import Control.Monad.ST
import Control.Monad.Primitive
import System.Random.SFMT.Foreign
import Foreign.C.String
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Marshal
import qualified Data.Foldable as F
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as S
import System.Entropy
import Data.Int
import Data.Word
import Data.Bits
import Unsafe.Coerce
import System.IO.Unsafe
#if !MIN_VERSION_primitive(0,6,0)
#define PrimBase PrimMonad
#endif
newtype Gen s = Gen (ForeignPtr SFMT)
instance Show (Gen s) where
show = unsafePerformIO . getIDString
getIDString :: Gen s -> IO String
getIDString (Gen gen) = withForeignPtr gen $ \ptr ->
sfmt_get_idstring ptr >>= peekCString
initializeFromSeed :: PrimMonad m => Int -> m (Gen (PrimState m))
initializeFromSeed seed = unsafePrimToPrim $ do
bytes <- mallocBytes sizeOfSFMT
sfmt_init_gen_rand bytes (fromIntegral seed)
Gen `liftM` newForeignPtr finalizerFree bytes
create :: PrimMonad m => m (Gen (PrimState m))
create = initializeFromSeed 0
initialize :: (PrimMonad m, F.Foldable f) => f Word -> m (Gen (PrimState m))
initialize v = unsafePrimToPrim . withArray (unsafeCoerce $ F.toList v) $ \ptr -> do
bytes <- mallocBytes sizeOfSFMT
let len = F.foldl' (\i _ -> i + 1) 0 v
sfmt_init_by_array bytes ptr len
Gen `liftM` newForeignPtr finalizerFree bytes
initializeFromByteString :: PrimMonad m => S.ByteString -> m (Gen (PrimState m))
initializeFromByteString bs = unsafePrimToPrim . S.unsafeUseAsCStringLen bs $ \(ptr, len) -> do
bytes <- mallocBytes sizeOfSFMT
sfmt_init_by_array bytes (castPtr ptr) (fromIntegral $ len `quot` 4)
Gen `liftM` newForeignPtr finalizerFree bytes
withSystemRandom :: PrimBase m => (Gen (PrimState m) -> m a) -> IO a
withSystemRandom m = do
bs <- getEntropy (constSFMT_N * 16)
gen <- initializeFromByteString bs
unsafePrimToIO $ m (unsafeCoerce gen)
createSystemRandom :: IO GenIO
createSystemRandom = withSystemRandom (return :: GenIO -> IO GenIO)
type GenIO = Gen (PrimState IO)
type GenST s = Gen (PrimState (ST s))
asGenIO :: (GenIO -> IO a) -> GenIO -> IO a
asGenIO = id
asGenST :: (GenST s -> ST s a) -> GenST s -> ST s a
asGenST = id
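-- A minimal usage sketch of the public API above (not part of the original
-- module; the type annotations are only there for clarity):
--
-- > example :: IO ()
-- > example = do
-- >   gen <- createSystemRandom
-- >   x   <- uniform gen           :: IO Double   -- a Double in the unit interval
-- >   n   <- uniformR (1, 6) gen   :: IO Int      -- an Int between 1 and 6
-- >   print (x, n)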
genRand :: PrimMonad m => (Ptr SFMT -> IO a) -> Gen (PrimState m) -> m a
genRand f (Gen gen) = unsafePrimToPrim $ withForeignPtr gen f
genRandWord32 :: PrimMonad m => Gen (PrimState m) -> m Word32
genRandWord32 g = fromIntegral `liftM` genRand wrap_genrand_uint32 g
genRandWord64 :: PrimMonad m => Gen (PrimState m) -> m Word64
genRandWord64 g = fromIntegral `liftM` genRand wrap_genrand_uint64 g
genRandReal2 :: PrimMonad m => Gen (PrimState m) -> m Float
genRandReal2 g = realToFrac `liftM` genRand wrap_genrand_real2 g
genRandRes53 :: PrimMonad m => Gen (PrimState m) -> m Double
genRandRes53 g = realToFrac `liftM` genRand wrap_genrand_res53 g
class Variate a where
uniform :: PrimMonad m => Gen (PrimState m) -> m a
uniformR :: PrimMonad m => (a, a) -> Gen (PrimState m) -> m a
instance Variate Bool where
uniform g = (\i -> i .&. 1 /= 0) `liftM` genRandWord32 g
uniformR (False,True) g = uniform g
uniformR (False,False) _ = return False
uniformR (True,True) _ = return True
uniformR (True,False) g = uniform g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Float where
uniform = genRandReal2
uniformR (x1,x2) g = (\d -> x1 + (x2-x1) * d) `liftM` genRandReal2 g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Double where
uniform = genRandRes53
uniformR (x1,x2) g = (\d -> x1 + (x2-x1) * d) `liftM` genRandRes53 g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Word where
#if WORD_SIZE_IN_BITS < 64
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word32)
#else
uniform g = fromIntegral `liftM` genRandWord64 g
uniformR = uniformRange (undefined :: Word64)
#endif
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Word8 where
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word8)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Word16 where
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word16)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Word32 where
uniform = genRandWord32
uniformR = uniformRange (undefined :: Word32)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Word64 where
uniform = genRandWord64
uniformR = uniformRange (undefined :: Word64)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Int where
#if WORD_SIZE_IN_BITS < 64
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word32)
#else
uniform g = fromIntegral `liftM` genRandWord64 g
uniformR = uniformRange (undefined :: Word64)
#endif
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Int8 where
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word8)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Int16 where
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word16)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Int32 where
uniform g = fromIntegral `liftM` genRandWord32 g
uniformR = uniformRange (undefined :: Word32)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance Variate Int64 where
uniform g = fromIntegral `liftM` genRandWord64 g
uniformR = uniformRange (undefined :: Word64)
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance (Variate a, Variate b) => Variate (a,b) where
uniform g = (,) `liftM` uniform g `ap` uniform g
uniformR ((x1,y1),(x2,y2)) g = (,) `liftM` uniformR (x1,x2) g `ap` uniformR (y1,y2) g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance (Variate a, Variate b, Variate c) => Variate (a,b,c) where
uniform g = (,,) `liftM` uniform g `ap` uniform g `ap` uniform g
uniformR ((x1,y1,z1),(x2,y2,z2)) g =
(,,) `liftM` uniformR (x1,x2) g `ap` uniformR (y1,y2) g `ap` uniformR (z1,z2) g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
instance (Variate a, Variate b, Variate c, Variate d) => Variate (a,b,c,d) where
uniform g = (,,,) `liftM` uniform g `ap` uniform g `ap` uniform g
`ap` uniform g
uniformR ((x1,y1,z1,t1),(x2,y2,z2,t2)) g =
(,,,) `liftM` uniformR (x1,x2) g `ap` uniformR (y1,y2) g `ap`
uniformR (z1,z2) g `ap` uniformR (t1,t2) g
{-# INLINE uniform #-}
{-# INLINE uniformR #-}
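-- uniformRange below samples a whole word and rejects values that fall into
-- the incomplete top bucket (x >= buckets * n), so every result in the
-- requested interval is equally likely rather than being skewed by a plain
-- `mod`. The first (unused) argument only fixes which word width is sampled.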
uniformRange :: forall m word a.
(Variate word, Bounded word, Eq word, Num word, Integral word, Ord word
, PrimMonad m, Variate a, Integral a, Show word)
=> word -> (a, a) -> Gen (PrimState m) -> m a
uniformRange _ = go
where
go (x1, x2) g
| n == 0 = uniform g
| otherwise = loop
where
( i, j ) | x1 < x2 = ( x1, x2 )
| otherwise = ( x2, x1 )
n = 1 + fromIntegral j - fromIntegral i :: word
buckets = maxBound `div` n
maxN = buckets * n
loop = do
x <- uniform g :: m word
if x < maxN
then return $! i + fromIntegral (x `div` buckets)
else loop
{-# INLINE uniformRange #-}
newtype Seed = Seed { unsafeFromSeed :: S.ByteString }
deriving Show
unsafeToSeed :: S.ByteString -> Seed
unsafeToSeed = Seed
save :: PrimMonad m => Gen (PrimState m) -> m Seed
save (Gen gen) = unsafePrimToPrim . withForeignPtr gen $ \ptr ->
Seed `liftM` S.packCStringLen (castPtr ptr, sizeOfSFMT)
restore :: PrimMonad m => Seed -> m (Gen (PrimState m))
restore (Seed bs) = unsafePrimToPrim . S.unsafeUseAsCString bs $ \ptr -> do
bytes <- mallocBytes sizeOfSFMT
copyBytes bytes (castPtr ptr) sizeOfSFMT
Gen `liftM` newForeignPtr finalizerFree bytes
-- Assertion failed: (sfmt->idx % 2 == 0), function sfmt_genrand_uint64, file SFMT-src-1.4.1/SFMT.h, line 158.
| philopon/sfmt-hs | System/Random/SFMT.hs | bsd-3-clause | 8,884 | 0 | 15 | 2,021 | 2,859 | 1,548 | 1,311 | 200 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad (when)
import qualified Data.Set as S
import qualified Data.Text as T
import Database.PostgreSQL.Simple (close, connect)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import Conf
import DB
import KripkeTypes
import LinkRing
import Logic
import Model
import PageRank
import RoughSet
import Tfidf
-- |Parse the command line options and the config file and obey the user.
main :: IO ()
main = do
parsedArgv <- getArgs >>= parseArgv
let parsedOptions = fst parsedArgv
createDotDir
conf <- buildConf parsedOptions
when (S.null (optFlags (opts conf))) $ do
putStrLn usage
exitFailure
c <- connect (conInfo conf)
let lamType = optLamType (opts conf)
when (BuildR `S.member` optFlags (opts conf)) $
buildR c (proxy conf) (optUrl (opts conf)) (optRecDepth (opts conf))
when (BuildRelativeR `S.member` optFlags (opts conf)) $
buildRelativeR
c (proxy conf) (optUrl (opts conf)) (optRecDepth (opts conf))
when (BuildLambdaRel `S.member` optFlags (opts conf)) $
buildLambdaStore c (proxy conf)
when (CalcTfidf `S.member` optFlags (opts conf)) $ do
storeAllTfidf c BdyRaw
storeAllTfidf c BdyStem
storeAllTfidf c BdySoundex
storeAllTfidf c MtaRaw
storeAllTfidf c MtaStem
storeAllTfidf c MtaSoundex
when (CalcPageRank `S.member` optFlags (opts conf)) $
calcAndUpdatePageRanks c (optPgIters (opts conf))
when (LambdaAccum `S.member` optFlags (opts conf)) $ do
rs <- lambdaAccum c lamType
mapM_ print rs
when (TfidfWorld `S.member` optFlags (opts conf)) $ do
ts <- worldsTfidf c lamType (optUrl (opts conf))
mapM_ print ts
when (TfidfSearch `S.member` optFlags (opts conf)) $ do
rs <- tfidfSortedSearch c lamType (optFml (opts conf))
mapM_ print rs
when (PLFmlEval `S.member` optFlags (opts conf)) $ do
let frm = read (T.unpack (optFml (opts conf))) :: PLFml
putStrLn ("plformula: " ++ show frm ++ " =")
ws <- satWorlds c lamType frm
mapM_ print ws
when (PMLFmlEval `S.member` optFlags (opts conf)) $ do
let frm = read (T.unpack (optFml (opts conf))) :: Fml
putStrLn ("pmlformula: " ++ show frm ++ " =")
ws <- satWorlds c lamType frm
mapM_ print ws
when (InLinkRings `S.member` optFlags (opts conf)) $ do
rs <- inLinkRings c (optUrl (opts conf))
let fr = formatRingsAsCols rs
mapM_ print fr
when (OutLinkRings `S.member` optFlags (opts conf)) $ do
rs <- outLinkRings c (optUrl (opts conf))
let fr = formatRingsAsCols rs
mapM_ print fr
when (RSetLamdaPL `S.member` optFlags (opts conf)) $ do
let frm = read (T.unpack (optFml (opts conf))) :: PLFml
putStrLn ("formula: " ++ show frm ++ " =")
rs <- roughSetOfLamPL c lamType frm
printRoughSet rs
when (RSetLamdaList `S.member` optFlags (opts conf)) $ do
wl <- mapM (termAsLamType c lamType Nothing) (optWords (opts conf))
rs <- roughSetOfLamList c lamType wl
printRoughSet rs
when (RSetWorldsLamda `S.member` optFlags (opts conf)) $ do
rs <- roughSetOfWorldsLam c lamType (optUrl (opts conf))
printRoughSet rs
when (RSetInLinks `S.member` optFlags (opts conf)) $ do
rs <- roughSetOfInLinks c (optWords (opts conf))
printRoughSet rs
when (RSetWsInLinks `S.member` optFlags (opts conf)) $ do
rs <- roughSetOfWorldsInLinks c (optUrl (opts conf))
printRoughSet rs
when (RSetOutLinks `S.member` optFlags (opts conf)) $ do
rs <- roughSetOfOutLinks c (optWords (opts conf))
printRoughSet rs
when (RSetWsOutLinks `S.member` optFlags (opts conf)) $ do
rs <- roughSetOfWorldsOutLinks c (optUrl (opts conf))
printRoughSet rs
close c
| dawedawe/kripkeweb | src/Main.hs | bsd-3-clause | 3,886 | 0 | 19 | 947 | 1,558 | 743 | 815 | 96 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Syncthing.Types.Completion
( Completion(..)
) where
import Control.Applicative ((<$>))
import Control.Monad (MonadPlus (mzero))
import Data.Aeson (FromJSON, Value (..), parseJSON, (.:))
newtype Completion = Completion { getCompletion :: Int }
deriving (Eq, Show)
instance FromJSON Completion where
parseJSON (Object v) = Completion <$> (v .: "completion")
parseJSON _ = mzero
| jetho/syncthing-hs | Network/Syncthing/Types/Completion.hs | bsd-3-clause | 564 | 0 | 8 | 191 | 134 | 83 | 51 | 11 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring expressions.
-}
{-# LANGUAGE CPP #-}
module DsExpr ( dsExpr, dsLExpr, dsLocalBinds
, dsValBinds, dsLit, dsSyntaxExpr ) where
#include "HsVersions.h"
import Match
import MatchLit
import DsBinds
import DsGRHSs
import DsListComp
import DsUtils
import DsArrows
import DsMonad
import Name
import NameEnv
import FamInstEnv( topNormaliseType )
import DsMeta
import HsSyn
import Platform
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types
import TcType
import TcEvidence
import TcRnMonad
import TcHsSyn
import Type
import CoreSyn
import CoreUtils
import CoreFVs
import MkCore
import DynFlags
import CostCentre
import Id
import Module
import VarSet
import ConLike
import DataCon
import TysWiredIn
import PrelNames
import BasicTypes
import Maybes
import VarEnv
import SrcLoc
import Util
import Bag
import Outputable
import FastString
import PatSyn
import IfaceEnv
import Data.IORef ( atomicModifyIORef', modifyIORef )
import Control.Monad
import GHC.Fingerprint
{-
************************************************************************
* *
dsLocalBinds, dsValBinds
* *
************************************************************************
-}
dsLocalBinds :: HsLocalBinds Id -> CoreExpr -> DsM CoreExpr
dsLocalBinds EmptyLocalBinds body = return body
dsLocalBinds (HsValBinds binds) body = dsValBinds binds body
dsLocalBinds (HsIPBinds binds) body = dsIPBinds binds body
-------------------------
dsValBinds :: HsValBinds Id -> CoreExpr -> DsM CoreExpr
dsValBinds (ValBindsOut binds _) body = foldrM ds_val_bind body binds
dsValBinds (ValBindsIn {}) _ = panic "dsValBinds ValBindsIn"
-------------------------
dsIPBinds :: HsIPBinds Id -> CoreExpr -> DsM CoreExpr
dsIPBinds (IPBinds ip_binds ev_binds) body
= do { ds_binds <- dsTcEvBinds ev_binds
; let inner = mkCoreLets ds_binds body
-- The dict bindings may not be in
-- dependency order; hence Rec
; foldrM ds_ip_bind inner ip_binds }
where
ds_ip_bind (L _ (IPBind ~(Right n) e)) body
= do e' <- dsLExpr e
return (Let (NonRec n e') body)
-------------------------
ds_val_bind :: (RecFlag, LHsBinds Id) -> CoreExpr -> DsM CoreExpr
-- Special case for bindings which bind unlifted variables
-- We need to do a case right away, rather than building
-- a tuple and doing selections.
-- Silently ignore INLINE and SPECIALISE pragmas...
ds_val_bind (NonRecursive, hsbinds) body
| [L loc bind] <- bagToList hsbinds,
-- Non-recursive, non-overloaded bindings only come in ones
-- ToDo: in some bizarre case it's conceivable that there
-- could be dict binds in the 'binds'. (See the notes
-- below. Then pattern-match would fail. Urk.)
unliftedMatchOnly bind
= putSrcSpanDs loc (dsUnliftedBind bind body)
-- Ordinary case for bindings; none should be unlifted
ds_val_bind (_is_rec, binds) body
= do { (force_vars,prs) <- dsLHsBinds binds
; let body' = foldr seqVar body force_vars
; ASSERT2( not (any (isUnliftedType . idType . fst) prs), ppr _is_rec $$ ppr binds )
case prs of
[] -> return body
_ -> return (Let (Rec prs) body') }
-- Use a Rec regardless of is_rec.
-- Why? Because it allows the binds to be all
-- mixed up, which is what happens in one rare case
-- Namely, for an AbsBind with no tyvars and no dicts,
-- but which does have dictionary bindings.
-- See notes with TcSimplify.inferLoop [NO TYVARS]
-- It turned out that wrapping a Rec here was the easiest solution
--
-- NB The previous case dealt with unlifted bindings, so we
-- only have to deal with lifted ones now; so Rec is ok
------------------
dsUnliftedBind :: HsBind Id -> CoreExpr -> DsM CoreExpr
dsUnliftedBind (AbsBinds { abs_tvs = [], abs_ev_vars = []
, abs_exports = exports
, abs_ev_binds = ev_binds
, abs_binds = lbinds }) body
= do { let body1 = foldr bind_export body exports
bind_export export b = bindNonRec (abe_poly export) (Var (abe_mono export)) b
; body2 <- foldlBagM (\body lbind -> dsUnliftedBind (unLoc lbind) body)
body1 lbinds
; ds_binds <- dsTcEvBinds_s ev_binds
; return (mkCoreLets ds_binds body2) }
dsUnliftedBind (AbsBindsSig { abs_tvs = []
, abs_ev_vars = []
, abs_sig_export = poly
, abs_sig_ev_bind = ev_bind
, abs_sig_bind = L _ bind }) body
= do { ds_binds <- dsTcEvBinds ev_bind
; body' <- dsUnliftedBind (bind { fun_id = noLoc poly }) body
; return (mkCoreLets ds_binds body') }
dsUnliftedBind (FunBind { fun_id = L _ fun
, fun_matches = matches
, fun_co_fn = co_fn
, fun_tick = tick }) body
-- Can't be a bang pattern (that looks like a PatBind)
-- so must be simply unboxed
= do { (args, rhs) <- matchWrapper (FunRhs (idName fun)) Nothing matches
; MASSERT( null args ) -- Functions aren't lifted
; MASSERT( isIdHsWrapper co_fn )
; let rhs' = mkOptTickBox tick rhs
; return (bindNonRec fun rhs' body) }
dsUnliftedBind (PatBind {pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty }) body
= -- let C x# y# = rhs in body
-- ==> case rhs of C x# y# -> body
do { rhs <- dsGuarded grhss ty
; let upat = unLoc pat
eqn = EqnInfo { eqn_pats = [upat],
eqn_rhs = cantFailMatchResult body }
; var <- selectMatchVar upat
; result <- matchEquations PatBindRhs [var] [eqn] (exprType body)
; return (bindNonRec var rhs result) }
dsUnliftedBind bind body = pprPanic "dsLet: unlifted" (ppr bind $$ ppr body)
----------------------
unliftedMatchOnly :: HsBind Id -> Bool
unliftedMatchOnly (AbsBinds { abs_binds = lbinds })
= anyBag (unliftedMatchOnly . unLoc) lbinds
unliftedMatchOnly (AbsBindsSig { abs_sig_bind = L _ bind })
= unliftedMatchOnly bind
unliftedMatchOnly (PatBind { pat_lhs = lpat, pat_rhs_ty = rhs_ty })
= isUnliftedType rhs_ty
|| isUnliftedLPat lpat
|| any (isUnliftedType . idType) (collectPatBinders lpat)
unliftedMatchOnly (FunBind { fun_id = L _ id })
= isUnliftedType (idType id)
unliftedMatchOnly _ = False -- I hope! Checked immediately by caller in fact
{-
************************************************************************
* *
\subsection[DsExpr-vars-and-cons]{Variables, constructors, literals}
* *
************************************************************************
-}
dsLExpr :: LHsExpr Id -> DsM CoreExpr
dsLExpr (L loc e) = putSrcSpanDs loc $ dsExpr e
dsExpr :: HsExpr Id -> DsM CoreExpr
dsExpr (HsPar e) = dsLExpr e
dsExpr (ExprWithTySigOut e _) = dsLExpr e
dsExpr (HsVar (L _ var)) = return (varToCoreExpr var)
-- See Note [Desugaring vars]
dsExpr (HsUnboundVar {}) = panic "dsExpr: HsUnboundVar" -- Typechecker eliminates them
dsExpr (HsIPVar _) = panic "dsExpr: HsIPVar"
dsExpr (HsOverLabel _) = panic "dsExpr: HsOverLabel"
dsExpr (HsLit lit) = dsLit lit
dsExpr (HsOverLit lit) = dsOverLit lit
dsExpr (HsWrap co_fn e)
= do { e' <- dsExpr e
; wrapped_e <- dsHsWrapper co_fn e'
; dflags <- getDynFlags
; warnAboutIdentities dflags e' (exprType wrapped_e)
; return wrapped_e }
dsExpr (NegApp expr neg_expr)
= do { expr' <- dsLExpr expr
; dsSyntaxExpr neg_expr [expr'] }
dsExpr (HsLam a_Match)
= uncurry mkLams <$> matchWrapper LambdaExpr Nothing a_Match
dsExpr (HsLamCase arg matches)
= do { arg_var <- newSysLocalDs arg
; ([discrim_var], matching_code) <- matchWrapper CaseAlt Nothing matches
; return $ Lam arg_var $ bindNonRec discrim_var (Var arg_var) matching_code }
dsExpr e@(HsApp fun arg)
-- ignore type arguments here; they're in the wrappers instead at this point
| isLHsTypeExpr arg = dsLExpr fun
| otherwise = mkCoreAppDs (text "HsApp" <+> ppr e)
<$> dsLExpr fun <*> dsLExpr arg
{-
Note [Desugaring vars]
~~~~~~~~~~~~~~~~~~~~~~
In one situation we can get a *coercion* variable in a HsVar, namely
the support method for an equality superclass:
class (a~b) => C a b where ...
instance (blah) => C (T a) (T b) where ..
Then we get
$dfCT :: forall ab. blah => C (T a) (T b)
$dfCT ab blah = MkC ($c$p1C a blah) ($cop a blah)
$c$p1C :: forall ab. blah => (T a ~ T b)
$c$p1C ab blah = let ...; g :: T a ~ T b = ... } in g
That 'g' in the 'in' part is an evidence variable, and when
converting to core it must become a CO.
Operator sections. At first it looks as if we can convert
\begin{verbatim}
(expr op)
\end{verbatim}
to
\begin{verbatim}
\x -> op expr x
\end{verbatim}
But no! expr might be a redex, and we can lose laziness badly this
way. Consider
\begin{verbatim}
map (expr op) xs
\end{verbatim}
for example. So we convert instead to
\begin{verbatim}
let y = expr in \x -> op y x
\end{verbatim}
If \tr{expr} is actually just a variable, say, then the simplifier
will sort it out.
-}
dsExpr e@(OpApp e1 op _ e2)
= -- for the type of y, we need the type of op's 2nd argument
mkCoreAppsDs (text "opapp" <+> ppr e) <$> dsLExpr op <*> mapM dsLExpr [e1, e2]
dsExpr (SectionL expr op) -- Desugar (e !) to ((!) e)
= mkCoreAppDs (text "sectionl" <+> ppr expr) <$> dsLExpr op <*> dsLExpr expr
-- dsLExpr (SectionR op expr) -- \ x -> op x expr
dsExpr e@(SectionR op expr) = do
core_op <- dsLExpr op
-- for the type of x, we need the type of op's 2nd argument
let (x_ty:y_ty:_, _) = splitFunTys (exprType core_op)
-- See comment with SectionL
y_core <- dsLExpr expr
x_id <- newSysLocalDs x_ty
y_id <- newSysLocalDs y_ty
return (bindNonRec y_id y_core $
Lam x_id (mkCoreAppsDs (text "sectionr" <+> ppr e) core_op [Var x_id, Var y_id]))
dsExpr (ExplicitTuple tup_args boxity)
= do { let go (lam_vars, args) (L _ (Missing ty))
-- For every missing expression, we need
-- another lambda in the desugaring.
= do { lam_var <- newSysLocalDs ty
; return (lam_var : lam_vars, Var lam_var : args) }
go (lam_vars, args) (L _ (Present expr))
-- Expressions that are present don't generate
-- lambdas, just arguments.
= do { core_expr <- dsLExpr expr
; return (lam_vars, core_expr : args) }
; (lam_vars, args) <- foldM go ([], []) (reverse tup_args)
-- The reverse is because foldM goes left-to-right
; return $ mkCoreLams lam_vars $
mkCoreTupBoxity boxity args }
dsExpr (HsSCC _ cc expr@(L loc _)) = do
dflags <- getDynFlags
if gopt Opt_SccProfilingOn dflags
then do
mod_name <- getModule
count <- goptM Opt_ProfCountEntries
uniq <- newUnique
Tick (ProfNote (mkUserCC (sl_fs cc) mod_name loc uniq) count True)
<$> dsLExpr expr
else dsLExpr expr
dsExpr (HsCoreAnn _ _ expr)
= dsLExpr expr
dsExpr (HsCase discrim matches)
= do { core_discrim <- dsLExpr discrim
; ([discrim_var], matching_code) <- matchWrapper CaseAlt (Just discrim) matches
; return (bindNonRec discrim_var core_discrim matching_code) }
-- Pepe: The binds are in scope in the body but NOT in the binding group
-- This is to avoid silliness in breakpoints
dsExpr (HsLet (L _ binds) body) = do
body' <- dsLExpr body
dsLocalBinds binds body'
-- We need the `ListComp' form to use `deListComp' (rather than the "do" form)
-- because the interpretation of `stmts' depends on what sort of thing it is.
--
dsExpr (HsDo ListComp (L _ stmts) res_ty) = dsListComp stmts res_ty
dsExpr (HsDo PArrComp (L _ stmts) _) = dsPArrComp (map unLoc stmts)
dsExpr (HsDo DoExpr (L _ stmts) _) = dsDo stmts
dsExpr (HsDo GhciStmtCtxt (L _ stmts) _) = dsDo stmts
dsExpr (HsDo MDoExpr (L _ stmts) _) = dsDo stmts
dsExpr (HsDo MonadComp (L _ stmts) _) = dsMonadComp stmts
dsExpr (HsIf mb_fun guard_expr then_expr else_expr)
= do { pred <- dsLExpr guard_expr
; b1 <- dsLExpr then_expr
; b2 <- dsLExpr else_expr
; case mb_fun of
Just fun -> dsSyntaxExpr fun [pred, b1, b2]
Nothing -> return $ mkIfThenElse pred b1 b2 }
dsExpr (HsMultiIf res_ty alts)
| null alts
= mkErrorExpr
| otherwise
= do { match_result <- liftM (foldr1 combineMatchResults)
(mapM (dsGRHS IfAlt res_ty) alts)
; error_expr <- mkErrorExpr
; extractMatchResult match_result error_expr }
where
mkErrorExpr = mkErrorAppDs nON_EXHAUSTIVE_GUARDS_ERROR_ID res_ty
(text "multi-way if")
{-
\noindent
\underline{\bf Various data construction things}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-}
dsExpr (ExplicitList elt_ty wit xs)
= dsExplicitList elt_ty wit xs
-- We desugar [:x1, ..., xn:] as
-- singletonP x1 +:+ ... +:+ singletonP xn
--
dsExpr (ExplicitPArr ty []) = do
emptyP <- dsDPHBuiltin emptyPVar
return (Var emptyP `App` Type ty)
dsExpr (ExplicitPArr ty xs) = do
singletonP <- dsDPHBuiltin singletonPVar
appP <- dsDPHBuiltin appPVar
xs' <- mapM dsLExpr xs
let unary fn x = mkApps (Var fn) [Type ty, x]
binary fn x y = mkApps (Var fn) [Type ty, x, y]
return . foldr1 (binary appP) $ map (unary singletonP) xs'
dsExpr (ArithSeq expr witness seq)
= case witness of
Nothing -> dsArithSeq expr seq
Just fl -> do { newArithSeq <- dsArithSeq expr seq
; dsSyntaxExpr fl [newArithSeq] }
dsExpr (PArrSeq expr (FromTo from to))
= mkApps <$> dsExpr expr <*> mapM dsLExpr [from, to]
dsExpr (PArrSeq expr (FromThenTo from thn to))
= mkApps <$> dsExpr expr <*> mapM dsLExpr [from, thn, to]
dsExpr (PArrSeq _ _)
= panic "DsExpr.dsExpr: Infinite parallel array!"
-- the parser shouldn't have generated it and the renamer and typechecker
-- shouldn't have let it through
{-
\noindent
\underline{\bf Static Pointers}
~~~~~~~~~~~~~~~
\begin{verbatim}
g = ... static f ...
==>
sptEntry:N = StaticPtr
(fingerprintString "pkgKey:module.sptEntry:N")
(StaticPtrInfo "current pkg key" "current module" "sptEntry:0")
f
g = ... sptEntry:N
\end{verbatim}
-}
dsExpr (HsStatic expr@(L loc _)) = do
expr_ds <- dsLExpr expr
let ty = exprType expr_ds
n' <- mkSptEntryName loc
static_binds_var <- dsGetStaticBindsVar
staticPtrTyCon <- dsLookupTyCon staticPtrTyConName
staticPtrInfoDataCon <- dsLookupDataCon staticPtrInfoDataConName
staticPtrDataCon <- dsLookupDataCon staticPtrDataConName
fingerprintDataCon <- dsLookupDataCon fingerprintDataConName
dflags <- getDynFlags
let (line, col) = case loc of
RealSrcSpan r -> ( srcLocLine $ realSrcSpanStart r
, srcLocCol $ realSrcSpanStart r
)
_ -> (0, 0)
srcLoc = mkCoreConApps (tupleDataCon Boxed 2)
[ Type intTy , Type intTy
, mkIntExprInt dflags line, mkIntExprInt dflags col
]
info <- mkConApp staticPtrInfoDataCon <$>
(++[srcLoc]) <$>
mapM mkStringExprFS
[ unitIdFS $ moduleUnitId $ nameModule n'
, moduleNameFS $ moduleName $ nameModule n'
, occNameFS $ nameOccName n'
]
let tvars = tyCoVarsOfTypeWellScoped ty
speTy = ASSERT( all isTyVar tvars ) -- ty is top-level, so this is OK
mkInvForAllTys tvars $ mkTyConApp staticPtrTyCon [ty]
speId = mkExportedVanillaId n' speTy
fp@(Fingerprint w0 w1) = fingerprintName $ idName speId
fp_core = mkConApp fingerprintDataCon
[ mkWord64LitWordRep dflags w0
, mkWord64LitWordRep dflags w1
]
sp = mkConApp staticPtrDataCon [Type ty, fp_core, info, expr_ds]
liftIO $ modifyIORef static_binds_var ((fp, (speId, mkLams tvars sp)) :)
putSrcSpanDs loc $ return $ mkTyApps (Var speId) (mkTyVarTys tvars)
where
-- | Choose either 'Word64#' or 'Word#' to represent the arguments of the
-- 'Fingerprint' data constructor.
mkWord64LitWordRep dflags
| platformWordSize (targetPlatform dflags) < 8 = mkWord64LitWord64
| otherwise = mkWordLit dflags . toInteger
fingerprintName :: Name -> Fingerprint
fingerprintName n = fingerprintString $ unpackFS $ concatFS
[ unitIdFS $ moduleUnitId $ nameModule n
, fsLit ":"
, moduleNameFS (moduleName $ nameModule n)
, fsLit "."
, occNameFS $ occName n
]
{-
\noindent
\underline{\bf Record construction and update}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For record construction we do this (assuming T has three arguments)
\begin{verbatim}
T { op2 = e }
==>
let err = /\a -> recConErr a
T (recConErr t1 "M.hs/230/op1")
e
(recConErr t1 "M.hs/230/op3")
\end{verbatim}
@recConErr@ then converts its argument string into a proper message
before printing it as
\begin{verbatim}
M.hs, line 230: missing field op1 was evaluated
\end{verbatim}
We also handle @C{}@ as valid construction syntax for an unlabelled
constructor @C@, setting all of @C@'s fields to bottom.
-}
dsExpr (RecordCon { rcon_con_expr = con_expr, rcon_flds = rbinds
, rcon_con_like = con_like })
= do { con_expr' <- dsExpr con_expr
; let
(arg_tys, _) = tcSplitFunTys (exprType con_expr')
-- A newtype in the corner should be opaque;
-- hence TcType.tcSplitFunTys
mk_arg (arg_ty, fl)
= case findField (rec_flds rbinds) (flSelector fl) of
(rhs:rhss) -> ASSERT( null rhss )
dsLExpr rhs
[] -> mkErrorAppDs rEC_CON_ERROR_ID arg_ty (ppr (flLabel fl))
unlabelled_bottom arg_ty = mkErrorAppDs rEC_CON_ERROR_ID arg_ty Outputable.empty
labels = conLikeFieldLabels con_like
; con_args <- if null labels
then mapM unlabelled_bottom arg_tys
else mapM mk_arg (zipEqual "dsExpr:RecordCon" arg_tys labels)
; return (mkCoreApps con_expr' con_args) }
{-
Record update is a little harder. Suppose we have the decl:
\begin{verbatim}
data T = T1 {op1, op2, op3 :: Int}
| T2 {op4, op2 :: Int}
| T3
\end{verbatim}
Then we translate as follows:
\begin{verbatim}
r { op2 = e }
===>
let op2 = e in
case r of
T1 op1 _ op3 -> T1 op1 op2 op3
T2 op4 _ -> T2 op4 op2
other -> recUpdError "M.hs/230"
\end{verbatim}
It's important that we use the constructor Ids for @T1@, @T2@ etc on the
RHSs, and do not generate a Core constructor application directly, because the constructor
might do some argument-evaluation first; and may have to throw away some
dictionaries.
Note [Update for GADTs]
~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a b where
T1 :: { f1 :: a } -> T a Int
Then the wrapper function for T1 has type
$WT1 :: a -> T a Int
But if x::T a b, then
x { f1 = v } :: T a b (not T a Int!)
So we need to cast (T a Int) to (T a b). Sigh.
-}
dsExpr expr@(RecordUpd { rupd_expr = record_expr, rupd_flds = fields
, rupd_cons = cons_to_upd
, rupd_in_tys = in_inst_tys, rupd_out_tys = out_inst_tys
, rupd_wrap = dict_req_wrap } )
| null fields
= dsLExpr record_expr
| otherwise
= ASSERT2( notNull cons_to_upd, ppr expr )
do { record_expr' <- dsLExpr record_expr
; field_binds' <- mapM ds_field fields
; let upd_fld_env :: NameEnv Id -- Maps field name to the LocalId of the field binding
upd_fld_env = mkNameEnv [(f,l) | (f,l,_) <- field_binds']
-- It's important to generate the match with matchWrapper,
-- and the right hand sides with applications of the wrapper Id
-- so that everything works when we are doing fancy unboxing on the
          -- constructor arguments.
; alts <- mapM (mk_alt upd_fld_env) cons_to_upd
; ([discrim_var], matching_code)
<- matchWrapper RecUpd Nothing (MG { mg_alts = noLoc alts
, mg_arg_tys = [in_ty]
, mg_res_ty = out_ty, mg_origin = FromSource })
-- FromSource is not strictly right, but we
-- want incomplete pattern-match warnings
; return (add_field_binds field_binds' $
bindNonRec discrim_var record_expr' matching_code) }
where
ds_field :: LHsRecUpdField Id -> DsM (Name, Id, CoreExpr)
-- Clone the Id in the HsRecField, because its Name is that
-- of the record selector, and we must not make that a local binder
-- else we shadow other uses of the record selector
-- Hence 'lcl_id'. Cf Trac #2735
ds_field (L _ rec_field) = do { rhs <- dsLExpr (hsRecFieldArg rec_field)
; let fld_id = unLoc (hsRecUpdFieldId rec_field)
; lcl_id <- newSysLocalDs (idType fld_id)
; return (idName fld_id, lcl_id, rhs) }
add_field_binds [] expr = expr
add_field_binds ((_,b,r):bs) expr = bindNonRec b r (add_field_binds bs expr)
-- Awkwardly, for families, the match goes
-- from instance type to family type
(in_ty, out_ty) =
case (head cons_to_upd) of
RealDataCon data_con ->
let tycon = dataConTyCon data_con in
(mkTyConApp tycon in_inst_tys, mkFamilyTyConApp tycon out_inst_tys)
PatSynCon pat_syn ->
( patSynInstResTy pat_syn in_inst_tys
, patSynInstResTy pat_syn out_inst_tys)
mk_alt upd_fld_env con
= do { let (univ_tvs, ex_tvs, eq_spec,
prov_theta, _req_theta, arg_tys, _) = conLikeFullSig con
subst = zipTvSubst univ_tvs in_inst_tys
-- I'm not bothering to clone the ex_tvs
; eqs_vars <- mapM newPredVarDs (substTheta subst (eqSpecPreds eq_spec))
; theta_vars <- mapM newPredVarDs (substTheta subst prov_theta)
; arg_ids <- newSysLocalsDs (substTysUnchecked subst arg_tys)
; let field_labels = conLikeFieldLabels con
val_args = zipWithEqual "dsExpr:RecordUpd" mk_val_arg
field_labels arg_ids
mk_val_arg fl pat_arg_id
= nlHsVar (lookupNameEnv upd_fld_env (flSelector fl) `orElse` pat_arg_id)
-- SAFE: the typechecker will complain if the synonym is
-- not bidirectional
wrap_id = expectJust "dsExpr:mk_alt" (conLikeWrapId_maybe con)
inst_con = noLoc $ HsWrap wrap (HsVar (noLoc wrap_id))
-- Reconstruct with the WrapId so that unpacking happens
-- The order here is because of the order in `TcPatSyn`.
wrap = mkWpEvVarApps theta_vars <.>
dict_req_wrap <.>
mkWpTyApps (mkTyVarTys ex_tvs) <.>
mkWpTyApps [ ty
| (tv, ty) <- univ_tvs `zip` out_inst_tys
, not (tv `elemVarEnv` wrap_subst) ]
rhs = foldl (\a b -> nlHsApp a b) inst_con val_args
-- Tediously wrap the application in a cast
-- Note [Update for GADTs]
wrapped_rhs =
case con of
RealDataCon data_con ->
let
wrap_co =
mkTcTyConAppCo Nominal
(dataConTyCon data_con)
[ lookup tv ty
| (tv,ty) <- univ_tvs `zip` out_inst_tys ]
lookup univ_tv ty =
case lookupVarEnv wrap_subst univ_tv of
Just co' -> co'
Nothing -> mkTcReflCo Nominal ty
in if null eq_spec
then rhs
else mkLHsWrap (mkWpCastN wrap_co) rhs
-- eq_spec is always null for a PatSynCon
PatSynCon _ -> rhs
wrap_subst =
mkVarEnv [ (tv, mkTcSymCo (mkTcCoVarCo eq_var))
| (spec, eq_var) <- eq_spec `zip` eqs_vars
, let tv = eqSpecTyVar spec ]
req_wrap = dict_req_wrap <.> mkWpTyApps in_inst_tys
pat = noLoc $ ConPatOut { pat_con = noLoc con
, pat_tvs = ex_tvs
, pat_dicts = eqs_vars ++ theta_vars
, pat_binds = emptyTcEvBinds
, pat_args = PrefixCon $ map nlVarPat arg_ids
, pat_arg_tys = in_inst_tys
, pat_wrap = req_wrap }
; return (mkSimpleMatch [pat] wrapped_rhs) }
-- Here is where we desugar the Template Haskell brackets and escapes
-- Template Haskell stuff
dsExpr (HsRnBracketOut _ _) = panic "dsExpr HsRnBracketOut"
dsExpr (HsTcBracketOut x ps) = dsBracket x ps
dsExpr (HsSpliceE s) = pprPanic "dsExpr:splice" (ppr s)
-- Arrow notation extension
dsExpr (HsProc pat cmd) = dsProcExpr pat cmd
-- Hpc Support
dsExpr (HsTick tickish e) = do
e' <- dsLExpr e
return (Tick tickish e')
-- There is a problem here. The then and else branches
-- have no free variables, so they are open to lifting.
-- We need some way of stopping this.
-- This will make no difference to binary coverage
-- (did you go here: YES or NO), but will affect accurate
-- tick counting.
dsExpr (HsBinTick ixT ixF e) = do
e2 <- dsLExpr e
do { ASSERT(exprType e2 `eqType` boolTy)
mkBinaryTickBox ixT ixF e2
}
dsExpr (HsTickPragma _ _ _ expr) = do
dflags <- getDynFlags
if gopt Opt_Hpc dflags
then panic "dsExpr:HsTickPragma"
else dsLExpr expr
-- HsSyn constructs that just shouldn't be here:
dsExpr (ExprWithTySig {}) = panic "dsExpr:ExprWithTySig"
dsExpr (HsBracket {}) = panic "dsExpr:HsBracket"
dsExpr (HsArrApp {}) = panic "dsExpr:HsArrApp"
dsExpr (HsArrForm {}) = panic "dsExpr:HsArrForm"
dsExpr (EWildPat {}) = panic "dsExpr:EWildPat"
dsExpr (EAsPat {}) = panic "dsExpr:EAsPat"
dsExpr (EViewPat {}) = panic "dsExpr:EViewPat"
dsExpr (ELazyPat {}) = panic "dsExpr:ELazyPat"
dsExpr (HsType {}) = panic "dsExpr:HsType" -- removed by typechecker
dsExpr (HsDo {}) = panic "dsExpr:HsDo"
dsExpr (HsRecFld {}) = panic "dsExpr:HsRecFld"
-- Normally handled in HsApp case, but a GHC API user might try to desugar
-- an HsTypeOut, since it is an HsExpr in a typechecked module after all.
-- (Such as ghci itself, in #11456.) So improve the error message slightly.
dsExpr (HsTypeOut {})
= panic "dsExpr: tried to desugar a naked type application argument (HsTypeOut)"
------------------------------
dsSyntaxExpr :: SyntaxExpr Id -> [CoreExpr] -> DsM CoreExpr
dsSyntaxExpr (SyntaxExpr { syn_expr = expr
, syn_arg_wraps = arg_wraps
, syn_res_wrap = res_wrap })
arg_exprs
= do { args <- zipWithM dsHsWrapper arg_wraps arg_exprs
; fun <- dsExpr expr
; dsHsWrapper res_wrap $ mkApps fun args }
findField :: [LHsRecField Id arg] -> Name -> [arg]
findField rbinds sel
= [hsRecFieldArg fld | L _ fld <- rbinds
, sel == idName (unLoc $ hsRecFieldId fld) ]
{-
%--------------------------------------------------------------------
Note [Desugaring explicit lists]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Explicit lists are desugared in a cleverer way to prevent some
fruitless allocations. Essentially, whenever we see a list literal
[x_1, ..., x_n] we:
1. Find the tail of the list that can be allocated statically (say
[x_k, ..., x_n]) by later stages and ensure we desugar that
normally: this makes sure that we don't cause a code size increase
by having the cons in that expression fused (see later) and hence
being unable to statically allocate any more
2. For the prefix of the list which cannot be allocated statically,
say [x_1, ..., x_(k-1)], we turn it into an expression involving
build so that if we find any foldrs over it it will fuse away
entirely!
So in this example we will desugar to:
build (\c n -> x_1 `c` x_2 `c` .... `c` foldr c n [x_k, ..., x_n]
If fusion fails to occur then build will get inlined and (since we
defined a RULE for foldr (:) []) we will get back exactly the
normal desugaring for an explicit list.
This optimisation can be worth a lot: up to 25% of the total
allocation in some nofib programs. Specifically
Program Size Allocs Runtime CompTime
rewrite +0.0% -26.3% 0.02 -1.8%
ansi -0.3% -13.8% 0.00 +0.0%
lift +0.0% -8.7% 0.00 -2.3%
Of course, if rules aren't turned on then there is pretty much no
point doing this fancy stuff, and it may even be harmful.
=======> Note by SLPJ Dec 08.
I'm unconvinced that we should *ever* generate a build for an explicit
list. See the comments in GHC.Base about the foldr/cons rule, which
points out that (foldr k z [a,b,c]) may generate *much* less code than
(a `k` b `k` c `k` z).
Furthermore generating builds messes up the LHS of RULES.
Example: the foldr/single rule in GHC.Base
foldr k z [x] = ...
We do not want to generate a build invocation on the LHS of this RULE!
We fix this by disabling rules in rule LHSs, and testing that
flag here; see Note [Desugaring RULE left hand sides] in Desugar
To test this I've added a (static) flag -fsimple-list-literals, which
makes all list literals be generated via the simple route.
-}
dsExplicitList :: Type -> Maybe (SyntaxExpr Id) -> [LHsExpr Id]
-> DsM CoreExpr
-- See Note [Desugaring explicit lists]
dsExplicitList elt_ty Nothing xs
= do { dflags <- getDynFlags
; xs' <- mapM dsLExpr xs
; let (dynamic_prefix, static_suffix) = spanTail is_static xs'
; if gopt Opt_SimpleListLiterals dflags -- -fsimple-list-literals
|| not (gopt Opt_EnableRewriteRules dflags) -- Rewrite rules off
-- Don't generate a build if there are no rules to eliminate it!
-- See Note [Desugaring RULE left hand sides] in Desugar
|| null dynamic_prefix -- Avoid build (\c n. foldr c n xs)!
then return $ mkListExpr elt_ty xs'
else mkBuildExpr elt_ty (mkSplitExplicitList dynamic_prefix static_suffix) }
where
is_static :: CoreExpr -> Bool
is_static e = all is_static_var (varSetElems (exprFreeVars e))
is_static_var :: Var -> Bool
is_static_var v
| isId v = isExternalName (idName v) -- Top-level things are given external names
| otherwise = False -- Type variables
mkSplitExplicitList prefix suffix (c, _) (n, n_ty)
= do { let suffix' = mkListExpr elt_ty suffix
; folded_suffix <- mkFoldrExpr elt_ty n_ty (Var c) (Var n) suffix'
; return (foldr (App . App (Var c)) folded_suffix prefix) }
dsExplicitList elt_ty (Just fln) xs
= do { list <- dsExplicitList elt_ty Nothing xs
; dflags <- getDynFlags
; dsSyntaxExpr fln [mkIntExprInt dflags (length xs), list] }
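-- spanTail p xs splits xs into (prefix, longest suffix all of whose elements
-- satisfy p); e.g. spanTail even [1,2,3,4,6] == ([1,2,3],[4,6]).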
spanTail :: (a -> Bool) -> [a] -> ([a], [a])
spanTail f xs = (reverse rejected, reverse satisfying)
where (satisfying, rejected) = span f $ reverse xs
dsArithSeq :: PostTcExpr -> (ArithSeqInfo Id) -> DsM CoreExpr
dsArithSeq expr (From from)
= App <$> dsExpr expr <*> dsLExpr from
dsArithSeq expr (FromTo from to)
= do dflags <- getDynFlags
warnAboutEmptyEnumerations dflags from Nothing to
expr' <- dsExpr expr
from' <- dsLExpr from
to' <- dsLExpr to
return $ mkApps expr' [from', to']
dsArithSeq expr (FromThen from thn)
= mkApps <$> dsExpr expr <*> mapM dsLExpr [from, thn]
dsArithSeq expr (FromThenTo from thn to)
= do dflags <- getDynFlags
warnAboutEmptyEnumerations dflags from (Just thn) to
expr' <- dsExpr expr
from' <- dsLExpr from
thn' <- dsLExpr thn
to' <- dsLExpr to
return $ mkApps expr' [from', thn', to']
{-
Desugar 'do' and 'mdo' expressions (NOT list comprehensions, they're
handled in DsListComp). Basically does the translation given in the
Haskell 98 report:
-}
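-- Informally (ignoring the rebindable-syntax plumbing handled below):
--   do { e; stmts }       ==>  e >> do { stmts }
--   do { p <- e; stmts }  ==>  e >>= \p -> do { stmts }   -- with 'fail' for refutable p
--   do { let bs; stmts }  ==>  let bs in do { stmts }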
dsDo :: [ExprLStmt Id] -> DsM CoreExpr
dsDo stmts
= goL stmts
where
goL [] = panic "dsDo"
goL (L loc stmt:lstmts) = putSrcSpanDs loc (go loc stmt lstmts)
go _ (LastStmt body _ _) stmts
= ASSERT( null stmts ) dsLExpr body
-- The 'return' op isn't used for 'do' expressions
go _ (BodyStmt rhs then_expr _ _) stmts
= do { rhs2 <- dsLExpr rhs
; warnDiscardedDoBindings rhs (exprType rhs2)
; rest <- goL stmts
; dsSyntaxExpr then_expr [rhs2, rest] }
go _ (LetStmt (L _ binds)) stmts
= do { rest <- goL stmts
; dsLocalBinds binds rest }
go _ (BindStmt pat rhs bind_op fail_op res1_ty) stmts
= do { body <- goL stmts
; rhs' <- dsLExpr rhs
; var <- selectSimpleMatchVarL pat
; match <- matchSinglePat (Var var) (StmtCtxt DoExpr) pat
res1_ty (cantFailMatchResult body)
; match_code <- handle_failure pat match fail_op
; dsSyntaxExpr bind_op [rhs', Lam var match_code] }
go _ (ApplicativeStmt args mb_join body_ty) stmts
= do {
let
(pats, rhss) = unzip (map (do_arg . snd) args)
do_arg (ApplicativeArgOne pat expr) =
(pat, dsLExpr expr)
do_arg (ApplicativeArgMany stmts ret pat) =
(pat, dsDo (stmts ++ [noLoc $ mkLastStmt (noLoc ret)]))
arg_tys = map hsLPatType pats
; rhss' <- sequence rhss
; let body' = noLoc $ HsDo DoExpr (noLoc stmts) body_ty
; let fun = L noSrcSpan $ HsLam $
MG { mg_alts = noLoc [mkSimpleMatch pats body']
, mg_arg_tys = arg_tys
, mg_res_ty = body_ty
, mg_origin = Generated }
; fun' <- dsLExpr fun
; let mk_ap_call l (op,r) = dsSyntaxExpr op [l,r]
; expr <- foldlM mk_ap_call fun' (zip (map fst args) rhss')
; case mb_join of
Nothing -> return expr
Just join_op -> dsSyntaxExpr join_op [expr] }
go loc (RecStmt { recS_stmts = rec_stmts, recS_later_ids = later_ids
, recS_rec_ids = rec_ids, recS_ret_fn = return_op
, recS_mfix_fn = mfix_op, recS_bind_fn = bind_op
, recS_bind_ty = bind_ty
, recS_rec_rets = rec_rets, recS_ret_ty = body_ty }) stmts
= goL (new_bind_stmt : stmts) -- rec_ids can be empty; eg rec { print 'x' }
where
new_bind_stmt = L loc $ BindStmt (mkBigLHsPatTupId later_pats)
mfix_app bind_op
noSyntaxExpr -- Tuple cannot fail
bind_ty
tup_ids = rec_ids ++ filterOut (`elem` rec_ids) later_ids
tup_ty = mkBigCoreTupTy (map idType tup_ids) -- Deals with singleton case
rec_tup_pats = map nlVarPat tup_ids
later_pats = rec_tup_pats
rets = map noLoc rec_rets
mfix_app = nlHsSyntaxApps mfix_op [mfix_arg]
mfix_arg = noLoc $ HsLam
(MG { mg_alts = noLoc [mkSimpleMatch [mfix_pat] body]
, mg_arg_tys = [tup_ty], mg_res_ty = body_ty
, mg_origin = Generated })
mfix_pat = noLoc $ LazyPat $ mkBigLHsPatTupId rec_tup_pats
body = noLoc $ HsDo
DoExpr (noLoc (rec_stmts ++ [ret_stmt])) body_ty
ret_app = nlHsSyntaxApps return_op [mkBigLHsTupId rets]
ret_stmt = noLoc $ mkLastStmt ret_app
-- This LastStmt will be desugared with dsDo,
-- which ignores the return_op in the LastStmt,
-- so we must apply the return_op explicitly
go _ (ParStmt {}) _ = panic "dsDo ParStmt"
go _ (TransStmt {}) _ = panic "dsDo TransStmt"
handle_failure :: LPat Id -> MatchResult -> SyntaxExpr Id -> DsM CoreExpr
-- In a do expression, pattern-match failure just calls
-- the monadic 'fail' rather than throwing an exception
handle_failure pat match fail_op
| matchCanFail match
= do { dflags <- getDynFlags
; fail_msg <- mkStringExpr (mk_fail_msg dflags pat)
; fail_expr <- dsSyntaxExpr fail_op [fail_msg]
; extractMatchResult match fail_expr }
| otherwise
= extractMatchResult match (error "It can't fail")
mk_fail_msg :: DynFlags -> Located e -> String
mk_fail_msg dflags pat = "Pattern match failure in do expression at " ++
showPpr dflags (getLoc pat)
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
-}
-- Warn about certain types of values discarded in monadic bindings (#3263)
warnDiscardedDoBindings :: LHsExpr Id -> Type -> DsM ()
warnDiscardedDoBindings rhs rhs_ty
| Just (m_ty, elt_ty) <- tcSplitAppTy_maybe rhs_ty
= do { warn_unused <- woptM Opt_WarnUnusedDoBind
; warn_wrong <- woptM Opt_WarnWrongDoBind
; when (warn_unused || warn_wrong) $
do { fam_inst_envs <- dsGetFamInstEnvs
; let norm_elt_ty = topNormaliseType fam_inst_envs elt_ty
-- Warn about discarding non-() things in 'monadic' binding
; if warn_unused && not (isUnitTy norm_elt_ty)
then warnDs (Reason Opt_WarnUnusedDoBind)
(badMonadBind rhs elt_ty)
else
-- Warn about discarding m a things in 'monadic' binding of the same type,
-- but only if we didn't already warn due to Opt_WarnUnusedDoBind
when warn_wrong $
do { case tcSplitAppTy_maybe norm_elt_ty of
Just (elt_m_ty, _)
| m_ty `eqType` topNormaliseType fam_inst_envs elt_m_ty
-> warnDs (Reason Opt_WarnWrongDoBind)
(badMonadBind rhs elt_ty)
_ -> return () } } }
  | otherwise  -- RHS does not have type of form (m ty), which is weird
  = return ()  -- but at least this warning is irrelevant
badMonadBind :: LHsExpr Id -> Type -> SDoc
badMonadBind rhs elt_ty
= vcat [ hang (text "A do-notation statement discarded a result of type")
2 (quotes (ppr elt_ty))
, hang (text "Suppress this warning by saying")
2 (quotes $ text "_ <-" <+> ppr rhs)
]
{-
************************************************************************
* *
\subsection{Static pointers}
* *
************************************************************************
-}
-- | Creates a name for an entry in the Static Pointer Table.
--
-- The name has the form @sptEntry:<N>@ where @<N>@ is generated from a
-- per-module counter.
--
mkSptEntryName :: SrcSpan -> DsM Name
mkSptEntryName loc = do
mod <- getModule
occ <- mkWrapperName "sptEntry"
newGlobalBinder mod occ loc
where
mkWrapperName what
= do dflags <- getDynFlags
thisMod <- getModule
let -- Note [Generating fresh names for ccall wrapper]
-- in compiler/typecheck/TcEnv.hs
wrapperRef = nextWrapperNum dflags
wrapperNum <- liftIO $ atomicModifyIORef' wrapperRef $ \mod_env ->
let num = lookupWithDefaultModuleEnv mod_env 0 thisMod
in (extendModuleEnv mod_env thisMod (num+1), num)
return $ mkVarOcc $ what ++ ":" ++ show wrapperNum
| oldmanmike/ghc | compiler/deSugar/DsExpr.hs | bsd-3-clause | 42,015 | 107 | 25 | 13,304 | 8,278 | 4,307 | 3,971 | -1 | -1 |
{-# LANGUAGE GADTs, ScopedTypeVariables, FlexibleContexts, TypeFamilies #-}
module Patch (readAndApplyGitEmail) where
import Utils
import Control.Exception as E
import Control.Applicative ( Alternative, (<|>) )
import Control.Monad ( unless, when, forM_ )
import Control.Monad.Trans ( liftIO )
import qualified Data.Attoparsec.Char8 as A
import Data.Attoparsec.Combinator( many' )
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.DateTime ( formatDateTime, parseDateTime, DateTime )
import Data.List ( inits )
import Data.Maybe ( fromJust )
import Prelude hiding ( lex )
import System.Directory ( doesFileExist, doesDirectoryExist )
import System.IO ( openFile, IOMode( ReadMode ), Handle )
import System.FilePath ( splitPath, joinPath, takeDirectory, isPathSeparator )
import SHA1 ( sha1PS )
import Storage.Hashed.Tree ( Tree )
import Darcs.Flags( Compression(..) )
import Darcs.Patch ( RepoPatch, fromPrims, infopatch, apply, invert )
import Darcs.Patch.Apply ( ApplyState )
import Darcs.Patch.Info ( patchinfo, PatchInfo )
import Darcs.Patch.PatchInfoAnd ( n2pia )
import Darcs.Patch.Show ( showPatch )
import Darcs.Patch.Prim ( sortCoalesceFL, hunk, addfile, rmfile, adddir )
import Darcs.Patch.Prim.Class ( PrimOf )
import Darcs.Repository ( withRepoLock, RepoJob(..), Repository
, finalizeRepositoryChanges, tentativelyAddPatch )
import Darcs.SignalHandler ( withSignalsBlocked )
import Darcs.Utils ( withCurrentDirectory, promptYorn )
import Darcs.Witnesses.Ordered ( FL(..), (+>+), RL(..) )
import Darcs.Witnesses.Sealed ( Sealed(..), joinGap, emptyGap, freeGap
, unFreeLeft, FreeLeft )
import Printer ( renderString )
type Author = B.ByteString
type Message = [B.ByteString]
newtype GitHash = GitHash String
instance Show GitHash where
show (GitHash h) = case length h of
40 -> h
n -> h ++ replicate (40 - n) 'X'
-- | @eqOrX@ supports Git's shortest unique prefixes of Hashes. This could
-- allow arbitrary patch application, since Git does not know the state of our
-- files (however, --full-index does as it suggests for git-format-patch).
eqOrX :: Char -> Char -> Bool
eqOrX a b | a == b = True
eqOrX 'X' _ = True
eqOrX _ 'X' = True
eqOrX _ _ = False
instance Eq GitHash where
g1 == g2 = and $ zipWith eqOrX (show g1) (show g2)
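-- Consequently an abbreviated hash compares equal to any full hash it
-- prefixes, because 'show' pads it with 'X':
--   GitHash "e69de29" == GitHash "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"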
data HunkChange = HunkChange Int [B.ByteString] [B.ByteString] deriving Show
data Change = AddFile FilePath [B.ByteString]
| RmFile FilePath [B.ByteString]
| Hunk FilePath GitHash [HunkChange]
deriving Show
data GitPatch = GitPatch Author DateTime Message [Change]
deriving Show
data HunkLine = ContextLine B.ByteString
| AddedLine B.ByteString
| RemovedLine B.ByteString
deriving Show
readAndApplyGitEmail :: String -> Bool -> FilePath -> IO ()
readAndApplyGitEmail repoPath shouldPrompt patchFile =
withCurrentDirectory repoPath $ do
exists <- doesFileExist patchFile
unless exists $ die $ patchFile ++ " does not exist, or is inaccessible."
putStrLn "Attempting to parse input."
h <- openFile patchFile ReadMode
ps <- parseGitEmail h
let patchCount = length ps
putStrLn $ "Successfully parsed " ++ show patchCount ++ " patches."
putStrLn "Attempting to apply patches."
forM_ (zip ([1..] :: [Int]) ps) $ \(index, p@(GitPatch _ _ msg _)) -> do
putStrLn $ unwords
[ "Applying patch", show index, "of", show patchCount ++ ":"
, BC.unpack $ head msg]
applyGitPatch shouldPrompt p
putStrLn "Succesfully applied patches."
applyGitPatch :: Bool -> GitPatch -> IO ()
applyGitPatch shouldPrompt (GitPatch author date message changes) = do
let (name:descr) = map BC.unpack message
dateStr = formatDateTime "%Y%m%d%H%M%S" date
info <- patchinfo dateStr name (BC.unpack author) descr
withRepoLock [] $ RepoJob $ applyChanges shouldPrompt info changes
applyChanges :: forall p r u . (RepoPatch p, ApplyState (PrimOf p) ~ Tree, ApplyState p ~ Tree)
=> Bool -> PatchInfo -> [Change] -> Repository p r u r -> IO ()
applyChanges shouldPrompt info changes repo = do
(Sealed ps) <- unFreeLeft `fmap` changesToPrims changes
(prims :: FL p r x) <- return . fromPrims . sortCoalesceFL $ ps
let patch = infopatch info prims
_ <- tentativelyAddPatch repo GzipCompression (n2pia patch)
withSignalsBlocked $ do
applyAllOrNone ps
finalizeRepositoryChanges repo
where
applyAllOrNone :: FL (PrimOf p) r x -> IO ()
applyAllOrNone = applyAllOrNone' NilRL where
applyAllOrNone' :: RL (PrimOf p) r y -> FL (PrimOf p) y x -> IO ()
applyAllOrNone' _ NilFL = return ()
applyAllOrNone' applied (p :>: ps) = do
apply p `E.catch` \(_ :: SomeException) -> do
putStrLn $ "Rolling back after prim failed to apply: "
++ renderString (showPatch p)
apply $ invert applied
die "A prim did not apply, no changes from this patch have been made."
applyAllOrNone' (p :<: applied) ps
changesToPrims :: [Change] -> IO (FreeLeft (FL (PrimOf p)))
changesToPrims [] = return $ emptyGap NilFL
changesToPrims (c:cs) = do
cPrims <- case c of
(AddFile fp new) -> do
parentDirAdds <- addParentDirs fp
let initFile = freeGap $ addfile fp :>: hunk fp 1 [] new :>: NilFL
return $ joinGap (+>+) parentDirAdds initFile
(RmFile fp old) -> return $
freeGap $ hunk fp 1 old [] :>: rmfile fp :>: NilFL
(Hunk fp hash hunkChanges) -> do
testHunkHash fp hash
let tohunks (HunkChange line old new) =
joinGap (:>:) (freeGap $ hunk fp line old new)
return $ foldr tohunks (emptyGap NilFL) hunkChanges
csPrims <- changesToPrims cs
return $ joinGap (+>+) cPrims csPrims
addParentDirs :: FilePath -> IO (FreeLeft (FL (PrimOf p)))
addParentDirs fp = do
dirs <- dropWhileDirsExist $ getParents fp
let joiner d = joinGap (:>:) (freeGap $ adddir d)
return $ foldr joiner (emptyGap NilFL) dirs
getParents = tail . map joinPath . inits . splitPath . takeDirectory
dropWhileDirsExist :: [FilePath] -> IO [FilePath]
dropWhileDirsExist [] = return []
dropWhileDirsExist ds'@(d : ds) = do
exists <- doesDirectoryExist d
if exists then dropWhileDirsExist ds else return ds'
testHunkHash fp expectedHash = do
actualHash <- gitHashFile fp
when (actualHash /= expectedHash) $ do
continue <- if shouldPrompt
then promptYorn $ "WARNING: Hash of " ++ fp ++ " does not match patch"
++ "\nNo changes will be recorded, if the patch does not apply."
++ "\nContinue anyway?"
else return False
unless continue $ die . unwords $
["invalid hash of file", fp, "\nexpected:", show expectedHash
, "\nactual: ", show actualHash]
-- |@gitHashFile@ calculates the Git hash of a given file's current state.
gitHashFile :: FilePath -> IO GitHash
gitHashFile fp = do
exists <- doesFileExist fp
if exists
then do
fileContents <- BL.readFile fp
let len = BL.length fileContents
header = BLC.pack $ "blob " ++ show len ++ "\0"
toHash = B.concat . BL.toChunks $ header `BL.append` fileContents
return.GitHash $ sha1PS toHash
else return missingFileHash
missingFileHash, emptyFileHash :: GitHash
missingFileHash = GitHash $ replicate 40 '0'
emptyFileHash = GitHash "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
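-- For reference (restating what 'gitHashFile' computes): Git hashes a blob as
-- the sha1 of "blob " ++ show len ++ "\0" ++ contents.  For an empty file that
-- is sha1 "blob 0\0" = e69de29bb2d1d6434b8b29ae775ad8c2e48c5391, i.e.
-- 'emptyFileHash'; 'missingFileHash' (all zeroes) is the pseudo-hash Git
-- prints for the absent side of a file addition or deletion.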
parseGitEmail :: Handle -> IO [GitPatch]
parseGitEmail h = go (A.parse $ many' p_gitPatch) BC.empty h where
lex :: A.Parser b -> A.Parser b
lex p = p >>= \x -> A.skipSpace >> return x
lexString s = lex $ A.string (BC.pack s)
line = lex $ A.takeWhile (/= '\n')
    -- | @toEOL@ consumes up to the end of the line but, unlike @line@, it does
    -- not consume any leading whitespace on the following line.
toEOL = A.takeWhile (/= '\n') >>= \x -> A.char '\n' >> return x
optional :: (Alternative f, Monad f) => f a -> f (Maybe a)
optional p = Just `fmap` p <|> return Nothing
p_gitPatch = do
p_commitHeader
author <- p_author
date <- p_date
(msg : description) <- p_commitMsg
let commitLog = removePatchHeader msg : description
_ <- specialLineDelimited BC.empty id -- skip the diff summary
diffs <- many' p_diff
_ <- p_endMarker
return $ GitPatch author date commitLog diffs
removePatchHeader msg = if BC.pack "[PATCH" `BC.isPrefixOf` msg
then BC.drop 2 $ BC.dropWhile (/= ']') msg
else msg
p_endMarker = do
lexString "--" >> toEOL
line -- This line contains a git version number.
p_commitHeader = do
lexString "From"
p_hash
lexString "Mon Sep 17 00:00:00 2001"
p_author = lexString "From:" >> line
p_date = do
lexString "Date:"
dateStr <- BC.unpack `fmap` line
let mbDate = parseDateTime "%a, %e %b %Y %k:%M:%S %z" dateStr
case mbDate of
Nothing -> error $ "Unexpected dateformat: " ++ dateStr
Just date' -> return date'
p_commitMsg = do
lexString "Subject:"
p_logLines
p_logLines = specialLineDelimited (BC.pack "---") reverse
specialLineDelimited = specialLineDelimited' [] where
specialLineDelimited' ls endCase endMod = do
perhaps <- toEOL
if perhaps == endCase
then return . endMod $ ls
else specialLineDelimited' (perhaps : ls) endCase endMod
p_hash = (GitHash . BC.unpack) `fmap` lex (A.takeWhile1
(A.inClass "0-9a-fA-F"))
p_diff = do
fn <- BC.unpack `fmap` p_diffHeader
-- TODO: handle new file modes?
mbAddRemove <- optional $ p_addFile <|> p_removeFile
(oldIndex, newIndex, _) <- p_indexDiff
case mbAddRemove of
Nothing -> Hunk fn oldIndex `fmap` p_unifiedDiff
Just (addRem, _) -> if addRem == BC.pack "new"
then if newIndex == emptyFileHash
then return $ AddFile fn []
else p_allAddLines >>= \ls -> return $ AddFile fn ls
else p_allRemLines >>= \ls -> return $ RmFile fn ls
p_addFile = p_addRemoveFile "new"
p_removeFile = p_addRemoveFile "deleted"
p_addRemoveFile x = do
x' <- lexString x
lexString "file mode"
mode <- p_mode
return (x', mode)
p_unifiedFiles = lexString "---" >> line >> lexString "+++" >> line
p_unifiedDiff = do
p_unifiedFiles
many' p_unifiedHunk
p_unifiedHunk = do
lexString "@@ -"
(_, oldLength) <- p_linePosLengthPair
A.char '+'
(newPos, newLength) <- p_linePosLengthPair
toEOL
(oldLines, newLines) <- p_diffLines oldLength newLength
checkLength oldLength oldLines
checkLength newLength newLines
-- We use new pos, since old pos assumes that no other hunks have
-- been applied to the original file, whereas we want our changes to
-- be cumulative.
return . minimiseHunkChange $ HunkChange newPos oldLines newLines
where
checkLength expectedLen list = let realLen = length list in
unless (realLen == expectedLen) $
error . unwords $ ["Malformed diff: expected length"
, show expectedLen , "got" , show realLen, "in"
, show list]
-- |@minimiseHunkChange@ removes any leading equal lines
-- (incrementing the line number appropriately) and any trailing
-- equal lines.
minimiseHunkChange x@(HunkChange _ _ _) = HunkChange p' o'' n'' where
x'@(HunkChange p' _ _) = dropWhileEqual x
(HunkChange _ o'' n'') =
reverseHunkChange . dropWhileEqual . reverseHunkChange $ x'
reverseHunkChange (HunkChange p o n) =
HunkChange p (reverse o) (reverse n)
dropWhileEqual orig@(HunkChange _ [] _) = orig
dropWhileEqual orig@(HunkChange _ _ []) = orig
dropWhileEqual orig@(HunkChange n (p : ps) (q : qs)) = if p == q
then dropWhileEqual $ HunkChange (n + 1) ps qs
else orig
p_diffLines oldCount newCount =
p_diffLines' (oldCount, newCount) ([], []) where
p_diffLines' (0, 0) (olds, news) =
return (reverse olds, reverse news)
p_diffLines' (o, n) (olds, news) = do
l <- p_addLine <|> p_removeLine <|> p_contextLine
case l of
(ContextLine l') ->
p_diffLines' (o - 1, n - 1) (l' : olds, l' : news)
(AddedLine l') -> p_diffLines' (o, n - 1) (olds, l' : news)
(RemovedLine l') -> p_diffLines' (o - 1, n) (l' : olds, news)
p_addLine = A.char '+' >> AddedLine `fmap` toEOL
p_removeLine = A.char '-' >> RemovedLine `fmap` toEOL
p_contextLine = A.char ' ' >> ContextLine `fmap` toEOL
p_linePosLengthPair = do
l <- p_readLineInt
A.char ','
s <- lex p_readLineInt
-- If the chunksize is 0, then we need to increment the offset
return (if s == 0 then l + 1 else l, s)
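    -- For example (illustrative), a header such as "@@ -5,0 +6,2 @@" (a pure
    -- insertion after line 5) yields (6, 0) for the old side and (6, 2) for
    -- the new side, so both sides agree on where the new lines go.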
p_readLineInt =
(fst . fromJust . BC.readInt) `fmap` A.takeWhile (A.inClass "0-9")
p_allAddLines = do
[HunkChange _ [] new] <- p_unifiedDiff
return new
p_allRemLines = do
[HunkChange _ old []] <- p_unifiedDiff
return old
p_diffHeader = do
lexString "diff --git"
(oldName, _) <- splitNames `fmap` line
return . BC.tail . BC.dropWhile (not . isPathSeparator) $ oldName
where
      -- Something of the form "prefixA/foo prefixB/foo", where foo may
      -- contain spaces, so we have to split the line down the middle.
splitNames l = let (a,b) = BC.splitAt (BC.length l `div` 2) l
in (a, BC.tail b)
p_indexDiff = do
lexString "index"
oldHash <- p_hash
lexString ".."
newHash <- p_hash
mbMode <- optional p_mode
return (oldHash, newHash, mbMode)
p_mode = lex $ A.takeWhile (A.inClass "0-9")
go :: (B.ByteString -> A.Result [GitPatch]) -> B.ByteString -> Handle
-> IO [GitPatch]
go parser rest handle = do
chunk <- if B.null rest then liftIO $ B.hGet handle (64 * 1024)
else return rest
go_chunk parser chunk handle
go_chunk parser chunk handle =
case parser chunk of
A.Done _ result -> return result
A.Partial cont -> go cont B.empty handle
A.Fail _ ctx err -> do
let ch = "\n=== chunk ===\n" ++ BC.unpack chunk ++
"\n=== end chunk ===="
fail $ unwords ["Error parsing stream.", err, ch, "\nContext:"
, show ctx]
| nh2/darcs-fastconvert | Patch.hs | bsd-3-clause | 14,393 | 8 | 28 | 3,513 | 4,419 | 2,283 | 2,136 | 313 | 17 |
{-# LANGUAGE QuasiQuotes, GADTs #-}
module Main where
import Hydra
import Hydra.Solver.Sundials
type Pin = (Double,Double)
type Time = Double
twoPin :: SR ((Pin,Pin),Double)
twoPin = [rel| (((p_i,p_v),(n_i,n_v)),u) ->
p_v - n_v = u
p_i + n_i = 0
|]
resistor :: Double -> SR (Pin,Pin)
resistor r = [rel| ((p_i, p_v),(n_i, n_v)) ->
local u
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
$r$ * p_i = u
|]
iInductor :: Double -> Double -> SR (Pin,Pin)
iInductor i0 l = [rel| ((p_i, p_v),(n_i, n_v)) ->
local u
init p_i = $i0$
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
$l$ * (der p_i) = u
|]
iCapacitor :: Double -> Double -> SR (Pin,Pin)
iCapacitor u0 c = [rel| ((p_i, p_v),(n_i, n_v)) ->
local u
init u = $u0$
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
$c$ * (der u) = p_i
|]
vSourceAC :: Double -> Double -> SR (Pin,Pin)
vSourceAC v f = [rel| ((p_i, p_v), (n_i, n_v)) ->
local u
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
u = $v$ * sin (2 * $pi$ * $f$ * time)
|]
ground :: SR Pin
ground = [rel| (_,p_v) ->
p_v = 0
|]
wire :: SR (Pin,Pin)
wire = [rel| ((p_i,p_v),(n_i,n_v)) ->
local u
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
u = 0
|]
noWire :: SR (Pin,Pin)
noWire = [rel| ((p_i,p_v),(n_i,n_v)) ->
local u
$twoPin$ <> (((p_i,p_v),(n_i,n_v)),u)
p_i = 0
|]
diode :: Bool -> SR (Pin,Pin)
diode False = switch noWire [fun| ((_,p_v),(_,n_v)) -> p_v - n_v |] (\_ -> diode True)
diode True = switch wire [fun| ((p_i,_),(_,_)) -> p_i |] (\_ -> diode False)
openedDiode :: SR (Pin,Pin)
openedDiode = diode False
closedDiode :: SR (Pin,Pin)
closedDiode = diode True
serial :: SR (Pin,Pin) -> SR (Pin,Pin) -> SR (Pin,Pin)
serial sr1 sr2 = [rel| ((p_i, p_v),(n_i, n_v)) ->
local p1_i
local p1_v
local n1_i
local n1_v
$sr1$ <> ((p1_i, p1_v), (n1_i, n1_v))
local p2_i
local p2_v
local n2_i
local n2_v
$sr2$ <> ((p2_i, p2_v), (n2_i, n2_v))
(- p_i) + p1_i = 0
p_v = p1_v
n1_i + p2_i = 0
n1_v = p2_v
n2_i + (- n_i) = 0
n2_v = n_v
|]
parallel :: SR (Pin,Pin) -> SR (Pin,Pin) -> SR (Pin,Pin)
parallel sr1 sr2 = [rel| ((p_i, p_v), (n_i, n_v)) ->
local p1_i
local p1_v
local n1_i
local n1_v
$sr1$ <> ((p1_i, p1_v), (n1_i, n1_v))
local p2_i
local p2_v
local n2_i
local n2_v
$sr2$ <> ((p2_i, p2_v), (n2_i, n2_v))
(- p_i) + p1_i + p2_i = 0
p_v = p1_v
p1_v = p2_v
(- n_i) + n1_i + n2_i = 0
n_v = n1_v
n1_v = n2_v
|]
groundedCircuit :: SR (Pin,Pin) -> SR (Pin,Pin) -> SR ()
groundedCircuit sr1 sr2 = [rel| () ->
local p1_i
local p1_v
local n1_i
local n1_v
$sr1$ <> ((p1_i, p1_v), (n1_i, n1_v))
local p2_i
local p2_v
local n2_i
local n2_v
$sr2$ <> ((p2_i, p2_v), (n2_i, n2_v))
local gp_i
local gp_v
$ground$ <> (gp_i,gp_v)
p1_i + p2_i = 0
p1_v + p2_v = 0
n1_i + n2_i + gp_i = 0
n1_v = n2_v
n2_v = gp_v
|]
serialise :: [SR (Pin,Pin)] -> SR (Pin,Pin)
serialise = foldr serial wire
parallelise :: [SR (Pin,Pin)] -> SR (Pin,Pin)
parallelise = foldr parallel noWire
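-- Unfolding the folds above (an illustration, not from the original source):
--
-- > serialise   [resistor 1, resistor 2] = resistor 1 `serial`   (resistor 2 `serial`   wire)
-- > parallelise [resistor 1, resistor 2] = resistor 1 `parallel` (resistor 2 `parallel` noWire)
--
-- so a plain wire is the unit of serial composition and an open circuit
-- (noWire) the unit of parallel composition.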
-- This is a circuit from our paper
simpleCircuit1 :: SR ()
simpleCircuit1 =
groundedCircuit (vSourceAC 1 1) (parallel (serial (resistor 1) (iCapacitor 0 1)) (iInductor 0 1))
-- Figure 7.1 in Cellier's book; Index-0 system, i.e. no algebraic loops or structural singularities
simpleCircuit2 :: SR ()
simpleCircuit2 =
groundedCircuit (vSourceAC 1 1)
(parallel (serial (resistor 1) (iCapacitor 0 1))
(serial (resistor 1) (iInductor 0 1))
)
halfWaveRectifier :: SR ()
halfWaveRectifier =
groundedCircuit (vSourceAC 1 1)
(serial closedDiode (resistor 1))
-- Figure 9.27 in Cellier's Book
halfWaveRectifierWithCapacitor :: SR ()
halfWaveRectifierWithCapacitor =
groundedCircuit (vSourceAC 1 1)
(serial (serial (resistor 1) closedDiode)
(parallel (iCapacitor 0 1) (resistor 1))
)
-- Figure 9.31 in Cellier's Book
halfWaveRectifierWithCapacitorAndInductor :: SR ()
halfWaveRectifierWithCapacitorAndInductor =
groundedCircuit (vSourceAC 1 1)
(serialise [ iInductor 0 1
, resistor 1
, closedDiode
, parallel (iCapacitor 0 1) (resistor 1)
]
)
main :: IO ()
main = simulate experimentDefault{solver = sundials} simpleCircuit1
-- Figure 7.5 in Cellier's book; Index-1 system, i.e. an algebraic loop without structural singularities
-- simpleCircuit3 :: SR (Double,Double)
-- simpleCircuit3 = [$hydra|
-- sigrel (i,u) ->
-- init i = 0
-- $resistor 1$ <> ((r1p_i, r1p_v), (r1n_i, r1n_v))
-- $resistor 1$ <> ((r2p_i, r2p_v), (r2n_i, r2n_v))
-- $resistor 1$ <> ((r3p_i, r3p_v), (r3n_i, r3n_v))
-- $inductor 1$ <> ((lp_i, lp_v), (ln_i, ln_v))
-- $vSourceAC 1 1$ <> ((acp_i, acp_v), (acn_i, acn_v))
-- $ground$ <> (gp_i,gp_v)
-- connect acp_i r1p_i lp_i
-- connect acp_v r1p_v lp_v
-- connect r1n_i r2p_i r3p_i
-- connect r1n_v r2p_v r3p_v
-- connect acn_i r3n_i r2n_i ln_i gp_i
-- connect acn_v r3n_v r2n_v ln_v gp_v
-- i = acp_i
-- u = acp_v - acn_v
-- |]
-- -- Figure 7.14 in Cellier's book; Index-2 system, i.e. algebraic loops and structural singularities
-- -- as expected, the DASSLC solver fails here
-- simpleCircuit4 :: SR (Double,Double)
-- simpleCircuit4 = [$hydra|
-- sigrel (i,u) ->
-- init i = 0
-- $resistor 1$ <> ((r1p_i, r1p_v), (r1n_i, r1n_v))
-- $resistor 1$ <> ((r2p_i, r2p_v), (r2n_i, r2n_v))
-- $capacitor 1$ <> ((cp_i, cp_v), (cn_i, cn_v))
-- $inductor 1$ <> ((lp_i, lp_v), (ln_i, ln_v))
-- $vSourceAC 1 1$ <> ((acp_i, acp_v), (acn_i, acn_v))
-- $ground$ <> (gp_i,gp_v)
-- connect acp_i r1p_i cp_i
-- connect acp_v r1p_v cp_v
-- connect r1n_i r2p_i lp_i
-- connect r1n_v r2p_v lp_v
-- connect acn_i ln_i r2n_i cn_i gp_i
-- connect acn_v ln_v r2n_v cn_v gp_v
-- i = acp_i
-- u = acp_v - acn_v
-- |] | giorgidze/Hydra | examples/Electronics.hs | bsd-3-clause | 6,245 | 0 | 11 | 1,774 | 1,028 | 592 | 436 | 65 | 1 |
-- A significant part of this code has been borrowed from other
-- hakyll users, mostly Jasper through his site and hakyll's,
-- but also skybluetrades.net and chromaticleaves.com
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Data.Char
import Data.Maybe (catMaybes)
import Data.Monoid (mappend, (<>), mconcat, mempty)
import Data.Time.Format (formatTime, defaultTimeLocale)
import Data.Time.Clock (getCurrentTime)
import Hakyll
import Hakyll.Web.Tags
import Text.Blaze.Html (toHtml, toValue, (!))
import Text.Blaze.Html.Renderer.String (renderHtml)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Pandoc
import Text.Pandoc.Options
main :: IO ()
main = do
year <- getCurrentYear
hakyllWith config $ do
match "favicon.ico" $ do
route idRoute
compile copyFileCompiler
match "images/*" $ do
route idRoute
compile copyFileCompiler
match "servant/*" $ do
route idRoute
compile copyFileCompiler
match "js/*" $ do
route idRoute
compile copyFileCompiler
match "css/*.css" $ do
route idRoute
compile compressCssCompiler
match "css/*.ttf" $ do
route idRoute
compile copyFileCompiler
match "templates/*" $ compile templateCompiler
-- build tags
tags <- buildTags "posts/*" (fromCapture "tags/*.html")
match "posts/*" $ do
route $ setExtension "html"
compile $ myPandocCompiler
>>= loadAndApplyTemplate "templates/post.html" (postCtx tags)
>>= saveSnapshot "content"
>>= loadAndApplyTemplate "templates/default.html" (defaultContext `mappend` yearCtx year)
>>= relativizeUrls
match "pages/*" $ do
route $ gsubRoute "pages/" (const "") `composeRoutes`
setExtension ".html"
compile $ myPandocCompiler
>>= loadAndApplyTemplate "templates/page.html" defaultContext
>>= loadAndApplyTemplate "templates/default.html" (defaultContext `mappend` yearCtx year)
>>= relativizeUrls
create ["rss.xml"] $ do
route idRoute
compile $ do
loadAllSnapshots "posts/*" "content"
>>= fmap (take 10) . recentFirst
>>= renderRss myFeedConfiguration feedCtx
-- create a listing of all posts, most recent first
create ["posts.html"] $ do
route idRoute
compile $ do
posts <- recentFirst =<< loadAll "posts/*"
let ctx = constField "title" "Posts" <>
listField "posts" (postCtx tags) (return posts) <>
defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/posts.html" ctx
>>= loadAndApplyTemplate "templates/default.html" (ctx `mappend` yearCtx year)
>>= relativizeUrls
-- Post tags
tagsRules tags $ \tag pattern -> do
let title = "Posts tagged " ++ tag
-- Copied from posts, need to refactor
route idRoute
compile $ do
posts <- recentFirst =<< loadAll pattern
let ctx = constField "title" title <>
listField "posts" (postCtx tags) (return posts) <>
defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/posts.html" ctx
>>= loadAndApplyTemplate "templates/default.html" (ctx `mappend` yearCtx year)
>>= relativizeUrls
match "index.html" $ do
route idRoute
compile $ do
posts <- fmap (take 5) . recentFirst =<< loadAll "posts/*"
let indexCtx =
listField "posts" (postCtx tags) (return posts) <>
field "tagcloud" (\_ -> myTagCloud tags) <>
defaultContext
getResourceBody
>>= applyAsTemplate indexCtx
>>= loadAndApplyTemplate "templates/default.html" indexCtx
>>= relativizeUrls
-- -----------------------------------------------------------------------------
-- * Contexts
-- | Creates a "year" context from a string representation of the current year
yearCtx :: String -> Context String
yearCtx year = field "year" $ \_ -> return year
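-- With this context in scope a template can refer to $year$; e.g. a footer in
-- templates/default.html could read (illustrative):
--
-- > <footer>&copy; $year$</footer>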
feedCtx :: Context String
feedCtx = mconcat
[ bodyField "description"
, dateField "date" "%B %e, %Y"
, defaultContext
]
postCtx :: Tags -> Context String
postCtx tags = mconcat
[ modificationTimeField "mtime" "%e %b %Y"
, dateField "date" "%B %e, %Y"
, myTagsField "tags" tags
, defaultContext
]
pageCtx :: Context String
pageCtx = mconcat
[ modificationTimeField "mtime" "%e %b %Y"
, defaultContext
]
-- -----------------------------------------------------------------------------
-- * Feed configuration
-- | Holds my feed's configuration
myFeedConfiguration :: FeedConfiguration
myFeedConfiguration = FeedConfiguration
{ feedTitle = "Alp Mestanogullari's blog"
, feedDescription = "From Hask Till Dawn"
, feedAuthorName = "Alp Mestanogullari"
, feedAuthorEmail = "[email protected]"
, feedRoot = "http://alpmestan.com"
}
-- -----------------------------------------------------------------------------
-- * Compilers
-- | Creates a compiler to render a list of posts for a given pattern, context,
-- and sorting/filtering function
postList :: Pattern
-> Context String
-> ([Item String] -> Compiler [Item String])
-> Compiler String
postList pattern postCtx sortFilter = do
posts <- sortFilter =<< loadAll pattern
itemTpl <- loadBody "templates/post-item.html"
applyTemplateList itemTpl postCtx posts
-- -----------------------------------------------------------------------------
-- * Helpers
--
getCurrentYear :: IO String
getCurrentYear = formatTime defaultTimeLocale "%Y" <$> getCurrentTime
myTagCloud :: Tags -> Compiler String
myTagCloud tags =
renderTagCloud 80 250 tags
myTagsField :: String -> Tags -> Context a
myTagsField =
tagsFieldWith getTags renderOneTag $ \tagLinks -> do
H.ul ! A.class_ "list-inline" $ do
H.li $ H.i ! A.class_ "fa fa-tags" $ mempty
sequence_ tagLinks
renderOneTag :: String -> Maybe FilePath -> Maybe H.Html
renderOneTag _ Nothing = Nothing
renderOneTag tag (Just filepath) =
Just $ H.li $
H.a ! A.href (toValue $ toUrl filepath) $ toHtml tag
--------------------------------------------------------------------------------
config :: Configuration
config = defaultConfiguration
{ deployCommand = "rsync -avz -e ssh ./_site/ \
\ [email protected]:public_html/"
}
myPandocCompiler' :: Maybe String -> Compiler (Item String)
myPandocCompiler' withToc =
pandocCompilerWith defaultHakyllReaderOptions $
case withToc of
Just x | map toLower x `elem` ["true", "yes"] -> writerWithToc
| otherwise -> writerOpts
Nothing -> writerOpts
where writerOpts = defaultHakyllWriterOptions
{ writerReferenceLinks = True
, writerSectionDivs = True
, writerHtml5 = True
, writerHTMLMathMethod = MathJax "http://cdn.mathjax.org/mathjax/latest/MathJax.js"
, writerColumns = 100
}
writerWithToc =
writerOpts { writerTableOfContents = True
, writerTemplate = Just "$if(toc)$<div id=\"toc\"><h3>Table of contents</h3>$toc$</div>$endif$\n$body$"
}
myPandocCompiler :: Compiler (Item String)
myPandocCompiler = do
ident <- getUnderlying
myPandocCompiler' =<< getMetadataField ident "toc"
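-- A post opts into the table of contents from its metadata block, e.g.
-- (illustrative):
--
-- > ---
-- > title: Some post
-- > toc: yes
-- > ---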
--------------------------------------------------------------------------------
| alpmestan/alpmestan.com | site.hs | bsd-3-clause | 8,405 | 15 | 24 | 2,642 | 1,654 | 823 | 831 | 165 | 2 |
----------------------------------------------------
-- --
-- HyLoRes.Clause.SelFunctions: --
-- Selection functions and selection functions --
-- generators --
-- --
----------------------------------------------------
{-
Copyright (C) HyLoRes 2002-2007 - See AUTHORS file
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
USA.
-}
module HyLoRes.Clause.SelFunction(
SelFunc,
nil, -- nil selection function
firstMatch, -- makes a selection function given some boolean criteria
isSelectable,
makeClass, minInClass, minLevelInClass,
SelFuncString, fromSelFuncString
)
where
import Data.List( find, sortBy )
import Data.Maybe( listToMaybe )
import HyLoRes.Formula.TypeLevel ( Spec(..) )
import HyLoRes.Formula( level, label, subf, specialize )
import HyLoRes.Formula.NF ( AtFormulaNF, fromNF )
{-
    A selection function must pick a formula from an ascending list of
    eligible at-formulas
-}
type SelFunc = [AtFormulaNF] -> Maybe AtFormulaNF
isSelectable :: AtFormulaNF -> Bool
isSelectable f = case (specialize . fromNF) f of
AtNegNom{} -> True
AtNegProp{} -> True
AtConj{} -> True
AtDisj{} -> True
AtDiamF{} -> True
AtBoxF{} -> True
AtDownF{} -> True
_ -> False
nil :: SelFunc
nil = const Nothing
firstMatch :: (AtFormulaNF -> Bool) -> SelFunc
firstMatch = find
type SelFuncString = String
{- Given:
- a list of boolean functions l
returns a function that returns true whenever any of the
functions in l return true
-}
makeClass :: [ (AtFormulaNF -> Bool) ] -> AtFormulaNF -> Bool
makeClass l f = any ($ f) l
{- Given:
- a list l of boolean functions
returns a selection function such that, if it selects a formula f of
a clause c, that means that, for some integer n:
1. f is the smallest formula that satisfies the n-th function of l
2. no formula in c satisfies the m-th function of l, for m < n
-}
minInClass:: [ AtFormulaNF -> Bool ] -> SelFunc
minInClass l fs = listToMaybe [f | p <- l, f <- fs, p f]
{- Given:
- a list l of boolean functions
returns a selection function such that, if it selects a formula f of
a clause c, that means that, for some integer n:
1. f is the smallest formula in the lowest level (i.e. the level of the
prefix is closest to 0) that satisfies the n-th function of l
2. no formula in c satisfies the m-th function of l, for m < n
-}
minLevelInClass:: [ AtFormulaNF -> Bool ] -> SelFunc
minLevelInClass l fs = listToMaybe $ sortBy minLevelFirst [f | p <- l,
f <- fs,
p f]
where minLevelFirst a b = minLevelFirstF (fromNF a) (fromNF b)
minLevelFirstF a b = case compare (labelLevel $ a) (labelLevel $ b) of
EQ -> compare (subf a) (subf b)
neq -> neq
labelLevel = level . label
{- Given:
- a string that describes how to build a selection function
using minInClass or minLevelInClass
   returns an appropriate selection function.
-}
fromSelFuncString :: SelFuncString -> SelFunc
fromSelFuncString ('L':xs) = buildUsing minLevelInClass xs
fromSelFuncString xs = buildUsing minInClass xs
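-- Example (illustrative): fromSelFuncString "nd" selects, from the ascending
-- list of eligible formulas, the first negated nominal/proposition if the
-- clause contains one; failing that, the first diamond formula; otherwise it
-- selects nothing.  A leading 'L', as in "Lnd", uses minLevelInClass instead,
-- preferring formulas whose prefix has the lowest level.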
buildUsing :: ([AtFormulaNF -> Bool] -> SelFunc) -> SelFuncString -> SelFunc
buildUsing mkFun = mkFun . map (\f -> f . fromNF) . map c2f
where c2f 'n' f = case specialize f of
{ AtNegNom{} -> True; AtNegProp{} -> True; _ -> False }
c2f 'a' f = case specialize f of { AtConj{} -> True; _ -> False }
c2f 'o' f = case specialize f of { AtDisj{} -> True; _ -> False }
c2f 'd' f = case specialize f of { AtDiamF{} -> True; _ -> False }
c2f 'b' f = case specialize f of { AtBoxF{} -> True; _ -> False }
c2f _ _ = error "Error in string specifying selection function.\n"
| nevrenato/HyLoRes_Source | src/HyLoRes/Clause/SelFunction.hs | gpl-2.0 | 4,853 | 0 | 12 | 1,464 | 852 | 463 | 389 | 53 | 12 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module: TestMaker
Description: Handles command-line parsing.
Copyright: (c) Chris Godbout
License: GPLv3
Maintainer: [email protected]
Stability: experimental
Portability: portable
-}
module Main where
import qualified Data.Text as T
import Data.Version (showVersion)
import Options.Applicative
import Paths_testmaker_cli (version)
import TestMaker
-- | Data type to hold the different commands
data Command
= TestGen { yamlFile :: String
, dbname :: Maybe String -- ^ Database filename, including path.
} -- ^ The command to create tests. Short answer and multiple-choice are technically both optional, but you should probably do at least one.
| MakeStatic { freezeTex :: String -- ^ Exam tex file to freeze.
} -- ^ The command to "freeze" an exam and make it static.
-- | This is the command that handles the command-line options.
testMaker :: Command -> IO ()
testMaker (TestGen y db) = writeExamsFromConfig db y
testMaker (MakeStatic texfile) = do frozen <- T.unpack <$> freeze texfile
putStrLn frozen
-- | Parse options for the testgen command
parseTestGen :: ParserInfo Command
parseTestGen = flip info helpInfo . (helper <*> ) $ TestGen
<$> argument str (metavar "<config-file>"
<> help "Exam configuration file (in Yaml)")
<*> optional (strOption (long "db"
<> metavar "<db-filename>"
<> help "Database file name. This will override the database given in the config file, if it's given."))
where helpInfo =
progDesc "Create a test from a list of question IDs"
<> fullDesc
<> header ("TestMaker " ++ showVersion version)
parseMakeStatic :: ParserInfo Command
parseMakeStatic = flip info helpInfo . (helper <*>) $ MakeStatic
<$> argument str (metavar "<tex>"
<> help "Filename of test to be \"frozen\"")
where helpInfo =
progDesc "Make a dynamic test into a static one (a.k.a \"freeze\" it)"
<> fullDesc
<> header ("TestMaker " ++ showVersion version)
parseCommands :: ParserInfo Command
parseCommands = (flip info) (header ("TestMaker " ++ showVersion version) <> progDesc "Generate dynamic tests." <>fullDesc) $ helper <*> (subparser $
command "testgen" parseTestGen
-- <> command "get-tag" parseGetTag
-- <> command "print-db" parsePrintDB
<> command "freeze" parseMakeStatic)
-- | Main
main :: IO ()
main = execParser parseCommands >>= testMaker
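-- Example invocations (illustrative; the actual executable name comes from
-- the cabal file):
--
-- > testmaker testgen exam.yaml --db questions.db
-- > testmaker freeze exam1.tex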
| mathologist/hTestMaker | testmaker-cli/src/Main.hs | gpl-3.0 | 2,634 | 0 | 14 | 682 | 455 | 237 | 218 | 40 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
module BuildTyCl (
buildSynonymTyCon,
buildFamilyTyCon,
buildAlgTyCon,
buildDataCon,
buildPatSyn,
TcMethInfo, buildClass,
distinctAbstractTyConRhs, totallyAbstractTyConRhs,
mkNewTyConRhs, mkDataTyConRhs,
newImplicitBinder
) where
#include "HsVersions.h"
import IfaceEnv
import FamInstEnv( FamInstEnvs )
import TysWiredIn( isCTupleTyConName )
import DataCon
import PatSyn
import Var
import VarSet
import BasicTypes
import Name
import MkId
import Class
import TyCon
import Type
import Id
import Coercion
import TcType
import DynFlags
import TcRnMonad
import UniqSupply
import Util
import Outputable
------------------------------------------------------
buildSynonymTyCon :: Name -> [TyVar] -> [Role]
-> Type
-> Kind -- ^ Kind of the RHS
-> TcRnIf m n TyCon
buildSynonymTyCon tc_name tvs roles rhs rhs_kind
= return (mkSynonymTyCon tc_name kind tvs roles rhs)
where kind = mkPiKinds tvs rhs_kind
buildFamilyTyCon :: Name -> [TyVar]
-> FamTyConFlav
-> Kind -- ^ Kind of the RHS
-> TyConParent
-> TcRnIf m n TyCon
buildFamilyTyCon tc_name tvs rhs rhs_kind parent
= return (mkFamilyTyCon tc_name kind tvs rhs parent)
where kind = mkPiKinds tvs rhs_kind
------------------------------------------------------
distinctAbstractTyConRhs, totallyAbstractTyConRhs :: AlgTyConRhs
distinctAbstractTyConRhs = AbstractTyCon True
totallyAbstractTyConRhs = AbstractTyCon False
mkDataTyConRhs :: [DataCon] -> AlgTyConRhs
mkDataTyConRhs cons
= DataTyCon {
data_cons = cons,
is_enum = not (null cons) && all is_enum_con cons
-- See Note [Enumeration types] in TyCon
}
where
is_enum_con con
| (_tvs, theta, arg_tys, _res) <- dataConSig con
= null theta && null arg_tys
mkNewTyConRhs :: Name -> TyCon -> DataCon -> TcRnIf m n AlgTyConRhs
-- ^ Monadic because it makes a Name for the coercion TyCon
-- We pass the Name of the parent TyCon, as well as the TyCon itself,
-- because the latter is part of a knot, whereas the former is not.
mkNewTyConRhs tycon_name tycon con
= do { co_tycon_name <- newImplicitBinder tycon_name mkNewTyCoOcc
; let co_tycon = mkNewTypeCo co_tycon_name tycon etad_tvs etad_roles etad_rhs
; traceIf (text "mkNewTyConRhs" <+> ppr co_tycon)
; return (NewTyCon { data_con = con,
nt_rhs = rhs_ty,
nt_etad_rhs = (etad_tvs, etad_rhs),
nt_co = co_tycon } ) }
-- Coreview looks through newtypes with a Nothing
-- for nt_co, or uses explicit coercions otherwise
where
tvs = tyConTyVars tycon
roles = tyConRoles tycon
inst_con_ty = applyTys (dataConUserType con) (mkTyVarTys tvs)
rhs_ty = ASSERT( isFunTy inst_con_ty ) funArgTy inst_con_ty
-- Instantiate the data con with the
-- type variables from the tycon
-- NB: a newtype DataCon has a type that must look like
-- forall tvs. <arg-ty> -> T tvs
-- Note that we *can't* use dataConInstOrigArgTys here because
-- the newtype arising from class Foo a => Bar a where {}
-- has a single argument (Foo a) that is a *type class*, so
-- dataConInstOrigArgTys returns [].
etad_tvs :: [TyVar] -- Matched lazily, so that mkNewTypeCo can
etad_roles :: [Role] -- return a TyCon without pulling on rhs_ty
etad_rhs :: Type -- See Note [Tricky iface loop] in LoadIface
(etad_tvs, etad_roles, etad_rhs) = eta_reduce (reverse tvs) (reverse roles) rhs_ty
eta_reduce :: [TyVar] -- Reversed
-> [Role] -- also reversed
-> Type -- Rhs type
-> ([TyVar], [Role], Type) -- Eta-reduced version
-- (tyvars in normal order)
eta_reduce (a:as) (_:rs) ty | Just (fun, arg) <- splitAppTy_maybe ty,
Just tv <- getTyVar_maybe arg,
tv == a,
not (a `elemVarSet` tyVarsOfType fun)
= eta_reduce as rs fun
eta_reduce tvs rs ty = (reverse tvs, reverse rs, ty)
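    -- A worked example (not from the original source): given
    --     newtype Wrap a b = MkWrap (Either a b)
    -- the rhs type is (Either a b); eta_reduce strips the trailing b and a,
    -- yielding ([], [], Either), so the newtype axiom relates the type
    -- constructors themselves, Wrap ~ Either.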
------------------------------------------------------
buildDataCon :: FamInstEnvs
-> Name -> Bool
-> [HsSrcBang]
-> Maybe [HsImplBang]
-- See Note [Bangs on imported data constructors] in MkId
-> [Name] -- Field labels
-> [TyVar] -> [TyVar] -- Univ and ext
-> [(TyVar,Type)] -- Equality spec
-> ThetaType -- Does not include the "stupid theta"
-- or the GADT equalities
-> [Type] -> Type -- Argument and result types
-> TyCon -- Rep tycon
-> TcRnIf m n DataCon
-- A wrapper for DataCon.mkDataCon that
-- a) makes the worker Id
-- b) makes the wrapper Id if necessary, including
-- allocating its unique (hence monadic)
buildDataCon fam_envs src_name declared_infix src_bangs impl_bangs field_lbls
univ_tvs ex_tvs eq_spec ctxt arg_tys res_ty rep_tycon
= do { wrap_name <- newImplicitBinder src_name mkDataConWrapperOcc
; work_name <- newImplicitBinder src_name mkDataConWorkerOcc
-- This last one takes the name of the data constructor in the source
-- code, which (for Haskell source anyway) will be in the DataName name
-- space, and puts it into the VarName name space
; us <- newUniqueSupply
; dflags <- getDynFlags
; let
stupid_ctxt = mkDataConStupidTheta rep_tycon arg_tys univ_tvs
data_con = mkDataCon src_name declared_infix
src_bangs field_lbls
univ_tvs ex_tvs eq_spec ctxt
arg_tys res_ty rep_tycon
stupid_ctxt dc_wrk dc_rep
dc_wrk = mkDataConWorkId work_name data_con
dc_rep = initUs_ us (mkDataConRep dflags fam_envs wrap_name
impl_bangs data_con)
; return data_con }
-- The stupid context for a data constructor should be limited to
-- the type variables mentioned in the arg_tys
-- ToDo: Or functionally dependent on?
-- This whole stupid theta thing is, well, stupid.
mkDataConStupidTheta :: TyCon -> [Type] -> [TyVar] -> [PredType]
mkDataConStupidTheta tycon arg_tys univ_tvs
| null stupid_theta = [] -- The common case
| otherwise = filter in_arg_tys stupid_theta
where
tc_subst = zipTopTvSubst (tyConTyVars tycon) (mkTyVarTys univ_tvs)
stupid_theta = substTheta tc_subst (tyConStupidTheta tycon)
-- Start by instantiating the master copy of the
-- stupid theta, taken from the TyCon
arg_tyvars = tyVarsOfTypes arg_tys
in_arg_tys pred = not $ isEmptyVarSet $
tyVarsOfType pred `intersectVarSet` arg_tyvars
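-- A worked example (illustrative): given
--     data Eq a => T a b = MkT b | MkT2 a b
-- the stupid theta of T is (Eq a).  For MkT the argument types mention only
-- b, so the filtered context is empty; MkT2 mentions a, so it keeps (Eq a).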
------------------------------------------------------
buildPatSyn :: Name -> Bool
-> (Id,Bool) -> Maybe (Id, Bool)
-> ([TyVar], ThetaType) -- ^ Univ and req
-> ([TyVar], ThetaType) -- ^ Ex and prov
-> [Type] -- ^ Argument types
-> Type -- ^ Result type
-> PatSyn
buildPatSyn src_name declared_infix matcher@(matcher_id,_) builder
(univ_tvs, req_theta) (ex_tvs, prov_theta) arg_tys pat_ty
= ASSERT((and [ univ_tvs == univ_tvs'
, ex_tvs == ex_tvs'
, pat_ty `eqType` pat_ty'
, prov_theta `eqTypes` prov_theta'
, req_theta `eqTypes` req_theta'
, arg_tys `eqTypes` arg_tys'
]))
mkPatSyn src_name declared_infix
(univ_tvs, req_theta) (ex_tvs, prov_theta)
arg_tys pat_ty
matcher builder
where
((_:univ_tvs'), req_theta', tau) = tcSplitSigmaTy $ idType matcher_id
([pat_ty', cont_sigma, _], _) = tcSplitFunTys tau
(ex_tvs', prov_theta', cont_tau) = tcSplitSigmaTy cont_sigma
(arg_tys', _) = tcSplitFunTys cont_tau
-- ------------------------------------------------------
type TcMethInfo = (Name, DefMethSpec, Type)
-- A temporary intermediate, to communicate between
-- tcClassSigs and buildClass.
buildClass :: Name -> [TyVar] -> [Role] -> ThetaType
-> [FunDep TyVar] -- Functional dependencies
-> [ClassATItem] -- Associated types
-> [TcMethInfo] -- Method info
-> ClassMinimalDef -- Minimal complete definition
-> RecFlag -- Info for type constructor
-> TcRnIf m n Class
buildClass tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc_isrec
= fixM $ \ rec_clas -> -- Only name generation inside loop
do { traceIf (text "buildClass")
; datacon_name <- newImplicitBinder tycon_name mkClassDataConOcc
-- The class name is the 'parent' for this datacon, not its tycon,
-- because one should import the class to get the binding for
-- the datacon
; op_items <- mapM (mk_op_item rec_clas) sig_stuff
-- Build the selector id and default method id
-- Make selectors for the superclasses
; sc_sel_names <- mapM (newImplicitBinder tycon_name . mkSuperDictSelOcc)
(takeList sc_theta [fIRST_TAG..])
; let sc_sel_ids = [ mkDictSelId sc_name rec_clas
| sc_name <- sc_sel_names]
-- We number off the Dict superclass selectors, 1, 2, 3 etc so that we
-- can construct names for the selectors. Thus
-- class (C a, C b) => D a b where ...
-- gives superclass selectors
-- D_sc1, D_sc2
-- (We used to call them D_C, but now we can have two different
-- superclasses both called C!)
; let use_newtype = isSingleton arg_tys
-- Use a newtype if the data constructor
-- (a) has exactly one value field
-- i.e. exactly one operation or superclass taken together
-- (b) that value is of lifted type (which they always are, because
-- we box equality superclasses)
-- See note [Class newtypes and equality predicates]
-- We treat the dictionary superclasses as ordinary arguments.
-- That means that in the case of
-- class C a => D a
-- we don't get a newtype with no arguments!
args = sc_sel_names ++ op_names
op_tys = [ty | (_,_,ty) <- sig_stuff]
op_names = [op | (op,_,_) <- sig_stuff]
arg_tys = sc_theta ++ op_tys
rec_tycon = classTyCon rec_clas
; dict_con <- buildDataCon (panic "buildClass: FamInstEnvs")
datacon_name
False -- Not declared infix
(map (const no_bang) args)
(Just (map (const HsLazy) args))
[{- No fields -}]
tvs [{- no existentials -}]
[{- No GADT equalities -}]
[{- No theta -}]
arg_tys
(mkTyConApp rec_tycon (mkTyVarTys tvs))
rec_tycon
; rhs <- if use_newtype
then mkNewTyConRhs tycon_name rec_tycon dict_con
else if isCTupleTyConName tycon_name
then return (TupleTyCon { data_con = dict_con
, tup_sort = ConstraintTuple })
else return (mkDataTyConRhs [dict_con])
; let { clas_kind = mkPiKinds tvs constraintKind
; tycon = mkClassTyCon tycon_name clas_kind tvs roles
rhs rec_clas tc_isrec
-- A class can be recursive, and in the case of newtypes
-- this matters. For example
-- class C a where { op :: C b => a -> b -> Int }
-- Because C has only one operation, it is represented by
-- a newtype, and it should be a *recursive* newtype.
-- [If we don't make it a recursive newtype, we'll expand the
-- newtype like a synonym, but that will lead to an infinite
-- type]
; result = mkClass tvs fds
sc_theta sc_sel_ids at_items
op_items mindef tycon
}
; traceIf (text "buildClass" <+> ppr tycon)
; return result }
where
no_bang = HsSrcBang Nothing NoSrcUnpack NoSrcStrict
mk_op_item :: Class -> TcMethInfo -> TcRnIf n m ClassOpItem
mk_op_item rec_clas (op_name, dm_spec, _)
= do { dm_info <- case dm_spec of
NoDM -> return NoDefMeth
GenericDM -> do { dm_name <- newImplicitBinder op_name mkGenDefMethodOcc
; return (GenDefMeth dm_name) }
VanillaDM -> do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc
; return (DefMeth dm_name) }
; return (mkDictSelId op_name rec_clas, dm_info) }
{-
Note [Class newtypes and equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class (a ~ F b) => C a b where
op :: a -> b
We cannot represent this by a newtype, even though it's not
existential, because there are two value fields (the equality
predicate and op).  See Trac #2238
Moreover,
class (a ~ F b) => C a b where {}
Here we can't use a newtype either, even though there is only
one field, because equality predicates are unboxed, and classes
are boxed.
-}
| TomMD/ghc | compiler/iface/BuildTyCl.hs | bsd-3-clause | 14,742 | 0 | 18 | 5,346 | 2,319 | 1,281 | 1,038 | 204 | 5 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Data/Vector/Fusion/Bundle/Size.hs" #-}
-- |
-- Module : Data.Vector.Fusion.Bundle.Size
-- Copyright : (c) Roman Leshchinskiy 2008-2010
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Size hints for streams.
--
module Data.Vector.Fusion.Bundle.Size (
Size(..), clampedSubtract, smaller, larger, toMax, upperBound, lowerBound
) where
import Data.Vector.Fusion.Util ( delay_inline )
-- | Size hint
data Size = Exact Int -- ^ Exact size
| Max Int -- ^ Upper bound on the size
| Unknown -- ^ Unknown size
deriving( Eq, Show )
instance Num Size where
Exact m + Exact n = checkedAdd Exact m n
Exact m + Max n = checkedAdd Max m n
Max m + Exact n = checkedAdd Max m n
Max m + Max n = checkedAdd Max m n
_ + _ = Unknown
Exact m - Exact n = checkedSubtract Exact m n
Exact m - Max _ = Max m
Max m - Exact n = checkedSubtract Max m n
Max m - Max _ = Max m
Max m - Unknown = Max m
_ - _ = Unknown
fromInteger n = Exact (fromInteger n)
(*) = error "vector: internal error * for Bundle.size isn't defined"
abs = error "vector: internal error abs for Bundle.size isn't defined"
signum = error "vector: internal error signum for Bundle.size isn't defined"
{-# INLINE checkedAdd #-}
checkedAdd :: (Int -> Size) -> Int -> Int -> Size
checkedAdd con m n
-- Note: we assume m and n are >= 0.
| r < m || r < n =
error $ "Data.Vector.Fusion.Bundle.Size.checkedAdd: overflow: " ++ show r
| otherwise = con r
where
r = m + n
{-# INLINE checkedSubtract #-}
checkedSubtract :: (Int -> Size) -> Int -> Int -> Size
checkedSubtract con m n
| r < 0 =
error $ "Data.Vector.Fusion.Bundle.Size.checkedSubtract: underflow: " ++ show r
| otherwise = con r
where
r = m - n
-- | Subtract two sizes with clamping to 0, for drop-like things
{-# INLINE clampedSubtract #-}
clampedSubtract :: Size -> Size -> Size
clampedSubtract (Exact m) (Exact n) = Exact (max 0 (m - n))
clampedSubtract (Max m) (Exact n)
| m <= n = Exact 0
| otherwise = Max (m - n)
clampedSubtract (Exact m) (Max _) = Max m
clampedSubtract (Max m) (Max _) = Max m
clampedSubtract _ _ = Unknown
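-- A few concrete cases of the arithmetic above (illustrative):
--
-- > Exact 3 + Max 5                     = Max 8
-- > Exact 10 - Max 4                    = Max 10  -- only known to be at most 10
-- > clampedSubtract (Exact 3) (Exact 5) = Exact 0 -- (-) would error on underflow here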
-- | Minimum of two size hints
smaller :: Size -> Size -> Size
{-# INLINE smaller #-}
smaller (Exact m) (Exact n) = Exact (delay_inline min m n)
smaller (Exact m) (Max n) = Max (delay_inline min m n)
smaller (Exact m) Unknown = Max m
smaller (Max m) (Exact n) = Max (delay_inline min m n)
smaller (Max m) (Max n) = Max (delay_inline min m n)
smaller (Max m) Unknown = Max m
smaller Unknown (Exact n) = Max n
smaller Unknown (Max n) = Max n
smaller Unknown Unknown = Unknown
-- | Maximum of two size hints
larger :: Size -> Size -> Size
{-# INLINE larger #-}
larger (Exact m) (Exact n) = Exact (delay_inline max m n)
larger (Exact m) (Max n) | m >= n = Exact m
| otherwise = Max n
larger (Max m) (Exact n) | n >= m = Exact n
| otherwise = Max m
larger (Max m) (Max n) = Max (delay_inline max m n)
larger _ _ = Unknown
-- | Convert a size hint to an upper bound
toMax :: Size -> Size
toMax (Exact n) = Max n
toMax (Max n) = Max n
toMax Unknown = Unknown
-- | Compute the minimum size from a size hint
lowerBound :: Size -> Int
lowerBound (Exact n) = n
lowerBound _ = 0
-- | Compute the maximum size from a size hint if possible
upperBound :: Size -> Maybe Int
upperBound (Exact n) = Just n
upperBound (Max n) = Just n
upperBound Unknown = Nothing
| phischu/fragnix | tests/packages/scotty/Data.Vector.Fusion.Bundle.Size.hs | bsd-3-clause | 3,825 | 0 | 10 | 1,093 | 1,276 | 631 | 645 | 79 | 1 |
-- | Simple implementation for limiting the number
-- of active threads during concurrent computations
-- using a semaphore.
{-# LANGUAGE CPP #-}
module Control.Concurrent.Async.Pool
(
mapPool
, mapCapabilityPool
) where
import qualified Control.Exception as E
import Control.Concurrent
import Control.Concurrent.Async
#if !MIN_VERSION_base(4,8,0)
import Data.Traversable (Traversable)
#endif
-- ifdef GHC
-- import GHC.Conc (getNumProcessors)
-- endif
-- | Map async using 'getNumCapabilities' to determine
-- the number of active threads.
--
-- This function is a bit misleading as it doesn't actually utilize
-- 'forkOn' or exploit any control over whether the threads are
-- spread across physical processors. It does, however, provide a
-- nice starting point for most of the threads used in this program
-- which are heavily IO bound.
mapCapabilityPool :: Traversable t => (a -> IO b) -> t a -> IO (t b)
mapCapabilityPool f xs = do
-- num <- getNumProcessors
num <- getNumCapabilities
mapPool (num+1) f xs
-- | Limit the number of threads which can be active at any
-- given time when using 'mapConcurrently'. The downside is
-- that this function will allocate all threads at once.
mapPool :: Traversable t => Int -> (a -> IO b) -> t a -> IO (t b)
mapPool num f xs = do
sem <- newQSem num
mapConcurrently (withQSem sem . f) xs
withQSem :: QSem -> IO a -> IO a
withQSem m = E.bracket_ (waitQSem m) (signalQSem m)
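-- Illustrative usage (assuming some 'fetchUrl :: String -> IO Page' exists):
--
-- > pages <- mapPool 8 fetchUrl urls
--
-- runs 'fetchUrl' over all of 'urls' concurrently, with at most 8 downloads
-- in flight at any one time.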
| CodyReichert/stack-tag | src/Control/Concurrent/Async/Pool.hs | bsd-3-clause | 1,469 | 0 | 11 | 286 | 274 | 149 | 125 | 19 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Generating machine code (instruction selection)
--
-- (c) The University of Glasgow 1996-2013
--
-----------------------------------------------------------------------------
{-# LANGUAGE GADTs #-}
module SPARC.CodeGen (
cmmTopCodeGen,
generateJumpTableForInstr,
InstrBlock
)
where
#include "HsVersions.h"
#include "nativeGen/NCG.h"
#include "../includes/MachDeps.h"
-- NCG stuff:
import GhcPrelude
import SPARC.Base
import SPARC.CodeGen.Sanity
import SPARC.CodeGen.Amode
import SPARC.CodeGen.CondCode
import SPARC.CodeGen.Gen64
import SPARC.CodeGen.Gen32
import SPARC.CodeGen.Base
import SPARC.Ppr ()
import SPARC.Instr
import SPARC.Imm
import SPARC.AddrMode
import SPARC.Regs
import SPARC.Stack
import Instruction
import Format
import NCGMonad
-- Our intermediate code:
import BlockId
import Cmm
import CmmUtils
import CmmSwitch
import Hoopl.Block
import Hoopl.Graph
import PIC
import Reg
import CLabel
import CPrim
-- The rest:
import BasicTypes
import DynFlags
import FastString
import OrdList
import Outputable
import Platform
import Control.Monad ( mapAndUnzipM )
-- | Top level code generation
cmmTopCodeGen :: RawCmmDecl
-> NatM [NatCmmDecl CmmStatics Instr]
cmmTopCodeGen (CmmProc info lab live graph)
= do let blocks = toBlockListEntryFirst graph
(nat_blocks,statics) <- mapAndUnzipM basicBlockCodeGen blocks
let proc = CmmProc info lab live (ListGraph $ concat nat_blocks)
let tops = proc : concat statics
return tops
cmmTopCodeGen (CmmData sec dat) = do
return [CmmData sec dat] -- no translation, we just use CmmStatic
-- | Do code generation on a single block of CMM code.
-- code generation may introduce new basic block boundaries, which
-- are indicated by the NEWBLOCK instruction. We must split up the
-- instruction stream into basic blocks again. Also, we extract
-- LDATAs here too.
basicBlockCodeGen :: CmmBlock
-> NatM ( [NatBasicBlock Instr]
, [NatCmmDecl CmmStatics Instr])
basicBlockCodeGen block = do
let (_, nodes, tail) = blockSplit block
id = entryLabel block
stmts = blockToList nodes
mid_instrs <- stmtsToInstrs stmts
tail_instrs <- stmtToInstrs tail
let instrs = mid_instrs `appOL` tail_instrs
let
(top,other_blocks,statics)
= foldrOL mkBlocks ([],[],[]) instrs
mkBlocks (NEWBLOCK id) (instrs,blocks,statics)
= ([], BasicBlock id instrs : blocks, statics)
mkBlocks (LDATA sec dat) (instrs,blocks,statics)
= (instrs, blocks, CmmData sec dat:statics)
mkBlocks instr (instrs,blocks,statics)
= (instr:instrs, blocks, statics)
-- do intra-block sanity checking
blocksChecked
= map (checkBlock block)
$ BasicBlock id top : other_blocks
return (blocksChecked, statics)
-- | Convert some Cmm statements to SPARC instructions.
stmtsToInstrs :: [CmmNode e x] -> NatM InstrBlock
stmtsToInstrs stmts
= do instrss <- mapM stmtToInstrs stmts
return (concatOL instrss)
stmtToInstrs :: CmmNode e x -> NatM InstrBlock
stmtToInstrs stmt = do
dflags <- getDynFlags
case stmt of
CmmComment s -> return (unitOL (COMMENT s))
CmmTick {} -> return nilOL
CmmUnwind {} -> return nilOL
CmmAssign reg src
| isFloatType ty -> assignReg_FltCode format reg src
| isWord64 ty -> assignReg_I64Code reg src
| otherwise -> assignReg_IntCode format reg src
where ty = cmmRegType dflags reg
format = cmmTypeFormat ty
CmmStore addr src
| isFloatType ty -> assignMem_FltCode format addr src
| isWord64 ty -> assignMem_I64Code addr src
| otherwise -> assignMem_IntCode format addr src
where ty = cmmExprType dflags src
format = cmmTypeFormat ty
CmmUnsafeForeignCall target result_regs args
-> genCCall target result_regs args
CmmBranch id -> genBranch id
CmmCondBranch arg true false _ -> do
b1 <- genCondJump true arg
b2 <- genBranch false
return (b1 `appOL` b2)
CmmSwitch arg ids -> do dflags <- getDynFlags
genSwitch dflags arg ids
CmmCall { cml_target = arg } -> genJump arg
_
-> panic "stmtToInstrs: statement should have been cps'd away"
{-
Now, given a tree (the argument to a CmmLoad) that references memory,
produce a suitable addressing mode.
A Rule of the Game (tm) for Amodes: use of the addr bit must
immediately follow use of the code part, since the code part puts
values in registers which the addr then refers to. So you can't put
anything in between, lest it overwrite some of those registers. If
you need to do some other computation between the code part and use of
the addr bit, first store the effective address from the amode in a
temporary, then do the other computation, and then use the temporary:
code
LEA amode, tmp
... other computation ...
... (tmp) ...
-}
-- | Convert a BlockId to some CmmStatic data
jumpTableEntry :: DynFlags -> Maybe BlockId -> CmmStatic
jumpTableEntry dflags Nothing = CmmStaticLit (CmmInt 0 (wordWidth dflags))
jumpTableEntry _ (Just blockid) = CmmStaticLit (CmmLabel blockLabel)
where blockLabel = blockLbl blockid
-- -----------------------------------------------------------------------------
-- Generating assignments
-- Assignments are really at the heart of the whole code generation
-- business. Almost all top-level nodes of any real importance are
-- assignments, which correspond to loads, stores, or register
-- transfers. If we're really lucky, some of the register transfers
-- will go away, because we can use the destination register to
-- complete the code generation for the right hand side. This only
-- fails when the right hand side is forced into a fixed register
-- (e.g. the result of a call).
assignMem_IntCode :: Format -> CmmExpr -> CmmExpr -> NatM InstrBlock
assignMem_IntCode pk addr src = do
(srcReg, code) <- getSomeReg src
Amode dstAddr addr_code <- getAmode addr
return $ code `appOL` addr_code `snocOL` ST pk srcReg dstAddr
assignReg_IntCode :: Format -> CmmReg -> CmmExpr -> NatM InstrBlock
assignReg_IntCode _ reg src = do
dflags <- getDynFlags
r <- getRegister src
let dst = getRegisterReg (targetPlatform dflags) reg
return $ case r of
Any _ code -> code dst
Fixed _ freg fcode -> fcode `snocOL` OR False g0 (RIReg freg) dst
-- Floating point assignment to memory
assignMem_FltCode :: Format -> CmmExpr -> CmmExpr -> NatM InstrBlock
assignMem_FltCode pk addr src = do
dflags <- getDynFlags
Amode dst__2 code1 <- getAmode addr
(src__2, code2) <- getSomeReg src
tmp1 <- getNewRegNat pk
let
pk__2 = cmmExprType dflags src
code__2 = code1 `appOL` code2 `appOL`
if formatToWidth pk == typeWidth pk__2
then unitOL (ST pk src__2 dst__2)
else toOL [ FxTOy (cmmTypeFormat pk__2) pk src__2 tmp1
, ST pk tmp1 dst__2]
return code__2
-- Floating point assignment to a register/temporary
assignReg_FltCode :: Format -> CmmReg -> CmmExpr -> NatM InstrBlock
assignReg_FltCode pk dstCmmReg srcCmmExpr = do
dflags <- getDynFlags
let platform = targetPlatform dflags
srcRegister <- getRegister srcCmmExpr
let dstReg = getRegisterReg platform dstCmmReg
return $ case srcRegister of
Any _ code -> code dstReg
Fixed _ srcFixedReg srcCode -> srcCode `snocOL` FMOV pk srcFixedReg dstReg
genJump :: CmmExpr{-the branch target-} -> NatM InstrBlock
genJump (CmmLit (CmmLabel lbl))
= return (toOL [CALL (Left target) 0 True, NOP])
where
target = ImmCLbl lbl
genJump tree
= do
(target, code) <- getSomeReg tree
return (code `snocOL` JMP (AddrRegReg target g0) `snocOL` NOP)
-- -----------------------------------------------------------------------------
-- Unconditional branches
genBranch :: BlockId -> NatM InstrBlock
genBranch = return . toOL . mkJumpInstr
-- -----------------------------------------------------------------------------
-- Conditional jumps
{-
Conditional jumps are always to local labels, so we can use branch
instructions. We peek at the arguments to decide what kind of
comparison to do.
SPARC: First, we have to ensure that the condition codes are set
according to the supplied comparison operation. We generate slightly
different code for floating point comparisons, because a floating
point operation cannot directly precede a @BF@. We assume the worst
and fill that slot with a @NOP@.
SPARC: Do not fill the delay slots here; you will confuse the register
allocator.
-}
genCondJump
:: BlockId -- the branch target
-> CmmExpr -- the condition on which to branch
-> NatM InstrBlock
genCondJump bid bool = do
CondCode is_float cond code <- getCondCode bool
return (
code `appOL`
toOL (
if is_float
then [NOP, BF cond False bid, NOP]
else [BI cond False bid, NOP]
)
)
-- -----------------------------------------------------------------------------
-- Generating a table-branch
genSwitch :: DynFlags -> CmmExpr -> SwitchTargets -> NatM InstrBlock
genSwitch dflags expr targets
| positionIndependent dflags
= error "MachCodeGen: sparc genSwitch PIC not finished\n"
| otherwise
= do (e_reg, e_code) <- getSomeReg (cmmOffset dflags expr offset)
base_reg <- getNewRegNat II32
offset_reg <- getNewRegNat II32
dst <- getNewRegNat II32
label <- getNewLabelNat
return $ e_code `appOL`
toOL
[ -- load base of jump table
SETHI (HI (ImmCLbl label)) base_reg
, OR False base_reg (RIImm $ LO $ ImmCLbl label) base_reg
-- the addrs in the table are 32 bits wide..
, SLL e_reg (RIImm $ ImmInt 2) offset_reg
-- load and jump to the destination
, LD II32 (AddrRegReg base_reg offset_reg) dst
, JMP_TBL (AddrRegImm dst (ImmInt 0)) ids label
, NOP ]
where (offset, ids) = switchTargetsToTable targets
generateJumpTableForInstr :: DynFlags -> Instr
-> Maybe (NatCmmDecl CmmStatics Instr)
generateJumpTableForInstr dflags (JMP_TBL _ ids label) =
let jumpTable = map (jumpTableEntry dflags) ids
in Just (CmmData (Section ReadOnlyData label) (Statics label jumpTable))
generateJumpTableForInstr _ _ = Nothing
-- -----------------------------------------------------------------------------
-- Generating C calls
{-
Now the biggest nightmare---calls. Most of the nastiness is buried in
@get_arg@, which moves the arguments to the correct registers/stack
locations. Apart from that, the code is easy.
The SPARC calling convention is an absolute
nightmare. The first 6x32 bits of arguments are mapped into
%o0 through %o5, and the remaining arguments are dumped to the
stack, beginning at [%sp+92]. (Note that %o6 == %sp.)
If we have to put args on the stack, move %o6==%sp down by
the number of words to go on the stack, to ensure there's enough space.
According to Fraser and Hanson's lcc book, page 478, fig 17.2,
16 words above the stack pointer is a word for the address of
a structure return value. I use this as a temporary location
for moving values from float to int regs. Certainly it isn't
safe to put anything in the 16 words starting at %sp, since
this area can get trashed at any time due to window overflows
caused by signal handlers.
A final complication (if the above isn't enough) is that
we can't blithely calculate the arguments one by one into
%o0 .. %o5. Consider the following nested calls:
fff a (fff b c)
Naive code moves a into %o0, and (fff b c) into %o1. Unfortunately
the inner call will itself use %o0, which trashes the value put there
in preparation for the outer call. Upshot: we need to calculate the
args into temporary regs, and move those to arg regs or onto the
stack only immediately prior to the call proper. Sigh.
-}
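-- For instance, a call with eight word-sized arguments ends up with the first
-- six in %o0..%o5 and the remaining two on the stack starting at [%sp+92],
-- after %sp has been bumped down by an even number of words to keep 8-byte
-- alignment.  (An illustrative summary of the code below.)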
genCCall
:: ForeignTarget -- function to call
-> [CmmFormal] -- where to put the result
-> [CmmActual] -- arguments (of mixed type)
-> NatM InstrBlock
-- On SPARC under TSO (Total Store Ordering), writes earlier in the instruction stream
-- are guaranteed to take place before writes afterwards (unlike on PowerPC).
-- Ref: Section 8.4 of the SPARC V9 Architecture manual.
--
-- In the SPARC case we don't need a barrier.
--
genCCall (PrimTarget MO_WriteBarrier) _ _
= return $ nilOL
genCCall (PrimTarget (MO_Prefetch_Data _)) _ _
= return $ nilOL
genCCall target dest_regs args
= do -- work out the arguments, and assign them to integer regs
argcode_and_vregs <- mapM arg_to_int_vregs args
let (argcodes, vregss) = unzip argcode_and_vregs
let vregs = concat vregss
let n_argRegs = length allArgRegs
let n_argRegs_used = min (length vregs) n_argRegs
-- deal with static vs dynamic call targets
callinsns <- case target of
ForeignTarget (CmmLit (CmmLabel lbl)) _ ->
return (unitOL (CALL (Left (litToImm (CmmLabel lbl))) n_argRegs_used False))
ForeignTarget expr _
-> do (dyn_c, [dyn_r]) <- arg_to_int_vregs expr
return (dyn_c `snocOL` CALL (Right dyn_r) n_argRegs_used False)
PrimTarget mop
-> do res <- outOfLineMachOp mop
lblOrMopExpr <- case res of
Left lbl -> do
return (unitOL (CALL (Left (litToImm (CmmLabel lbl))) n_argRegs_used False))
Right mopExpr -> do
(dyn_c, [dyn_r]) <- arg_to_int_vregs mopExpr
return (dyn_c `snocOL` CALL (Right dyn_r) n_argRegs_used False)
return lblOrMopExpr
let argcode = concatOL argcodes
let (move_sp_down, move_sp_up)
= let diff = length vregs - n_argRegs
nn = if odd diff then diff + 1 else diff -- keep 8-byte alignment
in if nn <= 0
then (nilOL, nilOL)
else (unitOL (moveSp (-1*nn)), unitOL (moveSp (1*nn)))
let transfer_code
= toOL (move_final vregs allArgRegs extraStackArgsHere)
dflags <- getDynFlags
return
$ argcode `appOL`
move_sp_down `appOL`
transfer_code `appOL`
callinsns `appOL`
unitOL NOP `appOL`
move_sp_up `appOL`
assign_code (targetPlatform dflags) dest_regs
-- | Generate code to calculate an argument, and move it into one
-- or two integer vregs.
arg_to_int_vregs :: CmmExpr -> NatM (OrdList Instr, [Reg])
arg_to_int_vregs arg = do dflags <- getDynFlags
arg_to_int_vregs' dflags arg
arg_to_int_vregs' :: DynFlags -> CmmExpr -> NatM (OrdList Instr, [Reg])
arg_to_int_vregs' dflags arg
-- If the expr produces a 64 bit int, then we can just use iselExpr64
| isWord64 (cmmExprType dflags arg)
= do (ChildCode64 code r_lo) <- iselExpr64 arg
let r_hi = getHiVRegFromLo r_lo
return (code, [r_hi, r_lo])
| otherwise
= do (src, code) <- getSomeReg arg
let pk = cmmExprType dflags arg
case cmmTypeFormat pk of
-- Load a 64 bit float return value into two integer regs.
FF64 -> do
v1 <- getNewRegNat II32
v2 <- getNewRegNat II32
let code2 =
code `snocOL`
FMOV FF64 src f0 `snocOL`
ST FF32 f0 (spRel 16) `snocOL`
LD II32 (spRel 16) v1 `snocOL`
ST FF32 f1 (spRel 16) `snocOL`
LD II32 (spRel 16) v2
return (code2, [v1,v2])
-- Load a 32 bit float return value into an integer reg
FF32 -> do
v1 <- getNewRegNat II32
let code2 =
code `snocOL`
ST FF32 src (spRel 16) `snocOL`
LD II32 (spRel 16) v1
return (code2, [v1])
-- Move an integer return value into its destination reg.
_ -> do
v1 <- getNewRegNat II32
let code2 =
code `snocOL`
OR False g0 (RIReg src) v1
return (code2, [v1])
-- | Move args from the integer vregs into which they have been
-- marshalled, into %o0 .. %o5, and the rest onto the stack.
--
move_final :: [Reg] -> [Reg] -> Int -> [Instr]
-- all args done
move_final [] _ _
= []
-- out of aregs; move to stack
move_final (v:vs) [] offset
= ST II32 v (spRel offset)
: move_final vs [] (offset+1)
-- move into an arg (%o[0..5]) reg
move_final (v:vs) (a:az) offset
= OR False g0 (RIReg v) a
: move_final vs az offset
-- | Assign results returned from the call into their
-- destination regs.
--
assign_code :: Platform -> [LocalReg] -> OrdList Instr
assign_code _ [] = nilOL
assign_code platform [dest]
= let rep = localRegType dest
width = typeWidth rep
r_dest = getRegisterReg platform (CmmLocal dest)
result
| isFloatType rep
, W32 <- width
= unitOL $ FMOV FF32 (regSingle $ fReg 0) r_dest
| isFloatType rep
, W64 <- width
= unitOL $ FMOV FF64 (regSingle $ fReg 0) r_dest
| not $ isFloatType rep
, W32 <- width
= unitOL $ mkRegRegMoveInstr platform (regSingle $ oReg 0) r_dest
| not $ isFloatType rep
, W64 <- width
, r_dest_hi <- getHiVRegFromLo r_dest
= toOL [ mkRegRegMoveInstr platform (regSingle $ oReg 0) r_dest_hi
, mkRegRegMoveInstr platform (regSingle $ oReg 1) r_dest]
| otherwise
= panic "SPARC.CodeGen.GenCCall: no match"
in result
assign_code _ _
= panic "SPARC.CodeGen.GenCCall: no match"
-- | Generate a call to implement an out-of-line floating point operation
outOfLineMachOp
:: CallishMachOp
-> NatM (Either CLabel CmmExpr)
outOfLineMachOp mop
= do let functionName
= outOfLineMachOp_table mop
dflags <- getDynFlags
mopExpr <- cmmMakeDynamicReference dflags CallReference
$ mkForeignLabel functionName Nothing ForeignLabelInExternalPackage IsFunction
let mopLabelOrExpr
= case mopExpr of
CmmLit (CmmLabel lbl) -> Left lbl
_ -> Right mopExpr
return mopLabelOrExpr
-- | Decide what C function to use to implement a CallishMachOp
--
outOfLineMachOp_table
:: CallishMachOp
-> FastString
outOfLineMachOp_table mop
= case mop of
MO_F32_Exp -> fsLit "expf"
MO_F32_Log -> fsLit "logf"
MO_F32_Sqrt -> fsLit "sqrtf"
MO_F32_Fabs -> unsupported
MO_F32_Pwr -> fsLit "powf"
MO_F32_Sin -> fsLit "sinf"
MO_F32_Cos -> fsLit "cosf"
MO_F32_Tan -> fsLit "tanf"
MO_F32_Asin -> fsLit "asinf"
MO_F32_Acos -> fsLit "acosf"
MO_F32_Atan -> fsLit "atanf"
MO_F32_Sinh -> fsLit "sinhf"
MO_F32_Cosh -> fsLit "coshf"
MO_F32_Tanh -> fsLit "tanhf"
MO_F64_Exp -> fsLit "exp"
MO_F64_Log -> fsLit "log"
MO_F64_Sqrt -> fsLit "sqrt"
MO_F64_Fabs -> unsupported
MO_F64_Pwr -> fsLit "pow"
MO_F64_Sin -> fsLit "sin"
MO_F64_Cos -> fsLit "cos"
MO_F64_Tan -> fsLit "tan"
MO_F64_Asin -> fsLit "asin"
MO_F64_Acos -> fsLit "acos"
MO_F64_Atan -> fsLit "atan"
MO_F64_Sinh -> fsLit "sinh"
MO_F64_Cosh -> fsLit "cosh"
MO_F64_Tanh -> fsLit "tanh"
MO_UF_Conv w -> fsLit $ word2FloatLabel w
MO_Memcpy _ -> fsLit "memcpy"
MO_Memset _ -> fsLit "memset"
MO_Memmove _ -> fsLit "memmove"
MO_Memcmp _ -> fsLit "memcmp"
MO_BSwap w -> fsLit $ bSwapLabel w
MO_PopCnt w -> fsLit $ popCntLabel w
MO_Pdep w -> fsLit $ pdepLabel w
MO_Pext w -> fsLit $ pextLabel w
MO_Clz w -> fsLit $ clzLabel w
MO_Ctz w -> fsLit $ ctzLabel w
MO_AtomicRMW w amop -> fsLit $ atomicRMWLabel w amop
MO_Cmpxchg w -> fsLit $ cmpxchgLabel w
MO_AtomicRead w -> fsLit $ atomicReadLabel w
MO_AtomicWrite w -> fsLit $ atomicWriteLabel w
MO_S_QuotRem {} -> unsupported
MO_U_QuotRem {} -> unsupported
MO_U_QuotRem2 {} -> unsupported
MO_Add2 {} -> unsupported
MO_SubWordC {} -> unsupported
MO_AddIntC {} -> unsupported
MO_SubIntC {} -> unsupported
MO_U_Mul2 {} -> unsupported
MO_WriteBarrier -> unsupported
MO_Touch -> unsupported
(MO_Prefetch_Data _) -> unsupported
where unsupported = panic ("outOfLineCmmOp: " ++ show mop
++ " not supported here")
| shlevy/ghc | compiler/nativeGen/SPARC/CodeGen.hs | bsd-3-clause | 22,755 | 0 | 29 | 7,404 | 4,695 | 2,329 | 2,366 | 392 | 54 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -Wall #-}
module Test40 where
import Data.Generics.Product
import GHC.Generics
class MyClass a where
data AssocData a
instance MyClass Int where
data AssocData Int = SomeData
{ val :: Int
} deriving (Generic)
main :: IO ()
main
= print $ getField @"val" (SomeData 3)
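-- Prints 3: getField @"val" selects the 'val' field of the AssocData
-- instance via its Generic representation.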
| kcsongor/generic-lens | generic-optics/test/Test40.hs | bsd-3-clause | 424 | 0 | 9 | 81 | 99 | 56 | 43 | 17 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Network/Wai/Logger/IP.hs" #-}
module Network.Wai.Logger.IP (
NumericAddress, showSockAddr
) where
import Data.Bits (shift, (.&.))
import Data.Word (Word32)
import Network.Socket (SockAddr(..))
import System.ByteOrder (ByteOrder(..), byteOrder)
import Text.Printf (printf)
-- | A type for IP address in numeric string representation.
type NumericAddress = String
showIPv4 :: Word32 -> Bool -> NumericAddress
showIPv4 w32 little
| little = show b1 ++ "." ++ show b2 ++ "." ++ show b3 ++ "." ++ show b4
| otherwise = show b4 ++ "." ++ show b3 ++ "." ++ show b2 ++ "." ++ show b1
where
t1 = w32
t2 = shift t1 (-8)
t3 = shift t2 (-8)
t4 = shift t3 (-8)
b1 = t1 .&. 0x000000ff
b2 = t2 .&. 0x000000ff
b3 = t3 .&. 0x000000ff
b4 = t4 .&. 0x000000ff
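-- For illustration: on a little-endian host the network-order address
-- 127.0.0.1 is read back as the Word32 0x0100007f, so
--
-- > showIPv4 0x0100007f True == "127.0.0.1"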
showIPv6 :: (Word32,Word32,Word32,Word32) -> String
showIPv6 (w1,w2,w3,w4) =
printf "%x:%x:%x:%x:%x:%x:%x:%x" s1 s2 s3 s4 s5 s6 s7 s8
where
(s1,s2) = split16 w1
(s3,s4) = split16 w2
(s5,s6) = split16 w3
(s7,s8) = split16 w4
split16 w = (h1,h2)
where
h1 = shift w (-16) .&. 0x0000ffff
h2 = w .&. 0x0000ffff
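-- For instance, 2001:db8::1 held as four host-order Word32s renders as
--
-- > showIPv6 (0x20010db8, 0, 0, 1) == "2001:db8:0:0:0:0:0:1"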
-- | Convert 'SockAddr' to 'NumericAddress'. If the address is
-- IPv4-embedded IPv6 address, the IPv4 is extracted.
showSockAddr :: SockAddr -> NumericAddress
-- HostAddr is network byte order.
showSockAddr (SockAddrInet _ addr4) = showIPv4 addr4 (byteOrder == LittleEndian)
-- HostAddr6 is host byte order.
showSockAddr (SockAddrInet6 _ _ (0,0,0x0000ffff,addr4) _) = showIPv4 addr4 False
showSockAddr (SockAddrInet6 _ _ (0,0,0,1) _) = "::1"
showSockAddr (SockAddrInet6 _ _ addr6 _) = showIPv6 addr6
showSockAddr _ = "unknownSocket"
| phischu/fragnix | tests/packages/scotty/Network.Wai.Logger.IP.hs | bsd-3-clause | 1,822 | 0 | 12 | 466 | 596 | 325 | 271 | 38 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.HcPkg
-- Copyright : Duncan Coutts 2009, 2013
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module provides an library interface to the @hc-pkg@ program.
-- Currently only GHC and LHC have hc-pkg programs.
module Distribution.Simple.Program.HcPkg (
init,
invoke,
register,
reregister,
unregister,
expose,
hide,
dump,
list,
-- * Program invocations
initInvocation,
registerInvocation,
reregisterInvocation,
unregisterInvocation,
exposeInvocation,
hideInvocation,
dumpInvocation,
listInvocation,
) where
import Prelude hiding (init)
import Distribution.Package
( PackageId, InstalledPackageId(..) )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo, InstalledPackageInfo_(..)
, showInstalledPackageInfo
, emptyInstalledPackageInfo, fieldsInstalledPackageInfo )
import Distribution.ParseUtils
import Distribution.Simple.Compiler
( PackageDB(..), PackageDBStack )
import Distribution.Simple.Program.Types
( ConfiguredProgram(programId, programVersion) )
import Distribution.Simple.Program.Run
( ProgramInvocation(..), IOEncoding(..), programInvocation
, runProgramInvocation, getProgramInvocationOutput )
import Distribution.Version
( Version(..) )
import Distribution.Text
( display, simpleParse )
import Distribution.Simple.Utils
( die )
import Distribution.Verbosity
( Verbosity, deafening, silent )
import Distribution.Compat.Exception
( catchExit )
import Data.Char
( isSpace )
import Data.Maybe
( fromMaybe )
import Data.List
( stripPrefix )
import System.FilePath as FilePath
( (</>), splitPath, splitDirectories, joinPath, isPathSeparator )
import qualified System.FilePath.Posix as FilePath.Posix
-- | Call @hc-pkg@ to initialise a package database at the location {path}.
--
-- > hc-pkg init {path}
--
init :: Verbosity -> ConfiguredProgram -> FilePath -> IO ()
init verbosity hcPkg path =
runProgramInvocation verbosity
(initInvocation hcPkg verbosity path)
-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the
-- provided command-line arguments to it.
invoke :: Verbosity -> ConfiguredProgram -> PackageDBStack -> [String] -> IO ()
invoke verbosity hcPkg dbStack extraArgs =
runProgramInvocation verbosity invocation
where
args = packageDbStackOpts hcPkg dbStack ++ extraArgs
invocation = programInvocation hcPkg args
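-- A hypothetical example (the program and arguments are made up for
-- illustration): with a configured @ghc-pkg@ and the usual global/user stack,
--
-- > invoke verbosity ghcPkgProg [GlobalPackageDB, UserPackageDB] ["check"]
--
-- runs roughly @ghc-pkg --global --user check@.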
-- | Call @hc-pkg@ to register a package.
--
-- > hc-pkg register {filename | -} [--user | --global | --package-db]
--
register :: Verbosity -> ConfiguredProgram -> PackageDBStack
-> Either FilePath
InstalledPackageInfo
-> IO ()
register verbosity hcPkg packagedb pkgFile =
runProgramInvocation verbosity
(registerInvocation hcPkg verbosity packagedb pkgFile)
-- | Call @hc-pkg@ to re-register a package.
--
-- > hc-pkg register {filename | -} [--user | --global | --package-db]
--
reregister :: Verbosity -> ConfiguredProgram -> PackageDBStack
-> Either FilePath
InstalledPackageInfo
-> IO ()
reregister verbosity hcPkg packagedb pkgFile =
runProgramInvocation verbosity
(reregisterInvocation hcPkg verbosity packagedb pkgFile)
-- | Call @hc-pkg@ to unregister a package
--
-- > hc-pkg unregister [pkgid] [--user | --global | --package-db]
--
unregister :: Verbosity -> ConfiguredProgram -> PackageDB -> PackageId -> IO ()
unregister verbosity hcPkg packagedb pkgid =
runProgramInvocation verbosity
(unregisterInvocation hcPkg verbosity packagedb pkgid)
-- | Call @hc-pkg@ to expose a package.
--
-- > hc-pkg expose [pkgid] [--user | --global | --package-db]
--
expose :: Verbosity -> ConfiguredProgram -> PackageDB -> PackageId -> IO ()
expose verbosity hcPkg packagedb pkgid =
runProgramInvocation verbosity
(exposeInvocation hcPkg verbosity packagedb pkgid)
-- | Call @hc-pkg@ to hide a package.
--
-- > hc-pkg hide [pkgid] [--user | --global | --package-db]
--
hide :: Verbosity -> ConfiguredProgram -> PackageDB -> PackageId -> IO ()
hide verbosity hcPkg packagedb pkgid =
runProgramInvocation verbosity
(hideInvocation hcPkg verbosity packagedb pkgid)
-- | Call @hc-pkg@ to get all the details of all the packages in the given
-- package database.
--
dump :: Verbosity -> ConfiguredProgram -> PackageDB -> IO [InstalledPackageInfo]
dump verbosity hcPkg packagedb = do
output <- getProgramInvocationOutput verbosity
(dumpInvocation hcPkg verbosity packagedb)
`catchExit` \_ -> die $ programId hcPkg ++ " dump failed"
case parsePackages output of
Left ok -> return ok
_ -> die $ "failed to parse output of '"
++ programId hcPkg ++ " dump'"
where
parsePackages str =
let parsed = map parseInstalledPackageInfo' (splitPkgs str)
in case [ msg | ParseFailed msg <- parsed ] of
[] -> Left [ setInstalledPackageId
. maybe id mungePackagePaths pkgroot
$ pkg
| ParseOk _ (pkgroot, pkg) <- parsed ]
msgs -> Right msgs
parseInstalledPackageInfo' =
parseFieldsFlat fields (Nothing, emptyInstalledPackageInfo)
where
fields = liftFieldFst pkgrootField
: map liftFieldSnd fieldsInstalledPackageInfo
pkgrootField =
simpleField "pkgroot"
showFilePath parseFilePathQ
(fromMaybe "") (\x _ -> Just x)
liftFieldFst = liftField fst (\x (_x,y) -> (x,y))
liftFieldSnd = liftField snd (\y (x,_y) -> (x,y))
--TODO: this could be a lot faster. We're doing normaliseLineEndings twice
-- and converting back and forth with lines/unlines.
splitPkgs :: String -> [String]
splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines
where
-- Handle the case of there being no packages at all.
checkEmpty [s] | all isSpace s = []
checkEmpty ss = ss
splitWith :: (a -> Bool) -> [a] -> [[a]]
splitWith p xs = ys : case zs of
[] -> []
_:ws -> splitWith p ws
where (ys,zs) = break p xs
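-- For illustration, 'splitWith' drops the matching separator elements:
--
-- > splitWith ("---" ==) ["name: a", "---", "name: b"]
-- > == [["name: a"], ["name: b"]]
--
-- which is how 'splitPkgs' cuts the dump output into one chunk per package.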
mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo
-- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec
-- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html)
-- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}.
-- The "pkgroot" is the directory containing the package database.
mungePackagePaths pkgroot pkginfo =
pkginfo {
importDirs = mungePaths (importDirs pkginfo),
includeDirs = mungePaths (includeDirs pkginfo),
libraryDirs = mungePaths (libraryDirs pkginfo),
frameworkDirs = mungePaths (frameworkDirs pkginfo),
haddockInterfaces = mungePaths (haddockInterfaces pkginfo),
haddockHTMLs = mungeUrls (haddockHTMLs pkginfo)
}
where
mungePaths = map mungePath
mungeUrls = map mungeUrl
mungePath p = case stripVarPrefix "${pkgroot}" p of
Just p' -> pkgroot </> p'
Nothing -> p
mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of
Just p' -> toUrlPath pkgroot p'
Nothing -> p
toUrlPath r p = "file:///"
-- URLs always use posix style '/' separators:
++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p)
stripVarPrefix var p =
case splitPath p of
(root:path') -> case stripPrefix var root of
Just [sep] | isPathSeparator sep -> Just (joinPath path')
_ -> Nothing
_ -> Nothing
-- Older installed package info files did not have the installedPackageId
-- field, so if it is missing then we fill it as the source package ID.
setInstalledPackageId :: InstalledPackageInfo -> InstalledPackageInfo
setInstalledPackageId pkginfo@InstalledPackageInfo {
installedPackageId = InstalledPackageId "",
sourcePackageId = pkgid
}
= pkginfo {
--TODO use a proper named function for the conversion
-- from source package id to installed package id
installedPackageId = InstalledPackageId (display pkgid)
}
setInstalledPackageId pkginfo = pkginfo
-- | Call @hc-pkg@ to get the source package Id of all the packages in the
-- given package database.
--
-- This is much less information than with 'dump', but also rather quicker.
-- Note in particular that it does not include the 'InstalledPackageId', just
-- the source 'PackageId' which is not necessarily unique in any package db.
--
list :: Verbosity -> ConfiguredProgram -> PackageDB -> IO [PackageId]
list verbosity hcPkg packagedb = do
output <- getProgramInvocationOutput verbosity
(listInvocation hcPkg verbosity packagedb)
`catchExit` \_ -> die $ programId hcPkg ++ " list failed"
case parsePackageIds output of
Just ok -> return ok
_ -> die $ "failed to parse output of '"
++ programId hcPkg ++ " list'"
where
parsePackageIds str =
let parsed = map simpleParse (words str)
in case [ () | Nothing <- parsed ] of
[] -> Just [ pkgid | Just pkgid <- parsed ]
_ -> Nothing
--------------------------
-- The program invocations
--
initInvocation :: ConfiguredProgram
-> Verbosity -> FilePath -> ProgramInvocation
initInvocation hcPkg verbosity path =
programInvocation hcPkg args
where
args = ["init", path]
++ verbosityOpts hcPkg verbosity
registerInvocation, reregisterInvocation
:: ConfiguredProgram -> Verbosity -> PackageDBStack
-> Either FilePath InstalledPackageInfo
-> ProgramInvocation
registerInvocation = registerInvocation' "register"
reregisterInvocation = registerInvocation' "update"
registerInvocation' :: String
-> ConfiguredProgram -> Verbosity -> PackageDBStack
-> Either FilePath InstalledPackageInfo
-> ProgramInvocation
registerInvocation' cmdname hcPkg verbosity packagedbs (Left pkgFile) =
programInvocation hcPkg args
where
args = [cmdname, pkgFile]
++ (if legacyVersion hcPkg
then [packageDbOpts hcPkg (last packagedbs)]
else packageDbStackOpts hcPkg packagedbs)
++ verbosityOpts hcPkg verbosity
registerInvocation' cmdname hcPkg verbosity packagedbs (Right pkgInfo) =
(programInvocation hcPkg args) {
progInvokeInput = Just (showInstalledPackageInfo pkgInfo),
progInvokeInputEncoding = IOEncodingUTF8
}
where
args = [cmdname, "-"]
++ (if legacyVersion hcPkg
then [packageDbOpts hcPkg (last packagedbs)]
else packageDbStackOpts hcPkg packagedbs)
++ verbosityOpts hcPkg verbosity
unregisterInvocation :: ConfiguredProgram
-> Verbosity -> PackageDB -> PackageId
-> ProgramInvocation
unregisterInvocation hcPkg verbosity packagedb pkgid =
programInvocation hcPkg $
["unregister", packageDbOpts hcPkg packagedb, display pkgid]
++ verbosityOpts hcPkg verbosity
exposeInvocation :: ConfiguredProgram
-> Verbosity -> PackageDB -> PackageId -> ProgramInvocation
exposeInvocation hcPkg verbosity packagedb pkgid =
programInvocation hcPkg $
["expose", packageDbOpts hcPkg packagedb, display pkgid]
++ verbosityOpts hcPkg verbosity
hideInvocation :: ConfiguredProgram
-> Verbosity -> PackageDB -> PackageId -> ProgramInvocation
hideInvocation hcPkg verbosity packagedb pkgid =
programInvocation hcPkg $
["hide", packageDbOpts hcPkg packagedb, display pkgid]
++ verbosityOpts hcPkg verbosity
dumpInvocation :: ConfiguredProgram
-> Verbosity -> PackageDB -> ProgramInvocation
dumpInvocation hcPkg _verbosity packagedb =
(programInvocation hcPkg args) {
progInvokeOutputEncoding = IOEncodingUTF8
}
where
args = ["dump", packageDbOpts hcPkg packagedb]
++ verbosityOpts hcPkg silent
-- We use verbosity level 'silent' because it is important that we
-- do not contaminate the output with info/debug messages.
listInvocation :: ConfiguredProgram
-> Verbosity -> PackageDB -> ProgramInvocation
listInvocation hcPkg _verbosity packagedb =
(programInvocation hcPkg args) {
progInvokeOutputEncoding = IOEncodingUTF8
}
where
args = ["list", "--simple-output", packageDbOpts hcPkg packagedb]
++ verbosityOpts hcPkg silent
-- We use verbosity level 'silent' because it is important that we
-- do not contaminate the output with info/debug messages.
packageDbStackOpts :: ConfiguredProgram -> PackageDBStack -> [String]
packageDbStackOpts hcPkg dbstack = case dbstack of
(GlobalPackageDB:UserPackageDB:dbs) -> "--global"
: "--user"
: map specific dbs
(GlobalPackageDB:dbs) -> "--global"
: ("--no-user-" ++ packageDbFlag hcPkg)
: map specific dbs
_ -> ierror
where
specific (SpecificPackageDB db) = "--" ++ packageDbFlag hcPkg ++ "=" ++ db
specific _ = ierror
ierror :: a
ierror = error ("internal error: unexpected package db stack: " ++ show dbstack)
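-- Illustrative only (the flag spelling depends on the hc-pkg version, see
-- 'packageDbFlag'): with a recent ghc-pkg,
--
-- > packageDbStackOpts hcPkg [GlobalPackageDB, UserPackageDB, SpecificPackageDB "my.db"]
-- > == ["--global", "--user", "--package-db=my.db"]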
packageDbFlag :: ConfiguredProgram -> String
packageDbFlag hcPkg
| programVersion hcPkg < Just (Version [7,5] [])
= "package-conf"
| otherwise
= "package-db"
packageDbOpts :: ConfiguredProgram -> PackageDB -> String
packageDbOpts _ GlobalPackageDB = "--global"
packageDbOpts _ UserPackageDB = "--user"
packageDbOpts hcPkg (SpecificPackageDB db) = "--" ++ packageDbFlag hcPkg ++ "=" ++ db
verbosityOpts :: ConfiguredProgram -> Verbosity -> [String]
verbosityOpts hcPkg v
-- ghc-pkg < 6.11 does not support -v
| programId hcPkg == "ghc-pkg"
&& programVersion hcPkg < Just (Version [6,11] [])
= []
| v >= deafening = ["-v2"]
| v == silent = ["-v0"]
| otherwise = []
-- Handle quirks in ghc-pkg 6.8 and older
legacyVersion :: ConfiguredProgram -> Bool
legacyVersion hcPkg = programId hcPkg == "ghc-pkg"
&& programVersion hcPkg < Just (Version [6,9] [])
| jwiegley/ghc-release | libraries/Cabal/cabal/Distribution/Simple/Program/HcPkg.hs | gpl-3.0 | 14,909 | 0 | 17 | 3,909 | 3,049 | 1,620 | 1,429 | 268 | 5 |
module System.Console.Haskeline.Term where
import System.Console.Haskeline.Monads
import System.Console.Haskeline.LineState
import System.Console.Haskeline.Key
import System.Console.Haskeline.Prefs(Prefs)
import System.Console.Haskeline.Completion(Completion)
import Control.Concurrent
import Data.Word
import Control.Exception (fromException, AsyncException(..),bracket_)
import Data.Typeable
import System.IO
import Control.Monad(liftM,when,guard)
import System.IO.Error (isEOFError)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
class (MonadReader Layout m, MonadException m) => Term m where
reposition :: Layout -> LineChars -> m ()
moveToNextLine :: LineChars -> m ()
printLines :: [String] -> m ()
drawLineDiff :: LineChars -> LineChars -> m ()
clearLayout :: m ()
ringBell :: Bool -> m ()
drawLine, clearLine :: Term m => LineChars -> m ()
drawLine = drawLineDiff ([],[])
clearLine = flip drawLineDiff ([],[])
data RunTerm = RunTerm {
-- | Write unicode characters to stdout.
putStrOut :: String -> IO (),
termOps :: Either TermOps FileOps,
wrapInterrupt :: forall a . IO a -> IO a,
closeTerm :: IO ()
}
-- | Operations needed for terminal-style interaction.
data TermOps = TermOps {
getLayout :: IO Layout
, withGetEvent :: CommandMonad m => (m Event -> m a) -> m a
, evalTerm :: forall m . CommandMonad m => EvalTerm m
, saveUnusedKeys :: [Key] -> IO ()
}
-- | Operations needed for file-style interaction.
--
-- Backends can assume that getLocaleLine, getLocaleChar and maybeReadNewline
-- are "wrapped" by wrapFileInput.
data FileOps = FileOps {
inputHandle :: Handle, -- ^ e.g. for turning off echoing.
wrapFileInput :: forall a . IO a -> IO a,
getLocaleLine :: MaybeT IO String,
getLocaleChar :: MaybeT IO Char,
maybeReadNewline :: IO ()
}
-- | Are we using terminal-style interaction?
isTerminalStyle :: RunTerm -> Bool
isTerminalStyle r = case termOps r of
Left TermOps{} -> True
_ -> False
-- Specific, hidden terminal action type
-- Generic terminal actions which are independent of the Term being used.
data EvalTerm m
= forall n . (Term n, CommandMonad n)
=> EvalTerm (forall a . n a -> m a) (forall a . m a -> n a)
mapEvalTerm :: (forall a . n a -> m a) -> (forall a . m a -> n a)
-> EvalTerm n -> EvalTerm m
mapEvalTerm eval liftE (EvalTerm eval' liftE')
= EvalTerm (eval . eval') (liftE' . liftE)
data Interrupt = Interrupt
deriving (Show,Typeable,Eq)
instance Exception Interrupt where
class (MonadReader Prefs m , MonadReader Layout m, MonadException m)
=> CommandMonad m where
runCompletion :: (String,String) -> m (String,[Completion])
instance (MonadTrans t, CommandMonad m, MonadReader Prefs (t m),
MonadException (t m),
MonadReader Layout (t m))
=> CommandMonad (t m) where
runCompletion = lift . runCompletion
-- Utility function for drawLineDiff instances.
matchInit :: Eq a => [a] -> [a] -> ([a],[a])
matchInit (x:xs) (y:ys) | x == y = matchInit xs ys
matchInit xs ys = (xs,ys)
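-- For example (illustrative): the common prefix is stripped and the
-- differing suffixes are returned:
--
-- > matchInit "abcde" "abxyz" == ("cde", "xyz")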
data Event = WindowResize | KeyInput [Key] | ErrorEvent SomeException
deriving Show
keyEventLoop :: IO [Event] -> Chan Event -> IO Event
keyEventLoop readEvents eventChan = do
-- first, see if any events are already queued up (from a key/ctrl-c
-- event or from a previous call to getEvent where we read in multiple
-- keys)
isEmpty <- isEmptyChan eventChan
if not isEmpty
then readChan eventChan
else do
lock <- newEmptyMVar
tid <- forkIO $ handleErrorEvent (readerLoop lock)
readChan eventChan `finally` do
putMVar lock ()
killThread tid
where
readerLoop lock = do
es <- readEvents
if null es
then readerLoop lock
else -- Use the lock to work around the fact that writeList2Chan
-- isn't atomic. Otherwise, some events could be ignored if
-- the subthread is killed before it saves them in the chan.
bracket_ (putMVar lock ()) (takeMVar lock) $
writeList2Chan eventChan es
handleErrorEvent = handle $ \e -> case fromException e of
Just ThreadKilled -> return ()
_ -> writeChan eventChan (ErrorEvent e)
saveKeys :: Chan Event -> [Key] -> IO ()
saveKeys ch = writeChan ch . KeyInput
data Layout = Layout {width, height :: Int}
deriving (Show,Eq)
-----------------------------------
-- Utility functions for the various backends.
-- | Utility function since we're not using the new IO library yet.
hWithBinaryMode :: MonadException m => Handle -> m a -> m a
#if __GLASGOW_HASKELL__ >= 611
hWithBinaryMode h = bracket (liftIO $ hGetEncoding h)
(maybe (return ()) (liftIO . hSetEncoding h))
. const . (liftIO (hSetBinaryMode h True) >>)
#else
hWithBinaryMode _ = id
#endif
-- | Utility function for changing a property of a terminal for the duration of
-- a computation.
bracketSet :: (Eq a, MonadException m) => IO a -> (a -> IO ()) -> a -> m b -> m b
bracketSet getState set newState f = bracket (liftIO getState)
(liftIO . set)
(\_ -> liftIO (set newState) >> f)
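-- A typical sketch (assuming the handle is stdin): temporarily switch off
-- terminal echo around an action:
--
-- > bracketSet (hGetEcho stdin) (hSetEcho stdin) False action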
-- | Returns one 8-bit word. Needs to be wrapped by hWithBinaryMode.
hGetByte :: Handle -> MaybeT IO Word8
hGetByte = guardedEOF $ liftM (toEnum . fromEnum) . hGetChar
guardedEOF :: (Handle -> IO a) -> Handle -> MaybeT IO a
guardedEOF f h = do
eof <- lift $ hIsEOF h
guard (not eof)
lift $ f h
-- If another character is immediately available, and it is a newline, consume it.
--
-- Two portability fixes:
--
-- 1) By itself, this (by using hReady) might crash on invalid characters.
-- The handle should be set to binary mode or a TextEncoder that
-- transliterates or ignores invalid input.
--
-- 2) Note that in ghc-6.8.3 and earlier, hReady returns False at an EOF,
-- whereas in ghc-6.10.1 and later it throws an exception. (GHC trac #1063).
-- This code handles both of those cases.
hMaybeReadNewline :: Handle -> IO ()
hMaybeReadNewline h = returnOnEOF () $ do
ready <- hReady h
when ready $ do
c <- hLookAhead h
when (c == '\n') $ getChar >> return ()
returnOnEOF :: MonadException m => a -> m a -> m a
returnOnEOF x = handle $ \e -> if isEOFError e
then return x
else throwIO e
-- | Utility function to correctly get a line of input as an undecoded ByteString.
hGetLocaleLine :: Handle -> MaybeT IO ByteString
hGetLocaleLine = guardedEOF $ \h -> do
-- It's more efficient to use B.getLine, but that function throws an
-- error if the Handle (e.g., stdin) is set to NoBuffering.
buff <- liftIO $ hGetBuffering h
liftIO $ if buff == NoBuffering
then fmap BC.pack $ System.IO.hGetLine h
else BC.hGetLine h
| jwiegley/ghc-release | libraries/haskeline/System/Console/Haskeline/Term.hs | gpl-3.0 | 7,299 | 0 | 15 | 2,032 | 1,926 | 1,013 | 913 | -1 | -1 |
-- do nothing
main = return ()
| nikki-and-the-robots/nikki | src/scripts/Nothing.hs | lgpl-3.0 | 32 | 0 | 6 | 7 | 12 | 6 | 6 | 1 | 1 |
{-# LANGUAGE PatternGuards #-}
{-| The coverage and totality checkers for Idris are in this module.
-}
module Idris.Coverage where
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Core.CaseTree
import Idris.AbsSyntax
import Idris.Delaborate
import Idris.Error
import Idris.Output (iWarn, iputStrLn)
import Data.List
import Data.Either
import Data.Maybe
import Debug.Trace
import Control.Monad.State.Strict
-- | Generate a pattern from an 'impossible' LHS.
--
-- We need this to eliminate the pattern clauses which have been
-- provided explicitly from new clause generation.
mkPatTm :: PTerm -> Idris Term
mkPatTm t = do i <- getIState
let timp = addImpl' True [] [] [] i t
evalStateT (toTT (mapPT deNS timp)) 0
where
toTT (PRef _ _ n) = do i <- lift getIState
case lookupNameDef n (tt_ctxt i) of
[(n', TyDecl nt _)] -> return $ P nt n' Erased
_ -> return $ P Ref n Erased
toTT (PApp _ t args) = do t' <- toTT t
args' <- mapM (toTT . getTm) args
return $ mkApp t' args'
-- For alternatives, pick the first and drop the namespaces. It doesn't
-- really matter which is taken since matching will ignore the namespace.
toTT (PAlternative _ _ (a : as)) = toTT a
toTT _ = do v <- get
put (v + 1)
return (P Bound (sMN v "imp") Erased)
deNS (PRef f hl (NS n _)) = PRef f hl n
deNS t = t
-- | Given a list of LHSs, generate extra clauses which cover the remaining
-- cases. The ones which haven't been provided are marked 'absurd' so
-- that the checker will make sure they can't happen.
--
-- This will only work after the given clauses have been typechecked and the
-- names are fully explicit!
genClauses :: FC -> Name -> [Term] -> [PTerm] -> Idris [PTerm]
genClauses fc n xs given
= do i <- getIState
let lhs_tms = map (\x -> flattenArgs $ delab' i x True True) xs
-- if a placeholder was given, don't bother generating cases for it
let lhs_tms' = zipWith mergePlaceholders lhs_tms
(map (stripUnmatchable i) (map flattenArgs given))
let lhss = map pUnApply lhs_tms'
let argss = transpose lhss
let all_args = map (genAll i) argss
logLvl 5 $ "COVERAGE of " ++ show n
logLvl 5 $ show (lhs_tms, lhss)
logLvl 5 $ show (map length argss) ++ "\n" ++ show (map length all_args)
logLvl 10 $ show argss ++ "\n" ++ show all_args
logLvl 3 $ "Original: \n" ++
showSep "\n" (map (\t -> showTm i (delab' i t True True)) xs)
-- add an infinite supply of explicit arguments to update the possible
-- cases for (the return type may be variadic, or function type, so
-- there may be more case splitting that the idris_implicits record
-- suggests)
let parg = case lookupCtxt n (idris_implicits i) of
(p : _) ->
p ++ repeat (PExp 0 [] (sMN 0 "gcarg") Placeholder)
_ -> repeat (pexp Placeholder)
let tryclauses = mkClauses parg all_args
logLvl 3 $ show (length tryclauses) ++ " initially to check"
logLvl 2 $ showSep "\n" (map (showTm i) tryclauses)
let new = filter (noMatch i) (nub tryclauses)
logLvl 2 $ show (length new) ++ " clauses to check for impossibility"
logLvl 4 $ "New clauses: \n" ++ showSep "\n" (map (showTm i) new)
-- ++ " from:\n" ++ showSep "\n" (map (showImp True) tryclauses)
return new
-- return (map (\t -> PClause n t [] PImpossible []) new)
where getLHS i term
| (f, args) <- unApply term = map (\t -> delab' i t True True) args
| otherwise = []
pUnApply (PApp _ f args) = map getTm args
pUnApply _ = []
flattenArgs (PApp fc (PApp _ f as) as')
= flattenArgs (PApp fc f (as ++ as'))
flattenArgs t = t
-- Return whether the given clause matches none of the input clauses
-- (xs)
noMatch i tm = all (\x -> case matchClause i (stripUnmatchable i (delab' i x True True)) tm of
Right ms -> False
Left miss -> True) xs
mergePlaceholders :: PTerm -> PTerm -> PTerm
mergePlaceholders x Placeholder = Placeholder
mergePlaceholders (PApp fc f args) (PApp fc' f' args')
= PApp fc' f' (zipWith mergePArg args args')
where mergePArg x y = let xtm = mergePlaceholders (getTm x) (getTm y) in
x { getTm = xtm}
mergePlaceholders x _ = x
mkClauses :: [PArg] -> [[PTerm]] -> [PTerm]
mkClauses parg args
| all (== [Placeholder]) args = []
mkClauses parg args
= do args' <- mkArg args
let tm = PApp fc (PRef fc [] n) (zipWith upd args' parg)
return tm
where
mkArg :: [[PTerm]] -> [[PTerm]]
mkArg [] = return []
mkArg (a : as) = do a' <- a
as' <- mkArg as
return (a':as')
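-- To illustrate what 'genClauses' does, at the Idris source level (a
-- hypothetical example): if the only clause written is for @isNil []@, an
-- extra left-hand side shaped like @isNil (x :: xs)@ is generated and handed
-- to the checker to confirm whether it is impossible.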
-- | Does this error result rule out a case as valid when coverage checking?
validCoverageCase :: Context -> Err -> Bool
validCoverageCase ctxt (CantUnify _ (topx, _) (topy, _) e _ _)
= let topx' = normalise ctxt [] topx
topy' = normalise ctxt [] topy in
not (sameFam topx' topy' || not (validCoverageCase ctxt e))
where sameFam topx topy
= case (unApply topx, unApply topy) of
((P _ x _, _), (P _ y _, _)) -> x == y
_ -> False
validCoverageCase ctxt (CantConvert _ _ _) = False
validCoverageCase ctxt (At _ e) = validCoverageCase ctxt e
validCoverageCase ctxt (Elaborating _ _ _ e) = validCoverageCase ctxt e
validCoverageCase ctxt (ElaboratingArg _ _ _ e) = validCoverageCase ctxt e
validCoverageCase ctxt _ = True
-- | Check whether an error is recoverable in the sense needed for
-- coverage checking.
recoverableCoverage :: Context -> Err -> Bool
recoverableCoverage ctxt (CantUnify r (topx, _) (topy, _) e _ _)
= let topx' = normalise ctxt [] topx
topy' = normalise ctxt [] topy in
r || checkRec topx' topy'
where -- different notion of recoverable than in unification, since we
-- have no metavars -- just looking to see if a constructor is failing
-- to unify with a function that may be reduced later
checkRec (App _ f a) p@(P _ _ _) = checkRec f p
checkRec p@(P _ _ _) (App _ f a) = checkRec p f
checkRec fa@(App _ _ _) fa'@(App _ _ _)
| (f, as) <- unApply fa,
(f', as') <- unApply fa'
= if (length as /= length as')
then checkRec f f'
else checkRec f f' && and (zipWith checkRec as as')
checkRec (P xt x _) (P yt y _) = x == y || ntRec xt yt
checkRec _ _ = False
ntRec x y | Ref <- x = True
| Ref <- y = True
| (Bound, Bound) <- (x, y) = True
| otherwise = False -- name is different, unrecoverable
recoverableCoverage ctxt (At _ e) = recoverableCoverage ctxt e
recoverableCoverage ctxt (Elaborating _ _ _ e) = recoverableCoverage ctxt e
recoverableCoverage ctxt (ElaboratingArg _ _ _ e) = recoverableCoverage ctxt e
recoverableCoverage _ _ = False
-- FIXME: Just look for which one is the deepest, then generate all
-- possibilities up to that depth.
-- This and below issues for this function are tracked as Issue #1741 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1741
genAll :: IState -> [PTerm] -> [PTerm]
genAll i args
= case filter (/=Placeholder) $ fnub (concatMap otherPats (fnub args)) of
[] -> [Placeholder]
xs -> inventConsts xs
where
-- if they're constants, invent a new one to make sure that
-- constants which are not explicitly handled are covered
inventConsts cs@(PConstant fc c : _) = map (PConstant NoFC) (ic' (mapMaybe getConst cs))
where getConst (PConstant _ c) = Just c
getConst _ = Nothing
inventConsts xs = xs
-- try constants until they're not in the list.
-- FIXME: It is, of course, possible that someone has enumerated all
-- the constants and matched on them (maybe in generated code) and this
-- will be really slow. This is sufficiently unlikely that we won't
-- worry for now...
ic' xs@(I _ : _) = firstMissing xs (lotsOfNums I)
ic' xs@(BI _ : _) = firstMissing xs (lotsOfNums BI)
ic' xs@(Fl _ : _) = firstMissing xs (lotsOfNums Fl)
ic' xs@(B8 _ : _) = firstMissing xs (lotsOfNums B8)
ic' xs@(B16 _ : _) = firstMissing xs (lotsOfNums B16)
ic' xs@(B32 _ : _) = firstMissing xs (lotsOfNums B32)
ic' xs@(B64 _ : _) = firstMissing xs (lotsOfNums B64)
ic' xs@(Ch _ : _) = firstMissing xs lotsOfChars
ic' xs@(Str _ : _) = firstMissing xs lotsOfStrings
-- TODO: Bit vectors
-- The rest are types with only one case
ic' xs = xs
firstMissing cs (x : xs) | x `elem` cs = firstMissing cs xs
| otherwise = x : cs
lotsOfNums t = map t [0..]
lotsOfChars = map Ch ['a'..]
lotsOfStrings = map Str (map (("some string " ++).show) [1..])
nubMap f acc [] = acc
nubMap f acc (x : xs) = nubMap f (fnub' acc (f x)) xs
otherPats :: PTerm -> [PTerm]
otherPats o@(PRef fc hl n) = ops fc n [] o
otherPats o@(PApp _ (PRef fc hl n) xs) = ops fc n xs o
otherPats o@(PPair fc hls _ l r)
= ops fc pairCon
([pimp (sUN "A") Placeholder True,
pimp (sUN "B") Placeholder True] ++
[pexp l, pexp r]) o
otherPats o@(PDPair fc hls p t _ v)
= ops fc sigmaCon
([pimp (sUN "a") Placeholder True,
pimp (sUN "P") Placeholder True] ++
[pexp t,pexp v]) o
otherPats o@(PConstant _ c) = inventConsts [o] -- return o
otherPats arg = return Placeholder
ops fc n xs o
| (TyDecl c@(DCon _ arity _) ty : _) <- lookupDef n (tt_ctxt i)
= do xs' <- mapM otherPats (map getExpTm xs)
let p = resugar (PApp fc (PRef fc [] n) (zipWith upd xs' xs))
let tyn = getTy n (tt_ctxt i)
case lookupCtxt tyn (idris_datatypes i) of
(TI ns _ _ _ _ : _) -> p : map (mkPat fc) (ns \\ [n])
_ -> [p]
ops fc n arg o = return Placeholder
getExpTm (PImp _ True _ _ _) = Placeholder -- machine inferred, no point!
getExpTm t = getTm t
-- put it back to its original form
resugar (PApp _ (PRef fc hl n) [_,_,t,v])
| n == sigmaCon
= PDPair fc [] TypeOrTerm (getTm t) Placeholder (getTm v)
resugar (PApp _ (PRef fc hl n) [_,_,l,r])
| n == pairCon
= PPair fc [] IsTerm (getTm l) (getTm r)
resugar t = t
dropForce force (x : xs) i | i `elem` force
= upd Placeholder x : dropForce force xs (i + 1)
dropForce force (x : xs) i = x : dropForce force xs (i + 1)
dropForce _ [] _ = []
getTy n ctxt = case lookupTy n ctxt of
(t : _) -> case unApply (getRetTy t) of
(P _ tyn _, _) -> tyn
x -> error $ "Can't happen getTy 1 " ++ show (n, x)
_ -> error "Can't happen getTy 2"
mkPat fc x = case lookupCtxt x (idris_implicits i) of
(pargs : _)
-> PApp fc (PRef fc [] x) (map (upd Placeholder) pargs)
_ -> error "Can't happen - genAll"
fnub :: [PTerm] -> [PTerm]
fnub xs = fnub' [] xs
fnub' :: [PTerm] -> [PTerm] -> [PTerm]
fnub' acc (x : xs) | x `qelem` acc = fnub' acc (filter (not.(quickEq x)) xs)
| otherwise = fnub' (x : acc) xs
fnub' acc [] = acc
-- quick check for constructor equality
quickEq :: PTerm -> PTerm -> Bool
quickEq (PConstant _ n) (PConstant _ n') = n == n'
quickEq (PRef _ _ n) (PRef _ _ n') = n == n'
quickEq (PApp _ t as) (PApp _ t' as')
| length as == length as'
= quickEq t t' && and (zipWith quickEq (map getTm as) (map getTm as'))
quickEq Placeholder Placeholder = True
quickEq x y = False
qelem :: PTerm -> [PTerm] -> Bool
qelem x [] = False
qelem x (y : ys) | x `quickEq` y = True
| otherwise = qelem x ys
upd :: t -> PArg' t -> PArg' t
upd p' p = p { getTm = p' }
-- Check whether a function and all its descendants cover all cases (partial is
-- okay, as long as it's due to recursion)
checkAllCovering :: FC -> [Name] -> Name -> Name -> Idris ()
checkAllCovering fc done top n | not (n `elem` done)
= do i <- get
case lookupTotal n (tt_ctxt i) of
[tot@(Partial NotCovering)] ->
do let msg = show top ++ " is " ++ show tot ++ " due to " ++ show n
putIState i { idris_totcheckfail = (fc, msg) : idris_totcheckfail i }
addIBC (IBCTotCheckErr fc msg)
[Partial _] ->
case lookupCtxt n (idris_callgraph i) of
[cg] -> mapM_ (checkAllCovering fc (n : done) top)
(map fst (calls cg))
_ -> return ()
x -> return () -- stop if total
checkAllCovering _ _ _ _ = return ()
-- Check if, in a given group of type declarations mut_ns,
-- the constructor cn : ty is strictly positive,
-- and update the context accordingly
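-- (For example, a constructor field of type @Bad -> Int@ for the type @Bad@
-- currently being defined makes @Bad@ fail this check.)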
checkPositive :: [Name] -> (Name, Type) -> Idris Totality
checkPositive mut_ns (cn, ty')
= do let ty = delazy' True ty'
let p = cp ty
i <- getIState
let tot = if p then Total (args ty) else Partial NotPositive
let ctxt' = setTotal cn tot (tt_ctxt i)
putIState (i { tt_ctxt = ctxt' })
logLvl 5 $ "Constructor " ++ show cn ++ " is " ++ show tot ++ " with " ++ show mut_ns
addIBC (IBCTotal cn tot)
return tot
where
args t = [0..length (getArgTys t)-1]
cp (Bind n (Pi _ aty _) sc) = posArg aty && cp sc
cp t | (P _ n' _, args) <- unApply t,
n' `elem` mut_ns = all noRec args
cp _ = True
posArg (Bind _ (Pi _ nty _) sc)
| (P _ n' _, args) <- unApply nty
= n' `notElem` mut_ns && all noRec args && posArg sc
posArg t | (P _ n' _, args) <- unApply t,
n' `elem` mut_ns = all noRec args
posArg _ = True
noRec arg = all (\x -> x `notElem` mut_ns) (allTTNames arg)
calcProd :: IState -> FC -> Name -> [([Name], Term, Term)] -> Idris Totality
calcProd i fc topn pats
= cp topn pats []
where
-- every application of n must be in an argument of a coinductive
-- constructor, in every function reachable from here in the
-- call graph.
cp n pats done = do patsprod <- mapM (prodRec n done) pats
if (and patsprod)
then return Productive
else return (Partial NotProductive)
prodRec :: Name -> [Name] -> ([Name], Term, Term) -> Idris Bool
prodRec n done _ | n `elem` done = return True
prodRec n done (_, _, tm) = prod n done False (delazy' True tm)
prod :: Name -> [Name] -> Bool -> Term -> Idris Bool
prod n done ok ap@(App _ _ _)
| (P nt f _, args) <- unApply ap
= do recOK <- checkProdRec (n:done) f
let ctxt = tt_ctxt i
let [ty] = lookupTy f ctxt -- must exist!
let co = cotype nt f ty in
if (not recOK) then return False else
if f == topn
then do argsprod <- mapM (prod n done co) args
return (and (ok : argsprod) )
else do argsprod <- mapM (prod n done co) args
return (and argsprod)
prod n done ok (App _ f a) = liftM2 (&&) (prod n done False f)
(prod n done False a)
-- for a let binding, check the bound value and then the scope
prod n done ok (Bind _ (Let t v) sc)
= liftM2 (&&) (prod n done False v) (prod n done ok sc)
prod n done ok (Bind _ b sc) = prod n done ok sc
prod n done ok t = return True
checkProdRec :: [Name] -> Name -> Idris Bool
checkProdRec done f
= case lookupCtxt f (idris_patdefs i) of
[(def, _)] -> do ok <- mapM (prodRec f done) def
return (and ok)
_ -> return True -- defined elsewhere, can't call topn
cotype (DCon _ _ _) n ty
| (P _ t _, _) <- unApply (getRetTy ty)
= case lookupCtxt t (idris_datatypes i) of
[TI _ True _ _ _] -> True
_ -> False
cotype nt n ty = False
-- | Calculate the totality of a function from its patterns.
-- Either follow the size change graph (if inductive) or check for
-- productivity (if coinductive)
calcTotality :: FC -> Name -> [([Name], Term, Term)] -> Idris Totality
calcTotality fc n pats
= do i <- getIState
let opts = case lookupCtxt n (idris_flags i) of
[fs] -> fs
_ -> []
case mapMaybe (checkLHS i) (map (\ (_, l, r) -> l) pats) of
(failure : _) -> return failure
_ -> checkSizeChange n
where
checkLHS i (P _ fn _)
= case lookupTotal fn (tt_ctxt i) of
[Partial _] -> return (Partial (Other [fn]))
_ -> Nothing
checkLHS i (App _ f a) = mplus (checkLHS i f) (checkLHS i a)
checkLHS _ _ = Nothing
checkTotality :: [Name] -> FC -> Name -> Idris Totality
checkTotality path fc n
| n `elem` path = return (Partial (Mutual (n : path)))
| otherwise = do
t <- getTotality n
i <- getIState
ctxt' <- do ctxt <- getContext
tclift $ simplifyCasedef n (getErasureInfo i) ctxt
setContext ctxt'
ctxt <- getContext
i <- getIState
let opts = case lookupCtxt n (idris_flags i) of
[fs] -> fs
_ -> []
t' <- case t of
Unchecked ->
case lookupDef n ctxt of
[CaseOp _ _ _ _ pats _] ->
do t' <- if AssertTotal `elem` opts
then return $ Total []
else calcTotality fc n pats
setTotality n t'
addIBC (IBCTotal n t')
return t'
[TyDecl (DCon _ _ _) ty] ->
case unApply (getRetTy ty) of
(P _ tyn _, _) -> do
let ms = case lookupCtxt tyn (idris_datatypes i) of
[TI _ _ _ _ xs@(_:_)] -> xs
ts -> [tyn]
checkPositive ms (n, ty)
_-> return $ Total []
_ -> return $ Total []
x -> return x
case t' of
Total _ -> return t'
Productive -> return t'
e -> do w <- cmdOptType WarnPartial
if TotalFn `elem` opts
then do totalityError t'; return t'
else do when (w && not (PartialFn `elem` opts)) $
warnPartial n t'
return t'
where
totalityError t = do i <- getIState
let msg = show n ++ " is " ++ show t
putIState i { idris_totcheckfail = (fc, msg) : idris_totcheckfail i}
addIBC (IBCTotCheckErr fc msg)
warnPartial n t
= do i <- getIState
case lookupDef n (tt_ctxt i) of
[x] -> do
iWarn fc . pprintErr i . Msg $ "Warning - " ++ show n ++ " is " ++ show t
-- ++ "\n" ++ show x
-- let cg = lookupCtxtName Nothing n (idris_callgraph i)
-- iputStrLn (show cg)
checkDeclTotality :: (FC, Name) -> Idris Totality
checkDeclTotality (fc, n)
= do logLvl 2 $ "Checking " ++ show n ++ " for totality"
-- buildSCG (fc, n)
-- logLvl 2 $ "Built SCG"
i <- getIState
let opts = case lookupCtxt n (idris_flags i) of
[fs] -> fs
_ -> []
when (CoveringFn `elem` opts) $ checkAllCovering fc [] n n
t <- checkTotality [] fc n
case t of
-- if it's not total, it can't reduce, to keep
-- typechecking decidable
p@(Partial _) -> do setAccessibility n Frozen
addIBC (IBCAccess n Frozen)
logLvl 5 $ "HIDDEN: "
++ show n ++ show p
_ -> return ()
return t
-- | Calculate the size change graph for this definition
--
-- SCG for a function f consists of a list of:
-- (g, [(a1, sizechange1), (a2, sizechange2), ..., (an, sizechangen)])
--
-- where g is a function called
-- a1 ... an are the arguments of f in positions 1..n of g
-- sizechange1 ... sizechange2 is how their size has changed wrt the input
-- to f
-- Nothing, if the argument is unrelated to the input
buildSCG :: (FC, Name) -> Idris ()
buildSCG (_, n) = do
ist <- getIState
case lookupCtxt n (idris_callgraph ist) of
[cg] -> case lookupDefExact n (tt_ctxt ist) of
Just (CaseOp _ _ _ pats _ cd) ->
let (args, sc) = cases_totcheck cd in
do logLvl 2 $ "Building SCG for " ++ show n ++ " from\n"
++ show pats ++ "\n" ++ show sc
let newscg = buildSCG' ist (rights pats) args
logLvl 5 $ "SCG is: " ++ show newscg
addToCG n ( cg { scg = newscg } )
[] -> logLvl 5 $ "Could not build SCG for " ++ show n ++ "\n"
x -> error $ "buildSCG: " ++ show (n, x)
delazy = delazy' False -- not lazy codata
delazy' all t@(App _ f a)
| (P _ (UN l) _, [_, _, arg]) <- unApply t,
l == txt "Force" = delazy' all arg
| (P _ (UN l) _, [P _ (UN lty) _, _, arg]) <- unApply t,
l == txt "Delay" && (all || lty == txt "LazyEval") = delazy arg
| (P _ (UN l) _, [P _ (UN lty) _, arg]) <- unApply t,
l == txt "Lazy'" && (all || lty == txt "LazyEval") = delazy' all arg
delazy' all (App s f a) = App s (delazy' all f) (delazy' all a)
delazy' all (Bind n b sc) = Bind n (fmap (delazy' all) b) (delazy' all sc)
delazy' all t = t
data Guardedness = Toplevel | Unguarded | Guarded
deriving Show
buildSCG' :: IState -> [(Term, Term)] -> [Name] -> [SCGEntry]
buildSCG' ist pats args = nub $ concatMap scgPat pats where
scgPat (lhs, rhs) = let lhs' = delazy lhs
rhs' = delazy rhs
(f, pargs) = unApply (dePat lhs') in
findCalls Toplevel (dePat rhs') (patvars lhs') pargs
findCalls guarded ap@(App _ f a) pvs pargs
-- under a call to "assert_total", don't do any checking, just believe
-- that it is total.
| (P _ (UN at) _, [_, _]) <- unApply ap,
at == txt "assert_total" = []
-- under a call to "Delay LazyCodata", don't do any checking of the
-- immediate call, as long as the call is guarded.
-- Then check its arguments
| (P _ (UN del) _, [_,_,arg]) <- unApply ap,
Guarded <- guarded,
del == txt "Delay"
= let (capp, args) = unApply arg in
concatMap (\x -> findCalls guarded x pvs pargs) args
| (P _ n _, args) <- unApply ap
= let nguarded = case guarded of
Unguarded -> Unguarded
_ -> if isConName n (tt_ctxt ist)
then Guarded
else Unguarded in
mkChange n args pargs ++
concatMap (\x -> findCalls nguarded x pvs pargs) args
findCalls guarded (App _ f a) pvs pargs
= findCalls Unguarded f pvs pargs ++ findCalls Unguarded a pvs pargs
findCalls guarded (Bind n (Let t v) e) pvs pargs
= findCalls Unguarded t pvs pargs ++
findCalls Unguarded v pvs pargs ++ findCalls guarded e (n : pvs) pargs
findCalls guarded (Bind n t e) pvs pargs
= findCalls Unguarded (binderTy t) pvs pargs ++
findCalls guarded e (n : pvs) pargs
findCalls guarded (P _ f _ ) pvs pargs
| not (f `elem` pvs) = [(f, [])]
findCalls _ _ _ _ = []
expandToArity n args
= case lookupTy n (tt_ctxt ist) of
[ty] -> expand 0 (normalise (tt_ctxt ist) [] ty) args
_ -> args
where expand i (Bind n (Pi _ _ _) sc) (x : xs) = x : expand (i + 1) sc xs
expand i (Bind n (Pi _ _ _) sc) [] = Just (i, Same) : expand (i + 1) sc []
expand i _ xs = xs
mkChange n args pargs = [(n, expandToArity n (sizes args))]
where
sizes [] = []
sizes (a : as) = checkSize a pargs 0 : sizes as
-- find which argument in pargs <a> is smaller than, if any
checkSize a (p : ps) i
| a == p = Just (i, Same)
| (P _ (UN as) _, [_,_,arg,_]) <- unApply a,
as == txt "assert_smaller" && arg == p
= Just (i, Smaller)
| smaller Nothing a (p, Nothing) = Just (i, Smaller)
| otherwise = checkSize a ps (i + 1)
checkSize a [] i = Nothing
-- the smaller thing we find must be the same type as <a>, and
-- not be coinductive - so carry the type of the constructor we've
-- gone under.
smaller (Just tyn) a (t, Just tyt)
| a == t = isInductive (fst (unApply (getRetTy tyn)))
(fst (unApply (getRetTy tyt)))
smaller ty a (ap@(App _ f s), _)
| (P (DCon _ _ _) n _, args) <- unApply ap
= let tyn = getType n in
any (smaller (ty `mplus` Just tyn) a)
(zip args (map toJust (getArgTys tyn)))
-- check higher order recursive arguments
smaller ty (App _ f s) a = smaller ty f a
smaller _ _ _ = False
toJust (n, t) = Just t
getType n = case lookupTy n (tt_ctxt ist) of
[ty] -> delazy ty -- must exist
isInductive (P _ nty _) (P _ nty' _) =
let co = case lookupCtxt nty (idris_datatypes ist) of
[TI _ x _ _ _] -> x
_ -> False in
nty == nty' && not co
isInductive _ _ = False
dePat (Bind x (PVar ty) sc) = dePat (instantiate (P Bound x ty) sc)
dePat t = t
patvars (Bind x (PVar _) sc) = x : patvars sc
patvars _ = []
checkSizeChange :: Name -> Idris Totality
checkSizeChange n = do
ist <- getIState
case lookupCtxt n (idris_callgraph ist) of
[cg] -> do let ms = mkMultiPaths ist [] (scg cg)
logLvl 5 ("Multipath for " ++ show n ++ ":\n" ++
"from " ++ show (scg cg) ++ "\n" ++
show (length ms) ++ "\n" ++
showSep "\n" (map show ms))
logLvl 6 (show cg)
-- if every multipath has an infinitely descending
-- thread, then the function terminates
-- also need to check that the functions called are all total
-- (Unchecked is okay as we'll spot problems here)
let tot = map (checkMP ist (getArity ist n)) ms
logLvl 4 $ "Generated " ++ show (length tot) ++ " paths"
logLvl 6 $ "Paths for " ++ show n ++ " yield " ++ (show tot)
return (noPartial tot)
[] -> do logLvl 5 $ "No paths for " ++ show n
return Unchecked
where getArity ist n
= case lookupTy n (tt_ctxt ist) of
[ty] -> arity (normalise (tt_ctxt ist) [] ty)
_ -> error "Can't happen: checkSizeChange.getArity"
type MultiPath = [SCGEntry]
mkMultiPaths :: IState -> MultiPath -> [SCGEntry] -> [MultiPath]
mkMultiPaths ist path [] = [reverse path]
mkMultiPaths ist path cg
= concat (map extend cg)
where extend (nextf, args)
| (nextf, args) `elem` path = [ reverse ((nextf, args) : path) ]
| [Unchecked] <- lookupTotal nextf (tt_ctxt ist)
= case lookupCtxt nextf (idris_callgraph ist) of
[ncg] -> mkMultiPaths ist ((nextf, args) : path) (scg ncg)
_ -> [ reverse ((nextf, args) : path) ]
| otherwise = [ reverse ((nextf, args) : path) ]
-- do (nextf, args) <- cg
-- if ((nextf, args) `elem` path)
-- then return (reverse ((nextf, args) : path))
-- else case lookupCtxt nextf (idris_callgraph ist) of
-- [ncg] -> mkMultiPaths ist ((nextf, args) : path) (scg ncg)
-- _ -> return (reverse ((nextf, args) : path))
-- If any route along the multipath leads to infinite descent, we're fine.
-- Try a route beginning with every argument.
-- If we reach a point we've been to before, but with a smaller value,
-- that means there is an infinitely descending path from that argument.
checkMP :: IState -> Int -> MultiPath -> Totality
checkMP ist i mp = if i > 0
then let paths = (map (tryPath 0 [] mp) [0..i-1]) in
-- trace ("Paths " ++ show paths) $
collapse paths
else tryPath 0 [] mp 0
where
tryPath' d path mp arg
= let res = tryPath d path mp arg in
trace (show mp ++ "\n" ++ show arg ++ " " ++ show res) res
tryPath :: Int -> [((SCGEntry, Int), Int)] -> MultiPath -> Int -> Totality
tryPath desc path [] _ = Total []
-- tryPath desc path ((UN "believe_me", _) : _) arg
-- = Partial BelieveMe
-- if we get to a constructor, it's fine as long as it's strictly positive
tryPath desc path ((f, _) : es) arg
| [TyDecl (DCon _ _ _) _] <- lookupDef f (tt_ctxt ist)
= case lookupTotal f (tt_ctxt ist) of
[Total _] -> Unchecked -- okay so far
[Partial _] -> Partial (Other [f])
x -> error $ "CAN'T HAPPEN: " ++ show x ++ " for " ++ show f
| [TyDecl (TCon _ _) _] <- lookupDef f (tt_ctxt ist)
= Total []
tryPath desc path (e@(f, args) : es) arg
| e `elem` es && allNothing args = Partial (Mutual [f])
tryPath desc path (e@(f, nextargs) : es) arg
| Just d <- lookup (e, arg) path
= if desc > 0
then -- trace ("Descent " ++ show (desc - d) ++ " "
-- ++ show (path, e)) $
Total []
else Partial (Mutual (map (fst . fst . fst) path ++ [f]))
| e `elem` map (fst . fst) path
&& not (f `elem` map fst es)
= Partial (Mutual (map (fst . fst . fst) path ++ [f]))
| [Unchecked] <- lookupTotal f (tt_ctxt ist) =
let argspos = case collapseNothing (zip nextargs [0..]) of
[] -> [(Nothing, 0)]
x -> x
-- trace (show (argspos, nextargs, path)) $
pathres =
do (a, pos) <- argspos
case a of
Nothing -> -- don't know, but if the
-- rest definitely terminates without
-- any cycles with route so far,
-- then we might yet be total
case collapse (map (tryPath 0 (((e, arg), 0):path) es)
[0..length nextargs - 1]) of
Total _ -> return Unchecked
x -> return x
Just (nextarg, sc) ->
if nextarg == arg then
case sc of
Same -> return $ tryPath desc (((e, arg), desc) : path)
es pos
Smaller -> return $ tryPath (desc+1)
(((e, arg), desc) : path)
es
pos
_ -> trace ("Shouldn't happen " ++ show e) $
return (Partial Itself)
else return Unchecked in
-- trace (show (desc, argspos, path, es, pathres)) $
collapse' Unchecked pathres
| [Total a] <- lookupTotal f (tt_ctxt ist) = Total a
| [Partial _] <- lookupTotal f (tt_ctxt ist) = Partial (Other [f])
| otherwise = Unchecked
allNothing :: [Maybe a] -> Bool
allNothing xs = null (collapseNothing (zip xs [0..]))
collapseNothing :: [(Maybe a, b)] -> [(Maybe a, b)]
collapseNothing ((Nothing, _) : xs)
= filter (\ (x, _) -> case x of
Nothing -> False
_ -> True) xs
collapseNothing (x : xs) = x : collapseNothing xs
collapseNothing [] = []
noPartial :: [Totality] -> Totality
noPartial (Partial p : xs) = Partial p
noPartial (_ : xs) = noPartial xs
noPartial [] = Total []
collapse :: [Totality] -> Totality
collapse xs = collapse' Unchecked xs
collapse' def (Total r : xs) = Total r
collapse' def (Unchecked : xs) = collapse' def xs
collapse' def (d : xs) = collapse' d xs
-- collapse' Unchecked [] = Total []
collapse' def [] = def
| mrmonday/Idris-dev | src/Idris/Coverage.hs | bsd-3-clause | 34,337 | 119 | 30 | 13,141 | 11,679 | 5,871 | 5,808 | 586 | 35 |
{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies, TypeOperators,
UndecidableInstances #-}
module T9063 where
import Data.Type.Equality
import Data.Proxy
-- reproduces an issue where type variables in the axiom are in
-- non-deterministic order
class kproxy ~ 'KProxy => PEq (kproxy :: KProxy a) where
type FunnyEq (x :: a) (y :: a) :: Bool
type FunnyEq x y = x == y
instance PEq ('KProxy :: KProxy Bool)
foo :: Proxy (FunnyEq True True) -> Proxy (True == True)
foo = id
| ezyang/ghc | testsuite/tests/determinism/determ013/A.hs | bsd-3-clause | 492 | 1 | 8 | 102 | 136 | 76 | 60 | 11 | 1 |
{-# OPTIONS_GHC -XEmptyDataDecls #-}
module Foo where
data Foo
| urbanslug/ghc | testsuite/tests/parser/should_compile/read048.hs | bsd-3-clause | 67 | 0 | 3 | 13 | 8 | 6 | 2 | -1 | -1 |
-- | A module to interface with Haskell compilers.
--
-- You can dynamically run and load code to a running application.
--
-- While the exposed API of this module attempts to be compiler agnostic, no
-- other Haskell compiler but GHC has the needed functionality to implement
-- this. For now.
--
{-# LANGUAGE AutoDeriveTypeable, RankNTypes, OverloadedStrings #-}
module Compiler.HS ( evalExpression, loadCode ) where
import Data.Dynamic
import Data.IORef
import Data.Foldable
import Data.Time.Clock
import Data.Monoid
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import Control.Monad
import Control.Concurrent
import Control.Exception
import System.IO.Unsafe
import System.Temporary
import System.Posix.Files
import GHC
import GHC.Paths
import DriverPhases
import MonadUtils hiding ( foldlM )
import Exception
import StringBuffer
import Name
ghcThreadCommunication :: forall a. MVar (Ghc a, MVar (Either SomeException a))
ghcThreadCommunication = unsafePerformIO newEmptyMVar
{-# NOINLINE ghcThreadCommunication #-}
ghcThreadId :: IORef (Maybe (Maybe ThreadId))
ghcThreadId = unsafePerformIO $ newIORef Nothing
{-# NOINLINE ghcThreadId #-}
ensureGHCThreadIsAlive :: IO ()
ensureGHCThreadIsAlive = mask_ $ do
should_launch_it <- atomicModifyIORef' ghcThreadId $ \old ->
case old of
Nothing -> ( Just Nothing, True )
_ -> ( old, False )
when should_launch_it $ do
tid <- forkIOWithUnmask $ \unmask -> unmask ghcThread
atomicModifyIORef' ghcThreadId $ \_ ->
( Just (Just tid), () )
inGHCMonad :: Ghc a -> IO a
inGHCMonad action = do
ensureGHCThreadIsAlive
result_mvar <- newEmptyMVar
putMVar ghcThreadCommunication (action, result_mvar)
result <- takeMVar result_mvar
case result of
Left exc -> throwIO exc
Right result -> return result
ghcThread :: IO ()
ghcThread = forever $ do
runGhc (Just libdir) $ do
dflags <- getSessionDynFlags
void $ setSessionDynFlags dflags {
hscTarget = HscInterpreted
, ghcLink = LinkInMemory }
defaultCleanupHandler dflags $ do
-- TODO:
-- We create a new GHC session every time we do something here. It's a
-- bit inefficient. The problem is that if we don't do that we leak a
-- bit of memory. So the TODO item is to find out (by profiling?) where
-- memory is being retained. Just write 'forever $' here to switch to
-- a single GHC session being used.
(action, result_mvar) <- liftIO $ takeMVar ghcThreadCommunication
result <- gtry action
liftIO $ putMVar result_mvar result
-- | Evaluates a Haskell expression and returns the result as a `Dynamic`.
--
-- The expression can do anything. It is not safe to run untrusted code.
evalExpression :: [T.Text] -- ^ Modules to expose.
-> T.Text -- ^ The expression itself.
-> IO Dynamic
evalExpression exposed_modules expr = inGHCMonad $ do
setTargets []
setContext $ fmap (\x -> IIDecl $ simpleImportDecl
(mkModuleName $ T.unpack x)) exposed_modules
result <- dynCompileExpr (T.unpack expr)
setContext []
return result
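-- A minimal usage sketch (illustrative only: the exposed module list and the
-- expression are made up, and the result is decoded with 'fromDynamic' from
-- Data.Dynamic, which is already imported above):
--
-- > example :: IO (Maybe Int)
-- > example = do
-- >   dyn <- evalExpression ["Prelude"] "40 + 2 :: Int"
-- >   return (fromDynamic dyn)  -- expected: Just 42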
-- | Compiles and interprets Haskell source code.
--
-- This is clean; no files will be left anywhere on the disk. However,
-- temporary files are created and used. This means the program needs to have
-- write access to the current directory.
--
-- Only the requested top-level definitions from the code are used.
loadCode :: T.Text -- ^ Name of the source code module.
         -> S.Set T.Text -- ^ Names of the top-level definitions to return.
-> T.Text -- ^ Source code
-> IO (M.Map T.Text Dynamic)
loadCode module_name top_levels src = do
withTemporaryFile $ \handle fpath -> inGHCMonad $ do
now <- liftIO getCurrentTime
liftIO $ T.hPutStr handle src
-- HACK: remove the GHC generated .hi and .o files at the end
flip gfinally (liftIO $ do
void $ etry $ removeLink (fpath <> ".hi")
void $ etry $ removeLink (fpath <> ".o")) $ do
let mod_name = mkModuleName $ T.unpack module_name
setTargets [ Target { targetId = TargetFile fpath (Just $ Cpp HsSrcFile)
, targetAllowObjCode = False
, targetContents =
Just ( stringToStringBuffer $ T.unpack src, now ) } ]
void $ load LoadAllTargets
setContext [ IIDecl $ (simpleImportDecl mod_name) {
ideclQualified = True
, ideclAs = Just $ mkModuleName "Ex"
} ]
mod <- findModule mod_name Nothing
modinfo <- fromJust <$> getModuleInfo mod
let names = modInfoExports modinfo
let exported_names = S.fromList (fmap (T.pack . getOccString) names)
results <- foldlM (\map top_level ->
if S.member top_level exported_names
then M.insert top_level <$>
dynCompileExpr ("Ex." <> T.unpack top_level) <*>
pure map
else pure map)
M.empty top_levels
setContext [ ]
setTargets [ ]
void $ load LoadAllTargets
return results
where
etry :: IO a -> IO (Either SomeException a)
etry = try
| Noeda/dynamically-loaded-haskell | Compiler/HS.hs | mit | 5,587 | 4 | 17 | 1,627 | 1,169 | 611 | 558 | 108 | 2 |
----------------------------------------------
-- CIS 194, Homework 3
-- Author: Glenn R. Fisher
-- Date: April 1, 2016
----------------------------------------------
module Golf where
----------------------------------------------
-- 1. Hopscotch
----------------------------------------------
-- Transform a list into a list of lists, where the nth list of the output
-- contains every nth element from the input list.
--
-- > skips "ABCD" == ["ABCD", "BD", "C", "D"]
-- > skips "hello!" == ["hello!", "el!", "l!", "l", "o", "!"]
-- > skips [1] == [[1]]
-- > skips [True, False] == [[True, False], [False]]
-- > skips [] == []
skips :: [a] -> [[a]]
skips xs = [each i xs | i <- [1..length xs]]
-- Take every nth element of the input list.
--
-- > each 1 "ABCD" == "ABCD"
-- > each 2 "ABCD" == "BD"
-- > each 3 "ABCD" == "C"
-- > each 4 "ABCD" == "D"
-- > each 5 "ABCD" == ""
each :: Int -> [a] -> [a]
each n xs =
case drop (n-1) xs of
(y:ys) -> y : each n ys
[] -> []
----------------------------------------------
-- 2. Local Maxima
----------------------------------------------
-- Find all the local maxima in the input list. A local maximum is an
-- element of the list that is strictly greater than both the elements
-- immediately before and after it.
--
-- > localMaxima [2,9,5,6,1] == [9, 6]
-- > localMaxima [2,3,4,1,5] == [4]
-- > localMaxima [1,2,3,4,5] == []
localMaxima :: [Integer] -> [Integer]
localMaxima (x:y:z:zs)
| x < y && y > z = y : localMaxima (y:z:zs)
| otherwise = localMaxima (y:z:zs)
localMaxima _ = []
----------------------------------------------
-- 3. Histogram
----------------------------------------------
-- Count the frequency of the digits 0 through 9 in the input list.
--
-- > frequency [] == [0,0,0,0,0,0,0,0,0,0]
-- > frequency [1,2,3] == [0,1,1,1,0,0,0,0,0,0]
-- > frequency [1,1,1] == [0,3,0,0,0,0,0,0,0,0]
frequency :: [Integer] -> [Int]
frequency xs = map (\n -> length (filter (== n) xs)) [0..9]
-- Convert a frequency list into a histogram line for the given y value.
--
-- > line [1,0,3,0,5,0,3,0,1,0] 6 == " "
-- > line [1,0,3,0,5,0,3,0,1,0] 5 == " * "
-- > line [1,0,3,0,5,0,3,0,1,0] 2 == " * * * "
-- > line [1,0,3,0,5,0,3,0,1,0] 1 == "* * * * * "
-- > line [1,0,3,0,5,0,3,0,1,0] 0 == "**********"
line :: [Int] -> Int -> String
line xs y = map (\x -> if x >= y then '*' else ' ') xs
-- Construct a vertical histogram representing the frequency of
-- each digit (0 through 9, inclusive) in the input list.
-- (Use `putStr` to render the histogram in the console.)
--
-- > putStr (histogram []) ==
-- ==========
-- 0123456789
-- > putStr (histogram [1,1,1,5]) ==
-- *
-- *
-- * *
-- ==========
-- 0123456789
histogram :: [Integer] -> String
histogram xs = rows ++ "==========\n0123456789\n"
where f = frequency xs
m = maximum f
rows = unlines (map (line f) [m, m-1..1])
| glennrfisher/cis194-haskell | 03 Recursion and Polymorphism/Golf.hs | mit | 2,944 | 2 | 12 | 617 | 466 | 279 | 187 | 22 | 2 |
import Data.List
-- ghci > :m + Data.List
numUniques :: (Eq a) => [a] -> Int
numUniques = length . nub
-- selective import
-- import Data.List (nub, sort)
-- import Data.List hiding (nub)
-- import qualified Data.Map
-- Data.Map.filter ...
-- import qualified Data.Map as M
-- M.filter ...
| v0lkan/learning-haskell | session-archive/009-import-modules.hs | mit | 297 | 0 | 8 | 58 | 50 | 29 | 21 | 3 | 1 |
{-# htermination listToFM :: Ord a => [([a],b)] -> FiniteMap [a] b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_listToFM_4.hs | mit | 88 | 0 | 3 | 15 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SQLTransaction
(js_executeSql, executeSql, SQLTransaction, castToSQLTransaction,
gTypeSQLTransaction)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe
"$1[\"executeSql\"]($2, $3, $4, $5)" js_executeSql ::
JSRef SQLTransaction ->
JSString ->
JSRef ObjectArray ->
JSRef SQLStatementCallback ->
JSRef SQLStatementErrorCallback -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SQLTransaction.executeSql Mozilla SQLTransaction.executeSql documentation>
executeSql ::
(MonadIO m, ToJSString sqlStatement, IsObjectArray arguments) =>
SQLTransaction ->
sqlStatement ->
Maybe arguments ->
Maybe SQLStatementCallback ->
Maybe SQLStatementErrorCallback -> m ()
executeSql self sqlStatement arguments callback errorCallback
= liftIO
(js_executeSql (unSQLTransaction self) (toJSString sqlStatement)
(maybe jsNull (unObjectArray . toObjectArray) arguments)
(maybe jsNull pToJSRef callback)
(maybe jsNull pToJSRef errorCallback)) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/SQLTransaction.hs | mit | 1,992 | 14 | 11 | 390 | 477 | 288 | 189 | 38 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# Language OverloadedStrings #-}
module Unison.Test.BlockStore where
import Control.Concurrent (forkIO, ThreadId)
import Control.Monad (zipWithM_, foldM, when, replicateM, (>=>))
import Data.ByteString.Char8 (ByteString, pack, unpack)
import Data.Maybe (fromMaybe, catMaybes, isNothing)
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck
import Unison.Runtime.Address
import qualified Control.Concurrent.MVar as MVar
import qualified Data.ByteString as B
import qualified Test.QuickCheck.Monadic as QCM
import qualified Test.Tasty.HUnit as HU
import qualified Unison.BlockStore as BS
import qualified Unison.Cryptography as C
instance Arbitrary Address where
arbitrary = (fromBytes . B.pack) <$> vectorOf 64 arbitrary
makeRandomAddress :: IO Address
makeRandomAddress = Address <$> C.randomBytes (C.noop "dummypublickey") 64
roundTrip :: BS.BlockStore Address -> HU.Assertion
roundTrip bs = do
h <- BS.insert bs $ pack "v"
v <- BS.lookup bs h
case v of
Just v2 | unpack v2 == "v" -> pure ()
a -> fail ("lookup returned " ++ show a)
roundTripSeries :: BS.BlockStore Address -> HU.Assertion
roundTripSeries bs = do
let seriesName = BS.Series $ pack "series"
h <- BS.declareSeries bs seriesName
h2 <- BS.update bs seriesName h (pack "v")
case h2 of
Nothing -> fail "got nothin"
Just h2' -> do
h3 <- BS.resolve bs seriesName
case h3 of
Just h3' | h3' == h2' -> pure ()
a -> fail ("resolve returned " ++ show a)
appendAppendUpdate :: BS.BlockStore Address -> HU.Assertion
appendAppendUpdate bs = do
let seriesName = BS.Series $ pack "series2"
h <- BS.declareSeries bs seriesName
(Just h2) <- BS.append bs seriesName h (pack "v")
(Just h3) <- BS.append bs seriesName h2 (pack "v2")
vs <- BS.resolves bs seriesName
case vs of
[h3', h2'] | h3 == h3' && h2 == h2' -> pure ()
x -> fail ("got series list of " ++ show x)
(Just h4) <- BS.update bs seriesName h3 (pack "v3")
vs2 <- BS.resolves bs seriesName
case vs2 of
[h4'] | h4 == h4' -> pure ()
x -> fail ("2. got series list of " ++ show x)
idempotentDeclare :: BS.BlockStore Address -> HU.Assertion
idempotentDeclare bs = do
let seriesName = BS.Series $ pack "series3"
h <- BS.declareSeries bs seriesName
h2 <- BS.declareSeries bs seriesName
if h == h2 then pure ()
else fail ("got back unequal hashes " ++ show h ++ " " ++ show h2)
cantChangeWithInvalidHash :: BS.BlockStore Address -> HU.Assertion
cantChangeWithInvalidHash bs = do
let seriesName = BS.Series $ pack "series4"
let series2Name = BS.Series $ pack "series5"
h <- BS.declareSeries bs seriesName
h2 <- BS.declareSeries bs series2Name
result <- BS.update bs seriesName h2 $ pack "value"
if isNothing result then pure ()
else fail "updated series without correct hash"
genByteString :: Gen ByteString
genByteString = B.pack <$> listOf (choose (0, 255))
data BlockStoreMethod
= Insert ByteString
| Lookup
| DeclareSeries BS.Series
| Update ByteString
| Append ByteString
| Resolve
| Resolves deriving (Eq, Show)
isDeclareSeries :: BlockStoreMethod -> Bool
isDeclareSeries (DeclareSeries _) = True
isDeclareSeries _ = False
data BlockStoreResult
= Key Address
| Data (Maybe ByteString)
| NoKey
| KeyList [Address] deriving (Eq, Show)
shrinkBS :: ByteString -> ByteString
shrinkBS = B.pack . tail . B.unpack
instance Arbitrary BlockStoreMethod where
arbitrary = do
c <- choose (0,6::Int)
case c of
0 -> Insert <$> genByteString
1 -> pure Lookup
2 -> (DeclareSeries . BS.Series) <$> genByteString
3 -> Update <$> genByteString
4 -> Append <$> genByteString
5 -> pure Resolve
6 -> pure Resolves
shrink (Insert bs) = [Insert (shrinkBS bs)]
shrink (DeclareSeries (BS.Series bs)) =
[DeclareSeries (BS.Series . shrinkBS $ bs)]
shrink (Update bs) = [Update . shrinkBS $ bs]
shrink (Append bs) = [Append . shrinkBS $ bs]
shrink x = [x]
data TestClient = TestClient
{ lastHandle :: Address
, result :: BlockStoreResult
, lastSeries :: BS.Series } deriving (Show)
runCommand :: BS.BlockStore Address -> BlockStoreMethod -> TestClient -> IO TestClient
runCommand bs command tc = case command of
(Insert v) -> do
r <- BS.insert bs v
pure tc { result = Key r, lastHandle = r }
Lookup -> do
r <- BS.lookup bs (lastHandle tc)
pure tc { result = Data r }
(DeclareSeries s) -> do
r <- BS.declareSeries bs s
pure tc { result = Key r, lastHandle = r, lastSeries = s }
(Update v) -> do
r <- BS.update bs (lastSeries tc) (lastHandle tc) v
pure tc { result = maybe NoKey Key r, lastHandle = fromMaybe (lastHandle tc) r }
(Append v) -> do
r <- BS.append bs (lastSeries tc) (lastHandle tc) v
pure tc { result = maybe NoKey Key r, lastHandle = fromMaybe (lastHandle tc) r }
Resolve -> do
r <- BS.resolve bs (lastSeries tc)
pure tc { result = maybe NoKey Key r, lastHandle = fromMaybe (lastHandle tc) r }
Resolves -> do
r <- BS.resolves bs (lastSeries tc)
pure tc { result = KeyList r }
runMethods :: BS.BlockStore Address -> MVar.MVar TestClient -> TestClient -> [BlockStoreMethod] -> IO ThreadId
runMethods blockStore clientVar client =
forkIO . (>>= MVar.putMVar clientVar) . foldM runMethod client where
runMethod client method = runCommand blockStore method client
prop_allSeriesHashesAreValid :: BS.BlockStore Address -> Property
prop_allSeriesHashesAreValid bs = QCM.monadicIO $ do
firstDeclareSeries <- QCM.pick $ (DeclareSeries . BS.Series) <$> genByteString
firstUpdate <- QCM.pick $ Update <$> genByteString
client <- QCM.run $ Prelude.foldr (\m tc -> tc >>= runCommand bs m)
(pure $ TestClient undefined undefined undefined)
[firstUpdate, firstDeclareSeries]
clientMethods <- QCM.pick . flip suchThat (all (not . isDeclareSeries))
$ (arbitrary :: Gen [BlockStoreMethod])
newClient <- QCM.run $ foldr (\m c -> c >>= runCommand bs m) (pure client) clientMethods
seriesHashes <- QCM.run . BS.resolves bs $ lastSeries newClient
seriesValues <- QCM.run $ mapM (BS.lookup bs) seriesHashes
-- make sure we didn't get any Nothing values for the series
QCM.assert $ length seriesValues == length (catMaybes seriesValues)
prop_lastKeyIsValid :: BS.BlockStore Address -> Property
prop_lastKeyIsValid blockStore = QCM.monadicIO $ do
firstDeclareSeries <- QCM.pick $ (DeclareSeries . BS.Series) <$> genByteString
firstUpdate <- QCM.pick $ Update <$> genByteString
interestingClient <- QCM.run $ Prelude.foldr (\m tc -> tc >>= runCommand blockStore m)
(pure $ TestClient undefined undefined undefined)
[firstUpdate, firstDeclareSeries]
clientMethods <- filter (not . isDeclareSeries)
<$> QCM.pick (arbitrary :: Gen [BlockStoreMethod])
newClient <- QCM.run $ foldr
(\m c -> c >>= runCommand blockStore m) (pure interestingClient) clientMethods
lookupLast <- QCM.run . BS.lookup blockStore $ lastHandle newClient
QCM.assert . not $ isNothing lookupLast
prop_SomeoneHasAValidKey :: BS.BlockStore Address -> Property
prop_SomeoneHasAValidKey blockStore = QCM.monadicIO $ do
let clientNumber = 100
firstDeclareSeries <- QCM.pick $ (DeclareSeries . BS.Series) <$> genByteString
firstUpdate <- QCM.pick $ Update <$> genByteString
interestingClient <- QCM.run $ Prelude.foldr (\m tc -> tc >>= runCommand blockStore m)
(pure $ TestClient undefined undefined undefined)
[firstUpdate, firstDeclareSeries]
clientVars <- QCM.run $ replicateM clientNumber MVar.newEmptyMVar
let clients = replicate clientNumber interestingClient
clientMethods <- QCM.pick . vectorOf clientNumber $ arbitrary
let filteredMethods = map (filter (not . isDeclareSeries)) clientMethods
-- run all forks
_ <- QCM.run . sequence
$ zipWith3 (runMethods blockStore) clientVars clients filteredMethods
-- wait for all forks to finish
clientResults <- QCM.run
$ mapM (MVar.takeMVar >=> BS.lookup blockStore . lastHandle) clientVars
QCM.assert . not . Prelude.null . catMaybes $ clientResults
makeCases :: BS.BlockStore Address -> [TestTree]
makeCases bs = [ HU.testCase "roundTrip" (roundTrip bs)
, HU.testCase "roundTripSeries" (roundTripSeries bs)
, HU.testCase "appendAppendUpdate" (appendAppendUpdate bs)
, HU.testCase "idempotentDeclare" (idempotentDeclare bs)
, HU.testCase "cantChangeWithInvalidHash" (cantChangeWithInvalidHash bs)
]
-- the quickcheck tests seem to take forever.
makeExhaustiveCases :: BS.BlockStore Address -> [TestTree]
makeExhaustiveCases bs = makeCases bs ++
[ testProperty "lastKeyIsValid" (prop_lastKeyIsValid bs)
, testProperty "allSeriesHashesAreValid" (prop_allSeriesHashesAreValid bs)
, testProperty "someoneHasValidKey" (prop_SomeoneHasAValidKey bs)
]
| nightscape/platform | node/tests/Unison/Test/BlockStore.hs | mit | 8,989 | 0 | 18 | 1,842 | 3,081 | 1,531 | 1,550 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
-- |
-- Module : Surge.Stage
-- Copyright : (c) 2014 Kyle Van Berendonck
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module provides utilities for defining and composing server `Stage`s. See "Surge.Tutorial"
-- for more information.
module Surge.Stage
( -- * Handler
Handler
, Failure(..)
-- * Stage
, Stage(..)
, stage
) where
import Surge.Internal
| kvanberendonck/surge | src/Surge/Stage.hs | mit | 518 | 0 | 5 | 132 | 47 | 36 | 11 | 8 | 0 |
module Text.Noise.Compiler.Builtin
( definitions
) where
import Control.Applicative
import qualified Data.Map as Map
import qualified Text.Noise.Compiler.Document.Color as Color
import qualified Text.Noise.Compiler.Document as D
import qualified Text.Noise.Compiler.Function as F
import Text.Noise.Compiler.Function (Function, requireArg, acceptArg, acceptBlockArgs)
definitions :: Map.Map [String] (Function F.Value)
definitions = Map.fromList
[ (["shape","rectangle"], rectangle)
, (["shape","circle"], circle)
, (["shape","path"], path)
, (["color","red"], color (Color.RGB 255 0 0))
, (["color","green"], color (Color.RGB 0 255 0))
, (["color","blue"], color (Color.RGB 0 0 255))
, (["color","black"], color (Color.RGB 0 0 0))
, (["color","adjust"], colorAdjust)
, (["gradient","vertical"], linearGradient 90)
, (["gradient","horizontal"], linearGradient 0)
, (["gradient","radial"], radialGradient)
, (["image"], image)
, (["group"], group)
, (["path","move"], pathMove)
, (["path","line"], pathLine)
, (["path","arc"], pathArc)
]
rectangle :: Function F.Value
rectangle = fmap F.ElementValue $ D.Rectangle
<$> requireArg "x"
<*> requireArg "y"
<*> requireArg "width"
<*> requireArg "height"
<*> acceptArg "radius" 0
<*> acceptArg "fill" D.defaultValue
<*> acceptArg "stroke" D.defaultValue
circle :: Function F.Value
circle = fmap F.ElementValue $ D.Circle
<$> requireArg "cx"
<*> requireArg "cy"
<*> requireArg "radius"
<*> acceptArg "fill" D.defaultValue
<*> acceptArg "stroke" D.defaultValue
color :: D.Color -> Function F.Value
color = return . F.ColorValue
colorAdjust :: Function F.Value
colorAdjust = fmap F.ColorValue $ do
c <- requireArg "color"
l <- acceptArg "lightness" (1.0 :: Double)
let lighten = toWord . (l *) . fromIntegral
toWord = round . max 0 . min 255
return $ case c of
Color.RGB r g b -> Color.RGB (lighten r) (lighten g) (lighten b)
Color.ARGB a r g b -> Color.ARGB a (lighten r) (lighten g) (lighten b)
requireGradientColorArgs :: Function [(D.Number,D.Color)]
requireGradientColorArgs = do
from <- requireArg "from"
to <- requireArg "to"
return [(0, from), (1, to)]
linearGradient :: D.Angle -> Function F.Value
linearGradient angle = fmap F.GradientValue $ D.LinearGradient angle
<$> requireGradientColorArgs
radialGradient :: Function F.Value
radialGradient = fmap F.GradientValue $ D.RadialGradient
<$> requireGradientColorArgs
image :: Function F.Value
image = fmap F.ElementValue $ D.Image
<$> requireArg "x"
<*> requireArg "y"
<*> requireArg "width"
<*> requireArg "height"
<*> requireArg "file"
group :: Function F.Value
group = fmap F.ElementValue $ D.Group
<$> acceptBlockArgs
path :: Function F.Value
path = fmap F.ElementValue $ D.Path
<$> acceptArg "fill" D.defaultValue
<*> acceptArg "stroke" D.defaultValue
<*> acceptBlockArgs
pathMove :: Function F.Value
pathMove = fmap F.PathCommandValue $ D.Move
<$> requireArg "dx"
<*> requireArg "dy"
pathLine :: Function F.Value
pathLine = fmap F.PathCommandValue $ D.Line
<$> requireArg "dx"
<*> requireArg "dy"
pathArc :: Function F.Value
pathArc = fmap F.PathCommandValue $ do
dx <- requireArg "dx"
dy <- requireArg "dy"
ry <- requireArg "ry"
let rotation = atan(dy / dx)
rx = sqrt(dx**2 + dy**2)
return $ D.Arc dx dy rx ry rotation
| brow/noise | src/Text/Noise/Compiler/Builtin.hs | mit | 3,505 | 0 | 15 | 711 | 1,264 | 669 | 595 | 95 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-spotfleet-instanceipv6address.html
module Stratosphere.ResourceProperties.EC2SpotFleetInstanceIpv6Address where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2SpotFleetInstanceIpv6Address. See
-- 'ec2SpotFleetInstanceIpv6Address' for a more convenient constructor.
data EC2SpotFleetInstanceIpv6Address =
EC2SpotFleetInstanceIpv6Address
{ _eC2SpotFleetInstanceIpv6AddressIpv6Address :: Val Text
} deriving (Show, Eq)
instance ToJSON EC2SpotFleetInstanceIpv6Address where
toJSON EC2SpotFleetInstanceIpv6Address{..} =
object $
catMaybes
[ (Just . ("Ipv6Address",) . toJSON) _eC2SpotFleetInstanceIpv6AddressIpv6Address
]
-- | Constructor for 'EC2SpotFleetInstanceIpv6Address' containing required
-- fields as arguments.
ec2SpotFleetInstanceIpv6Address
:: Val Text -- ^ 'ecsfiiaIpv6Address'
-> EC2SpotFleetInstanceIpv6Address
ec2SpotFleetInstanceIpv6Address ipv6Addressarg =
EC2SpotFleetInstanceIpv6Address
{ _eC2SpotFleetInstanceIpv6AddressIpv6Address = ipv6Addressarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-spotfleet-instanceipv6address.html#cfn-ec2-spotfleet-instanceipv6address-ipv6address
ecsfiiaIpv6Address :: Lens' EC2SpotFleetInstanceIpv6Address (Val Text)
ecsfiiaIpv6Address = lens _eC2SpotFleetInstanceIpv6AddressIpv6Address (\s a -> s { _eC2SpotFleetInstanceIpv6AddressIpv6Address = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EC2SpotFleetInstanceIpv6Address.hs | mit | 1,625 | 0 | 13 | 164 | 174 | 100 | 74 | 23 | 1 |
module Strain (keep, discard) where
discard :: (a -> Bool) -> [a] -> [a]
discard f = keep (not . f)
keep :: (a -> Bool) -> [a] -> [a]
keep f (x:xs)
| f x = x : keep f xs
| otherwise = keep f xs
keep _ _ = []
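-- Usage sketch (illustrative values, not part of the exercise):
--
-- > keep    even [1, 2, 3, 4]  ==  [2, 4]
-- > discard even [1, 2, 3, 4]  ==  [1, 3]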
| vaibhav276/exercism_haskell | strain/src/Strain.hs | mit | 224 | 0 | 8 | 69 | 143 | 75 | 68 | 8 | 1 |
module GHCJS.DOM.CSSStyleRule (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/CSSStyleRule.hs | mit | 42 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html
module Stratosphere.ResourceProperties.KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.KinesisFirehoseDeliveryStreamCloudWatchLoggingOptions
import Stratosphere.ResourceProperties.KinesisFirehoseDeliveryStreamCopyCommand
import Stratosphere.ResourceProperties.KinesisFirehoseDeliveryStreamProcessingConfiguration
import Stratosphere.ResourceProperties.KinesisFirehoseDeliveryStreamS3DestinationConfiguration
-- | Full data type definition for
-- KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration. See
-- 'kinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration' for a
-- more convenient constructor.
data KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration =
KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration
{ _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCloudWatchLoggingOptions :: Maybe KinesisFirehoseDeliveryStreamCloudWatchLoggingOptions
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationClusterJDBCURL :: Val Text
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCopyCommand :: KinesisFirehoseDeliveryStreamCopyCommand
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationPassword :: Val Text
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationProcessingConfiguration :: Maybe KinesisFirehoseDeliveryStreamProcessingConfiguration
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationRoleARN :: Val Text
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationS3Configuration :: KinesisFirehoseDeliveryStreamS3DestinationConfiguration
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationUsername :: Val Text
} deriving (Show, Eq)
instance ToJSON KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration where
toJSON KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration{..} =
object $
catMaybes
[ fmap (("CloudWatchLoggingOptions",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCloudWatchLoggingOptions
, (Just . ("ClusterJDBCURL",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationClusterJDBCURL
, (Just . ("CopyCommand",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCopyCommand
, (Just . ("Password",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationPassword
, fmap (("ProcessingConfiguration",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationProcessingConfiguration
, (Just . ("RoleARN",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationRoleARN
, (Just . ("S3Configuration",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationS3Configuration
, (Just . ("Username",) . toJSON) _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationUsername
]
-- | Constructor for
-- 'KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration'
-- containing required fields as arguments.
kinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration
:: Val Text -- ^ 'kfdsrdcClusterJDBCURL'
-> KinesisFirehoseDeliveryStreamCopyCommand -- ^ 'kfdsrdcCopyCommand'
-> Val Text -- ^ 'kfdsrdcPassword'
-> Val Text -- ^ 'kfdsrdcRoleARN'
-> KinesisFirehoseDeliveryStreamS3DestinationConfiguration -- ^ 'kfdsrdcS3Configuration'
-> Val Text -- ^ 'kfdsrdcUsername'
-> KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration
kinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration clusterJDBCURLarg copyCommandarg passwordarg roleARNarg s3Configurationarg usernamearg =
KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration
{ _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCloudWatchLoggingOptions = Nothing
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationClusterJDBCURL = clusterJDBCURLarg
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCopyCommand = copyCommandarg
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationPassword = passwordarg
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationProcessingConfiguration = Nothing
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationRoleARN = roleARNarg
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationS3Configuration = s3Configurationarg
, _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationUsername = usernamearg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-cloudwatchloggingoptions
kfdsrdcCloudWatchLoggingOptions :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Maybe KinesisFirehoseDeliveryStreamCloudWatchLoggingOptions)
kfdsrdcCloudWatchLoggingOptions = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCloudWatchLoggingOptions (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCloudWatchLoggingOptions = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-clusterjdbcurl
kfdsrdcClusterJDBCURL :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Val Text)
kfdsrdcClusterJDBCURL = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationClusterJDBCURL (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationClusterJDBCURL = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-copycommand
kfdsrdcCopyCommand :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration KinesisFirehoseDeliveryStreamCopyCommand
kfdsrdcCopyCommand = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCopyCommand (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationCopyCommand = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-password
kfdsrdcPassword :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Val Text)
kfdsrdcPassword = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationPassword (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationPassword = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-processingconfiguration
kfdsrdcProcessingConfiguration :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Maybe KinesisFirehoseDeliveryStreamProcessingConfiguration)
kfdsrdcProcessingConfiguration = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationProcessingConfiguration (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationProcessingConfiguration = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-rolearn
kfdsrdcRoleARN :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Val Text)
kfdsrdcRoleARN = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationRoleARN (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationRoleARN = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-s3configuration
kfdsrdcS3Configuration :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration KinesisFirehoseDeliveryStreamS3DestinationConfiguration
kfdsrdcS3Configuration = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationS3Configuration (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationS3Configuration = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-redshiftdestinationconfiguration.html#cfn-kinesisfirehose-deliverystream-redshiftdestinationconfiguration-username
kfdsrdcUsername :: Lens' KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration (Val Text)
kfdsrdcUsername = lens _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationUsername (\s a -> s { _kinesisFirehoseDeliveryStreamRedshiftDestinationConfigurationUsername = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisFirehoseDeliveryStreamRedshiftDestinationConfiguration.hs | mit | 9,293 | 0 | 13 | 540 | 790 | 454 | 336 | 67 | 1 |
-- Avoid depending on monoidal-containers,
-- which indirectly depends on template-haskell and so prevents me from building with asterius
{-
Copyright (c) 2015, Ben Gamari
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Ben Gamari nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
{-# LANGUAGE CPP #-}
module Data.Map.Monoidal.Strict
( MonoidalMap
, assocs
, singleton
, elems
) where
import qualified Data.Map.Strict as M
#if !(MIN_VERSION_base(4,11,0))
import Data.Semigroup
#endif
newtype MonoidalMap k a = MonoidalMap (M.Map k a) deriving (Eq, Show)
instance (Ord k, Semigroup a) => Semigroup (MonoidalMap k a) where
MonoidalMap a <> MonoidalMap b = MonoidalMap $ M.unionWith (<>) a b
{-# INLINE (<>) #-}
instance (Ord k, Semigroup a) => Monoid (MonoidalMap k a) where
mempty = MonoidalMap mempty
{-# INLINE mempty #-}
#if !(MIN_VERSION_base(4,11,0))
mappend (MonoidalMap a) (MonoidalMap b) = MonoidalMap $ M.unionWith (<>) a b
{-# INLINE mappend #-}
#endif
-- | /O(n)/. Return all elements of the map and their keys
assocs :: MonoidalMap k a -> [(k, a)]
assocs (MonoidalMap m) = M.assocs m
{-# INLINE assocs #-}
-- | /O(1)/. A map with a single element.
singleton :: k -> a -> MonoidalMap k a
singleton k = MonoidalMap . M.singleton k
{-# INLINE singleton #-}
-- | /O(n)/. Return all elements of the map in the ascending order of their
-- keys. Subject to list fusion.
elems :: MonoidalMap k a -> [a]
elems (MonoidalMap m) = M.elems m
{-# INLINE elems #-}
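-- Usage sketch (illustrative keys and values): on a key collision the two
-- values are combined with '(<>)' instead of one replacing the other.
--
-- > assocs (singleton "k" [1] <> singleton "k" [2] <> singleton "j" [3])
-- >   == [("j",[3]),("k",[1,2])]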
| igrep/igrep-cashbook | hs2/src/Data/Map/Monoidal/Strict.hs | mit | 2,889 | 0 | 8 | 545 | 337 | 188 | 149 | 26 | 1 |
module BadIntel.Console.Menu
(enterMenu
,enterMenu'
,yesNo)
where
import Control.Monad
import Data.Char
import System.Console.Haskeline
import qualified Data.Map as Map
enterMenu :: [String] -> InputT IO Int
enterMenu c = do displayMenu c
input <- getInputChar "> "
let max = length c
let choice = join . fmap (pickNumber 0 max) $ input
case choice of
Just x -> return x
Nothing -> enterMenu c
enterMenu' :: [String] -> [a -> InputT IO a] -> a -> InputT IO a
enterMenu' m fs o = do c <- enterMenu m
if c == 0 then return o
else case Map.lookup c (toMenuIndex fs) of
Nothing -> error "Menu does not contain this index."
Just f -> f o
>>= enterMenu' m fs
displayMenu :: [String] -> InputT IO ()
displayMenu = mapM_ outputStrLn . toMenu
toMenu :: [String] -> [String]
toMenu = zipWith concatNums [1..]
where concatNums n s = intToDigit n:". " ++ s
toMenuIndex :: [a] -> Map.Map Int a
toMenuIndex = Map.fromList . zip [1..]
pickNumber :: Int -> Int -> Char -> Maybe Int
pickNumber min max c = do c' <- tryToInt c
inMargin c'
where
tryToInt x
| isDigit x = Just . digitToInt $ x
| otherwise = Nothing
inMargin x
| x >= min && x <= max = Just x
| otherwise = Nothing
yesNo :: String -> (a -> InputT IO a) -> a -> InputT IO a
yesNo s f a = do outputStrLn s
c <- getInputChar "> "
case c of
Just x -> if x == 'y' then f a
else return a
Nothing -> yesNo s f a
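-- Usage sketch (hypothetical menu and state; entering '0' leaves the loop
-- and returns the value accumulated so far):
--
-- > runInputT defaultSettings $ enterMenu'
-- >   ["increment", "decrement"]
-- >   [\n -> return (n + 1), \n -> return (n - 1)]
-- >   (0 :: Int)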
| Raveline/BadIntel | src/BadIntel/Console/Menu.hs | gpl-2.0 | 1,860 | 0 | 14 | 780 | 624 | 304 | 320 | 46 | 3 |
module SGF.Data where
data Tree = Empty | Branch {commands :: [Command], tree :: Tree}
newtype Location = L (Int,Int) deriving (Show, Eq)
data MoveCommand = BlackMove Location | WhiteMove Location deriving (Show, Eq)
data MetaCommand = Comment String | BlackPlace Location | WhitePlace Location deriving (Show,Eq)
data Command = Move MoveCommand | Meta MetaCommand deriving Show
| JonHarder/haskell-go | SGF/Data.hs | gpl-2.0 | 382 | 0 | 9 | 60 | 130 | 77 | 53 | 6 | 0 |
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.ReplaceSingleCharMap
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
module Yi.Keymap.Vim.ReplaceSingleCharMap
( defReplaceSingleMap
) where
import Control.Monad
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Yi.Buffer.Adjusted
import Yi.Editor
import Yi.Keymap.Keys
import Yi.Keymap.Vim.Common
import Yi.Keymap.Vim.StateUtils
import Yi.Keymap.Vim.Utils
import Yi.Utils
defReplaceSingleMap :: [VimBinding]
defReplaceSingleMap = [escBinding, actualReplaceBinding]
escBinding :: VimBinding
escBinding = mkBindingE ReplaceSingleChar Drop (spec KEsc, return (), resetCount . switchMode Normal)
actualReplaceBinding :: VimBinding
actualReplaceBinding = VimBindingE (f . T.unpack . _unEv)
where
f evs s | ReplaceSingleChar == vsMode s = WholeMatch $ do
currentState <- getEditorDyn
let count = fromMaybe 1 $ vsCount currentState
let replacer = case evs of
(c:[]) -> replaceCharB c
"<lt>" -> replaceCharB '<'
"<C-e>" -> replaceCharWithBelowB
"<C-y>" -> replaceCharWithAboveB
_ -> return ()
withCurrentBuffer $ do
          -- Is there an easier way to get the distance to eol?
here <- pointB
moveToEol
eol <- pointB
moveTo here
let effectiveCount = min count (fromSize $ eol ~- here)
when (effectiveCount > 0) $ do
replicateM_ effectiveCount $ replacer >> rightB
leftB
resetCountE
switchModeE Normal
return Finish
f _ _ = NoMatch
| atsukotakahashi/wi | src/library/Yi/Keymap/Vim/ReplaceSingleCharMap.hs | gpl-2.0 | 1,897 | 1 | 19 | 622 | 381 | 204 | 177 | 41 | 6 |
{- |
Module : $Header$
Description : Utils extending Data.List and Data.Set
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module Search.Data.ListSet where
import qualified Data.List as L
import qualified Data.Set as S
completeCols :: (Ord b) => [[(a,b)]] -> S.Set b
completeCols matrix = intersections (map sndsOf matrix)
where sndsOf row = S.fromList (map snd row)
intersections :: (Ord a) => [S.Set a] -> S.Set a
intersections [] = S.empty
intersections family = foldl1 S.intersection family
s1 = S.fromList [1,2,3]
s2 = S.fromList [2,3,4]
s3 = S.fromList [3,4,5]
s4 = S.fromList [4,5,6]
s = [s1,s2,s3,s4]
{-
*Data.ListSet> intersections [s1,s2]
fromList [2,3]
*Data.ListSet> intersections [s1,s2,s3]
fromList [3]
*Data.ListSet> intersections [s1,s2,s4]
fromList []
-}
| nevrenato/Hets_Fork | Search/Utils/ListSet.hs | gpl-2.0 | 947 | 0 | 9 | 166 | 253 | 143 | 110 | 14 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Run.File.Write
(
wRunWorld,
) where
import MyPrelude
import File.Binary
import Game.Run.RunWorld
import Game.Run.File.Field
import Game.Run.File.Version
wRunWorld :: [(String, UInt)] -> RunWorld -> Writer
wRunWorld namepeak run = do
-- header
wWord8s version
-- LevelPuzzleFileName
wField fieldLevelPuzzleName
wCStringAlign 4 $ runLevelPuzzleFileName run
-- map FileName -> Peak, updating peak in RunWorld
wField fieldLevelPuzzleNamePeakS
forM_ namepeak $ \(name, peak) -> do
wField fieldLevelPuzzleNamePeak
wCStringAlign 4 name
wUInt32 $ if name == runLevelPuzzleFileName run
then max peak $ runLevelPuzzlePeak run
else peak
-- MemoryIx peak
wField fieldMemoryPeak
wUInt32 $ runMemoryPeak run
-- special LevelPuzzleWorld
wField fieldSpecialIsCompleted
wUInt32 $ if runSpecialIsCompleted run then 1 else 0
-- intensity
wField fieldIntensity
wUInt32 $ truncate $ runIntensity run * 65535.0
| karamellpelle/grid | source/Game/Run/File/Write.hs | gpl-3.0 | 1,809 | 0 | 14 | 410 | 260 | 140 | 120 | 26 | 3 |
module Text.Parsec.Applicative
( module Control.Applicative
, module Data.Traversable
, module Text.Parsec.Applicative.Types
, Parser()
, ParseError(..)
, ParseErrorType(..)
, predicate
, eof
, token
, token'
, try
, label
, parse
, parse'
, accept
, accept'
, between
, choice
, option
, sepBy
, updatePosString
, getPosition
) where
import Control.Applicative
import Data.Traversable (Traversable(traverse, sequenceA), for, mapAccumL, mapAccumR)
import Lens.Micro
import Text.Parsec.Applicative.Internal
import Text.Parsec.Applicative.Types
between :: (Applicative f) => f a -> f b -> f c -> f c
between l r m = l *> m <* r
choice :: (Alternative f) => [f a] -> f a
choice = foldr (<|>) empty
option :: (Alternative f) => f a -> f (Maybe a)
option p = (Just <$> p) <|> pure Nothing
sepBy :: (Alternative f) => f a -> f b -> f [a]
sepBy p delim = ((:) <$> p <*> many (delim *> p)) <|> empty
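-- Usage sketch for the combinators above (any 'Alternative' parser works;
-- 'sym' and 'item' are hypothetical primitive parsers):
--
-- > list = between (sym '[') (sym ']') (item `sepBy` sym ',')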
updatePosString :: SourcePos -> String -> SourcePos
updatePosString = foldr f
where
f '\n' = (set spColumn 0) . (over spLine (+ 1))
f _ = over spColumn (+ 1)
getPosition :: (HasSourcePos td) => Parser s tt td SourcePos
getPosition = PGetPos
| ktvoelker/AParsec | src/Text/Parsec/Applicative.hs | gpl-3.0 | 1,198 | 0 | 10 | 262 | 466 | 264 | 202 | 44 | 2 |
module Persist.Message (saveMessage) where
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Data.List (find)
import Data.Maybe (isJust, fromJust)
import Data.Either (isRight)
import Data.Either.Utils (fromRight)
import Data.UUID.Types (UUID)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Time.LocalTime (ZonedTime)
import Data.Aeson (toJSON)
import qualified Data.Aeson.Types as AT
import Control.Monad (void, when)
import Safe
import Types
import Persist.Types
import Persist.Utils
import Persist.RelatedEmails (persistRelatedEmails)
import Persist.References (persistReferences)
import Persist.Thread (persistThread)
import qualified Network.Mail.Parse.Types as MPT
import Network.IMAP.Types (isUID)
import qualified Network.IMAP.Types as IMAP
-- |Saves a message and all of its related metadata
saveMessage :: Connection ->
((Either MPT.ErrorMessage MPT.EmailMessage), Metadata) ->
IO ()
saveMessage conn (msg, metadata) = do
let uid = find (isUID) metadata
if isRight msg && isJust uid
then do
let (IMAP.UID unpackedUid) = fromJust uid
unpackedMsg = fromRight msg
idsMap <- saveMetadata conn unpackedMsg
msgIds <- persistMessage conn idsMap unpackedMsg unpackedUid
let msgId = headMay msgIds
when (isJust msgId) $ do
let unpackedId = (\(Only uuid) -> uuid) $ fromJust msgId
persistRelatedEmails conn unpackedId idsMap unpackedMsg
persistReferences conn unpackedId unpackedMsg
persistThread conn unpackedId unpackedMsg
else return ()
persistMessage :: Connection ->
EmailIdMap ->
MPT.EmailMessage ->
Int ->
IO [(Only UUID)]
persistMessage conn idsMap msg messageUid = query conn [sql|
INSERT INTO message
(uid, from_addr, sent_date, reply_to, message_id, in_reply_to, subject, message)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
RETURNING id
|] serializedMsg
where serializedMsg = serializeMessage idsMap msg messageUid
-- |Serializes the bulk of the message, along with message contents
serializeMessage :: EmailIdMap ->
MPT.EmailMessage ->
Int ->
(Int, Maybe UUID, Maybe ZonedTime, Maybe UUID, Maybe T.Text,
Maybe T.Text, Maybe T.Text, AT.Value) -- uid, from_addr,
-- sent_date, reply_to, message_id, in_reply_to, subject,
-- message itself
serializeMessage idMap msg uid = (
uid
, (\(MPT.From (MPT.EmailAddress addr _)) -> idMap M.! addr) <$> findHeader isFrom
, (\(MPT.Date time) -> time) <$> findHeader isDate
, (\(MPT.ReplyTo (MPT.EmailAddress addr _)) -> idMap M.! addr) <$> findHeader isReplyTo
, (\(MPT.MessageId msgId) -> msgId) <$> findHeader isMessageId
, (\(MPT.InReplyTo msgId) -> msgId) <$> findHeader isInReplyTo
, (\(MPT.Subject subject) -> subject) <$> findHeader isSubject
, toJSON msg
)
where findHeader hdrMatch = find hdrMatch $ MPT.emailHeaders msg
-- |Saves all the emails that exist in this message and
-- returns a map mapping email addresses to their ids from the DB
saveMetadata :: Connection -> MPT.EmailMessage -> IO EmailIdMap
saveMetadata conn msg = do
let saveableAddresses = getSaveableAddresses msg
saveEmails conn saveableAddresses
getEmailIds conn saveableAddresses
-- |Fetches a list of ids for a list of emails
getEmailIds :: Connection -> [MPT.EmailAddress] -> IO EmailIdMap
getEmailIds conn addrs = do
ids <- query conn [sql|
SELECT address, id FROM email_address WHERE address IN ?
|] $ Only . In $ map (MPT.emailAddress) addrs
return $ M.fromList ids
saveEmails :: Connection -> [MPT.EmailAddress] -> IO ()
saveEmails conn addrs = void $
executeMany conn [sql|
INSERT INTO email_address (address, label) values (?, ?)
ON CONFLICT DO NOTHING
|] $ map saveableAddress addrs
| mkawalec/email | src/Persist/Message.hs | gpl-3.0 | 4,005 | 0 | 20 | 914 | 1,031 | 553 | 478 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-
hbot - a simple Haskell chat bot for Hipchat
Copyright (C) 2014 Louis J. Scoras
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
-- | Whoami plugin
module Hbot.Plugins.Whoami (
whoami
) where
import qualified Data.Text.Lazy as T
import Hbot.MessageEvent
import Hbot.Plugins
-- | Plugin which shows information about the user invoking the plugin
whoami :: Plugin
whoami = Plugin "Show information about message sender." . TextAction $ \(_, event) ->
return (T.pack $ displayFrom ( from . message . eventItem $ event))
-- | Convert 'From' information from the Hipchat API into a text representation
-- for the response notification.
displayFrom :: From -> String
displayFrom FromNull = "Hell if I know"
displayFrom (FromString s) = s ++ ", of course."
displayFrom (FromObject (FO {..})) =
concat [ "You are ", fromFullName, ", better known as \"", fromMentionName, "\""]
| ljsc/hbot | src/Hbot/Plugins/Whoami.hs | gpl-3.0 | 1,608 | 0 | 14 | 343 | 176 | 102 | 74 | 15 | 1 |
{-# LANGUAGE OverloadedStrings, DuplicateRecordFields,OverloadedLabels #-}
module Ampersand.Input.Xslx.XLSX
(parseXlsxFile)
where
import Ampersand.ADL1
import Ampersand.Basics
import Ampersand.Input.ADL1.CtxError
import Ampersand.Misc
import Ampersand.Prototype.StaticFiles_Generated (getStaticFileContent, FileKind)
import Codec.Xlsx
import Control.Lens
import qualified Data.List as L
import qualified Data.ByteString.Lazy as BL
import Data.Char
import qualified Data.Map as M
import Data.Maybe
import Data.String
import qualified Data.Text as T
import Data.Tuple
parseXlsxFile :: Options
-> Maybe FileKind
-> FilePath -> IO (Guarded [P_Population])
parseXlsxFile opts mFk file =
do bytestr <-
case mFk of
Just fileKind
-> case getStaticFileContent fileKind file of
Just cont -> return $ fromString cont
Nothing -> fatal ("Statically included "++ show fileKind++ " files. \n Cannot find `"++file++"`.")
Nothing
-> BL.readFile file
return . xlsx2pContext . toXlsx $ bytestr
where
xlsx2pContext :: Xlsx -> Guarded [P_Population]
xlsx2pContext xlsx = Checked pop []
where
pop = concatMap (toPops opts file)
. concatMap theSheetCellsForTable
$ (xlsx ^. xlSheets)
data SheetCellsForTable
= Mapping{ theSheetName :: String
, theCellMap :: CellMap
, headerRowNrs :: [Int]
, popRowNrs :: [Int]
, colNrs :: [Int]
, debugInfo :: [String]
}
instance Show SheetCellsForTable where --for debugging only
show x
= unlines $
[ "Sheet : "++theSheetName x
, "headerRowNrs: "++show (headerRowNrs x)
, "popRowNrs : "++show (popRowNrs x)
, "colNrs : "++show (colNrs x)
] ++ debugInfo x
toPops :: Options -> FilePath -> SheetCellsForTable -> [P_Population]
toPops opts file x = map popForColumn (colNrs x)
where
popForColumn :: Int -> P_Population
popForColumn i =
if i == sourceCol
then P_CptPopu { pos = popOrigin
, p_cnme = sourceConceptName
, p_popas = concat [ case value(row,i) of
Nothing -> []
Just cv -> cellToAtomValues mSourceConceptDelimiter cv popOrigin
| row <- popRowNrs x
]
}
else P_RelPopu { pos = popOrigin
, p_src = src
, p_tgt = trg
, p_nmdr = PNamedRel popOrigin relName Nothing -- The P-to-A converter must assign the type.
, p_popps = thePairs
}
where
src, trg :: Maybe String
(src,trg) = case mTargetConceptName of
Just tCptName -> (if isFlipped' then swap else id) (Just sourceConceptName, Just tCptName)
Nothing -> (Nothing,Nothing)
popOrigin :: Origin
popOrigin = originOfCell (relNamesRow, targetCol)
conceptNamesRow = head . tail $ headerRowNrs x
relNamesRow = head $ headerRowNrs x
sourceCol = head $ colNrs x
targetCol = i
sourceConceptName :: String
mSourceConceptDelimiter :: Maybe Char
(sourceConceptName, mSourceConceptDelimiter)
= case value (conceptNamesRow,sourceCol) of
Just (CellText t) ->
fromMaybe (fatal "No valid source conceptname found. This should have been checked before")
(conceptNameWithOptionalDelimiter . trim $ t)
_ -> fatal "No valid source conceptname found. This should have been checked before"
mTargetConceptName :: Maybe String
mTargetConceptDelimiter :: Maybe Char
(mTargetConceptName, mTargetConceptDelimiter)
= case value (conceptNamesRow,targetCol) of
Just (CellText t) -> let (nm,mDel)
= fromMaybe
(fatal "No valid source conceptname found. This should have been checked before")
(conceptNameWithOptionalDelimiter . trim $ t)
in (Just nm, mDel)
_ -> (Nothing, Nothing)
relName :: String
isFlipped' :: Bool
(relName,isFlipped')
= case value (relNamesRow,targetCol) of
Just (CellText t) ->
let str = T.unpack . trim $ t
in if last str == '~'
then (init str, True )
else ( str, False)
_ -> fatal ("No valid relation name found. This should have been checked before" ++show (relNamesRow,targetCol))
thePairs :: [PAtomPair]
thePairs = concat . mapMaybe pairsAtRow . popRowNrs $ x
pairsAtRow :: Int -> Maybe [PAtomPair]
pairsAtRow r = case (value (r,sourceCol)
,value (r,targetCol)
) of
(Just s,Just t) -> Just $
(if isFlipped' then map flp else id)
[mkPair origTrg s' t'
| s' <- cellToAtomValues mSourceConceptDelimiter s origSrc
, t' <- cellToAtomValues mTargetConceptDelimiter t origTrg
]
_ -> Nothing
where origSrc = XLSXLoc file (theSheetName x) (r,sourceCol)
origTrg = XLSXLoc file (theSheetName x) (r,targetCol)
    cellToAtomValues :: Maybe Char -> CellValue -> Origin -> [PAtomValue] -- The value in a cell can contain the delimiter of the row
cellToAtomValues mDelimiter cv orig
= case cv of
CellText t -> map (XlsxString orig . T.unpack)
. filter (not . T.null)
. unDelimit mDelimiter
. handleSpaces $ t
CellDouble d -> [XlsxDouble orig d]
CellBool b -> [ComnBool orig b]
CellRich ts -> map (XlsxString orig . T.unpack)
. filter (not . T.null)
. unDelimit mDelimiter
. handleSpaces . T.concat . map _richTextRunText $ ts
CellError e -> fatal . L.intercalate "\n " $
[ "Error reading cell at:"
, show orig
, show e]
unDelimit :: Maybe Char -> T.Text -> [T.Text]
unDelimit mDelimiter xs =
case mDelimiter of
Nothing -> [xs]
(Just delimiter) -> map trim $ T.split (== delimiter) xs
handleSpaces = if trimXLSXCells opts then trim else id
originOfCell :: (Int,Int) -- (row number,col number)
-> Origin
originOfCell (r,c)
= XLSXLoc file (theSheetName x) (r,c)
value :: (Int,Int) -> Maybe CellValue
value k = theCellMap x ^? ix k . cellValue . _Just
theSheetCellsForTable :: (T.Text,Worksheet) -> [SheetCellsForTable]
theSheetCellsForTable (sheetName,ws)
= catMaybes [theMapping i | i <- [0..length tableStarters - 1]]
where
tableStarters :: [(Int,Int)]
tableStarters = filter isStartOfTable $ M.keys (ws ^. wsCells)
where isStartOfTable :: (Int,Int) -> Bool
isStartOfTable (rowNr,colNr)
| colNr /= 1 = False
| rowNr == 1 = isBracketed (rowNr,colNr)
| otherwise = isBracketed (rowNr ,colNr)
&& (not . isBracketed) (rowNr - 1, colNr)
value :: (Int,Int) -> Maybe CellValue
value k = (ws ^. wsCells) ^? ix k . cellValue . _Just
isBracketed :: (Int,Int) -> Bool
isBracketed k =
case value k of
Just (CellText t) -> (not . T.null ) trimmed && T.head trimmed == '[' && T.last trimmed == ']'
where trimmed = trim t
_ -> False
theMapping :: Int -> Maybe SheetCellsForTable
theMapping indexInTableStarters
| length okHeaderRows /= nrOfHeaderRows = Nothing -- Because there are not enough header rows
| otherwise
= Just Mapping { theSheetName = T.unpack sheetName
, theCellMap = ws ^. wsCells
, headerRowNrs = okHeaderRows
, popRowNrs = populationRows
, colNrs = theCols
, debugInfo = [ "indexInTableStarters"++": "++show indexInTableStarters
, "maxRowOfWorksheet"++": "++show maxRowOfWorksheet
, "maxColOfWorksheet"++": "++show maxColOfWorksheet
, "startOfTable "++": "++show startOfTable
, "firstPopRowNr "++": "++show firstPopRowNr
, "lastPopRowNr "++": "++show lastPopRowNr
, "[(row,isProperRow)] "++": "++concatMap show [(r,isProperRow r) | r<- [firstPopRowNr..lastPopRowNr]]
, "theCols "++": "++show theCols
]
}
where
startOfTable = tableStarters !! indexInTableStarters
firstHeaderRowNr = fst startOfTable
firstColumNr = snd startOfTable
relationNameRowNr = firstHeaderRowNr
conceptNameRowNr = firstHeaderRowNr+1
nrOfHeaderRows = 2
maxRowOfWorksheet = maximum (map fst (M.keys (ws ^. wsCells)))
maxColOfWorksheet = maximum (map snd (M.keys (ws ^. wsCells)))
firstPopRowNr = firstHeaderRowNr + nrOfHeaderRows
lastPopRowNr = ((map fst tableStarters++[maxRowOfWorksheet+1])!!(indexInTableStarters+1))-1
okHeaderRows = filter isProperRow [firstHeaderRowNr,firstHeaderRowNr+nrOfHeaderRows-1]
populationRows = filter isProperRow [firstPopRowNr..lastPopRowNr]
isProperRow :: Int -> Bool
isProperRow rowNr
| rowNr == relationNameRowNr = True -- The first row was recognized as tableStarter
| rowNr == conceptNameRowNr = isProperConceptName(rowNr,firstColumNr)
| otherwise = notEmpty (rowNr,firstColumNr)
notEmpty k
= case value k of
Just (CellText t) -> (not . T.null . trim) t
Just (CellDouble _) -> True
Just (CellBool _) -> True
Just (CellRich _) -> True
Just (CellError e) -> fatal $ "Error reading cell "++show e
Nothing -> False
theCols = filter isProperCol [1..maxColOfWorksheet]
isProperCol :: Int -> Bool
isProperCol colNr
| colNr == 1 = isProperConceptName (conceptNameRowNr,colNr)
| otherwise = isProperConceptName (conceptNameRowNr,colNr) && isProperRelName(relationNameRowNr,colNr)
isProperConceptName k
= case value k of
Just (CellText t) -> isJust . conceptNameWithOptionalDelimiter . trim $ t
_ -> False
isProperRelName k
= case value k of
Just (CellText t) -> (not . T.null . trim) t && (isLower . T.head . trim) t
_ -> False
conceptNameWithOptionalDelimiter :: T.Text -> Maybe ( String {- Conceptname -}
, Maybe Char {- Delimiter -}
)
-- Cases: 1) "[" ++ Conceptname ++ delimiter ++ "]"
-- 2) Conceptname
-- 3) none of above
-- Where Conceptname is any string starting with an uppercase character
conceptNameWithOptionalDelimiter t
| T.null t = Nothing
| T.head t == '[' && T.last t == ']'
= let mid = (T.reverse . T.tail . T.reverse . T.tail) t
(nm,d) = (T.init mid, T.last mid)
in if isDelimiter d && isConceptName nm
then Just (T.unpack nm , Just d)
else Nothing
| otherwise = if isConceptName t
then Just (T.unpack t, Nothing)
else Nothing
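-- Illustrative examples of the three cases above (hypothetical cell texts):
--
-- > conceptNameWithOptionalDelimiter "[Person;]" == Just ("Person", Just ';')
-- > conceptNameWithOptionalDelimiter "Person"    == Just ("Person", Nothing)
-- > conceptNameWithOptionalDelimiter "person"    == Nothing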
isDelimiter :: Char -> Bool
isDelimiter = isPunctuation
isConceptName :: T.Text -> Bool
isConceptName t = case T.uncons t of
Nothing -> False
(Just (h,_)) -> isUpper h
-- | trim is used to remove leading and trailing spaces
trim :: T.Text -> T.Text
trim = T.reverse . trim' . T.reverse . trim'
where
trim' :: T.Text -> T.Text
trim' t = case uncons t of
Just (' ',t') -> trim' t'
_ -> t
| AmpersandTarski/ampersand | src/Ampersand/Input/Xslx/XLSX.hs | gpl-3.0 | 13,178 | 0 | 21 | 5,202 | 3,306 | 1,724 | 1,582 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, EmptyDataDecls, FlexibleInstances, MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE UndecidableInstances #-}
module QuadCoordinates.Class
(module Tetrahedron.INormalDisc,
module Data.FormalOps,
module NormalSurfaceBasic,
QuadCoords(..),
quadSupport,
quadDominates,
quad_toFundEdgeSol,
quad_toDenseAssocs,
quad_toDenseList,
-- * Misc
onlyQuadAssocs,
)
where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Data.AdditiveGroup
import Data.FormalOps
import Data.Map(Map)
import Data.Maybe
import Data.Monoid
import Tetrahedron.INormalDisc
import Math.SparseVector
import MathUtil
import Util
import qualified Data.Map as M
import Data.Ratio
import Control.Exception
import NormalSurfaceBasic
import Triangulation.PreTriangulation
import Data.Typeable
-- | Representations of quadrilateral coordinate vectors.
--
-- Minimal implementation:
--
-- 'quadAsSparse' || 'quadCount' && ('quadAssocs' || 'quadAssocsDistinct')
--
-- The 'Typeable' constraint is mostly for use with typed exceptions.
class (Typeable q, NormalSurfaceCoefficients q r, Ord q) => QuadCoords q r | q -> r where
quadCount :: q -> INormalQuad -> r
-- | May (but need not) omit zero coefficients. May contain repeated quads.
quadAssocs :: q -> [(INormalQuad,r)]
-- | May (but need not) omit zero coefficients.
quadAssocsDistinct :: q -> [(INormalQuad,r)]
quadAsSparse :: q -> SparseVector INormalQuad r
quadCount = sparse_get . quadAsSparse
quadAssocs = quadAssocsDistinct
quadAssocsDistinct = sparse_toAssocs . quadAsSparse
quadAsSparse = sparse_fromAssocs . quadAssocs
instance QuadCoords INormalQuad Integer where
quadCount q q' = if q==q' then 1 else 0
quadAssocsDistinct q = [(q,1)]
quadAsSparse = flip sparse_singleton 1
instance QuadCoords INormalDisc Integer where
quadCount = eitherIND quadCount quadCount
quadAssocsDistinct = eitherIND quadAssocs quadAssocs
quadAsSparse = eitherIND quadAsSparse quadAsSparse
-- | Constant zero; only needed as a superclass
instance QuadCoords INormalTri Integer where
quadCount = const (const 0)
quadAssocsDistinct = const []
quadAsSparse = const sparse_zero
instance (Num n, QuadCoords q n) => QuadCoords [q] n where
quadCount xs q' = sum (flip quadCount q' <$> xs)
quadAssocs = concatMap quadAssocs
quadAsSparse = sparse_sumWith (+) . map quadAsSparse
instance (Typeable n, Num n, QuadCoords q n) => QuadCoords (FormalProduct n q) n where
quadCount (n :* q) = (n *) <$> quadCount q
quadAssocs (n :* q) = second (n *) <$> quadAssocs q
quadAssocsDistinct (n :* q) = second (n *) <$> quadAssocsDistinct q
quadAsSparse (n :* q) = (n*) <$> quadAsSparse q
instance (Num n, QuadCoords q n, QuadCoords q' n) => QuadCoords (FormalSum q q') n where
quadCount = fmap evalFormalSum . bitraverseFormalSum quadCount quadCount
quadAssocs = foldFormalSum (++) . bimapFormalSum quadAssocs quadAssocs
quadAsSparse (a :+ b) = sparse_addWith (+) (quadAsSparse a) (quadAsSparse b)
instance (Ord i, Num i, Typeable i) => QuadCoords (SparseVector INormalQuad i) i where
quadCount = sparse_get
quadAssocsDistinct = sparse_toAssocs
quadAsSparse = id
onlyQuadAssocs :: [(INormalDisc, t1)] -> [(INormalQuad, t1)]
onlyQuadAssocs = mapMaybe (traverseFst (eitherIND (const Nothing) Just))
instance (Typeable i, Ord i, Num i) => QuadCoords (SparseVector INormalDisc i) i where
quadCount v q = sparse_get v (iNormalQuadToINormalDisc q)
quadAssocs = quadAssocsDistinct
quadAssocsDistinct = onlyQuadAssocs . sparse_toAssocs
quadAsSparse = sparse_fromDistinctAscList . onlyQuadAssocs . sparse_toAscList
-- | The quads at which the given vector is non-zero (its support). May contain repeated quads.
quadSupport :: QuadCoords q r => q -> [INormalQuad]
quadSupport = mapMaybe (\(q,n) -> guard (n/=0) >> Just q) . quadAssocs
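-- For example, with the 'FormalProduct' instance above and any quad @q@ (a sketch):
--
-- > quadSupport (2 :* q) == [q]
-- > quadSupport (0 :* q) == []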
quadDominates
:: (QuadCoords q r, QuadCoords q1 r1) => q -> q1 -> Bool
quadDominates x y =
all
(\q -> quadCount x q /= 0)
(quadSupport y)
quad_toFundEdgeSol
:: (Integral i,
RatioToIntegral qr qi,
NonNegScalable (Ratio i) qr,
QuadCoords qr (Ratio i)) =>
qr -> qi
quad_toFundEdgeSol q =
let
denoms = fmap (denominator . snd) . quadAssocsDistinct $ q
in
fromMaybe (assert False undefined) .
ratioToIntegral . scaleNonNeg (lcms denoms % 1) $ q
quad_toDenseAssocs
:: (PreTriangulation tr, QuadCoords q r) =>
tr -> q -> [(INormalQuad, r)]
quad_toDenseAssocs tr qc = fmap (id &&& quadCount qc) (tINormalQuads tr)
quad_toDenseList
:: (PreTriangulation tr, QuadCoords q r) => tr -> q -> [r]
quad_toDenseList tr = fmap snd . quad_toDenseAssocs tr
| DanielSchuessler/hstri | QuadCoordinates/Class.hs | gpl-3.0 | 4,938 | 0 | 13 | 1,022 | 1,357 | 734 | 623 | 103 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
module SNet.Interfaces.C
( C'Handle
, Ptr
, newHandle
, withHandle
, destroyHandle
) where
import Prelude hiding (length)
import Control.Monad.Trans
import Foreign
import Foreign.C.Types
import SNet.Record
import SNet.Pattern
import SNet.Variants
import SNet.Interfaces.CHandle
type OutFun = Ptr () -> CInt -> Ptr IntPtr -> IO ()
foreign import ccall "wrapper" mkOutFun :: OutFun -> IO (FunPtr OutFun)
convert :: RecEntry a -> IntPtr -> a
convert (Tag _) = fromIntegral
convert (BTag _) = fromIntegral
convert (Field _) = error "To be implemented"
addRecEntry :: RecEntry e -> IntPtr -> Record Data -> Record Data
addRecEntry e = insert e . convert e
outputRecord :: Variants ps n
-> (Record Data -> Record Data -> IO ())
-> Ptr ()
-> CInt
-> Ptr IntPtr
-> IO ()
outputRecord variants snetOut recPtr i intPtrValues = do
baseRec <- deRefStablePtr . castPtrToStablePtr $ recPtr
values <- peekArray (withPattern length) intPtrValues
snetOut baseRec $ withPattern (constructRec values)
where withPattern :: (forall p. Pattern p -> a) -> a
withPattern = unsafeGetPattern variants i
constructRec = foldWithPattern addRecEntry emptyRecord
newHandle :: MonadIO m
=> Variants ps n
-> (Record Data -> Record Data -> IO ())
-> m (Ptr C'Handle)
newHandle variants snetOut = liftIO $ do
outFun <- mkOutFun $ outputRecord variants snetOut
lengths <- newArray patLengths
values <- mallocArray $ fromIntegral (maximum patLengths)
new C'Handle
{ c'Handle'lengths = lengths
, c'Handle'values = values
, c'Handle'record = nullPtr
, c'Handle'snetOut = outFun
}
where patLengths = variantsToList variants length
withHandle :: MonadIO m
=> (Ptr C'Handle -> Record Data -> IO ())
-> Ptr C'Handle
-> Record Data
-> m ()
withHandle boxfun hndPtr record = liftIO $ do
stableRec <- newStablePtr record
poke recPtr $ castStablePtrToPtr stableRec
boxfun hndPtr record
poke recPtr nullPtr
freeStablePtr stableRec
where recPtr = p'Handle'record hndPtr
destroyHandle :: MonadIO m => Ptr C'Handle -> m ()
destroyHandle hndPtr = liftIO $ do
hnd <- peek hndPtr
free . c'Handle'lengths $ hnd
free . c'Handle'values $ hnd
freeHaskellFunPtr . c'Handle'snetOut $ hnd
free hndPtr
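-- Typical lifecycle of a handle (a sketch; 'variants', 'snetOut', 'boxfun'
-- and 'rec' are assumed to be supplied by the caller):
--
-- > hnd <- newHandle variants snetOut -- allocate the length/value arrays, wrap 'outputRecord' as a FunPtr
-- > withHandle boxfun hnd rec         -- pin 'rec' behind a StablePtr while the box function runs
-- > destroyHandle hnd                 -- free the arrays and the FunPtr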
| merijn/SNet2.0 | SNet/Interfaces/C.hs | gpl-3.0 | 2,496 | 0 | 12 | 625 | 785 | 383 | 402 | 71 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Main ( main ) where
import Control.Concurrent ( forkIO )
import Control.Monad ( when, forever )
import Data.ByteString.Char8 ( pack )
import Data.ByteString.Lazy ( fromStrict )
import Data.Binary ( decodeOrFail )
import qualified System.ZMQ4 as ZMQ
import System.Console.CmdArgs ( (&=), Data, Typeable )
import qualified System.Console.CmdArgs as CA
import PlotHo ( runPlotter )
import Dyno.DirectCollocation.Dynamic ( DynPlotPoints, CollTrajMeta, newCollocationChannel )
import Dynoplot.Channel ( dynoplotUrl, dynoplotChannelName )
sub :: String -> ((DynPlotPoints Double, CollTrajMeta) -> IO ()) -> String -> IO ()
sub ip' writeChan name = ZMQ.withContext $ \context ->
ZMQ.withSocket context ZMQ.Sub $ \subscriber -> do
ZMQ.connect subscriber ip'
ZMQ.subscribe subscriber (pack name)
forever $ do
_ <- ZMQ.receive subscriber
mre <- ZMQ.moreToReceive subscriber
when mre $ do
msg <- ZMQ.receive subscriber
let decoded :: (DynPlotPoints Double, CollTrajMeta)
decoded = case decodeOrFail (fromStrict msg) of
Left (_, _, err) -> error $ "decode failure: " ++ err
Right (_, _, t) -> t
writeChan decoded
main :: IO ()
main = do
args <- CA.cmdArgs (myargs &= CA.program "dynoplot")
let ip' = ip args
channel' = channel args
putStrLn $ "using ip \""++ip'++"\""
putStrLn $ "using channel \""++channel'++"\""
(ch, newMessage) <- newCollocationChannel channel'
_ <- forkIO $ sub ip' newMessage channel'
runPlotter Nothing [ch]
data VisArgs = VisArgs { ip :: String
, channel :: String
} deriving (Show, Data, Typeable)
myargs :: VisArgs
myargs = VisArgs { ip = dynoplotUrl &= CA.help "an IP address" &= CA.typ "ADDRESS"
, channel = dynoplotChannelName &= CA.help "zmq channel name"
} &= CA.summary "plotter for dynobud OCPs"
| ghorn/dynobud | dynobud/examples/Dynoplot.hs | lgpl-3.0 | 2,008 | 0 | 24 | 487 | 619 | 327 | 292 | 46 | 2 |
module ChapterExercises where
import State
-- 1. Construct a State where the state is also the value you return.
get :: Moi s s
get = Moi (\s -> (s,s))
-- 2. Construct a State where the resulting state is the argument provided and
-- the value is defaulted to unit.
put :: s -> Moi s ()
put s = Moi (\_ -> ((), s))
-- 3. Run the State with s and get the state that results.
exec :: Moi s a -> s -> s
exec (Moi sa) s = snd (sa s)
-- 4. Run the State with s and get the value that results.
eval :: Moi s a -> s -> a
eval (Moi sa) s = fst (sa s)
-- 5. Write a function which applies a function to create a new State.
modify :: (s -> s) -> Moi s ()
modify sToS = Moi $ \s -> ((), sToS s)
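-- Quick sanity checks for the exercises above (a sketch; this assumes the
-- State module exports the accessor as runMoi :: Moi s a -> s -> (a, s)):
--
-- > runMoi get "curry"          == ("curry", "curry")
-- > runMoi (put "blah") 2       == ((), "blah")
-- > exec (put "wilma") "daphne" == "wilma"
-- > eval get "bunnicula"        == "bunnicula"
-- > runMoi (modify (+1)) 0      == ((), 1)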
| dmp1ce/Haskell-Programming-Exercises | Chapter 23/Chapter exercises.hs | unlicense | 696 | 0 | 9 | 171 | 225 | 121 | 104 | 12 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wno-redundant-constraints #-} -- for the Getter instances
module Pos.Chain.Block.Block
( Block
, getBlockHeader
, blockHeader
, verifyBlockInternal
-- * GenericBlock
, GenericBlock
, mkGenericBlockUnsafe
, gbHeader
, gbBody
, gbExtra
, gbPrevBlock
, gbBodyProof
, gbConsensus
-- * GenesisBlock
, GenesisBlock
, mkGenesisBlock
, genesisBlock0
, genBlockPrevBlock
, genBlockProof
, genBlockEpoch
, genBlockDifficulty
, genBlockHeaderAttributes
, genBlockLeaders
, genBlockAttributes
, verifyGenesisBlock
-- * MainBlock
, MainBlock
, mkMainBlock
, mkMainBlockExplicit
, mainBlockPrevBlock
, mainBlockProof
, mainBlockSlot
, mainBlockLeaderKey
, mainBlockDifficulty
, mainBlockSignature
, mainBlockBlockVersion
, mainBlockSoftwareVersion
, mainBlockHeaderAttributes
, mainBlockEBDataProof
, mainBlockTxPayload
, mainBlockSscPayload
, mainBlockDlgPayload
, mainBlockUpdatePayload
, mainBlockAttributes
, verifyMainBlock
, blockLastSlotInfo
) where
import Universum
import Control.Lens (Getter, choosing, makeLenses, to)
import Control.Monad.Except (MonadError (throwError))
import Data.SafeCopy (SafeCopy (..), contain, safeGet, safePut)
import Formatting (bprint, build, int, sformat, stext, (%))
import qualified Formatting.Buildable as Buildable
import Serokell.Util (Color (Magenta), colorize, listJson)
import Pos.Binary.Class (Bi (..), encodeListLen, enforceSize)
import Pos.Chain.Block.Genesis (GenesisBody (..),
GenesisBodyAttributes, GenesisConsensusData (..),
GenesisExtraBodyData (..), GenesisExtraHeaderData (..),
GenesisHeaderAttributes, GenesisProof (..),
checkGenesisProof, gbLeaders, gcdEpoch, gebAttributes)
import Pos.Chain.Block.HasPrevBlock (HasPrevBlock (..))
import Pos.Chain.Block.Header (BlockHeader (..), BlockSignature (..),
GenericBlockHeader, HasHeaderHash (..), HeaderHash,
MainConsensusData (..), blockHeaderHash, gbhBodyProof,
gbhConsensus, gbhPrevBlock, genHeaderAttributes,
genHeaderDifficulty, genHeaderEpoch, genHeaderProof,
headerLastSlotInfo, mainHeaderAttributes,
mainHeaderBlockVersion, mainHeaderDifficulty,
mainHeaderEBDataProof, mainHeaderLeaderKey,
mainHeaderProof, mainHeaderSignature, mainHeaderSlot,
mainHeaderSoftwareVersion, mkGenesisHeader,
mkMainHeaderExplicit, verifyMainBlockHeader)
import Pos.Chain.Block.Main (BlockBodyAttributes,
BlockHeaderAttributes, MainBody (..),
MainExtraBodyData (..), MainExtraHeaderData (..),
MainProof (..), checkMainProof, mbDlgPayload,
mbSscPayload, mbTxPayload, mbTxs, mbUpdatePayload,
mebAttributes, verifyMainBody)
import Pos.Chain.Block.Slog.Types (LastSlotInfo (..))
import Pos.Chain.Delegation.HeavyDlgIndex (ProxySKBlockInfo)
import Pos.Chain.Delegation.Payload (DlgPayload)
import Pos.Chain.Genesis.Config as Genesis (Config (..))
import Pos.Chain.Genesis.Hash (GenesisHash (..))
import Pos.Chain.Ssc.Functions (verifySscPayload)
import Pos.Chain.Ssc.Payload (SscPayload)
import Pos.Chain.Txp.Tx (TxValidationRules)
import Pos.Chain.Txp.TxPayload (TxPayload)
import Pos.Chain.Update (ConsensusEra (..))
import Pos.Chain.Update.BlockVersion (BlockVersion,
HasBlockVersion (..))
import Pos.Chain.Update.Payload (UpdatePayload)
import Pos.Chain.Update.SoftwareVersion (HasSoftwareVersion (..),
SoftwareVersion)
import Pos.Core.Attributes (mkAttributes)
import Pos.Core.Common (ChainDifficulty, HasDifficulty (..),
SlotLeaders, slotLeadersF)
import Pos.Core.Slotting (EpochIndex, HasEpochIndex (..),
HasEpochOrSlot (..), SlotCount, SlotId (..))
import Pos.Crypto (Hash, ProtocolMagic, PublicKey, SecretKey, hash)
import Pos.Util.Some (Some (..))
--------------------------------------------------------------------------------
-- Block
--------------------------------------------------------------------------------
type Block = Either GenesisBlock MainBlock
instance HasHeaderHash Block where
headerHash = blockHeaderHash . getBlockHeader
instance HasDifficulty Block where
difficultyL = choosing difficultyL difficultyL
blockHeader :: Getter Block BlockHeader
blockHeader = to getBlockHeader
-- | Take 'BlockHeader' from either 'GenesisBlock' or 'MainBlock'.
getBlockHeader :: Block -> BlockHeader
getBlockHeader = \case
Left gb -> BlockHeaderGenesis (_gbHeader gb)
Right mb -> BlockHeaderMain (_gbHeader mb)
blockLastSlotInfo :: SlotCount -> Block -> Maybe LastSlotInfo
blockLastSlotInfo slotCount =
headerLastSlotInfo slotCount . getBlockHeader
-- | Verify a Block in isolation.
verifyBlockInternal
:: MonadError Text m
=> Genesis.Config
-> ConsensusEra
-> TxValidationRules
-> Block
-> m ()
verifyBlockInternal genesisConfig era txValRules =
either verifyGenesisBlock (verifyMainBlock genesisConfig era txValRules)
--------------------------------------------------------------------------------
-- GenericBlock
--------------------------------------------------------------------------------
-- | In general Block consists of header and body. It may contain
-- extra data as well.
data GenericBlock bodyProof consensus extraH body extraB = GenericBlock
{ _gbHeader :: !(GenericBlockHeader bodyProof consensus extraH)
, _gbBody :: !body
, _gbExtra :: !extraB
} deriving (Eq, Show, Generic, NFData)
instance
(Bi bodyProof , Bi consensus, Bi extraH, Bi body, Bi extraB)
=> Bi (GenericBlock bodyProof consensus extraH body extraB)
where
encode gb = encodeListLen 3
<> encode (_gbHeader gb)
<> encode (_gbBody gb)
<> encode (_gbExtra gb)
decode = do
enforceSize "GenericBlock" 3
_gbHeader <- decode
_gbBody <- decode
_gbExtra <- decode
pure GenericBlock {..}
instance
( SafeCopy bodyProof
, SafeCopy consensus
, SafeCopy extraH
, SafeCopy body
, SafeCopy extraB
)
=> SafeCopy (GenericBlock bodyProof consensus extraH body extraB)
where
getCopy = contain $ do
_gbHeader <- safeGet
_gbBody <- safeGet
_gbExtra <- safeGet
return $! GenericBlock {..}
putCopy GenericBlock {..} = contain $ do
safePut _gbHeader
safePut _gbBody
safePut _gbExtra
mkGenericBlockUnsafe
:: GenericBlockHeader bodyProof consensus extraH
-> body
-> extraB
-> GenericBlock bodyProof consensus extraH body extraB
mkGenericBlockUnsafe = GenericBlock
----------------------------------------------------------------------------
-- GenesisBlock
----------------------------------------------------------------------------
type GenesisBlock = GenericBlock
GenesisProof
GenesisConsensusData
GenesisExtraHeaderData
GenesisBody
GenesisExtraBodyData
instance Buildable GenesisBlock where
build GenericBlock {..} =
bprint
(stext%":\n"%
" "%build%
stext
)
(colorize Magenta "GenesisBlock")
_gbHeader
formatLeaders
where
GenesisBody {..} = _gbBody
formatIfNotNull formatter l = if null l then mempty else sformat formatter l
formatLeaders = formatIfNotNull
(" leaders: "%slotLeadersF%"\n") (toList _gbLeaders)
instance HasEpochOrSlot GenesisBlock where
getEpochOrSlot = getEpochOrSlot . _gbHeader
instance HasHeaderHash GenesisBlock where
headerHash = blockHeaderHash . BlockHeaderGenesis . _gbHeader
-- | Smart constructor for 'GenesisBlock'.
mkGenesisBlock
:: ProtocolMagic
-> Either GenesisHash BlockHeader
-> EpochIndex
-> SlotLeaders
-> GenesisBlock
mkGenesisBlock pm prevHeader epoch leaders = GenericBlock header body extra
where
header = mkGenesisHeader pm prevHeader epoch body
body = GenesisBody leaders
extra = GenesisExtraBodyData $ mkAttributes ()
-- | Creates the very first genesis block.
genesisBlock0 :: ConsensusEra -> ProtocolMagic -> GenesisHash -> SlotLeaders -> GenesisBlock
genesisBlock0 Original pm genesisHash leaders =
mkGenesisBlock pm (Left genesisHash) 0 leaders
genesisBlock0 (OBFT _) pm genesisHash _leaders =
mkGenesisBlock pm (Left genesisHash) 0 []
-- | To verify a genesis block we only have to check the body proof.
verifyGenesisBlock
:: MonadError Text m
=> GenesisBlock
-> m ()
verifyGenesisBlock GenericBlock {..} =
checkGenesisProof _gbBody (_gbHeader ^. gbhBodyProof)
----------------------------------------------------------------------------
-- MainBlock
----------------------------------------------------------------------------
-- | MainBlock is a block with transactions and MPC messages. It's the
-- main part of our consensus algorithm.
type MainBlock = GenericBlock
MainProof
MainConsensusData
MainExtraHeaderData
MainBody
MainExtraBodyData
instance HasEpochOrSlot MainBlock where
getEpochOrSlot = getEpochOrSlot . _gbHeader
instance HasHeaderHash MainBlock where
headerHash = blockHeaderHash . BlockHeaderMain . _gbHeader
-- | Smart constructor for 'MainBlock'.
mkMainBlock
:: ProtocolMagic
-> BlockVersion
-> SoftwareVersion
-> Either GenesisHash BlockHeader
-> SlotId
-> SecretKey
-> ProxySKBlockInfo
-> MainBody
-> MainBlock
mkMainBlock pm bv sv prevHeader = mkMainBlockExplicit pm bv sv prevHash difficulty
where
prevHash = either getGenesisHash headerHash prevHeader
difficulty = either (const 0) (succ . view difficultyL) prevHeader
-- | Smart constructor for 'MainBlock', without requiring the entire previous
-- 'BlockHeader'. Instead, you give its hash and the difficulty of this block.
-- These are derived from the previous header in 'mkMainBlock' so if you have
-- the previous header, consider using that one.
mkMainBlockExplicit
:: ProtocolMagic
-> BlockVersion
-> SoftwareVersion
-> HeaderHash
-> ChainDifficulty
-> SlotId
-> SecretKey
-> ProxySKBlockInfo
-> MainBody
-> MainBlock
mkMainBlockExplicit pm bv sv prevHash difficulty slotId sk pske body =
GenericBlock
(mkMainHeaderExplicit pm prevHash difficulty slotId sk pske body extraH)
body
extraB
where
extraB :: MainExtraBodyData
extraB = MainExtraBodyData (mkAttributes ())
extraH :: MainExtraHeaderData
extraH =
MainExtraHeaderData
bv
sv
(mkAttributes ())
(hash extraB)
verifyMainBlock
:: MonadError Text m
=> Genesis.Config
-> ConsensusEra
-> TxValidationRules
-> MainBlock
-> m ()
verifyMainBlock genesisConfig era txValRules GenericBlock {..} = do
let pm = configProtocolMagic genesisConfig
verifyMainBlockHeader pm _gbHeader
verifyMainBody pm txValRules _gbBody
-- No need to verify the main extra body data. It's an 'Attributes ()'
-- which is valid whenever it's well-formed.
--
-- Check internal consistency: the body proofs are all correct.
checkMainProof _gbBody (_gbHeader ^. gbhBodyProof)
-- Check that the headers' extra body data hash is correct.
-- This isn't subsumed by the body proof check.
unless (hash (_gbExtra) == (_gbHeader ^. mainHeaderEBDataProof)) $
throwError "Hash of extra body data is not equal to its representation in the header."
-- Ssc and Dlg consistency checks which require the header, and so can't
-- be done in 'verifyMainBody'.
case era of
Original ->
either (throwError . pretty) pure $
verifySscPayload
genesisConfig
(Right (Some _gbHeader))
(_mbSscPayload _gbBody)
OBFT _ -> pass -- We don't perform SSC operations during the OBFT era
----------------------------------------------------------------------------
-- Generic Block Lenses
---------------------------------------------------------------------------
makeLenses ''GenericBlock
-- | Lens from 'GenericBlock' to 'BHeaderHash' of its parent.
gbPrevBlock :: Lens' (GenericBlock a b c d e) HeaderHash
gbPrevBlock = gbHeader . gbhPrevBlock
-- | Lens from 'GenericBlock' to 'BodyProof'.
gbBodyProof :: Lens' (GenericBlock bodyProof b c d e) bodyProof
gbBodyProof = gbHeader . gbhBodyProof
-- | Lens from 'GenericBlock' to 'ConsensusData'.
gbConsensus :: Lens' (GenericBlock a consensus c d e) consensus
gbConsensus = gbHeader . gbhConsensus
instance HasPrevBlock (GenericBlock bodyProof consensus extraH body extraB) where
prevBlockL = gbHeader . gbhPrevBlock
----------------------------------------------------------------------------
-- GenesisBlock lenses
----------------------------------------------------------------------------
-- | Lens from 'GenesisBlock' to 'HeaderHash' of its parent.
genBlockPrevBlock :: Lens' GenesisBlock HeaderHash
genBlockPrevBlock = gbPrevBlock
-- | Lens from 'GenesisBlock' to 'GenesisProof'.
genBlockProof :: Lens' GenesisBlock GenesisProof
genBlockProof = gbHeader . genHeaderProof
-- | Lens from 'GenesisBlock' to 'EpochIndex'.
genBlockEpoch :: Lens' GenesisBlock EpochIndex
genBlockEpoch = gbHeader . genHeaderEpoch
-- | Lens from 'GenesisBlock' to 'ChainDifficulty'.
genBlockDifficulty :: Lens' GenesisBlock ChainDifficulty
genBlockDifficulty = gbHeader . genHeaderDifficulty
-- | Lens from 'GenesisBlock' to 'GenesisHeaderAttributes'.
genBlockHeaderAttributes :: Lens' GenesisBlock GenesisHeaderAttributes
genBlockHeaderAttributes = gbHeader . genHeaderAttributes
-- | Lens from 'GenesisBlock' to 'SlotLeaders'.
genBlockLeaders :: Lens' GenesisBlock SlotLeaders
genBlockLeaders = gbBody . gbLeaders
-- | Lens from 'GenesisBlock' to 'GenesisBodyAttributes'.
genBlockAttributes :: Lens' GenesisBlock GenesisBodyAttributes
genBlockAttributes = gbExtra . gebAttributes
instance HasDifficulty GenesisBlock where
difficultyL = gbHeader . difficultyL
instance HasEpochIndex GenesisBlock where
epochIndexL = gbHeader . gbhConsensus . gcdEpoch
----------------------------------------------------------------------------
-- MainBlock lenses
----------------------------------------------------------------------------
-- | Lens from 'MainBlock' to 'HeaderHash' of its parent.
mainBlockPrevBlock :: Lens' MainBlock HeaderHash
mainBlockPrevBlock = gbPrevBlock
-- | Lens from 'MainBlock' to 'MainProof'.
mainBlockProof :: Lens' MainBlock MainProof
mainBlockProof = gbHeader . mainHeaderProof
-- | Lens from 'MainBlock' to 'SlotId'.
mainBlockSlot :: Lens' MainBlock SlotId
mainBlockSlot = gbHeader . mainHeaderSlot
-- | Lens from 'MainBlock' to 'PublicKey'.
mainBlockLeaderKey :: Lens' MainBlock PublicKey
mainBlockLeaderKey = gbHeader . mainHeaderLeaderKey
-- | Lens from 'MainBlock' to 'ChainDifficulty'.
mainBlockDifficulty :: Lens' MainBlock ChainDifficulty
mainBlockDifficulty = gbHeader . mainHeaderDifficulty
-- | Lens from 'MainBlock' to 'Signature'.
mainBlockSignature :: Lens' MainBlock BlockSignature
mainBlockSignature = gbHeader . mainHeaderSignature
-- | Lens from 'MainBlock' to 'BlockVersion'.
mainBlockBlockVersion :: Lens' MainBlock BlockVersion
mainBlockBlockVersion = gbHeader . mainHeaderBlockVersion
-- | Lens from 'MainBlock' to 'SoftwareVersion'.
mainBlockSoftwareVersion :: Lens' MainBlock SoftwareVersion
mainBlockSoftwareVersion = gbHeader . mainHeaderSoftwareVersion
-- | Lens from 'MainBlock' to 'BlockHeaderAttributes'.
mainBlockHeaderAttributes :: Lens' MainBlock BlockHeaderAttributes
mainBlockHeaderAttributes = gbHeader . mainHeaderAttributes
-- | Lens from 'MainBlock' to proof (hash) of 'MainExtraBodyData'.
mainBlockEBDataProof :: Lens' MainBlock (Hash MainExtraBodyData)
mainBlockEBDataProof = gbHeader . mainHeaderEBDataProof
-- | Lens from 'MainBlock' to 'TxPayload'.
mainBlockTxPayload :: Lens' MainBlock TxPayload
mainBlockTxPayload = gbBody . mbTxPayload
-- | Lens from 'MainBlock' to 'SscPayload'.
mainBlockSscPayload :: Lens' MainBlock SscPayload
mainBlockSscPayload = gbBody . mbSscPayload
-- | Lens from 'MainBlock' to 'UpdatePayload'.
mainBlockUpdatePayload :: Lens' MainBlock UpdatePayload
mainBlockUpdatePayload = gbBody . mbUpdatePayload
-- | Lens from 'MainBlock' to 'DlgPayload'.
mainBlockDlgPayload :: Lens' MainBlock DlgPayload
mainBlockDlgPayload = gbBody . mbDlgPayload
-- | Lens from 'MainBlock' to 'BlockBodyAttributes'.
mainBlockAttributes :: Lens' MainBlock BlockBodyAttributes
mainBlockAttributes = gbExtra . mebAttributes
instance Buildable MainBlock where
build mainBlock =
bprint
(stext%":\n"%
" "%build%
" transactions ("%int%" items): "%listJson%"\n"%
" "%build%"\n"%
" "%build%"\n"%
" update payload: "%build%"\n"%
" "%build
)
(colorize Magenta "MainBlock")
(mainBlock ^. gbHeader)
(length txs)
txs
(mainBlock ^. mainBlockDlgPayload)
(mainBlock ^. mainBlockSscPayload)
        (mainBlock ^. mainBlockUpdatePayload)
(mainBlock ^. gbExtra)
where
txs = mainBlock ^. gbBody . mbTxs
instance HasDifficulty MainBlock where
difficultyL = mainBlockDifficulty
instance HasEpochIndex MainBlock where
epochIndexL = mainBlockSlot . epochIndexL
instance HasBlockVersion MainBlock where
blockVersionL = mainBlockBlockVersion
instance HasSoftwareVersion MainBlock where
softwareVersionL = mainBlockSoftwareVersion
| input-output-hk/cardano-sl | chain/src/Pos/Chain/Block/Block.hs | apache-2.0 | 18,559 | 0 | 26 | 4,167 | 3,232 | 1,809 | 1,423 | -1 | -1 |
{-# LANGUAGE RebindableSyntax, NoMonomorphismRestriction #-}
import Prelude hiding (Monad(..))
import Control.Effect
import Control.Effect.Cond
import Control.Effect.Maybe
headM x = ifM (x == []) (INothing) (IJust (head x))
foo x y = do x' <- headM x
y' <- headM y
return [x', y'] | dorchard/effect-monad | examples/Maybe.hs | bsd-2-clause | 309 | 2 | 9 | 70 | 122 | 60 | 62 | 9 | 1 |
{-# LANGUAGE GADTs #-}
-- This module is going to need a lot of thought. It needs to be very high level
-- to capture what the code needs to do while not using any paradigm-specific
-- constructs, so that it can be imported into a generic AST of any paradigm.
-- Holding off for now; just using the imperative OO AST.
module Language.Drasil.Code.AST where
import Language.Drasil
type HighLevelCode = [Module]
type Name = String
data Module = Name [Field] [Function] Uses
type Uses = [Module]
data Function = Function Name Visibility [In] Out [Objective]
| MainFunction [Objective]
data In where
In :: (Quantity c) => c -> In
data Out where
Out :: (Quantity c) => c -> Out
data Field where
Field :: (Quantity c) => Visibility -> c -> Field
data Visibility = Public
| Private
data Objective where
Calculation :: EqChunk -> Objective
Call :: Function -> [Expr] -> Objective
GetInput :: (Quantity c) => c -> Objective
PrintOutput :: Output -> Objective
data Output where
Str :: String -> Output
Var :: (Quantity c) => c -> Output
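-- A tiny construction sketch (hypothetical; only string output is shown,
-- since 'Calculation' and 'GetInput' need chunks from Language.Drasil):
--
-- > helloMain :: Function
-- > helloMain = MainFunction [PrintOutput (Str "hello, world")]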
| JacquesCarette/literate-scientific-software | code/drasil-code/Language/Drasil/Code/AST.hs | bsd-2-clause | 1,087 | 0 | 8 | 247 | 259 | 156 | 103 | 25 | 0 |
module Arrays where
import Data.List (transpose)
window :: Int -> [a] -> [[a]]
window n xs | length xs >= n = take n xs : window n (drop 1 xs)
| otherwise = []
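-- For example (sliding, not chunked):
--
-- > window 2 [1,2,3,4] == [[1,2],[2,3],[3,4]]
-- > window 5 [1,2,3]   == []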
verticalWindow :: Int -> [[a]] -> [[a]]
verticalWindow size grid = concatMap transpose rowGroups
where
rowGroups = window size grid
horizontalWindow :: Int -> [[a]] -> [[a]]
horizontalWindow size = concatMap (window size)
forwardDiagWindow :: Int -> [[a]] -> [[a]]
forwardDiagWindow size grid = concatMap (transpose . skew) rowGroups
where
rowGroups = window size grid
skew = map (\(off,row) -> (take (length row - size + 1) . drop off) row) . zip [size-1,size-2..0]
backwardDiagWindow :: Int -> [[a]] -> [[a]]
backwardDiagWindow size grid = concatMap (transpose . skew) rowGroups
where
rowGroups = window size grid
skew = map (\(off,row) -> (take (length row - size + 1) . drop off) row) . zip [0..size-1]
| tyehle/euler-haskell | src/Arrays.hs | bsd-3-clause | 916 | 0 | 18 | 199 | 447 | 236 | 211 | 18 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Database.Oracle.Enumerator
main :: IO ()
main = loop 0 $ connect "cpf" "cpf" "oracle/ORA11G"
where loop i dbh = do
(_, newdbh) <- withContinuedSession dbh $ do
execDDL qCreate
_ <- execDML qInsert
execDDL qDrop
commit
print i
loop (i+1) newdbh
qCreate = sql "create global temporary table callRows (id NUMBER) on commit delete rows"
qInsert = cmdbind "insert into callrows (id) select ? from dual" [bindP (1::Int)]
--qInsert = sql "insert into callrows (id) select 1 from dual" -- using this version there is no space leak
qDrop = sql "drop table callrows"
| paulrzcz/takusen-oracle | Example/Main.hs | bsd-3-clause | 788 | 0 | 14 | 241 | 158 | 78 | 80 | 17 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main
( main
) where
import qualified Drive as V
import Data.Monoid ((<>))
import Drive ((>--->))
import Drive.Describe
import Drive.File
program :: FileP ()
program
= write "some content"
main :: IO ()
main = do
describe program
run program
where
describe
= fileToDescribeI >---> execDescribe
run
= withFile "write-example.txt"
. (V.identityI >---> execFile)
fileToDescribeI :: FileF a -> DescribeP a
fileToDescribeI (WriteFile t a) = a <$ debug ("writing to file \"" <> t <> "\"")
| palf/free-driver | packages/drive-file/src/Main.hs | bsd-3-clause | 634 | 0 | 10 | 192 | 174 | 95 | 79 | 22 | 1 |
{-# LANGUAGE NamedFieldPuns, OverloadedStrings #-}
module MusicPad.Env.Version where
import Prelude ()
import qualified Prelude as P
import MPS.Env
_song_version, _state_version :: Int
_song_version = 1
_state_version = 1 | nfjinjing/level5 | src/MusicPad/Env/Version.hs | bsd-3-clause | 224 | 0 | 4 | 30 | 42 | 29 | 13 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Lib (findInsertLength, mergeReads, revComp, qualToChar, charToQual, seqMatch)
import qualified Data.ByteString.Char8 as B
import Test.Tasty (defaultMain, TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, assertEqual, testCase)
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [ testCase "testing revComp" assertCorrectRevComp,
testCase "testing searchEndDistance" checkFindInsertLength,
testCase "testing mergeReads" assertCorrectMergeReads,
testCase "test seqMatch" testSeqMatch]
assertCorrectRevComp :: Assertion
assertCorrectRevComp = do
assertEqual "revComp" "AAGGT" (revComp "ACCTT")
assertEqual "revComp" "AANGT" (revComp "ACNTT")
checkFindInsertLength :: Assertion
checkFindInsertLength = do
assertEqual "searchReadEndDist" (Just 6) (findInsertLength "ACCTGCCTGC" "GCAGGTAATC" 0.0 4 5)
assertEqual "searchReadEndDist" (Just 8) (findInsertLength "ACCTGCCTGC" "AGGCAGGTCC" 0.0 4 5)
assertEqual "searchReadEndDist" (Just 10) (findInsertLength "ACCTGCCTGC" "GCAGGCAGGT" 0.0 8 5)
assertEqual "searchReadEndDist" (Just 13) (findInsertLength "ACCTGCCTGC" "TTTGCAGGCA" 0.0 4 5)
assertEqual "searchReadEndDist" Nothing (findInsertLength "ACCTGCCTGC" "TTGGTTGGCC" 0.0 4 5)
assertCorrectMergeReads :: Assertion
assertCorrectMergeReads = do
let seq1 = "ACCTGCCTGC"
seq2 = "GCTGGTAATC"
qual1 = replicate 10 40
qual2 = replicate 10 30
(mergedSeq, mergedQual) = mergeReads seq1 (qualToText qual1) seq2 (qualToText qual2) 6
assertEqual "mergedSeq" "ACCTGC" mergedSeq
assertEqual "mergedQual" [40, 40, 40, 1, 40, 40] (textToQual mergedQual)
where
qualToText = B.pack . map qualToChar
textToQual = map charToQual . B.unpack
testSeqMatch :: Assertion
testSeqMatch = do
assertEqual "seqMatch" False (seqMatch "AACCT" "ACCCT" 0.05 False)
assertEqual "seqMatch" True (seqMatch "AACCT" "ACCCT" 0.3 False)
| stschiff/mergeAndClipFastq | test/Spec.hs | bsd-3-clause | 2,059 | 0 | 12 | 400 | 554 | 278 | 276 | 38 | 1 |
module TestEvaluation
(evaluationTests)
where
import Parsing(readExpr)
import Evaluation
import Data (LispVal(..), IOThrowsError)
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Monadic(monadicIO, assert, pick, run)
import TestArbitraryData()
import Control.Monad.Except(runExceptT, liftIO)
evaluationTests :: SpecWith ()
evaluationTests = describe "Evaluation Tests" $ do
basicEval
arithmeticTest
testFunctionAndClosures
(⊢) :: LispVal -> LispVal -> IO Bool
toEval ⊢ value = ((==) <$> runExceptT evalValue) <*> (return . Right $ value)
where evalValue = evalEmptyEnv toEval
shouldEvaluateTo :: String -> LispVal -> Expectation
shouldEvaluateTo action result =
runExceptT evalued `shouldReturn` Right result
where evalued = readAndEval action
readAndEval expr = liftThrows (readExpr expr) >>= evalEmptyEnv
evalEmptyEnv :: LispVal -> IOThrowsError LispVal
evalEmptyEnv val = do
env <- liftIO primitiveEnv
eval env val
propertyM :: (Arbitrary a, Show a) => (a-> IO Bool) -> Property
propertyM prop = monadicIO $ do
testdata <- pick arbitrary
result <- run $ prop testdata
assert result
basicEval :: Spec
basicEval = describe "Basic eval tests" $ do
it "eval numbers to themselves " $ propertyM numbersSelfEval
it "eval strings to themselves " $ propertyM stringsSelfEval
it "eval booleans to themselves " $ propertyM boolsSelfEval
it "eval quoted expr to expr itself" $ propertyM quotedExprToUnQuoted
numbersSelfEval :: Integer -> IO Bool
numbersSelfEval n =
number ⊢ number
where number = Number n
stringsSelfEval :: String -> IO Bool
stringsSelfEval s =
string ⊢ string
where string = String s
boolsSelfEval :: Bool -> IO Bool
boolsSelfEval b =
bool ⊢ bool
where bool = Bool b
quotedExprToUnQuoted :: LispVal -> IO Bool
quotedExprToUnQuoted expr =
List [Atom "quote", expr] ⊢ expr
arithmeticTest :: Spec
arithmeticTest = describe "Evaluates complex expressions:" $
it "correctly evals: (+ 2 4 (- 5 3) (* 1 3)) -> 11" $
"(+ 2 4 (- 5 3) (* 1 3))" `shouldEvaluateTo` Number 11
testFunctionAndClosures :: Spec
testFunctionAndClosures = describe "Declaring variables, functions and closures" $ do
it "can declare a var" $
"(begin (define var 45) var)" `shouldEvaluateTo` Number 45
it "can declare a function (define (factorial x) (...)) (factorial 10)" $
"(begin (define (factorial x) (if (= x 1) 1 (* x (factorial (- x 1))))) (factorial 10))" `shouldEvaluateTo` Number 3628800
it "can declare a closure (counter example" $
("(begin (define (counter inc) (lambda (x) (set! inc (+ x inc)) inc))" ++
" (define my-count (counter 5))" ++
" (my-count 3) (my-count 6) (my-count 5))") `shouldEvaluateTo` Number 19
| davideGiovannini/scheme-repl | test/TestEvaluation.hs | bsd-3-clause | 2,818 | 0 | 12 | 578 | 687 | 347 | 340 | 67 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_multiview - device extension
--
-- == VK_KHR_multiview
--
-- [__Name String__]
-- @VK_KHR_multiview@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 54
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1-promotions Vulkan 1.1>
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_multiview] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_KHR_multiview extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-10-28
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.1 Core
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/KHR/SPV_KHR_multiview.html SPV_KHR_multiview>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/ext/GL_EXT_multiview.txt GL_EXT_multiview>
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension has the same goal as the OpenGL ES @GL_OVR_multiview@
-- extension. Multiview is a rendering technique originally designed for VR
-- where it is more efficient to record a single set of commands to be
-- executed with slightly different behavior for each “view”.
--
-- It includes a concise way to declare a render pass with multiple views,
-- and gives implementations freedom to render the views in the most
-- efficient way possible. This is done with a multiview configuration
-- specified during
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#renderpass render pass>
-- creation with the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_multiview.RenderPassMultiviewCreateInfo'
-- passed into 'Vulkan.Core10.Pass.RenderPassCreateInfo'::@pNext@.
--
-- This extension enables the use of the
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/KHR/SPV_KHR_multiview.html SPV_KHR_multiview>
-- shader extension, which adds a new @ViewIndex@ built-in type that allows
-- shaders to control what to do for each view. If using GLSL there is also
-- the
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/ext/GL_EXT_multiview.txt GL_EXT_multiview>
-- extension that introduces a @highp int gl_ViewIndex;@ built-in variable
-- for vertex, tessellation, geometry, and fragment shaders.
--
-- == Promotion to Vulkan 1.1
--
-- All functionality in this extension is included in core Vulkan 1.1, with
-- the KHR suffix omitted. The original type, enum and command names are
-- still available as aliases of the core functionality.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceMultiviewFeaturesKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceMultiviewPropertiesKHR'
--
-- - Extending 'Vulkan.Core10.Pass.RenderPassCreateInfo':
--
-- - 'RenderPassMultiviewCreateInfoKHR'
--
-- == New Enum Constants
--
-- - 'KHR_MULTIVIEW_EXTENSION_NAME'
--
-- - 'KHR_MULTIVIEW_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.DependencyFlagBits.DependencyFlagBits':
--
-- - 'DEPENDENCY_VIEW_LOCAL_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR'
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR'
--
-- - 'STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR'
--
-- == New Built-In Variables
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-viewindex ViewIndex>
--
-- == New SPIR-V Capabilities
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-MultiView MultiView>
--
-- == Version History
--
-- - Revision 1, 2016-10-28 (Jeff Bolz)
--
-- - Internal revisions
--
-- == See Also
--
-- 'PhysicalDeviceMultiviewFeaturesKHR',
-- 'PhysicalDeviceMultiviewPropertiesKHR',
-- 'RenderPassMultiviewCreateInfoKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_multiview Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_multiview ( pattern STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR
, pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR
, pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR
, pattern DEPENDENCY_VIEW_LOCAL_BIT_KHR
, PhysicalDeviceMultiviewFeaturesKHR
, PhysicalDeviceMultiviewPropertiesKHR
, RenderPassMultiviewCreateInfoKHR
, KHR_MULTIVIEW_SPEC_VERSION
, pattern KHR_MULTIVIEW_SPEC_VERSION
, KHR_MULTIVIEW_EXTENSION_NAME
, pattern KHR_MULTIVIEW_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core11.Promoted_From_VK_KHR_multiview (PhysicalDeviceMultiviewFeatures)
import Vulkan.Core11.Promoted_From_VK_KHR_multiview (PhysicalDeviceMultiviewProperties)
import Vulkan.Core11.Promoted_From_VK_KHR_multiview (RenderPassMultiviewCreateInfo)
import Vulkan.Core10.Enums.DependencyFlagBits (DependencyFlags)
import Vulkan.Core10.Enums.DependencyFlagBits (DependencyFlagBits(DEPENDENCY_VIEW_LOCAL_BIT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO))
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR"
pattern STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR"
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR"
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES
-- No documentation found for TopLevel "VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR"
pattern DEPENDENCY_VIEW_LOCAL_BIT_KHR = DEPENDENCY_VIEW_LOCAL_BIT
-- No documentation found for TopLevel "VkPhysicalDeviceMultiviewFeaturesKHR"
type PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures
-- No documentation found for TopLevel "VkPhysicalDeviceMultiviewPropertiesKHR"
type PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties
-- No documentation found for TopLevel "VkRenderPassMultiviewCreateInfoKHR"
type RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo
type KHR_MULTIVIEW_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_MULTIVIEW_SPEC_VERSION"
pattern KHR_MULTIVIEW_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_MULTIVIEW_SPEC_VERSION = 1
type KHR_MULTIVIEW_EXTENSION_NAME = "VK_KHR_multiview"
-- No documentation found for TopLevel "VK_KHR_MULTIVIEW_EXTENSION_NAME"
pattern KHR_MULTIVIEW_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_MULTIVIEW_EXTENSION_NAME = "VK_KHR_multiview"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_multiview.hs | bsd-3-clause | 8,600 | 0 | 8 | 1,411 | 457 | 342 | 115 | -1 | -1 |
module TestPrinter where
import Test.HUnit
import Text.PrettyPrint.ANSI.Leijen
import Parser
-- Parse a string input into an AST then parse the printing of that AST
-- and check it is equal to the original AST
parsePrintParse :: String -> Assertion
parsePrintParse input =
case parseExpr input of
Left err -> assertFailure $ show err
Right expr1 ->
case parseExpr $ displayS (renderCompact $ pretty expr1) "" of
Left err -> assertFailure $ show err
Right expr2 -> expr1 @?= expr2
intPrint :: Assertion
intPrint = parsePrintParse "1"
unitPrint :: Assertion
unitPrint = parsePrintParse "<>"
funcPrint :: Assertion
funcPrint = parsePrintParse "func [a b] (x:int, y:unit) . x"
| phillipm/mlish-to-llvm | test/TestPrinter.hs | bsd-3-clause | 712 | 0 | 13 | 145 | 161 | 83 | 78 | 18 | 3 |
module Data.SimPOL.Time where
class Zero a where
zero :: a
class Discrete a where
advance :: a -> a
data Timeline = Timeline
{ index :: Int
, events :: [String]
, next :: Maybe Timeline
}
instance Zero Timeline where
zero = Timeline { index = 0
, events = []
, next = Nothing
}
instance Discrete Timeline where
advance t = maybe infinity id (next t)
where infinity = zero { index = succ (index t) }
type Time = Timeline
instance Show Timeline where
show = show . index
instance Eq Timeline where
s == t = index s == index t
instance Ord Timeline where
compare s t = compare (index s) (index t)
instance Num Timeline where
s + t = Timeline { index = index s + index t
, events = []
, next = Nothing }
s - t = Timeline { index = index s - index t
, events = []
, next = Nothing }
s * t = Timeline { index = index s * index t
, events = []
, next = Nothing }
abs t = Timeline { index = abs (index t)
, events = []
, next = Nothing }
signum t = Timeline { index = signum (index t)
, events = []
, next = Nothing }
fromInteger i = Timeline { index = fromInteger i
, events = []
, next = Nothing }
-- vim: ft=haskell:sts=2:sw=2:et:nu:ai
| ZjMNZHgG5jMXw/privacy-option-simpol | Data/SimPOL/Time.hs | bsd-3-clause | 1,631 | 0 | 12 | 748 | 496 | 269 | 227 | 42 | 0 |
------------------------------------------------------------------------------
--- Knowledge-based Autonomic Heterogeneous Networks (KAHN)
--- @author Rajesh Krishnan, Cosocket LLC
--- @version September 2014
------------------------------------------------------------------------------
{-
Copyright (c) 2014, Cosocket LLC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Cosocket LLC nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Main where
import System.Environment (getArgs)
import System.Exit (exitSuccess,exitFailure)
import System.IO
import Data.Char
data MedhaSpec = MedhaSpec [Material] [Factdef] [CurryExt] deriving (Read,Show)
type Material = (String,[Int],Int) -- "pred typ ..", [keyidx], timeout
type Factdef = String -- "pred typ .."
type CurryExt = String -- extra library imports to be included
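-- An input accepted by 'readMaybeMedhaSpec' below (a sketch; the predicate,
-- field types and extra import are made up):
--
-- > MedhaSpec [("Neighbor String Int", [0], 300)]
-- >           ["Hello String Int"]
-- >           ["import Data.List (sort)"]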
main :: IO ()
main = do
args <- getArgs
if ((length args) == 2)
then putStrLn ("Processing: " ++ (args!!0) ++ " to: " ++ (args!!1))
else (putStrLn "Usage: medhavi <inspec> <outfile>" >> exitSuccess)
inspec <- readFile (args!!0)
outfl <- openFile (args!!1) WriteMode
case readMaybeMedhaSpec inspec of
(Just ((MedhaSpec m f c),t)) -> hPutStrLn outfl $ wrLic ++ wrModIm ++
wrLns "" c ++ "\n" ++ wrMtl m ++ wrFct f ++ t ++ "\n" ++ wrFxd
Nothing -> (putStrLn "Error parsing input" >> exitFailure)
hClose outfl
readMaybeMedhaSpec :: String -> Maybe (MedhaSpec,String)
readMaybeMedhaSpec s = case reads s of
[(term,tail)] -> (Just (term,tail))
[] -> Nothing
_ -> Nothing
catStr :: [String] -> String
catStr = foldl (++) ""
wrLns :: String -> [String] -> String
wrLns p x = catStr (map (\y -> p ++ y ++ "\n") x)
replace :: a -> Int -> [a] -> [a]
replace _ _ [] = []
replace x p (y:ys) | p==0 = x:ys
| otherwise = y:(replace x (p-1) ys)
ctup :: [String] -> String
ctup (x:xs) = "(" ++ z "" "," (x:xs) ++ ")"
ctup [] = ""
z :: String -> String -> [String] -> String
z q s (x:xs) = foldl (++) (q ++ x ++ q) (map (\y -> s ++ q ++ y ++ q) xs)
z _ _ [] = ""
wrMtl :: [Material] -> String
wrMtl x =
"share [mkPersist sqlSettings, mkMigrate \"migrateAll\"] [persistLowerCase|\n" ++
catStr (map wrEnt x) ++
"|]\n\n" ++
catStr (map wrOneM x)
wrEnt :: Material -> String
wrEnt (sig,keys,timeout) = let (prdn:ftyps) = words sig in
prdn ++ "\n" ++
catStr (map (\(fi,ft) -> " f" ++ show fi ++ " " ++ ft ++ "\n") (zip [0 .. (length ftyps) - 1] ftyps)) ++
" UniqKey" ++ prdn ++ catStr (map (\x -> " f" ++ show x) keys) ++ "\n" ++
" deriving Show\n"
wrOneM :: Material -> String
wrOneM (sig,keys,timeout) =
wrT2U ++ wrT2S ++ wrE2T ++ wrAll ++ wrUpd ++ wrExp ++ wrDel ++ wrSel
where
(prdn:ftyps) = words sig
lprdn = (\(x:xs) -> ((toLower x):xs)) prdn
fnams = map (\x -> "f" ++ show x) [0 .. (length ftyps) - 1]
fkeys = map (\x -> "f" ++ show x) keys
ksub = keysub keys (replicate (length fnams) "_")
where keysub (x:xs) y = keysub xs (replace (fnams!!x) x y)
keysub [] y = y
wrT2U = "tpl2UniqKey" ++ prdn ++ " :: " ++ ctup ftyps ++ " -> Unique " ++ prdn ++ "\n" ++
"tpl2UniqKey" ++ prdn ++ " " ++ ctup ksub ++ " = UniqKey" ++ prdn ++ " " ++ (z "" " " fkeys) ++ "\n\n"
wrT2S = "tpl2" ++ prdn ++ " :: " ++ ctup ftyps ++ " -> " ++ prdn ++ "\n" ++
"tpl2" ++ prdn ++ " " ++ ctup fnams ++ " = " ++ prdn ++ " " ++ (z "" " " fnams) ++ "\n\n"
wrE2T = "ent" ++ prdn ++ "2Tpl :: Entity " ++ prdn ++ " -> " ++ ctup ftyps ++ "\n" ++
"ent" ++ prdn ++ "2Tpl = " ++
"(\\(" ++ prdn ++ " " ++ (z "" " " fnams) ++ ") -> " ++ ctup fnams ++ ") . entityVal" ++ "\n\n"
wrAll = lprdn ++ "All :: Sqlite.Connection -> IO [" ++ ctup ftyps ++ "]\n" ++
lprdn ++ "All x = " ++ lprdn ++ "Exp x" ++
" >> (runSqlite1 x $ selectList ([] :: [Filter " ++ prdn ++ "]) [])" ++
" >>= return . map ent" ++ prdn ++ "2Tpl" ++ "\n\n"
wrUpd = lprdn ++ "Upd :: Sqlite.Connection -> " ++ ctup ftyps ++ " -> IO ()" ++ "\n" ++
lprdn ++ "Upd x t = do" ++ "\n" ++
" " ++ lprdn ++ "Exp x" ++ "\n" ++
" " ++ "tm <- now" ++ "\n" ++
" " ++ "runSqlite1 x $ do" ++ "\n" ++
" " ++ "deleteBy $ tpl2UniqKey" ++ prdn ++ " t" ++ "\n" ++
" " ++ "insert $ tpl2" ++ prdn ++
" ((\\" ++ ctup fnams ++ " -> " ++ ctup (replace "tm" ((length fnams) -1) fnams) ++ ") t)" ++ "\n" ++
" " ++ "return ()" ++ "\n\n"
wrExp = lprdn ++ "Exp :: Sqlite.Connection -> IO ()" ++ "\n" ++
lprdn ++ "Exp x = now >>= \\t -> runSqlite1 x $ " ++
"deleteWhere [" ++ prdn ++ "F" ++ show ((length ftyps) - 1) ++ " <. (t - " ++ show timeout ++ ")]" ++ "\n\n"
wrDel = lprdn ++ "Del :: Sqlite.Connection -> [Filter " ++ prdn ++ "] -> IO ()" ++ "\n" ++
lprdn ++ "Del x f = runSqlite1 x $ deleteWhere f" ++ "\n\n"
wrSel = lprdn ++ "Sel :: Sqlite.Connection -> [Filter " ++ prdn ++ "] -> IO [" ++ ctup ftyps ++ "]" ++ "\n" ++
lprdn ++ "Sel x f = " ++ lprdn ++ "Exp x" ++
" >> (runSqlite1 x $ selectList f []) >>= return . map ent" ++ prdn ++ "2Tpl" ++ "\n\n"
wrFct :: [Factdef] -> String
wrFct x =
"data Payload = Nop ()\n" ++
catStr (map (\y -> " | " ++ y ++ "\n") x) ++
" deriving (Read,Show)\n\n"
wrFxd =
"initQryFun :: (OCmd->IO(ICmd),OCmd->IO(),ICmd,(ICmd->IO())->IO(Handle,ICmd->IO()))->IO(Handle,ICmd->IO())\n" ++
"initQryFun (c,s,i,q) = do\n" ++
" x <- rng\n" ++
" d <- Sqlite.open \":memory:\"\n"++
" runSqlite1 d $ runMigrationSilent migrateAll\n" ++
" eca c s (nxtRndInt x) d i \n" ++
" q (eca c s (nxtRndInt x) d)\n" ++
"\n" ++
"eca :: (OCmd -> IO (ICmd)) -> (OCmd -> IO ()) -> (IO (Int)) -> Sqlite.Connection -> ICmd -> IO ()\n" ++
"eca c s r d i = case i of\n" ++
" INIT p -> utilHandler c s r d i \"\" p\n" ++
" TEVT f p -> utilHandler c s r d i f p\n" ++
" ITAG p -> utilHandler c s r d i \"\" p\n" ++
" IPKT f p -> utilHandler c s r d i f p\n" ++
" _ -> hPrint stderr $ \"unknown: \" ++ show i \n" ++
"\n" ++
"utilHandler :: (OCmd -> IO (ICmd)) -> (OCmd -> IO ()) -> (IO (Int)) -> Sqlite.Connection -> ICmd -> String -> String -> IO ()\n" ++
"utilHandler c s r d i f p = case readMaybePayload p of\n" ++
" Just k -> handler c s r d f k\n" ++
" Nothing -> hPrint stderr $ \"unknown: \" ++ show i\n" ++
"\n" ++
"readMaybePayload :: String -> Maybe Payload\n" ++
"readMaybePayload s = case reads s of\n" ++
" [(term,tail)] -> if all isSpace tail then (Just term) else Nothing\n" ++
" [] -> Nothing\n" ++
" _ -> Nothing\n" ++
"\n" ++
"now :: IO (Int)\n" ++
"now = getClockTime >>= return . \\(TOD x y) -> (fromInteger x)\n" ++
"\n" ++
"unhandled :: (OCmd -> IO (ICmd)) -> (OCmd -> IO ()) -> (IO (Int)) -> Sqlite.Connection -> String -> Payload -> IO ()\n" ++
"unhandled _ _ _ _ f k = hPrint stderr $ \"unknown: \" ++ show f ++ \" \" ++ show k\n" ++
"\n" ++
"periodic :: (OCmd -> IO (ICmd)) -> Int -> Payload -> IO ()\n" ++
"periodic c d b = c (SCHEDL (show b) True d (show b)) >>= \\_ -> return ()\n" ++
"\n" ++
"pkt :: (OCmd -> IO ()) -> Payload -> IO ()\n" ++
"pkt s p = s (OPKT (show p))\n" ++
"\n" ++
"tag :: (OCmd -> IO ()) -> Payload -> IO ()\n" ++
"tag s p = s (OTAG (show p))\n" ++
"\n" ++
"rng :: IO (IORef (StdGen))\n" ++
"rng = now >>= newIORef . mkStdGen \n" ++
"\n" ++
"nxtRndInt :: IORef (StdGen) -> IO (Int)\n" ++
"nxtRndInt r = readIORef r >>= return . next >>= \\(n,g) -> (writeIORef r g >> return n)\n" ++
"\n" ++
"withSqliteConn1 :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) => Sqlite.Connection -> (SqlBackend -> m a) -> m a\n" ++
"withSqliteConn1 = withSqlConn . open1\n" ++
"\n" ++
"open1 :: Sqlite.Connection -> LogFunc -> IO SqlBackend\n" ++
"open1 conn logFunc = wrapConnection conn logFunc >>= \\s -> return s { connClose = return () }\n" ++
"\n" ++
"runSqlite1 x = runResourceT . runNoLoggingT . withSqliteConn1 x . runSqlConn\n" ++
"\n"
wrModIm =
"{-# LANGUAGE EmptyDataDecls #-}\n" ++
"{-# LANGUAGE FlexibleContexts #-}\n" ++
"{-# LANGUAGE GADTs #-}\n" ++
"{-# LANGUAGE GeneralizedNewtypeDeriving #-}\n" ++
"{-# LANGUAGE MultiParamTypeClasses #-}\n" ++
"{-# LANGUAGE OverloadedStrings #-}\n" ++
"{-# LANGUAGE QuasiQuotes #-}\n" ++
"{-# LANGUAGE TemplateHaskell #-}\n" ++
"{-# LANGUAGE TypeFamilies #-}\n" ++
"module InitKB (initQryFun) where\n" ++
"import KBITypes (ICmd(..),OCmd(..))\n" ++
"import System.Random (StdGen,mkStdGen,next)\n" ++
"import System.Time (ClockTime(..),getClockTime)\n" ++
"import Data.IORef (IORef,newIORef,readIORef,writeIORef)\n" ++
"import Data.Char (isSpace)\n" ++
"import System.IO (Handle,hPrint,stderr)\n" ++
"import Control.Monad.IO.Class (MonadIO (..), liftIO)\n" ++
"import Control.Monad.Trans.Control (MonadBaseControl)\n" ++
"import Control.Monad.Trans.Resource (ResourceT, runResourceT)\n" ++
"import Control.Monad.Logger (NoLoggingT, runNoLoggingT, MonadLogger)\n" ++
"import qualified Database.Sqlite as Sqlite\n" ++
"import Database.Persist\n" ++
"import Database.Persist.TH\n" ++
"import Database.Persist.Sqlite\n" ++
"\n"
wrLic =
"------------------------------------------------------------------------------\n" ++
"--- Knowledge-based Autonomic Heterogeneous Networks (KAHN)\n" ++
"--- @author Rajesh Krishnan, Cosocket LLC\n" ++
"--- @version November 2014\n" ++
"------------------------------------------------------------------------------\n" ++
"{-\n" ++
"Copyright (c) 2014, Cosocket LLC\n" ++
"All rights reserved.\n" ++
"\n" ++
"Redistribution and use in source and binary forms, with or without\n" ++
"modification, are permitted provided that the following conditions are met:\n" ++
"\n" ++
"* Redistributions of source code must retain the above copyright notice, this\n" ++
" list of conditions and the following disclaimer.\n" ++
"\n" ++
"* Redistributions in binary form must reproduce the above copyright notice,\n" ++
" this list of conditions and the following disclaimer in the documentation\n" ++
" and/or other materials provided with the distribution.\n" ++
"\n" ++
"* Neither the name of Cosocket LLC nor the names of its\n" ++
" contributors may be used to endorse or promote products derived from\n" ++
" this software without specific prior written permission.\n" ++
"\n" ++
"THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n" ++
"AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n" ++
"IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n" ++
"DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n" ++
"FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n" ++
"DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n" ++
"SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n" ++
"CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n" ++
"OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n" ++
"OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" ++
"-}\n" ++
"\n"
| Cosocket-LLC/kahn | v2.0/src/MEDHA/Medhavi.hs | bsd-3-clause | 12,999 | 0 | 59 | 3,152 | 2,410 | 1,243 | 1,167 | 215 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.BuildReports.Types
-- Copyright : (c) Duncan Coutts 2009
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Types related to build reporting
--
-----------------------------------------------------------------------------
module Distribution.Client.BuildReports.Types (
ReportLevel(..),
) where
import qualified Distribution.Text as Text
( Text(..) )
import qualified Distribution.Compat.ReadP as Parse
( pfail, munch1 )
import qualified Text.PrettyPrint.HughesPJ as Disp
( text )
import Data.Char as Char
( isAlpha, toLower )
data ReportLevel = NoReports | AnonymousReports | DetailedReports
deriving (Eq, Ord, Show)
instance Text.Text ReportLevel where
disp NoReports = Disp.text "none"
disp AnonymousReports = Disp.text "anonymous"
disp DetailedReports = Disp.text "detailed"
parse = do
name <- Parse.munch1 Char.isAlpha
case lowercase name of
"none" -> return NoReports
"anonymous" -> return AnonymousReports
"detailed" -> return DetailedReports
_ -> Parse.pfail
lowercase :: String -> String
lowercase = map Char.toLower
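-- Informal usage sketch, using the display/simpleParse helpers from
-- Distribution.Text (not imported in this module):
--
--   display AnonymousReports                    -- "anonymous"
--   simpleParse "detailed" :: Maybe ReportLevel -- Just DetailedReports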
| yihuang/cabal-install | Distribution/Client/BuildReports/Types.hs | bsd-3-clause | 1,322 | 0 | 11 | 276 | 253 | 147 | 106 | 25 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module CommandLine.Helpers where
import Control.Monad.Error
import System.Directory
import System.IO
import qualified Elm.Utils as Utils
yesOrNo :: IO Bool
yesOrNo =
do hFlush stdout
input <- getLine
case input of
"y" -> return True
"n" -> return False
_ -> do putStr "Must type 'y' for yes or 'n' for no: "
yesOrNo
inDir :: (MonadError String m, MonadIO m) => FilePath -> m a -> m a
inDir dir doStuff =
do here <- liftIO $ getCurrentDirectory
liftIO $ createDirectoryIfMissing True dir
liftIO $ setCurrentDirectory dir
result <- doStuff
liftIO $ setCurrentDirectory here
return result
git :: (MonadError String m, MonadIO m) => [String] -> m String
git = run "git"
run :: (MonadError String m, MonadIO m) => String -> [String] -> m String
run = Utils.run
out :: (MonadIO m) => String -> m ()
out string =
liftIO $ hPutStrLn stdout string'
where
string' =
if not (null string) && last string == '\n' then init string else string
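-- Informal usage sketch; the directory name and repository URL are made up
-- for illustration:
--
--   inDir "elm-stuff/example" $ do
--     _ <- git [ "clone", "https://example.com/project.git" ]
--     out "clone finished"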
| rtfeldman/elm-package | src/CommandLine/Helpers.hs | bsd-3-clause | 1,086 | 0 | 12 | 289 | 360 | 179 | 181 | 32 | 3 |
import NFAe
import Data.Set (Set)
import qualified Data.Set as Set
ne1 = NFAe {
states = Set.fromList [1, 2, 3, 4],
symbols = Set.fromList [0, 1],
delta = delta',
start = 1,
final = Set.fromList [4]}
where delta' 1 (Just 0) = Set.singleton 4;
delta' 1 Nothing = Set.singleton 3;
delta' 3 (Just 0) = Set.singleton 4;
delta' _ _ = Set.empty;
ne2 = makeNFAe "ABCD" "01" delta2 'A' "D"
delta2 'A' (Just '0') = Set.singleton 'D';
delta2 'A' Nothing = Set.singleton 'C';
delta2 'C' (Just '0') = Set.singleton 'D';
delta2 _ _ = Set.empty;
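-- Both machines accept exactly the one-symbol word [0] (respectively "0"):
-- the start state either consumes 0 directly or first takes the epsilon
-- move and then consumes 0, reaching the single accepting state.  (For ne2
-- this reads makeNFAe's arguments as states, symbols, delta, start and
-- final states.)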
| cameronbwhite/ComputationalStructures | Haskell/nfae_test.hs | bsd-3-clause | 619 | 3 | 8 | 182 | 275 | 142 | 133 | 18 | 4 |
{-|
Module : Servant.Xhr.Path
Description : Definitions for constructing paths from servant types.
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Servant.Xhr.Path where
import GHC.TypeLits
import Data.Proxy
import qualified Data.Text as T
import Servant.API
import Web.HttpApiData
-- | Value-level representation of the capture parts of a path.
-- The type parameter indicates the named, typed parts of the path.
data XhrServantPath (path :: [(Symbol, *)]) where
XhrServantPathNil :: XhrServantPath '[]
XhrServantPathCons
:: ( ToHttpApiData t
, KnownSymbol name
)
=> Proxy name
-> t
-> XhrServantPath rest
-> XhrServantPath ( '(name, t) ': rest )
-- | Drop a capture part of a path
type family XhrServantPathDrop (name :: Symbol) (ty :: *) (path :: [(Symbol, *)]) :: [(Symbol, *)] where
XhrServantPathDrop name ty ( '(name, ty) ': rest ) = rest
XhrServantPathDrop name ty ( '(name', ty') ': rest ) = '( name', ty' ) ': XhrServantPathDrop name ty rest
-- | Shows that a given capture part is in a path, with proofs showing that its
-- value can be extracted from an XhrServantPath, and that an XhrServantPath
-- can be shrunk to exclude it.
class
( ToHttpApiData ty
) => InXhrServantPath (name :: Symbol) (ty :: *) (path :: [(Symbol, *)])
where
xhrServantPathGetValue :: Proxy name -> Proxy ty -> XhrServantPath path -> ty
xhrServantPathDrop
:: Proxy name
-> Proxy ty
-> XhrServantPath path
-> XhrServantPath (XhrServantPathDrop name ty path)
instance {-# OVERLAPS #-}
( ToHttpApiData ty
) => InXhrServantPath name ty ( '(name, ty) ': rest )
where
xhrServantPathGetValue _ _ path = case path of
XhrServantPathCons _ t _ -> t
xhrServantPathDrop _ _ path = case path of
XhrServantPathCons _ _ rest -> rest
instance {-# OVERLAPS #-}
( InXhrServantPath name ty rest
, XhrServantPathDrop name ty ( '(name', ty') ': rest )
~ ( '(name', ty') ': XhrServantPathDrop name ty rest )
) => InXhrServantPath name ty ( '(name', ty') ': rest )
where
xhrServantPathGetValue proxyName proxyTy path = case path of
XhrServantPathCons _ _ rest -> xhrServantPathGetValue proxyName proxyTy rest
xhrServantPathDrop proxyName proxyTy path = case path of
XhrServantPathCons proxyName' proxyTy' rest -> XhrServantPathCons proxyName' proxyTy' (xhrServantPathDrop proxyName proxyTy rest)
-- | Use XhrServantPath to make the path string of a servant route.
makeXhrServantPath
:: forall servantRoute path .
( MakeXhrServantPath servantRoute path )
=> Proxy servantRoute
-> XhrServantPath path
-> T.Text
makeXhrServantPath proxyRoute = T.intercalate "/" . makeXhrServantPathParts proxyRoute
-- | Shows that a given path provides all the details for the path of a servant
-- route.
class MakeXhrServantPath servantRoute path where
makeXhrServantPathParts :: Proxy servantRoute -> XhrServantPath path -> [T.Text]
instance {-# OVERLAPS #-}
(
) => MakeXhrServantPath servantRoute '[]
where
makeXhrServantPathParts _ _ = []
instance {-# OVERLAPS #-}
( InXhrServantPath name t (p ': ps)
, MakeXhrServantPath servantRoute (XhrServantPathDrop name t (p ': ps))
) => MakeXhrServantPath ( Capture name t :> servantRoute ) (p ': ps)
where
makeXhrServantPathParts _ path =
toUrlPiece (xhrServantPathGetValue (Proxy :: Proxy name) (Proxy :: Proxy t) path)
: makeXhrServantPathParts (Proxy :: Proxy servantRoute) (xhrServantPathDrop (Proxy :: Proxy name) (Proxy :: Proxy t) path)
instance {-# OVERLAPS #-}
( KnownSymbol name
, MakeXhrServantPath servantRoute '[]
) => MakeXhrServantPath ( name :> servantRoute ) '[]
where
makeXhrServantPathParts _ path
= T.pack (symbolVal (Proxy :: Proxy name))
: makeXhrServantPathParts (Proxy :: Proxy servantRoute) path
instance {-# OVERLAPS #-}
( KnownSymbol name
, MakeXhrServantPath servantRoute (p ': ps)
) => MakeXhrServantPath ( name :> servantRoute ) (p ': ps)
where
makeXhrServantPathParts _ path
= T.pack (symbolVal (Proxy :: Proxy name))
: makeXhrServantPathParts (Proxy :: Proxy servantRoute) path
instance {-# OVERLAPS #-}
( MakeXhrServantPath servantRoute '[]
) => MakeXhrServantPath ( anything :> servantRoute ) '[]
where
makeXhrServantPathParts _ path = makeXhrServantPathParts (Proxy :: Proxy servantRoute) path
instance {-# OVERLAPS #-}
( MakeXhrServantPath servantRoute (p ': ps)
) => MakeXhrServantPath ( anything :> servantRoute ) (p ': ps)
where
makeXhrServantPathParts _ path = makeXhrServantPathParts (Proxy :: Proxy servantRoute) path
-- type Example = Capture "foo" Bool :> "user" :> Capture "bar" Int :> Get '[] ()
--
-- example :: Proxy Example
-- example = Proxy
--
-- exampleD = XhrServantPathCons (Proxy :: Proxy "bar") (42 :: Int)
-- . XhrServantPathCons (Proxy :: Proxy "foo") False
-- $ XhrServantPathNil
--
-- makeXhrServantPath example exampleD = "false/user/42"
| avieth/servant-xhr | Servant/Xhr/Path.hs | bsd-3-clause | 5,594 | 0 | 13 | 1,209 | 1,291 | 703 | 588 | 96 | 1 |
{-# LANGUAGE RankNTypes #-}
import System.IO
import System.Posix.Types
import Foreign.C.Types (CTime)
import qualified Data.ByteString.Lazy as LBS
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Attoparsec.Number
import Data.Aeson
import Network.JsonRpc
import Network
tcpProxy :: RpcProxy a => PortNumber -> String -> a
tcpProxy port = proxy handler
where
handler req = withSocketsDo $ do
h <- connectTo "localhost" $ PortNumber port
hSetBuffering h NoBuffering
LBS.hPutStr h req
LBS.hGetContents h
p :: RpcProxy a => String -> a
p = tcpProxy 8000
instance FromJSON CTime where
parseJSON x = case x of
        (Number (I i)) -> return $ fromInteger i
        _ -> fail "FromJSON CTime: expected an integral number"
instance ToJSON CTime where
toJSON x = Number $ I $ toInteger $ fromEnum x
echo = p "echo" :: String -> IO String
timestamp = p "timestamp" :: IO EpochTime
random = p "random" :: IO Int
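-- Each stub calls the JSON-RPC method of the same name through the TCP
-- proxy 'p' (localhost, port 8000).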
main = do
vars <- forM [1..10] $ \n -> do
finish <- newEmptyMVar
forkIO $ do
echo ("test"++show n) >>= putStrLn . ("echo: "++) .show
timestamp >>= putStrLn . ("timestamp: "++) . show
random >>= putStrLn . ("random: "++) . show
`finally`
putMVar finish ()
return finish
forM_ vars takeMVar
| yihuang/haskell-json-rpc | examples/tcp_client.hs | bsd-3-clause | 1,321 | 2 | 21 | 352 | 460 | 225 | 235 | 40 | 1 |
module Index (IdxState, newIdx, resetIdx, getIdx, getIdxs, runIdx) where
import Verilog
import Data.Map
import Control.Monad
import Control.Monad.State
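-- The state V threads the current Val->index map, the map saved from the
-- previous round (resetIdx moves the current map into old_idx, which
-- getOldIdx consults), a counter for fresh indices and a separate map for
-- named inputs.  Indices 0 and 1 are reserved for ValZero and ValOne, so
-- the counter starts at 2.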
data V = V { idx :: Map Val Int
, old_idx :: Map Val Int
, count :: Int
, inputs :: Map String Int }
type IdxState = State V
startIdx = V { idx = empty
, old_idx = empty
, count = 2
, inputs = empty }
--fromList [(ValZero, 0), (ValOne, 1)] }
resetIdx :: IdxState ()
resetIdx = do
V m _ c i <- get
put $ V empty m c i
newIdx :: IdxState Int
newIdx = do
V m o c i <- get
put $ V m o (c+1) i
return c
getIdx :: Val -> IdxState Int
getIdx ValZero = return 0
getIdx ValOne = return 1
getIdx (Input n) = do
V m o c i <- get
case Data.Map.lookup n i of
Just r -> return r
Nothing -> do
put $ V m o (c+1) (insert n c i)
return c
getIdx w = do
V m o c i <- get
case Data.Map.lookup w m of
Just r -> return r
Nothing -> do put $ V (insert w c m) o (c+1) i
return c
getOldIdx :: Val -> IdxState Int
getOldIdx w = do
V m o c i <- get
case Data.Map.lookup w o of
Just r -> return r
Nothing -> do put $ V m o (c+1) i
return c
getIdxs :: [Val] -> IdxState [Int]
getIdxs = mapM getIdx
runIdx :: IdxState a -> (a, Map Val Int, Map Val Int, Map String Int)
runIdx ops = (x, m, o, i)
where (x, V m o c i) = flip runState startIdx ops
| wuerges/vlsi_verification | src/Index.hs | bsd-3-clause | 1,463 | 0 | 15 | 487 | 682 | 343 | 339 | 51 | 3 |
-- ----------------------------------------------------------------------------
--
-- Module : Language.CFamily.Constants
-- Copyright : (c) 2007..2008 Duncan Coutts, Benedikt Huber
-- (c) 2016 Mick Nelso
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : ghc
--
-- This module provides support for representing, checking and exporting c
-- constants, i.e. integral, float, character and string constants.
--
-- ----------------------------------------------------------------------------
module Language.CFamily.Constants where
import Language.CFamily.Data.Ident
import Language.CFamily.Data.Name
import Language.CFamily.Data.Position
import Data.Bits
import Data.Char
import Data.Generics hiding (mkIntType)
import Data.List
import Data.Maybe
import Numeric
-- ----------------------------------------------------------------------------
data LitInteger = LitInteger {
litIntValue :: !Integer
, litIntRadix :: !Int
, litIntType :: !LitIntType
}
deriving (Data, Eq, Show)
data LitIntType = LitIntType {
litIntTypeUnsigned :: !Bool
, litIntTypeLong :: !LitIntLongType
}
deriving (Data, Eq, Show)
data LitIntLongType = LitIntNotLong
| LitIntLong
| LitIntLongLong
deriving (Data, Eq, Show)
readLitInteger
:: Bool
-> Int
-> Position
-> String
-> Either String (Either LitInteger LitUserDef)
readLitInteger isUD r@2 pos = rli isUD pos r . drop 2
readLitInteger isUD r@8 pos = rli isUD pos r
readLitInteger isUD r@10 pos = rli isUD pos r
readLitInteger isUD r@16 pos = rli isUD pos r . drop 2
readLitInteger _ r _ = error $ "Constants.readLitInteger: unhandled radix: " ++ show r
rli
:: Bool
-> Position
-> Int
-> String
-> Either String (Either LitInteger LitUserDef)
rli isUD pos radix' str =
case readInt radix' i j str' of
[(n', suffix)] ->
case isUD of
False -> Right $ Left g
True -> Right $ Right $ LitUserInteger g $ mkIdent pos str0 (Name 0) -- MCNFIXME: (Name 0) bogus
where
(_,isU,long',_,cnt)
= foldl f (0,0,LitIntNotLong,0,0) suffix
f :: (Int,Int,LitIntLongType,Int,Int) -> Char -> (Int,Int,LitIntLongType,Int,Int)
f (0,0,lt ,p,n) 'u' = (0,1,lt ,p,n+1)
f (0,0,lt ,p,n) 'U' = (0,1,lt ,p,n+1)
f (0,u,LitIntNotLong,_,n) 'l' = (0,u,LitIntLong ,0,n+1)
f (0,u,LitIntNotLong,_,n) 'L' = (0,u,LitIntLong ,1,n+1)
f (0,u,LitIntLong ,0,n) 'l' = (0,u,LitIntLongLong,0,n+1)
f (0,u,LitIntLong ,1,n) 'L' = (0,u,LitIntLongLong,0,n+1)
f (_,u,l ,p,n) _ = (1,u,l ,p,n )
str0 = reverse $ drop cnt $ reverse suffix
g = mkLitInteger (fromIntegral n') radix' (toEnum isU) long'
parseFailed -> error $ "Bad base-" ++ show radix' ++ " integer literal: " ++ show parseFailed
where
str' = filter (\c -> c /= '\'') str
i x = j x < radix'
j x = fromMaybe mAX_RADIX $ elemIndex x "0123456789abcdef"
mAX_RADIX = 16
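-- Worked example (informal): at some source position pos, the decimal
-- literal "42u" parses to an unsigned, non-long integer:
--
--   readLitInteger False 10 pos "42u"
--     == Right (Left (LitInteger 42 10 (LitIntType True LitIntNotLong)))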
mkLitInteger
:: Integer
-> Int
-> Bool
-> LitIntLongType
-> LitInteger
mkLitInteger i radix' isU long' = LitInteger i radix' $ mkIntType isU long'
mkIntType
:: Bool
-> LitIntLongType
-> LitIntType
mkIntType = LitIntType
-- ----------------------------------------------------------------------------
data LitChar = LitChar {
litCharValue :: !Char
, litCharType :: !LitCharType
}
| LitChars {
litCharValues :: ![Char]
, litCharType :: !LitCharType
}
deriving (Data, Show)
data LitCharType = LitCharOrdinary
| LitCharWide
| LitChar16T
| LitChar32T
deriving (Data, Show)
readLitChar
:: Bool
-> Bool
-> LitCharType
-> Position
-> String
-> Either LitChar LitUserDef
readLitChar False False t@LitCharOrdinary _ str = (Left . mkLitChar t . fst . unescapeChar . init . tail) str
readLitChar False True t@LitCharOrdinary _ str = (Left . mkLitChars t . unescapeChars . init . tail) str
readLitChar False False t@LitChar16T _ str = (Left . mkLitChar t . fst . unescapeChar . tail . init . tail) str
readLitChar False False t@LitChar32T _ str = (Left . mkLitChar t . fst . unescapeChar . tail . init . tail) str
readLitChar False False t@LitCharWide _ str = (Left . mkLitChar t . fst . unescapeChar . tail . init . tail) str
readLitChar False True t@LitCharWide _ str = (Left . mkLitChars t . unescapeChars . tail . init . tail) str
readLitChar True False t@LitCharOrdinary pos str = Right $ mkLitUserChar (mkLitChar t $ fst $ unescapeChar $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar True True t@LitCharOrdinary pos str = Right $ mkLitUserChar (mkLitChars t $ unescapeChars $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar True False t@LitChar16T pos str = Right $ mkLitUserChar (mkLitChar t $ fst $ unescapeChar $ tail $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar True False t@LitChar32T pos str = Right $ mkLitUserChar (mkLitChar t $ fst $ unescapeChar $ tail $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar True False t@LitCharWide pos str = Right $ mkLitUserChar (mkLitChar t $ fst $ unescapeChar $ tail $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar True True t@LitCharWide pos str = Right $ mkLitUserChar (mkLitChars t $ unescapeChars $ tail $ init $ tail $ stripId str) (mkIdentCharLit pos str)
readLitChar _ _ _ _ _ = error "Constants.readLitChar: case failed"
mkIdentCharLit
:: Position
-> String
-> Ident
mkIdentCharLit pos str = mkIdent pos (reverse $ take (f str) $ reverse str) (Name 0)
where
f = fromJust . elemIndex '\'' . reverse
stripId
:: String
-> String
stripId str = reverse $ drop (posOfQuote str) $ reverse str
posOfQuote
:: String
-> Int
posOfQuote = fromJust . elemIndex '\'' . reverse
unescapeChar
:: String
-> (Char, String)
unescapeChar ('\\':c':cs') =
case c' of
'n' -> ('\n', cs')
't' -> ('\t', cs')
'v' -> ('\v', cs')
'b' -> ('\b', cs')
'r' -> ('\r', cs')
'f' -> ('\f', cs')
'a' -> ('\a', cs')
'\\' -> ('\\', cs')
'?' -> ('?' , cs')
'\'' -> ('\'', cs')
'"' -> ('"' , cs')
'x' ->
case head' "bad escape sequence reading hex" (readHex cs') of
(i, cs'') -> (toEnum i, cs'')
_ ->
case head' "bad escape sequence reading octal" (readOct' (c':cs')) of
(i, cs'') -> (toEnum i, cs'')
unescapeChar ( c':cs') = (c', cs')
unescapeChar [] = error $ "unescape char: empty string"
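-- For instance, unescapeChar "\\nabc" == ('\n', "abc"), and the octal
-- escape in "\\101z" gives ('A', "z") (at most three octal digits are
-- consumed).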
readOct'
:: ReadS Int
readOct' s = map (\(i, cs) -> (i, cs ++ rest)) (readOct octStr)
where
octStr = takeWhile isOctDigit $ take 3 s
rest = drop (length octStr) s
head'
:: String
-> [a]
-> a
head' err [] = error err
head' _ (x:_) = x
unescapeChars
:: String
-> String
unescapeChars [] = []
unescapeChars cs =
case unescapeChar cs of
(c, cs') -> c : unescapeChars cs'
mkLitChar
:: LitCharType
-> Char
-> LitChar
mkLitChar typ c = LitChar c typ
mkLitChars
:: LitCharType
-> [Char]
-> LitChar
mkLitChars typ s = LitChars s typ
-- ----------------------------------------------------------------------------
data LitFloat = LitFloat {
litFloatValue :: Double
, litFloatType :: LitFloatType
}
deriving (Data, Show)
data LitFloatType = LitFloatFloat
| LitFloatDouble
| LitFloatLongDouble
deriving (Data, Show)
readLitFloat
:: Bool
-> Position
-> String
-> Either LitFloat LitUserDef
readLitFloat ud pos str =
case readFloat str' of
[(n', suffix)] ->
case ud of
True -> Right $ mkLitUserFloat i j
False -> Left i
where
i = mkLitFloat n' $ h suffix
j = mkIdent pos m (Name 0)
m =
case h suffix of
LitFloatDouble -> suffix
_ -> tail suffix
h ('f':_) = LitFloatFloat
h ('F':_) = LitFloatFloat
h ('l':_) = LitFloatLongDouble
h ('L':_) = LitFloatLongDouble
h _ = LitFloatDouble
      parseFailed -> error $ "Bad float literal: " ++ show parseFailed
where
str' =
g $ case isPrefixOf "." str of
True -> f ('0':str)
False -> f str
f x = filter (\c -> c /= '\'') x
g x =
case (last $ fst h) == '.' of -- readFloat won't parse 1.e20
True -> fst h ++ "0" ++ snd h -- without the 0 after the .
            False -> x                     -- so we add it if it's not there.
where
h = span (\c -> c /= 'e' && c /= 'E') x
mkLitFloat
:: Double
-> LitFloatType
-> LitFloat
mkLitFloat = LitFloat
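-- Worked example (informal): at some source position pos,
-- readLitFloat False pos "1.5f" yields Left (LitFloat 1.5 LitFloatFloat),
-- while "1.5" without a suffix yields a LitFloatDouble literal.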
-- ----------------------------------------------------------------------------
data LitString = LitString {
litStringValue :: [Char]
, litStringType :: LitStringType
, litStringRaw :: Bool
}
deriving (Data, Eq, Show)
data LitStringType = LitStringOrdinary
| LitStringUtf8
| LitStringChar16T
| LitStringChar32T
| LitStringWide
deriving (Data, Eq, Show)
readLitString
:: Bool
-> Bool
-> Position
-> String
-> Either LitString LitUserDef
readLitString u r@False pos str =
case u of
True -> Right $ mkLitUserString lus $ mkIdent pos (id' str) (Name 0)
False -> Left lus
where
lus = mkLitString (f str) (litStringType' str) r
id' = reverse . takeWhile (/= '\"') . reverse
f = reverse . tail . dropWhile (/= '\"') . reverse . unescapeChars . tail . dropWhile (/= '\"')
readLitString u r@True pos str =
case u of
True -> Right $ mkLitUserString lus $ mkIdent pos (id' str) (Name 0)
False -> Left lus
where
lus = mkLitString str' (litStringType' str) r
id' = reverse . takeWhile (/= '\"') . reverse
f = span (/= '(') . tail . dropWhile (/= '\"')
delimL = fst $ f str
delimR = reverse $ fst $ span (/= ')') $ tail $ dropWhile (/= '\"') $ reverse $ snd $ f str
str' =
case delimL /= delimR of
True -> error "delimiters on raw string don't match"
False -> reverse $ drop ((length delimL) + 1) $ reverse $ tail $ snd $ f str
litStringType'
:: String
-> LitStringType
litStringType' ('u':'8':_) = LitStringUtf8
litStringType' ( 'u':_) = LitStringChar16T
litStringType' ( 'U':_) = LitStringChar32T
litStringType' ( 'L':_) = LitStringWide
litStringType' (_ ) = LitStringOrdinary
mkLitString
:: String
-> LitStringType
-> Bool
-> LitString
mkLitString = LitString
-- ----------------------------------------------------------------------------
data LitUserDef = LitUserInteger {
litUserInt :: LitInteger
, litUserIdent :: !Ident
}
| LitUserFloat {
litUserFloat :: LitFloat
, litUserIdent :: !Ident
}
| LitUserString {
litUserString :: LitString
, litUserIdent :: !Ident
}
| LitUserChar {
litUserChar :: LitChar
, litUserIdent :: !Ident
}
deriving (Show)
mkLitUserFloat
:: LitFloat
-> Ident
-> LitUserDef
mkLitUserFloat = LitUserFloat
mkLitUserString
:: LitString
-> Ident
-> LitUserDef
mkLitUserString = LitUserString
mkLitUserChar
:: LitChar
-> Ident
-> LitUserDef
mkLitUserChar = LitUserChar
-- ----------------------------------------------------------------------------
{-
-- | C char constants (abstract)
data CChar = CChar
!Char
!Bool -- wide flag
| CChars
[Char] -- multi-character character constant
!Bool -- wide flag
deriving (Eq,Ord,Data,Typeable)
instance Show CChar where
showsPrec _ (CChar c wideflag) = _showWideFlag wideflag . showCharConst c
showsPrec _ (CChars cs wideflag) = _showWideFlag wideflag . (sQuote $ concatMap escapeCChar cs)
-- | @showCharConst c@ prepends _a_ String representing the C char constant corresponding to @c@.
-- If necessary uses octal or hexadecimal escape sequences.
showCharConst :: Char -> ShowS
showCharConst c = sQuote $ escapeCChar c
_showWideFlag :: Bool -> ShowS
_showWideFlag flag = if flag then showString "L" else id
-- | get the haskell representation of a char constant
getCChar :: CChar -> [Char]
getCChar (CChar c _) = [c]
getCChar (CChars cs _) = cs
-}
-- | get integer value of a C char constant
-- undefined result for multi-char char constants
getCCharAsInt
:: LitChar -> Integer
getCCharAsInt (LitChar c _) = fromIntegral (fromEnum c)
getCCharAsInt (LitChars _cs _) = error "integer value of multi-character character constants is implementation defined"
{-
-- | return @true@ if the character constant is /wide/.
isWideChar :: CChar -> Bool
isWideChar (CChar _ wideFlag) = wideFlag
isWideChar (CChars _ wideFlag) = wideFlag
-}
-- | construct a character constant from a haskell 'Char'
-- Use 'cChar_w' if you want a wide character constant.
cChar
:: Char
-> LitChar
cChar c = LitChar c LitCharOrdinary
-- | construct a wide character constant
cChar_w
:: Char
-> LitChar
cChar_w c = LitChar c LitCharWide
-- | create a multi-character character constant
cChars
:: [Char]
-> Bool
-> LitChar
cChars cs False = LitChars cs LitCharOrdinary
cChars cs True = LitChars cs LitCharWide
{-
-- | datatype for memorizing the representation of an integer
data CIntRepr = DecRepr | HexRepr | OctalRepr deriving (Eq,Ord,Enum,Bounded,Data,Typeable)
-}
-- | datatype representing type flags for integers
data CIntFlag = FlagUnsigned
| FlagLong
| FlagLongLong
| FlagImag
deriving (Eq,Ord,Enum,Bounded,Data,Typeable)
instance Show CIntFlag where
show FlagUnsigned = "u"
show FlagLong = "L"
show FlagLongLong = "LL"
show FlagImag = "i"
{-
{-# SPECIALIZE setFlag :: CIntFlag -> Flags CIntFlag -> Flags CIntFlag #-}
{-# SPECIALIZE clearFlag :: CIntFlag -> Flags CIntFlag -> Flags CIntFlag #-}
{-# SPECIALIZE testFlag :: CIntFlag -> Flags CIntFlag -> Bool #-}
data CInteger = CInteger
!Integer
!CIntRepr
!(Flags CIntFlag) -- integer flags
deriving (Eq,Ord,Data,Typeable)
instance Show CInteger where
showsPrec _ (CInteger i repr flags) = showInt i . showString (concatMap showIFlag [FlagUnsigned .. ]) where
showIFlag f = if testFlag f flags then show f else []
showInt i' = case repr of DecRepr -> shows i'
OctalRepr -> showString "0" . showOct i'
HexRepr -> showString "0x" . showHex i'
-- To be used in the lexer
-- Note that the flag lexer won't scale
readCInteger :: CIntRepr -> String -> Either String CInteger
readCInteger repr str =
case readNum str of
[(n,suffix)] -> mkCInt n suffix
parseFailed -> Left $ "Bad Integer literal: "++show parseFailed
where
readNum = case repr of DecRepr -> readDec; HexRepr -> readHex; OctalRepr -> readOct
mkCInt n suffix = either Left (Right . CInteger n repr) $ readSuffix suffix
readSuffix = parseFlags noFlags
parseFlags flags [] = Right flags
parseFlags flags ('l':'l':fs) = parseFlags (setFlag FlagLongLong flags) fs
parseFlags flags ('L':'L':fs) = parseFlags (setFlag FlagLongLong flags) fs
parseFlags flags (f:fs) =
let go1 flag = parseFlags (setFlag flag flags) fs in
case f of
'l' -> go1 FlagLong ; 'L' -> go1 FlagLong
'u' -> go1 FlagUnsigned ; 'U' -> go1 FlagUnsigned
'i' -> go1 FlagImag ; 'I' -> go1 FlagImag; 'j' -> go1 FlagImag; 'J' -> go1 FlagImag
_ -> Left $ "Unexpected flag " ++ show f
-}
getCInteger
:: LitInteger
-> Integer
getCInteger (LitInteger i _ _) = i
-- | construct a integer constant (without type flags) from a haskell integer
cInteger
:: Integer
-> LitInteger
cInteger i = LitInteger i 10 $ LitIntType False LitIntNotLong
{-
-- | Floats (represented as strings)
data CFloat = CFloat
!String
deriving (Eq,Ord,Data,Typeable)
instance Show CFloat where
showsPrec _ (CFloat internal) = showString internal
cFloat :: Float -> CFloat
cFloat = CFloat . show
-- dummy implementation
readCFloat :: String -> CFloat
readCFloat = CFloat
-- | C String literals
data CString = CString
[Char] -- characters
Bool -- wide flag
deriving (Eq,Ord,Data,Typeable)
instance Show CString where
showsPrec _ (CString str wideflag) = _showWideFlag wideflag . showStringLit str
-}
-- construction
cString
:: String
-> LitString
cString str = LitString str LitStringOrdinary False
cString_w
:: String
-> LitString
cString_w str = LitString str LitStringWide False
-- selectors
getCString
:: LitString
-> String
getCString (LitString str _ _) = str
isWideString
:: LitString
-> Bool
isWideString (LitString _ LitStringWide _) = True
isWideString (LitString _ _ _) = False
-- | concatenate a list of C string literals
concatCStrings
:: [LitString]
-> LitString
concatCStrings cs = LitString (concatMap getCString cs) (f $ any isWideString cs) False
where
f True = LitStringWide
f False = LitStringOrdinary
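-- For instance, concatCStrings [cString "foo", cString_w "bar"] is
-- LitString "foobar" LitStringWide False: the concatenation is wide as
-- soon as any component is wide.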
{-
-- | @showStringLiteral s@ prepends a String representing the C string literal corresponding to @s@.
-- If necessary it uses octal or hexadecimal escape sequences.
showStringLit :: String -> ShowS
showStringLit = dQuote . concatMap showStringChar
where
showStringChar c | isSChar c = return c
| c == '"' = "\\\""
| otherwise = escapeChar c
-- | @isAsciiSourceChar b@ returns @True@ if the given character is a character which
-- may appear in a ASCII C source file and is printable.
isAsciiSourceChar :: Char -> Bool
isAsciiSourceChar c = isAscii c && isPrint c
-- | @isCChar c@ returns true, if c is a source character which does not have to be escaped in
-- C char constants (C99: 6.4.4.4)
isCChar :: Char -> Bool
isCChar '\\' = False
isCChar '\'' = False
isCChar '\n' = False
isCChar c = isAsciiSourceChar c
-- | @escapeCChar c@ escapes c for use in a char constant
escapeCChar :: Char -> String
escapeCChar '\'' = "\\'"
escapeCChar c | isCChar c = [c]
| otherwise = escapeChar c
-- | @isSChar c@ returns true if c is a source character which does not have to be escaped in C string
-- literals (C99: 6.4.5)
isSChar :: Char -> Bool
isSChar '\\' = False
isSChar '\"' = False
isSChar '\n' = False
isSChar c = isAsciiSourceChar c
showOct' :: Int -> String
showOct' i = replicate (3 - length s) '0' ++ s
where s = showOct i ""
escapeChar :: Char -> String
escapeChar '\\' = "\\\\"
escapeChar '\a' = "\\a"
escapeChar '\b' = "\\b"
escapeChar '\ESC' = "\\e";
escapeChar '\f' = "\\f"
escapeChar '\n' = "\\n"
escapeChar '\r' = "\\r"
escapeChar '\t' = "\\t"
escapeChar '\v' = "\\v"
escapeChar c | (ord c) < 512 = '\\' : showOct' (ord c)
| otherwise = '\\' : 'x' : showHex (ord c) ""
unescapeChar :: String -> (Char, String)
unescapeChar ('\\':c:cs) = case c of
'n' -> ('\n', cs)
't' -> ('\t', cs)
'v' -> ('\v', cs)
'b' -> ('\b', cs)
'r' -> ('\r', cs)
'f' -> ('\f', cs)
'a' -> ('\a', cs)
'e' -> ('\ESC', cs) -- GNU extension
'E' -> ('\ESC', cs) -- GNU extension
'\\' -> ('\\', cs)
'?' -> ('?', cs)
'\'' -> ('\'', cs)
'"' -> ('"', cs)
'x' -> case head' "bad escape sequence" (readHex cs) of
(i, cs') -> (toEnum i, cs')
_ -> case head' "bad escape sequence" (readOct' (c:cs)) of
(i, cs') -> (toEnum i, cs')
unescapeChar (c :cs) = (c, cs)
unescapeChar [] = error $ "unescape char: empty string"
readOct' :: ReadS Int
readOct' s = map (\(i, cs) -> (i, cs ++ rest)) (readOct octStr)
where octStr = takeWhile isOctDigit $ take 3 s
rest = drop (length octStr) s
-}
unescapeString
:: String
-> String
unescapeString [] = []
unescapeString cs =
case unescapeChar cs of
(c, cs') -> c : unescapeString cs'
{-
-- helpers
sQuote :: String -> ShowS
sQuote s t = "'" ++ s ++ "'" ++ t
dQuote :: String -> ShowS
dQuote s t = ('"' : s) ++ "\"" ++ t
head' :: String -> [a] -> a
head' err [] = error err
head' _ (x:_) = x
-}
-- TODO: Move to separate file ?
newtype Flags f = Flags Integer deriving (Eq,Ord,Data,Typeable)
noFlags :: Flags f
noFlags = Flags 0
setFlag :: (Enum f) => f -> Flags f -> Flags f
setFlag flag (Flags k) = Flags$ k `setBit` fromEnum flag
clearFlag :: (Enum f) => f -> Flags f -> Flags f
clearFlag flag (Flags k) = Flags$ k `clearBit` fromEnum flag
testFlag :: (Enum f) => f -> Flags f -> Bool
testFlag flag (Flags k) = k `testBit` fromEnum flag
| micknelso/language-c | src/Language/CFamily/Constants.hs | bsd-3-clause | 22,063 | 0 | 19 | 6,560 | 4,555 | 2,422 | 2,133 | 399 | 13 |
{-# LANGUAGE RecordWildCards #-}
module TokenBufferSpec
( spec
)
where
import TokenBuffer
import Prelude hiding (null)
import Data.Maybe
import qualified Data.Vector as Vector
import Control.Monad
import Test.Hspec
import Test.QuickCheck
newtype T = T (Int, Int)
deriving (Eq, Show)
instance Arbitrary T where
arbitrary = do
NonNegative a <- arbitrary
NonNegative b <- arbitrary
return (T (a, b))
instance Token T where
consumedLength (T t) = fst t
lexerLookAhead (T t) = snd t
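-- A value T (c, l) models a token that consumed c characters and made the
-- lexer look ahead l characters; the Arbitrary instance above only
-- generates non-negative pairs.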
newtype C t = C {fromC :: Chunk t}
deriving (Show)
instance Arbitrary t => Arbitrary (C t) where
arbitrary = do
c <- arbitrary
case c of
Left (Positive len) -> do
return (C (Dirty len))
Right (NonEmpty ts) -> return (C (Parsed (Vector.fromList ts)))
newtype C' t = C'{fromC' :: Chunk t}
deriving (Show)
instance Arbitrary t => Arbitrary (C' t) where
arbitrary = do
c <- arbitrary
case c of
Left len -> do
return (C' (Dirty len))
Right ts -> return (C' (Parsed (Vector.fromList ts)))
newtype B t = B (TokenBuffer t)
deriving (Show)
instance (Token t, Arbitrary t) => Arbitrary (B t) where
arbitrary = do
chunks <- map fromC <$> arbitrary
return (B (fromChunks chunks))
newtype NonEmptyB t = NonEmptyB (TokenBuffer t)
deriving (Show)
instance (Token t, Arbitrary t) => Arbitrary (NonEmptyB t) where
arbitrary = do
NonEmpty chunks' <- arbitrary
let chunks = map fromC chunks'
return (NonEmptyB (fromChunks chunks))
newtype S = S Size
deriving (Show)
instance Arbitrary S where
arbitrary = do
NonNegative chars <- arbitrary
NonNegative lookAhead <- arbitrary
dirty <- arbitrary
return (S Size{..})
invariant :: Token t => TokenBuffer t -> Bool
invariant buf = noSuccessiveDirty
&& noSuccessiveSmallParsed
&& noLargeParsed
&& noEmptyChunks
where
noSuccessiveDirty = go False buf
where
go lastDirty b = case viewChunkLeft b of
Nothing -> True
Just (Parsed{}, rest) -> go False rest
Just (Dirty{}, rest)
| lastDirty -> False
| otherwise -> go True rest
noSuccessiveSmallParsed = go Nothing buf
where
go maybe_len b = case viewChunkLeft b of
Nothing -> True
Just (Dirty{}, rest) -> go Nothing rest
Just (Parsed tokens, rest) ->
case maybe_len of
Nothing -> go (Just (Vector.length tokens)) rest
Just len
| len + Vector.length tokens < minChunkLength -> False
| otherwise -> go (Just (Vector.length tokens)) rest
noLargeParsed = go buf
where
go b = case viewChunkLeft b of
Nothing -> True
Just (Dirty{}, rest) -> go rest
Just (Parsed tokens, _)
| Vector.length tokens > maxChunkLength -> False
Just (Parsed _, rest) -> go rest
noEmptyChunks = all good . toChunks $ buf
where
good (Dirty len) | len <= 0 = False
good (Parsed tokens) = not (Vector.null tokens)
good _ = True
invariantT :: TokenBuffer T -> Bool
invariantT = invariant
spec :: Spec
spec = describe "TokenBuffer" $ do
it "should be a Monoid" $ property $ \(B buf1, B buf2, B buf3) ->
buf1 `mappend` (buf2 `mappend` buf3)
== (buf1 `mappend` buf2) `mappend` (buf3 :: TokenBuffer T)
describe "empty" $ do
it "should be null" $ do
shouldSatisfy (empty :: TokenBuffer T) null
it "should satisfy invatiant" $
shouldSatisfy empty invariantT
describe "append" $ do
it "should append all tokens" $ property $ \(tokens1, tokens2) ->
fromList tokens1 `append` fromList tokens2
== fromList (tokens1 ++ tokens2 :: [Either Int T])
it "should preserve invatiant" $ property $ \(B buf1, B buf2) ->
shouldSatisfy (buf1 `append` buf2) invariantT
describe "fromChunks" $ do
it "should enforce invatiant" $ property $ \cs ->
shouldSatisfy (fromChunks . map fromC' $ cs) invariantT
describe "singleton" $ do
it "should enforce invatiant" $ property $ \i ->
shouldSatisfy (singleton i) invariantT
describe "viewChunkLeft" $ do
it "should return Nothing on empty buffer" $
shouldSatisfy (viewChunkLeft (empty :: TokenBuffer T)) isNothing
it "should return the first chunk" $ do
let c1 = Parsed (Vector.fromList [T (1, 2)])
c2 = Dirty 3
buf = fromChunks [c1, c2]
viewChunkLeft buf `shouldBe` Just (c1, fromChunks [c2])
describe "viewChunkRight" $ do
it "should return Nothing on empty buffer" $
shouldSatisfy (viewChunkRight (empty :: TokenBuffer T)) isNothing
it "should return the last chunk" $ do
let c1 = Parsed (Vector.fromList [T (1, 2)])
c2 = Dirty 3
buf = fromChunks [c1, c2]
viewChunkRight buf `shouldBe` Just (fromChunks [c1], c2)
describe "viewLeft" $ do
it "should return Nothing on empty buffer" $
shouldSatisfy (viewLeft (empty :: TokenBuffer T)) isNothing
it "should return the first token" $ property $ \(t, B rest) ->
let buf = singleton (Right t :: Either Int T) `append` rest
in viewLeft buf `shouldBe` Just (Right t, rest)
it "should return the first token" $ property $ \(NonEmptyB buf) ->
let tokens = toList buf
t = head tokens :: Either Int T
rest = tail tokens
in viewLeft buf `shouldBe` Just (t, fromList rest)
describe "viewRight" $ do
it "should return Nothing on empty buffer" $
shouldSatisfy (viewRight (empty :: TokenBuffer T)) isNothing
it "should return the last token" $ property $ \(NonEmptyB buf) ->
let tokens = toList buf
t = last tokens :: Either Int T
rest = init tokens
in viewRight buf `shouldBe` Just (fromList rest, t)
describe "splitBefore" $ do
it "should split buffer" $ property $ \(B buf) ->
let (l, r) = splitBefore ((> 5) . chars) buf
in (l `append` r) == (buf :: TokenBuffer T)
it "should split buffer immediately befor the predicate becomes True"
$ property $ \(NonEmptyB buf) ->
let (l, r) = splitBefore p buf
p = (> 5) . chars
in case viewLeft (r :: TokenBuffer T) of
Nothing ->
shouldSatisfy l (not . p . measure)
Just (t, _) -> do
shouldSatisfy l (not . p . measure)
shouldSatisfy (l `append` singleton t) (p . measure)
it "should preserve invariant" $ property $ \(B buf) ->
shouldSatisfy (splitBefore ((>5) . chars) buf) $ \(l, r) ->
invariantT l && invariantT r
describe "splitAfter" $ do
it "should split buffer" $ property $ \(B buf) ->
let (l, r) = splitAfter ((> 5) . chars) buf
in (l `append` r) == (buf :: TokenBuffer T)
it "should split buffer immediately after the predicate becomes True"
$ property $ \(NonEmptyB buf) -> do
let (l, r) = splitAfter p buf
p = (> 5) . chars
when (not (null r)) $ do
case viewRight (l :: TokenBuffer T) of
Nothing -> return ()
Just (l', _) -> do
shouldSatisfy l (p . measure)
shouldSatisfy l' (not . p . measure)
it "should preserve invariant" $ property $ \(B buf) ->
shouldSatisfy (splitAfter ((>5) . chars) buf) $ \(l, r) ->
invariantT l && invariantT r
describe "Size" $ do
it "should be a Monoid" $ property $ \(S s1, S s2, S s3) ->
s1 `mappend` (s2 `mappend` s3)
== (s1 `mappend` s2) `mappend` s3
| Yuras/tide | spec/TokenBufferSpec.hs | bsd-3-clause | 7,470 | 0 | 26 | 2,080 | 2,858 | 1,423 | 1,435 | 189 | 11 |
-- http://stackoverflow.com/questions/6889715/extending-a-datatype-in-haskell
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- {-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ExistentialQuantification #-}
-- {-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE CPP #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
-- This module provides Pire's syntax,
-- i.e. Pire's flavour of Pi-forall syntax
module Pire.Syntax.Binder where
import Pire.Syntax.Ws
import Pire.Syntax.Token
#ifdef MIN_VERSION_GLASGOW_HASKELL
#if MIN_VERSION_GLASGOW_HASKELL(7,10,3,0)
-- ghc >= 7.10.3
#else
-- older ghc versions, but MIN_VERSION_GLASGOW_HASKELL defined
#endif
#else
-- MIN_VERSION_GLASGOW_HASKELL not even defined yet (ghc <= 7.8.x)
import Data.Foldable
import Data.Traversable
-- import Control.Applicative
#endif
import Control.Lens
-- import Control.Lens.TH (makeLenses)
import Pire.Syntax.GetNm
import Pire.Syntax.MkVisible
import Pire.Syntax.Wildcard
data Binder t =
Binder t
| Binder_ t (Ws t)
| BinderInBrackets (Token 'BracketOpenTy t) (Binder t) (Token 'BracketCloseTy t)
| InvisibleBinder Integer
-- deriving (Show, Eq, Ord, Read, Functor, Foldable, Traversable)
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
makeLenses ''Binder
instance MkVisible t => GetNm (Binder t) t where
name' (Binder n) = n
name' (Binder_ n _) = n
name' (BinderInBrackets _ bndr _) = name' bndr
name' (InvisibleBinder i) = mkVisible wildcardName i
| reuleaux/pire | src/Pire/Syntax/Binder.hs | bsd-3-clause | 2,071 | 0 | 9 | 321 | 270 | 161 | 109 | 34 | 0 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Lens
import Control.Monad (void)
import qualified Data.ByteString.Char8 as BS
import qualified Data.Conduit.List as CL
import Data.Default (def)
import qualified Data.Text as T
import Qi (withConfig)
import Qi.Config.AWS.Lambda (LambdaMemorySize (..),
lpMemorySize, lpTimeoutSeconds)
import Qi.Config.AWS.S3 (S3Event, s3Object, s3eObject,
s3oBucketId, s3oKey)
import Qi.Config.Identifier (S3BucketId)
import Qi.Program.Config.Interface (ConfigProgram, s3Bucket,
s3BucketLambda)
import Qi.Program.Lambda.Interface (S3LambdaProgram, say,
streamFromS3Object)
import Qi.Util (success)
main :: IO ()
main = withConfig config
where
config :: ConfigProgram ()
config = do
incoming <- s3Bucket "incoming"
let lbdProfile = def
& lpMemorySize .~ M1024
& lpTimeoutSeconds .~ 300
void $ s3BucketLambda "foldS3ObjectContent" incoming foldS3ObjectContent lbdProfile
foldS3ObjectContent
:: S3LambdaProgram
foldS3ObjectContent event = do
let incomingS3Obj = event^.s3eObject
-- Calculate the size of the stream in constant memory space (i.e. without downloading the whole s3 object into memory)
(size :: Int) <- streamFromS3Object incomingS3Obj $
CL.foldM (\acc bs -> do
let len = BS.length bs
            say . T.pack $ "encountered chunk size: " ++ show len
return $ acc + len
) 0
say . T.pack $ "s3 object content size: " ++ show size
success "lambda had executed successfully"
| qmuli/qmuli | examples/fold-s3-content/src/Main.hs | mit | 2,168 | 0 | 20 | 841 | 392 | 220 | 172 | 43 | 1 |
module ListWebhooks where
import qualified Github.Repos.Webhooks as W
import qualified Github.Auth as Auth
import qualified Github.Data.Definitions as Def
import Data.List
main :: IO ()
main = do
let auth = Auth.OAuth "oauthtoken"
possibleWebhooks <- W.webhooksFor' auth "repoOwner" "repoName"
case possibleWebhooks of
(Left err) -> putStrLn $ "Error: " ++ (show err)
(Right webhooks) -> putStrLn $ intercalate "\n" $ map formatRepoWebhook webhooks
formatRepoWebhook :: Def.RepoWebhook -> String
formatRepoWebhook (Def.RepoWebhook _ _ _ name _ _ _ _ _ _) = show name
| jwiegley/github | samples/Repos/Webhooks/ListWebhooks.hs | bsd-3-clause | 584 | 0 | 12 | 99 | 189 | 100 | 89 | 14 | 2 |
import System.Plugins
import System.Directory
a = "Foo.hs" -- uesr code
b = "Bar.hs" -- trusted code. Result is "Bar.o"
main = do
status <- makeWith a b []
s <- case status of
MakeFailure e -> mapM_ putStrLn e >> error "failed"
MakeSuccess n s -> print n >> return s
status <- makeWith a b []
s' <- case status of
MakeFailure e -> mapM_ putStrLn e >> error "failed"
MakeSuccess n s -> print n >> return s
status <- makeWith a b []
s'' <- case status of
MakeFailure e -> mapM_ putStrLn e >> error "failed"
MakeSuccess n s -> print n >> return s
print $ (s == s') && (s' == s'')
m_v <- load s [] [] "resource"
v <- case m_v of
LoadSuccess _ v -> return v
_ -> error "load failed"
putStrLn $ show $ (v :: Int)
makeCleaner s''
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/makewith/module_name/Main.hs | bsd-3-clause | 968 | 3 | 12 | 395 | 346 | 155 | 191 | 24 | 5 |
-- -- $Id$
module Language.ABCgleich
( top
)
where
import Language
import Data.Set
import Data.FiniteMap
import Control.Monad ( guard )
import Data.List ( intersperse )
import System.Random ( randomRIO )
top :: String -> Language
top xs = Language
{ abbreviation = "{ w : " ++ concat ( intersperse " = "
[ "|w|_" ++ [x] | x <- xs ] ) ++ " }"
, alphabet = mkSet xs
, sample = sam xs
, contains = con xs
}
one [] = return []
one xns = do
let l = length xns
i <- randomRIO (0, l-1)
let (pre, (x,n) : post) = splitAt i xns
let xns' = pre ++ [ (x, n-1) | n > 0 ] ++ post
w <- one xns'
return $ x : w
sam :: String -> Int -> Int -> IO [ String ]
sam xs c n =
let (q, r) = divMod n (length xs)
in if 0 == r
then sequence $ replicate c $ one [ (x, q) | x <- xs ]
else return []
con :: String -> String -> Bool
con xs w =
let count x = length ( filter (== x) w )
c : cs = map count xs
in all (== c) cs
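-- For example, con "ab" "abba" == True and con "ab" "aab" == False: every
-- letter of the alphabet must occur equally often in the word.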
| florianpilz/autotool | src/Language/ABCgleich.hs | gpl-2.0 | 943 | 12 | 14 | 324 | 469 | 242 | 227 | 32 | 2 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-- Module : Network.AWS.Data.Internal.Header
-- Copyright : (c) 2013-2015 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.AWS.Data.Internal.Header where
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.CaseInsensitive as CI
import Data.Foldable as Fold
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text.Encoding as Text
import Network.AWS.Data.Internal.ByteString
import Network.AWS.Data.Internal.Text
import Network.HTTP.Types
(~:) :: FromText a => ResponseHeaders -> HeaderName -> Either String a
hs ~: k = hs ~:? k >>= note
where
note Nothing = Left (BS.unpack $ "Unable to find header: " <> CI.original k)
note (Just x) = Right x
(~:?) :: FromText a => ResponseHeaders -> HeaderName -> Either String (Maybe a)
hs ~:? k =
maybe (Right Nothing)
(fmap Just . fromText . Text.decodeUtf8)
(k `lookup` hs)
class ToHeaders a where
toHeaders :: a -> [Header]
toHeaders = const mempty
(=:) :: ToHeader a => HeaderName -> a -> [Header]
(=:) = toHeader
hdr :: HeaderName -> ByteString -> [Header] -> [Header]
hdr k v hs = (k, v) : filter ((/= k) . fst) hs
hdrs :: [Header] -> [Header] -> [Header]
hdrs xs ys = Fold.foldr' (uncurry hdr) ys xs
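-- For example, hdr hHost "a.example" [(hHost, "b.example")] replaces the
-- existing Host header, giving [(hHost, "a.example")]; hdrs folds a whole
-- list of headers into an existing header list the same way.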
toHeaderText :: ToText a => HeaderName -> a -> [Header]
toHeaderText k = toHeader k . toText
class ToHeader a where
toHeader :: HeaderName -> a -> [Header]
default toHeader :: ToText a => HeaderName -> a -> [Header]
toHeader k = toHeader k . toText
instance ToHeader Text where
toHeader k = toHeader k . Text.encodeUtf8
instance ToHeader ByteString where
toHeader k = toHeader k . Just
instance ToByteString a => ToHeader (Maybe a) where
toHeader k = maybe [] (\v -> [(k, toBS v)])
instance (ToByteString k, ToByteString v) => ToHeader (HashMap k v) where
toHeader p = map (\(k, v) -> (p <> CI.mk (toBS k), toBS v)) . Map.toList
hHost :: HeaderName
hHost = "Host"
hAMZToken :: HeaderName
hAMZToken = "X-Amz-Security-Token"
hAMZTarget :: HeaderName
hAMZTarget = "X-Amz-Target"
hAMZAlgorithm :: HeaderName
hAMZAlgorithm = "X-Amz-Algorithm"
hAMZCredential :: HeaderName
hAMZCredential = "X-Amz-Credential"
hAMZExpires :: HeaderName
hAMZExpires = "X-Amz-Expires"
hAMZSignedHeaders :: HeaderName
hAMZSignedHeaders = "X-Amz-SignedHeaders"
hAMZContentSHA256 :: HeaderName
hAMZContentSHA256 = "X-Amz-Content-SHA256"
hAMZAuth :: HeaderName
hAMZAuth = "X-Amzn-Authorization"
hAMZDate :: HeaderName
hAMZDate = "X-Amz-Date"
hMetaPrefix :: HeaderName
hMetaPrefix = "X-Amz-"
| romanb/amazonka | core/src/Network/AWS/Data/Internal/Header.hs | mpl-2.0 | 3,424 | 0 | 14 | 827 | 871 | 487 | 384 | 71 | 2 |
-- Cmm representations using Hoopl's Graph CmmNode e x.
{-# LANGUAGE CPP, GADTs #-}
module Cmm (
-- * Cmm top-level datatypes
CmmProgram, CmmGroup, GenCmmGroup,
CmmDecl, GenCmmDecl(..),
CmmGraph, GenCmmGraph(..),
CmmBlock,
RawCmmDecl, RawCmmGroup,
Section(..), SectionType(..), CmmStatics(..), CmmStatic(..),
-- ** Blocks containing lists
GenBasicBlock(..), blockId,
ListGraph(..), pprBBlock,
-- * Cmm graphs
CmmReplGraph, GenCmmReplGraph, CmmFwdRewrite, CmmBwdRewrite,
-- * Info Tables
CmmTopInfo(..), CmmStackInfo(..), CmmInfoTable(..), topInfoTable,
ClosureTypeInfo(..),
C_SRT(..), needsSRT,
ProfilingInfo(..), ConstrDescription,
-- * Statements, expressions and types
module CmmNode,
module CmmExpr,
) where
import CLabel
import BlockId
import CmmNode
import SMRep
import CmmExpr
import UniqSupply
import Compiler.Hoopl
import Outputable
import Data.Word ( Word8 )
#include "HsVersions.h"
-----------------------------------------------------------------------------
-- Cmm, GenCmm
-----------------------------------------------------------------------------
-- A CmmProgram is a list of CmmGroups
-- A CmmGroup is a list of top-level declarations
-- When object-splitting is on, each group is compiled into a separate
-- .o file. So typically we put closely related stuff in a CmmGroup.
-- Section-splitting follows suit and makes one .text subsection for each
-- CmmGroup.
type CmmProgram = [CmmGroup]
type GenCmmGroup d h g = [GenCmmDecl d h g]
type CmmGroup = GenCmmGroup CmmStatics CmmTopInfo CmmGraph
type RawCmmGroup = GenCmmGroup CmmStatics (LabelMap CmmStatics) CmmGraph
-----------------------------------------------------------------------------
-- CmmDecl, GenCmmDecl
-----------------------------------------------------------------------------
-- GenCmmDecl is abstracted over
-- d, the type of static data elements in CmmData
-- h, the static info preceding the code of a CmmProc
-- g, the control-flow graph of a CmmProc
--
-- We expect there to be two main instances of this type:
-- (a) C--, i.e. populated with various C-- constructs
-- (b) Native code, populated with data/instructions
-- | A top-level chunk, abstracted over the type of the contents of
-- the basic blocks (Cmm or instructions are the likely instantiations).
data GenCmmDecl d h g
= CmmProc -- A procedure
h -- Extra header such as the info table
CLabel -- Entry label
[GlobalReg] -- Registers live on entry. Note that the set of live
-- registers will be correct in generated C-- code, but
-- not in hand-written C-- code. However,
-- splitAtProcPoints calculates correct liveness
-- information for CmmProcs.
g -- Control-flow graph for the procedure's code
| CmmData -- Static data
Section
d
type CmmDecl = GenCmmDecl CmmStatics CmmTopInfo CmmGraph
type RawCmmDecl
= GenCmmDecl
CmmStatics
(LabelMap CmmStatics)
CmmGraph
-----------------------------------------------------------------------------
-- Graphs
-----------------------------------------------------------------------------
type CmmGraph = GenCmmGraph CmmNode
data GenCmmGraph n = CmmGraph { g_entry :: BlockId, g_graph :: Graph n C C }
type CmmBlock = Block CmmNode C C
type CmmReplGraph e x = GenCmmReplGraph CmmNode e x
type GenCmmReplGraph n e x = UniqSM (Maybe (Graph n e x))
type CmmFwdRewrite f = FwdRewrite UniqSM CmmNode f
type CmmBwdRewrite f = BwdRewrite UniqSM CmmNode f
-----------------------------------------------------------------------------
-- Info Tables
-----------------------------------------------------------------------------
data CmmTopInfo = TopInfo { info_tbls :: LabelMap CmmInfoTable
, stack_info :: CmmStackInfo }
topInfoTable :: GenCmmDecl a CmmTopInfo (GenCmmGraph n) -> Maybe CmmInfoTable
topInfoTable (CmmProc infos _ _ g) = mapLookup (g_entry g) (info_tbls infos)
topInfoTable _ = Nothing
data CmmStackInfo
= StackInfo {
arg_space :: ByteOff,
-- number of bytes of arguments on the stack on entry to the
-- the proc. This is filled in by StgCmm.codeGen, and used
-- by the stack allocator later.
updfr_space :: Maybe ByteOff,
-- XXX: this never contains anything useful, but it should.
-- See comment in CmmLayoutStack.
do_layout :: Bool
-- Do automatic stack layout for this proc. This is
-- True for all code generated by the code generator,
-- but is occasionally False for hand-written Cmm where
-- we want to do the stack manipulation manually.
}
-- | Info table as a haskell data type
data CmmInfoTable
= CmmInfoTable {
cit_lbl :: CLabel, -- Info table label
cit_rep :: SMRep,
cit_prof :: ProfilingInfo,
cit_srt :: C_SRT
}
data ProfilingInfo
= NoProfilingInfo
| ProfilingInfo [Word8] [Word8] -- closure_type, closure_desc
-- C_SRT is what StgSyn.SRT gets translated to...
-- we add a label for the table, and expect only the 'offset/length' form
data C_SRT = NoC_SRT
| C_SRT !CLabel !WordOff !StgHalfWord {-bitmap or escape-}
deriving (Eq)
needsSRT :: C_SRT -> Bool
needsSRT NoC_SRT = False
needsSRT (C_SRT _ _ _) = True
-----------------------------------------------------------------------------
-- Static Data
-----------------------------------------------------------------------------
data SectionType
= Text
| Data
| ReadOnlyData
| RelocatableReadOnlyData
| UninitialisedData
| ReadOnlyData16 -- .rodata.cst16 on x86_64, 16-byte aligned
| CString
| OtherSection String
deriving (Show)
data Section = Section SectionType CLabel
data CmmStatic
= CmmStaticLit CmmLit
-- a literal value, size given by cmmLitRep of the literal.
| CmmUninitialised Int
-- uninitialised data, N bytes long
| CmmString [Word8]
-- string of 8-bit values only, not zero terminated.
data CmmStatics
= Statics
CLabel -- Label of statics
[CmmStatic] -- The static data itself
-- -----------------------------------------------------------------------------
-- Basic blocks consisting of lists
-- These are used by the LLVM and NCG backends, when populating Cmm
-- with lists of instructions.
data GenBasicBlock i = BasicBlock BlockId [i]
-- | The branch block id is that of the first block in
-- the branch, which is that branch's entry point
blockId :: GenBasicBlock i -> BlockId
blockId (BasicBlock blk_id _ ) = blk_id
newtype ListGraph i = ListGraph [GenBasicBlock i]
instance Outputable instr => Outputable (ListGraph instr) where
ppr (ListGraph blocks) = vcat (map ppr blocks)
instance Outputable instr => Outputable (GenBasicBlock instr) where
ppr = pprBBlock
pprBBlock :: Outputable stmt => GenBasicBlock stmt -> SDoc
pprBBlock (BasicBlock ident stmts) =
hang (ppr ident <> colon) 4 (vcat (map ppr stmts))
| olsner/ghc | compiler/cmm/Cmm.hs | bsd-3-clause | 7,265 | 0 | 9 | 1,665 | 1,036 | 632 | 404 | 113 | 1 |
module Main where
import Distribution.MacOSX
import Distribution.MacOSX.AppBuildInfo
import System.Directory
import System.Environment
import System.FilePath
main = do
pname <- getProgName
xs <- getArgs
exe <- case xs of
[x1] -> return x1
_ -> fail $ "Usage: " ++ pname ++ " <exe>"
exeExists <- doesFileExist exe
let macapp = MacApp { appName = takeFileName exe
, appIcon = Nothing
, appPlist = Nothing
, resources = []
, otherBins = []
, appDeps = DoNotChase
}
appInfo = AppBuildInfo { abApp = macapp
, abAppPath = appName macapp <.> "app"
, abAppOrigExe = exe
}
if exeExists
then makeAppBundle appInfo
else fail $ exe ++ " does not exist"
| gimbo/cabal-macosx | macosx-app.hs | bsd-3-clause | 944 | 0 | 13 | 412 | 208 | 114 | 94 | 25 | 3 |
{-|
Module : Idris.Docstrings
Description : Wrapper around Markdown library.
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE CPP, DeriveFoldable, DeriveFunctor, DeriveGeneric,
DeriveTraversable, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Docstrings (
Docstring(..), Block(..), Inline(..), parseDocstring, renderDocstring
, emptyDocstring, nullDocstring, noDocs, overview, containsText
, renderHtml, annotCode, DocTerm(..), renderDocTerm, checkDocstring
) where
import Idris.Core.TT (Err, Name, OutputAnnotation(..), Term, TextFormatting(..))
import Util.Pretty
#if (MIN_VERSION_base(4,11,0))
import Prelude hiding ((<$>), (<>))
#else
import Prelude hiding ((<$>))
#endif
import qualified Cheapskate as C
import Cheapskate.Html (renderDoc)
import qualified Cheapskate.Types as CT
import qualified Data.Foldable as F
import qualified Data.Sequence as S
import qualified Data.Text as T
import GHC.Generics (Generic)
import Text.Blaze.Html (Html)
-- | The various kinds of code samples that can be embedded in docs
data DocTerm = Unchecked
| Checked Term
| Example Term
| Failing Err
deriving (Show, Generic)
-- | Render a term in the documentation
renderDocTerm :: (Term -> Doc OutputAnnotation) -> (Term -> Term) -> DocTerm -> String -> Doc OutputAnnotation
renderDocTerm pp norm Unchecked src = text src
renderDocTerm pp norm (Checked tm) src = pp tm
renderDocTerm pp norm (Example tm) src = align $
text ">" <+> align (pp tm) <$>
pp (norm tm)
renderDocTerm pp norm (Failing err) src = annotate (AnnErr err) $ text src
-- | Representation of Idris's inline documentation. The type parameter
-- represents the type of terms that are associated with code blocks.
data Docstring a = DocString CT.Options (Blocks a)
deriving (Show, Functor, Foldable, Traversable, Generic)
type Blocks a = S.Seq (Block a)
-- | Block-level elements.
data Block a = Para (Inlines a)
| Header Int (Inlines a)
| Blockquote (Blocks a)
| List Bool CT.ListType [Blocks a]
| CodeBlock CT.CodeAttr T.Text a
| HtmlBlock T.Text
| HRule
deriving (Show, Functor, Foldable, Traversable, Generic)
data Inline a = Str T.Text
| Space
| SoftBreak
| LineBreak
| Emph (Inlines a)
| Strong (Inlines a)
| Code T.Text a
| Link (Inlines a) T.Text T.Text
| Image (Inlines a) T.Text T.Text
| Entity T.Text
| RawHtml T.Text
deriving (Show, Functor, Foldable, Traversable, Generic)
type Inlines a = S.Seq (Inline a)
-- | Run some kind of processing step over code in a Docstring. The code
-- processor gets the language and annotations as parameters, along with the
-- source and the original annotation.
checkDocstring :: forall a b. (String -> [String] -> String -> a -> b) -> Docstring a -> Docstring b
checkDocstring f (DocString opts blocks) = DocString opts (fmap (checkBlock f) blocks)
where checkBlock :: (String -> [String] -> String -> a -> b) -> Block a -> Block b
checkBlock f (Para inlines) = Para (fmap (checkInline f) inlines)
checkBlock f (Header i inlines) = Header i (fmap (checkInline f) inlines)
checkBlock f (Blockquote bs) = Blockquote (fmap (checkBlock f) bs)
checkBlock f (List b t blocks) = List b t (fmap (fmap (checkBlock f)) blocks)
checkBlock f (CodeBlock attrs src tm) = CodeBlock attrs src
(f (T.unpack $ CT.codeLang attrs)
(words . T.unpack $ CT.codeInfo attrs)
(T.unpack src)
tm)
checkBlock f (HtmlBlock src) = HtmlBlock src
checkBlock f HRule = HRule
checkInline :: (String -> [String] -> String -> a -> b) -> Inline a -> Inline b
checkInline f (Str txt) = Str txt
checkInline f Space = Space
checkInline f SoftBreak = SoftBreak
checkInline f LineBreak = LineBreak
checkInline f (Emph is) = Emph (fmap (checkInline f) is)
checkInline f (Strong is) = Strong (fmap (checkInline f) is)
checkInline f (Code src x) = Code src (f "" [] (T.unpack src) x)
checkInline f (Link is url title) = Link (fmap (checkInline f) is) url title
checkInline f (Image is url title) = Image (fmap (checkInline f) is) url title
checkInline f (Entity txt) = Entity txt
checkInline f (RawHtml src) = RawHtml src
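-- A small usage sketch (not part of the original API): a checker that merely
-- records the language of each code sample could be written as
--
-- > tagLanguage :: Docstring a -> Docstring String
-- > tagLanguage = checkDocstring (\lang _attrs _src _old -> lang)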
-- | Construct a docstring from a Text that contains Markdown-formatted docs
parseDocstring :: T.Text -> Docstring ()
parseDocstring = toDocstring . C.markdown options
where toDocstring :: CT.Doc -> Docstring ()
toDocstring (CT.Doc opts blocks) = DocString opts (fmap toBlock blocks)
toBlock :: CT.Block -> Block ()
toBlock (CT.Para inlines) = Para (fmap toInline inlines)
toBlock (CT.Header i inlines) = Header i (fmap toInline inlines)
toBlock (CT.Blockquote blocks) = Blockquote (fmap toBlock blocks)
toBlock (CT.List b t blocks) = List b t (fmap (fmap toBlock) blocks)
toBlock (CT.CodeBlock attrs text) = CodeBlock attrs text ()
toBlock (CT.HtmlBlock src) = HtmlBlock src
toBlock CT.HRule = HRule
toInline :: CT.Inline -> Inline ()
toInline (CT.Str t) = Str t
toInline CT.Space = Space
toInline CT.SoftBreak = SoftBreak
toInline CT.LineBreak = LineBreak
toInline (CT.Emph is) = Emph (fmap toInline is)
toInline (CT.Strong is) = Strong (fmap toInline is)
toInline (CT.Code src) = Code src ()
toInline (CT.Link is url title) = Link (fmap toInline is) url title
toInline (CT.Image is url title) = Image (fmap toInline is) url title
toInline (CT.Entity txt) = Entity txt
toInline (CT.RawHtml src) = RawHtml src
options = CT.Options { CT.sanitize = True
, CT.allowRawHtml = False
, CT.preserveHardBreaks = True
, CT.debug = False
}
-- | Convert a docstring to be shown by the pretty-printer
renderDocstring :: (a -> String -> Doc OutputAnnotation) -> Docstring a -> Doc OutputAnnotation
renderDocstring pp (DocString _ blocks) = renderBlocks pp blocks
-- | Construct a docstring consisting of the first block-level element of the
-- argument docstring, for use in summaries.
overview :: Docstring a -> Docstring a
overview (DocString opts blocks) = DocString opts (S.take 1 blocks)
renderBlocks :: (a -> String -> Doc OutputAnnotation)
-> Blocks a -> Doc OutputAnnotation
renderBlocks pp blocks | S.length blocks > 1 = F.foldr1 (\b1 b2 -> b1 <> line <> line <> b2) $
fmap (renderBlock pp) blocks
| S.length blocks == 1 = renderBlock pp (S.index blocks 0)
| otherwise = empty
renderBlock :: (a -> String -> Doc OutputAnnotation)
-> Block a -> Doc OutputAnnotation
renderBlock pp (Para inlines) = renderInlines pp inlines
renderBlock pp (Header lvl inlines) = renderInlines pp inlines <+> parens (text (show lvl))
renderBlock pp (Blockquote blocks) = indent 8 $ renderBlocks pp blocks
renderBlock pp (List b ty blockss) = renderList pp b ty blockss
renderBlock pp (CodeBlock attr src tm) = indent 4 $ pp tm (T.unpack src)
renderBlock pp (HtmlBlock txt) = text "<html block>" -- TODO
renderBlock pp HRule = text "----------------------"
renderList :: (a -> String -> Doc OutputAnnotation)
-> Bool -> CT.ListType -> [Blocks a] -> Doc OutputAnnotation
renderList pp b (CT.Bullet c) blockss = vsep $ map (hang 4 . (char c <+>) . renderBlocks pp) blockss
renderList pp b (CT.Numbered nw i) blockss =
vsep $
zipWith3 (\n p txt -> hang 4 $ text (show n) <> p <+> txt)
[i..] (repeat punc) (map (renderBlocks pp) blockss)
where punc = case nw of
CT.PeriodFollowing -> char '.'
CT.ParenFollowing -> char '('
renderInlines :: (a -> String -> Doc OutputAnnotation) -> Inlines a -> Doc OutputAnnotation
renderInlines pp = F.foldr (<>) empty . fmap (renderInline pp)
renderInline :: (a -> String -> Doc OutputAnnotation) -> Inline a -> Doc OutputAnnotation
renderInline pp (Str s) = text $ T.unpack s
renderInline pp Space = softline
renderInline pp SoftBreak = softline
renderInline pp LineBreak = line
renderInline pp (Emph txt) = annotate (AnnTextFmt ItalicText) $ renderInlines pp txt
renderInline pp (Strong txt) = annotate (AnnTextFmt BoldText) $ renderInlines pp txt
renderInline pp (Code txt tm) = pp tm $ T.unpack txt
renderInline pp (Link body url title) = annotate (AnnLink (T.unpack url)) (renderInlines pp body)
renderInline pp (Image body url title) = text "<image>" -- TODO
renderInline pp (Entity a) = text $ "<entity " ++ T.unpack a ++ ">" -- TODO
renderInline pp (RawHtml txt) = text "<html content>" --TODO
-- | The empty docstring
emptyDocstring :: Docstring a
emptyDocstring = DocString options S.empty
-- | Check whether a docstring is empty
nullDocstring :: Docstring a -> Bool
nullDocstring (DocString _ blocks) = S.null blocks
-- | Empty documentation for a definition
noDocs :: (Docstring a, [(Name, Docstring a)])
noDocs = (emptyDocstring, [])
-- | Does a string occur in the docstring?
containsText :: T.Text -> Docstring a -> Bool
containsText str (DocString _ blocks) = F.any (blockContains (T.toLower str)) blocks
-- blockContains and inlineContains should always be called with a lower-case search string
where blockContains :: T.Text -> Block a -> Bool
blockContains str (Para inlines) = F.any (inlineContains str) inlines
blockContains str (Header lvl inlines) = F.any (inlineContains str) inlines
blockContains str (Blockquote blocks) = F.any (blockContains str) blocks
blockContains str (List b ty blockss) = F.any (F.any (blockContains str)) blockss
blockContains str (CodeBlock attr src _) = T.isInfixOf str (T.toLower src)
blockContains str (HtmlBlock txt) = False -- TODO
blockContains str HRule = False
inlineContains :: T.Text -> Inline a -> Bool
inlineContains str (Str s) = T.isInfixOf str (T.toLower s)
inlineContains str Space = False
inlineContains str SoftBreak = False
inlineContains str LineBreak = False
inlineContains str (Emph txt) = F.any (inlineContains str) txt
inlineContains str (Strong txt) = F.any (inlineContains str) txt
inlineContains str (Code txt _) = T.isInfixOf str (T.toLower txt)
inlineContains str (Link body url title) = F.any (inlineContains str) body
inlineContains str (Image body url title) = False
inlineContains str (Entity a) = False
inlineContains str (RawHtml txt) = T.isInfixOf str (T.toLower txt)
renderHtml :: Docstring DocTerm -> Html
renderHtml = renderDoc . fromDocstring
where
fromDocstring :: Docstring DocTerm -> CT.Doc
fromDocstring (DocString opts blocks) = CT.Doc opts (fmap fromBlock blocks)
fromBlock :: Block DocTerm -> CT.Block
fromBlock (Para inlines) = CT.Para (fmap fromInline inlines)
fromBlock (Header i inlines) = CT.Header i (fmap fromInline inlines)
fromBlock (Blockquote blocks) = CT.Blockquote (fmap fromBlock blocks)
fromBlock (List b t blocks) = CT.List b t (fmap (fmap fromBlock) blocks)
fromBlock (CodeBlock attrs text _) = CT.CodeBlock attrs text
fromBlock (HtmlBlock src) = CT.HtmlBlock src
fromBlock HRule = CT.HRule
fromInline :: Inline DocTerm -> CT.Inline
fromInline (Str t) = CT.Str t
fromInline Space = CT.Space
fromInline SoftBreak = CT.SoftBreak
fromInline LineBreak = CT.LineBreak
fromInline (Emph is) = CT.Emph (fmap fromInline is)
fromInline (Strong is) = CT.Strong (fmap fromInline is)
fromInline (Code src _) = CT.Code src
fromInline (Link is url title) = CT.Link (fmap fromInline is) url title
fromInline (Image is url title) = CT.Image (fmap fromInline is) url title
fromInline (Entity txt) = CT.Entity txt
fromInline (RawHtml src) = CT.RawHtml src
-- | Annotate the code samples in a docstring
annotCode :: forall a b. (String -> b) -- ^ How to annotate code samples
-> Docstring a
-> Docstring b
annotCode annot (DocString opts blocks)
= DocString opts $ fmap annotCodeBlock blocks
where
annotCodeBlock :: Block a -> Block b
annotCodeBlock (Para inlines) = Para (fmap annotCodeInline inlines)
annotCodeBlock (Header i inlines) = Header i (fmap annotCodeInline inlines)
annotCodeBlock (Blockquote blocks) = Blockquote (fmap annotCodeBlock blocks)
annotCodeBlock (List b t blocks) = List b t (fmap (fmap annotCodeBlock) blocks)
annotCodeBlock (CodeBlock attrs src _) = CodeBlock attrs src (annot (T.unpack src))
annotCodeBlock (HtmlBlock src) = HtmlBlock src
annotCodeBlock HRule = HRule
annotCodeInline :: Inline a -> Inline b
annotCodeInline (Str t) = Str t
annotCodeInline Space = Space
annotCodeInline SoftBreak = SoftBreak
annotCodeInline LineBreak = LineBreak
annotCodeInline (Emph is) = Emph (fmap annotCodeInline is)
annotCodeInline (Strong is) = Strong (fmap annotCodeInline is)
annotCodeInline (Code src _) = Code src (annot (T.unpack src))
annotCodeInline (Link is url title) = Link (fmap annotCodeInline is) url title
annotCodeInline (Image is url title) = Image (fmap annotCodeInline is) url title
annotCodeInline (Entity txt) = Entity txt
annotCodeInline (RawHtml src) = RawHtml src
| kojiromike/Idris-dev | src/Idris/Docstrings.hs | bsd-3-clause | 14,584 | 0 | 14 | 4,172 | 4,716 | 2,378 | 2,338 | 228 | 17 |
{-# LANGUAGE DataKinds
, PolyKinds
, TypeOperators
, TypeFamilies
, StandaloneDeriving
, DeriveDataTypeable
, ScopedTypeVariables
#-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2016.05.28
-- |
-- Module : Language.Hakaru.Types.DataKind
-- Copyright : Copyright (c) 2016 the Hakaru team
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC-only
--
-- A data-kind for the universe of Hakaru types.
----------------------------------------------------------------
module Language.Hakaru.Types.DataKind
(
-- * The core definition of Hakaru types
Hakaru(..)
, HakaruFun(..)
, HakaruCon(..)
-- *
, Symbol
, Code
, HData'
-- * Some \"built-in\" types
-- Naturally, these aren't actually built-in, otherwise they'd
-- be part of the 'Hakaru' data-kind.
, HBool, HUnit, HPair, HEither, HList, HMaybe
) where
import Data.Typeable (Typeable)
import GHC.TypeLits (Symbol)
----------------------------------------------------------------
-- BUG: can't define the fixity of @(':->)@
infixr 0 :->
-- | The universe\/kind of Hakaru types.
data Hakaru
= HNat -- ^ The natural numbers; aka, the non-negative integers.
-- TODO: in terms of Summate (etc), do we consider this to include omega?
-- | The integers.
| HInt
-- | Non-negative real numbers. Unlike what you might expect,
-- this is /not/ restricted to the @[0,1]@ interval!
| HProb
-- | The affinely extended real number line. That is, the real
-- numbers extended with positive and negative infinities.
| HReal
-- TODO: so much of our code has to distinguish between monadic and pure stuff. Maybe we should just break this out into a separate larger universe?
-- | The measure monad
| HMeasure !Hakaru
-- | The built-in type for uniform arrays.
| HArray !Hakaru
-- | The type of Hakaru functions.
| !Hakaru :-> !Hakaru
-- TODO: do we need to actually store the code? or can we get away with just requiring that the particular HakaruCon has a Code instance defined?
-- | A user-defined polynomial datatype. Each such type is
-- specified by a \"tag\" (the @HakaruCon@) which names the type, and a sum-of-products representation of the type itself.
| HData !HakaruCon [[HakaruFun]]
-- N.B., The @Proxy@ type from "Data.Proxy" is polykinded, so it
-- works for @Hakaru@ too. However, it is _not_ Typeable!
--
-- TODO: all the Typeable instances in this file are only used in
-- 'Language.Hakaru.Simplify.closeLoop'; it would be cleaner to
-- remove these instances and reimplement that function to work
-- without them.
deriving instance Typeable 'HNat
deriving instance Typeable 'HInt
deriving instance Typeable 'HProb
deriving instance Typeable 'HReal
deriving instance Typeable 'HMeasure
deriving instance Typeable 'HArray
deriving instance Typeable '(:->)
deriving instance Typeable 'HData
----------------------------------------------------------------
-- | The identity and constant functors on 'Hakaru'. This gives
-- us limited access to type-variables in @Hakaru@, for use in
-- recursive sums-of-products. Notably, however, it only allows a
-- single variable (namely the one bound by the closest binder) so
-- it can't encode mutual recursion or other non-local uses of
-- multiple binders. We also cannot encode non-regular recursive
-- types (aka nested datatypes), like rose trees. To do that, we'd
-- need to allow any old functor here.
--
-- Products and sums are represented as lists in the 'Hakaru'
-- data-kind itself, so they aren't in this datatype.
data HakaruFun = I | K !Hakaru
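-- For example, in the code for @List@ given further below, @'K a@ marks the
-- element type while @'I@ marks the recursive occurrence of the list itself.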
deriving instance Typeable 'I
deriving instance Typeable 'K
----------------------------------------------------------------
-- | The kind of user-defined Hakaru type constructors, which serves
-- as a tag for the sum-of-products representation of the user-defined
-- Hakaru type. The head of the 'HakaruCon' is a symbolic name, and
-- the rest are arguments to that type constructor. The @a@ parameter
-- is parametric, which is especially useful when you need a singleton
-- of the constructor. The argument positions are necessary to do
-- variable binding in Code. 'Symbol' is the kind of \"type level
-- strings\".
data HakaruCon = TyCon !Symbol | HakaruCon :@ Hakaru
infixl 0 :@
deriving instance Typeable 'TyCon
deriving instance Typeable '(:@)
-- | The Code type family allows users to extend the Hakaru language
-- by adding new types. The right hand side is the sum-of-products
-- representation of that type. See the \"built-in\" types for examples.
type family Code (a :: HakaruCon) :: [[HakaruFun]]
type instance Code ('TyCon "Bool") = '[ '[], '[] ]
type instance Code ('TyCon "Unit") = '[ '[] ]
type instance Code ('TyCon "Maybe" ':@ a) = '[ '[] , '[ 'K a ] ]
type instance Code ('TyCon "List" ':@ a) = '[ '[] , '[ 'K a, 'I ] ]
type instance Code ('TyCon "Pair" ':@ a ':@ b) = '[ '[ 'K a, 'K b ] ]
type instance Code ('TyCon "Either" ':@ a ':@ b) = '[ '[ 'K a ], '[ 'K b ] ]
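-- A sketch of a user-defined extension (the \"Tree\" name is illustrative
-- only, not part of this module): a binary tree with values at the leaves
-- could be encoded as
--
-- > type instance Code ('TyCon "Tree" ':@ a) = '[ '[ 'K a ], '[ 'I, 'I ] ]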
-- | A helper type alias for simplifying type signatures for
-- user-provided Hakaru types.
--
-- BUG: you cannot use this alias when defining other type aliases!
-- For some reason the type checker doesn't reduce the type family
-- applications, which prevents the use of these type synonyms in
-- class instance heads. Any type synonym created with 'HData''
-- will suffer the same issue, so type synonyms must be written out
-- by hand— or copied from the GHC pretty printer, which will happily
-- reduce things in the repl, even in the presence of quantified
-- type variables.
type HData' t = 'HData t (Code t)
{-
>:kind! forall a b . HData' (TyCon "Pair" :@ a :@ b)
forall a b . HData' (TyCon "Pair" :@ a :@ b) :: Hakaru
= forall (a :: Hakaru) (b :: Hakaru).
'HData (('TyCon "Pair" ':@ a) ':@ b) '['['K a, 'K b]]
type HBool = HData' (TyCon "Bool")
type HUnit = HData' (TyCon "Unit")
type HPair a b = HData' (TyCon "Pair" :@ a :@ b)
type HEither a b = HData' (TyCon "Either" :@ a :@ b)
type HList a = HData' (TyCon "List" :@ a)
type HMaybe a = HData' (TyCon "Maybe" :@ a)
-}
type HBool = 'HData ('TyCon "Bool") '[ '[], '[] ]
type HUnit = 'HData ('TyCon "Unit") '[ '[] ]
type HPair a b = 'HData ('TyCon "Pair" ':@ a ':@ b) '[ '[ 'K a, 'K b] ]
type HEither a b = 'HData ('TyCon "Either" ':@ a ':@ b) '[ '[ 'K a], '[ 'K b] ]
type HList a = 'HData ('TyCon "List" ':@ a) '[ '[], '[ 'K a, 'I] ]
type HMaybe a = 'HData ('TyCon "Maybe" ':@ a) '[ '[], '[ 'K a] ]
----------------------------------------------------------------
----------------------------------------------------------- fin.
| zachsully/hakaru | haskell/Language/Hakaru/Types/DataKind.hs | bsd-3-clause | 6,974 | 0 | 11 | 1,518 | 975 | 576 | 399 | 72 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
module Distribution.Client.SavedFlags
( readCommandFlags, writeCommandFlags
, readSavedArgs, writeSavedArgs
) where
import Distribution.Simple.Command
import Distribution.Simple.UserHooks ( Args )
import Distribution.Simple.Utils
( createDirectoryIfMissingVerbose, unintersperse )
import Distribution.Verbosity
import Control.Exception ( Exception, throwIO )
import Control.Monad ( liftM )
import Data.List ( intercalate )
import Data.Maybe ( fromMaybe )
import Data.Typeable
import System.Directory ( doesFileExist )
import System.FilePath ( takeDirectory )
writeSavedArgs :: Verbosity -> FilePath -> [String] -> IO ()
writeSavedArgs verbosity path args = do
createDirectoryIfMissingVerbose
(lessVerbose verbosity) True (takeDirectory path)
writeFile path (intercalate "\0" args)
-- | Write command-line flags to a file, separated by null characters. This
-- format is also suitable for the @xargs -0@ command. Using the null
-- character also avoids the problem of escaping newlines or spaces,
-- because unlike other whitespace characters, the null character is
-- not valid in command-line arguments.
writeCommandFlags :: Verbosity -> FilePath -> CommandUI flags -> flags -> IO ()
writeCommandFlags verbosity path command flags =
writeSavedArgs verbosity path (commandShowOptions command flags)
readSavedArgs :: FilePath -> IO (Maybe [String])
readSavedArgs path = do
exists <- doesFileExist path
if exists
then liftM (Just . unintersperse '\0') (readFile path)
else return Nothing
-- | Read command-line arguments, separated by null characters, from a file.
-- Returns the default flags if the file does not exist.
readCommandFlags :: FilePath -> CommandUI flags -> IO flags
readCommandFlags path command = do
savedArgs <- liftM (fromMaybe []) (readSavedArgs path)
case (commandParseArgs command True savedArgs) of
CommandHelp _ -> throwIO (SavedArgsErrorHelp savedArgs)
CommandList _ -> throwIO (SavedArgsErrorList savedArgs)
CommandErrors errs -> throwIO (SavedArgsErrorOther savedArgs errs)
CommandReadyToGo (mkFlags, _) ->
return (mkFlags (commandDefaultFlags command))
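-- A sketch of the intended round trip, for some hypothetical
-- @someCommand :: CommandUI SomeFlags@ and matching @flags :: SomeFlags@
-- (placeholder names, not part of this module):
--
-- > writeCommandFlags verbosity path someCommand flags
-- > flags' <- readCommandFlags path someCommand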
-- -----------------------------------------------------------------------------
-- * Exceptions
-- -----------------------------------------------------------------------------
data SavedArgsError
= SavedArgsErrorHelp Args
| SavedArgsErrorList Args
| SavedArgsErrorOther Args [String]
deriving (Typeable)
instance Show SavedArgsError where
show (SavedArgsErrorHelp args) =
"unexpected flag '--help', saved command line was:\n"
++ intercalate " " args
show (SavedArgsErrorList args) =
"unexpected flag '--list-options', saved command line was:\n"
++ intercalate " " args
show (SavedArgsErrorOther args errs) =
"saved command line was:\n"
++ intercalate " " args ++ "\n"
++ "encountered errors:\n"
++ intercalate "\n" errs
instance Exception SavedArgsError
| mydaum/cabal | cabal-install/Distribution/Client/SavedFlags.hs | bsd-3-clause | 3,015 | 0 | 14 | 499 | 635 | 330 | 305 | 57 | 4 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
{- |
Module : Text.Pandoc.Writers.ICML
Copyright : Copyright (C) 2013 github.com/mb21
License : GNU GPL, version 2 or above
Stability : alpha
Conversion of 'Pandoc' documents to Adobe InCopy ICML, a stand-alone XML format
which is a subset of the zipped IDML format for which the documentation is
available here: http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/indesign/sdk/cs6/idml/idml-specification.pdf
InCopy is the companion word-processor to Adobe InDesign and ICML documents can be integrated
into InDesign with File -> Place.
-}
module Text.Pandoc.Writers.ICML (writeICML) where
import Text.Pandoc.Definition
import Text.Pandoc.XML
import Text.Pandoc.Writers.Shared
import Text.Pandoc.Shared (splitBy)
import Text.Pandoc.Options
import Text.Pandoc.Templates (renderTemplate')
import Text.Pandoc.Pretty
import Data.List (isPrefixOf, isInfixOf, stripPrefix)
import Data.Text as Text (breakOnAll, pack)
import Data.Monoid (mappend)
import Control.Monad.State
import Network.URI (isURI)
import qualified Data.Set as Set
type Style = [String]
type Hyperlink = [(Int, String)]
data WriterState = WriterState{
blockStyles :: Set.Set String
, inlineStyles :: Set.Set String
, links :: Hyperlink
, listDepth :: Int
, maxListDepth :: Int
}
type WS a = State WriterState a
defaultWriterState :: WriterState
defaultWriterState = WriterState{
blockStyles = Set.empty
, inlineStyles = Set.empty
, links = []
, listDepth = 1
, maxListDepth = 0
}
-- inline names (appear in InDesign's character styles pane)
emphName :: String
strongName :: String
strikeoutName :: String
superscriptName :: String
subscriptName :: String
smallCapsName :: String
codeName :: String
linkName :: String
emphName = "Italic"
strongName = "Bold"
strikeoutName = "Strikeout"
superscriptName = "Superscript"
subscriptName = "Subscript"
smallCapsName = "SmallCaps"
codeName = "Code"
linkName = "Link"
-- block element names (appear in InDesign's paragraph styles pane)
paragraphName :: String
codeBlockName :: String
blockQuoteName :: String
orderedListName :: String
bulletListName :: String
defListTermName :: String
defListDefName :: String
headerName :: String
tableName :: String
tableHeaderName :: String
tableCaptionName :: String
alignLeftName :: String
alignRightName :: String
alignCenterName :: String
firstListItemName :: String
beginsWithName :: String
lowerRomanName :: String
upperRomanName :: String
lowerAlphaName :: String
upperAlphaName :: String
subListParName :: String
footnoteName :: String
paragraphName = "Paragraph"
codeBlockName = "CodeBlock"
blockQuoteName = "Blockquote"
orderedListName = "NumList"
bulletListName = "BulList"
defListTermName = "DefListTerm"
defListDefName = "DefListDef"
headerName = "Header"
tableName = "TablePar"
tableHeaderName = "TableHeader"
tableCaptionName = "TableCaption"
alignLeftName = "LeftAlign"
alignRightName = "RightAlign"
alignCenterName = "CenterAlign"
firstListItemName = "first"
beginsWithName = "beginsWith-"
lowerRomanName = "lowerRoman"
upperRomanName = "upperRoman"
lowerAlphaName = "lowerAlpha"
upperAlphaName = "upperAlpha"
subListParName = "subParagraph"
footnoteName = "Footnote"
-- | Convert Pandoc document to string in ICML format.
writeICML :: WriterOptions -> Pandoc -> String
writeICML opts (Pandoc meta blocks) =
let colwidth = if writerWrapText opts
then Just $ writerColumns opts
else Nothing
render' = render colwidth
renderMeta f s = Just $ render' $ fst $ runState (f opts [] s) defaultWriterState
Just metadata = metaToJSON opts
(renderMeta blocksToICML)
(renderMeta inlinesToICML)
meta
(doc, st) = runState (blocksToICML opts [] blocks) defaultWriterState
main = render' doc
context = defField "body" main
$ defField "charStyles" (render' $ charStylesToDoc st)
$ defField "parStyles" (render' $ parStylesToDoc st)
$ defField "hyperlinks" (render' $ hyperlinksToDoc $ links st)
$ metadata
in if writerStandalone opts
then renderTemplate' (writerTemplate opts) context
else main
-- | Auxiliary functions for parStylesToDoc and charStylesToDoc.
contains :: String -> (String, (String, String)) -> [(String, String)]
contains s rule =
if isInfixOf (fst rule) s
then [snd rule]
else []
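-- For example (illustrative): contains "Header1" (headerName ++ "1", ("PointSize", "36"))
-- evaluates to [("PointSize", "36")], because "Header1" occurs in the style string.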
-- | The monospaced font to use as default.
monospacedFont :: Doc
monospacedFont = inTags False "AppliedFont" [("type", "string")] $ text "Courier New"
-- | How much to indent blockquotes etc.
defaultIndent :: Int
defaultIndent = 20
-- | How much to indent numbered lists before the number.
defaultListIndent :: Int
defaultListIndent = 10
-- other constants
lineSeparator :: String
lineSeparator = "\x2028"
-- | Convert a WriterState with its block styles to the ICML listing of Paragraph Styles.
parStylesToDoc :: WriterState -> Doc
parStylesToDoc st = vcat $ map makeStyle $ Set.toAscList $ blockStyles st
where
makeStyle s =
let countSubStrs sub str = length $ Text.breakOnAll (Text.pack sub) (Text.pack str)
attrs = concat $ map (contains s) $ [
(defListTermName, ("BulletsAndNumberingListType", "BulletList"))
, (defListTermName, ("FontStyle", "Bold"))
, (tableHeaderName, ("FontStyle", "Bold"))
, (alignLeftName, ("Justification", "LeftAlign"))
, (alignRightName, ("Justification", "RightAlign"))
, (alignCenterName, ("Justification", "CenterAlign"))
, (headerName++"1", ("PointSize", "36"))
, (headerName++"2", ("PointSize", "30"))
, (headerName++"3", ("PointSize", "24"))
, (headerName++"4", ("PointSize", "18"))
, (headerName++"5", ("PointSize", "14"))
]
-- what is the most nested list type, if any?
(isBulletList, isOrderedList) = findList $ reverse $ splitBy (==' ') s
where
findList [] = (False, False)
findList (x:xs) | x == bulletListName = (True, False)
| x == orderedListName = (False, True)
| otherwise = findList xs
nBuls = countSubStrs bulletListName s
nOrds = countSubStrs orderedListName s
attrs' = numbering ++ listType ++ indent ++ attrs
where
numbering | isOrderedList = [("NumberingExpression", "^#.^t"), ("NumberingLevel", show nOrds)]
| otherwise = []
listType | isOrderedList && (not $ isInfixOf subListParName s)
= [("BulletsAndNumberingListType", "NumberedList")]
| isBulletList && (not $ isInfixOf subListParName s)
= [("BulletsAndNumberingListType", "BulletList")]
| otherwise = []
indent = [("LeftIndent", show indt)]
where
nBlockQuotes = countSubStrs blockQuoteName s
nDefLists = countSubStrs defListDefName s
indt = max 0 $ defaultListIndent*(nBuls + nOrds - 1) + defaultIndent*(nBlockQuotes + nDefLists)
props = inTags True "Properties" [] $ (basedOn $$ tabList $$ numbForm)
where
font = if isInfixOf codeBlockName s
then monospacedFont
else empty
basedOn = inTags False "BasedOn" [("type", "object")] (text "$ID/NormalParagraphStyle") $$ font
tabList = if isBulletList
then inTags True "TabList" [("type","list")] $ inTags True "ListItem" [("type","record")]
$ vcat [
inTags False "Alignment" [("type","enumeration")] $ text "LeftAlign"
, inTags False "AlignmentCharacter" [("type","string")] $ text "."
, selfClosingTag "Leader" [("type","string")]
, inTags False "Position" [("type","unit")] $ text
$ show $ defaultListIndent * (nBuls + nOrds)
]
else empty
makeNumb name = inTags False "NumberingFormat" [("type", "string")] (text name)
numbForm | isInfixOf lowerRomanName s = makeNumb "i, ii, iii, iv..."
| isInfixOf upperRomanName s = makeNumb "I, II, III, IV..."
| isInfixOf lowerAlphaName s = makeNumb "a, b, c, d..."
| isInfixOf upperAlphaName s = makeNumb "A, B, C, D..."
| otherwise = empty
in inTags True "ParagraphStyle" ([("Self", "ParagraphStyle/"++s), ("Name", s)] ++ attrs') props
-- | Convert a WriterState with its inline styles to the ICML listing of Character Styles.
charStylesToDoc :: WriterState -> Doc
charStylesToDoc st = vcat $ map makeStyle $ Set.toAscList $ inlineStyles st
where
makeStyle s =
let attrs = concat $ map (contains s) [
(strikeoutName, ("StrikeThru", "true"))
, (superscriptName, ("Position", "Superscript"))
, (subscriptName, ("Position", "Subscript"))
, (smallCapsName, ("Capitalization", "SmallCaps"))
]
attrs' | isInfixOf emphName s && isInfixOf strongName s = ("FontStyle", "Bold Italic") : attrs
| isInfixOf strongName s = ("FontStyle", "Bold") : attrs
| isInfixOf emphName s = ("FontStyle", "Italic") : attrs
| otherwise = attrs
props = inTags True "Properties" [] $
inTags False "BasedOn" [("type", "object")] (text "$ID/NormalCharacterStyle") $$ font
where
font =
if isInfixOf codeName s
then monospacedFont
else empty
in inTags True "CharacterStyle" ([("Self", "CharacterStyle/"++s), ("Name", s)] ++ attrs') props
-- | Escape colon characters as %3a
escapeColons :: String -> String
escapeColons (x:xs)
| x == ':' = "%3a" ++ escapeColons xs
| otherwise = x : escapeColons xs
escapeColons [] = []
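-- For example (illustrative): escapeColons "http://pandoc.org" == "http%3a//pandoc.org"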
-- | Convert a list of (identifier, url) pairs to the ICML listing of hyperlinks.
hyperlinksToDoc :: Hyperlink -> Doc
hyperlinksToDoc [] = empty
hyperlinksToDoc (x:xs) = hyp x $$ hyperlinksToDoc xs
where
hyp (ident, url) = hdest $$ hlink
where
hdest = selfClosingTag "HyperlinkURLDestination"
[("Self", "HyperlinkURLDestination/"++(escapeColons url)), ("Name","link"), ("DestinationURL",url), ("DestinationUniqueKey","1")] -- HyperlinkURLDestination with more than one colon crashes CS6
hlink = inTags True "Hyperlink" [("Self","uf-"++show ident), ("Name",url),
("Source","htss-"++show ident), ("Visible","true"), ("DestinationUniqueKey","1")]
$ inTags True "Properties" []
$ inTags False "BorderColor" [("type","enumeration")] (text "Black")
$$ (inTags False "Destination" [("type","object")]
$ text $ "HyperlinkURLDestination/"++(escapeColons (escapeStringForXML url))) -- HyperlinkURLDestination with more than one colon crashes CS6
-- | Convert a list of Pandoc blocks to ICML.
blocksToICML :: WriterOptions -> Style -> [Block] -> WS Doc
blocksToICML opts style lst = vcat `fmap` mapM (blockToICML opts style) lst
-- | Convert a Pandoc block element to ICML.
blockToICML :: WriterOptions -> Style -> Block -> WS Doc
blockToICML opts style (Plain lst) = parStyle opts style lst
blockToICML opts style (Para lst) = parStyle opts (paragraphName:style) lst
blockToICML opts style (CodeBlock _ str) = parStyle opts (codeBlockName:style) $ [Str str]
blockToICML _ _ (RawBlock f str)
| f == Format "icml" = return $ text str
| otherwise = return empty
blockToICML opts style (BlockQuote blocks) = blocksToICML opts (blockQuoteName:style) blocks
blockToICML opts style (OrderedList attribs lst) = listItemsToICML opts orderedListName style (Just attribs) lst
blockToICML opts style (BulletList lst) = listItemsToICML opts bulletListName style Nothing lst
blockToICML opts style (DefinitionList lst) = vcat `fmap` mapM (definitionListItemToICML opts style) lst
blockToICML opts style (Header lvl _ lst) =
let stl = (headerName ++ show lvl):style
in parStyle opts stl lst
blockToICML _ _ HorizontalRule = return empty -- we could insert a page break instead
blockToICML opts style (Table caption aligns widths headers rows) =
let style' = tableName : style
noHeader = all null headers
nrHeaders = if noHeader
then "0"
else "1"
nrRows = length rows
nrCols = if null rows
then 0
else length $ head rows
rowsToICML [] _ = return empty
rowsToICML (col:rest) rowNr =
liftM2 ($$) (colsToICML col rowNr (0::Int)) $ rowsToICML rest (rowNr+1)
colsToICML [] _ _ = return empty
colsToICML (cell:rest) rowNr colNr = do
let stl = if rowNr == 0 && not noHeader
then tableHeaderName:style'
else style'
alig = aligns !! colNr
stl' | alig == AlignLeft = alignLeftName : stl
| alig == AlignRight = alignRightName : stl
| alig == AlignCenter = alignCenterName : stl
| otherwise = stl
c <- blocksToICML opts stl' cell
let cl = return $ inTags True "Cell"
[("Name", show colNr ++":"++ show rowNr), ("AppliedCellStyle","CellStyle/Cell")] c
liftM2 ($$) cl $ colsToICML rest rowNr (colNr+1)
in do
let tabl = if noHeader
then rows
else headers:rows
cells <- rowsToICML tabl (0::Int)
let colWidths w = if w > 0
then [("SingleColumnWidth",show $ 500 * w)]
else []
let tupToDoc tup = selfClosingTag "Column" $ [("Name",show $ fst tup)] ++ (colWidths $ snd tup)
let colDescs = vcat $ map tupToDoc $ zip [0..nrCols-1] widths
let tableDoc = return $ inTags True "Table" [
("AppliedTableStyle","TableStyle/Table")
, ("HeaderRowCount", nrHeaders)
, ("BodyRowCount", show nrRows)
, ("ColumnCount", show nrCols)
] (colDescs $$ cells)
liftM2 ($$) tableDoc $ parStyle opts (tableCaptionName:style) caption
blockToICML opts style (Div _ lst) = blocksToICML opts style lst
blockToICML _ _ Null = return empty
-- | Convert a list of lists of blocks to ICML list items.
listItemsToICML :: WriterOptions -> String -> Style -> Maybe ListAttributes -> [[Block]] -> WS Doc
listItemsToICML _ _ _ _ [] = return empty
listItemsToICML opts listType style attribs (first:rest) = do
st <- get
put st{ listDepth = 1 + listDepth st}
let stl = listType:style
let f = listItemToICML opts stl True attribs first
let r = map (listItemToICML opts stl False attribs) rest
docs <- sequence $ f:r
s <- get
let maxD = max (maxListDepth s) (listDepth s)
put s{ listDepth = 1, maxListDepth = maxD }
return $ vcat docs
-- | Convert a list of blocks to ICML list items.
listItemToICML :: WriterOptions -> Style -> Bool-> Maybe ListAttributes -> [Block] -> WS Doc
listItemToICML opts style isFirst attribs item =
let makeNumbStart (Just (beginsWith, numbStl, _)) =
let doN DefaultStyle = []
doN LowerRoman = [lowerRomanName]
doN UpperRoman = [upperRomanName]
doN LowerAlpha = [lowerAlphaName]
doN UpperAlpha = [upperAlphaName]
doN _ = []
bw = if beginsWith > 1
then [beginsWithName ++ show beginsWith]
else []
in doN numbStl ++ bw
makeNumbStart Nothing = []
stl = if isFirst
then firstListItemName:style
else style
stl' = makeNumbStart attribs ++ stl
in if length item > 1
then do
let insertTab (Para lst) = blockToICML opts (subListParName:style) $ Para $ (Str "\t"):lst
insertTab block = blockToICML opts style block
f <- blockToICML opts stl' $ head item
r <- fmap vcat $ mapM insertTab $ tail item
return $ f $$ r
else blocksToICML opts stl' item
definitionListItemToICML :: WriterOptions -> Style -> ([Inline],[[Block]]) -> WS Doc
definitionListItemToICML opts style (term,defs) = do
term' <- parStyle opts (defListTermName:style) term
defs' <- vcat `fmap` mapM (blocksToICML opts (defListDefName:style)) defs
return $ term' $$ defs'
-- | Convert a list of inline elements to ICML.
inlinesToICML :: WriterOptions -> Style -> [Inline] -> WS Doc
inlinesToICML opts style lst = vcat `fmap` mapM (inlineToICML opts style) (mergeSpaces lst)
-- | Convert an inline element to ICML.
inlineToICML :: WriterOptions -> Style -> Inline -> WS Doc
inlineToICML _ style (Str str) = charStyle style $ text $ escapeStringForXML str
inlineToICML opts style (Emph lst) = inlinesToICML opts (emphName:style) lst
inlineToICML opts style (Strong lst) = inlinesToICML opts (strongName:style) lst
inlineToICML opts style (Strikeout lst) = inlinesToICML opts (strikeoutName:style) lst
inlineToICML opts style (Superscript lst) = inlinesToICML opts (superscriptName:style) lst
inlineToICML opts style (Subscript lst) = inlinesToICML opts (subscriptName:style) lst
inlineToICML opts style (SmallCaps lst) = inlinesToICML opts (smallCapsName:style) lst
inlineToICML opts style (Quoted SingleQuote lst) = inlinesToICML opts style $ [Str "‘"] ++ lst ++ [Str "’"]
inlineToICML opts style (Quoted DoubleQuote lst) = inlinesToICML opts style $ [Str "“"] ++ lst ++ [Str "”"]
inlineToICML opts style (Cite _ lst) = inlinesToICML opts style lst
inlineToICML _ style (Code _ str) = charStyle (codeName:style) $ text $ escapeStringForXML str
inlineToICML _ style Space = charStyle style space
inlineToICML _ style LineBreak = charStyle style $ text lineSeparator
inlineToICML _ style (Math _ str) = charStyle style $ text $ escapeStringForXML str --InDesign doesn't really do math
inlineToICML _ _ (RawInline f str)
| f == Format "icml" = return $ text str
| otherwise = return empty
inlineToICML opts style (Link lst (url, title)) = do
content <- inlinesToICML opts (linkName:style) lst
state $ \st ->
let ident = if null $ links st
then 1::Int
else 1 + (fst $ head $ links st)
newst = st{ links = (ident, url):(links st) }
cont = inTags True "HyperlinkTextSource"
[("Self","htss-"++show ident), ("Name",title), ("Hidden","false")] content
in (cont, newst)
inlineToICML opts style (Image alt target) = imageICML opts style alt target
inlineToICML opts style (Note lst) = footnoteToICML opts style lst
inlineToICML opts style (Span _ lst) = inlinesToICML opts style lst
-- | Convert a list of block elements to an ICML footnote.
footnoteToICML :: WriterOptions -> Style -> [Block] -> WS Doc
footnoteToICML opts style lst =
let insertTab (Para ls) = blockToICML opts (footnoteName:style) $ Para $ (Str "\t"):ls
insertTab block = blockToICML opts (footnoteName:style) block
in do
contents <- mapM insertTab lst
let number = inTags True "ParagraphStyleRange" [] $
inTags True "CharacterStyleRange" [] $ inTagsSimple "Content" "<?ACE 4?>"
return $ inTags True "CharacterStyleRange"
[("AppliedCharacterStyle","$ID/NormalCharacterStyle"), ("Position","Superscript")]
$ inTags True "Footnote" [] $ number $$ vcat contents
-- | Auxiliary function to merge Space elements into the adjacent Strs.
mergeSpaces :: [Inline] -> [Inline]
mergeSpaces ((Str s):(Space:((Str s'):xs))) = mergeSpaces $ Str(s++" "++s') : xs
mergeSpaces (Space:((Str s):xs)) = mergeSpaces $ Str (" "++s) : xs
mergeSpaces ((Str s):(Space:xs)) = mergeSpaces $ Str (s++" ") : xs
mergeSpaces (x:xs) = x : (mergeSpaces xs)
mergeSpaces [] = []
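-- For example (illustrative): mergeSpaces [Str "pan", Space, Str "doc"]
-- reduces to [Str "pan doc"].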
-- | Wrap a list of inline elements in an ICML Paragraph Style
parStyle :: WriterOptions -> Style -> [Inline] -> WS Doc
parStyle opts style lst =
let slipIn x y = if null y
then x
else x ++ " > " ++ y
stlStr = foldr slipIn [] $ reverse style
stl = if null stlStr
then ""
else "ParagraphStyle/" ++ stlStr
attrs = ("AppliedParagraphStyle", stl)
attrs' = if firstListItemName `elem` style
then let ats = attrs : [("NumberingContinue", "false")]
begins = filter (isPrefixOf beginsWithName) style
in if null begins
then ats
else let i = maybe "" id $ stripPrefix beginsWithName $ head begins
in ("NumberingStartAt", i) : ats
else [attrs]
in do
content <- inlinesToICML opts [] lst
let cont = inTags True "ParagraphStyleRange" attrs'
$ mappend content $ selfClosingTag "Br" []
state $ \st -> (cont, st{ blockStyles = Set.insert stlStr $ blockStyles st })
-- | Wrap a Doc in an ICML Character Style.
charStyle :: Style -> Doc -> WS Doc
charStyle style content =
let (stlStr, attrs) = styleToStrAttr style
doc = inTags True "CharacterStyleRange" attrs $ inTagsSimple "Content" $ flush content
in do
state $ \st ->
let styles = if null stlStr
then st
else st{ inlineStyles = Set.insert stlStr $ inlineStyles st }
in (doc, styles)
-- | Transform a Style to a tuple of String (eliminating duplicates and ordered) and corresponding attribute.
styleToStrAttr :: Style -> (String, [(String, String)])
styleToStrAttr style =
let stlStr = unwords $ Set.toAscList $ Set.fromList style
stl = if null style
then "$ID/NormalCharacterStyle"
else "CharacterStyle/" ++ stlStr
attrs = [("AppliedCharacterStyle", stl)]
in (stlStr, attrs)
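-- For example (illustrative): styleToStrAttr ["Bold", "Italic", "Bold"] yields
-- ("Bold Italic", [("AppliedCharacterStyle", "CharacterStyle/Bold Italic")]),
-- since duplicate style names are dropped and the rest are sorted before joining.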
-- | Assemble an ICML Image.
imageICML :: WriterOptions -> Style -> [Inline] -> Target -> WS Doc
imageICML _ style _ (linkURI, _) =
let imgWidth = 300::Int --TODO: set width, height dynamically as in Docx.hs
imgHeight = 200::Int
scaleFact = show (1::Double) --TODO: set scaling factor so image is scaled exactly to imgWidth x imgHeight
hw = show $ imgWidth `div` 2
hh = show $ imgHeight `div` 2
qw = show $ imgWidth `div` 4
qh = show $ imgHeight `div` 4
uriPrefix = if isURI linkURI then "" else "file:"
(stlStr, attrs) = styleToStrAttr style
props = inTags True "Properties" [] $ inTags True "PathGeometry" []
$ inTags True "GeometryPathType" [("PathOpen","false")]
$ inTags True "PathPointArray" []
$ vcat [
selfClosingTag "PathPointType" [("Anchor", "-"++qw++" -"++qh),
("LeftDirection", "-"++qw++" -"++qh), ("RightDirection", "-"++qw++" -"++qh)]
, selfClosingTag "PathPointType" [("Anchor", "-"++qw++" "++qh),
("LeftDirection", "-"++qw++" "++qh), ("RightDirection", "-"++qw++" "++qh)]
, selfClosingTag "PathPointType" [("Anchor", qw++" "++qh),
("LeftDirection", qw++" "++qh), ("RightDirection", qw++" "++qh)]
, selfClosingTag "PathPointType" [("Anchor", qw++" -"++qh),
("LeftDirection", qw++" -"++qh), ("RightDirection", qw++" -"++qh)]
]
image = inTags True "Image"
[("Self","ue6"), ("ItemTransform", scaleFact++" 0 0 "++scaleFact++" -"++qw++" -"++qh)]
$ vcat [
inTags True "Properties" [] $ inTags True "Profile" [("type","string")] $ text "$ID/Embedded"
$$ selfClosingTag "GraphicBounds" [("Left","0"), ("Top","0"), ("Right", hw), ("Bottom", hh)]
, selfClosingTag "Link" [("Self", "ueb"), ("LinkResourceURI", uriPrefix++linkURI)]
]
doc = inTags True "CharacterStyleRange" attrs
$ inTags True "Rectangle" [("Self","uec"), ("ItemTransform", "1 0 0 1 "++qw++" -"++qh)]
$ (props $$ image)
in do
state $ \st -> (doc, st{ inlineStyles = Set.insert stlStr $ inlineStyles st } )
| ddssff/pandoc | src/Text/Pandoc/Writers/ICML.hs | gpl-2.0 | 25,507 | 0 | 23 | 7,556 | 7,209 | 3,816 | 3,393 | 449 | 11 |
{-# LANGUAGE MagicHash #-}
module Main where
import GHC.Ptr
import GHC.Exts
import Foreign.Ptr
main = print (Ptr nullAddr# == nullPtr)
| urbanslug/ghc | testsuite/tests/ghci/should_run/T2589.hs | bsd-3-clause | 137 | 0 | 8 | 22 | 38 | 22 | 16 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Monad (unless)
import qualified Data.Map.Lazy as Map
import qualified Data.Text as T
import Data.Text.IO (readFile)
import Lens.Family2
import Lens.Family2.Stock (at, _Just)
import Lens.Family2.Unchecked (iso)
import Prelude hiding (readFile)
import System.Exit (exitFailure)
import Test.Dwergaz
import qualified TOML
import TOML.Lens
allEqual :: Eq a => [a] -> Bool
allEqual (x:xs) = all (== x) xs
allEqual [] = error "allEqual: empty list"
alist
:: (Ord k1, Ord k2, Functor f)
=> LensLike f [(k1, v1)] [(k2, v2)] (Map.Map k1 v1) (Map.Map k2 v2)
alist = iso Map.fromList Map.toList
mapAt
:: Applicative f
=> T.Text
-> (Map.Map T.Text TOML.Value -> f (Map.Map T.Text TOML.Value))
-> Map.Map T.Text TOML.Value
-> f (Map.Map T.Text TOML.Value)
mapAt k = at k . _Just . _Table . alist
listAt
:: Applicative f
=> T.Text
-> ([TOML.Value] -> f [TOML.Value])
-> Map.Map T.Text TOML.Value
-> f (Map.Map T.Text TOML.Value)
listAt k = at k . _Just . _List
testTableKey :: [(T.Text, TOML.Value)] -> Test
testTableKey kv
= Expect "'key' from 'table' == Just \"value\""
(==) expected actual
where
expected = Just "value"
actual = kv ^? alist . mapAt "table" . at "key" . _Just . _String
testTableZoo :: [(T.Text, TOML.Value)] -> Test
testTableZoo kv
= Expect "'zoo' from 'table' == Nothing"
(==) expected actual
where
expected = Nothing
actual = kv ^? alist . mapAt "table" . at "zoo" . _Just . _String
testTableSubtableKey :: [(T.Text, TOML.Value)] -> Test
testTableSubtableKey kv
= Expect "'key' from 'subtable' from 'table' == Just \"another value\""
(==) expected actual
where
expected = Just "another value"
actual = kv ^? alist . mapAt "table" . mapAt "subtable" . at "key" . _Just . _String
testTableInlineNameFirst :: [(T.Text, TOML.Value)] -> Test
testTableInlineNameFirst kv
= Expect "'first' from 'name' from 'inline' from 'table' == \"Tom\""
(==) expected actual
where
expected = Just "Tom"
actual = kv ^? alist . mapAt "table" . mapAt "inline" . mapAt "name" . at "first" . _Just . _String
testTableInlinePointY :: [(T.Text, TOML.Value)] -> Test
testTableInlinePointY kv
= Expect "'y' from 'point' from 'inline' from 'table' == Just 2"
(==) expected actual
where
expected = Just 2
actual = kv ^? alist . mapAt "table" . mapAt "inline" . mapAt "point" . at "y" . _Just . _Integer
testStringBasicBasic :: [(T.Text, TOML.Value)] -> Test
testStringBasicBasic kv
= Expect "'basic' from 'basic' from 'string' == <some escaped nonsense>"
(==) expected actual
where
expected = Just "I'm a string. \"You can quote me\". Name\tJos\233\nLocation\tSF."
actual = kv ^? alist . mapAt "string" . mapAt "basic" . at "basic" . _Just . _String
testStringMultiline :: [(T.Text, TOML.Value)] -> Test
testStringMultiline kv
= Predicate "'key1', 'key2', and 'key3' from 'multiline' from 'string' are all the same"
allEqual
[actual1, actual2, actual3]
where
actual1 = kv ^? alist . mapAt "string" . mapAt "multiline" . at "key1" . _Just . _String
actual2 = kv ^? alist . mapAt "string" . mapAt "multiline" . at "key2" . _Just . _String
actual3 = kv ^? alist . mapAt "string" . mapAt "multiline" . at "key3" . _Just . _String
testStringMultilineContinued :: [(T.Text, TOML.Value)] -> Test
testStringMultilineContinued kv
= Predicate "'key1', 'key2', and 'key3' from 'continued' from 'multiline' from 'string' are all the same"
allEqual
[actual1, actual2, actual3]
where
actual1 = kv ^? alist . mapAt "string" . mapAt "multiline" . mapAt "continued" . at "key1" . _Just . _String
actual2 = kv ^? alist . mapAt "string" . mapAt "multiline" . mapAt "continued" . at "key2" . _Just . _String
actual3 = kv ^? alist . mapAt "string" . mapAt "multiline" . mapAt "continued" . at "key3" . _Just . _String
testArrayKey1 :: [(T.Text, TOML.Value)] -> Test
testArrayKey1 kv
= Expect "'key1' from 'array' == [1, 2, 3]"
(==) expected actual
where
expected = [1, 2, 3]
actual = kv ^.. alist . mapAt "array" . listAt "key1" . traverse . _Integer
runTests :: [(T.Text, TOML.Value)] -> [Result]
runTests kv = runTest . ($ kv) <$> tests
where
tests = [ testTableKey
, testTableZoo
, testTableSubtableKey
, testTableInlineNameFirst
, testTableInlinePointY
, testStringBasicBasic
, testStringMultiline
, testStringMultilineContinued
, testArrayKey1
]
readTOMLFile :: String -> IO [(T.Text, TOML.Value)]
readTOMLFile file = readFile file >>= parse >>= handleError
where
parse = pure . TOML.parseTOML
handleError = either (error . show) pure
main :: IO ()
main = do
ex <- readTOMLFile "./example/example-v0.4.0.toml"
let rs = runTests ex
mapM_ print rs
unless (all isPassed rs) exitFailure
| xngns/lens-toml-parser | tests/Main.hs | isc | 5,228 | 0 | 13 | 1,344 | 1,639 | 858 | 781 | -1 | -1 |
{-
rrdgraph-haskell – Haskell DSL for rendering RRD graphs using RRDtool
Copyright © 2011 Johan Kiviniemi <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-}
-- The only orphan instances are for the module being tested.
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Data.RRDGraph.Tests.VDef (tests_VDef)
where
import Data.RRDGraph.CDef
import Data.RRDGraph.Internal
import Data.RRDGraph.Tests.CDef () -- the Arbitrary CDef instance
import Data.RRDGraph.VDef
import Control.Applicative
import Test.Framework (Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.TH (testGroupGenerator)
import Test.QuickCheck
instance Arbitrary VDef where
arbitrary = VDefStack <$> arbCDefs <*> arbStackItems
where
arbCDefs :: Gen [CDef]
arbCDefs = take 10 <$> arbitrary
arbStackItems :: Gen [StackItem]
arbStackItems = take 5 . fromNonEmpty <$> arbitrary
shrink (VDefStack cDefs stack) =
concat [ liftA2 VDefStack (shrink cDefs) [stack]
, liftA2 VDefStack [cDefs] (shrStack stack)
]
where
shrStack = wrapShrink NonEmpty fromNonEmpty shrink
tests_VDef :: Test
tests_VDef = $(testGroupGenerator)
prop_ops1 :: CDef -> Property
prop_ops1 cDef =
conjoin . flip map ops $ \(name, op, stack) ->
assert (name ++ ": ") (==) (VDefStack [cDef] stack) (op cDef)
where
ops = [ ("vMaximum", vMaximum, ["MAXIMUM"])
, ("vMinimum", vMinimum, ["MINIMUM"])
, ("vAverage", vAverage, ["AVERAGE"])
, ("vStDev", vStDev, ["STDEV"])
, ("vLast", vLast, ["LAST"])
, ("vFirst", vFirst, ["FIRST"])
, ("vTotal", vTotal, ["TOTAL"])
, ("vLslSlope", vLslSlope, ["LSLSLOPE"])
, ("vLslInt", vLslInt, ["LSLINT"])
, ("vLslCorrel", vLslCorrel, ["LSLCORREL"])
]
prop_ops2 :: CDef -> CDefNum -> Property
prop_ops2 cDef n =
conjoin . flip map ops $ \(name, op, stack) ->
let stack' = (StackItem . numericField) n : stack
in assert (name ++ ": ") (==) (VDefStack [cDef] stack') (op cDef n)
where
ops = [ ("vPercent", vPercent, ["PERCENT"])
, ("vPercentNAN", vPercentNAN, ["PERCENTNAN"])
]
-- Helpers.
assert :: Show a => String -> (a -> a -> Bool) -> a -> a -> Property
assert prefix op expected actual =
let message = prefix ++ "Expected " ++ show expected
++ ", got " ++ show actual
in printTestCase message (expected `op` actual)
fromNonEmpty :: NonEmptyList a -> [a]
fromNonEmpty (NonEmpty xs) = xs
| ion1/rrdgraph-haskell | Data/RRDGraph/Tests/VDef.hs | isc | 3,351 | 0 | 14 | 764 | 763 | 439 | 324 | 53 | 1 |
{-# LANGUAGE FlexibleContexts #-}
-- | Facilities for inspecting the data dependencies of a program.
module Futhark.Analysis.DataDependencies
( Dependencies
, dataDependencies
)
where
import Data.Monoid
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Prelude
import Futhark.Representation.AST
-- | A mapping from a variable name @v@, to those variables on which
-- the value of @v@ is dependent. The intuition is that we could
-- remove all other variables, and @v@ would still be computable.
-- This also includes names bound in loops or by lambdas.
type Dependencies = M.Map VName Names
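-- For instance, for a body containing @let y = x + 1@ followed by
-- @let z = y * y@, the entry for @z@ would (as a sketch) include at least
-- @y@ and @x@, because the dependencies already recorded for @y@ are folded
-- into those of @z@.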
-- | Compute the data dependencies for an entire body.
dataDependencies :: Attributes lore => Body lore -> Dependencies
dataDependencies = dataDependencies' M.empty
dataDependencies' :: Attributes lore =>
Dependencies -> Body lore -> Dependencies
dataDependencies' startdeps = foldl grow startdeps . bodyStms
where grow deps (Let pat _ (If c tb fb _)) =
let tdeps = dataDependencies' deps tb
fdeps = dataDependencies' deps fb
cdeps = depsOf deps c
comb (pe, tres, fres) =
(patElemName pe,
S.unions $ [freeIn pe, cdeps, depsOf tdeps tres, depsOf fdeps fres] ++
map (depsOfVar deps) (S.toList $ freeIn pe))
branchdeps =
M.fromList $ map comb $ zip3 (patternValueElements pat)
(bodyResult tb)
(bodyResult fb)
in M.unions [branchdeps, deps, tdeps, fdeps]
grow deps (Let pat _ e) =
let free = freeIn pat <> freeInExp e
freeDeps = S.unions $ map (depsOfVar deps) $ S.toList free
in M.fromList [ (name, freeDeps) | name <- patternNames pat ] `M.union` deps
depsOf :: Dependencies -> SubExp -> Names
depsOf _ (Constant _) = S.empty
depsOf deps (Var v) = depsOfVar deps v
depsOfVar :: Dependencies -> VName -> Names
depsOfVar deps name = S.insert name $ M.findWithDefault S.empty name deps
| ihc/futhark | src/Futhark/Analysis/DataDependencies.hs | isc | 2,057 | 0 | 16 | 546 | 547 | 286 | 261 | 37 | 2 |
module Y2020.M10.D26.Exercise where
{--
Great! We have the airbases of the world. At some point we will want to
look at alliances of the world:
https://en.wikipedia.org/wiki/List_of_military_alliances
But FRIST! ... let's export our graph as a set of cypher statements
--}
import Data.Map (Map)
import Data.Set (Set)
import Graph.Query
import Graph.JSON.Cypher
import Y2020.M10.D12.Solution -- to load the airbases
import Y2020.M10.D14.Solution -- to load countries-continents
import Y2020.M10.D15.Solution -- for the country map
import Y2020.M10.D16.Solution -- cyphed countries-continents
import Y2020.M10.D20.Solution -- for cyphed stuff
import Y2020.M10.D23.Solution -- for ununicoded base names
-- and a loader, free of charge:
loadAll :: IO (Map Icao AirBase, CountryMap, Set Country, Set Country)
loadAll =
loadBases (Y2020.M10.D12.Solution.workingDir ++ file) >>= \bs ->
countriesByContinent (Y2020.M10.D14.Solution.workingDir ++ cbc) >>= \conti ->
let cm = countryMap conti
mbs = byICAO bs
newabm = firstPass mbs
ccs = countries fst cm
abc = countries (country . snd) newabm
nonu = stripNonAscii newabm in
return (nonu, cm, ccs, abc)
-- With the above, you should be able to implement the following
countriesCypher :: CountryMap -> [Cypher]
countriesCypher = undefined
countriesCorrections :: Set Country -> Set Country -> [Cypher]
countriesCorrections = undefined
airbaseCypher :: Map Icao AirBase -> [Cypher]
airbaseCypher = undefined
saveCypher :: FilePath -> [Cypher] -> IO ()
saveCypher = undefined
-- so we should be able to run this, no problems:
go :: IO ()
go = loadAll >>= \(abm, cm, ccs, abc) ->
saveCypher "01-raw-countries.cyp" (countriesCypher cm) >>
putStrLn "Saved countries" >>
saveCypher "02-countries-corrections.cyp" (countriesCorrections ccs abc) >>
putStrLn "Saved corrections" >>
saveCypher "03-airbases.cyp" (airbaseCypher abm) >>
putStrLn "Yeah. We're done."
| geophf/1HaskellADay | exercises/HAD/Y2020/M10/D26/Exercise.hs | mit | 1,998 | 0 | 15 | 367 | 447 | 253 | 194 | 38 | 1 |
module Integration.Model.UserSpec (spec) where
import TestImport hiding (assertEqual)
import Test.HUnit (assertEqual, assertFailure)
import qualified Model.User as UserM
-------------------------------------------------------------------------------
spec :: Spec
spec = withApp $ do
describe "getPaginated" $ do
it "getting first page of users" $ do
addAll
(_, users) <- runDB $ UserM.getPaginated 0
liftIO $ assertEqual "is right amount of users" 4 (length users)
it "getting out of bound of users" $ do
addAll
(_, users) <- runDB $ UserM.getPaginated 100
case users of
[] -> return ()
_ -> liftIO $ assertFailure "invalid amount of users retrieved"
where
tmpUser1 = User "[email protected]" Nothing Nothing False
tmpUser2 = User "[email protected]" Nothing Nothing False
tmpUser3 = User "[email protected]" Nothing Nothing False
tmpUser4 = User "[email protected]" Nothing Nothing False
tmpUsers = [tmpUser1, tmpUser2, tmpUser3, tmpUser4]
addAll = runDB $ forM_ tmpUsers (void . insert)
| rzetterberg/alven | src/test/Integration/Model/UserSpec.hs | mit | 1,167 | 0 | 18 | 317 | 286 | 148 | 138 | 23 | 2 |
-- Sieve of Eratosthenes
-- http://www.codewars.com/kata/55f0b69fe3ef582c4100008a/
module Sieve where
primes :: Int -> [Int]
primes n | n <= 1 = []
| otherwise = filter isPrime $ 2:[3, 5..n]
where isPrime n = all (\d -> n `mod` d /= 0) [2 .. floor . sqrt . fromIntegral $ n]
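-- A couple of illustrative cases: primes 10 == [2, 3, 5, 7] and primes 1 == []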
| gafiatulin/codewars | src/6 kyu/Sieve.hs | mit | 296 | 0 | 11 | 72 | 117 | 63 | 54 | 5 | 1 |
{--
Important
[] Read defaults from template file (JSON)
[] Read data from input file (JSON)
[] Receive more than 1024 bytes
[] Split up in submodules
Nice to have
[] GUI
[] |- Data input: auto-generate a form using
--}
{-# LANGUAGE OverloadedStrings #-}
import Network.Simple.TCP
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Builder as B
import qualified Data.Time.Clock as DT
import qualified Data.Time.LocalTime as DT
import qualified Data.Time.Format as DT
-- import System.Locale (defaultTimeLocale)
import Numeric (showHex)
import Hexdump
-- stringUtf8 :: String -> Builder
{--
# Time
http://tab.snarc.org/posts/haskell/2011-12-16-date-in-haskell.html
## Data.Time.Clock.POSIX
posixSecondsToUTCTime and utcTimeToPOSIXSeconds.
## Data.Time.LocalTime
utcToZonedTime and zonedTimeToUTC
getCurrentTime :: IO UTCTime
getCurrentTimeZone :: IO TimeZone
getZonedTime :: IO ZonedTime
## Data.Time.Format
parseTime and formatTime
_You need to import System.Locale from the old-locale package to get the defaultTimeLocale_
--}
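-- A small sketch of the formatting used below (see 'hl7Time' and
-- 'defaultTimeFormat'): formatting a zoned time with "%Y%m%d%H%M%S"
-- yields a stamp such as "20150923115959".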
type CharacterSet = String
utf8 :: CharacterSet
utf8 = "UNICODE UTF-8"
latin1 :: CharacterSet
latin1 = "8895/1"
ascii :: CharacterSet
ascii = "ASCII"
defaultHL7InFolder :: String
defaultHL7InFolder = "/Users/dvekeman/Documents/Middleware/_hl7/IN/UTF8"
type Version = String
v251 = "2.5.1" :: Version
data MSH = MSH { dateTime :: DT.ZonedTime
, version :: Version
, charset :: CharacterSet
} deriving Show
data SPM = SPM { specimenId :: String
}
data ORC = ORC { orderId :: String
, orderTime :: DT.ZonedTime
}
data OBR = OBR { universalServiceId :: String
}
mkMSH t = MSH { dateTime = t
, version = v251
, charset = utf8
}
mkSPM specimenId = SPM { specimenId = specimenId }
mkORC orderId orderTime = ORC { orderTime = orderTime
, orderId = orderId
}
mkOBR universalServiceId = OBR { universalServiceId = universalServiceId }
fmtORCAndOBR orderId orderTime universalServiceIds =
let formattedOBRs = map (fmtOBR . mkOBR) universalServiceIds
orcs = map (\_ -> mkORC orderId orderTime) universalServiceIds
formattedORCs = map fmtORC orcs
formattedORCAndOBRZipped = zip formattedORCs formattedOBRs
      formattedORCAndOBRs = map (\(orc, obr) -> orc <> cr <> obr) formattedORCAndOBRZipped
in foldr (\formattedValue acc -> formattedValue <> cr <> acc) (B.byteString B.empty) formattedORCAndOBRs
fmtMSH :: MSH -> B.Builder
fmtMSH msh = B.stringUtf8 $ "MSH|^~\\&|||||" ++ (hl7Time $ dateTime msh) ++ "||OML^O33^OML_O33|603301|P|"++ (version msh) ++ "||||||" ++ (charset msh)
fmtSPM :: SPM -> B.Builder
fmtSPM spm = B.stringUtf8 $ "SPM||"++ (specimenId spm) ++"||FFPE"
fmtORC :: ORC -> B.Builder
fmtORC orc = B.stringUtf8 $ "ORC|NW|"++ (orderId orc) ++"|||||||" ++ (hl7Time $ orderTime orc)
fmtOBR :: OBR -> B.Builder
fmtOBR obr = B.stringUtf8 $ "OBR||||" ++ (universalServiceId obr)
defaultTimeFormat :: String
defaultTimeFormat = "%Y%m%d%H%M%S"
hl7Time :: DT.ZonedTime -> String
hl7Time = DT.formatTime DT.defaultTimeLocale defaultTimeFormat
cr :: B.Builder
cr = B.word8 13
lf :: B.Builder
lf = B.word8 10
sb :: B.Builder
sb = B.word8 0x0B
eb :: B.Builder
eb = B.word8 0x1C
data Settings = Settings { hostname :: String
, port :: Int
}
middleware :: Settings
middleware = Settings "127.0.0.1" 2575
main :: IO ()
main = do
putStrLn "From file? (default: no)"
useFile <- readS ""
case useFile of
"" -> fromScratch
    _ -> fromFile
fromScratch :: IO ()
fromScratch = do
now <- DT.getZonedTime
putStrLn "Start Id for Order & Sample (default 0)"
startId <- readI 0
putStrLn "Number of samples (default 10): "
nrOfRequests <- readI 10
putStrLn "Universal Service Id(s): eg 101X,501X (default 101X,501X)"
univSvcIds <- readS "101X,501X"
let univSvcIdList = wordsWhen (== ',') univSvcIds
putStrLn "Order Time: format yyyymmddHHMMSS, eg 20150923115959 (default now)"
orderTime <- readT now
sequence_ $ map (fromScratchRequest univSvcIdList orderTime) (take nrOfRequests [startId..])
fromScratchRequest :: [String] -> DT.ZonedTime -> Int -> IO ()
fromScratchRequest univSvcIdList orderTime startId = do
now <- DT.getZonedTime
let msh = fmtMSH $ mkMSH now
spm = fmtSPM $ mkSPM ("S" ++ show startId)
orcAndObr = fmtORCAndOBR ("O" ++ show startId) orderTime univSvcIdList
requestData = msh <> cr <> spm <> cr <> orcAndObr
sendRequest requestData
fromFile :: IO ()
fromFile = do
putStrLn $ "File name (default path: " ++ defaultHL7InFolder ++ ")"
fname <- readS "Blah.txt"
requestData <- readFile (defaultHL7InFolder ++ "/" ++ fname)
sendRequest (B.stringUtf8 requestData)
sendRequest :: B.Builder -> IO ()
sendRequest requestData = do
connect (hostname middleware) (show $ port middleware) $ \(connectionSocket, remoteAddr) -> do
putStrLn $ "Sending " ++ (show $ toBS requestData)
let request = sb <> requestData <> eb <> cr
putStrLn $ "Raw hex data \n" ++ (prettyHex $ toBS request)
send connectionSocket $ toBS $ request
putStrLn "... waiting for response ..."
response <- recv connectionSocket 1024
case response of
Just responseData -> do
putStrLn $ "Raw hex response data \n" ++ (prettyHex $ responseData)
putStrLn $ show $ prettyPrintHL7 responseData
Nothing -> putStrLn "|- Nothing received back -|"
readS :: String -> IO String
readS defaultValue = do
value <- getLine
putStrLn $ "Read: " ++ show value
let result = sfoo defaultValue value
putStrLn $ "Read result: " ++ show result
return result
readI :: Int -> IO Int
readI defaultValue = do
value <- getLine
return $ foo defaultValue value
readB :: Bool -> IO Bool
readB defaultValue = do
value <- getLine
return $ foo defaultValue value
readT :: DT.ZonedTime -> IO DT.ZonedTime
readT defaultValue = do
  value <- getLine
  putStrLn $ "Value is: '" ++ value ++ "'"
  case value of
    "" -> do
      putStrLn "empty"
      return defaultValue
    _ -> do
      putStrLn "Not empty"
      return $ DT.parseTimeOrError True DT.defaultTimeLocale defaultTimeFormat value
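-- Default-or-input helpers: return the default when the user enters an empty
-- line; otherwise keep the raw string (sfoo) or parse it via Read (foo).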
sfoo :: String -> String -> String
sfoo defaultValue "" = defaultValue
sfoo _ s = s
foo :: Read a => a -> String -> a
foo defaultValue "" = defaultValue
foo _ s = read s
toBS :: B.Builder -> B.ByteString
toBS = BL.toStrict . B.toLazyByteString
prettyPrint :: B.ByteString -> String
prettyPrint = concat . map (flip showHex "") . B.unpack
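-- Strip the MLLP frame from a response: drop the leading <VT> byte and keep
-- everything up to the <FS> terminator.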
prettyPrintHL7 :: B.ByteString -> B.ByteString
prettyPrintHL7 = B.takeWhile (\w -> w /= 0x1C) . B.drop 1
wordsWhen :: (Char -> Bool) -> String -> [String]
wordsWhen p s = case dropWhile p s of
"" -> []
s' -> w : wordsWhen p s''
where (w, s'') = break p s'
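-- Local (<>) for older base; with base >= 4.11 (GHC >= 8.4) the Prelude also
-- exports (<>), so this definition would clash and should then be removed
-- (or Prelude's (<>) hidden).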
infixr 4 <>
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
| tinkhaven-organization/hl7-client | src/Main.hs | mit | 7,281 | 19 | 18 | 1,807 | 1,997 | 983 | 1,014 | 160 | 2 |
module Unison.Var where
import Data.Set (Set)
import Data.Text (Text)
import qualified Data.Set as Set
-- | A class for variables. Variables may have auxiliary information which
-- may not form part of their identity according to `Eq` / `Ord`. Laws:
--
-- * `name (named n) == n`:
-- `name` returns the name set by `named`.
-- * `Set.notMember (freshIn vs v) vs`:
-- `freshIn` returns a variable not used in the `Set`
-- * `name (freshIn vs v) == name v`:
-- `freshIn` does not alter the name
-- * `Set.notMember (qualifiedName $ freshIn vs v) (Set.map qualifiedName vs)`:
-- `qualifiedName` incorporates all additional id info from freshening into
-- the name of the variable.
-- * `clear (freshIn vs v) === clear (freshIn vs (named (name v)))`:
-- `clear` strips any auxiliary information and returns a variable that behaves
-- as if it has been built solely via calls to `named` and `freshIn`. The `===`
-- is full equality, comparing any auxiliary info as well as qualified name.
-- * `clear v == v`, according to Haskell equality. In other words, no auxiliary
--   info attached to `v` values may participate in the `Eq` or `Ord` instances;
-- it is 'just' metadata.
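--
-- (An illustrative sketch of the first two laws as executable predicates
-- follows the class declaration below.)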
--
class (Eq v, Ord v) => Var v where
named :: Text -> v
name :: v -> Text
clear :: v -> v
qualifiedName :: v -> Text
freshIn :: Set v -> v -> v
freshenId :: Word -> v -> v
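-- Illustrative sketch (not part of the original module): the first two laws
-- above written as plain Boolean predicates, e.g. for a property test. The
-- helper names and the witness-argument style are assumptions made here.
lawNameNamed :: Var v => v -> Text -> Bool
lawNameNamed witness n = name (named n `asTypeOf` witness) == n
lawFreshNotMember :: Var v => Set v -> v -> Bool
lawFreshNotMember vs v = Set.notMember (freshIn vs v) vs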
shortName :: Var v => v -> Text
shortName v | named (name v) == v = name v
shortName v = qualifiedName v
freshes :: Var v => Set v -> [v] -> [v]
freshes _ [] = []
freshes used (h:t) =
let h' = freshIn used h
in h' : freshes (Set.insert h' used) t
freshInBoth :: Var v => Set v -> Set v -> v -> v
freshInBoth vs1 vs2 = freshIn vs1 . freshIn vs2
freshNamed :: Var v => Set v -> Text -> v
freshNamed used n = freshIn used (named n)
| nightscape/platform | shared/src/Unison/Var.hs | mit | 1,846 | 0 | 11 | 423 | 389 | 206 | 183 | 23 | 1 |