code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, ScopedTypeVariables, MagicHash #-}
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.List
-- Copyright : (c) The University of Glasgow 1994-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The List data type and its operations
--
-----------------------------------------------------------------------------
module GHC.List (
-- [] (..), -- built-in syntax; can't be used in export list
map, (++), filter, concat,
head, last, tail, init, uncons, null, length, (!!),
foldl, foldl', foldl1, foldl1', scanl, scanl1, scanl', foldr, foldr1,
scanr, scanr1, iterate, repeat, replicate, cycle,
take, drop, sum, product, maximum, minimum, splitAt, takeWhile, dropWhile,
span, break, reverse, and, or,
any, all, elem, notElem, lookup,
concatMap,
zip, zip3, zipWith, zipWith3, unzip, unzip3,
errorEmptyList,
) where
import Data.Maybe
import GHC.Base
import GHC.Num (Num(..))
import GHC.Integer (Integer)
infixl 9 !!
infix 4 `elem`, `notElem`
--------------------------------------------------------------
-- List-manipulation functions
--------------------------------------------------------------
-- | Extract the first element of a list, which must be non-empty.
head :: [a] -> a
head (x:_) = x
head [] = badHead
-- NOINLINE until phase 1 so the "head/build" and "head/augment"
-- rules below get a chance to fire first.
{-# NOINLINE [1] head #-}

-- Shared bottom for the empty-list case, kept as a separate top-level
-- value so the rewrite rules can mention it without duplicating the
-- error call.
badHead :: a
badHead = errorEmptyList "head"

-- This rule is useful in cases like
-- head [y | (x,y) <- ps, x==t]
{-# RULES
"head/build" forall (g::forall b.(a->b->b)->b->b) .
  head (build g) = g (\x _ -> x) badHead
"head/augment" forall xs (g::forall b. (a->b->b) -> b -> b) .
  head (augment g xs) = g (\x _ -> x) (head xs)
#-}
-- | Decompose a list into its head and tail. Yields 'Nothing' for an
-- empty list, and @'Just' (x, xs)@ — head and tail — otherwise.
--
-- @since 4.8.0.0
uncons :: [a] -> Maybe (a, [a])
uncons ys = case ys of
  []       -> Nothing
  x : rest -> Just (x, rest)
-- | Extract the elements after the head of a list, which must be
-- non-empty; raises the standard empty-list error otherwise.
tail :: [a] -> [a]
tail ys = case ys of
  _ : rest -> rest
  []       -> errorEmptyList "tail"
-- | Extract the last element of a list, which must be finite and non-empty.
last :: [a] -> a
#ifdef USE_REPORT_PRELUDE
last [x] = x
last (_:xs) = last xs
last [] = errorEmptyList "last"
#else
-- use foldl to allow fusion
-- The error value sits in the accumulator position; it is only forced
-- when the list turns out to be empty.
last = foldl (\_ x -> x) (errorEmptyList "last")
#endif
-- | Return all the elements of a list except the last one.
-- The list must be non-empty.
init :: [a] -> [a]
#ifdef USE_REPORT_PRELUDE
init [x] = []
init (x:xs) = x : init xs
init [] = errorEmptyList "init"
#else
-- eliminate repeated cases
-- init' carries the previous element, so after the first cons the
-- empty/non-empty distinction is made only once per cell.
init [] = errorEmptyList "init"
init (x:xs) = init' x xs
  where init' _ [] = []
        init' y (z:zs) = y : init' z zs
#endif
-- | Test whether a list is empty.
null :: [a] -> Bool
null ys = case ys of
  []    -> True
  _ : _ -> False
-- | /O(n)/. 'length' returns the length of a finite list as an 'Int'.
-- It is an instance of the more general 'Data.List.genericLength',
-- the result type of which may be any kind of number.
{-# NOINLINE [1] length #-}
length :: [a] -> Int
length xs = lenAcc xs 0

-- Tail-recursive worker with an explicit accumulator.
lenAcc :: [a] -> Int -> Int
lenAcc [] n = n
lenAcc (_:ys) n = lenAcc ys (n+1)

-- Early phases rewrite length into a foldr form so it can fuse with a
-- 'build' producer; if no fusion happens, phase 1 turns it back into
-- the plain accumulator loop.
{-# RULES
"length" [~1] forall xs . length xs = foldr lengthFB idLength xs 0
"lengthList" [1] foldr lengthFB idLength = lenAcc
#-}

-- The lambda form turns out to be necessary to make this inline
-- when we need it to and give good performance.
{-# INLINE [0] lengthFB #-}
lengthFB :: x -> (Int -> Int) -> Int -> Int
lengthFB _ r = \ !a -> r (a + 1)

{-# INLINE [0] idLength #-}
idLength :: Int -> Int
idLength = id
-- | 'filter', applied to a predicate and a list, returns the list of
-- those elements that satisfy the predicate; i.e.,
--
-- > filter p xs = [ x | x <- xs, p x]
{-# NOINLINE [1] filter #-}
filter :: (a -> Bool) -> [a] -> [a]
filter _pred [] = []
filter pred (x:xs)
  | pred x = x : filter pred xs
  | otherwise = filter pred xs

-- Cons-replacement worker used by the fusion rules below.
{-# NOINLINE [0] filterFB #-}
filterFB :: (a -> b -> b) -> (a -> Bool) -> a -> b -> b
filterFB c p x r | p x = x `c` r
                 | otherwise = r

{-# RULES
"filter" [~1] forall p xs. filter p xs = build (\c n -> foldr (filterFB c p) n xs)
"filterList" [1] forall p. foldr (filterFB (:) p) [] = filter p
"filterFB" forall c p q. filterFB (filterFB c p) q = filterFB c (\x -> q x && p x)
#-}

-- Note the filterFB rule, which has p and q the "wrong way round" in the RHS.
-- filterFB (filterFB c p) q a b
-- = if q a then filterFB c p a b else b
-- = if q a then (if p a then c a b else b) else b
-- = if q a && p a then c a b else b
-- = filterFB c (\x -> q x && p x) a b
-- I originally wrote (\x -> p x && q x), which is wrong, and actually
-- gave rise to a live bug report. SLPJ.
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a list, reduces the list
-- using the binary operator, from left to right:
--
-- > foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
--
-- The list must be finite.

-- We write foldl as a non-recursive thing, so that it
-- can be inlined, and then (often) strictness-analysed,
-- and hence the classic space leak on foldl (+) 0 xs
foldl :: forall a b. (b -> a -> b) -> b -> [a] -> b
{-# INLINE foldl #-}
foldl k z0 xs =
  foldr (\(v::a) (fn::b->b) -> oneShot (\(z::b) -> fn (k z v))) (id :: b -> b) xs z0
  -- See Note [Left folds via right fold]

{-
Note [Left folds via right fold]

Implementing foldl et al. via foldr is only a good idea if the compiler can
optimize the resulting code (eta-expand the recursive "go"). See #7994.
We hope that one of the two measures kicks in:

 * Call Arity (-fcall-arity, enabled by default) eta-expands it if it can see
   all calls and determine that the arity is large.
 * The oneShot annotation gives a hint to the regular arity analysis that
   it may assume that the lambda is called at most once.
   See [One-shot lambdas] in CoreArity and especially [Eta expanding thunks]
   in CoreArity.

The oneShot annotations used in this module are correct, as we only use them in
arguments to foldr, where we know how the arguments are called.
-}
-- ----------------------------------------------------------------------------

-- | A strict version of 'foldl': the accumulator is forced (to WHNF)
-- before each step, so no chain of thunks builds up.
foldl' :: forall a b . (b -> a -> b) -> b -> [a] -> b
{-# INLINE foldl' #-}
foldl' k z0 xs =
  foldr (\(v::a) (fn::b->b) -> oneShot (\(z::b) -> z `seq` fn (k z v))) (id :: b -> b) xs z0
  -- See Note [Left folds via right fold]
-- | 'foldl1' is a variant of 'foldl' that has no starting value argument,
-- and thus must be applied to non-empty lists: the head of the list
-- serves as the initial accumulator.
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 f (x:xs) = foldl f x xs
foldl1 _ [] = errorEmptyList "foldl1"

-- | A strict version of 'foldl1'
foldl1' :: (a -> a -> a) -> [a] -> a
foldl1' f (x:xs) = foldl' f x xs
foldl1' _ [] = errorEmptyList "foldl1'"
-- -----------------------------------------------------------------------------
-- List sum and product

-- | The 'sum' function computes the sum of a finite list of numbers.
sum :: (Num a) => [a] -> a
{-# INLINE sum #-}
-- Deliberately the lazy foldl: (+) may not be strict for every Num
-- instance. After inlining, strictness analysis usually makes the
-- common numeric cases into strict accumulator loops anyway.
sum = foldl (+) 0

-- | The 'product' function computes the product of a finite list of numbers.
product :: (Num a) => [a] -> a
{-# INLINE product #-}
product = foldl (*) 1
-- | 'scanl' is similar to 'foldl', but returns a list of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == [z, z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.

-- This peculiar arrangement is necessary to prevent scanl being rewritten in
-- its own right-hand side.
{-# NOINLINE [1] scanl #-}
scanl :: (b -> a -> b) -> b -> [a] -> [b]
scanl = scanlGo
  where
    scanlGo :: (b -> a -> b) -> b -> [a] -> [b]
    -- Emit the accumulator before inspecting the list, so the output
    -- is productive even for infinite input.
    scanlGo f q ls = q : (case ls of
                            [] -> []
                            x:xs -> scanlGo f (f q x) xs)

-- Note [scanl rewrite rules]
{-# RULES
"scanl" [~1] forall f a bs . scanl f a bs =
  build (\c n -> a `c` foldr (scanlFB f c) (constScanl n) bs a)
"scanlList" [1] forall f (a::a) bs .
  foldr (scanlFB f (:)) (constScanl []) bs a = tail (scanl f a bs)
#-}

{-# INLINE [0] scanlFB #-}
scanlFB :: (b -> a -> b) -> (b -> c -> c) -> a -> (b -> c) -> b -> c
scanlFB f c = \b g -> oneShot (\x -> let b' = f x b in b' `c` g b')
  -- See Note [Left folds via right fold]

{-# INLINE [0] constScanl #-}
constScanl :: a -> b -> a
constScanl = const
-- | 'scanl1' is a variant of 'scanl' that has no starting value
-- argument; the head of the list seeds the scan:
--
-- > scanl1 f [x1, x2, ...] == [x1, x1 `f` x2, ...]
scanl1 :: (a -> a -> a) -> [a] -> [a]
scanl1 f ys = case ys of
  x : rest -> scanl f x rest
  []       -> []
-- | A strictly accumulating version of 'scanl': the bang on the
-- accumulator forces each intermediate result before it is emitted.
{-# NOINLINE [1] scanl' #-}
scanl' :: (b -> a -> b) -> b -> [a] -> [b]
-- This peculiar form is needed to prevent scanl' from being rewritten
-- in its own right hand side.
scanl' = scanlGo'
  where
    scanlGo' :: (b -> a -> b) -> b -> [a] -> [b]
    scanlGo' f !q ls = q : (case ls of
                              [] -> []
                              x:xs -> scanlGo' f (f q x) xs)

-- Note [scanl rewrite rules]
{-# RULES
"scanl'" [~1] forall f a bs . scanl' f a bs =
  build (\c n -> a `c` foldr (scanlFB' f c) (flipSeqScanl' n) bs a)
"scanlList'" [1] forall f a bs .
  foldr (scanlFB' f (:)) (flipSeqScanl' []) bs a = tail (scanl' f a bs)
#-}

{-# INLINE [0] scanlFB' #-}
scanlFB' :: (b -> a -> b) -> (b -> c -> c) -> a -> (b -> c) -> b -> c
-- Strict let (!b') is what distinguishes this from scanlFB.
scanlFB' f c = \b g -> oneShot (\x -> let !b' = f x b in b' `c` g b')
  -- See Note [Left folds via right fold]

{-# INLINE [0] flipSeqScanl' #-}
flipSeqScanl' :: a -> b -> a
flipSeqScanl' a !_b = a

{-
Note [scanl rewrite rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~
In most cases, when we rewrite a form to one that can fuse, we try to rewrite it
back to the original form if it does not fuse. For scanl, we do something a
little different. In particular, we rewrite

scanl f a bs

to

build (\c n -> a `c` foldr (scanlFB f c) (constScanl n) bs a)

When build is inlined, this becomes

a : foldr (scanlFB f (:)) (constScanl []) bs a

To rewrite this form back to scanl, we would need a rule that looked like

forall f a bs. a : foldr (scanlFB f (:)) (constScanl []) bs a = scanl f a bs

The problem with this rule is that it has (:) at its head. This would have the
effect of changing the way the inliner looks at (:), not only here but
everywhere. In most cases, this makes no difference, but in some cases it
causes it to come to a different decision about whether to inline something.
Based on nofib benchmarks, this is bad for performance. Therefore, we instead
match on everything past the :, which is just the tail of scanl.
-}
-- foldr, foldr1, scanr, and scanr1 are the right-to-left duals of the
-- above functions.

-- | 'foldr1' is a variant of 'foldr' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldr1 :: (a -> a -> a) -> [a] -> a
-- Clause order matters: the singleton case must precede the general
-- cons case so the final element is returned untouched.
foldr1 _ [x] = x
foldr1 f (x:xs) = f x (foldr1 f xs)
foldr1 _ [] = errorEmptyList "foldr1"
-- | 'scanr' is the right-to-left dual of 'scanl'.
-- Note that
--
-- > head (scanr f z xs) == foldr f z xs.
{-# NOINLINE [1] scanr #-}
scanr :: (a -> b -> b) -> b -> [a] -> [b]
scanr _ q0 [] = [q0]
scanr f q0 (x:xs) = f x q : qs
  -- The recursive result is always non-empty, so the as-pattern
  -- simultaneously names it and its head.
  where qs@(q:_) = scanr f q0 xs

-- Strict uncurry: forces the pair before applying f, matching the
-- strictness of the direct definition when the rules fire.
{-# INLINE [0] strictUncurryScanr #-}
strictUncurryScanr :: (a -> b -> c) -> (a, b) -> c
strictUncurryScanr f pair = case pair of
  (x, y) -> f x y

{-# INLINE [0] scanrFB #-}
scanrFB :: (a -> b -> b) -> (b -> c -> c) -> a -> (b, c) -> (b, c)
scanrFB f c = \x (r, est) -> (f x r, r `c` est)

{-# RULES
"scanr" [~1] forall f q0 ls . scanr f q0 ls =
  build (\c n -> strictUncurryScanr c (foldr (scanrFB f c) (q0,n) ls))
"scanrList" [1] forall f q0 ls .
  strictUncurryScanr (:) (foldr (scanrFB f (:)) (q0,[]) ls) =
  scanr f q0 ls
#-}
-- | 'scanr1' is a variant of 'scanr' that has no starting value
-- argument; the last element of the list seeds the scan.
scanr1 :: (a -> a -> a) -> [a] -> [a]
scanr1 _ []     = []
scanr1 _ [x]    = [x]
scanr1 f (x:xs) =
  -- The recursive result is non-empty here, so the lazy as-pattern
  -- safely names both the list and its head.
  let rest@(r:_) = scanr1 f xs
  in f x r : rest
-- | 'maximum' returns the maximum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.maximumBy', which allows the
-- programmer to supply their own comparison function.
maximum :: (Ord a) => [a] -> a
{-# INLINE [1] maximum #-}
maximum [] = errorEmptyList "maximum"
maximum xs = foldl1 max xs

-- Swap in the strict fold for types where max is known to be strict.
{-# RULES
"maximumInt" maximum = (strictMaximum :: [Int] -> Int);
"maximumInteger" maximum = (strictMaximum :: [Integer] -> Integer)
#-}

-- We can't make the overloaded version of maximum strict without
-- changing its semantics (max might not be strict), but we can for
-- the version specialised to 'Int'.
strictMaximum :: (Ord a) => [a] -> a
strictMaximum [] = errorEmptyList "maximum"
strictMaximum xs = foldl1' max xs
-- | 'minimum' returns the minimum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.minimumBy', which allows the
-- programmer to supply their own comparison function.
minimum :: (Ord a) => [a] -> a
{-# INLINE [1] minimum #-}
minimum [] = errorEmptyList "minimum"
minimum xs = foldl1 min xs

-- Same strictness trade-off as for maximum above.
{-# RULES
"minimumInt" minimum = (strictMinimum :: [Int] -> Int);
"minimumInteger" minimum = (strictMinimum :: [Integer] -> Integer)
#-}

strictMinimum :: (Ord a) => [a] -> a
strictMinimum [] = errorEmptyList "minimum"
strictMinimum xs = foldl1' min xs
-- | 'iterate' @f x@ returns an infinite list of repeated applications
-- of @f@ to @x@:
--
-- > iterate f x == [x, f x, f (f x), ...]
{-# NOINLINE [1] iterate #-}
iterate :: (a -> a) -> a -> [a]
iterate f x = x : iterate f (f x)

-- Worker for the fusion rule: same loop, with (:) abstracted to c.
{-# NOINLINE [0] iterateFB #-}
iterateFB :: (a -> b -> b) -> (a -> a) -> a -> b
iterateFB c f x0 = go x0
  where go x = x `c` go (f x)

{-# RULES
"iterate" [~1] forall f x. iterate f x = build (\c _n -> iterateFB c f x)
"iterateFB" [1] iterateFB (:) = iterate
#-}
-- | 'repeat' @x@ is an infinite list, with @x@ the value of every element.
repeat :: a -> [a]
{-# INLINE [0] repeat #-}
-- The pragma just gives the rules more chance to fire
-- The knot-tied definition allocates a single self-referential cell.
repeat x = xs where xs = x : xs

{-# INLINE [0] repeatFB #-} -- ditto
repeatFB :: (a -> b -> b) -> a -> b
repeatFB c x = xs where xs = x `c` xs

{-# RULES
"repeat" [~1] forall x. repeat x = build (\c _n -> repeatFB c x)
"repeatFB" [1] repeatFB (:) = repeat
#-}
-- | 'replicate' @n x@ is a list of length @n@ with @x@ the value of
-- every element.
-- It is an instance of the more general 'Data.List.genericReplicate',
-- in which @n@ may be of any integral type.
{-# INLINE replicate #-}
-- Defined via take/repeat so it benefits from their fusion rules.
replicate :: Int -> a -> [a]
replicate n x = take n (repeat x)
-- | 'cycle' ties a finite list into a circular one, or equivalently,
-- the infinite repetition of the original list. It is the identity
-- on infinite lists.
cycle :: [a] -> [a]
cycle [] = errorEmptyList "cycle"
-- Knot-tied: the result is a genuinely circular structure, not a
-- fresh copy of xs per repetition.
cycle xs = xs' where xs' = xs ++ xs'
-- | 'takeWhile', applied to a predicate @p@ and a list @xs@, returns the
-- longest prefix (possibly empty) of @xs@ of elements that satisfy @p@:
--
-- > takeWhile (< 3) [1,2,3,4,1,2,3,4] == [1,2]
-- > takeWhile (< 9) [1,2,3] == [1,2,3]
-- > takeWhile (< 0) [1,2,3] == []
--
{-# NOINLINE [1] takeWhile #-}
takeWhile :: (a -> Bool) -> [a] -> [a]
takeWhile _ [] = []
takeWhile p (x:xs)
  | p x = x : takeWhile p xs
  | otherwise = []

{-# INLINE [0] takeWhileFB #-}
takeWhileFB :: (a -> Bool) -> (a -> b -> b) -> b -> a -> b -> b
takeWhileFB p c n = \x r -> if p x then x `c` r else n

-- The takeWhileFB rule is similar to the filterFB rule. It works like this:
-- takeWhileFB q (takeWhileFB p c n) n =
-- \x r -> if q x then (takeWhileFB p c n) x r else n =
-- \x r -> if q x then (\x' r' -> if p x' then x' `c` r' else n) x r else n =
-- \x r -> if q x then (if p x then x `c` r else n) else n =
-- \x r -> if q x && p x then x `c` r else n =
-- takeWhileFB (\x -> q x && p x) c n
{-# RULES
"takeWhile" [~1] forall p xs. takeWhile p xs =
  build (\c n -> foldr (takeWhileFB p c n) n xs)
"takeWhileList" [1] forall p. foldr (takeWhileFB p (:) []) [] = takeWhile p
"takeWhileFB" forall c n p q. takeWhileFB q (takeWhileFB p c n) n =
  takeWhileFB (\x -> q x && p x) c n
#-}
-- | 'dropWhile' @p xs@ returns the suffix remaining after
-- 'takeWhile' @p xs@:
--
-- > dropWhile (< 3) [1,2,3,4,5,1,2,3] == [3,4,5,1,2,3]
-- > dropWhile (< 9) [1,2,3] == []
-- > dropWhile (< 0) [1,2,3] == [1,2,3]
--
dropWhile :: (a -> Bool) -> [a] -> [a]
dropWhile test list = case list of
  [] -> []
  whole@(y : rest)
    | test y    -> dropWhile test rest
    | otherwise -> whole   -- return the matched suffix itself, no rebuild
-- | 'take' @n@, applied to a list @xs@, returns the prefix of @xs@
-- of length @n@, or @xs@ itself if @n > 'length' xs@:
--
-- > take 5 "Hello World!" == "Hello"
-- > take 3 [1,2,3,4,5] == [1,2,3]
-- > take 3 [1,2] == [1,2]
-- > take 3 [] == []
-- > take (-1) [1,2] == []
-- > take 0 [1,2] == []
--
-- It is an instance of the more general 'Data.List.genericTake',
-- in which @n@ may be of any integral type.
take :: Int -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
take n _ | n <= 0 = []
take _ [] = []
take n (x:xs) = x : take (n-1) xs
#else
{- We always want to inline this to take advantage of a known length argument
sign. Note, however, that it's important for the RULES to grab take, rather
than trying to INLINE take immediately and then letting the RULES grab
unsafeTake. Presumably the latter approach doesn't grab it early enough; it led
to an allocation regression in nofib/fft2. -}
{-# INLINE [1] take #-}
take n xs | 0 < n = unsafeTake n xs
          | otherwise = []

-- A version of take that takes the whole list if it's given an argument less
-- than 1.
{-# NOINLINE [1] unsafeTake #-}
unsafeTake :: Int -> [a] -> [a]
-- The bang forces the count even on an empty list (strictness as in take).
unsafeTake !_ [] = []
unsafeTake 1 (x: _) = [x]
unsafeTake m (x:xs) = x : unsafeTake (m - 1) xs

{-# RULES
"take" [~1] forall n xs . take n xs =
  build (\c nil -> if 0 < n
                   then foldr (takeFB c nil) (flipSeqTake nil) xs n
                   else nil)
"unsafeTakeList" [1] forall n xs . foldr (takeFB (:) []) (flipSeqTake []) xs n
                                     = unsafeTake n xs
#-}

{-# INLINE [0] flipSeqTake #-}
-- Just flip seq, specialized to Int, but not inlined too early.
-- It's important to force the numeric argument here, even though
-- it's not used. Otherwise, take n [] doesn't force n. This is
-- bad for strictness analysis and unboxing, and leads to increased
-- allocation in T7257.
flipSeqTake :: a -> Int -> a
flipSeqTake x !_n = x

{-# INLINE [0] takeFB #-}
takeFB :: (a -> b -> b) -> b -> a -> (Int -> b) -> Int -> b
-- The \m accounts for the fact that takeFB is used in a higher-order
-- way by takeFoldr, so it's better to inline. A good example is
-- take n (repeat x)
-- for which we get excellent code... but only if we inline takeFB
-- when given four arguments
takeFB c n x xs
  = \ m -> case m of
             1 -> x `c` n
             _ -> x `c` xs (m - 1)
#endif
-- | 'drop' @n xs@ returns the suffix of @xs@
-- after the first @n@ elements, or @[]@ if @n > 'length' xs@:
--
-- > drop 6 "Hello World!" == "World!"
-- > drop 3 [1,2,3,4,5] == [4,5]
-- > drop 3 [1,2] == []
-- > drop 3 [] == []
-- > drop (-1) [1,2] == [1,2]
-- > drop 0 [1,2] == [1,2]
--
-- It is an instance of the more general 'Data.List.genericDrop',
-- in which @n@ may be of any integral type.
drop :: Int -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
drop n xs | n <= 0 = xs
drop _ [] = []
drop n (_:xs) = drop (n-1) xs
#else /* hack away */
-- Inlined so the sign test can be discharged at a known-argument
-- call site, leaving just the worker loop.
{-# INLINE drop #-}
drop n ls
  | n <= 0 = ls
  | otherwise = unsafeDrop n ls
  where
    -- A version of drop that drops the whole list if given an argument
    -- less than 1
    unsafeDrop :: Int -> [a] -> [a]
    unsafeDrop !_ [] = []
    unsafeDrop 1 (_:xs) = xs
    unsafeDrop m (_:xs) = unsafeDrop (m - 1) xs
#endif
-- | 'splitAt' @n xs@ returns a tuple where first element is @xs@ prefix of
-- length @n@ and second element is the remainder of the list:
--
-- > splitAt 6 "Hello World!" == ("Hello ","World!")
-- > splitAt 3 [1,2,3,4,5] == ([1,2,3],[4,5])
-- > splitAt 1 [1,2,3] == ([1],[2,3])
-- > splitAt 3 [1,2,3] == ([1,2,3],[])
-- > splitAt 4 [1,2,3] == ([1,2,3],[])
-- > splitAt 0 [1,2,3] == ([],[1,2,3])
-- > splitAt (-1) [1,2,3] == ([],[1,2,3])
--
-- It is equivalent to @('take' n xs, 'drop' n xs)@ when @n@ is not @_|_@
-- (@splitAt _|_ xs = _|_@).
-- 'splitAt' is an instance of the more general 'Data.List.genericSplitAt',
-- in which @n@ may be of any integral type.
splitAt :: Int -> [a] -> ([a],[a])
#ifdef USE_REPORT_PRELUDE
splitAt n xs = (take n xs, drop n xs)
#else
-- Single traversal of the list, unlike the take/drop version.
splitAt n ls
  | n <= 0 = ([], ls)
  | otherwise = splitAt' n ls
  where
    splitAt' :: Int -> [a] -> ([a], [a])
    splitAt' _ [] = ([], [])
    splitAt' 1 (x:xs) = ([x], xs)
    splitAt' m (x:xs) = (x:xs', xs'')
      where
        (xs', xs'') = splitAt' (m - 1) xs
#endif /* USE_REPORT_PRELUDE */
-- | 'span', applied to a predicate @p@ and a list @xs@, returns a tuple
-- whose first element is the longest prefix (possibly empty) of @xs@ of
-- elements satisfying @p@, and whose second element is the rest:
--
-- > span (< 3) [1,2,3,4,1,2,3,4] == ([1,2],[3,4,1,2,3,4])
-- > span (< 9) [1,2,3] == ([1,2,3],[])
-- > span (< 0) [1,2,3] == ([],[1,2,3])
--
-- 'span' @p xs@ is equivalent to @('takeWhile' p xs, 'dropWhile' p xs)@
span :: (a -> Bool) -> [a] -> ([a],[a])
span _ ws@[] = (ws, ws)
span keep ws@(w:rest)
  | keep w    = (w : before, after)
  | otherwise = ([], ws)
  where
    -- Lazy where-binding: the recursive call is only demanded on the
    -- branch where the predicate holds.
    (before, after) = span keep rest
-- | 'break', applied to a predicate @p@ and a list @xs@, returns a tuple where
-- first element is longest prefix (possibly empty) of @xs@ of elements that
-- /do not satisfy/ @p@ and second element is the remainder of the list:
--
-- > break (> 3) [1,2,3,4,1,2,3,4] == ([1,2,3],[4,1,2,3,4])
-- > break (< 9) [1,2,3] == ([],[1,2,3])
-- > break (> 9) [1,2,3] == ([1,2,3],[])
--
-- 'break' @p@ is equivalent to @'span' ('not' . p)@.
break :: (a -> Bool) -> [a] -> ([a],[a])
#ifdef USE_REPORT_PRELUDE
break p = span (not . p)
#else
-- HBC version (stolen)
-- Direct definition avoids the extra closure for (not . p).
break _ xs@[] = (xs, xs)
break p xs@(x:xs')
  | p x = ([],xs)
  | otherwise = let (ys,zs) = break p xs' in (x:ys,zs)
#endif
-- | 'reverse' @xs@ returns the elements of @xs@ in reverse order.
-- @xs@ must be finite.
reverse :: [a] -> [a]
#ifdef USE_REPORT_PRELUDE
reverse = foldl (flip (:)) []
#else
-- Accumulator version: O(n) with no intermediate closures.
reverse l = rev l []
  where
    rev [] a = a
    rev (x:xs) a = rev xs (x:a)
#endif
-- | 'and' returns the conjunction of a Boolean list. For the result to be
-- 'True', the list must be finite; 'False', however, results from a 'False'
-- value at a finite index of a finite or infinite list.
and :: [Bool] -> Bool
#ifdef USE_REPORT_PRELUDE
and = foldr (&&) True
#else
and [] = True
and (x:xs) = x && and xs
-- Held back so the fusion rule below can fire first.
{-# NOINLINE [1] and #-}
{-# RULES
"and/build" forall (g::forall b.(Bool->b->b)->b->b) .
  and (build g) = g (&&) True
#-}
#endif
-- | 'or' returns the disjunction of a Boolean list. For the result to be
-- 'False', the list must be finite; 'True', however, results from a 'True'
-- value at a finite index of a finite or infinite list.
or :: [Bool] -> Bool
#ifdef USE_REPORT_PRELUDE
or = foldr (||) False
#else
or [] = False
or (x:xs) = x || or xs
{-# NOINLINE [1] or #-}
{-# RULES
"or/build" forall (g::forall b.(Bool->b->b)->b->b) .
  or (build g) = g (||) False
#-}
#endif
-- | Applied to a predicate and a list, 'any' determines if any element
-- of the list satisfies the predicate. For the result to be
-- 'False', the list must be finite; 'True', however, results from a 'True'
-- value for the predicate applied to an element at a finite index of a finite or infinite list.
any :: (a -> Bool) -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
any p = or . map p
#else
any _ [] = False
any p (x:xs) = p x || any p xs
{-# NOINLINE [1] any #-}
{-# RULES
"any/build" forall p (g::forall b.(a->b->b)->b->b) .
  any p (build g) = g ((||) . p) False
#-}
#endif
-- | Applied to a predicate and a list, 'all' determines if all elements
-- of the list satisfy the predicate. For the result to be
-- 'True', the list must be finite; 'False', however, results from a 'False'
-- value for the predicate applied to an element at a finite index of a finite or infinite list.
all :: (a -> Bool) -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
all p = and . map p
#else
all _ [] = True
all p (x:xs) = p x && all p xs
{-# NOINLINE [1] all #-}
{-# RULES
"all/build" forall p (g::forall b.(a->b->b)->b->b) .
  all p (build g) = g ((&&) . p) True
#-}
#endif
-- | 'elem' is the list membership predicate, usually written in infix form,
-- e.g., @x \`elem\` xs@. For the result to be
-- 'False', the list must be finite; 'True', however, results from an element
-- equal to @x@ found at a finite index of a finite or infinite list.
elem :: (Eq a) => a -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
elem x = any (== x)
#else
elem _ [] = False
elem x (y:ys) = x==y || elem x ys
{-# NOINLINE [1] elem #-}
{-# RULES
"elem/build" forall x (g :: forall b . Eq a => (a -> b -> b) -> b -> b)
  . elem x (build g) = g (\ y r -> (x == y) || r) False
#-}
#endif
-- | 'notElem' is the negation of 'elem'.
notElem :: (Eq a) => a -> [a] -> Bool
#ifdef USE_REPORT_PRELUDE
notElem x = all (/= x)
#else
notElem _ [] = True
notElem x (y:ys)= x /= y && notElem x ys
{-# NOINLINE [1] notElem #-}
{-# RULES
"notElem/build" forall x (g :: forall b . Eq a => (a -> b -> b) -> b -> b)
  . notElem x (build g) = g (\ y r -> (x /= y) && r) True
#-}
#endif
-- | 'lookup' @key assocs@ looks up a key in an association list,
-- returning the value paired with the first matching key, if any.
lookup :: (Eq a) => a -> [(a,b)] -> Maybe b
lookup wanted pairs = case pairs of
  [] -> Nothing
  (k, v) : rest
    | wanted == k -> Just v
    | otherwise   -> lookup wanted rest
-- | Map a function over a list and concatenate the results.
concatMap :: (a -> [b]) -> [a] -> [b]
concatMap f = foldr ((++) . f) []
{-# NOINLINE [1] concatMap #-}
-- Fuse by flattening with a nested foldr instead of building the
-- intermediate lists produced by f.
{-# RULES
"concatMap" forall f xs . concatMap f xs =
  build (\c n -> foldr (\x b -> foldr c b (f x)) n xs)
#-}
-- | Concatenate a list of lists.
concat :: [[a]] -> [a]
concat = foldr (++) []
{-# NOINLINE [1] concat #-}
{-# RULES
"concat" forall xs. concat xs =
  build (\c n -> foldr (\x y -> foldr c y x) n xs)
-- We don't bother to turn non-fusible applications of concat back into concat
#-}
-- | List index (subscript) operator, starting from 0.
-- It is an instance of the more general 'Data.List.genericIndex',
-- which takes an index of any integral type.
(!!) :: [a] -> Int -> a
#ifdef USE_REPORT_PRELUDE
xs !! n | n < 0 = error "Prelude.!!: negative index"
[] !! _ = error "Prelude.!!: index too large"
(x:_) !! 0 = x
(_:xs) !! n = xs !! (n-1)
#else
-- We don't really want the errors to inline with (!!).
-- We may want to fuss around a bit with NOINLINE, and
-- if so we should be careful not to trip up known-bottom
-- optimizations.
tooLarge :: Int -> a
tooLarge _ = error (prel_list_str ++ "!!: index too large")

negIndex :: a
negIndex = error $ prel_list_str ++ "!!: negative index"

-- Defined with foldr so indexing can fuse with a 'build' producer.
{-# INLINABLE (!!) #-}
xs !! n
  | n < 0 = negIndex
  | otherwise = foldr (\x r k -> case k of
                                   0 -> x
                                   _ -> r (k-1)) tooLarge xs n
#endif
--------------------------------------------------------------
-- The zip family
--------------------------------------------------------------

-- Shared fold underlying zip/zipWith. The bang on _ys makes the fold
-- strict in the second list even when the first list ends; see #9495.
foldr2 :: (a -> b -> c -> c) -> c -> [a] -> [b] -> c
foldr2 k z = go
  where
    go [] !_ys = z -- see #9495 for the !
    go _xs [] = z
    go (x:xs) (y:ys) = k x y (go xs ys)
{-# INLINE [0] foldr2 #-}

-- One-step workers used by the rules below: fold over whichever list
-- is in 'build' form, threading the other list through as an argument.
foldr2_left :: (a -> b -> c -> d) -> d -> a -> ([b] -> c) -> [b] -> d
foldr2_left _k z _x _r [] = z
foldr2_left k _z x r (y:ys) = k x y (r ys)

foldr2_right :: (a -> b -> c -> d) -> d -> b -> ([a] -> c) -> [a] -> d
foldr2_right _k z _y _r [] = z
foldr2_right k _z y r (x:xs) = k x y (r xs)

-- foldr2 k z xs ys = foldr (foldr2_left k z) (\_ -> z) xs ys
-- foldr2 k z xs ys = foldr (foldr2_right k z) (\_ -> z) ys xs
{-# RULES
"foldr2/left" forall k z ys (g::forall b.(a->b->b)->b->b) .
  foldr2 k z (build g) ys = g (foldr2_left k z) (\_ -> z) ys
"foldr2/right" forall k z xs (g::forall b.(a->b->b)->b->b) .
  foldr2 k z xs (build g) = g (foldr2_right k z) (\_ -> z) xs
#-}
-- Zips for larger tuples are in the List module.

----------------------------------------------
-- | 'zip' takes two lists and returns a list of corresponding pairs.
-- If one input list is short, excess elements of the longer list are
-- discarded.
--
-- NOTE: GHC's implementation of @zip@ deviates slightly from the
-- standard. In particular, Haskell 98 and Haskell 2010 require that
-- @zip [x1,x2,...,xn] (y1:y2:...:yn:_|_) = [(x1,y1),(x2,y2),...,(xn,yn)]@
-- In GHC, however,
-- @zip [x1,x2,...,xn] (y1:y2:...:yn:_|_) = (x1,y1):(x2,y2):...:(xn,yn):_|_@
-- That is, you cannot use termination of the left list to avoid hitting
-- bottom in the right list.
-- This deviation is necessary to make fusion with 'build' in the right
-- list preserve semantics.
{-# NOINLINE [1] zip #-}
zip :: [a] -> [b] -> [(a,b)]
zip [] !_bs = [] -- see #9495 for the !
zip _as [] = []
zip (a:as) (b:bs) = (a,b) : zip as bs

-- Cons-replacement worker for the fusion rule below.
{-# INLINE [0] zipFB #-}
zipFB :: ((a, b) -> c -> d) -> a -> b -> c -> d
zipFB c = \x y r -> (x,y) `c` r

{-# RULES
"zip" [~1] forall xs ys. zip xs ys = build (\c n -> foldr2 (zipFB c) n xs ys)
"zipList" [1] foldr2 (zipFB (:)) [] = zip
#-}
----------------------------------------------
-- | 'zip3' takes three lists and returns a list of triples, analogous to
-- 'zip'; it stops at the end of the shortest input.
zip3 :: [a] -> [b] -> [c] -> [(a,b,c)]
-- Specification
-- zip3 = zipWith3 (,,)
zip3 xs ys zs = case (xs, ys, zs) of
  (a:as, b:bs, c:cs) -> (a, b, c) : zip3 as bs cs
  _                  -> []
-- The zipWith family generalises the zip family by zipping with the
-- function given as the first argument, instead of a tupling function.

----------------------------------------------
-- | 'zipWith' generalises 'zip' by zipping with the function given
-- as the first argument, instead of a tupling function.
-- For example, @'zipWith' (+)@ is applied to two lists to produce the
-- list of corresponding sums.
--
-- NOTE: GHC's implementation of @zipWith@ deviates slightly from the
-- standard. In particular, Haskell 98 and Haskell 2010 require that
-- @zipWith (,) [x1,x2,...,xn] (y1:y2:...:yn:_|_) = [(x1,y1),(x2,y2),...,(xn,yn)]@
-- In GHC, however,
-- @zipWith (,) [x1,x2,...,xn] (y1:y2:...:yn:_|_) = (x1,y1):(x2,y2):...:(xn,yn):_|_@
-- That is, you cannot use termination of the left list to avoid hitting
-- bottom in the right list.
-- This deviation is necessary to make fusion with 'build' in the right
-- list preserve semantics.
{-# NOINLINE [1] zipWith #-}
zipWith :: (a->b->c) -> [a]->[b]->[c]
zipWith _f [] !_bs = [] -- see #9495 for the !
zipWith _f _as [] = []
zipWith f (a:as) (b:bs) = f a b : zipWith f as bs

-- zipWithFB must have arity 2 since it gets two arguments in the "zipWith"
-- rule; it might not get inlined otherwise
{-# INLINE [0] zipWithFB #-}
zipWithFB :: (a -> b -> c) -> (d -> e -> a) -> d -> e -> b -> c
zipWithFB c f = \x y r -> (x `f` y) `c` r

{-# RULES
"zipWith" [~1] forall f xs ys. zipWith f xs ys = build (\c n -> foldr2 (zipWithFB c f) n xs ys)
"zipWithList" [1] forall f. foldr2 (zipWithFB (:) f) [] = zipWith f
#-}
-- | The 'zipWith3' function takes a function which combines three
-- elements, as well as three lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'; it stops at the shortest input.
zipWith3 :: (a->b->c->d) -> [a]->[b]->[c]->[d]
zipWith3 f xs ys zs = case (xs, ys, zs) of
  (a:as, b:bs, c:cs) -> f a b c : zipWith3 f as bs cs
  _                  -> []
-- | 'unzip' transforms a list of pairs into a list of first components
-- and a list of second components.
unzip :: [(a,b)] -> ([a],[b])
{-# INLINE unzip #-}
-- The irrefutable pattern ~(as,bs) is essential: it keeps the foldr
-- maximally lazy, so both result lists can be consumed incrementally.
unzip = foldr (\(a,b) ~(as,bs) -> (a:as,b:bs)) ([],[])
-- | The 'unzip3' function takes a list of triples and returns three
-- lists, analogous to 'unzip'.
unzip3 :: [(a,b,c)] -> ([a],[b],[c])
{-# INLINE unzip3 #-}
-- Irrefutable pattern for the same laziness reason as in 'unzip'.
unzip3 = foldr (\(a,b,c) ~(as,bs,cs) -> (a:as,b:bs,c:cs))
               ([],[],[])
--------------------------------------------------------------
-- Error code
--------------------------------------------------------------

-- Common up near identical calls to `error' to reduce the number
-- constant strings created when compiled:

-- | Raise the canonical empty-list error for the named list function.
errorEmptyList :: String -> a
errorEmptyList fun = error msg
  where
    msg = prel_list_str ++ fun ++ ": empty list"

-- Common prefix shared by every list-function error message.
prel_list_str :: String
prel_list_str = "Prelude."
|
green-haskell/ghc
|
libraries/base/GHC/List.hs
|
bsd-3-clause
| 35,163 | 0 | 14 | 9,782 | 5,542 | 3,170 | 2,372 | 412 | 3 |
{-
(c) The University of Glasgow 2006-2012
(c) The GRASP Project, Glasgow University, 1992-2002
Various types used during typechecking, please see TcRnMonad as well for
operations on these types. You probably want to import it, instead of this
module.
All the monads exported here are built on top of the same IOEnv monad. The
monad functions like a Reader monad in the way it passes the environment
around. This is done to allow the environment to be manipulated in a stack
like fashion when entering expressions... ect.
For state that is global and should be returned at the end (e.g not part
of the stack mechanism), you should use an TcRef (= IORef) to store them.
-}
{-# LANGUAGE CPP, ExistentialQuantification, GeneralizedNewtypeDeriving #-}
module TcRnTypes(
TcRnIf, TcRn, TcM, RnM, IfM, IfL, IfG, -- The monad is opaque outside this module
TcRef,
-- The environment types
Env(..),
TcGblEnv(..), TcLclEnv(..),
IfGblEnv(..), IfLclEnv(..),
tcVisibleOrphanMods,
-- Renamer types
ErrCtxt, RecFieldEnv(..),
ImportAvails(..), emptyImportAvails, plusImportAvails,
WhereFrom(..), mkModDeps,
-- Typechecker types
TcTypeEnv, TcIdBinderStack, TcIdBinder(..),
TcTyThing(..), PromotionErr(..),
SelfBootInfo(..),
pprTcTyThingCategory, pprPECategory,
-- Desugaring types
DsM, DsLclEnv(..), DsGblEnv(..), PArrBuiltin(..),
DsMetaEnv, DsMetaVal(..),
-- Template Haskell
ThStage(..), PendingStuff(..), topStage, topAnnStage, topSpliceStage,
ThLevel, impLevel, outerLevel, thLevel,
-- Arrows
ArrowCtxt(..),
-- Canonical constraints
Xi, Ct(..), Cts, emptyCts, andCts, andManyCts, pprCts,
singleCt, listToCts, ctsElts, consCts, snocCts, extendCtsList,
isEmptyCts, isCTyEqCan, isCFunEqCan,
isCDictCan_Maybe, isCFunEqCan_maybe,
isCIrredEvCan, isCNonCanonical, isWantedCt, isDerivedCt,
isGivenCt, isHoleCt, isOutOfScopeCt, isExprHoleCt, isTypeHoleCt,
ctEvidence, ctLoc, setCtLoc, ctPred, ctFlavour, ctEqRel, ctOrigin,
mkNonCanonical, mkNonCanonicalCt,
ctEvPred, ctEvLoc, ctEvOrigin, ctEvEqRel,
ctEvTerm, ctEvCoercion, ctEvId,
WantedConstraints(..), insolubleWC, emptyWC, isEmptyWC,
andWC, unionsWC, addSimples, addImplics, mkSimpleWC, addInsols,
dropDerivedWC, dropDerivedSimples, dropDerivedInsols,
isDroppableDerivedLoc, insolubleImplic, trulyInsoluble,
Implication(..), ImplicStatus(..), isInsolubleStatus,
SubGoalDepth, initialSubGoalDepth,
bumpSubGoalDepth, subGoalDepthExceeded,
CtLoc(..), ctLocSpan, ctLocEnv, ctLocLevel, ctLocOrigin,
ctLocDepth, bumpCtLocDepth,
setCtLocOrigin, setCtLocEnv, setCtLocSpan,
CtOrigin(..), pprCtOrigin, pprCtLoc,
pushErrCtxt, pushErrCtxtSameOrigin,
SkolemInfo(..),
CtEvidence(..),
mkGivenLoc,
isWanted, isGiven, isDerived,
ctEvRole,
-- Constraint solver plugins
TcPlugin(..), TcPluginResult(..), TcPluginSolver,
TcPluginM, runTcPluginM, unsafeTcPluginTcM,
getEvBindsTcPluginM_maybe,
CtFlavour(..), ctEvFlavour,
CtFlavourRole, ctEvFlavourRole, ctFlavourRole,
eqCanRewrite, eqCanRewriteFR, canDischarge, canDischargeFR,
-- Pretty printing
pprEvVarTheta,
pprEvVars, pprEvVarWithType,
-- Misc other types
TcId, TcIdSet, HoleSort(..)
) where
#include "HsVersions.h"
import HsSyn
import CoreSyn
import HscTypes
import TcEvidence
import Type
import CoAxiom ( Role )
import Class ( Class )
import TyCon ( TyCon )
import ConLike ( ConLike(..) )
import DataCon ( DataCon, dataConUserType, dataConOrigArgTys )
import PatSyn ( PatSyn, patSynType )
import TcType
import Annotations
import InstEnv
import FamInstEnv
import IOEnv
import RdrName
import Name
import NameEnv
import NameSet
import Avail
import Var
import VarEnv
import Module
import SrcLoc
import VarSet
import ErrUtils
import UniqFM
import UniqSupply
import BasicTypes
import Bag
import DynFlags
import Outputable
import ListSetOps
import FastString
import GHC.Fingerprint
import Data.Set (Set)
import Control.Monad (ap, liftM)
#ifdef GHCI
import Data.Map ( Map )
import Data.Dynamic ( Dynamic )
import Data.Typeable ( TypeRep )
import qualified Language.Haskell.TH as TH
#endif
{-
************************************************************************
* *
Standard monad definition for TcRn
All the combinators for the monad can be found in TcRnMonad
* *
************************************************************************
The monad itself has to be defined here, because it is mentioned by ErrCtxt
-}
-- | The common monad shape: an 'IOEnv' computation over an 'Env' whose
-- two parameters are the global and local environment types.
type TcRnIf a b = IOEnv (Env a b)
type TcRn = TcRnIf TcGblEnv TcLclEnv    -- Type inference
type IfM lcl = TcRnIf IfGblEnv lcl      -- Iface stuff
type IfG = IfM ()                       -- Top level
type IfL = IfM IfLclEnv                 -- Nested
type DsM = TcRnIf DsGblEnv DsLclEnv     -- Desugaring

-- TcRn is the type-checking and renaming monad: the main monad that
-- most type-checking takes place in.  The global environment is
-- 'TcGblEnv', which tracks all of the top-level type-checking
-- information we've accumulated while checking a module, while the
-- local environment is 'TcLclEnv', which tracks local information as
-- we move inside expressions.

-- | Historical "renaming monad" (now it's just 'TcRn').
type RnM = TcRn

-- | Historical "type-checking monad" (now it's just 'TcRn').
type TcM = TcRn
-- We 'stack' these envs through the Reader-like monad infrastructure
-- as we move into an expression (although the change is focused in
-- the lcl type).
data Env gbl lcl
  = Env {
        env_top :: HscEnv,   -- Top-level stuff that never changes
                             -- Includes all info about imported things

        env_us  :: {-# UNPACK #-} !(IORef UniqSupply),
                             -- Unique supply for local variables

        env_gbl :: gbl,      -- Info about things defined at the top level
                             -- of the module being compiled

        env_lcl :: lcl       -- Nested stuff; changes as we go into
                             -- an expression
    }
instance ContainsDynFlags (Env gbl lcl) where
    -- The DynFlags live inside the top-level HscEnv.
    extractDynFlags = hsc_dflags . env_top
    replaceDynFlags env flags =
        env { env_top = replaceDynFlags (env_top env) flags }
instance ContainsModule gbl => ContainsModule (Env gbl lcl) where
    -- Delegate to the global environment, which knows its module.
    extractModule = extractModule . env_gbl
{-
************************************************************************
* *
The interface environments
Used when dealing with IfaceDecls
* *
************************************************************************
-}
-- | Global environment for interface-file typechecking ('IfM').
data IfGblEnv
  = IfGblEnv {
        -- The type environment for the module being compiled,
        -- in case the interface refers back to it via a reference that
        -- was originally a hi-boot file.
        -- We need the module name so we can test when it's appropriate
        -- to look in this env.
        if_rec_types :: Maybe (Module, IfG TypeEnv)
                -- Allows a read effect, so it can be in a mutable
                -- variable; c.f. handling the external package type env
                -- Nothing => interactive stuff, no loops possible
    }
-- | Local (per-declaration) environment for interface-file typechecking.
data IfLclEnv
  = IfLclEnv {
        -- The module for the current IfaceDecl
        -- So if we see   f = \x -> x
        -- it means M.f = \x -> x, where M is the if_mod
        if_mod :: Module,

        -- The field is used only for error reporting
        -- if (say) there's a Lint error in it
        if_loc :: SDoc,
                -- Where the interface came from:
                --   .hi file, or GHCi state, or ext core
                -- plus which bit is currently being examined

        if_tv_env :: UniqFM TyVar,     -- Nested tyvar bindings
                                       -- (and coercions)
        if_id_env :: UniqFM Id         -- Nested id binding
    }
{-
************************************************************************
* *
Desugarer monad
* *
************************************************************************
Now the mondo monad magic (yes, @DsM@ is a silly name)---carry around
a @UniqueSupply@ and some annotations, which
presumably include source-file location information:
-}
-- If '-XParallelArrays' is given, the desugarer populates this table with the corresponding
-- variables found in 'Data.Array.Parallel'.
--
-- | Names from 'Data.Array.Parallel' that the desugarer uses when
-- '-XParallelArrays' is on; one field per builtin operation.
data PArrBuiltin
  = PArrBuiltin
    { lengthPVar         :: Var     -- ^ lengthP
    , replicatePVar      :: Var     -- ^ replicateP
    , singletonPVar      :: Var     -- ^ singletonP
    , mapPVar            :: Var     -- ^ mapP
    , filterPVar         :: Var     -- ^ filterP
    , zipPVar            :: Var     -- ^ zipP
    , crossMapPVar       :: Var     -- ^ crossMapP
    , indexPVar          :: Var     -- ^ (!:)
    , emptyPVar          :: Var     -- ^ emptyP
    , appPVar            :: Var     -- ^ (+:+)
    , enumFromToPVar     :: Var     -- ^ enumFromToP
    , enumFromThenToPVar :: Var     -- ^ enumFromThenToP
    }
-- | Global environment for the desugarer ('DsM').
data DsGblEnv
  = DsGblEnv
  { ds_mod          :: Module               -- For SCC profiling
  , ds_fam_inst_env :: FamInstEnv           -- Like tcg_fam_inst_env
  , ds_unqual       :: PrintUnqualified
  , ds_msgs         :: IORef Messages       -- Warning messages
  , ds_if_env       :: (IfGblEnv, IfLclEnv) -- Used for looking up global,
                                            -- possibly-imported things
  , ds_dph_env      :: GlobalRdrEnv         -- exported entities of 'Data.Array.Parallel.Prim'
                                            -- iff '-fvectorise' flag was given as well as
                                            -- exported entities of 'Data.Array.Parallel' iff
                                            -- '-XParallelArrays' was given; otherwise, empty
  , ds_parr_bi      :: PArrBuiltin          -- desugarer names for '-XParallelArrays'
  , ds_static_binds :: IORef [(Fingerprint, (Id,CoreExpr))]
    -- ^ Bindings resulted from floating static forms
  }
instance ContainsModule DsGblEnv where
    -- The desugarer records the module it is working on in ds_mod.
    extractModule env = ds_mod env
-- | Local environment for the desugarer.
data DsLclEnv = DsLclEnv {
        dsl_meta :: DsMetaEnv,  -- Template Haskell bindings
        dsl_loc  :: SrcSpan     -- to put in pattern-matching error msgs
     }

-- Inside [| |] brackets, the desugarer looks
-- up variables in the DsMetaEnv
type DsMetaEnv = NameEnv DsMetaVal

data DsMetaVal
  = DsBound Id           -- Bound by a pattern inside the [| |].
                         -- Will be dynamically alpha renamed.
                         -- The Id has type THSyntax.Var

  | DsSplice (HsExpr Id) -- These bindings are introduced by
                         -- the PendingSplices on a HsBracketOut
{-
************************************************************************
* *
Global typechecker environment
* *
************************************************************************
-}
-- | 'TcGblEnv' describes the top-level of the module at the
-- point at which the typechecker is finished work.
-- It is this structure that is handed on to the desugarer
-- For state that needs to be updated during the typechecking
-- phase and returned at end, use a 'TcRef' (= 'IORef').
data TcGblEnv
  = TcGblEnv {
        tcg_mod :: Module,          -- ^ Module being compiled
        tcg_src :: HscSource,
          -- ^ What kind of module (regular Haskell, hs-boot, hsig)
        tcg_sig_of :: Maybe Module,
          -- ^ Are we being compiled as a signature of an implementation?
        tcg_impl_rdr_env :: Maybe GlobalRdrEnv,
          -- ^ Environment used only during -sig-of for resolving top level
          -- bindings. See Note [Signature parameters in TcGblEnv and DynFlags]
        tcg_rdr_env :: GlobalRdrEnv,    -- ^ Top level envt; used during renaming
        tcg_default :: Maybe [Type],
          -- ^ Types used for defaulting. @Nothing@ => no @default@ decl

        tcg_fix_env :: FixityEnv,       -- ^ Just for things in this module
        tcg_field_env :: RecFieldEnv,   -- ^ Just for things in this module
                                        -- See Note [The interactive package] in HscTypes

        tcg_type_env :: TypeEnv,
          -- ^ Global type env for the module we are compiling now.  All
          -- TyCons and Classes (for this module) end up in here right away,
          -- along with their derived constructors, selectors.
          --
          -- (Ids defined in this module start in the local envt, though they
          -- move to the global envt during zonking)
          --
          -- NB: for what "things in this module" means, see
          -- Note [The interactive package] in HscTypes

        tcg_type_env_var :: TcRef TypeEnv,
          -- Used only to initialise the interface-file
          -- typechecker in initIfaceTcRn, so that it can see stuff
          -- bound in this module when dealing with hi-boot recursions
          -- Updated at intervals (e.g. after dealing with types and classes)

        tcg_inst_env :: InstEnv,
          -- ^ Instance envt for all /home-package/ modules;
          -- Includes the dfuns in tcg_insts
        tcg_fam_inst_env :: FamInstEnv, -- ^ Ditto for family instances
        tcg_ann_env :: AnnEnv,          -- ^ And for annotations

        -- Now a bunch of things about this module that are simply
        -- accumulated, but never consulted until the end.
        -- Nevertheless, it's convenient to accumulate them along
        -- with the rest of the info from this module.
        tcg_exports :: [AvailInfo],     -- ^ What is exported
        tcg_imports :: ImportAvails,
          -- ^ Information about what was imported from where, including
          -- things bound in this module. Also store Safe Haskell info
          -- here about transitive trusted package requirements.

        tcg_dus :: DefUses,   -- ^ What is defined in this module and what is used.
        tcg_used_rdrnames :: TcRef (Set RdrName),
          -- See Note [Tracking unused binding and imports]

        tcg_keep :: TcRef NameSet,
          -- ^ Locally-defined top-level names to keep alive.
          --
          -- "Keep alive" means give them an Exported flag, so that the
          -- simplifier does not discard them as dead code, and so that they
          -- are exposed in the interface file (but not to export to the
          -- user).
          --
          -- Some things, like dict-fun Ids and default-method Ids are "born"
          -- with the Exported flag on, for exactly the above reason, but some
          -- we only discover as we go.  Specifically:
          --
          --   * The to/from functions for generic data types
          --
          --   * Top-level variables appearing free in the RHS of an orphan
          --     rule
          --
          --   * Top-level variables appearing free in a TH bracket

        tcg_th_used :: TcRef Bool,
          -- ^ @True@ <=> Template Haskell syntax used.
          --
          -- We need this so that we can generate a dependency on the
          -- Template Haskell package, because the desugarer is going
          -- to emit loads of references to TH symbols.  The reference
          -- is implicit rather than explicit, so we have to zap a
          -- mutable variable.

        tcg_th_splice_used :: TcRef Bool,
          -- ^ @True@ <=> A Template Haskell splice was used.
          --
          -- Splices disable recompilation avoidance (see #481)

        tcg_dfun_n :: TcRef OccSet,
          -- ^ Allows us to choose unique DFun names.

        -- The next fields accumulate the payload of the module
        -- The binds, rules and foreign-decl fields are collected
        -- initially in un-zonked form and are finally zonked in tcRnSrcDecls

        tcg_rn_exports :: Maybe [Located (IE Name)],
                -- Nothing <=> no explicit export list

        tcg_rn_imports :: [LImportDecl Name],
                -- Keep the renamed imports regardless.  They are not
                -- voluminous and are needed if you want to report unused imports

        tcg_rn_decls :: Maybe (HsGroup Name),
          -- ^ Renamed decls, maybe.  @Nothing@ <=> Don't retain renamed
          -- decls.

        tcg_dependent_files :: TcRef [FilePath], -- ^ dependencies from addDependentFile

#ifdef GHCI
        tcg_th_topdecls :: TcRef [LHsDecl RdrName],
        -- ^ Top-level declarations from addTopDecls

        tcg_th_topnames :: TcRef NameSet,
        -- ^ Exact names bound in top-level declarations in tcg_th_topdecls

        tcg_th_modfinalizers :: TcRef [TH.Q ()],
        -- ^ Template Haskell module finalizers

        tcg_th_state :: TcRef (Map TypeRep Dynamic),
        -- ^ Template Haskell state
#endif /* GHCI */

        tcg_ev_binds :: Bag EvBind,     -- Top-level evidence bindings

        -- Things defined in this module, or (in GHCi)
        -- in the declarations for a single GHCi command.
        -- For the latter, see Note [The interactive package] in HscTypes
        tcg_binds     :: LHsBinds Id,       -- Value bindings in this module
        tcg_sigs      :: NameSet,           -- ...Top-level names that *lack* a signature
        tcg_imp_specs :: [LTcSpecPrag],     -- ...SPECIALISE prags for imported Ids
        tcg_warns     :: Warnings,          -- ...Warnings and deprecations
        tcg_anns      :: [Annotation],      -- ...Annotations
        tcg_tcs       :: [TyCon],           -- ...TyCons and Classes
        tcg_insts     :: [ClsInst],         -- ...Instances
        tcg_fam_insts :: [FamInst],         -- ...Family instances
        tcg_rules     :: [LRuleDecl Id],    -- ...Rules
        tcg_fords     :: [LForeignDecl Id], -- ...Foreign import & exports
        tcg_vects     :: [LVectDecl Id],    -- ...Vectorisation declarations
        tcg_patsyns   :: [PatSyn],          -- ...Pattern synonyms

        tcg_doc_hdr :: Maybe LHsDocString,  -- ^ Maybe Haddock header docs
        tcg_hpc :: AnyHpcUsage,             -- ^ @True@ if any part of the
                                            -- prog uses hpc instrumentation.

        tcg_self_boot :: SelfBootInfo,      -- ^ Whether this module has a
                                            -- corresponding hi-boot file

        tcg_main :: Maybe Name,             -- ^ The Name of the main
                                            -- function, if this module is
                                            -- the main module.

        tcg_safeInfer :: TcRef (Bool, WarningMessages),
          -- ^ Has the typechecker inferred this module as -XSafe (Safe Haskell)
          -- See Note [Safe Haskell Overlapping Instances Implementation],
          -- although this is used for more than just that failure case.

        tcg_tc_plugins :: [TcPluginSolver],
          -- ^ A list of user-defined plugins for the constraint solver.

        tcg_static_wc :: TcRef WantedConstraints
          -- ^ Wanted constraints of static forms.
    }
-- | The modules whose orphan instances are visible here: this module
-- itself plus the orphan modules recorded in its imports.
tcVisibleOrphanMods :: TcGblEnv -> ModuleSet
tcVisibleOrphanMods tcg_env = mkModuleSet (this_mod : orphan_mods)
  where
    this_mod    = tcg_mod tcg_env
    orphan_mods = imp_orphs (tcg_imports tcg_env)
-- Note [Signature parameters in TcGblEnv and DynFlags]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- When compiling signature files, we need to know which implementation
-- we've actually linked against the signature. There are three seemingly
-- redundant places where this information is stored: in DynFlags, there
-- is sigOf, and in TcGblEnv, there is tcg_sig_of and tcg_impl_rdr_env.
-- Here's the difference between each of them:
--
-- * DynFlags.sigOf is global per invocation of GHC. If we are compiling
-- with --make, there may be multiple signature files being compiled; in
-- which case this parameter is a map from local module name to implementing
-- Module.
--
-- * HscEnv.tcg_sig_of is global per the compilation of a single file, so
-- it is simply the result of looking up tcg_mod in the DynFlags.sigOf
--   parameter.  It is set up in TcRnMonad.initTc.  This prevents us
-- from having to repeatedly do a lookup in DynFlags.sigOf.
--
-- * HscEnv.tcg_impl_rdr_env is a RdrEnv that lets us look up names
-- according to the sig-of module.  It is set up in TcRnDriver.tcRnSignature.
-- Here is an example showing why we need this map:
--
-- module A where
-- a = True
--
-- module ASig where
-- import B
-- a :: Bool
--
-- module B where
-- b = False
--
-- When we compile ASig --sig-of main:A, the default
-- global RdrEnv (tcg_rdr_env) has an entry for b, but not for a
-- (we never imported A). So we have to look in a different environment
-- to actually get the original name.
--
-- By the way, why do we need to do the lookup; can't we just use A:a
-- as the name directly? Well, if A is reexporting the entity from another
-- module, then the original name needs to be the real original name:
--
-- module C where
-- a = True
--
-- module A(a) where
-- import C
instance ContainsModule TcGblEnv where
    -- tcg_mod is the module being compiled.
    extractModule = tcg_mod
data RecFieldEnv
  = RecFields (NameEnv [Name])  -- Maps a constructor name *in this module*
                                -- to the fields for that constructor
              NameSet           -- Set of all fields declared *in this module*;
                                -- used to suppress name-shadowing complaints
                                -- when using record wild cards
                                -- E.g.  let fld = e in C {..}
        -- This is used when dealing with ".." notation in record
        -- construction and pattern matching.
        -- The FieldEnv deals *only* with constructors defined in *this*
        -- module.  For imported modules, we get the same info from the
        -- TypeEnv

-- | Whether the module being compiled has a corresponding hi-boot file,
-- and if so what it defines.
data SelfBootInfo
  = NoSelfBoot    -- No corresponding hi-boot file
  | SelfBoot
       { sb_mds :: ModDetails   -- There was a hi-boot file,
       , sb_tcs :: NameSet      -- defining these TyCons,
       , sb_ids :: NameSet }    -- and these Ids
  -- We need this info to compute a safe approximation to
  -- recursive loops, to avoid infinite inlinings
{-
Note [Tracking unused binding and imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We gather two sorts of usage information
* tcg_dus (defs/uses)
Records *defined* Names (local, top-level)
and *used* Names (local or imported)
Used (a) to report "defined but not used"
(see RnNames.reportUnusedNames)
(b) to generate version-tracking usage info in interface
files (see MkIface.mkUsedNames)
This usage info is mainly gathered by the renamer's
gathering of free-variables
* tcg_used_rdrnames
Records used *imported* (not locally-defined) RdrNames
Used only to report unused import declarations
Notice that they are RdrNames, not Names, so we can
tell whether the reference was qualified or unqualified, which
        is essential in deciding whether a particular import decl
is unnecessary. This info isn't present in Names.
************************************************************************
* *
The local typechecker environment
* *
************************************************************************
Note [The Global-Env/Local-Env story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During type checking, we keep in the tcg_type_env
* All types and classes
* All Ids derived from types and classes (constructors, selectors)
At the end of type checking, we zonk the local bindings,
and as we do so we add to the tcg_type_env
* Locally defined top-level Ids
Why? Because they are now Ids not TcIds. This final GlobalEnv is
a) fed back (via the knot) to typechecking the
unfoldings of interface signatures
b) used in the ModDetails of this module
-}
data TcLclEnv           -- Changes as we move inside an expression
                        -- Discarded after typecheck/rename; not passed on to desugarer
  = TcLclEnv {
        tcl_loc        :: RealSrcSpan,     -- Source span
        tcl_ctxt       :: [ErrCtxt],       -- Error context, innermost on top
        tcl_tclvl      :: TcLevel,         -- Birthplace for new unification variables

        tcl_th_ctxt    :: ThStage,         -- Template Haskell context
        tcl_th_bndrs   :: ThBindEnv,       -- Binding level of in-scope Names
                                           -- defined in this module (not imported)

        tcl_arrow_ctxt :: ArrowCtxt,       -- Arrow-notation context

        tcl_rdr        :: LocalRdrEnv,     -- Local name envt
                -- Maintained during renaming, of course, but also during
                -- type checking, solely so that when renaming a Template-Haskell
                -- splice we have the right environment for the renamer.
                --
                --   Does *not* include global name envt; may shadow it
                --   Includes both ordinary variables and type variables;
                --   they are kept distinct because tyvar have a different
                --   occurrence constructor (Name.TvOcc)
                -- We still need the unsullied global name env so that
                --   we can look up record field names

        tcl_env  :: TcTypeEnv,    -- The local type environment:
                                  -- Ids and TyVars defined in this module

        tcl_bndrs :: TcIdBinderStack,   -- Used for reporting relevant bindings

        tcl_tidy :: TidyEnv,      -- Used for tidying types; contains all
                                  -- in-scope type variables (but not term variables)

        tcl_tyvars :: TcRef TcTyVarSet, -- The "global tyvars"
                -- Namely, the in-scope TyVars bound in tcl_env,
                -- plus the tyvars mentioned in the types of Ids bound
                -- in tcl_env (was tcl_lenv).
                -- Why mutable?  see notes with tcGetGlobalTyVars

        tcl_lie  :: TcRef WantedConstraints,    -- Place to accumulate type constraints
        tcl_errs :: TcRef Messages              -- Place to accumulate errors
    }

type TcTypeEnv = NameEnv TcTyThing

type ThBindEnv = NameEnv (TopLevelFlag, ThLevel)
   -- Domain = all Ids bound in this module (ie not imported)
   -- The TopLevelFlag tells if the binding is syntactically top level.
   -- We need to know this, because the cross-stage persistence story allows
   -- cross-stage at arbitrary types if the Id is bound at top level.
   --
   -- Nota bene: a ThLevel of 'outerLevel' is *not* the same as being
   -- bound at top level!  See Note [Template Haskell levels] in TcSplice
{- Note [Given Insts]
~~~~~~~~~~~~~~~~~~
Because of GADTs, we have to pass inwards the Insts provided by type signatures
and existential contexts. Consider
data T a where { T1 :: b -> b -> T [b] }
f :: Eq a => T a -> Bool
f (T1 x y) = [x]==[y]
The constructor T1 binds an existential variable 'b', and we need Eq [b].
Well, we have it, because Eq a refines to Eq [b], but we can only spot that if we
pass it inwards.
-}
-- | Type alias for 'IORef'; the convention is we'll use this for mutable
-- bits of data in 'TcGblEnv' which are updated during typechecking and
-- returned at the end.
type TcRef a = IORef a
-- ToDo: when should I refer to it as a 'TcId' instead of an 'Id'?
type TcId = Id
type TcIdSet = IdSet

---------------------------
-- The TcIdBinderStack
---------------------------

type TcIdBinderStack = [TcIdBinder]
   -- This is a stack of locally-bound ids, innermost on top
   -- Used only in error reporting (relevantBindings in TcError)

data TcIdBinder
  = TcIdBndr
       TcId
       TopLevelFlag    -- Tells whether the binding is syntactically top-level
                       -- (The monomorphic Ids for a recursive group count
                       --  as not-top-level for this purpose.)
instance Outputable TcIdBinder where
    -- Show the Id followed by its top-level flag in brackets.
    ppr (TcIdBndr bndr_id top_lvl) = ppr bndr_id <> brackets (ppr top_lvl)
---------------------------
-- Template Haskell stages and levels
---------------------------
data ThStage    -- See Note [Template Haskell state diagram] in TcSplice
  = Splice      -- Inside a top-level splice
                -- This code will be run *at compile time*;
                --   the result replaces the splice
                -- Binding level = 0
      Bool      -- True if in a typed splice, False otherwise

  | Comp        -- Ordinary Haskell code
                -- Binding level = 1

  | Brack       -- Inside brackets
      ThStage   --   Enclosing stage
      PendingStuff

data PendingStuff
  = RnPendingUntyped             -- Renaming the inside of an *untyped* bracket
      (TcRef [PendingRnSplice])  -- Pending splices in here

  | RnPendingTyped               -- Renaming the inside of a *typed* bracket

  | TcPending                    -- Typechecking the inside of a typed bracket
      (TcRef [PendingTcSplice])  --   Accumulate pending splices here
      (TcRef WantedConstraints)  --     and type constraints here
-- | The stages used when starting to check top-level constructs.
topStage, topAnnStage, topSpliceStage :: ThStage
topStage       = Comp           -- Ordinary code
topAnnStage    = Splice False   -- Annotations are checked as untyped splices
topSpliceStage = Splice False   -- Top-level splices are untyped
instance Outputable ThStage where
    ppr stage = case stage of
        Splice _  -> text "Splice"
        Comp      -> text "Comp"
        Brack s _ -> text "Brack" <> parens (ppr s)
type ThLevel = Int
    -- NB: see Note [Template Haskell levels] in TcSplice
    -- Incremented when going inside a bracket,
    -- decremented when going inside a splice
    -- NB: ThLevel is one greater than the 'n' in Fig 2 of the
    --     original "Template meta-programming for Haskell" paper

impLevel, outerLevel :: ThLevel
impLevel = 0    -- Imported things; they can be used inside a top level splice
outerLevel = 1  -- Things defined outside brackets
-- | The binding level of a Template Haskell stage: 0 inside a splice,
-- 1 for ordinary code, and one more for each enclosing bracket.
thLevel :: ThStage -> ThLevel
thLevel stage = case stage of
    Splice _            -> 0
    Comp                -> 1
    Brack enclosing _   -> thLevel enclosing + 1
---------------------------
-- Arrow-notation context
---------------------------
{- Note [Escaping the arrow scope]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In arrow notation, a variable bound by a proc (or enclosed let/kappa)
is not in scope to the left of an arrow tail (-<) or the head of (|..|).
For example
proc x -> (e1 -< e2)
Here, x is not in scope in e1, but it is in scope in e2. This can get
a bit complicated:
let x = 3 in
proc y -> (proc z -> e1) -< e2
Here, x and z are in scope in e1, but y is not.
We implement this by
recording the environment when passing a proc (using newArrowScope),
and returning to that (using escapeArrowScope) on the left of -< and the
head of (|..|).
All this can be dealt with by the *renamer*. But the type checker needs
to be involved too. Example (arrowfail001)
class Foo a where foo :: a -> ()
data Bar = forall a. Foo a => Bar a
get :: Bar -> ()
get = proc x -> case x of Bar a -> foo -< a
Here the call of 'foo' gives rise to a (Foo a) constraint that should not
be captured by the pattern match on 'Bar'. Rather it should join the
constraints from further out. So we must capture the constraint bag
from further out in the ArrowCtxt that we push inwards.
-}
-- | Arrow-notation context; see Note [Escaping the arrow scope].
data ArrowCtxt
  = NoArrowCtxt         -- Not inside arrow notation
  | ArrowCtxt LocalRdrEnv (TcRef WantedConstraints)
        -- The environment and constraint bag captured on entering a
        -- proc, to return to on the left of -< and the head of (|..|)
---------------------------
-- TcTyThing
---------------------------
-- | A typecheckable thing available in a local context.  Could be
-- 'AGlobal' 'TyThing', but also lexically scoped variables, etc.
-- See 'TcEnv' for how to retrieve a 'TyThing' given a 'Name'.
data TcTyThing
  = AGlobal TyThing             -- Used only in the return type of a lookup

  | ATcId {            -- Ids defined in this module; may not be fully zonked
        tct_id     :: TcId,
        tct_closed :: TopLevelFlag }   -- See Note [Bindings with closed types]

  | ATyVar Name TcTyVar         -- The type variable to which the lexically scoped type
                                -- variable is bound. We only need the Name
                                -- for error-message purposes; it is the corresponding
                                -- Name in the domain of the envt

  | AThing TcKind               -- Used temporarily, during kind checking, for the
                                -- tycons and classes in this recursive group
                                -- Can be a mono-kind or a poly-kind; in TcTyClsDcls see
                                -- Note [Type checking recursive type and class declarations]

  | APromotionErr PromotionErr

data PromotionErr
  = TyConPE        -- TyCon used in a kind before we are ready
                   --     data T :: T -> * where ...
  | ClassPE        -- Ditto Class

  | FamDataConPE   -- Data constructor for a data family
                   -- See Note [AFamDataCon: not promoting data family constructors] in TcRnDriver

  | RecDataConPE   -- Data constructor in a recursive loop
                   -- See Note [ARecDataCon: recursion and promoting data constructors] in TcTyClsDecls

  | NoDataKinds    -- -XDataKinds not enabled
instance Outputable TcTyThing where     -- Debugging only
    ppr (AGlobal g) = pprTyThing g
    -- Use a record pattern instead of an as-pattern so each field is
    -- mentioned once.
    ppr (ATcId { tct_id = tc_id, tct_closed = closed })
        = text "Identifier"
            <> brackets (ppr tc_id <> dcolon
                         <> ppr (varType tc_id) <> comma
                         <+> ppr closed)
    ppr (ATyVar n tv) = text "Type variable" <+> quotes (ppr n) <+> equals <+> ppr tv
    ppr (AThing k) = text "AThing" <+> ppr k
    ppr (APromotionErr err) = text "APromotionErr" <+> ppr err
instance Outputable PromotionErr where
    -- Each constructor simply prints its own name.
    ppr pe = text $ case pe of
        ClassPE      -> "ClassPE"
        TyConPE      -> "TyConPE"
        FamDataConPE -> "FamDataConPE"
        RecDataConPE -> "RecDataConPE"
        NoDataKinds  -> "NoDataKinds"
-- | Short description of what kind of thing a 'TcTyThing' is, for
-- use in error messages.
pprTcTyThingCategory :: TcTyThing -> SDoc
pprTcTyThingCategory thing = case thing of
    AGlobal g        -> pprTyThingCategory g
    ATyVar {}        -> ptext (sLit "Type variable")
    ATcId {}         -> ptext (sLit "Local identifier")
    AThing {}        -> ptext (sLit "Kinded thing")
    APromotionErr pe -> pprPECategory pe
-- | Category of the thing that failed to promote, for error messages.
pprPECategory :: PromotionErr -> SDoc
pprPECategory pe = case pe of
    ClassPE      -> ptext (sLit "Class")
    TyConPE      -> ptext (sLit "Type constructor")
    FamDataConPE -> ptext (sLit "Data constructor")
    RecDataConPE -> ptext (sLit "Data constructor")
    NoDataKinds  -> ptext (sLit "Data constructor")
{- Note [Bindings with closed types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let g ys = map not ys
in ...
Can we generalise 'g' under the OutsideIn algorithm? Yes,
because all g's free variables are top-level; that is they themselves
have no free type variables, and it is the type variables in the
environment that makes things tricky for OutsideIn generalisation.
Definition:
A variable is "closed", and has tct_closed set to TopLevel,
iff
a) all its free variables are imported, or are let-bound with closed types
b) generalisation is not restricted by the monomorphism restriction
Under OutsideIn we are free to generalise a closed let-binding.
This is an extension compared to the JFP paper on OutsideIn, which
used "top-level" as a proxy for "closed". (It's not a good proxy
anyway -- the MR can make a top-level binding with a free type
variable.)
Note that:
* A top-level binding may not be closed, if it suffers from the MR
* A nested binding may be closed (eg 'g' in the example we started with)
Indeed, that's the point; whether a function is defined at top level
or nested is orthogonal to the question of whether or not it is closed
* A binding may be non-closed because it mentions a lexically scoped
*type variable* Eg
f :: forall a. blah
f x = let g y = ...(y::a)...
-}
-- | One frame of the error-message context stack.
type ErrCtxt = (Bool, TidyEnv -> TcM (TidyEnv, MsgDoc))
        -- Monadic so that we have a chance
        -- to deal with bound type variables just before error
        -- message construction
        -- Bool:  True <=> this is a landmark context; do not
        --                 discard it when trimming for display
{-
************************************************************************
* *
Operations over ImportAvails
* *
************************************************************************
-}
-- | 'ImportAvails' summarises what was imported from where, irrespective of
-- whether the imported things are actually used or not.  It is used:
--
--  * when processing the export list,
--
--  * when constructing usage info for the interface file,
--
--  * to identify the list of directly imported modules for initialisation
--    purposes and for optimised overlap checking of family instances,
--
--  * when figuring out what things are really unused
--
data ImportAvails
   = ImportAvails {
        imp_mods :: ImportedMods,
          --      = ModuleEnv [(ModuleName, Bool, SrcSpan, Bool)],
          -- ^ Domain is all directly-imported modules
          -- The 'ModuleName' is what the module was imported as, e.g. in
          -- @
          --     import Foo as Bar
          -- @
          -- it is @Bar@.
          --
          -- The 'Bool' means:
          --
          --  - @True@ => import was @import Foo ()@
          --
          --  - @False@ => import was some other form
          --
          -- Used
          --
          --   (a) to help construct the usage information in the interface
          --       file; if we import something we need to recompile if the
          --       export version changes
          --
          --   (b) to specify what child modules to initialise
          --
          -- We need a full ModuleEnv rather than a ModuleNameEnv here,
          -- because we might be importing modules of the same name from
          -- different packages. (currently not the case, but might be in the
          -- future).

        imp_dep_mods :: ModuleNameEnv (ModuleName, IsBootInterface),
          -- ^ Home-package modules needed by the module being compiled
          --
          -- It doesn't matter whether any of these dependencies
          -- are actually /used/ when compiling the module; they
          -- are listed if they are below it at all.  For
          -- example, suppose M imports A which imports X.  Then
          -- compiling M might not need to consult X.hi, but X
          -- is still listed in M's dependencies.

        imp_dep_pkgs :: [PackageKey],
          -- ^ Packages needed by the module being compiled, whether directly,
          -- or via other modules in this package, or via modules imported
          -- from other packages.

        imp_trust_pkgs :: [PackageKey],
          -- ^ This is strictly a subset of imp_dep_pkgs and records the
          -- packages the current module needs to trust for Safe Haskell
          -- compilation to succeed. A package is required to be trusted if
          -- we are dependent on a trustworthy module in that package.
          -- While perhaps making imp_dep_pkgs a tuple of (PackageKey, Bool)
          -- where True for the bool indicates the package is required to be
          -- trusted is the more logical  design, doing so complicates a lot
          -- of code not concerned with Safe Haskell.
          -- See Note [RnNames . Tracking Trust Transitively]

        imp_trust_own_pkg :: Bool,
          -- ^ Do we require that our own package is trusted?
          -- This is to handle efficiently the case where a Safe module imports
          -- a Trustworthy module that resides in the same package as it.
          -- See Note [RnNames . Trust Own Package]

        imp_orphs :: [Module],
          -- ^ Orphan modules below us in the import tree (and maybe including
          -- us for imported modules)

        imp_finsts :: [Module]
          -- ^ Family instance modules below us in the import tree (and maybe
          -- including us for imported modules)
      }
-- | Build a module-dependency environment from a list of
-- (module name, is-boot-interface) pairs.
mkModDeps :: [(ModuleName, IsBootInterface)]
          -> ModuleNameEnv (ModuleName, IsBootInterface)
mkModDeps deps = go emptyUFM deps
  where
    -- Insert the pairs left to right, keyed by module name
    go env []                       = env
    go env (elt@(mod_nm, _) : rest) = go (addToUFM env mod_nm elt) rest
-- | An 'ImportAvails' recording no imports at all; this is the
-- identity for 'plusImportAvails'.
emptyImportAvails :: ImportAvails
emptyImportAvails
  = ImportAvails { imp_mods          = emptyModuleEnv
                 , imp_dep_mods      = emptyUFM
                 , imp_dep_pkgs      = []
                 , imp_trust_pkgs    = []
                 , imp_trust_own_pkg = False
                 , imp_orphs         = []
                 , imp_finsts        = [] }
-- | Union two ImportAvails
--
-- This function is a key part of Import handling, basically
-- for each import we create a separate ImportAvails structure
-- and then union them all together with this function.
plusImportAvails ::  ImportAvails ->  ImportAvails ->  ImportAvails
plusImportAvails
  (ImportAvails { imp_mods = mods1,
                  imp_dep_mods = dmods1, imp_dep_pkgs = dpkgs1,
                  imp_trust_pkgs = tpkgs1, imp_trust_own_pkg = tself1,
                  imp_orphs = orphs1, imp_finsts = finsts1 })
  (ImportAvails { imp_mods = mods2,
                  imp_dep_mods = dmods2, imp_dep_pkgs = dpkgs2,
                  imp_trust_pkgs = tpkgs2, imp_trust_own_pkg = tself2,
                  imp_orphs = orphs2, imp_finsts = finsts2 })
  = ImportAvails { imp_mods          = plusModuleEnv_C (++) mods1 mods2,
                   imp_dep_mods      = plusUFM_C plus_mod_dep dmods1 dmods2,
                   imp_dep_pkgs      = dpkgs1 `unionLists` dpkgs2,
                   imp_trust_pkgs    = tpkgs1 `unionLists` tpkgs2,
                   imp_trust_own_pkg = tself1 || tself2,
                   imp_orphs         = orphs1 `unionLists` orphs2,
                   imp_finsts        = finsts1 `unionLists` finsts2 }
  where
    plus_mod_dep (m1, boot1) (m2, boot2)
      = WARN( not (m1 == m2), (ppr m1 <+> ppr m2) $$ (ppr boot1 <+> ppr boot2) )
                -- Check mod-names match
        (m1, boot1 && boot2) -- If either side can "see" a non-hi-boot interface, use that
{-
************************************************************************
* *
\subsection{Where from}
* *
************************************************************************
The @WhereFrom@ type controls where the renamer looks for an interface file
-}
-- | Where the renamer should look for an interface file for a module.
data WhereFrom
  = ImportByUser IsBootInterface        -- Ordinary user import (perhaps {-# SOURCE #-})
  | ImportBySystem                      -- Non user import.
  | ImportByPlugin                      -- Importing a plugin;
                                        -- See Note [Care with plugin imports] in LoadIface
instance Outputable WhereFrom where
  -- Mirror the pragma syntax a user would write
  ppr ImportBySystem         = ptext (sLit "{- SYSTEM -}")
  ppr ImportByPlugin         = ptext (sLit "{- PLUGIN -}")
  ppr (ImportByUser is_boot)
    | is_boot                = ptext (sLit "{- SOURCE -}")
    | otherwise              = empty
{-
************************************************************************
* *
* Canonical constraints *
* *
* These are the constraints the low-level simplifier works with *
* *
************************************************************************
-}
-- The syntax of xi types:
-- xi ::= a | T xis | xis -> xis | ... | forall a. tau
-- Two important notes:
-- (i) No type families, unless we are under a ForAll
-- (ii) Note that xi types can contain unexpanded type synonyms;
-- however, the (transitive) expansions of those type synonyms
-- will not contain any type functions, unless we are under a ForAll.
-- We enforce the structure of Xi types when we flatten (TcCanonical)
-- | A \"xi\" type: the flattened form the constraint solver works with.
-- See the grammar of xi types in the comments just above.
type Xi = Type       -- In many comments, "xi" ranges over Xi

-- | A bag of constraints.
type Cts = Bag Ct
-- | A constraint, in one of the canonical forms manipulated by the
-- solver, or 'CNonCanonical' for a not-yet-classified one.  Every
-- form carries its 'CtEvidence' in @cc_ev@.
data Ct
  -- Atomic canonical constraints
  = CDictCan {  -- e.g.  Num xi
      cc_ev :: CtEvidence,   -- See Note [Ct/evidence invariant]
      cc_class  :: Class,
      cc_tyargs :: [Xi]      -- cc_tyargs are function-free, hence Xi
    }

  | CIrredEvCan {  -- These stand for yet-unusable predicates
      cc_ev :: CtEvidence    -- See Note [Ct/evidence invariant]
        -- The ctev_pred of the evidence is
        -- of form   (tv xi1 xi2 ... xin)
        --      or   (tv1 ~ ty2)   where the CTyEqCan  kind invariant fails
        --      or   (F tys ~ ty)  where the CFunEqCan kind invariant fails
        -- See Note [CIrredEvCan constraints]
    }

  | CTyEqCan {  -- tv ~ rhs
       -- Invariants:
       --   * See Note [Applying the inert substitution] in TcFlatten
       --   * tv not in tvs(rhs)   (occurs check)
       --   * If tv is a TauTv, then rhs has no foralls
       --       (this avoids substituting a forall for the tyvar in other types)
       --   * typeKind ty `subKind` typeKind tv
       --       See Note [Kind orientation for CTyEqCan]
       --   * rhs is not necessarily function-free,
       --       but it has no top-level function.
       --     E.g. a ~ [F b]  is fine
       --     but  a ~ F b    is not
       --   * If the equality is representational, rhs has no top-level newtype
       --     See Note [No top-level newtypes on RHS of representational
       --     equalities] in TcCanonical
       --   * If rhs is also a tv, then it is oriented to give best chance of
       --     unification happening; eg if rhs is touchable then lhs is too
      cc_ev :: CtEvidence,    -- See Note [Ct/evidence invariant]
      cc_tyvar  :: TcTyVar,
      cc_rhs    :: TcType,    -- Not necessarily function-free (hence not Xi)
                              -- See invariants above
      cc_eq_rel :: EqRel
    }

  | CFunEqCan {  -- F xis ~ fsk
       -- Invariants:
       --   * isTypeFamilyTyCon cc_fun
       --   * typeKind (F xis) = tyVarKind fsk
       --   * always Nominal role
      cc_ev     :: CtEvidence,  -- See Note [Ct/evidence invariant]
      cc_fun    :: TyCon,       -- A type function
      cc_tyargs :: [Xi],        -- cc_tyargs are function-free (hence Xi)
        -- Either under-saturated or exactly saturated
        --    *never* over-saturated (because if so
        --    we should have decomposed)
      cc_fsk    :: TcTyVar      -- [Given]  always a FlatSkol skolem
                                -- [Wanted] always a FlatMetaTv unification variable
        -- See Note [The flattening story] in TcFlatten
    }

  | CNonCanonical {        -- See Note [NonCanonical Semantics]
      cc_ev :: CtEvidence
    }

  | CHoleCan {             -- See Note [Hole constraints]
       -- Treated as an "insoluble" constraint
       -- See Note [Insoluble constraints]
      cc_ev   :: CtEvidence,
      cc_occ  :: OccName,   -- The name of this hole
      cc_hole :: HoleSort   -- The sort of this hole (expr, type, ...)
    }
-- | Used to indicate which sort of hole we have.
data HoleSort = ExprHole  -- ^ A hole in an expression (TypedHoles)
              | TypeHole  -- ^ A hole in a type (PartialTypeSignatures)
{-
Note [Hole constraints]
~~~~~~~~~~~~~~~~~~~~~~~
CHoleCan constraints are used for two kinds of holes,
distinguished by cc_hole:
* For holes in expressions
e.g. f x = g _ x
* For holes in type signatures
e.g. f :: _ -> _
f x = [x,True]
Note [Kind orientation for CTyEqCan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given an equality (t:* ~ s:Open), we can't solve it by updating t:=s,
regardless of how touchable 't' is, because the kinds don't work.
Instead we absolutely must re-orient it. Reason: if that gets into the
inert set we'll start replacing t's by s's, and that might make a
kind-correct type into a kind error. After re-orienting,
we may be able to solve by updating s:=t.
Hence in a CTyEqCan, (t:k1 ~ xi:k2) we require that k2 is a subkind of k1.
If the two have incompatible kinds, we just don't use a CTyEqCan at all.
See Note [Equalities with incompatible kinds] in TcCanonical
We can't require *equal* kinds, because
* wanted constraints don't necessarily have identical kinds
eg alpha::? ~ Int
* a solved wanted constraint becomes a given
Note [Kind orientation for CFunEqCan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For (F xis ~ rhs) we require that kind(lhs) is a subkind of kind(rhs).
This really only matters when rhs is an Open type variable (since only type
variables have Open kinds):
F ty ~ (a:Open)
which can happen, say, from
f :: F a b
f = undefined -- The a:Open comes from instantiating 'undefined'
Note that the kind invariant is maintained by rewriting.
Eg wanted1 rewrites wanted2; if both were compatible kinds before,
wanted2 will be afterwards. Similarly givens.
Caveat:
- Givens from higher-rank, such as:
type family T b :: * -> * -> *
type instance T Bool = (->)
f :: forall a. ((T a ~ (->)) => ...) -> a -> ...
flop = f (...) True
Whereas we would be able to apply the type instance, we would not be able to
use the given (T Bool ~ (->)) in the body of 'flop'
Note [CIrredEvCan constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CIrredEvCan constraints are used for constraints that are "stuck"
- we can't solve them (yet)
- we can't use them to solve other constraints
- but they may become soluble if we substitute for some
of the type variables in the constraint
Example 1: (c Int), where c :: * -> Constraint. We can't do anything
with this yet, but if later c := Num, *then* we can solve it
Example 2: a ~ b, where a :: *, b :: k, where k is a kind variable
We don't want to use this to substitute 'b' for 'a', in case
'k' is subsequently unified with (say) *->*, because then
we'd have ill-kinded types floating about. Rather we want
to defer using the equality altogether until 'k' get resolved.
Note [Ct/evidence invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If ct :: Ct, then extra fields of 'ct' cache precisely the ctev_pred field
of (cc_ev ct), and is fully rewritten wrt the substitution. Eg for CDictCan,
ctev_pred (cc_ev ct) = (cc_class ct) (cc_tyargs ct)
This holds by construction; look at the unique place where CDictCan is
built (in TcCanonical).
In contrast, the type of the evidence *term* (ccev_evtm or ctev_evar) in
the evidence may *not* be fully zonked; we are careful not to look at it
during constraint solving. See Note [Evidence field of CtEvidence]
-}
-- | Wrap a piece of evidence as an as-yet-unclassified constraint.
mkNonCanonical :: CtEvidence -> Ct
mkNonCanonical ev = CNonCanonical { cc_ev = ev }

-- | Forget a constraint's canonical classification, keeping only its evidence.
mkNonCanonicalCt :: Ct -> Ct
mkNonCanonicalCt ct = mkNonCanonical (cc_ev ct)
-- | The evidence carried by a constraint.
ctEvidence :: Ct -> CtEvidence
ctEvidence ct = cc_ev ct

-- | Where the constraint arose.
ctLoc :: Ct -> CtLoc
ctLoc ct = ctEvLoc (ctEvidence ct)

-- | Replace the location stored in a constraint's evidence.
setCtLoc :: Ct -> CtLoc -> Ct
setCtLoc ct loc = ct { cc_ev = ev { ctev_loc = loc } }
  where
    ev = cc_ev ct

-- | The origin recorded in the constraint's location.
ctOrigin :: Ct -> CtOrigin
ctOrigin ct = ctLocOrigin (ctLoc ct)

ctPred :: Ct -> PredType
-- See Note [Ct/evidence invariant]
ctPred ct = ctEvPred (ctEvidence ct)

-- | Get the flavour of the given 'Ct'
ctFlavour :: Ct -> CtFlavour
ctFlavour ct = ctEvFlavour (ctEvidence ct)

-- | Get the equality relation for the given 'Ct'
ctEqRel :: Ct -> EqRel
ctEqRel ct = ctEvEqRel (ctEvidence ct)
dropDerivedWC :: WantedConstraints -> WantedConstraints
-- See Note [Dropping derived constraints]
dropDerivedWC wc
  = wc { wc_simple = dropDerivedSimples (wc_simple wc)
       , wc_insol  = dropDerivedInsols  (wc_insol wc) }
    -- The wc_impl implications are already (recursively) filtered
-- | Keep only the Wanted constraints; the simples contain only
-- Wanted or Derived, so this discards exactly the Deriveds.
dropDerivedSimples :: Cts -> Cts
dropDerivedSimples = filterBag isWantedCt

dropDerivedInsols :: Cts -> Cts
-- See Note [Dropping derived constraints]
dropDerivedInsols = filterBag keep
  where
    -- insols can include Given
    keep ct
      | isDerivedCt ct = not (isDroppableDerivedLoc (ctLoc ct))
      | otherwise      = True
isDroppableDerivedLoc :: CtLoc -> Bool
-- Note [Dropping derived constraints]
isDroppableDerivedLoc loc = droppable (ctLocOrigin loc)
  where
    -- These origins carry information we must not lose
    droppable (KindEqOrigin {})  = False
    droppable (GivenOrigin {})   = False
    droppable (FunDepOrigin1 {}) = False
    droppable (FunDepOrigin2 {}) = False
    droppable _                  = True
{- Note [Dropping derived constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we discard derived constraints at the end of constraint solving;
see dropDerivedWC. For example
* If we have an unsolved [W] (Ord a), we don't want to complain about
an unsolved [D] (Eq a) as well.
* If we have [W] a ~ Int, [W] a ~ Bool, improvement will generate
[D] Int ~ Bool, and we don't want to report that because it's incomprehensible.
That is why we don't rewrite wanteds with wanteds!
But (tiresomely) we do keep *some* Derived insolubles:
* Insoluble kind equalities (e.g. [D] * ~ (* -> *)) may arise from
a type equality a ~ Int#, say. In future they'll be Wanted, not Derived,
but at the moment they are Derived.
* Insoluble derived equalities (e.g. [D] Int ~ Bool) may arise from
functional dependency interactions, either between Givens or
Wanteds. It seems sensible to retain these:
- For Givens they reflect unreachable code
- For Wanteds it is arguably better to get a fundep error than
a no-instance error (Trac #9612)
Moreover, we keep *all* derived insolubles under some circumstances:
* They are looked at by simplifyInfer, to decide whether to
generalise. Example: [W] a ~ Int, [W] a ~ Bool
We get [D] Int ~ Bool, and indeed the constraints are insoluble,
and we want simplifyInfer to see that, even though we don't
ultimately want to generate an (inexplicable) error message from it.
To distinguish these cases we use the CtOrigin.
************************************************************************
* *
CtEvidence
The "flavor" of a canonical constraint
* *
************************************************************************
-}
-- | Is the constraint's evidence Wanted?
isWantedCt :: Ct -> Bool
isWantedCt ct = isWanted (cc_ev ct)

-- | Is the constraint's evidence Given?
isGivenCt :: Ct -> Bool
isGivenCt ct = isGiven (cc_ev ct)

-- | Is the constraint's evidence Derived?
isDerivedCt :: Ct -> Bool
isDerivedCt ct = isDerived (cc_ev ct)
-- | Is this constraint a canonical type-variable equality (@tv ~ rhs@)?
isCTyEqCan :: Ct -> Bool
isCTyEqCan (CTyEqCan {}) = True
isCTyEqCan _             = False
  -- NB: the previous explicit (CFunEqCan {} -> False) equation was
  -- redundant; it is subsumed by the wildcard, with identical behaviour.
-- | If the constraint is a canonical dictionary, return its class.
isCDictCan_Maybe :: Ct -> Maybe Class
isCDictCan_Maybe ct = case ct of
  CDictCan { cc_class = cls } -> Just cls
  _                           -> Nothing

isCIrredEvCan :: Ct -> Bool
isCIrredEvCan ct = case ct of
  CIrredEvCan {} -> True
  _              -> False

-- | If the constraint is a canonical family application
-- equality, return the family tycon and its arguments.
isCFunEqCan_maybe :: Ct -> Maybe (TyCon, [Type])
isCFunEqCan_maybe ct = case ct of
  CFunEqCan { cc_fun = tc, cc_tyargs = xis } -> Just (tc, xis)
  _                                          -> Nothing

isCFunEqCan :: Ct -> Bool
isCFunEqCan ct = case ct of
  CFunEqCan {} -> True
  _            -> False

isCNonCanonical :: Ct -> Bool
isCNonCanonical ct = case ct of
  CNonCanonical {} -> True
  _                -> False

isHoleCt :: Ct -> Bool
isHoleCt ct = case ct of
  CHoleCan {} -> True
  _           -> False
isOutOfScopeCt :: Ct -> Bool
-- A Hole that does not have a leading underscore is
-- simply an out-of-scope variable, and we treat that
-- a bit differently when it comes to error reporting
isOutOfScopeCt ct = case ct of
  CHoleCan { cc_occ = occ } -> not (startsWithUnderscore occ)
  _                         -> False

-- | A hole standing in for an expression (TypedHoles)?
isExprHoleCt :: Ct -> Bool
isExprHoleCt ct = case ct of
  CHoleCan { cc_hole = ExprHole } -> True
  _                               -> False

-- | A hole standing in for a type (PartialTypeSignatures)?
isTypeHoleCt :: Ct -> Bool
isTypeHoleCt ct = case ct of
  CHoleCan { cc_hole = TypeHole } -> True
  _                               -> False
instance Outputable Ct where
  -- Print the evidence, then the constructor name for debugging
  ppr ct = ppr (cc_ev ct) <+> parens (text (ct_sort ct))
    where
      ct_sort :: Ct -> String
      ct_sort (CTyEqCan {})      = "CTyEqCan"
      ct_sort (CFunEqCan {})     = "CFunEqCan"
      ct_sort (CNonCanonical {}) = "CNonCanonical"
      ct_sort (CDictCan {})      = "CDictCan"
      ct_sort (CIrredEvCan {})   = "CIrredEvCan"
      ct_sort (CHoleCan {})      = "CHoleCan"
-- Small combinators over bags of constraints; all are thin
-- renamings of the corresponding Bag operations.

singleCt :: Ct -> Cts
singleCt ct = unitBag ct

andCts :: Cts -> Cts -> Cts
andCts cts1 cts2 = cts1 `unionBags` cts2

listToCts :: [Ct] -> Cts
listToCts cts = listToBag cts

ctsElts :: Cts -> [Ct]
ctsElts cts = bagToList cts

consCts :: Ct -> Cts -> Cts
consCts ct cts = ct `consBag` cts

snocCts :: Cts -> Ct -> Cts
snocCts cts ct = cts `snocBag` ct

extendCtsList :: Cts -> [Ct] -> Cts
extendCtsList cts [] = cts   -- avoid rebuilding for the common empty case
extendCtsList cts xs = cts `unionBags` listToBag xs

andManyCts :: [Cts] -> Cts
andManyCts ctss = unionManyBags ctss

emptyCts :: Cts
emptyCts = emptyBag

isEmptyCts :: Cts -> Bool
isEmptyCts cts = isEmptyBag cts

pprCts :: Cts -> SDoc
pprCts cts = vcat [ ppr ct | ct <- bagToList cts ]
{-
************************************************************************
* *
Wanted constraints
These are forced to be in TcRnTypes because
TcLclEnv mentions WantedConstraints
WantedConstraint mentions CtLoc
CtLoc mentions ErrCtxt
ErrCtxt mentions TcM
* *
v%************************************************************************
-}
-- | The constraints gathered (and not yet solved) while checking a
-- chunk of code: flat wanteds, implications, and insolubles.
data WantedConstraints
  = WC { wc_simple :: Cts              -- Unsolved constraints, all wanted
       , wc_impl   :: Bag Implication  -- Unsolved implication constraints
       , wc_insol  :: Cts              -- Insoluble constraints, can be
                                       -- wanted, given, or derived
                                       -- See Note [Insoluble constraints]
       }
-- | No constraints at all; identity for 'andWC'.
emptyWC :: WantedConstraints
emptyWC = WC { wc_simple = emptyBag
             , wc_impl   = emptyBag
             , wc_insol  = emptyBag }

-- | Make a 'WantedConstraints' whose simples are the given bits of
-- evidence, wrapped as non-canonical constraints.
mkSimpleWC :: [CtEvidence] -> WantedConstraints
mkSimpleWC cts
  = emptyWC { wc_simple = listToBag [ mkNonCanonical ev | ev <- cts ] }

-- | True iff all three component bags are empty.
isEmptyWC :: WantedConstraints -> Bool
isEmptyWC wc = isEmptyBag (wc_simple wc)
            && isEmptyBag (wc_impl wc)
            && isEmptyBag (wc_insol wc)
-- | Pointwise union of two 'WantedConstraints'.
andWC :: WantedConstraints -> WantedConstraints -> WantedConstraints
andWC wc1 wc2
  = WC { wc_simple = wc_simple wc1 `unionBags` wc_simple wc2
       , wc_impl   = wc_impl   wc1 `unionBags` wc_impl   wc2
       , wc_insol  = wc_insol  wc1 `unionBags` wc_insol  wc2 }

-- | Union a whole list of 'WantedConstraints'.
unionsWC :: [WantedConstraints] -> WantedConstraints
unionsWC wcs = foldr andWC emptyWC wcs
-- | Add flat constraints to the wc_simple set.
addSimples :: WantedConstraints -> Bag Ct -> WantedConstraints
addSimples wc new_cts
  = wc { wc_simple = wc_simple wc `unionBags` new_cts }
    -- Consider: put the new constraints at the front, so they get solved first

-- | Add implication constraints to the wc_impl set.
addImplics :: WantedConstraints -> Bag Implication -> WantedConstraints
addImplics wc new_implics
  = wc { wc_impl = wc_impl wc `unionBags` new_implics }

-- | Add insoluble constraints to the wc_insol set.
addInsols :: WantedConstraints -> Bag Ct -> WantedConstraints
addInsols wc new_cts
  = wc { wc_insol = wc_insol wc `unionBags` new_cts }
isInsolubleStatus :: ImplicStatus -> Bool
isInsolubleStatus status = case status of
  IC_Insoluble -> True
  _            -> False

-- | An implication is insoluble iff its status says so.
insolubleImplic :: Implication -> Bool
insolubleImplic implic = isInsolubleStatus (ic_status implic)

-- | Does this 'WantedConstraints' contain anything truly insoluble,
-- either directly or inside a nested implication?
insolubleWC :: TcLevel -> WantedConstraints -> Bool
insolubleWC tc_lvl (WC { wc_impl = implics, wc_insol = insols })
  | anyBag (trulyInsoluble tc_lvl) insols = True
  | otherwise                             = anyBag insolubleImplic implics
trulyInsoluble :: TcLevel -> Ct -> Bool
-- The constraint is in the wc_insol set,
-- but we do not treat as truly insoluble
--  a) type-holes, arising from PartialTypeSignatures,
--  b) an out-of-scope variable
-- Yuk!
trulyInsoluble tc_lvl insol
  | isOutOfScopeCt insol = True
  | otherwise            = isRigidEqPred tc_lvl (classifyPredType (ctPred insol))
instance Outputable WantedConstraints where
  -- Empty component bags are suppressed by ppr_bag
  ppr wc = ptext (sLit "WC") <+> braces (vcat fields)
    where
      fields = [ ppr_bag (ptext (sLit "wc_simple")) (wc_simple wc)
               , ppr_bag (ptext (sLit "wc_insol"))  (wc_insol wc)
               , ppr_bag (ptext (sLit "wc_impl"))   (wc_impl wc) ]
-- | Print @doc = <contents of bag>@, or nothing if the bag is empty.
ppr_bag :: Outputable a => SDoc -> Bag a -> SDoc
ppr_bag doc bag
  | isEmptyBag bag = empty
  | otherwise      = hang (doc <+> equals) 2 contents
  where
    contents = foldrBag (\x rest -> ppr x $$ rest) empty bag
{-
************************************************************************
* *
Implication constraints
* *
************************************************************************
-}
-- | An implication constraint: roughly
-- @forall ic_skols. ic_given => ic_wanted@, plus the environment
-- needed to report errors arising inside it.
data Implication
  = Implic {
      ic_tclvl :: TcLevel,       -- TcLevel: unification variables
                                 -- free in the environment

      ic_skols  :: [TcTyVar],    -- Introduced skolems
      ic_info   :: SkolemInfo,   -- See Note [Skolems in an implication]
                                 -- See Note [Shadowing in a constraint]

      ic_given  :: [EvVar],      -- Given evidence variables
                                 --   (order does not matter)
                                 -- See Invariant (GivenInv) in TcType

      ic_no_eqs :: Bool,         -- True  <=> ic_givens have no equalities, for sure
                                 -- False <=> ic_givens might have equalities

      ic_env    :: TcLclEnv,     -- Gives the source location and error context
                                 -- for the implication, and hence for all the
                                 -- given evidence variables

      ic_wanted :: WantedConstraints,  -- The wanted

      ic_binds  :: EvBindsVar,   -- Points to the place to fill in the
                                 -- abstraction and bindings

      ic_status :: ImplicStatus
    }
-- | The outcome (so far) of solving an 'Implication'.
data ImplicStatus
  = IC_Solved     -- All wanteds in the tree are solved, all the way down
       { ics_need :: VarSet     -- Evidence variables needed by this implication
       , ics_dead :: [EvVar] }  -- Subset of ic_given that are not needed
         -- See Note [Tracking redundant constraints] in TcSimplify

  | IC_Insoluble  -- At least one insoluble constraint in the tree

  | IC_Unsolved   -- Neither of the above; might go either way
instance Outputable Implication where
  -- NB: ic_env is not printed (it does not appear in the pattern below)
  ppr (Implic { ic_tclvl = tclvl, ic_skols = skols
              , ic_given = given, ic_no_eqs = no_eqs
              , ic_wanted = wanted, ic_status = status
              , ic_binds = binds, ic_info = info })
   = hang (ptext (sLit "Implic") <+> lbrace)
        2 (sep [ ptext (sLit "TcLevel =") <+> ppr tclvl
               , ptext (sLit "Skolems =") <+> pprTvBndrs skols
               , ptext (sLit "No-eqs =") <+> ppr no_eqs
               , ptext (sLit "Status =") <+> ppr status
               , hang (ptext (sLit "Given =")) 2 (pprEvVars given)
               , hang (ptext (sLit "Wanted =")) 2 (ppr wanted)
               , ptext (sLit "Binds =") <+> ppr binds
               , pprSkolInfo info ] <+> rbrace)
instance Outputable ImplicStatus where
  ppr IC_Insoluble = ptext (sLit "Insoluble")
  ppr IC_Unsolved  = ptext (sLit "Unsolved")
  ppr (IC_Solved { ics_need = vs, ics_dead = dead })
    = ptext (sLit "Solved") <+> braces (vcat details)
    where
      details = [ ptext (sLit "Dead givens =") <+> ppr dead
                , ptext (sLit "Needed =") <+> ppr vs ]
{-
Note [Needed evidence variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ic_need_evs field holds the free vars of ic_binds, and all the
ic_binds in nested implications.
* Main purpose: if one of the ic_givens is not mentioned in here, it
is redundant.
* solveImplication may drop an implication altogether if it has no
remaining 'wanteds'. But we still track the free vars of its
evidence binds, even though it has now disappeared.
Note [Shadowing in a constraint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We assume NO SHADOWING in a constraint. Specifically
* The unification variables are all implicitly quantified at top
level, and are all unique
* The skolem variables bound in ic_skols are all fresh when the
implication is created.
So we can safely substitute. For example, if we have
forall a. a~Int => ...(forall b. ...a...)...
we can push the (a~Int) constraint inwards in the "givens" without
worrying that 'b' might clash.
Note [Skolems in an implication]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The skolems in an implication are not there to perform a skolem escape
check. That happens because all the environment variables are in the
untouchables, and therefore cannot be unified with anything at all,
let alone the skolems.
Instead, ic_skols is used only when considering floating a constraint
outside the implication in TcSimplify.floatEqualities or
TcSimplify.approximateImplications
Note [Insoluble constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some of the errors that we get during canonicalization are best
reported when all constraints have been simplified as much as
possible. For instance, assume that during simplification the
following constraints arise:
[Wanted] F alpha ~ uf1
[Wanted] beta ~ uf1 beta
When canonicalizing the wanted (beta ~ uf1 beta), if we eagerly fail
we will simply see a message:
'Can't construct the infinite type beta ~ uf1 beta'
and the user has no idea what the uf1 variable is.
Instead our plan is that we will NOT fail immediately, but:
(1) Record the "frozen" error in the ic_insols field
(2) Isolate the offending constraint from the rest of the inerts
(3) Keep on simplifying/canonicalizing
At the end, we will hopefully have substituted uf1 := F alpha, and we
will be able to report a more informative error:
'Can't construct the infinite type beta ~ F alpha beta'
Insoluble constraints *do* include Derived constraints. For example,
a functional dependency might give rise to [D] Int ~ Bool, and we must
report that. If insolubles did not contain Deriveds, reportErrors would
never see it.
************************************************************************
* *
Pretty printing
* *
************************************************************************
-}
pprEvVars :: [EvVar] -> SDoc    -- Print with their types
pprEvVars ev_vars = vcat [ pprEvVarWithType ev | ev <- ev_vars ]

-- | Print the predicates of the evidence variables as a theta.
pprEvVarTheta :: [EvVar] -> SDoc
pprEvVarTheta ev_vars = pprTheta [ evVarPred ev | ev <- ev_vars ]

-- | Print @ev :: pred@ for one evidence variable.
pprEvVarWithType :: EvVar -> SDoc
pprEvVarWithType ev = ppr ev <+> dcolon <+> pprType (evVarPred ev)
{-
************************************************************************
* *
CtEvidence
* *
************************************************************************
Note [Evidence field of CtEvidence]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During constraint solving we never look at the type of ctev_evar;
instead we look at the cte_pred field. The evtm/evar field
may be un-zonked.
Note [Bind new Givens immediately]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For Givens we make new EvVars and bind them immediately. Two main reasons:
* Gain sharing. E.g. suppose we start with g :: C a b, where
class D a => C a b
class (E a, F a) => D a
If we generate all g's superclasses as separate EvTerms we might
get selD1 (selC1 g) :: E a
selD2 (selC1 g) :: F a
selC1 g :: D a
which we could do more economically as:
g1 :: D a = selC1 g
g2 :: E a = selD1 g1
g3 :: F a = selD2 g1
* For *coercion* evidence we *must* bind each given:
class (a~b) => C a b where ....
f :: C a b => ....
Then in f's Givens we have g:(C a b) and the superclass sc(g,0):a~b.
But that superclass selector can't (yet) appear in a coercion
(see evTermCoercion), so the easy thing is to bind it to an Id.
So a Given has EvVar inside it rather that (as previously) an EvTerm.
-}
-- | Evidence for a constraint: given (we have it), wanted (we must
-- produce it), or derived (no evidence at all).
data CtEvidence
  = CtGiven { ctev_pred :: TcPredType      -- See Note [Ct/evidence invariant]
            , ctev_evar :: EvVar           -- See Note [Evidence field of CtEvidence]
            , ctev_loc  :: CtLoc }
    -- Truly given, not depending on subgoals
    -- NB: Spontaneous unifications belong here

  | CtWanted { ctev_pred :: TcPredType     -- See Note [Ct/evidence invariant]
             , ctev_evar :: EvVar          -- See Note [Evidence field of CtEvidence]
             , ctev_loc  :: CtLoc }
    -- Wanted goal

  | CtDerived { ctev_pred :: TcPredType
              , ctev_loc  :: CtLoc }
    -- A goal that we don't really have to solve and can't immediately
    -- rewrite anything other than a derived (there's no evidence!)
    -- but if we do manage to solve it may help in solving other goals.
ctEvPred :: CtEvidence -> TcPredType
-- The predicate of a flavor
ctEvPred ev = ctev_pred ev

ctEvLoc :: CtEvidence -> CtLoc
ctEvLoc ev = ctev_loc ev

-- | The origin recorded in the evidence's location.
ctEvOrigin :: CtEvidence -> CtOrigin
ctEvOrigin ev = ctLocOrigin (ctEvLoc ev)

-- | Get the equality relation relevant for a 'CtEvidence'
ctEvEqRel :: CtEvidence -> EqRel
ctEvEqRel ev = predTypeEqRel (ctEvPred ev)

-- | Get the role relevant for a 'CtEvidence'
ctEvRole :: CtEvidence -> Role
ctEvRole ev = eqRelRole (ctEvEqRel ev)
-- | The evidence term for a given or wanted (panics on derived).
ctEvTerm :: CtEvidence -> EvTerm
ctEvTerm ev = EvId (ctEvId ev)

-- | The evidence coercion for a given or wanted (panics on derived).
ctEvCoercion :: CtEvidence -> TcCoercion
ctEvCoercion ev = mkTcCoVarCo (ctEvId ev)

-- | The evidence variable; panics on 'CtDerived', which carries none.
ctEvId :: CtEvidence -> TcId
ctEvId ev = case ev of
  CtWanted { ctev_evar = evar } -> evar
  CtGiven  { ctev_evar = evar } -> evar
  _                             -> pprPanic "ctEvId:" (ppr ev)
instance Outputable CtEvidence where
  -- Render as  [G]/[W]/[D] <evidence> :: <predicate>
  ppr ev = tag <+> pp_ev <+> dcolon <+> ppr (ctEvPred ev)
    where
      (tag, pp_ev) = case ev of
        CtGiven {}   -> (ptext (sLit "[G]"), ppr (ctev_evar ev))
        CtWanted {}  -> (ptext (sLit "[W]"), ppr (ctev_evar ev))
        CtDerived {} -> (ptext (sLit "[D]"), text "_")
isWanted :: CtEvidence -> Bool
isWanted ev = case ev of
  CtWanted {} -> True
  _           -> False

isGiven :: CtEvidence -> Bool
isGiven ev = case ev of
  CtGiven {} -> True
  _          -> False

isDerived :: CtEvidence -> Bool
isDerived ev = case ev of
  CtDerived {} -> True
  _            -> False
{-
%************************************************************************
%* *
CtFlavour
%* *
%************************************************************************
Just an enum type that tracks whether a constraint is wanted, derived,
or given, when we need to separate that info from the constraint itself.
-}
-- | Whether a constraint is given, wanted, or derived, separated
-- from the constraint itself.
data CtFlavour = Given | Wanted | Derived
  deriving Eq
instance Outputable CtFlavour where
  ppr flav = text (case flav of
                     Given   -> "[G]"
                     Wanted  -> "[W]"
                     Derived -> "[D]")
-- | The flavour of a piece of constraint evidence.
ctEvFlavour :: CtEvidence -> CtFlavour
ctEvFlavour ev = case ev of
  CtWanted {}  -> Wanted
  CtGiven {}   -> Given
  CtDerived {} -> Derived
-- | Whether or not one 'Ct' can rewrite another is determined by its
-- flavour and its equality relation
type CtFlavourRole = (CtFlavour, EqRel)

-- | Extract the flavour and role from a 'CtEvidence'
ctEvFlavourRole :: CtEvidence -> CtFlavourRole
ctEvFlavourRole ev = (ctEvFlavour ev, ctEvEqRel ev)

-- | Extract the flavour and role from a 'Ct'
ctFlavourRole :: Ct -> CtFlavourRole
ctFlavourRole ct = ctEvFlavourRole (cc_ev ct)
{- Note [eqCanRewrite]
~~~~~~~~~~~~~~~~~~~
(eqCanRewrite ct1 ct2) holds if the constraint ct1 (a CTyEqCan of form
tv ~ ty) can be used to rewrite ct2. It must satisfy the properties of
a can-rewrite relation, see Definition [Can-rewrite relation]
With the solver handling Coercible constraints like equality constraints,
the rewrite conditions must take role into account, never allowing
a representational equality to rewrite a nominal one.
Note [Wanteds do not rewrite Wanteds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't allow Wanteds to rewrite Wanteds, because that can give rise
to very confusing type error messages. A good example is Trac #8450.
Here's another
f :: a -> Bool
f x = ( [x,'c'], [x,True] ) `seq` True
Here we get
[W] a ~ Char
[W] a ~ Bool
but we do not want to complain about Bool ~ Char!
Note [Deriveds do rewrite Deriveds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
However we DO allow Deriveds to rewrite Deriveds, because that's how
improvement works; see Note [The improvement story] in TcInteract.
However, for now at least I'm only letting (Derived,NomEq) rewrite
(Derived,NomEq) and not doing anything for ReprEq. If we have
eqCanRewriteFR (Derived, NomEq) (Derived, _) = True
then we lose the property of Note [Can-rewrite relation]
R2. If f1 >= f, and f2 >= f,
then either f1 >= f2 or f2 >= f1
Consider f1 = (Given, ReprEq)
f2 = (Derived, NomEq)
f = (Derived, ReprEq)
I thought maybe we could never get Derived ReprEq constraints, but
we can; straight from the Wanteds during improvement. And from a Derived
ReprEq we could conceivably get a Derived NomEq improvement (by decomposing
a type constructor with Nominal role), and hence unify.
Note [canRewriteOrSame]
~~~~~~~~~~~~~~~~~~~~~~~
canRewriteOrSame is similar but
* returns True for Wanted/Wanted.
* works for all kinds of constraints, not just CTyEqCans
See the call sites for explanations.
-}
-- | Can the first (equality) constraint be used to rewrite the second?
-- See Note [eqCanRewrite]
eqCanRewrite :: CtEvidence -> CtEvidence -> Bool
eqCanRewrite ev1 ev2 = ctEvFlavourRole ev1 `eqCanRewriteFR` ctEvRole ev2 `seq` ctEvFlavourRole ev1 `eqCanRewriteFR` ctEvFlavourRole ev2
{-
************************************************************************
* *
SubGoalDepth
* *
************************************************************************
Note [SubGoalDepth]
~~~~~~~~~~~~~~~~~~~
The 'SubGoalDepth' takes care of stopping the constraint solver from looping.
The counter starts at zero and increases. It includes dictionary constraints,
equality simplification, and type family reduction. (Why combine these? Because
it's actually quite easy to mistake one for another, in sufficiently involved
scenarios, like ConstraintKinds.)
The flag -fcontext-stack=n (not very well named!) fixes the maximum
level.
* The counter includes the depth of type class instance declarations. Example:
[W] d{7} : Eq [Int]
That is d's dictionary-constraint depth is 7. If we use the instance
$dfEqList :: Eq a => Eq [a]
to simplify it, we get
d{7} = $dfEqList d'{8}
where d'{8} : Eq Int, and d' has depth 8.
For civilised (decidable) instance declarations, each increase of
depth removes a type constructor from the type, so the depth never
gets big; i.e. is bounded by the structural depth of the type.
* The counter also increments when resolving
equalities involving type functions. Example:
Assume we have a wanted at depth 7:
[W] d{7} : F () ~ a
  If there is a type function equation "F () = Int", this would be rewritten to
[W] d{8} : Int ~ a
and remembered as having depth 8.
  Again, without UndecidableInstances this counter is bounded, but with them
  rewriting can continue ad infinitum. Hence there is a maximum level.
* Lastly, every time an equality is rewritten, the counter increases. Again,
rewriting an equality constraint normally makes progress, but it's possible
the "progress" is just the reduction of an infinitely-reducing type family.
Hence we need to track the rewrites.
When compiling a program requires a greater depth, then GHC recommends turning
off this check entirely by setting -freduction-depth=0. This is because the
exact number that works is highly variable, and is likely to change even between
minor releases. Because this check is solely to prevent infinite compilation
times, it seems safe to disable it when a user has ascertained that their program
doesn't loop at the type level.
-}
-- | See Note [SubGoalDepth]
newtype SubGoalDepth = SubGoalDepth Int
  deriving (Eq, Ord, Outputable)

-- | Depth at which constraint solving starts.
initialSubGoalDepth :: SubGoalDepth
initialSubGoalDepth = SubGoalDepth 0

-- | Increment the depth by one; done at each solver step that could loop.
bumpSubGoalDepth :: SubGoalDepth -> SubGoalDepth
bumpSubGoalDepth (SubGoalDepth n) = SubGoalDepth (n + 1)

-- | True when the depth exceeds the limit from 'reductionDepth';
-- see Note [SubGoalDepth] for how the limit can be disabled.
subGoalDepthExceeded :: DynFlags -> SubGoalDepth -> Bool
subGoalDepthExceeded dflags (SubGoalDepth d)
  = mkIntWithInf d > reductionDepth dflags
{-
************************************************************************
* *
CtLoc
* *
************************************************************************
The 'CtLoc' gives information about where a constraint came from.
This is important for decent error message reporting because
dictionaries don't appear in the original source code.
type will evolve...
-}
-- | Where a constraint came from: its origin, the local typechecker
-- environment at its birthplace, and its solver depth.
data CtLoc = CtLoc { ctl_origin :: CtOrigin
                   , ctl_env :: TcLclEnv
                   , ctl_depth :: !SubGoalDepth }
  -- The TcLclEnv includes particularly
  --    source location:  tcl_loc   :: RealSrcSpan
  --    context:          tcl_ctxt  :: [ErrCtxt]
  --    binder stack:     tcl_bndrs :: TcIdBinderStack
  --    level:            tcl_tclvl :: TcLevel

-- | Location for a Given constraint: the skolem info as origin, the
-- environment pinned at the supplied level, and depth zero.
mkGivenLoc :: TcLevel -> SkolemInfo -> TcLclEnv -> CtLoc
mkGivenLoc tclvl skol_info env
  = CtLoc { ctl_origin = GivenOrigin skol_info
          , ctl_env = env { tcl_tclvl = tclvl }
          , ctl_depth = initialSubGoalDepth }

ctLocEnv :: CtLoc -> TcLclEnv
ctLocEnv = ctl_env

-- | The TcLevel stored in the location's environment.
ctLocLevel :: CtLoc -> TcLevel
ctLocLevel loc = tcl_tclvl (ctLocEnv loc)

ctLocDepth :: CtLoc -> SubGoalDepth
ctLocDepth = ctl_depth

ctLocOrigin :: CtLoc -> CtOrigin
ctLocOrigin = ctl_origin

-- | Source span of the constraint's birthplace (from the TcLclEnv).
ctLocSpan :: CtLoc -> RealSrcSpan
ctLocSpan (CtLoc { ctl_env = lcl}) = tcl_loc lcl

setCtLocSpan :: CtLoc -> RealSrcSpan -> CtLoc
setCtLocSpan ctl@(CtLoc { ctl_env = lcl }) loc = setCtLocEnv ctl (lcl { tcl_loc = loc })

-- | Bump the solver depth by one; see Note [SubGoalDepth].
bumpCtLocDepth :: CtLoc -> CtLoc
bumpCtLocDepth loc@(CtLoc { ctl_depth = d }) = loc { ctl_depth = bumpSubGoalDepth d }

setCtLocOrigin :: CtLoc -> CtOrigin -> CtLoc
setCtLocOrigin ctl orig = ctl { ctl_origin = orig }

setCtLocEnv :: CtLoc -> TcLclEnv -> CtLoc
setCtLocEnv ctl env = ctl { ctl_env = env }

-- | Push an error-message context frame and replace the origin.
pushErrCtxt :: CtOrigin -> ErrCtxt -> CtLoc -> CtLoc
pushErrCtxt o err loc@(CtLoc { ctl_env = lcl })
  = loc { ctl_origin = o, ctl_env = lcl { tcl_ctxt = err : tcl_ctxt lcl } }

pushErrCtxtSameOrigin :: ErrCtxt -> CtLoc -> CtLoc
-- Just add information w/o updating the origin!
pushErrCtxtSameOrigin err loc@(CtLoc { ctl_env = lcl })
  = loc { ctl_env = lcl { tcl_ctxt = err : tcl_ctxt lcl } }
{-
************************************************************************
* *
SkolemInfo
* *
************************************************************************
-}
-- SkolemInfo gives the origin of *given* constraints
--   a) type variables are skolemised
--   b) an implication constraint is generated
data SkolemInfo
  = SigSkol UserTypeCtxt -- A skolem that is created by instantiating
            Type         -- a programmer-supplied type signature
                         -- Location of the binding site is on the TyVar

  -- The rest are for non-scoped skolems
  | ClsSkol Class   -- Bound at a class decl
  | InstSkol        -- Bound at an instance decl
  | InstSC TypeSize -- A "given" constraint obtained by superclass selection.
                    -- If (C ty1 .. tyn) is the largest class from
                    -- which we made a superclass selection in the chain,
                    -- then TypeSize = sizeTypes [ty1, .., tyn]
                    -- See Note [Solving superclass constraints] in TcInstDcls
  | DataSkol        -- Bound at a data type declaration
  | FamInstSkol     -- Bound at a family instance decl
  | PatSkol         -- An existential type variable bound by a pattern for
      ConLike       -- a data constructor with an existential type.
      (HsMatchContext Name)
      -- e.g.   data T = forall a. Eq a => MkT a
      --        f (MkT x) = ...
      -- The pattern MkT x will allocate an existential type
      -- variable for 'a'.
  | ArrowSkol         -- An arrow form (see TcArrows)
  | IPSkol [HsIPName] -- Binding site of an implicit parameter
  | RuleSkol RuleName -- The LHS of a RULE
  | InferSkol [(Name,TcType)]
      -- We have inferred a type for these (mutually-recursive)
      -- polymorphic Ids, and are now checking that their RHS
      -- constraints are satisfied.
  | BracketSkol       -- Template Haskell bracket
  | UnifyForAllSkol   -- We are unifying two for-all types
      [TcTyVar]       -- The instantiated skolem variables
      TcType          -- The instantiated type *inside* the forall
  | UnkSkol           -- Unhelpful info (until I improve it)

instance Outputable SkolemInfo where
  ppr = pprSkolInfo
-- | Render a 'SkolemInfo' for error messages.
pprSkolInfo :: SkolemInfo -> SDoc
-- Complete the sentence "is a rigid type variable bound by..."
pprSkolInfo (SigSkol ctxt ty) = pprSigSkolInfo ctxt ty
pprSkolInfo (IPSkol ips) = ptext (sLit "the implicit-parameter binding") <> plural ips <+> ptext (sLit "for")
                           <+> pprWithCommas ppr ips
pprSkolInfo (ClsSkol cls) = ptext (sLit "the class declaration for") <+> quotes (ppr cls)
pprSkolInfo InstSkol = ptext (sLit "the instance declaration")
pprSkolInfo (InstSC n) = ptext (sLit "the instance declaration") <> ifPprDebug (parens (ppr n))
pprSkolInfo DataSkol = ptext (sLit "a data type declaration")
pprSkolInfo FamInstSkol = ptext (sLit "a family instance declaration")
pprSkolInfo BracketSkol = ptext (sLit "a Template Haskell bracket")
pprSkolInfo (RuleSkol name) = ptext (sLit "the RULE") <+> pprRuleName name
pprSkolInfo ArrowSkol = ptext (sLit "an arrow form")
pprSkolInfo (PatSkol cl mc) = sep [ pprPatSkolInfo cl
                                  , ptext (sLit "in") <+> pprMatchContext mc ]
pprSkolInfo (InferSkol ids) = sep [ ptext (sLit "the inferred type of")
                                  , vcat [ ppr name <+> dcolon <+> ppr ty
                                         | (name,ty) <- ids ]]
pprSkolInfo (UnifyForAllSkol tvs ty) = ptext (sLit "the type") <+> ppr (mkForAllTys tvs ty)

-- UnkSkol
-- For type variables the others are dealt with by pprSkolTvBinding.
-- For Insts, these cases should not happen
pprSkolInfo UnkSkol = WARN( True, text "pprSkolInfo: UnkSkol" ) ptext (sLit "UnkSkol")
-- | Render the origin of a signature skolem; completes the sentence
-- "is a rigid type variable bound by...".
pprSigSkolInfo :: UserTypeCtxt -> Type -> SDoc
pprSigSkolInfo ctxt ty
  | FunSigCtxt f _ <- ctxt
  = vcat [ ptext (sLit "the type signature for:")
         , nest 2 (pprPrefixOcc f <+> dcolon <+> ppr ty) ]
  | otherwise
  = hang (pprUserTypeCtxt ctxt <> colon)
       2 (ppr ty)
-- | Render the origin of a pattern-bound existential skolem.
pprPatSkolInfo :: ConLike -> SDoc
pprPatSkolInfo (RealDataCon dc)
  = sep [ ptext (sLit "a pattern with constructor:")
        , nest 2 $ ppr dc <+> dcolon
                   <+> pprType (dataConUserType dc) <> comma ]
            -- pprType prints forall's regardless of -fprint-explicit-foralls
            -- which is what we want here, since we might be saying
            -- type variable 't' is bound by ...

pprPatSkolInfo (PatSynCon ps)
  = sep [ ptext (sLit "a pattern with pattern synonym:")
        , nest 2 $ ppr ps <+> dcolon
                   <+> pprType (patSynType ps) <> comma ]
{-
************************************************************************
* *
CtOrigin
* *
************************************************************************
-}
-- | Why a constraint arose; used to build "arising from ..." messages.
data CtOrigin
  = GivenOrigin SkolemInfo

  -- All the others are for *wanted* constraints
  | OccurrenceOf Name           -- Occurrence of an overloaded identifier
  | AppOrigin                   -- An application of some kind

  | SpecPragOrigin UserTypeCtxt -- Specialisation pragma for
                                -- function or instance

  | TypeEqOrigin { uo_actual :: TcType
                 , uo_expected :: TcType }
  | KindEqOrigin
      TcType TcType             -- A kind equality arising from unifying these two types
      CtOrigin                  -- originally arising from this

  | IPOccOrigin HsIPName        -- Occurrence of an implicit parameter

  | LiteralOrigin (HsOverLit Name)     -- Occurrence of a literal
  | NegateOrigin                       -- Occurrence of syntactic negation

  | ArithSeqOrigin (ArithSeqInfo Name) -- [x..], [x..y] etc
  | PArrSeqOrigin (ArithSeqInfo Name)  -- [:x..y:] and [:x,y..z:]
  | SectionOrigin
  | TupleOrigin                        -- (..,..)
  | ExprSigOrigin                      -- e :: ty
  | PatSigOrigin                       -- p :: ty
  | PatOrigin                          -- Instantiating a polytyped pattern at a constructor
  | RecordUpdOrigin
  | ViewPatOrigin

  | ScOrigin TypeSize           -- Typechecking superclasses of an instance declaration
                                -- If the instance head is C ty1 .. tyn
                                -- then TypeSize = sizeTypes [ty1, .., tyn]
                                -- See Note [Solving superclass constraints] in TcInstDcls

  | DerivOrigin                 -- Typechecking deriving
  | DerivOriginDC DataCon Int
                                -- Checking constraints arising from this data con and field index
  | DerivOriginCoerce Id Type Type
                                -- DerivOriginCoerce id ty1 ty2: Trying to coerce class method `id` from
                                -- `ty1` to `ty2`.
  | StandAloneDerivOrigin       -- Typechecking stand-alone deriving
  | DefaultOrigin               -- Typechecking a default decl
  | DoOrigin                    -- Arising from a do expression
  | MCompOrigin                 -- Arising from a monad comprehension
  | IfOrigin                    -- Arising from an if statement
  | ProcOrigin                  -- Arising from a proc expression
  | AnnOrigin                   -- An annotation

  | FunDepOrigin1               -- A functional dependency from combining
      PredType CtLoc            -- This constraint arising from ...
      PredType CtLoc            -- and this constraint arising from ...

  | FunDepOrigin2               -- A functional dependency from combining
      PredType CtOrigin         -- This constraint arising from ...
      PredType SrcSpan          -- and this instance
      -- We only need a CtOrigin on the first, because the location
      -- is pinned on the entire error message

  | HoleOrigin
  | UnboundOccurrenceOf RdrName
  | ListOrigin                  -- An overloaded list
  | StaticOrigin                -- A static form
-- | The common prefix of every origin message.
ctoHerald :: SDoc
ctoHerald = ptext (sLit "arising from")

pprCtLoc :: CtLoc -> SDoc
-- "arising from ... at ..."
-- Not an instance of Outputable because of the "arising from" prefix
pprCtLoc (CtLoc { ctl_origin = o, ctl_env = lcl})
  = sep [ pprCtOrigin o
        , text "at" <+> ppr (tcl_loc lcl)]

pprCtOrigin :: CtOrigin -> SDoc
-- "arising from ..."
-- Not an instance of Outputable because of the "arising from" prefix
pprCtOrigin (GivenOrigin sk) = ctoHerald <+> ppr sk

pprCtOrigin (SpecPragOrigin ctxt)
  = case ctxt of
       FunSigCtxt n _ -> ptext (sLit "a SPECIALISE pragma for") <+> quotes (ppr n)
       SpecInstCtxt -> ptext (sLit "a SPECIALISE INSTANCE pragma")
       _ -> ptext (sLit "a SPECIALISE pragma") -- Never happens I think

pprCtOrigin (FunDepOrigin1 pred1 loc1 pred2 loc2)
  = hang (ctoHerald <+> ptext (sLit "a functional dependency between constraints:"))
       2 (vcat [ hang (quotes (ppr pred1)) 2 (pprCtLoc loc1)
               , hang (quotes (ppr pred2)) 2 (pprCtLoc loc2) ])

pprCtOrigin (FunDepOrigin2 pred1 orig1 pred2 loc2)
  = hang (ctoHerald <+> ptext (sLit "a functional dependency between:"))
       2 (vcat [ hang (ptext (sLit "constraint") <+> quotes (ppr pred1))
                    2 (pprCtOrigin orig1 )
               , hang (ptext (sLit "instance") <+> quotes (ppr pred2))
                    2 (ptext (sLit "at") <+> ppr loc2) ])

pprCtOrigin (KindEqOrigin t1 t2 _)
  = hang (ctoHerald <+> ptext (sLit "a kind equality arising from"))
       2 (sep [ppr t1, char '~', ppr t2])

pprCtOrigin (UnboundOccurrenceOf name)
  = ctoHerald <+> ptext (sLit "an undeclared identifier") <+> quotes (ppr name)

pprCtOrigin (DerivOriginDC dc n)
  = hang (ctoHerald <+> ptext (sLit "the") <+> speakNth n
          <+> ptext (sLit "field of") <+> quotes (ppr dc))
       2 (parens (ptext (sLit "type") <+> quotes (ppr ty)))
  where
    -- NOTE(review): '!!' is partial; assumes 1 <= n <= arity of dc —
    -- presumably guaranteed at construction sites, TODO confirm.
    ty = dataConOrigArgTys dc !! (n-1)

pprCtOrigin (DerivOriginCoerce meth ty1 ty2)
  = hang (ctoHerald <+> ptext (sLit "the coercion of the method") <+> quotes (ppr meth))
       2 (sep [ text "from type" <+> quotes (ppr ty1)
              , nest 2 $ text "to type" <+> quotes (ppr ty2) ])

-- Everything else gets the short one-liner form.
pprCtOrigin simple_origin
  = ctoHerald <+> pprCtO simple_origin
----------------
-- | Short one-liner renderings of the simple origins.  The constructors
-- missing here are exactly those given a multi-line rendering by
-- 'pprCtOrigin'; the catch-all panics if one slips through.
pprCtO :: CtOrigin -> SDoc -- Ones that are short one-liners
pprCtO (OccurrenceOf name) = hsep [ptext (sLit "a use of"), quotes (ppr name)]
pprCtO AppOrigin = ptext (sLit "an application")
pprCtO (IPOccOrigin name) = hsep [ptext (sLit "a use of implicit parameter"), quotes (ppr name)]
pprCtO RecordUpdOrigin = ptext (sLit "a record update")
pprCtO ExprSigOrigin = ptext (sLit "an expression type signature")
pprCtO PatSigOrigin = ptext (sLit "a pattern type signature")
pprCtO PatOrigin = ptext (sLit "a pattern")
pprCtO ViewPatOrigin = ptext (sLit "a view pattern")
pprCtO IfOrigin = ptext (sLit "an if statement")
pprCtO (LiteralOrigin lit) = hsep [ptext (sLit "the literal"), quotes (ppr lit)]
pprCtO (ArithSeqOrigin seq) = hsep [ptext (sLit "the arithmetic sequence"), quotes (ppr seq)]
pprCtO (PArrSeqOrigin seq) = hsep [ptext (sLit "the parallel array sequence"), quotes (ppr seq)]
pprCtO SectionOrigin = ptext (sLit "an operator section")
pprCtO TupleOrigin = ptext (sLit "a tuple")
pprCtO NegateOrigin = ptext (sLit "a use of syntactic negation")
pprCtO (ScOrigin n) = ptext (sLit "the superclasses of an instance declaration")
                      <> ifPprDebug (parens (ppr n))
pprCtO DerivOrigin = ptext (sLit "the 'deriving' clause of a data type declaration")
pprCtO StandAloneDerivOrigin = ptext (sLit "a 'deriving' declaration")
pprCtO DefaultOrigin = ptext (sLit "a 'default' declaration")
pprCtO DoOrigin = ptext (sLit "a do statement")
pprCtO MCompOrigin = ptext (sLit "a statement in a monad comprehension")
pprCtO ProcOrigin = ptext (sLit "a proc expression")
pprCtO (TypeEqOrigin t1 t2) = ptext (sLit "a type equality") <+> sep [ppr t1, char '~', ppr t2]
pprCtO AnnOrigin = ptext (sLit "an annotation")
pprCtO HoleOrigin = ptext (sLit "a use of") <+> quotes (ptext $ sLit "_")
pprCtO ListOrigin = ptext (sLit "an overloaded list")
pprCtO StaticOrigin = ptext (sLit "a static form")
-- Fixed: the panic message previously said "pprCtOrigin", pointing
-- debuggers at the wrong function.
pprCtO _ = panic "pprCtO"
{-
Constraint Solver Plugins
-------------------------
-}
-- | The type of a plugin's constraint-solving function: it receives the
-- given, derived, and wanted constraints and reports a 'TcPluginResult'.
type TcPluginSolver = [Ct]    -- given
                   -> [Ct]    -- derived
                   -> [Ct]    -- wanted
                   -> TcPluginM TcPluginResult

-- | Plugin monad: a 'TcM' computation with access to the current
-- evidence bindings (absent during plugin init/stop).
newtype TcPluginM a = TcPluginM (Maybe EvBindsVar -> TcM a)

instance Functor TcPluginM where
  fmap = liftM

instance Applicative TcPluginM where
  pure = return
  (<*>) = ap

instance Monad TcPluginM where
  return x = TcPluginM (const $ return x)
  fail x = TcPluginM (const $ fail x)
  -- Thread the evidence-bindings environment through the bind.
  TcPluginM m >>= k =
    TcPluginM (\ ev -> do a <- m ev
                          runTcPluginM (k a) ev)

-- | Run a plugin computation with the given evidence bindings.
runTcPluginM :: TcPluginM a -> Maybe EvBindsVar -> TcM a
runTcPluginM (TcPluginM m) = m

-- | This function provides an escape for direct access to
-- the 'TcM' monad.  It should not be used lightly, and
-- the provided 'TcPluginM' API should be favoured instead.
unsafeTcPluginTcM :: TcM a -> TcPluginM a
unsafeTcPluginTcM = TcPluginM . const

-- | Access the 'EvBindsVar' carried by the 'TcPluginM' during
-- constraint solving.  Returns 'Nothing' if invoked during
-- 'tcPluginInit' or 'tcPluginStop'.
getEvBindsTcPluginM_maybe :: TcPluginM (Maybe EvBindsVar)
getEvBindsTcPluginM_maybe = TcPluginM return

-- | A constraint-solver plugin; the existential state 's' is produced by
-- 'tcPluginInit' and threaded to 'tcPluginSolve' and 'tcPluginStop'.
data TcPlugin = forall s. TcPlugin
  { tcPluginInit :: TcPluginM s
    -- ^ Initialize plugin, when entering type-checker.

  , tcPluginSolve :: s -> TcPluginSolver
    -- ^ Solve some constraints.
    -- TODO: WRITE MORE DETAILS ON HOW THIS WORKS.

  , tcPluginStop :: s -> TcPluginM ()
    -- ^ Clean up after the plugin, when exiting the type-checker.
  }

data TcPluginResult
  = TcPluginContradiction [Ct]
    -- ^ The plugin found a contradiction.
    -- The returned constraints are removed from the inert set,
    -- and recorded as insoluble.

  | TcPluginOk [(EvTerm,Ct)] [Ct]
    -- ^ The first field is for constraints that were solved.
    -- These are removed from the inert set,
    -- and the evidence for them is recorded.
    -- The second field contains new work, that should be processed by
    -- the constraint solver.
|
ghc-android/ghc
|
compiler/typecheck/TcRnTypes.hs
|
bsd-3-clause
| 96,226 | 0 | 16 | 28,219 | 11,382 | 6,544 | 4,838 | 915 | 5 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (ar-SA locale): declares the map plus the
     TOC, index, search, and favorites views for this help bundle. -->
<helpset version="2.0" xml:lang="ar-SA">
  <title>SAML Support</title>
  <maps>
    <homeID>saml</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
thc202/zap-extensions
|
addOns/saml/src/main/javahelp/help_ar_SA/helpset_ar_SA.hs
|
apache-2.0
| 958 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Version
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (local universal quantification in ReadP)
--
-- A general library for representation and manipulation of versions.
--
-- Versioning schemes are many and varied, so the version
-- representation provided by this library is intended to be a
-- compromise between complete generality, where almost no common
-- functionality could reasonably be provided, and fixing a particular
-- versioning scheme, which would probably be too restrictive.
--
-- So the approach taken here is to provide a representation which
-- subsumes many of the versioning schemes commonly in use, and we
-- provide implementations of 'Eq', 'Ord' and conversion to\/from 'String'
-- which will be appropriate for some applications, but not all.
--
-----------------------------------------------------------------------------
module Data.Version (
-- * The @Version@ type
Version(..),
-- * A concrete representation of @Version@
showVersion
) where
import Data.List ( sort, intersperse )
{- |
A 'Version' represents the version of a software entity.
An instance of 'Eq' is provided, which implements exact equality
modulo reordering of the tags in the 'versionTags' field.
An instance of 'Ord' is also provided, which gives lexicographic
ordering on the 'versionBranch' fields (i.e. 2.1 > 2.0, 1.2.3 > 1.2.2,
etc.). This is expected to be sufficient for many uses, but note that
you may need to use a more specific ordering for your versioning
scheme. For example, some versioning schemes may include pre-releases
which have tags @\"pre1\"@, @\"pre2\"@, and so on, and these would need to
be taken into account when determining ordering. In some cases, date
ordering may be more appropriate, so the application would have to
look for @date@ tags in the 'versionTags' field and compare those.
The bottom line is, don't always assume that 'compare' and other 'Ord'
operations are the right thing for every 'Version'.
Similarly, concrete representations of versions may differ. One
possible concrete representation is provided (see 'showVersion' and
'parseVersion'), but depending on the application a different concrete
representation may be more appropriate.
-}
data Version =
  Version { versionBranch :: [Int],
                -- ^ The numeric branch for this version. This reflects the
                -- fact that most software versions are tree-structured; there
                -- is a main trunk which is tagged with versions at various
                -- points (1,2,3...), and the first branch off the trunk after
                -- version 3 is 3.1, the second branch off the trunk after
                -- version 3 is 3.2, and so on. The tree can be branched
                -- arbitrarily, just by adding more digits.
                --
                -- We represent the branch as a list of 'Int', so
                -- version 3.2.1 becomes [3,2,1]. Lexicographic ordering
                -- (i.e. the default instance of 'Ord' for @[Int]@) gives
                -- the natural ordering of branches.
            versionTags :: [String] -- really a bag
                -- ^ A version can be tagged with an arbitrary list of strings.
                -- The interpretation of the list of tags is entirely dependent
                -- on the entity that this version applies to.
          }
  deriving ( Read,Show )

-- Equality is exact on the branch and order-insensitive on the tags.
instance Eq Version where
  v1 == v2 = versionBranch v1 == versionBranch v2
             && sort (versionTags v1) == sort (versionTags v2)
                -- tags may be in any order

-- Ordering looks at the branch only; note this is coarser than (==),
-- so two versions may compare EQ while differing in their tags.
instance Ord Version where
  v1 `compare` v2 = versionBranch v1 `compare` versionBranch v2
-- -----------------------------------------------------------------------------
-- A concrete representation of 'Version'
-- | Provides one possible concrete representation for 'Version'.  For
-- a version with 'versionBranch' @= [1,2,3]@ and 'versionTags'
-- @= [\"tag1\",\"tag2\"]@, the output will be @1.2.3-tag1-tag2@.
--
showVersion :: Version -> String
showVersion (Version branch tags) = branchPart ++ tagsPart
  where
    -- Branch components joined with dots, e.g. "1.2.3".
    branchPart = concat (intersperse "." (map show branch))
    -- Each tag preceded by a dash, e.g. "-tag1-tag2".
    tagsPart = concatMap ('-':) tags
|
m-alvarez/jhc
|
lib/haskell-extras/Data/Version.hs
|
mit
| 4,457 | 0 | 10 | 1,005 | 265 | 165 | 100 | 18 | 1 |
-- !!! ambiguous re-exportation.
-- Compiler testcase: the export list names both 'module M' and
-- 'module Prelude', and 'id' is exported by both (M defines its own),
-- so this module is expected to be REJECTED — do not "fix" it.
module M (module M,module Prelude) where id x = x;
|
urbanslug/ghc
|
testsuite/tests/module/mod150.hs
|
bsd-3-clause
| 84 | 0 | 5 | 14 | 24 | 16 | 8 | 1 | 1 |
{-# LANGUAGE GADTs #-}
-- A program very like this triggered a kind error with GHC 6.6
module Foo where
-- | GADT of patch sequences indexed by start and end states; part of a
-- regression test for a GHC 6.6 kind error (see header comment).
data PatchSeq p a b where
    Nil :: PatchSeq p a b
    U :: p a b -> PatchSeq p a b
    (:-) :: PatchSeq p a b -> PatchSeq p b c -> PatchSeq p a c

-- is_normal :: PatchSeq p a b -> Bool
-- NOTE(review): the signature above is commented out in the original
-- testcase; leaving it off appears to be part of what is exercised.
is_normal Nil = True
is_normal (U _) = True
is_normal (U _ :- _) = True
is_normal _ = False
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc221.hs
|
bsd-3-clause
| 396 | 0 | 8 | 112 | 131 | 70 | 61 | 10 | 1 |
-- -*- mode: haskell; -*-
module Main (main) where
import Cards
import Cards.Output
import Data.List
import Poker
import System.Environment
import System.Random
import Control.Monad
import Data.Monoid
import Data.Maybe
import Control.Applicative
import Data.Ratio
import Control.DeepSeq
import Debug.Trace (trace)
import Control.Parallel.Strategies
-- | Rank every hand against the supplied board cards, returning the
-- board together with each hand paired with its poker rank.
rankHands :: ([Card],[HoleCards]) -> ([Card],[(HoleCards,PokerRank)])
rankHands (board, hands) =
    (board, [ (hand, pokerRank board hand) | hand <- hands ])
-- instead of enumerating all hands this function calculates equity for a
-- subset of the combinations of possible hands, given by the zero-indexed
-- inclusive board-index range [lo, hi].
calcEquity :: Int -> Int -> [Card] -> [HoleCards] -> [ShowdownTally]
calcEquity lo hi deck hands = foldl' go blankTallies boards
  where
    -- NOTE(review): the original bound 'usedCards' and a 'deckx' here but
    -- never used them (the caller already strips used cards from 'deck');
    -- those dead bindings are removed.  The parameters were also renamed
    -- from 'min'/'max', which shadowed the Prelude functions.
    generator = generateBoard deck
    boards = [ generator i | i <- [lo..hi] ]
    blankTallies = replicate (length hands) blankTally
    -- Force the accumulator on each step to avoid a thunk build-up
    -- across a large number of boards.
    go total board = zipWith addTally (pokerEquity board hands) $!! total
-- | Split the sample range across a fixed number of cores, evaluate the
-- per-core equity chunks in parallel, and merge the tallies.
parCalcEquity :: Int -> [Card] -> [HoleCards] -> [ShowdownTally]
parCalcEquity samples deck hands =
    foldr (zipWith addTally) (head results) (tail results)
  where
    cores = 4  -- TODO: make the degree of parallelism configurable
    samplesPerCore = samples `div` cores
    lastSampleExtra = samples `rem` cores
    -- The last section absorbs the remainder so that all 'samples' boards
    -- are evaluated.  Previously 'lastSampleExtra' was computed but never
    -- used, silently dropping up to (cores - 1) samples.
    sections = [ ( i * samplesPerCore
                 , (i + 1) * samplesPerCore - 1
                     + (if i == cores - 1 then lastSampleExtra else 0) )
               | i <- [0 .. cores - 1] ]
    results = map (\(lo, hi) -> calcEquity lo hi deck hands) sections
                `using` parList rdeepseq
-- | Shuffle the deck remaining after removing the players' cards, compute
-- equity over 'samples' boards, and print hands, deck, and results.
evaluateHands :: StdGen -> Int -> [HoleCards] -> IO ()
evaluateHands rnd samples hands = do
  mapM_ putHandLn $ hands
  -- Cards already held by players must come out of the deck before sampling.
  let usedCards = foldr (\(HoleCards x y) acc -> x:y:acc ) [] hands
      deck = standardDeck \\ usedCards
      shuffled = shuffleDeck rnd deck
      equity = parCalcEquity samples shuffled hands
  putStrLn $ "Samples: " ++ (show samples)
  putStrLn "Used Cards:"
  mapM_ putCard $ usedCards
  putStrLn ""
  putStrLn "Deck:"
  mapM_ putCard deck
  putStrLn ""
  -- A duplicated card means two hands share a card: inconsistent input.
  if length usedCards /= (length . nub) usedCards
    then error "ERR: Cards shared between hands"
    else do putStrLn "CALCULATING EQUITY NOW"
            let result = zip hands equity
            mapM_ putEquity result
            mapM_ putTally result

-- | Parse seed, sample count, and hands from the command line, then run
-- the evaluation.  NOTE(review): the tuple pattern on getArgs and the
-- bare 'read's are partial; malformed arguments crash the program.
cmdLineEvalMain :: IO ()
cmdLineEvalMain = do
  (seed:samplesStr:rest) <- getArgs
  let rnd = mkStdGen $ (read seed :: Int)
      strCards = rest
      hands = map readHoleCards strCards
      samples = read samplesStr :: Int
  if all isJust hands
    then evaluateHands rnd samples $ catMaybes hands
    else putStrLn $ "cannot parse hands: " ++ (unwords strCards)

-- TODO: remove this file altogether, make parallelism an option flag
main = cmdLineEvalMain
|
wiggly/functional-pokering
|
src/ParallelMain.hs
|
mit
| 3,288 | 0 | 15 | 924 | 950 | 499 | 451 | 64 | 2 |
module Main where
-- | Pair each job's progress with its daily speed and simulate releases.
solution :: [Int] -> [Int] -> [Int]
solution progresses speeds = innerSolution (zip progresses speeds) []

-- | Advance day by day; whenever the leading jobs are complete (>= 100),
-- release them together and record the size of that batch.
innerSolution :: [(Int, Int)] -> [Int] -> [Int]
innerSolution [] released = reverse released
innerSolution jobs released =
  case length (takeWhile (\(progress, _) -> progress >= 100) jobs) of
    0     -> innerSolution (oneDay jobs) released
    batch -> innerSolution (drop batch jobs) (batch : released)

-- | One day passes: every job advances by its speed.
oneDay :: [(Int, Int)] -> [(Int, Int)]
oneDay = map (\(progress, speed) -> (progress + speed, speed))
-- | Entry point: print the result for one sample data set.
main :: IO()
main = do
  print $ solution [93, 30, 55] [1, 30, 5]
|
funfunStudy/algorithm
|
haskell/src/main/haskell/programmers/Main.hs
|
mit
| 570 | 0 | 12 | 126 | 301 | 160 | 141 | 15 | 1 |
-- |
-- Copyright : (c) Sam T. 2013
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
--
-- This module implements reasonably fast incremental levenshtein distance
-- algorithm in pure functional fashion.
--
-- We use funny data structure which carry on just the front of the table
-- used to cache previously computed distances. Therefore we have low memory
-- consumption suitable for considerably long sequences - /O(n + m)/ memory
-- space where /n/, /m/ are length of right and left sequences respectively.
-- However the structure lets us find distances incrementally, so we'll have
-- to expand the table either on the left or on the right in any moment.
--
-- Suppose the base case - we have both sequences empty. It'll look just like:
--
-- @
-- x
-- @
--
-- Next we insert an element to the right:
--
-- @
-- a x
-- @
--
-- And to the left:
--
-- @
-- k
-- a x
-- @
--
-- After a few insertions we can get something like:
--
-- @
-- k
-- l
-- a b c x
-- @
--
-- Now suppose we want to insert to the right so we can see how insertion is done:
--
-- * At first we need to move non-main front at one position.
--
-- @
-- k
-- l
-- a b c x
-- @
--
-- * At second we need to insert new element to the main front.
--
-- @
-- k
-- l
-- a b c d
-- @
--
-- * Finally we should just find overall edit distance 'x'.
--
-- @
-- k
-- l
-- a b c d x
-- @
--
-- Where main front is where we want to insert to. Thanks to symmetry
-- we can implement insertion to the left by swapping fronts.
-- Insertion cost is /O(n)/ where the /n/ is length of sequence to insert to.
--
module Text.Regex.Fuzzy.Dist
( Dist, DistFront, DistTable
-- * Query
, editDist, leftSeq, rightSeq
-- * Construction
, emptyF, insertLeft, insertRight, insertBoth, insertMany, accumMany
, (<.), (.>)
-- * Extra
, findEditDist
-- * Debug
)
where
-- | Amount of distance between two sequences of an arbitrary length.
type Dist = Int

-- | Accumulated so far cost for each element in the sequence, most recent
-- element first.
type Front a = [(Dist, a)]

-- | Zipper like structure which contain the left and right fronts and current
-- edit distance (the distance between the two full sequences inserted so far).
type DistFront a = (Front a, Dist, Front a)

-- | Accumulated distance fronts.
type DistTable a = [DistFront a]
-- | Edit distance between the two sequences inserted so far.
editDist :: DistFront a -> Dist
editDist df = let (_, d, _) = df in d
-- | The sequence accumulated on the left, as inserted by 'insertLeft'.
leftSeq :: DistFront a -> [a]
leftSeq (front, _, _) = [ x | (_, x) <- front ]
-- | The sequence accumulated on the right, as inserted by 'insertRight'.
rightSeq :: DistFront a -> [a]
rightSeq (_, _, front) = [ x | (_, x) <- front ]
-- | Initial front's value: both sequences empty, edit distance zero.
emptyF :: DistFront a
emptyF = ([], 0, [])

-- | Exchange left and right front's.  Used to derive right-hand operations
-- from left-hand ones by the symmetry of edit distance.
swapFronts :: DistFront a -> DistFront a
swapFronts (p, x, q) = (q, x, p)
-- | Cost of one table cell from its three neighbours: the two adjacent
-- cells cost one step each (insertion/deletion), the diagonal cell costs
-- one extra step only when the mismatch flag is set (substitution).
dist :: Dist -> Dist -> Dist -> Bool -> Dist
dist left diag up mismatch = min (left + 1) (min (up + 1) (diag + penalty))
  where
    penalty = if mismatch then 1 else 0
-- | Shift a front one position orthogonally to its direction, recomputing
-- every cached distance against the freshly inserted element @e@ of the
-- opposite front; @j@ is the border index used when a front is exhausted.
moveFront :: Eq a
          => a -- ^ Element of the other front at new front position.
          -> Int -- ^ New position index.
          -> Front a -- ^ Arbitrary long front to move.
          -> Front a -- ^ Front moved at 1 pos in orth direction.
moveFront e j = go
  where
    -- recompute from the far end so each cell sees both its already-moved
    -- neighbour (xs') and the old one (xs)
    go [] = []
    go ((d, x) : xs) = let xs' = go xs in
        (dist (lastDist xs') (lastDist xs) d (e /= x), x) : xs'
    -- cost at the head of a (possibly exhausted) front
    lastDist [] = j
    lastDist ((d, _) : _) = d
-- | Put a new element on top of a front with the given accumulated cost.
insertFront :: Eq a => Dist -> a -> Front a -> Front a
insertFront cost el front = (cost, el) : front
-- | Extend the left-hand sequence by one element, updating both fronts and
-- the overall edit distance in /O(length of the right front)/.
insertLeft :: Eq a => a -> DistFront a -> DistFront a
insertLeft e (p, c, q) = (p', c', q')
  where
    -- the new element joins the left (main) front carrying the old distance
    p' = insertFront c e p
    -- new overall distance from its three neighbours in the virtual table
    c' = dist (lastDist (length q) p')
              (lastDist (length p) q)
              (lastDist (length p') q')
              (comp e q)
    -- the other front moves one position orthogonally
    q' = moveFront e (length p') q
    -- mismatch flag against the head of the right front
    -- (an empty right front counts as a mismatch)
    comp _ [] = True
    comp e' ((_, x) : _) = e' /= x
    -- head cost of a front, falling back to the border index j
    lastDist j [] = j
    lastDist _ ((d, _) : _) = d
-- | Mirror of 'insertLeft': extend the right-hand sequence by one element.
insertRight :: Eq a => DistFront a -> a -> DistFront a
insertRight front x = swapFronts (insertLeft x (swapFronts front))

-- | Insert one element on each side in a single step.
insertBoth :: Eq a => a -> DistFront a -> a -> DistFront a
insertBoth l front r = (l `insertLeft` front) `insertRight` r
-- | Insert whole sequences, pairwise from the front, and return the final
-- front (whose 'editDist' is the distance between the two sequences).
insertMany :: Eq a => [a] -> DistFront a -> [a] -> DistFront a
insertMany ls front rs = case (ls, rs) of
    ([],      [])      -> front
    (l : ls', [])      -> insertMany ls' (l `insertLeft` front) []
    ([],      r : rs') -> insertMany [] (front `insertRight` r) rs'
    (l : ls', r : rs') -> insertMany ls' (insertBoth l front r) rs'
-- | Like 'insertMany' but keeps every intermediate front, starting front
-- first and final front last.
accumMany :: Eq a => [a] -> DistFront a -> [a] -> [DistFront a]
accumMany ls front rs = case (ls, rs) of
    ([],      [])      -> [front]
    (l : ls', [])      -> front : accumMany ls' (l `insertLeft` front) []
    ([],      r : rs') -> front : accumMany [] (front `insertRight` r) rs'
    (l : ls', r : rs') -> front : accumMany ls' (insertBoth l front r) rs'
-- | Operator version of 'insertLeft'.
(<.) :: Eq a => a -> DistFront a -> DistFront a
e <. df = insertLeft e df
infixr 3 <.
-- | Operator version of 'insertRight'.
(.>) :: Eq a => DistFront a -> a -> DistFront a
df .> e = insertRight df e
infixl 4 .>
-- | Plain (non-incremental) edit distance between two whole sequences.
findEditDist :: Eq a => [a] -> [a] -> Int
findEditDist xs ys = editDist (insertMany xs emptyF ys)
|
pxqr/regex-fuzzy
|
src/Text/Regex/Fuzzy/Dist.hs
|
mit
| 5,466 | 0 | 13 | 1,601 | 1,475 | 832 | 643 | 69 | 3 |
module GroupCreator.Groupings
  ( Grouping(..)
  , crossover
  , mutate
  ) where

import Data.List

-- | A partition of people (identified by 'Int' indices) into groups.
-- Invariant (assumed, not enforced here): every person appears in exactly
-- one group -- TODO confirm with call sites.
data Grouping = Grouping [[Int]]
  deriving (Eq, Show)

--a = Grouping [[8,10,0],[3,16,4,13],[7,20],[17,15,2],[12],[1,6,19],[9,14,18],[5,11]]
--b = Grouping [[11,8],[14,18,0,4],[2,15,1],[5,20,10],[12,3],[6,19,17],[9,13,16,7]]
-- | Combine two groupings: first take alternating, pairwise-disjoint groups
-- from both parents ('interleaved'), then pack the remaining people (after
-- removing those already placed) into new groups joined two at a time.
-- The trailing comments show the running example for groupings a and b above.
crossover :: Grouping -> Grouping -> Grouping
crossover (Grouping a) (Grouping b) = Grouping $ firstPart `Data.List.union` secondPartWithoutDupes --[[8,10,0],[9,13,16,7],[17,15,2],[12,3],[1,6,19],[5,11],[4,20],[14,18]]
  where
    secondPartWithoutDupes = cleanUp $ removeDupes [] secondPart --[[4,20],[14,18]]
    secondPart = joinByPairs theRestWithoutSeen --[[4,20],[14,18,14,18,4],[20]]
    theRestWithoutSeen = cleanUp $ Data.List.map (\\ seenInFirst) theRest --[[4],[20],[14,18],[14,18,4],[20]]
    theRest = (a `Data.List.union` b) \\ firstPart --[[3,16,4,13],[7,20],[12],[9,14,18],[11,8],[14,18,0,4],[2,15,1],[5,20,10],[6,19,17]]
    seenInFirst = concat firstPart --[8,10,0,9,13,16,7,17,15,2,12,3,1,6,19,5,11]
    firstPart = interleaved [] a (reverse b) --[[8,10,0],[9,13,16,7],[17,15,2],[12,3],[1,6,19],[5,11]]
-- | Drop empty groups from a grouping.
cleanUp :: Eq a => [[a]] -> [[a]]
cleanUp = filter (not . null)
-- | Deduplicate each group and drop the elements already seen in earlier
-- groups (or in the initial accumulator).  May leave empty groups behind.
removeDupes :: Eq a => [a] -> [[a]] -> [[a]]
removeDupes seen groups = case groups of
    []     -> []
    g : gs -> fresh : removeDupes (seen ++ uniques) gs
      where
        uniques = nub g
        fresh   = uniques \\ seen
-- | Concatenate neighbouring groups two at a time; a trailing odd group is
-- kept as-is.
joinByPairs :: [[a]] -> [[a]]
joinByPairs groups = case groups of
    []           -> []
    [single]     -> [single]
    g1 : g2 : gs -> (g1 ++ g2) : joinByPairs gs
-- | Alternately pick groups from the two lists, keeping only groups that are
-- disjoint from everything collected so far in @acc@.
-- NOTE(review): in the "keep" branch the recursion deliberately swaps the
-- remaining lists (y before xs) -- that is what produces the interleaving;
-- the "skip" branch retries the same list.
interleaved :: Eq a => [a] -> [[a]] -> [[a]] -> [[a]]
interleaved acc [] [] = []
interleaved acc [] x = interleaved acc x []
interleaved acc (x:xs) y = if (x `intersect` acc) == []
                           then (x:interleaved (acc ++ x) y xs) --swap xs and y
                           else interleaved acc xs y
-- | Apply a list of optional mutation instructions in order; 'Nothing'
-- entries are skipped.
mutate :: [Maybe (Int, Int, Int)] -> Grouping -> Grouping
mutate instructions grouping = foldl apply grouping instructions
  where
    apply g Nothing          = g
    apply g (Just (a, b, c)) = mutateSingle a b c g
-- The three given Ints can be random integers
-- | Move one person from a source group to a destination group, both chosen
-- by reducing the random indices modulo the available counts.  Moving the
-- last person out of a group removes that group entirely (replaceOnce with
-- an empty replacement drops the slot).
-- NOTE(review): if a group can ever be empty, @mod personI 0@ below is a
-- division by zero -- confirm groups are always non-empty.
mutateSingle :: Int -> Int -> Int -> Grouping -> Grouping
mutateSingle sourceI destinationI personI (Grouping grouping)
  | source == destination = Grouping grouping
  | otherwise = Grouping $ replaceOnce destination destination' . replaceOnce source source' $ grouping
  where
    groupCount = length grouping
    source = grouping !! (mod sourceI groupCount)
    destination = grouping !! (mod destinationI groupCount)
    personCountInSource = length source
    person = source !! (mod personI personCountInSource)
    source' = source \\ [person]
    destination' = destination ++ [person]
    -- replace the first occurrence of a by b; an empty b deletes the slot
    replaceOnce _ _ [] = []
    replaceOnce a b (x:xs)
      | a == x = if b == []
                 then xs
                 else (b : xs)
      | otherwise = x : replaceOnce a b xs
|
cambraca/group-creator
|
GroupCreator/Groupings.hs
|
mit
| 2,872 | 0 | 10 | 564 | 971 | 522 | 449 | 51 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Messaging.Exchanges
( featureCreatureExchange
) where
import qualified Config.Internal.RabbitMQ as Config
import qualified Network.AMQP.MessageBus as MB
-- | Declare the feature-creature topic exchange on the broker, using the
-- exchange name from the RabbitMQ configuration.
featureCreatureExchange :: Config.RabbitMQConfig -> MB.WithConn ()
featureCreatureExchange cfg = MB.createExchange exchange
  where
    exchange = MB.Exchange (Config.getExchangeName cfg) "topic" True
|
gust/feature-creature
|
legacy/lib/Messaging/Exchanges.hs
|
mit
| 391 | 0 | 12 | 48 | 89 | 50 | 39 | 9 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module GameNg
( initialStateFromConfig
, getViews
, updateState
, actionForGameRunning
, GameState(..)
, GameRunning(..)
, GameOver(..)
, gameRunningRogueHistory
, getGameOverView
) where
import ClassyPrelude
import Config.GameConfig
import Control.Monad.Extra (whenJust)
import Data.Easy (ifToMaybe, maybeToEither)
import Data.List (cycle)
import Network.Protocol
-- | Top-level state: a game is either still running or finished.
data GameState = GameRunning_ GameRunning | GameOver_ GameOver

-- | Complete state of a running game.
data GameRunning = GameRunning
    { gameRunningGameConfig       :: GameConfig
    , gameRunningPlayerPositions  :: PlayerPositions
    , gameRunningPlayerEnergies   :: PlayerEnergies
    , gameRunningOpenRogueHistory :: OpenRogueHistory -- ^ full, un-shadowed rogue history
    , gameRunningNextPlayers      :: [Player]         -- ^ round-robin turn order; built with 'cycle', so infinite
    }
    deriving (Eq, Show, Read)

-- |Function to access the shadowed version of the rogueHistory
gameRunningRogueHistory :: GameRunning -> RogueHistory
gameRunningRogueHistory = toShadowRogueHistory . gameRunningOpenRogueHistory

-- | Final state of a finished game, including who won.
data GameOver =
    GameOver
    { gameOverGameConfig      :: GameConfig
    , gameOverPlayerPositions :: PlayerPositions
    , gameOverPlayerEnergies  :: PlayerEnergies
    , gameOverRogueHistory    :: OpenRogueHistory
    , gameOverWinningPlayer   :: Player
    }
    deriving (Eq, Show, Read)
-- TODO: make game-initiation better
-- | The initial state of the game: initial positions and energies from the
-- config, an empty rogue history, and an infinite round-robin player order.
initialStateFromConfig :: GameConfig -> GameRunning
initialStateFromConfig config = GameRunning
    { gameRunningGameConfig = config
    , gameRunningPlayerPositions = initialPlayerPositions config
    , gameRunningPlayerEnergies = initialPlayerEnergies config
    , gameRunningOpenRogueHistory = OpenRogueHistory []
    , gameRunningNextPlayers = cycle . toList . players $ config
    }
-- | Update the state with an action.  Returns the error 'GameIsOver' if the
-- state is already in the game-over state.
updateState :: Action -> GameState -> Either GameError GameState
updateState _ (GameOver_ _) = Left GameIsOver
updateState action (GameRunning_ running) =
    either GameOver_ GameRunning_ <$> actionForGameRunning action running
-- |Add an action for the running game.
-- Validates the move, applies it, and decides whether the game is over
-- (rogue survived to maxRounds, or a catcher landed on the rogue).
-- NOTE(review): only 'Move' actions are matched here -- if 'Action' ever
-- grows more constructors this becomes a partial match; confirm.
actionForGameRunning :: Action -> GameRunning -> Either GameError (Either GameOver GameRunning)
actionForGameRunning
    Move { actionPlayer, actionEnergy, actionNode }
    state@GameRunning
        { gameRunningGameConfig =
            gameRunningGameConfig@GameConfig
                { network
                , rogueShowsAt
                , maxRounds
                }
        , gameRunningPlayerPositions
        , gameRunningPlayerEnergies
        , gameRunningOpenRogueHistory
        , gameRunningNextPlayers
        }
    = do
    let roguePlayer = getRogue gameRunningGameConfig
    let nextPlayer = headEx gameRunningNextPlayers
    let otherNextPlayers = tailEx gameRunningNextPlayers

    -- validation: correct player's turn, player exists, edge reachable with
    -- that energy, energy available, target node not blocked by a catcher
    unless (actionPlayer == nextPlayer) . Left $ NotTurn nextPlayer

    previousNode <-
        maybeToEither (PlayerNotFound actionPlayer) .
            lookup actionPlayer $
            gameRunningPlayerPositions

    unless (canMoveBetween network previousNode actionEnergy actionNode) .
        Left $ NotReachable previousNode actionEnergy actionNode

    newPlayerEnergies <-
        nextPlayerEnergies gameRunningPlayerEnergies actionPlayer actionEnergy

    whenJust
        (isBlocked gameRunningPlayerPositions roguePlayer actionNode)
        $ Left . NodeBlocked

    let newNextPlayers = otherNextPlayers -- TODO: implement skipping

    -- the rogue's move is appended to its history, flagged as visible when
    -- the current round index is one of the configured reveal rounds
    let newRogueHistory =
            if actionPlayer == roguePlayer
                then
                    (actionEnergy
                    , actionNode
                    , length gameRunningOpenRogueHistory `elem` rogueShowsAt
                    ) `cons`
                    gameRunningOpenRogueHistory
                else gameRunningOpenRogueHistory

    let newPlayerPositions = insertMap actionPlayer actionNode gameRunningPlayerPositions

    -- game over checking
    roguePosition <-
        maybeToEither (PlayerNotFound roguePlayer) .
            lookup roguePlayer $
            newPlayerPositions
    -- rogue wins by surviving maxRounds of its own moves
    let rogueWonMay = do -- maybe monad
            unless (actionPlayer == roguePlayer) Nothing
            -- TODO: not necessary because checked implicitly
            unless (length gameRunningOpenRogueHistory == maxRounds) Nothing
            return roguePlayer
    -- a catcher wins by standing on the rogue's node
    let playerCaughtMay = do -- maybe monad
            when (actionPlayer == roguePlayer) Nothing
            -- TODO: not necessary because checked implicitly
            map fst .
                find (\(p,n) -> p /= roguePlayer && n == roguePosition) .
                mapToList $
                newPlayerPositions
    let winningPlayerMay = rogueWonMay <|> playerCaughtMay

    return $ case winningPlayerMay of
        Just winningPlayer ->
            Left GameOver
                { gameOverGameConfig = gameRunningGameConfig
                , gameOverPlayerEnergies = newPlayerEnergies
                , gameOverPlayerPositions = newPlayerPositions
                , gameOverRogueHistory = gameRunningOpenRogueHistory
                , gameOverWinningPlayer = winningPlayer
                }
        Nothing ->
            Right state
                { gameRunningPlayerPositions = newPlayerPositions
                , gameRunningPlayerEnergies = newPlayerEnergies
                , gameRunningOpenRogueHistory = newRogueHistory
                , gameRunningNextPlayers = newNextPlayers
                }
-- | Can a pawn travel from @from@ to @to@ in one step using @energy@?
-- True iff an overlay exists for that energy and it contains the edge in
-- either direction.
canMoveBetween :: Network -> Node -> Energy -> Node -> Bool
canMoveBetween net from energy to =
    case lookup energy (overlays net) of
        Nothing -> False
        Just overlay ->
            any connects (map edge (overlayEdges overlay))
  where
    connects (n1, n2) =
        (n1 == from && n2 == to) || (n1 == to && n2 == from)
-- | The catcher (if any) currently standing on @node@; the rogue itself
-- never blocks a node.
isBlocked :: PlayerPositions -> Player -> Node -> Maybe Player
isBlocked positions roguePlayer node =
    fst <$> find occupiedByCatcher (mapToList positions)
  where
    occupiedByCatcher (player, pos) = player /= roguePlayer && pos == node
-- | Deduct one unit of the given energy from the player's account, failing
-- with the appropriate 'GameError' when the player, the energy kind, or a
-- remaining unit is missing.
nextPlayerEnergies ::
       PlayerEnergies -> Player -> Energy -> Either GameError PlayerEnergies
nextPlayerEnergies pEnergies player energy = do
    eMap <- maybeToEither (PlayerNotFound player) (lookup player pEnergies)
    energyCount <- maybeToEither (EnergyNotFound energy) (lookup energy eMap)
    if energyCount >= 1
        then Right (insertMap player (insertMap energy (energyCount - 1) eMap) pEnergies)
        else Left NotEnoughEnergy
-- |Converts the GameState into the 2 Views
-- The rogue sees everything about itself; catchers see the rogue's position
-- only for the last turn it revealed itself.
getViews :: GameRunning -> (RogueGameView, CatcherGameView)
getViews
    GameRunning
    { gameRunningGameConfig = GameConfig {players}
    , gameRunningPlayerPositions
    , gameRunningPlayerEnergies
    , gameRunningOpenRogueHistory
    , gameRunningNextPlayers
    } =
    ( RogueGameView
        { roguePlayerPositions = gameRunningPlayerPositions
        , rogueEnergies = gameRunningPlayerEnergies
        , rogueOwnHistory = shadowRogueHistory
        , rogueNextPlayer = nextPlayer
        }
    , CatcherGameView
        { catcherPlayerPositions = catcherPlayerPositions -- filtered player positions
        , catcherEnergies = gameRunningPlayerEnergies
        , catcherRogueHistory = shadowRogueHistory
        , catcherNextPlayer = nextPlayer
        }
    )
  where
    -- NOTE(review): assumes the rogue is the first entry of 'players';
    -- confirm this matches 'getRogue' used elsewhere
    roguePlayer = head players
    nextPlayer = headEx gameRunningNextPlayers
    shadowRogueHistory = toShadowRogueHistory gameRunningOpenRogueHistory
    -- overwrite the rogue's position with its last revealed one (or Nothing)
    catcherPlayerPositions =
        updateMap (const rogueShowsPosition) roguePlayer gameRunningPlayerPositions
    rogueShowsPosition =
        join .
        map snd .
        find (isJust . snd) $
        shadowRogueHistory
-- | Collapse an open history into the public one: a node is kept only for
-- turns where the rogue showed itself.
toShadowRogueHistory :: OpenRogueHistory -> RogueHistory
toShadowRogueHistory history =
    RogueHistory
        [ (energy, if showing then Just node else Nothing)
        | (energy, node, showing) <- openRogueHistory history
        ]
-- | Project the final game state into the view that is sent to clients.
getGameOverView :: GameOver -> GameOverView
getGameOverView gameOver =
    GameOverView
        (gameOverPlayerPositions gameOver)
        (gameOverPlayerEnergies gameOver)
        (gameOverRogueHistory gameOver)
        (gameOverWinningPlayer gameOver)
|
Haskell-Praxis/core-catcher
|
src/GameNg.hs
|
mit
| 8,490 | 0 | 20 | 2,397 | 1,589 | 848 | 741 | 191 | 3 |
module Math.Geom.Shapes where

import Math.Vec
import Math.Geom.Primitives

-- | Renderable shape primitives.
data Shape = Sphere Point Double -- center radius
           | Triangle (Point, Point, Point) -- three vertices
           | Quad (Point, Point, Point, Point) -- four vertices
           | Box Vec3 -- center dimensions (w,h,l)
                      -- NOTE(review): only one Vec3 field, so presumably the
                      -- dimensions with an implied origin/center -- confirm
           deriving (Show)
|
davidyu/Slowpoke
|
hs/src/Math/Geom/shapes.hs
|
mit
| 306 | 0 | 7 | 93 | 74 | 47 | 27 | 8 | 0 |
module TestSpec (spec) where

import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)

-- | Example Hspec suite for 'Prelude.head': a concrete example, a
-- QuickCheck property over arbitrary non-empty lists, and the exception
-- raised on an empty list.
spec :: Spec
spec = do
  describe "Prelude.head" $ do
    it "returns the first element of a list" $ do
      head [23 ..] `shouldBe` (23 :: Int)
    it "returns the first element of an *arbitrary* list" $
      property $ \x xs -> head (x:xs) == (x :: Int)
    it "throws an exception if used with an empty list" $ do
      -- 'evaluate' forces the thunk so the pure exception surfaces in IO
      evaluate (head []) `shouldThrow` anyException
|
igorbonadio/haskell_template
|
test/TestSpec.hs
|
mit
| 480 | 0 | 17 | 111 | 152 | 80 | 72 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-|
Module : Murl.Api.Statuses
Description : Status API implementation.
Copyright : (c) 2016 murl developers
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : portable
A generic status API for a RESTful web service. Implements general
information (e.g. up, version) retrieval endpoints.
-}
module Murl.Api.Statuses where
import Data.Version
import Paths_murl
import Servant
-- | Statuses API: GET /statuses/ping for liveness, GET /statuses/version
-- for the package version, both as plain text.
type Api = "statuses" :> ( "ping" :> Get '[PlainText] String
                      :<|> "version" :> Get '[PlainText] String
                         )
-- | Statuses server: answers "pong" to pings and the Cabal-derived version
-- string to version requests.
server :: Server Api
server = ping :<|> versionHandler
  where
    ping = return "pong"
    versionHandler = return (showVersion version)
|
alunduil/murl
|
src/Murl/Api/Statuses.hs
|
mit
| 788 | 0 | 12 | 195 | 97 | 55 | 42 | 11 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module Main where
import Tracking.Server
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import qualified Data.Map.Strict as M
-- | Continuously watch the logger's user map and print every batch of
-- changes (the keys present now but not in the previous snapshot).
--
-- The original version busy-polled the TVar every 10ms; this one blocks in
-- STM until the map actually changes ('check' retries the transaction),
-- which removes the polling loop while printing exactly the same output.
runLogger :: MemoryLogger -> IO ()
runLogger logger = loop M.empty
  where
    loop seen = do
        current <- atomically $ do
            m <- readTVar (loggerUser logger)
            check (m /= seen) -- retry until the map differs from the snapshot
            return m
        print $ M.difference current seen
        loop current
main :: IO ()
main = do
    -- ports 30000/31000 -- presumably data/control sockets; TODO confirm
    logger <- memoryLogger 30000 31000
    a <- async $ runLogger logger
    _ <- getLine -- any input line shuts the service down
    cancel a
    closeLogger logger
|
schell/xybish
|
src/Main.hs
|
mit
| 758 | 0 | 14 | 276 | 198 | 98 | 100 | 23 | 2 |
module TBR.Types
    ( Author
    , Book(..)
    , BookList
    , Section(..)
    , Title
    ) where

import Data.Monoid
import Data.Ord (comparing)
import Data.Set (Set)
import Data.Text (Text, unpack)

-- | Name of a book's author.
type Author = Text
-- | Title of a book.
type Title = Text
-- | A reading list; 'Set' keeps entries unique and ordered per 'Ord' 'Book'.
type BookList = Set Book
-- | Which shelf a book sits on.
data Section = Reading    -- ^ currently being read
             | ToBeRead   -- ^ queued up next
             | Other Text -- ^ user-defined shelf
             deriving (Eq, Ord)

-- | A single reading-list entry.
data Book = Book { bookTitle   :: Title
                 , bookAuthor  :: Author
                 , bookSection :: Section
                 } deriving (Show, Eq)
-- | Books order by section first, then author, then title.
instance Ord Book where
    compare x y =
        comparing bookSection x y
            <> comparing bookAuthor x y
            <> comparing bookTitle x y
-- | Display-oriented names ("To Be Read" contains spaces), so this Show is
-- for humans: @read . show@ does not round-trip.
instance Show Section where
    show Reading = "Reading"
    show ToBeRead = "To Be Read"
    show (Other t) = unpack t
|
abhinav/tbr
|
TBR/Types.hs
|
mit
| 837 | 0 | 8 | 316 | 225 | 129 | 96 | 29 | 0 |
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-
There is a lot of code copied from GHC here, and some conditional
compilation. Instead of fixing all warnings and making it much more
difficult to compare the code to the original, just ignore unused
binds and imports.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-
build package with the GHC API
-}
module GhcBuild (getBuildFlags, buildPackage, getPackageArgs) where
import qualified Control.Exception as Ex
import Control.Monad (when)
import Data.IORef
import System.Process (rawSystem)
import System.Environment (getEnvironment)
import System.Exit (ExitCode (..))
import CmdLineParser
import Data.Char (toLower)
import Data.List (isPrefixOf, isSuffixOf, partition)
import Data.Maybe (fromMaybe)
import DriverPhases (Phase (..), anyHsc, isHaskellSrcFilename,
                     isSourceFilename, startPhase)
import DriverPipeline (compileFile, link, linkBinary, oneShot)
import DynFlags (DynFlags, compilerInfo)
import qualified DynFlags
import qualified DynFlags as DF
import qualified GHC
import GHC.Paths (libdir)
import HscTypes (HscEnv (..), emptyHomePackageTable)
import qualified Module
import MonadUtils (liftIO)
import Panic (throwGhcException, panic)
import SrcLoc (Located, mkGeneralLocated)
import qualified StaticFlags
#if __GLASGOW_HASKELL__ >= 707
import DynFlags (ldInputs)
#else
import StaticFlags (v_Ld_inputs)
#endif
import System.FilePath (normalise, (</>))
import Util (consIORef, looksLikeModuleName)
{-
This contains a huge hack:
GHC only accepts setting static flags once per process, however it has no way to
get the remaining options from the command line, without setting the static flags.
This code overwrites the IORef to disable the check. This will likely cause
problems if the flags are modified, but fortunately that's relatively uncommon.
-}
-- | Read the GHC arguments recorded by the yesod-ghc-wrapper and parse the
-- static flags from them, bypassing GHC's once-per-process check by
-- resetting the v_opt_C_ready IORef (see module comment above).
getBuildFlags :: IO [Located String]
getBuildFlags = do
  -- NOTE(review): 'read' is partial -- a malformed ghcargs.txt crashes here
  argv0 <- fmap read $ readFile "yesod-devel/ghcargs.txt" -- generated by yesod-ghc-wrapper
  argv0' <- prependHsenvArgv argv0
  let (minusB_args, argv1) = partition ("-B" `isPrefixOf`) argv0'
      mbMinusB | null minusB_args = Nothing
               | otherwise = Just (drop 2 (last minusB_args))
  let argv1' = map (mkGeneralLocated "on the commandline") argv1
  writeIORef StaticFlags.v_opt_C_ready False -- the huge hack
  (argv2, staticFlagWarnings) <- GHC.parseStaticFlags argv1'
  return argv2
-- | When running inside an hsenv sandbox (HSENV environment variable set),
-- prepend the sandbox's package-db flags (PACKAGE_DB_FOR_GHC) to the
-- argument list; otherwise return the arguments unchanged.
prependHsenvArgv :: [String] -> IO [String]
prependHsenvArgv argv = do
    env <- getEnvironment
    case lookup "HSENV" env of
        Nothing -> return argv
        Just _  -> return (words (fromMaybe "" (lookup "PACKAGE_DB_FOR_GHC" env)) ++ argv)
-- construct a command line for loading the right packages
--
-- Produces: extra package-db flags, -hide-all-packages (if set), trust and
-- ignore flags, the explicit -package flags, and finally the package being
-- built.  The CPP forest tracks GHC API changes across 7.x/8.x releases.
getPackageArgs :: Maybe String -> [Located String] -> IO [String]
getPackageArgs buildDir argv2 = do
  (mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
  GHC.runGhc (Just libdir) $ do
    dflags0 <- GHC.getSessionDynFlags
    (dflags1, _, _) <- GHC.parseDynamicFlags dflags0 argv3
    let pkgFlags = map convertPkgFlag (GHC.packageFlags dflags1)
        ignorePkgFlags =
#if __GLASGOW_HASKELL__ >= 800
          map convertIgnorePkgFlag (GHC.ignorePackageFlags dflags1)
#else
          []
#endif
        trustPkgFlags =
#if __GLASGOW_HASKELL__ >= 800
          map convertTrustPkgFlag (GHC.trustFlags dflags1)
#else
          []
#endif
        hideAll | gopt DF.Opt_HideAllPackages dflags1 = [ "-hide-all-packages"]
                | otherwise = []
        ownPkg = packageString (DF.thisPackage dflags1)
    return (reverse (extra dflags1) ++ hideAll ++ trustPkgFlags ++ ignorePkgFlags ++ pkgFlags ++ [ownPkg])
  where
    -- flag constructors were split out of PackageFlag in GHC 8.0
#if __GLASGOW_HASKELL__ >= 800
    convertIgnorePkgFlag (DF.IgnorePackage p) = "-ignore-package" ++ p
    convertTrustPkgFlag (DF.TrustPackage p) = "-trust" ++ p
    convertTrustPkgFlag (DF.DistrustPackage p) = "-distrust" ++ p
#else
    convertPkgFlag (DF.IgnorePackage p) = "-ignore-package" ++ p
    convertPkgFlag (DF.TrustPackage p) = "-trust" ++ p
    convertPkgFlag (DF.DistrustPackage p) = "-distrust" ++ p
#endif
#if __GLASGOW_HASKELL__ >= 800
    convertPkgFlag (DF.ExposePackage _ (DF.PackageArg p) _) = "-package" ++ p
    convertPkgFlag (DF.ExposePackage _ (DF.UnitIdArg p) _) = "-package-id" ++ p
#elif __GLASGOW_HASKELL__ == 710
    convertPkgFlag (DF.ExposePackage (DF.PackageArg p) _) = "-package" ++ p
    convertPkgFlag (DF.ExposePackage (DF.PackageIdArg p) _) = "-package-id" ++ p
    convertPkgFlag (DF.ExposePackage (DF.PackageKeyArg p) _) = "-package-key" ++ p
#else
    convertPkgFlag (DF.ExposePackage p) = "-package" ++ p
    convertPkgFlag (DF.ExposePackageId p) = "-package-id" ++ p
#endif
    convertPkgFlag (DF.HidePackage p) = "-hide-package" ++ p
    -- the identifier for "this package" was renamed twice
#if __GLASGOW_HASKELL__ >= 800
    packageString flags = "-package-id" ++ Module.unitIdString flags
#elif __GLASGOW_HASKELL__ == 710
    packageString flags = "-package-key" ++ Module.packageKeyString flags
#else
    packageString flags = "-package-id" ++ Module.packageIdString flags ++ "-inplace"
#endif
#if __GLASGOW_HASKELL__ >= 705
    extra df = inplaceConf ++ extra'
      where
        extra' = concatMap convertExtra (extraConfs df)
        -- old cabal-install sometimes misses the .inplace db, fix it here
        inplaceConf
          | any (".inplace" `isSuffixOf`) extra' = []
          | otherwise = ["-package-db" ++ fromMaybe "dist" buildDir
                         ++ "/package.conf.inplace"]
        extraConfs df = GHC.extraPkgConfs df []
        convertExtra DF.GlobalPkgConf = [ ]
        convertExtra DF.UserPkgConf = [ ]
        convertExtra (DF.PkgConfFile file) = [ "-package-db" ++ file ]
#else
    extra df = inplaceConf ++ extra'
      where
        extra' = map ("-package-conf"++) (GHC.extraPkgConfs df)
        -- old cabal-install sometimes misses the .inplace db, fix it here
        inplaceConf
          | any (".inplace" `isSuffixOf`) extra' = []
          | otherwise = ["-package-conf" ++ fromMaybe "dist" buildDir
                         ++ "/package.conf.inplace"]
#endif
#if __GLASGOW_HASKELL__ >= 707
    gopt = DF.gopt
#else
    gopt = DF.dopt
#endif
-- | Build the package, turning any exception into a printed diagnostic and
-- a 'False' result instead of propagating it.
buildPackage :: [Located String] -> FilePath -> FilePath -> IO Bool
buildPackage args ld ar = buildPackage' args ld ar `Ex.catch` handler
  where
    handler :: Ex.SomeException -> IO Bool
    handler e = do
        putStrLn ("exception building package: " ++ show e)
        return False
-- | Compile all targets through the GHC API (--make style): non-Haskell
-- sources are compiled first and their objects handed to the linker, then
-- the Haskell targets are loaded; on success 'linkPkg' produces the library.
buildPackage' :: [Located String] -> FilePath -> FilePath -> IO Bool
buildPackage' argv2 ld ar = do
  (mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
  GHC.runGhc (Just libdir) $ do
    dflags0 <- GHC.getSessionDynFlags
    (dflags1, _, _) <- GHC.parseDynamicFlags dflags0 argv3
    let dflags2 = dflags1 { GHC.ghcMode = GHC.CompManager
                          , GHC.hscTarget = GHC.hscTarget dflags1
                          , GHC.ghcLink = GHC.LinkBinary
                          , GHC.verbosity = 1
                          }
    (dflags3, fileish_args, _) <- GHC.parseDynamicFlags dflags2 argv3
    GHC.setSessionDynFlags dflags3
    let normal_fileish_paths = map (normalise . GHC.unLoc) fileish_args
        (srcs, objs) = partition_args normal_fileish_paths [] []
        (hs_srcs, non_hs_srcs) = partition haskellish srcs
        -- same heuristic as GHC's Main: names and source-looking files are
        -- Haskell inputs unless an explicit phase says otherwise
        haskellish (f,Nothing) =
          looksLikeModuleName f || isHaskellSrcFilename f || '.' `notElem` f
        haskellish (_,Just phase) =
#if MIN_VERSION_ghc(8,0,0)
          phase `notElem` [As True, As False, Cc, Cobjc, Cobjcxx, CmmCpp, Cmm, StopLn]
#elif MIN_VERSION_ghc(7,8,3)
          phase `notElem` [As True, As False, Cc, Cobjc, Cobjcpp, CmmCpp, Cmm, StopLn]
#elif MIN_VERSION_ghc(7,4,0)
          phase `notElem` [As, Cc, Cobjc, Cobjcpp, CmmCpp, Cmm, StopLn]
#else
          phase `notElem` [As, Cc, CmmCpp, Cmm, StopLn]
#endif
    hsc_env <- GHC.getSession
--    if (null hs_srcs)
--       then liftIO (oneShot hsc_env StopLn srcs)
--       else do
    -- compile non-Haskell sources to objects first
#if MIN_VERSION_ghc(7,2,0)
    o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x)
#else
    o_files <- mapM (\x -> compileFile hsc_env StopLn x)
#endif
                 non_hs_srcs
    -- hand the objects to the linker (API changed in 7.7)
#if __GLASGOW_HASKELL__ >= 707
    let dflags4 = dflags3
                    { ldInputs = map (DF.FileOption "") (reverse o_files)
                                 ++ ldInputs dflags3
                    }
    GHC.setSessionDynFlags dflags4
#else
    liftIO $ mapM_ (consIORef v_Ld_inputs) (reverse o_files)
#endif
    targets <- mapM (uncurry GHC.guessTarget) hs_srcs
    GHC.setTargets targets
    ok_flag <- GHC.load GHC.LoadAllTargets
    if GHC.failed ok_flag
      then return False
      else liftIO (linkPkg ld ar) >> return True
-- | Run the archiver and the linker with the argument lists that the cabal
-- wrapper recorded in yesod-devel/{arargs,ldargs}.txt.
--
-- Fix: the exit codes of 'rawSystem' were previously discarded, so a failing
-- @ar@ or @ld@ still reported a successful build.  They are now checked; a
-- non-zero exit raises an 'IOError', which 'buildPackage' catches and turns
-- into a failed build.
linkPkg :: FilePath -> FilePath -> IO ()
linkPkg ld ar = do
    arargs <- fmap read $ readFile "yesod-devel/arargs.txt"
    checkExit ar =<< rawSystem ar arargs
    ldargs <- fmap read $ readFile "yesod-devel/ldargs.txt"
    checkExit ld =<< rawSystem ld ldargs
  where
    -- fail loudly when an external tool reports an error
    checkExit _ ExitSuccess = return ()
    checkExit prog (ExitFailure n) =
        Ex.throwIO (userError (prog ++ " failed with exit code " ++ show n))
--------------------------------------------------------------------------------------------
-- stuff below copied from ghc main.hs
--------------------------------------------------------------------------------------------
-- | Split command-line words into source inputs (with an optional phase
-- forced by -x) and linker objects, preserving relative order (the
-- accumulators are built reversed and flipped at the end).
partition_args :: [String] -> [(String, Maybe Phase)] -> [String]
               -> ([(String, Maybe Phase)], [String])
partition_args [] srcs objs = (reverse srcs, reverse objs)
partition_args ("-x":suff:args) srcs objs
  | "none" <- suff      = partition_args args srcs objs
  | StopLn <- phase     = partition_args args srcs (slurp ++ objs)
  | otherwise           = partition_args rest (these_srcs ++ srcs) objs
        where phase = startPhase suff
              -- everything up to the next -x inherits this phase
              (slurp,rest) = break (== "-x") args
              these_srcs = zip slurp (repeat (Just phase))
partition_args (arg:args) srcs objs
  | looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs
  | otherwise               = partition_args args srcs (arg:objs)
{-
We split out the object files (.o, .dll) and add them
to v_Ld_inputs for use by the linker.
The following things should be considered compilation manager inputs:
- haskell source files (strings ending in .hs, .lhs or other
haskellish extension),
- module names (not forgetting hierarchical module names),
- and finally we consider everything not containing a '.' to be
a comp manager input, as shorthand for a .hs or .lhs filename.
Everything else is considered to be a linker object, and passed
straight through to the linker.
-}
-- | Heuristic from GHC's Main (see comment above): source-looking
-- filenames, module names, and dot-free words are compilation inputs;
-- everything else goes straight to the linker.
looks_like_an_input :: String -> Bool
looks_like_an_input m = isSourceFilename m
                     || looksLikeModuleName m
                     || '.' `notElem` m
-- Parsing the mode flag
-- | Split the command line into the (single) GHC mode flag, the remaining
-- flags, and warnings; conflicting mode flags become thrown GhcExceptions.
-- Defaults to --make when no mode flag is present.
parseModeFlags :: [Located String]
               -> IO (Mode,
                      [Located String],
                      [Located String])
parseModeFlags args = do
  let ((leftover, errs1, warns), (mModeFlag, errs2, flags')) =
          runCmdLine (processArgs mode_flags args)
                     (Nothing, [], [])
      mode = case mModeFlag of
               Nothing     -> doMakeMode
               Just (m, _) -> m
      errs = errs1 ++ map (mkGeneralLocated "on the commandline") errs2
      -- the error payload gained a location wrapper in 7.10
#if __GLASGOW_HASKELL__ >= 710
      errorsToGhcException' = errorsToGhcException . map (\(GHC.L _ e) -> ("on the commandline", e))
#else
      errorsToGhcException' = errorsToGhcException
#endif
  when (not (null errs)) $ throwGhcException $ errorsToGhcException' errs
  return (mode, flags' ++ leftover, warns)
-- | Command-line parser state: the mode flag seen so far (if any),
-- accumulated errors, and extra flags injected by mode handling.
type ModeM = CmdLineP (Maybe (Mode, String), [String], [Located String])
        -- mode flags sometimes give rise to new DynFlags (eg. -C, see below)
        -- so we collect the new ones and return them.

-- | Table of GHC mode flags, copied from ghc/Main.hs; the mkFlag shim
-- papers over the Flag constructor gaining an AllModes field in 7.10.
mode_flags :: [Flag ModeM]
mode_flags =
  [  ------- help / version ----------------------------------------------
    mkFlag "?" (PassFlag (setMode showGhcUsageMode))
  , mkFlag "-help" (PassFlag (setMode showGhcUsageMode))
  , mkFlag "V" (PassFlag (setMode showVersionMode))
  , mkFlag "-version" (PassFlag (setMode showVersionMode))
  , mkFlag "-numeric-version" (PassFlag (setMode showNumVersionMode))
  , mkFlag "-info" (PassFlag (setMode showInfoMode))
  , mkFlag "-supported-languages" (PassFlag (setMode showSupportedExtensionsMode))
  , mkFlag "-supported-extensions" (PassFlag (setMode showSupportedExtensionsMode))
  ] ++
  [ mkFlag k' (PassFlag (setMode (printSetting k)))
  | k <- ["Project version",
          "Booter version",
          "Stage",
          "Build platform",
          "Host platform",
          "Target platform",
          "Have interpreter",
          "Object splitting supported",
          "Have native code generator",
          "Support SMP",
          "Unregisterised",
          "Tables next to code",
          "RTS ways",
          "Leading underscore",
          "Debug on",
          "LibDir",
          "Global Package DB",
          "C compiler flags",
          "Gcc Linker flags",
          "Ld Linker flags"],
    -- "Project version" becomes --print-project-version, etc.
    let k' = "-print-" ++ map (replaceSpace . toLower) k
        replaceSpace ' ' = '-'
        replaceSpace c = c
  ] ++
      ------- interfaces ----------------------------------------------------
  [ mkFlag "-show-iface" (HasArg (\f -> setMode (showInterfaceMode f)
                                                "--show-iface"))

      ------- primary modes ------------------------------------------------
  , mkFlag "c" (PassFlag (\f -> do setMode (stopBeforeMode StopLn) f
                                   addFlag "-no-link" f))
  , mkFlag "M" (PassFlag (setMode doMkDependHSMode))
  , mkFlag "E" (PassFlag (setMode (stopBeforeMode anyHsc)))
  , mkFlag "C" (PassFlag (\f -> do setMode (stopBeforeMode HCc) f
                                   addFlag "-fvia-C" f))
#if MIN_VERSION_ghc(7,8,3)
  , mkFlag "S" (PassFlag (setMode (stopBeforeMode (As True))))
#else
  , mkFlag "S" (PassFlag (setMode (stopBeforeMode As)))
#endif
  , mkFlag "-make" (PassFlag (setMode doMakeMode))
  , mkFlag "-interactive" (PassFlag (setMode doInteractiveMode))
  , mkFlag "-abi-hash" (PassFlag (setMode doAbiHashMode))
  , mkFlag "e" (SepArg (\s -> setMode (doEvalMode s) "-e"))
  ]
#if MIN_VERSION_ghc(7,10,1)
  where mkFlag fName fOptKind = Flag fName fOptKind AllModes
#else
  where mkFlag fName fOptKind = Flag fName fOptKind
#endif
-- | Merge a newly seen mode flag with the one recorded so far, implementing
-- GHC's precedence rules: -c combines with --make, --help + --interactive
-- means ghci usage, help/version flags dominate, repeated -e accumulates,
-- identical repeats are fine, and anything else is an error.
setMode :: Mode -> String -> EwM ModeM ()
setMode newMode newFlag = liftEwM $ do
    (mModeFlag, errs, flags') <- getCmdLineState
    let (modeFlag', errs') =
            case mModeFlag of
            Nothing -> ((newMode, newFlag), errs)
            Just (oldMode, oldFlag) ->
                case (oldMode, newMode) of
                    -- -c/--make are allowed together, and mean --make -no-link
                    _ | isStopLnMode oldMode && isDoMakeMode newMode
                        || isStopLnMode newMode && isDoMakeMode oldMode ->
                        ((doMakeMode, "--make"), [])
                    -- If we have both --help and --interactive then we
                    -- want showGhciUsage
                    _ | isShowGhcUsageMode oldMode &&
                        isDoInteractiveMode newMode ->
                            ((showGhciUsageMode, oldFlag), [])
                      | isShowGhcUsageMode newMode &&
                        isDoInteractiveMode oldMode ->
                            ((showGhciUsageMode, newFlag), [])
                    -- Otherwise, --help/--version/--numeric-version always win
                      | isDominantFlag oldMode -> ((oldMode, oldFlag), [])
                      | isDominantFlag newMode -> ((newMode, newFlag), [])
                    -- We need to accumulate eval flags like "-e foo -e bar"
                    (Right (Right (DoEval esOld)),
                     Right (Right (DoEval [eNew]))) ->
                        ((Right (Right (DoEval (eNew : esOld))), oldFlag),
                         errs)
                    -- Saying e.g. --interactive --interactive is OK
                    _ | oldFlag == newFlag -> ((oldMode, oldFlag), errs)
                    -- Otherwise, complain
                    _ -> let err = flagMismatchErr oldFlag newFlag
                         in ((oldMode, oldFlag), err : errs)
    putCmdLineState (Just modeFlag', errs', flags')
  where isDominantFlag f = isShowGhcUsageMode f ||
                           isShowGhciUsageMode f ||
                           isShowVersionMode f ||
                           isShowNumVersionMode f
-- | Error text for two incompatible mode flags on one command line.
flagMismatchErr :: String -> String -> String
flagMismatchErr oldFlag newFlag =
    concat ["cannot use `", oldFlag, "' with `", newFlag, "'"]
-- | Record an extra command-line flag injected by mode handling (e.g. -c
-- implies -no-link), tagging it with a synthetic source location.
addFlag :: String -> String -> EwM ModeM ()
addFlag s flag = liftEwM $ do
    (modeFlag, errs, flags') <- getCmdLineState
    putCmdLineState (modeFlag, errs, mkGeneralLocated loc s : flags')
  where
    loc = "addFlag by " ++ flag ++ " on the commandline"
-- | Overall driver mode, layered by how much of GHC must be initialised
-- before the mode can run: pre-startup, then pre-load, then post-load.
type Mode = Either PreStartupMode PostStartupMode
type PostStartupMode = Either PreLoadMode PostLoadMode

-- | Modes that can run before GHC starts up.
data PreStartupMode
  = ShowVersion             -- ghc -V/--version
  | ShowNumVersion          -- ghc --numeric-version
  | ShowSupportedExtensions -- ghc --supported-extensions
  | Print String            -- ghc --print-foo

showVersionMode, showNumVersionMode, showSupportedExtensionsMode :: Mode
showVersionMode = mkPreStartupMode ShowVersion
showNumVersionMode = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions

mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left

isShowVersionMode :: Mode -> Bool
isShowVersionMode (Left ShowVersion) = True
isShowVersionMode _ = False

isShowNumVersionMode :: Mode -> Bool
isShowNumVersionMode (Left ShowNumVersion) = True
isShowNumVersionMode _ = False
-- | Modes that need GHC started but no session loaded.
data PreLoadMode
  = ShowGhcUsage                           -- ghc -?
  | ShowGhciUsage                          -- ghci -?
  | ShowInfo                               -- ghc --info
  | PrintWithDynFlags (DynFlags -> String) -- ghc --print-foo

showGhcUsageMode, showGhciUsageMode, showInfoMode :: Mode
showGhcUsageMode = mkPreLoadMode ShowGhcUsage
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode = mkPreLoadMode ShowInfo

-- | Mode for ghc --print-<setting>; the key is the human-readable setting
-- name ("Project version", ...).  Unknown keys panic at lookup time.
printSetting :: String -> Mode
printSetting k = mkPreLoadMode (PrintWithDynFlags f)
    where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
#if MIN_VERSION_ghc(7,2,0)
                   $ lookup k (compilerInfo dflags)
#else
                   $ fmap convertPrintable (lookup k compilerInfo)
              where
                convertPrintable (DynFlags.String s) = s
                convertPrintable (DynFlags.FromDynFlags f) = f dflags
#endif

mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left

isShowGhcUsageMode :: Mode -> Bool
isShowGhcUsageMode (Right (Left ShowGhcUsage)) = True
isShowGhcUsageMode _ = False

isShowGhciUsageMode :: Mode -> Bool
isShowGhciUsageMode (Right (Left ShowGhciUsage)) = True
isShowGhciUsageMode _ = False
-- | Modes that require the full compiler session to be set up first.
data PostLoadMode
  = ShowInterface FilePath  -- ghc --show-iface
  | DoMkDependHS            -- ghc -M
  | StopBefore Phase        -- ghc -E | -C | -S
                            -- StopBefore StopLn is the default
  | DoMake                  -- ghc --make
  | DoInteractive           -- ghc --interactive
  | DoEval [String]         -- ghc -e foo -e bar => DoEval ["bar", "foo"]
  | DoAbiHash               -- ghc --abi-hash

doMkDependHSMode, doMakeMode, doInteractiveMode, doAbiHashMode :: Mode
doMkDependHSMode  = mkPostLoadMode DoMkDependHS
doMakeMode        = mkPostLoadMode DoMake
doInteractiveMode = mkPostLoadMode DoInteractive
doAbiHashMode     = mkPostLoadMode DoAbiHash

-- | Mode for dumping a compiled interface file.
showInterfaceMode :: FilePath -> Mode
showInterfaceMode = mkPostLoadMode . ShowInterface

-- | Mode that stops the compilation pipeline just before a phase.
stopBeforeMode :: Phase -> Mode
stopBeforeMode = mkPostLoadMode . StopBefore

-- | Mode evaluating a single @-e@ expression.
doEvalMode :: String -> Mode
doEvalMode str = mkPostLoadMode (DoEval [str])

-- | Inject a post-load mode into the combined 'Mode' type.
mkPostLoadMode :: PostLoadMode -> Mode
mkPostLoadMode = Right . Right

isDoInteractiveMode :: Mode -> Bool
isDoInteractiveMode mode = case mode of
  Right (Right DoInteractive) -> True
  _                           -> False

isStopLnMode :: Mode -> Bool
isStopLnMode mode = case mode of
  Right (Right (StopBefore StopLn)) -> True
  _                                 -> False

isDoMakeMode :: Mode -> Bool
isDoMakeMode mode = case mode of
  Right (Right DoMake) -> True
  _                    -> False
#ifdef GHCI
-- | True only for the full GHCi session mode (not one-shot @-e@ eval).
-- Only available when the compiler is built with GHCi support.
isInteractiveMode :: PostLoadMode -> Bool
isInteractiveMode DoInteractive = True
isInteractiveMode _ = False
#endif
-- isInterpretiveMode: byte-code compiler involved
isInterpretiveMode :: PostLoadMode -> Bool
isInterpretiveMode mode = case mode of
  DoInteractive -> True
  DoEval _      -> True
  _             -> False

-- | Modes that expect input files on the command line.
needsInputsMode :: PostLoadMode -> Bool
needsInputsMode mode = case mode of
  DoMkDependHS -> True
  StopBefore _ -> True
  DoMake       -> True
  _            -> False

-- True if we are going to attempt to link in this mode.
-- (we might not actually link, depending on the GhcLink flag)
isLinkMode :: PostLoadMode -> Bool
isLinkMode mode = case mode of
  StopBefore StopLn -> True
  DoMake            -> True
  DoInteractive     -> True
  DoEval _          -> True
  _                 -> False

-- | Modes driven by the compilation manager (--make, GHCi, -e).
isCompManagerMode :: PostLoadMode -> Bool
isCompManagerMode mode = case mode of
  DoMake        -> True
  DoInteractive -> True
  DoEval _      -> True
  _             -> False
|
erikd/yesod
|
yesod-bin/GhcBuild.hs
|
mit
| 22,116 | 0 | 25 | 6,028 | 4,601 | 2,438 | 2,163 | 336 | 6 |
-- | Canon API.
-- Records SimpleCanon, ScalesCanon, Canon embodying musical
-- canon type with increasing levels of parameterization.
-- Functions simpleCanonToScore, scalesCanonToScore,
-- canonToScore answer Score for different Canon types.
module Canon (
SimpleCanon(..)
, simpleCanonToScore
, ScalesCanon(..)
, scalesCanonToScore
, Canon(..)
, canonToScore
) where
import Canon.Data
import Canon.Utils
|
tomtitchener/Canon
|
src/Canon.hs
|
cc0-1.0
| 432 | 0 | 5 | 75 | 51 | 36 | 15 | 9 | 0 |
#!/usr/bin/env runhaskell
-- | Transcribing DNA into RNA
-- Usage: RNA <dataset.txt>
import System.Environment(getArgs)
import qualified Data.ByteString.Char8 as C
-- | Read the dataset file named on the command line and print its first
-- line with every \'T\' transcribed to \'U\' (DNA -> RNA).
main = do
  file : _ <- getArgs
  contents <- C.readFile file
  let dna = head (C.lines contents)
  C.putStrLn (C.map transcribe dna)
  where
    transcribe c
      | c == 'T'  = 'U'
      | otherwise = c
|
kerkomen/rosalind-haskell
|
stronghold/RNA.hs
|
gpl-2.0
| 312 | 0 | 12 | 58 | 104 | 56 | 48 | 7 | 2 |
module BST
( Tree
, singleton
, treeInsert
, treeElem
, fmap
) where
-- | A binary search tree: empty, or a node with a value and two subtrees.
data Tree a = EmptyTree | Node a (Tree a) (Tree a) deriving (Show, Read, Eq)

-- | A tree containing exactly one element.
singleton :: a -> Tree a
singleton x = Node x EmptyTree EmptyTree

-- | Insert a value, keeping the search-tree invariant.
-- An equal value replaces the stored one; subtrees are untouched.
treeInsert :: (Ord a) => a -> Tree a -> Tree a
treeInsert x EmptyTree = singleton x
treeInsert x (Node a left right)
  | x == a    = Node x left right
  | x < a     = Node a (treeInsert x left) right
  | otherwise = Node a left (treeInsert x right)  -- x > a; 'otherwise' keeps the guards total

-- | Is the value present in the tree?
treeElem :: (Ord a) => a -> Tree a -> Bool
treeElem _ EmptyTree = False                      -- wildcard: value unused on empty tree
treeElem x (Node a left right)
  | x == a    = True
  | x < a     = treeElem x left
  | otherwise = treeElem x right                  -- x > a
--class Functor f where
-- fmap :: (a -> b) -> f a -> f b
--instance Functor [] where
-- fmap = map
--instance Functor Maybe where
-- fmap f (Just x) = Just (f x)
-- fmap f Nothing = Nothing
--
--instance Functor Tree where
-- fmap f EmptyTree = EmptyTree
-- fmap f (Node x leftsub rightsub)
--
--instance Functor (Either a) where
-- fmap f (Right x) = Right (f x)
-- fmap f (Left x) = Left x
--
--data Either a b = Left a | Right b
--
--instance Functor Tree where
-- fmap f EmptyTree = EmptyTree
-- fmap f (Node x leftsub rightsub) = Node (f x) (fmap f leftsub) (fmap f rightsub)
--
--instance Functor (Either a) where
-- fmap f (Right x) = Right (f x)
-- fmap f (Left x) Left (f x)
--
-- data Either a b = Left a | Right b
--
--
--
|
softwaremechanic/Miscellaneous
|
Haskell/BST.hs
|
gpl-2.0
| 1,420 | 0 | 8 | 372 | 357 | 192 | 165 | 21 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Database/Hedsql/Drivers/MariaDB/Constructor.hs
Description : MariaDB specific constructors.
Copyright : (c) Leonard Monnier, 2016
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : portable
MariaDB specific constructors for functions/clauses specific to this vendor.
-}
module Database.Hedsql.Drivers.MariaDB.Constructor
( calcFoundRows
, foundRows
, returning
) where
import Database.Hedsql.Common.AST
import Database.Hedsql.Common.Grammar
import Database.Hedsql.Specific.Constructor
import Database.Hedsql.Drivers.MariaDB.Driver
--------------------------------------------------------------------------------
-- PUBLIC
--------------------------------------------------------------------------------
-- | SQL_CALC_FOUND_ROWS function.
-- Placed in a SELECT so a later FOUND_ROWS() call can report the
-- un-LIMITed row count (standard MariaDB behaviour).
calcFoundRows :: Expression MariaDB Void
calcFoundRows = CalcFoundRows
-- | FOUND_ROWS function.
foundRows :: Expression MariaDB Int
foundRows = FoundRows
{-|
Create a RETURNING clause for a DELETE statement with only a FROM clause
specifically for MariaDB.
-}
instance ReturningConstr MariaDB DeleteFromStmt where
    returning = returningGen
{-|
Create a RETURNING clause for a DELETE statement with a WHERE clause
specifically for MariaDB.
-}
instance ReturningConstr MariaDB DeleteWhereStmt where
    returning = returningGen
|
momomimachli/Hedsql
|
src/Database/Hedsql/Drivers/MariaDB/Constructor.hs
|
gpl-3.0
| 1,557 | 0 | 5 | 219 | 122 | 80 | 42 | 21 | 1 |
{-# LANGUAGE UndecidableInstances #-}
module HLinear.Hook.LeftTransformation.QuickCheck
where
import HLinear.Utility.Prelude
import qualified Prelude as P
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
import Math.Structure.Tasty ()
import Test.QuickCheck ( suchThat, Gen )
import Test.QuickCheck.Arbitrary ( Arbitrary, arbitrary, shrink )
import Test.QuickCheck.Modifiers ( NonNegative(..), Small(..) )
import HLinear.Utility.RPermute ( RPermute )
import HLinear.Hook.LeftTransformation.Algebra ()
import HLinear.Hook.LeftTransformation.Basic as LT
import HLinear.Hook.LeftTransformation.Column as LTC
import HLinear.Hook.LeftTransformation.Definition
-- | Random generation and shrinking of left transformations for
-- property-based tests.  Generation either builds the column form
-- directly or wraps it in random row/column permutations.
instance ( Ring a, Arbitrary a, Arbitrary (Unit a) )
  => Arbitrary (LeftTransformation a)
  where
  arbitrary = do
    useLTColumn <- arbitrary
    if useLTColumn
    then ltFromColumns
    else do
      -- Sandwich a column-form transformation between two random
      -- permutations to also exercise the permuted representation.
      l <- ltFromColumns
      pl <- arbitrary :: Gen RPermute
      pr <- arbitrary :: Gen RPermute
      return $ (pl *. l) .* pr
    where
      ltFromColumns = do
        -- We use this slightly odd construction of nrs to avoid infinite loops
        -- that QuickCheck sometimes produces on using
        -- ncs <- arbitary `suchThat` (<nrs)
        NonNegative ncs <- arbitrary
        NonNegative nrsDiff <- arbitrary
        let nrs = ncs + nrsDiff
        -- Column jx has one head unit 'a' and (nrs-jx-1) tail entries.
        cs <- V.generateM ncs $ \jx -> do
          a <- arbitrary
          c <- V.replicateM (nrs-jx-1) arbitrary
          return $ LeftTransformationColumn jx a c
        return $ LeftTransformation nrs cs
  -- Shrinking: split the transformation in half and also shrink one
  -- column at a time (its head unit or one tail entry).
  shrink lt@(LeftTransformation nrs cs)
    | nrs <= 1 || V.length cs <= 1 = []
    | otherwise =
        ltLeft:ltRight:
        [ LeftTransformation nrs $ V.update cs $ V.singleton (jx,c)
        | jx <- [0..ncs-1]
        , c <- shrinkColumn $ cs V.! jx
        ]
    where
      ncs = V.length cs
      (ltLeft,ltRight) = LT.splitAt (ncs `P.div` 2) lt
      shrinkColumn (LeftTransformationColumn s a c) =
        [ LeftTransformationColumn s a' c | a' <- shrink a ]
        <>
        [ LeftTransformationColumn s a $ V.update c $ V.singleton (ix,e)
        | ix <- [0..V.length c - 1]
        , e <- shrink (c V.! ix)
        ]
  -- todo: We would need to determine invertibility for shrinking matrices
  -- With decidable units that is possible.
  shrink (LeftTransformationMatrix _) = []
|
martinra/hlinear
|
src/HLinear/Hook/LeftTransformation/QuickCheck.hs
|
gpl-3.0
| 2,421 | 0 | 19 | 655 | 685 | 369 | 316 | -1 | -1 |
{-# LANGUAGE PackageImports #-}
import Test.HUnit
import Test.QuickCheck
import Sound
import SoundIO
import Music
import "monads-tf" Control.Monad.State(execState,runState,evalState)
import qualified Data.Map as Map
-- | QuickCheck property: scaling the amplitude of a wave multiplies
-- every sample by the same factor.
amplitude_multiply_wave_samples d =
  amplitude d wave' == map (*d) wave'
  where
    wave' = wave 440
-- first test on sound
-- HUnit: the first samples of a 440 Hz wave have the expected values.
convert_a_frequency_to_a_wave =
  take 3 (wave frequency) ~?= [0.0,6.279051952931337e-2,0.12533323356430426]
  where
    frequency = 440
-- HUnit: slicing 2 seconds yields 88000 samples (44 kHz sampling rate
-- per the expected count -- TODO confirm against 'samplingRate').
slice_a_wave_for_a_given_number_of_seconds =
  length (slice seconds aWave) ~?= 88000
  where
    seconds = 2
    aWave = wave 440
-- HUnit: scaling maps samples into the 0..255 byte range.
scale_wave_to_a_single_byte_value =
  take 3 (scale (0,255) aWave) ~?= [127,135,143]
  where
    aWave = wave 440
-- HUnit: note interpretation respects tempo, duration and pitch.
convert_note_to_signal = TestList [
  length (interpret allegro a4crotchet) ~?= (60 * samplingRate `div` 80),
  length (interpret allegro a4minim) ~?= 2 * (60 * samplingRate `div` 80),
  length (interpret largo a4crotchet) ~?= 2 * (60 * samplingRate `div` 80),
  take 3 (interpret largo c4crotchet) ~?= take 3 (wave 261),
  take 3 (interpret largo c3crotchet) ~?= take 3 (wave 130),
  length (interpret largo c3pointedcrotchet) ~?= (3 * 60 * samplingRate `div` 80)
  ]
  where
    c3pointedcrotchet = Note C 3 (Pointed Crotchet)
    c3crotchet = Note C 3 Crotchet
    c4crotchet = Note C 4 Crotchet
    a4crotchet = Note A 4 Crotchet
    a4minim = Note A 4 Minim
-- HUnit: combining two waves with the (°) operator.
operate_on_waves = TestList [
  take 3 (wave 440 ° wave 330) ~?= [0.0,5.500747189290836e-2,0.10983835296048328],
  maximum (take samplingRate (wave 440 ° wave 330)) ~?= 0.9999651284354774
  ]
-- HUnit: the playlist command interpreter (load/play/error handling),
-- run over a Map-backed store via the State monad.
playlist_handling = TestList [
  "can store score files references provided by user" ~: TestList [
     runState (command "load f1 soundfile") emptyStore ~?= (Loaded, storeWithf1),
     runState (command "load f2 otherfile") emptyStore ~?= (Loaded, Map.fromList [("f2", "otherfile")]),
     runState (loadf1 >> loadf2) emptyStore ~?= (Loaded, Map.fromList [("f1", "soundfile"),("f2", "otherfile")])
     ],
  "can 'play' score file loaded by user" ~:
  evalState (command "play f1") storeWithf1 ~?= Play "soundfile",
  "error playing a non existing file" ~:
  evalState (command "play f2") storeWithf1 ~?= Error "score f2 does not exist",
  "error when command does not exist" ~:
  evalState (command "foo bar") storeWithf1 ~?= Error "'foo bar' is not a valid command"
  ]
  where
    emptyStore = Map.empty
    loadf1 = command "load f1 soundfile"
    loadf2 = command "load f2 otherfile"
    storeWithf1 = Map.fromList [("f1", "soundfile")]
-- | All HUnit test groups in this module.
tests = [ convert_a_frequency_to_a_wave,
          slice_a_wave_for_a_given_number_of_seconds,
          scale_wave_to_a_single_byte_value,
          convert_note_to_signal,
          operate_on_waves,
          playlist_handling]
-- | Entry point: run every test group.
runAllTests = runTestTT $ TestList tests
|
abailly/haskell-synthesizer
|
SoundTest.hs
|
gpl-3.0
| 2,871 | 0 | 14 | 608 | 831 | 445 | 386 | 58 | 1 |
import Prelude hiding (lines)
import System.Console.Haskeline
import System.Directory
import System.Process
import Data.List hiding (lines)
import Data.Conduit hiding (mapM_)
import Data.Conduit.List (consume)
import Data.Conduit.ProcessOld
import Data.ByteString.Char8 (ByteString,pack,unpack,lines)
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Resource
-- | Run a shell command and return its output as individual lines.
-- The sink is @await >> consume@, so the FIRST emitted line is
-- discarded -- presumably a banner/header line; TODO confirm.
exec :: String -> IO [ByteString]
exec s = runResourceT $ sourceCmd s $= makeLine $$ await >> consume where
  -- Re-chunk the raw process output into one stream element per line.
  makeLine = do
    n <- await
    flip (maybe (return ())) n $ \r -> do
      mapM_ yield $ lines r
      makeLine
-- | Invoke the external @dodo@ command with the given arguments and
-- echo its output.  A line containing \"Available Op\" switches the
-- terminal to green (ANSI escape) instead of being printed verbatim.
dodo :: String -> InputT IO ()
dodo xs = do
  str <- lift $ exec $ "dodo " ++ xs
  forM_ str $ \xs -> let
      y = unpack xs
    in if "Available Op"`isInfixOf`y
      then outputStr "\ESC[32m"
      else outputStrLn y
-- | Supported commands paired with their help text; used both for
-- tab completion ('cf') and for command matching in 'loop'.
opeList :: [(String,String)]
opeList = [
  ("put","put -- Propose a new task"),
  ("list","list -- List all tasks"),
  ("delete","delete -- Remove a task"),
  ("finish","finish -- Mark a task as finished"),
  ("work","work -- Work on a new task"),
  ("reject","reject -- Reject a proposed tasks"),
  ("accept","accept -- Accept a task"),
  ("quit","quit -- Quit todo app"),
  ("clear","clear -- Clear the screen")]
-- | Translate a user-facing command name into the name understood by
-- the underlying @dodo@ executable; anything else passes through.
form :: String -> String
form cmd = case cmd of
  "put"    -> "add"
  "delete" -> "remove"
  "work"   -> "workon"
  other    -> other
-- | Clear the terminal by shelling out to the @clear@ command.
clear :: InputT IO ()
clear = void $ lift $ system "clear"
-- | Tab-completion: complete the FIRST word of the line against
-- 'opeList' (no completion once a command word precedes the cursor).
cf :: CompletionFunc IO
cf = completeWordWithPrev Nothing " :" $ \pv xs -> let
    mat = filter (\(e,_) -> xs`isPrefixOf`e) opeList
  in case pv of
    [] -> return $ map (\(a,b) -> Completion a b True) mat
    _ -> return []
-- | Haskeline settings: defaults plus our completion function.
setting :: Settings IO
setting = setComplete cf defaultSettings
-- | Entry point: move to the todo directory, clear the screen, show the
-- current task list, then enter the interactive command loop.
main :: IO ()
main = do
  -- NOTE(review): 'setCurrentDirectory undefined' crashes at start-up;
  -- this looks like a placeholder for the real todo directory -- confirm.
  setCurrentDirectory undefined
  runInputT setting $ do
    clear
    dodo "list"
    withInterrupt loop
-- | Interactive REPL: read a command line, match its first word as a
-- prefix of a known command, and dispatch.  Exceptions while reading
-- (e.g. Ctrl-C under 'withInterrupt') are swallowed and re-loop.
loop :: InputT IO ()
loop = do
  let hdl :: MonadIO m => SomeException -> m (Maybe String)
      hdl _ = return Nothing
  cmd <- handle hdl $ getInputLine "Command> "
  case cmd of
    Nothing -> loop
    Just cm -> let
        ps = words cm
        -- candidates whose name the typed word is a prefix of
        ci = filter (\(e,_) -> head ps`isPrefixOf`e) opeList
      in if ps == []
        then loop
        else do
          clear
          outputStrLn $ "Command> " ++ cm
          case ci of
            -- no match: report; exactly one: dispatch; ambiguous: relist
            [] -> outputStrLn ("Unrecognized command : " ++ cm) >> loop
            [(x,_)] -> case x of
              "clear" -> clear >> dodo "list" >> loop
              "quit" -> outputStrLn "[Exit]" >> return ()
              _ -> dodo (unwords [form x, show $ unwords $ tail ps]) >> loop
            (y:ys) -> dodo "list" >> loop
|
phi16/Todo
|
Main.hs
|
gpl-3.0
| 2,632 | 5 | 22 | 668 | 970 | 509 | 461 | 83 | 7 |
{- Algoritmo de Matching Asociativo Conmutativo.
Basado en el algoritmo eager del paper "Lazy AC-Pattern Matching for Rewriting",
Belkhir y Giorgetti-}
module Equ.Matching.Matching where
import Equ.PreExpr hiding (rename)
import Equ.Theories.Common (folOr,folAnd,folEquiv)
import Equ.Matching.Monad
import Equ.Matching.Error
import Data.List (permutations)
import Math.Combinat.Partitions.Multiset (partitionMultiset)
import Control.Applicative ((<$>))
import Control.Monad (foldM)
import Control.Monad.Trans.State (runState)
import qualified Data.Map as M
{- Associative/commutative operators -}
operatorListAC :: [Operator]
operatorListAC = [folOr,folAnd,folEquiv]
-- | The state accumulated by a successful match: a substitution from
-- pattern variables to flat expressions, plus a variable renaming.
data MatchSubst = MatchSubst
  { subst :: FESubst
  , rename :: VariableRename
  }
  deriving Show
-- | The empty match: no bindings, no renamings.
emptyMSubst :: MatchSubst
emptyMSubst = MatchSubst { subst = M.empty
                         , rename = M.empty
                         }
-- | One outcome of matching: either a solution substitution, or an
-- error together with the match trace that led to it.
data MatchRes = Solution MatchSubst
              | MError [MatchInfo] MatchError
-- | A match error paired with its trace.
type MErrWInfo = ([MatchInfo],MatchError)
instance Show MatchRes where
  show (Solution ms) = "Solution: " ++ (show ms)
  show (MError mi er) = unlines $ ["Error: " ++ show er, (unlines $ map show mi)]
type FESubst = M.Map Variable FlatExpr
type VariableRename = M.Map Variable Variable
-- | A surjection n -> k, as k groups of source indices.
type Surj = [[Int]]
-- | Fail with the given error, attaching the current match trace.
matcherr :: MatchError -> TMatch MatchRes
matcherr er = flip MError er <$> getInfo
-- | Extend the substitution with a new variable binding.
matchadd :: Variable -> FlatExpr -> MatchSubst -> TMatch MatchRes
matchadd v fe ms = return $ Solution (ms { subst = M.insert v fe (subst ms) })
-- All surjective functions from a set of n elements onto one of
-- k elements, each given as k ordered groups of source indices.
allSurjs :: Int -> Int -> [Surj]
allSurjs k n = concatMap permutations p
  where p = filter ((k==) . length) $ partitionMultiset [0..(n-1)]
-- | Yield a solution when the condition holds, otherwise the error.
whenM1 :: Bool -> MatchSubst -> MatchError -> TMatch [MatchRes]
whenM1 b ms er = if b then return [Solution ms]
                 else (: []) <$> matcherr er
-- | Continue with @res@ when the condition holds, otherwise the error.
whenM :: Bool -> TMatch [MatchRes] -> MatchError -> TMatch [MatchRes]
whenM b res er = if b then res
                 else (: []) <$> matcherr er
-- | Select the elements of @ls@ at the given (0-based) indices, in
-- index-list order.  Indices must be within bounds; duplicates are
-- allowed and yield the element repeatedly.
takeIndices :: [a] -> [Int] -> [a]
takeIndices ls = map (ls !!)
-- PRE: length surj = k <= length fs
-- POST: k = length (applySurj op surj fs)
-- | Group the operands @fs@ according to the surjection: each group of
-- indices becomes one operand, multi-element groups re-associated
-- under @op@.
applySurj :: Operator -> Surj -> [FlatExpr] -> [FlatExpr]
applySurj op surj fs = map asoc surj
  where asoc :: [Int] -> FlatExpr
        asoc [] = error "Imposible: applySurj"  -- empty groups cannot occur (see PRE)
        asoc [i] = fs!!i
        asoc is@(_:_:_) = FBin op $ takeIndices fs is
-- | Match the operand lists of a binary operator.  For an AC operator
-- the k pattern operands are matched against every way of grouping the
-- n subject operands (all surjections n -> k); otherwise the operand
-- lists are matched pairwise in order.
matchBin :: Operator -> [FlatExpr] -> [FlatExpr] -> MatchSubst -> TMatch [MatchRes]
matchBin op ps es ms = getOpsac >>= matchBin'
    where matchBin' oplist
              | op `elem` oplist = if k > n then (: []) <$> matcherr (SubTermsAC op ps es)
                                   -- accumulate the results of every grouping
                                   else foldM (\b surj -> (b ++) <$>
                                                  matchNoAC ps (applySurj op surj es))
                                        [] (allSurjs k n)
              | otherwise = matchNoAC ps es
              where k = length ps
                    n = length es
                    sol = Solution ms
                    -- pairwise, order-respecting match of equal-length lists
                    matchNoAC ps' es' | length ps' == length es' =
                                          foldM (\b (p,e) -> matchAC p e b)
                                                [sol] (zip ps' es')
                                      | otherwise = (: []) <$>
                                                    matcherr (NOperands op ps es)
-- | One step of AC matching of a pattern against a subject expression,
-- under the substitution built so far.
matchingAC :: FlatExpr -> FlatExpr -> MatchSubst -> TMatch [MatchRes]
-- Pattern variable: bind it, or check consistency with a prior binding.
matchingAC p@(FVar v) fe ms = elemVar v >>= \b ->
    if b && p/=fe then (: []) <$> matcherr (BindingVar v)
    else maybe ((: []) <$> matchadd v fe ms)
               (\f -> whenM1 (fe == f) ms (DoubleMatch v f fe))
               $ M.lookup v s
  where s = subst ms
-- Unary application: operators must agree, then match the operands.
matchingAC (FUn op p') (FUn o fe') ms =
    whenM (op == o) (matchAC' p' fe' (Solution ms))
                    (InequOperator op o)
-- Binary application: operators must agree, then AC-match operand lists.
matchingAC (FBin op ps) (FBin o es) ms =
    whenM (op == o) (matchBin op ps es ms)
                    (InequOperator op o)
-- NOTE(review): quantifier matching is unimplemented -- this 'undefined'
-- crashes at runtime for quantified patterns; confirm it is unreachable.
matchingAC (FQuant _ _ _ _) (FQuant _ _ _ _) _ = undefined
--whenM (q == r) (matchQuant q v w p1 e1 p2 e2)
-- (InequQuantifier q r)
-- Anything else matches only by syntactic equality.
matchingAC p e ms = whenM1 (p == e) ms (InequPreExpr p e)
-- | Match under one prior result: errors propagate unchanged; a
-- solution is extended, with trace push/pop around the step.
matchAC' :: FlatExpr -> FlatExpr -> MatchRes -> TMatch [MatchRes]
matchAC' _ _ mr@(MError _ _) = return [mr]
matchAC' pat fe (Solution ms) =
  pushInfo pat fe >> matchingAC pat fe ms >>=
  \mrs -> popInfo >> return mrs
-- | Match under every prior result, concatenating the outcomes.
matchAC :: FlatExpr -> FlatExpr -> [MatchRes] -> TMatch [MatchRes]
matchAC pat fe mrs = foldM (\b res -> (b ++) <$> matchAC' pat fe res) [] mrs
-- | Match two (unflattened) pre-expressions.
match' :: PreExpr -> PreExpr -> [MatchRes] -> TMatch [MatchRes]
match' p e = matchAC (flat p) (flat e)
-- | Top-level matching: pattern @e@ against @e'@ with the given AC
-- operators; first solution wins, otherwise all errors are reported.
match :: [Operator] -> PreExpr -> PreExpr -> Either [MErrWInfo] MatchSubst
match opsac e e' =
  let (mres,_) = runState (match' e e' [Solution emptyMSubst]) (initMSt opsac)
  in
    getMatchRes mres []
-- | Return the first solution, or the accumulated errors if none.
getMatchRes :: [MatchRes] -> [MErrWInfo] -> Either [MErrWInfo] MatchSubst
getMatchRes [] ers = Left ers
getMatchRes (mr:mrs) ers = case mr of
  Solution s -> Right s
  MError mis me -> getMatchRes mrs
                   (ers ++ [(mis,me)])
|
miguelpagano/equ
|
Equ/Matching/Matching.hs
|
gpl-3.0
| 5,629 | 0 | 15 | 1,920 | 1,906 | 1,011 | 895 | 101 | 3 |
module Example.Eg50 (eg50) where
import Graphics.Radian
import ExampleUtils
-- | Example 50: scatter plot of the iris data set with a categorical
-- (per-species) colour palette, rendered as HTML alongside its source.
eg50 :: IO Html
eg50 = do
  d <- readCSV "iris.csv" [ "sepal_length","sepal_width","petal_length"
                          ,"petal_width","species" ]
  let plot = Plot [ps] #
        [ width.=600, aspect.=1,
          axisXLabel.="Sepal length", axisYLabel.="Petal length",
          marker.="circle", markerSize.=100 ]
      -- one fixed colour per species
      irispal = discretePalette [ ("I. setosa", "red")
                                , ("I. versicolor", "green")
                                , ("I. virginica", "blue") ]
      ps = Points (d.^"sepal_length") (d.^"petal_length") #
        [fill.=irispal(d.^"species")]
      source = exampleSource "Eg50.hs"
  return [shamlet|
<h3>
  Example 50 (categorical palettes)
^{plot}
^{source}
|]
|
openbrainsrc/hRadian
|
examples/Example/Eg50.hs
|
mpl-2.0
| 822 | 3 | 14 | 258 | 220 | 123 | 97 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Translate.Projects.Locations.Glossaries.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists glossaries in a project. Returns NOT_FOUND, if the project
-- doesn\'t exist.
--
-- /See:/ <https://cloud.google.com/translate/docs/quickstarts Cloud Translation API Reference> for @translate.projects.locations.glossaries.list@.
module Network.Google.Resource.Translate.Projects.Locations.Glossaries.List
(
-- * REST Resource
ProjectsLocationsGlossariesListResource
-- * Creating a Request
, projectsLocationsGlossariesList
, ProjectsLocationsGlossariesList
-- * Request Lenses
, plglParent
, plglXgafv
, plglUploadProtocol
, plglAccessToken
, plglUploadType
, plglFilter
, plglPageToken
, plglPageSize
, plglCallback
) where
import Network.Google.Prelude
import Network.Google.Translate.Types
-- NOTE: this section appears auto-generated (module header says
-- "Stability: auto-generated"); prefer regenerating over hand edits.
-- | A resource alias for @translate.projects.locations.glossaries.list@ method which the
-- 'ProjectsLocationsGlossariesList' request conforms to.
type ProjectsLocationsGlossariesListResource =
     "v3" :>
       Capture "parent" Text :>
         "glossaries" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "filter" Text :>
                     QueryParam "pageToken" Text :>
                       QueryParam "pageSize" (Textual Int32) :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] ListGlossariesResponse
-- | Lists glossaries in a project. Returns NOT_FOUND, if the project
-- doesn\'t exist.
--
-- /See:/ 'projectsLocationsGlossariesList' smart constructor.
data ProjectsLocationsGlossariesList =
  ProjectsLocationsGlossariesList'
    { _plglParent :: !Text
    , _plglXgafv :: !(Maybe Xgafv)
    , _plglUploadProtocol :: !(Maybe Text)
    , _plglAccessToken :: !(Maybe Text)
    , _plglUploadType :: !(Maybe Text)
    , _plglFilter :: !(Maybe Text)
    , _plglPageToken :: !(Maybe Text)
    , _plglPageSize :: !(Maybe (Textual Int32))
    , _plglCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsGlossariesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plglParent'
--
-- * 'plglXgafv'
--
-- * 'plglUploadProtocol'
--
-- * 'plglAccessToken'
--
-- * 'plglUploadType'
--
-- * 'plglFilter'
--
-- * 'plglPageToken'
--
-- * 'plglPageSize'
--
-- * 'plglCallback'
projectsLocationsGlossariesList
    :: Text -- ^ 'plglParent'
    -> ProjectsLocationsGlossariesList
projectsLocationsGlossariesList pPlglParent_ =
  ProjectsLocationsGlossariesList'
    { _plglParent = pPlglParent_
    , _plglXgafv = Nothing
    , _plglUploadProtocol = Nothing
    , _plglAccessToken = Nothing
    , _plglUploadType = Nothing
    , _plglFilter = Nothing
    , _plglPageToken = Nothing
    , _plglPageSize = Nothing
    , _plglCallback = Nothing
    }
-- Lenses over the request fields, one per record field.
-- | Required. The name of the project from which to list all of the
-- glossaries.
plglParent :: Lens' ProjectsLocationsGlossariesList Text
plglParent
  = lens _plglParent (\ s a -> s{_plglParent = a})
-- | V1 error format.
plglXgafv :: Lens' ProjectsLocationsGlossariesList (Maybe Xgafv)
plglXgafv
  = lens _plglXgafv (\ s a -> s{_plglXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plglUploadProtocol :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglUploadProtocol
  = lens _plglUploadProtocol
      (\ s a -> s{_plglUploadProtocol = a})
-- | OAuth access token.
plglAccessToken :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglAccessToken
  = lens _plglAccessToken
      (\ s a -> s{_plglAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plglUploadType :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglUploadType
  = lens _plglUploadType
      (\ s a -> s{_plglUploadType = a})
-- | Optional. Filter specifying constraints of a list operation. Specify the
-- constraint by the format of \"key=value\", where key must be \"src\" or
-- \"tgt\", and the value must be a valid language code. For multiple
-- restrictions, concatenate them by \"AND\" (uppercase only), such as:
-- \"src=en-US AND tgt=zh-CN\". Notice that the exact match is used here,
-- which means using \'en-US\' and \'en\' can lead to different results,
-- which depends on the language code you used when you create the
-- glossary. For the unidirectional glossaries, the \"src\" and \"tgt\" add
-- restrictions on the source and target language code separately. For the
-- equivalent term set glossaries, the \"src\" and\/or \"tgt\" add
-- restrictions on the term set. For example: \"src=en-US AND tgt=zh-CN\"
-- will only pick the unidirectional glossaries which exactly match the
-- source language code as \"en-US\" and the target language code
-- \"zh-CN\", but all equivalent term set glossaries which contain
-- \"en-US\" and \"zh-CN\" in their language set will be picked. If
-- missing, no filtering is performed.
plglFilter :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglFilter
  = lens _plglFilter (\ s a -> s{_plglFilter = a})
-- | Optional. A token identifying a page of results the server should
-- return. Typically, this is the value of
-- [ListGlossariesResponse.next_page_token] returned from the previous call
-- to \`ListGlossaries\` method. The first page is returned if
-- \`page_token\`is empty or missing.
plglPageToken :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglPageToken
  = lens _plglPageToken
      (\ s a -> s{_plglPageToken = a})
-- | Optional. Requested page size. The server may return fewer glossaries
-- than requested. If unspecified, the server picks an appropriate default.
plglPageSize :: Lens' ProjectsLocationsGlossariesList (Maybe Int32)
plglPageSize
  = lens _plglPageSize (\ s a -> s{_plglPageSize = a})
      . mapping _Coerce
-- | JSONP
plglCallback :: Lens' ProjectsLocationsGlossariesList (Maybe Text)
plglCallback
  = lens _plglCallback (\ s a -> s{_plglCallback = a})
-- | Wire the request record onto the REST resource above.
instance GoogleRequest
           ProjectsLocationsGlossariesList
         where
        type Rs ProjectsLocationsGlossariesList =
             ListGlossariesResponse
        type Scopes ProjectsLocationsGlossariesList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-translation"]
        requestClient ProjectsLocationsGlossariesList'{..}
          = go _plglParent _plglXgafv _plglUploadProtocol
              _plglAccessToken
              _plglUploadType
              _plglFilter
              _plglPageToken
              _plglPageSize
              _plglCallback
              (Just AltJSON)
              translateService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ProjectsLocationsGlossariesListResource)
                      mempty
|
brendanhay/gogol
|
gogol-translate/gen/Network/Google/Resource/Translate/Projects/Locations/Glossaries/List.hs
|
mpl-2.0
| 7,854 | 0 | 19 | 1,712 | 985 | 578 | 407 | 140 | 1 |
-- | A travel guide record.
-- NOTE: 'Ord' is deliberately NOT derived -- deriving it alongside the
-- hand-written instance below would be a duplicate-instance error.
data TravelGuide = TravelGuide { title::String, authors::[String], price::Double}
                   deriving (Show, Eq)

-- | Order guides primarily by price; ties are broken by title, then
-- by the author list.
instance Ord TravelGuide where
  (TravelGuide t1 a1 p1) <= (TravelGuide t2 a2 p2) =
      p1 < p2 || (p1 == p2 && ( t1 < t2 || (t1 == t2 && a1 <= a2)))
|
wangyixiang/beginninghaskell
|
chapter4/src/Chapter4/MinimumPrice.hs
|
unlicense
| 256 | 2 | 14 | 51 | 132 | 71 | 61 | 4 | 0 |
module Set1
(
module Challenge1,
module Challenge2,
module Challenge3,
module Challenge4,
module Challenge5,
module Challenge6,
module Challenge7,
module Challenge8,
)
where
import qualified Set1.Challenge1 as Challenge1
import qualified Set1.Challenge2 as Challenge2
import qualified Set1.Challenge3 as Challenge3
import qualified Set1.Challenge4 as Challenge4
import qualified Set1.Challenge5 as Challenge5
import qualified Set1.Challenge6 as Challenge6
import qualified Set1.Challenge7 as Challenge7
import qualified Set1.Challenge8 as Challenge8
import Util.ByteManipulation
|
stallmanifold/matasano-crypto-challenges
|
src/Set1.hs
|
apache-2.0
| 657 | 0 | 4 | 141 | 108 | 80 | 28 | 19 | 0 |
module WebToInk.Converter.OpfGeneration(generateOpf) where
import WebToInk.Converter.Constants
import WebToInk.Converter.Utils (getTabs)
import System.FilePath (dropExtension)
-- | Build the complete OPF package document for a book from its page
-- and image file names plus title\/language\/creator metadata.
generateOpf :: [FilePath] -> [FilePath] -> String -> String -> String -> String
generateOpf pages images title language creator = unlines $
    ["<?xml version=\"1.0\" encoding=\"utf-8\"?>"] ++
    ["<package xmlns=\"http://www.idpf.org/2007/opf\" version=\"2.0\">"] ++
    [generateMetaData 1 title language creator] ++
    [generateManifest 1 pages images] ++
    [generateSpine 1 pages] ++
    [generateGuide 1 pagesFolder tocPage] ++
    ["</package>"]
-- | Render the @\<metadata\>@ element (Dublin Core title\/language\/creator
-- plus the Kindle cover meta tag) at the given indentation level.
-- NOTE(review): no type signature -- presumably
-- Int -> String -> String -> String -> String; consider adding it.
generateMetaData indent title language creator = unlines $
    map ((getTabs indent)++)
        (["<metadata xmlns:dc=\"http://purl.org/dc/elements/1.1/\"" ++
          "xmlns:opf=\"http://www.idpf.org/2007/opf\">"] ++
          map ((getTabs $ indent + 1)++)
              (["<dc:title>" ++ title ++ "</dc:title>"] ++
               ["<dc:language>" ++ language ++ "</dc:language>"] ++
               ["<dc:creator>" ++ creator ++ "</dc:creator>"] ++
               ["<meta name=\"cover\" content=\"cover\"/>"]) ++
        ["</metadata>"])
-- | Render the @\<manifest\>@: one item per page (XHTML), one per image
-- (PNG), plus the fixed NCX table-of-contents entry.
generateManifest :: Int -> [FilePath] -> [FilePath] -> String
generateManifest indent pages images = unlines $
    [(getTabs indent) ++ "<manifest>"] ++
    (generateItems pages) ++
    (generateImages images) ++
    ["\n" ++ getTabs (indent + 1) ++
     "<item id=\"ncx-toc\" media-type=\"application/x-dtbncx+xml\" href=\"toc.ncx\"/>"] ++
    [(getTabs indent) ++ "</manifest>"]
  where
    generateItems = map generateItem
    -- a page's manifest id is its file name without extension
    generateItem fileName =
        getTabs (indent + 1) ++
        "<item id=\"" ++ itemName ++ "\" " ++
        "media-type=\"application/xhtml+xml\" href=\"" ++
        pagesFolder ++ "/" ++ fileName ++"\" />"
      where itemName = dropExtension fileName
    generateImages = map generateImage
    generateImage fileName =
        getTabs (indent + 1) ++
        "<item media-type=\"image/png\" href=\"" ++ imagesFolder ++ "/" ++ fileName ++ "\"/>"
-- | Render the @\<spine\>@: reading order of the pages, by manifest id.
generateSpine :: Int -> [FilePath] -> String
generateSpine indent pages = unlines $
    [(getTabs indent) ++ "<spine toc=\"ncx-toc\">"] ++
    (map generateItemRef pages) ++
    [(getTabs indent) ++ "</spine>"]
  where
    generateItemRef fileName =
        getTabs (indent + 1) ++
        "<itemref idref=\"" ++ itemName ++ "\"/>"
      where itemName = dropExtension fileName
-- | Render the @\<guide\>@ pointing readers at the table of contents.
generateGuide indent pagesFolder tocPage = unlines $
    [(getTabs indent) ++ "<guide>"] ++
    map ((getTabs $ indent + 1)++)
        (["<reference type=\"toc\" title=\"Table of Contents\" href=\"" ++
          pagesFolder ++ "/" ++ tocPage ++ "\"/>"]) ++
    [(getTabs indent) ++ "</guide>"]
|
thlorenz/WebToInk
|
webtoink-converter/WebToInk/Converter/OpfGeneration.hs
|
bsd-2-clause
| 2,845 | 0 | 17 | 707 | 724 | 384 | 340 | 57 | 1 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
module Database.Narc.AST (
Term'(..), Term, VarName, PlainTerm, TypedTerm,
fvs, substTerm,
strip, retagulate, rename, variables,
(!),
fieldsOf,
-- unit_, Const, cnst_, primApp_, var_, abs_, app_, table_, ifthenelse_,
-- singleton_, nil_, union_, record_, project_, foreach_,
module Database.Narc.Common
) where
import Data.List as List ((\\), nub)
import Prelude hiding (abs)
import Database.Narc.Common
import Database.Narc.Type
import Database.Narc.Util (alistmap, u)
import Database.Narc.Var
-- | Terms in the nested relational calculus (represented concretely
-- | with named variables)
data Term' a = Unit | Bool Bool | Num Integer | String String
             | PrimApp String [Term a]
             | Var VarName | Abs VarName (Term a) | App (Term a) (Term a)
             | Table Tabname [(Field, Type)]
             | If (Term a) (Term a) (Term a)
             | Singleton (Term a) | Nil | Union (Term a) (Term a)
             | Record [(String, Term a)]
             | Project (Term a) String
             | Comp VarName (Term a) (Term a)   -- comprehension: for VarName in src, body
--           | IsEmpty (Term a)
               deriving (Eq,Show)
-- | Terms whose every subexpression is annotated with a value of some
-- | particular type.
type Term a = (Term' a, a)
-- TBD: use term ::: type or similar instead of (term, type).
-- | @PlainTerm@s are unannotated with types.
type PlainTerm = Term ()
-- | @TypedTerm@s carry a type at each node.
type TypedTerm = Term Type
-- Operations on terms -------------------------------------------------
-- | Free variables of a term.  Binders ('Abs', and the comprehension
-- variable of 'Comp') remove their variable from the result; note that
-- the source of a 'Comp' is *not* under the binder.
fvs :: Term a -> [VarName]
fvs (Unit, _) = []
fvs (Bool _, _) = []
fvs (Num _, _) = []
fvs (String _, _) = []
fvs (PrimApp _ args, _) = nub $ concatMap fvs args
fvs (Var x, _) = [x]
fvs (Abs x n, _) = fvs n \\ [x]
fvs (App l m, _) = fvs l `u` fvs m
fvs (Table _ _, _) = []
fvs (If c a b, _) = fvs c `u` fvs a `u` fvs b
fvs (Nil, _) = []
fvs (Singleton el, _) = fvs el
fvs (Union m n, _) = fvs m `u` fvs n
fvs (Record fields, _) = nub $ concatMap (fvs . snd) fields
fvs (Project targ _, _) = fvs targ
fvs (Comp x src body, _) = fvs src `u` (fvs body \\ [x])
-- | An infinite supply of variable names: y0, y1, y2, ...  Subtract a
-- finite list of taken names to obtain a fresh one.
variables :: [String]
variables = [ 'y' : show n | n <- [(0 :: Integer) ..] ]
-- | Rename free occurrences of a variable @x@ to @y@: @rename x y term@.
-- Alpha-renames a 'Comp' binder equal to @y@ so that the renamed
-- occurrences are not captured.
rename :: VarName -> VarName -> Term a -> Term a
rename x y (Var z, q) | x == z = (Var y, q)
                      | otherwise = (Var z, q)
rename x y (l@(Abs z n, q)) | x == z = l
                            | otherwise = (Abs z (rename x y n), q)
rename x y (App l m, q) = (App (rename x y l) (rename x y m), q)
rename x y (PrimApp prim args, q) = (PrimApp prim (map (rename x y) args), q)
rename x y (Singleton el, q) = (Singleton (rename x y el), q)
rename x y (Project targ label, q) = (Project (rename x y targ) label, q)
rename x y (Record fields, q) = (Record (alistmap (rename x y) fields), q)
rename x y (Comp z src body, q)
    -- x is shadowed in body, but src is NOT under the binder (cf. fvs),
    -- so we must still rename there.
    | x == z = (Comp z (rename x y src) body, q)
    -- y would be captured by the binder: alpha-rename the binder to a
    -- name fresh with respect to x, y and everything free in the body.
    | y == z = let y' = head $ variables \\ (x : y : fvs body) in
               let body' = rename y y' body in
               (Comp y' (rename x y src) (rename x y body'), q)
    | otherwise= (Comp z (rename x y src) (rename x y body), q)
rename x y (String n, q) = (String n, q)
rename x y (Bool b, q) = (Bool b, q)
rename x y (Num n, q) = (Num n, q)
rename x y (Table s t, q) = (Table s t, q)
rename x y (If c a b, q) = (If (rename x y c) (rename x y a) (rename x y b), q)
rename x y (Unit, q) = (Unit, q)
rename x y (Nil, q) = (Nil, q)
rename x y (Union a b, q) = (Union (rename x y a) (rename x y b), q)
-- | @substTerm x v m@: capture-avoidingly substitute @v@ for @x@ in
-- term @m@.
-- (Actually incorrect because it does not make substitutions in the
-- annotation.)
substTerm :: VarName -> Term t -> Term t -> Term t
substTerm x v (m@(Unit, _)) = m
substTerm x v (m@(Bool b, _)) = m
substTerm x v (m@(Num n, _)) = m
substTerm x v (m@(String s, _)) = m
substTerm x v (m@(Table s t, _)) = m
substTerm x v (m@(Nil, _)) = m
substTerm x v (Singleton el, q) = (Singleton (substTerm x v el), q)
substTerm x v (Union m n, q) = (Union (substTerm x v m) (substTerm x v n), q)
substTerm x v (m@(Var y, _)) | y == x = v
                             | otherwise = m
substTerm x v (l @ (Abs y n, q))
    -- x is shadowed: nothing to substitute below the binder.
    | x == y = l
    -- No capture possible: substitute straight through.
    | y `notElem` fvs v = (Abs y (substTerm x v n), q)
    -- The binder would capture a free variable of v: alpha-rename it
    -- to a name fresh for both v and the body.
    | otherwise =
        let y' = head $ variables \\ (fvs v ++ fvs n) in
        let n' = rename y y' n in
        (Abs y' (substTerm x v n'), q)
substTerm x v (App l m, q) = (App (substTerm x v l) (substTerm x v m), q)
substTerm x v (PrimApp prim args,q)= (PrimApp prim (map (substTerm x v) args),q)
substTerm x v (Project targ label, q) = (Project (substTerm x v targ) label, q)
substTerm x v (Record fields, q) = (Record (alistmap (substTerm x v) fields), q)
substTerm x v (Comp y src body, q)
    -- x is shadowed in body, but src is outside the binder's scope.
    | x == y =
        (Comp y src' body, q)
    | y `notElem` fvs v =
        (Comp y src' (substTerm x v body), q)
    -- Capture avoidance, as in the Abs case.
    | otherwise =
        let y' = head $ variables \\ (fvs v ++ fvs body) in
        let body' = rename y y' body in
        (Comp y' src' (substTerm x v body'), q)
    where src' = (substTerm x v src)
substTerm x v (If c a b, q) =
    (If (substTerm x v c) (substTerm x v a) (substTerm x v b), q)
-- Generic term-recursion functions ------------------------------------
-- | Apply a function to each term while traversing down, and use its
-- | result as the annotation of that node.
-- Note: @f@ is applied to the *original* (pre-traversal) node @m@ at
-- every level, so each annotation sees the untransformed subterm.
entagulate :: (Term a -> b) -> Term a -> Term b
entagulate f m@(Unit, d) = (Unit, f m)
entagulate f m@(PrimApp fn xs, d) = (PrimApp fn (map (entagulate f) xs), f m)
entagulate f m@(Bool b, d) = (Bool b, f m)
entagulate f m@(Num n, d) = (Num n, f m)
entagulate f m@(String s, d) = (String s, f m)
entagulate f m@(Var x, d) = (Var x, f m)
entagulate f m@(Abs x n, d) = (Abs x (entagulate f n), f m)
entagulate f m@(App l' m', d) = (App (entagulate f l') (entagulate f m'),
                                 f m)
entagulate f m@(If c a b, d) =
    (If (entagulate f c)
        (entagulate f a)
        (entagulate f b),
     f m)
entagulate f m@(Table tab fields, d) = (Table tab fields, f m)
entagulate f m@(Nil, d) = (Nil, f m)
entagulate f m@(Singleton m', d) = (Singleton (entagulate f m'),
                                    f m)
entagulate f m@(Union a b, d) =
    (Union
     (entagulate f a)
     (entagulate f b),
     f m)
entagulate f m@(Record fields, d) =
    (Record (alistmap (entagulate f) fields), f m)
entagulate f m@(Project m' a, d) = (Project (entagulate f m') a, f m)
entagulate f m@(Comp x src body, d) =
    (Comp x (entagulate f src) (entagulate f body), f m)
-- | Apply a function to each node while traversing *up*, using its
-- | result as the new annotation for that node: @f@ sees each node with
-- its children already retagged.
{- (FIXME: I think all this can be refactored to a nice BU/TD
combinator that doesn't know about annotations. -}
retagulate :: (Term a -> a) -> Term a -> Term a
retagulate f (Unit, d) = (Unit, f (Unit, d))
retagulate f (Bool b, d) = (Bool b, f (Bool b, d))
retagulate f (Num n, d) = (Num n, f (Num n, d))
retagulate f (String s, d) = (String s, f (String s, d))
retagulate f (Var x, d) = (Var x, f (Var x, d))
retagulate f (Abs x n, d) =
    -- (Made consistent with the other cases: annotate with @result@.)
    let result = Abs x (retagulate f n) in
    (result, f (result, d))
retagulate f (App l m, d) =
    let l' = retagulate f l in
    let m' = retagulate f m in
    let result = App l' m' in
    (result, f (result, d))
retagulate f (PrimApp fn args, d) =
    let result = PrimApp fn (map (retagulate f) args) in
    (result, f (result, d))
retagulate f (If c a b, d) =
    let result = If (retagulate f c)
                    (retagulate f a)
                    (retagulate f b)
    in
      (result, f (result, d))
retagulate f (Table tab fields, d) =
    let result = Table tab fields in
    (result, f (result, d))
retagulate f (Nil, d) = (Nil, f (Nil, d))
retagulate f (Singleton m, d) =
    let result = Singleton (retagulate f m) in
    (result, f (result, d))
retagulate f (Union l m, d) =
    let result = Union (retagulate f l) (retagulate f m) in
    (result, f (result, d))
retagulate f (Record fields, d) =
    let result = Record (alistmap (retagulate f) fields) in
    (result, f (result, d))
retagulate f (Project m a, d) =
    let result = Project (retagulate f m) a in
    (result, f (result, d))
retagulate f (Comp x src body, d) =
    let result = Comp x (retagulate f src) (retagulate f body) in
    (result, f (result, d))
-- | Strip off the annotations of every node in a term, leaving ().
strip :: Term a -> Term ()
strip = entagulate (const ())
-- | The number of comprehensions in an expression, a fuzzy measure of
-- the complexity of the query.
numComps :: Term a -> Int
numComps (Comp x src body, _) = 1 + numComps src + numComps body
numComps (PrimApp _ args, _) = sum $ map numComps args
numComps (Abs _ n, _) = numComps n
numComps (App l m, _) = numComps l + numComps m
numComps (Singleton body, _) = numComps body
numComps (Record fields, _) = sum $ map (numComps . snd) fields
numComps (Project m _, _) = numComps m
numComps (Union a b, _) = numComps a + numComps b
numComps (Unit, _) = 0
numComps (Bool _, _) = 0
numComps (Num _, _) = 0
numComps (String _, _) = 0
numComps (Var _, _) = 0
numComps (Table _ _, _) = 0
numComps (If c a b, _) = numComps c + numComps a + numComps b
numComps (Nil, _) = 0
-- | An interface for semanticizing the Narc concrete language as
-- | desired (as per "Unembedding domain specific languages" by Atkey,
-- | Lindley and Yallop).
-- Each method corresponds to one syntactic form of 'Term''.
class NarcSem result where
    unit :: result
    bool :: Bool -> result
    num :: Integer -> result
    string :: String -> result
    primApp :: String -> [result] -> result
    var :: VarName -> result
    abs :: VarName -> result -> result
    app :: result -> result -> result
    table :: Tabname -> [(Field, Type)] -> result
    ifthenelse :: result -> result -> result -> result
    singleton :: result -> result
    nil :: result
    union :: result -> result -> result
    record :: [(String, result)] -> result
    project :: result -> String -> result
    foreach :: result -> VarName -> result -> result
    -- cnst :: Constable t => t -> result
-- | Types whose values can be injected as constants into any
-- 'NarcSem' semantics.
class Constable t where cnst :: NarcSem result => t -> result
instance Constable Bool where cnst b = bool b
instance Constable Integer where cnst n = num n
-- Explicit-named builders
-- | Inject a value of type @t@ into @(t, ())@ in the only sensible way:
-- pair it with the trivial annotation.
(!) :: t -> (t, ())
(!) x = (x, ())
-- | The syntactic semantics: each method builds the corresponding AST
-- node, annotating it with ().
instance NarcSem PlainTerm where
    unit = (Unit, ())
    bool b = (Bool b, ())
    num n = (Num n, ())
    string n = (String n, ())
    primApp f args = (PrimApp f args, ())
    var x = (Var x, ())
    abs x body = (Abs x body, ())
    app l m = (App l m, ())
    table tbl ty = (Table tbl ty, ())
    ifthenelse c t f = (If c t f, ())
    singleton x = (Singleton x, ())
    nil = (Nil, ())
    union a b = (Union a b, ())
    record fields = (Record fields, ())
    project body field = (Project body field, ())
    foreach src x body = (Comp x src body, ())
-- class Const a where cnst_ :: a -> Term ()
{- AST-constructing utilities. But I've decided to use the NarcSem
   class instead; DEPRECATED. -}
-- These mirror the NarcSem PlainTerm instance above, one underscore-
-- suffixed builder per AST constructor.
unit_ = (!)Unit
class Const a where cnst_ :: a -> Term ()
instance Const Bool where cnst_ b = (!)(Bool b)
instance Const Integer where cnst_ n = (!)(Num n)
instance Const String where cnst_ s = (!)(String s)
primApp_ f args = (!)(PrimApp f args)
var_ x = (!)(Var x)
abs_ x body = (!)(Abs x body)
app_ l m = (!)(App l m)
table_ tbl ty = (!)(Table tbl ty)
ifthenelse_ c t f = (!)(If c t f)
singleton_ x = (!)(Singleton x)
nil_ = (!)Nil
union_ a b = (!)(Union a b)
record_ fields = (!)(Record fields)
project_ body field = (!)(Project body field)
foreach_ src x body = (!)(Comp x src body)
{- Constructors/deconstructors for AST nodes. -}
-- | Given a 'Table' node, return a list of the names of its fields.
-- Calling it on any other node is a programming error; the explicit
-- fallback gives a clearer diagnostic than a pattern-match failure.
fieldsOf :: Term' t -> [Field]
fieldsOf (Table name fields) = map fst fields
fieldsOf _ = error "Database.Narc.AST.fieldsOf: expected a Table node"
|
ezrakilty/narc
|
Database/Narc/AST.hs
|
bsd-2-clause
| 11,873 | 2 | 14 | 3,070 | 5,620 | 2,975 | 2,645 | 246 | 1 |
import Data.Char
import Data.List
import Control.Applicative
import System.Environment
-- | Move the given card one position toward the bottom of the deck.
-- When the card is already at the bottom it wraps around to sit just
-- below the top card.
moveForward card deck =
    case deck of
      (c : x : rest) | c == card       -> x : c : rest
      (x : rest) | last rest == card   -> x : card : init rest
      (x : rest)                       -> x : moveForward card rest
-- | Card value used for indexing; both jokers count as 53.
valueOf card = min 53 card
-- | Jokers are the two highest cards (53 and 54).
isNotJoker card = card < 53
-- | Cards above the first joker.
takeTop deck = takeWhile isNotJoker deck
-- | Everything from the first joker down.
dropTop deck = dropWhile isNotJoker deck
topCut = takeTop
-- | Cards below the last joker.
bottomCut deck = reverse (takeTop (reverse deck))
-- | The span between (and including) the two jokers.
middleCut deck = reverse (dropTop (reverse (dropTop deck)))
-- | Take the top @n@ cards and move them to just above the bottom card.
moveToNearBottom n deck = (init (drop n deck)) ++ (take n deck) ++ [last deck]
-- | Move joker A (53) down one place.
shuffleStepTwo = moveForward 53
-- | Move joker B (54) down two places.
shuffleStepThree = foldl1 (.) (replicate 2 (moveForward 54))
-- | Triple cut around the two jokers: swap the top and bottom sections.
shuffleStepFour d = (bottomCut d) ++ (middleCut d) ++ (topCut d)
-- | Count cut: use the bottom card's value to cut the deck.
shuffleStepFive d = moveToNearBottom (last d) d
-- | Output card: index into the deck by the (joker-capped) top card.
currentValue d = d !! valueOf (head d)
-- | One full shuffle round (steps two through five, in order).
shuffle = shuffleStepFive
        . shuffleStepFour
        . shuffleStepThree
        . shuffleStepTwo
-- | Infinite stream of deck states, one shuffle apart.
padDecks = iterate shuffle startingDeck
-- | Keystream value read off each deck state.
padValues = map currentValue padDecks
-- Skip the value of the unshuffled starting deck.
exclueInitialPad = tail padValues
-- | Final keystream: joker outputs (>= 53) are discarded.
pad = filter (< 53) exclueInitialPad
-- | Cards 1..52 plus the two jokers 53 and 54, in order.
startingDeck = [1..54]
-- | Keep only upper-case ASCII letters.
filterNonletters cs = filter (`elem` ['A'..'Z']) cs
-- | 'A'..'Z' -> 0..25.
toNumber letter = ord letter - 65
-- | 0..25 -> 'A'..'Z'.
toCharacter code = chr (code + 65)
-- | Combine a numeric keystream with the cleaned-up message letters,
-- modulo 26.  (Encryption adds the keystream; decryption passes its
-- negation.)
doCrypt key text = map (toCharacter . (`mod` 26)) (zipWith (+) key cleanInput)
  where cleanInput = map toNumber (filterNonletters (map toUpper text))
-- | Encrypt by adding the keystream to the message letters.
encrypt = doCrypt pad
-- | Decrypt by adding the negated keystream.
decrypt = doCrypt $ (* (-1)) <$> pad
-- | Read the message from the command line and show it both encrypted
-- and decrypted.  (Fixed: the labels were swapped with respect to the
-- functions being called.)
main = do
  inputString <- getArgs
  putStr "Encrypted: "
  putStrLn $ encrypt $ concat inputString
  putStr "Decrypted: "
  putStrLn $ decrypt $ concat inputString
|
mightymoose/RubyQuiz
|
quiz_1/solution.hs
|
bsd-2-clause
| 1,601 | 0 | 11 | 366 | 619 | 318 | 301 | 42 | 1 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
module Data.Authenticated.Generic (Auth1(..)) where
import Data.Authenticated
import GHC.Generics
newtype Auth1 f a = Auth1 { getAuth1 :: Auth (f a) a }
-- These two instances are what requires UndecidableInstances.
-- Show simply delegates to the wrapped 'Auth' value in each mode.
instance (Show (f Prover), Show (Digest (f Prover))) => Show (Auth1 f Prover) where
    showsPrec n = showsPrec n . getAuth1
instance (Show (f Verifier), Show (Digest (f Verifier))) => Show (Auth1 f Verifier) where
    showsPrec n = showsPrec n . getAuth1
-- | V1 is uninhabited, so this case can never be reached at runtime.
instance GMapAuth V1 where
    gmapAuth = undefined
-- | Unit constructor: nothing to map over.
instance GMapAuth U1 where
    gmapAuth U1 = U1
-- | Sums: map whichever branch is present.
instance (GMapAuth l, GMapAuth r) => GMapAuth (l :+: r) where
    gmapAuth (L1 x) = L1 (gmapAuth x)
    gmapAuth (R1 x) = R1 (gmapAuth x)
-- | Products: map both components.
instance (GMapAuth l, GMapAuth r) => GMapAuth (l :*: r) where
    gmapAuth (x :*: y) = gmapAuth x :*: gmapAuth y
-- | Plain constants carry no authentication mode; pass them through.
instance GMapAuth (K1 i c) where
    gmapAuth (K1 c) = K1 c
-- | Metadata wrappers are transparent.
instance (GMapAuth f) => GMapAuth (M1 i t f) where
    gmapAuth (M1 x) = M1 (gmapAuth x)
-- | Recursive occurrences delegate to the type's own 'MapAuth'.
instance (MapAuth f) => GMapAuth (Rec1 f) where
    gmapAuth (Rec1 x) = Rec1 (mapAuth x)
-- instance GMapAuth Par1 - This should never happen for correct data types (i.e. whose parameter is Authenticated)
-- | The digest must be identical in both modes so the authenticated
-- wrapper can be converted without recomputation ('shallowAuth').
instance (Digest (f Prover) ~ Digest (f Verifier)) => MapAuth (Auth1 f) where
    mapAuth (Auth1 x) = Auth1 (shallowAuth x)
|
derekelkins/ads
|
Data/Authenticated/Generic.hs
|
bsd-2-clause
| 1,445 | 0 | 10 | 285 | 541 | 280 | 261 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Grammar where
{-| System F -}
import Unbound.LocallyNameless
-- Expression Variable Name
type ExpN = Name Expr
-- Type Variable Name
type TypN = Name Type
-- | System F terms: variables, term abstraction (with an embedded type
-- annotation on the binder), type abstraction, and term/type
-- application.  Binders use unbound's 'Bind'.
data Expr =
    EVar ExpN
  | Lam (Bind (ExpN, Embed Type) Expr)
  | LAM (Bind TypN Expr)
  | App Expr Expr
  | TyApp Expr Type
  deriving (Show)
-- | System F types: type variables, arrows, and a binder over types
-- (universal quantification).
data Type =
    TyVar TypN
  | TyArr Type Type
  | TyLam (Bind TypN Type)
  deriving (Show)
-- Alpha-equivalence and binding support come entirely from unbound's
-- generic defaults (hence the empty instance bodies).
instance Alpha Expr where
instance Alpha Type where
-- | Substituting expressions for expression variables.
instance Subst Expr Expr where
  isVar (EVar x) = Just (SubstName x)
  isVar _ = Nothing
-- Types contain no expression variables.
instance Subst Expr Type where
  isVar _ = Nothing
-- | Substituting types for type variables.
instance Subst Type Type where
  isVar (TyVar x) = Just (SubstName x)
  isVar _ = Nothing
-- NOTE(review): this RepLib 'derive' splice appears after the instances
-- that depend on the derived representations; TH splices normally
-- separate declaration groups, so confirm this order compiles on the
-- targeted GHC/unbound versions.
$(derive [''Expr, ''Type])
|
maxsnew/TAPL
|
Polymorphic/Grammar.hs
|
bsd-3-clause
| 925 | 0 | 10 | 213 | 272 | 147 | 125 | 32 | 0 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module TensorOps.Learn.NeuralNet.FeedForward
( Network(..)
, buildNet
, runNetwork
, trainNetwork
, induceNetwork
, nmap
, (~*)
, (*~)
, liftNet
, netParams
, networkGradient
, genNet
, ffLayer
) where
import Control.Category
import Control.DeepSeq
import Control.Monad.Primitive
import Data.Kind
import Data.Singletons
import Data.Singletons.Prelude (Sing(..))
import Data.Type.Conjunction
import Data.Type.Length
import Data.Type.Product as TCP
import Data.Type.Product.Util as TCP
import Data.Type.Sing
import Prelude hiding ((.), id)
import Statistics.Distribution.Normal
import System.Random.MWC
import TensorOps.Learn.NeuralNet
import TensorOps.NatKind
import TensorOps.TOp as TO
import TensorOps.Tensor as TT
import TensorOps.Types
import Type.Class.Higher
import Type.Class.Higher.Util
import Type.Class.Known
import Type.Class.Witness
import Type.Family.List
import Type.Family.List.Util
-- | A feed-forward network from a tensor of shape @'[i]@ to one of
-- shape @'[o]@: the layers are fused into a single 'TOp' whose extra
-- inputs are the parameter tensors.  The parameter shape list @ps@ is
-- existentially hidden and witnessed by the stored 'Sing'.
data Network :: ([k] -> Type) -> k -> k -> Type where
    N :: { _nsPs :: !(Sing ps)
         , _nOp :: !(TOp ('[i] ': ps) '[ '[o] ])
         , _nParams :: !(Prod t ps)
         } -> Network t i o
-- | Forcing a network forces the op to WHNF and the parameters deeply.
instance NFData1 t => NFData (Network t i o) where
    rnf = \case
      N _ o p -> o `seq` p `deepseq1` ()
    {-# INLINE rnf #-}
-- | Construct a network from its fused op and parameter tensors; the
-- parameter shape singleton is captured from the 'SingI' context.
buildNet
    :: SingI ps
    => TOp ('[i] ': ps) '[ '[o] ]
    -> Prod t ps
    -> Network t i o
buildNet = N sing
-- | Continuation-style access to a network's parameter tensors,
-- re-exposing the hidden shape list as a 'SingI' constraint.
netParams
    :: Network t i o
    -> (forall ps. SingI ps => Prod t ps -> r)
    -> r
netParams n f = case n of
    N o _ p -> f p \\ o
-- | Compose two networks end to end: the first network's output feeds
-- the second's input; parameter lists are concatenated.
(~*~)
    :: Network t a b
    -> Network t b c
    -> Network t a c
N sPs1 o1 p1 ~*~ N sPs2 o2 p2 =
    N (sPs1 %:++ sPs2) (o1 *>> o2) (p1 `TCP.append'` p2)
        \\ singLength sPs1
infixr 4 ~*~
{-# INLINE (~*~) #-}
-- | Networks form a category under layer composition; 'id' is the
-- parameterless identity op.
instance Category (Network t) where
    id = N SNil idOp Ø
    (.) = flip (~*~)
-- | Pre-compose a parameterless tensor op onto a network's input.
(~*) :: TOp '[ '[a] ] '[ '[b] ]
     -> Network t b c
     -> Network t a c
f ~* N sO o p = N sO (f *>> o) p
infixr 4 ~*
{-# INLINE (~*) #-}
-- | Post-compose a parameterless tensor op onto a network's output.
(*~) :: Network t a b
     -> TOp '[ '[b] ] '[ '[c] ]
     -> Network t a c
N sO o p *~ f = N sO (o >>> f) p
infixl 5 *~
{-# INLINE (*~) #-}
-- | Lift a bare tensor op into a network with no parameters.
liftNet
    :: TOp '[ '[i] ] '[ '[o] ]
    -> Network t i o
liftNet o = buildNet o Ø
-- | Map a differentiable scalar function over a network's output.
nmap
    :: SingI o
    => (forall a. RealFloat a => a -> a)
    -> Network t i o
    -> Network t i o
nmap f n = n *~ TO.map f
{-# INLINE nmap #-}
-- | Forward pass: feed the input tensor and stored parameters to the
-- fused op and take its (single) output.
runNetwork
    :: (RealFloat (ElemT t), Tensor t)
    => Network t i o
    -> t '[i]
    -> t '[o]
runNetwork (N _ o p) = head' . runTOp o . (:< p)
{-# INLINE runNetwork #-}
-- | One gradient-descent step on a single example: every parameter
-- tensor is updated to @p - r * grad@ for learning rate @r@ under the
-- given loss op.
trainNetwork
    :: forall i o t. (Tensor t, RealFloat (ElemT t))
    => TOp '[ '[o], '[o] ] '[ '[] ]
    -> ElemT t
    -> t '[i]
    -> t '[o]
    -> Network t i o
    -> Network t i o
trainNetwork loss r x y = \case
    N s o p ->
      -- Drop the input gradient (tail') and zip the parameter gradients
      -- against the parameters, shape witnesses in tow.
      let p' = map1 (\(!(s1 :&: o1 :&: g1)) -> TT.zip stepFunc o1 g1 \\ s1)
             $ zipProd3 (singProd s) p (tail' $ netGrad loss x y s o p)
      in  N s o p'
  where
    stepFunc :: ElemT t -> ElemT t -> ElemT t
    stepFunc !o' !g' = o' - r * g'
    {-# INLINE stepFunc #-}
{-# INLINE trainNetwork #-}
-- | Gradient step on the /input/ instead of the parameters: nudge an
-- input toward producing the target output under the given loss.
induceNetwork
    :: forall i o t. (Tensor t, RealFloat (ElemT t), SingI i)
    => TOp '[ '[o], '[o] ] '[ '[] ]
    -> ElemT t
    -> t '[o]
    -> Network t i o
    -> t '[i]
    -> t '[i]
induceNetwork loss r y = \case
    N s o p -> \x -> TT.zip stepFunc x (head' $ netGrad loss x y s o p)
  where
    stepFunc :: ElemT t -> ElemT t -> ElemT t
    stepFunc o' g' = o' - r * g'
    {-# INLINE stepFunc #-}
{-# INLINE induceNetwork #-}
-- | Continuation-style access to the gradient of the loss with respect
-- to every parameter tensor (input gradient dropped).
networkGradient
    :: forall i o t r. (Tensor t, RealFloat (ElemT t))
    => TOp '[ '[o], '[o] ] '[ '[] ]
    -> t '[i]
    -> t '[o]
    -> Network t i o
    -> (forall ps. SingI ps => Prod t ps -> r)
    -> r
networkGradient loss x y = \case
    N s o p -> \f -> f (tail' $ netGrad loss x y s o p) \\ s
{-# INLINE networkGradient #-}
-- | Gradient of the loss over input and parameters: compose the network
-- op with the loss op, differentiate, and keep the gradients for the
-- input (head) and parameters (tail), discarding the target's slot.
netGrad
    :: forall i o ps t. (Tensor t, RealFloat (ElemT t))
    => TOp '[ '[o], '[o] ] '[ '[] ]
    -> t '[i]
    -> t '[o]
    -> Sing ps
    -> TOp ('[i] ': ps) '[ '[o] ]
    -> Prod t ps
    -> Prod t ('[i] ': ps)
netGrad loss x y s o p = (\\ appendSnoc lO (Proxy @'[o])) $
                         (\\ lO                        ) $
    takeProd @'[ '[o] ] (LS lO)
      $ gradTOp o' inp
  where
    lO :: Length ps
    lO = singLength s
    -- The fused "network then loss" op, scoring against the target y.
    o' :: ((ps ++ '[ '[o] ]) ~ (ps >: '[o]), Known Length ps)
       => TOp ('[i] ': ps >: '[o]) '[ '[]]
    o' = o *>> loss
    -- Input, parameters, then target, matching o's input shape list.
    inp :: Prod t ('[i] ': ps >: '[o])
    inp = x :< p >: y
{-# INLINE netGrad #-}
-- | A fully-connected layer with weight and bias tensors drawn from a
-- normal distribution (mean 0, stddev 0.5).  The op is a matrix-vector
-- product plus a bias add (via 'TO.matVec' and 'TO.add').
ffLayer
    :: forall i o m t. (SingI i, SingI o, PrimMonad m, Tensor t)
    => Gen (PrimState m)
    -> m (Network t i o)
ffLayer g = (\w b -> buildNet ffLayer' (w :< b :< Ø))
        <$> genRand (normalDistr 0 0.5) g
        <*> genRand (normalDistr 0 0.5) g
  where
    ffLayer'
        :: TOp '[ '[i], '[o,i], '[o]] '[ '[o] ]
    ffLayer' = firstOp (TO.swap >>> TO.matVec)
           >>> TO.add
    {-# INLINE ffLayer' #-}
{-# INLINE ffLayer #-}
-- | Generate a random network with the given hidden-layer widths and
-- activations, applying the final activation to the output layer.
genNet
    :: forall k o i m (t :: [k] -> Type). (SingI o, SingI i, PrimMonad m, Tensor t)
    => [(Integer, Activation k)]
    -> Activation k
    -> Gen (PrimState m)
    -> m (Network t i o)
genNet xs0 f g = go sing xs0
  where
    -- Build layers from input shape j outward, one per list entry.
    go :: forall (j :: k). ()
       => Sing j
       -> [(Integer, Activation k)]
       -> m (Network t j o)
    go sj = (\\ sj) $ \case
      [] -> (*~ getAct f) <$> ffLayer g
      (x,f'):xs -> withNatKind x $ \sl -> (\\ sl) $ do
        n <- go sl xs
        l <- ffLayer g
        return $ l *~ getAct f' ~*~ n
    {-# INLINE go #-}
{-# INLINE genNet #-}
|
mstksg/tensor-ops
|
src/TensorOps/Learn/NeuralNet/FeedForward.hs
|
bsd-3-clause
| 6,423 | 0 | 20 | 2,244 | 2,732 | 1,471 | 1,261 | 216 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Module : $Header$
Copyright : (c) 2015 Swinburne Software Innovation Lab
License : BSD3
Maintainer : Rhys Adams <[email protected]>
Stability : unstable
Portability : portable
Persistence for job states and yet-to-be-run 'ScheduleCommand's.
-}
module Eclogues.Persist (
-- * 'Action'
Action, Context
-- ** Running
, withPersistDir, atomically
-- * View
, allIntents, allJobs
-- * Mutate
, insert, updateStage, updateSatis, delete, scheduleIntent, deleteIntent
) where
import Eclogues.Persist.Stage1 ()
import Eclogues.Scheduling.Command (ScheduleCommand)
import qualified Eclogues.Job as Job
import Control.Lens ((^.))
import Control.Monad.Base (MonadBase, liftBase)
import Control.Monad.Logger (LoggingT, runStderrLoggingT)
import Control.Monad.Reader (ReaderT)
import Data.Monoid ((<>))
import qualified Data.Text as T
import Data.UUID (UUID)
import Database.Persist.TH (mkPersist, sqlSettings, mkMigrate, share, persistLowerCase)
import Database.Persist ((==.), (=.))
import qualified Database.Persist as P
import qualified Database.Persist.Sql as PSql
import Database.Persist.Sqlite (withSqlitePool)
import Path (Path, Abs, Dir, toFilePath)
-- Table definitions.
-- | Template Haskell splice generating the @Job@ and @ScheduleIntent@
-- entity types plus the @migrateAll@ migration from the persistent DSL
-- below.  (The quasiquote body is parsed by persistent, not GHC, so no
-- comments may be added inside it.)
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Job
    name Job.Name
    spec Job.Spec
    stage Job.Stage
    satis Job.Satisfiability
    uuid UUID
    UniqueName name
ScheduleIntent
    command ScheduleCommand
    UniqueCommand command
|]
-- Hide away the implementation details.
-- | All 'Action's run in a Context.
newtype Context = Context PSql.ConnectionPool
-- | An interaction with the persistence backend.  The 'ReaderT' carries
-- the open SQL connection; 'IO' stays hidden behind the newtype.
newtype Action r = Action (ReaderT PSql.SqlBackend IO r)
    deriving (Functor, Applicative, Monad)
-- | You can join up Actions by running them sequentially.
-- NOTE(review): Semigroup became a superclass of Monoid in GHC 8.4; on
-- such compilers this instance would additionally need
-- @instance Semigroup (Action ())@ -- confirm the targeted GHC version.
instance Monoid (Action ()) where
    mempty = pure ()
    mappend = (*>)
-- | Run some action that might persist things inside the given directory.
-- Logs to stderr.
withPersistDir :: Path Abs Dir -> (Context -> LoggingT IO a) -> IO a
withPersistDir path f = runStderrLoggingT $ withSqlitePool ("WAL=off " <> path' <> "/eclogues.db3") 1 act
  where
    -- Run pending migrations first, then hand the pool to the caller.
    act pool = do
        PSql.runSqlPool (PSql.runMigration migrateAll) pool
        f (Context pool)
    path' = T.pack $ toFilePath path
-- | Apply some Action in a transaction.
-- Runs the action on one pooled connection via runSqlPool; presumably
-- that wraps it in a single SQL transaction -- confirm against the
-- persistent documentation for the pinned version.
atomically :: (MonadBase IO m) => Context -> Action r -> m r
atomically (Context pool) (Action a) = liftBase $ PSql.runSqlPool a pool
-- | Persist a new job row built from the given status.
insert :: Job.Status -> Action ()
insert status = Action $ P.insert_ job where
    job = Job { jobName = status ^. Job.name
              , jobSpec = status ^. Job.spec
              , jobStage = status ^. Job.stage
              , jobSatis = status ^. Job.satis
              , jobUuid = status ^. Job.uuid }
-- | Overwrite the stage of the named job.
updateStage :: Job.Name -> Job.Stage -> Action ()
updateStage name st = Action $ P.updateWhere [JobName ==. name] [JobStage =. st]
-- | Overwrite the satisfiability of the named job.
updateSatis :: Job.Name -> Job.Satisfiability -> Action ()
updateSatis name st = Action $ P.updateWhere [JobName ==. name] [JobSatis =. st]
-- | Remove the job with the given name.
delete :: Job.Name -> Action ()
delete name = Action (P.deleteBy (UniqueName name))
-- | Record a 'ScheduleCommand' that has yet to be carried out.
scheduleIntent :: ScheduleCommand -> Action ()
scheduleIntent cmd = Action (P.insert_ (ScheduleIntent cmd))
-- | Remove a previously recorded 'ScheduleCommand'.
deleteIntent :: ScheduleCommand -> Action ()
deleteIntent cmd = Action (P.deleteBy (UniqueCommand cmd))
-- | Fetch every row of a table, projecting each entity's value through
-- the given function.
getAll :: (PSql.SqlBackend ~ PSql.PersistEntityBackend a, PSql.PersistEntity a) => (a -> b) -> Action [b]
getAll f = Action $ fmap (f . P.entityVal) <$> P.selectList [] []
-- | All pending 'ScheduleCommand's.
allIntents :: Action [ScheduleCommand]
allIntents = getAll scheduleIntentCommand
-- | All persisted job statuses.
allJobs :: Action [Job.Status]
allJobs = getAll toStatus where
    toStatus (Job _ spec st satis uuid) = Job.mkStatus spec st satis uuid
|
futufeld/eclogues
|
eclogues-mock/src/Eclogues/Persist.hs
|
bsd-3-clause
| 4,166 | 0 | 12 | 782 | 1,014 | 570 | 444 | 71 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_driver_properties - device extension
--
-- == VK_KHR_driver_properties
--
-- [__Name String__]
-- @VK_KHR_driver_properties@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 197
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.2-promotions Vulkan 1.2>
--
-- [__Contact__]
--
-- - Daniel Rakos
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_driver_properties] @drakos-amd%0A<<Here describe the issue or question you have about the VK_KHR_driver_properties extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-04-11
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.2 Core
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Baldur Karlsson
--
-- - Matthaeus G. Chajdas, AMD
--
-- - Piers Daniell, NVIDIA
--
-- - Alexander Galazin, Arm
--
-- - Jesse Hall, Google
--
-- - Daniel Rakos, AMD
--
-- == Description
--
-- This extension provides a new physical device query which allows
-- retrieving information about the driver implementation, allowing
-- applications to determine which physical device corresponds to which
-- particular vendor’s driver, and which conformance test suite version the
-- driver implementation is compliant with.
--
-- == Promotion to Vulkan 1.2
--
-- All functionality in this extension is included in core Vulkan 1.2, with
-- the KHR suffix omitted. The original type, enum and command names are
-- still available as aliases of the core functionality.
--
-- == New Structures
--
-- - 'ConformanceVersionKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceDriverPropertiesKHR'
--
-- == New Enums
--
-- - 'DriverIdKHR'
--
-- == New Enum Constants
--
-- - 'KHR_DRIVER_PROPERTIES_EXTENSION_NAME'
--
-- - 'KHR_DRIVER_PROPERTIES_SPEC_VERSION'
--
-- - 'Vulkan.Core10.APIConstants.MAX_DRIVER_INFO_SIZE_KHR'
--
-- - 'Vulkan.Core10.APIConstants.MAX_DRIVER_NAME_SIZE_KHR'
--
-- - Extending 'Vulkan.Core12.Enums.DriverId.DriverId':
--
-- - 'DRIVER_ID_AMD_OPEN_SOURCE_KHR'
--
-- - 'DRIVER_ID_AMD_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_ARM_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_BROADCOM_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_GGP_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_GOOGLE_SWIFTSHADER_KHR'
--
-- - 'DRIVER_ID_IMAGINATION_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR'
--
-- - 'DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR'
--
-- - 'DRIVER_ID_MESA_RADV_KHR'
--
-- - 'DRIVER_ID_NVIDIA_PROPRIETARY_KHR'
--
-- - 'DRIVER_ID_QUALCOMM_PROPRIETARY_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR'
--
-- == Version History
--
-- - Revision 1, 2018-04-11 (Daniel Rakos)
--
-- - Internal revisions
--
-- == See Also
--
-- 'Vulkan.Core10.APIConstants.MAX_DRIVER_INFO_SIZE_KHR',
-- 'Vulkan.Core10.APIConstants.MAX_DRIVER_NAME_SIZE_KHR',
-- 'ConformanceVersionKHR', 'DriverIdKHR',
-- 'PhysicalDeviceDriverPropertiesKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_driver_properties Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_driver_properties ( pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR
, pattern DRIVER_ID_AMD_PROPRIETARY_KHR
, pattern DRIVER_ID_AMD_OPEN_SOURCE_KHR
, pattern DRIVER_ID_MESA_RADV_KHR
, pattern DRIVER_ID_NVIDIA_PROPRIETARY_KHR
, pattern DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR
, pattern DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
, pattern DRIVER_ID_IMAGINATION_PROPRIETARY_KHR
, pattern DRIVER_ID_QUALCOMM_PROPRIETARY_KHR
, pattern DRIVER_ID_ARM_PROPRIETARY_KHR
, pattern DRIVER_ID_GOOGLE_SWIFTSHADER_KHR
, pattern DRIVER_ID_GGP_PROPRIETARY_KHR
, pattern DRIVER_ID_BROADCOM_PROPRIETARY_KHR
, pattern MAX_DRIVER_NAME_SIZE_KHR
, pattern MAX_DRIVER_INFO_SIZE_KHR
, DriverIdKHR
, ConformanceVersionKHR
, PhysicalDeviceDriverPropertiesKHR
, KHR_DRIVER_PROPERTIES_SPEC_VERSION
, pattern KHR_DRIVER_PROPERTIES_SPEC_VERSION
, KHR_DRIVER_PROPERTIES_EXTENSION_NAME
, pattern KHR_DRIVER_PROPERTIES_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core12.Promoted_From_VK_KHR_driver_properties (ConformanceVersion)
import Vulkan.Core12.Enums.DriverId (DriverId)
import Vulkan.Core12.Promoted_From_VK_KHR_driver_properties (PhysicalDeviceDriverProperties)
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_AMD_OPEN_SOURCE))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_AMD_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_ARM_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_BROADCOM_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_GGP_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_GOOGLE_SWIFTSHADER))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_IMAGINATION_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_INTEL_OPEN_SOURCE_MESA))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_INTEL_PROPRIETARY_WINDOWS))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_MESA_RADV))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_NVIDIA_PROPRIETARY))
import Vulkan.Core12.Enums.DriverId (DriverId(DRIVER_ID_QUALCOMM_PROPRIETARY))
import Vulkan.Core10.APIConstants (pattern MAX_DRIVER_INFO_SIZE)
import Vulkan.Core10.APIConstants (pattern MAX_DRIVER_NAME_SIZE)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES))
-- NOTE(review): this module is generated (see the module header); fixes
-- belong in the generator scripts.  Everything below is a KHR-suffixed
-- alias for the Vulkan 1.2 core name this extension was promoted to.
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR"
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES
-- No documentation found for TopLevel "VK_DRIVER_ID_AMD_PROPRIETARY_KHR"
pattern DRIVER_ID_AMD_PROPRIETARY_KHR = DRIVER_ID_AMD_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR"
pattern DRIVER_ID_AMD_OPEN_SOURCE_KHR = DRIVER_ID_AMD_OPEN_SOURCE
-- No documentation found for TopLevel "VK_DRIVER_ID_MESA_RADV_KHR"
pattern DRIVER_ID_MESA_RADV_KHR = DRIVER_ID_MESA_RADV
-- No documentation found for TopLevel "VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR"
pattern DRIVER_ID_NVIDIA_PROPRIETARY_KHR = DRIVER_ID_NVIDIA_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR"
pattern DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = DRIVER_ID_INTEL_PROPRIETARY_WINDOWS
-- No documentation found for TopLevel "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR"
pattern DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = DRIVER_ID_INTEL_OPEN_SOURCE_MESA
-- No documentation found for TopLevel "VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR"
pattern DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = DRIVER_ID_IMAGINATION_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR"
pattern DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = DRIVER_ID_QUALCOMM_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_ARM_PROPRIETARY_KHR"
pattern DRIVER_ID_ARM_PROPRIETARY_KHR = DRIVER_ID_ARM_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR"
pattern DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = DRIVER_ID_GOOGLE_SWIFTSHADER
-- No documentation found for TopLevel "VK_DRIVER_ID_GGP_PROPRIETARY_KHR"
pattern DRIVER_ID_GGP_PROPRIETARY_KHR = DRIVER_ID_GGP_PROPRIETARY
-- No documentation found for TopLevel "VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR"
pattern DRIVER_ID_BROADCOM_PROPRIETARY_KHR = DRIVER_ID_BROADCOM_PROPRIETARY
-- No documentation found for TopLevel "VK_MAX_DRIVER_NAME_SIZE_KHR"
pattern MAX_DRIVER_NAME_SIZE_KHR = MAX_DRIVER_NAME_SIZE
-- No documentation found for TopLevel "VK_MAX_DRIVER_INFO_SIZE_KHR"
pattern MAX_DRIVER_INFO_SIZE_KHR = MAX_DRIVER_INFO_SIZE
-- No documentation found for TopLevel "VkDriverIdKHR"
type DriverIdKHR = DriverId
-- No documentation found for TopLevel "VkConformanceVersionKHR"
type ConformanceVersionKHR = ConformanceVersion
-- No documentation found for TopLevel "VkPhysicalDeviceDriverPropertiesKHR"
type PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties
type KHR_DRIVER_PROPERTIES_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION"
pattern KHR_DRIVER_PROPERTIES_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_DRIVER_PROPERTIES_SPEC_VERSION = 1
type KHR_DRIVER_PROPERTIES_EXTENSION_NAME = "VK_KHR_driver_properties"
-- No documentation found for TopLevel "VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME"
pattern KHR_DRIVER_PROPERTIES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_DRIVER_PROPERTIES_EXTENSION_NAME = "VK_KHR_driver_properties"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_KHR_driver_properties.hs
|
bsd-3-clause
| 10,556 | 0 | 8 | 2,206 | 767 | 535 | 232 | -1 | -1 |
module Main where
import Prelude hiding (pred, succ)
import Data.List (nub)
goal = 200
-- | Next larger coin denomination; @succ 200@ is 0 (there is no larger coin).
-- Only the valid coin values are handled; any other input is a
-- pattern-match failure, just like the original guard chain.
succ x = case x of
  200 -> 0
  100 -> 200
  50  -> 100
  20  -> 50
  10  -> 20
  5   -> 10
  2   -> 5
  1   -> 2
-- | Next smaller coin denomination; @pred 1@ is 0 (no smaller coin).
-- Defined only for the valid coin values.
pred x = case x of
  200 -> 100
  100 -> 50
  50  -> 20
  20  -> 10
  10  -> 5
  5   -> 2
  2   -> 1
  1   -> 0
-- | Step to the next candidate coin list in the enumeration order:
-- shrink the last (smallest) coin via 'pred', dropping it when it hits 0.
nextListFrom :: [Integer] -> [Integer]
nextListFrom [] = []
nextListFrom [h]
  | pred h > 0 = [pred h]
  | otherwise  = []
nextListFrom old = reverse (nextListFrom' (reverse old))
-- Worker on the reversed list: decrement the head coin; when it reaches 0,
-- drop it and recurse on the rest. Undefined on [] (never called with it).
nextListFrom' [h] = if pred h == 0 then [] else [pred h]
nextListFrom' (h:rest) =
  if pred h /= 0
    then pred h : rest
    else nextListFrom' rest
-- | Pick the next coin to append: repeat the current head if the total stays
-- within 'goal', otherwise take the next smaller denomination.
newElementFrom :: [Integer] -> Integer
newElementFrom [] = undefined -- should never happen
newElementFrom coins@(biggest:_) =
  if sum coins + biggest <= goal then biggest else pred biggest
-- | Enumerate coin lists, emitting each one whose sum is exactly 'goal';
-- terminates when the enumeration reaches the empty list.
processList :: [Integer] -> [[Integer]]
processList [] = [[]]
processList l
  | total == goal = l : processList (nextListFrom l)
  | total > goal  = processList (nextListFrom l)
  | otherwise     = processList (l ++ [newElementFrom (reverse l)])
  where
    -- computed once per step instead of once per guard
    total = sum l
-- | All coin combinations summing to 'goal', starting the search from the
-- single coin @p@.
f :: Integer
  -> [[Integer]] -- list of coin combinations where their sum = 200
f = processList . (: [])
main = print $ (length $ f goal) - 1 -- -1 wegen leerer Liste am Ende
|
stulli/projectEuler
|
eu31.hs
|
bsd-3-clause
| 1,354 | 0 | 11 | 402 | 693 | 342 | 351 | 47 | 2 |
{-# LANGUAGE CPP, TypeFamilies, FlexibleInstances, UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Class
-- Copyright : (C) 2011 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : type families
--
----------------------------------------------------------------------------
module Control.Monad.ST.Class (MonadST(..)) where
import Control.Monad.Trans.Class
#if MIN_VERSION_base(4,4,0)
import Control.Monad.ST.Safe
#else
import Control.Monad.ST
#endif
-- | Monads into which a strict 'ST' computation can be lifted.
--
-- 'World' names the state thread tied to @m@ ('RealWorld' for 'IO',
-- @s@ for @'ST' s@), so 'liftST' can only embed 'ST' actions from the
-- matching state thread.
class Monad m => MonadST m where
  -- | The state-thread index associated with @m@.
  type World m :: *
  -- | Embed an 'ST' computation into @m@.
  liftST :: ST (World m) a -> m a
-- | 'IO' runs in 'RealWorld'; lifting is 'stToIO'.
instance MonadST IO where
  type World IO = RealWorld
  liftST = stToIO
-- | @'ST' s@ lifts into itself trivially.
instance MonadST (ST s) where
  type World (ST s) = s
  liftST = id
-- | Any transformer stack over a 'MonadST' lifts via 'lift'
-- (this is the instance that needs UndecidableInstances).
instance (MonadTrans t, MonadST m, Monad (t m)) => MonadST (t m) where
  type World (t m) = World m
  liftST = lift . liftST
|
ekmett/monad-st
|
Control/Monad/ST/Class.hs
|
bsd-3-clause
| 1,021 | 0 | 10 | 174 | 216 | 125 | 91 | -1 | -1 |
-- | Placeholder module: the export list is empty and no definitions
-- exist yet.
module Ghazan.Weight (
    ) where
|
Cortlandd/ConversionFormulas
|
src/Ghazan/Weight.hs
|
bsd-3-clause
| 36 | 0 | 3 | 10 | 9 | 6 | 3 | 1 | 0 |
module Main where
import System.Environment ( getArgs )
import System.IO ( hPutStrLn, stderr )
import System.Exit ( exitWith, ExitCode( ExitFailure ) )
import Text.XML.HXT.Parser.XmlParsec ( xreadDoc )
import Text.XML.HXT.DOM.TypeDefs ( XmlTrees, XmlTree, XNode(..) )
import Text.XML.HXT.DOM.QualifiedName ( localPart )
import Data.Tree.NTree.TypeDefs ( NTree(..) )
import TranslationLister ( getResxFiles, resxFileGroups, resxFileLanguage )
-- | A (key, value) pair from a resx document — presumably the resource
-- name and its translated text; TODO confirm against the resx schema.
type TranslationEntry = (String,String)
-- | Stub: currently ignores its argument and always returns no entries.
-- The intended behaviour (extracting data nodes from the parsed resx
-- tree) is not implemented yet.
extract_entries :: XmlTree -> [TranslationEntry]
extract_entries tree = []
-- | True iff the node is an element tag whose local name is \"root\"
-- (the top-level element of a resx document).
is_root :: XmlTree -> Bool
is_root node = case node of
  NTree (XTag qn _) _ -> localPart qn == "root"
  _                   -> False
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> do
      hPutStrLn stderr "Usage: TranslationLister sourceDirectory"
      exitWith (ExitFailure 1)
    (dir : _) -> do
      cts <- getResxFiles dir
      -- NOTE(review): 'head $ head' crashes if no group has more than one
      -- file — TODO confirm that callers always run this on a directory
      -- with at least one translated resx group.
      let resxGroups = resxFileGroups cts in
        let groupsWithTranslations = filter (\l -> length l > 1) resxGroups in
        let (veryFirstPath,_) = head $ head groupsWithTranslations in
        do
          content <- readFile veryFirstPath
          -- 'tail' drops the first character — presumably a BOM; it would
          -- crash on an empty file. TODO confirm the input always has a BOM.
          let contentWithoutBom = tail content in
            let contentAsXml = xreadDoc contentWithoutBom in
            let roots = filter is_root contentAsXml in
            case roots of
              [] -> putStrLn "No entries found"
              [r] -> putStrLn (show r)
              _ -> putStrLn "Too many roots found"
|
sebug/TranslationLister
|
Main.hs
|
bsd-3-clause
| 1,485 | 0 | 31 | 364 | 457 | 241 | 216 | 36 | 4 |
{-# LANGUAGE
DeriveDataTypeable
, DeriveGeneric
, TemplateHaskell
, TypeFamilies
#-}
module Type.CustomerSignupError where
import Data.Aeson
import Data.JSON.Schema
import Data.Typeable
import GHC.Generics
import Generics.Regular
import Generics.Regular.XmlPickler
import Rest.Error
import Text.XML.HXT.Arrow.Pickle
-- | Validation failures that can occur during customer signup.
data CustomerSignupError = InvalidPassword | InvalidCustomerName
  deriving (Eq, Generic, Ord, Show, Typeable)
-- Template Haskell: derive the regular-generics pattern functor used by
-- the generic XML pickler below.
deriveAll ''CustomerSignupError "PFCustomerSignupError"
type instance PF CustomerSignupError = PFCustomerSignupError
-- Generic XML pickling and JSON schema via the derived representations.
instance XmlPickler CustomerSignupError where xpickle = gxpickle
instance JSONSchema CustomerSignupError where schema = gSchema
instance FromJSON CustomerSignupError
instance ToJSON CustomerSignupError
-- | Every signup error is reported as HTTP 400 (Bad Request).
instance ToResponseCode CustomerSignupError where
  toResponseCode _ = 400
|
tinkerthaler/basic-invoice-rest
|
example-api/Type/CustomerSignupError.hs
|
bsd-3-clause
| 844 | 0 | 6 | 105 | 161 | 90 | 71 | 24 | 0 |
-- | Placeholder module: the export list is empty and no definitions
-- exist yet.
module ParionsFDJ.Parse.Event
  (
  ) where
|
vquintin/parionsfdj
|
src/ParionsFDJ/Parse/Event.hs
|
bsd-3-clause
| 44 | 0 | 3 | 9 | 10 | 7 | 3 | 2 | 0 |
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
import Distribution.Verbosity
import Distribution.System
import Distribution.Simple
import Distribution.Simple.Utils
import Distribution.Simple.Setup
import Distribution.Simple.Command
import Distribution.Simple.Program
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.PreProcess hiding (ppC2hs)
import Distribution.Simple.BuildPaths
import Data.List hiding (isInfixOf)
import Data.Maybe
import Text.Printf
import Control.Exception
import Control.Monad
import System.Exit hiding (die)
import System.FilePath
import System.Directory
import System.Environment
import System.IO.Error hiding (catch)
import Prelude hiding (catch)
-- | Root directory of a CUDA toolkit installation
-- (e.g. the directory containing @include/cuda.h@; see 'validateLocation').
newtype CudaPath = CudaPath {
  cudaPath :: String
} deriving (Eq, Ord, Show, Read)
-- Windows compatibility function.
--
-- CUDA toolkit uses different names for import libraries and their respective DLLs.
-- Eg. `cudart.lib` imports functions from `cudart32_70` (on 32-bit architecture and 7.0 version of toolkit).
-- The ghci linker fails to resolve this. Therefore, it needs to be given the DLL filenames
-- as `extra-ghci-libraries` option.
--
-- This function takes *a path to* import library and returns name of corresponding DLL.
-- Eg: "C:/CUDA/Toolkit/Win32/cudart.lib" -> "cudart32_70.dll"
-- Internally it assumes that nm tool is present in PATH. This should be always true, as nm is distributed along with GHC.
--
-- The function is meant to be used on Windows. Other platforms may or may not work.
--
-- | Given the path of an import library (.lib), return the name of the DLL
-- it refers to, by scanning the symbol listing produced by @nm@ for a line
-- containing the platform DLL extension. Returns 'Nothing' when no such
-- line is present. Requires @nm@ on PATH (shipped with GHC).
importLibraryToDllFileName :: FilePath -> IO (Maybe FilePath)
importLibraryToDllFileName importLibPath = do
  -- e.g. for cudart.lib, nm prints a line such as "nvcuda.dll:" among
  -- the symbol dump; the first line mentioning the DLL extension wins.
  listing <- getProgramInvocationOutput normal (simpleProgramInvocation "nm" [importLibPath])
  return (find (isInfixOf ("" <.> dllExtension)) (lines listing))
-- Windows compatibility function.
--
-- The function is used to populate the extraGHCiLibs list on Windows platform.
-- It takes libraries directory and .lib filenames and returns their corresponding dll filename.
-- (Both filenames are stripped from extensions)
--
-- Eg: "C:\cuda\toolkit\lib\x64" -> ["cudart", "cuda"] -> ["cudart64_65", "ncuda"]
--
-- | For each import library name (without extension) in @libdir@, resolve
-- the corresponding DLL name (without extension) via
-- 'importLibraryToDllFileName'. Libraries whose DLL cannot be determined
-- are silently skipped.
additionalGhciLibraries :: FilePath -> [FilePath] -> IO [FilePath]
additionalGhciLibraries libdir importLibs = do
  let libsAbsolutePaths = map (\libname -> libdir </> libname <.> "lib") importLibs
  candidateNames <- mapM importLibraryToDllFileName libsAbsolutePaths
  -- Previously: map (\(Just d) -> dropExtension d) (filter isJust ...),
  -- a partial lambda; 'catMaybes' expresses the same filtering totally.
  return (map dropExtension (catMaybes candidateNames))
-- OSX compatibility function
--
-- Returns [] or ["U__BLOCKS__"]
--
-- | OSX compatibility: return @["-U__BLOCKS__"]@ when the system stdlib.h
-- mentions @__BLOCKS__@, otherwise @[]@. A missing header is treated as
-- empty contents rather than an error.
getAppleBlocksOption :: IO [String]
getAppleBlocksOption = do
  contents <- readFile "/usr/include/stdlib.h" `catch` fallback
  if "__BLOCKS__" `isInfixOf` contents
    then return ["-U__BLOCKS__"]
    else return []
  where
    fallback :: IOError -> IO String
    fallback _ = return ""
-- | Header directory of the toolkit: @TOOLKIT/include@.
getCudaIncludePath :: CudaPath -> FilePath
getCudaIncludePath (CudaPath path) = path </> "include"
-- | Library directory of the toolkit for the given platform:
-- @lib/Win32@ or @lib/x64@ on Windows, @lib@ on OSX, and @lib@ / @lib64@
-- elsewhere depending on the architecture.
getCudaLibraryPath :: CudaPath -> Platform -> FilePath
getCudaLibraryPath (CudaPath path) (Platform arch os) = path </> libSubpath
  where
    libSubpath = case os of
      Windows -> "lib" </> case arch of
        I386 -> "Win32"
        X86_64 -> "x64"
        _ -> error $ "Unexpected Windows architecture " ++ show arch ++ ". Please report this issue to https://github.com/tmcdonell/cuda/issues"
      OSX -> "lib"
      -- For now just treat all non-Windows systems similarly
      _ -> case arch of
        I386 -> "lib"
        X86_64 -> "lib64"
        _ -> "lib" -- TODO how should this be handled?
-- | Names of the CUDA libraries this package links against.
getCudaLibraries :: [String]
getCudaLibraries = ["cufft", "cuda"]
-- Generates build info with flags needed for CUDA Toolkit to be properly
-- visible to underlying build tools.
--
-- | Build a 'HookedBuildInfo' that makes the CUDA toolkit visible to the
-- C compiler, linker, GHC and c2hs, plus OS-specific tweaks (extra GHCi
-- DLLs on Windows, the CUDA framework on OSX).
cudaLibraryBuildInfo :: CudaPath -> Platform -> Version -> IO HookedBuildInfo
cudaLibraryBuildInfo cudaPath platform@(Platform arch os) ghcVersion = do
  let cudaLibraryPath = getCudaLibraryPath cudaPath platform
  -- Extra lib dirs are not needed on Windows somehow. On Linux their lack would cause an error: /usr/bin/ld: cannot find -lcudart
  -- Still, they do not cause harm so let's have them regardless of OS.
  let extraLibDirs_ = [cudaLibraryPath]
  let includeDirs = [getCudaIncludePath cudaPath]
  -- Propagate the include/lib dirs to cc, ld and (via -optc/-optl) GHC.
  let ccOptions_ = map ("-I" ++) includeDirs
  let ldOptions_ = map ("-L" ++) extraLibDirs_
  let ghcOptions = map ("-optc" ++) ccOptions_ ++ map ("-optl" ++ ) ldOptions_
  let extraLibs_ = getCudaLibraries
  -- Options for C2HS
  let c2hsArchitectureFlag = case arch of I386   -> ["-m32"]
                                          X86_64 -> ["-m64"]
                                          _      -> []
  -- GHC >= 7.8 needs -DUSE_EMPTY_CASE when preprocessing for c2hs.
  let c2hsEmptyCaseFlag = ["-DUSE_EMPTY_CASE" | versionBranch ghcVersion >= [7,8]]
  let c2hsCppOptions = c2hsArchitectureFlag ++ c2hsEmptyCaseFlag ++ ["-E"]
  -- On OSX we might add one more options to c2hs cpp.
  appleBlocksOption <- case os of OSX -> getAppleBlocksOption; _ -> return []
  let c2hsOptions = unwords $ map ("--cppopts=" ++) (c2hsCppOptions ++ appleBlocksOption)
  let extraOptionsC2Hs = ("x-extra-c2hs-options", c2hsOptions)
  let buildInfo = emptyBuildInfo
        { ccOptions = ccOptions_
        , ldOptions = ldOptions_
        , extraLibs = extraLibs_
        , extraLibDirs = extraLibDirs_
        , options = [(GHC, ghcOptions)] -- Is this needed for anything?
        , customFieldsBI = [extraOptionsC2Hs]
        }
  let addSystemSpecificOptions :: Platform -> IO BuildInfo
      addSystemSpecificOptions (Platform _ Windows) = do
        -- Workaround issue with ghci linker not being able to find DLLs with names different from their import LIBs.
        extraGHCiLibs_ <- additionalGhciLibraries cudaLibraryPath extraLibs_
        return buildInfo { extraGHCiLibs = extraGHCiLibs buildInfo ++ extraGHCiLibs_ }
      addSystemSpecificOptions (Platform _ OSX) = return buildInfo
        { customFieldsBI = customFieldsBI buildInfo ++ [("frameworks", "CUDA")]
        , ldOptions = ldOptions buildInfo ++ ["-F/Library/Frameworks"]
        }
      addSystemSpecificOptions _ = return buildInfo
  adjustedBuildInfo <-addSystemSpecificOptions platform
  return (Just adjustedBuildInfo, [])
-- Checks whether given location looks like a valid CUDA toolkit directory
--
-- | Accept a directory as a CUDA toolkit root iff @include/cuda.h@ exists
-- beneath it, logging the decision at 'info' verbosity.
validateLocation :: Verbosity -> FilePath -> IO Bool
validateLocation verbosity path = do
  -- TODO: Ideally this should check also for cudart.lib and whether cudart
  -- exports relevant symbols; achievable with some `nm` trickery.
  let testedPath = path </> "include" </> "cuda.h"
  exists <- doesFileExist testedPath
  let report
        | exists    = printf "Path accepted: %s\n" path
        | otherwise = printf "Path rejected: %s\nDoes not exist: %s\n" path testedPath
  info verbosity report
  return exists
-- Evaluates IO to obtain the path, handling any possible exceptions.
-- If path is evaluable and points to valid CUDA toolkit returns True.
--
-- | Evaluate an IO action yielding a candidate path and validate it.
-- Any 'IOError' raised while obtaining the path is logged and counted
-- as "not a valid location".
validateIOLocation :: Verbosity -> IO FilePath -> IO Bool
validateIOLocation verbosity iopath = catch attempt onError
  where
    attempt = iopath >>= validateLocation verbosity
    onError :: IOError -> IO Bool
    onError err = do
      info verbosity (show err)
      return False
-- Function iterates over action yielding possible locations, evaluating them
-- and returning the first valid one. Retuns Nothing if no location matches.
--
-- | Walk the candidate (action, description) pairs in order and return the
-- first path that validates; 'Nothing' when none do.
findFirstValidLocation :: Verbosity -> [(IO FilePath, String)] -> IO (Maybe FilePath)
findFirstValidLocation _ [] = return Nothing
findFirstValidLocation verbosity ((locate, description):others) = do
  info verbosity (printf "checking for %s\n" description)
  ok <- validateIOLocation verbosity locate
  if ok
    then fmap Just locate
    else findFirstValidLocation verbosity others
-- | Executable name of the CUDA compiler driver, looked up on PATH.
nvccProgramName :: String
nvccProgramName = "nvcc"
-- NOTE: this function throws an exception when there is no `nvcc` in PATH.
-- The exception contains meaningful message.
--
-- | Locate an executable on PATH, raising a does-not-exist 'IOError'
-- (with a meaningful message) when it cannot be found.
findProgramLocationThrowing :: String -> IO FilePath
findProgramLocationThrowing execName =
  findProgramLocation normal execName >>= maybe missing return
  where
    missing = ioError $ mkIOError doesNotExistErrorType ("not found: " ++ execName) Nothing Nothing
-- Returns pairs (action yielding candidate path, String description of that location)
--
-- | Candidate CUDA toolkit locations, in priority order: the CUDA_PATH
-- environment variable, the toolkit containing the @nvcc@ found on PATH,
-- and finally @/usr/local/cuda@. Each entry pairs the path-producing
-- action with a human-readable description for logging.
candidateCudaLocation :: [(IO FilePath, String)]
candidateCudaLocation =
  [ env "CUDA_PATH"
  , (nvccLocation, "nvcc compiler in PATH")
  , defaultPath "/usr/local/cuda"
  ]
  where
    env name        = (getEnv name, printf "environment variable %s" name)
    defaultPath dir = (return dir, printf "default location %s" dir)
    -- nvcc normally lives at TOOLKIT/bin/nvcc; strip the two trailing
    -- components to recover the toolkit root.
    nvccLocation :: IO FilePath
    nvccLocation =
      fmap (takeDirectory . takeDirectory) (findProgramLocationThrowing nvccProgramName)
-- Try to locate CUDA installation on the drive.
-- Currently this means (in order)
-- 1) Checking the CUDA_PATH environment variable
-- 2) Looking for `nvcc` in `PATH`
-- 3) Checking /usr/local/cuda
--
-- In case of failure, calls die with the pretty long message from below.
-- | Locate the CUDA installation by trying 'candidateCudaLocation' in
-- order; logs the accepted path, or aborts with 'longError' when nothing
-- validates.
findCudaLocation :: Verbosity -> IO CudaPath
findCudaLocation verbosity =
  findFirstValidLocation verbosity candidateCudaLocation >>= \found ->
    case found of
      Just path -> do
        notice verbosity $ "Found CUDA toolkit at: " ++ path
        return (CudaPath path)
      Nothing -> die longError
-- | User-facing message shown when no CUDA installation could be located.
longError :: String
longError = unlines
  [ "********************************************************************************"
  , ""
  , "The configuration process failed to locate your CUDA installation. Ensure that you have installed both the developer driver and toolkit, available from:"
  , ""
  , "> http://developer.nvidia.com/cuda-downloads"
  , ""
  , "and make sure that `nvcc` is available in your PATH. Check the above output log and run the command directly to ensure it can be located."
  , ""
  , "If you have a non-standard installation, you can add additional search paths using --extra-include-dirs and --extra-lib-dirs. Note that 64-bit Linux flavours often require both `lib64` and `lib` library paths, in that order."
  , ""
  , "********************************************************************************"
  ]
-- Runs CUDA detection procedure and stores .buildinfo to a file.
--
-- | Detect the CUDA toolkit, derive the hooked build info for it, and
-- write the result to @path@.
generateAndStoreBuildInfo :: Verbosity -> Platform -> CompilerId -> FilePath -> IO ()
generateAndStoreBuildInfo verbosity platform (CompilerId _ ghcVersion) path = do
  toolkit <- findCudaLocation verbosity
  buildInfo <- cudaLibraryBuildInfo toolkit platform ghcVersion
  storeHookedBuildInfo verbosity path buildInfo
-- | User-provided override file ("cuda.buildinfo"); see 'getHookedBuildInfo'.
customBuildinfoFilepath :: FilePath
customBuildinfoFilepath = "cuda" <.> "buildinfo"
-- | Auto-generated file written in the post-conf step
-- ("cuda.buildinfo.generated"). NOTE(review): the identifier spells
-- "Buldinfo"; the typo is kept since renaming would break callers.
generatedBuldinfoFilepath :: FilePath
generatedBuldinfoFilepath = customBuildinfoFilepath <.> "generated"
main :: IO ()
main = defaultMainWithHooks customHooks
  where
    -- Shared pre-hook: forbid extra args and load the hooked build info
    -- at the verbosity carried by the given flags record.
    readHook :: (a -> Distribution.Simple.Setup.Flag Verbosity) -> Args -> a -> IO HookedBuildInfo
    readHook get_verbosity a flags = do
        noExtraFlags a
        getHookedBuildInfo verbosity
      where
        verbosity = fromFlag (get_verbosity flags)
    preprocessors = hookedPreProcessors simpleUserHooks
    -- Our readHook implementation usees our getHookedBuildInfo.
    -- We can't rely on cabal's autoconfUserHooks since they don't handle user
    -- overwrites to buildinfo like we do.
    customHooks = simpleUserHooks
      { preBuild = preBuildHook -- not using 'readHook' here because 'build' takes; extra args
      , preClean = readHook cleanVerbosity
      , preCopy = readHook copyVerbosity
      , preInst = readHook installVerbosity
      , preHscolour = readHook hscolourVerbosity
      , preHaddock = readHook haddockVerbosity
      , preReg = readHook regVerbosity
      , preUnreg = readHook regVerbosity
      , postConf = postConfHook
      -- Replace the stock c2hs preprocessor with ours (ppC2hs below).
      , hookedPreProcessors = ("chs", ppC2hs) : filter (\x -> fst x /= "chs") preprocessors
      }
    -- The hook just loads the HookedBuildInfo generated by postConfHook,
    -- unless there is user-provided info that overwrites it.
    preBuildHook :: Args -> BuildFlags -> IO HookedBuildInfo
    preBuildHook _ flags = getHookedBuildInfo $ fromFlag $ buildVerbosity flags
    -- The hook scans system in search for CUDA Toolkit. If the toolkit is not
    -- found, an error is raised. Otherwise the toolkit location is used to
    -- create a `cuda.buildinfo.generated` file with all the resulting flags.
    postConfHook :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
    postConfHook args flags pkg_descr lbi
      = let verbosity = fromFlag (configVerbosity flags)
            currentPlatform = hostPlatform lbi
            compilerId_ = (compilerId $ compiler lbi)
        in do
          noExtraFlags args
          generateAndStoreBuildInfo verbosity currentPlatform compilerId_ generatedBuldinfoFilepath
          actualBuildInfoToUse <- getHookedBuildInfo verbosity
          let pkg_descr' = updatePackageDescription actualBuildInfoToUse pkg_descr
          postConf simpleUserHooks args flags pkg_descr' lbi
-- | Persist the hooked build info to @path@, announcing it at 'notice'
-- verbosity.
storeHookedBuildInfo :: Verbosity -> FilePath -> HookedBuildInfo -> IO ()
storeHookedBuildInfo verbosity path buildInfo = do
  notice verbosity ("Storing parameters to " ++ path)
  writeHookedBuildInfo path buildInfo
-- Reads user-provided `cuda.buildinfo` if present, otherwise loads `cuda.buildinfo.generated`
-- Outputs message informing about the other possibility.
-- Calls die when neither of the files is available.
-- (generated one should be always present, as it is created in the post-conf step)
--
-- | Load hooked build info, preferring the user-provided
-- 'customBuildinfoFilepath' over the auto-generated
-- 'generatedBuldinfoFilepath'; dies when neither exists (the generated
-- file should always be present after the post-conf step).
getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo
getHookedBuildInfo verbosity = do
  doesCustomBuildInfoExists <- doesFileExist customBuildinfoFilepath
  if doesCustomBuildInfoExists then do
    notice verbosity $ "The user-provided buildinfo from file " ++ customBuildinfoFilepath ++ " will be used. To use default settings, delete this file."
    readHookedBuildInfo verbosity customBuildinfoFilepath
  else do
    doesGeneratedBuildInfoExists <- doesFileExist generatedBuldinfoFilepath
    if doesGeneratedBuildInfoExists then do
      notice verbosity $ printf "Using build information from '%s'.\n" generatedBuldinfoFilepath
      notice verbosity $ printf "Provide a '%s' file to override this behaviour.\n" customBuildinfoFilepath
      readHookedBuildInfo verbosity generatedBuldinfoFilepath
    else die $ "Unexpected failure. Neither the default " ++ generatedBuldinfoFilepath ++ " nor custom " ++ customBuildinfoFilepath ++ " do exist."
-- Replicate the default C2HS preprocessor hook here, and inject a value for
-- extra-c2hs-options, if it was present in the buildinfo file
--
-- Everything below copied from Distribution.Simple.PreProcess
--
-- | c2hs preprocessor, copied from Distribution.Simple.PreProcess but
-- extended to honour any @x-extra-c2hs-options@ value found in the
-- package's custom buildinfo fields (as produced by 'cudaLibraryBuildInfo').
ppC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ppC2hs bi lbi
  = PreProcessor {
      platformIndependent = False,
      runPreProcessor = \(inBaseDir, inRelativeFile)
                         (outBaseDir, outRelativeFile) verbosity ->
        rawSystemProgramConf verbosity c2hsProgram (withPrograms lbi) . filter (not . null) $
          maybe [] words (lookup "x-extra-c2hs-options" (customFieldsBI bi))
          ++ ["--include=" ++ outBaseDir]
          ++ ["--cppopts=" ++ opt | opt <- getCppOptions bi lbi]
          ++ ["--output-dir=" ++ outBaseDir,
              "--output=" ++ outRelativeFile,
              inBaseDir </> inRelativeFile]
    }
-- | CPP options forwarded to c2hs: compiler-identifying defines plus the
-- package's include dirs and any -D/-I/-U flags from its ccOptions.
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi
    = hcDefines (compiler lbi)
   ++ ["-I" ++ dir | dir <- includeDirs bi]
   ++ [opt | opt@('-':c:_) <- ccOptions bi, c `elem` "DIU"]
-- | Compiler-identifying CPP defines (e.g. @-D__GLASGOW_HASKELL__=708@),
-- mirroring what Cabal passes to the C preprocessor.
hcDefines :: Compiler -> [String]
hcDefines comp =
  case compilerFlavor comp of
    GHC  -> ["-D__GLASGOW_HASKELL__=" ++ versionInt version]
    JHC  -> ["-D__JHC__=" ++ versionInt version]
    NHC  -> ["-D__NHC__=" ++ versionInt version]
    Hugs -> ["-D__HUGS__"]
    _    -> []
  where version = compilerVersion comp
-- TODO: move this into the compiler abstraction
-- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all the other
-- compilers. Check if that's really what they want.
-- | Render a compiler version using GHC's numeric convention:
-- the major component followed by the minor component zero-padded to two
-- digits (6.8.x -> "608", 6.10.x -> "610", 4.8.2 -> "408"). An empty
-- branch renders as "1", a single component as itself.
versionInt :: Version -> String
versionInt v = case versionBranch v of
  []       -> "1"
  [n]      -> show n
  n1:n2:_  ->
    let major = show n1
        minor = show n2
        -- pad single-digit minors so 6.8 and 6.10 stay distinct
        pad   = if length minor >= 2 then "" else "0"
    in major ++ pad ++ minor
|
flowbox-public/cufft
|
Setup.hs
|
bsd-3-clause
| 17,373 | 0 | 16 | 3,627 | 3,159 | 1,657 | 1,502 | 252 | 7 |
module Paths_Gluon (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- | Run an IO action, handling any 'Exception.IOException' with the
-- supplied fallback.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
-- | Package version 0.1.0.0 (the second field, version tags, is empty).
version :: Version
version = Version [0,1,0,0] []
-- Install locations baked in at configure time. NOTE(review): these are
-- machine-specific absolute Windows paths; this looks like a
-- Cabal/Stack-generated Paths_* module and should not be edited by hand.
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "E:\\Development\\Haskell\\Projects\\Gluon\\.stack-work\\install\\09641122\\bin"
libdir = "E:\\Development\\Haskell\\Projects\\Gluon\\.stack-work\\install\\09641122\\lib\\x86_64-windows-ghc-7.10.3\\Gluon-0.1.0.0-EFEcGTK6snE3aBUDcxRbN1"
datadir = "E:\\Development\\Haskell\\Projects\\Gluon\\.stack-work\\install\\09641122\\share\\x86_64-windows-ghc-7.10.3\\Gluon-0.1.0.0"
libexecdir = "E:\\Development\\Haskell\\Projects\\Gluon\\.stack-work\\install\\09641122\\libexec"
sysconfdir = "E:\\Development\\Haskell\\Projects\\Gluon\\.stack-work\\install\\09641122\\etc"
-- | Each accessor consults its @Gluon_*@ environment variable first and
-- falls back to the compiled-in path when the variable is unset.
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir     = catchIO (getEnv "Gluon_bindir")     (const (return bindir))
getLibDir     = catchIO (getEnv "Gluon_libdir")     (const (return libdir))
getDataDir    = catchIO (getEnv "Gluon_datadir")    (const (return datadir))
getLibexecDir = catchIO (getEnv "Gluon_libexecdir") (const (return libexecdir))
getSysconfDir = catchIO (getEnv "Gluon_sysconfdir") (const (return sysconfdir))
-- | Resolve a data file name against the (possibly overridden) data
-- directory, joining with a Windows backslash as in the generated original.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (\dir -> dir ++ "\\" ++ name) getDataDir
|
Coggroach/Gluon
|
.stack-work/dist/2672c1f3/build/autogen/Paths_Gluon.hs
|
bsd-3-clause
| 1,603 | 0 | 10 | 177 | 362 | 206 | 156 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
-- | The web server.
module Ircbrowse.Server where
import Ircbrowse.Types
import qualified Ircbrowse.Controllers as C
import Snap.App
import Snap.Http.Server hiding (Config)
import Snap.Util.FileServe
-- | Run the server.
-- | Run the server on port 10009 with the en_US unicode locale.
runServer :: Config -> Pool -> IO ()
runServer config pool = do
  setUnicodeLocale "en_US"
  let server = setPort 10009 defaultConfig
  httpServe server (serve config pool)
-- | Serve the controllers.
-- | Serve the controllers. Static assets are served from @static/@;
-- dynamic routes dispatch into "Ircbrowse.Controllers" via 'run'.
-- The commented-out routes below are disabled endpoints kept for reference.
serve :: Config -> Pool -> Snap ()
serve config pool = route routes where
  routes = [("/js/",serveDirectory "static/js")
           ,("/css/",serveDirectory "static/css")
           ,("/js/",serveDirectory "static/js")
           ,("/browse/:channel",run C.browse)
           -- ,("/nick-cloud/:channel",run C.nickCloud)
           --,("/social",run C.socialGraph)
           -- ,("/day/:channel/:year/:month/:day",run (C.browseDay False))
           -- ,("/day/:channel/today/:mode",run (C.browseDay True))
           -- ,("/day/:channel/today",run (C.browseDay True))
           -- ,("/nick/:nick",run C.nickProfile)
           -- ,("/nicks/:channel/:mode",run C.allNicks)
           -- ,("/nicks/:channel",run C.allNicks)
           ,("/quotes.rss",run C.quotes)
           ,("/robots.txt",serveFileAs "text/plain" "static/robots.txt")
           -- ,("/pdfs/:channel/:unique",run C.pdfs)
           -- ,("/pdfs/:channel",run C.pdfs)
           -- ,("/stats/:channel",run C.stats)
           -- ,("/calendar/:channel",run C.calendar)
           -- ,("/:channel",run C.stats)
           -- ,("/selection/:channel",run C.browseSpecified)
           -- ,("/export/:filename",run C.export)
           ,("/",run C.overview)
           ]
  -- Wrap a controller with the application state, config and DB pool.
  run = runHandler PState config pool
|
chrisdone/ircbrowse
|
src/Ircbrowse/Server.hs
|
bsd-3-clause
| 1,821 | 0 | 10 | 462 | 266 | 155 | 111 | 23 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.CodeGeneration.GCD
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Computing GCD symbolically, and generating C code for it. This example
-- illustrates symbolic termination related issues when programming with
-- SBV, when the termination of a recursive algorithm crucially depends
-- on the value of a symbolic variable. The technique we use is to statically
-- enforce termination by using a recursion depth counter.
-----------------------------------------------------------------------------
module Data.SBV.Examples.CodeGeneration.GCD where
import Data.SBV
-----------------------------------------------------------------------------
-- * Computing GCD
-----------------------------------------------------------------------------
-- | The symbolic GCD algorithm, over two 8-bit numbers. We define @sgcd a 0@ to
-- be @a@ for all @a@, which implies @sgcd 0 0 = 0@. Note that this is essentially
-- Euclid's algorithm, except with a recursion depth counter. We need the depth
-- counter since the algorithm is not /symbolically terminating/, as we don't have
-- a means of determining that the second argument (@b@) will eventually reach 0 in a symbolic
-- context. Hence we stop after 12 iterations. Why 12? We've empirically determined that this
-- algorithm will recurse at most 12 times for arbitrary 8-bit numbers. Of course, this is
-- a claim that we shall prove below.
-- | Symbolic Euclidean GCD on 8-bit words with a fixed recursion budget of
-- 12 steps (enough for all 8-bit inputs, as proved by 'sgcdIsCorrect'):
-- @sgcd a 0 = a@, so in particular @sgcd 0 0 = 0@.
sgcd :: SWord8 -> SWord8 -> SWord8
sgcd a b = loop 12 a b
  where
    loop :: SWord8 -> SWord8 -> SWord8 -> SWord8
    loop fuel x y =
      -- stop when y reaches 0 or the budget is exhausted
      ite (fuel .== 0 ||| y .== 0)
          x
          (loop (fuel-1) y r)
      where (_, r) = x `sQuotRem` y
-----------------------------------------------------------------------------
-- * Verification
-----------------------------------------------------------------------------
{- $VerificationIntro
We prove that 'sgcd' does indeed compute the common divisor of the given numbers.
Our predicate takes @x@, @y@, and @k@. We show that what 'sgcd' returns is indeed a common divisor,
and it is at least as large as any given @k@, provided @k@ is a common divisor as well.
-}
-- | We have:
--
-- >>> prove sgcdIsCorrect
-- Q.E.D.
-- | Correctness predicate for 'sgcd': the result is a common divisor of
-- @x@ and @y@, and is at least as large as any other common divisor @k@.
sgcdIsCorrect :: SWord8 -> SWord8 -> SWord8 -> SBool
sgcdIsCorrect x y k = ite (y .== 0)                -- if y is 0
                          (k' .== x)               -- then k' must be x, nothing else to prove by definition
                          (isCommonDivisor k' &&&  -- otherwise, k' is a common divisor and
                          (isCommonDivisor k ==> k' .>= k)) -- if k is a common divisor as well, then k' is at least as large as k
  where k' = sgcd x y
        -- a divides both x and y iff both remainders are 0
        isCommonDivisor a = z1 .== 0 &&& z2 .== 0
          where (_, z1) = x `sQuotRem` a
                (_, z2) = y `sQuotRem` a
-----------------------------------------------------------------------------
-- * Code generation
-----------------------------------------------------------------------------
{- $VerificationIntro
Now that we have proof our 'sgcd' implementation is correct, we can go ahead
and generate C code for it.
-}
-- | This call will generate the required C files. The following is the function
-- body generated for 'sgcd'. (We are not showing the generated header, @Makefile@,
-- and the driver programs for brevity.) Note that the generated function is
-- a constant time algorithm for GCD. It is not necessarily fastest, but it will take
-- precisely the same amount of time for all values of @x@ and @y@.
--
-- > /* File: "sgcd.c". Automatically generated by SBV. Do not edit! */
-- >
-- > #include <inttypes.h>
-- > #include <stdint.h>
-- > #include <stdbool.h>
-- > #include "sgcd.h"
-- >
-- > SWord8 sgcd(const SWord8 x, const SWord8 y)
-- > {
-- > const SWord8 s0 = x;
-- > const SWord8 s1 = y;
-- > const SBool s3 = s1 == 0;
-- > const SWord8 s4 = (s1 == 0) ? s0 : (s0 % s1);
-- > const SWord8 s5 = s3 ? s0 : s4;
-- > const SBool s6 = 0 == s5;
-- > const SWord8 s7 = (s5 == 0) ? s1 : (s1 % s5);
-- > const SWord8 s8 = s6 ? s1 : s7;
-- > const SBool s9 = 0 == s8;
-- > const SWord8 s10 = (s8 == 0) ? s5 : (s5 % s8);
-- > const SWord8 s11 = s9 ? s5 : s10;
-- > const SBool s12 = 0 == s11;
-- > const SWord8 s13 = (s11 == 0) ? s8 : (s8 % s11);
-- > const SWord8 s14 = s12 ? s8 : s13;
-- > const SBool s15 = 0 == s14;
-- > const SWord8 s16 = (s14 == 0) ? s11 : (s11 % s14);
-- > const SWord8 s17 = s15 ? s11 : s16;
-- > const SBool s18 = 0 == s17;
-- > const SWord8 s19 = (s17 == 0) ? s14 : (s14 % s17);
-- > const SWord8 s20 = s18 ? s14 : s19;
-- > const SBool s21 = 0 == s20;
-- > const SWord8 s22 = (s20 == 0) ? s17 : (s17 % s20);
-- > const SWord8 s23 = s21 ? s17 : s22;
-- > const SBool s24 = 0 == s23;
-- > const SWord8 s25 = (s23 == 0) ? s20 : (s20 % s23);
-- > const SWord8 s26 = s24 ? s20 : s25;
-- > const SBool s27 = 0 == s26;
-- > const SWord8 s28 = (s26 == 0) ? s23 : (s23 % s26);
-- > const SWord8 s29 = s27 ? s23 : s28;
-- > const SBool s30 = 0 == s29;
-- > const SWord8 s31 = (s29 == 0) ? s26 : (s26 % s29);
-- > const SWord8 s32 = s30 ? s26 : s31;
-- > const SBool s33 = 0 == s32;
-- > const SWord8 s34 = (s32 == 0) ? s29 : (s29 % s32);
-- > const SWord8 s35 = s33 ? s29 : s34;
-- > const SBool s36 = 0 == s35;
-- > const SWord8 s37 = s36 ? s32 : s35;
-- > const SWord8 s38 = s33 ? s29 : s37;
-- > const SWord8 s39 = s30 ? s26 : s38;
-- > const SWord8 s40 = s27 ? s23 : s39;
-- > const SWord8 s41 = s24 ? s20 : s40;
-- > const SWord8 s42 = s21 ? s17 : s41;
-- > const SWord8 s43 = s18 ? s14 : s42;
-- > const SWord8 s44 = s15 ? s11 : s43;
-- > const SWord8 s45 = s12 ? s8 : s44;
-- > const SWord8 s46 = s9 ? s5 : s45;
-- > const SWord8 s47 = s6 ? s1 : s46;
-- > const SWord8 s48 = s3 ? s0 : s47;
-- >
-- > return s48;
-- > }
-- | Emit the C implementation of 'sgcd' (function @sgcd@ taking inputs
-- @x@ and @y@) into the current directory.
genGCDInC :: IO ()
genGCDInC = compileToC Nothing "sgcd" "" $ do
  a <- cgInput "x"
  b <- cgInput "y"
  cgReturn (sgcd a b)
|
Copilot-Language/sbv-for-copilot
|
Data/SBV/Examples/CodeGeneration/GCD.hs
|
bsd-3-clause
| 6,286 | 0 | 11 | 1,598 | 444 | 284 | 160 | 23 | 1 |
-- schreiersims.hs
module Math.SchreierSims where
import Math.CombinatoricsGeneration (cartProd)
import Math.RedBlackTree
import Math.RandomGenerator
import Math.PermutationGroups
-- Both implementations are taken from Seress, Permutation Group Algorithms
-- COSET REPRESENTATIVES FOR STABILISER OF A POINT
-- coset representatives for Gx (stabiliser of x) in the group G = <gs>
-- in other words, for each x' in the orbit of x under G, we find a g <- G taking x to x'
-- the code is basically just the code for calculating orbits, but modified to keep track of the group elements that we used to get there
-- Result: a red-black tree mapping each point y in the orbit of x to a
-- witness group element taking x to y (x itself maps to 1, the identity).
cosetRepsGx gs x = doCosetRepsGx gs (rbfromlist [x'], [x'])
    where x' = (x, 1) -- coset representative for x itself is the identity
-- Worker for 'cosetRepsGx': breadth-first orbit expansion.  'reps' is the
-- tree of representatives found so far, the second component is the
-- frontier added in the previous round; stop when the frontier is empty.
doCosetRepsGx gs (reps, []) = reps
doCosetRepsGx gs (reps, lastreps) =
    let
        -- apply every generator to every frontier point, extending the witness
        newreps = [(y .^ g, h * g) | (y,h) <- lastreps, g <- gs]
        newreps' = filter (\(y,h) -> reps `rblookup` y == Nothing) newreps -- don't store a g-path to y if we already have a shorter one
    in doCosetRepsGx gs (foldl rbupdate reps newreps', newreps')
-- newreps' may itself contain duplicates, so some of the updates may be overwrites
-- update coset representatives for the addition of a new generator.
-- (gs are the existing generators, g the new generator)
-- One expansion step applies g to every already-known point; the resulting
-- frontier is then grown normally with g included among the generators.
updateCosetRepsGx gs g t =
    let
        newreps = [(y .^ g, h * g) | (y,h) <- rbtolist t]
        newreps' = filter (\(y,h) -> t `rblookup` y == Nothing) newreps
    in doCosetRepsGx (g:gs) (foldl rbupdate t newreps', newreps')
-- RANDOM SCHREIER-SIMS ALGORITHM
-- !! WARNING - You need to check that what you've got at the end IS the full group
-- !! If you know the expected order, this is easy
-- !! Another way is to chuck more random elts at it, and check that they sift
-- (We ought to provide a way to continue the algorithm)
-- if (bs,gs) are a base and strong generating set, then this will calculate the Schreier-Sims transversals
-- if (bs,gs) aren't a base and strong generating set, then when we sift random group members through, some will fail to sift
-- we can use these to augment gs
-- Each level of the result is (base point, generators fixing all earlier
-- base points, coset representatives for that point's orbit).  When the
-- supplied base list is exhausted, further base points are found with
-- 'findBase'; recursion stops when no generators remain.
schreierSimsStructure _ (_,[]) = []
schreierSimsStructure n ([],gs) =
    let
        Just b = findBase n gs
        gs' = filter (\g -> b .^ g == b) gs -- generators stabilising b, used at the next level
        t = cosetRepsGx gs b
    in (b,gs,t) : schreierSimsStructure n ([],gs')
schreierSimsStructure n (b:bs,gs) =
    let
        gs' = filter (\g -> b .^ g == b) gs
        t = cosetRepsGx gs b
    in (b,gs,t) : schreierSimsStructure n (bs,gs')
-- update Schreier-Sims structure for addition of a new generator
-- (True,h): h sifted through every existing level without reaching the
-- identity, so new levels are appended for it.  (False,h): h got stuck;
-- it is inserted at the first level whose base point it moves.
-- NOTE(review): the (False,h) case has no clause for an empty structure —
-- it assumes h moves some base point before the list runs out; confirm
-- 'sift' guarantees this.
updateSchreierSimsStructure n (True,h) structure =
    let structure' = schreierSimsStructure n ([],[h])
    in structure ++ structure'
updateSchreierSimsStructure n (False,h) ((b,s,t):structure) =
    if b .^ h == b
    then (b,s,t) : updateSchreierSimsStructure n (False,h) structure
    else
        let t' = updateCosetRepsGx s h t
        in (b, h:s, t') : structure
-- Run the randomised Schreier-Sims algorithm starting from an empty base
-- and return just the base points and transversals (dropping the strong
-- generators kept in the full structure).
randomSchreierSimsTransversals n gs seed =
    let
        structure = randomSchreierSimsAlgorithmGivenBase n [] gs seed
        (bs,_,ts) = unzip3 structure
    in (bs, ts)
-- Randomised Schreier-Sims given an initial (possibly empty) base.
-- Random group elements from a walk on the Cayley graph are sifted through
-- the current structure; any element that fails to sift is used to augment
-- the structure.  Terminates after 20 consecutive random elements sift
-- cleanly (the counter resets to 20 on every failure) — this is a
-- probabilistic stopping rule, hence the warning above.
randomSchreierSimsAlgorithmGivenBase n bs gs seed =
    let
        hs = drop 2 (randomWalkOnCayleyGraph (length gs, gs) (identity n, seed)) -- the first two steps are the identity and one of the generators
        structure = doRandomSchreierSimsAlgorithm (gs,schreierSimsStructure n (bs,gs), hs,20)
    in structure
    where
        doRandomSchreierSimsAlgorithm (s, structure, h:hs,0) = structure
        doRandomSchreierSimsAlgorithm (s, structure, h:hs,i) =
            let (bs,_,ts) = unzip3 structure
            in case sift (bs,ts) h of
                Nothing -> doRandomSchreierSimsAlgorithm (s, structure, hs, i-1)
                Just (through,h') ->
                    let structure' = updateSchreierSimsStructure n (through,h') structure
                    in doRandomSchreierSimsAlgorithm (h':s, structure', hs, 20)
-- As 'randomSchreierSimsTransversals', but starting from a caller-supplied
-- base: returns the (possibly extended) base points and transversals.
randomSchreierSimsTransversalsGivenBase n bs gs seed = (bs', ts)
    where
        (bs', _, ts) = unzip3 structure
        structure = randomSchreierSimsAlgorithmGivenBase n bs gs seed
-- SCHREIER-SIMS ALGORITHM
-- generators for the stabiliser of x in the group G = <gs>
-- Schreier's Lemma states that if <S> = H < G, then, and R a set of coset reps for H in G
-- then { rs(rs)*^-1 | r <- R, s <- S } generates H (where * means "the coset representative of")
-- Identity elements are filtered out and duplicates removed via 'toSet'.
schreierGeneratorsGx n gs (reps,x) = toSet (filter (not . isIdentity) [schreierGenerator r g | r <- map snd (rbtolist reps), g <- gs])
    where
        schreierGenerator r g =
            let
                h = r * g
                Just h' = reps `rblookup` (x .^ h) -- lookup cannot fail: the orbit is closed under gs
            in h * inverse h'
-- Try to write g as a product of coset representatives, one per level.
-- Nothing            : g sifted to the identity (g is in the group).
-- Just (False, g')   : the residue g' moves a base point to a point outside
--                      that level's orbit — a new generator for that level.
-- Just (True, g')    : g' fixed every base point but is not the identity —
--                      a new base point is needed.
sift (bs,ts) g = doSift (bs,ts) g
    where
        doSift ([],[]) g =
            if isIdentity g
            then Nothing
            else Just (True,g) -- we sifted through, but didn't get the identity, so we need to add another base element
        doSift (b:bs,t:ts) g =
            case t `rblookup` (b .^ g) of
                Nothing -> Just (False,g)
                -- Just h -> doSift (bs,ts) (mult (inverse h) g)
                Just h -> doSift (bs,ts) (g * inverse h)
-- Pick a base point for the first generator: the smallest point of [1..n]
-- it moves, or Nothing if it moves none (i.e. it is the identity).
-- Partial: the generator list must be non-empty.
findBase n (g:_) =
    case [i | i <- [1..n], i .^ g /= i] of
        []      -> Nothing
        (m : _) -> Just m
-- Deterministic Schreier-Sims.  The worker keeps levels whose Schreier
-- generators still need checking ("bads") separate from levels already
-- verified ("goods"), alongside a stack of pending Schreier-generator
-- lists; a level moves to "goods" only when its generator list is spent.
-- NOTE(review): partial when gs is empty or consists of identities —
-- 'findBase' then has no matching clause / yields Nothing for the
-- irrefutable Just pattern.
schreierSimsAlgorithm n gs =
    let
        Just b = findBase n gs
        t = cosetRepsGx gs b
        sgs = schreierGeneratorsGx n gs (t,b)
    in
        doSchreierSimsAlgorithm ([(b,gs,t)], []) [sgs]
    where
        doSchreierSimsAlgorithm ([], structure) _ = structure
        doSchreierSimsAlgorithm (bad:bads, goods) ([]:sgs) = doSchreierSimsAlgorithm (bads,bad:goods) sgs
        doSchreierSimsAlgorithm (bads, goods) ((h:hs):sgs) =
            let
                (bs,_,ts) = unzip3 goods
            in
                case sift (bs,ts) h of
                    Nothing -> doSchreierSimsAlgorithm (bads, goods) (hs : sgs)
                    Just (_,h') ->
                        if null goods
                        then
                            -- no verified levels yet: start a fresh structure from the residue
                            let goods' = schreierSimsAlgorithm n [h']
                            in doSchreierSimsAlgorithm (bads, goods') (hs : sgs)
                        else
                            -- add the residue to the topmost verified level and re-check it
                            let
                                (b,s,t) = head goods
                                s' = h':s
                                t' = updateCosetRepsGx s h' t
                                -- t' = cosetRepsGx n s' b
                                newsgs = schreierGeneratorsGx n s' (t',b)
                            in doSchreierSimsAlgorithm ((b,s',t') : bads, tail goods) (newsgs : hs : sgs)
-- Base points and transversals from the deterministic algorithm,
-- discarding the strong generating sets.
schreierSimsTransversals n gs = (basePoints, transversals)
    where
        (basePoints, _, transversals) = unzip3 (schreierSimsAlgorithm n gs)
-- USING THE SCHREIER-SIMS TRANSVERSALS

-- g is a member of the group iff it sifts all the way to the identity.
isMemberSS (bs,ts) g =
    case sift (bs,ts) g of
        Nothing -> True
        Just _  -> False
-- By Lagrange's thm, every g <- G can be written uniquely as g = r_m ... r_1 (Seress p56)
-- Note that we have to reverse the list of coset representatives
-- All group elements: one representative from each transversal, multiplied
-- deepest level first (hence the 'reverse').
eltsSS (_,ts) = map (product . reverse) (cartProd ts')
    where ts' = map rbvalues ts

-- Group order = product of the transversal (orbit) sizes.
orderSS (_,ts) = product (map (toInteger . rbcount) ts)
-- One uniformly random group element: pick a random representative from
-- each transversal and multiply (delegating to randomEltSS').
randomEltSS (_,ts) seed = randomEltSS' (sizes, repLists) seed
    where
        repLists = map rbvalues ts
        sizes    = map length repLists
-- Worker: given transversal sizes and representative lists, pick one
-- representative per level and return (their product, the advanced seed).
randomEltSS' (ns,ts) seed = doRandomElt (ns,ts) [] seed
    where
        doRandomElt (n:ns,t:ts) rs seed =
            let
                (i,seed') = randomInt n seed
                r = t !! i
            in doRandomElt (ns,ts) (r:rs) seed'
        doRandomElt ([],[]) rs seed = (product rs, seed)
-- Note that we don't need to reverse the rs at the end because we did it as we went along
-- Infinite stream of random group elements, threading the seed from one
-- draw to the next.  The initial dummy pair (identity 0, seed) is only a
-- starting point for 'iterate' and is dropped by 'tail'.
randomEltsSS (_,ts) seed =
    let
        ts' = map rbvalues ts
        ns = map length ts'
    in map fst (tail (iterate (\(_,seed) -> randomEltSS' (ns,ts') seed) (identity 0, seed)))
{-
onePointStabilizer (b:bs,_:rs) = (bs, map (map (stabilize b)) rs)
where
stabilize b g = map (\i -> if i < b then i else i-1) (take (b-1) g ++ drop b g)
-}
|
nfjinjing/bench-euler
|
src/Math/SchreierSims.hs
|
bsd-3-clause
| 7,376 | 66 | 18 | 1,691 | 2,376 | 1,283 | 1,093 | 126 | 5 |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving #-}
module Clay.Property where
import Control.Arrow (second)
import Control.Monad.Writer
import Data.Fixed (Fixed, HasResolution (resolution), showFixed)
import Data.List (partition, sort)
import Data.Maybe
import Data.String
import Data.Text (Text, replace)
-- | A piece of CSS text that may carry vendor-prefixed variants: either a
-- list of (prefix, text) pairs, or a single plain text.
data Prefixed = Prefixed { unPrefixed :: [(Text, Text)] } | Plain { unPlain :: Text }
  deriving (Show, Eq)

-- String literals become unprefixed ('Plain') values.
instance IsString Prefixed where
  fromString s = Plain (fromString s)

-- Concatenation combines the prefix sets; see 'merge' for the rules.
-- NOTE(review): on GHC >= 8.4 this also requires a Semigroup instance.
instance Monoid Prefixed where
  mempty = ""
  mappend = merge
-- | Combine two 'Prefixed' values.  Plain<>Plain appends the texts;
-- Plain<>Prefixed distributes the plain text over every variant (and
-- symmetrically).  For two prefixed values only prefixes present in /both/
-- operands survive: each side is restricted to the other's prefix set and
-- sorted so the pairs line up before the texts are appended per prefix.
merge :: Prefixed -> Prefixed -> Prefixed
merge (Plain x ) (Plain y ) = Plain (x <> y)
merge (Plain x ) (Prefixed ys) = Prefixed (map (second (x <>)) ys)
merge (Prefixed xs) (Plain y ) = Prefixed (map (second (<> y)) xs)
merge (Prefixed xs) (Prefixed ys) =
  let kys = map fst ys
      kxs = map fst xs
  in Prefixed $ zipWith (\(p, a) (_, b) -> (p, a <> b))
       (sort (fst (partition ((`elem` kys) . fst) xs)))
       (sort (fst (partition ((`elem` kxs) . fst) ys)))
-- | Extract the unprefixed rendering: for a prefix list, the entry keyed by
-- the empty prefix (or empty text when absent); for 'Plain', the text itself.
plain :: Prefixed -> Text
plain (Plain t)      = t
plain (Prefixed kvs) = fromMaybe mempty (lookup mempty kvs)
-- | Wrap a text in double quotes, escaping any embedded double quotes.
quote :: Text -> Text
quote t = dq <> replace dq escapedDq t <> dq
  where
    dq        = fromString "\""
    escapedDq = fromString "\\\""
-------------------------------------------------------------------------------

-- | A CSS property key.  The phantom type parameter records what kind of
-- value the key expects.
newtype Key a = Key { unKeys :: Prefixed }
  deriving (Show, Monoid, IsString)

-- | Forget a key's phantom value type.
cast :: Key a -> Key ()
cast (Key k) = Key k
-------------------------------------------------------------------------------

-- | A CSS property value, possibly vendor-prefixed.
newtype Value = Value { unValue :: Prefixed }
  deriving (Show, Monoid, IsString, Eq)

-- | Types that can be rendered as a CSS 'Value'.
class Val a where
  value :: a -> Value

instance Val Text where
  value t = Value (Plain t)

-- | Text that renders double-quoted, with inner quotes escaped via 'quote'.
newtype Literal = Literal Text
  deriving (Show, Monoid, IsString)

instance Val Literal where
  value (Literal t) = Value (Plain (quote t))

instance Val Integer where
  value = fromString . show

-- | Resolution tag: fixed-point with 5 decimal places (resolution 100000).
data E5 = E5
instance HasResolution E5 where resolution _ = 100000

-- Doubles render via 'Fixed E5', i.e. decimal with up to 5 places and no
-- scientific notation.
instance Val Double where
  value = fromString . showFixed' . realToFrac
    where
      showFixed' :: Fixed E5 -> String
      showFixed' = showFixed True

instance Val Value where
  value = id

-- 'Nothing' renders as the empty value.
instance Val a => Val (Maybe a) where
  value Nothing = ""
  value (Just a) = value a
-- Pairs render space-separated.
instance (Val a, Val b) => Val (a, b) where
  value (a, b) = value a <> " " <> value b

instance (Val a, Val b) => Val (Either a b) where
  value (Left a) = value a
  value (Right a) = value a

-- Lists render comma-separated (see the local 'intersperse' below).
instance Val a => Val [a] where
  value xs = intersperse "," (map value xs)
-- | Concatenate a list with a separator between elements, directly into the
-- monoid (unlike 'Data.List.intersperse', which rebuilds a list).
-- Returns 'mempty' for the empty list.
intersperse :: Monoid a => a -> [a] -> a
intersperse _ []       = mempty
intersperse sep (y:ys) = go y ys
  where
    go acc []     = acc
    go acc (z:zs) = go (acc <> sep <> z) zs
-------------------------------------------------------------------------------

-- | Render a list of values space-separated (e.g. shorthand properties), in
-- contrast to the comma-separated @Val [a]@ instance above.
noCommas :: Val a => [a] -> Value
noCommas xs = intersperse " " (map value xs)

infixr !

-- | Pairing operator: sugar for writing @key ! val@ pairs.
(!) :: a -> b -> (a, b)
(!) = (,)
|
psfblair/clay
|
src/Clay/Property.hs
|
bsd-3-clause
| 2,850 | 0 | 17 | 603 | 1,192 | 637 | 555 | 73 | 1 |
module BinTree where
import Data.String
import VizHaskell.TreeRepresentation(TreeRepresentable(..))
-- | A binary search tree: empty, or a node with left subtree, payload and
-- right subtree.  The search-tree ordering is maintained by 'insert'.
data BSTree a = Empty | Node (BSTree a) a (BSTree a)
  deriving (Show, Eq, Ord)
-- Rendering hooks for VizHaskell: node payload, the visible subtrees, and
-- a class label chosen by which children are present.
instance TreeRepresentable BSTree where
  contents Empty        = Nothing
  contents (Node _ v _) = Just v

  children Empty                = []
  children (Node Empty _ Empty) = []
  children (Node l _ Empty)     = [l]
  children (Node Empty _ r)     = [r]
  children (Node l _ r)         = [l, r]

  className Empty            = Nothing
  className (Node _ _ Empty) = Just "negra"
  className (Node Empty _ _) = Just "roja"
  className (Node _ _ _)     = Just "azul"
-- | Insert a value into a binary search tree; duplicates leave the tree
-- unchanged.
insert :: Ord a => a -> BSTree a -> BSTree a
insert v Empty = Node Empty v Empty
insert v t@(Node l u r) =
  case compare v u of
    EQ -> t
    LT -> Node (insert v l) u r
    GT -> Node l u (insert v r)
-- Example trees.  foldr means the last list element is inserted first
-- (so for test1, 6 goes into the empty tree and 10 is inserted last).
test1 :: BSTree Int
test1 = foldr insert Empty [10,9,3,1,4,5,6]

test2 :: BSTree Char
test2 = foldr insert Empty "Hello, World"

-- Trees of trees: ordering between subtrees uses the derived Ord on BSTree.
test3 :: BSTree (BSTree Int)
test3 = foldr insert Empty [
        test1,
        foldr insert Empty [7, 2, 8],
        foldr insert Empty [10, 30, 20, 40]
        ]

test4 :: BSTree (BSTree Char)
test4 = foldr insert Empty [test2, foldr insert Empty "What's up?"]
{-
Examples of use:
putStrLn $ render RepresentationString test1
putStrLn $ render (RepresentationTree RepresentationString) test1
putStrLn $ render RepresentationString test2
putStrLn $ render (RepresentationTree RepresentationString) test2
putStrLn $ render RepresentationString test3
putStrLn $ render (RepresentationTree (RepresentationTree RepresentationString)) test3
putStrLn $ render RepresentationString test4
putStrLn $ render (RepresentationTree (RepresentationTree RepresentationString)) test4
-}
|
robarago/vizhaskell
|
hs/BinTree.hs
|
gpl-2.0
| 1,719 | 0 | 8 | 364 | 578 | 298 | 280 | 34 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Mnemonic where
import qualified Data.Map as Map
import Data.Maybe
import Data.Word
import Data.Bits
import Control.Arrow (first)
import qualified Data.ByteString as B8
import qualified Data.ByteString.Base16 as B16
-- Electrum compatible mnemonic conversions
type W = B8.ByteString
-- list of words from http://en.wiktionary.org/wiki/Wiktionary:Frequency_lists/Contemporary_poetry
-- The encoding assumes exactly n (= 1626) words: 'mn_encode_w32' produces
-- indices in [0, n), and 'wAt' / 'wordIndex' rely on this list's length
-- and order.  Do not reorder or edit entries — doing so silently changes
-- every mnemonic.
wl :: [W]
wl = [
    "like","just","love","know","never","want","time","out","there","make",
    "look","eye","down","only","think","heart","back","then","into","about",
    "more","away","still","them","take","thing","even","through","long","always",
    "world","too","friend","tell","try","hand","thought","over","here","other",
    "need","smile","again","much","cry","been","night","ever","little","said",
    "end","some","those","around","mind","people","girl","leave","dream","left",
    "turn","myself","give","nothing","really","off","before","something","find","walk",
    "wish","good","once","place","ask","stop","keep","watch","seem","everything",
    "wait","got","yet","made","remember","start","alone","run","hope","maybe",
    "believe","body","hate","after","close","talk","stand","own","each","hurt",
    "help","home","god","soul","new","many","two","inside","should","true",
    "first","fear","mean","better","play","another","gone","change","use","wonder",
    "someone","hair","cold","open","best","any","behind","happen","water","dark",
    "laugh","stay","forever","name","work","show","sky","break","came","deep",
    "door","put","black","together","upon","happy","such","great","white","matter",
    "fill","past","please","burn","cause","enough","touch","moment","soon","voice",
    "scream","anything","stare","sound","red","everyone","hide","kiss","truth","death",
    "beautiful","mine","blood","broken","very","pass","next","forget","tree","wrong",
    "air","mother","understand","lip","hit","wall","memory","sleep","free","high",
    "realize","school","might","skin","sweet","perfect","blue","kill","breath","dance",
    "against","fly","between","grow","strong","under","listen","bring","sometimes","speak",
    "pull","person","become","family","begin","ground","real","small","father","sure",
    "feet","rest","young","finally","land","across","today","different","guy","line",
    "fire","reason","reach","second","slowly","write","eat","smell","mouth","step",
    "learn","three","floor","promise","breathe","darkness","push","earth","guess","save",
    "song","above","along","both","color","house","almost","sorry","anymore","brother",
    "okay","dear","game","fade","already","apart","warm","beauty","heard","notice",
    "question","shine","began","piece","whole","shadow","secret","street","within","finger",
    "point","morning","whisper","child","moon","green","story","glass","kid","silence",
    "since","soft","yourself","empty","shall","angel","answer","baby","bright","dad",
    "path","worry","hour","drop","follow","power","war","half","flow","heaven",
    "act","chance","fact","least","tired","children","near","quite","afraid","rise",
    "sea","taste","window","cover","nice","trust","lot","sad","cool","force",
    "peace","return","blind","easy","ready","roll","rose","drive","held","music",
    "beneath","hang","mom","paint","emotion","quiet","clear","cloud","few","pretty",
    "bird","outside","paper","picture","front","rock","simple","anyone","meant","reality",
    "road","sense","waste","bit","leaf","thank","happiness","meet","men","smoke",
    "truly","decide","self","age","book","form","alive","carry","escape","damn",
    "instead","able","ice","minute","throw","catch","leg","ring","course","goodbye",
    "lead","poem","sick","corner","desire","known","problem","remind","shoulder","suppose",
    "toward","wave","drink","jump","woman","pretend","sister","week","human","joy",
    "crack","grey","pray","surprise","dry","knee","less","search","bleed","caught",
    "clean","embrace","future","king","son","sorrow","chest","hug","remain","sat",
    "worth","blow","daddy","final","parent","tight","also","create","lonely","safe",
    "cross","dress","evil","silent","bone","fate","perhaps","anger","class","scar",
    "snow","tiny","tonight","continue","control","dog","edge","mirror","month","suddenly",
    "comfort","given","loud","quickly","gaze","plan","rush","stone","town","battle",
    "ignore","spirit","stood","stupid","yours","brown","build","dust","hey","kept",
    "pay","phone","twist","although","ball","beyond","hidden","nose","taken","fail",
    "float","pure","somehow","wash","wrap","angry","cheek","creature","forgotten","heat",
    "rip","single","space","special","weak","whatever","yell","anyway","blame","job",
    "choose","country","curse","drift","echo","figure","grew","laughter","neck","suffer",
    "worse","yeah","disappear","foot","forward","knife","mess","somewhere","stomach","storm",
    "beg","idea","lift","offer","breeze","field","five","often","simply","stuck",
    "win","allow","confuse","enjoy","except","flower","seek","strength","calm","grin",
    "gun","heavy","hill","large","ocean","shoe","sigh","straight","summer","tongue",
    "accept","crazy","everyday","exist","grass","mistake","sent","shut","surround","table",
    "ache","brain","destroy","heal","nature","shout","sign","stain","choice","doubt",
    "glance","glow","mountain","queen","stranger","throat","tomorrow","city","either","fish",
    "flame","rather","shape","spin","spread","ash","distance","finish","image","imagine",
    "important","nobody","shatter","warmth","became","feed","flesh","funny","lust","shirt",
    "trouble","yellow","attention","bare","bite","money","protect","amaze","appear","born",
    "choke","completely","daughter","fresh","friendship","gentle","probably","six","deserve","expect",
    "grab","middle","nightmare","river","thousand","weight","worst","wound","barely","bottle",
    "cream","regret","relationship","stick","test","crush","endless","fault","itself","rule",
    "spill","art","circle","join","kick","mask","master","passion","quick","raise",
    "smooth","unless","wander","actually","broke","chair","deal","favorite","gift","note",
    "number","sweat","box","chill","clothes","lady","mark","park","poor","sadness",
    "tie","animal","belong","brush","consume","dawn","forest","innocent","pen","pride",
    "stream","thick","clay","complete","count","draw","faith","press","silver","struggle",
    "surface","taught","teach","wet","bless","chase","climb","enter","letter","melt",
    "metal","movie","stretch","swing","vision","wife","beside","crash","forgot","guide",
    "haunt","joke","knock","plant","pour","prove","reveal","steal","stuff","trip",
    "wood","wrist","bother","bottom","crawl","crowd","fix","forgive","frown","grace",
    "loose","lucky","party","release","surely","survive","teacher","gently","grip","speed",
    "suicide","travel","treat","vein","written","cage","chain","conversation","date","enemy",
    "however","interest","million","page","pink","proud","sway","themselves","winter","church",
    "cruel","cup","demon","experience","freedom","pair","pop","purpose","respect","shoot",
    "softly","state","strange","bar","birth","curl","dirt","excuse","lord","lovely",
    "monster","order","pack","pants","pool","scene","seven","shame","slide","ugly",
    "among","blade","blonde","closet","creek","deny","drug","eternity","gain","grade",
    "handle","key","linger","pale","prepare","swallow","swim","tremble","wheel","won",
    "cast","cigarette","claim","college","direction","dirty","gather","ghost","hundred","loss",
    "lung","orange","present","swear","swirl","twice","wild","bitter","blanket","doctor",
    "everywhere","flash","grown","knowledge","numb","pressure","radio","repeat","ruin","spend",
    "unknown","buy","clock","devil","early","false","fantasy","pound","precious","refuse",
    "sheet","teeth","welcome","add","ahead","block","bury","caress","content","depth",
    "despite","distant","marry","purple","threw","whenever","bomb","dull","easily","grasp",
    "hospital","innocence","normal","receive","reply","rhyme","shade","someday","sword","toe",
    "visit","asleep","bought","center","consider","flat","hero","history","ink","insane",
    "muscle","mystery","pocket","reflection","shove","silently","smart","soldier","spot","stress",
    "train","type","view","whether","bus","energy","explain","holy","hunger","inch",
    "magic","mix","noise","nowhere","prayer","presence","shock","snap","spider","study",
    "thunder","trail","admit","agree","bag","bang","bound","butterfly","cute","exactly",
    "explode","familiar","fold","further","pierce","reflect","scent","selfish","sharp","sink",
    "spring","stumble","universe","weep","women","wonderful","action","ancient","attempt","avoid",
    "birthday","branch","chocolate","core","depress","drunk","especially","focus","fruit","honest",
    "match","palm","perfectly","pillow","pity","poison","roar","shift","slightly","thump",
    "truck","tune","twenty","unable","wipe","wrote","coat","constant","dinner","drove",
    "egg","eternal","flight","flood","frame","freak","gasp","glad","hollow","motion",
    "peer","plastic","root","screen","season","sting","strike","team","unlike","victim",
    "volume","warn","weird","attack","await","awake","built","charm","crave","despair",
    "fought","grant","grief","horse","limit","message","ripple","sanity","scatter","serve",
    "split","string","trick","annoy","blur","boat","brave","clearly","cling","connect",
    "fist","forth","imagination","iron","jock","judge","lesson","milk","misery","nail",
    "naked","ourselves","poet","possible","princess","sail","size","snake","society","stroke",
    "torture","toss","trace","wise","bloom","bullet","cell","check","cost","darling",
    "during","footstep","fragile","hallway","hardly","horizon","invisible","journey","midnight","mud",
    "nod","pause","relax","shiver","sudden","value","youth","abuse","admire","blink",
    "breast","bruise","constantly","couple","creep","curve","difference","dumb","emptiness","gotta",
    "honor","plain","planet","recall","rub","ship","slam","soar","somebody","tightly",
    "weather","adore","approach","bond","bread","burst","candle","coffee","cousin","crime",
    "desert","flutter","frozen","grand","heel","hello","language","level","movement","pleasure",
    "powerful","random","rhythm","settle","silly","slap","sort","spoken","steel","threaten",
    "tumble","upset","aside","awkward","bee","blank","board","button","card","carefully",
    "complain","crap","deeply","discover","drag","dread","effort","entire","fairy","giant",
    "gotten","greet","illusion","jeans","leap","liquid","march","mend","nervous","nine",
    "replace","rope","spine","stole","terror","accident","apple","balance","boom","childhood",
    "collect","demand","depression","eventually","faint","glare","goal","group","honey","kitchen",
    "laid","limb","machine","mere","mold","murder","nerve","painful","poetry","prince",
    "rabbit","shelter","shore","shower","soothe","stair","steady","sunlight","tangle","tease",
    "treasure","uncle","begun","bliss","canvas","cheer","claw","clutch","commit","crimson",
    "crystal","delight","doll","existence","express","fog","football","gay","goose","guard",
    "hatred","illuminate","mass","math","mourn","rich","rough","skip","stir","student",
    "style","support","thorn","tough","yard","yearn","yesterday","advice","appreciate","autumn",
    "bank","beam","bowl","capture","carve","collapse","confusion","creation","dove","feather",
    "girlfriend","glory","government","harsh","hop","inner","loser","moonlight","neighbor","neither",
    "peach","pig","praise","screw","shield","shimmer","sneak","stab","subject","throughout",
    "thrown","tower","twirl","wow","army","arrive","bathroom","bump","cease","cookie",
    "couch","courage","dim","guilt","howl","hum","husband","insult","led","lunch",
    "mock","mostly","natural","nearly","needle","nerd","peaceful","perfection","pile","price",
    "remove","roam","sanctuary","serious","shiny","shook","sob","stolen","tap","vain",
    "void","warrior","wrinkle","affection","apologize","blossom","bounce","bridge","cheap","crumble",
    "decision","descend","desperately","dig","dot","flip","frighten","heartbeat","huge","lazy",
    "lick","odd","opinion","process","puzzle","quietly","retreat","score","sentence","separate",
    "situation","skill","soak","square","stray","taint","task","tide","underneath","veil",
    "whistle","anywhere","bedroom","bid","bloody","burden","careful","compare","concern","curtain",
    "decay","defeat","describe","double","dreamer","driver","dwell","evening","flare","flicker",
    "grandma","guitar","harm","horrible","hungry","indeed","lace","melody","monkey","nation",
    "object","obviously","rainbow","salt","scratch","shown","shy","stage","stun","third",
    "tickle","useless","weakness","worship","worthless","afternoon","beard","boyfriend","bubble","busy",
    "certain","chin","concrete","desk","diamond","doom","drawn","due","felicity","freeze",
    "frost","garden","glide","harmony","hopefully","hunt","jealous","lightning","mama","mercy",
    "peel","physical","position","pulse","punch","quit","rant","respond","salty","sane",
    "satisfy","savior","sheep","slept","social","sport","tuck","utter","valley","wolf",
    "aim","alas","alter","arrow","awaken","beaten","belief","brand","ceiling","cheese",
    "clue","confidence","connection","daily","disguise","eager","erase","essence","everytime","expression",
    "fan","flag","flirt","foul","fur","giggle","glorious","ignorance","law","lifeless",
    "measure","mighty","muse","north","opposite","paradise","patience","patient","pencil","petal",
    "plate","ponder","possibly","practice","slice","spell","stock","strife","strip","suffocate",
    "suit","tender","tool","trade","velvet","verse","waist","witch","aunt","bench",
    "bold","cap","certainly","click","companion","creator","dart","delicate","determine","dish",
    "dragon","drama","drum","dude","everybody","feast","forehead","former","fright","fully",
    "gas","hook","hurl","invite","juice","manage","moral","possess","raw","rebel",
    "royal","scale","scary","several","slight","stubborn","swell","talent","tea","terrible",
    "thread","torment","trickle","usually","vast","violence","weave","acid","agony","ashamed",
    "awe","belly","blend","blush","character","cheat","common","company","coward","creak",
    "danger","deadly","defense","define","depend","desperate","destination","dew","duck","dusty",
    "embarrass","engine","example","explore","foe","freely","frustrate","generation","glove","guilty",
    "health","hurry","idiot","impossible","inhale","jaw","kingdom","mention","mist","moan",
    "mumble","mutter","observe","ode","pathetic","pattern","pie","prefer","puff","rape",
    "rare","revenge","rude","scrape","spiral","squeeze","strain","sunset","suspend","sympathy",
    "thigh","throne","total","unseen","weapon","weary"
    ]
-- | Shorthand for 'fromIntegral', used heavily in the word-size conversions.
fi :: (Integral a, Num b) => a -> b
fi x = fromIntegral x
-- | Radix of the encoding: the number of words in 'wl'.  Each 32-bit word
-- is written as three base-1626 digits (see 'mn_encode_w32').
n :: Num a => a
n = 1626

-- | Word at a given index.  Partial ('!!'): the index must lie in [0, n),
-- which the `mod` n arithmetic in the encoder guarantees.
wAt :: Word32 -> W
wAt i = wl !! fi i
-- | Pack bytes into 32-bit words, four at a time, most significant byte
-- first (big-endian).  Errors unless the length is a multiple of 4.
w8_to_w32 :: [Word8] -> [Word32]
w8_to_w32 [] = []
w8_to_w32 (a:b:c:d:rest) = packed : w8_to_w32 rest
  where
    packed = (fromIntegral a `shiftL` 24)
         .|. (fromIntegral b `shiftL` 16)
         .|. (fromIntegral c `shiftL` 8)
         .|. fromIntegral d
w8_to_w32 _ = error "w8_to_w32: expects the number of input bytes to be a multiple of 4 (8 hexadecimal digits)"
-- | Split each 32-bit word into four bytes, most significant first
-- (big-endian); inverse of 'w8_to_w32'.
w32_to_w8 :: [Word32] -> [Word8]
w32_to_w8 [] = []
w32_to_w8 (w:rest) = map byteAt [24, 16, 8, 0] ++ w32_to_w8 rest
  where byteAt k = fromIntegral (w `shiftR` k)
-- | Hex string -> mnemonic words.  The second component of the result is
-- whatever 'B16.decode' returns alongside the decoded bytes — presumably
-- the undecodable remainder, non-empty on invalid hex (confirm against the
-- base16-bytestring version in use; newer versions return Either instead).
hex_to_mn :: B8.ByteString -> ([W], B8.ByteString)
hex_to_mn = first bytes_to_mn . B16.decode

-- | Raw bytes -> mnemonic words: regroup bytes into 32-bit words, encode
-- each as three indices, then look the indices up in 'wl'.
bytes_to_mn :: B8.ByteString -> [W]
bytes_to_mn = map wAt . mn_encode_w32 . w8_to_w32 . B8.unpack
-- | Expand each 32-bit word into three word-list indices: base-n digits
-- with a running offset, matching Electrum's mnemonic scheme.
mn_encode_w32 :: [Word32] -> [Word32]
mn_encode_w32 [] = []
mn_encode_w32 (x:rest) = i1 : i2 : i3 : mn_encode_w32 rest
  where
    i1 = x `mod` n
    i2 = (x `div` n + i1) `mod` n
    i3 = (x `div` (n * n) + i2) `mod` n
-- | Mnemonic words -> hex string (inverse of 'hex_to_mn').
mn_to_hex :: [W] -> B8.ByteString
mn_to_hex = B16.encode . mn_to_bytes

-- | Mnemonic words -> raw bytes: indices via 'wordIndex', decoded three at
-- a time and repacked into bytes.  Calls 'error' on unknown words or a
-- word count that is not a multiple of 3.
mn_to_bytes :: [W] -> B8.ByteString
mn_to_bytes = B8.pack . w32_to_w8 . mn_decode_w32 . map wordIndex
-- | Invert the per-word step of 'mn_encode_w32' for one triple of indices.
-- Works in 'Int' (not 'Word32') because (w2 - w1) and (w3 - w2) can go
-- negative; Haskell's `mod` with a positive divisor still yields a value
-- in [0, n), which is exactly what the inversion needs.
mn_decode_3_int :: Int -> Int -> Int -> Int
mn_decode_3_int w1 w2 w3 = x
  where x = w1 + n * ((w2 - w1) `mod` n) + n * n * ((w3 - w2) `mod` n)
-- | Collapse word indices back into 32-bit words, three indices at a time
-- (inverse of 'mn_encode_w32').  The list length must be a multiple of 3.
-- Fix: the fall-through error message previously blamed 'mn_to_bytes';
-- it now names this function, which is also reachable from other callers.
mn_decode_w32 :: [Word32] -> [Word32]
mn_decode_w32 [] = []
mn_decode_w32 (w1:w2:w3:ws) = x : mn_decode_w32 ws
  where x = fi $ mn_decode_3_int (fi w1) (fi w2) (fi w3)
mn_decode_w32 _ = error "mn_decode_w32: unexpected number of words, not a multiple of 3"
-- | Position of a word in 'wl'.  Calls 'error' on unknown words.
wordIndex :: W -> Word32
wordIndex w = fromMaybe (error "wordIndex") $ Map.lookup w wordIndexTable

-- | Word -> index lookup table.  Hoisted to a top-level CAF so it is built
-- once and shared by every 'wordIndex' call; as a local `where` binding it
-- risked being rebuilt on each call (sharing then depended on the
-- compiler's full-laziness pass).
wordIndexTable :: Map.Map W Word32
wordIndexTable = Map.fromList $ zip wl [0..]
|
np/hx
|
Mnemonic.hs
|
gpl-3.0
| 16,474 | 0 | 14 | 988 | 5,781 | 3,751 | 2,030 | 221 | 1 |
module Test.GameKeys.Keys.API where
import Test.Tasty(TestTree, testGroup)
import Control.Monad.Writer(execWriter)
import Test.Tasty.QuickCheck as QC(testProperty)
import GameKeys.Keys.API
import GameKeys.Keys.Header
-- | All QuickCheck property groups for GameKeys.Keys.API.
props :: TestTree
props = testGroup "GameKeys.Keys.API"
    [ bindstoProps
    , nestButtonProps
    , keyGroupProps
    , toStateProps
    ]
-- | Properties for 'bindsto': the writer output is exactly one 'Button'.
-- Fix: the original stated the first property twice verbatim (same label,
-- same body); the duplicate has been removed.
bindstoProps :: TestTree
bindstoProps = testGroup "bindsto properties"
    [ QC.testProperty "a `bindsto` b == [Button a b]" $ \(i, s) ->
        execWriter (i `bindsto` s) == [Button i s]
    , QC.testProperty "a `bindsto` b /= [Button a b] == False" $ \(i, s) ->
        (execWriter (i `bindsto` s) /= [Button i s]) == False
    ]
-- | Properties for 'nestButton': nesting a binding wraps it in a
-- 'PrefixButton'.  Fix: the original repeated both property bodies under a
-- second pair of labels ("nestedButton is same as button" / "... == False"
-- were byte-for-byte identical to the first two); duplicates removed.
nestButtonProps :: TestTree
nestButtonProps = testGroup "nestButton properties"
    [ QC.testProperty "nestButton == [PrefixButton i [Button]]" $
        \(i, ni, ns) -> execWriter (nestButton i $ ni `bindsto` ns) ==
                        [PrefixButton i [Button ni ns]]
    , QC.testProperty "nestButton /= [PrefixButton i [Button]] == False" $
        \(i, ni, ns) -> (execWriter (nestButton i $ ni `bindsto` ns) /=
                        [PrefixButton i [Button ni ns]]) == False
    ]
-- | Properties for 'keyGroup': grouping a binding pairs the group name
-- with the binding's buttons in the writer output.
keyGroupProps :: TestTree
keyGroupProps = testGroup "keyGroup properties"
    [ QC.testProperty "Group a [b] == [(a,[b])]" $ \(name, i, s) ->
        execWriter (keyGroup name $ i `bindsto` s) == [(name, [Button i s])]
    , QC.testProperty "Group a [b] /= [(a,[b])] == False" $ \(name, i, s) ->
        (execWriter (keyGroup name $ i `bindsto` s) /= [(name, [Button i s])])
        == False
    ]
-- | Properties for 'toState': note the argument order swap — @toState
-- state i s@ emits @StateButton i state s@.
toStateProps :: TestTree
toStateProps = testGroup "toStateProps"
    [ QC.testProperty "toState a b c == [StateButton b a c]" $ \(state, i, s) ->
        execWriter (toState state i s) == [StateButton i state s]
    , QC.testProperty "toState a b c /= [StateButton b a c] == False" $
        \(state, i, s) -> (execWriter (toState state i s) /=
        [StateButton i state s]) == False
    ]
|
kwrooijen/game-keys
|
tests/Test/GameKeys/Keys/API.hs
|
gpl-3.0
| 2,486 | 0 | 16 | 629 | 818 | 454 | 364 | 48 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DynamoDB.BatchWriteItem
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The /BatchWriteItem/ operation puts or deletes multiple items in one or
-- more tables. A single call to /BatchWriteItem/ can write up to 16 MB of
-- data, which can comprise as many as 25 put or delete requests.
-- Individual items to be written can be as large as 400 KB.
--
-- /BatchWriteItem/ cannot update items. To update items, use the
-- /UpdateItem/ API.
--
-- The individual /PutItem/ and /DeleteItem/ operations specified in
-- /BatchWriteItem/ are atomic; however /BatchWriteItem/ as a whole is not.
-- If any requested operations fail because the table\'s provisioned
-- throughput is exceeded or an internal processing failure occurs, the
-- failed operations are returned in the /UnprocessedItems/ response
-- parameter. You can investigate and optionally resend the requests.
-- Typically, you would call /BatchWriteItem/ in a loop. Each iteration
-- would check for unprocessed items and submit a new /BatchWriteItem/
-- request with those unprocessed items until all items have been
-- processed.
--
-- Note that if /none/ of the items can be processed due to insufficient
-- provisioned throughput on all of the tables in the request, then
-- /BatchWriteItem/ will return a /ProvisionedThroughputExceededException/.
--
-- If DynamoDB returns any unprocessed items, you should retry the batch
-- operation on those items. However, /we strongly recommend that you use
-- an exponential backoff algorithm/. If you retry the batch operation
-- immediately, the underlying read or write requests can still fail due to
-- throttling on the individual tables. If you delay the batch operation
-- using exponential backoff, the individual requests in the batch are much
-- more likely to succeed.
--
-- For more information, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ErrorHandling.html#BatchOperations Batch Operations and Error Handling>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- With /BatchWriteItem/, you can efficiently write or delete large amounts
-- of data, such as from Amazon Elastic MapReduce (EMR), or copy data from
-- another database into DynamoDB. In order to improve performance with
-- these large-scale operations, /BatchWriteItem/ does not behave in the
-- same way as individual /PutItem/ and /DeleteItem/ calls would. For
-- example, you cannot specify conditions on individual put and delete
-- requests, and /BatchWriteItem/ does not return deleted items in the
-- response.
--
-- If you use a programming language that supports concurrency, you can use
-- threads to write items in parallel. Your application must include the
-- necessary logic to manage the threads. With languages that don\'t
-- support threading, you must update or delete the specified items one at
-- a time. In both situations, /BatchWriteItem/ provides an alternative
-- where the API performs the specified put and delete operations in
-- parallel, giving you the power of the thread pool approach without
-- having to introduce complexity into your application.
--
-- Parallel processing reduces latency, but each specified put and delete
-- request consumes the same number of write capacity units whether it is
-- processed in parallel or not. Delete operations on nonexistent items
-- consume one write capacity unit.
--
-- If one or more of the following is true, DynamoDB rejects the entire
-- batch write operation:
--
-- - One or more tables specified in the /BatchWriteItem/ request does
-- not exist.
--
-- - Primary key attributes specified on an item in the request do not
-- match those in the corresponding table\'s primary key schema.
--
-- - You try to perform multiple operations on the same item in the same
-- /BatchWriteItem/ request. For example, you cannot put and delete the
-- same item in the same /BatchWriteItem/ request.
--
-- - There are more than 25 requests in the batch.
--
-- - Any individual item in a batch exceeds 400 KB.
--
-- - The total request size exceeds 16 MB.
--
--
-- /See:/ <http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html AWS API Reference> for BatchWriteItem.
module Network.AWS.DynamoDB.BatchWriteItem
(
-- * Creating a Request
batchWriteItem
, BatchWriteItem
-- * Request Lenses
, bwiReturnConsumedCapacity
, bwiReturnItemCollectionMetrics
, bwiRequestItems
-- * Destructuring the Response
, batchWriteItemResponse
, BatchWriteItemResponse
-- * Response Lenses
, bwirsItemCollectionMetrics
, bwirsConsumedCapacity
, bwirsUnprocessedItems
, bwirsResponseStatus
) where
import Network.AWS.DynamoDB.Types
import Network.AWS.DynamoDB.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents the input of a /BatchWriteItem/ operation.
--
-- /See:/ 'batchWriteItem' smart constructor.
data BatchWriteItem = BatchWriteItem'
    { _bwiReturnConsumedCapacity :: !(Maybe ReturnConsumedCapacity)
      -- ^ Whether to report consumed capacity in the response.
    , _bwiReturnItemCollectionMetrics :: !(Maybe ReturnItemCollectionMetrics)
      -- ^ Whether to report item-collection metrics in the response.
    , _bwiRequestItems :: !(Map Text (List1 WriteRequest))
      -- ^ Table name mapped to the (non-empty) list of write requests for it.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'BatchWriteItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bwiReturnConsumedCapacity'
--
-- * 'bwiReturnItemCollectionMetrics'
--
-- * 'bwiRequestItems'
batchWriteItem
    :: BatchWriteItem
-- Construct the request positionally, in field-declaration order: both
-- optional reporting flags unset and an empty request-item map.
batchWriteItem =
    BatchWriteItem'
        Nothing -- _bwiReturnConsumedCapacity
        Nothing -- _bwiReturnItemCollectionMetrics
        mempty  -- _bwiRequestItems
-- | Undocumented member.
bwiReturnConsumedCapacity :: Lens' BatchWriteItem (Maybe ReturnConsumedCapacity)
bwiReturnConsumedCapacity = lens getter setter
  where
    getter = _bwiReturnConsumedCapacity
    setter s a = s { _bwiReturnConsumedCapacity = a }
-- | Determines whether item collection metrics are returned. If set to
-- 'SIZE', the response includes statistics about item collections, if any,
-- that were modified during the operation. If set to 'NONE' (the default),
-- no statistics are returned.
bwiReturnItemCollectionMetrics :: Lens' BatchWriteItem (Maybe ReturnItemCollectionMetrics)
bwiReturnItemCollectionMetrics = lens getter setter
  where
    getter = _bwiReturnItemCollectionMetrics
    setter s a = s { _bwiReturnItemCollectionMetrics = a }
-- | A map of one or more table names and, for each table, a list of
-- operations to be performed (/DeleteRequest/ or /PutRequest/). Each
-- element in the map consists of the following:
--
-- - /DeleteRequest/ - Perform a /DeleteItem/ operation on the specified
-- item. The item to be deleted is identified by a /Key/ subelement:
--
-- - /Key/ - A map of primary key attribute values that uniquely
-- identify the item. Each entry in this map consists of an
-- attribute name and an attribute value. For each primary key, you
-- must provide /all/ of the key attributes. For example, with a
-- hash type primary key, you only need to provide the hash
-- attribute. For a hash-and-range type primary key, you must
-- provide /both/ the hash attribute and the range attribute.
--
-- - /PutRequest/ - Perform a /PutItem/ operation on the specified item.
-- The item to be put is identified by an /Item/ subelement:
--
-- - /Item/ - A map of attributes and their values. Each entry in
-- this map consists of an attribute name and an attribute value.
-- Attribute values must not be null; string and binary type
-- attributes must have lengths greater than zero; and set type
-- attributes must not be empty. Requests that contain empty values
-- will be rejected with a /ValidationException/ exception.
--
-- If you specify any attributes that are part of an index key,
-- then the data types for those attributes must match those of the
-- schema in the table\'s attribute definition.
--
bwiRequestItems :: Lens' BatchWriteItem (HashMap Text (NonEmpty WriteRequest))
-- The '_Map' iso converts between the wire 'Map'/'List1' representation and
-- the user-facing 'HashMap'/'NonEmpty' types.
bwiRequestItems = lens getter setter . _Map
  where
    getter = _bwiRequestItems
    setter s a = s { _bwiRequestItems = a }
-- How the request is issued and the response decoded.
instance AWSRequest BatchWriteItem where
        type Rs BatchWriteItem = BatchWriteItemResponse
        -- The operation is a JSON POST against the DynamoDB endpoint.
        request = postJSON dynamoDB
        -- Decode the JSON body field by field; '.!@ mempty' supplies an
        -- empty default when a key is absent, and the HTTP status code is
        -- captured via 'fromEnum s' as the last response field.
        response
          = receiveJSON
              (\ s h x ->
                 BatchWriteItemResponse' <$>
                   (x .?> "ItemCollectionMetrics" .!@ mempty) <*>
                   (x .?> "ConsumedCapacity" .!@ mempty)
                     <*> (x .?> "UnprocessedItems" .!@ mempty)
                     <*> (pure (fromEnum s)))
instance ToHeaders BatchWriteItem where
        -- Headers are the same for every request: X-Amz-Target selects the
        -- operation and Content-Type selects the AWS JSON 1.0 encoding.
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("DynamoDB_20120810.BatchWriteItem" :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.0" :: ByteString)])
instance ToJSON BatchWriteItem where
        -- Optional fields are serialised only when present ('catMaybes'
        -- drops the 'Nothing's); "RequestItems" is always emitted.
        toJSON BatchWriteItem'{..}
          = object
              (catMaybes
                 [("ReturnConsumedCapacity" .=) <$>
                    _bwiReturnConsumedCapacity,
                  ("ReturnItemCollectionMetrics" .=) <$>
                    _bwiReturnItemCollectionMetrics,
                  Just ("RequestItems" .= _bwiRequestItems)])
instance ToPath BatchWriteItem where
        -- Every BatchWriteItem request is posted to the service root.
        toPath _ = "/"
instance ToQuery BatchWriteItem where
        -- No query-string parameters are used by this operation.
        toQuery _ = mempty
-- | Represents the output of a /BatchWriteItem/ operation.
--
-- /See:/ 'batchWriteItemResponse' smart constructor.
data BatchWriteItemResponse = BatchWriteItemResponse'
    { _bwirsItemCollectionMetrics :: !(Maybe (Map Text [ItemCollectionMetrics]))
      -- ^ Per-table metrics for item collections touched by the writes.
    , _bwirsConsumedCapacity :: !(Maybe [ConsumedCapacity])
      -- ^ Capacity consumed, when requested via ReturnConsumedCapacity.
    , _bwirsUnprocessedItems :: !(Maybe (Map Text (List1 WriteRequest)))
      -- ^ Requests the service did not process; resubmit these directly.
    , _bwirsResponseStatus :: !Int
      -- ^ HTTP status code of the response.
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'BatchWriteItemResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bwirsItemCollectionMetrics'
--
-- * 'bwirsConsumedCapacity'
--
-- * 'bwirsUnprocessedItems'
--
-- * 'bwirsResponseStatus'
batchWriteItemResponse
    :: Int -- ^ 'bwirsResponseStatus'
    -> BatchWriteItemResponse
-- Construct the response positionally, in field-declaration order: all
-- optional payload fields unset, only the status code supplied.
batchWriteItemResponse pResponseStatus_ =
    BatchWriteItemResponse'
        Nothing          -- _bwirsItemCollectionMetrics
        Nothing          -- _bwirsConsumedCapacity
        Nothing          -- _bwirsUnprocessedItems
        pResponseStatus_ -- _bwirsResponseStatus
-- | A list of tables that were processed by /BatchWriteItem/ and, for each
-- table, information about any item collections that were affected by
-- individual /DeleteItem/ or /PutItem/ operations.
--
-- Each entry consists of the following subelements:
--
-- - /ItemCollectionKey/ - The hash key value of the item collection.
-- This is the same as the hash key of the item.
--
-- - /SizeEstimateRange/ - An estimate of item collection size, expressed
-- in GB. This is a two-element array containing a lower bound and an
-- upper bound for the estimate. The estimate includes the size of all
-- the items in the table, plus the size of all attributes projected
-- into all of the local secondary indexes on the table. Use this
-- estimate to measure whether a local secondary index is approaching
-- its size limit.
--
-- The estimate is subject to change over time; therefore, do not rely
-- on the precision or accuracy of the estimate.
--
bwirsItemCollectionMetrics :: Lens' BatchWriteItemResponse (HashMap Text [ItemCollectionMetrics])
-- '_Default' substitutes 'mempty' for an absent field and '_Map' converts
-- the wire 'Map' to the user-facing 'HashMap'.
bwirsItemCollectionMetrics = lens getter setter . _Default . _Map
  where
    getter = _bwirsItemCollectionMetrics
    setter s a = s { _bwirsItemCollectionMetrics = a }
-- | The capacity units consumed by the operation.
--
-- Each element consists of:
--
-- - /TableName/ - The table that consumed the provisioned throughput.
--
-- - /CapacityUnits/ - The total number of capacity units consumed.
--
bwirsConsumedCapacity :: Lens' BatchWriteItemResponse [ConsumedCapacity]
-- '_Default' substitutes 'mempty' for an absent field; '_Coerce' converts
-- between the stored and exposed list representations.
bwirsConsumedCapacity = lens getter setter . _Default . _Coerce
  where
    getter = _bwirsConsumedCapacity
    setter s a = s { _bwirsConsumedCapacity = a }
-- | A map of tables and requests against those tables that were not
-- processed. The /UnprocessedItems/ value is in the same form as
-- /RequestItems/, so you can provide this value directly to a subsequent
-- /BatchGetItem/ operation. For more information, see /RequestItems/ in
-- the Request Parameters section.
--
-- Each /UnprocessedItems/ entry consists of a table name and, for that
-- table, a list of operations to perform (/DeleteRequest/ or
-- /PutRequest/).
--
-- - /DeleteRequest/ - Perform a /DeleteItem/ operation on the specified
-- item. The item to be deleted is identified by a /Key/ subelement:
--
-- - /Key/ - A map of primary key attribute values that uniquely
-- identify the item. Each entry in this map consists of an
-- attribute name and an attribute value.
--
-- - /PutRequest/ - Perform a /PutItem/ operation on the specified item.
-- The item to be put is identified by an /Item/ subelement:
--
-- - /Item/ - A map of attributes and their values. Each entry in
-- this map consists of an attribute name and an attribute value.
-- Attribute values must not be null; string and binary type
-- attributes must have lengths greater than zero; and set type
-- attributes must not be empty. Requests that contain empty values
-- will be rejected with a /ValidationException/ exception.
--
-- If you specify any attributes that are part of an index key,
-- then the data types for those attributes must match those of the
-- schema in the table\'s attribute definition.
--
-- If there are no unprocessed items remaining, the response contains an
-- empty /UnprocessedItems/ map.
bwirsUnprocessedItems :: Lens' BatchWriteItemResponse (HashMap Text (NonEmpty WriteRequest))
-- '_Default' substitutes 'mempty' for an absent field and '_Map' converts
-- the wire 'Map'/'List1' to the user-facing 'HashMap'/'NonEmpty'.
bwirsUnprocessedItems = lens getter setter . _Default . _Map
  where
    getter = _bwirsUnprocessedItems
    setter s a = s { _bwirsUnprocessedItems = a }
-- | The response status code.
bwirsResponseStatus :: Lens' BatchWriteItemResponse Int
bwirsResponseStatus = lens getter setter
  where
    getter = _bwirsResponseStatus
    setter s a = s { _bwirsResponseStatus = a }
|
fmapfmapfmap/amazonka
|
amazonka-dynamodb/gen/Network/AWS/DynamoDB/BatchWriteItem.hs
|
mpl-2.0
| 15,060 | 0 | 15 | 3,061 | 1,172 | 754 | 418 | 113 | 1 |
module Handler.Trademarks where
import Import
import Widgets.Doc
getTrademarksR :: Handler Html
-- Render the static "Trademarks" document: set the page title, then embed
-- the document body, inside the default site layout.
getTrademarksR =
    defaultLayout $
        snowdriftTitle "Trademarks" >> renderDoc "Trademarks"
|
chreekat/snowdrift
|
Handler/Trademarks.hs
|
agpl-3.0
| 193 | 0 | 8 | 30 | 43 | 22 | 21 | 7 | 1 |
{-# OPTIONS_GHC -fno-implicit-prelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Dynamic
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The Dynamic interface provides basic support for dynamic types.
--
-- Operations for injecting values of arbitrary type into
-- a dynamically typed value, Dynamic, are provided, together
-- with operations for converting dynamic values into a concrete
-- (monomorphic) type.
--
-----------------------------------------------------------------------------
module Data.Dynamic
(
-- Module Data.Typeable re-exported for convenience
module Data.Typeable,
-- * The @Dynamic@ type
Dynamic, -- abstract, instance of: Show, Typeable
-- * Converting to and from @Dynamic@
toDyn, -- :: Typeable a => a -> Dynamic
fromDyn, -- :: Typeable a => Dynamic -> a -> a
fromDynamic, -- :: Typeable a => Dynamic -> Maybe a
-- * Applying functions of dynamic type
dynApply,
dynApp,
dynTypeRep
) where
import Data.Typeable
import Data.Maybe
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.Show
import GHC.Err
import GHC.Num
#endif
#ifdef __HUGS__
import Hugs.Prelude
import Hugs.IO
import Hugs.IORef
import Hugs.IOExts
#endif
#ifdef __GLASGOW_HASKELL__
-- Coerce a value to an arbitrary other type with no runtime check.  Safety
-- is the caller's obligation: in this module it is only applied after (or
-- before) the 'TypeRep' stored in a 'Dynamic' has been matched.
unsafeCoerce :: a -> b
unsafeCoerce = unsafeCoerce#
#endif
#ifdef __NHC__
import NonStdUnsafeCoerce (unsafeCoerce)
import NHC.IOExtras (IORef,newIORef,readIORef,writeIORef,unsafePerformIO)
#endif
#include "Typeable.h"
-------------------------------------------------------------
--
-- The type Dynamic
--
-------------------------------------------------------------
{-|
A value of type 'Dynamic' is an object encapsulated together with its type.
A 'Dynamic' may only represent a monomorphic value; an attempt to
create a value of type 'Dynamic' from a polymorphically-typed
expression will result in an ambiguity error (see 'toDyn').
'Show'ing a value of type 'Dynamic' returns a pretty-printed representation
of the object\'s type; useful for debugging.
-}
#ifndef __HUGS__
data Dynamic = Dynamic TypeRep Obj
#endif
INSTANCE_TYPEABLE0(Dynamic,dynamicTc,"Dynamic")
instance Show Dynamic where
   -- Only the type representation is rendered; the payload itself cannot
   -- be shown without knowing its type.
   showsPrec _ (Dynamic rep _) rest = "<<" ++ showsPrec 0 rep (">>" ++ rest)
#ifdef __GLASGOW_HASKELL__
type Obj = forall a . a
-- Dummy type to hold the dynamically typed value.
--
-- In GHC's new eval/apply execution model this type must
-- be polymorphic. It can't be a constructor, because then
-- GHC will use the constructor convention when evaluating it,
-- and this will go wrong if the object is really a function. On
-- the other hand, if we use a polymorphic type, GHC will use
-- a fallback convention for evaluating it that works for all types.
-- (using a function type here would also work).
#elif !defined(__HUGS__)
data Obj = Obj
#endif
-- | Converts an arbitrary value into an object of type 'Dynamic'.
--
-- The type of the object must be an instance of 'Typeable', which
-- ensures that only monomorphically-typed objects may be converted to
-- 'Dynamic'. To convert a polymorphic object into 'Dynamic', give it
-- a monomorphic type signature. For example:
--
-- > toDyn (id :: Int -> Int)
--
toDyn :: Typeable a => a -> Dynamic
-- Pair the value's runtime type representation with its erased payload.
toDyn value = Dynamic (typeOf value) (unsafeCoerce value)
-- | Converts a 'Dynamic' object back into an ordinary Haskell value of
-- the correct type. See also 'fromDynamic'.
fromDyn :: Typeable a
        => Dynamic      -- ^ the dynamically-typed object
        -> a            -- ^ a default value
        -> a            -- ^ returns: the value of the first argument, if
                        -- it has the correct type, otherwise the value of
                        -- the second argument.
fromDyn (Dynamic rep payload) dflt =
  if rep == typeOf dflt then unsafeCoerce payload else dflt
-- | Converts a 'Dynamic' object back into an ordinary Haskell value of
-- the correct type. See also 'fromDyn'.
fromDynamic
        :: Typeable a
        => Dynamic      -- ^ the dynamically-typed object
        -> Maybe a      -- ^ returns: @'Just' a@, if the dynamically-typed
                        -- object has the correct type (and @a@ is its value),
                        -- or 'Nothing' otherwise.
fromDynamic (Dynamic rep payload)
  -- 'result' is only forced after its type representation has been
  -- checked against the stored one, so the coercion is safe.
  | rep == typeOf result = Just result
  | otherwise            = Nothing
  where
    result = unsafeCoerce payload
-- (f::(a->b)) `dynApply` (x::a) = (f a)::b
-- (f::(a->b)) `dynApply` (x::a) = (f a)::b
dynApply :: Dynamic -> Dynamic -> Maybe Dynamic
-- Succeeds exactly when 'funResultTy' says the first argument's type is a
-- function type accepting the second argument's type.
dynApply (Dynamic tf f) (Dynamic tx x) =
  case funResultTy tf tx of
    Nothing   -> Nothing
    Just tres -> Just (Dynamic tres (unsafeCoerce f x))
dynApp :: Dynamic -> Dynamic -> Dynamic
-- Partial variant of 'dynApply': calls 'error' when the types do not line
-- up instead of returning 'Nothing'.
dynApp f x = case dynApply f x of
               Nothing -> error ("Type error in dynamic application.\n" ++
                                 "Can't apply function " ++ show f ++
                                 " to argument " ++ show x)
               Just r  -> r
-- Extract the runtime type representation stored in a 'Dynamic'.
dynTypeRep :: Dynamic -> TypeRep
dynTypeRep (Dynamic rep _) = rep
|
alekar/hugs
|
packages/base/Data/Dynamic.hs
|
bsd-3-clause
| 5,080 | 30 | 14 | 1,036 | 687 | 391 | 296 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.Smart
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2008..2009] Sean Lee
-- [2009..2014] Trevor L. McDonell
-- [2013..2014] Robert Clifton-Everest
-- [2014..2014] Frederik M. Madsen
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- This modules defines the AST of the user-visible embedded language using more
-- convenient higher-order abstract syntax (instead of de Bruijn indices).
-- Moreover, it defines smart constructors to construct programs.
--
module Data.Array.Accelerate.Smart (
-- * HOAS AST
Acc(..), PreAcc(..), Exp(..), PreExp(..), Boundary(..), Stencil(..), Level,
PreSeq(..), Seq(..),
-- * Smart constructors for literals
constant,
-- * Smart constructors and destructors for tuples
tup2, tup3, tup4, tup5, tup6, tup7, tup8, tup9, tup10, tup11, tup12, tup13, tup14, tup15,
untup2, untup3, untup4, untup5, untup6, untup7, untup8, untup9, untup10, untup11, untup12, untup13, untup14, untup15,
atup2, atup3, atup4, atup5, atup6, atup7, atup8, atup9, atup10, atup11, atup12, atup13, atup14, atup15,
unatup2, unatup3, unatup4, unatup5, unatup6, unatup7, unatup8, unatup9, unatup10, unatup11, unatup12, unatup13, unatup14, unatup15,
stup2, stup3, stup4, stup5, stup6, stup7, stup8, stup9, stup10, stup11, stup12, stup13, stup14, stup15,
-- * Smart constructors for constants
mkMinBound, mkMaxBound, mkPi,
mkSin, mkCos, mkTan,
mkAsin, mkAcos, mkAtan,
mkSinh, mkCosh, mkTanh,
mkAsinh, mkAcosh, mkAtanh,
mkExpFloating, mkSqrt, mkLog,
mkFPow, mkLogBase,
mkTruncate, mkRound, mkFloor, mkCeiling,
mkAtan2,
-- * Smart constructors for primitive functions
mkAdd, mkSub, mkMul, mkNeg, mkAbs, mkSig, mkQuot, mkRem, mkQuotRem, mkIDiv, mkMod, mkDivMod,
mkBAnd, mkBOr, mkBXor, mkBNot, mkBShiftL, mkBShiftR, mkBRotateL, mkBRotateR,
mkFDiv, mkRecip, mkLt, mkGt, mkLtEq, mkGtEq, mkEq, mkNEq, mkMax, mkMin,
mkLAnd, mkLOr, mkLNot, mkIsNaN,
-- * Smart constructors for type coercion functions
mkOrd, mkChr, mkBoolToInt, mkFromIntegral,
-- * Auxiliary functions
($$), ($$$), ($$$$), ($$$$$),
-- Debugging
showPreAccOp, showPreExpOp, showPreSeqOp
) where
-- standard library
import Prelude hiding ( exp )
import Data.List
import Data.Typeable
-- friends
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.Array.Sugar
import Data.Array.Accelerate.Product
import Data.Array.Accelerate.AST hiding (
PreOpenAcc(..), OpenAcc(..), Acc, Stencil(..), PreOpenExp(..), OpenExp, PreExp, Exp, Seq, PreOpenSeq(..), Producer(..), Consumer(..),
showPreAccOp, showPreExpOp )
import qualified Data.Array.Accelerate.AST as AST
-- Array computations
-- ------------------
-- The level of lambda-bound variables. The root has level 0; then it increases with each bound
-- variable — i.e., it is the same as the size of the environment at the defining occurrence.
--
type Level = Int
-- | Array-valued collective computations without a recursive knot
--
data PreAcc acc seq exp as where
    -- Needed for conversion to de Bruijn form
  Atag          :: Arrays as
                => Level                        -- environment size at defining occurrence
                -> PreAcc acc seq exp as

    -- Compose two array-computation functions over the given input.
  Pipe          :: (Arrays as, Arrays bs, Arrays cs)
                => (Acc as -> acc bs)
                -> (Acc bs -> acc cs)
                -> acc as
                -> PreAcc acc seq exp cs

    -- Backend-specific foreign function, paired with a pure Accelerate
    -- function of the same type (presumably the fallback implementation;
    -- see 'AST.Aforeign').
  Aforeign      :: (Arrays arrs, Arrays a, Foreign f)
                => f arrs a
                -> (Acc arrs -> Acc a)
                -> acc arrs
                -> PreAcc acc seq exp a

    -- Array-level conditional on a scalar boolean.
  Acond         :: Arrays as
                => exp Bool
                -> acc as
                -> acc as
                -> PreAcc acc seq exp as

    -- Iterate the body while the predicate holds, from the initial value.
  Awhile        :: Arrays arrs
                => (Acc arrs -> acc (Scalar Bool))
                -> (Acc arrs -> acc arrs)
                -> acc arrs
                -> PreAcc acc seq exp arrs

    -- Tuple of array computations.
  Atuple        :: (Arrays arrs, IsAtuple arrs)
                => Atuple acc (TupleRepr arrs)
                -> PreAcc acc seq exp arrs

    -- Project one component out of an array tuple.
  Aprj          :: (Arrays arrs, IsAtuple arrs, Arrays a)
                => TupleIdx (TupleRepr arrs) a
                -> acc arrs
                -> PreAcc acc seq exp a

    -- Embed a concrete (host) array value.
  Use           :: Arrays arrs
                => arrs
                -> PreAcc acc seq exp arrs

    -- Lift a scalar expression to a singleton array.
  Unit          :: Elt e
                => exp e
                -> PreAcc acc seq exp (Scalar e)

    -- Create an array of the given shape from an index function.
  Generate      :: (Shape sh, Elt e)
                => exp sh
                -> (Exp sh -> exp e)
                -> PreAcc acc seq exp (Array sh e)

    -- Impose a new shape on the elements of an array.
  Reshape       :: (Shape sh, Shape sh', Elt e)
                => exp sh
                -> acc (Array sh' e)
                -> PreAcc acc seq exp (Array sh e)

  Replicate     :: (Slice slix, Elt e,
                    Typeable (SliceShape slix), Typeable (FullShape slix))
                    -- the Typeable constraints shouldn't be necessary as they are implied by
                    -- 'SliceIx slix' — unfortunately, the (old) type checker doesn't grok that
                => exp slix
                -> acc (Array (SliceShape slix) e)
                -> PreAcc acc seq exp (Array (FullShape slix) e)

  Slice         :: (Slice slix, Elt e,
                    Typeable (SliceShape slix), Typeable (FullShape slix))
                    -- the Typeable constraints shouldn't be necessary as they are implied by
                    -- 'SliceIx slix' — unfortunately, the (old) type checker doesn't grok that
                => acc (Array (FullShape slix) e)
                -> exp slix
                -> PreAcc acc seq exp (Array (SliceShape slix) e)

    -- Apply a scalar function to every element.
  Map           :: (Shape sh, Elt e, Elt e')
                => (Exp e -> exp e')
                -> acc (Array sh e)
                -> PreAcc acc seq exp (Array sh e')

    -- Combine two arrays elementwise.
  ZipWith       :: (Shape sh, Elt e1, Elt e2, Elt e3)
                => (Exp e1 -> Exp e2 -> exp e3)
                -> acc (Array sh e1)
                -> acc (Array sh e2)
                -> PreAcc acc seq exp (Array sh e3)

    -- Reduce along the innermost dimension, with an initial element.
  Fold          :: (Shape sh, Elt e)
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Array (sh:.Int) e)
                -> PreAcc acc seq exp (Array sh e)

    -- Variant of 'Fold' with no initial element.
  Fold1         :: (Shape sh, Elt e)
                => (Exp e -> Exp e -> exp e)
                -> acc (Array (sh:.Int) e)
                -> PreAcc acc seq exp (Array sh e)

    -- Segmented reduction along the innermost dimension.
  FoldSeg       :: (Shape sh, Elt e, Elt i, IsIntegral i)
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Array (sh:.Int) e)
                -> acc (Segments i)
                -> PreAcc acc seq exp (Array (sh:.Int) e)

    -- Variant of 'FoldSeg' with no initial element.
  Fold1Seg      :: (Shape sh, Elt e, Elt i, IsIntegral i)
                => (Exp e -> Exp e -> exp e)
                -> acc (Array (sh:.Int) e)
                -> acc (Segments i)
                -> PreAcc acc seq exp (Array (sh:.Int) e)

    -- Left-to-right prefix scan over a vector, with an initial element.
  Scanl         :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e)

    -- Variant of 'Scanl' also producing the final reduction value.
  Scanl'        :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e, Scalar e)

    -- Variant of 'Scanl' with no initial element.
  Scanl1        :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e)

    -- Right-to-left prefix scan over a vector, with an initial element.
  Scanr         :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e)

    -- Variant of 'Scanr' also producing the final reduction value.
  Scanr'        :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> exp e
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e, Scalar e)

    -- Variant of 'Scanr' with no initial element.
  Scanr1        :: Elt e
                => (Exp e -> Exp e -> exp e)
                -> acc (Vector e)
                -> PreAcc acc seq exp (Vector e)

    -- Forward permutation: combination function, default array, index
    -- mapping, and source array.
  Permute       :: (Shape sh, Shape sh', Elt e)
                => (Exp e -> Exp e -> exp e)
                -> acc (Array sh' e)
                -> (Exp sh -> exp sh')
                -> acc (Array sh e)
                -> PreAcc acc seq exp (Array sh' e)

    -- Backwards permutation: result shape and index mapping into the source.
  Backpermute   :: (Shape sh, Shape sh', Elt e)
                => exp sh'
                -> (Exp sh' -> exp sh)
                -> acc (Array sh e)
                -> PreAcc acc seq exp (Array sh' e)

    -- Stencil computation with the given boundary condition.
  Stencil       :: (Shape sh, Elt a, Elt b, Stencil sh a stencil)
                => (stencil -> exp b)
                -> Boundary a
                -> acc (Array sh a)
                -> PreAcc acc seq exp (Array sh b)

    -- Two-array stencil computation, one boundary condition per source.
  Stencil2      :: (Shape sh, Elt a, Elt b, Elt c,
                   Stencil sh a stencil1, Stencil sh b stencil2)
                => (stencil1 -> stencil2 -> exp c)
                -> Boundary a
                -> acc (Array sh a)
                -> Boundary b
                -> acc (Array sh b)
                -> PreAcc acc seq exp (Array sh c)

    -- Embed a sequence computation, yielding its final result.
  Collect       :: Arrays arrs
                => seq arrs
                -> PreAcc acc seq exp arrs
data PreSeq acc seq exp arrs where
  -- Convert the given Haskell-list of arrays to a sequence.
  StreamIn :: Arrays a
           => [a]
           -> PreSeq acc seq exp [a]

  -- Convert the given array to a sequence.
  --
  -- The division 'slsix' splits the source array's dimensions into an
  -- iteration space and an element shape: the split dimensions are iterated
  -- over, and the remaining dimensions form the shape of each sequence
  -- element.  For example, with
  --
  --   slix = Z :. All :. Split :. All :. All :. Split
  --
  -- the two 'Split' axes make up the iteration space and the three 'All'
  -- axes make up the element shape.
  --
  ToSeq :: (Division slsix, Elt e, slix ~ DivisionSlice slsix, Slice slix,
            Typeable (FullShape slix), Typeable (SliceShape slix))
        => slsix
        -> acc (Array (FullShape slix) e)
        -> PreSeq acc seq exp [Array (SliceShape slix) e]

  -- Apply the given the given function to all elements of the given sequence.
  MapSeq :: (Arrays a, Arrays b)
         => (Acc a -> acc b)
         -> seq [a]
         -> PreSeq acc seq exp [b]

  -- Apply a given binary function pairwise to all elements of the given sequences.
  -- The length of the result is the length of the shorter of the two argument
  -- arrays.
  ZipWithSeq :: (Arrays a, Arrays b, Arrays c)
             => (Acc a -> Acc b -> acc c)
             -> seq [a]
             -> seq [b]
             -> PreSeq acc seq exp [c]

  -- ScanSeq (+) a0 x. Scan a sequence x by combining each element
  -- using the given binary operation (+). (+) must be associative:
  --
  --   Forall a b c. (a + b) + c = a + (b + c),
  --
  -- and a0 must be the identity element for (+):
  --
  --   Forall a. a0 + a = a = a + a0.
  --
  ScanSeq :: Elt a
          => (Exp a -> Exp a -> exp a)
          -> exp a
          -> seq [Scalar a]
          -> PreSeq acc seq exp [Scalar a]

  -- FoldSeq (+) a0 x. Fold a sequence x by combining each element
  -- using the given binary operation (+). (+) must be associative:
  --
  --   Forall a b c. (a + b) + c = a + (b + c),
  --
  -- and a0 must be the identity element for (+):
  --
  --   Forall a. a0 + a = a = a + a0.
  --
  FoldSeq :: Elt a
          => (Exp a -> Exp a -> exp a)
          -> exp a
          -> seq [Scalar a]
          -> PreSeq acc seq exp (Scalar a)

  -- FoldSeqFlatten f a0 x. A specialized version of FoldSeqAct
  -- where reduction with the companion operator corresponds to
  -- flattening. f must be semi-associative, with vector append (++)
  -- as the companion operator:
  --
  --   Forall b sh1 a1 sh2 a2.
  --     f (f b sh1 a1) sh2 a2 = f b (sh1 ++ sh2) (a1 ++ a2).
  --
  -- It is common to ignore the shape vectors, yielding the usual
  -- semi-associativity law:
  --
  --   f b a _ = b + a,
  --
  -- for some (+) satisfying:
  --
  --   Forall b a1 a2. (b + a1) + a2 = b + (a1 ++ a2).
  --
  FoldSeqFlatten :: (Arrays a, Shape sh, Elt e)
                 => (Acc a -> Acc (Vector sh) -> Acc (Vector e) -> acc a)
                 -> acc a
                 -> seq [Array sh e]
                 -> PreSeq acc seq exp a

  -- Tuple up the results of a sequence computation. Note that the Arrays
  -- constraint requires that the elements of the tuple are Arrays, not
  -- streams ([]).
  Stuple :: (Arrays arrs, IsAtuple arrs)
         => Atuple (seq) (TupleRepr arrs)
         -> PreSeq acc seq exp arrs
-- |Array-valued collective computations
--
newtype Acc a = Acc (PreAcc Acc Seq Exp a)
-- |Array-valued sequence computations
--
newtype Seq a = Seq (PreSeq Acc Seq Exp a)
deriving instance Typeable Acc
deriving instance Typeable Seq
-- Embedded expressions of the surface language
-- --------------------------------------------
-- HOAS expressions mirror the constructors of `AST.OpenExp', but with the `Tag' constructor instead
-- of variables in the form of de Bruijn indices. Moreover, HOAS expression use n-tuples and the
-- type class 'Elt' to constrain element types, whereas `AST.OpenExp' uses nested pairs and the GADT
-- 'TupleType'.
--
-- | Scalar expressions to parametrise collective array operations, themselves parameterised over
-- the type of collective array operations.
--
data PreExp acc seq exp t where
    -- Needed for conversion to de Bruijn form
  Tag           :: Elt t
                => Level                        -- environment size at defining occurrence
                -> PreExp acc seq exp t

    -- All the same constructors as 'AST.Exp'
    -- Constant value.
  Const         :: Elt t
                => t
                -> PreExp acc seq exp t

    -- Scalar tuple construction.
  Tuple         :: (Elt t, IsTuple t)
                => Tuple exp (TupleRepr t)
                -> PreExp acc seq exp t

    -- Projection of a tuple component.
  Prj           :: (Elt t, IsTuple t, Elt e)
                => TupleIdx (TupleRepr t) e
                -> exp t
                -> PreExp acc seq exp e

    -- Empty shape/index.
  IndexNil      :: PreExp acc seq exp Z

    -- Extend a shape/index by one dimension.
  IndexCons     :: (Slice sl, Elt a)
                => exp sl
                -> exp a
                -> PreExp acc seq exp (sl:.a)

    -- Innermost component of a shape/index.
  IndexHead     :: (Slice sl, Elt a)
                => exp (sl:.a)
                -> PreExp acc seq exp a

    -- All but the innermost component of a shape/index.
  IndexTail     :: (Slice sl, Elt a)
                => exp (sl:.a)
                -> PreExp acc seq exp sl

    -- Wildcard index matching any shape.
  IndexAny      :: Shape sh
                => PreExp acc seq exp (Any sh)

    -- Linearise a multi-dimensional index within the given shape.
  ToIndex       :: Shape sh
                => exp sh
                -> exp sh
                -> PreExp acc seq exp Int

    -- Inverse of 'ToIndex': recover a multi-dimensional index.
  FromIndex     :: Shape sh
                => exp sh
                -> exp Int
                -> PreExp acc seq exp sh

    -- Scalar conditional.
  Cond          :: Elt t
                => exp Bool
                -> exp t
                -> exp t
                -> PreExp acc seq exp t

    -- Scalar loop: iterate the body while the predicate holds.
  While         :: Elt t
                => (Exp t -> exp Bool)
                -> (Exp t -> exp t)
                -> exp t
                -> PreExp acc seq exp t

    -- Primitive constant.
  PrimConst     :: Elt t
                => PrimConst t
                -> PreExp acc seq exp t

    -- Application of a primitive scalar function.
  PrimApp       :: (Elt a, Elt r)
                => PrimFun (a -> r)
                -> exp a
                -> PreExp acc seq exp r

    -- Multi-dimensional array indexing.
  Index         :: (Shape sh, Elt t)
                => acc (Array sh t)
                -> exp sh
                -> PreExp acc seq exp t

    -- Array indexing with a linear index.
  LinearIndex   :: (Shape sh, Elt t)
                => acc (Array sh t)
                -> exp Int
                -> PreExp acc seq exp t

    -- Extent of the given array.
  Shape         :: (Shape sh, Elt e)
                => acc (Array sh e)
                -> PreExp acc seq exp sh

    -- Number of elements denoted by a shape.
  ShapeSize     :: Shape sh
                => exp sh
                -> PreExp acc seq exp Int

    -- Intersection of two shapes.
  Intersect     :: Shape sh
                => exp sh
                -> exp sh
                -> PreExp acc seq exp sh

    -- Union of two shapes.
  Union         :: Shape sh
                => exp sh
                -> exp sh
                -> PreExp acc seq exp sh

    -- Backend-specific foreign scalar function, with a pure counterpart.
  Foreign       :: (Elt x, Elt y, Foreign f)
                => f x y
                -> (Exp x -> Exp y)             -- RCE: Using Exp instead of exp to aid in sharing recovery.
                -> exp x
                -> PreExp acc seq exp y
-- | Scalar expressions for plain array computations.
--
newtype Exp t = Exp (PreExp Acc Seq Exp t)
deriving instance Typeable Exp
-- Smart constructors and destructors for array tuples
-- ---------------------------------------------------
atup2 :: (Arrays a, Arrays b) => (Acc a, Acc b) -> Acc (a, b)
atup2 (a, b) = Acc (Atuple tup)
  where tup = NilAtup `SnocAtup` a `SnocAtup` b
atup3 :: (Arrays a, Arrays b, Arrays c) => (Acc a, Acc b, Acc c) -> Acc (a, b, c)
atup3 (a, b, c) = Acc (Atuple tup)
  where tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c
atup4 :: (Arrays a, Arrays b, Arrays c, Arrays d)
      => (Acc a, Acc b, Acc c, Acc d) -> Acc (a, b, c, d)
atup4 (a, b, c, d) = Acc (Atuple tup)
  where tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
atup5 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e)
      => (Acc a, Acc b, Acc c, Acc d, Acc e) -> Acc (a, b, c, d, e)
atup5 (a, b, c, d, e) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e
atup6 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f)
      => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f) -> Acc (a, b, c, d, e, f)
atup6 (a, b, c, d, e, f) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f
atup7 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g)
      => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g)
      -> Acc (a, b, c, d, e, f, g)
atup7 (a, b, c, d, e, f, g) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g
atup8 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h)
      => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h)
      -> Acc (a, b, c, d, e, f, g, h)
atup8 (a, b, c, d, e, f, g, h) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
atup9 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i)
      => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i)
      -> Acc (a, b, c, d, e, f, g, h, i)
atup9 (a, b, c, d, e, f, g, h, i) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i
atup10 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j)
       -> Acc (a, b, c, d, e, f, g, h, i, j)
atup10 (a, b, c, d, e, f, g, h, i, j) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j
atup11 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k)
       -> Acc (a, b, c, d, e, f, g, h, i, j, k)
atup11 (a, b, c, d, e, f, g, h, i, j, k) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j `SnocAtup` k
atup12 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l)
       -> Acc (a, b, c, d, e, f, g, h, i, j, k, l)
atup12 (a, b, c, d, e, f, g, h, i, j, k, l) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j `SnocAtup` k `SnocAtup` l
atup13 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m)
       -> Acc (a, b, c, d, e, f, g, h, i, j, k, l, m)
atup13 (a, b, c, d, e, f, g, h, i, j, k, l, m) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j `SnocAtup` k `SnocAtup` l
                  `SnocAtup` m
atup14 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m, Acc n)
       -> Acc (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
atup14 (a, b, c, d, e, f, g, h, i, j, k, l, m, n) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j `SnocAtup` k `SnocAtup` l
                  `SnocAtup` m `SnocAtup` n
atup15 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n, Arrays o)
       => (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m, Acc n, Acc o)
       -> Acc (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
atup15 (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = Acc (Atuple tup)
  where
    tup = NilAtup `SnocAtup` a `SnocAtup` b `SnocAtup` c `SnocAtup` d
                  `SnocAtup` e `SnocAtup` f `SnocAtup` g `SnocAtup` h
                  `SnocAtup` i `SnocAtup` j `SnocAtup` k `SnocAtup` l
                  `SnocAtup` m `SnocAtup` n `SnocAtup` o
-- | Destructor: split a 2-tuple of arrays into its component computations.
-- Components are recovered by projection ('Aprj'); the deepest index is the
-- first tuple position in the snoc-list representation.
unatup2 :: (Arrays a, Arrays b) => Acc (a, b) -> (Acc a, Acc b)
unatup2 t =
  ( Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )

-- | Destructor for a 3-tuple of arrays.
unatup3 :: (Arrays a, Arrays b, Arrays c) => Acc (a, b, c) -> (Acc a, Acc b, Acc c)
unatup3 t =
  ( Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )

-- | Destructor for a 4-tuple of arrays.
unatup4
    :: (Arrays a, Arrays b, Arrays c, Arrays d)
    => Acc (a, b, c, d) -> (Acc a, Acc b, Acc c, Acc d)
unatup4 t =
  ( Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )

-- | Destructor for a 5-tuple of arrays.
unatup5
    :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e)
    => Acc (a, b, c, d, e) -> (Acc a, Acc b, Acc c, Acc d, Acc e)
unatup5 t =
  ( Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )
-- | Destructor for a 6-tuple of arrays.
unatup6
    :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f)
    => Acc (a, b, c, d, e, f) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f)
unatup6 t =
  ( Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )

-- | Destructor for a 7-tuple of arrays.
unatup7
    :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g)
    => Acc (a, b, c, d, e, f, g) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g)
unatup7 t =
  ( Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )

-- | Destructor for an 8-tuple of arrays.
unatup8
    :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h)
    => Acc (a, b, c, d, e, f, g, h) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h)
unatup8 t =
  ( Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` t)
  , Acc (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` t)
  , Acc (SuccTupIdx ZeroTupIdx `Aprj` t)
  , Acc (ZeroTupIdx `Aprj` t) )
-- | Destructor for a 9-tuple of arrays. Each component is recovered by
-- projecting ('Aprj') with a de Bruijn-style tuple index; the first tuple
-- position carries the deepest index in the snoc-list representation.
-- (The index-chain depth per component is the entire logic of these
-- destructors; they are kept verbatim.)
unatup9
    :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i)
    => Acc (a, b, c, d, e, f, g, h, i) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i)
unatup9 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e )

-- | Destructor for a 10-tuple of arrays.
unatup10 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j)
         => Acc (a, b, c, d, e, f, g, h, i, j) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j)
unatup10 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)

-- | Destructor for an 11-tuple of arrays.
unatup11 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k)
         => Acc (a, b, c, d, e, f, g, h, i, j, k) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k)
unatup11 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)

-- | Destructor for a 12-tuple of arrays.
unatup12 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l)
         => Acc (a, b, c, d, e, f, g, h, i, j, k, l) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l)
unatup12 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)

-- | Destructor for a 13-tuple of arrays.
unatup13 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m)
         => Acc (a, b, c, d, e, f, g, h, i, j, k, l, m) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m)
unatup13 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)

-- | Destructor for a 14-tuple of arrays.
unatup14 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n)
         => Acc (a, b, c, d, e, f, g, h, i, j, k, l, m, n) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m, Acc n)
unatup14 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)

-- | Destructor for a 15-tuple of arrays.
unatup15 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n, Arrays o)
         => Acc (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) -> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f, Acc g, Acc h, Acc i, Acc j, Acc k, Acc l, Acc m, Acc n, Acc o)
unatup15 e =
  ( Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Aprj` e
  , Acc $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Aprj` e
  , Acc $ SuccTupIdx ZeroTupIdx `Aprj` e
  , Acc $ ZeroTupIdx `Aprj` e)
-- Smart constructors for stencil reification
-- ------------------------------------------
-- Stencil reification
--
-- In the AST representation, we turn the stencil type from nested tuples of Accelerate expressions
-- into an Accelerate expression whose type is a tuple nested in the same manner. This enables us
-- to represent the stencil function as a unary function (which also only needs one de Bruijn
-- index). The various positions in the stencil are accessed via tuple indices (i.e., projections).
-- | Reify a nested-tuple stencil type as a single (tuple-typed) Accelerate
-- expression, so that a stencil function can be represented as a unary
-- function needing only one de Bruijn index. 'stencilPrj' recovers the
-- user-facing nested-tuple view by tuple projections. The first two
-- arguments are type proxies only (never evaluated).
class (Elt (StencilRepr sh stencil), AST.Stencil sh a (StencilRepr sh stencil))
  => Stencil sh a stencil where
  type StencilRepr sh stencil :: *
  stencilPrj :: sh{-dummy-} -> a{-dummy-} -> Exp (StencilRepr sh stencil) -> stencil
-- DIM1
-- | Three-element one-dimensional stencil. Components are projected from the
-- reified tuple in order: the first component carries the deepest index.
instance Elt e => Stencil DIM1 e (Exp e, Exp e, Exp e) where
  type StencilRepr DIM1 (Exp e, Exp e, Exp e)
    = (e, e, e)
  stencilPrj _ _ s = (Exp $ Prj tib s,
                      Exp $ Prj tia s,
                      Exp $ Prj tix0 s)

-- | Five-element one-dimensional stencil.
instance Elt e => Stencil DIM1 e (Exp e, Exp e, Exp e, Exp e, Exp e) where
  type StencilRepr DIM1 (Exp e, Exp e, Exp e, Exp e, Exp e)
    = (e, e, e, e, e)
  stencilPrj _ _ s = (Exp $ Prj tid s,
                      Exp $ Prj tic s,
                      Exp $ Prj tib s,
                      Exp $ Prj tia s,
                      Exp $ Prj tix0 s)

-- | Seven-element one-dimensional stencil.
instance Elt e => Stencil DIM1 e (Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e) where
  type StencilRepr DIM1 (Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e)
    = (e, e, e, e, e, e, e)
  stencilPrj _ _ s = (Exp $ Prj tif s,
                      Exp $ Prj tie s,
                      Exp $ Prj tid s,
                      Exp $ Prj tic s,
                      Exp $ Prj tib s,
                      Exp $ Prj tia s,
                      Exp $ Prj tix0 s)

-- | Nine-element one-dimensional stencil.
instance Elt e => Stencil DIM1 e (Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e)
  where
  type StencilRepr DIM1 (Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e, Exp e)
    = (e, e, e, e, e, e, e, e, e)
  stencilPrj _ _ s = (Exp $ Prj tih s,
                      Exp $ Prj tig s,
                      Exp $ Prj tif s,
                      Exp $ Prj tie s,
                      Exp $ Prj tid s,
                      Exp $ Prj tic s,
                      Exp $ Prj tib s,
                      Exp $ Prj tia s,
                      Exp $ Prj tix0 s)
-- DIM(n+1)
-- | Higher-dimensional stencils are tuples of next-lower-dimensional stencil
-- rows: project each row from the reified tuple, then recurse via the row's
-- own 'stencilPrj'. The @undefined@ argument is a shape proxy only.
-- Three rows:
instance (Stencil (sh:.Int) a row2,
          Stencil (sh:.Int) a row1,
          Stencil (sh:.Int) a row0) => Stencil (sh:.Int:.Int) a (row2, row1, row0) where
  type StencilRepr (sh:.Int:.Int) (row2, row1, row0)
    = (StencilRepr (sh:.Int) row2, StencilRepr (sh:.Int) row1, StencilRepr (sh:.Int) row0)
  stencilPrj _ a s = (stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tib s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tia s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tix0 s))

-- | Five rows of next-lower-dimensional stencils.
instance (Stencil (sh:.Int) a row1,
          Stencil (sh:.Int) a row2,
          Stencil (sh:.Int) a row3,
          Stencil (sh:.Int) a row4,
          Stencil (sh:.Int) a row5) => Stencil (sh:.Int:.Int) a (row1, row2, row3, row4, row5) where
  type StencilRepr (sh:.Int:.Int) (row1, row2, row3, row4, row5)
    = (StencilRepr (sh:.Int) row1, StencilRepr (sh:.Int) row2, StencilRepr (sh:.Int) row3,
       StencilRepr (sh:.Int) row4, StencilRepr (sh:.Int) row5)
  stencilPrj _ a s = (stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tid s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tic s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tib s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tia s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tix0 s))

-- | Seven rows of next-lower-dimensional stencils.
instance (Stencil (sh:.Int) a row1,
          Stencil (sh:.Int) a row2,
          Stencil (sh:.Int) a row3,
          Stencil (sh:.Int) a row4,
          Stencil (sh:.Int) a row5,
          Stencil (sh:.Int) a row6,
          Stencil (sh:.Int) a row7)
  => Stencil (sh:.Int:.Int) a (row1, row2, row3, row4, row5, row6, row7) where
  type StencilRepr (sh:.Int:.Int) (row1, row2, row3, row4, row5, row6, row7)
    = (StencilRepr (sh:.Int) row1, StencilRepr (sh:.Int) row2, StencilRepr (sh:.Int) row3,
       StencilRepr (sh:.Int) row4, StencilRepr (sh:.Int) row5, StencilRepr (sh:.Int) row6,
       StencilRepr (sh:.Int) row7)
  stencilPrj _ a s = (stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tif s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tie s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tid s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tic s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tib s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tia s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tix0 s))

-- | Nine rows of next-lower-dimensional stencils.
instance (Stencil (sh:.Int) a row1,
          Stencil (sh:.Int) a row2,
          Stencil (sh:.Int) a row3,
          Stencil (sh:.Int) a row4,
          Stencil (sh:.Int) a row5,
          Stencil (sh:.Int) a row6,
          Stencil (sh:.Int) a row7,
          Stencil (sh:.Int) a row8,
          Stencil (sh:.Int) a row9)
  => Stencil (sh:.Int:.Int) a (row1, row2, row3, row4, row5, row6, row7, row8, row9) where
  type StencilRepr (sh:.Int:.Int) (row1, row2, row3, row4, row5, row6, row7, row8, row9)
    = (StencilRepr (sh:.Int) row1, StencilRepr (sh:.Int) row2, StencilRepr (sh:.Int) row3,
       StencilRepr (sh:.Int) row4, StencilRepr (sh:.Int) row5, StencilRepr (sh:.Int) row6,
       StencilRepr (sh:.Int) row7, StencilRepr (sh:.Int) row8, StencilRepr (sh:.Int) row9)
  stencilPrj _ a s = (stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tih s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tig s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tif s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tie s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tid s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tic s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tib s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tia s),
                      stencilPrj (undefined::(sh:.Int)) a (Exp $ Prj tix0 s))
-- Auxiliary tuple index constants
--
-- | Index of the last component of a snoc-list tuple.
tix0 :: Elt s => TupleIdx (t, s) s
tix0 = ZeroTupIdx

-- | Index of the second-to-last component (one step deeper than 'tix0').
tia :: Elt s => TupleIdx ((t, s), s1) s
tia = SuccTupIdx tix0

-- | Index of the third-to-last component.
tib :: Elt s => TupleIdx (((t, s), s1), s2) s
tib = SuccTupIdx tia

-- | Index of the fourth-to-last component.
tic :: Elt s => TupleIdx ((((t, s), s1), s2), s3) s
tic = SuccTupIdx tib

-- | Index of the fifth-to-last component.
tid :: Elt s => TupleIdx (((((t, s), s1), s2), s3), s4) s
tid = SuccTupIdx tic

-- | Index of the sixth-to-last component.
tie :: Elt s => TupleIdx ((((((t, s), s1), s2), s3), s4), s5) s
tie = SuccTupIdx tid

-- | Index of the seventh-to-last component.
tif :: Elt s => TupleIdx (((((((t, s), s1), s2), s3), s4), s5), s6) s
tif = SuccTupIdx tie

-- | Index of the eighth-to-last component.
tig :: Elt s => TupleIdx ((((((((t, s), s1), s2), s3), s4), s5), s6), s7) s
tig = SuccTupIdx tif

-- | Index of the ninth-to-last component.
tih :: Elt s => TupleIdx (((((((((t, s), s1), s2), s3), s4), s5), s6), s7), s8) s
tih = SuccTupIdx tig
-- Smart constructors for array tuples in sequence computations
-- ---------------------------------------------------
-- | Tuple up 2 sequence computations.
stup2 :: (Arrays a, Arrays b) => (Seq a, Seq b) -> Seq (a, b)
stup2 (x1, x2) = Seq (Stuple (NilAtup `SnocAtup` x1 `SnocAtup` x2))

-- | Tuple up 3 sequence computations.
stup3 :: (Arrays a, Arrays b, Arrays c) => (Seq a, Seq b, Seq c) -> Seq (a, b, c)
stup3 (x1, x2, x3) = Seq (Stuple (NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3))

-- | Tuple up 4 sequence computations.
stup4 :: (Arrays a, Arrays b, Arrays c, Arrays d)
      => (Seq a, Seq b, Seq c, Seq d) -> Seq (a, b, c, d)
stup4 (x1, x2, x3, x4)
  = Seq (Stuple (NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3 `SnocAtup` x4))

-- | Tuple up 5 sequence computations.
stup5 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e)
      => (Seq a, Seq b, Seq c, Seq d, Seq e) -> Seq (a, b, c, d, e)
stup5 (x1, x2, x3, x4, x5)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3
                `SnocAtup` x4 `SnocAtup` x5 ))

-- | Tuple up 6 sequence computations.
stup6 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f)
      => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f) -> Seq (a, b, c, d, e, f)
stup6 (x1, x2, x3, x4, x5, x6)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3
                `SnocAtup` x4 `SnocAtup` x5 `SnocAtup` x6 ))

-- | Tuple up 7 sequence computations.
stup7 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g)
      => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g)
      -> Seq (a, b, c, d, e, f, g)
stup7 (x1, x2, x3, x4, x5, x6, x7)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3 `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6 `SnocAtup` x7 ))

-- | Tuple up 8 sequence computations.
stup8 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h)
      => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h)
      -> Seq (a, b, c, d, e, f, g, h)
stup8 (x1, x2, x3, x4, x5, x6, x7, x8)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3 `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6 `SnocAtup` x7 `SnocAtup` x8 ))

-- | Tuple up 9 sequence computations.
stup9 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i)
      => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i)
      -> Seq (a, b, c, d, e, f, g, h, i)
stup9 (x1, x2, x3, x4, x5, x6, x7, x8, x9)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3 `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6 `SnocAtup` x7 `SnocAtup` x8
                `SnocAtup` x9 ))

-- | Tuple up 10 sequence computations.
stup10 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j)
       -> Seq (a, b, c, d, e, f, g, h, i, j)
stup10 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2 `SnocAtup` x3 `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6 `SnocAtup` x7 `SnocAtup` x8
                `SnocAtup` x9 `SnocAtup` x10 ))

-- | Tuple up 11 sequence computations.
stup11 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j, Seq k)
       -> Seq (a, b, c, d, e, f, g, h, i, j, k)
stup11 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2  `SnocAtup` x3  `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6  `SnocAtup` x7  `SnocAtup` x8
                `SnocAtup` x9 `SnocAtup` x10 `SnocAtup` x11 ))

-- | Tuple up 12 sequence computations.
stup12 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j, Seq k, Seq l)
       -> Seq (a, b, c, d, e, f, g, h, i, j, k, l)
stup12 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1 `SnocAtup` x2  `SnocAtup` x3  `SnocAtup` x4
                `SnocAtup` x5 `SnocAtup` x6  `SnocAtup` x7  `SnocAtup` x8
                `SnocAtup` x9 `SnocAtup` x10 `SnocAtup` x11 `SnocAtup` x12 ))

-- | Tuple up 13 sequence computations.
stup13 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j, Seq k, Seq l, Seq m)
       -> Seq (a, b, c, d, e, f, g, h, i, j, k, l, m)
stup13 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1  `SnocAtup` x2  `SnocAtup` x3  `SnocAtup` x4
                `SnocAtup` x5  `SnocAtup` x6  `SnocAtup` x7  `SnocAtup` x8
                `SnocAtup` x9  `SnocAtup` x10 `SnocAtup` x11 `SnocAtup` x12
                `SnocAtup` x13 ))

-- | Tuple up 14 sequence computations.
stup14 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j, Seq k, Seq l, Seq m, Seq n)
       -> Seq (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
stup14 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1  `SnocAtup` x2  `SnocAtup` x3  `SnocAtup` x4
                `SnocAtup` x5  `SnocAtup` x6  `SnocAtup` x7  `SnocAtup` x8
                `SnocAtup` x9  `SnocAtup` x10 `SnocAtup` x11 `SnocAtup` x12
                `SnocAtup` x13 `SnocAtup` x14 ))

-- | Tuple up 15 sequence computations.
stup15 :: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f, Arrays g, Arrays h, Arrays i, Arrays j, Arrays k, Arrays l, Arrays m, Arrays n, Arrays o)
       => (Seq a, Seq b, Seq c, Seq d, Seq e, Seq f, Seq g, Seq h, Seq i, Seq j, Seq k, Seq l, Seq m, Seq n, Seq o)
       -> Seq (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
stup15 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
  = Seq (Stuple
      ( NilAtup `SnocAtup` x1  `SnocAtup` x2  `SnocAtup` x3  `SnocAtup` x4
                `SnocAtup` x5  `SnocAtup` x6  `SnocAtup` x7  `SnocAtup` x8
                `SnocAtup` x9  `SnocAtup` x10 `SnocAtup` x11 `SnocAtup` x12
                `SnocAtup` x13 `SnocAtup` x14 `SnocAtup` x15 ))
-- Smart constructor for literals
--
-- | Scalar expression inlet: make a Haskell value available for processing in
-- an Accelerate scalar expression.
--
-- Note that this embeds the value directly into the expression. Depending on
-- the backend used to execute the computation, this might not always be
-- desirable. For example, a backend that does external code generation may
-- embed this constant directly into the generated code, which means new code
-- will need to be generated and compiled every time the value changes. In such
-- cases, consider instead lifting scalar values into (singleton) arrays so that
-- they can be passed as an input to the computation and thus the value can
-- change without the need to generate fresh code.
--
-- | Embed a Haskell value directly as a constant scalar expression
-- (see the note above about the code-generation implications of embedding).
constant :: Elt t => t -> Exp t
constant x = Exp (Const x)
-- Smart constructor and destructors for scalar tuples
--
-- | Tuple up 2 scalar expressions.
tup2 :: (Elt a, Elt b) => (Exp a, Exp b) -> Exp (a, b)
tup2 (x1, x2) = Exp (Tuple (NilTup `SnocTup` x1 `SnocTup` x2))

-- | Tuple up 3 scalar expressions.
tup3 :: (Elt a, Elt b, Elt c) => (Exp a, Exp b, Exp c) -> Exp (a, b, c)
tup3 (x1, x2, x3) = Exp (Tuple (NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3))

-- | Tuple up 4 scalar expressions.
tup4 :: (Elt a, Elt b, Elt c, Elt d)
     => (Exp a, Exp b, Exp c, Exp d) -> Exp (a, b, c, d)
tup4 (x1, x2, x3, x4)
  = Exp (Tuple (NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3 `SnocTup` x4))

-- | Tuple up 5 scalar expressions.
tup5 :: (Elt a, Elt b, Elt c, Elt d, Elt e)
     => (Exp a, Exp b, Exp c, Exp d, Exp e) -> Exp (a, b, c, d, e)
tup5 (x1, x2, x3, x4, x5)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3
               `SnocTup` x4 `SnocTup` x5 ))

-- | Tuple up 6 scalar expressions.
tup6 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f)
     => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f) -> Exp (a, b, c, d, e, f)
tup6 (x1, x2, x3, x4, x5, x6)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3
               `SnocTup` x4 `SnocTup` x5 `SnocTup` x6 ))

-- | Tuple up 7 scalar expressions.
tup7 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g)
     => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g)
     -> Exp (a, b, c, d, e, f, g)
tup7 (x1, x2, x3, x4, x5, x6, x7)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3 `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6 `SnocTup` x7 ))

-- | Tuple up 8 scalar expressions.
tup8 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h)
     => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h)
     -> Exp (a, b, c, d, e, f, g, h)
tup8 (x1, x2, x3, x4, x5, x6, x7, x8)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3 `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6 `SnocTup` x7 `SnocTup` x8 ))

-- | Tuple up 9 scalar expressions.
tup9 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i)
     => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i)
     -> Exp (a, b, c, d, e, f, g, h, i)
tup9 (x1, x2, x3, x4, x5, x6, x7, x8, x9)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3 `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6 `SnocTup` x7 `SnocTup` x8
               `SnocTup` x9 ))

-- | Tuple up 10 scalar expressions.
tup10 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j)
      -> Exp (a, b, c, d, e, f, g, h, i, j)
tup10 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2 `SnocTup` x3 `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6 `SnocTup` x7 `SnocTup` x8
               `SnocTup` x9 `SnocTup` x10 ))

-- | Tuple up 11 scalar expressions.
tup11 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k)
      -> Exp (a, b, c, d, e, f, g, h, i, j, k)
tup11 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2  `SnocTup` x3  `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6  `SnocTup` x7  `SnocTup` x8
               `SnocTup` x9 `SnocTup` x10 `SnocTup` x11 ))

-- | Tuple up 12 scalar expressions.
tup12 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l)
      -> Exp (a, b, c, d, e, f, g, h, i, j, k, l)
tup12 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
  = Exp (Tuple
      ( NilTup `SnocTup` x1 `SnocTup` x2  `SnocTup` x3  `SnocTup` x4
               `SnocTup` x5 `SnocTup` x6  `SnocTup` x7  `SnocTup` x8
               `SnocTup` x9 `SnocTup` x10 `SnocTup` x11 `SnocTup` x12 ))

-- | Tuple up 13 scalar expressions.
tup13 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m)
      -> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m)
tup13 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
  = Exp (Tuple
      ( NilTup `SnocTup` x1  `SnocTup` x2  `SnocTup` x3  `SnocTup` x4
               `SnocTup` x5  `SnocTup` x6  `SnocTup` x7  `SnocTup` x8
               `SnocTup` x9  `SnocTup` x10 `SnocTup` x11 `SnocTup` x12
               `SnocTup` x13 ))

-- | Tuple up 14 scalar expressions.
tup14 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m, Elt n)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m, Exp n)
      -> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
tup14 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
  = Exp (Tuple
      ( NilTup `SnocTup` x1  `SnocTup` x2  `SnocTup` x3  `SnocTup` x4
               `SnocTup` x5  `SnocTup` x6  `SnocTup` x7  `SnocTup` x8
               `SnocTup` x9  `SnocTup` x10 `SnocTup` x11 `SnocTup` x12
               `SnocTup` x13 `SnocTup` x14 ))

-- | Tuple up 15 scalar expressions.
tup15 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m, Elt n, Elt o)
      => (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m, Exp n, Exp o)
      -> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
tup15 (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
  = Exp (Tuple
      ( NilTup `SnocTup` x1  `SnocTup` x2  `SnocTup` x3  `SnocTup` x4
               `SnocTup` x5  `SnocTup` x6  `SnocTup` x7  `SnocTup` x8
               `SnocTup` x9  `SnocTup` x10 `SnocTup` x11 `SnocTup` x12
               `SnocTup` x13 `SnocTup` x14 `SnocTup` x15 ))
-- | Destructor: split a scalar 2-tuple expression into its components.
-- Components are recovered by projection ('Prj'); the first tuple position
-- carries the deepest index in the snoc-list representation.
untup2 :: (Elt a, Elt b) => Exp (a, b) -> (Exp a, Exp b)
untup2 t =
  ( Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 3-tuple expression.
untup3 :: (Elt a, Elt b, Elt c) => Exp (a, b, c) -> (Exp a, Exp b, Exp c)
untup3 t =
  ( Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 4-tuple expression.
untup4 :: (Elt a, Elt b, Elt c, Elt d)
       => Exp (a, b, c, d) -> (Exp a, Exp b, Exp c, Exp d)
untup4 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 5-tuple expression.
untup5 :: (Elt a, Elt b, Elt c, Elt d, Elt e)
       => Exp (a, b, c, d, e) -> (Exp a, Exp b, Exp c, Exp d, Exp e)
untup5 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 6-tuple expression.
untup6 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f)
       => Exp (a, b, c, d, e, f) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f)
untup6 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 7-tuple expression.
untup7 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g)
       => Exp (a, b, c, d, e, f, g) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g)
untup7 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 8-tuple expression.
untup8 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h)
       => Exp (a, b, c, d, e, f, g, h) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h)
untup8 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )

-- | Destructor for a scalar 9-tuple expression.
untup9 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i)
       => Exp (a, b, c, d, e, f, g, h, i) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i)
untup9 t =
  ( Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` t)
  , Exp (SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` t)
  , Exp (SuccTupIdx ZeroTupIdx `Prj` t)
  , Exp (ZeroTupIdx `Prj` t) )
-- | Destructure a 10-tuple expression into its component expressions.
-- Components are projected left-to-right; the deepest 'SuccTupIdx' chain
-- selects the first component, 'ZeroTupIdx' the last.
untup10 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j)
=> Exp (a, b, c, d, e, f, g, h, i, j) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j)
untup10 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- | Destructure an 11-tuple expression into its component expressions.
untup11 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k)
=> Exp (a, b, c, d, e, f, g, h, i, j, k) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k)
untup11 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- | Destructure a 12-tuple expression into its component expressions.
untup12 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l)
=> Exp (a, b, c, d, e, f, g, h, i, j, k, l) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l)
untup12 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- | Destructure a 13-tuple expression into its component expressions.
untup13 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m)
=> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m)
untup13 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- | Destructure a 14-tuple expression into its component expressions.
untup14 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m, Elt n)
=> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m, n) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m, Exp n)
untup14 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- | Destructure a 15-tuple expression into its component expressions.
untup15 :: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h, Elt i, Elt j, Elt k, Elt l, Elt m, Elt n, Elt o)
=> Exp (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) -> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h, Exp i, Exp j, Exp k, Exp l, Exp m, Exp n, Exp o)
untup15 e =
( Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx))) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx (SuccTupIdx ZeroTupIdx)) `Prj` e
, Exp $ SuccTupIdx (SuccTupIdx ZeroTupIdx) `Prj` e
, Exp $ SuccTupIdx ZeroTupIdx `Prj` e
, Exp $ ZeroTupIdx `Prj` e)
-- Smart constructors for constant expressions
--
-- | Embedded 'minBound' for a bounded scalar type.
mkMinBound :: (Elt t, IsBounded t) => Exp t
mkMinBound = Exp (PrimConst (PrimMinBound boundedType))

-- | Embedded 'maxBound' for a bounded scalar type.
mkMaxBound :: (Elt t, IsBounded t) => Exp t
mkMaxBound = Exp (PrimConst (PrimMaxBound boundedType))

-- | Embedded 'pi' for a floating scalar type.
mkPi :: (Elt r, IsFloating r) => Exp r
mkPi = Exp (PrimConst (PrimPi floatingType))
-- Smart constructors for primitive applications
--
-- Operators from Floating: each wraps the corresponding primitive applied
-- to its argument(s); binary operators pack both arguments with 'tup2'.
mkSin :: (Elt t, IsFloating t) => Exp t -> Exp t
mkSin x = Exp (PrimApp (PrimSin floatingType) x)
mkCos :: (Elt t, IsFloating t) => Exp t -> Exp t
mkCos x = Exp (PrimApp (PrimCos floatingType) x)
mkTan :: (Elt t, IsFloating t) => Exp t -> Exp t
mkTan x = Exp (PrimApp (PrimTan floatingType) x)
mkAsin :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAsin x = Exp (PrimApp (PrimAsin floatingType) x)
mkAcos :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAcos x = Exp (PrimApp (PrimAcos floatingType) x)
mkAtan :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAtan x = Exp (PrimApp (PrimAtan floatingType) x)
mkSinh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkSinh x = Exp (PrimApp (PrimSinh floatingType) x)
mkCosh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkCosh x = Exp (PrimApp (PrimCosh floatingType) x)
mkTanh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkTanh x = Exp (PrimApp (PrimTanh floatingType) x)
mkAsinh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAsinh x = Exp (PrimApp (PrimAsinh floatingType) x)
mkAcosh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAcosh x = Exp (PrimApp (PrimAcosh floatingType) x)
mkAtanh :: (Elt t, IsFloating t) => Exp t -> Exp t
mkAtanh x = Exp (PrimApp (PrimAtanh floatingType) x)
mkExpFloating :: (Elt t, IsFloating t) => Exp t -> Exp t
mkExpFloating x = Exp (PrimApp (PrimExpFloating floatingType) x)
mkSqrt :: (Elt t, IsFloating t) => Exp t -> Exp t
mkSqrt x = Exp (PrimApp (PrimSqrt floatingType) x)
mkLog :: (Elt t, IsFloating t) => Exp t -> Exp t
mkLog x = Exp (PrimApp (PrimLog floatingType) x)
mkFPow :: (Elt t, IsFloating t) => Exp t -> Exp t -> Exp t
mkFPow x y = Exp (PrimApp (PrimFPow floatingType) (tup2 (x, y)))
mkLogBase :: (Elt t, IsFloating t) => Exp t -> Exp t -> Exp t
mkLogBase x y = Exp (PrimApp (PrimLogBase floatingType) (tup2 (x, y)))
-- Operators from Num: arithmetic primitives over numeric scalar types.
mkAdd :: (Elt t, IsNum t) => Exp t -> Exp t -> Exp t
mkAdd x y = Exp (PrimApp (PrimAdd numType) (tup2 (x, y)))
mkSub :: (Elt t, IsNum t) => Exp t -> Exp t -> Exp t
mkSub x y = Exp (PrimApp (PrimSub numType) (tup2 (x, y)))
mkMul :: (Elt t, IsNum t) => Exp t -> Exp t -> Exp t
mkMul x y = Exp (PrimApp (PrimMul numType) (tup2 (x, y)))
mkNeg :: (Elt t, IsNum t) => Exp t -> Exp t
mkNeg x = Exp (PrimApp (PrimNeg numType) x)
mkAbs :: (Elt t, IsNum t) => Exp t -> Exp t
mkAbs x = Exp (PrimApp (PrimAbs numType) x)
mkSig :: (Elt t, IsNum t) => Exp t -> Exp t
mkSig x = Exp (PrimApp (PrimSig numType) x)
-- Operators from Integral & Bits.
-- The *Rem/*Mod pair constructors return both results at once via 'untup2'.
mkQuot :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkQuot x y = Exp (PrimApp (PrimQuot integralType) (tup2 (x, y)))
mkRem :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkRem x y = Exp (PrimApp (PrimRem integralType) (tup2 (x, y)))
mkQuotRem :: (Elt t, IsIntegral t) => Exp t -> Exp t -> (Exp t, Exp t)
mkQuotRem x y = untup2 (Exp (PrimApp (PrimQuotRem integralType) (tup2 (x, y))))
mkIDiv :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkIDiv x y = Exp (PrimApp (PrimIDiv integralType) (tup2 (x, y)))
mkMod :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkMod x y = Exp (PrimApp (PrimMod integralType) (tup2 (x, y)))
mkDivMod :: (Elt t, IsIntegral t) => Exp t -> Exp t -> (Exp t, Exp t)
mkDivMod x y = untup2 (Exp (PrimApp (PrimDivMod integralType) (tup2 (x, y))))
mkBAnd :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkBAnd x y = Exp (PrimApp (PrimBAnd integralType) (tup2 (x, y)))
mkBOr :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkBOr x y = Exp (PrimApp (PrimBOr integralType) (tup2 (x, y)))
mkBXor :: (Elt t, IsIntegral t) => Exp t -> Exp t -> Exp t
mkBXor x y = Exp (PrimApp (PrimBXor integralType) (tup2 (x, y)))
mkBNot :: (Elt t, IsIntegral t) => Exp t -> Exp t
mkBNot x = Exp (PrimApp (PrimBNot integralType) x)
mkBShiftL :: (Elt t, IsIntegral t) => Exp t -> Exp Int -> Exp t
mkBShiftL x i = Exp (PrimApp (PrimBShiftL integralType) (tup2 (x, i)))
mkBShiftR :: (Elt t, IsIntegral t) => Exp t -> Exp Int -> Exp t
mkBShiftR x i = Exp (PrimApp (PrimBShiftR integralType) (tup2 (x, i)))
mkBRotateL :: (Elt t, IsIntegral t) => Exp t -> Exp Int -> Exp t
mkBRotateL x i = Exp (PrimApp (PrimBRotateL integralType) (tup2 (x, i)))
mkBRotateR :: (Elt t, IsIntegral t) => Exp t -> Exp Int -> Exp t
mkBRotateR x i = Exp (PrimApp (PrimBRotateR integralType) (tup2 (x, i)))
-- Operators from Fractional
mkFDiv :: (Elt t, IsFloating t) => Exp t -> Exp t -> Exp t
mkFDiv x y = Exp (PrimApp (PrimFDiv floatingType) (tup2 (x, y)))
mkRecip :: (Elt t, IsFloating t) => Exp t -> Exp t
mkRecip x = Exp (PrimApp (PrimRecip floatingType) x)
-- Operators from RealFrac: floating-to-integral conversions.
mkTruncate :: (Elt a, Elt b, IsFloating a, IsIntegral b) => Exp a -> Exp b
mkTruncate x = Exp (PrimApp (PrimTruncate floatingType integralType) x)
mkRound :: (Elt a, Elt b, IsFloating a, IsIntegral b) => Exp a -> Exp b
mkRound x = Exp (PrimApp (PrimRound floatingType integralType) x)
mkFloor :: (Elt a, Elt b, IsFloating a, IsIntegral b) => Exp a -> Exp b
mkFloor x = Exp (PrimApp (PrimFloor floatingType integralType) x)
mkCeiling :: (Elt a, Elt b, IsFloating a, IsIntegral b) => Exp a -> Exp b
mkCeiling x = Exp (PrimApp (PrimCeiling floatingType integralType) x)
-- Operators from RealFloat
mkAtan2 :: (Elt t, IsFloating t) => Exp t -> Exp t -> Exp t
mkAtan2 x y = Exp (PrimApp (PrimAtan2 floatingType) (tup2 (x, y)))
mkIsNaN :: (Elt t, IsFloating t) => Exp t -> Exp Bool
mkIsNaN x = Exp (PrimApp (PrimIsNaN floatingType) x)
-- FIXME: add missing operations from Floating, RealFrac & RealFloat
-- Relational and equality operators over scalar types.
mkLt :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkLt x y = Exp (PrimApp (PrimLt scalarType) (tup2 (x, y)))
mkGt :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkGt x y = Exp (PrimApp (PrimGt scalarType) (tup2 (x, y)))
mkLtEq :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkLtEq x y = Exp (PrimApp (PrimLtEq scalarType) (tup2 (x, y)))
mkGtEq :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkGtEq x y = Exp (PrimApp (PrimGtEq scalarType) (tup2 (x, y)))
mkEq :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkEq x y = Exp (PrimApp (PrimEq scalarType) (tup2 (x, y)))
mkNEq :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp Bool
mkNEq x y = Exp (PrimApp (PrimNEq scalarType) (tup2 (x, y)))
mkMax :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp t
mkMax x y = Exp (PrimApp (PrimMax scalarType) (tup2 (x, y)))
mkMin :: (Elt t, IsScalar t) => Exp t -> Exp t -> Exp t
mkMin x y = Exp (PrimApp (PrimMin scalarType) (tup2 (x, y)))
-- Logical operators
mkLAnd :: Exp Bool -> Exp Bool -> Exp Bool
mkLAnd x y = Exp (PrimApp PrimLAnd (tup2 (x, y)))
mkLOr :: Exp Bool -> Exp Bool -> Exp Bool
mkLOr x y = Exp (PrimApp PrimLOr (tup2 (x, y)))
mkLNot :: Exp Bool -> Exp Bool
mkLNot x = Exp (PrimApp PrimLNot x)
-- Character conversions
mkOrd :: Exp Char -> Exp Int
mkOrd x = Exp (PrimApp PrimOrd x)
mkChr :: Exp Int -> Exp Char
mkChr x = Exp (PrimApp PrimChr x)
-- Numeric conversions
mkFromIntegral :: (Elt a, Elt b, IsIntegral a, IsNum b) => Exp a -> Exp b
mkFromIntegral x = Exp (PrimApp (PrimFromIntegral integralType numType) x)
-- Other conversions
mkBoolToInt :: Exp Bool -> Exp Int
mkBoolToInt b = Exp (PrimApp PrimBoolToInt b)
-- Auxiliary functions
-- --------------------

infixr 0 $$
-- | Compose a unary function with a binary function.
($$) :: (b -> a) -> (c -> d -> b) -> c -> d -> a
($$) f g x y = f (g x y)

infixr 0 $$$
-- | Compose a unary function with a ternary function.
($$$) :: (b -> a) -> (c -> d -> e -> b) -> c -> d -> e -> a
($$$) f g x y z = f (g x y z)

infixr 0 $$$$
-- | Compose a unary function with a 4-ary function.
($$$$) :: (b -> a) -> (c -> d -> e -> f -> b) -> c -> d -> e -> f -> a
($$$$) f g w x y z = f (g w x y z)

infixr 0 $$$$$
-- | Compose a unary function with a 5-ary function.
($$$$$) :: (b -> a) -> (c -> d -> e -> f -> g -> b) -> c -> d -> e -> f -> g -> a
($$$$$) f g v w x y z = f (g v w x y z)
-- Debugging
-- ---------

-- | Render the outermost constructor of an array computation (for debugging).
-- Only 'Atag' and 'Use' include any payload in the output.
showPreAccOp :: forall acc seq exp arrs. PreAcc acc seq exp arrs -> String
showPreAccOp acc =
  case acc of
    Atag i        -> "Atag " ++ show i
    Use a         -> "Use " ++ showArrays a
    Pipe{}        -> "Pipe"
    Acond{}       -> "Acond"
    Awhile{}      -> "Awhile"
    Atuple{}      -> "Atuple"
    Aprj{}        -> "Aprj"
    Unit{}        -> "Unit"
    Generate{}    -> "Generate"
    Reshape{}     -> "Reshape"
    Replicate{}   -> "Replicate"
    Slice{}       -> "Slice"
    Map{}         -> "Map"
    ZipWith{}     -> "ZipWith"
    Fold{}        -> "Fold"
    Fold1{}       -> "Fold1"
    FoldSeg{}     -> "FoldSeg"
    Fold1Seg{}    -> "Fold1Seg"
    Scanl{}       -> "Scanl"
    Scanl'{}      -> "Scanl'"
    Scanl1{}      -> "Scanl1"
    Scanr{}       -> "Scanr"
    Scanr'{}      -> "Scanr'"
    Scanr1{}      -> "Scanr1"
    Permute{}     -> "Permute"
    Backpermute{} -> "Backpermute"
    Stencil{}     -> "Stencil"
    Stencil2{}    -> "Stencil2"
    Aforeign{}    -> "Aforeign"
    Collect{}     -> "Collect"
-- | Render the outermost constructor of a sequence computation (for debugging).
showPreSeqOp :: PreSeq acc seq exp arrs -> String
showPreSeqOp s =
  case s of
    StreamIn{}       -> "StreamIn"
    ToSeq{}          -> "ToSeq"
    MapSeq{}         -> "MapSeq"
    ZipWithSeq{}     -> "ZipWithSeq"
    ScanSeq{}        -> "ScanSeq"
    FoldSeq{}        -> "FoldSeq"
    FoldSeqFlatten{} -> "FoldSeqFlatten"
    Stuple{}         -> "Stuple"
-- | Render a collection of arrays: each array is shown abbreviated via
-- 'showShortendArr', and multiple arrays are joined as a parenthesised,
-- comma-separated group.
showArrays :: forall arrs. Arrays arrs => arrs -> String
showArrays = display . collect (arrays (undefined::arrs)) . fromArr
where
-- Walk the arrays representation, rendering each leaf array.
collect :: ArraysR a -> a -> [String]
collect ArraysRunit _ = []
collect ArraysRarray arr = [showShortendArr arr]
collect (ArraysRpair r1 r2) (a1, a2) = collect r1 a1 ++ collect r2 a2
--
-- Zero arrays -> empty string; one -> as-is; several -> tupled.
display [] = []
display [x] = x
display xs = "(" ++ intercalate ", " xs ++ ")"
-- | Show at most the first five elements of an array, appending ".." when
-- elements were elided.
showShortendArr :: Elt e => Array sh e -> String
showShortendArr arr =
  let cutoff = 5
      elems  = toList arr
      suffix = if length elems > cutoff then ".." else ""
  in show (take cutoff elems) ++ suffix
-- | Render the outermost constructor of a scalar expression (for debugging).
showPreExpOp :: PreExp acc seq exp t -> String
showPreExpOp (Const c) = "Const " ++ show c
-- Fixed: "Tag " now carries a trailing space, consistent with "Const " above
-- and with showPreAccOp's "Atag " (previously rendered e.g. "Tag0").
showPreExpOp (Tag i) = "Tag " ++ show i
showPreExpOp Tuple{} = "Tuple"
showPreExpOp Prj{} = "Prj"
showPreExpOp IndexNil = "IndexNil"
showPreExpOp IndexCons{} = "IndexCons"
showPreExpOp IndexHead{} = "IndexHead"
showPreExpOp IndexTail{} = "IndexTail"
showPreExpOp IndexAny = "IndexAny"
showPreExpOp ToIndex{} = "ToIndex"
showPreExpOp FromIndex{} = "FromIndex"
showPreExpOp Cond{} = "Cond"
showPreExpOp While{} = "While"
showPreExpOp PrimConst{} = "PrimConst"
showPreExpOp PrimApp{} = "PrimApp"
showPreExpOp Index{} = "Index"
showPreExpOp LinearIndex{} = "LinearIndex"
showPreExpOp Shape{} = "Shape"
showPreExpOp ShapeSize{} = "ShapeSize"
showPreExpOp Intersect{} = "Intersect"
showPreExpOp Union{} = "Union"
showPreExpOp Foreign{} = "Foreign"
|
rrnewton/accelerate
|
Data/Array/Accelerate/Smart.hs
|
bsd-3-clause
| 79,320 | 0 | 34 | 20,988 | 36,474 | 19,713 | 16,761 | 1,213 | 5 |
{-# LANGUAGE Haskell2010, CPP, Rank2Types, DeriveDataTypeable, StandaloneDeriving #-}
{-# LINE 1 "lib/Data/Time/Calendar/Private.hs" #-}
-- #hide
module Data.Time.Calendar.Private where
import Data.Fixed
type NumericPadOption = Maybe Char

-- | Prepend the pad character (if any) to a string.
pad1 :: NumericPadOption -> String -> String
pad1 opt s = case opt of
  Just c  -> c : s
  Nothing -> s

-- | Prepend @n@ copies of the pad character; no-op for non-positive @n@.
padN :: Int -> Char -> String -> String
padN n c s
  | n <= 0    = s
  | otherwise = replicate n c ++ s

-- | Show a 'Pico' value, padding the integer part to two characters.
show2Fixed :: NumericPadOption -> Pico -> String
show2Fixed opt x
  | x < 10    = pad1 opt (showFixed True x)
  | otherwise = showFixed True x

-- | Show a number left-padded to the given minimum width; a leading minus
-- sign is emitted before the padding of the absolute value.
showPaddedMin :: (Num t,Ord t,Show t) => Int -> NumericPadOption -> t -> String
showPaddedMin _ Nothing v = show v
showPaddedMin width opt v | v < 0 = '-' : showPaddedMin width opt (negate v)
showPaddedMin width (Just c) v = padN (width - length rendered) c rendered
  where rendered = show v

-- | Show padded to a minimum width of two characters.
show2 :: (Num t,Ord t,Show t) => NumericPadOption -> t -> String
show2 = showPaddedMin 2

-- | Show padded to a minimum width of three characters.
show3 :: (Num t,Ord t,Show t) => NumericPadOption -> t -> String
show3 = showPaddedMin 3

-- | Show padded to a minimum width of four characters.
show4 :: (Num t,Ord t,Show t) => NumericPadOption -> t -> String
show4 = showPaddedMin 4
-- | Remainder modulo 100 (follows 'mod' sign conventions).
mod100 :: (Integral i) => i -> i
mod100 n = n `mod` 100

-- | Floor division by 100 (follows 'div' sign conventions).
div100 :: (Integral i) => i -> i
div100 n = n `div` 100
-- | Clamp a value into the inclusive range @[lo, hi]@; the lower bound
-- is checked first.
clip :: (Ord t) => t -> t -> t -> t
clip lo hi v
  | v < lo    = lo
  | v > hi    = hi
  | otherwise = v
-- | Like 'clip', but out-of-range values yield 'Nothing' instead of
-- being clamped.
clipValid :: (Ord t) => t -> t -> t -> Maybe t
clipValid lo hi v
  | v < lo    = Nothing
  | v > hi    = Nothing
  | otherwise = Just v
|
phischu/fragnix
|
tests/packages/scotty/Data.Time.Calendar.Private.hs
|
bsd-3-clause
| 1,497 | 0 | 11 | 375 | 697 | 351 | 346 | 38 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Package
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Defines a package identifier along with a parser and pretty printer for it.
-- 'PackageIdentifier's consist of a name and an exact version. It also defines
-- a 'Dependency' data type. A dependency is a package name and a version
-- range, like @\"foo >= 1.2 && < 2\"@.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Package (
-- * Package ids
PackageName(..),
PackageIdentifier(..),
PackageId,
-- * Installed package identifiers
InstalledPackageId(..),
-- * Package source dependencies
Dependency(..),
thisPackageVersion,
notThisPackageVersion,
simplifyDependency,
-- * Package classes
Package(..), packageName, packageVersion,
PackageFixedDeps(..),
) where
import Distribution.Version
( Version(..), VersionRange, anyVersion, thisVersion
, notThisVersion, simplifyVersionRange )
import Distribution.Text (Text(..))
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP ((<++))
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>), (<+>), text)
import Control.DeepSeq (NFData(..))
import qualified Data.Char as Char ( isDigit, isAlphaNum )
import Data.List ( intercalate )
import Data.Data ( Data )
import Data.Typeable ( Typeable )
-- | A package name: dash-separated alphanumeric components. The parser
-- rejects purely numeric components (see note inside 'parse').
newtype PackageName = PackageName String
deriving (Read, Show, Eq, Ord, Typeable, Data)
instance Text PackageName where
disp (PackageName n) = Disp.text n
parse = do
ns <- Parse.sepBy1 component (Parse.char '-')
return (PackageName (intercalate "-" ns))
where
component = do
cs <- Parse.munch1 Char.isAlphaNum
if all Char.isDigit cs then Parse.pfail else return cs
-- each component must contain an alphabetic character, to avoid
-- ambiguity in identifiers like foo-1 (the 1 is the version number).
instance NFData PackageName where
-- Force the wrapped string when evaluating to normal form.
rnf (PackageName pkg) = rnf pkg
-- | Type alias so we can use the shorter name PackageId.
type PackageId = PackageIdentifier
-- | The name and version of a package.
data PackageIdentifier
= PackageIdentifier {
pkgName :: PackageName, -- ^The name of this package, eg. foo
pkgVersion :: Version -- ^the version of this package, eg 1.2
}
deriving (Read, Show, Eq, Ord, Typeable, Data)
instance Text PackageIdentifier where
disp (PackageIdentifier n v) = case v of
Version [] _ -> disp n -- if no version, don't show version.
_ -> disp n <> Disp.char '-' <> disp v
-- A missing "-version" suffix parses as the empty version.
parse = do
n <- parse
v <- (Parse.char '-' >> parse) <++ return (Version [] [])
return (PackageIdentifier n v)
instance NFData PackageIdentifier where
-- Force both fields when evaluating to normal form.
rnf (PackageIdentifier name version) = rnf name `seq` rnf version
-- ------------------------------------------------------------
-- * Installed Package Ids
-- ------------------------------------------------------------
-- | An InstalledPackageId uniquely identifies an instance of an installed
-- package. There can be at most one package with a given 'InstalledPackageId'
-- in a package database, or overlay of databases.
--
newtype InstalledPackageId = InstalledPackageId String
deriving (Read,Show,Eq,Ord,Typeable,Data)
instance Text InstalledPackageId where
disp (InstalledPackageId str) = text str
-- Accepts alphanumerics plus the punctuation ':', '-', '_' and '.'.
parse = InstalledPackageId `fmap` Parse.munch1 abi_char
where abi_char c = Char.isAlphaNum c || c `elem` ":-_."
-- ------------------------------------------------------------
-- * Package source dependencies
-- ------------------------------------------------------------
-- | Describes a dependency on a source package (API)
--
data Dependency = Dependency PackageName VersionRange
deriving (Read, Show, Eq, Typeable, Data)
instance Text Dependency where
disp (Dependency name ver) =
disp name <+> disp ver
-- A missing version range parses as 'anyVersion'.
parse = do name <- parse
Parse.skipSpaces
ver <- parse <++ return anyVersion
Parse.skipSpaces
return (Dependency name ver)
-- | Dependency on exactly the given package's version.
thisPackageVersion :: PackageIdentifier -> Dependency
thisPackageVersion pkgid =
  Dependency (pkgName pkgid) (thisVersion (pkgVersion pkgid))

-- | Dependency on any version except the given package's version.
notThisPackageVersion :: PackageIdentifier -> Dependency
notThisPackageVersion pkgid =
  Dependency (pkgName pkgid) (notThisVersion (pkgVersion pkgid))
-- | Simplify the 'VersionRange' expression in a 'Dependency'.
-- See 'simplifyVersionRange'.
--
simplifyDependency :: Dependency -> Dependency
simplifyDependency dep =
  case dep of
    Dependency nm vr -> Dependency nm (simplifyVersionRange vr)
-- | Class of things that have a 'PackageIdentifier'
--
-- Types in this class are all notions of a package. This allows us to have
-- different types for the different phases that packages go though, from
-- simple name\/id, package description, configured or installed packages.
--
-- Not all kinds of packages can be uniquely identified by a
-- 'PackageIdentifier'. In particular, installed packages cannot, there may be
-- many installed instances of the same source package.
--
class Package pkg where
packageId :: pkg -> PackageIdentifier
-- | The name part of the 'packageId'.
packageName :: Package pkg => pkg -> PackageName
packageName = pkgName . packageId
-- | The version part of the 'packageId'.
packageVersion :: Package pkg => pkg -> Version
packageVersion = pkgVersion . packageId
instance Package PackageIdentifier where
packageId = id
-- | Subclass of packages that have specific versioned dependencies.
--
-- So for example a not-yet-configured package has dependencies on version
-- ranges, not specific versions. A configured or an already installed package
-- depends on exact versions. Some operations or data structures (like
-- dependency graphs) only make sense on this subclass of package types.
--
class Package pkg => PackageFixedDeps pkg where
-- | The exact package versions this package depends on.
depends :: pkg -> [PackageIdentifier]
|
jwiegley/ghc-release
|
libraries/Cabal/cabal/Distribution/Package.hs
|
gpl-3.0
| 7,549 | 0 | 13 | 1,466 | 1,116 | 624 | 492 | 88 | 1 |
{-# LANGUAGE
PatternGuards #-}
-- | Parse events from @clog@ output, such as the files
-- at <http://tunes.org/~nef/logs/haskell/>.
--
-- IRC has no single standard character encoding. This
-- module decodes messages as UTF-8 following common
-- practice on Freenode.
module Data.IRC.CLog.Parse
(
-- * Parsing log files
parseLog
-- * Configuring the parser
, Config(..)
, haskellConfig
-- * Re-export
, module Data.IRC.Event
) where
import Data.IRC.Event
import Data.Word
import Data.List
import Control.Applicative
import qualified Data.Foldable as F
import qualified Data.Attoparsec as P
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified Data.Time as Time
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import qualified System.FilePath as Path
import qualified System.Environment as Env
import qualified System.IO.Error as IOError
import qualified Control.Exception as Ex
import qualified Data.Time.LocalTime.TimeZone.Series as Zone
import qualified Data.Time.LocalTime.TimeZone.Olson as Zone
-- | Configuring the parser.
--
-- Both fields locate time zone data used to convert clog's local
-- timestamps to UTC (see 'parseLog').
data Config = Config
  { timeZone :: String -- ^ Timestamp time zone; an Olson time zone name.
  , zoneInfo :: FilePath -- ^ Directory for time zone files; @$TZDIR@ overrides.
  } deriving (Show)
-- | @'Config'@ value suitable for parsing @#haskell@ logs on Linux.
haskellConfig :: Config
haskellConfig =
  Config
    { timeZone = "America/Los_Angeles"  -- clog timestamps are Pacific time
    , zoneInfo = "/usr/share/zoneinfo"  -- conventional Olson database location
    }
-- Many text encodings are used on IRC; clog metadata is ASCII,
-- while message bodies are treated as UTF-8.  Decode leniently so
-- invalid byte sequences become replacement characters instead of
-- aborting the parse.
decode :: B.ByteString -> T.Text
decode bytes = T.decodeUtf8With T.lenientDecode bytes
-- Timestamps are in local time and must be converted.
-- | Conversion from a zone-local wall-clock time to UTC.
type TimeConv = Time.LocalTime -> Time.UTCTime
-- | Build a 'TimeConv' from an Olson time zone file on disk.
getTimeConv :: FilePath -> IO TimeConv
getTimeConv p = Zone.localTimeToUTC' <$> Zone.getTimeZoneSeriesFromOlsonFile p
-- | The date the log file covers plus the local-to-UTC conversion.
data TimeAdj = TimeAdj Time.Day TimeConv
-- Parsers.
-- | True for every byte except CR (13) and LF (10).
notNewline :: Word8 -> Bool
notNewline byte = not (byte == 13 || byte == 10)
-- | Decode the remainder of the current line, also consuming the
-- one-byte line terminator.
restOfLine :: P.Parser T.Text
restOfLine = fmap decode (P.takeWhile notNewline <* P.take 1)
-- | Skip the remainder of the current line including its terminator.
nextLine :: P.Parser ()
nextLine = do
  P.skipWhile notNewline
  _ <- P.take 1
  return ()
-- | Parse exactly @n@ ASCII digits as a decimal number.
digits :: Int -> P.Parser Int
digits n = fmap toNumber (P.count n asciiDigit)
  where
    -- Fold left over the digit bytes; 48 is the ASCII code of '0'.
    toNumber = foldl' (\acc w -> acc * 10 + fromIntegral w - 48) 0
    asciiDigit = P.satisfy (\w -> w >= 48 && w <= 57)
-- | Parse an @HH:MM:SS@ timestamp and convert it to UTC using the
-- day and converter in the 'TimeAdj'.
time :: TimeAdj -> P.Parser Time.UTCTime
time (TimeAdj day conv) = do
  h <- digits 2
  _ <- P.word8 58  -- ':'
  m <- digits 2
  _ <- P.word8 58  -- ':'
  s <- digits 2
  return (conv (Time.LocalTime day (Time.TimeOfDay h m (fromIntegral s))))
-- | Parse one clog event after its timestamp.  The alternatives are
-- tried in order, so the " --- " metadata prefix is attempted before
-- the chat-message forms.
event :: P.Parser Event
event = F.asum
  [ str " --- " *> F.asum
    [ userAct Join "join: "
    , userAct Part "part: "
    , userAct Quit "quit: "
    , ReNick <$ str "nick: " <*> nick <* str " -> " <*> nick <* nextLine
    , Mode <$ str "mode: " <*> nick <* str " set " <*> restOfLine
    , Kick <$ str "kick: " <*> nick <* str " was kicked by " <*> nick <* chr ' ' <*> restOfLine
    , global Log "log: "
    , global Topic "topic: "
    , global Names "names: "
    ]
  , Talk <$ str " <" <*> nick <* str "> " <*> restOfLine
  , Notice <$ str " -" <*> nick <*> restOfLine -- FIXME: parse host
  , Act <$ str " * " <*> nick <* chr ' ' <*> restOfLine
  ] where
  -- Single-character and string literal parsers over bytes.
  chr = P.word8 . fromIntegral . fromEnum
  str = P.string . B8.pack
  -- A nick is any run of bytes excluding whitespace and angle brackets.
  nick = (Nick . decode) <$> P.takeWhile (not . P.inClass " \n\r\t\v<>")
  -- Shared shapes: "<verb>: <nick> <rest>" and "<verb>: <rest>".
  userAct f x = f <$ str x <*> nick <* chr ' ' <*> restOfLine
  global f x = f <$ str x <*> restOfLine
-- | Parse one whole log line: a timestamped event, or fall back to
-- capturing the raw text as 'NoParse'.  The 'P.try' is required so a
-- partially-matched event does not consume input before the fallback.
line :: TimeAdj -> P.Parser EventAt
line adj =
  P.try (EventAt <$> time adj <*> event)
  <|> (NoParse <$> restOfLine)
-- | Total replacement for 'read': succeeds only when there is exactly
-- one parse that consumes the entire input.
safeRead :: (Read a) => String -> Maybe a
safeRead s =
  case reads s of
    [(v, "")] -> Just v
    _         -> Nothing
-- | Extract the log date from a file name of the form @YY.MM.DD@.
-- Years are interpreted in the 2000s.  Dies with 'error' on any
-- other file name shape.
getDay :: FilePath -> Time.Day
getDay path =
  case Path.splitFileName path of
    (_, [y1, y0, '.', m1, m0, '.', d1, d0])
      | Just [y, m, d] <- mapM safeRead [[y1, y0], [m1, m0], [d1, d0]]
      -> Time.fromGregorian (2000 + fromIntegral y) m d
    _ -> error ("cannot parse date from filename: " ++ path)
-- | Parse a log file.
--
-- The file name (after any directory) is significant.
-- It is used to set the date for timestamps.
-- It should have the form @YY.MM.DD@, as do the files on
-- @tunes.org@.
parseLog :: Config -> FilePath -> IO [EventAt]
parseLog (Config{timeZone=tz, zoneInfo=zi}) p = do
  -- $TZDIR overrides the configured zoneinfo directory.
  tzdir <- either (const zi :: Ex.IOException -> FilePath) id <$> Ex.try (Env.getEnv "TZDIR")
  adj <- TimeAdj (getDay p) <$> getTimeConv (Path.combine tzdir tz)
  b <- B.readFile p
  -- Drive the attoparsec incremental result to completion: feed an
  -- empty chunk on Partial (signalling end of input) and die loudly
  -- on Fail.  (The as-pattern here had been mangled to
  -- "[email protected]" by an address-obfuscation pass; restored.)
  let go r@P.Fail{} = error $ show r
      go (P.Partial g) = go $ g B.empty
      go (P.Done _ x) = x
  let es = go $ P.parse (P.manyTill (line adj) P.endOfInput) b
  return es
|
chrisdone/ircbrowse
|
upstream/clogparse/Data/IRC/CLog/Parse.hs
|
bsd-3-clause
| 4,900 | 0 | 16 | 1,115 | 1,517 | 820 | 697 | 99 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Utils.Boolean
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Abstraction of booleans. Unfortunately, Haskell makes Bool's very hard to
-- work with, by making it a fixed-data type. This is our workaround
-----------------------------------------------------------------------------
module Data.SBV.Utils.Boolean(Boolean(..), bAnd, bOr, bAny, bAll) where
infixl 6 <+> -- xor
infixr 3 &&&, ~& -- and, nand
infixr 2 |||, ~| -- or, nor
infixr 1 ==>, <=> -- implies, iff
-- | The 'Boolean' class: a generalization of Haskell's 'Bool' type
-- Haskell 'Bool' and SBV's 'SBool' are instances of this class, unifying the treatment of boolean values.
--
-- Minimal complete definition: 'true', 'bnot', '&&&'
-- However, it's advisable to define 'false', and '|||' as well (typically), for clarity.
--
-- NOTE: the default methods derive everything else from those
-- primitives using the usual boolean-algebra identities; instances may
-- override any of them (e.g. for a more efficient symbolic encoding).
class Boolean b where
  -- | logical true
  true    :: b
  -- | logical false
  false   :: b
  -- | complement
  bnot    :: b -> b
  -- | and
  (&&&)   :: b -> b -> b
  -- | or
  (|||)   :: b -> b -> b
  -- | nand
  (~&)    :: b -> b -> b
  -- | nor
  (~|)    :: b -> b -> b
  -- | xor
  (<+>)   :: b -> b -> b
  -- | implies
  (==>)   :: b -> b -> b
  -- | equivalence
  (<=>)   :: b -> b -> b
  -- | cast from Bool
  fromBool :: Bool -> b
  -- default definitions (in terms of 'true', 'bnot', and '&&&')
  false   = bnot true
  a ||| b = bnot (bnot a &&& bnot b)
  a ~& b  = bnot (a &&& b)
  a ~| b  = bnot (a ||| b)
  a <+> b = (a &&& bnot b) ||| (bnot a &&& b)
  a <=> b = (a &&& b) ||| (bnot a &&& bnot b)
  a ==> b = bnot a ||| b
  fromBool True  = true
  fromBool False = false
-- | Generalization of 'and': conjunction over a list.
bAnd :: Boolean b => [b] -> b
bAnd xs = foldr (&&&) true xs
-- | Generalization of 'or': disjunction over a list.
bOr :: Boolean b => [b] -> b
bOr xs = foldr (|||) false xs
-- | Generalization of 'any': does the predicate hold somewhere?
bAny :: Boolean b => (a -> b) -> [a] -> b
bAny p xs = bOr (map p xs)
-- | Generalization of 'all': does the predicate hold everywhere?
bAll :: Boolean b => (a -> b) -> [a] -> b
bAll p xs = bAnd (map p xs)
-- | 'Bool' is the canonical instance; the remaining operations
-- ('~&', '<+>', '==>', ...) come from the class defaults.
instance Boolean Bool where
  true  = True
  false = False
  bnot  = not
  (&&&) = (&&)
  (|||) = (||)
|
TomMD/cryptol
|
sbv/Data/SBV/Utils/Boolean.hs
|
bsd-3-clause
| 2,196 | 0 | 10 | 560 | 610 | 350 | 260 | 40 | 1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies, FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls, FlexibleInstances, UndecidableInstances #-}
module InstContextNorm
where
import Data.Kind (Type)
-- Compile-only regression test: exercises instance-context
-- normalisation through the associated type family 'Def'.
data EX _x _y (p :: Type -> Type)
data ANY
class Base p
class Base (Def p) => Prop p where
  type Def p
instance Base ()
instance Prop () where
  type Def () = ()
instance (Base (Def (p ANY))) => Base (EX _x _y p)
instance (Prop (p ANY)) => Prop (EX _x _y p) where
  type Def (EX _x _y p) = EX _x _y p
data FOO x
instance Prop (FOO x) where
  type Def (FOO x) = ()
data BAR
instance Prop BAR where
  type Def BAR = EX () () FOO
-- Needs Base (Def BAR)
-- And (Def Bar = Ex () () FOO)
-- so we need Base (Def (Foo ANY))
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/InstContextNorm.hs
|
bsd-3-clause
| 761 | 0 | 10 | 167 | 267 | 144 | 123 | -1 | -1 |
{-# Language GADTs, OverloadedStrings, CPP #-}
{-|
Module : Config.Schema.Load.Error
Description : Error types and rendering for Load module
Copyright : (c) Eric Mertens, 2019
License : ISC
Maintainer : [email protected]
This module provides a complete skeleton of the failures that
occurred when trying to match a 'Value' against a 'ValueSpec'
allowing custom error rendering to be implemented.
The structure is you get a single value and a list of one-or-more
primitive specifications that it failed to match along with
an enumeration of why that specification failed to match. Some
failures are due to failures in nested specifications, so the
whole error structure can form a tree.
-}
module Config.Schema.Load.Error
(
-- * Error types
ValueSpecMismatch(..)
, PrimMismatch(..)
, Problem(..)
, ErrorAnnotation(..)
-- * Detailed rendering
, prettyValueSpecMismatch
, prettyPrimMismatch
, prettyProblem
-- * Summaries
, describeSpec
, describeValue
, simplifyValueSpecMismatch
) where
import Control.Exception
import Data.Text (Text)
import Data.Foldable (toList)
import qualified Data.Text as Text
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Typeable (Typeable)
import Text.PrettyPrint
(Doc, fsep, ($+$), nest, text, vcat, (<+>), empty,
punctuate, comma, int, colon, hcat)
import Config
import Config.Macro (FilePosition(..))
import Config.Schema.Types
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid ((<>))
#endif
-- | Wrapper for schema load errors.  (This is a plain @data@ type,
-- not a newtype: it pairs the position annotation @p@ with the value
-- description and the failures.)
--
-- @since 1.2.0.0
data ValueSpecMismatch p =
  -- | Problem value and list of specification failures
  ValueSpecMismatch p Text (NonEmpty (PrimMismatch p))
  deriving Show
-- | Type for errors that can be encountered while decoding a value according
-- to a specification. The error includes a key path indicating where in
-- the configuration file the error occurred.
--
-- @since 1.2.0.0
data PrimMismatch p =
  -- | spec description and problem
  PrimMismatch Text (Problem p)
  deriving Show
-- | Problems that can be encountered when matching a 'Value' against a 'ValueSpec'.
-- Nested constructors make the overall error form a tree.
--
-- @since 1.2.0.0
data Problem p
  = MissingSection Text -- ^ missing section name
  | UnusedSections (NonEmpty Text) -- ^ unused section names
  | SubkeyProblem Text (ValueSpecMismatch p) -- ^ nested error in given section
  | ListElementProblem Int (ValueSpecMismatch p) -- ^ nested error in given list element
  | NestedProblem (ValueSpecMismatch p) -- ^ generic nested error
  | TypeMismatch -- ^ value and spec type mismatch
  | CustomProblem Text -- ^ custom spec error message
  | WrongAtom -- ^ atoms didn't match
  deriving Show
-- | Describe outermost shape of a 'PrimValueSpec'
--
-- @since 1.2.0.0
describeSpec :: PrimValueSpec a -> Text
describeSpec spec =
  case spec of
    TextSpec            -> "text"
    NumberSpec          -> "number"
    AnyAtomSpec         -> "atom"
    AtomSpec a          -> "atom `" <> a <> "`"
    ListSpec _          -> "list"
    SectionsSpec name _ -> name
    AssocSpec _         -> "sections"
    CustomSpec name _   -> name
    NamedSpec name _    -> name
-- | Describe outermost shape of a 'Value'
describeValue :: Value p -> Text
describeValue value =
  case value of
    Text{}     -> "text"
    Number{}   -> "number"
    Atom _ a   -> "atom `" <> atomName a <> "`"
    Sections{} -> "sections"
    List{}     -> "list"
-- | Bottom-up transformation of a 'ValueSpecMismatch': the rewrite
-- @f@ is applied to every nested mismatch first, then to the root.
rewriteMismatch ::
  (ValueSpecMismatch p -> ValueSpecMismatch p) ->
  ValueSpecMismatch p -> ValueSpecMismatch p
rewriteMismatch f (ValueSpecMismatch p v prims) = f (ValueSpecMismatch p v (fmap aux1 prims))
  where
    -- Descend through each primitive failure ...
    aux1 (PrimMismatch spec prob) = PrimMismatch spec (aux2 prob)
    -- ... recursing into the constructors that carry nested mismatches.
    aux2 (SubkeyProblem x y) = SubkeyProblem x (rewriteMismatch f y)
    aux2 (ListElementProblem x y) = ListElementProblem x (rewriteMismatch f y)
    aux2 (NestedProblem y) = NestedProblem (rewriteMismatch f y)
    aux2 prob = prob
-- | Single-step rewrite that removes type-mismatch problems if there
-- are non-mismatches available to focus on.  If filtering would leave
-- nothing, the error is returned unchanged.
removeTypeMismatch1 :: ValueSpecMismatch p -> ValueSpecMismatch p
removeTypeMismatch1 err@(ValueSpecMismatch p v xs) =
  case NonEmpty.nonEmpty (NonEmpty.filter (not . isTypeMismatch) xs) of
    Just interesting -> ValueSpecMismatch p v interesting
    Nothing          -> err
-- | Returns 'True' for schema mismatches where the value type doesn't
-- match.  A nested error counts only when every one of its own
-- failures is itself a type mismatch.
isTypeMismatch :: PrimMismatch p -> Bool
isTypeMismatch (PrimMismatch _ prob) = go prob
  where
    go WrongAtom    = True
    go TypeMismatch = True
    go (NestedProblem (ValueSpecMismatch _ _ xs)) = all isTypeMismatch xs
    go _            = False
-- | Single-step rewrite that removes mismatches with only a single,
-- nested mismatch below them.  When the failure list has exactly one
-- element and that element merely wraps a nested mismatch, the nested
-- mismatch replaces the wrapper; otherwise the input is unchanged.
focusMismatch1 :: ValueSpecMismatch p -> ValueSpecMismatch p
focusMismatch1 x@(ValueSpecMismatch _ _ prims)
  | PrimMismatch _ problem :| [] <- prims
  , Just sub <- simplify1 problem = sub
  | otherwise = x
  where
    -- Constructors that carry a nested mismatch we can focus on.
    simplify1 (SubkeyProblem _ p) = Just p
    simplify1 (ListElementProblem _ p) = Just p
    simplify1 (NestedProblem p) = Just p
    simplify1 _ = Nothing
-- | Pretty-printer for 'ValueSpecMismatch' showing the position
-- and type of value that failed to match along with details about
-- each specification that it didn't match.
--
-- @since 1.2.0.0
prettyValueSpecMismatch :: ErrorAnnotation p => ValueSpecMismatch p -> Doc
prettyValueSpecMismatch (ValueSpecMismatch p v es) =
  let heading = displayAnnotation p <> text (Text.unpack v)
      details = vcat (map prettyPrimMismatch (toList es))
  in heading $+$ details
-- | Pretty-printer for 'PrimMismatch' showing a summary of the primitive
-- specification that didn't match followed by a more detailed error when
-- appropriate.
--
-- @since 1.2.0.0
prettyPrimMismatch :: ErrorAnnotation p => PrimMismatch p -> Doc
prettyPrimMismatch (PrimMismatch spec problem) =
  let (summary, detail) = prettyProblem problem
  in (text "* expected" <+> text (Text.unpack spec) <+> summary) $+$ nest 4 detail
-- | Simplify a 'ValueSpecMismatch' by collapsing long nested error
-- cases and by assuming that if a type matched that the other mismatched
-- type alternatives are uninteresting. This is used in the implementation
-- of 'displayException'.
--
-- @since 1.2.1.0
simplifyValueSpecMismatch :: ValueSpecMismatch p -> ValueSpecMismatch p
simplifyValueSpecMismatch err =
  rewriteMismatch (\m -> focusMismatch1 (removeTypeMismatch1 m)) err
-- | Pretty-printer for 'Problem' that generates a summary line
-- as well as a detailed description (depending on the error)
--
-- Only the nested-error constructors produce a non-empty detail
-- document; the leaf problems summarize in one line.
--
-- @since 1.2.0.0
prettyProblem ::
  ErrorAnnotation p =>
  Problem p ->
  (Doc, Doc) {- ^ summary, detailed -}
prettyProblem p =
  case p of
    TypeMismatch ->
      ( text "- type mismatch"
      , empty)
    WrongAtom ->
      ( text "- wrong atom"
      , empty)
    MissingSection name ->
      ( text "- missing section:" <+> text (Text.unpack name)
      , empty)
    UnusedSections names ->
      ( text "- unexpected sections:" <+>
        fsep (punctuate comma (map (text . Text.unpack) (toList names)))
      , empty)
    CustomProblem e ->
      ( text "-" <+> text (Text.unpack e)
      , empty)
    SubkeyProblem name e ->
      ( text "- problem in section:" <+> text (Text.unpack name)
      , prettyValueSpecMismatch e)
    NestedProblem e ->
      ( empty
      , prettyValueSpecMismatch e)
    ListElementProblem i e ->
      ( text "- problem in element:" <+> int i
      , prettyValueSpecMismatch e)
-- | Class for rendering position annotations within the 'prettyValueSpecMismatch'
--
-- @since 1.2.0.0
class (Typeable a, Show a) => ErrorAnnotation a where
  displayAnnotation :: a -> Doc
-- | Renders a 'Position' as @line:column:@
--
-- @since 1.2.0.0
instance ErrorAnnotation Position where
  displayAnnotation pos = hcat [int (posLine pos), colon, int (posColumn pos), colon]
-- | Renders a 'FilePosition' as @path:line:column:@
instance ErrorAnnotation FilePosition where
  displayAnnotation (FilePosition path pos) = hcat [text path, colon, int (posLine pos), colon, int (posColumn pos), colon]
-- | Renders as an empty document
--
-- @since 1.2.0.0
instance ErrorAnnotation () where
  displayAnnotation _ = empty
-- | 'displayException' implemented with 'prettyValueSpecMismatch'
-- and 'simplifyValueSpecMismatch'.
--
-- @since 1.2.0.0
instance ErrorAnnotation p => Exception (ValueSpecMismatch p) where
  displayException = show . prettyValueSpecMismatch . simplifyValueSpecMismatch
|
glguy/config-schema
|
src/Config/Schema/Load/Error.hs
|
isc
| 9,060 | 0 | 17 | 2,130 | 1,802 | 969 | 833 | 142 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Hirc.Types where
import Control.Concurrent.Chan
import Control.Monad.State
import Data.Text (Text)
import System.IO
import qualified Database.SQLite.Simple as SQ
-- | All live server connections.
data Connections = Connections { connsServers :: [Connection] }
-- | One open connection: the server definition plus its socket handle,
-- database handle, and the two message channels the worker threads use.
data Connection = Connection { connServer :: Server
                             , connHandle :: Handle
                             , connDb :: SQ.Connection
                             , connChannel :: Chan IRCRpl
                             , connIOChannel :: Chan Text }
-- | Static application configuration (path to the SQLite database).
data Config = Config { confDatabase :: String }
-- | An IRC server to connect to, with the bot's identity on it.
data Server = Server
    { servName :: Text
    , servHost :: Text
    , servPort :: Int
    -- Channels with passwords needs to be put *before* channels without
    -- passwords
    , servChans :: [Channel]
    , servLogFile :: FilePath
    , servBotNick :: Text
    , servBotName :: Text }
-- | A channel to join, with its command prefix, plugins, and hooks.
data Channel = Channel
    { chanName :: Text
    , chanPrefix :: Text
    , chanPassword :: Text
    , chanPlugins :: [(Text, Plugin)]
    , chanHooks :: [Hook] }
-- NOTE(review): the constructor is named CI — presumably intended as a
-- case-insensitive nick wrapper; confirm against usage sites.
data Nick = CI Text
-- | Incoming IRC messages, decoded per command.
data IRC = Ping { pingServer :: Text }
         | Privmsg { privNick :: Text
                   , privName :: Text
                   , privHost :: Text
                   , privDest :: Text
                   , privText :: Text }
         | Join { joinNick :: Text
                , joinName :: Text
                , joinHost :: Text
                , joinDest :: Text }
         | Numeric { numNumber :: Text
                   , numText :: Maybe Text }
-- | Outgoing replies written back to the server.
data IRCRpl = RplPing { rplPingServer :: Text }
            | RplPrivmsg { rplPrivDest :: Text
                         , rplPrivText :: Text }
            | RawCommand { rplRawCommand :: Text }
-- | Connection-scoped IO computations.
type StConn = StateT Connection IO
-- | Plugins and hooks both react to a decoded message.
type Plugin = IRC -> StConn ()
type Hook = IRC -> StConn ()
|
xintron/Hirc
|
Hirc/Types.hs
|
mit
| 1,748 | 0 | 10 | 576 | 422 | 264 | 158 | 48 | 0 |
module Main (main) where
import Control.Concurrent
import qualified Control.Exception as E
import Data.Char
import Network
import System.IO
import GridC.Parser
import GridC.Codegen
-- | TCP port the HTTP server listens on.
port :: PortNumber
port = 30303
-- | Value sent in the @Server:@ response header.
server :: String
server = "lzmhttpd/0.1"
-- IOError handler: swallow the error and produce an empty string.
errorHandler :: IOError -> IO String
errorHandler _ = return ""
-- Try to read from a handle, return "" if that fails
-- (only IOError is caught; other exceptions propagate).
dataOrEpsilon :: (Handle -> IO String) -> Handle -> IO String
dataOrEpsilon f h = E.catch (f h) errorHandler
-- Read a single char from a handle, returned as a one-character string.
readChar :: Handle -> IO String
readChar h = fmap (\c -> [c]) (hGetChar h)
-- Read a line from a handle, falling back to "" on IOError
-- via dataOrEpsilon.
readLine :: Handle -> IO String
readLine = dataOrEpsilon hGetLine
-- Keep reading lines until a bare "\r" (end of HTTP headers) or
-- "" (read failure / EOF) is seen.
readUntilCR :: Handle -> IO ()
readUntilCR h = do
  line <- readLine h
  if line == "\r" || line == ""
    then return ()
    else readUntilCR h
-- Read and decode a request line like "GET / HTTP/1.0\r", returning
-- (method, path).  A malformed line with fewer than two words yields
-- ("", "") — replacing the previous partial `head`/`!!` indexing with
-- a total pattern match.
readMethod :: Handle -> IO (String, String)
readMethod h = do
  line <- readLine h
  case words line of
    (method : path : _) -> return (method, path)
    _                   -> return ("", "")
-- Decode a header line like "Content-Length: 1234" into a
-- (lower-cased first word, last word) pair; ("", "") when blank.
parseKV :: String -> (String, String)
parseKV line =
  case words line of
    []                       -> ("", "")
    allWords@(firstWord : _) -> (map toLower firstWord, last allWords)
-- Read lines until a "Content-Length" header is found, and then keep reading
-- lines until the end of the HTTP header.
-- Return 0 if a content-length header is not present.
-- NOTE(review): `read` will throw on a malformed Content-Length value;
-- consider readMaybe with a 0 fallback.
readLength :: Handle -> IO Int
readLength h = do
  line <- readLine h
  let (key, value) = parseKV line
  case key of
    "" -> return 0
    "content-length:" -> do
      readUntilCR h
      return (read value :: Int)
    _ -> readLength h
-- Read the content part of the HTTP post, one character at a time.
-- Bodies larger than 32 KiB are rejected as "".  The base case now
-- guards len <= 0 (not len == 0) so a negative Content-Length from a
-- hostile client cannot recurse past zero forever.
readContent :: Handle -> Int -> IO String
readContent h len
  | len <= 0 = return ""
  | len > 32768 = return ""
  | otherwise = do
      str <- dataOrEpsilon readChar h
      rest <- readContent h (len-1)
      return $ str ++ rest
-- Write the HTTP response: a fixed 200 header block (with permissive
-- CORS headers), the computed Content-Length, then the body.
writeResponse :: Handle -> String -> IO ()
writeResponse h content = mapM_ (hPutStr h)
  [ "HTTP/1.1 200 OK\r\n"
  , "Content-Type: text/plain\r\n"
  , "Access-Control-Allow-Origin: *\r\n"
  , "Access-Control-Allow-Methods: GET, POST\r\n"
  , "Access-Control-Allow-Headers: Content-Length, Content-Type\r\n"
  , "Server: " ++ server ++ "\r\n"
  , "Content-Length: " ++ show (length content) ++ "\r\n"
  , "Connection: close\r\n"
  , "\r\n"
  , content
  ]
-- GET requests are only a liveness check; the path is ignored.
doGet :: String -> IO String
doGet = const (return ":)")
-- Compile a POSTed GridC program; any exception raised while forcing
-- the result is rendered with 'show' instead of killing the request.
doPost :: String -> IO String
doPost contents = E.catch (E.evaluate $ compile contents) handler
  where
    handler :: E.SomeException -> IO String
    handler = return . show
-- Parse then generate code; either stage's error string (Left) or the
-- generated output (Right) is collapsed to a plain String.
compile :: String -> String
compile input = eitherMap id id (eitherMap Left codegen parsed)
  where
    parsed = parseGC "<input>" input
    -- Local either-eliminator (same role as Data.Either.either).
    eitherMap f _ (Left l) = f l
    eitherMap _ g (Right r) = g r
-- Dispatch the HTTP request by method; anything but GET/POST gets an
-- empty body.
processRequest :: String -> String -> String -> IO String
processRequest method path content =
  case method of
    "GET"  -> doGet path
    "POST" -> doPost content
    _      -> return ""
-- Process a HTTP connection: read the request line, headers, and body,
-- log them, dispatch, write the response, and close the handle.
processConnection :: Handle -> IO ()
processConnection h = do
  (method, path) <- readMethod h
  len <- readLength h
  content <- readContent h len
  putStrLn $ "method: " ++ show method
  putStrLn $ "len: " ++ show len
  putStrLn $ "content: " ++ show content
  putStrLn ""
  response <- processRequest method path content
  putStrLn $ "response: " ++ show response
  putStrLn ""
  writeResponse h response
  hFlush h
  hClose h
-- Accept/fork loop: each accepted connection is served on its own
-- (bound) thread while we go back to accepting.
acceptLoop :: Socket -> IO ()
acceptLoop sock = do
  (handle, _, _) <- accept sock
  _ <- forkOS (processConnection handle)
  acceptLoop sock
-- | Bind the listening socket and run the accept loop forever.
main :: IO ()
main = do
  putStr $ "listening on port " ++ show port ++ "\n"
  sock <- listenOn $ PortNumber port
  acceptLoop sock
|
lessandro/gridc
|
src/Httpd.hs
|
mit
| 4,283 | 0 | 14 | 1,083 | 1,351 | 649 | 702 | 114 | 3 |
-- | Example for test purposes - enhancing code copied from http://www.haskell.org/haskellwiki/OpenGLTutorial1.
-- For all rights see there - this code adds a menu.
module Menu
(
main
)
where
import qualified Graphics.UI.GLUT.Menu as Menu
import qualified Graphics.UI.GLUT.Begin as Begin
-- Demo menu action: write a marker to the console.
test :: IO ()
test = print "test"
-- NOTE(review): despite its name, `main` here is a GLUT 'Menu' value,
-- not an IO action — presumably the experiment attaches it elsewhere;
-- confirm how this module is actually driven.
main = (Menu.Menu [Menu.MenuEntry "Console output (test)" test,
                   Menu.MenuEntry "Quit" Begin.leaveMainLoop])
|
tnrangwi/grill
|
test/experimental/opengl/Menu.hs
|
mit
| 445 | 0 | 9 | 82 | 77 | 47 | 30 | 9 | 1 |
module State (module X) where
import State.Types as X
import State.Operations as X
import State.XML as X
|
mplamann/magic-spieler
|
src/State.hs
|
mit
| 107 | 0 | 4 | 19 | 31 | 22 | 9 | 4 | 0 |
-- |
--
-- Module : Main
-- License : MIT
--
-- Usage: DataMiningWeka [-h] exec-conf param-conf data-conf
--
-- exec-conf | applications YAML configuration
--
-- param-conf | applications parameters YAML configuration
--
-- data-conf | data files YAML configuration
module Main ( main ) where
import Exec
import ExecConfig
import Data.Maybe (fromMaybe)
import System.Environment
import System.Exit
-----------------------------------------------------------------------------
-- | Dispatch on the command-line arguments.
main = getArgs >>= parse
-- "-h" prints usage; three arguments run the pipeline; anything else
-- is an error.
parse ["-h"] = usage >> exitSuccess
parse [execf, paramf, dataf] = do
  -- Each configuration read returns Maybe; a missing/invalid file
  -- aborts with a labelled error.
  let err x = error $ "Couldn't read " ++ x ++ " configuration"
  let get s = fmap (fromMaybe (err s))
  execConfs <- get "exec" $ readExecConfigs execf
  paramConfs <- get "param" $ readExecParams paramf
  dataConf <- get "data" $ readDataConfig dataf
  execConfigs execConfs paramConfs dataConf
parse _ = unknownCmd >> usage >> exitFailure
unknownCmd = putStrLn "unknown command"
-- | Print the command-line usage text.
usage = do putStrLn "Usage: DataMiningWeka [-h] exec-conf param-conf data-conf\n"
           putStrLn "          exec-conf | applications YAML configuration"
           putStrLn "          param-conf | applications parameters YAML configuration"
           putStrLn "          data-conf | data files YAML configuration"
|
fehu/min-dat--data-mining-arff
|
src/Main.hs
|
mit
| 1,333 | 0 | 14 | 302 | 254 | 129 | 125 | 21 | 1 |
-- Find Numbers with Same Amount of Divisors
-- https://www.codewars.com/kata/55f1614853ddee8bd4000014
module Codewars.G964.Samenbdivisors(countPairsInt) where
-- | Count the values n in [1 .. nMax - diff] for which n and n + diff
-- have the same number of divisors.
countPairsInt :: Int -> Int -> Int
countPairsInt diff nMax =
  length (filter id (zipWith (==) (take (nMax - diff) list) (drop diff list)))

-- | Infinite list of divisor counts: element i (1-based) is d(i).
list :: [Int]
list = map divisorCount [1 ..]
  where
    -- Count divisors up to sqrt n, double, and correct for a perfect
    -- square (whose root would otherwise be counted twice).
    divisorCount n = 2 * lowDivisors n + squareAdjust n
    lowDivisors n = length (1 : [d | d <- [2 .. isqrt n], n `rem` d == 0])
    squareAdjust n = if isqrt n ^ 2 == n then -1 else 0
    isqrt :: Int -> Int
    isqrt = floor . sqrt . fromIntegral
|
gafiatulin/codewars
|
src/6 kyu/Samenbdivisors.hs
|
mit
| 556 | 0 | 18 | 119 | 231 | 128 | 103 | 6 | 2 |
import Complexity
-- Exercises complexity inference by feeding synthetic
-- (inputSize, nOperations) pairs whose ratio matches each class,
-- slightly above, slightly below, and exactly at the boundary.
main = do
  -- (inputSize, nOperations):
  -- case nOp ~= 2.5 inputSize => O(1)
  let pO1_1 = (10000, round (10000 * 2.5))
  -- case nOp ~= 0.9 inputSize => O(1)
  let pO1_2 = (20000, round (20000 * 0.99))
  let pO1_3 = (20000, round (20000 * 1.0))
  putStrLn "Testing complexity inference, comparing N: inputSize to Q: operation amount."
  putStrLn $ "\nO(1) test with Q ~= 2.5N : " ++ show pO1_1
  putStrLn $ showComplexity pO1_1
  putStrLn $ "\nO(1) test with Q ~= 0.99N : " ++ show pO1_2
  putStrLn $ showComplexity pO1_2
  putStrLn $ "\nO(1) test with Q ~= 1N : " ++ show pO1_3
  putStrLn $ showComplexity pO1_3
  -- Logarithmic cases.
  let pOln_1 = (10000, round (10000 * log 15000))
  let pOln_2 = (10000, round (10000 * log 8000 ))
  let pOln_3 = (10000, round (10000 * log 10000))
  putStrLn $ "\nO(ln) test with Q ~= Log(1.5N) * N : " ++ show pOln_1
  putStrLn $ showComplexity pOln_1
  putStrLn $ "\nO(ln) test with Q ~= Log(0.8N) * N : " ++ show pOln_2
  putStrLn $ showComplexity pOln_2
  putStrLn $ "\nO(ln) test with Q ~= Log(N) * N : " ++ show pOln_3
  putStrLn $ showComplexity pOln_3
  -- Linear cases.
  let pOn_1 = (10000, round (10000 * 15500))
  let pOn_2 = (10000, round (10000 * 8000))
  let pOn_3 = (10000, round (10000 ** 2))
  putStrLn $ "\nO(n) test with Q ~= 1.5N * N : " ++ show pOn_1
  putStrLn $ showComplexity pOn_1
  putStrLn $ "\nO(n) test with Q ~= 0.8N * N : " ++ show pOn_2
  putStrLn $ showComplexity pOn_2
  putStrLn $ "\nO(n) test with Q ~= N * N : " ++ show pOn_3
  putStrLn $ showComplexity pOn_3
  -- n log n cases.
  let i1 = 1350
  let pOnLogn_1 = (1000, round (1000 * i1 * log i1))
  let i2 = 800
  let pOnLogn_2 = (1000, round (1000 * i2 * log i2))
  let pOnLogn_3 = (1000, round (1000 ** 2 * log 1000))
  putStrLn $ "\nO(nLog(n)) test with Q ~= M=(1.3N) MLog(M) * N : " ++ show pOnLogn_1
  putStrLn $ showComplexity pOnLogn_1
  putStrLn $ "\nO(nLog(n)) test with Q ~= M=(0.8N) MLog(M) * N : " ++ show pOnLogn_2
  putStrLn $ showComplexity pOnLogn_2
  putStrLn $ "\nO(nLog(n)) test with Q ~= NLog(N) * N : " ++ show pOnLogn_3
  putStrLn $ showComplexity pOnLogn_3
  -- Quadratic cases.
  let pOnn_1 = (1000, round (1000 * 1350 ** 2))
  let pOnn_2 = (1000, round (1000 * 800 ** 2))
  let pOnn_3 = (1000, round (1000 ** 3))
  putStrLn $ "\nO(n^2) test with Q ~= (1.3N)^2 * N : " ++ show pOnn_1
  putStrLn $ showComplexity pOnn_1
  putStrLn $ "\nO(n^2) test with Q ~= (0.8N)^2 * N : " ++ show pOnn_2
  putStrLn $ showComplexity pOnn_2
  putStrLn $ "\nO(n^2) test with Q ~= N^2 * N : " ++ show pOnn_3
  putStrLn $ showComplexity pOnn_3
  -- Exponential cases.
  let pO2Pn_1 = (50, round (50 * 2 ** 67))
  let pO2Pn_2 = (50, round (50 * 2 ** 40))
  let pO2Pn_3 = (50, round (50 * 2 ** 50))
  putStrLn $ "\nO(2^n) test with Q ~= 2^(1.3N) * N : " ++ show pO2Pn_1
  putStrLn $ showComplexity pO2Pn_1
  putStrLn $ "\nO(2^n) test with Q ~= 2^(0.8N) * N : " ++ show pO2Pn_2
  putStrLn $ showComplexity pO2Pn_2
  putStrLn $ "\nO(2^n) test with Q ~= 2^N * N : " ++ show pO2Pn_3
  putStrLn $ showComplexity pO2Pn_3
  -- Factorial cases.
  let pOfn_1 = (20, round (20 * fac 16))
  let pOfn_2 = (20, round (20 * fac 24))
  let pOfn_3 = (20, round (20 * fac 20))
  putStrLn $ "\nO(n!) test with Q ~= (N*4/5)! * N : " ++ show pOfn_1
  putStrLn $ showComplexity pOfn_1
  putStrLn $ "\nO(n!) test with Q ~= (N*6/5)! * N : " ++ show pOfn_2
  putStrLn $ showComplexity pOfn_2
  putStrLn $ "\nO(n!) test with Q ~= N! * N : " ++ show pOfn_3
  putStrLn $ showComplexity pOfn_3
|
range12/there-is-no-B-side
|
tests/ComplexityTest.hs
|
mit
| 3,548 | 0 | 14 | 930 | 1,124 | 526 | 598 | 68 | 1 |
module PutJSON where
import Data.List (intercalate)
import SimpleJSON
-- | Render a 'JValue' as compact JSON text.  'show' on the String
-- payloads supplies the quoting/escaping for strings and object keys.
renderJValue :: JValue -> String
renderJValue (JString s) = show s
renderJValue (JNumber n) = show n
renderJValue (JBool True) = "true"
renderJValue (JBool False) = "false"
renderJValue JNull = "null"
renderJValue (JObject o) = "{" ++ pairs o ++ "}"
  where pairs [] = ""
        pairs ps = intercalate ", " (map renderPair ps)
        renderPair (k,v) = show k ++ ": " ++ renderJValue v
renderJValue (JArray a) = "[" ++ values a ++ "]"
  where values [] = ""
        values vs = intercalate ", " (map renderJValue vs)
-- | Print a rendered JSON value to standard output.
-- (Fixed: the body referenced a non-existent constructor `PutStrLn`;
-- the Prelude function is `putStrLn`.)
putJValue :: JValue -> IO ()
putJValue v = putStrLn (renderJValue v)
|
zhangjiji/real-world-haskell
|
ch5/PutJSON.hs
|
mit
| 663 | 0 | 9 | 143 | 271 | 135 | 136 | 18 | 3 |
module Euler31 where
-- Use a dynamic programming solution to find the number of ways
-- to assemble an amount from smaller amounts
-- Threading the DP table through the computation as a State monad
import Control.Monad.State
import qualified Data.Map as M (empty, Map, insert, lookup)
-- | Number of ways to assemble amount @n@ from the standard UK coin
-- denominations, starting from an empty memo table.
runWays :: Int -> Int
runWays n = evalState (ways denoms n) M.empty
-- | Coin denominations in pence.
denoms :: [Int]
denoms = [1, 2, 5, 10, 20, 50, 100, 200]
-- | Count the ways to assemble amount @n@ from the given denominations,
-- memoising on (amount, remaining denominations).
--
-- Fixed: the original had no base case for a negative amount, so the
-- branch @ways (d:ds) (n-d)@ recursed forever once @n - d@ went below
-- zero (e.g. amount 1 against coin 2).  A negative amount contributes
-- zero ways.
ways :: [Int] -> Int -> (State (M.Map (Int, [Int]) Int) Int)
ways _ 0 = return 1
ways _ n | n < 0 = return 0
ways [] _ = return 0
ways (d:ds) n = do
  r1 <- lookupOrCompute n ds        -- ways without using coin d
  r2 <- lookupOrCompute (n-d) (d:ds) -- ways using at least one coin d
  let v = r1 + r2
  modify $ (M.insert (n, d:ds) v)
  return v

-- | Consult the memo table before recomputing a subproblem.
lookupOrCompute :: Int -> [Int] -> (State (M.Map (Int, [Int]) Int) Int)
lookupOrCompute n ds = get >>= maybe (ways ds n) return . M.lookup (n,ds)
|
nlim/haskell-playground
|
src/Euler31.hs
|
mit
| 814 | 0 | 12 | 168 | 361 | 196 | 165 | 18 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provide ability to upload tarballs to Hackage.
module Stackage.Upload
( -- * Upload
mkUploader
, Uploader
, upload
, UploadSettings
, defaultUploadSettings
, setUploadUrl
, setGetManager
, setCredsSource
, setSaveCreds
-- * Credentials
, HackageCreds
, loadCreds
, saveCreds
, FromFile
-- ** Credentials source
, HackageCredsSource
, fromAnywhere
, fromPrompt
, fromFile
, fromMemory
) where
import Control.Applicative ((<$>), (<*>))
import Control.Exception (bracket)
import qualified Control.Exception as E
import Control.Monad (when)
import Data.Aeson (FromJSON (..),
ToJSON (..),
eitherDecode', encode,
object, withObject,
(.:), (.=))
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import qualified Data.Text.IO as TIO
import Data.Typeable (Typeable)
import Network.HTTP.Client (BodyReader, Manager,
Response,
applyBasicAuth, brRead,
#if MIN_VERSION_http_client(0,5,0)
checkResponse,
#else
checkStatus,
#endif
newManager,
#if MIN_VERSION_http_client(0,4,30)
parseUrlThrow,
#else
parseUrl,
#endif
requestHeaders,
responseBody,
responseStatus,
withResponse)
import Network.HTTP.Client.MultipartFormData (formDataBody, partFile)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.HTTP.Types (statusCode)
import System.Directory (createDirectoryIfMissing,
doesDirectoryExist,
doesFileExist,
getAppUserDataDirectory,
getDirectoryContents, removeDirectoryRecursive,
removeFile)
import System.Exit (ExitCode (ExitSuccess))
import System.FilePath (takeExtension, (</>))
import System.IO (hClose, hFlush,
hGetEcho, hSetEcho,
stdin, stdout)
import System.IO.Temp (withSystemTempDirectory)
import System.Process (StdStream (CreatePipe),
createProcess, cwd,
proc, std_in,
waitForProcess)
-- | Username and password to log into Hackage.
--
-- Since 0.1.0.0
data HackageCreds = HackageCreds
{ hcUsername :: !Text
, hcPassword :: !Text
}
deriving Show
instance ToJSON HackageCreds where
toJSON (HackageCreds u p) = object
[ "username" .= u
, "password" .= p
]
instance FromJSON HackageCreds where
parseJSON = withObject "HackageCreds" $ \o -> HackageCreds
<$> o .: "username"
<*> o .: "password"
-- | A source for getting Hackage credentials.
--
-- Since 0.1.0.0
newtype HackageCredsSource = HackageCredsSource
    { getCreds :: IO (HackageCreds, FromFile) -- ^ Produce credentials plus whether they were read from disk.
    }
-- | Whether the Hackage credentials were loaded from a file.
--
-- This information is useful since, typically, you only want to save the
-- credentials to a file if it wasn't already loaded from there.
--
-- Since 0.1.0.0
type FromFile = Bool
-- | Load Hackage credentials from the given source.
--
-- Since 0.1.0.0
loadCreds :: HackageCredsSource -> IO (HackageCreds, FromFile)
loadCreds source = getCreds source
-- | Save the given credentials to the credentials file.
--
-- Since 0.1.0.0
saveCreds :: HackageCreds -> IO ()
saveCreds creds = credsFile >>= \path -> L.writeFile path (encode creds)
-- | Load the Hackage credentials from the prompt, asking the user to type them
-- in.
--
-- Since 0.1.0.0
fromPrompt :: HackageCredsSource
fromPrompt = HackageCredsSource $ do
    putStr "Hackage username: "
    hFlush stdout
    user <- TIO.getLine
    pass <- promptPassword
    let entered = HackageCreds { hcUsername = user, hcPassword = pass }
    -- False: these credentials were typed in, not read from disk.
    return (entered, False)
-- | Path of the JSON credentials file, creating its directory if needed.
-- Also migrates away from the legacy @stackage-upload@ config directory by
-- deleting it when present.
credsFile :: IO FilePath
credsFile = do
    legacyDir <- getAppUserDataDirectory "stackage-upload"
    legacyExists <- doesDirectoryExist legacyDir
    when legacyExists $ do
        putStrLn $ "Removing old config directory: " ++ legacyDir
        removeDirectoryRecursive legacyDir
    stackageDir <- getAppUserDataDirectory "stackage"
    let dir = stackageDir </> "upload"
    createDirectoryIfMissing True dir
    return (dir </> "credentials.json")
-- | Load the Hackage credentials from the JSON config file.
--
-- Since 0.1.0.0
fromFile :: HackageCredsSource
fromFile = HackageCredsSource $ do
    path <- credsFile
    contents <- L.readFile path
    -- A malformed file is reported via Couldn'tParseJSON; a missing file
    -- surfaces as an IOException from L.readFile.
    either (E.throwIO . Couldn'tParseJSON path)
           (\creds -> return (creds, True))
           (eitherDecode' contents)
-- | Load the Hackage credentials from the given arguments.
--
-- Since 0.1.0.0
fromMemory :: Text -> Text -> HackageCredsSource
fromMemory user pass = HackageCredsSource $
    -- False: supplied in memory, never read from the credentials file.
    return (HackageCreds { hcUsername = user, hcPassword = pass }, False)
-- | Thrown when the credentials file exists but cannot be decoded; carries
-- the file path and the aeson parse error.
data HackageCredsExceptions = Couldn'tParseJSON FilePath String
    deriving (Show, Typeable)
instance E.Exception HackageCredsExceptions
-- | Try to load the credentials from the config file. If that fails, ask the
-- user to enter them.
--
-- Since 0.1.0.0
fromAnywhere :: HackageCredsSource
fromAnywhere = HackageCredsSource $
    -- Fall back to the prompt on either a missing/unreadable file
    -- (IOException) or a malformed one (HackageCredsExceptions).
    getCreds fromFile `E.catches`
        [ E.Handler $ \(_ :: E.IOException) -> getCreds fromPrompt
        , E.Handler $ \(_ :: HackageCredsExceptions) -> getCreds fromPrompt
        ]
-- | Lifted from cabal-install, Distribution.Client.Upload.
-- Reads a password from stdin with terminal echo disabled.
promptPassword :: IO Text
promptPassword = do
    putStr "Hackage password: "
    hFlush stdout
    -- Disable echoing while the password is typed; 'bracket' restores the
    -- original echo setting even if an exception interrupts the read.
    passwd <- bracket (hGetEcho stdin) (hSetEcho stdin) $ \_ -> do
        hSetEcho stdin False
        line <- getLine
        return (T.pack line)
    -- Emit the newline that the suppressed echo swallowed.
    putStrLn ""
    return passwd
-- | Turn the given settings into an @Uploader@.
--
-- Since 0.1.0.0
mkUploader :: UploadSettings -> IO Uploader
mkUploader us = do
    manager <- usGetManager us
    (creds, credsFromFile) <- loadCreds $ usCredsSource us
    -- Only persist credentials that were freshly entered (i.e. not already
    -- loaded from the credentials file) and when saving is enabled.
    when (not credsFromFile && usSaveCreds us) $ saveCreds creds
#if MIN_VERSION_http_client(0,4,30)
    req0 <- parseUrlThrow $ usUploadUrl us
#else
    req0 <- parseUrl $ usUploadUrl us
#endif
    -- Disable http-client's status checking: non-2xx responses are handled
    -- explicitly in the case expression below rather than as exceptions.
    let req1 = req0
            { requestHeaders = [("Accept", "text/plain")]
#if MIN_VERSION_http_client(0,5,0)
            , checkResponse = \_ _ -> return ()
#else
            , checkStatus = \_ _ _ -> Nothing
#endif
            }
    return Uploader
        { upload_ = \fp0 -> withTarball fp0 $ \fp -> do
            let formData = [partFile "package" fp]
            req2 <- formDataBody formData req1
            let req3 = applyBasicAuth
                        (encodeUtf8 $ hcUsername creds)
                        (encodeUtf8 $ hcPassword creds)
                        req2
            putStr $ "Uploading " ++ fp ++ "... "
            hFlush stdout
            withResponse req3 manager $ \res ->
                case statusCode $ responseStatus res of
                    200 -> putStrLn "done!"
                    401 -> do
                        putStrLn "authentication failure"
                        -- Stored credentials are bad: delete them (best
                        -- effort) so the user is re-prompted next time.
                        cfp <- credsFile
                        handleIO (const $ return ()) (removeFile cfp)
                        error "Authentication failure uploading to server"
                    403 -> do
                        putStrLn "forbidden upload"
                        putStrLn "Usually means: you've already uploaded this package/version combination"
                        putStrLn "Ignoring error and continuing, full message from Hackage below:\n"
                        printBody res
                    503 -> do
                        putStrLn "service unavailable"
                        -- Hackage has been known to return 503 even when the
                        -- upload worked, so this is informational, not fatal.
                        putStrLn "This error sometimes gets sent even though the upload succeeded"
                        putStrLn "Check on Hackage to see if your package is present"
                        printBody res
                    code -> do
                        putStrLn $ "unhandled status code: " ++ show code
                        printBody res
                        error $ "Upload failed on " ++ fp
        }
-- | Given either a file, return it. Given a directory, run @cabal sdist@ and
-- get the resulting tarball.
withTarball :: FilePath -> (FilePath -> IO a) -> IO a
withTarball fp0 inner = do
    isFile <- doesFileExist fp0
    if isFile then inner fp0 else withSystemTempDirectory "stackage-upload-tarball" $ \dir -> do
        isDir <- doesDirectoryExist fp0
        when (not isDir) $ error $ "Invalid argument: " ++ fp0
        -- std_in = CreatePipe yields (Just h); stdout/stderr are inherited,
        -- hence the Nothing fields in this pattern match.
        (Just h, Nothing, Nothing, ph) <-
            createProcess $ (proc "cabal" ["sdist", "--builddir=" ++ dir])
                { cwd = Just fp0
                , std_in = CreatePipe
                }
        -- Close cabal's stdin right away; sdist should not need input.
        hClose h
        ec <- waitForProcess ph
        when (ec /= ExitSuccess) $
            error $ "Could not create tarball for " ++ fp0
        -- The temp build dir should contain exactly one .gz file: the sdist
        -- tarball. Anything else is treated as an error.
        contents <- getDirectoryContents dir
        case filter ((== ".gz") . takeExtension) contents of
            [x] -> inner (dir </> x)
            _ -> error $ "Unexpected directory contents after cabal sdist: " ++ show contents
-- | Stream the remaining HTTP response body to stdout, chunk by chunk.
printBody :: Response BodyReader -> IO ()
printBody res =
    loop
  where
    -- brRead yields an empty chunk exactly once, at end of body.
    loop = do
        bs <- brRead $ responseBody res
        when (not $ S.null bs) $ do
            S.hPut stdout bs
            loop
-- | The computed value from a @UploadSettings@.
--
-- Typically, you want to use this with 'upload'.
--
-- Since 0.1.0.0
data Uploader = Uploader
    { upload_ :: !(FilePath -> IO ()) -- ^ Upload the tarball (or cabal project dir) at the given path.
    }
-- | Upload a single tarball with the given @Uploader@.
--
-- Since 0.1.0.0
upload :: Uploader -> FilePath -> IO ()
upload = upload_
-- | Settings for creating an @Uploader@.
--
-- Since 0.1.0.0
data UploadSettings = UploadSettings
    { usUploadUrl :: !String -- ^ Endpoint receiving the multipart upload.
    , usGetManager :: !(IO Manager) -- ^ Action producing the HTTP manager.
    , usCredsSource :: !HackageCredsSource -- ^ Where credentials come from.
    , usSaveCreds :: !Bool -- ^ Persist freshly entered credentials to disk.
    }
-- | Default value for @UploadSettings@.
--
-- Use setter functions to change defaults.
--
-- Since 0.1.0.0
defaultUploadSettings :: UploadSettings
defaultUploadSettings = UploadSettings
    { usUploadUrl = "https://hackage.haskell.org/packages/"
    , usGetManager = newManager tlsManagerSettings
    , usCredsSource = fromAnywhere
    , usSaveCreds = True
    }
-- | Change the upload URL.
--
-- Default: "https://hackage.haskell.org/packages/"
--
-- Since 0.1.0.0
setUploadUrl :: String -> UploadSettings -> UploadSettings
setUploadUrl url settings = settings { usUploadUrl = url }
-- | How to get an HTTP connection manager.
--
-- Default: @newManager tlsManagerSettings@
--
-- Since 0.1.0.0
setGetManager :: IO Manager -> UploadSettings -> UploadSettings
setGetManager getManager settings = settings { usGetManager = getManager }
-- | How to get the Hackage credentials.
--
-- Default: @fromAnywhere@
--
-- Since 0.1.0.0
setCredsSource :: HackageCredsSource -> UploadSettings -> UploadSettings
setCredsSource source settings = settings { usCredsSource = source }
-- | Save new credentials to the config file.
--
-- Default: @True@
--
-- Since 0.1.0.0
setSaveCreds :: Bool -> UploadSettings -> UploadSettings
setSaveCreds save settings = settings { usSaveCreds = save }
-- | Run an action, handling any 'E.IOException' with the given handler.
handleIO :: (E.IOException -> IO a) -> IO a -> IO a
handleIO handler action = E.handle handler action
|
fpco/stackage-upload
|
Stackage/Upload.hs
|
mit
| 12,964 | 0 | 26 | 4,809 | 2,300 | 1,255 | 1,045 | 244 | 5 |
{-|
A WAI adapter to the HTML5 Server-Sent Events API.
If running through a proxy like Nginx you might need to add the
headers:
> [ ("X-Accel-Buffering", "no"), ("Cache-Control", "no-cache")]
-}
module Network.Wai.EventSource (
ServerEvent(..),
eventSourceAppChan,
eventSourceAppIO
) where
import Data.Function (fix)
import Control.Concurrent.Chan (Chan, dupChan, readChan)
import Control.Monad.IO.Class (liftIO)
import Network.HTTP.Types (status200, hContentType)
import Network.Wai (Application, responseStream)
import Network.Wai.EventSource.EventStream
-- | Make a new WAI EventSource application reading events from
-- the given channel.  The channel is duplicated per request so that
-- concurrent clients each receive every event.
eventSourceAppChan :: Chan ServerEvent -> Application
eventSourceAppChan chan req sendResponse =
    liftIO (dupChan chan) >>= \ownChan ->
        eventSourceAppIO (readChan ownChan) req sendResponse
-- | Make a new WAI EventSource application reading events from
-- the given IO action.
eventSourceAppIO :: IO ServerEvent -> Application
eventSourceAppIO src _ sendResponse =
    sendResponse $ responseStream
        status200
        [(hContentType, "text/event-stream")]
        streamBody
  where
    -- Repeatedly pull events and emit them; an event with no builder
    -- representation (e.g. a close event) ends the stream.
    streamBody sendChunk flush = loop
      where
        loop = do
            nextEvent <- src
            case eventToBuilder nextEvent of
                Nothing -> return ()
                Just b -> sendChunk b >> flush >> loop
|
creichert/wai
|
wai-extra/Network/Wai/EventSource.hs
|
mit
| 1,396 | 0 | 16 | 341 | 264 | 146 | 118 | 24 | 2 |
{-# LANGUAGE DeriveDataTypeable, CPP, ScopedTypeVariables #-}
module Jakway.Blackjack.IO.DatabaseCommon
(
createTables,
cardIdMap,
cardPermutations,
cardSqlArr,
cardsSqlValues,
cardToForeignKeyId,
idCardMap,
singleCardToSqlValues,
dropTables,
dropAllTables
)
where
import Jakway.Blackjack.Visibility
import Jakway.Blackjack.Cards
import Jakway.Blackjack.CardOps
import Database.HDBC
import qualified Data.Map.Strict as HashMap
import Control.Exception
import Data.Typeable
import Jakway.Blackjack.Util
import Jakway.Blackjack.IO.TableNames
import Jakway.Blackjack.IO.DatabaseConnection
import qualified Jakway.Blackjack.IO.RDBMS.Postgres as Postgres
import qualified Jakway.Blackjack.IO.RDBMS.SQLite as SQLite
import Control.Monad (join, liftM)
import Data.List (delete)
import Control.Applicative
-- | Create the blackjack schema on the given connection.  The backend is
-- chosen at build time: PostgreSQL when BUILD_POSTGRESQL is defined,
-- SQLite otherwise.
#ifdef BUILD_POSTGRESQL
createTables :: IConnection a => a -> TableNames -> IO ()
createTables = Postgres.createTables
#else
createTables :: IConnection a => a -> TableNames -> IO ()
createTables = SQLite.createTables
#endif
-- | Drop every table reported by 'getTables' inside one transaction.
-- No statement is prepared (and nothing is dropped) when the database has
-- no tables or lacks the sentinel "cards" table.
dropAllTables :: IConnection a => a -> IO()
dropAllTables conn = withTransaction conn $ \t_conn -> getDropStatement t_conn >>= (\dropStatement -> case dropStatement of (Just ds) -> execute ds []
                                                                                                                           Nothing -> return 0) >> return ()
--PostgreSQL never allows parameterized substitution for table
--names so we have to do it manually with Jakway.Blackjack.Util.ssub
        where
              getDropStatement :: (IConnection a) => a -> IO (Maybe Statement)
              getDropStatement p_conn = getTables p_conn >>= (\strs -> if ((strs == []) || (not $ "cards" `elem` strs)) then return Nothing else liftM Just $ prepare conn (dropString strs))
              --remove the last comma
              tablesList strings = reverse . (delete ',') . reverse . join . (map (\s -> s ++ ",")) $ strings
              dropString strs = "DROP TABLE IF EXISTS " ++ (tablesList strs) ++ " " ++ cascadeStr
              --see http://stackoverflow.com/questions/10050988/haskell-removes-all-occurrences-of-a-given-value-from-within-a-list-of-lists
              cascadeStr =
                --cascade so we don't cause errors with foreign keys
#ifdef BUILD_POSTGRESQL
                " CASCADE;"
#else
                ""
#endif
-- | Drop the per-game tables named by 'TableNames' plus the shared cards
-- table, then commit.  The statement list is fixed at build time: the
-- PostgreSQL variant wraps the drops in a transaction and adds CASCADE.
dropTables :: IConnection a => a -> TableNames -> IO ()
dropTables conn tableNames =
        mapM_ (flipInner2 run conn []) dropTableStatements >> commit conn
        --CASCADE is postgres-specific
#ifdef BUILD_POSTGRESQL
        where dropTableStatements = [ "BEGIN TRANSACTION",
                                      "DROP TABLE IF EXISTS cards CASCADE",
                                      "DROP TABLE IF EXISTS " ++ (getPlayerTableName tableNames) ++ " CASCADE",
                                      "DROP TABLE IF EXISTS " ++ (getHandTableName tableNames) ++ " CASCADE",
                                      "DROP TABLE IF EXISTS " ++ (getMatchTableName tableNames) ++ " CASCADE",
                                      "COMMIT TRANSACTION" ]
#else
        where dropTableStatements = [ "DROP TABLE IF EXISTS cards",
                                      "DROP TABLE IF EXISTS " ++ (getPlayerTableName tableNames),
                                      "DROP TABLE IF EXISTS " ++ (getHandTableName tableNames),
                                      "DROP TABLE IF EXISTS " ++ (getMatchTableName tableNames) ]
#endif
-- | Reverse operation of 'cardToForeignKeyId': recover a card from its
-- database primary key.
foreignKeyIdToCard :: Int -> Maybe (Visibility Card)
foreignKeyIdToCard key = HashMap.lookup key idCardMap
-- | The two SQL columns (value, suit) shared by every card row.
-- XXX: for some reason this function wouldn't work in a where binding?
cardSqlArr :: Suit -> CardValue -> [SqlValue]
cardSqlArr suit value = [toSql (fromEnum value), toSql (fromEnum suit)]
-- | Full SQL row for a card; the trailing column flags hidden cards with 1.
singleCardToSqlValues :: Visibility Card -> [SqlValue]
singleCardToSqlValues visCard =
    case visCard of
        Shown (Card suit val) -> cardSqlArr suit val ++ [iToSql 0]
        Hidden (Card suit val) -> cardSqlArr suit val ++ [iToSql 1]
-- | Every card of a fresh deck in both Shown and Hidden form.
cardPermutations :: [Visibility Card]
cardPermutations = map Shown newDeck ++ map Hidden newDeck
-- | Map each card permutation to its 1-based SQL primary key.
cardIdMap :: HashMap.Map (Visibility Card) Int
cardIdMap = HashMap.fromList (zip cardPermutations allIds)
        where allIds = [1 .. length cardPermutations]
-- | Inverse of 'cardIdMap': primary key back to card.
idCardMap :: HashMap.Map Int (Visibility Card)
idCardMap = HashMap.fromList (zip allIds cardPermutations)
        where allIds = [1 .. length cardPermutations]
-- | Primary key for a card, if it is a known permutation.
cardToForeignKeyId :: Visibility Card -> Maybe Int
cardToForeignKeyId card = HashMap.lookup card cardIdMap
-- | Rows for the cards table: the 1-based primary key consed onto each
-- card's (value, suit, hidden) columns.
cardsSqlValues :: [[SqlValue]]
cardsSqlValues = zipWith prependId [1 ..] rows
        where rows = map singleCardToSqlValues cardPermutations
              prependId rowId vals = toSql (rowId :: Int) : vals
--An exception is appropriate for certain cases when reading from the
--database
--Foreign key constraints mean errors *should* never happen, however most
--functions still return Maybe. If handling the maybe makes an (IO) interface
--unnecessarily complicated it's better to use an exception
-- | Thrown when a hand cannot be reconstructed from its database rows.
data BlackjackDatabaseException = HandReadException
                                  deriving (Show, Typeable)
instance Exception BlackjackDatabaseException
|
tjakway/blackjack-simulator
|
src/Jakway/Blackjack/IO/DatabaseCommon.hs
|
mit
| 5,451 | 0 | 15 | 1,411 | 1,074 | 593 | 481 | 70 | 3 |
module Indexer
( indexFeatures
, deleteFeatures
) where
import Config.Config (ElasticSearchConfig, GitConfig)
import Control.Exception (IOException, bracket, handle)
import Control.Monad.Except (runExceptT)
import Data.Text (pack)
import qualified Features.Feature as F
import qualified Features.SearchableFeature as SF
import Products.Product (ProductID)
import Products.ProductRepo (codeRepositoryDir)
import System.Directory (doesFileExist)
import System.IO (IOMode (ReadMode), openFile, hClose, hGetContents)
-- create an abstraction here
-- perhaps ElasticSearchConfig -> IO () = WithElasticSearch
-- maybe even GitConfig -> ElasticSearchConfig -> IO () = WithGitSearch
-- FIX: we're not actually taking advantage of the bulk index
-- we are recurring over the list and 'bulk' indexing a single
-- file each time
-- | Index each feature file in turn, announcing completion at the end.
indexFeatures :: [F.FeatureFile] -> ProductID -> GitConfig -> ElasticSearchConfig -> IO ()
indexFeatures files prodID gitConfig esConfig = go files
  where
    go [] = putStrLn "Finished indexing!"
    go (F.FeatureFile f : rest) =
      indexFeature f prodID gitConfig esConfig >> go rest
-- FIX: we're not actually taking advantage of the bulk index
-- we are recurring over the list and 'bulk' indexing a single
-- file each time
-- | Delete each feature's index entry in turn, announcing completion.
deleteFeatures :: [F.FeatureFile] -> ElasticSearchConfig -> IO ()
deleteFeatures files esConfig = go files
  where
    go [] = putStrLn "Finished deleting!"
    go (F.FeatureFile f : rest) = deleteFeature f esConfig >> go rest
-- | Read one feature file from the product's git checkout and index it in
-- ElasticSearch.  Missing files and IO failures are logged, not thrown.
indexFeature :: FilePath -> ProductID -> GitConfig -> ElasticSearchConfig -> IO ()
indexFeature filePath prodID gitConfig esConfig =
  let featureFileBasePath = codeRepositoryDir prodID gitConfig
      fullFilePath = featureFileBasePath ++ filePath
  in doesFileExist fullFilePath >>= \exists ->
    if not exists
      then putStrLn $ "File does not exist: " ++ fullFilePath
      else
        handle handleIOException $
          bracket (openFile fullFilePath ReadMode) hClose $ \h ->
            hGetContents h >>= \fileContents ->
              -- NOTE(review): hGetContents is lazy; the contents must be
              -- fully forced (via pack) before the bracket closes the
              -- handle. That appears to hold because indexFeatures runs
              -- inside the bracket -- confirm.
              let searchableFeature = SF.SearchableFeature (pack filePath) (pack fileContents) prodID
              in (runExceptT $ SF.indexFeatures [searchableFeature] esConfig) >>= \result ->
                case result of
                  Left err -> putStrLn ("Error indexing feature: " ++ err) >> putStrLn (show searchableFeature)
                  Right _ -> putStrLn ("Successfully indexed: " ++ filePath)
-- | Remove one feature file's document from the search index, logging the
-- outcome either way.
deleteFeature :: FilePath -> ElasticSearchConfig -> IO ()
deleteFeature filePath esConfig = do
  result <- runExceptT $ SF.deleteFeatures [pack filePath] esConfig
  case result of
    Left err -> putStrLn ("Error deleting index: " ++ err) >> putStrLn filePath
    Right _ -> putStrLn ("Successfully deleted: " ++ filePath)
-- | Log an 'IOException' instead of letting it propagate.
handleIOException :: IOException -> IO ()
handleIOException ex = putStrLn $ "IOException: " ++ (show ex)
|
gust/feature-creature
|
legacy/lib/Indexer.hs
|
mit
| 2,951 | 0 | 26 | 580 | 733 | 388 | 345 | 47 | 3 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGPathSegArcRel
(js_setX, setX, js_getX, getX, js_setY, setY, js_getY, getY,
js_setR1, setR1, js_getR1, getR1, js_setR2, setR2, js_getR2, getR2,
js_setAngle, setAngle, js_getAngle, getAngle, js_setLargeArcFlag,
setLargeArcFlag, js_getLargeArcFlag, getLargeArcFlag,
js_setSweepFlag, setSweepFlag, js_getSweepFlag, getSweepFlag,
SVGPathSegArcRel, castToSVGPathSegArcRel, gTypeSVGPathSegArcRel)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Machine-generated accessor pairs for SVGPathSegArcRel: each JS property
-- (x, y, r1, r2, angle, largeArcFlag, sweepFlag) gets a raw
-- @foreign import javascript@ plus a 'MonadIO'-lifted wrapper.  Boolean
-- getters marshal through @(... ? 1 : 0)@ on the JS side.
foreign import javascript unsafe "$1[\"x\"] = $2;" js_setX ::
        SVGPathSegArcRel -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.x Mozilla SVGPathSegArcRel.x documentation>
setX :: (MonadIO m) => SVGPathSegArcRel -> Float -> m ()
setX self val = liftIO (js_setX (self) val)
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
        SVGPathSegArcRel -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.x Mozilla SVGPathSegArcRel.x documentation>
getX :: (MonadIO m) => SVGPathSegArcRel -> m Float
getX self = liftIO (js_getX (self))
foreign import javascript unsafe "$1[\"y\"] = $2;" js_setY ::
        SVGPathSegArcRel -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.y Mozilla SVGPathSegArcRel.y documentation>
setY :: (MonadIO m) => SVGPathSegArcRel -> Float -> m ()
setY self val = liftIO (js_setY (self) val)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
        SVGPathSegArcRel -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.y Mozilla SVGPathSegArcRel.y documentation>
getY :: (MonadIO m) => SVGPathSegArcRel -> m Float
getY self = liftIO (js_getY (self))
foreign import javascript unsafe "$1[\"r1\"] = $2;" js_setR1 ::
        SVGPathSegArcRel -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.r1 Mozilla SVGPathSegArcRel.r1 documentation>
setR1 :: (MonadIO m) => SVGPathSegArcRel -> Float -> m ()
setR1 self val = liftIO (js_setR1 (self) val)
foreign import javascript unsafe "$1[\"r1\"]" js_getR1 ::
        SVGPathSegArcRel -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.r1 Mozilla SVGPathSegArcRel.r1 documentation>
getR1 :: (MonadIO m) => SVGPathSegArcRel -> m Float
getR1 self = liftIO (js_getR1 (self))
foreign import javascript unsafe "$1[\"r2\"] = $2;" js_setR2 ::
        SVGPathSegArcRel -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.r2 Mozilla SVGPathSegArcRel.r2 documentation>
setR2 :: (MonadIO m) => SVGPathSegArcRel -> Float -> m ()
setR2 self val = liftIO (js_setR2 (self) val)
foreign import javascript unsafe "$1[\"r2\"]" js_getR2 ::
        SVGPathSegArcRel -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.r2 Mozilla SVGPathSegArcRel.r2 documentation>
getR2 :: (MonadIO m) => SVGPathSegArcRel -> m Float
getR2 self = liftIO (js_getR2 (self))
foreign import javascript unsafe "$1[\"angle\"] = $2;" js_setAngle
        :: SVGPathSegArcRel -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.angle Mozilla SVGPathSegArcRel.angle documentation>
setAngle :: (MonadIO m) => SVGPathSegArcRel -> Float -> m ()
setAngle self val = liftIO (js_setAngle (self) val)
foreign import javascript unsafe "$1[\"angle\"]" js_getAngle ::
        SVGPathSegArcRel -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.angle Mozilla SVGPathSegArcRel.angle documentation>
getAngle :: (MonadIO m) => SVGPathSegArcRel -> m Float
getAngle self = liftIO (js_getAngle (self))
foreign import javascript unsafe "$1[\"largeArcFlag\"] = $2;"
        js_setLargeArcFlag :: SVGPathSegArcRel -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.largeArcFlag Mozilla SVGPathSegArcRel.largeArcFlag documentation>
setLargeArcFlag :: (MonadIO m) => SVGPathSegArcRel -> Bool -> m ()
setLargeArcFlag self val = liftIO (js_setLargeArcFlag (self) val)
foreign import javascript unsafe "($1[\"largeArcFlag\"] ? 1 : 0)"
        js_getLargeArcFlag :: SVGPathSegArcRel -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.largeArcFlag Mozilla SVGPathSegArcRel.largeArcFlag documentation>
getLargeArcFlag :: (MonadIO m) => SVGPathSegArcRel -> m Bool
getLargeArcFlag self = liftIO (js_getLargeArcFlag (self))
foreign import javascript unsafe "$1[\"sweepFlag\"] = $2;"
        js_setSweepFlag :: SVGPathSegArcRel -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.sweepFlag Mozilla SVGPathSegArcRel.sweepFlag documentation>
setSweepFlag :: (MonadIO m) => SVGPathSegArcRel -> Bool -> m ()
setSweepFlag self val = liftIO (js_setSweepFlag (self) val)
foreign import javascript unsafe "($1[\"sweepFlag\"] ? 1 : 0)"
        js_getSweepFlag :: SVGPathSegArcRel -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPathSegArcRel.sweepFlag Mozilla SVGPathSegArcRel.sweepFlag documentation>
getSweepFlag :: (MonadIO m) => SVGPathSegArcRel -> m Bool
getSweepFlag self = liftIO (js_getSweepFlag (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGPathSegArcRel.hs
|
mit
| 6,051 | 98 | 8 | 841 | 1,374 | 763 | 611 | 78 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.Location
(assign, replace, reload, setHref, getHref, setProtocol,
getProtocol, setHost, getHost, setHostname, getHostname, setPort,
getPort, setPathname, getPathname, setSearch, getSearch, setHash,
getHash, getOrigin, getAncestorOrigins, Location(..),
gTypeLocation)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- Machine-generated JSaddle bindings for the DOM Location interface: one
-- lifted getter/setter pair per URL component, each linked to its MDN page.
-- Setters write via @jss@; getters read via @js@ and unmarshal with
-- 'fromJSValUnchecked'.
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.assign Mozilla Location.assign documentation>
assign :: (MonadDOM m, ToJSString url) => Location -> url -> m ()
assign self url
  = liftDOM (void (self ^. jsf "assign" [toJSVal url]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.replace Mozilla Location.replace documentation>
replace :: (MonadDOM m, ToJSString url) => Location -> url -> m ()
replace self url
  = liftDOM (void (self ^. jsf "replace" [toJSVal url]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.reload Mozilla Location.reload documentation>
reload :: (MonadDOM m) => Location -> m ()
reload self = liftDOM (void (self ^. jsf "reload" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.href Mozilla Location.href documentation>
setHref :: (MonadDOM m, ToJSString val) => Location -> val -> m ()
setHref self val = liftDOM (self ^. jss "href" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.href Mozilla Location.href documentation>
getHref ::
        (MonadDOM m, FromJSString result) => Location -> m result
getHref self = liftDOM ((self ^. js "href") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.protocol Mozilla Location.protocol documentation>
setProtocol ::
            (MonadDOM m, ToJSString val) => Location -> val -> m ()
setProtocol self val
  = liftDOM (self ^. jss "protocol" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.protocol Mozilla Location.protocol documentation>
getProtocol ::
            (MonadDOM m, FromJSString result) => Location -> m result
getProtocol self
  = liftDOM ((self ^. js "protocol") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.host Mozilla Location.host documentation>
setHost :: (MonadDOM m, ToJSString val) => Location -> val -> m ()
setHost self val = liftDOM (self ^. jss "host" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.host Mozilla Location.host documentation>
getHost ::
        (MonadDOM m, FromJSString result) => Location -> m result
getHost self = liftDOM ((self ^. js "host") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.hostname Mozilla Location.hostname documentation>
setHostname ::
            (MonadDOM m, ToJSString val) => Location -> val -> m ()
setHostname self val
  = liftDOM (self ^. jss "hostname" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.hostname Mozilla Location.hostname documentation>
getHostname ::
            (MonadDOM m, FromJSString result) => Location -> m result
getHostname self
  = liftDOM ((self ^. js "hostname") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.port Mozilla Location.port documentation>
setPort :: (MonadDOM m, ToJSString val) => Location -> val -> m ()
setPort self val = liftDOM (self ^. jss "port" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.port Mozilla Location.port documentation>
getPort ::
        (MonadDOM m, FromJSString result) => Location -> m result
getPort self = liftDOM ((self ^. js "port") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.pathname Mozilla Location.pathname documentation>
setPathname ::
            (MonadDOM m, ToJSString val) => Location -> val -> m ()
setPathname self val
  = liftDOM (self ^. jss "pathname" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.pathname Mozilla Location.pathname documentation>
getPathname ::
            (MonadDOM m, FromJSString result) => Location -> m result
getPathname self
  = liftDOM ((self ^. js "pathname") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.search Mozilla Location.search documentation>
setSearch ::
          (MonadDOM m, ToJSString val) => Location -> val -> m ()
setSearch self val = liftDOM (self ^. jss "search" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.search Mozilla Location.search documentation>
getSearch ::
          (MonadDOM m, FromJSString result) => Location -> m result
getSearch self
  = liftDOM ((self ^. js "search") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.hash Mozilla Location.hash documentation>
setHash :: (MonadDOM m, ToJSString val) => Location -> val -> m ()
setHash self val = liftDOM (self ^. jss "hash" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.hash Mozilla Location.hash documentation>
getHash ::
        (MonadDOM m, FromJSString result) => Location -> m result
getHash self = liftDOM ((self ^. js "hash") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.origin Mozilla Location.origin documentation>
getOrigin ::
          (MonadDOM m, FromJSString result) => Location -> m result
getOrigin self
  = liftDOM ((self ^. js "origin") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Location.ancestorOrigins Mozilla Location.ancestorOrigins documentation>
getAncestorOrigins :: (MonadDOM m) => Location -> m DOMStringList
getAncestorOrigins self
  = liftDOM ((self ^. js "ancestorOrigins") >>= fromJSValUnchecked)
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/Location.hs
|
mit
| 6,660 | 0 | 12 | 961 | 1,626 | 889 | 737 | 88 | 1 |
module Main where
import Control.Monad
import Language.Haskell.HLint
import System.Exit
main :: IO ()
main =
  -- Lint the src tree; any hint at all fails the test suite.
  hlint ["src"] >>= \hints -> unless (null hints) exitFailure
|
IMOKURI/IMOKURI.github.io
|
tests/Tests.hs
|
mit
| 174 | 0 | 9 | 31 | 62 | 33 | 29 | 8 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Control.Arrow ((***))
import Data.Monoid (mappend)
import Data.List (stripPrefix, sortBy)
import Data.Ord
import Data.Maybe (fromMaybe)
import Hakyll
import Hakyll.Core.Metadata (lookupString)
import System.Process
import System.FilePath (takeBaseName, (</>))
-- | Compile the current SCSS resource through the external @sass@ tool
-- (stdin/stdout filter, compass mode).
compass :: Compiler (Item String)
compass =
  getResourceString >>=
  withItemBody (unixFilter "sass" ["-s", "--scss", "--compass"])
-- FIXME: Figure out what is going on with undo-batch-mode
-- | Flags shared by the coqc/coqdoc invocations: map @theories/@ to the
-- Poleiro logical root and silence known-noisy warnings.
coqOptions :: [String]
coqOptions = [ "-R", "theories", "Poleiro"
             , "-w", "-notation-overridden,-undo-batch-mode,-ambiguous-paths" ]
-- | Compile the underlying .v file with coqc (for side effects / checking),
-- then render its body-only HTML with coqdoc.
coqdoc :: Compiler (Item String)
coqdoc = do
  coqFileName <- toFilePath <$> getUnderlying
  -- coqc must run first so coqdoc sees a checked file.
  unsafeCompiler $
    readProcess "coqc" (coqOptions ++ [ coqFileName ]) ""
  body <- unsafeCompiler $
    readProcess "coqdoc" [ "--no-index"
                         , "--stdout"
                         , "--body-only"
                         , "--parse-comments"
                         , "-s"
                         , coqFileName ] ""
  makeItem body
-- | Prefix a Coq file name with the theories directory.
coqPath :: FilePath -> FilePath
coqPath = ("theories" </>)
-- | Look up an item's "coqfile" metadata, prefixed with the theories dir.
-- Nothing when the item declares no Coq source.
getCoqFileName :: Item a -> Compiler (Maybe FilePath)
getCoqFileName item = do
  let ident = itemIdentifier item
  fmap coqPath <$> getMetadataField ident "coqfile"
-- | Base URL for linking post sources on GitHub.
gitHubBlobPath :: String
gitHubBlobPath = "https://github.com/arthuraa/poleiro/blob/master"
-- | Template field linking a post to its Coq source on GitHub; empty when
-- the post has no "coqfile" metadata.
gitHubField :: Context a
gitHubField = field "githublink" $ \item -> do
  mCoqFile <- getCoqFileName item
  return $ maybe "" (\fp -> gitHubBlobPath ++ "/" ++ fp) mCoqFile
-- | Build a post from pre-rendered coqdoc HTML (loaded via the item's
-- "coqfile" metadata), rewriting coqdoc's self-links to the post's route.
coqPost :: Compiler (Item String)
coqPost = do
  ident <- getUnderlying
  route <- getRoute ident
  coqFileName <- getMetadataField ident "coqfile"
  case coqFileName of
    Just coqFileName ->
      let fullName = coqPath coqFileName
          basename = takeBaseName coqFileName in do
        postBody <- loadBody $ fromFilePath fullName
        makeItem $ flip withUrls postBody $ \url ->
          -- coqdoc apparently doesn't allow us to change the links of the
          -- generated HTML that point to itself. Therefore, we must do it
          -- by hand.
          case (stripPrefix (basename ++ ".html") url, route) of
            (Just url', Just route) -> "/" ++ route ++ url'
            _ -> url
    Nothing -> error "Couldn't find \"coqfile\" metadata field"
-- | Render a post through the post and main templates, adding prev/next
-- navigation links built from the neighboring post identifiers (either may
-- be absent).  A "content" snapshot is saved for the Atom feed.
postProcessPost :: (Maybe Identifier, Maybe Identifier) ->
                   Item String ->
                   Compiler (Item String)
postProcessPost (prev, next) post =
  -- renderLink yields an empty item when the neighbor is missing or has no
  -- title/route, so the template simply omits that arrow.
  let renderLink (Just id) direction = do
        route <- getRoute id
        title <- getMetadataField id "title"
        case (title, route) of
          (Just title, Just route) -> do
            let linkCtx = constField "title" title `mappend`
                          constField "route" route `mappend`
                          constField "direction" direction
            makeItem "" >>=
              loadAndApplyTemplate "templates/neighbor-post-link.html" linkCtx
          _ -> makeItem ""
      renderLink _ _ = makeItem "" in do
    linkPrev <- renderLink prev "prev"
    linkNext <- renderLink next "next"
    let ctx = constField "prev" (itemBody linkPrev) `mappend`
              constField "next" (itemBody linkNext) `mappend`
              postCtx
    saveSnapshot "content" post >>=
      loadAndApplyTemplate "templates/post.html" ctx >>=
      loadAndApplyTemplate "templates/main.html" defaultContext >>=
      relativizeUrls
--------------------------------------------------------------------------------
main :: IO ()
main = hakyll $ do
  match "css/*.scss" $ do
    route $ setExtension "css"
    compile $ compass
  match "images/*" $ do
    route idRoute
    compile copyFileCompiler
  match "theories/*.v" $ do
    compile coqdoc
  -- Post identifiers sorted ascending by their "date" metadata (posts
  -- without a date sort first via the "" default).
  postsMetadata <- map fst . sortBy (comparing snd) .
                   map (id *** fromMaybe "" . lookupString "date") <$>
                   getAllMetadata "posts/*"
  -- Walk the date-sorted list to find each post's (previous, next) pair.
  -- NOTE(review): the `id > id2` guard compares Identifiers (paths) while
  -- the list is sorted by date; this assumes filename order tracks date
  -- order -- confirm.
  let getNeighbors id = lookup id postsMetadata
      lookup id (id1 : rest@(id2 : id3 : _))
        | id == id1 = (Nothing, Just id2)
        | id > id2 = lookup id rest
        | id == id2 = (Just id1, Just id3)
        | otherwise = (Nothing, Nothing)
      lookup id [id1, id2]
        | id == id1 = (Nothing, Just id2)
        | id == id2 = (Just id1, Nothing)
        | otherwise = (Nothing, Nothing)
      lookup _ _ = (Nothing, Nothing)
  match "posts/*.coqpost" $ do
    route $ setExtension "html"
    compile $ do
      id <- getUnderlying
      coqPost >>= postProcessPost (getNeighbors id)
  match "posts/*.md" $ do
    route $ setExtension "html"
    compile $ do
      id <- getUnderlying
      pandocCompiler >>= postProcessPost (getNeighbors id)
  create ["archives.html"] $ do
    route idRoute
    compile $ do
      let archiveCtx =
            field "posts" (const archives) `mappend`
            constField "title" "Archives" `mappend`
            defaultContext
      makeItem ""
        >>= loadAndApplyTemplate "templates/archives.html" archiveCtx
        >>= loadAndApplyTemplate "templates/main.html" archiveCtx
        >>= relativizeUrls
  create ["atom.xml"] $ do
    route idRoute
    compile $ do
      -- Feed entries come from the "content" snapshots saved by
      -- postProcessPost; only the ten most recent posts are included.
      let feedCtx = postCtx `mappend` bodyField "description"
      posts <- fmap (take 10) . recentFirst =<<
               loadAllSnapshots "posts/*" "content"
      renderAtom feedConfiguration feedCtx posts
  match "index.html" $ do
    route idRoute
    compile $ do
      let indexCtx = field "posts" $ const $ recentPostList 3
          ctx = constField "title" "Main" `mappend` postCtx
      getResourceBody
        >>= applyAsTemplate indexCtx
        >>= loadAndApplyTemplate "templates/main.html" ctx
        >>= relativizeUrls
  match "about.md" $ do
    route $ setExtension "html"
    compile $ do
      let aboutCtx = constField "title" "About" `mappend` defaultContext
      pandocCompiler
        >>= loadAndApplyTemplate "templates/main.html" aboutCtx
        >>= relativizeUrls
  match "templates/*" $ compile templateCompiler
--------------------------------------------------------------------------------
-- | Context used when rendering a post: the GitHub link field, a
-- human-readable publication date, and Hakyll's default fields.
postCtx :: Context String
postCtx = mconcat
  [ gitHubField
  , dateField "date" "%B %e, %Y"
  , defaultContext
  ]
--------------------------------------------------------------------------------
-- | Render the full, newest-first list of posts with the post-item
-- template, from the "content" snapshots.
archives :: Compiler String
archives = do
  posts <- recentFirst =<< loadAllSnapshots "posts/*" "content"
  tpl <- loadBody "templates/post-item.html"
  applyTemplateList tpl postCtx posts

-- | Render the @n@ most recent posts with the post-index template.
recentPostList :: Int -> Compiler String
recentPostList n = do
  posts <- recentFirst =<< loadAllSnapshots "posts/*" "content"
  tpl <- loadBody "templates/post-index.html"
  applyTemplateList tpl postCtx (take n posts)
-- | Static configuration for the Atom feed rendered by 'renderAtom'.
feedConfiguration :: FeedConfiguration
feedConfiguration = FeedConfiguration
  { feedTitle       = "Poleiro: latest posts"
  , feedDescription = "A blog about the Coq proof assistant"
  , feedAuthorName  = "Arthur Azevedo de Amorim"
  , feedAuthorEmail = ""
  , feedRoot        = "http://poleiro.info"
  }
|
arthuraa/poleiro
|
site.hs
|
mit
| 7,621 | 0 | 21 | 2,252 | 1,870 | 909 | 961 | -1 | -1 |
module Main where
import qualified Codec.Compression.GZip as GZ
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as CBS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Foldable
import Database.Kioku
import System.Environment
import System.Exit
-- | Command-line driver for the example kioku database.
--
-- Supported invocations:
--
--   * @cities load@        — load the gzipped cities dump into a data set
--   * @cities index@       — index the data set by city name
--   * @cities query NAME@  — prefix query against the name index
main :: IO ()
main = do
  args <- getArgs
  withKiokuDB defaultKiokuPath $ \db -> do
    case args of
      ("cities":"load":_) -> do
        putStrLn $ "Loading city data."
        count <- do cities <- loadCities "example/data/cities.txt.gz"
                    createDataSet "cities" cities db
        putStrLn $ "Done... loaded " ++ show count ++ " cities"
      ("cities":"index":_) -> do
        putStrLn $ "Indexing cities by name."
        createIndex "cities" "cities.name" cityName db
        putStrLn $ "Done... "
      ("cities":"query":name:_) -> do
        cities <- query "cities.name" (keyPrefix $ CBS.pack name) db
        printCities cities
      _ -> do
        -- Unrecognized invocation: report it and exit non-zero.
        putStrLn $ "Unknown command: " ++ unwords args
        exitWith (ExitFailure 1)
-- | Print one line per city as @NAME - (LAT,LNG)@.
printCities :: [City] -> IO ()
printCities = traverse_ printOne
  where
    printOne city =
      CBS.putStrLn $ BS.concat
        [ cityName city
        , CBS.pack " - ("
        , cityLat city
        , CBS.pack ","
        , cityLng city
        , CBS.pack ")"
        ]
-- | One row of the cities dump.  All fields are kept as the raw bytes
-- taken from the source file (see 'parseCities': columns 1, 4 and 5).
data City = City
  { cityName :: BS.ByteString  -- ^ city name (column 1 of the dump)
  , cityLat  :: BS.ByteString  -- ^ latitude, undecoded (column 4)
  , cityLng  :: BS.ByteString  -- ^ longitude, undecoded (column 5)
  }
-- | kioku serialization: the three fields are stored length-prefixed
-- (255-bounded, per the helper names) in declaration order; 'recall'
-- must consume them in exactly the same order.
instance Memorizable City where
  memorize (City {..}) =
    lengthPrefix255
      [ memorize cityName
      , memorize cityLat
      , memorize cityLng
      ]
  recall =
    unLengthPrefix255 City ( field
                          &. field
                          &. field
                           )
-- | Read the gzipped cities dump, decompress it, and parse it lazily.
loadCities :: FilePath -> IO [City]
loadCities path = do
  raw <- LBS.readFile path
  return (parseCities (GZ.decompress raw))
-- | Parse the tab-separated cities dump into 'City' records, one per
-- line.  Only columns 1 (name), 4 (latitude) and 5 (longitude) are
-- kept, as strict bytestrings.
--
-- Robustness fix: the original indexed the split line with '(!!)', so
-- any line with fewer than six tab-separated fields crashed with an
-- index error once a field was forced.  Malformed lines are now
-- skipped; well-formed input parses exactly as before.
parseCities :: LBS.ByteString -> [City]
parseCities bytes
  | LBS.null bytes = []
  | otherwise =
      let (line, lineRest) = LBS.break (== '\n') bytes
          rest = parseCities (LBS.drop 1 lineRest)
      in case LBS.split '\t' line of
           -- Need at least fields 0..5; anything shorter is malformed.
           (_ : name : _ : _ : lat : lng : _) ->
             City (LBS.toStrict name) (LBS.toStrict lat) (LBS.toStrict lng) : rest
           _ -> rest
|
flipstone/kioku
|
example/Main.hs
|
mit
| 2,251 | 0 | 21 | 678 | 701 | 356 | 345 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE NoMonomorphismRestriction#-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Element
(
Element(Compiler, Interpreter, Processor, (:>))
, toTree
, draw
) where
import GHC.TypeLits
import Data.Proxy
import GHC.Exts (Constraint)
import Diagrams.Prelude hiding (All, (:>))
import Diagram
import Data.Typeable
import Diagrams.TwoD.Text
-- | A helper for applying constraint @c@ to several types at once:
-- @All c '[a, b]@ expands to @(c a, c b)@.
type family All c xs :: Constraint where
  All c '[] = ()
  All c (x ': xs) = (c x, All c xs)
-- | Represents the diagram elements at the type level.  Labels are
-- type-level 'Symbol's; @:>@ feeds the left element as input to the
-- right one (per the module's convention: @sl@ source, @tl@ target,
-- @il@ implementation, @ml@ machine language).
data Element = Interpreter Symbol Symbol
             | Compiler Symbol Symbol Symbol
             | Processor Symbol
             | Element :> Element
-- | Represents a tree of diagram elements at the term level; here the
-- labels are plain 'String's and each node carries its input subtree.
data ElementTree = CompilerNode String String String ElementTree
                 | InterpreterNode String String ElementTree
                 | ProcessorNode String ElementTree
                 | Leaf
                 deriving (Show)

-- | A term-level tree together with a phantom type recording the
-- 'Element' type of its root node.
data Root t = Root ElementTree deriving (Show)
-- | Class for turning 'Element' types into trees at the term level;
-- 'TreeRoot' computes the resulting root element type.
class ToTree d where
  type TreeRoot d :: Element
  toTree :: Proxy d -> Root (TreeRoot d)
-- | Base case: a lone compiler; labels are reflected with 'symbolVal'.
instance (All KnownSymbol '[sl, tl, il]) => ToTree (Compiler sl tl il) where
  type TreeRoot (Compiler sl tl il) = Compiler sl tl il
  toTree _ = Root (CompilerNode sl tl il Leaf)
    where sl = symbolVal (Proxy :: Proxy sl)
          tl = symbolVal (Proxy :: Proxy tl)
          il = symbolVal (Proxy :: Proxy il)

-- | Base case: a lone interpreter.
instance (All KnownSymbol '[sl, ml]) => ToTree (Interpreter sl ml) where
  type TreeRoot (Interpreter sl ml) = Interpreter sl ml
  toTree _ = Root (InterpreterNode sl ml Leaf)
    where sl = symbolVal (Proxy :: Proxy sl)
          ml = symbolVal (Proxy :: Proxy ml)

-- | Base case: a lone processor.
instance (KnownSymbol l) => ToTree (Processor l) where
  type TreeRoot (Processor l) = Processor l
  toTree _ = Root (ProcessorNode l Leaf)
    where l = symbolVal (Proxy :: Proxy l)

-- | Composition: build both subtrees, then connect them with
-- 'inputTo'; the 'InputTo' constraint checks the languages line up.
instance (ToTree a, ToTree b, InputTo (TreeRoot a) (TreeRoot b))
      => ToTree ((a :: Element) :> (b :: Element)) where
  type TreeRoot (a :> b) = NewRoot (TreeRoot a) (TreeRoot b)
  toTree _ = (toTree (Proxy :: Proxy a)) `inputTo` (toTree (Proxy :: Proxy b))
-- | How one element feeds into another, and what the combined element
-- is.  The repeated type variables in the instance heads enforce that
-- the languages match (e.g. an interpreter's machine language must be
-- the next element's source language).
class InputTo (a :: Element) (b :: Element) where
  type NewRoot a b :: Element
  inputTo :: Root a -> Root b -> Root (NewRoot a b)
-- | Interpreter fed to an interpreter.
instance InputTo (Interpreter sl ml) (Interpreter ml ml') where
  type NewRoot (Interpreter sl ml) (Interpreter ml ml') = Interpreter sl ml'
  inputTo i i' = Root $ InterpreterNode sl ml interpreterNode'
    where (Root interpreterNode') = i
          (Root (InterpreterNode sl ml _)) = i'

-- | Interpreter fed to a compiler.
instance InputTo (Interpreter sl ml) (Compiler ml tl il) where
  type NewRoot (Interpreter sl ml) (Compiler ml tl il) = Compiler sl tl il
  inputTo i c = Root $ CompilerNode sl tl il interpreterNode
    where (Root interpreterNode) = i
          (Root (CompilerNode sl tl il _)) = c

-- | Compiler fed to an interpreter.  NOTE(review): the term-level
-- names @il@/@sl@ here are just local bindings for the interpreter's
-- two labels (in order); they do not correspond to the type-level
-- @il@/@sl@, which makes the pattern easy to misread.
instance InputTo (Compiler sl tl il) (Interpreter il ml) where
  type NewRoot (Compiler sl tl il) (Interpreter il ml) = Compiler sl tl ml
  inputTo c i = Root $ InterpreterNode il sl compilerNode
    where (Root compilerNode) = c
          (Root (InterpreterNode il sl _)) = i

-- | Compiler fed to a compiler.
instance InputTo (Compiler sl tl il) (Compiler il tl' il') where
  type NewRoot (Compiler sl tl il) (Compiler il tl' il') = Compiler sl tl' il'
  inputTo c c' = Root $ CompilerNode sl tl il compilerNode
    where (Root compilerNode) = c
          (Root (CompilerNode sl tl il _)) = c'

-- | Compiler run on a processor.
instance InputTo (Compiler sl tl il) (Processor il) where
  type NewRoot (Compiler sl tl il) (Processor il) = Compiler sl tl il
  inputTo c p = Root $ ProcessorNode il compilerNode
    where (Root compilerNode) = c
          (Root (ProcessorNode il _)) = p

-- | Interpreter run on a processor.
instance InputTo (Interpreter sl ml) (Processor ml) where
  type NewRoot (Interpreter sl ml) (Processor ml) = Interpreter sl ml
  inputTo i p = Root $ ProcessorNode il interpreterNode
    where (Root interpreterNode) = i
          (Root (ProcessorNode il _)) = p
-- | Pick the diagram-combining function for a node from the shape of
-- its input subtree (the @tInto*@/@iInto*@/@*OntoI@ combinators come
-- from the Diagram module).
-- NOTE(review): no top-level type signature, and the patterns are not
-- exhaustive — e.g. @ProcessorNode _ Leaf@ and inputs that are
-- themselves 'ProcessorNode's are not covered; confirm those shapes
-- cannot arise via 'InputTo' before relying on this.
renderInputTo (CompilerNode _ _ _ Leaf) = \a b -> b  -- no input: keep only this node's diagram
renderInputTo (InterpreterNode _ _ (Leaf)) = mappend
renderInputTo (CompilerNode _ _ _ (CompilerNode _ _ _ _)) = tIntoT
renderInputTo (CompilerNode _ _ _ (InterpreterNode _ _ _)) = iIntoT
renderInputTo (InterpreterNode _ _ (CompilerNode _ _ _ _)) = tOntoI
renderInputTo (InterpreterNode _ _ (InterpreterNode _ _ _)) = iOntoI
renderInputTo (ProcessorNode _ (CompilerNode _ _ _ _)) = tIntoP
renderInputTo (ProcessorNode _ (InterpreterNode _ _ _)) = iIntoP
-- | Render an 'ElementTree' bottom-up: draw the input subtree first,
-- then combine it with this node's own diagram using the combinator
-- chosen by 'renderInputTo'.
drawElementTree :: (RealFloat n1, Typeable n1,
                    Renderable (Text n1) b1, Renderable (Path V2 n1) b1) =>
                   ElementTree -> QDiagram b1 V2 n1 Any
drawElementTree c@(CompilerNode sl tl il inputNode) =
  renderInputTo c input parent
  where parent = tDiagram sl tl il
        input = drawElementTree inputNode
drawElementTree i@(InterpreterNode sl ml inputNode) =
  renderInputTo i input parent
  where parent = iDiagram sl ml
        input = drawElementTree inputNode
drawElementTree i@(ProcessorNode l inputNode) =
  renderInputTo i input parent
  where parent = pDiagram l
        input = drawElementTree inputNode
drawElementTree Leaf = mempty
-- | Turn a type-level 'Element' description into a renderable diagram:
-- reflect it to a term-level tree with 'toTree', then render it.
draw :: (RealFloat n, Typeable n,
         Renderable (Diagrams.TwoD.Text.Text n) b,
         Renderable (Path V2 n) b,
         ToTree a)
     => Proxy a -> QDiagram b V2 n Any
draw t = drawElementTree element
  where (Root element) = toTree t
|
paldepind/t-diagram
|
src/Element.hs
|
mit
| 5,891 | 0 | 11 | 1,368 | 2,067 | 1,078 | 989 | 126 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module System.Etc.Internal.Extra.EnvMisspell (
EnvMisspell (..)
, getEnvMisspellings
, getEnvMisspellingsPure
, renderEnvMisspellings
, hPrintEnvMisspellings
, reportEnvMisspellingWarnings
) where
import RIO hiding ((<$>), (<>))
import qualified RIO.HashMap as HashMap
import qualified RIO.Text as Text
import qualified RIO.Vector as Vector
import System.Environment (getEnvironment)
import qualified Text.EditDistance as Distance
import System.Etc.Internal.Spec.Types
import Text.PrettyPrint.ANSI.Leijen
-- | A warning pairing an environment variable that is actually set
-- with the spec-declared name it is suspiciously close to.
data EnvMisspell
  = EnvMisspell {
    currentText    :: Text  -- ^ variable name found in the environment
  , suggestionText :: Text  -- ^ close spec-declared name to suggest
  }
  deriving (Show, Eq, Generic)
-- | Collect every environment-variable name declared anywhere in the
-- spec by folding over the (possibly nested) config values.
lookupSpecEnvKeys :: ConfigSpec a -> Vector Text
lookupSpecEnvKeys spec =
  let foldEnvSettings val acc = case val of
        ConfigValue { configSources } ->
          maybe acc (`Vector.cons` acc) (envVar configSources)
        SubConfig hsh -> HashMap.foldr foldEnvSettings acc hsh
  in foldEnvSettings (SubConfig $ specConfigValues spec) Vector.empty
{-|
Compare every environment-variable name in @env@ against those the spec
declares, and report near-misses.  Runs in the 'Vector' monad: each
(spec name, env name) pair whose Levenshtein distance is between 1 and
3 (inclusive) yields one 'EnvMisspell'; distance 0 is an exact match
and is deliberately not reported.
-}
getEnvMisspellingsPure :: ConfigSpec a -> Vector Text -> Vector EnvMisspell
getEnvMisspellingsPure spec env = do
  specEnvName <- lookupSpecEnvKeys spec
  currentEnvName <- env
  let distance = Distance.levenshteinDistance Distance.defaultEditCosts
                   (Text.unpack specEnvName)
                   (Text.unpack currentEnvName)
  guard (distance >= 1 && distance < 4)
  return $ EnvMisspell currentEnvName specEnvName
{-|
Like 'getEnvMisspellingsPure', but reads the names from the current
process environment.  Written with 'fmap' and '&' because this module
hides RIO's @<$>@ in favour of the pretty-printer's operator.
-}
getEnvMisspellings :: ConfigSpec a -> IO (Vector EnvMisspell)
getEnvMisspellings spec =
  getEnvironment & fmap (Vector.fromList . map (Text.pack . fst)) & fmap
    (getEnvMisspellingsPure spec)
{-|
Pretty-print the misspelling warnings, one line per entry.

IMPORTANT: @<$>@ below is /not/ 'fmap' — this module hides RIO's @<$>@
(see the import list) and uses the line-break composition operator from
"Text.PrettyPrint.ANSI.Leijen", so the fold stacks the warnings
vertically and the two trailing applications append blank lines.  Do
not "simplify" it to 'fmap'.
-}
renderEnvMisspellings :: Vector EnvMisspell -> Doc
renderEnvMisspellings misspells
  | Vector.null misspells
  = mempty
  | otherwise
  = misspells
    & Vector.map
        (\misspell ->
          text "WARNING: Environment variable `"
            <> text (Text.unpack $ currentText misspell)
            <> text "' found, perhaps you meant `"
            <> text (Text.unpack $ suggestionText misspell)
            <> text "'"
        )
    & Vector.foldl' (<$>) mempty
    & (<$> mempty)
    & (<$> mempty)
{-|
Render the misspelling warnings to the given handle.
-}
hPrintEnvMisspellings :: Handle -> Vector EnvMisspell -> IO ()
hPrintEnvMisspellings h = hPutDoc h . renderEnvMisspellings

{-|
Check the live environment against the spec and print any likely
misspellings to stderr.
-}
reportEnvMisspellingWarnings :: ConfigSpec a -> IO ()
reportEnvMisspellingWarnings spec =
  getEnvMisspellings spec >>= hPrintEnvMisspellings stderr
|
roman/Haskell-etc
|
etc/src/System/Etc/Internal/Extra/EnvMisspell.hs
|
mit
| 2,716 | 0 | 20 | 619 | 663 | 351 | 312 | 66 | 2 |
import Drawing
import Exercises
import Geometry
-- | Render the hexagon construction.
main = drawPicture myPicture

-- | Compass construction of a regular hexagon inscribed in the circle
-- centred at O through A: D is chosen on line OA beyond O from A (via
-- 'beyond'), and the remaining vertices are intersections of
-- radius-length circles centred at A and D.
myPicture points =
  drawPoints [a,b,c,d,e,f] &
  drawLabels [a,b,c,d,e,f] ["A","B","C","D","E","F"] &
  drawPolygon [a,b,c,d,e,f] &
  drawPointLabel o "O" &
  drawSegment (o,a) &
  drawSegment (o,b) &
  drawSegment (o,c) &
  drawSegment (o,d) &
  drawSegment (o,e) &
  drawSegment (o,f) &
  drawArc (b,a,f) &
  drawArc (c,d,e) &
  message "Hexagon"
  where [o,a] = take 2 points
        Just d = find (beyond (a,o)) $ line_circle (o,a) (o,a)
        [b,f] = circle_circle (o,a) (a,o)
        [e,c] = circle_circle (o,d) (d,o)
|
alphalambda/hsmath
|
src/Learn/Geometry/hexagon.hs
|
gpl-2.0
| 641 | 0 | 18 | 168 | 365 | 205 | 160 | 22 | 1 |
module Cake.Rules where
import Cake.Core
import Cake.Process
import Cake.Tex
import qualified Text.ParserCombinators.Parsek as Parsek
------------------------------------------------------
-- Patterns
-- | Pattern matching a target name that ends in the given extension;
-- yields the full name and the name with the extension stripped.
extension :: String -> P (String,String)
extension s = do
  base <- Parsek.many Parsek.anySymbol
  _ <- Parsek.string s
  return (base++s,base)

-- | Pattern matching any one of the given extensions.
anyExtension :: [String] -> P (String,String)
anyExtension ss = foldr (<|>) empty (map extension ss)
--------------------------------------------------------------
-- Rules
-- | Build a rule producing a file with extension @outExt@ from the
-- file with the same base name and extension @inExt@, by running
-- @f output input@ after declaring the dependency on the input.
simple :: String -> [Char] -> (String -> [Char] -> Act ()) -> Rule
simple outExt inExt f = extension outExt <== \(output,base) ->
  let input = base ++ inExt
  in produce output $ do
       need input
       f output input
-- | @.markdown -> .tex@ via pandoc (standalone document).
tex_markdown_standalone :: Rule
tex_markdown_standalone = simple ".tex" ".markdown" $ \o i ->
  pandoc i "latex" ["--standalone"]
{-
html_markdown_standalone = simple ".html" ".markdown" $ \o i ->
  system ["pandoc","--tab-stop=2","--standalone","-f","markdown","-t","latex",
          "-o", o, i]
-}
-- | @.tex -> .pdf@ via latexmk.
pdf_tex :: Rule
pdf_tex = simple ".pdf" ".tex" $ \o i ->
  system ["latexmk","-pdf",i]
{-
pdf_tex_biblatex = anyExtension [".pdf",".aux"] <== \(_,c) ->
  pdflatexBibtex c
-}
-- | @.lhs -> .tex@ via lhs2TeX.
tex_lhs :: Rule
tex_lhs = extension ".tex" <== \(_,c) -> lhs2tex c
-- | @.pdf@ or @.aux@ from TeX with a pdflatex/bibtex cycle.
pdf_tex_biblatex :: Rule
pdf_tex_biblatex = anyExtension [".pdf",".aux"] <== \(_,c) ->
  pdflatexBibtex c
-- | @.pdf@ from TeX with a pdflatex/bibtex cycle.
pdf_tex_bibtex :: Rule
pdf_tex_bibtex = extension ".pdf" <== \(_,c) -> pdflatexBibtex c
-- | Default rule set.  NOTE(review): no type signature (presumably
-- @Rule@) — consider adding one.
allRules = tex_markdown_standalone
           -- <|> pdf_tex_bibtex
           <|> tex_lhs
|
jyp/Cake
|
Cake/Rules.hs
|
gpl-2.0
| 1,595 | 0 | 12 | 289 | 438 | 237 | 201 | 33 | 1 |
module PFQDaemon where
import Config
import Network.PFQ.Lang
import Network.PFQ.Lang.Default
import Network.PFQ.Lang.Experimental
-- | Group configuration picked up by pfqd; empty by default.
config = [] :: [Group]
|
pfq/PFQ
|
user/pfqd/pfqd.hs
|
gpl-2.0
| 155 | 0 | 5 | 18 | 40 | 27 | 13 | 6 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Metainfo.Provider
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | This module provides the infos collected by the server before
--
---------------------------------------------------------------------------------
module IDE.Metainfo.Provider (
getIdentifierDescr
, getIdentifiersStartingWith
, getCompletionOptions
, getDescription
, getActivePackageDescr
, searchMeta
, initInfo -- Update and rebuild
, updateSystemInfo
, rebuildSystemInfo
, updateWorkspaceInfo
, rebuildWorkspaceInfo
, getPackageInfo -- Just retreive from State
, getWorkspaceInfo
, getSystemInfo
, getPackageImportInfo -- Scope for the import tool
, getAllPackageIds
, getAllPackageIds'
) where
import Prelude ()
import Prelude.Compat hiding(readFile)
import System.IO (hClose, openBinaryFile, IOMode(..))
import System.IO.Strict (readFile)
import qualified Data.Map as Map
import Control.Monad (void, filterM, foldM, liftM, when)
import System.FilePath
import System.Directory
import Data.List (nub, (\\), find, partition, maximumBy, foldl')
import Data.Maybe (catMaybes, fromJust, isJust, mapMaybe, fromMaybe)
import Distribution.Package hiding (depends,packageId)
import qualified Data.Set as Set
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as BSL
import Distribution.Version
import Distribution.ModuleName
import Control.DeepSeq
import IDE.Utils.FileUtils
import IDE.Core.State
import Data.Char (toLower,isUpper,toUpper,isLower)
import Text.Regex.TDFA
import qualified Text.Regex.TDFA as Regex
import System.IO.Unsafe (unsafePerformIO)
import Text.Regex.TDFA.Text (execute,compile)
import Data.Binary.Shared (decodeSer)
import Language.Haskell.Extension (KnownExtension)
import Distribution.Text (display)
import IDE.Core.Serializable ()
import Data.Map (Map(..))
import Control.Exception (SomeException(..), catch)
import IDE.Utils.ServerConnection(doServerCommand)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Distribution.PackageDescription (hsSourceDirs)
import System.Log.Logger (infoM)
import Data.Text (Text)
import qualified Data.Text as T (null, isPrefixOf, unpack, pack)
import Data.Monoid ((<>))
import qualified Control.Arrow as A (Arrow(..))
import Data.Function (on)
import Distribution.Package (PackageIdentifier)
-- ---------------------------------------------------------------------
-- Updating metadata
--
--
-- | Update and initialize metadata for the world -- Called at startup.
-- When the preferences ask for collection at start, run the collector
-- first; in either case load the system metadata, refresh the
-- workspace scope, fire 'InfoChanged', and only then run @continuation@.
initInfo :: IDEAction -> IDEAction
initInfo continuation = do
  prefs <- readIDE prefs
  if collectAtStart prefs
    then do
      ideMessage Normal "Now updating system metadata ..."
      callCollector False True True $ \ _ -> do
        ideMessage Normal "Finished updating system metadata"
        doLoad
    else doLoad
  where
    -- Load the (possibly just refreshed) metadata, then update the
    -- workspace info before handing control back.
    doLoad = do
      ideMessage Normal "Now loading metadata ..."
      loadSystemInfo
      ideMessage Normal "Finished loading metadata"
      updateWorkspaceInfo' False $ \ _ -> do
        void (triggerEventIDE (InfoChanged True))
        continuation
-- | Incrementally refresh the system metadata, then the workspace
-- metadata, and announce the change.
updateSystemInfo :: IDEAction
updateSystemInfo = do
  liftIO $ infoM "leksah" "update sys info called"
  updateSystemInfo' False $ \ _ ->
    updateWorkspaceInfo' False $ \ _ -> void (triggerEventIDE (InfoChanged False))

-- | Like 'updateSystemInfo', but forces a full rebuild of the metadata.
rebuildSystemInfo :: IDEAction
rebuildSystemInfo = do
  liftIO $ infoM "leksah" "rebuild sys info called"
  updateSystemInfo' True $ \ _ ->
    updateWorkspaceInfo' True $ \ _ ->
      void (triggerEventIDE (InfoChanged False))
-- | Incrementally refresh the workspace metadata and announce the
-- change; a no-op while the IDE is still starting up.
updateWorkspaceInfo :: IDEAction
updateWorkspaceInfo = do
  liftIO $ infoM "leksah" "update workspace info called"
  currentState' <- readIDE currentState
  case currentState' of
    IsStartingUp -> return ()
    _ ->
      updateWorkspaceInfo' False $ \ _ ->
        void (triggerEventIDE (InfoChanged False))

-- | Like 'updateWorkspaceInfo', but forces a rebuild (and does not
-- check the startup state).
rebuildWorkspaceInfo :: IDEAction
rebuildWorkspaceInfo = do
  liftIO $ infoM "leksah" "rebuild workspace info called"
  updateWorkspaceInfo' True $ \ _ ->
    void (triggerEventIDE (InfoChanged False))
-- | All installed package ids, with lookup failures collapsed to @[]@.
getAllPackageIds :: IDEM [PackageIdentifier]
getAllPackageIds = either (const []) id <$> getAllPackageIds'

-- | All installed package ids for the workspace's package dirs, or an
-- error message (e.g. when ghc-pkg cannot be run).
getAllPackageIds' :: IDEM (Either Text [PackageIdentifier])
getAllPackageIds' = do
  mbWorkspace <- readIDE workspace
  liftIO . getInstalledPackageIds' $ map ipdPackageDir (maybe [] wsAllPackages mbWorkspace)

-- | The package databases relevant to the workspace's packages.
getAllPackageDBs :: IDEM [[FilePath]]
getAllPackageDBs = do
  mbWorkspace <- readIDE workspace
  liftIO . getPackageDBs $ map ipdPackageDir (maybe [] wsAllPackages mbWorkspace)
--
-- | Load all infos for all installed and exposed packages
-- (see shell command: ghc-pkg list)
--
loadSystemInfo :: IDEAction
loadSystemInfo = do
  collectorPath <- liftIO getCollectorPath
  mbPackageIds <- getAllPackageIds'
  case mbPackageIds of
    Left e -> logMessage ("Please check that ghc-pkg is in your PATH and restart leksah:\n " <> e) ErrorTag
    Right packageIds -> do
      -- Read the collector's per-package metadata files and fold them
      -- into a single scope stored in the IDE state.
      packageList <- liftIO $ mapM (loadInfosForPackage collectorPath)
                                   (nub packageIds)
      let scope = foldr buildScope (PackScope Map.empty getEmptyDefaultScope)
                        $ catMaybes packageList
      -- liftIO performGC
      modifyIDE_ (\ide -> ide{systemInfo = Just (GenScopeC (addOtherToScope scope False))})
      return ()
--
-- | Updates the system info
--
-- | Refresh the system scope: diff the installed package ids against
-- the cached scope, re-run the collector if anything changed, and merge
-- the reloaded package infos into the IDE state before calling
-- @continuation@.
updateSystemInfo' :: Bool -> (Bool -> IDEAction) -> IDEAction
updateSystemInfo' rebuild continuation = do
  ideMessage Normal "Now updating system metadata ..."
  wi <- getSystemInfo
  case wi of
    Nothing -> loadSystemInfo
    Just (GenScopeC (PackScope psmap psst)) -> do
      mbPackageIds <- getAllPackageIds'
      case mbPackageIds of
        Left e -> logMessage ("Please check that ghc-pkg is in your PATH and restart leksah:\n " <> e) ErrorTag
        Right packageIds -> do
          -- NOTE(review): despite the name, this keeps the installed
          -- packages that are ALREADY in the cached scope; genuinely
          -- new ones would need @not . (`Map.member` psmap)@.  Confirm
          -- whether reloading known packages is the intent here.
          let newPackages = filter (`Map.member` psmap) packageIds
          -- Cached packages that are no longer installed.
          let trashPackages = filter (`notElem` packageIds) (Map.keys psmap)
          if null newPackages && null trashPackages
            then continuation True
            else
              callCollector rebuild True True $ \ _ -> do
                collectorPath <- lift getCollectorPath
                newPackageInfos <- liftIO $ mapM (loadInfosForPackage collectorPath)
                                                 newPackages
                let psmap2 = foldr ((\ e m -> Map.insert (pdPackage e) e m) . fromJust) psmap
                                   (filter isJust newPackageInfos)
                let psmap3 = foldr Map.delete psmap2 trashPackages
                let scope :: PackScope (Map Text [Descr])
                      = foldr buildScope (PackScope Map.empty symEmpty)
                              (Map.elems psmap3)
                modifyIDE_ (\ide -> ide{systemInfo = Just (GenScopeC (addOtherToScope scope False))})
                continuation True
  ideMessage Normal "Finished updating system metadata"
-- | The empty symbol table used to seed a fresh 'PackScope'.
getEmptyDefaultScope :: Map Text [Descr]
getEmptyDefaultScope = symEmpty
--
-- | Rebuilds system info
--
-- | Force the collector to rebuild all system metadata, then reload
-- it into the IDE state and run @continuation@.
rebuildSystemInfo' :: (Bool -> IDEAction) -> IDEAction
rebuildSystemInfo' continuation =
  callCollector True True True $ \ _ -> do
    loadSystemInfo
    continuation True
-- ---------------------------------------------------------------------
-- Metadata for the workspace and active package
--
-- | Refresh the workspace scope (all workspace packages plus their
-- system-scope dependencies) and, when there is an active package, its
-- dedicated package scope.  Passes whether a workspace existed to
-- @continuation@.
updateWorkspaceInfo' :: Bool -> (Bool -> IDEAction) -> IDEAction
updateWorkspaceInfo' rebuild continuation = do
  postAsyncIDE $ ideMessage Normal "Now updating workspace metadata ..."
  mbWorkspace <- readIDE workspace
  systemInfo' <- getSystemInfo
  case mbWorkspace of
    Nothing -> do
      liftIO $ infoM "leksah" "updateWorkspaceInfo' no workspace"
      modifyIDE_ (\ide -> ide{workspaceInfo = Nothing, packageInfo = Nothing})
      continuation False
    Just ws ->
      updatePackageInfos rebuild (wsAllPackages ws) $ \ _ packDescrs -> do
        -- Dependencies that live outside the workspace, resolved
        -- against the system scope.
        let dependPackIds = nub (concatMap pdBuildDepends packDescrs) \\ map pdPackage packDescrs
        let packDescrsI = case systemInfo' of
              Nothing -> []
              Just (GenScopeC (PackScope pdmap _)) ->
                mapMaybe (`Map.lookup` pdmap) dependPackIds
        let scope1 :: PackScope (Map Text [Descr])
              = foldr buildScope (PackScope Map.empty symEmpty) packDescrs
        let scope2 :: PackScope (Map Text [Descr])
              = foldr buildScope (PackScope Map.empty symEmpty) packDescrsI
        modifyIDE_ (\ide -> ide{workspaceInfo = Just
            (GenScopeC (addOtherToScope scope1 True), GenScopeC(addOtherToScope scope2 False))})
        -- Now care about active package
        activePack <- readIDE activePack
        case activePack of
          Nothing -> modifyIDE_ (\ ide -> ide{packageInfo = Nothing})
          Just pack ->
            case filter (\pd -> pdPackage pd == ipdPackageId pack) packDescrs of
              [pd] -> let impPackDescrs =
                            case systemInfo' of
                              Nothing -> []
                              Just (GenScopeC (PackScope pdmap _)) ->
                                mapMaybe (`Map.lookup` pdmap) (pdBuildDepends pd)
                          -- The imported from the workspace should be treated different
                          workspacePackageIds = map ipdPackageId (wsAllPackages ws)
                          -- NOTE: the lambda's @pd@ shadows the outer @pd@.
                          impPackDescrs' = filter (\pd -> pdPackage pd `notElem` workspacePackageIds) impPackDescrs
                          impPackDescrs'' = mapMaybe
                            (\ pd -> if pdPackage pd `elem` workspacePackageIds
                                       then find (\ pd' -> pdPackage pd == pdPackage pd') packDescrs
                                       else Nothing)
                            impPackDescrs
                          scope1 :: PackScope (Map Text [Descr])
                            = buildScope pd (PackScope Map.empty symEmpty)
                          scope2 :: PackScope (Map Text [Descr])
                            = foldr buildScope (PackScope Map.empty symEmpty)
                                    (impPackDescrs' ++ impPackDescrs'')
                      in modifyIDE_ (\ide -> ide{packageInfo = Just
                            (GenScopeC (addOtherToScope scope1 False),
                             GenScopeC(addOtherToScope scope2 False))})
              _ -> modifyIDE_ (\ide -> ide{packageInfo = Nothing})
        continuation True
        postAsyncIDE $ ideMessage Normal "Finished updating workspace metadata"
-- | Update the metadata on several packages, threading an accumulator
-- through a CPS loop.  Note: descriptors are prepended, so the list
-- handed to @continuation@ is in reverse order of @pkgs@.
updatePackageInfos :: Bool -> [IDEPackage] -> (Bool -> [PackageDescr] -> IDEAction) -> IDEAction
updatePackageInfos rebuild pkgs continuation = do
  -- calculate list of known packages once
  knownPackages <- getAllPackageIds
  updatePackageInfos' [] knownPackages rebuild pkgs continuation
  where
    -- NOTE: the pattern variable 'tail' shadows Prelude.tail, and the
    -- inner 'continuation' shadows the outer one.
    updatePackageInfos' collector _ _ [] continuation = continuation True collector
    updatePackageInfos' collector knownPackages rebuild (hd:tail) continuation =
      updatePackageInfo knownPackages rebuild hd $ \ _ packDescr ->
        updatePackageInfos' (packDescr : collector) knownPackages rebuild tail continuation
-- | Update the metadata on one package: locate the source file for
-- every module, decide which ones are stale, run the workspace
-- collector on them, and rebuild this package's 'PackageDescr' from
-- the per-module metadata cache.
-- NOTE(review): the local binding @pi@ (in @where@) shadows
-- Prelude.pi, and @ic@ below appears unused in this function.
updatePackageInfo :: [PackageIdentifier] -> Bool -> IDEPackage -> (Bool -> PackageDescr -> IDEAction) -> IDEAction
updatePackageInfo knownPackages rebuild idePack continuation = do
  liftIO $ infoM "leksah" ("updatePackageInfo " ++ show rebuild ++ " " ++ show (ipdPackageId idePack))
  workspInfoCache' <- readIDE workspInfoCache
  let (packageMap, ic) = case pi `Map.lookup` workspInfoCache' of
        Nothing -> (Map.empty,True)
        Just m -> (m,False)
  -- Pair every library module with its source file, reusing cached
  -- paths when available.
  modPairsMb <- liftIO $ mapM (\(modName, bi) -> do
      sf <- case LibModule modName `Map.lookup` packageMap of
        Nothing -> findSourceFile (srcDirs' bi) haskellSrcExts modName
        Just (_,Nothing,_) -> findSourceFile (srcDirs' bi) haskellSrcExts modName
        Just (_,Just fp,_) -> return (Just fp)
      return (LibModule modName, sf))
    $ Map.toList $ ipdModules idePack
  mainModules <- liftIO $ mapM (\(fn, bi, isTest) -> do
      mbFn <- findSourceFile' (srcDirs' bi) fn
      return (MainModule (fromMaybe fn mbFn), mbFn))
    (ipdMain idePack)
  -- we want all Main modules since they may be several with different files
  let modPairsMb' = mainModules ++ modPairsMb
  let (modWith,modWithout) = partition (\(x,y) -> isJust y) modPairsMb'
  let modWithSources = map (A.second fromJust) modWith
  let modWithoutSources = map fst modWithout
  -- Now see which modules have to be truely updated
  modToUpdate <- if rebuild
    then return modWithSources
    else liftIO $ figureOutRealSources idePack modWithSources
  liftIO . infoM "leksah" $ "updatePackageInfo modToUpdate " ++ show (map (displayModuleKey.fst) modToUpdate)
  callCollectorWorkspace
    rebuild
    (ipdPackageDir idePack)
    (ipdPackageId idePack)
    (map (\(x,y) -> (T.pack $ display (moduleKeyToName x),y)) modToUpdate)
    (\ b -> do
      let buildDepends = findFittingPackages knownPackages (ipdDepends idePack)
      collectorPath <- liftIO getCollectorPath
      let packageCollectorPath = collectorPath </> T.unpack (packageIdentifierToString pi)
      -- NOTE: these bindings shadow the outer packageMap/modWithout.
      (moduleDescrs,packageMap, changed, modWithout)
        <- liftIO $ foldM
             (getModuleDescr packageCollectorPath)
             ([],packageMap,False,modWithoutSources)
             modPairsMb'
      when changed $ modifyIDE_ (\ide -> ide{workspInfoCache =
        Map.insert pi packageMap workspInfoCache'})
      continuation True PackageDescr {
        pdPackage = pi,
        pdMbSourcePath = Just $ ipdCabalFile idePack,
        pdModules = moduleDescrs,
        pdBuildDepends = buildDepends})
  where
    basePath = normalise $ takeDirectory (ipdCabalFile idePack)
    srcDirs' bi = map (basePath </>) ("dist/build":hsSourceDirs bi)
    pi = ipdPackageId idePack
-- | Keep only the modules whose metadata actually needs regenerating:
-- those whose collector file or source file is missing, or whose
-- source is newer than the collected metadata.
figureOutRealSources :: IDEPackage -> [(ModuleKey,FilePath)] -> IO [(ModuleKey,FilePath)]
figureOutRealSources idePack modWithSources = do
  collectorPath <- getCollectorPath
  let packageCollectorPath = collectorPath </> T.unpack (packageIdentifierToString $ ipdPackageId idePack)
  filterM (ff packageCollectorPath) modWithSources
  where
    ff packageCollectorPath (md ,fp) = do
      let collectorModulePath = packageCollectorPath </> moduleCollectorFileName md <.> leksahMetadataWorkspaceFileExtension
      existCollectorFile <- doesFileExist collectorModulePath
      existSourceFile <- doesFileExist fp
      if not existSourceFile || not existCollectorFile
        then return True -- Maybe with preprocessing
        else do
          sourceModTime <- getModificationTime fp
          collModTime <- getModificationTime collectorModulePath
          return (sourceModTime > collModTime)
-- | Fold step used by 'updatePackageInfo': resolve one module's
-- descriptor, preferring the in-memory cache when the metadata file's
-- timestamp is unchanged, reloading from disk otherwise, and recording
-- modules whose metadata could not be loaded in the problem list.
-- The Bool in the accumulator marks whether the cache was modified.
getModuleDescr :: FilePath
               -> ([ModuleDescr],ModuleDescrCache,Bool,[ModuleKey])
               -> (ModuleKey, Maybe FilePath)
               -> IO ([ModuleDescr],ModuleDescrCache,Bool,[ModuleKey])
getModuleDescr packageCollectorPath (modDescrs,packageMap,changed,problemMods) (modName,mbFilePath) =
  case modName `Map.lookup` packageMap of
    Just (eTime,mbFp,mdescr) -> do
      existMetadataFile <- doesFileExist moduleCollectorPath
      if existMetadataFile
        then do
          modificationTime <- liftIO $ getModificationTime moduleCollectorPath
          if modificationTime == eTime
            -- Cache hit: metadata file unchanged since it was cached.
            then return (mdescr:modDescrs,packageMap,changed,problemMods)
            else do
              liftIO . infoM "leksah" $ "getModuleDescr loadInfo: " ++ displayModuleKey modName
              mbNewDescr <- loadInfosForModule moduleCollectorPath
              case mbNewDescr of
                Just newDescr -> return (newDescr:modDescrs,
                    Map.insert modName (modificationTime,mbFilePath,newDescr) packageMap,
                    True, problemMods)
                -- Load failed: keep the stale descriptor but flag the module.
                Nothing -> return (mdescr:modDescrs,packageMap,changed,
                    modName : problemMods)
        else return (mdescr:modDescrs,packageMap,changed, modName : problemMods)
    Nothing -> do
      existMetadataFile <- doesFileExist moduleCollectorPath
      if existMetadataFile
        then do
          modificationTime <- liftIO $ getModificationTime moduleCollectorPath
          mbNewDescr <- loadInfosForModule moduleCollectorPath
          case mbNewDescr of
            Just newDescr -> return (newDescr:modDescrs,
                Map.insert modName (modificationTime,mbFilePath,newDescr) packageMap,
                True, problemMods)
            Nothing -> return (modDescrs,packageMap,changed,
                modName : problemMods)
        else return (modDescrs,packageMap,changed, modName : problemMods)
  where
    moduleCollectorPath = packageCollectorPath </> moduleCollectorFileName modName <.> leksahMetadataWorkspaceFileExtension
-- ---------------------------------------------------------------------
-- Low level helpers for loading metadata
--
--
-- | Loads the infos for the given packages
--
-- | Load one package's metadata from the collector directory: check
-- the serialization version, force the structure fully (the read is
-- lazy) before closing the handle, and attach the package's source
-- path if a companion path file exists.  Any exception is reported and
-- turned into 'Nothing'.
loadInfosForPackage :: FilePath -> PackageIdentifier -> IO (Maybe PackageDescr)
loadInfosForPackage dirPath pid = do
  let filePath = dirPath </> T.unpack (packageIdentifierToString pid) ++ leksahMetadataSystemFileExtension
  let filePath2 = dirPath </> T.unpack (packageIdentifierToString pid) ++ leksahMetadataPathFileExtension
  exists <- doesFileExist filePath
  if exists
    then catch (do
      file <- openBinaryFile filePath ReadMode
      liftIO . infoM "leksah" . T.unpack $ "now loading metadata for package " <> packageIdentifierToString pid
      bs <- BSL.hGetContents file
      let (metadataVersion'::Integer, packageInfo::PackageDescr) = decodeSer bs
      if metadataVersion /= metadataVersion'
        then do
          hClose file
          throwIDE ("Metadata has a wrong version."
                    <> " Consider rebuilding metadata with: leksah-server -osb +RTS -N2 -RTS")
        else do
          -- deepseq before hClose: the decode is lazy in the handle.
          packageInfo `deepseq` hClose file
          exists' <- doesFileExist filePath2
          sourcePath <- if exists'
                          then liftM Just (readFile filePath2)
                          else return Nothing
          let packageInfo' = injectSourceInPack sourcePath packageInfo
          return (Just packageInfo'))
      (\ (e :: SomeException) -> do
        sysMessage Normal
          ("loadInfosForPackage: " <> packageIdentifierToString pid <> " Exception: " <> T.pack (show e))
        return Nothing)
    else do
      sysMessage Normal $"packageInfo not found for " <> packageIdentifierToString pid
      return Nothing
-- | Attach (or clear) a source path on a package description and propagate
--   the containing directory into every module description.
injectSourceInPack :: Maybe FilePath -> PackageDescr -> PackageDescr
injectSourceInPack mbPath pd =
    pd { pdMbSourcePath = mbPath
       , pdModules      = map (injectSourceInMod mbBase) (pdModules pd) }
  where
    -- Modules receive the directory of the package file, not the file itself.
    mbBase = fmap dropFileName mbPath
-- | Rebase a module's relative source path onto the given base directory,
--   or clear it when no base is available.
injectSourceInMod :: Maybe FilePath -> ModuleDescr -> ModuleDescr
injectSourceInMod Nothing md = md { mdMbSourcePath = Nothing }
injectSourceInMod (Just basePath) md =
    maybe md
          (\rel -> md { mdMbSourcePath = Just (basePath </> rel) })
          (mdMbSourcePath md)
--
-- | Loads the metadata ('ModuleDescr') for a single module from the given
--   metadata file. Returns 'Nothing' when the file is missing, was written
--   with a different metadata version, or reading it throws.
--
loadInfosForModule :: FilePath -> IO (Maybe ModuleDescr)
loadInfosForModule filePath = do
    exists <- doesFileExist filePath
    if exists
        then catch (do
            file <- openBinaryFile filePath ReadMode
            bs <- BSL.hGetContents file
            -- Serialised payload is a (format version, descriptor) pair.
            let (metadataVersion'::Integer, moduleInfo::ModuleDescr) = decodeSer bs
            if metadataVersion /= metadataVersion'
                then do
                    hClose file
                    throwIDE ("Metadata has a wrong version."
                        <> " Consider rebuilding metadata with -r option")
                else do
                    -- Force the structure before closing the lazily read handle.
                    moduleInfo `deepseq` hClose file
                    return (Just moduleInfo))
            (\ (e :: SomeException) -> do sysMessage Normal (T.pack $ "loadInfosForModule: " ++ show e); return Nothing)
        else do
            sysMessage Normal $ "moduleInfo not found for " <> T.pack filePath
            return Nothing
-- | Find the known packages satisfying each dependency. When several
--   versions of the same package match, only the latest is kept.
findFittingPackages
    :: [PackageIdentifier]  -- ^ the list of known packages
    -> [Dependency]         -- ^ the dependencies
    -> [PackageIdentifier]  -- ^ the known packages matching the dependencies
findFittingPackages knownPackages = concatMap fitting
  where
    fitting (Dependency dname versionRange) =
        case filter matches knownPackages of
            []       -> []
            -- several versions may match: keep only the newest one
            matching -> [maximumBy (compare `on` pkgVersion) matching]
      where
        matches (PackageIdentifier name version) =
            name == dname && withinRange version versionRange
-- ---------------------------------------------------------------------
-- Looking up and searching metadata
--
-- | The 'PackageDescr' of the currently active package, looked up in the
--   cached package scope. 'Nothing' when no package is active or no scope
--   information has been loaded yet.
getActivePackageDescr :: IDEM (Maybe PackageDescr)
getActivePackageDescr = do
    mbActive <- readIDE activePack
    case mbActive of
        Nothing -> return Nothing
        Just pack -> do
            mbScopes <- getPackageInfo
            case mbScopes of
                Nothing -> return Nothing
                Just (GenScopeC (PackScope packMap _), _) ->
                    return (Map.lookup (ipdPackageId pack) packMap)
--
-- | Look up an identifier in both symbol tables and concatenate the
--   descriptions found (first table's hits come first).
--
getIdentifierDescr :: (SymbolTable alpha, SymbolTable beta) => Text -> alpha -> beta -> [Descr]
getIdentifierDescr str st1 st2 = symLookup str st1 ++ symLookup str st2
--
-- | All identifiers from both symbol tables that start with the given
--   prefix, in ascending order. The prefix itself is included when it is
--   present verbatim in either table.
--
getIdentifiersStartingWith :: (SymbolTable alpha , SymbolTable beta) => Text -> alpha -> beta -> [Text]
getIdentifiersStartingWith prefix st1 st2 =
    takeWhile (T.isPrefixOf prefix) candidates
  where
    -- splitMember yields (below prefix, exact hit?, above prefix);
    -- everything sharing the prefix sorts directly above it.
    (_, exact1, above1) = Set.splitMember prefix (symbols st1)
    (_, exact2, above2) = Set.splitMember prefix (symbols st2)
    ascending  = Set.toAscList (Set.union above2 above1)
    candidates
        | exact1 || exact2 = prefix : ascending
        | otherwise        = ascending
-- | Completion candidates for the given prefix, drawn from the workspace
--   scope pair; empty when no workspace info is loaded.
getCompletionOptions :: Text -> IDEM [Text]
getCompletionOptions prefix = do
    mbInfo <- getWorkspaceInfo
    case mbInfo of
        Just (GenScopeC (PackScope _ localTable), GenScopeC (PackScope _ globalTable)) ->
            return (getIdentifiersStartingWith prefix localTable globalTable)
        Nothing -> return []
-- | Render every known description of the given identifier, one per line
--   (each entry followed by a newline); empty when no workspace info exists.
getDescription :: Text -> IDEM Text
getDescription name = do
    mbInfo <- getWorkspaceInfo
    case mbInfo of
        Nothing -> return ""
        Just (GenScopeC (PackScope _ localTable), GenScopeC (PackScope _ globalTable)) ->
            return . T.pack
                   . concatMap (\d -> show (Present d) ++ "\n")
                   $ getIdentifierDescr name localTable globalTable
-- | Cached scope pair for the active package, read from IDE state.
getPackageInfo   :: IDEM (Maybe (GenScope, GenScope))
getPackageInfo    = readIDE packageInfo

-- | Cached scope pair for the open workspace, read from IDE state.
getWorkspaceInfo :: IDEM (Maybe (GenScope, GenScope))
getWorkspaceInfo  = readIDE workspaceInfo

-- | Cached scope over all installed packages, read from IDE state.
getSystemInfo    :: IDEM (Maybe GenScope)
getSystemInfo     = readIDE systemInfo
-- | Scope pair for importing from the given package: only exported items
--   are kept. The package descriptor is looked up in the package scope when
--   the package is the active one, otherwise in the workspace scope; the
--   dependency scope is always assembled from system info.
getPackageImportInfo :: IDEPackage -> IDEM (Maybe (GenScope,GenScope))
getPackageImportInfo idePack = do
    mbActivePack <- readIDE activePack
    systemInfo' <- getSystemInfo
    if isJust mbActivePack && ipdPackageId (fromJust mbActivePack) == ipdPackageId idePack
        then do
            packageInfo' <- getPackageInfo
            case packageInfo' of
                Nothing -> do
                    liftIO $ infoM "leksah" "getPackageImportInfo: no package info"
                    return Nothing
                Just (GenScopeC (PackScope pdmap _), _) ->
                    case Map.lookup (ipdPackageId idePack) pdmap of
                        Nothing -> do
                            liftIO $ infoM "leksah" "getPackageImportInfo: package not found in package"
                            return Nothing
                        Just pd -> buildIt pd systemInfo'
        else do
            workspaceInfo <- getWorkspaceInfo
            case workspaceInfo of
                Nothing -> do
                    liftIO $ infoM "leksah" "getPackageImportInfo: no workspace info"
                    return Nothing
                Just (GenScopeC (PackScope pdmap _), _) ->
                    case Map.lookup (ipdPackageId idePack) pdmap of
                        Nothing -> do
                            liftIO $ infoM "leksah" "getPackageImportInfo: package not found in workspace"
                            return Nothing
                        Just pd -> buildIt pd systemInfo'
    where
        -- Strip non-exported identifier descriptions from a module.
        filterPrivate :: ModuleDescr -> ModuleDescr
        filterPrivate md = md{mdIdDescriptions = filter dscExported (mdIdDescriptions md)}
        -- Build (package scope, dependency scope) for the descriptor; the
        -- dependency scope folds in all build-depends found in system info.
        -- NOTE(review): pd' (exported-only modules) is computed but scope1
        -- is built from the unfiltered pd — confirm whether that is intended.
        buildIt pd systemInfo' =
            case systemInfo' of
                Nothing -> do
                    liftIO $ infoM "leksah" "getPackageImportInfo: no system info"
                    return Nothing
                Just (GenScopeC (PackScope pdmap' _)) ->
                    let impPackDescrs = mapMaybe (`Map.lookup` pdmap') (pdBuildDepends pd)
                        pd' = pd{pdModules = map filterPrivate (pdModules pd)}
                        scope1 :: PackScope (Map Text [Descr])
                                = buildScope pd (PackScope Map.empty symEmpty)
                        scope2 :: PackScope (Map Text [Descr])
                                = foldr buildScope (PackScope Map.empty symEmpty) impPackDescrs
                    in return (Just (GenScopeC scope1, GenScopeC scope2))
--
-- | Searching of metadata
--
-- | Search the metadata for identifiers matching the search string within
--   the given scope. The Bool inside Package/Workspace scope selects whether
--   the dependency half of the scope pair is searched as well.
searchMeta :: Scope -> Text -> SearchMode -> IDEM [Descr]
-- An empty search string never matches anything.
searchMeta _ "" _ = return []
searchMeta (PackageScope False) searchString searchType = do
    packageInfo' <- getPackageInfo
    case packageInfo' of
        Nothing -> return []
        Just (GenScopeC (PackScope _ rl), _) -> return (searchInScope searchType searchString rl)
searchMeta (PackageScope True) searchString searchType = do
    packageInfo' <- getPackageInfo
    case packageInfo' of
        Nothing -> return []
        Just (GenScopeC (PackScope _ rl), GenScopeC (PackScope _ rr)) ->
            return (searchInScope searchType searchString rl
                 ++ searchInScope searchType searchString rr)
searchMeta (WorkspaceScope False) searchString searchType = do
    workspaceInfo' <- getWorkspaceInfo
    case workspaceInfo' of
        Nothing -> return []
        Just (GenScopeC (PackScope _ rl), _) -> return (searchInScope searchType searchString rl)
searchMeta (WorkspaceScope True) searchString searchType = do
    workspaceInfo' <- getWorkspaceInfo
    case workspaceInfo' of
        Nothing -> return []
        Just (GenScopeC (PackScope _ rl), GenScopeC (PackScope _ rr)) ->
            return (searchInScope searchType searchString rl
                 ++ searchInScope searchType searchString rr)
-- System scope: search the system table, falling back to (or additionally
-- including) the package-local table when package info is available.
searchMeta SystemScope searchString searchType = do
    systemInfo' <- getSystemInfo
    packageInfo' <- getPackageInfo
    case systemInfo' of
        Nothing ->
            case packageInfo' of
                Nothing -> return []
                Just (GenScopeC (PackScope _ rl), _) ->
                    return (searchInScope searchType searchString rl)
        Just (GenScopeC (PackScope _ s)) ->
            case packageInfo' of
                Nothing -> return (searchInScope searchType searchString s)
                Just (GenScopeC (PackScope _ rl), _) -> return (searchInScope searchType searchString rl
                            ++ searchInScope searchType searchString s)
-- | Dispatch a search over a symbol table according to the search mode:
--   exact match, (case-sensitive or insensitive) prefix, or regex.
searchInScope :: SymbolTable alpha => SearchMode -> Text -> alpha -> [Descr]
searchInScope mode term st =
    case mode of
        Exact _     -> searchInScopeExact term st
        Prefix True -> concat (symElems (searchInScopePrefix term st))
        Prefix False
            -- a case-insensitive empty prefix would match everything
            | T.null term -> []
            | otherwise   -> concat (symElems (searchInScopeCaseIns term st ""))
        Regex caseSense -> searchRegex term st caseSense
-- | Exact-name search: a plain symbol-table lookup.
searchInScopeExact :: SymbolTable alpha => Text -> alpha -> [Descr]
searchInScopeExact = symLookup
-- | Restrict a symbol table to the keys that start with @searchString@.
searchInScopePrefix :: SymbolTable alpha => Text -> alpha -> alpha
searchInScopePrefix searchString symbolTable =
    -- '{' is the ASCII successor of 'z', so @searchString <> "{"@ acts as an
    -- upper bound: every key between searchString and that bound shares the
    -- prefix. NOTE(review): assumes key characters stay below '{' — confirm
    -- for operator names.
    let (_, exact, mapR) = symSplitLookup searchString symbolTable
        (mbL, _, _) = symSplitLookup (searchString <> "{") mapR
    in case exact of
        -- re-insert the exact hit, which splitLookup removed from mapR
        Nothing -> mbL
        Just e -> symInsert searchString e mbL
-- | Case-insensitive prefix search: for every cased character of the search
--   string the table is narrowed along both the lower- and upper-case
--   branch and the results are unioned. Cost grows with the number of cased
--   characters (one table narrowing per explored branch), which is why
--   callers guard against an empty search string.
searchInScopeCaseIns :: SymbolTable alpha => Text -> alpha -> Text -> alpha
searchInScopeCaseIns a symbolTable b = searchInScopeCaseIns' (T.unpack a) symbolTable (T.unpack b)
    where
        -- pre is the concrete (cased) prefix accumulated so far.
        searchInScopeCaseIns' [] st _ = st
        searchInScopeCaseIns' (a:l) st pre | isLower a =
            let s1 = pre ++ [a]
                s2 = pre ++ [toUpper a]
            in symUnion (searchInScopeCaseIns' l (searchInScopePrefix (T.pack s1) st) s1)
                        (searchInScopeCaseIns' l (searchInScopePrefix (T.pack s2) st) s2)
                                           | isUpper a =
            let s1 = pre ++ [a]
                s2 = pre ++ [toLower a]
            in symUnion (searchInScopeCaseIns' l (searchInScopePrefix (T.pack s1) st) s1)
                        (searchInScopeCaseIns' l (searchInScopePrefix (T.pack s2) st) s2)
                                           | otherwise =
            -- uncased characters contribute a single branch
            let s = pre ++ [a]
            in searchInScopeCaseIns' l (searchInScopePrefix (T.pack s) st) s
-- | Search all descriptions whose name matches the given regular
--   expression. An unparsable pattern is reported and yields no results.
searchRegex :: SymbolTable alpha => Text -> alpha -> Bool -> [Descr]
searchRegex searchString st caseSense =
    case compileRegex caseSense searchString of
        Left err ->
            -- unsafePerformIO is only used to emit the diagnostic; the
            -- result here is always the empty list.
            unsafePerformIO $ sysMessage Normal (T.pack $ show err) >> return []
        Right regex ->
            filter matches (concat (symElems st))
  where
    -- A description matches only when execution both succeeds and finds a
    -- match; execution errors (Left) and no-match (Right Nothing) are both
    -- treated as False. The original code shadowed the filtered element
    -- binding with the Left error binding (both named e); the wildcard
    -- removes that shadowing.
    matches descr =
        case execute regex (dscName descr) of
            Right (Just _) -> True
            _              -> False
-- | Compile a search pattern with the given case sensitivity; multiline
--   matching is always enabled.
compileRegex :: Bool -> Text -> Either String Regex
compileRegex caseSense searchString =
    compile options defaultExecOpt searchString
  where
    options = defaultCompOpt
        { Regex.caseSensitive = caseSense
        , multiline = True
        }
-- ---------------------------------------------------------------------
-- Handling of scopes
--
--
-- | Fold one package's metadata into an accumulating scope. Packages already
--   present in the scope's package map are skipped, so folding over a
--   dependency list never duplicates work.
--
buildScope :: SymbolTable alpha => PackageDescr -> PackScope alpha -> PackScope alpha
buildScope packageD scope@(PackScope packageMap symbolTable)
    | pid `Map.member` packageMap = scope
    | otherwise =
        PackScope (Map.insert pid packageD packageMap)
                  (buildSymbolTable packageD symbolTable)
  where
    pid = pdPackage packageD
-- | Insert every identifier description of a package into a symbol table.
--   Composite descriptions (data types, classes, newtypes) also contribute
--   entries for their fields, constructors and methods; re-exported
--   descriptions contribute only themselves.
buildSymbolTable :: SymbolTable alpha => PackageDescr -> alpha -> alpha
buildSymbolTable pDescr symbolTable =
    foldl' insertAllFor symbolTable allDescriptions
  where
    allDescriptions = concatMap mdIdDescriptions (pdModules pDescr)
    -- Insert a description and all of its subordinate descriptions.
    insertAllFor st idDescr =
        foldl' (\acc descr -> symInsert (dscName descr) [descr] acc)
               st (allDescrsFrom idDescr)
    -- Build a Real descriptor for a subordinate item (field, constructor or
    -- method) of the given parent, tagged with the given type-hint builder.
    -- (The original repeated this record construction four times inline.)
    subDescr parent hint (SimpleDescr fn ty loc comm exported) =
        Real RealDescr
            { dscName'       = fn
            , dscMbTypeStr'  = ty
            , dscMbModu'     = dscMbModu parent
            , dscMbLocation' = loc
            , dscMbComment'  = comm
            , dscTypeHint'   = hint parent
            , dscExported'   = exported
            }
    allDescrsFrom descr
        | isReexported descr = [descr]
        | otherwise =
            case dscTypeHint descr of
                DataDescr constructors fields ->
                    descr : map (subDescr descr FieldDescr) fields
                         ++ map (subDescr descr ConstructorDescr) constructors
                ClassDescr _ methods ->
                    descr : map (subDescr descr MethodDescr) methods
                NewtypeDescr constr mbField ->
                    descr : subDescr descr ConstructorDescr constr
                          : maybe [] (\fd -> [subDescr descr FieldDescr fd]) mbField
                -- instances are not entered into the symbol table
                InstanceDescr _ -> []
                _ -> [descr]
-- ---------------------------------------------------------------------
-- Low level functions for calling the collector
--
-- | Ask the metadata collector server to (re)collect system metadata and
--   invoke the continuation with the success flag.
--   The parameter names scRebuild/scSources/scExtract deliberately match
--   the fields of 'SystemCommand': together with scPackageDBs they are
--   captured by the RecordWildCards pattern @SystemCommand {..}@ below, so
--   they must not be renamed.
callCollector :: Bool -> Bool -> Bool -> (Bool -> IDEAction) -> IDEAction
callCollector scRebuild scSources scExtract cont = do
    liftIO $ infoM "leksah" "callCollector"
    scPackageDBs <- getAllPackageDBs
    doServerCommand SystemCommand {..} $ \ res ->
        case res of
            ServerOK -> do
                liftIO $ infoM "leksah" "callCollector finished"
                cont True
            ServerFailed str -> do
                liftIO $ infoM "leksah" (T.unpack str)
                cont False
            _ -> do
                -- any other answer is a protocol violation; treat as failure
                liftIO $ infoM "leksah" "impossible server answer"
                cont False
-- | Ask the collector server to gather metadata for the given modules of a
--   workspace package, then invoke the continuation with the success flag.
--   An empty module list short-circuits to success without contacting the
--   server.
callCollectorWorkspace :: Bool -> FilePath -> PackageIdentifier -> [(Text,FilePath)] ->
    (Bool -> IDEAction) -> IDEAction
callCollectorWorkspace rebuild packagePath packageId modList cont = do
    liftIO $ infoM "leksah" "callCollectorWorkspace"
    if null modList
        then do
            liftIO $ infoM "leksah" "callCollectorWorkspace: Nothing to do"
            cont True
        else doServerCommand command handleAnswer
  where
    command = WorkspaceCommand
        { wcRebuild = rebuild
        , wcPackage = packageId
        , wcPath    = packagePath
        , wcModList = modList
        }
    handleAnswer res =
        case res of
            ServerOK -> do
                liftIO $ infoM "leksah" "callCollectorWorkspace finished"
                cont True
            ServerFailed str -> do
                liftIO $ infoM "leksah" (T.unpack str)
                cont False
            _ -> do
                liftIO $ infoM "leksah" "impossible server answer"
                cont False
-- ---------------------------------------------------------------------
-- Additions for completion
--
-- | Haskell keywords offered as completion candidates.
keywords :: [Text]
keywords = [
        "as"
    ,   "case"
    ,   "of"
    ,   "class"
    ,   "data"
    ,   "default"
    ,   "deriving"
    ,   "do"
    ,   "forall"
    ,   "foreign"
    ,   "hiding"
    ,   "if"
    ,   "then"
    ,   "else"
    ,   "import"
    ,   "infix"
    ,   "infixl"
    ,   "infixr"
    ,   "instance"
    ,   "let"
    ,   "in"
    ,   "mdo"
    ,   "module"
    ,   "newtype"
    ,   "qualified"
    ,   "type"
    ,   "where"]
-- | Completion descriptors for the Haskell keywords.
keywordDescrs :: [Descr]
keywordDescrs = map toDescr keywords
  where
    toDescr kw =
        Real (RealDescr kw
                        Nothing
                        Nothing
                        Nothing
                        (Just (BS.pack "Haskell keyword"))
                        KeywordDescr
                        True)
-- | Extra completion entries that are neither keywords nor identifiers,
--   as (completion text, description) pairs.
misc :: [(Text, String)]
misc = [("--", "Haskell comment"), ("=", "Haskell definition")]
-- | Completion descriptors for the entries of 'misc'.
miscDescrs :: [Descr]
miscDescrs = map toDescr misc
  where
    toDescr (txt, description) =
        Real (RealDescr txt
                        Nothing
                        Nothing
                        Nothing
                        (Just (BS.pack description))
                        KeywordDescr
                        True)
-- | Completion descriptors for every known language extension, each offered
--   with an \"X\" prefix (the LANGUAGE-pragma spelling used in cabal files).
extensionDescrs :: [Descr]
extensionDescrs = map toDescr allExtensions
  where
    allExtensions = [minBound .. maxBound] :: [KnownExtension]
    toDescr ext =
        Real (RealDescr (T.pack ("X" ++ show ext))
                        Nothing
                        Nothing
                        Nothing
                        (Just (BS.pack "Haskell language extension"))
                        ExtensionDescr
                        True)
-- | Completion descriptors for the module names of one package.
moduleNameDescrs :: PackageDescr -> [Descr]
moduleNameDescrs pd = map toDescr (pdModules pd)
  where
    toDescr md =
        Real (RealDescr (T.pack (display (modu (mdModuleId md))))
                        Nothing
                        (Just (mdModuleId md))
                        Nothing
                        (Just (BS.pack "Module name"))
                        ModNameDescr
                        True)
-- | Extend a scope's symbol table with module-name completions and — when
--   @addAll@ is set — keyword, extension and miscellaneous completions too.
addOtherToScope :: SymbolTable alpha => PackScope alpha -> Bool -> PackScope alpha
addOtherToScope (PackScope packageMap symbolTable) addAll =
    PackScope packageMap extendedTable
  where
    extendedTable =
        foldl' (\acc descr -> symInsert (dscName descr) [descr] acc)
               symbolTable extraDescrs
    extraDescrs
        | addAll    = keywordDescrs ++ extensionDescrs ++ modNameDescrs ++ miscDescrs
        | otherwise = modNameDescrs
    modNameDescrs = concatMap moduleNameDescrs (Map.elems packageMap)
|
JPMoresmau/leksah
|
src/IDE/Metainfo/Provider.hs
|
gpl-2.0
| 42,106 | 0 | 32 | 14,691 | 9,784 | 5,000 | 4,784 | 741 | 8 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.NormalMap
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
module Yi.Keymap.Vim.NormalMap (defNormalMap) where
import Control.Applicative
import Control.Lens hiding (re)
import Control.Monad
import Data.Char
import Data.HashMap.Strict (singleton, lookup)
import Data.List (group)
import Data.Maybe (fromMaybe)
import Data.Monoid
import qualified Data.Text as T
import Prelude hiding (null, lookup)
import System.Directory (doesFileExist)
import System.FriendlyPath (expandTilda)
import Yi.Buffer.Adjusted hiding (Insert)
import Yi.Core (quitEditor, closeWindow)
import Yi.Editor
import Yi.Event
import Yi.File (editFile, fwriteE)
import Yi.History
import Yi.Keymap
import Yi.Keymap.Keys
import Yi.Keymap.Vim.Common
import Yi.Keymap.Vim.Eval
import Yi.Keymap.Vim.Motion
import Yi.Keymap.Vim.Operator
import Yi.Keymap.Vim.Search
import Yi.Keymap.Vim.StateUtils
import Yi.Keymap.Vim.StyledRegion
import Yi.Keymap.Vim.Tag
import Yi.Keymap.Vim.Utils
import Yi.MiniBuffer
import Yi.Misc
import Yi.Monad
import Yi.Regex (seInput, makeSearchOptsM)
import qualified Yi.Rope as R
import Yi.Search (getRegexE, isearchInitE,
setRegexE, makeSimpleSearch)
import Yi.String
import Yi.Tag (Tag(..))
import Yi.Utils (io)
-- | Binding for one digit key: appends the digit to the pending count
--   (count * 10 + digit), starting a new count when none is pending.
mkDigitBinding :: Char -> VimBinding
mkDigitBinding c = mkBindingE Normal Continue (char c, return (), addDigit)
  where
    digit = ord c - ord '0'
    addDigit vimState =
        vimState { vsCount = Just (maybe digit (\n -> n * 10 + digit) (vsCount vimState)) }
-- | All bindings available in Vim normal mode, assembled from the supplied
--   operators plus the fixed binding groups defined in this module.
--   NOTE(review): the groups are concatenated in a fixed order — confirm
--   whether earlier entries take precedence before reordering.
defNormalMap :: [VimOperator] -> [VimBinding]
defNormalMap operators =
    [recordMacroBinding, finishRecordingMacroBinding, playMacroBinding] <>
    [zeroBinding, repeatBinding, motionBinding, searchBinding] <>
    [chooseRegisterBinding, setMarkBinding] <>
    -- '0' is not a digit binding: it is handled by zeroBinding above
    fmap mkDigitBinding ['1' .. '9'] <>
    operatorBindings operators <>
    finishingBingings <>
    continuingBindings <>
    nonrepeatableBindings <>
    jumpBindings <>
    fileEditBindings <>
    [tabTraversalBinding] <>
    [tagJumpBinding, tagPopBinding]
-- | Ctrl-] : jump to the tag named by the word under the cursor.
tagJumpBinding :: VimBinding
tagJumpBinding = mkBindingY Normal (Event (KASCII ']') [MCtrl], f, id)
    where f = withCurrentBuffer readCurrentWordB >>= g . Tag . R.toText
          g tag = gotoTag tag 0 Nothing
-- | Ctrl-t : pop back to the position before the last tag jump.
tagPopBinding :: VimBinding
tagPopBinding = mkBindingY Normal (Event (KASCII 't') [MCtrl], f, id)
    where f = popTag
-- | Cursor-motion bindings, active only while in normal mode.
motionBinding :: VimBinding
motionBinding = mkMotionBinding Drop (== Normal)
-- | The register-selection ('"') binding, active only in normal mode.
chooseRegisterBinding :: VimBinding
chooseRegisterBinding = mkChooseRegisterBinding ((== Normal) . vsMode)
-- | '0' is ambiguous in Vim: with a count pending it appends a zero digit
--   (count * 10); with no count it moves to the start of the line.
zeroBinding :: VimBinding
zeroBinding = VimBindingE f
    where f "0" (VimState {vsMode = Normal}) = WholeMatch $ do
              currentState <- getEditorDyn
              case vsCount currentState of
                  Just c -> do
                      setCountE (10 * c)
                      return Continue
                  Nothing -> do
                      withCurrentBuffer moveToSol
                      resetCountE
                      setStickyEolE False
                      return Drop
          f _ _ = NoMatch
-- | '.' : repeat the last repeatable action. A pending count overrides the
--   count stored with the recorded action.
repeatBinding :: VimBinding
repeatBinding = VimBindingE (f . T.unpack . _unEv)
  where
    f "." (VimState {vsMode = Normal}) = WholeMatch $ do
        currentState <- getEditorDyn
        case vsRepeatableAction currentState of
            Nothing -> return ()
            Just (RepeatableAction prevCount (Ev actionString)) -> do
                let count = showT $ fromMaybe prevCount (vsCount currentState)
                -- replay by feeding "<count><action>" back through the evaluator
                scheduleActionStringForEval . Ev $ count <> actionString
                resetCountE
        return Drop
    f _ _ = NoMatch
-- | Jump-list navigation: Ctrl-o (back), Tab (forward) and Ctrl-^ / Ctrl-6
--   (switch to the alternate buffer, honouring the count).
jumpBindings :: [VimBinding]
jumpBindings = fmap (mkBindingE Normal Drop)
    [ (ctrlCh 'o', jumpBackE, id)
    , (spec KTab, jumpForwardE, id)
    , (ctrlCh '^', controlCarrot, resetCount)
    , (ctrlCh '6', controlCarrot, resetCount)
    ]
  where
    -- count N selects the N-th alternate buffer (hence the -1 offset)
    controlCarrot = alternateBufferE . (+ (-1)) =<< getCountE
-- | Normal-mode bindings that complete an editing action (x, X, D, p, P,
--   ~, J, ...). NOTE(review): the name is a long-standing typo
--   ("Bingings"); it is referenced from 'defNormalMap', so renaming it must
--   be done in both places at once.
finishingBingings :: [VimBinding]
finishingBingings = fmap (mkStringBindingE Normal Finish)
    [ ("x", cutCharE Forward =<< getCountE, resetCount)
    , ("<Del>", cutCharE Forward =<< getCountE, resetCount)
    , ("X", cutCharE Backward =<< getCountE, resetCount)
    -- D: delete from point to end of line
    , ("D",
        do region <- withCurrentBuffer $ regionWithTwoMovesB (return ()) moveToEol
           void $ operatorApplyToRegionE opDelete 1 $ StyledRegion Exclusive region
      , id)
    -- Pasting
    , ("p", pasteAfter, id)
    , ("P", pasteBefore, id)
    -- Miscellaneous.
    , ("~", do
          count <- getCountE
          withCurrentBuffer $ do
              transformCharactersInLineN count switchCaseChar
              leftOnEol
      , resetCount)
    -- J: join count lines (a bare J joins two, hence max 2 then -1)
    , ("J", do
          count <- fmap (flip (-) 1 . max 2) getCountE
          withCurrentBuffer $ do
              (StyledRegion s r) <- case stringToMove "j" of
                  WholeMatch m -> regionOfMoveB $ CountedMove (Just count) m
                  -- "j" is a fixed, known-valid motion string
                  _ -> error "can't happen"
              void $ lineMoveRel $ count - 1
              moveToEol
              joinLinesB =<< convertRegionToStyleB r s
      , resetCount)
    ]
-- | Normal-mode P: paste the active register before the cursor. Line-wise
--   registers are inserted as whole lines; other styles paste inclusively.
pasteBefore :: EditorM ()
pasteBefore = do
    -- TODO: use count
    register <- getRegisterE . vsActiveRegister =<< getEditorDyn
    case register of
        Nothing -> return ()
        Just (Register LineWise rope) -> withCurrentBuffer $ unless (R.null rope) $
            -- Beware of edge cases ahead
            insertRopeWithStyleB (addNewLineIfNecessary rope) LineWise
        Just (Register style rope) -> withCurrentBuffer $ pasteInclusiveB rope style
-- | Normal-mode p: paste the active register after the cursor. Line-wise
--   paste at end of file needs special handling so the buffer does not end
--   up with a spurious trailing newline.
pasteAfter :: EditorM ()
pasteAfter = do
    -- TODO: use count
    register <- getRegisterE . vsActiveRegister =<< getEditorDyn
    case register of
        Nothing -> return ()
        Just (Register LineWise rope) -> withCurrentBuffer $ do
            -- Beware of edge cases ahead
            moveToEol
            eof <- atEof
            -- at end of file there is no newline to paste after; add one
            when eof $ insertB '\n'
            rightB
            insertRopeWithStyleB (addNewLineIfNecessary rope) LineWise
            -- and remove the compensating newline again afterwards
            when eof $ savingPointB $ do
                newSize <- sizeB
                moveTo (newSize - 1)
                curChar <- readB
                when (curChar == '\n') $ deleteN 1
        Just (Register style rope) -> withCurrentBuffer $ do
            whenM (fmap not atEol) rightB
            pasteInclusiveB rope style
-- | One Continue-binding per operator: typing the operator's name switches
--   to operator-pending mode for that operator.
operatorBindings :: [VimOperator] -> [VimBinding]
operatorBindings = fmap bindingFor
  where
    bindingFor (VimOperator {operatorName = Op name}) =
        mkStringBindingE Normal Continue
            (Ev name, return (), switchMode (NormalOperatorPending (Op name)))
-- | Bindings that transition out of plain normal mode: into replace,
--   insert, or visual mode.
continuingBindings :: [VimBinding]
continuingBindings = fmap (mkStringBindingE Normal Continue)
    [ ("r", return (), switchMode ReplaceSingleChar) -- TODO make it just a binding
    -- Transition to insert mode; the Char tags which command entered it
    , ("i", return (), switchMode $ Insert 'i')
    , ("<Ins>", return (), switchMode $ Insert 'i')
    , ("I", withCurrentBuffer firstNonSpaceB, switchMode $ Insert 'I')
    , ("a", withCurrentBuffer rightB, switchMode $ Insert 'a')
    , ("A", withCurrentBuffer moveToEol, switchMode $ Insert 'A')
    -- o/O open a new line below/above before entering insert mode
    , ("o", withCurrentBuffer $ do
          moveToEol
          newlineB
          indentAsTheMostIndentedNeighborLineB
      , switchMode $ Insert 'o')
    , ("O", withCurrentBuffer $ do
          moveToSol
          newlineB
          leftB
          indentAsNextB
      , switchMode $ Insert 'O')
    -- Transition to visual
    , ("v", enableVisualE Inclusive, resetCount . switchMode (Visual Inclusive))
    , ("V", enableVisualE LineWise, resetCount . switchMode (Visual LineWise))
    , ("<C-v>", enableVisualE Block, resetCount . switchMode (Visual Block))
    ]
-- | Normal-mode bindings that are not recorded for repetition with '.':
--   single-event bindings first, then multi-key string bindings, then the
--   YiM-level ZQ/ZZ bindings.
nonrepeatableBindings :: [VimBinding]
nonrepeatableBindings = fmap (mkBindingE Normal Drop)
    [ (spec KEsc, return (), resetCount)
    , (ctrlCh 'c', return (), resetCount)
    -- Changing
    , (char 'C',
        do region <- withCurrentBuffer $ regionWithTwoMovesB (return ()) moveToEol
           void $ operatorApplyToRegionE opChange 1 $ StyledRegion Exclusive region
      , switchMode $ Insert 'C')
    , (char 's', cutCharE Forward =<< getCountE, switchMode $ Insert 's')
    , (char 'S',
        do region <- withCurrentBuffer $ regionWithTwoMovesB firstNonSpaceB moveToEol
           void $ operatorApplyToRegionE opDelete 1 $ StyledRegion Exclusive region
      , switchMode $ Insert 'S')
    -- Replacing
    , (char 'R', return (), switchMode Replace)
    -- Yanking
    , ( char 'Y'
      , do region <- withCurrentBuffer $ regionWithTwoMovesB (return ()) moveToEol
           void $ operatorApplyToRegionE opYank 1 $ StyledRegion Exclusive region
      , id
      )
    -- Search
    , (char '*', addVimJumpHereE >> searchWordE True Forward, resetCount)
    , (char '#', addVimJumpHereE >> searchWordE True Backward, resetCount)
    , (char 'n', addVimJumpHereE >> withCount (continueSearching id), resetCount)
    , (char 'N', addVimJumpHereE >> withCount (continueSearching reverseDir), resetCount)
    , (char ';', repeatGotoCharE id, id)
    , (char ',', repeatGotoCharE reverseDir, id)
    -- Repeat
    , (char '&', return (), id) -- TODO
    -- Transition to ex
    , (char ':', do
          void (spawnMinibufferE ":" id)
          historyStart
          historyPrefixSet ""
      , switchMode Ex)
    -- Undo
    , (char 'u', withCountOnBuffer undoB >> withCurrentBuffer leftOnEol, id)
    , (char 'U', withCountOnBuffer undoB >> withCurrentBuffer leftOnEol, id) -- TODO
    , (ctrlCh 'r', withCountOnBuffer redoB >> withCurrentBuffer leftOnEol, id)
    -- scrolling
    ,(ctrlCh 'b', getCountE >>= withCurrentBuffer . upScreensB, id)
    ,(ctrlCh 'f', getCountE >>= withCurrentBuffer . downScreensB, id)
    ,(ctrlCh 'u', getCountE >>= withCurrentBuffer . vimScrollByB (negate . (`div` 2)), id)
    ,(ctrlCh 'd', getCountE >>= withCurrentBuffer . vimScrollByB (`div` 2), id)
    ,(ctrlCh 'y', getCountE >>= withCurrentBuffer . vimScrollB . negate, id)
    ,(ctrlCh 'e', getCountE >>= withCurrentBuffer . vimScrollB, id)
    -- unsorted TODO
    , (char '-', return (), id)
    , (char '+', return (), id)
    , (spec KEnter, return (), id)
    ] <> fmap (mkStringBindingE Normal Drop)
    [ ("g*", searchWordE False Forward, resetCount)
    , ("g#", searchWordE False Backward, resetCount)
    , ("gd", withCurrentBuffer $ withModeB modeGotoDeclaration, resetCount)
    , ("gD", withCurrentBuffer $ withModeB modeGotoDeclaration, resetCount)
    , ("<C-g>", printFileInfoE, resetCount)
    -- window commands
    , ("<C-w>c", tryCloseE, resetCount)
    , ("<C-w>o", closeOtherE, resetCount)
    , ("<C-w>s", splitE, resetCount)
    , ("<C-w>w", nextWinE, resetCount)
    , ("<C-w><C-w>", nextWinE, resetCount)
    , ("<C-w>W", prevWinE, resetCount)
    , ("<C-w>p", prevWinE, resetCount)
    -- number increment/decrement
    , ("<C-a>", getCountE >>= withCurrentBuffer . incrementNextNumberByB, resetCount)
    , ("<C-x>", getCountE >>= withCurrentBuffer . incrementNextNumberByB . negate, resetCount)
    -- z commands
    -- TODO Add prefix count
    , ("zt", withCurrentBuffer scrollCursorToTopB, resetCount)
    , ("zb", withCurrentBuffer scrollCursorToBottomB, resetCount)
    , ("zz", withCurrentBuffer scrollToCursorB, resetCount)
    {- -- TODO Horizantal scrolling
    , ("ze", withCurrentBuffer .., resetCount)
    , ("zs", withCurrentBuffer .., resetCount)
    , ("zH", withCurrentBuffer .., resetCount)
    , ("zL", withCurrentBuffer .., resetCount)
    , ("zh", withCurrentBuffer .., resetCount)
    , ("zl", withCurrentBuffer .., resetCount)
    -}
    , ("z.", withCurrentBuffer $ scrollToCursorB >> moveToSol, resetCount)
    , ("z+", withCurrentBuffer scrollToLineBelowWindowB, resetCount)
    , ("z-", withCurrentBuffer $ scrollCursorToBottomB >> moveToSol, resetCount)
    , ("z^", withCurrentBuffer scrollToLineAboveWindowB, resetCount)
    {- -- TODO Code folding
    , ("zf", .., resetCount)
    , ("zc", .., resetCount)
    , ("zo", .., resetCount)
    , ("za", .., resetCount)
    , ("zC", .., resetCount)
    , ("zO", .., resetCount)
    , ("zA", .., resetCount)
    , ("zr", .., resetCount)
    , ("zR", .., resetCount)
    , ("zm", .., resetCount)
    , ("zM", .., resetCount)
    -}
    -- Z commands
    ] <> fmap (mkStringBindingY Normal)
    [ ("ZQ", quitEditor, id)
    -- TODO ZZ should replicate :x not :wq
    , ("ZZ", fwriteE >> closeWindow, id)
    ]
-- | gf-family bindings: open the file named under the cursor, optionally in
--   a new tab or a new window split.
fileEditBindings :: [VimBinding]
fileEditBindings = fmap (mkStringBindingY Normal)
    [ ("gf", openFileUnderCursor Nothing, resetCount)
    , ("<C-w>gf", openFileUnderCursor $ Just newTabE, resetCount)
    , ("<C-w>f", openFileUnderCursor $ Just (splitE >> prevWinE), resetCount)
    ]
-- | m&lt;char&gt; : set the named mark &lt;char&gt; at the current position.
setMarkBinding :: VimBinding
setMarkBinding = VimBindingE (f . T.unpack . _unEv)
  where
    -- only meaningful in normal mode
    f _ state | vsMode state /= Normal = NoMatch
    -- a lone 'm' still awaits the mark name
    f "m" _ = PartialMatch
    f ['m', markName] _ = WholeMatch $ do
        withCurrentBuffer (setNamedMarkHereB [markName])
        return Drop
    f _ _ = NoMatch
-- | Search for the word under the cursor ('*' and '#'). With @wholeWord@
--   the word is wrapped in \\&lt; \\&gt; boundaries; otherwise a simple
--   substring search is used.
searchWordE :: Bool -> Direction -> EditorM ()
searchWordE wholeWord dir = do
    word <- withCurrentBuffer readCurrentWordB
    let search re = do
            setRegexE re
            assign searchDirectionA dir
            withCount $ continueSearching (const dir)
    if wholeWord
    then case makeSearchOptsM [] $ "\\<" <> R.toString word <> "\\>" of
            Right re -> search re
            -- an unparsable pattern silently does nothing
            Left _ -> return ()
    else search $ makeSimpleSearch word
-- | '/' and '?' : enter incremental search mode (forward for '/', backward
--   for '?'), remembering the mode to return to.
searchBinding :: VimBinding
searchBinding = VimBindingE (f . T.unpack . _unEv)
  where
    f evs (VimState { vsMode = Normal }) | evs `elem` group "/?"
        = WholeMatch $ do
              state <- fmap vsMode getEditorDyn
              let dir = if evs == "/" then Forward else Backward
              switchModeE $ Search state dir
              isearchInitE dir
              historyStart
              historyPrefixSet T.empty
              return Continue
    f _ _ = NoMatch
-- | 'n'/'N' : repeat the last search, with the stored direction transformed
--   by @fdir@ (id keeps it, reverseDir flips it). Echoes the pattern with a
--   '/' or '?' prefix matching the effective direction.
continueSearching :: (Direction -> Direction) -> EditorM ()
continueSearching fdir =
    getRegexE >>= \case
        Just regex -> do
            dir <- fdir <$> use searchDirectionA
            printMsg . T.pack $ (if dir == Forward then '/' else '?') : seInput regex
            void $ doVimSearch Nothing [] dir
        Nothing -> printMsg "No previous search pattern"
-- | ';' and ',' : repeat the last f/F/t/T character motion, with the stored
--   direction transformed by @mutateDir@.
repeatGotoCharE :: (Direction -> Direction) -> EditorM ()
repeatGotoCharE mutateDir = do
    prevCommand <- fmap vsLastGotoCharCommand getEditorDyn
    count <- getCountE
    withCurrentBuffer $ case prevCommand of
        Just (GotoCharCommand c dir style) -> do
            let newDir = mutateDir dir
            let move = gotoCharacterB c newDir style True
            p0 <- pointB
            -- first count-1 repetitions; Exclusive motions stop before the
            -- target, so step over it between repetitions
            replicateM_ (count - 1) $ do
                move
                when (style == Exclusive) $ moveB Character newDir
            p1 <- pointB
            move
            p2 <- pointB
            -- if the final move made no progress, undo the whole repeat
            when (p1 == p2) $ moveTo p0
        Nothing -> return ()
-- | Prepare the current buffer for visual mode with the given region style:
--   set the style, enable rectangle selection for Block, make the selection
--   visible and anchor the selection mark at the current point.
enableVisualE :: RegionStyle -> EditorM ()
enableVisualE style = withCurrentBuffer $ do
    putRegionStyle style
    rectangleSelectionA .= (Block == style)
    setVisibleSelection True
    pointB >>= setSelectionMarkPointB
-- | 'x'/'X' : delete @count@ characters in the given direction (clamped to
--   the current line) and store the removed text in the active register.
cutCharE :: Direction -> Int -> EditorM ()
cutCharE dir count = do
    r <- withCurrentBuffer $ do
        p0 <- pointB
        -- moveXorEol/moveXorSol stop at the line boundary
        (if dir == Forward then moveXorEol else moveXorSol) count
        p1 <- pointB
        let region = mkRegion p0 p1
        rope <- readRegionB region
        deleteRegionB $ mkRegion p0 p1
        leftOnEol
        return rope
    regName <- fmap vsActiveRegister getEditorDyn
    setRegisterE regName Inclusive r
-- | gt / gT : move @count@ tabs forward (gt) or backward (gT).
tabTraversalBinding :: VimBinding
tabTraversalBinding = VimBindingE (f . T.unpack . _unEv)
  where
    -- a lone 'g' still awaits the second key
    f "g" (VimState { vsMode = Normal }) = PartialMatch
    f ['g', c] (VimState { vsMode = Normal })
        | c `elem` "tT" = WholeMatch $ do
            n <- getCountE
            let step = if c == 'T' then previousTabE else nextTabE
            replicateM_ n step
            resetCountE
            return Drop
    f _ _ = NoMatch
-- | Open the file whose (tilde-expandable) name is the WORD under the
--   cursor, optionally running an editor action (e.g. opening a tab or
--   split) first. Fails with a message when the file does not exist.
openFileUnderCursor :: Maybe (EditorM ()) -> YiM ()
openFileUnderCursor editorAction = do
    fileName <- fmap R.toString . withCurrentBuffer $ readUnitB unitViWORD
    fileExists <- io $ doesFileExist =<< expandTilda fileName
    if fileExists then do
        maybeM withEditor editorAction
        void . editFile $ fileName
    else
        withEditor . fail $ "Can't find file \"" <> fileName <> "\""
-- | q&lt;char&gt; : start recording a macro into register &lt;char&gt;.
--   Only matches while no recording is in progress (a bare 'q' during
--   recording is handled by 'finishRecordingMacroBinding').
recordMacroBinding :: VimBinding
recordMacroBinding = VimBindingE (f . T.unpack . _unEv)
    where f "q" (VimState { vsMode = Normal
                          , vsCurrentMacroRecording = Nothing })
              = PartialMatch
          f ['q', c] (VimState { vsMode = Normal })
              = WholeMatch $ do
                    modifyStateE $ \s ->
                        s { vsCurrentMacroRecording = Just (c, mempty) }
                    return Finish
          f _ _ = NoMatch
-- | 'q' while recording: stop the recording and store the captured events
--   in the register named when recording started.
finishRecordingMacroBinding :: VimBinding
finishRecordingMacroBinding = VimBindingE (f . T.unpack . _unEv)
    where f "q" (VimState { vsMode = Normal
                          , vsCurrentMacroRecording = Just (macroName, Ev macroBody) })
              = WholeMatch $ do
                    -- T.drop 2 strips the leading events of the recording —
                    -- presumably the "q<name>" keys that started it; confirm
                    -- against the recording code in StateUtils.
                    let reg = Register Exclusive (R.fromText (T.drop 2 macroBody))
                    modifyStateE $ \s ->
                        s { vsCurrentMacroRecording = Nothing
                          -- left-biased union: the new recording wins
                          , vsRegisterMap = singleton macroName reg
                                         <> vsRegisterMap s
                          }
                    return Finish
          f _ _ = NoMatch
-- | @&lt;char&gt; : replay the macro stored in register &lt;char&gt;,
--   @count@ times (default once). An empty or missing register does nothing.
playMacroBinding :: VimBinding
playMacroBinding = VimBindingE (f . T.unpack . _unEv)
    where f "@" (VimState { vsMode = Normal }) = PartialMatch
          f ['@', c] (VimState { vsMode = Normal
                               , vsRegisterMap = registers
                               , vsCount = mbCount }) = WholeMatch $ do
              resetCountE
              case lookup c registers of
                  Just (Register _ evs) -> do
                      let count = fromMaybe 1 mbCount
                          -- replicate the stored event string count times
                          mkAct = Ev . T.replicate count . R.toText
                      scheduleActionStringForEval . mkAct $ evs
                      return Finish
                  Nothing -> return Drop
          f _ _ = NoMatch
-- | Run an editor action as many times as the pending count.
-- TODO: the name suggests a parameter of type (Int -> EditorM ());
-- is there a better name for this function?
withCount :: EditorM () -> EditorM ()
withCount action = getCountE >>= \n -> replicateM_ n action

-- | Run a buffer action on the current buffer, repeated per the count.
withCountOnBuffer :: BufferM () -> EditorM ()
withCountOnBuffer = withCount . withCurrentBuffer
|
atsukotakahashi/wi
|
src/library/Yi/Keymap/Vim/NormalMap.hs
|
gpl-2.0
| 19,416 | 0 | 21 | 5,623 | 5,322 | 2,786 | 2,536 | 398 | 4 |
module Game.MCubed.World where
import Control.Arrow
import Control.Monad
import Data.Lens.Lazy
import qualified Data.Maybe as Maybe
import Data.CubeTree (CubeTree)
import qualified Data.CubeTree as CubeTree
import Data.Coord (Coord)
import qualified Data.Coord as Coord
-- | A voxel world: a cube tree of 'Block's plus the tree's height.
data World =
  World { getSize :: CubeTree.Height       -- ^ height of the block tree
        , getBlockTree :: CubeTree Block   -- ^ the stored blocks
        }
  deriving (Eq)
-- | A depth within the world's cube tree.
type Depth = CubeTree.Height
-- | Procedurally produces the block at a given coordinate.
type Generator = Coord -> Block
-- | Replace the world's block tree wholesale (record-update setter).
setBlockTree :: CubeTree Block -> World -> World
setBlockTree tree world = world { getBlockTree = tree }
-- | data-lens lens onto the world's block tree.
blockTree :: Lens World (CubeTree Block)
blockTree = lens getBlockTree setBlockTree
-- | Look up the block at the given coordinate, if one is stored there.
getBlock :: Coord -> World -> Maybe Block
getBlock coord world =
  CubeTree.getAt (getSize world) coord (getBlockTree world)
-- | Insert a block at the given coordinate by updating the block tree.
setBlock :: Coord -> Block -> World -> World
setBlock coord block world = update world
  where
    update = blockTree ^%= CubeTree.insertAt (getSize world) coord block
-- | Fetch a block that is expected to exist; calls 'error' when absent.
getLoadedBlock :: Coord -> World -> Block
getLoadedBlock coord world =
  case getBlock coord world of
    Just blk -> blk
    Nothing  -> error $ "getLoadedBlock: No block at " ++ show coord
-- | Setter counterpart of 'getLoadedBlock'; identical to 'setBlock'.
setLoadedBlock :: Coord -> Block -> World -> World
setLoadedBlock = setBlock
-- | Lens onto a block assumed present; reading a missing block errors
-- (see 'getLoadedBlock').
loadedBlock :: Coord -> Lens World Block
loadedBlock = uncurry lens . (getLoadedBlock &&& setLoadedBlock)
-- | 'loadedBlock' with the coordinate given component-wise.
loadedBlockAt :: Coord.Scalar -> Coord.Scalar -> Coord.Scalar
              -> Lens World Block
loadedBlockAt x y z = loadedBlock $ Coord.Coord x y z
-- | Like 'getBlock' but falls back to generating the block when absent.
getBlockGen :: Generator -> Coord -> World -> Block
getBlockGen gen coord world =
  Maybe.fromMaybe (gen coord) (getBlock coord world)
-- | Setter ignores the generator; writing always stores the block.
setBlockGen :: Generator -> Coord -> Block -> World -> World
setBlockGen = const setBlock
-- | Lens over a block backed by a generator for the missing case.
blockGen :: Generator -> Coord -> Lens World Block
blockGen gen = uncurry lens . (getBlockGen gen &&& setBlockGen gen)
-- | Generate blocks down to a given depth. Unimplemented ('undefined').
generateDepth :: Generator -> Depth
              -> CubeTree Block -> CubeTree Block
generateDepth = undefined -- stub
-- | Generate blocks around a coordinate. Unimplemented ('undefined').
generate :: Generator -> CubeTree.Height
         -> Depth -> Depth -> Coord
         -> CubeTree Block -> CubeTree Block
generate = undefined -- stub
-- | Placeholder block type; to be fleshed out later.
data Block = Block deriving (Eq) -- stub
|
dflemstr/mcubed
|
Game/MCubed/World.hs
|
gpl-3.0
| 2,272 | 0 | 11 | 526 | 664 | 351 | 313 | 54 | 1 |
module StdLib (builtins) where
import AST
import ASTErrors
import Misc
import Data.List (transpose, (\\))
import Control.Monad (zipWithM, foldM)
-- | Builtin (+): list/string concatenation, numeric addition with
-- Number-to-Float promotion, and element-wise vector addition.
add :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
add _ [List a, List b] = goRight (List (a ++ b))
add pos [Vector a, Vector b]
    -- NOTE(review): uses 'return . Vector' where 'sub' uses
    -- 'goRight . Vector'; assumed equivalent in Eval — confirm.
    -- Also the length-mismatch case reports an empty error list,
    -- unlike the catch-all below — confirm intended.
    | (length a == length b) = mapM (add pos) (transpose [a,b]) >>= return . Vector
    | otherwise = goLeft []
add _ [Number a, Number b] = goRight (Number (a + b))
add _ [Float a, Number b] = goRight (Float (a + (fromInteger b)))
add _ [Number a, Float b] = goRight (Float ((fromInteger a) + b))
add _ [Float a, Float b] = goRight (Float (a + b))
add _ [String a, String b] = goRight (String (a ++ b))
add pos x = goLeft [functionError pos "+" 2 x]
-- | Builtin (-): list/string difference via (\\), element-wise vector
-- subtraction, and numeric subtraction with Number-to-Float promotion.
-- Signature added to match 'add': all entries in 'builtins' share it.
sub :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
sub _ [List a, List b] = goRight (List (a \\ b))
sub pos [Vector a, Vector b]
    | (length a == length b) = mapM (sub pos) (transpose [a,b]) >>= goRight . Vector
    | otherwise = goLeft []
sub _ [Number a, Number b] = goRight (Number (a - b))
sub _ [Float a, Number b] = goRight (Float (a - (fromInteger b)))
sub _ [Number a, Float b] = goRight (Float ((fromInteger a) - b))
sub _ [Float a, Float b] = goRight (Float (a - b))
sub _ [String a, String b] = goRight (String (a \\ b))
sub pos x = goLeft [functionError pos "-" 2 x]
-- | Builtin negate for Number and Float operands.
-- Signature added to match 'add': all entries in 'builtins' share it.
neg :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
neg _ [Number a] = goRight (Number (negate a))
neg _ [Float a] = (goRight . Float . negate) a
neg pos x = goLeft [functionError pos "negate" 1 x]
-- | Builtin absolute value; deliberately shadows 'Prelude.abs'
-- (referenced qualified as StdLib.abs in 'builtins').
-- Signature added to match 'add': all entries in 'builtins' share it.
abs :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
abs _ [Number a] = (goRight . Number . Prelude.abs) a
abs _ [Float a] = (goRight . Float . Prelude.abs) a
abs pos x = goLeft [functionError pos "abs" 1 x]
-- | Builtin (*): list/string repetition, scalar*vector scaling, vector dot
-- product, and numeric multiplication with Number-to-Float promotion.
-- Signature added to match 'add': all entries in 'builtins' share it.
mul :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
mul _ [List a, Number b] = goRight (List (concat (replicate (fromInteger b) a)))
mul pos [Vector a, Number b] = mapM (\x -> mul pos [x,Number b]) a >>= goRight . Vector
mul pos [Number a, Vector b] = mapM (\x -> mul pos [x,Number a]) b >>= goRight . Vector
mul pos [Vector [], Vector []] = (goRight . Vector) []
-- Dot product: multiply element-wise, then fold with the builtin 'add'.
mul pos [Vector a, Vector b]
    | (length a) /= (length b) = goLeft []
    | otherwise = zipWithM (\x y -> mul pos [x,y]) a b >>= \(z:zs) -> foldM (\x y -> add pos [x,y]) z zs
mul _ [Number a, Number b] = goRight (Number (a * b))
mul _ [Float a, Number b] = goRight (Float (a * (fromInteger b)))
mul _ [Number a, Float b] = goRight (Float ((fromInteger a) * b))
mul _ [Float a, Float b] = goRight (Float (a * b))
mul _ [String a, Number b] = goRight (String (concat (replicate (fromInteger b) a)))
mul _ [Number b, String a] = goRight (String (concat (replicate (fromInteger b) a)))
mul pos x = goLeft [functionError pos "*" 2 x]
-- | Builtin (/): always produces a Float, converting Number operands.
-- Deliberately shadows 'Prelude.div' (referenced as StdLib.div in 'builtins').
-- Signature added to match 'add': all entries in 'builtins' share it.
div :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
div _ [Number a, Number b] = goRight (Float ((fromInteger a) / (fromInteger b)))
div _ [Float a, Number b] = goRight (Float (a / (fromInteger b)))
div _ [Number a, Float b] = goRight (Float ((fromInteger a) / b))
div _ [Float a, Float b] = goRight (Float (a / b))
div pos x = goLeft [functionError pos "/" 2 x]
-- | Builtin (:) — prepend an element to a list.
-- Signature added to match 'add': all entries in 'builtins' share it.
listconstructor :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
listconstructor pos [a, List b] = goRight (List (a:b))
listconstructor pos x = goLeft [functionError pos ":" 2 x]
-- | Builtin print: shows any single value on stdout via 'doIO'.
-- Signature added to match 'add': all entries in 'builtins' share it.
printf :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
printf _ [x] = (doIO putStrLn . show) x
printf pos x = goLeft [functionError pos "print" 1 x]
-- | Builtin printStr: prints a String value without 'show' quoting.
-- Signature added to match 'add': all entries in 'builtins' share it.
printStr :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
printStr _ [String a] = doIO putStrLn a
printStr pos x = goLeft [functionError pos "printStr" 1 x]
-- | Builtin str: renders any single value to a String via 'show'.
-- Signature added to match 'add': all entries in 'builtins' share it.
string :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
string _ [x] = (goRight . String . show) x
string pos x = goLeft [functionError pos "str" 1 x]
-- | Builtin truthiness: empty/zero/@false values map to the @false atom,
-- everything else to the @true atom.
-- Signature added to match 'add': all entries in 'builtins' share it.
bool :: SourcePos -> [ASTDatatype] -> Eval ASTDatatype
bool _ [String ""] = goRight (Atom "@false")
bool _ [Number 0] = goRight (Atom "@false")
bool _ [Float 0.0] = goRight (Atom "@false")
bool _ [List []] = goRight (Atom "@false")
bool _ [Vector []] = goRight (Atom "@false")
bool _ [Atom "@false"] = goRight (Atom "@false")
bool _ [_] = goRight (Atom "@true")
bool pos x = goLeft [functionError pos "bool" 1 x]
-- | Name-to-implementation table of all builtin operations; every entry
-- shares the type of 'add'. 'div' and 'abs' are qualified because they
-- shadow the Prelude names.
builtins =
    [
    ("+", add),
    ("-", sub),
    ("neg", neg),
    ("*", mul),
    ("/", StdLib.div),
    ("abs", StdLib.abs),
    ("str", string),
    (":", listconstructor),
    ("print", printf),
    ("printStr", printStr)
    ]
|
Jem777/deepthought
|
src/StdLib.hs
|
gpl-3.0
| 3,824 | 0 | 13 | 814 | 2,276 | 1,149 | 1,127 | 80 | 1 |
module Shipment where
import Control.Monad.Reader
import qualified Control.Applicative as C
import qualified Data.Acid as A
import Data.Acid.Remote
import Data.SafeCopy
import Data.Typeable
import qualified Data.Map as M
import qualified Data.Tree as Tr
import qualified Data.Aeson as J
import qualified Data.Text.Lazy.Encoding as E
import qualified Data.Text.Lazy as L
import Data.Time.Clock
import GHC.Generics
import qualified Currency as Cu
import Data.Data
import Entity(EntityState)
import qualified FiscalYear as Fy
import qualified Company as Co
import qualified Product as Pr
import qualified Stock as St
import qualified Account as Ac
import qualified Carrier as Ca
-- | When shipping cost is charged.
data ShipmentCostMethod = OnOrder | OnShipment
    deriving(Show, Eq, Ord, Typeable, Generic, Enum, Bounded, Data)
-- | How/when shipments are created relative to their order.
data ShipmentMethod = OnOrderProcessed | OnShipmentSent | Manual
    deriving(Show,Eq, Ord, Enum, Bounded, Typeable, Generic, Data)
-- | Lifecycle state of a shipment.
data ShipmentState = Draft | Waiting | Assigned | Done | Cancel
    deriving(Show, Eq, Ord, Enum, Bounded, Typeable, Generic, Data)
-- Avoid name collision
data ShipmentType = Supplier | Customer | Internal | InventoryShipment
    deriving (Show, Eq, Ord, Typeable, Generic, Data)
-- | A shipment: its stock moves, lifecycle state, direction, carrier,
-- and carrying cost with currency.
data Shipment = Shipment {
    incoming :: [St.Move],          -- ^ inbound stock moves
    inventory :: [St.Move],         -- ^ internal/inventory moves
    shipmentState :: ShipmentState,
    shipmentType :: ShipmentType,
    carrier :: Ca.Carrier,
    cost :: Ac.Amount,
    currency :: Cu.Currency
    } deriving (Show, Eq, Ord, Typeable, Generic, Data)
-- | Total shipping cost across a batch of shipments.
-- (Eta-reduced: the original wrapped the fold in a redundant lambda.)
computeCosts :: [Shipment] -> Ac.Amount
computeCosts = foldr (\incr acc -> acc + (cost incr)) 0
-- | A shipment sent directly from supplier to customer.
data DropShipment = DropShipment {
    dropShipmentProduct:: Pr.Product, -- name space...
    supplier :: Co.Party,
    customer :: Co.Party,
    dropMoves :: [St.Move],
    dropShipmentState :: ShipmentState
    } deriving (Show, Eq, Ord, Typeable, Generic)
-- | One counted product line within an inventory.
data InventoryLine = InventoryLine {
    product :: Pr.Product,
    quantity :: Ac.Quantity}
    deriving (Show, Eq, Ord, Typeable, Generic)
-- | A physical inventory count over a storage location, with a
-- lost-and-found location for discrepancies.
data Inventory = Inventory {
        storageLocation :: St.LocationType,
        lostAndFound :: St.LocationType,
        inventoryLine :: [InventoryLine]
    } deriving (Show, Eq, Ord, Typeable, Generic)
-- Generic-derived JSON codecs plus SafeCopy (acid-state) instances for
-- every serialisable type in this module.
instance J.ToJSON ShipmentState
instance J.FromJSON ShipmentState
instance J.ToJSON ShipmentType
instance J.FromJSON ShipmentType
instance J.ToJSON Shipment
instance J.FromJSON Shipment
instance J.ToJSON ShipmentMethod
instance J.FromJSON ShipmentMethod
instance J.ToJSON ShipmentCostMethod
instance J.FromJSON ShipmentCostMethod
instance J.ToJSON DropShipment
instance J.FromJSON DropShipment
$(deriveSafeCopy 0 'base ''ShipmentState)
$(deriveSafeCopy 0 'base ''ShipmentType)
$(deriveSafeCopy 0 'base ''Shipment)
$(deriveSafeCopy 0 'base ''ShipmentMethod)
$(deriveSafeCopy 0 'base ''ShipmentCostMethod)
$(deriveSafeCopy 0 'base ''DropShipment)
|
dservgun/erp
|
src/common/Shipment.hs
|
gpl-3.0
| 2,902 | 0 | 11 | 509 | 876 | 501 | 375 | -1 | -1 |
module Main where
import qualified Data.List as L
-- | All ASCII letters, both cases; used to classify characters.
englishLetters :: String
englishLetters = ['a'..'z'] ++ ['A'..'Z']
-- | Count English letters and total characters in one strict pass.
-- Replaces the original 'mapAccumL', which also rebuilt (and discarded)
-- a copy of the whole input; a strict left fold is the idiomatic reduction.
lettersCount :: String -> (Int, Int)
lettersCount = L.foldl' step (0, 0)
    where
        step (letters, total) c
            | c `elem` englishLetters = (letters + 1, total + 1)
            | otherwise               = (letters    , total + 1)
-- | Ratio of two integral counts as a Double. The signature pins the
-- result type that the call site previously obtained via defaulting.
divide :: (Integral a, Integral b) => a -> b -> Double
divide l t = (fromIntegral l) / (fromIntegral t)
-- | Read "text.txt" and report how many of its characters are English
-- letters, plus the letters/total ratio.
main = do
    --let testText = "ABC2@#%#$^#$&f"
    fileCont <- readFile "text.txt"
    let (letters, total) = lettersCount fileCont
    putStrLn $ "Letters count: " ++ show letters
    putStrLn $ "Total count: " ++ show total
    putStrLn $ "Ratio = " ++ (show $ divide letters total)
    putStrLn "Ok."
|
graninas/Haskell-Algorithms
|
Tests/TextRatio.hs
|
gpl-3.0
| 714 | 0 | 10 | 202 | 293 | 154 | 139 | 16 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Repo.Project
( Project(Project, projectName, projectPath)
, getProjectHead
, findCommit
-- TODO: should not be exported
, withProject
) where
import Control.Applicative ((<$>))
import Control.Monad.Trans (lift)
import Data.Maybe (fromMaybe)
import Data.Tagged (Tagged(Tagged))
import Data.Text (Text)
import Git (RefTarget,
Commit(commitParents),
lookupReference, lookupCommit)
import Repo.Git (Git, WithFactory, withGitRepo, resolveRef)
import Repo.Monad (Repo)
-- | A named git project and the filesystem path of its repository.
data Project = Project { projectName :: Text
                       , projectPath :: FilePath
                       }
             deriving Show
-- | Run a git action inside the project's repository directory.
withProject :: WithFactory n r => Project -> Git n a -> Repo a
withProject = withGitRepo . projectPath
-- | Resolve a project's HEAD reference, failing hard when it is missing.
getProjectHead :: WithFactory n r => Project -> Repo (RefTarget r)
getProjectHead proj =
  withProject proj $ do
    mref <- lift (lookupReference "HEAD")
    case mref of
      Just target -> return target
      Nothing     -> error $ "could not resolve HEAD at " ++ projectPath proj
-- | Walk first-parent history from @start@ and return the first commit
-- satisfying @p@, or Nothing when history is exhausted.
-- (Renamed the former @head@ parameter, which shadowed the partial
-- 'Prelude.head'.)
findCommit :: WithFactory n r
           => Project -> RefTarget r -> (Commit r -> Bool) -> Repo (Maybe (Commit r))
findCommit proj start p =
  withProject proj $ do
    startCommitOid <- Tagged <$> resolveRef start
    go startCommitOid
  where go cid = do
          commit <- lift $ lookupCommit cid
          if p commit
          then return $ Just commit
          else
            case commitParents commit of
              [] -> return Nothing
              -- Only the first parent is followed (linearised history).
              (firstParent : _) -> go firstParent
|
aartamonau/repo-bisect
|
src/Repo/Project.hs
|
gpl-3.0
| 1,632 | 0 | 14 | 464 | 456 | 244 | 212 | 44 | 3 |
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
{-|
Module : Client.Network
Description : Utility functions for the network on the client side.
Copyright : (c) Robin Raymond, 2016-2017
License : GPL-3
Maintainer : [email protected]
Portability : POSIX
-}
-- Placeholder module: no client-side network definitions exist yet;
-- the export list is intentionally empty.
module Client.Network
    (
    ) where
|
r-raymond/purple-muon
|
src/Client/Network.hs
|
gpl-3.0
| 1,001 | 0 | 3 | 200 | 26 | 23 | 3 | 2 | 0 |
-- OPTIONS_GHC is a file-header pragma and must precede the module header;
-- it was previously placed after 'module LiveSource where' and ignored.
{-# OPTIONS_GHC -fno-cse #-}
module LiveSource where
import Control.Applicative
import DynFlags
import GHC
import GHC.Paths
import GhcMonad (liftIO)
import Unsafe.Coerce
import Control.Exception
import Control.Concurrent
import System.Directory
import System.Time
import System.Environment
import Data.IORef
import System.IO.Unsafe
import Data.Unsafe.Global
import Control.Monad
-- | Entry point: expects exactly one argument, the file to watch/reload.
newMain = do
  argv <- getArgs
  case argv of
    [watched] -> repeatOnModification watched
    _         -> print "usage: filename_to_repeat_loading"
-- | 'try' specialised to 'IOError' so call sites need no annotation.
ntry2 :: IO a -> IO (Either IOError a)
ntry2 = try
-- Global mutable slot for the watched file's last-seen modification time.
-- NOINLINE keeps the unsafe global from being duplicated by the optimiser.
{-# NOINLINE loadAndRunFilePrintingErrorMessageUnsafeGlobal #-}
loadAndRunFilePrintingErrorMessageUnsafeGlobal = newGlobal Nothing
-- | Load and run the file only when it changed since the previous call
-- (mod time tracked in the global above); Nothing when unchanged or failed.
loadAndRunFilePrintingErrorMessageUnsafe filePath = do
  maybeLastModTime <- readIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal
  lastModified' <- getModificationTime filePath
  case maybeLastModTime of
    Nothing -> do
      -- First call: remember the timestamp, then load.
      putStrLn "loaded"
      writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
      loadAndRunFilePrintingErrorMessage filePath
    (Just lastModified) -> case lastModified < lastModified' of
      True -> do
        -- File is newer than last seen: reload.
        putStrLn "reloaded"
        writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
        loadAndRunFilePrintingErrorMessage filePath
      False -> return Nothing
-- Global cache of the last successful load result. NOINLINE added for
-- consistency with the other 'newGlobal' slots in this module: without it
-- the optimiser may inline/duplicate the unsafe global, yielding refs that
-- no longer share state.
{-# NOINLINE loadAndRunFilePrintingErrorMessageUnsafeWithCacheGlobal #-}
loadAndRunFilePrintingErrorMessageUnsafeWithCacheGlobal = newGlobal Nothing
-- | Like 'loadAndRunFilePrintingErrorMessageUnsafe' but caches the last
-- result. Returns (result, code): 0 = first load, 1 = reload, 2 = served
-- from cache, 3 = file could not be stat'ed.
-- NOTE(review): mod times live in ...UnsafeGlobal (shared with the
-- non-caching variant); only the result is stored in ...WithCacheGlobal.
loadAndRunFilePrintingErrorMessageUnsafeWithCache filePath = do
  maybeLastModTime <- readIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal
  lastModifiedSafe' <- getModificationTimeSafe filePath
  case lastModifiedSafe' of
    Just lastModified' ->
      case maybeLastModTime of
        Nothing -> do
          putStrLn "loaded"
          writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
          res <- loadAndRunFilePrintingErrorMessage filePath
          writeIORef loadAndRunFilePrintingErrorMessageUnsafeWithCacheGlobal res
          return (res,0)
        (Just lastModified) -> case lastModified < lastModified' of
          True -> do
            putStrLn "reloaded"
            writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
            -- Also publishes the new timestamp to the change-checker's slot.
            writeIORef getFileChangedUnsafeGlobal (Just lastModified')
            res <- loadAndRunFilePrintingErrorMessage filePath
            writeIORef loadAndRunFilePrintingErrorMessageUnsafeWithCacheGlobal res
            return (res,1)
          False -> do
            res <- readIORef loadAndRunFilePrintingErrorMessageUnsafeWithCacheGlobal
            return (res,2)
    _ -> return (Nothing,3)
{-# NOINLINE getFileChangedUnsafeGlobal #-}
-- Intended slot for this checker's last-seen mod time; see NOTE below.
getFileChangedUnsafeGlobal = newGlobal Nothing
-- | Report (and record) whether the file changed since the last check.
-- NOTE(review): the body reads/writes loadAndRunFilePrintingErrorMessageUnsafeGlobal,
-- not getFileChangedUnsafeGlobal (which is only written by the caching
-- loader above) — looks like a copy/paste slip; confirm before changing.
getFileChangedUnsafe filePath = do
  maybeLastModTime <- readIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal
  lastModified' <- getModificationTime filePath
  case maybeLastModTime of
    Nothing -> do
      writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
      return True
    (Just lastModified) -> case lastModified < lastModified' of
      True -> do
        writeIORef loadAndRunFilePrintingErrorMessageUnsafeGlobal (Just lastModified')
        return True
      False -> return False
-- | Load and run the file; on failure, print the exception, verify the
-- file still exists, and yield Nothing.
loadAndRunFilePrintingErrorMessage filePath =
  ntry (loadAndRunFile filePath) >>= either onFailure (return . Just)
  where
    onFailure msg = do
      print msg
      checkFileExists filePath
      return Nothing
-- | Verify the file exists, then enter the reload-on-change polling loop.
repeatOnModification filePath = do
  checkFileExists filePath
  repeatOnModification' Nothing filePath
-- | 'getModificationTime' that yields Nothing instead of throwing.
getModificationTimeSafe filePath =
  fmap (either (const Nothing) Just) (ntry (getModificationTime filePath))
-- | Polling loop: rerun the file whenever its modification time advances,
-- sleeping 10ms between checks.
repeatOnModification' lastModified filePath = do
  lastModified' <- getModificationTimeSafe filePath
  case lastModified of
    -- 'a' is unused; the comparison is applicatively lifted over both
    -- Maybe timestamps, so it is Nothing if either stat failed.
    (Just a) -> case ((<) <$> lastModified <*> lastModified') of
      (Just True) -> loadAndRunFilePrintingErrorMessage filePath
      _ -> return Nothing
    -- No previous timestamp: always run once.
    _ -> loadAndRunFilePrintingErrorMessage filePath
  threadDelay 10000
  repeatOnModification' lastModified' filePath
-- | Throw an 'AssertionFailed' error unless the file exists.
checkFileExists fp = do
  present <- doesFileExist fp
  unless present (throw $ AssertionFailed ("file "++fp++" not found"))
-- | Compile @filePath@ with the GHC API (interpreted, linked in memory),
-- look up its @liveMain@, run it, and return the result. The working
-- directory is saved before the session and restored afterwards.
loadAndRunFile filePath = defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
  oldcwd <- getCurrentDirectory
  res <- runGhc (Just libdir) $ do
    dflags <- getSessionDynFlags
    setSessionDynFlags $ dflags { hscTarget = HscInterpreted
                                , ghcLink = LinkInMemory
                                }
    setTargets =<< sequence [guessTarget filePath Nothing]
    load LoadAllTargets
    setContext [IIModule $ mkModuleName "Main"]
    -- unsafeCoerce: the interpreted "liveMain" is trusted to be an IO action.
    act <- unsafeCoerce <$> compileExpr "liveMain"
    g <- liftIO act
    return g
  setCurrentDirectory oldcwd
  return res
-- | 'try' specialised to 'SomeException': catches everything.
ntry :: IO a -> IO (Either SomeException a)
ntry = try
|
xpika/live-source
|
src/LiveSource.hs
|
gpl-3.0
| 5,981 | 0 | 20 | 2,028 | 1,153 | 534 | 619 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Accounts.Proposals.Accept
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Mark the proposal as accepted at the given revision number. If the
-- number does not match the server\'s revision number an \`ABORTED\` error
-- message will be returned. This call updates the proposal_state from
-- \`PROPOSED\` to \`BUYER_ACCEPTED\`, or from \`SELLER_ACCEPTED\` to
-- \`FINALIZED\`. Upon calling this endpoint, the buyer implicitly agrees
-- to the terms and conditions optionally set within the proposal by the
-- publisher.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.accounts.proposals.accept@.
module Network.Google.Resource.AdExchangeBuyer2.Accounts.Proposals.Accept
(
-- * REST Resource
AccountsProposalsAcceptResource
-- * Creating a Request
, accountsProposalsAccept
, AccountsProposalsAccept
-- * Request Lenses
, apaXgafv
, apaUploadProtocol
, apaAccessToken
, apaUploadType
, apaPayload
, apaProposalId
, apaAccountId
, apaCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.accounts.proposals.accept@ method which the
-- 'AccountsProposalsAccept' request conforms to.
-- NOTE: this module is auto-generated (see header); prefer regenerating
-- over hand-editing this servant-style route type.
type AccountsProposalsAcceptResource =
     "v2beta1" :>
       "accounts" :>
         Capture "accountId" Text :>
           "proposals" :>
             CaptureMode "proposalId" "accept" Text :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] AcceptProposalRequest :>
                             Post '[JSON] Proposal
-- | Mark the proposal as accepted at the given revision number. If the
-- number does not match the server\'s revision number an \`ABORTED\` error
-- message will be returned. This call updates the proposal_state from
-- \`PROPOSED\` to \`BUYER_ACCEPTED\`, or from \`SELLER_ACCEPTED\` to
-- \`FINALIZED\`. Upon calling this endpoint, the buyer implicitly agrees
-- to the terms and conditions optionally set within the proposal by the
-- publisher.
--
-- /See:/ 'accountsProposalsAccept' smart constructor.
-- Auto-generated request record: one field per route parameter of
-- 'AccountsProposalsAcceptResource' (strict; Maybe = optional).
data AccountsProposalsAccept =
  AccountsProposalsAccept'
    { _apaXgafv :: !(Maybe Xgafv)
    , _apaUploadProtocol :: !(Maybe Text)
    , _apaAccessToken :: !(Maybe Text)
    , _apaUploadType :: !(Maybe Text)
    , _apaPayload :: !AcceptProposalRequest
    , _apaProposalId :: !Text
    , _apaAccountId :: !Text
    , _apaCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsProposalsAccept' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'apaXgafv'
--
-- * 'apaUploadProtocol'
--
-- * 'apaAccessToken'
--
-- * 'apaUploadType'
--
-- * 'apaPayload'
--
-- * 'apaProposalId'
--
-- * 'apaAccountId'
--
-- * 'apaCallback'
accountsProposalsAccept
    :: AcceptProposalRequest -- ^ 'apaPayload'
    -> Text -- ^ 'apaProposalId'
    -> Text -- ^ 'apaAccountId'
    -> AccountsProposalsAccept
accountsProposalsAccept pApaPayload_ pApaProposalId_ pApaAccountId_ =
  AccountsProposalsAccept'
    { _apaXgafv = Nothing
    , _apaUploadProtocol = Nothing
    , _apaAccessToken = Nothing
    , _apaUploadType = Nothing
    , _apaPayload = pApaPayload_
    , _apaProposalId = pApaProposalId_
    , _apaAccountId = pApaAccountId_
    , _apaCallback = Nothing
    }
-- Auto-generated field lenses and the GoogleRequest wiring for this call.
-- | V1 error format.
apaXgafv :: Lens' AccountsProposalsAccept (Maybe Xgafv)
apaXgafv = lens _apaXgafv (\ s a -> s{_apaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
apaUploadProtocol :: Lens' AccountsProposalsAccept (Maybe Text)
apaUploadProtocol
  = lens _apaUploadProtocol
      (\ s a -> s{_apaUploadProtocol = a})
-- | OAuth access token.
apaAccessToken :: Lens' AccountsProposalsAccept (Maybe Text)
apaAccessToken
  = lens _apaAccessToken
      (\ s a -> s{_apaAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
apaUploadType :: Lens' AccountsProposalsAccept (Maybe Text)
apaUploadType
  = lens _apaUploadType
      (\ s a -> s{_apaUploadType = a})
-- | Multipart request metadata.
apaPayload :: Lens' AccountsProposalsAccept AcceptProposalRequest
apaPayload
  = lens _apaPayload (\ s a -> s{_apaPayload = a})
-- | The ID of the proposal to accept.
apaProposalId :: Lens' AccountsProposalsAccept Text
apaProposalId
  = lens _apaProposalId
      (\ s a -> s{_apaProposalId = a})
-- | Account ID of the buyer.
apaAccountId :: Lens' AccountsProposalsAccept Text
apaAccountId
  = lens _apaAccountId (\ s a -> s{_apaAccountId = a})
-- | JSONP
apaCallback :: Lens' AccountsProposalsAccept (Maybe Text)
apaCallback
  = lens _apaCallback (\ s a -> s{_apaCallback = a})
instance GoogleRequest AccountsProposalsAccept where
        type Rs AccountsProposalsAccept = Proposal
        type Scopes AccountsProposalsAccept =
             '["https://www.googleapis.com/auth/adexchange.buyer"]
        requestClient AccountsProposalsAccept'{..}
          = go _apaAccountId _apaProposalId _apaXgafv
              _apaUploadProtocol
              _apaAccessToken
              _apaUploadType
              _apaCallback
              (Just AltJSON)
              _apaPayload
              adExchangeBuyer2Service
          where go
                  = buildClient
                      (Proxy :: Proxy AccountsProposalsAcceptResource)
                      mempty
|
brendanhay/gogol
|
gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Accounts/Proposals/Accept.hs
|
mpl-2.0
| 6,502 | 0 | 19 | 1,449 | 872 | 512 | 360 | 127 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.InstanceGroups.ListInstances
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the instances in the specified instance group. The orderBy query
-- parameter is not supported.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instanceGroups.listInstances@.
module Network.Google.Resource.Compute.InstanceGroups.ListInstances
(
-- * REST Resource
InstanceGroupsListInstancesResource
-- * Creating a Request
, instanceGroupsListInstances'
, InstanceGroupsListInstances'
-- * Request Lenses
, igliReturnPartialSuccess
, igliOrderBy
, igliProject
, igliZone
, igliPayload
, igliFilter
, igliPageToken
, igliInstanceGroup
, igliMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instanceGroups.listInstances@ method which the
-- 'InstanceGroupsListInstances'' request conforms to.
-- NOTE: this module is auto-generated (see header); prefer regenerating
-- over hand-editing this servant-style route type.
type InstanceGroupsListInstancesResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "instanceGroups" :>
                   Capture "instanceGroup" Text :>
                     "listInstances" :>
                       QueryParam "returnPartialSuccess" Bool :>
                         QueryParam "orderBy" Text :>
                           QueryParam "filter" Text :>
                             QueryParam "pageToken" Text :>
                               QueryParam "maxResults" (Textual Word32) :>
                                 QueryParam "alt" AltJSON :>
                                   ReqBody '[JSON]
                                     InstanceGroupsListInstancesRequest
                                     :> Post '[JSON] InstanceGroupsListInstances
-- | Lists the instances in the specified instance group. The orderBy query
-- parameter is not supported.
--
-- /See:/ 'instanceGroupsListInstances'' smart constructor.
-- Auto-generated request record: one field per route parameter of
-- 'InstanceGroupsListInstancesResource' (strict; Maybe = optional).
data InstanceGroupsListInstances' =
  InstanceGroupsListInstances''
    { _igliReturnPartialSuccess :: !(Maybe Bool)
    , _igliOrderBy :: !(Maybe Text)
    , _igliProject :: !Text
    , _igliZone :: !Text
    , _igliPayload :: !InstanceGroupsListInstancesRequest
    , _igliFilter :: !(Maybe Text)
    , _igliPageToken :: !(Maybe Text)
    , _igliInstanceGroup :: !Text
    , _igliMaxResults :: !(Textual Word32)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstanceGroupsListInstances'' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igliReturnPartialSuccess'
--
-- * 'igliOrderBy'
--
-- * 'igliProject'
--
-- * 'igliZone'
--
-- * 'igliPayload'
--
-- * 'igliFilter'
--
-- * 'igliPageToken'
--
-- * 'igliInstanceGroup'
--
-- * 'igliMaxResults'
instanceGroupsListInstances'
    :: Text -- ^ 'igliProject'
    -> Text -- ^ 'igliZone'
    -> InstanceGroupsListInstancesRequest -- ^ 'igliPayload'
    -> Text -- ^ 'igliInstanceGroup'
    -> InstanceGroupsListInstances'
instanceGroupsListInstances' pIgliProject_ pIgliZone_ pIgliPayload_ pIgliInstanceGroup_ =
  InstanceGroupsListInstances''
    { _igliReturnPartialSuccess = Nothing
    , _igliOrderBy = Nothing
    , _igliProject = pIgliProject_
    , _igliZone = pIgliZone_
    , _igliPayload = pIgliPayload_
    , _igliFilter = Nothing
    , _igliPageToken = Nothing
    , _igliInstanceGroup = pIgliInstanceGroup_
    , _igliMaxResults = 500
    }
-- Auto-generated field lenses for 'InstanceGroupsListInstances''.
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
igliReturnPartialSuccess :: Lens' InstanceGroupsListInstances' (Maybe Bool)
igliReturnPartialSuccess
  = lens _igliReturnPartialSuccess
      (\ s a -> s{_igliReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
igliOrderBy :: Lens' InstanceGroupsListInstances' (Maybe Text)
igliOrderBy
  = lens _igliOrderBy (\ s a -> s{_igliOrderBy = a})
-- | Project ID for this request.
igliProject :: Lens' InstanceGroupsListInstances' Text
igliProject
  = lens _igliProject (\ s a -> s{_igliProject = a})
-- | The name of the zone where the instance group is located.
igliZone :: Lens' InstanceGroupsListInstances' Text
igliZone = lens _igliZone (\ s a -> s{_igliZone = a})
-- | Multipart request metadata.
igliPayload :: Lens' InstanceGroupsListInstances' InstanceGroupsListInstancesRequest
igliPayload
  = lens _igliPayload (\ s a -> s{_igliPayload = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
igliFilter :: Lens' InstanceGroupsListInstances' (Maybe Text)
igliFilter
  = lens _igliFilter (\ s a -> s{_igliFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
igliPageToken :: Lens' InstanceGroupsListInstances' (Maybe Text)
igliPageToken
  = lens _igliPageToken
      (\ s a -> s{_igliPageToken = a})
-- | The name of the instance group from which you want to generate a list of
-- included instances.
igliInstanceGroup :: Lens' InstanceGroupsListInstances' Text
igliInstanceGroup
  = lens _igliInstanceGroup
      (\ s a -> s{_igliInstanceGroup = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
igliMaxResults :: Lens' InstanceGroupsListInstances' Word32
igliMaxResults
  = lens _igliMaxResults
      (\ s a -> s{_igliMaxResults = a})
      . _Coerce
-- Auto-generated GoogleRequest wiring: maps the record onto the route's
-- client function with the fields in route order.
instance GoogleRequest InstanceGroupsListInstances'
         where
        type Rs InstanceGroupsListInstances' =
             InstanceGroupsListInstances
        type Scopes InstanceGroupsListInstances' =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        requestClient InstanceGroupsListInstances''{..}
          = go _igliProject _igliZone _igliInstanceGroup
              _igliReturnPartialSuccess
              _igliOrderBy
              _igliFilter
              _igliPageToken
              (Just _igliMaxResults)
              (Just AltJSON)
              _igliPayload
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy InstanceGroupsListInstancesResource)
                      mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/InstanceGroups/ListInstances.hs
|
mpl-2.0
| 9,100 | 0 | 23 | 2,025 | 993 | 589 | 404 | 146 | 1 |
module TestIO (resource) where
import Control.Monad (forever)
import API
-- | The plugin's CLI interface: the stock 'testio' value with its
-- read-eval loop replaced by 'loop'.
resource :: CLIInterface
resource = testio { repl = loop }
-- | Echo stdin to stdout forever, one line at a time.
loop :: IO ()
loop = forever (putStrLn =<< getLine)
|
stepcut/plugins
|
testsuite/load/plain/TestIO.hs
|
lgpl-2.1
| 188 | 0 | 6 | 35 | 63 | 37 | 26 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Stubby
( stubby
, Stubby
, getAdmin
, getStubs
, wait
) where
import Stubby.Settings (Settings, getQuiet, getDatafile)
import Stubby.CLI.Logging (info,stored)
import Stubby.Net.Admin (adminserver)
import Stubby.Net.Stubs (stubserver)
import Stubby.Data.Endpoint (Endpoint, getRequest)
import Stubby.Data.Request (getUrl, getMethods)
import qualified Data.ByteString.Char8 as BS (readFile, pack, concat)
import Data.Yaml (decode)
import Data.Text.Encoding (encodeUtf8)
import Control.Monad (unless)
import Control.Concurrent.Async (Async, async, waitEither_)
import Control.Exception (catch)
import System.IO.Error (isDoesNotExistError)
-- | Boot a stubby instance: load endpoints from the configured data
-- file, optionally print startup messages (suppressed by the quiet
-- setting), then start the admin and stub servers on their own threads.
stubby :: Settings -> IO Stubby
stubby settings = do
    endpoints <- parseEndpoints (getDatafile settings)
    unless (getQuiet settings) (startupMessages endpoints)
    adminA <- async (adminserver settings)
    stubA  <- async (stubserver settings)
    return (Stubby adminA stubA)
-- | Handle for a running stubby instance: one async thread per server.
data Stubby = Stubby
    { adminThread :: Async () -- ^ admin server thread
    , stubsThread :: Async () -- ^ stub server thread
    }
-- | Async handle of the admin server thread.
getAdmin :: Stubby -> Async ()
getAdmin = adminThread

-- | Async handle of the stub server thread.
getStubs :: Stubby -> Async ()
getStubs = stubsThread
-- | Block until either server thread finishes (or dies).
wait :: Stubby -> IO ()
wait (Stubby a s) = waitEither_ a s
-- | Read and YAML-decode the data file into endpoints.
-- A missing file is treated as an empty endpoint list ("[]"); any other
-- IO error is re-raised, and an undecodable file aborts via 'error'.
parseEndpoints :: FilePath -> IO [Endpoint]
parseEndpoints f = do
    contents <- BS.readFile f `catch` readHandler
    maybe (error "Cannot parse data file") return
          (decode contents :: Maybe [Endpoint])
  where
    readHandler e
        | isDoesNotExistError e = return "[]"
        | otherwise             = ioError e
-- | Log one "Loaded <methods> <url>" line per endpoint.
printLoaded :: [Endpoint] -> IO ()
printLoaded = mapM_ logOne
  where
    logOne ep =
        let req     = getRequest ep
            url     = encodeUtf8 (getUrl req)
            methods = BS.pack (show (getMethods req))
        in stored (BS.concat ["Loaded ", methods, " ", url])
-- | Log every loaded endpoint, then the quit hint.
startupMessages :: [Endpoint] -> IO ()
startupMessages es = do
    printLoaded es
    info "\nQuit: ctrl-c\n"
|
mrak/stubby4hs
|
src/lib/Stubby.hs
|
apache-2.0
| 1,976 | 0 | 13 | 460 | 646 | 342 | 304 | 52 | 2 |
-- Copyright 2013 Joseph Tel Abrahamson
--
-- Licensed under the Apache License, Version 2.0 (the "License"); you
-- may not use this file except in compliance with the License. You
-- may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-- implied. See the License for the specific language governing
-- permissions and limitations under the License.
-- |
-- Module : Network.Addresses
-- Copyright : (c) Joseph Abrahamson 2013
-- License : Apache 2.0
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Module modeling IPv4 and IPv6 addresses.
module Network.Addresses where
import Control.Applicative
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as SL
import Data.ByteString.Builder (toLazyByteString, char7)
import Data.ByteString.Builder.Prim (word8Dec, word16Hex, primBounded)
import Data.Attoparsec.Char8
import Data.Monoid
import Data.List
import Data.String
import Data.Word
import Data.Char
import Network.Nanomsg.Util
-- | An IPv4 address as four strict, unpacked octets.
data IPv4 = IPv4 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
                 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
          deriving (Eq, Ord)

-- Shown via the serialized dotted-decimal form, e.g. @IPv4 127.0.0.1@.
instance Show IPv4 where show ip = "IPv4 " ++ S8.unpack (ser ip)

-- | Dotted-decimal rendering: each octet in decimal, joined with '.'.
instance Serial IPv4 where
  ser (IPv4 a b c d) =
    SL.toStrict
    . toLazyByteString
    . mconcat
    . intersperse (char7 '.')
    . map (primBounded word8Dec)
    $ [a, b, c, d]
-- | Parse a dotted-decimal IPv4 address, e.g. @\"192.168.0.1\"@.
--
-- Each octet must be in the range 0-255.  Octets are parsed at type
-- 'Int' so out-of-range values are rejected rather than silently
-- wrapped (parsing 'decimal' directly at 'Word8' wraps modulo 256);
-- the old upper bound of 128 also wrongly rejected valid octets
-- 129-255.
ipv4Parser :: Parser IPv4
ipv4Parser =
  IPv4 <$> (decWord8 <* char '.')
       <*> (decWord8 <* char '.')
       <*> (decWord8 <* char '.')
       <*> decWord8
  where decWord8 =
          do x <- decimal :: Parser Int
             if (x >= 0 && x <= 255)
               -- Narrow only after validation, so no wrap can occur.
               then return (fromIntegral x)
               else fail "invalid IPv4 digit"
-- | Allows IPv4 literals with @OverloadedStrings@.
--
-- An invalid literal aborts with a descriptive 'error' instead of the
-- opaque irrefutable-pattern failure the previous @let Right ip = ...@
-- produced.
instance IsString IPv4 where
  fromString s =
    either (\e -> error ("invalid IPv4 literal " ++ show s ++ ": " ++ e))
           id
           (parseOnly ipv4Parser (S8.pack s))
-- decimal
-- hexadecimal
-- | An IPv6 address as eight strict, unpacked 16-bit groups.
data IPv6 = IPv6 {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
                 {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
                 {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
                 {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
          deriving (Eq, Ord)

-- Shown via the serialized colon-separated hex form.
instance Show IPv6 where show ip = "IPv6 " ++ S8.unpack (ser ip)

-- | Doesn't do double-colon compression.
-- Renders each group in lowercase hex, joined with ':'.
instance Serial IPv6 where
  ser (IPv6 a b c d e f g h) =
    SL.toStrict
    . toLazyByteString
    . mconcat
    . intersperse (char7 ':')
    . map (primBounded word16Hex)
    $ [a, b, c, d, e, f, g, h]
-- | Parses a IPv6 address. Trims each word to the right and doesn't
-- handle the repeated 0 shortening.
--
-- TODO: Handle repeated zeros properly for addresses like "::1".
--
-- parseOnly ipv6Parser "0:0:0:0:0:0:1234567890abcdef:0"
-- == Right 0:0:0:0:0:0:cdef:0
--
-- | Parse an IPv6 address as eight colon-separated hex groups.
-- See the note above: words are trimmed to 16 bits and the repeated-zero
-- ("::") shorthand is not handled.
ipv6Parser :: Parser IPv6
ipv6Parser =
  IPv6 <$> w <*> w <*> w <*> w <*> w <*> w <*> w <*> hexadecimal
  where -- One colon-terminated 16-bit hex group; the eighth group has
        -- no trailing colon so it is parsed separately above.
        w = hexadecimal <* char ':'
-- | Allows IPv6 literals with @OverloadedStrings@.
--
-- An invalid literal aborts with a descriptive 'error' instead of the
-- opaque irrefutable-pattern failure the previous @let Right ip = ...@
-- produced.
instance IsString IPv6 where
  fromString s =
    either (\e -> error ("invalid IPv6 literal " ++ show s ++ ": " ++ e))
           id
           (parseOnly ipv6Parser (S8.pack s))
-- | An address of either family: four octets for IPv4, eight 16-bit
-- groups for IPv6.  All fields are strict and unpacked.
data IP = V4 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
             {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
        | V6 {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
             {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
             {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
             {-# UNPACK #-} !Word16 {-# UNPACK #-} !Word16
        deriving (Eq, Ord)
-- | Embed an 'IPv4' into the combined 'IP' sum type.
forgetIPv4 :: IPv4 -> IP
forgetIPv4 ip = case ip of
  IPv4 a b c d -> V4 a b c d

-- | Embed an 'IPv6' into the combined 'IP' sum type.
forgetIPv6 :: IPv6 -> IP
forgetIPv6 ip = case ip of
  IPv6 a b c d e f g h -> V6 a b c d e f g h
-- | Delegates to the family-specific 'Serial' instances.
instance Serial IP where
  ser (V4 a b c d) = ser (IPv4 a b c d)
  ser (V6 a b c d e f g h) = ser (IPv6 a b c d e f g h)

-- Shown via the serialized form, prefixed with "IP ".
instance Show IP where show ip = "IP " ++ S8.unpack (ser ip)
-- | Allows IP literals of either family with @OverloadedStrings@;
-- tries IPv4 first, then IPv6.
--
-- An invalid literal aborts with a descriptive 'error' instead of the
-- opaque irrefutable-pattern failure the previous @let Right ip = ...@
-- produced.
instance IsString IP where
  fromString s =
    either (\e -> error ("invalid IP literal " ++ show s ++ ": " ++ e))
           id
           (parseOnly parser (S8.pack s))
    where parser = (forgetIPv4 <$> ipv4Parser)
              <|> (forgetIPv6 <$> ipv6Parser)
|
tel/hs-nanomsg
|
src/Network/Addresses.hs
|
apache-2.0
| 4,347 | 0 | 15 | 1,120 | 1,121 | 595 | 526 | 83 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.