code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE OverloadedStrings #-}
module UI where
import Control.Lens
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.RWS
import Data.Text()
import UI.NCurses
import Shogi.Board
import Shogi.Game
data UIState = UIState { _game :: Game, _current :: (Integer,Integer)}
game :: Lens' UIState Game
game = lens _game (\uis g -> uis { _game = g })
current :: Lens' UIState (Integer,Integer)
current = lens _current (\uis c -> uis { _current = c })
drawGame :: Game -> Update ()
drawGame g = do
mapM_ (\(i,line) -> moveCursor i 0 >> drawText line) $
zip [0..] [" 9 8 7 6 5 4 3 2 1 "
,"ββββ¬βββ¬βββ¬βββ¬βββ¬βββ¬βββ¬βββ¬βββ"
,"β β β β β β β β β β a"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β b"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β c"
,"ββββΌβββΌββββββΌβββΌββββββΌβββΌβββ€"
,"β β β β β β β β β β d"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β e"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β f"
,"ββββΌβββΌββββββΌβββΌββββββΌβββΌβββ€"
,"β β β β β β β β β β g"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β h"
,"ββββΌβββΌβββΌβββΌβββΌβββΌβββΌβββΌβββ€"
,"β β β β β β β β β β i"
,"ββββ΄βββ΄βββ΄βββ΄βββ΄βββ΄βββ΄βββ΄βββ"]
updateBoard (g^.board)
moveCursor 20 0
drawText "Current player: "
drawString (g^.player.to show)
maybe (return ()) (\n -> do
drawText " (Move nΒ°"
drawString (show n)
drawText ")"
) (g^.moveNum)
updateBoard :: Board -> Update ()
updateBoard b = forM_ [0..8] $ \i -> (forM_ [0..8] $ \j -> do
moveCursor (2*i+2) (3*j+1)
drawText (showSq (b !! fromInteger i !! fromInteger j))
)
update :: RWST Window [Move] UIState Curses ()
update = do
w <- ask
g <- gets (view game)
(ci,cj) <- gets (view current)
lift $ updateWindow w (drawGame g >> moveCursor (2*ci+2) (3*cj+2))
lift render
mainLoop :: RWST Window [Move] UIState Curses ()
mainLoop = loop where
loop = do
update
w <- ask
ev <- lift $ getEvent w Nothing
case ev of
Nothing -> loop
Just (EventCharacter k) -> case k of
'q' -> return ()
' ' -> modify (game.player %~ togglePlayer) >> loop
_ -> loop
Just (EventSpecialKey k) -> case k of
KeyUpArrow -> modify (current . _1 %~ (inBounds . subtract 1)) >> loop
KeyDownArrow -> modify (current . _1 %~ (inBounds . (1+))) >> loop
KeyLeftArrow -> modify (current . _2 %~ (inBounds . subtract 1)) >> loop
KeyRightArrow -> modify (current . _2 %~ (inBounds . (1+))) >> loop
_ -> loop
_ -> loop
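-- wrap a coordinate back into the 0..8 board range (e.g. -1 wraps to 8, 9 wraps to 0)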
inBounds = (`mod` 9) . (9+)
| jtanguy/shogi | src/UI.hs | bsd-3-clause | 3,871 | 4 | 21 | 1,195 | 955 | 500 | 455 | -1 | -1 |
{-# LANGUAGE GADTs, Rank2Types, CPP #-}
-----------------------------------------------------------------------------------------
-- |
-- Module : FRP.Yampa
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
--
-- Domain-specific language embedded in Haskell for programming hybrid (mixed
-- discrete-time and continuous-time) systems. Yampa is based on the concepts
-- of Functional Reactive Programming (FRP) and is structured using arrow
-- combinators.
--
-- You can find examples, tutorials and documentation on Yampa here:
--
-- <www.haskell.org/haskellwiki/Yampa>
--
-- Structuring a hybrid system in Yampa is done based on two main concepts:
--
-- * Signal Functions: 'SF'. Yampa is based on the concept of Signal Functions,
-- which are functions from a typed input signal to a typed output signal.
-- Conceptually, signals are functions from Time to Value, where time ranges
-- over the real numbers; computationally, a very dense approximation (Double)
-- is used.
--
-- * Events: 'Event'. Values that may or may not occur (and would probably
-- occur rarely). It is often used for incoming network messages, mouse
-- clicks, etc. Events are used as values carried by signals.
--
-- A complete Yampa system is defined as one Signal Function from some
-- type @a@ to a type @b@. The execution of this signal transformer
-- with specific input can be accomplished by means of two functions:
-- 'reactimate' (which needs an initialization action,
-- an input sensing action and an actuation/consumer action and executes
-- until explicitly stopped), and 'react' (which executes only one cycle).
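--
-- A minimal sketch (hypothetical; the sensing and actuation functions below
-- are placeholders, not part of Yampa, and we assume that an actuation
-- result of 'True' requests termination):
--
-- > main :: IO ()
-- > main = reactimate (return ())                        -- initialization: first input sample
-- >                   (\_ -> return (0.1, Just ()))      -- sense: fixed 0.1s time steps
-- >                   (\_ t -> print t >> return False)  -- actuate: print output, keep running
-- >                   time                               -- SF () Time: the running local time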
--
-- This will be the last version of Yampa to include mergeable records,
-- point2 and point3, vector2 and vector3, and other auxiliary definitions. The
-- internals have now changed. Although not all will be exposed in the next
-- version, below is the new project structure. Please, take a look and let us
-- know if you think there are any potential problems with it.
--
-- Main Yampa modules:
--
-- * "FRP.Yampa" -- This exports all FRP-related functions
--
-- * "FRP.Yampa.Task"
--
-- Minimal Complete FRP Definition
--
-- * "FRP.Yampa.Core"
--
-- Different FRP aspects
--
-- * "FRP.Yampa.Basic"
--
-- * "FRP.Yampa.Conditional"
--
-- * "FRP.Yampa.Delays"
--
-- * "FRP.Yampa.Event"
--
-- * "FRP.Yampa.EventS" -- Event consuming/producing SFs. To be renamed.
--
-- * "FRP.Yampa.Hybrid" -- Hybrid (discrete/continuous) SFs
--
-- * "FRP.Yampa.Integration"
--
-- * "FRP.Yampa.Loop"
--
-- * "FRP.Yampa.Random"
--
-- * "FRP.Yampa.Scan"
--
-- * "FRP.Yampa.Switches"
--
-- * "FRP.Yampa.Time"
--
-- * "FRP.Yampa.Simulation" -- Reactimation/evaluation
--
-- Internals
--
-- * "FRP.Yampa.InternalCore"
--
-- Geometry:
--
-- * "FRP.Yampa.Geometry"
--
-- * "FRP.Yampa.AffineSpace"
--
-- * "FRP.Yampa.VectorSpace"
--
-- * "FRP.Yampa.Point2"
--
-- * "FRP.Yampa.Point3"
--
-- * "FRP.Yampa.Vector2"
--
-- * "FRP.Yampa.Vector3"
--
-- Old legacy code:
--
-- * "FRP.Yampa.Diagnostics"
--
-- * "FRP.Yampa.Forceable"
--
-- * "FRP.Yampa.Internals" -- No longer in use
--
-- * "FRP.Yampa.MergeableRecord"
--
-- * "FRP.Yampa.Miscellany"
--
-- * "FRP.Yampa.Utilities"
--
-- CHANGELOG:
--
-- * Adds (most) documentation.
--
-- * New version using GADTs.
--
-- ToDo:
--
-- * Specialize def. of repeatedly. Could have an impact on invaders.
--
-- * New defs for accs using SFAcc
--
-- * Make sure opt worked: e.g.
--
-- > repeatedly >>> count >>> arr (fmap sqr)
--
-- * Introduce SFAccHld.
--
-- * See if possible to unify AccHld with Acc??? They are so close.
--
-- * Introduce SScan. BUT KEEP IN MIND: Most if not all opts would
-- have been possible without GADTs???
--
-- * Look into pairs. At least pairing of SScan ought to be interesting.
--
-- * Would be nice if we could get rid of first & second with impunity
-- thanks to Id optimizations. That's a clear win, with or without
-- an explicit pair combinator.
--
-- * delayEventCat is a bit complicated ...
--
--
-- Random ideas:
--
-- * What if one used rules to optimize
-- - (arr :: SF a ()) to (constant ())
-- - (arr :: SF a a) to identity
-- But inspection of invader source code seems to indicate that
-- these are not very common cases at all.
--
-- * It would be nice if it was possible to come up with opt. rules
-- that are invariant of how signal function expressions are
-- parenthesized. Right now, we have e.g.
-- arr f >>> (constant c >>> sf)
-- being optimized to
-- cpAuxA1 f (cpAuxC1 c sf)
-- whereas it clearly should be possible to optimize to just
-- cpAuxC1 c sf
-- What if we didn't use SF' but
-- SFComp :: <tfun> -> SF' a b -> SF' b c -> SF' a c
-- ???
--
-- * The transition function would still be optimized in (pretty much)
-- the current way, but it would still be possible to look "inside"
-- composed signal functions for lost optimization opts.
-- Seems to me this could be done without too much extra effort/no dupl.
-- work.
-- E.g. new cpAux, the general case:
--
-- @
-- cpAux sf1 sf2 = SFComp tf sf1 sf2
-- where
-- tf dt a = (cpAux sf1' sf2', c)
-- where
-- (sf1', b) = (sfTF' sf1) dt a
-- (sf2', c) = (sfTF' sf2) dt b
-- @
--
-- * The ONLY change was changing the constructor from SF' to SFComp and
-- adding sf1 and sf2 to the constructor app.!
--
-- * An optimized case:
-- cpAuxC1 b sf1 sf2 = SFComp tf sf1 sf2
-- So cpAuxC1 gets an extra arg, and we change the constructor.
-- But how to exploit without writing 1000s of rules???
-- Maybe define predicates on SFComp to see if the first or second
-- sf are "interesting", and if so, make "reassociate" and make a
-- recursive call? E.g. we're in the arr case, and the first sf is another
-- arr, so we'd like to combine the two.
--
-- * It would also be interesting, then, to know when to STOP playing this
-- game, due to the overhead involved.
--
-- * Why don't we have a "SWITCH" constructor that indicates that the
-- structure will change, and thus that it is worthwhile to keep
-- looking for opt. opportunities, whereas a plain "SF'" would
-- indicate that things NEVER are going to change, and thus we can just
-- as well give up?
-----------------------------------------------------------------------------------------
module FRP.Yampa (
-- Re-exported module, classes, and types
module Control.Arrow,
module FRP.Yampa.VectorSpace,
RandomGen(..),
Random(..),
-- * Basic definitions
Time, -- [s] Both for time w.r.t. some reference and intervals.
DTime, -- [s] Sampling interval, always > 0.
SF, -- Signal Function.
Event(..), -- Events; conceptually similar to Maybe (but abstract).
-- Temporary!
-- SF(..), sfTF',
-- Main instances
-- SF is an instance of Arrow and ArrowLoop. Method instances:
-- arr :: (a -> b) -> SF a b
-- (>>>) :: SF a b -> SF b c -> SF a c
-- (<<<) :: SF b c -> SF a b -> SF a c
-- first :: SF a b -> SF (a,c) (b,c)
-- second :: SF a b -> SF (c,a) (c,b)
-- (***) :: SF a b -> SF a' b' -> SF (a,a') (b,b')
-- (&&&) :: SF a b -> SF a b' -> SF a (b,b')
-- returnA :: SF a a
-- loop :: SF (a,c) (b,c) -> SF a b
-- Event is an instance of Functor, Eq, and Ord. Some method instances:
-- fmap :: (a -> b) -> Event a -> Event b
-- (==) :: Event a -> Event a -> Bool
-- (<=) :: Event a -> Event a -> Bool
-- ** Lifting
arrPrim, arrEPrim, -- For optimization
-- * Signal functions
-- ** Basic signal functions
identity, -- :: SF a a
constant, -- :: b -> SF a b
localTime, -- :: SF a Time
time, -- :: SF a Time, Other name for localTime.
-- ** Initialization
(-->), -- :: b -> SF a b -> SF a b, infixr 0
(>--), -- :: a -> SF a b -> SF a b, infixr 0
(-=>), -- :: (b -> b) -> SF a b -> SF a b infixr 0
(>=-), -- :: (a -> a) -> SF a b -> SF a b infixr 0
initially, -- :: a -> SF a a
-- ** Simple, stateful signal processing
sscan, -- :: (b -> a -> b) -> b -> SF a b
sscanPrim, -- :: (c -> a -> Maybe (c, b)) -> c -> b -> SF a b
-- * Events
-- ** Basic event sources
never, -- :: SF a (Event b)
now, -- :: b -> SF a (Event b)
after, -- :: Time -> b -> SF a (Event b)
repeatedly, -- :: Time -> b -> SF a (Event b)
afterEach, -- :: [(Time,b)] -> SF a (Event b)
afterEachCat, -- :: [(Time,b)] -> SF a (Event [b])
delayEvent, -- :: Time -> SF (Event a) (Event a)
delayEventCat, -- :: Time -> SF (Event a) (Event [a])
edge, -- :: SF Bool (Event ())
iEdge, -- :: Bool -> SF Bool (Event ())
edgeTag, -- :: a -> SF Bool (Event a)
edgeJust, -- :: SF (Maybe a) (Event a)
edgeBy, -- :: (a -> a -> Maybe b) -> a -> SF a (Event b)
-- ** Stateful event suppression
notYet, -- :: SF (Event a) (Event a)
once, -- :: SF (Event a) (Event a)
takeEvents, -- :: Int -> SF (Event a) (Event a)
dropEvents, -- :: Int -> SF (Event a) (Event a)
-- ** Pointwise functions on events
noEvent, -- :: Event a
noEventFst, -- :: (Event a, b) -> (Event c, b)
noEventSnd, -- :: (a, Event b) -> (a, Event c)
event, -- :: a -> (b -> a) -> Event b -> a
fromEvent, -- :: Event a -> a
isEvent, -- :: Event a -> Bool
isNoEvent, -- :: Event a -> Bool
tag, -- :: Event a -> b -> Event b, infixl 8
tagWith, -- :: b -> Event a -> Event b,
attach, -- :: Event a -> b -> Event (a, b), infixl 8
lMerge, -- :: Event a -> Event a -> Event a, infixl 6
rMerge, -- :: Event a -> Event a -> Event a, infixl 6
merge, -- :: Event a -> Event a -> Event a, infixl 6
mergeBy, -- :: (a -> a -> a) -> Event a -> Event a -> Event a
mapMerge, -- :: (a -> c) -> (b -> c) -> (a -> b -> c)
-- -> Event a -> Event b -> Event c
mergeEvents, -- :: [Event a] -> Event a
catEvents, -- :: [Event a] -> Event [a]
joinE, -- :: Event a -> Event b -> Event (a,b),infixl 7
splitE, -- :: Event (a,b) -> (Event a, Event b)
filterE, -- :: (a -> Bool) -> Event a -> Event a
mapFilterE, -- :: (a -> Maybe b) -> Event a -> Event b
gate, -- :: Event a -> Bool -> Event a, infixl 8
-- * Switching
-- ** Basic switchers
switch, dSwitch, -- :: SF a (b, Event c) -> (c -> SF a b) -> SF a b
rSwitch, drSwitch, -- :: SF a b -> SF (a,Event (SF a b)) b
kSwitch, dkSwitch, -- :: SF a b
-- -> SF (a,b) (Event c)
-- -> (SF a b -> c -> SF a b)
-- -> SF a b
-- ** Parallel composition and switching
-- *** Parallel composition and switching over collections with broadcasting
parB, -- :: Functor col => col (SF a b) -> SF a (col b)
pSwitchB,dpSwitchB, -- :: Functor col =>
-- col (SF a b)
-- -> SF (a, col b) (Event c)
-- -> (col (SF a b) -> c -> SF a (col b))
-- -> SF a (col b)
rpSwitchB,drpSwitchB, -- :: Functor col =>
-- col (SF a b)
-- -> SF (a, Event (col (SF a b)->col (SF a b)))
-- (col b)
-- *** Parallel composition and switching over collections with general routing
par, -- Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF a (col c)
pSwitch, dpSwitch, -- pSwitch :: Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF (a, col c) (Event d)
-- -> (col (SF b c) -> d -> SF a (col c))
-- -> SF a (col c)
rpSwitch,drpSwitch, -- Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF (a, Event (col (SF b c) -> col (SF b c)))
-- (col c)
-- * Discrete to continuous-time signal functions
-- ** Wave-form generation
hold, -- :: a -> SF (Event a) a
dHold, -- :: a -> SF (Event a) a
trackAndHold, -- :: a -> SF (Maybe a) a
-- ** Accumulators
accum, -- :: a -> SF (Event (a -> a)) (Event a)
accumHold, -- :: a -> SF (Event (a -> a)) a
dAccumHold, -- :: a -> SF (Event (a -> a)) a
accumBy, -- :: (b -> a -> b) -> b -> SF (Event a) (Event b)
accumHoldBy, -- :: (b -> a -> b) -> b -> SF (Event a) b
dAccumHoldBy, -- :: (b -> a -> b) -> b -> SF (Event a) b
accumFilter, -- :: (c -> a -> (c, Maybe b)) -> c
-- -> SF (Event a) (Event b)
-- * Delays
-- ** Basic delays
pre, -- :: SF a a
iPre, -- :: a -> SF a a
-- ** Timed delays
delay, -- :: Time -> a -> SF a a
-- ** Variable delay
pause, -- :: b -> SF a b -> SF a Bool -> SF a b
-- * State keeping combinators
-- ** Loops with guaranteed well-defined feedback
loopPre, -- :: c -> SF (a,c) (b,c) -> SF a b
loopIntegral, -- :: VectorSpace c s => SF (a,c) (b,c) -> SF a b
-- ** Integration and differentiation
integral, -- :: VectorSpace a s => SF a a
derivative, -- :: VectorSpace a s => SF a a -- Crude!
imIntegral, -- :: VectorSpace a s => a -> SF a a
-- Temporarily hidden, but will eventually be made public.
-- iterFrom, -- :: (a -> a -> DTime -> b -> b) -> b -> SF a b
-- * Noise (random signal) sources and stochastic event sources
noise, -- :: noise :: (RandomGen g, Random b) =>
-- g -> SF a b
noiseR, -- :: noise :: (RandomGen g, Random b) =>
-- (b,b) -> g -> SF a b
occasionally, -- :: RandomGen g => g -> Time -> b -> SF a (Event b)
-- * Execution/simulation
-- ** Reactimation
reactimate, -- :: IO a
-- -> (Bool -> IO (DTime, Maybe a))
-- -> (Bool -> b -> IO Bool)
-- -> SF a b
-- -> IO ()
ReactHandle,
reactInit, -- IO a -- init
-- -> (ReactHandle a b -> Bool -> b -> IO Bool) -- actuate
-- -> SF a b
-- -> IO (ReactHandle a b)
-- process a single input sample:
react, -- ReactHandle a b
-- -> (DTime,Maybe a)
-- -> IO Bool
reactSilent,
-- ** Embedding
-- (tentative: will be revisited)
embed, -- :: SF a b -> (a, [(DTime, Maybe a)]) -> [b]
embedSynch, -- :: SF a b -> (a, [(DTime, Maybe a)]) -> SF Double b
deltaEncode, -- :: Eq a => DTime -> [a] -> (a, [(DTime, Maybe a)])
deltaEncodeBy, -- :: (a -> a -> Bool) -> DTime -> [a]
-- -> (a, [(DTime, Maybe a)])
-- * Auxiliary definitions
-- Reverse function composition and arrow plumbing aids
( # ), -- :: (a -> b) -> (b -> c) -> (a -> c), infixl 9
dup, -- :: a -> (a,a)
) where
import Control.Arrow
import FRP.Yampa.InternalCore
import FRP.Yampa.Basic
import FRP.Yampa.Conditional
import FRP.Yampa.Delays
import FRP.Yampa.Event
import FRP.Yampa.EventS
import FRP.Yampa.Hybrid
import FRP.Yampa.Integration
import FRP.Yampa.Loop
import FRP.Yampa.Miscellany (( # ), dup)
import FRP.Yampa.Random
import FRP.Yampa.Scan
import FRP.Yampa.Simulation
import FRP.Yampa.Switches
import FRP.Yampa.Time
import FRP.Yampa.VectorSpace
-- Vim modeline
-- vim:set tabstop=8 expandtab:
| eigengrau/haskell-yampa | src/FRP/Yampa.hs | bsd-3-clause | 17,282 | 0 | 5 | 6,045 | 869 | 715 | 154 | 120 | 0 |
module Sloch.Show
( showLangToSloc
, showPathToLangToSloc
) where
import Data.List (sort, sortBy)
import Data.Monoid ((<>))
import qualified Data.Map as M
import Cli (OptVerbose)
import Language (Language)
import Sloch (LangToSloc, PathToLangToSloc)
showLangToSloc :: OptVerbose -> LangToSloc -> String
showLangToSloc verbose = unlines . map display . sortBy compareSloc . M.toList
where
compareSloc :: (Language, [(FilePath, Int)]) -> (Language, [(FilePath, Int)]) -> Ordering
compareSloc (lang1, fs1) (lang2, fs2) = sumSnds fs2 `compare` sumSnds fs1 <> lang1 `compare` lang2
display :: (Language, [(FilePath, Int)]) -> String
display (lang, fs) = unlines $
show lang : lineCounts
where
lineCounts :: [String]
lineCounts =
if verbose
then map (\(fp,n) -> " " ++ show n ++ " " ++ fp) (sortBy (\a b -> snd b `compare` snd a) fs)
else [" " ++ show (sumSnds fs)]
sumSnds :: Num b => [(a,b)] -> b
sumSnds = foldr ((+) . snd) 0
showPathToLangToSloc :: OptVerbose -> PathToLangToSloc -> String
showPathToLangToSloc verbose = unlines . concatMap display . sort . M.toList
where
display :: (FilePath, LangToSloc) -> [String]
display (path, s) = path : map (" " ++) (lines $ showLangToSloc verbose s)
| mitchellwrosen/Sloch | src/sloch/Sloch/Show.hs | bsd-3-clause | 1,325 | 0 | 16 | 320 | 497 | 282 | 215 | 27 | 2 |
-- Advent of Code
---- Day 19: Medicine for Rudolph
module AOC2015.Day19 where
import qualified Data.Map.Lazy as ML
import qualified Data.Set as S
import Data.Char
import Data.List
-- | Map from molecule to list of molecules.
type TransMap = ML.Map [String] [[String]]
type StrListTuple = ([String],[String])
-- | Left fold of accumulating function onto a list.
-- Runs until the accumulating function has consumed
-- the list, then returns the accumulator. Generalizes
-- 'Data.List.mapAccumL'.
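-- A hypothetical usage sketch (not part of the original module): summing a
-- list while consuming one element per step:
--
-- > envelop (\acc (x:xs) -> (acc + x, xs)) 0 [1,2,3]  ==  6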
envelop :: (b -> [a] -> (b, [a])) -> b -> [a] -> b
envelop _ a [] = a
envelop f a xs = uncurry (envelop f) (f a xs)
-- Insert a new molecule in the list of productions for a given molecule.
insertMulti :: [String] -> [String] -> TransMap -> TransMap
insertMulti k v = ML.insertWith (\[new] old -> new : old) k [v]
-- | Read a molecule.
parseElem :: [String] -> String -> ([String], String)
parseElem cs (s1 : s2 : ss) | isUpper s1 && isLower s2 = (cs ++ [[s1, s2]], ss)
parseElem cs (s : ss) | isUpper s = (cs ++ [[s]], ss)
parseElem _ _ = error "bad elem"
-- | Read a rule.
parseRule :: [String] -> TransMap -> TransMap
parseRule [src, "=>", dst] m = insertMulti [src] es m
where
es = envelop parseElem [] dst
parseRule _ _ = error "bad rule"
-- | Read a machine specification, both rules and input.
parseMachine :: String -> (TransMap, [String])
parseMachine stuff = (m,elems)
where
desc = lines stuff
rules = take (length desc - 2) desc
m = foldr (parseRule . words) ML.empty rules
elems = envelop parseElem [] $ last desc
-- | Expansions of a given molecule starting at a given split point.
expandSplit :: TransMap -> StrListTuple -> S.Set [String] -> S.Set [String]
expandSplit _ (_, []) _ = error "bad expansion"
expandSplit m (first, rest) result =
ML.foldrWithKey' expandOne result m
where
expandOne :: [String] -> [[String]] -> S.Set [String] -> S.Set [String]
expandOne k vs a
| k `isPrefixOf` rest = foldr (S.insert . splice) a vs
| otherwise = a
where
splice :: [String] -> [String]
splice v = first ++ v ++ drop (length k) rest
-- | Expansions of a given set of molecules.
expand :: TransMap -> S.Set [String] -> S.Set [String]
expand m s0 = S.foldr' oneItem S.empty s0
where
oneItem es s =
foldr (expandSplit m . flip splitAt es) (s `S.difference` s0) [0..length es - 1]
answers :: IO ()
answers = do
i <- readFile "inputs/is2015/day19-input.txt"
let part1 = S.size $ expand m $ S.singleton es
where
(m,es) = parseMachine i
putStrLn $ "Part One: " ++ show part1
let part2 = n - c "Rn" - c "Ar" - 2 * c "Y" - 1
where
(_,es) = parseMachine i
n = length es
c s = length $ filter (== s) es
putStrLn $ "Part Two: " ++ show part2
| bitrauser/aoc | src/AOC2015/Day19.hs | bsd-3-clause | 2,814 | 0 | 15 | 671 | 1,056 | 561 | 495 | 51 | 1 |
-- factoring.hs
module Math.Factoring where
import List (partition)
import Math.MergeSort
import Math.NumberTheoryFundamentals (splitWith)
import Math.Primes
import Math.FactoringECM
import Math.FactoringCFRAC
-- In tests, ECM comes out faster than CFRAC, but not hugely so.
factors n
| isPrime n = [n]
| otherwise = merge (factors d) (factors (n `div` d))
where d = findFactorECM n
-- version of primePowerFactors which finds large factors too
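-- e.g. (illustrative sketch, assuming n `splitWith` p = (s, n') with n == p^s * n'):
-- primePowerFactorsL 360 == [(2,3),(3,2),(5,1)]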
primePowerFactorsL :: Integer -> [(Integer,Int)]
primePowerFactorsL n | n > 0 = takeOutFactors n primesTo10000
where
takeOutFactors n (p:ps)
-- | n == 1 = [] -- unnecessary, caught by following test
| p*p > n = finish n
| otherwise =
let (s,n') = n `splitWith` p
in if s > 0 then (p,s) : takeOutFactors n' ps else takeOutFactors n ps
takeOutFactors n [] = finish n
finish 1 = []
finish n =
if n < 100000000 -- we already know it's a trial division prime up to 10000
then [(n,1)]
else counts (factors n)
counts [] = []
counts fs@(x:_) = let (xs,ys) = partition (==x) fs in (x, length xs) : counts ys
pairProducts [] = []
pairProducts (x:xs) = map (x*) xs ++ pairProducts xs
| nfjinjing/bench-euler | src/Math/Factoring.hs | bsd-3-clause | 1,214 | 8 | 12 | 290 | 443 | 233 | 210 | 28 | 6 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE UndecidableInstances #-}
module InputSelection.FromGeneric (
-- * Instantiation of the generic infrastructure
DSL
, SafeValue(..)
, Size(..)
, runCoinSelT
-- * Wrap coin selection
-- ** Random
, PrivacyMode(..)
, random
-- ** Largest-first
, largestFirst
) where
import Universum
import qualified Data.List.NonEmpty as NE
import qualified Data.Set as Set
import Formatting (bprint, build)
import qualified Formatting.Buildable
import qualified Prelude
import Cardano.Wallet.Kernel.CoinSelection.Generic
import Cardano.Wallet.Kernel.CoinSelection.Generic.Grouped
import qualified Cardano.Wallet.Kernel.CoinSelection.Generic.LargestFirst as Generic
import Cardano.Wallet.Kernel.CoinSelection.Generic.Random
(PrivacyMode (..))
import qualified Cardano.Wallet.Kernel.CoinSelection.Generic.Random as Generic
import InputSelection.TxStats
import Util.GenHash
import qualified UTxO.DSL as DSL
{-------------------------------------------------------------------------------
Coin selection domain
-------------------------------------------------------------------------------}
data DSL (h :: * -> *) (a :: *)
instance IsValue (SafeValue h a) where
valueZero = safeZero
valueAdd = safeAdd
valueSub = safeSub
valueDist = safeDist
valueRatio = safeRatio
valueAdjust = safeAdjust
valueDiv = safeDiv
instance (DSL.Hash h a, Buildable a) => CoinSelDom (DSL h a) where
type Input (DSL h a) = DSL.Input h a
type Output (DSL h a) = DSL.Output h a
type Value (DSL h a) = SafeValue h a
type UtxoEntry (DSL h a) = (DSL.Input h a, DSL.Output h a)
newtype Size (DSL h a) = Size Word64
outVal = Value . DSL.outVal
outSubFee = \(Fee v) o -> outSetVal o <$> valueSub (outVal o) v
where
outSetVal o (Value v) = o {DSL.outVal = v}
instance (DSL.Hash h a, Buildable a, Ord a) => HasAddress (DSL h a) where
type Address (DSL h a) = a
outAddr = DSL.outAddr
instance (DSL.Hash h a, Buildable a) => StandardDom (DSL h a)
instance (DSL.Hash h a, Buildable a) => StandardUtxo (DSL.Utxo h a)
instance (DSL.Hash h a, Buildable a) => PickFromUtxo (DSL.Utxo h a) where
type Dom (DSL.Utxo h a) = DSL h a
-- Use default implementations
instance (DSL.Hash h a, Buildable a, Ord a) => CanGroup (DSL.Utxo h a) where
-- Use default implementations
{-------------------------------------------------------------------------------
Auxiliary: safe wrapper around values
-------------------------------------------------------------------------------}
-- | Safe wrapper around values
newtype SafeValue (h :: * -> *) a = Value { fromSafeValue :: DSL.Value }
deriving (Eq, Ord)
-- | Don't print the constructor, just the value
instance Show (SafeValue h a) where
show = Prelude.show . fromSafeValue
instance Read (SafeValue h a) where
readsPrec p = map (bimap Value identity) . Prelude.readsPrec p
safeZero :: SafeValue h a
safeZero = Value 0
-- TODO: check for overflow
safeAdd :: SafeValue h a -> SafeValue h a -> Maybe (SafeValue h a)
safeAdd (Value x) (Value y) = Just $ Value (x + y)
safeSub :: SafeValue h a -> SafeValue h a -> Maybe (SafeValue h a)
safeSub (Value x) (Value y) = do
guard (y <= x)
return $ Value (x - y)
safeDist :: SafeValue h a -> SafeValue h a -> SafeValue h a
safeDist (Value x) (Value y) =
Value $ if y <= x then x - y else y - x
safeRatio :: SafeValue h a -> SafeValue h a -> Double
safeRatio (Value x) (Value y) =
fromIntegral x / fromIntegral y
safeDiv :: SafeValue h a -> Int -> SafeValue h a
safeDiv (Value x) k =
Value (x `div` fromIntegral k)
-- TODO: check for underflow/overflow
safeAdjust :: Rounding -> Double -> SafeValue h a -> Maybe (SafeValue h a)
safeAdjust RoundUp d (Value x) = Just $ Value $ ceiling (d * fromIntegral x)
safeAdjust RoundDown d (Value x) = Just $ Value $ floor (d * fromIntegral x)
{-------------------------------------------------------------------------------
Top-level coin selection
-------------------------------------------------------------------------------}
runCoinSelT :: forall utxo e h a m.
(GenHash m, Dom utxo ~ DSL h a, CoinSelDom (DSL h a))
=> a -- ^ Change address
-> CoinSelT utxo e m [CoinSelResult (Dom utxo)]
-> utxo -- ^ Available UTxO
-> m (Either e (DSL.Transaction h a, TxStats, utxo))
runCoinSelT changeAddr policy utxo = do
mSelection <- unwrapCoinSelT policy utxo
case mSelection of
Left err -> return (Left err)
Right (cssWithDust, utxo') -> do
let css = map (coinSelRemoveDust valueZero) cssWithDust
tx <- mkTx changeAddr css
return $ Right (tx, deriveTxStats css, utxo')
mkTx :: forall h a m. (GenHash m, CoinSelDom (DSL h a))
=> a -> [CoinSelResult (DSL h a)] -> m (DSL.Transaction h a)
mkTx changeAddr css = do
h <- genHash
return DSL.Transaction {
DSL.trFresh = 0
, DSL.trIns = Set.unions $ map coinSelInputSet css
, DSL.trOuts = concatMap mkOutputs css
, DSL.trFee = 0 -- TODO: deal with fees
, DSL.trHash = h
, DSL.trExtra = []
}
where
mkOutputs :: CoinSelResult (DSL h a) -> [DSL.Output h a]
mkOutputs cs = coinSelOutput cs : map mkChangeOutput (coinSelChange cs)
mkChangeOutput :: Value (DSL h a) -> DSL.Output h a
mkChangeOutput (Value change) = DSL.Output changeAddr change
{-------------------------------------------------------------------------------
Wrap the generic coin selection algorithms
-------------------------------------------------------------------------------}
random :: (MonadRandom m, GenHash m, Dom utxo ~ DSL h a, PickFromUtxo utxo)
=> PrivacyMode
-> a -- ^ Change address
-> Word64 -- ^ Maximum number of inputs
-> CoinSelPolicy utxo m (DSL.Transaction h a, TxStats, utxo)
random privacy changeAddr maxInps =
runCoinSelT changeAddr
. Generic.random privacy maxInps
. NE.toList
largestFirst :: (GenHash m, Dom utxo ~ DSL h a, PickFromUtxo utxo)
=> a -- ^ Change address
-> Word64 -- ^ Maximum number of inputs
-> CoinSelPolicy utxo m (DSL.Transaction h a, TxStats, utxo)
largestFirst changeAddr maxInps =
runCoinSelT changeAddr
. Generic.largestFirst maxInps
. NE.toList
{-------------------------------------------------------------------------------
Pretty-printing
-------------------------------------------------------------------------------}
instance Buildable (SafeValue h a) where
build (Value a) = bprint build a
| input-output-hk/pos-haskell-prototype | wallet/test/unit/InputSelection/FromGeneric.hs | mit | 6,771 | 0 | 17 | 1,521 | 2,047 | 1,082 | 965 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
module Baum.List
( make_fixed, make_quiz
)
where
-- $Id$
import Baum.List.Type
import Baum.List.Ops
import Baum.List.Show
import qualified Baum.Heap.Class as C
import qualified Baum.Heap.Central
import qualified Tree as T
import Autolib.ToDoc
import Inter.Types
import Data.Typeable
instance Show a => T.ToTree ( ListTree a ) where
toTree = toTree . fmap show
data HeapbaumList = HeapbaumList
deriving ( Eq, Ord, Show, Read, Typeable )
instance C.Tag HeapbaumList ListTree Int where
tag = HeapbaumList
make_fixed :: Make
make_fixed = Baum.Heap.Central.make_fixed HeapbaumList
make_quiz :: Make
make_quiz = Baum.Heap.Central.make_quiz HeapbaumList
| florianpilz/autotool | src/Baum/List.hs | gpl-2.0 | 706 | 0 | 7 | 117 | 189 | 112 | 77 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
module Data.TypedGraph.Morphism where
-- TODO: write export list explicitly
import Data.Function (on)
import Data.List (nubBy)
import Data.Maybe (fromMaybe, isJust)
import Base.Valid
import qualified Data.Graphs as Untyped
import Data.Graphs.Morphism (GraphMorphism)
import qualified Data.Graphs.Morphism as GM
import Data.Relation (Relation)
import qualified Data.Relation as Relation
import Data.TypedGraph
data TypedGraphMorphism a b = TypedGraphMorphism {
domainGraph :: TypedGraph a b
, codomainGraph :: TypedGraph a b
, mapping :: GraphMorphism (Maybe a) (Maybe b)
} deriving (Eq, Show)
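-- | Compose two typed graph morphisms: @compose t2 t1@ first applies @t1@, then @t2@.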
compose :: TypedGraphMorphism a b -> TypedGraphMorphism a b -> TypedGraphMorphism a b
compose t2 t1 = TypedGraphMorphism (domainGraph t1) (codomainGraph t2) $ GM.compose (mapping t2) (mapping t1)
-- | Given two @TypedGraph@s @G1@ and @G2@ and a simple @GraphMorphism@ between them, it returns a @TypedGraphMorphism@ from @G1@ to @G2@
buildTypedGraphMorphism :: TypedGraph a b -> TypedGraph a b -> GraphMorphism (Maybe a) (Maybe b) -> TypedGraphMorphism a b
buildTypedGraphMorphism = TypedGraphMorphism
fromGraphsAndRelations :: TypedGraph a b -> TypedGraph a b -> Relation NodeId -> Relation EdgeId -> TypedGraphMorphism a b
fromGraphsAndRelations dom cod nodeMapping edgeMapping = TypedGraphMorphism dom cod $
GM.fromGraphsAndRelations (toUntypedGraph dom) (toUntypedGraph cod) nodeMapping edgeMapping
fromGraphsAndLists :: TypedGraph a b -> TypedGraph a b -> [(NodeId, NodeId)] -> [(EdgeId, EdgeId)] -> TypedGraphMorphism a b
fromGraphsAndLists dom cod nodeMapping edgeMapping = TypedGraphMorphism dom cod $
GM.fromGraphsAndLists (toUntypedGraph dom) (toUntypedGraph cod) nodeMapping edgeMapping
instance Valid (TypedGraphMorphism a b) where
validate (TypedGraphMorphism dom cod m) =
mconcat
[ withContext "domain" (validate dom)
, withContext "codomain" (validate cod)
, ensure (dom == GM.compose cod m) "Morphism doesn't preserve typing"
]
nodeMapping :: TypedGraphMorphism a b -> [(NodeId, NodeId)]
nodeMapping = Relation.toList . GM.nodeRelation . mapping
edgeMapping :: TypedGraphMorphism a b -> [(EdgeId, EdgeId)]
edgeMapping = Relation.toList . GM.edgeRelation . mapping
-- | Given a TypedGraphMorphism @/__t__: G1 -> G2/@ and a node @__n__@ in @G1@, it returns the node in @G2@ to which @__n__@ gets mapped
applyNode :: TypedGraphMorphism a b -> Node (Maybe a) -> Maybe (Node (Maybe a))
applyNode tgm = GM.applyNode (mapping tgm)
-- | Given a TypedGraphMorphism @/__t__: G1 -> G2/@ and a nodeId @__n__@ in @G1@, it returns the nodeId in @G2@ to which @__n__@ gets mapped
applyNodeId :: TypedGraphMorphism a b -> NodeId -> Maybe NodeId
applyNodeId tgm = GM.applyNodeId (mapping tgm)
-- | Given a TypedGraphMorphism @/__t__: G1 -> G2/@ and an edge @__e__@ in @G1@, it returns the edge in @G2@ to which @__e__@ gets mapped
applyEdge :: TypedGraphMorphism a b -> Edge (Maybe b) -> Maybe (Edge (Maybe b))
applyEdge tgm = GM.applyEdge (mapping tgm)
-- | Given a TypedGraphMorphism @/__t__: G1 -> G2/@ and an edgeId @__e__@ in @G1@, it returns the edgeId in @G2@ to which @__e__@ gets mapped
applyEdgeId :: TypedGraphMorphism a b -> EdgeId -> Maybe EdgeId
applyEdgeId tgm = GM.applyEdgeId (mapping tgm)
-- | Given a @TypedGraphMorphism@ @__t__@and a node @n@ in the domain of @__t__@, return the node in the image
--of @t@ to which @n@ gets mapped or error in the case of undefined
applyNodeUnsafe :: TypedGraphMorphism a b -> Node (Maybe a) -> Node (Maybe a)
applyNodeUnsafe m n = fromMaybe (error "Error, apply node in a non total morphism") $ applyNode m n
-- | Given a @TypedGraphMorphism@ @__t__@and a nodeId @n@ in the domain of @__t__@, return the nodeId in the image
--of @t@ to which @n@ gets mapped or error in the case of undefined
applyNodeIdUnsafe :: TypedGraphMorphism a b -> NodeId -> NodeId
applyNodeIdUnsafe m n = fromMaybe (error "Error, apply node in a non total morphism") $ applyNodeId m n
-- | Given a @TypedGraphMorphism@ @__t__@and an edge @e@ in the domain of @__t__@, return the edge in the image
--of @t@ to which @e@ gets mapped or error in the case of undefined
applyEdgeUnsafe :: TypedGraphMorphism a b -> Edge (Maybe b) -> Edge (Maybe b)
applyEdgeUnsafe m e = fromMaybe (error "Error, apply edge in a non total morphism") $ applyEdge m e
-- | Given a @TypedGraphMorphism@ @__t__@and an edgeId @e@ in the domain of @__t__@, return the edgeId in the image
--of @t@ to which @e@ gets mapped or error in the case of undefined
applyEdgeIdUnsafe :: TypedGraphMorphism a b -> EdgeId -> EdgeId
applyEdgeIdUnsafe m e = fromMaybe (error "Error, apply edge in a non total morphism") $ applyEdgeId m e
-- | Return the nodes ids of the codomain which are not in the image of the given morphism.
orphanTypedNodeIds :: TypedGraphMorphism a b -> [NodeId]
orphanTypedNodeIds tgm = GM.orphanNodeIds (mapping tgm)
-- | Return the edge ids of the codomain which are not in the image of the given morphism.
orphanTypedEdgeIds :: TypedGraphMorphism a b -> [EdgeId]
orphanTypedEdgeIds tgm = GM.orphanEdgeIds (mapping tgm)
-- | Return the edges of the codomain which are not in the image of the given morphism.
orphanTypedEdges :: TypedGraphMorphism a b -> [Edge (Maybe b)]
orphanTypedEdges tgm = GM.orphanEdges (mapping tgm)
-- | Invert a typed graph morphism
invert :: TypedGraphMorphism a b -> TypedGraphMorphism a b
invert tgm =
TypedGraphMorphism { domainGraph = codomainGraph tgm
, codomainGraph = domainGraph tgm
, mapping = GM.invertGraphMorphism (mapping tgm)
}
-- | This function adds an edge e1 (with source s1, target t1 and type tp) to the domain of the typed graph morphism, and associates it to e2
-- It assumes s1, t1, e2, tp already exist, and that e1 does not exist.
createEdgeOnDomain :: EdgeId -> NodeId -> NodeId -> EdgeId -> EdgeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
createEdgeOnDomain e1 s1 t1 tp e2 tgm =
tgm { domainGraph = GM.createEdgeOnDomain e1 s1 t1 tp (domainGraph tgm)
, mapping = GM.createEdgeOnDomain e1 s1 t1 e2 (mapping tgm)
}
-- | This function adds an edge e2 (with source s2, target t2 and type tp) to the codomain of the typed graph morphism
-- It assumes s2, t2, tp already exist, and that e2 does not exist.
createEdgeOnCodomain :: EdgeId -> NodeId -> NodeId -> EdgeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
createEdgeOnCodomain e2 s2 t2 tp tgm =
tgm { codomainGraph = GM.createEdgeOnDomain e2 s2 t2 tp (codomainGraph tgm)
, mapping = GM.createEdgeOnCodomain e2 s2 t2 (mapping tgm)
}
-- | This function adds a node n1 (type tp) to the domain of the typed graph morphism, and associates it to n2
-- It assumes n2 and tp already exist, and that n1 does not exist.
createNodeOnDomain :: NodeId -> NodeId -> NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
createNodeOnDomain n1 tp n2 tgm =
tgm { domainGraph = GM.createNodeOnDomain n1 tp (domainGraph tgm)
, mapping = GM.createNodeOnDomain n1 n2 (mapping tgm)
}
-- | This function adds a node n2 (type tp) to the codomain of the typed graph morphism
-- It assumes tp already exist, and that n2 does not exist.
createNodeOnCodomain :: NodeId -> NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
createNodeOnCodomain n2 tp tgm =
tgm { codomainGraph = GM.createNodeOnDomain n2 tp (codomainGraph tgm)
, mapping = GM.createNodeOnCodomain n2 (mapping tgm)
}
-- | updates a typed graph morphism, mapping node n1 to node n2. It assumes both nodes already exist.
updateNodeRelation :: NodeId -> NodeId -> NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
updateNodeRelation n1 n2 tp tgm =
TypedGraphMorphism { domainGraph = GM.updateNodeRelation n1 tp (domainGraph tgm)
, codomainGraph = GM.updateNodeRelation n2 tp (codomainGraph tgm)
, mapping = GM.updateNodeRelation n1 n2 (mapping tgm)
}
-- | updates a typed graph morphism, mapping node n1 to node n2. It assumes both nodes already exist and are of the same type.
untypedUpdateNodeRelation :: NodeId -> NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
untypedUpdateNodeRelation n1 n2 tgm =
TypedGraphMorphism { domainGraph = domainGraph tgm
, codomainGraph = codomainGraph tgm
, mapping = GM.updateNodeRelation n1 n2 (mapping tgm)
}
-- | updates a typed graph morphism, mapping edge e1 to edge e2. It assumes both edges already exist.
updateEdgeRelation :: EdgeId -> EdgeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
updateEdgeRelation e1 e2 tgm =
tgm { mapping = GM.updateEdgeRelation e1 e2 (mapping tgm) }
-- | Remove a node from the domain of a typed graph morphism
removeNodeFromDomain :: NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
removeNodeFromDomain n tgm =
tgm { domainGraph = GM.removeNodeFromDomain n (domainGraph tgm)
, mapping = GM.removeNodeFromDomain n (mapping tgm)
}
-- | Remove an edge from the domain of a typed graph morphism
removeEdgeFromDomain :: EdgeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
removeEdgeFromDomain e tgm =
tgm { domainGraph = GM.removeEdgeFromDomain e (domainGraph tgm)
, mapping = GM.removeEdgeFromDomain e (mapping tgm)
}
-- | Remove a node from the codomain of a typed graph morphism
removeNodeFromCodomain :: NodeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
removeNodeFromCodomain n tgm =
tgm { codomainGraph = GM.removeNodeFromDomain n (codomainGraph tgm)
, mapping = GM.removeNodeFromCodomain n (mapping tgm)
}
-- | Remove an edge from the domain of a typed graph morphism
removeEdgeFromCodomain :: EdgeId -> TypedGraphMorphism a b -> TypedGraphMorphism a b
removeEdgeFromCodomain e tgm =
tgm { codomainGraph = GM.removeEdgeFromDomain e (codomainGraph tgm)
, mapping = GM.removeEdgeFromCodomain e (mapping tgm) }
-- | Creates an inclusion, that is, maps nodes and edges according to their identifiers.
--
-- The behaviour is undefined if the domain is not a subgraph of the codomain.
makeInclusion :: TypedGraph a b -> TypedGraph a b -> TypedGraphMorphism a b
makeInclusion g1 g2 =
fromGraphsAndLists g1 g2
[ (n, n) | n <- nodeIds g1 ]
[ (e, e) | e <- edgeIds g1 ]
-- | Given a TypedGraphMorphism tgm, creates an isomorphic TypedGraphMorphism
-- tgm' where the mapping between the domain and codomain can be seen as
-- explicit inclusion (the same ids), and those ids are the same as in the type
-- graph.
--
-- Attention: It works only when the typing morphism is injective, otherwise it
-- will produce an invalid TypedGraphMorphism
reflectIdsFromTypeGraph :: TypedGraphMorphism a b -> TypedGraphMorphism a b
reflectIdsFromTypeGraph tgm =
let
gmDomain = domainGraph tgm
gmCodomain = codomainGraph tgm
newNodes gm = map (GM.applyNodeIdUnsafe gm) (nodeIds gm)
newEdges gm = map (\x -> (GM.applyEdgeIdUnsafe gm (edgeId x), GM.applyNodeIdUnsafe gm (sourceId x), GM.applyNodeIdUnsafe gm (targetId x))) (Untyped.edges $ toUntypedGraph gm)
newDomain = foldr (\(e,s,t) -> GM.createEdgeOnDomain e s t e) (foldr (\x -> GM.createNodeOnDomain x x) (GM.empty Untyped.empty (toUntypedGraph gmDomain)) (newNodes gmDomain)) (newEdges gmDomain)
newCodomain = foldr (\(e,s,t) -> GM.createEdgeOnDomain e s t e) (foldr (\x -> GM.createNodeOnDomain x x) (GM.empty Untyped.empty (toUntypedGraph gmCodomain)) (newNodes gmCodomain)) (newEdges gmCodomain)
newMaps = GM.buildGraphMorphism (toUntypedGraph newDomain) (toUntypedGraph newCodomain) (map (\(NodeId x) -> (x,x)) (nodeIds newDomain)) (map (\(EdgeId x) -> (x,x)) (edgeIds newDomain))
in buildTypedGraphMorphism newDomain newCodomain newMaps
-- | Given a TypedGraphMorphism tgm, creates an isomorphic TypedGraphMorphism
-- tgm' where the nodes and edges in the domain have the same ids as the ones in
-- the codomain
reflectIdsFromCodomain :: TypedGraphMorphism a b -> TypedGraphMorphism a b
reflectIdsFromCodomain tgm =
let
typedA = domainGraph tgm
typedB = codomainGraph tgm
tGraph = typeGraph typedA
typedB' = GM.empty Untyped.empty tGraph
nodes = nodeIds $ domainGraph tgm
edges' = map fst . edges $ domainGraph tgm
initial = buildTypedGraphMorphism typedB' typedB (GM.empty (toUntypedGraph typedB') (toUntypedGraph typedB))
addNodes = foldr (\n -> createNodeOnDomain (applyNodeIdUnsafe tgm n) (GM.applyNodeIdUnsafe typedA n) (applyNodeIdUnsafe tgm n)) initial nodes
addEdges = foldr (\e ->
createEdgeOnDomain (applyEdgeIdUnsafe tgm (edgeId e))
(applyNodeIdUnsafe tgm (sourceId e))
(applyNodeIdUnsafe tgm (targetId e))
(GM.applyEdgeIdUnsafe typedA (edgeId e))
(applyEdgeIdUnsafe tgm (edgeId e))) addNodes edges'
in addEdges
reflectIdsFromDomains :: (TypedGraphMorphism a b, TypedGraphMorphism a b) -> (TypedGraphMorphism a b, TypedGraphMorphism a b)
reflectIdsFromDomains (m,e) =
let
typedL = domainGraph m
typedD = domainGraph e
typedG = codomainGraph m
typeGraph = GM.codomainGraph typedL
m' = invert m
e' = invert e
newNodes = nubBy ((==) `on` (nodeId . fst)) (nodes typedL ++ nodes typedD)
newEdges = nubBy ((==) `on` (edgeId . fst)) (edges typedL ++ edges typedD)
typedG' = fromNodesAndEdges typeGraph newNodes newEdges
nodeR n = if isJust (applyNodeId m' n) then (n, applyNodeIdUnsafe m' n) else (n, applyNodeIdUnsafe e' n)
edgeR e = if isJust (applyEdgeId m' e) then (e, applyEdgeIdUnsafe m' e) else (e, applyEdgeIdUnsafe e' e)
h' = fromGraphsAndLists typedG typedG'
(map nodeR . nodeIds $ domainGraph m')
(map edgeR . edgeIds $ domainGraph m')
in (compose h' m, compose h' e)
| rodrigo-machado/verigraph | src/library/Data/TypedGraph/Morphism.hs | gpl-3.0 | 14,053 | 0 | 16 | 2,849 | 3,420 | 1,760 | 1,660 | 161 | 3 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Foundation.Check.Gen
( Gen
, runGen
, GenParams(..)
, GenRng
, genRng
, genWithRng
, genWithParams
) where
import Basement.Imports
import Foundation.Collection
import Foundation.Random
import qualified Foundation.Random.XorShift as XorShift
import Foundation.String
import Foundation.Numerical
import Foundation.Hashing.SipHash
import Foundation.Hashing.Hasher
data GenParams = GenParams
{ genMaxSizeIntegral :: Word -- maximum number of bytes
, genMaxSizeArray :: Word -- number of elements, as placeholder
, genMaxSizeString :: Word -- maximum number of chars
}
newtype GenRng = GenRng XorShift.State
type GenSeed = Word64
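-- | Derive a deterministic generator from the global seed and the test group path (hashed with SipHash); the returned function yields a distinct 'GenRng' per iteration.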
genRng :: GenSeed -> [String] -> (Word64 -> GenRng)
genRng seed groups = \iteration -> GenRng $ XorShift.initialize rngSeed (rngSeed * iteration)
where
(SipHash rngSeed) = hashEnd $ hashMixBytes hashData iHashState
hashData = toBytes UTF8 $ intercalate "::" groups
iHashState :: Sip1_3
iHashState = hashNewParam (SipKey seed 0x12345678)
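-- | Split a generator in two: a fresh generator built from two freshly drawn words, plus the advanced original. Used to give independent streams to '<*>' and '>>='.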
genGenerator :: GenRng -> (GenRng, GenRng)
genGenerator (GenRng rng) =
let (newSeed1, rngNext) = randomGenerateWord64 rng
(newSeed2, rngNext') = randomGenerateWord64 rngNext
in (GenRng $ XorShift.initialize newSeed1 newSeed2, GenRng rngNext')
-- | Generator monad
newtype Gen a = Gen { runGen :: GenRng -> GenParams -> a }
instance Functor Gen where
fmap f g = Gen (\rng params -> f (runGen g rng params))
instance Applicative Gen where
pure a = Gen (\_ _ -> a)
fab <*> fa = Gen $ \rng params ->
let (r1,r2) = genGenerator rng
ab = runGen fab r1 params
a = runGen fa r2 params
in ab a
instance Monad Gen where
return a = Gen (\_ _ -> a)
ma >>= mb = Gen $ \rng params ->
let (r1,r2) = genGenerator rng
a = runGen ma r1 params
in runGen (mb a) r2 params
genWithRng :: forall a . (forall randomly . MonadRandom randomly => randomly a) -> Gen a
genWithRng f = Gen $ \(GenRng rng) _ ->
let (a, _) = withRandomGenerator rng f in a
genWithParams :: (GenParams -> Gen a) -> Gen a
genWithParams f = Gen $ \rng params -> runGen (f params) rng params
| vincenthz/hs-foundation | foundation/Foundation/Check/Gen.hs | bsd-3-clause | 2,436 | 0 | 12 | 640 | 732 | 394 | 338 | 58 | 1 |
module Tests.Util where
import Control.Applicative
import qualified Data.Text as T
import Graphics.Vty
import Graphics.Vty.Widgets.Core
import Graphics.Vty.Widgets.Util
import Text.Trans.Tokenize
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Tests.Instances ()
imageSize :: Image -> DisplayRegion -> Bool
imageSize img sz =
imageWidth img <= regionWidth sz && imageHeight img <= regionHeight sz
count :: (a -> Bool) -> [a] -> Int
count _ [] = 0
count f (a:as) = count f as + if f a then 1 else 0
numNewlines :: T.Text -> Int
numNewlines = count (== '\n') . T.unpack
sizeGen :: Gen DisplayRegion
sizeGen = (,)
<$> (arbitrary `suchThat` (>= 0))
<*> (arbitrary `suchThat` (>= 0))
sizeTest :: (Show a) => IO (Widget a) -> PropertyM IO Bool
sizeTest mkWidget =
forAllM sizeGen $ \sz -> do
w <- run mkWidget
img <- run $ render w sz defaultContext
if regionHeight sz == 0 || regionWidth sz == 0 then
return $ imageHeight img == 0 && imageWidth img == 0 else
return $ imageWidth img <= regionWidth sz &&
imageHeight img <= regionHeight sz
lineLength :: [Token a] -> Phys
lineLength = sum . (textWidth <$>) . (tokenStr <$>)
| KommuSoft/vty-ui | test/src/Tests/Util.hs | bsd-3-clause | 1,221 | 0 | 14 | 279 | 456 | 244 | 212 | 33 | 2 |
{-# OPTIONS -Wall -O2 #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import Data.ByteString (empty)
import Lib.Binary (decode)
import Pretty ()
import Text.PrettyPrint.HughesPJClass (Pretty(..))
import qualified Buildsome.Db as Db
import qualified Control.Exception as E
import qualified Database.Sophia as Sophia
import qualified System.Environment as Env
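-- | Run @act@ repeatedly for as long as @boolAct@ returns 'True'.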
whileM :: Monad m => m Bool -> m () -> m ()
whileM boolAct act = go
where
go = do
p <- boolAct
when p $ do
act
go
main :: IO ()
main = do
[filePath] <- Env.getArgs
Sophia.withEnv $ \env -> do
Sophia.openDir env Sophia.ReadWrite Sophia.AllowCreation filePath
Sophia.withDb env $ \db ->
Sophia.withCursor db Sophia.GT empty $ \cursor -> do
putStrLn "Keys:"
whileM (Sophia.fetchCursor cursor) $ do
key <- Sophia.keyAtCursor cursor
val <- Sophia.valAtCursor cursor
execLogDoc <-
E.evaluate (pPrint (decode val :: Db.ExecutionLog))
`E.catch` \E.SomeException {} -> return "not an execution log"
putStrLn $ "Key " ++ take 8 (show key) ++ "... = " ++ show execLogDoc
putStrLn "Done"
| nadavshemer/buildsome | src/EnumDatabase.hs | gpl-2.0 | 1,189 | 0 | 26 | 300 | 388 | 198 | 190 | 34 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Thrift.Arbitraries where
import Data.Bits()
import Test.QuickCheck.Arbitrary
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
#endif
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Vector as Vector
import qualified Data.Text as Text
import qualified Data.Text.Lazy as LT
import qualified Data.HashSet as HSet
import qualified Data.HashMap.Strict as HMap
import Data.Hashable (Hashable)
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS
-- String has an Arbitrary instance already
-- Bool has an Arbitrary instance already
-- A Thrift 'list' is a Vector.
instance Arbitrary ByteString where
arbitrary = BS.pack . filter (/= 0) <$> arbitrary
instance (Ord k, Arbitrary k, Arbitrary v) => Arbitrary (Map k v) where
arbitrary = Map.fromList <$> arbitrary
instance (Ord k, Arbitrary k) => Arbitrary (Set.Set k) where
arbitrary = Set.fromList <$> arbitrary
instance (Arbitrary k) => Arbitrary (Vector.Vector k) where
arbitrary = Vector.fromList <$> arbitrary
instance Arbitrary Text.Text where
arbitrary = Text.pack . filter (/= '\0') <$> arbitrary
instance Arbitrary LT.Text where
arbitrary = LT.pack . filter (/= '\0') <$> arbitrary
instance (Eq k, Hashable k, Arbitrary k) => Arbitrary (HSet.HashSet k) where
arbitrary = HSet.fromList <$> arbitrary
instance (Eq k, Hashable k, Arbitrary k, Arbitrary v) =>
Arbitrary (HMap.HashMap k v) where
arbitrary = HMap.fromList <$> arbitrary
{-
To handle Thrift 'enum' we would ideally use something like:
instance (Enum a, Bounded a) => Arbitrary a
where arbitrary = elements (enumFromTo minBound maxBound)
Unfortunately this doesn't play nicely with the type system.
Instead we'll generate an arbitrary instance along with the code.
-}
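{- As a sketch only (the enum type below is hypothetical, not part of this
   module), a generated per-type instance looks like:

     data Color = RED | GREEN | BLUE deriving (Show, Eq, Enum, Bounded)

     instance Arbitrary Color where
       arbitrary = elements (enumFromTo minBound maxBound)

   where 'elements' comes from Test.QuickCheck.Gen.
-}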
{-
There might be some way to introspect on the Haskell structure of a
Thrift 'struct' or 'exception' but generating the code directly is simpler.
-}
| LinusU/fbthrift | thrift/lib/hs/Thrift/Arbitraries.hs | apache-2.0 | 2,056 | 0 | 9 | 340 | 463 | 270 | 193 | 34 | 0 |
module API where
-- simple type
type Interface = Int
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/pdynload/spj2/api/API.hs | bsd-3-clause | 56 | 0 | 4 | 13 | 11 | 8 | 3 | 2 | 0 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
module Unify (
tcMatchTy, tcMatchTyKi,
tcMatchTys, tcMatchTyKis,
tcMatchTyX, tcMatchTysX, tcMatchTyKisX,
ruleMatchTyKiX,
-- * Rough matching
roughMatchTcs, instanceCantMatch,
typesCantMatch,
-- Side-effect free unification
tcUnifyTy, tcUnifyTyKi, tcUnifyTys, tcUnifyTyKis,
tcUnifyTysFG, tcUnifyTyWithTFs,
BindFlag(..),
UnifyResult, UnifyResultM(..),
-- Matching a type against a lifted type (coercion)
liftCoMatch
) where
#include "HsVersions.h"
import GhcPrelude
import Var
import VarEnv
import VarSet
import Kind
import Name( Name )
import Type hiding ( getTvSubstEnv )
import Coercion hiding ( getCvSubstEnv )
import TyCon
import TyCoRep hiding ( getTvSubstEnv, getCvSubstEnv )
import Util
import Pair
import Outputable
import UniqFM
import UniqSet
import Control.Monad
import qualified Control.Monad.Fail as MonadFail
import Control.Applicative hiding ( empty )
import qualified Control.Applicative
{-
Unification is much trickier than you might think.
1. The substitution we generate binds the *template type variables*
which are given to us explicitly.
2. We want to match in the presence of foralls;
e.g (forall a. t1) ~ (forall b. t2)
That is what the RnEnv2 is for; it does the alpha-renaming
that makes it as if a and b were the same variable.
Initialising the RnEnv2, so that it can generate a fresh
binder when necessary, entails knowing the free variables of
both types.
3. We must be careful not to bind a template type variable to a
locally bound variable. E.g.
(forall a. x) ~ (forall b. b)
where x is the template type variable. Then we do not want to
bind x to a/b! This is a kind of occurs check.
The necessary locals accumulate in the RnEnv2.
-}
-- | @tcMatchTy t1 t2@ produces a substitution (over fvs(t1))
-- @s@ such that @s(t1)@ equals @t2@.
-- The returned substitution might bind coercion variables,
-- if the variable is an argument to a GADT constructor.
--
-- Precondition: typeKind ty1 `eqType` typeKind ty2
--
-- We don't pass in a set of "template variables" to be bound
-- by the match, because tcMatchTy (and similar functions) are
-- always used on top-level types, so we can bind any of the
-- free variables of the LHS.
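-- Schematically (an illustrative sketch, not concrete 'Type' syntax):
-- matching the template @[a]@ against the target @[Int]@ yields a
-- substitution mapping @a@ to @Int@, whereas matching @[a]@ against
-- @Bool@ returns 'Nothing'.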
tcMatchTy :: Type -> Type -> Maybe TCvSubst
tcMatchTy ty1 ty2 = tcMatchTys [ty1] [ty2]
-- | Like 'tcMatchTy', but allows the kinds of the types to differ,
-- and thus matches them as well.
tcMatchTyKi :: Type -> Type -> Maybe TCvSubst
tcMatchTyKi ty1 ty2 = tcMatchTyKis [ty1] [ty2]
-- | This is similar to 'tcMatchTy', but extends a substitution
tcMatchTyX :: TCvSubst -- ^ Substitution to extend
-> Type -- ^ Template
-> Type -- ^ Target
-> Maybe TCvSubst
tcMatchTyX subst ty1 ty2 = tcMatchTysX subst [ty1] [ty2]
-- | Like 'tcMatchTy' but over a list of types.
tcMatchTys :: [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot; in principle the template
-- variables could be free in the target
tcMatchTys tys1 tys2
= tcMatchTysX (mkEmptyTCvSubst in_scope) tys1 tys2
where
in_scope = mkInScopeSet (tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2)
-- | Like 'tcMatchTyKi' but over a list of types.
tcMatchTyKis :: [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot substitution
tcMatchTyKis tys1 tys2
= tcMatchTyKisX (mkEmptyTCvSubst in_scope) tys1 tys2
where
in_scope = mkInScopeSet (tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2)
-- | Like 'tcMatchTys', but extending a substitution
tcMatchTysX :: TCvSubst -- ^ Substitution to extend
-> [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot substitution
tcMatchTysX subst tys1 tys2
= tc_match_tys_x False subst tys1 tys2
-- | Like 'tcMatchTyKis', but extending a substitution
tcMatchTyKisX :: TCvSubst -- ^ Substitution to extend
-> [Type] -- ^ Template
-> [Type] -- ^ Target
-> Maybe TCvSubst -- ^ One-shot substitution
tcMatchTyKisX subst tys1 tys2
= tc_match_tys_x True subst tys1 tys2
-- | Worker for 'tcMatchTysX' and 'tcMatchTyKisX'
tc_match_tys_x :: Bool -- ^ match kinds?
-> TCvSubst
-> [Type]
-> [Type]
-> Maybe TCvSubst
tc_match_tys_x match_kis (TCvSubst in_scope tv_env cv_env) tys1 tys2
= case tc_unify_tys (const BindMe)
False -- Matching, not unifying
False -- Not an injectivity check
match_kis
(mkRnEnv2 in_scope) tv_env cv_env tys1 tys2 of
Unifiable (tv_env', cv_env')
-> Just $ TCvSubst in_scope tv_env' cv_env'
_ -> Nothing
-- | This one is called from the expression matcher,
-- which already has a MatchEnv in hand
ruleMatchTyKiX
:: TyCoVarSet -- ^ template variables
-> RnEnv2
-> TvSubstEnv -- ^ type substitution to extend
-> Type -- ^ Template
-> Type -- ^ Target
-> Maybe TvSubstEnv
ruleMatchTyKiX tmpl_tvs rn_env tenv tmpl target
-- See Note [Kind coercions in Unify]
= case tc_unify_tys (matchBindFun tmpl_tvs) False False
True -- <-- this means to match the kinds
rn_env tenv emptyCvSubstEnv [tmpl] [target] of
Unifiable (tenv', _) -> Just tenv'
_ -> Nothing
matchBindFun :: TyCoVarSet -> TyVar -> BindFlag
matchBindFun tvs tv = if tv `elemVarSet` tvs then BindMe else Skolem
{- *********************************************************************
* *
Rough matching
* *
********************************************************************* -}
-- See Note [Rough match] field in InstEnv
roughMatchTcs :: [Type] -> [Maybe Name]
roughMatchTcs tys = map rough tys
where
rough ty
| Just (ty', _) <- splitCastTy_maybe ty = rough ty'
| Just (tc,_) <- splitTyConApp_maybe ty = Just (tyConName tc)
| otherwise = Nothing
instanceCantMatch :: [Maybe Name] -> [Maybe Name] -> Bool
-- (instanceCantMatch tcs1 tcs2) returns True if tcs1 cannot
-- possibly be instantiated to actual, nor vice versa;
-- False is non-committal
instanceCantMatch (mt : ts) (ma : as) = itemCantMatch mt ma || instanceCantMatch ts as
instanceCantMatch _ _ = False -- Safe
itemCantMatch :: Maybe Name -> Maybe Name -> Bool
itemCantMatch (Just t) (Just a) = t /= a
itemCantMatch _ _ = False
{-
************************************************************************
* *
GADTs
* *
************************************************************************
Note [Pruning dead case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider data T a where
T1 :: T Int
T2 :: T a
newtype X = MkX Int
newtype Y = MkY Char
type family F a
type instance F Bool = Int
Now consider case x of { T1 -> e1; T2 -> e2 }
The question before the house is this: if I know something about the type
of x, can I prune away the T1 alternative?
Suppose x::T Char. It's impossible to construct a (T Char) using T1,
Answer = YES we can prune the T1 branch (clearly)
Suppose x::T (F a), where 'a' is in scope. Then 'a' might be instantiated
to 'Bool', in which case x::T Int, so
ANSWER = NO (clearly)
We see here that we want precisely the apartness check implemented within
tcUnifyTysFG. So that's what we do! Two types cannot match if they are surely
apart. Note that since we are simply dropping dead code, a conservative test
suffices.
-}
-- | Given a list of pairs of types, are any two members of a pair surely
-- apart, even after arbitrary type function evaluation and substitution?
typesCantMatch :: [(Type,Type)] -> Bool
-- See Note [Pruning dead case alternatives]
typesCantMatch prs = any (uncurry cant_match) prs
where
cant_match :: Type -> Type -> Bool
cant_match t1 t2 = case tcUnifyTysFG (const BindMe) [t1] [t2] of
SurelyApart -> True
_ -> False
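-- For instance (illustrative only): typesCantMatch [(intTy, boolTy)] is True,
-- since Int and Bool are surely apart, whereas
-- typesCantMatch [(mkTyVarTy a, intTy)] is False, because 'a' might well be
-- instantiated to Int.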
{-
************************************************************************
* *
Unification
* *
************************************************************************
Note [Fine-grained unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do the types (x, x) and ([y], y) unify? The answer is seemingly "no" --
no substitution to finite types makes these match. But, a substitution to
*infinite* types can unify these two types: [x |-> [[[...]]], y |-> [[[...]]] ].
Why do we care? Consider these two type family instances:
type instance F x x = Int
type instance F [y] y = Bool
If we also have
type instance Looper = [Looper]
then the instances potentially overlap. The solution is to use unification
over infinite terms. This is possible (see [1] for lots of gory details), but
a full algorithm gives a little more power than we need. Instead, we make a
conservative approximation and just omit the occurs check.
[1]: http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/axioms-extended.pdf
tcUnifyTys considers an occurs-check problem as the same as general unification
failure.
tcUnifyTysFG ("fine-grained") returns one of three results: success, occurs-check
failure ("MaybeApart"), or general failure ("SurelyApart").
See also Trac #8162.
It's worth noting that unification in the presence of infinite types is not
complete. This means that, sometimes, a closed type family does not reduce
when it should. See test case indexed-types/should_fail/Overlap15 for an
example.
Note [The substitution in MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The constructor MaybeApart carries data with it, typically a TvSubstEnv. Why?
Because consider unifying these:
(a, a, Int) ~ (b, [b], Bool)
If we go left-to-right, we start with [a |-> b]. Then, on the middle terms, we
apply the subst we have so far and discover that we need [b |-> [b]]. Because
this fails the occurs check, we say that the types are MaybeApart (see above
Note [Fine-grained unification]). But, we can't stop there! Because if we
continue, we discover that Int is SurelyApart from Bool, and therefore the
types are apart. This has practical consequences for the ability for closed
type family applications to reduce. See test case
indexed-types/should_compile/Overlap14.
Note [Unification with skolems]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we discover that two types unify if and only if a skolem variable is
substituted, we can't properly unify the types. But, that skolem variable
may later be instantiated with a unifyable type. So, we return maybeApart
in these cases.
Note [Lists of different lengths are MaybeApart]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is unusual to call tcUnifyTys or tcUnifyTysFG with lists of different
lengths. The place where we know this can happen is from compatibleBranches in
FamInstEnv, when checking data family instances. Data family instances may be
eta-reduced; see Note [Eta reduction for data family axioms] in TcInstDcls.
We wish to say that
D :: * -> * -> *
axDF1 :: D Int ~ DFInst1
axDF2 :: D Int Bool ~ DFInst2
overlap. If we conclude that lists of different lengths are SurelyApart, then
it will look like these do *not* overlap, causing disaster. See Trac #9371.
In usages of tcUnifyTys outside of family instances, we always use tcUnifyTys,
which can't tell the difference between MaybeApart and SurelyApart, so those
usages won't notice this design choice.
-}
-- | Simple unification of two types; all type variables are bindable
-- Precondition: the kinds are already equal
tcUnifyTy :: Type -> Type -- All tyvars are bindable
-> Maybe TCvSubst
-- A regular one-shot (idempotent) substitution
tcUnifyTy t1 t2 = tcUnifyTys (const BindMe) [t1] [t2]
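-- A small usage sketch (not from the original source; the bracketed result is
-- informal shorthand for the returned substitution, and 'a' is a bindable
-- type variable of kind Type):
--
--   tcUnifyTy (mkTyConApp maybeTyCon [mkTyVarTy a]) (mkTyConApp maybeTyCon [intTy])
--     ~~>  Just [a :-> Int]
--   tcUnifyTy intTy boolTy
--     ~~>  Nothing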
-- | Like 'tcUnifyTy', but also unifies the kinds
tcUnifyTyKi :: Type -> Type -> Maybe TCvSubst
tcUnifyTyKi t1 t2 = tcUnifyTyKis (const BindMe) [t1] [t2]
-- | Unify two types, treating type family applications as possibly unifying
-- with anything and looking through injective type family applications.
-- Precondition: kinds are the same
tcUnifyTyWithTFs :: Bool -- ^ True <=> do two-way unification;
-- False <=> do one-way matching.
-- See end of sec 5.2 from the paper
-> Type -> Type -> Maybe TCvSubst
-- This algorithm is an implementation of the "Algorithm U" presented in
-- the paper "Injective type families for Haskell", Figures 2 and 3.
-- The code is incorporated with the standard unifier for convenience, but
-- its operation should match the specification in the paper.
tcUnifyTyWithTFs twoWay t1 t2
= case tc_unify_tys (const BindMe) twoWay True False
rn_env emptyTvSubstEnv emptyCvSubstEnv
[t1] [t2] of
Unifiable (subst, _) -> Just $ niFixTCvSubst subst
MaybeApart (subst, _) -> Just $ niFixTCvSubst subst
-- we want to *succeed* in questionable cases. This is a
-- pre-unification algorithm.
SurelyApart -> Nothing
where
rn_env = mkRnEnv2 $ mkInScopeSet $ tyCoVarsOfTypes [t1, t2]
-----------------
tcUnifyTys :: (TyCoVar -> BindFlag)
-> [Type] -> [Type]
-> Maybe TCvSubst
-- ^ A regular one-shot (idempotent) substitution
-- that unifies the erased types. See comments
-- for 'tcUnifyTysFG'
-- The two types may have common type variables, and indeed do so in the
-- second call to tcUnifyTys in FunDeps.checkClsFD
tcUnifyTys bind_fn tys1 tys2
= case tcUnifyTysFG bind_fn tys1 tys2 of
Unifiable result -> Just result
_ -> Nothing
-- | Like 'tcUnifyTys' but also unifies the kinds
tcUnifyTyKis :: (TyCoVar -> BindFlag)
-> [Type] -> [Type]
-> Maybe TCvSubst
tcUnifyTyKis bind_fn tys1 tys2
= case tcUnifyTyKisFG bind_fn tys1 tys2 of
Unifiable result -> Just result
_ -> Nothing
-- This type does double-duty. It is used in the UM (unifier monad) and to
-- return the final result. See Note [Fine-grained unification]
type UnifyResult = UnifyResultM TCvSubst
data UnifyResultM a = Unifiable a -- the subst that unifies the types
| MaybeApart a -- the subst has as much as we know
-- it must be part of a most general unifier
-- See Note [The substitution in MaybeApart]
| SurelyApart
deriving Functor
instance Applicative UnifyResultM where
pure = Unifiable
(<*>) = ap
instance Monad UnifyResultM where
SurelyApart >>= _ = SurelyApart
MaybeApart x >>= f = case f x of
Unifiable y -> MaybeApart y
other -> other
Unifiable x >>= f = f x
instance Alternative UnifyResultM where
empty = SurelyApart
a@(Unifiable {}) <|> _ = a
_ <|> b@(Unifiable {}) = b
a@(MaybeApart {}) <|> _ = a
_ <|> b@(MaybeApart {}) = b
SurelyApart <|> SurelyApart = SurelyApart
instance MonadPlus UnifyResultM
-- | @tcUnifyTysFG bind_tv tys1 tys2@ attempts to find a substitution @s@ (whose
-- domain elements all respond 'BindMe' to @bind_tv@) such that
-- @s(tys1)@ and @s(tys2)@ are equal, as witnessed by the returned
-- Coercions. This version requires that the kinds of the types are the same,
-- if you unify left-to-right.
tcUnifyTysFG :: (TyVar -> BindFlag)
-> [Type] -> [Type]
-> UnifyResult
tcUnifyTysFG bind_fn tys1 tys2
= tc_unify_tys_fg False bind_fn tys1 tys2
tcUnifyTyKisFG :: (TyVar -> BindFlag)
-> [Type] -> [Type]
-> UnifyResult
tcUnifyTyKisFG bind_fn tys1 tys2
= tc_unify_tys_fg True bind_fn tys1 tys2
tc_unify_tys_fg :: Bool
-> (TyVar -> BindFlag)
-> [Type] -> [Type]
-> UnifyResult
tc_unify_tys_fg match_kis bind_fn tys1 tys2
= do { (env, _) <- tc_unify_tys bind_fn True False match_kis env
emptyTvSubstEnv emptyCvSubstEnv
tys1 tys2
; return $ niFixTCvSubst env }
where
vars = tyCoVarsOfTypes tys1 `unionVarSet` tyCoVarsOfTypes tys2
env = mkRnEnv2 $ mkInScopeSet vars
-- | This function is actually the one to call the unifier -- a little
-- too general for outside clients, though.
tc_unify_tys :: (TyVar -> BindFlag)
-> AmIUnifying -- ^ True <=> unify; False <=> match
-> Bool -- ^ True <=> doing an injectivity check
-> Bool -- ^ True <=> treat the kinds as well
-> RnEnv2
-> TvSubstEnv -- ^ substitution to extend
-> CvSubstEnv
-> [Type] -> [Type]
-> UnifyResultM (TvSubstEnv, CvSubstEnv)
tc_unify_tys bind_fn unif inj_check match_kis rn_env tv_env cv_env tys1 tys2
= initUM tv_env cv_env $
do { when match_kis $
unify_tys env kis1 kis2
; unify_tys env tys1 tys2
; (,) <$> getTvSubstEnv <*> getCvSubstEnv }
where
env = UMEnv { um_bind_fun = bind_fn
, um_unif = unif
, um_inj_tf = inj_check
, um_rn_env = rn_env }
kis1 = map typeKind tys1
kis2 = map typeKind tys2
instance Outputable a => Outputable (UnifyResultM a) where
ppr SurelyApart = text "SurelyApart"
ppr (Unifiable x) = text "Unifiable" <+> ppr x
ppr (MaybeApart x) = text "MaybeApart" <+> ppr x
{-
************************************************************************
* *
Non-idempotent substitution
* *
************************************************************************
Note [Non-idempotent substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During unification we use a TvSubstEnv/CvSubstEnv pair that is
(a) non-idempotent
(b) loop-free; ie repeatedly applying it yields a fixed point
Note [Finding the substitution fixpoint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Finding the fixpoint of a non-idempotent substitution arising from a
unification is harder than it looks, because of kinds. Consider
T k (H k (f:k)) ~ T * (g:*)
If we unify, we get the substitution
[ k -> *
, g -> H k (f:k) ]
To make it idempotent we don't want to get just
[ k -> *
, g -> H * (f:k) ]
We also want to substitute inside f's kind, to get
[ k -> *
, g -> H k (f:*) ]
If we don't do this, we may apply the substitution to something,
and get an ill-formed type, i.e. one where typeKind will fail.
This happened, for example, in Trac #9106.
This is the reason for extending env with [f:k -> f:*], in the
definition of tenv' in niFixTCvSubst.
-}
niFixTCvSubst :: TvSubstEnv -> TCvSubst
-- Find the idempotent fixed point of the non-idempotent substitution
-- See Note [Finding the substitution fixpoint]
-- ToDo: use laziness instead of iteration?
niFixTCvSubst tenv = f tenv
where
f tenv
| not_fixpoint = f (mapVarEnv (substTy subst') tenv)
| otherwise = subst
where
not_fixpoint = anyVarSet in_domain range_tvs
in_domain tv = tv `elemVarEnv` tenv
range_tvs = nonDetFoldUFM (unionVarSet . tyCoVarsOfType) emptyVarSet tenv
-- It's OK to use nonDetFoldUFM here because we
-- forget the order immediately by creating a set
subst = mkTvSubst (mkInScopeSet range_tvs) tenv
                         -- tenv' extends tenv by mapping each free range tyvar
                         -- to that same tyvar with a substituted kind
                         -- See Note [Finding the substitution fixpoint]
tenv' = extendVarEnvList tenv [ (rtv, mkTyVarTy $
setTyVarKind rtv $
substTy subst $
tyVarKind rtv)
| rtv <- nonDetEltsUniqSet range_tvs
-- It's OK to use nonDetEltsUniqSet here
-- because we forget the order
                                        -- immediately by putting it in VarEnv
, not (in_domain rtv) ]
subst' = mkTvSubst (mkInScopeSet range_tvs) tenv'
niSubstTvSet :: TvSubstEnv -> TyCoVarSet -> TyCoVarSet
-- Apply the non-idempotent substitution to a set of type variables,
-- remembering that the substitution isn't necessarily idempotent
-- This is used in the occurs check, before extending the substitution
niSubstTvSet tsubst tvs
= nonDetFoldUniqSet (unionVarSet . get) emptyVarSet tvs
-- It's OK to nonDetFoldUFM here because we immediately forget the
-- ordering by creating a set.
where
get tv
| Just ty <- lookupVarEnv tsubst tv
= niSubstTvSet tsubst (tyCoVarsOfType ty)
| otherwise
= unitVarSet tv
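-- For example (illustrative only): given the non-idempotent substitution
-- tsubst = [a |-> Maybe b], we have
--   niSubstTvSet tsubst {a, c}  ==  {b, c}
-- i.e. we collect the free variables of the image of each variable, chasing
-- the substitution as we go.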
{-
************************************************************************
* *
unify_ty: the main workhorse
* *
************************************************************************
Note [Specification of unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The pure unifier, unify_ty, defined in this module, tries to work out
a substitution to make two types say True to eqType. NB: eqType is
itself not purely syntactic; it accounts for CastTys;
see Note [Non-trivial definitional equality] in TyCoRep
Unlike the "impure unifiers" in the typechecker (the eager unifier in
TcUnify, and the constraint solver itself in TcCanonical), the pure
unifier does /not/ work up to ~.
The algorithm implemented here is rather delicate, and we depend on it
to uphold certain properties. This is a summary of these required
properties. Any reference to "flattening" refers to the flattening
algorithm in FamInstEnv (See Note [Flattening] in FamInstEnv), not
the flattening algorithm in the solver.
Notation:
 θ,φ    substitutions
 ξ      type-function-free types
 τ,σ    other types
 τ♭     type τ, flattened
 ≡      eqType
(U1) Soundness.
     If (unify τ₁ τ₂) = Unifiable θ, then θ(τ₁) ≡ θ(τ₂).
     θ is a most general unifier for τ₁ and τ₂.
(U2) Completeness.
     If (unify ξ₁ ξ₂) = SurelyApart,
     then there exists no substitution θ such that θ(ξ₁) ≡ θ(ξ₂).
These two properties are stated as Property 11 in the "Closed Type Families"
paper (POPL'14). Below, this paper is called [CTF].
(U3) Apartness under substitution.
     If (unify ξ τ♭) = SurelyApart, then (unify ξ θ(τ)♭) = SurelyApart,
     for any θ. (Property 12 from [CTF])
(U4) Apart types do not unify.
     If (unify ξ τ♭) = SurelyApart, then there exists no θ
     such that θ(ξ) = θ(τ). (Property 13 from [CTF])
THEOREM. Completeness w.r.t ~
    If (unify τ₁♭ τ₂♭) = SurelyApart,
    then there exists no proof that (τ₁ ~ τ₂).
PROOF. See appendix of [CTF].
The unification algorithm is used for type family injectivity, as described
in the "Injective Type Families" paper (Haskell'15), called [ITF]. When run
in this mode, it has the following properties.
(I1) If (unify σ τ) = SurelyApart, then σ and τ are not unifiable, even
     after arbitrary type family reductions. Note that σ and τ are
     not flattened here.
(I2) If (unify σ τ) = MaybeApart θ, and if some
     φ exists such that φ(σ) ~ φ(τ), then φ extends θ.
Furthermore, the RULES matching algorithm requires this property,
but only when using this algorithm for matching:
(M1) If (match σ τ) succeeds with θ, then all matchable tyvars
     in σ are bound in θ.
Property M1 means that we must extend the substitution with,
     say (a ↦ a) when appropriate during matching.
See also Note [Self-substitution when matching].
(M2) Completeness of matching.
     If θ(σ) = τ, then (match σ τ) = Unifiable φ,
     where θ is an extension of φ.
Sadly, property M2 and I2 conflict. Consider
type family F1 a b where
F1 Int Bool = Char
F1 Double String = Char
Consider now two matching problems:
P1. match (F1 a Bool) (F1 Int Bool)
P2. match (F1 a Bool) (F1 Double String)
In case P1, we must find (a ↦ Int) to satisfy M2.
In case P2, we must /not/ find (a ↦ Double), in order to satisfy I2. (Note
that the correct mapping for I2 is (a ↦ Int). There is no way to discover
this, but we mustn't map a to anything else!)
We thus must parameterize the algorithm over whether it's being used
for an injectivity check (refrain from looking at non-injective arguments
to type families) or not (do indeed look at those arguments). This is
implemented by the um_inj_tf field of UMEnv.
(It's all a question of whether or not to include equation (7) from Fig. 2
of [ITF].)
This extra parameter is a bit fiddly, perhaps, but seemingly less so than
having two separate, almost-identical algorithms.
Note [Self-substitution when matching]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should happen when we're *matching* (not unifying) a1 with a1? We
should get a substitution [a1 |-> a1]. A successful match should map all
the template variables (except ones that disappear when expanding synonyms).
But when unifying, we don't want to do this, because we'll then fall into
a loop.
This arrangement affects the code in three places:
- If we're matching a refined template variable, don't recur. Instead, just
check for equality. That is, if we know [a |-> Maybe a] and are matching
(a ~? Maybe Int), we want to just fail.
- Skip the occurs check when matching. This comes up in two places, because
matching against variables is handled separately from matching against
full-on types.
Note that this arrangement was provoked by a real failure, where the same
unique ended up in the template as in the target. (It was a rule firing when
compiling Data.List.NonEmpty.)
Note [Matching coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
type family F a
data G a where
MkG :: F a ~ Bool => G a
type family Foo (x :: G a) :: F a
type instance Foo MkG = False
We would like that to be accepted. For that to work, we need to introduce
a coercion variable on the left and then use it on the right. Accordingly,
at use sites of Foo, we need to be able to use matching to figure out the
value for the coercion. (See the desugared version:
axFoo :: [a :: *, c :: F a ~ Bool]. Foo (MkG c) = False |> (sym c)
) We never want this action to happen during *unification* though, when
all bets are off.
Note [Kind coercions in Unify]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We wish to match/unify while ignoring casts. But, we can't just ignore
them completely, or we'll end up with ill-kinded substitutions. For example,
say we're matching `a` with `ty |> co`. If we just drop the cast, we'll
return [a |-> ty], but `a` and `ty` might have different kinds. We can't
just match/unify their kinds, either, because this might gratuitously
fail. After all, `co` is the witness that the kinds are the same -- they
may look nothing alike.
So, we pass a kind coercion to the match/unify worker. This coercion witnesses
the equality between the substed kind of the left-hand type and the substed
kind of the right-hand type. Note that we do not unify kinds at the leaves
(as we did previously). We thus have
INVARIANT: In the call
unify_ty ty1 ty2 kco
it must be that subst(kco) :: subst(kind(ty1)) ~N subst(kind(ty2)), where
`subst` is the ambient substitution in the UM monad.
To get this coercion, we first have to match/unify
the kinds before looking at the types. Happily, we need look only one level
up, as all kinds are guaranteed to have kind *.
When we're working with type applications (either TyConApp or AppTy) we
need to worry about establishing INVARIANT, as the kinds of the function
& arguments aren't (necessarily) included in the kind of the result.
When unifying two TyConApps, this is easy, because the two TyCons are
the same. Their kinds are thus the same. As long as we unify left-to-right,
we'll be sure to unify types' kinds before the types themselves. (For example,
think about Proxy :: forall k. k -> *. Unifying the first args matches up
the kinds of the second args.)
For AppTy, we must unify the kinds of the functions, but once these are
unified, we can continue unifying arguments without worrying further about
kinds.
The interface to this module includes both "...Ty" functions and
"...TyKi" functions. The former assume that INVARIANT is already
established, either because the kinds are the same or because the
list of types being passed in are the well-typed arguments to some
type constructor (see two paragraphs above). The latter take a separate
pre-pass over the kinds to establish INVARIANT. Sometimes, it's important
not to take the second pass, as it caused #12442.
We thought, at one point, that this was all unnecessary: why should
casts be in types in the first place? But they are sometimes. In
dependent/should_compile/KindEqualities2, we see, for example the
constraint Num (Int |> (blah ; sym blah)). We naturally want to find
a dictionary for that constraint, which requires dealing with
coercions in this manner.
Note [Matching in the presence of casts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When matching, it is crucial that no variables from the template
end up in the range of the matching substitution (obviously!).
When unifying, that's not a constraint; instead we take the fixpoint
of the substitution at the end.
So what should we do with this, when matching?
unify_ty (tmpl |> co) tgt kco
Previously, wrongly, we pushed 'co' in the (horrid) accumulating
'kco' argument like this:
unify_ty (tmpl |> co) tgt kco
= unify_ty tmpl tgt (kco ; co)
But that is obviously wrong because 'co' (from the template) ends
up in 'kco', which in turn ends up in the range of the substitution.
This all came up in Trac #13910. Because we match tycon arguments
left-to-right, the ambient substitution will already have a matching
substitution for any kinds; so there is an easy fix: just apply
the substitution-so-far to the coercion from the LHS.
Note that
* When matching, the first arg of unify_ty is always the template;
we never swap round.
* The above argument is distressingly indirect. We seek a
better way.
* One better way is to ensure that type patterns (the template
in the matching process) have no casts. See Trac #14119.
-}
-------------- unify_ty: the main workhorse -----------
type AmIUnifying = Bool -- True <=> Unifying
-- False <=> Matching
unify_ty :: UMEnv
         -> Type -> Type    -- Types to be unified
-> Coercion -- A coercion between their kinds
-- See Note [Kind coercions in Unify]
-> UM ()
-- See Note [Specification of unification]
-- Respects newtypes, PredTypes
unify_ty env ty1 ty2 kco
-- TODO: More commentary needed here
| Just ty1' <- tcView ty1 = unify_ty env ty1' ty2 kco
| Just ty2' <- tcView ty2 = unify_ty env ty1 ty2' kco
| CastTy ty1' co <- ty1 = if um_unif env
then unify_ty env ty1' ty2 (co `mkTransCo` kco)
else -- See Note [Matching in the presence of casts]
do { subst <- getSubst env
; let co' = substCo subst co
; unify_ty env ty1' ty2 (co' `mkTransCo` kco) }
| CastTy ty2' co <- ty2 = unify_ty env ty1 ty2' (kco `mkTransCo` mkSymCo co)
unify_ty env (TyVarTy tv1) ty2 kco
= uVar env tv1 ty2 kco
unify_ty env ty1 (TyVarTy tv2) kco
| um_unif env -- If unifying, can swap args
= uVar (umSwapRn env) tv2 ty1 (mkSymCo kco)
unify_ty env ty1 ty2 _kco
| Just (tc1, tys1) <- mb_tc_app1
, Just (tc2, tys2) <- mb_tc_app2
, tc1 == tc2 || (tcIsStarKind ty1 && tcIsStarKind ty2)
= if isInjectiveTyCon tc1 Nominal
then unify_tys env tys1 tys2
else do { let inj | isTypeFamilyTyCon tc1
= case tyConInjectivityInfo tc1 of
NotInjective -> repeat False
Injective bs -> bs
| otherwise
= repeat False
(inj_tys1, noninj_tys1) = partitionByList inj tys1
(inj_tys2, noninj_tys2) = partitionByList inj tys2
; unify_tys env inj_tys1 inj_tys2
; unless (um_inj_tf env) $ -- See (end of) Note [Specification of unification]
don'tBeSoSure $ unify_tys env noninj_tys1 noninj_tys2 }
| Just (tc1, _) <- mb_tc_app1
, not (isGenerativeTyCon tc1 Nominal)
-- E.g. unify_ty (F ty1) b = MaybeApart
-- because the (F ty1) behaves like a variable
-- NB: if unifying, we have already dealt
-- with the 'ty2 = variable' case
= maybeApart
| Just (tc2, _) <- mb_tc_app2
, not (isGenerativeTyCon tc2 Nominal)
, um_unif env
-- E.g. unify_ty [a] (F ty2) = MaybeApart, when unifying (only)
-- because the (F ty2) behaves like a variable
-- NB: we have already dealt with the 'ty1 = variable' case
= maybeApart
where
mb_tc_app1 = tcSplitTyConApp_maybe ty1
mb_tc_app2 = tcSplitTyConApp_maybe ty2
-- Applications need a bit of care!
-- They can match FunTy and TyConApp, so use splitAppTy_maybe
-- NB: we've already dealt with type variables,
-- so if one type is an App the other one jolly well better be too
unify_ty env (AppTy ty1a ty1b) ty2 _kco
| Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2
= unify_ty_app env ty1a [ty1b] ty2a [ty2b]
unify_ty env ty1 (AppTy ty2a ty2b) _kco
| Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1
= unify_ty_app env ty1a [ty1b] ty2a [ty2b]
unify_ty _ (LitTy x) (LitTy y) _kco | x == y = return ()
unify_ty env (ForAllTy (TvBndr tv1 _) ty1) (ForAllTy (TvBndr tv2 _) ty2) kco
= do { unify_ty env (tyVarKind tv1) (tyVarKind tv2) (mkNomReflCo liftedTypeKind)
; let env' = umRnBndr2 env tv1 tv2
; unify_ty env' ty1 ty2 kco }
-- See Note [Matching coercion variables]
unify_ty env (CoercionTy co1) (CoercionTy co2) kco
= do { c_subst <- getCvSubstEnv
; case co1 of
CoVarCo cv
| not (um_unif env)
, not (cv `elemVarEnv` c_subst)
, BindMe <- tvBindFlagL env cv
-> do { checkRnEnvRCo env co2
; let (co_l, co_r) = decomposeFunCo kco
-- cv :: t1 ~ t2
-- co2 :: s1 ~ s2
-- co_l :: t1 ~ s1
-- co_r :: t2 ~ s2
; extendCvEnv cv (co_l `mkTransCo`
co2 `mkTransCo`
mkSymCo co_r) }
_ -> return () }
unify_ty _ _ _ _ = surelyApart
unify_ty_app :: UMEnv -> Type -> [Type] -> Type -> [Type] -> UM ()
unify_ty_app env ty1 ty1args ty2 ty2args
| Just (ty1', ty1a) <- repSplitAppTy_maybe ty1
, Just (ty2', ty2a) <- repSplitAppTy_maybe ty2
= unify_ty_app env ty1' (ty1a : ty1args) ty2' (ty2a : ty2args)
| otherwise
= do { let ki1 = typeKind ty1
ki2 = typeKind ty2
-- See Note [Kind coercions in Unify]
; unify_ty env ki1 ki2 (mkNomReflCo liftedTypeKind)
; unify_ty env ty1 ty2 (mkNomReflCo ki1)
; unify_tys env ty1args ty2args }
unify_tys :: UMEnv -> [Type] -> [Type] -> UM ()
unify_tys env orig_xs orig_ys
= go orig_xs orig_ys
where
go [] [] = return ()
go (x:xs) (y:ys)
-- See Note [Kind coercions in Unify]
= do { unify_ty env x y (mkNomReflCo $ typeKind x)
; go xs ys }
go _ _ = maybeApart -- See Note [Lists of different lengths are MaybeApart]
---------------------------------
uVar :: UMEnv
-> TyVar -- Variable to be unified
-> Type -- with this Type
-> Coercion -- :: kind tv ~N kind ty
-> UM ()
uVar env tv1 ty kco
= do { -- Check to see whether tv1 is refined by the substitution
subst <- getTvSubstEnv
; case (lookupVarEnv subst tv1) of
Just ty' | um_unif env -- Unifying, so call
-> unify_ty env ty' ty kco -- back into unify
| otherwise
-> -- Matching, we don't want to just recur here.
-- this is because the range of the subst is the target
-- type, not the template type. So, just check for
-- normal type equality.
guard ((ty' `mkCastTy` kco) `eqType` ty)
Nothing -> uUnrefined env tv1 ty ty kco } -- No, continue
uUnrefined :: UMEnv
-> TyVar -- variable to be unified
-> Type -- with this Type
-> Type -- (version w/ expanded synonyms)
-> Coercion -- :: kind tv ~N kind ty
-> UM ()
-- We know that tv1 isn't refined
uUnrefined env tv1 ty2 ty2' kco
| Just ty2'' <- coreView ty2'
= uUnrefined env tv1 ty2 ty2'' kco -- Unwrap synonyms
-- This is essential, in case we have
-- type Foo a = a
-- and then unify a ~ Foo a
| TyVarTy tv2 <- ty2'
= do { let tv1' = umRnOccL env tv1
tv2' = umRnOccR env tv2
; unless (tv1' == tv2' && um_unif env) $ do
-- If we are unifying a ~ a, just return immediately
-- Do not extend the substitution
-- See Note [Self-substitution when matching]
-- Check to see whether tv2 is refined
{ subst <- getTvSubstEnv
; case lookupVarEnv subst tv2 of
{ Just ty' | um_unif env -> uUnrefined env tv1 ty' ty' kco
; _ ->
do { -- So both are unrefined
-- Bind one or the other, depending on which is bindable
; let b1 = tvBindFlagL env tv1
b2 = tvBindFlagR env tv2
ty1 = mkTyVarTy tv1
; case (b1, b2) of
(BindMe, _) -> do { checkRnEnvR env ty2 -- make sure ty2 is not a local
; extendTvEnv tv1 (ty2 `mkCastTy` mkSymCo kco) }
(_, BindMe) | um_unif env
-> do { checkRnEnvL env ty1 -- ditto for ty1
; extendTvEnv tv2 (ty1 `mkCastTy` kco) }
_ | tv1' == tv2' -> return ()
-- How could this happen? If we're only matching and if
-- we're comparing forall-bound variables.
_ -> maybeApart -- See Note [Unification with skolems]
}}}}
uUnrefined env tv1 ty2 ty2' kco -- ty2 is not a type variable
= do { occurs <- elemNiSubstSet tv1 (tyCoVarsOfType ty2')
; if um_unif env && occurs -- See Note [Self-substitution when matching]
then maybeApart -- Occurs check, see Note [Fine-grained unification]
else bindTv env tv1 (ty2 `mkCastTy` mkSymCo kco) }
-- Bind tyvar to the synonym if poss
elemNiSubstSet :: TyVar -> TyCoVarSet -> UM Bool
elemNiSubstSet v set
= do { tsubst <- getTvSubstEnv
; return $ v `elemVarSet` niSubstTvSet tsubst set }
bindTv :: UMEnv -> TyVar -> Type -> UM ()
bindTv env tv ty -- ty is not a variable
= do { checkRnEnvR env ty -- make sure ty mentions no local variables
; case tvBindFlagL env tv of
Skolem -> maybeApart -- See Note [Unification with skolems]
BindMe -> extendTvEnv tv ty
}
{-
%************************************************************************
%* *
Binding decisions
* *
************************************************************************
-}
data BindFlag
= BindMe -- A regular type variable
| Skolem -- This type variable is a skolem constant
-- Don't bind it; it only matches itself
deriving Eq
{-
************************************************************************
* *
Unification monad
* *
************************************************************************
-}
data UMEnv = UMEnv { um_bind_fun :: TyVar -> BindFlag
-- User-supplied BindFlag function
, um_unif :: AmIUnifying
, um_inj_tf :: Bool -- Checking for injectivity?
-- See (end of) Note [Specification of unification]
, um_rn_env :: RnEnv2 }
data UMState = UMState
{ um_tv_env :: TvSubstEnv
, um_cv_env :: CvSubstEnv }
newtype UM a = UM { unUM :: UMState -> UnifyResultM (UMState, a) }
instance Functor UM where
fmap = liftM
instance Applicative UM where
pure a = UM (\s -> pure (s, a))
(<*>) = ap
instance Monad UM where
fail = MonadFail.fail
m >>= k = UM (\state ->
do { (state', v) <- unUM m state
; unUM (k v) state' })
-- need this instance because of a use of 'guard' above
instance Alternative UM where
empty = UM (\_ -> Control.Applicative.empty)
m1 <|> m2 = UM (\state ->
unUM m1 state <|>
unUM m2 state)
instance MonadPlus UM
instance MonadFail.MonadFail UM where
fail _ = UM (\_ -> SurelyApart) -- failed pattern match
initUM :: TvSubstEnv -- subst to extend
-> CvSubstEnv
-> UM a -> UnifyResultM a
initUM subst_env cv_subst_env um
= case unUM um state of
Unifiable (_, subst) -> Unifiable subst
MaybeApart (_, subst) -> MaybeApart subst
SurelyApart -> SurelyApart
where
state = UMState { um_tv_env = subst_env
, um_cv_env = cv_subst_env }
tvBindFlagL :: UMEnv -> TyVar -> BindFlag
tvBindFlagL env tv
| inRnEnvL (um_rn_env env) tv = Skolem
| otherwise = um_bind_fun env tv
tvBindFlagR :: UMEnv -> TyVar -> BindFlag
tvBindFlagR env tv
| inRnEnvR (um_rn_env env) tv = Skolem
| otherwise = um_bind_fun env tv
getTvSubstEnv :: UM TvSubstEnv
getTvSubstEnv = UM $ \state -> Unifiable (state, um_tv_env state)
getCvSubstEnv :: UM CvSubstEnv
getCvSubstEnv = UM $ \state -> Unifiable (state, um_cv_env state)
getSubst :: UMEnv -> UM TCvSubst
getSubst env = do { tv_env <- getTvSubstEnv
; cv_env <- getCvSubstEnv
; let in_scope = rnInScopeSet (um_rn_env env)
; return (mkTCvSubst in_scope (tv_env, cv_env)) }
extendTvEnv :: TyVar -> Type -> UM ()
extendTvEnv tv ty = UM $ \state ->
Unifiable (state { um_tv_env = extendVarEnv (um_tv_env state) tv ty }, ())
extendCvEnv :: CoVar -> Coercion -> UM ()
extendCvEnv cv co = UM $ \state ->
Unifiable (state { um_cv_env = extendVarEnv (um_cv_env state) cv co }, ())
umRnBndr2 :: UMEnv -> TyCoVar -> TyCoVar -> UMEnv
umRnBndr2 env v1 v2
= env { um_rn_env = rnBndr2 (um_rn_env env) v1 v2 }
checkRnEnv :: (RnEnv2 -> VarEnv Var) -> UMEnv -> VarSet -> UM ()
checkRnEnv get_set env varset = UM $ \ state ->
let env_vars = get_set (um_rn_env env) in
if isEmptyVarEnv env_vars || (getUniqSet varset `disjointVarEnv` env_vars)
                  -- NB: That isEmptyVarEnv is a critical optimization; it
-- means we don't have to calculate the free vars of
-- the type, often saving quite a bit of allocation.
then Unifiable (state, ())
else MaybeApart (state, ())
-- | Converts any SurelyApart to a MaybeApart
don'tBeSoSure :: UM () -> UM ()
don'tBeSoSure um = UM $ \ state ->
case unUM um state of
SurelyApart -> MaybeApart (state, ())
other -> other
checkRnEnvR :: UMEnv -> Type -> UM ()
checkRnEnvR env ty = checkRnEnv rnEnvR env (tyCoVarsOfType ty)
checkRnEnvL :: UMEnv -> Type -> UM ()
checkRnEnvL env ty = checkRnEnv rnEnvL env (tyCoVarsOfType ty)
checkRnEnvRCo :: UMEnv -> Coercion -> UM ()
checkRnEnvRCo env co = checkRnEnv rnEnvR env (tyCoVarsOfCo co)
umRnOccL :: UMEnv -> TyVar -> TyVar
umRnOccL env v = rnOccL (um_rn_env env) v
umRnOccR :: UMEnv -> TyVar -> TyVar
umRnOccR env v = rnOccR (um_rn_env env) v
umSwapRn :: UMEnv -> UMEnv
umSwapRn env = env { um_rn_env = rnSwap (um_rn_env env) }
maybeApart :: UM ()
maybeApart = UM (\state -> MaybeApart (state, ()))
surelyApart :: UM a
surelyApart = UM (\_ -> SurelyApart)
{-
%************************************************************************
%* *
Matching a (lifted) type against a coercion
%* *
%************************************************************************
This section defines essentially an inverse to liftCoSubst. It is defined
here to avoid a dependency from Coercion on this module.
-}
data MatchEnv = ME { me_tmpls :: TyVarSet
, me_env :: RnEnv2 }
-- | 'liftCoMatch' is sort of inverse to 'liftCoSubst'. In particular, if
-- @liftCoMatch vars ty co == Just s@, then @liftCoSubst s ty == co@,
-- where @==@ there means that the result of 'liftCoSubst' has the same
-- type as the original co; but may be different under the hood.
-- That is, it matches a type against a coercion of the same
-- "shape", and returns a lifting substitution which could have been
-- used to produce the given coercion from the given type.
-- Note that this function is incomplete -- it might return Nothing
-- when there does indeed exist a possible lifting context.
--
-- This function is incomplete in that it doesn't respect the equality
-- in `eqType`. That is, it's possible that this will succeed for t1 and
-- fail for t2, even when t1 `eqType` t2. That's because it depends on
-- there being a very similar structure between the type and the coercion.
-- This incompleteness shouldn't be all that surprising, especially because
-- it depends on the structure of the coercion, which is a silly thing to do.
--
-- The lifting context produced doesn't have to be exacting in the roles
-- of the mappings. This is because any use of the lifting context will
-- also require a desired role. Thus, this algorithm prefers mapping to
-- nominal coercions where it can do so.
liftCoMatch :: TyCoVarSet -> Type -> Coercion -> Maybe LiftingContext
liftCoMatch tmpls ty co
= do { cenv1 <- ty_co_match menv emptyVarEnv ki ki_co ki_ki_co ki_ki_co
; cenv2 <- ty_co_match menv cenv1 ty co
(mkNomReflCo co_lkind) (mkNomReflCo co_rkind)
; return (LC (mkEmptyTCvSubst in_scope) cenv2) }
where
menv = ME { me_tmpls = tmpls, me_env = mkRnEnv2 in_scope }
in_scope = mkInScopeSet (tmpls `unionVarSet` tyCoVarsOfCo co)
-- Like tcMatchTy, assume all the interesting variables
-- in ty are in tmpls
ki = typeKind ty
ki_co = promoteCoercion co
ki_ki_co = mkNomReflCo liftedTypeKind
Pair co_lkind co_rkind = coercionKind ki_co
-- | 'ty_co_match' does all the actual work for 'liftCoMatch'.
ty_co_match :: MatchEnv -- ^ ambient helpful info
-> LiftCoEnv -- ^ incoming subst
-> Type -- ^ ty, type to match
-> Coercion -- ^ co, coercion to match against
-> Coercion -- ^ :: kind of L type of substed ty ~N L kind of co
-> Coercion -- ^ :: kind of R type of substed ty ~N R kind of co
-> Maybe LiftCoEnv
ty_co_match menv subst ty co lkco rkco
| Just ty' <- coreView ty = ty_co_match menv subst ty' co lkco rkco
-- handle Refl case:
| tyCoVarsOfType ty `isNotInDomainOf` subst
, Just (ty', _) <- isReflCo_maybe co
, ty `eqType` ty'
= Just subst
where
isNotInDomainOf :: VarSet -> VarEnv a -> Bool
isNotInDomainOf set env
= noneSet (\v -> elemVarEnv v env) set
noneSet :: (Var -> Bool) -> VarSet -> Bool
noneSet f = allVarSet (not . f)
ty_co_match menv subst ty co lkco rkco
| CastTy ty' co' <- ty
= ty_co_match menv subst ty' co (co' `mkTransCo` lkco) (co' `mkTransCo` rkco)
| CoherenceCo co1 co2 <- co
= ty_co_match menv subst ty co1 (lkco `mkTransCo` mkSymCo co2) rkco
| SymCo co' <- co
= swapLiftCoEnv <$> ty_co_match menv (swapLiftCoEnv subst) ty co' rkco lkco
-- Match a type variable against a non-refl coercion
ty_co_match menv subst (TyVarTy tv1) co lkco rkco
| Just co1' <- lookupVarEnv subst tv1' -- tv1' is already bound to co1
= if eqCoercionX (nukeRnEnvL rn_env) co1' co
then Just subst
else Nothing -- no match since tv1 matches two different coercions
| tv1' `elemVarSet` me_tmpls menv -- tv1' is a template var
= if any (inRnEnvR rn_env) (tyCoVarsOfCoList co)
then Nothing -- occurs check failed
else Just $ extendVarEnv subst tv1' $
castCoercionKind co (mkSymCo lkco) (mkSymCo rkco)
| otherwise
= Nothing
where
rn_env = me_env menv
tv1' = rnOccL rn_env tv1
-- just look through SubCo's. We don't really care about roles here.
ty_co_match menv subst ty (SubCo co) lkco rkco
= ty_co_match menv subst ty co lkco rkco
ty_co_match menv subst (AppTy ty1a ty1b) co _lkco _rkco
| Just (co2, arg2) <- splitAppCo_maybe co -- c.f. Unify.match on AppTy
= ty_co_match_app menv subst ty1a [ty1b] co2 [arg2]
ty_co_match menv subst ty1 (AppCo co2 arg2) _lkco _rkco
| Just (ty1a, ty1b) <- repSplitAppTy_maybe ty1
-- yes, the one from Type, not TcType; this is for coercion optimization
= ty_co_match_app menv subst ty1a [ty1b] co2 [arg2]
ty_co_match menv subst (TyConApp tc1 tys) (TyConAppCo _ tc2 cos) _lkco _rkco
= ty_co_match_tc menv subst tc1 tys tc2 cos
ty_co_match menv subst (FunTy ty1 ty2) co _lkco _rkco
-- Despite the fact that (->) is polymorphic in four type variables (two
-- runtime rep and two types), we shouldn't need to explicitly unify the
-- runtime reps here; unifying the types themselves should be sufficient.
-- See Note [Representation of function types].
| Just (tc, [_,_,co1,co2]) <- splitTyConAppCo_maybe co
, tc == funTyCon
= let Pair lkcos rkcos = traverse (fmap mkNomReflCo . coercionKind) [co1,co2]
in ty_co_match_args menv subst [ty1, ty2] [co1, co2] lkcos rkcos
ty_co_match menv subst (ForAllTy (TvBndr tv1 _) ty1)
(ForAllCo tv2 kind_co2 co2)
lkco rkco
= do { subst1 <- ty_co_match menv subst (tyVarKind tv1) kind_co2
ki_ki_co ki_ki_co
; let rn_env0 = me_env menv
rn_env1 = rnBndr2 rn_env0 tv1 tv2
menv' = menv { me_env = rn_env1 }
; ty_co_match menv' subst1 ty1 co2 lkco rkco }
where
ki_ki_co = mkNomReflCo liftedTypeKind
ty_co_match _ subst (CoercionTy {}) _ _ _
= Just subst -- don't inspect coercions
ty_co_match menv subst ty co lkco rkco
| Just co' <- pushRefl co = ty_co_match menv subst ty co' lkco rkco
| otherwise = Nothing
ty_co_match_tc :: MatchEnv -> LiftCoEnv
-> TyCon -> [Type]
-> TyCon -> [Coercion]
-> Maybe LiftCoEnv
ty_co_match_tc menv subst tc1 tys1 tc2 cos2
= do { guard (tc1 == tc2)
; ty_co_match_args menv subst tys1 cos2 lkcos rkcos }
where
Pair lkcos rkcos
= traverse (fmap mkNomReflCo . coercionKind) cos2
ty_co_match_app :: MatchEnv -> LiftCoEnv
-> Type -> [Type] -> Coercion -> [Coercion]
-> Maybe LiftCoEnv
ty_co_match_app menv subst ty1 ty1args co2 co2args
| Just (ty1', ty1a) <- repSplitAppTy_maybe ty1
, Just (co2', co2a) <- splitAppCo_maybe co2
= ty_co_match_app menv subst ty1' (ty1a : ty1args) co2' (co2a : co2args)
| otherwise
= do { subst1 <- ty_co_match menv subst ki1 ki2 ki_ki_co ki_ki_co
; let Pair lkco rkco = mkNomReflCo <$> coercionKind ki2
; subst2 <- ty_co_match menv subst1 ty1 co2 lkco rkco
; let Pair lkcos rkcos = traverse (fmap mkNomReflCo . coercionKind) co2args
; ty_co_match_args menv subst2 ty1args co2args lkcos rkcos }
where
ki1 = typeKind ty1
ki2 = promoteCoercion co2
ki_ki_co = mkNomReflCo liftedTypeKind
ty_co_match_args :: MatchEnv -> LiftCoEnv -> [Type]
-> [Coercion] -> [Coercion] -> [Coercion]
-> Maybe LiftCoEnv
ty_co_match_args _ subst [] [] _ _ = Just subst
ty_co_match_args menv subst (ty:tys) (arg:args) (lkco:lkcos) (rkco:rkcos)
= do { subst' <- ty_co_match menv subst ty arg lkco rkco
; ty_co_match_args menv subst' tys args lkcos rkcos }
ty_co_match_args _ _ _ _ _ _ = Nothing
pushRefl :: Coercion -> Maybe Coercion
pushRefl (Refl Nominal (AppTy ty1 ty2))
= Just (AppCo (Refl Nominal ty1) (mkNomReflCo ty2))
pushRefl (Refl r (FunTy ty1 ty2))
| Just rep1 <- getRuntimeRep_maybe ty1
, Just rep2 <- getRuntimeRep_maybe ty2
= Just (TyConAppCo r funTyCon [ mkReflCo r rep1, mkReflCo r rep2
, mkReflCo r ty1, mkReflCo r ty2 ])
pushRefl (Refl r (TyConApp tc tys))
= Just (TyConAppCo r tc (zipWith mkReflCo (tyConRolesX r tc) tys))
pushRefl (Refl r (ForAllTy (TvBndr tv _) ty))
= Just (mkHomoForAllCos_NoRefl [tv] (Refl r ty))
-- NB: NoRefl variant. Otherwise, we get a loop!
pushRefl (Refl r (CastTy ty co)) = Just (castCoercionKind (Refl r ty) co co)
pushRefl _ = Nothing
| shlevy/ghc | compiler/types/Unify.hs | bsd-3-clause | 55,080 | 25 | 23 | 15,657 | 8,439 | 4,405 | 4,034 | 605 | 6 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses #-}
-- Copyright (c) 2005-6 Don Stewart - http://www.cse.unsw.edu.au/~dons
-- GPL version 2 or later (see http://www.gnu.org/copyleft/gpl.html)
-- | Lambdabot version information
module Plugin.Version where
import Plugin
import Paths_lambdabot (version)
import Data.Version (showVersion)
$(plugin "Version")
instance Module VersionModule () where
moduleCmds _ = ["version"]
moduleHelp _ _ = "version/source. Report the version " ++
"and darcs repo of this bot"
process_ _ _ _ = ios . return $ concat
[ "lambdabot ", showVersion version, "\n"
, "darcs get http://code.haskell.org/lambdabot" ]
| zeekay/lambdabot | Plugin/Version.hs | mit | 715 | 0 | 9 | 156 | 116 | 64 | 52 | 13 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
    <title>Сканер последовательности | ZAP-расширение</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
      <label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
      <label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
      <label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
      <label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/sequence/src/main/javahelp/org/zaproxy/zap/extension/sequence/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 1,054 | 78 | 67 | 159 | 534 | 267 | 267 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>Eval Villain Add-On</title>
<maps>
<homeID>evalvillain</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/evalvillain/src/main/javahelp/org/zaproxy/addon/evalvillain/resources/help_ar_SA/helpset_ar_SA.hs | apache-2.0 | 972 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
import Network.XmlRpc.Client
import System.Time
server = "http://time.xmlrpc.com/RPC2"
currentTime :: IO CalendarTime
currentTime = remote server "currentTime.getCurrentTime"
main = do
t <- currentTime
putStrLn (calendarTimeToString t)
| laurencer/confluence-sync | vendor/haxr/examples/time-xmlrpc-com.hs | bsd-3-clause | 253 | 0 | 9 | 42 | 59 | 30 | 29 | 8 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Main where
import Control.Applicative ((<$>))
import Wiki
import Yesod
-- A very simple App, doesn't do anything except provide the Wiki.
data App = App
{ appWiki :: Wiki
}
mkYesod "App" [parseRoutes|
/ HomeR GET
/wiki WikiR Wiki appWiki
|]
instance Yesod App
instance YesodWiki App -- Just use the defaults
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
getHomeR :: Handler Html
getHomeR = defaultLayout
[whamlet|
<p>
Welcome to my test application.
The application is pretty boring.
You probably want to go to
<a href=@{WikiR WikiHomeR}>the wiki#
.
|]
main :: IO ()
main = do
app <- App <$> newWiki
warp 3000 app
| ygale/yesod | demo/subsite/Main.hs | mit | 947 | 0 | 8 | 282 | 143 | 81 | 62 | 22 | 1 |
import Distribution.Simple
main :: IO ()
main = defaultMain
-- Although this looks like the Simple build type, it is in fact vital that
-- we use this Setup.hs because it'll get compiled against the local copy
-- of the Cabal lib, thus enabling Cabal to bootstrap itself without relying
-- on any previous installation. This also means we can use any new features
-- immediately because we never have to worry about building Cabal with an
-- older version of itself.
| jwiegley/ghc-release | libraries/Cabal/cabal/Setup.hs | gpl-3.0 | 468 | 0 | 6 | 83 | 27 | 17 | 10 | 3 | 1 |
module Language.Asol.Interpreter where
import Control.Monad.State
import Language.Asol.Expression
import Data.Char (chr)
type AsolEvent = StateT Stack IO
type Stack = [Integer]
stackop :: Int -> AsolEvent () -> AsolEvent ()
stackop n op =
do stack <- get
if length stack < n
then error "Stack underflow"
else op
stackbinop :: (Integer -> Integer -> Integer) -> AsolEvent ()
stackbinop op = stackop 2 $ modify (\(x:y:xs) -> op y x : xs)
evaluate :: Instruction -> AsolEvent ()
evaluate ins =
case ins of
Push n -> modify (n :)
Top n ->
let top :: Int -> Stack -> Stack
top i xs =
let (as,bs) = splitAt i xs
in head bs : as ++ tail bs
in stackop n $ modify (top n)
Add -> stackbinop (+)
Div -> stackbinop div
Dup -> stackop 1 $ modify (\(x:xs) -> x : x : xs)
Emit ->
stackop 1 $ get >>= \x -> liftIO (putChar $ chr $ fromIntegral $ head x)
Mod -> stackbinop mod
Mul -> stackbinop (*)
Pow -> stackbinop (^)
Fact ->
let factorial n = product [1 .. n]
in stackop 1 $ modify (\(x:xs) -> factorial x : xs)
Pop -> stackop 1 $ modify tail
Print -> stackop 1 $ get >>= \x -> liftIO (print $ head x)
Read -> (evaluate . Push) =<< fmap read (liftIO getLine)
Sub -> stackbinop (-)
Swap -> stackop 2 $ modify (\(x:y:xs) -> y : x : xs)
ShowStack -> get >>= liftIO . print
run :: [Instruction] -> AsolEvent ()
run = mapM_ evaluate
execute :: [Instruction] -> Stack -> IO Stack
execute = execStateT . run
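-- A small usage sketch (not part of the original module):
--
-- >>> execute [Push 2, Push 3, Add, Print] []
-- 5
-- [5]
--
-- 2 and 3 are pushed, Add replaces them with 5, Print writes it to stdout,
-- and the final stack is returned.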
| kmein/asol | Language/Asol/Interpreter.hs | mit | 1,549 | 0 | 16 | 437 | 708 | 357 | 351 | 45 | 16 |
{-# LANGUAGE OverloadedStrings #-}
module Data.CSS.Syntax.Tokens
( Token(..)
, NumericValue(..)
, HashFlag(..)
, Unit
, tokenize
, serialize
) where
import Control.Applicative
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as T
import Data.Attoparsec.Text as AP
import Data.Monoid
import Data.Char
import Data.Scientific
import Prelude
data Token
= Whitespace
| CDO -- CommentDelimiterOpen
| CDC -- CommentDelimiterClose
| Comma
| Colon
| Semicolon
| LeftParen
| RightParen
| LeftSquareBracket
| RightSquareBracket
| LeftCurlyBracket
| RightCurlyBracket
| SuffixMatch
| SubstringMatch
| PrefixMatch
| DashMatch
| IncludeMatch
| Column
| String !Text
| BadString !Text
| Number !Text !NumericValue
| Percentage !Text !NumericValue
| Dimension !Text !NumericValue !Unit
| Url !Text
| BadUrl !Text
| Ident !Text
| AtKeyword !Text
| Function !Text
| Hash !HashFlag !Text
| Delim !Char
deriving (Show, Eq)
data NumericValue
= NVInteger !Scientific
| NVNumber !Scientific
deriving (Show, Eq)
data HashFlag = HId | HUnrestricted
deriving (Show, Eq)
type Unit = Text
-- Tokenization
-------------------------------------------------------------------------------
-- | Parse a 'Text' into a list of 'Token's.
--
-- https://drafts.csswg.org/css-syntax/#tokenization
tokenize :: Text -> Either String [Token]
tokenize = parseOnly (many' parseToken) . preprocessInputStream
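-- A rough usage sketch (output listed informally; not from the original
-- source):
--
-- >>> tokenize "a { color: red }"
-- Right [Ident "a",Whitespace,LeftCurlyBracket,Whitespace,Ident "color"
--       ,Colon,Whitespace,Ident "red",Whitespace,RightCurlyBracket]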
-- | Before sending the input stream to the tokenizer, implementations must
-- make the following code point substitutions: (see spec)
--
-- https://drafts.csswg.org/css-syntax/#input-preprocessing
preprocessInputStream :: Text -> Text
preprocessInputStream = T.pack . f . T.unpack
where
f [] = []
f ('\x000D':'\x000A':r) = '\x000A' : f r
f ('\x000D':r) = '\x000A' : f r
f ('\x000C':r) = '\x000A' : f r
f ('\x0000':r) = '\xFFFD' : f r
f (x:r) = x : f r
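-- For example (illustrative only):
--   preprocessInputStream "a\r\nb" == "a\nb"
-- and any NUL code point is replaced by U+FFFD, per the spec.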
-- Serialization
-------------------------------------------------------------------------------
-- | Serialize a list of 'Token's back into 'Text'. Round-tripping is not
-- guaranteed to be identity. The tokenization step drops some information
-- from the source.
--
-- https://drafts.csswg.org/css-syntax/#serialization
serialize :: [Token] -> Text
serialize = mconcat . map renderToken
renderToken :: Token -> Text
renderToken (Whitespace) = " "
renderToken (CDO) = "<!--"
renderToken (CDC) = "-->"
renderToken (Comma) = ","
renderToken (Colon) = ":"
renderToken (Semicolon) = ";"
renderToken (LeftParen) = "("
renderToken (RightParen) = ")"
renderToken (LeftSquareBracket) = "["
renderToken (RightSquareBracket) = "]"
renderToken (LeftCurlyBracket) = "{"
renderToken (RightCurlyBracket) = "}"
renderToken (SuffixMatch) = "$="
renderToken (SubstringMatch) = "*="
renderToken (PrefixMatch) = "^="
renderToken (DashMatch) = "|="
renderToken (IncludeMatch) = "~="
renderToken (Column) = "||"
renderToken (String x) = "'" <> x <> "'"
renderToken (BadString x) = "'" <> x <> "'"
renderToken (Number x _) = x
renderToken (Percentage x _) = x <> "%"
renderToken (Dimension x _ u) = x <> u
renderToken (Url x) = "url(" <> x <> ")"
renderToken (BadUrl x) = "url(" <> x <> ")"
renderToken (Ident x) = x
renderToken (AtKeyword x) = "@" <> x
renderToken (Function x) = x <> "("
renderToken (Hash _ x) = "#" <> x
renderToken (Delim x) = T.singleton x
parseComment :: Parser ()
parseComment = do
void $ AP.string "/*"
void $ AP.manyTill' AP.anyChar (void (AP.string "*/") <|> AP.endOfInput)
parseWhitespace :: Parser Token
parseWhitespace = do
void $ AP.takeWhile1 isWhitespace
return Whitespace
parseChar :: Token -> Char -> Parser Token
parseChar t c = do
_ <- AP.char c
return t
parseStr :: Token -> Text -> Parser Token
parseStr t str = AP.string str *> return t
escapedCodePoint :: Parser Char
escapedCodePoint = do
mbChar <- AP.peekChar
case mbChar of
Nothing -> return $ '\xFFFD'
Just ch -> do
if isHexChar ch
then do
(t, _) <- AP.runScanner [] f
case unhex (T.unpack t) of
Nothing -> fail $ "escapedCodePoint: unable to parse hex " ++ (T.unpack t)
Just cp -> do
AP.peekChar >>= \c -> case c of
Just nc -> if isWhitespace nc then void AP.anyChar else return ()
_ -> return ()
return $ if cp == 0 || cp > 0x10FFFF
then chr 0xFFFD
else chr cp
else do
if ch == '\n'
then fail "A newline"
else AP.anyChar >> return ch
where
f :: String -> Char -> Maybe String
f acc c =
if length acc < 6 && isHexChar c
then Just $ c:acc
else Nothing
nextInputCodePoint :: Parser Char
nextInputCodePoint = escapedCodePoint' <|> AP.anyChar
whenNext :: Char -> a -> Parser a
whenNext c a = do
mbChar <- AP.peekChar
if mbChar == Just c
then return a
else fail "whenNext"
-- 4.3.4. Consume a string token
parseString :: Char -> Parser Token
parseString endingCodePoint = do
_ <- AP.char endingCodePoint
go mempty
where
go acc = choice
[ (AP.endOfInput <|> void (AP.char endingCodePoint)) *> return (String acc)
, AP.string "\\\n" *> go acc
, whenNext '\n' (BadString acc)
, nextInputCodePoint >>= \ch -> go (acc <> T.singleton ch)
]
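-- For example (illustrative only):
--   parseOnly (parseString '"') "\"abc\"" == Right (String "abc")
-- while an unescaped newline inside the string produces a BadString token.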
parseHash :: Parser Token
parseHash = do
_ <- AP.char '#'
name <- parseName
return $ Hash HId name
isNameStartCodePoint :: Char -> Bool
isNameStartCodePoint c = isLetter c || c >= '\x0080' || c == '_'
isNameCodePoint :: Char -> Bool
isNameCodePoint c = isNameStartCodePoint c || isDigit c || c == '-'
parseNumeric :: Parser Token
parseNumeric = do
(repr, nv) <- parseNumericValue
dimNum repr nv <|> pctNum repr nv <|> return (Number repr nv)
where
dimNum repr nv = do
unit <- parseName
return $ Dimension repr nv unit
pctNum repr nv = do
_ <- AP.char '%'
return $ Percentage repr nv
nameCodePoint :: Parser Char
nameCodePoint = AP.satisfy isNameCodePoint
escapedCodePoint' :: Parser Char
escapedCodePoint' = do
_ <- AP.char '\\'
escapedCodePoint
parseName :: Parser Text
parseName = do
chars <- AP.many1' $
nameCodePoint <|> escapedCodePoint'
case chars of
'-':xs -> case xs of
_:_ -> return $ T.pack chars
_ -> fail "parseName: Not a valid name start"
_ -> return $ T.pack chars
parseSign :: Parser (Text, Int)
parseSign = do
mbChar <- AP.peekChar
case mbChar of
Just '+' -> AP.anyChar >> return ("+", 1)
Just '-' -> AP.anyChar >> return ("-", (-1))
_ -> return ("", 1)
parseNumericValue :: Parser (Text, NumericValue)
parseNumericValue = do
-- Sign
(sS, s) <- parseSign
-- Digits before the decimal dot. They are optional (".1em").
(iS, i) <- do
digits <- AP.takeWhile isDigit
return $ if (T.null digits)
then ("", 0)
else (digits, read $ T.unpack digits)
-- Decimal dot and digits after it. If the decimal dot is there then it
-- MUST be followed by one or more digits. This is not allowed: "1.".
(fS, f, fB) <- option ("", 0, False) $ do
_ <- AP.char '.'
digits <- AP.takeWhile1 isDigit
return ("." <> digits, read $ T.unpack digits, True)
-- Exponent (with optional sign).
(tS, t, eS, e, eB) <- option ("", 1, "", 0, False) $ do
e <- AP.char 'E' <|> AP.char 'e'
(tS, t) <- parseSign
eS <- AP.takeWhile1 isDigit
return (T.singleton e <> tS, t, eS, read $ T.unpack eS, True)
let repr = sS<>iS<>fS<>tS<>eS
if T.null repr || repr == "-" || repr == "+" || T.head repr == 'e' || T.head repr == 'E'
then fail "parseNumericValue: no parse"
else do
let v = fromIntegral s * (i + f*10^^(-(T.length fS - 1))) * 10^^(t*e)
return $ if fB || eB
then (repr, NVNumber v)
else (repr, NVInteger v)
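-- Illustrative examples for parseNumericValue above (not from the original
-- source):
--   parseOnly parseNumericValue "42"    == Right ("42", NVInteger 42)
--   parseOnly parseNumericValue "1.5e2" == Right ("1.5e2", NVNumber 150)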
parseUrl :: Parser Token
parseUrl = do
_ <- AP.takeWhile isWhitespace
go mempty
where
endOfUrl acc = (AP.endOfInput <|> void (AP.char ')')) *> return (Url acc)
go acc = choice
[ endOfUrl acc
, (AP.char '"' <|> AP.char '\'' <|> AP.char '(') >>= \ch -> badUrl (acc <> T.singleton ch)
, AP.string "\\\n" *> badUrl (acc <> "\\\n")
, AP.takeWhile1 isWhitespace >>= \c -> (endOfUrl acc <|> badUrl (acc <> c))
, nextInputCodePoint >>= \ch -> go (acc <> T.singleton ch)
]
badUrl acc = choice
[ (AP.endOfInput <|> void (AP.char ')')) *> return (BadUrl acc)
, nextInputCodePoint >>= \ch -> badUrl (acc <> T.singleton ch)
]
parseIdentLike :: Parser Token
parseIdentLike = do
name <- parseName
choice
[ do
-- Special handling of url() functions (they are not really
-- functions, they have their own Token type).
guard $ T.isPrefixOf "url" (T.map toLower name)
void $ AP.char '('
void $ AP.takeWhile isWhitespace
whenNext '"' (Function name) <|> whenNext '\'' (Function name) <|> parseUrl
, AP.char '(' *> return (Function name)
, return (Ident name)
]
parseEscapedIdentLike :: Parser Token
parseEscapedIdentLike = do
mbChar <- AP.peekChar
case mbChar of
Just '\\' -> parseIdentLike <|> (AP.anyChar >> return (Delim '\\'))
_ -> fail "parseEscapedIdentLike: Does not start with an escape code"
parseAtKeyword :: Parser Token
parseAtKeyword = do
_ <- AP.char '@'
name <- parseName
return $ AtKeyword name
parseToken :: Parser Token
parseToken = AP.many' parseComment *> choice
[ parseWhitespace
, AP.string "<!--" *> return CDO
, AP.string "-->" *> return CDC
, parseChar Comma ','
, parseChar Colon ':'
, parseChar Semicolon ';'
, parseChar LeftParen '('
, parseChar RightParen ')'
, parseChar LeftSquareBracket '['
, parseChar RightSquareBracket ']'
, parseChar LeftCurlyBracket '{'
, parseChar RightCurlyBracket '}'
, parseStr SuffixMatch "$="
, parseStr SubstringMatch "*="
, parseStr PrefixMatch "^="
, parseStr DashMatch "|="
, parseStr IncludeMatch "~="
, parseStr Column "||"
, parseNumeric
, parseEscapedIdentLike
, parseIdentLike
, parseHash
, parseString '"'
, parseString '\''
, parseAtKeyword
, AP.anyChar >>= return . Delim
] <?> "token"
isWhitespace :: Char -> Bool
isWhitespace '\x0009' = True
isWhitespace '\x000A' = True
isWhitespace '\x0020' = True
isWhitespace _ = False
isHexChar :: Char -> Bool
isHexChar ch
| ch >= '0' && ch <= '9' = True
| ch >= 'A' && ch <= 'F' = True
| ch >= 'a' && ch <= 'f' = True
| otherwise = False
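-- Parse a hexadecimal string, most significant digit first. Illustrative
-- examples (not from the original source):
--   unhex "41" == Just 65
--   unhex "zz" fails (Nothing in the Maybe monad)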
unhex :: (Functor m, Monad m) => String -> m Int
unhex = fmap toInt . go []
where
go :: Monad m => [Int] -> String -> m [Int]
go acc [] = return acc
go acc (a:r) = do
x <- c a
go (x:acc) r
toInt = sum . map (\(e, x) -> 16 ^ e * x) . zip [(0::Int)..]
c :: Monad m => Char -> m Int
c '0' = return 0
c '1' = return 1
c '2' = return 2
c '3' = return 3
c '4' = return 4
c '5' = return 5
c '6' = return 6
c '7' = return 7
c '8' = return 8
c '9' = return 9
c 'A' = return 10
c 'B' = return 11
c 'C' = return 12
c 'D' = return 13
c 'E' = return 14
c 'F' = return 15
c 'a' = return 10
c 'b' = return 11
c 'c' = return 12
c 'd' = return 13
c 'e' = return 14
c 'f' = return 15
c _ = fail "Invalid hex digit!"
| bitemyapp/haskell-css-syntax | src/Data/CSS/Syntax/Tokens.hs | mit | 12,399 | 11 | 28 | 3,815 | 4,003 | 2,021 | 1,982 | 369 | 24 |
-- |
-- Module : System.Hapistrano.Types
-- Copyright : Β© 2015-Present Stack Builders
-- License : MIT
--
-- Maintainer : Cristhian Motoche <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Type definitions for the Hapistrano tool.
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
module System.Hapistrano.Types
( Hapistrano
, Failure(..)
, Config(..)
, Source(..)
, Task(..)
, ReleaseFormat(..)
, SshOptions(..)
, OutputDest(..)
, Release
, TargetSystem(..)
, DeployState(..)
, Shell(..)
, Opts(..)
, Command(..)
-- * Types helpers
, mkRelease
, releaseTime
, renderRelease
, parseRelease
, fromMaybeReleaseFormat
, fromMaybeKeepReleases
, toMaybePath
) where
import Control.Applicative
import Control.Monad.Except
import Control.Monad.Reader
import Data.Aeson
import Data.Maybe
import Data.Time
import Numeric.Natural
import Path
-- | Hapistrano monad.
type Hapistrano a = ExceptT (Failure, Maybe Release) (ReaderT Config IO) a
-- | Failure with status code and a message.
data Failure =
Failure Int (Maybe String)
-- | Hapistrano configuration options.
data Config =
Config
{ configSshOptions :: !(Maybe SshOptions)
-- ^ 'Nothing' if we are running locally, or SSH options to use.
, configShellOptions :: !Shell
-- ^ One of the supported 'Shell's
, configPrint :: !(OutputDest -> String -> IO ())
-- ^ How to print messages
}
-- | The source of the repository. It can be from a version control provider
-- like GitHub or a local directory.
data Source
= GitRepository
{ gitRepositoryURL :: String
-- ^ The URL of remote Git repository to deploy
, gitRepositoryRevision :: String
-- ^ The SHA1 or branch to release
}
| LocalDirectory
{ localDirectoryPath :: Path Abs Dir
-- ^ The local repository to deploy
}
deriving (Eq, Ord, Show)
-- | The records describes deployment task.
data Task =
Task
{ taskDeployPath :: Path Abs Dir
-- ^ The root of the deploy target on the remote host
, taskSource :: Source
-- ^ The 'Source' to deploy
, taskReleaseFormat :: ReleaseFormat
-- ^ The 'ReleaseFormat' to use
}
deriving (Show, Eq, Ord)
-- | Release format mode.
data ReleaseFormat
= ReleaseShort -- ^ Standard release path following Capistrano's format
| ReleaseLong -- ^ Long release path including picoseconds
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance FromJSON ReleaseFormat where
parseJSON =
withText "release format" $ \case
"short" -> return ReleaseShort
"long" -> return ReleaseLong
_ -> fail "expected 'short' or 'long'"
-- | Current shells supported.
data Shell
= Bash
| Zsh
deriving (Show, Eq, Ord)
instance FromJSON Shell where
parseJSON =
withText "shell" $ \case
"bash" -> return Bash
"zsh" -> return Zsh
_ -> fail "supported shells: 'bash' or 'zsh'"
-- | SSH options.
data SshOptions =
SshOptions
{ sshHost :: String -- ^ Host to use
, sshPort :: Word -- ^ Port to use
, sshArgs :: [String] -- ^ Arguments for ssh
}
deriving (Show, Read, Eq, Ord)
-- | Output destination.
data OutputDest
= StdoutDest
| StderrDest
deriving (Eq, Show, Read, Ord, Bounded, Enum)
-- | Release indentifier.
data Release =
Release ReleaseFormat UTCTime
deriving (Eq, Show, Ord)
-- | Target's system where application will be deployed.
data TargetSystem
= GNULinux
| BSD
deriving (Eq, Show, Read, Ord, Bounded, Enum)
-- | State of the deployment after running @hap deploy@.
-- __note:__ the 'Unknown' value is not intended to be
-- written to the @.hapistrano_deploy_state@ file; instead,
-- it's intended to represent whenever Hapistrano couldn't
-- get the information on the deployment state (e.g. the file is not present).
data DeployState
= Fail
| Success
| Unknown
deriving (Eq, Show, Read, Ord, Bounded, Enum)
-- Command line options
-- | Command line options.
data Opts = Opts
{ optsCommand :: Command
, optsConfigFile :: FilePath
}
-- | Command to execute and command-specific options.
data Command
= Deploy (Maybe ReleaseFormat) (Maybe Natural) Bool -- ^ Deploy a new release (with timestamp
-- format, how many releases to keep, and whether the failed releases except the latest one
-- get deleted or not)
| Rollback Natural -- ^ Rollback to Nth previous release
-- | Create a 'Release' indentifier.
mkRelease :: ReleaseFormat -> UTCTime -> Release
mkRelease = Release
-- | Extract deployment time from 'Release'.
releaseTime :: Release -> UTCTime
releaseTime (Release _ time) = time
-- | Render 'Release' indentifier as a 'String'.
renderRelease :: Release -> String
renderRelease (Release rfmt time) = formatTime defaultTimeLocale fmt time
where
fmt =
case rfmt of
ReleaseShort -> releaseFormatShort
ReleaseLong -> releaseFormatLong
----------------------------------------------------------------------------
-- Types helpers
-- | Parse 'Release' identifier from a 'String'.
parseRelease :: String -> Maybe Release
parseRelease s =
(Release ReleaseLong <$> p releaseFormatLong s) <|>
(Release ReleaseShort <$> p releaseFormatShort s)
where
p = parseTimeM False defaultTimeLocale
releaseFormatShort, releaseFormatLong :: String
releaseFormatShort = "%Y%m%d%H%M%S"
releaseFormatLong = "%Y%m%d%H%M%S%q"
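-- For example, a ReleaseShort release created at 2023-01-02 03:04:05 UTC
-- renders as "20230102030405".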
-- | Get release format based on the CLI and file configuration values.
fromMaybeReleaseFormat ::
Maybe ReleaseFormat -> Maybe ReleaseFormat -> ReleaseFormat
fromMaybeReleaseFormat cliRF configRF =
fromMaybe ReleaseShort (cliRF <|> configRF)
-- | Get keep releases based on the CLI and file configuration values.
fromMaybeKeepReleases :: Maybe Natural -> Maybe Natural -> Natural
fromMaybeKeepReleases cliKR configKR =
fromMaybe defaultKeepReleases (cliKR <|> configKR)
defaultKeepReleases :: Natural
defaultKeepReleases = 5
-- | Get the local path to copy from the 'Source' configuration value.
toMaybePath :: Source -> Maybe (Path Abs Dir)
toMaybePath (LocalDirectory path) = Just path
toMaybePath _ = Nothing
| stackbuilders/hapistrano | src/System/Hapistrano/Types.hs | mit | 6,187 | 0 | 14 | 1,289 | 1,137 | 656 | 481 | 137 | 2 |
{-# LANGUAGE NamedFieldPuns #-}
module Hickory.Camera where
import Hickory.Math.Vector
import Hickory.Math.Matrix
import Linear (V3, lerp, (!*!), perspective, lookAt, ortho)
data Projection = Perspective
{ fov :: Scalar
, nearPlane :: Scalar
, farPlane :: Scalar
}
| Ortho
{ width :: Scalar
, near :: Scalar
, far :: Scalar
, shouldCenter :: Bool
} deriving (Show)
data Camera = Camera
{ _cameraProj :: Projection
, _cameraCenter :: V3 Scalar
, _cameraTarget :: V3 Scalar
, _cameraUp :: V3 Scalar
} deriving (Show)
shotMatrix :: Projection -> Scalar -> Mat44
shotMatrix Perspective { fov, nearPlane, farPlane } screenRatio =
perspective fov (realToFrac screenRatio) nearPlane farPlane
shotMatrix Ortho { width, near, far, shouldCenter } screenRatio = if shouldCenter
then ortho (-(width / 2)) (width / 2) (-(height / 2)) (height / 2) near far
else ortho 0 width 0 height near far
where height = width / realToFrac screenRatio
viewProjectionMatrix :: Camera -> Scalar -> Mat44
viewProjectionMatrix camera screenRatio = projectionMatrix camera screenRatio !*! viewMatrix camera
viewMatrix :: Camera -> Mat44
viewMatrix (Camera _ center target up) = lookAt center target up
projectionMatrix :: Camera -> Scalar -> Mat44
projectionMatrix (Camera proj _ _ _) = shotMatrix proj
-- Drivers
data Route = Route (V3 Scalar) (Maybe Target) deriving Show
data Target = Target
{ tpos :: V3 Scalar
, moveTime :: Double
, moveDuration :: Double
} deriving Show
cameraCenter :: Route -> V3 Scalar
cameraCenter (Route pos Nothing ) = pos
cameraCenter (Route pos (Just (Target tarpos time duration))) = lerp (realToFrac (time / duration)) tarpos pos
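-- Advance a route by a time delta; once the move duration has elapsed, the
-- camera snaps to the target and the route becomes stationary.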
checkTarget :: Route -> Double -> Route
checkTarget r@(Route _pos Nothing) _ = r
checkTarget (Route pos (Just (Target tpos moveTime moveDuration))) delta =
let time' = (moveTime + delta)
in if time' > moveDuration then Route tpos Nothing else Route pos (Just (Target tpos time' moveDuration))
| asivitz/Hickory | Hickory/Camera.hs | mit | 1,995 | 0 | 12 | 375 | 688 | 375 | 313 | 48 | 2 |
module QuantumComputer.TeleportEx
( psi
, psi0
, psi1
, psi2
, teleportGates
, teleportCircuit
) where
import QuantumComputer.ConstantStates
import QuantumComputer.Gates
import QuantumComputer.QStates
-- teleportation
psi = qstate0
psi0 = psi `combineStates` bell00
psi1 = hgate 0 psi0
psi2 = cnot 0 1 psi1
-- same as above but all at once (right is done first) (id applied last)
teleportGates r0 r1 = foldl1 (.) [ zgateM 2 0, xgateM 2 1, meas 1 r1, meas 0 r0, hgate 0, cnot 1 0]
teleportCircuit r0 r1 state0 = teleportGates r0 r1 $ state0 `combineStates` bell00
| VictorLoren/quantum-computer-haskell | QuantumComputer/TeleportEx.hs | mit | 569 | 0 | 7 | 100 | 171 | 95 | 76 | 16 | 1 |
module Problem28 where
{--
Task description:
Starting with the number 1 and moving to the right in a clockwise direction a 5 by 5 spiral is formed as follows:
21 22 23 24 25
20 7 8 9 10
19 6 1 2 11
18 5 4 3 12
17 16 15 14 13
It can be verified that the sum of the numbers on the diagonals is 101.
What is the sum of the numbers on the diagonals in a 1001 by 1001 spiral formed in the same way?
--}
ring :: Int -> [Int]
ring 1 = [1]
ring x = take 4 $ iterate (subtract (x - 1)) (x^2)
rings :: [Int]
rings = ring =<< [1, 3..1001]
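-- Sanity check: for the 5 by 5 spiral in the task description,
-- sum (ring =<< [1,3,5]) == 101.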
main = print $ sum rings
| runjak/projectEuler | src/Problem28.hs | mit | 586 | 0 | 10 | 160 | 105 | 58 | 47 | 7 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module Test.Data.SetMultiMap where
import Test.Framework
import qualified Data.SetMultiMap as SetMultiMap; import Data.SetMultiMap (SetMultiMap)
import qualified Data.List as List
import qualified Data.Set as Set; import Data.Set (Set)
import Control.Applicative
prop_replacingRemovesOldValueAndInsertsNewOne (mm :: SetMultiMap Char Int) v' =
let
keys = SetMultiMap.keys mm
values = SetMultiMap.elems mm
in
(not $ null keys) && (not $ null values) && (not $ elem v' values) ==>
forAll ((,) <$> elements keys <*> elements values) $ \(k, v) -> let
mm' = SetMultiMap.replace k v v' mm
s = SetMultiMap.lookup k mm'
in (not $ Set.member v s) .&&. (Set.member v' s)
prop_toFromList (mm :: SetMultiMap Char Int) = l == l' where
l = SetMultiMap.toAscList mm
l' = SetMultiMap.toAscList $ SetMultiMap.fromList l
prop_toAscListIsInFactAscending (mm :: SetMultiMap Int Double) =
l == List.sort l where
l = SetMultiMap.toAscList mm
-- | Useful for generating a map which is a result of arbitrary modifications.
newtype Modification k v = Modification (SetMultiMap k v -> SetMultiMap k v)
instance
( Arbitrary k, Arbitrary v, Ord k, Ord v ) =>
Arbitrary (Modification k v)
where
arbitrary =
fmap Modification $ oneof [insert, delete, deleteAll, replace, alterF]
where
insert = promote $ \mm -> do
k <- arbitrary
v <- arbitrary
return $ SetMultiMap.insert k v mm
delete = promote $ \mm -> do
if SetMultiMap.null mm
then return mm
else do
k <- elements $ SetMultiMap.keys mm
v <- elements $ SetMultiMap.elems mm
return $ SetMultiMap.delete k v mm
deleteAll = promote $ \mm -> do
if SetMultiMap.null mm
then return mm
else do
k <- elements $ SetMultiMap.keys mm
return $ SetMultiMap.deleteAll k mm
replace = promote $ \mm -> do
if SetMultiMap.null mm
then return mm
else do
k <- elements $ SetMultiMap.keys mm
v <- elements $ SetMultiMap.elems mm
v' <- arbitrary
return $ SetMultiMap.replace k v v' mm
alterF = promote $ \mm -> do
if SetMultiMap.null mm
then return mm
else do
k <- elements $ SetMultiMap.keys mm
f <- do
l <- arbitrary
return $ \_ -> (undefined, Set.fromList l)
return $ snd $ SetMultiMap.alterF k f mm
-- | Simulates a map which is a result of sequentially applied arbitrary
-- modifications, thus being closest to the actual use-cases and covering most
-- modification functions.
instance
( Arbitrary k, Arbitrary v, Ord k, Ord v ) =>
Arbitrary (SetMultiMap k v)
where
arbitrary = do
mods <- listOf arbitrary
return $ foldr ($) SetMultiMap.empty $ map (\(Modification f) -> f) mods
| scravy/multimap | src/Test/Data/SetMultiMap.hs | mit | 3,056 | 0 | 22 | 954 | 936 | 475 | 461 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
-- |
--
-- Generic OAuth2 plugin for Yesod
--
-- See @"Yesod.Auth.OAuth2.GitHub"@ for example usage.
--
module Yesod.Auth.OAuth2
( OAuth2(..)
, FetchCreds
, Manager
, OAuth2Token(..)
, Creds(..)
, oauth2Url
, authOAuth2
, authOAuth2Widget
-- * Alternatives that use 'fetchAccessToken2'
, authOAuth2'
, authOAuth2Widget'
-- * Reading our @'credsExtra'@ keys
, getAccessToken
, getRefreshToken
, getUserResponse
, getUserResponseJSON
)
where
import Control.Error.Util (note)
import Control.Monad ((<=<))
import Data.Aeson (FromJSON, eitherDecode)
import Data.ByteString.Lazy (ByteString, fromStrict)
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import Network.HTTP.Conduit (Manager)
import Network.OAuth.OAuth2
import Yesod.Auth
import Yesod.Auth.OAuth2.Dispatch
import Yesod.Core.Widget
oauth2Url :: Text -> AuthRoute
oauth2Url name = PluginR name ["forward"]
-- | Create an @'AuthPlugin'@ for the given OAuth2 provider
--
-- Presents a generic @"Login via #{name}"@ link
--
authOAuth2 :: YesodAuth m => Text -> OAuth2 -> FetchCreds m -> AuthPlugin m
authOAuth2 name = authOAuth2Widget [whamlet|Login via #{name}|] name
-- | A version of 'authOAuth2' that uses 'fetchAccessToken2'
--
-- See <https://github.com/thoughtbot/yesod-auth-oauth2/pull/129>
--
authOAuth2' :: YesodAuth m => Text -> OAuth2 -> FetchCreds m -> AuthPlugin m
authOAuth2' name = authOAuth2Widget' [whamlet|Login via #{name}|] name
-- | Create an @'AuthPlugin'@ for the given OAuth2 provider
--
-- Allows passing a custom widget for the login link. See @'oauth2Eve'@ for an
-- example.
--
authOAuth2Widget
:: YesodAuth m
=> WidgetFor m ()
-> Text
-> OAuth2
-> FetchCreds m
-> AuthPlugin m
authOAuth2Widget = buildPlugin fetchAccessToken
-- | A version of 'authOAuth2Widget' that uses 'fetchAccessToken2'
--
-- See <https://github.com/thoughtbot/yesod-auth-oauth2/pull/129>
--
authOAuth2Widget'
:: YesodAuth m
=> WidgetFor m ()
-> Text
-> OAuth2
-> FetchCreds m
-> AuthPlugin m
authOAuth2Widget' = buildPlugin fetchAccessToken2
buildPlugin
:: YesodAuth m
=> FetchToken
-> WidgetFor m ()
-> Text
-> OAuth2
-> FetchCreds m
-> AuthPlugin m
buildPlugin getToken widget name oauth getCreds = AuthPlugin
name
(dispatchAuthRequest name oauth getToken getCreds)
login
where login tm = [whamlet|<a href=@{tm $ oauth2Url name}>^{widget}|]
-- | Read the @'AccessToken'@ from the values set via @'setExtra'@
getAccessToken :: Creds m -> Maybe AccessToken
getAccessToken = (AccessToken <$>) . lookup "accessToken" . credsExtra
-- | Read the @'RefreshToken'@ from the values set via @'setExtra'@
--
-- N.B. not all providers supply this value.
--
getRefreshToken :: Creds m -> Maybe RefreshToken
getRefreshToken = (RefreshToken <$>) . lookup "refreshToken" . credsExtra
-- | Read the original profile response from the values set via @'setExtra'@
getUserResponse :: Creds m -> Maybe ByteString
getUserResponse =
(fromStrict . encodeUtf8 <$>) . lookup "userResponse" . credsExtra
-- | @'getUserResponse'@, and decode as JSON
getUserResponseJSON :: FromJSON a => Creds m -> Either String a
getUserResponseJSON =
eitherDecode <=< note "userResponse key not present" . getUserResponse
| thoughtbot/yesod-auth-oauth2 | src/Yesod/Auth/OAuth2.hs | mit | 3,431 | 0 | 11 | 628 | 664 | 375 | 289 | -1 | -1 |
import Data.Ratio
calcE :: Int -> Rational
calcE n = calcE' 0
where
calcE' :: Int -> Rational
calcE' i = cur + next
where
cur
| i == 0 = 2
| (i + 1) `mod` 3 == 0 = fromIntegral ((i + 1) `div` 3) * 2
| otherwise = 1
next = if n == (i + 1) then 0 else 1 / calcE' (i + 1)
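-- The convergents produced are 2, 3, 8/3, 11/4, 19/7, 87/32, ...;
-- per the problem statement, the tenth (calcE 10) is 1457/536, whose
-- numerator has digit sum 17.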
digits :: Integer -> [Integer]
digits 0 = []
digits i = digits (i `div` 10) ++ [i `mod` 10]
euler65 = print $ sum $ digits $ numerator $ calcE 100
| RossMeikleham/Project-Euler-Haskell | 65.hs | mit | 530 | 0 | 15 | 216 | 241 | 128 | 113 | 14 | 2 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ZKFCProtocolProtos.ZKFCProtocolService
(ZKFCProtocolService, zKFCProtocolService, CedeActive, GracefulFailover, cedeActive, gracefulFailover) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
import qualified Hadoop.Protos.ZKFCProtocolProtos.CedeActiveRequestProto as ZKFCProtocolProtos (CedeActiveRequestProto)
import qualified Hadoop.Protos.ZKFCProtocolProtos.GracefulFailoverRequestProto as ZKFCProtocolProtos (GracefulFailoverRequestProto)
import qualified Hadoop.Protos.ZKFCProtocolProtos.CedeActiveResponseProto as ZKFCProtocolProtos (CedeActiveResponseProto)
import qualified Hadoop.Protos.ZKFCProtocolProtos.GracefulFailoverResponseProto as ZKFCProtocolProtos
(GracefulFailoverResponseProto)
type ZKFCProtocolService = P'.Service '[CedeActive, GracefulFailover]
zKFCProtocolService :: ZKFCProtocolService
zKFCProtocolService = P'.Service
type CedeActive =
P'.Method ".hadoop.common.ZKFCProtocolService.cedeActive" ZKFCProtocolProtos.CedeActiveRequestProto
ZKFCProtocolProtos.CedeActiveResponseProto
type GracefulFailover =
P'.Method ".hadoop.common.ZKFCProtocolService.gracefulFailover" ZKFCProtocolProtos.GracefulFailoverRequestProto
ZKFCProtocolProtos.GracefulFailoverResponseProto
cedeActive :: CedeActive
cedeActive = P'.Method
gracefulFailover :: GracefulFailover
gracefulFailover = P'.Method | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/ZKFCProtocolProtos/ZKFCProtocolService.hs | mit | 1,679 | 0 | 7 | 155 | 234 | 156 | 78 | 27 | 1 |
module Atbash (decode, encode) where
import Data.Char (isLetter, toLower, isAlphaNum)
import qualified Data.Text as T
import Data.Text (Text)
decode :: Text -> Text
decode = T.map atbashChar . T.filter isAlphaNum
encode :: Text -> Text
encode = T.unwords . T.chunksOf 5 . decode
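-- Example (with OverloadedStrings): encode "test" == "gvhg"
-- and decode "gvhg" == "test".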
atbashChar :: Char -> Char
atbashChar c
| isLetter c = symmetric
| otherwise = c
where
lc = toLower c
symmetric = toEnum $ fromEnum 'a'
+ fromEnum 'z'
- fromEnum lc | exercism/xhaskell | exercises/practice/atbash-cipher/.meta/examples/success-text/src/Atbash.hs | mit | 532 | 0 | 10 | 164 | 174 | 92 | 82 | 16 | 1 |
module Golf where
import Data.List (group, intercalate, sort, tails, transpose, zip3)
import Data.Maybe (fromMaybe)
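-- skips: the n-th list in the result (counting from 1) contains every
-- n-th element of the input.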
skips :: [a] -> [[a]]
skips = map skipper . init . tails . zip [1..]
skipper :: Integral a => [(a,b)] -> [b]
skipper xs = map snd $ filter p xs
where
p = (==0) . flip mod n . fst
n = fst $ head xs
localMaxima :: [Integer] -> [Integer]
localMaxima xs = map s $ filter p $ zip3 xs ys zs
where
s = \(x,y,z) -> y
p = \(x,y,z) -> y > x && y > z
zs = tail ys
ys = tail xs
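-- histogram: vertical chart of how often each digit 0..9 occurs (one '*'
-- per occurrence), with a ruler underneath.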
histogram :: [Integer] -> String
histogram xs = (intercalate "\n" $ transpose stars) ++ "\n==========\n0123456789\n"
where
stars = map (\i -> replicate (maximum counts - i) ' ' ++ replicate i '*') counts
counts = map (fromMaybe 0 . flip lookup db) [0..9]
db = map (\l -> (head l, length l)) $ group $ sort xs
| ryanoneill/yorgey-haskell | Week03/Golf.hs | mit | 840 | 0 | 14 | 212 | 429 | 230 | 199 | 20 | 1 |
{- parse and work with simple config files.
Config files take the form:
name1 = It's a beautiful day in the neighborhood.
name2 = 40
name3 = 99.123
-}
module Croissant
( parseConfigFile
, getInt
, getFloat
) where
import Tea
import Parser
import Control.Applicative
import qualified Data.Map.Strict as Map
import System.IO (openFile,hGetContents,IOMode(..))
import qualified Data.Set as Set
type Config = Map.Map String String
parseConfigFile :: String -> IO Config
parseConfigFile path = do
handle <- openFile path ReadMode
text <- hGetContents handle
return . Map.fromList . maybe [] id . snd .
flip runParser (tea text) $ many $ do
whitespace
name <- some $ alpha <|> digit
whitespace
char '='
whitespace
value <- some $ alpha <|> digit
return (name,value)
getInt :: String -> Config -> Maybe Int
getInt k m = case Map.lookup k m of
Nothing -> Nothing
Just ok -> fmap read . snd . runParser integer $ tea ok
getFloat :: String -> Config -> Maybe Float
getFloat k m = case Map.lookup k m of
Nothing -> Nothing
Just ok -> fmap read . snd . runParser number $ tea ok
| ZacharyKamerling/Canvas | src/Croissant.hs | mit | 1,111 | 2 | 13 | 232 | 371 | 186 | 185 | 32 | 2 |
fibo n = if n < 2 then n else fibo (n-1) + fibo (n-2)
inputList = [19,9,2,16,3,8,0,6,4,17,5,1,14,12,15,13,10,7,11,18]
fiboList = map (fibo) inputList
main = mapM_ print fiboList | changeworld/fibonacci_race | fibonacci_race.hs | mit | 178 | 0 | 9 | 28 | 133 | 78 | 55 | 4 | 2 |
module Y2016.M08.D29.Solution where
import Control.Arrow ((&&&), (>>>))
import Data.List (group)
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Codec.Compression.GZip as GZ
{--
Now for something completely different.
My daughter, EM, posed this #math problem to me. Can you solve it?
WHAT IS THE NEXT NUMBER?
3
13
1113
3113
132113
?
--}
series :: [Integer]
series = map read (words "3 13 1113 3113 132113")
nextInSeries :: [Integer] -> Integer
nextInSeries = read
. concatMap (show . length &&& pure . head >>> uncurry (++))
. group . show . last
-- *Y2016.M08.D29.Solution> nextInSeries series ~> 1113122113
{-- BONUS -----------------------------------------------------------------
Now that you solved the above ...
... you have solved the above ...
J. M. Varner @JMVarnerBooks laments: "Not a very efficient encoding scheme ;)"
And posits that 'DEFLATE' is an effient one... whatever 'DEFLATE' is.
So, devise an efficient encoding scheme for the above series. Share.
--}
type Encoding = BL.ByteString
encode :: [Integer] -> Encoding
encode = GZ.compress . BL.pack . show
{--
*Y2016.M08.D29.Solution> encode series ~> weirdo-characters ~> zippo
... but it worked because:
*Y2016.M08.D29.Solution> GZ.decompress zippo ~> "[3,13,1113,3113,132113]"
YAY!
--}
| geophf/1HaskellADay | exercises/HAD/Y2016/M08/D29/Solution.hs | mit | 1,323 | 0 | 14 | 229 | 179 | 109 | 70 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Types.Player(
Player(..),
playerSnippet,
alive,
moveToEntity
) where
import Data.Default
import Control.Lens
import Data.List(intercalate)
import Data.Direction
import Types.Consumable
import Types.Item
import Types.Block
import Types.Enemy
import Types.Jewel
import Types.GroundItem
import Types.Entity
import Data.Vectors
import Types.PState
data Player = Player{
_loc :: Vector3 Int,
_hp :: Int,
_bombs :: Int,
_ropes :: Int,
_gold :: Int,
  _items :: [Item], -- ^ Passive items
  _holding :: Maybe Entity, -- ^ Current Item in hands
  _favor :: Int, -- ^ Kali favor
_p_state :: PState
} deriving Eq
-- | Starting Player
instance Default Player where
def = Player (fromTriple (0,0,0)) 4 4 4 0 [] Nothing 0 Standing
makeLenses ''Player
-- | Full show, exclude favor since it's a hidden stat
instance Show Player where
show p = unlines $ filter (not . null)
[ "You are in the "
++ (showRelativeDirection (fromVector3 $ p^.loc))
++ " of the room."
, "You have " ++ show (p^.hp) ++ " hp remaining."
, "You have " ++ show (p^.bombs) ++ " bombs remaining."
, "You have " ++ show (p^.ropes) ++ " ropes remaining."
, "You have collected " ++ show (p^.gold) ++ " gold so far."
, if null (p^.items) then []
else "You have collected the following items: "
++ (intercalate ", " $ map show (p^.items))
, case (p^.holding) of
Nothing -> []
Just a -> "You are holding : " ++ show a
]
-- | Information to show on each round
playerSnippet :: Player -> String
playerSnippet p =
"You are in the "
++ (showRelativeDirection (fromVector3 $ p^.loc)) ++ "."
++ case p^.holding of
Just x -> "\nYou are holding a " ++ show x ++ "."
_ -> []
----- Extras -----
alive :: Player -> Bool
alive = (> 0) . view hp
pickupConsumable :: Consumable -> Player -> Player
pickupConsumable BombBox  = bombs +~ 12
pickupConsumable BombBag  = bombs +~ 4
pickupConsumable RopePile = ropes +~ 4
-- What when the player moves onto another an entity?
-- NB. This should happen LAST: player should have the opportunity to
-- whip or whatever on the current spot and move out of the way
-- before this fires.
moveToEntity :: Vector3 Int -- ^ Location
             -> Entity -- ^ Target
             -> Player -- ^ Source
             -> Player -- ^ Result
moveToEntity v (Jewel' j) = (loc .~ v) . (gold +~ value j)
moveToEntity v (Block' b) = case b of
Spikes -> (loc .~ v) . (p_state .~ Falling)
Web -> (loc .~ v) . (p_state .~ Stunned)
PowderKeg -> hp .~ 0
Exit -> loc .~ v
_ -> id
moveToEntity v (Enemy' e) = case e of
(BigSpider _ _) -> hp -~ 2
(Arrow True _ _) -> hp -~ 2
(Arrow False _ _) -> id
(Shopkeeper True _ _) -> (hp -~ 1) . (p_state .~ Stunned)
(Boulder True _ _) -> hp -~ 5
(Boulder False _ _) -> id
_ -> hp -~ 1
moveToEntity v (GroundItem' g) = case g of
Floor c -> pickupConsumable c . (loc .~ v)
_ -> loc .~ v
moveToEntity v _ = loc .~ v | 5outh/textlunky | src/Types/Player.hs | mit | 3,164 | 0 | 15 | 888 | 1,001 | 539 | 462 | 84 | 12 |
module AES256GCMBench (benchAes256GCM, aes256GCMEnv) where
import Criterion.Main
import Control.Monad
import Control.DeepSeq
import Control.Exception
import Data.ByteString as BS
import Crypto.Saltine.Core.AEAD.AES256GCM as G
import BenchUtils
aes256GCMEnv :: IO Key
aes256GCMEnv = newKey
benchAes256GCM :: Key -> Benchmark
benchAes256GCM k = do
let encrypt :: ByteString -> ByteString -> IO ByteString
encrypt msg aad = newNonce >>= \n -> pure $ G.aead k n msg aad
decrypt :: ByteString -> ByteString -> IO (Maybe ByteString)
decrypt msg aad = do
n <- newNonce
let ciphertext = G.aead k n msg aad
return $ G.aeadOpen k n ciphertext aad
encryptDetached msg aad = newNonce >>= \n -> pure $ G.aeadDetached k n msg aad
decryptDetached msg aad = do
n <- newNonce
let (t,c) = G.aeadDetached k n msg aad
pure $ G.aeadOpenDetached k n t c aad
bgroup "AES256GCM"
[ bench "newKey" $ nfIO newKey
, bgroup "aead"
[ bench "128 B + 128 B" $ nfIO $ encrypt bs128 bs128
, bench "128 B + 5 MB" $ nfIO $ encrypt bs128 mb5
, bench "1 MB + 128 B" $ nfIO $ encrypt mb1 bs128
, bench "1 MB + 5 B" $ nfIO $ encrypt mb1 mb5
, bench "5 MB + 128 B" $ nfIO $ encrypt mb5 bs128
, bench "5 MB + 5 MB" $ nfIO $ encrypt mb5 mb5
]
, bgroup "aead + open"
[ bench "128 B + 128 B" $ nfIO $ decrypt bs128 bs128
, bench "128 B + 5 MB" $ nfIO $ decrypt bs128 mb5
, bench "1 MB + 128 B" $ nfIO $ decrypt mb1 bs128
, bench "1 MB + 5 B" $ nfIO $ decrypt mb1 mb5
, bench "5 MB + 128 B" $ nfIO $ decrypt mb5 bs128
, bench "5 MB + 5 MB" $ nfIO $ decrypt mb5 mb5
]
, bgroup "aeadDetached"
[ bench "128 B + 128 B" $ nfIO $ encryptDetached bs128 bs128
, bench "128 B + 5 MB" $ nfIO $ encryptDetached bs128 mb5
, bench "1 MB + 128 B" $ nfIO $ encryptDetached mb1 bs128
, bench "1 MB + 5 B" $ nfIO $ encryptDetached mb1 mb5
, bench "5 MB + 128 B" $ nfIO $ encryptDetached mb5 bs128
, bench "5 MB + 5 MB" $ nfIO $ encryptDetached mb5 mb5
]
, bgroup "aeadDetached + openDetached"
[ bench "128 B + 128 B" $ nfIO $ decryptDetached bs128 bs128
, bench "128 B + 5 MB" $ nfIO $ decryptDetached bs128 mb5
, bench "1 MB + 128 B" $ nfIO $ decryptDetached mb1 bs128
, bench "1 MB + 5 B" $ nfIO $ decryptDetached mb1 mb5
, bench "5 MB + 128 B" $ nfIO $ decryptDetached mb5 bs128
, bench "5 MB + 5 MB" $ nfIO $ decryptDetached mb5 mb5
]
]
| tel/saltine | bench/AES256GCMBench.hs | mit | 2,625 | 0 | 16 | 822 | 820 | 401 | 419 | 54 | 1 |
{- This module was generated from data in the Kate syntax
highlighting file haxe.xml, version 0.1, by Chad Joan -}
module Text.Highlighting.Kate.Syntax.Haxe
(highlight, parseExpression, syntaxName, syntaxExtensions)
where
import Text.Highlighting.Kate.Types
import Text.Highlighting.Kate.Common
import Text.ParserCombinators.Parsec hiding (State)
import Control.Monad.State
import Data.Char (isSpace)
import qualified Data.Set as Set
-- | Full name of language.
syntaxName :: String
syntaxName = "Haxe"
-- | Filename extensions for this language.
syntaxExtensions :: String
syntaxExtensions = "*.hx;*.Hx;*.hX;*.HX;"
-- | Highlight source code using this syntax definition.
highlight :: String -> [SourceLine]
highlight input = evalState (mapM parseSourceLine $ lines input) startingState
parseSourceLine :: String -> State SyntaxState SourceLine
parseSourceLine = mkParseSourceLine (parseExpression Nothing)
-- | Parse an expression using appropriate local context.
parseExpression :: Maybe (String,String)
-> KateParser Token
parseExpression mbcontext = do
(lang,cont) <- maybe currentContext return mbcontext
result <- parseRules (lang,cont)
optional $ do eof
updateState $ \st -> st{ synStPrevChar = '\n' }
pEndLine
return result
startingState = SyntaxState {synStContexts = [("Haxe","normal")], synStLineNumber = 0, synStPrevChar = '\n', synStPrevNonspace = False, synStContinuation = False, synStCaseSensitive = True, synStKeywordCaseSensitive = True, synStCaptures = []}
pEndLine = do
updateState $ \st -> st{ synStPrevNonspace = False }
context <- currentContext
contexts <- synStContexts `fmap` getState
st <- getState
if length contexts >= 2
then case context of
_ | synStContinuation st -> updateState $ \st -> st{ synStContinuation = False }
("Haxe","normal") -> return ()
("Haxe","ModuleName") -> return ()
("Haxe","RawString") -> return ()
("Haxe","String") -> return ()
("Haxe","CommentLine") -> (popContext) >> pEndLine
("Haxe","CommentBlock") -> return ()
_ -> return ()
else return ()
withAttribute attr txt = do
when (null txt) $ fail "Parser matched no text"
updateState $ \st -> st { synStPrevChar = last txt
, synStPrevNonspace = synStPrevNonspace st || not (all isSpace txt) }
return (attr, txt)
list_keywords = Set.fromList $ words $ "break case cast catch class continue default else enum extends false for function if implements in inline interface new null override private public return static super switch this throw trace true try typedef untyped var while"
list_modules = Set.fromList $ words $ "package import"
list_types = Set.fromList $ words $ "Array Void Bool Int UInt Float Dynamic String List Error Unknown Type"
regex_'23if'28'5cs'2b'5cw'2b'29'3f = compileRegex True "#if(\\s+\\w+)?"
regex_'23'28else'7celseif'7cend'7cerror'29 = compileRegex True "#(else|elseif|end|error)"
regex_'5b'5cd'5d'5b'5cd'5d'2a'28'5c'2e'28'3f'21'5c'2e'29'5b'5cd'5d'2a'28'5beE'5d'5b'2d'2b'5d'3f'5b'5cd'5d'2b'29'3f'29 = compileRegex True "[\\d][\\d]*(\\.(?!\\.)[\\d]*([eE][-+]?[\\d]+)?)"
regex_'5c'2e'5b'5cd'5d'5b'5cd'5d'2a'28'5beE'5d'5b'2d'2b'5d'3f'5b'5cd'5d'2b'29'3f = compileRegex True "\\.[\\d][\\d]*([eE][-+]?[\\d]+)?"
regex_0'5bxX'5d'5b'5cda'2dfA'2dF'5d'2b = compileRegex True "0[xX][\\da-fA-F]+"
regex_'5cd'2b = compileRegex True "\\d+"
regex_'5b'5e'5cs'5cw'2e'3a'2c'5d = compileRegex True "[^\\s\\w.:,]"
regex_'5c'5c'28u'5b'5cda'2dfA'2dF'5d'7b4'7d'7cU'5b'5cda'2dfA'2dF'5d'7b8'7d'7c'26'5ba'2dzA'2dZ'5d'5cw'2b'3b'29 = compileRegex True "\\\\(u[\\da-fA-F]{4}|U[\\da-fA-F]{8}|&[a-zA-Z]\\w+;)"
parseRules ("Haxe","normal") =
(((pRegExpr regex_'23if'28'5cs'2b'5cw'2b'29'3f >>= withAttribute OtherTok) >>~ (popContext))
<|>
((pRegExpr regex_'23'28else'7celseif'7cend'7cerror'29 >>= withAttribute OtherTok) >>~ (popContext))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_keywords >>= withAttribute KeywordTok))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_modules >>= withAttribute KeywordTok) >>~ pushContext ("Haxe","ModuleName"))
<|>
((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~\\" list_types >>= withAttribute DataTypeTok))
<|>
((pDetectIdentifier >>= withAttribute NormalTok))
<|>
((pHlCStringChar >>= withAttribute NormalTok) >>~ (popContext))
<|>
((pDetectChar False '\'' >>= withAttribute StringTok) >>~ pushContext ("Haxe","RawString"))
<|>
((pDetectChar False '"' >>= withAttribute StringTok) >>~ pushContext ("Haxe","String"))
<|>
((pDetect2Chars False '/' '/' >>= withAttribute CommentTok) >>~ pushContext ("Haxe","CommentLine"))
<|>
((pDetect2Chars False '/' '*' >>= withAttribute CommentTok) >>~ pushContext ("Haxe","CommentBlock"))
<|>
((pDetectChar False '{' >>= withAttribute NormalTok))
<|>
((pDetectChar False '}' >>= withAttribute NormalTok))
<|>
((pString False "..." >>= withAttribute NormalTok) >>~ (popContext))
<|>
((pDetect2Chars False '.' '.' >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'5b'5cd'5d'5b'5cd'5d'2a'28'5c'2e'28'3f'21'5c'2e'29'5b'5cd'5d'2a'28'5beE'5d'5b'2d'2b'5d'3f'5b'5cd'5d'2b'29'3f'29 >>= withAttribute FloatTok) >>~ (popContext))
<|>
((pRegExpr regex_'5c'2e'5b'5cd'5d'5b'5cd'5d'2a'28'5beE'5d'5b'2d'2b'5d'3f'5b'5cd'5d'2b'29'3f >>= withAttribute FloatTok) >>~ (popContext))
<|>
((pRegExpr regex_0'5bxX'5d'5b'5cda'2dfA'2dF'5d'2b >>= withAttribute BaseNTok) >>~ (popContext))
<|>
((pRegExpr regex_'5cd'2b >>= withAttribute DecValTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Haxe","normal")) >> pDefault >>= withAttribute NormalTok))
parseRules ("Haxe","ModuleName") =
(((pDetect2Chars False '/' '/' >>= withAttribute CommentTok) >>~ pushContext ("Haxe","CommentLine"))
<|>
((pDetect2Chars False '/' '*' >>= withAttribute CommentTok) >>~ pushContext ("Haxe","CommentBlock"))
<|>
((pRegExpr regex_'5b'5e'5cs'5cw'2e'3a'2c'5d >>= withAttribute NormalTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Haxe","ModuleName")) >> pDefault >>= withAttribute NormalTok))
parseRules ("Haxe","RawString") =
(((pDetectChar False '\'' >>= withAttribute StringTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Haxe","RawString")) >> pDefault >>= withAttribute StringTok))
parseRules ("Haxe","String") =
(((pDetect2Chars False '\\' '"' >>= withAttribute StringTok))
<|>
((pDetectChar False '"' >>= withAttribute StringTok) >>~ (popContext))
<|>
((pHlCStringChar >>= withAttribute StringTok))
<|>
((pRegExpr regex_'5c'5c'28u'5b'5cda'2dfA'2dF'5d'7b4'7d'7cU'5b'5cda'2dfA'2dF'5d'7b8'7d'7c'26'5ba'2dzA'2dZ'5d'5cw'2b'3b'29 >>= withAttribute StringTok))
<|>
(currentContext >>= \x -> guard (x == ("Haxe","String")) >> pDefault >>= withAttribute StringTok))
parseRules ("Haxe","CommentLine") =
(currentContext >>= \x -> guard (x == ("Haxe","CommentLine")) >> pDefault >>= withAttribute CommentTok)
parseRules ("Haxe","CommentBlock") =
(((pDetect2Chars False '*' '/' >>= withAttribute CommentTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("Haxe","CommentBlock")) >> pDefault >>= withAttribute CommentTok))
parseRules x = parseRules ("Haxe","normal") <|> fail ("Unknown context" ++ show x)
| ambiata/highlighting-kate | Text/Highlighting/Kate/Syntax/Haxe.hs | gpl-2.0 | 7,374 | 0 | 29 | 1,114 | 1,914 | 1,017 | 897 | 127 | 9 |
{-# LANGUAGE MultiWayIf #-}
module Amoeba.GameLogic.Language.Parsing.WorldParser where
import Amoeba.GameLogic.Language.Parsing.Common
import Amoeba.GameLogic.Language.RawToken
import qualified Amoeba.GameLogic.Language.Scheme as S
import Amoeba.Middleware.Parsing.Facade as P
world :: GenParser Char st RawToken
world = do
string S.world >> many1 trueSpace
itemName <- stringConstant
lineEnd
rs <- properties
return $ WorldToken itemName rs
properties :: GenParser Char st [PropertyToken]
properties = many property
property :: GenParser Char st PropertyToken
property = do
identation 4
name <- identifier
if | name == S.width -> intProperty name
| name == S.height -> intProperty name
| name == S.defaultCell -> objectProperty name
| name == S.cells -> cellsProperty name
| otherwise -> fail $ "unknown property: " ++ name
{-
Uncomment this in case of GHC < 7.6 (no MultiWayIf support).
property :: GenParser Char st PropertyToken
property = do
identation 4
name <- identifier
chooseProperty name
where
chooseProperty name | name == S.width = intProperty name
| name == S.height = intProperty name
| name == S.defaultCell = objectProperty name
| name == S.cells = cellsProperty name
| otherwise = fail $ "unknown property: " ++ name
-}
intProperty :: String -> GenParser Char st PropertyToken
intProperty name = do
assignment
val <- integerConstant
lineEnd
return $ IntProperty name val
objectProperty :: String -> GenParser Char st PropertyToken
objectProperty name = do
assignment
o <- object
lineEnd
return $ ObjectProperty name o
object :: GenParser Char st RawToken
object = do
string S.object >> many1 trueSpace
objectName <- stringConstant
many trueSpace
playerName <- stringConstant
return $ ObjectToken objectName playerName
cellsProperty :: String -> GenParser Char st PropertyToken
cellsProperty name = do
cs <- assignment >> eol >> many1 cell
return $ CellsProperty name (concat cs)
cell :: GenParser Char st [PropertyToken]
cell = try singleCell <|> try multiCell <?> "cell"
multiCell :: GenParser Char st [PropertyToken]
multiCell = do
identation 8
o@(ObjectToken oName plName) <- object
trueSpaces
coords <- listOf intTuple2
lineEnd
return $ map (\c -> CellProperty S.cell c o) coords
singleCell :: GenParser Char st [PropertyToken]
singleCell = do
identation 8
coords <- intTuple2
trueSpaces >> char ':' >> trueSpaces
o <- object
lineEnd
return $ [CellProperty S.cell coords o]
| graninas/The-Amoeba-World | src/Amoeba/GameLogic/Language/Parsing/WorldParser.hs | gpl-3.0 | 2,779 | 0 | 12 | 736 | 672 | 323 | 349 | 65 | 5 |
module Murex.Lexer (
runLexer
, Token (..)
) where
import Import hiding ((<|>))
import Control.Monad.Either
import Murex.Syntax.Abstract (Literal(..))
import qualified Murex.Syntax.Abstract as A
import qualified Data.Char as C
import Text.Parsec ( Parsec, ParseError, SourceName, runParser
, parserZero, anyChar, satisfy, char, eof, oneOf
, try, (<?>), many1, between
, getPosition, getState)
import qualified Text.Parsec as P
import Text.Parsec.Error
import Language.Parse
type Lexer = Parsec String [Maybe Int]
--TODO Reader for config
--TODO Writer for unicodizing
--TODO keep in mind: later ability to colorize this stuff
data Token = Space
| Indent | Newline | Dedent | OpenParen | CloseParen
| OpenBrack | CloseBrack | OpenBrace | CloseBrace
| OpenInterp String | CloseInterp String
| InfixDot | Dot | Comma | Ellipsis | At
| Quote | Quasiquote | Unquote | Splice
| Name String | Label Label | Bind String
| Lit Literal
deriving (Eq)
runLexer :: SourceName -> String -> Either ParseError [Pos Token]
runLexer source input = runEither $ do
raw <- hoistEither $ runParser murex [] source input
    let checked = sanityCheck raw
    hoistEither $ postprocess checked
--TODO preprocess literate input (bird foot to space, blank out lines that don't start w/ birdfeet)
murex :: Lexer [Pos Token]
murex = between initialize finalize (many token)
where
initialize = (<?> "") $ do
push 0 *> skipBlankLines <* pop
P.optional (char '\n')
push =<< length <$> many (char ' ')
finalize = do
stackNull >>= flip when (fail "expecting dedent")
skipBlankLines >> eof
token :: Lexer (Pos Token)
token = choice [ whitespace
, literal --must come before open paren, before name
, opener
, closer
, punctuation
, quotation
, indentation
, indentMark
, label
, name
]
sanityCheck :: [Pos Token] -> [Pos Token]
sanityCheck xs = (filterDoubleSpaces . filterEndspaces) xs
where
filterEndspaces [] = []
filterEndspaces xs = case snd (head xs) of
Space -> die
Newline -> die
Indent -> die
Dedent -> die
        _ -> case snd (last xs) of
            Space -> die
            Newline -> die
            Indent -> die
            _ -> xs
filterDoubleSpaces [] = xs
filterDoubleSpaces ((_,Space):(_,Space):xs) = die
filterDoubleSpaces (x:xs) = filterDoubleSpaces xs
die = error $ "INTERNAL ERROR: sanity check fail\n" ++ show xs
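-- Turn dots that follow whitespace into InfixDot, check spacing around
-- names, labels, literals and '@', then strip the Space tokens.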
postprocess :: [Pos Token] -> Either ParseError [Pos Token]
postprocess input = go input
where
translateDots (s@(_,x):(pos,Dot):xs) | isSpacy x = s : (pos, InfixDot) : translateDots xs
where isSpacy = (`elem` [Space, Newline, Indent, Dedent])
translateDots (x:xs) = x : translateDots xs
translateDots [] = []
go [] = Right stripSpaces
go (x:[]) = Right stripSpaces
go ((_,Space):xs) = go xs
go ((posX, x):rest@((posY, y):_)) = case (needsSpacey x, isSpacey y) of
(Just True, True) -> go rest
(Just True, False) -> if isException x y
then go rest
else Left $ addErrorMessage (Expect "whitespace") $ newErrorMessage (SysUnExpect $ show y) posY
(Just False, True) -> if isException x y
then go rest
else Left $ addErrorMessage (UnExpect "whitespace") $ newErrorMessage (SysUnExpect $ show y) posY
(Just False, False) -> go rest
(Nothing, _) -> go rest
isSpacey x = x `elem` [Space, Indent, Newline, Dedent, CloseParen, CloseBrack, CloseBrace, Comma, Ellipsis]
needsSpacey (Name _) = Just True
needsSpacey (Label _) = Just True
needsSpacey (Lit _) = Just True
needsSpacey At = Just False
needsSpacey _ = Nothing
isException (Name _) Dot = True
isException _ _ = False
stripSpaces = filter ((/= Space) . snd) (translateDots input)
------ Core Combinators ------
whitespace :: Lexer (Pos Token)
whitespace = withPos (const Space <$> many1 space)
indentation :: Lexer (Pos Token)
indentation = withPos $ do
stackNull >>= flip when parserZero
expect <- peek >>= maybe parserZero return
found <- lookAhead leadingSpaces
case compare found expect of
GT -> leadingSpaces >> push found >> return Indent
EQ -> leadingSpaces >> isEof >>= \end ->
if end then parserZero else return Newline
LT -> (<?> "dedent") $ do
pop
end <- isEof
when (not end) $ do
stackNull >>= flip when (fail "dedent does not have corresponding indent")
expect' <- peek >>= maybe (error "indentation disable under enable") return
when (expect' < found) $ fail "dedent does not have corresponding indent"
return Dedent
indentMark :: Lexer (Pos Token)
indentMark = (<?> "indent") $ withPos $ do
maybe parserZero return =<< peek
string "\\\\"
lookAhead $ char '\n'
return Space
opener :: Lexer (Pos Token)
opener = withPos $ choice [ const OpenParen <$> char '('
, const OpenBrack <$> char '['
, const OpenBrace <$> char '{'
] <* disable
closer :: Lexer (Pos Token)
closer = withPos $ choice [ const CloseParen <$> char ')'
, const CloseBrack <$> char ']'
, const CloseBrace <$> char '}'
] <* popDisable
punctuation :: Lexer (Pos Token)
punctuation = withPos $ choice [ const Ellipsis <$> string ".."
, const Dot <$> char '.'
, const Comma <$> char ','
, const At <$> char '@'
]
quotation :: Lexer (Pos Token)
-- NOTE: I'm reserving all the quine corners, in case I decide to bracket instead of prefix quotation
quotation = (<?> "quotation") $ withPos $ choice [ const Quasiquote <$> char 'β'
, const Unquote <$> char 'β'
, const Splice <$> char 'β'
]
name :: Lexer (Pos Token)
name = (<?> "identifier") $ withPos $ do
bind <- P.option False (const True <$> char '?')
base <- bareName
primes <- many (char '\'')
return $ (if bind then Bind else Name) (base ++ primes)
label :: Lexer (Pos Token)
label = (<?> "label") $ withPos $ do
char '`'
Label <$> ((Left . fromIntegral <$> numLabel) <|> (Right <$> nameLabel))
where
nameLabel = bareName
numLabel = stringToInteger 10 <$> many2 (oneOf "123456789") (oneOf "0123456789")
bareName :: Lexer String
bareName = many2 (blacklistChar restrictedFromStartOfName) (blacklistChar restrictedFromName)
literal :: Lexer (Pos Token)
literal = withPos $ Lit <$> choice [ unitLit
, numLit
, charLit
, strLit
]
where
unitLit = const MurexUnit <$> string "()"
numLit = MurexNum <$> anyNumber
charLit = (<?> "character") $ MurexChar <$> between2 (char '\'') literalChar
strLit = (<?> "string") $ A.toMurexString . catMaybes <$> between2 (char '\"') (many strChar)
where
strChar = maybeLiteralChar <|> (Just <$> oneOf "\'\n")
--TODO string interpolation: needs nesting stack
comment :: Lexer ()
comment = blockComment P.<|> lineComment --block must come before line
where
lineComment = void $ char '#' >> anyChar `manyTill` newline
blockComment = oneBlock
where
oneBlock = void $ string "#{" >> inBlock `manyThru` string "}#"
inBlock = oneBlock P.<|> void anyChar
restrictedFromName :: Char -> Bool
restrictedFromName c = c `elem` "\"\'`\\#.,@()[]{}ββββ"
restrictedFromStartOfName :: Char -> Bool
restrictedFromStartOfName c = c `elem` "?0123456789" || restrictedFromName c
------ Helpers ------
leadingSpaces :: Lexer Int
leadingSpaces = newline >> length <$> many (char ' ')
withPos :: Lexer a -> Lexer (Pos a)
withPos lexer = (,) <$> getPosition <*> try lexer
push :: Int -> Lexer ()
push n = modifyState' (Just n:)
peek :: Lexer (Maybe Int)
peek = head <$> getState
pop :: Lexer ()
pop = maybe parserZero (const $ modifyState' tail) =<< peek
disable :: Lexer ()
disable = modifyState' (Nothing:)
popDisable :: Lexer ()
popDisable = maybe (modifyState' tail) (const parserZero) =<< peek
stackNull :: Lexer Bool
stackNull = null <$> getState
instance Show Token where
show Space = "whitespace"
show Indent = "indent"
show Newline = "newline"
show Dedent = "dedent"
show OpenParen = "`('"
show OpenBrack = "`['"
show OpenBrace = "`{'"
show CloseParen = "`)'"
show CloseBrack = "`]'"
show CloseBrace = "`}'"
show Dot = "dot"
show InfixDot = "infix dot"
show Comma = "comma"
show Ellipsis = "`..'"
show At = "`@'"
show Quote = error "not using Quote tokens"
show Quasiquote = "`β'"
show Unquote = "`β'"
show Splice = "`β'"
show (Name name) = "name (" ++ name ++ ")"
show (Bind name) = "bind (" ++ name ++ ")"
show (Label (Left i)) = "label (" ++ show i ++ ")"
show (Label (Right name)) = "label (" ++ name ++ ")"
show (Lit MurexUnit) = "literal (unit)"
show (Lit x) = "literal (" ++ show x ++ ")"
------ Basic Combinators ------
space :: Lexer ()
space = (<?> "whitespace") $ do
indentEnabled <- isJust <$> peek
let spaceParsers' = if indentEnabled then spaceParsers else simpleNewline:spaceParsers
choice spaceParsers'
where
spaceParsers = [ void $ oneOf " \t" --TODO maybe more inline whitespace
, void $ string "\\\n"
, comment
]
newline :: Lexer ()
newline = (simpleNewline >> skipBlankLines) P.<|> eof
skipBlankLines :: Lexer ()
skipBlankLines = (<?> "whitespace") $ maybe (return ()) (const go) =<< peek
where
go = isBlankLine >>= \blank -> if blank then blankLine >> go else return ()
isBlankLine = (lookAhead blankLine >> return True) P.<|> return False
blankLine = (many space >> simpleNewline) <|> (many1 space >> eof)
simpleNewline :: Lexer ()
simpleNewline = void (char '\n' <?> "newline")
modifyState' f = (P.putState $!) =<< f <$> P.getState
| Zankoku-Okuno/murex | Murex/Lexer.hs | gpl-3.0 | 10,617 | 0 | 21 | 3,203 | 3,357 | 1,740 | 1,617 | 232 | 17 |
{-# OPTIONS_GHC -Wall #-}
module LexerHelpers where
import qualified Data.Set
import Grammar
-- If word ends with ending, give back Just the word with the ending stripped
-- off. If word doesn't end with ending, give back Nothing.
getRootFrom :: String -> String -> Maybe String
getRootFrom ending word =
let
rEnding = reverse ending
rWord = reverse word
getRootFrom' [] root = Just root
getRootFrom' _ [] = Nothing
getRootFrom' (e:es) (w:ws)
| e == w = getRootFrom' es ws
| otherwise = Nothing
in
getRootFrom' rEnding rWord >>= (return . reverse)
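-- e.g. getRootFrom "ing" "walking" == Just "walk",
-- while getRootFrom "ing" "walk" == Nothing.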
addRule :: Node -> Rule -> Node
addRule (Node grammar rules next) newRule = Node grammar (newRule : rules) next
makeNode :: Data.Set.Set String -> (String -> Grammar) -> [Rule] ->
String -> [Node] -> [Node]
makeNode wordSet nodeType rules word next =
if Data.Set.member word wordSet
then [Node (nodeType word) rules next]
else []
| penguinland/nlp | LexerHelpers.hs | gpl-3.0 | 950 | 0 | 12 | 220 | 299 | 155 | 144 | 23 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Schema.Implementation where
import Data.String.Interpolate.IsString (i)
import Data.Text (Text)
import Data.Time.Clock (UTCTime)
import Database.Persist.Sql (Unique)
import Database.Persist.TH (persistUpperCase)
import qualified Database.Persist.TH as TH
import Pretty.Fields.Persistent
import Schema.Utils
(Entity, EntityDef, Int64, MonadSql, Transaction, (.=), (.>))
import qualified Schema.Utils as Utils
import Types
import Schema.Algorithm (AlgorithmId)
import qualified Schema.Implementation.V0 as V0
TH.share [TH.mkPersist TH.sqlSettings, TH.mkSave "schema"] [persistUpperCase|
Implementation
algorithmId AlgorithmId
name Text
prettyName Text Maybe
flags Text Maybe
type ImplType
timestamp UTCTime
UniqImpl algorithmId name
deriving Eq Show
|]
deriving instance Show (Unique Implementation)
instance PrettyFields (Entity Implementation) where
prettyFieldInfo = ("Id", idField ImplementationId) :|
[ ("Algorithm", namedIdField ImplementationAlgorithmId)
, ("Name", textField ImplementationName)
, ("Type", ImplementationType `fieldVia` prettyShow)
, ("Pretty Name", maybeTextField ImplementationPrettyName)
, ("Flags", maybeTextField ImplementationFlags)
, ("Timestamp", ImplementationTimestamp `fieldVia` prettyShow)
]
instance NamedEntity Implementation where
entityName = optionalPrettyName implementationPrettyName implementationName
migrations :: MonadSql m => Int64 -> Transaction m [EntityDef]
migrations = Utils.mkMigrationLookup
[ 0 .= V0.schema
, 26 .> schema $ do
Utils.executeSql [i|
ALTER TABLE "Implementation" ADD COLUMN "timestamp" TIMESTAMP
|]
Utils.executeSql [i|
REPLACE INTO "Implementation"
SELECT Implementation.id
, Implementation.algorithmId
, Implementation.name
, Implementation.prettyName
, Implementation.flags
, Implementation.type
, IFNULL(TimeStamp.minTime, strftime('%Y-%m-%dT%H:%M:%f', 'now'))
FROM Implementation
LEFT JOIN
( SELECT Run.implId, MIN(Run.timestamp) AS minTime
FROM Run
GROUP BY Run.implId
) AS TimeStamp
ON Implementation.id = TimeStamp.implId
|]
]
| merijn/GPU-benchmarks | benchmark-analysis/src/Schema/Implementation.hs | gpl-3.0 | 2,626 | 0 | 11 | 437 | 403 | 246 | 157 | 45 | 1 |
-- -----------------------------------------------------------------------
-- GOptions, utils for manipulating GNU options for haskell version of GNU core-utils.
module GOptions where
import Data.List
optionDelimiter :: Char
optionDelimiter = '-'
optionTerminator :: String
optionTerminator = "--"
paramaterDelimiter :: Char
paramaterDelimiter = '='
_optionParamaterDelimiter :: String
_optionParamaterDelimiter = ":"
data Option = Option {
helpText :: String
, flags :: Flags -- double pun?
, flagParam :: String
, value :: OptionValue
, paramaterEffect :: OptionEffect
}
instance Show Option where
show (Option _txt (Flags sFlg lFlg) _par val _eff) =
(show lFlg)++(show sFlg)++"\t ==> "++(show val)
data Options = Options [Option]
instance Show (Options) where
show (Options opts) = '\n':(concat (intersperse "\n" (map show opts)))
data OptionValue = BoolOpt Bool
| StringOpt String
| ListOpt [String]
| IntOpt Integer
| FloatOpt Float
| GnuSizeOpt GnuSize
| GnuRangeOpt GnuRange deriving Show
data OptionEffect = OptionEffect (Options -> String -> [String] -> (Options,[String]))
instance Show (OptionEffect) where
show (OptionEffect _effect) = "(\\x -> z)"
data Flags = Flags { short :: [String], long :: [String] } deriving Show
appendFlag :: Options -> String -> OptionValue -> Options
appendFlag opts str val = addFlag (appendValue jFlag val) (removeFlag (getFlagOrPrefix str) opts)
where jFlag = case (getFlag (getFlagOrPrefix str) opts) of
(Just f) -> f
Nothing -> (Option "<<ERROR>>" (Flags [] []) "" (BoolOpt False) (OptionEffect (\x _ u -> (x,u))))
setValue :: Option -> OptionValue -> Option
setValue opt val = opt{value = val}
getList :: Option -> [String]
getList opt = case value opt of
(ListOpt x) -> x
_ -> []
getBool :: Option -> Bool
getBool opt = case value opt of
(BoolOpt x) -> x
_ -> False
getString :: Option -> String
getString opt = case value opt of
(StringOpt x) -> x
_ -> ""
getInt :: Option -> Integer
getInt opt = case value opt of
(IntOpt x) -> x
_ -> 0
getFloat :: Option -> Float
getFloat opt = case value opt of
(FloatOpt x) -> x
_ -> 0.0
getSize :: Option -> GnuSize
getSize opt = case value opt of
(GnuSizeOpt x) -> x
_ -> GnuSize NoPrefix 0 NoUnits
getRange :: Option -> GnuRange
getRange opt = case value opt of
(GnuRangeOpt x) -> x
_ -> GnuRange 0 0
getTargets :: Options -> [String] --oh no, what have I done
getTargets opts = case (getFlag "--" opts) of
(Just targets) -> case value targets of
(ListOpt targetList) -> targetList
_ -> []
_ -> []
appendValue :: Option -> OptionValue -> Option
appendValue opt@(Option _ _ _ (ListOpt vals) _) (StringOpt val) = opt{value = ListOpt (vals++[val])}
appendValue opt _ = opt
addFlag :: Option -> Options -> Options
addFlag opt (Options opts) = Options (opt:opts)
removeFlag :: String -> Options -> Options
removeFlag str (Options opts) = Options (filter (\x -> not (isFlag (getFlagOrPrefix str) x)) opts)
getFlag :: String -> Options -> Maybe Option
getFlag str (Options opts) | length matchOpts == 0 = Nothing
| length matchOpts == 1 = Just (head matchOpts)
| otherwise = Nothing
where matchOpts = filter (\x -> (isFlag (str) x)) opts
isFlag :: String -> Option -> Bool
isFlag "" _ = False
isFlag str option = if elem (getFlagOrPrefix str) (long (flags option))
then True
else elem (getFlagOrPrefix str) (short (flags option))
getFlagOrPrefix :: String -> String
getFlagOrPrefix str | elem '=' str = ((takeWhile (/= '=') str))
| otherwise = str
-- FILES -----------------------------------------------------------------
parseOptionFileName :: [String] -> (String,[String])
parseOptionFileName (x1:x2:xs) | length x1 == 1 = (x2,xs)--single letter
| elem '=' x1 = (drop 1 (dropWhile (/= paramaterDelimiter) x1),(x2:xs)) -- -flag=<value>
| otherwise = (x2,xs)--long flag with space
parseOptionFileName x = ("",x)
-- -- RANGE --------------------------------------------------------------
data GnuRange = GnuRange { min::Integer, max::Integer } deriving Show
-- -- TIME ---------------------------------------------------------------
data GnuTime = GnuTime { units::Float, unitType::TimeSuffix }
instance Show GnuTime where
show (GnuTime unit suffix) = (show unit)++(show suffix)
instance Read GnuTime where
readsPrec _ (x) = do (num,xx) <- readsPrec 0 x :: [(Float,String)]
(suff,xxxx) <- readsPrec 0 xx :: [(TimeSuffix,String)]
[((GnuTime num suff),xxxx)]
data TimeSuffix = Seconds | Minutes | Hours | Days | NoTimeSuffix
instance Show TimeSuffix where
show Seconds = "s"
show Hours = "h"
show Minutes = "m"
show Days = "d"
show NoTimeSuffix = ""
instance Read TimeSuffix where
readsPrec _ ('s':xs) = [(Seconds,xs)]
readsPrec _ ('h':xs) = [(Hours,xs)]
readsPrec _ ('m':xs) = [(Minutes,xs)]
readsPrec _ ('d':xs) = [(Days,xs)]
readsPrec _ xs = [(NoTimeSuffix,xs)]
secondsInMinutes, secondsInHours, secondsInDays :: Float
secondsInMinutes = 60
secondsInHours = 60*secondsInMinutes
secondsInDays = 24*secondsInHours
timeToFloat :: GnuTime -> Float
timeToFloat :: GnuTime -> Float
timeToFloat (GnuTime unit Seconds) = unit
timeToFloat (GnuTime unit Minutes) = unit * secondsInMinutes
timeToFloat (GnuTime unit Hours) = unit * secondsInHours
timeToFloat (GnuTime unit Days) = unit * secondsInDays
timeToFloat (GnuTime unit NoTimeSuffix) = unit
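-- e.g. (read "2h" :: GnuTime) is GnuTime 2.0 Hours, and timeToFloat of it
-- is 7200 (seconds).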
-- -- SIZE ---------------------------------------------------------------
data GnuSize = GnuSize Prefix Integer Units
data Prefix = NoPrefix | Extend | Reduce | AtMost | AtLeast | RoundDown | RoundUp
data Units = Kilo UnitType
| Mega UnitType
| Giga UnitType
| Tera UnitType
| Peta UnitType
| Eta UnitType
| Zeta UnitType
| Yota UnitType
| NoUnits
data UnitType = I024 | Bytes -- 1024 | 1000
instance Show Prefix where
show NoPrefix = ""
show Extend = "+"
show Reduce = "-"
show AtMost = "<"
show AtLeast = ">"
show RoundDown = "/"
show RoundUp = "%"
instance Show UnitType where
show I024 = ""
show Bytes = "B"
instance Show Units where
show (Kilo x) = "K"++(show x)
show (Mega x) = "M"++(show x)
show (Giga x) = "G"++(show x)
show (Tera x) = "T"++(show x)
show (Peta x) = "P"++(show x)
show (Eta x) = "E"++(show x)
show (Zeta x) = "Z"++(show x)
show (Yota x) = "Y"++(show x)
show (NoUnits) = ""
instance Show GnuSize where
show (GnuSize pre num unit) = (show pre)++(show num)++(show unit)
instance Read Prefix where
readsPrec _ ('+':xs) = [(Extend,xs)]
readsPrec _ ('-':xs) = [(Reduce,xs)]
readsPrec _ ('<':xs) = [(AtMost,xs)]
readsPrec _ ('>':xs) = [(AtLeast,xs)]
readsPrec _ ('/':xs) = [(RoundDown,xs)]
readsPrec _ ('%':xs) = [(RoundUp,xs)]
readsPrec _ (xs) = [(NoPrefix,xs)]
instance Read Units where
readsPrec _ ("K") = [(Kilo I024,"")]
readsPrec _ ('K':u:xs) | u == 'B'= [(Kilo Bytes,"")]
| otherwise = [(Kilo I024,(u:xs))]
readsPrec _ ("M") = [(Mega I024,"")]
readsPrec _ ('M':u:xs) | u == 'B'= [(Mega Bytes,"")]
| otherwise = [(Mega I024,(u:xs))]
readsPrec _ ("G") = [(Giga I024,"")]
readsPrec _ ('G':u:xs) | u == 'B'= [(Giga Bytes,"")]
| otherwise = [(Giga I024,(u:xs))]
readsPrec _ ("T") = [(Tera I024,"")]
readsPrec _ ('T':u:xs) | u == 'B'= [(Tera Bytes,"")]
| otherwise = [(Tera I024,(u:xs))]
readsPrec _ ("P") = [(Peta I024,"")]
readsPrec _ ('P':u:xs) | u == 'B'= [(Peta Bytes,"")]
| otherwise = [(Peta I024,(u:xs))]
readsPrec _ ("E") = [(Eta I024,"")] --ezy
readsPrec _ ('E':u:xs) | u == 'B'= [(Eta Bytes,"")]
| otherwise = [(Eta I024,(u:xs))]
readsPrec _ ("Z") = [(Zeta I024,"")]
readsPrec _ ('Z':u:xs) | u == 'B'= [(Zeta Bytes,"")]
| otherwise = [(Zeta I024,(u:xs))]
readsPrec _ ("Y") = [(Yota I024,"")]
readsPrec _ ('Y':u:xs) | u == 'B'= [(Yota Bytes,"")]
| otherwise = [(Yota I024,(u:xs))]
readsPrec _ xs = [(NoUnits,xs)]
instance Read GnuSize where
readsPrec _ (x) = do (pre,xx) <- readsPrec 0 x :: [(Prefix,String)]
(num,xxx) <- readsPrec 0 xx :: [(Integer,String)]
(unit,xxxx) <- readsPrec 0 xxx :: [(Units,String)]
[((GnuSize pre num unit),xxxx)]
calcGnuSize :: GnuSize -> Integer
calcGnuSize size = case size of
(GnuSize _ n NoUnits) -> n
(GnuSize _ n (Kilo Bytes)) -> n*(10^(3::Integer))
(GnuSize _ n (Kilo I024)) -> n*(2^(10::Integer))
(GnuSize _ n (Mega Bytes)) -> n*(10^(6::Integer))
(GnuSize _ n (Mega I024)) -> n*(2^(20::Integer))
(GnuSize _ n (Giga Bytes)) -> n*(10^(9::Integer))
(GnuSize _ n (Giga I024)) -> n*(2^(30::Integer))
(GnuSize _ n (Tera Bytes)) -> n*(10^(12::Integer))
(GnuSize _ n (Tera I024)) -> n*(2^(40::Integer))
(GnuSize _ n (Peta Bytes)) -> n*(10^(15::Integer))
(GnuSize _ n (Peta I024)) -> n*(2^(50::Integer))
(GnuSize _ n (Eta Bytes)) -> n*(10^(18::Integer))
(GnuSize _ n (Eta I024)) -> n*(2^(60::Integer))
(GnuSize _ n (Zeta Bytes)) -> n*(10^(21::Integer))
(GnuSize _ n (Zeta I024)) -> n*(2^(70::Integer))
(GnuSize _ n (Yota Bytes)) -> n*(10^(24::Integer))
(GnuSize _ n (Yota I024)) -> n*(2^(80::Integer))
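-- A minimal usage sketch (added for illustration; 'exampleSizeInBytes' is a
-- hypothetical name, not part of the original module). It parses a GNU-style
-- size string with the Read instances above and converts it to bytes.
exampleSizeInBytes :: Integer
exampleSizeInBytes = calcGnuSize (read "+10MB")  -- Extend prefix, 10 * 10^6 bytes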
parseOptionSize :: [String] -> (GnuSize,[String])
parseOptionSize (x) = (size,(tail trimmedText))
where trimmedText = trimLeading x
parseResults = readsPrec 0 (head trimmedText)
(size,_) = head parseResults
trimLeading :: [String] -> [String]
trimLeading (x1:xs) | length x1 == 1 = ([head xs]++tail xs)
| elem '=' x1 = [drop 1 (dropWhile (/= paramaterDelimiter) x1)]++(xs)
| otherwise = xs
trimLeading [] = []
| PuZZleDucK/Hls | GOptions.hs | gpl-3.0 | 10,035 | 0 | 16 | 2,412 | 4,448 | 2,380 | 2,068 | 233 | 17 |
{-
teafree, a Haskell utility for tea addicts
Copyright (C) 2013 Fabien Dubosson <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import System.Console.CmdArgs
import System.Exit (exitSuccess, exitFailure)
import Teafree.Core.Config
import Teafree.Core.Environment
import Teafree.Core.Monad
import Teafree.Core.TeafreeError
import Teafree.Core.Version
import qualified Teafree.Command.Info as CI
import qualified Teafree.Command.List as CL
import qualified Teafree.Command.Prepare as CP
{- Teafree available modes -}
data TeafreeMode = List {what :: String}
| Info
| Prepare
deriving ( Data, Typeable, Show, Eq)
{- Mode to list items -}
list :: TeafreeMode
list = List {what = def &= opt "teas" &= typ "WHAT" &= argPos 0}
&= help "List items, where 'WHAT' is either 'teas' or 'families'"
{- Mode to get informations about a tea -}
info :: TeafreeMode
info = Info
&= help "Ask to chose a tea and give info about it."
{- Mode to prepare a tea -}
prepare :: TeafreeMode
prepare = Prepare
&= help "Ask to chose a tea and time it."
{- One mode to rule them all,
One mode to find them,
One mode to bring them all
and in the darkness bind them -}
teafree :: Mode (CmdArgs TeafreeMode)
teafree = cmdArgsMode $ modes [list, info, prepare]
&= program "teafree"
&= summary "A Haskell utility for tea addicts"
&= helpArg [explicit, name "help", name "h"]
&= versionArg [explicit, name "version", name "v", summary release]
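{- Example invocations (added for illustration; cmdargs derives mode names
   from the constructor names, so these are the expected spellings):
     teafree list teas
     teafree list families
     teafree info
     teafree prepare -}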
{- Run a specific mode -}
runMode :: TeafreeMode -> Teafree ()
runMode (List w) = CL.printList w
runMode Info = CI.info
runMode Prepare = CP.prepare
{- Entry point of the application, in case of you don't already know -}
main :: IO ()
main = do
m <- cmdArgsRun teafree
e <- runTeafree getEnvironment defaultEnvironment >>= select
runTeafree (runMode m) e >>= end
select :: Either TeafreeError Environment -> IO Environment
select (Left e) = putStrLn (getErrorMsg e) >> exitFailure
select (Right e) = return e
{- End the program properly, by verifying error messages -}
end :: Either TeafreeError () -> IO ()
end (Left e) = putStrLn (getErrorMsg e) >> exitFailure
end (Right _) = exitSuccess
| StreakyCobra/teafree | src/teafree.hs | gpl-3.0 | 2,915 | 0 | 11 | 606 | 544 | 292 | 252 | 46 | 1 |
{-
Copyright (C) 2014 Aner Lucero
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Main where
import System.Random
import Data.List
circuits :: Eq a => [a] -> [[a]]
circuits [] = []
circuits [x] = [[x]]
circuits (x:xs) = concatMap (\c -> map (c:) $ circuits (coset c (x:xs))) (x:xs)
where coset l = filter (/=l)
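-- Illustrative sketch (added; 'exampleCircuitCount' is not part of the
-- original program): the brute-force enumeration above yields one circuit per
-- ordering of the input, so three distinct elements give 3! = 6 candidates.
exampleCircuitCount :: Int
exampleCircuitCount = length (circuits [1, 2, 3 :: Int])  -- == 6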
type Point = (Double, Double)
distance :: Point -> Point -> Double
distance (x,y) (xx,yy) = sqrt $ ((x-xx)**2) + ((y-yy)**2)
travelLenght :: [Point] -> (Double, [Double], [Point])
travelLenght [] = (0,[],[])
travelLenght (x:xs) = (sum distances, distances, x : xs)
where
distances = zipWith distance (x:xs) (xs ++ [x])
nRandomPoints :: Int -> IO [Point]
nRandomPoints 0 = return []
nRandomPoints n = do
a <- r
b <- r
m <- nRandomPoints (n-1)
return $ (a,b) : m
where r = randomIO :: IO Double
normalizePoints :: [Point] -> [Point]
normalizePoints ps =
map normalize ps
where
normalize (x,y) = (x/maxx, y/maxy)
maxx = maximum $ map fst ps
maxy = maximum $ map snd ps
main :: IO ()
main =
nRandomPoints 4 >>= \points ->
print $ minimumBy (\(x,_,_) (xx,_,_) -> compare x xx) $
map travelLenght $ circuits $ normalizePoints points
| argent0/haskell-learnroad | SolderingRobot/Main.hs | gpl-3.0 | 1,742 | 8 | 14 | 329 | 621 | 338 | 283 | 34 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.UserProFiles.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a user profile. This method returns the following error codes: *
-- \`PERMISSION_DENIED\` if the requesting user is not permitted to access
-- this user profile, if no profile exists with the requested ID, or for
-- access errors.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.userProfiles.get@.
module Network.Google.Resource.Classroom.UserProFiles.Get
(
-- * REST Resource
UserProFilesGetResource
-- * Creating a Request
, userProFilesGet
, UserProFilesGet
-- * Request Lenses
, upfgXgafv
, upfgUploadProtocol
, upfgPp
, upfgAccessToken
, upfgUploadType
, upfgUserId
, upfgBearerToken
, upfgCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.userProfiles.get@ method which the
-- 'UserProFilesGet' request conforms to.
type UserProFilesGetResource =
"v1" :>
"userProfiles" :>
Capture "userId" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] UserProFile
-- | Returns a user profile. This method returns the following error codes: *
-- \`PERMISSION_DENIED\` if the requesting user is not permitted to access
-- this user profile, if no profile exists with the requested ID, or for
-- access errors.
--
-- /See:/ 'userProFilesGet' smart constructor.
data UserProFilesGet = UserProFilesGet'
{ _upfgXgafv :: !(Maybe Text)
, _upfgUploadProtocol :: !(Maybe Text)
, _upfgPp :: !Bool
, _upfgAccessToken :: !(Maybe Text)
, _upfgUploadType :: !(Maybe Text)
, _upfgUserId :: !Text
, _upfgBearerToken :: !(Maybe Text)
, _upfgCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UserProFilesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'upfgXgafv'
--
-- * 'upfgUploadProtocol'
--
-- * 'upfgPp'
--
-- * 'upfgAccessToken'
--
-- * 'upfgUploadType'
--
-- * 'upfgUserId'
--
-- * 'upfgBearerToken'
--
-- * 'upfgCallback'
userProFilesGet
:: Text -- ^ 'upfgUserId'
-> UserProFilesGet
userProFilesGet pUpfgUserId_ =
UserProFilesGet'
{ _upfgXgafv = Nothing
, _upfgUploadProtocol = Nothing
, _upfgPp = True
, _upfgAccessToken = Nothing
, _upfgUploadType = Nothing
, _upfgUserId = pUpfgUserId_
, _upfgBearerToken = Nothing
, _upfgCallback = Nothing
}
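-- Illustrative sketch (added; not part of the generated module): a request
-- for the special "me" identifier can be built with the smart constructor
-- above, e.g. @userProFilesGet "me"@, and optional fields adjusted via the
-- lenses below. Executing the request is left to the surrounding gogol
-- runtime and is not shown here.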
-- | V1 error format.
upfgXgafv :: Lens' UserProFilesGet (Maybe Text)
upfgXgafv
= lens _upfgXgafv (\ s a -> s{_upfgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
upfgUploadProtocol :: Lens' UserProFilesGet (Maybe Text)
upfgUploadProtocol
= lens _upfgUploadProtocol
(\ s a -> s{_upfgUploadProtocol = a})
-- | Pretty-print response.
upfgPp :: Lens' UserProFilesGet Bool
upfgPp = lens _upfgPp (\ s a -> s{_upfgPp = a})
-- | OAuth access token.
upfgAccessToken :: Lens' UserProFilesGet (Maybe Text)
upfgAccessToken
= lens _upfgAccessToken
(\ s a -> s{_upfgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
upfgUploadType :: Lens' UserProFilesGet (Maybe Text)
upfgUploadType
= lens _upfgUploadType
(\ s a -> s{_upfgUploadType = a})
-- | Identifier of the profile to return. The identifier can be one of the
-- following: * the numeric identifier for the user * the email address of
-- the user * the string literal \`\"me\"\`, indicating the requesting user
upfgUserId :: Lens' UserProFilesGet Text
upfgUserId
= lens _upfgUserId (\ s a -> s{_upfgUserId = a})
-- | OAuth bearer token.
upfgBearerToken :: Lens' UserProFilesGet (Maybe Text)
upfgBearerToken
= lens _upfgBearerToken
(\ s a -> s{_upfgBearerToken = a})
-- | JSONP
upfgCallback :: Lens' UserProFilesGet (Maybe Text)
upfgCallback
= lens _upfgCallback (\ s a -> s{_upfgCallback = a})
instance GoogleRequest UserProFilesGet where
type Rs UserProFilesGet = UserProFile
type Scopes UserProFilesGet =
'["https://www.googleapis.com/auth/classroom.profile.emails",
"https://www.googleapis.com/auth/classroom.profile.photos",
"https://www.googleapis.com/auth/classroom.rosters",
"https://www.googleapis.com/auth/classroom.rosters.readonly"]
requestClient UserProFilesGet'{..}
= go _upfgUserId _upfgXgafv _upfgUploadProtocol
(Just _upfgPp)
_upfgAccessToken
_upfgUploadType
_upfgBearerToken
_upfgCallback
(Just AltJSON)
classroomService
where go
= buildClient
(Proxy :: Proxy UserProFilesGetResource)
mempty
| rueshyna/gogol | gogol-classroom/gen/Network/Google/Resource/Classroom/UserProFiles/Get.hs | mpl-2.0 | 6,003 | 0 | 18 | 1,449 | 869 | 508 | 361 | 125 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleContexts, FlexibleInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses, RecursiveDo, TypeFamilies, OverloadedStrings, RecordWildCards,UndecidableInstances, PackageImports, TemplateHaskell #-}
module Graphics.Diagrams.Path where
import Graphics.Diagrams.Core
import Graphics.Diagrams.Point
import Data.Foldable
import Graphics.Typography.Geometry.Bezier
import Data.List (sort)
import Data.Maybe (listToMaybe)
import Prelude hiding (sum,mapM_,mapM,concatMap,maximum,minimum,Num(..),(/))
import qualified Data.Vector.Unboxed as V
import Algebra.Polynomials.Bernstein (restriction,Bernsteinp(..))
import Control.Lens (over, set, view)
import Control.Monad.Reader (local)
import Algebra.Classes
toBeziers :: FrozenPath -> [Curve]
toBeziers EmptyPath = []
toBeziers (Path start ss) | not (null ss) &&
isCycle (last ss) = toBeziers' start (init ss ++ [StraightTo start])
| otherwise = toBeziers' start ss
curveSegment :: FrozenPoint
-> FrozenPoint -> FrozenPoint -> FrozenPoint -> Curve
curveSegment (Point xa ya) (Point xb yb) (Point xc yc) (Point xd yd) = bezier3 xa ya xb yb xc yc xd yd
lineSegment :: Point' Double -> Point' Double -> Curve
lineSegment (Point xa ya) (Point xb yb) = line xa ya xb yb
-- | Convert a path (start point plus segments) into a list of Bézier curves
toBeziers' :: FrozenPoint -> [Frozen Segment] -> [Curve]
toBeziers' _ [] = []
toBeziers' start (StraightTo next:ss) = curveSegment start mid mid next : toBeziers' next ss
where mid = avg [start, next]
toBeziers' p (CurveTo c d q:ss) = curveSegment p c d q : toBeziers' q ss
-- | Convert a list of Bézier curves back into a Path
fromBeziers :: [Curve] -> FrozenPath
fromBeziers [] = EmptyPath
fromBeziers (Bezier cx cy t0 t1:bs) = case map toPt $ V.foldr (:) [] cxy of
[p,c,d,q] -> Path p (CurveTo c d q:rest)
[p,q] -> Path p (StraightTo q:rest)
where [cx',cy'] = map (\c -> coefs $ restriction c t0 t1) [cx,cy]
cxy = V.zip cx' cy'
toPt (x,y) = Point x y
rest = pathSegments (fromBeziers bs)
pathSegments :: Path' t -> [Segment t]
pathSegments EmptyPath = []
pathSegments (Path _ ss) = ss
isCycle :: Segment t -> Bool
isCycle Cycle = True
isCycle _ = False
-- | @clipOne c0 cs@ returns the part of @c0@ from its start to the first
-- point where it intersects any element of @cs@.
clipOne :: Curve -> [Curve] -> Maybe Curve
clipOne b cutter = fmap firstPart $ listToMaybe $ sort $ concatMap (inter b) cutter
where firstPart t = fst $ splitBezier b t
splitBezier (Bezier cx cy t0 t1) (u,v,_,_) = (Bezier cx cy t0 u, Bezier cx cy v t1)
-- | @cutAfter path area@ cuts the path after its first intersection with the @area@.
cutAfter', cutBefore' :: [Curve] -> [Curve] -> [Curve]
cutAfter' [] _cutter = []
cutAfter' (b:bs) cutter = case clipOne b cutter of
Nothing -> b:cutAfter' bs cutter
Just b' -> [b']
-- | Reverse a sequence of Bézier curves
revBeziers :: [Curve] -> [Curve]
revBeziers = reverse . map rev
where rev (Bezier cx cy t0 t1) = (Bezier (revBernstein cx) (revBernstein cy) (1-t1) (1-t0))
revBernstein (Bernsteinp n c) = Bernsteinp n (V.reverse c)
cutBefore' pth area = revBeziers $ cutAfter' (revBeziers pth) area
onBeziers :: ([Curve] -> [Curve] -> [Curve])
-> FrozenPath -> FrozenPath -> FrozenPath
onBeziers op p' q' = fromBeziers $ op (toBeziers p') (toBeziers q')
cutAfter :: FrozenPath -> FrozenPath -> FrozenPath
cutAfter = onBeziers cutAfter'
cutBefore :: FrozenPath -> FrozenPath -> FrozenPath
cutBefore = onBeziers cutBefore'
-----------------
-- Paths
type Path = Path' Expr
polyline :: [Point] -> Path
polyline [] = EmptyPath
polyline (x:xs) = Path x (map StraightTo xs)
polygon :: [Point] -> Path
polygon [] = EmptyPath
polygon (x:xs) = Path x (map StraightTo xs ++ [Cycle])
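-- Note (added): 'polyline' joins the given points with straight segments,
-- while 'polygon' additionally closes the figure with a 'Cycle'; for example
-- @polygon [a,b,c]@ is @Path a [StraightTo b, StraightTo c, Cycle]@.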
-- | Circle approximated with 4 cubic bezier curves
circlePath :: Point -> Expr -> Path
circlePath center r =
Path (pt r zero)
[CurveTo (pt r k) (pt k r) (pt zero r),
CurveTo (pt (negate k) r) (pt (negate r) k) (pt (negate r) zero),
CurveTo (pt (negate r) (negate k)) (pt (negate k) (negate r)) (pt zero (negate r)),
CurveTo (pt k (negate r)) (pt r (negate k)) (pt r zero),
Cycle]
where k1 :: Double
k1 = fromInteger 4 * (sqrt (fromInteger 2) - (fromInteger 1)) / fromInteger 3
k = k1 *^ r
pt x y = center + (Point x y)
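-- Note (added): the factor @k1 = 4/3 * (sqrt 2 - 1) ~ 0.5523@ used above is
-- the standard control-point offset for approximating a quarter circle of
-- radius r by a single cubic Bézier segment; four such segments make up the
-- full circle.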
roundedRectanglePath :: Expr -> Point -> Point -> Path
roundedRectanglePath r (Point x1 y1) (Point x2 y2) =
Path (Point x1 (y1-r))
[
CurveTo (Point x1 (y1-k) ) (Point (x1+k) y1) (Point (x1+r) y1),
StraightTo (Point (x2-r) y1),
CurveTo (Point (x2-k) y1) (Point x2 (y1-k)) (Point (x2) (y1-r)),
StraightTo (Point (x2) (y2+r)),
CurveTo (Point x2 (y2+k)) (Point (x2-k) y2) (Point (x2-r) y2),
StraightTo (Point (x1+r) (y2)),
CurveTo (Point (x1+k) y2) (Point (x1) (y2+k)) (Point x1 (y2+r)),
Cycle
]
where k1 :: Double
k1 = fromInteger 4 * (sqrt (fromInteger 2) - (fromInteger 1)) / fromInteger 3
k0 = k1 *^ r
k = r - k0
path :: Monad m => Path -> Diagram lab m ()
path p = do
options <- view diaPathOptions
tracePath' <- view (diaBackend . tracePath)
freeze p (tracePath' options)
frozenPath' :: Monad m => FrozenPath -> Diagram lab m ()
frozenPath' p = do
options <- view diaPathOptions
tracePath' <- view (diaBackend . tracePath)
freeze [] $ \_ -> tracePath' options p
stroke :: Monad m => Color -> Diagram lab m a -> Diagram lab m a
stroke color = using (outline color)
draw :: Monad m => Diagram lab m a -> Diagram lab m a
draw = stroke "black"
noDraw :: Monad m => Diagram lab m a -> Diagram lab m a
noDraw = using (set drawColor Nothing . set fillColor Nothing)
noOutline :: PathOptions -> PathOptions
noOutline = set drawColor Nothing
outline :: Color -> PathOptions -> PathOptions
outline color = set drawColor (Just color)
fill :: Color -> PathOptions -> PathOptions
fill color = set fillColor (Just color)
zigzagDecoration :: PathOptions -> PathOptions
zigzagDecoration = set decoration (Decoration "zigzag")
using :: Monad m => (PathOptions -> PathOptions) -> Diagram lab m a -> Diagram lab m a
using f = local (over diaPathOptions f)
ultraThin, veryThin, thin, semiThick, thick, veryThick, ultraThick :: Constant
ultraThin = 0.1
veryThin = 0.2
thin = 0.4
semiThick = 0.6
thick = 0.8
veryThick = 1.2
ultraThick = 1.6
solid, dotted, denselyDotted, looselyDotted, dashed, denselyDashed,
looselyDashed, dashDotted, denselyDashdotted, looselyDashdotted :: PathOptions -> PathOptions
solid o@PathOptions{..} = o { _dashPattern = [] }
dotted o@PathOptions{..} = o { _dashPattern = [(_lineWidth,2)] }
denselyDotted o@PathOptions{..} = o { _dashPattern = [(_lineWidth, 1)] }
looselyDotted o@PathOptions{..} = o { _dashPattern = [(_lineWidth, 4)] }
dashed o@PathOptions{..} = o { _dashPattern = [(3, 3)] }
denselyDashed o@PathOptions{..} = o { _dashPattern = [(3, 2)] }
looselyDashed o@PathOptions{..} = o { _dashPattern = [(3, 6)] }
dashDotted o@PathOptions{..} = o { _dashPattern = [(3, 2), (_lineWidth, 2)] }
denselyDashdotted o@PathOptions{..} = o { _dashPattern = [(3, 1), (_lineWidth, 1)] }
looselyDashdotted o@PathOptions{..} = o { _dashPattern = [(3, 4), (_lineWidth, 4)] }
| jyp/lp-diagrams | Graphics/Diagrams/Path.hs | agpl-3.0 | 7,292 | 0 | 13 | 1,492 | 3,064 | 1,629 | 1,435 | 146 | 2 |
-- construct hbal trees with n nodes (rather than d depth as in Pr59)
module Pr60 where
import Tree (Tree(Branch, Empty), nnodes)
import Pr59 (is_hbal, all_trees_and_smaller)
hbal :: a -> Int -> [Tree a]
hbal x n = filter is_hbal [t | t <- all_trees_and_smaller n x, nnodes t == n]
| ekalosak/haskell-practice | Pr60.hs | lgpl-3.0 | 284 | 0 | 9 | 53 | 94 | 53 | 41 | 5 | 1 |
import Diff
import System.Environment
main = do
args <- getArgs
a <- readFile (head args)
b <- readFile (head $ tail args)
let zz = diff (lines a) (lines b)
putStr $ concatMap show zz
| sourcewave/pg-schema-diff | TestDiff.hs | unlicense | 197 | 0 | 12 | 48 | 95 | 44 | 51 | 8 | 1 |
import System.Exit
main :: IO ()
main = exitWith ExitSuccess | iustin/perf-null | haskell/null.hs | apache-2.0 | 61 | 0 | 6 | 10 | 24 | 12 | 12 | 3 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
--
-- OAuth2 plugin for http://upcase.com
--
-- * Authenticates against upcase
-- * Uses upcase user id as credentials identifier
-- * Returns first_name, last_name, and email as extras
--
module Yesod.Auth.OAuth2.Upcase
( oauth2Upcase
, module Yesod.Auth.OAuth2
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>), (<*>))
#endif
import Control.Monad (mzero)
import Data.Aeson
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import Yesod.Auth
import Yesod.Auth.OAuth2
import qualified Data.Text as T
data UpcaseUser = UpcaseUser
{ upcaseUserId :: Int
, upcaseUserFirstName :: Text
, upcaseUserLastName :: Text
, upcaseUserEmail :: Text
}
instance FromJSON UpcaseUser where
parseJSON (Object o) = UpcaseUser
<$> o .: "id"
<*> o .: "first_name"
<*> o .: "last_name"
<*> o .: "email"
parseJSON _ = mzero
data UpcaseResponse = UpcaseResponse UpcaseUser
instance FromJSON UpcaseResponse where
parseJSON (Object o) = UpcaseResponse
<$> o .: "user"
parseJSON _ = mzero
oauth2Upcase :: YesodAuth m
=> Text -- ^ Client ID
-> Text -- ^ Client Secret
-> AuthPlugin m
oauth2Upcase clientId clientSecret = authOAuth2 "upcase"
OAuth2
{ oauthClientId = encodeUtf8 clientId
, oauthClientSecret = encodeUtf8 clientSecret
, oauthOAuthorizeEndpoint = "http://upcase.com/oauth/authorize"
, oauthAccessTokenEndpoint = "http://upcase.com/oauth/token"
, oauthCallback = Nothing
}
$ fromProfileURL "upcase" "http://upcase.com/api/v1/me.json"
$ \user -> Creds
{ credsPlugin = "upcase"
, credsIdent = T.pack $ show $ upcaseUserId user
, credsExtra =
[ ("first_name", upcaseUserFirstName user)
, ("last_name", upcaseUserLastName user)
, ("email", upcaseUserEmail user)
]
}
| jasonzoladz/yesod-auth-oauth2 | Yesod/Auth/OAuth2/Upcase.hs | bsd-2-clause | 2,018 | 0 | 13 | 514 | 411 | 241 | 170 | 49 | 1 |
import NLP.General
import NLP.Crubadan
import NLP.Freq
import NLP.Tools
import System.IO (hPutStrLn, stderr)
import Options.Applicative
import Control.Exception (evaluate)
import Control.Monad
import qualified Data.Text as T
import qualified Data.List as L
import qualified Data.Set as S
import qualified Data.Map.Strict as M
data Opts = Opts String Int Int Int
pNumPer = option auto (long "num-per-read"
<> short 'p'
<> value 20
<> metavar "NUMBER"
<> showDefault
<> help h)
where h = "The number of frequency-lists to read from the \
\database at a single time -- A larger value will \
\make the program run faster, but at a higher \
\memory cost."
pNumRes = option auto (long "num-results"
<> short 'n'
<> value 50
<> metavar "NUMBER"
<> showDefault
<> help h)
where h = "Maximum number of top results to present"
pDB = strOption (long "database"
<> short 'd'
<> value "nlp.db"
<> metavar "FILENAME"
<> showDefault
<> help h )
where h = "Database to use for identification"
pTrigrams = option auto (long "num-trigrams"
<> short 't'
<> value 50
<> metavar "NUMBER"
<> showDefault
<> help h)
where h = "The number of trigrams to pull from each checked \
\language for the comparison (starting with the \
\most frequent)"
desc = fullDesc
<> progDesc "Identify text on standard input as \
\a particular language, using language \
\profiles stored in a database \
\(see builddb)"
<> header "identify the language of a text"
parser = Opts <$> pDB <*> pNumRes <*> pNumPer <*> pTrigrams
execOpts = execParser (info (helper <*> parser) desc)
main = execOpts >>= identify
identify (Opts name numResults numPer numTrigrams) =
do db <- connectDB name
target <- getContents
candidates <- (fmap fst . M.toList)
<$> fetchAllLengths db "dataAll"
let trFreq :: FreqList TriGram
trFreq = features target
st <- fetchSt db numPer
let crawl' = crawl st numResults trFreq "dataAll" numTrigrams
scores <- foldM crawl' [] (divie' " " numPer candidates)
putStrLn "\n:: Top Matches ::"
(sequence_ . fmap print) scores
crawl st numResults frq set numTrigrams scores langs =
do (sequence_
. fmap (hPutStrLn stderr)
. fmap (\l -> "Evaluating " ++ l ++ "...")) langs
ops <- fetch st set numTrigrams langs
evaluate (foldl (check numResults frq) scores (M.toList ops))
check :: Int
-> FreqList TriGram
-> [(String, Double)]
-> (String, FreqList TriGram)
-> [(String, Double)]
check numResults freq scores (lang,op) =
let score = (lang, cosine freq op)
in (take numResults . scoreSort) (score : scores)
scoreSort :: [(String, Double)] -> [(String, Double)]
scoreSort = L.sortBy (\(a,x) (b,y) -> compare y x)
rever (a,b) = (b,a)
trs :: String -> FreqList TriGram
trs = read
filtNans :: [(String, Double)] -> [(String, Double)]
filtNans = filter (\(_,d) -> not (isNaN d))
divie :: Int -> [a] -> [[a]]
divie 0 _ = [[]] -- possibly surprising?
divie _ [] = []
divie n xs = let (as,bs) = splitAt n xs
in as : divie n bs
divie' :: a -> Int -> [a] -> [[a]]
divie' _ 0 _ = [[]]
divie' _ _ [] = []
divie' p n xs = let (as,bs) = splitAt n xs
l = length as
in (as ++ replicate (n - l) p) : divie' p n bs
| RoboNickBot/nlp-tools | src/Wrident.hs | bsd-2-clause | 3,871 | 0 | 14 | 1,344 | 1,168 | 604 | 564 | 91 | 1 |
{- |
Module : Codec.Goat
Description : Top-level module file (intended for import)
Copyright : (c) Daniel Lovasko, 2016-2017
License : BSD3
Maintainer : Daniel Lovasko <[email protected]>
Stability : stable
Portability : portable
Goat is a time series compression implementation in pure Haskell. It is
heavily based on Facebook's Gorilla algorithm [1].
[1] http://www.vldb.org/pvldb/vol8/p1816-teller.pdf
-}
module Codec.Goat
( Story(..)
, TimeFrame(..)
, ValueFrame(..)
, storyAppend
, storyDump
, storyNew
, storyQuery
, timeDecode
, timeEncode
, valueDecode
, valueEncode
) where
import Codec.Goat.Story
import Codec.Goat.TimeFrame
import Codec.Goat.ValueFrame
| lovasko/goat | src/Codec/Goat.hs | bsd-2-clause | 690 | 0 | 5 | 106 | 72 | 49 | 23 | 15 | 0 |
module Data.IORef.RunOnce (runOnce) where
import Control.Monad.IO.Unlift
import Data.IORef
runOnce :: (MonadUnliftIO m, MonadIO n) => m a -> m (n a)
runOnce f = withRunIO $ \runIO -> do
ref <- newIORef Nothing
return $ liftIO $ do
mval <- readIORef ref
case mval of
Just val -> return val
Nothing -> do
val <- runIO f
writeIORef ref (Just val)
return val
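-- Illustrative usage sketch (added; 'loadConfig' is a hypothetical action):
--
-- > once <- runOnce loadConfig
-- > a <- once -- runs loadConfig and caches its result
-- > b <- once -- returns the cached value without running loadConfig again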
| martin-kolinek/stack | src/Data/IORef/RunOnce.hs | bsd-3-clause | 453 | 0 | 19 | 161 | 163 | 79 | 84 | 14 | 2 |
import System.Environment (getArgs)
cardinal :: [Int] -> String
cardinal [x1, y1, x2, y2] | x1 == x2 = if y1 == y2 then "here" else if y1 < y2 then "N" else "S"
| y1 == y2 = if x1 < x2 then "E" else "W"
| x1 < x2 = if y1 < y2 then "NE" else "SE"
| otherwise = if y1 < y2 then "NW" else "SW"
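-- Worked example (added for illustration): for the points (0,0) and (1,1)
-- neither coordinate pair is equal and x1 < x2, y1 < y2, so
-- cardinal [0, 0, 1, 1] == "NE".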
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (cardinal . map read . words) $ lines input
| nikai3d/ce-challenges | easy/compare_points.hs | bsd-3-clause | 541 | 0 | 13 | 210 | 218 | 113 | 105 | 11 | 6 |
module Numeric.MaxEnt.Internal (
--module Numeric.MaxEnt.ConjugateGradient,
module Numeric.MaxEnt.General,
module Numeric.MaxEnt.Moment,
module Numeric.MaxEnt.Linear
) where
--import Numeric.MaxEnt.ConjugateGradient
import Numeric.MaxEnt.General
import Numeric.MaxEnt.Moment
import Numeric.MaxEnt.Linear
| jfischoff/maxent | src/Numeric/MaxEnt/Internal.hs | bsd-3-clause | 342 | 0 | 5 | 59 | 50 | 35 | 15 | 7 | 0 |
--------------------------------------------------------------------
-- |
-- Module : Data.MessagePack.Feed
-- Copyright : (c) Hideyuki Tanaka, 2009
-- License : BSD3
--
-- Maintainer: [email protected]
-- Stability : experimental
-- Portability: portable
--
-- Feeders for Stream Deserializers
--
--------------------------------------------------------------------
module Data.MessagePack.Feed(
-- * Feeder type
Feeder,
-- * Feeders
feederFromHandle,
feederFromFile,
feederFromString,
) where
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.IORef
import System.IO
-- | A 'Feeder' returns 'Just' a chunk while bytes remain, and 'Nothing' once the input is exhausted.
type Feeder = IO (Maybe ByteString)
-- | Feeder from Handle
feederFromHandle :: Handle -> IO Feeder
feederFromHandle h = return $ do
bs <- BS.hGet h bufSize
if BS.length bs > 0
then return $ Just bs
else do
hClose h
return Nothing
where
bufSize = 4096
-- | Feeder from File
feederFromFile :: FilePath -> IO Feeder
feederFromFile path =
openFile path ReadMode >>= feederFromHandle
-- | Feeder from ByteString
feederFromString :: ByteString -> IO Feeder
feederFromString bs = do
r <- newIORef (Just bs)
return $ f r
where
f r = do
mb <- readIORef r
writeIORef r Nothing
return mb
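-- Minimal sketch (added; 'drainFeeder' is a hypothetical helper, not part of
-- the original API): repeatedly calling a Feeder until it yields Nothing
-- collects all remaining chunks.
drainFeeder :: Feeder -> IO [ByteString]
drainFeeder feeder = do
  mb <- feeder
  case mb of
    Nothing -> return []
    Just b -> do
      rest <- drainFeeder feeder
      return (b : rest)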
| tanakh/hsmsgpack | src/Data/MessagePack/Feed.hs | bsd-3-clause | 1,370 | 0 | 11 | 265 | 282 | 151 | 131 | 31 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.NV.VertexProgram4
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- All tokens from the NV_vertex_program4 extension, see
-- <http://www.opengl.org/registry/specs/NV/vertex_program4.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.NV.VertexProgram4 (
-- * Functions
glVertexAttribI1i,
glVertexAttribI2i,
glVertexAttribI3i,
glVertexAttribI4i,
glVertexAttribI1ui,
glVertexAttribI2ui,
glVertexAttribI3ui,
glVertexAttribI4ui,
glVertexAttribI1iv,
glVertexAttribI2iv,
glVertexAttribI3iv,
glVertexAttribI4iv,
glVertexAttribI1uiv,
glVertexAttribI2uiv,
glVertexAttribI3uiv,
glVertexAttribI4uiv,
glVertexAttribI4bv,
glVertexAttribI4sv,
glVertexAttribI4ubv,
glVertexAttribI4usv,
glVertexAttribIPointer,
glGetVertexAttribIiv,
glGetVertexAttribIuiv,
-- * Tokens
gl_VERTEX_ATTRIB_ARRAY_INTEGER
) where
import Graphics.Rendering.OpenGL.Raw.Core32
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/NV/VertexProgram4.hs | bsd-3-clause | 1,247 | 0 | 4 | 183 | 108 | 79 | 29 | 26 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
module Protocol.ROC.PointTypes.PointType44 where
import Data.Binary.Get (getByteString,getWord32le,Get)
import Data.ByteString (ByteString)
import Data.Int (Int16)
import Data.Word (Word32)
import Prelude (($),
return,
Eq,
Float,
Read,
Show)
import Protocol.ROC.Float (getIeeeFloat32)
import Protocol.ROC.Utils (getInt16)
data PointType44 = PointType44 {
pointType44PointTag :: !PointType44PointTag
,pointType44PwrStatus :: !PointType44PwrStatus
,pointType44PwrMode :: !PointType44PwrMode
,pointType44ValidRX :: !PointType44ValidRX
,pointType44StartTime1 :: !PointType44StartTime1
,pointType44StartTime2 :: !PointType44StartTime2
,pointType44StartTime3 :: !PointType44StartTime3
,pointType44OnTime1 :: !PointType44OnTime1
,pointType44OnTime2 :: !PointType44OnTime2
,pointType44OnTime3 :: !PointType44OnTime3
,pointType44OffTime1 :: !PointType44OffTime1
,pointType44OffTime2 :: !PointType44OffTime2
,pointType44OffTime3 :: !PointType44OffTime3
,pointType44ActiveTimeZone :: !PointType44ActiveTimeZone
,pointType44HoldTime :: !PointType44HoldTime
,pointType44PwrTime :: !PointType44PwrTime
,pointType44DONum :: !PointType44DONum
,pointType44LowBattery :: !PointType44LowBattery
,pointType44OnCounter :: !PointType44OnCounter
,pointType44OffCounter :: !PointType44OffCounter
} deriving (Read,Eq, Show)
type PointType44PointTag = ByteString
type PointType44PwrStatus = Int16
type PointType44PwrMode = Int16
type PointType44ValidRX = Int16
type PointType44StartTime1 = Int16
type PointType44StartTime2 = Int16
type PointType44StartTime3 = Int16
type PointType44OnTime1 = Int16
type PointType44OnTime2 = Int16
type PointType44OnTime3 = Int16
type PointType44OffTime1 = Int16
type PointType44OffTime2 = Int16
type PointType44OffTime3 = Int16
type PointType44ActiveTimeZone = Int16
type PointType44HoldTime = Int16
type PointType44PwrTime = Int16
type PointType44DONum = Int16
type PointType44LowBattery = Float
type PointType44OnCounter = Word32
type PointType44OffCounter = Word32
pointType44Parser :: Get PointType44
pointType44Parser = do
pointTag <- getByteString 10
pwrStatus <- getInt16
pwrMode <- getInt16
validRX <- getInt16
startTime1 <- getInt16
startTime2 <- getInt16
startTime3 <- getInt16
onTime1 <- getInt16
onTime2 <- getInt16
onTime3 <- getInt16
offTime1 <- getInt16
offTime2 <- getInt16
offTime3 <- getInt16
activeTimeZone <- getInt16
holdTime <- getInt16
pwrTime <- getInt16
dONum <- getInt16
lowBattery <- getIeeeFloat32
onCounter <- getWord32le
offCounter <- getWord32le
return $ PointType44 pointTag pwrStatus pwrMode validRX startTime1 startTime2 startTime3 onTime1 onTime2 onTime3 offTime1 offTime2 offTime3 activeTimeZone holdTime pwrTime
dONum lowBattery onCounter offCounter
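-- Note (added): the Get actions above must run in exactly the order of the
-- fields of 'PointType44'; each 'getInt16', 'getIeeeFloat32' and
-- 'getWord32le' corresponds positionally to one field of the constructor
-- application returned at the end.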
| plow-technologies/roc-translator | src/Protocol/ROC/PointTypes/PointType44.hs | bsd-3-clause | 5,042 | 0 | 9 | 2,586 | 591 | 331 | 260 | 120 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances,
PatternGuards #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC, normaliseAll,
rt_simplify, simplify, specialise, hnf, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Totality(..), PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, uconstraints, next_tvar,
addToCtxt, setAccess, setTotal, setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTy, lookupP, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupVal,
mapDefCtxt,
lookupTotal, lookupNameTotal, lookupMetaInformation, lookupTyEnv, isDConName, isTConName, isConName, isFnName,
Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions) where
import Debug.Trace
import Control.Applicative hiding (Const)
import Control.Monad.State -- not Strict!
import qualified Data.Binary as B
import Data.Binary hiding (get, put)
import Idris.Core.TT
import Idris.Core.CaseTree
data EvalState = ES { limited :: [(Name, Int)],
nexthole :: Int }
deriving Show
type Eval a = State EvalState a
data EvalOpt = Spec
| HNF
| Simplify
| AtREPL
| RunTT
deriving (Show, Eq)
initEval = ES [] 0
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value
| VV Int
-- True for Bool indicates safe to reduce
| VBind Bool Name (Binder Value) (Value -> Eval Value)
-- For frozen let bindings when simplifying
| VBLet Int Name Value Value Value
| VApp Value Value
| VType UExp
| VErased
| VImpossible
| VConstant Const
| VProj Value Int
-- | VLazy Env [Value] Term
| VTmp Int
instance Show Value where
show x = show $ evalState (quote 100 x) initEval
instance Show (a -> b) where
show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
= evalState (do val <- eval False ctxt [] env t []
quote 0 val) initEval
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
= evalState (do val <- eval False ctxt [] env t [AtREPL]
quote 0 val) initEval
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
= evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
quote 0 val) initEval
specialise :: Context -> Env -> [(Name, Int)] -> TT Name -> TT Name
specialise ctxt env limits t
= evalState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Spec]
quote 0 val) (initEval { limited = limits })
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline (and probably shouldn't reduce lets?)
-- 20130908: now only used to reduce for totality checking. Inlining should
-- be done elsewhere.
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "assert_smaller", 0),
(sUN "assert_total", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "fork", 0)]
(map finalEntry env) (finalise t)
[Simplify]
quote 0 val) initEval
-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "prim_fork", 0)]
(map finalEntry env) (finalise t)
[RunTT]
quote 0 val) initEval
-- | Reduce a term to head normal form
hnf :: Context -> Env -> TT Name -> TT Name
hnf ctxt env t
= evalState (do val <- eval False ctxt []
(map finalEntry env)
(finalise t) [HNF]
quote 0 val) initEval
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
finalEntry :: (Name, Binder (TT Name)) -> (Name, Binder (TT Name))
finalEntry (n, b) = (n, fmap finalise b)
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, b):bs) tm = Bind n b (bindEnv bs tm)
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
usable :: Bool -- specialising
-> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False n [] = return (True, [])
usable True n ns
= do ES ls num <- get
case lookup n ls of
Just 0 -> return (False, ns)
Just i -> return (True, ns)
_ -> return (False, ns)
usable False n ns
= case lookup n ns of
Just 0 -> return (False, ns)
Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
_ -> return $ (True, (n, 100) : filter (\ (n', _) -> n/=n') ns)
deduct :: Name -> Eval ()
deduct n = do ES ls num <- get
case lookup n ls of
Just i -> do put $ ES ((n, (i-1)) :
filter (\ (n', _) -> n/=n') ls) num
_ -> return ()
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
hnf = HNF `elem` opts
    -- returns 'True' if reduction of the function should be blocked;
    -- during normal evaluation this should return 'False'
blockSimplify (CaseInfo inl dict) n stk
| RunTT `elem` opts
= not (inl || dict) || elem n stk
| Simplify `elem` opts
= (not (inl || dict) || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_totcheck cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookup n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
| not top && hnf = liftM (VP Ref n) (ev ntimes stk top env ty)
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(Function _ tm, _)] | sUN "assert_total" `elem` stk ->
ev ntimes (n:stk) True env tm
[(Function _ tm, Public)] ->
ev ntimes (n:stk) True env tm
[(Function _ tm, Hidden)] ->
ev ntimes (n:stk) True env tm
[(TyDecl nt ty, _)] -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
[(CaseOp ci _ _ _ _ cd, acc)]
| (acc /= Frozen || sUN "assert_total" `elem` stk) &&
null (fst (cases_totcheck cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
-- | otherwise
-- = do t' <- ev ntimes stk top env t
-- v' <- ev ntimes stk top env v --(finalise v)
-- -- use Tmp as a placeholder, then make it a variable reference
-- -- again when evaluation finished
-- hs <- get
-- let vd = nexthole hs
-- put (hs { nexthole = vd + 1 })
-- sc' <- ev ntimes stk top (VP Bound (MN vd "vlet") VErased : env) sc
-- return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fst genv ++ map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n', x):env) sc)
where vbind env t
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
-- block reduction immediately under codata (and not forced)
ev ntimes stk top env
(App (App (App d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg)
| dly == txt "Delay" && lco == txt "LazyCodata" && not simpl
= do let (f, _) = unApply arg
let ntimes' = case f of
P _ fn _ -> (fn, 0) : ntimes
_ -> ntimes
d' <- ev ntimes' stk False env d
l' <- ev ntimes' stk False env l
t' <- ev ntimes' stk False env t
arg' <- ev ntimes' stk False env arg
evApply ntimes' stk top env [l',t',arg'] d'
-- Treat "assert_total" specially, as long as it's defined!
ev ntimes stk top env (App (App (P _ n@(UN at) _) _) arg)
| [(CaseOp _ _ _ _ _ _, _)] <- lookupDefAcc n (spec || atRepl) ctxt,
at == txt "assert_total" && not simpl
= ev ntimes (n : stk) top env arg
ev ntimes stk top env (App f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (TType i) = return $ VType i
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAcc n (spec || atRepl) ctxt in
case val of
[(CaseOp ci _ _ _ _ cd, acc)] ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV 1 app
apply ntimes_in stk top env f@(VP Ref n ty) args
| not top && hnf = case args of
[] -> return f
_ -> return $ unload env f args
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(CaseOp ci _ _ _ _ cd, acc)]
| acc /= Frozen || sUN "assert_total" `elem` stk ->
-- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
[(Operator _ i op, _)] ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n -- successful, so deduct usages
t <- evTree ntimes stk top env (zip ns args') tree
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _)
| (P (DCon _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi s) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst PtrType (ConCase n 5 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst (AType (ATInt (ITVec ity count))) (ConCase n tag [] v : xs)
| tag == (fromEnum ity + 1) * 1000 + count = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
instance Eq Value where
(==) x y = getTT x == getTT y
where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 App (quote i f) (quote i a)
quote i (VType u) = return $ TType u
quote i VErased = return $ Erased
quote i VImpossible = return $ Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
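-- Note (added): the normalisation entry points above ('normaliseC',
-- 'normalise', 'simplify', 'hnf', ...) are essentially 'eval' into the HOAS
-- 'Value' representation followed by 'quote 0' back into a first-order
-- 'TT Name' term.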
wknV :: Int -> Value -> Eval Value
wknV i (VV x) | x >= i = return $ VV (x - 1)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
return $ VBind red n b' (\x -> do x' <- sc x
wknV (i + 1) x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, [])
convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs (TC' Err) Bool
convEq ctxt holes = ceq [] where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs (TC' Err) Bool
ceq ps (P xt x _) (P yt y _)
| x `elem` holes || y `elem` holes = return True
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam t) (App y (V 0))) = ceq ps x y
ceq ps (Bind n (Lam t) (App x (V 0))) y = ceq ps x y
ceq ps x (Bind n (Lam t) (App y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam t) (App x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (V x) (V y) = return (x == y)
ceq ps (V x) (P _ y _) = return (fst (ps!!x) == y)
ceq ps (P _ x _) (V y) = return (x == snd (ps!!y))
ceq ps (Bind n xb xs) (Bind n' yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
ceq ps (App fx ax) (App fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps x y = return False
caseeq ps (Case n cs) (Case n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
    a constant, which could be a data or type constructor, an axiom or an
    as-yet-undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![Type] -- argument types
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
-- [Name] SC -- Compile time case definition
             -- [Name] SC -- Run time case definitions
data CaseDefs = CaseDefs {
cases_totcheck :: !([Name], SC),
cases_compiletime :: !([Name], SC),
cases_inlined :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
data CaseInfo = CaseInfo {
case_inlinable :: Bool,
tc_dictionary :: Bool
}
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
instance Show Def where
show (Function ty tm) = "Function: " ++ show (ty, tm)
show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
show (Operator ty _ _) = "Operator: " ++ show ty
show (CaseOp (CaseInfo inlc inlr) ty atys ps_in ps cd)
= let (ns, sc) = cases_compiletime cd
(ns_t, sc_t) = cases_totcheck cd
(ns', sc') = cases_runtime cd in
"Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
"TOTALITY CHECK TIME:\n\n" ++
show ns_t ++ " " ++ show sc_t ++ "\n\n" ++
"COMPILE TIME:\n\n" ++
show ns ++ " " ++ show sc ++ "\n\n" ++
"RUN TIME:\n\n" ++
show ns' ++ " " ++ show sc' ++ "\n\n" ++
if inlc then "Inlinable\n" else "Not inlinable\n"
-------
-- Frozen => doesn't reduce
-- Hidden => doesn't reduce and invisible to type checker
data Accessibility = Public | Frozen | Hidden
deriving (Show, Eq)
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
deriving Eq
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq)
instance Show Totality where
show (Total args)= "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
data Context = MkContext {
uconstraints :: [UConstraint],
next_tvar :: Int,
definitions :: Ctxt (Def, Accessibility, Totality, MetaInformation)
} deriving Show
-- | The initial empty context
initContext = MkContext [] 0 emptyContext
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext c t defs) = MkContext c t (mapCtxt f' defs)
    where f' (d, a, t, m) = (f d, a, t, m)
-- | Get the definitions from a context
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt = map (\(n, (d, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt)
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Function ty tm, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, _, t, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, a, _, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
ctxt' = updateDef n (\ (d, a, t, _) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
ctxt' = addDef n (d, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (TyDecl nt ty, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty')) ty, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
addCasedef :: Name -> CaseInfo -> Bool -> Bool -> Bool -> Bool ->
[Type] -> -- argument types
[Int] -> -- inaccessible arguments
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- totality
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- inlined
[([Name], Term, Term)] -> -- run time
Type -> Context -> Context
addCasedef n ci@(CaseInfo alwaysInline tcdict)
tcase covering reflect asserted argtys inacc
ps_in ps_tot ps_inl ps_ct ps_rt ty uctxt
= let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
ctxt' = case (simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_tot,
simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct,
simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_inl,
simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt) of
(OK (CaseDef args_tot sc_tot _),
OK (CaseDef args_ct sc_ct _),
OK (CaseDef args_inl sc_inl _),
OK (CaseDef args_rt sc_rt _)) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_tot, sc_tot)
(args_ct, sc_ct)
(args_inl, sc_inl)
(args_rt, sc_rt) in
addDef n (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_tot cdef,
access, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
-- simplify a definition for totality checking
simplifyCasedef :: Name -> Context -> Context
simplifyCasedef n uctxt
= let ctxt = definitions uctxt
ctxt' = case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, acc, tot, metainf)] ->
ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, acc, tot, metainf)] ->
let ps_in' = map simpl ps_in
pdef = map debind ps_in' in
case simpleCase False True False CompileTime emptyFC [] atys pdef of
OK (CaseDef args sc _) ->
addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_totcheck = (args, sc) }),
acc, tot, metainf) ctxt
Error err -> error (show err)
_ -> ctxt in
uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y)) = Right (x, simplify uctxt [] y)
simpl t = t
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
Context -> Context
addOperator n ty a op uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Operator ty a op, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
tfst (a, _, _, _) = a
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt
= let ns = lookupCtxtName n (definitions ctxt) in
map fst ns
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function ty _) -> return ty
(TyDecl _ ty) -> return ty
(Operator ty _ _) -> return ty
(CaseOp _ ty _ _ _ _) -> return ty
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
isTConName :: Name -> Context -> Bool
isTConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (TCon _ _) _) -> return True
_ -> return False
isDConName :: Name -> Context -> Bool
isDConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (DCon _ _) _) -> return True
_ -> return False
isFnName :: Name -> Context -> Bool
isFnName n ctxt
= let def = lookupCtxtExact n (definitions ctxt) in
case def of
Just (Function _ _, _, _, _) -> True
Just (Operator _ _ _, _, _, _) -> True
Just (CaseOp _ _ _ _ _ _, _, _, _) -> True
_ -> False
lookupP :: Name -> Context -> [Term]
lookupP n ctxt
= do def <- lookupCtxt n (definitions ctxt)
p <- case def of
(Function ty tm, a, _, _) -> return (P Ref n ty, a)
(TyDecl nt ty, a, _, _) -> return (P nt n ty, a)
(CaseOp _ ty _ _ _ _, a, _, _) -> return (P Ref n ty, a)
(Operator ty _ _, a, _, _) -> return (P Ref n ty, a)
case snd p of
Hidden -> []
_ -> return (fst p)
lookupDefExact :: Name -> Context -> Maybe Def
lookupDefExact n ctxt = tfst <$> lookupCtxtExact n (definitions ctxt)
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = tfst <$> lookupCtxt n (definitions ctxt)
lookupNameDef :: Name -> Context -> [(Name, Def)]
lookupNameDef n ctxt = mapSnd tfst $ lookupCtxtName n (definitions ctxt)
where mapSnd f [] = []
mapSnd f ((x,y):xys) = (x, f y) : mapSnd f xys
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_return"))
then (d, Public) else (d, a)
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
where mkt (d, a, t, m) = t
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt)
where mkm (d, a, t, m) = m
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n = map (\(n, (_, _, t, _)) -> (n, t)) . lookupCtxtName n . definitions
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
lookupTyEnv :: Name -> Env -> Maybe (Int, Type)
lookupTyEnv n env = li n 0 env where
li n i [] = Nothing
li n i ((x, b): xs)
| n == x = Just (i, binderTy b)
| otherwise = li n (i+1) xs
-- | Create a unique name given context and other existing names
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
| n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs
| [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs
| otherwise = n
uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name
uniqueBindersCtxt ctxt ns (Bind n b sc)
= let n' = uniqueNameCtxt ctxt n ns in
Bind n' (fmap (uniqueBindersCtxt ctxt (n':ns)) b) (uniqueBindersCtxt ctxt ns sc)
uniqueBindersCtxt ctxt ns (App f a) = App (uniqueBindersCtxt ctxt ns f) (uniqueBindersCtxt ctxt ns a)
uniqueBindersCtxt ctxt ns t = t
| DanielWaterworth/Idris-dev | src/Idris/Core/Evaluate.hs | bsd-3-clause | 42,818 | 5 | 29 | 16,356 | 15,232 | 7,787 | 7,445 | 766 | 79 |
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Top-K queries (flat and nested) over the TPC-H schema.
module Queries.TPCH.NonStandard.TopK where
import Prelude()
import Database.DSH
import Schema.TPCH
import Queries.TPCH.BuildingBlocks
-- | Report the top /k/ customers by account balance from some country.
topCustAcct :: Text -> Integer -> Q [(Text, Decimal)]
topCustAcct n k =
topK k snd [ tup2 (c_nameQ c) (c_acctbalQ c)
| c <- customers
, c `custFromNation` n
]
-- | Task: For each customer from some nation, compute the k orders with the
-- most lineitems.
topOrdersPerCust :: Text -> Integer -> Q [(Text, [Order])]
topOrdersPerCust n k =
[ tup2 (c_nameQ c) (topK k (length . orderItems) (custOrders c))
| c <- customers
, c `custFromNation` n
]
-- | For each customer from nation 'n', fetch the date of the 'k' orders
-- with the most lineitems.
topOrdersPerCust' :: Integer -> Text -> Q [(Text, [Day])]
topOrdersPerCust' k n =
[ tup2 (c_nameQ c) (map o_orderdateQ $ topK k (length . orderItems) (custOrders c))
| c <- customers
, c `custFromNation` n
]
-- | For each customer from nation 'n', fetch the date of the 'k' orders
-- with the most revenue.
topOrdersPerCust'' :: Integer -> Text -> Q [(Text, [Day])]
topOrdersPerCust'' k n =
[ tup2 (c_nameQ c) (map o_orderdateQ $ topK k orderRevenue (custOrders c))
| c <- customers
, c `custFromNation` n
]
-- | The top k customers from one given country (by number of orders)
topCustomers :: Integer -> Text -> Q [(Text, Decimal)]
topCustomers k n =
[ pair (c_nameQ c) (c_acctbalQ c)
| c <- topK k (length . custOrders)
[ c | c <- customers, c `custFromNation` n ]
]
-- | Pair the top k customers with the most orders from two countries
pairTopCustomers :: Integer -> Text -> Text -> Q [((Text, Decimal), (Text, Decimal))]
pairTopCustomers k n1 n2 = zip (topCustomers k n1) (topCustomers k n2)
-- | Fetch the top customers in one region (by number of orders)
regionsTopCustomers :: Text -> Integer -> Q [(Text, [Customer])]
regionsTopCustomers rn k =
[ pair (n_nameQ n)
(topK k (length . custOrders)
[ c | c <- customers, c_nationkeyQ c == n_nationkeyQ n ])
| r <- regions
, r_nameQ r == toQ rn
, n <- regionNations r
]
| ulricha/dsh-tpc-h | Queries/TPCH/NonStandard/TopK.hs | bsd-3-clause | 2,431 | 0 | 12 | 585 | 709 | 384 | 325 | 43 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.NV.TextureRectangle
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- All tokens from the NV_texture_rectangle extension, see
-- <http://www.opengl.org/registry/specs/NV/texture_rectangle.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.NV.TextureRectangle (
-- * Tokens
gl_TEXTURE_RECTANGLE,
gl_TEXTURE_BINDING_RECTANGLE,
gl_PROXY_TEXTURE_RECTANGLE,
gl_MAX_RECTANGLE_TEXTURE_SIZE
) where
import Graphics.Rendering.OpenGL.Raw.Core32
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/NV/TextureRectangle.hs | bsd-3-clause | 786 | 0 | 4 | 98 | 47 | 38 | 9 | 6 | 0 |
module Web.Leo.Types where
import Network.HTTP(urlEncode)
data Language = En | Fr | Es | It | Ch | Ru | Pt | Pl | De | Unknown
deriving (Eq)
-- |For simple parsing of Language from XML we use the Read class.
instance Read Language where
readsPrec _ value =
tryParse langs
where langs = [("en", En), ("fr", Fr), ("es", Es),
("it", It), ("ch", Ch), ("ru", Ru),
("pt", Pt), ("pl", Pl), ("de", De)]
-- yes, I read RWH. it was ... eye-opening! :)
tryParse [] = []
tryParse ((attempt,result):xs) =
if take (length attempt) value == attempt
then [(result, drop (length attempt) value)]
else tryParse xs
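-- A hedged usage sketch (added for illustration, not part of the original
-- module): with the Read instance above, a two-letter code parses directly
-- into a 'Language', e.g.
--
-- > read "en" :: Language                      -- En
-- > reads "de rest" :: [(Language, String)]    -- [(De, " rest")]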
instance Show Language where
show En = "en"
show Fr = "fr"
show Es = "es"
show It = "it"
show Ch = "ch"
show Ru = "ru"
show Pt = "pt"
show Pl = "pl"
show De = "de"
data LeoOptions =
LeoOptions {
getUrl :: String,
getTrans :: Translation,
getTerm :: String,
searchLoc :: Int,
sectLenMax :: Int
}
deriving (Eq)
instance Show LeoOptions where
show (LeoOptions url trans search dir len) =
concat [ url , "/", toString trans, "/query.xml",
"?tolerMode=nof",
"&lp=", toString trans,
"&lang=de&rmWords=off&rmSearch=on&directN=0&resultOrder=basic",
"&search=", urlEncode search,
"&searchLoc=", show dir,
"§LenMax=", show len ]
where toString s = show (fst s) ++ show (snd s)
type Translation = (Language,Language)
-- | An 'Entry' always has a language and a value, the translation results.
data Entry =
Entry {
getLang :: Language,
getResult :: [String]
}
deriving (Show, Eq)
data QueryResult =
Nouns [(Entry,Entry)]
| Phrase [(Entry,Entry)]
| Praep [(Entry,Entry)]
| Verbs [(Entry,Entry)]
| AdjAdvs [(Entry,Entry)]
| Examples [(Entry,Entry)]
| None
deriving (Show)
data OutFormat = JSON | TSV | CSV
deriving (Show, Eq)
defaultLeoOptions :: LeoOptions
defaultLeoOptions =
LeoOptions {
getUrl = "http://dict.leo.org/dictQuery/m-vocab",
getTrans = (En,De),
getTerm = "",
searchLoc = 0,
sectLenMax = 16
}
| krgn/leo | src/Web/Leo/Types.hs | bsd-3-clause | 2,448 | 0 | 13 | 866 | 714 | 419 | 295 | 68 | 1 |
module Main where
import GameCard
import Player
import Game
import Card
import System.IO
import Data.Char (isDigit, isSpace)
import Data.IORef (newIORef, readIORef, writeIORef, IORef)
import Control.Monad (liftM2)
data GState = GState {gamecard :: GameCard, cplayer :: Int
,ctime :: Int}
deriving (Show)
newGame :: String -> String -> GState
newGame p1n p2n = GState (createNew p1n p2n) 1 0
increment :: Int -> Int
increment n = if n == 2 then 1 else 2
incrementT n = if n < 2 then n+1 else 0
mainLoop :: IO (IORef GState) -> IO ()
mainLoop dp = do
res <- dp
gstate <- readIORef res
if isGameOver (getPlayer 1 (gamecard gstate)) (getPlayer 2 (gamecard gstate))
then do
print (gamecard gstate)
ref <- readIORef res
let p1 = getScore (card (getPlayer (cplayer ref) (gamecard ref)))
p2 = getScore (card (getPlayer (increment (cplayer ref)) (gamecard ref)))
winner = if p1 > p2 then cplayer ref else increment (cplayer ref)
putStrLn $ (++ " wins!") $ show $ name (getPlayer winner (gamecard ref))
else mainLoop $ dartPrompt res
main :: IO ()
main = do
prompted <- prompt
mainLoop $ dartPrompt prompted
prompt :: IO (IORef GState)
prompt = do
putStrLn "Welcome to Hricket. The Haskell cricket scoring application.\n\n\
\Please enter player 1's name: "
p1 <- getLine
putStrLn "\nPlease enter player 2's name: "
p2 <- getLine
putStr "\n"
newIORef $ newGame p1 p2
dartPrompt gst = do
gsraw' <- readIORef gst
let gsraw = gsraw' {ctime = (incrementT (ctime gsraw'))}
gstate = gamecard gsraw
pn = cplayer gsraw
play = getPlayer pn gstate
print gstate
putStr $ "\nEnter the dart you hit, a single space, and the number of markings.\
           \\nFor example: 15 3, 15 2, 15 1, or 0 for nothing.\n" ++ name play ++
": "
hFlush stdout
ds <- getValidInput play
if ds /= "skip"
then do let marked = mark (getPlayer pn gstate) (getPlayer (increment pn) gstate) ds
writeIORef gst gsraw {gamecard = (setPlayer pn gstate marked)
,cplayer = (if ctime gsraw == 0 then increment pn else pn)}
return gst
else writeIORef gst gsraw {cplayer = (increment pn), ctime = 0} >> return gst
getValidInput :: Player -> IO String
getValidInput player = helper 0 ""
where helper 1 s = return s
helper 0 s = do
x <- getLine
case checkInput x player of
Right y -> helper 1 y
Left y -> putStr y >> hFlush stdout >> helper 0 ""
checkInput x player
| x == "skip" || x == "s" = Right "skip"
| not $ noLetters x = Left str
| any isSpace x && not (isSpace (last x)) =
let (sub, end) = break isSpace x
y = read sub
l = read end
in
if y > 20 && y < 25 || y < 1 || l > 3 || l < 1
then Left str
else Right x
| read x == 0 = Right x
| otherwise = Left str
where noLetters = all (liftM2 (||) isDigit isSpace)
str = "Invalid input. Please try again.\n" ++ name player ++ ": " | Raynes/Hricket | src/Main.hs | bsd-3-clause | 3,123 | 0 | 21 | 924 | 1,156 | 567 | 589 | 82 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-@ LIQUID "--no-termination "@-}
{-@ LIQUID "--short-names "@-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude (liquidError)
import Data.Vector
import Data.Vector.Mutable (write)
import Prelude hiding (length, replicate)
[lq| type Idx X = {v:Nat | v < vlen X} |]
search pat str = kmpSearch (fromList pat) (fromList str)
-------------------------------------------------------------
-- | Do the Search ------------------------------------------
-------------------------------------------------------------
kmpSearch p s = go 0 0
where
t = kmpTable p
m = length p
n = length s
go i j
| j >= m = i - m
| i >= n = (-1)
| s!i == p!j = go (i+1) (j+1)
| j == 0 = go (i+1) j
| otherwise = go i (t!j)
-------------------------------------------------------------
-- | Make Table ---------------------------------------------
-------------------------------------------------------------
[lq| kmpTable :: (Eq a) => p:Vector a -> {v:Vector Nat | vlen v = vlen p} |]
kmpTable p = go 1 0 t
where
m = length p
t = replicate m 0
go i j t
| i >= m - 1 = t
| p!i == p!j = let i' = i + 1
j' = j + 1
t' = set t i' j'
in go i' j' t'
| (j == 0) = let i' = i + 1
t' = set t i' 0
in go i' j t'
| otherwise = let j' = t ! j
in go i j' t
[lq| type Upto N = {v:Nat | v < N} |]
[lq| set :: a:Vector a -> i:Idx a -> a -> {v:Vector a | vlen v = vlen a} |]
set :: Vector a -> Int -> a -> Vector a
set = undefined
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/kmpVec.hs | bsd-3-clause | 1,820 | 5 | 10 | 670 | 525 | 268 | 257 | 37 | 1 |
import Text.Printf (printf)
v = 20.0 :: Double
g = 9.81 :: Double
y_min = -100.0 :: Double
y_max = v * v / 2 / g
f y = pi * v4 / g2 * y - pi * v2 / g * y * y
where
v2 = v * v
v4 = v2 * v2
g2 = g * g
main = printf "%.4f\n" $ f y_max - f y_min
| foreverbell/project-euler-solutions | src/317.hs | bsd-3-clause | 259 | 8 | 11 | 90 | 178 | 79 | 99 | 10 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Duckling.Distance.Types where
import Control.DeepSeq
import Data.Aeson
import Data.Hashable
import Data.Text (Text)
import qualified Data.Text as Text
import GHC.Generics
import Prelude
import Duckling.Resolve (Resolve(..))
data Unit
= Foot
| Centimetre
| Kilometre
| Inch
| M -- ambiguous between Mile and Metre
| Metre
| Mile
| Yard
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance ToJSON Unit where
toJSON = String . Text.toLower . Text.pack . show
data DistanceData = DistanceData
{ unit :: Maybe Unit
, value :: Double
}
deriving (Eq, Generic, Hashable, Ord, Show, NFData)
instance Resolve DistanceData where
type ResolvedValue DistanceData = DistanceValue
resolve _ DistanceData {unit = Nothing} = Nothing
resolve _ DistanceData {unit = Just unit, value} =
Just DistanceValue {vValue = value, vUnit = unit}
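-- A hedged illustration (added; not part of the original module): resolution
-- only succeeds once a concrete unit is known. The first argument of
-- 'resolve' is ignored here, so any context will do, e.g.
--
-- > resolve ctx DistanceData {unit = Just Mile, value = 3}   -- Just (DistanceValue {vUnit = Mile, vValue = 3.0})
-- > resolve ctx DistanceData {unit = Nothing,   value = 3}   -- Nothing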
data DistanceValue = DistanceValue
{ vUnit :: Unit
, vValue :: Double
}
deriving (Eq, Ord, Show)
instance ToJSON DistanceValue where
toJSON (DistanceValue unit value) = object
[ "type" .= ("value" :: Text)
, "value" .= value
, "unit" .= unit
]
| rfranek/duckling | Duckling/Distance/Types.hs | bsd-3-clause | 1,648 | 0 | 10 | 317 | 378 | 220 | 158 | 45 | 0 |
--
-- Debug.hs
--
-- Display functions calls, arguments and results.
--
-- Gregory Wright, 12 June 2011
--
module Math.Symbolic.Wheeler.Debug where
import Debug.Trace
traceCall :: (Show a, Show b) => String -> (a -> b) -> a -> b
traceCall msg fn arg =
let
retval = fn arg
in
trace (msg ++
" " ++
show arg ++
" returns " ++
show retval ++
"\n") retval
traceCall2 :: (Show a, Show b, Show c) => String -> (a -> b -> c) -> a -> b -> c
traceCall2 msg fn a a' =
let
retval = fn a a'
in
trace (msg ++
" " ++
show a ++
" " ++
show a' ++
" returns " ++
show retval ++
"\n") retval
traceCall3 :: (Show a, Show b, Show c) => String -> a -> b -> c -> c
traceCall3 msg a a' r =
let
retval = r
in
trace (msg ++
" " ++
show a ++
" " ++
show a' ++
" returns " ++
show retval ++
"\n") retval
traceCall3' :: (Show a, Show b, Show c, Show d) => String -> (a -> b -> c -> d) -> a -> b -> c -> d
traceCall3' msg fn a a' a'' =
let
retval = fn a a' a''
in
trace (msg ++
" " ++
show a ++
" " ++
show a' ++
" " ++
show a'' ++
" returns " ++
show retval ++
"\n") retval
traceCall_ :: (Show a, Show b) => String -> a -> b -> b
traceCall_ msg arg val = trace (msg ++ " " ++ show arg ++ " returns " ++ show val ++ "\n") val | gwright83/Wheeler | src/Math/Symbolic/Wheeler/Debug.hs | bsd-3-clause | 1,853 | 0 | 17 | 948 | 594 | 299 | 295 | 52 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Choice (choice2maybe, Choicest (..), depends, unchoice) where
import Data.Semigroup
class Choice a where
(<|||>) :: a -> a -> a
firstCorrect :: Choice a => [a] -> a
firstCorrect = foldr1 (<|||>)
instance Choice b => Choice (a -> b) where
f <|||> g = \x -> f x <|||> g x
instance Choice (Maybe a) where
Nothing <|||> x = x
x <|||> _ = x
newtype Choicest a = Choicest {choice2maybe :: Maybe a} deriving (Functor, Applicative, Monad, Choice, Show)
instance Monoid (Choicest a) where
(Choicest x) `mappend` (Choicest y) = Choicest $ x <|||> y
mempty = Choicest Nothing
instance Semigroup (Choicest a) where
(<>) = mappend
depends (Choicest (Just x)) f _ = f x
depends (Choicest Nothing) _ n = n
unchoice _ (Choicest (Just x)) = x
unchoice a (Choicest Nothing) = a
| sejdm/smartWallpaper | src/Choice.hs | bsd-3-clause | 837 | 0 | 9 | 170 | 364 | 192 | 172 | 22 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
-- |
-- Module : System.Posix.LoadAvg
-- There are two basic ways you can get load average on a modern Linux system.
-- First is @getloadavg (3)@ system call. The second is @\/proc\/loadavg@ file.
-- This module provides means of getting the information from both sources.
-- @\/proc\/loadavg@ provides some additional information but we ignore that.
module System.Posix.LoadAvg ( LoadAvg(..),
parseLoadAvg,
getLoadAvg, getLoadAvgSafe,
getLoadAvgProc ) where
import Foreign
import Data.Maybe
data LoadAvg = LoadAvg { sample_1 :: !Double -- ^ sample for last 1 minute
, sample_5 :: !Double -- ^ sample for last 5 minutes
, sample_15 :: !Double -- ^ sample for last 15 minutes
}
deriving (Read,Show,Eq,Ord)
foreign import ccall "getloadavg" getloadavg_C :: Ptr Double -> Int -> IO Int
-- | Discards error checking from getLoadAvgSafe. Will raise IO exception on error.
getLoadAvg :: IO LoadAvg
getLoadAvg = (fromMaybe (error "getLoadAvg: an error occurred")) `fmap` getLoadAvgSafe
-- | Calls @getloadavg (3)@ to get system load average.
-- Provides error checking, and the result may be Nothing in case of error.
-- If there is not enough data the samples will be copied from more recent samples.
getLoadAvgSafe :: IO (Maybe LoadAvg)
getLoadAvgSafe = allocaArray 3 $ \arr ->
do
res <- getloadavg_C arr 3
case res of
-- Kind of hacky, I know
3 -> do
[a,b,c] <- peekArray 3 arr
return . Just $ LoadAvg a b c
2 -> do
[a,b] <- peekArray 2 arr
return . Just $ LoadAvg a b b
1 -> do
[a] <- peekArray 1 arr
return . Just $ LoadAvg a a a
       -- Instead of matching with _ perhaps we should check that we got '-1' here
_ -> return Nothing
-- | Tries to read @\/proc\/loadavg@ and parse its output with 'parseLoadAvg'. Either step may fail with an IO exception.
getLoadAvgProc :: IO LoadAvg
getLoadAvgProc = parseLoadAvg `fmap` readFile "/proc/loadavg"
-- | Tries to parse the output of @\/proc\/loadavg@. If anything goes wrong an arbitrary exception will be raised.
parseLoadAvg :: String -> LoadAvg
parseLoadAvg input = let (a:b:c:_) = map read $ words input in LoadAvg a b c
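-- A hedged example (added for illustration, not part of the original module):
-- only the first three whitespace-separated fields of @\/proc\/loadavg@ are
-- used; the rest of the line is ignored, e.g.
--
-- > parseLoadAvg "0.20 0.40 0.60 1/234 5678"   -- LoadAvg {sample_1 = 0.2, sample_5 = 0.4, sample_15 = 0.6}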
| Tener/haskell-loadavg | src/System/Posix/LoadAvg.hs | bsd-3-clause | 2,419 | 0 | 15 | 677 | 428 | 228 | 200 | 39 | 4 |
module JDec.Class.Raw.ClassVersion (
ClassVersion(ClassVersion),
minorClassVersion,
majorClassVersion
) where
-- | The values of the minorVersion and majorVersion items are the minor and major version numbers of the class file. Together, a major and a minor version number determine the version of the class file format. If a class file has major version number M and minor version number m, we denote the version of its class file format as M.m.
data ClassVersion = ClassVersion {
minorClassVersion :: Integer, -- ^ Minor version number of the class file
majorClassVersion :: Integer -- ^ Major version number of the class file
} deriving Show
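-- A hedged example (added for illustration, not part of the original module):
-- a class file compiled for Java SE 8 has major version 52 and minor version
-- 0, i.e. class file format version 52.0:
--
-- > ClassVersion { minorClassVersion = 0, majorClassVersion = 52 }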
| rel-eng/jdec | src/JDec/Class/Raw/ClassVersion.hs | bsd-3-clause | 651 | 0 | 8 | 108 | 50 | 34 | 16 | 10 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.DBus.Toggle
-- Copyright : (c) Ivan A. Malison
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Ivan A. Malison
-- Stability : unstable
-- Portability : unportable
--
-- This module provides a dbus interface that allows users to toggle the display
-- of taffybar on each monitor while it is running.
-----------------------------------------------------------------------------
module System.Taffybar.DBus.Toggle ( handleDBusToggles ) where
import Control.Applicative
import qualified Control.Concurrent.MVar as MV
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import DBus
import DBus.Client
import Data.Int
import qualified Data.Map as M
import Data.Maybe
import qualified GI.Gdk as Gdk
import qualified GI.Gtk as Gtk
import Graphics.UI.GIGtkStrut
import Prelude
import System.Directory
import System.FilePath.Posix
import System.Log.Logger
import System.Taffybar.Context
import System.Taffybar.Util
import Text.Printf
import Text.Read ( readMaybe )
-- $usage
--
-- To use this module, import it in your taffybar.hs and wrap your config with
-- the 'handleDBusToggles' function:
--
-- > main = dyreTaffybar $ handleDBusToggles myConfig
--
-- To toggle taffybar on the monitor that is currently active, issue the
-- following command:
--
-- > dbus-send --print-reply=literal --dest=taffybar.toggle /taffybar/toggle taffybar.toggle.toggleCurrent
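--
-- The interface exported below also offers per-monitor and maintenance
-- methods (a hedged addition to these docs, not in the original). For
-- example, to hide taffybar on monitor 0, pass the monitor index as an
-- int32 argument:
--
-- > dbus-send --print-reply=literal --dest=taffybar.toggle /taffybar/toggle taffybar.toggle.hideOnMonitor int32:0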
logIO :: System.Log.Logger.Priority -> String -> IO ()
logIO = logM "System.Taffybar.DBus.Toggle"
logT :: MonadIO m => System.Log.Logger.Priority -> String -> m ()
logT p = liftIO . logIO p
getActiveMonitorNumber :: MaybeT IO Int
getActiveMonitorNumber = do
display <- MaybeT Gdk.displayGetDefault
seat <- lift $ Gdk.displayGetDefaultSeat display
device <- MaybeT $ Gdk.seatGetPointer seat
lift $ do
(_, x, y) <- Gdk.deviceGetPosition device
Gdk.displayGetMonitorAtPoint display x y >>= getMonitorNumber
getMonitorNumber :: Gdk.Monitor -> IO Int
getMonitorNumber monitor = do
display <- Gdk.monitorGetDisplay monitor
monitorCount <- Gdk.displayGetNMonitors display
monitors <- mapM (Gdk.displayGetMonitor display) [0..(monitorCount-1)]
monitorGeometry <- Gdk.getMonitorGeometry monitor
let equalsMonitor (Just other, _) =
do
otherGeometry <- Gdk.getMonitorGeometry other
case (otherGeometry, monitorGeometry) of
(Nothing, Nothing) -> return True
(Just g1, Just g2) -> Gdk.rectangleEqual g1 g2
_ -> return False
equalsMonitor _ = return False
snd . fromMaybe (Nothing, 0) . listToMaybe <$>
filterM equalsMonitor (zip monitors [0..])
taffybarTogglePath :: ObjectPath
taffybarTogglePath = "/taffybar/toggle"
taffybarToggleInterface :: InterfaceName
taffybarToggleInterface = "taffybar.toggle"
toggleStateFile :: IO FilePath
toggleStateFile = (</> "toggle_state.dat") <$> taffyStateDir
newtype TogglesMVar = TogglesMVar (MV.MVar (M.Map Int Bool))
getTogglesVar :: TaffyIO TogglesMVar
getTogglesVar = getStateDefault $ lift (TogglesMVar <$> MV.newMVar M.empty)
toggleBarConfigGetter :: BarConfigGetter -> BarConfigGetter
toggleBarConfigGetter getConfigs = do
barConfigs <- getConfigs
TogglesMVar enabledVar <- getTogglesVar
numToEnabled <- lift $ MV.readMVar enabledVar
let isEnabled monNumber = fromMaybe True $ M.lookup monNumber numToEnabled
isConfigEnabled =
isEnabled . fromIntegral . fromMaybe 0 . strutMonitor . strutConfig
return $ filter isConfigEnabled barConfigs
exportTogglesInterface :: TaffyIO ()
exportTogglesInterface = do
TogglesMVar enabledVar <- getTogglesVar
ctx <- ask
lift $ taffyStateDir >>= createDirectoryIfMissing True
stateFile <- lift toggleStateFile
let toggleTaffyOnMon fn mon = flip runReaderT ctx $ do
lift $ MV.modifyMVar_ enabledVar $ \numToEnabled -> do
let current = fromMaybe True $ M.lookup mon numToEnabled
result = M.insert mon (fn current) numToEnabled
logIO DEBUG $ printf "Toggle state before: %s, after %s"
(show numToEnabled) (show result)
catch (writeFile stateFile (show result)) $ \e ->
logIO WARNING $ printf "Unable to write to toggle state file %s, error: %s"
(show stateFile) (show (e :: SomeException))
return result
refreshTaffyWindows
toggleTaffy = do
num <- runMaybeT getActiveMonitorNumber
toggleTaffyOnMon not $ fromMaybe 0 num
takeInt :: (Int -> a) -> (Int32 -> a)
takeInt = (. fromIntegral)
client <- asks sessionDBusClient
let interface =
defaultInterface
{ interfaceName = taffybarToggleInterface
, interfaceMethods =
[ autoMethod "toggleCurrent" toggleTaffy
, autoMethod "toggleOnMonitor" $ takeInt $ toggleTaffyOnMon not
, autoMethod "hideOnMonitor" $
takeInt $ toggleTaffyOnMon (const False)
, autoMethod "showOnMonitor" $
takeInt $ toggleTaffyOnMon (const True)
, autoMethod "refresh" $ runReaderT refreshTaffyWindows ctx
, autoMethod "exit" (Gtk.mainQuit :: IO ())
]
}
lift $ do
_ <- requestName client "taffybar.toggle"
[nameAllowReplacement, nameReplaceExisting]
export client taffybarTogglePath interface
dbusTogglesStartupHook :: TaffyIO ()
dbusTogglesStartupHook = do
TogglesMVar enabledVar <- getTogglesVar
logT DEBUG "Loading toggle state"
lift $ do
stateFilepath <- toggleStateFile
filepathExists <- doesFileExist stateFilepath
mStartingMap <-
if filepathExists
then
readMaybe <$> readFile stateFilepath
else
return Nothing
MV.modifyMVar_ enabledVar $ const $ return $ fromMaybe M.empty mStartingMap
logT DEBUG "Exporting toggles interface"
exportTogglesInterface
handleDBusToggles :: TaffybarConfig -> TaffybarConfig
handleDBusToggles config =
config { getBarConfigsParam =
toggleBarConfigGetter $ getBarConfigsParam config
, startupHook = startupHook config >> dbusTogglesStartupHook
}
| teleshoes/taffybar | src/System/Taffybar/DBus/Toggle.hs | bsd-3-clause | 6,556 | 2 | 23 | 1,480 | 1,462 | 744 | 718 | 130 | 4 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS -fwarn-unused-imports #-}
-- | This module provides perl-style pattern matching. It is intended
-- for use with minimal Haskell knowledge, so it moves away from the
-- complex regex-* type signatures for the sake of clarity, and always
-- uses the same string types for source text and patterns. See
-- 'tests' in source code for a few examples.
module Text.Regex.Easy
( module Text.Regex.PCRE
, Match, Source
, (=~+)
, (=~-)
, (=~#)
, (=~++)
, replaceRegex
, replaceRegexAll
)
where
import Data.Array as AR
import Data.Function
import Data.List as List
import Data.Monoid
import Data.String.Conversions
import Prelude hiding ((++))
import Text.Regex.PCRE
import qualified Data.ByteString.Lazy as LBS
-- | Rudimentary tests. Read the source as a form of documentation.
tests :: Bool
tests = and $
(("file_1.txt" =~+ "^(.*)_(\\d).txt$") ==
[ ( "file_1.txt" , ( 0 , 10 ) )
, ( "file" , ( 0 , 4 ) )
, ( "1" , ( 5 , 1 ) )
]) :
(("file_1.txt" =~- "^(.*)_(\\d).txt$") ==
["file_1.txt", "file", "1"]) :
("file_1.txt" =~# "^(.*)_(\\d).txt$") :
(let q :: LBS = "wif kwof ..wif,, wif, 8fwif"
p :: SBS = "\\Sw.f"
in ((q =~+ p) ==
[ ( "kwof" , ( 4 , 4 ) ) ]) &&
((q =~++ p) ==
[ [ ( "kwof" , ( 4 , 4 ) ) ]
, [ ( ".wif" , ( 10 , 4 ) ) ]
, [ ( "fwif" , ( 24 , 4 ) ) ]
])) :
(let q :: LBS = "wif kwof ..wif,, wif, 8fwif"
p :: SBS = "\\Sw.f"
f ([(a,_)] :: [(LBS, (MatchOffset, MatchLength))]) = Just $ "@" <> a <> "@"
in (replaceRegex q p f == "wif @kwof@ ..wif,, wif, 8fwif") &&
(replaceRegexAll q p f == "wif @kwof@ [email protected]@,, wif, 8@fwif@")) :
[]
type Match = SBS
type Source = LBS
-- | Convenience wrapper around '(=~)', that trades flexibility off
-- for compactness.
(=~+) :: Source -> Match -> [(Source, (MatchOffset, MatchLength))]
(=~+) source match = elems (getAllTextSubmatches (source =~ match) :: MatchText Source)
-- | Convenience wrapper for '(=~+)' that chops rarely needed offsets
-- and lengths off the result.
(=~-) :: Source -> Match -> [Source]
(=~-) source match = map fst $ source =~+ match
-- | Convenience function for '(=~+)' with match result 'Bool'.
(=~#) :: Source -> Match -> Bool
(=~#) source match = not . null $ source =~+ match
-- | Like '(=~+)', but find all matches, not just the first one.
(=~++) :: Source -> Match -> [[(Source, (MatchOffset, MatchLength))]]
(=~++) source match = case source =~+ match of
[] -> []
x@((_, (holeStart, holeEnd)):_) -> x : map (shift (holeStart + holeEnd))
(LBS.drop (fromIntegral $ holeStart + holeEnd) source =~++ match)
where
shift :: Int -> [(Source, (MatchOffset, MatchLength))] -> [(Source, (MatchOffset, MatchLength))]
shift o' = map (\ (s, (o, l)) -> (s, (o + o', l)))
-- | Replace first match with result of a function of the match.
replaceRegex :: Source -> Match -> ([(Source, (MatchOffset, MatchLength))] -> Maybe Source) -> Source
replaceRegex source match trans = case source =~+ match of
m@((_, (offset, length)):_) -> let before = LBS.take (fromIntegral offset) source
after = LBS.drop (fromIntegral $ offset + length) source
in case trans m of
Just m' -> before <> m' <> after
Nothing -> source
-- | Replace all matches with result of a function of the match.
replaceRegexAll :: Source -> Match -> ([(Source, (MatchOffset, MatchLength))] -> Maybe Source) -> Source
replaceRegexAll source match trans = case source =~+ match of
[] -> source
m@((_, (offset, length)):_) -> case trans m of
Just m' -> let before = LBS.take (fromIntegral offset) source
after = LBS.drop (fromIntegral $ offset + length) source
in before <> m' <> replaceRegexAll after match trans
Nothing -> let before = LBS.take (fromIntegral $ offset + length) source
after = LBS.drop (fromIntegral $ offset + length) source
in before <> replaceRegexAll after match trans
| zerobuzz/regex-easy | Text/Regex/Easy.hs | bsd-3-clause | 4,863 | 0 | 18 | 1,630 | 1,290 | 746 | 544 | 81 | 3 |
module Internal
(
) where
| KevinCotrone/JIT-Compiled | src/Internal.hs | bsd-3-clause | 35 | 1 | 3 | 13 | 9 | 6 | 3 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998
TcRules: Typechecking transformation rules
-}
{-# LANGUAGE ViewPatterns #-}
module TcRules ( tcRules ) where
import HsSyn
import TcRnMonad
import TcSimplify
import TcMType
import TcType
import TcHsType
import TcExpr
import TcEnv
import TcEvidence
import TcUnify( buildImplicationFor )
import Type
import Id
import Var ( EvVar )
import Name
import BasicTypes ( RuleName )
import SrcLoc
import Outputable
import FastString
import Bag
import Data.List( partition )
{-
Note [Typechecking rules]
~~~~~~~~~~~~~~~~~~~~~~~~~
We *infer* the type of the LHS, and use that type to *check* the type of
the RHS. That means that higher-rank rules work reasonably well. Here's
an example (test simplCore/should_compile/rule2.hs) produced by Roman:
foo :: (forall m. m a -> m b) -> m a -> m b
foo f = ...
bar :: (forall m. m a -> m a) -> m a -> m a
bar f = ...
{-# RULES "foo/bar" foo = bar #-}
He wanted the rule to typecheck.
-}
tcRules :: [LRuleDecls Name] -> TcM [LRuleDecls TcId]
tcRules decls = mapM (wrapLocM tcRuleDecls) decls
tcRuleDecls :: RuleDecls Name -> TcM (RuleDecls TcId)
tcRuleDecls (HsRules src decls)
= do { tc_decls <- mapM (wrapLocM tcRule) decls
; return (HsRules src tc_decls) }
tcRule :: RuleDecl Name -> TcM (RuleDecl TcId)
tcRule (HsRule name act hs_bndrs lhs fv_lhs rhs fv_rhs)
= addErrCtxt (ruleCtxt $ snd $ unLoc name) $
do { traceTc "---- Rule ------" (pprFullRuleName name)
-- Note [Typechecking rules]
; (vars, bndr_wanted) <- captureConstraints $
tcRuleBndrs hs_bndrs
-- bndr_wanted constraints can include wildcard hole
-- constraints, which we should not forget about.
-- It may mention the skolem type variables bound by
-- the RULE. c.f. Trac #10072
; let (id_bndrs, tv_bndrs) = partition isId vars
; (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty)
<- tcExtendTyVarEnv tv_bndrs $
tcExtendIdEnv id_bndrs $
do { -- See Note [Solve order for RULES]
((lhs', rule_ty), lhs_wanted) <- captureConstraints (tcInferRho lhs)
; (rhs', rhs_wanted) <- captureConstraints (tcMonoExpr rhs rule_ty)
; return (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty) }
; traceTc "tcRule 1" (vcat [ pprFullRuleName name
, ppr lhs_wanted
, ppr rhs_wanted ])
; let all_lhs_wanted = bndr_wanted `andWC` lhs_wanted
; lhs_evs <- simplifyRule (snd $ unLoc name)
all_lhs_wanted
rhs_wanted
-- Now figure out what to quantify over
-- c.f. TcSimplify.simplifyInfer
-- We quantify over any tyvars free in *either* the rule
-- *or* the bound variables. The latter is important. Consider
-- ss (x,(y,z)) = (x,z)
-- RULE: forall v. fst (ss v) = fst v
-- The type of the rhs of the rule is just a, but v::(a,(b,c))
--
       -- We also need to get the completely-unconstrained tyvars of
-- the LHS, lest they otherwise get defaulted to Any; but we do that
-- during zonking (see TcHsSyn.zonkRule)
; let tpl_ids = lhs_evs ++ id_bndrs
forall_tkvs = splitDepVarsOfTypes $
rule_ty : map idType tpl_ids
; gbls <- tcGetGlobalTyCoVars -- Even though top level, there might be top-level
-- monomorphic bindings from the MR; test tc111
; qtkvs <- quantifyTyVars gbls forall_tkvs
; traceTc "tcRule" (vcat [ pprFullRuleName name
, ppr forall_tkvs
, ppr qtkvs
, ppr rule_ty
, vcat [ ppr id <+> dcolon <+> ppr (idType id) | id <- tpl_ids ]
])
-- Simplify the RHS constraints
; let skol_info = RuleSkol (snd $ unLoc name)
; (rhs_implic, rhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs rhs_wanted
-- For the LHS constraints we must solve the remaining constraints
-- (a) so that we report insoluble ones
-- (b) so that we bind any soluble ones
; (lhs_implic, lhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs
(all_lhs_wanted { wc_simple = emptyBag })
-- simplifyRule consumed all simple
-- constraints
; emitImplications (lhs_implic `unionBags` rhs_implic)
; return (HsRule name act
(map (noLoc . RuleBndr . noLoc) (qtkvs ++ tpl_ids))
(mkHsDictLet lhs_binds lhs') fv_lhs
(mkHsDictLet rhs_binds rhs') fv_rhs) }
tcRuleBndrs :: [LRuleBndr Name] -> TcM [Var]
tcRuleBndrs []
= return []
tcRuleBndrs (L _ (RuleBndr (L _ name)) : rule_bndrs)
= do { ty <- newOpenFlexiTyVarTy
; vars <- tcRuleBndrs rule_bndrs
; return (mkLocalId name ty : vars) }
tcRuleBndrs (L _ (RuleBndrSig (L _ name) rn_ty) : rule_bndrs)
-- e.g x :: a->a
-- The tyvar 'a' is brought into scope first, just as if you'd written
-- a::*, x :: a->a
= do { let ctxt = RuleSigCtxt name
; (id_ty, tvs, _) <- tcHsPatSigType ctxt rn_ty
; let id = mkLocalIdOrCoVar name id_ty
-- See Note [Pattern signature binders] in TcHsType
-- The type variables scope over subsequent bindings; yuk
; vars <- tcExtendTyVarEnv tvs $
tcRuleBndrs rule_bndrs
; return (tvs ++ id : vars) }
ruleCtxt :: FastString -> SDoc
ruleCtxt name = text "When checking the transformation rule" <+>
doubleQuotes (ftext name)
{-
*********************************************************************************
* *
Constraint simplification for rules
* *
***********************************************************************************
Note [Simplifying RULE constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example. Consider the following left-hand side of a rule
f (x == y) (y > z) = ...
If we typecheck this expression we get constraints
d1 :: Ord a, d2 :: Eq a
We do NOT want to "simplify" to the LHS
forall x::a, y::a, z::a, d1::Ord a.
f ((==) (eqFromOrd d1) x y) ((>) d1 y z) = ...
Instead we want
forall x::a, y::a, z::a, d1::Ord a, d2::Eq a.
f ((==) d2 x y) ((>) d1 y z) = ...
Here is another example:
fromIntegral :: (Integral a, Num b) => a -> b
{-# RULES "foo" fromIntegral = id :: Int -> Int #-}
In the rule, a=b=Int, and Num Int is a superclass of Integral Int. But
we *dont* want to get
forall dIntegralInt.
fromIntegral Int Int dIntegralInt (scsel dIntegralInt) = id Int
because the scsel will mess up RULE matching. Instead we want
forall dIntegralInt, dNumInt.
fromIntegral Int Int dIntegralInt dNumInt = id Int
Even if we have
g (x == y) (y == z) = ..
where the two dictionaries are *identical*, we do NOT WANT
forall x::a, y::a, z::a, d1::Eq a
f ((==) d1 x y) ((>) d1 y z) = ...
because that will only match if the dict args are (visibly) equal.
Instead we want to quantify over the dictionaries separately.
In short, simplifyRuleLhs must *only* squash equalities, leaving
all dicts unchanged, with absolutely no sharing.
Also note that we can't solve the LHS constraints in isolation:
Example foo :: Ord a => a -> a
foo_spec :: Int -> Int
{-# RULE "foo" foo = foo_spec #-}
Here, it's the RHS that fixes the type variable
HOWEVER, under a nested implication things are different
Consider
f :: (forall a. Eq a => a->a) -> Bool -> ...
{-# RULES "foo" forall (v::forall b. Eq b => b->b).
f b True = ...
#-}
Here we *must* solve the wanted (Eq a) from the given (Eq a)
resulting from skolemising the argument type of g. So we
revert to SimplCheck when going under an implication.
------------------------ So the plan is this -----------------------
* Step 0: typecheck the LHS and RHS to get constraints from each
* Step 1: Simplify the LHS and RHS constraints all together in one bag
We do this to discover all unification equalities
* Step 2: Zonk the ORIGINAL (unsimplified) lhs constraints, to take
advantage of those unifications, and partition them into the
ones we will quantify over, and the others
See Note [RULE quantification over equalities]
* Step 3: Decide on the type variables to quantify over
* Step 4: Simplify the LHS and RHS constraints separately, using the
quantified constraints as givens
Note [Solve order for RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step 1 above, we need to be a bit careful about solve order.
Consider
f :: Int -> T Int
type instance T Int = Bool
RULE f 3 = True
From the RULE we get
lhs-constraints: T Int ~ alpha
rhs-constraints: Bool ~ alpha
where 'alpha' is the type that connects the two. If we glom them
all together, and solve the RHS constraint first, we might solve
with alpha := Bool. But then we'd end up with a RULE like
    RULE: f 3 |> (co :: T Int ~ Bool) = True
which is terrible. We want
RULE: f 3 = True |> (sym co :: Bool ~ T Int)
So we are careful to solve the LHS constraints first, and *then* the
RHS constraints. Actually much of this is done by the on-the-fly
constraint solving, so the same order must be observed in
tcRule.
Note [RULE quantification over equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Deciding which equalities to quantify over is tricky:
* We do not want to quantify over insoluble equalities (Int ~ Bool)
(a) because we prefer to report a LHS type error
(b) because if such things end up in 'givens' we get a bogus
"inaccessible code" error
* But we do want to quantify over things like (a ~ F b), where
F is a type function.
The difficulty is that it's hard to tell what is insoluble!
So we see whether the simplification step yielded any type errors,
and if so refrain from quantifying over *any* equalities.
Note [Quantifying over coercion holes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Equality constraints from the LHS will emit coercion hole Wanteds.
These don't have a name, so we can't quantify over them directly.
Instead, because we really do want to quantify here, invent a new
EvVar for the coercion, fill the hole with the invented EvVar, and
then quantify over the EvVar. Not too tricky -- just some
impedance matching, really.
Note [Simplify *derived* constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this stage, we're simplifying constraints only for insolubility
and for unification. Note that all the evidence is quickly discarded.
We make this explicit by working over derived constraints, for which
there is no evidence. Using derived constraints also prevents solved
equalities from being written to coercion holes. If we don't do this,
then RHS coercion-hole constraints get filled in, only to get filled
in *again* when solving the implications emitted from tcRule. That's
terrible, so we avoid the problem by using derived constraints.
-}
simplifyRule :: RuleName
-> WantedConstraints -- Constraints from LHS
-> WantedConstraints -- Constraints from RHS
-> TcM [EvVar] -- LHS evidence variables,
-- See Note [Simplifying RULE constraints] in TcRule
-- NB: This consumes all simple constraints on the LHS, but not
-- any LHS implication constraints.
simplifyRule name lhs_wanted rhs_wanted
= do { -- We allow ourselves to unify environment
-- variables: runTcS runs with topTcLevel
; tc_lvl <- getTcLevel
; insoluble <- runTcSDeriveds $
do { -- First solve the LHS and *then* solve the RHS
-- See Note [Solve order for RULES]
-- See Note [Simplify *derived* constraints]
lhs_resid <- solveWanteds $ toDerivedWC lhs_wanted
; rhs_resid <- solveWanteds $ toDerivedWC rhs_wanted
; return ( insolubleWC tc_lvl lhs_resid ||
insolubleWC tc_lvl rhs_resid ) }
; zonked_lhs_simples <- zonkSimples (wc_simple lhs_wanted)
; ev_ids <- mapMaybeM (quantify_ct insoluble) $
bagToList zonked_lhs_simples
; traceTc "simplifyRule" $
vcat [ text "LHS of rule" <+> doubleQuotes (ftext name)
, text "lhs_wantd" <+> ppr lhs_wanted
, text "rhs_wantd" <+> ppr rhs_wanted
, text "zonked_lhs_simples" <+> ppr zonked_lhs_simples
, text "ev_ids" <+> ppr ev_ids
]
; return ev_ids }
where
quantify_ct insol -- Note [RULE quantification over equalities]
| insol = quantify_insol
| otherwise = quantify_normal
quantify_insol ct
| isEqPred (ctPred ct)
= return Nothing
| otherwise
= return $ Just $ ctEvId $ ctEvidence ct
quantify_normal (ctEvidence -> CtWanted { ctev_dest = dest
, ctev_pred = pred })
= case dest of -- See Note [Quantifying over coercion holes]
HoleDest hole
| EqPred NomEq t1 t2 <- classifyPredType pred
, t1 `tcEqType` t2
-> do { -- These are trivial. Don't quantify. But do fill in
-- the hole.
; fillCoercionHole hole (mkTcNomReflCo t1)
; return Nothing }
| otherwise
-> do { ev_id <- newEvVar pred
; fillCoercionHole hole (mkTcCoVarCo ev_id)
; return (Just ev_id) }
EvVarDest evar -> return (Just evar)
quantify_normal ct = pprPanic "simplifyRule.quantify_normal" (ppr ct)
| gridaphobe/ghc | compiler/typecheck/TcRules.hs | bsd-3-clause | 14,444 | 2 | 17 | 4,430 | 1,636 | 847 | 789 | 133 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.Types where
import Control.Exception
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import Data.IORef (IORef, readIORef, writeIORef, newIORef)
import Data.Typeable (Typeable)
import Data.Word (Word16)
import Network.HTTP.Types.Header
import Network.Socket (Socket)
import Network.Wai.Handler.Warp.Buffer (Buffer,BufSize)
import qualified Network.Wai.Handler.Warp.Date as D
import qualified Network.Wai.Handler.Warp.FdCache as F
import qualified Network.Wai.Handler.Warp.Timeout as T
----------------------------------------------------------------
-- | TCP port number.
type Port = Int
----------------------------------------------------------------
-- | The type for header value used with 'HeaderName'.
type HeaderValue = ByteString
hTransferEncoding :: HeaderName
hTransferEncoding = "Transfer-Encoding"
hContentRange :: HeaderName
hContentRange = "Content-Range"
hAcceptRanges :: HeaderName
hAcceptRanges = "Accept-Ranges"
hServer :: HeaderName
hServer = "Server"
----------------------------------------------------------------
-- | Error types for bad 'Request'.
data InvalidRequest = NotEnoughLines [String]
| BadFirstLine String
| NonHttp
| IncompleteHeaders
| ConnectionClosedByPeer
| OverLargeHeader
| BadProxyHeader String
deriving (Eq, Typeable)
instance Show InvalidRequest where
show (NotEnoughLines xs) = "Warp: Incomplete request headers, received: " ++ show xs
show (BadFirstLine s) = "Warp: Invalid first line of request: " ++ show s
show NonHttp = "Warp: Request line specified a non-HTTP request"
show IncompleteHeaders = "Warp: Request headers did not finish transmission"
show ConnectionClosedByPeer = "Warp: Client closed connection prematurely"
show OverLargeHeader = "Warp: Request headers too large, possible memory attack detected. Closing connection."
show (BadProxyHeader s) = "Warp: Invalid PROXY protocol header: " ++ show s
instance Exception InvalidRequest
----------------------------------------------------------------
-- | Whether or not 'ConnSendFileOverride' in 'Connection' can be
-- overridden. This is a kind of hack to keep the signature of
-- 'Connection' clean.
data ConnSendFileOverride = NotOverride -- ^ Don't override
| Override Socket -- ^ Override with this 'Socket'
----------------------------------------------------------------
-- | Data type to manipulate IO actions for connections.
data Connection = Connection
{ connSendMany :: [ByteString] -> IO ()
, connSendAll :: ByteString -> IO ()
, connSendFile :: FilePath -> Integer -> Integer -> IO () -> [ByteString] -> IO () -- ^ filepath, offset, length, hook action, HTTP headers
, connClose :: IO ()
, connRecv :: IO ByteString
, connReadBuffer :: Buffer
, connWriteBuffer :: Buffer
, connBufferSize :: BufSize
, connSendFileOverride :: ConnSendFileOverride
}
----------------------------------------------------------------
-- | Internal information.
data InternalInfo = InternalInfo {
threadHandle :: T.Handle
, fdCacher :: Maybe F.MutableFdCache
, dateCacher :: D.DateCache
}
----------------------------------------------------------------
data Source = Source !(IORef ByteString) !(IO ByteString)
mkSource :: IO ByteString -> IO Source
mkSource func = do
ref <- newIORef S.empty
return $! Source ref func
readSource :: Source -> IO ByteString
readSource (Source ref func) = do
bs <- readIORef ref
if S.null bs
then func
else do
writeIORef ref S.empty
return bs
-- | Read from a Source, ignoring any leftovers.
readSource' :: Source -> IO ByteString
readSource' (Source _ func) = func
leftoverSource :: Source -> ByteString -> IO ()
leftoverSource (Source ref _) bs = writeIORef ref bs
readLeftoverSource :: Source -> IO ByteString
readLeftoverSource (Source ref _) = readIORef ref
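-- A hedged usage sketch (added for illustration, not part of the original
-- module): a consumer that reads too much can push the surplus back with
-- 'leftoverSource'; the next 'readSource' then returns the leftover before
-- pulling from the underlying action again.
--
-- > do src <- mkSource (return "chunk")
-- >    bs  <- readSource src            -- "chunk" (from the underlying action)
-- >    leftoverSource src (S.drop 2 bs)
-- >    readSource src                   -- "unk" (the stored leftover)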
----------------------------------------------------------------
-- | What kind of transport is used for this connection?
data Transport = TCP -- ^ Plain channel: TCP
| TLS {
tlsMajorVersion :: Int
, tlsMinorVersion :: Int
              , tlsNegotiatedProtocol :: Maybe ByteString -- ^ The result of Application Layer Protocol Negotiation in RFC 7301
, tlsChiperID :: Word16
} -- ^ Encrypted channel: TLS or SSL
isTransportSecure :: Transport -> Bool
isTransportSecure TCP = False
isTransportSecure _ = True
| jberryman/wai | warp/Network/Wai/Handler/Warp/Types.hs | mit | 4,807 | 0 | 15 | 1,025 | 850 | 483 | 367 | 91 | 2 |
module PatternMatchBug4 where
main = f ["a", "a"]
f ["a", x] = x
| roberth/uu-helium | test/correct/PatternMatchBug4.hs | gpl-3.0 | 70 | 0 | 6 | 18 | 32 | 19 | 13 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateTags
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds or overwrites one or more tags for the specified Amazon EC2 resource or
-- resources. Each resource can have a maximum of 10 tags. Each tag consists of
-- a key and optional value. Tag keys must be unique per resource.
--
-- For more information about tags, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html Tagging Your Resources> in the /AmazonElastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateTags.html>
module Network.AWS.EC2.CreateTags
(
-- * Request
CreateTags
-- ** Request constructor
, createTags
-- ** Request lenses
, ct1DryRun
, ct1Resources
, ct1Tags
-- * Response
, CreateTagsResponse
-- ** Response constructor
, createTagsResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data CreateTags = CreateTags
{ _ct1DryRun :: Maybe Bool
, _ct1Resources :: List "ResourceId" Text
, _ct1Tags :: List "item" Tag
} deriving (Eq, Read, Show)
-- | 'CreateTags' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ct1DryRun' @::@ 'Maybe' 'Bool'
--
-- * 'ct1Resources' @::@ ['Text']
--
-- * 'ct1Tags' @::@ ['Tag']
--
createTags :: CreateTags
createTags = CreateTags
{ _ct1DryRun = Nothing
, _ct1Resources = mempty
, _ct1Tags = mempty
}
ct1DryRun :: Lens' CreateTags (Maybe Bool)
ct1DryRun = lens _ct1DryRun (\s a -> s { _ct1DryRun = a })
-- | The IDs of one or more resources to tag. For example, ami-1a2b3c4d.
ct1Resources :: Lens' CreateTags [Text]
ct1Resources = lens _ct1Resources (\s a -> s { _ct1Resources = a }) . _List
-- | One or more tags. The 'value' parameter is required, but if you don't want the
-- tag to have a value, specify the parameter with no value, and we set the
-- value to an empty string.
ct1Tags :: Lens' CreateTags [Tag]
ct1Tags = lens _ct1Tags (\s a -> s { _ct1Tags = a }) . _List
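-- A hedged usage sketch (added for illustration, not part of the generated
-- module): building a request that tags a hypothetical AMI. It assumes a
-- 'tag' smart constructor from 'Network.AWS.EC2.Types' taking a key and a
-- value; adjust if the actual constructor differs.
--
-- > createTags & ct1Resources .~ ["ami-1a2b3c4d"]
-- >            & ct1Tags .~ [tag "Name" "my-image"]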
data CreateTagsResponse = CreateTagsResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'CreateTagsResponse' constructor.
createTagsResponse :: CreateTagsResponse
createTagsResponse = CreateTagsResponse
instance ToPath CreateTags where
toPath = const "/"
instance ToQuery CreateTags where
toQuery CreateTags{..} = mconcat
[ "DryRun" =? _ct1DryRun
, "ResourceId" `toQueryList` _ct1Resources
, "Tag" `toQueryList` _ct1Tags
]
instance ToHeaders CreateTags
instance AWSRequest CreateTags where
type Sv CreateTags = EC2
type Rs CreateTags = CreateTagsResponse
request = post "CreateTags"
response = nullResponse CreateTagsResponse
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateTags.hs | mpl-2.0 | 3,784 | 0 | 10 | 855 | 479 | 293 | 186 | 56 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Controller
( withWiki
, withDevelApp
) where
import Wiki
import Settings
import Yesod.Static
import Yesod.Auth
import Database.Persist.GenericSql
import Data.ByteString (ByteString)
import Data.Dynamic (Dynamic, toDyn)
import Network.Wai (Application)
-- Import all relevant handler modules here.
import Handler.Topic
import Handler.CreateTopic
import Handler.CreateMap
import Handler.ShowMap
import Handler.EditMap
import Handler.Feed
import Handler.Settings
import Handler.Root
import Handler.Labels
import Handler.Browse
import Handler.Blog
import Handler.Book
import Handler.Search
import Handler.UploadDitamap
import Handler.DownloadDitamap
import Handler.UploadBlogs
import Handler.Wiki
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Wiki.hs. Please see
-- the comments there for more details.
mkYesodDispatch "Wiki" resourcesWiki
-- Some default handlers that ship with the Yesod site template. You will
-- very rarely need to modify this.
getFaviconR :: Handler ()
getFaviconR = sendFile "image/x-icon" "config/favicon.ico"
getRobotsR :: Handler RepPlain
getRobotsR = return $ RepPlain $ toContent ("User-agent: *" :: ByteString)
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
withWiki :: Text -> (Application -> IO a) -> IO a
withWiki approot' f = Settings.withConnectionPool $ \p -> do
runConnectionPool (runMigration migrateAll) p
s <- static Settings.staticdir
let h = Wiki s p approot'
toWaiApp h >>= f
withDevelApp :: Dynamic
withDevelApp = toDyn (withWiki "http://10.0.0.3:3000" :: (Application -> IO ()) -> IO ())
| snoyberg/yesoddocs | Controller.hs | bsd-2-clause | 1,994 | 0 | 12 | 293 | 366 | 204 | 162 | 45 | 1 |
{-# LANGUAGE ViewPatterns #-}
module Main ( main ) where
import Data.Map ( Map )
import qualified Data.Map as M
import Data.Monoid
import System.Environment ( getArgs, withArgs )
import System.FilePath
import Test.HUnit ( assertEqual )
import LLVM.Analysis
import LLVM.Analysis.BlockReturnValue
import LLVM.Analysis.Dominance
import LLVM.Analysis.CFG
import LLVM.Analysis.Util.Testing
import LLVM.Parse
main :: IO ()
main = do
args <- getArgs
let pattern = case args of
[] -> "tests/block-return/*.c"
[infile] -> infile
_ -> error "Only one argument allowed"
testDescriptors = [ TestDescriptor { testPattern = pattern
, testExpectedMapping = (<.> "expected")
, testResultBuilder = blockRetMap
, testResultComparator = assertEqual
}
]
withArgs [] $ testAgainstExpected opts parser testDescriptors
where
opts = [ "-mem2reg", "-basicaa", "-gvn" ]
parser = parseLLVMFile defaultParserOptions
data Bundle = Bundle Function PostdominatorTree CFG
instance HasFunction Bundle where
getFunction (Bundle f _ _) = f
instance HasPostdomTree Bundle where
getPostdomTree (Bundle _ pdt _) = pdt
instance HasCFG Bundle where
getCFG (Bundle _ _ cfg) = cfg
-- Take the first function in the module and summarize it (map of
-- block names to return values that are constant ints)
blockRetMap :: Module -> Map String Int
blockRetMap m = foldr (recordConstIntReturn brs) mempty blocks
where
f1 : _ = moduleDefinedFunctions m
blocks = functionBody f1
brs = labelBlockReturns bdl
cfg = controlFlowGraph f1
pdt = postdominatorTree cfg
bdl = Bundle f1 pdt cfg
recordConstIntReturn :: BlockReturns -> BasicBlock -> Map String Int -> Map String Int
recordConstIntReturn brs bb m =
case blockReturn brs bb of
Just (valueContent' -> ConstantC ConstantInt { constantIntValue = iv }) ->
M.insert (show (basicBlockName bb)) (fromIntegral iv) m
_ -> m | travitch/llvm-analysis | tests/BlockReturnTests.hs | bsd-3-clause | 2,110 | 0 | 14 | 560 | 534 | 286 | 248 | 49 | 3 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE CPP #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE BangPatterns #-}
#if __GLASGOW_HASKELL__ < 800
-- For CallStack business
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE FlexibleContexts #-}
#endif
-- | Highly random utility functions
--
module Util (
-- * Flags dependent on the compiler build
ghciSupported, debugIsOn, ncgDebugIsOn,
ghciTablesNextToCode,
isWindowsHost, isDarwinHost,
-- * General list processing
zipEqual, zipWithEqual, zipWith3Equal, zipWith4Equal,
zipLazy, stretchZipWith, zipWithAndUnzip,
zipWithLazy, zipWith3Lazy,
filterByList, filterByLists, partitionByList,
unzipWith,
mapFst, mapSnd, chkAppend,
mapAndUnzip, mapAndUnzip3, mapAccumL2,
nOfThem, filterOut, partitionWith, splitEithers,
dropWhileEndLE, spanEnd,
foldl1', foldl2, count, all2,
lengthExceeds, lengthIs, lengthAtLeast,
listLengthCmp, atLength,
equalLength, compareLength, leLength,
isSingleton, only, singleton,
notNull, snocView,
isIn, isn'tIn,
chunkList,
changeLast,
-- * Tuples
fstOf3, sndOf3, thdOf3,
firstM, first3M,
fst3, snd3, third3,
uncurry3,
liftFst, liftSnd,
-- * List operations controlled by another list
takeList, dropList, splitAtList, split,
dropTail, capitalise,
-- * For loop
nTimes,
-- * Sorting
sortWith, minWith, nubSort,
-- * Comparisons
isEqual, eqListBy, eqMaybeBy,
thenCmp, cmpList,
removeSpaces,
(<&&>), (<||>),
-- * Edit distance
fuzzyMatch, fuzzyLookup,
-- * Transitive closures
transitiveClosure,
-- * Strictness
seqList,
-- * Module names
looksLikeModuleName,
looksLikePackageName,
-- * Argument processing
getCmd, toCmdArgs, toArgs,
-- * Integers
exactLog2,
-- * Floating point
readRational,
-- * read helpers
maybeRead, maybeReadFuzzy,
-- * IO-ish utilities
doesDirNameExist,
getModificationUTCTime,
modificationTimeIfExists,
hSetTranslit,
global, consIORef, globalM,
sharedGlobal, sharedGlobalM,
-- * Filenames and paths
Suffix,
splitLongestPrefix,
escapeSpaces,
Direction(..), reslash,
makeRelativeTo,
-- * Utils for defining Data instances
abstractConstr, abstractDataType, mkNoRepType,
-- * Utils for printing C code
charToC,
-- * Hashing
hashString,
-- * Call stacks
GHC.Stack.CallStack,
HasCallStack,
HasDebugCallStack,
prettyCurrentCallStack,
) where
#include "HsVersions.h"
import Exception
import Panic
import Data.Data
import Data.IORef ( IORef, newIORef, atomicModifyIORef' )
import System.IO.Unsafe ( unsafePerformIO )
import Data.List hiding (group)
import GHC.Exts
import qualified GHC.Stack
import Control.Applicative ( liftA2 )
import Control.Monad ( liftM )
import GHC.IO.Encoding (mkTextEncoding, textEncodingName)
import GHC.Conc.Sync ( sharedCAF )
import System.IO (Handle, hGetEncoding, hSetEncoding)
import System.IO.Error as IO ( isDoesNotExistError )
import System.Directory ( doesDirectoryExist, getModificationTime )
import System.FilePath
import Data.Char ( isUpper, isAlphaNum, isSpace, chr, ord, isDigit, toUpper)
import Data.Int
import Data.Ratio ( (%) )
import Data.Ord ( comparing )
import Data.Bits
import Data.Word
import qualified Data.IntMap as IM
import qualified Data.Set as Set
import Data.Time
infixr 9 `thenCmp`
{-
************************************************************************
* *
\subsection{Is DEBUG on, are we on Windows, etc?}
* *
************************************************************************
These booleans are global constants, set by CPP flags. They allow us to
recompile a single module (this one) to change whether or not debug output
appears. They sometimes let us avoid even running CPP elsewhere.
It's important that the flags are literal constants (True/False). Then,
with -0, tests of the flags in other modules will simplify to the correct
branch of the conditional, thereby dropping debug code altogether when
the flags are off.
-}
ghciSupported :: Bool
#ifdef GHCI
ghciSupported = True
#else
ghciSupported = False
#endif
debugIsOn :: Bool
#ifdef DEBUG
debugIsOn = True
#else
debugIsOn = False
#endif
ncgDebugIsOn :: Bool
#ifdef NCG_DEBUG
ncgDebugIsOn = True
#else
ncgDebugIsOn = False
#endif
ghciTablesNextToCode :: Bool
#ifdef GHCI_TABLES_NEXT_TO_CODE
ghciTablesNextToCode = True
#else
ghciTablesNextToCode = False
#endif
isWindowsHost :: Bool
#ifdef mingw32_HOST_OS
isWindowsHost = True
#else
isWindowsHost = False
#endif
isDarwinHost :: Bool
#ifdef darwin_HOST_OS
isDarwinHost = True
#else
isDarwinHost = False
#endif
{-
************************************************************************
* *
\subsection{A for loop}
* *
************************************************************************
-}
-- | Compose a function with itself n times. (nth rather than twice)
nTimes :: Int -> (a -> a) -> (a -> a)
nTimes 0 _ = id
nTimes 1 f = f
nTimes n f = f . nTimes (n-1) f
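-- Illustrative example (not from the original source):
--
-- > nTimes 3 (*2) 1 == 8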
fstOf3 :: (a,b,c) -> a
sndOf3 :: (a,b,c) -> b
thdOf3 :: (a,b,c) -> c
fstOf3 (a,_,_) = a
sndOf3 (_,b,_) = b
thdOf3 (_,_,c) = c
fst3 :: (a -> d) -> (a, b, c) -> (d, b, c)
fst3 f (a, b, c) = (f a, b, c)
snd3 :: (b -> d) -> (a, b, c) -> (a, d, c)
snd3 f (a, b, c) = (a, f b, c)
third3 :: (c -> d) -> (a, b, c) -> (a, b, d)
third3 f (a, b, c) = (a, b, f c)
uncurry3 :: (a -> b -> c -> d) -> (a, b, c) -> d
uncurry3 f (a, b, c) = f a b c
liftFst :: (a -> b) -> (a, c) -> (b, c)
liftFst f (a,c) = (f a, c)
liftSnd :: (a -> b) -> (c, a) -> (c, b)
liftSnd f (c,a) = (c, f a)
firstM :: Monad m => (a -> m c) -> (a, b) -> m (c, b)
firstM f (x, y) = liftM (\x' -> (x', y)) (f x)
first3M :: Monad m => (a -> m d) -> (a, b, c) -> m (d, b, c)
first3M f (x, y, z) = liftM (\x' -> (x', y, z)) (f x)
{-
************************************************************************
* *
\subsection[Utils-lists]{General list processing}
* *
************************************************************************
-}
filterOut :: (a->Bool) -> [a] -> [a]
-- ^ Like filter, only it reverses the sense of the test
filterOut _ [] = []
filterOut p (x:xs) | p x = filterOut p xs
| otherwise = x : filterOut p xs
partitionWith :: (a -> Either b c) -> [a] -> ([b], [c])
-- ^ Uses a function to determine which of two output lists an input element should join
partitionWith _ [] = ([],[])
partitionWith f (x:xs) = case f x of
Left b -> (b:bs, cs)
Right c -> (bs, c:cs)
where (bs,cs) = partitionWith f xs
splitEithers :: [Either a b] -> ([a], [b])
-- ^ Teases a list of 'Either's apart into two lists
splitEithers [] = ([],[])
splitEithers (e : es) = case e of
Left x -> (x:xs, ys)
Right y -> (xs, y:ys)
where (xs,ys) = splitEithers es
chkAppend :: [a] -> [a] -> [a]
-- Checks for the second argument being empty
-- Used in situations where an empty second argument is common
chkAppend xs ys
| null ys = xs
| otherwise = xs ++ ys
{-
A paranoid @zip@ (and some @zipWith@ friends) that checks the lists
are of equal length. Alastair Reid thinks this should only happen if
DEBUGging on; hey, why not?
-}
zipEqual :: String -> [a] -> [b] -> [(a,b)]
zipWithEqual :: String -> (a->b->c) -> [a]->[b]->[c]
zipWith3Equal :: String -> (a->b->c->d) -> [a]->[b]->[c]->[d]
zipWith4Equal :: String -> (a->b->c->d->e) -> [a]->[b]->[c]->[d]->[e]
#ifndef DEBUG
zipEqual _ = zip
zipWithEqual _ = zipWith
zipWith3Equal _ = zipWith3
zipWith4Equal _ = zipWith4
#else
zipEqual _ [] [] = []
zipEqual msg (a:as) (b:bs) = (a,b) : zipEqual msg as bs
zipEqual msg _ _ = panic ("zipEqual: unequal lists:"++msg)
zipWithEqual msg z (a:as) (b:bs)= z a b : zipWithEqual msg z as bs
zipWithEqual _ _ [] [] = []
zipWithEqual msg _ _ _ = panic ("zipWithEqual: unequal lists:"++msg)
zipWith3Equal msg z (a:as) (b:bs) (c:cs)
= z a b c : zipWith3Equal msg z as bs cs
zipWith3Equal _ _ [] [] [] = []
zipWith3Equal msg _ _ _ _ = panic ("zipWith3Equal: unequal lists:"++msg)
zipWith4Equal msg z (a:as) (b:bs) (c:cs) (d:ds)
= z a b c d : zipWith4Equal msg z as bs cs ds
zipWith4Equal _ _ [] [] [] [] = []
zipWith4Equal msg _ _ _ _ _ = panic ("zipWith4Equal: unequal lists:"++msg)
#endif
-- | 'zipLazy' is a kind of 'zip' that is lazy in the second list (observe the ~)
zipLazy :: [a] -> [b] -> [(a,b)]
zipLazy [] _ = []
zipLazy (x:xs) ~(y:ys) = (x,y) : zipLazy xs ys
-- | 'zipWithLazy' is like 'zipWith' but is lazy in the second list.
-- The length of the output is always the same as the length of the first
-- list.
zipWithLazy :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWithLazy _ [] _ = []
zipWithLazy f (a:as) ~(b:bs) = f a b : zipWithLazy f as bs
-- | 'zipWith3Lazy' is like 'zipWith3' but is lazy in the second and third lists.
-- The length of the output is always the same as the length of the first
-- list.
zipWith3Lazy :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
zipWith3Lazy _ [] _ _ = []
zipWith3Lazy f (a:as) ~(b:bs) ~(c:cs) = f a b c : zipWith3Lazy f as bs cs
-- | 'filterByList' takes a list of Bools and a list of some elements and
-- filters out those elements for which the corresponding value in the list of
-- Bools is False. This function does not check whether the lists have equal
-- length.
filterByList :: [Bool] -> [a] -> [a]
filterByList (True:bs) (x:xs) = x : filterByList bs xs
filterByList (False:bs) (_:xs) = filterByList bs xs
filterByList _ _ = []
-- | 'filterByLists' takes a list of Bools and two lists as input, and
-- outputs a new list consisting of elements from the last two input lists. For
-- each Bool in the list, if it is 'True', then it takes an element from the
-- former list. If it is 'False', it takes an element from the latter list.
-- The elements taken correspond to the index of the Bool in its list.
-- For example:
--
-- @
-- filterByLists [True, False, True, False] \"abcd\" \"wxyz\" = \"axcz\"
-- @
--
-- This function does not check whether the lists have equal length.
filterByLists :: [Bool] -> [a] -> [a] -> [a]
filterByLists (True:bs) (x:xs) (_:ys) = x : filterByLists bs xs ys
filterByLists (False:bs) (_:xs) (y:ys) = y : filterByLists bs xs ys
filterByLists _ _ _ = []
-- | 'partitionByList' takes a list of Bools and a list of some elements and
-- partitions the list according to the list of Bools. Elements corresponding
-- to 'True' go to the left; elements corresponding to 'False' go to the right.
-- For example, @partitionByList [True, False, True] [1,2,3] == ([1,3], [2])@
-- This function does not check whether the lists have equal
-- length.
partitionByList :: [Bool] -> [a] -> ([a], [a])
partitionByList = go [] []
where
go trues falses (True : bs) (x : xs) = go (x:trues) falses bs xs
go trues falses (False : bs) (x : xs) = go trues (x:falses) bs xs
go trues falses _ _ = (reverse trues, reverse falses)
stretchZipWith :: (a -> Bool) -> b -> (a->b->c) -> [a] -> [b] -> [c]
-- ^ @stretchZipWith p z f xs ys@ stretches @ys@ by inserting @z@ in
-- the places where @p@ returns @True@
stretchZipWith _ _ _ [] _ = []
stretchZipWith p z f (x:xs) ys
| p x = f x z : stretchZipWith p z f xs ys
| otherwise = case ys of
[] -> []
(y:ys) -> f x y : stretchZipWith p z f xs ys
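-- Illustrative example (not from the original source): a '-' is produced for
-- every element of the first list satisfying the predicate, without consuming
-- an element of the second list:
--
-- > stretchZipWith (== 0) '-' (\_ c -> c) [0,1,0,2] "ab" == "-a-b"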
mapFst :: (a->c) -> [(a,b)] -> [(c,b)]
mapSnd :: (b->c) -> [(a,b)] -> [(a,c)]
mapFst f xys = [(f x, y) | (x,y) <- xys]
mapSnd f xys = [(x, f y) | (x,y) <- xys]
mapAndUnzip :: (a -> (b, c)) -> [a] -> ([b], [c])
mapAndUnzip _ [] = ([], [])
mapAndUnzip f (x:xs)
= let (r1, r2) = f x
(rs1, rs2) = mapAndUnzip f xs
in
(r1:rs1, r2:rs2)
mapAndUnzip3 :: (a -> (b, c, d)) -> [a] -> ([b], [c], [d])
mapAndUnzip3 _ [] = ([], [], [])
mapAndUnzip3 f (x:xs)
= let (r1, r2, r3) = f x
(rs1, rs2, rs3) = mapAndUnzip3 f xs
in
(r1:rs1, r2:rs2, r3:rs3)
zipWithAndUnzip :: (a -> b -> (c,d)) -> [a] -> [b] -> ([c],[d])
zipWithAndUnzip f (a:as) (b:bs)
= let (r1, r2) = f a b
(rs1, rs2) = zipWithAndUnzip f as bs
in
(r1:rs1, r2:rs2)
zipWithAndUnzip _ _ _ = ([],[])
mapAccumL2 :: (s1 -> s2 -> a -> (s1, s2, b)) -> s1 -> s2 -> [a] -> (s1, s2, [b])
mapAccumL2 f s1 s2 xs = (s1', s2', ys)
where ((s1', s2'), ys) = mapAccumL (\(s1, s2) x -> case f s1 s2 x of
(s1', s2', y) -> ((s1', s2'), y))
(s1, s2) xs
nOfThem :: Int -> a -> [a]
nOfThem n thing = replicate n thing
-- | @atLength atLenPred atEnd ls n@ unravels list @ls@ to position @n@. Precisely:
--
-- @
--  atLength atLenPred atEnd ls n
--   | n < 0          = atLenPred ls
--   | length ls < n  = atEnd
--   | otherwise      = atLenPred (drop n ls)
-- @
atLength :: ([a] -> b) -- Called when length ls >= n, passed (drop n ls)
-- NB: arg passed to this function may be []
-> b -- Called when length ls < n
-> [a]
-> Int
-> b
atLength atLenPred atEnd ls0 n0
| n0 < 0 = atLenPred ls0
| otherwise = go n0 ls0
where
-- go's first arg n >= 0
go 0 ls = atLenPred ls
go _ [] = atEnd -- n > 0 here
go n (_:xs) = go (n-1) xs
-- Some special cases of atLength:
-- | @(lengthExceeds xs n) = (length xs > n)@
lengthExceeds :: [a] -> Int -> Bool
lengthExceeds lst n
| n < 0
= True
| otherwise
= atLength notNull False lst n
lengthAtLeast :: [a] -> Int -> Bool
lengthAtLeast = atLength (const True) False
-- | @(lengthIs xs n) = (length xs == n)@
lengthIs :: [a] -> Int -> Bool
lengthIs lst n
| n < 0
= False
| otherwise
= atLength null False lst n
listLengthCmp :: [a] -> Int -> Ordering
listLengthCmp = atLength atLen atEnd
where
atEnd = LT -- Not yet seen 'n' elts, so list length is < n.
atLen [] = EQ
atLen _ = GT
equalLength :: [a] -> [b] -> Bool
equalLength [] [] = True
equalLength (_:xs) (_:ys) = equalLength xs ys
equalLength _ _ = False
compareLength :: [a] -> [b] -> Ordering
compareLength [] [] = EQ
compareLength (_:xs) (_:ys) = compareLength xs ys
compareLength [] _ = LT
compareLength _ [] = GT
leLength :: [a] -> [b] -> Bool
-- ^ True if length xs <= length ys
leLength xs ys = case compareLength xs ys of
LT -> True
EQ -> True
GT -> False
----------------------------
singleton :: a -> [a]
singleton x = [x]
isSingleton :: [a] -> Bool
isSingleton [_] = True
isSingleton _ = False
notNull :: [a] -> Bool
notNull [] = False
notNull _ = True
only :: [a] -> a
#ifdef DEBUG
only [a] = a
#else
only (a:_) = a
#endif
only _ = panic "Util: only"
-- Debugging/specialising versions of \tr{elem} and \tr{notElem}
isIn, isn'tIn :: Eq a => String -> a -> [a] -> Bool
# ifndef DEBUG
isIn _msg x ys = x `elem` ys
isn'tIn _msg x ys = x `notElem` ys
# else /* DEBUG */
isIn msg x ys
= elem100 0 x ys
where
elem100 :: Eq a => Int -> a -> [a] -> Bool
elem100 _ _ [] = False
elem100 i x (y:ys)
| i > 100 = trace ("Over-long elem in " ++ msg) (x `elem` (y:ys))
| otherwise = x == y || elem100 (i + 1) x ys
isn'tIn msg x ys
= notElem100 0 x ys
where
notElem100 :: Eq a => Int -> a -> [a] -> Bool
notElem100 _ _ [] = True
notElem100 i x (y:ys)
| i > 100 = trace ("Over-long notElem in " ++ msg) (x `notElem` (y:ys))
| otherwise = x /= y && notElem100 (i + 1) x ys
# endif /* DEBUG */
-- | Split a list into chunks of /n/ elements
chunkList :: Int -> [a] -> [[a]]
chunkList _ [] = []
chunkList n xs = as : chunkList n bs where (as,bs) = splitAt n xs
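-- Illustrative example (not from the original source):
--
-- > chunkList 3 [1..7] == [[1,2,3],[4,5,6],[7]]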
-- | Replace the last element of a list with another element.
changeLast :: [a] -> a -> [a]
changeLast [] _ = panic "changeLast"
changeLast [_] x = [x]
changeLast (x:xs) x' = x : changeLast xs x'
{-
************************************************************************
* *
\subsubsection{Sort utils}
* *
************************************************************************
-}
minWith :: Ord b => (a -> b) -> [a] -> a
minWith get_key xs = ASSERT( not (null xs) )
head (sortWith get_key xs)
nubSort :: Ord a => [a] -> [a]
nubSort = Set.toAscList . Set.fromList
{-
************************************************************************
* *
\subsection[Utils-transitive-closure]{Transitive closure}
* *
************************************************************************
This algorithm for transitive closure is straightforward, albeit quadratic.
-}
transitiveClosure :: (a -> [a]) -- Successor function
-> (a -> a -> Bool) -- Equality predicate
-> [a]
-> [a] -- The transitive closure
transitiveClosure succ eq xs
= go [] xs
where
go done [] = done
go done (x:xs) | x `is_in` done = go done xs
| otherwise = go (x:done) (succ x ++ xs)
_ `is_in` [] = False
x `is_in` (y:ys) | eq x y = True
| otherwise = x `is_in` ys
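-- Illustrative example (not from the original source), using repeated halving
-- as the successor function:
--
-- > transitiveClosure (\x -> [x `div` 2 | x > 1]) (==) [8] == [1,2,4,8]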
{-
************************************************************************
* *
\subsection[Utils-accum]{Accumulating}
* *
************************************************************************
A combination of foldl with zip. It works with equal length lists.
-}
foldl2 :: (acc -> a -> b -> acc) -> acc -> [a] -> [b] -> acc
foldl2 _ z [] [] = z
foldl2 k z (a:as) (b:bs) = foldl2 k (k z a b) as bs
foldl2 _ _ _ _ = panic "Util: foldl2"
all2 :: (a -> b -> Bool) -> [a] -> [b] -> Bool
-- True if the lists are the same length, and
-- all corresponding elements satisfy the predicate
all2 _ [] [] = True
all2 p (x:xs) (y:ys) = p x y && all2 p xs ys
all2 _ _ _ = False
-- Count the number of times a predicate is true
count :: (a -> Bool) -> [a] -> Int
count p = go 0
where go !n [] = n
go !n (x:xs) | p x = go (n+1) xs
| otherwise = go n xs
{-
@splitAt@, @take@, and @drop@ but with length of another
list giving the break-off point:
-}
takeList :: [b] -> [a] -> [a]
-- (takeList as bs) trims bs to be the same length
-- as as, unless as is longer, in which case it's a no-op
takeList [] _ = []
takeList (_:xs) ls =
case ls of
[] -> []
(y:ys) -> y : takeList xs ys
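-- Illustrative examples (not from the original source):
--
-- > takeList "ab" [1,2,3] == [1,2]
-- > takeList "abcd" [1,2] == [1,2]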
dropList :: [b] -> [a] -> [a]
dropList [] xs = xs
dropList _ xs@[] = xs
dropList (_:xs) (_:ys) = dropList xs ys
splitAtList :: [b] -> [a] -> ([a], [a])
splitAtList [] xs = ([], xs)
splitAtList _ xs@[] = (xs, xs)
splitAtList (_:xs) (y:ys) = (y:ys', ys'')
where
(ys', ys'') = splitAtList xs ys
-- drop from the end of a list
dropTail :: Int -> [a] -> [a]
-- Specification: dropTail n = reverse . drop n . reverse
-- Better implementation due to Joachim Breitner
-- http://www.joachim-breitner.de/blog/archives/600-On-taking-the-last-n-elements-of-a-list.html
dropTail n xs
= go (drop n xs) xs
where
go (_:ys) (x:xs) = x : go ys xs
go _ _ = [] -- Stop when ys runs out
-- It'll always run out before xs does
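-- Illustrative example (not from the original source):
--
-- > dropTail 2 [1..5] == [1,2,3]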
-- dropWhile from the end of a list. This is similar to Data.List.dropWhileEnd,
-- but is lazy in the elements and strict in the spine. For reasonably short lists,
-- such as path names and typical lines of text, dropWhileEndLE is generally
-- faster than dropWhileEnd. Its advantage is magnified when the predicate is
-- expensive--using dropWhileEndLE isSpace to strip the space off a line of text
-- is generally much faster than using dropWhileEnd isSpace for that purpose.
-- Specification: dropWhileEndLE p = reverse . dropWhile p . reverse
-- Pay attention to the short-circuit (&&)! The order of its arguments is the only
-- difference between dropWhileEnd and dropWhileEndLE.
dropWhileEndLE :: (a -> Bool) -> [a] -> [a]
dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) []
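-- Illustrative example (not from the original source):
--
-- > dropWhileEndLE isSpace "foo  " == "foo"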
-- | @spanEnd p l == reverse (span p (reverse l))@. The first list
-- returned actually comes after the second list (when you look at the
-- input list).
spanEnd :: (a -> Bool) -> [a] -> ([a], [a])
spanEnd p l = go l [] [] l
where go yes _rev_yes rev_no [] = (yes, reverse rev_no)
go yes rev_yes rev_no (x:xs)
| p x = go yes (x : rev_yes) rev_no xs
| otherwise = go xs [] (x : rev_yes ++ rev_no) xs
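-- Illustrative example (not from the original source): the trailing run
-- satisfying the predicate comes first in the result pair:
--
-- > spanEnd isSpace "ab  " == ("  ", "ab")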
snocView :: [a] -> Maybe ([a],a)
-- Split off the last element
snocView [] = Nothing
snocView xs = go [] xs
where
-- Invariant: second arg is non-empty
go acc [x] = Just (reverse acc, x)
go acc (x:xs) = go (x:acc) xs
go _ [] = panic "Util: snocView"
split :: Char -> String -> [String]
split c s = case rest of
[] -> [chunk]
_:rest -> chunk : split c rest
where (chunk, rest) = break (==c) s
-- | Convert a word to title case by capitalising the first letter
capitalise :: String -> String
capitalise [] = []
capitalise (c:cs) = toUpper c : cs
{-
************************************************************************
* *
\subsection[Utils-comparison]{Comparisons}
* *
************************************************************************
-}
isEqual :: Ordering -> Bool
-- Often used in (isEqual (a `compare` b))
isEqual GT = False
isEqual EQ = True
isEqual LT = False
thenCmp :: Ordering -> Ordering -> Ordering
{-# INLINE thenCmp #-}
thenCmp EQ ordering = ordering
thenCmp ordering _ = ordering
eqListBy :: (a->a->Bool) -> [a] -> [a] -> Bool
eqListBy _ [] [] = True
eqListBy eq (x:xs) (y:ys) = eq x y && eqListBy eq xs ys
eqListBy _ _ _ = False
eqMaybeBy :: (a ->a->Bool) -> Maybe a -> Maybe a -> Bool
eqMaybeBy _ Nothing Nothing = True
eqMaybeBy eq (Just x) (Just y) = eq x y
eqMaybeBy _ _ _ = False
cmpList :: (a -> a -> Ordering) -> [a] -> [a] -> Ordering
-- `cmpList' uses a user-specified comparer
cmpList _ [] [] = EQ
cmpList _ [] _ = LT
cmpList _ _ [] = GT
cmpList cmp (a:as) (b:bs)
= case cmp a b of { EQ -> cmpList cmp as bs; xxx -> xxx }
removeSpaces :: String -> String
removeSpaces = dropWhileEndLE isSpace . dropWhile isSpace
-- Boolean operators lifted to Applicative
(<&&>) :: Applicative f => f Bool -> f Bool -> f Bool
(<&&>) = liftA2 (&&)
infixr 3 <&&> -- same as (&&)
(<||>) :: Applicative f => f Bool -> f Bool -> f Bool
(<||>) = liftA2 (||)
infixr 2 <||> -- same as (||)
{-
************************************************************************
* *
\subsection{Edit distance}
* *
************************************************************************
-}
-- | Find the "restricted" Damerau-Levenshtein edit distance between two strings.
-- See: <http://en.wikipedia.org/wiki/Damerau-Levenshtein_distance>.
-- Based on the algorithm presented in "A Bit-Vector Algorithm for Computing
-- Levenshtein and Damerau Edit Distances" in PSC'02 (Heikki Hyyro).
-- See http://www.cs.uta.fi/~helmu/pubs/psc02.pdf and
-- http://www.cs.uta.fi/~helmu/pubs/PSCerr.html for an explanation
restrictedDamerauLevenshteinDistance :: String -> String -> Int
restrictedDamerauLevenshteinDistance str1 str2
= restrictedDamerauLevenshteinDistanceWithLengths m n str1 str2
where
m = length str1
n = length str2
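-- Illustrative examples (not from the original source): an insertion and an
-- adjacent transposition each count as a single edit:
--
-- > restrictedDamerauLevenshteinDistance "cat" "cart" == 1
-- > restrictedDamerauLevenshteinDistance "abcd" "acbd" == 1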
restrictedDamerauLevenshteinDistanceWithLengths
:: Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistanceWithLengths m n str1 str2
| m <= n
= if n <= 32 -- n must be larger so this check is sufficient
then restrictedDamerauLevenshteinDistance' (undefined :: Word32) m n str1 str2
else restrictedDamerauLevenshteinDistance' (undefined :: Integer) m n str1 str2
| otherwise
= if m <= 32 -- m must be larger so this check is sufficient
then restrictedDamerauLevenshteinDistance' (undefined :: Word32) n m str2 str1
else restrictedDamerauLevenshteinDistance' (undefined :: Integer) n m str2 str1
restrictedDamerauLevenshteinDistance'
:: (Bits bv, Num bv) => bv -> Int -> Int -> String -> String -> Int
restrictedDamerauLevenshteinDistance' _bv_dummy m n str1 str2
| [] <- str1 = n
| otherwise = extractAnswer $
foldl' (restrictedDamerauLevenshteinDistanceWorker
(matchVectors str1) top_bit_mask vector_mask)
(0, 0, m_ones, 0, m) str2
where
m_ones@vector_mask = (2 ^ m) - 1
top_bit_mask = (1 `shiftL` (m - 1)) `asTypeOf` _bv_dummy
extractAnswer (_, _, _, _, distance) = distance
restrictedDamerauLevenshteinDistanceWorker
:: (Bits bv, Num bv) => IM.IntMap bv -> bv -> bv
-> (bv, bv, bv, bv, Int) -> Char -> (bv, bv, bv, bv, Int)
restrictedDamerauLevenshteinDistanceWorker str1_mvs top_bit_mask vector_mask
(pm, d0, vp, vn, distance) char2
= seq str1_mvs $ seq top_bit_mask $ seq vector_mask $
seq pm' $ seq d0' $ seq vp' $ seq vn' $
seq distance'' $ seq char2 $
(pm', d0', vp', vn', distance'')
where
pm' = IM.findWithDefault 0 (ord char2) str1_mvs
d0' = ((((sizedComplement vector_mask d0) .&. pm') `shiftL` 1) .&. pm)
.|. ((((pm' .&. vp) + vp) .&. vector_mask) `xor` vp) .|. pm' .|. vn
-- No need to mask the shiftL because of the restricted range of pm
hp' = vn .|. sizedComplement vector_mask (d0' .|. vp)
hn' = d0' .&. vp
hp'_shift = ((hp' `shiftL` 1) .|. 1) .&. vector_mask
hn'_shift = (hn' `shiftL` 1) .&. vector_mask
vp' = hn'_shift .|. sizedComplement vector_mask (d0' .|. hp'_shift)
vn' = d0' .&. hp'_shift
distance' = if hp' .&. top_bit_mask /= 0 then distance + 1 else distance
distance'' = if hn' .&. top_bit_mask /= 0 then distance' - 1 else distance'
sizedComplement :: Bits bv => bv -> bv -> bv
sizedComplement vector_mask vect = vector_mask `xor` vect
matchVectors :: (Bits bv, Num bv) => String -> IM.IntMap bv
matchVectors = snd . foldl' go (0 :: Int, IM.empty)
where
go (ix, im) char = let ix' = ix + 1
im' = IM.insertWith (.|.) (ord char) (2 ^ ix) im
in seq ix' $ seq im' $ (ix', im')
{-# SPECIALIZE INLINE restrictedDamerauLevenshteinDistance'
:: Word32 -> Int -> Int -> String -> String -> Int #-}
{-# SPECIALIZE INLINE restrictedDamerauLevenshteinDistance'
:: Integer -> Int -> Int -> String -> String -> Int #-}
{-# SPECIALIZE restrictedDamerauLevenshteinDistanceWorker
:: IM.IntMap Word32 -> Word32 -> Word32
-> (Word32, Word32, Word32, Word32, Int)
-> Char -> (Word32, Word32, Word32, Word32, Int) #-}
{-# SPECIALIZE restrictedDamerauLevenshteinDistanceWorker
:: IM.IntMap Integer -> Integer -> Integer
-> (Integer, Integer, Integer, Integer, Int)
-> Char -> (Integer, Integer, Integer, Integer, Int) #-}
{-# SPECIALIZE INLINE sizedComplement :: Word32 -> Word32 -> Word32 #-}
{-# SPECIALIZE INLINE sizedComplement :: Integer -> Integer -> Integer #-}
{-# SPECIALIZE matchVectors :: String -> IM.IntMap Word32 #-}
{-# SPECIALIZE matchVectors :: String -> IM.IntMap Integer #-}
fuzzyMatch :: String -> [String] -> [String]
fuzzyMatch key vals = fuzzyLookup key [(v,v) | v <- vals]
-- | Search for possible matches to the user's input in the given list,
-- returning a small number of ranked results
fuzzyLookup :: String -> [(String,a)] -> [a]
fuzzyLookup user_entered possibilities
  = map fst $ take mAX_RESULTS $ sortBy (comparing snd)
    [ (poss_val, distance) | (poss_str, poss_val) <- possibilities
    , let distance = restrictedDamerauLevenshteinDistance
                         poss_str user_entered
    , distance <= fuzzy_threshold ]
where
    -- Work out an appropriate match threshold:
    -- We report a candidate if its edit distance is <= the threshold.
    -- The threshold is set to about a quarter of the # of characters the user entered
-- Length Threshold
-- 1 0 -- Don't suggest *any* candidates
-- 2 1 -- for single-char identifiers
-- 3 1
-- 4 1
-- 5 1
-- 6 2
--
fuzzy_threshold = truncate $ fromIntegral (length user_entered + 2) / (4 :: Rational)
mAX_RESULTS = 3
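-- Illustrative example (not from the original source): with a 5-character
-- input the threshold is 1, so only "filter" is close enough:
--
-- > fuzzyMatch "filtr" ["filter", "filtering", "map"] == ["filter"]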
{-
************************************************************************
* *
\subsection[Utils-pairs]{Pairs}
* *
************************************************************************
-}
unzipWith :: (a -> b -> c) -> [(a, b)] -> [c]
unzipWith f pairs = map ( \ (a, b) -> f a b ) pairs
seqList :: [a] -> b -> b
seqList [] b = b
seqList (x:xs) b = x `seq` seqList xs b
{-
************************************************************************
* *
Globals and the RTS
* *
************************************************************************
When a plugin is loaded, it currently gets linked against a *newly
loaded* copy of the GHC package. This would not be a problem, except
that the new copy has its own mutable state that is not shared with
that state that has already been initialized by the original GHC
package.
(Note that if the GHC executable was dynamically linked this
wouldn't be a problem, because we could share the GHC library it
links to; this is only a problem if DYNAMIC_GHC_PROGRAMS=NO.)
The solution is to make use of @sharedCAF@ through @sharedGlobal@
for globals that are shared between multiple copies of ghc packages.
-}
-- Global variables:
global :: a -> IORef a
global a = unsafePerformIO (newIORef a)
consIORef :: IORef [a] -> a -> IO ()
consIORef var x = do
atomicModifyIORef' var (\xs -> (x:xs,()))
globalM :: IO a -> IORef a
globalM ma = unsafePerformIO (ma >>= newIORef)
-- Shared global variables:
sharedGlobal :: a -> (Ptr (IORef a) -> IO (Ptr (IORef a))) -> IORef a
sharedGlobal a get_or_set = unsafePerformIO $
newIORef a >>= flip sharedCAF get_or_set
sharedGlobalM :: IO a -> (Ptr (IORef a) -> IO (Ptr (IORef a))) -> IORef a
sharedGlobalM ma get_or_set = unsafePerformIO $
ma >>= newIORef >>= flip sharedCAF get_or_set
-- Module names:
looksLikeModuleName :: String -> Bool
looksLikeModuleName [] = False
looksLikeModuleName (c:cs) = isUpper c && go cs
where go [] = True
go ('.':cs) = looksLikeModuleName cs
go (c:cs) = (isAlphaNum c || c == '_' || c == '\'') && go cs
-- Similar to 'parse' for Distribution.Package.PackageName,
-- but we don't want to depend on Cabal.
looksLikePackageName :: String -> Bool
looksLikePackageName = all (all isAlphaNum <&&> not . (all isDigit)) . split '-'
{-
Akin to @Prelude.words@, but acts like the Bourne shell, treating
quoted strings as Haskell Strings, and also parses Haskell [String]
syntax.
-}
getCmd :: String -> Either String -- Error
(String, String) -- (Cmd, Rest)
getCmd s = case break isSpace $ dropWhile isSpace s of
([], _) -> Left ("Couldn't find command in " ++ show s)
res -> Right res
toCmdArgs :: String -> Either String -- Error
(String, [String]) -- (Cmd, Args)
toCmdArgs s = case getCmd s of
Left err -> Left err
Right (cmd, s') -> case toArgs s' of
Left err -> Left err
Right args -> Right (cmd, args)
toArgs :: String -> Either String -- Error
[String] -- Args
toArgs str
= case dropWhile isSpace str of
s@('[':_) -> case reads s of
[(args, spaces)]
| all isSpace spaces ->
Right args
_ ->
Left ("Couldn't read " ++ show str ++ " as [String]")
s -> toArgs' s
where
toArgs' :: String -> Either String [String]
-- Remove outer quotes:
-- > toArgs' "\"foo\" \"bar baz\""
-- Right ["foo", "bar baz"]
--
-- Keep inner quotes:
-- > toArgs' "-DFOO=\"bar baz\""
-- Right ["-DFOO=\"bar baz\""]
toArgs' s = case dropWhile isSpace s of
[] -> Right []
('"' : _) -> do
-- readAsString removes outer quotes
(arg, rest) <- readAsString s
(arg:) `fmap` toArgs' rest
s' -> case break (isSpace <||> (== '"')) s' of
(argPart1, s''@('"':_)) -> do
(argPart2, rest) <- readAsString s''
-- show argPart2 to keep inner quotes
((argPart1 ++ show argPart2):) `fmap` toArgs' rest
(arg, s'') -> (arg:) `fmap` toArgs' s''
readAsString :: String -> Either String (String, String)
readAsString s = case reads s of
[(arg, rest)]
-- rest must either be [] or start with a space
| all isSpace (take 1 rest) ->
Right (arg, rest)
_ ->
Left ("Couldn't read " ++ show s ++ " as String")
-----------------------------------------------------------------------------
-- Integers
-- This algorithm for determining the $\log_2$ of exact powers of 2 comes
-- from GCC. It requires bit manipulation primitives, and we use GHC
-- extensions. Tough.
exactLog2 :: Integer -> Maybe Integer
exactLog2 x
= if (x <= 0 || x >= 2147483648) then
Nothing
else
if (x .&. (-x)) /= x then
Nothing
else
Just (pow2 x)
where
pow2 x | x == 1 = 0
| otherwise = 1 + pow2 (x `shiftR` 1)
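-- Illustrative examples (not from the original source):
--
-- > exactLog2 64 == Just 6
-- > exactLog2 12 == Nothing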
{-
-- -----------------------------------------------------------------------------
-- Floats
-}
readRational__ :: ReadS Rational -- NB: doesn't handle leading "-"
readRational__ r = do
(n,d,s) <- readFix r
(k,t) <- readExp s
return ((n%1)*10^^(k-d), t)
where
readFix r = do
(ds,s) <- lexDecDigits r
(ds',t) <- lexDotDigits s
return (read (ds++ds'), length ds', t)
readExp (e:s) | e `elem` "eE" = readExp' s
readExp s = return (0,s)
readExp' ('+':s) = readDec s
readExp' ('-':s) = do (k,t) <- readDec s
return (-k,t)
readExp' s = readDec s
readDec s = do
(ds,r) <- nonnull isDigit s
return (foldl1 (\n d -> n * 10 + d) [ ord d - ord '0' | d <- ds ],
r)
lexDecDigits = nonnull isDigit
lexDotDigits ('.':s) = return (span isDigit s)
lexDotDigits s = return ("",s)
nonnull p s = do (cs@(_:_),t) <- return (span p s)
return (cs,t)
readRational :: String -> Rational -- NB: *does* handle a leading "-"
readRational top_s
= case top_s of
'-' : xs -> - (read_me xs)
xs -> read_me xs
where
read_me s
= case (do { (x,"") <- readRational__ s ; return x }) of
[x] -> x
[] -> error ("readRational: no parse:" ++ top_s)
_ -> error ("readRational: ambiguous parse:" ++ top_s)
-----------------------------------------------------------------------------
-- read helpers
maybeRead :: Read a => String -> Maybe a
maybeRead str = case reads str of
[(x, "")] -> Just x
_ -> Nothing
maybeReadFuzzy :: Read a => String -> Maybe a
maybeReadFuzzy str = case reads str of
[(x, s)]
| all isSpace s ->
Just x
_ ->
Nothing
-----------------------------------------------------------------------------
-- Verify that the 'dirname' portion of a FilePath exists.
--
doesDirNameExist :: FilePath -> IO Bool
doesDirNameExist fpath = doesDirectoryExist (takeDirectory fpath)
-----------------------------------------------------------------------------
-- Backwards compatibility definition of getModificationTime
getModificationUTCTime :: FilePath -> IO UTCTime
getModificationUTCTime = getModificationTime
-- --------------------------------------------------------------
-- check existence & modification time at the same time
modificationTimeIfExists :: FilePath -> IO (Maybe UTCTime)
modificationTimeIfExists f = do
(do t <- getModificationUTCTime f; return (Just t))
`catchIO` \e -> if isDoesNotExistError e
then return Nothing
else ioError e
-- --------------------------------------------------------------
-- Change the character encoding of the given Handle to transliterate
-- on unsupported characters instead of throwing an exception
hSetTranslit :: Handle -> IO ()
hSetTranslit h = do
menc <- hGetEncoding h
case fmap textEncodingName menc of
Just name | '/' `notElem` name -> do
enc' <- mkTextEncoding $ name ++ "//TRANSLIT"
hSetEncoding h enc'
_ -> return ()
-- split a string at the last character where 'pred' is True,
-- returning a pair of strings. The first component holds the string
-- up (but not including) the last character for which 'pred' returned
-- True, the second whatever comes after (but also not including the
-- last character).
--
-- If 'pred' returns False for all characters in the string, the original
-- string is returned in the first component (and the second one is just
-- empty).
splitLongestPrefix :: String -> (Char -> Bool) -> (String,String)
splitLongestPrefix str pred
| null r_pre = (str, [])
| otherwise = (reverse (tail r_pre), reverse r_suf)
-- 'tail' drops the char satisfying 'pred'
where (r_suf, r_pre) = break pred (reverse str)
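-- Illustrative examples (not from the original source):
--
-- > splitLongestPrefix "foo/bar/baz.hs" (== '/') == ("foo/bar", "baz.hs")
-- > splitLongestPrefix "foo"            (== '/') == ("foo", "")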
escapeSpaces :: String -> String
escapeSpaces = foldr (\c s -> if isSpace c then '\\':c:s else c:s) ""
type Suffix = String
--------------------------------------------------------------
-- * Search path
--------------------------------------------------------------
data Direction = Forwards | Backwards
reslash :: Direction -> FilePath -> FilePath
reslash d = f
where f ('/' : xs) = slash : f xs
f ('\\' : xs) = slash : f xs
f (x : xs) = x : f xs
f "" = ""
slash = case d of
Forwards -> '/'
Backwards -> '\\'
makeRelativeTo :: FilePath -> FilePath -> FilePath
this `makeRelativeTo` that = directory </> thisFilename
where (thisDirectory, thisFilename) = splitFileName this
thatDirectory = dropFileName that
directory = joinPath $ f (splitPath thisDirectory)
(splitPath thatDirectory)
f (x : xs) (y : ys)
| x == y = f xs ys
f xs ys = replicate (length ys) ".." ++ xs
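-- Illustrative example (not from the original source), with POSIX-style paths:
--
-- > "/a/b/c/foo.hs" `makeRelativeTo` "/a/b/d/bar.hs" == "../c/foo.hs"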
{-
************************************************************************
* *
\subsection[Utils-Data]{Utils for defining Data instances}
* *
************************************************************************
These functions helps us to define Data instances for abstract types.
-}
abstractConstr :: String -> Constr
abstractConstr n = mkConstr (abstractDataType n) ("{abstract:"++n++"}") [] Prefix
abstractDataType :: String -> DataType
abstractDataType n = mkDataType n [abstractConstr n]
{-
************************************************************************
* *
\subsection[Utils-C]{Utils for printing C code}
* *
************************************************************************
-}
charToC :: Word8 -> String
charToC w =
case chr (fromIntegral w) of
'\"' -> "\\\""
'\'' -> "\\\'"
'\\' -> "\\\\"
c | c >= ' ' && c <= '~' -> [c]
| otherwise -> ['\\',
chr (ord '0' + ord c `div` 64),
chr (ord '0' + ord c `div` 8 `mod` 8),
chr (ord '0' + ord c `mod` 8)]
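-- Illustrative examples (not from the original source):
--
-- > charToC (fromIntegral (ord 'A')) == "A"
-- > charToC 10                       == "\\012"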
{-
************************************************************************
* *
\subsection[Utils-Hashing]{Utils for hashing}
* *
************************************************************************
-}
-- | A sample hash function for Strings. We keep multiplying by the
-- golden ratio and adding. The implementation is:
--
-- > hashString = foldl' f golden
-- > where f m c = fromIntegral (ord c) * magic + hashInt32 m
-- > magic = 0xdeadbeef
--
-- Where hashInt32 works just as hashInt shown above.
--
-- Knuth argues that repeated multiplication by the golden ratio
-- will minimize gaps in the hash space, and thus it's a good choice
-- for combining together multiple keys to form one.
--
-- Here we know that individual characters c are often small, and this
-- produces frequent collisions if we use ord c alone. A
-- particular problem is posed by the shorter low ASCII and ISO-8859-1
-- character strings. We pre-multiply by a magic twiddle factor to
-- obtain a good distribution. In fact, given the following test:
--
-- > testp :: Int32 -> Int
-- > testp k = (n - ) . length . group . sort . map hs . take n $ ls
-- > where ls = [] : [c : l | l <- ls, c <- ['\0'..'\xff']]
-- > hs = foldl' f golden
-- > f m c = fromIntegral (ord c) * k + hashInt32 m
-- > n = 100000
--
-- We discover that testp magic = 0.
hashString :: String -> Int32
hashString = foldl' f golden
where f m c = fromIntegral (ord c) * magic + hashInt32 m
magic = fromIntegral (0xdeadbeef :: Word32)
golden :: Int32
golden = 1013904242 -- = round ((sqrt 5 - 1) * 2^32) :: Int32
-- was -1640531527 = round ((sqrt 5 - 1) * 2^31) :: Int32
-- but that has bad mulHi properties (even adding 2^32 to get its inverse)
-- Whereas the above works well and contains no hash duplications for
-- [-32767..65536]
-- | A sample (and useful) hash function for Int32,
-- implemented by extracting the uppermost 32 bits of the 64-bit
-- result of multiplying by a 33-bit constant. The constant is from
-- Knuth, derived from the golden ratio:
--
-- > golden = round ((sqrt 5 - 1) * 2^32)
--
-- We get good key uniqueness on small inputs
-- (a problem with previous versions):
-- (length $ group $ sort $ map hashInt32 [-32767..65536]) == 65536 + 32768
--
hashInt32 :: Int32 -> Int32
hashInt32 x = mulHi x golden + x
-- hi 32 bits of a 32-bit * 32-bit -> 64-bit multiply
mulHi :: Int32 -> Int32 -> Int32
mulHi a b = fromIntegral (r `shiftR` 32)
where r :: Int64
r = fromIntegral a * fromIntegral b
-- | A compatibility wrapper for the @GHC.Stack.HasCallStack@ constraint.
#if __GLASGOW_HASKELL__ >= 800
type HasCallStack = GHC.Stack.HasCallStack
#elif MIN_VERSION_GLASGOW_HASKELL(7,10,2,0)
type HasCallStack = (?callStack :: GHC.Stack.CallStack)
-- CallStack wasn't present in GHC 7.10.1, disable callstacks in stage 1
#else
type HasCallStack = (() :: Constraint)
#endif
-- | A call stack constraint, but only when 'isDebugOn'.
#if DEBUG
type HasDebugCallStack = HasCallStack
#else
type HasDebugCallStack = (() :: Constraint)
#endif
-- | Pretty-print the current callstack
#if __GLASGOW_HASKELL__ >= 800
prettyCurrentCallStack :: HasCallStack => String
prettyCurrentCallStack = GHC.Stack.prettyCallStack GHC.Stack.callStack
#elif MIN_VERSION_GLASGOW_HASKELL(7,10,2,0)
prettyCurrentCallStack :: (?callStack :: GHC.Stack.CallStack) => String
prettyCurrentCallStack = GHC.Stack.showCallStack ?callStack
#else
prettyCurrentCallStack :: HasCallStack => String
prettyCurrentCallStack = "Call stack unavailable"
#endif
| olsner/ghc | compiler/utils/Util.hs | bsd-3-clause | 46,037 | 0 | 19 | 13,496 | 11,522 | 6,303 | 5,219 | 663 | 7 |
{-# LANGUAGE OverloadedStrings,RecordWildCards #-}
module Stack.Options
(Command(..)
,benchOptsParser
,buildOptsParser
,cleanOptsParser
,configCmdSetParser
,configOptsParser
,dockerOptsParser
,dockerCleanupOptsParser
,dotOptsParser
,execOptsParser
,evalOptsParser
,globalOptsParser
,initOptsParser
,newOptsParser
,nixOptsParser
,logLevelOptsParser
,ghciOptsParser
,solverOptsParser
,testOptsParser
,hpcReportOptsParser
,pvpBoundsOption
,globalOptsFromMonoid
) where
import Control.Monad.Logger (LogLevel(..))
import Data.Char (isSpace, toLower)
import Data.List (intercalate)
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Read (decimal)
import Distribution.Version (anyVersion)
import Options.Applicative
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Options.Applicative.Types (fromM, oneM, readerAsk)
import Stack.Clean (CleanOpts(..))
import Stack.Config (packagesParser)
import Stack.ConfigCmd
import Stack.Constants (stackProgName)
import Stack.Coverage (HpcReportOpts(..))
import Stack.Docker
import qualified Stack.Docker as Docker
import Stack.Dot
import Stack.Ghci (GhciOpts(..))
import Stack.Init
import Stack.New
import Stack.Nix
import Stack.Types
import Stack.Types.TemplateName
-- | Command sum type for conditional arguments.
data Command
= Build
| Test
| Haddock
| Bench
| Install
deriving (Eq)
-- | Parser for bench arguments.
benchOptsParser :: Parser BenchmarkOpts
benchOptsParser = BenchmarkOpts
<$> optional (strOption (long "benchmark-arguments" <>
metavar "BENCH_ARGS" <>
help ("Forward BENCH_ARGS to the benchmark suite. " <>
"Supports templates from `cabal bench`")))
<*> switch (long "no-run-benchmarks" <>
help "Disable running of benchmarks. (Benchmarks will still be built.)")
-- | Parser for build arguments.
buildOptsParser :: Command
-> Parser BuildOpts
buildOptsParser cmd =
BuildOpts <$> target <*> libProfiling <*> exeProfiling <*>
haddock <*> haddockDeps <*> dryRun <*> ghcOpts <*>
flags <*> copyBins <*> preFetch <*> buildSubset <*>
fileWatch' <*> keepGoing <*> forceDirty <*> tests <*>
testOptsParser <*> benches <*> benchOptsParser <*>
many exec <*> onlyConfigure <*> reconfigure <*> cabalVerbose
where target =
many (textArgument
(metavar "TARGET" <>
help "If none specified, use all packages"))
libProfiling =
boolFlags False
"library-profiling"
"library profiling for TARGETs and all its dependencies"
idm
exeProfiling =
boolFlags False
"executable-profiling"
"executable profiling for TARGETs and all its dependencies"
idm
haddock =
boolFlags (cmd == Haddock)
"haddock"
"generating Haddocks the package(s) in this directory/configuration"
idm
haddockDeps =
maybeBoolFlags
"haddock-deps"
"building Haddocks for dependencies"
idm
copyBins = boolFlags (cmd == Install)
"copy-bins"
"copying binaries to the local-bin-path (see 'stack path')"
idm
dryRun = switch (long "dry-run" <>
help "Don't build anything, just prepare to")
ghcOpts = (\x y z -> concat [x, y, z])
<$> flag [] ["-Wall", "-Werror"]
( long "pedantic"
<> help "Turn on -Wall and -Werror"
)
<*> flag [] ["-O0"]
( long "fast"
<> help "Turn off optimizations (-O0)"
)
<*> many (textOption (long "ghc-options" <>
metavar "OPTION" <>
help "Additional options passed to GHC"))
flags = Map.unionsWith Map.union <$> many
(option readFlag
(long "flag" <>
metavar "PACKAGE:[-]FLAG" <>
help ("Override flags set in stack.yaml " <>
"(applies to local packages and extra-deps)")))
preFetch = switch
(long "prefetch" <>
help "Fetch packages necessary for the build immediately, useful with --dry-run")
buildSubset =
flag' BSOnlyDependencies
(long "dependencies-only" <>
help "A synonym for --only-dependencies")
<|> flag' BSOnlySnapshot
(long "only-snapshot" <>
help "Only build packages for the snapshot database, not the local database")
<|> flag' BSOnlyDependencies
(long "only-dependencies" <>
help "Only build packages that are dependencies of targets on the command line")
<|> pure BSAll
fileWatch' =
flag' FileWatch
(long "file-watch" <>
help "Watch for changes in local files and automatically rebuild. Ignores files in VCS boring/ignore file")
<|> flag' FileWatchPoll
(long "file-watch-poll" <>
help "Like --file-watch, but polling the filesystem instead of using events")
<|> pure NoFileWatch
keepGoing = maybeBoolFlags
"keep-going"
"continue running after a step fails (default: false for build, true for test/bench)"
idm
forceDirty = switch
(long "force-dirty" <>
help "Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change)")
tests = boolFlags (cmd == Test)
"test"
"testing the package(s) in this directory/configuration"
idm
benches = boolFlags (cmd == Bench)
"bench"
"benchmarking the package(s) in this directory/configuration"
idm
exec = cmdOption
( long "exec" <>
metavar "CMD [ARGS]" <>
help "Command and arguments to run after a successful build" )
onlyConfigure = switch
(long "only-configure" <>
help "Only perform the configure step, not any builds. Intended for tool usage, may break when used on multiple packages at once!")
reconfigure = switch
(long "reconfigure" <>
help "Perform the configure step even if unnecessary. Useful in some corner cases with custom Setup.hs files")
cabalVerbose = switch
(long "cabal-verbose" <>
help "Ask Cabal to be verbose in its output")
-- | Parser for package:[-]flag
readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool))
readFlag = do
s <- readerAsk
case break (== ':') s of
(pn, ':':mflag) -> do
pn' <-
case parsePackageNameFromString pn of
Nothing
| pn == "*" -> return Nothing
| otherwise -> readerError $ "Invalid package name: " ++ pn
Just x -> return $ Just x
let (b, flagS) =
case mflag of
'-':x -> (False, x)
_ -> (True, mflag)
flagN <-
case parseFlagNameFromString flagS of
Nothing -> readerError $ "Invalid flag name: " ++ flagS
Just x -> return x
return $ Map.singleton pn' $ Map.singleton flagN b
_ -> readerError "Must have a colon"
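-- Illustrative usage (not from the original source): on the command line this
-- reader accepts e.g. "--flag mypackage:-someflag" to disable a flag for one
-- package, or "--flag '*:someflag'" to enable it for all local packages.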
-- | Command-line parser for the clean command.
cleanOptsParser :: Parser CleanOpts
cleanOptsParser = CleanOpts <$> packages
where
packages =
many
(packageNameArgument
(metavar "PACKAGE" <>
help "If none specified, clean all local packages"))
-- | Command-line arguments parser for configuration.
configOptsParser :: Bool -> Parser ConfigMonoid
configOptsParser hide0 =
(\workDir dockerOpts nixOpts systemGHC installGHC arch os ghcVariant jobs includes libs skipGHCCheck skipMsys localBin modifyCodePage -> mempty
{ configMonoidWorkDir = workDir
, configMonoidDockerOpts = dockerOpts
, configMonoidNixOpts = nixOpts
, configMonoidSystemGHC = systemGHC
, configMonoidInstallGHC = installGHC
, configMonoidSkipGHCCheck = skipGHCCheck
, configMonoidArch = arch
, configMonoidOS = os
, configMonoidGHCVariant = ghcVariant
, configMonoidJobs = jobs
, configMonoidExtraIncludeDirs = includes
, configMonoidExtraLibDirs = libs
, configMonoidSkipMsys = skipMsys
, configMonoidLocalBinPath = localBin
, configMonoidModifyCodePage = modifyCodePage
})
<$> optional (strOption
( long "work-dir"
<> metavar "WORK-DIR"
<> help "Override work directory (default: .stack-work)"
<> hide
))
<*> dockerOptsParser True
<*> nixOptsParser True
<*> maybeBoolFlags
"system-ghc"
"using the system installed GHC (on the PATH) if available and a matching version"
hide
<*> maybeBoolFlags
"install-ghc"
"downloading and installing GHC if necessary (can be done manually with stack setup)"
hide
<*> optional (strOption
( long "arch"
<> metavar "ARCH"
<> help "System architecture, e.g. i386, x86_64"
<> hide
))
<*> optional (strOption
( long "os"
<> metavar "OS"
<> help "Operating system, e.g. linux, windows"
<> hide
))
<*> optional (ghcVariantParser hide0)
<*> optional (option auto
( long "jobs"
<> short 'j'
<> metavar "JOBS"
<> help "Number of concurrent jobs to run"
<> hide
))
<*> fmap Set.fromList (many (textOption
( long "extra-include-dirs"
<> metavar "DIR"
<> help "Extra directories to check for C header files"
<> hide
)))
<*> fmap Set.fromList (many (textOption
( long "extra-lib-dirs"
<> metavar "DIR"
<> help "Extra directories to check for libraries"
<> hide
)))
<*> maybeBoolFlags
"skip-ghc-check"
"skipping the GHC version and architecture check"
hide
<*> maybeBoolFlags
"skip-msys"
"skipping the local MSYS installation (Windows only)"
hide
<*> optional (strOption
( long "local-bin-path"
<> metavar "DIR"
<> help "Install binaries to DIR"
<> hide
))
<*> maybeBoolFlags
"modify-code-page"
"setting the codepage to support UTF-8 (Windows only)"
hide
where hide = hideMods hide0
nixOptsParser :: Bool -> Parser NixOptsMonoid
nixOptsParser hide0 =
NixOptsMonoid
<$> pure False
<*> maybeBoolFlags nixCmdName
"using a Nix-shell"
hide
<*> pure []
<*> pure Nothing
<*> ((map T.pack . fromMaybe [])
<$> optional (argsOption (long "nix-shell-options" <>
metavar "OPTION" <>
help "Additional options passed to nix-shell" <>
hide)))
where
hide = hideMods hide0
-- | Options parser configuration for Docker.
dockerOptsParser :: Bool -> Parser DockerOptsMonoid
dockerOptsParser hide0 =
DockerOptsMonoid
<$> pure False
<*> maybeBoolFlags dockerCmdName
"using a Docker container"
hide
<*> ((Just . DockerMonoidRepo) <$> option str (long (dockerOptName dockerRepoArgName) <>
hide <>
metavar "NAME" <>
help "Docker repository name") <|>
(Just . DockerMonoidImage) <$> option str (long (dockerOptName dockerImageArgName) <>
hide <>
metavar "IMAGE" <>
help "Exact Docker image ID (overrides docker-repo)") <|>
pure Nothing)
<*> maybeBoolFlags (dockerOptName dockerRegistryLoginArgName)
"registry requires login"
hide
<*> maybeStrOption (long (dockerOptName dockerRegistryUsernameArgName) <>
hide <>
metavar "USERNAME" <>
help "Docker registry username")
<*> maybeStrOption (long (dockerOptName dockerRegistryPasswordArgName) <>
hide <>
metavar "PASSWORD" <>
help "Docker registry password")
<*> maybeBoolFlags (dockerOptName dockerAutoPullArgName)
"automatic pulling latest version of image"
hide
<*> maybeBoolFlags (dockerOptName dockerDetachArgName)
"running a detached Docker container"
hide
<*> maybeBoolFlags (dockerOptName dockerPersistArgName)
"not deleting container after it exits"
hide
<*> maybeStrOption (long (dockerOptName dockerContainerNameArgName) <>
hide <>
metavar "NAME" <>
help "Docker container name")
<*> argsOption (long (dockerOptName dockerRunArgsArgName) <>
hide <>
value [] <>
metavar "'ARG1 [ARG2 ...]'" <>
help "Additional options to pass to 'docker run'")
<*> many (option auto (long (dockerOptName dockerMountArgName) <>
hide <>
metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <>
help ("Mount volumes from host in container " ++
"(may specify multiple times)")))
<*> many (option str (long (dockerOptName dockerEnvArgName) <>
hide <>
metavar "NAME=VALUE" <>
help ("Set environment variable in container " ++
"(may specify multiple times)")))
<*> maybeStrOption (long (dockerOptName dockerDatabasePathArgName) <>
hide <>
metavar "PATH" <>
help "Location of image usage tracking database")
<*> optional (option str
(long(dockerOptName dockerStackExeArgName) <>
hide <>
metavar (intercalate "|"
[ dockerStackExeDownloadVal
, dockerStackExeHostVal
, dockerStackExeImageVal
, "PATH" ]) <>
help (concat [ "Location of "
, stackProgName
, " executable used in container" ])))
<*> maybeBoolFlags (dockerOptName dockerSetUserArgName)
"setting user in container to match host"
hide
<*> pure anyVersion
where
dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName
maybeStrOption = optional . option str
hide = hideMods hide0
-- | Parser for docker cleanup arguments.
dockerCleanupOptsParser :: Parser Docker.CleanupOpts
dockerCleanupOptsParser =
Docker.CleanupOpts <$>
(flag' Docker.CleanupInteractive
(short 'i' <>
long "interactive" <>
help "Show cleanup plan in editor and allow changes (default)") <|>
flag' Docker.CleanupImmediate
(short 'y' <>
long "immediate" <>
help "Immediately execute cleanup plan") <|>
flag' Docker.CleanupDryRun
(short 'n' <>
long "dry-run" <>
help "Display cleanup plan but do not execute") <|>
pure Docker.CleanupInteractive) <*>
opt (Just 14) "known-images" "LAST-USED" <*>
opt Nothing "unknown-images" "CREATED" <*>
opt (Just 0) "dangling-images" "CREATED" <*>
opt Nothing "stopped-containers" "CREATED" <*>
opt Nothing "running-containers" "CREATED"
where opt def' name mv =
fmap Just
(option auto
(long name <>
metavar (mv ++ "-DAYS-AGO") <>
help ("Remove " ++
toDescr name ++
" " ++
map toLower (toDescr mv) ++
" N days ago" ++
case def' of
Just n -> " (default " ++ show n ++ ")"
Nothing -> ""))) <|>
flag' Nothing
(long ("no-" ++ name) <>
help ("Do not remove " ++
toDescr name ++
case def' of
Just _ -> ""
Nothing -> " (default)")) <|>
pure def'
toDescr = map (\c -> if c == '-' then ' ' else c)
-- | Parser for arguments to `stack dot`
dotOptsParser :: Parser DotOpts
dotOptsParser = DotOpts
<$> includeExternal
<*> includeBase
<*> depthLimit
<*> fmap (maybe Set.empty Set.fromList . fmap splitNames) prunedPkgs
where includeExternal = boolFlags False
"external"
"inclusion of external dependencies"
idm
includeBase = boolFlags True
"include-base"
"inclusion of dependencies on base"
idm
depthLimit =
optional (option auto
(long "depth" <>
metavar "DEPTH" <>
help ("Limit the depth of dependency resolution " <>
"(Default: No limit)")))
prunedPkgs = optional (strOption
(long "prune" <>
metavar "PACKAGES" <>
help ("Prune each package name " <>
"from the comma separated list " <>
"of package names PACKAGES")))
splitNames :: String -> [String]
splitNames = map (takeWhile (not . isSpace) . dropWhile isSpace) . splitOn ","
ghciOptsParser :: Parser GhciOpts
ghciOptsParser = GhciOpts
<$> switch (long "no-build" <> help "Don't build before launching GHCi")
<*> fmap concat (many (argsOption (long "ghci-options" <>
metavar "OPTION" <>
help "Additional options passed to GHCi")))
<*> optional
(strOption (long "with-ghc" <>
metavar "GHC" <>
help "Use this GHC to run GHCi"))
<*> (not <$> boolFlags True "load" "load modules on start-up" idm)
<*> packagesParser
<*> optional
(textOption
(long "main-is" <>
metavar "TARGET" <>
help "Specify which target should contain the main \
                      \module to load, such as for an executable, \
                      \test suite, or benchmark."))
<*> switch (long "skip-intermediate-deps" <> help "Skip loading intermediate target dependencies")
<*> buildOptsParser Build
-- | Parser for exec command
execOptsParser :: Maybe SpecialExecCmd -> Parser ExecOpts
execOptsParser mcmd =
ExecOpts
<$> maybe eoCmdParser pure mcmd
<*> eoArgsParser
<*> execOptsExtraParser
where
eoCmdParser = ExecCmd <$> strArgument (metavar "CMD")
eoArgsParser = many (strArgument (metavar "-- ARGS (e.g. stack ghc -- X.hs -o x)"))
evalOptsParser :: String -- ^ metavar
-> Parser EvalOpts
evalOptsParser meta =
EvalOpts
<$> eoArgsParser
<*> execOptsExtraParser
where
eoArgsParser :: Parser String
eoArgsParser = strArgument (metavar meta)
-- | Parser for extra options to exec command
execOptsExtraParser :: Parser ExecOptsExtra
execOptsExtraParser = eoPlainParser <|>
ExecOptsEmbellished
<$> eoEnvSettingsParser
<*> eoPackagesParser
where
eoEnvSettingsParser :: Parser EnvSettings
eoEnvSettingsParser = EnvSettings
<$> pure True
<*> boolFlags True
"ghc-package-path"
"setting the GHC_PACKAGE_PATH variable for the subprocess"
idm
<*> boolFlags True
"stack-exe"
"setting the STACK_EXE environment variable to the path for the stack executable"
idm
<*> pure False
eoPackagesParser :: Parser [String]
eoPackagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
eoPlainParser :: Parser ExecOptsExtra
eoPlainParser = flag' ExecOptsPlain
(long "plain" <>
help "Use an unmodified environment (only useful with Docker)")
-- | Parser for global command-line options.
globalOptsParser :: Bool -> Maybe LogLevel -> Parser GlobalOptsMonoid
globalOptsParser hide0 defLogLevel =
GlobalOptsMonoid <$>
optional (strOption (long Docker.reExecArgName <> hidden <> internal)) <*>
optional (option auto (long dockerEntrypointArgName <> hidden <> internal)) <*>
logLevelOptsParser hide0 defLogLevel <*>
configOptsParser hide0 <*>
optional (abstractResolverOptsParser hide0) <*>
optional (compilerOptsParser hide0) <*>
maybeBoolFlags
"terminal"
"overriding terminal detection in the case of running in a false terminal"
hide <*>
optional (strOption (long "stack-yaml" <>
metavar "STACK-YAML" <>
help ("Override project stack.yaml file " <>
"(overrides any STACK_YAML environment variable)") <>
hide))
where hide = hideMods hide0
-- | Create GlobalOpts from GlobalOptsMonoid.
globalOptsFromMonoid :: Bool -> GlobalOptsMonoid -> GlobalOpts
globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = GlobalOpts
{ globalReExecVersion = globalMonoidReExecVersion
, globalDockerEntrypoint = globalMonoidDockerEntrypoint
, globalLogLevel = fromMaybe defaultLogLevel globalMonoidLogLevel
, globalConfigMonoid = globalMonoidConfigMonoid
, globalResolver = globalMonoidResolver
, globalCompiler = globalMonoidCompiler
, globalTerminal = fromMaybe defaultTerminal globalMonoidTerminal
, globalStackYaml = globalMonoidStackYaml }
initOptsParser :: Parser InitOpts
initOptsParser =
InitOpts <$> method <*> overwrite <*> fmap not ignoreSubDirs
where
ignoreSubDirs = switch (long "ignore-subdirs" <>
help "Do not search for .cabal files in sub directories")
overwrite = switch (long "force" <>
help "Force overwriting of an existing stack.yaml if it exists")
method = solver
<|> (MethodResolver <$> resolver)
<|> (MethodSnapshot <$> snapPref)
solver =
flag' MethodSolver
(long "solver" <>
help "Use a dependency solver to determine dependencies")
snapPref =
flag' PrefLTS
(long "prefer-lts" <>
help "Prefer LTS snapshots over Nightly snapshots") <|>
flag' PrefNightly
(long "prefer-nightly" <>
help "Prefer Nightly snapshots over LTS snapshots") <|>
pure PrefNone
resolver = option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Use the given resolver, even if not all dependencies are met")
-- | Parser for a logging level.
logLevelOptsParser :: Bool -> Maybe LogLevel -> Parser (Maybe LogLevel)
logLevelOptsParser hide defLogLevel =
fmap (Just . parse)
(strOption (long "verbosity" <>
metavar "VERBOSITY" <>
help "Verbosity: silent, error, warn, info, debug" <>
hideMods hide)) <|>
flag' (Just verboseLevel)
(short 'v' <> long "verbose" <>
help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\"") <>
hideMods hide) <|>
pure defLogLevel
where verboseLevel = LevelDebug
showLevel l =
case l of
LevelDebug -> "debug"
LevelInfo -> "info"
LevelWarn -> "warn"
LevelError -> "error"
LevelOther x -> T.unpack x
parse s =
case s of
"debug" -> LevelDebug
"info" -> LevelInfo
"warn" -> LevelWarn
"error" -> LevelError
_ -> LevelOther (T.pack s)
-- | Parser for the resolver
abstractResolverOptsParser :: Bool -> Parser AbstractResolver
abstractResolverOptsParser hide =
option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Override resolver in project file" <>
hideMods hide)
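-- Illustrative note (added commentary, not from the original source):
-- 'readAbstractResolver' below accepts "global", "nightly", "lts",
-- "lts-N" (latest LTS in major series N), or any resolver name that
-- 'parseResolverText' understands, e.g. "lts-3.22" or "nightly-2015-09-01".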
readAbstractResolver :: ReadM AbstractResolver
readAbstractResolver = do
s <- readerAsk
case s of
"global" -> return ARGlobal
"nightly" -> return ARLatestNightly
"lts" -> return ARLatestLTS
'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x ->
return $ ARLatestLTSMajor x'
_ ->
case parseResolverText $ T.pack s of
Left e -> readerError $ show e
Right x -> return $ ARResolver x
compilerOptsParser :: Bool -> Parser CompilerVersion
compilerOptsParser hide =
option readCompilerVersion
(long "compiler" <>
metavar "COMPILER" <>
help "Use the specified compiler" <>
hideMods hide)
readCompilerVersion :: ReadM CompilerVersion
readCompilerVersion = do
s <- readerAsk
case parseCompilerVersion (T.pack s) of
Nothing -> readerError $ "Failed to parse compiler: " ++ s
Just x -> return x
-- | GHC variant parser
ghcVariantParser :: Bool -> Parser GHCVariant
ghcVariantParser hide =
option
readGHCVariant
(long "ghc-variant" <> metavar "VARIANT" <>
help
"Specialized GHC variant, e.g. integersimple (implies --no-system-ghc)" <>
hideMods hide
)
where
readGHCVariant = do
s <- readerAsk
case parseGHCVariant s of
Left e -> readerError (show e)
Right v -> return v
-- | Parser for @solverCmd@
solverOptsParser :: Parser Bool
solverOptsParser = boolFlags False
"modify-stack-yaml"
"Automatically modify stack.yaml with the solver's recommendations"
idm
-- | Parser for test arguments.
testOptsParser :: Parser TestOpts
testOptsParser = TestOpts
<$> boolFlags True
"rerun-tests"
"running already successful tests"
idm
<*> fmap (fromMaybe [])
           (optional (argsOption (long "test-arguments" <>
metavar "TEST_ARGS" <>
help "Arguments passed in to the test suite program")))
<*> switch (long "coverage" <>
help "Generate a code coverage report")
<*> switch (long "no-run-tests" <>
help "Disable running of tests. (Tests will still be built.)")
-- | Parser for @stack new@.
newOptsParser :: Parser (NewOpts,InitOpts)
newOptsParser = (,) <$> newOpts <*> initOptsParser
where
newOpts =
NewOpts <$>
packageNameArgument
(metavar "PACKAGE_NAME" <> help "A valid package name.") <*>
switch
(long "bare" <>
help "Do not create a subdirectory for the project") <*>
templateNameArgument
(metavar "TEMPLATE_NAME" <>
help "Name of a template or a local template in a subdirectory,\
\ for example: foo or foo.hsfiles" <>
value defaultTemplateName) <*>
fmap
M.fromList
(many
(templateParamArgument
(short 'p' <> long "param" <> metavar "KEY:VALUE" <>
help
"Parameter for the template in the format key:value")))
-- | Parser for @stack hpc report@.
hpcReportOptsParser :: Parser HpcReportOpts
hpcReportOptsParser = HpcReportOpts
<$> many (textArgument $ metavar "TARGET_OR_TIX")
<*> switch (long "all" <> help "Use results from all packages and components")
<*> optional (strOption (long "destdir" <> help "Output directy for HTML report"))
pvpBoundsOption :: Parser PvpBounds
pvpBoundsOption =
option
readPvpBounds
(long "pvp-bounds" <> metavar "PVP-BOUNDS" <>
help
"How PVP version bounds should be added to .cabal file: none, lower, upper, both")
where
readPvpBounds = do
s <- readerAsk
case parsePvpBounds $ T.pack s of
Left e ->
readerError e
Right v ->
return v
configCmdSetParser :: Parser ConfigCmdSet
configCmdSetParser =
fromM
(do field <-
oneM
(strArgument
(metavar "FIELD VALUE"))
oneM (fieldToValParser field))
where
fieldToValParser :: String -> Parser ConfigCmdSet
fieldToValParser s =
case s of
"resolver" ->
ConfigCmdSetResolver <$>
argument
readAbstractResolver
idm
_ ->
error "parse stack config set field: only set resolver is implemented"
-- | If argument is True, hides the option from usage and help
hideMods :: Bool -> Mod f a
hideMods hide = if hide then internal <> hidden else idm
| rubik/stack | src/Stack/Options.hs | bsd-3-clause | 31,439 | 1 | 32 | 11,807 | 5,696 | 2,828 | 2,868 | 709 | 9 |
-- |Part of the General MIDI specs.
module GM
( gmInstrumentByNumber
, gmInstrumentByName
, gmInstrumentList
, gmPercussionByKey
, gmPercussionByName
, gmPercussionList
, gm1ControllerList
) where
import qualified Data.Map as Map
swap :: (a,b) -> (b,a)
swap (x,y) = (y,x)
-- |Map of GM instruments, indexed by number.
gmInstrumentByNumber :: Map.Map Int String
gmInstrumentByNumber = Map.fromList gmInstrumentList
-- |Map of GM instruments, indexed by name.
gmInstrumentByName :: Map.Map String Int
gmInstrumentByName = Map.fromList $ map swap gmInstrumentList
-- |Map of GM percussions, indexed by key.
gmPercussionByKey :: Map.Map Int String
gmPercussionByKey = Map.fromList gmPercussionList
-- |Map of GM percussions, indexed by name.
gmPercussionByName :: Map.Map String Int
gmPercussionByName = Map.fromList $ map swap gmPercussionList
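-- A minimal usage sketch (not part of the original module; 'exampleProgram'
-- and 'examplePercussion' are illustrative names only):
exampleProgram :: Maybe Int
exampleProgram = Map.lookup "Vibraphone" gmInstrumentByName -- Just 11

examplePercussion :: Maybe String
examplePercussion = Map.lookup 42 gmPercussionByKey -- Just "Closed Hi-Hat"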
-- |List of instruments as specified in General MIDI Level 1
gmInstrumentList :: [(Int,String)]
gmInstrumentList =
[
-- (Piano)
( 0 , "Acoustic Grand Piano" )
, ( 1 , "Bright Acoustic Piano" )
, ( 2 , "Electric Grand Piano" )
, ( 3 , "Honky-tonk Piano" )
, ( 4 , "Electric Piano 1" )
, ( 5 , "Electric Piano 2" )
, ( 6 , "Harpsichord" )
, ( 7 , "Clavi" )
-- (Chromatic Percussion)
, ( 8 , "Celesta" )
, ( 9 , "Glockenspiel" )
, ( 10 , "Music Box" )
, ( 11 , "Vibraphone" )
, ( 12 , "Marimba" )
, ( 13 , "Xylophone" )
, ( 14 , "Tubular Bells" )
, ( 15 , "Dulcimer" )
-- (Organs)
, ( 16 , "Drawbar Organ" )
, ( 17 , "Percussive Organ" )
, ( 18 , "Rock Organ" )
, ( 19 , "Church Organ" )
, ( 20 , "Reed Organ" )
, ( 21 , "Accordion" )
, ( 22 , "Harmonica" )
, ( 23 , "Tango Accordion" )
-- (Guitars)
, ( 24 , "Acoustic Guitar (nylon)" )
, ( 25 , "Acoustic Guitar (steel)" )
, ( 26 , "Electric Guitar (jazz)" )
, ( 27 , "Electric Guitar (clean)" )
, ( 28 , "Electric Guitar (muted)" )
, ( 29 , "Overdriven Guitar" )
, ( 30 , "Distortion Guitar" )
, ( 31 , "Guitar harmonics" )
-- (Bass)
, ( 32 , "Acoustic Bass" )
, ( 33 , "Fingered Bass" )
, ( 34 , "Picked Bass" )
, ( 35 , "Fretless Bass" )
, ( 36 , "Slap Bass 1" )
, ( 37 , "Slap Bass 2" )
, ( 38 , "Synth Bass 1" )
, ( 39 , "Synth Bass 2" )
-- (Orchestral)
, ( 40 , "Violin" )
, ( 41 , "Viola" )
, ( 42 , "Cello" )
, ( 43 , "Contrabass" )
, ( 44 , "Tremolo Strings" )
, ( 45 , "Pizzicato Strings" )
, ( 46 , "Orchestral Harp" )
, ( 47 , "Timpani" )
-- (Ensembles)
, ( 48 , "String Ensemble 1" )
, ( 49 , "String Ensemble 2" )
, ( 50 , "SynthStrings 1" )
, ( 51 , "SynthStrings 2" )
, ( 52 , "Choir Aahs" )
, ( 53 , "Voice Oohs" )
, ( 54 , "Synth Voice" )
, ( 55 , "Orchestra Hit" )
-- (Brass)
, ( 56 , "Trumpet" )
, ( 57 , "Trombone" )
, ( 58 , "Tuba" )
, ( 59 , "Muted Trumpet" )
, ( 60 , "French Horn" )
, ( 61 , "Brass Section" )
, ( 62 , "SynthBrass 1" )
, ( 63 , "SynthBrass 2" )
-- (Reeds)
, ( 64 , "Soprano Sax" )
, ( 65 , "Alto Sax" )
, ( 66 , "Tenor Sax" )
, ( 67 , "Baritone Sax" )
, ( 68 , "Oboe" )
, ( 69 , "English Horn" )
, ( 70 , "Bassoon" )
, ( 71 , "Clarinet" )
-- (Pipes)
, ( 72 , "Piccolo" )
, ( 73 , "Flute" )
, ( 74 , "Recorder" )
, ( 75 , "Pan Flute" )
, ( 76 , "Blown Bottle" )
, ( 77 , "Shakuhachi" )
, ( 78 , "Whistle" )
, ( 79 , "Ocarina" )
-- (Synth Leads)
, ( 80 , "Lead 1 (square)" )
, ( 81 , "Lead 2 (sawtooth)" )
, ( 82 , "Lead 3 (calliope)" )
, ( 83 , "Lead 4 (chiff)" )
, ( 84 , "Lead 5 (charang)" )
, ( 85 , "Lead 6 (voice)" )
, ( 86 , "Lead 7 (fifths)" )
, ( 87 , "Lead 8 (bass + lead)" )
-- (Synth Pads)
, ( 88 , "Pad 1 (new age)" )
, ( 89 , "Pad 2 (warm)" )
, ( 90 , "Pad 3 (polysynth)" )
, ( 91 , "Pad 4 (choir)" )
, ( 92 , "Pad 5 (bowed)" )
, ( 93 , "Pad 6 (metallic)" )
, ( 94 , "Pad 7 (halo)" )
, ( 95 , "Pad 8 (sweep)" )
-- (Synth FX)
, ( 96 , "FX 1 (rain)" )
, ( 97 , "FX 2 (soundtrack)" )
, ( 98 , "FX 3 (crystal)" )
, ( 99 , "FX 4 (atmosphere)" )
, ( 100 , "FX 5 (brightness)" )
, ( 101 , "FX 6 (goblins)" )
, ( 102 , "FX 7 (echoes)" )
, ( 103 , "FX 8 (sci-fi)" )
-- (Ethnic)
, ( 104 , "Sitar" )
, ( 105 , "Banjo" )
, ( 106 , "Shamisen" )
, ( 107 , "Koto" )
, ( 108 , "Kalimba" )
, ( 109 , "Bag pipe" )
, ( 110 , "Fiddle" )
, ( 111 , "Shanai" )
-- (Percussive)
, ( 112 , "Tinkle Bell" )
, ( 113 , "Agogo" )
, ( 114 , "Steel Drums" )
, ( 115 , "Woodblock" )
, ( 116 , "Taiko Drum" )
, ( 117 , "Melodic Tom" )
, ( 118 , "Synth Drum" )
, ( 119 , "Reverse Cymbal" )
-- (Sound Effects)
, ( 120 , "Guitar Fret Noise" )
, ( 121 , "Breath Noise" )
, ( 122 , "Seashore" )
, ( 123 , "Bird Tweet" )
, ( 124 , "Telephone Ring" )
, ( 125 , "Helicopter" )
, ( 126 , "Applause" )
, ( 127 , "Gunshot" )
]
-- |List of percussions (channel 10, key\/name pairs) as specified in General MIDI Level 1.
gmPercussionList :: [(Int,String)]
gmPercussionList =
[ ( 35 , "Acoustic Bass Drum" )
, ( 36 , "Bass Drum 1" )
, ( 37 , "Side Stick" )
, ( 38 , "Acoustic Snare" )
, ( 39 , "Hand Clap" )
, ( 40 , "Electric Snare" )
, ( 41 , "Low Floor Tom" )
, ( 42 , "Closed Hi-Hat" )
, ( 43 , "High Floor Tom" )
, ( 44 , "Pedal Hi-Hat" )
, ( 45 , "Low Tom" )
, ( 46 , "Open Hi-Hat" )
, ( 47 , "Low-Mid Tom" )
, ( 48 , "Hi-Mid Tom" )
, ( 49 , "Crash Cymbal 1" )
, ( 50 , "High Tom" )
, ( 51 , "Ride Cymbal 1" )
, ( 52 , "Chinese Cymbal" )
, ( 53 , "Ride Bell" )
, ( 54 , "Tambourine" )
, ( 55 , "Splash Cymbal" )
, ( 56 , "Cowbell" )
, ( 57 , "Crash Cymbal 2" )
, ( 58 , "Vibraslap" )
, ( 59 , "Ride Cymbal 2" )
, ( 60 , "Hi Bongo" )
, ( 61 , "Low Bongo" )
, ( 62 , "Mute Hi Conga" )
, ( 63 , "Open Hi Conga" )
, ( 64 , "Low Conga" )
, ( 65 , "High Timbale" )
, ( 66 , "Low Timbale" )
, ( 67 , "High Agogo" )
, ( 68 , "Low Agogo" )
, ( 69 , "Cabasa" )
, ( 70 , "Maracas" )
, ( 71 , "Short Whistle" )
, ( 72 , "Long Whistle" )
, ( 73 , "Short Guiro" )
, ( 74 , "Long Guiro" )
, ( 75 , "Claves" )
, ( 76 , "Hi Wood Block" )
, ( 77 , "Low Wood Block" )
, ( 78 , "Mute Cuica" )
, ( 79 , "Open Cuica" )
, ( 80 , "Mute Triangle" )
, ( 81 , "Open Triangle" )
]
-- |General MIDI Level 1 specific controllers
gm1ControllerList :: [(Int,String)]
gm1ControllerList =
[ ( 1 , "Modulation" )
, ( 6 , "Data Entry MSB" )
, ( 7 , "Volume" )
, ( 10 , "Pan" )
, ( 11 , "Expression" )
, ( 38 , "Data Entry LSB" )
, ( 64 , "Sustain" )
, ( 100 , "RPN LSB" )
, ( 101 , "RPN MSB" )
, ( 121 , "Reset all controllers" )
, ( 123 , "All notes off" )
]
| sixohsix/hmidi | examples/GM.hs | bsd-3-clause | 6,720 | 0 | 6 | 2,004 | 1,932 | 1,276 | 656 | 212 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Generate haddocks
module Stack.Build.Haddock
( generateLocalHaddockIndex
, generateDepsHaddockIndex
, generateSnapHaddockIndex
, openHaddocksInBrowser
, shouldHaddockPackage
, shouldHaddockDeps
) where
import Stack.Prelude
import qualified Data.Foldable as F
import qualified Data.HashSet as HS
import Data.List.Extra (nubOrd)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Time (UTCTime)
import Path
import Path.Extra
import Path.IO
import Stack.PackageDump
import Stack.PrettyPrint
import Stack.Types.Build
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Runner
import qualified System.FilePath as FP
import System.Process.Read
import Web.Browser (openBrowser)
openHaddocksInBrowser
:: HasRunner env
=> BaseConfigOpts
-> Map PackageName (PackageIdentifier, InstallLocation)
-- ^ Available packages and their locations for the current project
-> Set PackageName
-- ^ Build targets as determined by 'Stack.Build.Source.loadSourceMap'
-> RIO env ()
openHaddocksInBrowser bco pkgLocations buildTargets = do
let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco
getDocIndex = do
let localDocs = haddockIndexFile (localDepsDocDir bco)
localExists <- doesFileExist localDocs
if localExists
then return localDocs
else do
let snapDocs = haddockIndexFile (snapDocDir bco)
snapExists <- doesFileExist snapDocs
if snapExists
then return snapDocs
else throwString "No local or snapshot doc index found to open."
docFile <-
case (cliTargets, map (`Map.lookup` pkgLocations) (Set.toList buildTargets)) of
([_], [Just (pkgId, iloc)]) -> do
pkgRelDir <- (parseRelDir . packageIdentifierString) pkgId
let docLocation =
case iloc of
Snap -> snapDocDir bco
Local -> localDocDir bco
let docFile = haddockIndexFile (docLocation </> pkgRelDir)
exists <- doesFileExist docFile
if exists
then return docFile
else do
logWarn $
"Expected to find documentation at " <>
T.pack (toFilePath docFile) <>
", but that file is missing. Opening doc index instead."
getDocIndex
_ -> getDocIndex
prettyInfo $ "Opening" <+> display docFile <+> "in the browser."
_ <- liftIO $ openBrowser (toFilePath docFile)
return ()
-- | Determine whether we should haddock for a package.
shouldHaddockPackage :: BuildOpts
-> Set PackageName -- ^ Packages that we want to generate haddocks for
-- in any case (whether or not we are going to generate
-- haddocks for dependencies)
-> PackageName
-> Bool
shouldHaddockPackage bopts wanted name =
if Set.member name wanted
then boptsHaddock bopts
else shouldHaddockDeps bopts
-- | Determine whether to build haddocks for dependencies.
shouldHaddockDeps :: BuildOpts -> Bool
shouldHaddockDeps bopts = fromMaybe (boptsHaddock bopts) (boptsHaddockDeps bopts)
-- | Generate Haddock index and contents for local packages.
generateLocalHaddockIndex
:: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Local package dump
-> [LocalPackage]
-> m ()
generateLocalHaddockIndex envOverride wc bco localDumpPkgs locals = do
let dumpPackages =
mapMaybe
(\LocalPackage{lpPackage = Package{..}} ->
F.find
(\dp -> dpPackageIdent dp == PackageIdentifier packageName packageVersion)
localDumpPkgs)
locals
generateHaddockIndex
"local packages"
envOverride
wc
bco
dumpPackages
"."
(localDocDir bco)
-- | Generate Haddock index and contents for local packages and their dependencies.
generateDepsHaddockIndex
:: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Global dump information
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Snapshot dump information
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Local dump information
-> [LocalPackage]
-> m ()
generateDepsHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs localDumpPkgs locals = do
let deps = (mapMaybe (`lookupDumpPackage` allDumpPkgs) . nubOrd . findTransitiveDepends . mapMaybe getGhcPkgId) locals
depDocDir = localDepsDocDir bco
generateHaddockIndex
"local packages and dependencies"
envOverride
wc
bco
deps
".."
depDocDir
where
getGhcPkgId :: LocalPackage -> Maybe GhcPkgId
getGhcPkgId LocalPackage{lpPackage = Package{..}} =
let pkgId = PackageIdentifier packageName packageVersion
mdpPkg = F.find (\dp -> dpPackageIdent dp == pkgId) localDumpPkgs
in fmap dpGhcPkgId mdpPkg
findTransitiveDepends :: [GhcPkgId] -> [GhcPkgId]
findTransitiveDepends = (`go` HS.empty) . HS.fromList
where
go todo checked =
case HS.toList todo of
[] -> HS.toList checked
(ghcPkgId:_) ->
let deps =
case lookupDumpPackage ghcPkgId allDumpPkgs of
Nothing -> HS.empty
Just pkgDP -> HS.fromList (dpDepends pkgDP)
deps' = deps `HS.difference` checked
todo' = HS.delete ghcPkgId (deps' `HS.union` todo)
checked' = HS.insert ghcPkgId checked
in go todo' checked'
allDumpPkgs = [localDumpPkgs, snapshotDumpPkgs, globalDumpPkgs]
-- | Generate Haddock index and contents for all snapshot packages.
generateSnapHaddockIndex
:: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Global package dump
-> Map GhcPkgId (DumpPackage () () ()) -- ^ Snapshot package dump
-> m ()
generateSnapHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs =
generateHaddockIndex
"snapshot packages"
envOverride
wc
bco
(Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs)
"."
(snapDocDir bco)
-- | Generate Haddock index and contents for specified packages.
generateHaddockIndex
:: (MonadUnliftIO m, MonadLogger m)
=> Text
-> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> [DumpPackage () () ()]
-> FilePath
-> Path Abs Dir
-> m ()
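-- An illustrative sketch of the command line this builds (paths made up,
-- not taken from the original source):
--
-- > haddock --optghc=-package-db=<snap-db> --optghc=-package-db=<local-db> \
-- >     --gen-contents --gen-index \
-- >     -i ../foo-1.0,../foo-1.0/foo.haddock -i ../bar-2.1,../bar-2.1/bar.haddock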
generateHaddockIndex descr envOverride wc bco dumpPackages docRelFP destDir = do
ensureDir destDir
interfaceOpts <- (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages
unless (null interfaceOpts) $ do
let destIndexFile = haddockIndexFile destDir
eindexModTime <- liftIO (tryGetModificationTime destIndexFile)
let needUpdate =
case eindexModTime of
Left _ -> True
Right indexModTime ->
or [mt > indexModTime | (_,mt,_,_) <- interfaceOpts]
if needUpdate
then do
logInfo
(T.concat ["Updating Haddock index for ", descr, " in\n",
T.pack (toFilePath destIndexFile)])
liftIO (mapM_ copyPkgDocs interfaceOpts)
readProcessNull
(Just destDir)
envOverride
(haddockExeName wc)
(map (("--optghc=-package-db=" ++ ) . toFilePathNoTrailingSep)
[bcoSnapDB bco, bcoLocalDB bco] ++
hoAdditionalArgs (boptsHaddockOpts (bcoBuildOpts bco)) ++
["--gen-contents", "--gen-index"] ++
[x | (xs,_,_,_) <- interfaceOpts, x <- xs])
else
logInfo
(T.concat ["Haddock index for ", descr, " already up to date at:\n",
T.pack (toFilePath destIndexFile)])
where
toInterfaceOpt :: DumpPackage a b c -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File))
toInterfaceOpt DumpPackage {..} =
case dpHaddockInterfaces of
[] -> return Nothing
srcInterfaceFP:_ -> do
srcInterfaceAbsFile <- parseCollapsedAbsFile srcInterfaceFP
let (PackageIdentifier name _) = dpPackageIdent
destInterfaceRelFP =
docRelFP FP.</>
packageIdentifierString dpPackageIdent FP.</>
(packageNameString name FP.<.> "haddock")
destInterfaceAbsFile <- parseCollapsedAbsFile (toFilePath destDir FP.</> destInterfaceRelFP)
esrcInterfaceModTime <- tryGetModificationTime srcInterfaceAbsFile
return $
case esrcInterfaceModTime of
Left _ -> Nothing
Right srcInterfaceModTime ->
Just
( [ "-i"
, concat
[ docRelFP FP.</> packageIdentifierString dpPackageIdent
, ","
, destInterfaceRelFP ]]
, srcInterfaceModTime
, srcInterfaceAbsFile
, destInterfaceAbsFile )
copyPkgDocs :: (a, UTCTime, Path Abs File, Path Abs File) -> IO ()
copyPkgDocs (_,srcInterfaceModTime,srcInterfaceAbsFile,destInterfaceAbsFile) = do
-- Copy dependencies' haddocks to documentation directory. This way, relative @../$pkg-$ver@
-- links work and it's easy to upload docs to a web server or otherwise view them in a
-- non-local-filesystem context. We copy instead of symlink for two reasons: (1) symlinks
-- aren't reliably supported on Windows, and (2) the filesystem containing dependencies'
-- docs may not be available where viewing the docs (e.g. if building in a Docker
-- container).
edestInterfaceModTime <- tryGetModificationTime destInterfaceAbsFile
case edestInterfaceModTime of
Left _ -> doCopy
Right destInterfaceModTime
| destInterfaceModTime < srcInterfaceModTime -> doCopy
| otherwise -> return ()
where
doCopy = do
ignoringAbsence (removeDirRecur destHtmlAbsDir)
ensureDir destHtmlAbsDir
onException
(copyDirRecur' (parent srcInterfaceAbsFile) destHtmlAbsDir)
(ignoringAbsence (removeDirRecur destHtmlAbsDir))
destHtmlAbsDir = parent destInterfaceAbsFile
-- | Find first DumpPackage matching the GhcPkgId
lookupDumpPackage :: GhcPkgId
-> [Map GhcPkgId (DumpPackage () () ())]
-> Maybe (DumpPackage () () ())
lookupDumpPackage ghcPkgId dumpPkgs =
listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs
-- | Path of haddock index file.
haddockIndexFile :: Path Abs Dir -> Path Abs File
haddockIndexFile destDir = destDir </> $(mkRelFile "index.html")
-- | Path of local packages documentation directory.
localDocDir :: BaseConfigOpts -> Path Abs Dir
localDocDir bco = bcoLocalInstallRoot bco </> docDirSuffix
-- | Path of documentation directory for the dependencies of local packages.
localDepsDocDir :: BaseConfigOpts -> Path Abs Dir
localDepsDocDir bco = localDocDir bco </> $(mkRelDir "all")
-- | Path of snapshot packages documentation directory.
snapDocDir :: BaseConfigOpts -> Path Abs Dir
snapDocDir bco = bcoSnapInstallRoot bco </> docDirSuffix
| MichielDerhaeg/stack | src/Stack/Build/Haddock.hs | bsd-3-clause | 13,062 | 0 | 29 | 4,380 | 2,612 | 1,342 | 1,270 | 265 | 6 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/ByteString/Short.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE Trustworthy #-}
-- |
-- Module : Data.ByteString.Short
-- Copyright : (c) Duncan Coutts 2012-2013
-- License : BSD-style
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : ghc only
--
-- A compact representation suitable for storing short byte strings in memory.
--
-- In typical use cases it can be imported alongside "Data.ByteString", e.g.
--
-- > import qualified Data.ByteString as B
-- > import qualified Data.ByteString.Short as B
-- > (ShortByteString, toShort, fromShort)
--
-- Other 'ShortByteString' operations clash with "Data.ByteString" or "Prelude"
-- functions however, so they should be imported @qualified@ with a different
-- alias e.g.
--
-- > import qualified Data.ByteString.Short as B.Short
--
module Data.ByteString.Short (
-- * The @ShortByteString@ type
ShortByteString,
-- ** Memory overhead
-- | With GHC, the memory overheads are as follows, expressed in words and
-- in bytes (words are 4 and 8 bytes on 32 or 64bit machines respectively).
--
-- * 'ByteString' unshared: 9 words; 36 or 72 bytes.
--
-- * 'ByteString' shared substring: 5 words; 20 or 40 bytes.
--
-- * 'ShortByteString': 4 words; 16 or 32 bytes.
--
-- For the string data itself, both 'ShortByteString' and 'ByteString' use
-- one byte per element, rounded up to the nearest word. For example,
-- including the overheads, a length 10 'ShortByteString' would take
-- @16 + 12 = 28@ bytes on a 32bit platform and @32 + 16 = 48@ bytes on a
-- 64bit platform.
--
-- These overheads can all be reduced by 1 word (4 or 8 bytes) when the
-- 'ShortByteString' or 'ByteString' is unpacked into another constructor.
--
-- For example:
--
-- > data ThingId = ThingId {-# UNPACK #-} !Int
-- > {-# UNPACK #-} !ShortByteString
--
-- This will take @1 + 1 + 3@ words (the @ThingId@ constructor +
-- unpacked @Int@ + unpacked @ShortByteString@), plus the words for the
-- string data.
-- ** Heap fragmentation
-- | With GHC, the 'ByteString' representation uses /pinned/ memory,
-- meaning it cannot be moved by the GC. This is usually the right thing to
-- do for larger strings, but for small strings using pinned memory can
-- lead to heap fragmentation which wastes space. The 'ShortByteString'
-- type (and the @Text@ type from the @text@ package) use /unpinned/ memory
-- so they do not contribute to heap fragmentation. In addition, with GHC,
-- small unpinned strings are allocated in the same way as normal heap
-- allocations, rather than in a separate pinned area.
-- * Conversions
toShort,
fromShort,
pack,
unpack,
-- * Other operations
empty, null, length, index,
) where
import Data.ByteString.Short.Internal
import Prelude ()
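-- A minimal usage sketch (not part of the original module; the names below
-- are illustrative only). The second binding has no type signature because
-- 'Prelude' is hidden here, so 'Word8' is not in scope to name its type.
exampleShort :: ShortByteString
exampleShort = pack [104, 105] -- the bytes of "hi"

exampleBytes = unpack exampleShort -- [104, 105]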
| phischu/fragnix | tests/packages/scotty/Data.ByteString.Short.hs | bsd-3-clause | 3,063 | 0 | 4 | 763 | 116 | 100 | 16 | 13 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.AutoScaling.SuspendProcesses
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Suspends the specified Auto Scaling processes for the specified Auto Scaling
-- group. To suspend specific processes, use the 'ScalingProcesses' parameter. To
-- suspend all processes, omit the 'ScalingProcesses' parameter.
--
-- Note that if you suspend either the 'Launch' or 'Terminate' process types, it
-- can prevent other process types from functioning properly.
--
-- To resume processes that have been suspended, use 'ResumeProcesses'.
--
-- For more information, see <http://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/US_SuspendResume.html Suspend and Resume Auto Scaling Processes> in the /Auto Scaling Developer Guide/.
--
-- <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_SuspendProcesses.html>
module Network.AWS.AutoScaling.SuspendProcesses
(
-- * Request
SuspendProcesses
-- ** Request constructor
, suspendProcesses
-- ** Request lenses
, spAutoScalingGroupName
, spScalingProcesses
-- * Response
, SuspendProcessesResponse
-- ** Response constructor
, suspendProcessesResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.AutoScaling.Types
import qualified GHC.Exts
data SuspendProcesses = SuspendProcesses
{ _spAutoScalingGroupName :: Text
, _spScalingProcesses :: List "member" Text
} deriving (Eq, Ord, Read, Show)
-- | 'SuspendProcesses' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'spAutoScalingGroupName' @::@ 'Text'
--
-- * 'spScalingProcesses' @::@ ['Text']
--
suspendProcesses :: Text -- ^ 'spAutoScalingGroupName'
-> SuspendProcesses
suspendProcesses p1 = SuspendProcesses
{ _spAutoScalingGroupName = p1
, _spScalingProcesses = mempty
}
-- | The name or Amazon Resource Name (ARN) of the Auto Scaling group.
spAutoScalingGroupName :: Lens' SuspendProcesses Text
spAutoScalingGroupName =
lens _spAutoScalingGroupName (\s a -> s { _spAutoScalingGroupName = a })
-- | One or more of the following processes:
--
-- Launch, Terminate, HealthCheck, ReplaceUnhealthy, AZRebalance,
-- AlarmNotification, ScheduledActions, AddToLoadBalancer
spScalingProcesses :: Lens' SuspendProcesses [Text]
spScalingProcesses =
lens _spScalingProcesses (\s a -> s { _spScalingProcesses = a })
. _List
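-- A minimal usage sketch (not part of the generated module). It assumes the
-- lens operators (&) and (.~) are re-exported via Network.AWS.Prelude, as in
-- other generated modules; the group and process names are made up.
exampleSuspendProcesses :: SuspendProcesses
exampleSuspendProcesses =
    suspendProcesses "my-auto-scaling-group"
        & spScalingProcesses .~ ["Launch", "Terminate"]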
data SuspendProcessesResponse = SuspendProcessesResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'SuspendProcessesResponse' constructor.
suspendProcessesResponse :: SuspendProcessesResponse
suspendProcessesResponse = SuspendProcessesResponse
instance ToPath SuspendProcesses where
toPath = const "/"
instance ToQuery SuspendProcesses where
toQuery SuspendProcesses{..} = mconcat
[ "AutoScalingGroupName" =? _spAutoScalingGroupName
, "ScalingProcesses" =? _spScalingProcesses
]
instance ToHeaders SuspendProcesses
instance AWSRequest SuspendProcesses where
type Sv SuspendProcesses = AutoScaling
type Rs SuspendProcesses = SuspendProcessesResponse
request = post "SuspendProcesses"
response = nullResponse SuspendProcessesResponse
| kim/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/SuspendProcesses.hs | mpl-2.0 | 4,178 | 0 | 10 | 816 | 410 | 254 | 156 | 54 | 1 |
-----------------------------------------------------------------------------
--
-- Object-file symbols (called CLabel for hysterical raisins).
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module CLabel (
CLabel, -- abstract type
ForeignLabelSource(..),
pprDebugCLabel,
mkClosureLabel,
mkSRTLabel,
mkTopSRTLabel,
mkInfoTableLabel,
mkEntryLabel,
mkSlowEntryLabel,
mkConEntryLabel,
mkStaticConEntryLabel,
mkRednCountsLabel,
mkConInfoTableLabel,
mkStaticInfoTableLabel,
mkLargeSRTLabel,
mkApEntryLabel,
mkApInfoTableLabel,
mkClosureTableLabel,
mkLocalClosureLabel,
mkLocalInfoTableLabel,
mkLocalEntryLabel,
mkLocalConEntryLabel,
mkLocalStaticConEntryLabel,
mkLocalConInfoTableLabel,
mkLocalStaticInfoTableLabel,
mkLocalClosureTableLabel,
mkReturnPtLabel,
mkReturnInfoLabel,
mkAltLabel,
mkDefaultLabel,
mkBitmapLabel,
mkStringLitLabel,
mkAsmTempLabel,
mkPlainModuleInitLabel,
mkSplitMarkerLabel,
mkDirty_MUT_VAR_Label,
mkUpdInfoLabel,
mkBHUpdInfoLabel,
mkIndStaticInfoLabel,
mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel,
mkMAP_FROZEN0_infoLabel,
mkMAP_DIRTY_infoLabel,
mkSMAP_FROZEN_infoLabel,
mkSMAP_FROZEN0_infoLabel,
mkSMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel,
mkArrWords_infoLabel,
mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel,
mkCAFBlackHoleEntryLabel,
mkRtsPrimOpLabel,
mkRtsSlowFastTickyCtrLabel,
mkSelectorInfoLabel,
mkSelectorEntryLabel,
mkCmmInfoLabel,
mkCmmEntryLabel,
mkCmmRetInfoLabel,
mkCmmRetLabel,
mkCmmCodeLabel,
mkCmmDataLabel,
mkCmmClosureLabel,
mkRtsApFastLabel,
mkPrimCallLabel,
mkForeignLabel,
addLabelSize,
foreignLabelStdcallInfo,
mkCCLabel, mkCCSLabel,
DynamicLinkerLabelInfo(..),
mkDynamicLinkerLabel,
dynamicLinkerLabelInfo,
mkPicBaseLabel,
mkDeadStripPreventer,
mkHpcTicksLabel,
hasCAF,
needsCDecl, isAsmTemp, maybeAsmTemp, externallyVisibleCLabel,
isMathFun,
isCFunctionLabel, isGcPtrLabel, labelDynamic,
-- * Conversions
toClosureLbl, toSlowEntryLbl, toEntryLbl, toInfoLbl, toRednCountsLbl, hasHaskellName,
pprCLabel
) where
import IdInfo
import BasicTypes
import Packages
import Module
import Name
import Unique
import PrimOp
import Config
import CostCentre
import Outputable
import FastString
import DynFlags
import Platform
import UniqSet
-- -----------------------------------------------------------------------------
-- The CLabel type
{-
| CLabel is an abstract type that supports the following operations:
- Pretty printing
- In a C file, does it need to be declared before use? (i.e. is it
guaranteed to be already in scope in the places we need to refer to it?)
- If it needs to be declared, what type (code or data) should it be
declared to have?
- Is it visible outside this object file or not?
- Is it "dynamic" (see details below)
- Eq and Ord, so that we can make sets of CLabels (currently only
used in outputting C as far as I can tell, to avoid generating
more than one declaration for any given label).
- Converting an info table label into an entry label.
-}
data CLabel
= -- | A label related to the definition of a particular Id or Con in a .hs file.
IdLabel
Name
CafInfo
IdLabelInfo -- encodes the suffix of the label
-- | A label from a .cmm file that is not associated with a .hs level Id.
| CmmLabel
PackageKey -- what package the label belongs to.
FastString -- identifier giving the prefix of the label
CmmLabelInfo -- encodes the suffix of the label
-- | A label with a baked-in \/ algorithmically generated name that definitely
-- comes from the RTS. The code for it must compile into libHSrts.a \/ libHSrts.so
-- If it doesn't have an algorithmically generated name then use a CmmLabel
-- instead and give it an appropriate PackageKey argument.
| RtsLabel
RtsLabelInfo
-- | A 'C' (or otherwise foreign) label.
--
| ForeignLabel
FastString -- name of the imported label.
(Maybe Int) -- possible '@n' suffix for stdcall functions
-- When generating C, the '@n' suffix is omitted, but when
-- generating assembler we must add it to the label.
ForeignLabelSource -- what package the foreign label is in.
FunctionOrData
-- | A family of labels related to a particular case expression.
| CaseLabel
{-# UNPACK #-} !Unique -- Unique says which case expression
CaseLabelInfo
| AsmTempLabel
{-# UNPACK #-} !Unique
| StringLitLabel
{-# UNPACK #-} !Unique
| PlainModuleInitLabel -- without the version & way info
Module
| CC_Label CostCentre
| CCS_Label CostCentreStack
-- | These labels are generated and used inside the NCG only.
-- They are special variants of a label used for dynamic linking
-- see module PositionIndependentCode for details.
| DynamicLinkerLabel DynamicLinkerLabelInfo CLabel
-- | This label is generated and used inside the NCG only.
-- It is used as a base for PIC calculations on some platforms.
-- It takes the form of a local numeric assembler label '1'; and
-- is pretty-printed as 1b, referring to the previous definition
-- of 1: in the assembler source file.
| PicBaseLabel
-- | A label before an info table to prevent excessive dead-stripping on darwin
| DeadStripPreventer CLabel
-- | Per-module table of tick locations
| HpcTicksLabel Module
-- | Static reference table
| SRTLabel !Unique
-- | Label of an StgLargeSRT
| LargeSRTLabel
{-# UNPACK #-} !Unique
-- | A bitmap (function or case return)
| LargeBitmapLabel
{-# UNPACK #-} !Unique
deriving (Eq, Ord)
-- | Record where a foreign label is stored.
data ForeignLabelSource
-- | Label is in a named package
= ForeignLabelInPackage PackageKey
-- | Label is in some external, system package that doesn't also
-- contain compiled Haskell code, and is not associated with any .hi files.
-- We don't have to worry about Haskell code being inlined from
-- external packages. It is safe to treat the RTS package as "external".
| ForeignLabelInExternalPackage
        -- | Label is in the package currently being compiled.
-- This is only used for creating hacky tmp labels during code generation.
-- Don't use it in any code that might be inlined across a package boundary
-- (ie, core code) else the information will be wrong relative to the
-- destination module.
| ForeignLabelInThisPackage
deriving (Eq, Ord)
-- | For debugging problems with the CLabel representation.
-- We can't make a Show instance for CLabel because lots of its components don't have instances.
-- The regular Outputable instance only shows the label name, and not its other info.
--
pprDebugCLabel :: CLabel -> SDoc
pprDebugCLabel lbl
= case lbl of
IdLabel{} -> ppr lbl <> (parens $ text "IdLabel")
CmmLabel pkg _name _info
-> ppr lbl <> (parens $ text "CmmLabel" <+> ppr pkg)
RtsLabel{} -> ppr lbl <> (parens $ text "RtsLabel")
ForeignLabel _name mSuffix src funOrData
-> ppr lbl <> (parens $ text "ForeignLabel"
<+> ppr mSuffix
<+> ppr src
<+> ppr funOrData)
_ -> ppr lbl <> (parens $ text "other CLabel)")
data IdLabelInfo
= Closure -- ^ Label for closure
| SRT -- ^ Static reference table (TODO: could be removed
-- with the old code generator, but might be needed
-- when we implement the New SRT Plan)
| InfoTable -- ^ Info tables for closures; always read-only
| Entry -- ^ Entry point
| Slow -- ^ Slow entry point
| LocalInfoTable -- ^ Like InfoTable but not externally visible
| LocalEntry -- ^ Like Entry but not externally visible
| RednCounts -- ^ Label of place to keep Ticky-ticky info for this Id
| ConEntry -- ^ Constructor entry point
| ConInfoTable -- ^ Corresponding info table
| StaticConEntry -- ^ Static constructor entry point
| StaticInfoTable -- ^ Corresponding info table
| ClosureTable -- ^ Table of closures for Enum tycons
deriving (Eq, Ord)
data CaseLabelInfo
= CaseReturnPt
| CaseReturnInfo
| CaseAlt ConTag
| CaseDefault
deriving (Eq, Ord)
data RtsLabelInfo
= RtsSelectorInfoTable Bool{-updatable-} Int{-offset-} -- ^ Selector thunks
| RtsSelectorEntry Bool{-updatable-} Int{-offset-}
| RtsApInfoTable Bool{-updatable-} Int{-arity-} -- ^ AP thunks
| RtsApEntry Bool{-updatable-} Int{-arity-}
| RtsPrimOp PrimOp
| RtsApFast FastString -- ^ _fast versions of generic apply
| RtsSlowFastTickyCtr String
deriving (Eq, Ord)
-- NOTE: Eq on LitString compares the pointer only, so this isn't
-- a real equality.
-- | What type of Cmm label we're dealing with.
-- Determines the suffix appended to the name when a CLabel.CmmLabel
-- is pretty printed.
data CmmLabelInfo
  = CmmInfo                       -- ^ misc rts info tables, suffix _info
| CmmEntry -- ^ misc rts entry points, suffix _entry
| CmmRetInfo -- ^ misc rts ret info tables, suffix _info
| CmmRet -- ^ misc rts return points, suffix _ret
| CmmData -- ^ misc rts data bits, eg CHARLIKE_closure
| CmmCode -- ^ misc rts code
| CmmClosure -- ^ closures eg CHARLIKE_closure
| CmmPrimCall -- ^ a prim call to some hand written Cmm code
deriving (Eq, Ord)
data DynamicLinkerLabelInfo
= CodeStub -- MachO: Lfoo$stub, ELF: foo@plt
| SymbolPtr -- MachO: Lfoo$non_lazy_ptr, Windows: __imp_foo
| GotSymbolPtr -- ELF: foo@got
| GotSymbolOffset -- ELF: foo@gotoff
deriving (Eq, Ord)
-- -----------------------------------------------------------------------------
-- Constructing CLabels
-- -----------------------------------------------------------------------------
-- Constructing IdLabels
-- These are always local:
mkSlowEntryLabel :: Name -> CafInfo -> CLabel
mkSlowEntryLabel name c = IdLabel name c Slow
mkTopSRTLabel :: Unique -> CLabel
mkTopSRTLabel u = SRTLabel u
mkSRTLabel :: Name -> CafInfo -> CLabel
mkRednCountsLabel :: Name -> CLabel
mkSRTLabel name c = IdLabel name c SRT
mkRednCountsLabel name =
IdLabel name NoCafRefs RednCounts -- Note [ticky for LNE]
-- These have local & (possibly) external variants:
mkLocalClosureLabel :: Name -> CafInfo -> CLabel
mkLocalInfoTableLabel :: Name -> CafInfo -> CLabel
mkLocalEntryLabel :: Name -> CafInfo -> CLabel
mkLocalClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalClosureLabel name c = IdLabel name c Closure
mkLocalInfoTableLabel name c = IdLabel name c LocalInfoTable
mkLocalEntryLabel name c = IdLabel name c LocalEntry
mkLocalClosureTableLabel name c = IdLabel name c ClosureTable
mkClosureLabel :: Name -> CafInfo -> CLabel
mkInfoTableLabel :: Name -> CafInfo -> CLabel
mkEntryLabel :: Name -> CafInfo -> CLabel
mkClosureTableLabel :: Name -> CafInfo -> CLabel
mkLocalConInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalConEntryLabel :: CafInfo -> Name -> CLabel
mkLocalStaticInfoTableLabel :: CafInfo -> Name -> CLabel
mkLocalStaticConEntryLabel :: CafInfo -> Name -> CLabel
mkConInfoTableLabel :: Name -> CafInfo -> CLabel
mkStaticInfoTableLabel :: Name -> CafInfo -> CLabel
mkClosureLabel name c = IdLabel name c Closure
mkInfoTableLabel name c = IdLabel name c InfoTable
mkEntryLabel name c = IdLabel name c Entry
mkClosureTableLabel name c = IdLabel name c ClosureTable
mkLocalConInfoTableLabel c con = IdLabel con c ConInfoTable
mkLocalConEntryLabel c con = IdLabel con c ConEntry
mkLocalStaticInfoTableLabel c con = IdLabel con c StaticInfoTable
mkLocalStaticConEntryLabel c con = IdLabel con c StaticConEntry
mkConInfoTableLabel name c = IdLabel name c ConInfoTable
mkStaticInfoTableLabel name c = IdLabel name c StaticInfoTable
mkConEntryLabel :: Name -> CafInfo -> CLabel
mkStaticConEntryLabel :: Name -> CafInfo -> CLabel
mkConEntryLabel name c = IdLabel name c ConEntry
mkStaticConEntryLabel name c = IdLabel name c StaticConEntry
-- Constructing Cmm Labels
mkDirty_MUT_VAR_Label, mkSplitMarkerLabel, mkUpdInfoLabel,
mkBHUpdInfoLabel, mkIndStaticInfoLabel, mkMainCapabilityLabel,
mkMAP_FROZEN_infoLabel, mkMAP_FROZEN0_infoLabel, mkMAP_DIRTY_infoLabel,
mkEMPTY_MVAR_infoLabel, mkTopTickyCtrLabel,
mkCAFBlackHoleInfoTableLabel, mkCAFBlackHoleEntryLabel,
mkArrWords_infoLabel, mkSMAP_FROZEN_infoLabel, mkSMAP_FROZEN0_infoLabel,
mkSMAP_DIRTY_infoLabel :: CLabel
mkDirty_MUT_VAR_Label = mkForeignLabel (fsLit "dirty_MUT_VAR") Nothing ForeignLabelInExternalPackage IsFunction
mkSplitMarkerLabel = CmmLabel rtsPackageKey (fsLit "__stg_split_marker") CmmCode
mkUpdInfoLabel = CmmLabel rtsPackageKey (fsLit "stg_upd_frame") CmmInfo
mkBHUpdInfoLabel = CmmLabel rtsPackageKey (fsLit "stg_bh_upd_frame" ) CmmInfo
mkIndStaticInfoLabel = CmmLabel rtsPackageKey (fsLit "stg_IND_STATIC") CmmInfo
mkMainCapabilityLabel = CmmLabel rtsPackageKey (fsLit "MainCapability") CmmData
mkMAP_FROZEN_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_MUT_ARR_PTRS_FROZEN") CmmInfo
mkMAP_FROZEN0_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkMAP_DIRTY_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_MUT_ARR_PTRS_DIRTY") CmmInfo
mkEMPTY_MVAR_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_EMPTY_MVAR") CmmInfo
mkTopTickyCtrLabel = CmmLabel rtsPackageKey (fsLit "top_ct") CmmData
mkCAFBlackHoleInfoTableLabel = CmmLabel rtsPackageKey (fsLit "stg_CAF_BLACKHOLE") CmmInfo
mkCAFBlackHoleEntryLabel = CmmLabel rtsPackageKey (fsLit "stg_CAF_BLACKHOLE") CmmEntry
mkArrWords_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_ARR_WORDS") CmmInfo
mkSMAP_FROZEN_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN") CmmInfo
mkSMAP_FROZEN0_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_SMALL_MUT_ARR_PTRS_FROZEN0") CmmInfo
mkSMAP_DIRTY_infoLabel = CmmLabel rtsPackageKey (fsLit "stg_SMALL_MUT_ARR_PTRS_DIRTY") CmmInfo
-----
mkCmmInfoLabel, mkCmmEntryLabel, mkCmmRetInfoLabel, mkCmmRetLabel,
mkCmmCodeLabel, mkCmmDataLabel, mkCmmClosureLabel
:: PackageKey -> FastString -> CLabel
mkCmmInfoLabel pkg str = CmmLabel pkg str CmmInfo
mkCmmEntryLabel pkg str = CmmLabel pkg str CmmEntry
mkCmmRetInfoLabel pkg str = CmmLabel pkg str CmmRetInfo
mkCmmRetLabel pkg str = CmmLabel pkg str CmmRet
mkCmmCodeLabel pkg str = CmmLabel pkg str CmmCode
mkCmmDataLabel pkg str = CmmLabel pkg str CmmData
mkCmmClosureLabel pkg str = CmmLabel pkg str CmmClosure
-- Constructing RtsLabels
mkRtsPrimOpLabel :: PrimOp -> CLabel
mkRtsPrimOpLabel primop = RtsLabel (RtsPrimOp primop)
mkSelectorInfoLabel :: Bool -> Int -> CLabel
mkSelectorEntryLabel :: Bool -> Int -> CLabel
mkSelectorInfoLabel upd off = RtsLabel (RtsSelectorInfoTable upd off)
mkSelectorEntryLabel upd off = RtsLabel (RtsSelectorEntry upd off)
mkApInfoTableLabel :: Bool -> Int -> CLabel
mkApEntryLabel :: Bool -> Int -> CLabel
mkApInfoTableLabel upd off = RtsLabel (RtsApInfoTable upd off)
mkApEntryLabel upd off = RtsLabel (RtsApEntry upd off)
-- A call to some primitive hand written Cmm code
mkPrimCallLabel :: PrimCall -> CLabel
mkPrimCallLabel (PrimCall str pkg)
= CmmLabel pkg str CmmPrimCall
-- Constructing ForeignLabels
-- | Make a foreign label
mkForeignLabel
:: FastString -- name
-> Maybe Int -- size prefix
-> ForeignLabelSource -- what package it's in
-> FunctionOrData
-> CLabel
mkForeignLabel str mb_sz src fod
= ForeignLabel str mb_sz src fod
-- | Update the label size field in a ForeignLabel
addLabelSize :: CLabel -> Int -> CLabel
addLabelSize (ForeignLabel str _ src fod) sz
= ForeignLabel str (Just sz) src fod
addLabelSize label _
= label
-- | Get the label size field from a ForeignLabel
foreignLabelStdcallInfo :: CLabel -> Maybe Int
foreignLabelStdcallInfo (ForeignLabel _ info _ _) = info
foreignLabelStdcallInfo _lbl = Nothing
-- Constructing Large*Labels
mkLargeSRTLabel :: Unique -> CLabel
mkBitmapLabel :: Unique -> CLabel
mkLargeSRTLabel uniq = LargeSRTLabel uniq
mkBitmapLabel uniq = LargeBitmapLabel uniq
-- Constructing CaseLabels
mkReturnPtLabel :: Unique -> CLabel
mkReturnInfoLabel :: Unique -> CLabel
mkAltLabel :: Unique -> ConTag -> CLabel
mkDefaultLabel :: Unique -> CLabel
mkReturnPtLabel uniq = CaseLabel uniq CaseReturnPt
mkReturnInfoLabel uniq = CaseLabel uniq CaseReturnInfo
mkAltLabel uniq tag = CaseLabel uniq (CaseAlt tag)
mkDefaultLabel uniq = CaseLabel uniq CaseDefault
-- Constructing Cost Center Labels
mkCCLabel :: CostCentre -> CLabel
mkCCSLabel :: CostCentreStack -> CLabel
mkCCLabel cc = CC_Label cc
mkCCSLabel ccs = CCS_Label ccs
mkRtsApFastLabel :: FastString -> CLabel
mkRtsApFastLabel str = RtsLabel (RtsApFast str)
mkRtsSlowFastTickyCtrLabel :: String -> CLabel
mkRtsSlowFastTickyCtrLabel pat = RtsLabel (RtsSlowFastTickyCtr pat)
-- Constructing Code Coverage Labels
mkHpcTicksLabel :: Module -> CLabel
mkHpcTicksLabel = HpcTicksLabel
-- Constructing labels used for dynamic linking
mkDynamicLinkerLabel :: DynamicLinkerLabelInfo -> CLabel -> CLabel
mkDynamicLinkerLabel = DynamicLinkerLabel
dynamicLinkerLabelInfo :: CLabel -> Maybe (DynamicLinkerLabelInfo, CLabel)
dynamicLinkerLabelInfo (DynamicLinkerLabel info lbl) = Just (info, lbl)
dynamicLinkerLabelInfo _ = Nothing
mkPicBaseLabel :: CLabel
mkPicBaseLabel = PicBaseLabel
-- Constructing miscellaneous other labels
mkDeadStripPreventer :: CLabel -> CLabel
mkDeadStripPreventer lbl = DeadStripPreventer lbl
mkStringLitLabel :: Unique -> CLabel
mkStringLitLabel = StringLitLabel
mkAsmTempLabel :: Uniquable a => a -> CLabel
mkAsmTempLabel a = AsmTempLabel (getUnique a)
mkPlainModuleInitLabel :: Module -> CLabel
mkPlainModuleInitLabel mod = PlainModuleInitLabel mod
-- -----------------------------------------------------------------------------
-- Convert between different kinds of label
toClosureLbl :: CLabel -> CLabel
toClosureLbl (IdLabel n c _) = IdLabel n c Closure
toClosureLbl (CmmLabel m str _) = CmmLabel m str CmmClosure
toClosureLbl l = pprPanic "toClosureLbl" (ppr l)
toSlowEntryLbl :: CLabel -> CLabel
toSlowEntryLbl (IdLabel n c _) = IdLabel n c Slow
toSlowEntryLbl l = pprPanic "toSlowEntryLbl" (ppr l)
toEntryLbl :: CLabel -> CLabel
toEntryLbl (IdLabel n c LocalInfoTable) = IdLabel n c LocalEntry
toEntryLbl (IdLabel n c ConInfoTable) = IdLabel n c ConEntry
toEntryLbl (IdLabel n c StaticInfoTable) = IdLabel n c StaticConEntry
toEntryLbl (IdLabel n c _) = IdLabel n c Entry
toEntryLbl (CaseLabel n CaseReturnInfo) = CaseLabel n CaseReturnPt
toEntryLbl (CmmLabel m str CmmInfo) = CmmLabel m str CmmEntry
toEntryLbl (CmmLabel m str CmmRetInfo) = CmmLabel m str CmmRet
toEntryLbl l = pprPanic "toEntryLbl" (ppr l)
toInfoLbl :: CLabel -> CLabel
toInfoLbl (IdLabel n c Entry) = IdLabel n c InfoTable
toInfoLbl (IdLabel n c LocalEntry) = IdLabel n c LocalInfoTable
toInfoLbl (IdLabel n c ConEntry) = IdLabel n c ConInfoTable
toInfoLbl (IdLabel n c StaticConEntry) = IdLabel n c StaticInfoTable
toInfoLbl (IdLabel n c _) = IdLabel n c InfoTable
toInfoLbl (CaseLabel n CaseReturnPt) = CaseLabel n CaseReturnInfo
toInfoLbl (CmmLabel m str CmmEntry) = CmmLabel m str CmmInfo
toInfoLbl (CmmLabel m str CmmRet) = CmmLabel m str CmmRetInfo
toInfoLbl l = pprPanic "CLabel.toInfoLbl" (ppr l)
toRednCountsLbl :: CLabel -> Maybe CLabel
toRednCountsLbl = fmap mkRednCountsLabel . hasHaskellName
hasHaskellName :: CLabel -> Maybe Name
hasHaskellName (IdLabel n _ _) = Just n
hasHaskellName _ = Nothing
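-- A small illustrative sketch (not part of the original module): converting
-- between the entry label and the info-table label of a hypothetical RTS Cmm
-- symbol; the symbol name "stg_example" is made up for the example.
exampleInfoLbl :: CLabel
exampleInfoLbl = toInfoLbl (mkCmmEntryLabel rtsPackageKey (fsLit "stg_example"))

exampleEntryLbl :: CLabel
exampleEntryLbl = toEntryLbl exampleInfoLbl -- back to the CmmEntry label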
-- -----------------------------------------------------------------------------
-- Does a CLabel's referent itself refer to a CAF?
hasCAF :: CLabel -> Bool
hasCAF (IdLabel _ _ RednCounts) = False -- Note [ticky for LNE]
hasCAF (IdLabel _ MayHaveCafRefs _) = True
hasCAF _ = False
-- Note [ticky for LNE]
-- ~~~~~~~~~~~~~~~~~~~~~
-- Until 14 Feb 2013, every ticky counter was associated with a
-- closure. Thus, ticky labels used IdLabel. It is odd that
-- CmmBuildInfoTables.cafTransfers would consider such a ticky label
-- reason to add the name to the CAFEnv (and thus eventually the SRT),
-- but it was harmless because the ticky was only used if the closure
-- was also.
--
-- Since we now have ticky counters for LNEs, it is no longer the case
-- that every ticky counter has an actual closure. So I changed the
-- generation of ticky counters' CLabels to not result in their
-- associated id ending up in the SRT.
--
-- NB IdLabel is still appropriate for ticky ids (as opposed to
-- CmmLabel) because the LNE's counter is still related to an .hs Id,
-- that Id just isn't for a proper closure.
-- -----------------------------------------------------------------------------
-- Does a CLabel need declaring before use or not?
--
-- See wiki:Commentary/Compiler/Backends/PprC#Prototypes
needsCDecl :: CLabel -> Bool
-- False <=> it's pre-declared; don't bother
-- don't bother declaring Bitmap labels, we always make sure
-- they are defined before use.
needsCDecl (SRTLabel _) = True
needsCDecl (LargeSRTLabel _) = False
needsCDecl (LargeBitmapLabel _) = False
needsCDecl (IdLabel _ _ _) = True
needsCDecl (CaseLabel _ _) = True
needsCDecl (PlainModuleInitLabel _) = True
needsCDecl (StringLitLabel _) = False
needsCDecl (AsmTempLabel _) = False
needsCDecl (RtsLabel _) = False
needsCDecl (CmmLabel pkgId _ _)
-- Prototypes for labels defined in the runtime system are imported
-- into HC files via includes/Stg.h.
| pkgId == rtsPackageKey = False
-- For other labels we inline one into the HC file directly.
| otherwise = True
needsCDecl l@(ForeignLabel{}) = not (isMathFun l)
needsCDecl (CC_Label _) = True
needsCDecl (CCS_Label _) = True
needsCDecl (HpcTicksLabel _) = True
needsCDecl (DynamicLinkerLabel {}) = panic "needsCDecl DynamicLinkerLabel"
needsCDecl PicBaseLabel = panic "needsCDecl PicBaseLabel"
needsCDecl (DeadStripPreventer {}) = panic "needsCDecl DeadStripPreventer"
-- | Check whether a label is a local temporary for native code generation
isAsmTemp :: CLabel -> Bool
isAsmTemp (AsmTempLabel _) = True
isAsmTemp _ = False
-- | If a label is a local temporary used for native code generation
-- then return just its unique, otherwise nothing.
maybeAsmTemp :: CLabel -> Maybe Unique
maybeAsmTemp (AsmTempLabel uq) = Just uq
maybeAsmTemp _ = Nothing
-- | Check whether a label corresponds to a C function that has
-- a prototype in a system header somewhere, or is built-in
-- to the C compiler. For these labels we avoid generating our
-- own C prototypes.
isMathFun :: CLabel -> Bool
isMathFun (ForeignLabel fs _ _ _) = fs `elementOfUniqSet` math_funs
isMathFun _ = False
math_funs :: UniqSet FastString
math_funs = mkUniqSet [
-- _ISOC99_SOURCE
(fsLit "acos"), (fsLit "acosf"), (fsLit "acosh"),
(fsLit "acoshf"), (fsLit "acoshl"), (fsLit "acosl"),
(fsLit "asin"), (fsLit "asinf"), (fsLit "asinl"),
(fsLit "asinh"), (fsLit "asinhf"), (fsLit "asinhl"),
(fsLit "atan"), (fsLit "atanf"), (fsLit "atanl"),
(fsLit "atan2"), (fsLit "atan2f"), (fsLit "atan2l"),
(fsLit "atanh"), (fsLit "atanhf"), (fsLit "atanhl"),
(fsLit "cbrt"), (fsLit "cbrtf"), (fsLit "cbrtl"),
(fsLit "ceil"), (fsLit "ceilf"), (fsLit "ceill"),
(fsLit "copysign"), (fsLit "copysignf"), (fsLit "copysignl"),
(fsLit "cos"), (fsLit "cosf"), (fsLit "cosl"),
(fsLit "cosh"), (fsLit "coshf"), (fsLit "coshl"),
(fsLit "erf"), (fsLit "erff"), (fsLit "erfl"),
(fsLit "erfc"), (fsLit "erfcf"), (fsLit "erfcl"),
(fsLit "exp"), (fsLit "expf"), (fsLit "expl"),
(fsLit "exp2"), (fsLit "exp2f"), (fsLit "exp2l"),
(fsLit "expm1"), (fsLit "expm1f"), (fsLit "expm1l"),
(fsLit "fabs"), (fsLit "fabsf"), (fsLit "fabsl"),
(fsLit "fdim"), (fsLit "fdimf"), (fsLit "fdiml"),
(fsLit "floor"), (fsLit "floorf"), (fsLit "floorl"),
(fsLit "fma"), (fsLit "fmaf"), (fsLit "fmal"),
(fsLit "fmax"), (fsLit "fmaxf"), (fsLit "fmaxl"),
(fsLit "fmin"), (fsLit "fminf"), (fsLit "fminl"),
(fsLit "fmod"), (fsLit "fmodf"), (fsLit "fmodl"),
(fsLit "frexp"), (fsLit "frexpf"), (fsLit "frexpl"),
(fsLit "hypot"), (fsLit "hypotf"), (fsLit "hypotl"),
(fsLit "ilogb"), (fsLit "ilogbf"), (fsLit "ilogbl"),
(fsLit "ldexp"), (fsLit "ldexpf"), (fsLit "ldexpl"),
(fsLit "lgamma"), (fsLit "lgammaf"), (fsLit "lgammal"),
(fsLit "llrint"), (fsLit "llrintf"), (fsLit "llrintl"),
(fsLit "llround"), (fsLit "llroundf"), (fsLit "llroundl"),
(fsLit "log"), (fsLit "logf"), (fsLit "logl"),
(fsLit "log10l"), (fsLit "log10"), (fsLit "log10f"),
(fsLit "log1pl"), (fsLit "log1p"), (fsLit "log1pf"),
(fsLit "log2"), (fsLit "log2f"), (fsLit "log2l"),
(fsLit "logb"), (fsLit "logbf"), (fsLit "logbl"),
(fsLit "lrint"), (fsLit "lrintf"), (fsLit "lrintl"),
(fsLit "lround"), (fsLit "lroundf"), (fsLit "lroundl"),
(fsLit "modf"), (fsLit "modff"), (fsLit "modfl"),
(fsLit "nan"), (fsLit "nanf"), (fsLit "nanl"),
(fsLit "nearbyint"), (fsLit "nearbyintf"), (fsLit "nearbyintl"),
(fsLit "nextafter"), (fsLit "nextafterf"), (fsLit "nextafterl"),
(fsLit "nexttoward"), (fsLit "nexttowardf"), (fsLit "nexttowardl"),
(fsLit "pow"), (fsLit "powf"), (fsLit "powl"),
(fsLit "remainder"), (fsLit "remainderf"), (fsLit "remainderl"),
(fsLit "remquo"), (fsLit "remquof"), (fsLit "remquol"),
(fsLit "rint"), (fsLit "rintf"), (fsLit "rintl"),
(fsLit "round"), (fsLit "roundf"), (fsLit "roundl"),
(fsLit "scalbln"), (fsLit "scalblnf"), (fsLit "scalblnl"),
(fsLit "scalbn"), (fsLit "scalbnf"), (fsLit "scalbnl"),
(fsLit "sin"), (fsLit "sinf"), (fsLit "sinl"),
(fsLit "sinh"), (fsLit "sinhf"), (fsLit "sinhl"),
(fsLit "sqrt"), (fsLit "sqrtf"), (fsLit "sqrtl"),
(fsLit "tan"), (fsLit "tanf"), (fsLit "tanl"),
(fsLit "tanh"), (fsLit "tanhf"), (fsLit "tanhl"),
(fsLit "tgamma"), (fsLit "tgammaf"), (fsLit "tgammal"),
(fsLit "trunc"), (fsLit "truncf"), (fsLit "truncl"),
-- ISO C 99 also defines these function-like macros in math.h:
-- fpclassify, isfinite, isinf, isnormal, signbit, isgreater,
-- isgreaterequal, isless, islessequal, islessgreater, isunordered
-- additional symbols from _BSD_SOURCE
(fsLit "drem"), (fsLit "dremf"), (fsLit "dreml"),
(fsLit "finite"), (fsLit "finitef"), (fsLit "finitel"),
(fsLit "gamma"), (fsLit "gammaf"), (fsLit "gammal"),
(fsLit "isinf"), (fsLit "isinff"), (fsLit "isinfl"),
(fsLit "isnan"), (fsLit "isnanf"), (fsLit "isnanl"),
(fsLit "j0"), (fsLit "j0f"), (fsLit "j0l"),
(fsLit "j1"), (fsLit "j1f"), (fsLit "j1l"),
(fsLit "jn"), (fsLit "jnf"), (fsLit "jnl"),
(fsLit "lgamma_r"), (fsLit "lgammaf_r"), (fsLit "lgammal_r"),
(fsLit "scalb"), (fsLit "scalbf"), (fsLit "scalbl"),
(fsLit "significand"), (fsLit "significandf"), (fsLit "significandl"),
(fsLit "y0"), (fsLit "y0f"), (fsLit "y0l"),
(fsLit "y1"), (fsLit "y1f"), (fsLit "y1l"),
(fsLit "yn"), (fsLit "ynf"), (fsLit "ynl")
]
-- -----------------------------------------------------------------------------
-- | Is a CLabel visible outside this object file or not?
-- From the point of view of the code generator, a name is
-- externally visible if it has to be declared as exported
-- in the .o file's symbol table; that is, made non-static.
externallyVisibleCLabel :: CLabel -> Bool -- not C "static"
externallyVisibleCLabel (CaseLabel _ _) = False
externallyVisibleCLabel (StringLitLabel _) = False
externallyVisibleCLabel (AsmTempLabel _) = False
externallyVisibleCLabel (PlainModuleInitLabel _) = True
externallyVisibleCLabel (RtsLabel _) = True
externallyVisibleCLabel (CmmLabel _ _ _) = True
externallyVisibleCLabel (ForeignLabel{}) = True
externallyVisibleCLabel (IdLabel name _ info) = isExternalName name && externallyVisibleIdLabel info
externallyVisibleCLabel (CC_Label _) = True
externallyVisibleCLabel (CCS_Label _) = True
externallyVisibleCLabel (DynamicLinkerLabel _ _) = False
externallyVisibleCLabel (HpcTicksLabel _) = True
externallyVisibleCLabel (LargeBitmapLabel _) = False
externallyVisibleCLabel (SRTLabel _) = False
externallyVisibleCLabel (LargeSRTLabel _) = False
externallyVisibleCLabel (PicBaseLabel {}) = panic "externallyVisibleCLabel PicBaseLabel"
externallyVisibleCLabel (DeadStripPreventer {}) = panic "externallyVisibleCLabel DeadStripPreventer"
externallyVisibleIdLabel :: IdLabelInfo -> Bool
externallyVisibleIdLabel SRT = False
externallyVisibleIdLabel LocalInfoTable = False
externallyVisibleIdLabel LocalEntry = False
externallyVisibleIdLabel _ = True
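-- Added illustrative note (not in the original source): for instance, the
-- equations above keep the SRT, LocalInfoTable and LocalEntry flavours of an
-- IdLabel local to the object file, while the Entry/InfoTable labels of an
-- exported top-level Name are externally visible.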
-- -----------------------------------------------------------------------------
-- Finding the "type" of a CLabel
-- For generating correct types in label declarations:
data CLabelType
= CodeLabel -- Address of some executable instructions
| DataLabel -- Address of data, not a GC ptr
| GcPtrLabel -- Address of a (presumably static) GC object
isCFunctionLabel :: CLabel -> Bool
isCFunctionLabel lbl = case labelType lbl of
CodeLabel -> True
_other -> False
isGcPtrLabel :: CLabel -> Bool
isGcPtrLabel lbl = case labelType lbl of
GcPtrLabel -> True
_other -> False
-- | Work out the general type of data at the address of this label
-- whether it be code, data, or static GC object.
labelType :: CLabel -> CLabelType
labelType (CmmLabel _ _ CmmData) = DataLabel
labelType (CmmLabel _ _ CmmClosure) = GcPtrLabel
labelType (CmmLabel _ _ CmmCode) = CodeLabel
labelType (CmmLabel _ _ CmmInfo) = DataLabel
labelType (CmmLabel _ _ CmmEntry) = CodeLabel
labelType (CmmLabel _ _ CmmRetInfo) = DataLabel
labelType (CmmLabel _ _ CmmRet) = CodeLabel
labelType (RtsLabel (RtsSelectorInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApInfoTable _ _)) = DataLabel
labelType (RtsLabel (RtsApFast _)) = CodeLabel
labelType (CaseLabel _ CaseReturnInfo) = DataLabel
labelType (CaseLabel _ _) = CodeLabel
labelType (PlainModuleInitLabel _) = CodeLabel
labelType (SRTLabel _) = DataLabel
labelType (LargeSRTLabel _) = DataLabel
labelType (LargeBitmapLabel _) = DataLabel
labelType (ForeignLabel _ _ _ IsFunction) = CodeLabel
labelType (IdLabel _ _ info) = idInfoLabelType info
labelType _ = DataLabel
idInfoLabelType :: IdLabelInfo -> CLabelType
idInfoLabelType info =
case info of
InfoTable -> DataLabel
LocalInfoTable -> DataLabel
Closure -> GcPtrLabel
ConInfoTable -> DataLabel
StaticInfoTable -> DataLabel
ClosureTable -> DataLabel
RednCounts -> DataLabel
_ -> CodeLabel
-- -----------------------------------------------------------------------------
-- Does a CLabel need dynamic linkage?
-- When referring to data in code, we need to know whether
-- that data resides in a DLL or not. [Win32 only.]
-- @labelDynamic@ returns @True@ if the label is located
-- in a DLL, be it a data reference or not.
labelDynamic :: DynFlags -> PackageKey -> Module -> CLabel -> Bool
labelDynamic dflags this_pkg this_mod lbl =
case lbl of
-- is the RTS in a DLL or not?
RtsLabel _ -> not (gopt Opt_Static dflags) && (this_pkg /= rtsPackageKey)
IdLabel n _ _ -> isDllName dflags this_pkg this_mod n
-- When compiling in the "dyn" way, each package is to be linked into
-- its own shared library.
CmmLabel pkg _ _
| os == OSMinGW32 ->
not (gopt Opt_Static dflags) && (this_pkg /= pkg)
| otherwise ->
True
ForeignLabel _ _ source _ ->
if os == OSMinGW32
then case source of
-- Foreign label is in some un-named foreign package (or DLL).
ForeignLabelInExternalPackage -> True
-- Foreign label is linked into the same package as the
-- source file currently being compiled.
ForeignLabelInThisPackage -> False
-- Foreign label is in some named package.
-- When compiling in the "dyn" way, each package is to be
-- linked into its own DLL.
ForeignLabelInPackage pkgId ->
(not (gopt Opt_Static dflags)) && (this_pkg /= pkgId)
else -- On Mac OS X and on ELF platforms, false positives are OK,
-- so we claim that all foreign imports come from dynamic
-- libraries
True
PlainModuleInitLabel m -> not (gopt Opt_Static dflags) && this_pkg /= (modulePackageKey m)
HpcTicksLabel m -> not (gopt Opt_Static dflags) && this_pkg /= (modulePackageKey m)
-- Note that DynamicLinkerLabels do NOT require dynamic linking themselves.
_ -> False
where os = platformOS (targetPlatform dflags)
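-- Added illustrative note (not in the original source): e.g. an RtsLabel is
-- considered dynamic only when we are not linking statically and the package
-- being compiled is not the RTS package itself; on non-Windows targets every
-- CmmLabel is conservatively treated as dynamic, as the 'otherwise' case
-- above shows.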
{-
OLD?: These GRAN functions are needed for spitting out GRAN_FETCH() at the
right places. It is used to detect when the abstractC statement of a
CCodeBlock actually contains the code for a slow entry point. -- HWL
We need at least @Eq@ for @CLabels@, because we want to avoid
duplicate declarations in generating C (see @labelSeenTE@ in
@PprAbsC@).
-}
-----------------------------------------------------------------------------
-- Printing out CLabels.
{-
Convention:
<name>_<type>
where <name> is <Module>_<name> for external names and <unique> for
internal names. <type> is one of the following:
info Info table
srt Static reference table
srtd Static reference table descriptor
entry Entry code (function, closure)
slow Slow entry code (if any)
ret Direct return address
vtbl Vector table
<n>_alt Case alternative (tag n)
dflt Default case alternative
btm Large bitmap vector
closure Static closure
con_entry Dynamic Constructor entry code
con_info Dynamic Constructor info table
static_entry Static Constructor entry code
static_info Static Constructor info table
sel_info Selector info table
sel_entry Selector entry code
cc Cost centre
ccs Cost centre stack
Many of these distinctions are only for documentation reasons. For
example, _ret is only distinguished from _entry to make it easy to
tell whether a code fragment is a return point or a closure/function
entry.
Note [Closure and info labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For a function 'foo', we have:
foo_info : Points to the info table describing foo's closure
(and entry code for foo with tables next to code)
foo_closure : Static (no-free-var) closure only:
points to the statically-allocated closure
For a data constructor (such as Just or Nothing), we have:
Just_con_info: Info table for the data constructor itself
the first word of a heap-allocated Just
Just_info: Info table for the *worker function*, an
ordinary Haskell function of arity 1 that
allocates a (Just x) box:
Just = \x -> Just x
Just_closure: The closure for this worker
Nothing_closure: a statically allocated closure for Nothing
Nothing_static_info: info table for Nothing_closure
All these must be exported symbols, EXCEPT Just_info. We don't need to
export this because in other modules we either have
* A reference to 'Just'; use Just_closure
* A saturated call 'Just x'; allocate using Just_con_info
Not exporting these Just_info labels reduces the number of symbols
somewhat.
-}
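{-
Added illustrative example (not part of the original source; the names are
hypothetical): under the convention above, a top-level function 'bar' in a
module 'M' of the program being compiled would typically print as

    M_bar_info       info table (and entry code, with tables-next-to-code)
    M_bar_closure    statically-allocated closure

while internal labels use a unique, e.g. a case alternative with tag 1 prints
as '<unique>_1_alt'.
-}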
instance Outputable CLabel where
ppr c = sdocWithPlatform $ \platform -> pprCLabel platform c
pprCLabel :: Platform -> CLabel -> SDoc
pprCLabel platform (AsmTempLabel u)
| cGhcWithNativeCodeGen == "YES"
= getPprStyle $ \ sty ->
if asmStyle sty then
ptext (asmTempLabelPrefix platform) <> pprUnique u
else
char '_' <> pprUnique u
pprCLabel platform (DynamicLinkerLabel info lbl)
| cGhcWithNativeCodeGen == "YES"
= pprDynamicLinkerAsmLabel platform info lbl
pprCLabel _ PicBaseLabel
| cGhcWithNativeCodeGen == "YES"
= ptext (sLit "1b")
pprCLabel platform (DeadStripPreventer lbl)
| cGhcWithNativeCodeGen == "YES"
= pprCLabel platform lbl <> ptext (sLit "_dsp")
pprCLabel platform lbl
= getPprStyle $ \ sty ->
if cGhcWithNativeCodeGen == "YES" && asmStyle sty
then maybe_underscore (pprAsmCLbl platform lbl)
else pprCLbl lbl
maybe_underscore :: SDoc -> SDoc
maybe_underscore doc
| underscorePrefix = pp_cSEP <> doc
| otherwise = doc
pprAsmCLbl :: Platform -> CLabel -> SDoc
pprAsmCLbl platform (ForeignLabel fs (Just sz) _ _)
| platformOS platform == OSMinGW32
-- In asm mode, we need to put the suffix on a stdcall ForeignLabel.
-- (The C compiler does this itself).
= ftext fs <> char '@' <> int sz
pprAsmCLbl _ lbl
= pprCLbl lbl
pprCLbl :: CLabel -> SDoc
pprCLbl (StringLitLabel u)
= pprUnique u <> ptext (sLit "_str")
pprCLbl (CaseLabel u CaseReturnPt)
= hcat [pprUnique u, ptext (sLit "_ret")]
pprCLbl (CaseLabel u CaseReturnInfo)
= hcat [pprUnique u, ptext (sLit "_info")]
pprCLbl (CaseLabel u (CaseAlt tag))
= hcat [pprUnique u, pp_cSEP, int tag, ptext (sLit "_alt")]
pprCLbl (CaseLabel u CaseDefault)
= hcat [pprUnique u, ptext (sLit "_dflt")]
pprCLbl (SRTLabel u)
= pprUnique u <> pp_cSEP <> ptext (sLit "srt")
pprCLbl (LargeSRTLabel u) = pprUnique u <> pp_cSEP <> ptext (sLit "srtd")
pprCLbl (LargeBitmapLabel u) = text "b" <> pprUnique u <> pp_cSEP <> ptext (sLit "btm")
-- Some bitmaps for tuple constructors have a numeric tag (e.g. '7');
-- until that gets resolved we'll just force them to start
-- with a letter so the label will be legal assembly code.
pprCLbl (CmmLabel _ str CmmCode) = ftext str
pprCLbl (CmmLabel _ str CmmData) = ftext str
pprCLbl (CmmLabel _ str CmmPrimCall) = ftext str
pprCLbl (RtsLabel (RtsApFast str)) = ftext str <> ptext (sLit "_fast")
pprCLbl (RtsLabel (RtsSelectorInfoTable upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsSelectorEntry upd_reqd offset))
= hcat [ptext (sLit "stg_sel_"), text (show offset),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (RtsLabel (RtsApInfoTable upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_info")
else (sLit "_noupd_info"))
]
pprCLbl (RtsLabel (RtsApEntry upd_reqd arity))
= hcat [ptext (sLit "stg_ap_"), text (show arity),
ptext (if upd_reqd
then (sLit "_upd_entry")
else (sLit "_noupd_entry"))
]
pprCLbl (CmmLabel _ fs CmmInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmEntry)
= ftext fs <> ptext (sLit "_entry")
pprCLbl (CmmLabel _ fs CmmRetInfo)
= ftext fs <> ptext (sLit "_info")
pprCLbl (CmmLabel _ fs CmmRet)
= ftext fs <> ptext (sLit "_ret")
pprCLbl (CmmLabel _ fs CmmClosure)
= ftext fs <> ptext (sLit "_closure")
pprCLbl (RtsLabel (RtsPrimOp primop))
= ptext (sLit "stg_") <> ppr primop
pprCLbl (RtsLabel (RtsSlowFastTickyCtr pat))
= ptext (sLit "SLOW_CALL_fast_") <> text pat <> ptext (sLit "_ctr")
pprCLbl (ForeignLabel str _ _ _)
= ftext str
pprCLbl (IdLabel name _cafs flavor) = ppr name <> ppIdFlavor flavor
pprCLbl (CC_Label cc) = ppr cc
pprCLbl (CCS_Label ccs) = ppr ccs
pprCLbl (PlainModuleInitLabel mod)
= ptext (sLit "__stginit_") <> ppr mod
pprCLbl (HpcTicksLabel mod)
= ptext (sLit "_hpc_tickboxes_") <> ppr mod <> ptext (sLit "_hpc")
pprCLbl (AsmTempLabel {}) = panic "pprCLbl AsmTempLabel"
pprCLbl (DynamicLinkerLabel {}) = panic "pprCLbl DynamicLinkerLabel"
pprCLbl (PicBaseLabel {}) = panic "pprCLbl PicBaseLabel"
pprCLbl (DeadStripPreventer {}) = panic "pprCLbl DeadStripPreventer"
ppIdFlavor :: IdLabelInfo -> SDoc
ppIdFlavor x = pp_cSEP <>
(case x of
Closure -> ptext (sLit "closure")
SRT -> ptext (sLit "srt")
InfoTable -> ptext (sLit "info")
LocalInfoTable -> ptext (sLit "info")
Entry -> ptext (sLit "entry")
LocalEntry -> ptext (sLit "entry")
Slow -> ptext (sLit "slow")
RednCounts -> ptext (sLit "ct")
ConEntry -> ptext (sLit "con_entry")
ConInfoTable -> ptext (sLit "con_info")
StaticConEntry -> ptext (sLit "static_entry")
StaticInfoTable -> ptext (sLit "static_info")
ClosureTable -> ptext (sLit "closure_tbl")
)
pp_cSEP :: SDoc
pp_cSEP = char '_'
instance Outputable ForeignLabelSource where
ppr fs
= case fs of
ForeignLabelInPackage pkgId -> parens $ text "package: " <> ppr pkgId
ForeignLabelInThisPackage -> parens $ text "this package"
ForeignLabelInExternalPackage -> parens $ text "external package"
-- -----------------------------------------------------------------------------
-- Machine-dependent knowledge about labels.
underscorePrefix :: Bool -- leading underscore on assembler labels?
underscorePrefix = (cLeadingUnderscore == "YES")
asmTempLabelPrefix :: Platform -> LitString -- for formatting labels
asmTempLabelPrefix platform =
if platformOS platform == OSDarwin
then sLit "L"
else sLit ".L"
pprDynamicLinkerAsmLabel :: Platform -> DynamicLinkerLabelInfo -> CLabel -> SDoc
pprDynamicLinkerAsmLabel platform dllInfo lbl
= if platformOS platform == OSDarwin
then if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> char 'L' <> ppr lbl <> text "$stub"
SymbolPtr -> char 'L' <> ppr lbl <> text "$non_lazy_ptr"
GotSymbolPtr -> ppr lbl <> text "@GOTPCREL"
GotSymbolOffset -> ppr lbl
else case dllInfo of
CodeStub -> char 'L' <> ppr lbl <> text "$stub"
SymbolPtr -> char 'L' <> ppr lbl <> text "$non_lazy_ptr"
_ -> panic "pprDynamicLinkerAsmLabel"
else if osElfTarget (platformOS platform)
then if platformArch platform == ArchPPC
then case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> ppr lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else if platformArch platform == ArchX86_64
then case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
GotSymbolPtr -> ppr lbl <> text "@gotpcrel"
GotSymbolOffset -> ppr lbl
SymbolPtr -> text ".LC_" <> ppr lbl
else case dllInfo of
CodeStub -> ppr lbl <> text "@plt"
SymbolPtr -> text ".LC_" <> ppr lbl
GotSymbolPtr -> ppr lbl <> text "@got"
GotSymbolOffset -> ppr lbl <> text "@gotoff"
else if platformOS platform == OSMinGW32
then case dllInfo of
SymbolPtr -> text "__imp_" <> ppr lbl
_ -> panic "pprDynamicLinkerAsmLabel"
else panic "pprDynamicLinkerAsmLabel"
| holzensp/ghc | compiler/cmm/CLabel.hs | bsd-3-clause | 48,062 | 0 | 16 | 13,583 | 9,481 | 5,001 | 4,480 | 729 | 21 |
module DerivingUtils(
TypeInfo(..),DefTy(..),ConInfo(..),HsIdentI(..),IdTy(..),
idTy,idName,
fun,alt1,alt2,fun0,alt1',alt2',
vars,app,apps,opapp,con,var,ident,wild,(-::),str,pair,
hsLet,hsPApp,hsTyId,hsLit,hsPLit,HsLiteral(..),hsListComp,HsStmt(..),
oneDef,toDefs,noDef,localVal,
getBaseName,convCon,ModuleName(..),srcLoc,fakePos,
isEnum,
conj,
--eq,bool,false,
stdvalue,stdtype,stdclass,
( # ),
module ModNames
) where
import TiNames as TI(conName,localVal,idName)
import TiClasses(var,con,ident,app,tuple,noDef,oneDef,toDefs)
import HasBaseStruct(hsInfixApp,hsPWildCard,hsFunBind,hsExpTypeSig,hsLit,
hsListComp,hsPLit,hsTyId,hsPApp,hsLet)
import HasBaseName(getBaseName)
import BaseSyntax
import TypedIds
import HsConstants as ModNames(mod_Prelude,mod_Ix)
import UniqueNames(origModule)
import SrcLoc1
import TiPNT()
import TiHsName()
import MUtils
default(Int)
vars x = [var (localVal (x++show n))|n<-[1..]]
apps args = foldl1 app args
pair x y = tuple [x,y]
opapp op e1 e2 = hsInfixApp e1 op e2
wild=hsPWildCard
fun = hsFunBind
alt2' src f p1 p2 e = HsMatch src f [p1,p2] (HsBody e)
alt1' src f p e = HsMatch src f [p] (HsBody e)
alt2 src f p1 p2 e = alt2' src f p1 p2 e noDef
alt1 src f p e = alt1' src f p e noDef
fun0 src f e = fun src [HsMatch src f [] (HsBody e) noDef]
isEnum = all isNullary
where isNullary c = conArity c==0
convCon (t,ty) c0 = TI.conName (origModule c0) (getBaseName c0) t ty
e -:: t = hsExpTypeSig loc0 e [] t
str s = hsLit s . HsString
{-
andand = pv "&&"
bool = prelType "Bool"
false = prelCon "False" bool boolInfo
true = prelCon "True" bool boolInfo
boolInfo = TypeInfo {defType=Just Data,
fields=[],
constructors=[bc "False",bc "True"]}
where
bc c = ConInfo (prelCon c bool boolInfo) 0 Nothing
-}
conj andand true [] = ident true
conj andand true tsts = foldr1 (opapp andand) tsts
fakePos :: SrcLoc -> Int -> SrcLoc
fakePos (SrcLoc path char line col) n =
SrcLoc (path++":derived_"++show col) char line n
stdvalue stdnames m n = stdnames ValueNames (m,n)
stdtype stdnames m n = stdnames ClassOrTypeNames (m,n)
stdclass = stdtype
| forste/haReFork | tools/base/transforms/Deriving/DerivingUtils.hs | bsd-3-clause | 2,164 | 2 | 11 | 382 | 844 | 480 | 364 | 51 | 1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
module T15568 where
import Data.Proxy
type family F (a :: j) :: k
| sdiehl/ghc | testsuite/tests/ghci/scripts/T15568.hs | bsd-3-clause | 125 | 0 | 6 | 23 | 26 | 18 | 8 | 5 | 0 |
module T13847A where
data A = A { foo :: () }
| shlevy/ghc | testsuite/tests/rename/should_fail/T13847A.hs | bsd-3-clause | 46 | 0 | 9 | 12 | 21 | 13 | 8 | 2 | 0 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts, DatatypeContexts #-}
module ShouldCompile where
import Data.Foldable
import Data.Traversable
data Trivial a = Trivial
deriving (Functor,Foldable,Traversable)
-- lots of different things
data Strange a b c
= T1 a b c
| T2 c c c
| T3 [a] [b] [c] -- lists
| T4 [[a]] [[b]] [[c]] -- nested lists
| T5 (c,(b,b),(c,c)) -- tuples
| T6 ([c],Strange a b c) -- tycons
deriving (Functor,Foldable,Traversable)
data NotPrimitivelyRecursive a
= S1 (NotPrimitivelyRecursive (a,a))
| S2 a
deriving (Functor,Foldable,Traversable)
data Eq a => StupidConstraint a b = Stupid a b
deriving (Functor,Foldable,Traversable)
-- requires Foldable/Traversable constraint on f and g
data Compose f g a = Compose (f (g a))
deriving (Functor,Foldable,Traversable)
| urbanslug/ghc | testsuite/tests/deriving/should_compile/drv-foldable-traversable1.hs | bsd-3-clause | 923 | 0 | 10 | 188 | 294 | 176 | 118 | 24 | 0 |
module RedBlack where
data Color = Red | Black deriving (Eq, Show)
data RedBlackTree a = Empty
| Node { value :: a
, color :: Color
, left :: RedBlackTree a
, right :: RedBlackTree a
} deriving (Eq, Show)
data Direction = L | R deriving (Eq, Show)
data RedBlackPath a = Path Direction a Color (RedBlackTree a)
deriving (Show)
type RedBlackZipper a = (RedBlackTree a, [RedBlackPath a])
singleton :: (Ord a) => a -> RedBlackTree a
singleton x = Node x Black Empty Empty
insert :: (Ord a) => a -> RedBlackTree a -> RedBlackTree a
insert x tree = fromZipper $ insert' x (tree, [])
where insert' :: (Ord a) => a -> RedBlackZipper a -> RedBlackZipper a
insert' x (Empty, []) = (singleton x, [])
insert' x (Empty, path) = balance (Node x Red Empty Empty, path)
insert' x (n@(Node v c l r), path)
| x < v = insert' x (l, (Path L v c r):path)
| x == v = (n, path)
| x > v = insert' x (r, (Path R v c l):path)
-- Balance a tree using an algorithm adapted from:
-- http://en.wikipedia.org/wiki/Red_black_tree
balance :: (Ord a) => RedBlackZipper a -> RedBlackZipper a
-- Case 1: focus node is the root of the tree. Paint it black.
balance (Node v _ l r, []) = (Node v Black l r, [])
-- Case 2: focus node's parent is black. Tree is valid.
balance z@(n, (Path _ _ Black _):_) = z
-- Case 3: parent and uncle are red. Paint them both black and paint
-- grandparent red. Then move focus to the grandparent and balance.
balance (n, (Path L pv Red po):(Path L gv gc (Node uv Red ul ur)):path) =
balance (Node gv Red (Node pv Black n po) (Node uv Black ul ur), path)
balance (n, (Path L pv Red po):(Path R gv gc (Node uv Red ul ur)):path) =
balance (Node gv Red (Node uv Black ul ur) (Node pv Black n po), path)
balance (n, (Path R pv Red po):(Path L gv gc (Node uv Red ul ur)):path) =
balance (Node gv Red (Node pv Black po n) (Node uv Black ul ur), path)
balance (n, (Path R pv Red po):(Path R gv gc (Node uv Red ul ur)):path) =
balance (Node gv Red (Node uv Black ul ur) (Node pv Black po n), path)
-- Case 4: parent is red, but uncle is black. Path to focus node is either L,R
-- or R,L
balance (Node nv nc nl nr, (Path R pv Red pl):(Path L gv gc u):path) =
balance (Node pv Red pl nl, (Path L nv Red nr):(Path L gv gc u):path)
balance (Node nv nc nl nr, (Path L pv Red pr):(Path R gv gc u):path) =
balance (Node pv Red nr pr, (Path R nv Red nl):(Path R gv gc u):path)
-- Case 5: parent is red, but uncle is black. Path to focus node is either L,L
-- or R,R
balance (Node nv nc nl nr, (Path L pv Red pr):(Path L gv gc u):path) =
(Node nv nc nl nr, (Path L pv Black (Node gv Red pr u)):path)
balance (Node nv nc nl nr, (Path R pv Red pl):(Path R gv gc u):path) =
(Node nv nc nl nr, (Path R pv Black (Node gv Red u pl)):path)
fromZipper :: RedBlackZipper a -> RedBlackTree a
fromZipper (n, []) = n
fromZipper (n, (Path L pv pc pr):path) = fromZipper (Node pv pc n pr, path)
fromZipper (n, (Path R pv pc pl):path) = fromZipper (Node pv pc pl n, path)
fromList :: (Ord a) => [a] -> RedBlackTree a
fromList = foldr insert Empty
toList :: RedBlackTree a -> [a]
toList Empty = []
toList (Node x _ l r) = toList l ++ [x] ++ toList r
depth :: RedBlackTree a -> Int
depth Empty = 0
depth (Node _ _ l r) = 1 + max (depth l) (depth r)
test :: (Ord a) => a -> RedBlackTree a -> Int
test _ Empty = 1
test x (Node v _ l r)
| x < v = 1 + test x l
| x == v = 1
| x > v = 1 + test x r
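-- Added usage sketch (not part of the original module; the results follow
-- from the definitions above). 'fromList' folds 'insert' over the input and
-- 'toList' is an in-order traversal, so elements come back sorted and
-- duplicates are dropped; in the second example rebalancing leaves 2 at the
-- root:
--
-- >>> toList (fromList [5, 3, 8, 1, 3 :: Int])
-- [1,3,5,8]
-- >>> value (fromList [2, 1, 3 :: Int])
-- 2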
| dstruthers/RedBlack | RedBlack.hs | mit | 3,611 | 0 | 12 | 975 | 1,774 | 919 | 855 | 60 | 3 |
module Main where
import Data.Char
import Data.List
import System.Console.GetOpt
import System.Directory
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import Interpreter
main = do
path <- getArgs >>= parse
code <- readFile path
runBrainfuckSource code
-- Command line argument parsing
-- http://www.haskell.org/haskellwiki/Tutorials/Programming_Haskell/Argument_handling#GetOpt
data Flag
= Help
deriving (Eq,Ord,Enum,Show,Bounded)
flags =
[ Option [] ["help"] (NoArg Help) "Print this help message"
]
parse argv = case getOpt Permute flags argv of
(args, fs, []) -> do
if Help `elem` args || null fs || length fs > 1
then do
hPutStrLn stderr (usageInfo header flags)
exitWith ExitSuccess
else return $ head fs
(_,_,errs) -> do
hPutStrLn stderr (concat errs ++ usageInfo header flags)
exitWith (ExitFailure 1)
where
header = "Usage: brainfuck [OPTION] FILE"
| chrisrosset/bf.hs | BF.hs | mit | 929 | 10 | 15 | 159 | 307 | 159 | 148 | 30 | 3 |
{-# htermination eltsFM_GE :: FiniteMap () b -> () -> [b] #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_eltsFM_GE_2.hs | mit | 79 | 0 | 3 | 14 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiWayIf #-}
module RWPAS.Control.BeastFrog
( BeastFrogState() )
where
import Control.Applicative
import Control.Lens
import Control.Monad
import Control.Monad.Primitive
import Data.Data
import Data.Foldable
import Data.Maybe
import Data.SafeCopy
import Data.Set ( Set )
import qualified Data.Set as S
import GHC.Generics
import RWPAS.Actor
import RWPAS.Control.ControlMonad
import RWPAS.Control.Types
import RWPAS.Direction
import RWPAS.Item
import RWPAS.Level
import RWPAS.Turn
import RWPAS.World.Type
import System.Random.MWC
-- Beast frogs:
--
-- They hop around and stay still a lot.
--
-- When next to player (and if hostile), spikes can come out of their skin,
-- piercing and pushing back the player. The spikes also hurt any monster next
-- to them.
data BeastFrogState = BeastFrogState
{ _staminaCounter :: !Turns
, _spikesOut :: !Bool
, _bloodySpikes :: !(Set Direction8) }
deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )
makeLenses ''BeastFrogState
deriveSafeCopy 0 'base ''BeastFrogState
instance IsAI BeastFrogState where
initialState rng = do
initial_stamina <- uniformR (3, 6 :: Int) rng
return BeastFrogState { _staminaCounter = fromIntegral initial_stamina
, _spikesOut = False
, _bloodySpikes = S.empty }
transitionFunction = beastFrogTransition
deadTransition = beastFrogDead
aiName _ = "BeastFrog"
beastFrogDead :: PrimMonad m => AITransition m BeastFrogState
beastFrogDead = runAIControlMonad $ leaveCorpse BeastFrogCorpse "frog"
beastFrogTransition :: PrimMonad m => AITransition m BeastFrogState
beastFrogTransition = runAIControlMonad $ do
stamina <- use $ aiState.staminaCounter
if stamina > 0
then do aiState.staminaCounter -= 1
return ()
else do r <- rollUniformR (3, 6 :: Int)
aiState.spikesOut .= False
aiState.staminaCounter += fromIntegral r
dist <- distanceToPlayer
if dist <= 1
then do aiState.spikesOut .= True
aiState.bloodySpikes .= S.empty
impaleNeighbours
else hop
withNChance 30 emitNoises
spikes <- use $ aiState.spikesOut
when spikes emitSpikes
where
emitSpikes = do
bloodied_ones <- use (aiState.bloodySpikes)
for_ directions8 $ \dir ->
emitDecoration dir (if S.member dir bloodied_ones
then BloodySpikes dir
else Spikes dir)
emitNoises = do
dist <- distanceToPlayer
if | dist < 5 -> emitMessage "RIBBIT!!"
| dist < 10 -> emitMessage "Ribbit!"
| dist < 30 -> emitMessage "ribbit"
| otherwise -> return ()
impaleNeighbours = do
base_coords <- myCoordinates
aid <- myActorID
w <- use world
let (_, _, _, player_id) = currentLevelAndActor w
for_ directions8 $ \dir ->
(do impaled_coords <- moveCoords dir base_coords
world.actorAt impaled_coords._Just._2 %= hurt 5
-- Push back the actor...if we can
-- Only push back one step and don't push more than once
-- Otherwise strategically placed portals could cause an infinite
-- loop, monsters pushing themselves.
ac <- use (world.actorAt impaled_coords)
when (isJust ac) $ do
aiState.bloodySpikes %= S.insert dir
let Just (impaled_aid, _) = ac
pushed_back_coords <- moveCoords dir impaled_coords
pushed_back_actor <- use (world.actorAt pushed_back_coords)
pushed_back_feature <- use (world.terrainAt pushed_back_coords)
when (impaled_aid == player_id) $ emitMessage "You are impaled!"
case (pushed_back_actor, pushed_back_feature) of
-- Push back if there's free space behind and we are not
-- impaling ourselves.
(Nothing, Just f) | not (impassable f) && impaled_aid /= aid -> do
world.actorAt impaled_coords .= Nothing
world.actorAt pushed_back_coords .= ac
_ -> return ()
) <|> return ()
hop = do
steps <- rollUniformR (2, 5)
replicateM_ steps $ do
dist <- distanceToPlayer
d <- if dist < 15
then getDirectionTowardsPlayer
else rollUniform
move d <|> return ()
| Noeda/rwpas | src/RWPAS/Control/BeastFrog.hs | mit | 4,508 | 0 | 28 | 1,229 | 1,084 | 543 | 541 | -1 | -1 |
module Triangle
( TriangleType(..)
, triangleType
) where
data TriangleType = Equilateral
| Illogical
| Isosceles
| Scalene
deriving (Eq, Show)
triangleType :: Real a => a -> a -> a -> TriangleType
triangleType a b c
| isIllogical = Illogical
| isEquilateral = Equilateral
| isIsosceles = Isosceles
| otherwise = Scalene where
isIllogical = not $ and [a > 0, b > 0, c > 0, a + b > c, b + c > a, a + c > b]
isEquilateral = a == b && b == c
isIsosceles = or [a == b, b == c, a == c]
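-- Added GHCi examples (not part of the original module), following directly
-- from the guards above:
--
-- >>> triangleType 2 2 2
-- Equilateral
-- >>> triangleType 3 4 5
-- Scalene
-- >>> triangleType 1 1 3
-- Illogical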
| tfausak/exercism-solutions | haskell/triangle/Triangle.hs | mit | 607 | 0 | 11 | 230 | 226 | 121 | 105 | 17 | 1 |
{-# LANGUAGE DataKinds #-}
import Control.Monad.Trans
import Options.Declarative
main' :: Flag "b" '["bool"] "STRING" "boolean flag" Bool
-> Cmd "Simple greeting example" ()
main' b =
liftIO $ putStrLn $ if get b then "Flag is True" else "Flag is False"
main :: IO ()
main = run_ main'
| tanakh/optparse-declarative | example/bool.hs | mit | 319 | 0 | 8 | 82 | 90 | 47 | 43 | 9 | 2 |
{-# htermination foldFM_LE :: Ord a => ([a] -> b -> c -> c) -> c -> [a] -> FiniteMap [a] b -> c #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_foldFM_LE_4.hs | mit | 117 | 0 | 3 | 27 | 5 | 3 | 2 | 1 | 0 |
module Solar.Utility.Wait where
import System.Timeout
import Control.Concurrent
import Control.Concurrent.STM
type WaitOn = MVar ()
sleepOn :: ()
=> WaitOn -- ^ Waiting variable
-> Int -- ^ Time to sleep in microseconds
-> IO () -- ^ Blocking action
sleepOn w i = do
tryTakeMVar w -- Clear the state
timeout i $ takeMVar w
return ()
-- | Similar in spirit to a compare-and-swap: blocks until the value read from
-- the structure differs from the original, or until the timeout expires
sleepOnSTM :: (Eq a)
=> Int -- ^ Time to sleep in microseconds
-> a -- ^ Original
-> TVar b -- ^ Some structure
-> (TVar b -> STM a)
-> IO ()
sleepOnSTM i original s f = do
timeout i $ atomically $ do
current <- f s
check $ current /= original
-- Only let the transaction pass through
-- if things HAVE changed!
return ()
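-- Added usage sketch (not part of the original module; the producer thread
-- below is illustrative):
--
-- > w <- newEmptyMVar
-- > _ <- forkIO (threadDelay 500000 >> putMVar w ()) -- signal after 0.5s
-- > sleepOn w 2000000 -- returns when signalled, or after 2s at most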
| Cordite-Studios/solar | solar-wind/Solar/Utility/Wait.hs | mit | 835 | 0 | 12 | 277 | 211 | 108 | 103 | 24 | 1 |
import Data.Char
nums = map digitToInt "73167176531330624919225119674426574742355349194934\
\96983520312774506326239578318016984801869478851843\
\85861560789112949495459501737958331952853208805511\
\12540698747158523863050715693290963295227443043557\
\66896648950445244523161731856403098711121722383113\
\62229893423380308135336276614282806444486645238749\
\30358907296290491560440772390713810515859307960866\
\70172427121883998797908792274921901699720888093776\
\65727333001053367881220235421809751254540594752243\
\52584907711670556013604839586446706324415722155397\
\53697817977846174064955149290862569321978468622482\
\83972241375657056057490261407972968652414535100474\
\82166370484403199890008895243450658541227588666881\
\16427171479924442928230863465674813919123162824586\
\17866458359124566529476545682848912883142607690042\
\24219022671055626321111109370544217506941658960408\
\07198403850962455444362981230987879927244284909188\
\84580156166097919133875499200524063689912560717606\
\05886116467109405077541002256983155200055935729725\
\71636269561882670428252483600823257530420752963450"
main = print (maximum $ adjProducts nums)
adjProducts :: [Int] -> [Int]
adjProducts xs = map product zs
where ys = map (\x -> take count $ drop x xs) [0..length xs]
zs = filter (\x -> count == length x) ys
count = 13
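-- Added note (not part of the original source): 'adjProducts' multiplies out
-- every window of 'count' (13) consecutive digits; shorter windows at the end
-- of the list are discarded by the length filter. With count = 2, for
-- example, adjProducts [9,9,8,9] would be [81,72,72].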
| adsmit14/haskell | ProjectEuler/8.hs | mit | 1,768 | 0 | 11 | 523 | 132 | 68 | 64 | 8 | 1 |