| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Main where
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
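-- Illustrative check, not part of the original file: take 6 fibs == [0,1,1,2,3,5]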
run count = do
putStrLn ("\nTurns count: " ++ show count)
putStrLn $ show $ last $ take count fibs
run (count + 1)
main = do
run 1
| dskecse/Adv2Game | testFib.hs | gpl-3.0 | 204 | 0 | 10 | 54 | 100 | 49 | 51 | 8 | 1 |
-- | Implementation of diff-match-patch
module Data.DiffMatchPatch.Internal
( TextPair
, makeTextPair
, getTexts
, firstText
, secondText
, getTextCores
, commonPrefix
, commonSuffix
, commonSuffixes
) where
import BasicPrelude
import qualified Data.Text as Text
-- | A pair of 'Text' elements.
--
-- The common prefix and the common suffix of the elements are stored
-- separately.
data TextPair = TextPair
{ _first :: Text
, _second :: Text
, _prefix :: Text
, _suffix :: Text
} deriving (Eq, Show)
-- | Construct a 'TextPair' from two 'Text' values.
makeTextPair :: Text -> Text -> TextPair
makeTextPair x y =
let (prefix, x', y') = fromMaybe ("", x, y) (Text.commonPrefixes x y)
(suffix, x'', y'') = fromMaybe ("", x', y') (commonSuffixes x' y')
in TextPair x'' y'' prefix suffix
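-- A worked example (illustrative, not from the original source): for
-- @makeTextPair "barfoo" "bazfoo"@ the common prefix is "ba", the common
-- suffix is "foo", and the cores are ("r", "z"):
--
-- > getTextCores (makeTextPair "barfoo" "bazfoo") == ("r", "z")
-- > commonPrefix (makeTextPair "barfoo" "bazfoo") == "ba"
-- > commonSuffix (makeTextPair "barfoo" "bazfoo") == "foo"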
-- | /O(n_1 + n_2)/ Find the longest non-empty common suffix of two strings
-- and return it, along with the prefixes of each string at which they
-- no longer match.
--
-- If the strings do not have a common suffix or either one is empty,
-- this function returns 'Nothing'.
--
-- Examples:
--
-- > commonSuffixes "barfoo" "quuxfoo" == Just ("foo","bar","quux")
-- > commonSuffixes "veeble" "fetzer" == Nothing
-- > commonSuffixes "" "baz" == Nothing
--
-- TODO: This is a very naive implementation and is probably pretty slow. Make
-- a faster one.
commonSuffixes :: Text -> Text -> Maybe (Text, Text, Text)
commonSuffixes x y =
case Text.commonPrefixes (Text.reverse x) (Text.reverse y) of
Nothing -> Nothing
Just (p, x', y') -> Just (Text.reverse p, Text.reverse x', Text.reverse y')
-- | Get the original 'Text' values from a 'TextPair'.
getTexts :: TextPair -> (Text, Text)
getTexts TextPair{..} =
(_prefix <> _first <> _suffix, _prefix <> _second <> _suffix)
-- | Get the first 'Text' value from a 'TextPair'.
firstText :: TextPair -> Text
firstText = fst . getTexts
-- | Get the second 'Text' value from a 'TextPair'.
secondText :: TextPair -> Text
secondText = snd . getTexts
-- | Get the "cores" of the 'Text' values that make up the pair.
--
-- Guaranteed to have an empty common prefix and an empty common suffix.
getTextCores :: TextPair -> (Text, Text)
getTextCores TextPair{..} = (_first, _second)
-- | Get the common prefix of the text pair. An empty text object is returned
-- if there is no common prefix.
commonPrefix :: TextPair -> Text
commonPrefix = _prefix
-- | Get the common suffix of the text pair. An empty text object is returned
-- if there is no common suffix.
commonSuffix :: TextPair -> Text
commonSuffix = _suffix
| jml/diff-match-patch | src/Data/DiffMatchPatch/Internal.hs | gpl-3.0 | 2,705 | 0 | 12 | 608 | 499 | 293 | 206 | -1 | -1 |
-- This file is part of tersmu
-- Copyright (C) 2014 Martin Bays <[email protected]>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of version 3 of the GNU General Public License as
-- published by the Free Software Foundation.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see http://www.gnu.org/licenses/.
module Util where
swap :: [a] -> Int -> Int -> [a]
swap as n m = [ if i == n then as!!m else
if i == m then as!!n else as!!i | i <- [0..] ]
swapFinite :: [a] -> Int -> Int -> [a]
swapFinite as n m = take (length as) $ swap as n m
swapFiniteWithDefault :: a -> [a] -> Int -> Int -> [a]
swapFiniteWithDefault def ts n m = take (max (max n m + 1) (length ts)) $
swap (ts ++ repeat def) n m
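-- Illustrative example (not part of the original file):
-- swapFinite "abcde" 1 3 == "adcbe"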
| lagleki/tersmu-0.2 | Util.hs | gpl-3.0 | 772 | 4 | 11 | 165 | 225 | 122 | 103 | 8 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LiberalTypeSynonyms #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | General law tests for instances defined in this package
module Laws
(
-- * Lens laws
isSetter
, isTraversal
, isIso
, isPrism
-- * Laws for classes
, isFunctor
, isProfunctor
, isBifunctor
, isContravariant
, isIxed
, isAeson
) where
import Control.Applicative
import Control.Lens
import Data.Aeson
import Data.Bifunctor
import qualified Data.ByteString.Lazy as LBS
import Data.Functor.Compose
import Data.Maybe
import Instances()
import Test.QuickCheck
import Test.QuickCheck.Function
-- Taken from properties.hs from the lens source code:
setter_id :: Eq s => Setter' s a -> s -> Bool
setter_id l s = over l id s == s
setter_composition :: Eq s => Setter' s a -> s -> Fun a a -> Fun a a -> Bool
setter_composition l s (Fun _ f) (Fun _ g) = over l f (over l g s) == over l (f . g) s
lens_set_view :: Eq s => Lens' s a -> s -> Bool
lens_set_view l s = set l (view l s) s == s
lens_view_set :: Eq a => Lens' s a -> s -> a -> Bool
lens_view_set l s a = view l (set l a s) == a
setter_set_set :: Eq s => Setter' s a -> s -> a -> a -> Bool
setter_set_set l s a b = set l b (set l a s) == set l b s
iso_hither :: Eq s => Simple AnIso s a -> s -> Bool
iso_hither l s = s ^.cloneIso l.from l == s
iso_yon :: Eq a => Simple AnIso s a -> a -> Bool
iso_yon l a = a^.from l.cloneIso l == a
prism_yin :: Eq a => Prism' s a -> a -> Bool
prism_yin l a = preview l (review l a) == Just a
prism_yang :: Eq s => Prism' s a -> s -> Bool
prism_yang l s = maybe s (review l) (preview l s) == s
traverse_pure :: forall f s a. (Applicative f, Eq (f s)) => LensLike' f s a -> s -> Bool
traverse_pure l s = l pure s == (pure s :: f s)
traverse_pureMaybe :: Eq s => LensLike' Maybe s a -> s -> Bool
traverse_pureMaybe = traverse_pure
traverse_pureList :: Eq s => LensLike' [] s a -> s -> Bool
traverse_pureList = traverse_pure
traverse_compose :: (Applicative f, Applicative g, Eq (f (g s)))
=> Traversal' s a -> (a -> g a) -> (a -> f a) -> s -> Bool
traverse_compose t f g s = (fmap (t f) . t g) s == (getCompose . t (Compose . fmap f . g)) s
isSetter :: (Arbitrary s, Arbitrary a, CoArbitrary a, Show s, Show a, Eq s, Function a)
=> Setter' s a -> Property
isSetter l = setter_id l .&. setter_composition l .&. setter_set_set l
isTraversal :: (Arbitrary s, Arbitrary a, CoArbitrary a, Show s, Show a, Eq s, Function a)
=> Traversal' s a -> Property
isTraversal l = isSetter l .&. traverse_pureMaybe l .&. traverse_pureList l
.&. do t <- arbitrary
(Fun _ leftOrRight) <- arbitrary
property $ traverse_compose l (\x -> if leftOrRight x then Left (show x) else Right x)
(\x -> if t then Just x else Nothing)
isLens :: (Arbitrary s, Arbitrary a, CoArbitrary a, Show s, Show a, Eq s, Eq a, Function a)
=> Lens' s a -> Property
isLens l = lens_set_view l .&. lens_view_set l .&. isTraversal l
isIso :: (Arbitrary s, Arbitrary a, CoArbitrary s, CoArbitrary a, Show s, Show a, Eq s, Eq a, Function s, Function a)
=> Iso' s a -> Property
isIso l = iso_hither l .&. iso_yon l .&. isLens l .&. isLens (from l)
isPrism :: (Arbitrary s, Arbitrary a, CoArbitrary a, Show s, Show a, Eq s, Eq a, Function a)
=> Prism' s a -> Property
isPrism l = isTraversal l .&. prism_yin l .&. prism_yang l
isFunctor :: forall f a proxy. (Functor f, Arbitrary (f a), Eq (f a), Show (f a)) => proxy (f a) -> Property
isFunctor _ = property $ \f -> fmap id f == (f :: f a)
isProfunctor :: forall p a b proxy. (Profunctor p, Arbitrary (p a b), Eq (p a b), Show (p a b)) => proxy (p a b) -> Property
isProfunctor _ = property $ \p -> dimap id id p == (p :: p a b) && lmap id p == p && rmap id p == p
isContravariant :: forall f a proxy. (Contravariant f, Arbitrary (f a), Eq (f a), Show (f a)) => proxy (f a) -> Property
isContravariant _ = property $ \f -> contramap id f == (f :: f a)
isBifunctor :: forall f a b proxy. (Bifunctor f, Arbitrary (f a b), Eq (f a b), Show (f a b)) => proxy (f a b) -> Property
isBifunctor _ = property $ \f -> bimap id id f == (f :: f a b) && first id f == f && second id f == f
-- Assumes Gettable-only contains
isIxed :: forall proxy a. (Eq a, CoArbitrary (IxValue a), Function (IxValue a), Arbitrary (IxValue a), Show (IxValue a), Arbitrary a, Show a, Arbitrary (Index a), Show (Index a), Ixed (Bazaar (->) (IxValue a) (IxValue a)) a, Contains (Accessor Bool) a) => proxy a -> Property
isIxed _ = property $ \i ->
let l = ix i :: LensLike' (Bazaar (->) (IxValue a) (IxValue a)) a (IxValue a)
in isTraversal (cloneTraversal l)
.&. property (\(s :: a) -> isJust (s ^? cloneTraversal (ix i)) == s ^. contains i)
isAeson :: forall proxy a. (Function a, FromJSON a, Eq a, Show a, ToJSON a, CoArbitrary a, Arbitrary a) => proxy a -> Property
isAeson _ = isTraversal encodeDecode .&. prism_yin encodeDecode
where encodeDecode = prism' encode decode :: Prism' LBS.ByteString a
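-- A hypothetical usage sketch (not part of the original test suite): the
-- properties above are intended to be run with QuickCheck, e.g.
--
-- > quickCheck (isSetter (mapped :: Setter' [Int] Int))
-- > quickCheck (isPrism (_Left :: Prism' (Either Int Bool) Int))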
| bennofs/vplan | tests/Laws.hs | gpl-3.0 | 5,351 | 0 | 20 | 1,382 | 2,402 | 1,216 | 1,186 | -1 | -1 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <[email protected]> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
{-# LANGUAGE GADTs, KindSignatures, Rank2Types, ScopedTypeVariables #-}
module Fallback.State.Area where
import Control.Arrow (right)
import Control.Applicative ((<$), (<$>))
import Data.Function (on)
import Data.List (find, nubBy)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe, isJust, mapMaybe)
import qualified Data.Set as Set
import Data.Traversable (for)
import System.Random (Random, randomRIO)
import Fallback.Constants (maxPartyLevel)
import Fallback.Control.Script (Script)
import Fallback.Data.Clock (Clock, clockInc)
import qualified Fallback.Data.Grid as Grid
import Fallback.Data.Point
import qualified Fallback.Data.PriorityQueue as PQ
import qualified Fallback.Data.SparseMap as SM
import Fallback.Draw (Minimap)
import Fallback.Sound (Sound, fadeOutMusic, loopMusic, playSound, stopMusic)
import Fallback.State.Camera (Camera, setCameraShake, tickCamera)
import Fallback.State.Creature
import Fallback.State.Doodad
(Doodads, Message, decayMessage, makeMessage, tickDoodads)
import Fallback.State.FOV (fieldOfView)
import Fallback.State.Minimap (updateMinimapFromTerrain)
import Fallback.State.Party
import Fallback.State.Progress
(BattleId, DeviceId, HasProgress, Var, VarType, progressSetVar)
import Fallback.State.Resources (MusicTag, Resources, musicPath)
import Fallback.State.Simple
import Fallback.State.Tags (AreaTag, ItemTag, QuestTag)
import Fallback.State.Terrain
-------------------------------------------------------------------------------
data AreaCommonState = AreaCommonState
{ acsCamera :: Camera,
acsClock :: Clock,
acsDevices :: Grid.Grid Device,
acsDoodads :: Doodads,
acsFields :: Map.Map Position Field,
acsMessage :: Maybe Message,
acsMinimap :: Minimap,
acsMonsters :: Grid.Grid Monster,
acsParty :: Party,
acsRemains :: SM.SparseMap Position [Remains],
acsResources :: Resources,
acsTerrain :: Terrain,
acsVisible :: Set.Set Position }
tickAnimations :: DPoint -> [Position] -> AreaCommonState -> AreaCommonState
tickAnimations cameraGoalTopleft eyes acs =
acs { acsClock = clockInc (acsClock acs),
acsCamera = tickCamera cameraGoalTopleft (acsCamera acs),
acsDoodads = tickDoodads (acsDoodads acs),
acsMessage = acsMessage acs >>= decayMessage,
acsMonsters = Grid.update tickMonsterPose (acsMonsters acs) }
where
tickMonsterPose entry = monst { monstPose = pose' } where
monst = Grid.geValue entry
pose' = tickCreaturePose invis canSee (monstPose monst)
invis = monstInvisibility monst
canSee =
case invis of
NoInvisibility -> True
MinorInvisibility ->
any (rectContains $ expandPrect $ Grid.geRect entry) eyes
MajorInvisibility -> False
updateMinimap :: AreaCommonState -> [Position] -> IO ()
updateMinimap acs visible = do
updateMinimapFromTerrain (acsMinimap acs) (acsTerrain acs) visible
-------------------------------------------------------------------------------
class (HasProgress a) => AreaState a where
-- | The boundaries of movement. In town mode, this is the whole map; in
-- combat mode, it is the combat arena.
arsBoundaryRect :: a -> PRect
-- | Return the position of a particular character. In town mode, this will
-- be the party position, regardless of which character was asked for. In
-- combat mode, this returns the position of the character, or if the
-- character is unconscious, then the last position the character was at.
arsCharacterPosition :: CharacterNumber -> a -> Position
arsCharacterAtPosition :: Position -> a -> Maybe CharacterNumber
arsCommon :: a -> AreaCommonState
arsSetCommon :: a -> AreaCommonState -> a
-- | Return all positions occupied by party members. In town mode, this will
-- be the single position of the party. In combat mode, this will have up to
-- four positions, one for each conscious party member.
arsPartyPositions :: a -> [Position]
arsUpdateVisibility :: a -> IO a
-- | Return the set of positions visible to just one character. In town
-- mode, this is the same as 'arsVisibleForParty' (because all characters are
-- in the same position), but for combat mode it is not.
arsVisibleForCharacter :: CharacterNumber -> a -> Set.Set Position
-------------------------------------------------------------------------------
arsCamera :: (AreaState a) => a -> Camera
arsCamera = acsCamera . arsCommon
arsClock :: (AreaState a) => a -> Clock
arsClock = acsClock . arsCommon
arsCurrentArea :: (AreaState a) => a -> AreaTag
arsCurrentArea = partyCurrentArea . acsParty . arsCommon
arsDevices :: (AreaState a) => a -> Grid.Grid Device
arsDevices = acsDevices . arsCommon
arsExploredMap :: (AreaState a) => a -> ExploredMap
arsExploredMap ars = partyExploredMap (arsTerrain ars) (arsParty ars)
arsFields :: (AreaState a) => a -> Map.Map Position Field
arsFields = acsFields . arsCommon
arsGetCharacter :: (AreaState a) => CharacterNumber -> a -> Character
arsGetCharacter charNum ars = partyGetCharacter (arsParty ars) charNum
arsIsVisible :: (AreaState a) => a -> Position -> Bool
arsIsVisible ars pos = Set.member pos (arsVisibleForParty ars)
arsIsVisibleToCharacter :: (AreaState a) => CharacterNumber -> a -> Position
-> Bool
arsIsVisibleToCharacter charNum ars pos =
Set.member pos (arsVisibleForCharacter charNum ars)
arsMonsters :: (AreaState a) => a -> Grid.Grid Monster
arsMonsters = acsMonsters . arsCommon
arsMinimap :: (AreaState a) => a -> Minimap
arsMinimap = acsMinimap . arsCommon
arsParty :: (AreaState a) => a -> Party
arsParty = acsParty . arsCommon
arsResources :: (AreaState a) => a -> Resources
arsResources = acsResources . arsCommon
arsTerrain :: (AreaState a) => a -> Terrain
arsTerrain = acsTerrain . arsCommon
arsTerrainOpenness :: (AreaState a) => Position -> a -> TerrainOpenness
arsTerrainOpenness pos ars =
case Map.lookup pos $ arsFields ars of
Just (BarrierWall _) -> TerrainSolid
Just (SmokeScreen _) -> smokifyOpenness openness
_ -> if rectContains (arsBoundaryRect ars) pos then openness
else solidifyOpenness openness
where openness = ttOpenness $ terrainGetTile pos $ arsTerrain ars
arsVisibleForParty :: (AreaState a) => a -> Set.Set Position
arsVisibleForParty = acsVisible . arsCommon
arsIsOpaque :: (AreaState a) => a -> Position -> Bool
arsIsOpaque ars pos = cannotSeeThrough $ arsTerrainOpenness pos ars
-- | Determine if the given monster cannot occupy the given position (for large
-- monsters, this position corresponds to the top-left position of the
-- monster's rectangle) without falling afoul of the party, terrain, and/or
-- other monsters.
arsIsBlockedForMonster :: (AreaState a) => Grid.Entry Monster -> a -> Position
-> Bool
arsIsBlockedForMonster ge ars pos =
any (rectContains rect') (arsPartyPositions ars) ||
any ((if monstCanFly $ Grid.geValue ge
then cannotFlyOver else cannotWalkOn) .
flip arsTerrainOpenness ars) (prectPositions rect') ||
not (Grid.couldMove (Grid.geKey ge) rect' $ arsMonsters ars)
where rect' = makeRect pos $ rectSize $ Grid.geRect ge
-- | Determine if a character could occupy the given position without falling
-- afoul of terrain, monsters, or other characters.
arsIsBlockedForParty :: (AreaState a) => a -> Position -> Bool
arsIsBlockedForParty ars pos =
cannotWalkOn (arsTerrainOpenness pos ars) || arsOccupied pos ars
-- | Determine if there are any enemy monsters that could reach one or more
-- party members within four steps, taking into account both the terrain and
-- whether the monster(s) can fly. Monsters that are not currently within
-- line-of-sight to the party do not count.
arsAreEnemiesNearby :: (AreaState a) => a -> Bool
arsAreEnemiesNearby ars = check initQueue initVisited where
check :: PQ.PriorityQueue (Bool, Int) Position -> Set.Set Position -> Bool
check queue visited =
case PQ.popWithPriority queue of
Just (((flying, steps), pos), queue') ->
(pos `Set.member` arsVisibleForParty ars && hasEnemy pos flying) ||
(if steps >= maxSteps then check queue' visited
else expand flying (steps + 1) pos queue' visited)
Nothing -> False
expand :: Bool -> Int -> Position -> PQ.PriorityQueue (Bool, Int) Position
-> Set.Set Position -> Bool
expand flying steps pos queue visited =
let children = filter (`Set.notMember` visited) $
map (pos `plusDir`) allDirections
enqueueChild child =
let open = arsTerrainOpenness child ars
in if canWalkOn open then PQ.insert (flying, steps) child
else if canFlyOver open then PQ.insert (True, steps) child else id
in check (foldr enqueueChild queue children)
(foldr Set.insert visited children)
hasEnemy :: Position -> Bool -> Bool
hasEnemy pos fly =
case Grid.search pos (arsMonsters ars) of
Just entry ->
let monst = Grid.geValue entry
in not (monstIsAlly monst) && (not fly || monstCanFly monst)
Nothing -> False
initVisited = Set.fromList $ arsPartyPositions ars
initQueue = PQ.fromList $ map ((,) (False, 0)) $ arsPartyPositions ars
maxSteps = 4 :: Int
-- | Lazily compute all positions that can be reached via walking from the
-- given start position (ignoring any creatures that may be in the way). The
-- positions are ordered by distance from the start position, except that as a
-- special case the start position itself comes just after any adjacent
-- positions.
arsAccessiblePositions :: (AreaState a) => [Direction] -> Position -> a
-> [Position]
arsAccessiblePositions dirs startPos ars = generate initQueue initVisited where
generate queue visited =
case PQ.pop queue of
Nothing -> []
Just (pos, queue') -> pos : generate queue'' visited' where
positions = expand visited pos
queue'' = foldr (uncurry PQ.insert) queue' $ map annotate positions
visited' = foldr Set.insert visited positions
annotate pos = (pSqDist startPos pos, pos)
expand visited center = filter ok $ map (plusDir center) dirs where
ok pos = Set.notMember pos visited && isOpen pos
isOpen pos = canWalkOn $ arsTerrainOpenness pos ars
initQueue = let most = map annotate $ expand Set.empty startPos
in PQ.fromList $ if not (isOpen startPos) then most
else (SqDist 3, startPos) : most
initVisited = Set.fromList $ PQ.elems initQueue
-- TODO: deprecated (use arsAccessiblePositions instead)
arsFindOpenSpot :: (AreaState a) => a -> Position -> IRect -> Set.Set Position
-> Position
arsFindOpenSpot ars start within claimed = check Set.empty [start] where
check _ [] = start -- There are no open spots; just give up.
check visited (next : rest) =
let ps = filter (canWalkOn . flip arsTerrainOpenness ars) $
filter (flip Set.notMember visited) $
filter (rectContains within) $ map (next `plusDir`) allDirections
in fromMaybe (check (foldr Set.insert visited ps) (rest ++ ps))
(find (\p -> Set.notMember p claimed &&
not (Grid.occupied p (arsMonsters ars))) ps)
-- | If you shoot a beam spell from the @start@ position, passing through the
-- @thru@ position, what positions does it hit? It will stop when it reaches
-- either an opaque position or the edge of the arena rectangle.
arsBeamPositions :: (AreaState a) => a -> Position {-^start-}
-> Position {-^thru-} -> [Position]
arsBeamPositions ars start thru =
let delta = thru `pSub` start
in if delta == pZero then [start] else
let arena = arsBoundaryRect ars
blocked pos = not (rectContains arena pos) || arsIsOpaque ars pos
takeThru _ [] = []
takeThru p (x : xs) = if p x then [x] else x : takeThru p xs
in takeThru blocked $ drop 1 $ bresenhamPositions start $
until (not . rectContains arena) (pAdd delta) start
arsCharacterJumpDestinations :: (AreaState a) => Int -> CharacterNumber -> a
-> Set.Set Position
arsCharacterJumpDestinations radius charNum ars =
Set.filter (\pos -> not (isBlocked pos || arsOccupied pos ars)) $
fieldOfView (terrainSize $ arsTerrain ars) isBlocked (ofRadius radius)
(arsCharacterPosition charNum ars) Set.empty
where isBlocked pos = cannotWalkOn (arsTerrainOpenness pos ars)
-- | Get the occupant of the given position (either a character or a monster),
-- if any.
arsOccupant :: (AreaState a) => Position -> a
-> Maybe (Either CharacterNumber (Grid.Entry Monster))
arsOccupant pos ars =
case arsCharacterAtPosition pos ars of
Just charNum -> Just (Left charNum)
Nothing -> Right <$> Grid.search pos (arsMonsters ars)
-- | Return 'True' if the given position is occupied (either by a character or
-- a monster), 'False' otherwise.
arsOccupied :: (AreaState a) => Position -> a -> Bool
arsOccupied pos ars = isJust (arsOccupant pos ars)
-- | Return a list of all occupants of the given positions, with no occupant
-- appearing more than once (e.g. in the case that a larger monster occupies
-- multiple positions).
arsOccupants :: (AreaState a) => [Position] -> a
-> [Either CharacterNumber (Grid.Entry Monster)]
arsOccupants ps ars = nubBy ((==) `on` (right Grid.geKey)) $
mapMaybe (flip arsOccupant ars) ps
-- | Return a list of all positions that are occupied by a character or by an
-- ally monster.
arsAllyOccupiedPositions :: (AreaState a) => a -> [Position]
arsAllyOccupiedPositions ars =
(arsPartyPositions ars ++) $ concatMap (prectPositions . Grid.geRect) $
filter (monstIsAlly . Grid.geValue) $ Grid.entries (arsMonsters ars)
-------------------------------------------------------------------------------
-- AreaState setters:
arsSetMessage :: (AreaState a) => String -> a -> a
arsSetMessage text ars =
arsSetCommon ars (arsCommon ars) { acsMessage = Just (makeMessage text) }
-------------------------------------------------------------------------------
data AreaExit = AreaExit
{ aeDestination :: AreaTag,
aeRectKeys :: [RectKey] }
-------------------------------------------------------------------------------
data Device = Device
{ devId :: DeviceId,
devInteract :: Grid.Entry Device -> CharacterNumber ->
Script AreaEffect (),
devRadius :: Int }
-------------------------------------------------------------------------------
decayFields :: Int -> Map.Map Position Field -> IO (Map.Map Position Field)
decayFields frames fields = fmap (Map.mapMaybe id) $ for fields $ \field -> do
let decay halflife = do
let probKeep = 0.5 ** (fromIntegral frames / halflife) :: Double
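        -- Illustrative numbers (not in the original): with frames == halflife the
        -- field survives with probability 0.5; with frames == halflife / 2 the
        -- probability is 0.5 ** 0.5, i.e. about 0.71.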
keep <- (probKeep >) <$> randomRIO (0, 1)
return $ if keep then Just field else Nothing
case field of
BarrierWall duration -> do
return $ if duration <= frames then Nothing
else Just $ BarrierWall (duration - frames)
FireWall _ -> decay 360
IceWall _ -> decay 480
PoisonCloud _ -> decay 300
SmokeScreen halflife -> decay halflife
Webbing _ -> return (Just field)
-------------------------------------------------------------------------------
-- | Effects that only impact the party and can be resolved in a
-- non-mode-specific way.
data PartyEffect :: * -> * where
EffAlterCharacter :: CharacterNumber -> (Character -> Character)
-> PartyEffect ()
-- Change how many coins the party has.
EffAlterCoins :: (Integer -> Integer) -> PartyEffect ()
-- Change what ingredients the party has.
EffAlterIngredients :: (Ingredients -> Ingredients) -> PartyEffect ()
-- Print a debugging string to the console.
EffDebug :: String -> PartyEffect ()
-- Give experience points to the party.
EffGrantExperience :: Int -> PartyEffect ()
-- Add an item to the party inventory.
EffGrantItem :: ItemTag -> PartyEffect ()
-- Change the music.
EffMusicStart :: MusicTag -> PartyEffect ()
-- Immediately stop the currently playing music.
EffMusicStop :: PartyEffect ()
-- Fade out the currently playing music over the given number of seconds.
EffMusicFadeOut :: Double -> PartyEffect ()
-- Play a sound effect.
EffPlaySound :: Sound -> PartyEffect ()
-- Remove all copies of an item from the party.
EffPurgeItem :: ItemTag -> PartyEffect ()
-- Generate a random value in the specified range.
EffRandom :: (Random a) => a -> a -> PartyEffect a
-- Remove the item in the given slot (if any) from the party.
EffRemoveItem :: ItemSlot -> PartyEffect ()
-- Set whether the specified area is cleared.
EffSetAreaCleared :: AreaTag -> Bool -> PartyEffect ()
-- Set the party's level cap.
EffSetLevelCap :: Int -> PartyEffect ()
-- Set the status for a given quest.
EffSetQuestStatus :: QuestTag -> QuestStatus -> PartyEffect ()
-- Change the value of a scenario variable.
EffSetVar :: (VarType a) => Var a -> a -> PartyEffect ()
-- | Effects that can occur in any AreaState.
data AreaCommonEffect :: * -> * where
EffAreaParty :: PartyEffect a -> AreaCommonEffect a
EffAddRemains :: Remains -> Position -> AreaCommonEffect ()
EffAlterDoodads :: (Doodads -> Doodads) -> AreaCommonEffect ()
EffAlterFields :: (Maybe Field -> Maybe Field) -> [Position]
-> AreaCommonEffect ()
EffAreaGet :: (forall s. (AreaState s) => s -> a) -> AreaCommonEffect a
EffMessage :: String -> AreaCommonEffect ()
EffTryAddDevice :: Position -> Device
-> AreaCommonEffect (Maybe (Grid.Entry Device))
EffTryAddMonster :: Position -> Monster
-> AreaCommonEffect (Maybe (Grid.Entry Monster))
EffTryMoveMonster :: Grid.Key Monster -> PRect -> AreaCommonEffect Bool
EffReplaceDevice :: Grid.Key Device -> Maybe Device -> AreaCommonEffect ()
EffReplaceMonster :: Grid.Key Monster -> Maybe Monster -> AreaCommonEffect ()
EffShakeCamera :: Double -> Int -> AreaCommonEffect ()
EffSetTerrain :: [(Position, TerrainTile)] -> AreaCommonEffect ()
-- | Effects that can occur in town mode or combat mode, but that must be
-- handled differently depending on the mode.
data AreaEffect :: * -> * where
EffAreaCommon :: AreaCommonEffect a -> AreaEffect a
EffFork :: Script AreaEffect () -> AreaEffect ()
EffGameOver :: AreaEffect ()
-- TODO: As currently implemented, EffIfCombat breaks concurrency. That is,
-- we stop the world and execute the sub-script, with other "threads" unable
-- to continue until the sub-script finishes, even if the sub-script contains
-- EffWait. Is it possible to make these two play nice together?
EffIfCombat :: Script CombatEffect a -> Script TownEffect a -> AreaEffect a
EffMultiChoice :: String -> [(String, a)] -> Maybe a -> AreaEffect a
EffNarrate :: String -> AreaEffect ()
EffWait :: AreaEffect ()
-- | Effects that can only happen while in combat mode.
data CombatEffect :: * -> * where
EffCombatArea :: AreaEffect a -> CombatEffect a
EffEndCombat :: CombatEffect ()
EffGetCharFaceDir :: CharacterNumber -> CombatEffect FaceDir
EffGetCharMoments :: CharacterNumber -> CombatEffect Int
EffSetCharAnim :: CharacterNumber -> CreatureAnim -> CombatEffect ()
EffSetCharFaceDir :: CharacterNumber -> FaceDir -> CombatEffect ()
EffSetCharMoments :: CharacterNumber -> Int -> CombatEffect ()
EffSetCharPosition :: CharacterNumber -> Position -> CombatEffect ()
-- | Effects that can only happen while in town mode.
data TownEffect :: * -> * where
EffTownArea :: AreaEffect a -> TownEffect a
EffExitTowardArea :: AreaTag -> TownEffect ()
EffGetActiveCharacter :: TownEffect CharacterNumber
EffGetPartyFaceDir :: TownEffect FaceDir
EffGetPartyPosition :: TownEffect Position
EffSetPartyAnim :: CreatureAnim -> TownEffect ()
EffSetPartyFaceDir :: FaceDir -> TownEffect ()
EffSetPartyPosition :: Position -> TownEffect ()
EffShop :: [Either Ingredient ItemTag] -> TownEffect ()
EffStartCombat :: Bool {-can run away-} -> PRect {-arena centered on-}
-> TownEffect ()
EffStartScriptedBattle :: BattleId -> TownEffect ()
EffTeleportToMark :: AreaTag -> MarkKey -> TownEffect ()
EffTeleportToPosition :: AreaTag -> Position -> TownEffect ()
-------------------------------------------------------------------------------
executePartyEffect :: PartyEffect a -> Party -> IO (a, Party)
executePartyEffect eff party =
case eff of
EffAlterCharacter charNum fn ->
return ((), partyAlterCharacter charNum fn party)
EffAlterCoins fn ->
return ((), party { partyCoins = max 0 $ fn $ partyCoins party })
EffAlterIngredients fn -> return ((), partyAlterIngredients fn party)
EffDebug string -> ((), party) <$ putStrLn string
EffGrantExperience xp -> return ((), partyGrantExperience xp party)
EffGrantItem tag -> return ((), partyGrantItem tag party)
EffMusicStart tag -> ((), party) <$ loopMusic (musicPath tag)
EffMusicStop -> ((), party) <$ stopMusic
EffMusicFadeOut seconds -> ((), party) <$ fadeOutMusic seconds
EffPlaySound sound -> ((), party) <$ playSound sound
EffPurgeItem tag -> return ((), partyPurgeItem tag party)
EffRandom lo hi -> do
value <- randomRIO (lo, hi)
return (value, party)
EffRemoveItem slot -> return ((), partyRemoveItem slot party)
EffSetAreaCleared tag clear -> do
let cleared' = (if clear then Set.insert tag else Set.delete tag)
(partyClearedAreas party)
return ((), party { partyClearedAreas = cleared' })
EffSetLevelCap cap -> do
return ((), party { partyLevelCap = max 1 $ min maxPartyLevel cap })
EffSetQuestStatus tag qs -> do
return ((), party { partyQuests = SM.set tag qs (partyQuests party) })
EffSetVar var value -> do
let progress' = progressSetVar var value $ partyProgress party
return ((), party { partyProgress = progress' })
executeAreaCommonEffect :: forall a s. (AreaState s) => AreaCommonEffect a -> s
-> IO (a, s)
executeAreaCommonEffect eff ars = do
let acs = arsCommon ars
case eff of
EffAreaParty partyEff -> do
(result, party') <- executePartyEffect partyEff (arsParty ars)
return (result, set acs { acsParty = party' })
EffAddRemains remains pos -> return ((), set acs { acsRemains =
SM.adjust (appendRemains remains) pos (acsRemains acs) })
EffAlterDoodads fn -> change acs { acsDoodads = fn (acsDoodads acs) }
EffAlterFields fn ps -> do
let fields' = foldr (Map.alter fn) (acsFields acs) ps
ars' <- arsUpdateVisibility $ set acs { acsFields = fields' }
return ((), ars')
EffAreaGet fn -> return (fn ars, ars)
EffMessage text -> change acs { acsMessage = Just (makeMessage text) }
EffTryAddDevice pos device -> do
case Grid.tryInsert (makeRect pos (1, 1)) device (acsDevices acs) of
Nothing -> return (Nothing, ars)
Just (entry, devices') ->
return (Just entry, set acs { acsDevices = devices' })
EffTryAddMonster topleft monster -> do
case Grid.tryInsert (makeRect topleft $ monstRectSize monster) monster
(acsMonsters acs) of
Nothing -> return (Nothing, ars)
Just (entry, monsters') ->
return (Just entry, set acs { acsMonsters = monsters' })
EffTryMoveMonster monstKey rect -> do
case Grid.tryMove monstKey rect (acsMonsters acs) of
Nothing -> return (False, ars)
Just grid' -> return (True, set acs { acsMonsters = grid' })
EffReplaceDevice key mbDevice' -> do
change acs { acsDevices = maybe (Grid.delete key) (Grid.replace key)
mbDevice' (acsDevices acs) }
EffReplaceMonster key mbMonst' -> do
change acs { acsMonsters = maybe (Grid.delete key) (Grid.replace key)
mbMonst' (acsMonsters acs) }
EffSetTerrain updates -> do
let terrain' = foldr (uncurry terrainSetTile) (acsTerrain acs) updates
ars' <- arsUpdateVisibility $ set acs { acsTerrain = terrain' }
return ((), ars')
EffShakeCamera ampl duration -> do
change acs { acsCamera = setCameraShake ampl duration (acsCamera acs) }
where
set :: AreaCommonState -> s
set acs' = arsSetCommon ars acs'
change :: AreaCommonState -> IO ((), s)
change acs' = return ((), set acs')
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/State/Area.hs | gpl-3.0 | 26,013 | 0 | 20 | 5,825 | 6,607 | 3,442 | 3,165 | 397 | 18 |
module Test where
main = print "Test"
| Jiggins/Matrix | Math/Test.hs | gpl-3.0 | 39 | 0 | 5 | 8 | 12 | 7 | 5 | 2 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudBilling.BillingAccounts.Projects.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the projects associated with a billing account. The current
-- authenticated user must have the \`billing.resourceAssociations.list\`
-- IAM permission, which is often given to billing account
-- [viewers](https:\/\/cloud.google.com\/billing\/docs\/how-to\/billing-access).
--
-- /See:/ <https://cloud.google.com/billing/ Cloud Billing API Reference> for @cloudbilling.billingAccounts.projects.list@.
module Network.Google.Resource.CloudBilling.BillingAccounts.Projects.List
(
-- * REST Resource
BillingAccountsProjectsListResource
-- * Creating a Request
, billingAccountsProjectsList
, BillingAccountsProjectsList
-- * Request Lenses
, baplXgafv
, baplUploadProtocol
, baplAccessToken
, baplUploadType
, baplName
, baplPageToken
, baplPageSize
, baplCallback
) where
import Network.Google.Billing.Types
import Network.Google.Prelude
-- | A resource alias for @cloudbilling.billingAccounts.projects.list@ method which the
-- 'BillingAccountsProjectsList' request conforms to.
type BillingAccountsProjectsListResource =
"v1" :>
Capture "name" Text :>
"projects" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListProjectBillingInfoResponse
-- | Lists the projects associated with a billing account. The current
-- authenticated user must have the \`billing.resourceAssociations.list\`
-- IAM permission, which is often given to billing account
-- [viewers](https:\/\/cloud.google.com\/billing\/docs\/how-to\/billing-access).
--
-- /See:/ 'billingAccountsProjectsList' smart constructor.
data BillingAccountsProjectsList =
BillingAccountsProjectsList'
{ _baplXgafv :: !(Maybe Xgafv)
, _baplUploadProtocol :: !(Maybe Text)
, _baplAccessToken :: !(Maybe Text)
, _baplUploadType :: !(Maybe Text)
, _baplName :: !Text
, _baplPageToken :: !(Maybe Text)
, _baplPageSize :: !(Maybe (Textual Int32))
, _baplCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BillingAccountsProjectsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'baplXgafv'
--
-- * 'baplUploadProtocol'
--
-- * 'baplAccessToken'
--
-- * 'baplUploadType'
--
-- * 'baplName'
--
-- * 'baplPageToken'
--
-- * 'baplPageSize'
--
-- * 'baplCallback'
billingAccountsProjectsList
:: Text -- ^ 'baplName'
-> BillingAccountsProjectsList
billingAccountsProjectsList pBaplName_ =
BillingAccountsProjectsList'
{ _baplXgafv = Nothing
, _baplUploadProtocol = Nothing
, _baplAccessToken = Nothing
, _baplUploadType = Nothing
, _baplName = pBaplName_
, _baplPageToken = Nothing
, _baplPageSize = Nothing
, _baplCallback = Nothing
}
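-- A hypothetical usage sketch (not part of the generated module), assuming the
-- lens operators re-exported by Network.Google.Prelude: build a request with the
-- smart constructor and override optional fields via the lenses below, e.g.
--
-- > billingAccountsProjectsList "billingAccounts/012345-567890-ABCDEF"
-- >   & baplPageSize .~ Just 50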
-- | V1 error format.
baplXgafv :: Lens' BillingAccountsProjectsList (Maybe Xgafv)
baplXgafv
= lens _baplXgafv (\ s a -> s{_baplXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
baplUploadProtocol :: Lens' BillingAccountsProjectsList (Maybe Text)
baplUploadProtocol
= lens _baplUploadProtocol
(\ s a -> s{_baplUploadProtocol = a})
-- | OAuth access token.
baplAccessToken :: Lens' BillingAccountsProjectsList (Maybe Text)
baplAccessToken
= lens _baplAccessToken
(\ s a -> s{_baplAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
baplUploadType :: Lens' BillingAccountsProjectsList (Maybe Text)
baplUploadType
= lens _baplUploadType
(\ s a -> s{_baplUploadType = a})
-- | Required. The resource name of the billing account associated with the
-- projects that you want to list. For example,
-- \`billingAccounts\/012345-567890-ABCDEF\`.
baplName :: Lens' BillingAccountsProjectsList Text
baplName = lens _baplName (\ s a -> s{_baplName = a})
-- | A token identifying a page of results to be returned. This should be a
-- \`next_page_token\` value returned from a previous
-- \`ListProjectBillingInfo\` call. If unspecified, the first page of
-- results is returned.
baplPageToken :: Lens' BillingAccountsProjectsList (Maybe Text)
baplPageToken
= lens _baplPageToken
(\ s a -> s{_baplPageToken = a})
-- | Requested page size. The maximum page size is 100; this is also the
-- default.
baplPageSize :: Lens' BillingAccountsProjectsList (Maybe Int32)
baplPageSize
= lens _baplPageSize (\ s a -> s{_baplPageSize = a})
. mapping _Coerce
-- | JSONP
baplCallback :: Lens' BillingAccountsProjectsList (Maybe Text)
baplCallback
= lens _baplCallback (\ s a -> s{_baplCallback = a})
instance GoogleRequest BillingAccountsProjectsList
where
type Rs BillingAccountsProjectsList =
ListProjectBillingInfoResponse
type Scopes BillingAccountsProjectsList =
'["https://www.googleapis.com/auth/cloud-billing",
"https://www.googleapis.com/auth/cloud-billing.readonly",
"https://www.googleapis.com/auth/cloud-platform"]
requestClient BillingAccountsProjectsList'{..}
= go _baplName _baplXgafv _baplUploadProtocol
_baplAccessToken
_baplUploadType
_baplPageToken
_baplPageSize
_baplCallback
(Just AltJSON)
billingService
where go
= buildClient
(Proxy :: Proxy BillingAccountsProjectsListResource)
mempty
| brendanhay/gogol | gogol-billing/gen/Network/Google/Resource/CloudBilling/BillingAccounts/Projects/List.hs | mpl-2.0 | 6,655 | 0 | 18 | 1,442 | 896 | 523 | 373 | 128 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.Subscriptions.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a subscription.
--
-- /See:/ <https://developers.google.com/youtube/v3 YouTube Data API Reference> for @youtube.subscriptions.delete@.
module Network.Google.Resource.YouTube.Subscriptions.Delete
(
-- * REST Resource
SubscriptionsDeleteResource
-- * Creating a Request
, subscriptionsDelete
, SubscriptionsDelete
-- * Request Lenses
, sdId
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.subscriptions.delete@ method which the
-- 'SubscriptionsDelete' request conforms to.
type SubscriptionsDeleteResource =
"youtube" :>
"v3" :>
"subscriptions" :>
QueryParam "id" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a subscription.
--
-- /See:/ 'subscriptionsDelete' smart constructor.
newtype SubscriptionsDelete = SubscriptionsDelete'
{ _sdId :: Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SubscriptionsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdId'
subscriptionsDelete
:: Text -- ^ 'sdId'
-> SubscriptionsDelete
subscriptionsDelete pSdId_ =
SubscriptionsDelete'
{ _sdId = pSdId_
}
-- | The id parameter specifies the YouTube subscription ID for the resource
-- that is being deleted. In a subscription resource, the id property
-- specifies the YouTube subscription ID.
sdId :: Lens' SubscriptionsDelete Text
sdId = lens _sdId (\ s a -> s{_sdId = a})
instance GoogleRequest SubscriptionsDelete where
type Rs SubscriptionsDelete = ()
type Scopes SubscriptionsDelete =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient SubscriptionsDelete'{..}
= go (Just _sdId) (Just AltJSON) youTubeService
where go
= buildClient
(Proxy :: Proxy SubscriptionsDeleteResource)
mempty
| rueshyna/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/Subscriptions/Delete.hs | mpl-2.0 | 2,958 | 0 | 12 | 661 | 317 | 195 | 122 | 49 | 1 |
--
-- Copyright 2017-2018 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE DisambiguateRecordFields #-}
module Bolour.Language.Domain.WordDictionarySpec where
import Test.Hspec
import Data.List
import qualified Data.Set as Set
import Control.Monad.Except (runExceptT)
import qualified Data.ByteString.Lazy.Char8 as BS
import Bolour.Language.Domain.WordDictionary (
WordDictionary
, WordDictionary(WordDictionary)
)
import qualified Bolour.Language.Domain.WordDictionary as Dict
import qualified Bolour.Language.Domain.DictionaryIO as DictIO
spec :: Spec
spec = do
describe "reading masked words" $ do
it "can read masked words" $ do
Right maskedWords <- runExceptT $ DictIO.readMaskedWordsFile "data" "test"
-- Right maskedWords <- runExceptT $ DictIO.readMaskedWordsFile "dict" "en"
let folder acc elem = acc + 1
number = foldl' folder 0 maskedWords
print $ show number
let set = Set.fromList maskedWords
((BS.pack "AC CUL R") `Set.member` set) `shouldBe` True
describe "test reading dictionary" $
it "read english dictionary" $ do
Right dictionary <- runExceptT $ DictIO.readDictionary "test" "data" 2
Dict.isWord dictionary "ABATEMENT" `shouldBe` True
describe "test finding words and permutations in dictionary" $ do
let myWords = ["GLASS", "TABLE", "SCREEN", "NO", "ON"]
maskedWords = Set.toList $ Dict.mkMaskedWords myWords 2
dictionary = Dict.mkDictionary "en" myWords maskedWords 2
it "check existing word" $ do
Dict.isWord dictionary "GLASS" `shouldBe` True
it "check non-existent word" $ do
Dict.isWord dictionary "GLAS" `shouldBe` False
it "find existing word permutation" $ do
Dict.getWordPermutations dictionary "ABELT" `shouldBe` ["TABLE"]
it "no word permutations" $ do
Dict.getWordPermutations dictionary "ABEL" `shouldBe` []
it "2 word permutations" $ do
(sort $ Dict.getWordPermutations dictionary "NO") `shouldBe` ["NO", "ON"]
| azadbolour/boardgame | haskell-server/test/Bolour/Language/Domain/WordDictionarySpec.hs | agpl-3.0 | 2,147 | 0 | 18 | 412 | 499 | 264 | 235 | 41 | 1 |
module Main where
import HC
import Lib
main :: IO ()
main = do
top
hctop
initAndRunProgram
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/monads/2017-06-snoyman-readert-design-pattern/app/Main.hs | unlicense | 100 | 0 | 6 | 26 | 34 | 18 | 16 | 8 | 1 |
{- Copyright 2014-2015 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Arata.DB where
import IRC.RFC1459 (toLower)
import Data.SafeCopy
import Data.IxSet
import Data.Acid
import Control.Applicative ((<$>))
import Control.Monad.IO.Class (liftIO)
import Control.Monad.State (gets, modify)
import Control.Monad.Reader (ask)
import Dated
import Arata.Types
queryAccounts :: Query DBState (IxSet Account)
queryAccounts = accounts <$> ask
queryAccountsById :: Integer -> Query DBState (IxSet Account)
queryAccountsById x = (@= x) . accounts <$> ask
queryAccountsByNick :: String -> Query DBState (IxSet Account)
queryAccountsByNick x = (@= map toLower x) . accounts <$> ask
queryAccountsByName :: String -> Query DBState (IxSet Account)
queryAccountsByName x = (@= map toLower x) . accounts <$> ask
addAccount :: Account -> Update DBState ()
addAccount acc = modify (\s -> s { accounts = insert acc (accounts s) })
updateAccount :: Account -> Update DBState ()
updateAccount acc = modify (\s -> s { accounts = updateIx (accId acc) acc (accounts s) })
queryDB q = do
as <- gets acidState
liftIO (query as q)
updateDB u = do
as <- gets acidState
liftIO (update as u)
$(deriveSafeCopy 0 'base ''Dated)
$(deriveSafeCopy 0 'base ''AuthMethod)
$(deriveSafeCopy 0 'base ''Account)
$(deriveSafeCopy 0 'base ''DBState)
$(makeAcidic ''DBState ['queryAccounts, 'queryAccountsById, 'queryAccountsByNick, 'queryAccountsByName, 'addAccount, 'updateAccount])
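-- A hypothetical usage sketch (not part of the original module): 'makeAcidic'
-- generates an event type per function listed above, which can then be run
-- through 'queryDB' / 'updateDB', e.g.
--
-- > accs <- queryDB (QueryAccountsByNick "somenick")
-- > updateDB (AddAccount acc)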
| shockkolate/arata | src/Arata/DB.hs | apache-2.0 | 2,150 | 0 | 12 | 340 | 534 | 285 | 249 | 38 | 1 |
-- Copyright (c) 2016 Fernando Rincon
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Erp
( module Erp.Commons
, module Erp.World
)
where
import Erp.Commons
import Erp.World
| frincon/heventsourced | erp/src/Erp.hs | apache-2.0 | 701 | 0 | 5 | 124 | 41 | 31 | 10 | 5 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module HERMIT.Dictionary.AlphaConversion
( -- * Alpha-Renaming and Shadowing
externals
-- ** Alpha-Renaming
, alphaR
, alphaLamR
, alphaCaseBinderR
, alphaAltWithR
, alphaAltVarsR
, alphaAltR
, alphaCaseR
, alphaLetWithR
, alphaLetVarsR
, alphaLetR
, alphaProgConsWithR
, alphaProgConsIdsR
, alphaProgConsR
, alphaProgR
-- ** Shadow Detection and Unshadowing
, unshadowR
, unshadowExprR
, unshadowAltR
, unshadowProgR
, visibleVarsT
, cloneVarAvoidingT
, freshNameGenAvoiding
, detectShadowsM
, replaceVarR
) where
import Control.Arrow
import Control.Monad (liftM, liftM2)
import Data.Char (isDigit)
import Data.Function (on)
import Data.List (intersect, delete, elemIndex)
import Data.Maybe (listToMaybe)
import HERMIT.Core
import HERMIT.Context
import HERMIT.Kure
import HERMIT.External
import HERMIT.GHC
import HERMIT.Name
import HERMIT.Utilities(dupsBy)
import HERMIT.Dictionary.GHC hiding (externals)
import HERMIT.Dictionary.Common
import Prelude hiding (exp)
-----------------------------------------------------------------------
-- | Externals for alpha-renaming.
externals :: [External]
externals = map (.+ Deep)
[ external "alpha" (promoteCoreR alphaR :: RewriteH LCore)
[ "Renames the bound variables at the current node."]
, external "alpha-lam" (promoteExprR . alphaLamR . Just :: String -> RewriteH LCore)
[ "Renames the bound variable in a Lambda expression to the given name."]
, external "alpha-lam" (promoteExprR (alphaLamR Nothing) :: RewriteH LCore)
[ "Renames the bound variable in a Lambda expression."]
, external "alpha-case-binder" (promoteExprR . alphaCaseBinderR . Just :: String -> RewriteH LCore)
[ "Renames the binder in a Case expression to the given name."]
, external "alpha-case-binder" (promoteExprR (alphaCaseBinderR Nothing) :: RewriteH LCore)
[ "Renames the binder in a Case expression."]
, external "alpha-alt" (promoteAltR alphaAltR :: RewriteH LCore)
[ "Renames all binders in a Case alternative."]
, external "alpha-alt" (promoteAltR . alphaAltWithR :: [String] -> RewriteH LCore)
[ "Renames all binders in a Case alternative using the user-provided list of new names."]
, external "alpha-case" (promoteExprR alphaCaseR :: RewriteH LCore)
[ "Renames all binders in a Case alternative."]
, external "alpha-let" (promoteExprR . alphaLetWithR :: [String] -> RewriteH LCore)
[ "Renames the bound variables in a Let expression using a list of suggested names."]
, external "alpha-let" (promoteExprR alphaLetR :: RewriteH LCore)
[ "Renames the bound variables in a Let expression."]
, external "alpha-top" (promoteProgR . alphaProgConsWithR :: [String] -> RewriteH LCore)
[ "Renames the bound identifiers in the top-level binding group at the head of the program using a list of suggested names."]
, external "alpha-top" (promoteProgR alphaProgConsR :: RewriteH LCore)
[ "Renames the bound identifiers in the top-level binding at the head of the program."]
, external "alpha-prog" (promoteProgR alphaProgR :: RewriteH LCore)
[ "Rename all top-level identifiers in the program."]
, external "unshadow" (promoteCoreR unshadowR :: RewriteH LCore)
[ "Rename local variables with manifestly unique names (x, x0, x1, ...)."]
]
-----------------------------------------------------------------------
--
-- freshNameGen is a function used in conjunction with cloneVarH, which clones an existing 'Var'.
-- But, what name should the new Id have?
-- cloneVarH generates a new Unique -- so we are positive that the new Var will be new,
-- but freshNameGen tries to assign a Name that will be meaningful to the user, and
-- not shadow other names in scope.
-- So, we start with the name of the original Id, and add an integer suffix
-- x goes to x0 or x1 or ...
-- and we do not want this newly generated name to shadow either:
-- 1. Any free variable name in the active Expr; or
-- 2. Any bound variables in context.
-- | Collect all visible variables (in the expression or the context).
visibleVarsT :: (BoundVars c, Monad m) => Transform c m CoreTC VarSet
visibleVarsT = -- TODO: implement freeVarsLCoreTC
liftM2 unionVarSet boundVarsT (promoteT $ arr freeVarsCoreTC)
-- | If a name is provided, use that as the name of the new variable.
-- Otherwise modify the variable name making sure to /not/ clash with the given variables or any visible variables.
cloneVarAvoidingT :: (BoundVars c, MonadUnique m) => Var -> Maybe String -> [Var] -> Transform c m CoreTC Var
cloneVarAvoidingT v mn vs =
do vvs <- visibleVarsT
let nameModifier = freshNameGenAvoiding mn (extendVarSetList vvs vs)
constT (cloneVarH nameModifier v)
-- | Use the optional argument if given, otherwise generate a new name avoiding clashes with the set of variables.
freshNameGenAvoiding :: Maybe String -> VarSet -> (String -> String)
freshNameGenAvoiding mn vs str = maybe (inventNames vs str) ((\(c:cs) -> reverse (c:(takeWhile (/='.') cs))) . reverse) mn
-- The 'Just' case above gives the unqualified portion of the name (properly handling the compose operator '.')
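-- An illustrative example (not in the original source): with a qualified
-- suggestion only the unqualified part is kept, e.g.
-- freshNameGenAvoiding (Just "Data.Map.insert") emptyVarSet "ignored" == "insert"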
-- | Invent a new String based on the old one, but avoiding clashing with the given list of identifiers.
inventNames :: VarSet -> String -> String
inventNames curr old = head
[ nm
| nm <- old : [ base ++ show uq | uq <- [start ..] :: [Int] ]
, nm `notElem` names
]
where
names = varSetToStrings curr
nums = reverse $ takeWhile isDigit (reverse old)
baseLeng = length $ drop (length nums) old
base = take baseLeng old
start = case reads nums of
[(v,_)] -> v + 1
_ -> 0
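-- An illustrative example (not in the original source): if the set already
-- contains variables printing as "x" and "x0", then inventNames yields "x1".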
-- | Discard variables from the first set that do not shadow a variable in the second set.
shadowedBy :: VarSet -> VarSet -> VarSet
shadowedBy vs fvs = let fvNames = varSetToStrings fvs
in filterVarSet (\ v -> unqualifiedName v `elem` fvNames) vs
-- | Shadows are any duplicates in the list, or any occurrences of the list elements in the set.
detectShadowsM :: Monad m => [Var] -> VarSet -> m VarSet
detectShadowsM bs fvs = let ss = shadowedBy (mkVarSet bs) fvs `extendVarSetList` dupVars bs
in do guardMsg (not $ isEmptyVarSet ss) "No shadows detected."
return ss
-- | Rename local variables with manifestly unique names (x, x0, x1, ...).
-- Does not rename top-level definitions.
unshadowR :: ( AddBindings c, BoundVars c, ExtendPath c Crumb, HasEmptyContext c
, ReadPath c Crumb, MonadCatch m, MonadUnique m )
=> Rewrite c m Core
unshadowR = setFailMsg "No shadows to eliminate." $
anytdR (promoteExprR unshadowExprR <+ promoteAltR unshadowAltR <+ promoteProgR unshadowProgR)
unshadowExprR :: (AddBindings c, BoundVars c, ExtendPath c Crumb, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreExpr
unshadowExprR = do
bs <- letVarsT <+ (liftM return (caseBinderIdT <+ lamVarT))
fvs <- liftM2 unionVarSet boundVarsT (arr freeVarsExpr)
ss <- detectShadowsM bs fvs
alphaLamR Nothing <+ alphaLetVarsR (varSetElems ss) <+ alphaCaseBinderR Nothing
unshadowAltR :: (AddBindings c, BoundVars c, ExtendPath c Crumb, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreAlt
unshadowAltR = do
bs <- arr altVars
fvs <- liftM2 unionVarSet boundVarsT (arr freeVarsAlt)
ss <- detectShadowsM bs fvs
alphaAltVarsR (varSetElems ss)
unshadowProgR :: (AddBindings c, BoundVars c, ExtendPath c Crumb, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreProg
unshadowProgR = do
bs <- progConsIdsT
fvs <- liftM2 unionVarSet boundVarsT (arr freeVarsProg)
ss <- detectShadowsM bs fvs
alphaProgConsIdsR (varSetElems ss)
dupVars :: [Var] -> [Var]
dupVars = dupsBy ((==) `on` unqualifiedName)
-----------------------------------------------------------------------
-- Maybe this should be defined in Dictionary.GHC.
-- | Replace all occurrences of a specified variable.
-- Arguments are the variable to replace and the replacement variable, respectively.
replaceVarR :: (Injection a Core, MonadCatch m) => Var -> Var -> Rewrite c m a
replaceVarR v v' = extractR $ tryR $ substR v $ varToCoreExpr v'
-- TODO: Experimental
replaceRecBindVarR :: Monad m => Id -> Id -> Rewrite c m CoreBind
replaceRecBindVarR v v' =
do Rec ies <- idR
let (is,es) = unzip ies
case elemIndex v is of
Nothing -> fail "Specified identifier does not occur in the current recursive binding group."
Just n -> let is0 = delete v is
(is1,is2) = splitAt n is0
is' = is1 ++ v' : is2
es' = map (substCoreExpr v (Var v')) es
-- TODO. Do we need to initialize the emptySubst with bindFreeVars?
sub = extendSubst emptySubst v (Var v')
in return $ snd $ substBind sub (Rec (zip is' es'))
-- let is0 = delete v is
-- emptySub = mkEmptySubst $ mkInScopeSet $ unionVarSets (map (localFreeVarsExpr . Var) is0)
-- sub = extendSubst emptySub v (Var v')
-- (is1,is2) = splitAt n (snd $ substRecBndrs sub is0)
-- is' = is1 ++ v' : is2
-- es' = map (substCoreExpr v (Var v')) es
-- in return $ Rec (zip is' es')
-----------------------------------------------------------------------
-- | Alpha rename a lambda binder. Optionally takes a suggested new name.
alphaLamR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> Rewrite c m CoreExpr
alphaLamR mn = setFailMsg (wrongFormForAlpha "Lam v e") $
do v <- lamVarT
v' <- extractT (cloneVarAvoidingT v mn [v])
lamAnyR (return v') (replaceVarR v v')
-----------------------------------------------------------------------
-- | Alpha rename a case binder. Optionally takes a suggested new name.
alphaCaseBinderR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> Rewrite c m CoreExpr
alphaCaseBinderR mn = setFailMsg (wrongFormForAlpha "Case e i ty alts") $
do i <- caseBinderIdT
i' <- extractT (cloneVarAvoidingT i mn [i])
caseAnyR idR (return i') idR (\ _ -> replaceVarR i i')
-----------------------------------------------------------------------
-- | Rename the specified variable in a case alternative. Optionally takes a suggested new name.
alphaAltVarR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> Var -> Rewrite c m CoreAlt
alphaAltVarR mn v = do
(con, vs, rhs) <- idR
v' <- extractT (cloneVarAvoidingT v mn vs)
  -- This is a bit of a hack. We include all the binders *after* v in the call to substCoreAlt,
  -- then put the binders before v, and v', back on the front. Using substCoreAlt this way
  -- handles the case where v is a type binder which substitutes into the types of bs'.
  -- It's a hack because we depend on substCoreAlt not noticing that the constructor is not applied
  -- to enough binders.
case break (==v) vs of
(bs,_:bs') -> let (con',bs'',rhs') = substCoreAlt v (varToCoreExpr v') (con,bs',rhs)
in return (con',bs ++ (v':bs''),rhs')
_ -> fail "pattern binder not present."
-- | Rename the specified variables in a case alternative, using the suggested names where provided.
-- Suggested names *must* be provided in left-to-right order matching the order of the alt binders.
alphaAltVarsWithR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [(Maybe String,Var)] -> Rewrite c m CoreAlt
alphaAltVarsWithR = andR . map (uncurry alphaAltVarR) . reverse -- note: right-to-left so type substitutions aren't undone
-- | Rename the variables bound in a case alternative with the given list of suggested names.
alphaAltWithR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [String] -> Rewrite c m CoreAlt
alphaAltWithR ns =
do vs <- arr altVars
alphaAltVarsWithR $ zip (map Just ns) vs
-- | Rename the specified variables in a case alternative.
alphaAltVarsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [Var] -> Rewrite c m CoreAlt
alphaAltVarsR vs =
do bs <- arr altVars
alphaAltVarsWithR (zip (repeat Nothing) (bs `intersect` vs))
-- | Rename all identifiers bound in a case alternative.
alphaAltR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreAlt
alphaAltR = arr altVars >>= alphaAltVarsR
-----------------------------------------------------------------------
-- | Rename all identifiers bound in a case expression.
alphaCaseR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreExpr
alphaCaseR = alphaCaseBinderR Nothing >+> caseAllR idR idR idR (const alphaAltR)
-----------------------------------------------------------------------
-- | Alpha rename a non-recursive let binder. Optionally takes a suggested new name.
alphaLetNonRecR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> Rewrite c m CoreExpr
alphaLetNonRecR mn = setFailMsg (wrongFormForAlpha "Let (NonRec v e1) e2") $
do v <- letNonRecVarT
v' <- extractT (cloneVarAvoidingT v mn [v])
letNonRecAnyR (return v') idR (replaceVarR v v')
-- | Alpha rename a non-recursive let binder if the variable appears in the argument list. Optionally takes a suggested new name.
alphaLetNonRecVarsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> [Var] -> Rewrite c m CoreExpr
alphaLetNonRecVarsR mn vs = whenM (liftM (`elem` vs) letNonRecVarT) (alphaLetNonRecR mn)
-- TODO: Maybe it would be more efficient to rename all the Ids at once, rather than one by one?
-- | Rename the specified identifiers in a recursive let, using the suggested names where provided.
alphaLetRecIdsWithR :: forall c m. ( ExtendPath c Crumb, ReadPath c Crumb, AddBindings c
, BoundVars c, MonadCatch m, MonadUnique m )
=> [(Maybe String,Id)] -> Rewrite c m CoreExpr
alphaLetRecIdsWithR = andR . map (uncurry alphaLetRecIdR)
where
-- | Rename the specified identifier bound in a recursive let. Optionally takes a suggested new name.
alphaLetRecIdR :: Maybe String -> Id -> Rewrite c m CoreExpr
alphaLetRecIdR mn i = setFailMsg (wrongFormForAlpha "Let (Rec bs) e") $
do is <- letRecIdsT
i' <- extractT (cloneVarAvoidingT i mn is)
letAnyR (replaceRecBindVarR i i') (replaceVarR i i')
-- | Rename the identifiers bound in a Let with the given list of suggested names.
alphaLetWithR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [String] -> Rewrite c m CoreExpr
alphaLetWithR ns = alphaLetNonRecR (listToMaybe ns)
<+ (letRecIdsT >>= (alphaLetRecIdsWithR . zip (map Just ns)))
-- | Rename the specified variables bound in a let.
alphaLetVarsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [Var] -> Rewrite c m CoreExpr
alphaLetVarsR vs = alphaLetNonRecVarsR Nothing vs
<+ (do bs <- letT (arr bindVars) successT const
alphaLetRecIdsWithR (zip (repeat Nothing) (bs `intersect` vs))
)
-- | Rename all identifiers bound in a Let.
alphaLetR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreExpr
alphaLetR = letVarsT >>= alphaLetVarsR
-----------------------------------------------------------------------
-- | Alpha rename a non-recursive top-level binder. Optionally takes a suggested new name.
alphaProgConsNonRecR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> Rewrite c m CoreProg
alphaProgConsNonRecR mn = setFailMsg (wrongFormForAlpha "ProgCons (NonRec v e) p") $
do i <- progConsNonRecIdT
guardMsg (not $ isExportedId i) ("Identifier " ++ unqualifiedName i ++ " is exported, and thus cannot be alpha-renamed.")
i' <- extractT (cloneVarAvoidingT i mn [i])
consNonRecAnyR (return i') idR (replaceVarR i i')
-- | Alpha rename a non-recursive top-level binder if the identifier appears in the argument list. Optionally takes a suggested new name.
alphaProgConsNonRecIdsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Maybe String -> [Id] -> Rewrite c m CoreProg
alphaProgConsNonRecIdsR mn is = whenM (liftM (`elem` is) progConsNonRecIdT) (alphaProgConsNonRecR mn)
-- TODO: Maybe it would be more efficient to rename all the Ids at once, rather than one by one?
-- | Rename the specified identifiers in a recursive top-level binding at the head of a program, using the suggested names where provided.
alphaProgConsRecIdsWithR :: forall c m. ( ExtendPath c Crumb, ReadPath c Crumb, AddBindings c
, BoundVars c, MonadCatch m, MonadUnique m )
=> [(Maybe String,Id)] -> Rewrite c m CoreProg
alphaProgConsRecIdsWithR = andR . map (uncurry alphaProgConsRecIdR) . filter (not . isExportedId . snd)
where
-- | Rename the specified identifier bound in a recursive top-level binder. Optionally takes a suggested new name.
alphaProgConsRecIdR :: Maybe String -> Id -> Rewrite c m CoreProg
alphaProgConsRecIdR mn i = setFailMsg (wrongFormForAlpha "ProgCons (Rec bs) p") $
do is <- progConsRecIdsT
i' <- extractT (cloneVarAvoidingT i mn is)
progConsAnyR (replaceRecBindVarR i i') (replaceVarR i i')
-- | Rename the identifiers bound in the top-level binding at the head of the program with the given list of suggested names.
alphaProgConsWithR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [String] -> Rewrite c m CoreProg
alphaProgConsWithR ns = alphaProgConsNonRecR (listToMaybe ns)
<+ (progConsRecIdsT >>= (alphaProgConsRecIdsWithR . zip (map Just ns)))
-- | Rename the specified variables bound in the top-level binding at the head of the program.
alphaProgConsIdsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> [Id] -> Rewrite c m CoreProg
alphaProgConsIdsR vs = alphaProgConsNonRecIdsR Nothing vs
<+ (do bs <- progConsT (arr bindVars) successT const
alphaProgConsRecIdsWithR (zip (repeat Nothing) (bs `intersect` vs))
)
-- | Rename all identifiers bound in the top-level binding at the head of the program.
alphaProgConsR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreProg
alphaProgConsR = progConsIdsT >>= alphaProgConsIdsR
-----------------------------------------------------------------------
-- | Rename all identifiers bound at the top-level.
alphaProgR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreProg
alphaProgR = alphaProgConsR >+> progConsAllR idR alphaProgR
-----------------------------------------------------------------------
-- | Alpha rename any bindings at this node. Note: does not rename case alternatives unless invoked on the alternative.
alphaR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, BoundVars c, MonadCatch m, MonadUnique m)
=> Rewrite c m Core
alphaR = setFailMsg "Cannot alpha-rename here." $
promoteExprR (alphaLamR Nothing <+ alphaCaseBinderR Nothing <+ alphaLetR)
<+ promoteAltR alphaAltR
<+ promoteProgR alphaProgConsR
-- TODO: Alpha rewrites need better error messages. Currently the use of (<+) leads to incorrect error reporting.
-- Though really, we first need to improve KURE to have a version of (<+) that maintains the existing error message in the case of non-matching constructors.
-- TODO 2: Also, we should be able to rename inside types and coercions.
-- TODO 3: Also, we should be able to rename lemma quantifiers
-----------------------------------------------------------------------
wrongFormForAlpha :: String -> String
wrongFormForAlpha s = "Cannot alpha-rename, " ++ wrongExprForm s
-----------------------------------------------------------------------
|
beni55/hermit
|
src/HERMIT/Dictionary/AlphaConversion.hs
|
bsd-2-clause
| 22,110 | 0 | 18 | 5,493 | 4,745 | 2,454 | 2,291 | 257 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Test.Nero.Match where
import Test.SmallCheck.Series.Instances ()
import Test.Tasty (TestTree, testGroup)
import qualified Test.Tasty.Lens.Prism as Prism
import Nero.Match
import Nero.Prelude
tests :: TestTree
tests = testGroup "Match"
[ Prism.test (prefixed "/" :: Prism' Text Text)
, Prism.test (prefixed "/" :: Prism' Match Match)
, Prism.test (prefixed "" :: Prism' Text Text)
, Prism.test (prefixed "" :: Prism' Match Match)
, Prism.test (suffixed "/" :: Prism' Text Text)
, Prism.test (suffixed "/" :: Prism' Match Match)
, Prism.test (suffixed "" :: Prism' Text Text)
, Prism.test (suffixed "" :: Prism' Match Match)
, Prism.test (sep "/" :: Prism' Match Match)
, Prism.test (sep "" :: Prism' Match Match)
, Prism.test (target :: Prism' Match Match)
, Prism.test (target :: Prism' Match Text)
, Prism.test (target :: Prism' Match Int)
, Prism.test (target :: Prism' Match Float)
, Prism.test (target :: Prism' Match (Int,Text))
]
|
plutonbrb/nero
|
tests/Test/Nero/Match.hs
|
bsd-3-clause
| 1,013 | 0 | 10 | 181 | 387 | 207 | 180 | 24 | 1 |
module Lucid.Foundation.Navigation
( module Lucid.Foundation.Navigation.OffCanvas
, module Lucid.Foundation.Navigation.TopBar
, module Lucid.Foundation.Navigation.IconBar
, module Lucid.Foundation.Navigation.SideNav
, module Lucid.Foundation.Navigation.Magellan
, module Lucid.Foundation.Navigation.SubNav
, module Lucid.Foundation.Navigation.Breadcrumbs
, module Lucid.Foundation.Navigation.Pagination
) where
import Lucid.Foundation.Navigation.OffCanvas
import Lucid.Foundation.Navigation.TopBar
import Lucid.Foundation.Navigation.IconBar
import Lucid.Foundation.Navigation.SideNav
import Lucid.Foundation.Navigation.Magellan
import Lucid.Foundation.Navigation.SubNav
import Lucid.Foundation.Navigation.Breadcrumbs
import Lucid.Foundation.Navigation.Pagination
|
athanclark/lucid-foundation
|
src/Lucid/Foundation/Navigation.hs
|
bsd-3-clause
| 782 | 0 | 5 | 63 | 129 | 94 | 35 | 17 | 0 |
{-# LANGUAGE LambdaCase #-}
module Types.SCCode where
import Data.List
import GHC.Generics
import Types.Filter
import Types.PrettyFilter
import Types.DSPNode
import Utils
rest :: [a] -> [a]
rest [] = []
rest (a:as) = as
data SCCode =
SCFilter { oid :: Int, vars :: [String], function :: String, args :: [(String, String)] }
| SCWhiteNoise { oid :: Int, vars :: [String], iname :: String, function :: String, args :: [(String, String)] }
| SCSeqCompose { oid :: Int, vars :: [String], iname :: String, f :: SCCode, f' :: SCCode }
| SCParCompose { oid :: Int, vars :: [String], iname :: String, f :: SCCode, f' :: SCCode }
instance Show SCCode where
show (SCFilter sc_id sc_vs fnc args) =
(sc_vs !! 0) ++ " = " ++ fnc ++ (getArgString args) ++ ";\n"
show (SCWhiteNoise sc_id sc_vs sc_in fnc args) =
(sc_vs !! 0) ++ " = Mix.ar([" ++ sc_in ++ ", " ++ fnc ++ (getArgString args) ++ "]);\n"
show (SCSeqCompose sc_id sc_vs sc_in f f') =
(show f) ++ (show f')
show (SCParCompose sc_id sc_vs sc_in f f') =
(show f) ++ (show f') ++ (sc_vs !! 0) ++
" = Mix.ar(2, [" ++ ((vars f) !! 0) ++ ", " ++ ((vars f') !! 0) ++ "]);\n"
printSC :: SCCode -> String
printSC sc =
"( \n" ++
"SynthDef(\\dsp_pbe, {|out=0|\n" ++
(indent body) ++ "\n" ++
"}).add;\n" ++
")"
where
playBuf =
"PlayBuf.ar(2, ~buf)"
body =
"var " ++ (intercalate ", " ("main_in":(vars sc))) ++ ";\n" ++
"main_in = " ++ playBuf ++ ";\n" ++
(show sc) ++
"Out.ar(out, " ++ ((vars sc) !! 0) ++ ");\n"
toSCCode :: Filter -> String
toSCCode f =
printSC $ makeSCFilter (fromInternalFilter f) 0 "In.ar()"
makeSCFilter :: PrettyFilter -> Int -> String -> SCCode
makeSCFilter f ioid inn = let
scinfo = getSCInfo f
scfname = fst scinfo
scargs = snd scinfo
in
case f of
Node_p d -> makeSCFilterNode d ioid inn scfname scargs
(SequentialCompose f1 f2) -> SCSeqCompose { oid=(oid scf2), vars=new_vars, iname=inn, f=scf1, f'=scf2 }
where scf1 = makeSCFilter f1 (ioid + 1) inn
scf2 = makeSCFilter f2 ((oid scf1) + 1) ((vars scf1) !! 0)
new_vars = (vars scf2) ++ (vars scf1)
(ParallelCompose f1 f2) -> SCParCompose { oid=newOid, vars=new_vars, iname=inn, f=scf1, f'=scf2 }
where scf1 = makeSCFilter f1 (ioid + 1) inn
scf2 = makeSCFilter f2 ((oid scf1) + 1) inn
newOid = (oid scf2) + 1
new_vars = ("out" ++ (show newOid)):((vars scf2) ++ (vars scf1))
(ID_p a f) -> SCSeqCompose { oid=(oid scf2), vars=new_vars, iname=inn, f=scf1, f'=scf2 }
where scf1 = makeSCFilter f (ioid + 1) inn
scf2 = makeSCFilter (Node_p $ ID a) ((oid scf1) + 1) ((vars scf1) !! 0)
new_vars = (vars scf2) ++ (vars scf1)
makeSCFilterNode :: DSPNode -> Int -> String -> String -> [(String, String)] -> SCCode
makeSCFilterNode d ioid inn scfname scargs = case d of
ID a -> SCFilter { oid=ioid, vars=["id"++(show ioid)], function=scfname ++ inn, args=scargs}
LPF t a -> SCFilter { oid=ioid, vars=["lpf"++(show ioid)], function=scfname, args=("in", inn):scargs }
HPF t a -> SCFilter { oid=ioid, vars=["hpf"++(show ioid)], function=scfname, args=("in", inn):scargs }
PitchShift t a -> SCFilter { oid=ioid, vars=["psh"++(show ioid)], function=scfname, args=("in", inn):scargs }
--Ringz t d a -> SCFilter { oid=ioid, vars=["rgz"++(show ioid)], function=scfname, args=("in", inn):scargs }
WhiteNoise x -> SCWhiteNoise { oid=ioid, vars=["wns"++(show ioid)], iname=inn, function=scfname, args=scargs }
getSCInfo :: PrettyFilter -> (String, [(String, String)])
getSCInfo = \case
Node_p d -> getSCInfoNode d
SequentialCompose f f' -> ("", [])
ParallelCompose f f' -> ("", [])
ID_p a f -> ("", [])
getSCInfoNode :: DSPNode -> (String, [(String,String)])
getSCInfoNode = \case
ID a -> ((show $ ampScale a) ++ " * ", [])
LPF t a -> ("LPF.ar", [("freq", show $ freqScale t), ("mul", show $ ampScale a)])
HPF t a -> ("HPF.ar", [("freq", show $ freqScale t), ("mul", show $ ampScale a)])
{-Ringz t d a -> ("Ringz.ar",
[
("freq", show $ freqScale t),
("decaytime", show $ delayScale d),
("mul", show $ ampScale a)
])-}
PitchShift t a -> ("FreqShift.ar", [("pitchRatio", show $ freqScalePitchShift t), ("mul", show $ ampScale a)])
WhiteNoise x -> ("WhiteNoise.ar", [("mul", show $ ampScale x)])
getArgString :: [(String, String)] -> String
getArgString [] = ""
getArgString args =
  let paired = map (\ (k, v) -> k ++ ": " ++ v) args
  in "(" ++ intercalate ", " paired ++ ")"
|
aedanlombardo/HaskellPS
|
DSP-PBE/src/Types/SCCode.hs
|
bsd-3-clause
| 5,056 | 0 | 21 | 1,493 | 1,974 | 1,092 | 882 | 88 | 5 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Haystack.Web.Preferences where
import Prelude hiding (forM_, mapM_)
import Data.Foldable (forM_, mapM_) -- Foldable variants; needed since they are hidden from Prelude above
import Control.Applicative ((<$>), optional)
import Control.Monad (msum, liftM2)
import Control.Monad.Reader (ask, ReaderT)
import Data.List.Split (splitOn)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Text.Blaze.Html5 (Html, (!), a, form, input, p, toHtml, label, option, select)
import Text.Blaze.Html5.Attributes (action, enctype, href, name, size, type_, value)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Haystack
import Haystack.Web.Template
prefMap :: [(String, String)]
prefMap = [ ("popular", "Popular")
, ("gems", "Forgotten Gems")
, ("new", "New Releases")
, ("family", "Family Games")
, ("party", "Party Games")
, ("abstract", "Abstract Games")
, ("strategy", "Themed Strategy Games")
, ("player2", "2 Player Games")
, ("player3", "3+ Player Games")
]
row :: String -> Html -> Html
row rowlabel contents =
H.tr $ do H.td ! A.style "text-align: right"
$ label $ H.string (rowlabel ++ ":")
H.td contents
prefTable :: Html
prefTable =
H.table $ do
row "Username" $ input ! type_ "input"
! name "username"
row "Email" $ input ! type_ "input"
! name "email"
forM_ prefMap prefRow
where
prefRow (v, s) =
row s $ do "hate"
mapM_ prefRadio [0..4]
"love"
where prefRadio i = input
! type_ "radio"
! name (H.stringValue v)
! value (H.stringValue $ show i)
runForm :: H.AttributeValue -> Html -> Html
runForm submit contents =
form ! action "/form"
! enctype "multipart/form-data"
! A.method "POST" $ do
input ! type_ "submit" ! value "No preferences!"
contents
input ! type_ "submit" ! value submit
prefPage :: App Response
prefPage = msum [ viewForm, processForm ]
where
viewForm =
do method GET
ok $ template "Update Preferences" $ do
runForm "Submit!" prefTable
processForm =
do method POST
db <- ask
username <- look "username"
email <- look "email"
formData <- sequence $ map (runText . fst) prefMap
let prefs = buildPref formData
liftIO $ update db (SetPrefs (username, email) prefs)
ok $ template "Preferences Saved" $ do
H.p "Your preferences have been saved!"
buildPref prefs =
GamePref { likesPopular = get "popular"
, likesNewRelease = get "new"
, likesForgotten = get "gems"
, likesFamily = get "family"
, likesParty = get "party"
, likesAbstract = get "abstract"
, likesStrategy = get "strategy"
, likes2Player = get "player2"
, likes3Player = get "player3"
}
where get x = fromMaybe 2 $ lookup x prefs
runText idx = do formVal <- optional $ look idx
let rating = fmap read formVal
return (idx, fromMaybe 2 rating)
printScores pref game =
H.p $ do
toHtml $ gameName game
H.br
toHtml (show . score pref $ metadata game)
servePrefs :: App Response
servePrefs =
do method GET
requests <- fmap (splitOn ":") <$> looks "requests"
let reqPairs = liftM2 (,) (!! 0) (!! 1) <$> filter ((2 ==) . length) requests
db <- ask
prefs <- liftIO $ query db (GetPrefs reqPairs)
ok . serveCSV . export $ prefs
|
isovector/haystack
|
src/Haystack/Web/Preferences.hs
|
bsd-3-clause
| 4,037 | 0 | 14 | 1,480 | 1,155 | 604 | 551 | 98 | 1 |
{-# LANGUAGE DoAndIfThenElse #-}
module Type(Type(..), makeUniqueTVars, parse) where
import Text.Parsec hiding (parse)
import qualified Text.Parsec as Parsec
import Control.Applicative hiding ((<|>))
import Control.Monad
import Data.List
import Term(TermClass(..), Substitution(..))
import VarEnvironment
data Type = TVar String
| TArr Type Type
| TForall String Type
instance Eq Type where
(TVar x) == (TVar y) = x == y
(TArr m n) == (TArr p q) = m == p && n == q
(TForall x m) == (TForall y n)
| x == y = m == n
| otherwise = m == substitute (y `AssignTo` TVar x) n
_ == _ = False
instance TermClass Type where
freeVars (TForall x t) = filter (/= x) $ freeVars t
freeVars (TArr x y) = nub $ freeVars x ++ freeVars y
freeVars (TVar x) = [x]
substitute (v `AssignTo` t) (TVar x)
| v == x = t
| otherwise = TVar x
substitute (v `AssignTo` t) (TArr x y) = TArr (substitute (v `AssignTo` t) x) (substitute (v `AssignTo` t) y)
substitute (v `AssignTo` t) (TForall x y)
| v == x = TForall x y
| otherwise = TForall x (substitute (v `AssignTo` t) y)
-- TODO: horrible repetition of makeUniqueVars
makeUniqueTVars :: (MonadEnvironment m) => Type -> m Type
makeUniqueTVars t = do
mapM_ addToEnvironment $ freeVars t
go t
where go (TVar x) = return $ TVar x
go (TArr m n) = TArr <$> go m <*> go n
go (TForall x m) = do
(x', m') <- renameVariable x m
addToEnvironment x'
TForall x' <$> go m'
renameVariable x m = do
inEnv <- inEnvironment x
if inEnv then do
y <- newVar "type"
return (y, substitute (x `AssignTo` TVar y) m)
else return (x, m)
------ Printing ------
brackets :: Int -> String -> String
brackets p str = if p > 0 then "(" ++ str ++ ")" else str
arrSymb :: String
arrSymb = "→"
--arrSymb = "->"
forallSymb :: String
forallSymb = "∀"
--forallSymb = "\\-/"
showType :: Int -> Type -> String
showType _ (TVar x) = x
showType prec (TArr t1 t2) = brackets (prec - 1) $
showType 2 t1 ++ " " ++ arrSymb ++ " " ++ showType 1 t2
showType prec t@(TForall _ _) = brackets prec $ forallSymb ++ showForall t
where showForall (TForall x n@(TForall _ _)) = x ++ " " ++ showForall n
showForall (TForall x n) = x ++ ". " ++ showType 0 n
showForall _ = error "showForall: Argument is not TForall. Couldn't happen."
instance Show Type where
show = showType 0
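-- For example (using the symbols defined above), the polymorphic identity type
-- renders as:
--
-- > show (TForall "a" (TArr (TVar "a") (TVar "a"))) == "∀a. a → a"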
------ Parsing ------
type Parser = Parsec String ()
varname :: Parser String
varname = many1 (alphaNum <|> oneOf "_")
bracketExpr :: Parser Type -> Parser Type
bracketExpr = between (char '(' *> spaces) (spaces *> char ')')
forallExpr :: Parser Type
forallExpr = do
void $ try (string forallSymb) <|> string "\\-/"
spaces
vs <- many1 (varname <* spaces)
void $ char '.'
spaces
t <- typeExpr
return $ foldr TForall t vs
typeExpr :: Parser Type
typeExpr = try arrExpr <|> noArrExpr
noArrExpr :: Parser Type
noArrExpr = choice
[ TVar <$> varname
, forallExpr
, bracketExpr typeExpr
]
arrExpr :: Parser Type
arrExpr = do
t1 <- noArrExpr
spaces
void $ try (string arrSymb) <|> string "->"
spaces
t2 <- typeExpr
return $ t1 `TArr` t2
fullExpr :: Parser Type
fullExpr = spaces *> typeExpr <* spaces <* eof
parse :: String -> Either ParseError Type
parse = Parsec.parse fullExpr ""
|
projedi/type-inference-rank2
|
src/Type.hs
|
bsd-3-clause
| 3,404 | 0 | 17 | 869 | 1,399 | 708 | 691 | 95 | 4 |
{-# LANGUAGE CPP #-}
import Prelude
#if !MIN_VERSION_base(4,6,0)
hiding (catch)
#endif
import Control.Exception (IOException, catch)
import Control.Exception.Assert
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import Data.Monoid
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Setup
import System.Directory
import System.Exit
import System.FilePath
import System.Posix.Redirect
main :: IO ()
main = byPred assert "false" id False $ do
defaultMainWithHooksArgs simpleUserHooks
{ buildHook = hook }
[ "build", "--ghc-option=-ddump-rule-firings" ]
putStrLn "http://youtu.be/HOLYYYsFqcI"
hook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO ()
hook pd lbi uh bf = do
-- more reliable way to force a rebuild?
removeDirectoryRecursive (buildDir lbi </> "rewrite" </> "rewrite-tmp")
`catch` \ e -> return () `const` (e :: IOException)
-- some versions of GHC prints to stderr
(err, (out, _)) <- redirectStderr . redirectStdout $
buildHook simpleUserHooks pd lbi uh bf
let combined = BS.lines err <> BS.lines out
unless (BS.pack "Rule fired: assertMessage" `elem` combined) $ do
mapM_ BS.putStrLn combined
putStrLn "Rule NOT fired: assertMessage"
exitWith (ExitFailure 1)
|
liyang/assert
|
tests/rewrite.hs
|
bsd-3-clause
| 1,394 | 1 | 12 | 259 | 364 | 191 | 173 | 33 | 1 |
module Paths_src (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import Data.Version (Version(..))
import System.Environment (getEnv)
version :: Version
version = Version {versionBranch = [0,1], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
bindir = "/home/tao/.cabal/bin"
libdir = "/home/tao/.cabal/lib/src-0.1/ghc-6.12.1"
datadir = "/home/tao/.cabal/share/src-0.1"
libexecdir = "/home/tao/.cabal/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
getBinDir = catch (getEnv "src_bindir") (\_ -> return bindir)
getLibDir = catch (getEnv "src_libdir") (\_ -> return libdir)
getDataDir = catch (getEnv "src_datadir") (\_ -> return datadir)
getLibexecDir = catch (getEnv "src_libexecdir") (\_ -> return libexecdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
|
part-xx/hott-plastic
|
dist/build/autogen/Paths_src.hs
|
bsd-3-clause
| 939 | 0 | 10 | 144 | 277 | 159 | 118 | 22 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Fragment.Let.Helpers (
tmLetRec
, tmLet
, checkLetBindings
) where
import Data.List (elemIndex)
import Bound (abstract)
import Bound.Scope (bindings)
import Control.Lens (review)
import Control.Lens.Wrapped (_Unwrapped)
import Ast.Type
import Ast.Term
import Fragment.Let.Ast.Term
tmLetRec :: (Eq a, TmAstBound ki ty pt tm, AsTmLet ki ty pt tm)
=> [(a, Maybe (Type ki ty a), Term ki ty pt tm a)]
-> Term ki ty pt tm a
-> Term ki ty pt tm a
tmLetRec bs tm =
let
bs' =
fmap f bs
f (v, ty, tm') =
(TmAstTmVar v, ty, abstr . review _Unwrapped $ tm')
abstr =
abstract (`elemIndex` fmap (\(x, _, _) -> x) bs')
ast =
abstr $ review _Unwrapped tm
in
review _TmLet (fmap (\(_, ty, s) -> LetBinding (fmap (review _TmType) ty) s) bs', ast)
tmLet :: (Eq a, TmAstBound ki ty pt tm, AsTmLet ki ty pt tm)
=> [(a, Maybe (Type ki ty a), Term ki ty pt tm a)]
-> Term ki ty pt tm a
-> Term ki ty pt tm a
tmLet bs tm =
let
bs' =
zipWith f [0..] bs
f i (v, ty, tm') =
(TmAstTmVar v, ty, abstr i . review _Unwrapped $ tm')
trim i j
| i < j = Just j
| otherwise = Nothing
abstr i =
abstract (\v -> (>>= trim i) . elemIndex v . fmap (\(x, _, _) -> x) $ bs')
ast =
abstr (length bs') $ review _Unwrapped tm
in
review _TmLet (fmap (\(_, ty, s) -> LetBinding (fmap (review _TmType) ty) s) bs', ast)
checkLetBindings :: Foldable tm => [LetBinding ki ty pt tm a] -> Bool
checkLetBindings bs =
let
f i (LetBinding _ ast) = all (< i) . bindings $ ast
in
and . zipWith f [0..] $ bs
|
dalaing/type-systems
|
src/Fragment/Let/Helpers.hs
|
bsd-3-clause
| 1,861 | 0 | 17 | 520 | 800 | 424 | 376 | 52 | 1 |
-- | Messages to be printed to the player
module Quoridor.Cmdline.Messages
where
import Quoridor (Color, Turn)
msgAwaitingTurn :: Color -> String
msgAwaitingTurn c = "Waiting for " ++ show c ++ " to take their turn."
msgYourTurn :: String
msgYourTurn = "It's your turn."
msgGameEnd :: Color -> String
msgGameEnd c = show c ++ " won!"
msgValidTurn :: Color -> Turn -> String
msgValidTurn c t = "Previous turn was: " ++ show c ++ " - " ++ show t
msgInvalidTurn, msgInitialTurn :: String
msgInvalidTurn = "Attempted Turn was invalid"
msgInitialTurn = "Good luck!"
validMovesChars :: String
validMovesChars = "!@#$%^&*"
msgInputInstr :: String
msgInputInstr = unlines
[ "type: g y x [h/v] to place horizontal/vertical gate,"
, " across a 2x2 square, whose top left is at y,x"
, "type: m y x to move."
, "type one of: " ++ validMovesChars ++ " to move to where that character is on the board."
]
|
talw/quoridor-hs
|
src/Quoridor/Cmdline/Messages.hs
|
bsd-3-clause
| 946 | 0 | 8 | 214 | 182 | 101 | 81 | 21 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module EnumTests ( Mode(Directory, NormalFile, ExecutableFile, Symlink) ) where
import Protolude hiding (Enum)
import GraphQL.API (GraphQLEnum)
-- https://github.com/jml/graphql-api/issues/116
-- Generic enum code is broken
data Mode = Directory | NormalFile | ExecutableFile | Symlink deriving (Show, Eq, Generic)
instance GraphQLEnum Mode
|
jml/graphql-api
|
tests/EnumTests.hs
|
bsd-3-clause
| 377 | 0 | 6 | 50 | 79 | 51 | 28 | 9 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Update
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Update
( update
) where
import Distribution.Client.Types
( Repo(..), RemoteRepo(..), LocalRepo(..) )
import Distribution.Client.HttpUtils
( DownloadResult(..) )
import Distribution.Client.FetchUtils
( downloadIndex )
import Distribution.Client.IndexUtils
( updateRepoIndexCache )
import Distribution.Client.Config
( defaultPatchesDir )
import Distribution.Simple.Configure
( etaHackageUrl )
import Distribution.Simple.Program
( gitProgram, defaultProgramConfiguration, runProgramInvocation, programInvocation,
requireProgramVersion )
import Distribution.Simple.Utils
( writeFileAtomic, warn, notice )
import Distribution.Version
( Version(..), orLaterVersion )
import Distribution.Verbosity
( Verbosity )
import Distribution.Client.GZipUtils ( maybeDecompress )
import System.FilePath ( dropExtension )
import System.Directory ( doesDirectoryExist )
import qualified Data.ByteString.Lazy as BS
-- | 'update' downloads the package list from all known servers
update :: Verbosity -> [Repo] -> IO ()
update verbosity [] =
warn verbosity $ "No remote package servers have been specified. Usually "
++ "you would have one specified in the config file."
update verbosity repos = do
mapM_ (updateRepo verbosity) repos
updatePatchRepo verbosity
updateRepo :: Verbosity -> Repo -> IO ()
updateRepo verbosity repo = case repoKind repo of
Right LocalRepo -> return ()
Left remoteRepo -> do
notice verbosity $ "Downloading the latest package list from "
++ remoteRepoName remoteRepo
downloadResult <- downloadIndex verbosity (repoIndexType repo)
remoteRepo (repoLocalDir repo)
case downloadResult of
FileAlreadyInCache -> return ()
FileDownloaded indexPath -> do
writeFileAtomic (dropExtension indexPath) . maybeDecompress
=<< BS.readFile indexPath
updateRepoIndexCache verbosity repo
-- git only supports the -C flag as of 1.8.5
-- See http://stackoverflow.com/questions/5083224/git-pull-while-not-in-a-git-directory
updatePatchRepo :: Verbosity -> IO ()
updatePatchRepo verbosity = do
notice verbosity $ "Updating the eta-hackage patch set"
(gitProg, _, _) <- requireProgramVersion verbosity
gitProgram
(orLaterVersion (Version [1,8,5] []))
defaultProgramConfiguration
let runGit = runProgramInvocation verbosity . programInvocation gitProg
patchesDir <- defaultPatchesDir
exists <- doesDirectoryExist patchesDir
if exists
then runGit ["-C", patchesDir, "pull"]
else
runGit ["clone", "--depth=1", "--config", "core.autocrlf=false", etaHackageUrl, patchesDir]
|
typelead/epm
|
epm/Distribution/Client/Update.hs
|
bsd-3-clause
| 3,325 | 0 | 19 | 756 | 632 | 348 | 284 | 63 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
-- for unboxed shifts
-----------------------------------------------------------------------------
-- |
-- Module : Data.Serialize.Builder
-- Copyright : Lennart Kolmodin, Ross Paterson, Galois Inc. 2009
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Trevor Elliott <[email protected]>
-- Stability :
-- Portability :
--
-- Efficient construction of lazy bytestrings.
--
-----------------------------------------------------------------------------
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
#include "MachDeps.h"
#endif
module Data.Serialize.Builder (
-- * The Builder type
Builder
, toByteString
, toLazyByteString
-- * Constructing Builders
, empty
, singleton
, append
, fromByteString -- :: S.ByteString -> Builder
, fromLazyByteString -- :: L.ByteString -> Builder
-- * Flushing the buffer state
, flush
-- * Derived Builders
-- ** Big-endian writes
, putWord16be -- :: Word16 -> Builder
, putWord32be -- :: Word32 -> Builder
, putWord64be -- :: Word64 -> Builder
-- ** Little-endian writes
, putWord16le -- :: Word16 -> Builder
, putWord32le -- :: Word32 -> Builder
, putWord64le -- :: Word64 -> Builder
-- ** Host-endian, unaligned writes
, putWordhost -- :: Word -> Builder
, putWord16host -- :: Word16 -> Builder
, putWord32host -- :: Word32 -> Builder
, putWord64host -- :: Word64 -> Builder
) where
import Data.Monoid
import Data.Word
import Foreign.ForeignPtr
import Foreign.Ptr (Ptr,plusPtr)
import Foreign.Storable
import System.IO.Unsafe (unsafePerformIO)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Internal as S
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
import GHC.Base (Int(..), uncheckedShiftRL#)
import GHC.Word (Word32(..),Word16(..),Word64(..))
#if WORD_SIZE_IN_BITS < 64 && __GLASGOW_HASKELL__ >= 608
import GHC.Word (uncheckedShiftRL64#)
#endif
#else
import Data.Word
#endif
------------------------------------------------------------------------
-- | A 'Builder' is an efficient way to build lazy 'L.ByteString's.
-- There are several functions for constructing 'Builder's, but only one
-- to inspect them: to extract any data, you have to turn them into lazy
-- 'L.ByteString's using 'toLazyByteString'.
--
-- Internally, a 'Builder' constructs a lazy 'L.Bytestring' by filling byte
-- arrays piece by piece. As each buffer is filled, it is \'popped\'
-- off, to become a new chunk of the resulting lazy 'L.ByteString'.
-- All this is hidden from the user of the 'Builder'.
newtype Builder = Builder {
-- Invariant (from Data.ByteString.Lazy):
-- The lists include no null ByteStrings.
runBuilder :: (Buffer -> [S.ByteString]) -> Buffer -> [S.ByteString]
}
instance Monoid Builder where
mempty = empty
{-# INLINE mempty #-}
mappend = append
{-# INLINE mappend #-}
------------------------------------------------------------------------
-- | /O(1)./ The empty Builder, satisfying
--
-- * @'toLazyByteString' 'empty' = 'L.empty'@
--
empty :: Builder
empty = Builder id
{-# INLINE empty #-}
-- | /O(1)./ A Builder taking a single byte, satisfying
--
-- * @'toLazyByteString' ('singleton' b) = 'L.singleton' b@
--
singleton :: Word8 -> Builder
singleton = writeN 1 . flip poke
{-# INLINE singleton #-}
------------------------------------------------------------------------
-- | /O(1)./ The concatenation of two Builders, an associative operation
-- with identity 'empty', satisfying
--
-- * @'toLazyByteString' ('append' x y) = 'L.append' ('toLazyByteString' x) ('toLazyByteString' y)@
--
append :: Builder -> Builder -> Builder
append (Builder f) (Builder g) = Builder (f . g)
{-# INLINE append #-}
-- | /O(1)./ A Builder taking a 'S.ByteString', satisfying
--
-- * @'toLazyByteString' ('fromByteString' bs) = 'L.fromChunks' [bs]@
--
fromByteString :: S.ByteString -> Builder
fromByteString bs
| S.null bs = empty
| otherwise = flush `append` mapBuilder (bs :)
{-# INLINE fromByteString #-}
-- | /O(1)./ A Builder taking a lazy 'L.ByteString', satisfying
--
-- * @'toLazyByteString' ('fromLazyByteString' bs) = bs@
--
fromLazyByteString :: L.ByteString -> Builder
fromLazyByteString bss = flush `append` mapBuilder (L.toChunks bss ++)
{-# INLINE fromLazyByteString #-}
------------------------------------------------------------------------
-- Our internal buffer type
data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8)
{-# UNPACK #-} !Int -- offset
{-# UNPACK #-} !Int -- used bytes
{-# UNPACK #-} !Int -- length left
------------------------------------------------------------------------
toByteString :: Builder -> S.ByteString
toByteString m = S.concat $ unsafePerformIO $ do
buf <- newBuffer defaultSize
return (runBuilder (m `append` flush) (const []) buf)
-- | /O(n)./ Extract a lazy 'L.ByteString' from a 'Builder'.
-- The construction work takes place if and when the relevant part of
-- the lazy 'L.ByteString' is demanded.
--
toLazyByteString :: Builder -> L.ByteString
toLazyByteString m = L.fromChunks $ unsafePerformIO $ do
buf <- newBuffer defaultSize
return (runBuilder (m `append` flush) (const []) buf)
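-- A small usage sketch (illustrative): a length-prefixed payload built from
-- the primitives in this module.
--
-- > toByteString (putWord16be 3 `append` fromByteString (S.pack [1,2,3]))
-- > == S.pack [0,3,1,2,3]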
-- | /O(1)./ Pop the 'S.ByteString' we have constructed so far, if any,
-- yielding a new chunk in the result lazy 'L.ByteString'.
flush :: Builder
flush = Builder $ \ k buf@(Buffer p o u l) ->
if u == 0
then k buf
else S.PS p o u : k (Buffer p (o+u) 0 l)
------------------------------------------------------------------------
--
-- copied from Data.ByteString.Lazy
--
defaultSize :: Int
defaultSize = 32 * k - overhead
where k = 1024
overhead = 2 * sizeOf (undefined :: Int)
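-- On a 64-bit platform (sizeOf (undefined :: Int) == 8) this works out to
-- 32 * 1024 - 16 = 32752 bytes, i.e. just under 32k once the constructor
-- overhead is accounted for.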
------------------------------------------------------------------------
-- | Sequence an IO operation on the buffer
unsafeLiftIO :: (Buffer -> IO Buffer) -> Builder
unsafeLiftIO f = Builder $ \ k buf -> S.inlinePerformIO $ do
buf' <- f buf
return (k buf')
{-# INLINE unsafeLiftIO #-}
-- | Get the size of the buffer
withSize :: (Int -> Builder) -> Builder
withSize f = Builder $ \ k buf@(Buffer _ _ _ l) ->
runBuilder (f l) k buf
-- | Map the resulting list of bytestrings.
mapBuilder :: ([S.ByteString] -> [S.ByteString]) -> Builder
mapBuilder f = Builder (f .)
------------------------------------------------------------------------
-- | Ensure that there are at least @n@ many bytes available.
ensureFree :: Int -> Builder
ensureFree n = n `seq` withSize $ \ l ->
if n <= l then empty else
flush `append` unsafeLiftIO (const (newBuffer (max n defaultSize)))
{-# INLINE ensureFree #-}
-- | Ensure that @n@ many bytes are available, and then use @f@ to write some
-- bytes into the memory.
writeN :: Int -> (Ptr Word8 -> IO ()) -> Builder
writeN n f = ensureFree n `append` unsafeLiftIO (writeNBuffer n f)
{-# INLINE writeN #-}
writeNBuffer :: Int -> (Ptr Word8 -> IO ()) -> Buffer -> IO Buffer
writeNBuffer n f (Buffer fp o u l) = do
withForeignPtr fp (\p -> f (p `plusPtr` (o+u)))
return (Buffer fp o (u+n) (l-n))
{-# INLINE writeNBuffer #-}
newBuffer :: Int -> IO Buffer
newBuffer size = do
fp <- S.mallocByteString size
return $! Buffer fp 0 0 size
{-# INLINE newBuffer #-}
------------------------------------------------------------------------
-- Aligned, host order writes of storable values
-- | Ensure that @n@ many bytes are available, and then use @f@ to write some
-- storable values into the memory.
writeNbytes :: Storable a => Int -> (Ptr a -> IO ()) -> Builder
writeNbytes n f = ensureFree n `append` unsafeLiftIO (writeNBufferBytes n f)
{-# INLINE writeNbytes #-}
writeNBufferBytes :: Storable a => Int -> (Ptr a -> IO ()) -> Buffer -> IO Buffer
writeNBufferBytes n f (Buffer fp o u l) = do
withForeignPtr fp (\p -> f (p `plusPtr` (o+u)))
return (Buffer fp o (u+n) (l-n))
{-# INLINE writeNBufferBytes #-}
------------------------------------------------------------------------
--
-- We rely on the fromIntegral to do the right masking for us.
-- The inlining here is critical, and can be worth 4x performance
--
-- | Write a Word16 in big endian format
putWord16be :: Word16 -> Builder
putWord16be w = writeN 2 $ \p -> do
poke p (fromIntegral (shiftr_w16 w 8) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (w) :: Word8)
{-# INLINE putWord16be #-}
-- | Write a Word16 in little endian format
putWord16le :: Word16 -> Builder
putWord16le w = writeN 2 $ \p -> do
poke p (fromIntegral (w) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w16 w 8) :: Word8)
{-# INLINE putWord16le #-}
-- putWord16le w16 = writeN 2 (\p -> poke (castPtr p) w16)
-- | Write a Word32 in big endian format
putWord32be :: Word32 -> Builder
putWord32be w = writeN 4 $ \p -> do
poke p (fromIntegral (shiftr_w32 w 24) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w32 w 16) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w32 w 8) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (w) :: Word8)
{-# INLINE putWord32be #-}
--
-- a data type to tag Put/Check. writes construct these which are then
-- inlined and flattened. matching Checks will be more robust with rules.
--
-- | Write a Word32 in little endian format
putWord32le :: Word32 -> Builder
putWord32le w = writeN 4 $ \p -> do
poke p (fromIntegral (w) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w32 w 8) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w32 w 16) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (shiftr_w32 w 24) :: Word8)
{-# INLINE putWord32le #-}
-- on a little endian machine:
-- putWord32le w32 = writeN 4 (\p -> poke (castPtr p) w32)
-- | Write a Word64 in big endian format
putWord64be :: Word64 -> Builder
#if WORD_SIZE_IN_BITS < 64
--
-- To avoid expensive 64 bit shifts on 32 bit machines, we cast to
-- Word32, and write that
--
putWord64be w =
let a = fromIntegral (shiftr_w64 w 32) :: Word32
b = fromIntegral w :: Word32
in writeN 8 $ \p -> do
poke p (fromIntegral (shiftr_w32 a 24) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w32 a 16) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w32 a 8) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (a) :: Word8)
poke (p `plusPtr` 4) (fromIntegral (shiftr_w32 b 24) :: Word8)
poke (p `plusPtr` 5) (fromIntegral (shiftr_w32 b 16) :: Word8)
poke (p `plusPtr` 6) (fromIntegral (shiftr_w32 b 8) :: Word8)
poke (p `plusPtr` 7) (fromIntegral (b) :: Word8)
#else
putWord64be w = writeN 8 $ \p -> do
poke p (fromIntegral (shiftr_w64 w 56) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w64 w 48) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w64 w 40) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (shiftr_w64 w 32) :: Word8)
poke (p `plusPtr` 4) (fromIntegral (shiftr_w64 w 24) :: Word8)
poke (p `plusPtr` 5) (fromIntegral (shiftr_w64 w 16) :: Word8)
poke (p `plusPtr` 6) (fromIntegral (shiftr_w64 w 8) :: Word8)
poke (p `plusPtr` 7) (fromIntegral (w) :: Word8)
#endif
{-# INLINE putWord64be #-}
-- | Write a Word64 in little endian format
putWord64le :: Word64 -> Builder
#if WORD_SIZE_IN_BITS < 64
putWord64le w =
let b = fromIntegral (shiftr_w64 w 32) :: Word32
a = fromIntegral w :: Word32
in writeN 8 $ \p -> do
poke (p) (fromIntegral (a) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w32 a 8) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w32 a 16) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (shiftr_w32 a 24) :: Word8)
poke (p `plusPtr` 4) (fromIntegral (b) :: Word8)
poke (p `plusPtr` 5) (fromIntegral (shiftr_w32 b 8) :: Word8)
poke (p `plusPtr` 6) (fromIntegral (shiftr_w32 b 16) :: Word8)
poke (p `plusPtr` 7) (fromIntegral (shiftr_w32 b 24) :: Word8)
#else
putWord64le w = writeN 8 $ \p -> do
poke p (fromIntegral (w) :: Word8)
poke (p `plusPtr` 1) (fromIntegral (shiftr_w64 w 8) :: Word8)
poke (p `plusPtr` 2) (fromIntegral (shiftr_w64 w 16) :: Word8)
poke (p `plusPtr` 3) (fromIntegral (shiftr_w64 w 24) :: Word8)
poke (p `plusPtr` 4) (fromIntegral (shiftr_w64 w 32) :: Word8)
poke (p `plusPtr` 5) (fromIntegral (shiftr_w64 w 40) :: Word8)
poke (p `plusPtr` 6) (fromIntegral (shiftr_w64 w 48) :: Word8)
poke (p `plusPtr` 7) (fromIntegral (shiftr_w64 w 56) :: Word8)
#endif
{-# INLINE putWord64le #-}
-- on a little endian machine:
-- putWord64le w64 = writeN 8 (\p -> poke (castPtr p) w64)
------------------------------------------------------------------------
-- Unaligned, word size ops
-- | /O(1)./ A Builder taking a single native machine word. The word is
-- written in host order, host endian form, for the machine you're on.
-- On a 64 bit machine the Word is an 8 byte value, on a 32 bit machine,
-- 4 bytes. Values written this way are not portable to
-- different endian or word sized machines, without conversion.
--
putWordhost :: Word -> Builder
putWordhost w = writeNbytes (sizeOf (undefined :: Word)) (\p -> poke p w)
{-# INLINE putWordhost #-}
-- | Write a Word16 in native host order and host endianness.
-- 2 bytes will be written, unaligned.
putWord16host :: Word16 -> Builder
putWord16host w16 = writeNbytes (sizeOf (undefined :: Word16)) (\p -> poke p w16)
{-# INLINE putWord16host #-}
-- | Write a Word32 in native host order and host endianness.
-- 4 bytes will be written, unaligned.
putWord32host :: Word32 -> Builder
putWord32host w32 = writeNbytes (sizeOf (undefined :: Word32)) (\p -> poke p w32)
{-# INLINE putWord32host #-}
-- | Write a Word64 in native host order.
-- On a 32 bit machine we write two host order Word32s, in big endian form.
-- 8 bytes will be written, unaligned.
putWord64host :: Word64 -> Builder
putWord64host w = writeNbytes (sizeOf (undefined :: Word64)) (\p -> poke p w)
{-# INLINE putWord64host #-}
------------------------------------------------------------------------
-- Unchecked shifts
{-# INLINE shiftr_w16 #-}
shiftr_w16 :: Word16 -> Int -> Word16
{-# INLINE shiftr_w32 #-}
shiftr_w32 :: Word32 -> Int -> Word32
{-# INLINE shiftr_w64 #-}
shiftr_w64 :: Word64 -> Int -> Word64
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
shiftr_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftRL#` i)
shiftr_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftRL#` i)
#if WORD_SIZE_IN_BITS < 64
shiftr_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftRL64#` i)
#if __GLASGOW_HASKELL__ <= 606
-- Exported by GHC.Word in GHC 6.8 and higher
foreign import ccall unsafe "stg_uncheckedShiftRL64"
uncheckedShiftRL64# :: Word64# -> Int# -> Word64#
#endif
#else
shiftr_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftRL#` i)
#endif
#else
shiftr_w16 = shiftR
shiftr_w32 = shiftR
shiftr_w64 = shiftR
#endif
|
triplepointfive/cereal
|
src/Data/Serialize/Builder.hs
|
bsd-3-clause
| 15,406 | 0 | 15 | 3,335 | 3,040 | 1,702 | 1,338 | 186 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Data.Diverse.AFoldable where
-- | Constrained Foldable for a specific type, instead of for all types.
class AFoldable t a where
afoldr :: (a -> b -> b) -> b -> t a -> b
afoldl' :: AFoldable t a => (b -> a -> b) -> b -> t a -> b
afoldl' f z0 xs = afoldr f' id xs z0
where f' x k z = k $! f z x
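-- A minimal instance sketch (the "Pair" type is hypothetical and serves only
-- to illustrate the intended traversal order):
--
-- > data Pair a = Pair a a
-- > instance AFoldable Pair Int where
-- >     afoldr f z (Pair x y) = f x (f y z)
-- >
-- > afoldl' (+) (0 :: Int) (Pair 1 2) == 3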
|
louispan/data-diverse
|
src/Data/Diverse/AFoldable.hs
|
bsd-3-clause
| 346 | 0 | 10 | 87 | 135 | 69 | 66 | 7 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad
import Data.List
import Data.Maybe
import Data.Monoid
import System.FilePath as System
import System.IO as System
import qualified Distribution.ModuleName as Cabal
import qualified Distribution.Package as Cabal
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription.Parse as Cabal
import qualified System.Directory as System
import qualified System.Environment as System
import qualified System.Exit as System
data Input = Input
{ inputPackage :: !Cabal.PackageDescription
, inputLibraries :: ![Cabal.Library]
, inputExecutables :: ![Cabal.Executable]
, inputTestSuites :: ![Cabal.TestSuite]
, inputBenchmarks :: ![Cabal.Benchmark]
}
deriving (Eq, Show)
class Artifact a where
artifactBuildInfo :: a -> Cabal.BuildInfo
artifactDirs :: a -> [FilePath]
artifactDirs = Cabal.hsSourceDirs . artifactBuildInfo
artifactModules :: a -> [Cabal.ModuleName]
artifactModules = Cabal.otherModules . artifactBuildInfo
main :: IO ()
main = do
System.getArgs >>= \args -> case args of
[] -> run Nothing
["-"] -> run Nothing
[path] -> run (Just path)
_ ->
printHelp >> System.exitFailure
where
run maybePath = mapM_ putStrLn =<< listCabalSources maybePath
printHelp :: IO ()
printHelp = do
self <- System.getProgName
display $ "Print source and data file paths referenced from a Cabal file (or from"
display $ "STDIN, in which case the file is assumed to be in current directory)."
display $ "Usage: " <> self <> " [CABAL_FILE/-]"
where
display = hPutStrLn System.stderr
listCabalSources :: Maybe FilePath -> IO [FilePath]
listCabalSources maybePath = do
text <- maybe System.getContents System.readFile maybePath
case parseCabalFile text of
Left error ->
fail error
Right input ->
let path = maybe "." id maybePath
in listCabalSources' path input
listCabalSources' :: FilePath -> Input -> IO [FilePath]
listCabalSources' path Input{..} = do
  libraryPaths <- concat <$> forM inputLibraries locateModules
  testPaths <- concat <$> forM inputTestSuites locateModules
  benchmarkPaths <- concat <$> forM inputBenchmarks locateModules
  exePaths <- fmap concat $ forM inputExecutables $ \exe -> do
let dirs = (Cabal.hsSourceDirs . Cabal.buildInfo) exe
modulePaths <- locateModules exe
mainPath <- locateFile (dirs <> ["."]) (Cabal.modulePath exe)
return $ modulePaths <> maybeToList mainPath
let dataPaths = (baseDir </>) . (Cabal.dataDir inputPackage </>) <$>
Cabal.dataFiles inputPackage
extraPaths = (baseDir </> ) <$>
Cabal.licenseFiles inputPackage
<> Cabal.extraSrcFiles inputPackage
<> Cabal.extraDocFiles inputPackage
let allPaths = concat
[ exePaths
, libraryPaths
, dataPaths
, extraPaths
, testPaths
, benchmarkPaths
]
in return $ uniq $ sort $ allPaths
where
locateModules :: Artifact artifact => artifact -> IO [FilePath]
locateModules artifact =
let dirs = artifactDirs artifact
modules = artifactModules artifact
toFilePath moduleName = Cabal.toFilePath moduleName <> ".hs"
in catMaybes <$> mapM (locateFile dirs) (toFilePath <$> modules)
locateFile :: [FilePath] -> FilePath -> IO (Maybe FilePath)
locateFile possibleDirs relativePath = do
let possiblePath = (baseDir </>) . (</> relativePath)
firstFile = findFirstM (System.doesFileExist . possiblePath) possibleDirs
in fmap possiblePath <$> firstFile
baseDir = System.takeDirectory path
findFirstM :: Monad m => (a -> m Bool) -> [a] -> m (Maybe a)
findFirstM f (x:xs) =
f x >>= \stop -> if stop then return (Just x) else findFirstM f xs
findFirstM _ [] =
return Nothing
uniq :: Eq a => [a] -> [a]
uniq (x:y:xs)
| x == y = uniq (x:xs)
| otherwise = x : uniq (y:xs)
uniq [x] = [x]
uniq [] = []
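-- Note that 'uniq' only collapses *adjacent* duplicates, which is why callers
-- sort first, e.g.
--
-- > uniq (sort ["b", "a", "b"]) == ["a", "b"]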
instance Artifact Cabal.Executable where
artifactBuildInfo = Cabal.buildInfo
instance Artifact Cabal.Library where
artifactBuildInfo = Cabal.libBuildInfo
artifactModules library =
Cabal.exposedModules library <>
(Cabal.otherModules . artifactBuildInfo) library
instance Artifact Cabal.TestSuite where
artifactBuildInfo = Cabal.testBuildInfo
instance Artifact Cabal.Benchmark where
artifactBuildInfo = Cabal.benchmarkBuildInfo
parseCabalFile :: String -> Either String Input
parseCabalFile text =
case Cabal.parsePackageDescription text of
Cabal.ParseOk _ Cabal.GenericPackageDescription{..} ->
Right $ Input
{ inputPackage = packageDescription
, inputLibraries = maybe [] flatten condLibrary
, inputExecutables = concatMap (flatten . snd) condExecutables
, inputTestSuites = concatMap (flatten . snd) condTestSuites
, inputBenchmarks = concatMap (flatten . snd) condBenchmarks
}
Cabal.ParseFailed error ->
Left $ show error
where
flatten :: Cabal.CondTree v c a -> [a]
flatten Cabal.CondNode{..} =
let recurse (_, tree, maybeTree) = flatten tree <> maybe [] flatten maybeTree
in condTreeData : concatMap recurse condTreeComponents
|
PlushBeaver/cabal-list-sources
|
app/Main.hs
|
bsd-3-clause
| 5,730 | 0 | 18 | 1,580 | 1,566 | 804 | 762 | 137 | 5 |
module Challenge where
safeLast :: [a] -> Maybe a
safeLast [] = Nothing
safeLast xs = Just $ last xs
rangeStr :: Int -> Int -> String
rangeStr s e = show s ++ "->" ++ show e
endOfRange :: [Int] -> Maybe (Int, Int)
endOfRange [] = Nothing
endOfRange (x:xs) = safeLast matches
where
matches = takeWhile (\(i, y) -> x + i == y) (zip [1..] xs)
ranges :: [Int] -> [String]
ranges [] = []
ranges xs = ranges' xs []
where
ranges' [] = id
ranges' ys@(h:t) = case endOfRange ys of
Just (i, z) -> ranges' (drop i t) . (++ [rangeStr h z])
Nothing -> ranges' t
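-- A worked example (derived from the definitions above): consecutive runs are
-- collapsed, while lone elements are dropped by the Nothing branch:
--
-- > ranges [1,2,3,7,8,10] == ["1->3","7->8"]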
|
mindm/2017Challenges
|
challenge_6/haskell/halogenandtoast/src/Challenge.hs
|
mit
| 625 | 0 | 13 | 189 | 305 | 159 | 146 | 17 | 3 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Qi.Test.Config.DDB.Simple where
{-
import Control.Lens
import Control.Monad (void)
import Data.Aeson
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Aeson.Lens (key, nth)
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Default (def)
import Qi.Config.AWS.DDB (DdbAttrDef (..), DdbAttrType (..),
DdbProvCap (..))
import Qi.Program.Config.Interface (ConfigProgram, ddbTable)
import Test.Tasty.Hspec
import Config (getConfig, getOutputs,
getResources, getTemplate)
import Protolude
import Util
configProgram :: ConfigProgram ()
configProgram = do
table <- ddbTable "things" (DdbAttrDef "name" S) def
return ()
spec :: Spec
spec = describe "Template" $ do
let template = getTemplate $ getConfig configProgram
expectedDdbTableLogicalName = "thingsDynamoDBTable"
it "saves test template" $
LBS.writeFile "tests/artifacts/ddb_simple_test_template.json" $ encodePretty template
context "Resources" $ do
let resources = getResources template
-- Table
------------
it "has the expected Table resource under the correct logical name" $
resources `shouldContainKey` expectedDdbTableLogicalName
context "Table" $ do
let resource = getValueUnderKey expectedDdbTableLogicalName resources
it "contains correct resource type" $
resource `shouldContainKVPair` ("Type", String "AWS::DynamoDB::Table")
-- Properties
context "Properties" $ do
let properties = getValueUnderKey "Properties" resource
it "specifies correct primary key AttributeDefinitions" $
properties `shouldContainKVPair` ("AttributeDefinitions", Array [object [
("AttributeType", String "S")
, ("AttributeName", String "name")
]])
it "specifies correct default provisioned capacity" $
properties `shouldContainKVPair` ("ProvisionedThroughput", object [
("ReadCapacityUnits", String "2")
, ("WriteCapacityUnits", String "2")
])
it "specifies correct KeySchema" $
properties `shouldContainKVPair` ("KeySchema", Array [object [
("KeyType", String "HASH")
, ("AttributeName", String "name")
]])
it "specifies correct physical name" $
properties `shouldContainKVPair` ("TableName", String "testApp_things")
-}
|
qmuli/qmuli
|
tests/Qi/Test/Config/DDB/Simple.hs
|
mit
| 2,827 | 0 | 3 | 877 | 13 | 11 | 2 | 4 | 0 |
{- |
Module : ./SoftFOL/Translate.hs
Description : utility to create valid identifiers for atp provers
Copyright : (c) Klaus Luettich, Uni Bremen 2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
collection of functions used by "Comorphisms.SuleCFOL2SoftFOL" and
"SoftFOL.ProveSPASS" for the translation of CASL identifiers and axiom labels
into valid SoftFOL identifiers -}
module SoftFOL.Translate
( transId
, transSenName
, checkIdentifier
, CKType (..)
) where
import Data.Char
import qualified Data.Set as Set
import Common.Id
import Common.ProofUtils
import SoftFOL.Sign
data CKType = CKSort | CKVar | CKPred | CKOp
-- | collect all keywords of SoftFOL
reservedWords :: Set.Set String
reservedWords = Set.fromList $ map showSPSymbol
[ SPEqual
, SPTrue
, SPFalse
, SPOr
, SPAnd
, SPNot
, SPImplies
, SPImplied
, SPEquiv]
{- this list of reserved words has been generated with:
perl HetCATS/utils/transformLexerFile.pl spass-3.0c/dfgscanner.l -}
++ concatMap words
[ "and author axioms begin_problem by box all clause cnf comp"
, "conjectures conv date description dia some div dnf domain"
, "domrestr eml EML DL end_of_list end_problem equal equiv"
, "exists false forall formula freely functions generated"
, "hypothesis id implied implies list_of_clauses list_of_declarations"
, "list_of_descriptions list_of_formulae list_of_general_settings"
, "list_of_proof list_of_settings list_of_special_formulae"
, "list_of_symbols list_of_terms logic name not operators"
, "or prop_formula concept_formula predicate predicates quantifiers"
, "ranrestr range rel_formula role_formula satisfiable set_DomPred"
, "set_flag set_precedence set_ClauseFormulaRelation set_selection"
, "sort sorts status step subsort sum test translpairs true"
, "unknown unsatisfiable version static"]
++ map (('e' :) . show) [0 .. 20 :: Int]
++ map (("fmdarwin_e" ++) . show) [0 .. 20 :: Int]
transSenName :: String -> String
transSenName = transIdString CKSort . concatMap transToSPChar
{- |
SPASS Identifiers may contain letters, digits, and underscores only; but
for TPTP the allowed starting letters are different for each sort of
identifier.
-}
checkIdentifier :: CKType -> String -> Bool
checkIdentifier t str = case str of
"" -> False
c : _ -> all checkSPChar str && case t of
CKVar -> isUpper c -- for TPTP
_ -> isLower c
{- |
Allowed SPASS characters are letters, digits, and underscores.
Warning:
Data.Char.isAlphaNum includes all kinds of isolatin1 characters!! -}
checkSPChar :: Char -> Bool
checkSPChar c = isAlphaNum c && isAscii c || '_' == c
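-- A small illustrative value (an assumed addition, not part of the original
-- module): sorts, operations and predicates must start with a lower-case
-- letter, while TPTP variables must start with an upper-case one.
_exampleChecks :: [Bool]
_exampleChecks =
  [ checkIdentifier CKSort "s_nat" -- True: lower-case start, only allowed characters
  , checkIdentifier CKVar "X1"     -- True: TPTP variables are upper-case
  , checkIdentifier CKVar "x1"     -- False: lower-case start is rejected for variables
  ]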
transId :: CKType -> Id -> SPIdentifier
transId t = mkSimpleId . transIdString t . concatMap transToSPChar . show
transIdString :: CKType -> String -> String
transIdString t str = case str of
"" -> error "SoftFOL.Translate.transId: empty string not allowed"
c : r -> if isDigit c then transIdString t $ substDigit c ++ r
else case t of
CKOp | '_' == c -> 'o' : str
CKPred | '_' == c -> 'p' : str
CKVar -> toUpper c : r
_ -> let lstr = toLower c : r in
if Set.member lstr reservedWords then "x_" ++ str else lstr
transToSPChar :: Char -> String
transToSPChar c
| checkSPChar c = [c]
| elem c " \n" = "_"
| otherwise = lookupCharMap c
substDigit :: Char -> String
substDigit c = case c of
'0' -> "zero"
'1' -> "one"
'2' -> "two"
'3' -> "three"
'4' -> "four"
'5' -> "five"
'6' -> "six"
'7' -> "seven"
'8' -> "eight"
'9' -> "nine"
_ -> [c]
|
spechub/Hets
|
SoftFOL/Translate.hs
|
gpl-2.0
| 3,654 | 0 | 17 | 743 | 721 | 382 | 339 | 78 | 11 |
module Main where
import PathFind
f a b c = a (findPath dijkstra b) c
|
graninas/The-Amoeba-World
|
src/Amoeba/Test/Experiments/ModuleAbstraction/Main.hs
|
gpl-3.0
| 70 | 0 | 7 | 15 | 32 | 17 | 15 | 3 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE StrictData #-}
module Network.Tox.Time where
import qualified System.Clock as Clock
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
{-------------------------------------------------------------------------------
-
- :: Implementation.
-
------------------------------------------------------------------------------}
newtype Timestamp = Timestamp Clock.TimeSpec
deriving (Eq, Ord, Show, Read)
newtype TimeDiff = TimeDiff Clock.TimeSpec
deriving (Eq, Ord, Show, Read)
instance Num TimeDiff where
TimeDiff t + TimeDiff t' = TimeDiff $ t Prelude.+ t'
TimeDiff t - TimeDiff t' = TimeDiff $ t Prelude.- t'
TimeDiff t * TimeDiff t' = TimeDiff $ t * t'
negate (TimeDiff t) = TimeDiff $ negate t
abs (TimeDiff t) = TimeDiff $ abs t
signum (TimeDiff t) = TimeDiff $ signum t
fromInteger = TimeDiff . fromInteger
seconds :: Integer -> TimeDiff
seconds s = TimeDiff $ Clock.TimeSpec (fromIntegral s) 0
milliseconds :: Integer -> TimeDiff
milliseconds = TimeDiff . Clock.TimeSpec 0 . (*10^(6::Integer)) . fromIntegral
getTime :: IO Timestamp
getTime = Timestamp <$> Clock.getTime Clock.Monotonic
(-) :: Timestamp -> Timestamp -> TimeDiff
Timestamp t - Timestamp t' = TimeDiff $ t Prelude.- t'
(+) :: Timestamp -> TimeDiff -> Timestamp
Timestamp t + TimeDiff t' = Timestamp $ t Prelude.+ t'
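-- Small illustrative constants (assumed additions, not part of the original
-- module), showing how intervals are built with the helpers above.
_exampleIterationInterval :: TimeDiff
_exampleIterationInterval = milliseconds 50
_exampleTimeout :: TimeDiff
_exampleTimeout = seconds 20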
{-------------------------------------------------------------------------------
-
- :: Tests.
-
------------------------------------------------------------------------------}
instance Arbitrary Timestamp
where arbitrary = (Timestamp <$>) $ Clock.TimeSpec <$> arbitrary <*> arbitrary
instance Arbitrary TimeDiff
where arbitrary = (TimeDiff <$>) $ Clock.TimeSpec <$> arbitrary <*> arbitrary
|
iphydf/hs-toxcore
|
src/Network/Tox/Time.hs
|
gpl-3.0
| 1,785 | 0 | 9 | 295 | 499 | 260 | 239 | 31 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.OpsWorks.DeleteLayer
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a specified layer. You must first stop and then delete all
-- associated instances or unassign registered instances. For more
-- information, see
-- <http://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-basics-delete.html How to Delete a Layer>.
--
-- __Required Permissions__: To use this action, an IAM user must have a
-- Manage permissions level for the stack, or an attached policy that
-- explicitly grants permissions. For more information on user permissions,
-- see
-- <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing User Permissions>.
--
-- /See:/ <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DeleteLayer.html AWS API Reference> for DeleteLayer.
module Network.AWS.OpsWorks.DeleteLayer
(
-- * Creating a Request
deleteLayer
, DeleteLayer
-- * Request Lenses
, dlLayerId
-- * Destructuring the Response
, deleteLayerResponse
, DeleteLayerResponse
) where
import Network.AWS.OpsWorks.Types
import Network.AWS.OpsWorks.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'deleteLayer' smart constructor.
newtype DeleteLayer = DeleteLayer'
{ _dlLayerId :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteLayer' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dlLayerId'
deleteLayer
:: Text -- ^ 'dlLayerId'
-> DeleteLayer
deleteLayer pLayerId_ =
DeleteLayer'
{ _dlLayerId = pLayerId_
}
-- | The layer ID.
dlLayerId :: Lens' DeleteLayer Text
dlLayerId = lens _dlLayerId (\ s a -> s{_dlLayerId = a});
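-- A small illustrative value (an assumed addition; the layer ID below is made
-- up): a request that deletes the layer with the given ID.
_exampleDeleteLayer :: DeleteLayer
_exampleDeleteLayer = deleteLayer "1a2b3c4d-example-layer-id"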
instance AWSRequest DeleteLayer where
type Rs DeleteLayer = DeleteLayerResponse
request = postJSON opsWorks
response = receiveNull DeleteLayerResponse'
instance ToHeaders DeleteLayer where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("OpsWorks_20130218.DeleteLayer" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DeleteLayer where
toJSON DeleteLayer'{..}
= object (catMaybes [Just ("LayerId" .= _dlLayerId)])
instance ToPath DeleteLayer where
toPath = const "/"
instance ToQuery DeleteLayer where
toQuery = const mempty
-- | /See:/ 'deleteLayerResponse' smart constructor.
data DeleteLayerResponse =
DeleteLayerResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteLayerResponse' with the minimum fields required to make a request.
--
deleteLayerResponse
:: DeleteLayerResponse
deleteLayerResponse = DeleteLayerResponse'
|
fmapfmapfmap/amazonka
|
amazonka-opsworks/gen/Network/AWS/OpsWorks/DeleteLayer.hs
|
mpl-2.0
| 3,554 | 0 | 12 | 733 | 406 | 248 | 158 | 56 | 1 |
{-# LANGUAGE CPP #-}
-- | DFOV (Digital Field of View) implemented according to specification at <http://roguebasin.roguelikedevelopment.org/index.php?title=Digital_field_of_view_implementation>.
-- This fast version of the algorithm, based on "PFOV", has AFAIK
-- never been described nor implemented before.
module Game.LambdaHack.Server.Fov.Digital
( scan
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, dline, dsteeper, intersect, _debugSteeper, _debugLine
#endif
) where
import Control.Exception.Assert.Sugar
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Server.Fov.Common
-- | Calculates the list of tiles, in @Bump@ coordinates, visible from (0, 0),
-- within the given sight range.
scan :: Distance          -- ^ visibility distance
-> (Bump -> Bool) -- ^ clear tile predicate
-> [Bump]
{-# INLINE scan #-}
scan r isClear = assert (r > 0 `blame` r) $
-- The scanned area is a square, which is a sphere in the chessboard metric.
dscan 1 ( (Line (B 1 0) (B (-r) r), [B 0 0])
, (Line (B 0 0) (B (r+1) r), [B 1 0]) )
where
dscan :: Distance -> EdgeInterval -> [Bump]
dscan d ( s0@(sl{-shallow line-}, sHull0)
, e@(el{-steep line-}, eHull) ) =
let !ps0 = let (n, k) = intersect sl d -- minimal progress to consider
in n `div` k
!pe = let (n, k) = intersect el d -- maximal progress to consider
-- Corners obstruct view, so the steep line, constructed
-- from corners, is itself not a part of the view,
-- so if its intersection with the line of diagonals is only
-- at a corner, choose the diamond leading to a smaller view.
in -1 + n `divUp` k
inside = [B p d | p <- [ps0..pe]]
outside
| d >= r = []
| isClear (B ps0 d) = mscanVisible s0 (ps0+1) -- start visible
| otherwise = mscanShadowed (ps0+1) -- start in shadow
-- We're in a visible interval.
mscanVisible :: Edge -> Progress -> [Bump]
{-# INLINE mscanVisible #-}
mscanVisible s = go
where
go ps | ps > pe = dscan (d+1) (s, e) -- reached end, scan next
| not $ isClear steepBump = -- entering shadow
mscanShadowed (ps+1)
++ dscan (d+1) (s, (dline nep steepBump, neHull))
| otherwise = go (ps+1) -- continue in visible area
where
steepBump = B ps d
gte :: Bump -> Bump -> Bool
{-# INLINE gte #-}
gte = dsteeper steepBump
nep = maximal gte (snd s)
neHull = addHull gte steepBump eHull
-- We're in a shadowed interval.
mscanShadowed :: Progress -> [Bump]
mscanShadowed ps
| ps > pe = [] -- reached end while in shadow
| isClear shallowBump = -- moving out of shadow
mscanVisible (dline nsp shallowBump, nsHull) (ps+1)
| otherwise = mscanShadowed (ps+1) -- continue in shadow
where
shallowBump = B ps d
gte :: Bump -> Bump -> Bool
{-# INLINE gte #-}
gte = flip $ dsteeper shallowBump
nsp = maximal gte eHull
nsHull = addHull gte shallowBump sHull0
in assert (r >= d && d >= 0 && pe >= ps0 `blame` (r,d,s0,e,ps0,pe)) $
inside ++ outside
-- | Create a line from two points. Debug: check if well-defined.
dline :: Bump -> Bump -> Line
{-# INLINE dline #-}
dline p1 p2 =
let line = Line p1 p2
in
#ifdef WITH_EXPENSIVE_ASSERTIONS
assert (uncurry blame $ _debugLine line)
#endif
line
-- | Compare steepness of @(p1, f)@ and @(p2, f)@.
-- Debug: Verify that the results of 2 independent checks are equal.
dsteeper :: Bump -> Bump -> Bump -> Bool
{-# INLINE dsteeper #-}
dsteeper f p1 p2 =
#ifdef WITH_EXPENSIVE_ASSERTIONS
assert (res == _debugSteeper f p1 p2)
#endif
res
where res = steeper f p1 p2
-- | The X coordinate, represented as a fraction, of the intersection of
-- a given line and the line of diagonals of diamonds at distance
-- @d@ from (0, 0).
intersect :: Line -> Distance -> (Int, Int)
{-# INLINE intersect #-}
intersect (Line (B x y) (B xf yf)) d =
#ifdef WITH_EXPENSIVE_ASSERTIONS
assert (allB (>= 0) [y, yf])
#endif
((d - y)*(xf - x) + x*(yf - y), yf - y)
{-
Derivation of the formula:
The intersection point (xt, yt) satisfies the following equalities:
yt = d
(yt - y) (xf - x) = (xt - x) (yf - y)
hence
(yt - y) (xf - x) = (xt - x) (yf - y)
(d - y) (xf - x) = (xt - x) (yf - y)
(d - y) (xf - x) + x (yf - y) = xt (yf - y)
xt = ((d - y) (xf - x) + x (yf - y)) / (yf - y)
General remarks:
A diamond is denoted by its left corner. Hero at (0, 0).
Order of processing in the first quadrant rotated by 45 degrees is
45678
123
@
so the first processed diamond is at (-1, 1). The order is similar
as for the restrictive shadow casting algorithm and reversed wrt PFOV.
The line in the current state of mscan is called the shallow line,
but it's the one that delimits the view from the left, while the steep
line is on the right, opposite to PFOV. We start scanning from the left.
The Point coordinates are cartesian. The Bump coordinates are cartesian,
translated so that the hero is at (0, 0) and rotated so that he always
looks at the first (rotated 45 degrees) quadrant. The (Progress, Distance)
coordinates coincide with the Bump coordinates, unlike in PFOV.
-}
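-- A small illustrative value (an assumed addition, not part of the original
-- module): by the formula above, the line through B 0 0 and B 3 2 meets the
-- row of diagonals at distance 1 at X = ((1-0)*(3-0) + 0*(2-0)) / (2-0) = 3/2,
-- i.e. the fraction (3, 2).
_exampleIntersect :: (Int, Int)
_exampleIntersect = intersect (Line (B 0 0) (B 3 2)) 1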
-- | Debug functions for DFOV:
-- | Debug: calculate steeper for DFOV in another way and compare results.
_debugSteeper :: Bump -> Bump -> Bump -> Bool
{-# INLINE _debugSteeper #-}
_debugSteeper f@(B _xf yf) p1@(B _x1 y1) p2@(B _x2 y2) =
assert (allB (>= 0) [yf, y1, y2]) $
let (n1, k1) = intersect (Line p1 f) 0
(n2, k2) = intersect (Line p2 f) 0
in n1 * k2 >= k1 * n2
-- | Debug: check if a view border line for DFOV is legal.
_debugLine :: Line -> (Bool, String)
{-# INLINE _debugLine #-}
_debugLine line@(Line (B x1 y1) (B x2 y2))
| not (allB (>= 0) [y1, y2]) =
(False, "negative coordinates: " ++ show line)
| y1 == y2 && x1 == x2 =
(False, "ill-defined line: " ++ show line)
| y1 == y2 =
(False, "horizontal line: " ++ show line)
| crossL0 =
(False, "crosses the X axis below 0: " ++ show line)
| crossG1 =
(False, "crosses the X axis above 1: " ++ show line)
| otherwise = (True, "")
where
(n, k) = line `intersect` 0
(q, r) = if k == 0 then (0, 0) else n `divMod` k
crossL0 = q < 0 -- q truncated toward negative infinity
crossG1 = q >= 1 && (q > 1 || r /= 0)
|
Concomitant/LambdaHack
|
Game/LambdaHack/Server/Fov/Digital.hs
|
bsd-3-clause
| 6,643 | 0 | 18 | 1,874 | 1,585 | 863 | 722 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-- | This module only exports orphan 'Ser.Serialise' instances. Import as:
--
-- @
-- import "Money.Serialise" ()
-- @
module Money.Serialise () where
import qualified Codec.Serialise as Ser
import Control.Monad (when)
import Data.Ratio ((%), numerator, denominator)
import GHC.TypeLits (KnownSymbol)
import qualified Money
import qualified Money.Internal as MoneyI
import Data.Monoid ((<>))
--------------------------------------------------------------------------------
-- | Compatible with 'Money.SomeDense'.
instance (KnownSymbol currency) => Ser.Serialise (Money.Dense currency) where
encode = Ser.encode . Money.toSomeDense
decode = maybe (fail "Dense") pure =<< fmap Money.fromSomeDense Ser.decode
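-- A small illustrative property (an assumed addition, not part of the original
-- module): serialising a dense amount and reading it back is the identity.
_denseRoundtrip :: KnownSymbol currency => Money.Dense currency -> Bool
_denseRoundtrip d = Ser.deserialise (Ser.serialise d) == d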
-- | Compatible with 'Money.SomeDiscrete'.
instance
( KnownSymbol currency, Money.GoodScale scale
) => Ser.Serialise (Money.Discrete' currency scale) where
encode = Ser.encode . Money.toSomeDiscrete
decode = maybe (fail "Discrete'") pure
=<< fmap Money.fromSomeDiscrete Ser.decode
-- | Compatible with 'Money.SomeExchangeRate'.
instance
( KnownSymbol src, KnownSymbol dst
) => Ser.Serialise (Money.ExchangeRate src dst) where
encode = Ser.encode . Money.toSomeExchangeRate
decode = maybe (fail "ExchangeRate") pure
=<< fmap Money.fromSomeExchangeRate Ser.decode
instance Ser.Serialise Money.Scale where
encode = \s ->
let r = Money.scaleToRational s
in Ser.encode (numerator r) <>
Ser.encode (denominator r)
decode = maybe (fail "Scale") pure =<< do
n :: Integer <- Ser.decode
d :: Integer <- Ser.decode
when (d == 0) (fail "denominator is zero")
pure (MoneyI.scaleFromRational (n % d))
-- | Compatible with 'Money.Dense'.
instance Ser.Serialise Money.SomeDense where
encode = \sd ->
let r = Money.someDenseAmount sd
in Ser.encode (MoneyI.someDenseCurrency' sd) <>
Ser.encode (numerator r) <>
Ser.encode (denominator r)
decode = maybe (fail "SomeDense") pure =<< do
c :: String <- Ser.decode
n :: Integer <- Ser.decode
d :: Integer <- Ser.decode
when (d == 0) (fail "denominator is zero")
pure (MoneyI.mkSomeDense' c (n % d))
-- | Compatible with 'Money.Discrete'.
instance Ser.Serialise Money.SomeDiscrete where
encode = \sd ->
Ser.encode (MoneyI.someDiscreteCurrency' sd) <>
Ser.encode (Money.someDiscreteScale sd) <>
Ser.encode (Money.someDiscreteAmount sd)
decode = do
c :: String <- Ser.decode
s :: Money.Scale <- Ser.decode
a :: Integer <- Ser.decode
pure (MoneyI.mkSomeDiscrete' c s a)
-- | Compatible with 'Money.ExchangeRate'.
instance Ser.Serialise Money.SomeExchangeRate where
encode = \ser ->
let r = Money.someExchangeRateRate ser
in Ser.encode (MoneyI.someExchangeRateSrcCurrency' ser) <>
Ser.encode (MoneyI.someExchangeRateDstCurrency' ser) <>
Ser.encode (numerator r) <>
Ser.encode (denominator r)
decode = maybe (fail "SomeExchangeRate") pure =<< do
src :: String <- Ser.decode
dst :: String <- Ser.decode
n :: Integer <- Ser.decode
d :: Integer <- Ser.decode
when (d == 0) (fail "denominator is zero")
pure (MoneyI.mkSomeExchangeRate' src dst (n % d))
|
k0001/haskell-money
|
safe-money-serialise/src/Money/Serialise.hs
|
bsd-3-clause
| 3,436 | 0 | 15 | 643 | 1,008 | 514 | 494 | 76 | 0 |
-- Copyright (c) 2016 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts,
FlexibleInstances, UndecidableInstances #-}
module Control.Monad.CommentBuffer(
MonadCommentBuffer(..),
CommentBufferT,
CommentBuffer,
runCommentBufferT,
runCommentBuffer
) where
import Control.Applicative
import Control.Monad.Artifacts.Class
import Control.Monad.CommentBuffer.Class
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.Genpos.Class
import Control.Monad.Gensym.Class
import Control.Monad.GraphBuilder.Class
import Control.Monad.Journal
import Control.Monad.Keywords.Class
import Control.Monad.Loader.Class
import Control.Monad.Messages.Class
import Control.Monad.Positions.Class
import Control.Monad.ScopeBuilder.Class
import Control.Monad.SourceFiles.Class
import Control.Monad.SourceBuffer.Class
import Control.Monad.State
import Control.Monad.Symbols.Class
import Data.ByteString hiding (reverse, empty)
import Data.Position.Point
import qualified Data.ByteString.Lazy as Lazy
data BufferState =
BufferState {
partialComment :: ![Lazy.ByteString],
fullComments :: ![ByteString],
savedComments :: ![(Point, [ByteString])]
}
newtype CommentBufferT m a =
CommentBufferT { unpackCommentBufferT :: (StateT BufferState m) a }
type CommentBuffer = CommentBufferT IO
-- | Execute the computation wrapped in a CommentBufferT monad transformer.
runCommentBufferT :: Monad m =>
CommentBufferT m a
-- ^ The CommentBufferT monad transformer to execute.
-> m a
runCommentBufferT CommentBufferT { unpackCommentBufferT = c } =
let
initial = BufferState { partialComment = [],
fullComments = [],
savedComments = [] }
in do
(out, _) <- runStateT c initial
return out
-- | Execute the computation wrapped in a CommentBuffer monad.
runCommentBuffer :: CommentBuffer a
                 -- ^ The CommentBuffer monad computation to execute.
-> IO a
runCommentBuffer = runCommentBufferT
startComment' :: Monad m => (StateT BufferState m) ()
startComment' = return ()
appendComment' :: Monad m => Lazy.ByteString -> (StateT BufferState m) ()
appendComment' text =
do
s @ BufferState { partialComment = curr } <- get
put s { partialComment = text : curr }
finishComment' :: Monad m => (StateT BufferState m) ()
finishComment' =
do
s @ BufferState { partialComment = partial, fullComments = full } <- get
put s { fullComments = Lazy.toStrict (Lazy.concat (reverse partial)) : full,
partialComment = [] }
addComment' :: Monad m => Lazy.ByteString -> (StateT BufferState m) ()
addComment' text =
do
s @ BufferState { fullComments = full } <- get
put s { fullComments = Lazy.toStrict text : full }
saveCommentsAsPreceeding' :: Monad m => Point -> (StateT BufferState m) ()
saveCommentsAsPreceeding' pos =
do
s @ BufferState { fullComments = full, savedComments = saved } <- get
put s { savedComments = (pos, full) : saved }
clearComments' :: Monad m => (StateT BufferState m) ()
clearComments' =
do
s <- get
put s { fullComments = [] }
instance Monad m => Monad (CommentBufferT m) where
return = CommentBufferT . return
s >>= f = CommentBufferT $ unpackCommentBufferT s >>= unpackCommentBufferT . f
instance Monad m => Applicative (CommentBufferT m) where
pure = return
(<*>) = ap
instance (MonadPlus m, Alternative m) => Alternative (CommentBufferT m) where
empty = lift empty
s1 <|> s2 =
CommentBufferT (unpackCommentBufferT s1 <|> unpackCommentBufferT s2)
instance Monad m => Functor (CommentBufferT m) where
  fmap f = CommentBufferT . liftM f . unpackCommentBufferT
instance MonadIO m => MonadIO (CommentBufferT m) where
liftIO = CommentBufferT . liftIO
instance MonadTrans CommentBufferT where
lift = CommentBufferT . lift
instance Monad m => MonadCommentBuffer (CommentBufferT m) where
startComment = CommentBufferT startComment'
appendComment = CommentBufferT . appendComment'
finishComment = CommentBufferT finishComment'
addComment = CommentBufferT . addComment'
saveCommentsAsPreceeding = CommentBufferT . saveCommentsAsPreceeding'
clearComments = CommentBufferT clearComments'
instance MonadArtifacts path m => MonadArtifacts path (CommentBufferT m) where
artifact path = lift . artifact path
artifactBytestring path = lift . artifactBytestring path
artifactLazyBytestring path = lift . artifactLazyBytestring path
instance MonadCont m => MonadCont (CommentBufferT m) where
callCC f = CommentBufferT
(callCC (\c -> unpackCommentBufferT (f (CommentBufferT . c))))
instance MonadEdgeBuilder nodety m =>
MonadEdgeBuilder nodety (CommentBufferT m) where
addEdge src dst = lift . addEdge src dst
instance (MonadError e m) => MonadError e (CommentBufferT m) where
throwError = lift . throwError
m `catchError` h =
CommentBufferT (unpackCommentBufferT m `catchError`
(unpackCommentBufferT . h))
instance MonadGenpos m => MonadGenpos (CommentBufferT m) where
point = lift . point
filename = lift . filename
instance MonadGensym m => MonadGensym (CommentBufferT m) where
symbol = lift . symbol
unique = lift . unique
instance (Monoid w, MonadJournal w m) => MonadJournal w (CommentBufferT m) where
journal = lift . journal
history = lift history
clear = lift clear
instance MonadKeywords p t m => MonadKeywords p t (CommentBufferT m) where
mkKeyword p = lift . mkKeyword p
instance MonadLoader path info m =>
MonadLoader path info (CommentBufferT m) where
load = lift . load
instance MonadMessages msg m => MonadMessages msg (CommentBufferT m) where
message = lift . message
instance MonadNodeBuilder nodety m =>
MonadNodeBuilder nodety (CommentBufferT m) where
addNode = lift . addNode
instance MonadPositions m => MonadPositions (CommentBufferT m) where
pointInfo = lift . pointInfo
fileInfo = lift . fileInfo
instance MonadScopeStack m => MonadScopeStack (CommentBufferT m) where
enterScope = lift . enterScope
finishScope = lift finishScope
instance MonadScopeBuilder tmpscope m =>
MonadScopeBuilder tmpscope (CommentBufferT m) where
getScope = lift getScope
setScope = lift . setScope
instance MonadSourceFiles m => MonadSourceFiles (CommentBufferT m) where
sourceFile = lift . sourceFile
instance MonadSourceBuffer m => MonadSourceBuffer (CommentBufferT m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance MonadState s m => MonadState s (CommentBufferT m) where
get = lift get
put = lift . put
instance MonadSymbols m => MonadSymbols (CommentBufferT m) where
nullSym = lift nullSym
allNames = lift allNames
allSyms = lift allSyms
name = lift . name
instance MonadPlus m => MonadPlus (CommentBufferT m) where
mzero = lift mzero
mplus s1 s2 =
CommentBufferT (mplus (unpackCommentBufferT s1) (unpackCommentBufferT s2))
instance MonadFix m => MonadFix (CommentBufferT m) where
mfix f = CommentBufferT (mfix (unpackCommentBufferT . f))
|
saltlang/compiler-toolbox
|
src/Control/Monad/CommentBuffer.hs
|
bsd-3-clause
| 8,692 | 4 | 15 | 1,680 | 2,080 | 1,101 | 979 | 174 | 1 |
{-# LANGUAGE TypeOperators, TypeFamilies, MultiParamTypeClasses, GADTs, FlexibleContexts, FlexibleInstances, ScopedTypeVariables, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Category.Comma
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Comma categories.
-----------------------------------------------------------------------------
module Data.Category.Comma where
import Data.Category
import Data.Category.Functor
import Data.Category.Limit
import Data.Category.RepresentableFunctor
data CommaO :: * -> * -> * -> * where
CommaO :: (Cod t ~ k, Cod s ~ k)
=> Obj (Dom t) a -> k (t :% a) (s :% b) -> Obj (Dom s) b -> CommaO t s (a, b)
data (:/\:) :: * -> * -> * -> * -> * where
CommaA ::
CommaO t s (a, b) ->
Dom t a a' ->
Dom s b b' ->
CommaO t s (a', b') ->
(t :/\: s) (a, b) (a', b')
commaId :: CommaO t s (a, b) -> Obj (t :/\: s) (a, b)
commaId o@(CommaO a _ b) = CommaA o a b o
-- | The comma category T \\downarrow S
instance (Category (Dom t), Category (Dom s)) => Category (t :/\: s) where
src (CommaA so _ _ _) = commaId so
tgt (CommaA _ _ _ to) = commaId to
(CommaA _ g h to) . (CommaA so g' h' _) = CommaA so (g . g') (h . h') to
type (f `ObjectsFUnder` a) = ConstF f a :/\: f
type (f `ObjectsFOver` a) = f :/\: ConstF f a
type (c `ObjectsUnder` a) = Id c `ObjectsFUnder` a
type (c `ObjectsOver` a) = Id c `ObjectsFOver` a
initialUniversalComma :: forall u x c a a_
. (Functor u, c ~ (u `ObjectsFUnder` x), HasInitialObject c, (a_, a) ~ InitialObject c)
=> u -> InitialUniversal x u a
initialUniversalComma u = case initialObject :: Obj c (a_, a) of
CommaA (CommaO _ mor a) _ _ _ ->
initialUniversal u a mor factorizer
where
factorizer :: forall y. Obj (Dom u) y -> Cod u x (u :% y) -> Dom u a y
factorizer y arr = case (init (commaId (CommaO y arr y))) of CommaA _ _ f _ -> f
where
init :: Obj c (y, y) -> c (a_, a) (y, y)
init = initialize
terminalUniversalComma :: forall u x c a a_
. (Functor u, c ~ (u `ObjectsFOver` x), HasTerminalObject c, (a, a_) ~ TerminalObject c)
=> u -> TerminalUniversal x u a
terminalUniversalComma u = case terminalObject :: Obj c (a, a_) of
CommaA (CommaO a mor _) _ _ _ ->
terminalUniversal u a mor factorizer
where
factorizer :: forall y. Obj (Dom u) y -> Cod u (u :% y) x -> Dom u y a
factorizer y arr = case (term (commaId (CommaO y arr y))) of CommaA _ f _ _ -> f
where
term :: Obj c (y, y) -> c (y, y) (a, a_)
term = terminate
|
treeowl/data-category
|
Data/Category/Comma.hs
|
bsd-3-clause
| 2,853 | 0 | 16 | 787 | 1,142 | 616 | 526 | 46 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Network.IRC.Bot.Log where
import Data.Data
data LogLevel
= Debug
| Normal
| Important
deriving (Eq, Ord, Read, Show, Data, Typeable)
type Logger = LogLevel -> String -> IO ()
stdoutLogger :: LogLevel -> Logger
stdoutLogger minLvl msgLvl msg
| msgLvl >= minLvl = putStrLn msg
| otherwise = return ()
nullLogger :: Logger
nullLogger _ _ = return ()
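-- A small illustrative logger (an assumed addition, not part of the original
-- module): it drops 'Debug' and 'Normal' messages and prints only 'Important' ones.
importantOnlyLogger :: Logger
importantOnlyLogger = stdoutLogger Important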
|
jonte/ircbot
|
Network/IRC/Bot/Log.hs
|
bsd-3-clause
| 427 | 0 | 8 | 100 | 141 | 74 | 67 | 15 | 1 |
module Object.Infer where
-- $Id$
import Object.Data
import Autolib.Reporter.Type hiding ( result )
import Autolib.ToDoc
import Autolib.TES.Term
import Autolib.TES.Identifier
import Control.Monad ( guard )
type Exp = Term Identifier Identifier
infer :: Signature -> Exp -> Reporter Type
infer sig exp = do
    inform $ text "computing the type of expression:" <+> toDoc exp
    t <- nested 4 $ case exp of
      Node n [] ->
         case do v <- variables sig ; guard $ vname v == n ; return v
         of [ v ] -> do
                inform $ text "is a variable with declaration:" <+> toDoc v
                return $ vtype v
            [ ] -> reject $ text "is an undeclared variable."
            vs -> reject $ vcat
               [ text "is a variable declared more than once:"
               , toDoc vs
               ]
      Node n args ->
         case do f <- functions sig ; guard $ fname f == n ; return f
         of [ f ] -> do
                inform $ text "function has declaration:" <+> toDoc f
                assert ( length args == length ( arguments f ) )
                    $ text "number of arguments agrees with the declaration?"
                sequence_ $ do
                    ( k, arg ) <- zip [1..] args
                    return $ do
                        inform $ text "checking argument no." <+> toDoc k
                        t <- nested 4 $ infer sig arg
                        assert ( t == arguments f !! (k-1) )
                            $ text "argument type agrees with the declaration?"
                return $ result f
            [ ] -> reject $ text "is an undeclared function."
            fs -> reject $ vcat
               [ text "is a function declared more than once:"
               , toDoc fs
               ]
    inform $ text "has type:" <+> toDoc t
    return t
|
florianpilz/autotool
|
src/Object/Infer.hs
|
gpl-2.0
| 1,652 | 24 | 35 | 552 | 523 | 255 | 268 | 41 | 6 |
module Main where
import Graphics.UI.Gtk
main = do
initGUI
-- Create the builder, and load the UI file
builder <- builderNew
builderAddFromFile builder "simple.ui"
-- Retrieve some objects from the UI
window <- builderGetObject builder castToWindow "window1"
button <- builderGetObject builder castToButton "button1"
  -- Basic user interaction
button `onClicked` putStrLn "button pressed!"
window `onDestroy` mainQuit
-- Display the window
widgetShowAll window
mainGUI
|
phischu/gtk2hs
|
gtk/demo/gtkbuilder/GtkBuilderTest.hs
|
lgpl-3.0
| 577 | 0 | 8 | 172 | 95 | 47 | 48 | 12 | 1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998
TcRules: Typechecking transformation rules
-}
{-# LANGUAGE ViewPatterns #-}
module TcRules ( tcRules ) where
import HsSyn
import TcRnMonad
import TcSimplify
import TcMType
import TcType
import TcHsType
import TcExpr
import TcEnv
import TcEvidence
import TcUnify( buildImplicationFor )
import Type
import Id
import Var ( EvVar )
import Name
import BasicTypes ( RuleName )
import SrcLoc
import Outputable
import FastString
import Bag
import Data.List( partition )
{-
Note [Typechecking rules]
~~~~~~~~~~~~~~~~~~~~~~~~~
We *infer* the type of the LHS, and use that type to *check* the type of
the RHS. That means that higher-rank rules work reasonably well. Here's
an example (test simplCore/should_compile/rule2.hs) produced by Roman:
foo :: (forall m. m a -> m b) -> m a -> m b
foo f = ...
bar :: (forall m. m a -> m a) -> m a -> m a
bar f = ...
{-# RULES "foo/bar" foo = bar #-}
He wanted the rule to typecheck.
-}
tcRules :: [LRuleDecls Name] -> TcM [LRuleDecls TcId]
tcRules decls = mapM (wrapLocM tcRuleDecls) decls
tcRuleDecls :: RuleDecls Name -> TcM (RuleDecls TcId)
tcRuleDecls (HsRules src decls)
= do { tc_decls <- mapM (wrapLocM tcRule) decls
; return (HsRules src tc_decls) }
tcRule :: RuleDecl Name -> TcM (RuleDecl TcId)
tcRule (HsRule name act hs_bndrs lhs fv_lhs rhs fv_rhs)
= addErrCtxt (ruleCtxt $ snd $ unLoc name) $
do { traceTc "---- Rule ------" (pprFullRuleName name)
-- Note [Typechecking rules]
; (vars, bndr_wanted) <- captureConstraints $
tcRuleBndrs hs_bndrs
-- bndr_wanted constraints can include wildcard hole
-- constraints, which we should not forget about.
-- It may mention the skolem type variables bound by
-- the RULE. c.f. Trac #10072
; let (id_bndrs, tv_bndrs) = partition isId vars
; (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty)
<- tcExtendTyVarEnv tv_bndrs $
tcExtendIdEnv id_bndrs $
do { -- See Note [Solve order for RULES]
((lhs', rule_ty), lhs_wanted) <- captureConstraints (tcInferRho lhs)
; (rhs', rhs_wanted) <- captureConstraints $
tcMonoExpr rhs (mkCheckExpType rule_ty)
; return (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty) }
; traceTc "tcRule 1" (vcat [ pprFullRuleName name
, ppr lhs_wanted
, ppr rhs_wanted ])
; let all_lhs_wanted = bndr_wanted `andWC` lhs_wanted
; lhs_evs <- simplifyRule (snd $ unLoc name)
all_lhs_wanted
rhs_wanted
-- Now figure out what to quantify over
-- c.f. TcSimplify.simplifyInfer
-- We quantify over any tyvars free in *either* the rule
-- *or* the bound variables. The latter is important. Consider
-- ss (x,(y,z)) = (x,z)
-- RULE: forall v. fst (ss v) = fst v
-- The type of the rhs of the rule is just a, but v::(a,(b,c))
--
-- We also need to get the completely-uconstrained tyvars of
-- the LHS, lest they otherwise get defaulted to Any; but we do that
-- during zonking (see TcHsSyn.zonkRule)
; let tpl_ids = lhs_evs ++ id_bndrs
; forall_tkvs <- zonkTcTypesAndSplitDepVars $
rule_ty : map idType tpl_ids
; gbls <- tcGetGlobalTyCoVars -- Even though top level, there might be top-level
-- monomorphic bindings from the MR; test tc111
; qtkvs <- quantifyZonkedTyVars gbls forall_tkvs
; traceTc "tcRule" (vcat [ pprFullRuleName name
, ppr forall_tkvs
, ppr qtkvs
, ppr rule_ty
, vcat [ ppr id <+> dcolon <+> ppr (idType id) | id <- tpl_ids ]
])
-- Simplify the RHS constraints
; let skol_info = RuleSkol (snd $ unLoc name)
; (rhs_implic, rhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs rhs_wanted
-- For the LHS constraints we must solve the remaining constraints
-- (a) so that we report insoluble ones
-- (b) so that we bind any soluble ones
; (lhs_implic, lhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs
(all_lhs_wanted { wc_simple = emptyBag })
-- simplifyRule consumed all simple
-- constraints
; emitImplications (lhs_implic `unionBags` rhs_implic)
; return (HsRule name act
(map (noLoc . RuleBndr . noLoc) (qtkvs ++ tpl_ids))
(mkHsDictLet lhs_binds lhs') fv_lhs
(mkHsDictLet rhs_binds rhs') fv_rhs) }
tcRuleBndrs :: [LRuleBndr Name] -> TcM [Var]
tcRuleBndrs []
= return []
tcRuleBndrs (L _ (RuleBndr (L _ name)) : rule_bndrs)
= do { ty <- newOpenFlexiTyVarTy
; vars <- tcRuleBndrs rule_bndrs
; return (mkLocalId name ty : vars) }
tcRuleBndrs (L _ (RuleBndrSig (L _ name) rn_ty) : rule_bndrs)
-- e.g x :: a->a
-- The tyvar 'a' is brought into scope first, just as if you'd written
-- a::*, x :: a->a
= do { let ctxt = RuleSigCtxt name
; (id_ty, tvs, _) <- tcHsPatSigType ctxt rn_ty
; let id = mkLocalIdOrCoVar name id_ty
-- See Note [Pattern signature binders] in TcHsType
-- The type variables scope over subsequent bindings; yuk
; vars <- tcExtendTyVarEnv tvs $
tcRuleBndrs rule_bndrs
; return (tvs ++ id : vars) }
ruleCtxt :: FastString -> SDoc
ruleCtxt name = text "When checking the transformation rule" <+>
doubleQuotes (ftext name)
{-
*********************************************************************************
* *
Constraint simplification for rules
* *
***********************************************************************************
Note [Simplifying RULE constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example. Consider the following left-hand side of a rule
f (x == y) (y > z) = ...
If we typecheck this expression we get constraints
d1 :: Ord a, d2 :: Eq a
We do NOT want to "simplify" to the LHS
forall x::a, y::a, z::a, d1::Ord a.
f ((==) (eqFromOrd d1) x y) ((>) d1 y z) = ...
Instead we want
forall x::a, y::a, z::a, d1::Ord a, d2::Eq a.
f ((==) d2 x y) ((>) d1 y z) = ...
Here is another example:
fromIntegral :: (Integral a, Num b) => a -> b
{-# RULES "foo" fromIntegral = id :: Int -> Int #-}
In the rule, a=b=Int, and Num Int is a superclass of Integral Int. But
we *don't* want to get
forall dIntegralInt.
fromIntegral Int Int dIntegralInt (scsel dIntegralInt) = id Int
because the scsel will mess up RULE matching. Instead we want
forall dIntegralInt, dNumInt.
fromIntegral Int Int dIntegralInt dNumInt = id Int
Even if we have
g (x == y) (y == z) = ..
where the two dictionaries are *identical*, we do NOT WANT
forall x::a, y::a, z::a, d1::Eq a
      g ((==) d1 x y) ((==) d1 y z) = ...
because that will only match if the dict args are (visibly) equal.
Instead we want to quantify over the dictionaries separately.
In short, simplifyRuleLhs must *only* squash equalities, leaving
all dicts unchanged, with absolutely no sharing.
Also note that we can't solve the LHS constraints in isolation:
Example foo :: Ord a => a -> a
foo_spec :: Int -> Int
{-# RULE "foo" foo = foo_spec #-}
Here, it's the RHS that fixes the type variable
HOWEVER, under a nested implication things are different
Consider
f :: (forall a. Eq a => a->a) -> Bool -> ...
{-# RULES "foo" forall (v::forall b. Eq b => b->b).
f b True = ...
#-}
Here we *must* solve the wanted (Eq a) from the given (Eq a)
resulting from skolemising the argument type of g. So we
revert to SimplCheck when going under an implication.
------------------------ So the plan is this -----------------------
* Step 0: typecheck the LHS and RHS to get constraints from each
* Step 1: Simplify the LHS and RHS constraints all together in one bag
We do this to discover all unification equalities
* Step 2: Zonk the ORIGINAL (unsimplified) lhs constraints, to take
advantage of those unifications, and partition them into the
ones we will quantify over, and the others
See Note [RULE quantification over equalities]
* Step 3: Decide on the type variables to quantify over
* Step 4: Simplify the LHS and RHS constraints separately, using the
quantified constraints as givens
Note [Solve order for RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step 1 above, we need to be a bit careful about solve order.
Consider
f :: Int -> T Int
type instance T Int = Bool
RULE f 3 = True
From the RULE we get
lhs-constraints: T Int ~ alpha
rhs-constraints: Bool ~ alpha
where 'alpha' is the type that connects the two. If we glom them
all together, and solve the RHS constraint first, we might solve
with alpha := Bool. But then we'd end up with a RULE like
   RULE: f 3 |> (co :: T Int ~ Bool) = True
which is terrible. We want
RULE: f 3 = True |> (sym co :: Bool ~ T Int)
So we are careful to solve the LHS constraints first, and *then* the
RHS constraints. Actually much of this is done by the on-the-fly
constraint solving, so the same order must be observed in
tcRule.
Note [RULE quantification over equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Deciding which equalities to quantify over is tricky:
* We do not want to quantify over insoluble equalities (Int ~ Bool)
(a) because we prefer to report a LHS type error
(b) because if such things end up in 'givens' we get a bogus
"inaccessible code" error
* But we do want to quantify over things like (a ~ F b), where
F is a type function.
The difficulty is that it's hard to tell what is insoluble!
So we see whether the simplification step yielded any type errors,
and if so refrain from quantifying over *any* equalities.
Note [Quantifying over coercion holes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Equality constraints from the LHS will emit coercion hole Wanteds.
These don't have a name, so we can't quantify over them directly.
Instead, because we really do want to quantify here, invent a new
EvVar for the coercion, fill the hole with the invented EvVar, and
then quantify over the EvVar. Not too tricky -- just some
impedance matching, really.
Note [Simplify *derived* constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this stage, we're simplifying constraints only for insolubility
and for unification. Note that all the evidence is quickly discarded.
We make this explicit by working over derived constraints, for which
there is no evidence. Using derived constraints also prevents solved
equalities from being written to coercion holes. If we don't do this,
then RHS coercion-hole constraints get filled in, only to get filled
in *again* when solving the implications emitted from tcRule. That's
terrible, so we avoid the problem by using derived constraints.
-}
simplifyRule :: RuleName
-> WantedConstraints -- Constraints from LHS
-> WantedConstraints -- Constraints from RHS
-> TcM [EvVar] -- LHS evidence variables,
-- See Note [Simplifying RULE constraints] in TcRule
-- NB: This consumes all simple constraints on the LHS, but not
-- any LHS implication constraints.
simplifyRule name lhs_wanted rhs_wanted
= do { -- We allow ourselves to unify environment
-- variables: runTcS runs with topTcLevel
; tc_lvl <- getTcLevel
; insoluble <- runTcSDeriveds $
do { -- First solve the LHS and *then* solve the RHS
-- See Note [Solve order for RULES]
-- See Note [Simplify *derived* constraints]
lhs_resid <- solveWanteds $ toDerivedWC lhs_wanted
; rhs_resid <- solveWanteds $ toDerivedWC rhs_wanted
; return ( insolubleWC tc_lvl lhs_resid ||
insolubleWC tc_lvl rhs_resid ) }
; zonked_lhs_simples <- zonkSimples (wc_simple lhs_wanted)
; ev_ids <- mapMaybeM (quantify_ct insoluble) $
bagToList zonked_lhs_simples
; traceTc "simplifyRule" $
vcat [ text "LHS of rule" <+> doubleQuotes (ftext name)
                 , text "lhs_wanted" <+> ppr lhs_wanted
                 , text "rhs_wanted" <+> ppr rhs_wanted
, text "zonked_lhs_simples" <+> ppr zonked_lhs_simples
, text "ev_ids" <+> ppr ev_ids
]
; return ev_ids }
where
quantify_ct insol -- Note [RULE quantification over equalities]
| insol = quantify_insol
| otherwise = quantify_normal
quantify_insol ct
| isEqPred (ctPred ct)
= return Nothing
| otherwise
= return $ Just $ ctEvId $ ctEvidence ct
quantify_normal (ctEvidence -> CtWanted { ctev_dest = dest
, ctev_pred = pred })
= case dest of -- See Note [Quantifying over coercion holes]
HoleDest hole
| EqPred NomEq t1 t2 <- classifyPredType pred
, t1 `tcEqType` t2
-> do { -- These are trivial. Don't quantify. But do fill in
-- the hole.
; fillCoercionHole hole (mkTcNomReflCo t1)
; return Nothing }
| otherwise
-> do { ev_id <- newEvVar pred
; fillCoercionHole hole (mkTcCoVarCo ev_id)
; return (Just ev_id) }
EvVarDest evar -> return (Just evar)
quantify_normal ct = pprPanic "simplifyRule.quantify_normal" (ppr ct)
|
tjakway/ghcjvm
|
compiler/typecheck/TcRules.hs
|
bsd-3-clause
| 14,510 | 2 | 17 | 4,466 | 1,643 | 850 | 793 | 134 | 3 |
module D1 where
{-Rename data constructor `Tree` to `AnotherTree`.
This refactoring affects module `D1', 'B1' and 'C1' -}
data AnotherTree a = Leaf a | Branch (AnotherTree a) (AnotherTree a)
fringe :: AnotherTree a -> [a]
fringe (Leaf x ) = [x]
fringe (Branch left right) = fringe left ++ fringe right
class SameOrNot a where
isSame :: a -> a -> Bool
isNotSame :: a -> a -> Bool
instance SameOrNot Int where
isSame a b = a == b
isNotSame a b = a /= b
sumSquares (x:xs) = sq x + sumSquares xs
where sq x = x ^pow
pow = 2
sumSquares [] = 0
|
kmate/HaRe
|
old/testing/renaming/D1_TokOut.hs
|
bsd-3-clause
| 580 | 0 | 8 | 151 | 217 | 112 | 105 | 15 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts #-}
-- Results in context reduction stack overflow
module Class1 where
class C a where
foo :: a x -> a y
class C (T a) => D a where
type T a :: * -> *
bar :: a -> T a x -> T a y
instance C Maybe where
foo Nothing = Nothing
instance D () where
type T () = Maybe
bar x t = foo t
|
urbanslug/ghc
|
testsuite/tests/indexed-types/should_compile/Class1.hs
|
bsd-3-clause
| 341 | 0 | 9 | 95 | 136 | 69 | 67 | 12 | 0 |
{-# LANGUAGE StandaloneDeriving #-}
module Main where
data A
deriving instance Read A
deriving instance Show A
main = print (read "[]" :: [A])
-- Should successfully read the empty list
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/deriving/should_run/T7931.hs
|
bsd-3-clause
| 196 | 0 | 7 | 41 | 45 | 26 | 19 | -1 | -1 |
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
smaller ++ [x] ++ bigger
where
smaller = quicksort [ e | e <- xs, e <= x ]
bigger = quicksort [ e | e <- xs, e > x ]
|
dzeban/cs
|
sorting/quicksort.hs
|
mit
| 216 | 0 | 10 | 74 | 119 | 63 | 56 | 6 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.TheBook.TypesTest
-- Copyright : (c) 2014, Jakub Kozlowski
-- License : MIT
--
-- Maintainer : [email protected]
--
-- Tests for 'Data.TheBook.Types'.
-----------------------------------------------------------------------------
module Data.TheBook.TypesTest (tests) where
import Data.List
import Data.Ord
import Data.TheBook.Rule as Book
import Data.TheBook.Types as Types
import Test.Tasty
import Test.Tasty.QuickCheck as QC
tests :: TestTree
tests = testGroup "Data.TheBook.TypesTest" [qcProps]
qcProps :: TestTree
qcProps = testGroup "(checked by QuickCheck)"
[ QC.testProperty "Bla" True
]
|
ggreif/thebook-haskell
|
tests/Data/TheBook/TypesTest.hs
|
mit
| 792 | 0 | 8 | 163 | 104 | 68 | 36 | 12 | 1 |
{-# LANGUAGE OverloadedStrings, JavaScriptFFI #-}
module Main where
import GHCJS.Types
import GHCJS.Foreign
import GHCJS.Marshal
import qualified Data.JSString as JSS
jx, jy, jz :: JSString
jx = "x"
jy = "xy"
jz = "xyz"
hx, hy, hz :: String
hx = "X"
hy = "XY"
hz = "XYZ"
foreign import javascript unsafe "h$log($1);" clog :: JSString -> IO ()
foreign import javascript unsafe "$r = $1;" jsstr :: JSString -> IO JSString
main = do
mapM_ clog [jx, jy, jz]
mapM_ (clog . JSS.pack) [hx, hy, hz]
mapM_ putStrLn [hx, hy, hz]
mapM_ (putStrLn . JSS.unpack) [jx, jy, jz]
|
ghcjs/ghcjs
|
test/ffi/jsString.hs
|
mit
| 619 | 5 | 10 | 155 | 213 | 123 | 90 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Hspec
import qualified Data.Set as S
import qualified Data.Map as M
import Control.Exception
import Control.Concurrent
import Control.Concurrent.STM
import System.Posix
import System.Process
import System.Process.Internals (withProcessHandle, ProcessHandle__(OpenHandle))
import System.IO
import System.Directory
import System.FilePath
import Data.Functor
import Util
import System.Prefork.Class
import System.Prefork.Types
import Constant (
Worker(..)
, workerNum
, serverOption
, masterOutputFile
, workerOutputFile
, relaunchWorkerFile
, settingDefault
, settingRelaunch
)
main :: IO ()
main = do
hspec $ do
describe "Class" $ do
let worker = Worker "test"
it "translate worker to string" $ encodeToString worker `shouldBe` show worker
it "translate string to worker" $ decodeFromString "Worker \"test\"" `shouldBe` worker
it "returns default options" $ rtsOptions worker `shouldBe` []
describe "Types" $ do
let w1 = Worker "test1"
w2 = Worker "test2"
it "makes PreforkResource" $ do
resource <- makePreforkResource [w1, w2]
workerMap <- atomically $ readTVar $ prProcs resource
workerSet <- atomically $ readTVar $ prWorkers resource
M.size workerMap `shouldBe` 0
workerSet `shouldBe` S.fromList [w1, w2]
it "updates workers" $ do
resource <- makePreforkResource [w1, w2]
updateWorkerSet resource [w1]
workerSet <- atomically $ readTVar $ prWorkers resource
workerSet `shouldBe` S.fromList [w1]
describe "Main" $ do
it "makes test server" $ do
(_, ph) <- createTestServer settingDefault
withFile masterOutputFile ReadMode $ \hdl -> do
threadDelay 1000000
flip shouldBe "onStart" =<< hGetContents hdl
terminateProcess ph
it "sends sigHUP" $ do
checkOutputOnSignal sigHUP workerOutputFile "updateServer"
it "sends sigTERM" $ do
checkOutputOnSignal sigTERM masterOutputFile "onFinish"
it "sends sigINT" $ do
checkOutputOnSignal sigINT masterOutputFile "onFinish"
it "sends sigQUIT" $ do
checkOutputOnSignal sigQUIT workerOutputFile "onQuit"
it "sends sigHUP to relauch settings server" $ do
(pid, ph) <- createTestServer settingRelaunch
testActionBySignal sigHUP pid relaunchWorkerFile $ \hdl -> do
terminateProcess ph
workerPids <- lines <$> hGetContents hdl
length workerPids `shouldBe` workerNum
it "sends sigTERM to worker in relauch settings" $ do
writeFile relaunchWorkerFile ""
(_, ph) <- createTestServer settingRelaunch
h <- openFile relaunchWorkerFile ReadMode
workerPid <- hGetLine h
hClose h
testActionBySignal sigTERM (read workerPid) relaunchWorkerFile $ \hdl -> do
terminateProcess ph
workerPids <- lines <$> hGetContents hdl
length workerPids `shouldBe` 1
createTestServer :: String -> IO (ProcessID, ProcessHandle)
createTestServer settings = do
cDir <- getCurrentDirectory
distDir <- getDistDir cDir
let exePath = cDir </> distDir </> "build" </> "test-prefork-server" </> "test-prefork-server"
(_, Just hOut, _, ph) <- createProcess $ (proc exePath [serverOption, settings]) { std_out = CreatePipe }
_ <- forkIO $ hPutStr stdout =<< hGetContents hOut
pid <- withProcessHandle ph $ \x -> case x of
OpenHandle pid' -> return pid'
_ -> throwIO $ userError "Unable to retrieve child process ID."
threadDelay 1000000
return (pid, ph)
checkOutputOnSignal :: Signal -> String -> String -> IO ()
checkOutputOnSignal sig file expected = do
(pid, ph) <- createTestServer settingDefault
testActionBySignal sig pid file $ \hdl -> do
flip shouldBe expected =<< hGetContents hdl
terminateProcess ph
testActionBySignal :: Signal -> ProcessID -> String -> (Handle -> IO ()) -> IO ()
testActionBySignal sig pid file testAction = do
writeFile file ""
signalProcess sig pid
withFile file ReadMode $ \hdl -> do
threadDelay 1000000
testAction hdl
|
gree/haskell-prefork
|
test/test-prefork.hs
|
mit
| 4,224 | 0 | 22 | 1,017 | 1,188 | 571 | 617 | 105 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Socket.ByteString.Lazy.Posix (
-- * Send data to a socket
send
, sendAll
) where
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Foreign.Marshal.Array (allocaArray)
import Network.Socket.ByteString.Internal (c_writev)
import Network.Socket.ByteString.IO (waitWhen0)
import Network.Socket.ByteString.IOVec (IOVec (IOVec))
import Network.Socket.Imports
import Network.Socket.Internal
import Network.Socket.Types
-- -----------------------------------------------------------------------------
-- Sending
send
:: Socket -- ^ Connected socket
-> L.ByteString -- ^ Data to send
-> IO Int64 -- ^ Number of bytes sent
send s lbs = do
let cs = take maxNumChunks (L.toChunks lbs)
len = length cs
siz <- withFdSocket s $ \fd -> allocaArray len $ \ptr ->
withPokes cs ptr $ \niovs ->
throwSocketErrorWaitWrite s "writev" $ c_writev fd ptr niovs
return $ fromIntegral siz
where
withPokes ss p f = loop ss p 0 0
where
loop (c:cs) q k !niovs
| k < maxNumBytes = unsafeUseAsCStringLen c $ \(ptr, len) -> do
poke q $ IOVec ptr (fromIntegral len)
loop cs
(q `plusPtr` sizeOf (undefined :: IOVec))
(k + fromIntegral len)
(niovs + 1)
| otherwise = f niovs
loop _ _ _ niovs = f niovs
maxNumBytes = 4194304 :: Int -- maximum number of bytes to transmit in one system call
maxNumChunks = 1024 :: Int -- maximum number of chunks to transmit in one system call
sendAll
:: Socket -- ^ Connected socket
-> L.ByteString -- ^ Data to send
-> IO ()
sendAll _ "" = return ()
sendAll s bs = do
sent <- send s bs
waitWhen0 (fromIntegral sent) s
when (sent >= 0) $ sendAll s $ L.drop sent bs
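-- A small illustrative helper (an assumed addition, not part of the original
-- module): send a reply assembled from several lazy chunks in one go.
_sendReply :: Socket -> [L.ByteString] -> IO ()
_sendReply sock = sendAll sock . L.concat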
|
CloudI/CloudI
|
src/api/haskell/external/network-3.1.0.1/Network/Socket/ByteString/Lazy/Posix.hs
|
mit
| 2,081 | 0 | 18 | 653 | 537 | 286 | 251 | 46 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-@ LIQUID "-i../../bench" @-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.StackSet
-- Copyright : (c) Don Stewart 2007
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable, Haskell 98
--
module XMonad.StackSet (
-- * Introduction
-- $intro
-- ** The Zipper
-- $zipper
-- ** Xinerama support
-- $xinerama
-- ** Master and Focus
-- $focus
StackSet(..), Workspace(..), Screen(..), Stack(..), RationalRect(..),
-- * Construction
-- $construction
new, view, greedyView,
-- * Xinerama operations
-- $xinerama
lookupWorkspace,
screens, workspaces, allWindows, currentTag,
-- * Operations on the current stack
-- $stackOperations
peek, index, integrate, integrate', differentiate,
focusUp, focusDown, focusUp', focusDown', focusMaster, focusWindow,
tagMember, renameTag, ensureTags, member, findTag, mapWorkspace, mapLayout,
-- * Modifying the stackset
-- $modifyStackset
insertUp, delete, delete', filter,
-- * Setting the master window
-- $settingMW
swapUp, swapDown, swapMaster, shiftMaster, modify, modify', float, sink, -- needed by users
-- * Composite operations
-- $composite
shift, shiftWin,
-- for testing
abort
--LIQUID
, Char, Int, Bool, Maybe
) where
import Prelude hiding (filter)
import Data.Maybe (listToMaybe,isJust,fromMaybe)
import qualified Data.List as L (deleteBy,find,splitAt,filter,nub)
import Data.List ( (\\) )
import qualified Map as M (Map,insert,delete,empty)
import qualified Data.Set
import GHC.Generics
import Data.Proxy
import Data.Ratio
import Language.Fixpoint.Types hiding (reft)
import Test.Target
import Test.Target.Eval
import Test.Target.Expr
import Test.Target.Targetable
import Test.Target.Util
-- $intro
--
-- The 'StackSet' data type encodes a window manager abstraction. The
-- window manager is a set of virtual workspaces. On each workspace is a
-- stack of windows. A given workspace is always current, and a given
-- window on each workspace has focus. The focused window on the current
-- workspace is the one which will take user input. It can be visualised
-- as follows:
--
-- > Workspace { 0*} { 1 } { 2 } { 3 } { 4 }
-- >
-- > Windows [1 [] [3* [6*] []
-- > ,2*] ,4
-- > ,5]
--
-- Note that workspaces are indexed from 0, windows are numbered
-- uniquely. A '*' indicates the window on each workspace that has
-- focus, and which workspace is current.
-- $zipper
--
-- We encode all the focus tracking directly in the data structure, with a 'zipper':
--
-- A Zipper is essentially an `updateable' and yet pure functional
-- cursor into a data structure. Zipper is also a delimited
-- continuation reified as a data structure.
--
-- The Zipper lets us replace an item deep in a complex data
-- structure, e.g., a tree or a term, without an mutation. The
-- resulting data structure will share as much of its components with
-- the old structure as possible.
--
-- Oleg Kiselyov, 27 Apr 2005, haskell\@, "Zipper as a delimited continuation"
--
-- We use the zipper to keep track of the focused workspace and the
-- focused window on each workspace, allowing us to have correct focus
-- by construction. We closely follow Huet's original implementation:
--
-- G. Huet, /Functional Pearl: The Zipper/,
-- 1997, J. Functional Programming 75(5):549-554.
-- and:
-- R. Hinze and J. Jeuring, /Functional Pearl: The Web/.
--
-- and Conor McBride's zipper differentiation paper.
-- Another good reference is:
--
-- The Zipper, Haskell wikibook
-- $xinerama
-- Xinerama in X11 lets us view multiple virtual workspaces
-- simultaneously. While only one will ever be in focus (i.e. will
-- receive keyboard events), other workspaces may be passively
-- viewable. We thus need to track which virtual workspaces are
-- associated (viewed) on which physical screens. To keep track of
-- this, 'StackSet' keeps separate lists of visible but non-focused
-- workspaces, and non-visible workspaces.
-- $focus
--
-- Each stack tracks a focused item, and for tiling purposes also tracks
-- a 'master' position. The connection between 'master' and 'focus'
-- needs to be well defined, particularly in relation to 'insert' and
-- 'delete'.
--
------------------------------------------------------------------------
-- |
-- A cursor into a non-empty list of workspaces.
--
-- We puncture the workspace list, producing a hole in the structure
-- used to track the currently focused workspace. The two other lists
-- that are produced are used to track those workspaces visible as
-- Xinerama screens, and those workspaces not visible anywhere.
data StackSet i l a sid sd =
StackSet { current :: !(Screen i l a sid sd) -- ^ currently focused workspace
, visible :: [Screen i l a sid sd] -- ^ non-focused workspaces, visible in xinerama
, hidden :: [Workspace i l a] -- ^ workspaces not visible anywhere
, floating :: M.Map a RationalRect -- ^ floating windows
} deriving (Show, Read, Eq, Generic)
{-@ data StackSet i l a sid sd <p :: Workspace i l a -> Prop> =
StackSet { lcurrent :: (Screen <p> i l a sid sd)
, lvisible :: [(Screen <p> i l a sid sd)]
, lhidden :: [<(Workspace i l a)<p>>]
, lfloating :: (OMap a RationalRect)
}
@-}
{-@ type UStackSet i l a sid sd = {v: (StackSet i l a sid sd) | (NoDuplicates v)} @-}
-- | Visible workspaces, and their Xinerama screens.
data Screen i l a sid sd = Screen { workspace :: !(Workspace i l a)
, screen :: !sid
, screenDetail :: !sd }
deriving (Show, Read, Eq, Generic)
{-@ data Screen i l a sid sd <p :: Workspace i l a -> Prop>
= Screen { lworkspace :: <(Workspace i l a) <p>>
, lscreen :: sid
, lscreenDetail :: sd
}
@-}
-- |
-- A workspace is just a tag, a layout, and a stack.
--
data Workspace i l a = Workspace { tag :: !i, layout :: l, stack :: Maybe (Stack a) }
deriving (Show, Read, Eq, Generic)
{-@
data Workspace i l a = Workspace { ltag :: i
, llayout :: l
, lstack :: (Maybe (UStack a)) }
@-}
-- | A structure for window geometries
data RationalRect = RationalRect Rational Rational Rational Rational
deriving (Show, Read, Eq, Generic)
-- |
-- A stack is a cursor onto a window list.
-- The data structure tracks focus by construction, and
-- the master window is by convention the top-most item.
-- Focus operations will not reorder the list that results from
-- flattening the cursor. The structure can be envisaged as:
--
-- > +-- master: < '7' >
-- > up | [ '2' ]
-- > +--------- [ '3' ]
-- > focus: < '4' >
-- > dn +----------- [ '8' ]
--
-- A 'Stack' can be viewed as a list with a hole punched in it to make
-- the focused position. Under the zipper\/calculus view of such
-- structures, it is the differentiation of a [a], and integrating it
-- back has a natural implementation used in 'index'.
--
data Stack a = Stack { focus :: !a -- focused thing in this set
, up :: [a] -- clowns to the left
, down :: [a] } -- jokers to the right
deriving (Show, Read, Eq, Generic)
{-@
data Stack a = Stack { focus :: a
, up :: UListDif a focus
, down :: UListDif a focus }
@-}
{-@ type UStack a = {v:(Stack a) | (ListDisjoint (up v) (down v))}@-}
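-- An illustrative value (not part of the original module) matching the
-- diagram above: the focus is '4', 'up' runs from the element just above the
-- focus out to the master '7', and 'down' holds what lies below:
--
-- > Stack { focus = '4', up = "327", down = "8" }
--
-- Flattening it with 'integrate' (defined further down) yields "72348",
-- i.e. master first and focus kept in place.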
-- | Report an internal 'StackSet' error.  The message prefix indicates, to
-- anyone catching the exception, that the failure is expected.
abort :: String -> a
abort x = error $ "xmonad: StackSet: " ++ x
-- ---------------------------------------------------------------------
-- $construction
-- | /O(n)/. Create a new stackset, of empty stacks, with given tags,
-- with physical screens whose descriptions are given by 'm'. The
-- number of physical screens (@length 'm'@) should be less than or
-- equal to the number of workspace tags. The first workspace in the
-- list will be current.
--
-- Xinerama: Virtual workspaces are assigned to physical screens, starting at 0.
--
{-@ measure head :: [a] -> a
head (x:xs) = x
@-}
{-@ new :: (Integral s)
=> l
-> wids:{[i] | len wids > 0 }
-> m:{[sd] | len m > 0 && len m <= len wids}
-> {v:EmptyStackSet i l a s sd |((ltag (lworkspace (lcurrent v))) = (head wids))}
@-}
new :: (Integral s) => l -> [i] -> [sd] -> StackSet i l a s sd
new l wids m | not (null wids) && length m <= length wids && not (null m)
= StackSet cur visi unseen M.empty
where (seen,unseen) = L.splitAt (length m) $ map (\i -> Workspace i l Nothing) wids
(cur:visi) = [ Screen i s sd | (i, s, sd) <- zip3 seen [0..] m ]
-- now zip up visibles with their screen id
new _ _ _ = abort "non-positive argument to StackSet.new"
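-- Illustrative example (not part of the original module): three workspaces
-- on a single physical screen.  Workspace "1" becomes current, "2" and "3"
-- are hidden, and every stack starts out as 'Nothing':
--
-- > new someLayout ["1","2","3"] [()]
--
-- Here 'someLayout' is a placeholder for any layout value; the lone screen
-- gets id 0 and detail '()'.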
-- |
-- /O(w)/. Set focus to the workspace with index \'i\'.
-- If the index is out of range, return the original 'StackSet'.
--
-- Xinerama: If the workspace is not visible on any Xinerama screen, it
-- becomes the current screen. If it is in the visible list, it becomes
-- current.
{-@ predicate EqTag X S = (X = (ltag (lworkspace (lcurrent S)))) @-}
{-@ predicate TagMember X S =
(
(EqTag X S)
||
(Set_mem X (workspacesTags (lhidden S)))
||
(Set_mem X (screensTags (lvisible S)))
)
@-}
-- TODO prove uniqueness of tags!
{-@ invariant {v:StackSet i l a sid sd | (
Disjoint3 (Set_sng (ltag (lworkspace (lcurrent v)))) (workspacesTags (lhidden v)) (screensTags (lvisible v))
)} @-}
{-@ predicate EqEltsStackSet X Y = ((stackSetElts X) = (stackSetElts Y)) @-}
{-@ view :: (Eq s, Eq i)
=> x:i
-> s:UStackSet i l a s sd
-> {v:UStackSet i l a s sd | ( ((TagMember x s) => (EqTag x v)) && (EqEltsStackSet s v) && (((EqTag x s) || (not (TagMember x s))) => (s = v)) ) }
@-}
view :: (Eq s, Eq i) => i -> StackSet i l a s sd -> StackSet i l a s sd
view i s
| i == currentTag s = s -- current
| Just x <- L.find ((i==).tag.workspace) (visible s)
-- if it is visible, it is just raised
= s { current = x, visible = current s : L.deleteBy (equating screen) x (visible s) }
| Just x <- L.find ((i==).tag) (hidden s) -- must be hidden then
-- if it was hidden, it is raised on the xine screen currently used
= s { current = (current s) { workspace = x }
, hidden = workspace (current s) : L.deleteBy (equating tag) x (hidden s) }
| otherwise = s -- not a member of the stackset
where equating f = \x y -> f x == f y
-- 'Catch'ing this might be hard. Relies on monotonically increasing
-- workspace tags defined in 'new'
--
-- and now tags are not monotonic, what happens here?
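-- Illustrative example (not part of the original module), continuing the
-- 'new' example above with its single screen:
--
-- > view "2" (new someLayout ["1","2","3"] [()])
--
-- makes the hidden workspace "2" current on that screen and pushes
-- workspace "1" onto the hidden list; viewing a tag that is not in the
-- stackset returns it unchanged.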
-- |
-- Set focus to the given workspace. If that workspace does not exist
-- in the stackset, the original workspace is returned. If that workspace is
-- 'hidden', then display that workspace on the current screen, and move the
-- current workspace to 'hidden'. If that workspace is 'visible' on another
-- screen, the workspaces of the current screen and the other screen are
-- swapped.
{-@ greedyView :: (Eq s, Eq i)
=> x:i
-> s:UStackSet i l a s sd
-> {v:UStackSet i l a s sd | ( ((TagMember x s) => (EqTag x v)) && (EqEltsStackSet s v) && (((EqTag x s) || (not (TagMember x s))) => (v = s))) }
@-}
greedyView :: (Eq s, Eq i) => i -> StackSet i l a s sd -> StackSet i l a s sd
greedyView w ws
| any wTag (hidden ws) = view w ws
| (Just s) <- L.find (wTag . workspace) (visible ws)
= ws { current = (current ws) { workspace = workspace s }
, visible = s { workspace = workspace (current ws) }
: L.filter (not . wTag . workspace) (visible ws) }
| otherwise = ws
where wTag = (w == ) . tag
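-- Illustrative note (not part of the original module): with a single
-- visible screen, as in the example above, @greedyView "2"@ behaves exactly
-- like @view "2"@.  The two differ only when several Xinerama screens are
-- visible: 'view' switches focus to the screen already showing the
-- workspace, whereas 'greedyView' swaps that workspace onto the currently
-- focused screen.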
-- ---------------------------------------------------------------------
-- $xinerama
-- | Find the tag of the workspace visible on Xinerama screen 'sc'.
-- 'Nothing' if screen is out of bounds.
{-@ lookupWorkspace :: Eq s => s -> UStackSet i l a s sd -> Maybe i @-}
lookupWorkspace :: Eq s => s -> StackSet i l a s sd -> Maybe i
lookupWorkspace sc w = listToMaybe [ tag i | Screen i s _ <- current w : visible w, s == sc ]
-- ---------------------------------------------------------------------
-- $stackOperations
-- |
-- The 'with' function takes a default value, a function, and a
-- StackSet. If the current stack is Nothing, 'with' returns the
-- default value. Otherwise, it applies the function to the stack,
-- returning the result. It is like 'maybe' for the focused workspace.
--
with :: b -> (Stack a -> b) -> StackSet i l a s sd -> b
with dflt f = maybe dflt f . stack . workspace . current
-- |
-- Apply a function, and a default value for 'Nothing', to modify the current stack.
--
{-@ predicate GoodCurrent X ST =
(Set_sub (mStackElts X) (mStackElts (lstack (lworkspace (lcurrent ST))))) @-}
{-@ measure mStackElts :: (Maybe (Stack a)) -> (Data.Set.Set a)
mStackElts (Just x) = (stackElts x)
mStackElts (Nothing) = {v | (Set_emp v)}
@-}
{-@ invariant {v: (Maybe (Stack a)) | ((((stackElts (fromJust v)) = (mStackElts v)) <=> (isJust v) ) && ((Set_emp (mStackElts v)) <=> (isNothing v)))} @-}
{-@ measure isNothing :: (Maybe a) -> Prop
isNothing (Nothing) = true
isNothing (Just x) = false
@-}
{-@ modify :: {v:Maybe (UStack a) | (isNothing v)}
-> (y:(UStack a) -> ({v: Maybe (UStack a) | (Set_sub (mStackElts v) (stackElts y))}) )
-> UStackSet i l a s sd
-> UStackSet i l a s sd @-}
modify :: Maybe (Stack a) -> (Stack a -> Maybe (Stack a)) -> StackSet i l a s sd -> StackSet i l a s sd
modify d f s = s { current = (current s)
{ workspace = (workspace (current s)) { stack = with d f s }}}
-- |
-- Apply a function to modify the current stack if it isn't empty, and we don't
-- want to empty it.
--
{-@ modify' :: (x:UStack a -> {v : UStack a | (Set_sub (stackElts v) (stackElts x)) } ) -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
modify' :: (Stack a -> Stack a) -> StackSet i l a s sd -> StackSet i l a s sd
modify' f = modify Nothing (Just . f)
-- |
-- /O(1)/. Extract the focused element of the current stack.
-- Return 'Just' that element, or 'Nothing' for an empty stack.
--
{-@ peek :: UStackSet i l a s sd -> Maybe a @-}
peek :: StackSet i l a s sd -> Maybe a
peek = with Nothing (return . focus)
-- |
-- /O(n)/. Flatten a 'Stack' into a list.
--
{-@ integrate :: UStack a -> UList a @-}
integrate :: Stack a -> [a]
integrate (Stack x l r) = reverse l ++ x : r
-- |
-- /O(n)/ Flatten a possibly empty stack into a list.
{-@ integrate' :: Maybe (UStack a) -> UList a @-}
integrate' :: Maybe (Stack a) -> [a]
integrate' = maybe [] integrate
-- |
-- /O(n)/. Turn a list into a possibly empty stack (i.e., a zipper):
-- the first element of the list is current, and the rest of the list
-- is down.
{-@ differentiate :: UList a -> Maybe (UStack a) @-}
differentiate :: [a] -> Maybe (Stack a)
differentiate [] = Nothing
differentiate (x:xs) = Just $ Stack x [] xs
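-- Illustrative examples (not part of the original module):
--
-- > differentiate "abc" == Just (Stack 'a' [] "bc")
-- > integrate' (differentiate "abc") == "abc"
--
-- The head of the list receives the focus, and flattening recovers the
-- original list.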
-- |
-- /O(n)/. 'filter p s' returns the elements of 's' such that 'p' evaluates to
-- 'True'. Order is preserved, and focus moves as described for 'delete'.
--
{-@ filter :: (a -> Bool) -> x:UStack a -> {v:Maybe (UStack a) |(Set_sub (mStackElts v) (stackElts x)) }@-}
filter :: (a -> Bool) -> Stack a -> Maybe (Stack a)
filter p (Stack f ls rs) = case L.filter p (f:rs) of
f':rs' -> Just $ Stack f' (L.filter p ls) rs' -- maybe move focus down
[] -> case L.filter p ls of -- filter back up
f':ls' -> Just $ Stack f' ls' [] -- else up
[] -> Nothing
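-- Illustrative example (not part of the original module): removing the
-- focused element moves focus to the next element down, just as 'delete'
-- does.  With the stack from the diagram above:
--
-- > filter (/= '4') (Stack '4' "327" "8") == Just (Stack '8' "327" "")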
-- |
-- /O(s)/. Extract the stack on the current workspace, as a list.
-- The order of the stack is determined by the master window -- it will be
-- the head of the list. The implementation is given by the natural
-- integration of a one-hole list cursor, back to a list.
--
{-@ index :: UStackSet i l a s sd -> UList a @-}
index :: StackSet i l a s sd -> [a]
index = with [] integrate
-- |
-- /O(1), O(w) on the wrapping case/.
--
-- focusUp, focusDown. Move the window focus up or down the stack,
-- wrapping if we reach the end. The wrapping should model a 'cycle'
-- on the current stack. The 'master' window, and window order,
-- are unaffected by movement of focus.
--
-- swapUp, swapDown, swap the neighbour in the stack ordering, wrapping
-- if we reach the end. Again the wrapping model should 'cycle' on
-- the current stack.
--
{-@ focusUp, focusDown, swapUp, swapDown :: UStackSet i l a s sd -> UStackSet i l a s sd @-}
focusUp, focusDown, swapUp, swapDown :: StackSet i l a s sd -> StackSet i l a s sd
focusUp = modify' focusUp'
focusDown = modify' focusDown'
swapUp = modify' swapUp'
swapDown = modify' (reverseStack . swapUp' . reverseStack)
-- | Variants of 'focusUp' and 'focusDown' that work on a
-- 'Stack' rather than an entire 'StackSet'.
{-@ focusUp', focusDown', swapUp', reverseStack :: x:UStack a -> {v:UStack a|((stackElts v) = (stackElts x))} @-}
focusUp', focusDown' :: Stack a -> Stack a
focusUp' (Stack t (l:ls) rs) = Stack l ls (t:rs)
focusUp' (Stack t [] rs) = Stack x xs [] where (x:xs) = reverse (t:rs)
focusDown' = reverseStack . focusUp' . reverseStack
swapUp' :: Stack a -> Stack a
swapUp' (Stack t (l:ls) rs) = Stack t ls (l:rs)
swapUp' (Stack t [] rs) = Stack t (reverse rs) []
-- | reverse a stack: up becomes down and down becomes up.
reverseStack :: Stack a -> Stack a
reverseStack (Stack t ls rs) = Stack t rs ls
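-- Illustrative examples (not part of the original module), on the stack
-- from the diagram above:
--
-- > focusUp' (Stack '4' "327" "8") == Stack '3' "27" "48"
-- > focusUp' (Stack 'a' "" "bc") == Stack 'c' "ba" "" -- wraps to the bottom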
--
-- | /O(1) on current window, O(n) in general/. Focus the window 'w',
-- and set its workspace as current.
--
{-@ focusWindow :: (Eq s, Eq a, Eq i) => a -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
focusWindow :: (Eq s, Eq a, Eq i) => a -> StackSet i l a s sd -> StackSet i l a s sd
focusWindow w s | Just w == peek s = s
| otherwise = fromMaybe s $ do
n <- findTag w s
return $ until ((Just w ==) . peek) focusUp (view n s)
-- | Get a list of all screens in the 'StackSet'.
{-@ screens :: UStackSet i l a s sd -> UScreens i l a s sd @-}
screens :: StackSet i l a s sd -> [Screen i l a s sd]
screens s = current s : visible s
-- | Get a list of all workspaces in the 'StackSet'.
{-@ workspaces :: UStackSet i l a s sd -> [Workspace i l a] @-}
workspaces :: StackSet i l a s sd -> [Workspace i l a]
workspaces s = workspace (current s) : map workspace (visible s) ++ hidden s
-- | Get a list of all windows in the 'StackSet' in no particular order
{-@ allWindows :: Eq a => UStackSet i l a s sd -> UList a @-}
allWindows :: Eq a => StackSet i l a s sd -> [a]
allWindows = L.nub . concatMap (integrate' . stack) . workspaces
-- | Get the tag of the currently focused workspace.
{-@ currentTag :: x:UStackSet i l a s sd -> {v:i| (EqTag v x)} @-}
currentTag :: StackSet i l a s sd -> i
currentTag = tag . workspace . current
-- | Is the given tag present in the 'StackSet'?
{-@ tagMember :: Eq i
=> x:i
-> s:UStackSet i l a s sd
-> {v:Bool| ( (prop v) <=> (TagMember x s))}
@-}
tagMember :: Eq i => i -> StackSet i l a s sd -> Bool
tagMember t = elem t . map tag . workspaces
-- | Rename a given tag if present in the 'StackSet'.
{-@ renameTag :: Eq i => i -> i -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
renameTag :: Eq i => i -> i -> StackSet i l a s sd -> StackSet i l a s sd
renameTag o n = mapWorkspace rename
where rename w = if tag w == o then w { tag = n } else w
-- | Ensure that a given set of workspace tags is present by renaming
-- existing workspaces and\/or creating new hidden workspaces as
-- necessary.
{-@ ensureTags :: Eq i => l -> [i] -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
ensureTags :: Eq i => l -> [i] -> StackSet i l a s sd -> StackSet i l a s sd
ensureTags l allt st = et allt (map tag (workspaces st) \\ allt) st
where et [] _ s = s
et (i:is) rn s | i `tagMember` s = et is rn s
et (i:is) [] s = et is [] (s { hidden = Workspace i l Nothing : hidden s })
et (i:is) (r:rs) s = et is rs $ renameTag r i s
-- | Map a function on all the workspaces in the 'StackSet'.
{-@ mapWorkspace :: (x:Workspace i l a -> {v:Workspace i l a| (workspaceElts x) = (workspaceElts v)})
-> UStackSet i l a s sd
-> UStackSet i l a s sd @-}
mapWorkspace :: (Workspace i l a -> Workspace i l a) -> StackSet i l a s sd -> StackSet i l a s sd
mapWorkspace f s = s { current = updScr (current s)
, visible = map updScr (visible s)
, hidden = map f (hidden s) }
where updScr scr = scr { workspace = f (workspace scr) }
-- | Map a function on all the layouts in the 'StackSet'.
{-@ mapLayout :: (l1 -> l2) -> UStackSet i l1 a s sd -> UStackSet i l2 a s sd @-}
mapLayout :: (l -> l') -> StackSet i l a s sd -> StackSet i l' a s sd
mapLayout f (StackSet v vs hs m) = StackSet (fScreen v) (map fScreen vs) (map fWorkspace hs) m
where
fScreen (Screen ws s sd) = Screen (fWorkspace ws) s sd
fWorkspace (Workspace t l s) = Workspace t (f l) s
-- | /O(n)/. Is a window in the 'StackSet'?
{-@ member :: Eq a
=> x:a
-> st:(UStackSet i l a s sd)
-> {v:Bool| ((~(prop v)) => (~(StackSetElt x st)))}
@-}
member :: Eq a => a -> StackSet i l a s sd -> Bool
member a s = isJust (findTag a s)
-- | /O(1) on current window, O(n) in general/.
-- Return 'Just' the workspace tag of the given window, or 'Nothing'
-- if the window is not in the 'StackSet'.
{-@ findTag :: Eq a => a -> UStackSet i l a s sd -> Maybe i @-}
findTag :: Eq a => a -> StackSet i l a s sd -> Maybe i
findTag a s = listToMaybe
[ tag w | w <- workspaces s, has a (stack w) ]
where has _ Nothing = False
has x (Just (Stack t l r)) = x `elem` (t : l ++ r)
-- ---------------------------------------------------------------------
-- $modifyStackset
-- |
-- /O(n)/. (Complexity due to duplicate check). Insert a new element
-- into the stack, above the currently focused element. The new
-- element is given focus; the previously focused element is moved
-- down.
--
-- If the element is already in the stackset, the original stackset is
-- returned unmodified.
--
-- Semantics in Huet's paper is that insert doesn't move the cursor.
-- However, we choose to insert above, and move the focus.
--
{-@ insertUp :: Eq a
=> x:a
-> UStackSet i l a s sd
-> UStackSet i l a s sd @-}
insertUp :: Eq a => a -> StackSet i l a s sd -> StackSet i l a s sd
insertUp a s = if member a s then s else insert
where insert = modify (Just $ Stack a [] []) (\(Stack t l r) -> Just $ Stack a l (t:r)) s
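-- Illustrative example (not part of the original module): if the current
-- stack is the diagram stack @Stack '4' "327" "8"@ and 'x' is not yet a
-- member, 'insertUp' turns it into
--
-- > Stack 'x' "327" "48"
--
-- so 'x' takes the focus, the old focus '4' moves down, and the master '7'
-- is untouched.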
-- insertDown :: a -> StackSet i l a s sd -> StackSet i l a s sd
-- insertDown a = modify (Stack a [] []) $ \(Stack t l r) -> Stack a (t:l) r
-- Old semantics, from Huet.
-- > w { down = a : down w }
-- |
-- /O(1) on current window, O(n) in general/. Delete window 'w' if it exists.
-- There are 4 cases to consider:
--
-- * delete on an 'Nothing' workspace leaves it Nothing
--
-- * otherwise, try to move focus to the down
--
-- * otherwise, try to move focus to the up
--
-- * otherwise, you've got an empty workspace, becomes 'Nothing'
--
-- Behaviour with respect to the master:
--
-- * deleting the master window resets it to the newly focused window
--
-- * otherwise, delete doesn't affect the master.
--
{-@ delete :: (Ord a, Eq s) => a -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
delete :: (Ord a, Eq s) => a -> StackSet i l a s sd -> StackSet i l a s sd
delete w = sink w . delete' w
-- | Only temporarily remove the window from the stack, thereby not destroying special
-- information saved in the 'StackSet'.
delete' :: (Eq a, Eq s) => a -> StackSet i l a s sd -> StackSet i l a s sd
delete' w s = s { current = removeFromScreen (current s)
, visible = map removeFromScreen (visible s)
, hidden = map removeFromWorkspace (hidden s) }
where removeFromWorkspace ws = ws { stack = stack ws >>= filter (/=w) }
removeFromScreen scr = scr { workspace = removeFromWorkspace (workspace scr) }
------------------------------------------------------------------------
-- | Given a window, and its preferred rectangle, set it as floating
-- A floating window should already be managed by the 'StackSet'.
{-@ float :: Ord a => a -> RationalRect -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
float :: Ord a => a -> RationalRect -> StackSet i l a s sd -> StackSet i l a s sd
float w r s = s { floating = M.insert w r (floating s) }
-- | Clear the floating status of a window
{-@ sink :: Ord a => a -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
sink :: Ord a => a -> StackSet i l a s sd -> StackSet i l a s sd
sink w s = s { floating = M.delete w (floating s) }
------------------------------------------------------------------------
-- $settingMW
-- | /O(s)/. Set the master window to the focused window.
-- The old master window is swapped in the tiling order with the focused window.
-- Focus stays with the item moved.
{-@ swapMaster :: UStackSet i l a s sd -> UStackSet i l a s sd @-}
swapMaster :: StackSet i l a s sd -> StackSet i l a s sd
swapMaster = modify' $ \c -> case c of
Stack _ [] _ -> c -- already master.
Stack t ls rs -> Stack t [] (xs ++ x : rs) where (x:xs) = reverse ls
-- natural! keep focus, move current to the top, move top to current.
-- | /O(s)/. Set the master window to the focused window.
-- The other windows are kept in order and shifted down on the stack, as if you
-- just hit mod-shift-k a bunch of times.
-- Focus stays with the item moved.
{-@ shiftMaster :: UStackSet i l a s sd -> UStackSet i l a s sd @-}
shiftMaster :: StackSet i l a s sd -> StackSet i l a s sd
shiftMaster = modify' $ \c -> case c of
Stack _ [] _ -> c -- already master.
Stack t ls rs -> Stack t [] (reverse ls ++ rs)
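-- Illustrative contrast (not part of the original module), again on a
-- current stack @Stack '4' "327" "8"@ (window order 7 2 3 4 8):
--
-- > swapMaster  -- current stack becomes Stack '4' "" "2378" (order 4 2 3 7 8)
-- > shiftMaster -- current stack becomes Stack '4' "" "7238" (order 4 7 2 3 8)
--
-- 'swapMaster' trades the places of focus and master, while 'shiftMaster'
-- keeps the other windows in order and shifts them down.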
-- | /O(s)/. Set focus to the master window.
{-@ focusMaster :: UStackSet i l a s sd -> UStackSet i l a s sd @-}
focusMaster :: StackSet i l a s sd -> StackSet i l a s sd
focusMaster = modify' $ \c -> case c of
Stack _ [] _ -> c
Stack t ls rs -> Stack x [] (xs ++ t : rs) where (x:xs) = reverse ls
--
-- ---------------------------------------------------------------------
-- $composite
-- | /O(w)/. shift. Move the focused element of the current stack to stack
-- 'n', leaving it as the focused element on that stack. The item is
-- inserted above the currently focused element on that workspace.
-- The actual focused workspace doesn't change. If there is no
-- element on the current stack, the original stackSet is returned.
--
{-@ shift :: (Ord a, Eq s, Eq i) => i -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
shift :: (Ord a, Eq s, Eq i) => i -> StackSet i l a s sd -> StackSet i l a s sd
shift n s = maybe s (\w -> shiftWin n w s) (peek s)
-- | /O(n)/. shiftWin. Searches for the specified window 'w' on all workspaces
-- of the stackSet and moves it to stack 'n', leaving it as the focused
-- element on that stack. The item is inserted above the currently
-- focused element on that workspace.
-- The actual focused workspace doesn't change. If the window is not
-- found in the stackSet, the original stackSet is returned.
{-@ shiftWin :: (Ord a, Eq a, Eq s, Eq i) => i -> a -> UStackSet i l a s sd -> UStackSet i l a s sd @-}
shiftWin :: (Ord a, Eq a, Eq s, Eq i) => i -> a -> StackSet i l a s sd -> StackSet i l a s sd
shiftWin n w s = case findTag w s of
Just from | n `tagMember` s && n /= from -> go from s
_ -> s
where go from = onWorkspace n (insertUp w) . onWorkspace from (delete' w)
onWorkspace :: (Eq i, Eq s) => i -> (StackSet i l a s sd -> StackSet i l a s sd)
-> (StackSet i l a s sd -> StackSet i l a s sd)
onWorkspace n f s = view (currentTag s) . f . view n $ s
{-@ predicate NoDuplicates SS =
(
(Disjoint4 (workspacesElts (lhidden SS))
(screenElts (lcurrent SS))
(screensElts (lvisible SS))
(mapKeys (lfloating SS))
)
&&
(ScreensNoDups (lvisible SS))
&&
(WorkspacesNoDups (lhidden SS))
&&
(NoDuplicateTags SS)
&&
(NoDuplicateScreens SS)
)
@-}
{-@ predicate NoDuplicateTags SS = (Disjoint3 (Set_sng (ltag (lworkspace (lcurrent SS)))) (workspacesTags (lhidden SS)) (screensTags (lvisible SS))) @-}
{-@ predicate NoDuplicateScreens SS = (Set_emp (Set_cap (Set_sng (lscreen (lcurrent SS))) (screensScreens (lvisible SS)))) @-}
{-@ type UScreens i l a sid sd = {v:[Screen i l a sid sd ] | (ScreensNoDups v) } @-}
{-@ type UWorkspaces i l a = {v:[Workspace i l a] | (WorkspacesNoDups v) } @-}
{-@ predicate StackSetElt N S =
(
(ScreenElt N (lcurrent S))
||
(Set_mem N (screensElts (lvisible S)))
||
(Set_mem N (workspacesElts (lhidden S)))
)
@-}
{-@ predicate Disjoint3 X Y Z =
(
(Set_emp (Set_cap X Y))
&&
(Set_emp (Set_cap Y Z))
&&
(Set_emp (Set_cap Z X))
)
@-}
{-@ predicate Disjoint4 W X Y Z =
(
(Set_emp (Set_cap W X))
&&
(Set_emp (Set_cap W Y))
&&
(Set_emp (Set_cap W Z))
&&
(Set_emp (Set_cap X Y))
&&
(Set_emp (Set_cap X Z))
&&
(Set_emp (Set_cap Y Z))
)
@-}
{-@ measure screenScreens :: (Screen i l a sid sd) -> (Data.Set.Set sid)
screenScreens(Screen w x y) = (Set_sng x)
@-}
{-@ measure screensScreens :: ([Screen i l a sid sd]) -> (Data.Set.Set sid)
screensScreens([]) = {v| (Set_emp v)}
screensScreens(x:xs) = (Set_cup (screenScreens x) (screensScreens xs))
@-}
{-@ measure screenTags :: (Screen i l a sid sd) -> (Data.Set.Set i)
screenTags(Screen w x y) = (Set_sng (ltag w))
@-}
{-@ measure screensTags :: ([Screen i l a sid sd]) -> (Data.Set.Set i)
screensTags([]) = {v| (Set_emp v)}
screensTags(x:xs) = (Set_cup (screenTags x) (screensTags xs))
@-}
{-@ measure workspacesTags :: ([Workspace i l a]) -> (Data.Set.Set i)
workspacesTags([]) = {v| (Set_emp v)}
workspacesTags(x:xs) = (Set_cup (Set_sng(ltag x)) (workspacesTags xs))
@-}
{-@ measure screenElts :: (Screen i l a sid sd) -> (Data.Set.Set a)
screenElts(Screen w s sc) = (workspaceElts w)
@-}
{-@ measure stackElts :: (Stack a) -> (Data.Set.Set a)
stackElts(Stack f u d) = (Set_cup (Set_sng f) (Set_cup (listElts u) (listElts d)))
@-}
{-@ measure screensElts :: [(Screen i l a sid sd)] -> (Data.Set.Set a)
screensElts([]) = {v| (Set_emp v)}
screensElts(x:xs) = (Set_cup (screenElts x) (screensElts xs))
@-}
{-@ measure workspacesElts :: [(Workspace i l a)] -> (Data.Set.Set a)
workspacesElts([]) = {v| (Set_emp v)}
workspacesElts(x:xs) = (Set_cup (workspaceElts x) (workspacesElts xs))
@-}
{-@ measure workspacesDups :: [(Workspace i l a)] -> (Data.Set.Set a)
workspacesDups([]) = {v | (Set_emp v)}
workspacesDups(x:xs) = (Set_cup (Set_cap (workspaceElts x) (workspacesElts xs)) (workspacesDups xs))
@-}
{-@ measure screensDups :: [(Screen i l a sid sd)] -> (Data.Set.Set a)
screensDups([]) = {v | (Set_emp v)}
screensDups(x:xs) = (Set_cup (Set_cap (screenElts x) (screensElts xs)) (screensDups xs))
@-}
{-@ predicate ScreensNoDups XS = (Set_emp (screensDups XS)) @-}
{-@ predicate WorkspacesNoDups XS = (Set_emp (workspacesDups XS)) @-}
{-@ measure workspaceElts :: (Workspace i l a) -> (Data.Set.Set a)
workspaceElts (Workspace i l s) = {v | (if (isJust s) then (v = (stackElts (fromJust s))) else (Set_emp v))}
@-}
{-@ predicate StackSetCurrentElt N S =
(ScreenElt N (lcurrent S))
@-}
{-@ predicate ScreenElt N S =
(WorkspaceElt N (lworkspace S))
@-}
{-@ predicate WorkspaceElt N W =
((isJust (lstack W)) && (StackElt N (fromJust (lstack W)))) @-}
{-@ predicate StackElt N S =
(((ListElt N (up S))
|| (Set_mem N (Set_sng (focus S)))
|| (ListElt N (down S))))
@-}
{-@
measure listDup :: [a] -> (Data.Set.Set a)
listDup([]) = {v | (Set_emp v)}
listDup(x:xs) = {v | v = if (Set_mem x (listElts xs)) then (Set_cup (Set_sng x) (listDup xs)) else (listDup xs)}
@-}
{-@ predicate SubElts X Y =
(Set_sub (listElts X) (listElts Y)) @-}
{-@ predicate UnionElts X Y Z =
((listElts X) = (Set_cup (listElts Y) (listElts Z))) @-}
{-@ predicate EqElts X Y =
((listElts X) = (listElts Y)) @-}
{-@ predicate ListUnique LS =
(Set_emp (listDup LS)) @-}
{-@ predicate ListElt N LS =
(Set_mem N (listElts LS)) @-}
{-@ predicate ListDisjoint X Y =
(Set_emp (Set_cap (listElts X) (listElts Y))) @-}
{-@ type UList a = {v:[a] | (ListUnique v)} @-}
{-@ type UListDif a N = {v:[a] | ((not (ListElt N v)) && (ListUnique v))} @-}
{-@ qualif NotMem1(v: List a, x:a) : (not (Set_mem x (listElts v))) @-}
{-@ qualif NotMem2(v:a, x: List a) : (not (Set_mem v (listElts x))) @-}
{-@ qualif NoDup(v: List a) : (Set_emp(listDup v)) @-}
{-@ qualif Disjoint(v: List a, x:List a) : (Set_emp(Set_cap (listElts v) (listElts x))) @-}
-------------------------------------------------------------------------------
--------------- QUICKCHECK PROPERTIES : --------------------------------------
-------------------------------------------------------------------------------
-- TODO move them to a separate file, after name resolution is fixed.
{-@ type EmptyStackSet i l a sid sd = UStackSet <{\w -> (isNothing (lstack w))}> i l a sid sd @-}
{-@ type Valid = {v:Bool | (prop v) } @-}
{-@ type TOPROVE = Bool @-}
{-@ measure prop :: Bool -> Prop
prop (True) = true
prop (False) = false
@-}
{-@ measure stackSetElts :: (StackSet i l a sid sd) -> (Data.Set.Set a)
stackSetElts (StackSet c v h l) = (Set_cup (screenElts c) (Set_cup (screensElts v) (workspacesElts h)))
@-}
instance (Ord a, Targetable i, Targetable l, Targetable a, Targetable s, Targetable sd)
=> Targetable (StackSet i l a s sd)
instance (Targetable i, Targetable l, Targetable a, Targetable s, Targetable sd)
=> Targetable (Screen i l a s sd)
instance (Targetable i, Targetable l, Targetable a) => Targetable (Workspace i l a)
instance Targetable a => Targetable (Stack a)
instance Targetable RationalRect
instance (Num a, Integral a, Targetable a) => Targetable (Ratio a) where
getType _ = FObj "GHC.Real.Ratio"
query _ d t = query (Proxy :: Proxy Int) d t
decode v t= decode v t >>= \ (x::Int) -> return (fromIntegral x)
toExpr x = EApp (dummyLoc "GHC.Real.:%") [toExpr (numerator x), toExpr (denominator x)]
check z t = do
let x = numerator z
let y = denominator z
let cn = symbol ("GHC.Real.:%" :: String)
[(_,tx),(_,ty)] <- unfold cn t
(bx, vx) <- check x tx
(by, vy) <- check y ty
let v = app cn [vx, vy]
b <- eval (reft t) v
return (b && bx && by, v)
|
gridaphobe/target
|
examples/XMonad/StackSet.hs
|
mit
| 35,974 | 0 | 14 | 9,246 | 5,822 | 3,182 | 2,640 | 228 | 4 |
{-# LANGUAGE BangPatterns, OverloadedStrings, TemplateHaskell #-}
import Control.Exception (finally)
import Control.Lens (makeLenses, to, (&), (.~), (^.),
(^?))
import Control.Monad (unless)
import Data.Aeson.Lens (key, nth, _String)
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy.Char8 as C8L
import qualified Data.Text as T
import Network.HTTP.Client (ManagerSettings (..))
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Network.Wreq (responseBody)
import qualified Network.Wreq as Wreq
import Options.Applicative ((<$>), (<**>), (<*>), (<>))
import qualified Options.Applicative as OA
import qualified System.IO as IO
kipptAPIEndPoint :: String
kipptAPIEndPoint = "https://kippt.com/"
data Config = Config
{ _user :: String
, _token :: String
, _offset :: Integer
, _timeout :: Integer
} deriving Show
makeLenses ''Config
main :: IO ()
main =
OA.execParser progopts >>= grabBookmarks
progopts :: OA.ParserInfo Config
progopts = OA.info (config <**> OA.helper) (OA.fullDesc <> OA.progDesc "Grab kippt bookmarks")
config :: OA.Parser Config
config = Config <$> OA.strOption ( OA.long "user"
<> OA.metavar "USER"
<> OA.help "Kippt user name"
<> OA.value "[email protected]"
)
<*> OA.strOption ( OA.long "token"
<> OA.metavar "TOKEN"
<> OA.help "Kippt access token"
<> OA.value "123456789"
)
<*> OA.option ( OA.long "offset"
<> OA.metavar "OFFSET"
<> OA.help "Star offset"
<> OA.value 0
)
<*> OA.option ( OA.long "timeout"
<> OA.metavar "TIMEOUT"
<> OA.help "Request timeout (in sec)"
<> OA.value (60 :: Integer))
grabBookmarks :: Config -> IO ()
grabBookmarks cfg = do
IO.putStrLn "["
let !opt = Wreq.defaults & Wreq.manager .~ Left (managerSettings $ cfg ^. timeout)
& Wreq.header "X-Kippt-Username" .~ [cfg ^. user . to C8.pack]
& Wreq.header "X-Kippt-API-Token" .~ [cfg ^. token . to C8.pack]
let !firstUrl = kipptAPIEndPoint ++ "/api/clips?offset=" ++ (cfg ^. offset . to show)
finally (go "" opt firstUrl) (IO.putStrLn "]")
where
managerSettings :: Integer -> ManagerSettings
managerSettings tmout = tlsManagerSettings { managerResponseTimeout = Just (fromInteger $ tmout * 1000 * 1000) }
go :: String -> Wreq.Options -> String -> IO ()
go !sep !httpOpts !url = do
r <- Wreq.getWith httpOpts url
unless (r ^? responseBody . key "objects" . nth 0 == Nothing) $ do
IO.putStrLn sep
C8L.putStrLn $ r ^. responseBody
let !nextUrlFragment = T.unpack (r ^. responseBody . key "meta" . key "next" . _String)
go ",\n" httpOpts $ kipptAPIEndPoint ++ nextUrlFragment
|
objectx/kippt-grabber
|
src/Main.hs
|
mit
| 3,521 | 0 | 20 | 1,365 | 902 | 474 | 428 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Reactive where
import Types
import World
import Mob
import Reactive.Banana
import Graphics.Gloss
import Control.Applicative
import Control.Lens
import Graphics.Gloss.Interface.Pure.Game hiding (Event)
import qualified Graphics.Gloss.Interface.Pure.Game as G
makePrisms ''G.Event
makePrisms ''Key
addMobB :: Mob a => Behavior World -> Event (Behavior a) -> Behavior World
addMobB bWorld = switchB bWorld . fmap (liftA2 addMob bWorld)
keyboardTankEvent :: Event G.Event -> Event TankUpdate
keyboardTankEvent eEv = filterJust $ helper <$> eEv
where helper x = case preview (_EventKey . _1 . _Char) x of
Just 'w' -> Just $ AccelTank 1
Just 's' -> Just $ StopTank $ negate 1
Just 'a' -> Just $ RotateTank 1
Just 'd' -> Just $ RotateTank $ negate 1
_ -> Nothing
|
edwardwas/tanks15
|
src/Reactive.hs
|
mit
| 895 | 0 | 11 | 220 | 279 | 142 | 137 | 23 | 5 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientNamenodeProtocolProtos.CreateFlagProto (CreateFlagProto(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data CreateFlagProto = CREATE
| OVERWRITE
| APPEND
| LAZY_PERSIST
| NEW_BLOCK
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable CreateFlagProto
instance Prelude'.Bounded CreateFlagProto where
minBound = CREATE
maxBound = NEW_BLOCK
instance P'.Default CreateFlagProto where
defaultValue = CREATE
toMaybe'Enum :: Prelude'.Int -> P'.Maybe CreateFlagProto
toMaybe'Enum 1 = Prelude'.Just CREATE
toMaybe'Enum 2 = Prelude'.Just OVERWRITE
toMaybe'Enum 4 = Prelude'.Just APPEND
toMaybe'Enum 16 = Prelude'.Just LAZY_PERSIST
toMaybe'Enum 32 = Prelude'.Just NEW_BLOCK
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum CreateFlagProto where
fromEnum CREATE = 1
fromEnum OVERWRITE = 2
fromEnum APPEND = 4
fromEnum LAZY_PERSIST = 16
fromEnum NEW_BLOCK = 32
toEnum
= P'.fromMaybe
(Prelude'.error "hprotoc generated code: toEnum failure for type Hadoop.Protos.ClientNamenodeProtocolProtos.CreateFlagProto")
. toMaybe'Enum
succ CREATE = OVERWRITE
succ OVERWRITE = APPEND
succ APPEND = LAZY_PERSIST
succ LAZY_PERSIST = NEW_BLOCK
succ _ = Prelude'.error "hprotoc generated code: succ failure for type Hadoop.Protos.ClientNamenodeProtocolProtos.CreateFlagProto"
pred OVERWRITE = CREATE
pred APPEND = OVERWRITE
pred LAZY_PERSIST = APPEND
pred NEW_BLOCK = LAZY_PERSIST
pred _ = Prelude'.error "hprotoc generated code: pred failure for type Hadoop.Protos.ClientNamenodeProtocolProtos.CreateFlagProto"
instance P'.Wire CreateFlagProto where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB CreateFlagProto
instance P'.MessageAPI msg' (msg' -> CreateFlagProto) CreateFlagProto where
getVal m' f' = f' m'
instance P'.ReflectEnum CreateFlagProto where
reflectEnum
= [(1, "CREATE", CREATE), (2, "OVERWRITE", OVERWRITE), (4, "APPEND", APPEND), (16, "LAZY_PERSIST", LAZY_PERSIST),
(32, "NEW_BLOCK", NEW_BLOCK)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".hadoop.hdfs.CreateFlagProto") ["Hadoop", "Protos"] ["ClientNamenodeProtocolProtos"] "CreateFlagProto")
["Hadoop", "Protos", "ClientNamenodeProtocolProtos", "CreateFlagProto.hs"]
[(1, "CREATE"), (2, "OVERWRITE"), (4, "APPEND"), (16, "LAZY_PERSIST"), (32, "NEW_BLOCK")]
instance P'.TextType CreateFlagProto where
tellT = P'.tellShow
getT = P'.getRead
|
alexbiehl/hoop
|
hadoop-protos/src/Hadoop/Protos/ClientNamenodeProtocolProtos/CreateFlagProto.hs
|
mit
| 3,189 | 0 | 11 | 553 | 784 | 429 | 355 | 69 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Proxygen.Database
( Database
, DatabaseIORef
, newDatabase
, updateDatabase
) where
import Proxygen.Database.Cache
import Proxygen.Card
import Proxygen.Card.Raw
import Data.IORef
import qualified Data.Map.Strict as M
import qualified Data.Aeson as Aeson
import qualified Data.Text.Lazy as T
import Data.Text.Lazy.Encoding
type Database = String --Map String Card
type DatabaseIORef = (IORef Database) -- to be changed
-- Stuff for the updater thread
readRawCache :: IO String
readRawCache = withUpdatedCache readCache
-- Computationally expensive
constructDatabase :: IO Database
constructDatabase = do
rawData <- readRawCache
let saneRawData = sanitizeRawData $ T.pack rawData
-- Process the data. This is the hard part
let rawMap = Aeson.decode $ encodeUtf8 saneRawData :: Maybe (M.Map T.Text RawCard)
saneRawMap <- case rawMap of
Nothing -> fail "could not parse AllCards.json"
Just m -> return $ sanitizeRawMap m
let saneMap = M.map fromRaw saneRawMap
putStrLn $ Prelude.take 500 $ show saneMap
return rawData
updateDatabase :: DatabaseIORef -> IO ()
updateDatabase ref = whenCacheOld $ do
newData <- constructDatabase
writeIORef ref newData
newDatabase :: IO DatabaseIORef
newDatabase = constructDatabase >>= newIORef
-- Stuff for the worker threads, defined now so refactoring can be done later
-- nothing here yet
|
Dryvnt/proxygen-hs
|
src/Proxygen/Database.hs
|
mit
| 1,456 | 0 | 13 | 284 | 326 | 175 | 151 | 35 | 2 |
{-# LANGUAGE DeriveGeneric #-}
module ContentIdentifier (
create
, createLazy
, nil
, Algro (..)
, Type
, Value
, toURNBuilder
, toURN
, ContentIdentifier (..)
)
where
import Control.Applicative
import Control.DeepSeq
import Control.DeepSeq.Generics (genericRnf)
import Data.ByteString.Builder
import Data.ByteString.Lazy.Builder
import Data.ByteString.Short (ShortByteString, toShort, fromShort)
import Data.Digest.CRC32
import Data.Monoid ( (<>) )
import Data.Serialize
import Data.Word
import GHC.Generics (Generic)
import qualified Crypto.Hash.MD5 as MD5
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Crypto.Hash.SHA3 as SHA3
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Base32 as Base32
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Short as Short
data Algro = SHA1 | CRC32 | SHA256 | SHA3_256 | Nil deriving (Eq, Ord, Generic, Enum)
type Digest = ShortByteString
data ContentIdentifier = ContentIdentifier {-# UNPACK #-} !Algro
{-# UNPACK #-} !Digest deriving (Eq, Ord, Generic)
type HashSize = Int
type Type = Algro
type Value = Digest
sha3_256HashLength :: HashSize
sha3_256HashLength = 256
create :: Algro -> B.ByteString -> ContentIdentifier
create a d = ContentIdentifier a $! toShort $! hash a d
createLazy :: Algro -> L.ByteString -> ContentIdentifier
createLazy a d = ContentIdentifier a $! toShort $! hashLazy a d
nil :: ContentIdentifier
nil = ContentIdentifier Nil Short.empty
hashLazy :: Algro -> L.ByteString -> B.ByteString
hashLazy SHA1 = SHA1.hashlazy
hashLazy SHA256 = SHA256.hashlazy
hashLazy SHA3_256 = SHA3.hashlazy sha3_256HashLength
hashLazy CRC32 = L.toStrict . toLazyByteString . word32BE . crc32
hashLazy Nil = undefined
hash :: Algro -> B.ByteString -> B.ByteString
hash SHA1 = SHA1.hash
hash SHA256 = SHA256.hash
hash SHA3_256 = SHA3.hash sha3_256HashLength
hash CRC32 = L.toStrict . toLazyByteString . word32BE . crc32
hash Nil = undefined
toURNBuilder :: ContentIdentifier -> Builder
toURNBuilder (ContentIdentifier algro digest) =
urnPrefix algro <> byteString (Base32.encode $ fromShort digest)
toURN :: ContentIdentifier -> B.ByteString
toURN (ContentIdentifier a d) = (urnPrefixForAlgro a) `B.append` (Base32.encode $ fromShort d)
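-- Illustrative sketch (not part of the original module): a URN is the
-- algorithm name followed by the Base32-encoded digest, so
--
-- > toURN (create SHA1 (C.pack "hello"))
--
-- produces a ByteString of the form @"urn:sha1:<base32 of the 20-byte
-- SHA-1 digest>"@.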
fromURN :: B.ByteString -> Either String ContentIdentifier
fromURN = undefined
urnPrefix :: Algro -> Builder
urnPrefix a = urn <> colon <> algro <> colon
where
urn = string8 "urn"
colon = char8 ':'
algro = string8 (algroName a)
urnPrefixForAlgro :: Algro -> B.ByteString
urnPrefixForAlgro algro =
C.pack $ "urn:" ++ (algroName algro) ++ ":"
algroNameAsByteString :: Algro -> B.ByteString
algroNameAsByteString = C.pack . algroName
algroName :: Algro -> String
algroName CRC32 = "crc32"
algroName SHA1 = "sha1"
algroName SHA256 = "sha256"
algroName SHA3_256 = "sha3-256"
algroName Nil = "nil"
instance Show ContentIdentifier where
show = C.unpack . toURN
instance Serialize Algro where
put a = put ((fromIntegral $ fromEnum a) :: Word8)
get = (get :: Get Word8) >>= return . toEnum . fromIntegral
instance Serialize ContentIdentifier where
put (ContentIdentifier a d) = put a >> put d
get = ContentIdentifier <$> get <*> get
instance Serialize ShortByteString where
put = put . fromShort
get = fmap toShort get
instance NFData Algro where rnf = genericRnf
instance NFData ContentIdentifier where rnf = genericRnf
|
danstiner/clod
|
src/ContentIdentifier.hs
|
mit
| 3,640 | 0 | 10 | 635 | 1,053 | 587 | 466 | 95 | 1 |
{-|
Module : Language.GoLite.Syntax.SrcAnn
Description : GoLite syntax definitions with source annotations
Copyright : (c) Jacob Errington and Frederic Lafrance, 2016
License : MIT
Maintainer : [email protected]
Stability : experimental
This module provides type synonyms for practically all the types available in
"Language.GoLite.Syntax.Types" in which all the data is 'SrcSpan'-annotated
using 'Language.GoLite.Annotation.Ann'.
Also provided are parser combinators to facilitate capturing source position
information during parsing as well as stripping functions for removing
annotations from entire syntax trees.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Language.GoLite.Syntax.SrcAnn
( -- * General source-annotated functors
SrcSpan(..)
, SrcAnn
, SrcAnnFix
, module Language.Common.Annotation
-- * Parser combinators
, withSrcAnn
-- ** Useful specialized parser combinators
, withSrcAnnF
, withSrcAnnFix
, withSrcAnnId
, withSrcAnnConst
, withPushSrcAnnF
, withPushSrcAnnFix
-- * Miscellaneous
, builtinSpan
-- * Source-annotated syntax definitions
, SrcAnnPackage
, SrcAnnTopLevelDecl
, SrcAnnVarDecl
, SrcAnnTypeDecl
, SrcAnnFunDecl
, SrcAnnTypeF
, SrcAnnType
, SrcAnnStatementF
, SrcAnnStatement
, SrcAnnCaseHead
, SrcAnnDeclaration
, SrcAnnExprF
, SrcAnnExpr
, SrcAnnBinaryOp
, SrcAnnAssignOp
, SrcAnnUnaryOp
, SrcAnnLiteral
, SrcAnnIdent
, SrcAnnGoInt
, SrcAnnGoFloat
, SrcAnnGoRune
, SrcAnnGoString
-- * Annotation stripping functions
, barePackage
, bareTopLevelDecl
, bareFunDecl
, bareType
, bareStmt
, bareExpr
-- * Reexports for dealing with @SourcePos@
, sourceName
, sourceLine
, sourceColumn
) where
import Language.Common.Annotation
import Language.GoLite.Lexer.Core
import Language.GoLite.Pretty
import Language.GoLite.Syntax.Basic
import Language.GoLite.Syntax.Types
import Control.Applicative
import Data.Functor.Foldable
-- | A source span has a beginning and an end that refer to locations in a
-- source file.
data SrcSpan
= SrcSpan
{ srcStart :: !SourcePos
, srcEnd :: !SourcePos
}
deriving (Eq, Ord, Show)
-- | A trivial source span that can be given to builtins.
builtinSpan :: SrcSpan
builtinSpan = SrcSpan
{ srcStart = initialPos "builtin"
, srcEnd = initialPos "builtin"
}
-- | General source-annotated functor value.
type SrcAnn f a = Ann SrcSpan f a
-- | General source-annotated functor fixed point.
type SrcAnnFix f = Fix (Ann SrcSpan f)
-- | Run a parser and annotate its result after applying a given wrapping
-- strategy with source position information.
withSrcAnn
:: (a -> f b) -- ^ A wrapping strategy for the parsed data
-> Parser a -- ^ The parser to annotate
-> Parser (SrcAnn f b) -- ^ A parser that produces an annotated result.
withSrcAnn f p = do
p1 <- getPosition
x <- p
p2 <- getPosition
pure (Ann (SrcSpan p1 p2) (f x))
-- | Runs a parser producing a functor value and annotates it with source
-- position information.
--
-- > withSrcAnnF = withSrcAnn id
withSrcAnnF :: Parser (f a) -> Parser (SrcAnn f a)
withSrcAnnF = withSrcAnn id
-- | Combines 'withSrcAnnF' and 'annPush' to create a source-span annotation of
-- data wrapped within two functors.
withPushSrcAnnF :: Functor f => Parser (f (g a)) -> Parser (f (SrcAnn g a))
withPushSrcAnnF = fmap annPush . withSrcAnnF
-- | Runs a parser that produces a source-annotated syntax tree and wraps it in
-- another layer of source annotation.
withSrcAnnFix
:: Parser (f (Fix (Ann SrcSpan f)))
-> Parser (Fix (Ann SrcSpan f))
withSrcAnnFix = fmap Fix . withSrcAnnF
-- | Combines 'withPushSrcAnnF' and 'Fix' to add one more layer of annotated
-- fixed-point structure.
withPushSrcAnnFix
:: Functor f
=> Parser (f (g (Fix (Ann SrcSpan g))))
-> Parser (f (Fix (Ann SrcSpan g)))
withPushSrcAnnFix = fmap (fmap Fix) . withPushSrcAnnF
-- | Run a parser and annotate its result in the identity functor with source
-- position information.
withSrcAnnId :: Parser a -> Parser (SrcAnn Identity a)
withSrcAnnId = withSrcAnn Identity
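-- A hypothetical usage sketch (not part of the original module): given some
-- token parser @ident :: Parser String@ (the name is assumed here purely for
-- illustration), an annotated variant is
--
-- > annIdent :: Parser (SrcAnn Identity String)
-- > annIdent = withSrcAnnId ident
--
-- where the resulting 'SrcSpan' covers exactly the input consumed by @ident@.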
-- | Runs a parser, packaging its result into a constant functor annotated with
-- source position information.
withSrcAnnConst :: Parser a -> Parser (SrcAnn (Const a) b)
withSrcAnnConst = withSrcAnn Const
-- | Removes source annotations from a package.
barePackage :: SrcAnnPackage -> BasicPackage
barePackage (Package i ds) = Package (bare i) (map bareTopLevelDecl ds)
-- | Removes source annotations from a top-level declaration (regular
-- type/variable or function).
bareTopLevelDecl :: SrcAnnTopLevelDecl -> BasicTopLevelDecl
bareTopLevelDecl (TopLevelDecl d) = TopLevelDecl (bareDecl d)
bareTopLevelDecl (TopLevelFun f) = TopLevelFun (bareFunDecl f)
-- | Removes source annotations from a function declaration.
bareFunDecl :: SrcAnnFunDecl -> BasicFunDecl
bareFunDecl (FunDecl fn args rty bod) =
FunDecl
(bare fn)
(map (\(ident, ty) -> (bare ident, bareType ty)) args)
(bareType <$> rty)
(map bareStmt bod)
-- | Removes source annotations from a type/variable declaration.
bareDecl :: SrcAnnDeclaration -> BasicDeclaration
bareDecl (TypeDecl (TypeDeclBody i ty)) =
TypeDecl (TypeDeclBody (bare i) (bareType ty))
bareDecl (VarDecl (VarDeclBody is ty es)) =
VarDecl (VarDeclBody (map bare is) (bareType <$> ty) (map bareExpr es))
-- | Removes source annotations from a statement and all its inner statements.
bareStmt :: SrcAnnStatement -> BasicStatement
bareStmt = cata phi where
phi (Ann _ (DeclStmt d)) = Fix (DeclStmt (bareDecl d))
phi (Ann _ (ExprStmt e)) = Fix (ExprStmt (bareExpr e))
phi (Ann _ (ShortVarDecl ids es)) = Fix (ShortVarDecl (map bare ids)
(map bareExpr es))
phi (Ann _ (Assignment es op es')) = Fix (Assignment (map bareExpr es)
(bare op)
(map bareExpr es'))
phi (Ann _ (PrintStmt es)) = Fix (PrintStmt (map bareExpr es))
phi (Ann _ (ReturnStmt e)) = Fix (ReturnStmt (bareExpr <$> e))
phi (Ann _ (IfStmt ini e thens elses)) = Fix (IfStmt ini (bareExpr e)
thens
elses)
phi (Ann _ (SwitchStmt ini e clauses)) = Fix (SwitchStmt ini
(bareExpr <$> e)
(map (\cl -> (bareCaseHead (fst cl), snd cl)) clauses))
phi (Ann _ (ForStmt i e p d)) = Fix (ForStmt i (bareExpr <$> e) p d)
phi (Ann _ BreakStmt) = Fix BreakStmt
phi (Ann _ ContinueStmt) = Fix ContinueStmt
phi (Ann _ FallthroughStmt) = Fix FallthroughStmt
phi (Ann _ (Block ss)) = Fix (Block ss)
phi (Ann _ EmptyStmt) = Fix EmptyStmt
phi (Ann _ (IncDecStmt d expr)) = Fix (IncDecStmt d (bareExpr expr))
-- | Removes source annotations from a case head.
bareCaseHead :: SrcAnnCaseHead -> BasicCaseHead
bareCaseHead CaseDefault = CaseDefault
bareCaseHead (CaseExpr es) = CaseExpr (map bareExpr es)
-- | Removes source annotations from an expression and all its inner expressions.
bareExpr :: SrcAnnExpr -> BasicExpr
bareExpr = cata phi where
phi (Ann _ (BinaryOp op e e')) = Fix (BinaryOp (bare op) e e')
phi (Ann _ (UnaryOp op e)) = Fix (UnaryOp (bare op) e)
phi (Ann _ (Conversion ty e)) = Fix (Conversion (bareType ty) e)
phi (Ann _ (Selector e i)) = Fix (Selector e (bare i))
phi (Ann _ (Index e e')) = Fix (Index e e')
phi (Ann _ (Slice e0 e1 e2 e3)) = Fix (Slice e0 e1 e2 e3)
phi (Ann _ (TypeAssertion e ty)) = Fix (TypeAssertion e (bareType ty))
phi (Ann _ (Call e ty es)) = Fix (Call e (bareType <$> ty) es)
phi (Ann _ (Literal lit)) = Fix (Literal (bare lit))
phi (Ann _ (Variable i)) = Fix (Variable (bare i))
-- | Removes source annotations from a type and all its inner types.
bareType :: SrcAnnType -> BasicType
bareType = cata phi where
phi (Ann _ (SliceType ty)) = Fix (SliceType ty)
phi (Ann _ (ArrayType i ty)) = Fix (ArrayType
(Identity (getConst (bare i)))
ty)
phi (Ann _ (NamedType i)) = Fix (NamedType (bare i))
phi (Ann _ (StructType fields)) = Fix (StructType (map
(\(i, ty) -> (bare i, ty)) fields))
-- | 'Package' with source annotations.
type SrcAnnPackage
= Package SrcAnnIdent SrcAnnTopLevelDecl
-- | 'TopLevelDecl' with source annotations.
type SrcAnnTopLevelDecl
= TopLevelDecl SrcAnnDeclaration SrcAnnFunDecl
-- | 'VarDecl' with source annotations.
type SrcAnnVarDecl
= VarDecl SrcAnnIdent SrcAnnType SrcAnnExpr
-- | 'TypeDecl' with source annotations.
type SrcAnnTypeDecl
= TypeDecl SrcAnnIdent SrcAnnType
-- | 'FunDecl' with source annotations.
type SrcAnnFunDecl
= FunDecl SrcAnnIdent SrcAnnType (Maybe SrcAnnType) SrcAnnStatement
-- | 'TypeF' with source annotations.
type SrcAnnTypeF
= TypeF SrcAnnIdent SrcAnnGoInt
-- | 'SrcAnnFix' with source annotations.
type SrcAnnType
= SrcAnnFix SrcAnnTypeF
-- | 'Statement' with source annotations.
type SrcAnnStatementF
= StatementF
SrcAnnDeclaration
SrcAnnExpr
SrcAnnIdent
SrcAnnAssignOp
SrcAnnCaseHead
-- | 'SrcAnnFix' with source annotations.
type SrcAnnStatement
= SrcAnnFix SrcAnnStatementF
-- | 'CaseHead' with source annotations.
type SrcAnnCaseHead
= CaseHead SrcAnnExpr
-- | 'Declaration' with source annotations.
type SrcAnnDeclaration
= Declaration SrcAnnTypeDecl SrcAnnVarDecl
-- | 'ExprF' with source annotations.
type SrcAnnExprF
= ExprF SrcAnnIdent SrcAnnIdent SrcAnnBinaryOp SrcAnnUnaryOp SrcAnnLiteral SrcAnnType
-- | 'SrcAnnFix' with source annotations.
type SrcAnnExpr
= SrcAnnFix SrcAnnExprF
-- | 'BinaryOp' with source annotations.
type SrcAnnBinaryOp
= SrcAnn BinaryOp ()
-- | 'AssignOp' with source annotations.
type SrcAnnAssignOp
= SrcAnn AssignOp ()
-- | 'UnaryOp' with source annotations.
type SrcAnnUnaryOp
= SrcAnn UnaryOp ()
-- | 'Literal' with source annotations.
type SrcAnnLiteral
= SrcAnn Literal ()
-- | 'Ident' with source annotations.
type SrcAnnIdent
= SrcAnn Ident ()
-- | 'GoInt' with source annotations.
type SrcAnnGoInt
= SrcAnn (Const GoInt) ()
-- | 'GoFloat' with source annotations.
type SrcAnnGoFloat
= SrcAnn (Const GoFloat) ()
-- | 'GoRune' with source annotations.
type SrcAnnGoRune
= SrcAnn (Const GoRune) ()
-- | 'GoString' with source annotations.
type SrcAnnGoString
= SrcAnn (Const GoString) ()
-- | Annotated functors can be pretty-printed by stripping the annotations and
-- pretty-printing the inner syntax tree.
instance (Functor f, Pretty (Fix f)) => Pretty (SrcAnnFix f) where
pretty = pretty . bareF
-- | Annotated data can be pretty-printed by stripping the annotation and
-- pretty-printing the inner data.
instance Pretty (f a) => Pretty (SrcAnn f a) where
pretty = pretty . bare
|
djeik/goto
|
libgoto/Language/GoLite/Syntax/SrcAnn.hs
|
mit
| 11,104 | 0 | 17 | 2,469 | 2,739 | 1,444 | 1,295 | 214 | 15 |
import Drawing
import Geometry
main = drawPicture myPicture
myPicture points =
drawLine (a,b) &
drawLine (c,d) &
drawLine (f,g) &
drawLabels [a,b,c,d,e,f,g] ["A","B","C","D","E","F","G"]&
message $ "Parallel Lines"
where [a,b] = take 2 points
[c,d] = circle_circle (a,b) (b,a)
[e] = line_line (a,b) (c,d)
[f,g] = circle_circle (d,e) (e,d)
|
alphalambda/k12math
|
contrib/MHills/GeometryLessons/code/teacher/key_lesson5c.hs
|
mit
| 405 | 0 | 11 | 114 | 220 | 126 | 94 | 13 | 1 |
module Credentials (
credentialsForAppName,
) where
import System.Environment
import System.IO.Error
import Network.HTTP.Authentication.Basic
import Data.Char
import Data.Monoid
import Data.List (uncons)
-- Look up credentials for the given app name, fall back to the global
-- credentials if there aren't app specific ones set.
credentialsForAppName :: String -> IO Credentials
credentialsForAppName app_name = do
let appCredentials = credentialsForName $ "API_CREDENTIALS" <> "_" <> (toUpper <$> app_name)
catchIOError appCredentials (const defaultCredentials)
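-- Illustrative example (not part of the original module): for an app named
-- "router", this consults the environment variable API_CREDENTIALS_ROUTER,
-- expected to hold a value of the form "user:token"; if it is unset, the
-- fallback reads API_SECRET and uses an empty user name.
--
-- > API_CREDENTIALS_ROUTER="alice:s3cret" ==> Credentials "alice" "s3cret"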
credentialsForName :: String -> IO Credentials
credentialsForName name = do
credConfig <- lookupEnv name
case breakOn ':' <$> credConfig of
Nothing -> fail ("no credentials for " <> name)
Just (user, pass) -> return $ Credentials user pass
where
breakOn :: Eq a => a -> [a] -> ([a], [a])
breakOn x xs = let p = (/= x)
head' = takeWhile p xs
tail' = dropWhile p xs
tail'' = case uncons tail' of
Nothing -> tail'
Just (_, y) -> y
in (head', tail'')
defaultCredentials :: IO Credentials
defaultCredentials = do
secret <- lookupEnv "API_SECRET"
case Credentials "" <$> secret of
Nothing -> fail "no default credentials specified"
Just c -> return c
|
keithduncan/logplex-statsd
|
src/Credentials.hs
|
mit
| 1,429 | 0 | 15 | 404 | 375 | 194 | 181 | 32 | 3 |
import Data.Char (isLower, isUpper) -- used by funChar and blackToWhiteAString
charName :: Char -> String
charName 'a' = "Wa lala wa"
charName 'b' = "Zabar dast"
charName 'c' = "Chal Hunr Dafa ho"
charName a = error "Dafa ho sahi input da! :/ "
map' f [] = []
map' f xs = [f x | x <- xs]
fil' :: (a->Bool) -> [a] -> [a]
fil' f [] = []
fil' f xs = [x | x <- xs, f x]
fil'' p[] = []
fil'' p (x:xs) | p x = x:fil'' p xs
| otherwise = fil'' p xs
len :: [a]-> Int
len [] = 0
len (x:xs) = 1 + len xs
str = "My long \n\
\ long"
funChar :: Char -> Char
funChar a
| isLower(a)==True = ' '
| isUpper(a)==True = ' '
| otherwise = '@'
changer :: [Char] -> [Char] -> [Char]
changer [] res = res
changer (x:xs) res = changer xs (res ++ (if x == ' ' then "%20" else [x]))
sanitize :: [Char] -> [Char]
sanitize xs = changer xs ""
blackToWhiteAString :: [Char] -> [Char]
blackToWhiteAString bs = [a | b <- bs, a <-
if (b == ' ')
then "X"
else if (isLower(b)==True)
then " "
else if (isUpper(b)==True)
then " "
else [b]]
compPic :: [String] -> [String]
compPic [] = []
compPic (a:as) = map (a)
showMatBlackToWhite :: [String] -> IO()
showMatBlackToWhite strList = putStr (concat (stretch 1 1 (compPic (strList)) ))
--Length of String
len s = length s;
--List of String length sum
str :: [String] -> Int
str a = sum (map len a)
hStretchChar :: Int -> Char -> String
hStretchChar i ch = replicate i ch
hStretchString :: Int -> String -> String
hStretchString i sts = concat ( map ( hStretchChar i) sts)
hStretchListOfString :: Int -> [String] -> [String]
hStretchListOfString i stlist = map (hStretchString i ) stlist
vStretchString :: Int -> String -> String
vStretchString i str = concat (replicate i (str ++ "\n"))
vStretchListOfString :: Int -> [String] -> [String]
vStretchListOfString i strList = map (vStretchString i) strList
stretch :: Int -> Int -> [String] -> [String]
stretch i j strList = vStretchListOfString i (hStretchListOfString j strList)
showPic :: BPic -> IO()
showPic strList = putStr (concat (stretch 1 1 strList))
--showPic strList = putStr (concat (map (++"\n") strList))
listofNumbers a = listofNumbersEqual (length a) a
listofNumbersEqual x a
| (head a)== a!!(x-1) = True
| otherwise = False
isEqual :: [Int] -> Bool
isEqual a = all (==(head a)) a
isEqual' :: [Int] -> Bool
isEqual' [] = True
isEqual' (x:xs) = not $ any (/= x) xs
f (a:as) (b:bs) = [a | a <- as, b <- bs,x <-
if (a == ' ' || b== ' ')
then a:[]
else if (a == 'X' || b== ' ')
then a:[]
else if (a == 'X' || b== 'X')
then a:[]
else if (a == ' ' || b== 'X')
then a:[]
else []]
aa (a:as) (b:bs)= [f a b | a <- as, b <- bs]
shiftPic :: Int -> Int -> BPic -> BPic
shiftPic num1 num2 list
| num2<=0 = iterate rolldown list !! abs(num2)
| num2>=0 = iterate rollup list !! num2
| num1<=0 = iterate moveleft list !! abs (num1)
frames :: String -> Char -> String
frames [] b = []
frames (a:as) b = ([b] ++ [a] ++ [b]) ++ frames as b
|
badarshahzad/Learn-Haskell
|
Solution of Weekly labs/main.hs
|
mit
| 3,273 | 1 | 15 | 1,028 | 1,502 | 781 | 721 | 86 | 5 |
module Language.Python.Parser.State where
import qualified Text.Parsec as P
data ParseState = ParseState {
} deriving Show
|
Garciat/Hucc
|
src/Language/Python/Parser/State.hs
|
mit
| 132 | 1 | 5 | 25 | 29 | 20 | 9 | 4 | 0 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE NoStarIsType #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
module Main where
import TH_unresolvedInfix_Lib
import Language.Haskell.TH
--------------------------------------------------------------------------------
-- Expressions --
--------------------------------------------------------------------------------
exprs = [
-------------- Completely-unresolved bindings
$( n +? (n *? n) ),
$( (n +? n) *? n ),
$( n +? (n +? n) ),
$( (n +? n) +? n ),
-- VarE version
$( uInfixE n plus2 (uInfixE n plus2 n) ),
$( uInfixE (uInfixE n plus2 n) plus2 n ),
$( uInfixE n plus3 (uInfixE n plus3 n) ),
$( uInfixE (uInfixE n plus3 n) plus3 n ),
--------------- Completely-resolved bindings
$( n +! (n *! n) ),
$( (n +! n) *! n ),
$( n +! (n +! n) ),
$( (n +! n) +! n ),
-------------- Mixed resolved/unresolved
$( (n +! n) *? (n +? n) ),
$( (n +? n) *? (n +! n) ),
$( (n +? n) *! (n +! n) ),
$( (n +? n) *! (n +? n) ),
-------------- Parens
$( ((parensE ((n +? n) *? n)) +? n) *? n ),
$( (parensE (n +? n)) *? (parensE (n +? n)) ),
$( parensE ((n +? n) *? (n +? n)) ),
-------------- Sections
$( infixE (Just $ n +? n) plus Nothing ) N,
-- see B.hs for the (non-compiling) other version of the above
$( infixE Nothing plus (Just $ parensE $ uInfixE n plus n) ) N
]
--------------------------------------------------------------------------------
-- Patterns --
--------------------------------------------------------------------------------
patterns = [
-------------- Completely-unresolved patterns
case N :+ (N :* N) of
[p1|unused|] -> True,
case N :+ (N :* N) of
[p2|unused|] -> True,
case (N :+ N) :+ N of
[p3|unused|] -> True,
case (N :+ N) :+ N of
[p4|unused|] -> True,
-------------- Completely-resolved patterns
case N :+ (N :* N) of
[p5|unused|] -> True,
case (N :+ N) :* N of
[p6|unused|] -> True,
case N :+ (N :+ N) of
[p7|unused|] -> True,
case (N :+ N) :+ N of
[p8|unused|] -> True,
-------------- Mixed resolved/unresolved
case ((N :+ N) :* N) :+ N of
[p9|unused|] -> True,
case N :+ (N :* (N :+ N)) of
[p10|unused|] -> True,
case (N :+ N) :* (N :+ N) of
[p11|unused|] -> True,
case (N :+ N) :* (N :+ N) of
[p12|unused|] -> True,
-------------- Parens
case (N :+ (N :* N)) :+ (N :* N) of
[p13|unused|] -> True,
case (N :+ N) :* (N :+ N) of
[p14|unused|] -> True,
case (N :+ (N :* N)) :+ N of
[p15|unused|] -> True
]
--------------------------------------------------------------------------------
-- Types --
--------------------------------------------------------------------------------
-------------- Completely-unresolved types
_t1 = 1 `Plus` (1 `Times` 1) :: $( int $+? (int $*? int) )
_t2 = 1 `Plus` (1 `Times` 1) :: $( (int $+? int) $*? int )
_t3 = (1 `Plus` 1) `Plus` 1 :: $( int $+? (int $+? int) )
_t4 = (1 `Plus` 1) `Plus` 1 :: $( (int $+? int) $+? int )
-------------- Completely-resolved types
_t5 = 1 `Plus` (1 `Times` 1) :: $( int $+! (int $*! int) )
_t6 = (1 `Plus` 1) `Times` 1 :: $( (int $+! int) $*! int )
_t7 = 1 `Plus` (1 `Plus` 1) :: $( int $+! (int $+! int) )
_t8 = (1 `Plus` 1) `Plus` 1 :: $( (int $+! int) $+! int )
-------------- Mixed resolved/unresolved
_t9 = ((1 `Plus` 1) `Times` 1) `Plus` 1 :: $( (int $+! int) $*? (int $+? int) )
_t10 = 1 `Plus` (1 `Times` (1 `Plus` 1)) :: $( (int $+? int) $*? (int $+! int) )
_t11 = (1 `Plus` 1) `Times` (1 `Plus` 1) :: $( (int $+? int) $*! (int $+! int) )
_t12 = (1 `Plus` 1) `Times` (1 `Plus` 1) :: $( (int $+? int) $*! (int $+? int) )
-------------- Parens
_t13 = (1 `Plus` (1 `Times` 1)) `Plus` (1 `Times` 1) :: $( ((parensT ((int $+? int) $*? int)) $+? int) $*? int )
_t14 = (1 `Plus` 1) `Times` (1 `Plus` 1) :: $( (parensT (int $+? int)) $*? (parensT (int $+? int)) )
_t15 = (1 `Plus` (1 `Times` 1)) `Plus` 1 :: $( parensT ((int $+? int) $*? (int $+? int)) )
main = do
mapM_ print exprs
mapM_ print patterns
-- check that there are no Parens or UInfixes in the output
runQ [|N :* N :+ N|] >>= print
runQ [|(N :* N) :+ N|] >>= print
runQ [p|N :* N :+ N|] >>= print
runQ [p|(N :* N) :+ N|] >>= print
runQ [t|Int * Int + Int|] >>= print
runQ [t|(Int * Int) + Int|] >>= print
-- pretty-printing of unresolved infix expressions
let ne = ConE $ mkName "N"
np = ConP (mkName "N") []
nt = ConT (mkName "Int")
plusE = ConE (mkName ":+")
plusP = (mkName ":+")
plusT = (mkName "+")
putStrLn $ pprint (InfixE (Just ne) plusE (Just $ UInfixE ne plusE (UInfixE ne plusE ne)))
putStrLn $ pprint (ParensE ne)
putStrLn $ pprint (InfixP np plusP (UInfixP np plusP (UInfixP np plusP np)))
putStrLn $ pprint (ParensP np)
putStrLn $ pprint (InfixT nt plusT (UInfixT nt plusT (UInfixT nt plusT nt)))
putStrLn $ pprint (ParensT nt)
|
ghcjs/ghcjs
|
test/ghc/th/tH_unresolvedInfix.hs
|
mit
| 5,101 | 0 | 16 | 1,321 | 2,125 | 1,222 | 903 | 96 | 1 |
-- |
-- Module : $Header$
-- Description : Definition of an abstract expression language as the first IR for the Ohua compiler.
-- Copyright : (c) Sebastian Ertel, Justus Adam 2017. All Rights Reserved.
-- License : EPL-1.0
-- Maintainer : [email protected], [email protected]
-- Stability : experimental
-- Portability : portable
-- This source code is licensed under the terms described in the associated LICENSE.TXT file
--
-- Passes required to transform an expression in ALang into an expression in DFLang.
--
module Ohua.DFLang.Passes where
import Ohua.Prelude
import Control.Lens (at, non)
import Control.Monad (msum)
import Control.Monad.Tardis
import Control.Monad.Writer (MonadWriter, runWriterT, tell)
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.IntMap.Strict as IM
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Ohua.ALang.Lang
import Ohua.ALang.PPrint
import Ohua.DFLang.Lang
import qualified Ohua.DFLang.Refs as Refs
import Ohua.Stage
type Pass m
= QualifiedBinding -> FnId -> Binding -> [Expression] -> m (Seq LetExpr)
runCorePasses :: (MonadOhua m, Pretty DFExpr) => DFExpr -> m DFExpr
runCorePasses expr = do
let ctrlOptimized = collapseNth (== nodeRef Refs.ctrl) expr
stage "ctrl-optimization" ctrlOptimized
let ifOptimized = collapseNth (== nodeRef Refs.ifFun) ctrlOptimized
stage "if-optimization" ifOptimized
let smapOptimized = collapseNth (== nodeRef Refs.smapFun) ifOptimized
stage "smap-optimized" smapOptimized
return smapOptimized
-- | Check that a sequence of let expressions does not redefine bindings.
checkSSA :: (Container c, Element c ~ LetExpr, MonadOhua m) => c -> m ()
checkSSA = flip evalStateT mempty . mapM_ go
where
go le = do
defined <- get
let produced = output le
f a
| HS.member a defined = Just a
f _ = Nothing
case msum $ map f produced of
Just b -> failWith $ "Rebinding of " <> show b
Nothing -> return ()
modify (addAll produced)
addAll = flip $ foldr' HS.insert
-- | Check that a DFExpression is in SSA form.
checkSSAExpr :: MonadOhua m => DFExpr -> m ()
checkSSAExpr (DFExpr l _) = checkSSA l
-- | Transform an ALang expression into a DFExpression.
-- This assumes a certain structure in the expression.
-- This can be achieved with the 'normalize' and 'performSSA' functions and tested with
-- 'checkProgramValidity'.
lowerALang :: MonadOhua m => Expression -> m DFExpr
lowerALang expr
-- traceM $ "Lowering alang expr: " <> quickRender expr
= do
logDebugN $ "Lowering alang expr: " <> quickRender expr
(var, exprs) <- runWriterT $ lowerToDF expr
return $ DFExpr exprs var
-- (var, exprs) <- runWriterT (go expr)
lowerToDF ::
(MonadOhua m, MonadWriter (Seq LetExpr) m) => Expression -> m Binding
lowerToDF (Var bnd) = pure bnd
lowerToDF (Let assign expr rest) = do
logDebugN "Lowering Let -->"
handleDefinitionalExpr assign expr continuation
where
continuation = lowerToDF rest
lowerToDF g = failWith $ "Expected `let` or binding: " <> show g
handleDefinitionalExpr ::
(MonadOhua m, MonadWriter (Seq LetExpr) m)
=> Binding
-> Expression
-> m Binding
-> m Binding
handleDefinitionalExpr assign l@(Apply _ _) cont = do
(fn, fnId, s, args) <- handleApplyExpr l
e <- lowerDefault fn fnId assign args
tell $ pure e {stateArgument = s}
cont
handleDefinitionalExpr _ e _ =
failWith $ "Definitional expressions in a let can only be 'apply' but got: " <>
show e
-- | Lower any not specially treated function type.
lowerDefault ::
MonadOhua m
=> QualifiedBinding
-> FnId
-> Binding
-> [Expression]
-> m LetExpr
lowerDefault fn fnId assign args =
mapM expectVar args <&> \args' ->
LetExpr fnId [assign] (lowerFnToDFLang fn) Nothing args'
where
lowerFnToDFLang = fromMaybe (EmbedSf fn) . Refs.lowerBuiltinFunctions
-- | Analyze an apply expression, extracting the inner stateful
-- function and the nested arguments as a list. Also generates a new
-- function id for the inner function should it not have one yet.
handleApplyExpr ::
(MonadOhua m)
=> Expression
-> m (QualifiedBinding, FnId, Maybe DFVar, [Expression])
handleApplyExpr l@(Apply _ _) = go [] l
where
go args =
\case
ve@Var {} ->
fromEnv (options . callLocalFunction) >>= \case
Nothing ->
failWith
"Calling local functions is not supported in this adapter"
Just fn -> (fn, , Nothing, ve : args) <$> generateId
PureFunction fn fnId ->
(fn, , Nothing, args) <$> maybe generateId return fnId
StatefulFunction fn fnId state -> do
state' <- expectVar state
(fn, , Just $ state', args) <$> maybe generateId return fnId
ve@(Lit v) ->
case v of
EnvRefLit _ ->
fromEnv (options . callEnvExpr) >>= \case
Nothing ->
failWith
"Calling environment functions is not supported in this adapter"
Just fn -> (fn, , Nothing, ve : args) <$> generateId
other ->
throwError $
"This literal cannot be used as a function :" <>
show (pretty other)
Apply fn arg -> go (arg : args) fn
x ->
failWith $ "Expected Apply or Var but got: " <>
show (x :: Expression)
handleApplyExpr (PureFunction fn fnId) =
(fn, , Nothing, []) <$> maybe generateId return fnId
-- what is this?
handleApplyExpr g = failWith $ "Expected apply but got: " <> show g
-- | Inspect an expression expecting something which can be captured
-- in a DFVar otherwise throws appropriate errors.
expectVar :: (HasCallStack, MonadError Error m) => Expression -> m DFVar
expectVar (Var bnd) = pure $ DFVar bnd
-- TODO currently only allowed for the unitFn function
-- expectVar r@PureFunction {} =
-- throwError $
-- "Function references are not yet supported as arguments: " <>
-- show (pretty r)
expectVar (Lit l) = pure $ DFEnvVar l
expectVar a =
throwErrorS $ "Argument must be local binding or literal, was " <> show a
-- In this function I use the so-called 'Tardis' monad, which is a special state
-- monad. It has one state that travels "forward" in time, which is the same as
-- the regular state monad, but it also has a second state that uses laziness to
-- travel "backwards" in time, meaning that reading the state gives you the
-- value you'll be setting later. This works fine so long as there are no cyclic
-- dependencies between the states (which is fairly easy to get wrong).
--
-- Anyhow, the way it's used here is that when I find a target function (specified
-- by the `selectionFunction`), I record its outputs in the forwards traveling
-- state to signal that functions using it should be removed. I then look at the
-- backwards traveling state to see which bindings the destructuring of this
-- function created and I use them to compose its new output.
--
-- When I find an `nth` I look up its inputs in the forward traveling state. If
-- I find an entry, then this `nth` belongs to a destructuring that should be
-- collapsed. I remove the nth and I record the output binding and the index the
-- nth got in the *backwards traveling state*.
--
-- In this way I can use the backwards traveling state to look into the future
-- and immediately see the bindings that a function destructures into. This is
-- what allows me to write this transformation with just a single DFLang pass.
collapseNth :: (QualifiedBinding -> Bool) -> DFExpr -> DFExpr
collapseNth selectionFunction =
field @"letExprs" %~ Seq.fromList . catMaybes .
flip evalTardis (mempty, mempty) .
traverse go .
toList
where
go e@LetExpr {output = [oldOut], functionRef = DFFnRef _ fun}
| selectionFunction fun = do
removedVals <- requestRemoval oldOut
-- TODO do error handling here. Make sure no index is missing
let newOuts = IM.elems removedVals
return $
Just
e {output = newOuts, functionRef = DFFnRef OperatorNode fun}
| [DFEnvVar (NumericLit index), _len, DFVar source] <- callArguments e =
ifM
(queryRemoval source)
(recordRemoval source oldOut index >> return Nothing)
(return $ Just e)
go e = return $ Just e
requestRemoval bnd
-- Record the binding as source for removal
= do
modifyForwards $ HS.insert bnd
-- Ask the future which bindings it was destructured into
getsFuture $ view $ at bnd . non mempty
queryRemoval = getsPast . HS.member
-- Tell the past that this binding was destructured at this index
recordRemoval ::
MonadTardis (HM.HashMap Binding (IM.IntMap Binding)) any m
=> Binding
-> Binding
-> Integer
-> m ()
recordRemoval source produced (fromInteger -> index) =
modifyBackwards $ at source . non mempty . at index .~ Just produced
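-- A minimal sketch (not part of the original module) of the tardis pattern
-- described in the comment above 'collapseNth', assuming the
-- 'Control.Monad.Tardis' API imported at the top of this file. The forwards
-- state counts the elements already visited, while the backwards state
-- carries the running sum of the current and all later elements, so every
-- step can "look into the future". The name '_tardisSketch' is hypothetical
-- and unused elsewhere.
_tardisSketch :: [Int] -> [(Int, Int)]
_tardisSketch xs = flip evalTardis (0, 0) (traverse step xs)
  where
    step x = do
        idx <- getPast -- forwards-travelling state: how many elements came before
        rest <- getFuture -- backwards-travelling state: sum of this and all later elements
        modifyForwards (+ 1)
        modifyBackwards (+ x)
        return (idx, rest)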
|
ohua-dev/ohua-core
|
core/src/Ohua/DFLang/Passes.hs
|
epl-1.0
| 9,538 | 0 | 19 | 2,636 | 1,963 | 1,015 | 948 | -1 | -1 |
module Folgerhs.Parse ( parseCorpus
, corpus
, parse
) where
import Data.Maybe
import Data.List
import Text.XML.Light.Input (parseXML)
import Text.XML.Light.Proc (onlyElems, elChildren)
import Text.XML.Light.Types (QName (..), Element (..), Content, Attr (..) )
import Folgerhs.Stage
isTag :: String -> Element -> Bool
isTag n = (==) n . qName . elName
drillTagPath :: [String] -> [Element] -> [Element]
drillTagPath [] = id
drillTagPath (n:ns) = drillTagPath ns . concatMap elChildren . filter (isTag n)
attr :: String -> Element -> Maybe String
attr n = listToMaybe . map attrVal . filter ((==) n . qName . attrKey) . elAttribs
descendants :: Element -> [Element]
descendants e = e : concatMap descendants (elChildren e)
corpus :: [Content] -> [Element]
corpus = concatMap descendants . drillTagPath ["TEI", "text", "body"] . onlyElems
charName :: String -> Character
charName c = let n = fromMaybe c (stripPrefix "#" c)
in case span (/= '_') $ reverse n of
("", p) -> p
(s, "") -> s
(s, p) -> reverse $ tail p
parseElement :: Element -> Maybe StageEvent
parseElement el
| isTag "milestone" el = case (attr "unit" el, attr "n" el) of
(Just "ftln", Just n) -> Just $ Milestone n
_ -> Nothing
| isTag "sp" el = case attr "who" el of
Just s -> Just $ Speech (charName s)
_ -> Nothing
| isTag "stage" el = case (attr "type" el, attr "who" el) of
(Just "entrance", Just cs) -> Just $ Entrance (map charName $ words cs)
(Just "exit", Just cs) -> Just $ Exit (map charName $ words cs)
_ -> Nothing
| otherwise = Nothing
parseCorpus :: [Element] -> [StageEvent]
parseCorpus [] = []
parseCorpus (e:es) = case parseElement e of
Just se -> se : parseCorpus es
Nothing -> parseCorpus es
parse :: String -> [StageEvent]
parse input = let content = parseXML input
in parseCorpus (corpus content)
|
SU-LOSP/folgerhs
|
src/Folgerhs/Parse.hs
|
gpl-3.0
| 2,181 | 0 | 13 | 708 | 809 | 418 | 391 | 47 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <[email protected]>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module DBusTests.Transport (test_Transport) where
import Test.Chell
import Control.Concurrent
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.ByteString
import Data.Function (fix)
import Data.List (isPrefixOf)
import qualified Data.Map as Map
import qualified Network as N
import qualified Network.Socket as NS
import Network.Socket.ByteString (sendAll, recv)
import System.Directory (getTemporaryDirectory, removeFile)
import DBus
import DBus.Transport
import DBusTests.Util
test_Transport :: Suite
test_Transport = suite "Transport" $
suiteTests suite_TransportOpen ++
suiteTests suite_TransportListen ++
suiteTests suite_TransportAccept ++
[ test_TransportSendReceive
, test_HandleLostConnection
]
suite_TransportOpen :: Suite
suite_TransportOpen = suite "transportOpen" $
[ test_OpenUnknown
] ++ suiteTests suite_OpenUnix
++ suiteTests suite_OpenTcp
suite_TransportListen :: Suite
suite_TransportListen = suite "transportListen" $
[ test_ListenUnknown
] ++ suiteTests suite_ListenUnix
++ suiteTests suite_ListenTcp
suite_TransportAccept :: Suite
suite_TransportAccept = suite "transportAccept"
[ test_AcceptSocket
, test_AcceptSocketClosed
]
test_OpenUnknown :: Test
test_OpenUnknown = assertions "unknown" $ do
let Just addr = address "noexist" Map.empty
$assert $ throwsEq
((transportError "Unknown address method: \"noexist\"")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
suite_OpenUnix :: Suite
suite_OpenUnix = suite "unix"
[ test_OpenUnix_Path
, test_OpenUnix_Abstract
, test_OpenUnix_TooFew
, test_OpenUnix_TooMany
, test_OpenUnix_NotListening
]
test_OpenUnix_Path :: Test
test_OpenUnix_Path = assertions "path" $ do
(addr, networkSocket) <- listenRandomUnixPath
afterTest (N.sClose networkSocket)
fdcountBefore <- countFileDescriptors
t <- liftIO (transportOpen socketTransportOptions addr)
liftIO (transportClose t)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenUnix_Abstract :: Test
test_OpenUnix_Abstract = assertions "abstract" $ do
(addr, networkSocket) <- listenRandomUnixAbstract
afterTest (N.sClose networkSocket)
fdcountBefore <- countFileDescriptors
t <- liftIO (transportOpen socketTransportOptions addr)
liftIO (transportClose t)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenUnix_TooFew :: Test
test_OpenUnix_TooFew = assertions "too-few" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "unix" Map.empty
$assert $ throwsEq
((transportError "One of 'path' or 'abstract' must be specified for the 'unix' transport.")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenUnix_TooMany :: Test
test_OpenUnix_TooMany = assertions "too-many" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "unix" (Map.fromList
[ ("path", "foo")
, ("abstract", "bar")
])
$assert $ throwsEq
((transportError "Only one of 'path' or 'abstract' may be specified for the 'unix' transport.")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenUnix_NotListening :: Test
test_OpenUnix_NotListening = assertions "not-listening" $ do
fdcountBefore <- countFileDescriptors
(addr, networkSocket) <- listenRandomUnixAbstract
liftIO (NS.sClose networkSocket)
$assert $ throwsEq
((transportError "connect: does not exist (Connection refused)")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
suite_OpenTcp :: Suite
suite_OpenTcp = suite "tcp"
[ test_OpenTcp_IPv4
, skipWhen noIPv6 test_OpenTcp_IPv6
, test_OpenTcp_Unknown
, test_OpenTcp_NoPort
, test_OpenTcp_InvalidPort
, test_OpenTcp_NoUsableAddresses
, test_OpenTcp_NotListening
]
test_OpenTcp_IPv4 :: Test
test_OpenTcp_IPv4 = assertions "ipv4" $ do
(addr, networkSocket) <- listenRandomIPv4
afterTest (N.sClose networkSocket)
fdcountBefore <- countFileDescriptors
t <- liftIO (transportOpen socketTransportOptions addr)
liftIO (transportClose t)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_IPv6 :: Test
test_OpenTcp_IPv6 = assertions "ipv6" $ do
(addr, networkSocket) <- listenRandomIPv6
afterTest (N.sClose networkSocket)
fdcountBefore <- countFileDescriptors
t <- liftIO (transportOpen socketTransportOptions addr)
liftIO (transportClose t)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_Unknown :: Test
test_OpenTcp_Unknown = assertions "unknown-family" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "tcp" (Map.fromList
[ ("family", "noexist")
, ("port", "1234")
])
$assert $ throwsEq
((transportError "Unknown socket family for TCP transport: \"noexist\"")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_NoPort :: Test
test_OpenTcp_NoPort = assertions "no-port" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
])
$assert $ throwsEq
((transportError "TCP transport requires the `port' parameter.")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_InvalidPort :: Test
test_OpenTcp_InvalidPort = assertions "invalid-port" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
, ("port", "123456")
])
$assert $ throwsEq
((transportError "Invalid socket port for TCP transport: \"123456\"")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_NoUsableAddresses :: Test
test_OpenTcp_NoUsableAddresses = assertions "no-usable-addresses" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
, ("port", "1234")
, ("host", "256.256.256.256")
])
$assert $ throws
(\err -> and
[ "getAddrInfo: does not exist" `isPrefixOf` transportErrorMessage err
, transportErrorAddress err == Just addr
])
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_OpenTcp_NotListening :: Test
test_OpenTcp_NotListening = assertions "too-many" $ do
fdcountBefore <- countFileDescriptors
(addr, networkSocket) <- listenRandomIPv4
liftIO (NS.sClose networkSocket)
$assert $ throwsEq
((transportError "connect: does not exist (Connection refused)")
{ transportErrorAddress = Just addr
})
(transportOpen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_TransportSendReceive :: Test
test_TransportSendReceive = assertions "send-receive" $ do
(addr, networkSocket) <- listenRandomIPv4
afterTest (N.sClose networkSocket)
-- a simple echo server, which sends back anything it receives.
_ <- liftIO $ forkIO $ do
(s, _) <- NS.accept networkSocket
fix $ \loop -> do
bytes <- recv s 50
if Data.ByteString.null bytes
then NS.sClose s
else do
sendAll s bytes
loop
t <- liftIO (transportOpen socketTransportOptions addr)
afterTest (transportClose t)
-- small chunks of data are combined
do
var <- forkVar (transportGet t 3)
liftIO (transportPut t "1")
liftIO (transportPut t "2")
liftIO (transportPut t "3")
bytes <- liftIO (readMVar var)
$assert (equal bytes "123")
-- large chunks of data are read in full
do
let sentBytes = Data.ByteString.replicate (4096 * 100) 0
var <- forkVar (transportGet t (4096 * 100))
liftIO (transportPut t sentBytes)
bytes <- liftIO (readMVar var)
$assert (equal bytes sentBytes)
test_HandleLostConnection :: Test
test_HandleLostConnection = assertions "handle-lost-connection" $ do
(addr, networkSocket) <- listenRandomIPv4
afterTest (N.sClose networkSocket)
_ <- liftIO $ forkIO $ do
(s, _) <- NS.accept networkSocket
sendAll s "123"
NS.sClose s
t <- liftIO (transportOpen socketTransportOptions addr)
afterTest (transportClose t)
bytes <- liftIO (transportGet t 4)
$assert (equal bytes "123")
test_ListenUnknown :: Test
test_ListenUnknown = assertions "unknown" $ do
let Just addr = address "noexist" Map.empty
$assert $ throwsEq
((transportError "Unknown address method: \"noexist\"")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
suite_ListenUnix :: Suite
suite_ListenUnix = suite "unix"
[ test_ListenUnix_Path
, test_ListenUnix_Abstract
, test_ListenUnix_Tmpdir
, test_ListenUnix_TooFew
, test_ListenUnix_TooMany
, test_ListenUnix_InvalidBind
]
test_ListenUnix_Path :: Test
test_ListenUnix_Path = assertions "path" $ do
path <- liftIO getTempPath
let Just addr = address "unix" (Map.fromList
[ ("path", path)
])
l <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose l)
afterTest (removeFile path)
-- listener address is random, so it can't be checked directly.
let addrParams = addressParameters (transportListenerAddress l)
$expect (sameItems (Map.keys addrParams) ["path", "guid"])
$expect (equal (Map.lookup "path" addrParams) (Just path))
test_ListenUnix_Abstract :: Test
test_ListenUnix_Abstract = assertions "abstract" $ do
path <- liftIO getTempPath
let Just addr = address "unix" (Map.fromList
[ ("abstract", path)
])
l <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose l)
-- listener address is random, so it can't be checked directly.
let addrParams = addressParameters (transportListenerAddress l)
$expect (sameItems (Map.keys addrParams) ["abstract", "guid"])
$expect (equal (Map.lookup "abstract" addrParams) (Just path))
test_ListenUnix_Tmpdir :: Test
test_ListenUnix_Tmpdir = assertions "tmpdir" $ do
tmpdir <- liftIO getTemporaryDirectory
let Just addr = address "unix" (Map.fromList
[ ("tmpdir", tmpdir)
])
l <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose l)
-- listener address is random, so it can't be checked directly.
let addrKeys = Map.keys (addressParameters (transportListenerAddress l))
$expect ("path" `elem` addrKeys || "abstract" `elem` addrKeys)
test_ListenUnix_TooFew :: Test
test_ListenUnix_TooFew = assertions "too-few" $ do
let Just addr = address "unix" Map.empty
$assert $ throwsEq
((transportError "One of 'abstract', 'path', or 'tmpdir' must be specified for the 'unix' transport.")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
test_ListenUnix_TooMany :: Test
test_ListenUnix_TooMany = assertions "too-many" $ do
let Just addr = address "unix" (Map.fromList
[ ("path", "foo")
, ("abstract", "bar")
])
$assert $ throwsEq
((transportError "Only one of 'abstract', 'path', or 'tmpdir' may be specified for the 'unix' transport.")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
test_ListenUnix_InvalidBind :: Test
test_ListenUnix_InvalidBind = assertions "invalid-bind" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "unix" (Map.fromList
[ ("path", "/")
])
$assert $ throwsEq
((transportError "bind: resource busy (Address already in use)")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
suite_ListenTcp :: Suite
suite_ListenTcp = suite "tcp"
[ test_ListenTcp_IPv4
, skipWhen noIPv6 test_ListenTcp_IPv6
, test_ListenTcp_Unknown
, test_ListenTcp_InvalidPort
, test_ListenTcp_InvalidBind
]
test_ListenTcp_IPv4 :: Test
test_ListenTcp_IPv4 = assertions "ipv4" $ do
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
])
l <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose l)
let params = addressParameters (transportListenerAddress l)
$expect (equal (Map.lookup "family" params) (Just "ipv4"))
$expect ("port" `elem` Map.keys params)
test_ListenTcp_IPv6 :: Test
test_ListenTcp_IPv6 = assertions "ipv6" $ do
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv6")
])
l <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose l)
let params = addressParameters (transportListenerAddress l)
$expect (equal (Map.lookup "family" params) (Just "ipv6"))
$expect ("port" `elem` Map.keys params)
test_ListenTcp_Unknown :: Test
test_ListenTcp_Unknown = assertions "unknown-family" $ do
let Just addr = address "tcp" (Map.fromList
[ ("family", "noexist")
, ("port", "1234")
])
$assert $ throwsEq
((transportError "Unknown socket family for TCP transport: \"noexist\"")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
test_ListenTcp_InvalidPort :: Test
test_ListenTcp_InvalidPort = assertions "invalid-port" $ do
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
, ("port", "123456")
])
$assert $ throwsEq
((transportError "Invalid socket port for TCP transport: \"123456\"")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
test_ListenTcp_InvalidBind :: Test
test_ListenTcp_InvalidBind = assertions "invalid-bind" $ do
fdcountBefore <- countFileDescriptors
let Just addr = address "tcp" (Map.fromList
[ ("family", "ipv4")
, ("port", "1")
])
$assert $ throwsEq
((transportError "bind: permission denied (Permission denied)")
{ transportErrorAddress = Just addr
})
(transportListen socketTransportOptions addr)
fdcountAfter <- countFileDescriptors
$assert (equal fdcountBefore fdcountAfter)
test_AcceptSocket :: Test
test_AcceptSocket = assertions "socket" $ do
path <- liftIO getTempPath
let Just addr = address "unix" (Map.fromList
[ ("abstract", path)
])
listener <- liftIO (transportListen socketTransportOptions addr)
afterTest (transportListenerClose listener)
acceptedVar <- forkVar (transportAccept listener)
openedVar <- forkVar (transportOpen socketTransportOptions addr)
accepted <- liftIO (readMVar acceptedVar)
opened <- liftIO (readMVar openedVar)
afterTest (transportClose accepted)
afterTest (transportClose opened)
liftIO (transportPut opened "testing")
bytes <- liftIO (transportGet accepted 7)
$expect (equal bytes "testing")
test_AcceptSocketClosed :: Test
test_AcceptSocketClosed = assertions "socket-closed" $ do
path <- liftIO getTempPath
let Just addr = address "unix" (Map.fromList
[ ("abstract", path)
])
listener <- liftIO (transportListen socketTransportOptions addr)
let listeningAddr = transportListenerAddress listener
liftIO (transportListenerClose listener)
$assert $ throwsEq
((transportError "user error (accept: can't perform accept on socket ((AF_UNIX,Stream,0)) in status Closed)")
{ transportErrorAddress = Just listeningAddr
})
(transportAccept listener)
socketTransportOptions :: TransportOptions SocketTransport
socketTransportOptions = transportDefaultOptions
|
jotrk/haskell-dbus
|
tests/DBusTests/Transport.hs
|
gpl-3.0
| 16,585 | 378 | 19 | 2,612 | 4,697 | 2,308 | 2,389 | 387 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Adapter.CliArgs where
import Types.CliArguments
import Types.AppConfig
import Types.LocalConfig
import Types.Base
import Prelude hiding(FilePath)
--fromLocalConfig :: LocalConfig -> CliArgs
--fromLocalConfig local = Config file verbose
--fromLocalConfig (LocalConfig [] fallback verbose) = Config fallback verbose
|
diegospd/pol
|
app/Adapter/CliArgs.hs
|
gpl-3.0
| 373 | 0 | 5 | 55 | 39 | 26 | 13 | 7 | 0 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module Types where
import Data.IntMap (IntMap)
import Data.Aeson
import Control.Monad (mzero)
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>), (<*>))
#endif
data Point = Point
{ pointX :: Int
, pointY :: Int
} deriving (Show, Read, Eq, Ord)
instance FromJSON Point where
parseJSON (Object v) = Point <$> v .: "x" <*> v .: "y"
parseJSON _ = mzero
instance ToJSON Point where
toJSON (Point x y) = object ["x" .= x, "y" .= y]
type Collection = IntMap Int
|
mschristiansen/intro
|
src/Types.hs
|
gpl-3.0
| 534 | 0 | 9 | 111 | 185 | 105 | 80 | 16 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudMonitoring.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudMonitoring.Types.Sum where
import Network.Google.Prelude
-- | The aggregation function that will reduce the data points in each window
-- to a single point. This parameter is only valid for non-cumulative
-- metrics with a value type of INT64 or DOUBLE.
data TimeseriesListAggregator
= Max
-- ^ @max@
| Mean
-- ^ @mean@
| Min
-- ^ @min@
| Sum
-- ^ @sum@
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable TimeseriesListAggregator
instance FromHttpApiData TimeseriesListAggregator where
parseQueryParam = \case
"max" -> Right Max
"mean" -> Right Mean
"min" -> Right Min
"sum" -> Right Sum
x -> Left ("Unable to parse TimeseriesListAggregator from: " <> x)
instance ToHttpApiData TimeseriesListAggregator where
toQueryParam = \case
Max -> "max"
Mean -> "mean"
Min -> "min"
Sum -> "sum"
instance FromJSON TimeseriesListAggregator where
parseJSON = parseJSONText "TimeseriesListAggregator"
instance ToJSON TimeseriesListAggregator where
toJSON = toJSONText
-- | The aggregation function that will reduce the data points in each window
-- to a single point. This parameter is only valid for non-cumulative
-- metrics with a value type of INT64 or DOUBLE.
data TimeseriesDescriptorsListAggregator
= TDLAMax
-- ^ @max@
| TDLAMean
-- ^ @mean@
| TDLAMin
-- ^ @min@
| TDLASum
-- ^ @sum@
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable TimeseriesDescriptorsListAggregator
instance FromHttpApiData TimeseriesDescriptorsListAggregator where
parseQueryParam = \case
"max" -> Right TDLAMax
"mean" -> Right TDLAMean
"min" -> Right TDLAMin
"sum" -> Right TDLASum
x -> Left ("Unable to parse TimeseriesDescriptorsListAggregator from: " <> x)
instance ToHttpApiData TimeseriesDescriptorsListAggregator where
toQueryParam = \case
TDLAMax -> "max"
TDLAMean -> "mean"
TDLAMin -> "min"
TDLASum -> "sum"
instance FromJSON TimeseriesDescriptorsListAggregator where
parseJSON = parseJSONText "TimeseriesDescriptorsListAggregator"
instance ToJSON TimeseriesDescriptorsListAggregator where
toJSON = toJSONText
|
rueshyna/gogol
|
gogol-cloudmonitoring/gen/Network/Google/CloudMonitoring/Types/Sum.hs
|
mpl-2.0
| 2,880 | 0 | 11 | 665 | 429 | 235 | 194 | 56 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.Purchases.Products.Acknowledge
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Acknowledges a purchase of an inapp item.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Android Developer API Reference> for @androidpublisher.purchases.products.acknowledge@.
module Network.Google.Resource.AndroidPublisher.Purchases.Products.Acknowledge
(
-- * REST Resource
PurchasesProductsAcknowledgeResource
-- * Creating a Request
, purchasesProductsAcknowledge
, PurchasesProductsAcknowledge
-- * Request Lenses
, ppaXgafv
, ppaUploadProtocol
, ppaPackageName
, ppaAccessToken
, ppaToken
, ppaUploadType
, ppaPayload
, ppaProductId
, ppaCallback
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.purchases.products.acknowledge@ method which the
-- 'PurchasesProductsAcknowledge' request conforms to.
type PurchasesProductsAcknowledgeResource =
"androidpublisher" :>
"v3" :>
"applications" :>
Capture "packageName" Text :>
"purchases" :>
"products" :>
Capture "productId" Text :>
"tokens" :>
CaptureMode "token" "acknowledge" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
ProductPurchasesAcknowledgeRequest
:> Post '[JSON] ()
-- | Acknowledges a purchase of an inapp item.
--
-- /See:/ 'purchasesProductsAcknowledge' smart constructor.
data PurchasesProductsAcknowledge =
PurchasesProductsAcknowledge'
{ _ppaXgafv :: !(Maybe Xgafv)
, _ppaUploadProtocol :: !(Maybe Text)
, _ppaPackageName :: !Text
, _ppaAccessToken :: !(Maybe Text)
, _ppaToken :: !Text
, _ppaUploadType :: !(Maybe Text)
, _ppaPayload :: !ProductPurchasesAcknowledgeRequest
, _ppaProductId :: !Text
, _ppaCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PurchasesProductsAcknowledge' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ppaXgafv'
--
-- * 'ppaUploadProtocol'
--
-- * 'ppaPackageName'
--
-- * 'ppaAccessToken'
--
-- * 'ppaToken'
--
-- * 'ppaUploadType'
--
-- * 'ppaPayload'
--
-- * 'ppaProductId'
--
-- * 'ppaCallback'
purchasesProductsAcknowledge
:: Text -- ^ 'ppaPackageName'
-> Text -- ^ 'ppaToken'
-> ProductPurchasesAcknowledgeRequest -- ^ 'ppaPayload'
-> Text -- ^ 'ppaProductId'
-> PurchasesProductsAcknowledge
purchasesProductsAcknowledge pPpaPackageName_ pPpaToken_ pPpaPayload_ pPpaProductId_ =
PurchasesProductsAcknowledge'
{ _ppaXgafv = Nothing
, _ppaUploadProtocol = Nothing
, _ppaPackageName = pPpaPackageName_
, _ppaAccessToken = Nothing
, _ppaToken = pPpaToken_
, _ppaUploadType = Nothing
, _ppaPayload = pPpaPayload_
, _ppaProductId = pPpaProductId_
, _ppaCallback = Nothing
}
-- | V1 error format.
ppaXgafv :: Lens' PurchasesProductsAcknowledge (Maybe Xgafv)
ppaXgafv = lens _ppaXgafv (\ s a -> s{_ppaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ppaUploadProtocol :: Lens' PurchasesProductsAcknowledge (Maybe Text)
ppaUploadProtocol
= lens _ppaUploadProtocol
(\ s a -> s{_ppaUploadProtocol = a})
-- | The package name of the application the inapp product was sold in (for
-- example, \'com.some.thing\').
ppaPackageName :: Lens' PurchasesProductsAcknowledge Text
ppaPackageName
= lens _ppaPackageName
(\ s a -> s{_ppaPackageName = a})
-- | OAuth access token.
ppaAccessToken :: Lens' PurchasesProductsAcknowledge (Maybe Text)
ppaAccessToken
= lens _ppaAccessToken
(\ s a -> s{_ppaAccessToken = a})
-- | The token provided to the user\'s device when the inapp product was
-- purchased.
ppaToken :: Lens' PurchasesProductsAcknowledge Text
ppaToken = lens _ppaToken (\ s a -> s{_ppaToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ppaUploadType :: Lens' PurchasesProductsAcknowledge (Maybe Text)
ppaUploadType
= lens _ppaUploadType
(\ s a -> s{_ppaUploadType = a})
-- | Multipart request metadata.
ppaPayload :: Lens' PurchasesProductsAcknowledge ProductPurchasesAcknowledgeRequest
ppaPayload
= lens _ppaPayload (\ s a -> s{_ppaPayload = a})
-- | The inapp product SKU (for example, \'com.some.thing.inapp1\').
ppaProductId :: Lens' PurchasesProductsAcknowledge Text
ppaProductId
= lens _ppaProductId (\ s a -> s{_ppaProductId = a})
-- | JSONP
ppaCallback :: Lens' PurchasesProductsAcknowledge (Maybe Text)
ppaCallback
= lens _ppaCallback (\ s a -> s{_ppaCallback = a})
instance GoogleRequest PurchasesProductsAcknowledge
where
type Rs PurchasesProductsAcknowledge = ()
type Scopes PurchasesProductsAcknowledge =
'["https://www.googleapis.com/auth/androidpublisher"]
requestClient PurchasesProductsAcknowledge'{..}
= go _ppaPackageName _ppaProductId _ppaToken
_ppaXgafv
_ppaUploadProtocol
_ppaAccessToken
_ppaUploadType
_ppaCallback
(Just AltJSON)
_ppaPayload
androidPublisherService
where go
= buildClient
(Proxy :: Proxy PurchasesProductsAcknowledgeResource)
mempty
|
brendanhay/gogol
|
gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Purchases/Products/Acknowledge.hs
|
mpl-2.0
| 6,599 | 0 | 23 | 1,614 | 953 | 553 | 400 | 141 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable, RecordWildCards #-}
module Main where
import Blaze.ByteString.Builder
import Blaze.ByteString.Builder.Char8 (fromString)
import Control.Monad
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as BS
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
import Control.Concurrent.STM.TChan (readTChan,newTChanIO)
import Data.Aeson
import qualified Data.String as S
import Data.Text (Text)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Encoding as T
import Data.Monoid
import Data.Version
import Data.Word
import Network.HTTP.Types
import Network
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.Static
import System.Console.CmdArgs.Implicit
import Text.CSV
import Text.Printf
import Dvb
import Parser
import Resources
import Paths_kryptoradio_receiver
data Args = Args { device :: Int
, frontend :: Int
, demuxer :: Int
, freq :: Maybe Int
, pid :: Int
, host :: String
, port :: Int
, static :: Maybe String
, sourceHost :: Maybe String
, sourcePort :: Int
} deriving (Show, Data, Typeable)
synopsis =
Args { device = 0 &= name "i" &= help "DVB device id (default: 0)"
, frontend = 0 &= name "F" &= help "DVB frontend id (default: 0)"
, demuxer = 0 &= help "DVB demuxer id (default: 0)"
, freq = def &= help "DVB multiplex frequency in hertz" &= typ "HERTZ"
, pid = 8101 &= name "P" &= help "DVB PID of Kryptoradio (default: 8101)"
, host = "*" &= help "IP address to bind to (default: all)"
, port = 3000 &= help "HTTP port to listen to (default: 3000)"
, static = def &= typDir &= name "s" &= help "Path to static WWW directory"
, sourceHost = def &= typ "NAME" &= help "Host name of Kryptoradio stream server"
, sourcePort = 3003 &= help "TCP port of Kryptoradio stream server (default: 3003)"
}
&= program "kryptoradio-receiver"
&= summary ("Kryptoradio Receiver " ++ showVersion version)
&= help "Listens to given HTTP port for connections while \
\receiving and decoding data from DVB device connected \
\to the system. If you have only one ordinary DVB-adapter \
\in your system, you don't need to set device, frontend \
\nor demuxer IDs. If you are receiving from Digita \
\broadcast in Finland, the default PID is also fine. \
\Frequency must be given in Hz. If you want to host local files \
\in addition to the API, set static WWW directory. Options --freq \
\and --sourcehost / --sourceport are mutually exclusive."
main = do
-- Figure out settings
Args{..} <- cmdArgs synopsis
let set = setHost (S.fromString host) $
setPort port $
defaultSettings
let staticApp = case static of
Nothing -> id -- No static hosting
Just dir -> (staticPolicy $ addBase dir) .
(staticPolicy $ addBase dir <> addSuffix "/index.html") .
(staticPolicy $ addBase dir <> addSuffix ".html")
-- Connect, depending on the connection type (DVB or network socket)
(h,dvb) <- case (freq,sourceHost) of
(Just freq,Nothing) -> do
putStrLn $ "Tuning to " ++ show (fromIntegral freq / 1e6) ++ "MHz, PID " ++ show pid
(h,dvb) <- openDvb device frontend demuxer freq pid
return (h,Just dvb)
(Nothing,Just h) -> do
putStrLn $ "Connecting to Kryptoradio data source at " ++ h ++ " port " ++ show sourcePort
h <- connectTo h (PortNumber $ fromIntegral sourcePort)
return (h,Nothing)
_ -> error "You must define either frequency or TCP data source, not both. See --help"
-- Debug messages
putStrLn $ "Binding to " ++ show (getHost set) ++ ", port " ++ show (getPort set)
case static of
Just dir -> putStrLn $ "Hosting static WWW directory at " ++ dir
Nothing -> return ()
-- Start processing of messages
resVar <- newTVarIO []
-- FIXME if using threaded runtime the handle has extremely high
-- latency (minutes) when run inside forkIO
forkIO $ krpToChan h resVar
-- Start web service
runSettings set $ staticApp $ api resVar
-- |Add given suffix to every URL.
addSuffix :: String -> Policy
addSuffix suffix = policy $ \base -> Just (base ++ suffix)
api :: TVar [Resource] -> Application
api var req respond = do
resources <- readTVarIO var
let byName = flip lookup $ map (\Resource{..} -> (rname,var)) resources
case (requestMethod req, dropTrailingSlash $ pathInfo $ req) of
-- Basic API description
("GET",["api"]) ->
respond $ jsonData ok200 $
object ["name" .= ("Kryptoradio DVB-T receiver" :: Text)
,"version" .= case version of
Version x _ -> x -- Show cabal version
,"synced" .= not(null resources)
,"services" .= ["waitsync"::Text,"resource"]
,"formats" .= ["raw"::Text,"json","jsoncsv"]
]
-- Waits for the sync to happen
("GET",["api","waitsync"]) -> do
atomically $ do
x <- readTVar var
when (null x) retry
respond $ jsonData ok200 True
-- Lists available resources
("GET",["api","resource"]) ->
respond $ jsonData ok200 $ map resourceToValue resources
-- The actual Kryptoradio data response
("GET",["api","resource",res,fmt]) -> do
case byName res of
Nothing -> respond $ jsonError notFound404 "Resource not found"
Just bchan -> do
chan <- atomically $ dupTChan bchan
respond $ case fmt of
"raw" -> rawStream chan
"json" -> jsonStream chan
"jsoncsv" -> jsonCsvStream chan
_ -> jsonError badRequest400 "Unknown format"
-- Error message if nothing else matches
_ -> respond $ jsonError notFound404 "File or API not found. Try /api"
-- |Outputs binary data stream. Data consists of variable-sized
-- chunks, in which the first 4 bytes contain the chunk length in big-endian
-- format, followed by the data of that chunk.
rawStream :: TChan B.ByteString -> Response
rawStream chan = responseStream status200 rawHeader $ \write flush -> forever $ do
x <- atomically $ readTChan chan
write $ fromWord32be $ fromIntegral $ B.length x
write $ fromLazyByteString x
flush
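-- A small sketch (not part of the original program) of how a client could
-- split the raw stream documented above back into chunks: read the 4-byte
-- big-endian length, take that many bytes of payload, and repeat. The name
-- '_decodeChunks' is hypothetical and not used anywhere else in this file.
_decodeChunks :: B.ByteString -> [B.ByteString]
_decodeChunks bs
  | B.length bs < 4 = []
  | otherwise =
      let (hdr, rest) = B.splitAt 4 bs
          -- interpret the 4 header bytes as a big-endian length
          len = foldl (\acc w -> acc * 256 + fromIntegral w) 0 (B.unpack hdr)
          (chunk, rest') = B.splitAt len rest
      in chunk : _decodeChunks rest'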
-- |Output a JSON stream of 8-bit string data in quotes, properly
-- escaped. Useful for streaming JSON loading in JavaScript.
jsonStream :: TChan B.ByteString -> Response
jsonStream chan = responseStream status200 jsonHeader $ \write flush -> forever $ do
write $ fromByteString "[\""
flush
forever $ do
x <- atomically $ readTChan chan
write $ B.foldl escape mempty x <> quote
flush
where quote = fromByteString "\",\""
-- |Output JSON stream of values encoded in CSV. Only some resources
-- use CSV formatting, so this may produce weird results if used with
-- binary data. Input is assumed to be encoded in UTF-8.
jsonCsvStream :: TChan B.ByteString -> Response
jsonCsvStream chan = responseStream status200 jsonHeader $ \write flush -> forever $ do
write $ fromByteString "["
flush
forever $ do
x <- atomically $ readTChan chan
write $ fromLazyByteString $ case parseCSV "" $ T.unpack $ T.decodeUtf8 x of
Left e -> encode $ object ["error" .= ("Unable to parse: " ++ show e)]
Right x -> encode x
write $ fromByteString ","
flush
-- |Escapes given byte using JSON escaping rules (which are a bit
-- ambiguous but I assume they're same as in ASCII). This is done
-- manually instead of aeson library because they have dropped
-- ByteString serialization support.
escape :: Builder -> Word8 -> Builder
escape acc byte | byte < 32 || byte > 126 = acc <> (fromString $ printf "\\u%04x" byte)
| otherwise = acc <> (fromByteString $ BS.singleton byte)
resourceToValue Resource{..} = object ["rid" .= rid, "name" .= rname, "desc" .= desc]
jsonData code = responseLBS code jsonHeader . encode
rawHeader = [("Content-Type", "application/octet-stream"),cors]
jsonHeader = [("Content-Type", "application/json"),cors]
cors = ("Access-Control-Allow-Origin","*")
-- |Generates JSON with an error message.
jsonError :: Status -> Text -> Response
jsonError code msg = jsonData code $ object ["error" .= msg]
-- |Drops last component of list if it's empty Text. In case of wai
-- pathInfo, this represents the slash after the URL.
dropTrailingSlash :: [Text] -> [Text]
dropTrailingSlash [] = []
dropTrailingSlash x | last x == "" = init x
| otherwise = x
|
koodilehto/kryptoradio
|
receiver/Main.hs
|
agpl-3.0
| 8,757 | 0 | 23 | 2,187 | 2,095 | 1,088 | 1,007 | 155 | 9 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : TestExplode.TestExplode
Description : Definitions of part-testcases, graphs of them, and an evaluation function to generate complete testcases (strings).
Copyright : (c) Hans-Jürgen Guth, 2014
License : LGPL
Maintainer : [email protected]
Stability : experimental
With this module you can define a graph of part-testcases
('Casepart') and evaluate this graph to a list of strings,
where every string is the concatenation of the code of the
part-testcases.
-}
module TestExplode.TestExplode (
-- * Types
Casepart(Casepart)
, shortDesc
, longDesc
, condDesc
, codeFkt
, varFkt
, condition
, CasepartInternal(CasepartInternal)
, shortDescI
, longDescI
, condDescI
, codeFktI
, conditionI
, CPType(NormalCP, Mark)
, cpType
, cpTypeI
, DirGraph(SimpleDG, Conc, Join, StructDG)
, SplittedGraph(Split)
, Testgraph(Testgraph)
, dirGraph
, docuInfo
, TGDocuInfo(TGDocuInfo)
, name
, descForNode
, descForTex
, generic
, toExpand
, Expand (Expand, NotExpand, AsIs)
-- * Functions
, generate
, emptyCp
, markCp
, getSubGraphs
-- * Functions for generating 'DirGraph' s
-- The non-graphical-UI for the user.
-- Call it a EDSL, if you like
, mkEle
, (&-&)
, conc
, split
, mkGraph2Ele
, mkGraph2Ele0
-- * Conversion Functions
, convertDirGraph
, convertTestgraph
) where
import Control.Monad.Writer
import qualified Data.Text.Lazy as L
import qualified Data.Sequence as S
import qualified Data.Foldable as DF
-- | The part-testcase
data Casepart cnf -- the test data
locals -- test hints, that are changed by a Casepart
-- for example the state of a state machine
-- or the time since start of the Testcase
= Casepart
{
-- | short description of the part-testcase,
-- currently used a) at top of a testcase to show
-- which path the generated testcase belongs to
-- and b) in the visualised graph as node-label
shortDesc :: L.Text
-- | long description of the part-testcase
-- currently generated in front of the code of the
-- part-testcase
, longDesc :: L.Text
-- | description of the condition, under which
-- the part-testcase is valid (if not, the path with
-- this part-testcase will not be generated)
, condDesc :: L.Text
-- | the actual code, which forms the part-testcase,
-- dependent of the "configuration" (the "cnf" in
-- 'Casepart cnf locals'), which is the test-data,
-- and local variables, that are changed by a Casepart.
, codeFkt :: cnf -> locals -> L.Text
-- | The changes in the local variables
, varFkt :: cnf -> locals -> locals
-- | the condition under which the part-testcase
-- is valid (if not, the path with
-- this part-testcase will not be generated)
, condition :: cnf -> locals -> Bool
-- | Type of the Casepart, mainly (up to now only) for
-- visualisation in the graph of Caseparts
, cpType :: CPType
}
-- | The part-testcase, internal format of 'Casepart', with a writer-monad as stringFkt
-- instead of the varFkt and the old stringFkt
data CasepartInternal cnf locals = CasepartInternal
{
shortDescI :: L.Text
, longDescI :: L.Text
, condDescI :: L.Text
, codeFktI :: cnf -> locals -> Writer (S.Seq L.Text) locals
, conditionI :: cnf -> locals -> Bool
, cpTypeI :: CPType
}
-- | Types of Caseparts, mainly (up to now only) for
-- visualisation of the graph of Caseparts
data CPType = NormalCP | Mark
deriving (Show, Eq)
-- | An empty testcase, all strings are "".
-- The condition is always 'True'.
-- Shall serve as a starting point for your own definitions of
-- 'Casepart''s.
emptyCp = Casepart { shortDesc = ""
, longDesc = ""
, condDesc =""
, codeFkt = \cnf locals -> ""
, varFkt = \cnf locals -> locals
, condition = \cnf locals -> True
, cpType = NormalCP
}
-- | Convenience function to easily make a mark.
--
markCp str = emptyCp { shortDesc = L.append "Mark: " str,
longDesc = "Set a mark",
codeFkt = \cnf locals -> L.concat[" setCheckMark(\"",
str,
"\");\n"],
cpType = Mark
}
-- | The heart of this module, the final function.
-- It takes configurations ('cnf' means testvalues),
-- that is a record of variables with a
-- value, a function that describes the "prelude" of one testcase (without
-- comment chars, which are later added) (a good starting value : the
-- 'show'-function of 'cnf', so that the used test-values are printed on top
-- of the testcase), the graph of testcases and returns
--
-- voilà:
--
-- the
-- list of testcases, ready to be printed out in separate files and run.
generate :: L.Text -- ^ how a text is commented ("# " or "-- ")
-> [cnf] -- ^ a list of the testvalues
-> locals -- ^ the initial value of the variables that the
-- testcases change
-> (cnf -> L.Text) -- ^ "prelude" of a testcase, i.e. 'show' of cnf
-> DirGraph (CasepartInternal cnf locals) -- ^ the graph of caseparts
-> [L.Text] -- ^ the final result: the list of testcases incl. comments
generate commentString cnfList locals cnfShow graph =
[L.concat[mkComment(L.concat[(cnfShow cnf),"\n", desc]) commentString,
DF.fold $ snd $ runWriter (stringFkt cnf locals)
] |
cnf <- cnfList,
-- Casepart stringFkt cond <- cpGetPaths graph,
-- cond cnf]
--
-- Does this work too? is independent of the
-- structure, uses record syntax
let cpList = cpGetPaths commentString graph,
(stringFkt, cond, desc) <- map getCodeAndConditionAndDesc cpList,
cond cnf locals]
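-- A minimal usage sketch (not part of the original module): three trivial
-- part-testcases, combined with '&-&' and 'split' and evaluated with
-- 'generate'. The configuration type is '()' and there are no local
-- variables, so all functions ignore their arguments. Two testcases are
-- generated, one per path ("A then B" and "A then C"). The name
-- '_exampleTestcases' is hypothetical and unused elsewhere.
_exampleTestcases :: [L.Text]
_exampleTestcases = generate "-- " [()] () (const "example run") graph
  where
    step name = mkEle emptyCp { shortDesc = name
                              , codeFkt = \_ _ -> L.concat ["do ", name, "\n"] }
    graph = step "A" &-& split [step "B", step "C"]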
-- | Internal help function, could be made more general for
-- arbitrary getter functions and number of getter functions
getCodeAndConditionAndDesc :: CasepartInternal a b ->
((a -> b -> Writer (S.Seq L.Text) b), (a -> b ->Bool), L.Text)
getCodeAndConditionAndDesc cp = (codeFktI cp, conditionI cp, shortDescI cp)
-- | Internal function to comment the description with the commentString
mkComment :: L.Text -> L.Text -> L.Text
--mkComment str = let strNew = "# " ++ str
-- in
-- Utils.replace "\n" "\n# " str
mkComment str commentString = L.unlines $ map (L.append commentString ) (L.lines str)
-- Now all the functions for combining Caseparts
-- | Directed graph with one end, self-invented definition
data DirGraph a = -- | Constructor for a node alone,
-- A node is a graph.
SimpleDG a |
-- | Constructor for one sub-graph after another
Conc (DirGraph a) (DirGraph a) |
-- | Constructor for the "splitting" of graphs,
-- comparable with an "if".
-- The 'Join' merges the many ends and beginnings
-- into one end and one beginning
Join (SplittedGraph a) |
-- | A graph with more attributes,
-- importing of a 'Testgraph', only the part
-- 'dirGraph' is used
StructDG (Testgraph a)
-- | Many disjoint graphs.
-- Every part-graph has one end and one beginning.
data SplittedGraph a = Split [DirGraph a]
-- shorthand:
-- type CPDirGraph = DirGraph (Casepart cnf)
-- to build a directed graph:
-- at best:
-- ele1 conc ele2 conc (split [ele3, ele4, split [ele5, ele6], ele7]) conc ele8
--
-- this leads to:
-- eleN is a SimpleDG
-- conc can be infix: `conc`
-- the end of a split is a `join`
-- | Function to create a splitted graph
split :: [DirGraph a] -> DirGraph a
split x = Join (Split x)
-- | Function to create a 'Conc' of two sub-graphs
conc :: DirGraph a -> DirGraph a -> DirGraph a
conc a b = Conc a b
-- | Infix synonym for 'conc'
(&-&) :: DirGraph a -> DirGraph a -> DirGraph a
a &-& b = Conc a b
-- | Function to create a node, Internal, with the CasepartInternal as 'a'
mkEleInt :: a -> DirGraph a
mkEleInt a = SimpleDG a
-- | Function to create a node, function for the user.
-- If longDesc = "", shortDesc is used as longDesc.
mkEle :: Casepart cnf locals -> DirGraph (CasepartInternal cnf locals)
mkEle cpUser = mkEleInt (CasepartInternal {shortDescI = shortDesc cpUser,
longDescI = if longDesc cpUser == ""
then shortDesc cpUser
else longDesc cpUser,
condDescI = condDesc cpUser,
codeFktI = mkLogging (codeFkt cpUser) (varFkt cpUser),
conditionI = condition cpUser,
cpTypeI = cpType cpUser
})
-- | Internal Function to build the monad-function as the new codeFkt
mkLogging :: (cnf -> locals -> L.Text) -- ^ the old codeFkt
-> (cnf -> locals -> locals) -- ^ the change-function of the variables (old varFkt)
-> (cnf -> locals -> Writer (S.Seq L.Text) locals) -- ^ the new codeFkt
mkLogging fText fVars = \cnf locs ->
let ret = fVars cnf locs
in
do tell $ S.singleton $ fText cnf locs
return ret
data Expand = Expand | NotExpand | AsIs
-- | Function to add a testgraph to a dirgraph
-- with converting-function f of the testdata ("cnfOld" resp. "cnfNew")
-- and an 'Expand' value that says whether the subgraph should be
-- expanded or not.
mkGraph2Ele :: (cnfNew -> cnfOld) -- ^ conversion function for the test-data-input of the casepart
-> (localsInB -> localsInA) -- ^ conversion function for the
-- variables the testcases uses/changes (input-side)
-> (localsInB -> localsInA -> localsInB) -- ^ conversion function for the
-- variables the testcases uses/changes (output-side)
-- that is: how shall the variables look after the run
-- of the casepart? Dependant of the old value
-- of the variables and the value of the variables after run
-- of the imported testcase
-> Expand -- ^ Shall this graph be expanded in the documentation or not?
-> Testgraph (CasepartInternal cnfOld localsInA) -- ^ the Testgraph that shall be imported
-> DirGraph (CasepartInternal cnfNew localsInB) -- ^ the imported Testgraph, now a DirGraph
-- with the correct types
mkGraph2Ele fCnf fLocIn fLocOut expand tg =
let newTg = case expand of
AsIs -> tg
NotExpand -> tg {docuInfo=(docuInfo tg) {toExpand=False}}
Expand -> tg {docuInfo=(docuInfo tg) {toExpand=True}}
in
StructDG ( convertTestgraph fCnf fLocIn fLocOut newTg)
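-- A hypothetical sketch (not part of the original module) of how an imported
-- 'Testgraph' with a smaller configuration type could be embedded into a
-- graph with a richer one: the first argument projects the new configuration
-- onto the old one, the other two translate the local variables in and out
-- (here trivially, as both sides use '()'). The name '_importSketch' is
-- hypothetical and unused elsewhere.
_importSketch :: Testgraph (CasepartInternal Int ())
              -> DirGraph (CasepartInternal (Int, Bool) ())
_importSketch = mkGraph2Ele fst (\() -> ()) (\() () -> ()) NotExpand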
-- | Function to add a testgraph to a dirgraph
-- without converting-function
mkGraph2Ele0 :: Testgraph a
-> DirGraph a
mkGraph2Ele0 tg = StructDG tg
-- | The eval function of the EDSL. Evaluates a 'DirGraph' to the list
-- of all paths.
cpGetPaths :: L.Text -> DirGraph (CasepartInternal cnf locals ) -> [CasepartInternal cnf locals]
cpGetPaths commentString (SimpleDG cp) =
let lngDesc = longDescI cp
cdFkt = codeFktI cp
in
-- insert longDesc before codeFkt
[cp{codeFktI = \cfg locals -> do
tell $ S.singleton "\n"
tell $ S.singleton $ mkComment lngDesc commentString
cdFkt cfg locals
}]
cpGetPaths commentString (Conc dirGraph1 dirGraph2) =
let paths1 = cpGetPaths commentString dirGraph1
paths2 = cpGetPaths commentString dirGraph2
in
[CasepartInternal {
longDescI="" -- not relevant for combined part-testcases
,condDescI="" -- not relevant for combined part-testcases
,cpTypeI = NormalCP -- not relevant for combined part-testcases
,shortDescI = L.concat[shortDescI cp1,
"\n and\n",
shortDescI cp2]
,codeFktI = \cnf locals -> do
vars1 <- (codeFktI cp1) cnf locals
(codeFktI cp2) cnf vars1
,conditionI = \cnf locals -> (((conditionI cp1) cnf locals) && ((conditionI cp2) cnf locals))} |
cp1 <- paths1,
cp2 <- paths2 ] -- every path combined with every path
cpGetPaths commentString (StructDG tg) = cpGetPaths commentString (dirGraph tg)
cpGetPaths commentString (Join (Split paths )) = concat $ lcpGetPaths commentString (Split paths)
-- | the eval function of the EDSL for SplittedGraphs
lcpGetPaths :: L.Text -> SplittedGraph (CasepartInternal cnf locals) -> [[CasepartInternal cnf locals]]
lcpGetPaths commentString (Split paths) = map (cpGetPaths commentString) paths
-- | Converts between Caseparts.
-- You need an interpreting function from the target data-type to the
-- source data-type (not vice versa)
convertCasepart :: (cnfB -> cnfA) -- ^ conversion function for the test-data-input of the casepart
           -> (localsInB -> localsInA) -- ^ conversion function for the
                           -- variables the testcases use/change (input-side)
           -> (localsInB -> localsInA -> localsInB) -- ^ conversion function for the
                           -- variables the testcases use/change (output-side),
                           -- that is: how should the variables look after the
                           -- casepart has run? Depends on the old value of the
                           -- variables and on their value after the run of the
                           -- imported testcase
-> CasepartInternal cnfA localsInA -- ^ the Casepart that shall be imported
-> CasepartInternal cnfB localsInB -- ^ the imported Casepart with the correct types
convertCasepart fCnf fLocIn fLocOut cpa =
CasepartInternal {
codeFktI = \cnf locals -> do
oldIn <- (codeFktI cpa) (fCnf cnf) (fLocIn locals)
return $ fLocOut locals oldIn
,
conditionI = \cnf locals -> (conditionI cpa) (fCnf cnf) (fLocIn locals),
shortDescI = shortDescI cpa,
longDescI = longDescI cpa,
condDescI = condDescI cpa,
cpTypeI = cpTypeI cpa
}
-- | Converts a DirGraph, for example our testgraphs.
-- With that function you can import other testgraphs
-- with another set of variables.
-- You need a conversion from the target data-type to the
-- source data-type (not vice versa).
convertDirGraph :: (cnfB->cnfA) -- ^ conversion function for the test-data-input of the casepart
           -> (localsInB -> localsInA) -- ^ conversion function for the
                           -- variables the testcases use/change (input-side)
           -> (localsInB -> localsInA -> localsInB) -- ^ conversion function for the
                           -- variables the testcases use/change (output-side),
                           -- that is: how should the variables look after the
                           -- casepart has run? Depends on the old value of the
                           -- variables and on their value after the run of the
                           -- imported testcase
-> DirGraph (CasepartInternal cnfA localsInA) -- ^ the DirGraph that shall be imported
-> DirGraph (CasepartInternal cnfB localsInB) -- ^ the imported DirGraph with the correct types
convertDirGraph f fLocIn fLocOut (SimpleDG cp) = SimpleDG (convertCasepart f fLocIn fLocOut cp)
convertDirGraph f fLocIn fLocOut (Conc dg1 dg2)= Conc (convertDirGraph f fLocIn fLocOut dg1)
(convertDirGraph f fLocIn fLocOut dg2)
convertDirGraph f fLocIn fLocOut (Join splittedGraph) =
Join ( convertSplittedGraph f fLocIn fLocOut splittedGraph)
-- | Converts a SplittedGraph
convertSplittedGraph :: (cnfB->cnfA)
-> (localsInB -> localsInA)
-> (localsInB -> localsInA -> localsInB)
-> SplittedGraph (CasepartInternal cnfA localsInA)
-> SplittedGraph (CasepartInternal cnfB localsInB)
convertSplittedGraph f fLocIn fLocOut (Split dirGraphs) =
Split (map (convertDirGraph f fLocIn fLocOut) dirGraphs)
-- Extensions to the modules
-- for adding parts of testgraph / subgraphs
data TGDocuInfo =
TGDocuInfo { name :: String,
descForNode :: String,
descForTex :: String,
generic :: Bool,
toExpand :: Bool
}
data Testgraph a =
Testgraph { dirGraph :: DirGraph a,
docuInfo :: TGDocuInfo
}
-- | Converts a testgraph; necessary in order to add
-- a different testgraph (with another type of configuration)
-- to a dirGraph.
convertTestgraph :: (cnfB -> cnfA) -- ^ conversion function for the test-data-input of the casepart
           -> (localsInB -> localsInA) -- ^ conversion function for the
                           -- variables the testcases use/change (input-side)
           -> (localsInB -> localsInA -> localsInB) -- ^ conversion function for the
                           -- variables the testcases use/change (output-side),
                           -- that is: how should the variables look after the
                           -- casepart has run? Depends on the old value of the
                           -- variables and on their value after the run of the
                           -- imported testcase
-> Testgraph (CasepartInternal cnfA localsInA) -- ^ the Testgraph that shall be imported
-> Testgraph (CasepartInternal cnfB localsInB) -- ^ the imported Testgraph with the correct types
convertTestgraph f fLocIn fLocOut tg = tg { dirGraph = convertDirGraph f fLocIn fLocOut (dirGraph tg)}
-- | Convenience function for the case that the return value of an
-- embedded 'Casepart' shall have no effect. The old local
-- values remain unchanged.
emptyOut :: localsInA -> localsInB -> localsInA
emptyOut fromMaster fromEmbedded = fromMaster
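-- Usage sketch (editor's illustration; the argument names are assumed): pass
-- 'emptyOut' as the output-side conversion function when an imported
-- testgraph's changes to the local variables should be discarded, e.g.
--
-- > convertTestgraph fCnf fLocIn emptyOut importedTg
--
-- so the master graph's locals are returned unchanged after the embedded
-- casepart has run.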
-- | Looks for all embedded 'Testgraph's in a 'DirGraph';
-- 'Testgraph's that are embedded more than once (identified by the
-- attribute 'name') are ignored.
getSubGraphs :: DirGraph a -> [(String, Testgraph a)] -> [(String, Testgraph a)]
getSubGraphs (SimpleDG cp) resList = resList
getSubGraphs (Conc dirGraph1 dirGraph2) resList =
let newResList1 = getSubGraphs dirGraph1 resList
newResList2 = getSubGraphs dirGraph2 newResList1
in
newResList2
getSubGraphs (Join (Split dirGraphs)) resList = foldr getSubGraphs resList dirGraphs
getSubGraphs (StructDG tg) resList= case (lookup (name (docuInfo tg)) resList) of
Nothing -> let newResList = (name (docuInfo tg), tg):resList
in
getSubGraphs(dirGraph tg) newResList
Just _ -> resList
|
testexplode/testexplode
|
src/TestExplode/TestExplode.hs
|
lgpl-3.0
| 23,337 | 0 | 16 | 9,855 | 3,086 | 1,737 | 1,349 | 247 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
module QDSL01 (module QDSL01) where
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Lib
power :: Int -> TExpQ (Float -> Float)
power n | n < 0 = [|| \x -> if x == 0 then 0 else 1 / ($$(power (-n)) x) ||]
| n == 0 = [|| \x -> 1 ||]
| n `mod` 2 == 0 = [|| \x -> let y = $$(power (n `div` 2)) x in y*y ||]
| otherwise = [|| \x -> x * ($$(power (n-1)) x) ||]
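-- A few spliced examples (editor's sketch; results use the Float arithmetic
-- of 'power'):
--
-- > $$(power 3) 2 == 8.0
-- > $$(power 0) 5 == 1.0
-- > $$(power (-2)) 2 == 0.25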
|
egaburov/funstuff
|
Haskell/thsk/QDSL01.hs
|
apache-2.0
| 550 | 4 | 19 | 155 | 262 | 149 | 113 | -1 | -1 |
module Main (main) where
import Criterion.Main
import Data.Fixed
import Data.Int
import Data.Time
import Data.Time.LocalTime.TimeZone.Olson
import Data.Time.LocalTime.TimeZone.Series
import Data.Time.Zones
import System.Environment
foreign import ccall safe "time.h tzset" c_tzset :: IO ()
setupTZ :: String -> IO TZ
setupTZ zoneName = do
setEnv "TZ" zoneName
c_tzset
loadSystemTZ zoneName
mkLocal :: Integer -> Int -> Int -> Int -> Int -> Pico -> LocalTime
mkLocal y m d hh mm ss
= LocalTime (fromGregorian y m d) (TimeOfDay hh mm ss)
mkUTC :: Integer -> Int -> Int -> Int -> Int -> Pico -> UTCTime
mkUTC y m d hh mm ss
= UTCTime (fromGregorian y m d) (timeOfDayToTime $ TimeOfDay hh mm ss)
utcToLocalTimeIO :: UTCTime -> IO LocalTime
utcToLocalTimeIO ut = do
tz <- getTimeZone ut
return $ utcToLocalTime tz ut
utcToLocalNano :: TZ -> Int64 -> Int64
{-# INLINE utcToLocalNano #-}
utcToLocalNano tz t = t + 1000000000 * fromIntegral diff
where
diff = diffForPOSIX tz (t `div` 1000000000)
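-- Worked example (editor's sketch, assuming diffForPOSIX returns the UTC
-- offset in seconds): for a zone that is at UTC+1 at the epoch,
-- utcToLocalNano tz 0 == 3600 * 1000000000.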
tzBenchmarks :: TZ -> TimeZoneSeries -> [Benchmark]
tzBenchmarks tz series = [
bgroup "rawDiff" [
bench "past" $ whnf (diffForPOSIX tz) (-10000000000), -- Way back in the past
bench "epoch" $ whnf (diffForPOSIX tz) 0,
bench "now" $ whnf (diffForPOSIX tz) 1395572400,
bench "future" $ whnf (diffForPOSIX tz) 10000000000 -- Way in the future
],
bgroup "utcToLocalNano" [
bench "past" $ whnf (utcToLocalNano tz) (-4000000000000000000),
bench "epoch" $ whnf (utcToLocalNano tz) 0,
bench "now" $ whnf (utcToLocalNano tz) 1395572400000000000,
bench "future" $ whnf (utcToLocalNano tz) 4000000000000000000
],
bgroup "rawDiffUTC" [
bench "now" $ whnf (diffForPOSIX utcTZ) 1395572400
],
bgroup "basicUTCToLocalTime" [
bench "past" $ nf (utcToLocalTime cetTZ) ut0,
bench "now" $ nf (utcToLocalTime cetTZ) ut1,
bench "future" $ nf (utcToLocalTime cetTZ) ut2
],
bgroup "fullUTCToLocalTime" [
bench "past" $ nfIO (utcToLocalTimeIO ut0),
bench "now" $ nfIO (utcToLocalTimeIO ut1),
bench "future" $ nfIO (utcToLocalTimeIO ut2)
],
bgroup "utcToLocalTimeTZ" [
bench "past" $ nf (utcToLocalTimeTZ tz) ut0,
bench "now" $ nf (utcToLocalTimeTZ tz) ut1,
bench "future" $ nf (utcToLocalTimeTZ tz) ut2
],
bgroup "utcToLocalTimeSeries" [
bench "past" $ nf (utcToLocalTime' series) ut0,
bench "now" $ nf (utcToLocalTime' series) ut1,
bench "future" $ nf (utcToLocalTime' series) ut2
],
bgroup "timeZoneForPOSIX" [
bench "past" $ nf (timeZoneForPOSIX tz) (-10000000000),
bench "now" $ nf (timeZoneForPOSIX tz) 1395572400,
bench "future" $ nf (timeZoneForPOSIX tz) 10000000000
],
bgroup "timeZoneForUTCTime" [
bench "past" $ nf (timeZoneForUTCTime tz) ut0,
bench "now" $ nf (timeZoneForUTCTime tz) ut1,
bench "future" $ nf (timeZoneForUTCTime tz) ut2
],
bgroup "timeZoneFromSeries" [
bench "past" $ nf (timeZoneFromSeries series) ut0,
bench "now" $ nf (timeZoneFromSeries series) ut1,
bench "future" $ nf (timeZoneFromSeries series) ut2
],
bgroup "localToPOSIX" [
bench "past" $ whnf (localToPOSIX tz) (-10000000000),
bench "now" $ whnf (localToPOSIX tz) 1396142115,
bench "future" $ whnf (localToPOSIX tz) 10000000000
],
bgroup "basicLocalTimeToUTC" [
bench "past" $ nf (localTimeToUTC cetTZ) lt0,
bench "now" $ nf (localTimeToUTC cetTZ) lt1,
bench "future" $ nf (localTimeToUTC cetTZ) lt2
],
bgroup "localTimeToUTCTZ" [
bench "past" $ nf (localTimeToUTCTZ tz) lt0,
bench "now" $ nf (localTimeToUTCTZ tz) lt1,
bench "future" $ nf (localTimeToUTCTZ tz) lt2
],
bgroup "localTimeToUTCSeries" [
bench "past" $ nf (localTimeToUTC' series) lt0,
bench "now" $ nf (localTimeToUTC' series) lt1,
bench "future" $ nf (localTimeToUTC' series) lt2
]
]
where
cetTZ = TimeZone 60 False "CET"
ut0 = mkUTC 1500 07 07 07 07 07
ut1 = mkUTC 2014 03 23 15 15 15
ut2 = mkUTC 2222 10 10 10 10 10
lt0 = mkLocal 1500 07 07 07 07 07
lt1 = mkLocal 2014 03 30 03 15 15
lt2 = mkLocal 2222 10 10 10 10 10
main :: IO ()
main = do
tzBudapest <- setupTZ "Europe/Budapest"
seriesBudapest <- getTimeZoneSeriesFromOlsonFile "/usr/share/zoneinfo/Europe/Budapest"
defaultMain $ tzBenchmarks tzBudapest seriesBudapest
|
nilcons/haskell-tz
|
benchmarks/benchTZ.hs
|
apache-2.0
| 4,379 | 0 | 11 | 951 | 1,563 | 769 | 794 | 99 | 1 |
module Test.Kibr.Css where
import Preamble
import Language.CSS.YUI (pxToPercent)
import Test.Framework (Test)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.TH (testGroupGenerator)
import Test.HUnit (Assertion, (@?=))
import qualified Data.Text.Lazy as LT
tests :: Test
tests = $testGroupGenerator
case_pxToPercent_computed :: Assertion
case_pxToPercent_computed =
do
pxToPercent 9 @?= "69.2%"
pxToPercent 28 @?= "215.4%"
prop_pxToPercent_starts_with_digit :: Int -> Bool
prop_pxToPercent_starts_with_digit px
| px >= 0 = isDigit . LT.head . pxToPercent $ px
| otherwise = LT.head (pxToPercent px) == '-'
prop_pxToPercent_ends_in_percent_sign :: Int -> Bool
prop_pxToPercent_ends_in_percent_sign px = LT.last (pxToPercent px) == '%'
prop_pxtoPercent_is_rounded :: Int -> Bool
prop_pxtoPercent_is_rounded px =
case LT.split (== '.') (pxToPercent px) of
[_, d] -> LT.length d == 2
[_] -> True
_ -> False
|
dag/kibr
|
src/Test/Kibr/Css.hs
|
bsd-2-clause
| 1,125 | 0 | 10 | 257 | 302 | 166 | 136 | -1 | -1 |
module Data.Drasil.IdeaDicts where
import Language.Drasil.Chunk.NamedIdea (IdeaDict, mkIdea)
import Language.Drasil.Chunk.CommonIdea (CI, commonIdeaWithDict)
import Language.Drasil.NounPhrase (cn')
compScience, softEng, mathematics, progLanguage, idglass, physics, civilEng
, materialEng, documentc, knowledgemng :: IdeaDict
-------------------------------------------------------------------------------
-- IdeaDict | | id | term | abbreviation
-------------------------------------------------------------------------------
compScience = mkIdea "compScience" (cn' "Computer Science") (Just "CS")
softEng = mkIdea "softEng" (cn' "Software Engineering") (Just "SE")
mathematics = mkIdea "mathematics" (cn' "Mathematics") Nothing
progLanguage = mkIdea "progLanguage" (cn' "Programming Language") Nothing
idglass = mkIdea "glass" (cn' "Glass") Nothing
physics = mkIdea "physics" (cn' "Physics") Nothing
civilEng = mkIdea "civilEng" (cn' "Civil Engineering") Nothing
materialEng = mkIdea "materialEng" (cn' "Material Engineering") Nothing
documentc = mkIdea "documentc" (cn' "Document") (Just "Doc")
knowledgemng = mkIdea "knowledgemng" (cn' "Knowledge Management") Nothing
dataDefn, genDefn, inModel, thModel :: CI
dataDefn = commonIdeaWithDict "dataDefn" (cn' "data definition") "DD" [softEng]
genDefn = commonIdeaWithDict "genDefn" (cn' "general definition") "GD" [softEng]
inModel = commonIdeaWithDict "inModel" (cn' "instance model") "IM" [softEng]
thModel = commonIdeaWithDict "thModel" (cn' "theoretical model") "TM" [softEng]
|
JacquesCarette/literate-scientific-software
|
code/drasil-lang/Data/Drasil/IdeaDicts.hs
|
bsd-2-clause
| 1,747 | 0 | 7 | 375 | 375 | 212 | 163 | 21 | 1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-cse #-}
{-# OPTIONS_GHC -fno-full-laziness #-}
{-# OPTIONS_GHC -fno-float-in #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
module Concurrent.Capability.Pinned
( Pinned(..)
, runPinned
, ReifiesCapability(..)
) where
import Concurrent.Thread
import Control.Applicative
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Primitive
import Data.Tagged
import Unsafe.Coerce
-- This computation is pinned to the current thread.
newtype Pinned s a = Pinned { unpinned :: IO a }
deriving (Functor,Applicative,Monad,Alternative,MonadPlus,MonadThrow,MonadCatch,MonadMask)
instance PrimMonad (Pinned s) where
type PrimState (Pinned s) = RealWorld
primitive m = Pinned (primitive m)
{-# INLINE primitive #-}
instance PrimBase (Pinned s) where
internal (Pinned m) = internal m
{-# INLINE internal #-}
class ReifiesCapability s where
reflectCapability :: Tagged s Int
instance ReifiesCapability s => ReifiesCapability (Pinned s) where
reflectCapability = retag (reflectCapability :: Tagged s Int)
reifyCapability :: forall r. (forall (s :: *). ReifiesCapability s => Pinned s r) -> Int -> IO r
reifyCapability k = unsafeCoerce (Magic k :: Magic r)
newtype Magic r = Magic (forall (s :: *). ReifiesCapability s => Pinned s r)
runPinned :: (forall (s :: *). ReifiesCapability s => Pinned s a) -> IO a
runPinned m = withCapability (currentCapability >>= reifyCapability m)
{-# INLINE runPinned #-}
|
ekmett/concurrent
|
src/Concurrent/Capability/Pinned.hs
|
bsd-2-clause
| 1,647 | 0 | 10 | 251 | 430 | 238 | 192 | 40 | 1 |
module Drasil.SSP.BasicExprs where
import Prelude hiding (sin, cos, tan)
import Language.Drasil
import Drasil.SSP.Unitals (baseAngle, baseWthX, earthqkLoadFctr, fs,
impLoadAngle, intNormForce, inxi, inxiM1, midpntHght, mobShrC, shearFNoIntsl,
shearRNoIntsl, shrResC, slcWght, sliceHght, sliceHghtW, surfAngle,
surfHydroForce, surfLoad, watrForce)
eqlExpr :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExpr f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle)) +
inxi surfLoad * cos (inxi impLoadAngle)) * f1_ (inxi baseAngle) -
(negate (sy earthqkLoadFctr) * inxi slcWght - inxi intNormForce +
inxiM1 intNormForce - inxi watrForce + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle) +
inxi surfLoad * sin (inxi impLoadAngle)) * f2_ (inxi baseAngle)
eqlExprN :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExprN f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle)) +
inxi surfLoad * cos (inxi impLoadAngle)) * f1_ (inxi baseAngle) +
(negate (sy earthqkLoadFctr) * inxi slcWght - inxi intNormForce +
inxiM1 intNormForce - inxi watrForce + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle) +
inxi surfLoad * sin (inxi impLoadAngle)) * f2_ (inxi baseAngle)
eqlExprSepG :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExprSepG f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle)) +
inxi surfLoad * cos (inxi impLoadAngle)) * f1_ (inxi baseAngle) -
(negate (sy earthqkLoadFctr) * inxi slcWght - inxi watrForce + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle) +
inxi surfLoad * sin (inxi impLoadAngle)) * f2_ (inxi baseAngle) -
(- (inxi intNormForce) + inxiM1 intNormForce) * f2_ (inxi baseAngle)
eqlExprNSepG :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExprNSepG f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle)) +
inxi surfLoad * cos (inxi impLoadAngle)) * f1_ (inxi baseAngle) +
(negate (sy earthqkLoadFctr) * inxi slcWght - inxi watrForce + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle) +
inxi surfLoad * sin (inxi impLoadAngle)) * f2_ (inxi baseAngle) +
(- (inxi intNormForce) + inxiM1 intNormForce) * f2_ (inxi baseAngle)
eqlExprNoKQ :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExprNoKQ f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle))) * f1_ (inxi baseAngle) -
(- (inxi watrForce) + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle)) * f2_ (inxi baseAngle) -
(- (inxi intNormForce) + inxiM1 intNormForce) * f2_ (inxi baseAngle)
eqlExprNNoKQ :: (Expr -> Expr) -> (Expr -> Expr) -> (Expr -> Expr -> Expr) -> Expr
eqlExprNNoKQ f1_ f2_ _e_ = (inxi slcWght `_e_`
(inxi surfHydroForce * cos (inxi surfAngle))) * f1_ (inxi baseAngle) +
(- (inxi watrForce) + inxiM1 watrForce +
inxi surfHydroForce * sin (inxi surfAngle)) * f2_ (inxi baseAngle) +
(- (inxi intNormForce) + inxiM1 intNormForce) * f2_ (inxi baseAngle)
sliceExpr :: Integer -> Expr
sliceExpr n = idx (sy intNormForce) (int n) * idx (sy shrResC) (int n) $=
idx (sy mobShrC) (int (n-1)) * idx (sy intNormForce) (int (n-1)) *
idx (sy shrResC) (int (n-1)) + sy fs * idx (sy shearFNoIntsl) (int n) -
idx (sy shearRNoIntsl) (int n)
momExpr :: (Expr -> Expr -> Expr) -> Expr
momExpr _e_ = (negate (inxi intNormForce) * (inxi sliceHght +
inxi baseWthX / 2 * tan (inxi baseAngle)) + inxiM1 intNormForce *
(inxiM1 sliceHght - inxi baseWthX / 2 * tan (inxi baseAngle)) -
inxi watrForce * ((1/3) * inxi sliceHghtW + inxi baseWthX / 2 *
tan (inxi baseAngle)) + inxiM1 watrForce * ((1/3) * inxiM1 sliceHghtW -
inxi baseWthX / 2 * tan (inxi baseAngle))) `_e_`
(negate (sy earthqkLoadFctr) * inxi slcWght * inxi midpntHght / 2 +
inxi surfHydroForce * sin (inxi surfAngle) * inxi midpntHght +
inxi surfLoad * sin (inxi impLoadAngle) * inxi midpntHght)
momExprNoKQ :: (Expr -> Expr -> Expr) -> Expr
momExprNoKQ _e_ = (negate (inxi intNormForce) * (inxi sliceHght +
inxi baseWthX / 2 * tan (inxi baseAngle)) + inxiM1 intNormForce *
(inxiM1 sliceHght - inxi baseWthX / 2 * tan (inxi baseAngle)) -
inxi watrForce * ((1/3) * inxi sliceHghtW + inxi baseWthX / 2 *
tan (inxi baseAngle)) + inxiM1 watrForce * ((1/3) * inxiM1 sliceHghtW -
inxi baseWthX / 2 * tan (inxi baseAngle))) `_e_`
(inxi surfHydroForce * sin (inxi surfAngle) * inxi midpntHght)
|
JacquesCarette/literate-scientific-software
|
code/drasil-example/Drasil/SSP/BasicExprs.hs
|
bsd-2-clause
| 4,542 | 0 | 19 | 856 | 2,149 | 1,063 | 1,086 | 74 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Atomo.Kernel.Message (load) where
import Atomo
import Atomo.Valuable
load :: VM ()
load = do
[p|(m: Message) type|] =: do
Message m <- here "m" >>= findMessage
case m of
Single {} -> return (particle "single")
Keyword {} -> return (particle "keyword")
[p|(m: Message) dispatch|] =:
here "m" >>= findMessage >>= dispatch . fromMessage
[p|(m: Message) particle|] =: do
Message m <- here "m" >>= findMessage
case m of
Single { mName = n } -> return (particle n)
Keyword { mNames = ns } -> return (keyParticle ns (replicate (length ns + 1) Nothing))
[p|(m: Message) target|] =: do
Message (Single { mTarget = t }) <- here "m" >>= findMessage
return t
[p|(m: Message) targets|] =: do
Message (Keyword { mTargets = ts }) <- here "m" >>= findMessage
return $ list ts
[p|(m: Message) optionals|] =: do
Message m <- here "m" >>= findMessage
liftM list $
mapM (\(Option _ n v) -> toValue (particle n, v)) (mOptionals m)
|
vito/atomo
|
src/Atomo/Kernel/Message.hs
|
bsd-3-clause
| 1,126 | 0 | 20 | 346 | 413 | 209 | 204 | 28 | 3 |
-- This is UnescapePure version as proposed by Bodigrim in
-- https://github.com/haskell/aeson/pull/894
--
-- WARNING: This file is security sensitive as it uses unsafeWrite which does
-- not check bounds. Any changes should be made with care and we would love to
-- get informed about them, just cc us in any PR targetting this file: @eskimor @jprider63
-- We would be happy to review the changes!
-- The security check at the end (pos > length) only works if pos grows
-- monotonically; if this condition does not hold, the check is flawed.
{-# LANGUAGE CPP #-}
module UnescapePureText2
(
unescapeText
) where
import Control.Exception (evaluate, throw, try)
import Control.Monad (when)
import Data.ByteString as B
import Data.Text (Text)
import qualified Data.Text.Array as A
import Data.Text.Encoding.Error (UnicodeException (..))
import Data.Text.Internal.Private (runText)
import Data.Text.Unsafe (unsafeDupablePerformIO)
import Data.Word (Word8, Word16, Word32)
import GHC.ST (ST)
#if MIN_VERSION_text(2,0,0)
import Data.Bits (Bits, shiftL, (.&.), (.|.))
import Data.Text.Internal.Encoding.Utf16 (chr2)
import Data.Text.Internal.Unsafe.Char (unsafeChr16, unsafeChr32, unsafeWrite)
#else
import Data.Bits (Bits, shiftL, shiftR, (.&.), (.|.))
#endif
-- Different UTF states.
data Utf =
UtfGround
| UtfTail1
| UtfU32e0
| UtfTail2
| UtfU32ed
| Utf843f0
| UtfTail3
| Utf843f4
deriving (Eq)
data State =
StateNone
| StateUtf !Utf !Word32
| StateBackslash
| StateU0
| StateU1 !Word16
| StateU2 !Word16
| StateU3 !Word16
| StateS0 !Word16
| StateS1 !Word16
| StateSU0 !Word16
| StateSU1 !Word16 !Word16
| StateSU2 !Word16 !Word16
| StateSU3 !Word16 !Word16
deriving (Eq)
-- References:
-- http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
-- https://github.com/jwilm/vte/blob/master/utf8parse/src/table.rs.in
setByte1 :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte1 point word = point .|. fromIntegral (word .&. 0x3f)
{-# INLINE setByte1 #-}
setByte2 :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte2 point word = point .|. (fromIntegral (word .&. 0x3f) `shiftL` 6)
{-# INLINE setByte2 #-}
setByte2Top :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte2Top point word = point .|. (fromIntegral (word .&. 0x1f) `shiftL` 6)
{-# INLINE setByte2Top #-}
setByte3 :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte3 point word = point .|. (fromIntegral (word .&. 0x3f) `shiftL` 12)
{-# INLINE setByte3 #-}
setByte3Top :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte3Top point word = point .|. (fromIntegral (word .&. 0xf) `shiftL` 12)
{-# INLINE setByte3Top #-}
setByte4 :: (Num a, Bits b, Bits a, Integral b) => a -> b -> a
setByte4 point word = point .|. (fromIntegral (word .&. 0x7) `shiftL` 18)
{-# INLINE setByte4 #-}
decode :: Utf -> Word32 -> Word8 -> (Utf, Word32)
decode UtfGround point word = case word of
w | 0x00 <= w && w <= 0x7f -> (UtfGround, fromIntegral word)
w | 0xc2 <= w && w <= 0xdf -> (UtfTail1, setByte2Top point word)
0xe0 -> (UtfU32e0, setByte3Top point word)
w | 0xe1 <= w && w <= 0xec -> (UtfTail2, setByte3Top point word)
0xed -> (UtfU32ed, setByte3Top point word)
w | 0xee <= w && w <= 0xef -> (UtfTail2, setByte3Top point word)
0xf0 -> (Utf843f0, setByte4 point word)
w | 0xf1 <= w && w <= 0xf3 -> (UtfTail3, setByte4 point word)
0xf4 -> (Utf843f4, setByte4 point word)
_ -> throwDecodeError
decode UtfU32e0 point word = case word of
w | 0xa0 <= w && w <= 0xbf -> (UtfTail1, setByte2 point word)
_ -> throwDecodeError
decode UtfU32ed point word = case word of
w | 0x80 <= w && w <= 0x9f -> (UtfTail1, setByte2 point word)
_ -> throwDecodeError
decode Utf843f0 point word = case word of
w | 0x90 <= w && w <= 0xbf -> (UtfTail2, setByte3 point word)
_ -> throwDecodeError
decode Utf843f4 point word = case word of
w | 0x80 <= w && w <= 0x8f -> (UtfTail2, setByte3 point word)
_ -> throwDecodeError
decode UtfTail3 point word = case word of
w | 0x80 <= w && w <= 0xbf -> (UtfTail2, setByte3 point word)
_ -> throwDecodeError
decode UtfTail2 point word = case word of
w | 0x80 <= w && w <= 0xbf -> (UtfTail1, setByte2 point word)
_ -> throwDecodeError
decode UtfTail1 point word = case word of
w | 0x80 <= w && w <= 0xbf -> (UtfGround, setByte1 point word)
_ -> throwDecodeError
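-- Worked example (editor's sketch): decoding the two-byte UTF-8 sequence
-- 0xC3 0xA9 (U+00E9):
--   decode UtfGround 0    0xC3 == (UtfTail1,  0xC0)  -- (0xC3 .&. 0x1f) `shiftL` 6
--   decode UtfTail1  0xC0 0xA9 == (UtfGround, 0xE9)  -- 0xC0 .|. (0xA9 .&. 0x3f)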
decodeHex :: Word8 -> Word16
decodeHex x
| 48 <= x && x <= 57 = fromIntegral x - 48 -- 0-9
| 65 <= x && x <= 70 = fromIntegral x - 55 -- A-F
| 97 <= x && x <= 102 = fromIntegral x - 87 -- a-f
| otherwise = throwDecodeError
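-- For example (editor's sketch): decodeHex 48 == 0 ('0'), decodeHex 65 == 10
-- ('A'), decodeHex 102 == 15 ('f'); any other byte throws a decode error.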
unescapeText' :: ByteString -> Text
unescapeText' bs = runText $ \done -> do
dest <- A.new len
(pos, finalState) <- loop dest (0, StateNone) 0
-- Check final state. Currently pos gets only increased over time, so this check should catch overflows.
when ( finalState /= StateNone || pos > len)
throwDecodeError
done dest pos -- TODO: pos, pos-1??? XXX
where
len = B.length bs
runUtf dest pos st point c = case decode st point c of
(UtfGround, 92) -> -- Backslash
return (pos, StateBackslash)
#if MIN_VERSION_text(2,0,0)
(UtfGround, w) -> do
d <- unsafeWrite dest pos (unsafeChr32 w)
return (pos + d, StateNone)
#else
(UtfGround, w) | w <= 0xffff ->
writeAndReturn dest pos (fromIntegral w) StateNone
(UtfGround, w) -> do
A.unsafeWrite dest pos (0xd7c0 + fromIntegral (w `shiftR` 10))
writeAndReturn dest (pos + 1) (0xdc00 + fromIntegral (w .&. 0x3ff)) StateNone
#endif
(st', p) ->
return (pos, StateUtf st' p)
loop :: A.MArray s -> (Int, State) -> Int -> ST s (Int, State)
loop _ ps i | i >= len = return ps
loop dest ps i = do
let c = B.index bs i -- JP: We can use unsafe index once we prove bounds with Liquid Haskell.
ps' <- f dest ps c
loop dest ps' $ i+1
-- No pending state.
f dest (pos, StateNone) c = runUtf dest pos UtfGround 0 c
-- In the middle of parsing a UTF string.
f dest (pos, StateUtf st point) c = runUtf dest pos st point c
-- In the middle of escaping a backslash.
f dest (pos, StateBackslash) 34 = writeAndReturn dest pos 34 StateNone -- "
f dest (pos, StateBackslash) 92 = writeAndReturn dest pos 92 StateNone -- Backslash
f dest (pos, StateBackslash) 47 = writeAndReturn dest pos 47 StateNone -- /
f dest (pos, StateBackslash) 98 = writeAndReturn dest pos 8 StateNone -- b
f dest (pos, StateBackslash) 102 = writeAndReturn dest pos 12 StateNone -- f
f dest (pos, StateBackslash) 110 = writeAndReturn dest pos 10 StateNone -- n
f dest (pos, StateBackslash) 114 = writeAndReturn dest pos 13 StateNone -- r
f dest (pos, StateBackslash) 116 = writeAndReturn dest pos 9 StateNone -- t
f _ (pos, StateBackslash) 117 = return (pos, StateU0) -- u
f _ ( _, StateBackslash) _ = throwDecodeError
-- Processing '\u'.
f _ (pos, StateU0) c =
let w = decodeHex c in
return (pos, StateU1 (w `shiftL` 12))
f _ (pos, StateU1 w') c =
let w = decodeHex c in
return (pos, StateU2 (w' .|. (w `shiftL` 8)))
f _ (pos, StateU2 w') c =
let w = decodeHex c in
return (pos, StateU3 (w' .|. (w `shiftL` 4)))
f dest (pos, StateU3 w') c =
let w = decodeHex c in
let u = w' .|. w in
-- Get next state based on surrogates.
if u >= 0xd800 && u <= 0xdbff then -- High surrogate.
return (pos, StateS0 u)
else if u >= 0xdc00 && u <= 0xdfff then -- Low surrogate.
throwDecodeError
else do
#if MIN_VERSION_text(2,0,0)
d <- unsafeWrite dest pos (unsafeChr16 u)
return (pos + d, StateNone)
#else
writeAndReturn dest pos u StateNone
#endif
-- Handle surrogates.
f _ (pos, StateS0 hi) 92 = return (pos, StateS1 hi) -- Backslash
f _ ( _, StateS0{}) _ = throwDecodeError
f _ (pos, StateS1 hi) 117 = return (pos, StateSU0 hi) -- u
f _ ( _, StateS1{}) _ = throwDecodeError
f _ (pos, StateSU0 hi) c =
let w = decodeHex c in
return (pos, StateSU1 hi (w `shiftL` 12))
f _ (pos, StateSU1 hi w') c =
let w = decodeHex c in
return (pos, StateSU2 hi (w' .|. (w `shiftL` 8)))
f _ (pos, StateSU2 hi w') c =
let w = decodeHex c in
return (pos, StateSU3 hi (w' .|. (w `shiftL` 4)))
f dest (pos, StateSU3 hi w') c =
let w = decodeHex c in
let u = w' .|. w in
-- Check if not low surrogate.
if u < 0xdc00 || u > 0xdfff then
throwDecodeError
else do
#if MIN_VERSION_text(2,0,0)
d <- unsafeWrite dest pos (chr2 hi u)
return (pos + d, StateNone)
#else
A.unsafeWrite dest pos hi
writeAndReturn dest (pos + 1) u StateNone
#endif
#if MIN_VERSION_text(2,0,0)
writeAndReturn :: A.MArray s -> Int -> Word8 -> t -> ST s (Int, t)
#else
writeAndReturn :: A.MArray s -> Int -> Word16 -> t -> ST s (Int, t)
#endif
writeAndReturn dest pos char res = do
A.unsafeWrite dest pos char
return (pos + 1, res)
{-# INLINE writeAndReturn #-}
throwDecodeError :: a
throwDecodeError =
let desc = "Data.Text.Internal.Encoding.decodeUtf8: Invalid UTF-8 stream" in
throw (DecodeError desc Nothing)
unescapeText :: ByteString -> Either UnicodeException Text
unescapeText = unsafeDupablePerformIO . try . evaluate . unescapeText'
|
dmjio/aeson
|
benchmarks/bench/UnescapePureText2.hs
|
bsd-3-clause
| 10,014 | 0 | 18 | 2,840 | 3,138 | 1,658 | 1,480 | 211 | 31 |
module Packet ( command
, packet
, readPacket
, parseControlPacket
) where
import Data.ByteString.Builder as BS
import Data.ByteString.Lazy (toStrict)
import Data.ByteString as BS
import Data.ByteString.Char8 as BSC (unpack)
import Data.Word
import Data.Monoid ((<>))
import Network.Simple.TCP
import Control.Monad (when)
command :: Word8 -> Word8 -> String -> ByteString
command input output flags =
toStrict . toLazyByteString $ BS.word8 0
<> BS.word16BE (fromIntegral $ Prelude.length flags + 2)
<> BS.word8 input
<> BS.word8 output
<> BS.stringUtf8 flags
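-- Byte-level sketch (editor's example): command 1 2 "ab" yields the bytes
-- [0x00, 0x00, 0x04, 0x01, 0x02, 0x61, 0x62]: a zero marker byte, the
-- big-endian length (2 flag bytes + 2), the input and output numbers, and
-- the UTF-8 encoded flags.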
packet :: Word8 -> ByteString -> ByteString
packet ch bs = toStrict . toLazyByteString
$ BS.word8 ch
<> BS.word16BE (fromIntegral $ BS.length bs)
<> BS.byteString bs
readPacket :: Socket -> IO (Maybe (Word8, ByteString))
readPacket s = do bs <- read s 3
case bs of
Nothing -> return Nothing
Just header -> do dat <- read s len
case dat of
Nothing -> return Nothing
Just dt -> return $ Just (index, dt)
where [index, l1, l2] = BS.unpack header
len = fromIntegral l1 * 256 + fromIntegral l2
where read :: Socket -> Int -> IO (Maybe ByteString)
read s 0 = return $ Just BS.empty
read s n = do mbs <- recv s n
case mbs of
Nothing -> return Nothing
Just bs -> if n == BS.length bs then
return $ Just bs
else do rem <- read s (n - BS.length bs)
return $ Just bs <> rem
parseControlPacket :: ByteString -> (Word8, Word8, String)
parseControlPacket bs = (input, output, flags)
where (h, rem) = BS.splitAt 2 bs
[input, output] = BS.unpack h
flags = BSC.unpack rem
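-- For example (editor's sketch):
-- parseControlPacket (BS.pack [1, 2, 0x66, 0x6C]) == (1, 2, "fl")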
|
amir-sabbaghi/encoder
|
src/Packet.hs
|
bsd-3-clause
| 2,187 | 0 | 20 | 931 | 645 | 331 | 314 | 48 | 6 |
{-# LANGUAGE FlexibleInstances
, BangPatterns
, MagicHash
, ScopedTypeVariables
, TypeFamilies
, UndecidableInstances
, OverlappingInstances
, DeriveDataTypeable
, MultiParamTypeClasses
, NamedFieldPuns
#-}
-- State monad transformer is needed for both step & graph:
#ifndef MODNAME
#define MODNAME Intel.Cnc6
#endif
#define CNC_SCHEDULER 6
#define STEPLIFT S.lift$
#define GRAPHLIFT S.lift$
#define SUPPRESS_runGraph
#define DEFINED_free_items
#include "Cnc.Header.hs"
------------------------------------------------------------
-- Version 6: Blocking with replacement.
#include "shared_5_6.hs"
-- When a thread goes down (blocks waiting on data), this version
-- spawns a new thread to replace it. The thread that went down
-- is mortal when it wakes up. It will finish what it was doing and
-- then die.
-- This version is correct but sometimes inefficient. It can have
-- threads terminate prematurely when the program enters a serial
-- bottleneck.
-- Then at finalize time we set up the workers and run them.
finalize finalAction =
do joiner <- GRAPHLIFT newChan
(HiddenState5 { stack, mortal }) <- S.get
let worker id =
do x <- STEPLIFT tryPop stack
case x of
Nothing -> STEPLIFT writeChan joiner id
Just action ->
do action
myId <- STEPLIFT myThreadId
set <- STEPLIFT readHotVar mortal
if Set.notMember myId set
then worker id -- keep going
else STEPLIFT writeChan joiner id
ver5_6_core_finalize joiner finalAction worker True numCapabilities (\_ -> return ())
get col tag =
do (HiddenState5 { stack, mortal }) <- S.get
let io = do myId <- myThreadId
modifyHotVar_ mortal (Set.insert myId)
ver5_6_core_get io col tag
quiescence_support = True
|
rrnewton/Haskell-CnC
|
Intel/Cnc6.hs
|
bsd-3-clause
| 1,815 | 11 | 16 | 404 | 284 | 145 | 139 | 31 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent.Async
import Control.Concurrent
import Control.Monad
import Data.List
import Data.Maybe
import System.Environment
import System.Exit
import System.Console.GetOpt
import Data.ByteString.Char8 (unpack)
import Database.Memcache.Protocol
import Database.Memcache.Server
import Network.Socket (PortNumber)
data Operation = GET | STAT | NOOP deriving (Show, Eq)
data Options = Options { qps :: Int
, quotum :: Int
, server :: String
, port :: Int
, time :: Int
, op :: Operation
, newConn :: Bool
} deriving (Show, Eq)
defaultOptions :: Options
defaultOptions = Options { qps = 1000
, quotum = 1
, server = "localhost"
, port = 11211
, time = 5
, op = GET
, newConn = False
}
options :: [OptDescr (Options -> Options)]
options =
[ Option ['q'] ["qps"] (ReqArg (\q o -> o { qps = read q}) "QPS")
"operations per quotum"
, Option ['Q'] ["quotum"] (ReqArg (\q o -> o { quotum = read q}) "SECONDS")
"length of a quotum in seconds"
, Option ['s'] ["server"] (ReqArg (\s o -> o { server = s }) "SERVER")
"server to connect to"
, Option ['p'] ["port"] (ReqArg (\p o -> o { port = read p }) "PORT")
"port to connect to server on"
, Option ['t'] ["time"] (ReqArg (\t o -> o { time = read t }) "TIME")
"time to generate requests for"
, Option [] ["stats"] (NoArg $ \o -> o { op = STAT })
"generate stat calls"
, Option [] ["get"] (NoArg $ \o -> o { op = GET})
"generate get calls"
, Option [] ["noop"] (NoArg $ \o -> o { op = NOOP })
"no operation, just generate connections"
, Option [] ["new-conns"] (NoArg $ \o -> o { newConn = True })
"use new connection for each requset"
]
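-- Example invocation (editor's sketch; the binary name follows the usage
-- header below):
--
-- > stats --qps 500 --quotum 1 --time 10 --server localhost --port 11211 --get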
parseArguments :: IO Options
parseArguments = do
args <- getArgs
case getOpt Permute options args of
(o, _, []) -> return $ foldl' (flip ($)) defaultOptions o
(_, _, errs) -> do
when (not $ null errs) $ do
putStr $ "Error: " ++ head errs
forM_ (tail errs) $ \e ->
putStr $ " " ++ e
putStrLn ""
putStrLn $ usageInfo header options
exitWith $ ExitFailure 1
where
header = "Usage: stats [OPTIONS]"
main :: IO ()
main = do
opts <- parseArguments
when (time opts < 1) $ error "Incorrect time value!"
when (qps opts < 1) $ error "Incorrect qps value!"
when (quotum opts < 1) $ error "Incorrect quotum value!"
n <- getNumCapabilities
putStrLn $ "Running on " ++ show n ++ " cores"
putStrLn $ "Connecting to server: " ++ server opts ++ ":" ++ show (port opts)
putStrLn "--------"
let step = (quotum opts * 1000000) `quot` qps opts
events = qps opts * time opts
-- global connection
mc <- newMemcacheClient (server opts) (toEnum $ port opts)
-- spawn all triggers with a delay to let scheduler handle
children <- forM [0..(events - 1)] $ \_ -> do
    -- optionally create a new connection for each request
a <- async $ do
mc <- case newConn opts of
True -> newMemcacheClient (server opts) (toEnum $ port opts)
False -> return mc
case op opts of
NOOP -> return ()
GET -> void $ get mc "k"
STAT -> void $ stats mc Nothing
when (newConn opts) $ void $ quit mc
threadDelay step
return a
quit mc
-- wait on them all.
forM_ children wait
|
dterei/memcache-hs
|
tools/OpGen.hs
|
bsd-3-clause
| 3,913 | 0 | 23 | 1,436 | 1,226 | 646 | 580 | 92 | 4 |
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RoleAnnotations #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Diverse.Many.Internal (
-- * 'Many' type
Many(..) -- Exporting constructor unsafely!
-- * Isomorphism
, IsMany(..)
, fromMany'
, toMany'
-- * Construction
, nil
, single
, consMany
, (./)
, snocMany
, snocMany'
, (\.)
, append
-- , CanAppendUnique(..)
, (/./)
-- * Simple queries
, viewf
, viewb
, front
, back
, aft
, fore
-- * Single field
-- ** Getter for single field
, grab
, grabL
, grabTag
, grabN
-- ** Setter for single field
, replace
, replace'
, replaceL
, replaceL'
, replaceTag
, replaceTag'
, replaceN
, replaceN'
-- * Multiple fields
-- ** Getter for multiple fields
, Select
, select
, selectL
, SelectN
, selectN
-- ** Setter for multiple fields
, Amend
, amend
, Amend'
, amend'
, amendL
, amendL'
, AmendN
, amendN
, AmendN'
, amendN'
-- * Destruction
-- ** By type
, Collect
, Collector
, forMany
, collect
-- ** By Nat index offset
, CollectN
, CollectorN
, forManyN
, collectN
) where
import Control.Applicative
import Control.DeepSeq
import Data.Bool
import Data.Diverse.AFoldable
import Data.Diverse.AFunctor
import Data.Diverse.ATraversable
import Data.Diverse.Case
import Data.Diverse.Reiterate
import Data.Diverse.TypeLevel
import Data.Foldable
import Data.Kind
import Data.Proxy
import Data.Semigroup (Semigroup(..))
import qualified Data.Sequence as S
import Data.Tagged
import GHC.Exts (Any, coerce)
import qualified GHC.Generics as G
import GHC.TypeLits
import Text.ParserCombinators.ReadPrec
import Text.Read
import qualified Text.Read.Lex as L
import Unsafe.Coerce
import Data.Hashable (Hashable)
-- This module uses the partial 'head', 'tail' from Prelude.
-- I like to highlight them as partial by using them in the namespace Partial.head
-- These usages in this module are safe due to size guarantees provided by the typelist.
import Prelude as Partial
-- | A Many is an anonymous product type (also known as a polymorphic record), with no limit on the number of fields.
--
-- The following functions can be used to manipulate unique fields
--
-- * getter/setter for single field: 'grab' and 'replace'
-- * getter/setter for multiple fields: 'select' and 'amend'
-- * folds: 'forMany' or 'collect'
--
-- These functions are type specified. This means labels are not required because the types themselves can be used to access the 'Many'.
-- It is a compile error to use those functions for duplicate fields.
--
-- For duplicate fields, Nat-indexed versions of the functions are available:
--
-- * getter/setter for single field: 'grabN' and 'replaceN'
-- * getter/setter for multiple fields: 'selectN' and 'amendN'
-- * folds: 'forManyN' or 'collectN'
--
-- Encoding: The record is encoded as (S.Seq Any).
-- This encoding should be reasonably efficient for any number of fields.
--
-- The map Key is index + offset of the type in the typelist.
-- The Offset is used to allow efficient cons 'consMany'.
--
-- @Key = Index of type in typelist + Offset@
--
-- The constructor will guarantee the correct number and types of the elements.
-- The constructor is only exported in the "Data.Diverse.Many.Internal" module
newtype Many (xs :: [Type]) = Many (S.Seq Any)
-- Inferred role is phantom which is incorrect
-- representational means:
-- @
-- Coercible '[Int] '[IntLike] => Coercible (Many '[Int]) (Many '[IntLike])
-- @
type role Many representational
-- | Many stored as a list. This is useful when folding over 'Many' efficiently
-- so that the conversion to List is only done once
newtype Many_ (xs :: [Type]) = Many_ { runMany_ :: [Any] }
type role Many_ representational
toMany_ :: Many xs -> Many_ xs
toMany_ (Many m) = Many_ (toList m)
fromMany_ :: Many_ xs -> Many xs
fromMany_ (Many_ xs) = Many (S.fromList xs)
getMany_ :: Many_ xs -> [Any]
getMany_ (Many_ xs) = xs
-----------------------------------------------------------------------
-- | A terminating 'G.Generic' instance encoded as a 'nil'.
instance G.Generic (Many '[]) where
type Rep (Many '[]) = G.U1
from _ = {- G.U1 -} G.U1
to G.U1 = nil
-- | A 'G.Generic' instance encoded as the 'front' value 'G.:*:' with the 'aft' 'Many'.
-- The 'G.C1' and 'G.S1' metadata are not encoded.
instance G.Generic (Many (x ': xs)) where
type Rep (Many (x ': xs)) = (G.Rec0 x) G.:*: (G.Rec0 (Many xs))
from r = ({- G.Rec0 -} G.K1 (front r)) G.:*: ({- G.Rec0 -} G.K1 (aft r))
to (({- G.Rec0 -} G.K1 a) G.:*: ({- G.Rec0 -} G.K1 b)) = a ./ b
-----------------------------------------------------------------------
-- | This instance allows converting to and from Many
-- There are instances for converting tuples of up to size 15.
class IsMany t xs a where
toMany :: t xs a -> Many xs
fromMany :: Many xs -> t xs a
-- | Converts from a value (eg a tuple) to a 'Many', via a 'Tagged' wrapper
toMany' :: IsMany Tagged xs a => a -> Many xs
toMany' a = toMany (Tagged a)
-- | Converts from a Many to a value (eg a tuple), via a Tagged wrapper
fromMany' :: IsMany Tagged xs a => Many xs -> a
fromMany' = unTagged . fromMany
-- | These instances add about 7 seconds to the compile time!
instance IsMany Tagged '[] () where
toMany _ = nil
fromMany _ = Tagged ()
-- | This single-field instance is the reason for the 'Tagged' wrapper.
-- Otherwise this instance would overlap.
instance IsMany Tagged '[a] a where
toMany (Tagged a) = single a
fromMany r = Tagged (grab @a r)
instance IsMany Tagged '[a,b] (a,b) where
toMany (Tagged (a,b)) = a./b./nil
fromMany r = Tagged (grabN @0 r, grabN @1 r)
instance IsMany Tagged '[a,b,c] (a,b,c) where
toMany (Tagged (a,b,c)) = a./b./c./nil
fromMany r = Tagged (grabN @0 r, grabN @1 r, grabN @2 r)
instance IsMany Tagged '[a,b,c,d] (a,b,c,d) where
toMany (Tagged (a,b,c,d)) = a./b./c./d./nil
fromMany r = Tagged (grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r)
instance IsMany Tagged '[a,b,c,d,e] (a,b,c,d,e) where
toMany (Tagged (a,b,c,d,e)) = a./b./c./d./e./nil
fromMany r = Tagged (grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r)
instance IsMany Tagged '[a,b,c,d,e,f] (a,b,c,d,e,f) where
toMany (Tagged (a,b,c,d,e,f)) = a./b./c./d./e./f./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r)
instance IsMany Tagged '[a,b,c,d,e,f,g] (a,b,c,d,e,f,g) where
toMany (Tagged (a,b,c,d,e,f,g)) = a./b./c./d./e./f./g./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h] (a,b,c,d,e,f,g,h) where
toMany (Tagged (a,b,c,d,e,f,g,h)) = a./b./c./d./e./f./g./h./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i] (a,b,c,d,e,f,g,h,i) where
toMany (Tagged (a,b,c,d,e,f,g,h,i)) = a./b./c./d./e./f./g./h./i./ nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j] (a,b,c,d,e,f,g,h,i,j) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j)) = a./b./c./d./e./f./g./h./i./j./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j,k] (a,b,c,d,e,f,g,h,i,j,k) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j,k)) = a./b./c./d./e./f./g./h./i./j./k./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r
, grabN @10 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j,k,l] (a,b,c,d,e,f,g,h,i,j,k,l) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j,k,l)) = a./b./c./d./e./f./g./h./i./j./k./l./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r
, grabN @10 r, grabN @11 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j,k,l,m] (a,b,c,d,e,f,g,h,i,j,k,l,m) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j,k,l,m)) = a./b./c./d./e./f./g./h./i./j./k./l./m./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r
, grabN @10 r, grabN @11 r, grabN @12 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j,k,l,m,n] (a,b,c,d,e,f,g,h,i,j,k,l,m,n) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j,k,l,m,n)) = a./b./c./d./e./f./g./h./i./j./k./l./m./n./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r
, grabN @10 r, grabN @11 r, grabN @12 r, grabN @13 r)
instance IsMany Tagged '[a,b,c,d,e,f,g,h,i,j,k,l,m,n,o] (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o) where
toMany (Tagged (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o)) = a./b./c./d./e./f./g./h./i./j./k./l./m./n./o./nil
fromMany r = Tagged ( grabN @0 r, grabN @1 r, grabN @2 r, grabN @3 r, grabN @4 r
, grabN @5 r, grabN @6 r, grabN @7 r, grabN @8 r, grabN @9 r
                        , grabN @10 r, grabN @11 r, grabN @12 r, grabN @13 r, grabN @14 r)
-----------------------------------------------------------------------
-- | Empty 'Many'.
nil :: Many '[]
nil = Many S.empty
-- | Create a Many from a single value. Analogous to 'S.singleton'
single :: x -> Many '[x]
single v = Many (S.singleton (unsafeCoerce v))
-- | Add an element to the left of a Many.
-- Not named @cons@ to avoid conflict with 'Control.Lens.cons'
consMany :: x -> Many xs -> Many (x ': xs)
consMany x (Many rs) = Many ((unsafeCoerce x) S.<| rs)
infixr 5 `consMany`
consMany_ :: x -> Many_ xs -> Many_ (x ': xs)
consMany_ x (Many_ xs) = Many_ (unsafeCoerce x : xs)
-- | Infix version of 'consMany'.
--
-- Mnemonic: Element on the left is smaller './' than the larger 'Many' to the right.
(./) :: x -> Many xs -> Many (x ': xs)
(./) = consMany
infixr 5 ./ -- like Data.List.(:)
-- | Add an element to the right of a Many
-- Not named @snoc@ to avoid conflict with 'Control.Lens.snoc'
snocMany :: Many xs -> y -> Many (Append xs '[y])
snocMany (Many ls) y = Many (ls S.|> (unsafeCoerce y))
infixl 5 `snocMany`
-- | Add an element to the right of a Many iff the field doesn't already exist.
snocMany'
:: forall y xs.
MaybeUniqueMember y xs
=> Many xs -> y -> Many (SnocUnique xs y)
snocMany' (Many ls) y = if i /= 0 then Many ls else Many (ls S.|> unsafeCoerce y)
where
i = natToInt @(PositionOf y xs) :: Int
infixl 5 `snocMany'`
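-- For example (editor's sketch): with xs = (5 :: Int) ./ nil,
-- xs `snocMany'` False appends the new Bool field, while
-- xs `snocMany'` (6 :: Int) leaves xs unchanged because an Int field already
-- exists.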
-- | Infix version of 'snocMany'.
--
-- Mnemonic: Many is larger '\.' than the smaller element
(\.) :: Many xs -> y -> Many (Append xs '[y])
(\.) = snocMany
infixl 5 \.
-- | Infix version of 'append'.
--
-- Mnemonic: 'consMany' './' with an extra slash (meaning 'Many') in front.
(/./) :: Many xs -> Many ys -> Many (Append xs ys)
(/./) = append
infixr 5 /./ -- like (++)
-- | Appends two Manys together
append :: Many xs -> Many ys -> Many (Append xs ys)
append (Many ls) (Many rs) = Many (ls S.>< rs)
infixr 5 `append` -- like Data.List (++)
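-- For example (editor's sketch):
-- ((5 :: Int) ./ nil) /./ (False ./ 'X' ./ nil) is the same 'Many' as
-- (5 :: Int) ./ False ./ 'X' ./ nil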
-- class CanAppendUnique xs ys where
-- -- | Appends the unique fields fields from the right Many using 'snocMany''
-- append' :: Many xs -> Many ys -> Many (AppendUnique xs ys)
-- instance CanAppendUnique xs '[] where
-- append' ls _ = ls
-- instance ( MaybeUniqueMember y xs
-- , CanAppendUnique (SnocUnique xs y) ys
-- , AppendUnique (SnocUnique xs y) ys ~ AppendUnique xs (y : ys)) => CanAppendUnique xs (y ': ys) where
-- append' ls rs = append' (snocMany' ls r) rs'
-- where (r, rs') = viewf rs
-- {-# INLINABLE append' #-} -- This makes compiling tests a little faster than with no pragma
-- infixr 5 `append'` -- like Data.List (++)
-----------------------------------------------------------------------
-- | Split a non-empty Many into the first element, then the rest of the Many.
-- Analogous to 'S.viewl'
viewf :: Many (x ': xs) -> (x, Many xs)
viewf (Many xs) = case S.viewl xs of
S.EmptyL -> error "no front"
(a S.:< ys) -> (unsafeCoerce a, Many ys)
-- | Split a non-empty Many into initial part of Many, and the last element.
-- Analogous to 'S.viewr'
viewb :: Many (x ': xs) -> (Many (Init (x ': xs)), Last (x ': xs))
viewb (Many xs) = case S.viewr xs of
S.EmptyR -> error "no back"
(ys S.:> a) -> (Many ys, unsafeCoerce a)
-- | Extract the first element of a Many, which guaranteed to be non-empty.
-- Analogous to 'Partial.head'
front :: Many (x ': xs) -> x
front = fst . viewf
front_ :: Many_ (x ': xs) -> x
front_ (Many_ xs) = unsafeCoerce (Partial.head xs)
-- | Extract the 'back' element of a Many, which guaranteed to be non-empty.
-- Analogous to 'Prelude.last'
back :: Many (x ': xs) -> Last (x ': xs)
back = snd . viewb
-- | Extract the elements after the front of a Many, which guaranteed to be non-empty.
-- Analogous to 'Partial.tail'
aft :: Many (x ': xs) -> Many xs
aft = snd . viewf
aft_ :: Many_ (x ': xs) -> Many_ xs
aft_ (Many_ xs) = Many_ (Partial.tail xs)
-- | Return all the elements of a Many except the 'back' one, which guaranteed to be non-empty.
-- Analogous to 'Prelude.init'
fore :: Many (x ': xs) -> Many (Init (x ': xs))
fore = fst . viewb
--------------------------------------------------
-- | Getter by unique type. Get the field with type @x@.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'grab' \@Int x \`shouldBe` 5
-- @
grab :: forall x xs. UniqueMember x xs => Many xs -> x
grab (Many xs) = unsafeCoerce $ grab_ @(IndexOf x xs) xs
grab_ :: forall n. NatToInt n => S.Seq Any -> Any
grab_ xs = let !x = S.index xs i in x -- forcing x to avoid storing Seq in thunk
where i = natToInt @n
--------------------------------------------------
-- | Getter by label. Get the value of the field with tag @label@ which can be any type
-- not just @KnownSymbol@.
--
-- @
-- let y = False './' Tagged \@Foo \'X' './' Tagged @"Hi" True './' 'nil'
-- 'grabL' \@Foo y \`shouldBe` Tagged \@Foo \'X'
-- 'grabL' \@"Hi" y \`shouldBe` Tagged \@"Hi" True
-- @
grabL :: forall l x xs. (UniqueLabelMember l xs, x ~ KindAtLabel l xs) => Many xs -> x
grabL (Many xs) = unsafeCoerce $ grab_ @(IndexOf x xs) xs
--------------------------------------------------
-- | Variation of 'grab' for a 'Tagged' field that untags the field.
grabTag :: forall l x xs. (UniqueMember (Tagged l x) xs)
=> Many xs -> x
grabTag xs = unTagged (grab @(Tagged l x) xs)
--------------------------------------------------
-- | Getter by index. Get the value of the field at index type-level Nat @n@
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'grabN' @1 x \`shouldBe` False
-- @
grabN :: forall n x xs. MemberAt n x xs => Many xs -> x
grabN (Many xs) = unsafeCoerce $ grab_ @n xs
--------------------------------------------------
-- | Setter by unique type. Set the field with type @x@.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'replace'' \@Int x 6 \`shouldBe` (6 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- @
replace' :: forall x xs. UniqueMember x xs => Many xs -> x -> Many xs
replace' (Many xs) x = Many $ replace_ @(IndexOf x xs) xs (unsafeCoerce x)
replace_ :: forall n. NatToInt n => S.Seq Any -> Any -> S.Seq Any
replace_ xs x = S.update i x xs
where i = natToInt @n
-- | Polymorphic setter by unique type. Set the field with type @x@, and replace with type @y@
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'replace' \@Int x (Just True) \`shouldBe` Just True './' False './' \'X' './' Just \'O' './' 'nil'
-- @
replace :: forall x y xs. UniqueMember x xs => Many xs -> y -> Many (Replace x y xs)
replace (Many xs) x = Many $ replace_ @(IndexOf x xs) xs (unsafeCoerce x)
--------------------------------------------------
-- | Setter by unique label. Set the field with label @l@.
--
-- @
-- let y = (5 :: Int) './' False './' Tagged \@Foo \'X' './' Tagged \@\"Hello" (6 :: Int) './' 'nil'
-- 'replaceL'' \@Foo y (Tagged \@Foo \'Y') \`shouldBe`
-- (5 :: Int) './' False './' Tagged \@Foo \'Y' './' Tagged \@\"Hello" (6 :: Int) './' 'nil'
-- 'replaceL'' \@\"Hello" y (Tagged \@\"Hello" 7) \`shouldBe`
-- (5 :: Int) './' False './' Tagged \@Foo \'X' './' Tagged \@\"Hello" (7 :: Int) './' 'nil'
-- @
replaceL' :: forall l x xs. (UniqueLabelMember l xs, x ~ KindAtLabel l xs)
=> Many xs -> x -> Many xs
replaceL' (Many xs) x = Many $ replace_ @(IndexOf x xs) xs (unsafeCoerce x)
-- | Polymorphic setter by unique label. Set the field with label @l@ (of type @x@), and replace with type @y@
--
-- @
-- let y = (5 :: Int) './' False './' Tagged \@Foo \'X' './' Tagged \@\"Hello" (6 :: Int) './' 'nil'
-- replaceL \@Foo y (Tagged \@Bar \'Y') `shouldBe`
-- (5 :: Int) './' False './' Tagged @Bar 'Y' './' Tagged @"Hello" (6 :: Int) './' 'nil'
-- replaceL \@\"Hello" y (Tagged \@\"Hello" False) \`shouldBe`
-- (5 :: Int) './' False './' Tagged \@Foo \'X' './' Tagged \@\"Hello" False './' 'nil'
-- @
replaceL :: forall l y xs x. (UniqueLabelMember l xs, x ~ KindAtLabel l xs)
=> Many xs -> y -> Many (Replace x y xs)
replaceL (Many xs) y = Many $ replace_ @(IndexOf x xs) xs (unsafeCoerce y)
--------------------------------------------------
-- | Variation of 'replace'' specialized to 'Tagged' that automatically tags the value to be replaced.
replaceTag' :: forall l xs x. (UniqueMember (Tagged l x) xs)
=> Many xs -> x -> Many xs
replaceTag' xs x = replace' @(Tagged l x) xs (Tagged @l x)
-- | Variation of 'replace' specialized to 'Tagged' that automatically tags the value to be replaced.
replaceTag :: forall l x y xs. (UniqueMember (Tagged l x) xs)
=> Many xs -> y -> Many (Replace (Tagged l x) (Tagged l y) xs)
replaceTag xs y = replace @(Tagged l x) xs (Tagged @l y)
--------------------------------------------------
-- | Setter by index. Set the value of the field at index type-level Nat @n@
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'replaceN'' \@0 x 7 `shouldBe` (7 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- @
replaceN' :: forall n x xs. MemberAt n x xs => Many xs -> x -> Many xs
replaceN' (Many xs) x = Many $ replace_ @n xs (unsafeCoerce x)
-- | Polymorphic version of 'replaceN''
replaceN :: forall n x y xs. MemberAt n x xs => Many xs -> y -> Many (ReplaceIndex n x y xs)
replaceN (Many xs) x = Many $ replace_ @n xs (unsafeCoerce x)
-----------------------------------------------------------------------
-- | Internal function for construction - do not expose!
fromList' :: [(Int, WrappedAny)] -> S.Seq Any
fromList' = fmap (\(_, a) -> coerce a) . S.unstableSortBy (\(i, _) (j, _) -> compare i j) . S.fromList
------------------------------------------------------------------------
class CaseAny c (xs :: [Type]) where
-- | Return the handler/continuation when x is observed.
caseAny :: c xs -> Any -> CaseResult c Any
-----------------------------------------------------------------------
-- | Variation of 'Collector' which uses 'CaseAny' instead of 'Case'
data CollectorAny c (xs :: [Type]) r = CollectorAny (c r xs) [Any]
-- | nil case that doesn't even use 'caseAny', so that an instance of @CaseAny '[]@ is not needed.
instance AFoldable (CollectorAny c '[]) r where
afoldr _ z _ = z
instance ( CaseAny (c r) (x ': xs)
, Reiterate (c r) (x ': xs)
, AFoldable (CollectorAny c xs) r
, r ~ CaseResult (c r) Any
) =>
AFoldable (CollectorAny c (x ': xs)) r where
afoldr f z (CollectorAny c xs) = f (caseAny c x) (afoldr f z (CollectorAny (reiterate c) xs'))
where
-- use of head/tail here is safe as we are guaranteed the length from the typelist
x = Partial.head xs
xs' = Partial.tail xs
{-# INLINABLE afoldr #-} -- This makes compiling tests a little faster than with no pragma
forMany' :: c r xs -> Many xs -> CollectorAny c xs r
forMany' c (Many xs) = CollectorAny c (toList xs)
-----------------------------------------------------------------------
-- | A variation of 'CollectorN' which uses 'CaseAny' instead of 'Case'
data CollectorAnyN c n (xs :: [Type]) r = CollectorAnyN (c r n xs) [Any]
-- | nil case that doesn't even use 'caseAnyN', so that an instance of @CaseAnyN '[]@ is not needed.
instance AFoldable (CollectorAnyN c n '[]) r where
afoldr _ z _ = z
instance ( CaseAny (c r n) (x ': xs)
, ReiterateN (c r) n (x ': xs)
, AFoldable (CollectorAnyN c (n + 1) xs) r
, r ~ CaseResult (c r n) Any
) =>
AFoldable (CollectorAnyN c n (x ': xs)) r where
afoldr f z (CollectorAnyN c xs) = f (caseAny c x) (afoldr f z (CollectorAnyN (reiterateN c) xs'))
where
-- use of head/tail here is safe as we are guaranteed the length from the typelist
x = Partial.head xs
xs' = Partial.tail xs
{-# INLINABLE afoldr #-} -- This makes compiling tests a little faster than with no pragma
forManyN' :: c r n xs -> Many xs -> CollectorAnyN c n xs r
forManyN' c (Many xs) = CollectorAnyN c (toList xs)
-----------------------------------------------------------------------
-- | Collects the output from 'case''ing each field in a 'Many'.
-- Uses 'Reiterate' to prepare the 'Case' to accept the next type in the @xs@ typelist.
--
-- Internally, this holds the left-over [(k, v)] from the original 'Many' for the remaining typelist @xs@.
--
-- That is, the first v in the (k, v) is of type @x@, and the length of the list is equal to the length of @xs@.
data Collector c (xs :: [Type]) r = Collector (c r xs) [Any]
-- | nil case that doesn't even use 'case'', so that an instance of @Case '[]@ is not needed.
instance AFoldable (Collector c '[]) r where
afoldr _ z _ = z
-- | Folds values by 'reiterate'ing 'Case's through the @xs@ typelist.
instance ( Case (c r) (x ': xs)
, Reiterate (c r) (x ': xs)
, AFoldable (Collector c xs) r
, r ~ CaseResult (c r) x
) =>
AFoldable (Collector c (x ': xs)) r where
afoldr f z (Collector c xs) = f (case' c v) (afoldr f z (Collector (reiterate c) xs'))
where
-- use of head/tail here is safe as we are guaranteed the length from the typelist
v = unsafeCoerce $ Partial.head xs
xs' = Partial.tail xs
{-# INLINABLE afoldr #-} -- This makes compiling tests a little faster than with no pragma
-----------------------------------------------------------------------
-- | Terminating AFunctor instance for empty type list
instance AFunctor Many_ c '[] where
afmap _ = id
-- | Recursive AFunctor instance for non empty type list
-- delegate afmap'ing the remainder to an instance of Collector' with one less type in the type list
instance ( Reiterate c (a ': as)
, AFunctor Many_ c as
, Case c (a ': as)
) =>
AFunctor Many_ c (a ': as) where
afmap c (Many_ as) =
Many_ $
unsafeCoerce (case' c a) :
runMany_
(afmap
(reiterate c)
(Many_ as' :: Many_ as))
where
a = unsafeCoerce (Partial.head as)
as' = Partial.tail as
{-# INLINABLE afmap #-}
-- This makes compiling tests a little faster than with no pragma
-- | Given a 'Data.Diverse.Case' that transforms each type in the
-- typelist, convert a @Many xs@ to @Many (CaseResults c xs)@
instance AFunctor Many_ c as => AFunctor Many c as where
afmap c m = fromMany_ (afmap c (toMany_ m))
-----------------------------------------------------------------------
instance ATraversable Many_ c m '[] where
atraverse _ = pure
instance ( Reiterate (c m) (a ': as)
, ATraversable Many_ c m as
, Case (c m) (a ': as)
) =>
ATraversable Many_ c m (a ': as) where
atraverse c (Many_ as) =
Many_ <$>
liftA2 (:)
(unsafeCoerce (case' c a))
(runMany_ <$> atraverse (reiterate c) (Many_ as' :: Many_ as))
where
a = unsafeCoerce (Partial.head as)
as' = Partial.tail as
{-# INLINABLE atraverse #-}
instance ATraversable Many_ c m as => ATraversable Many c m as where
atraverse c m = fromMany_ <$> (atraverse c (toMany_ m))
-- -----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'collect' and 'forMany'
type Collect c r (xs :: [Type]) = (AFoldable (Collector c xs) r, Case (c r) xs)
-- | Folds any 'Many', even with indistinct types.
-- Given __distinct__ handlers for the fields in 'Many', create 'AFoldable'
-- of the results of running the handlers over the fields in 'Many'.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- y = show \@Int './' show \@Char './' show \@(Maybe Char) './' show \@Bool './' 'nil'
-- 'afoldr' (:) [] ('forMany' ('Data.Diverse.Cases.cases' y) x) \`shouldBe`
-- [\"5", \"False", \"\'X'", \"Just \'O'", \"6", \"Just \'A'"]
-- @
forMany :: Collect c r xs => c r xs -> Many xs -> Collector c xs r
forMany c (Many xs) = Collector c (toList xs)
-- | This is @flip 'forMany'@
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- y = show \@Int './' show \@Char './' show \@(Maybe Char) './' show \@Bool './' 'nil'
-- 'afoldr' (:) [] ('collect' x ('Data.Diverse.Cases.cases' y)) \`shouldBe`
-- [\"5", \"False", \"\'X'", \"Just \'O'", \"6", \"Just \'A'"]
-- @
collect :: (Collect c r xs) => Many xs -> c r xs -> Collector c xs r
collect = flip forMany
-----------------------------------------------------------------------
-- | A variation of 'Collector' which uses 'ReiterateN' instead of 'Reiterate'
data CollectorN c (n :: Nat) (xs :: [Type]) r = CollectorN (c r n xs) [Any]
-- | nil case that doesn't even use 'case'', so that an instance of @Case '[]@ is not needed.
instance AFoldable (CollectorN c n '[]) r where
afoldr _ z _ = z
-- | Folds values by 'reiterateN'ing 'Case's through the @xs@ typelist.
instance ( Case (c r n) (x ': xs)
, ReiterateN (c r) n (x ': xs)
, AFoldable (CollectorN c (n + 1) xs) r
, r ~ CaseResult (c r n) x
) =>
AFoldable (CollectorN c n (x ': xs)) r where
afoldr f z (CollectorN c xs) = f (case' c v) (afoldr f z (CollectorN (reiterateN c) xs'))
where
-- use of head/tail here is safe as we are guaranteed the length from the typelist
v = unsafeCoerce $ Partial.head xs
xs' = Partial.tail xs
{-# INLINABLE afoldr #-} -- This makes compiling tests a little faster than with no pragma
-- | A friendlier type constraint synonym for 'collectN' and 'forManyN'
type CollectN c r (n :: Nat) (xs :: [Type]) = (AFoldable (CollectorN c n xs) r, Case (c r n) xs)
-- | Folds any 'Many', even with indistinct types.
-- Given __index__ handlers for the fields in 'Many', create 'AFoldable'
-- of the results of running the handlers over the fields in 'Many'.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- y = show \@Int './' show \@Bool './' show \@Char './' show \@(Maybe Char) './' show \@Int './' show \@(Maybe Char) './' 'nil'
-- 'afoldr' (:) [] ('forManyN' ('Data.Diverse.Cases.casesN' y) x) \`shouldBe`
-- [\"5", \"False", \"\'X'", \"Just \'O'", \"6", \"Just \'A'"]
-- @
forManyN :: CollectN c r n xs => c r n xs -> Many xs -> CollectorN c n xs r
forManyN c (Many xs) = CollectorN c (toList xs)
-- | This is @flip 'forManyN'@
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- y = show \@Int './' show \@Bool './' show \@Char './' show \@(Maybe Char) './' show \@Int './' show \@(Maybe Char) './' 'nil'
-- 'afoldr' (:) [] ('collectN' x ('Data.Diverse.Cases.casesN' y)) \`shouldBe`
-- [\"5", \"False", \"\'X'", \"Just \'O'", \"6", \"Just \'A'"]
-- @
collectN :: CollectN c r n xs => Many xs -> c r n xs -> CollectorN c n xs r
collectN = flip forManyN
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'select'
type Select (smaller :: [Type]) (larger :: [Type]) =
(AFoldable
(CollectorAny (CaseSelect smaller larger) larger) (Maybe (Int, WrappedAny)))
-- | Construct a 'Many' with a smaller number of fields than the original.
-- Analogous to 'grab' getter but for multiple fields.
--
-- This can also be used to reorder fields in the original 'Many'.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- 'select' \@'[Bool, Char] x \`shouldBe` False './' \'X' './' 'nil'
-- @
select :: forall smaller larger. Select smaller larger => Many larger -> Many smaller
select t = Many (fromList' xs')
where
xs' = afoldr (\a z -> maybe z (: z) a) [] (forMany' (CaseSelect @smaller @larger @_ @larger) t)
-- | For each type x in @larger@, generate the (k, v) in @smaller@ (if it exists)
data CaseSelect (smaller :: [Type]) (larger :: [Type]) r (xs :: [Type]) = CaseSelect
type instance CaseResult (CaseSelect smaller larger r) x = r
instance Reiterate (CaseSelect smaller larger r) (x ': xs) where
reiterate = coerce
-- | For each type x in larger, find the index in ys, and create a (key, value)
instance forall smaller larger x xs. (UniqueIfExists smaller x larger, MaybeUniqueMember x smaller) =>
CaseAny (CaseSelect smaller larger (Maybe (Int, WrappedAny))) (x ': xs) where
caseAny _ v =
case i of
0 -> Nothing
i' -> Just (i' - 1, WrappedAny v)
where
i = natToInt @(PositionOf x smaller)
-----------------------------------------------------------------------
-- | A variation of 'select' which selects by labels
--
-- @
-- let x = False './' Tagged \@\"Hi" (5 :: Int) './' Tagged \@Foo False './' Tagged \@Bar \'X' './' Tagged \@\"Bye" 'O' './' 'nil'
-- 'selectL' \@'[Foo, Bar] x \`shouldBe` Tagged \@Foo False './' Tagged \@Bar \'X' './' 'nil'
-- 'selectL' \@'[\"Hi", \"Bye"] x \`shouldBe` Tagged \@\"Hi" (5 :: Int) './' Tagged \@\"Bye" \'O' './' 'nil'
-- @
selectL
:: forall ls smaller larger.
( Select smaller larger
, smaller ~ KindsAtLabels ls larger
, IsDistinct ls
, UniqueLabels ls larger)
=> Many larger -> Many smaller
selectL = select @smaller
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'selectN'
type SelectN (ns :: [Nat]) (smaller ::[Type]) (larger :: [Type]) =
( AFoldable (CollectorAnyN (CaseSelectN ns smaller) 0 larger) (Maybe (Int, WrappedAny))
, smaller ~ KindsAtIndices ns larger
, IsDistinct ns)
-- | A variation of 'select' which uses a Nat list @ns@ to specify how to reorder the fields, where
--
-- @
-- indices[branch_idx] = tree_idx
-- @
--
-- This variation allows @smaller@ or @larger@ to contain indistinct types, since
-- the mapping is specified by @indices@.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- 'selectN' @'[5, 4, 0] x \`shouldBe` Just \'A' './' (6 :: Int) './' (5 ::Int) './' 'nil'
-- @
selectN
:: forall ns smaller larger.
SelectN ns smaller larger
=> Many larger -> Many smaller
selectN xs = Many (fromList' xs')
where
xs' = afoldr (\a z -> maybe z (: z) a) [] (forManyN' (CaseSelectN @ns @smaller @_ @0 @larger) xs)
data CaseSelectN (indices :: [Nat]) (smaller :: [Type]) r (n :: Nat) (xs :: [Type]) = CaseSelectN
type instance CaseResult (CaseSelectN indices smaller r n) x = r
instance ReiterateN (CaseSelectN indices smaller r) n (x ': xs) where
reiterateN CaseSelectN = CaseSelectN
-- | For each type x in @larger@, find the index in ys, and create an (incrementing key, value)
instance forall indices smaller n x xs n'. (MaybeMemberAt n' x smaller, n' ~ PositionOf n indices) =>
CaseAny (CaseSelectN indices smaller (Maybe (Int, WrappedAny)) n) (x ': xs) where
caseAny _ v =
case i of
0 -> Nothing
i' -> Just (i' - 1, WrappedAny v)
where
i = natToInt @n'
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'amend''
type Amend' smaller larger = (AFoldable (CollectorAny (CaseAmend' larger) smaller) (Int, WrappedAny), IsDistinct smaller)
-- | Sets the subset of 'Many' in the larger 'Many'.
-- Analogous to 'replace' setter but for multiple fields.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'amend'' \@'[Int, Maybe Char] x ((6 :: Int) './' Just \'P' './' 'nil') \`shouldBe`
-- (6 :: Int) './' False './' \'X' './' Just \'P' './' 'nil'
-- @
amend' :: forall smaller larger. Amend' smaller larger => Many larger -> Many smaller -> Many larger
amend' (Many ls) t = Many $ foldr (\(i, WrappedAny v) ys -> S.update i v ys) ls xs'
where
xs' = afoldr (:) [] (forMany' (CaseAmend' @larger @_ @smaller) t)
data CaseAmend' (larger :: [Type]) r (xs :: [Type]) = CaseAmend'
type instance CaseResult (CaseAmend' larger r) x = r
instance Reiterate (CaseAmend' larger r) (x ': xs) where
reiterate = coerce
-- | for each x in @smaller@, convert it to a (k, v) to insert into the x in @Many larger@
instance UniqueMember x larger =>
CaseAny (CaseAmend' larger (Int, WrappedAny)) (x ': xs) where
caseAny _ v = (i, WrappedAny v)
where
i = natToInt @(IndexOf x larger)
-----------------------------------------------------------------------
-- | A variation of 'amend'' which amends via labels.
--
-- @
-- let x = False ./ Tagged \@\"Hi" (5 :: Int) ./ Tagged \@Foo False ./ Tagged \@Bar \'X' ./ Tagged \@\"Bye" \'O' ./ 'nil'
-- 'amendL' \@'[Foo, Bar] x (Tagged \@Foo True ./ Tagged \@Bar \'Y' ./ nil) `shouldBe`
-- False ./ Tagged \@\"Hi" (5 :: Int) ./ Tagged \@Foo True ./ Tagged \@Bar \'Y' ./ Tagged \@\"Bye" \'O' ./ 'nil'
-- 'amendL' \@'[\"Hi", \"Bye"] x (Tagged \@\"Hi" (6 :: Int) ./ Tagged \@\"Bye" \'P' ./ nil) `shouldBe`
-- False ./ Tagged \@\"Hi" (6 :: Int) ./ Tagged \@Foo False ./ Tagged \@Bar \'X' ./ Tagged \@\"Bye" \'P' ./ 'nil'
-- @
amendL'
:: forall ls smaller larger.
( Amend' smaller larger
, smaller ~ KindsAtLabels ls larger
, IsDistinct ls
, UniqueLabels ls larger)
=> Many larger -> Many smaller -> Many larger
amendL' = amend' @(KindsAtLabels ls larger)
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'amend'
type Amend smaller smaller' larger =
( AFoldable (CollectorAny (CaseAmend larger) (Zip smaller smaller')) (Int, WrappedAny)
, IsDistinct smaller)
-- | Polymorphic version of 'amend''.
-- Analogous to 'replace' setter but for multiple fields.
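--
-- An illustrative (unverified) example, analogous to the 'amend'' example above,
-- where the Int field is replaced by a String:
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'
-- 'amend' \@'[Int, Maybe Char] x (\"five" './' Just \'P' './' 'nil') \`shouldBe`
--     \"five" './' False './' \'X' './' Just \'P' './' 'nil'
-- @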
amend :: forall smaller smaller' larger larger'. (Amend smaller smaller' larger, larger' ~ Replaces smaller smaller' larger)
=> Many larger -> Many smaller' -> Many larger'
amend (Many ls) t = Many $ foldr (\(i, WrappedAny v) ys -> S.update i v ys) ls xs'
where
xs' = afoldr (:) [] (forMany'' @smaller Proxy (CaseAmend @larger @_ @(Zip smaller smaller')) t)
forMany'' :: Proxy xs -> c r (Zip xs ys) -> Many ys -> CollectorAny c (Zip xs ys) r
forMany'' _ c (Many ys) = CollectorAny c (toList ys)
data CaseAmend (larger :: [Type]) r (zs :: [Type]) = CaseAmend
type instance CaseResult (CaseAmend larger r) x = r
instance Reiterate (CaseAmend larger r) (z ': zs) where
reiterate = coerce
-- | for each y in @smaller@, convert it to a (k, v) to insert into the x in @Many larger@
instance (UniqueMember x larger) =>
CaseAny (CaseAmend larger (Int, WrappedAny)) ((x, y) ': zs) where
caseAny _ v = (i, WrappedAny v)
where
i = natToInt @(IndexOf x larger)
-----------------------------------------------------------------------
-- | A variation of 'amend' which amends via labels.
--
-- @
-- let x = False './' Tagged \@\"Hi" (5 :: Int) './' Tagged \@Foo False './' Tagged \@Bar 'X' './' Tagged \@\"Bye" \'O' './' 'nil'
-- 'amendL' \@'[Foo, Bar] x (\'Y' './' True './' 'nil') \`shouldBe`
-- False './' Tagged \@\"Hi" (5 :: Int) './' \'Y' './' True './' Tagged \@\"Bye" \'O' './' 'nil'
-- 'amendL' \@'[\"Hi", \"Bye"] x (True './' Tagged \@\"Changed" True './' 'nil') \`shouldBe`
-- False './' True './' Tagged \@Foo False './' Tagged \@Bar \'X' './' Tagged \@\"Changed" True './' 'nil'
-- @
amendL
:: forall ls smaller smaller' larger larger'.
( Amend smaller smaller' larger
, smaller ~ KindsAtLabels ls larger
, IsDistinct ls
, UniqueLabels ls larger
, larger' ~ Replaces smaller smaller' larger
)
=> Many larger
-> Many smaller'
-> Many larger'
amendL = amend @(KindsAtLabels ls larger)
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'amendN''
type AmendN' ns smaller larger =
( AFoldable (CollectorAnyN (CaseAmendN' ns larger) 0 smaller) (Int, WrappedAny)
, smaller ~ KindsAtIndices ns larger
, IsDistinct ns)
-- | A variation of 'amend'' which uses a Nat list @ns@ to specify how to reorder the fields, where
--
-- @
-- indices[branch_idx] = tree_idx
-- @
--
-- This variation allows @smaller@ or @larger@ to contain indistinct types, since
-- the mapping is specified by @indices@.
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- 'amendN'' \@'[5, 4, 0] x (Just \'B' './' (8 :: Int) './' (4 ::Int) './' 'nil') \`shouldBe`
-- (4 :: Int) './' False './' \'X' './' Just \'O' './' (8 :: Int) './' Just \'B' './' 'nil'
-- @
amendN' :: forall ns smaller larger.
(AmendN' ns smaller larger)
=> Many larger -> Many smaller -> Many larger
amendN' (Many ls) t = Many $ foldr (\(i, WrappedAny v) ys -> S.update i v ys) ls xs'
where
xs' = afoldr (:) [] (forManyN' (CaseAmendN' @ns @larger @_ @0 @smaller) t)
data CaseAmendN' (indices :: [Nat]) (larger :: [Type]) r (n :: Nat) (xs :: [Type]) = CaseAmendN'
type instance CaseResult (CaseAmendN' indices larger r n) x = r
instance ReiterateN (CaseAmendN' indices larger r) n (x ': xs) where
reiterateN = coerce
-- | for each x in @smaller@, convert it to a (k, v) to insert into the x in @larger@
instance (MemberAt n' x larger, n' ~ KindAtIndex n indices) =>
CaseAny (CaseAmendN' indices larger (Int, WrappedAny) n) (x ': xs) where
caseAny _ v = (i, WrappedAny v)
where
i = natToInt @n'
-----------------------------------------------------------------------
-- | A friendlier type constraint synonym for 'amendN'
type AmendN ns smaller smaller' larger =
( AFoldable (CollectorAnyN (CaseAmendN ns larger) 0 (Zip smaller smaller')) (Int, WrappedAny)
, smaller ~ KindsAtIndices ns larger
, IsDistinct ns)
-- | A polymorphic variation of 'amendN''
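--
-- An illustrative (unverified) example, analogous to the 'amendN'' example above,
-- where the field at index 0 is replaced by a String:
--
-- @
-- let x = (5 :: Int) './' False './' \'X' './' Just \'O' './' (6 :: Int) './' Just \'A' './' 'nil'
-- 'amendN' \@'[5, 4, 0] x (Just \'B' './' (8 :: Int) './' \"five" './' 'nil') \`shouldBe`
--     \"five" './' False './' \'X' './' Just \'O' './' (8 :: Int) './' Just \'B' './' 'nil'
-- @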
amendN :: forall ns smaller smaller' larger larger'.
(AmendN ns smaller smaller' larger, larger' ~ ReplacesIndex ns smaller' larger)
=> Many larger -> Many smaller' -> Many larger'
amendN (Many ls) t = Many $ foldr (\(i, WrappedAny v) ys -> S.update i v ys) ls xs'
where
xs' = afoldr (:) [] (forManyN'' @smaller Proxy (CaseAmendN @ns @larger @_ @0 @(Zip smaller smaller')) t)
forManyN'' :: Proxy xs -> c r n (Zip xs ys) -> Many ys -> CollectorAnyN c n (Zip xs ys) r
forManyN'' _ c (Many ys) = CollectorAnyN c (toList ys)
data CaseAmendN (indices :: [Nat]) (larger :: [Type]) r (n :: Nat) (zs :: [Type]) = CaseAmendN
type instance CaseResult (CaseAmendN indices larger r n) x = r
instance ReiterateN (CaseAmendN indices larger r) n (z ': zs) where
reiterateN = coerce
-- | for each x in @smaller@, convert it to a (k, v) to insert into the x in @larger@
instance (MemberAt n' x larger, n' ~ KindAtIndex n indices) =>
CaseAny (CaseAmendN indices larger (Int, WrappedAny) n) ((x, y) ': zs) where
caseAny _ v = (i, WrappedAny v)
where
i = natToInt @n'
-----------------------------------------------------------------------
instance Eq (Many_ '[]) where
_ == _ = True
instance (Eq x, Eq (Many_ xs)) => Eq (Many_ (x ': xs)) where
ls == rs = case front_ ls == front_ rs of
False -> False
_ -> (aft_ ls) == (aft_ rs)
{-# INLINABLE (==) #-} -- This makes compiling tests a little faster than with no pragma
-- | Two 'Many's are equal if all their fields equal
instance Eq (Many_ xs) => Eq (Many xs) where
lt == rt = toMany_ lt == toMany_ rt
-----------------------------------------------------------------------
instance Ord (Many_ '[]) where
compare _ _ = EQ
instance (Ord x, Ord (Many_ xs)) => Ord (Many_ (x ': xs)) where
compare ls rs = case compare (front_ ls) (front_ rs) of
LT -> LT
GT -> GT
EQ -> compare (aft_ ls) (aft_ rs)
{-# INLINABLE compare #-} -- This makes compiling tests a little faster than with no pragma
-- | Two 'Many's are ordered by 'compare'ing their fields in index order
instance Ord (Many_ xs) => Ord (Many xs) where
compare xs ys = compare (toMany_ xs) (toMany_ ys)
-----------------------------------------------------------------------
instance Semigroup (Many_ '[]) where
_ <> _ = Many_ []
instance (Semigroup x, Semigroup (Many_ xs)) => Semigroup (Many_ (x ': xs)) where
Many_ (a : as) <> Many_ (b : bs) = Many_ (c : cs)
where
c = unsafeCoerce (unsafeCoerce a <> (unsafeCoerce b :: x))
cs = getMany_ (Many_ @xs as <> Many_ @xs bs)
_ <> _ = error "invalid Many_ Semigroup"
instance Semigroup (Many_ xs) => Semigroup (Many xs) where
as <> bs = fromMany_ (toMany_ as <> toMany_ bs)
-----------------------------------------------------------------------
instance Monoid (Many_ '[]) where
mempty = Many_ []
mappend = (<>)
instance (Monoid x, Monoid (Many_ xs)) => Monoid (Many_ (x ': xs)) where
mempty = Many_ (c : cs)
where
c = unsafeCoerce (mempty :: x)
cs = getMany_ (mempty :: Many_ xs)
Many_ (a : as) `mappend` Many_ (b : bs) = Many_ (c : cs)
where
c = unsafeCoerce (unsafeCoerce a `mappend` (unsafeCoerce b :: x))
cs = getMany_ (Many_ @xs as `mappend` Many_ @xs bs)
_ `mappend` _ = error "invalid Many_ Monoid"
instance Monoid (Many_ xs) => Monoid (Many xs) where
mempty = fromMany_ (mempty :: Many_ xs)
as `mappend` bs = fromMany_ (toMany_ as `mappend` toMany_ bs)
-----------------------------------------------------------------------
instance Show (Many_ '[]) where
showsPrec d _ = showParen (d > app_prec) $ showString "nil"
where
app_prec = 10
instance (Show x, Show (Many_ xs)) => Show (Many_ (x ': xs)) where
showsPrec d ls@(Many_ xs) =
showParen (d > cons_prec) $
showsPrec (cons_prec + 1) v .
showString " ./ " .
showsPrec cons_prec (aft_ ls) -- not (cons-prec+1) for right associativity
where
cons_prec = 5 -- infixr 5 consMany
-- use of front here is safe as we are guaranteed the length from the typelist
v = unsafeCoerce (Partial.head xs) :: x
{-# INLINABLE showsPrec #-} -- This makes compiling tests a little faster than with no pragma
-- | @show ((5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil') == "5 ./ False ./ 'X' ./ Just 'O' ./ nil"@
instance Show (Many_ xs) => Show (Many xs) where
showsPrec d xs = showsPrec d (toMany_ xs)
-----------------------------------------------------------------------
instance Read (Many_ '[]) where
readPrec = parens $ prec app_prec $ do
lift $ L.expect (Ident "nil")
pure $ Many_ []
where
app_prec = 10
instance (Read x, Read (Many_ xs)) => Read (Many_ (x ': xs)) where
readPrec = parens $ prec cons_prec $ do
a <- step (readPrec @x)
lift $ L.expect (Symbol "./")
        as <- readPrec @(Many_ xs) -- no 'step' to allow right associative './'
pure $ consMany_ a as
where
cons_prec = 5 -- infixr `consMany`
{-# INLINABLE readPrec #-} -- This makes compiling tests a little faster than with no pragma
-- | @read "5 ./ False ./ 'X' ./ Just 'O' ./ nil" == (5 :: Int) './' False './' \'X' './' Just \'O' './' 'nil'@
instance Read (Many_ xs) => Read (Many xs) where
readPrec = do
xs <- readPrec @(Many_ xs)
pure $ fromMany_ xs
-----------------------------------------------------------------------
instance NFData (Many '[]) where
rnf _ = ()
instance (NFData x, NFData (Many xs)) => NFData (Many (x ': xs)) where
rnf xs = rnf (front xs) `seq` rnf (aft xs)
-----------------------------------------------------------------------
instance Hashable (Many '[])
instance (Hashable x, Hashable (Many xs)) => Hashable (Many (x ': xs))
-----------------------------------------------------------------------
-- | 'WrappedAny' avoids the following:
-- Illegal type synonym family application in instance: Any
newtype WrappedAny = WrappedAny Any
-----------------------------------------------------------------------
|
louispan/data-diverse
|
src/Data/Diverse/Many/Internal.hs
|
bsd-3-clause
| 46,345 | 0 | 20 | 10,307 | 13,429 | 7,317 | 6,112 | 573 | 2 |
import ConfigCheckTest
main = configCheckTest "custom-b"
|
bitemyapp/dyre
|
Tests/config-check/configCheckTestB.hs
|
bsd-3-clause
| 57 | 0 | 5 | 6 | 12 | 6 | 6 | 2 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Data.Aeson
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Text as T
import Nagios.Check.RabbitMQ
import Network.HTTP.Client
import System.Environment
import System.Nagios.Plugin
simplePerfDatum :: T.Text -> PerfValue -> NagiosPlugin ()
simplePerfDatum n p = addPerfDatum n p NullUnit Nothing Nothing Nothing Nothing
main :: IO ()
main = runNagiosPlugin $ do
CheckOptions{..} <- liftIO parseOptions
username <- liftIO $ maybe "" BSC.pack <$> lookupEnv "RABBIT_USER"
password <- liftIO $ maybe "" BSC.pack <$> lookupEnv "RABBIT_PASS"
manager <- liftIO $ newManager defaultManagerSettings
let rateUrl = concat [ "http://", hostname, "/api/exchanges/%2F/", exchange ]
authedRequest <- applyBasicAuth username password <$> parseUrl rateUrl
let q_params = [ ("lengths_age", Just "60")
, ("msg_rates_age", Just "60")
, ("msg_rates_incr", Just "60")
]
let q_authedRequest = setQueryString q_params authedRequest
resp <- liftIO $ httpLbs q_authedRequest manager
case eitherDecode (responseBody resp) of
Left e -> addResult Unknown $ T.pack ( "Exchange decode failed with: " ++ e )
Right MessageDetail{..} -> do
addResult OK "Exchange rate within bounds"
simplePerfDatum "rateConfirms" $ RealValue rateConfirms
simplePerfDatum "ratePublishIn" $ RealValue ratePublishIn
simplePerfDatum "ratePublishOut" $ RealValue ratePublishOut
let countIncoming = length connectionsIncoming
let countOutgoing = length connectionsOutgoing
simplePerfDatum "connectionsIncoming" $ IntegralValue . fromIntegral $ countIncoming
simplePerfDatum "connectionsOutgoing" $ IntegralValue . fromIntegral $ countOutgoing
--- Check options, if available
unless (rateConfirms `inBoundsOf` minRate && rateConfirms `inBoundsOf` maxRate)
(addResult Critical "Confirm Rate out of bounds")
unless (fromIntegral countIncoming `inBoundsOf` minIncomingConn)
(addResult Critical "Incoming connection rate out of bounds")
unless (fromIntegral countOutgoing `inBoundsOf` minOutgoingConn)
(addResult Critical "Outgoing connection rate out of bounds")
|
anchor/nagios-plugin-rabbitmq-exchange
|
src/Main.hs
|
bsd-3-clause
| 2,536 | 25 | 10 | 609 | 576 | 298 | 278 | 45 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
module Control.Monad.Supply.Class
( MonadSupply (..)
) where
import Control.Applicative
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import qualified Control.Monad.Trans.State.Strict as Strict
import Control.Monad.Trans.Supply (SupplyT)
import qualified Control.Monad.Trans.Supply as Trans
class (Applicative m, Monad m) => MonadSupply s m | m -> s where
supply :: m s
#ifndef HLINT
default supply :: (MonadTrans t, MonadSupply s m) => t m s
supply = lift supply
#endif
instance (Applicative m, Monad m) => MonadSupply s (SupplyT s m) where
supply = Trans.supply
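-- A small usage sketch (not part of the original module, purely illustrative):
-- draw two fresh values from the supply.
--
-- > fresh2 :: MonadSupply s m => m (s, s)
-- > fresh2 = (,) <$> supply <*> supply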
instance MonadSupply s m => MonadSupply s (ReaderT r m)
instance MonadSupply s m => MonadSupply s (Strict.StateT s' m)
|
sonyandy/wart
|
src/Control/Monad/Supply/Class.hs
|
bsd-3-clause
| 939 | 0 | 9 | 149 | 248 | 144 | 104 | 22 | 0 |
-- A simple test program for the Haskell parser,
-- originally written by Sven Panne.
module Main (main, mainArgs, testLexer) where
import Data.List
import Language.Haskell.Lexer (lexer, Token(EOF))
import Language.Haskell.ParseMonad (runParserWithMode)
import Language.Haskell.Parser
import Language.Haskell.Syntax
import Language.Haskell.Pretty
import System.Environment
import System.Console.GetOpt
data Flag
= LexOnlyLength -- print number of tokens only
| LexOnlyRev -- print tokens in reverse order
| LexOnly -- print tokens
| ParseLength -- print number of declarations only
| ParseInternal -- print abstract syntax in internal format
| ParsePretty PPLayout -- pretty print in this style
| Help -- give short usage info
title :: String
title = "A simple test program for the haskell-src package"
usage :: String
usage = "usage: hsparser [option] [filename]\n"
options :: [OptDescr Flag]
options =
[ Option ['n'] ["numtokens"] (NoArg LexOnlyLength) "print number of tokens only",
Option ['r'] ["revtokens"] (NoArg LexOnlyRev) "print tokens in reverse order",
Option ['t'] ["tokens"] (NoArg LexOnly) "print tokens",
Option ['d'] ["numdecls"] (NoArg ParseLength) "print number of declarations only",
Option ['a'] ["abstract"] (NoArg ParseInternal) "print abstract syntax in internal format",
Option ['p'] ["pretty"] (OptArg pStyle "STYLE") "pretty print in STYLE[(o)ffside|(s)emicolon|(i)nline|(n)one](default = offside)",
Option ['h','?'] ["help"] (NoArg Help) "display this help and exit"]
pStyle :: Maybe String -> Flag
pStyle Nothing = ParsePretty PPOffsideRule
pStyle (Just s) = ParsePretty $ case s of
"o" -> PPOffsideRule
"offside" -> PPOffsideRule
"s" -> PPSemiColon
"semicolon" -> PPSemiColon
"i" -> PPInLine
"inline" -> PPInLine
"n" -> PPNoLayout
"none" -> PPNoLayout
_ -> PPOffsideRule
main :: IO ()
main = do
args <- getArgs
mainArgs args
mainArgs :: [String] -> IO ()
mainArgs cmdline =
case getOpt Permute options cmdline of
(flags, args, []) -> do
inp <- case args of
[] -> getContents
[f] -> readFile f
_ -> error usage
let parse_mode = case args of
[] -> defaultParseMode
[f] -> defaultParseMode {parseFilename = f}
putStrLn (handleFlag (getFlag flags) parse_mode inp)
(_, _, errors) ->
error (concat errors ++ usageInfo usage options)
getFlag :: [Flag] -> Flag
getFlag [] = ParsePretty PPOffsideRule
getFlag [f] = f
getFlag _ = error usage
handleFlag :: Flag -> ParseMode -> String -> String
handleFlag LexOnlyLength parse_mode = show . length . testLexerRev parse_mode
handleFlag LexOnlyRev parse_mode =
concat . intersperse "\n" . map show . testLexerRev parse_mode
handleFlag LexOnly parse_mode =
concat . intersperse "\n" . map show . testLexer parse_mode
handleFlag ParseLength parse_mode =
show . modLength . testParser parse_mode
where modLength (HsModule _ _ _ imp d) = length imp + length d
handleFlag ParseInternal parse_mode = show . testParser parse_mode
handleFlag (ParsePretty l) parse_mode =
prettyPrintStyleMode style{lineLength=80} defaultMode{layout=l} .
testParser parse_mode
handleFlag Help _parse_mode = const $
usageInfo (title ++ "\n" ++ usage) options
testLexerRev :: ParseMode -> String -> [Token]
testLexerRev parse_mode = getResult . runParserWithMode parse_mode (loop [])
where loop toks = lexer $ \t -> case t of
EOF -> return toks
_ -> loop (t:toks)
testLexer :: ParseMode -> String -> [Token]
testLexer parse_mode = reverse . testLexerRev parse_mode
testParser :: ParseMode -> String -> HsModule
testParser parse_mode = getResult . parseModuleWithMode parse_mode
getResult :: ParseResult a -> a
getResult (ParseOk a) = a
getResult (ParseFailed loc err) =
error (srcFilename loc ++ ":" ++ show (srcLine loc) ++ ":" ++
show (srcColumn loc) ++ ": " ++ err)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/haskell-src/examples/hsparser.hs
|
bsd-3-clause
| 3,996 | 38 | 17 | 832 | 1,208 | 630 | 578 | 93 | 9 |
module Main (
main
) where
import Brainhuck.Interpreter(execute)
import System.Environment(getArgs)
main :: IO ()
main = getArgs >>= \args -> case args of
"-h":_ -> putStrLn "brainhuck [-hi] <files>"
"-i":_ -> getLine >>= execute >> putStrLn ""
files -> mapM_ (\f -> readFile f >>= execute) files >> putStrLn ""
|
ameingast/brainhuck
|
src/Main.hs
|
bsd-3-clause
| 322 | 0 | 15 | 60 | 124 | 65 | 59 | 9 | 3 |
-- | Partial binding to CoreAudio.
-- At the moment only HostTime and parts of the HAL (Hardware Abstraction Layer) are supported.
--
-- See <http://developer.apple.com/documentation/MusicAudio/Reference/CACoreAudioReference/AudioHardware/>
{-# LANGUAGE ForeignFunctionInterface, ScopedTypeVariables #-}
module System.MacOSX.CoreAudio
(
-- * some basic types
Device
, Stream
, AudioValueRange(..)
, Direction(..)
-- * more types
, AudioDeviceIOProc
, AudioDeviceIOProcFloat
, mkAudioDeviceIOProc
, AudioTimeStamp(..)
, SMPTETime(..)
, AudioBuffer(..)
, AudioBufferList(..)
, pokeAudioBufferList
, peekAudioBufferList
-- * HostTime
, audioGetCurrentHostTime
, audioConvertHostTimeToNanos
, audioConvertNanosToHostTime
, audioGetCurrentTimeInNanos
-- * low-level whatever
, audioDeviceStart
, audioDeviceStop
, audioDeviceAddIOProc
, audioDeviceRemoveIOProc
-- * enumerations
, enumerateAudioDevices
, enumerateAudioStreams
-- * properties
, audioDeviceGetProperty
, audioDeviceGetPropertyList
, audioDeviceGetPropertyString
, audioDeviceGetPropertyCFString
{-
, audioDeviceGetPropertyUnsafe
, audioDeviceGetPropertyListUnsafe
, audioDeviceGetPropertyStringUnsafe
, audioDeviceGetPropertyCFStringUnsafe
-}
, audioDeviceSetProperty
, audioDeviceName
)
where
-----
import Control.Monad
import Data.Char (ord)
import Foreign
import Foreign.C
import System.MacOSX.CoreFoundation
----- types
type Device = UInt32
type Stream = UInt32
data AudioValueRange = AudioValueRange Float64 Float64 deriving Show
data Direction = In | Out
instance Storable AudioValueRange where
sizeOf _ = 16
alignment _ = 8
peek p = do
x <- peek (castPtr p)
y <- peek (castPtr p `plusPtr` 8)
return (AudioValueRange x y)
poke p (AudioValueRange x y) = do
poke (castPtr p) x
poke (castPtr p `plusPtr` 8) y
----- helper functions -----
fromRight :: Either a b -> b
fromRight (Right x) = x
fromJust :: Maybe a -> a
fromJust (Just x) = x
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe (Left _) = Nothing
eitherToMaybe (Right x) = Just x
eitherToMaybeIO :: Either a b -> IO (Maybe b)
eitherToMaybeIO = return . eitherToMaybe
liftRight :: (b -> c) -> Either a b -> Either a c
liftRight _ (Left y) = Left y
liftRight f (Right x) = Right (f x)
liftRightM :: Monad m => (b -> m c) -> Either a b -> m (Either a c)
liftRightM u ei = case ei of
Left y -> return $ Left y
Right x -> do
z <- u x
return $ Right z
liftMaybeIO :: (a -> IO b) -> Maybe a -> IO (Maybe b)
liftMaybeIO _ Nothing = return Nothing
liftMaybeIO f (Just x) = do { y <- f x ; return (Just y) }
----- memory "management" -----
data Mem a = Mem Int (ForeignPtr a)
allocMem n = do
p <- mallocForeignPtrBytes n
return $ Mem n p
withMem :: Mem a -> (Int -> Ptr a -> IO b) -> IO b
withMem (Mem n p) f = withForeignPtr p $ \q -> f n q
memToString :: Mem CChar -> IO String
memToString m = withMem m $ \_ p -> peekCString p
memToCFString :: Mem CChar -> IO String
memToCFString m = withMem m $ \_ p -> peekCFString (castPtr p)
memToStorable :: Storable a => Mem a -> IO a
memToStorable m = withMem m $ \_ p -> peek p
memToStorableList :: forall a. Storable a => Mem a -> IO [a]
memToStorableList m = withMem m $ \n p -> do
let u = sizeOf (undefined :: a)
forM [0..(div n u)-1] $ \i -> peekElemOff p i
---- converting four character IDs and directions
fromFourCharacterID :: String -> UInt32
fromFourCharacterID [a,b,c,d] =
  (ord32 a `shiftL` 24) +
  (ord32 b `shiftL` 16) +
  (ord32 c `shiftL` 8) +
  (ord32 d )
  where
    ord32 :: Char -> UInt32
    ord32 = fromIntegral . ord
-- a property selector must be exactly four characters long; fail with a clear message otherwise
fromFourCharacterID s = error $ "fromFourCharacterID: not a four-character ID: " ++ s
fromDir In = True
fromDir Out = False
----- HostTime
foreign import ccall unsafe "HostTime.h AudioGetCurrentHostTime"
audioGetCurrentHostTime :: IO UInt64
foreign import ccall unsafe "HostTime.h AudioConvertHostTimeToNanos"
audioConvertHostTimeToNanos :: UInt64 -> IO UInt64
foreign import ccall unsafe "HostTime.h AudioConvertNanosToHostTime"
audioConvertNanosToHostTime :: UInt64 -> IO UInt64
audioGetCurrentTimeInNanos :: IO UInt64
audioGetCurrentTimeInNanos = ( audioGetCurrentHostTime >>= audioConvertHostTimeToNanos )
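-- A usage sketch (illustrative only, not part of the original binding):
-- measure the wall-clock duration of an IO action in nanoseconds.
--
-- > timeActionNanos :: IO a -> IO (a, UInt64)
-- > timeActionNanos act = do
-- >   t0 <- audioGetCurrentTimeInNanos
-- >   x  <- act
-- >   t1 <- audioGetCurrentTimeInNanos
-- >   return (x, t1 - t0)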
----- AudioDeviceIOProc -----
-- | Arguments:
--
-- * @device :: UInt32@,
--
-- * @currentTimeStamp :: Ptr AudioTimeStamp@,
--
-- * @input :: Ptr (AudioBufferList a)@,
--
-- * @inputTimeStamp :: Ptr AudioTimeStamp@,
--
-- * @output :: Ptr (AudioBufferList a)@,
--
-- * @outputTimeStamp :: Ptr AudioTimeStamp@,
--
-- * @clientData :: Ptr b@.
--
type AudioDeviceIOProc a b
= UInt32 -> Ptr AudioTimeStamp
-> Ptr (AudioBufferList a) -> Ptr AudioTimeStamp
-> Ptr (AudioBufferList a) -> Ptr AudioTimeStamp
-> Ptr b
-> IO OSStatus
type AudioDeviceIOProcFloat c = AudioDeviceIOProc Float c
foreign import ccall "wrapper"
mkAudioDeviceIOProc :: AudioDeviceIOProc a b -> IO (FunPtr (AudioDeviceIOProc a b))
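-- A minimal callback sketch (an illustrative assumption, not part of this
-- binding): an IO proc that writes silence into every output buffer. It would
-- be wrapped with 'mkAudioDeviceIOProc', registered with 'audioDeviceAddIOProc'
-- and driven by 'audioDeviceStart'.
--
-- > silenceProc :: AudioDeviceIOProcFloat ()
-- > silenceProc _dev _now _input _inTS output _outTS _client = do
-- >   AudioBufferList bufs <- peekAudioBufferList output
-- >   forM_ bufs $ \(AudioBuffer _nchan nbytes dat) ->
-- >     pokeArray dat (replicate (fromIntegral nbytes `div` sizeOf (undefined :: Float)) 0)
-- >   return 0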
----- AudioBuffer -----
data AudioBuffer a = AudioBuffer
{ ab_NumberChannels :: UInt32
, ab_DataByteSize :: UInt32
, ab_Data :: Ptr a
}
instance Storable (AudioBuffer a) where
alignment _ = 4
sizeOf _ = 8 + sizeOf (undefined :: Ptr a)
poke p x = do
poke (castPtr p) (ab_NumberChannels x) ; p <- return $ plusPtr p 4
poke (castPtr p) (ab_DataByteSize x) ; p <- return $ plusPtr p 4
poke (castPtr p) (ab_Data x)
peek p = do
n <- peek (castPtr p) ; p <- return $ plusPtr p 4
s <- peek (castPtr p) ; p <- return $ plusPtr p 4
d <- peek (castPtr p)
return $ AudioBuffer n s d
----- AudioBufferList -----
-- Keeps track of multiple buffers.
--
-- > typedef struct AudioBufferList {
-- > UInt32 mNumberBuffers;
-- > AudioBuffer mBuffers[1];
-- > } AudioBufferList;
--
-- Discussion
--
-- When audio data is interleaved, only one buffer is needed in the
-- AudioBufferList; when dealing with multiple mono channels, each will
-- need its own buffer. This is accomplished by allocating the needed
-- space and pointing mBuffers to it.
data AudioBufferList a = AudioBufferList [AudioBuffer a]
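-- A small illustrative value (assumptions: one interleaved stereo buffer whose
-- payload of nFrames*2 Floats has already been allocated elsewhere):
--
-- > stereoList :: Int -> Ptr Float -> AudioBufferList Float
-- > stereoList nFrames payload =
-- >   AudioBufferList
-- >     [ AudioBuffer 2 (fromIntegral (nFrames * 2 * sizeOf (undefined :: Float))) payload ]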
-- | Returns the number of bytes written.
pokeAudioBufferList :: Ptr (AudioBufferList a) -> AudioBufferList a -> IO Int
pokeAudioBufferList p (AudioBufferList list) = do
let len = length list
poke (castPtr p :: Ptr UInt32) (fromIntegral len)
pokeArray (castPtr p `plusPtr` 4) list
return (4 + len * sizeOf (undefined::AudioBuffer a))
-- | Does not need the length of the list, as it is stored in the memory.
peekAudioBufferList :: Ptr (AudioBufferList a) -> IO (AudioBufferList a)
peekAudioBufferList p = do
n <- liftM fromIntegral $ peek (castPtr p :: Ptr UInt32)
liftM AudioBufferList $ peekArray n (castPtr p `plusPtr` 4)
----- AudioTimeStamp -----
data AudioTimeStamp = AudioTimeStamp
{ ats_SampleTime :: Float64
, ats_HostTime :: UInt64
, ats_RateScalar :: Float64
, ats_WordClockTime :: UInt64
, ats_SMPTETime :: SMPTETime
, ats_Flags :: UInt32
, ats_Reserved :: UInt32
}
instance Storable AudioTimeStamp where
alignment _ = 8
sizeOf _ = 64
poke p x = do
poke (castPtr p) (ats_SampleTime x) ; p <- return $ plusPtr p 8
poke (castPtr p) (ats_HostTime x) ; p <- return $ plusPtr p 8
poke (castPtr p) (ats_RateScalar x) ; p <- return $ plusPtr p 8
poke (castPtr p) (ats_WordClockTime x) ; p <- return $ plusPtr p 8
poke (castPtr p) (ats_SMPTETime x) ; p <- return $ plusPtr p 24
poke (castPtr p) (ats_Flags x) ; p <- return $ plusPtr p 4
poke (castPtr p) (ats_Reserved x)
peek p = do
s <- peek (castPtr p) ; p <- return $ plusPtr p 8
h <- peek (castPtr p) ; p <- return $ plusPtr p 8
r <- peek (castPtr p) ; p <- return $ plusPtr p 8
w <- peek (castPtr p) ; p <- return $ plusPtr p 8
m <- peek (castPtr p) ; p <- return $ plusPtr p 24
f <- peek (castPtr p) ; p <- return $ plusPtr p 4
v <- peek (castPtr p)
return $ AudioTimeStamp s h r w m f v
kAudioTimeStampSampleTimeValid = bit 0 :: UInt32
kAudioTimeStampHostTimeValid = bit 1 :: UInt32
kAudioTimeStampRateScalarValid = bit 2 :: UInt32
kAudioTimeStampWordClockTimeValid = bit 3 :: UInt32
kAudioTimeStampSMPTETimeValid = bit 4 :: UInt32
----- SMPTETime -----
data SMPTETime = SMPTETime
{ smpte_Counter :: UInt64
, smpte_Type :: UInt32
, smpte_Flags :: UInt32
, smpte_Hours :: SInt16
, smpte_Minutes :: SInt16
, smpte_Seconds :: SInt16
, smpte_Frames :: SInt16
}
instance Storable SMPTETime where
alignment _ = 8
sizeOf _ = 24
poke p x = do
poke (castPtr p) (smpte_Counter x) ; p <- return $ plusPtr p 8
poke (castPtr p) (smpte_Type x) ; p <- return $ plusPtr p 4
poke (castPtr p) (smpte_Flags x) ; p <- return $ plusPtr p 4
poke (castPtr p) (smpte_Hours x) ; p <- return $ plusPtr p 2
poke (castPtr p) (smpte_Minutes x) ; p <- return $ plusPtr p 2
poke (castPtr p) (smpte_Seconds x) ; p <- return $ plusPtr p 2
poke (castPtr p) (smpte_Frames x)
peek p = do
c <- peek (castPtr p) ; p <- return $ plusPtr p 8
t <- peek (castPtr p) ; p <- return $ plusPtr p 4
f <- peek (castPtr p) ; p <- return $ plusPtr p 4
h <- peek (castPtr p) ; p <- return $ plusPtr p 2
m <- peek (castPtr p) ; p <- return $ plusPtr p 2
s <- peek (castPtr p) ; p <- return $ plusPtr p 2
r <- peek (castPtr p)
return $ SMPTETime c t f h m s r
----- AudioDeviceIOProc
foreign import ccall safe "AudioHardware.h AudioDeviceAddIOProc"
audioDeviceAddIOProc :: Device -> FunPtr (AudioDeviceIOProc a b) -> Ptr b -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioDeviceRemoveIOProc"
audioDeviceRemoveIOProc :: Device -> FunPtr (AudioDeviceIOProc a b) -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioDeviceStart"
audioDeviceStart :: Device -> FunPtr (AudioDeviceIOProc a b) -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioDeviceStop"
audioDeviceStop :: Device -> FunPtr (AudioDeviceIOProc a b) -> IO OSStatus
----- generic wrapper around Audio****GetPropertyInfo & Audio****GetProperty
type GetPropertyInfo = UInt32 -> Ptr UInt32 -> Ptr Boolean -> IO OSStatus
type GetProperty a = UInt32 -> Ptr UInt32 -> Ptr a -> IO OSStatus
audioGetPropertyMem :: GetPropertyInfo -> GetProperty a -> String -> IO (Either OSStatus (Mem a))
audioGetPropertyMem getPropertyInfo getProperty id = do
let id1 = fromFourCharacterID id
alloca $ \p -> alloca $ \q -> do
os1 <- getPropertyInfo id1 p q
if os1 /=0
then return (Left os1)
else do
k <- liftM fromIntegral $ peek p :: IO Int
m <- allocMem k
os2 <- withMem m $ \_ s -> getProperty id1 p s
if os2 /=0
then return $ Left os2
else return $ Right m
audioGetProperty :: Storable a => GetPropertyInfo -> GetProperty a -> String -> IO (Maybe a)
audioGetProperty gpi gp id =
(audioGetPropertyMem gpi gp id) >>= eitherToMaybeIO >>= (liftMaybeIO memToStorable)
audioGetPropertyList :: Storable a => GetPropertyInfo -> GetProperty a -> String -> IO (Maybe [a])
audioGetPropertyList gpi gp id =
(audioGetPropertyMem gpi gp id) >>= eitherToMaybeIO >>= (liftMaybeIO memToStorableList)
audioGetPropertyString :: GetPropertyInfo -> GetProperty CChar -> String -> IO (Maybe String)
audioGetPropertyString gpi gp id =
(audioGetPropertyMem gpi gp id) >>= eitherToMaybeIO >>= (liftMaybeIO memToString)
audioGetPropertyCFString :: GetPropertyInfo -> GetProperty CChar -> String -> IO (Maybe String)
audioGetPropertyCFString gpi gp id =
(audioGetPropertyMem gpi gp id) >>= eitherToMaybeIO >>= (liftMaybeIO memToCFString)
{-
audioGetPropertyUnsafe gpi gp id = liftM fromJust $ audioGetProperty gpi gp id
audioGetPropertyListUnsafe gpi gp id = liftM fromJust $ audioGetPropertyList gpi gp id
audioGetPropertyStringUnsafe gpi gp id = liftM fromJust $ audioGetPropertyString gpi gp id
audioGetPropertyCFStringUnsafe gpi gp id = liftM fromJust $ audioGetPropertyCFString gpi gp id
-}
----- AudioHardware -----
foreign import ccall safe "AudioHardware.h AudioHardwareGetPropertyInfo"
c_AudioHardwareGetPropertyInfo :: UInt32 -> Ptr UInt32 -> Ptr Boolean -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioHardwareGetProperty"
c_AudioHardwareGetProperty :: UInt32 -> Ptr UInt32 -> Ptr a -> IO OSStatus
audioHardwareGetPropertyList =
audioGetPropertyList
c_AudioHardwareGetPropertyInfo
c_AudioHardwareGetProperty
enumerateAudioDevices :: IO [Device]
enumerateAudioDevices = do
xx <- audioHardwareGetPropertyList "dev#"
case xx of
Nothing -> do
fail "enumeration of audio devices failed."
Just ls -> return ls
----- AudioDevice -----
foreign import ccall safe "AudioHardware.h AudioDeviceGetPropertyInfo"
c_AudioDeviceGetPropertyInfo
:: Device -> UInt32 -> Boolean -> UInt32 -> Ptr UInt32 -> Ptr Boolean -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioDeviceGetProperty"
c_AudioDeviceGetProperty
:: Device -> UInt32 -> Boolean -> UInt32 -> Ptr UInt32 -> Ptr a -> IO OSStatus
audioDeviceGetPropertyString :: Device -> Int -> Direction -> String -> IO (Maybe String)
audioDeviceGetPropertyString device channel dir =
audioGetPropertyString
(c_AudioDeviceGetPropertyInfo device ch isInput)
(c_AudioDeviceGetProperty device ch isInput)
where
isInput = fromDir dir
ch = fromIntegral channel
audioDeviceGetPropertyCFString :: Device -> Int -> Direction -> String -> IO (Maybe String)
audioDeviceGetPropertyCFString device channel dir =
audioGetPropertyCFString
(c_AudioDeviceGetPropertyInfo device ch isInput)
(c_AudioDeviceGetProperty device ch isInput)
where
isInput = fromDir dir
ch = fromIntegral channel
audioDeviceGetPropertyList :: Storable a => Device -> Int -> Direction -> String -> IO (Maybe [a])
audioDeviceGetPropertyList device channel dir =
audioGetPropertyList
(c_AudioDeviceGetPropertyInfo device ch isInput)
(c_AudioDeviceGetProperty device ch isInput)
where
isInput = fromDir dir
ch = fromIntegral channel
audioDeviceGetProperty
:: Storable a
=> Device -- ^ device id
-> Int -- ^ channel
-> Direction -- ^ direction (input\/output)
-> String -- ^ four character ID of the property
-> IO (Maybe a)
audioDeviceGetProperty device channel dir =
audioGetProperty
(c_AudioDeviceGetPropertyInfo device ch isInput)
(c_AudioDeviceGetProperty device ch isInput)
where
isInput = fromDir dir
ch = fromIntegral channel
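-- An illustrative sketch; the "nsrt" selector is assumed to correspond to the
-- nominal sample rate property (check the CoreAudio headers before relying on it):
--
-- > deviceSampleRate :: Device -> IO (Maybe Float64)
-- > deviceSampleRate dev = audioDeviceGetProperty dev 0 Out "nsrt"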
{-
audioDeviceGetPropertyUnsafe d c i id = liftM fromJust $ audioDeviceGetProperty d c i id
audioDeviceGetPropertyListUnsafe d c i id = liftM fromJust $ audioDeviceGetPropertyList d c i id
audioDeviceGetPropertyStringUnsafe d c i id = liftM fromJust $ audioDeviceGetPropertyString d c i id
audioDeviceGetPropertyCFStringUnsafe d c i id = liftM fromJust $ audioDeviceGetPropertyCFString d c i id
-}
audioDeviceName :: Device -> IO String
audioDeviceName dev = liftM fromJust $ audioDeviceGetPropertyString dev 0 Out "name"
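-- Usage sketch (illustrative only): print the name of every audio device
-- reported by the HAL.
--
-- > listDeviceNames :: IO ()
-- > listDeviceNames = enumerateAudioDevices >>= mapM_ (audioDeviceName >=> putStrLn)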
foreign import ccall safe "AudioHardware.h AudioDeviceSetProperty"
c_AudioDeviceSetProperty
:: Device -> Ptr AudioTimeStamp -> UInt32 -> Boolean -> UInt32 -> UInt32 -> Ptr a -> IO OSStatus
audioDeviceSetProperty :: Storable a => Device -> UInt32 -> Direction -> String -> a -> IO OSStatus
audioDeviceSetProperty dev channel dir id x = do
os <- with x $ \p -> c_AudioDeviceSetProperty
dev nullPtr channel (fromDir dir) (fromFourCharacterID id) (fromIntegral $ sizeOf x) p
return os
-- | input and output streams
enumerateAudioStreams :: Device -> IO ([Stream],[Stream])
enumerateAudioStreams dev = do
inp <- audioDeviceGetPropertyList dev 0 In "stm#"
out <- audioDeviceGetPropertyList dev 0 Out "stm#"
return ( fromJust inp , fromJust out )
----- AudioStream -----
{-
foreign import ccall safe "AudioHardware.h AudioStreamGetPropertyInfo"
c_AudioStreamGetPropertyInfo
:: UInt32 -> UInt32 -> UInt32 -> Ptr UInt32 -> Ptr Boolean -> IO OSStatus
foreign import ccall safe "AudioHardware.h AudioStreamGetProperty"
c_AudioStreamGetProperty
:: UInt32 -> UInt32 -> UInt32 -> Ptr UInt32 -> Ptr a -> IO OSStatus
audioStreamGetPropertyList stream channel =
audioGetPropertyList
(c_AudioStreamGetPropertyInfo stream channel)
(c_AudioStreamGetProperty stream channel)
-}
|
chpatrick/hmidi
|
System/MacOSX/CoreAudio.hs
|
bsd-3-clause
| 16,827 | 0 | 19 | 3,773 | 4,749 | 2,372 | 2,377 | 309 | 3 |
module IptAdmin.DelPage where
import Control.Monad.Error
import Happstack.Server.SimpleHTTP
import IptAdmin.DelPage.Render
import IptAdmin.Render
import IptAdmin.System
import IptAdmin.Template
import IptAdmin.Types
import IptAdmin.Utils
import Iptables
import Iptables.Print
import Iptables.Types
import Safe
import Text.Blaze.Renderer.Pretty (renderHtml)
pageHandlers :: IptAdmin Response
pageHandlers = msum [ methodSP GET pageHandlerGet
, methodSP POST pageHandlerPost
]
-- | Deprecated. Was used before jquery-ui integration.
pageHandlerGet :: IptAdmin Response
pageHandlerGet = do
tableName <- getInputNonEmptyString "table"
chainName <- getInputNonEmptyString "chain"
rulePosition <- getInputRead "pos"
(_, rule) <- checkParams tableName chainName rulePosition
return $ buildResponse $ renderHtml $ htmlWrapper $ do
header tableName $ "Delete rule from '" ++ chainName
++ "' chain in '" ++ tableName
++ "' table in position " ++ show rulePosition
delPageForm (tableName, chainName, rulePosition) $ printRule (rule, rulePosition)
pageHandlerPost :: IptAdmin Response
pageHandlerPost = do
tableName <- getInputNonEmptyString "table"
chainName <- getInputNonEmptyString "chain"
rulePosition <- getInputRead "rulePos"
_ <- checkParams tableName chainName rulePosition
tryChange (deleteRule tableName chainName rulePosition)
-- redir $ "/show?table=" ++ tableName ++ bookmarkForJump chainName (Just rulePosition)
return $ buildResponse "ok"
checkParams :: String -> String -> Int -> IptAdmin (Chain, Rule)
checkParams tableName chainName rulePosition = do
table <- getTable tableName
let chainMay = getChainByName chainName table
chain <- maybe (throwError $ "Invalid chain name: " ++ chainName)
return
chainMay
let ruleMay = cRules chain `atMay` (rulePosition - 1)
rule <- maybe (throwError $ "Rule index out of range: " ++ show rulePosition)
return
ruleMay
return (chain, rule)
|
etarasov/iptadmin
|
src/IptAdmin/DelPage.hs
|
bsd-3-clause
| 2,147 | 0 | 16 | 500 | 489 | 245 | 244 | 48 | 1 |
{-# Language DeriveFunctor ,BangPatterns, TupleSections #-}
module APIparser where
import Control.Applicative
import Control.Arrow ((&&&),(***))
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Exception (SomeException,catch)
import Control.Lens
import Control.Monad
import Data.Array.Repa as R hiding ((++))
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lex.Double as L
import Data.Complex
import Data.Either (lefts)
import Data.List as DL
import Data.List.Split (chunksOf)
import Data.Maybe (catMaybes,fromMaybe,isJust)
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
import System.Directory
import System.Exit(ExitCode( ExitFailure,ExitSuccess))
import System.Posix.User (getLoginName)
import System.Process (system,readProcess)
import Text.Parsec
import Text.Parsec.ByteString (parseFromFile)
import Text.Printf (printf)
import Prelude hiding (catch)
-- Internal modules
import ClusterAPI
import CommonTypes
import Constants
import Gaussian
import InternalCoordinates
import Logger
import Molcas
import ParsecNumbers
import ParsecText
import QuadraticInterpolation
-- =======================> Data, Types and Instances <==========================
newtype ParseInfo a = ParseInfo {
runParseInfo :: [Maybe a] } deriving (Show,Functor)
-- ==================> Instances <=========================
instance Monad ParseInfo where
return a = ParseInfo $ [Just a]
ma >>= f = let xs = runParseInfo ma
fun x = case x of
Nothing -> Nothing
Just w -> g w
g = safeHeadMaybe . catMaybes . runParseInfo . f
ys = fmap fun xs
in ParseInfo ys
safeHeadMaybe :: [a] -> Maybe a
safeHeadMaybe [] = Nothing
safeHeadMaybe (x:_) = Just x
--
instance MonadPlus ParseInfo where
mzero = defaultInfo
(ParseInfo xs) `mplus` (ParseInfo ys) = ParseInfo $ xs `mplus` ys
-- ===========> <============
defaultInfo :: ParseInfo a
defaultInfo = ParseInfo []
naturalTransf :: ParseInfo a -> [a]
naturalTransf (ParseInfo []) = []
naturalTransf (ParseInfo xs) = catMaybes xs
-- =================> Call External Programs <==================
interactWith :: Job -> String -> Int -> Molecule -> IO Molecule
interactWith job project step mol =
case job of
Molcas inputData -> do
let io1 = writeMolcasXYZ (project ++ ".xyz") mol
io2 = writeFile (project ++ ".input") $ concatMap show inputData
concurrently io1 io2
launchMolcas project job
parseMolcas project ["Grad","Roots"] mol
MolcasTinker inputData molcasQM -> do
print "rewrite Molcas input"
modifyMolcasInput inputData molcasQM project mol
print "save tinker xyz "
saveTinkerXYZ project step
print "launch Molcas"
launchMolcas project job
print "Parsing Molcas output"
parseMolcas project ["GradESPF","Roots"] mol
Gaussian tupleTheory -> do
let [input,out,chk,fchk] = DL.zipWith (++) (repeat project) [".com",".log",".chk",".fchk"]
writeGaussJob tupleTheory project mol
launchJob $ "g09 " ++ input
launchJob $ "formchk " ++ chk
updateMultiStates out fchk mol
Palmeiro conex dirs -> launchPalmeiro conex dirs mol
Quadratic -> return $ calcgradQuadratic mol
-- | local jobs
launchJob :: String -> IO ()
launchJob script = withAsync (system script) $ wait >=> checkStatus
-- | Job Status
checkStatus :: ExitCode -> IO ()
checkStatus r = case r of
ExitSuccess -> return ()
ExitFailure _ -> putStrLn "Lord give us patience and resistence in the ass!!\n" >>
fail "Job Launch Failed (Bastard Fortranians!!)"
parallelLaunch :: [IO a] -> IO [a]
parallelLaunch actions = foldr conc (return []) actions
where conc io ios = do
(x,xs) <- concurrently io ios
return (x:xs)
atomicallyIO :: MVar () -> IO a -> IO a
atomicallyIO lock action = withMVar lock $ \_ -> withAsync action wait
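-- Usage sketch (illustrative; the file names are made up): run independent IO
-- actions concurrently with 'parallelLaunch' and collect their results in the
-- original order.
--
-- > readOutputs :: IO [String]
-- > readOutputs = parallelLaunch [readFile "job1.out", readFile "job2.out"]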
-- ============> Tully Updates <=========
updateCoeffEnergies :: [Energy] -> [[Double]] -> Molecule -> Molecule
updateCoeffEnergies energies coeff mol =
let currentCoeff = mol^.getCoeffCI
currentEner = mol^.getEnergy
(newCoeffs,newEnergy) = if length currentCoeff < 3
then (coeff : currentCoeff, energies : currentEner)
else (coeff : init currentCoeff ,energies : init currentEner)
in mol & getCoeffCI .~ newCoeffs
& getEnergy .~ newEnergy
updateNewJobInput :: Job -> Molecule -> Job
updateNewJobInput job mol = case job of
Molcas theory -> updateMolcasInput theory rlxroot
Gaussian tupla -> updateGaussInput tupla rlxroot
otherwise -> job
where rlxroot = succ $ calcElectSt mol
updateGaussInput :: (TheoryLevel,Basis) -> Int -> Job
updateGaussInput (theory,basis) newrlx = Gaussian (fun,basis)
where fun = case theory of
CASSCF activeSpace rlxroot s -> CASSCF activeSpace newrlx s
otherwise -> theory
updateMolcasInput :: [MolcasInput String] -> Int -> Job
updateMolcasInput xs rlxroot = Molcas $ fmap modifyInputRas xs
where modifyInputRas x = case x of
(RasSCF _ h t) -> RasSCF rlxroot h t
otherwise -> x
-- ======================> Palmeiro <==============
launchPalmeiro :: Connections -> [FilePath] -> Molecule -> IO Molecule
launchPalmeiro conex dirs mol = do
let internals = calcInternals conex mol
carts = mol ^. getCoord
action = interpolation conex internals carts "." {-(dirs !! x)-}
-- (e1,f1) <- action 0
(e2,f2) <- action
return $ mol & getForce .~ f2
& getEnergy .~ [[0,e2]]
-- Because the Palmeiro set of utilities requires a directory for each electronic state,
-- as many directories are created as there are electronic states involved
interpolation :: Connections -> Internals -> Coordinates -> FilePath -> IO (Energy,Force)
interpolation conex qs carts dir = do
-- pwd <- getCurrentDirectory
-- setCurrentDirectory $ pwd ++ dir
-- print $ "Current Directory" ++ pwd ++ dir
writePalmeiroScript qs
launchJob "gfortran Tools.f90 Diag.f90 CartToInt2.f90 FitSurfMN_linux12.f90 PalmeiroScript.f90 -o PalmeiroScript.o -L/usr/lib/ -llapack -lblas"
launchJob "chmod u+x PalmeiroScript.o"
launchJob "./PalmeiroScript.o >> salidaPalmeiro.out "
gradInter <- readVectorUnboxed "Gradient.out"
energy <- (head . VU.toList) `fmap` readVectorUnboxed "Energy.out"
grad <- transform2Cart conex gradInter carts
let force = R.computeUnboxedS $ R.map negate grad
-- setCurrentDirectory pwd
print gradInter
return (energy,force)
parserFileInternasCtl :: FilePath -> IO Connections
parserFileInternasCtl name = do
r <- parseFromFile parserCtl name
case r of
Left err -> error $ show err
Right xs -> return xs
parserCtl :: MyParser st (Connections)
parserCtl = do
manyTill anyChar $ try $ string "[InternalCoord]"
anyLine
parseInternals
processMolcasOutputFile fname = do
putStrLn $ "Processing file:" ++ (show fname) ++ "\n"
input <- C.readFile fname
case (runParser molcasOutParser defaultParserState fname input) of
Left err -> print err
Right xs -> mapM_ print xs
getSuffixFile :: FilePath -> String -> IO FilePath
getSuffixFile path suff = do
  xs <- filter (`notElem` [".",".."]) <$> getDirectoryContents path
  -- 'head' on an empty list would crash before reaching the intended error,
  -- so pattern match on the filtered list instead
  case filter (isSuffixOf suff) xs of
       []      -> error "no .ctl file found"
       (ctl:_) -> return ctl
-- =======================> Molcas <============
-- How to launch molcas Locally or in a cluster
launchMolcas :: Project -> Job -> IO ()
launchMolcas project job =do
r <- system "qstat > /dev/null" -- Am I in a cluster ? (really ugly hack!!!)
case r of
ExitFailure _ -> launchMolcasLocal project -- not I am not
ExitSuccess -> case job of -- I am in a cluster!!
MolcasTinker _arg1 _arg2 -> launchCluster "Dynamics" (project ++ ".input") >> launchJob "./copyNow.sh"
Molcas _arg1 -> launchCluster "Molcas" $ project ++ ".input"
launchMolcasLocal :: Project -> IO ()
launchMolcasLocal project = do
let input = project ++ ".input"
out = project ++ ".out"
err = project ++ ".err"
launchJob $ "export WorkDir=$PWD; molcas " ++ input ++ "> " ++ out ++ " 2> " ++ err
-- If there is an initial output of Molcas do not repeat it, simply parse it
firsStepMolcas :: Job -> String -> Int -> Molecule -> IO Molecule
firsStepMolcas job project step mol = catch action1 ((\_ -> action2) :: SomeException -> IO Molecule)
where action1 = case job of
Molcas _ -> parseMolcas project ["Grad","Roots"] mol
MolcasTinker _ _ -> parseMolcas project ["GradESPF","Roots"] mol
action2 = interactWith job project step mol
saveTinkerXYZ :: Project -> Int -> IO ()
saveTinkerXYZ project step = copyFile from to
where n = show step
from = project ++ ".xyz"
out = project ++ "_" ++ n ++ ".xyz"
to = "TinkerGeometries/" ++ out
-- The Molcas/Tinker interface requires the input to be rewritten at every step of the dynamics
modifyMolcasInput :: [MolcasInput String] -> [AtomQM] -> Project -> Molecule -> IO ()
modifyMolcasInput inputData molcasQM project mol = do
let newInput = fmap fun inputData
fun dat = case dat of
Gateway x -> writeGateway molcasQM mol
otherwise -> dat
writeFile (project ++ ".input") $ concatMap show newInput
writeGateway :: [AtomQM] -> Molecule -> MolcasInput String
writeGateway atoms mol = Gateway $ concat $ DL.zipWith3 fun xs atoms [1..]
where xs = fmap printxyz $ chunksOf 3 qs
printxyz [x,y,z] = printf "%12.5f %.5f %.5f" x y z
        rs = mol ^. getCoord . to (R.computeUnboxedS . R.map (*a0) ) -- get the Cartesian coordinates and transform them to Angstrom
qs = R.toList rs
symbols = mol ^. getAtoms
between x = " Basis set\n" ++ x ++ " End of Basis\n"
spaces = (" "++)
ans = spaces . (++" Angstrom\n")
mm = spaces . (++"...... / MM\n")
fun :: String -> AtomQM -> Int -> String
fun x (AtomQM label _xyz typo) i =
let num = label ++ (show i)
in case typo of
QM basis -> between $ (spaces label) ++ "." ++ basis ++ "\n" ++ (ans $ num ++ x)
MM -> between $ (mm label) ++ (ans $ num ++ x) ++ " Charge= -0.000000\n"
parseMolcas :: Project -> [Label] -> Molecule -> IO Molecule
parseMolcas project labels mol = do
pairs <- takeInfoMolcas labels <=< parseMolcasOutputFile $ project ++ ".out"
let fun = lookupLabel pairs
newMol = updateMolecule fun labels mol
return newMol
parseMolcasInput :: FilePath -> IO [MolcasInput String]
parseMolcasInput fname = do
input <- C.readFile fname
case runParser parserMolcasInput defaultParserState fname input of
Left msg -> error $ show msg
Right xs -> return xs
updateMolecule :: (Label ->[Double]) -> [Label] -> Molecule -> Molecule
updateMolecule fun labels mol = foldl' step mol labels
where step acc l = case l of
"Grad" -> let forces = (\ys -> R.fromListUnboxed (Z:. length ys) $ fmap negate ys) $ fun "Grad"
in set getForce forces acc
"Roots" -> updateCIroots fun acc
"GradESPF" -> let forces = (\ys -> R.fromListUnboxed (Z:. length ys) $ fmap negate ys) $ fun "GradESPF"
in set getForce forces acc
otherwise -> error "unknown Label"
updateCIroots :: (Label -> [Double]) -> Molecule -> Molecule
updateCIroots f mol =
let energies = f "Energies"
nroots = length energies
cis = [f $ "CIroot" ++ (show i) | i <- [0..pred nroots]]
in updateCoeffEnergies energies cis mol
takeInfoMolcas :: [Label] -> Either ParseError [MolBlock] -> IO [(Label,[Double])]
takeInfoMolcas labels eitherInfo =
case eitherInfo of
Left s -> error . show $ s
Right xs -> return . naturalTransf $ parseDataMolcas xs labels
parseDataMolcas :: [MolBlock] -> [Label] -> ParseInfo (Label,[Double])
parseDataMolcas molcasBlocks keywords = foldl1' (mplus) $ fmap lookLabel keywords
where moduleAuto = fromMaybe (error "Molcas calculation has failed") $ getModuleAuto molcasBlocks
lookLabel key = foldl' parseMB defaultInfo moduleAuto
where parseMB !acc !mb = let x = molcasParsers key mb
in acc `mplus` x
molcasParsers :: Label -> ModuleData -> ParseInfo (Label,[Double])
molcasParsers key (ModuleData _name _string xs) =
case key of
"Grad" -> getAlaskaGrad xs
"Roots" -> getRASCFCI xs
"GradESPF" -> getGradESPF xs
otherwise -> error "Unkwown label"
getModuleAuto :: [MolBlock] -> Maybe [ModuleData]
getModuleAuto xs = if null xs then Nothing
else let x = filter Molcas.isAuto xs
in case x of
[ModuleAuto _n _s modules] -> Just modules
otherwise -> Nothing
getAlaskaGrad :: [SectionData] -> ParseInfo (Label,[Double])
getAlaskaGrad xs = if null xs then ParseInfo [Nothing]
else let ys = filter Molcas.isMolGrads xs
in case ys of
[] -> ParseInfo [Nothing]
[AlaskaMolecularGradients _label t _gradESPF] -> return $ ("Grad",tuples2List t)
getGradESPF :: [SectionData] -> ParseInfo (Label,[Double])
getGradESPF xs = if null xs then ParseInfo [Nothing]
else let ys = filter Molcas.isMolGrads xs
in case ys of
[] -> ParseInfo [Nothing]
[AlaskaMolecularGradients _label _t gradESPF] -> return $ ("GradESPF", concat gradESPF)
getRASCFCI :: [SectionData] -> ParseInfo (Label,[Double])
getRASCFCI xs = if null xs then ParseInfo [Nothing]
else let ys = filter Molcas.isCICoeffs xs
in case ys of
[] -> ParseInfo [Nothing]
otherwise -> getRoots ys
getRoots :: [SectionData] -> ParseInfo (Label,[Double])
getRoots xs = let (es,cis) = unzip $ fmap fun xs
energies = return ("Energies",es)
nroots = length es
coefficients = foldl1' (mplus) [return ("CIroot"++(show i),cis !! i) | i <- [0..pred nroots]]
in energies `mplus` coefficients
where fun (RasSCFCI _r e ci) =
let cfs = fmap (\(_number,_label,coeff,_weight) -> coeff) ci
in (e,cfs)
tuples2List :: [(String,Char,Double)] -> [Double]
tuples2List xs = fmap go xs
where go (s,c,v) = v
dim = DL.length xs
-- ===========> Gaussian Interface <=========
-- |take info from the formated check point
getGradEnerFCHK :: FilePath -> Molecule -> IO Molecule
getGradEnerFCHK file mol = do
let keywords = ["Grad","Energy"]
pairs <- takeInfo keywords <=< parseGaussianCheckpoint $ file
let grad = (\ys -> R.fromListUnboxed (Z:. DL.length ys) ys) $ lookupLabel pairs "Grad"
energy = lookupLabel pairs "Energy"
forces = computeUnboxedS $ R.map (negate) grad
return $ mol & getForce .~ forces
& getEnergy .~ [energy]
-- | Monadic Lookup function base on keywords
takeInfo :: [Label] -> Either ParseError [GauBlock] -> IO [(Label,[Double])]
takeInfo labels eitherInfo =
case eitherInfo of
Left s -> error . show $ s
Right xs -> return . naturalTransf $ parseDataGaussian xs labels
updateMultiStates :: FilePath -> FilePath -> Molecule -> IO Molecule
updateMultiStates out fchk mol = do
let st = mol ^. getElecSt
(GaussLog xs g) <- parserLogFile numat out $ st
let casMol = updateCASSCF xs $ mol
case st of
Left S0 -> getGradEnerFCHK fchk $ casMol
otherwise -> return . updateGrads g $ casMol
where numat = mol ^. getAtoms . to length
sh = mol ^. getForce . to extent
negateRepa = computeUnboxedS . R.map (negate)
updateGrads g mol = let grad = negateRepa . fromListUnboxed sh $ g
in mol & getForce .~ grad
updateCASSCF :: [EigenBLock] -> Molecule -> Molecule
updateCASSCF xs mol =
let energies = fmap getRootEnergy xs
coeff = fmap getRootCoeff xs
in updateCoeffEnergies energies coeff mol
-- summit a job in the cluster queue
launchGaussian :: Project -> IO ()
launchGaussian project = launchCluster "Launch_gaussian" project
parseDataGaussian :: [GauBlock] -> [Label] -> ParseInfo (Label,[Double])
parseDataGaussian gaussBlocks keywords = ParseInfo $ lookLabel `fmap` keywords
where lookLabel key = (key,) `fmap` DL.foldl' parseGB Nothing gaussBlocks
where parseGB !acc !gb =
case gb of
RGauBlock label n xs ->
let x = gaussParsers key label xs
in acc `mplus` x
otherwise -> acc
gaussParsers :: Label -> Label -> [Double] -> Maybe [Double]
gaussParsers x =
case x of
"Coordinates" -> gaussCoord
"Energy" -> gaussEnergy
"Grad" -> gaussGrad
"Hess" -> gaussHess
"Masses" -> gaussMass
"Charges" -> gaussElems
"Coeff" -> gaussCoeff
gaussCoord :: String -> [Double] -> Maybe [Double]
gaussCoord s xs = if (words s) == ["Current","cartesian","coordinates"] then (Just xs) else Nothing
gaussEnergy :: String -> [Double] -> Maybe [Double]
gaussEnergy s xs = if (words s) == ["Total","Energy"] then (Just xs) else Nothing
gaussGrad :: String -> [Double] -> Maybe [Double]
gaussGrad s xs = if (words s) == ["Cartesian","Gradient"] then (Just xs) else Nothing
gaussHess :: String -> [Double] -> Maybe [Double]
gaussHess s xs = if (words s) == ["Cartesian","Force","Constants"] then (Just xs) else Nothing
gaussMass :: String -> [Double] -> Maybe [Double]
gaussMass s xs = if (words s) == ["Real","atomic","weights"] then (Just xs) else Nothing
gaussElems :: String -> [Double]-> Maybe [Double]
gaussElems s xs = if (words s) == ["Nuclear","charges"] then (Just xs) else Nothing
gaussCoeff :: String -> [Double]-> Maybe [Double]
gaussCoeff = undefined
-- ================> Parser Internal Coordinates <===============
parserFileInternas :: FilePath -> IO Connections
parserFileInternas name = do
r <- parseFromFile parseInternals name
case r of
Left err -> error $ show err
Right xs -> return xs
parseInternals :: MyParser st (Connections)
parseInternals = do
bonds <- parseSection 2
angles <- parseSection 3
dihedrals <- parseSection 4
return $ V.concat [bonds, angles, dihedrals]
parseSection :: Int -> MyParser st Connections
parseSection !n = do
spaces
nInternals <- intNumber
anyLine
conex <- count nInternals $ parseQ n
return $ V.fromList conex
-- Since indexes start at zero the atom numbering should
-- be reduced by 1.
parseQ :: Int -> MyParser st InternalCoord
parseQ !n = do
xs <- count n (spaces >> intNumber)
anyLine
let getIndex x = pred (xs !! x) -- atomic index begins at 1
case n of
2 -> return $ Bond (getIndex 0) (getIndex 1)
3 -> return $ Angle (getIndex 0) (getIndex 1) (getIndex 2)
4 -> return $ Dihedral (getIndex 0) (getIndex 1) (getIndex 2) (getIndex 3)
-- =======================> ParseMolecules in XYZ format <=======================
parserGeomVel :: FilePath -> Molecule -> IO [Molecule]
parserGeomVel xyz mol = do
r <- parseFromFile (many1 $ parseMol mol parseAtomsVel ) xyz
case r of
Left msg -> error $ show msg
Right xs -> return xs
parseMoleculeXYZ :: MyParser st (Label,[Double]) -> MyParser st [(Label,[Double])]
parseMoleculeXYZ parser = do
numat <- intNumber
count 2 anyLine
geometry <- count numat parser
return $ geometry
parseMol :: Molecule -> MyParser st (Label,[Double]) -> MyParser st Molecule
parseMol mol parser = do
numat <- intNumber
anyLine
es <- many1 $ try (spaces >> realNumber)
anyLine
geometry <- count numat parser
let new = updatePosMom geometry mol
return $ new & getEnergy .~ [es]
parseAtoms :: MyParser st (Label,[Double])
parseAtoms = do
spaces
label <- many1 alphaNum
xs <- count 3 (spaces >> realNumber)
anyLine
return $ (label,xs)
parseAtomsVel :: MyParser st (Label,[Double])
parseAtomsVel = do
spaces
label <- many1 alphaNum
xs <- count 6 (spaces >> realNumber)
anyLine
return $ (label,xs)
-- =========> Utilities <=================
calcElectSt :: Molecule -> Int
calcElectSt mol =
case mol^.getElecSt of
Left s -> fromEnum s
Right s -> fromEnum s
readVectorUnboxed :: FilePath -> IO (VU.Vector Double)
readVectorUnboxed file = C.readFile file >>= return . parseUnboxed
parseUnboxed ::C.ByteString -> VU.Vector Double
parseUnboxed = VU.unfoldr step
where
isspace x = if x == ' ' then True else False
step !s = let b = C.dropWhile isspace s
in case L.readDouble b of
Nothing -> Nothing
Just (!k, !t) -> Just (k, C.tail t)
updatePosMom :: [(Label,[Double])] -> Molecule -> Molecule
updatePosMom as mol = mol & getCoord .~ repaCoord
& getVel .~ velocities
where repaCoord = R.fromListUnboxed (Z :. dim) xs
velocities = R.fromListUnboxed (Z :. dim) vs
dim = 3 * length as
(xs,vs) = concat *** concat $ unzip . fmap (splitAt 3 . snd ) $ as
-- =============================> <===============================
printGnuplot :: MatrixCmplx -> Molecule -> IO ()
printGnuplot matrix mol = do
let energies = mol ^. getEnergy . to head
es = concatMap (printf "%.5f ") energies
st = printf " %.5f " (energies !! calcElectSt mol)
p1 = printf " %.5f " . realPart . (^2) $ matrix !! 0
p2 = printf " %.5f " . realPart . (^2) $ matrix !! 3
s = DL.foldl1' (++) ["gnuplot: ",es,st,p1,p2]
appendFile "TullyOutput" $ s ++ "\n\n"
-- ==============================> Printing Molecule and scripts <==============
writePalmeiroScript :: Internals -> IO ()
writePalmeiroScript qs = do
let spaces = concat $ replicate 5 " "
inpQ = spaces ++ (writeInternasFortran qs)
l1 = concatMap (spaces++) ["Program FittingToSurface\n","Use MultiNodalFitSurf\n","Implicit None\n","Real(kind=8),allocatable :: Inpq(:),g(:),H(:)\n","Real(kind=8) :: E\n","Integer i,IErr,NumberCoord\n","Logical :: NewSimplex\n\n"]
dim = (show $ VU.length qs)
l2 = spaces ++ "NumberCoord="++ dim ++ "\n\n"
l3 = concatMap (spaces++) ["Call ReadDataFiles('./',.True.,IErr)\n\n","Allocate(Inpq(1:NumberCoord),g(1:NumberCoord),H(1:NumberCoord*(NumberCoord+1)/2))\n"]
l4 = concatMap (spaces++) ["Call InterpolatePES(Inpq,IErr,NewSimplex,E,g,H)\n", "If (IErr>0) Print *,'InterpolatePES Has FAILED!.'\n",
"open (unit=112, file='Gradient.out',status='replace',action='write')\n",
"open (unit=113, file='Energy.out',status='replace',action='write')\n",
"write(unit=112,fmt=*) (real(g(i)) ,i=1," ++ dim ++ ")\n",
"write(unit=113,fmt=*) (real(E))\n",
"close(unit=112)\n",
"close(unit=113)\n\n","End Program\n"]
writeFile "PalmeiroScript.f90" $ l1 ++ l2 ++ l3 ++ inpQ ++ l4
writeInternasFortran :: Internals -> String
writeInternasFortran qs =
let {-xss = chunksOf 8 . init . concatMap (printf "%2.6f,") $ VU.toList qs-}
spaces = concat $ replicate 5 " "
(hs:tss) = chunksOf 8 . fmap (printf "%2.6f,") $ VU.toList qs
blocks = foldl' fun (DL.concat hs) tss
fun acc v = acc ++ "&\n" ++ spaces ++ "&" ++ DL.concat v
in "Inpq=(/" ++ (init blocks) ++ "/)\n\n" -- init remove the last ","
writeMolcasXYZ :: FilePath -> Molecule -> IO ()
writeMolcasXYZ name mol = do
let s = showCoord mol
numat = length $ mol^.getAtoms
strAtoms = (show numat) ++ "\n"
comment = "Angstrom\n"
str = strAtoms ++ comment ++ s
writeFile name str
writeGaussJob :: (TheoryLevel,Basis) -> String -> Molecule -> IO ()
writeGaussJob (theory,basis) project mol = do
-- name <- getLoginName
let l1 = addNewLines 1 $ "%chk=" ++ project
l2 = addNewLines 1 "%mem=4000Mb"
l3 = addNewLines 1 "%nproc=4"
-- l4 = addNewLines 1 $ "%scr=/scratch/" ++ name ++ "/"
-- l5 = addNewLines 1 $ "%rwf=/scratch/" ++ name ++ "/"
l6 = addNewLines 1 $ "#p " ++ (show theory) ++ basis ++ " force iop(1/33=1) nosymm"
l7 = addNewLines 2 $ "# SCF=(MaxCycle=300,conver=7)"
l8 = addNewLines 2 "save the old Farts, use Fortran."
l9 = addNewLines 1 "0 1"
atoms = addNewLines 1 $ showCoord mol
st = calcElectSt mol
weights = addNewLines 5 $ if st == 0 then "" else " 0.5 0.5"
result = foldl1 (++) [l1,l2,l3,l6,l7,l8,l9,atoms,weights]
writeFile (project ++ ".com") result
printMol :: Molecule -> String -> Logger -> IO ()
printMol mol msg logger = logMessage logger $ numat ++ (showPositionVel mol)
where numat = (show . length $ labels) ++ "\n" ++ msg ++ "\n"
labels = mol^.getAtoms
printData :: Molecule -> Int -> Logger -> IO ()
printData mol step logger = do
let st = mol^.getElecSt
l1 = addNewLines 1 $ "step: " ++ (show step)
l2 = addNewLines 1 $ "electronic State: " ++ (show st)
l3 = addNewLines 2 $ "potential energies: " ++ (concatMap (printf "%.6f ") . head $ mol^.getEnergy)
logMessage logger $ foldl1' (++) [l1,l2,l3]
showCoord :: Molecule -> String
showCoord mol = concat $ DL.zipWith fun labels xs
where labels = if null ls then repeat "" else ls
ls = mol^.getAtoms
qs = mol^.getCoord
xs = chunksOf 3 . R.toList . computeUnboxedS . R.map (*a0) $ qs
fun l x = (printf "%s" l) ++ (printxyz x) ++ "\n"
printxyz [x,y,z] = printf "%12.5f %.5f %.5f" x y z
showPositionVel :: Molecule -> String
showPositionVel mol = concat $ DL.zipWith3 fun labels xs vs
where labels = mol^.getAtoms
qs = mol^.getCoord
xs = chunksOf 3 . R.toList . computeUnboxedS . R.map (*a0) $ qs
vs = mol^.getVel . to (chunksOf 3 . R.toList)
fun l x v = (printf "%s" l) ++ (printxyz x) ++ (printxyz v) ++ "\n"
printxyz [x,y,z] = printf "%12.5f %.5f %.5f" x y z
addNewLines :: Int -> String -> String
addNewLines n s = let f = (++"\n")
in iterate f s !! n
|
felipeZ/Dynamics
|
src/APIparser.hs
|
bsd-3-clause
| 29,603 | 2 | 21 | 9,686 | 8,364 | 4,270 | 4,094 | -1 | -1 |
module ParsecChar(module Text.ParserCombinators.Parsec.Char) where
import Text.ParserCombinators.Parsec.Char
|
OS2World/DEV-UTIL-HUGS
|
oldlib/ParsecChar.hs
|
bsd-3-clause
| 109 | 0 | 5 | 6 | 21 | 15 | 6 | 2 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
module Ink.UI where
import Lucid
import Lucid.Ink
import Data.Text (Text)
import qualified Data.Text as T
import Control.Lens.Cons
import Control.Lens.Operators hiding ((.=))
import Data.Monoid
import Control.Applicative
import Web.Scotty
import Network.Wai.Middleware.Static
hostSite :: IO ()
hostSite = do
buildPage
scotty 8081 server
server :: ScottyM ()
server = do
middleware $ staticPolicy (noDots >-> addBase "site/dist")
get "/" $ do
setHeader "Content-Type" "text/html"
file "site/dist/index.html"
type H m = HtmlT m ()
buildPage :: IO ()
buildPage = renderToFile "site/dist/index.html"
$ page_ "Home" $++
[ div_ [class_ inkGrid_] $
div_ [classes_ [columnGroup_,pushCenter_]] $++
[
]
]
-- TreeView {{{
data TreeView m a = Node
{ nodeContent :: HtmlT m a
, nodeLink :: Either (Maybe Text) [TreeView m a]
}
deriving instance Eq (HtmlT m a) => Eq (TreeView m a)
deriving instance Ord (HtmlT m a) => Ord (TreeView m a)
deriving instance Show (HtmlT m a) => Show (TreeView m a)
type TV m = TreeView m ()
class Leaf m lf | lf -> m where
leaf :: H m -> lf
instance (a ~ ()) => Leaf m (TreeView m a) where
leaf n = Node n $ Left Nothing
instance (r ~ TreeView m (), txt ~ Text) => Leaf m (txt -> r) where
leaf n addr = Node n $ Left $ Just addr
branch :: Monad m => H m -> [TV m] -> TV m
branch n ns = Node n $ Right ns
viewTree :: Monad m => TreeView m () -> HtmlT m ()
viewTree = ul_ [class_ "ink-tree-view"] . viewNode True
where
viewNode open t = li_ (open ? data_ "open" "true") $++
case nodeLink t of
Left (Just addr) -> [ a_ [href_ addr] $ nodeContent t ]
Left _ -> [ nodeContent t ]
Right [] -> [ nodeContent t ]
Right ns@(_:ns') -> [ a_ [href_ "#"] $ nodeContent t
, ul_ $++ viewNode (open && null ns') <$> ns
]
-- }}}
page_ :: Monad m => Text -> H m -> H m
page_ t pg = do
doctype_
html_ [lang_ "en"] $++
[ head_ $++
[ meta_ [charset_ "utf-8"]
, meta_
[ httpEquiv_ "X-UA-Compatible"
, content_ "IE=edge,chrome=1"
]
, title_ $ toHtml t
, metadata_
[ "description" .= ""
, "author" .= "Kyle Carter"
, "HandheldFriendly" .= "True"
, "MobileOptimized" .= "320"
, "mobile-web-app-capable" .= "yes"
, "viewport" .= assigns
[ "width" .= "device-width"
, "initial-scale" .= "1.0"
, "maximum-scale" .= "1.0"
, "user-scalable" .= "0"
]
]
, cssPath_ $ "css/custom.min.css"
, cssPath_ $ "css/font-awesome.min.css"
, jsPath_ $ "js/modernizr.js"
, js_ $ T.unlines
[ "Modernizr.load({"
, " test: Modernizr.flexbox,"
, " nope: '" <> "css/ink-legacy.min.css" <> "'"
, "});"
]
, jsPath_ $ "js/autoload.js"
, jsPath_ $ "js/ink-all.min.js"
, css_ $ T.unlines
[ ""
, "body {"
, " background: #02402e;"
, "}\n"
, ".push, footer {"
, " height: 120px;"
, " margin-top: 0;"
, "}\n"
, "header h1 small:before {"
, " content: \"|\";"
, " margin: 0 0.5em;"
, " font-size: 1.6em;"
, "}\n"
, "footer {"
, " background: #ccc;"
, "}\n"
]
]
, body_ $
div_
[ classes_ [inkGrid_]
] $++
[ header_ [class_ spaceVertical_] $++
[ h1_ [class_ "ink-flex"] $++
[ img_
[ src_ "img/cmw-logo-white.svg"
, width_ "64"
]
, span_
[ classes_ ["push-middle","left-space"]
, style_ "color:white;"
] "Schedules"
]
, nav_ [class_ inkNav_] $
ul_ [classes_ [navMenu_,horizontal_,white_]] $++
[ li_ [class_ active_] $ a_ [href_ "#"]
"Entry"
, li_ $ a_ [href_ "#"]
"Schedules"
, li_ $ a_ [href_ "#"]
"Printouts"
, li_ $ a_ [href_ "#"]
"Settings"
]
]
, div_ [id_ "page-content"] pg
, div_ [class_ "push"] blank_
, footer_ [classes_ [clearfix_]] $++
[ div_ [class_ inkGrid_] $++
[ ul_ [classes_ [unstyled_,inline_,halfSpaceVertical_]] $++
[ li_ [class_ active_] $ a_ [href_ "#"]
"About"
, li_ $ a_ [href_ "#"]
"Sitemap"
, li_ $ a_ [href_ "#"]
"Contacts"
]
, p_ [class_ note_] par2
]
]
]
]
where
-- par1 :: Monad m => H m
-- par1 = toHtml $ T.unwords
-- [ "\"Red is not the right word,\" was the reply."
-- , "\"The plaque was scarlet."
-- , "The whole face and body turned scarlet in an hour's time."
-- , "Don't I know? Didn't I see enough of it?"
-- , "And I am telling you it was scarlet"
-- , "because—well, because it was scarlet."
-- , "There is no other word for it.\""
-- ]
par2 :: Monad m => H m
par2 = toHtml $ T.unwords
[ "Identification of the owner of the copyright,"
, "either by name, abbreviation, or other designation"
, "by which it is generally known."
]
inksite :: Text -> Text
inksite = ("http://cdn.ink.sapo.pt/3.1.9" </>)
-- Filepath {{{
(</>) :: Text -> Text -> Text
d </> p
| Just '/' <- p^?_head
= p
| Just '/' <- d^?_last
= d <> p
| otherwise
= d <> "/" <> p
infixr 5 </>
(<.>) :: Text -> Text -> Text
p <.> ext = case ext^?_Cons of
Just (x,xs) -> case x of
'.' -> p <> xs
_ -> p <> "." <> xs
_ -> p
infixr 7 <.>
-- }}}
-- Builders {{{
js_ :: Monad m => Text -> H m
js_ = script_ [ type_ "text/javascript" ]
jsPath_ :: Monad m => Text -> H m
jsPath_ rf = script_
[ type_ "text/javascript"
, src_ rf
] ("" :: Text)
css_ :: Monad m => Text -> H m
css_ = style_ [ type_ "text/css" ]
cssPath_ :: Monad m => Text -> H m
cssPath_ rf = link_
[ rel_ "stylesheet"
, type_ "text/css"
, href_ rf
]
classes_ :: [Text] -> Attribute
classes_ = class_ . T.unwords
metadata_ :: Monad m => [(Text,Text)] -> H m
metadata_ = foldMap $ uncurry metadatum_
metadatum_ :: Monad m => Text -> Text -> H m
metadatum_ n c = meta_ [ name_ n , content_ c ]
blank_ :: Monad m => H m
blank_ = mempty
-- }}}
-- Util {{{
assigns :: [(Text,Text)] -> Text
assigns = T.intercalate ", " . map (uncurry assign)
assign :: Text -> Text -> Text
assign k v = k <> "=" <> v
(.=) :: a -> b -> (a,b)
a .= b = (a,b)
infixr 0 .=
($++) :: Monoid m => (m -> a) -> [m] -> a
($++) f = f . mconcat
infixr 0 $++
{-# INLINE ($++) #-}
(?) :: Alternative f => Bool -> a -> f a
b ? a = if b then pure a else empty
infix 4 ?
-- }}}
|
kylcarte/ink-ui
|
src/Ink/UI.hs
|
bsd-3-clause
| 7,332 | 0 | 20 | 2,481 | 2,275 | 1,187 | 1,088 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-tabs #-}
{- $Id: TestsFirstSecond.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* Y A M P A *
* *
* Module: TestsArr *
* Purpose: Test cases for first and second *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module TestsFirstSecond (first_trs, first_tr, second_trs, second_tr) where
import Data.Tuple (swap)
import FRP.Yampa
import TestsCommon
------------------------------------------------------------------------------
-- Test cases for first
------------------------------------------------------------------------------
first_t0 :: [(Int,Double)]
first_t0 = testSF1 (arr dup >>> first (constant 7))
first_t0r :: [(Int,Double)]
first_t0r =
[(7,0.0), (7,1.0), (7,2.0), (7,3.0), (7,4.0),
(7,5.0), (7,6.0), (7,7.0), (7,8.0), (7,9.0),
(7,10.0), (7,11.0), (7,12.0), (7,13.0), (7,14.0),
(7,15.0), (7,16.0), (7,17.0), (7,18.0), (7,19.0),
(7,20.0), (7,21.0), (7,22.0), (7,23.0), (7,24.0)]
first_t1 :: [(Int,Double)]
first_t1 = testSF2 (arr dup >>> first (constant 7))
first_t1r :: [(Int,Double)]
first_t1r =
[(7,0.0), (7,0.0), (7,0.0), (7,0.0), (7,0.0),
(7,1.0), (7,1.0), (7,1.0), (7,1.0), (7,1.0),
(7,2.0), (7,2.0), (7,2.0), (7,2.0), (7,2.0),
(7,3.0), (7,3.0), (7,3.0), (7,3.0), (7,3.0),
(7,4.0), (7,4.0), (7,4.0), (7,4.0), (7,4.0)]
first_t2 :: [(Double,Double)]
first_t2 = testSF1 (arr dup >>> first (arr (+1)))
first_t2r =
[(1.0,0.0), (2.0,1.0), (3.0,2.0), (4.0,3.0), (5.0,4.0),
(6.0,5.0), (7.0,6.0), (8.0,7.0), (9.0,8.0), (10.0,9.0),
(11.0,10.0), (12.0,11.0), (13.0,12.0), (14.0,13.0), (15.0,14.0),
(16.0,15.0), (17.0,16.0), (18.0,17.0), (19.0,18.0), (20.0,19.0),
(21.0,20.0), (22.0,21.0), (23.0,22.0), (24.0,23.0), (25.0,24.0)]
first_t3 :: [(Double,Double)]
first_t3 = testSF2 (arr dup >>> first (arr (+1)))
first_t3r =
[(1.0,0.0), (1.0,0.0), (1.0,0.0), (1.0,0.0), (1.0,0.0),
(2.0,1.0), (2.0,1.0), (2.0,1.0), (2.0,1.0), (2.0,1.0),
(3.0,2.0), (3.0,2.0), (3.0,2.0), (3.0,2.0), (3.0,2.0),
(4.0,3.0), (4.0,3.0), (4.0,3.0), (4.0,3.0), (4.0,3.0),
(5.0,4.0), (5.0,4.0), (5.0,4.0), (5.0,4.0), (5.0,4.0)]
first_t4 :: [(Double,Double)]
first_t4 = testSF1 (arr dup >>> first integral)
first_t4r =
[(0.0,0.0), (0.0,1.0), (0.25,2.0), (0.75,3.0), (1.5,4.0),
(2.5,5.0), (3.75,6.0), (5.25,7.0), (7.0,8.0), (9.0,9.0),
(11.25,10.0), (13.75,11.0), (16.5,12.0), (19.5,13.0), (22.75,14.0),
(26.25,15.0), (30.0,16.0), (34.0,17.0), (38.25,18.0), (42.75,19.0),
(47.5,20.0), (52.5,21.0), (57.75,22.0), (63.25,23.0), (69.0,24.0)]
first_t5 :: [(Double,Double)]
first_t5 = testSF2 (arr dup >>> first integral)
first_t5r =
[(0.0,0.0), (0.0,0.0), (0.0,0.0), (0.0,0.0), (0.0,0.0),
(0.0,1.0), (0.25,1.0), (0.5,1.0), (0.75,1.0), (1.0,1.0),
(1.25,2.0), (1.75,2.0), (2.25,2.0), (2.75,2.0), (3.25,2.0),
(3.75,3.0), (4.5,3.0), (5.25,3.0), (6.0,3.0), (6.75,3.0),
(7.5,4.0), (8.5,4.0), (9.5,4.0), (10.5,4.0), (11.5,4.0)]
first_trs =
[ first_t0 ~= first_t0r,
first_t1 ~= first_t1r,
first_t2 ~= first_t2r,
first_t3 ~= first_t3r,
first_t4 ~= first_t4r,
first_t5 ~= first_t5r
]
first_tr = and first_trs
------------------------------------------------------------------------------
-- Test cases for second
------------------------------------------------------------------------------
-- These should mirror the test cases for first.
second_t0 :: [(Int,Double)]
second_t0 = testSF1 (arr dup >>> second (constant 7) >>> arr swap)
second_t1 :: [(Int,Double)]
second_t1 = testSF2 (arr dup >>> second (constant 7) >>> arr swap)
second_t2 :: [(Double,Double)]
second_t2 = testSF1 (arr dup >>> second (arr (+1)) >>> arr swap)
second_t3 :: [(Double,Double)]
second_t3 = testSF2 (arr dup >>> second (arr (+1)) >>> arr swap)
second_t4 :: [(Double,Double)]
second_t4 = testSF1 (arr dup >>> second integral >>> arr swap)
second_t5 :: [(Double,Double)]
second_t5 = testSF2 (arr dup >>> second integral >>> arr swap)
second_trs =
[ second_t0 ~= first_t0r,
second_t1 ~= first_t1r,
second_t2 ~= first_t2r,
second_t3 ~= first_t3r,
second_t4 ~= first_t4r,
second_t5 ~= first_t5r
]
second_tr = and second_trs
|
ivanperez-keera/Yampa
|
yampa/tests/TestsFirstSecond.hs
|
bsd-3-clause
| 4,904 | 0 | 12 | 1,190 | 2,089 | 1,312 | 777 | 83 | 1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK not-home #-}
-- |
-- Module : Data.Text.Internal.Lazy
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- A module containing private 'Text' internals. This exposes the
-- 'Text' representation and low level construction functions.
-- Modules which extend the 'Text' system may need to use this module.
module Data.Text.Streaming
-- (
-- Text(..)
-- , chunk
-- , empty
-- , foldrChunks
-- , foldlChunks
-- -- * Data type invariant and abstraction functions
--
-- -- $invariant
-- , strictInvariant
-- , lazyInvariant
-- , showStructure
--
-- -- * Chunk allocation sizes
-- , defaultChunkSize
-- , smallChunkSize
-- , chunkOverhead
-- ) where
import Data.Text ()
import Data.Text.Internal.Unsafe.Shift (shiftL)
import Data.Typeable (Typeable)
import Foreign.Storable (sizeOf)
import qualified Data.Text.Internal as T
data Text m r =
Empty r
| Chunk {-# UNPACK #-} !T.Text (Text m r)
| Go (m (Text m r))
deriving (Typeable)
instance Monad m => Functor (Text m) where
fmap f x = case x of
Empty a -> Empty (f a)
Chunk bs bss -> Chunk bs (fmap f bss)
Go mbss -> Go (liftM (fmap f) mbss)
instance Monad m => Applicative (Text m) where
pure = Empty
(<*>) = ap
instance Monad m => Monad (Text m) where
return = Empty
{-#INLINE return #-}
x >> y = loop SPEC x where
loop !_ x = case x of -- this seems to be insanely effective
Empty _ -> y
Chunk a b -> Chunk a (loop SPEC b)
Go m -> Go (liftM (loop SPEC) m)
x >>= f =
-- case x of
-- Empty a -> f a
-- Chunk bs bss -> Chunk bs (bss >>= f)
-- Go mbss -> Go (liftM (>>= f) mbss)
loop SPEC x where -- the SPEC seems pointless in simple case
loop !_ y = case y of
Empty a -> f a
Chunk bs bss -> Chunk bs (loop SPEC bss)
Go mbss -> Go (liftM (loop SPEC) mbss)
instance MonadIO m => MonadIO (Text m) where
liftIO io = Go (liftM Empty (liftIO io))
{-#INLINE liftIO #-}
instance MonadTrans Text where
lift ma = Go $ liftM Empty ma
data Word8_ r = Word8_ {-#UNPACK#-} !Word8 r
data SPEC = SPEC | SPEC2
{-# ANN type SPEC ForceSpecConstr #-}
-- -- ------------------------------------------------------------------------
--
-- | Smart constructor for 'Chunk'. Guarantees the data type invariant.
chunk :: S.Text -> Text m r -> Text m r
chunk c@(S.PS _ _ len) cs | len == 0 = cs
| otherwise = Chunk c cs
{-# INLINE chunk #-}
yield :: S.Text -> Text m ()
yield bs = chunk bs (Empty ())
{-# INLINE yield #-}
-- | Steptruct a byte stream from its Church encoding (compare @GHC.Exts.build@)
materialize :: (forall x . (r -> x) -> (S.Text -> x -> x) -> (m x -> x) -> x)
-> Text m r
materialize phi = phi Empty Chunk Go
{-#INLINE materialize #-}
-- | Resolve a byte stream into its Church encoding (compare @Data.List.foldr@)
dematerialize :: Monad m
=> Text m r
-> (forall x . (r -> x) -> (S.Text -> x -> x) -> (m x -> x) -> x)
dematerialize x nil cons wrap = loop SPEC x
where
loop !_ x = case x of
Empty r -> nil r
Chunk b bs -> cons b (loop SPEC bs )
Go ms -> wrap (liftM (loop SPEC) ms)
{-#INLINE dematerialize #-}
concats :: Monad m => List (Text m) m r -> Text m r
concats x = destroy x Empty join Go
distributed
:: (Monad m, MonadTrans t, MFunctor t, Monad (t m), Monad (t (Text m)))
=> Text (t m) a
-> t (Text m) a
distributed ls = dematerialize ls
return
(\bs x -> join $ lift $ Chunk bs (Empty x) )
(join . hoist (Go . fmap Empty))
-- $invariant
--
-- The data type invariant for lazy 'Text': Every 'Text' is either 'Empty' or
-- consists of non-null 'T.Text's. All functions must preserve this,
-- and the QC properties must check this.
-- | Check the invariant strictly.
-- strictInvariant :: Text -> Bool
-- strictInvariant Empty = True
-- strictInvariant x@(Chunk (T.Text _ _ len) cs)
-- | len > 0 = strictInvariant cs
-- | otherwise = error $ "Data.Text.Lazy: invariant violation: "
-- ++ showStructure x
--
-- -- | Check the invariant lazily.
-- lazyInvariant :: Text -> Text
-- lazyInvariant Empty = Empty
-- lazyInvariant x@(Chunk c@(T.Text _ _ len) cs)
-- | len > 0 = Chunk c (lazyInvariant cs)
-- | otherwise = error $ "Data.Text.Lazy: invariant violation: "
-- ++ showStructure x
--
-- -- | Display the internal structure of a lazy 'Text'.
-- showStructure :: Text -> String
-- showStructure Empty = "Empty"
-- showStructure (Chunk t Empty) = "Chunk " ++ show t ++ " Empty"
-- showStructure (Chunk t ts) =
-- "Chunk " ++ show t ++ " (" ++ showStructure ts ++ ")"
-- | Smart constructor for 'Chunk'. Guarantees the data type invariant.
chunk :: T.Text -> Text m r -> Text m r
{-# INLINE chunk #-}
chunk t@(T.Text _ _ len) ts | len == 0 = ts
| otherwise = Chunk t ts
-- | Smart constructor for 'Empty'.
empty :: Text m ()
{-# INLINE [0] empty #-}
empty = Empty ()
-- -- | Consume the chunks of a lazy ByteString with a natural right fold.
-- foldrChunks :: Monad m => (S.ByteString -> a -> a) -> a -> ByteString m r -> m a
-- foldrChunks step nil bs = dematerialize bs
-- (\_ -> return nil)
-- (liftM . step)
-- join
--
-- {-# INLINE foldrChunks #-}
-- -- | Consume the chunks of a lazy 'Text' with a natural right fold.
-- foldrChunks :: (T.Text -> a -> a) -> a -> Text m r -> m (a, r)
-- foldrChunks f z = go
-- where go (Empty ) = z
-- go (Chunk c cs) = f c (go cs)
-- {-# INLINE foldrChunks #-}
--
-- -- | Consume the chunks of a lazy 'Text' with a strict, tail-recursive,
-- -- accumulating left fold.
-- foldlChunks :: (a -> T.Text -> a) -> a -> Text -> a
-- foldlChunks f z = go z
-- where go !a Empty = a
-- go !a (Chunk c cs) = go (f a c) cs
-- {-# INLINE foldlChunks #-}
--
-- -- | Currently set to 16 KiB, less the memory management overhead.
-- defaultChunkSize :: Int
-- defaultChunkSize = 16384 - chunkOverhead
-- {-# INLINE defaultChunkSize #-}
--
-- -- | Currently set to 128 bytes, less the memory management overhead.
-- smallChunkSize :: Int
-- smallChunkSize = 128 - chunkOverhead
-- {-# INLINE smallChunkSize #-}
--
-- -- | The memory management overhead. Currently this is tuned for GHC only.
-- chunkOverhead :: Int
-- chunkOverhead = sizeOf (undefined :: Int) `shiftL` 1
-- {-# INLINE chunkOverhead #-}
|
bitemyapp/bytestring-streaming
|
Data/Text/Streaming.hs
|
bsd-3-clause
| 6,917 | 6 | 15 | 1,803 | 1,266 | 689 | 577 | -1 | -1 |
module Horbits.Types where
import Numeric.NumType.TF
import Horbits.Dimensional.Prelude
type DSpecificAngularMomentum = Dim Pos2 Zero Neg1 Zero Zero Zero Zero
type SpecificAngularMomentum = Quantity DSpecificAngularMomentum
|
chwthewke/horbits
|
src/horbits/Horbits/Types.hs
|
bsd-3-clause
| 248 | 0 | 5 | 47 | 48 | 28 | 20 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Trombone.Server.Config
( Config(..)
, HmacKeyConf(..)
, allowLocal
, buildHmacConf
, defaultConfig
, lookupKey
, options
, translOpts
, versionH
) where
import Data.ByteString ( ByteString )
import Data.HashMap ( Map )
import Data.List.Utils ( split )
import Data.Maybe ( catMaybes, fromMaybe )
import Data.Text ( Text, pack, unpack )
import Data.Version ( showVersion )
import Network.HTTP.Types ( HeaderName )
import Paths_trombone ( version )
import System.Console.GetOpt
import Trombone.Middleware.Logger
import Trombone.Pipeline
import qualified Data.ByteString.Char8 as BS
import qualified Data.HashMap as Map
-- | Response header with server description.
versionH :: (HeaderName, ByteString)
versionH = ("Server", BS.pack $ "Trombone/" ++ showVersion version)
-- | Server startup configuration parameters.
data Config = Config
{ configEnHmac :: Bool
-- ^ Enable message integrity authentication (HMAC)?
, configEnCors :: Bool
-- ^ Support cross-origin resource sharing?
, configEnAmqp :: Bool
-- ^ Whether RabbitMQ messaging middleware should be enabled.
, configEnPipes :: Bool
-- ^ Enable request pipelines?
, configEnLogging :: Bool
-- ^ Enable logging to file?
, configServerPort :: Int
-- ^ Port number on which the server should listen.
, configLogFile :: FilePath
-- ^ Location of log file.
, configLogBufSize :: BufSize
-- ^ Application log file size limit.
, configAmqpUser :: Text
-- ^ RabbitMQ username
, configAmqpPass :: Text
-- ^ RabbitMQ password
, configDbHost :: ByteString
-- ^ Database host
, configDbName :: ByteString
-- ^ Database name
, configDbUser :: ByteString
-- ^ Database username
, configDbPass :: ByteString
-- ^ Database password
, configDbPort :: Int
-- ^ Database port
, configRoutesFile :: FilePath
-- ^ Route pattern configuration file.
, configPipesFile :: FilePath
-- ^ Pipelines configuration file.
, configTrustLocal :: Bool
-- ^ Skip HMAC authentication for requests originating from localhost?
, configPoolSize :: Int
-- ^ The number of connections to keep in PostgreSQL connection pool.
, configVerbose :: Bool
-- ^ Print debug information to stdout.
, configShowVer :: Bool
-- ^ Show version number?
, configShowHelp :: Bool
-- ^ Show usage info?
} deriving (Show)
-- | Default values for server startup.
defaultConfig :: Config
defaultConfig = Config
{ configEnHmac = True
, configEnCors = False
, configEnAmqp = False
, configEnPipes = False
, configEnLogging = False
, configServerPort = 3010
, configLogFile = "log/access.log"
, configLogBufSize = defaultBufSize
, configAmqpUser = "guest"
, configAmqpPass = "guest"
, configDbHost = "localhost"
, configDbName = "trombone"
, configDbUser = "postgres"
, configDbPass = "postgres"
, configDbPort = 5432
, configRoutesFile = "routes.conf"
, configPipesFile = "pipelines.conf"
, configTrustLocal = False
, configPoolSize = 10
, configVerbose = False
, configShowVer = False
, configShowHelp = False
}
translOpts :: [String] -> IO (Config, [String])
translOpts argv =
case getOpt Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defaultConfig o, n)
(_,_,errs) -> ioError $ userError (concat errs ++ usageInfo header options)
where header = "Usage: trombone [OPTION...]"
options :: [OptDescr (Config -> Config)]
options =
[ Option "V" ["version"]
(NoArg $ \opts -> opts { configShowVer = True })
"display version number and exit"
, Option "?" ["help"]
(NoArg $ \opts -> opts { configShowHelp = True })
"display this help and exit"
, Option "x" ["disable-hmac"]
(NoArg $ \opts -> opts { configEnHmac = False })
"disable message integrity authentication (HMAC)"
, Option "C" ["cors"]
(NoArg $ \opts -> opts { configEnCors = True })
"enable support for cross-origin resource sharing"
, Option "A" ["amqp"]
(OptArg amqpOpts "USER:PASS")
"enable RabbitMQ messaging middleware [username:password]"
, Option "i" ["pipelines"]
(OptArg (\d opts -> opts
{ configEnPipes = True
, configPipesFile = fromMaybe "pipelines.conf" d }) "FILE")
"read request pipelines from external [configuration file]"
, Option "s" ["port"]
(ReqArg (\p opts -> opts { configServerPort = read p }) "PORT")
"server port"
, Option "l" ["access-log"]
(OptArg (\d opts -> opts
{ configEnLogging = True
, configLogFile = fromMaybe "log/access.log" d }) "FILE")
"enable logging to file [log file]"
, Option [] ["size"]
(ReqArg (\p opts -> opts
{ configLogBufSize = fromMaybe defaultBufSize $ read p
}) "SIZE")
"log file size"
, Option "h" ["db-host"]
(ReqArg (\p opts -> opts { configDbHost = BS.pack p }) "HOST")
"database host"
, Option "d" ["db-name"]
(ReqArg (\p opts -> opts { configDbName = BS.pack p }) "DB")
"database name"
, Option "u" ["db-user"]
(ReqArg (\p opts -> opts { configDbUser = BS.pack p }) "USER")
"database user"
, Option "p" ["db-password"]
(ReqArg (\p opts -> opts { configDbPass = BS.pack p }) "PASS")
"database password"
, Option "P" ["db-port"]
(ReqArg (\p opts -> opts { configDbPort = read p }) "PORT")
"database port"
, Option "r" ["routes-file"]
(ReqArg (\p opts -> opts { configRoutesFile = p }) "FILE")
"route pattern configuration file"
, Option "t" ["trust-localhost"]
(NoArg $ \opts -> opts { configTrustLocal = True })
"skip HMAC authentication for requests from localhost"
, Option [] ["pool-size"]
(ReqArg (\p opts -> opts { configPoolSize = read p }) "SIZE")
"number of connections to keep in PostgreSQL connection pool"
, Option [] ["verbose"]
(NoArg $ \opts -> opts { configVerbose = True })
"print various debug information to stdout"
]
where amqpOpts Nothing opts = amqpOpts (Just "guest:guest") opts
amqpOpts (Just d) opts = let [u, p] = pair $ split ":" d
in opts { configEnAmqp = True
, configAmqpUser = pack u
, configAmqpPass = pack p }
pair [u, p] = [u, p]
pair _ = ["guest", "guest"]
-- | HMAC authentication configuration data.
data HmacKeyConf = HmacKeyConf
(Map ByteString ByteString) -- ^ Hash map with client keys
Bool -- ^ Bypass authentication for localhost?
buildHmacConf :: [(ByteString, ByteString)] -> Bool -> Maybe HmacKeyConf
buildHmacConf keys = Just . HmacKeyConf (Map.fromList keys)
lookupKey :: ByteString -> HmacKeyConf -> Maybe ByteString
{-# INLINE lookupKey #-}
lookupKey key (HmacKeyConf hm _) = Map.lookup key hm
allowLocal :: HmacKeyConf -> Bool
{-# INLINE allowLocal #-}
allowLocal (HmacKeyConf _ a) = a
|
johanneshilden/principle
|
Trombone/Server/Config.hs
|
bsd-3-clause
| 7,605 | 0 | 14 | 2,293 | 1,684 | 982 | 702 | 160 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
module GCHQ.Data.Puzzle
( Puzzle
, mapPuzzleSquares
, readPuzzleJSON
, rowColCount
, writePuzzleJSON
, solvePuzzle
) where
import Control.Monad ( (>=>) )
import Data.Aeson ( decode', encode )
import qualified Data.ByteString.Lazy.Char8 as LBS ( ByteString )
import Data.Either ( Either )
import Data.Function ( (.), ($) )
import Data.Functor ( fmap )
import Data.Foldable ( foldr )
import Data.Ix ( range )
import Data.List ( insert, repeat, zip )
import Data.Maybe ( Maybe )
import Data.String ( String )
import Data.Word ( Word8 )
import GCHQ.Data.Puzzle.Internal
( Puzzle(..), checkPuzzle, preparePuzzle, gridRange, rowColCount
, runSATSolver )
import Prelude ( Bool(..), IO )
mapPuzzleSquares :: (((Word8, Word8), Bool) -> a) -> Puzzle -> [a]
mapPuzzleSquares f p@(Puzzle ss _ _) = fmap f finalSquareList where
finalSquareList = foldr insert initSquareList shadedSquareList
initSquareList = zip (range . gridRange $ p) (repeat False)
shadedSquareList = zip ss (repeat True)
readPuzzleJSON :: LBS.ByteString -> Maybe Puzzle
readPuzzleJSON = decode' >=> checkPuzzle >=> preparePuzzle
writePuzzleJSON :: Puzzle -> LBS.ByteString
writePuzzleJSON = encode
solvePuzzle :: Puzzle -> IO (Either String Puzzle)
solvePuzzle = runSATSolver
|
jship/gchq-shading-puzzle2015
|
lib/GCHQ/Data/Puzzle.hs
|
bsd-3-clause
| 1,298 | 0 | 10 | 210 | 405 | 241 | 164 | 35 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ViewPatterns #-}
-- Keep using iso8601DateFormat, since the alternative was introduced in time-1.9
-- while GHC 8.6 still has time-1.8.
-- Safe once we no longer support GHC 8.6.
{-# OPTIONS_GHC -Wno-deprecations #-}
#ifdef FILE_EMBED
{-# LANGUAGE TemplateHaskell #-}
#endif
module General.Template(runTemplate) where
import System.FilePath.Posix
import Control.Exception.Extra
import Data.Char
import Data.Time
import System.IO.Unsafe
import Development.Shake.Internal.Paths
import qualified Data.ByteString.Lazy.Char8 as LBS
import qualified Language.Javascript.DGTable as DGTable
import qualified Language.Javascript.Flot as Flot
import qualified Language.Javascript.JQuery as JQuery
#ifdef FILE_EMBED
import Data.FileEmbed
import Language.Haskell.TH.Syntax ( runIO )
#endif
{- HLINT ignore "Redundant bracket" -} -- a result of CPP expansion
-- Very hard to abstract over TH, so we do it with CPP
#ifdef FILE_EMBED
#define FILE(x) (pure (LBS.fromStrict $(embedFile =<< runIO (x))))
#else
#define FILE(x) (LBS.readFile =<< (x))
#endif
libraries :: [(String, IO LBS.ByteString)]
libraries =
[("jquery.js", FILE(JQuery.file))
,("jquery.dgtable.js", FILE(DGTable.file))
,("jquery.flot.js", FILE(Flot.file Flot.Flot))
,("jquery.flot.stack.js", FILE(Flot.file Flot.FlotStack))
]
-- | Template Engine. Perform the following replacements on a line basis:
--
-- * <script src="foo"></script> ==> <script>[[foo]]</script>
--
-- * <link href="foo" rel="stylesheet" type="text/css" /> ==> <style type="text/css">[[foo]]</style>
runTemplate :: (FilePath -> IO LBS.ByteString) -> LBS.ByteString -> IO LBS.ByteString
runTemplate ask = lbsMapLinesIO f
where
link = LBS.pack "<link href=\""
script = LBS.pack "<script src=\""
f x | Just file <- lbsStripPrefix script y = do res <- grab file; pure $ LBS.pack "<script>\n" `LBS.append` res `LBS.append` LBS.pack "\n</script>"
| Just file <- lbsStripPrefix link y = do res <- grab file; pure $ LBS.pack "<style type=\"text/css\">\n" `LBS.append` res `LBS.append` LBS.pack "\n</style>"
| otherwise = pure x
where
y = LBS.dropWhile isSpace x
grab = asker . takeWhile (/= '\"') . LBS.unpack
asker o@(splitFileName -> ("lib/",x)) =
case lookup x libraries of
Nothing -> errorIO $ "Template library, unknown library: " ++ o
Just act -> act
asker "shake.js" = readDataFileHTML "shake.js"
asker "data/metadata.js" = do
time <- getCurrentTime
pure $ LBS.pack $
"var version = " ++ show shakeVersionString ++
"\nvar generated = " ++ show (formatTime defaultTimeLocale (iso8601DateFormat (Just "%H:%M:%S")) time)
asker x = ask x
-- Perform a mapM on each line and put the result back together again
lbsMapLinesIO :: (LBS.ByteString -> IO LBS.ByteString) -> LBS.ByteString -> IO LBS.ByteString
-- If we do the obvious @fmap LBS.unlines . mapM f@ then all the monadic actions are run on all the lines
-- before it starts producing the lazy result, killing streaming and having more stack usage.
-- The real solution (albeit with too many dependencies for something small) is a streaming library,
-- but a little bit of unsafePerformIO does the trick too.
lbsMapLinesIO f = pure . LBS.unlines . map (unsafePerformIO . f) . LBS.lines
---------------------------------------------------------------------
-- COMPATIBILITY
-- available in bytestring-0.10.8.0, GHC 8.0 and above
-- alternative implementation below
lbsStripPrefix :: LBS.ByteString -> LBS.ByteString -> Maybe LBS.ByteString
lbsStripPrefix prefix text = if a == prefix then Just b else Nothing
where (a,b) = LBS.splitAt (LBS.length prefix) text
|
ndmitchell/shake
|
src/General/Template.hs
|
bsd-3-clause
| 3,850 | 0 | 16 | 775 | 772 | 424 | 348 | 45 | 5 |
-- |
-- Module : Data.ByteArray.View
-- License : BSD-style
-- Maintainer : Nicolas DI PRIMA <[email protected]>
-- Stability : stable
-- Portability : Good
--
-- a View on a given ByteArrayAccess
--
module Data.ByteArray.View
( View
, view
, takeView
, dropView
) where
import Data.ByteArray.Methods
import Data.ByteArray.Types
import Data.Memory.PtrMethods
import Data.Memory.Internal.Compat
import Foreign.Ptr (plusPtr)
import Prelude hiding (length, take, drop)
-- | a view on a given bytes
--
-- Equality test in constant time
data View bytes = View
{ viewOffset :: !Int
, viewSize :: !Int
, unView :: !bytes
}
instance ByteArrayAccess bytes => Eq (View bytes) where
(==) = constEq
instance ByteArrayAccess bytes => Ord (View bytes) where
compare v1 v2 = unsafeDoIO $
withByteArray v1 $ \ptr1 ->
withByteArray v2 $ \ptr2 -> do
ret <- memCompare ptr1 ptr2 (min (viewSize v1) (viewSize v2))
return $ case ret of
EQ | length v1 > length v2 -> GT
| length v1 < length v2 -> LT
| length v1 == length v2 -> EQ
_ -> ret
instance ByteArrayAccess bytes => Show (View bytes) where
showsPrec p v r = showsPrec p (viewUnpackChars v []) r
instance ByteArrayAccess bytes => ByteArrayAccess (View bytes) where
length = viewSize
withByteArray v f = withByteArray (unView v) $ \ptr -> f (ptr `plusPtr` (viewOffset v))
viewUnpackChars :: ByteArrayAccess bytes
=> View bytes
-> String
-> String
viewUnpackChars v xs = chunkLoop 0
where
len = length v
chunkLoop :: Int -> [Char]
chunkLoop idx
| len == idx = []
| (len - idx) > 63 =
bytesLoop idx (idx + 64) (chunkLoop (idx + 64))
| otherwise =
bytesLoop idx (len - idx) xs
bytesLoop :: Int -> Int -> [Char] -> [Char]
bytesLoop idx chunkLenM1 paramAcc =
loop (idx + chunkLenM1 - 1) paramAcc
where
loop i acc
| i == idx = (rChar i : acc)
| otherwise = loop (i - 1) (rChar i : acc)
rChar :: Int -> Char
rChar idx = toEnum $ fromIntegral $ index v idx
-- | create a view on a given bytearray
--
-- This function update the offset and the size in order to guarantee:
--
-- * offset >= 0
-- * size >= 0
-- * offset < length
-- * size =< length - offset
--
view :: ByteArrayAccess bytes
=> bytes -- ^ the byte array we put a view on
-> Int -- ^ the offset to start the byte array on
-> Int -- ^ the size of the view
-> View bytes
view b offset'' size'' = View offset size b
where
-- make sure offset is not negative
offset' :: Int
offset' = max offset'' 0
-- make sure the offset is not out of bound
offset :: Int
offset = min offset' (length b - 1)
-- make sure length is not negative
size' :: Int
size' = max size'' 0
-- make sure the length is not out of the bound
size :: Int
size = min size' (length b - offset)
-- | create a view from the given bytearray
takeView :: ByteArrayAccess bytes
=> bytes -- ^ byte aray
-> Int -- ^ size of the view
-> View bytes
takeView b size = view b 0 size
-- | create a view from the given byte array
-- starting after having dropped the fist n bytes
dropView :: ByteArrayAccess bytes
=> bytes -- ^ byte array
-> Int -- ^ the number of bytes do dropped before creating the view
-> View bytes
dropView b offset = view b offset (length b - offset)
|
NicolasDP/hs-memory
|
Data/ByteArray/View.hs
|
bsd-3-clause
| 3,649 | 0 | 20 | 1,140 | 967 | 507 | 460 | 83 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Control/Monad/Trans/Writer/Lazy.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE AutoDeriveTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Trans.Writer.Lazy
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The lazy 'WriterT' monad transformer, which adds collection of
-- outputs (such as a count or string output) to a given monad.
--
-- This monad transformer provides only limited access to the output
-- during the computation. For more general access, use
-- "Control.Monad.Trans.State" instead.
--
-- This version builds its output lazily; for a strict version with
-- the same interface, see "Control.Monad.Trans.Writer.Strict".
-----------------------------------------------------------------------------
module Control.Monad.Trans.Writer.Lazy (
-- * The Writer monad
Writer,
writer,
runWriter,
execWriter,
mapWriter,
-- * The WriterT monad transformer
WriterT(..),
execWriterT,
mapWriterT,
-- * Writer operations
tell,
listen,
listens,
pass,
censor,
-- * Lifting other operations
liftCallCC,
liftCatch,
) where
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Functor.Classes
import Data.Functor.Identity
import Control.Applicative
import Control.Monad
import qualified Control.Monad.Fail as Fail
import Control.Monad.Fix
import Control.Monad.Signatures
import Control.Monad.Zip (MonadZip(mzipWith))
import Data.Foldable
import Data.Monoid
import Data.Traversable (Traversable(traverse))
import Prelude hiding (null, length)
-- ---------------------------------------------------------------------------
-- | A writer monad parameterized by the type @w@ of output to accumulate.
--
-- The 'return' function produces the output 'mempty', while @>>=@
-- combines the outputs of the subcomputations using 'mappend'.
type Writer w = WriterT w Identity
-- | Construct a writer computation from a (result, output) pair.
-- (The inverse of 'runWriter'.)
writer :: (Monad m) => (a, w) -> WriterT w m a
writer = WriterT . return
{-# INLINE writer #-}
-- | Unwrap a writer computation as a (result, output) pair.
-- (The inverse of 'writer'.)
runWriter :: Writer w a -> (a, w)
runWriter = runIdentity . runWriterT
{-# INLINE runWriter #-}
-- | Extract the output from a writer computation.
--
-- * @'execWriter' m = 'snd' ('runWriter' m)@
execWriter :: Writer w a -> w
execWriter m = snd (runWriter m)
{-# INLINE execWriter #-}
-- | Map both the return value and output of a computation using
-- the given function.
--
-- * @'runWriter' ('mapWriter' f m) = f ('runWriter' m)@
mapWriter :: ((a, w) -> (b, w')) -> Writer w a -> Writer w' b
mapWriter f = mapWriterT (Identity . f . runIdentity)
{-# INLINE mapWriter #-}
-- ---------------------------------------------------------------------------
-- | A writer monad parameterized by:
--
-- * @w@ - the output to accumulate.
--
-- * @m@ - The inner monad.
--
-- The 'return' function produces the output 'mempty', while @>>=@
-- combines the outputs of the subcomputations using 'mappend'.
newtype WriterT w m a = WriterT { runWriterT :: m (a, w) }
instance (Eq w, Eq1 m) => Eq1 (WriterT w m) where
liftEq eq (WriterT m1) (WriterT m2) = liftEq (liftEq2 eq (==)) m1 m2
{-# INLINE liftEq #-}
instance (Ord w, Ord1 m) => Ord1 (WriterT w m) where
liftCompare comp (WriterT m1) (WriterT m2) =
liftCompare (liftCompare2 comp compare) m1 m2
{-# INLINE liftCompare #-}
instance (Read w, Read1 m) => Read1 (WriterT w m) where
liftReadsPrec rp rl = readsData $
readsUnaryWith (liftReadsPrec rp' rl') "WriterT" WriterT
where
rp' = liftReadsPrec2 rp rl readsPrec readList
rl' = liftReadList2 rp rl readsPrec readList
instance (Show w, Show1 m) => Show1 (WriterT w m) where
liftShowsPrec sp sl d (WriterT m) =
showsUnaryWith (liftShowsPrec sp' sl') "WriterT" d m
where
sp' = liftShowsPrec2 sp sl showsPrec showList
sl' = liftShowList2 sp sl showsPrec showList
instance (Eq w, Eq1 m, Eq a) => Eq (WriterT w m a) where (==) = eq1
instance (Ord w, Ord1 m, Ord a) => Ord (WriterT w m a) where compare = compare1
instance (Read w, Read1 m, Read a) => Read (WriterT w m a) where
readsPrec = readsPrec1
instance (Show w, Show1 m, Show a) => Show (WriterT w m a) where
showsPrec = showsPrec1
-- | Extract the output from a writer computation.
--
-- * @'execWriterT' m = 'liftM' 'snd' ('runWriterT' m)@
execWriterT :: (Monad m) => WriterT w m a -> m w
execWriterT m = do
~(_, w) <- runWriterT m
return w
{-# INLINE execWriterT #-}
-- | Map both the return value and output of a computation using
-- the given function.
--
-- * @'runWriterT' ('mapWriterT' f m) = f ('runWriterT' m)@
mapWriterT :: (m (a, w) -> n (b, w')) -> WriterT w m a -> WriterT w' n b
mapWriterT f m = WriterT $ f (runWriterT m)
{-# INLINE mapWriterT #-}
instance (Functor m) => Functor (WriterT w m) where
fmap f = mapWriterT $ fmap $ \ ~(a, w) -> (f a, w)
{-# INLINE fmap #-}
instance (Foldable f) => Foldable (WriterT w f) where
foldMap f = foldMap (f . fst) . runWriterT
{-# INLINE foldMap #-}
null (WriterT t) = null t
length (WriterT t) = length t
instance (Traversable f) => Traversable (WriterT w f) where
traverse f = fmap WriterT . traverse f' . runWriterT where
f' (a, b) = fmap (\ c -> (c, b)) (f a)
{-# INLINE traverse #-}
instance (Monoid w, Applicative m) => Applicative (WriterT w m) where
pure a = WriterT $ pure (a, mempty)
{-# INLINE pure #-}
f <*> v = WriterT $ liftA2 k (runWriterT f) (runWriterT v)
where k ~(a, w) ~(b, w') = (a b, w `mappend` w')
{-# INLINE (<*>) #-}
instance (Monoid w, Alternative m) => Alternative (WriterT w m) where
empty = WriterT empty
{-# INLINE empty #-}
m <|> n = WriterT $ runWriterT m <|> runWriterT n
{-# INLINE (<|>) #-}
instance (Monoid w, Monad m) => Monad (WriterT w m) where
m >>= k = WriterT $ do
~(a, w) <- runWriterT m
~(b, w') <- runWriterT (k a)
return (b, w `mappend` w')
{-# INLINE (>>=) #-}
fail msg = WriterT $ fail msg
{-# INLINE fail #-}
instance (Monoid w, Fail.MonadFail m) => Fail.MonadFail (WriterT w m) where
fail msg = WriterT $ Fail.fail msg
{-# INLINE fail #-}
instance (Monoid w, MonadPlus m) => MonadPlus (WriterT w m) where
mzero = WriterT mzero
{-# INLINE mzero #-}
m `mplus` n = WriterT $ runWriterT m `mplus` runWriterT n
{-# INLINE mplus #-}
instance (Monoid w, MonadFix m) => MonadFix (WriterT w m) where
mfix m = WriterT $ mfix $ \ ~(a, _) -> runWriterT (m a)
{-# INLINE mfix #-}
instance (Monoid w) => MonadTrans (WriterT w) where
lift m = WriterT $ do
a <- m
return (a, mempty)
{-# INLINE lift #-}
instance (Monoid w, MonadIO m) => MonadIO (WriterT w m) where
liftIO = lift . liftIO
{-# INLINE liftIO #-}
instance (Monoid w, MonadZip m) => MonadZip (WriterT w m) where
mzipWith f (WriterT x) (WriterT y) = WriterT $
mzipWith (\ ~(a, w) ~(b, w') -> (f a b, w `mappend` w')) x y
{-# INLINE mzipWith #-}
-- | @'tell' w@ is an action that produces the output @w@.
tell :: (Monad m) => w -> WriterT w m ()
tell w = writer ((), w)
{-# INLINE tell #-}
-- | @'listen' m@ is an action that executes the action @m@ and adds its
-- output to the value of the computation.
--
-- * @'runWriterT' ('listen' m) = 'liftM' (\\ (a, w) -> ((a, w), w)) ('runWriterT' m)@
listen :: (Monad m) => WriterT w m a -> WriterT w m (a, w)
listen m = WriterT $ do
~(a, w) <- runWriterT m
return ((a, w), w)
{-# INLINE listen #-}
-- | @'listens' f m@ is an action that executes the action @m@ and adds
-- the result of applying @f@ to the output to the value of the computation.
--
-- * @'listens' f m = 'liftM' (id *** f) ('listen' m)@
--
-- * @'runWriterT' ('listens' f m) = 'liftM' (\\ (a, w) -> ((a, f w), w)) ('runWriterT' m)@
listens :: (Monad m) => (w -> b) -> WriterT w m a -> WriterT w m (a, b)
listens f m = WriterT $ do
~(a, w) <- runWriterT m
return ((a, f w), w)
{-# INLINE listens #-}
-- | @'pass' m@ is an action that executes the action @m@, which returns
-- a value and a function, and returns the value, applying the function
-- to the output.
--
-- * @'runWriterT' ('pass' m) = 'liftM' (\\ ((a, f), w) -> (a, f w)) ('runWriterT' m)@
pass :: (Monad m) => WriterT w m (a, w -> w) -> WriterT w m a
pass m = WriterT $ do
~((a, f), w) <- runWriterT m
return (a, f w)
{-# INLINE pass #-}
-- | @'censor' f m@ is an action that executes the action @m@ and
-- applies the function @f@ to its output, leaving the return value
-- unchanged.
--
-- * @'censor' f m = 'pass' ('liftM' (\\ x -> (x,f)) m)@
--
-- * @'runWriterT' ('censor' f m) = 'liftM' (\\ (a, w) -> (a, f w)) ('runWriterT' m)@
censor :: (Monad m) => (w -> w) -> WriterT w m a -> WriterT w m a
censor f m = WriterT $ do
~(a, w) <- runWriterT m
return (a, f w)
{-# INLINE censor #-}
-- | Lift a @callCC@ operation to the new monad.
liftCallCC :: (Monoid w) => CallCC m (a,w) (b,w) -> CallCC (WriterT w m) a b
liftCallCC callCC f = WriterT $
callCC $ \ c ->
runWriterT (f (\ a -> WriterT $ c (a, mempty)))
{-# INLINE liftCallCC #-}
-- | Lift a @catchE@ operation to the new monad.
liftCatch :: Catch e m (a,w) -> Catch e (WriterT w m) a
liftCatch catchE m h =
WriterT $ runWriterT m `catchE` \ e -> runWriterT (h e)
{-# INLINE liftCatch #-}
|
phischu/fragnix
|
tests/packages/scotty/Control.Monad.Trans.Writer.Lazy.hs
|
bsd-3-clause
| 9,874 | 0 | 14 | 2,227 | 2,654 | 1,451 | 1,203 | 166 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.