code (string, 5..1.03M) | repo_name (string, 5..90) | path (string, 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
import qualified Data.Set as Set
import Data.List
import Primes
import Utils
fourDigitPrimes = filter (\n -> n>999 && n<9999) $ take 2000 primeTable
primePermutations n = Set.toList $ Set.fromList [ perm | perm <- generateDigitPermutations n, isPrime perm ]
diffs l = [ (n2-n1, n1, n2, n3) | i <- [0..maxIdx], j <- [(i+1)..maxIdx], k <- [(j+1)..maxIdx], let n1 = l !! i, let n2 = l !! j, let n3 = l !! k, n2-n1 == n3-n2 ]
  where maxIdx = length l - 1
nSeries n = groupedDiffs
where perms = primePermutations n
ds = diffs perms
sortedDiffs = sort ds
groupedDiffs = groupBy (\(a,b,c,d)-> \(e,f,g,h) -> a==e) sortedDiffs
lengthSeries = map length groupedDiffs
answers = map nSeries fourDigitPrimes
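-- Editor's sketch (not in the original file): a possible entry point that
-- prints, for each four-digit prime that yields one, the arithmetic triples
-- formed by its prime digit permutations (Project Euler problem 49).
main :: IO ()
main = mapM_ print $ filter (not . null) answers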
|
arekfu/project_euler
|
p0049/p0049.hs
|
mit
| 746 | 6 | 11 | 187 | 365 | 192 | 173 | 15 | 1 |
import Control.Monad
readNumbers :: String -> [Int]
readNumbers = map read . words
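-- One partitioning pass around the head element as pivot: elements smaller
-- than the pivot, then the pivot, then the larger ones. Duplicates of the
-- pivot are dropped, which is fine for the distinct inputs of this exercise.
-- e.g. partition [4,5,3,7,2] == [3,2,4,5,7]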
partition :: (Ord a) => [a] -> [a]
partition [] = []
partition (p:xs) = [l | l <- xs, l < p] ++ [p] ++ [g | g <- xs, g > p]
main :: IO ()
main = do
n <- readLn :: IO Int
list <- getLine
let input = readNumbers list
let ans = partition input
putStrLn (unwords (map show ans))
|
mgrebenets/hackerrank
|
alg/arr-n-srt/quicksort-1-partition.hs
|
mit
| 382 | 1 | 11 | 102 | 212 | 106 | 106 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Tests where
import Control.Monad (void)
import Control.Monad.IO.Class
import SimpleStore
import SimpleStore.Cell.Types
import SimpleStore.Cell.Distributed
import System.Random
import Test.Tasty.HUnit
import TestTypes
cellHUnits :: [CellHUnit]
cellHUnits = [testInsertGet, testInsertGetSpaced, testInsertLotsGet]
testInsertGet :: CellHUnit
testInsertGet = CellHUnit testInsertGet' "Insert/Get"
testInsertGet' :: (UrlList urllist) => LocalStoreM '[] urllist TestCellEntry ()
testInsertGet' = do
entry <- liftIO $ makeRandomTestCellEntry
eResult <- withStore entry $ \_lifter store -> do
entry' <- getLocalStore store
liftIO $ entry @=? entry'
either (liftIO . assertFailure) (const $ return ()) eResult
testInsertGetSpaced :: CellHUnit
testInsertGetSpaced = CellHUnit testInsertGetSpaced' "Insert/Get Spaced"
testInsertGetSpaced' :: (UrlList urllist) => LocalStoreM '[] urllist TestCellEntry ()
testInsertGetSpaced' = do
entry <- liftIO $ makeRandomTestCellEntry
void $ withStore entry (const $ const $ return ())
eResult <- withStore (unvalueTestCellEntry entry) $ \_lifter store -> do
entry' <- getLocalStore store
liftIO $ entry @=? entry'
either (liftIO . assertFailure) (const $ return ()) eResult
testInsertLotsGet :: CellHUnit
testInsertLotsGet = CellHUnit testInsertLotsGet' "Insert Lots/Get"
testInsertLotsGet' :: (UrlList urllist) => LocalStoreM '[] urllist TestCellEntry ()
testInsertLotsGet' = do
entries <- liftIO $ mapM (const makeRandomTestCellEntry) [1..100]
entryIdx <- liftIO $ getStdRandom $ randomR (0, length entries - 1)
let entry = entries !! entryIdx
void $ withStores entries (const $ const $ return ())
eResult <- withStore (unvalueTestCellEntry $ entry) $ \_lifter store -> do
entry' <- getLocalStore store
liftIO $ entry @=? entry'
either (liftIO . assertFailure) (const $ return ()) eResult
unvalueTestCellEntry :: TestCellEntry -> TestCellEntry
unvalueTestCellEntry (TestCellEntry (k,_)) = TestCellEntry (k,0)
makeRandomTestCellEntry :: IO TestCellEntry
makeRandomTestCellEntry = do
k <- getStdRandom $ random
if k == 0
then makeRandomTestCellEntry
else do
v <- getStdRandom $ random
return $ TestCellEntry (k, v)
|
plow-technologies/distributed-simple-cell
|
tests/Tests.hs
|
mit
| 2,411 | 0 | 13 | 395 | 697 | 358 | 339 | 58 | 2 |
module Main where
import Format.RGB565 (toHex, toNHex, toRGB565, toRGB565Hex)
import Format.Converter (pictureToRaw, toMaybeFormat, Format())
import Test.Hspec
import System.Directory (getCurrentDirectory, doesDirectoryExist,
createDirectory, getDirectoryContents,
removeDirectoryRecursive)
import System.FilePath.Posix ((</>))
import Data.Maybe (fromJust)
import Data.List (foldl')
main :: IO ()
main = do
dir <- getCurrentDirectory
exist <- doesDirectoryExist "tmp"
if exist
then removeDirectoryRecursive "tmp"
else return ()
hspec $ do
describe "UTFTConverter Library" $ do
describe "Format.RGB565" $ do
describe "toHex" $ do
it "toHex should be 0" $ do
toHex 0 `shouldBe` "0"
it "toHex should be C" $ do
toHex 12 `shouldBe` "C"
it "toHex should be C6" $ do
toHex 198 `shouldBe` "C6"
it "toHex should be 7A" $ do
toHex 122 `shouldBe` "7A"
it "toHex should be FF" $ do
toHex 255 `shouldBe` "FF"
it "toHex should be 10000" $ do
toHex 65536 `shouldBe` "10000"
describe "toNHex" $ do
it "toNHex 4 0 should be 0000" $ do
toNHex 4 0 `shouldBe` "0000"
it "toNHex 4 12 should be 000C" $ do
toNHex 4 12 `shouldBe` "000C"
it "toNHex 4 198 should be 00C6" $ do
toNHex 4 198 `shouldBe` "00C6"
it "toNHex 4 122 should be 007A" $ do
toNHex 4 122 `shouldBe` "007A"
it "toNHex 4 255 should be 00FF" $ do
toNHex 4 255 `shouldBe` "00FF"
it "toNHex 4 65535 should be FFFF" $ do
toNHex 4 65535 `shouldBe` "FFFF"
describe "toRGB565" $ do
it "toRGB565 (255, 255, 255) should be 65535" $ do
toRGB565 (255, 255, 255) `shouldBe` 65535
it "toRGB565 (123, 123, 123) should be 31695" $ do
toRGB565 (123, 123, 123) `shouldBe` 31695
it "toRGB565 (90, 255, 0) should be 24544" $ do
toRGB565 ( 90, 255, 0) `shouldBe` 24544
it "toRGB565 (0, 0, 0) should be 0" $ do
toRGB565 ( 0, 0, 0) `shouldBe` 0
describe "toRGB565Hex" $ do
it "toRGB565Hex (255, 255, 255) should be FFFF" $ do
toRGB565Hex (255, 255, 255) `shouldBe` "FFFF"
it "toRGB565Hex (123, 123, 123) should be 7BCF" $ do
toRGB565Hex (123, 123, 123) `shouldBe` "7BCF"
it "toRGB565Hex (90, 255, 0) should be 5FE0" $ do
toRGB565Hex ( 90, 255, 0) `shouldBe` "5FE0"
it "toRGB565Hex (0, 0, 0) should be 0" $ do
toRGB565Hex ( 0, 0, 0) `shouldBe` "0000"
describe "Format.Converter" $ do
it "tests/examples/cat_01 exists" $ do
exists <- doesDirectoryExist (dir </> "tests" </> "examples" </> "cat_01")
exists `shouldBe` True
it "tests/examples/cat_02 exists" $ do
exists <- doesDirectoryExist (dir </> "tests" </> "examples" </> "cat_02")
exists `shouldBe` True
it "tests/examples/cat_03 exists" $ do
exists <- doesDirectoryExist (dir </> "tests" </> "examples" </> "cat_03")
exists `shouldBe` True
it "15 example files should be in tests/examples/cat_0?" $ do
dir1 <- getExamplePicsPath dir ("tests" </> "examples" </> "cat_01")
dir2 <- getExamplePicsPath dir ("tests" </> "examples" </> "cat_02")
dir3 <- getExamplePicsPath dir ("tests" </> "examples" </> "cat_03")
length (dir1 ++ dir2 ++ dir3) `shouldBe` 15
it "cat_01 pics should be all converted to almost the same .raw-files (>99% similarity)" $ do
createDirectory "tmp"
pics <- getExampleFormatPicsPath dir ("tests" </> "examples" </> "cat_01")
mapM_ (pictureToRaw (dir </> "tmp")) pics
rawfps <- getExamplePicsPath dir "tmp"
[p1, p2, p3, p4, p5] <- mapM readFile rawfps
let r1 = picSimilarity p1 p2
r2 = picSimilarity p1 p3
r3 = picSimilarity p1 p4
r4 = picSimilarity p1 p5
putStrLn $ "Pic similarity for p1 ~ p2: " ++ show r1
putStrLn $ "Pic similarity for p1 ~ p3: " ++ show r2
putStrLn $ "Pic similarity for p1 ~ p4: " ++ show r3
putStrLn $ "Pic similarity for p1 ~ p5: " ++ show r4
removeDirectoryRecursive (dir </> "tmp")
((r1 + r2 + r3 + r4) / 4) > 98 `shouldBe` True
it "cat_02 pics should be all converted to almost the same .raw-files (>99% similarity)" $ do
createDirectory "tmp"
pics <- getExampleFormatPicsPath dir ("tests" </> "examples" </> "cat_02")
mapM_ (pictureToRaw (dir </> "tmp")) pics
rawfps <- getExamplePicsPath dir "tmp"
[p1, p2, p3, p4, p5] <- mapM readFile rawfps
let r1 = picSimilarity p1 p2
r2 = picSimilarity p1 p3
r3 = picSimilarity p1 p4
r4 = picSimilarity p1 p5
putStrLn $ "Pic similarity for p1 ~ p2: " ++ show r1
putStrLn $ "Pic similarity for p1 ~ p3: " ++ show r2
putStrLn $ "Pic similarity for p1 ~ p4: " ++ show r3
putStrLn $ "Pic similarity for p1 ~ p5: " ++ show r4
removeDirectoryRecursive (dir </> "tmp")
((r1 + r2 + r3 + r4) / 4) > 98 `shouldBe` True
it "cat_03 pics should be all converted to almost the same .raw-files (>99% similarity)" $ do
createDirectory "tmp"
pics <- getExampleFormatPicsPath dir ("tests" </> "examples" </> "cat_03")
mapM_ (pictureToRaw (dir </> "tmp")) pics
rawfps <- getExamplePicsPath dir "tmp"
[p1, p2, p3, p4, p5] <- mapM readFile rawfps
let r1 = picSimilarity p1 p2
r2 = picSimilarity p1 p3
r3 = picSimilarity p1 p4
r4 = picSimilarity p1 p5
putStrLn $ "Pic similarity for p1 ~ p2: " ++ show r1
putStrLn $ "Pic similarity for p1 ~ p3: " ++ show r2
putStrLn $ "Pic similarity for p1 ~ p4: " ++ show r3
putStrLn $ "Pic similarity for p1 ~ p5: " ++ show r4
removeDirectoryRecursive (dir </> "tmp")
((r1 + r2 + r3 + r4) / 4) > 98 `shouldBe` True
getExamplePicsPath :: FilePath -> FilePath -> IO [FilePath]
getExamplePicsPath curdir picdir = do
content <- getDirectoryContents (curdir </> picdir)
let picnames = filter (`notElem` [".", ".."]) content
return $ map (\x -> curdir </> picdir </> x) picnames
getExampleFormatPicsPath :: FilePath -> FilePath -> IO [(Format, FilePath)]
getExampleFormatPicsPath curdir picdir = do
content <- getDirectoryContents (curdir </> picdir)
let picnames = filter (`notElem` [".", ".."]) content
picpaths = map (\x -> curdir </> picdir </> x) picnames
flip zip picpaths . map fromJust <$> mapM toMaybeFormat picpaths
picSimilarity :: String -> String -> Double
picSimilarity p1 p2 =
let f1 = words p1
f2 = words p2
(fcount, count) = foldl' (\(fc, c) (a, b) -> if a == b
then (fc , c + 1)
else (fc + 1, c + 1)) (0,0) (zip f1 f2)
in (100 - (fcount / count))
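-- Editor's note: the expected values in the spec above are consistent with
-- standard RGB565 packing (5 bits red, 6 bits green, 5 bits blue). A minimal
-- reference sketch of that packing is given here for illustration; 'rgb565Ref'
-- is a hypothetical helper, not part of the Format.RGB565 API, and the real
-- toRGB565 implementation may differ.
rgb565Ref :: (Int, Int, Int) -> Int
rgb565Ref (r, g, b) = (r `div` 8) * 2048 + (g `div` 4) * 32 + (b `div` 8)
-- e.g. rgb565Ref (123, 123, 123) == 31695 and rgb565Ref (90, 255, 0) == 24544,
-- matching the toRGB565 expectations tested above.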
|
cirquit/UTFTConverter
|
tests/Tests.hs
|
mit
| 7,459 | 0 | 26 | 2,531 | 2,107 | 1,038 | 1,069 | 146 | 2 |
module Colors where
import Graphics.UI.GLUT
type ColorBG = Color4 GLfloat
type ColorFG = Color3 GLfloat
blackBG :: ColorBG
blackBG = Color4 0 0 0 1
white :: ColorFG
white = Color3 0.9 0.9 0.9
blue :: ColorFG
blue = Color3 0 0 0.9
red :: ColorFG
red = Color3 0.9 0 0
green :: ColorFG
green = Color3 0 0.9 0
cyan :: ColorFG
cyan = Color3 0 0.9 0.9
magenta :: ColorFG
magenta = Color3 0.9 0 0.9
yellow :: ColorFG
yellow = Color3 0.9 0.9 0
|
mrlovre/LMTetrys
|
src/Colors.hs
|
gpl-2.0
| 456 | 0 | 5 | 108 | 166 | 90 | 76 | 20 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Output.InfiniteLoop where
import Sprockell.System
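-- Rough source-level equivalent of 'prog' below, reconstructed from the
-- inline comments (an editor's sketch, not part of the original file):
--
--   b := 0
--   while true { b := b + 1 }   -- the exit branch on (1 xor 1) /= 0 never fires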
prog:: [Instruction]
prog = [Const 0 RegA
,Store RegA (Addr 0) -- b
,Compute Add PC Zero RegA
,Push RegA -- push line 7 as start of while
,Const 1 RegA -- RegA <-- 1
,Const 1 RegB -- RegB <-- 1
,Compute Xor RegA RegB RegA -- RegA <-- 0
,Const 8 RegB
,Compute Add RegB PC RegB -- compute instruction after while
,Branch RegA (Ind RegB) -- jump to after while if RegA /= 0 (never happens)
,Load (Addr 0) RegA -- RegA <-- b
,Const 1 RegB -- RegB <-- 1
,Compute Add RegA RegB RegA -- RegA <-- b+1
,Store RegA (Addr 0) -- b = b+1
,Pop RegA -- pop start of while
        ,Jump (Ind RegA) -- jump to start of while
,Pop RegA -- cleanup: pop start of while
,EndProg]
main = run 1 prog >> putChar '\n'
|
Ertruby/PPFinalProject
|
src/Examples/InfiniteLoop.hs
|
gpl-2.0
| 1,103 | 0 | 8 | 482 | 233 | 127 | 106 | 23 | 1 |
{-# LANGUAGE RankNTypes #-}
{- |
Module : $Header$
Description : Central datastructures for development graphs
Copyright : (c) Till Mossakowski, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(Logic)
Central datastructures for development graphs
Follows Sect. IV:4.2 of the CASL Reference Manual.
We also provide functions for constructing and modifying development graphs.
However note that these changes need to be propagated to the GUI if they
also shall be visible in the displayed development graph.
-}
{-
References:
T. Mossakowski, S. Autexier and D. Hutter:
Extending Development Graphs With Hiding.
H. Hussmann (ed.): Fundamental Approaches to Software Engineering 2001,
Lecture Notes in Computer Science 2029, p. 269-283,
Springer-Verlag 2001.
T. Mossakowski, S. Autexier, D. Hutter, P. Hoffman:
CASL Proof calculus. In: CASL reference manual, part IV.
Available from http://www.cofi.info
-}
module Static.DevGraph where
import Syntax.AS_Structured
import Syntax.AS_Library
import Static.GTheory
import Static.DgUtils
import qualified Static.XGraph as XGraph
import Logic.Logic
import Logic.ExtSign
import Logic.Comorphism
import Logic.Grothendieck
import Logic.Prover
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.Graph as Tree
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.SizedList as SizedList
import qualified Common.OrderedMap as OMap
import Common.AS_Annotation
import Common.GlobalAnnotations
import Common.Id
import Common.IRI
import Common.LibName
import Common.Consistency
import Control.Concurrent.MVar
import Data.Graph.Inductive.Basic
import Data.Graph.Inductive.Graph as Graph
import Data.Graph.Inductive.Query.DFS
import Data.List
import Data.Maybe
import Data.Ord
import qualified Data.Map as Map
import qualified Data.Set as Set
import Common.Result
-- * types for structured specification analysis
-- ** basic types
-- | Node with signature in a DG
data NodeSig = NodeSig { getNode :: Node, getSig :: G_sign }
deriving (Eq, Show)
{- | NodeSig or possibly the empty sig in a logic
(but since we want to avoid lots of vacuous nodes with empty sig,
we do not assign a real node in the DG here) -}
data MaybeNode = JustNode NodeSig | EmptyNode AnyLogic deriving (Show, Eq)
-- | a wrapper for renamings with a trivial Ord instance
newtype Renamed = Renamed RENAMING deriving Show
instance Ord Renamed where
compare _ _ = EQ
instance Eq Renamed where
_ == _ = True
-- | a wrapper for restrictions with a trivial Ord instance
data MaybeRestricted = NoRestriction | Restricted RESTRICTION deriving Show
instance Ord MaybeRestricted where
compare _ _ = EQ
instance Eq MaybeRestricted where
_ == _ = True
{- | Data type indicating the origin of nodes and edges in the input language
This is not used in the DG calculus; it may only be used in the future
for reconstruction of input and management of change. -}
data DGOrigin =
DGEmpty
| DGBasic
| DGBasicSpec (Maybe G_basic_spec) G_sign (Set.Set G_symbol)
| DGExtension
| DGLogicCoercion
| DGTranslation Renamed
| DGUnion
| DGRestriction (MaybeRestricted) (Set.Set G_symbol)
| DGRevealTranslation
| DGFreeOrCofree FreeOrCofree
| DGLocal
| DGClosed
| DGLogicQual
| DGData
| DGFormalParams
| DGImports
| DGInst IRI
| DGFitSpec
| DGFitView IRI
| DGProof
| DGNormalForm Node
| DGintegratedSCC
| DGFlattening
deriving (Show, Eq, Ord)
-- | node content or reference to another library's node
data DGNodeInfo = DGNode
{ node_origin :: DGOrigin -- origin in input language
, node_cons_status :: ConsStatus } -- like a link from the empty signature
| DGRef -- reference to node in a different DG
{ ref_libname :: LibName -- pointer to DG where ref'd node resides
, ref_node :: Node -- pointer to ref'd node
} deriving (Show, Eq)
{- | node inscriptions in development graphs.
Nothing entries indicate "not computed yet" -}
data DGNodeLab =
DGNodeLab
{ dgn_name :: NodeName -- name in the input language
, dgn_theory :: G_theory -- local theory
, globalTheory :: Maybe G_theory -- global theory
  , labelHasHiding :: Bool -- does this node have an incoming hiding link
, labelHasFree :: Bool -- has incoming free definition link
, dgn_nf :: Maybe Node -- normal form, for Theorem-Hide-Shift
, dgn_sigma :: Maybe GMorphism -- inclusion of signature into nf signature
, dgn_freenf :: Maybe Node -- normal form for freeness
, dgn_phi :: Maybe GMorphism -- morphism from signature to nffree signature
, nodeInfo :: DGNodeInfo
, nodeMod :: NodeMod
, xnode :: Maybe XGraph.XNode
, dgn_lock :: Maybe (MVar ())
, dgn_symbolpathlist :: G_symbolmap [SLinkPath]
}
instance Show DGNodeLab where
show _ = "<a DG node label>"
isDGRef :: DGNodeLab -> Bool
isDGRef l = case nodeInfo l of
DGNode {} -> False
DGRef {} -> True
sensWithKind :: (forall a . SenStatus a (AnyComorphism, BasicProof) -> Bool)
-> G_theory -> [String]
sensWithKind f (G_theory _lid _sigma _ sens _) = Map.keys $ OMap.filter f sens
hasSenKind :: (forall a . SenStatus a (AnyComorphism, BasicProof) -> Bool)
-> DGNodeLab -> Bool
hasSenKind f = not . null . sensWithKind f . dgn_theory
-- | test if a given node label has local open goals
hasOpenGoals :: DGNodeLab -> Bool
hasOpenGoals = hasSenKind (\ s -> not (isAxiom s) && not (isProvenSenStatus s))
-- | check if the node has an internal name
isInternalNode :: DGNodeLab -> Bool
isInternalNode DGNodeLab {dgn_name = n} = isInternal n
getNodeConsStatus :: DGNodeLab -> ConsStatus
getNodeConsStatus lbl = case nodeInfo lbl of
DGRef {} -> mkConsStatus None
DGNode { node_cons_status = c } -> c
getNodeCons :: DGNodeLab -> Conservativity
getNodeCons = getConsOfStatus . getNodeConsStatus
-- | returns the Conservativity if the given node has one, otherwise none
getNodeConservativity :: LNode DGNodeLab -> Conservativity
getNodeConservativity = getNodeCons . snd
{- | test if a node conservativity is open,
return input for refs or nodes with normal forms -}
hasOpenNodeConsStatus :: Bool -> DGNodeLab -> Bool
hasOpenNodeConsStatus b lbl = if isJust $ dgn_nf lbl then b else
hasOpenConsStatus b $ getNodeConsStatus lbl
markNodeConsistency :: Conservativity -> String -> DGNodeLab -> DGNodeLab
markNodeConsistency newc str dgnode = dgnode
{ nodeInfo = case nodeInfo dgnode of
ninfo@DGNode { node_cons_status = ConsStatus c pc thm } ->
if pc == newc && isProvenThmLinkStatus thm then ninfo else
ninfo { node_cons_status = ConsStatus c newc
$ Proven (DGRule $ showConsistency newc ++ str)
emptyProofBasis }
ninfo -> ninfo }
markNodeConsistent :: String -> DGNodeLab -> DGNodeLab
markNodeConsistent = markNodeConsistency Cons
markNodeInconsistent :: String -> DGNodeLab -> DGNodeLab
markNodeInconsistent = markNodeConsistency Inconsistent
-- | creates a DGNodeType from a DGNodeLab
getRealDGNodeType :: DGNodeLab -> DGNodeType
getRealDGNodeType dgnlab = DGNodeType
{ isRefType = isDGRef dgnlab
, isProvenNode = not $ hasOpenGoals dgnlab
, isProvenCons = not $ hasOpenNodeConsStatus False dgnlab
, isInternalSpec = isInternalNode dgnlab }
-- | a wrapper for fitting morphisms with a trivial Eq instance
newtype Fitted = Fitted [G_mapping] deriving Show
instance Eq Fitted where
_ == _ = True
data DGLinkOrigin =
SeeTarget
| SeeSource
| TEST
| DGImpliesLink
| DGLinkExtension
| DGLinkTranslation
| DGLinkClosedLenv
| DGLinkImports
| DGLinkMorph IRI
| DGLinkInst IRI Fitted
| DGLinkInstArg IRI
| DGLinkView IRI Fitted
| DGLinkFitView IRI
| DGLinkFitViewImp IRI
| DGLinkProof
| DGLinkFlatteningUnion
| DGLinkFlatteningRename
| DGLinkRefinement IRI
deriving (Show, Eq)
{- | Link types of development graphs,
Sect. IV:4.2 of the CASL Reference Manual explains them in depth. -}
data DGLinkType =
ScopedLink Scope LinkKind ConsStatus
| HidingDefLink
| FreeOrCofreeDefLink FreeOrCofree MaybeNode -- the "parameter" node
| HidingFreeOrCofreeThm (Maybe FreeOrCofree) Node GMorphism ThmLinkStatus
{- DGLink S1 S2 m2 (DGLinkType m1 p) n
corresponds to a span of morphisms
S1 <--m1-- S --m2--> S2 -}
deriving (Show, Eq)
-- | extract theorem link status from link type
thmLinkStatus :: DGLinkType -> Maybe ThmLinkStatus
thmLinkStatus t = case t of
ScopedLink _ (ThmLink s) _ -> Just s
HidingFreeOrCofreeThm _ _ _ s -> Just s
_ -> Nothing
-- | extract proof basis from link type
thmProofBasis :: DGLinkType -> ProofBasis
thmProofBasis = maybe emptyProofBasis proofBasisOfThmLinkStatus . thmLinkStatus
updThmProofBasis :: DGLinkType -> ProofBasis -> DGLinkType
updThmProofBasis t pB = case t of
ScopedLink sc (ThmLink s) cs -> ScopedLink sc
(ThmLink $ updProofBasisOfThmLinkStatus s pB) cs
HidingFreeOrCofreeThm h n m s -> HidingFreeOrCofreeThm h n m
$ updProofBasisOfThmLinkStatus s pB
_ -> t
-- | link inscriptions in development graphs
data DGLinkLab = DGLink
{ dgl_morphism :: GMorphism -- signature morphism of link
, dgl_type :: DGLinkType -- type: local, global, def, thm?
, dgl_origin :: DGLinkOrigin -- origin in input language
, dglPending :: Bool -- open proofs of edges in proof basis
, dgl_id :: EdgeId -- id of the edge
, dglName :: String -- name of the edge
}
instance Show DGLinkLab where
show _ = "<a DG link label>"
mkDGLink :: GMorphism -> DGLinkType -> DGLinkOrigin -> String -> EdgeId
-> DGLinkLab
mkDGLink mor ty orig nn ei = DGLink
{ dgl_morphism = mor
, dgl_type = ty
, dgl_origin = orig
, dglPending = False
, dgl_id = ei
, dglName = nn }
-- | name a link
nameDGLink :: String -> DGLinkLab -> DGLinkLab
nameDGLink nn l = l { dglName = nn }
defDGLink :: GMorphism -> DGLinkType -> DGLinkOrigin -> DGLinkLab
{- See svn-version 13804 for a naming concept which unfortunately introduced
same names for different links. -}
defDGLink m ty orig = mkDGLink m ty orig "" defaultEdgeId
defDGLinkId :: GMorphism -> DGLinkType -> DGLinkOrigin -> EdgeId -> DGLinkLab
defDGLinkId m ty orig ei = (defDGLink m ty orig) { dgl_id = ei }
globDefLink :: GMorphism -> DGLinkOrigin -> DGLinkLab
globDefLink m = defDGLink m globalDef
-- | describe the link type of the label
getDGLinkType :: DGLinkLab -> String
getDGLinkType = getDGEdgeTypeName . getRealDGLinkType
getHomEdgeType :: Bool -> Bool -> DGLinkType -> DGEdgeTypeModInc
getHomEdgeType isPend isHom lt = case lt of
ScopedLink scope lk cons -> case lk of
DefLink -> case scope of
Local -> LocalDef
Global -> if isHom then GlobalDef else HetDef
ThmLink st -> ThmType
{ thmEdgeType = GlobalOrLocalThm scope isHom
, isProvenEdge = isProvenThmLinkStatus st
, isConservativ = isProvenConsStatusLink cons
, isPending = isPend } -- needs to be checked
HidingDefLink -> HidingDef
FreeOrCofreeDefLink fc _ -> FreeOrCofreeDef fc
HidingFreeOrCofreeThm mh _ _ st -> ThmType
{ thmEdgeType = case mh of
Nothing -> HidingThm
Just fc -> FreeOrCofreeThm fc
, isProvenEdge = isProvenThmLinkStatus st
, isConservativ = True
, isPending = isPend }
-- | creates a DGEdgeType from a DGLinkLab
getRealDGLinkType :: DGLinkLab -> DGEdgeType
getRealDGLinkType lnk = let
gmor = dgl_morphism lnk
in DGEdgeType
{ edgeTypeModInc = getHomEdgeType (dglPending lnk) (isHomogeneous gmor)
$ dgl_type lnk
, isInc = case gmor of
GMorphism cid _ _ mor _ -> isInclusionComorphism cid && isInclusion mor
}
-- | return the proof basis of the given linklab
getProofBasis :: DGLinkLab -> ProofBasis
getProofBasis = thmProofBasis . dgl_type
-- | set proof for theorem links
setProof :: ThmLinkStatus -> DGLinkType -> DGLinkType
setProof p lt = case lt of
ScopedLink sc (ThmLink _) cs -> ScopedLink sc (ThmLink p) cs
HidingFreeOrCofreeThm hm n mor _ -> HidingFreeOrCofreeThm hm n mor p
_ -> lt
-- * methods to check the type of an edge
isProven :: DGLinkType -> Bool
isProven edge = case edge of
ScopedLink _ DefLink _ -> True
_ -> case thmLinkStatus edge of
Just (Proven _ _) -> True
_ -> False
isGlobalEdge :: DGLinkType -> Bool
isGlobalEdge edge = case edge of
ScopedLink Global _ _ -> True
_ -> False
isGlobalThm :: DGLinkType -> Bool
isGlobalThm edge = case edge of
ScopedLink Global (ThmLink _) _ -> True
_ -> False
isUnprovenGlobalThm :: DGLinkType -> Bool
isUnprovenGlobalThm lt = case lt of
ScopedLink Global (ThmLink LeftOpen) _ -> True
_ -> False
isLocalThm :: DGLinkType -> Bool
isLocalThm edge = case edge of
ScopedLink Local (ThmLink _) _ -> True
_ -> False
isUnprovenLocalThm :: DGLinkType -> Bool
isUnprovenLocalThm lt = case lt of
ScopedLink Local (ThmLink LeftOpen) _ -> True
_ -> False
isUnprovenHidingThm :: DGLinkType -> Bool
isUnprovenHidingThm lt = case lt of
HidingFreeOrCofreeThm Nothing _ _ LeftOpen -> True
_ -> False
isFreeEdge :: DGLinkType -> Bool
isFreeEdge edge = case edge of
FreeOrCofreeDefLink Free _ -> True
_ -> False
isCofreeEdge :: DGLinkType -> Bool
isCofreeEdge edge = case edge of
FreeOrCofreeDefLink Cofree _ -> True
_ -> False
-- ** types for global environments
-- | import, formal parameters and united signature of formal params
data GenSig = GenSig MaybeNode [NodeSig] MaybeNode deriving Show
-- | genericity and body
data ExtGenSig = ExtGenSig
{ genericity :: GenSig
, extGenBody :: NodeSig }
deriving Show
-- | source, morphism, parameterized target
data ExtViewSig = ExtViewSig NodeSig GMorphism ExtGenSig deriving Show
{- ** types for architectural and unit specification analysis
(as defined for basic static semantics in Chap. III:5.1) -}
data UnitSig = UnitSig [NodeSig] NodeSig (Maybe NodeSig) deriving (Show, Eq)
{- The Maybe NodeSig stores the union of the parameters;
the node is needed for consistency checks -}
data ImpUnitSigOrSig = ImpUnitSig MaybeNode UnitSig | Sig NodeSig
deriving (Show, Eq)
type StUnitCtx = Map.Map IRI ImpUnitSigOrSig
emptyStUnitCtx :: StUnitCtx
emptyStUnitCtx = Map.empty
{- data ArchSig = ArchSig StUnitCtx UnitSig deriving Show
this type is superseded by RefSig -}
type RefSigMap = Map.Map IRI RefSig
type BStContext = Map.Map IRI RefSig
-- there should be only BranchRefSigs
data RefSig = BranchRefSig RTPointer (UnitSig, Maybe BranchSig)
| ComponentRefSig RTPointer RefSigMap
deriving (Eq)
instance Show RefSig where
-- made this instance for debugging purposes
show (BranchRefSig _ (usig, mbsig)) =
let bStr = case mbsig of
Nothing -> "Bottom\n "
Just bsig -> case bsig of
UnitSigAsBranchSig u ->
if u == usig then "same"
else "UnitSigAsBranch:" ++ shows u "\n "
BranchStaticContext bst ->
foldl (++) "branching: "
$ map (\ (n, s) -> shows n " mapped to\n" ++ shows s "\n")
$ Map.toList bst
in
"Branch: \n before refinement:\n " ++ show usig ++
"\n after refinement: \n" ++ bStr ++ "\n"
show (ComponentRefSig _ rsm) =
foldl (++) "CompRefSig:" $ map (\ n -> show n ++ "\n ") $
Map.toList rsm
getPointerFromRef :: RefSig -> RTPointer
getPointerFromRef (BranchRefSig p _) = p
getPointerFromRef (ComponentRefSig p _) = p
setPointerInRef :: RefSig -> RTPointer -> RefSig
setPointerInRef (BranchRefSig _ x) y = BranchRefSig y x
setPointerInRef (ComponentRefSig _ x) y = ComponentRefSig y x
setUnitSigInRef :: RefSig -> UnitSig -> RefSig
setUnitSigInRef (BranchRefSig x (_, y)) usig = BranchRefSig x (usig, y)
setUnitSigInRef _ _ = error "setUnitSigInRef"
getUnitSigFromRef :: RefSig -> Result UnitSig
getUnitSigFromRef (BranchRefSig _ (usig, _)) = return usig
getUnitSigFromRef (ComponentRefSig _ rsm) =
error $ "getUnitSigFromRef:" ++ show (Map.keys rsm)
mkRefSigFromUnit :: UnitSig -> RefSig
mkRefSigFromUnit usig = BranchRefSig RTNone
(usig, Just $ UnitSigAsBranchSig usig)
mkBotSigFromUnit :: UnitSig -> RefSig
mkBotSigFromUnit usig = BranchRefSig RTNone (usig, Nothing)
data BranchSig = UnitSigAsBranchSig UnitSig
| BranchStaticContext BStContext
deriving (Show, Eq)
type RefStUnitCtx = Map.Map IRI RefSig
-- only BranchRefSigs allowed
emptyRefStUnitCtx :: RefStUnitCtx
emptyRefStUnitCtx = Map.empty
-- Auxiliaries for refinement signature composition
matchesContext :: RefSigMap -> BStContext -> Bool
matchesContext rsmap bstc =
not (any (`notElem` Map.keys bstc) $ Map.keys rsmap)
&& namesMatchCtx (Map.keys rsmap) bstc rsmap
equalSigs :: UnitSig -> UnitSig -> Bool
equalSigs (UnitSig ls1 ns1 _) (UnitSig ls2 ns2 _) =
length ls1 == length ls2 && getSig ns1 == getSig ns2
&& all (\ (x1, x2) -> getSig x1 == getSig x2) (zip ls1 ls2)
namesMatchCtx :: [IRI] -> BStContext -> RefSigMap -> Bool
namesMatchCtx [] _ _ = True
namesMatchCtx (un : unitNames) bstc rsmap =
case Map.findWithDefault (error "namesMatchCtx")
un bstc of
BranchRefSig _ (_usig, mbsig) -> case mbsig of
Nothing -> False -- should not be the case
Just bsig -> case bsig of
UnitSigAsBranchSig usig' ->
case Map.findWithDefault (error "USABS") un rsmap of
BranchRefSig _ (usig'', _mbsig') -> equalSigs usig' usig'' &&
namesMatchCtx unitNames bstc rsmap
_ -> False
BranchStaticContext bstc' ->
case rsmap Map.! un of
ComponentRefSig _ rsmap' ->
matchesContext rsmap' bstc' &&
namesMatchCtx unitNames bstc rsmap
{- This is where I introduce something new wrt the original paper:
if bstc' has only one element
it suffices to have the signature of that element
matching the signature from rsmap' -}
_ -> Map.size bstc' == 1 &&
let un1 = head $ Map.keys bstc'
rsmap' = Map.mapKeys (\ x -> if x == un then un1 else x)
rsmap
in namesMatchCtx [un1] bstc' rsmap' &&
namesMatchCtx unitNames bstc rsmap
_ -> False -- this should never be the case
modifyCtx :: [IRI] -> RefSigMap -> BStContext -> BStContext
modifyCtx [] _ bstc = bstc
modifyCtx (un : unitNames) rsmap bstc =
case bstc Map.! un of
BranchRefSig n1 (usig, mbsig) -> case mbsig of
Nothing -> modifyCtx unitNames rsmap bstc -- should not be the case
Just bsig -> case bsig of
UnitSigAsBranchSig usig' ->
case rsmap Map.! un of
BranchRefSig n2 (usig'', bsig'') -> if equalSigs usig' usig'' then
modifyCtx unitNames rsmap $
Map.insert un (BranchRefSig (compPointer n1 n2)
(usig, bsig'')) bstc -- was usig'
else error "illegal composition"
_ -> modifyCtx unitNames rsmap bstc
BranchStaticContext bstc' ->
case rsmap Map.! un of
ComponentRefSig n2 rsmap' -> modifyCtx unitNames rsmap $
Map.insert un
(BranchRefSig (compPointer n1 n2) (usig, Just $
BranchStaticContext $ modifyCtx (Map.keys rsmap') rsmap' bstc'))
bstc
_ -> let f = if Map.size bstc' == 1 then
let un1 = head $ Map.keys bstc'
rsmap' = Map.mapKeys
(\ x -> if x == un then un1 else x)
rsmap
bstc'' = modifyCtx [un1] rsmap' bstc'
in Map.singleton un $
BranchRefSig RTNone (usig, Just
$ BranchStaticContext bstc'')
else Map.empty
in Map.union f $ modifyCtx unitNames rsmap bstc
_ -> modifyCtx unitNames rsmap bstc -- same as above
-- Signature composition
refSigComposition :: RefSig -> RefSig -> Result RefSig
refSigComposition (BranchRefSig n1 (usig1, Just (UnitSigAsBranchSig usig2)))
(BranchRefSig n2 (usig3, bsig)) =
if equalSigs usig2 usig3 then
return $ BranchRefSig (compPointer n1 n2) (usig1, bsig)
else fail $ "Signatures: \n" ++ show usig2 ++ "\n and \n " ++ show usig3 ++
" do not compose"
refSigComposition _rsig1@(BranchRefSig n1
(usig1, Just (BranchStaticContext bstc)))
_rsig2@(ComponentRefSig n2 rsmap) =
if matchesContext rsmap bstc then
return $ BranchRefSig (compPointer n1 n2)
(usig1, Just $ BranchStaticContext $
modifyCtx (Map.keys rsmap) rsmap bstc)
else fail ("Signatures do not match:" ++ show (Map.keys bstc) ++ " "
++ show (Map.keys rsmap))
refSigComposition (ComponentRefSig n1 rsmap1) (ComponentRefSig n2 rsmap2) = do
upd <- mapM (\ x -> do
s <- refSigComposition (rsmap1 Map.! x) (rsmap2 Map.! x)
return (x, s))
$ filter (`elem` Map.keys rsmap1) $ Map.keys rsmap2
let unionMap = Map.union (Map.fromList upd) $
Map.union rsmap1 rsmap2
return $ ComponentRefSig (compPointer n1 n2) unionMap
refSigComposition _rsig1 _rsig2 =
fail "composition of refinement signatures"
-- | an entry of the global environment
data GlobalEntry =
SpecEntry ExtGenSig
| ViewOrStructEntry Bool ExtViewSig
| ArchOrRefEntry Bool RefSig
| UnitEntry UnitSig
deriving Show
type GlobalEnv = Map.Map IRI GlobalEntry
-- ** change and history types
-- | the edit operations of the DGraph
data DGChange =
InsertNode (LNode DGNodeLab)
| DeleteNode (LNode DGNodeLab)
| InsertEdge (LEdge DGLinkLab)
| DeleteEdge (LEdge DGLinkLab)
-- it contains the old label and new label with node
| SetNodeLab DGNodeLab (LNode DGNodeLab)
deriving Show
data HistElem =
HistElem DGChange
| HistGroup DGRule ProofHistory
type ProofHistory = SizedList.SizedList HistElem
-- datatypes for the refinement tree
data RTNodeType = RTPlain UnitSig | RTRef Node deriving (Eq)
instance Show RTNodeType where
show (RTPlain u) = "RTPlain\n" ++ show u
show (RTRef n) = show n
data RTNodeLab = RTNodeLab
{ rtn_type :: RTNodeType
, rtn_name :: String
} deriving Eq
instance Show RTNodeLab where
show r =
let
name = rtn_name r
t = rtn_type r
t1 = case t of
RTPlain u -> "plain: " ++ show u
RTRef n -> show n
in name ++ " " ++ t1
data RTLinkType =
RTRefine
| RTComp
deriving (Show, Eq)
data RTLinkLab = RTLink
{ rtl_type :: RTLinkType
} deriving (Show, Eq)
-- utility functions for handling refinement tree
addNodeRT :: DGraph -> UnitSig -> String -> (Node, DGraph)
addNodeRT dg usig s =
let
g = refTree dg
n = Tree.getNewNode g
l = RTNodeLab {
rtn_type = RTPlain usig
, rtn_name = s
}
in (n, dg {refTree = insNode (n, l) g})
addSpecNodeRT :: DGraph -> UnitSig -> String -> (Node, DGraph)
addSpecNodeRT dg usig s =
let
(n, dg') = addNodeRT dg usig s
f = Map.insert s n $ specRoots dg'
in (n, dg' {specRoots = f})
updateNodeNameRT :: DGraph -> Node -> String -> DGraph
updateNodeNameRT dg n s =
let
g = refTree dg
l = Graph.lab g n
in case l of
Nothing -> dg
Just oldL -> let
newL = oldL {rtn_name = s}
(g', _) = Tree.labelNode (n, newL) g
in dg {refTree = g'}
updateSigRT :: DGraph -> Node -> UnitSig -> DGraph
updateSigRT dg n usig =
let
g = refTree dg
l = Graph.lab g n
in case l of
Nothing -> dg
Just oldL -> let
newL = oldL {rtn_type = RTPlain usig}
(g', _) = Tree.labelNode (n, newL) g
in dg {refTree = g'}
updateNodeNameSpecRT :: DGraph -> Node -> String -> DGraph
updateNodeNameSpecRT dg n s =
let dg' = updateNodeNameRT dg n s
in dg' {specRoots = Map.insert s n $ specRoots dg}
addSubTree :: DGraph -> Maybe RTLeaves -> RTPointer -> (DGraph, RTPointer)
addSubTree dg Nothing (NPComp h) =
foldl
(\ ~(d, NPComp cp) (k, p) -> let
(d', p') = addSubTree d Nothing p
in (d', NPComp (Map.insert k p' cp)))
(dg, NPComp Map.empty) $ Map.toList h
addSubTree dg Nothing p = let
s = refSource p
(dg', f) = copySubTree dg s Nothing
p' = mapRTNodes f p
in (dg', p')
addSubTree dg (Just (RTLeaf x)) p = let
s = refSource p
(dg', f) = copySubTree dg s $ Just x
p' = mapRTNodes f p
in (dg', p')
addSubTree dg (Just (RTLeaves g)) (NPComp h) =
foldl
(\ ~(d, NPComp cp) (k, p) -> let
l = Map.findWithDefault (error $ "addSubTree:" ++ show k) k g
(d', p') = addSubTree d (Just l) p
in (d', NPComp (Map.insert k p' cp)))
(dg, NPComp Map.empty) $ Map.toList h
addSubTree _ _ _ = error "addSubTree"
copySubTree :: DGraph -> Node -> Maybe Node -> (DGraph, Map.Map Node Node)
copySubTree dg n mN =
case mN of
Nothing -> let
rTree = refTree dg
n' = Tree.getNewNode rTree
nLab = fromMaybe (error "copyNode") $ lab rTree n
rTree' = insNode (n', nLab) rTree
in copySubTreeN dg {refTree = rTree'} [n] $ Map.fromList [(n, n')]
Just y -> copySubTreeN dg [n] $ Map.fromList [(n, y)]
copySubTreeN :: DGraph -> [Node] -> Map.Map Node Node
-> (DGraph, Map.Map Node Node)
copySubTreeN dg nList pairs =
case nList of
[] -> (dg, pairs)
n : nList' -> let
rTree = refTree dg
pairsN = Map.findWithDefault (error "copy") n pairs
descs = lsuc rTree n
(dg', pairs') = foldl (copyNode pairsN) (dg, pairs) descs
in copySubTreeN dg' (nub $ nList' ++ map fst descs) pairs'
copyNode :: Node -> (DGraph, Map.Map Node Node) -> LNode RTLinkLab
-> (DGraph, Map.Map Node Node)
copyNode s (dg, nMap) (n, eLab) = let
rTree = refTree dg
nLab = fromMaybe (error "copyNode") $ lab rTree n
n' = Tree.getNewNode rTree
rTree' = insNode (n', nLab) rTree
orderRT _ _ = GT
(rTree'', _) = Tree.insLEdge True orderRT (s, n', eLab) rTree'
in (dg {refTree = rTree''}, Map.insert n n' nMap)
addRefEdgeRT :: DGraph -> Node -> Node -> DGraph
addRefEdgeRT dg n1 n2 =
let
g = refTree dg
orderRT _ _ = GT
(g', b) = Tree.insLEdge True orderRT
(n1, n2, RTLink {rtl_type = RTRefine}) g
in if b then dg {refTree = g'}
else error "addRefEdgeRT"
addEdgesToNodeRT :: DGraph -> [Node] -> Node -> DGraph
addEdgesToNodeRT dg' rnodes n' =
let
g = refTree dg'
orderRT _ _ = GT
(g', b) = foldl (\ (g0, b0) n0 -> let
(g1, b1) = Tree.insLEdge True orderRT
(n', n0, RTLink {rtl_type = RTComp}) g0
in (g1, b1 && b0))
(g, True) rnodes
in if not b then error "addEdgesToNodeRT"
else dg' {refTree = g'}
{- I copied these types from ArchDiagram
to store the diagrams of the arch specs in the dgraph -}
data DiagNodeLab = DiagNode { dn_sig :: NodeSig, dn_desc :: String }
deriving Show
data DiagLinkLab = DiagLink { dl_morphism :: GMorphism, dl_number :: Int }
instance Show DiagLinkLab where
show _ = ""
data Diag = Diagram {
diagGraph :: Tree.Gr DiagNodeLab DiagLinkLab,
numberOfEdges :: Int
}
deriving Show
{- | the actual development graph with auxiliary information. A
'G_sign' should be stored in 'sigMap' under its 'gSignSelfIdx'. The
same applies to 'G_morphism' with 'morMap' and 'gMorphismSelfIdx'
resp. 'G_theory' with 'thMap' and 'gTheorySelfIdx'. -}
data DGraph = DGraph
{ globalAnnos :: GlobalAnnos -- ^ global annos of library
, optLibDefn :: Maybe LIB_DEFN
, globalEnv :: GlobalEnv -- ^ name entities (specs, views) of a library
, dgBody :: Tree.Gr DGNodeLab DGLinkLab -- ^ actual 'DGraph` tree
, currentBaseTheory :: Maybe NodeSig
, refTree :: Tree.Gr RTNodeLab RTLinkLab -- ^ the refinement tree
, specRoots :: Map.Map String Node -- ^ root nodes for named specs
, nameMap :: MapSet.MapSet String Node -- ^ all nodes by name
, archSpecDiags :: Map.Map String Diag
-- ^ dependency diagrams between units
, getNewEdgeId :: EdgeId -- ^ edge counter
, allRefNodes :: Map.Map (LibName, Node) Node -- ^ all DGRef's
, sigMap :: Map.Map SigId G_sign -- ^ signature map
, thMap :: Map.Map ThId G_theory -- ^ theory map
, morMap :: Map.Map MorId G_morphism -- ^ morphism map
, proofHistory :: ProofHistory -- ^ applied proof steps
  , redoHistory :: ProofHistory -- ^ undone proof steps
}
instance Show DGraph where
show _ = "<a development graph>"
emptyDG :: DGraph
emptyDG = DGraph
{ globalAnnos = emptyGlobalAnnos
, optLibDefn = Nothing
, globalEnv = Map.empty
, dgBody = Graph.empty
, currentBaseTheory = Nothing
, refTree = Graph.empty
, specRoots = Map.empty
, nameMap = MapSet.empty
, archSpecDiags = Map.empty
, getNewEdgeId = startEdgeId
, allRefNodes = Map.empty
, sigMap = Map.empty
, thMap = Map.empty
, morMap = Map.empty
, proofHistory = SizedList.empty
, redoHistory = SizedList.empty }
type LibEnv = Map.Map LibName DGraph
-- | an empty environment
emptyLibEnv :: LibEnv
emptyLibEnv = Map.empty
-- * utility functions
-- ** for node signatures
emptyG_sign :: AnyLogic -> G_sign
emptyG_sign (Logic lid) = G_sign lid (ext_empty_signature lid) startSigId
getMaybeSig :: MaybeNode -> G_sign
getMaybeSig (JustNode ns) = getSig ns
getMaybeSig (EmptyNode l) = emptyG_sign l
getLogic :: MaybeNode -> AnyLogic
getLogic = logicOfGsign . getMaybeSig
getNodeLogic :: NodeSig -> AnyLogic
getNodeLogic = logicOfGsign . getSig
-- ** accessing node label
-- | get the origin of a non-reference node (partial)
dgn_origin :: DGNodeLab -> DGOrigin
dgn_origin = node_origin . nodeInfo
-- | get the referenced library (partial)
dgn_libname :: DGNodeLab -> LibName
dgn_libname = ref_libname . nodeInfo
-- | get the referenced node (partial)
dgn_node :: DGNodeLab -> Node
dgn_node = ref_node . nodeInfo
-- | get the signature of a node's theory (total)
dgn_sign :: DGNodeLab -> G_sign
dgn_sign = signOf . dgn_theory
-- | gets the name of a development graph node as a string (total)
getDGNodeName :: DGNodeLab -> String
getDGNodeName = showName . dgn_name
-- | get the global theory of a node or the local one if missing
globOrLocTh :: DGNodeLab -> G_theory
globOrLocTh lbl = fromMaybe (dgn_theory lbl) $ globalTheory lbl
-- ** creating node content and label
-- | create node info
newConsNodeInfo :: DGOrigin -> Conservativity -> DGNodeInfo
newConsNodeInfo orig cs = DGNode
{ node_origin = orig
, node_cons_status = mkConsStatus cs }
-- | create default content
newNodeInfo :: DGOrigin -> DGNodeInfo
newNodeInfo orig = newConsNodeInfo orig None
-- | create a reference node part
newRefInfo :: LibName -> Node -> DGNodeInfo
newRefInfo ln n = DGRef
{ ref_libname = ln
, ref_node = n }
-- | create a new node label
newInfoNodeLab :: NodeName -> DGNodeInfo -> G_theory -> DGNodeLab
newInfoNodeLab name info gTh@(G_theory lid _ _ _ _) = DGNodeLab
{ dgn_name = name
, dgn_theory = gTh
, globalTheory = Nothing
, labelHasHiding = False
, labelHasFree = False
, dgn_nf = Nothing
, dgn_sigma = Nothing
, dgn_freenf = Nothing
, dgn_phi = Nothing
, nodeInfo = info
, nodeMod = unMod
, xnode = Nothing
, dgn_lock = Nothing
, dgn_symbolpathlist = G_symbolmap lid Map.empty }
-- | create a new node label using 'newNodeInfo' and 'newInfoNodeLab'
newNodeLab :: NodeName -> DGOrigin -> G_theory -> DGNodeLab
newNodeLab name = newInfoNodeLab name . newNodeInfo
-- ** handle the lock of a node
-- | wrapper to access the maybe lock
treatNodeLock :: (MVar () -> a) -> DGNodeLab -> a
treatNodeLock f = maybe (error "MVar not initialised") f . dgn_lock
-- | Tries to acquire the local lock. Return False if already acquired.
tryLockLocal :: DGNodeLab -> IO Bool
tryLockLocal = treatNodeLock $ flip tryPutMVar ()
-- | Releases the local lock.
unlockLocal :: DGNodeLab -> IO ()
unlockLocal = treatNodeLock $ \ lock ->
tryTakeMVar lock >>= maybe (error "Local lock wasn't locked.") return
-- | checks if locking MVar is initialized
hasLock :: DGNodeLab -> Bool
hasLock = isJust . dgn_lock
-- ** edge label equalities
-- | equality without comparing the edge ids
eqDGLinkLabContent :: DGLinkLab -> DGLinkLab -> Bool
eqDGLinkLabContent l1 l2 = let
i1 = dgl_id l1
i2 = dgl_id l2
in (i1 <= defaultEdgeId || i2 <= defaultEdgeId || i1 == i2)
&& dgl_morphism l1 == dgl_morphism l2
&& dgl_type l1 == dgl_type l2
&& dgl_origin l1 == dgl_origin l2
&& dglName l1 == dglName l2
-- | equality comparing ids only
eqDGLinkLabById :: DGLinkLab -> DGLinkLab -> Bool
eqDGLinkLabById l1 l2 = let
i1 = dgl_id l1
i2 = dgl_id l2
in if i1 > defaultEdgeId && i2 > defaultEdgeId then i1 == i2 else
error "eqDGLinkLabById"
-- ** setting index maps
{- these index maps should be global for all libraries,
therefore their contents need to be copied -}
cpIndexMaps :: DGraph -> DGraph -> DGraph
cpIndexMaps from to =
to { sigMap = sigMap from
, thMap = thMap from
, morMap = morMap from }
setSigMapDG :: Map.Map SigId G_sign -> DGraph -> DGraph
setSigMapDG m dg = dg { sigMap = m }
setThMapDG :: Map.Map ThId G_theory -> DGraph -> DGraph
setThMapDG m dg = dg { thMap = m }
setMorMapDG :: Map.Map MorId G_morphism -> DGraph -> DGraph
setMorMapDG m dg = dg { morMap = m }
-- ** looking up in index maps
lookupSigMapDG :: SigId -> DGraph -> Maybe G_sign
lookupSigMapDG i = Map.lookup i . sigMap
lookupThMapDG :: ThId -> DGraph -> Maybe G_theory
lookupThMapDG i = Map.lookup i . thMap
lookupMorMapDG :: MorId -> DGraph -> Maybe G_morphism
lookupMorMapDG i = Map.lookup i . morMap
-- ** getting index maps and their maximal index
sigMapI :: DGraph -> (Map.Map SigId G_sign, SigId)
sigMapI = getMapAndMaxIndex startSigId sigMap
thMapI :: DGraph -> (Map.Map ThId G_theory, ThId)
thMapI = getMapAndMaxIndex startThId thMap
morMapI :: DGraph -> (Map.Map MorId G_morphism, MorId)
morMapI = getMapAndMaxIndex startMorId morMap
-- ** lookup other graph parts
lookupGlobalEnvDG :: IRI -> DGraph -> Maybe GlobalEntry
lookupGlobalEnvDG sid dg = let
gEnv = globalEnv dg
shortIRI = iriToStringShortUnsecure sid
in case Map.lookup sid gEnv of
Nothing -> Map.lookup (nullIRI { abbrevPath = shortIRI }) gEnv
m -> m
-- | lookup a reference node for a given libname and node
lookupInAllRefNodesDG :: DGNodeInfo -> DGraph -> Maybe Node
lookupInAllRefNodesDG ref dg = case ref of
DGRef libn refn ->
Map.lookup (libn, refn) $ allRefNodes dg
_ -> Nothing
-- ** lookup nodes by their names or other properties
{- | lookup a node in the graph by its name, using showName
to convert nodenames. -}
lookupNodeByName :: String -> DGraph -> [LNode DGNodeLab]
lookupNodeByName s dg = map (\ n -> (n, labDG dg n)) . Set.toList
. MapSet.lookup s $ nameMap dg
lookupUniqueNodeByName :: String -> DGraph -> Maybe (LNode DGNodeLab)
lookupUniqueNodeByName s dg =
case Set.toList $ MapSet.lookup s $ nameMap dg of
[n] -> do
l <- lab (dgBody dg) n
return (n, l)
_ -> Nothing
{- | filters all local nodes in the graph by their names, using showName
to convert nodenames. See also 'lookupNodeByName'. -}
filterLocalNodesByName :: String -> DGraph -> [LNode DGNodeLab]
filterLocalNodesByName s = filter (not . isDGRef . snd) . lookupNodeByName s
{- | filter all ref nodes in the graph by their names, using showName
to convert nodenames. See also 'lookupNodeByName'. -}
filterRefNodesByName :: String -> LibName -> DGraph -> [LNode DGNodeLab]
filterRefNodesByName s ln =
filter (\ (_, lbl) -> isDGRef lbl && dgn_libname lbl == ln)
. lookupNodeByName s
{- | Given a 'LibEnv' we search each DGraph in it for a (maybe referenced) node
with the given name. We return the labeled node and the Graph where this node
resides as local node. See also 'lookupLocalNode'. -}
lookupLocalNodeByNameInEnv :: LibEnv -> String
-> Maybe (DGraph, LNode DGNodeLab)
lookupLocalNodeByNameInEnv le s = f $ Map.elems le where
f [] = Nothing
f (dg : l) = case lookupNodeByName s dg of
(nd, _) : _ -> Just $ lookupLocalNode le dg nd
_ -> f l
{- | We search only the given 'DGraph' for a (maybe referenced) node with the
given name. We return the labeled node and the Graph where this node resides
as local node. See also 'lookupLocalNode'. -}
lookupLocalNodeByName :: LibEnv -> DGraph -> String
-> Maybe (DGraph, LNode DGNodeLab)
lookupLocalNodeByName le dg s =
case lookupNodeByName s dg of
(nd, _) : _ -> Just $ lookupLocalNode le dg nd
_ -> Nothing
{- | Given a Node and a 'DGraph' we follow the node to the graph where it is
defined as a local node. -}
lookupLocalNode :: LibEnv -> DGraph -> Node -> (DGraph, LNode DGNodeLab)
lookupLocalNode le dg n = let
(_, refDg, p) = lookupRefNodeM le Nothing dg n
in (refDg, p)
{- | Given a Node and a 'DGraph' we follow the node to the graph where it is
defined. -}
lookupRefNode :: LibEnv -> LibName -> DGraph -> Node
-> (LibName, DGraph, LNode DGNodeLab)
lookupRefNode le ln dg n = let
(mLn, refDg, p) = lookupRefNodeM le Nothing dg n
in (fromMaybe ln mLn, refDg, p)
lookupRefNodeM :: LibEnv -> Maybe LibName -> DGraph -> Node
-> (Maybe LibName, DGraph, LNode DGNodeLab)
lookupRefNodeM le libName dg n = let x = labDG dg n in
if isDGRef x then let
ln = dgn_libname x
n' = dgn_node x in lookupRefNodeM le (Just ln) (lookupDGraph ln le) n'
else (libName, dg, (n, x))
-- ** accessing the actual graph
-- | get the next available node id
getNewNodeDG :: DGraph -> Node
getNewNodeDG = Tree.getNewNode . dgBody
-- | get all the nodes
labNodesDG :: DGraph -> [LNode DGNodeLab]
labNodesDG = labNodes . dgBody
-- | get all the edges
labEdgesDG :: DGraph -> [LEdge DGLinkLab]
labEdgesDG = labEdges . dgBody
-- | checks if a DG is empty or not.
isEmptyDG :: DGraph -> Bool
isEmptyDG = isEmpty . dgBody
-- | checks if a given node belongs to a given DG
gelemDG :: Node -> DGraph -> Bool
gelemDG n = gelem n . dgBody
-- | get all the incoming ledges of the given node in a given DG
innDG :: DGraph -> Node -> [LEdge DGLinkLab]
innDG = inn . dgBody
-- | get all the outgoing ledges of the given node in a given DG
outDG :: DGraph -> Node -> [LEdge DGLinkLab]
outDG = out . dgBody
-- | get all the nodes of the given DG
nodesDG :: DGraph -> [Node]
nodesDG = nodes . dgBody
-- | tries to get the label of the given node in a given DG
labDG :: DGraph -> Node -> DGNodeLab
labDG dg = fromMaybe (error "labDG") . lab (dgBody dg)
-- | tries to get the label of the given node in a given RT
labRT :: DGraph -> Node -> RTNodeLab
labRT dg = fromMaybe (error "labRT") . lab (refTree dg)
-- | get the name of a node from the number of node
getNameOfNode :: Node -> DGraph -> String
getNameOfNode index gc = getDGNodeName $ labDG gc index
-- | gets the given number of new node-ids in a given DG.
newNodesDG :: Int -> DGraph -> [Node]
newNodesDG n = newNodes n . dgBody
-- | get the context and throw input string as error message
safeContextDG :: String -> DGraph -> Node -> Context DGNodeLab DGLinkLab
safeContextDG s = safeContext s . dgBody where
safeContext err g v = -- same as context with extra message
fromMaybe (error $ err ++ ": Match Exception, Node: " ++ show v)
. fst $ match v g
-- ** manipulate graph
-- | sets the node with new label and returns the new graph and the old label
labelNodeDG :: LNode DGNodeLab -> DGraph -> (DGraph, DGNodeLab)
labelNodeDG p@(n, lbl) dg =
let (b, l) = Tree.labelNode p $ dgBody dg
oldN = getDGNodeName l
newN = getDGNodeName lbl
oldInf = nodeInfo l
newInf = nodeInfo lbl
nMap = nameMap dg
refs = allRefNodes dg
in (dg { dgBody = b
, nameMap = if oldN == newN then nMap else
MapSet.insert newN n $ MapSet.delete oldN n nMap
, allRefNodes = case (oldInf, newInf) of
(DGRef libn refn, DGRef nLibn nRefn) ->
if newInf == oldInf then refs
else Map.insert (nLibn, nRefn) n
$ Map.delete (libn, refn) refs
(DGRef libn refn, _) -> Map.delete (libn, refn) refs
(_, DGRef nLibn nRefn) -> Map.insert (nLibn, nRefn) n refs
_ -> refs }, l)
-- | delete the node out of the given DG
delNodeDG :: Node -> DGraph -> DGraph
delNodeDG n dg = case match n $ dgBody dg of
(Just (_, _, lbl, _), rg) -> let refs = allRefNodes dg in dg
{ dgBody = rg
, nameMap = MapSet.delete (getDGNodeName lbl) n $ nameMap dg
, allRefNodes = case nodeInfo lbl of
DGRef libn refn -> Map.delete (libn, refn) refs
_ -> refs }
_ -> error $ "delNodeDG " ++ show n
-- | delete a list of nodes out of the given DG
delNodesDG :: [Node] -> DGraph -> DGraph
delNodesDG = flip $ foldr delNodeDG
-- | insert a new node into given DGraph
insNodeDG :: LNode DGNodeLab -> DGraph -> DGraph
insNodeDG n@(i, l) dg = let refs = allRefNodes dg in dg
{ dgBody = insNode n $ dgBody dg
, nameMap = MapSet.insert (getDGNodeName l) i $ nameMap dg
, allRefNodes = case nodeInfo l of
DGRef libn refn -> Map.insert (libn, refn) i refs
_ -> refs }
-- | inserts a lnode into a given DG
insLNodeDG :: LNode DGNodeLab -> DGraph -> DGraph
insLNodeDG n@(v, _) g =
if gelemDG v g then error $ "insLNodeDG " ++ show v else insNodeDG n g
-- | insert a list of labeled nodes into a given DGraph
insNodesDG :: [LNode DGNodeLab] -> DGraph -> DGraph
insNodesDG = flip $ foldr insNodeDG
-- | delete a labeled edge out of the given DG
delLEdgeDG :: LEdge DGLinkLab -> DGraph -> DGraph
delLEdgeDG e g = g
{ dgBody = Tree.delLEdge (comparing dgl_id) e
$ dgBody g }
-- | inserts an edge between two nodes, labelled with inclusion
insInclEdgeDG :: LogicGraph -> DGraph -> NodeSig -> NodeSig ->
Result DGraph
insInclEdgeDG lgraph dg s t = do
incl <- ginclusion lgraph (getSig s) (getSig t)
let l = globDefLink incl DGLinkImports
(_, dg') = insLEdgeDG (getNode s, getNode t, l) dg
return dg'
-- | insert a labeled edge into a given DG, return possibly new id of edge
insLEdgeDG :: LEdge DGLinkLab -> DGraph -> (LEdge DGLinkLab, DGraph)
insLEdgeDG (s, t, l) g =
let eId = dgl_id l
nId = getNewEdgeId g
newId = eId == defaultEdgeId
e = (s, t, if newId then l { dgl_id = nId } else l)
in (e, g
{ getNewEdgeId = if newId then incEdgeId nId else max nId $ incEdgeId eId
, dgBody = fst $ Tree.insLEdge True compareLinks e $ dgBody g })
compareLinks :: DGLinkLab -> DGLinkLab -> Ordering
compareLinks l1 l2 = if eqDGLinkLabContent l1 { dgl_id = defaultEdgeId } l2
then EQ else comparing dgl_id l1 l2
{- | tries to insert a labeled edge into a given DG, but if this edge
already exists, then does nothing. -}
insLEdgeNubDG :: LEdge DGLinkLab -> DGraph -> DGraph
insLEdgeNubDG (v, w, l) g =
let oldEdgeId = getNewEdgeId g
(ng, change) = Tree.insLEdge False compareLinks
(v, w, l { dgl_id = oldEdgeId }) $ dgBody g
in
g { getNewEdgeId = if change then incEdgeId oldEdgeId else oldEdgeId
, dgBody = ng }
{- | inserts a new edge into the DGraph using its own edgeId.
ATTENTION: the caller must ensure that an edgeId is not used twice -}
insEdgeAsIs :: LEdge DGLinkLab -> DGraph -> DGraph
insEdgeAsIs (v, w, l) g = let
ei = dgl_id l
in if ei == defaultEdgeId then error "illegal link id" else
g { dgBody = fst $ Tree.insLEdge False compareLinks
(v, w, l) $ dgBody g }
-- | insert a list of labeled edge into a given DG
insEdgesDG :: [LEdge DGLinkLab] -> DGraph -> DGraph
insEdgesDG = flip $ foldr insLEdgeNubDG
-- | merge a list of lnodes and ledges into a given DG
mkGraphDG :: [LNode DGNodeLab] -> [LEdge DGLinkLab] -> DGraph -> DGraph
mkGraphDG ns ls = insEdgesDG ls . insNodesDG ns
-- | get links by id (inefficiently)
getDGLinksById :: EdgeId -> DGraph -> [LEdge DGLinkLab]
getDGLinksById e = filter (\ (_, _, l) -> e == dgl_id l) . labEdgesDG
-- | find a unique link given its source node and edgeId
lookupUniqueLink :: Monad m => Node -> EdgeId -> DGraph -> m (LEdge DGLinkLab)
lookupUniqueLink s ei dg = let (Just (_, _, _, outs), _) = match s $ dgBody dg
in case filter ((== ei) . dgl_id . fst) outs of
    [] -> fail $ "could not find linkId #" ++ show ei
    [(lbl, t)] -> return (s, t, lbl)
    _ -> fail $ "ambiguous occurrence of linkId #" ++ show ei
-- ** top-level functions
-- | initializes the MVar for locking if necessary
initLocking :: DGraph -> LNode DGNodeLab -> IO (DGraph, DGNodeLab)
initLocking dg (node, dgn) = do
lock <- newEmptyMVar
let dgn' = dgn { dgn_lock = Just lock }
return (fst $ labelNodeDG (node, dgn') dg, dgn')
-- | returns the DGraph that belongs to the given library name
lookupDGraph :: LibName -> LibEnv -> DGraph
lookupDGraph ln = Map.findWithDefault (error $ "lookupDGraph " ++ show ln) ln
{- | compute the theory of a given node.
If this node is a DGRef, the referenced node is looked up first. -}
computeLocalTheory :: Monad m => LibEnv -> LibName -> Node -> m G_theory
computeLocalTheory libEnv ln =
computeLocalNodeTheory libEnv $ lookupDGraph ln libEnv
computeLocalNodeTheory :: Monad m => LibEnv -> DGraph -> Node -> m G_theory
computeLocalNodeTheory libEnv dg = computeLocalLabelTheory libEnv . labDG dg
computeLocalLabelTheory :: Monad m => LibEnv -> DGNodeLab -> m G_theory
computeLocalLabelTheory libEnv nodeLab =
if isDGRef nodeLab
then
computeLocalTheory libEnv (dgn_libname nodeLab) $ dgn_node nodeLab
else return $ dgn_theory nodeLab
-- ** test link types
liftE :: (DGLinkType -> a) -> LEdge DGLinkLab -> a
liftE f (_, _, edgeLab) = f $ dgl_type edgeLab
isGlobalDef :: DGLinkType -> Bool
isGlobalDef lt = case lt of
ScopedLink Global DefLink _ -> True
_ -> False
isLocalDef :: DGLinkType -> Bool
isLocalDef lt = case lt of
ScopedLink Local DefLink _ -> True
_ -> False
isHidingDef :: DGLinkType -> Bool
isHidingDef lt = case lt of
HidingDefLink -> True
_ -> False
isDefEdge :: DGLinkType -> Bool
isDefEdge edge = case edge of
ScopedLink _ DefLink _ -> True
HidingDefLink -> True
FreeOrCofreeDefLink _ _ -> True
_ -> False
isLocalEdge :: DGLinkType -> Bool
isLocalEdge edge = case edge of
ScopedLink Local _ _ -> True
_ -> False
isHidingEdge :: DGLinkType -> Bool
isHidingEdge edge = case edge of
HidingDefLink -> True
HidingFreeOrCofreeThm Nothing _ _ _ -> True
_ -> False
-- ** create link types
hidingThm :: Node -> GMorphism -> DGLinkType
hidingThm n m = HidingFreeOrCofreeThm Nothing n m LeftOpen
globalThm :: DGLinkType
globalThm = localOrGlobalThm Global None
localThm :: DGLinkType
localThm = localOrGlobalThm Local None
globalConsThm :: Conservativity -> DGLinkType
globalConsThm = localOrGlobalThm Global
localConsThm :: Conservativity -> DGLinkType
localConsThm = localOrGlobalThm Local
localOrGlobalThm :: Scope -> Conservativity -> DGLinkType
localOrGlobalThm sc = ScopedLink sc (ThmLink LeftOpen) . mkConsStatus
localOrGlobalDef :: Scope -> Conservativity -> DGLinkType
localOrGlobalDef sc = ScopedLink sc DefLink . mkConsStatus
globalConsDef :: Conservativity -> DGLinkType
globalConsDef = localOrGlobalDef Global
globalDef :: DGLinkType
globalDef = localOrGlobalDef Global None
localDef :: DGLinkType
localDef = localOrGlobalDef Local None
-- ** link conservativity
getLinkConsStatus :: DGLinkType -> ConsStatus
getLinkConsStatus lt = case lt of
ScopedLink _ _ c -> c
_ -> mkConsStatus None
getEdgeConsStatus :: DGLinkLab -> ConsStatus
getEdgeConsStatus = getLinkConsStatus . dgl_type
getCons :: DGLinkType -> Conservativity
getCons = getConsOfStatus . getLinkConsStatus
-- | returns the Conservativity if the given edge has one, otherwise none
getConservativity :: LEdge DGLinkLab -> Conservativity
getConservativity (_, _, edgeLab) = getConsOfStatus $ getEdgeConsStatus edgeLab
-- | returns the conservativity of the given path
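-- (editor's note: the path must be non-empty, since 'minimum' fails on an empty list)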
getConservativityOfPath :: [LEdge DGLinkLab] -> Conservativity
getConservativityOfPath path = minimum [getConservativity e | e <- path]
-- * bottom up traversal
-- | Creates a LibName relation wrt dependencies via reference nodes
getLibDepRel :: LibEnv -> Rel.Rel LibName
getLibDepRel = Rel.transClosure
. Rel.fromSet . Map.foldWithKey (\ ln dg s ->
foldr ((\ x -> if isDGRef x then Set.insert (ln, dgn_libname x) else id)
. snd) s $ labNodesDG dg) Set.empty
getTopsortedLibs :: LibEnv -> [LibName]
getTopsortedLibs le = let
rel = getLibDepRel le
ls = reverse $ topsortedLibsWithImports rel
restLs = Set.toList $ Set.difference (Map.keysSet le) $ Rel.nodes rel
in ls ++ restLs
{- | Get imported libs in topological order, i.e. lib(s) without imports first.
The input lib-name will be last -}
dependentLibs :: LibName -> LibEnv -> [LibName]
dependentLibs ln le =
let rel = getLibDepRel le
ts = topsortedLibsWithImports rel
is = Set.toList (Rel.succs rel ln)
in reverse $ ln : intersect ts is
topsortedNodes :: DGraph -> [LNode DGNodeLab]
topsortedNodes dgraph = let dg = dgBody dgraph in
reverse $ postorderF $ dffWith (\ (_, n, nl, _) -> (n, nl)) (nodes dg)
$ efilter (\ (s, t, el) -> s /= t && isDefEdge (dgl_type el)) dg
changedPendingEdges :: DGraph -> [LEdge DGLinkLab]
changedPendingEdges dg = let
ls = filter (liftE $ not . isDefEdge) $ labEdgesDG dg
(ms, ps) = foldr (\ (s, t, l) (m, es) ->
let b = dglPending l
e = dgl_id l
ty = dgl_type l
in ( Map.insert e (b, s, t, proofBasis $ thmProofBasis ty) m
, if b && isLocalEdge ty then Set.insert e es else es))
(Map.empty, Set.empty) ls
close known =
let nxt = Map.keysSet $ Map.filter
(\ (_, _, _, s) -> not $ Set.null $ Set.intersection s known)
ms
new = Set.union nxt known
in if new == known then new else close new
aPs = close ps
in filter (\ (_, _, l) -> dglPending l /= Set.member (dgl_id l) aPs) ls
changedLocalTheorems :: DGraph -> LNode DGNodeLab -> [LEdge DGLinkLab]
changedLocalTheorems dg (v, lbl) =
case dgn_theory lbl of
G_theory _ _ _ sens _ ->
foldr (\ e@(_, _, el) l ->
let pend = dglPending el
psens = Map.keysSet $ OMap.filter isProvenSenStatus sens
in case thmLinkStatus $ dgl_type el of
Just (Proven (DGRuleLocalInference nms) _) | pend
== Set.isSubsetOf (Set.fromList $ map snd nms) psens -> e : l
_ -> l
) []
$ filter (liftE $ \ e -> isLocalEdge e && not (isLocalDef e))
$ innDG dg v
duplicateDefEdges :: DGraph -> [Edge]
duplicateDefEdges = concat .
filter (not . isSingle) . group . map (\ (s, t, _) -> (s, t))
. filter (liftE isDefEdge) . labEdgesDG
|
nevrenato/Hets_Fork
|
Static/DevGraph.hs
|
gpl-2.0
| 51,334 | 30 | 31 | 12,220 | 13,980 | 7,398 | 6,582 | 1,052 | 9 |
-- GenI surface realiser
-- Copyright (C) 2005 Carlos Areces and Eric Kow
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
The heavy lifting of GenI, the whole chart/agenda mechanism, can be
implemented in many ways. To make it easier to write different
algorithms for GenI and compare them, we provide a single interface
for what we call Builders.
This interface is then called by the Geni module and by the
graphical interface. Note that each builder has its own graphical
interface and that we do a similar thing in the graphical interface
code to make it possible to use these GUIs.
-}
module NLP.GenI.Builder (
TagDerivation, Builder(..), GenStatus(..),
lexicalSelection, FilterStatus(..),incrCounter, num_iterations,
(>-->),
num_comparisons, chart_size,
SemBitMap, defineSemanticBits, semToBitVector, bitVectorToSem, DispatchFilter, condFilter,
defaultStepAll,
BuilderState, UninflectedDisjunction(..), Input(..), unlessEmptySem,
initStats, Output, SentenceAut, run, queryCounter, defaultMetricNames, preInit
)
where
import Control.Monad.State.Strict
import Data.Bits (bit, (.&.), (.|.))
import Data.List (delete, nub, sort)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe, mapMaybe, maybeToList)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Tree (flatten)
import Prelude hiding (init)
import Control.DeepSeq
import Data.Generics (Data)
import Data.Typeable (Typeable)
import NLP.GenI.Automaton (NFA, automatonPathSets,
automatonPaths, numStates,
numTransitions)
import NLP.GenI.FeatureStructure (Flist, mkFeatStruct, sortFlist)
import NLP.GenI.Flag
import NLP.GenI.General (BitVector, geniBug, snd3, thd3)
import NLP.GenI.GeniVal (Collectable (collect),
DescendGeniVal (..), GeniVal,
finaliseVarsById)
import NLP.GenI.Lexicon (LexEntry)
import NLP.GenI.Morphology.Types
import NLP.GenI.Polarity (PolResult (..), buildAutomaton,
detectPolPaths)
import NLP.GenI.Pretty
import NLP.GenI.Semantics (Literal, Sem, SemInput)
import NLP.GenI.Statistics (Metric (IntMetric), Statistics,
addMetric, emptyStats,
incrIntMetric, queryIntMetric,
queryMetrics, updateMetrics)
import NLP.GenI.Tag (TagDerivation,
TagElem (idname, tsemantics, ttree),
dsChild, dsParent, setTidnums)
import NLP.GenI.TreeSchema (GNode (..), GType (Subs, Foot))
data GenStatus = Finished
| Active
| Error Text
data Builder st it = Builder
{ init :: Input -> [Flag] -> (st, Statistics)
-- ^ initialise the machine from the semantics and lexical selection
, step :: BuilderState st () -- ^ run a realisation step
, stepAll :: BuilderState st () -- ^ run all realisations steps until completion
--
, finished :: st -> GenStatus -- ^ determine if realisation is finished
, unpack :: st -> [Output] -- ^ unpack chart results into a list of sentences
, partial :: st -> [Output]
}
type Output = (Integer, LemmaPlusSentence, TagDerivation)
-- | To simplify interaction with the backend, we provide a single data
-- structure which represents all the inputs a backend could take.
data Input =
Input { inSemInput :: SemInput
, inLex :: [LexEntry] -- ^ for the debugger
, inCands :: [(TagElem, BitVector)] -- ^ tag tree
}
-- Uninflected words and sentences
-- | A SentenceAut represents a set of sentences in the form of an automaton.
-- The labels of the automaton are the words of the sentence. But note!
-- "word" in the sentence is in fact a tuple (lemma, inflectional feature
-- structures). Normally, the states are defined as integers, with the
-- only requirement being that each one, naturally enough, is unique.
type SentenceAut = NFA Int LemmaPlus
data UninflectedDisjunction = UninflectedDisjunction [Text] (Flist GeniVal) deriving (Data, Typeable)
instance DescendGeniVal UninflectedDisjunction where
descendGeniVal s (UninflectedDisjunction a v) = {-# SCC "descendGeniVal" #-} UninflectedDisjunction a (descendGeniVal s v)
instance Collectable UninflectedDisjunction where
collect (UninflectedDisjunction _ b) = collect b
-- BuilderState
-- To cleanly separate the tracking of statistics from the core functionality of a
-- builder, we use a State transformer to thread a Statistics state monad inside of
-- our main monad.
type BuilderState s a = StateT s (State Statistics) a
-- ----------------------------------------------------------------------
-- Helper functions for Builders
-- ----------------------------------------------------------------------
-- Initialisation
--
-- There are a few things that need to be run before even initialising the builder.
-- One of these is running some of the optimisations (namely the polarity stuff),
-- which is made complicated by the fact that they are optional. Another of these
-- is to assign each of the trees a unique ID. Note that this has to be done
-- after the polarity optimisation because this optimisation may introduce new
-- items into the lexical selection. Finally, we must also make sure we perform
-- alpha conversion so that unification does not do the wrong thing when two trees
-- have the same variables.
preInit :: Input -> [Flag] -> (Input, PolResult)
preInit input flags_ =
let (cand,_) = unzip $ inCands input
seminput = inSemInput input
--
extraPol = Map.empty
polsToDetect = fromMaybe (error "there should be a default for --detect-pols")
$ getFlag DetectPolaritiesFlg flags_
rootFeat = mkFeatStruct $ getListFlag RootFeatureFlg flags_
-- do any optimisations
isPol = hasOpt Polarised flags_
-- polarity optimisation (if enabled)
autstuff = buildAutomaton polsToDetect rootFeat extraPol seminput cand
autpaths = map concat . automatonPathSets . prFinal $ autstuff
combosPol = if isPol then autpaths else [considerHasSem cand]
considerHasSem = filter (not . null . tsemantics)
-- polarity automaton construction uses the zero literal semantic
-- items, but it may be safer to filter them out now if we are not
-- using it
-- chart sharing optimisation
(cands2, pathIds) = unzip $ detectPolPaths combosPol
--
fixate ts ps = zip (map finaliseVarsById $ setTidnums ts) ps
input2 = input { inCands = fixate cands2 pathIds
, inSemInput = (prSem autstuff, snd3 seminput, thd3 seminput) }
-- note: autstuff is only useful for the graphical debugger
in (input2, autstuff)
-- | Equivalent to 'id' unless the input contains an empty or uninstantiated
-- semantics
unlessEmptySem :: Input -> [Flag] -> a -> a
unlessEmptySem input _
| null semanticsErr = id
| otherwise = error semanticsErr
where
(cands,_) = unzip $ inCands input
nullSemCands = [ idname t | t <- cands, (null.tsemantics) t ]
unInstSemCands = [ idname t | t <- cands, not $ Map.null $ collect (tsemantics t) Map.empty ]
nullSemErr =
"The following trees have a null semantics: " ++
T.unpack (T.unwords nullSemCands)
unInstSemErr =
"The following trees have an uninstantiated semantics: " ++
T.unpack (T.unwords unInstSemCands)
semanticsErr =
(if null nullSemCands then "" else nullSemErr ++ "\n") ++
(if null unInstSemCands then "" else unInstSemErr)
-- ----------------------------------------------------------------------
-- Running a surface realiser
-- ----------------------------------------------------------------------
-- | Performs surface realisation from an input semantics and a lexical selection.
--
-- Statistics tracked
--
-- * pol_used_bundles - number of bundled paths through the polarity automaton.
-- see 'NLP.GenI.Automaton.automatonPathSets'
--
-- * pol_used_paths - number of paths through the final automaton
--
-- * pol_seed_paths - number of paths through the seed automaton (i.e. with no polarities).
-- This is normally just 1, unless you have multi-literal semantics
--
-- * pol_total_states - combined number of states in the all the polarity automata
--
-- * pol_total_trans - combined number of transitions in all polarity automata
--
-- * pol_max_states - number of states in the polarity automaton with the most states
--
-- * pol_max_trans - number of transitions in the polarity automaton with the most transitions
--
-- * sem_literals - number of literals in the input semantics
--
-- * lex_trees - total number of lexically selected trees
--
-- * lex_nodes - total number of nodes of any sort in lexically selected trees
--
-- * lex_subst_nodes - total number of substitution nodes in lexically selected trees
--
-- * lex_foot_nodes - total number of foot nodes in lexically selected trees
--
-- * plex_... - same as the lex_ equivalent, but after polarity filtering
run :: Builder st it -> Input -> [Flag] -> (st, Statistics)
run builder input flags_ =
let -- 0 normalise the config
flags = modifyFlag RootFeatureFlg sortFlist flags_
-- 1 run the setup stuff
(input2, autstuff) = preInit input flags
auts = map snd3 (prIntermediate autstuff)
-- 2 call the init stuff
(iSt, iStats) = init builder input2 flags
-- 2b extra statistics
autpaths = map concat . automatonPathSets . prFinal $ autstuff
countsFor ts = (length ts, length nodes, length sn, length an)
where nodes = concatMap (flatten.ttree) ts
sn = [ n | n <- nodes, gtype n == Subs ]
an = [ n | n <- nodes, gtype n == Foot ]
(tsem,_,_) = inSemInput input
cands = nub . map fst $ inCands input
cands2 = nub . concatMap concat . automatonPathSets . prFinal $ autstuff
countUp = do incrCounter "sem_literals" $ length tsem
--
incrCounter "lex_subst_nodes" snl
incrCounter "lex_foot_nodes" anl
incrCounter "lex_nodes" nl
incrCounter "lex_trees" tl
-- node count after polarities are taken into account
incrCounter "plex_subst_nodes" snl2
incrCounter "plex_foot_nodes" anl2
incrCounter "plex_nodes" nl2
incrCounter "plex_trees" tl2
where (tl , nl , snl , anl ) = countsFor cands
(tl2, nl2, snl2, anl2) = countsFor cands2
-- 3 step through the whole thing
stepAll_ = do countUp
incrCounter "pol_used_bundles" $ length autpaths
incrCounter "pol_used_paths" $ length . automatonPaths . prFinal $ autstuff
incrCounter "pol_seed_paths" $ length . automatonPaths . prInitial $ autstuff
incrCounter "pol_total_states" $ sum $ map numStates auts
incrCounter "pol_total_trans" $ sum $ map numTransitions auts
incrCounter "pol_max_states" $ maximum $ map numStates auts
incrCounter "pol_max_trans" $ maximum $ map numTransitions auts
stepAll builder
in runState (execStateT stepAll_ iSt) iStats
-- ----------------------------------------------------------------------
-- Semantics and bit vectors
-- ----------------------------------------------------------------------
type SemBitMap = Map.Map (Literal GeniVal) BitVector
-- | assign a bit vector value to each literal in the semantics
-- the resulting map can then be used to construct a bit vector
-- representation of the semantics
defineSemanticBits :: Sem -> SemBitMap
defineSemanticBits sem = Map.fromList $ zip sem bits
where
bits = map bit [0..] -- 0001, 0010, 0100...
semToBitVector :: SemBitMap -> Sem -> BitVector
semToBitVector bmap sem = foldr (.|.) 0 $ map doLookup sem
where doLookup p =
case Map.lookup p bmap of
Nothing -> geniBug $ "predicate " ++ prettyStr p ++ " not found in semanticBit map"
Just b -> b
bitVectorToSem :: SemBitMap -> BitVector -> Sem
bitVectorToSem bmap vector =
mapMaybe tryKey $ Map.toList bmap
where tryKey (p,k) = if (k .&. vector == k) then Just p else Nothing
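-- A small illustration of the intended round trip: with
-- @bmap = defineSemanticBits sem@,
--
-- > bitVectorToSem bmap (semToBitVector bmap sem') == sem'
--
-- holds (up to ordering) for any @sem'@ whose literals all occur in @sem@.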
-- ----------------------------------------------------------------------
-- Generate step
-- ----------------------------------------------------------------------
-- | Default implementation for the 'stepAll' function in 'Builder'
defaultStepAll :: Builder st it -> BuilderState st ()
defaultStepAll b =
do s <- get
case finished b s of
Active -> step b >> defaultStepAll b
_ -> return ()
-- | Dispatching consists of assigning a chart item to the right part of the
-- chart (agenda, trash, results list, etc). This is implemented as a
-- series of filters which can either fail or succeed. If a filter fails,
-- it may modify the item before passing it on to future filters.
type DispatchFilter s a = a -> s (FilterStatus a)
data FilterStatus a = Filtered | NotFiltered a
-- | Sequence two dispatch filters.
(>-->) :: (Monad s) => DispatchFilter s a -> DispatchFilter s a -> DispatchFilter s a
f >--> f2 = \x -> f x >>= next
where
next y@Filtered = return y
next (NotFiltered x2) = f2 x2
-- | If the item meets some condition, use the first filter, otherwise
-- use the second one.
condFilter :: (Monad s) => (a -> Bool)
-> DispatchFilter s a -> DispatchFilter s a
-> DispatchFilter s a
condFilter cond f1 f2 = \x -> if cond x then f1 x else f2 x
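-- As an illustrative sketch (the filter names below are hypothetical and
-- would be supplied by a concrete builder), dispatch is meant to be assembled
-- by chaining filters with '>-->' and branching with 'condFilter':
--
-- > dispatch = condFilter isResult
-- >              sendToResults
-- >              (trashIfRedundant >--> addToAgenda)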
-- ----------------------------------------------------------------------
-- Statistics
-- ----------------------------------------------------------------------
modifyStats :: (Metric -> Metric) -> BuilderState st ()
modifyStats fn = lift $ modify $ updateMetrics fn
incrCounter :: String -> Int -> BuilderState st ()
incrCounter key n = modifyStats (incrIntMetric key n)
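-- For example, a concrete builder's 'step' might bump the iteration counter
-- once per realisation step (the metric name is defined below):
--
-- > incrCounter num_iterations 1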
queryCounter :: String -> Statistics -> Maybe Int
queryCounter key s =
case queryMetrics (queryIntMetric key) s of
[] -> Nothing
[c] -> Just c
_ -> geniBug $ "More than one instance of the metric: " ++ key
-- Command line configuration
initStats :: [Flag] -> Statistics
initStats flags_ =
execState (mapM_ addMetric metrics) emptyStats
where
mdefault ms = if "default" `elem` ms then defaultMetricNames else []
identifyMs :: [String] -> [Metric]
identifyMs ms = map namedMetric $ mdefault ms ++ delete "default" ms
metrics = identifyMs $ fromMaybe [] $ getFlag MetricsFlg flags_
namedMetric :: String -> Metric
-- the default case is that it's an int metric
namedMetric n = IntMetric n 0
-- Note that the strings here are command-line strings, not metric names!
defaultMetricNames :: [ String ]
defaultMetricNames = [ num_iterations, chart_size, num_comparisons, gen_time ]
-- Common counters
--
-- These numbers allow us to keep track of how efficient our generator is
-- and where we are in the process (how many steps we've taken, etc)
num_iterations, chart_size, num_comparisons, gen_time :: String
num_iterations = "iterations"
chart_size = "chart_size"
num_comparisons = "comparisons"
gen_time = "gen_time"
-- ----------------------------------------------------------------------
-- pretty printing
-- ----------------------------------------------------------------------
instance Pretty GenStatus where
pretty Finished = "finished"
pretty Active = "in progress"
pretty (Error x) = "error:" <+> x
-- ----------------------------------------------------------------------
-- strictly API-ish bits
-- ----------------------------------------------------------------------
-- | The names of lexically selected chart items used in a derivation
lexicalSelection :: TagDerivation -> [Text]
lexicalSelection = sort . nub
. concatMap (\d -> dsChild d : maybeToList (dsParent d))
{-!
deriving instance NFData Input
!-}
-- GENERATED START
instance NFData Input where
rnf (Input x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
-- GENERATED STOP
|
kowey/GenI
|
src/NLP/GenI/Builder.hs
|
gpl-2.0
| 17,790 | 0 | 15 | 4,570 | 3,053 | 1,710 | 1,343 | 204 | 3 |
module Prime where
import GDCN.Trusted.Data.Binary
import Data.ByteString.Lazy (ByteString)
--
run :: [ByteString] -> (ByteString, String)
run (h:_) = let num = decode h :: Integer -- Reads input of task
result = num + 100
in (encode result, show result)
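-- Illustrative call (assuming 'encode' and 'decode' round-trip Integers):
--
-- > run [encode (42 :: Integer)] -- ~> (encode (142 :: Integer), "142")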
|
GDCN/GDCN
|
GDCN_proj/dGDCN/jobs/TrivialJob/code/Increment.hs
|
gpl-3.0
| 288 | 0 | 9 | 73 | 95 | 55 | 40 | 7 | 1 |
instance Monad (Cont r) where
ka >>= kab = Cont (\hb -> runCont ka (\a -> runCont (kab a) hb))
return a = Cont (\ha -> ha a)
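-- A quick sanity check of this instance, assuming the usual
-- @newtype Cont r a = Cont { runCont :: (a -> r) -> r }@ from the
-- surrounding chapter:
--
-- > runCont (return 1 >>= \x -> return (x + 1)) id -- evaluates to 2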
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.5/code/haskell/snippet29.hs
|
gpl-3.0
| 128 | 0 | 14 | 31 | 83 | 41 | 42 | 3 | 0 |
{- |
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
module Mtlstats.Control.CreateGoalie (createGoalieC) where
import Control.Monad.Trans.State (gets, modify)
import Lens.Micro ((^.), (.~), (?~), (%~), to)
import qualified UI.NCurses as C
import Mtlstats.Actions
import Mtlstats.Format
import Mtlstats.Handlers
import Mtlstats.Prompt
import Mtlstats.Types
-- | Handles goalie creation
createGoalieC :: CreateGoalieState -> Controller
createGoalieC cgs
| null $ cgs^.cgsNumber = getGoalieNumC
| null $ cgs^.cgsName = getGoalieNameC
| null $ cgs^.cgsRookieFlag = getRookieFlagC
| null $ cgs^.cgsActiveFlag = getActiveFlagC
| otherwise = confirmCreateGoalieC
getGoalieNumC :: Controller
getGoalieNumC = promptController goalieNumPrompt
getGoalieNameC :: Controller
getGoalieNameC = promptController goalieNamePrompt
getRookieFlagC :: Controller
getRookieFlagC = Controller
{ drawController = const $ do
C.drawString "Is this goalie a rookie? (Y/N)"
return C.CursorInvisible
, handleController = \e -> do
modify $ case ynHandler e of
Just True -> progMode.createGoalieStateL
%~ (cgsRookieFlag ?~ True)
. (cgsActiveFlag ?~ True)
rf -> progMode.createGoalieStateL.cgsRookieFlag .~ rf
return True
}
getActiveFlagC :: Controller
getActiveFlagC = Controller
{ drawController = const $ do
C.drawString "Is this goalie active? (Y/N)"
return C.CursorInvisible
, handleController = \e -> do
modify $ progMode.createGoalieStateL.cgsActiveFlag .~ ynHandler e
return True
}
confirmCreateGoalieC :: Controller
confirmCreateGoalieC = Controller
{ drawController = \s -> do
let cgs = s^.progMode.createGoalieStateL
C.drawString $ unlines
$ labelTable
[ ( "Goalie number", maybe "?" show $ cgs^.cgsNumber )
, ( "Goalie name", cgs^.cgsName )
, ( "Rookie", maybe "?" show $ cgs^.cgsRookieFlag )
, ( "Active", maybe "?" show $ cgs^.cgsActiveFlag )
]
++ [ ""
, "Create goalie: are you sure? (Y/N)"
]
return C.CursorInvisible
, handleController = \e -> do
cgs <- gets (^.progMode.createGoalieStateL)
let
success = cgs^.cgsSuccessCallback
failure = cgs^.cgsFailureCallback
case ynHandler e of
Just True -> do
gid <- gets (^.database.dbGoalies.to length)
let rookie = cgs^.cgsRookieFlag == Just True
modify addGoalie
if rookie
then success
else modify $ progMode.editGoalieStateL
%~ (egsSelectedGoalie ?~ gid)
. (egsMode .~ EGLtGames True)
. (egsCallback .~ success)
Just False -> failure
Nothing -> return ()
return True
}
|
mtlstats/mtlstats
|
src/Mtlstats/Control/CreateGoalie.hs
|
gpl-3.0
| 3,473 | 0 | 20 | 843 | 743 | 389 | 354 | -1 | -1 |
{- This file is part of PhoneDirectory.
Copyright (C) 2009 Michael Steele
PhoneDirectory is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PhoneDirectory is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PhoneDirectory. If not, see <http://www.gnu.org/licenses/>.
-}
module Main where
import Graphics.UI.WX (start)
import System.Environment (getArgs)
import GUI
main :: IO ()
main = do
args <- getArgs
start $ mainWindow $ if null args then Nothing else Just (head args)
|
mikesteele81/Phone-Directory
|
src/Main.hs
|
gpl-3.0
| 946 | 0 | 11 | 194 | 82 | 45 | 37 | 8 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Parse where
import qualified Lexer as L
import AST
import Text.Parsec hiding (State)
import Data.Text hiding (break)
import Control.Monad.State hiding (void,sequence)
import Data.Functor.Identity(Identity)
import Control.Applicative hiding ((<|>),many)
import Prelude hiding (break,sequence)
parseTiger text = fst $ runState (runParserT (L.ws *> expression) () "" text) 0
parseE text = fst $ runState (runParserT expression () "" text) 0
expression = letE <|> arithmetic
<|> assignment <|> ifThenElse <|> ifThen
<|> while <|> for <|> break
lvalue = identifier >>= (rest . LValueId)
where rest inner = (LValueField inner <$> (L.dot >> identifier) >>= rest)
<|> (LValueSubscript inner <$> L.brackets (expression) >>= rest)
<|> return inner
identifier = (\(L.Identifier i) -> i) <$> L.identifier'
nil = L.nil' >> return Nil
sequence = Seq <$> L.parens (sepBy expression L.semi)
void = Void <$ try (L.lParen >> L.rParen)
int = L.int' >>= (\(L.Int i) -> return $ IntLit i)
stringLit = (\(L.StringLit s) -> StringLit s) <$> L.stringLit'
functionCall = do
name <- try (identifier <* L.lParen)
args <- sepBy expression L.comma
_ <- L.rParen
return $ FunctionCall name args
arithmetic = boolOr
where negation = Negation <$> (L.minus >> factor)
multiplicative = chainl1 (negation <|> factor) multiplicativeOps
where multiplicativeOps = (L.times *> pure Mult) <|> (L.div *> pure Div)
additive = chainl1 multiplicative additiveOps
where additiveOps = (L.plus *> pure Add) <|> (L.minus *> pure Sub)
comparison = chainl1 additive compareOps
where compareOps = (L.eq *> pure (Comp Eq)) <|> (L.neq *> pure (Comp Neq))
<|> (L.gt *> pure (Comp Gt)) <|> (L.lt *> pure (Comp Lt))
<|> (L.gte *> pure (Comp Gte)) <|> (L.lte *> pure (Comp Lte))
        boolAnd = chainl1 comparison (L.and *> pure And)
        boolOr = chainl1 boolAnd (L.or *> pure Or)
factor = literal <|> sequence <|> functionCall <|> assignment
literal = int <|> void <|> record <|> array <|> stringLit <|> nil
record = Record <$> try (identifier <* L.lBrace) <*> sepBy1 recordField L.comma <* L.rBrace <*> pure ()
where recordField = do
id <- identifier
_ <- L.eq
expr <- expression
return (id,expr)
array = try $ Array <$> (identifier <* L.lBrack) <*> expression <* L.rBrack <* L.reserved "of" <*> expression <*> pure ()
assignment = (Assignment <$> try (lvalue <* L.assign') <*> expression <*> pure ()) <|> lvalue
ifThenElse = do
(IfThen cond e1 a) <- ifThen
(do
L.else'
e2 <- expression
return $ IfThenElse cond e1 e2 a)
<|> return (IfThen cond e1 a)
ifThen = do
_ <- L.if'
cond <- expression
_ <- L.then'
e <- expression
return $ IfThen cond e ()
while = While <$> (L.while' *> expression) <*> (L.do' *> expression) <*> pure ()
for = For <$> (L.for' *> identifier) <*> (L.assign' *> expression) <*> (L.to' *> expression) <*> (L.do' *> expression) <*> pure ()
break = Break <$ L.break' <*> pure ()
letE = Let <$> (L.let' *> many decl <* L.in') <*> (expression <* L.end') <*> pure ()
getNextId :: Enum a => ParsecT s u (State a) a
getNextId = do
current <- get
put $ succ current
return current
parseD text = fst $ runState (runParserT decl () "" text) 0
decl :: ParsecT Text u (State UniqueId) (Decl ())
decl = typeDec <|> varDec <|> funDec
tyField = (,) <$> identifier <*> (L.colon *> namedType)
namedType :: ParsecT Text u (State UniqueId) Type
namedType = NamedType <$> identifier
typeDec = TypeDec <$> getNextId <*> (L.type' *> identifier) <*> (L.eq *> typeVal) <*> pure ()
where typeVal :: ParsecT Text u (State UniqueId) Type
typeVal = namedType
<|> (RecType <$> L.braces (sepBy tyField L.comma))
<|> (ArrType <$> (L.array' *> (L.of' *> namedType)))
varDec = L.var' *> ((VarDec <$> getNextId <*> try (identifier <* L.assign') <*> expression <*> pure ())
<|> (TVarDec <$> getNextId <*> identifier <*> (L.colon *> namedType) <*> (L.assign' *> expression) <*> pure ()))
funDec :: ParsecT Text u (State UniqueId) (Decl ())
funDec = do
funId <- L.function' *> identifier
args <- L.parens (sepBy tyField L.comma)
TFunDec <$> getNextId <*> pure funId <*> pure args <*> (L.colon *> namedType) <*> (L.eq *> expression) <*> pure ()
<|> FunDec <$> getNextId <*> pure funId <*> pure args <*> (L.eq *> expression) <*> pure ()
expressionTests = Prelude.and [
parseE "int [3] of 4" == Right (Array "int" (IntLit 3 ()) (IntLit 4 ()) ()),
parseE "int [3+2] of 4*21" == Right (Array "int" (Add (IntLit 3 ()) (IntLit 2 ()) ()) (Mult (IntLit 4 ()) (IntLit 21 ()) ()) ()),
parseE "point {x=7, y= 4}" == Right (Record "point" [("x",IntLit 7 ()),("y",IntLit 4 ())] ()),
parseE "3+4*6-7/2" == Right (Sub (Add (IntLit 3 ()) (Mult (IntLit 4 ()) (IntLit 6 ()) ()) ()) (Div (IntLit 7 ()) (IntLit 2 ()) ()) ()),
parseE "f(3)" == Right (FunctionCall "f" [IntLit 3 ()] ()),
parseE "f(point {x=3,y=4}))" == Right (FunctionCall "f" [Record "point" [("x",IntLit 3 ()),("y",IntLit 4 ())] ()] ()),
parseE "x := point { x = 7, y = -2}" == Right (Assignment (LValueId "x" ()) (Record "point" [("x",IntLit 7 ()),("y",Negation (IntLit 2 ()) ())] ()) ()),
parseE "if 3 then 4" ==Right (IfThen (IntLit 3 ()) (IntLit 4 ()) ()),
parseE "if 3 then 4 else 5" == Right (IfThenElse (IntLit 3 ()) (IntLit 4 ()) (IntLit 5 ()) ()),
parseE "y:=y+x" == Right (Assignment (LValueId "y" ()) (Add (LValueId "y" ()) (LValueId "x" ()) ()) ())
]
declTests = Prelude.and [
parseD "type point = {x:int, y:int}" == Right (TypeDec 0 "point" (RecType [("x",NamedType "int"),("y",NamedType "int")]) ()),
parseD "type ai = array of int" == Right (TypeDec 0 "ai" (ArrType $ NamedType"int") ()),
parseD "function square (x:int) : int = x * x" == Right (TFunDec 0 "square" [("x",NamedType "int")] (NamedType "int") (Mult (LValueId "x" ()) (LValueId "x" ()) ()) ())
]
|
joelwilliamson/modern-compilers-exercises
|
Parse.hs
|
gpl-3.0
| 6,139 | 0 | 18 | 1,413 | 2,763 | 1,406 | 1,357 | 108 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Generic Key Exchange facilities. Performs the key exchange
module Ssh.KeyExchange(
doKex
, startRekey
) where
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Control.Monad
import qualified Control.Monad.State as MS
import Data.Bits
import Data.Maybe
import Data.List
import qualified Data.ByteString.Lazy as B
import Safe
-- Non-'standard' functionality
import OpenSSL.BN -- modexp, random Integers
import OpenSSL.Random -- random bytes
import qualified Data.ByteString as BS -- Random uses a Strict ByteString
import Data.Digest.Pure.SHA
import Ssh.NetworkIO
import Ssh.Packet
import Ssh.KeyExchangeAlgorithm
import Ssh.HostKeyAlgorithm
import Ssh.ConnectionData
import Ssh.Cryption
import Ssh.Transport
import Ssh.PublicKeyAlgorithm
import Ssh.HashMac
import Ssh.Debug
import Ssh.String
-- | We drop all the entries that we don't know about. Order in the server list is irrelevant; the client's first supported entry is chosen
serverListFiltered :: [SshString] -> [SshString] -> [SshString]
serverListFiltered clientList serverList = filter (`elem` serverList) clientList
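-- For instance, @serverListFiltered ["a","b"] ["b","c","a"] == ["a","b"]@:
-- the result keeps the client's preference order.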
-- | Filter all algorithms from a 'KEXInit' packet that are not in the given arguments.
-- Can be used to see which algorithms are supported by both client and server. Keeps the order of the client's supported lists
filterKEXInit :: [KeyExchangeAlgorithm] -> [HostKeyAlgorithm] -> [CryptionAlgorithm] -> [CryptionAlgorithm] -> [HashMac] -> [HashMac] -> Packet -> Packet
filterKEXInit clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs (KEXInit raw c ka hka ecs esc mcs msc) =
KEXInit raw c ka' hka' ecs' esc' mcs' msc'
where
ka' = serverListFiltered (map kexName clientKEXAlgos) ka
hka' = serverListFiltered (map hostKeyAlgorithmName clientHostKeys) hka
ecs' = serverListFiltered (map cryptoName clientCryptos) ecs
esc' = serverListFiltered (map cryptoName serverCryptos) esc
mcs' = serverListFiltered (map hashName clientHashMacs) mcs
msc' = serverListFiltered (map hashName serverHashMacs) msc
-- | Perform key exchange
-- Needs the version strings of both client and server. Needs a list of all client-side supported algorithms.
-- We also need a function that can be used to decode and decrypt packets using a given 'Transport'.
--
doKex :: [KeyExchangeAlgorithm] -> [HostKeyAlgorithm] -> [CryptionAlgorithm] -> [CryptionAlgorithm] -> [HashMac] -> [HashMac] -> SshConnection ConnectionData
doKex clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs = do
-- Prepare our KEXInit packet
-- Get 16 bytes of randomness for our cookie.
-- TODO: use OpenSSL.Random.add for more randomness
cookie <- MS.liftIO $ BS.unpack `liftM` randBytes 16
let clientKex = KEXInit B.empty cookie (map kexName clientKEXAlgos) (map hostKeyAlgorithmName clientHostKeys) (map cryptoName clientCryptos) (map cryptoName serverCryptos) (map hashName clientHashMacs) (map hashName serverHashMacs)
-- Set up the transports
let clientKexInitPayload = runPut $ putPacket clientKex
-- Send our KEXInit, wait for their KEXInit
sPutPacket clientKex
serverKex <- sGetPacket -- TODO assert this is a KEXInit packet
continueKex clientKexInitPayload serverKex clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs
-- | The KEX can be done multiple times. At the moment we have a split between the first exchange and the later ones, but both share the
-- actual computations, which are located in this function.
--continueKex ::
continueKex clientKexInitPayload serverKex clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs = do
printDebugLifted logLowLevelDebug "ServerKEX before filtering:"
printDebugLifted logLowLevelDebug $ show serverKex
    -- The server's KEXInit probably contains a lot of methods we don't support; throw them away, and pick the first KEX method that both we and the server support
let filteredServerKex = filterKEXInit clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs serverKex
kex = headDef (error "No mutually supported key exchange algorithms found!") $ kex_algos filteredServerKex
kexFn = fromJust $ find (\x -> kexName x == kex) clientKEXAlgos
-- TODO: selecting this algorithm is more complicated?
hkAlg = headDef (error "No mutually supported host key algorithms found!") $ host_key_algos filteredServerKex
hkFn = fromJust $ find (\x -> hostKeyAlgorithmName x == hkAlg) clientHostKeys
serverKexInitPayload = rawPacket serverKex
printDebugLifted logLowLevelDebug "ServerKEX after filtering:"
printDebugLifted logLowLevelDebug $ show filteredServerKex
-- Perform the Key Exchange method supported by both us and the server
connectiondata <- handleKex kexFn hkFn clientKexInitPayload serverKexInitPayload
-- We have exchanged keys, confirm to the server that the new keys can be put into use. The handleKex already confirmed the server sent theirs!
sPutPacket NewKeys
-- Now that the new keys are put into use, set up these keys and the correct encryption, decryption and hashed mac functions in our state to use
let s2c = head $ enc_s2c filteredServerKex
s2cfun = fromJust $ find (\x -> cryptoName x == s2c) serverCryptos
s2cmac = head $ mac_s2c filteredServerKex
s2cmacfun = fromJust $ find (\x -> hashName x == s2cmac) serverHashMacs
c2s = head $ enc_c2s filteredServerKex
c2sfun = fromJust $ find (\x -> cryptoName x == c2s) serverCryptos
c2smac = head $ mac_c2s filteredServerKex
c2smacfun = fromJust $ find (\x -> hashName x == c2smac) serverHashMacs
MS.modify $ \s -> s {
serverState = (serverState s) {
transport = SshTransport s2cfun s2cmacfun,
vector = server2ClientIV connectiondata
},
clientState = (clientState s) {
transport = SshTransport c2sfun c2smacfun,
vector = client2ServerIV connectiondata
},
maybeConnectionData = Just connectiondata,
isRekeying = False -- In case we were rekeying, this has been finished
}
printDebugLifted logLowLevelDebug "KEX DONE?"
return connectiondata
-- | Start a new key exchange from an existing connection. It returns a new packet handler!
startRekey :: [KeyExchangeAlgorithm] -> [HostKeyAlgorithm] -> [CryptionAlgorithm] -> [CryptionAlgorithm] -> [HashMac] -> [HashMac] -> SshConnection (Packet -> SshConnection Bool)
startRekey clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs = do
cookie <- MS.liftIO $ BS.unpack `liftM` randBytes 16
let clientKex = KEXInit B.empty cookie (map kexName clientKEXAlgos) (map hostKeyAlgorithmName clientHostKeys) (map cryptoName clientCryptos) (map cryptoName serverCryptos) (map hashName clientHashMacs) (map hashName serverHashMacs)
clientKexInitPayload = runPut $ putPacket clientKex
-- We are currently rekeying!
MS.modify $ \s -> s { isRekeying = True }
sPutPacket clientKex
previousHandler <- handlePacket `liftM` MS.get
return $ \p -> printDebugLifted logLowLevelDebug "WE SHOULD BE REKEYING NOW" >> case p of
(KEXInit _ _ _ _ _ _ _ _) -> continueKex clientKexInitPayload p clientKEXAlgos clientHostKeys clientCryptos serverCryptos clientHashMacs serverHashMacs >> return True
otherwise -> previousHandler p
|
bcoppens/HaskellSshClient
|
src/Ssh/KeyExchange.hs
|
gpl-3.0
| 7,796 | 0 | 15 | 1,637 | 1,476 | 777 | 699 | 91 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Routers.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of Router resources available to the specified project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.routers.list@.
module Network.Google.Resource.Compute.Routers.List
(
-- * REST Resource
RoutersListResource
-- * Creating a Request
, routersList
, RoutersList
-- * Request Lenses
, rlReturnPartialSuccess
, rlOrderBy
, rlProject
, rlFilter
, rlRegion
, rlPageToken
, rlMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.routers.list@ method which the
-- 'RoutersList' request conforms to.
type RoutersListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"routers" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] RouterList
-- | Retrieves a list of Router resources available to the specified project.
--
-- /See:/ 'routersList' smart constructor.
data RoutersList =
RoutersList'
{ _rlReturnPartialSuccess :: !(Maybe Bool)
, _rlOrderBy :: !(Maybe Text)
, _rlProject :: !Text
, _rlFilter :: !(Maybe Text)
, _rlRegion :: !Text
, _rlPageToken :: !(Maybe Text)
, _rlMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RoutersList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rlReturnPartialSuccess'
--
-- * 'rlOrderBy'
--
-- * 'rlProject'
--
-- * 'rlFilter'
--
-- * 'rlRegion'
--
-- * 'rlPageToken'
--
-- * 'rlMaxResults'
routersList
:: Text -- ^ 'rlProject'
-> Text -- ^ 'rlRegion'
-> RoutersList
routersList pRlProject_ pRlRegion_ =
RoutersList'
{ _rlReturnPartialSuccess = Nothing
, _rlOrderBy = Nothing
, _rlProject = pRlProject_
, _rlFilter = Nothing
, _rlRegion = pRlRegion_
, _rlPageToken = Nothing
, _rlMaxResults = 500
}
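-- A minimal usage sketch (illustrative only; assumes the lens operators
-- re-exported by 'Network.Google.Prelude' are in scope):
--
-- > routersList "my-project" "us-central1"
-- >   & rlMaxResults .~ 50
-- >   & rlFilter ?~ "name != example-router"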
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
rlReturnPartialSuccess :: Lens' RoutersList (Maybe Bool)
rlReturnPartialSuccess
= lens _rlReturnPartialSuccess
(\ s a -> s{_rlReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
rlOrderBy :: Lens' RoutersList (Maybe Text)
rlOrderBy
= lens _rlOrderBy (\ s a -> s{_rlOrderBy = a})
-- | Project ID for this request.
rlProject :: Lens' RoutersList Text
rlProject
= lens _rlProject (\ s a -> s{_rlProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
rlFilter :: Lens' RoutersList (Maybe Text)
rlFilter = lens _rlFilter (\ s a -> s{_rlFilter = a})
-- | Name of the region for this request.
rlRegion :: Lens' RoutersList Text
rlRegion = lens _rlRegion (\ s a -> s{_rlRegion = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
rlPageToken :: Lens' RoutersList (Maybe Text)
rlPageToken
= lens _rlPageToken (\ s a -> s{_rlPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
rlMaxResults :: Lens' RoutersList Word32
rlMaxResults
= lens _rlMaxResults (\ s a -> s{_rlMaxResults = a})
. _Coerce
instance GoogleRequest RoutersList where
type Rs RoutersList = RouterList
type Scopes RoutersList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient RoutersList'{..}
= go _rlProject _rlRegion _rlReturnPartialSuccess
_rlOrderBy
_rlFilter
_rlPageToken
(Just _rlMaxResults)
(Just AltJSON)
computeService
where go
= buildClient (Proxy :: Proxy RoutersListResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/Routers/List.hs
|
mpl-2.0
| 7,153 | 0 | 20 | 1,599 | 831 | 495 | 336 | 115 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.TurnBasedMatches.TakeTurn
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Commit the results of a player turn.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.turnBasedMatches.takeTurn@.
module Network.Google.Resource.Games.TurnBasedMatches.TakeTurn
(
-- * REST Resource
TurnBasedMatchesTakeTurnResource
-- * Creating a Request
, turnBasedMatchesTakeTurn
, TurnBasedMatchesTakeTurn
-- * Request Lenses
, tbmttConsistencyToken
, tbmttPayload
, tbmttLanguage
, tbmttMatchId
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.turnBasedMatches.takeTurn@ method which the
-- 'TurnBasedMatchesTakeTurn' request conforms to.
type TurnBasedMatchesTakeTurnResource =
"games" :>
"v1" :>
"turnbasedmatches" :>
Capture "matchId" Text :>
"turn" :>
QueryParam "consistencyToken" (Textual Int64) :>
QueryParam "language" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TurnBasedMatchTurn :>
Put '[JSON] TurnBasedMatch
-- | Commit the results of a player turn.
--
-- /See:/ 'turnBasedMatchesTakeTurn' smart constructor.
data TurnBasedMatchesTakeTurn = TurnBasedMatchesTakeTurn'
{ _tbmttConsistencyToken :: !(Maybe (Textual Int64))
, _tbmttPayload :: !TurnBasedMatchTurn
, _tbmttLanguage :: !(Maybe Text)
, _tbmttMatchId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TurnBasedMatchesTakeTurn' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tbmttConsistencyToken'
--
-- * 'tbmttPayload'
--
-- * 'tbmttLanguage'
--
-- * 'tbmttMatchId'
turnBasedMatchesTakeTurn
:: TurnBasedMatchTurn -- ^ 'tbmttPayload'
-> Text -- ^ 'tbmttMatchId'
-> TurnBasedMatchesTakeTurn
turnBasedMatchesTakeTurn pTbmttPayload_ pTbmttMatchId_ =
TurnBasedMatchesTakeTurn'
{ _tbmttConsistencyToken = Nothing
, _tbmttPayload = pTbmttPayload_
, _tbmttLanguage = Nothing
, _tbmttMatchId = pTbmttMatchId_
}
-- | The last-seen mutation timestamp.
tbmttConsistencyToken :: Lens' TurnBasedMatchesTakeTurn (Maybe Int64)
tbmttConsistencyToken
= lens _tbmttConsistencyToken
(\ s a -> s{_tbmttConsistencyToken = a})
. mapping _Coerce
-- | Multipart request metadata.
tbmttPayload :: Lens' TurnBasedMatchesTakeTurn TurnBasedMatchTurn
tbmttPayload
= lens _tbmttPayload (\ s a -> s{_tbmttPayload = a})
-- | The preferred language to use for strings returned by this method.
tbmttLanguage :: Lens' TurnBasedMatchesTakeTurn (Maybe Text)
tbmttLanguage
= lens _tbmttLanguage
(\ s a -> s{_tbmttLanguage = a})
-- | The ID of the match.
tbmttMatchId :: Lens' TurnBasedMatchesTakeTurn Text
tbmttMatchId
= lens _tbmttMatchId (\ s a -> s{_tbmttMatchId = a})
instance GoogleRequest TurnBasedMatchesTakeTurn where
type Rs TurnBasedMatchesTakeTurn = TurnBasedMatch
type Scopes TurnBasedMatchesTakeTurn =
'["https://www.googleapis.com/auth/games",
"https://www.googleapis.com/auth/plus.login"]
requestClient TurnBasedMatchesTakeTurn'{..}
= go _tbmttMatchId _tbmttConsistencyToken
_tbmttLanguage
(Just AltJSON)
_tbmttPayload
gamesService
where go
= buildClient
(Proxy :: Proxy TurnBasedMatchesTakeTurnResource)
mempty
|
rueshyna/gogol
|
gogol-games/gen/Network/Google/Resource/Games/TurnBasedMatches/TakeTurn.hs
|
mpl-2.0
| 4,403 | 0 | 16 | 1,022 | 568 | 333 | 235 | 88 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudIdentity.Groups.Memberships.Lookup
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Looks up the [resource
-- name](https:\/\/cloud.google.com\/apis\/design\/resource_names) of a
-- \`Membership\` by its \`EntityKey\`.
--
-- /See:/ <https://cloud.google.com/identity/ Cloud Identity API Reference> for @cloudidentity.groups.memberships.lookup@.
module Network.Google.Resource.CloudIdentity.Groups.Memberships.Lookup
(
-- * REST Resource
GroupsMembershipsLookupResource
-- * Creating a Request
, groupsMembershipsLookup
, GroupsMembershipsLookup
-- * Request Lenses
, gmlParent
, gmlXgafv
, gmlUploadProtocol
, gmlAccessToken
, gmlUploadType
, gmlMemberKeyId
, gmlMemberKeyNamespace
, gmlCallback
) where
import Network.Google.CloudIdentity.Types
import Network.Google.Prelude
-- | A resource alias for @cloudidentity.groups.memberships.lookup@ method which the
-- 'GroupsMembershipsLookup' request conforms to.
type GroupsMembershipsLookupResource =
"v1" :>
Capture "parent" Text :>
"memberships:lookup" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "memberKey.id" Text :>
QueryParam "memberKey.namespace" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] LookupMembershipNameResponse
-- | Looks up the [resource
-- name](https:\/\/cloud.google.com\/apis\/design\/resource_names) of a
-- \`Membership\` by its \`EntityKey\`.
--
-- /See:/ 'groupsMembershipsLookup' smart constructor.
data GroupsMembershipsLookup =
GroupsMembershipsLookup'
{ _gmlParent :: !Text
, _gmlXgafv :: !(Maybe Xgafv)
, _gmlUploadProtocol :: !(Maybe Text)
, _gmlAccessToken :: !(Maybe Text)
, _gmlUploadType :: !(Maybe Text)
, _gmlMemberKeyId :: !(Maybe Text)
, _gmlMemberKeyNamespace :: !(Maybe Text)
, _gmlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GroupsMembershipsLookup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gmlParent'
--
-- * 'gmlXgafv'
--
-- * 'gmlUploadProtocol'
--
-- * 'gmlAccessToken'
--
-- * 'gmlUploadType'
--
-- * 'gmlMemberKeyId'
--
-- * 'gmlMemberKeyNamespace'
--
-- * 'gmlCallback'
groupsMembershipsLookup
:: Text -- ^ 'gmlParent'
-> GroupsMembershipsLookup
groupsMembershipsLookup pGmlParent_ =
GroupsMembershipsLookup'
{ _gmlParent = pGmlParent_
, _gmlXgafv = Nothing
, _gmlUploadProtocol = Nothing
, _gmlAccessToken = Nothing
, _gmlUploadType = Nothing
, _gmlMemberKeyId = Nothing
, _gmlMemberKeyNamespace = Nothing
, _gmlCallback = Nothing
}
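-- As an illustrative sketch (again assuming the usual lens operators are in
-- scope; the group name and member key are placeholders):
--
-- > groupsMembershipsLookup "groups/012abc"
-- >   & gmlMemberKeyId ?~ "[email protected]"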
-- | Required. The parent \`Group\` resource under which to lookup the
-- \`Membership\` name. Must be of the form \`groups\/{group_id}\`.
gmlParent :: Lens' GroupsMembershipsLookup Text
gmlParent
= lens _gmlParent (\ s a -> s{_gmlParent = a})
-- | V1 error format.
gmlXgafv :: Lens' GroupsMembershipsLookup (Maybe Xgafv)
gmlXgafv = lens _gmlXgafv (\ s a -> s{_gmlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
gmlUploadProtocol :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlUploadProtocol
= lens _gmlUploadProtocol
(\ s a -> s{_gmlUploadProtocol = a})
-- | OAuth access token.
gmlAccessToken :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlAccessToken
= lens _gmlAccessToken
(\ s a -> s{_gmlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
gmlUploadType :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlUploadType
= lens _gmlUploadType
(\ s a -> s{_gmlUploadType = a})
-- | The ID of the entity. For Google-managed entities, the \`id\` should be
-- the email address of an existing group or user. For
-- external-identity-mapped entities, the \`id\` must be a string
-- conforming to the Identity Source\'s requirements. Must be unique within
-- a \`namespace\`.
gmlMemberKeyId :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlMemberKeyId
= lens _gmlMemberKeyId
(\ s a -> s{_gmlMemberKeyId = a})
-- | The namespace in which the entity exists. If not specified, the
-- \`EntityKey\` represents a Google-managed entity such as a Google user
-- or a Google Group. If specified, the \`EntityKey\` represents an
-- external-identity-mapped group. The namespace must correspond to an
-- identity source created in Admin Console and must be in the form of
-- \`identitysources\/{identity_source_id}\`.
gmlMemberKeyNamespace :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlMemberKeyNamespace
= lens _gmlMemberKeyNamespace
(\ s a -> s{_gmlMemberKeyNamespace = a})
-- | JSONP
gmlCallback :: Lens' GroupsMembershipsLookup (Maybe Text)
gmlCallback
= lens _gmlCallback (\ s a -> s{_gmlCallback = a})
instance GoogleRequest GroupsMembershipsLookup where
type Rs GroupsMembershipsLookup =
LookupMembershipNameResponse
type Scopes GroupsMembershipsLookup =
'["https://www.googleapis.com/auth/cloud-identity.groups",
"https://www.googleapis.com/auth/cloud-identity.groups.readonly",
"https://www.googleapis.com/auth/cloud-platform"]
requestClient GroupsMembershipsLookup'{..}
= go _gmlParent _gmlXgafv _gmlUploadProtocol
_gmlAccessToken
_gmlUploadType
_gmlMemberKeyId
_gmlMemberKeyNamespace
_gmlCallback
(Just AltJSON)
cloudIdentityService
where go
= buildClient
(Proxy :: Proxy GroupsMembershipsLookupResource)
mempty
|
brendanhay/gogol
|
gogol-cloudidentity/gen/Network/Google/Resource/CloudIdentity/Groups/Memberships/Lookup.hs
|
mpl-2.0
| 6,661 | 0 | 18 | 1,448 | 879 | 516 | 363 | 128 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.ChannelSections.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing resource.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.channelSections.update@.
module Network.Google.Resource.YouTube.ChannelSections.Update
(
-- * REST Resource
ChannelSectionsUpdateResource
-- * Creating a Request
, channelSectionsUpdate
, ChannelSectionsUpdate
-- * Request Lenses
, csuXgafv
, csuPart
, csuUploadProtocol
, csuAccessToken
, csuUploadType
, csuPayload
, csuOnBehalfOfContentOwner
, csuCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.channelSections.update@ method which the
-- 'ChannelSectionsUpdate' request conforms to.
type ChannelSectionsUpdateResource =
"youtube" :>
"v3" :>
"channelSections" :>
QueryParams "part" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ChannelSection :>
Put '[JSON] ChannelSection
-- | Updates an existing resource.
--
-- /See:/ 'channelSectionsUpdate' smart constructor.
data ChannelSectionsUpdate =
ChannelSectionsUpdate'
{ _csuXgafv :: !(Maybe Xgafv)
, _csuPart :: ![Text]
, _csuUploadProtocol :: !(Maybe Text)
, _csuAccessToken :: !(Maybe Text)
, _csuUploadType :: !(Maybe Text)
, _csuPayload :: !ChannelSection
, _csuOnBehalfOfContentOwner :: !(Maybe Text)
, _csuCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ChannelSectionsUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csuXgafv'
--
-- * 'csuPart'
--
-- * 'csuUploadProtocol'
--
-- * 'csuAccessToken'
--
-- * 'csuUploadType'
--
-- * 'csuPayload'
--
-- * 'csuOnBehalfOfContentOwner'
--
-- * 'csuCallback'
channelSectionsUpdate
:: [Text] -- ^ 'csuPart'
-> ChannelSection -- ^ 'csuPayload'
-> ChannelSectionsUpdate
channelSectionsUpdate pCsuPart_ pCsuPayload_ =
ChannelSectionsUpdate'
{ _csuXgafv = Nothing
, _csuPart = _Coerce # pCsuPart_
, _csuUploadProtocol = Nothing
, _csuAccessToken = Nothing
, _csuUploadType = Nothing
, _csuPayload = pCsuPayload_
, _csuOnBehalfOfContentOwner = Nothing
, _csuCallback = Nothing
}
-- | V1 error format.
csuXgafv :: Lens' ChannelSectionsUpdate (Maybe Xgafv)
csuXgafv = lens _csuXgafv (\ s a -> s{_csuXgafv = a})
-- | The *part* parameter serves two purposes in this operation. It
-- identifies the properties that the write operation will set as well as
-- the properties that the API response will include. The part names that
-- you can include in the parameter value are snippet and contentDetails.
csuPart :: Lens' ChannelSectionsUpdate [Text]
csuPart
= lens _csuPart (\ s a -> s{_csuPart = a}) . _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
csuUploadProtocol :: Lens' ChannelSectionsUpdate (Maybe Text)
csuUploadProtocol
= lens _csuUploadProtocol
(\ s a -> s{_csuUploadProtocol = a})
-- | OAuth access token.
csuAccessToken :: Lens' ChannelSectionsUpdate (Maybe Text)
csuAccessToken
= lens _csuAccessToken
(\ s a -> s{_csuAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
csuUploadType :: Lens' ChannelSectionsUpdate (Maybe Text)
csuUploadType
= lens _csuUploadType
(\ s a -> s{_csuUploadType = a})
-- | Multipart request metadata.
csuPayload :: Lens' ChannelSectionsUpdate ChannelSection
csuPayload
= lens _csuPayload (\ s a -> s{_csuPayload = a})
-- | *Note:* This parameter is intended exclusively for YouTube content
-- partners. The *onBehalfOfContentOwner* parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The CMS account that the user authenticates with must be linked
-- to the specified YouTube content owner.
csuOnBehalfOfContentOwner :: Lens' ChannelSectionsUpdate (Maybe Text)
csuOnBehalfOfContentOwner
= lens _csuOnBehalfOfContentOwner
(\ s a -> s{_csuOnBehalfOfContentOwner = a})
-- | JSONP
csuCallback :: Lens' ChannelSectionsUpdate (Maybe Text)
csuCallback
= lens _csuCallback (\ s a -> s{_csuCallback = a})
instance GoogleRequest ChannelSectionsUpdate where
type Rs ChannelSectionsUpdate = ChannelSection
type Scopes ChannelSectionsUpdate =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient ChannelSectionsUpdate'{..}
= go _csuPart _csuXgafv _csuUploadProtocol
_csuAccessToken
_csuUploadType
_csuOnBehalfOfContentOwner
_csuCallback
(Just AltJSON)
_csuPayload
youTubeService
where go
= buildClient
(Proxy :: Proxy ChannelSectionsUpdateResource)
mempty
|
brendanhay/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/ChannelSections/Update.hs
|
mpl-2.0
| 6,585 | 0 | 19 | 1,490 | 897 | 526 | 371 | 128 | 1 |
import Import hiding (runDB, runSDB)
import Control.Exception.Lifted (throw, Exception)
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Resource
import Control.Monad.Writer
import qualified Data.Text as T
import Data.Typeable
import qualified Database.Persist.Sql
import Yesod.Default.Config
import Model.Project
import Model.Currency
import Settings
data NegativeBalances = NegativeBalances ProjectId [UserId]
deriving (Show, Typeable)
instance Exception NegativeBalances
payout :: (MonadIO m, Functor m)
=> UTCTime
-> (Entity Project, Entity Payday)
-> SqlPersistT m Bool
payout now (Entity project_id project, Entity payday_id _) = do
let project_name = projectName project
pledges <- select $ from $ \pledge -> do
where_ $ pledge ^. PledgeProject ==. val project_id
&&. pledge ^. PledgeFundedShares >. val 0
return pledge
user_balances <- forM pledges $ \(Entity _ pledge) -> do
Just user <- get $ pledgeUser pledge
let amount =
projectShareValue project
$* fromIntegral (pledgeFundedShares pledge)
user_account_id = userAccount user
project_account_id = projectAccount project
void $
insert $
Transaction now
(Just project_account_id)
(Just user_account_id)
(Just payday_id)
amount
"Project Payout"
Nothing
user_account <-
updateGet
user_account_id
[AccountBalance Database.Persist.Sql.-=. amount]
_ <-
updateGet
project_account_id
[AccountBalance Database.Persist.Sql.+=. amount]
return (pledgeUser pledge, accountBalance user_account)
let negative_balances = filter ((< 0) . snd) user_balances
unless (null negative_balances)
(throw $ NegativeBalances project_id $ map fst negative_balances)
update $ \p -> do
set p [ ProjectLastPayday =. val (Just payday_id) ]
where_ $ p ^. ProjectId ==. val project_id
liftIO $ putStrLn $ "paid to " <> T.unpack project_name
return True
projectsToPay :: MonadIO m
=> UTCTime
-> SqlPersistT m [(Entity Project, Entity Payday)]
projectsToPay now =
select $
from $ \(project
`LeftOuterJoin` last_payday
`InnerJoin` payday) -> do
on_ $ payday ^. PaydayDate
>. coalesceDefault
[ last_payday ?. PaydayDate ]
(project ^. ProjectCreatedTs)
on_ $ project ^. ProjectLastPayday ==. last_payday ?. PaydayId
where_ $ payday ^. PaydayDate <=. val now
orderBy [ asc $ payday ^. PaydayDate
, desc $ project ^. ProjectShareValue ]
return (project, payday)
rebalanceAllPledges :: (MonadWriter [PledgeId] (t (ReaderT SqlBackend m))
,MonadTrans t
,MonadBaseControl IO m
,MonadLogger m
,MonadResource m
)
=> t (SqlPersistT m) ()
rebalanceAllPledges = do
unders <- lift underfundedPatrons
unless (null unders) $ do
maxUnders <- lift $ maxShares Nothing unders
lift $ dropShares maxUnders
lift $ mapM_ updateShareValue =<< updatedProjects maxUnders
tell maxUnders
rebalanceAllPledges
updatedProjects :: (MonadIO m, Functor m)
=> [PledgeId]
-> SqlPersistT m [ProjectId]
updatedProjects pledges = fmap (map (pledgeProject . entityVal))
(selectList [PledgeId <-. pledges] [])
runDB :: (PersistConfig c, MonadBaseControl IO m, MonadIO m)
=> c
-> PersistConfigPool c
-> PersistConfigBackend c (ResourceT (LoggingT m)) a
-> m a
runDB dbconf poolconf sql =
runStdoutLoggingT $
runResourceT $ Database.Persist.Sql.runPool dbconf sql poolconf
runSDB :: (PersistConfig c, MonadBaseControl IO m, MonadIO m)
=> c
-> PersistConfigPool c
-> WriterT t (PersistConfigBackend c (ResourceT (LoggingT m))) b
-> m b
runSDB dbconf poolconf = fmap fst . runDB dbconf poolconf . runWriterT
main :: IO ()
main = do
conf <- fromArgs parseExtra
dbconf <- withYamlEnvironment "config/postgresql.yml" (appEnv conf)
Database.Persist.Sql.loadConfig >>= Database.Persist.Sql.applyEnv
pool_conf <-
Database.Persist.Sql.createPoolConfig (dbconf :: Settings.PersistConf)
now <- liftIO getCurrentTime
runSDB dbconf pool_conf $ do
projects <- lift $ projectsToPay now
lift $ mapM_ (payout now) projects
rebalanceAllPledges
|
akegalj/snowdrift
|
app/SnowdriftProcessPayments.hs
|
agpl-3.0
| 4,930 | 4 | 18 | 1,596 | 1,358 | 670 | 688 | -1 | -1 |
module Network.Haskoin.Wallet.Client (clientMain) where
import System.FilePath ((</>))
import System.Directory (createDirectoryIfMissing)
import System.Posix.Directory (changeWorkingDirectory)
import System.Posix.Files
( setFileMode
, setFileCreationMask
, unionFileModes
, ownerModes
, groupModes
, otherModes
, fileExist
)
import System.Environment (getArgs, lookupEnv)
import System.Info (os)
import System.Console.GetOpt
( getOpt
, usageInfo
, OptDescr (Option)
, ArgDescr (NoArg, ReqArg)
, ArgOrder (Permute)
)
import Control.Monad (forM_)
import Control.Monad.Trans (liftIO)
import qualified Control.Monad.Reader as R (runReaderT)
import Data.Default (def)
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeFileEither)
import Data.String.Conversions (cs)
import Network.Haskoin.Constants
import Network.Haskoin.Wallet.Settings
import Network.Haskoin.Wallet.Client.Commands
import Network.Haskoin.Wallet.Types
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import System.FilePath.Posix (isAbsolute)
usageHeader :: String
usageHeader = "Usage: hw [<options>] <command> [<args>]"
cmdHelp :: [String]
cmdHelp = lines $ cs $ $(embedFile "config/help")
warningMsg :: String
warningMsg = unwords
[ "!!!", "This software is experimental."
, "Use only small amounts of Bitcoins.", "!!!"
]
usage :: [String]
usage = warningMsg : usageInfo usageHeader options : cmdHelp
read' :: Read x => String -> String -> x
read' e s = case reads s of
[(x, "")] -> x
_ -> error e
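-- For illustration only (a behaviour sketch, not part of the original module):
--
-- > read' "Could not parse count" "42"   :: Int  -- yields 42
-- > read' "Could not parse count" "oops" :: Int  -- raises 'error' with the message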
options :: [OptDescr (Config -> Config)]
options =
[ Option "c" ["count"]
( ReqArg
(\s cfg -> cfg { configCount = read' "Could not parse count" s })
"INT"
) $ "Items per page. Default: " ++ show (configCount def)
, Option "m" ["minconf"]
( ReqArg
(\s cfg ->
            cfg { configMinConf = read' "Could not parse minconf" s }
) "INT"
) $ "Minimum confirmations. Default: "
++ show (configMinConf def)
, Option "f" ["fee"]
( ReqArg
(\s cfg -> cfg { configFee = read' "Could not parse fee" s })
"INT"
) $ "Fee per kilobyte. Default: " ++ show (configFee def)
, Option "R" ["rcptfee"]
(NoArg $ \cfg -> cfg { configRcptFee = True }) $
"Recipient pays fee. Default: " ++ show (configRcptFee def)
, Option "S" ["nosig"]
(NoArg $ \cfg -> cfg { configSignTx = False }) $
"Do not sign. Default: " ++ show (not $ configSignTx def)
, Option "i" ["internal"]
(NoArg $ \cfg -> cfg { configAddrType = AddressInternal }) $
"Internal addresses. Default: "
++ show (configAddrType def == AddressInternal)
, Option "k" ["pubkeys"]
(NoArg $ \cfg -> cfg { configDisplayPubKeys = True }) $
"Display public keys instead of addresses. Default: "
++ show (configDisplayPubKeys def)
, Option "o" ["offline"]
(NoArg $ \cfg -> cfg { configOffline = True }) $
"Offline balance. Default: " ++ show (configOffline def)
, Option "e" ["entropy"]
( ReqArg ( \s cfg -> cfg { configEntropy =
read' "Could not parse entropy" s
}
) "INT"
) $ "Entropy in Bytes between 16 and 32. Default: "
++ show (configEntropy def)
, Option "r" ["revpage"]
(NoArg $ \cfg -> cfg { configReversePaging = True }) $
"Reverse paging. Default: "
++ show (configReversePaging def)
, Option "I" ["index"]
( ReqArg ( \s cfg -> cfg { configDerivIndex =
read' "Could not parse index" s
}
) "INT"
) $ "Derivation index for new accounts. Default: "
++ show (configDerivIndex def)
, Option "j" ["json"]
(NoArg $ \cfg -> cfg { configFormat = OutputJSON })
"Output JSON"
, Option "y" ["yaml"]
(NoArg $ \cfg -> cfg { configFormat = OutputYAML })
"Output YAML"
, Option "s" ["socket"]
(ReqArg (\s cfg -> cfg { configConnect = s }) "URI") $
"Server socket. Default: " ++ configConnect def
, Option "d" ["detach"]
(NoArg $ \cfg -> cfg { configDetach = True }) $
"Detach server. Default: " ++ show (configDetach def)
, Option "t" ["testnet"]
(NoArg $ \cfg -> cfg { configTestnet = True }) "Testnet3 network"
, Option "g" ["config"]
(ReqArg (\s cfg -> cfg { configFile = s }) "FILE") $
"Config file. Default: " ++ configFile def
, Option "w" ["workdir"]
(ReqArg (\s cfg -> cfg { configDir = s }) "DIR")
"Working directory. OS-dependent default"
, Option "v" ["verbose"]
(NoArg $ \cfg -> cfg { configVerbose = True }) "Verbose output"
]
-- Create and change current working directory
setWorkDir :: Config -> IO ()
setWorkDir cfg = do
let workDir = configDir cfg </> networkName
_ <- setFileCreationMask $ otherModes `unionFileModes` groupModes
createDirectoryIfMissing True workDir
setFileMode workDir ownerModes
changeWorkingDirectory workDir
-- Build application configuration
getConfig :: [Config -> Config] -> IO Config
getConfig fs = do
-- Create initial configuration from defaults and command-line arguments
let initCfg = foldr ($) def fs
-- If working directory set in initial configuration, use it
dir <- case configDir initCfg of "" -> appDir
d -> return d
-- Make configuration file relative to working directory
let cfgFile = if isAbsolute (configFile initCfg)
then configFile initCfg
else dir </> configFile initCfg
-- Get configuration from file, if it exists
e <- fileExist cfgFile
if e then do
cfgE <- decodeFileEither cfgFile
case cfgE of
Left x -> error $ show x
-- Override settings from file using command-line
Right cfg -> return $ fixConfigDir (foldr ($) cfg fs) dir
else return $ fixConfigDir initCfg dir
where
-- If working directory not set, use default
fixConfigDir cfg dir = case configDir cfg of "" -> cfg{ configDir = dir }
_ -> cfg
clientMain :: IO ()
clientMain = getArgs >>= \args -> case getOpt Permute options args of
(fs, commands, []) -> do
cfg <- getConfig fs
if configTestnet cfg
then setTestnet
else setProdnet
setWorkDir cfg
dispatchCommand cfg commands
(_, _, msgs) -> forM_ (msgs ++ usage) putStrLn
dispatchCommand :: Config -> [String] -> IO ()
dispatchCommand cfg args = flip R.runReaderT cfg $ case args of
"start" : [] -> cmdStart
"stop" : [] -> cmdStop
"newacc" : name : [] -> cmdNewAcc False name []
"newread" : name : [] -> cmdNewAcc True name []
"newms" : name : m : n : [] -> cmdNewAcc False name [m, n]
"newreadms" : name : m : n : [] -> cmdNewAcc True name [m, n]
"addkey" : name : [] -> cmdAddKey name
"setgap" : name : gap : [] -> cmdSetGap name gap
"account" : name : [] -> cmdAccount name
"accounts" : page -> cmdAccounts page
"rename" : name : new : [] -> cmdRenameAcc name new
"list" : name : page -> cmdList name page
"unused" : name : page -> cmdUnused name page
"label" : name : index : label : [] -> cmdLabel name index label
"uri" : name : index : ls -> cmdURI name index ls
"txs" : name : page -> cmdTxs name page
"addrtxs" : name : index : page -> cmdAddrTxs name index page
"getindex" : name : key : [] -> cmdGetIndex name key
"genaddrs" : name : i : [] -> cmdGenAddrs name i
"send" : name : add : amnt : [] -> cmdSend name add amnt
"sendmany" : name : xs -> cmdSendMany name xs
"import" : name : [] -> cmdImport name
"sign" : name : txid : [] -> cmdSign name txid
"gettx" : name : txid : [] -> cmdGetTx name txid
"balance" : name : [] -> cmdBalance name
"getoffline" : name : txid : [] -> cmdGetOffline name txid
"signoffline" : name : [] -> cmdSignOffline name
"rescan" : rescantime -> cmdRescan rescantime
"deletetx" : txid : [] -> cmdDeleteTx txid
"sync" : name : block : page -> cmdSync name block page
"pending" : name : page -> cmdPending name page
"dead" : name : page -> cmdDead name page
"monitor" : name -> cmdMonitor name
"decodetx" : [] -> cmdDecodeTx
"dice" : rolls : [] -> cmdDice rolls
"status" : [] -> cmdStatus
"keypair" : [] -> cmdKeyPair
"blockinfo" : hashes -> cmdBlockInfo hashes
"version" : [] -> cmdVersion
"help" : [] -> liftIO $ forM_ usage (hPutStrLn stderr)
[] -> liftIO $ forM_ usage (hPutStrLn stderr)
_ -> liftIO $
forM_ ("Invalid command" : usage) (hPutStrLn stderr) >> exitFailure
appDir :: IO FilePath
appDir = case os of "mingw" -> windows
"mingw32" -> windows
"mingw64" -> windows
"darwin" -> osx
"linux" -> unix
_ -> unix
where
windows = do
localAppData <- lookupEnv "LOCALAPPDATA"
dirM <- case localAppData of
Nothing -> lookupEnv "APPDATA"
Just l -> return $ Just l
case dirM of
Just d -> return $ d </> "Haskoin Wallet"
Nothing -> return "."
osx = do
homeM <- lookupEnv "HOME"
case homeM of
Just home -> return $ home </> "Library"
</> "Application Support"
</> "Haskoin Wallet"
Nothing -> return "."
unix = do
homeM <- lookupEnv "HOME"
case homeM of
Just home -> return $ home </> ".hw"
Nothing -> return "."
|
plaprade/haskoin
|
haskoin-wallet/src/Network/Haskoin/Wallet/Client.hs
|
unlicense
| 10,802 | 0 | 16 | 3,897 | 3,012 | 1,566 | 1,446 | -1 | -1 |
module Main where
import Control.Monad (unless, forever)
import Pipes
import Pipes.Concurrent
import System.IO (isEOF)
import Control.Exception (try, throwIO)
import qualified GHC.IO.Exception as G
stdinLn :: Producer String IO ()
stdinLn = do
eof <- lift isEOF
unless eof $ do
str <- lift getLine
yield str
stdinLn
stdoutLn :: Show a => Consumer a IO ()
stdoutLn = do
msg <- await -- 'await' an a
x <- lift $ try $ putStrLn $ show msg
case x of
-- Gracefully terminate if we got a broken pipe error
Left e@(G.IOError { G.ioe_type = t}) ->
lift $ unless (t == G.ResourceVanished) $ throwIO e
-- Otherwise loop
Right () -> stdoutLn
pairer :: (Monad m) => Pipe a (a, a) m ()
pairer = forever $ do
a <- await
b <- await
yield (a, b)
splitter :: (Monad m) => Pipe [Char] Char m ()
splitter = forever $ do
str <- await
mapM_ yield str
tap :: Show a => String -> Pipe a a IO ()
tap label = forever $ do
a <- await
lift $ putStrLn $ concat [label, "::> ", show a]
yield a
splitap :: Pipe [Char] Char IO ()
splitap = splitter >-> tap "Split "
main :: IO ()
main = runEffect $ stdinLn >-> tap "One"
>-> splitter >-> tap "Two"
>-> pairer >-> tap "Three"
>-> stdoutLn
|
CompSciCabal/SMRTYPRTY
|
experiments/inaimathi/PipeEx.hs
|
unlicense
| 1,387 | 0 | 15 | 459 | 521 | 263 | 258 | 43 | 2 |
{-# LANGUAGE LambdaCase #-}
{- |
Module : Neovim.User.Input
Description : Utility functions to retrieve user input
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer : [email protected]
Stability : experimental
Portability : GHC
-}
module Neovim.User.Input
where
import Neovim
import Neovim.API.String
import Neovim.User.Choice
import System.Directory
-- | Helper function that calls the @input()@ function of neovim.
input :: NvimObject result
=> String -- ^ Message to display
      -> Maybe String    -- ^ Prefilled input text
-> Maybe String -- ^ Completion mode
-> Neovim env result
input message mPrefilled mCompletion = fmap fromObjectUnsafe
. vim_call_function "input" $ (message <> " ")
+: maybe "" id mPrefilled
+: maybe [] (+: []) mCompletion
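-- A minimal usage sketch (assumed, not from the original module; the prompt
-- text and prefill are illustrative):
--
-- > askTag :: Neovim env String
-- > askTag = input "Tag name:" (Just "v1.0") Nothing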
-- | Prompt the user to specify a directory.
--
-- If the directory does not exist, ask the user whether it should be created.
askForDirectory :: String -- ^ Message to put in front
-> Maybe FilePath -- ^ Prefilled text
-> Neovim env FilePath
askForDirectory message mPrefilled = do
fp <- input message mPrefilled (Just "dir")
efp <- fmap fromObjectUnsafe . vim_call_function "expand" $ (fp :: FilePath) +: []
whenM (not <$> liftIO (doesDirectoryExist efp)) $
whenM (yesOrNo (efp ++ " does not exist, create it?")) $
liftIO $ createDirectoryIfMissing True efp
return efp
askForString :: String -- ^ message to put in front
-> Maybe String -- ^ Prefilled text
-> Neovim env String
askForString message mPrefilled = input message mPrefilled Nothing
|
neovimhaskell/nvim-hs-contrib
|
library/Neovim/User/Input.hs
|
apache-2.0
| 1,670 | 0 | 15 | 404 | 321 | 164 | 157 | 29 | 1 |
-- | Re-export the most important functions from Context.*
module System.Console.Hawk.Context
( module System.Console.Hawk.Context.Base
, module System.Console.Hawk.Context.Paths
) where
import System.Console.Hawk.Context.Base
import System.Console.Hawk.Context.Paths
|
gelisam/hawk
|
src/System/Console/Hawk/Context.hs
|
apache-2.0
| 275 | 0 | 5 | 30 | 45 | 34 | 11 | 5 | 0 |
--
-- Copyright (c) 2013, Carl Joachim Svenn
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module MEnv.GLFW
(
MEnv (..),
runMEnvGLFW,
module MEnv.GLFW.Init,
) where
import Foreign
import Foreign.Marshal.Alloc
import Data.IORef
import Control.Monad.Trans
import Control.Monad.State
import MEnv.GLFW.Init
--------------------------------------------------------------------------------
-- MEnv
-- | the MEnv monad
newtype MEnv res a =
MEnv
{
menvUnwrap :: StateT res IO a
}
deriving
(
Monad,
MonadIO,
MonadState res,
Functor
)
--------------------------------------------------------------------------------
-- runMEnvGLFW
-- | init environment, run (MEnv a) inside, from GLFW
runMEnvGLFW :: Init -> IO res -> (res -> IO ()) ->
(a -> MEnv res b) ->
(b -> MEnv res b) ->
(b -> MEnv res c) ->
a ->
IO c
runMEnvGLFW init loadResource unloadResource
            begin
            iterate
            end
            a = do
    putStrLn "GLFW is not implemented yet"
    error "runMEnvGLFW: GLFW backend not implemented"
|
karamellpelle/MEnv
|
source/MEnv/GLFW.hs
|
bsd-2-clause
| 2,664 | 0 | 13 | 672 | 248 | 153 | 95 | 35 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_HADDOCK not-home #-}
-- | Description: Literal GraphQL values
module GraphQL.Internal.Value
( Value
, Value'(..)
, ConstScalar
, UnresolvedVariableValue
, pattern ValueInt
, pattern ValueFloat
, pattern ValueBoolean
, pattern ValueString
, pattern ValueEnum
, pattern ValueList
, pattern ValueObject
, pattern ValueNull
, toObject
, valueToAST
, astToVariableValue
, variableValueToAST
, List
, List'(..)
, String(..)
-- * Names
, Name(..)
, NameError(..)
, makeName
-- * Objects
, Object
, Object'(..)
, ObjectField
, ObjectField'(ObjectField)
-- ** Constructing
, makeObject
, objectFromList
, objectFromOrderedMap
-- ** Combining
, unionObjects
-- ** Querying
, objectFields
) where
import Protolude
import qualified Data.Aeson as Aeson
import Data.Aeson (ToJSON(..), (.=), pairs)
import qualified Data.Map as Map
import Test.QuickCheck (Arbitrary(..), Gen, oneof, listOf, sized)
import GraphQL.Internal.Arbitrary (arbitraryText)
import GraphQL.Internal.Name (Name(..), NameError(..), makeName)
import GraphQL.Internal.Syntax.AST (Variable)
import qualified GraphQL.Internal.Syntax.AST as AST
import GraphQL.Internal.OrderedMap (OrderedMap)
import qualified GraphQL.Internal.OrderedMap as OrderedMap
-- * Values
-- | A GraphQL value. @scalar@ represents the type of scalar that's contained
-- within this value.
--
-- Normally, it is one of either 'ConstScalar' (to indicate that there are no
-- variables whatsoever) or 'VariableScalar' (to indicate that there might be
-- some variables).
data Value' scalar
= ValueScalar' scalar
| ValueList' (List' scalar)
| ValueObject' (Object' scalar)
deriving (Eq, Ord, Show, Functor)
instance Foldable Value' where
foldMap f (ValueScalar' scalar) = f scalar
foldMap f (ValueList' values) = foldMap f values
foldMap f (ValueObject' obj) = foldMap f obj
instance Traversable Value' where
traverse f (ValueScalar' x) = ValueScalar' <$> f x
traverse f (ValueList' xs) = ValueList' <$> traverse f xs
traverse f (ValueObject' xs) = ValueObject' <$> traverse f xs
instance ToJSON scalar => ToJSON (Value' scalar) where
toJSON (ValueScalar' x) = toJSON x
toJSON (ValueList' x) = toJSON x
toJSON (ValueObject' x) = toJSON x
instance Arbitrary scalar => Arbitrary (Value' scalar) where
-- | Generate an arbitrary value. Uses the generator's \"size\" property to
-- determine maximum object depth.
arbitrary = sized genValue
-- | Generate an arbitrary value, with objects at most @n@ levels deep.
genValue :: Arbitrary scalar => Int -> Gen (Value' scalar)
genValue n
| n <= 0 = arbitrary
| otherwise = oneof [ ValueScalar' <$> arbitrary
, ValueObject' <$> genObject (n - 1)
, ValueList' . List' <$> listOf (genValue (n - 1))
]
-- | A GraphQL value which contains no variables.
type Value = Value' ConstScalar
-- TODO: These next two definitions are quite internal. We should move this
-- module to Internal and then re-export the bits that end-users will use.
-- <https://github.com/jml/graphql-api/issues/99>
-- | A GraphQL value which might contain some variables. These variables are
-- not yet associated with
-- <https://facebook.github.io/graphql/#VariableDefinition variable
-- definitions> (see also 'GraphQL.Internal.Validation.VariableDefinition'),
-- which are provided in a different context.
type UnresolvedVariableValue = Value' UnresolvedVariableScalar
pattern ValueInt :: Int32 -> Value
pattern ValueInt x = ValueScalar' (ConstInt x)
pattern ValueFloat :: Double -> Value
pattern ValueFloat x = ValueScalar' (ConstFloat x)
pattern ValueBoolean :: Bool -> Value
pattern ValueBoolean x = ValueScalar' (ConstBoolean x)
pattern ValueString :: String -> Value
pattern ValueString x = ValueScalar' (ConstString x)
pattern ValueEnum :: Name -> Value
pattern ValueEnum x = ValueScalar' (ConstEnum x)
pattern ValueList :: forall t. List' t -> Value' t
pattern ValueList x = ValueList' x
pattern ValueObject :: forall t. Object' t -> Value' t
pattern ValueObject x = ValueObject' x
pattern ValueNull :: Value
pattern ValueNull = ValueScalar' ConstNull
-- | If a value is an object, return just that. Otherwise @Nothing@.
toObject :: Value' scalar -> Maybe (Object' scalar)
toObject (ValueObject' o) = pure o
toObject _ = empty
-- * Scalars
-- | A non-variable value which contains no other values.
data ConstScalar
= ConstInt Int32
| ConstFloat Double
| ConstBoolean Bool
| ConstString String
| ConstEnum Name
| ConstNull
deriving (Eq, Ord, Show)
instance ToJSON ConstScalar where
toJSON (ConstInt x) = toJSON x
toJSON (ConstFloat x) = toJSON x
toJSON (ConstBoolean x) = toJSON x
toJSON (ConstString x) = toJSON x
toJSON (ConstEnum x) = toJSON x
toJSON ConstNull = Aeson.Null
-- | A value which contains no other values, and might be a variable that
-- might lack a definition.
type UnresolvedVariableScalar = Either Variable ConstScalar
-- | Generate an arbitrary scalar value.
instance Arbitrary ConstScalar where
arbitrary = oneof [ ConstInt <$> arbitrary
, ConstFloat <$> arbitrary
, ConstBoolean <$> arbitrary
, ConstString <$> arbitrary
, ConstEnum <$> arbitrary
, pure ConstNull
]
-- | Convert a constant scalar to an AST.Value
constScalarToAST :: ConstScalar -> AST.Value
constScalarToAST scalar =
case scalar of
ConstInt x -> AST.ValueInt x
ConstFloat x -> AST.ValueFloat x
ConstBoolean x -> AST.ValueBoolean x
ConstString (String x) -> AST.ValueString (AST.StringValue x)
ConstEnum x -> AST.ValueEnum x
ConstNull -> AST.ValueNull
-- | Convert a variable scalar to an AST.Value
variableToAST :: UnresolvedVariableScalar -> AST.Value
variableToAST (Left variable) = AST.ValueVariable variable
variableToAST (Right constant) = constScalarToAST constant
-- | Convert a value from the AST into a variable scalar, presuming it /is/ a
-- scalar.
astToScalar :: AST.Value -> Maybe UnresolvedVariableScalar
astToScalar (AST.ValueInt x) = pure $ Right $ ConstInt x
astToScalar (AST.ValueFloat x) = pure $ Right $ ConstFloat x
astToScalar (AST.ValueBoolean x) = pure $ Right $ ConstBoolean x
astToScalar (AST.ValueString (AST.StringValue x)) = pure $ Right $ ConstString (String x)
astToScalar (AST.ValueEnum x) = pure $ Right $ ConstEnum x
astToScalar AST.ValueNull = pure $ Right ConstNull
astToScalar (AST.ValueVariable x) = pure $ Left x
astToScalar _ = empty
-- * Strings
newtype String = String Text deriving (Eq, Ord, Show)
instance Arbitrary String where
arbitrary = String <$> arbitraryText
instance ToJSON String where
toJSON (String x) = toJSON x
-- * Lists
newtype List' scalar = List' [Value' scalar] deriving (Eq, Ord, Show, Functor)
instance Foldable List' where
foldMap f (List' values) = mconcat (map (foldMap f) values)
instance Traversable List' where
traverse f (List' xs) = List' <$> traverse (traverse f) xs
-- | A list of values that are known to be constants.
--
-- Note that this list might not be valid GraphQL, because GraphQL only allows
-- homogeneous lists (i.e. all elements of the same type), and we do no type
-- checking at this point.
type List = List' ConstScalar
instance Arbitrary scalar => Arbitrary (List' scalar) where
-- TODO: GraphQL does not allow heterogeneous lists:
-- https://facebook.github.io/graphql/#sec-Lists, so this will generate
-- invalid lists.
arbitrary = List' <$> listOf arbitrary
instance ToJSON scalar => ToJSON (List' scalar) where
toJSON (List' x) = toJSON x
-- * Objects
-- | A GraphQL object.
--
-- Note that https://facebook.github.io/graphql/#sec-Response calls these
-- \"Maps\", but everywhere else in the spec refers to them as objects.
newtype Object' scalar = Object' (OrderedMap Name (Value' scalar)) deriving (Eq, Ord, Show, Functor)
instance Foldable Object' where
foldMap f (Object' fieldMap) = foldMap (foldMap f) fieldMap
instance Traversable Object' where
traverse f (Object' xs) = Object' <$> traverse (traverse f) xs
-- | A GraphQL object that contains only non-variable values.
type Object = Object' ConstScalar
objectFields :: Object' scalar -> [ObjectField' scalar]
objectFields (Object' object) = map (uncurry ObjectField') (OrderedMap.toList object)
instance Arbitrary scalar => Arbitrary (Object' scalar) where
arbitrary = sized genObject
-- | Generate an arbitrary object to the given maximum depth.
genObject :: Arbitrary scalar => Int -> Gen (Object' scalar)
genObject n = Object' <$> OrderedMap.genOrderedMap arbitrary (genValue n)
data ObjectField' scalar = ObjectField' Name (Value' scalar) deriving (Eq, Ord, Show, Functor)
-- | A field of an object that has a non-variable value.
type ObjectField = ObjectField' ConstScalar
pattern ObjectField :: forall t. Name -> Value' t -> ObjectField' t
pattern ObjectField name value = ObjectField' name value
instance Arbitrary scalar => Arbitrary (ObjectField' scalar) where
arbitrary = ObjectField' <$> arbitrary <*> arbitrary
-- | Make an object from a list of object fields.
makeObject :: [ObjectField' scalar] -> Maybe (Object' scalar)
makeObject fields = objectFromList [(name, value) | ObjectField' name value <- fields]
-- | Make an object from an ordered map.
objectFromOrderedMap :: OrderedMap Name (Value' scalar) -> Object' scalar
objectFromOrderedMap = Object'
-- | Create an object from a list of (name, value) pairs.
objectFromList :: [(Name, Value' scalar)] -> Maybe (Object' scalar)
objectFromList xs = Object' <$> OrderedMap.orderedMap xs
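-- A small construction sketch (illustrative only; assumes 'makeName' yields
-- 'Either NameError Name' as re-exported above):
--
-- > answerObject :: Maybe Object
-- > answerObject = do
-- >   name <- either (const Nothing) Just (makeName "answer")
-- >   objectFromList [(name, ValueInt 42)]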
unionObjects :: [Object' scalar] -> Maybe (Object' scalar)
unionObjects objects = Object' <$> OrderedMap.unions [obj | Object' obj <- objects]
instance ToJSON scalar => ToJSON (Object' scalar) where
-- Direct encoding to preserve order of keys / values
toJSON (Object' xs) = toJSON (Map.fromList [(unName k, v) | (k, v) <- OrderedMap.toList xs])
toEncoding (Object' xs) = pairs (foldMap (\(k, v) -> toS (unName k) .= v) (OrderedMap.toList xs))
-- * Conversion to and from AST.
-- | Convert an AST value into a literal value.
--
-- This is a stop-gap until we have proper conversion of user queries into
-- canonical forms.
astToValue' :: (AST.Value -> scalar) -> AST.Value -> Maybe (Value' scalar)
astToValue' f x@(AST.ValueInt _) = pure (ValueScalar' (f x))
astToValue' f x@(AST.ValueFloat _) = pure (ValueScalar' (f x))
astToValue' f x@(AST.ValueBoolean _) = pure (ValueScalar' (f x))
astToValue' f x@(AST.ValueString (AST.StringValue _)) = pure (ValueScalar' (f x))
astToValue' f x@(AST.ValueEnum _) = pure (ValueScalar' (f x))
astToValue' f AST.ValueNull = pure (ValueScalar' (f AST.ValueNull))
astToValue' f x@(AST.ValueVariable _) = pure (ValueScalar' (f x))
astToValue' f (AST.ValueList (AST.ListValue xs)) = ValueList' . List' <$> traverse (astToValue' f) xs
astToValue' f (AST.ValueObject (AST.ObjectValue fields)) = do
fields' <- traverse toObjectField fields
object <- makeObject fields'
pure (ValueObject' object)
where
toObjectField (AST.ObjectField name value) = ObjectField' name <$> astToValue' f value
-- | Convert an AST value to a variable value.
--
-- Will fail if the AST value contains duplicate object fields, or is
-- otherwise invalid.
astToVariableValue :: HasCallStack => AST.Value -> Maybe UnresolvedVariableValue
astToVariableValue ast = astToValue' convertScalar ast
where
convertScalar x =
case astToScalar x of
Just scalar -> scalar
Nothing -> panic ("Non-scalar passed to convertScalar, bug in astToValue': " <> show x)
-- | Convert a value to an AST value.
valueToAST :: Value -> AST.Value
valueToAST = valueToAST' constScalarToAST
-- | Convert a variable value to an AST value.
variableValueToAST :: UnresolvedVariableValue -> AST.Value
variableValueToAST = valueToAST' variableToAST
-- | Convert a literal value into an AST value.
--
-- Nulls are converted into Nothing.
--
-- This function probably isn't particularly useful, but it functions as a
-- stop-gap until we have QuickCheck generators for the AST.
valueToAST' :: (scalar -> AST.Value) -> Value' scalar -> AST.Value
valueToAST' f (ValueScalar' x) = f x
valueToAST' f (ValueList' (List' xs)) = AST.ValueList (AST.ListValue (map (valueToAST' f) xs))
valueToAST' f (ValueObject' (Object' fields)) = AST.ValueObject (AST.ObjectValue (map toObjectField (OrderedMap.toList fields)))
where
toObjectField (name, value) = AST.ObjectField name (valueToAST' f value)
|
jml/graphql-api
|
src/GraphQL/Internal/Value.hs
|
bsd-3-clause
| 12,907 | 0 | 14 | 2,337 | 3,360 | 1,758 | 1,602 | 219 | 6 |
-- Copyright 2020 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
module Dex.Foreign.API where
import Foreign.Ptr
import Foreign.C
import Dex.Foreign.Context
import Dex.Foreign.Serialize
import Dex.Foreign.JIT
-- Public API (commented out exports are defined in rts.c)
-- Initialization and basic runtime
-- foreign export ccall "dexInit" _ :: IO ()
-- foreign export ccall "dexFini" _ :: IO ()
-- foreign export ccall "dexGetError" _ :: CString
-- Context
foreign export ccall "dexCreateContext" dexCreateContext :: IO (Ptr Context)
foreign export ccall "dexDestroyContext" dexDestroyContext :: Ptr Context -> IO ()
foreign export ccall "dexForkContext" dexForkContext :: Ptr Context -> IO (Ptr Context)
foreign export ccall "dexInsert" dexInsert :: Ptr Context -> CString -> Ptr AtomEx -> IO (Ptr Context)
foreign export ccall "dexEval" dexEval :: Ptr Context -> CString -> IO (Ptr Context)
foreign export ccall "dexLookup" dexLookup :: Ptr Context -> CString -> IO (Ptr AtomEx)
foreign export ccall "dexFreshName" dexFreshName :: Ptr Context -> IO CString
-- Serialization
foreign export ccall "dexPrint" dexPrint :: Ptr Context -> Ptr AtomEx -> IO CString
foreign export ccall "dexToCAtom" dexToCAtom :: Ptr AtomEx -> Ptr CAtom -> IO CInt
foreign export ccall "dexFromCAtom" dexFromCAtom :: Ptr CAtom -> IO (Ptr AtomEx)
-- JIT
foreign export ccall "dexCreateJIT" dexCreateJIT :: IO (Ptr JIT)
foreign export ccall "dexDestroyJIT" dexDestroyJIT :: Ptr JIT -> IO ()
foreign export ccall "dexCompile" dexCompile :: Ptr JIT -> Ptr Context -> Ptr AtomEx -> IO (Ptr NativeFunction)
foreign export ccall "dexUnload" dexUnload :: Ptr JIT -> Ptr NativeFunction -> IO ()
foreign export ccall "dexGetFunctionSignature" dexGetFunctionSignature :: Ptr JIT -> Ptr NativeFunction -> IO (Ptr ClosedExportedSignature)
foreign export ccall "dexFreeFunctionSignature" dexFreeFunctionSignature :: Ptr ClosedExportedSignature -> IO ()
|
google-research/dex-lang
|
src/Dex/Foreign/API.hs
|
bsd-3-clause
| 2,182 | 0 | 11 | 429 | 509 | 264 | 245 | 22 | 0 |
module Main where
--import Circle (saveCircleIO, findCircleIO)
--import Square (saveSquareIO, findSquareIO, findPointsByDiagonal)
--import Dots (saveDotsIO , findDotsIO, findDotsBmpIO)
import Lines (findLinesIO)
import Cells (findCellsIO)
main = do
--findLinesIO "lines/2_000_o.bmp"
findCellsIO "cells/3_030_o.bmp"
putStrLn "DONE"
|
zelinskiy/ImRec
|
src/Main.hs
|
bsd-3-clause
| 344 | 0 | 7 | 46 | 41 | 24 | 17 | 6 | 1 |
-- |
-- Module : Data.Counts
-- Copyright : (c) 2012 Jan Snajder
-- License : BSD-3 (see the LICENSE file)
--
-- Maintainer : Jan Snajder <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- A simple data structure for counting values (similar to Data.Multiset).
--
-------------------------------------------------------------------------------
module Data.Counts (
Counts,
fromList,
fromSet,
toList,
toSet,
elems,
counts,
total,
size,
countOf,
empty,
set,
inc,
dec,
remove,
removeBelow,
removeList,
removeSet,
member,
union,
difference,
sumCounts,
fromCounts,
probOf,
probs,
logProb,
logProbs) where
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.Ord (comparing)
import Data.List (sortBy,foldl')
data Counts a = Counts {
counts_ :: !(M.Map a Int),
total :: !Int }
deriving Eq
instance Show a => Show (Counts a) where
show cs = "fromCounts " ++ show (counts cs)
instance (Ord a, Read a) => Read (Counts a) where
readsPrec _ s | p == "fromCounts " =
case readsPrec 0 s2 of
[(cs,s3)] -> [(fromCounts cs,s3)]
[] -> []
| otherwise = []
where (p,s2) = splitAt 11 s
empty :: Ord a => Counts a
empty = fromCounts []
union :: Ord a => Counts a -> Counts a -> Counts a
union cs1 cs2 = Counts { counts_ = m, total = total cs1 + total cs2 }
where m = M.unionWith (+) (counts_ cs1) (counts_ cs2)
sumCounts :: Ord a => [Counts a] -> Counts a
sumCounts = foldl1 union
difference :: Ord a => Counts a -> Counts a -> Counts a
difference cs1 cs2 = Counts { counts_ = m, total = sum $ M.elems m }
where m = M.differenceWith f (counts_ cs1) (counts_ cs2)
f c1 c2 = let d=c1-c2 in if d<=0 then Nothing else Just d
size :: Counts a -> Int
size = M.size . counts_
counts :: Counts a -> [(a,Int)]
counts = M.toAscList . counts_
fromList :: (Ord a) => [a] -> Counts a
fromList xs = fromCounts $ zip xs (repeat 1)
fromSet :: (Ord a) => S.Set a -> Counts a
fromSet xs = Counts {
counts_ = m, total = sum $ M.elems m }
where m = M.fromAscList $ zip (S.toAscList xs) (repeat 1)
toList :: (Ord a) => Counts a -> [a]
toList = M.keys . counts_
toSet :: (Ord a) => Counts a -> S.Set a
toSet = M.keysSet . counts_
elems :: (Ord a) => Counts a -> [a]
elems = toList
fromListWith' :: (Ord k) => (a -> a -> a) -> [(k,a)] -> M.Map k a
fromListWith' f = foldl' (\m (k,x) -> M.insertWith f k x m) M.empty
fromCounts :: (Ord a) => [(a,Int)] -> Counts a
fromCounts xs = Counts {
counts_ = m, total = sum $ M.elems m}
where m = M.fromListWith (+) . filter ((>0).snd) $ xs
countOf :: (Ord a) => a -> Counts a -> Int
countOf x cs = M.findWithDefault 0 x (counts_ cs)
set :: (Ord a) => a -> Int -> Counts a -> Counts a
set x c cs = Counts {
counts_ = if c<=0 then M.delete x (counts_ cs)
else M.insert x c (counts_ cs),
total = total cs + c - M.findWithDefault 0 x (counts_ cs)}
remove :: (Ord a) => a -> Counts a -> Counts a
remove x cs = set x 0 cs
removeBelow :: (Ord a) => Int -> Counts a -> Counts a
removeBelow t cs =
Counts { counts_ = m, total = sum $ M.elems m }
where m = M.filter (>=t) (counts_ cs)
removeList :: (Ord a) => [a] -> Counts a -> Counts a
removeList xs cs = foldl (flip $ remove) cs xs
removeSet :: (Ord a) => S.Set a -> Counts a -> Counts a
removeSet xs cs = S.fold remove cs xs
member :: (Ord a) => a -> Counts a -> Bool
member x = M.member x . counts_
inc :: (Ord a) => a -> Counts a -> Counts a
inc x cs = Counts {
counts_ = M.insertWith (+) x 1 (counts_ cs),
total = total cs + 1}
dec :: (Ord a) => a -> Counts a -> Counts a
dec x cnts@(Counts {counts_ = cs, total = t}) =
case M.lookup x cs of
Nothing -> cnts
Just 1 -> Counts { counts_ = M.delete x cs, total = t - 1 }
Just c -> Counts { counts_ = M.insert x (c-1) cs, total = t - 1}
probOf :: (Ord a) => a -> Counts a -> Double
probOf x cs = realToFrac (countOf x cs) / realToFrac (total cs)
logProb :: (Ord a) => a -> Counts a -> Double
logProb a = log . probOf a
probs :: (Ord a) => Counts a -> [(a,Double)]
probs cs =
map (\(x,c) -> (x, realToFrac c / realToFrac (total cs))) (counts cs)
logProbs :: (Ord a) => Counts a -> [(a,Double)]
logProbs = map (\(x,c) -> (x,log c)) . probs
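-- A hedged usage sketch (not part of the original module; values shown are
-- worked out from the definitions above):
--
-- > cs :: Counts Char
-- > cs = fromList "abracadabra"
-- >
-- > countOf 'a' cs       -- 5
-- > probOf 'b' cs        -- 2/11 ~ 0.18
-- > total (inc 'z' cs)   -- 12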
|
jsnajder/counts
|
src/Data/Counts.hs
|
bsd-3-clause
| 4,366 | 0 | 12 | 1,109 | 2,037 | 1,082 | 955 | 120 | 3 |
-- Copyright © 2012 Frank S. Thomas <[email protected]>
-- All rights reserved.
--
-- Use of this source code is governed by a BSD-style license that
-- can be found in the LICENSE file.
-- | Ohloh API Reference: <http://meta.ohloh.net/referencekudo/>
module Web.Ohloh.Kudo (
Kudo(..),
xpKudo
) where
import Text.XML.HXT.Arrow.Pickle
import Web.Ohloh.Common
-- | 'Kudo' is a simple gesture of thanks, praise, or endorsement from an
-- 'Web.Ohloh.Account.Account' to another person.
data Kudo = Kudo {
kudoCreatedAt :: String,
kudoSenderAccountId :: String,
kudoSenderAccountName :: String,
kudoReceiverAccountId :: Maybe String,
kudoReceiverAccountName :: Maybe String,
kudoProjectId :: Maybe String,
kudoProjectName :: Maybe String,
kudoContributorId :: Maybe String,
kudoContributorName :: Maybe String
} deriving (Eq, Read, Show)
instance XmlPickler Kudo where
xpickle = xpKudo
instance ReadXmlString Kudo
instance ShowXmlString Kudo
xpKudo :: PU Kudo
xpKudo =
xpElem "kudo" $
xpWrap (uncurry9 Kudo,
\(Kudo ca sai san rai ran pi pn ci cn) ->
(ca, sai, san, rai, ran, pi, pn, ci, cn)) $
xp9Tuple (xpElem "created_at" xpText0)
(xpElem "sender_account_id" xpText0)
(xpElem "sender_account_name" xpText0)
(xpOption (xpElem "receiver_account_id" xpText0))
(xpOption (xpElem "receiver_account_name" xpText0))
(xpOption (xpElem "project_id" xpText0))
(xpOption (xpElem "project_name" xpText0))
(xpOption (xpElem "contributor_id" xpText0))
(xpOption (xpElem "contributor_name" xpText0))
|
fthomas/ohloh-hs
|
Web/Ohloh/Kudo.hs
|
bsd-3-clause
| 1,667 | 0 | 11 | 378 | 381 | 212 | 169 | 35 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[SimplMonad]{The simplifier Monad}
-}
{-# LANGUAGE CPP #-}
module ETA.SimplCore.SimplEnv (
InId, InBind, InExpr, InAlt, InArg, InType, InBndr, InVar,
OutId, OutTyVar, OutBind, OutExpr, OutAlt, OutArg, OutType, OutBndr, OutVar,
InCoercion, OutCoercion,
-- The simplifier mode
setMode, getMode, updMode,
-- Environments
SimplEnv(..), StaticEnv, pprSimplEnv, -- Temp not abstract
mkSimplEnv, extendIdSubst,
ETA.SimplCore.SimplEnv.extendTvSubst,
ETA.SimplCore.SimplEnv.extendCvSubst,
zapSubstEnv, setSubstEnv,
getInScope, setInScope, setInScopeSet, modifyInScope, addNewInScopeIds,
getSimplRules,
SimplSR(..), mkContEx, substId, lookupRecBndr, refineFromInScope,
substExpr,
simplNonRecBndr, simplRecBndrs,
simplBinder, simplBinders,
substTy, substTyVar, getTvSubst,
getCvSubst, substCo, substCoVar,
-- Floats
Floats, emptyFloats, isEmptyFloats, addNonRec, addFloats, extendFloats,
wrapFloats, setFloats, zapFloats, addRecFloats, mapFloats,
doFloatFromRhs, getFloatBinds
) where
#include "HsVersions.h"
import ETA.SimplCore.SimplMonad
import ETA.SimplCore.CoreMonad ( SimplifierMode(..) )
import ETA.Core.CoreSyn
import ETA.Core.CoreUtils
import ETA.BasicTypes.Var
import ETA.BasicTypes.VarEnv
import ETA.BasicTypes.VarSet
import ETA.Utils.OrdList
import ETA.BasicTypes.Id
import qualified ETA.BasicTypes.Id as Id
import qualified ETA.Core.CoreSubst as CoreSubst
import ETA.Core.MkCore ( mkWildValBinder )
import ETA.Prelude.TysWiredIn
import qualified ETA.Types.Type as Type
import ETA.Types.Type hiding ( substTy, substTyVarBndr, substTyVar )
import qualified ETA.Types.Coercion as Coercion
import ETA.Types.Coercion hiding ( substCo, substTy, substCoVar, substCoVarBndr, substTyVarBndr )
import ETA.BasicTypes.BasicTypes
import ETA.Utils.MonadUtils
import ETA.Utils.Outputable
import ETA.Utils.FastString
import ETA.Utils.Util
import Data.List
{-
************************************************************************
* *
\subsection[Simplify-types]{Type declarations}
* *
************************************************************************
-}
type InBndr = CoreBndr
type InVar = Var -- Not yet cloned
type InId = Id -- Not yet cloned
type InType = Type -- Ditto
type InBind = CoreBind
type InExpr = CoreExpr
type InAlt = CoreAlt
type InArg = CoreArg
type InCoercion = Coercion
type OutBndr = CoreBndr
type OutVar = Var -- Cloned
type OutId = Id -- Cloned
type OutTyVar = TyVar -- Cloned
type OutType = Type -- Cloned
type OutCoercion = Coercion
type OutBind = CoreBind
type OutExpr = CoreExpr
type OutAlt = CoreAlt
type OutArg = CoreArg
{-
************************************************************************
* *
\subsubsection{The @SimplEnv@ type}
* *
************************************************************************
-}
data SimplEnv
= SimplEnv {
----------- Static part of the environment -----------
-- Static in the sense of lexically scoped,
-- wrt the original expression
seMode :: SimplifierMode,
-- The current substitution
seTvSubst :: TvSubstEnv, -- InTyVar |--> OutType
seCvSubst :: CvSubstEnv, -- InCoVar |--> OutCoercion
seIdSubst :: SimplIdSubst, -- InId |--> OutExpr
----------- Dynamic part of the environment -----------
-- Dynamic in the sense of describing the setup where
-- the expression finally ends up
-- The current set of in-scope variables
-- They are all OutVars, and all bound in this module
seInScope :: InScopeSet, -- OutVars only
-- Includes all variables bound by seFloats
seFloats :: Floats
-- See Note [Simplifier floats]
}
type StaticEnv = SimplEnv -- Just the static part is relevant
pprSimplEnv :: SimplEnv -> SDoc
-- Used for debugging; selective
pprSimplEnv env
= vcat [ptext (sLit "TvSubst:") <+> ppr (seTvSubst env),
ptext (sLit "IdSubst:") <+> ppr (seIdSubst env),
ptext (sLit "InScope:") <+> vcat (map ppr_one in_scope_vars)
]
where
in_scope_vars = varEnvElts (getInScopeVars (seInScope env))
ppr_one v | isId v = ppr v <+> ppr (idUnfolding v)
| otherwise = ppr v
type SimplIdSubst = IdEnv SimplSR -- IdId |--> OutExpr
-- See Note [Extending the Subst] in CoreSubst
data SimplSR
= DoneEx OutExpr -- Completed term
| DoneId OutId -- Completed term variable
| ContEx TvSubstEnv -- A suspended substitution
CvSubstEnv
SimplIdSubst
InExpr
instance Outputable SimplSR where
ppr (DoneEx e) = ptext (sLit "DoneEx") <+> ppr e
ppr (DoneId v) = ptext (sLit "DoneId") <+> ppr v
ppr (ContEx _tv _cv _id e) = vcat [ptext (sLit "ContEx") <+> ppr e {-,
ppr (filter_env tv), ppr (filter_env id) -}]
-- where
-- fvs = exprFreeVars e
-- filter_env env = filterVarEnv_Directly keep env
-- keep uniq _ = uniq `elemUFM_Directly` fvs
{-
Note [SimplEnv invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~
seInScope:
The in-scope part of Subst includes *all* in-scope TyVars and Ids
The elements of the set may have better IdInfo than the
occurrences of in-scope Ids, and (more important) they will
have a correctly-substituted type. So we use a lookup in this
set to replace occurrences
The Ids in the InScopeSet are replete with their Rules,
and as we gather info about the unfolding of an Id, we replace
it in the in-scope set.
The in-scope set is actually a mapping OutVar -> OutVar, and
in case expressions we sometimes bind
seIdSubst:
The substitution is *apply-once* only, because InIds and OutIds
can overlap.
For example, we generally omit mappings
a77 -> a77
from the substitution, when we decide not to clone a77, but it's quite
legitimate to put the mapping in the substitution anyway.
Furthermore, consider
let x = case k of I# x77 -> ... in
let y = case k of I# x77 -> ... in ...
and suppose the body is strict in both x and y. Then the simplifier
will pull the first (case k) to the top; so the second (case k) will
cancel out, mapping x77 to, well, x77! But one is an in-Id and the
other is an out-Id.
  Of course, the substitution *must* be applied! Things in its domain
simply aren't necessarily bound in the result.
* substId adds a binding (DoneId new_id) to the substitution if
the Id's unique has changed
Note, though that the substitution isn't necessarily extended
if the type of the Id changes. Why not? Because of the next point:
* We *always, always* finish by looking up in the in-scope set
any variable that doesn't get a DoneEx or DoneVar hit in the substitution.
Reason: so that we never finish up with a "old" Id in the result.
An old Id might point to an old unfolding and so on... which gives a space
leak.
[The DoneEx and DoneVar hits map to "new" stuff.]
* It follows that substExpr must not do a no-op if the substitution is empty.
substType is free to do so, however.
* When we come to a let-binding (say) we generate new IdInfo, including an
unfolding, attach it to the binder, and add this newly adorned binder to
the in-scope set. So all subsequent occurrences of the binder will get
mapped to the full-adorned binder, which is also the one put in the
binding site.
* The in-scope "set" usually maps x->x; we use it simply for its domain.
  But sometimes we have two in-scope Ids that are synonyms, and should
map to the same target: x->x, y->x. Notably:
case y of x { ... }
That's why the "set" is actually a VarEnv Var
-}
mkSimplEnv :: SimplifierMode -> SimplEnv
mkSimplEnv mode
= SimplEnv { seMode = mode
, seInScope = init_in_scope
, seFloats = emptyFloats
, seTvSubst = emptyVarEnv
, seCvSubst = emptyVarEnv
, seIdSubst = emptyVarEnv }
-- The top level "enclosing CC" is "SUBSUMED".
init_in_scope :: InScopeSet
init_in_scope = mkInScopeSet (unitVarSet (mkWildValBinder unitTy))
-- See Note [WildCard binders]
{-
Note [WildCard binders]
~~~~~~~~~~~~~~~~~~~~~~~
The program to be simplified may have wild binders
case e of wild { p -> ... }
We want to *rename* them away, so that there are no
occurrences of 'wild-id' (with wildCardKey). The easy
way to do that is to start of with a representative
Id in the in-scope set
There can be *occurrences* of wild-id. For example,
MkCore.mkCoreApp transforms
e (a /# b) --> case (a /# b) of wild { DEFAULT -> e wild }
This is ok provided 'wild' isn't free in 'e', and that's the delicate
thing. Generally, you want to run the simplifier to get rid of the
wild-ids before doing much else.
It's a very dark corner of GHC. Maybe it should be cleaned up.
-}
getMode :: SimplEnv -> SimplifierMode
getMode env = seMode env
setMode :: SimplifierMode -> SimplEnv -> SimplEnv
setMode mode env = env { seMode = mode }
updMode :: (SimplifierMode -> SimplifierMode) -> SimplEnv -> SimplEnv
updMode upd env = env { seMode = upd (seMode env) }
---------------------
extendIdSubst :: SimplEnv -> Id -> SimplSR -> SimplEnv
extendIdSubst env@(SimplEnv {seIdSubst = subst}) var res
= ASSERT2( isId var && not (isCoVar var), ppr var )
env {seIdSubst = extendVarEnv subst var res}
extendTvSubst :: SimplEnv -> TyVar -> Type -> SimplEnv
extendTvSubst env@(SimplEnv {seTvSubst = subst}) var res
= env {seTvSubst = extendVarEnv subst var res}
extendCvSubst :: SimplEnv -> CoVar -> Coercion -> SimplEnv
extendCvSubst env@(SimplEnv {seCvSubst = subst}) var res
= env {seCvSubst = extendVarEnv subst var res}
---------------------
getInScope :: SimplEnv -> InScopeSet
getInScope env = seInScope env
setInScopeSet :: SimplEnv -> InScopeSet -> SimplEnv
setInScopeSet env in_scope = env {seInScope = in_scope}
setInScope :: SimplEnv -> SimplEnv -> SimplEnv
-- Set the in-scope set, and *zap* the floats
setInScope env env_with_scope
= env { seInScope = seInScope env_with_scope,
seFloats = emptyFloats }
setFloats :: SimplEnv -> SimplEnv -> SimplEnv
-- Set the in-scope set *and* the floats
setFloats env env_with_floats
= env { seInScope = seInScope env_with_floats,
seFloats = seFloats env_with_floats }
addNewInScopeIds :: SimplEnv -> [CoreBndr] -> SimplEnv
-- The new Ids are guaranteed to be freshly allocated
addNewInScopeIds env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst }) vs
= env { seInScope = in_scope `extendInScopeSetList` vs,
seIdSubst = id_subst `delVarEnvList` vs }
-- Why delete? Consider
-- let x = a*b in (x, \x -> x+3)
-- We add [x |-> a*b] to the substitution, but we must
-- _delete_ it from the substitution when going inside
-- the (\x -> ...)!
modifyInScope :: SimplEnv -> CoreBndr -> SimplEnv
-- The variable should already be in scope, but
-- replace the existing version with this new one
-- which has more information
modifyInScope env@(SimplEnv {seInScope = in_scope}) v
= env {seInScope = extendInScopeSet in_scope v}
---------------------
zapSubstEnv :: SimplEnv -> SimplEnv
zapSubstEnv env = env {seTvSubst = emptyVarEnv, seCvSubst = emptyVarEnv, seIdSubst = emptyVarEnv}
setSubstEnv :: SimplEnv -> TvSubstEnv -> CvSubstEnv -> SimplIdSubst -> SimplEnv
setSubstEnv env tvs cvs ids = env { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids }
mkContEx :: SimplEnv -> InExpr -> SimplSR
mkContEx (SimplEnv { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids }) e = ContEx tvs cvs ids e
{-
************************************************************************
* *
\subsection{Floats}
* *
************************************************************************
Note [Simplifier floats]
~~~~~~~~~~~~~~~~~~~~~~~~~
The Floats is a bunch of bindings, classified by a FloatFlag.
* All of them satisfy the let/app invariant
Examples
NonRec x (y:ys) FltLifted
Rec [(x,rhs)] FltLifted
     NonRec x* (p:q)      FltOkSpec -- RHS is WHNF. Question: why not FltLifted?
NonRec x# (y +# 3) FltOkSpec -- Unboxed, but ok-for-spec'n
NonRec x* (f y) FltCareful -- Strict binding; might fail or diverge
Can't happen:
NonRec x# (a /# b) -- Might fail; does not satisfy let/app
NonRec x# (f y) -- Might diverge; does not satisfy let/app
-}
data Floats = Floats (OrdList OutBind) FloatFlag
-- See Note [Simplifier floats]
data FloatFlag
= FltLifted -- All bindings are lifted and lazy
-- Hence ok to float to top level, or recursive
| FltOkSpec -- All bindings are FltLifted *or*
-- strict (perhaps because unlifted,
-- perhaps because of a strict binder),
-- *and* ok-for-speculation
-- Hence ok to float out of the RHS
-- of a lazy non-recursive let binding
-- (but not to top level, or into a rec group)
| FltCareful -- At least one binding is strict (or unlifted)
-- and not guaranteed cheap
-- Do not float these bindings out of a lazy let
instance Outputable Floats where
ppr (Floats binds ff) = ppr ff $$ ppr (fromOL binds)
instance Outputable FloatFlag where
ppr FltLifted = ptext (sLit "FltLifted")
ppr FltOkSpec = ptext (sLit "FltOkSpec")
ppr FltCareful = ptext (sLit "FltCareful")
andFF :: FloatFlag -> FloatFlag -> FloatFlag
andFF FltCareful _ = FltCareful
andFF FltOkSpec FltCareful = FltCareful
andFF FltOkSpec _ = FltOkSpec
andFF FltLifted flt = flt
doFloatFromRhs :: TopLevelFlag -> RecFlag -> Bool -> OutExpr -> SimplEnv -> Bool
-- If you change this function look also at FloatIn.noFloatFromRhs
doFloatFromRhs lvl rec str rhs (SimplEnv {seFloats = Floats fs ff})
= not (isNilOL fs) && want_to_float && can_float
where
want_to_float = isTopLevel lvl || exprIsCheap rhs || exprIsExpandable rhs
-- See Note [Float when cheap or expandable]
can_float = case ff of
FltLifted -> True
FltOkSpec -> isNotTopLevel lvl && isNonRec rec
FltCareful -> isNotTopLevel lvl && isNonRec rec && str
{-
Note [Float when cheap or expandable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to float a let from a let if the residual RHS is
a) cheap, such as (\x. blah)
b) expandable, such as (f b) if f is CONLIKE
But there are
- cheap things that are not expandable (eg \x. expensive)
- expandable things that are not cheap (eg (f b) where b is CONLIKE)
so we must take the 'or' of the two.
-}
emptyFloats :: Floats
emptyFloats = Floats nilOL FltLifted
unitFloat :: OutBind -> Floats
-- This key function constructs a singleton float with the right form
unitFloat bind = Floats (unitOL bind) (flag bind)
where
flag (Rec {}) = FltLifted
flag (NonRec bndr rhs)
| not (isStrictId bndr) = FltLifted
| exprOkForSpeculation rhs = FltOkSpec -- Unlifted, and lifted but ok-for-spec (eg HNF)
| otherwise = ASSERT2( not (isUnLiftedType (idType bndr)), ppr bndr )
FltCareful
-- Unlifted binders can only be let-bound if exprOkForSpeculation holds
addNonRec :: SimplEnv -> OutId -> OutExpr -> SimplEnv
-- Add a non-recursive binding and extend the in-scope set
-- The latter is important; the binder may already be in the
-- in-scope set (although it might also have been created with newId)
-- but it may now have more IdInfo
addNonRec env id rhs
= id `seq` -- This seq forces the Id, and hence its IdInfo,
-- and hence any inner substitutions
env { seFloats = seFloats env `addFlts` unitFloat (NonRec id rhs),
seInScope = extendInScopeSet (seInScope env) id }
extendFloats :: SimplEnv -> OutBind -> SimplEnv
-- Add these bindings to the floats, and extend the in-scope env too
extendFloats env bind
= env { seFloats = seFloats env `addFlts` unitFloat bind,
seInScope = extendInScopeSetList (seInScope env) bndrs }
where
bndrs = bindersOf bind
addFloats :: SimplEnv -> SimplEnv -> SimplEnv
-- Add the floats for env2 to env1;
-- *plus* the in-scope set for env2, which is bigger
-- than that for env1
addFloats env1 env2
= env1 {seFloats = seFloats env1 `addFlts` seFloats env2,
seInScope = seInScope env2 }
addFlts :: Floats -> Floats -> Floats
addFlts (Floats bs1 l1) (Floats bs2 l2)
= Floats (bs1 `appOL` bs2) (l1 `andFF` l2)
zapFloats :: SimplEnv -> SimplEnv
zapFloats env = env { seFloats = emptyFloats }
addRecFloats :: SimplEnv -> SimplEnv -> SimplEnv
-- Flattens the floats from env2 into a single Rec group,
-- prepends the floats from env1, and puts the result back in env2
-- This is all very specific to the way recursive bindings are
-- handled; see Simplify.simplRecBind
addRecFloats env1 env2@(SimplEnv {seFloats = Floats bs ff})
= ASSERT2( case ff of { FltLifted -> True; _ -> False }, ppr (fromOL bs) )
env2 {seFloats = seFloats env1 `addFlts` unitFloat (Rec (flattenBinds (fromOL bs)))}
wrapFloats :: SimplEnv -> OutExpr -> OutExpr
-- Wrap the floats around the expression; they should all
-- satisfy the let/app invariant, so mkLets should do the job just fine
wrapFloats (SimplEnv {seFloats = Floats bs _}) body
= foldrOL Let body bs
getFloatBinds :: SimplEnv -> [CoreBind]
getFloatBinds (SimplEnv {seFloats = Floats bs _})
= fromOL bs
isEmptyFloats :: SimplEnv -> Bool
isEmptyFloats (SimplEnv {seFloats = Floats bs _})
= isNilOL bs
mapFloats :: SimplEnv -> ((Id,CoreExpr) -> (Id,CoreExpr)) -> SimplEnv
mapFloats env@SimplEnv { seFloats = Floats fs ff } fun
= env { seFloats = Floats (mapOL app fs) ff }
where
app (NonRec b e) = case fun (b,e) of (b',e') -> NonRec b' e'
app (Rec bs) = Rec (map fun bs)
{-
************************************************************************
* *
Substitution of Vars
* *
************************************************************************
Note [Global Ids in the substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We look up even a global (eg imported) Id in the substitution. Consider
case X.g_34 of b { (a,b) -> ... case X.g_34 of { (p,q) -> ...} ... }
The binder-swap in the occurrence analyser will add a binding
for a LocalId version of g (with the same unique though):
case X.g_34 of b { (a,b) -> let g_34 = b in
... case X.g_34 of { (p,q) -> ...} ... }
So we want to look up the inner X.g_34 in the substitution, where we'll
find that it has been substituted by b. (Or conceivably cloned.)
-}
substId :: SimplEnv -> InId -> SimplSR
-- Returns DoneEx only on a non-Var expression
substId (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
= case lookupVarEnv ids v of -- Note [Global Ids in the substitution]
Nothing -> DoneId (refineFromInScope in_scope v)
Just (DoneId v) -> DoneId (refineFromInScope in_scope v)
Just (DoneEx (Var v)) -> DoneId (refineFromInScope in_scope v)
Just res -> res -- DoneEx non-var, or ContEx
-- Get the most up-to-date thing from the in-scope set
-- Even though it isn't in the substitution, it may be in
-- the in-scope set with better IdInfo
refineFromInScope :: InScopeSet -> Var -> Var
refineFromInScope in_scope v
| isLocalId v = case lookupInScope in_scope v of
Just v' -> v'
Nothing -> WARN( True, ppr v ) v -- This is an error!
| otherwise = v
lookupRecBndr :: SimplEnv -> InId -> OutId
-- Look up an Id which has been put into the envt by simplRecBndrs,
-- but where we have not yet done its RHS
lookupRecBndr (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v
= case lookupVarEnv ids v of
Just (DoneId v) -> v
Just _ -> pprPanic "lookupRecBndr" (ppr v)
Nothing -> refineFromInScope in_scope v
{-
************************************************************************
* *
\section{Substituting an Id binder}
* *
************************************************************************
* simplBinder, simplBinders: monadic version, only so that they
can be made strict via seq.
-}
-------------
simplBinders :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplBinders env bndrs = mapAccumLM simplBinder env bndrs
simplBinder :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr)
-- Used for lambda and case-bound variables
-- Clone Id if necessary, substitute type
-- Return with IdInfo already substituted, but (fragile) occurrence info zapped
-- The substitution is extended only if the variable is cloned, because
-- we *don't* need to use it to track occurrence info.
simplBinder env bndr
| isTyVar bndr = do { let (env', tv) = substTyVarBndr env bndr
; seqTyVar tv `seq` return (env', tv) }
| otherwise = do { let (env', id) = substIdBndr env bndr
; seqId id `seq` return (env', id) }
simplNonRecBndr :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr)
-- A non-recursive let binder
simplNonRecBndr env id
= do { let (env1, id1) = substIdBndr env id
; seqId id1 `seq` return (env1, id1) }
simplRecBndrs :: SimplEnv -> [InBndr] -> SimplM SimplEnv
-- Recursive let binders
simplRecBndrs env@(SimplEnv {}) ids
= do { let (env1, ids1) = mapAccumL substIdBndr env ids
; seqIds ids1 `seq` return env1 }
substIdBndr :: SimplEnv -> InBndr -> (SimplEnv, OutBndr)
-- Might be a coercion variable
substIdBndr env bndr
| isCoVar bndr = substCoVarBndr env bndr
| otherwise = substNonCoVarIdBndr env bndr
---------------
substNonCoVarIdBndr
:: SimplEnv
-> InBndr -- Env and binder to transform
-> (SimplEnv, OutBndr)
-- Clone Id if necessary, substitute its type
-- Return an Id with its
-- * Type substituted
-- * UnfoldingInfo, Rules, WorkerInfo zapped
-- * Fragile OccInfo (only) zapped: Note [Robust OccInfo]
-- * Robust info, retained especially arity and demand info,
-- so that they are available to occurrences that occur in an
-- earlier binding of a letrec
--
-- For the robust info, see Note [Arity robustness]
--
-- Augment the substitution if the unique changed
-- Extend the in-scope set with the new Id
--
-- Similar to CoreSubst.substIdBndr, except that
-- the type of id_subst differs
-- all fragile info is zapped
substNonCoVarIdBndr env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst })
old_id
= ASSERT2( not (isCoVar old_id), ppr old_id )
(env { seInScope = in_scope `extendInScopeSet` new_id,
seIdSubst = new_subst }, new_id)
where
id1 = uniqAway in_scope old_id
id2 = substIdType env id1
new_id = zapFragileIdInfo id2 -- Zaps rules, worker-info, unfolding
-- and fragile OccInfo
-- Extend the substitution if the unique has changed,
-- or there's some useful occurrence information
-- See the notes with substTyVarBndr for the delSubstEnv
new_subst | new_id /= old_id
= extendVarEnv id_subst old_id (DoneId new_id)
| otherwise
= delVarEnv id_subst old_id
------------------------------------
seqTyVar :: TyVar -> ()
seqTyVar b = b `seq` ()
seqId :: Id -> ()
seqId id = seqType (idType id) `seq`
idInfo id `seq`
()
seqIds :: [Id] -> ()
seqIds [] = ()
seqIds (id:ids) = seqId id `seq` seqIds ids
{-
Note [Arity robustness]
~~~~~~~~~~~~~~~~~~~~~~~
We *do* transfer the arity from the in_id of a let binding to the
out_id. This is important, so that the arity of an Id is visible in
its own RHS. For example:
f = \x. ....g (\y. f y)....
We can eta-reduce the arg to g, because f is a value. But that
needs to be visible.
This interacts with the 'state hack' too:
f :: Bool -> IO Int
f = \x. case x of
True -> f y
False -> \s -> ...
Can we eta-expand f? Only if we see that f has arity 1, and then we
take advantage of the 'state hack' on the result of
(f y) :: State# -> (State#, Int) to expand the arity one more.
There is a disadvantage though. Making the arity visible in the RHS
allows us to eta-reduce
f = \x -> f x
to
f = f
which technically is not sound. This is very much a corner case, so
I'm not worried about it. Another idea is to ensure that f's arity
never decreases; its arity started as 1, and we should never eta-reduce
below that.
Note [Robust OccInfo]
~~~~~~~~~~~~~~~~~~~~~
It's important that we *do* retain the loop-breaker OccInfo, because
that's what stops the Id getting inlined infinitely, in the body of
the letrec.
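For instance, schematically:
    letrec f = \x. ...g x...
           g = \y. ...f y...   -- say g is the chosen loop breaker
The loop-breaker flag lives in g's OccInfo; if the binder substitution
discarded it along with the fragile info, nothing would stop f and g
inlining into each other indefinitely.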
-}
{-
************************************************************************
* *
Impedance matching to type substitution
* *
************************************************************************
-}
getTvSubst :: SimplEnv -> TvSubst
getTvSubst (SimplEnv { seInScope = in_scope, seTvSubst = tv_env })
= mkTvSubst in_scope tv_env
getCvSubst :: SimplEnv -> CvSubst
getCvSubst (SimplEnv { seInScope = in_scope, seTvSubst = tv_env, seCvSubst = cv_env })
= CvSubst in_scope tv_env cv_env
substTy :: SimplEnv -> Type -> Type
substTy env ty = Type.substTy (getTvSubst env) ty
substTyVar :: SimplEnv -> TyVar -> Type
substTyVar env tv = Type.substTyVar (getTvSubst env) tv
substTyVarBndr :: SimplEnv -> TyVar -> (SimplEnv, TyVar)
substTyVarBndr env tv
= case Type.substTyVarBndr (getTvSubst env) tv of
(TvSubst in_scope' tv_env', tv')
-> (env { seInScope = in_scope', seTvSubst = tv_env' }, tv')
substCoVar :: SimplEnv -> CoVar -> Coercion
substCoVar env tv = Coercion.substCoVar (getCvSubst env) tv
substCoVarBndr :: SimplEnv -> CoVar -> (SimplEnv, CoVar)
substCoVarBndr env cv
= case Coercion.substCoVarBndr (getCvSubst env) cv of
(CvSubst in_scope' tv_env' cv_env', cv')
-> (env { seInScope = in_scope', seTvSubst = tv_env', seCvSubst = cv_env' }, cv')
substCo :: SimplEnv -> Coercion -> Coercion
substCo env co = Coercion.substCo (getCvSubst env) co
------------------
substIdType :: SimplEnv -> Id -> Id
substIdType (SimplEnv { seInScope = in_scope, seTvSubst = tv_env }) id
| isEmptyVarEnv tv_env || isEmptyVarSet (tyVarsOfType old_ty) = id
| otherwise = Id.setIdType id (Type.substTy (TvSubst in_scope tv_env) old_ty)
-- The tyVarsOfType is cheaper than it looks
-- because we cache the free tyvars of the type
-- in a Note in the id's type itself
where
old_ty = idType id
substExpr :: SimplEnv -> CoreExpr -> CoreExpr
-- See Note [Substitution in the simplifier]
substExpr (SimplEnv { seInScope = in_scope
, seTvSubst = tv_env
, seCvSubst = cv_env
, seIdSubst = id_env })
= subst_expr in_scope tv_env cv_env id_env
where
subst_expr :: InScopeSet -> TvSubstEnv -> CvSubstEnv -> SimplIdSubst
-> CoreExpr -> CoreExpr
subst_expr is tvs cvs id_env
= CoreSubst.substExpr (text "SimplEnv.substExpr")
(CoreSubst.mkGblSubst is tvs cvs lookup_id)
where
lookup_id in_scope v
= case lookupVarEnv id_env v of
Nothing -> Nothing
Just (DoneEx e) -> Just e
Just (DoneId v) -> Just (Var v)
Just (ContEx tv cv id e) -> Just (subst_expr in_scope tv cv id e)
{- Note [Substitution in the simplifier]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In just one place (sigh) we need to lazily substitute over a CoreExpr.
For that we need CoreSubst.substExpr. But there is a difficulty: SimplEnv
has a SimplIdSubst, whose range is SimplSR, not just CoreExpr.
So SimplEnv.substExpr has to perform impedance-matching, via the ambient
substitution provided by mkGblSubst. It seems like a lot of work for
a small thing. Previously we attempted to construct a (VarEnv CoreExpr)
from the SimplIdSubst, but that had absolutely terrible performance
(Trac #10370 comment:12). Then I tried to write a complete new substExpr
that used SimplIdSubst instead of (VarEnv CoreExpr), but that got out of
hand because we need to substitute over rules and unfoldings too
(Trac #5113, comment:7 and following).
-}
|
alexander-at-github/eta
|
compiler/ETA/SimplCore/SimplEnv.hs
|
bsd-3-clause
| 29,999 | 0 | 15 | 8,052 | 4,747 | 2,630 | 2,117 | 325 | 4 |
module ParseAux where
import AbstractSyntax
import Lexer
import Data.List
happyError :: [Token'] -> a
happyError ts = error ("Parse error in " ++
case ts of
[] -> " at EOF\n"
_ -> "before\n" ++ showList (take 20 (dropWhile (==Newline) ts)) [] ++ "\n")
-- A preprocessor for literal scripts (slow)
unlit :: String -> String
unlit = unlines . map p . lines
where p ('>':' ':cs) = cs
p ('>':'\t':cs) = cs
p _ = []
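-- A small usage sketch (hypothetical input):
--   unlit "> x = 1\nprose\n"  ==  "x = 1\n\n"
-- i.e. "> "-prefixed lines keep their payload, everything else becomes a
-- blank line.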
-- A preprocessor for yacc scripts
yaccpreprocessor :: String -> String
yaccpreprocessor "" = ""
yaccpreprocessor ('%':'%':cs) = '%':'%': yaccRules cs
yaccpreprocessor ('\n':cs) = '\n':yaccpreprocessor cs
yaccpreprocessor (_:cs) = yaccpreprocessor cs
yaccRules :: String -> String
yaccRules "" = ""
yaccRules ('/':'*':cs) = yaccRules (dropCComment 0 cs)
yaccRules ('%':'{':cs) = yaccRules (dropCSyntax cs)
yaccRules ('%':'%':cs) = "%%"
yaccRules ('\'':'{':'\'':cs) = '\'':'{':'\'': yaccRules cs
yaccRules ('{':cs) = '{':yaccRules (dropActions 0 cs)
yaccRules (c:cs) = c:yaccRules cs
dropCSyntax :: String -> String
dropCSyntax "" = ""
dropCSyntax ('%':'}':cs) = cs
dropCSyntax ('\n':cs) = '\n':dropCSyntax cs
dropCSyntax (c:cs) = dropCSyntax cs
dropCComment :: Int -> String -> String
dropCComment _ "" = ""
dropCComment n ('/':'*':cs) = dropCComment (n+1) cs
dropCComment n ('\n':cs) = '\n':dropCComment n cs
dropCComment n ('*':'/':cs)
| n == 0 = cs
| otherwise = dropCComment (n-1) cs
dropCComment n (c:cs) = dropCComment n cs
dropActions :: Int -> String -> String
dropActions _ "" = ""
dropActions n ('"':cs) = dropActions n css where (_,css) = lexString cs
dropActions n ('\'':'{':'\'':cs) = dropActions n cs
dropActions n ('\'':'}':'\'':cs) = dropActions n cs
dropActions n ('{':cs) = dropActions (n+1) cs
dropActions n ('\n':cs) = '\n':dropActions n cs
dropActions n ('}':cs)
| n == 0 = '}':cs
| otherwise = dropActions (n-1) cs
dropActions n (c:cs) = dropActions n cs
-- A postprocessor for a grammar in EBNF and a postprocessor to make happy happy
data Token'
= EbnfInput
| HappyInput
| YaccInput
| Newline
| Ident' String
| CIdent' String
| Symbol' String
| String' String
| Number' String
| Percent
| DoublePercent
| OpenBrace
| ClosingBrace
| Bar
| SemiColon
| DoubleColon
| Colon
| OpenBrack
| ClosingBrack
| OpenParen
| ClosingParen
| Dot
| Equal
| Plus
| Slash
deriving Eq
instance Show Token' where
showsPrec n (Ident' s) = showChar '[' . showString s . showString "] "
showsPrec n (CIdent' s) = showChar '/' . showString s . showString "/"
showsPrec n (Symbol' "\n") = showChar '\n'
showsPrec n (Symbol' s) = showChar '<' . showString s . showString "> "
showsPrec n (String' s) = showChar '"' . showString s . showString "\" "
showsPrec n (Number' s) = showChar ' ' . showString s . showChar ' '
showsPrec n Percent = showString "%"
showsPrec n DoublePercent = showString "%% "
showsPrec n OpenBrace = showString "{ "
showsPrec n ClosingBrace = showString "} "
showsPrec n OpenBrack = showString "[ "
showsPrec n ClosingBrack = showString "] "
showsPrec n OpenParen = showString "( "
showsPrec n ClosingParen = showString ") "
showsPrec n Bar = showString "| "
showsPrec n SemiColon = showString "; "
showsPrec n DoubleColon = showString ":: "
showsPrec n Colon = showString ": "
showsPrec n Dot = showString ". "
showsPrec n Equal = showString "= "
showsPrec n Plus = showString "+ "
showsPrec n Slash = showString "/ "
showsPrec n Newline = showString "\n"
showsPrec n YaccInput = showString "\n>>YACC input format<<\n"
showsPrec n EbnfInput = showString "\n>>EBNF input format<<\n"
showsPrec n HappyInput = showString "\n>>HAPPY input format<<\n"
showList [] = id
showList (x:xs) = shows x . showList xs
-- a ebnf postlexer
ebnf_postlexer :: [Token] -> [Token']
ebnf_postlexer = \s -> EbnfInput : foldr f [] s
where f (Symbol "\n") = id --Newline
f (Symbol "=") = (Equal:)
f (Symbol ".") = (Dot:)
f (Symbol "|") = (Bar:)
f (Symbol "/") = (Slash:)
f (Symbol "+") = (Plus:)
f (Symbol "(") = (OpenParen:)
f (Symbol "[") = (OpenBrack:)
f (Symbol "{") = (OpenBrace:)
f (Symbol ")") = (ClosingParen:)
f (Symbol "]") = (ClosingBrack:)
f (Symbol "}") = (ClosingBrace:)
f (Symbol ";") = (SemiColon:)
f (Symbol s) = (Symbol' s:)
f (Ident s) = (Ident' s:)
f (String s) = (String' s:)
f (Number n) = (Symbol' n:)
-- a happy postlexer
happy_postlexer :: [Token] -> [Token']
happy_postlexer = \s -> HappyInput : foldr f [] s
where f (Symbol "\n") = id --Newline
f (Symbol "%%") = (DoublePercent:)
f (Symbol "%") = (Percent:)
f (Symbol "{") = (OpenBrace:)
f (Symbol "}") = (ClosingBrace:)
f (Symbol "::") = (DoubleColon:)
f (Symbol ":") = (Colon:)
f (Symbol ";") = (SemiColon:)
f (Symbol "|") = (Bar:)
f (Symbol s) = (Symbol' s:)
f (Ident s) = (Ident' s:)
f (String s) = (String' s:)
f (Number n) = (Symbol' n:)
-- a yacc postlexer
yacc_postlexer s = YaccInput : f s
where toSkip [] = False
toSkip (Symbol "\n":cs') = toSkip cs'
toSkip (Symbol ":":_) = True
toSkip (c:_) = False
f [] = []
f (Symbol "\n":cs) = f cs -- Newline
f (Symbol "%":cs) = Percent : f cs
f (Symbol "%%":cs) = DoublePercent : f cs
f (Symbol "|":cs) = Bar : f cs
f (Symbol "{":cs) = OpenBrace : f cs
f (Symbol "}":cs) = ClosingBrace : f cs
f (Symbol ";":cs) = SemiColon : f cs
f (Symbol ":":cs) = Colon : f cs
f (Symbol c :cs) = (Symbol' c): f cs
f (String c :cs) = (String' c): f cs
f (Number c :cs) = (Number' c): f cs
f (Ident c :cs) | toSkip cs = (CIdent' c): f cs
| otherwise = (Ident' c): f cs
happyPrepare terminalsyms = map (happyPrepare' terminalsyms)
happyPrepare' ts (ProdProduction s1 s2 prod) = ProdProduction s1 s2 (happyPrepare' ts prod)
happyPrepare' ts (ProdFactor prods) = ProdFactor (map (happyPrepare' ts) prods)
happyPrepare' ts (ProdTerminal s) = ProdTerminal s
happyPrepare' ts (ProdOption prod) = ProdOption (happyPrepare' ts prod)
happyPrepare' ts (ProdRepeat prod) = ProdRepeat (happyPrepare' ts prod)
happyPrepare' ts (ProdRepeat1 prod) = ProdRepeat1 (happyPrepare' ts prod)
happyPrepare' ts (ProdRepeatWithAtom p1 p2) = ProdRepeatWithAtom (happyPrepare' ts p1) (happyPrepare' ts p2)
happyPrepare' ts (ProdPlus) = ProdPlus
happyPrepare' ts (ProdSlash prod) = ProdSlash (happyPrepare' ts prod)
happyPrepare' ts (ProdTerm prods) = ProdTerm (map (happyPrepare' ts) prods)
happyPrepare' ts (ProdNonterminal s)
| s `elem` ts = ProdTerminal s
| otherwise = ProdNonterminal s
yaccPrepare happyresult =
[noDup (getNt nt) | nt <- nub nonterminals]
where (nonterminals, prods) = transform happyresult [] []
getNt str = [yaccPrepare' nonterminals p | p@(ProdProduction nt _ _) <- prods, str == nt]
transform [] as bs = (as,bs)
transform ((ProdProduction nt aliases (ProdTerm ps)):pss) as bs =
transform pss' (nt:as) bs'
where (factors, pss') = span isProdFactor pss
bs' = bs ++ [ProdProduction nt aliases (ProdTerm ps')]
ps' = ps ++ factors
noDup [p] = p
noDup (ProdProduction nt aliases (ProdTerm ps):p':ps') =
ProdProduction nt aliases
(ProdTerm (foldr (\ (ProdProduction _ _ (ProdTerm prods')) ps1 -> ps1++prods') ps (p':ps')))
isProdFactor p = case p of { ProdFactor _ -> True; _ -> False}
yaccPrepare' nts (ProdProduction s1 s2 prod) = ProdProduction s1 s2 (yaccPrepare' nts prod)
yaccPrepare' nts (ProdFactor prods) = ProdFactor (map (yaccPrepare' nts) prods)
yaccPrepare' nts (ProdTerm prods) = ProdTerm (map (yaccPrepare' nts) prods)
yaccPrepare' nts (ProdOption prod) = ProdOption (yaccPrepare' nts prod)
yaccPrepare' nts (ProdRepeat prod) = ProdRepeat (yaccPrepare' nts prod)
yaccPrepare' nts (ProdRepeat1 prod) = ProdRepeat1 (yaccPrepare' nts prod)
yaccPrepare' nts (ProdRepeatWithAtom p1 p2) = ProdRepeatWithAtom (yaccPrepare' nts p1) (yaccPrepare' nts p2)
yaccPrepare' nts (ProdPlus) = ProdPlus
yaccPrepare' nts (ProdSlash prod) = ProdSlash (yaccPrepare' nts prod)
yaccPrepare' nts (ProdTerminal s)
| s `elem` nts = ProdNonterminal s
| otherwise = ProdTerminal s
|
FranklinChen/Ebnf2ps
|
src/ParseAux.hs
|
bsd-3-clause
| 8,805 | 0 | 18 | 2,310 | 3,563 | 1,803 | 1,760 | 201 | 17 |
-- -----------------------------------------------------------------------------
-- Copyright 2002, Simon Marlow.
-- Copyright 2006, Bjorn Bringert.
-- Copyright 2009, Henning Thielemann.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
--
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- * Neither the name of the copyright holder(s) nor the names of
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-- -----------------------------------------------------------------------------
{-# LANGUAGE Rank2Types #-}
module Network.MoHWS.Server (main, mainWithOptions, ) where
import qualified Network.MoHWS.Server.Request as ServerRequest
import qualified Network.MoHWS.Server.Environment as ServerEnv
import qualified Network.MoHWS.Server.Context as ServerContext
import Network.MoHWS.Logger.Error (debug, logError, logInfo, )
import qualified Network.MoHWS.Module as Module
import qualified Network.MoHWS.Module.Description as ModuleDesc
import qualified Network.MoHWS.Logger.Access as AccessLogger
import qualified Network.MoHWS.Logger.Error as ErrorLogger
import qualified Network.MoHWS.Configuration.Parser as ConfigParser
import Network.MoHWS.Configuration as Config
import qualified Network.MoHWS.Initialization as Init
import qualified Network.MoHWS.HTTP.MimeType as MimeType
import qualified Network.MoHWS.Server.Options as Options
import Network.MoHWS.ParserUtility (getUntilEmptyLine, )
import qualified Network.MoHWS.HTTP.Version as Version
import qualified Network.MoHWS.HTTP.Header as Header
import qualified Network.MoHWS.HTTP.Request as Request
import qualified Network.MoHWS.HTTP.Response as Response
import qualified Network.MoHWS.Stream as Stream
import qualified Network.MoHWS.Utility as Util
import Data.Monoid (mempty, )
import Data.Maybe (catMaybes, )
import Data.Tuple.HT (swap, )
import Data.List.HT (viewR, )
import qualified Data.Set as Set
import qualified Control.Monad.Exception.Synchronous as Exc
import qualified Control.Exception as Exception
import Control.Monad.Exception.Synchronous (ExceptionalT, runExceptionalT, )
import Control.Monad.Trans.State (StateT, runStateT, modify, )
import Control.Monad.Trans.Class (lift, )
import qualified Network.Socket as Socket
import qualified Network.BSD as BSD
import Control.Concurrent (myThreadId, ThreadId, throwTo, killThread, forkIO, )
import Control.Exception (ErrorCall(ErrorCall), finally, mask, )
import Control.Monad (liftM, when, )
import Network.BSD (HostEntry, hostName, )
import Network.Socket (Socket, HostAddress, Family(AF_INET), )
import Network.URI (uriPath, )
import qualified System.Posix as Posix
import qualified System.IO as IO
import System.IO.Error (isAlreadyInUseError, isEOFError, catchIOError, )
import System.Environment (getArgs, )
import System.Posix (installHandler, sigHUP, sigPIPE, )
import Text.ParserCombinators.Parsec (parse, choice, )
{- -----------------------------------------------------------------------------
ToDo:
- MAJOR:
- deal with http version numbers
- timeouts (partly done)
- languages
- per-directory permissions (ala apache)
- error logging levels
- per-directory config options.
- languages (content-language, accept-language)
- multipart/byteranges
- MINOR:
- access logging (various bits left)
- implement user & group setting
- log time to serve request
- terminate & restart signal (like Apache's SIGHUP)
- don't die if the new configuration file contains errors after a restart
- reading config file may block, unsafe if we receive another SIGHUP
- common up headers with same name (eg. accept).
- implement if-modified-since (need to parse time)
- MAYBE:
- throttling if too many open connections (config: MaxClients)
-}
-----------------------------------------------------------------------------
-- Top-level server
main :: (Stream.C body) =>
Init.T body ext -> IO ()
main initExt =
do args <- getArgs
case Options.parse args of
Left err -> Util.die err
Right opts -> mainWithOptions initExt opts
mainWithOptions :: (Stream.C body) =>
Init.T body ext -> Options.T -> IO ()
mainWithOptions initExt opts =
do main_thread <- myThreadId
_ <- installHandler sigPIPE Posix.Ignore Nothing
_ <- installHandler sigHUP (Posix.Catch (hupHandler main_thread)) Nothing
mask (readConfig initExt opts)
type Unblock a = IO a -> IO a
hupHandler :: ThreadId -> IO ()
hupHandler main_thread =
throwTo main_thread (ErrorCall "**restart**")
sigsToBlock :: Posix.SignalSet
sigsToBlock = Posix.addSignal sigHUP Posix.emptySignalSet
-- Async exceptions should be blocked on entry to readConfig (so that
-- multiple SIGHUPs close together can't kill us). Make sure that
-- there aren't any interruptible operations until we've blocked signals.
readConfig :: (Stream.C body) =>
Init.T body ext -> Options.T -> (forall a. Unblock a) -> IO ()
readConfig initExt opts unblock = do
Posix.blockSignals sigsToBlock
r <- ConfigParser.run
(choice $ map ModuleDesc.configParser $ Init.moduleList initExt)
(Options.configPath opts)
case r of
Left err ->
Util.die $ unlines $
"Failed to parse configuration file" : show err : []
Right b -> do
let updates = map ModuleDesc.setDefltConfig $ Init.moduleList initExt
confExtDeflt =
foldl (flip ($)) (Init.configurationExtensionDefault initExt) updates
conf = b (Config.deflt confExtDeflt)
st <- initServerState opts conf
mods <- fmap catMaybes $ mapM (loadModule st) $ Init.moduleList initExt
topServer st mods initExt unblock
rereadConfig :: (Stream.C body) =>
ServerContext.T ext -> Init.T body ext -> (forall a. Unblock a) -> IO ()
rereadConfig st initExt unblock =
do mapM_ AccessLogger.stop (ServerContext.accessLoggers st)
ErrorLogger.stop (ServerContext.errorLogger st)
readConfig initExt (ServerContext.options st) unblock
initServerState :: Options.T -> Config.T ext -> IO (ServerContext.T ext)
initServerState opts conf =
do host <- do ent <- BSD.getHostEntry
case serverName conf of
"" -> return ent
n -> return ent { hostName = n }
mimeTypes
<- MimeType.loadDictionary (Options.inServerRoot opts (typesConfig conf))
errorLogger
<- ErrorLogger.start (Options.inServerRoot opts (errorLogFile conf)) (logLevel conf)
accessLoggers
<- sequence [AccessLogger.start format (Options.inServerRoot opts file)
| (file,format) <- customLogs conf]
let st = ServerContext.Cons
{
ServerContext.options = opts,
ServerContext.config = conf,
ServerContext.hostName = host,
ServerContext.mimeTypes = mimeTypes,
ServerContext.errorLogger = errorLogger,
ServerContext.accessLoggers = accessLoggers
}
return st
loadModule :: (Stream.C body) =>
ServerContext.T ext -> ModuleDesc.T body ext -> IO (Maybe (Module.T body))
loadModule st md =
(do logInfo st $ "Loading module " ++ ModuleDesc.name md ++ "..."
fmap Just $ ModuleDesc.load md st)
`Exception.catch`
\(Exception.SomeException e) ->
do logError st $ unlines ["Error loading module " ++ ModuleDesc.name md,
show e]
return Nothing
-- We catch exceptions from the main server thread, and restart the
-- server. If we receive a restart signal (from a SIGHUP), then we
-- re-read the configuration file.
topServer :: (Stream.C body) =>
ServerContext.T ext -> [Module.T body] -> Init.T body ext -> (forall a. Unblock a) -> IO ()
topServer st mods initExt unblock =
let startServers =
do ts <- servers st mods
(Util.wait `Exception.catch`
(\e -> case e of
ErrorCall "**restart**" ->
do mapM_ killThread ts
rereadConfig st initExt unblock
_ -> Exception.throw e))
loop =
(do Posix.unblockSignals sigsToBlock
unblock startServers)
`Exception.catch`
(\(Exception.SomeException e) ->
do logError st ("server: " ++ show e)
loop)
in loop
servers :: (Stream.C body) =>
ServerContext.T ext -> [Module.T body] -> IO [ThreadId]
servers st mods =
let mkEnv port =
ServerEnv.Cons {
ServerEnv.context = st,
ServerEnv.modules = mods,
ServerEnv.port = port
}
mkAddr (maddr,port) =
do addr <- case maddr of
Nothing -> return Socket.iNADDR_ANY
Just ip -> Socket.inet_addr ip
return (mkEnv port, Socket.SockAddrInet port addr)
in do addrs <- mapM mkAddr (listen (ServerContext.config st))
mapM (\ (env,addr) -> forkIO (server env addr)) addrs
-- open the server socket and start accepting connections
server :: (Stream.C body) =>
ServerEnv.T body ext -> Socket.SockAddr -> IO ()
server st addr = do
logInfo st $ "Starting server thread on " ++ show addr
proto <- BSD.getProtocolNumber "tcp"
Exception.bracket
(Socket.socket AF_INET Socket.Stream proto)
(\sock -> Socket.sClose sock)
(\sock -> do Socket.setSocketOption sock Socket.ReuseAddr 1
ok <- Util.catchSomeIOErrors isAlreadyInUseError
(Socket.bindSocket sock addr >> return True)
(\e -> do logError st ("server: " ++ show e)
IO.hPutStrLn IO.stderr $ show e
return False)
when ok $ do Socket.listen sock Socket.maxListenQueue
acceptConnections st sock)
-- accept connections, and fork off a new thread to handle each one
acceptConnections :: (Stream.C body) =>
ServerEnv.T body ext -> Socket -> IO ()
acceptConnections st sock = do
debug st "Calling accept..."
(h, Socket.SockAddrInet port haddr) <- Util.accept sock
Socket.inet_ntoa haddr >>=
\ip -> debug st $ "Got connection from " ++ ip ++ ":" ++ show port
_ <- forkIO (
(talk st h haddr `finally` IO.hClose h)
`Exception.catch`
(\(Exception.SomeException e) ->
debug st ("servlet died: " ++ show e))
)
acceptConnections st sock
talk :: (Stream.C body) =>
ServerEnv.T body ext -> IO.Handle -> HostAddress -> IO ()
talk st h haddr = do
debug st "Started"
IO.hSetBuffering h IO.LineBuffering
run st True h haddr
debug st "Done"
run :: (Stream.C body) =>
ServerEnv.T body ext -> Bool -> IO.Handle -> HostAddress -> IO ()
run st first h haddr = do
let conf = ServerEnv.config st
-- read a request up to the first empty line. If we
    -- don't get a request within the allotted time, issue
-- a "Request Time-out" response and close the connection.
let time_allowed =
if first
then requestTimeout conf
else keepAliveTimeout conf
debug st "Waiting for request..."
req <- catchIOError (
do ok <- IO.hWaitForInput h (time_allowed * 1000)
if ok then liftM Just (getUntilEmptyLine h)
-- only send a "request timed out" response if this
-- was the first request on the socket. Subsequent
-- requests time-out and close the socket silently.
-- ToDo: if we get a partial request, still emit the
            -- timeout response.
else do debug st $ "Request timeout (after " ++ show time_allowed ++ " s)"
when first (response st h (Response.makeRequestTimeOut conf))
return Nothing
)
(\e ->
if isEOFError e
then debug st "EOF from client" >> return Nothing
else do logError st ("request: " ++ show e)
return Nothing )
case req of { Nothing -> return (); Just r -> do
case parse Request.pHeaders "Request" r of
-- close the connection after a badly formatted request
Left err -> do
debug st (show err)
response st h (Response.makeBadRequest conf)
return ()
Right req_no_body -> do
reqt <- getBody h req_no_body
debug st $ show reqt
resp <- request st reqt haddr
response st h resp
-- Persistent Connections
--
-- We close the connection if
-- (a) client specified "connection: close"
-- (b) client is pre-HTTP/1.1, and didn't
-- specify "connection: keep-alive"
let connection_headers = Request.getConnection (Request.headers reqt)
if Request.ConnectionClose `elem` connection_headers
|| (Request.httpVersion reqt < Version.http1_1
&& Request.ConnectionKeepAlive `notElem` connection_headers)
then return ()
else run st False h haddr
}
getBody :: (Stream.C body) =>
IO.Handle -> Request.T body -> IO (Request.T body)
getBody h req =
    let -- FIXME: handle chunked input
readBody =
case Header.getContentLength req of
Nothing -> return mempty
-- FIXME: what if input is huge?
Just len -> Stream.read h len
in do b <- readBody
return $ req { Request.body = b}
-----------------------------------------------------------------------------
-- Dealing with requests
request :: (Stream.C body) =>
ServerEnv.T body ext -> Request.T body -> HostAddress -> IO (Response.T body)
request st req haddr =
do (sreq,merr) <- serverRequest st req haddr
resp <- case merr of
Nothing -> do sreq' <- tweakRequest st sreq
debug st $ "Handling request..."
handleRequest st sreq'
Just err -> return err
debug st (Response.showStatusLine resp)
ServerEnv.logAccess st sreq resp (error "noTimeDiff"){-FIXME-}
return resp
serverRequest :: (Stream.C body) =>
ServerEnv.T body ext -> Request.T body -> HostAddress -> IO (ServerRequest.T body, Maybe (Response.T body))
serverRequest st req haddr =
let conf = ServerEnv.config st
sreq =
ServerRequest.Cons {
ServerRequest.clientRequest = req,
ServerRequest.clientAddress = haddr,
ServerRequest.clientName = Nothing,
ServerRequest.requestHostName = ServerEnv.hostName st,
ServerRequest.serverURIPath = "-",
ServerRequest.serverFilename = "-",
ServerRequest.serverPort = ServerEnv.port st
}
maybeExc x =
case x of
Exc.Success _ -> Nothing
Exc.Exception e -> Just e
in fmap swap (runStateT
(fmap maybeExc $ runExceptionalT $ serverRequestExc st req haddr) sreq)
`Exception.catch`
( \(Exception.SomeException exception) -> do
logError st ("request: " ++ show exception)
return (sreq, Just (Response.makeInternalServerError conf))
)
serverRequestExc :: (Stream.C body) =>
ServerEnv.T body ext -> Request.T body -> HostAddress -> ExceptionalT (Response.T body) (StateT (ServerRequest.T body) IO) ()
serverRequestExc st req haddr =
let conf = ServerEnv.config st
use = Exc.mapExceptionalT lift
update = lift . modify
in do remoteName <- use $ lift $ maybeLookupHostname conf haddr
update $ \sreq -> sreq { ServerRequest.clientName = remoteName }
host <- use $ getServerHostName st req
update $ \sreq -> sreq { ServerRequest.requestHostName = host }
path <- use $ requestAbsPath st req
update $ \sreq -> sreq { ServerRequest.serverURIPath = path }
file <- use $ translatePath st (hostName host) path
update $ \sreq -> sreq { ServerRequest.serverFilename = file }
maybeLookupHostname :: Config.T ext -> HostAddress -> IO (Maybe HostEntry)
maybeLookupHostname conf haddr =
if hostnameLookups conf
then catchIOError
(liftM Just (BSD.getHostByAddr AF_INET haddr))
(\_ -> return Nothing)
else return Nothing
type EIO body = ExceptionalT (Response.T body) IO
-- make sure we've got a host field
-- if the request version is >= HTTP/1.1
getServerHostName :: (Stream.C body) =>
ServerEnv.T body ext -> Request.T body -> EIO body HostEntry
getServerHostName st req =
let conf = ServerEnv.config st
isServerHost host =
host `Set.member` (Set.insert (serverName conf) $ serverAlias conf) ||
any (flip Module.isServerHost host) (ServerEnv.modules st)
in case Request.getHost req of
Nothing ->
if Request.httpVersion req < Version.http1_1
then return $ ServerEnv.hostName st
else Exc.throwT $ Response.makeBadRequest conf
Just (host,_) ->
if isServerHost host
then return $ (ServerEnv.hostName st) { hostName = host }
else do lift $ logError st ("Unknown host: " ++ show host)
Exc.throwT $ Response.makeNotFound conf
-- | Get the absolute path from the request.
requestAbsPath :: (Stream.C body) =>
ServerEnv.T body ext -> Request.T body -> EIO body String
requestAbsPath _ req = return $ uriPath $ Request.uri req
-- Path translation
translatePath :: (Stream.C body) =>
ServerEnv.T body ext -> String -> String -> EIO body FilePath
translatePath st host pth =
do m_file <- lift $ ServerEnv.tryModules st (\m -> Module.translatePath m host pth)
case m_file of
Just file -> return $ file
Nothing -> defaultTranslatePath st pth
defaultTranslatePath :: (Stream.C body) =>
ServerEnv.T body ext -> String -> EIO body FilePath
defaultTranslatePath st pth =
let conf = ServerEnv.config st
in case pth of
'/':_ -> return $ documentRoot conf ++ pth
_ -> Exc.throwT $ Response.makeNotFound conf
-- Request tweaking
tweakRequest :: (Stream.C body) =>
ServerEnv.T body ext -> ServerRequest.T body -> IO (ServerRequest.T body)
tweakRequest st =
ServerEnv.foldModules st (\m r -> Module.tweakRequest m r)
-- Request handling
handleRequest :: (Stream.C body) =>
ServerEnv.T body ext -> ServerRequest.T body -> IO (Response.T body)
handleRequest st req =
do m_resp <- ServerEnv.tryModules st (\m -> Module.handleRequest m req)
case m_resp of
Just resp -> return resp
Nothing -> defaultHandleRequest st req
defaultHandleRequest :: (Stream.C body) =>
ServerEnv.T body ext -> ServerRequest.T body -> IO (Response.T body)
defaultHandleRequest st _ =
return $ Response.makeNotFound $ ServerEnv.config st
-- Sending response
response :: (Stream.C body) =>
ServerEnv.T body ext ->
IO.Handle ->
Response.T body ->
IO ()
response env h
(Response.Cons {
Response.code = code,
Response.description = desc,
Response.headers = headers,
Response.coding = tes,
Response.body = body,
Response.doSendBody = sendBody
}) =
do
Util.hPutStrCrLf h (Response.statusLine code desc)
hPutHeader h Response.serverHeader
-- Date Header: required on all messages
date <- Response.dateHeader
hPutHeader h date
mapM_ (hPutHeader h) (Header.list headers)
-- Output a Content-Length when the message body isn't
-- encoded. If it *is* encoded, then the last transfer
-- coding must be "chunked", according to RFC2616 sec 3.6. This
-- allows the client to determine the message-length.
let contentLength = Response.size body
when (Response.hasBody body && null tes)
(maybe (return ()) (hPutHeader h . Header.makeContentLength) contentLength)
mapM_ (hPutHeader h . Header.makeTransferCoding) tes
Util.hPutStrCrLf h ""
-- ToDo: implement transfer codings
let conf = ServerEnv.config env
when sendBody $
case viewR tes of
Just (_, Header.ChunkedTransferCoding) ->
Response.sendBodyChunked (Config.chunkSize conf) h body
_ -> Response.sendBody h body
hPutHeader :: IO.Handle -> Header.T -> IO ()
hPutHeader h =
IO.hPutStr h . show
-- Util.hPutStrCrLf h . show
|
xpika/mohws
|
src/Network/MoHWS/Server.hs
|
bsd-3-clause
| 22,018 | 0 | 24 | 5,829 | 5,462 | 2,800 | 2,662 | 379 | 7 |
import qualified Network.Kafka.Specs as S
main = S.main
|
tcrayford/hafka
|
Network/Kafka/Main.hs
|
bsd-3-clause
| 57 | 0 | 5 | 9 | 17 | 11 | 6 | 2 | 1 |
module Win32SystemInfo where
import StdDIS
import Win32Types
import GDITypes
----------------------------------------------------------------
-- Environment Strings
----------------------------------------------------------------
-- %fun ExpandEnvironmentStrings :: String -> IO String
----------------------------------------------------------------
-- Computer Name
----------------------------------------------------------------
-- %fun GetComputerName :: IO String
-- %fun SetComputerName :: String -> IO ()
-- %end free(arg1)
----------------------------------------------------------------
-- Hardware Profiles
----------------------------------------------------------------
-- %fun GetCurrentHwProfile :: IO HW_PROFILE_INFO
----------------------------------------------------------------
-- Keyboard Type
----------------------------------------------------------------
-- %fun GetKeyboardType :: KeyboardTypeKind -> IO KeyboardType
----------------------------------------------------------------
-- System Color
----------------------------------------------------------------
type SystemColor = UINT
-- ToDo: This list is out of date.
cOLOR_SCROLLBAR :: SystemColor
cOLOR_SCROLLBAR =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_SCROLLBAR >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_SCROLLBAR :: IO (Word32)
cOLOR_BACKGROUND :: SystemColor
cOLOR_BACKGROUND =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_BACKGROUND >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_BACKGROUND :: IO (Word32)
cOLOR_ACTIVECAPTION :: SystemColor
cOLOR_ACTIVECAPTION =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_ACTIVECAPTION >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_ACTIVECAPTION :: IO (Word32)
cOLOR_INACTIVECAPTION :: SystemColor
cOLOR_INACTIVECAPTION =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_INACTIVECAPTION >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_INACTIVECAPTION :: IO (Word32)
cOLOR_MENU :: SystemColor
cOLOR_MENU =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_MENU >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_MENU :: IO (Word32)
cOLOR_WINDOW :: SystemColor
cOLOR_WINDOW =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_WINDOW >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_WINDOW :: IO (Word32)
cOLOR_WINDOWFRAME :: SystemColor
cOLOR_WINDOWFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_WINDOWFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_WINDOWFRAME :: IO (Word32)
cOLOR_MENUTEXT :: SystemColor
cOLOR_MENUTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_MENUTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_MENUTEXT :: IO (Word32)
cOLOR_WINDOWTEXT :: SystemColor
cOLOR_WINDOWTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_WINDOWTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_WINDOWTEXT :: IO (Word32)
cOLOR_CAPTIONTEXT :: SystemColor
cOLOR_CAPTIONTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_CAPTIONTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_CAPTIONTEXT :: IO (Word32)
cOLOR_ACTIVEBORDER :: SystemColor
cOLOR_ACTIVEBORDER =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_ACTIVEBORDER >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_ACTIVEBORDER :: IO (Word32)
cOLOR_INACTIVEBORDER :: SystemColor
cOLOR_INACTIVEBORDER =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_INACTIVEBORDER >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_INACTIVEBORDER :: IO (Word32)
cOLOR_APPWORKSPACE :: SystemColor
cOLOR_APPWORKSPACE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_APPWORKSPACE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_APPWORKSPACE :: IO (Word32)
cOLOR_HIGHLIGHT :: SystemColor
cOLOR_HIGHLIGHT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_HIGHLIGHT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_HIGHLIGHT :: IO (Word32)
cOLOR_HIGHLIGHTTEXT :: SystemColor
cOLOR_HIGHLIGHTTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_HIGHLIGHTTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_HIGHLIGHTTEXT :: IO (Word32)
cOLOR_BTNFACE :: SystemColor
cOLOR_BTNFACE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_BTNFACE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_BTNFACE :: IO (Word32)
cOLOR_BTNSHADOW :: SystemColor
cOLOR_BTNSHADOW =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_BTNSHADOW >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_BTNSHADOW :: IO (Word32)
cOLOR_GRAYTEXT :: SystemColor
cOLOR_GRAYTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_GRAYTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_GRAYTEXT :: IO (Word32)
cOLOR_BTNTEXT :: SystemColor
cOLOR_BTNTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_BTNTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_BTNTEXT :: IO (Word32)
cOLOR_INACTIVECAPTIONTEXT :: SystemColor
cOLOR_INACTIVECAPTIONTEXT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_INACTIVECAPTIONTEXT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_INACTIVECAPTIONTEXT :: IO (Word32)
cOLOR_BTNHIGHLIGHT :: SystemColor
cOLOR_BTNHIGHLIGHT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_cOLOR_BTNHIGHLIGHT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_cOLOR_BTNHIGHLIGHT :: IO (Word32)
-- %fun GetSysColor :: SystemColor -> IO COLORREF
-- %fun SetSysColors :: [(SystemColor,COLORREF)] -> IO ()
----------------------------------------------------------------
-- Standard Directories
----------------------------------------------------------------
-- %fun GetSystemDirectory :: IO String
-- %fun GetWindowsDirectory :: IO String
----------------------------------------------------------------
-- System Info (Info about processor and memory subsystem)
----------------------------------------------------------------
-- %fun GetSystemInfo :: IO SystemInfo
--
-- typedef struct _SYSTEM_INFO { // sinf
-- union {
-- DWORD dwOemId;
-- struct {
-- WORD wProcessorArchitecture;
-- WORD wReserved;
-- };
-- };
-- DWORD dwPageSize;
-- LPVOID lpMinimumApplicationAddress;
-- LPVOID lpMaximumApplicationAddress;
-- DWORD dwActiveProcessorMask;
-- DWORD dwNumberOfProcessors;
-- DWORD dwProcessorType;
-- DWORD dwAllocationGranularity;
-- WORD wProcessorLevel;
-- WORD wProcessorRevision;
-- } SYSTEM_INFO;
----------------------------------------------------------------
-- System metrics
----------------------------------------------------------------
type SMSetting = UINT
sM_ARRANGE :: SMSetting
sM_ARRANGE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_ARRANGE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_ARRANGE :: IO (Word32)
sM_CLEANBOOT :: SMSetting
sM_CLEANBOOT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CLEANBOOT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CLEANBOOT :: IO (Word32)
sM_CMETRICS :: SMSetting
sM_CMETRICS =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CMETRICS >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CMETRICS :: IO (Word32)
sM_CMOUSEBUTTONS :: SMSetting
sM_CMOUSEBUTTONS =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CMOUSEBUTTONS >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CMOUSEBUTTONS :: IO (Word32)
sM_CXBORDER :: SMSetting
sM_CXBORDER =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXBORDER >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXBORDER :: IO (Word32)
sM_CYBORDER :: SMSetting
sM_CYBORDER =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYBORDER >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYBORDER :: IO (Word32)
sM_CXCURSOR :: SMSetting
sM_CXCURSOR =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXCURSOR >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXCURSOR :: IO (Word32)
sM_CYCURSOR :: SMSetting
sM_CYCURSOR =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYCURSOR >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYCURSOR :: IO (Word32)
sM_CXDLGFRAME :: SMSetting
sM_CXDLGFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXDLGFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXDLGFRAME :: IO (Word32)
sM_CYDLGFRAME :: SMSetting
sM_CYDLGFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYDLGFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYDLGFRAME :: IO (Word32)
sM_CXDOUBLECLK :: SMSetting
sM_CXDOUBLECLK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXDOUBLECLK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXDOUBLECLK :: IO (Word32)
sM_CYDOUBLECLK :: SMSetting
sM_CYDOUBLECLK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYDOUBLECLK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYDOUBLECLK :: IO (Word32)
sM_CXDRAG :: SMSetting
sM_CXDRAG =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXDRAG >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXDRAG :: IO (Word32)
sM_CYDRAG :: SMSetting
sM_CYDRAG =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYDRAG >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYDRAG :: IO (Word32)
sM_CXEDGE :: SMSetting
sM_CXEDGE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXEDGE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXEDGE :: IO (Word32)
sM_CYEDGE :: SMSetting
sM_CYEDGE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYEDGE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYEDGE :: IO (Word32)
sM_CXFRAME :: SMSetting
sM_CXFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXFRAME :: IO (Word32)
sM_CYFRAME :: SMSetting
sM_CYFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYFRAME :: IO (Word32)
sM_CXFULLSCREEN :: SMSetting
sM_CXFULLSCREEN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXFULLSCREEN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXFULLSCREEN :: IO (Word32)
sM_CYFULLSCREEN :: SMSetting
sM_CYFULLSCREEN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYFULLSCREEN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYFULLSCREEN :: IO (Word32)
sM_CXHSCROLL :: SMSetting
sM_CXHSCROLL =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXHSCROLL >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXHSCROLL :: IO (Word32)
sM_CYVSCROLL :: SMSetting
sM_CYVSCROLL =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYVSCROLL >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYVSCROLL :: IO (Word32)
sM_CXICON :: SMSetting
sM_CXICON =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXICON >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXICON :: IO (Word32)
sM_CYICON :: SMSetting
sM_CYICON =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYICON >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYICON :: IO (Word32)
sM_CXICONSPACING :: SMSetting
sM_CXICONSPACING =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXICONSPACING >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXICONSPACING :: IO (Word32)
sM_CYICONSPACING :: SMSetting
sM_CYICONSPACING =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYICONSPACING >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYICONSPACING :: IO (Word32)
sM_CXMAXIMIZED :: SMSetting
sM_CXMAXIMIZED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMAXIMIZED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMAXIMIZED :: IO (Word32)
sM_CYMAXIMIZED :: SMSetting
sM_CYMAXIMIZED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMAXIMIZED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMAXIMIZED :: IO (Word32)
sM_CXMENUCHECK :: SMSetting
sM_CXMENUCHECK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMENUCHECK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMENUCHECK :: IO (Word32)
sM_CYMENUCHECK :: SMSetting
sM_CYMENUCHECK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMENUCHECK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMENUCHECK :: IO (Word32)
sM_CXMENUSIZE :: SMSetting
sM_CXMENUSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMENUSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMENUSIZE :: IO (Word32)
sM_CYMENUSIZE :: SMSetting
sM_CYMENUSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMENUSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMENUSIZE :: IO (Word32)
sM_CXMIN :: SMSetting
sM_CXMIN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMIN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMIN :: IO (Word32)
sM_CYMIN :: SMSetting
sM_CYMIN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMIN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMIN :: IO (Word32)
sM_CXMINIMIZED :: SMSetting
sM_CXMINIMIZED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMINIMIZED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMINIMIZED :: IO (Word32)
sM_CYMINIMIZED :: SMSetting
sM_CYMINIMIZED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMINIMIZED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMINIMIZED :: IO (Word32)
sM_CXMINTRACK :: SMSetting
sM_CXMINTRACK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXMINTRACK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXMINTRACK :: IO (Word32)
sM_CYMINTRACK :: SMSetting
sM_CYMINTRACK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMINTRACK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMINTRACK :: IO (Word32)
sM_CXSCREEN :: SMSetting
sM_CXSCREEN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXSCREEN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXSCREEN :: IO (Word32)
sM_CYSCREEN :: SMSetting
sM_CYSCREEN =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSCREEN >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSCREEN :: IO (Word32)
sM_CXSIZE :: SMSetting
sM_CXSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXSIZE :: IO (Word32)
sM_CYSIZE :: SMSetting
sM_CYSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSIZE :: IO (Word32)
sM_CXSIZEFRAME :: SMSetting
sM_CXSIZEFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXSIZEFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXSIZEFRAME :: IO (Word32)
sM_CYSIZEFRAME :: SMSetting
sM_CYSIZEFRAME =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSIZEFRAME >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSIZEFRAME :: IO (Word32)
sM_CXSMICON :: SMSetting
sM_CXSMICON =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXSMICON >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXSMICON :: IO (Word32)
sM_CYSMICON :: SMSetting
sM_CYSMICON =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSMICON >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSMICON :: IO (Word32)
sM_CXSMSIZE :: SMSetting
sM_CXSMSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXSMSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXSMSIZE :: IO (Word32)
sM_CYSMSIZE :: SMSetting
sM_CYSMSIZE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSMSIZE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSMSIZE :: IO (Word32)
sM_CXVSCROLL :: SMSetting
sM_CXVSCROLL =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CXVSCROLL >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CXVSCROLL :: IO (Word32)
sM_CYHSCROLL :: SMSetting
sM_CYHSCROLL =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYHSCROLL >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYHSCROLL :: IO (Word32)
sM_CYVTHUMB :: SMSetting
sM_CYVTHUMB =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYVTHUMB >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYVTHUMB :: IO (Word32)
sM_CYCAPTION :: SMSetting
sM_CYCAPTION =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYCAPTION >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYCAPTION :: IO (Word32)
sM_CYKANJIWINDOW :: SMSetting
sM_CYKANJIWINDOW =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYKANJIWINDOW >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYKANJIWINDOW :: IO (Word32)
sM_CYMENU :: SMSetting
sM_CYMENU =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYMENU >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYMENU :: IO (Word32)
sM_CYSMCAPTION :: SMSetting
sM_CYSMCAPTION =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_CYSMCAPTION >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_CYSMCAPTION :: IO (Word32)
sM_DBCSENABLED :: SMSetting
sM_DBCSENABLED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_DBCSENABLED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_DBCSENABLED :: IO (Word32)
sM_DEBUG :: SMSetting
sM_DEBUG =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_DEBUG >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_DEBUG :: IO (Word32)
sM_MENUDROPALIGNMENT :: SMSetting
sM_MENUDROPALIGNMENT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_MENUDROPALIGNMENT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_MENUDROPALIGNMENT :: IO (Word32)
sM_MIDEASTENABLED :: SMSetting
sM_MIDEASTENABLED =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_MIDEASTENABLED >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_MIDEASTENABLED :: IO (Word32)
sM_MOUSEPRESENT :: SMSetting
sM_MOUSEPRESENT =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_MOUSEPRESENT >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_MOUSEPRESENT :: IO (Word32)
sM_NETWORK :: SMSetting
sM_NETWORK =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_NETWORK >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_NETWORK :: IO (Word32)
sM_PENWINDOWS :: SMSetting
sM_PENWINDOWS =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_PENWINDOWS >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_PENWINDOWS :: IO (Word32)
sM_SECURE :: SMSetting
sM_SECURE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_SECURE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_SECURE :: IO (Word32)
sM_SHOWSOUNDS :: SMSetting
sM_SHOWSOUNDS =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_SHOWSOUNDS >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_SHOWSOUNDS :: IO (Word32)
sM_SLOWMACHINE :: SMSetting
sM_SLOWMACHINE =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_SLOWMACHINE >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_SLOWMACHINE :: IO (Word32)
sM_SWAPBUTTON :: SMSetting
sM_SWAPBUTTON =
unsafePerformIO(
prim_Win32SystemInfo_cpp_sM_SWAPBUTTON >>= \ (res1) ->
(return (res1)))
primitive prim_Win32SystemInfo_cpp_sM_SWAPBUTTON :: IO (Word32)
-- %fun GetSystemMetrics :: SMSetting -> IO Int
----------------------------------------------------------------
-- Thread Desktops
----------------------------------------------------------------
-- %fun GetThreadDesktop :: ThreadId -> IO HDESK
-- %fun SetThreadDesktop :: ThreadId -> HDESK -> IO ()
----------------------------------------------------------------
-- User name
----------------------------------------------------------------
-- %fun GetUserName :: IO String
----------------------------------------------------------------
-- Version Info
----------------------------------------------------------------
-- %fun GetVersionEx :: IO VersionInfo
--
-- typedef struct _OSVERSIONINFO{
-- DWORD dwOSVersionInfoSize;
-- DWORD dwMajorVersion;
-- DWORD dwMinorVersion;
-- DWORD dwBuildNumber;
-- DWORD dwPlatformId;
-- TCHAR szCSDVersion[ 128 ];
-- } OSVERSIONINFO;
----------------------------------------------------------------
-- Processor features
----------------------------------------------------------------
--
-- Including these lines causes problems on Win95
-- %fun IsProcessorFeaturePresent :: ProcessorFeature -> Bool
--
-- type ProcessorFeature = DWORD
-- %dis processorFeature x = dWORD x
--
-- %const ProcessorFeature
-- % [ PF_FLOATING_POINT_PRECISION_ERRATA
-- % , PF_FLOATING_POINT_EMULATED
-- % , PF_COMPARE_EXCHANGE_DOUBLE
-- % , PF_MMX_INSTRUCTIONS_AVAILABLE
-- % ]
----------------------------------------------------------------
-- System Parameter Information
----------------------------------------------------------------
-- %fun SystemParametersInfo :: ?? -> Bool -> IO ??
----------------------------------------------------------------
-- End
----------------------------------------------------------------
needPrims_hugs 2
|
OS2World/DEV-UTIL-HUGS
|
libraries/win32/Win32SystemInfo.hs
|
bsd-3-clause
| 22,309 | 173 | 12 | 2,976 | 4,653 | 2,556 | 2,097 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Vector spaces.
module Math.Algebra.Vector where
import qualified Math.Algebra.Module as Mod
import qualified Math.Algebra.Field as F
class (F.Field a, Mod.Module a b) => Vector a b
-- | Fields are vector spaces over themselves.
instance (F.Field a) => Vector a a
|
michiexile/hplex
|
pershom/src/Math/Algebra/Vector.hs
|
bsd-3-clause
| 346 | 0 | 7 | 54 | 79 | 48 | 31 | -1 | -1 |
-- Copyright (c) 2014, Dmitry Zuikov
-- All rights reserved.
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of emufat nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Main where
import qualified Data.ByteString.Lazy as BS
import Data.Binary.Get
import Data.Maybe
import System.Environment
import Text.Printf
--import Control.Exception
import Data.Word
data Chk = W8 | W16 | W32 | W64
data End = BE | LE
data Base = Dec | Hex deriving Eq
data Piece = P8 Word8 | P16 Word16 | P32 Word32 | P64 Word64
data Fmt = Fmt { off :: (Maybe Int), chk :: Chk, end :: End, base :: Base }
decodeFmt :: String -> String -> String -> String -> Fmt
decodeFmt x "8" "BE" b = Fmt (readMaybe x) W8 BE (baseof b)
decodeFmt x "8" "LE" b = Fmt (readMaybe x) W8 BE (baseof b)
decodeFmt x "16" "BE" b = Fmt (readMaybe x) W16 BE (baseof b)
decodeFmt x "16" "LE" b = Fmt (readMaybe x) W16 LE (baseof b)
decodeFmt x "32" "BE" b = Fmt (readMaybe x) W32 BE (baseof b)
decodeFmt x "32" "LE" b = Fmt (readMaybe x) W32 LE (baseof b)
decodeFmt x "64" "BE" b = Fmt (readMaybe x) W64 BE (baseof b)
decodeFmt x "64" "LE" b = Fmt (readMaybe x) W64 LE (baseof b)
decodeFmt x y z m = error $ "Bad format " ++ (show (x, y, z, m))
printPiece :: Fmt -> Piece -> IO ()
printPiece f m = putStrLn $ pp f m
where
pp (Fmt _ _ _ Dec) (P8 x) = printf "%d" x
pp (Fmt _ _ _ Dec) (P16 x) = printf "%d" x
pp (Fmt _ _ _ Dec) (P32 x) = printf "%d" x
pp (Fmt _ _ _ Dec) (P64 x) = printf "%d" x
pp (Fmt _ _ _ Hex) (P8 x) = printf "%02X" x
pp (Fmt _ _ _ Hex) (P16 x) = printf "%04X" x
pp (Fmt _ _ _ Hex) (P32 x) = printf "%08X" x
pp (Fmt _ _ _ Hex) (P64 x) = printf "%016X" x
readPiece :: String -> Fmt -> IO Piece
readPiece fn fmt@(Fmt o _ _ _) = do
bs <- BS.readFile fn
return $ flip runGet bs $ do
maybe (return ()) skip o
case fmt of
(Fmt _ W8 _ _) -> getWord8 >>= return . P8
(Fmt _ W16 LE _) -> getWord16le >>= return . P16
(Fmt _ W16 BE _) -> getWord16be >>= return . P16
(Fmt _ W32 LE _) -> getWord32le >>= return . P32
(Fmt _ W32 BE _) -> getWord32be >>= return . P32
(Fmt _ W64 LE _) -> getWord64le >>= return . P64
(Fmt _ W64 BE _) -> getWord64be >>= return . P64
baseof "H" = Hex
baseof "D" = Dec
baseof _ = Hex
readMaybe :: (Read a) => String -> Maybe a
readMaybe s = case [x | (x,t) <- reads s, ("","") <- lex t] of
[x] -> Just x
_ -> Nothing
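-- Added illustration (not part of the original tool): the same steps 'main'
-- performs, wired together directly. The file name "disk.img" is hypothetical.
exampleRead :: IO ()
exampleRead = do
  let fmt = decodeFmt "16" "32" "LE" "H"  -- offset 16, 32-bit LE word, hex output
  readPiece "disk.img" fmt >>= printPiece fmt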
main = do
args <- getArgs
case args of
( file : off : x : fmtS : bs : _ ) -> do
let fmt = decodeFmt off x fmtS bs
readPiece file fmt >>= printPiece fmt
_ -> error "Usage: readpiece fname off 8|16|32|64 BE|LE H|D"
|
voidlizard/emufat
|
src/readpiece.hs
|
bsd-3-clause
| 4,075 | 0 | 15 | 968 | 1,286 | 662 | 624 | 59 | 8 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
{-@ LIQUID "--idirs=../neg "@-}
import Class5
instance Foo ()
|
spinda/liquidhaskell
|
tests/gsoc15/unknown/pos/Class2.hs
|
bsd-3-clause
| 118 | 0 | 6 | 21 | 18 | 10 | 8 | 4 | 0 |
-- |
-- Module : Simulation.Aivika.Experiment.Chart.Types
-- Copyright : Copyright (c) 2012-2017, David Sorokin <[email protected]>
-- License : BSD3
-- Maintainer : David Sorokin <[email protected]>
-- Stability : experimental
-- Tested with: GHC 8.0.1
--
-- The module defines a type class for rendering charts.
--
module Simulation.Aivika.Experiment.Chart.Types
(ChartRendering(..)) where
import Graphics.Rendering.Chart
import Simulation.Aivika.Experiment
-- | A type class of chart renderers.
class ChartRendering r where
-- | The file extension used when rendering.
renderableChartExtension :: r -> String
-- | Generate an image file with the specified path for the given chart.
-- The width and height are passed in the second argument to the function.
renderChart :: r -> (Int, Int) -> FilePath -> Renderable c -> IO (PickFn c)
-- | Return the rendering layout.
renderingLayoutLR :: r -> LayoutLR Double Double Double -> LayoutLR Double Double Double
-- | Return the rendering layout.
renderingLayout :: r -> Layout Double Double -> Layout Double Double
|
dsorokin/aivika-experiment-chart
|
Simulation/Aivika/Experiment/Chart/Types.hs
|
bsd-3-clause
| 1,117 | 0 | 13 | 200 | 153 | 90 | 63 | 9 | 0 |
module Noobing.Experimenting.Test (
sortTest
) where
import Data.List
sortTest :: (Ord a) => [a] -> [a]
sortTest l = sort l
|
markmq/Noobing
|
src/Noobing/Experimenting/Test.hs
|
bsd-3-clause
| 130 | 0 | 7 | 26 | 53 | 31 | 22 | 5 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeOperators #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Lens.At
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Control.Lens.At
(
-- * At
At(at), sans
-- * Ixed
, Index
, IxValue
, Ixed(ix)
, ixAt
-- * Contains
, Contains(..)
) where
import Control.Applicative
import Control.Lens.Lens
import Control.Lens.Setter
import Control.Lens.Type
import Control.Lens.Internal.TupleIxedTH (makeAllTupleIxed)
import Data.Aeson as Aeson
import Data.Array.IArray as Array
import Data.Array.Unboxed
import Data.ByteString as StrictB
import Data.ByteString.Lazy as LazyB
import Data.Complex
import Data.Hashable
import Data.HashMap.Lazy as HashMap
import Data.HashSet as HashSet
import Data.Int
import Data.IntMap as IntMap
import Data.IntSet as IntSet
import Data.List.NonEmpty as NonEmpty
import Data.Map as Map
import Data.Set as Set
import Data.Sequence as Seq
import Data.Text as StrictT
import Data.Text.Lazy as LazyT
import Data.Traversable
import Data.Tree
import Data.Vector as Vector hiding (indexed)
import Data.Vector.Primitive as Prim
import Data.Vector.Storable as Storable
import Data.Vector.Unboxed as Unboxed
import Data.Word
type family Index (s :: *) :: *
type instance Index (e -> a) = e
type instance Index IntSet = Int
type instance Index (Set a) = a
type instance Index (HashSet a) = a
type instance Index [a] = Int
type instance Index (NonEmpty a) = Int
type instance Index (Seq a) = Int
type instance Index (a,b) = Int
type instance Index (a,b,c) = Int
type instance Index (a,b,c,d) = Int
type instance Index (a,b,c,d,e) = Int
type instance Index (a,b,c,d,e,f) = Int
type instance Index (a,b,c,d,e,f,g) = Int
type instance Index (a,b,c,d,e,f,g,h) = Int
type instance Index (a,b,c,d,e,f,g,h,i) = Int
type instance Index (IntMap a) = Int
type instance Index (Map k a) = k
type instance Index (HashMap k a) = k
type instance Index (Array.Array i e) = i
type instance Index (UArray i e) = i
type instance Index (Vector.Vector a) = Int
type instance Index (Prim.Vector a) = Int
type instance Index (Storable.Vector a) = Int
type instance Index (Unboxed.Vector a) = Int
type instance Index (Complex a) = Int
type instance Index (Identity a) = ()
type instance Index (Maybe a) = ()
type instance Index (Tree a) = [Int]
type instance Index StrictT.Text = Int
type instance Index LazyT.Text = Int64
type instance Index StrictB.ByteString = Int
type instance Index LazyB.ByteString = Int64
type instance Index Aeson.Value = StrictT.Text
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Control.Lens
-- >>> import Debug.SimpleReflect.Expr
-- >>> import Debug.SimpleReflect.Vars as Vars hiding (f,g)
-- >>> let f :: Expr -> Expr; f = Debug.SimpleReflect.Vars.f
-- >>> let g :: Expr -> Expr; g = Debug.SimpleReflect.Vars.g
-- |
-- This class provides a simple 'IndexedFold' (or 'IndexedTraversal') that lets you view (and modify)
-- information about whether or not a container contains a given 'Index'.
class Contains m where
-- |
-- >>> IntSet.fromList [1,2,3,4] ^. contains 3
-- True
--
-- >>> IntSet.fromList [1,2,3,4] ^. contains 5
-- False
--
-- >>> IntSet.fromList [1,2,3,4] & contains 3 .~ False
-- fromList [1,2,4]
contains :: Index m -> Lens' m Bool
instance Contains IntSet where
contains k f s = f (IntSet.member k s) <&> \b ->
if b then IntSet.insert k s else IntSet.delete k s
{-# INLINE contains #-}
instance Ord a => Contains (Set a) where
contains k f s = f (Set.member k s) <&> \b ->
if b then Set.insert k s else Set.delete k s
{-# INLINE contains #-}
instance (Eq a, Hashable a) => Contains (HashSet a) where
contains k f s = f (HashSet.member k s) <&> \b ->
if b then HashSet.insert k s else HashSet.delete k s
{-# INLINE contains #-}
-- | This provides a common notion of a value at an index that is shared by both 'Ixed' and 'At'.
type family IxValue (m :: *) :: *
-- | This simple 'AffineTraversal' lets you 'traverse' the value at a given
-- key in a 'Map' or element at an ordinal position in a list or 'Seq'.
class Ixed m where
-- | This simple 'AffineTraversal' lets you 'traverse' the value at a given
-- key in a 'Map' or element at an ordinal position in a list or 'Seq'.
--
-- /NB:/ Setting the value of this 'AffineTraversal' will only set the value in the
-- 'Lens' if it is already present.
--
-- If you want to be able to insert /missing/ values, you want 'at'.
--
-- >>> Seq.fromList [a,b,c,d] & ix 2 %~ f
-- fromList [a,b,f c,d]
--
-- >>> Seq.fromList [a,b,c,d] & ix 2 .~ e
-- fromList [a,b,e,d]
--
-- >>> Seq.fromList [a,b,c,d] ^? ix 2
-- Just c
--
-- >>> Seq.fromList [] ^? ix 2
-- Nothing
ix :: Index m -> Traversal' m (IxValue m)
#ifdef DEFAULT_SIGNATURES
default ix :: (Applicative f, At m) => Index m -> LensLike' f m (IxValue m)
ix = ixAt
{-# INLINE ix #-}
#endif
-- | A definition of 'ix' for types with an 'At' instance. This is the default
-- if you don't specify a definition for 'ix'.
ixAt :: At m => Index m -> Traversal' m (IxValue m)
ixAt i = at i . traverse
{-# INLINE ixAt #-}
type instance IxValue (e -> a) = a
instance Eq e => Ixed (e -> a) where
ix e p f = p (f e) <&> \a e' -> if e == e' then a else f e'
{-# INLINE ix #-}
type instance IxValue (Maybe a) = a
instance Ixed (Maybe a) where
ix () f (Just a) = Just <$> f a
ix () _ Nothing = pure Nothing
{-# INLINE ix #-}
type instance IxValue [a] = a
instance Ixed [a] where
ix k f xs0 | k < 0 = pure xs0
| otherwise = go xs0 k where
go [] _ = pure []
go (a:as) 0 = f a <&> (:as)
go (a:as) i = (a:) <$> (go as $! i - 1)
{-# INLINE ix #-}
type instance IxValue (NonEmpty a) = a
instance Ixed (NonEmpty a) where
ix k f xs0 | k < 0 = pure xs0
| otherwise = go xs0 k where
go (a:|as) 0 = f a <&> (:|as)
go (a:|as) i = (a:|) <$> ix (i - 1) f as
{-# INLINE ix #-}
type instance IxValue (Identity a) = a
instance Ixed (Identity a) where
ix () f (Identity a) = Identity <$> f a
{-# INLINE ix #-}
type instance IxValue (Tree a) = a
instance Ixed (Tree a) where
ix xs0 f = go xs0 where
go [] (Node a as) = f a <&> \a' -> Node a' as
go (i:is) t@(Node a as) | i < 0 = pure t
| otherwise = Node a <$> goto is as i
goto is (a:as) 0 = go is a <&> (:as)
goto is (_:as) n = goto is as $! n - 1
goto _ [] _ = pure []
{-# INLINE ix #-}
type instance IxValue (Seq a) = a
instance Ixed (Seq a) where
ix i f m
| 0 <= i && i < Seq.length m = f (Seq.index m i) <&> \a -> Seq.update i a m
| otherwise = pure m
{-# INLINE ix #-}
type instance IxValue (IntMap a) = a
instance Ixed (IntMap a) where
ix k f m = case IntMap.lookup k m of
Just v -> f v <&> \v' -> IntMap.insert k v' m
Nothing -> pure m
{-# INLINE ix #-}
type instance IxValue (Map k a) = a
instance Ord k => Ixed (Map k a) where
ix k f m = case Map.lookup k m of
Just v -> f v <&> \v' -> Map.insert k v' m
Nothing -> pure m
{-# INLINE ix #-}
type instance IxValue (HashMap k a) = a
instance (Eq k, Hashable k) => Ixed (HashMap k a) where
ix k f m = case HashMap.lookup k m of
Just v -> f v <&> \v' -> HashMap.insert k v' m
Nothing -> pure m
{-# INLINE ix #-}
type instance IxValue (Set k) = ()
instance Ord k => Ixed (Set k) where
ix k f m = if Set.member k m
then f () <&> \() -> Set.insert k m
else pure m
{-# INLINE ix #-}
type instance IxValue IntSet = ()
instance Ixed IntSet where
ix k f m = if IntSet.member k m
then f () <&> \() -> IntSet.insert k m
else pure m
{-# INLINE ix #-}
type instance IxValue (HashSet k) = ()
instance (Eq k, Hashable k) => Ixed (HashSet k) where
ix k f m = if HashSet.member k m
then f () <&> \() -> HashSet.insert k m
else pure m
{-# INLINE ix #-}
type instance IxValue (Array.Array i e) = e
-- |
-- @
-- arr '!' i ≡ arr 'Control.Lens.Getter.^.' 'ix' i
-- arr '//' [(i,e)] ≡ 'ix' i 'Control.Lens.Setter..~' e '$' arr
-- @
instance Ix i => Ixed (Array.Array i e) where
ix i f arr
| inRange (bounds arr) i = f (arr Array.! i) <&> \e -> arr Array.// [(i,e)]
| otherwise = pure arr
{-# INLINE ix #-}
type instance IxValue (UArray i e) = e
-- |
-- @
-- arr '!' i ≡ arr 'Control.Lens.Getter.^.' 'ix' i
-- arr '//' [(i,e)] ≡ 'ix' i 'Control.Lens.Setter..~' e '$' arr
-- @
instance (IArray UArray e, Ix i) => Ixed (UArray i e) where
ix i f arr
| inRange (bounds arr) i = f (arr Array.! i) <&> \e -> arr Array.// [(i,e)]
| otherwise = pure arr
{-# INLINE ix #-}
type instance IxValue (Vector.Vector a) = a
instance Ixed (Vector.Vector a) where
ix i f v
| 0 <= i && i < Vector.length v = f (v Vector.! i) <&> \a -> v Vector.// [(i, a)]
| otherwise = pure v
{-# INLINE ix #-}
type instance IxValue (Prim.Vector a) = a
instance Prim a => Ixed (Prim.Vector a) where
ix i f v
| 0 <= i && i < Prim.length v = f (v Prim.! i) <&> \a -> v Prim.// [(i, a)]
| otherwise = pure v
{-# INLINE ix #-}
type instance IxValue (Storable.Vector a) = a
instance Storable a => Ixed (Storable.Vector a) where
ix i f v
| 0 <= i && i < Storable.length v = f (v Storable.! i) <&> \a -> v Storable.// [(i, a)]
| otherwise = pure v
{-# INLINE ix #-}
type instance IxValue (Unboxed.Vector a) = a
instance Unbox a => Ixed (Unboxed.Vector a) where
ix i f v
| 0 <= i && i < Unboxed.length v = f (v Unboxed.! i) <&> \a -> v Unboxed.// [(i, a)]
| otherwise = pure v
{-# INLINE ix #-}
type instance IxValue StrictT.Text = Char
instance Ixed StrictT.Text where
ix e f s = case StrictT.splitAt e s of
(l, mr) -> case StrictT.uncons mr of
Nothing -> pure s
Just (c, xs) -> f c <&> \d -> StrictT.concat [l, StrictT.singleton d, xs]
{-# INLINE ix #-}
type instance IxValue LazyT.Text = Char
instance Ixed LazyT.Text where
ix e f s = case LazyT.splitAt e s of
(l, mr) -> case LazyT.uncons mr of
Nothing -> pure s
Just (c, xs) -> f c <&> \d -> LazyT.append l (LazyT.cons d xs)
{-# INLINE ix #-}
type instance IxValue StrictB.ByteString = Word8
instance Ixed StrictB.ByteString where
ix e f s = case StrictB.splitAt e s of
(l, mr) -> case StrictB.uncons mr of
Nothing -> pure s
Just (c, xs) -> f c <&> \d -> StrictB.concat [l, StrictB.singleton d, xs]
{-# INLINE ix #-}
type instance IxValue LazyB.ByteString = Word8
instance Ixed LazyB.ByteString where
-- TODO: we could be lazier, returning each chunk as it is passed
ix e f s = case LazyB.splitAt e s of
(l, mr) -> case LazyB.uncons mr of
Nothing -> pure s
Just (c, xs) -> f c <&> \d -> LazyB.append l (LazyB.cons d xs)
{-# INLINE ix #-}
type instance IxValue Aeson.Value = Aeson.Value
instance Ixed Aeson.Value where
ix i f (Object o) = Object <$> ix i f o
ix _ _ v = pure v
{-# INLINE ix #-}
-- | 'At' provides a 'Lens' that can be used to read,
-- write or delete the value associated with a key in a 'Map'-like
-- container on an ad hoc basis.
--
-- An instance of 'At' should satisfy:
--
-- @
-- 'ix' k ≡ 'at' k '.' 'traverse'
-- @
class Ixed m => At m where
-- |
-- >>> Map.fromList [(1,"world")] ^.at 1
-- Just "world"
--
-- >>> at 1 ?~ "hello" $ Map.empty
-- fromList [(1,"hello")]
--
-- /Note:/ 'Map'-like containers form a reasonable instance, but not 'Array'-like ones, where
-- you cannot satisfy the 'Lens' laws.
at :: Index m -> Lens' m (Maybe (IxValue m))
sans :: At m => Index m -> m -> m
sans k m = m & at k .~ Nothing
{-# INLINE sans #-}
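-- Added illustration (not exported, for reference only): 'sans' removes a key
-- from any 'At' container, here a 'Map'.
sansExample :: Map.Map Int String
sansExample = sans 1 (Map.fromList [(1,"hello"),(2,"world")])
-- evaluates to: fromList [(2,"world")]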
instance At (Maybe a) where
at () f = f
{-# INLINE at #-}
instance At (IntMap a) where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (IntMap.delete k m)) mv
Just v' -> IntMap.insert k v' m
where mv = IntMap.lookup k m
{-# INLINE at #-}
instance Ord k => At (Map k a) where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (Map.delete k m)) mv
Just v' -> Map.insert k v' m
where mv = Map.lookup k m
{-# INLINE at #-}
instance (Eq k, Hashable k) => At (HashMap k a) where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (HashMap.delete k m)) mv
Just v' -> HashMap.insert k v' m
where mv = HashMap.lookup k m
{-# INLINE at #-}
instance At IntSet where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (IntSet.delete k m)) mv
Just () -> IntSet.insert k m
where mv = if IntSet.member k m then Just () else Nothing
{-# INLINE at #-}
instance Ord k => At (Set k) where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (Set.delete k m)) mv
Just () -> Set.insert k m
where mv = if Set.member k m then Just () else Nothing
{-# INLINE at #-}
instance (Eq k, Hashable k) => At (HashSet k) where
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (HashSet.delete k m)) mv
Just () -> HashSet.insert k m
where mv = if HashSet.member k m then Just () else Nothing
{-# INLINE at #-}
makeAllTupleIxed
|
hvr/lens
|
src/Control/Lens/At.hs
|
bsd-3-clause
| 13,959 | 0 | 17 | 3,426 | 4,928 | 2,638 | 2,290 | 303 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module NHorn.NaturalHorn (
Rule(..),
Proof(..),
proof,
refl,
sym,
select,
strict,
leib,
proofAx,
) where
import Control.Monad(foldM)
import qualified Data.Map as Map
import qualified Data.Set as Set
import NHorn.Sequent
import NHorn.LaCarte
import Term
data Rule a s f ala
= Refl [Formula s f] (VarNames s) -- |- x = x
| Sym ala -- a :== b |- b :== a
| Select Int [Formula s f] -- phi and psi |- phi
| Leib (Formula s f) Name ala ala -- x = y and phi[x/z] |- phi[y/z]
| Strict Int ala -- F(t_1) = F(t_1) |- t_1 = t_1
    -- Due to the definition, give variables in sorted order
| ProofAx (a s f) [ala] [Term s f] -- axiom plus subst
instance (Theory a s f) => Functor (Rule a s f) where
fmap f (Refl flas vm) = Refl flas vm
fmap f (Sym a) = Sym (f a)
fmap f (Select m lst) = Select m lst
fmap f (Leib fs n x y) = Leib fs n (f x) (f y)
fmap f (Strict m x) = Strict m (f x)
fmap f (ProofAx ax lst tlst) = ProofAx ax (map f lst) tlst
-----------------------------------------------------------------------------
class (Theory a s f, Functor (f2 a s f)) => Proof f2 a s f where
proofA :: f2 a s f (ErrSec s f) -> ErrSec s f
instance (Proof f2 a s f, Proof g a s f) => Proof (f2 :+: g) a s f where
proofA (Inl f) = proofA f
proofA (Inr g) = proofA g
proof :: (Proof f2 a s f) => Expr (f2 a s f) -> ErrSec s f
proof = foldExpr proofA
instance (Theory a s f) => Proof Rule a s f where
proofA (Sym rl) = do
(Seq vs x (a :== b)) <- rl
return $ Seq vs x (b :== a)
proofA (Select n flas) = do
checkListLength n flas
createSeq flas (flas !! (n-1))
proofA (Refl left vm)
| vm == emptyVNS = Left "Can't apply Refl to empty set of vars"
| otherwise =
let (nel, sel) = Map.elemAt 0 vm
v = Var nel sel in
createSeq left $ v :== v
proofA (Strict n pr) = do
(Seq vs cont1 (t1 :== t2)) <- pr
(FunApp f ts) <- check t1 t2
checkListLength n ts
let term = ts !! (n-1)
createSeq cont1 (term :== term)
where check f1@(FunApp _ _) f2@(FunApp _ _) | f1 == f2 = Right f1
| otherwise = Left $ "Not a fundef in Strict"
check _ _ = Left $ "Not a fundef in Strict"
proofA (Leib (tL :== tR) v pIn pProof) = do
(Seq vs cont1 (t1 :== t2)) <- pIn
check <- pProof
let s1 = typeOf t1
let s2 = typeOf t2
substL <- subst tL v s1 t1
substR <- subst tR v s1 t1
check2 <- createSeq cont1 (substL :== substR)
retL <- subst tL v s2 t2
retR <- subst tR v s2 t2
if check == check2
then createSeq cont1 (retL :== retR)
else Left $ "Incorrect substitution for Left side, need " ++ show check ++ " but have " ++ show check2
proofA (ProofAx ax proofs terms) = do
ax' <- axiom ax
typeCheckSeq ax'
varsCheckS ax'
axiP@(Seq vsSeq leftAx rightAx) <- (Right ax')
-- Get all proofs
proofLst <- sequence proofs
-- typeCheck terms
sortTerms <- mapM typeCheckTerm terms
        -- rename all the stuff in axioms to impose independence of substitution
let mangled_l = Set.fromList $ map fst $ Map.toList vsSeq
let leftAx' = map (mangleFla mangled_l) leftAx
let vsSeq' = mangleVars mangled_l vsSeq
-- subst into axiom and check equality
let namesAndtermsAndTypes = zip3 (map fst $ Map.toList vsSeq') sortTerms terms
leftSide <- mapM (\x -> foldM (substHelper vsSeq') x namesAndtermsAndTypes) leftAx'
leftCheck leftSide (map rightS proofLst)
-- check contexts equality
ctx <- contCheck $ map leftS proofLst
-- subst into vars to the left of |- in an axiom
-- this if is a semihack to use createSeq
if null leftAx then createSeq ctx rightAx
else do
(Seq llVars _ _) <- createSeq leftAx $ head leftAx
-------------- Clutter
let mangled_r = Set.fromList $ map fst $ Map.toList llVars
let m_llVars = mangleVars mangled_r llVars
let vsSeq'' = mangleVars mangled_r vsSeq
------------------------
let namesAndtermsAndTypes2 = filter (\(n,s,t) -> Map.member n m_llVars) namesAndtermsAndTypes
res <- foldM (substHelper vsSeq'') (mangleFla mangled_r rightAx) namesAndtermsAndTypes2
createSeq ctx res
where
leftCheck lsAx lsSeq = if lsAx == lsSeq then return () else Left $ "Precondition doesn't match subst into axiom: \n"
++ show lsAx ++ "\n" ++ show lsSeq
contCheck [] = return []
contCheck ctxs = foldM (\a b -> if a == b then return b else Left $ "Contexts differ") (head ctxs) ctxs
checkListLength n lst
| n < 1 = Left $ "Index is less than 1"
| length lst >= n = Right ()
| otherwise = Left $ "Index is bigger than a list"
substHelper :: Signature s f => VarNames s -> Formula s f -> (Name, s, Term s f) -> Either Err (Formula s f)
substHelper vsSeq fla (nam, sortTerm, term) = do
let vsT = varsTerm term
allVs <- combine vsT (Right vsSeq) -- to check compatibility
substIntoF nam sortTerm term fla
------------------------------------------------------------------------------
-- Constructors
refl :: (Rule :<: e) a s f => [Formula s f] -> VarNames s -> Expr (e a s f)
refl flas vs = In $ inj $ Refl flas vs
sym :: (Rule :<: e) a s f => Expr (e a s f) -> Expr (e a s f)
sym x = In $ inj $ Sym x
select :: (Rule :<: e) a s f => Int -> [Formula s f] -> Expr (e a s f)
select n flas = In $ inj $ Select n flas
strict :: (Rule :<: e) a s f => Int -> Expr (e a s f) -> Expr (e a s f)
strict n x = In $ inj $ Strict n x
leib :: (Rule :<: e) a s f => Formula s f -> Name -> Expr (e a s f) -> Expr (e a s f) -> Expr (e a s f)
leib fla nam a b = In $ inj $ Leib fla nam a b
proofAx :: (Rule :<: e) a s f => a s f -> [Expr (e a s f)] -> [Term s f] -> Expr (e a s f)
proofAx ax proofs terms = In $ inj $ ProofAx ax proofs terms
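-- Added note (illustration only, no concrete theory fixed): proof terms are
-- built by nesting the smart constructors above, e.g.
--   sym (select 1 premises)
-- selects the first premise and flips its equation; 'proof' then evaluates
-- the whole tree to an 'ErrSec s f'.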
|
esengie/algebraic-checker
|
src/NHorn/NaturalHorn.hs
|
bsd-3-clause
| 6,469 | 6 | 18 | 2,091 | 2,457 | 1,214 | 1,243 | 125 | 1 |
{-# LANGUAGE TypeOperators, DataKinds,
ScopedTypeVariables, TypeFamilies, FlexibleContexts,
FlexibleInstances, UndecidableInstances #-}
module FRP.Basket.Signals where
import Prelude hiding ((.), const)
import Control.Applicative
import Control.Category
import Control.Arrow
import Data.Monoid hiding ((<>))
import Data.Semigroup
--import FRP.Basket.Aux.HList
import Data.HList
type Time = Double
newtype Signal s a b = Signal {
runSignal :: Time -> HList s -> a -> (b, HList s)
}
mkSignal :: (Time -> s -> a -> (b, s)) -> Signal '[s] a b
mkSignal f = Signal $ \t st a -> case st of
(HCons s _) -> let (b, s') = f t s a in (b, HCons s' HNil)
-- this is the same thing as arr, but since I don't know if I'm keeping arrow instances this is here
liftS :: (a -> b) -> Signal '[] a b
liftS f = Signal $ \_ s a -> (f a, s)
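-- Added illustration (assumes nothing beyond this module): a stateful counter
-- signal whose single state component is an Int, incremented on every sample.
counter :: Signal '[Int] a Int
counter = mkSignal $ \_t n _a -> (n + 1, n + 1)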
-- Pronounced 'weave', this function composes Signals of differing states
infixr #>
(#>) :: forall s s' ss a b c n. (HSplitAt n ss s s', ss ~ HAppendListR s (HAppendListR s' '[]),
HAppendFD s' '[] s', HAppendFD s s' ss) =>
Signal s a b -> Signal s' b c -> Signal ss a c
(Signal f) #> (Signal g) = Signal h where
splitIndex = Proxy :: Proxy n
h :: Time -> HList ss -> a -> (c, HList ss)
h t wstate a = (c, hConcat $ hBuild fState' gState')
where
fState, fState' :: HList s
gState, gState' :: HList s'
(fState, gState) = hSplitAt splitIndex wstate
(b, fState') = f t fState a
(c, gState') = g t gState b
-- need to do these proofs
instance Functor (Signal s a) where
fmap f (Signal g) = Signal $ \t s a -> let (b, s') = g t s a in (f b, s')
instance Applicative (Signal s a) where
pure a = Signal $ \_ s _ -> (a, s)
(Signal f) <*> (Signal g) = Signal $ \t s a -> let (b, s' ) = g t s a
(h, s'') = f t s' a in (h b, s'')
instance Monad (Signal s a) where
return = pure
(Signal f) >>= g = Signal $ \t s a ->
let (b, s') = f t s a
in runSignal (g b) t s' a
instance Semigroup (Signal s a a) where
(Signal f) <> (Signal g) = Signal $ \t s a -> let (a' , s') = f t s a in g t s' a'
instance Category (Signal s) where
id = Signal $ \_ s a -> (a, s)
(Signal f) . (Signal g) = Signal $ \t s a -> let (b, s') = g t s a in f t s' b
-- Just like (->), Signal only forms a monoid in Signal s a a.
instance Monoid (Signal s a a) where
mempty = Signal $ \_ s a -> (a, s)
mappend = (<>)
-- Maybe a little disappointing that this needs to be constrained this way, but otherwise
-- the final state is not clear.
instance Monoid (HList s) => Arrow (Signal s) where
arr f = Signal $ \_ s a -> (f a, s)
first (Signal f) = Signal $ \t s (a, c) -> let (b, s') = f t s a in ((b, c), s')
second (Signal f) = Signal $ \t s (c, a) -> let (b, s') = f t s a in ((c, b), s')
(Signal f) *** (Signal g) = Signal $ \t s (a, c) -> let (b, s' ) = f t s a
(d, s'') = g t s c in ((b, d), s' `mappend` s'') -- which s to use ?
(Signal f) &&& (Signal g) = Signal $ \t s a -> let (b, s') = f t s a
(d, s'') = g t s a in ((b, d), s' `mappend` s'') -- which s to use ?
instance Monoid (HList s) => ArrowChoice (Signal s) where
left (Signal f) = Signal $ \t s e -> case e of
Left b -> let (c, s') = f t s b in (Left c, s')
Right d -> (Right d, s)
right (Signal f) = Signal $ \t s e -> case e of
Left b -> (Left b, s)
Right d -> let (b, s') = f t s d in (Right b, s')
instance Monoid (HList s) => ArrowApply (Signal s) where
app = Signal $ \t s (sf, b) -> runSignal sf t s b
instance Monoid (HList s) => ArrowLoop (Signal s) where
loop (Signal f) = Signal $ \t s a -> let ((b, d), s') = f t s (a, d) in (b, s')
|
jhstanton/Basket
|
src/FRP/Basket/Signals.hs
|
bsd-3-clause
| 4,192 | 0 | 15 | 1,508 | 1,886 | 996 | 890 | 70 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Redshift.CreateClusterSecurityGroup
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new Amazon Redshift security group. You use security groups to
-- control access to non-VPC clusters.
--
-- For information about managing security groups, go to
-- <http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-security-groups.html Amazon Redshift Cluster Security Groups>
-- in the /Amazon Redshift Cluster Management Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/redshift/latest/APIReference/API_CreateClusterSecurityGroup.html AWS API Reference> for CreateClusterSecurityGroup.
module Network.AWS.Redshift.CreateClusterSecurityGroup
(
-- * Creating a Request
createClusterSecurityGroup
, CreateClusterSecurityGroup
-- * Request Lenses
, creTags
, creClusterSecurityGroupName
, creDescription
-- * Destructuring the Response
, createClusterSecurityGroupResponse
, CreateClusterSecurityGroupResponse
-- * Response Lenses
, crsClusterSecurityGroup
, crsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Redshift.Types
import Network.AWS.Redshift.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- | ???
--
-- /See:/ 'createClusterSecurityGroup' smart constructor.
data CreateClusterSecurityGroup = CreateClusterSecurityGroup'
{ _creTags :: !(Maybe [Tag])
, _creClusterSecurityGroupName :: !Text
, _creDescription :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateClusterSecurityGroup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'creTags'
--
-- * 'creClusterSecurityGroupName'
--
-- * 'creDescription'
createClusterSecurityGroup
:: Text -- ^ 'creClusterSecurityGroupName'
-> Text -- ^ 'creDescription'
-> CreateClusterSecurityGroup
createClusterSecurityGroup pClusterSecurityGroupName_ pDescription_ =
CreateClusterSecurityGroup'
{ _creTags = Nothing
, _creClusterSecurityGroupName = pClusterSecurityGroupName_
, _creDescription = pDescription_
}
-- | A list of tag instances.
creTags :: Lens' CreateClusterSecurityGroup [Tag]
creTags = lens _creTags (\ s a -> s{_creTags = a}) . _Default . _Coerce;
-- | The name for the security group. Amazon Redshift stores the value as a
-- lowercase string.
--
-- Constraints:
--
-- - Must contain no more than 255 alphanumeric characters or hyphens.
-- - Must not be \"Default\".
-- - Must be unique for all security groups that are created by your AWS
-- account.
--
-- Example: 'examplesecuritygroup'
creClusterSecurityGroupName :: Lens' CreateClusterSecurityGroup Text
creClusterSecurityGroupName = lens _creClusterSecurityGroupName (\ s a -> s{_creClusterSecurityGroupName = a});
-- | A description for the security group.
creDescription :: Lens' CreateClusterSecurityGroup Text
creDescription = lens _creDescription (\ s a -> s{_creDescription = a});
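-- Added illustration (hypothetical values, not part of the generated code):
-- building a request with the smart constructor and adjusting it through the
-- lenses above, assuming the usual lens operators are in scope.
--
-- > createClusterSecurityGroup "examplesecuritygroup" "my security group"
-- >   & creTags .~ [someTag]  -- 'someTag' stands for an assumed Tag value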
instance AWSRequest CreateClusterSecurityGroup where
type Rs CreateClusterSecurityGroup =
CreateClusterSecurityGroupResponse
request = postQuery redshift
response
= receiveXMLWrapper
"CreateClusterSecurityGroupResult"
(\ s h x ->
CreateClusterSecurityGroupResponse' <$>
(x .@? "ClusterSecurityGroup") <*>
(pure (fromEnum s)))
instance ToHeaders CreateClusterSecurityGroup where
toHeaders = const mempty
instance ToPath CreateClusterSecurityGroup where
toPath = const "/"
instance ToQuery CreateClusterSecurityGroup where
toQuery CreateClusterSecurityGroup'{..}
= mconcat
["Action" =:
("CreateClusterSecurityGroup" :: ByteString),
"Version" =: ("2012-12-01" :: ByteString),
"Tags" =: toQuery (toQueryList "Tag" <$> _creTags),
"ClusterSecurityGroupName" =:
_creClusterSecurityGroupName,
"Description" =: _creDescription]
-- | /See:/ 'createClusterSecurityGroupResponse' smart constructor.
data CreateClusterSecurityGroupResponse = CreateClusterSecurityGroupResponse'
{ _crsClusterSecurityGroup :: !(Maybe ClusterSecurityGroup)
, _crsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateClusterSecurityGroupResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'crsClusterSecurityGroup'
--
-- * 'crsResponseStatus'
createClusterSecurityGroupResponse
:: Int -- ^ 'crsResponseStatus'
-> CreateClusterSecurityGroupResponse
createClusterSecurityGroupResponse pResponseStatus_ =
CreateClusterSecurityGroupResponse'
{ _crsClusterSecurityGroup = Nothing
, _crsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
crsClusterSecurityGroup :: Lens' CreateClusterSecurityGroupResponse (Maybe ClusterSecurityGroup)
crsClusterSecurityGroup = lens _crsClusterSecurityGroup (\ s a -> s{_crsClusterSecurityGroup = a});
-- | The response status code.
crsResponseStatus :: Lens' CreateClusterSecurityGroupResponse Int
crsResponseStatus = lens _crsResponseStatus (\ s a -> s{_crsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-redshift/gen/Network/AWS/Redshift/CreateClusterSecurityGroup.hs
|
mpl-2.0
| 6,041 | 0 | 13 | 1,183 | 736 | 444 | 292 | 94 | 1 |
import Distribution.Simple
import Distribution.PackageDescription
import Distribution.Version
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Program
import Distribution.Verbosity
import Data.Char (isSpace)
import Data.List (dropWhile,span)
import Control.Monad
main = defaultMainWithHooks simpleUserHooks {
hookedPrograms = [mysqlConfigProgram],
confHook = \pkg flags -> do
lbi <- confHook simpleUserHooks pkg flags
bi <- mysqlBuildInfo lbi
return lbi {
localPkgDescr = updatePackageDescription
(Just bi, []) (localPkgDescr lbi)
}
}
mysqlConfigProgram = (simpleProgram "mysql_config") {
programFindLocation = \verbosity _ -> do
mysql_config <- findProgramOnSearchPath verbosity defaultProgramSearchPath "mysql_config"
mysql_config5 <- findProgramOnSearchPath verbosity defaultProgramSearchPath "mysql_config5"
return (mysql_config `mplus` mysql_config5)
}
mysqlBuildInfo :: LocalBuildInfo -> IO BuildInfo
mysqlBuildInfo lbi = do
let mysqlConfig = getDbProgramOutput verbosity mysqlConfigProgram (withPrograms lbi)
ws = " \n\r\t"
includeDirs <- return . map (drop 2) . split ws =<< mysqlConfig ["--include"]
ldOptions <- return . split ws =<< mysqlConfig ["--libs"]
return emptyBuildInfo {
ldOptions = ldOptions,
includeDirs = includeDirs
}
where
verbosity = normal -- honestly, this is a hack
split :: Eq a => [a] -> [a] -> [[a]]
split xs cs = split' $ dropWhile (`elem` xs) cs
where split' [] = []
split' cs0 =
let (run, cs1) = span (`notElem` xs) cs0
cs2 = dropWhile (`elem` xs) cs1
in run:(split' cs2)
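-- Added note (illustration only): 'split' breaks a string on any of the given
-- separator characters and drops empty fields, e.g.
--   split " \t" " -I/usr/include \t-I/opt " == ["-I/usr/include","-I/opt"]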
|
ryantm/hdbc-mysql
|
Setup.hs
|
lgpl-2.1
| 1,715 | 0 | 16 | 382 | 493 | 260 | 233 | 39 | 2 |
module Db where
data Db =
-- A cabal sandbox db, with optional path to sandbox configuration file
Sandbox
-- The global ghc package database
| Global
-- The user package database, as defined by cabal
| User
-- A direct database path
| Dir FilePath
instance (Show Db) where
show (Sandbox) = "cabal sandbox"
show (Global ) = "ghc distribution (global db)"
show (User ) = "system user"
show (Dir p) = "db directory: " ++ p
instance (Ord Db) where
compare Sandbox Sandbox = EQ
compare Global Global = EQ
compare User User = EQ
compare (Dir _) (Dir _) = EQ
compare (Dir _) Global = LT
compare (Dir _) User = LT
compare (Dir _) Sandbox = LT
compare User Global = LT
compare User Sandbox = LT
compare Global Sandbox = LT
-- Just reversing the above for l != r
compare Global (Dir _) = GT
compare User (Dir _) = GT
compare Sandbox (Dir _) = GT
compare Global User = GT
compare Sandbox Global = GT
compare Sandbox User = GT
instance (Eq Db) where
l == r = compare l r == EQ
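-- Added note (illustration only): the instance above orders databases as
-- Dir _ < User < Global < Sandbox, so for example
--   minimum [Sandbox, User, Dir "/tmp/pkgdb"] == Dir "/tmp/pkgdb"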
|
jfeltz/dash-haskell
|
src/Db.hs
|
lgpl-3.0
| 1,140 | 3 | 9 | 367 | 199 | 148 | 51 | 30 | 0 |
-- |
-- Module : Text.Megaparsec.Combinator
-- Copyright : © 2015 Megaparsec contributors
-- © 2007 Paolo Martini
-- © 1999–2001 Daan Leijen
-- License : BSD3
--
-- Maintainer : Mark Karpov <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Commonly used generic combinators.
module Text.Megaparsec.Combinator
( between
, choice
, count
, count'
, endBy
, endBy1
, manyTill
, someTill
, option
, sepBy
, sepBy1
, skipMany
, skipSome
-- Deprecated combinators
, chainl
, chainl1
, chainr
, chainr1
, sepEndBy
, sepEndBy1 )
where
import Control.Applicative ((<|>), many, some, optional)
import Control.Monad
import Data.Foldable (asum)
import Text.Megaparsec.Prim
-- | @between open close p@ parses @open@, followed by @p@ and @close@.
-- Returns the value returned by @p@.
--
-- > braces = between (symbol "{") (symbol "}")
between :: Stream s m t => ParsecT s u m open ->
ParsecT s u m close -> ParsecT s u m a -> ParsecT s u m a
between open close p = open *> p <* close
-- | @choice ps@ tries to apply the parsers in the list @ps@ in order,
-- until one of them succeeds. Returns the value of the succeeding parser.
choice :: (Foldable f, Stream s m t) => f (ParsecT s u m a) -> ParsecT s u m a
choice = asum
-- | @count n p@ parses @n@ occurrences of @p@. If @n@ is smaller or
-- equal to zero, the parser is equivalent to @return []@. Returns a list of @n@
-- values.
--
-- This parser is defined in terms of 'count'', like this:
--
-- > count n = count' n n
count :: Stream s m t => Int -> ParsecT s u m a -> ParsecT s u m [a]
count n = count' n n
-- | @count\' m n p@ parses from @m@ to @n@ occurrences of @p@. If @n@ is
-- not positive or @m > n@, the parser is equivalent to @return []@. Returns a list
-- of parsed values.
--
-- Please note that @m@ /may/ be negative; in this case the effect is the same
-- as if it were equal to zero.
count' :: Stream s m t => Int -> Int -> ParsecT s u m a -> ParsecT s u m [a]
count' m n p
| n <= 0 || m > n = return []
| m > 0 = (:) <$> p <*> count' (pred m) (pred n) p
| otherwise = do
result <- optional p
case result of
Nothing -> return []
Just x -> (x:) <$> count' 0 (pred n) p
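-- Added illustration (not part of the public API): @count' 0 n p@ behaves as
-- an "at most @n@ occurrences" combinator, written out here as an example.
atMost :: Stream s m t => Int -> ParsecT s u m a -> ParsecT s u m [a]
atMost = count' 0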
-- | @endBy p sep@ parses /zero/ or more occurrences of @p@, separated
-- and ended by @sep@. Returns a list of values returned by @p@.
--
-- > cStatements = cStatement `endBy` semicolon
endBy :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
endBy p sep = many (p <* sep)
-- | @endBy1 p sep@ parses /one/ or more occurrences of @p@, separated
-- and ended by @sep@. Returns a list of values returned by @p@.
endBy1 :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
endBy1 p sep = some (p <* sep)
-- | @manyTill p end@ applies parser @p@ /zero/ or more times until
-- parser @end@ succeeds. Returns the list of values returned by @p@. This
-- parser can be used to scan comments:
--
-- > simpleComment = string "<!--" >> manyTill anyChar (try $ string "-->")
--
-- Note that we need to use 'try' since parsers @anyChar@ and @string
-- \"-->\"@ overlap and @string \"-->\"@ could consume input before failing.
manyTill :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m end -> ParsecT s u m [a]
manyTill p end = (end *> return []) <|> someTill p end
-- | @someTill p end@ works similarly to @manyTill p end@, but @p@ should
-- succeed at least once.
someTill :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m end -> ParsecT s u m [a]
someTill p end = (:) <$> p <*> manyTill p end
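-- Added illustration (not part of the public API): a non-empty line parser
-- could be written as
--
-- > nonEmptyLine = someTill anyChar newline
--
-- where @anyChar@ and @newline@ come from "Text.Megaparsec.Char".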
-- | @option x p@ tries to apply parser @p@. If @p@ fails without
-- consuming input, it returns the value @x@, otherwise the value returned
-- by @p@.
--
-- > priority = option 0 (digitToInt <$> digitChar)
option :: Stream s m t => a -> ParsecT s u m a -> ParsecT s u m a
option x p = p <|> return x
-- | @sepBy p sep@ parses /zero/ or more occurrences of @p@, separated
-- by @sep@. Returns a list of values returned by @p@.
--
-- > commaSep p = p `sepBy` comma
sepBy :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
sepBy p sep = sepBy1 p sep <|> return []
-- | @sepBy1 p sep@ parses /one/ or more occurrences of @p@, separated
-- by @sep@. Returns a list of values returned by @p@.
sepBy1 :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
sepBy1 p sep = (:) <$> p <*> many (sep *> p)
-- | @skipMany p@ applies the parser @p@ /zero/ or more times, skipping
-- its result.
--
-- > space = skipMany spaceChar
skipMany :: ParsecT s u m a -> ParsecT s u m ()
skipMany p = void $ many p
-- | @skipSome p@ applies the parser @p@ /one/ or more times, skipping
-- its result.
skipSome :: Stream s m t => ParsecT s u m a -> ParsecT s u m ()
skipSome p = void $ some p
-- Deprecated combinators
-- | @chainl p op x@ parses /zero/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /left/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. If there are zero occurrences of @p@, the value @x@ is returned.
{-# DEPRECATED chainl "Use \"Text.Megaparsec.Expr\" instead." #-}
chainl :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m (a -> a -> a) -> a -> ParsecT s u m a
chainl p op x = chainl1 p op <|> return x
-- | @chainl1 p op@ parses /one/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /left/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. This parser can for example be used to eliminate left recursion
-- which typically occurs in expression grammars.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainl1 "Use \"Text.Megaparsec.Expr\" instead." #-}
chainl1 :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m (a -> a -> a) -> ParsecT s u m a
chainl1 p op = p >>= rest
where rest x = ((($ x) <$> op <*> p) >>= rest) <|> return x
-- | @chainr p op x@ parses /zero/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /right/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. If there are no occurrences of @p@, the value @x@ is returned.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainr "Use \"Text.Megaparsec.Expr\" instead." #-}
chainr :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m (a -> a -> a) -> a -> ParsecT s u m a
chainr p op x = chainr1 p op <|> return x
-- | @chainr1 p op@ parses /one/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /right/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainr1 "Use \"Text.Megaparsec.Expr\" instead." #-}
chainr1 :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m (a -> a -> a) -> ParsecT s u m a
chainr1 p op = p >>= rest
where rest x = (($ x) <$> op <*> chainr1 p op) <|> return x
-- | @sepEndBy p sep@ parses /zero/ or more occurrences of @p@,
-- separated and optionally ended by @sep@. Returns a list of values
-- returned by @p@.
{-# DEPRECATED sepEndBy "Use @sepBy p sep <* optional sep@ instead." #-}
sepEndBy :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
sepEndBy p sep = sepBy p sep <* optional sep
-- | @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@,
-- separated and optionally ended by @sep@. Returns a list of values
-- returned by @p@.
{-# DEPRECATED sepEndBy1 "Use @sepBy1 p sep <* optional sep@ instead." #-}
sepEndBy1 :: Stream s m t =>
ParsecT s u m a -> ParsecT s u m sep -> ParsecT s u m [a]
sepEndBy1 p sep = sepBy1 p sep <* optional sep
|
tulcod/megaparsec
|
Text/Megaparsec/Combinator.hs
|
bsd-2-clause
| 7,948 | 0 | 14 | 1,924 | 1,793 | 939 | 854 | 90 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
{-# OPTIONS_GHC -fno-warn-auto-orphans #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Importing this module will activate RULES that use the FFI for vector ops.
module SubHask.Algebra.Vector.FFI
( distance_l2_m128
, distance_l2_m128_SVector_Dynamic
, distance_l2_m128_UVector_Dynamic
, distanceUB_l2_m128
, distanceUB_l2_m128_SVector_Dynamic
, distanceUB_l2_m128_UVector_Dynamic
)
where
import Control.Monad.Primitive
import Data.Primitive.ByteArray
import Foreign.Ptr
import Foreign.ForeignPtr
import Unsafe.Coerce
import SubHask.Algebra
import SubHask.Algebra.Vector
import SubHask.Category
import SubHask.Internal.Prelude
{-# RULES
"subhask/distance_l2_m128_UVector_Dynamic" distance = distance_l2_m128_UVector_Dynamic
"subhask/distance_l2_m128_SVector_Dynamic" distance = distance_l2_m128_SVector_Dynamic
"subhask/distanceUB_l2_m128_UVector_Dynamic" distanceUB = distanceUB_l2_m128_UVector_Dynamic
"subhask/distanceUB_l2_m128_SVector_Dynamic" distanceUB = distanceUB_l2_m128_SVector_Dynamic
#-}
{-# INLINE sizeOfFloat #-}
sizeOfFloat :: Int
sizeOfFloat = sizeOf (undefined::Float)
foreign import ccall unsafe "distance_l2_m128" distance_l2_m128
:: Ptr Float -> Ptr Float -> Int -> IO Float
foreign import ccall unsafe "distanceUB_l2_m128" distanceUB_l2_m128
:: Ptr Float -> Ptr Float -> Int -> Float -> IO Float
{-# INLINE distance_l2_m128_UVector_Dynamic #-}
distance_l2_m128_UVector_Dynamic :: UVector (s::Symbol) Float -> UVector (s::Symbol) Float -> Float
distance_l2_m128_UVector_Dynamic (UVector_Dynamic arr1 off1 n) (UVector_Dynamic arr2 off2 _)
= unsafeInlineIO $ distance_l2_m128 p1 p2 n
where
p1 = plusPtr (unsafeCoerce $ byteArrayContents arr1) (off1*sizeOfFloat)
p2 = plusPtr (unsafeCoerce $ byteArrayContents arr2) (off2*sizeOfFloat)
{-# INLINE distanceUB_l2_m128_UVector_Dynamic #-}
distanceUB_l2_m128_UVector_Dynamic :: UVector (s::Symbol) Float -> UVector (s::Symbol) Float -> Float -> Float
distanceUB_l2_m128_UVector_Dynamic (UVector_Dynamic arr1 off1 n) (UVector_Dynamic arr2 off2 _) ub
= unsafeInlineIO $ distanceUB_l2_m128 p1 p2 n ub
where
p1 = plusPtr (unsafeCoerce $ byteArrayContents arr1) (off1*sizeOfFloat)
p2 = plusPtr (unsafeCoerce $ byteArrayContents arr2) (off2*sizeOfFloat)
{-# INLINE distance_l2_m128_SVector_Dynamic #-}
distance_l2_m128_SVector_Dynamic :: SVector (s::Symbol) Float -> SVector (s::Symbol) Float -> Float
distance_l2_m128_SVector_Dynamic (SVector_Dynamic fp1 off1 n) (SVector_Dynamic fp2 off2 _)
= unsafeInlineIO $
withForeignPtr fp1 $ \p1 ->
withForeignPtr fp2 $ \p2 ->
distance_l2_m128 (plusPtr p1 $ off1*sizeOfFloat) (plusPtr p2 $ off2*sizeOfFloat) n
{-# INLINE distanceUB_l2_m128_SVector_Dynamic #-}
distanceUB_l2_m128_SVector_Dynamic :: SVector (s::Symbol) Float -> SVector (s::Symbol) Float -> Float -> Float
distanceUB_l2_m128_SVector_Dynamic (SVector_Dynamic fp1 off1 n) (SVector_Dynamic fp2 off2 _) ub
= unsafeInlineIO $
withForeignPtr fp1 $ \p1 ->
withForeignPtr fp2 $ \p2 ->
distanceUB_l2_m128 (plusPtr p1 $ off1*sizeOfFloat) (plusPtr p2 $ off2*sizeOfFloat) n ub
|
Drezil/subhask
|
src/SubHask/Algebra/Vector/FFI.hs
|
bsd-3-clause
| 3,253 | 0 | 13 | 510 | 716 | 381 | 335 | -1 | -1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/Data/Binary/Get/Internal.hs" #-}
{-# LANGUAGE CPP, RankNTypes, MagicHash, BangPatterns, TypeFamilies #-}
-- CPP C style pre-processing, the #if defined lines
-- RankNTypes forall r. statement
-- MagicHash the (# unboxing #), also needs GHC.primitives
module Data.Binary.Get.Internal (
-- * The Get type
Get
, runCont
, Decoder(..)
, runGetIncremental
, readN
, readNWith
-- * Parsing
, bytesRead
, isolate
-- * With input chunks
, withInputChunks
, Consume
, failOnEOF
, get
, put
, ensureN
-- * Utility
, remaining
, getBytes
, isEmpty
, lookAhead
, lookAheadM
, lookAheadE
, label
-- ** ByteStrings
, getByteString
) where
import Foreign
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
import Control.Applicative
import Control.Monad
import qualified Control.Monad.Fail as Fail
import Data.Binary.Internal ( accursedUnutterablePerformIO )
-- Kolmodin 20100427: at ZuriHac we discussed having partial take a
-- "Maybe ByteString" and implemented it in this way.
-- The reasoning was that you could accidentally provide an empty bytestring,
-- and it should not terminate the decoding (empty would mean eof).
-- However, I'd say that it's also a risk that you get stuck in a loop,
-- where you keep providing an empty string. Anyway, no new input should be
-- rare, as the RTS should only wake you up if you actually have some data
-- to read from your fd.
-- | A decoder produced by running a 'Get' monad.
data Decoder a = Fail !B.ByteString String
-- ^ The decoder ran into an error. The decoder either used
-- 'fail' or was not provided enough input.
| Partial (Maybe B.ByteString -> Decoder a)
-- ^ The decoder has consumed the available input and needs
-- more to continue. Provide 'Just' if more input is available
-- and 'Nothing' otherwise, and you will get a new 'Decoder'.
| Done !B.ByteString a
-- ^ The decoder has successfully finished. Except for the
-- output value you also get the unused input.
| BytesRead {-# UNPACK #-} !Int64 (Int64 -> Decoder a)
-- ^ The decoder needs to know the current position in the input.
                -- Given the number of bytes remaining in the decoder, the outer
-- decoder runner needs to calculate the position and
-- resume the decoding.
-- unrolled codensity/state monad
newtype Get a = C { runCont :: forall r.
B.ByteString ->
Success a r ->
Decoder r }
type Success a r = B.ByteString -> a -> Decoder r
instance Monad Get where
return = pure
(>>=) = bindG
fail = Fail.fail
instance Fail.MonadFail Get where
fail = failG
bindG :: Get a -> (a -> Get b) -> Get b
bindG (C c) f = C $ \i ks -> c i (\i' a -> (runCont (f a)) i' ks)
{-# INLINE bindG #-}
failG :: String -> Get a
failG str = C $ \i _ks -> Fail i str
apG :: Get (a -> b) -> Get a -> Get b
apG d e = do
b <- d
a <- e
return (b a)
{-# INLINE [0] apG #-}
fmapG :: (a -> b) -> Get a -> Get b
fmapG f m = C $ \i ks -> runCont m i (\i' a -> ks i' (f a))
{-# INLINE fmapG #-}
instance Applicative Get where
pure = \x -> C $ \s ks -> ks s x
{-# INLINE [0] pure #-}
(<*>) = apG
{-# INLINE (<*>) #-}
-- | /Since: 0.7.1.0/
instance MonadPlus Get where
mzero = empty
mplus = (<|>)
instance Functor Get where
fmap = fmapG
instance Functor Decoder where
fmap f (Done s a) = Done s (f a)
fmap f (Partial k) = Partial (fmap f . k)
fmap _ (Fail s msg) = Fail s msg
fmap f (BytesRead b k) = BytesRead b (fmap f . k)
instance (Show a) => Show (Decoder a) where
show (Fail _ msg) = "Fail: " ++ msg
show (Partial _) = "Partial _"
show (Done _ a) = "Done: " ++ show a
show (BytesRead _ _) = "BytesRead"
-- | Run a 'Get' monad. See 'Decoder' for what to do next, like providing
-- input, handling decoding errors and to get the output value.
runGetIncremental :: Get a -> Decoder a
runGetIncremental g = noMeansNo $
runCont g B.empty (\i a -> Done i a)
-- | Make sure we don't have to pass Nothing to a Partial twice.
-- This way we don't need to pass around an EOF value in the Get monad, it
-- can safely ask several times if it needs to.
noMeansNo :: Decoder a -> Decoder a
noMeansNo r0 = go r0
where
go r =
case r of
Partial k -> Partial $ \ms ->
case ms of
Just _ -> go (k ms)
Nothing -> neverAgain (k ms)
BytesRead n k -> BytesRead n (go . k)
Done _ _ -> r
Fail _ _ -> r
neverAgain r =
case r of
Partial k -> neverAgain (k Nothing)
BytesRead n k -> BytesRead n (neverAgain . k)
Fail _ _ -> r
Done _ _ -> r
prompt :: B.ByteString -> Decoder a -> (B.ByteString -> Decoder a) -> Decoder a
prompt inp kf ks = prompt' kf (\inp' -> ks (inp `B.append` inp'))
prompt' :: Decoder a -> (B.ByteString -> Decoder a) -> Decoder a
prompt' kf ks =
let loop =
Partial $ \sm ->
case sm of
Just s | B.null s -> loop
| otherwise -> ks s
Nothing -> kf
in loop
-- | Get the total number of bytes read to this point.
bytesRead :: Get Int64
bytesRead = C $ \inp k -> BytesRead (fromIntegral $ B.length inp) (k inp)
-- | Isolate a decoder to operate with a fixed number of bytes, and fail if
-- fewer bytes were consumed, or more bytes were attempted to be consumed.
-- If the given decoder fails, 'isolate' will also fail.
-- Offset from 'bytesRead' will be relative to the start of 'isolate', not the
-- absolute offset in the input.
--
-- /Since: 0.7.2.0/
isolate :: Int -- ^ The number of bytes that must be consumed
-> Get a -- ^ The decoder to isolate
-> Get a
isolate n0 act
| n0 < 0 = fail "isolate: negative size"
| otherwise = go n0 (runCont act B.empty Done)
where
go !n (Done left x)
| n == 0 && B.null left = return x
| otherwise = do
pushFront left
let consumed = n0 - n - B.length left
fail $ "isolate: the decoder consumed " ++ show consumed ++ " bytes" ++
" which is less than the expected " ++ show n0 ++ " bytes"
go 0 (Partial resume) = go 0 (resume Nothing)
go n (Partial resume) = do
inp <- C $ \inp k -> do
let takeLimited str =
let (inp', out) = B.splitAt n str
in k out (Just inp')
case not (B.null inp) of
True -> takeLimited inp
False -> prompt inp (k B.empty Nothing) takeLimited
case inp of
Nothing -> go n (resume Nothing)
Just str -> go (n - B.length str) (resume (Just str))
go _ (Fail bs err) = pushFront bs >> fail err
go n (BytesRead r resume) =
go n (resume $! fromIntegral n0 - fromIntegral n - r)
type Consume s = s -> B.ByteString -> Either s (B.ByteString, B.ByteString)
withInputChunks :: s -> Consume s -> ([B.ByteString] -> b) -> ([B.ByteString] -> Get b) -> Get b
withInputChunks initS consume onSucc onFail = go initS []
where
go state acc = C $ \inp ks ->
case consume state inp of
Left state' -> do
let acc' = inp : acc
prompt'
(runCont (onFail (reverse acc')) B.empty ks)
(\str' -> runCont (go state' acc') str' ks)
Right (want,rest) -> do
ks rest (onSucc (reverse (want:acc)))
failOnEOF :: [B.ByteString] -> Get a
failOnEOF bs = C $ \_ _ -> Fail (B.concat bs) "not enough bytes"
-- | Test whether all input has been consumed, i.e. there are no remaining
-- undecoded bytes.
isEmpty :: Get Bool
isEmpty = C $ \inp ks ->
if B.null inp
then prompt inp (ks inp True) (\inp' -> ks inp' False)
else ks inp False
-- | DEPRECATED. Same as 'getByteString'.
{-# DEPRECATED getBytes "Use 'getByteString' instead of 'getBytes'." #-}
getBytes :: Int -> Get B.ByteString
getBytes = getByteString
{-# INLINE getBytes #-}
-- | /Since: 0.7.0.0/
instance Alternative Get where
empty = C $ \inp _ks -> Fail inp "Data.Binary.Get(Alternative).empty"
{-# INLINE empty #-}
(<|>) f g = do
(decoder, bs) <- runAndKeepTrack f
case decoder of
Done inp x -> C $ \_ ks -> ks inp x
Fail _ _ -> pushBack bs >> g
_ -> error "Binary: impossible"
{-# INLINE (<|>) #-}
some p = (:) <$> p <*> many p
{-# INLINE some #-}
many p = do
v <- (Just <$> p) <|> pure Nothing
case v of
Nothing -> pure []
Just x -> (:) x <$> many p
{-# INLINE many #-}
-- | Run a decoder and keep track of all the input it consumes.
-- Once it's finished, return the final decoder (always 'Done' or 'Fail'),
-- and unconsume all the input the decoder required to run.
-- Any additional chunks which were required to run the decoder
-- will also be returned.
runAndKeepTrack :: Get a -> Get (Decoder a, [B.ByteString])
runAndKeepTrack g = C $ \inp ks ->
let r0 = runCont g inp (\inp' a -> Done inp' a)
go !acc r = case r of
Done inp' a -> ks inp (Done inp' a, reverse acc)
Partial k -> Partial $ \minp -> go (maybe acc (:acc) minp) (k minp)
Fail inp' s -> ks inp (Fail inp' s, reverse acc)
BytesRead unused k -> BytesRead unused (go acc . k)
in go [] r0
{-# INLINE runAndKeepTrack #-}
pushBack :: [B.ByteString] -> Get ()
pushBack [] = C $ \ inp ks -> ks inp ()
pushBack bs = C $ \ inp ks -> ks (B.concat (inp : bs)) ()
{-# INLINE pushBack #-}
pushFront :: B.ByteString -> Get ()
pushFront bs = C $ \ inp ks -> ks (B.append bs inp) ()
{-# INLINE pushFront #-}
-- | Run the given decoder, but without consuming its input. If the given
-- decoder fails, then so will this function.
--
-- /Since: 0.7.0.0/
lookAhead :: Get a -> Get a
lookAhead g = do
(decoder, bs) <- runAndKeepTrack g
case decoder of
Done _ a -> pushBack bs >> return a
Fail inp s -> C $ \_ _ -> Fail inp s
_ -> error "Binary: impossible"
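-- A minimal usage sketch (illustrative only, not part of the original API):
-- peek at the first byte without consuming it, then decode the real field.
-- The field length @n@ is supplied by the caller for this example.
_peekThenRead :: Int -> Get (B.ByteString, B.ByteString)
_peekThenRead n = do
  firstByte <- lookAhead (getByteString 1)  -- leaves the input untouched
  field     <- getByteString n              -- the field is consumed for real
  return (firstByte, field)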
-- | Run the given decoder, and only consume its input if it returns 'Just'.
-- If 'Nothing' is returned, the input will be unconsumed.
-- If the given decoder fails, then so will this function.
--
-- /Since: 0.7.0.0/
lookAheadM :: Get (Maybe a) -> Get (Maybe a)
lookAheadM g = do
let g' = maybe (Left ()) Right <$> g
either (const Nothing) Just <$> lookAheadE g'
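-- A minimal usage sketch (illustrative only, not part of the original API):
-- consume an optional one-byte marker. If the marker byte matches, it is
-- consumed and returned; otherwise the input is left untouched. The marker
-- value 0x2a is arbitrary.
_optionalMarker :: Get (Maybe B.ByteString)
_optionalMarker = lookAheadM $ do
  b <- getByteString 1
  return (if b == B.singleton 0x2a then Just b else Nothing)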
-- | Run the given decoder, and only consume its input if it returns 'Right'.
-- If 'Left' is returned, the input will be unconsumed.
-- If the given decoder fails, then so will this function.
--
-- /Since: 0.7.1.0/
lookAheadE :: Get (Either a b) -> Get (Either a b)
lookAheadE g = do
(decoder, bs) <- runAndKeepTrack g
case decoder of
Done _ (Left x) -> pushBack bs >> return (Left x)
Done inp (Right x) -> C $ \_ ks -> ks inp (Right x)
Fail inp s -> C $ \_ _ -> Fail inp s
_ -> error "Binary: impossible"
-- | Label a decoder. If the decoder fails, the label will be appended on
-- a new line to the error message string.
--
-- /Since: 0.7.2.0/
label :: String -> Get a -> Get a
label msg decoder = C $ \inp ks ->
let r0 = runCont decoder inp (\inp' a -> Done inp' a)
go r = case r of
Done inp' a -> ks inp' a
Partial k -> Partial (go . k)
Fail inp' s -> Fail inp' (s ++ "\n" ++ msg)
BytesRead u k -> BytesRead u (go . k)
in go r0
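-- A minimal usage sketch (illustrative only, not part of the original API):
-- attach a human-readable context to a sub-decoder so that failures deep
-- inside a structure report where they happened. The 16-byte header is a
-- made-up example.
_labelledHeader :: Get B.ByteString
_labelledHeader = label "while decoding the 16-byte header" (getByteString 16)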
-- | DEPRECATED. Get the number of bytes of remaining input.
-- Note that this is an expensive function to use: in order to calculate how
-- much input remains, all input has to be read and kept in memory.
-- The decoder keeps the input as a strict bytestring, so you are likely better
-- off by calculating the remaining input in another way.
{-# DEPRECATED remaining "This will force all remaining input, don't use it." #-}
remaining :: Get Int64
remaining = C $ \ inp ks ->
let loop acc = Partial $ \ minp ->
case minp of
Nothing -> let all_inp = B.concat (inp : (reverse acc))
in ks all_inp (fromIntegral $ B.length all_inp)
Just inp' -> loop (inp':acc)
in loop []
------------------------------------------------------------------------
-- ByteStrings
--
-- | An efficient get method for strict ByteStrings. Fails if fewer than @n@
-- bytes are left in the input. If @n <= 0@ then the empty string is returned.
getByteString :: Int -> Get B.ByteString
getByteString n | n > 0 = readN n (B.unsafeTake n)
| otherwise = return B.empty
{-# INLINE getByteString #-}
-- | Get the current chunk.
get :: Get B.ByteString
get = C $ \inp ks -> ks inp inp
-- | Replace the current chunk.
put :: B.ByteString -> Get ()
put s = C $ \_inp ks -> ks s ()
-- | Return at least @n@ bytes, maybe more. If not enough data is available
-- the computation will escape with 'Partial'.
readN :: Int -> (B.ByteString -> a) -> Get a
readN !n f = ensureN n >> unsafeReadN n f
{-# INLINE [0] readN #-}
{-# RULES
"readN/readN merge" forall n m f g.
apG (readN n f) (readN m g) = readN (n+m) (\bs -> f bs $ g (B.unsafeDrop n bs)) #-}
-- | Ensure that there are at least @n@ bytes available. If not, the
-- computation will escape with 'Partial'.
ensureN :: Int -> Get ()
ensureN !n0 = C $ \inp ks -> do
if B.length inp >= n0
then ks inp ()
else runCont (withInputChunks n0 enoughChunks onSucc onFail >>= put) inp ks
where -- might look a bit funny, but plays very well with GHC's inliner.
-- GHC won't inline recursive functions, so we make ensureN non-recursive
enoughChunks n str
| B.length str >= n = Right (str,B.empty)
| otherwise = Left (n - B.length str)
-- Sometimes we will produce leftovers lists of the form [B.empty, nonempty]
-- where `nonempty` is a non-empty ByteString. In this case we can avoid a copy
-- by simply dropping the empty prefix. In principle ByteString might want
-- to gain this optimization as well
onSucc = B.concat . dropWhile B.null
onFail bss = C $ \_ _ -> Fail (B.concat bss) "not enough bytes"
{-# INLINE ensureN #-}
unsafeReadN :: Int -> (B.ByteString -> a) -> Get a
unsafeReadN !n f = C $ \inp ks -> do
ks (B.unsafeDrop n inp) $! f inp -- strict return
-- | @readNWith n f@ where @f@ must be deterministic and not have side effects.
readNWith :: Int -> (Ptr a -> IO a) -> Get a
readNWith n f = do
-- It should be safe to use accursedUnutterablePerformIO here.
-- The action must be deterministic and not have any external side effects.
-- It depends on the value of the ByteString so the value dependencies look OK.
readN n $ \s -> accursedUnutterablePerformIO $ B.unsafeUseAsCString s (f . castPtr)
{-# INLINE readNWith #-}
|
phischu/fragnix
|
tests/packages/scotty/Data.Binary.Get.Internal.hs
|
bsd-3-clause
| 14,913 | 0 | 24 | 4,117 | 4,084 | 2,084 | 2,000 | 274 | 8 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
module Tinc.CacheSpec (spec) where
import Helper
import MockedEnv
import MockedProcess
import Test.Mockery.Action
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Data.List
import qualified Data.Graph.Wrapper as G
import Safe
import System.FilePath
import System.IO.Temp
import System.Directory
import Tinc.Cache
import Tinc.GhcPkg
import Tinc.Package
import Tinc.Sandbox
import Tinc.SourceDependency
import Tinc.Types
import Tinc.SandboxSpec (writePackageConfig)
data ReadGhcPkgEnv = ReadGhcPkgEnv {
envReadGhcPkg :: [Path PackageDb] -> [String] -> IO String
}
ghcPkgEnv :: ReadGhcPkgEnv
ghcPkgEnv = ReadGhcPkgEnv readGhcPkg
instance GhcPkg (WithEnv ReadGhcPkgEnv) where
readGhcPkg packageDbs args = WithEnv $ asks envReadGhcPkg >>= liftIO . ($ args) . ($ packageDbs)
toSimplePackage :: Package -> SimplePackage
toSimplePackage (Package name (Version version _)) = SimplePackage name version
fromSimplePackage :: SimplePackage -> Package
fromSimplePackage (SimplePackage name version) = Package name (Version version Nothing)
spec :: Spec
spec = do
describe "readPackageGraph" $ do
context "when a package has no dependencies and no other packages depend on it" $ do
it "includes package" $ do
-- NOTE: `ghc-pkg dot` omits packages from the graph that both:
--
-- 1. have no dependencies
-- 2. no other packages depend on
--
-- This test case makes sure that we properly handle this.
withSystemTempDirectory "tinc" $ \ (Path -> packageDb) -> do
let package = Package "foo" "0.1.0"
packageConfig = Path $ path packageDb </> "foo-0.1.0-8b77e2706d2c2c9243c5d86e44c11aa6.conf"
graph = "digraph g {}"
globalPackageDb = "/path/to/global/package.conf.d"
packageDbs = [globalPackageDb, packageDb]
mockedEnv = ghcPkgEnv {envReadGhcPkg = stub (packageDbs, ["dot"], return graph)}
writePackageConfig (toSimplePackage package, path packageConfig)
touch $ path packageDb </> "package.cache"
withEnv mockedEnv (readPackageGraph [] globalPackageDb packageDb)
`shouldReturn` G.fromList [(package, PackageConfig packageConfig, [])]
describe "addAddSourceHashes" $ do
let hash = "8cd0e753e18b1576cbe3eb2e61977a3b0debf430"
foo = SimplePackage "foo" "0.1.0"
writeAddSourceHashes packageDb =
writeFile (path packageDb </> "add-source.yaml") "- {package-name: foo, hash: 8cd0e753e18b1576cbe3eb2e61977a3b0debf430}"
it "adds add-source hashes to a package graph" $ do
withSystemTempDirectory "tinc" $ \ (Path -> packageDb) -> do
let fooConfig = PackageConfig ""
graph = G.fromList [(foo, fooConfig, [])]
writeAddSourceHashes packageDb
addAddSourceHashes packageDb graph `shouldReturn`
G.fromList [(Package "foo" (Version "0.1.0" $ Just hash), fooConfig, [])]
it "doesn't attach add-source hashes to global packages" $ do
withSystemTempDirectory "tinc" $ \ (Path -> packageDb) -> do
let fooConfig = GlobalPackage
graph = G.fromList [(foo, fooConfig, [])]
writeAddSourceHashes packageDb
addAddSourceHashes packageDb graph `shouldReturn`
G.fromList [(fromSimplePackage foo, fooConfig, [])]
describe "populateCacheAction" $ do
let sourceDependencyCache = "/path/to/add-source-cache"
it "adds add-source dependencies to the sandbox" $ do
let missing = [Package "foo" (Version "0.1.0" $ Just "foo-hash")]
populateCacheActionAddSource <$> populateCacheAction sourceDependencyCache missing [] `shouldBe`
Right ["/path/to/add-source-cache/foo/foo-hash"]
it "does not add reusable add-source dependencies to the sandbox" $ do
let missing = [Package "foo" "0.1.0"]
reusable = [CachedPackage (Package "bar" (Version "0.2.0" $ Just "bar-hash")) "bar.conf"]
populateCacheActionAddSource <$> populateCacheAction sourceDependencyCache missing reusable `shouldBe` Right []
it "does not include reusable add-source dependencies in the install plan" $ do
let missing = [Package "foo" "0.1.0"]
reusable = [CachedPackage (Package "bar" (Version "0.2.0" $ Just "bar-hash")) "bar.conf"]
populateCacheActionInstallPlan <$> populateCacheAction sourceDependencyCache missing reusable `shouldBe` Right missing
it "stores hashes of add-source dependencies in the cache" $ do
let missing = [Package "foo" (Version "0.1.0" $ Just "foo-hash")]
reusable = [CachedPackage (Package "bar" (Version "0.2.0" $ Just "bar-hash")) "bar.conf"]
populateCacheActionWriteAddSourceHashes <$> populateCacheAction sourceDependencyCache missing reusable `shouldBe`
Right [SourceDependency "foo" "foo-hash", SourceDependency "bar" "bar-hash"]
context "when list of missing packages is empty" $ do
let missing = []
it "returns reusable packages" $ do
let reusable = [CachedPackage (Package "foo" "0.1.0") "foo.conf", CachedPackage (Package "bar" "0.2.0") "bar.conf"]
populateCacheAction sourceDependencyCache missing reusable `shouldBe` Left reusable
describe "populateCache" $ do
let cabalSandboxInit = ("cabal", ["v1-sandbox", "init"], touch ".cabal-sandbox/x86_64-linux-ghc-7.8.4-packages.conf.d/package.cache")
it "uses add-source dependencies" $
inTempDirectory $ do
withSystemTempDirectory "tinc" $ \ (Path -> cache) -> do
withSystemTempDirectory "tinc" $ \ (Path -> sourceDependencyCache) -> do
let mockedCallProcess command args = stub [cabalSandboxInit, cabalAddSource, cabalInstall, recache] command args
where
packageDb = atDef "/path/to/some/tmp/dir" args 3
cabalAddSource = ("cabal", ["v1-sandbox", "add-source", path sourceDependencyCache </> "foo" </> "abc"], writeFile "add-source" "foo")
cabalInstall = ("cabal", ["v1-install", "--bindir=$prefix/bin/$pkgid", "foo-0.1.0"], (readFile "add-source" `shouldReturn` "foo") >> writeFile "install" "bar")
recache = ("ghc-pkg", ["--no-user-package-conf", "recache", "--package-conf", packageDb], return ())
mockedEnv = env {envReadProcess = dummy "envReadProcess", envCallProcess = mockedCallProcess}
_ <- withEnv mockedEnv $
populateCache cache sourceDependencyCache [Package "foo" "0.1.0"{versionAddSourceHash = Just "abc"}] []
[sandbox] <- listSandboxes cache
readFile (path sandbox </> "install") `shouldReturn` "bar"
it "stores hashes of add-source dependencies in the cache" $
inTempDirectory $ do
withSystemTempDirectory "tinc" $ \ (Path -> cache) -> do
withSystemTempDirectory "tinc" $ \ (Path -> sourceDependencyCache) -> do
let mockedCallProcess command args = stub [cabalSandboxInit, cabalAddSource "foo/abc", cabalAddSource "bar/def", cabalInstall, recache] command args
where
packageDb = atDef "/path/to/some/tmp/dir" args 3
cabalAddSource packageCachePath =
("cabal", ["v1-sandbox", "add-source", path sourceDependencyCache </> packageCachePath], return ())
cabalInstall = ("cabal", ["v1-install", "--bindir=$prefix/bin/$pkgid", "foo-0.1.0"], return ())
recache = ("ghc-pkg", ["--no-user-package-conf", "recache", "--package-conf", packageDb], return ())
mockedEnv = env {envReadProcess = dummy "envReadProcess", envCallProcess = mockedCallProcess}
let barPackageConfig = Path (path cache </> "foo")
touch $ path barPackageConfig
_ <- withEnv mockedEnv $
populateCache cache sourceDependencyCache
[Package "foo" "0.1.0"{versionAddSourceHash = Just "abc"}]
[CachedPackage (Package "bar" "0.1.0"{versionAddSourceHash = Just "def"}) barPackageConfig]
[sandbox] <- listSandboxes cache
packageDb <- findPackageDb sandbox
readAddSourceHashes packageDb `shouldReturn` [SourceDependency "foo" "abc", SourceDependency "bar" "def"]
context "when list of missing packages is empty" $ do
it "returns reusable packages" $ do
let mockedEnv = env {envReadProcess = undefined, envCallProcess = undefined}
reusable = [
CachedPackage (Package "foo" "0.1.0") "foo.conf"
, CachedPackage (Package "bar" "0.1.0") "bar.conf"
]
withEnv mockedEnv (populateCache undefined undefined [] reusable)
`shouldReturn` reusable
describe "listSandboxes" $ do
it "lists sandboxes" $ do
inTempDirectory $ do
touch "foo/tinc.valid.v3"
touch "bar/tinc.valid.v3"
sandboxes <- listSandboxes "."
sandboxes `shouldMatchList` ["./foo", "./bar"]
it "rejects invalid sandboxes" $ do
inTempDirectory $ do
touch "foo/tinc.valid.v3"
touch "bar/something"
sandboxes <- listSandboxes "."
sandboxes `shouldMatchList` ["./foo"]
describe "cachedExecutables" $ do
let sandbox = ".cabal-sandbox"
packageConfig = Path (sandbox </> "packages.conf.d/markdown-unlit-0.1.0-269c14.conf")
package = Package "markdown-unlit" "0.1.0"
cachedPackage = CachedPackage package packageConfig
executables = [
sandbox </> "bin/markdown-unlit-0.1.0/foo"
, sandbox </> "bin/markdown-unlit-0.1.0/bar"
]
it "returns executables for specified package" $ do
inTempDirectory $ do
touch (path packageConfig)
mapM_ touch executables
dir <- getCurrentDirectory
cachedExecutables cachedPackage `shouldReturn` sort (map (dir </>) executables)
context "when package has no executables" $ do
it "returns empty list" $ do
inTempDirectory $ do
touch (path packageConfig)
cachedExecutables cachedPackage `shouldReturn` []
|
sol/tinc
|
test/Tinc/CacheSpec.hs
|
mit
| 10,384 | 0 | 30 | 2,561 | 2,428 | 1,226 | 1,202 | 168 | 1 |
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Safe #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Map.Strict
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of ordered maps from keys to values
-- (dictionaries).
--
-- The API of this module is strict in both the keys and the values.
-- If you need value-lazy maps, use "Data.Map.Lazy" instead.
-- The 'Map' type is shared between the lazy and strict modules,
-- meaning that the same 'Map' value can be passed to functions in
-- both modules (although that is rarely needed).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import qualified Data.Map.Strict as Map
--
-- The implementation of 'Map' is based on /size balanced/ binary trees (or
-- trees of /bounded balance/) as described by:
--
-- * Stephen Adams, \"/Efficient sets: a balancing act/\",
-- Journal of Functional Programming 3(4):553-562, October 1993,
-- <http://www.swiss.ai.mit.edu/~adams/BB/>.
--
-- * J. Nievergelt and E.M. Reingold,
-- \"/Binary search trees of bounded balance/\",
-- SIAM journal of computing 2(1), March 1973.
--
-- Note that the implementation is /left-biased/ -- the elements of the
-- first argument are always preferred to those of the second, for example in
-- 'union' or 'insert'.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation (<http://en.wikipedia.org/wiki/Big_O_notation>).
--
-- Be aware that the 'Functor', 'Traversable' and 'Data' instances
-- are the same as for the "Data.Map.Lazy" module, so if they are used
-- on strict maps, the resulting maps will be lazy.
-----------------------------------------------------------------------------
-- See the notes at the beginning of Data.Map.Base.
module Data.Map.Strict
(
-- * Strictness properties
-- $strictness
-- * Map type
#if !defined(TESTING)
Map -- instance Eq,Show,Read
#else
Map(..) -- instance Eq,Show,Read
#endif
-- * Operators
, (!), (\\)
-- * Query
, null
, size
, member
, notMember
, lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, foldr
, foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Indexed
, lookupIndex
, findIndex
, elemAt
, updateAt
, deleteAt
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
, valid
#if defined(TESTING)
-- * Internals
, bin
, balanced
, link
, merge
#endif
) where
import Prelude hiding (lookup,map,filter,foldr,foldl,null)
import Data.Map.Base hiding
( findWithDefault
, singleton
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
, unionWith
, unionWithKey
, unionsWith
, differenceWith
, differenceWithKey
, intersectionWith
, intersectionWithKey
, mergeWithKey
, map
, mapWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeysWith
, fromSet
, fromList
, fromListWith
, fromListWithKey
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, updateAt
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
)
import qualified Data.Set.Base as Set
import Data.StrictPair
import Data.Bits (shiftL, shiftR)
-- Use macros to define strictness of functions. STRICT_x_OF_y
-- denotes a y-ary function strict in the x-th parameter. Similarly
-- STRICT_x_y_OF_z denotes a z-ary function strict in the x-th and
-- y-th parameter. We do not use BangPatterns, because they are not
-- in any standard and we want the code to be compiled by as many
-- compilers as possible.
#define STRICT_1_OF_2(fn) fn arg _ | arg `seq` False = undefined
#define STRICT_1_OF_3(fn) fn arg _ _ | arg `seq` False = undefined
#define STRICT_2_OF_3(fn) fn _ arg _ | arg `seq` False = undefined
#define STRICT_1_2_OF_3(fn) fn arg1 arg2 _ | arg1 `seq` arg2 `seq` False = undefined
#define STRICT_2_OF_4(fn) fn _ arg _ _ | arg `seq` False = undefined
-- $strictness
--
-- This module satisfies the following strictness properties:
--
-- 1. Key arguments are evaluated to WHNF;
--
-- 2. Keys and values are evaluated to WHNF before they are stored in
-- the map.
--
-- Here's an example illustrating the first property:
--
-- > delete undefined m == undefined
--
-- Here are some examples that illustrate the second property:
--
-- > map (\ v -> undefined) m == undefined -- m is not empty
-- > mapKeys (\ k -> undefined) m == undefined -- m is not empty
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(log n)/. The expression @('findWithDefault' def k map)@ returns
-- the value at key @k@ or returns default value @def@
-- when the key is not in the map.
--
-- > findWithDefault 'x' 1 (fromList [(5,'a'), (3,'b')]) == 'x'
-- > findWithDefault 'x' 5 (fromList [(5,'a'), (3,'b')]) == 'a'
-- See Map.Base.Note: Local 'go' functions and capturing
findWithDefault :: Ord k => a -> k -> Map k a -> a
findWithDefault def k = k `seq` go
where
go Tip = def
go (Bin _ kx x l r) = case compare k kx of
LT -> go l
GT -> go r
EQ -> x
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE findWithDefault #-}
#else
{-# INLINE findWithDefault #-}
#endif
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. A map with a single element.
--
-- > singleton 1 'a' == fromList [(1, 'a')]
-- > size (singleton 1 'a') == 1
singleton :: k -> a -> Map k a
singleton k x = x `seq` Bin 1 k x Tip Tip
{-# INLINE singleton #-}
{--------------------------------------------------------------------
Insertion
--------------------------------------------------------------------}
-- | /O(log n)/. Insert a new key and value in the map.
-- If the key is already present in the map, the associated value is
-- replaced with the supplied value. 'insert' is equivalent to
-- @'insertWith' 'const'@.
--
-- > insert 5 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'x')]
-- > insert 7 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'a'), (7, 'x')]
-- > insert 5 'x' empty == singleton 5 'x'
-- See Map.Base.Note: Type of local 'go' function
insert :: Ord k => k -> a -> Map k a -> Map k a
insert = go
where
go :: Ord k => k -> a -> Map k a -> Map k a
STRICT_1_2_OF_3(go)
go kx x Tip = singleton kx x
go kx x (Bin sz ky y l r) =
case compare kx ky of
LT -> balanceL ky y (go kx x l) r
GT -> balanceR ky y l (go kx x r)
EQ -> Bin sz kx x l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insert #-}
#else
{-# INLINE insert #-}
#endif
-- | /O(log n)/. Insert with a function, combining new value and old value.
-- @'insertWith' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert the pair @(key, f new_value old_value)@.
--
-- > insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "xxxa")]
-- > insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWith (++) 5 "xxx" empty == singleton 5 "xxx"
insertWith :: Ord k => (a -> a -> a) -> k -> a -> Map k a -> Map k a
insertWith f = insertWithKey (\_ x' y' -> f x' y')
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertWith #-}
#else
{-# INLINE insertWith #-}
#endif
-- | /O(log n)/. Insert with a function, combining key, new value and old value.
-- @'insertWithKey' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert the pair @(key,f key new_value old_value)@.
-- Note that the key passed to f is the same key passed to 'insertWithKey'.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:xxx|a")]
-- > insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWithKey f 5 "xxx" empty == singleton 5 "xxx"
-- See Map.Base.Note: Type of local 'go' function
insertWithKey :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
insertWithKey = go
where
go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
STRICT_2_OF_4(go)
go _ kx x Tip = singleton kx x
go f kx x (Bin sy ky y l r) =
case compare kx ky of
LT -> balanceL ky y (go f kx x l) r
GT -> balanceR ky y l (go f kx x r)
EQ -> let x' = f kx x y
in x' `seq` Bin sy kx x' l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertWithKey #-}
#else
{-# INLINE insertWithKey #-}
#endif
-- | /O(log n)/. Combines insert operation with old value retrieval.
-- The expression (@'insertLookupWithKey' f k x map@)
-- is a pair where the first element is equal to (@'lookup' k map@)
-- and the second element equal to (@'insertWithKey' f k x map@).
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
-- > insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
-- > insertLookupWithKey f 5 "xxx" empty == (Nothing, singleton 5 "xxx")
--
-- This is how to define @insertLookup@ using @insertLookupWithKey@:
--
-- > let insertLookup kx x t = insertLookupWithKey (\_ a _ -> a) kx x t
-- > insertLookup 5 "x" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "x")])
-- > insertLookup 7 "x" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "x")])
-- See Map.Base.Note: Type of local 'go' function
insertLookupWithKey :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a
-> (Maybe a, Map k a)
insertLookupWithKey f0 kx0 x0 t0 = toPair $ go f0 kx0 x0 t0
where
go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> StrictPair (Maybe a) (Map k a)
STRICT_2_OF_4(go)
go _ kx x Tip = Nothing :*: singleton kx x
go f kx x (Bin sy ky y l r) =
case compare kx ky of
LT -> let (found :*: l') = go f kx x l
in found :*: balanceL ky y l' r
GT -> let (found :*: r') = go f kx x r
in found :*: balanceR ky y l r'
EQ -> let x' = f kx x y
in x' `seq` (Just y :*: Bin sy kx x' l r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertLookupWithKey #-}
#else
{-# INLINE insertLookupWithKey #-}
#endif
{--------------------------------------------------------------------
Deletion
--------------------------------------------------------------------}
-- | /O(log n)/. Update a value at a specific key with the result of the provided function.
-- When the key is not
-- a member of the map, the original map is returned.
--
-- > adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjust ("new " ++) 7 empty == empty
adjust :: Ord k => (a -> a) -> k -> Map k a -> Map k a
adjust f = adjustWithKey (\_ x -> f x)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE adjust #-}
#else
{-# INLINE adjust #-}
#endif
-- | /O(log n)/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > let f key x = (show key) ++ ":new " ++ x
-- > adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjustWithKey f 7 empty == empty
adjustWithKey :: Ord k => (k -> a -> a) -> k -> Map k a -> Map k a
adjustWithKey f = updateWithKey (\k' x' -> Just (f k' x'))
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE adjustWithKey #-}
#else
{-# INLINE adjustWithKey #-}
#endif
-- | /O(log n)/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > update f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > update f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > update f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
update :: Ord k => (a -> Maybe a) -> k -> Map k a -> Map k a
update f = updateWithKey (\_ x -> f x)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE update #-}
#else
{-# INLINE update #-}
#endif
-- | /O(log n)/. The expression (@'updateWithKey' f k map@) updates the
-- value @x@ at @k@ (if it is in the map). If (@f k x@) is 'Nothing',
-- the element is deleted. If it is (@'Just' y@), the key @k@ is bound
-- to the new value @y@.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- See Map.Base.Note: Type of local 'go' function
updateWithKey :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> Map k a
updateWithKey = go
where
go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> Map k a
STRICT_2_OF_3(go)
go _ _ Tip = Tip
    go f k (Bin sx kx x l r) =
case compare k kx of
LT -> balanceR kx x (go f k l) r
GT -> balanceL kx x l (go f k r)
EQ -> case f kx x of
Just x' -> x' `seq` Bin sx kx x' l r
Nothing -> glue l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE updateWithKey #-}
#else
{-# INLINE updateWithKey #-}
#endif
-- | /O(log n)/. Lookup and update. See also 'updateWithKey'.
-- The function returns the changed value, if it is updated.
-- Returns the original key value if the map entry is deleted.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) == (Just "5:new a", fromList [(3, "b"), (5, "5:new a")])
-- > updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a")])
-- > updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) == (Just "b", singleton 5 "a")
-- See Map.Base.Note: Type of local 'go' function
updateLookupWithKey :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> (Maybe a,Map k a)
updateLookupWithKey f0 k0 t0 = toPair $ go f0 k0 t0
where
go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> StrictPair (Maybe a) (Map k a)
STRICT_2_OF_3(go)
go _ _ Tip = (Nothing :*: Tip)
go f k (Bin sx kx x l r) =
case compare k kx of
LT -> let (found :*: l') = go f k l
in found :*: balanceR kx x l' r
GT -> let (found :*: r') = go f k r
in found :*: balanceL kx x l r'
EQ -> case f kx x of
Just x' -> x' `seq` (Just x' :*: Bin sx kx x' l r)
Nothing -> (Just x :*: glue l r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE updateLookupWithKey #-}
#else
{-# INLINE updateLookupWithKey #-}
#endif
-- | /O(log n)/. The expression (@'alter' f k map@) alters the value @x@ at @k@, or absence thereof.
-- 'alter' can be used to insert, delete, or update a value in a 'Map'.
-- In short : @'lookup' k ('alter' f k m) = f ('lookup' k m)@.
--
-- > let f _ = Nothing
-- > alter f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > alter f 5 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- >
-- > let f _ = Just "c"
-- > alter f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "c")]
-- > alter f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "c")]
-- See Map.Base.Note: Type of local 'go' function
alter :: Ord k => (Maybe a -> Maybe a) -> k -> Map k a -> Map k a
alter = go
where
go :: Ord k => (Maybe a -> Maybe a) -> k -> Map k a -> Map k a
STRICT_2_OF_3(go)
go f k Tip = case f Nothing of
Nothing -> Tip
Just x -> singleton k x
go f k (Bin sx kx x l r) = case compare k kx of
LT -> balance kx x (go f k l) r
GT -> balance kx x l (go f k r)
EQ -> case f (Just x) of
Just x' -> x' `seq` Bin sx kx x' l r
Nothing -> glue l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE alter #-}
#else
{-# INLINE alter #-}
#endif
{--------------------------------------------------------------------
Indexing
--------------------------------------------------------------------}
-- | /O(log n)/. Update the element at /index/. Calls 'error' when an
-- invalid index is used.
--
-- > updateAt (\ _ _ -> Just "x") 0 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "x"), (5, "a")]
-- > updateAt (\ _ _ -> Just "x") 1 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "x")]
-- > updateAt (\ _ _ -> Just "x") 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\ _ _ -> Just "x") (-1) (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\_ _ -> Nothing) 0 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- > updateAt (\_ _ -> Nothing) 1 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > updateAt (\_ _ -> Nothing) 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\_ _ -> Nothing) (-1) (fromList [(5,"a"), (3,"b")]) Error: index out of range
updateAt :: (k -> a -> Maybe a) -> Int -> Map k a -> Map k a
updateAt f i t = i `seq`
case t of
Tip -> error "Map.updateAt: index out of range"
Bin sx kx x l r -> case compare i sizeL of
LT -> balanceR kx x (updateAt f i l) r
GT -> balanceL kx x l (updateAt f (i-sizeL-1) r)
EQ -> case f kx x of
Just x' -> x' `seq` Bin sx kx x' l r
Nothing -> glue l r
where
sizeL = size l
{--------------------------------------------------------------------
Minimal, Maximal
--------------------------------------------------------------------}
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "Xb"), (5, "a")]
-- > updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMin :: (a -> Maybe a) -> Map k a -> Map k a
updateMin f m
= updateMinWithKey (\_ x -> f x) m
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "Xa")]
-- > updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMax :: (a -> Maybe a) -> Map k a -> Map k a
updateMax f m
= updateMaxWithKey (\_ x -> f x) m
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"3:b"), (5,"a")]
-- > updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMinWithKey :: (k -> a -> Maybe a) -> Map k a -> Map k a
updateMinWithKey _ Tip = Tip
updateMinWithKey f (Bin sx kx x Tip r) = case f kx x of
Nothing -> r
Just x' -> x' `seq` Bin sx kx x' Tip r
updateMinWithKey f (Bin _ kx x l r) = balanceR kx x (updateMinWithKey f l) r
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"5:a")]
-- > updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMaxWithKey :: (k -> a -> Maybe a) -> Map k a -> Map k a
updateMaxWithKey _ Tip = Tip
updateMaxWithKey f (Bin sx kx x l Tip) = case f kx x of
Nothing -> l
Just x' -> x' `seq` Bin sx kx x' l Tip
updateMaxWithKey f (Bin _ kx x l r) = balanceL kx x l (updateMaxWithKey f r)
{--------------------------------------------------------------------
Union.
--------------------------------------------------------------------}
-- | The union of a list of maps, with a combining operation:
-- (@'unionsWith' f == 'Prelude.foldl' ('unionWith' f) 'empty'@).
--
-- > unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]
unionsWith :: Ord k => (a->a->a) -> [Map k a] -> Map k a
unionsWith f ts
= foldlStrict (unionWith f) empty ts
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionsWith #-}
#endif
{--------------------------------------------------------------------
Union with a combining function
--------------------------------------------------------------------}
-- | /O(n+m)/. Union with a combining function. The implementation uses the efficient /hedge-union/ algorithm.
--
-- > unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "aA"), (7, "C")]
unionWith :: Ord k => (a -> a -> a) -> Map k a -> Map k a -> Map k a
unionWith f m1 m2
= unionWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionWith #-}
#endif
-- | /O(n+m)/.
-- Union with a combining function. The implementation uses the efficient /hedge-union/ algorithm.
--
-- > let f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value
-- > unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
unionWithKey :: Ord k => (k -> a -> a -> a) -> Map k a -> Map k a -> Map k a
unionWithKey f t1 t2 = mergeWithKey (\k x1 x2 -> Just $ f k x1 x2) id id t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionWithKey #-}
#endif
{--------------------------------------------------------------------
Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference with a combining function.
-- When two equal keys are
-- encountered, the combining function is applied to the values of these keys.
-- If it returns 'Nothing', the element is discarded (proper set difference). If
-- it returns (@'Just' y@), the element is updated with a new value @y@.
-- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > let f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing
-- > differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
-- > == singleton 3 "b:B"
differenceWith :: Ord k => (a -> b -> Maybe a) -> Map k a -> Map k b -> Map k a
differenceWith f m1 m2
= differenceWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE differenceWith #-}
#endif
-- | /O(n+m)/. Difference with a combining function. When two equal keys are
-- encountered, the combining function is applied to the key and both values.
-- If it returns 'Nothing', the element is discarded (proper set difference). If
-- it returns (@'Just' y@), the element is updated with a new value @y@.
-- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > let f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing
-- > differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
-- > == singleton 3 "3:b|B"
differenceWithKey :: Ord k => (k -> a -> b -> Maybe a) -> Map k a -> Map k b -> Map k a
differenceWithKey f t1 t2 = mergeWithKey f id (const Tip) t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE differenceWithKey #-}
#endif
{--------------------------------------------------------------------
Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. Intersection with a combining function. The implementation uses
-- an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "aA"
intersectionWith :: Ord k => (a -> b -> c) -> Map k a -> Map k b -> Map k c
intersectionWith f m1 m2
= intersectionWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE intersectionWith #-}
#endif
-- | /O(n+m)/. Intersection with a combining function. The implementation uses
-- an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > let f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
-- > intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "5:a|A"
intersectionWithKey :: Ord k => (k -> a -> b -> c) -> Map k a -> Map k b -> Map k c
intersectionWithKey f t1 t2 = mergeWithKey (\k x1 x2 -> Just $ f k x1 x2) (const Tip) (const Tip) t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE intersectionWithKey #-}
#endif
{--------------------------------------------------------------------
MergeWithKey
--------------------------------------------------------------------}
-- | /O(n+m)/. A high-performance universal combining function. This function
-- is used to define 'unionWith', 'unionWithKey', 'differenceWith',
-- 'differenceWithKey', 'intersectionWith', 'intersectionWithKey' and can be
-- used to define other custom combine functions.
--
-- Please make sure you know what is going on when using 'mergeWithKey',
-- otherwise you can be surprised by unexpected code growth or even
-- corruption of the data structure.
--
-- When 'mergeWithKey' is given three arguments, it is inlined to the call
-- site. You should therefore use 'mergeWithKey' only to define your custom
-- combining functions. For example, you could define 'unionWithKey',
-- 'differenceWithKey' and 'intersectionWithKey' as
--
-- > myUnionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) id id m1 m2
-- > myDifferenceWithKey f m1 m2 = mergeWithKey f id (const empty) m1 m2
-- > myIntersectionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) (const empty) (const empty) m1 m2
--
-- When calling @'mergeWithKey' combine only1 only2@, a function combining two
-- 'Map's is created, such that
--
-- * if a key is present in both maps, it is passed with both corresponding
--    values to the @combine@ function. Depending on the result, the key is either
--    present in the result with the specified value, or is left out;
--
-- * a nonempty subtree present only in the first map is passed to @only1@ and
-- the output is added to the result;
--
-- * a nonempty subtree present only in the second map is passed to @only2@ and
-- the output is added to the result.
--
-- The @only1@ and @only2@ methods /must return a map with a subset (possibly empty) of the keys of the given map/.
-- The values can be modified arbitrarily. Most common variants of @only1@ and
-- @only2@ are 'id' and @'const' 'empty'@, but for example @'map' f@ or
-- @'filterWithKey' f@ could be used for any @f@.
mergeWithKey :: Ord k => (k -> a -> b -> Maybe c) -> (Map k a -> Map k c) -> (Map k b -> Map k c)
-> Map k a -> Map k b -> Map k c
mergeWithKey f g1 g2 = go
where
go Tip t2 = g2 t2
go t1 Tip = g1 t1
go t1 t2 = hedgeMerge NothingS NothingS t1 t2
hedgeMerge _ _ t1 Tip = g1 t1
hedgeMerge blo bhi Tip (Bin _ kx x l r) = g2 $ link kx x (filterGt blo l) (filterLt bhi r)
hedgeMerge blo bhi (Bin _ kx x l r) t2 = let l' = hedgeMerge blo bmi l (trim blo bmi t2)
(found, trim_t2) = trimLookupLo kx bhi t2
r' = hedgeMerge bmi bhi r trim_t2
in case found of
Nothing -> case g1 (singleton kx x) of
Tip -> merge l' r'
(Bin _ _ x' Tip Tip) -> link kx x' l' r'
_ -> error "mergeWithKey: Given function only1 does not fulfil required conditions (see documentation)"
Just x2 -> case f kx x x2 of
Nothing -> merge l' r'
Just x' -> x' `seq` link kx x' l' r'
where bmi = JustS kx
{-# INLINE mergeWithKey #-}
{--------------------------------------------------------------------
Filter and partition
--------------------------------------------------------------------}
-- | /O(n)/. Map values and collect the 'Just' results.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > mapMaybe f (fromList [(5,"a"), (3,"b")]) == singleton 5 "new a"
mapMaybe :: (a -> Maybe b) -> Map k a -> Map k b
mapMaybe f = mapMaybeWithKey (\_ x -> f x)
-- | /O(n)/. Map keys\/values and collect the 'Just' results.
--
-- > let f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing
-- > mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) == singleton 3 "key : 3"
mapMaybeWithKey :: (k -> a -> Maybe b) -> Map k a -> Map k b
mapMaybeWithKey _ Tip = Tip
mapMaybeWithKey f (Bin _ kx x l r) = case f kx x of
Just y -> y `seq` link kx y (mapMaybeWithKey f l) (mapMaybeWithKey f r)
Nothing -> merge (mapMaybeWithKey f l) (mapMaybeWithKey f r)
-- | /O(n)/. Map values and separate the 'Left' and 'Right' results.
--
-- > let f a = if a < "c" then Left a else Right a
-- > mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
-- >
-- > mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
mapEither :: (a -> Either b c) -> Map k a -> (Map k b, Map k c)
mapEither f m
= mapEitherWithKey (\_ x -> f x) m
-- | /O(n)/. Map keys\/values and separate the 'Left' and 'Right' results.
--
-- > let f k a = if k < 5 then Left (k * 2) else Right (a ++ a)
-- > mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
-- >
-- > mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
mapEitherWithKey :: (k -> a -> Either b c) -> Map k a -> (Map k b, Map k c)
mapEitherWithKey f0 t0 = toPair $ go f0 t0
where
go _ Tip = (Tip :*: Tip)
go f (Bin _ kx x l r) = case f kx x of
Left y -> y `seq` (link kx y l1 r1 :*: merge l2 r2)
Right z -> z `seq` (merge l1 r1 :*: link kx z l2 r2)
where
(l1 :*: l2) = go f l
(r1 :*: r2) = go f r
{--------------------------------------------------------------------
Mapping
--------------------------------------------------------------------}
-- | /O(n)/. Map a function over all values in the map.
--
-- > map (++ "x") (fromList [(5,"a"), (3,"b")]) == fromList [(3, "bx"), (5, "ax")]
map :: (a -> b) -> Map k a -> Map k b
map _ Tip = Tip
map f (Bin sx kx x l r) = let x' = f x in x' `seq` Bin sx kx x' (map f l) (map f r)
-- | /O(n)/. Map a function over all values in the map.
--
-- > let f key x = (show key) ++ ":" ++ x
-- > mapWithKey f (fromList [(5,"a"), (3,"b")]) == fromList [(3, "3:b"), (5, "5:a")]
mapWithKey :: (k -> a -> b) -> Map k a -> Map k b
mapWithKey _ Tip = Tip
mapWithKey f (Bin sx kx x l r) = let x' = f kx x
in x' `seq` Bin sx kx x' (mapWithKey f l) (mapWithKey f r)
-- | /O(n)/. The function 'mapAccum' threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a b = (a ++ b, b ++ "X")
-- > mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) == ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
mapAccum :: (a -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccum f a m
= mapAccumWithKey (\a' _ x' -> f a' x') a m
-- | /O(n)/. The function 'mapAccumWithKey' threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
mapAccumWithKey :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumWithKey f a t
= mapAccumL f a t
-- | /O(n)/. The function 'mapAccumL' threads an accumulating
-- argument through the map in ascending order of keys.
mapAccumL :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumL _ a Tip = (a,Tip)
mapAccumL f a (Bin sx kx x l r) =
let (a1,l') = mapAccumL f a l
(a2,x') = f a1 kx x
(a3,r') = mapAccumL f a2 r
in x' `seq` (a3,Bin sx kx x' l' r')
-- | /O(n)/. The function 'mapAccumRWithKey' threads an accumulating
-- argument through the map in descending order of keys.
mapAccumRWithKey :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumRWithKey _ a Tip = (a,Tip)
mapAccumRWithKey f a (Bin sx kx x l r) =
let (a1,r') = mapAccumRWithKey f a r
(a2,x') = f a1 kx x
(a3,l') = mapAccumRWithKey f a2 l
in x' `seq` (a3,Bin sx kx x' l' r')
-- | /O(n*log n)/.
-- @'mapKeysWith' c f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the associated values will be
-- combined using @c@.
--
-- > mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "cdab"
-- > mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "cdab"
mapKeysWith :: Ord k2 => (a -> a -> a) -> (k1->k2) -> Map k1 a -> Map k2 a
mapKeysWith c f = fromListWith c . foldrWithKey (\k x xs -> (f k, x) : xs) []
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE mapKeysWith #-}
#endif
{--------------------------------------------------------------------
Conversions
--------------------------------------------------------------------}
-- | /O(n)/. Build a map from a set of keys and a function which for each key
-- computes its value.
--
-- > fromSet (\k -> replicate k 'a') (Data.Set.fromList [3, 5]) == fromList [(5,"aaaaa"), (3,"aaa")]
-- > fromSet undefined Data.Set.empty == empty
fromSet :: (k -> a) -> Set.Set k -> Map k a
fromSet _ Set.Tip = Tip
fromSet f (Set.Bin sz x l r) = case f x of v -> v `seq` Bin sz x v (fromSet f l) (fromSet f r)
{--------------------------------------------------------------------
Lists
use [foldlStrict] to reduce demand on the control-stack
--------------------------------------------------------------------}
-- | /O(n*log n)/. Build a map from a list of key\/value pairs. See also 'fromAscList'.
-- If the list contains more than one value for the same key, the last value
-- for the key is retained.
--
-- If the keys of the list are ordered, a linear-time implementation is used,
-- with the performance equal to 'fromDistinctAscList'.
--
-- > fromList [] == empty
-- > fromList [(5,"a"), (3,"b"), (5, "c")] == fromList [(5,"c"), (3,"b")]
-- > fromList [(5,"c"), (3,"b"), (5, "a")] == fromList [(5,"a"), (3,"b")]
-- For some reason, when 'singleton' is used in fromList or in
-- create, it is not inlined, so we inline it manually.
fromList :: Ord k => [(k,a)] -> Map k a
fromList [] = Tip
fromList [(kx, x)] = x `seq` Bin 1 kx x Tip Tip
fromList ((kx0, x0) : xs0) | not_ordered kx0 xs0 = x0 `seq` fromList' (Bin 1 kx0 x0 Tip Tip) xs0
| otherwise = x0 `seq` go (1::Int) (Bin 1 kx0 x0 Tip Tip) xs0
where
not_ordered _ [] = False
not_ordered kx ((ky,_) : _) = kx >= ky
{-# INLINE not_ordered #-}
fromList' t0 xs = foldlStrict ins t0 xs
where ins t (k,x) = insert k x t
STRICT_1_OF_3(go)
go _ t [] = t
go _ t [(kx, x)] = x `seq` insertMax kx x t
go s l xs@((kx, x) : xss) | not_ordered kx xss = fromList' l xs
| otherwise = case create s xss of
(r, ys, []) -> x `seq` go (s `shiftL` 1) (link kx x l r) ys
(r, _, ys) -> x `seq` fromList' (link kx x l r) ys
-- The create is returning a triple (tree, xs, ys). Both xs and ys
-- represent not yet processed elements and only one of them can be nonempty.
-- If ys is nonempty, the keys in ys are not ordered with respect to tree
-- and must be inserted using fromList'. Otherwise the keys have been
-- ordered so far.
STRICT_1_OF_2(create)
create _ [] = (Tip, [], [])
create s xs@(xp : xss)
| s == 1 = case xp of (kx, x) | not_ordered kx xss -> x `seq` (Bin 1 kx x Tip Tip, [], xss)
| otherwise -> x `seq` (Bin 1 kx x Tip Tip, xss, [])
| otherwise = case create (s `shiftR` 1) xs of
res@(_, [], _) -> res
(l, [(ky, y)], zs) -> y `seq` (insertMax ky y l, [], zs)
(l, ys@((ky, y):yss), _) | not_ordered ky yss -> (l, [], ys)
| otherwise -> case create (s `shiftR` 1) yss of
(r, zs, ws) -> y `seq` (link ky y l r, zs, ws)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromList #-}
#endif
-- | /O(n*log n)/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWith'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "ab"), (5, "aba")]
-- > fromListWith (++) [] == empty
fromListWith :: Ord k => (a -> a -> a) -> [(k,a)] -> Map k a
fromListWith f xs
= fromListWithKey (\_ x y -> f x y) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromListWith #-}
#endif
-- | /O(n*log n)/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWithKey'.
--
-- > let f k a1 a2 = (show k) ++ a1 ++ a2
-- > fromListWithKey f [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "3ab"), (5, "5a5ba")]
-- > fromListWithKey f [] == empty
fromListWithKey :: Ord k => (k -> a -> a -> a) -> [(k,a)] -> Map k a
fromListWithKey f xs
= foldlStrict ins empty xs
where
ins t (k,x) = insertWithKey f k x t
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromListWithKey #-}
#endif
{--------------------------------------------------------------------
Building trees from ascending/descending lists can be done in linear time.
Note that if [xs] is ascending that:
fromAscList xs == fromList xs
fromAscListWith f xs == fromListWith f xs
--------------------------------------------------------------------}
-- | /O(n)/. Build a map from an ascending list in linear time.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
-- > valid (fromAscList [(3,"b"), (5,"a"), (5,"b")]) == True
-- > valid (fromAscList [(5,"a"), (3,"b"), (5,"b")]) == False
fromAscList :: Eq k => [(k,a)] -> Map k a
fromAscList xs
= fromAscListWithKey (\_ x _ -> x) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscList #-}
#endif
-- | /O(n)/. Build a map from an ascending list in linear time with a combining function for equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "ba")]
-- > valid (fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")]) == True
-- > valid (fromAscListWith (++) [(5,"a"), (3,"b"), (5,"b")]) == False
fromAscListWith :: Eq k => (a -> a -> a) -> [(k,a)] -> Map k a
fromAscListWith f xs
= fromAscListWithKey (\_ x y -> f x y) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscListWith #-}
#endif
-- | /O(n)/. Build a map from an ascending list in linear time with a
-- combining function for equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > let f k a1 a2 = (show k) ++ ":" ++ a1 ++ a2
-- > fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")] == fromList [(3, "b"), (5, "5:b5:ba")]
-- > valid (fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")]) == True
-- > valid (fromAscListWithKey f [(5,"a"), (3,"b"), (5,"b"), (5,"b")]) == False
fromAscListWithKey :: Eq k => (k -> a -> a -> a) -> [(k,a)] -> Map k a
fromAscListWithKey f xs
= fromDistinctAscList (combineEq f xs)
where
-- [combineEq f xs] combines equal elements with function [f] in an ordered list [xs]
combineEq _ xs'
= case xs' of
[] -> []
[x] -> [x]
(x:xx) -> combineEq' x xx
combineEq' z [] = [z]
combineEq' z@(kz,zz) (x@(kx,xx):xs')
| kx==kz = let yy = f kx xx zz in yy `seq` combineEq' (kx,yy) xs'
| otherwise = z:combineEq' x xs'
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscListWithKey #-}
#endif
-- | /O(n)/. Build a map from an ascending list of distinct elements in linear time.
-- /The precondition is not checked./
--
-- > fromDistinctAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > valid (fromDistinctAscList [(3,"b"), (5,"a")]) == True
-- > valid (fromDistinctAscList [(3,"b"), (5,"a"), (5,"b")]) == False
-- For some reason, when 'singleton' is used in fromDistinctAscList or in
-- create, it is not inlined, so we inline it manually.
fromDistinctAscList :: [(k,a)] -> Map k a
fromDistinctAscList [] = Tip
fromDistinctAscList ((kx0, x0) : xs0) = x0 `seq` go (1::Int) (Bin 1 kx0 x0 Tip Tip) xs0
where
STRICT_1_OF_3(go)
go _ t [] = t
go s l ((kx, x) : xs) = case create s xs of
(r, ys) -> x `seq` go (s `shiftL` 1) (link kx x l r) ys
STRICT_1_OF_2(create)
create _ [] = (Tip, [])
create s xs@(x' : xs')
| s == 1 = case x' of (kx, x) -> x `seq` (Bin 1 kx x Tip Tip, xs')
| otherwise = case create (s `shiftR` 1) xs of
res@(_, []) -> res
(l, (ky, y):ys) -> case create (s `shiftR` 1) ys of
(r, zs) -> y `seq` (link ky y l r, zs)
|
jwiegley/ghc-release
|
libraries/containers/Data/Map/Strict.hs
|
gpl-3.0
| 45,252 | 0 | 17 | 11,355 | 8,278 | 4,519 | 3,759 | -1 | -1 |
{-|
Module : Idris.IdrisDoc
Description : Generation of HTML documentation for Idris code
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE OverloadedStrings, PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.IdrisDoc (generateDocs) where
import Idris.AbsSyntax
import Idris.Core.Evaluate (Accessibility(..), ctxtAlist, isDConName, isFnName,
isTConName, lookupDefAcc)
import Idris.Core.TT (Name(..), OutputAnnotation(..), TextFormatting(..),
constIsType, nsroot, sUN, str, toAlist, txt)
import Idris.Docs
import Idris.Docstrings (nullDocstring)
import qualified Idris.Docstrings as Docstrings
import Idris.Options
import Idris.Parser.Ops (opChars)
import IRTS.System (getIdrisDataFileByName)
import Control.Applicative ((<|>))
import Control.Monad (forM_)
import Control.Monad.Trans.Except
import Control.Monad.Trans.State.Strict
import qualified Data.ByteString.Lazy as BS2
import qualified Data.List as L
import qualified Data.Map as M hiding ((!))
import Data.Maybe
import qualified Data.Set as S
import qualified Data.Text as T
import System.Directory
import System.FilePath
import System.IO
import System.IO.Error
import Text.Blaze (contents, toValue)
import qualified Text.Blaze.Html.Renderer.String as R
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Text.Blaze.Html5 (preEscapedToHtml, toHtml, (!))
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Text.Blaze.Renderer.String (renderMarkup)
import Text.PrettyPrint.Annotated.Leijen (displayDecorated, renderCompact)
-- ---------------------------------------------------------------- [ Public ]
-- | Generates HTML documentation for a series of loaded namespaces
-- and their dependencies.
generateDocs :: IState -- ^ IState where all necessary information is
-- extracted from.
-> [Name] -- ^ List of namespaces to generate
-- documentation for.
-> FilePath -- ^ The directory to which documentation will
-- be written.
-> IO (Either String ())
generateDocs ist nss' out =
do let nss = map toNsName nss'
docs <- fetchInfo ist nss
let (c, io) = foldl (checker docs) (0, return ()) nss
io
if c < length nss
then catchIOError (createDocs ist docs out) (err . show)
else err "No namespaces to generate documentation for"
where checker docs st ns | M.member ns docs = st
checker docs (c, io) ns = (c+1, do prev <- io; warnMissing ns)
warnMissing ns =
putStrLn $ "Warning: Ignoring empty or non-existing namespace '" ++
(nsName2Str ns) ++ "'"
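-- Rough usage sketch (assumed, not part of the original module): with an
-- elaborated IState 'ist' in hand, a call such as
--
-- > res <- generateDocs ist [sUN "Main"] "doc-output"
--
-- would write doc-output/index.html plus one page per reachable namespace,
-- returning a Left with a message (via 'err' below) on failure.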
-- ----------------------------------------------------------------- [ Types ]
-- | Either an error message or a result
type Failable = Either String
-- | Internal representation of a fully qualified namespace name
type NsName = [T.Text]
-- | All information to be documented about a single namespace member
type NsItem = (Name, Maybe Docs, Accessibility)
-- | Docstrings containing fully elaborated term annotations
type FullDocstring = Docstrings.Docstring Docstrings.DocTerm
-- | All information to be documented about a namespace
data NsInfo = NsInfo { nsDocstring :: Maybe FullDocstring,
nsContents :: [NsItem]
}
-- | A map from namespace names to information about them
type NsDict = M.Map NsName NsInfo
-- --------------------------------------------------------------- [ Utility ]
-- | Make an error message
err :: String -> IO (Failable ())
err s = return $ Left s
-- | IdrisDoc version
version :: String
version = "1.0"
-- | Converts a Name into a [Text] corresponding to the namespace
-- part of a NS Name.
toNsName :: Name -- ^ Name to convert
-> NsName
toNsName (UN n) = [n]
toNsName (NS n ns) = (toNsName n) ++ ns
toNsName _ = []
-- | Retrieves the namespace part of a Name
getNs :: Name -- ^ Name to retrieve namespace for
-> NsName
getNs (NS _ ns) = ns
getNs _ = []
-- | String to replace for the root namespace
rootNsStr :: String
rootNsStr = "[builtins]"
-- | Converts a NsName to string form
nsName2Str :: NsName -- ^ NsName to convert
-> String
nsName2Str n = if null n then rootNsStr else name n
where name [] = []
name [ns] = str ns
name (ns:nss) = (name nss) ++ ('.' : str ns)
-- --------------------------------------------------------- [ Info Fetching ]
-- | Fetch info about namespaces and their contents
fetchInfo :: IState -- ^ IState to fetch info from
-> [NsName] -- ^ List of namespaces to fetch info for
-> IO NsDict -- ^ Mapping from namespace name to
-- info about its contents
fetchInfo ist nss =
do let originNss = S.fromList nss
info <- nsDict ist
let accessible = M.map (filterContents filterInclude) info
nonOrphan = M.map (updateContents removeOrphans) accessible
nonEmpty = M.filter (not . null . nsContents) nonOrphan
reachedNss = traceNss nonEmpty originNss S.empty
return $ M.filterWithKey (\k _ -> S.member k reachedNss) nonEmpty
where
-- TODO: lensify
filterContents p (NsInfo md ns) = NsInfo md (filter p ns)
updateContents f x = x { nsContents = f (nsContents x) }
-- | Removes loose interface methods and data constructors,
-- leaving them documented only under their parent.
removeOrphans :: [NsItem] -- ^ List to remove orphans from
-> [NsItem] -- ^ Orphan-free list
removeOrphans list =
let children = S.fromList $ concatMap (names . (\(_, d, _) -> d)) list
in filter ((flip S.notMember children) . (\(n, _, _) -> n)) list
where names (Just (DataDoc _ fds)) = map (\(FD n _ _ _ _) -> n) fds
names (Just (InterfaceDoc _ _ fds _ _ _ _ _ c)) = map (\(FD n _ _ _ _) -> n) fds ++ map (\(FD n _ _ _ _) -> n) (maybeToList c)
names _ = []
-- | Whether a Name names something which should be documented
filterName :: Name -- ^ Name to check
-> Bool -- ^ Predicate result
filterName (UN _) = True
filterName (NS n _) = filterName n
filterName _ = False
-- | Whether a NsItem should be included in the documentation.
-- It must not be Hidden/Private and filterName must return True for the name.
-- Also it must have Docs -- without Docs, nothing can be done.
filterInclude :: NsItem -- ^ Accessibility to check
-> Bool -- ^ Predicate result
filterInclude (name, Just _, Public) | filterName name = True
filterInclude (name, Just _, Frozen) | filterName name = True
filterInclude _ = False
-- | Finds all namespaces indirectly referred by a set of namespaces.
-- The NsItems of the namespaces are searched for references.
traceNss :: NsDict -- ^ Mappings of namespaces and their contents
-> S.Set NsName -- ^ Set of namespaces to trace
-> S.Set NsName -- ^ Set of namespaces which has been traced
-> S.Set NsName -- ^ Set of namespaces to trace and all traced one
traceNss nsd sT sD =
let nsTracer ns | Just nsis <- M.lookup ns nsd = map referredNss (nsContents nsis)
nsTracer _ = [S.empty] -- Ignore
reached = S.unions $ concatMap nsTracer (S.toList sT)
processed = S.union sT sD
untraced = S.difference reached processed
in if S.null untraced then processed
else traceNss nsd untraced processed
-- | Gets all namespaces directly referred by a NsItem
referredNss :: NsItem -- ^ The name to get all directly
-- referred namespaces for
-> S.Set NsName
referredNss (_, Nothing, _) = S.empty
referredNss (n, Just d, _) =
let fds = getFunDocs d
ts = concatMap types fds
names = concatMap (extractPTermNames) ts
in S.map getNs $ S.fromList names
where getFunDocs (FunDoc f) = [f]
getFunDocs (DataDoc f fs) = f:fs
getFunDocs (InterfaceDoc _ _ fs _ _ _ _ _ _) = fs
getFunDocs (RecordDoc _ _ f fs _) = f:fs
getFunDocs (NamedImplementationDoc _ fd) = [fd]
getFunDocs (ModDoc _ _) = []
types (FD _ _ args t _) = t:(map second args)
second (_, x, _, _) = x
-- | Returns an NsDict of containing all known namespaces and their contents
nsDict :: IState
-> IO NsDict
nsDict ist = flip (foldl addModDoc) modDocs $ foldl adder (return M.empty) nameDefList
where nameDefList = ctxtAlist $ tt_ctxt ist
adder m (n, _) = do map <- m
doc <- loadDocs ist n
let access = getAccess ist n
nInfo = NsInfo Nothing [(n, doc, access)]
return $ M.insertWith addNameInfo (getNs n) nInfo map
addNameInfo (NsInfo m ns) (NsInfo m' ns') = NsInfo (m <|> m') (ns ++ ns')
modDocs = map (\(mn, d) -> (mn, NsInfo (Just d) [])) $ toAlist (idris_moduledocs ist)
addModDoc :: IO NsDict -> (Name, NsInfo) -> IO NsDict
addModDoc dict (mn, d) = fmap (M.insertWith addNameInfo (getNs mn) d) dict
-- | Gets the Accessibility for a Name
getAccess :: IState -- ^ IState containing accessibility information
-> Name -- ^ The Name to retrieve access for
-> Accessibility
getAccess ist n =
let res = lookupDefAcc n False (tt_ctxt ist)
in case res of
[(_, acc)] -> acc
_ -> Private
-- | Predicate saying whether a Name possibly may have docs defined
-- Without this, getDocs from Idris.Docs may fail a pattern match.
mayHaveDocs :: Name -- ^ The Name to test
-> Bool -- ^ The result
mayHaveDocs (UN _) = True
mayHaveDocs (NS n _) = mayHaveDocs n
mayHaveDocs _ = False
-- | Retrieves the Docs for a Name
loadDocs :: IState -- ^ IState to extract information from
-> Name -- ^ Name to load Docs for
-> IO (Maybe Docs)
loadDocs ist n
| mayHaveDocs n = do docs <- runExceptT $ evalStateT (getDocs n FullDocs) ist
case docs of Right d -> return (Just d)
Left _ -> return Nothing
| otherwise = return Nothing
-- | Extracts the names referred to from a type.
-- Covering all PTerm constructors ensures no case is missed, though not all
-- of them are needed; the author did not know which could be dropped.
-- TODO: Remove unnecessary cases
extractPTermNames :: PTerm -- ^ Where to extract names from
-> [Name] -- ^ Extracted names
extractPTermNames (PRef _ _ n) = [n]
extractPTermNames (PInferRef _ _ n) = [n]
extractPTermNames (PPatvar _ n) = [n]
extractPTermNames (PLam _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPTermNames (PPi _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPTermNames (PLet _ _ n _ p1 p2 p3) = n : concatMap extract [p1, p2, p3]
extractPTermNames (PTyped p1 p2) = concatMap extract [p1, p2]
extractPTermNames (PApp _ p pas) = let names = concatMap extractPArg pas
in (extract p) ++ names
extractPTermNames (PAppBind _ p pas) = let names = concatMap extractPArg pas
in (extract p) ++ names
extractPTermNames (PMatchApp _ n) = [n]
extractPTermNames (PCase _ p ps) = let (ps1, ps2) = unzip ps
in concatMap extract (p:(ps1 ++ ps2))
extractPTermNames (PIfThenElse _ c t f) = concatMap extract [c, t, f]
extractPTermNames (PRewrite _ _ a b m) | Just c <- m =
concatMap extract [a, b, c]
extractPTermNames (PRewrite _ _ a b _) = concatMap extract [a, b]
extractPTermNames (PPair _ _ _ p1 p2) = concatMap extract [p1, p2]
extractPTermNames (PDPair _ _ _ a b c) = concatMap extract [a, b, c]
extractPTermNames (PAlternative _ _ l) = concatMap extract l
extractPTermNames (PHidden p) = extract p
extractPTermNames (PGoal _ p1 n p2) = n : concatMap extract [p1, p2]
extractPTermNames (PDoBlock pdos) = concatMap extractPDo pdos
extractPTermNames (PIdiom _ p) = extract p
extractPTermNames (PMetavar _ n) = [n]
extractPTermNames (PProof tacts) = concatMap extractPTactic tacts
extractPTermNames (PTactics tacts) = concatMap extractPTactic tacts
extractPTermNames (PCoerced p) = extract p
extractPTermNames (PDisamb _ p) = extract p
extractPTermNames (PUnifyLog p) = extract p
extractPTermNames (PNoImplicits p) = extract p
extractPTermNames (PRunElab _ p _) = extract p
extractPTermNames (PConstSugar _ tm) = extract tm
extractPTermNames _ = []
-- | Shorter name for extractPTermNames
extract :: PTerm -- ^ Where to extract names from
-> [Name] -- ^ Extracted names
extract = extractPTermNames
-- | Helper function for extractPTermNames
extractPArg :: PArg -> [Name]
extractPArg (PImp {pname=n, getTm=p}) = n : extract p
extractPArg (PExp {getTm=p}) = extract p
extractPArg (PConstraint {getTm=p}) = extract p
extractPArg (PTacImplicit {pname=n, getScript=p1, getTm=p2})
= n : (concatMap extract [p1, p2])
-- | Helper function for extractPTermNames
extractPDo :: PDo -> [Name]
extractPDo (DoExp _ p) = extract p
extractPDo (DoBind _ n _ p) = n : extract p
extractPDo (DoBindP _ p1 p2 ps) = let (ps1, ps2) = unzip ps
ps' = ps1 ++ ps2
in concatMap extract (p1 : p2 : ps')
extractPDo (DoLet _ _ n _ p1 p2) = n : concatMap extract [p1, p2]
extractPDo (DoLetP _ p1 p2 ps) = let (ps1, ps2) = unzip ps
ps' = ps1 ++ ps2
in concatMap extract (p1 : p2 : ps')
extractPDo (DoRewrite _ p) = extract p
-- | Helper function for extractPTermNames
extractPTactic :: PTactic -> [Name]
extractPTactic (Intro ns) = ns
extractPTactic (Focus n) = [n]
extractPTactic (Refine n _) = [n]
extractPTactic (Rewrite p) = extract p
extractPTactic (Equiv p) = extract p
extractPTactic (MatchRefine n) = [n]
extractPTactic (LetTac n p) = n : extract p
extractPTactic (LetTacTy n p1 p2) = n : concatMap extract [p1, p2]
extractPTactic (Exact p) = extract p
extractPTactic (ProofSearch _ _ _ m _ ns) | Just n <- m = n : ns
extractPTactic (ProofSearch _ _ _ _ _ ns) = ns
extractPTactic (Try t1 t2) = concatMap extractPTactic [t1, t2]
extractPTactic (TSeq t1 t2) = concatMap extractPTactic [t1, t2]
extractPTactic (ApplyTactic p) = extract p
extractPTactic (ByReflection p) = extract p
extractPTactic (Reflect p) = extract p
extractPTactic (Fill p) = extract p
extractPTactic (GoalType _ t) = extractPTactic t
extractPTactic (TCheck p) = extract p
extractPTactic (TEval p) = extract p
extractPTactic _ = []
-- ------------------------------------------------------- [ HTML Generation ]
-- | Generates the actual HTML output based on info from a NsDict
-- A merge of the new docs and any existing docs located in the output dir
-- is attempted.
-- TODO: Ensure the merge always succeeds.
-- Currently the content of 'docs/<builtins>.html' may change between
-- runs, thus not always containing all items referred from other
-- namespace .html files.
createDocs :: IState -- ^ Needed to determine the types of names
-> NsDict -- ^ All info from which to generate docs
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO (Failable ())
createDocs ist nsd out =
do new <- not `fmap` (doesFileExist $ out </> "IdrisDoc")
existing_nss <- existingNamespaces out
let nss = S.union (M.keysSet nsd) existing_nss
dExists <- doesDirectoryExist out
     if new && dExists then err $ "Output directory (" ++ out ++ ") is" ++
                                  " already in use for something other than IdrisDoc."
else do
createDirectoryIfMissing True out
foldl docGen (return ()) (M.toList nsd)
createIndex nss out
-- Create an empty IdrisDoc file to signal 'out' is used for IdrisDoc
           if new -- But only if it did not already exist...
then withFile (out </> "IdrisDoc") WriteMode ((flip hPutStr) "")
else return ()
copyDependencies out
return $ Right ()
where docGen io (n, c) = do io; createNsDoc ist n c out
-- | (Over)writes the 'index.html' file in the given directory with
-- an (updated) index of namespaces in the documentation
createIndex :: S.Set NsName -- ^ Set of namespace names to
-- include in the index
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO ()
createIndex nss out =
do (path, h) <- openTempFileWithDefaultPermissions out "index.html"
BS2.hPut h $ renderHtml $ wrapper Nothing $ do
H.h1 "Namespaces"
H.ul ! class_ "names" $ do
let path ns = "docs" ++ "/" ++ genRelNsPath ns "html"
item ns = do let n = toHtml $ nsName2Str ns
link = toValue $ path ns
H.li $ H.a ! href link ! class_ "code" $ n
sort = L.sortBy (\n1 n2 -> reverse n1 `compare` reverse n2)
forM_ (sort $ S.toList nss) item
hClose h
renameFile path (out </> "index.html")
-- | Generates a HTML file for a namespace and its contents.
-- The location for e.g. Prelude.Algebra is <base>/Prelude/Algebra.html
createNsDoc :: IState -- ^ Needed to determine the types of names
-> NsName -- ^ The name of the namespace to
-- create documentation for
-> NsInfo -- ^ The contents of the namespace
-> FilePath -- ^ The base directory to which
-- documentation will be written.
-> IO ()
createNsDoc ist ns content out =
do let tpath = out </> "docs" </> (genRelNsPath ns "html")
dir = takeDirectory tpath
file = takeFileName tpath
haveDocs (_, md, _) = md
-- We cannot do anything without a Doc
content' = reverse $ mapMaybe haveDocs $ nsContents content
createDirectoryIfMissing True dir
(path, h) <- openTempFileWithDefaultPermissions dir file
BS2.hPut h $ renderHtml $ wrapper (Just ns) $ do
H.h1 $ toHtml (nsName2Str ns)
case nsDocstring content of
Nothing -> mempty
Just docstring -> Docstrings.renderHtml docstring
H.dl ! class_ "decls" $ forM_ content' (createOtherDoc ist)
hClose h
renameFile path tpath
-- | Generates a relative filepath for a namespace, appending an extension
genRelNsPath :: NsName -- ^ Namespace to generate a path for
-> String -- ^ Extension suffix
-> FilePath
genRelNsPath ns suffix = nsName2Str ns <.> suffix
-- | Generates a HTML type signature with proper tags
-- TODO: Turn docstrings into title attributes more robustly
genTypeHeader :: IState -- ^ Needed to determine the types of names
-> FunDoc -- ^ Type to generate type declaration for
-> H.Html -- ^ Resulting HTML
genTypeHeader ist (FD n _ args ftype _) = do
H.span ! class_ (toValue $ "name " ++ getType n)
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "word" $ do nbsp; ":"; nbsp
H.span ! class_ "signature" $ preEscapedToHtml htmlSignature
where
htmlSignature = displayDecorated decorator $ renderCompact signature
signature = pprintPTerm defaultPPOption [] names (idris_infixes ist) ftype
names = [ n | (n@(UN n'), _, _, _) <- args,
not (T.isPrefixOf (txt "__") n') ]
decorator (AnnConst c) str | constIsType c = htmlSpan str "type" str
| otherwise = htmlSpan str "data" str
decorator (AnnData _ _) str = htmlSpan str "data" str
decorator (AnnType _ _) str = htmlSpan str "type" str
decorator AnnKeyword str = htmlSpan "" "keyword" str
decorator (AnnBoundName n i) str | Just t <- M.lookup n docs =
let cs = (if i then "implicit " else "") ++ "documented boundvar"
in htmlSpan t cs str
decorator (AnnBoundName _ i) str =
let cs = (if i then "implicit " else "") ++ "boundvar"
in htmlSpan "" cs str
decorator (AnnName n _ _ _) str
| filterName n = htmlLink (show n) (getType n) (link n) str
| otherwise = htmlSpan "" (getType n) str
decorator (AnnTextFmt BoldText) str = "<b>" ++ str ++ "</b>"
decorator (AnnTextFmt UnderlineText) str = "<u>" ++ str ++ "</u>"
decorator (AnnTextFmt ItalicText) str = "<i>" ++ str ++ "</i>"
decorator _ str = str
htmlSpan :: String -> String -> String -> String
htmlSpan t cs str = do
R.renderHtml $ H.span ! class_ (toValue cs)
! title (toValue t)
$ toHtml str
htmlLink :: String -> String -> String -> String -> String
htmlLink t cs a str = do
R.renderHtml $ H.a ! class_ (toValue cs)
! title (toValue t) ! href (toValue a)
$ toHtml str
docs = M.fromList $ mapMaybe docExtractor args
docExtractor (_, _, _, Nothing) = Nothing
docExtractor (n, _, _, Just d) = Just (n, doc2Str d)
-- TODO: Remove <p> tags more robustly
doc2Str d = let dirty = renderMarkup $ contents $ Docstrings.renderHtml d
in take (length dirty - 8) $ drop 3 dirty
name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
link n = let path = genRelNsPath (getNs n) "html"
in path ++ "#" ++ (show n)
getType :: Name -> String
getType n = let ctxt = tt_ctxt ist
in case () of
_ | isDConName n ctxt -> "constructor"
_ | isFnName n ctxt -> "function"
_ | isTConName n ctxt -> "type"
_ | otherwise -> ""
-- | Generates HTML documentation for a function.
createFunDoc :: IState -- ^ Needed to determine the types of names
-> FunDoc -- ^ Function to generate block for
-> H.Html -- ^ Resulting HTML
createFunDoc ist fd@(FD name docstring args ftype fixity) = do
H.dt ! (A.id $ toValue $ show name) $ genTypeHeader ist fd
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
let args' = filter (\(_, _, _, d) -> isJust d) args
if (not $ null args') || (isJust fixity)
then H.dl $ do
if (isJust fixity) then do
H.dt ! class_ "fixity" $ "Fixity"
let f = fromJust fixity
H.dd ! class_ "fixity" ! title (toValue $ show f) $ genFix f
else mempty
forM_ args' genArg
else mempty
where genFix (Infixl {prec=p}) =
toHtml $ "Left associative, precedence " ++ show p
genFix (Infixr {prec=p}) =
          toHtml $ "Right associative, precedence " ++ show p
genFix (InfixN {prec=p}) =
toHtml $ "Non-associative, precedence " ++ show p
genFix (PrefixN {prec=p}) =
toHtml $ "Prefix, precedence " ++ show p
genArg (_, _, _, Nothing) = mempty
genArg (name, _, _, Just docstring) = do
H.dt $ toHtml $ show name
H.dd $ Docstrings.renderHtml docstring
-- | Generates HTML documentation for any Docs type
-- TODO: Generate actual signatures for interfaces
createOtherDoc :: IState -- ^ Needed to determine the types of names
-> Docs -- ^ Namespace item to generate HTML block for
-> H.Html -- ^ Resulting HTML
createOtherDoc ist (FunDoc fd) = createFunDoc ist fd
createOtherDoc ist (InterfaceDoc n docstring fds _ _ _ _ _ c) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "interface"; nbsp
H.span ! class_ "name type"
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "signature" $ nbsp
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
H.dl ! class_ "decls" $ (forM_ (maybeToList c ++ fds) (createFunDoc ist))
where name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
createOtherDoc ist (RecordDoc n doc ctor projs params) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "record"; nbsp
H.span ! class_ "name type"
! title (toValue $ show n)
$ toHtml $ name $ nsroot n
H.span ! class_ "type" $ do nbsp ; prettyParameters
H.dd $ do
(if nullDocstring doc then mempty else Docstrings.renderHtml doc)
if not $ null params
then H.dl $ forM_ params genParam
else mempty
H.dl ! class_ "decls" $ createFunDoc ist ctor
H.dl ! class_ "decls" $ forM_ projs (createFunDoc ist)
where name (NS n ns) = show (NS (sUN $ name n) ns)
name n = let n' = show n
in if (head n') `elem` opChars
then '(':(n' ++ ")")
else n'
genParam (name, pt, docstring) = do
H.dt $ toHtml $ show (nsroot name)
H.dd $ maybe nbsp Docstrings.renderHtml docstring
prettyParameters = toHtml $ unwords [show $ nsroot n | (n,_,_) <- params]
createOtherDoc ist (DataDoc fd@(FD n docstring args _ _) fds) = do
H.dt ! (A.id $ toValue $ show n) $ do
H.span ! class_ "word" $ do "data"; nbsp
genTypeHeader ist fd
H.dd $ do
(if nullDocstring docstring then mempty else Docstrings.renderHtml docstring)
let args' = filter (\(_, _, _, d) -> isJust d) args
if not $ null args'
then H.dl $ forM_ args' genArg
else mempty
H.dl ! class_ "decls" $ forM_ fds (createFunDoc ist)
where genArg (_, _, _, Nothing) = mempty
genArg (name, _, _, Just docstring) = do
H.dt $ toHtml $ show name
H.dd $ Docstrings.renderHtml docstring
createOtherDoc ist (NamedImplementationDoc _ fd) = createFunDoc ist fd
createOtherDoc ist (ModDoc _ docstring) = do
Docstrings.renderHtml docstring
-- | Generates everything but the actual content of the page
wrapper :: Maybe NsName -- ^ Namespace name, unless it is the index
-> H.Html -- ^ Inner HTML
-> H.Html
wrapper ns inner =
let (index, str) = extract ns
base = if index then "" else "../"
styles = base ++ "styles.css" :: String
indexPage = base ++ "index.html" :: String
in H.docTypeHtml $ do
H.head $ do
H.meta ! charset "utf-8"
H.meta ! name "viewport" ! content "width=device-width, initial-scale=1, shrink-to-fit=no"
H.title $ do
"IdrisDoc"
if index then " Index" else do
": "
toHtml str
H.link ! type_ "text/css" ! rel "stylesheet"
! href (toValue styles)
H.body ! class_ (if index then "index" else "namespace") $ do
H.div ! class_ "wrapper" $ do
H.header $ do
H.strong "IdrisDoc"
if index then mempty else do
": "
toHtml str
H.nav $ H.a ! href (toValue indexPage) $ "Index"
H.div ! class_ "container" $ inner
H.footer $ do
"Produced by IdrisDoc version "
toHtml version
where extract (Just ns) = (False, nsName2Str ns)
extract _ = (True, "")
-- | Non-break space character
nbsp :: H.Html
nbsp = preEscapedToHtml (" " :: String)
-- | Returns a list of namespaces already documented in a IdrisDoc directory
existingNamespaces :: FilePath -- ^ The base directory containing the
-- 'docs' directory with existing
-- namespace pages
-> IO (S.Set NsName)
existingNamespaces out = do
let docs = out ++ "/" ++ "docs"
str2Ns s | s == rootNsStr = []
str2Ns s = reverse $ T.splitOn (T.singleton '.') (txt s)
toNs fp = do isFile <- doesFileExist $ docs </> fp
let isHtml = ".html" == takeExtension fp
name = dropExtension fp
ns = str2Ns name
return $ if isFile && isHtml then Just ns else Nothing
docsExists <- doesDirectoryExist docs
if not docsExists
then return S.empty
else do contents <- getDirectoryContents docs
namespaces <- catMaybes `fmap` (sequence $ map toNs contents)
return $ S.fromList namespaces
-- | Copies IdrisDoc dependencies such as stylesheets to a directory
copyDependencies :: FilePath -- ^ The base directory to which
-- dependencies should be written
-> IO ()
copyDependencies dir =
do styles <- getIdrisDataFileByName $ "idrisdoc" </> "styles.css"
copyFile styles (dir </> "styles.css")
|
kojiromike/Idris-dev
|
src/Idris/IdrisDoc.hs
|
bsd-3-clause
| 30,283 | 0 | 23 | 9,608 | 8,710 | 4,432 | 4,278 | 521 | 19 |
-----------------------------------------------------------------------------
-- |
-- Module : GHC.GHCi.Helpers
-- Copyright : (c) The GHC Developers
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Various helpers used by the GHCi shell.
--
-----------------------------------------------------------------------------
module GHC.GHCi.Helpers
( disableBuffering, flushAll
, evalWrapper
) where
import System.IO
import System.Environment
disableBuffering :: IO ()
disableBuffering = do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
hSetBuffering stderr NoBuffering
flushAll :: IO ()
flushAll = do
hFlush stdout
hFlush stderr
evalWrapper :: String -> [String] -> IO a -> IO a
evalWrapper progName args m =
withProgName progName (withArgs args m)
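-- Illustrative call (not part of the original module): GHCi uses
-- 'evalWrapper' to run a user action under a chosen program name and
-- argument list, e.g.
--
-- > evalWrapper "script.hs" ["--flag"] (getArgs >>= print)
--
-- so that 'getProgName' and 'getArgs' inside the action observe those values.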
|
sdiehl/ghc
|
libraries/base/GHC/GHCi/Helpers.hs
|
bsd-3-clause
| 914 | 0 | 8 | 149 | 158 | 85 | 73 | 17 | 1 |
{-# LANGUAGE RelaxedPolyRec, FlexibleInstances, TypeSynonymInstances, FlexibleContexts #-}
-- RelaxedPolyRec needed for inlinesBetween on GHC < 7
{-
Copyright (C) 2014 Alexander Sulfrian <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.TWiki
Copyright : Copyright (C) 2014 Alexander Sulfrian
License : GNU GPL, version 2 or above
Maintainer : Alexander Sulfrian <[email protected]>
Stability : alpha
Portability : portable
Conversion of twiki text to 'Pandoc' document.
-}
module Text.Pandoc.Readers.TWiki ( readTWiki
, readTWikiWithWarnings
) where
import Text.Pandoc.Definition
import qualified Text.Pandoc.Builder as B
import Text.Pandoc.Options
import Text.Pandoc.Parsing hiding (enclosed, macro, nested)
import Text.Pandoc.Readers.HTML (htmlTag, isCommentTag)
import Control.Monad
import Text.Printf (printf)
import Debug.Trace (trace)
import Text.Pandoc.XML (fromEntities)
import Data.Maybe (fromMaybe)
import Text.HTML.TagSoup
import Data.Char (isAlphaNum)
import qualified Data.Foldable as F
import Text.Pandoc.Error
-- | Read twiki from an input string and return a Pandoc document.
readTWiki :: ReaderOptions -- ^ Reader options
-> String -- ^ String to parse (assuming @'\n'@ line endings)
-> Either PandocError Pandoc
readTWiki opts s =
(readWith parseTWiki) def{ stateOptions = opts } (s ++ "\n\n")
readTWikiWithWarnings :: ReaderOptions -- ^ Reader options
-> String -- ^ String to parse (assuming @'\n'@ line endings)
-> Either PandocError (Pandoc, [String])
readTWikiWithWarnings opts s =
(readWith parseTWikiWithWarnings) def{ stateOptions = opts } (s ++ "\n\n")
where parseTWikiWithWarnings = do
doc <- parseTWiki
warnings <- stateWarnings <$> getState
return (doc, warnings)
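-- Minimal usage sketch (illustrative only, default 'ReaderOptions' assumed):
--
-- > readTWiki def "---+ Heading\n\nSome *bold* text\n"
--
-- parses a small TWiki document into either a PandocError or a 'Pandoc'.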
type TWParser = Parser [Char] ParserState
--
-- utility functions
--
tryMsg :: String -> TWParser a -> TWParser a
tryMsg msg p = try p <?> msg
skip :: TWParser a -> TWParser ()
skip parser = parser >> return ()
nested :: TWParser a -> TWParser a
nested p = do
nestlevel <- stateMaxNestingLevel <$> getState
guard $ nestlevel > 0
updateState $ \st -> st{ stateMaxNestingLevel = stateMaxNestingLevel st - 1 }
res <- p
updateState $ \st -> st{ stateMaxNestingLevel = nestlevel }
return res
htmlElement :: String -> TWParser (Attr, String)
htmlElement tag = tryMsg tag $ do
(TagOpen _ attr, _) <- htmlTag (~== TagOpen tag [])
content <- manyTill anyChar (endtag <|> endofinput)
return (htmlAttrToPandoc attr, trim content)
where
endtag = skip $ htmlTag (~== TagClose tag)
endofinput = lookAhead $ try $ skipMany blankline >> skipSpaces >> eof
trim = dropWhile (=='\n') . reverse . dropWhile (=='\n') . reverse
htmlAttrToPandoc :: [Attribute String] -> Attr
htmlAttrToPandoc attrs = (ident, classes, keyvals)
where
ident = fromMaybe "" $ lookup "id" attrs
classes = maybe [] words $ lookup "class" attrs
keyvals = [(k,v) | (k,v) <- attrs, k /= "id" && k /= "class"]
parseHtmlContentWithAttrs :: String -> TWParser a -> TWParser (Attr, [a])
parseHtmlContentWithAttrs tag parser = do
(attr, content) <- htmlElement tag
parsedContent <- try $ parseContent content
return (attr, parsedContent)
where
parseContent = parseFromString $ nested $ manyTill parser endOfContent
endOfContent = try $ skipMany blankline >> skipSpaces >> eof
parseHtmlContent :: String -> TWParser a -> TWParser [a]
parseHtmlContent tag p = parseHtmlContentWithAttrs tag p >>= return . snd
--
-- main parser
--
parseTWiki :: TWParser Pandoc
parseTWiki = do
bs <- mconcat <$> many block
spaces
eof
return $ B.doc bs
--
-- block parsers
--
block :: TWParser B.Blocks
block = do
tr <- getOption readerTrace
pos <- getPosition
res <- mempty <$ skipMany1 blankline
<|> blockElements
<|> para
skipMany blankline
when tr $
trace (printf "line %d: %s" (sourceLine pos)
(take 60 $ show $ B.toList res)) (return ())
return res
blockElements :: TWParser B.Blocks
blockElements = choice [ separator
, header
, verbatim
, literal
, list ""
, table
, blockQuote
, noautolink
]
separator :: TWParser B.Blocks
separator = tryMsg "separator" $ string "---" >> newline >> return B.horizontalRule
header :: TWParser B.Blocks
header = tryMsg "header" $ do
string "---"
level <- many1 (char '+') >>= return . length
guard $ level <= 6
classes <- option [] $ string "!!" >> return ["unnumbered"]
skipSpaces
content <- B.trimInlines . mconcat <$> manyTill inline newline
attr <- registerHeader ("", classes, []) content
return $ B.headerWith attr level $ content
verbatim :: TWParser B.Blocks
verbatim = (htmlElement "verbatim" <|> htmlElement "pre")
>>= return . (uncurry B.codeBlockWith)
literal :: TWParser B.Blocks
literal = htmlElement "literal" >>= return . rawBlock
where
format (_, _, kvs) = fromMaybe "html" $ lookup "format" kvs
rawBlock (attrs, content) = B.rawBlock (format attrs) content
list :: String -> TWParser B.Blocks
list prefix = choice [ bulletList prefix
, orderedList prefix
, definitionList prefix]
definitionList :: String -> TWParser B.Blocks
definitionList prefix = tryMsg "definitionList" $ do
indent <- lookAhead $ string prefix *> (many1 $ string " ") <* string "$ "
elements <- many $ parseDefinitionListItem (prefix ++ concat indent)
return $ B.definitionList elements
where
parseDefinitionListItem :: String -> TWParser (B.Inlines, [B.Blocks])
parseDefinitionListItem indent = do
string (indent ++ "$ ") >> skipSpaces
term <- many1Till inline $ string ": "
line <- listItemLine indent $ string "$ "
return $ (mconcat term, [line])
bulletList :: String -> TWParser B.Blocks
bulletList prefix = tryMsg "bulletList" $
parseList prefix (char '*') (char ' ')
orderedList :: String -> TWParser B.Blocks
orderedList prefix = tryMsg "orderedList" $
parseList prefix (oneOf "1iIaA") (string ". ")
parseList :: Show a => String -> TWParser Char -> TWParser a -> TWParser B.Blocks
parseList prefix marker delim = do
(indent, style) <- lookAhead $ string prefix *> listStyle <* delim
blocks <- many $ parseListItem (prefix ++ indent) (char style <* delim)
return $ case style of
'1' -> B.orderedListWith (1, DefaultStyle, DefaultDelim) blocks
'i' -> B.orderedListWith (1, LowerRoman, DefaultDelim) blocks
'I' -> B.orderedListWith (1, UpperRoman, DefaultDelim) blocks
'a' -> B.orderedListWith (1, LowerAlpha, DefaultDelim) blocks
'A' -> B.orderedListWith (1, UpperAlpha, DefaultDelim) blocks
_ -> B.bulletList blocks
where
listStyle = do
indent <- many1 $ string " "
style <- marker
return (concat indent, style)
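-- Input shapes handled here (illustrative): "   * item" yields a bullet list,
-- "   1. item" a decimal ordered list, and "   a. item" a lower-alpha ordered
-- list; the three-space indent is the TWiki list marker, and deeper indents
-- nest through the growing 'prefix' argument.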
parseListItem :: Show a => String -> TWParser a -> TWParser B.Blocks
parseListItem prefix marker = string prefix >> marker >> listItemLine prefix marker
listItemLine :: Show a => String -> TWParser a -> TWParser B.Blocks
listItemLine prefix marker = lineContent >>= parseContent >>= return . mconcat
where
lineContent = do
content <- anyLine
continuation <- optionMaybe listContinuation
return $ filterSpaces content ++ "\n" ++ (maybe "" (" " ++) continuation)
filterSpaces = reverse . dropWhile (== ' ') . reverse
listContinuation = notFollowedBy (string prefix >> marker) >>
string " " >> lineContent
parseContent = parseFromString $ many1 $ nestedList <|> parseInline
parseInline = many1Till inline (lastNewline <|> newlineBeforeNestedList) >>=
return . B.plain . mconcat
nestedList = list prefix
lastNewline = try $ char '\n' <* eof
newlineBeforeNestedList = try $ char '\n' <* lookAhead nestedList
table :: TWParser B.Blocks
table = try $ do
tableHead <- optionMaybe $ many1Till tableParseHeader newline >>= return . unzip
rows <- many1 tableParseRow
return $ buildTable mempty rows $ fromMaybe (align rows, columns rows) tableHead
where
buildTable caption rows (aligns, heads)
= B.table caption aligns heads rows
    align rows = replicate (columnCount rows) (AlignDefault, 0)
    columns rows = replicate (columnCount rows) mempty
    columnCount rows = length $ head rows
tableParseHeader :: TWParser ((Alignment, Double), B.Blocks)
tableParseHeader = try $ do
char '|'
leftSpaces <- many spaceChar >>= return . length
char '*'
content <- tableColumnContent (char '*' >> skipSpaces >> char '|')
char '*'
rightSpaces <- many spaceChar >>= return . length
optional tableEndOfRow
return (tableAlign leftSpaces rightSpaces, content)
where
tableAlign left right
| left >= 2 && left == right = (AlignCenter, 0)
| left > right = (AlignRight, 0)
| otherwise = (AlignLeft, 0)
tableParseRow :: TWParser [B.Blocks]
tableParseRow = many1Till tableParseColumn newline
tableParseColumn :: TWParser B.Blocks
tableParseColumn = char '|' *> skipSpaces *>
tableColumnContent (skipSpaces >> char '|')
<* skipSpaces <* optional tableEndOfRow
tableEndOfRow :: TWParser Char
tableEndOfRow = lookAhead (try $ char '|' >> char '\n') >> char '|'
tableColumnContent :: Show a => TWParser a -> TWParser B.Blocks
tableColumnContent end = manyTill content (lookAhead $ try end) >>= return . B.plain . mconcat
where
content = continuation <|> inline
continuation = try $ char '\\' >> newline >> return mempty
blockQuote :: TWParser B.Blocks
blockQuote = parseHtmlContent "blockquote" block >>= return . B.blockQuote . mconcat
noautolink :: TWParser B.Blocks
noautolink = do
(_, content) <- htmlElement "noautolink"
st <- getState
setState $ st{ stateAllowLinks = False }
blocks <- try $ parseContent content
setState $ st{ stateAllowLinks = True }
return $ mconcat blocks
where
parseContent = parseFromString $ many $ block
para :: TWParser B.Blocks
para = many1Till inline endOfParaElement >>= return . result . mconcat
where
endOfParaElement = lookAhead $ endOfInput <|> endOfPara <|> newBlockElement
endOfInput = try $ skipMany blankline >> skipSpaces >> eof
endOfPara = try $ blankline >> skipMany1 blankline
newBlockElement = try $ blankline >> skip blockElements
result content = if F.all (==Space) content
then mempty
else B.para $ B.trimInlines content
--
-- inline parsers
--
inline :: TWParser B.Inlines
inline = choice [ whitespace
, br
, macro
, strong
, strongHtml
, strongAndEmph
, emph
, emphHtml
, boldCode
, smart
, link
, htmlComment
, code
, codeHtml
, nop
, autoLink
, str
, symbol
] <?> "inline"
whitespace :: TWParser B.Inlines
whitespace = (lb <|> regsp) >>= return
where lb = try $ skipMany spaceChar >> linebreak >> return B.space
regsp = try $ skipMany1 spaceChar >> return B.space
br :: TWParser B.Inlines
br = try $ string "%BR%" >> return B.linebreak
linebreak :: TWParser B.Inlines
linebreak = newline >> notFollowedBy newline >> (lastNewline <|> innerNewline)
where lastNewline = eof >> return mempty
innerNewline = return B.space
between :: (Show b, Monoid c) => TWParser a -> TWParser b -> (TWParser b -> TWParser c) -> TWParser c
between start end p =
mconcat <$> try (start >> notFollowedBy whitespace >> many1Till (p end) end)
enclosed :: (Show a, Monoid b) => TWParser a -> (TWParser a -> TWParser b) -> TWParser b
enclosed sep p = between sep (try $ sep <* endMarker) p
where
endMarker = lookAhead $ skip endSpace <|> skip (oneOf ".,!?:)|") <|> eof
endSpace = (spaceChar <|> newline) >> return B.space
macro :: TWParser B.Inlines
macro = macroWithParameters <|> withoutParameters
where
withoutParameters = enclosed (char '%') (\_ -> macroName) >>= return . emptySpan
emptySpan name = buildSpan name [] mempty
macroWithParameters :: TWParser B.Inlines
macroWithParameters = try $ do
char '%'
name <- macroName
(content, kvs) <- attributes
char '%'
return $ buildSpan name kvs $ B.str content
buildSpan :: String -> [(String, String)] -> B.Inlines -> B.Inlines
buildSpan className kvs = B.spanWith attrs
where
attrs = ("", ["twiki-macro", className] ++ additionalClasses, kvsWithoutClasses)
additionalClasses = maybe [] words $ lookup "class" kvs
kvsWithoutClasses = [(k,v) | (k,v) <- kvs, k /= "class"]
macroName :: TWParser String
macroName = do
first <- letter
rest <- many $ alphaNum <|> char '_'
return (first:rest)
attributes :: TWParser (String, [(String, String)])
attributes = char '{' *> spnl *> many (attribute <* spnl) <* char '}' >>=
return . foldr (either mkContent mkKvs) ([], [])
where
spnl = skipMany (spaceChar <|> newline)
mkContent c ([], kvs) = (c, kvs)
mkContent c (rest, kvs) = (c ++ " " ++ rest, kvs)
mkKvs kv (cont, rest) = (cont, (kv : rest))
attribute :: TWParser (Either String (String, String))
attribute = withKey <|> withoutKey
where
withKey = try $ do
key <- macroName
char '='
parseValue False >>= return . (curry Right key)
withoutKey = try $ parseValue True >>= return . Left
parseValue allowSpaces = (withQuotes <|> withoutQuotes allowSpaces) >>= return . fromEntities
withQuotes = between (char '"') (char '"') (\_ -> count 1 $ noneOf ['"'])
withoutQuotes allowSpaces
| allowSpaces == True = many1 $ noneOf "}"
| otherwise = many1 $ noneOf " }"
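-- Macro forms accepted above (illustrative): %TOC% becomes an empty span with
-- classes ["twiki-macro", "TOC"], while %INCLUDE{"page" section="intro"}%
-- yields a span with classes ["twiki-macro", "INCLUDE"], the bare "page" text
-- as content, and section="intro" as a key/value attribute.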
nestedInlines :: Show a => TWParser a -> TWParser B.Inlines
nestedInlines end = innerSpace <|> nestedInline
where
innerSpace = try $ whitespace <* (notFollowedBy end)
nestedInline = notFollowedBy whitespace >> nested inline
strong :: TWParser B.Inlines
strong = try $ enclosed (char '*') nestedInlines >>= return . B.strong
strongHtml :: TWParser B.Inlines
strongHtml = (parseHtmlContent "strong" inline <|> parseHtmlContent "b" inline)
>>= return . B.strong . mconcat
strongAndEmph :: TWParser B.Inlines
strongAndEmph = try $ enclosed (string "__") nestedInlines >>= return . B.emph . B.strong
emph :: TWParser B.Inlines
emph = try $ enclosed (char '_') nestedInlines >>= return . B.emph
emphHtml :: TWParser B.Inlines
emphHtml = (parseHtmlContent "em" inline <|> parseHtmlContent "i" inline)
>>= return . B.emph . mconcat
nestedString :: Show a => TWParser a -> TWParser String
nestedString end = innerSpace <|> (count 1 nonspaceChar)
where
innerSpace = try $ many1 spaceChar <* notFollowedBy end
boldCode :: TWParser B.Inlines
boldCode = try $ enclosed (string "==") nestedString >>= return . B.strong . B.code . fromEntities
htmlComment :: TWParser B.Inlines
htmlComment = htmlTag isCommentTag >> return mempty
code :: TWParser B.Inlines
code = try $ enclosed (char '=') nestedString >>= return . B.code . fromEntities
codeHtml :: TWParser B.Inlines
codeHtml = do
(attrs, content) <- parseHtmlContentWithAttrs "code" anyChar
return $ B.codeWith attrs $ fromEntities content
autoLink :: TWParser B.Inlines
autoLink = try $ do
state <- getState
guard $ stateAllowLinks state
(text, url) <- parseLink
guard $ checkLink (head $ reverse url)
return $ makeLink (text, url)
where
parseLink = notFollowedBy nop >> (uri <|> emailAddress)
makeLink (text, url) = B.link url "" $ B.str text
checkLink c
| c == '/' = True
| otherwise = isAlphaNum c
str :: TWParser B.Inlines
str = (many1 alphaNum <|> count 1 characterReference) >>= return . B.str
nop :: TWParser B.Inlines
nop = try $ (skip exclamation <|> skip nopTag) >> followContent
where
exclamation = char '!'
nopTag = stringAnyCase "<nop>"
followContent = many1 nonspaceChar >>= return . B.str . fromEntities
symbol :: TWParser B.Inlines
symbol = count 1 nonspaceChar >>= return . B.str
smart :: TWParser B.Inlines
smart = do
getOption readerSmart >>= guard
doubleQuoted <|> singleQuoted <|>
choice [ apostrophe
, dash
, ellipses
]
singleQuoted :: TWParser B.Inlines
singleQuoted = try $ do
singleQuoteStart
withQuoteContext InSingleQuote $
many1Till inline singleQuoteEnd >>=
(return . B.singleQuoted . B.trimInlines . mconcat)
doubleQuoted :: TWParser B.Inlines
doubleQuoted = try $ do
doubleQuoteStart
contents <- mconcat <$> many (try $ notFollowedBy doubleQuoteEnd >> inline)
(withQuoteContext InDoubleQuote $ doubleQuoteEnd >>
return (B.doubleQuoted $ B.trimInlines contents))
<|> (return $ (B.str "\8220") B.<> contents)
link :: TWParser B.Inlines
link = try $ do
st <- getState
guard $ stateAllowLinks st
setState $ st{ stateAllowLinks = False }
(url, title, content) <- linkText
setState $ st{ stateAllowLinks = True }
return $ B.link url title content
linkText :: TWParser (String, String, B.Inlines)
linkText = do
string "[["
url <- many1Till anyChar (char ']')
content <- option [B.str url] linkContent
char ']'
return (url, "", mconcat content)
where
linkContent = (char '[') >> many1Till anyChar (char ']') >>= parseLinkContent
parseLinkContent = parseFromString $ many1 inline
|
janschulz/pandoc
|
src/Text/Pandoc/Readers/TWiki.hs
|
gpl-2.0
| 18,596 | 0 | 15 | 4,466 | 5,772 | 2,899 | 2,873 | 385 | 6 |
module AddOneParameter.D2 where
{-add parameter 'f' to function 'sq'. This refactoring
affects modules 'D2', 'C2' and 'A2'. It aims to test the
creation of default parameter names.-}
sumSquares (x:xs) = (sq sq_f) x + sumSquares xs
sumSquares [] = 0
sq f x = x ^ pow
sq_f = undefined
pow =2
|
RefactoringTools/HaRe
|
test/testdata/AddOneParameter/D2.expected.hs
|
bsd-3-clause
| 301 | 0 | 8 | 65 | 72 | 38 | 34 | 6 | 1 |
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.ClassHierarchy
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | The pane of the IDE where the class hierarchy is presented in tree form
--
-------------------------------------------------------------------------------
module IDE.Pane.ClassHierarchy (
IDEClassHierarchy(..)
, ClassHierarchyState(..)
, showClasses
--, showInstances
, selectClass
--, reloadKeepSelection
) where
import Graphics.UI.Gtk hiding (get)
import Data.Maybe
import Control.Monad.Reader
import qualified Data.Map as Map
import Data.Tree
import Data.List
import Data.Typeable
import Prelude hiding (catch)
import IDE.Core.State
-- | A class hierarchy pane description
--
data IDEClassHierarchy = IDEClassHierarchy {
outer :: VBox
, paned :: HPaned
, treeView :: TreeView
, treeStore :: TreeStore ClassWrapper
--, facetView :: TreeView
--, facetStore :: TreeStore FacetWrapper
, localScopeB :: RadioButton
, packageScopeB :: RadioButton
, worldScopeB :: RadioButton
, blacklistB :: CheckButton
} deriving Typeable
data ClassHierarchyState = ClassHierarchyState Int (Scope,Bool)
(Maybe Text, Maybe Text)
deriving(Eq,Ord,Read,Show,Typeable)
instance IDEObject IDEClassHierarchy
instance Pane IDEClassHierarchy IDEM
where
primPaneName _ = "ClassHierarchy"
getAddedIndex _ = 0
getTopWidget = castToWidget . outer
paneId b = "*ClassHierarchy"
makeActive p = activatePane p []
close = closePane
instance RecoverablePane IDEClassHierarchy ClassHierarchyState IDEM where
saveState p = return Nothing
recoverState pp _ = return ()
{--
instance RecoverablePane IDEClassHierarchy ClassHierarchyState where
saveState p = do
(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _) <- getModules
sc <- getScope
mbModules <- getPane
case mbModules of
Nothing -> return Nothing
Just p -> liftIO $ do
i <- panedGetPosition (paned p)
mbTreeSelection <- getSelectionTree treeView treeStore
mbFacetSelection <- getSelectionFacet facetView facetStore
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
return (Just (ModulesState i sc mbs))
recoverState pp (ModulesState i sc@(scope,useBlacklist) se) = do
nb <- getNotebook pp
initModules pp nb
mod@(IDEModules _ _ treeView treeStore facetView facetStore lb pb wb blb)
<- getModules
case scope of
Local -> liftIO $ toggleButtonSetActive lb True
Package -> liftIO $ toggleButtonSetActive pb True
World -> liftIO $ toggleButtonSetActive wb True
liftIO $ toggleButtonSetActive blb useBlacklist
liftIO $ panedSetPosition (paned mod) i
fillModulesList sc
selectNames se
--}
selectClass :: Descr -> IDEAction
selectClass d@(Descr descrName _ descrModu _ _ details) =
case details of
(ClassDescr _ _)-> selectClass' descrModu descrName
_ -> return ()
selectClass _ = return ()
selectClass' moduleName symbol = return ()
{--
selectClass' :: ModuleIdentifier -> Symbol -> IDEAction
selectClass' moduleName symbol =
let nameArray = breakAtDots [] moduleName
in do
mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _) <- getModules
mbTree <- liftIO $ treeStoreGetTreeSave treeStore []
case treePathFromNameArray mbTree nameArray [] of
Just treePath -> liftIO $ do
treeViewExpandToPath treeView treePath
sel <- treeViewGetSelection treeView
treeSelectionSelectPath sel treePath
col <- treeViewGetColumn treeView 0
treeViewScrollToCell treeView treePath (fromJust col) (Just (0.3,0.3))
mbFacetTree <- treeStoreGetTreeSave facetStore []
selF <- treeViewGetSelection facetView
case findPathFor symbol mbFacetTree of
Nothing -> sysMessage Normal "no path found"
Just path -> do
treeSelectionSelectPath selF path
col <- treeViewGetColumn facetView 0
treeViewScrollToCell facetView path (fromJust col) (Just (0.3,0.3))
bringPaneToFront mods
Nothing -> return ()
--}
showClasses :: IDEAction
showClasses = do
m <- getClassHierarchy
liftIO $ bringPaneToFront m
liftIO $ widgetGrabFocus (treeView m)
--showInstances :: IDEAction
--showInstances = do
-- m <- getClassHierarchy
-- liftIO $ bringPaneToFront m
-- liftIO $ widgetGrabFocus (facetView m)
getClassHierarchy :: IDEM IDEClassHierarchy
getClassHierarchy = do
mbCH <- getPane
case mbCH of
Nothing -> do
pp <- getBestPathForId "*ClassHierarchy"
nb <- getNotebook pp
ci <- readIDE currentInfo
newPane pp nb (builder ci)
mbCH <- getPane
case mbCH of
Nothing -> throwIDE "Can't init class hierarchy"
Just m -> return m
Just m -> return m
type ClassHierarchy = Forest ClassWrapper
type ClassWrapper = (Symbol, [Symbol], Descr)
--
-- | Make a Tree with a class hierarchy for display.
--
buildClassHierarchyTree :: (PackageScope,PackageScope) -> ClassHierarchy
buildClassHierarchyTree ((_,sc1),(_,sc2)) =
let allClasses = nub
$ filter isClassDescr
$ concat (Map.elems sc1)
++ concat (Map.elems sc2)
wrappers = map asClassWrapper allClasses
(basics,other) = partition (\(_,sc,_) -> null sc) wrappers
basicForest = map (\ n -> Node n []) basics
resultForest = insertInForest basicForest other
in sortForest resultForest
where
insertInForest :: ClassHierarchy -> [ClassWrapper] -> ClassHierarchy
insertInForest basicForest [] = basicForest
insertInForest basicForest other =
let (newForest,rest) = foldl' insertInForest' (basicForest,[]) other
in if length rest >= length other
then throwIDE "ClassHierarchy>>buildClassHierarchyTree: Can't build tree"
else insertInForest newForest rest
insertInForest' :: (ClassHierarchy,[ClassWrapper]) -> ClassWrapper
-> (ClassHierarchy,[ClassWrapper])
insertInForest' (forest,rest) wrap@(id,superList,idDescr) =
let (newForest,newSuperList) = foldl' (insertInForest2 wrap)
(forest, []) superList
in if null newSuperList
then (newForest,rest)
else (newForest,(id,newSuperList,idDescr): rest)
insertInForest2 :: ClassWrapper -> (ClassHierarchy,[Text]) -> Text
-> (ClassHierarchy,[Text])
insertInForest2 wrapper (forest,rest) super =
let (newForest,success) = foldl' (insertInTree wrapper super) ([],False) forest
in if success
then (newForest,rest)
else (newForest, super : rest)
insertInTree :: ClassWrapper -> Text -> (ClassHierarchy,Bool)
-> Tree ClassWrapper -> (ClassHierarchy,Bool)
insertInTree wrapper superS (forest,bool) n@(Node w@(symbol,super,idDescr) subForest) =
if superS == symbol
then (Node w ((Node wrapper []) : subForest) : forest, True)
else
let (newSubForest,newBool) = foldl' (insertInTree wrapper superS) ([],False)
subForest
in if newBool
then ((Node w newSubForest) : forest, True)
else (n: forest, bool)
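-- Note (explanatory, not original): 'insertInForest' repeatedly tries to hang
-- each remaining class under a superclass already present in the forest; if a
-- pass makes no progress (rest is no shorter than other), the superclass links
-- are cyclic or unresolved and the throwIDE above reports the failure.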
isClassDescr :: Descr -> Bool
isClassDescr descr = case details descr of
ClassDescr _ _ -> True
_ -> False
asClassWrapper :: Descr -> ClassWrapper
asClassWrapper descr =
case details descr of
ClassDescr super _ -> (descrName descr, super, descr)
_ -> throwIDE "ClassHierarchy>>asClassWrapper: No class"
instance Ord a => Ord (Tree a) where
compare (Node l1 _) (Node l2 _) = compare l1 l2
sortForest :: Ord a => Forest a -> Forest a
sortForest forest = sort (map sortTree forest)
sortTree :: Ord a => Tree a -> Tree a
sortTree (Node l forest) = Node l (sort (map sortTree forest))
builder :: Maybe (PackageScope, PackageScope) ->
PanePath ->
Notebook ->
Window ->
IDERef ->
IO (IDEClassHierarchy, Connections)
builder currentInfo pp nb windows ideR = do
let forest = case currentInfo of
Nothing -> []
Just pair -> buildClassHierarchyTree pair
treeStore <- treeStoreNew forest
treeView <- treeViewNew
treeViewSetModel treeView treeStore
--treeViewSetRulesHint treeView True
renderer0 <- cellRendererPixbufNew
set renderer0 [ cellPixbufStockId := "ide_no_source" ]
renderer <- cellRendererTextNew
col <- treeViewColumnNew
treeViewColumnSetTitle col "Classes"
treeViewColumnSetSizing col TreeViewColumnAutosize
treeViewColumnSetResizable col True
treeViewColumnSetReorderable col True
treeViewAppendColumn treeView col
cellLayoutPackStart col renderer0 False
cellLayoutPackStart col renderer True
cellLayoutSetAttributes col renderer treeStore
$ \(s,_,_) -> [ cellText := s]
cellLayoutSetAttributes col renderer0 treeStore
$ \(_,_,d) -> [
cellPixbufStockId :=
if isJust (mbLocation d)
then "ide_source"
else "ide_no_source"]
treeViewSetHeadersVisible treeView True
-- treeViewSetEnableSearch treeView True
-- treeViewSetSearchColumn treeView 0
-- treeViewSetSearchEqualFunc treeView (treeViewSearch treeView treeStore)
-- Facet view
{--
facetView <- treeViewNew
facetStore <- treeStoreNew []
treeViewSetModel facetView facetStore
renderer30 <- cellRendererPixbufNew
renderer31 <- cellRendererPixbufNew
renderer3 <- cellRendererTextNew
col <- treeViewColumnNew
treeViewColumnSetTitle col "Interface"
--treeViewColumnSetSizing col TreeViewColumnAutosize
treeViewAppendColumn facetView col
cellLayoutPackStart col renderer30 False
cellLayoutPackStart col renderer31 False
cellLayoutPackStart col renderer3 True
cellLayoutSetAttributes col renderer3 facetStore
$ \row -> [ cellText := facetTreeText row]
cellLayoutSetAttributes col renderer30 facetStore
$ \row -> [
cellPixbufStockId := stockIdFromType (facetIdType row)]
cellLayoutSetAttributes col renderer31 facetStore
$ \row -> [
cellPixbufStockId := if isJust (mbLocation(facetIdDescr row))
then "ide_source"
else ""]
treeViewSetHeadersVisible facetView True
treeViewSetEnableSearch facetView True
treeViewSetSearchColumn facetView 0
treeViewSetSearchEqualFunc facetView (facetViewSearch facetView facetStore)
--}
pane' <- hPanedNew
sw <- scrolledWindowNew Nothing Nothing
scrolledWindowSetShadowType sw ShadowIn
containerAdd sw treeView
scrolledWindowSetPolicy sw PolicyAutomatic PolicyAutomatic
{-- sw2 <- scrolledWindowNew Nothing Nothing
containerAdd sw2 facetView
scrolledWindowSetPolicy sw2 PolicyAutomatic PolicyAutomatic--}
panedAdd1 pane' sw
-- panedAdd2 pane' sw2
(x,y) <- widgetGetSize nb
panedSetPosition pane' (x `quot` 2)
box <- hBoxNew True 2
rb1 <- radioButtonNewWithLabel "Local"
rb2 <- radioButtonNewWithLabelFromWidget rb1 "Package"
rb3 <- radioButtonNewWithLabelFromWidget rb1 "World"
toggleButtonSetActive rb3 True
cb <- checkButtonNewWithLabel "Blacklist"
boxPackStart box rb1 PackGrow 2
boxPackStart box rb2 PackGrow 2
boxPackStart box rb3 PackGrow 2
boxPackEnd box cb PackNatural 2
boxOuter <- vBoxNew False 2
boxPackStart boxOuter box PackNatural 2
boxPackStart boxOuter pane' PackGrow 2
let classes = IDEClassHierarchy boxOuter pane' treeView treeStore
{--facetView facetStore--} rb1 rb2 rb3 cb
cid3 <- treeView `onRowActivated`
(\ treePath _ -> do
treeViewExpandRow treeView treePath False
return ())
cid1 <- treeView `afterFocusIn`
(\_ -> do reflectIDE (makeActive classes) ideR; return True)
-- cid2 <- facetView `afterFocusIn`
-- (\_ -> do runReaderT (makeActive classes) ideR; return True)
-- treeView `onButtonPress` (treeViewPopup ideR treeStore treeView)
-- facetView `onButtonPress` (facetViewPopup ideR facetStore facetView)
-- rb1 `onToggled` (runReaderT scopeSelection ideR)
-- rb2 `onToggled` (runReaderT scopeSelection ideR)
-- rb3 `onToggled` (runReaderT scopeSelection ideR)
-- cb `onToggled` (runReaderT scopeSelection ideR)
sel <- treeViewGetSelection treeView
-- sel `onSelectionChanged` (fillFacets treeView treeStore facetView facetStore)
-- sel2 <- treeViewGetSelection facetView
-- sel2 `onSelectionChanged` (fillInfo facetView facetStore ideR)
return (classes,[ConnectC cid1{--,ConnectC cid2--}, ConnectC cid3])
{--
treeViewSearch :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> Int
-> Text
-> TreeIter
-> IO Bool
treeViewSearch treeView treeStore _ string iter = do
path <- treeModelGetPath treeStore iter
val <- treeStoreGetValue treeStore path
mbTree <- treeStoreGetTreeSave treeStore path
exp <- treeViewRowExpanded treeView path
when (isJust mbTree && (not (null (subForest (fromJust mbTree)))) && not exp) $
let found = searchInModSubnodes (fromJust mbTree) string
in when found $ do
treeViewExpandRow treeView path False
return ()
let str2 = case snd val of
[] -> fst val
(m,_):_ -> showPackModule (moduleIdMD m)
return (isInfixOf (map toLower string) (map toLower str2))
searchInModSubnodes :: ModTree -> Text -> Bool
searchInModSubnodes tree str =
not $ null
$ filter (\ val ->
let cstr = case snd val of
[] -> fst val
(m,_):_ -> showPackModule (moduleIdMD m)
in isInfixOf (map toLower str) (map toLower cstr))
$ concatMap flatten (subForest tree)
facetViewSearch :: TreeView
-> TreeStore FacetWrapper
-> Int
-> Text
-> TreeIter
-> IO Bool
facetViewSearch facetView facetStore _ string iter = do
path <- treeModelGetPath facetStore iter
val <- treeStoreGetValue facetStore path
tree <- treeStoreGetTree facetStore path
exp <- treeViewRowExpanded facetView path
when (not (null (subForest tree)) && not exp) $
let found = searchInFacetSubnodes tree string
in when found $ do
treeViewExpandRow facetView path False
return ()
return (isInfixOf (map toLower string) (map toLower (facetTreeText val)))
searchInFacetSubnodes :: FacetTree -> Text -> Bool
searchInFacetSubnodes tree str =
not $ null
$ filter (\ val ->
isInfixOf (map toLower str) (map toLower (facetTreeText val)))
$ concatMap flatten (subForest tree)
--}
{--
fillFacets :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> TreeView
-> TreeStore FacetWrapper
-> IO ()
fillFacets treeView treeStore facetView facetStore = do
sel <- getSelectionTree treeView treeStore
case sel of
Just val
-> case snd val of
((mod,package):_)
-> let forest = buildFacetForest mod in do
emptyModel <- treeStoreNew []
treeViewSetModel facetView emptyModel
treeStoreClear facetStore
mapM_ (\(e,i) -> treeStoreInsertTree facetStore [] i e)
$ zip forest [0 .. length forest]
treeViewSetModel facetView facetStore
treeViewSetEnableSearch facetView True
treeViewSetSearchColumn facetView 0
treeViewSetSearchEqualFunc facetView (facetViewSearch facetView facetStore)
[] -> return ()
Nothing
-> do
treeStoreClear facetStore
return ()
--}
{--
getSelectionTree :: TreeView
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> IO (Maybe (Text, [(ModuleDescr,PackageDescr)]))
getSelectionTree treeView treeStore = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows treeSelection
case paths of
[] -> return Nothing
a:r -> do
val <- treeStoreGetValue treeStore a
return (Just val)
getSelectionFacet :: TreeView
-> TreeStore FacetWrapper
-> IO (Maybe FacetWrapper)
getSelectionFacet treeView treeStore = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows treeSelection
case paths of
a:r -> do
val <- treeStoreGetValue treeStore a
return (Just val)
_ -> return Nothing
fillInfo :: TreeView
-> TreeStore FacetWrapper
-> IDERef
-> IO ()
fillInfo treeView lst ideR = do
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows treeSelection
case paths of
[] -> return ()
[a] -> do
wrapper <- treeStoreGetValue lst a
runReaderT (setInfos [facetIdDescr wrapper]) ideR
return ()
_ -> return ()
findDescription :: PackModule -> SymbolTable -> Symbol -> Maybe (Symbol,IdentifierDescr)
findDescription md st s =
case Map.lookup s st of
Nothing -> Nothing
Just l -> case filter (\id -> md == moduleIdID id) l of
[] -> Nothing
l -> Just (s,head l)
fillModulesList :: (Scope,Bool) -> IDEAction
fillModulesList (scope,useBlacklist) = do
(IDEModules _ _ treeView treeStore _ _ _ _ _ _) <- getModules
prefs <- readIDE prefs
currentInfo' <- readIDE currentInfo
accessibleInfo' <- readIDE accessibleInfo
case currentInfo' of
Nothing -> case (scope,accessibleInfo') of
(World,Just ai@(pm,ps)) ->
let p2 = if useBlacklist
then (Map.filter (filterBlacklist
(packageBlacklist prefs)) pm, ps)
else ai
(Node _ li) = buildModulesTree
((Map.empty,Map.empty),p2)
in liftIO $ do
treeStoreClear treeStore
mapM_ (\(e,i) -> treeStoreInsertTree treeStore [] i e)
$ zip li [0 .. length li]
_ -> liftIO $ do
treeStoreClear treeStore
treeStoreInsertTree treeStore [] 0 (Node ("",[]) [])
Just (l,p) -> let (l',p'@(pm,ps)) = case scope of
Local -> (l,(Map.empty,Map.empty))
Package -> (l,p)
World -> case accessibleInfo' of
Just ai -> (l,ai)
Nothing -> (l,p)
p2 = if useBlacklist
then (Map.filter (filterBlacklist
(packageBlacklist prefs)) pm, ps)
else p'
(Node _ li) = buildModulesTree (l',p2)
in liftIO $ do
emptyModel <- treeStoreNew []
treeViewSetModel treeView emptyModel
treeStoreClear treeStore
mapM_ (\(e,i) -> treeStoreInsertTree treeStore [] i e)
$ zip li [0 .. length li]
treeViewSetModel treeView treeStore
treeViewSetEnableSearch treeView True
treeViewSetSearchColumn treeView 0
treeViewSetSearchEqualFunc treeView (treeViewSearch treeView treeStore)
where
filterBlacklist :: [Dependency] -> PackageDescr -> Bool
filterBlacklist dependencies packageDescr =
let packageId = packagePD packageDescr
name = pkgName packageId
version = pkgVersion packageId
in isNothing $ find (\ (Dependency str vr) -> str == name && withinRange version vr)
dependencies
type FacetForest = Forest FacetWrapper
type FacetTree = Tree FacetWrapper
facetTreeText :: FacetWrapper -> Text
facetTreeText (Itself (SimpleDescr id FunctionS _ _ _ _)) = {-- "function " ++ --} id
facetTreeText (Itself (SimpleDescr id NewtypeS _ _ _ _)) = {-- "newtype " ++ --} id
facetTreeText (Itself (SimpleDescr id TypeS _ _ _ _)) = {-- "type " ++ --} id
facetTreeText (Itself (SimpleDescr id _ _ _ _ _)) = id
facetTreeText (Itself (DataDescr id _ _ _ _ _ _)) = {-- "data " ++ --} id
facetTreeText (Itself (ClassDescr id _ _ _ _ _)) = {-- "class " ++ --} id
facetTreeText (Itself (InstanceDescr cl _ _ _ _ )) = {-- "instance " ++ --} cl
facetTreeText (ConstructorW s _) = {-- "constructor " ++ --} s
facetTreeText (FieldW s _) = {-- "slot " ++ --} s
facetTreeText (MethodW s _) = {-- "method " ++ --} s
facetTreeText (OrphanedData (InstanceDescr cl binds _ _ _)) = {-- "instance " ++ --} cl
++ " " ++ printBinds binds
where
printBinds [] = ""
printBinds (a:[]) = a
printBinds (a:b) = a ++ " " ++ printBinds b
facetTreeText _ = throwIDE "impossible in facetTreeText"
facetIdType :: FacetWrapper -> IdType
facetIdType (Itself descr) = idType descr
facetIdType (ConstructorW _ _) = Constructor
facetIdType (FieldW _ _) = Field
facetIdType (MethodW _ _) = Method
facetIdType (OrphanedData _) = OrphanedInstance
facetIdDescr :: FacetWrapper -> IdentifierDescr
facetIdDescr (Itself descr) = descr
facetIdDescr (ConstructorW _ descr) = descr
facetIdDescr (FieldW _ descr) = descr
facetIdDescr (MethodW _ descr) = descr
facetIdDescr (OrphanedData descr) = descr
buildFacetForest :: ModuleDescr -> FacetForest
buildFacetForest modDescr =
let (instances,other) = partition (\id -> case id of
InstanceDescr _ _ _ _ _ -> True
_ -> False)
$ idDescriptionsMD modDescr
forestWithoutInstances = map buildFacet other
(forest2,orphaned) = foldl' addInstances (forestWithoutInstances,[])
instances
orphanedNodes = map (\ inst -> Node (OrphanedData inst) []) orphaned
in forest2 ++ reverse orphanedNodes
where
buildFacet :: IdentifierDescr -> FacetTree
buildFacet d@(SimpleDescr _ _ _ _ _ _)
= Node (Itself d) []
buildFacet d@(DataDescr _ _ _ constID fieldsID _ _)
= (Node (Itself d) ((map (\ s -> Node (ConstructorW s d) []) constID)
++ (map (\ s -> Node (FieldW s d) []) fieldsID)))
buildFacet d@(ClassDescr _ _ _ classOpsID _ _)
= Node (Itself d) (map (\ s -> Node (MethodW s d) []) classOpsID)
buildFacet d@(InstanceDescr _ _ _ _ _)
= throwIDE "Impossible in buildFacet"
addInstances :: (FacetForest,[IdentifierDescr])
-> IdentifierDescr
-> (FacetForest,[IdentifierDescr])
addInstances (forest,orphaned) instDescr =
case foldl' (matches instDescr) ([],False) forest of
(f,True) -> (f,orphaned)
(f,False) -> (forest, instDescr:orphaned)
matches :: IdentifierDescr
-> (FacetForest,Bool)
-> FacetTree
-> (FacetForest,Bool)
matches instDescr (forest,False) (Node (Itself dd@(DataDescr id _ _ _ _ _ _)) sub)
| [id] == binds instDescr
= ((Node (Itself dd) (sub ++ [Node (Itself instDescr) []])):forest,True)
matches instDescr (forest,False) (Node (Itself dd@(SimpleDescr id ty _ _ _ _ )) sub)
| [id] == binds instDescr && ty == NewtypeS
= ((Node (Itself dd) (sub ++ [Node (Itself instDescr) []])):forest,True)
matches _ (forest,b) node = (node:forest,b)
--}
{--
treeViewPopup :: IDERef
-> TreeStore (Text, [(ModuleDescr,PackageDescr)])
-> TreeView
-> Event
-> IO (Bool)
treeViewPopup ideR store treeView (Button _ click _ _ _ _ button _ _) = do
if button == RightButton
then do
theMenu <- menuNew
menuAttachToWidget theMenu treeView
item1 <- menuItemNewWithLabel "Edit"
item1 `onActivateLeaf` do
sel <- getSelectionTree treeView store
case sel of
Just (_,[(m,_)]) -> case mbSourcePathMD m of
Nothing -> return ()
Just fp -> do
runReaderT (selectSourceBuf fp) ideR
return ()
otherwise -> return ()
item2 <- menuItemNewWithLabel "ExpandAll"
item2 `onActivateLeaf` (treeViewExpandAll treeView)
item3 <- menuItemNewWithLabel "CollapseAll"
item3 `onActivateLeaf` (treeViewCollapseAll treeView)
mapM_ (menuShellAppend theMenu) [item1,item2,item3]
menuPopup theMenu Nothing
widgetShowAll theMenu
return True
else if button == LeftButton && click == DoubleClick
then do sel <- getSelectionTree treeView store
case sel of
Just (_,[(m,_)]) -> case mbSourcePathMD m of
Nothing -> return ()
Just fp -> do
runReaderT (selectSourceBuf fp) ideR
return ()
otherwise -> return ()
return True
else return False
treeViewPopup _ _ _ _ = throwIDE "treeViewPopup wrong event type"
facetViewPopup :: IDERef
-> TreeStore FacetWrapper
-> TreeView
-> Event
-> IO (Bool)
facetViewPopup ideR store facetView (Button _ click _ _ _ _ button _ _) = do
if button == RightButton
then do
theMenu <- menuNew
menuAttachToWidget theMenu treeView
item1 <- menuItemNewWithLabel "Go to definition"
item1 `onActivateLeaf` do
sel <- getSelectionFacet facetView store
case sel of
Just wrapper -> runReaderT
(goToDefinition (facetIdDescr wrapper)) ideR
otherwise -> sysMessage Normal "no selection"
menuShellAppend theMenu item1
menuPopup theMenu Nothing
widgetShowAll theMenu
return True
else if button == LeftButton && click == DoubleClick
then do sel <- getSelectionFacet facetView store
case sel of
Just wrapper -> runReaderT (goToDefinition
(facetIdDescr wrapper)) ideR
otherwise -> sysMessage Normal "no selection"
return True
else do
mbPane :: Maybe IDEInfo <- runReaderT getPane ideR
when (isJust mbPane) $ bringPaneToFront (fromJust mbPane)
return False
facetViewPopup _ _ _ _ = throwIDE "facetViewPopup wrong event type"
--}
{--
getScope :: IDEM (Scope,Bool)
getScope = do
(IDEModules _ _ treeView treeStore facetView facetStore localScopeB
packageScopeB worldScopeB blacklistB) <- getModules
rb1s <- liftIO $ toggleButtonGetActive localScopeB
rb2s <- liftIO $ toggleButtonGetActive packageScopeB
rb3s <- liftIO $ toggleButtonGetActive worldScopeB
cbs <- liftIO $ toggleButtonGetActive blacklistB
let scope = if rb1s
then Local
else if rb2s
then Package
else if rb3s
then World
else throwIDE
"ModulesPane.scopeSelection: No check button selected"
return (scope,cbs)
scopeSelection :: IDEAction
scopeSelection = do
mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
<- getModules
mbTreeSelection <- liftIO $ getSelectionTree treeView treeStore
mbFacetSelection <- liftIO $ getSelectionFacet facetView facetStore
sc <- getScope
ts <- liftIO $ treeViewGetSelection treeView
liftIO $ treeSelectionUnselectAll ts
fillModulesList sc
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
selectNames mbs
liftIO $ bringPaneToFront mods
selectNames :: (Maybe Text, Maybe Symbol) -> IDEAction
selectNames (mbModuleName, mbIdName) = do
(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
<- getModules
case mbModuleName of
Nothing -> return ()
Just moduleName ->
let nameArray = breakAtDots [] moduleName
in do
mbTree <- liftIO $ treeStoreGetTreeSave treeStore []
case treePathFromNameArray mbTree nameArray [] of
Nothing -> return ()
Just treePath -> liftIO $ do
treeViewExpandToPath treeView treePath
sel <- treeViewGetSelection treeView
treeSelectionSelectPath sel treePath
col <- treeViewGetColumn treeView 0
treeViewScrollToCell treeView treePath (fromJust col)
(Just (0.3,0.3))
case mbIdName of
Nothing -> return ()
Just symbol -> do
mbFacetTree <- treeStoreGetTreeSave facetStore []
selF <- treeViewGetSelection facetView
case findPathFor symbol mbFacetTree of
Nothing -> sysMessage Normal "no path found"
Just path -> do
treeSelectionSelectPath selF path
col <- treeViewGetColumn facetView 0
treeViewScrollToCell facetView path (fromJust col)
(Just (0.3,0.3))
symbolFromFacetWrapper :: FacetWrapper -> Symbol
symbolFromFacetWrapper (Itself idDescr) = identifierID idDescr
symbolFromFacetWrapper (ConstructorW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (FieldW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (MethodW _ idDescr) = identifierID idDescr
symbolFromFacetWrapper (OrphanedData idDescr) = identifierID idDescr
reloadKeepSelection :: IDEAction
reloadKeepSelection = do
mbMod <- getPane
case mbMod of
Nothing -> return ()
Just mods@(IDEModules _ _ treeView treeStore facetView facetStore _ _ _ _)
-> do
mbTreeSelection <- liftIO $ getSelectionTree treeView treeStore
mbFacetSelection <- liftIO $ getSelectionFacet facetView facetStore
sc <- getScope
fillModulesList sc
liftIO $ treeStoreClear facetStore
let mbs = (case mbTreeSelection of
Nothing -> Nothing
Just (_,[]) -> Nothing
Just (_,((md,_):_)) -> Just (modu $ moduleIdMD md),
case mbFacetSelection of
Nothing -> Nothing
Just fw -> Just (symbolFromFacetWrapper fw))
selectNames mbs
treeStoreGetTreeSave :: TreeStore a -> TreePath -> IO (Maybe (Tree a))
treeStoreGetTreeSave treeStore treePath = catch (do
res <- treeStoreGetTree treeStore treePath
return (Just res)) (\ _ -> return Nothing)
findPathFor :: Symbol -> Maybe (Tree FacetWrapper) -> Maybe TreePath
findPathFor symbol (Just (Node _ forest)) =
foldr ( \i mbTreePath -> findPathFor' [i] (forest !! i) mbTreePath)
Nothing [0 .. ((length forest) - 1)]
where
findPathFor' :: TreePath -> Tree FacetWrapper -> Maybe TreePath -> Maybe TreePath
findPathFor' _ node (Just p) = Just p
findPathFor' path (Node wrap sub) Nothing =
if identifierID (facetIdDescr wrap) == symbol
then Just (reverse path)
else
foldr ( \i mbTreePath -> findPathFor' (i:path) (sub !! i) mbTreePath)
Nothing [0 .. ((length sub) - 1)]
findPathFor symbol Nothing = Nothing
treePathFromNameArray :: Maybe ModTree -> [Text] -> [Int] -> Maybe [Int]
treePathFromNameArray (Just tree) [] accu = Just (reverse accu)
treePathFromNameArray (Just tree) (h:t) accu =
let names = map (\t -> fst $ rootLabel t) (subForest tree)
mbIdx = elemIndex h names
in case mbIdx of
Nothing -> Nothing
Just i -> treePathFromNameArray (Just (subForest tree !! i)) t (i:accu)
treePathFromNameArray Nothing _ _ = Nothing
--}
{--
extractSuperclasses :: Text -> [Text]
extractSuperclasses str =
let parseRes = trace ("now extracting superclasses for " ++ show str)
parse superclassParser "" str
in case parseRes of
Left err -> throwIDE $show err
Right l -> trace ("found " ++ show l) l
lexer = haskell
lexeme = P.lexeme lexer
whiteSpace = P.whiteSpace lexer
symbol = P.symbol lexer
superclassParser :: CharParser () [Text]
superclassParser = do
symbol "class"
whiteSpace
try (do
sc <- classDefParser
symbol "=>"
return [sc])
<|> try (do
symbol "("
scs <- sepBy classDefParser (char ',')
symbol ")"
symbol "=>"
return scs)
<|> return []
<?> "superclasses"
classDefParser :: CharParser () Text
classDefParser = do
whiteSpace
c <- oneOf['A'..'Z']
cs <- many (alphaNum <|> oneOf "_'.")
many typeVarParser
return (c:cs)
<?> "classDef"
typeVarParser :: CharParser () Text
typeVarParser = do
whiteSpace
c <- oneOf['a'..'z']
cs <- many (alphaNum <|> oneOf "_'.")
return (c:cs)
<?> "typeVar"
--}
|
ChrisLane/leksah
|
src/IDE/Pane/ClassHierarchy.hs
|
gpl-2.0
| 38,980 | 0 | 15 | 14,866 | 2,323 | 1,209 | 1,114 | -1 | -1 |
-- To run, package aivika-experiment-cairo must be installed.
import Simulation.Aivika.Experiment
import Simulation.Aivika.Experiment.Chart
import Simulation.Aivika.Experiment.Chart.Backend.Cairo
import Graphics.Rendering.Chart.Backend.Cairo
import Model
import Experiment
main = runExperimentParallel experiment generators (WebPageRenderer (CairoRenderer PNG) experimentFilePath) model
|
dsorokin/aivika-experiment-chart
|
examples/TruckHaulingSituation/MainUsingCairo.hs
|
bsd-3-clause
| 392 | 0 | 9 | 34 | 66 | 41 | 25 | 7 | 1 |
module Lam1 where
data Expr = Var Int Int |
Add Expr Expr
f = (\(Var x y) -> x + y)
|
mpickering/HaRe
|
old/testing/removeField/Lam1.hs
|
bsd-3-clause
| 98 | 0 | 8 | 36 | 47 | 27 | 20 | 4 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module Bug (await, bug) where
import Data.Typeable
import Data.Functor
import Control.Exception
data Attempt α = Success α
| ∀ e . Exception e ⇒ Failure e
fromAttempt ∷ Attempt α → IO α
fromAttempt (Success a) = return a
fromAttempt (Failure e) = throwIO e
data Inject f α = ∀ β . Inject (f β) (α → β)
class Completable f where
complete ∷ f α → α → IO Bool
instance Completable f ⇒ Completable (Inject f) where
complete (Inject f inj) = complete f . inj
class Awaitable f where
awaitResult ∷ f α → IO α
data FakeFuture α = FakeFuture
instance Completable FakeFuture where
complete _ _ = undefined
instance Awaitable FakeFuture where
awaitResult _ = undefined
class WaitOp op where
type WaitOpResult op
registerWaitOp ∷ Completable f
⇒ op → f (Attempt (WaitOpResult op)) → IO Bool
await ∷ WaitOp op ⇒ op → IO (WaitOpResult op)
await op = do
let fut = FakeFuture
registerWaitOp op fut
fromAttempt =<< awaitResult fut
data FakeOp α = FakeOp
instance WaitOp (FakeOp α) where
type WaitOpResult (FakeOp α) = α
registerWaitOp _ _ = return True
data WaitOps rs where
WaitOp ∷ WaitOp op ⇒ op → WaitOps (HSingle (WaitOpResult op))
(:?) ∷ (WaitOp op, HNonEmpty rs)
⇒ op → WaitOps rs → WaitOps (WaitOpResult op :* rs)
waitOpsNonEmpty ∷ ∀ rs . WaitOps rs → HNonEmptyInst rs
waitOpsNonEmpty (WaitOp _) = HNonEmptyInst
waitOpsNonEmpty (_ :? _) = HNonEmptyInst
infixr 7 .?
infix 8 .?.
(.?) ∷ WaitOp op ⇒ op → WaitOps rs → WaitOps (WaitOpResult op :* rs)
op .? ops = case waitOpsNonEmpty ops of
HNonEmptyInst → op :? ops
(.?.) ∷ (WaitOp op1, WaitOp op2) ⇒ op1 → op2
→ WaitOps (WaitOpResult op1 :*: WaitOpResult op2)
op1 .?. op2 = op1 .? WaitOp op2
data NthException n e = NthException (Peano n) e deriving (Typeable, Show)
instance (Typeable n, Exception e) ⇒ Exception (NthException n e)
instance WaitOp (WaitOps rs) where
type WaitOpResult (WaitOps rs) = HElemOf rs
registerWaitOp ops ev = do
let inj n (Success r) = Success (HNth n r)
inj n (Failure e) = Failure (NthException n e)
register ∷ ∀ n . HDropClass n rs
⇒ Bool → Peano n → WaitOps (HDrop n rs) → IO Bool
register first n (WaitOp op) = do
t ← try $ registerWaitOp op (Inject ev $ inj n)
r ← case t of
Right r → return r
Left e → complete ev $ inj n $ Failure (e ∷ SomeException)
return $ r || not first
register first n (op :? ops') = do
t ← try $ registerWaitOp op (Inject ev $ inj n)
case t of
Right True → case waitOpsNonEmpty ops' of
HNonEmptyInst → case hTailDropComm ∷ HTailDropComm n rs of
HTailDropComm → register False (PSucc n) ops'
Right False → return $ not first
Left e → do
c ← complete ev $ inj n $ Failure (e ∷ SomeException)
return $ c || not first
case waitOpsNonEmpty ops of
HNonEmptyInst → register True PZero ops
bug ∷ IO Int
bug = do
temp ← await ((FakeOp ∷ FakeOp Int) .?. (FakeOp ∷ FakeOp String))
case temp of
(elem0 → Just _) → return 0
_ → return 1
data PZero deriving Typeable
data PSucc p deriving Typeable
data Peano n where
PZero ∷ Peano PZero
PSucc ∷ IsPeano p ⇒ Peano p → Peano (PSucc p)
instance Show (Peano n) where
show n = show (peanoNum n ∷ Int)
peanoNum ∷ Num n ⇒ Peano p → n
peanoNum PZero = 0
peanoNum (PSucc p) = 1 + peanoNum p
class Typeable n ⇒ IsPeano n where
peano ∷ Peano n
instance IsPeano PZero where
peano = PZero
instance IsPeano p ⇒ IsPeano (PSucc p) where
peano = PSucc peano
class (n ~ PSucc (PPred n)) ⇒ PHasPred n where
type PPred n
instance PHasPred (PSucc p) where
type PPred (PSucc p) = p
pPred ∷ Peano (PSucc p) → Peano p
pPred (PSucc p) = p
infixr 7 :*, .*
infix 8 :*:, .*.
data HNil
data h :* t
type HSingle α = α :* HNil
type α :*: β = α :* β :* HNil
data HList l where
HNil ∷ HList HNil
(:*) ∷ HListClass t ⇒ h → HList t → HList (h :* t)
instance Show (HList HNil) where
show _ = "HNil"
instance (Show h, Show (HList t)) ⇒ Show (HList (h :* t)) where
showsPrec d (h :* t) = showParen (d > 7) $
showsPrec 8 h . showString " .* " . showsPrec 7 t
(.*) ∷ HListClass t ⇒ h → HList t → HList (h :* t)
(.*) = (:*)
(.*.) ∷ α → β → HList (α :*: β)
a .*. b = a .* b .* HNil
data HListWitness l where
HNilList ∷ HListWitness HNil
HConsList ∷ HListClass t ⇒ HListWitness (h :* t)
class HListClass l where
hListWitness ∷ HListWitness l
instance HListClass HNil where
hListWitness = HNilList
instance HListClass t ⇒ HListClass (h :* t) where
hListWitness = HConsList
data HListInst l where
HListInst ∷ HListClass l ⇒ HListInst l
hListInst ∷ HList l → HListInst l
hListInst HNil = HListInst
hListInst (_ :* _) = HListInst
class (l ~ (HHead l :* HTail l), HListClass (HTail l)) ⇒ HNonEmpty l where
type HHead l
type HTail l
instance HListClass t ⇒ HNonEmpty (h :* t) where
type HHead (h :* t) = h
type HTail (h :* t) = t
hHead ∷ HList (h :* t) → h
hHead (h :* _) = h
hTail ∷ HList (h :* t) → HList t
hTail (_ :* t) = t
data HNonEmptyInst l where
HNonEmptyInst ∷ HListClass t ⇒ HNonEmptyInst (h :* t)
data HDropWitness n l where
HDropZero ∷ HListClass l ⇒ HDropWitness PZero l
HDropSucc ∷ HDropClass p t ⇒ HDropWitness (PSucc p) (h :* t)
class (IsPeano n, HListClass l, HListClass (HDrop n l)) ⇒ HDropClass n l where
type HDrop n l
hDropWitness ∷ HDropWitness n l
instance HListClass l ⇒ HDropClass PZero l where
type HDrop PZero l = l
hDropWitness = HDropZero
instance HDropClass p t ⇒ HDropClass (PSucc p) (h :* t) where
type HDrop (PSucc p) (h :* t) = HDrop p t
hDropWitness = case hDropWitness ∷ HDropWitness p t of
HDropZero → HDropSucc
HDropSucc → HDropSucc
data HDropInst n l where
HDropInst ∷ HDropClass n l ⇒ HDropInst n l
hDrop ∷ ∀ n l . HDropClass n l ⇒ Peano n → HList l → HList (HDrop n l)
hDrop n l = case hDropWitness ∷ HDropWitness n l of
HDropZero → l
HDropSucc → hDrop (pPred n) (hTail l)
data HNonEmptyDropInst n l where
HNonEmptyDropInst ∷ (HDropClass n l, HNonEmpty l,
HDropClass (PSucc n) l, HNonEmpty (HDrop n l))
⇒ HNonEmptyDropInst n l
pPrevDropInst ∷ ∀ n l . HDropClass (PSucc n) l ⇒ HNonEmptyDropInst n l
pPrevDropInst = case hDropWitness ∷ HDropWitness (PSucc n) l of
HDropSucc → case hDropWitness ∷ HDropWitness n (HTail l) of
HDropZero → HNonEmptyDropInst
HDropSucc → case pPrevDropInst ∷ HNonEmptyDropInst (PPred n) (HTail l) of
HNonEmptyDropInst → HNonEmptyDropInst
hNextDropInst ∷ ∀ n l . (HDropClass n l, HNonEmpty (HDrop n l))
⇒ HNonEmptyDropInst n l
hNextDropInst = case hDropWitness ∷ HDropWitness n l of
HDropZero → HNonEmptyDropInst
HDropSucc → case hNextDropInst ∷ HNonEmptyDropInst (PPred n) (HTail l) of
HNonEmptyDropInst → HNonEmptyDropInst
data HTailDropComm n l where
HTailDropComm ∷ (HNonEmpty l, HDropClass n l,
HNonEmpty (HDrop n l), HDropClass n (HTail l),
HDropClass (PSucc n) l,
HTail (HDrop n l) ~ HDrop n (HTail l),
HDrop (PSucc n) l ~ HTail (HDrop n l),
HDrop (PSucc n) l ~ HDrop n (HTail l))
⇒ HTailDropComm n l
hTailDropComm' ∷ ∀ n l . (HDropClass (PSucc n) l)
⇒ HTailDropComm n l
hTailDropComm' = case pPrevDropInst ∷ HNonEmptyDropInst n l of
HNonEmptyDropInst → hTailDropComm
hTailDropComm ∷ ∀ n l . (HDropClass n l, HNonEmpty (HDrop n l))
⇒ HTailDropComm n l
hTailDropComm = case hDropWitness ∷ HDropWitness n l of
HDropZero → HTailDropComm
HDropSucc → case hTailDropComm ∷ HTailDropComm (PPred n) (HTail l) of
HTailDropComm → HTailDropComm
type HNth n l = HHead (HDrop n l)
data HElemOf l where
HNth ∷ (HDropClass n l, HNonEmpty (HDrop n l))
⇒ Peano n → HNth n l → HElemOf l
hGetIfNth ∷ ∀ n l . (HDropClass n l, HNonEmpty (HDrop n l))
⇒ Peano n → HElemOf l → Maybe (HNth n l)
hGetIfNth PZero (HNth PZero x) = Just x
hGetIfNth (PSucc p) (HNth (PSucc p') x) =
case hDropWitness ∷ HDropWitness n l of
HDropSucc →
let inst ∷ ∀ m . HDropClass (PSucc m) l
⇒ Peano m → HTailDropComm m l
inst _ = hTailDropComm' in
case inst p' of
HTailDropComm → hGetIfNth p (HNth p' x ∷ HElemOf (HTail l))
_ → undefined
hGetIfNth _ _ = Nothing
elem0 ∷ HNonEmpty l ⇒ HElemOf l → Maybe (HHead l)
elem0 = hGetIfNth PZero
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/typecheck/should_compile/T5490.hs
|
bsd-3-clause
| 9,340 | 0 | 24 | 2,409 | 3,578 | 1,821 | 1,757 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Data.OpenCL.Kernel
( CLKernel()
, ArgIndex
, createKernel
, enqueueRangeKernel
, setKernelArgStorable
, setKernelArgBuffer
, setKernelArgPtr )
where
import Control.Concurrent.MVar
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Primitive
import qualified Data.ByteString as B
import Data.OpenCL.Exception
import Data.OpenCL.Event.Internal
import Data.OpenCL.Handle
import Data.OpenCL.Raw
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.Marshal.Utils
import Foreign.Ptr
import Foreign.Storable
type ArgIndex = Int
createKernel :: MonadIO m
=> CLProgram
-> B.ByteString
-> m CLKernel
createKernel (CLProgram prog_var) kernel_name = liftIO $ mask_ $ do
prog <- readMVar prog_var
flip finally (touch prog_var) $ B.useAsCString kernel_name $ \kernel_name_ptr ->
alloca $ \err_ptr -> do
kernel <- create_kernel prog kernel_name_ptr err_ptr
err <- peek err_ptr
clErrorify $ return err
kernel_var <- newMVar kernel
void $ mkWeakMVar kernel_var $ release_kernel kernel
return $ CLKernel kernel_var
setKernelArgPtr :: MonadIO m
=> CLKernel
-> Int
-> Int
-> Ptr ()
-> m ()
setKernelArgPtr (CLKernel kernel_var) arg_index arg_size arg_ptr = liftIO $ mask_ $ do
kernel <- readMVar kernel_var
flip finally (touch kernel_var) $
clErrorify $ set_kernel_arg kernel
(fromIntegral arg_index)
(fromIntegral arg_size)
arg_ptr
{-# INLINE setKernelArgPtr #-}
setKernelArgBuffer :: MonadIO m
=> CLKernel
-> ArgIndex
-> CLMem
-> m ()
setKernelArgBuffer (CLKernel kernel_var) arg_index (CLMem mem_var) = liftIO $ mask_ $ do
mem <- readMVar mem_var
kernel <- readMVar kernel_var
flip finally (touch mem_var >> touch kernel_var) $ do
with mem $ \mem_ptr ->
clErrorify $ set_kernel_arg kernel
(fromIntegral arg_index)
(fromIntegral $ sizeOf (undefined :: CMem))
(castPtr mem_ptr)
{-# INLINE setKernelArgBuffer #-}
setKernelArgStorable :: forall s m. (Storable s, MonadIO m)
=> CLKernel
-> Int
-> s
-> m ()
setKernelArgStorable kernel arg_index storable = liftIO $
with storable $ \storable_ptr ->
setKernelArgPtr kernel arg_index (sizeOf (undefined :: s)) (castPtr storable_ptr)
{-# INLINE setKernelArgStorable #-}
enqueueRangeKernel :: MonadIO m
=> CLCommandQueue
-> CLKernel
-> [Int]
-> [Int]
-> [Int]
-> [CLEvent]
-> m CLEvent
enqueueRangeKernel (CLCommandQueue command_var) (CLKernel kernel_var)
offset work_size workgroup_size
wait_events
| length offset /= length work_size ||
length offset /= length workgroup_size ||
length work_size /= length workgroup_size
= error "enqueueRangeKernel: dimensions of offset, work size and workgroup size must be the same."
| length offset < 1 || length offset > 3
= error "enqueueRangeKernel: dimensions must be between 1 and 3."
| otherwise = liftIO $ mask_ $ do
command <- readMVar command_var
kernel <- readMVar kernel_var
flip finally (do touch command_var
touch kernel_var) $
withArray (fmap fromIntegral offset) $ \offset_arr ->
withArray (fmap fromIntegral work_size) $ \work_arr ->
withArray (fmap fromIntegral workgroup_size) $ \workgroup_arr ->
doEnqueueing
(enqueue_range_kernel command
kernel
(fromIntegral $ length offset)
offset_arr
work_arr
workgroup_arr)
wait_events
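-- A usage sketch (the bindings 'queue' and 'kernel' are assumed to come from
-- the surrounding program, e.g. a command queue plus a kernel obtained via
-- 'createKernel' above): enqueue a 1-D kernel over 1024 work items in
-- workgroups of 64, with no offset and no event dependencies.
--
-- > ev <- enqueueRangeKernel queue kernel [0] [1024] [64] []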
|
Noeda/opencl-bindings
|
src/Data/OpenCL/Kernel.hs
|
isc
| 4,231 | 0 | 20 | 1,438 | 997 | 498 | 499 | 113 | 1 |
module Main where
import Prelude hiding (Left, Right)
import System.IO
import Data.Char (chr)
import Control.Concurrent (threadDelay)
import Text.Printf (printf)
import Data.List (intercalate)
import Control.Monad (join, when)
import Data.Function (fix)
import Data.Maybe (catMaybes)
import System.Random
import System.Environment (getArgs)
import Powers
type Render = World -> IO ()
renderAsciiLines :: String -> String -> ([Int] -> String) -> String -> Render
renderAsciiLines top bottom renderLine sep world = do
putStrLn top
putStrLn $ intercalate sep $ map renderLine world
putStrLn bottom
putStrLn ""
renderAsciiSimple :: Render
renderAsciiSimple = renderAsciiLines simpleLine simpleLine showLine "\n" where
simpleLine = "+---------------------+"
showCell :: Int -> String
showCell 0 = " "
showCell n = printf "%5d" n
showLine :: [Int] -> String
showLine line = "|" ++ (concat $ map showCell line) ++ " |"
renderAsciiGrid :: Render
renderAsciiGrid = renderAsciiLines gridLine gridLine showLineGrid sep where
gridLine = "+-------+-------+-------+-------+"
sep = "\n" ++ gridLine ++ "\n"
showLineGrid line = '|' : (concat $ map showCellGrid line) where
showCellGrid 0 = " |"
showCellGrid n = printf "%6d |" n
charToDir :: Char -> Maybe Dir
charToDir 'w' = Just Up
charToDir 'a' = Just Left
charToDir 's' = Just Down
charToDir 'd' = Just Right
charToDir _ = Nothing
printHelp :: IO ()
printHelp = do
putStrLn "Use WASD keys to move the tiles."
putStrLn "Press Q to quit."
gameLoop :: RandomGen g => Handle -> g -> World -> Render -> IO ()
gameLoop i g w render = go g True i w where
go g needRender input world = do
when needRender $ render world
if gameOver world
then putStrLn "Game over!"
else do
        threadDelay 2000
ch <- readAll input ' '
when (ch /= 'q') $ case charToDir ch of
Just dir -> go nextG True input newWorld where
(nextG, newWorld) = update g dir world
Nothing -> go g False input world
gameOver :: World -> Bool
gameOver world = a == b && b == c && c == d where
a = step Up world
b = step Down world
c = step Left world
d = step Right world
readAll :: Handle -> Char -> IO (Char)
readAll h ch = do
gotIt <- hReady h
if gotIt
then hGetChar h >>= readAll h
else return ch
initial = [[ 0, 2, 0, 0],
[ 2, 0, 0, 0],
[ 0, 0, 0, 0],
[ 0, 0, 0, 0]]
main = do
args <- getArgs
hSetBuffering stdin NoBuffering --get input immediately
hSetBuffering stdout NoBuffering
hSetEcho stdin False --don't show the typed character
g <- getStdGen
let r = if null args then renderAsciiSimple else renderAsciiGrid
printHelp
gameLoop stdin g initial r
|
rybak/powers
|
Main.hs
|
mit
| 2,890 | 0 | 18 | 785 | 973 | 495 | 478 | 83 | 3 |
module Game.Level.Reader
( read
) where
import Prelude hiding (read)
import Data.List
import Game.Types
import Game.Level.Parser
levelPath :: FilePath
levelPath = "res/lvls/"
read :: Int -> IO GameLevel
read lvl = fmap (parseLevel . transform) $ readFile (levelPath ++ (show lvl) ++ ".txt")
transform :: String -> [ObjectSignColumn]
transform str = addPosition . transpose . reverse . lines $ str
where addPosition lvl = zip (map (\x -> zip x [0..]) lvl) [0..]
|
flomerz/SchaffschNie
|
src/Game/Level/Reader.hs
|
mit
| 485 | 0 | 13 | 97 | 179 | 98 | 81 | 13 | 1 |
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
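-- The list is lazily self-referential: each element is the sum of the two
-- before it, e.g. take 10 fibs == [0,1,1,2,3,5,8,13,21,34].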
|
MartinThoma/LaTeX-examples
|
documents/Programmierparadigmen/scripts/haskell/fibonacci-zip.hs
|
mit
| 43 | 0 | 8 | 11 | 29 | 15 | 14 | 1 | 1 |
module Euler.Problem018Test (suite) where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.HUnit
import Euler.Problem018
suite :: TestTree
suite = testGroup "Problem018"
[ testCase "four-row triangle" test4
, testCase "transform a one-row triangle" testTransformSingle
, testCase "transform a two-row triangle" testTransformDouble
        , testCase "transform a three-row triangle" testTransformTriple
, testCase "makes a triangle out of the contents of an input file" testMkTriangle
]
test4 :: Assertion
test4 = 23 @=? best [[3], [7, 4], [2, 4, 6], [8, 5, 9, 3]]
testTransformSingle :: Assertion
testTransformSingle = [[3]] @=? (transform $ take 1 example)
testTransformDouble :: Assertion
testTransformDouble = [[10, 7], [3]] @=? (transform $ take 2 example)
testTransformTriple :: Assertion
testTransformTriple = [[12, 14, 13], [10, 7], [3]] @=? (transform $ take 3 example)
example :: [[Int]]
example = [[3], [7, 4], [2, 4, 6], [8, 5, 9, 3]]
testMkTriangle :: Assertion
testMkTriangle = [[75], [95, 64], [17, 47, 82]] @=? mkTriangle "75\n95 64\n17 47 82\n"
|
whittle/euler
|
test/Euler/Problem018Test.hs
|
mit
| 1,122 | 0 | 8 | 210 | 377 | 226 | 151 | 23 | 1 |
module Tamien.GM (module X) where
import Tamien.GM.Compiler as X
import Tamien.GM.Eval as X
import Tamien.GM.State as X
import Tamien.Core as X
|
cpettitt/tamien
|
Tamien/GM.hs
|
mit
| 145 | 0 | 4 | 22 | 43 | 31 | 12 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2011 John Millikin <[email protected]>
--
-- See license.txt for details
module Main
( tests
, main
) where
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString
import Data.ByteString.Char8 ()
import Data.ByteString.Unsafe (unsafePackCStringLen)
import Foreign (nullPtr)
import qualified GHC.IO.Exception as GHC
import System.IO
import Test.Chell
import Data.Knob
main :: IO ()
main = Test.Chell.defaultMain tests
tests :: [Suite]
tests = [test_File, test_Duplex]
test_File :: Suite
test_File = suite "file" (fileTests ++ otherTests)
where
fileTests = concatMap suiteTests
[ suite "file seek"
[ test_SeekAbsolute
, test_SeekRelative
, test_SeekFromEnd
, test_SeekBeyondMaxInt
]
, suite "file setSize"
[ test_SetSize_Read
, test_SetSize_Write
, test_SetSize_ReadWrite
, test_SetSize_Append
]
]
otherTests = [ test_Ready
, test_Close
, test_SetContents
, test_WithFileHandle
]
test_SeekAbsolute :: Test
test_SeekAbsolute = assertions "absolute" $ do
k <- newKnob ""
h <- newFileHandle k "foo.txt" ReadMode
before <- liftIO $ hTell h
liftIO $ hSeek h AbsoluteSeek 2
after <- liftIO $ hTell h
$expect (equal before 0)
$expect (equal after 2)
test_SeekRelative :: Test
test_SeekRelative = assertions "relative" $ do
k <- newKnob ""
h <- newFileHandle k "foo.txt" ReadMode
before <- liftIO $ hTell h
liftIO $ hSeek h RelativeSeek 2
after1 <- liftIO $ hTell h
liftIO $ hSeek h RelativeSeek 2
after2 <- liftIO $ hTell h
$expect (equal before 0)
$expect (equal after1 2)
$expect (equal after2 4)
test_SeekFromEnd :: Test
test_SeekFromEnd = assertions "from-end" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadMode
before <- liftIO $ hTell h
liftIO $ hSeek h SeekFromEnd (- 2)
after <- liftIO $ hTell h
$expect (equal before 0)
$expect (equal after 3)
test_SeekBeyondMaxInt :: Test
test_SeekBeyondMaxInt = assertions "beyond-max-int" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadMode
let intPlusOne = toInteger (maxBound :: Int) + 1
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSeek" "offset > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSeek h AbsoluteSeek intPlusOne)
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSeek" "offset > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSeek h RelativeSeek intPlusOne)
-- testing this with real contents is difficult/impossible on a
-- 64-bit system, so use an unsafe function to corrupt the knob's
-- internal buffer first.
hugeBytes <- liftIO (unsafePackCStringLen (nullPtr, maxBound))
liftIO $ hSeek h AbsoluteSeek (intPlusOne - 1)
setContents k hugeBytes
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSeek" "offset > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSeek h SeekFromEnd 2)
test_Ready :: Test
test_Ready = assertions "ready" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadMode
ready <- liftIO $ hReady h
$expect ready
_ <- liftIO $ Data.ByteString.hGet h 10
$expect $ throwsEq
(GHC.IOError (Just h) GHC.EOF "hWaitForInput" "" Nothing (Just "foo.txt"))
(hReady h)
test_Close :: Test
test_Close = assertions "close" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadMode
liftIO $ hClose h
$expect $ throwsEq
(GHC.IOError (Just h) GHC.IllegalOperation "hGetBuf" "handle is closed" Nothing (Just "foo.txt"))
(Data.ByteString.hGet h 1)
$expect $ throwsEq
(GHC.IOError (Just h) GHC.IllegalOperation "hWaitForInput" "handle is closed" Nothing (Just "foo.txt"))
(hReady h)
test_SetSize_Read :: Test
test_SetSize_Read = assertions "ReadMode" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadMode
let intPlusOne = toInteger (maxBound :: Int) + 1
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSetFileSize" "size > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSetFileSize h intPlusOne)
$expect $ throwsEq
(GHC.IOError (Just h) GHC.IllegalOperation "hSetFileSize" "handle in ReadMode" Nothing (Just "foo.txt"))
(hSetFileSize h 2)
test_SetSize_Write :: Test
test_SetSize_Write = assertions "WriteMode" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" WriteMode
let intPlusOne = toInteger (maxBound :: Int) + 1
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSetFileSize" "size > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSetFileSize h intPlusOne)
-- Resets contents to all NULL, regardless of offset
liftIO $ hSeek h AbsoluteSeek 2
liftIO $ hSetFileSize h 4
bytes <- Data.Knob.getContents k
$expect (equal bytes "\0\0\0\0")
test_SetSize_ReadWrite :: Test
test_SetSize_ReadWrite = assertions "ReadWriteMode" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" ReadWriteMode
let intPlusOne = toInteger (maxBound :: Int) + 1
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSetFileSize" "size > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSetFileSize h intPlusOne)
-- Truncates contents, regardless of offset
do
liftIO $ hSeek h AbsoluteSeek 2
liftIO $ hSetFileSize h 4
bytes <- Data.Knob.getContents k
$expect (equal bytes "abcd")
do
liftIO $ hSetFileSize h 6
bytes <- Data.Knob.getContents k
$expect (equal bytes "abcd\0\0")
test_SetSize_Append :: Test
test_SetSize_Append = assertions "AppendMode" $ do
k <- newKnob "abcde"
h <- newFileHandle k "foo.txt" AppendMode
let intPlusOne = toInteger (maxBound :: Int) + 1
$expect $ throwsEq
(GHC.IOError (Just h) GHC.InvalidArgument "hSetFileSize" "size > (maxBound :: Int)" Nothing (Just "foo.txt"))
(hSetFileSize h intPlusOne)
do
liftIO $ hSetFileSize h 4
bytes <- Data.Knob.getContents k
$expect (equal bytes "abcd")
do
liftIO $ hSetFileSize h 6
bytes <- Data.Knob.getContents k
$expect (equal bytes "abcd\0\0")
test_SetContents :: Test
test_SetContents = assertions "setContents" $ do
k <- newKnob "abcde"
before <- Data.Knob.getContents k
setContents k "foo"
after <- Data.Knob.getContents k
$expect (equal before "abcde")
$expect (equal after "foo")
test_WithFileHandle :: Test
test_WithFileHandle = assertions "withFileHandle" $ do
k <- newKnob ""
h <- withFileHandle k "test.txt" WriteMode $ \h -> do
Data.ByteString.hPut h "abcde"
return h
bytes <- Data.Knob.getContents k
$expect (equal bytes "abcde")
closed <- liftIO $ hIsClosed h
$expect closed
test_Duplex :: Suite
test_Duplex = suite "duplex" []
|
fujimura/knob
|
test/KnobTests.hs
|
mit
| 7,229 | 0 | 14 | 1,839 | 2,160 | 1,020 | 1,140 | 179 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving,
DeriveFunctor,
TypeSynonymInstances,
FlexibleInstances #-}
module Codex.QuickCheck.C.Types
(
module Foreign.C,
module Foreign.C.Types,
module Foreign.Ptr,
module Foreign.Marshal.Array,
runC,
withCheckedArray,
withCheckedArrayLen,
ArrayOverflow(..),
CArray(..),
toArray,
fromArray,
showArray,
showsArray,
fromBool,
toBool,
CType(..)
) where
import Test.QuickCheck
import qualified Codex.QuickCheck.Modifiers as M
import Foreign
import Foreign.C
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Marshal.Array
import System.IO.Unsafe (unsafePerformIO)
import Control.Monad (replicateM, unless)
import Control.Exception
import System.Random
import Data.List (intersperse)
-- | exceptions for array index overflows
data ArrayOverflow
= ArrayBufferOverwrite
| ArrayBufferUnderwrite
deriving Show
instance Exception ArrayOverflow
-- | just a rename of unsafePerformIO (for convenience)
runC :: IO a -> a
runC = unsafePerformIO
-- | C array allocators with buffer overflow checks
withCheckedArray :: (Eq a, Random a, Storable a)
=> [a] -> (Ptr a -> IO b) -> IO b
withCheckedArray values k = withCheckedArrayLen values (const k)
withCheckedArrayLen :: (Eq a, Random a, Storable a)
=> [a] -> (Int -> Ptr a -> IO b) -> IO b
withCheckedArrayLen values action = do
-- setup random sequences for canaries
prefix <- replicateM canarySize randomIO
postfix <- replicateM canarySize randomIO
withArrayLen (prefix ++ values ++ postfix) $ \len ptr -> do
let ptr' = advancePtr ptr canarySize -- start of proper buffer
result <- action (len - 2*canarySize) ptr' -- run action
-- check canaries after execution
prefix' <- peekArray canarySize ptr
let ptr''= advancePtr ptr (len - canarySize) -- start of trailing canary
postfix'<- peekArray canarySize ptr''
unless (prefix' == prefix) $ throwIO ArrayBufferUnderwrite
unless (postfix' == postfix) $ throwIO ArrayBufferOverwrite
-- OK, passed checks
return result
-- | size of the canaries
canarySize :: Int
canarySize = 4
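-- A small usage sketch (the foreign import 'c_sum_ints' is hypothetical and
-- not part of this package): the canaries set up above make the call throw
-- ArrayBufferOverwrite or ArrayBufferUnderwrite if the C code writes outside
-- the buffer it was given.
--
-- > foreign import ccall "sum_ints" c_sum_ints :: Ptr CInt -> CInt -> IO CInt
-- >
-- > sumChecked :: [CInt] -> CInt
-- > sumChecked xs = runC $
-- >   withCheckedArrayLen xs $ \len ptr -> c_sum_ints ptr (fromIntegral len)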
-- | utility functions
newtype CArray a
= CArray [a] deriving (Eq, Functor, Foldable)
toArray :: [a] -> CArray a
toArray = CArray
fromArray :: CArray a -> [a]
fromArray (CArray xs) = xs
instance Show a => Show (CArray a) where
showsPrec _ (CArray xs) = showsArray xs
instance Arbitrary a => Arbitrary (CArray a) where
arbitrary = CArray <$> arbitrary
shrink (CArray xs) = map CArray (shrink xs)
-- | show lists with C-style array literal syntax
showArray :: Show a => [a] -> String
showArray xs = showsArray xs ""
showsArray :: Show a => [a] -> ShowS
showsArray xs
= ('{':) . (foldr (.) id $ intersperse (',':) $ map shows xs) . ('}':)
--type Name = String
-- | show C type name for a given variable
class CType t where
cbase :: t -> ShowS
cmodifiers :: t -> ShowS
instance CType CInt where
cbase v = ("int "++)
cmodifiers v = id
instance CType Int where
cbase v = ("int "++)
cmodifiers v = id
instance CType (M.NonNegative CInt) where
cbase v = ("int " ++)
cmodifiers v = id
instance CType (M.Positive CInt) where
cbase v = ("int " ++)
cmodifiers v = id
instance CType CDouble where
cbase v = ("double " ++)
cmodifiers v = id
instance CType CUInt where
cbase v = ("unsigned " ++)
cmodifiers v = id
instance CType Char where
cbase v = ("char "++)
cmodifiers v = id
instance CType CChar where
cbase v = ("char "++)
cmodifiers v = id
instance CType String where
cbase v = ("char *"++)
cmodifiers v = id
instance CType a => CType (CArray a) where
cbase (CArray vs)
= cbase (head vs)
cmodifiers (CArray vs)
= ("["++) . shows n . ("]"++) . cmodifiers (head vs)
where n = length vs
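-- A sketch of how 'cbase' and 'cmodifiers' combine (the helper 'cdecl' is
-- hypothetical, not exported by this module): rendering a C declaration for a
-- generated test value.
--
-- > cdecl :: CType t => String -> t -> String
-- > cdecl name v = cbase v (name ++ cmodifiers v "")
-- >
-- > -- cdecl "xs" (toArray [1, 2, 3 :: CInt]) == "int xs[3]"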
|
pbv/codex-quickcheck
|
src/Codex/QuickCheck/C/Types.hs
|
mit
| 3,879 | 0 | 16 | 876 | 1,221 | 655 | 566 | 112 | 1 |
module Main where
import Bench
main :: IO ()
main = runBench
|
banacorn/Graphentheoretische-Paralleler-Algorithmus
|
src/Main.hs
|
mit
| 63 | 0 | 6 | 14 | 22 | 13 | 9 | 4 | 1 |
qcksrt :: (Ord a) => [a] -> [a]
qcksrt [] = []
qcksrt (a:al) =
let smOrEq = [x | x <- al, x <= a]
larger = [x | x <- al, x > a]
in qcksrt smOrEq ++ [a] ++ qcksrt larger
-- Mergesort
-- split in half
-- m = length of input list / 2
-- recursive sorts
-- sort a[1..m] i.e., Left list, call mergesort on the list recursively
-- sort a[m+1..n] i.e., Right list call mergesort on the list recursively
-- merge sorted sub-arrays using temp array
-- b = copy of a[1..m]
-- while i <= m and j <= n,
-- a[k++] = (a[j] < b[i]) ? a[j++] : b[i++]
-- invariant: a[1..k] in final position
-- while i <= m,
-- a[k++] = b[i++]
-- invariant: a[1..k] in final position
-- split function returns two lists from one input list
split :: [a] -> ([a],[a])
split x =
let m = div (length x) 2
in splitAt m x
-- merge as per the merge algorithm
merge :: (Ord a) => [a] -> [a] -> [a]
merge x [] = x ++ []
merge [] y = y ++ []
merge (x:xs) (y:ys)
| x <= y = x:merge xs (y:ys)
| otherwise = y:merge (x:xs) ys
-- combination of split, mergesrt and merge
mergesrt :: (Ord a) => [a] -> [a]
mergesrt [] = []
mergesrt [x] = [x]
mergesrt x =
let (l,r) = split x -- split list into Left (l) list and Right (r) list
in merge (mergesrt l) (mergesrt r) -- recursively sort left and right lists and then merge them
-- Insertion sort
insertionsrt :: (Ord a) => [a] -> [a]
insertionsrt [] = []
insertionsrt [x] = [x]
insertionsrt (x:xs) = insert x (insertionsrt xs)
insert :: (Ord a) => a -> [a] -> [a]
insert x [] = [x]
insert x (y:ys)
| x < y = x:y:ys
| otherwise = y:(insert x ys)
insertAlt x ys =
let lesser = [y | y <- ys, x > y]
greater = [y | y <- ys, x <= y]
in lesser ++ [x] ++ greater
-- selection sort
min' :: Ord a => [a] -> a
min' [x] = x
min' (x:y:xs)
| x > y = min' (y:xs)
| otherwise = min' (x:xs)
swap :: Eq a => a -> [a] -> [a]
swap x [] = [x]
swap a (x:ys) = a:[if y == a then x else y | y <- ys]
-- recursion seems ugly. need to fix this
swapAlt :: Eq a => a -> [a] -> [a]
swapAlt x [] = [x]
swapAlt a (x:y:xs)
| x == a = x:y:xs
| y == a = swapAlt a (y:x:xs)
| otherwise = swapAlt a (y:swapAlt a (x:xs))
selectionsrt :: (Eq a, Ord a) => [a] -> [a]
selectionsrt [] = []
selectionsrt [x] = [x]
selectionsrt (x:xs) =
let min = min' (x:xs) --get the min element from unsorted list
(_:swapped) = swapAlt min (x:xs) --swap the min element with 1st element in list
in min:selectionsrt swapped --recursively sort the remaining unsorted list
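-- Quick sanity check (a sketch, not part of the original file): all four
-- sorts agree on the same input, e.g.
-- qcksrt [5,3,8,1,9,2] == [1,2,3,5,8,9]
-- and likewise for mergesrt, insertionsrt and selectionsrt.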
|
zeusdeux/sorts-and-more
|
haskell/sorts.hs
|
mit
| 2,586 | 0 | 11 | 701 | 1,134 | 600 | 534 | 56 | 2 |
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Safe #-}
{-|
Module : MPD.Core.Wire.Parser
Copyright : (c) Joachim Fasting, 2015
License : MIT
Maintainer : [email protected]
Stability : unstable
Portability : unportable
-}
module MPD.Core.Wire.Parser where
import qualified Data.Attoparsec.ByteString.Char8 as A
import qualified Data.ByteString.Char8 as SB
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
type ProtocolVersion = (Int, Int, Int)
heloP :: A.Parser ProtocolVersion
heloP = "OK MPD " *> ((,,) <$> A.decimal <* A.char '.'
<*> A.decimal <* A.char '.'
<*> A.decimal <* A.char '\n')
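-- For example (a sketch): the greeting sent by the server parses to its
-- protocol version triple.
--
-- > A.parseOnly heloP "OK MPD 0.19.0\n" == Right (0, 19, 0)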
protocolErrorP :: A.Parser (Int, Int, T.Text, T.Text)
protocolErrorP = (,,,) <$> -- note: expect that we've already parsed "ACK "
(A.char '[' *> A.decimal <* A.char '@') <*>
(A.decimal <* A.string "] {") <*>
(T.decodeUtf8 <$> A.takeWhile1 (/= '}') <* A.string "} ") <*>
(T.decodeUtf8 <$> A.takeWhile1 (/= '\n')) {- <* A.char '\n')) -}
responseP :: A.Parser a -> A.Parser (Either SB.ByteString a)
responseP p = A.eitherP
("ACK " *> A.takeWhile1 (/= '\n') <* A.char '\n')
(p <* "list_OK\n")
|
joachifm/nanompd
|
src/MPD/Core/Wire/Parser.hs
|
mit
| 1,249 | 0 | 13 | 270 | 346 | 196 | 150 | 23 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Shakespeare.I18N
-- Copyright : 2012 Michael Snoyman <[email protected]>, Jeremy Shaw
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : Michael Snoyman <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- This module provides a type-based system for providing translations
-- for text strings.
--
-- It is similar in purpose to gettext or Java message bundles.
--
-- The core idea is to create a simple data type where each constructor
-- represents a phrase, sentence, paragraph, etc. For example:
--
-- > data AppMessages = Hello | Goodbye
--
-- The 'RenderMessage' class is used to retrieve the appropriate
-- translation for a message value:
--
-- > class RenderMessage master message where
-- > renderMessage :: master -- ^ type that specifies which set of translations to use
-- > -> [Lang] -- ^ acceptable languages in descending order of preference
-- > -> message -- ^ message to translate
-- > -> Text
--
-- Defining the translation type and providing the 'RenderMessage'
-- instance in Haskell is not very translator friendly. Instead,
-- translations are generally provided in external translations
-- files. Then the 'mkMessage' Template Haskell function is used to
-- read the external translation files and automatically create the
-- translation type and the @RenderMessage@ instance.
--
-- A full description of using this module to create translations for @Hamlet@ can be found here:
--
-- <http://www.yesodweb.com/book/internationalization>
--
-- A full description of using the module to create translations for @HSP@ can be found here:
--
-- <http://happstack.com/docs/crashcourse/Templates.html#hsp-i18n>
--
-- You can also adapt those instructions for use with other systems.
module Text.Shakespeare.I18N
( mkMessage
, mkMessageFor
, mkMessageVariant
, RenderMessage (..)
, ToMessage (..)
, SomeMessage (..)
, Lang
) where
import Language.Haskell.TH.Syntax
import Control.Applicative ((<$>))
import Control.Monad (filterM, forM)
import Data.Text (Text, pack, unpack)
import System.Directory
import Data.Maybe (catMaybes)
import Data.List (isSuffixOf, sortBy, foldl')
import qualified Data.Map as Map
import qualified Data.ByteString as S
import Data.Text.Encoding (decodeUtf8)
import Data.Char (isSpace, toLower, toUpper)
import Data.Ord (comparing)
import Text.Shakespeare.Base (Deref (..), Ident (..), parseHash, derefToExp)
import Text.ParserCombinators.Parsec (parse, many, eof, many1, noneOf, (<|>))
import Control.Arrow ((***))
import Data.Monoid (mempty, mappend)
import qualified Data.Text as T
import Data.String (IsString (fromString))
-- | 'ToMessage' is used to convert the value inside #{ } to 'Text'
--
-- The primary purpose of this class is to allow the value in #{ } to
-- be a 'String' or 'Text' rather than forcing it to always be 'Text'.
class ToMessage a where
toMessage :: a -> Text
instance ToMessage Text where
toMessage = id
instance ToMessage String where
toMessage = Data.Text.pack
-- | the 'RenderMessage' class is used to provide translations for message types
--
-- The 'master' argument exists so that it is possible to provide more
-- than one set of translations for a 'message' type. This is useful
-- if a library provides a default set of translations, but the user
-- of the library wants to provide a different set of translations.
class RenderMessage master message where
renderMessage :: master -- ^ type that specifies which set of translations to use
-> [Lang] -- ^ acceptable languages in descending order of preference
-> message -- ^ message to translate
-> Text
instance RenderMessage master Text where
renderMessage _ _ = id
-- | an RFC1766 / ISO 639-1 language code (eg, @fr@, @en-GB@, etc).
type Lang = Text
-- |generate translations from translation files
--
-- This function will:
--
-- 1. look in the supplied subdirectory for files ending in @.msg@
--
-- 2. generate a type based on the constructors found
--
-- 3. create a 'RenderMessage' instance
--
mkMessage :: String -- ^ base name to use for translation type
-> FilePath -- ^ subdirectory which contains the translation files
-> Lang -- ^ default translation language
-> Q [Dec]
mkMessage dt folder lang =
mkMessageCommon True "Msg" "Message" dt dt folder lang
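-- For example (a sketch, following the conventions described above): given a
-- file @messages/en.msg@ containing
--
-- > Hello: Hello, world!
-- > AgeQuestion name@Text: How old are you, #{name}?
--
-- the splice
--
-- > data App = App
-- > mkMessage "App" "messages" "en"
--
-- generates a datatype @AppMessage@ with constructors @MsgHello@ and
-- @MsgAgeQuestion@, plus a @RenderMessage App AppMessage@ instance.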
-- | create 'RenderMessage' instance for an existing data-type
mkMessageFor :: String -- ^ master translation data type
-> String -- ^ existing type to add translations for
-> FilePath -- ^ path to translation folder
-> Lang -- ^ default language
-> Q [Dec]
mkMessageFor master dt folder lang = mkMessageCommon False "" "" master dt folder lang
-- | create an additional set of translations for a type created by `mkMessage`
mkMessageVariant :: String -- ^ master translation data type
-> String -- ^ existing type to add translations for
-> FilePath -- ^ path to translation folder
-> Lang -- ^ default language
-> Q [Dec]
mkMessageVariant master dt folder lang = mkMessageCommon False "Msg" "Message" master dt folder lang
-- |used by 'mkMessage' and 'mkMessageFor' to generate a 'RenderMessage' and possibly a message data type
mkMessageCommon :: Bool -- ^ generate a new datatype from the constructors found in the .msg files
-> String -- ^ string to append to constructor names
-> String -- ^ string to append to datatype name
-> String -- ^ base name of master datatype
-> String -- ^ base name of translation datatype
-> FilePath -- ^ path to translation folder
-> Lang -- ^ default lang
-> Q [Dec]
mkMessageCommon genType prefix postfix master dt folder lang = do
files <- qRunIO $ getDirectoryContents folder
let files' = filter (`notElem` [".", ".."]) files
(_files', contents) <- qRunIO $ fmap (unzip . catMaybes) $ mapM (loadLang folder) files'
#ifdef GHC_7_4
mapM_ qAddDependentFile $ concat _files'
#endif
let contents' = Map.toList $ Map.fromListWith (++) contents
sdef <-
case lookup lang contents' of
Nothing -> error $ "Did not find main language file: " ++ unpack lang
Just def -> toSDefs def
mapM_ (checkDef sdef) $ map snd contents'
let mname = mkName $ dt ++ postfix
c1 <- fmap concat $ mapM (toClauses prefix dt) contents'
c2 <- mapM (sToClause prefix dt) sdef
c3 <- defClause
return $
( if genType
then ((DataD [] mname [] (map (toCon dt) sdef) []) :)
else id)
[ InstanceD
[]
(ConT ''RenderMessage `AppT` (ConT $ mkName master) `AppT` ConT mname)
[ FunD (mkName "renderMessage") $ c1 ++ c2 ++ [c3]
]
]
toClauses :: String -> String -> (Lang, [Def]) -> Q [Clause]
toClauses prefix dt (lang, defs) =
mapM go defs
where
go def = do
a <- newName "lang"
(pat, bod) <- mkBody dt (prefix ++ constr def) (map fst $ vars def) (content def)
guard <- fmap NormalG [|$(return $ VarE a) == pack $(lift $ unpack lang)|]
return $ Clause
[WildP, ConP (mkName ":") [VarP a, WildP], pat]
(GuardedB [(guard, bod)])
[]
mkBody :: String -- ^ datatype
-> String -- ^ constructor
-> [String] -- ^ variable names
-> [Content]
-> Q (Pat, Exp)
mkBody dt cs vs ct = do
vp <- mapM go vs
let pat = RecP (mkName cs) (map (varName dt *** VarP) vp)
let ct' = map (fixVars vp) ct
pack' <- [|Data.Text.pack|]
tomsg <- [|toMessage|]
let ct'' = map (toH pack' tomsg) ct'
mapp <- [|mappend|]
let app a b = InfixE (Just a) mapp (Just b)
e <-
case ct'' of
[] -> [|mempty|]
[x] -> return x
(x:xs) -> return $ foldl' app x xs
return (pat, e)
where
toH pack' _ (Raw s) = pack' `AppE` SigE (LitE (StringL s)) (ConT ''String)
toH _ tomsg (Var d) = tomsg `AppE` derefToExp [] d
go x = do
let y = mkName $ '_' : x
return (x, y)
fixVars vp (Var d) = Var $ fixDeref vp d
fixVars _ (Raw s) = Raw s
fixDeref vp (DerefIdent (Ident i)) = DerefIdent $ Ident $ fixIdent vp i
fixDeref vp (DerefBranch a b) = DerefBranch (fixDeref vp a) (fixDeref vp b)
fixDeref _ d = d
fixIdent vp i =
case lookup i vp of
Nothing -> i
Just y -> nameBase y
sToClause :: String -> String -> SDef -> Q Clause
sToClause prefix dt sdef = do
(pat, bod) <- mkBody dt (prefix ++ sconstr sdef) (map fst $ svars sdef) (scontent sdef)
return $ Clause
[WildP, ConP (mkName "[]") [], pat]
(NormalB bod)
[]
defClause :: Q Clause
defClause = do
a <- newName "sub"
c <- newName "langs"
d <- newName "msg"
rm <- [|renderMessage|]
return $ Clause
[VarP a, ConP (mkName ":") [WildP, VarP c], VarP d]
(NormalB $ rm `AppE` VarE a `AppE` VarE c `AppE` VarE d)
[]
toCon :: String -> SDef -> Con
toCon dt (SDef c vs _) =
RecC (mkName $ "Msg" ++ c) $ map go vs
where
go (n, t) = (varName dt n, NotStrict, ConT $ mkName t)
varName :: String -> String -> Name
varName a y =
mkName $ concat [lower a, "Message", upper y]
where
lower (x:xs) = toLower x : xs
lower [] = []
upper (x:xs) = toUpper x : xs
upper [] = []
checkDef :: [SDef] -> [Def] -> Q ()
checkDef x y =
go (sortBy (comparing sconstr) x) (sortBy (comparing constr) y)
where
go _ [] = return ()
go [] (b:_) = error $ "Extra message constructor: " ++ constr b
go (a:as) (b:bs)
| sconstr a < constr b = go as (b:bs)
| sconstr a > constr b = error $ "Extra message constructor: " ++ constr b
| otherwise = do
go' (svars a) (vars b)
go as bs
go' ((an, at):as) ((bn, mbt):bs)
| an /= bn = error "Mismatched variable names"
| otherwise =
case mbt of
Nothing -> go' as bs
Just bt
| at == bt -> go' as bs
| otherwise -> error "Mismatched variable types"
go' [] [] = return ()
    go' _ _ = error "Mismatched variable count"
toSDefs :: [Def] -> Q [SDef]
toSDefs = mapM toSDef
toSDef :: Def -> Q SDef
toSDef d = do
vars' <- mapM go $ vars d
return $ SDef (constr d) vars' (content d)
where
go (a, Just b) = return (a, b)
go (a, Nothing) = error $ "Main language missing type for " ++ show (constr d, a)
data SDef = SDef
{ sconstr :: String
, svars :: [(String, String)]
, scontent :: [Content]
}
data Def = Def
{ constr :: String
, vars :: [(String, Maybe String)]
, content :: [Content]
}
(</>) :: FilePath -> FilePath -> FilePath
path </> file = path ++ '/' : file
loadLang :: FilePath -> FilePath -> IO (Maybe ([FilePath], (Lang, [Def])))
loadLang folder file = do
let file' = folder </> file
isFile <- doesFileExist file'
if isFile && ".msg" `isSuffixOf` file
then do
let lang = pack $ reverse $ drop 4 $ reverse file
defs <- loadLangFile file'
return $ Just ([file'], (lang, defs))
else do
isDir <- doesDirectoryExist file'
if isDir
then do
let lang = pack file
(files, defs) <- unzip <$> loadLangDir file'
return $ Just (files, (lang, concat defs))
else
return Nothing
loadLangDir :: FilePath -> IO [(FilePath, [Def])]
loadLangDir folder = do
paths <- map (folder </>) . filter (`notElem` [".", ".."]) <$> getDirectoryContents folder
files <- filterM doesFileExist paths
dirs <- filterM doesDirectoryExist paths
langFiles <-
forM files $ \file -> do
if ".msg" `isSuffixOf` file
then do
defs <- loadLangFile file
return $ Just (file, defs)
else do
return Nothing
langDirs <- mapM loadLangDir dirs
return $ catMaybes langFiles ++ concat langDirs
loadLangFile :: FilePath -> IO [Def]
loadLangFile file = do
bs <- S.readFile file
let s = unpack $ decodeUtf8 bs
defs <- fmap catMaybes $ mapM (parseDef . T.unpack . T.strip . T.pack) $ lines s
return defs
parseDef :: String -> IO (Maybe Def)
parseDef "" = return Nothing
parseDef ('#':_) = return Nothing
parseDef s =
case end of
':':end' -> do
content' <- fmap compress $ parseContent $ dropWhile isSpace end'
case words begin of
[] -> error $ "Missing constructor: " ++ s
(w:ws) -> return $ Just Def
{ constr = w
, vars = map parseVar ws
, content = content'
}
_ -> error $ "Missing colon: " ++ s
where
(begin, end) = break (== ':') s
data Content = Var Deref | Raw String
compress :: [Content] -> [Content]
compress [] = []
compress (Raw a:Raw b:rest) = compress $ Raw (a ++ b) : rest
compress (x:y) = x : compress y
parseContent :: String -> IO [Content]
parseContent s =
either (error . show) return $ parse go s s
where
go = do
x <- many go'
eof
return x
go' = (Raw `fmap` many1 (noneOf "#")) <|> (fmap (either Raw Var) parseHash)
parseVar :: String -> (String, Maybe String)
parseVar s =
case break (== '@') s of
(x, '@':y) -> (x, Just y)
_ -> (s, Nothing)
data SomeMessage master = forall msg. RenderMessage master msg => SomeMessage msg
instance IsString (SomeMessage master) where
fromString = SomeMessage . T.pack
instance master ~ master' => RenderMessage master (SomeMessage master') where
renderMessage a b (SomeMessage msg) = renderMessage a b msg
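-- Illustrative sketch, not part of the original module: the message-file
-- syntax read by parseDef/parseContent above.  Each non-comment line has
-- the shape "Constructor arg@Type ...: body"; a leading '#' marks a
-- comment line, and '#{..}' in the body is read as a Var interpolation.
-- The file name and messages below are assumptions for the example only:
--
-- > # en.msg (hypothetical)
-- > Hello: Hello, world!
-- > AgeQuestion name@String: How old are you, #{name}?
_exampleContent :: IO [Content]
_exampleContent = parseContent "How old are you, #{name}?"
-- expected: a Raw/Var mixture with a single interpolated variable for "name"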
|
fgaray/shakespeare
|
Text/Shakespeare/I18N.hs
|
mit
| 14,506 | 0 | 17 | 4,134 | 4,094 | 2,149 | 1,945 | 283 | 8 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <[email protected]>
--
-- See license.txt for details
module OptionsTests.OptionTypes
( suite_OptionTypes
) where
import Data.Int
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Word
import Test.Chell
import Options
suite_OptionTypes :: Suite
suite_OptionTypes = suite "option-types"
[ test_Bool
, test_String
, test_Int
, test_Int8
, test_Int16
, test_Int32
, test_Int64
, test_Word
, test_Word8
, test_Word16
, test_Word32
, test_Word64
, test_Integer
, test_Float
, test_Double
, test_Maybe
, test_List
, test_Set
, test_Map
, test_Enum
]
parseValid :: (Show a, Eq a) => OptionType a -> String -> a -> Assertion
parseValid t s expected = equal (optionTypeParse t s) (Right expected)
parseInvalid :: (Show a, Eq a) => OptionType a -> String -> String -> Assertion
parseInvalid t s err = equal (optionTypeParse t s) (Left err)
test_Bool :: Test
test_Bool = assertions "bool" $ do
$expect (parseValid optionType_bool "true" True)
$expect (parseValid optionType_bool "false" False)
$expect (parseInvalid optionType_bool "" "\"\" is not in {\"true\", \"false\"}.")
test_String :: Test
test_String = assertions "string" $ do
let valid = parseValid optionType_string
let invalid = parseInvalid optionType_string
$expect (valid "" "")
$expect (valid "a" "a")
$expect (valid "\12354" "\12354")
$expect (valid "\56507" "\56507")
$expect (valid "\61371" "\61371")
test_Int :: Test
test_Int = assertions "int" $ do
let valid = parseValid optionType_int
let invalid = parseInvalid optionType_int
$expect (valid "-1" (-1 :: Int))
$expect (valid "1" (1 :: Int))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMin = show (toInteger (minBound :: Int) - 1)
let pastMax = show (toInteger (maxBound :: Int) + 1)
let errBounds = " is not within bounds [" ++ show (minBound :: Int) ++ ":" ++ show (maxBound :: Int) ++ "] of type int."
$expect (invalid pastMin (pastMin ++ errBounds))
$expect (valid (show (minBound :: Int)) minBound)
$expect (valid (show (maxBound :: Int)) maxBound)
$expect (invalid pastMax (pastMax ++ errBounds))
test_Int8 :: Test
test_Int8 = assertions "int8" $ do
let valid = parseValid optionType_int8
let invalid = parseInvalid optionType_int8
$expect (valid "-1" (-1 :: Int8))
$expect (valid "1" (1 :: Int8))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMin = show (toInteger (minBound :: Int8) - 1)
let pastMax = show (toInteger (maxBound :: Int8) + 1)
$expect (invalid pastMin "-129 is not within bounds [-128:127] of type int8.")
$expect (valid (show (minBound :: Int8)) minBound)
$expect (valid (show (maxBound :: Int8)) maxBound)
$expect (invalid pastMax "128 is not within bounds [-128:127] of type int8.")
test_Int16 :: Test
test_Int16 = assertions "int16" $ do
let valid = parseValid optionType_int16
let invalid = parseInvalid optionType_int16
$expect (valid "-1" (-1 :: Int16))
$expect (valid "1" (1 :: Int16))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMin = show (toInteger (minBound :: Int16) - 1)
let pastMax = show (toInteger (maxBound :: Int16) + 1)
$expect (invalid pastMin "-32769 is not within bounds [-32768:32767] of type int16.")
$expect (valid (show (minBound :: Int16)) minBound)
$expect (valid (show (maxBound :: Int16)) maxBound)
$expect (invalid pastMax "32768 is not within bounds [-32768:32767] of type int16.")
test_Int32 :: Test
test_Int32 = assertions "int32" $ do
let valid = parseValid optionType_int32
let invalid = parseInvalid optionType_int32
$expect (valid "-1" (-1 :: Int32))
$expect (valid "1" (1 :: Int32))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMin = show (toInteger (minBound :: Int32) - 1)
let pastMax = show (toInteger (maxBound :: Int32) + 1)
$expect (invalid pastMin "-2147483649 is not within bounds [-2147483648:2147483647] of type int32.")
$expect (valid (show (minBound :: Int32)) minBound)
$expect (valid (show (maxBound :: Int32)) maxBound)
$expect (invalid pastMax "2147483648 is not within bounds [-2147483648:2147483647] of type int32.")
test_Int64 :: Test
test_Int64 = assertions "int64" $ do
let valid = parseValid optionType_int64
let invalid = parseInvalid optionType_int64
$expect (valid "-1" (-1 :: Int64))
$expect (valid "1" (1 :: Int64))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMin = show (toInteger (minBound :: Int64) - 1)
let pastMax = show (toInteger (maxBound :: Int64) + 1)
$expect (invalid pastMin "-9223372036854775809 is not within bounds [-9223372036854775808:9223372036854775807] of type int64.")
$expect (valid (show (minBound :: Int64)) minBound)
$expect (valid (show (maxBound :: Int64)) maxBound)
$expect (invalid pastMax "9223372036854775808 is not within bounds [-9223372036854775808:9223372036854775807] of type int64.")
test_Word :: Test
test_Word = assertions "word" $ do
let valid = parseValid optionType_word
let invalid = parseInvalid optionType_word
let pastMax = show (toInteger (maxBound :: Word) + 1)
let errBounds = " is not within bounds [0:" ++ show (maxBound :: Word) ++ "] of type uint."
$expect (invalid "-1" ("-1" ++ errBounds))
$expect (valid "0" (0 :: Word))
$expect (valid "1" (1 :: Word))
$expect (invalid "a" "\"a\" is not an integer.")
$expect (valid (show (maxBound :: Word)) maxBound)
$expect (invalid pastMax (pastMax ++ errBounds))
test_Word8 :: Test
test_Word8 = assertions "word8" $ do
let valid = parseValid optionType_word8
let invalid = parseInvalid optionType_word8
$expect (invalid "-1" "-1 is not within bounds [0:255] of type uint8.")
$expect (valid "0" (0 :: Word8))
$expect (valid "1" (1 :: Word8))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMax = show (toInteger (maxBound :: Word8) + 1)
$expect (valid (show (maxBound :: Word8)) maxBound)
$expect (invalid pastMax "256 is not within bounds [0:255] of type uint8.")
test_Word16 :: Test
test_Word16 = assertions "word16" $ do
let valid = parseValid optionType_word16
let invalid = parseInvalid optionType_word16
$expect (invalid "-1" "-1 is not within bounds [0:65535] of type uint16.")
$expect (valid "0" (0 :: Word16))
$expect (valid "1" (1 :: Word16))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMax = show (toInteger (maxBound :: Word16) + 1)
$expect (valid (show (maxBound :: Word16)) maxBound)
$expect (invalid pastMax "65536 is not within bounds [0:65535] of type uint16.")
test_Word32 :: Test
test_Word32 = assertions "word32" $ do
let valid = parseValid optionType_word32
let invalid = parseInvalid optionType_word32
$expect (invalid "-1" "-1 is not within bounds [0:4294967295] of type uint32.")
$expect (valid "0" (0 :: Word32))
$expect (valid "1" (1 :: Word32))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMax = show (toInteger (maxBound :: Word32) + 1)
$expect (valid (show (maxBound :: Word32)) maxBound)
$expect (invalid pastMax "4294967296 is not within bounds [0:4294967295] of type uint32.")
test_Word64 :: Test
test_Word64 = assertions "word64" $ do
let valid = parseValid optionType_word64
let invalid = parseInvalid optionType_word64
$expect (invalid "-1" "-1 is not within bounds [0:18446744073709551615] of type uint64.")
$expect (valid "0" (0 :: Word64))
$expect (valid "1" (1 :: Word64))
$expect (invalid "a" "\"a\" is not an integer.")
let pastMax = show (toInteger (maxBound :: Word64) + 1)
$expect (valid (show (maxBound :: Word64)) maxBound)
$expect (invalid pastMax "18446744073709551616 is not within bounds [0:18446744073709551615] of type uint64.")
test_Integer :: Test
test_Integer = assertions "integer" $ do
let valid = parseValid optionType_integer
let invalid = parseInvalid optionType_integer
$expect (invalid "" "\"\" is not an integer.")
$expect (valid "-1" (-1 :: Integer))
$expect (valid "0" (0 :: Integer))
$expect (valid "1" (1 :: Integer))
$expect (invalid "a" "\"a\" is not an integer.")
test_Float :: Test
test_Float = assertions "float" $ do
let valid = parseValid optionType_float
let invalid = parseInvalid optionType_float
$expect (valid "-1" (-1 :: Float))
$expect (valid "0" (0 :: Float))
$expect (valid "1" (1 :: Float))
$expect (valid "1.5" (1.5 :: Float))
$expect (valid "3e5" (3e5 :: Float))
$expect (invalid "a" "\"a\" is not a number.")
test_Double :: Test
test_Double = assertions "double" $ do
let valid = parseValid optionType_double
let invalid = parseInvalid optionType_double
$expect (valid "-1" (-1 :: Double))
$expect (valid "0" (0 :: Double))
$expect (valid "1" (1 :: Double))
$expect (valid "1.5" (1.5 :: Double))
$expect (valid "3e5" (3e5 :: Double))
$expect (invalid "a" "\"a\" is not a number.")
test_Maybe :: Test
test_Maybe = assertions "maybe" $ do
let t = optionType_maybe optionType_int
let valid = parseValid t
let invalid = parseInvalid t
$expect (valid "" Nothing)
$expect (valid "1" (Just 1))
$expect (invalid "a" "\"a\" is not an integer.")
test_List :: Test
test_List = assertions "list" $ do
let t = optionType_list ',' optionType_int
let valid = parseValid t
let invalid = parseInvalid t
$expect (valid "" [])
$expect (valid "1" [1])
$expect (valid "1,2,3" [1, 2, 3])
$expect (valid "1,1,2,3" [1, 1, 2, 3])
$expect (invalid "1,a,3" "\"a\" is not an integer.")
test_Set :: Test
test_Set = assertions "set" $ do
let t = optionType_set ',' optionType_int
let valid = parseValid t
let invalid = parseInvalid t
$expect (valid "" Set.empty)
$expect (valid "1" (Set.fromList [1]))
$expect (valid "1,2,3" (Set.fromList [1, 2, 3]))
$expect (valid "1,1,2,3" (Set.fromList [1, 2, 3]))
$expect (invalid "1,a,3" "\"a\" is not an integer.")
test_Map :: Test
test_Map = assertions "map" $ do
let t = optionType_map ',' '=' optionType_int optionType_int
let valid = parseValid t
let invalid = parseInvalid t
$expect (valid "" Map.empty)
$expect (valid "1=100" (Map.fromList [(1, 100)]))
$expect (valid "1=100,2=200,3=300" (Map.fromList [(1, 100), (2, 200), (3, 300)]))
$expect (valid "1=100,2=200,1=300" (Map.fromList [(1, 300), (2, 200)]))
$expect (invalid "a=1" "\"a\" is not an integer.")
$expect (invalid "1=a" "\"a\" is not an integer.")
$expect (invalid "1=" "\"\" is not an integer.")
$expect (invalid "1" "Map item \"1\" has no value.")
data TestEnum = Enum1 | Enum2 | Enum3
deriving (Bounded, Enum, Eq, Show)
test_Enum :: Test
test_Enum = assertions "enum" $ do
let t = optionType_enum "test enum"
let valid = parseValid t
let invalid = parseInvalid t
$expect (valid "Enum1" Enum1)
$expect (valid "Enum2" Enum2)
$expect (invalid "Enum4" "\"Enum4\" is not in {\"Enum1\", \"Enum2\", \"Enum3\"}.")
|
jmillikin/haskell-options
|
tests/OptionsTests/OptionTypes.hs
|
mit
| 10,846 | 103 | 16 | 1,913 | 3,944 | 1,904 | 2,040 | 253 | 1 |
module Helper (
module Test.Hspec.Meta
, module Test.Hspec.Compat
, module Test.QuickCheck
, module System.IO.Silently
, sleep
, timeout
, defaultParams
, noOpProgressCallback
, captureLines
, normalizeSummary
, ignoreExitCode
, ignoreUserInterrupt
, throwException
, shouldUseArgs
, removeLocations
) where
import Prelude ()
import Test.Hspec.Compat
import Data.List
import Data.Char
import Control.Monad
import System.Environment (withArgs)
import System.Exit
import Control.Concurrent
import qualified Control.Exception as E
import qualified System.Timeout as System
import Data.Time.Clock.POSIX
import System.IO.Silently
import Test.Hspec.Meta
import Test.QuickCheck hiding (Result(..))
import qualified Test.Hspec.Core.Spec as H
import qualified Test.Hspec.Core.Runner as H
import Test.Hspec.Core.QuickCheckUtil (mkGen)
throwException :: IO ()
throwException = E.throwIO (E.ErrorCall "foobar")
ignoreExitCode :: IO () -> IO ()
ignoreExitCode action = action `E.catch` \e -> let _ = e :: ExitCode in return ()
ignoreUserInterrupt :: IO () -> IO ()
ignoreUserInterrupt action = E.catchJust (guard . (== E.UserInterrupt)) action return
captureLines :: IO a -> IO [String]
captureLines = fmap lines . capture_
-- replace times in summary with zeroes
normalizeSummary :: [String] -> [String]
normalizeSummary = map f
where
f x | "Finished in " `isPrefixOf` x = map g x
| otherwise = x
g x | isNumber x = '0'
| otherwise = x
defaultParams :: H.Params
defaultParams = H.defaultParams {H.paramsQuickCheckArgs = stdArgs {replay = Just (mkGen 23, 0), maxSuccess = 1000}}
noOpProgressCallback :: H.ProgressCallback
noOpProgressCallback _ = return ()
sleep :: POSIXTime -> IO ()
sleep = threadDelay . floor . (* 1000000)
timeout :: POSIXTime -> IO a -> IO (Maybe a)
timeout = System.timeout . floor . (* 1000000)
shouldUseArgs :: [String] -> (Args -> Bool) -> Expectation
shouldUseArgs args p = do
spy <- newIORef (H.paramsQuickCheckArgs defaultParams)
let interceptArgs item = item {H.itemExample = \params action progressCallback -> writeIORef spy (H.paramsQuickCheckArgs params) >> H.itemExample item params action progressCallback}
spec = H.mapSpecItem_ interceptArgs $
H.it "foo" False
(silence . ignoreExitCode . withArgs args . H.hspec) spec
readIORef spy >>= (`shouldSatisfy` p)
removeLocations :: H.SpecWith a -> H.SpecWith a
removeLocations = H.mapSpecItem_ (\item -> item{H.itemLocation = Nothing})
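-- Illustrative sketch, not part of the original module: what
-- normalizeSummary does to runner output, so golden comparisons are not
-- sensitive to timing.  The literal strings are assumptions for the
-- example only.
_normalizeSummaryExample :: [String]
_normalizeSummaryExample =
  normalizeSummary ["Finished in 0.0042 seconds", "1 example, 0 failures"]
-- expected: ["Finished in 0.0000 seconds", "1 example, 0 failures"]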
|
beni55/hspec
|
hspec-core/test/Helper.hs
|
mit
| 2,602 | 0 | 17 | 529 | 813 | 449 | 364 | 65 | 1 |
module Options (
Options (..)
, Port
, withOptions
, parseOptions
, defaultOptions
-- exported to silence warnings
, Arg (..)
) where
import Data.Maybe
import Data.List
import Text.Read
import System.Console.GetOpt
import System.Environment
import System.IO
import System.Exit
withOptions :: (Options -> IO ()) -> IO ()
withOptions action = do
args <- getArgs
case parseOptions args of
Left err -> uncurry exitWithMessage err
Right opts -> action opts
exitWithMessage :: ExitCode -> String -> IO ()
exitWithMessage err msg = case err of
ExitSuccess -> hPutStr stdout msg
_ -> hPutStr stderr msg >> exitWith err
type Port = Int
data Options = Options {
optionsPort :: Port
, optionsReservePort :: Port
, optionsMainIs :: FilePath
, optionsAppArgs :: [String]
} deriving (Eq, Show)
setPort :: Integer -> Options -> Options
setPort p c = c {optionsPort = fromInteger p}
setReservePort :: Integer -> Options -> Options
setReservePort p c = c {optionsReservePort = fromInteger p}
defaultOptions :: Options
defaultOptions = Options 3000 12000 "src/Main.hs" []
type Result = Either NoOptions Options
data NoOptions = Help | InvalidArgument String String
data Arg a = Arg {
argumentName :: String
, argumentParser :: String -> Maybe a
, argumentSetter :: a -> Options -> Options
}
mkOption :: [Char] -> String -> Arg a -> String -> OptDescr (Result -> Result)
mkOption shortcut name (Arg argName parser setter) help = Option shortcut [name] (ReqArg arg argName) help
where
arg :: String -> Result -> Result
arg input x = x >>= \c -> case parser input of
Just n -> Right (setter n c)
Nothing -> Left (InvalidArgument name input)
options :: [OptDescr (Result -> Result)]
options = [
Option [] ["help"] (NoArg (const $ Left Help)) "display this help and exit"
, mkOption "p" "port" (Arg "PORT" readMaybe setPort) ("port of the web application (default: " ++ show (optionsPort defaultOptions) ++ ")")
, mkOption "" "reserve-port" (Arg "PORT" readMaybe setReservePort) ("port reserve listens on (default: " ++ show (optionsReservePort defaultOptions) ++ ")")
]
parseOptions :: [String] -> Either (ExitCode, String) Options
parseOptions allArgs = case getOpt Permute options args of
(_, _, err:_) -> tryHelp err
(_, _:arg:_, _) -> tryHelp ("unexpected argument `" ++ arg ++ "'\n")
(opts, mainIs, []) -> case foldl' (flip id) (Right defaultOptions) opts of
Left Help -> Left (ExitSuccess, usage)
Left (InvalidArgument flag value) -> tryHelp ("invalid argument `" ++ value ++ "' for `--" ++ flag ++ "'\n")
Right x -> Right x {optionsMainIs = fromMaybe (optionsMainIs defaultOptions) $ listToMaybe mainIs, optionsAppArgs = appArgs}
where
tryHelp msg = Left (ExitFailure 1, "reserve: " ++ msg ++ "Try `reserve --help' for more information.\n")
usage = usageInfo ("Usage: reserve [OPTION]... [MAIN] [-- ARG...]\n\nOPTIONS") options ++ helpForMain ++ helpForAppArgs
helpForMain = "\nThe optional MAIN argument is a path to a module that exports a `main' function. (default: " ++ optionsMainIs defaultOptions ++ ")\n"
helpForAppArgs = "\nAll arguments following the optional `--' are passed to the web application.\n"
(args, appArgs) = drop 1 <$> span (/= "--") allArgs
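-- Illustrative sketch, not part of the original module: how parseOptions
-- splits a command line around the optional "--" separator.  The argument
-- values are assumptions for the example only.
_parseOptionsExample :: Either (ExitCode, String) Options
_parseOptionsExample =
  parseOptions ["--port", "8000", "app/Main.hs", "--", "--verbose"]
-- expected: Right defaultOptions { optionsPort    = 8000
--                                , optionsMainIs  = "app/Main.hs"
--                                , optionsAppArgs = ["--verbose"] }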
|
sol/reserve
|
src/Options.hs
|
mit
| 3,440 | 0 | 17 | 792 | 1,056 | 555 | 501 | 67 | 5 |
-- | Example for test purposes - copied from http://www.haskell.org/haskellwiki/OpenGLTutorial1.
-- For all rights see there - I just do some tests.
module Cube
where
import qualified Graphics.Rendering.OpenGL.GL.VertexSpec as VSpec
import qualified Graphics.Rendering.OpenGL.GL.BeginEnd as GLBegEnd
cube :: (VSpec.VertexComponent a, Num a) => a
-> IO ()
cube w = do
GLBegEnd.renderPrimitive GLBegEnd.Quads $ do
VSpec.vertex $ VSpec.Vertex3 w w w
VSpec.vertex $ VSpec.Vertex3 w w (-w)
VSpec.vertex $ VSpec.Vertex3 w (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 w (-w) w
VSpec.vertex $ VSpec.Vertex3 w w w
VSpec.vertex $ VSpec.Vertex3 w w (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) w (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) w w
VSpec.vertex $ VSpec.Vertex3 w w w
VSpec.vertex $ VSpec.Vertex3 w (-w) w
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) w
VSpec.vertex $ VSpec.Vertex3 (-w) w w
VSpec.vertex $ VSpec.Vertex3 (-w) w w
VSpec.vertex $ VSpec.Vertex3 (-w) w (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) w
VSpec.vertex $ VSpec.Vertex3 w (-w) w
VSpec.vertex $ VSpec.Vertex3 w (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) w
VSpec.vertex $ VSpec.Vertex3 w w (-w)
VSpec.vertex $ VSpec.Vertex3 w (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) (-w) (-w)
VSpec.vertex $ VSpec.Vertex3 (-w) w (-w)
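-- Illustrative note, not part of the original module: the 24 vertices above
-- describe the six faces of an axis-aligned cube with half-edge length w,
-- one quad per face.  The call is expected to run inside an active OpenGL
-- rendering context (for instance a GLUT display callback).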
|
tnrangwi/grill
|
test/experimental/opengl/Cube.hs
|
mit
| 1,685 | 0 | 13 | 491 | 716 | 360 | 356 | 32 | 1 |
module CFDI.Types.PaymentsVersion where
import CFDI.Chainable
import CFDI.Types.Type
newtype PaymentsVersion = PaymentsVersion Float deriving (Eq, Show)
instance Chainable PaymentsVersion where
chain (PaymentsVersion v) = chain v
instance Type PaymentsVersion where
parseExpr "1.0" = Right $ PaymentsVersion 1.0
parseExpr e = Left $ InvalidValue e
render _ = "1.0"
|
yusent/cfdis
|
src/CFDI/Types/PaymentsVersion.hs
|
mit
| 383 | 0 | 8 | 65 | 108 | 57 | 51 | 10 | 0 |
module Napero where
import System.Process
import Control.Concurrent
import System.Random
say x = do
readProcessWithExitCode "say" [x] []
return ()
p = putStrLn
i = getLine
w delay = threadDelay $ delay * 1000000
s xs = sequence xs
m a = mapM_ a
rnd n = randomIO >>= return . (+1) . (`mod` n)
int :: String -> Int
int str = read str
|
raimohanska/Napero
|
Napero.hs
|
mit
| 340 | 0 | 8 | 70 | 148 | 78 | 70 | 15 | 1 |
module Tests.Old (tests) where
import Test.Framework (testGroup, Test )
import Test.Framework.Providers.HUnit
import Test.HUnit ( assertBool )
import System.Environment.Executable (getExecutablePath)
import System.IO ( openTempFile, stderr )
import System.Process ( runProcess, waitForProcess )
import System.FilePath ( (</>), (<.>), takeDirectory, splitDirectories, joinPath )
import System.Directory
import System.Exit
import Data.Algorithm.Diff
import Text.Pandoc.Shared ( normalize )
import Text.Pandoc.Options
import Text.Pandoc.Writers.Native ( writeNative )
import Text.Pandoc.Readers.Native ( readNative )
import Prelude hiding ( readFile )
import qualified Data.ByteString.Lazy as B
import Text.Pandoc.UTF8 (toStringLazy)
import Text.Printf
import Text.Pandoc.Error
readFileUTF8 :: FilePath -> IO String
readFileUTF8 f = B.readFile f >>= return . toStringLazy
data TestResult = TestPassed
| TestError ExitCode
| TestFailed String FilePath [Diff String]
deriving (Eq)
instance Show TestResult where
show TestPassed = "PASSED"
show (TestError ec) = "ERROR " ++ show ec
show (TestFailed cmd file d) = '\n' : dash ++
"\n--- " ++ file ++
"\n+++ " ++ cmd ++ "\n" ++ showDiff (1,1) d ++
dash
where dash = replicate 72 '-'
showDiff :: (Int,Int) -> [Diff String] -> String
showDiff _ [] = ""
showDiff (l,r) (First ln : ds) =
printf "+%4d " l ++ ln ++ "\n" ++ showDiff (l+1,r) ds
showDiff (l,r) (Second ln : ds) =
printf "-%4d " r ++ ln ++ "\n" ++ showDiff (l,r+1) ds
showDiff (l,r) (Both _ _ : ds) =
showDiff (l+1,r+1) ds
tests :: [Test]
tests = [ testGroup "markdown"
[ testGroup "writer"
$ writerTests "markdown" ++ lhsWriterTests "markdown"
, testGroup "reader"
[ test "basic" ["-r", "markdown", "-w", "native", "-s", "-S"]
"testsuite.txt" "testsuite.native"
, test "tables" ["-r", "markdown", "-w", "native", "--columns=80"]
"tables.txt" "tables.native"
, test "pipe tables" ["-r", "markdown", "-w", "native", "--columns=80"]
"pipe-tables.txt" "pipe-tables.native"
, test "more" ["-r", "markdown", "-w", "native", "-S"]
"markdown-reader-more.txt" "markdown-reader-more.native"
, lhsReaderTest "markdown+lhs"
]
, testGroup "citations"
[ test "citations" ["-r", "markdown", "-w", "native"]
"markdown-citations.txt" "markdown-citations.native"
]
]
, testGroup "rst"
[ testGroup "writer" (writerTests "rst" ++ lhsWriterTests "rst")
, testGroup "reader"
[ test "basic" ["-r", "rst", "-w", "native",
"-s", "-S", "--columns=80"] "rst-reader.rst" "rst-reader.native"
, test "tables" ["-r", "rst", "-w", "native", "--columns=80"]
"tables.rst" "tables-rstsubset.native"
, lhsReaderTest "rst+lhs"
]
]
, testGroup "latex"
[ testGroup "writer" (writerTests "latex" ++ lhsWriterTests "latex")
, testGroup "reader"
[ test "basic" ["-r", "latex", "-w", "native", "-s", "-R"]
"latex-reader.latex" "latex-reader.native"
, lhsReaderTest "latex+lhs"
]
]
, testGroup "html"
[ testGroup "writer" (writerTests "html" ++ lhsWriterTests "html")
, test "reader" ["-r", "html", "-w", "native", "-s"]
"html-reader.html" "html-reader.native"
]
, testGroup "s5"
[ s5WriterTest "basic" ["-s"] "s5"
, s5WriterTest "fancy" ["-s","-m","-i"] "s5"
, s5WriterTest "fragment" [] "html"
, s5WriterTest "inserts" ["-s", "-H", "insert",
"-B", "insert", "-A", "insert", "-c", "main.css"] "html"
]
, testGroup "textile"
[ testGroup "writer" $ writerTests "textile"
, test "reader" ["-r", "textile", "-w", "native", "-s"]
"textile-reader.textile" "textile-reader.native"
]
, testGroup "docbook"
[ testGroup "writer" $ writerTests "docbook"
, test "reader" ["-r", "docbook", "-w", "native", "-s"]
"docbook-reader.docbook" "docbook-reader.native"
, test "reader" ["-r", "docbook", "-w", "native", "-s"]
"docbook-xref.docbook" "docbook-xref.native"
]
, testGroup "native"
[ testGroup "writer" $ writerTests "native"
, test "reader" ["-r", "native", "-w", "native", "-s"]
"testsuite.native" "testsuite.native"
]
, testGroup "fb2"
[ fb2WriterTest "basic" [] "fb2/basic.markdown" "fb2/basic.fb2"
, fb2WriterTest "titles" [] "fb2/titles.markdown" "fb2/titles.fb2"
, fb2WriterTest "images" [] "fb2/images.markdown" "fb2/images.fb2"
, fb2WriterTest "images-embedded" [] "fb2/images-embedded.html" "fb2/images-embedded.fb2"
, fb2WriterTest "math" [] "fb2/math.markdown" "fb2/math.fb2"
, fb2WriterTest "tables" [] "tables.native" "tables.fb2"
, fb2WriterTest "testsuite" [] "testsuite.native" "writer.fb2"
]
, testGroup "mediawiki"
[ testGroup "writer" $ writerTests "mediawiki"
, test "reader" ["-r", "mediawiki", "-w", "native", "-s"]
"mediawiki-reader.wiki" "mediawiki-reader.native"
]
, testGroup "dokuwiki"
[ testGroup "writer" $ writerTests "dokuwiki"
, test "inline_formatting" ["-r", "native", "-w", "dokuwiki", "-s"]
"dokuwiki_inline_formatting.native" "dokuwiki_inline_formatting.dokuwiki"
, test "multiblock table" ["-r", "native", "-w", "dokuwiki", "-s"]
"dokuwiki_multiblock_table.native" "dokuwiki_multiblock_table.dokuwiki"
, test "external images" ["-r", "native", "-w", "dokuwiki", "-s"]
"dokuwiki_external_images.native" "dokuwiki_external_images.dokuwiki"
]
, testGroup "opml"
[ test "basic" ["-r", "native", "-w", "opml", "--columns=78", "-s"]
"testsuite.native" "writer.opml"
, test "reader" ["-r", "opml", "-w", "native", "-s"]
"opml-reader.opml" "opml-reader.native"
]
, testGroup "haddock"
[ testGroup "writer" $ writerTests "haddock"
, test "reader" ["-r", "haddock", "-w", "native", "-s"]
"haddock-reader.haddock" "haddock-reader.native"
]
, testGroup "txt2tags"
[ test "reader" ["-r", "t2t", "-w", "native", "-s"]
"txt2tags.t2t" "txt2tags.native" ]
, testGroup "epub" [
test "features" ["-r", "epub", "-w", "native"]
"epub/features.epub" "epub/features.native"
, test "wasteland" ["-r", "epub", "-w", "native"]
"epub/wasteland.epub" "epub/wasteland.native"
, test "formatting" ["-r", "epub", "-w", "native"]
"epub/formatting.epub" "epub/formatting.native"
]
, testGroup "twiki"
[ test "reader" ["-r", "twiki", "-w", "native", "-s"]
"twiki-reader.twiki" "twiki-reader.native" ]
, testGroup "other writers" $ map (\f -> testGroup f $ writerTests f)
[ "opendocument" , "context" , "texinfo", "icml", "tei"
, "man" , "plain" , "rtf", "org", "asciidoc"
]
, testGroup "writers-lang-and-dir"
[ test "latex" ["-f", "native", "-t", "latex", "-s"]
"writers-lang-and-dir.native" "writers-lang-and-dir.latex"
, test "context" ["-f", "native", "-t", "context", "-s"]
"writers-lang-and-dir.native" "writers-lang-and-dir.context"
]
]
-- makes sure file is fully closed after reading
readFile' :: FilePath -> IO String
readFile' f = do s <- readFileUTF8 f
return $! (length s `seq` s)
lhsWriterTests :: String -> [Test]
lhsWriterTests format
= [ t "lhs to normal" format
, t "lhs to lhs" (format ++ "+lhs")
]
where
t n f = test n ["--wrap=preserve", "-r", "native", "-s", "-w", f]
"lhs-test.native" ("lhs-test" <.> f)
lhsReaderTest :: String -> Test
lhsReaderTest format =
testWithNormalize normalizer "lhs" ["-r", format, "-w", "native"]
("lhs-test" <.> format) norm
where normalizer = writeNative def . normalize . handleError . readNative
norm = if format == "markdown+lhs"
then "lhs-test-markdown.native"
else "lhs-test.native"
writerTests :: String -> [Test]
writerTests format
= [ test "basic" (opts ++ ["-s"]) "testsuite.native" ("writer" <.> format)
, test "tables" opts "tables.native" ("tables" <.> format)
]
where
opts = ["-r", "native", "-w", format, "--columns=78",
"--variable", "pandoc-version="]
s5WriterTest :: String -> [String] -> String -> Test
s5WriterTest modifier opts format
= test (format ++ " writer (" ++ modifier ++ ")")
(["-r", "native", "-w", format] ++ opts)
"s5.native" ("s5-" ++ modifier <.> "html")
fb2WriterTest :: String -> [String] -> String -> String -> Test
fb2WriterTest title opts inputfile normfile =
testWithNormalize (ignoreBinary . formatXML)
title (["-t", "fb2"]++opts) inputfile normfile
where
formatXML xml = splitTags $ zip xml (drop 1 xml)
splitTags [] = []
splitTags [end] = fst end : snd end : []
splitTags (('>','<'):rest) = ">\n" ++ splitTags rest
splitTags ((c,_):rest) = c : splitTags rest
ignoreBinary = unlines . filter (not . startsWith "<binary ") . lines
startsWith tag str = all (uncurry (==)) $ zip tag str
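-- Illustrative note, not part of the original module: formatXML only
-- inserts a newline wherever one tag ends and the next begins (between
-- '>' and '<'), and ignoreBinary then drops any "<binary ..." lines so
-- embedded image data does not make the golden comparison noisy.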
-- | Run a test without normalize function, return True if test passed.
test :: String -- ^ Title of test
-> [String] -- ^ Options to pass to pandoc
-> String -- ^ Input filepath
-> FilePath -- ^ Norm (for test results) filepath
-> Test
test = testWithNormalize id
-- | Run a test with normalize function, return True if test passed.
testWithNormalize :: (String -> String) -- ^ Normalize function for output
-> String -- ^ Title of test
-> [String] -- ^ Options to pass to pandoc
-> String -- ^ Input filepath
-> FilePath -- ^ Norm (for test results) filepath
-> Test
testWithNormalize normalizer testname opts inp norm = testCase testname $ do
-- find pandoc executable relative to test-pandoc
-- First, try in same directory (e.g. if both in ~/.cabal/bin)
-- Second, try ../pandoc (e.g. if in dist/XXX/build/test-pandoc)
pandocPath <- do
testExePath <- getExecutablePath
let testExeDir = takeDirectory testExePath
found <- doesFileExist (testExeDir </> "pandoc")
return $ if found
then testExeDir </> "pandoc"
else case splitDirectories testExeDir of
[] -> error "test-pandoc: empty testExeDir"
xs -> joinPath (init xs) </> "pandoc" </> "pandoc"
(outputPath, hOut) <- openTempFile "" "pandoc-test"
let inpPath = inp
let normPath = norm
let options = ["--data-dir", ".." </> "data"] ++ [inpPath] ++ opts
let cmd = pandocPath ++ " " ++ unwords options
ph <- runProcess pandocPath options Nothing
(Just [("TMP","."),("LANG","en_US.UTF-8"),("HOME", "./")]) Nothing (Just hOut)
(Just stderr)
ec <- waitForProcess ph
result <- if ec == ExitSuccess
then do
-- filter \r so the tests will work on Windows machines
outputContents <- readFile' outputPath >>=
return . filter (/='\r') . normalizer
normContents <- readFile' normPath >>=
return . filter (/='\r') . normalizer
if outputContents == normContents
then return TestPassed
else return
$ TestFailed cmd normPath
$ getDiff (lines outputContents) (lines normContents)
else return $ TestError ec
removeFile outputPath
assertBool (show result) (result == TestPassed)
|
fibsifan/pandoc
|
tests/Tests/Old.hs
|
gpl-2.0
| 12,376 | 0 | 20 | 3,584 | 3,096 | 1,687 | 1,409 | 231 | 5 |
{--
-- Natume -- an implementation of Kana-Kanji conversion in Haskell
-- Copyright (C) 2006-2012 Takayuki Usui
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
--}
module ScmToken (
Token(TokenLeftParen,
TokenRightParen,
TokenLeftBrace,
TokenRightBrace,
TokenLeftBracket,
TokenRightBracket,
TokenInt,
TokenName,
TokenString),
tokenize
) where
import Data.Char
import Parser
data Token = TokenLeftParen
| TokenRightParen
| TokenLeftBrace
| TokenRightBrace
| TokenLeftBracket
| TokenRightBracket
| TokenInt Int
| TokenName String
| TokenString String
| TokenIgnore
instance Show Token where
show (TokenLeftParen) = "("
show (TokenRightParen) = ")"
show (TokenLeftBrace) = "{"
show (TokenRightBrace) = "}"
show (TokenLeftBracket) = "["
show (TokenRightBracket) = "]"
show (TokenInt i) = show i
show (TokenName s) = s
show (TokenString s) = show s
show (TokenIgnore) = ""
item :: Parser Char Char
item = MkParser f
where f [] = []
f (x:xs) = [(x,xs)]
sat :: (Char -> Bool) -> Parser Char Char
sat test = MkParser f
where f [] = []
f (x:xs) | test x = [(x,xs)]
| otherwise = []
esat :: (Char -> Bool) -> Parser Char Char
esat test = MkParser f
where f [] = []
f (x:xs) | x == '\\' = applyParser (sat test) xs
| test x = [(x,xs)]
| otherwise = []
leftparen :: Parser Char Token
leftparen = do sat (=='('); return TokenLeftParen
rightparen :: Parser Char Token
rightparen = do sat (==')'); return TokenRightParen
leftbrace :: Parser Char Token
leftbrace = do sat (=='{'); return TokenLeftBrace
rightbrace :: Parser Char Token
rightbrace = do sat (=='}'); return TokenRightBrace
leftbracket :: Parser Char Token
leftbracket = do sat (=='['); return TokenLeftBracket
rightbracket :: Parser Char Token
rightbracket = do sat (==']'); return TokenRightBracket
digit :: Parser Char Int
digit = do x <- sat isDigit; return (ord x - ord '0')
nat :: Parser Char Int
nat = do xs <- some digit
let n = foldl1 (\x y -> x * 10 + y) xs
return n
sign :: Parser Char Char
sign = sat (\x -> x == '+' || x == '-')
signed :: Parser Char Int
signed = do x <- sign; n <- nat
case x of
'-' -> return (negate n)
_ -> return n
unsigned :: Parser Char Int
unsigned = do n <- nat; return n
integer :: Parser Char Token
integer = do n <- signed `orelse` unsigned; return (TokenInt n)
backslash :: Parser Char Char
backslash = sat (=='\\')
escape :: Parser Char Char
escape = do backslash; c <- item; return c
char :: Parser Char Char
char = sat (\x -> x /= '"' && x /= '\\')
nonescape :: Parser Char Char
nonescape = do c <- char; return c
doublequote :: Parser Char Char
doublequote = sat (=='"')
string :: Parser Char Token
string = do doublequote
s <- many (escape `orelse` nonescape)
doublequote
return (TokenString s)
alpha1 :: Parser Char String
alpha1 = do b0 <- sat (\x -> isalpha x)
return [b0]
alphanum1 :: Parser Char String
alphanum1 = do b0 <- sat (\x -> isalpha x || isDigit x || any (x==) "-")
return [b0]
multibyte2 :: Parser Char String
multibyte2 = do b0 <- sat (\x -> ('\xA1' <= x && x <= '\xFE') || x == '\x8E')
b1 <- item
return [b0,b1]
multibyte3 :: Parser Char String
multibyte3 = do b0 <- sat (\x -> x == '\x8F')
b1 <- item
b2 <- item
return [b0,b1,b2]
isalpha :: Char -> Bool
isalpha x = ('A' <= x && x <= 'Z') || ('a' <= x && x <= 'z')
alpha :: Parser Char Char
alpha = esat (\x -> isalpha x || any (x==) "!\"#$%&'*+,-./:;<>=?@\\^_`|~")
alphanum :: Parser Char Char
alphanum = alpha `orelse` esat isDigit
identifier :: Parser Char Token
identifier = do x <- alpha1 `orelse` multibyte2 `orelse` multibyte3
xs <- many (alphanum1 `orelse` multibyte2 `orelse` multibyte3)
return (TokenName (x ++ concat xs))
`orelse`
do x <- alpha
xs <- many alphanum
return (TokenName (x:xs))
semicolon :: Parser Char Char
semicolon = sat (==';')
nonnewline :: Parser Char Char
nonnewline = sat (/='\n')
comment :: Parser Char Token
comment = do semicolon
many nonnewline
return (TokenIgnore)
token1 :: Parser Char Token
token1 = comment `orelse`
leftparen `orelse`
rightparen `orelse`
leftbrace `orelse`
rightbrace `orelse`
leftbracket `orelse`
rightbracket `orelse`
integer `orelse`
string `orelse`
identifier
space :: Parser Char Char
space = sat isSpace
token :: Parser Char Token
token = do many space
t <- token1
many space
return t
tokenize1 :: String -> ([Token],String)
tokenize1 s = case (applyParser token s) of
[] -> ([],s)
((t,r):_) -> case t of
(TokenIgnore) -> tokenize1 r
_ -> ([t],r)
tokenize :: String -> [Token]
tokenize s = let (ts,r) = (tokenize1 s) in
if null ts
then if not (null r)
then error ("lexcal error -- " ++ take 8 r ++ "...")
else []
else (head ts) : (tokenize r)
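-- Illustrative sketch, not part of the original module: what the lexer
-- produces for a small s-expression.  The input string is an assumption
-- for the example only.
_tokenizeExample :: [Token]
_tokenizeExample = tokenize "(define x -42) ; trailing comment"
-- expected: [TokenLeftParen, TokenName "define", TokenName "x",
--            TokenInt (-42), TokenRightParen]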
|
takayuki/natume
|
ScmToken.hs
|
gpl-2.0
| 6,402 | 0 | 16 | 2,100 | 2,047 | 1,061 | 986 | 163 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_GHC -Wall #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Core.ViewFrame
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
--
-- | Splittable panes containing notebooks with any widgets
--
---------------------------------------------------------------------------------
module Graphics.UI.Frame.ViewFrame (
removePaneAdmin
, addPaneAdmin
, notebookInsertOrdered
, markLabel
-- * Convenience methods for accesing Pane state
, posTypeToPaneDirection
, paneDirectionToPosType
, paneFromName
, mbPaneFromName
, guiPropertiesFromName
-- * View Actions
, viewMove
, viewMoveTo
, viewSplitHorizontal
, viewSplitVertical
--, viewSplit
, viewSplit'
, viewNewGroup
, newGroupOrBringToFront
, bringGroupToFront
, viewNest
, viewNest'
, viewDetach
, viewDetach'
, handleNotebookSwitch
, viewCollapse
, viewCollapse'
, viewTabsPos
, viewSwitchTabs
, closeGroup
, allGroupNames
-- * View Queries
, getBestPanePath
, getBestPathForId
, getActivePanePath
, getActivePanePathOrStandard
, figureOutPaneName
, getNotebook
, getPaned
, getActiveNotebook
, getActivePane
, setActivePane
, getUiManager
, getWindows
, getMainWindow
, getActiveWindow
, getActiveScreen
, getLayout
, getPanesSt
, getPaneMapSt
, getPanePrim
, getPanes
, getMRUPanes
-- * View Actions
, bringPaneToFront
, newNotebook
, newNotebook'
-- * Accessing GUI elements
--, widgetFromPath
, getUIAction
, widgetGet
, initGtkRc
) where
import Prelude ()
import Prelude.Compat
import Control.Applicative (Applicative, (<$>))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.List (findIndex, isPrefixOf, deleteBy, stripPrefix, elemIndex)
import Data.Maybe
import Data.Typeable
import Data.Text (Text)
import Graphics.UI.Frame.Panes
import Graphics.UI.Editor.Parameters
import System.CPUTime (getCPUTime)
import Graphics.UI.Editor.MakeEditor
(mkField, FieldDescription(..), buildEditor)
import Graphics.UI.Editor.Simple (textEditor)
import qualified Data.Set as Set (unions, member)
import Data.Set (Set)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad (unless, when, foldM, void)
import qualified Data.Text as T (pack, stripPrefix, unpack)
import Data.Foldable (forM_)
import Control.Arrow (Arrow(..))
import GI.Gtk
(UIManager, panedNew, gridNew, widgetSetValign, widgetSetHalign,
windowGetScreen, Bin(..), uIManagerGetAction, Action, toWidget,
selectionDataGetText, SelectionData, onWidgetDragDataReceived,
widgetDragDestSetTargetList, widgetDragDestSet,
setNotebookEnablePopup, notebookSetScrollable,
widgetSetSizeRequest, notebookNew, windowPresent, getWidgetVisible,
notebookRemovePage, Window(..), widgetGetToplevel,
onWidgetDeleteEvent, widgetGetAllocation, windowSetDefaultSize,
setWidgetName, windowNew, Widget(..), dialogGetContentArea,
setWindowTitle, setWindowTransientFor, Window,
windowSetTransientFor, setMessageDialogText,
constructDialogUseHeaderBar, MessageDialog(..), widgetDestroy,
dialogRun, afterNotebookSwitchPage, widgetGrabFocus,
boxReorderChild, Box(..), notebookSetMenuLabel,
notebookSetTabLabel, notebookInsertPage, Paned(..), panedPack1,
containerRemove, notebookPageNum, Notebook(..), panedPack2,
Container(..), widgetGetParent, notebookGetCurrentPage,
notebookSetTabPos, PositionType(..), notebookSetShowTabs,
notebookGetShowTabs, Label(..), containerGetChildren, binGetChild,
notebookGetTabLabel, labelSetMarkup, labelSetUseMarkup,
onWidgetButtonReleaseEvent, onButtonClicked, selectionDataSetText,
onWidgetDragDataGet, widgetDragSourceSetTargetList,
targetListAddTextTargets, targetListNew, widgetDragSourceSet,
setWidgetHalign, setWidgetValign, cssProviderLoadFromData,
boxPackStart, containerAdd, containerSetBorderWidth,
styleContextAddProvider, widgetGetStyleContext, cssProviderNew,
imageNewFromPixbuf, iconThemeLoadIcon, iconThemeGetDefault,
buttonSetRelief, buttonNew, eventBoxSetVisibleWindow, eventBoxNew,
EventBox, notebookSetCurrentPage, widgetShowAll,
notebookInsertPageMenu, widgetGetName, notebookGetNthPage,
notebookGetNPages, labelNew, Label, IsWidget, IsNotebook,
widgetSetName, CssProvider(..))
import GI.Gtk.Objects.Widget (widgetSetFocusOnClick)
import GI.Gtk.Enums
(Orientation(..), WindowType(..), ResponseType(..),
ButtonsType(..), MessageType(..), Align(..),
ReliefStyle(..))
import GI.Gtk.Flags (DestDefaults(..), IconLookupFlags(..))
import GI.Gdk.Flags (ModifierType(..), DragAction(..))
import GI.Gdk
(DragContext, Screen, getEventButtonState)
import Graphics.UI.Frame.Rectangle (getRectangleWidth, getRectangleHeight)
import Data.GI.Base
(unsafeManagedPtrCastPtr, castTo, unsafeCastTo)
import Data.GI.Base.GObject (new')
import Data.Int (Int32)
import Data.Word (Word32)
import Data.GI.Gtk.ModelView.Types (equalManagedPtr)
import GI.Gtk.Objects.Dialog (Dialog(..))
import GI.Gtk.Objects.MessageDialog
(constructMessageDialogButtons, setMessageDialogMessageType)
import GI.Gtk.Objects.Label (Label)
import GI.Gtk.Objects.Widget (widgetSetTooltipText)
import GHC.Stack (HasCallStack)
import Foreign (Ptr)
-- import Debug.Trace (trace)
trace :: String -> a -> a
trace _ a = a
groupPrefix :: Text
groupPrefix = "_group_"
withoutGroupPrefix :: Text -> Text
withoutGroupPrefix s = fromMaybe s (groupPrefix `T.stripPrefix` s)
initGtkRc :: IO ()
initGtkRc = return ()
removePaneAdmin :: RecoverablePane alpha beta delta => alpha -> delta ()
removePaneAdmin pane = do
panes' <- getPanesSt
paneMap' <- getPaneMapSt
setPanesSt (Map.delete (paneName pane) panes')
setPaneMapSt (Map.delete (paneName pane) paneMap')
addPaneAdmin :: RecoverablePane alpha beta delta => alpha -> Connections -> PanePath -> delta Bool
addPaneAdmin pane conn pp = do
panes' <- getPanesSt
paneMap' <- getPaneMapSt
topWidget <- getTopWidget pane
widgetSetName topWidget (paneName pane)
if (paneName pane `Map.notMember` paneMap') &&
(paneName pane `Map.notMember` panes')
then do
setPaneMapSt (Map.insert (paneName pane) (pp, conn) paneMap')
setPanesSt (Map.insert (paneName pane) (PaneC pane) panes')
return True
else trace
("ViewFrame>addPaneAdmin:pane with this name already exist" <>
T.unpack (paneName pane))
$ return False
getPanePrim :: (HasCallStack, RecoverablePane alpha beta delta) => delta (Maybe alpha)
getPanePrim =
getPanes >>= \case
[p] -> return $ Just p
_ -> return Nothing
getPanes :: (HasCallStack, RecoverablePane alpha beta delta) => delta [alpha]
getPanes = mapMaybe (\ (PaneC p) -> cast p) . Map.elems <$> getPanesSt
notebookInsertOrdered :: PaneMonad alpha => (IsNotebook self, IsWidget child)
=> self
-> child -- child - the Widget to use as the contents of the page.
-> Text
-> Maybe Label -- the label for the page as Text or Label
-> Maybe Text -- ^ Text for tooltip when hovering
-> Bool
-> alpha ()
notebookInsertOrdered nb widget labelStr mbLabel mbTooltipText isGroup = do
label <- case mbLabel of
Nothing -> labelNew (Just labelStr)
Just l -> return l
menuLabel <- labelNew (Just labelStr)
numPages <- notebookGetNPages nb
mbWidgets <- mapM (notebookGetNthPage nb) [0 .. (numPages-1)]
let widgets = map (fromMaybe (error "ViewFrame.notebookInsertOrdered: no widget")) mbWidgets
labelStrs <- mapM widgetGetName widgets
let pos = fromMaybe (-1)
(findIndex
(\ s -> withoutGroupPrefix s > withoutGroupPrefix labelStr)
labelStrs)
labelBox <- if isGroup then groupLabel labelStr else mkLabelBox label labelStr
realPos <- notebookInsertPageMenu nb widget (Just labelBox) (Just menuLabel) (fromIntegral pos)
widgetSetTooltipText labelBox mbTooltipText
widgetShowAll labelBox
notebookSetCurrentPage nb realPos
-- | Returns a label box
mkLabelBox :: PaneMonad alpha => Label -> Text -> alpha EventBox
mkLabelBox lbl paneName' = do
widgetSetHalign lbl AlignStart
widgetSetValign lbl AlignStart
labelBox <- eventBoxNew
eventBoxSetVisibleWindow labelBox False
innerBox <- gridNew
tabButton <- buttonNew
widgetSetName tabButton "leksah-close-button"
widgetSetFocusOnClick tabButton False
buttonSetRelief tabButton ReliefStyleNone
widgetSetHalign tabButton AlignEnd
widgetSetValign tabButton AlignCenter
iconTheme <- iconThemeGetDefault
image <- iconThemeLoadIcon iconTheme "window-close" 10 [IconLookupFlagsUseBuiltin] >>= imageNewFromPixbuf
provider <- cssProviderNew
cssProviderLoadFromData provider (
".button {\n" <>
"padding: 0px;\n" <>
"border-width: 0px;\n" <>
"}\n" <>
"GtkImage {\n" <>
"padding: 0px;\n" <>
"}\n")
context1 <- widgetGetStyleContext tabButton
styleContextAddProvider context1 provider 600
context2 <- widgetGetStyleContext image
styleContextAddProvider context2 provider 600
setWidgetValign tabButton AlignCenter
setWidgetValign lbl AlignCenter
containerSetBorderWidth tabButton 0
containerAdd tabButton image
containerAdd innerBox lbl
containerAdd innerBox tabButton
containerAdd labelBox innerBox
setWidgetHalign innerBox AlignCenter
widgetDragSourceSet labelBox [ModifierTypeButton1Mask] Nothing [DragActionCopy,DragActionMove]
tl <- targetListNew Nothing
targetListAddTextTargets tl 0
widgetDragSourceSetTargetList labelBox $ Just tl
_ <- onWidgetDragDataGet labelBox $ \ _cont sel _id _timeStamp -> do
trace ("drag paneName=" <> T.unpack paneName') $ return ()
void $ selectionDataSetText sel paneName' (-1)
cl <- runInIO closeHandler
_ <- onButtonClicked tabButton (cl ())
_ <- onWidgetButtonReleaseEvent labelBox $ \e -> do
modifiers <- getEventButtonState e
let middleButton = ModifierTypeButton2Mask
when (middleButton `elem` modifiers) (cl ())
return False
return labelBox
where
closeHandler :: PaneMonad alpha => () -> alpha ()
closeHandler _ = case groupPrefix `T.stripPrefix` paneName' of
Just group -> closeGroup group
Nothing -> do
(PaneC pane) <- paneFromName paneName'
void $ closePane pane
groupLabel :: PaneMonad beta => Text -> beta EventBox
groupLabel group = do
label <- labelNew Nothing
labelSetUseMarkup label True
labelSetMarkup label ("<b>" <> group <> "</b>")
labelBox <- mkLabelBox label (groupPrefix <> group)
widgetShowAll labelBox
return labelBox
-- | Adds the change mark or removes it
markLabel :: (MonadIO m, IsWidget alpha, IsNotebook beta) => beta -> alpha -> Bool -> m ()
markLabel nb topWidget modified =
notebookGetTabLabel nb topWidget >>= \case
Nothing -> return ()
Just box -> liftIO (unsafeCastTo Bin box) >>= binGetChild >>= \case
Nothing -> return ()
Just container -> do
children <- liftIO (unsafeCastTo Container container) >>= containerGetChildren
label <- liftIO . unsafeCastTo Label $ case children of
(_:l:_) -> l
_ -> error "ViewFrame>>markLabel: empty children"
text <- widgetGetName topWidget
labelSetUseMarkup label True
labelSetMarkup label
(if modified
then "<span foreground=\"red\">" <> text <> "</span>"
else text)
-- | Constructs a unique pane name, which is an index and a string
figureOutPaneName :: PaneMonad alpha => Text -> alpha (Int,Text)
figureOutPaneName bn = do
bufs <- getPanesSt
let ind = foldr (\(PaneC buf) ind' ->
if primPaneName buf == bn
then max ind' (getAddedIndex buf + 1)
else ind')
0 (Map.elems bufs)
if ind == 0
then return (0,bn)
else return (ind,bn <> "(" <> T.pack (show ind) <> ")")
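-- Illustrative note, not part of the original module: with no open pane
-- whose primPaneName is "Log", figureOutPaneName "Log" evaluates to
-- (0, "Log"); with one such pane already registered at added index 0 it
-- evaluates to (1, "Log(1)").  The pane name "Log" is an assumption for
-- the example only.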
paneFromName :: PaneMonad alpha => PaneName -> alpha (IDEPane alpha)
paneFromName pn = do
mbPane <- mbPaneFromName pn
case mbPane of
Just p -> return p
Nothing -> error $ "ViewFrame>>paneFromName:Can't find pane from unique name " ++ T.unpack pn
mbPaneFromName :: PaneMonad alpha => PaneName -> alpha (Maybe (IDEPane alpha))
mbPaneFromName pn = Map.lookup pn <$> getPanesSt
-- |
guiPropertiesFromName :: PaneMonad alpha => PaneName -> alpha (PanePath, Connections)
guiPropertiesFromName pn =
fmap (Map.lookup pn) getPaneMapSt >>= \case
Just it -> return it
_ -> error $"Cant't find guiProperties from unique name " ++ T.unpack pn
posTypeToPaneDirection :: PositionType -> PaneDirection
posTypeToPaneDirection PositionTypeLeft = LeftP
posTypeToPaneDirection PositionTypeRight = RightP
posTypeToPaneDirection PositionTypeTop = TopP
posTypeToPaneDirection PositionTypeBottom = BottomP
posTypeToPaneDirection _ = error "posTypeToPaneDirection"
paneDirectionToPosType :: PaneDirection -> PositionType
paneDirectionToPosType LeftP = PositionTypeLeft
paneDirectionToPosType RightP = PositionTypeRight
paneDirectionToPosType TopP = PositionTypeTop
paneDirectionToPosType BottomP = PositionTypeBottom
--
-- | Toggle the tabs of the current notebook
--
viewSwitchTabs :: PaneMonad alpha => alpha ()
viewSwitchTabs = do
mbNb <- getActiveNotebook
case mbNb of
Nothing -> return ()
Just nb -> do
b <- notebookGetShowTabs nb
notebookSetShowTabs nb (not b)
--
-- | Sets the tab position in the current notebook
--
viewTabsPos :: PaneMonad alpha => PositionType -> alpha ()
viewTabsPos pos = do
mbNb <- getActiveNotebook
case mbNb of
Nothing -> return ()
Just nb -> notebookSetTabPos nb pos
--
-- | Split the currently active pane in horizontal direction
--
viewSplitHorizontal :: PaneMonad alpha => alpha ()
viewSplitHorizontal = viewSplit OrientationHorizontal
--
-- | Split the currently active pane in vertical direction
--
viewSplitVertical :: PaneMonad alpha => alpha ()
viewSplitVertical = viewSplit OrientationVertical
--
-- | The active view can be split in two (horizontal or vertical)
--
viewSplit :: PaneMonad alpha => Orientation -> alpha ()
viewSplit orientation = do
mbPanePath <- getActivePanePath
case mbPanePath of
Nothing -> return ()
Just panePath -> viewSplit' panePath orientation
viewSplit' :: PaneMonad alpha => PanePath -> Orientation -> alpha ()
viewSplit' panePath orientation = do
l <- getLayout
case layoutFromPath panePath l of
(TerminalP _ _ _ (Just _) _) -> trace "ViewFrame>>viewSplit': can't split detached: " return ()
_ -> do
activeNotebook <- getNotebook' "viewSplit" panePath
ind <- notebookGetCurrentPage activeNotebook
parent <- widgetGetParent activeNotebook >>= liftIO . unsafeCastTo Container . fromJust
let (name,altname,paneDir,
oldPath,newPath) = case orientation of
OrientationHorizontal ->
( "top" :: Text
, "bottom" :: Text
, TopP
, panePath ++ [SplitP TopP]
, panePath ++ [SplitP BottomP])
OrientationVertical ->
( "left"
, "right"
, LeftP
, panePath ++ [SplitP LeftP]
, panePath ++ [SplitP RightP])
_ -> error "viewSplit'"
adjustNotebooks panePath oldPath
frameState <- getFrameState
notebookPtr <- liftIO $ unsafeManagedPtrCastPtr activeNotebook
setPanePathFromNB $ Map.insert notebookPtr oldPath (panePathFromNB frameState)
nb <- newNotebook newPath
newpane <- panedNew $ case orientation of
OrientationHorizontal -> OrientationVertical
OrientationVertical -> OrientationHorizontal
_ -> error "viewSplit'"
rName <- widgetGetName activeNotebook
widgetSetName newpane rName
widgetSetName nb altname
panedPack2 newpane nb True False
nbIndex <- liftIO (castTo Notebook parent) >>= \case
Just notebook -> notebookPageNum notebook activeNotebook
Nothing -> trace "ViewFrame>>viewSplit': parent not a notebook: " $ return (-1)
containerRemove parent activeNotebook
widgetSetName activeNotebook name
panedPack1 newpane activeNotebook True False
case (reverse panePath, nbIndex) of
(SplitP dir:_, _) -> do
paned <- liftIO $ unsafeCastTo Paned parent
if dir `elem` [TopP, LeftP]
then panedPack1 paned newpane True False
else panedPack2 paned newpane True False
(GroupP group:_, n) | n >= 0 -> do
parentNotebook <- liftIO $ unsafeCastTo Notebook parent
label <- groupLabel group
_ <- notebookInsertPage parentNotebook newpane (Just label) n
label2 <- groupMenuLabel group
notebookSetMenuLabel parentNotebook newpane label2
return ()
([], _) -> do
box <- liftIO $ unsafeCastTo Box parent
boxPackStart box newpane True True 0
boxReorderChild box newpane 2
_ -> error "No notebook index found in viewSplit"
widgetShowAll newpane
widgetGrabFocus activeNotebook
if nbIndex >= 0
then do
parentNotebook <- liftIO $ unsafeCastTo Notebook parent
notebookSetCurrentPage parentNotebook nbIndex
else trace "ViewFrame>>viewSplit': parent not a notebook2: " $ return ()
handleFunc <- runInIO (handleNotebookSwitch nb)
_ <- afterNotebookSwitchPage nb (\_w i -> handleFunc $ fromIntegral i)
adjustPanes panePath (panePath ++ [SplitP paneDir])
adjustLayoutForSplit paneDir panePath
notebookGetNthPage activeNotebook ind >>= \case
Nothing -> return ()
Just widget ->
widgetGetName widget >>= mbPaneFromName >>= \case
Just (PaneC pane) -> viewMoveTo (panePath ++ [SplitP (otherDirection paneDir)]) pane
Nothing -> return ()
--
-- | Two notebooks can be collapsed to one
--
viewCollapse :: PaneMonad alpha => alpha ()
viewCollapse = do
mbPanePath <- getActivePanePath
forM_ mbPanePath viewCollapse'
viewCollapse' :: (HasCallStack, PaneMonad alpha) => PanePath -> alpha ()
viewCollapse' panePath = trace "viewCollapse' called" $ do
layout1 <- getLayoutSt
case layoutFromPath panePath layout1 of
(TerminalP _ _ _ (Just _) _) -> trace "ViewFrame>>viewCollapse': can't collapse detached: "
return ()
_ -> do
let newPanePath = init panePath
let mbOtherSidePath = otherSide panePath
case mbOtherSidePath of
Nothing -> trace "ViewFrame>>viewCollapse': no other side path found: " return ()
Just otherSidePath ->
getNotebookOrPaned otherSidePath (castTo Notebook) >>= \case
Nothing -> trace "ViewFrame>>viewCollapse': other side path not collapsedXX: " $
case layoutFromPath otherSidePath layout1 of
VerticalP{} -> do
viewCollapse' (otherSidePath ++ [SplitP LeftP])
viewCollapse' panePath
HorizontalP{} -> do
viewCollapse' (otherSidePath ++ [SplitP TopP])
viewCollapse' panePath
_ -> trace "ViewFrame>>viewCollapse': impossible1 " return ()
Just otherSideNotebook ->
getNotebookOrPaned panePath (castTo Notebook) >>= \case
Nothing -> trace "ViewFrame>>viewCollapse': path not collapsedXX: " $
case layoutFromPath panePath layout1 of
VerticalP{} -> do
viewCollapse' (panePath ++ [SplitP LeftP])
viewCollapse' panePath
HorizontalP{} -> do
viewCollapse' (panePath ++ [SplitP TopP])
viewCollapse' panePath
_ -> trace "ViewFrame>>viewCollapse': impossible1 " return ()
Just activeNotebook -> do
paneMap' <- getPaneMapSt
-- 1. Move panes and groups to one side (includes changes to paneMap and layout)
let paneNamesToMove = map fst
$filter (\(_w,(p,_)) -> otherSidePath == p)
$Map.toList paneMap'
panesToMove <- mapM paneFromName paneNamesToMove
mapM_ (\(PaneC p) -> viewMoveTo panePath p) panesToMove
let groupNames = map (\n -> groupPrefix <> n) $
getGroupsFrom otherSidePath layout1
mapM_ (\n -> move' (n,activeNotebook)) groupNames
-- 2. Remove unused notebook from admin
st <- getFrameState
notebookPtr <- liftIO $ unsafeManagedPtrCastPtr otherSideNotebook
let ! newMap = Map.delete notebookPtr (panePathFromNB st)
setPanePathFromNB newMap
-- 3. Remove one level and reparent notebook
parent <- widgetGetParent activeNotebook >>= liftIO . unsafeCastTo Container . fromJust
grandparent <- widgetGetParent parent >>= liftIO . unsafeCastTo Container . fromJust
nbIndex <- liftIO $ castTo Notebook grandparent >>= \case
Just notebook -> notebookPageNum notebook parent
Nothing -> return (-1)
containerRemove grandparent parent
containerRemove parent activeNotebook
if length panePath > 1
then do
let lasPathElem = last newPanePath
case (lasPathElem, nbIndex) of
(SplitP dir, _) | dir == TopP || dir == LeftP -> do
paned <- liftIO $ unsafeCastTo Paned grandparent
panedPack1 paned activeNotebook True False
(SplitP dir, _) | dir == BottomP || dir == RightP -> do
paned <- liftIO $ unsafeCastTo Paned grandparent
panedPack2 paned activeNotebook True False
(GroupP group, n) | n >= 0 -> do
grandParentNotebook <- liftIO $ unsafeCastTo Notebook grandparent
label <- groupLabel group
_ <- notebookInsertPage grandParentNotebook activeNotebook (Just label) n
notebookSetCurrentPage grandParentNotebook n
return ()
_ -> error "collapse: Unable to find page index"
widgetSetName activeNotebook $panePathElementToWidgetName lasPathElem
else do
box <- liftIO $ unsafeCastTo Box grandparent
boxPackStart box activeNotebook True True 0
boxReorderChild box activeNotebook 2
widgetSetName activeNotebook "root"
-- 4. Change panePathFromNotebook
adjustNotebooks panePath newPanePath
-- 5. Change paneMap
adjustPanes panePath newPanePath
-- 6. Change layout
adjustLayoutForCollapse panePath
getGroupsFrom :: PanePath -> PaneLayout -> [Text]
getGroupsFrom path layout' =
case layoutFromPath path layout' of
t@TerminalP{} -> Map.keys (paneGroups t)
HorizontalP{} -> []
VerticalP{} -> []
viewNewGroup :: PaneMonad alpha => alpha ()
viewNewGroup = do
mainWindow <- getMainWindow
groupNameDialog mainWindow >>= \case
Just groupName ->
fmap (Set.member groupName . allGroupNames) getLayoutSt >>= \case
True -> do
md <- new' MessageDialog [
constructDialogUseHeaderBar 0,
constructMessageDialogButtons ButtonsTypeClose]
setMessageDialogMessageType md MessageTypeWarning
setMessageDialogText md $ "Group name not unique " <> groupName
windowSetTransientFor md (Just mainWindow)
_ <- dialogRun md
widgetDestroy md
return ()
False -> viewNest groupName
Nothing -> return ()
newGroupOrBringToFront :: PaneMonad alpha => Text -> PanePath -> alpha (Maybe PanePath,Bool)
newGroupOrBringToFront groupName pp = do
layout' <- getLayoutSt
if groupName `Set.member` allGroupNames layout'
then do
mbPP <- bringGroupToFront groupName
return (mbPP,False)
else do
let realPath = getBestPanePath pp layout'
viewNest' realPath groupName
return (Just (realPath ++ [GroupP groupName]),True)
bringGroupToFront :: PaneMonad alpha => Text -> alpha (Maybe PanePath)
bringGroupToFront groupName =
fmap (findGroupPath groupName) getLayoutSt >>= \case
Just path -> do
widget <- getNotebookOrPaned path return
setCurrentNotebookPages widget
return (Just path)
Nothing -> return Nothing
-- Yet another stupid little dialog
groupNameDialog :: (Applicative m, MonadIO m) => Window -> m (Maybe Text)
groupNameDialog parent = do
dia <- new' Dialog [constructDialogUseHeaderBar 0]
setWindowTransientFor dia parent
setWindowTitle dia "Group"
upper <- dialogGetContentArea dia >>= liftIO . unsafeCastTo Box
(widget,_inj,ext,_) <- buildEditor fields ""
_okButton <- dialogAddButton' dia "New" ResponseTypeOk
boxPackStart upper widget True True 7
widgetShowAll dia
resp <- dialogRun dia
value <- liftIO $ ext ""
widgetDestroy dia
case toEnum $ fromIntegral resp of
ResponseTypeOk | value /= Just "" -> return value
_ -> return Nothing
where
fields :: FieldDescription Text
fields = VFD emptyParams [
mkField
(paraName <<<- ParaName "Group name "
$ emptyParams)
id
const
(textEditor (const True) True)]
viewNest :: PaneMonad alpha => Text -> alpha ()
viewNest group = do
mbPanePath <- getActivePanePath
case mbPanePath of
Nothing -> return ()
Just panePath -> viewNest' panePath group
viewNest' :: PaneMonad alpha => PanePath -> Text -> alpha ()
viewNest' panePath group = do
activeNotebook <- getNotebook' "viewNest' 1" panePath
_parent <- widgetGetParent activeNotebook
layout' <- getLayoutSt
let paneLayout = layoutFromPath panePath layout'
case paneLayout of
TerminalP {} -> do
nb <- newNotebook (panePath ++ [GroupP group])
widgetSetName nb (groupPrefix <> group)
notebookInsertOrdered activeNotebook nb group (Nothing :: Maybe Label) Nothing True
widgetShowAll nb
--widgetGrabFocus activeNotebook
handleFunc <- runInIO (handleNotebookSwitch nb)
_ <- afterNotebookSwitchPage nb (\_w i -> handleFunc $ fromIntegral i)
adjustLayoutForNest group panePath
_ -> return ()
closeGroup :: PaneMonad alpha => Text -> alpha ()
closeGroup groupName = do
layout' <- getLayout
let mbPath = findGroupPath groupName layout'
mainWindow <- getMainWindow
case mbPath of
Nothing -> trace ("ViewFrame>>closeGroup: Group path not found: " <> T.unpack groupName) return ()
Just path -> do
panesMap <- getPaneMapSt
let nameAndpathList = filter (\(_a,pp) -> path `isPrefixOf` pp)
$ map (second fst) (Map.assocs panesMap)
continue <- case nameAndpathList of
(_:_) -> do
md <- new' MessageDialog [
constructDialogUseHeaderBar 0,
constructMessageDialogButtons ButtonsTypeYesNo]
setMessageDialogMessageType md MessageTypeQuestion
setMessageDialogText md $ "Group " <> groupName <> " not empty. Close with all contents?"
windowSetTransientFor md (Just mainWindow)
rid <- dialogRun md
widgetDestroy md
case toEnum $ fromIntegral rid of
ResponseTypeYes -> return True
_ -> return False
[] -> return True
when continue $ do
panes' <- mapM (paneFromName . fst) nameAndpathList
results <- mapM (\ (PaneC p) -> closePane p) panes'
when (and results) $ do
nbOrPaned <- getNotebookOrPaned path return
parent <- widgetGetParent nbOrPaned >>= liftIO. unsafeCastTo Container . fromJust
containerRemove parent nbOrPaned
setLayoutSt (removeGL path layout')
ppMap <- getPanePathFromNB
setPanePathFromNB (Map.filter (\pa -> not (path `isPrefixOf` pa)) ppMap)
viewDetach :: PaneMonad alpha => alpha (Maybe (Window, Notebook))
viewDetach = do
id' <- liftIO $ show <$> getCPUTime
mbPanePath <- getActivePanePath
case mbPanePath of
Nothing -> return Nothing
Just panePath -> viewDetach' panePath (T.pack id')
viewDetach' :: PaneMonad alpha => PanePath -> Text -> alpha (Maybe (Window, Notebook))
viewDetach' panePath id' = do
activeNotebook <- getNotebook' "viewDetach'" panePath
parent <- widgetGetParent activeNotebook >>= liftIO . unsafeCastTo Container . fromJust
fmap (layoutFromPath panePath) getLayoutSt >>= \case
TerminalP{detachedSize = size} -> do
window <- windowNew WindowTypeToplevel
setWindowTitle window "Leksah detached window"
setWidgetName window id'
case size of
Just (width, height) -> windowSetDefaultSize window (fromIntegral width) (fromIntegral height)
Nothing -> do
a <- widgetGetAllocation activeNotebook
curWidth <- getRectangleWidth a
curHeight <- getRectangleHeight a
windowSetDefaultSize window curWidth curHeight
containerRemove parent activeNotebook
containerAdd window activeNotebook
widgetShowAll window
handleFunc <- runInIO (handleReattach id' window)
_ <- onWidgetDeleteEvent window $ \_e -> handleFunc ()
windows' <- getWindowsSt
setWindowsSt $ windows' ++ [window]
adjustLayoutForDetach id' panePath
return (Just (window, activeNotebook))
_ -> return Nothing
handleReattach :: PaneMonad alpha => Text -> Window -> () -> alpha Bool
handleReattach windowId window _ =
fmap (findDetachedPath windowId) getLayout >>= \case
Nothing -> trace ("ViewFrame>>handleReattach: panePath for id not found: " <> T.unpack windowId)
$ do
windows' <- getWindowsSt
setWindowsSt $ deleteBy equalManagedPtr window windows'
return False
Just pp -> do
nb <- getNotebook' "handleReattach" pp
parent <- getNotebookOrPaned (init pp) (unsafeCastTo Container)
containerRemove window nb
containerAdd parent nb
adjustLayoutForReattach pp
windows' <- getWindowsSt
setWindowsSt $ deleteBy equalManagedPtr window windows'
case last pp of
GroupP groupName -> do
label <- groupLabel groupName
parentNotebook <- liftIO $ unsafeCastTo Notebook parent
notebookSetTabLabel parentNotebook nb (Just label)
_ -> return ()
return False -- "now destroy the window"
getActiveWindow :: PaneMonad alpha => alpha (Maybe Window)
getActiveWindow = do
mbPanePath <- getActivePanePath
case mbPanePath of
Nothing -> return Nothing
Just panePath -> do
activeNotebook <- getNotebook' "getActiveWindow" panePath
widgetGetToplevel activeNotebook >>= liftIO . castTo Window
getActiveScreen :: PaneMonad alpha => alpha (Maybe Screen)
getActiveScreen = do
mbWindow <- getActiveWindow
case mbWindow of
Nothing -> return Nothing
Just window -> Just <$> windowGetScreen window
groupMenuLabel :: PaneMonad beta => Text -> beta (Maybe Label)
groupMenuLabel group = Just <$> labelNew (Just group)
handleNotebookSwitch :: PaneMonad beta => Notebook -> Int -> beta ()
handleNotebookSwitch nb index =
notebookGetNthPage nb (fromIntegral index) >>= \case
Nothing -> error "ViewFrame/handleNotebookSwitch: Can't find widget"
Just w -> do
name <- widgetGetName w
mbPane <- findPaneFor name
case mbPane of
Nothing -> return ()
Just (PaneC p) -> makeActive p
where
findPaneFor :: PaneMonad beta => Text -> beta (Maybe (IDEPane beta))
findPaneFor n1 = do
panes' <- getPanesSt
foldM (\r (PaneC p) -> do
n2 <- widgetGetName =<< getTopWidget p
return (if n1 == n2 then Just (PaneC p) else r))
Nothing (Map.elems panes')
--
-- | Moves the activePane in the given direction, if possible
-- | If there are many possibilities, choose the leftmost and topmost
--
viewMove :: PaneMonad beta => PaneDirection -> beta ()
viewMove direction =
getActivePaneSt >>= \case
(Nothing, _) -> return ()
(Just (paneName',_),_) -> do
(PaneC pane) <- paneFromName paneName'
getActivePanePath >>= \case
Nothing -> return ()
Just panePath -> do
layout' <- getLayoutSt
case findMoveTarget panePath layout' direction of
Nothing -> return ()
Just moveTo -> viewMoveTo moveTo pane
--
-- | Find the target for a move
--
findMoveTarget :: PanePath -> PaneLayout -> PaneDirection -> Maybe PanePath
findMoveTarget panePath layout' direction=
let oppositeDir = otherDirection direction
canMove [] = []
canMove (SplitP d:rest) | d == oppositeDir
= SplitP direction : rest
canMove (GroupP _group:_) = []
canMove (_:rest) = canMove rest
basePath = reverse (canMove $ reverse panePath)
in case basePath of
[] -> Nothing
_ -> let layoutP = layoutFromPath basePath layout'
in Just $basePath ++ findAppropriate layoutP oppositeDir
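-- An illustrative example (added, not from the original sources): starting in
-- the left half of a plain vertical split and moving right targets the right
-- half; the empty 'TerminalP' values are just placeholder leaves.
--
-- > let leaf = TerminalP Map.empty Nothing 0 Nothing Nothing
-- > findMoveTarget [SplitP LeftP] (VerticalP leaf leaf 0) RightP
-- >     -- evaluates to Just [SplitP RightP]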
--
-- | Moves the given Pane to the given path
--
viewMoveTo :: RecoverablePane alpha beta delta => PanePath -> alpha -> delta ()
viewMoveTo toPanePath pane = do
let name = paneName pane
toNB <- getNotebook' "move" toPanePath
move' (name,toNB)
--
-- | Moves the given Pane to the given path, taking care of groups (layout, paneMap)
--
move' :: PaneMonad alpha => (PaneName,Notebook) -> alpha ()
move' (paneName', toNB) = do
paneMap' <- getPaneMapSt
panes' <- getPanesSt
layout' <- getLayout
frameState <- getFrameState
case groupPrefix `T.stripPrefix` paneName' of
Just group ->
case findGroupPath group layout' of
Nothing -> trace ("ViewFrame>>move': group not found: " <> T.unpack group) return ()
Just fromPath -> do
groupNBOrPaned <- getNotebookOrPaned fromPath return
fromNB <- getNotebook' "move'" (init fromPath)
toNBPtr <- liftIO $ unsafeManagedPtrCastPtr toNB
case toNBPtr `Map.lookup` panePathFromNB frameState of
Nothing -> trace "ViewFrame>>move': panepath for Notebook not found1" return ()
Just toPath ->
unless (fromNB `equalManagedPtr` toNB || fromPath `isPrefixOf` toPath) $ do
num <- notebookPageNum fromNB groupNBOrPaned
if num < 0
then trace "ViewFrame>>move': group notebook not found" return ()
else do
notebookRemovePage fromNB num
label <- groupLabel group
notebookInsertOrdered toNB groupNBOrPaned group (Nothing :: Maybe Label) Nothing True
notebookSetTabLabel toNB groupNBOrPaned (Just label)
adjustPanes fromPath (toPath ++ [GroupP group])
adjustLayoutForGroupMove fromPath toPath group
adjustNotebooks fromPath (toPath ++ [GroupP group])
_layout2 <- getLayout
return ()
Nothing ->
case paneName' `Map.lookup` panes' of
Nothing -> trace ("ViewFrame>>move': pane not found: " <> T.unpack paneName') return ()
Just (PaneC pane) -> do
toNBPtr <- liftIO $ unsafeManagedPtrCastPtr toNB
case toNBPtr `Map.lookup` panePathFromNB frameState of
Nothing -> trace "ViewFrame>>move': panepath for Notebook not found2" return ()
Just toPath ->
case paneName' `Map.lookup` paneMap' of
Nothing -> trace ("ViewFrame>>move': pane data not found: " <> T.unpack paneName')
return ()
Just (_fromPath,_) -> do
child <- getTopWidget pane
(fromPane,cid) <- guiPropertiesFromName paneName'
fromNB <- getNotebook' "move'" fromPane
unless (fromNB `equalManagedPtr` toNB) $ do
num <- notebookPageNum fromNB child
if num < 0
then trace "ViewFrame>>move': widget not found" return ()
else do
notebookRemovePage fromNB num
notebookInsertOrdered toNB child paneName' (Nothing :: Maybe Label) (paneTooltipText pane) False
let paneMap1 = Map.delete paneName' paneMap'
setPaneMapSt $ Map.insert paneName' (toPath,cid) paneMap1
findAppropriate :: PaneLayout -> PaneDirection -> PanePath
findAppropriate TerminalP {} _ = []
findAppropriate (HorizontalP t _b _) LeftP = SplitP TopP : findAppropriate t LeftP
findAppropriate (HorizontalP t _b _) RightP = SplitP TopP : findAppropriate t RightP
findAppropriate (HorizontalP _t b _) BottomP = SplitP BottomP : findAppropriate b BottomP
findAppropriate (HorizontalP _t b _) TopP = SplitP TopP : findAppropriate b TopP
findAppropriate (VerticalP l _r _) LeftP = SplitP LeftP : findAppropriate l LeftP
findAppropriate (VerticalP _l r _) RightP = SplitP RightP : findAppropriate r RightP
findAppropriate (VerticalP l _r _) BottomP = SplitP LeftP : findAppropriate l BottomP
findAppropriate (VerticalP _l r _) TopP = SplitP RightP : findAppropriate r TopP
--
-- | Bring the pane to the front position in its notebook
--
bringPaneToFront :: RecoverablePane alpha beta delta => alpha -> delta ()
bringPaneToFront pane = do
tv <- getTopWidget pane
w <- widgetGetToplevel tv
visible <- getWidgetVisible w
when visible $ liftIO (unsafeCastTo Window w) >>= windowPresent
setCurrentNotebookPages tv
setCurrentNotebookPages :: (MonadIO m, IsWidget widget) => widget -> m ()
setCurrentNotebookPages widget = do
mbParent <- widgetGetParent widget
case mbParent of
Just parent -> do
setCurrentNotebookPages parent
liftIO (castTo Notebook parent) >>= \case
Just notebook ->
notebookPageNum notebook widget >>= \case
-1 -> return ()
pageNum -> notebookSetCurrentPage notebook pageNum
Nothing -> return ()
Nothing -> return ()
--
-- | Get a valid panePath from a standard path.
--
getBestPanePath :: StandardPath -> PaneLayout -> PanePath
getBestPanePath sp pl = reverse $ getStandard' sp pl []
where
getStandard' (GroupP group:sp') TerminalP {paneGroups = groups} p
| group `Map.member` groups = getStandard' sp' (groups Map.! group) (GroupP group:p)
getStandard' _ TerminalP {} p = p
getStandard' (SplitP LeftP:sp') (VerticalP l _r _) p = getStandard' sp' l (SplitP LeftP:p)
getStandard' (SplitP RightP:sp') (VerticalP _l r _) p = getStandard' sp' r (SplitP RightP:p)
getStandard' (SplitP TopP:sp') (HorizontalP t _b _) p = getStandard' sp' t (SplitP TopP:p)
getStandard' (SplitP BottomP:sp') (HorizontalP _t b _) p = getStandard' sp' b (SplitP BottomP:p)
-- if no match get leftmost topmost
getStandard' _ (VerticalP l _r _) p = getStandard' [] l (SplitP LeftP:p)
getStandard' _ (HorizontalP t _b _) p = getStandard' [] t (SplitP TopP:p)
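-- A worked example (added for clarity, not in the original): the standard path
-- is followed for as long as the layout matches it, otherwise the leftmost /
-- topmost branch is taken.
--
-- > let leaf = TerminalP Map.empty Nothing 0 Nothing Nothing
-- > getBestPanePath [SplitP RightP, SplitP TopP] (VerticalP leaf (HorizontalP leaf leaf 0) 0)
-- >     -- evaluates to [SplitP RightP, SplitP TopP]
-- > getBestPanePath [SplitP BottomP] (VerticalP leaf leaf 0)
-- >     -- no match, falls back to the leftmost pane: [SplitP LeftP]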
--
-- | Get a standard path.
--
getBestPathForId :: PaneMonad alpha => Text -> alpha PanePath
getBestPathForId id' = do
p <- panePathForGroup id'
getBestPanePath p <$> getLayout
--
-- | Construct a new notebook
--
newNotebook' :: IO Notebook
newNotebook' = do
nb <- notebookNew
widgetSetSizeRequest nb 50 50
notebookSetTabPos nb PositionTypeTop
notebookSetShowTabs nb True
notebookSetScrollable nb True
setNotebookEnablePopup nb True
return nb
--
-- | Construct a new notebook and register it for the given pane path
--
newNotebook :: PaneMonad alpha => PanePath -> alpha Notebook
newNotebook pp = do
st <- getFrameState
nb <- liftIO newNotebook'
nbPtr <- liftIO $ unsafeManagedPtrCastPtr nb
setPanePathFromNB $ Map.insert nbPtr pp (panePathFromNB st)
func <- runInIO move'
tl <- targetListNew Nothing
targetListAddTextTargets tl 0
widgetDragDestSet nb [DestDefaultsAll] Nothing [DragActionCopy, DragActionMove]
widgetDragDestSetTargetList nb $ Just tl
_ <- onWidgetDragDataReceived nb (dragFunc nb func)
return nb
where
dragFunc ::
Notebook ->
((PaneName,Notebook) -> IO ()) ->
DragContext ->
Int32 ->
Int32 ->
SelectionData ->
Word32 ->
Word32 ->
IO ()
dragFunc nb func _cont _x _y data_ _id _timeStamp =
selectionDataGetText data_ >>= \case
Nothing -> return ()
Just str -> do
trace ("dragFunc str=" <> T.unpack str) $ return ()
func (str,nb)
terminalsWithPanePath :: PaneLayout -> [(PanePath,PaneLayout)]
terminalsWithPanePath pl = map (first reverse) $ terminalsWithPP [] pl
where
terminalsWithPP pp t@(TerminalP groups _ _ _ _) = (pp, t) : concatMap (terminalsFromGroup pp)
(Map.toList groups)
terminalsWithPP pp (VerticalP l r _) = terminalsWithPP (SplitP LeftP : pp) l
++ terminalsWithPP (SplitP RightP : pp) r
terminalsWithPP pp (HorizontalP t b _) = terminalsWithPP (SplitP TopP : pp) t
++ terminalsWithPP (SplitP BottomP : pp) b
terminalsFromGroup pp (name,layout') = terminalsWithPP (GroupP name : pp) layout'
findGroupPath :: Text -> PaneLayout -> Maybe PanePath
findGroupPath group layout' =
let terminalPairs = terminalsWithPanePath layout'
in case filter filterFunc terminalPairs of
[] -> Nothing
[(pp, _)] -> Just (pp ++ [GroupP group])
_ -> error ("ViewFrame>>group name not unique: " ++ T.unpack group)
where
filterFunc (_, TerminalP groups _ _ _ _) = group `Set.member` Map.keysSet groups
filterFunc _ = error "ViewFrame>>findGroupPath: impossible"
findDetachedPath :: Text -> PaneLayout -> Maybe PanePath
findDetachedPath id' layout' =
let terminalPairs = terminalsWithPanePath layout'
in case filter filterFunc terminalPairs of
[] -> Nothing
[(pp, _)] -> Just pp
_ -> error ("ViewFrame>>window id not unique: " ++ T.unpack id')
where
filterFunc (_, TerminalP _ _ _ (Just lid) _) = lid == id'
filterFunc _ = False
allGroupNames :: PaneLayout -> Set Text
allGroupNames pl = Set.unions $ map getFunc (terminalsWithPanePath pl)
where
getFunc (_, TerminalP groups _ _ _ _) = Map.keysSet groups
getFunc _ = error "ViewFrame>>allGroupNames: impossible"
--
-- | Get another pane path which points to the other side at the same level
--
otherSide :: PanePath -> Maybe PanePath
otherSide p =
case reverse p of
(SplitP d:rest) -> Just . reverse $ SplitP (otherDirection d) : rest
_ -> Nothing
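-- For example (illustrative, not part of the original module):
--
-- > otherSide [GroupP "logs", SplitP TopP]  -- Just [GroupP "logs", SplitP BottomP]
-- > otherSide [GroupP "logs"]               -- Nothing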
--
-- | Get the opposite direction of a pane direction
--
otherDirection :: PaneDirection -> PaneDirection
otherDirection LeftP = RightP
otherDirection RightP = LeftP
otherDirection TopP = BottomP
otherDirection BottomP = TopP
--
-- | Get the layout at the given pane path
--
layoutFromPath :: PanePath -> PaneLayout -> PaneLayout
layoutFromPath [] l = l
layoutFromPath (GroupP group:r) (TerminalP {paneGroups = groups})
| group `Map.member` groups = layoutFromPath r (groups Map.! group)
layoutFromPath (SplitP TopP:r) (HorizontalP t _ _) = layoutFromPath r t
layoutFromPath (SplitP BottomP:r) (HorizontalP _ b _) = layoutFromPath r b
layoutFromPath (SplitP LeftP:r) (VerticalP l _ _) = layoutFromPath r l
layoutFromPath (SplitP RightP:r) (VerticalP _ ri _) = layoutFromPath r ri
layoutFromPath pp l = error
$"inconsistent layout (layoutFromPath) " ++ show pp ++ " " ++ show l
layoutsFromPath :: PanePath -> PaneLayout -> [PaneLayout]
layoutsFromPath (GroupP group:r) layout'@TerminalP {paneGroups = groups}
| group `Map.member` groups
= layout':layoutsFromPath r (groups Map.! group)
layoutsFromPath [] layout' = [layout']
layoutsFromPath (SplitP TopP:r) layout'@(HorizontalP t _b _) = layout':layoutsFromPath r t
layoutsFromPath (SplitP BottomP:r) layout'@(HorizontalP _t b _) = layout':layoutsFromPath r b
layoutsFromPath (SplitP LeftP:r) layout'@(VerticalP l _ri _) = layout':layoutsFromPath r l
layoutsFromPath (SplitP RightP:r) layout'@(VerticalP _l ri _) = layout':layoutsFromPath r ri
layoutsFromPath pp l = error
$"inconsistent layout (layoutsFromPath) " ++ show pp ++ " " ++ show l
getWidgetNameList :: PanePath -> PaneLayout -> [Text]
getWidgetNameList path layout' = reverse $ nameList (reverse path) (reverse $ layoutsFromPath path layout')
where
nameList [] _ = reverse ["Leksah Main Window","topBox","root"]
nameList (pe:_) (TerminalP{detachedId = Just id'}:_) = [panePathElementToWidgetName pe, id']
nameList (pe:rpath) (_:rlayout) = panePathElementToWidgetName pe : nameList rpath rlayout
nameList _ _ = error $ "inconsistent layout (getWidgetNameList) " ++ show path ++ " " ++ show layout'
getNotebookOrPaned :: PaneMonad alpha => PanePath -> (Widget -> IO beta) -> alpha beta
getNotebookOrPaned p cf = do
layout' <- getLayout
(widgetGet $ getWidgetNameList p layout') cf
--
-- | Get the notebook widget for the given pane path
--
getNotebook :: PaneMonad alpha => PanePath -> alpha Notebook
getNotebook p = getNotebookOrPaned p (unsafeCastTo Notebook)
getNotebook' :: (HasCallStack, PaneMonad alpha) => Text -> PanePath -> alpha Notebook
getNotebook' _str p = getNotebookOrPaned p (unsafeCastTo Notebook)
--
-- | Get the (gtk) Paned widget for a given path
--
getPaned :: PaneMonad alpha => PanePath -> alpha Paned
getPaned p = getNotebookOrPaned p $ unsafeCastTo Paned
--
-- | Get the path to the active pane
--
getActivePanePath :: PaneMonad alpha => alpha (Maybe PanePath)
getActivePanePath =
getActivePaneSt >>= \case
(Nothing, _) -> return Nothing
(Just (paneName',_),_) -> do
(pp,_) <- guiPropertiesFromName paneName'
return (Just pp)
getActivePanePathOrStandard :: PaneMonad alpha => StandardPath -> alpha PanePath
getActivePanePathOrStandard sp =
getActivePanePath >>= \case
Just app -> return app
Nothing -> getBestPanePath sp <$> getLayoutSt
--
-- | Get the active notebook
--
getActiveNotebook :: PaneMonad alpha => alpha (Maybe Notebook)
getActiveNotebook = do
mbPanePath <- getActivePanePath
case mbPanePath of
Just panePath -> do
nb <- getNotebook' "getActiveNotebook" panePath
return (Just nb)
Nothing -> return Nothing
--
-- | Translates a pane direction to the widget name
--
paneDirectionToWidgetName :: PaneDirection -> Text
paneDirectionToWidgetName TopP = "top"
paneDirectionToWidgetName BottomP = "bottom"
paneDirectionToWidgetName LeftP = "left"
paneDirectionToWidgetName RightP = "right"
panePathElementToWidgetName :: PanePathElement -> Text
panePathElementToWidgetName (SplitP dir) = paneDirectionToWidgetName dir
panePathElementToWidgetName (GroupP group) = groupPrefix <> group
--
-- | Changes a pane path in the pane map
--
adjustPanes :: PaneMonad alpha => PanePath -> PanePath -> alpha ()
adjustPanes fromPane toPane = do
paneMap' <- getPaneMapSt
setPaneMapSt (Map.map (\(pp,other) ->
case stripPrefix fromPane pp of
Just rest -> (toPane ++ rest,other)
_ -> (pp,other)) paneMap')
adjustNotebooks :: PaneMonad alpha => PanePath -> PanePath -> alpha ()
adjustNotebooks fromPane toPane = do
npMap <- trace ("+++ adjustNotebooks from: " <> show fromPane <> " to " <> show toPane)
getPanePathFromNB
setPanePathFromNB (Map.map (\pp ->
case stripPrefix fromPane pp of
Just rest -> toPane ++ rest
_ -> pp) npMap)
--
-- | Changes the layout for a split
--
adjustLayoutForSplit :: PaneMonad alpha => PaneDirection -> PanePath -> alpha ()
adjustLayoutForSplit dir path = do
layout' <- getLayoutSt
let paneLayout = layoutFromPath path layout'
newLayout = TerminalP Map.empty Nothing 0 Nothing Nothing
newTerm = case dir of
LeftP -> VerticalP paneLayout newLayout 0
RightP -> VerticalP newLayout paneLayout 0
TopP -> HorizontalP paneLayout newLayout 0
BottomP -> HorizontalP newLayout paneLayout 0
setLayoutSt $ adjustLayout path layout' newTerm
--
-- | Changes the layout for a nest
--
adjustLayoutForNest :: PaneMonad alpha => Text -> PanePath -> alpha ()
adjustLayoutForNest group path = do
layout' <- getLayoutSt
let paneLayout = layoutFromPath path layout'
newTerm = case paneLayout of
TerminalP {paneGroups = groups} -> paneLayout {
paneGroups = Map.insert group (TerminalP Map.empty Nothing 0 Nothing Nothing) groups}
_ -> error "Unexpected layout type in adjustLayoutForNest"
setLayoutSt $ adjustLayout path layout' newTerm
--
-- | Changes the layout for a detach
--
adjustLayoutForDetach :: PaneMonad alpha => Text -> PanePath -> alpha ()
adjustLayoutForDetach id' path = do
layout' <- getLayoutSt
let paneLayout = layoutFromPath path layout'
newTerm = case paneLayout of
TerminalP {} -> paneLayout {detachedId = Just id'}
_ -> error "Unexpected layout type in adjustLayoutForDetach"
setLayoutSt $ adjustLayout path layout' newTerm
--
-- | Changes the layout for a reattach
--
adjustLayoutForReattach :: PaneMonad alpha => PanePath -> alpha ()
adjustLayoutForReattach path = do
layout' <- getLayoutSt
let paneLayout = layoutFromPath path layout'
newTerm = case paneLayout of
TerminalP {} -> paneLayout {detachedId = Nothing, detachedSize = Nothing}
_ -> error "Unexpected layout type in adjustLayoutForReattach"
setLayoutSt $ adjustLayout path layout' newTerm
--
-- | Changes the layout for a collapse
--
adjustLayoutForCollapse :: PaneMonad alpha => PanePath -> alpha ()
adjustLayoutForCollapse oldPath = do
layout' <- getLayoutSt
let pathLayout = layoutFromPath oldPath layout'
setLayoutSt $ adjustLayout (init oldPath) layout' pathLayout
--
-- | Changes the layout for a move
--
adjustLayoutForGroupMove :: PaneMonad alpha => PanePath -> PanePath -> Text -> alpha ()
adjustLayoutForGroupMove fromPath toPath group = do
layout' <- getLayout
let layoutToMove = layoutFromPath fromPath layout'
let newLayout = removeGL fromPath layout'
setLayoutSt (addGL layoutToMove (toPath ++ [GroupP group]) newLayout)
--
-- | Changes the layout for a remove
--
adjustLayoutForGroupRemove :: PaneMonad alpha => PanePath -> Text -> alpha ()
adjustLayoutForGroupRemove fromPath _group = do
layout' <- getLayout
setLayoutSt (removeGL fromPath layout')
--
-- | Remove group layout at a certain path
--
removeGL :: PanePath -> PaneLayout -> PaneLayout
removeGL [GroupP group] t@(TerminalP oldGroups _ _ _ _)
| group `Map.member` oldGroups = t{paneGroups = group `Map.delete` oldGroups}
removeGL (GroupP group:r) old@TerminalP {paneGroups = groups}
| group `Map.member` groups = old{paneGroups = Map.adjust (removeGL r) group groups}
removeGL (SplitP TopP:r) (HorizontalP tp bp _) = HorizontalP (removeGL r tp) bp 0
removeGL (SplitP BottomP:r) (HorizontalP tp bp _) = HorizontalP tp (removeGL r bp) 0
removeGL (SplitP LeftP:r) (VerticalP lp rp _) = VerticalP (removeGL r lp) rp 0
removeGL (SplitP RightP:r) (VerticalP lp rp _) = VerticalP lp (removeGL r rp) 0
removeGL p l = error $"ViewFrame>>removeGL: inconsistent layout " ++ show p ++ " " ++ show l
--
-- | Add group layout at a certain path
--
addGL :: PaneLayout -> PanePath -> PaneLayout -> PaneLayout
addGL toAdd [GroupP group] t@(TerminalP oldGroups _ _ _ _) = t{paneGroups = Map.insert group toAdd oldGroups}
addGL toAdd (GroupP group:r) old@TerminalP {paneGroups = groups}
| group `Map.member` groups = old{paneGroups = Map.adjust (addGL toAdd r) group groups}
addGL toAdd (SplitP TopP:r) (HorizontalP tp bp _) = HorizontalP (addGL toAdd r tp) bp 0
addGL toAdd (SplitP BottomP:r) (HorizontalP tp bp _) = HorizontalP tp (addGL toAdd r bp) 0
addGL toAdd (SplitP LeftP:r) (VerticalP lp rp _) = VerticalP (addGL toAdd r lp) rp 0
addGL toAdd (SplitP RightP:r) (VerticalP lp rp _) = VerticalP lp (addGL toAdd r rp) 0
addGL _ p l = error $"ViewFrame>>addGL: inconsistent layout " ++ show p ++ " " ++ show l
--
-- | Changes the layout by replacing element at pane path (pp) with replace
--
adjustLayout :: PanePath -> PaneLayout -> PaneLayout -> PaneLayout
adjustLayout pp layout' replace = adjust' pp layout'
where
adjust' [] _ = replace
adjust' (GroupP group:r) old@TerminalP {paneGroups = groups}
| group `Map.member` groups =
old{paneGroups = Map.adjust (adjustPaneGroupLayout r) group groups}
adjust' (SplitP TopP:r) (HorizontalP tp bp _) = HorizontalP (adjust' r tp) bp 0
adjust' (SplitP BottomP:r) (HorizontalP tp bp _) = HorizontalP tp (adjust' r bp) 0
adjust' (SplitP LeftP:r) (VerticalP lp rp _) = VerticalP (adjust' r lp) rp 0
adjust' (SplitP RightP:r) (VerticalP lp rp _) = VerticalP lp (adjust' r rp) 0
adjust' p l = error $"inconsistent layout (adjust) " ++ show p ++ " " ++ show l
adjustPaneGroupLayout = adjust'
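-- A small sanity example (added, not original): replacing the top half of a
-- horizontal split leaves the bottom half untouched.
--
-- > let leaf = TerminalP Map.empty Nothing 0 Nothing Nothing
-- > adjustLayout [SplitP TopP] (HorizontalP leaf leaf 0) (VerticalP leaf leaf 0)
-- >     -- evaluates to HorizontalP (VerticalP leaf leaf 0) leaf 0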
--
-- | Get the widget from a list of strings
--
widgetFromPath :: (HasCallStack, MonadIO m) => Widget -> [Text] -> m Widget
widgetFromPath w [] = return w
widgetFromPath w path = do
children <- liftIO (unsafeCastTo Container w) >>= containerGetChildren
chooseWidgetFromPath children path
chooseWidgetFromPath :: (HasCallStack, MonadIO m) => [Widget] -> [Text] -> m Widget
chooseWidgetFromPath _ [] = error "Can't find widget (empty path)"
chooseWidgetFromPath widgets (h:t) = do
names <- mapM widgetGetName widgets
let mbiInd = elemIndex h names
case mbiInd of
Nothing -> error $"Cant't find widget path " ++ show (h:t) ++ " found only " ++ show names
Just ind -> widgetFromPath (widgets !! ind) t
widgetGet :: (HasCallStack, PaneMonad alpha) => [Text] -> (Widget -> IO b) -> alpha b
widgetGet strL cf = do
windows' <- getWindowsSt
widgets <- liftIO $ mapM toWidget windows'
r <- liftIO $ chooseWidgetFromPath widgets strL
liftIO (cf r)
getUIAction :: (HasCallStack, PaneMonad alpha) => Text -> (Action -> IO a) -> alpha a
getUIAction str f = do
uiManager' <- getUiManagerSt
findAction <- uIManagerGetAction uiManager' str
case findAction of
Just act -> liftIO $ f act
Nothing -> error $"getUIAction can't find action " ++ T.unpack str
getThis :: (HasCallStack, PaneMonad delta) => (FrameState delta -> alpha) -> delta alpha
getThis sel = sel <$> getFrameState
setThis :: (HasCallStack, PaneMonad delta) => (FrameState delta -> alpha -> FrameState delta) -> alpha -> delta ()
setThis sel value = do
st <- getFrameState
trace ("!!! setFrameState " <> show (sel st value)) $ setFrameState (sel st value)
getWindowsSt :: (HasCallStack, PaneMonad alpha) => alpha [Window]
getWindowsSt = getThis windows
setWindowsSt :: (HasCallStack, PaneMonad alpha) => [Window] -> alpha ()
setWindowsSt = setThis (\st value -> st{windows = value})
getUiManagerSt :: (HasCallStack, PaneMonad alpha) => alpha UIManager
getUiManagerSt = getThis uiManager
getPanesSt :: (HasCallStack, PaneMonad alpha) => alpha (Map PaneName (IDEPane alpha))
getPanesSt = getThis panes
setPanesSt :: (HasCallStack, PaneMonad alpha) => Map PaneName (IDEPane alpha) -> alpha ()
setPanesSt = setThis (\st value -> st{panes = value})
getPaneMapSt :: (HasCallStack, PaneMonad alpha) => alpha (Map PaneName (PanePath, [Connection]))
getPaneMapSt = getThis paneMap
setPaneMapSt :: (HasCallStack, PaneMonad alpha) => Map PaneName (PanePath, [Connection]) -> alpha ()
setPaneMapSt = setThis (\st value -> st{paneMap = value})
getActivePaneSt :: (HasCallStack, PaneMonad alpha) => alpha (Maybe (PaneName, [Connection]), [PaneName])
getActivePaneSt = getThis activePane
setActivePaneSt :: (HasCallStack, PaneMonad alpha) => (Maybe (PaneName, [Connection]), [PaneName]) -> alpha ()
setActivePaneSt = setThis (\st value -> st{activePane = value})
getLayoutSt :: (HasCallStack, PaneMonad alpha) => alpha PaneLayout
getLayoutSt = getThis layout
setLayoutSt :: (HasCallStack, PaneMonad alpha) => PaneLayout -> alpha ()
setLayoutSt = setThis (\st value -> st{layout = value})
getPanePathFromNB :: (HasCallStack, PaneMonad alpha) => alpha (Map (Ptr Notebook) PanePath)
getPanePathFromNB = getThis panePathFromNB
setPanePathFromNB :: (HasCallStack, PaneMonad alpha) => Map (Ptr Notebook) PanePath -> alpha ()
setPanePathFromNB = setThis (\st value -> st{panePathFromNB = value})
getActivePane :: (HasCallStack, PaneMonad alpha) => alpha (Maybe (PaneName, [Connection]), [PaneName])
getActivePane = getActivePaneSt
setActivePane :: (HasCallStack, PaneMonad alpha) => (Maybe (PaneName, [Connection]), [PaneName]) -> alpha ()
setActivePane = setActivePaneSt
getUiManager :: (HasCallStack, PaneMonad alpha) => alpha UIManager
getUiManager = getUiManagerSt
getWindows :: (HasCallStack, PaneMonad alpha) => alpha [Window]
getWindows = getWindowsSt
getMainWindow :: (HasCallStack, PaneMonad alpha) => alpha Window
getMainWindow = head <$> getWindows
getLayout :: (HasCallStack, PaneMonad alpha) => alpha PaneLayout
getLayout = getLayoutSt
getMRUPanes :: (HasCallStack, PaneMonad alpha) => alpha [PaneName]
getMRUPanes =
getActivePane >>= \case
(Nothing, mru) -> return mru
(Just (n, _), mru) -> return (n:mru)
|
leksah/ltk
|
src/Graphics/UI/Frame/ViewFrame.hs
|
gpl-2.0
| 65,621 | 2 | 38 | 20,960 | 16,752 | 8,288 | 8,464 | 1,175 | 14 |
{-# LANGUAGE RecordWildCards, LambdaCase #-}
module Typedrat.Views (landing, postView, previewMarkdownView, postEditorView, addPostView, addCommentView) where
import Data.HVect
import qualified Data.Text as T
import Data.Text.Encoding
import Network.HTTP.Types.Status
import Opaleye
import Web.Slug
import Web.Spock.Action
import Web.Spock.Lucid
import Typedrat.Auth
import Typedrat.DB
import Typedrat.Markup
import Typedrat.Routes
import Typedrat.Templates
import Typedrat.Types
usernameVar :: RatActionCtx ctx st (TemplateVars '[TemplateVar "is_administrator" Bool, TemplateVar "is_authenticated" Bool, TemplateVar "user" (Maybe (User Hask))])
usernameVar = userFromSession >>= \case
Just user -> return $ TVNil
::: (K :: Key "user") =: Just user
::: (K :: Key "is_authenticated") =: True
::: (K :: Key "is_administrator") =: (_userName user == "typedrat")
Nothing -> return $ TVNil
::: (K :: Key "user") =: Nothing
::: (K :: Key "is_authenticated") =: False
::: (K :: Key "is_administrator") =: False
landing :: RatActionCtx ctx st ()
landing = do
tv <- usernameVar
posts <- postsWithCommentNums 1
let tv' = tv
::: (K :: Key "path") =: "~"
::: (K :: Key "command") =: "ls"
::: (K :: Key "posts") =: posts
ratT tv' $ layout postListTemplate
postView :: Slug -> RatActionCtx ctx st ()
postView s = do
tv <- usernameVar
(post, comments) <- postWithComments s
let tv' = tv
::: (K :: Key "path") =: "~/posts"
::: (K :: Key "command") =: T.concat ["cat ", unSlug s, ".md"]
::: (K :: Key "post") =: post
::: (K :: Key "comments") =: comments
ratT tv' $ layout postTemplate
previewMarkdownView :: RatActionCtx ctx st ()
previewMarkdownView = do
markdown <- body
case renderMarkdown $ decodeUtf8 markdown of
Left err -> do
setStatus internalServerError500
text . T.pack $ show err
Right out -> do
setHeader "Content-Type" "text/html; charset=utf-8"
lucid out
postEditorView :: RatActionCtx ctx st ()
postEditorView = do
tv <- usernameVar
let tv' = tv
::: (K :: Key "path") =: "~/posts"
::: (K :: Key "command") =: "nano"
ratT tv' $ layout postEditorTemplate
addPostView :: RatActionCtx ctx st ()
addPostView = do
title <- param' "title"
body <- T.filter (/= '\r') <$> param' "body" -- CR seems to break Lucid. :(
let Just titleSlug = mkSlug title
let pgPost = pgBlogPost title titleSlug body
runPostgres $ \conn -> runInsertMany conn blogPostTable [pgPost]
redirect $ renderRoute postR titleSlug
addCommentView :: ListContains n Authenticated xs => Slug -> RatActionCtx (HVect xs) st ()
addCommentView s = postWithSlug s >>= \case
Just post@BlogPost{ _postId = pid } -> do
Just (User{ _userId = uid }) <- userFromSession
body <- T.filter (/= '\r') <$> param' "body" -- CR seems to break Lucid. :(
runPostgres $ \conn -> runInsertMany conn commentTable [pgComment uid pid body]
redirect (renderPostUrl post)
Nothing -> setStatus internalServerError500
|
typedrat/typedrat-site
|
app/Typedrat/Views.hs
|
gpl-3.0
| 3,201 | 0 | 19 | 787 | 1,037 | 521 | 516 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : Diffr.Util
Description : Utility functions and configuration options
Since : 0.1
Authors : William Martin, Jakub Kozlowski
License : This file is part of diffr-h.
diffr-h is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
diffr-h is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with diffr-h. If not, see <http://www.gnu.org/licenses/>.
-}
module Diffr.Util (
DConfig(..)
, diffrModes
) where
import System.Console.CmdArgs
{-| Static values -}
_PROGRAM_NAME, _PROGRAM_VERSION,
_PROGRAM_INFO, _PROGRAM_ABOUT, _COPYRIGHT :: String
_PROGRAM_NAME = "diffr"
_PROGRAM_VERSION = "0.1"
_PROGRAM_INFO = _PROGRAM_NAME ++ " version " ++ _PROGRAM_VERSION
_PROGRAM_ABOUT = unwords [
"An intelligent diff/patch tool"
, "that knows how to copy and move,"
, "has an 'r' at the end of its name"
, "and is written in Haskell."]
_COPYRIGHT = "(C) diffr 2013"
------------------------------------------------
{-| Configuration for running diffr's commands -}
data DConfig =
-- | Configuration for running diff command
Diff { -- | Path to the 'base' file we will diff against
baseFile :: FilePath
-- | Path to the 'new' file we will compare to 'base' file
, newFile :: FilePath
-- | Path to the output file where to write the diff file
, dOutFile :: Maybe FilePath
}
-- | Configuration for running patch command
| Patch { -- | Path to the 'original' file we will apply patch to
originalFile :: FilePath
-- | Path to the 'patch' file we will apply to 'originalFile'
, patchFile :: FilePath
-- | Path to the output file where to write the patched file
, pOutFile :: Maybe FilePath
} deriving (Eq, Show, Data, Typeable)
{-| Annotate the 'Diff' configuration -}
diff :: DConfig
diff = Diff
{ baseFile = def &= argPos 0
&= typ "BASEFILE"
, newFile = def &= argPos 1
&= typ "NEWFILE"
, dOutFile = def &= help "path to the output file"
&= name "output-file" &= typFile
}
{-| Annotate the 'Patch' configuration -}
patch :: DConfig
patch = Patch
{ originalFile = def &= argPos 0
&= typ "ORIGINALFILE"
, patchFile = def &= argPos 1
&= typ "PATCHFILE"
, pOutFile = def &= help "Path to the output file where to write the patched file"
&= name "output-file"
&= typFile
}
{-| Available commands -}
diffrModes :: Mode (CmdArgs DConfig)
diffrModes = cmdArgsMode $ modes [diff, patch]
&= versionArg [explicit, name "version", name "v", summary _PROGRAM_INFO]
&= summary (_PROGRAM_INFO ++ ", " ++ _COPYRIGHT)
&= help _PROGRAM_ABOUT
&= helpArg [explicit, name "help", name "h"]
&= program _PROGRAM_NAME
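{-| A minimal usage sketch, added for illustration only (it is not part of the
    original diffr-h sources): it shows how 'diffrModes' would typically be
    consumed from an entry point. The printed messages are placeholders. -}
_exampleMain :: IO ()
_exampleMain = do
    config <- cmdArgsRun diffrModes
    case config of
        Diff base new _out -> putStrLn $ "would diff " ++ base ++ " against " ++ new
        Patch orig p _out  -> putStrLn $ "would patch " ++ orig ++ " with " ++ p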
|
diffr/diffr-h
|
src/Diffr/Util.hs
|
gpl-3.0
| 3,426 | 0 | 12 | 981 | 442 | 250 | 192 | 50 | 1 |
module EvalNode (
eval
) where
import Types
import LLVMRepresentation
import CodeBuilders
eval :: Primitive a => NodeBuilder (Node a) -> a
eval = read . run . compile
|
GaroBrik/haskell-compiler
|
test/EvalNode.hs
|
gpl-3.0
| 173 | 0 | 9 | 35 | 56 | 31 | 25 | 7 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Jobs.Projects.Tenants.Jobs.SearchForAlert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Searches for jobs using the provided SearchJobsRequest. This API call is
-- intended for the use case of targeting passive job seekers (for example,
-- job seekers who have signed up to receive email alerts about potential
-- job opportunities). It has different algorithmic adjustments that are
-- designed to specifically target passive job seekers. This call
-- constrains the visibility of jobs present in the database, and only
-- returns jobs the caller has permission to search against.
--
-- /See:/ <https://cloud.google.com/talent-solution/job-search/docs/ Cloud Talent Solution API Reference> for @jobs.projects.tenants.jobs.searchForAlert@.
module Network.Google.Resource.Jobs.Projects.Tenants.Jobs.SearchForAlert
(
-- * REST Resource
ProjectsTenantsJobsSearchForAlertResource
-- * Creating a Request
, projectsTenantsJobsSearchForAlert
, ProjectsTenantsJobsSearchForAlert
-- * Request Lenses
, ptjsfaParent
, ptjsfaXgafv
, ptjsfaUploadProtocol
, ptjsfaAccessToken
, ptjsfaUploadType
, ptjsfaPayload
, ptjsfaCallback
) where
import Network.Google.Jobs.Types
import Network.Google.Prelude
-- | A resource alias for @jobs.projects.tenants.jobs.searchForAlert@ method which the
-- 'ProjectsTenantsJobsSearchForAlert' request conforms to.
type ProjectsTenantsJobsSearchForAlertResource =
"v4" :>
Capture "parent" Text :>
"jobs:searchForAlert" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] SearchJobsRequest :>
Post '[JSON] SearchJobsResponse
-- | Searches for jobs using the provided SearchJobsRequest. This API call is
-- intended for the use case of targeting passive job seekers (for example,
-- job seekers who have signed up to receive email alerts about potential
-- job opportunities). It has different algorithmic adjustments that are
-- designed to specifically target passive job seekers. This call
-- constrains the visibility of jobs present in the database, and only
-- returns jobs the caller has permission to search against.
--
-- /See:/ 'projectsTenantsJobsSearchForAlert' smart constructor.
data ProjectsTenantsJobsSearchForAlert =
ProjectsTenantsJobsSearchForAlert'
{ _ptjsfaParent :: !Text
, _ptjsfaXgafv :: !(Maybe Xgafv)
, _ptjsfaUploadProtocol :: !(Maybe Text)
, _ptjsfaAccessToken :: !(Maybe Text)
, _ptjsfaUploadType :: !(Maybe Text)
, _ptjsfaPayload :: !SearchJobsRequest
, _ptjsfaCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsTenantsJobsSearchForAlert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptjsfaParent'
--
-- * 'ptjsfaXgafv'
--
-- * 'ptjsfaUploadProtocol'
--
-- * 'ptjsfaAccessToken'
--
-- * 'ptjsfaUploadType'
--
-- * 'ptjsfaPayload'
--
-- * 'ptjsfaCallback'
projectsTenantsJobsSearchForAlert
:: Text -- ^ 'ptjsfaParent'
-> SearchJobsRequest -- ^ 'ptjsfaPayload'
-> ProjectsTenantsJobsSearchForAlert
projectsTenantsJobsSearchForAlert pPtjsfaParent_ pPtjsfaPayload_ =
ProjectsTenantsJobsSearchForAlert'
{ _ptjsfaParent = pPtjsfaParent_
, _ptjsfaXgafv = Nothing
, _ptjsfaUploadProtocol = Nothing
, _ptjsfaAccessToken = Nothing
, _ptjsfaUploadType = Nothing
, _ptjsfaPayload = pPtjsfaPayload_
, _ptjsfaCallback = Nothing
}
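-- A hypothetical usage sketch (added for illustration; it is not part of the
-- generated module). It assumes the lens operators ('&' and '?~') are in
-- scope and that @payload@ is a previously constructed 'SearchJobsRequest':
--
-- > projectsTenantsJobsSearchForAlert "projects/foo/tenants/bar" payload
-- >     & ptjsfaUploadType ?~ "multipart"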
-- | Required. The resource name of the tenant to search within. The format
-- is \"projects\/{project_id}\/tenants\/{tenant_id}\". For example,
-- \"projects\/foo\/tenants\/bar\".
ptjsfaParent :: Lens' ProjectsTenantsJobsSearchForAlert Text
ptjsfaParent
= lens _ptjsfaParent (\ s a -> s{_ptjsfaParent = a})
-- | V1 error format.
ptjsfaXgafv :: Lens' ProjectsTenantsJobsSearchForAlert (Maybe Xgafv)
ptjsfaXgafv
= lens _ptjsfaXgafv (\ s a -> s{_ptjsfaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ptjsfaUploadProtocol :: Lens' ProjectsTenantsJobsSearchForAlert (Maybe Text)
ptjsfaUploadProtocol
= lens _ptjsfaUploadProtocol
(\ s a -> s{_ptjsfaUploadProtocol = a})
-- | OAuth access token.
ptjsfaAccessToken :: Lens' ProjectsTenantsJobsSearchForAlert (Maybe Text)
ptjsfaAccessToken
= lens _ptjsfaAccessToken
(\ s a -> s{_ptjsfaAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ptjsfaUploadType :: Lens' ProjectsTenantsJobsSearchForAlert (Maybe Text)
ptjsfaUploadType
= lens _ptjsfaUploadType
(\ s a -> s{_ptjsfaUploadType = a})
-- | Multipart request metadata.
ptjsfaPayload :: Lens' ProjectsTenantsJobsSearchForAlert SearchJobsRequest
ptjsfaPayload
= lens _ptjsfaPayload
(\ s a -> s{_ptjsfaPayload = a})
-- | JSONP
ptjsfaCallback :: Lens' ProjectsTenantsJobsSearchForAlert (Maybe Text)
ptjsfaCallback
= lens _ptjsfaCallback
(\ s a -> s{_ptjsfaCallback = a})
instance GoogleRequest
ProjectsTenantsJobsSearchForAlert
where
type Rs ProjectsTenantsJobsSearchForAlert =
SearchJobsResponse
type Scopes ProjectsTenantsJobsSearchForAlert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/jobs"]
requestClient ProjectsTenantsJobsSearchForAlert'{..}
= go _ptjsfaParent _ptjsfaXgafv _ptjsfaUploadProtocol
_ptjsfaAccessToken
_ptjsfaUploadType
_ptjsfaCallback
(Just AltJSON)
_ptjsfaPayload
jobsService
where go
= buildClient
(Proxy ::
Proxy ProjectsTenantsJobsSearchForAlertResource)
mempty
|
brendanhay/gogol
|
gogol-jobs/gen/Network/Google/Resource/Jobs/Projects/Tenants/Jobs/SearchForAlert.hs
|
mpl-2.0
| 6,793 | 0 | 17 | 1,424 | 797 | 471 | 326 | 121 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.DynamoDB.PutItem
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a new item, or replaces an old item with a new item. If an item that
-- has the same primary key as the new item already exists in the specified
-- table, the new item completely replaces the existing item. You can perform a
-- conditional put operation (add a new item if one with the specified primary
-- key doesn't exist), or replace an existing item if it has certain attribute
-- values.
--
-- In addition to putting an item, you can also return the item's attribute
-- values in the same operation, using the /ReturnValues/ parameter.
--
-- When you add an item, the primary key attribute(s) are the only required
-- attributes. Attribute values cannot be null. String and Binary type
-- attributes must have lengths greater than zero. Set type attributes cannot be
-- empty. Requests with empty values will be rejected with a /ValidationException/
-- exception.
--
-- You can request that /PutItem/ return either a copy of the original item
-- (before the update) or a copy of the updated item (after the update). For
-- more information, see the /ReturnValues/ description below.
--
-- To prevent a new item from replacing an existing item, use a conditional
-- put operation with /ComparisonOperator/ set to 'NULL' for the primary key
-- attribute, or attributes.
--
-- For more information about using this API, see <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithItems.html Working with Items> in the /Amazon DynamoDB Developer Guide/.
--
-- <http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html>
module Network.AWS.DynamoDB.PutItem
(
-- * Request
PutItem
-- ** Request constructor
, putItem
-- ** Request lenses
, piConditionExpression
, piConditionalOperator
, piExpected
, piExpressionAttributeNames
, piExpressionAttributeValues
, piItem
, piReturnConsumedCapacity
, piReturnItemCollectionMetrics
, piReturnValues
, piTableName
-- * Response
, PutItemResponse
-- ** Response constructor
, putItemResponse
-- ** Response lenses
, pirAttributes
, pirConsumedCapacity
, pirItemCollectionMetrics
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.DynamoDB.Types
import qualified GHC.Exts
data PutItem = PutItem
{ _piConditionExpression :: Maybe Text
, _piConditionalOperator :: Maybe ConditionalOperator
, _piExpected :: Map Text ExpectedAttributeValue
, _piExpressionAttributeNames :: Map Text Text
, _piExpressionAttributeValues :: Map Text AttributeValue
, _piItem :: Map Text AttributeValue
, _piReturnConsumedCapacity :: Maybe ReturnConsumedCapacity
, _piReturnItemCollectionMetrics :: Maybe ReturnItemCollectionMetrics
, _piReturnValues :: Maybe ReturnValue
, _piTableName :: Text
} deriving (Eq, Read, Show)
-- | 'PutItem' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'piConditionExpression' @::@ 'Maybe' 'Text'
--
-- * 'piConditionalOperator' @::@ 'Maybe' 'ConditionalOperator'
--
-- * 'piExpected' @::@ 'HashMap' 'Text' 'ExpectedAttributeValue'
--
-- * 'piExpressionAttributeNames' @::@ 'HashMap' 'Text' 'Text'
--
-- * 'piExpressionAttributeValues' @::@ 'HashMap' 'Text' 'AttributeValue'
--
-- * 'piItem' @::@ 'HashMap' 'Text' 'AttributeValue'
--
-- * 'piReturnConsumedCapacity' @::@ 'Maybe' 'ReturnConsumedCapacity'
--
-- * 'piReturnItemCollectionMetrics' @::@ 'Maybe' 'ReturnItemCollectionMetrics'
--
-- * 'piReturnValues' @::@ 'Maybe' 'ReturnValue'
--
-- * 'piTableName' @::@ 'Text'
--
putItem :: Text -- ^ 'piTableName'
-> PutItem
putItem p1 = PutItem
{ _piTableName = p1
, _piItem = mempty
, _piExpected = mempty
, _piReturnValues = Nothing
, _piReturnConsumedCapacity = Nothing
, _piReturnItemCollectionMetrics = Nothing
, _piConditionalOperator = Nothing
, _piConditionExpression = Nothing
, _piExpressionAttributeNames = mempty
, _piExpressionAttributeValues = mempty
}
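-- A hypothetical example (added for illustration; not part of the generated
-- module): a conditional put that only succeeds if no item with the given key
-- exists yet. It assumes the lens operators ('&', '.~', '?~') are in scope;
-- @item@, the table name and the @Id@ attribute are placeholders.
--
-- > putItem "my-table"
-- >     & piItem .~ item
-- >     & piConditionExpression ?~ "attribute_not_exists(Id)"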
-- | A condition that must be satisfied in order for a conditional /PutItem/
-- operation to succeed.
--
-- An expression can contain any of the following:
--
-- Boolean functions: 'attribute_exists | attribute_not_exists | contains |begins_with'
--
-- These function names are case-sensitive.
--
-- Comparison operators: ' = | <> | < | > | <= | >= | BETWEEN | IN'
--
-- Logical operators: 'AND | OR | NOT'
--
-- For more information on condition expressions, go to <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html Specifying Conditions>
-- in the /Amazon DynamoDB Developer Guide/.
piConditionExpression :: Lens' PutItem (Maybe Text)
piConditionExpression =
lens _piConditionExpression (\s a -> s { _piConditionExpression = a })
-- | There is a newer parameter available. Use /ConditionExpression/ instead. Note
-- that if you use /ConditionalOperator/ and / ConditionExpression / at the same
-- time, DynamoDB will return a /ValidationException/ exception.
--
-- A logical operator to apply to the conditions in the /Expected/ map:
--
-- 'AND' - If all of the conditions evaluate to true, then the entire map
-- evaluates to true.
--
-- 'OR' - If at least one of the conditions evaluate to true, then the entire map
-- evaluates to true.
--
-- If you omit /ConditionalOperator/, then 'AND' is the default.
--
-- The operation will succeed only if the entire map evaluates to true.
--
-- This parameter does not support attributes of type List or Map.
--
piConditionalOperator :: Lens' PutItem (Maybe ConditionalOperator)
piConditionalOperator =
lens _piConditionalOperator (\s a -> s { _piConditionalOperator = a })
-- | There is a newer parameter available. Use /ConditionExpression/ instead. Note
-- that if you use /Expected/ and / ConditionExpression / at the same time, DynamoDB
-- will return a /ValidationException/ exception.
--
-- A map of attribute/condition pairs. /Expected/ provides a conditional block
-- for the /PutItem/ operation.
--
-- This parameter does not support attributes of type List or Map.
--
-- Each element of /Expected/ consists of an attribute name, a comparison
-- operator, and one or more values. DynamoDB compares the attribute with the
-- value(s) you supplied, using the comparison operator. For each /Expected/
-- element, the result of the evaluation is either true or false.
--
-- If you specify more than one element in the /Expected/ map, then by default
-- all of the conditions must evaluate to true. In other words, the conditions
-- are ANDed together. (You can use the /ConditionalOperator/ parameter to OR the
-- conditions instead. If you do this, then at least one of the conditions must
-- evaluate to true, rather than all of them.)
--
-- If the /Expected/ map evaluates to true, then the conditional operation
-- succeeds; otherwise, it fails.
--
-- /Expected/ contains the following:
--
-- /AttributeValueList/ - One or more values to evaluate against the supplied
-- attribute. The number of values in the list depends on the /ComparisonOperator/
-- being used.
--
-- For type Number, value comparisons are numeric.
--
-- String value comparisons for greater than, equals, or less than are based on
-- ASCII character code values. For example, 'a' is greater than 'A', and 'a' is
-- greater than 'B'. For a list of code values, see <http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters>.
--
-- For type Binary, DynamoDB treats each byte of the binary data as unsigned
-- when it compares binary values.
--
-- /ComparisonOperator/ - A comparator for evaluating attributes in the /AttributeValueList/. When performing the comparison, DynamoDB uses strongly consistent reads.
--
-- The following comparison operators are available:
--
-- 'EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS |BEGINS_WITH | IN | BETWEEN'
--
-- The following are descriptions of each comparison operator.
--
-- 'EQ' : Equal. 'EQ' is supported for all datatypes, including lists and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, Binary, String Set, Number Set, or Binary Set. If an item
-- contains an /AttributeValue/ element of a different type than the one provided
-- in the request, the value does not match. For example, '{"S":"6"}' does not
-- equal '{"N":"6"}'. Also, '{"N":"6"}' does not equal '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'NE' : Not equal. 'NE' is supported for all datatypes, including lists and
-- maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type String,
-- Number, Binary, String Set, Number Set, or Binary Set. If an item contains an /AttributeValue/ of a different type than the one provided in the request, the
-- value does not match. For example, '{"S":"6"}' does not equal '{"N":"6"}'. Also, '{"N":"6"}' does not equal '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'LE' : Less than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, or Binary (not a set type). If an item contains an /AttributeValue/ element of a different type than the one provided in the request, the value
-- does not match. For example, '{"S":"6"}' does not equal '{"N":"6"}'. Also, '{"N":"6"}' does not compare to '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'LT' : Less than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type String,
-- Number, or Binary (not a set type). If an item contains an /AttributeValue/
-- element of a different type than the one provided in the request, the value
-- does not match. For example, '{"S":"6"}' does not equal '{"N":"6"}'. Also, '{"N":"6"}' does not compare to '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'GE' : Greater than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, or Binary (not a set type). If an item contains an /AttributeValue/ element of a different type than the one provided in the request, the value
-- does not match. For example, '{"S":"6"}' does not equal '{"N":"6"}'. Also, '{"N":"6"}' does not compare to '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'GT' : Greater than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, or Binary (not a set type). If an item contains an /AttributeValue/ element of a different type than the one provided in the request, the value
-- does not match. For example, '{"S":"6"}' does not equal '{"N":"6"}'. Also, '{"N":"6"}' does not compare to '{"NS":["6", "2", "1"]}'.
--
--
--
-- 'NOT_NULL' : The attribute exists. 'NOT_NULL' is supported for all datatypes,
-- including lists and maps.
--
-- This operator tests for the existence of an attribute, not its data type. If
-- the data type of attribute "'a'" is null, and you evaluate it using 'NOT_NULL',
-- the result is a Boolean /true/. This result is because the attribute "'a'"
-- exists; its data type is not relevant to the 'NOT_NULL' comparison operator.
--
-- 'NULL' : The attribute does not exist. 'NULL' is supported for all datatypes,
-- including lists and maps.
--
-- This operator tests for the nonexistence of an attribute, not its data type.
-- If the data type of attribute "'a'" is null, and you evaluate it using 'NULL',
-- the result is a Boolean /false/. This is because the attribute "'a'" exists; its
-- data type is not relevant to the 'NULL' comparison operator.
--
-- 'CONTAINS' : Checks for a subsequence, or value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, or Binary (not a set type). If the target attribute of the
-- comparison is of type String, then the operator checks for a substring match.
-- If the target attribute of the comparison is of type Binary, then the
-- operator looks for a subsequence of the target that matches the input. If the
-- target attribute of the comparison is a set ("'SS'", "'NS'", or "'BS'"), then the
-- operator evaluates to true if it finds an exact match with any member of the
-- set.
--
-- CONTAINS is supported for lists: When evaluating "'a CONTAINS b'", "'a'" can be
-- a list; however, "'b'" cannot be a set, a map, or a list.
--
-- 'NOT_CONTAINS' : Checks for absence of a subsequence, or absence of a value
-- in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element of type
-- String, Number, or Binary (not a set type). If the target attribute of the
-- comparison is a String, then the operator checks for the absence of a
-- substring match. If the target attribute of the comparison is Binary, then
-- the operator checks for the absence of a subsequence of the target that
-- matches the input. If the target attribute of the comparison is a set ("'SS'", "'NS'", or "'BS'"), then the operator evaluates to true if it /does not/ find an
-- exact match with any member of the set.
--
-- NOT_CONTAINS is supported for lists: When evaluating "'a NOT CONTAINS b'", "'a'"
-- can be a list; however, "'b'" cannot be a set, a map, or a list.
--
-- 'BEGINS_WITH' : Checks for a prefix.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type String or
-- Binary (not a Number or a set type). The target attribute of the comparison
-- must be of type String or Binary (not a Number or a set type).
--
--
--
-- 'IN' : Checks for matching elements within two sets.
--
-- /AttributeValueList/ can contain one or more /AttributeValue/ elements of type
-- String, Number, or Binary (not a set type). These attributes are compared
-- against an existing set type attribute of an item. If any elements of the
-- input set are present in the item attribute, the expression evaluates to true.
--
-- 'BETWEEN' : Greater than or equal to the first value, and less than or equal
-- to the second value.
--
-- /AttributeValueList/ must contain two /AttributeValue/ elements of the same
-- type, either String, Number, or Binary (not a set type). A target attribute
-- matches if the target value is greater than, or equal to, the first element
-- and less than, or equal to, the second element. If an item contains an /AttributeValue/ element of a different type than the one provided in the request, the value
-- does not match. For example, '{"S":"6"}' does not compare to '{"N":"6"}'. Also, '{"N":"6"}' does not compare to '{"NS":["6", "2", "1"]}'
--
-- For usage examples of /AttributeValueList/ and /ComparisonOperator/, see <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.html Legacy Conditional Parameters> in the /Amazon DynamoDB Developer Guide/.
--
-- For backward compatibility with previous DynamoDB releases, the following
-- parameters can be used instead of /AttributeValueList/ and /ComparisonOperator/:
--
-- /Value/ - A value for DynamoDB to compare with an attribute.
--
-- /Exists/ - A Boolean value that causes DynamoDB to evaluate the value before
-- attempting the conditional operation:
--
-- If /Exists/ is 'true', DynamoDB will check to see if that attribute value
-- already exists in the table. If it is found, then the condition evaluates to
-- true; otherwise the condition evaluates to false.
--
-- If /Exists/ is 'false', DynamoDB assumes that the attribute value does /not/
-- exist in the table. If in fact the value does not exist, then the assumption
-- is valid and the condition evaluates to true. If the value is found, despite
-- the assumption that it does not exist, the condition evaluates to false.
--
-- Note that the default value for /Exists/ is 'true'.
--
-- The /Value/ and /Exists/ parameters are incompatible with /AttributeValueList/
-- and /ComparisonOperator/. Note that if you use both sets of parameters at once,
-- DynamoDB will return a /ValidationException/ exception.
piExpected :: Lens' PutItem (HashMap Text ExpectedAttributeValue)
piExpected = lens _piExpected (\s a -> s { _piExpected = a }) . _Map
-- | One or more substitution tokens for attribute names in an expression. The
-- following are some use cases for using /ExpressionAttributeNames/:
--
-- To access an attribute whose name conflicts with a DynamoDB reserved word.
--
-- To create a placeholder for repeating occurrences of an attribute name in
-- an expression.
--
-- To prevent special characters in an attribute name from being
-- misinterpreted in an expression.
--
-- Use the # character in an expression to dereference an attribute name. For
-- example, consider the following attribute name:
--
-- 'Percentile'
--
-- The name of this attribute conflicts with a reserved word, so it cannot be
-- used directly in an expression. (For the complete list of reserved words, go
-- to <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html Reserved Words> in the /Amazon DynamoDB Developer Guide/). To work around
-- this, you could specify the following for /ExpressionAttributeNames/:
--
-- '{"#P":"Percentile"}'
--
-- You could then use this substitution in an expression, as in this example:
--
-- '#P = :val'
--
-- Tokens that begin with the : character are /expression attribute values/,
-- which are placeholders for the actual value at runtime.
--
-- For more information on expression attribute names, go to <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html Accessing ItemAttributes> in the /Amazon DynamoDB Developer Guide/.
piExpressionAttributeNames :: Lens' PutItem (HashMap Text Text)
piExpressionAttributeNames =
lens _piExpressionAttributeNames
(\s a -> s { _piExpressionAttributeNames = a })
. _Map
-- | One or more values that can be substituted in an expression.
--
-- Use the : (colon) character in an expression to dereference an attribute
-- value. For example, suppose that you wanted to check whether the value of the /ProductStatus/ attribute was one of the following:
--
-- 'Available | Backordered | Discontinued'
--
-- You would first need to specify /ExpressionAttributeValues/ as follows:
--
-- '{ ":avail":{"S":"Available"}, ":back":{"S":"Backordered"},":disc":{"S":"Discontinued"} }'
--
-- You could then use these values in an expression, such as this:
--
-- 'ProductStatus IN (:avail, :back, :disc)'
--
-- For more information on expression attribute values, go to <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html SpecifyingConditions> in the /Amazon DynamoDB Developer Guide/.
piExpressionAttributeValues :: Lens' PutItem (HashMap Text AttributeValue)
piExpressionAttributeValues =
lens _piExpressionAttributeValues
(\s a -> s { _piExpressionAttributeValues = a })
. _Map
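-- An illustrative way to combine the two substitution maps above (a minimal
-- sketch, not a complete request): it assumes the 'putItem' smart constructor
-- defined earlier in this module, the '&' and '.~' operators from a lens
-- library, a qualified import of "Data.HashMap.Strict" as @HashMap@, and a
-- placeholder @percentileValue :: AttributeValue@.
--
-- > putItem "Thread"
-- >     & piExpressionAttributeNames  .~ HashMap.fromList [("#P", "Percentile")]
-- >     & piExpressionAttributeValues .~ HashMap.fromList [(":val", percentileValue)]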
-- | A map of attribute name/value pairs, one for each attribute. Only the primary
-- key attributes are required; you can optionally provide other attribute
-- name-value pairs for the item.
--
-- You must provide all of the attributes for the primary key. For example,
-- with a hash type primary key, you only need to provide the hash attribute.
-- For a hash-and-range type primary key, you must provide both the hash
-- attribute and the range attribute.
--
-- If you specify any attributes that are part of an index key, then the data
-- types for those attributes must match those of the schema in the table's
-- attribute definition.
--
-- For more information about primary keys, see <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelPrimaryKey Primary Key> in the /AmazonDynamoDB Developer Guide/.
--
-- Each element in the /Item/ map is an /AttributeValue/ object.
piItem :: Lens' PutItem (HashMap Text AttributeValue)
piItem = lens _piItem (\s a -> s { _piItem = a }) . _Map
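-- | Determines whether, and in how much detail, consumed capacity is reported
-- in the response; see the /ReturnConsumedCapacity/ parameter in the Amazon
-- DynamoDB documentation for the accepted values.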
piReturnConsumedCapacity :: Lens' PutItem (Maybe ReturnConsumedCapacity)
piReturnConsumedCapacity =
lens _piReturnConsumedCapacity
(\s a -> s { _piReturnConsumedCapacity = a })
-- | Determines whether item collection metrics are returned. If set to 'SIZE',
-- the response includes statistics about item collections, if any, that were
-- modified during the operation. If set to 'NONE' (the default), no statistics
-- are returned.
piReturnItemCollectionMetrics :: Lens' PutItem (Maybe ReturnItemCollectionMetrics)
piReturnItemCollectionMetrics =
lens _piReturnItemCollectionMetrics
(\s a -> s { _piReturnItemCollectionMetrics = a })
-- | Use /ReturnValues/ if you want to get the item attributes as they appeared
-- before they were updated with the /PutItem/ request. For /PutItem/, the valid
-- values are:
--
-- 'NONE' - If /ReturnValues/ is not specified, or if its value is 'NONE', then
-- nothing is returned. (This setting is the default for /ReturnValues/.)
--
-- 'ALL_OLD' - If /PutItem/ overwrote an attribute name-value pair, then the
-- content of the old item is returned.
--
--
piReturnValues :: Lens' PutItem (Maybe ReturnValue)
piReturnValues = lens _piReturnValues (\s a -> s { _piReturnValues = a })
-- | The name of the table to contain the item.
piTableName :: Lens' PutItem Text
piTableName = lens _piTableName (\s a -> s { _piTableName = a })
data PutItemResponse = PutItemResponse
{ _pirAttributes :: Map Text AttributeValue
, _pirConsumedCapacity :: Maybe ConsumedCapacity
, _pirItemCollectionMetrics :: Maybe ItemCollectionMetrics
} deriving (Eq, Read, Show)
-- | 'PutItemResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pirAttributes' @::@ 'HashMap' 'Text' 'AttributeValue'
--
-- * 'pirConsumedCapacity' @::@ 'Maybe' 'ConsumedCapacity'
--
-- * 'pirItemCollectionMetrics' @::@ 'Maybe' 'ItemCollectionMetrics'
--
putItemResponse :: PutItemResponse
putItemResponse = PutItemResponse
{ _pirAttributes = mempty
, _pirConsumedCapacity = Nothing
, _pirItemCollectionMetrics = Nothing
}
-- | The attribute values as they appeared before the /PutItem/ operation, but only
-- if /ReturnValues/ is specified as 'ALL_OLD' in the request. Each element consists
-- of an attribute name and an attribute value.
pirAttributes :: Lens' PutItemResponse (HashMap Text AttributeValue)
pirAttributes = lens _pirAttributes (\s a -> s { _pirAttributes = a }) . _Map
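-- | The capacity units consumed by the operation, returned only when consumed
-- capacity reporting was requested via /ReturnConsumedCapacity/; see the
-- Amazon DynamoDB documentation for details.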
pirConsumedCapacity :: Lens' PutItemResponse (Maybe ConsumedCapacity)
pirConsumedCapacity =
lens _pirConsumedCapacity (\s a -> s { _pirConsumedCapacity = a })
-- | Information about item collections, if any, that were affected by the
-- operation. /ItemCollectionMetrics/ is only returned if the request asked for
-- it. If the table does not have any local secondary indexes, this information
-- is not returned in the response.
--
-- Each /ItemCollectionMetrics/ element consists of:
--
-- /ItemCollectionKey/ - The hash key value of the item collection. This is the
-- same as the hash key of the item.
--
-- /SizeEstimateRange/ - An estimate of item collection size, in gigabytes. This
-- value is a two-element array containing a lower bound and an upper bound for
-- the estimate. The estimate includes the size of all the items in the table,
-- plus the size of all attributes projected into all of the local secondary
-- indexes on that table. Use this estimate to measure whether a local secondary
-- index is approaching its size limit.
--
-- The estimate is subject to change over time; therefore, do not rely on the
-- precision or accuracy of the estimate.
--
--
pirItemCollectionMetrics :: Lens' PutItemResponse (Maybe ItemCollectionMetrics)
pirItemCollectionMetrics =
lens _pirItemCollectionMetrics
(\s a -> s { _pirItemCollectionMetrics = a })
instance ToPath PutItem where
toPath = const "/"
instance ToQuery PutItem where
toQuery = const mempty
instance ToHeaders PutItem
instance ToJSON PutItem where
toJSON PutItem{..} = object
[ "TableName" .= _piTableName
, "Item" .= _piItem
, "Expected" .= _piExpected
, "ReturnValues" .= _piReturnValues
, "ReturnConsumedCapacity" .= _piReturnConsumedCapacity
, "ReturnItemCollectionMetrics" .= _piReturnItemCollectionMetrics
, "ConditionalOperator" .= _piConditionalOperator
, "ConditionExpression" .= _piConditionExpression
, "ExpressionAttributeNames" .= _piExpressionAttributeNames
, "ExpressionAttributeValues" .= _piExpressionAttributeValues
]
instance AWSRequest PutItem where
type Sv PutItem = DynamoDB
type Rs PutItem = PutItemResponse
request = post "PutItem"
response = jsonResponse
instance FromJSON PutItemResponse where
parseJSON = withObject "PutItemResponse" $ \o -> PutItemResponse
<$> o .:? "Attributes" .!= mempty
<*> o .:? "ConsumedCapacity"
<*> o .:? "ItemCollectionMetrics"
|
kim/amazonka
|
amazonka-dynamodb/gen/Network/AWS/DynamoDB/PutItem.hs
|
mpl-2.0
| 26,178 | 0 | 14 | 4,837 | 1,615 | 1,087 | 528 | 137 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Organizations.Locations.Buckets.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a bucket.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.organizations.locations.buckets.get@.
module Network.Google.Resource.Logging.Organizations.Locations.Buckets.Get
(
-- * REST Resource
OrganizationsLocationsBucketsGetResource
-- * Creating a Request
, organizationsLocationsBucketsGet
, OrganizationsLocationsBucketsGet
-- * Request Lenses
, olbgXgafv
, olbgUploadProtocol
, olbgAccessToken
, olbgUploadType
, olbgName
, olbgCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.organizations.locations.buckets.get@ method which the
-- 'OrganizationsLocationsBucketsGet' request conforms to.
type OrganizationsLocationsBucketsGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] LogBucket
-- | Gets a bucket.
--
-- /See:/ 'organizationsLocationsBucketsGet' smart constructor.
data OrganizationsLocationsBucketsGet =
OrganizationsLocationsBucketsGet'
{ _olbgXgafv :: !(Maybe Xgafv)
, _olbgUploadProtocol :: !(Maybe Text)
, _olbgAccessToken :: !(Maybe Text)
, _olbgUploadType :: !(Maybe Text)
, _olbgName :: !Text
, _olbgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsLocationsBucketsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olbgXgafv'
--
-- * 'olbgUploadProtocol'
--
-- * 'olbgAccessToken'
--
-- * 'olbgUploadType'
--
-- * 'olbgName'
--
-- * 'olbgCallback'
organizationsLocationsBucketsGet
:: Text -- ^ 'olbgName'
-> OrganizationsLocationsBucketsGet
organizationsLocationsBucketsGet pOlbgName_ =
OrganizationsLocationsBucketsGet'
{ _olbgXgafv = Nothing
, _olbgUploadProtocol = Nothing
, _olbgAccessToken = Nothing
, _olbgUploadType = Nothing
, _olbgName = pOlbgName_
, _olbgCallback = Nothing
}
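-- An illustrative request value (a minimal sketch; the bucket path is a
-- placeholder following the format documented under 'olbgName' below):
--
-- > organizationsLocationsBucketsGet
-- >     "organizations/1234567890/locations/global/buckets/my-bucket-id"
--
-- The resulting value can then be executed with the request-sending machinery
-- from the core gogol package.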
-- | V1 error format.
olbgXgafv :: Lens' OrganizationsLocationsBucketsGet (Maybe Xgafv)
olbgXgafv
= lens _olbgXgafv (\ s a -> s{_olbgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olbgUploadProtocol :: Lens' OrganizationsLocationsBucketsGet (Maybe Text)
olbgUploadProtocol
= lens _olbgUploadProtocol
(\ s a -> s{_olbgUploadProtocol = a})
-- | OAuth access token.
olbgAccessToken :: Lens' OrganizationsLocationsBucketsGet (Maybe Text)
olbgAccessToken
= lens _olbgAccessToken
(\ s a -> s{_olbgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olbgUploadType :: Lens' OrganizationsLocationsBucketsGet (Maybe Text)
olbgUploadType
= lens _olbgUploadType
(\ s a -> s{_olbgUploadType = a})
-- | Required. The resource name of the bucket:
-- \"projects\/[PROJECT_ID]\/locations\/[LOCATION_ID]\/buckets\/[BUCKET_ID]\"
-- \"organizations\/[ORGANIZATION_ID]\/locations\/[LOCATION_ID]\/buckets\/[BUCKET_ID]\"
-- \"billingAccounts\/[BILLING_ACCOUNT_ID]\/locations\/[LOCATION_ID]\/buckets\/[BUCKET_ID]\"
-- \"folders\/[FOLDER_ID]\/locations\/[LOCATION_ID]\/buckets\/[BUCKET_ID]\"
-- Example:
-- \"projects\/my-project-id\/locations\/my-location\/buckets\/my-bucket-id\".
olbgName :: Lens' OrganizationsLocationsBucketsGet Text
olbgName = lens _olbgName (\ s a -> s{_olbgName = a})
-- | JSONP
olbgCallback :: Lens' OrganizationsLocationsBucketsGet (Maybe Text)
olbgCallback
= lens _olbgCallback (\ s a -> s{_olbgCallback = a})
instance GoogleRequest
OrganizationsLocationsBucketsGet
where
type Rs OrganizationsLocationsBucketsGet = LogBucket
type Scopes OrganizationsLocationsBucketsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/logging.admin",
"https://www.googleapis.com/auth/logging.read"]
requestClient OrganizationsLocationsBucketsGet'{..}
= go _olbgName _olbgXgafv _olbgUploadProtocol
_olbgAccessToken
_olbgUploadType
_olbgCallback
(Just AltJSON)
loggingService
where go
= buildClient
(Proxy ::
Proxy OrganizationsLocationsBucketsGetResource)
mempty
|
brendanhay/gogol
|
gogol-logging/gen/Network/Google/Resource/Logging/Organizations/Locations/Buckets/Get.hs
|
mpl-2.0
| 5,524 | 0 | 15 | 1,126 | 711 | 419 | 292 | 106 | 1 |
-- Implicit CAD. Copyright (C) 2012, Christopher Olah ([email protected])
-- Copyright (C) 2016, Julia Longtin ([email protected])
-- Released under the GNU AGPLV3+, see LICENSE
module Graphics.Implicit.Export.Render.HandlePolylines (cleanLoopsFromSegs, reducePolyline) where
import Prelude(Bool(True, False), Maybe(Just, Nothing), map, (.), filter, (==), last, reverse, ($), (++), (-), (/), abs, (<=), (||), (&&), (*), (>), otherwise)
import Graphics.Implicit.Definitions (minℝ, Polyline(Polyline))
cleanLoopsFromSegs :: [Polyline] -> [Polyline]
cleanLoopsFromSegs =
map reducePolyline
. joinSegs
. filter polylineNotNull
-- | Join polylines that connect.
joinSegs :: [Polyline] -> [Polyline]
joinSegs [] = []
joinSegs (Polyline present:remaining) =
let
findNext :: [Polyline] -> (Maybe Polyline, [Polyline])
findNext (Polyline (p3:ps):segs)
| p3 == last present = (Just (Polyline (p3:ps)), segs)
| last ps == last present = (Just (Polyline $ reverse $ p3:ps), segs)
| otherwise = case findNext segs of (res1,res2) -> (res1,Polyline (p3:ps):res2)
findNext [] = (Nothing, [])
findNext (Polyline []:_) = (Nothing, [])
in
case findNext remaining of
(Nothing, _) -> Polyline present: joinSegs remaining
(Just (Polyline match), others) -> joinSegs $ (Polyline $ present ++ match) : others
-- | Simplify and sort a polyline.
reducePolyline :: Polyline -> Polyline
reducePolyline (Polyline ((x1,y1):(x2,y2):(x3,y3):others))
-- Remove duplicate points.
| (x1,y1) == (x2,y2) = reducePolyline (Polyline ((x2,y2):(x3,y3):others))
| abs ( (y2-y1)/(x2-x1) - (y3-y1)/(x3-x1) ) <= minℝ
|| ( (x2-x1) == 0 && (x3-x1) == 0 && (y2-y1)*(y3-y1) > 0) =
reducePolyline (Polyline ((x1,y1):(x3,y3):others))
| otherwise = Polyline ((x1,y1) : points (reducePolyline (Polyline ((x2,y2):(x3,y3):others))))
where
points (Polyline pts) = pts
-- | remove duplicate points
reducePolyline (Polyline ((x1,y1):(x2,y2):others)) =
if (x1,y1) == (x2,y2) then reducePolyline (Polyline ((x2,y2):others)) else Polyline ((x1,y1):(x2,y2):others)
-- | Return the last result.
reducePolyline l = l
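-- An illustrative reduction of 'reducePolyline', following the collinearity
-- guard above: three collinear points collapse to just their endpoints, e.g.
--
-- > reducePolyline (Polyline [(0,0), (1,1), (2,2)]) == Polyline [(0,0), (2,2)]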
-- | Ensure that polylines are not empty.
polylineNotNull :: Polyline -> Bool
polylineNotNull (Polyline (_:_:_)) = True
polylineNotNull (Polyline [_]) = True
polylineNotNull (Polyline []) = False
{-cleanLoopsFromSegs =
connectPolys
-- . joinSegs
. filter (not . degeneratePoly)
polylinesFromSegsOnGrid = undefined
degeneratePoly [] = True
degeneratePoly [a,b] = a == b
degeneratePoly _ = False
data SegOrPoly = Seg (ℝ2) ℝ ℝ2 -- Basis, shift, interval
| Poly [ℝ2]
isSeg (Seg _ _ _) = True
isSeg _ = False
toSegOrPoly :: Polyline -> SegOrPoly
toSegOrPoly [a, b] = Seg v (a⋅vp) (a⋅v, b⋅v)
where
v@(va, vb) = normalized (b ^-^ a)
vp = (-vb, va)
toSegOrPoly ps = Poly ps
fromSegOrPoly :: SegOrPoly -> Polyline
fromSegOrPoly (Seg v@(va,vb) s (a,b)) = [a*^v ^+^ t, b*^v ^+^ t]
where t = s*^(-vb, va)
fromSegOrPoly (Poly ps) = ps
joinSegs :: [Polyline] -> [Polyline]
joinSegs = map fromSegOrPoly . joinSegs' . map toSegOrPoly
joinSegs' :: [SegOrPoly] -> [SegOrPoly]
joinSegs' segsOrPolys = polys ++ concat (map joinAligned aligned) where
polys = filter (not.isSeg) segsOrPolys
segs = filter isSeg segsOrPolys
aligned = groupWith (\(Seg basis p _) -> (basis,p)) segs
joinAligned segs@((Seg b z _):_) = mergeAdjacent orderedSegs where
orderedSegs = sortBy (\(Seg _ _ (a1,_)) (Seg _ _ (b1,_)) -> compare a1 b1) segs
mergeAdjacent (pres@(Seg _ _ (x1a,x2a)) : next@(Seg _ _ (x1b,x2b)) : others) =
if x2a == x1b
then mergeAdjacent ((Seg b z (x1a,x2b)): others)
else pres : mergeAdjacent (next : others)
mergeAdjacent a = a
joinAligned [] = []
connectPolys :: [Polyline] -> [Polyline]
connectPolys [] = []
connectPolys (present:remaining) =
let
findNext (ps@(p:_):segs) =
if p == last present
then (Just ps, segs)
else (a, ps:b) where (a,b) = findNext segs
findNext [] = (Nothing, [])
in
case findNext remaining of
(Nothing, _) -> present:(connectPolys remaining)
(Just match, others) -> connectPolys $ (present ++ tail match): others
-}
|
krakrjak/ImplicitCAD
|
Graphics/Implicit/Export/Render/HandlePolylines.hs
|
agpl-3.0
| 4,382 | 3 | 17 | 970 | 1,045 | 590 | 455 | 37 | 4 |
module Main where
import System.Environment (getArgs)
main :: IO ()
main = do args <- getArgs
css <- mapM readFile args
mapM_ putStr css
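-- Illustrative invocation, assuming the program is installed as an executable
-- named @cat@: running @cat a.txt b.txt@ reads both files and prints their
-- contents in order, mirroring the 'mapM_ putStr' pipeline above.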
|
tyoko-dev/coreutils-haskell
|
src/Cat.hs
|
agpl-3.0
| 158 | 0 | 8 | 45 | 56 | 28 | 28 | 6 | 1 |
-- | The CoALP top-level module.
module CoALP
(
module CoALP.Term
, module CoALP.Clause
, module CoALP.Subst
, module CoALP.Derivation
, module CoALP.Tree
, module CoALP.Resolution
) where
import CoALP.Term
import CoALP.Clause
import CoALP.Subst
import CoALP.Derivation
import CoALP.Tree
import CoALP.Resolution
|
vkomenda/CoALP
|
lib/CoALP.hs
|
lgpl-3.0
| 334 | 0 | 5 | 61 | 73 | 47 | 26 | 14 | 0 |
-- Copyright 2013 Matthew Spellings
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
-- http://www.apache.org/licenses/LICENSE-2.0
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Data.Array.Accelerate.HasdyContrib where
import Data.Array.Accelerate as A
-- | Repeat each value in a sequence a given number of times.
-- repeat [1, 0, 4] [1.0, 1.2, 1.3] == [1.0, 1.3, 1.3, 1.3, 1.3]
repeat::Elt a=>Acc (A.Vector Int)->Acc (A.Vector a)->Acc (A.Vector a)
repeat ns xs = gather idx xs
where
(starts, outputSize) = scanl' (+) 0 ns
size = index1 . the $ outputSize
range = A.generate (A.shape ns) unindex1
idx' = scatterIf starts ns (>* 0) (fill size 0) range
idx = A.scanl1 max idx'
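-- For the example above (ns = [1, 0, 4], xs = [1.0, 1.2, 1.3]) the
-- intermediate arrays work out as follows: starts = [0, 1, 1] with an output
-- size of 5, idx' = [0, 2, 0, 0, 0] after the conditional scatter, and
-- idx = [0, 2, 2, 2, 2] after the running maximum, which gathers to
-- [1.0, 1.3, 1.3, 1.3, 1.3].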
-- | "Unfold" a list of initial values into segments
-- unfoldSeg (\x y -> x + y + 2) 0 [3, 1, 0, 2] [100, 0, 200, 300] == [100, 102, 104, 0, 300, 302]
unfoldSeg::Elt a=>(Exp a->Exp a->Exp a)->Exp a->Acc (A.Vector Int)->Acc (A.Vector a)->Acc (A.Vector a)
unfoldSeg f x0 ns xs = scanl1Seg f scattered ns
where
(starts, outputSize) = scanl' (+) 0 ns
size = index1 . the $ outputSize
blank = fill size x0
scattered = scatterIf starts ns (>* 0) blank xs
|
klarh/hasdy
|
src/Data/Array/Accelerate/HasdyContrib.hs
|
apache-2.0
| 1,589 | 0 | 13 | 318 | 365 | 194 | 171 | 15 | 1 |
module Emulator.CPU.Instructions.ARM.Parser where
import Emulator.CPU hiding (SoftwareInterrupt)
import Emulator.CPU.Instructions.ARM
import Emulator.CPU.Instructions.Types
import Emulator.Types
import Utilities.Parser.TemplateHaskell
import Utilities.Show
import Data.Bits
import Data.Maybe
parseARM :: MWord -> Either String (Condition, ARMInstruction)
parseARM w
| w .&. 0x0FFFFFF0 == 0x012FFF10 = Right (getCondition w, readBranchExchange w) -- Definitely branch exchange instruction
| (w .&. 0x0C000000 == 0x00) && (testBit w 25 || (w .&. 0b10010000) /= 0b10010000) = -- Data Processing thing
Right (getCondition w,
DataProcessing (getOpcode w)
(SetCondition $ w `testBit` 20)
(RegisterName $ fromIntegral $ $(bitmask 15 12) w)
(RegisterName $ fromIntegral $ $(bitmask 19 16) w)
(parseOperand2 (Immediate $ w `testBit` 25) w))
| w .&. 0x0FB00FF0 == 0x01000090 = Right (getCondition w, readSingleDataSwap w) -- Single data swap
| $(bitmask 27 24) w == 0b1111 = Right (getCondition w, SoftwareInterrupt) -- Software interrupt
| otherwise =
case w .&. 0x0E000000 of -- Test the identity bits
0x00 -> if (w .&. 0x010000F0) == 0x90 then Right (getCondition w, readGeneralMultiply w) -- multiply
else Right (getCondition w, readHalfWordDataTransfer w)-- halfword data transfer
0x08000000 -> Right (getCondition w, readBlockDataTransfer w) -- Block data transfer
0x0A000000 -> Right (getCondition w, readBranch w) -- Branch instruction
0x0C000000 -> error "parseARM: undefined instruction: CoprocessorDataTransfer" -- Coprocessor data transfer
0x0E000000 -> error "parseARM: undefined instruction: CoprocessorDataOperation" -- Coprocessor data operation
x | x == 0x6000000 || x == 0x4000000 -> Right (getCondition w, readLoadStore w) -- Load/Store
_ -> error $ "Undefined opcode: 0x" ++ showHex w
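-- An illustrative decode, relying only on the standard ARM encoding: the word
-- 0xE12FFF1E (the usual @BX lr@ instruction) matches the branch-exchange
-- pattern in 'parseARM' above, so it parses to a 'BranchExchange' of register
-- 14 (the link register) under the condition decoded from the top nibble 0xE,
-- i.e. the ARM "always" condition.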
getCondition :: MWord -> Condition
getCondition w =
case conditionFromByte $ fromIntegral $ $(bitmask 31 28) w of
Just x -> x
Nothing -> error $ "getCondition: invalid condition (" ++ show w ++ ")"
getOpcode :: MWord -> Opcode
getOpcode w =
case opcodeFromByte $ fromIntegral $ $(bitmask 24 21) w of
Just x -> x
Nothing -> error $ "getOpcode: invalid opcode (" ++ show w ++ ")"
readBranchExchange :: MWord -> ARMInstruction
readBranchExchange w =
BranchExchange $ RegisterName $ fromIntegral $ $(bitmask 3 0) w
readBranch :: MWord -> ARMInstruction
readBranch br = Branch linkBit offset
where
linkBit = Link $ testBit br 24
offset = fromIntegral (br `shiftL` 8) `shiftR` 6
-- Detect whether it is a Multiply or a Multiply long
readGeneralMultiply :: MWord -> ARMInstruction
readGeneralMultiply instr =
if isMulLong then readMultiplyLong instr else readMultiply instr
where
isMulLong = testBit instr 23
readMultiply :: MWord -> ARMInstruction
readMultiply instr =
Multiply accumulate (SetCondition setCondition) (RegisterName $ fromIntegral dest) (RegisterName $ fromIntegral operand1)
(RegisterName $ fromIntegral operand2) (RegisterName $ fromIntegral operand3)
where
accumulate = testBit instr 21
setCondition = testBit instr 20
dest = $(bitmask 19 16) instr
operand1 = $(bitmask 15 12) instr
operand2 = $(bitmask 11 8) instr
operand3 = $(bitmask 3 0) instr
readMultiplyLong :: MWord -> ARMInstruction
readMultiplyLong instr =
MultiplyLong signed
accumulate
(SetCondition setCondition)
(RegisterName $ fromIntegral destHi)
(RegisterName $ fromIntegral destLo)
(RegisterName $ fromIntegral operand1)
(RegisterName $ fromIntegral operand2)
where
signed = testBit instr 22
accumulate = testBit instr 21
setCondition = testBit instr 20
destHi = $(bitmask 19 16) instr
destLo = $(bitmask 15 12) instr
operand1 = $(bitmask 11 8) instr
operand2 = $(bitmask 3 0) instr
readSingleDataSwap :: MWord -> ARMInstruction
readSingleDataSwap instr =
SingleDataSwap granularity
(RegisterName $ fromIntegral base)
(RegisterName $ fromIntegral dest)
(RegisterName $ fromIntegral src)
where
granularity = if instr `testBit` 22 then Byte else Word
base = $(bitmask 19 16) instr
dest = $(bitmask 15 12) instr
src = $(bitmask 3 0) instr
-- Actually a halfword or signed data transfer but that wouldn't make a nice function name
readHalfWordDataTransfer :: MWord -> ARMInstruction
readHalfWordDataTransfer instr
| testBit instr 22 =
HalfwordDataTransferImmediate preIndex upDown writeBack load signed granularity base dest offsetImmediate
| otherwise =
HalfwordDataTransferRegister preIndex upDown writeBack load signed granularity base dest offset
where
preIndex = if instr `testBit` 24 then Pre else Post
upDown = if instr `testBit` 23 then Up else Down
writeBack = testBit instr 21
load = if instr `testBit` 20 then Load else Store
base = RegisterName $ fromIntegral $ (instr .&. 0xF0000) `shiftR` 16
dest = RegisterName $ fromIntegral $ (instr .&. 0xF000) `shiftR` 12
offset = RegisterName $ fromIntegral $ instr .&. 0xF
granularity = if instr `testBit` 5 then HalfWord else Byte
signed = instr `testBit` 6
offsetImmediate =
($(bitmask 11 8) instr `shiftL` 4) .|. $(bitmask 3 0) instr
readLoadStore :: MWord -> ARMInstruction
readLoadStore instr =
SingleDataTransfer prePost upDown granularity writeBack loadStore base dest offset
where
prePost = if instr `testBit` 24 then Pre else Post
upDown = if instr `testBit` 23 then Up else Down
granularity = if instr `testBit` 22 then Byte else Word
writeBack = testBit instr 21
loadStore = if instr `testBit` 20 then Load else Store
base = RegisterName $ fromIntegral $ $(bitmask 19 16) instr
dest = RegisterName $ fromIntegral $ $(bitmask 15 12) instr
offset = if instr `testBit` 25
then Left $ parseShiftedRegister instr
else Right $ $(bitmask 11 0) instr
readBlockDataTransfer :: MWord -> ARMInstruction
readBlockDataTransfer instr =
BlockDataTransfer prePost upDown forceUser writeBack loadStore base regList
where
prePost = if instr `testBit` 24 then Pre else Post
upDown = if instr `testBit` 23 then Up else Down
forceUser = testBit instr 22
writeBack = testBit instr 21
loadStore = if instr `testBit` 20 then Load else Store
base = RegisterName $ fromIntegral $ $(bitmask 19 16) instr
regList = parseRegisterList ($(bitmask 15 0) instr) 16
parseOperand2 :: Immediate -> MWord -> Either (Shifted RegisterName) (Rotated Byte)
parseOperand2 (Immediate False) w =
Left $ parseShiftedRegister w
parseOperand2 (Immediate True) w =
Right $ Rotated (fromIntegral $ $(bitmask 11 8) w) (fromIntegral $ $(bitmask 7 0) w)
parseShiftedRegister :: MWord -> Shifted RegisterName
parseShiftedRegister w =
case w `testBit` 4 of
True ->
RegisterShift (RegisterName $ fromIntegral $ $(bitmask 11 8) w) shiftType registerName
False ->
AmountShift (fromIntegral $ $(bitmask 11 7) w) shiftType registerName
where
registerName = RegisterName $ fromIntegral $ $(bitmask 3 0) w
shiftType = fromMaybe (error "parseShiftedRegister(shiftType): unknown shift type") $
shiftTypeFromByte $ fromIntegral $ $(bitmask 6 5) w
parseRegisterList :: MWord -> Int -> RegisterList
parseRegisterList w' m = parseRegisterList' w' 0 []
where
parseRegisterList' :: MWord -> Int -> RegisterList -> RegisterList
parseRegisterList' w n list
| n == m = list
| testBit w n = parseRegisterList' w (n+1) $ RegisterName n : list
| otherwise = parseRegisterList' w (n+1) list
|
intolerable/GroupProject
|
src/Emulator/CPU/Instructions/ARM/Parser.hs
|
bsd-2-clause
| 7,897 | 0 | 14 | 1,802 | 2,375 | 1,219 | 1,156 | -1 | -1 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable, KindSignatures,
NoMonomorphismRestriction, TupleSections, OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
module Core where
import Bound
import Bound.Name
import Control.Monad
import Control.Monad.Trans.Class
import Data.Bifunctor
import Prelude hiding (pi)
import Prelude.Extras
import SrcLoc
import FastString
data Term n a
= Var !a
| Bind !(Binder n (Term n a)) (Scope (Name n ()) (Term n) a)
| App !(Term n a) !(Term n a)
| Let !Int (Prog n a) (Scope (Name n Int) (Term n) a)
| Type
| HardTerm HardTerm
| Pair !(Term n a) !(Term n a)
| Split !(Term n a) (Scope (Name n ()) (Scope (Name n ()) (Term n)) a)
| Enum [n]
| Label n
| Lift !(Term n a)
| Box !(Term n a)
| Force !(Term n a)
| Rec !(Term n a)
| Fold !(Term n a)
| Unfold !(Term n a)
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data HardTerm
= String String
| Integer Integer
| StringT
| IntegerT
deriving (Eq,Ord,Show)
instance Eq n => Eq1 (Term n)
instance Ord n => Ord1 (Term n)
instance Show n => Show1 (Term n)
data Binder n a
= Lam (Name n ())
| Pi (Name n a)
| Sigma (Name n a)
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
type Prog n a =
( Name n (Type n a,Scope (Name n Int) (Term n) a)
, [Name n (Scope (Name n Int) (Type n) a, Scope (Name n Int) (Term n) a)]
)
type LVar = Located FastString
type Type n = Term n
instance Applicative (Term n) where
pure = Var
(<*>) = ap
instance Monad (Term n) where
return = Var
(>>=) = bindTerm
bindTerm :: Term n a -> (a -> Term n b) -> Term n b
bindTerm tm f = case tm of
Var a -> f a
Bind b s -> Bind (fmap (`bindTerm` f) b) (s >>>= f)
App e1 e2 -> App (bindTerm e1 f) (bindTerm e2 f)
Let n p e -> Let n (bindProg p f) (e >>>= f)
Type -> Type
HardTerm h -> HardTerm h
Pair e1 e2 -> Pair (bindTerm e1 f) (bindTerm e2 f)
Split b s -> Split (bindTerm b f) (s >>>= (lift . f))
Enum ls -> Enum ls
Label l -> Label l
Lift t -> Lift (bindTerm t f)
Box t -> Box (bindTerm t f)
Force t -> Force (bindTerm t f)
Rec t -> Rec (bindTerm t f)
Fold t -> Fold (bindTerm t f)
Unfold t -> Unfold (bindTerm t f)
bindProg :: Prog n a -> (a -> Term n b) -> Prog n b
bindProg (p0,ps) f =
( fmap (bimap (`bindTerm` f) (>>>= f)) p0
, map (fmap (bimap (>>>= f) (>>>= f))) ps
)
data Value m n
= Neutral (Neutral m n)
| VType
| VBind (Binder n (Value m n)) (Value m n -> m (Value m n))
| VTmp Int
data Neutral m n
= NVar n
| NApp (Neutral m n) (Value m n)
-- * Smart constructors
type CoreTerm = Term LVar LVar
type CoreType = CoreTerm
q :: (Name LVar CoreTerm -> Binder LVar CoreTerm) -> [(LVar,CoreTerm)]
-> CoreTerm -> CoreTerm
q f = flip (foldr (\(v,b) e -> Bind (f (Name v b)) (abstract1Name v e)))
lam :: [LVar] -> CoreTerm -> CoreTerm
lam = flip (foldr (\v e -> Bind (Lam (Name v ())) (abstract1Name v e)))
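-- An illustrative use of 'lam' (a sketch using the located names already
-- employed elsewhere in this module): the identity function \x -> x can be
-- built as
--
-- > lam [noLoc (fsLit "x")] (Var (noLoc (fsLit "x")))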
split :: CoreTerm -> (LVar,LVar) -> CoreTerm -> CoreTerm
split t1 (x,y) t2 = Split t1 (abstract1Name x (abstract1Name y t2))
pis' :: [(LVar,CoreType)] -> CoreType -> CoreType
pis' = q Pi
pis :: [LVar] -> CoreType -> CoreType -> CoreType
pis ns t = pis' (map (,t) ns)
pi :: LVar -> CoreType -> CoreType -> CoreType
pi n t = pis' [(n,t)]
sigmas' :: [(LVar,CoreType)] -> CoreType -> CoreType
sigmas' = q Sigma
sigmas :: [LVar] -> CoreType -> CoreType -> CoreType
sigmas ns t = sigmas' (map (,t) ns)
sigma :: LVar -> CoreType -> CoreType -> CoreType
sigma n t = sigmas' [(n,t)]
(->-) :: CoreType -> CoreType -> CoreType
(->-) = pi (noLoc (fsLit ""))
(-*-) :: CoreType -> CoreType -> CoreType
(-*-) = sigma (noLoc (fsLit ""))
|
christiaanb/DepCore
|
src/Core.hs
|
bsd-2-clause
| 3,738 | 0 | 15 | 939 | 1,909 | 998 | 911 | 139 | 16 |
{-# LANGUAGE RecursiveDo #-}
module Main where
import LOGL.Application
import Foreign.Ptr
import Graphics.UI.GLFW as GLFW
import Graphics.Rendering.OpenGL.GL as GL hiding (normalize)
import Graphics.GLUtil
import System.FilePath
import Graphics.Rendering.OpenGL.GL.Shaders.ProgramObjects
import Linear.Matrix
import Linear.V3
import Linear.Vector
import Linear.Quaternion
import Linear.Projection
import Linear.Metric
import Reactive.Banana.Frameworks
import Reactive.Banana.Combinators hiding (empty)
import LOGL.FRP
import LOGL.Objects
cubePositions :: [V3 GLfloat]
cubePositions = [
V3 0.0 0.0 0.0,
V3 2.0 5.0 (-15.0),
V3 (-1.5) (-2.2) (-2.5),
V3 (-3.8) (-2.0) (-12.3),
V3 2.4 (-0.4) (-3.5),
V3 (-1.7) 3.0 (-7.5),
V3 1.3 (-2.0) (-2.5),
V3 1.5 2.0 (-2.5),
V3 1.5 0.2 (-1.5),
V3 (-1.3) 1.0 (-1.5)]
data Camera = Camera { pos :: V3 GLfloat,
front :: V3 GLfloat,
up :: V3 GLfloat,
lastFrame :: Double,
lastX :: GLfloat,
lastY :: GLfloat,
yaw :: GLfloat,
pitch :: GLfloat,
firstMouse :: Bool}
deriving (Eq, Show)
main :: IO ()
main = do
GLFW.init
w <- createAppWindow 800 600 "LearnOpenGL"
setCursorInputMode (window w) CursorInputMode'Disabled
depthFunc $= Just Less
shader <- simpleShaderProgram ("data" </> "1_Getting-started" </> "6_Coordinate-systems" </> "coord-systems.vs")
("data" </> "1_Getting-started" </> "6_Coordinate-systems" </> "coord-systems.frag")
(vao, vbo) <- createVAO
-- load and create texture
t0 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures" </> "container.jpg")
t1 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures-combined" </> "awesomeface3.png")
-- init camera
let initCam = Camera { pos = V3 0.0 0.0 3.0, front = V3 0.0 0.0 (-1.0) , up = V3 0.0 1.0 0.0,
lastFrame = 0.0, lastX = 400.0, lastY = 300.0, yaw = -90.0,
pitch = 0.0, firstMouse = True}
--polygonMode $= (Line, Line)
let networkDescription :: MomentIO ()
networkDescription = mdo
posE <- cursorPosEvent w
idleE <- idleEvent w
timeB <- currentTimeB
keyB <- keyBehavior w
camB <- accumB initCam $ unions [
handlePosEvent <$> posE,
(doMovement <$> keyB ) <@> (timeB <@ idleE)]
reactimate $ drawScene shader t0 t1 vao w <$> (camB <@ idleE)
runAppLoopEx w networkDescription
deleteObjectName vao
deleteObjectName vbo
terminate
handlePosEvent :: CursorPosEvent -> Camera -> Camera
handlePosEvent (w, xpos, ypos) cam = cam {lastX = realToFrac xpos, lastY = realToFrac ypos,
yaw = newYaw, pitch = newPitch,
front = normalize (V3 newFrontX newFrontY newFrontZ),
firstMouse = False}
where
lx = if firstMouse cam then realToFrac xpos else lastX cam
ly = if firstMouse cam then realToFrac ypos else lastY cam
sensivity = 0.5
xoffset = ( realToFrac xpos - lx) * sensivity
yoffset = (ly - realToFrac ypos) * sensivity
newYaw = yaw cam + xoffset
newPitch = restrictPitch $ pitch cam + yoffset
newFrontX = cos (radians newYaw) * cos (radians newPitch)
newFrontY = sin (radians newPitch)
newFrontZ = sin (radians newYaw) * cos (radians newPitch)
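-- A quick sanity check of the front-vector formulas in 'handlePosEvent' above:
-- with the initial yaw of -90 degrees and pitch of 0 degrees,
-- cos (-90 deg) * cos (0 deg) = 0, sin (0 deg) = 0 and
-- sin (-90 deg) * cos (0 deg) = -1, so the front vector is V3 0 0 (-1),
-- matching 'initCam'.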
radians :: GLfloat -> GLfloat
radians deg = pi / 180.0 * deg
restrictPitch :: GLfloat -> GLfloat
restrictPitch p
| p > 89.0 = 89.0
| p < (-89.0) = -89.0
| otherwise = p
doMovement :: Keys -> Double -> Camera -> Camera
doMovement keys time cam = afterMoveRight {lastFrame = time}
where
speed = 5.0 * realToFrac (time - lastFrame cam)
upPressed = keyPressed Key'W keys
downPressed = keyPressed Key'S keys
leftPressed = keyPressed Key'A keys
rightPressed = keyPressed Key'D keys
afterZoomIn = if upPressed then moveForeward speed cam else cam
afterZoomOut = if downPressed then moveBackward speed afterZoomIn else afterZoomIn
afterMoveLeft = if leftPressed then moveLeft speed afterZoomOut else afterZoomOut
afterMoveRight = if rightPressed then moveRight speed afterMoveLeft else afterMoveLeft
moveForeward :: GLfloat -> Camera -> Camera
moveForeward speed cam = cam { pos = pos cam ^+^ (speed *^ front cam) }
moveBackward :: GLfloat -> Camera -> Camera
moveBackward speed cam = cam { pos = pos cam ^-^ (speed *^ front cam) }
moveLeft :: GLfloat -> Camera -> Camera
moveLeft speed cam = cam { pos = pos cam ^-^ (speed *^ normalize (cross (front cam ) (up cam)))}
moveRight :: GLfloat -> Camera -> Camera
moveRight speed cam = cam { pos = pos cam ^+^ (speed *^ normalize (cross (front cam ) (up cam)))}
drawScene :: ShaderProgram -> TextureObject -> TextureObject -> VertexArrayObject -> AppWindow -> Camera -> IO ()
drawScene shader t0 t1 vao w cam = do
pollEvents
clearColor $= Color4 0.2 0.3 0.3 1.0
clear [ColorBuffer, DepthBuffer]
-- Draw our first triangle
currentProgram $= Just (program shader)
activeTexture $= TextureUnit 0
textureBinding Texture2D $= Just t0
setUniform shader "ourTexture1" (TextureUnit 0)
activeTexture $= TextureUnit 1
textureBinding Texture2D $= Just t1
setUniform shader "ourTexture2" (TextureUnit 1)
let view = lookAt (pos cam) (pos cam + front cam) (up cam)
projection = perspective (pi / 4.0) (800.0 / 600.0) 0.1 (100.0 :: GLfloat)
setUniform shader "view" view
setUniform shader "projection" projection
withVAO vao $ mapM_ (drawCube shader) [0..9]
swap w
drawCube :: ShaderProgram -> Int -> IO ()
drawCube shader i = do
let angle = pi / 180.0 * 20.0 * fromIntegral i
rot = axisAngle (V3 (1.0 :: GLfloat) 0.3 0.5) (realToFrac angle)
model = mkTransformation rot (cubePositions !! i)
setUniform shader "model" model
drawArrays Triangles 0 36
createVAO :: IO (VertexArrayObject, BufferObject)
createVAO = do
vao <- genObjectName
bindVertexArrayObject $= Just vao
vbo <- makeBuffer ArrayBuffer cubeWithTexture
vertexAttribPointer (AttribLocation 0) $= (ToFloat, VertexArrayDescriptor 3 Float (5*4) offset0)
vertexAttribArray (AttribLocation 0) $= Enabled
vertexAttribPointer (AttribLocation 2) $= (ToFloat, VertexArrayDescriptor 2 Float (5*4) (offsetPtr (3*4)))
vertexAttribArray (AttribLocation 2) $= Enabled
bindVertexArrayObject $= Nothing
return (vao, vbo)
|
atwupack/LearnOpenGL
|
app/1_Getting-started/7_Camera/Camera-mouse.hs
|
bsd-3-clause
| 6,830 | 0 | 18 | 1,842 | 2,193 | 1,130 | 1,063 | 146 | 5 |
module Validations.Adapters
( module Validations.Adapters.Digestive
) where
import Validations.Adapters.Digestive
|
mavenraven/validations
|
src/Validations/Adapters.hs
|
bsd-3-clause
| 119 | 0 | 5 | 14 | 21 | 14 | 7 | 3 | 0 |
{-# LANGUAGE LambdaCase #-}
module Data.Aeson.Validation.Internal.Field
( Field
, flatten
) where
import Data.Aeson.Validation.Internal.Pair
import Data.Aeson.Validation.Internal.Prelude
import Data.Aeson.Validation.Internal.Types
import qualified Data.HashMap.Strict as HashMap
import qualified Data.List.NonEmpty as NonEmpty
flatten :: Strict -> [Field] -> [ShallowField]
flatten s xs =
mapFields (foldr step mempty xs)
where
step
:: Field
-> HashMap (Pair Demand Text) (Pair FieldMap [Schema])
-> HashMap (Pair Demand Text) (Pair FieldMap [Schema])
step (Field req path sch) =
go (NonEmpty.toList path)
where
go :: [Text]
-> HashMap (Pair Demand Text) (Pair FieldMap [Schema])
-> HashMap (Pair Demand Text) (Pair FieldMap [Schema])
go = \case
[key] ->
HashMap.alter
(\case
Nothing -> Just (Pair mempty [sch])
Just (Pair m schs) -> Just (Pair m (sch : schs)))
(Pair req key)
key:path' ->
HashMap.alter
(\case
Nothing -> val mempty []
Just (Pair m schs) -> val m schs)
(Pair req key)
where
val :: FieldMap -> [Schema] -> Maybe (Pair FieldMap [Schema])
val m ss =
Just (Pair (FieldMap (go path' (unFieldMap m))) ss)
mapFields
:: HashMap (Pair Demand Text) (Pair FieldMap [Schema])
-> [ShallowField]
mapFields =
HashMap.toList >=> go
where
go
:: (Pair Demand Text, Pair FieldMap [Schema])
-> [ShallowField]
go (Pair req key, Pair m ss) =
case mapFields (unFieldMap m) of
[] -> fields
fs -> objField fs : fields
where
fields :: [ShallowField]
fields =
map (ShallowField req key) ss
objField :: [ShallowField] -> ShallowField
objField fs =
ShallowField
{ fieldDemand = req
, fieldKey = key
, fieldSchema = SObject s fs
}
-- A FieldMap is a temporary data structure used during the conversion from
-- [Field] to [ShallowField].
newtype FieldMap
= FieldMap
{ unFieldMap :: HashMap (Pair Demand Text) (Pair FieldMap [Schema]) }
instance Semigroup FieldMap where
(FieldMap x) <> (FieldMap y) = FieldMap (x <> y)
instance Monoid FieldMap where
mempty = FieldMap mempty
mappend = (<>)
|
mitchellwrosen/json-validation
|
src/internal/Data/Aeson/Validation/Internal/Field.hs
|
bsd-3-clause
| 2,556 | 0 | 22 | 884 | 803 | 429 | 374 | 66 | 5 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] pt
[@ISO639-2@] por
[@ISO639-3@] por
[@Native name@] Português
[@English name@] Portuguese
-}
module Text.Numeral.Language.POR.TestData (cardinals, ordinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Control.Arrow ( second )
import "base" Data.List ( map )
import "base" Prelude ( Integral )
import "base-unicode-symbols" Data.Function.Unicode ( (∘) )
import "base-unicode-symbols" Data.Monoid.Unicode ( (⊕) )
import "base-unicode-symbols" Prelude.Unicode ( (⋅) )
import "numerals" Text.Numeral.Grammar
import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection )
import "numerals" Text.Numeral.Misc ( dec )
import "this" Text.Numeral.Test ( TestData )
import "text" Data.Text ( Text )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
-- Sources:
-- http://www.sonia-portuguese.com/text/numerals.htm
-- http://www.smartphrase.com/Portuguese/po_numbers_voc.shtml
base_cardinals ∷ (Integral i) ⇒ [(i, Text)]
base_cardinals =
[ (0, "zero")
, (3, "três")
, (4, "quatro")
, (5, "cinco")
, (6, "seis")
, (7, "sete")
, (8, "oito")
, (9, "nove")
, (10, "dez")
, (11, "onze")
, (12, "doze")
, (13, "treze")
, (14, "catorze")
, (15, "quinze")
, (16, "dezasseis")
, (17, "dezassete")
, (18, "dezoito")
, (19, "dezanove")
, (20, "vinte")
, (23, "vinte e três")
, (24, "vinte e quatro")
, (25, "vinte e cinco")
, (26, "vinte e seis")
, (27, "vinte e sete")
, (28, "vinte e oito")
, (29, "vinte e nove")
, (30, "trinta")
, (33, "trinta e três")
, (34, "trinta e quatro")
, (35, "trinta e cinco")
, (36, "trinta e seis")
, (37, "trinta e sete")
, (38, "trinta e oito")
, (39, "trinta e nove")
, (40, "quarenta")
, (43, "quarenta e três")
, (44, "quarenta e quatro")
, (45, "quarenta e cinco")
, (46, "quarenta e seis")
, (47, "quarenta e sete")
, (48, "quarenta e oito")
, (49, "quarenta e nove")
, (50, "cinquenta")
, (53, "cinquenta e três")
, (54, "cinquenta e quatro")
, (55, "cinquenta e cinco")
, (56, "cinquenta e seis")
, (57, "cinquenta e sete")
, (58, "cinquenta e oito")
, (59, "cinquenta e nove")
, (60, "sessenta")
, (63, "sessenta e três")
, (64, "sessenta e quatro")
, (65, "sessenta e cinco")
, (66, "sessenta e seis")
, (67, "sessenta e sete")
, (68, "sessenta e oito")
, (69, "sessenta e nove")
, (70, "setenta")
, (73, "setenta e três")
, (74, "setenta e quatro")
, (75, "setenta e cinco")
, (76, "setenta e seis")
, (77, "setenta e sete")
, (78, "setenta e oito")
, (79, "setenta e nove")
, (80, "oitenta")
, (83, "oitenta e três")
, (84, "oitenta e quatro")
, (85, "oitenta e cinco")
, (86, "oitenta e seis")
, (87, "oitenta e sete")
, (88, "oitenta e oito")
, (89, "oitenta e nove")
, (90, "noventa")
, (93, "noventa e três")
, (94, "noventa e quatro")
, (95, "noventa e cinco")
, (96, "noventa e seis")
, (97, "noventa e sete")
, (98, "noventa e oito")
, (99, "noventa e nove")
, (100, "cem")
, (105, "cento e cinco")
, (125, "cento e vinte e cinco")
, (138, "cento e trinta e oito")
, (199, "cento e noventa e nove")
, (1000, "mil")
, (1008, "mil e oito")
, (1985, "mil novecentos e oitenta e cinco")
, (3000, "três mil")
, (10000, "dez mil")
, (100000, "cem mil")
, (125000, "cento e vinte e cinco mil")
, (735346, "setecentos e trinta e cinco mil trezentos e quarenta e seis")
, (dec 6, "um milhão")
, (2 ⋅ dec 6, "dois milhões")
, (dec 7, "dez milhões")
, (dec 9, "um bilhão")
, (dec 10, "dez bilhões")
, (dec 12, "um trilhão")
, (dec 13, "dez trilhões")
, (dec 15, "um quatrilhão")
, (dec 18, "um quintilhão")
, (dec 21, "um sextilhão")
, (dec 24, "um septilhão")
, (dec 27, "um octilhão")
, (dec 30, "um nonilhão")
, (dec 33, "um decilhão")
, (dec 36, "um undecilhão")
, (dec 39, "um duodecilhão")
, (dec 42, "um tredecilhão")
, (dec 100, "dez duotrigintilhões")
]
cardinals ∷ (Integral i) ⇒ TestData i
cardinals =
[ ( "masculine"
, masculine defaultInflection
, base_cardinals
⊕ [ (1, "um")
, (2, "dois")
, (21, "vinte e um")
, (22, "vinte e dois")
, (31, "trinta e um")
, (32, "trinta e dois")
, (41, "quarenta e um")
, (42, "quarenta e dois")
, (51, "cinquenta e um")
, (52, "cinquenta e dois")
, (61, "sessenta e um")
, (62, "sessenta e dois")
, (71, "setenta e um")
, (72, "setenta e dois")
, (81, "oitenta e um")
, (82, "oitenta e dois")
, (91, "noventa e um")
, (92, "noventa e dois")
, (101, "cento e um")
, (200, "duzentos")
, (234, "duzentos e trinta e quatro")
, (250, "duzentos e cinquenta")
, (300, "trezentos")
, (330, "trezentos e trinta")
, (375, "trezentos e setenta e cinco")
, (400, "quatrocentos")
, (467, "quatrocentos e sessenta e sete")
, (500, "quinhentos")
, (600, "seiscentos")
, (700, "setecentos")
, (800, "oitocentos")
, (900, "novecentos")
, (2000, "dois mil")
, (200000, "duzentos mil")
, (500000, "quinhentos mil")
, (100001, "cem mil e um")
, (101000, "cento e um mil")
, (1537469, "um milhão quinhentos e trinta e sete mil quatrocentos e sessenta e nove")
]
)
, ( "feminine"
, feminine defaultInflection
, base_cardinals
⊕ [ (1, "uma")
, (2, "duas")
, (21, "vinte e uma")
, (22, "vinte e duas")
, (31, "trinta e uma")
, (32, "trinta e duas")
, (41, "quarenta e uma")
, (42, "quarenta e duas")
, (51, "cinquenta e uma")
, (52, "cinquenta e duas")
, (61, "sessenta e uma")
, (62, "sessenta e duas")
, (71, "setenta e uma")
, (72, "setenta e duas")
, (81, "oitenta e uma")
, (82, "oitenta e duas")
, (91, "noventa e uma")
, (92, "noventa e duas")
, (101, "cento e uma")
, (200, "duzentas")
, (234, "duzentas e trinta e quatro")
, (250, "duzentas e cinquenta")
, (300, "trezentas")
, (330, "trezentas e trinta")
, (375, "trezentas e setenta e cinco")
, (400, "quatrocentas")
, (467, "quatrocentas e sessenta e sete")
, (500, "quinhentas")
, (600, "seiscentas")
, (700, "setecentas")
, (800, "oitocentas")
, (900, "novecentas")
, (2000, "duas mil")
, (200000, "duzentas mil")
, (500000, "quinhentas mil")
, (100001, "cem mil e uma")
, (101000, "cento e uma mil")
, (1537469, "um milhão quinhentas e trinta e sete mil quatrocentas e sessenta e nove")
]
)
]
-- These are the base forms of the ordinals, stripped of their
-- ending. Append "o", "os", "a" or "as" to form combinations of
-- masculine, feminine, singular and plural ordinals.
base_ordinals ∷ (Integral i) ⇒ [(i, Text)]
base_ordinals =
[ (1, "primeir")
, (2, "segund")
, (3, "terceir")
, (4, "quart")
, (5, "quint")
, (6, "sext")
, (7, "sétim")
, (8, "oitav")
, (9, "non")
, (10, "décim")
, (11, "décimo primeir")
, (12, "décimo segund")
, (13, "décimo terceir")
, (20, "vigésim")
, (21, "vigésimo primeir")
, (30, "trigésim")
, (40, "quadragésim")
, (50, "qüinquagésim")
, (60, "sexagésim")
, (70, "septuagésim")
, (80, "octogésim")
, (90, "nonagésim")
, (100, "centésim")
, (200, "ducentésim")
, (300, "trecentésim")
, (400, "quadringentésim")
, (500, "qüingentésim")
, (600, "sexcentésim")
, (700, "setingentésim")
, (800, "octingentésim")
, (900, "nongentésim")
, (1000, "milésim")
]
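-- For example, appending the masculine singular ending "o" to the base form
-- "primeir" gives "primeiro", and appending the feminine plural ending "as"
-- gives "primeiras"; 'ordinals' below derives all four tables this way.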
ordinals ∷ (Integral i) ⇒ TestData i
ordinals = map (\(n, f, e) → ( n
, f defaultInflection
, map (second (⊕ e)) base_ordinals)
)
[ ("masculine singular", masculine ∘ singular, "o")
, ("masculine plural", masculine ∘ plural, "os")
, ("feminine singular", feminine ∘ singular, "a")
, ("feminine plural", feminine ∘ plural, "as")
]
|
telser/numerals
|
src-test/Text/Numeral/Language/POR/TestData.hs
|
bsd-3-clause
| 8,890 | 0 | 12 | 2,452 | 2,460 | 1,610 | 850 | 258 | 1 |
module Math.Probably.Student where
import Math.Probably.FoldingStats
import qualified Data.Vector.Storable as V
--http://www.haskell.org/haskellwiki/Gamma_and_Beta_function
--cof :: [Double]
cof = [76.18009172947146,-86.50532032941677,24.01409824083091,
-1.231739572450155,0.001208650973866179,-0.000005395239384953]
--ser :: Double
ser = 1.000000000190015
--gammaln :: Double -> Double
gammaln xx = let tmp' = (xx+5.5) - (xx+0.5)*log(xx+5.5)
ser' = foldl (+) ser $ map (\(y,c) -> c/(xx+y)) $ zip [1..] cof
in -tmp' + log(2.5066282746310005 * ser' / xx)
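-- The coefficients above are the familiar Lanczos-style approximation to the
-- log-gamma function; as a rough check, gammaln 4 should come out very close
-- to log 6 (about 1.7918), since Gamma(4) = 3! = 6.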
beta z w = exp (gammaln z + gammaln w - gammaln (z+w))
fac n = product [1..n]
ixbeta x a b = let top = fac $ a+b-1
down j = fac j * fac (a+b-1-j)
in sum $ map (\j->(top/down j)*(x**j)*(1-x)**(a+b-1-j)) [a..a+b-1]
studentIntegral t v = 1-ixbeta (v/(v+t*t)) (v/2) (1/2)
oneSampleT v0 = fmap (\(mean,sd,n)-> (mean - v0)/(sd/(sqrt n))) meanSDNF
pairedSampleT = before (fmap (\(mean,sd,n)-> (mean)/(sd/(sqrt n))) meanSDNF)
(uncurry (-))
tTerms = V.fromList $ map tTermUnmemo [1..100]
tTermUnmemo nu = gammaln ((realToFrac nu+1)/2) - log(realToFrac nu*pi)/2 - gammaln (realToFrac nu/2)
tTerm1 :: Int -> Double
tTerm1 df | df <= 100 = (V.!) tTerms df
| otherwise = tTermUnmemo df
tDist df t = tTerm1 df - (realToFrac df +1/2) * log (1+(t*t)/(realToFrac df))
tDist3 mean prec df x
= tTerm1 df
+ log(prec)/2
- (realToFrac df +1/2) * log (1+(prec*xMinusMu*xMinusMu)/(realToFrac df))
where xMinusMu = x-mean
|
glutamate/probably-base
|
Math/Probably/Student.hs
|
bsd-3-clause
| 1,639 | 0 | 16 | 396 | 854 | 450 | 404 | 29 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Net where
import qualified Data.ByteString.Lazy as ByteString
import Network.HTTP.Client( HttpException( .. ) )
import Network.Wreq
import Control.Lens
import Control.Exception
downLoad :: FilePath -> String -> IO ()
downLoad n u = do
putStrLn $ "downloading... " ++ n
r <- try $ get u
case r of
-- Left (e::HttpException) -> do putStrLn "-------"
-- putStrLn e
-- putStrLn "-------"
Left (InvalidUrlException s s1) -> do
putStrLn "-------"
putStrLn s
putStrLn s1
putStrLn "-------"
putStrLn ""
Left (StatusCodeException s _ _) -> putStrLn $ (show s) ++ " " ++ u
Left _ -> putStrLn ""
Right res ->
case (res ^. responseStatus ^. statusCode) of
200 ->
ByteString.writeFile n bin where bin = res ^. responseBody
_ -> putStrLn "Bad code "
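-- Illustrative call of 'downLoad' (the URL is a placeholder): it performs an
-- HTTP GET and, on a 200 response, writes the body to the given file name,
-- printing diagnostics otherwise.
--
-- > downLoad "example.html" "http://example.com/"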
|
mike-k-houghton/Builder
|
src/Net.hs
|
bsd-3-clause
| 1,330 | 0 | 15 | 625 | 262 | 132 | 130 | 27 | 5 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Nested
( Vec(..)
, Nesting(..)
, Nesting1(..), nesting1Every
, Nested
, genNested, genNestedA
, indexNested, indexNested'
, transpose
, transpose'
, gmul'
, diagNV
, joinNested
, mapNVecSlices
, nIxRows
, vGen, vIFoldMap, itraverseNested
, liftNested
, unScalar, unNest, unVector
, sumRowsNested
) where
import Control.Applicative
import Control.DeepSeq
import Data.Distributive
import Data.Foldable
import Data.Kind
import Data.List.Util
import Data.Monoid
import Data.Singletons
import Data.Singletons.Prelude.List hiding (Length, Reverse, (%:++), sReverse)
import Data.Type.Combinator
import Data.Type.Combinator.Util
import Data.Type.Index
import Data.Type.Length as TCL
import Data.Type.Product as TCP hiding (toList)
import Data.Type.Sing
import Data.Type.SnocProd
import Data.Type.Uniform
import TensorOps.NatKind
import Type.Class.Witness
import Type.Family.List
import Type.Family.List.Util
import qualified Data.Singletons.TypeLits as GT
import qualified Data.Type.Nat as TCN
import qualified Data.Type.Vector as TCV
import qualified Data.Type.Vector.Util as TCV
import qualified Data.Vector.Sized as VS
data Uncons :: (k -> Type -> Type) -> k -> Type -> Type where
UNil :: Uncons v (FromNat 0) a
UCons :: !(Sing n) -> !a -> !(v n a) -> Uncons v (Succ n) a
class NatKind k => Vec (v :: k -> Type -> Type) where
vHead :: p j -> v (Succ j) a -> a
vTail :: v (Succ j) a -> v j a
vGenA :: Applicative f => Sing j -> (IndexN k j -> f a) -> f (v j a)
vIndex :: IndexN k j -> v j a -> a
vUncons :: Sing j -> v j a -> Uncons v j a
vEmpty :: v (FromNat 0) a
vCons :: a -> v j a -> v (Succ j) a
vITraverse
:: Applicative f
=> (IndexN k j -> a -> f b)
-> v j a
-> f (v j b)
vGen
:: Vec (v :: k -> Type -> Type)
=> Sing j
-> (IndexN k j -> a)
-> v j a
vGen s f = getI $ vGenA s (I . f)
{-# INLINE vGen #-}
vIFoldMap
:: (Monoid m, Vec v)
=> (IndexN k j -> a -> m)
-> v j a
-> m
vIFoldMap f = getConst . vITraverse (\i -> Const . f i)
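-- For instance, summing the elements of any 'Vec' while ignoring the index can
-- be written as @vIFoldMap (\_ x -> Sum x)@; the 'Const' applicative used
-- above is what turns the indexed traversal into a pure monoidal fold.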
instance Vec (Flip2 VS.VectorT I) where
vHead _ = getI . VS.head . getFlip2
{-# INLINE vHead #-}
vTail = Flip2 . VS.tail . getFlip2
{-# INLINE vTail #-}
vGenA = \case
GT.SNat -> fmap Flip2 . VS.generateA . (fmap I .)
{-# INLINE vGenA #-}
vIndex i = (VS.!! i) . getFlip2
{-# INLINE vIndex #-}
vUncons = \case
GT.SNat -> \case
Flip2 xs -> case VS.uncons xs of
VS.VNil -> UNil
VS.VCons (I y) ys -> UCons sing y (Flip2 ys)
{-# INLINE vUncons #-}
vEmpty = Flip2 VS.empty
{-# INLINE vEmpty #-}
vCons x (Flip2 xs) = Flip2 (VS.cons (I x) xs)
{-# INLINE vCons #-}
vITraverse f (Flip2 xs) = Flip2 <$> VS.itraverse (\i (I x) -> I <$> f i x) xs
{-# INLINE vITraverse #-}
instance Vec (Flip2 TCV.VecT I) where
vHead _ = getI . TCV.head' . getFlip2
{-# INLINE vHead #-}
vTail = Flip2 . TCV.tail' . getFlip2
{-# INLINE vTail #-}
vGenA = \case
SN n -> \f -> Flip2 <$> TCV.vgenA n (fmap I . f)
{-# INLINE vGenA #-}
vIndex i = TCV.index' i . getFlip2
{-# INLINE vIndex #-}
vUncons = \case
SN TCN.Z_ -> \case
Flip2 TCV.ØV -> UNil
SN (TCN.S_ n) -> \case
Flip2 (I x TCV.:* xs) -> UCons (SN n) x (Flip2 xs)
{-# INLINE vUncons #-}
vEmpty = Flip2 TCV.ØV
{-# INLINE vEmpty #-}
vCons x (Flip2 xs) = Flip2 (I x TCV.:* xs)
{-# INLINE vCons #-}
vITraverse f (Flip2 xs) = Flip2 <$> TCV.itraverse (\i (I x) -> I <$> f i x) xs
{-# INLINE vITraverse #-}
class Nesting (w :: k -> Type) (c :: j -> Constraint) (v :: k -> j -> j) where
nesting :: w i -> c a :- c (v i a)
class Nesting1 (w :: k -> Type) (c :: j -> Constraint) (v :: k -> j) where
nesting1 :: w a -> Wit (c (v a))
instance Nesting w NFData (Flip2 VS.VectorT I) where
nesting _ = Sub Wit
{-# INLINE nesting #-}
instance Nesting w Show (Flip2 VS.VectorT I) where
nesting _ = Sub Wit
{-# INLINE nesting #-}
instance Functor f => Nesting1 w Functor (Flip2 VS.VectorT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Applicative f => Nesting1 Sing Applicative (Flip2 VS.VectorT f) where
nesting1 GT.SNat = Wit
{-# INLINE nesting1 #-}
instance Foldable f => Nesting1 w Foldable (Flip2 VS.VectorT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Traversable f => Nesting1 w Traversable (Flip2 VS.VectorT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Distributive f => Nesting1 Sing Distributive (Flip2 VS.VectorT f) where
nesting1 GT.SNat = Wit
{-# INLINE nesting1 #-}
instance Nesting w NFData (Flip2 TCV.VecT I) where
nesting _ = Sub Wit
{-# INLINE nesting #-}
instance Nesting w Show (Flip2 TCV.VecT I) where
nesting _ = Sub Wit
{-# INLINE nesting #-}
instance Functor f => Nesting1 w Functor (Flip2 TCV.VecT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Applicative f => Nesting1 Sing Applicative (Flip2 TCV.VecT f) where
nesting1 (SN n) = Wit \\ n
{-# INLINE nesting1 #-}
instance Foldable f => Nesting1 w Foldable (Flip2 TCV.VecT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Traversable f => Nesting1 w Traversable (Flip2 TCV.VecT f) where
nesting1 _ = Wit
{-# INLINE nesting1 #-}
instance Distributive f => Nesting1 Sing Distributive (Flip2 TCV.VecT f) where
nesting1 (SN n) = Wit \\ n
{-# INLINE nesting1 #-}
nesting1Every
:: forall p w c v as. Nesting1 w c v
=> p v
-> Prod w as
-> Wit (Every c (v <$> as))
nesting1Every p = \case
Ø -> Wit
(w :: w a) :< (ws :: Prod w as')
-> Wit \\ (nesting1 w :: Wit (c (v a)))
\\ (nesting1Every p ws :: Wit (Every c (v <$> as')))
{-# INLINE nesting1Every #-}
data Nested :: (k -> Type -> Type) -> [k] -> Type -> Type where
NØ :: !a -> Nested v '[] a
NS :: !(v j (Nested v js a)) -> Nested v (j ': js) a
instance (NFData a, Nesting Proxy NFData v) => NFData (Nested v js a) where
rnf = \case
NØ x -> deepseq x ()
NS (xs :: v j (Nested v ks a))
-> deepseq xs ()
\\ (nesting Proxy :: NFData (Nested v ks a) :- NFData (v j (Nested v ks a)))
{-# INLINE rnf #-}
instance (Num a, Applicative (Nested v js)) => Num (Nested v js a) where
(+) = liftA2 (+)
{-# INLINE (+) #-}
(*) = liftA2 (*)
{-# INLINE (*) #-}
(-) = liftA2 (-)
{-# INLINE (-) #-}
negate = fmap negate
{-# INLINE negate #-}
abs = fmap abs
{-# INLINE abs #-}
signum = fmap signum
{-# INLINE signum #-}
fromInteger = pure . fromInteger
{-# INLINE fromInteger #-}
instance Nesting1 Proxy Functor v => Functor (Nested v js) where
fmap f = \case
NØ x -> NØ (f x)
NS (xs :: v j (Nested v ks a))
-> NS $ (fmap.fmap) f xs
\\ (nesting1 Proxy :: Wit (Functor (v j)))
instance (SingI js, Nesting1 Sing Applicative v, Nesting1 Proxy Functor v) => Applicative (Nested v js) where
pure :: forall a. a -> Nested v js a
pure x = go sing
where
go :: Sing ks
-> Nested v ks a
go = \case
SNil -> NØ x
(s :: Sing k) `SCons` ss -> NS (pure (go ss))
\\ (nesting1 s :: Wit (Applicative (v k)))
{-# INLINE pure #-}
(<*>) :: forall a b. Nested v js (a -> b) -> Nested v js a -> Nested v js b
(<*>) = go sing
where
go :: Sing ks
-> Nested v ks (a -> b)
-> Nested v ks a
-> Nested v ks b
go = \case
SNil -> \case
NØ f -> \case
NØ x -> NØ (f x)
(s :: Sing k) `SCons` ss -> \case
NS fs -> \case
NS xs -> NS $ liftA2 (go ss) fs xs
\\ (nesting1 s :: Wit (Applicative (v k)))
{-# INLINE (<*>) #-}
instance Nesting1 Proxy Foldable v => Foldable (Nested v js) where
foldMap f = \case
NØ x -> f x
NS (xs :: v j (Nested v ks a))
-> (foldMap . foldMap) f xs
\\ (nesting1 Proxy :: Wit (Foldable (v j)))
instance (Nesting1 Proxy Functor v, Nesting1 Proxy Foldable v, Nesting1 Proxy Traversable v) => Traversable (Nested v js) where
traverse f = \case
NØ x -> NØ <$> f x
NS (xs :: v j (Nested v ks a))
-> NS <$> (traverse . traverse) f xs
\\ (nesting1 Proxy :: Wit (Traversable (v j)))
instance (Vec v, SingI js, Nesting1 Proxy Functor v) => Distributive (Nested v js) where
distribute
:: forall f a. Functor f
=> f (Nested v js a)
-> Nested v js (f a)
distribute xs = genNested sing $ \i -> indexNested i <$> xs
{-# INLINE distribute #-}
-- distribute = flip go sing
-- where
-- go :: f (Nested v ks a)
-- -> Sing ks
-- -> Nested v ks (f a)
-- go xs = \case
-- SNil -> NØ $ unScalar <$> xs
-- s `SCons` ss -> NS . vGen s $ \i ->
-- go (fmap (indexNested' (i :< Ø)) xs) ss
-- TODO: rewrite rules? lazy pattern matches?
nHead
:: forall v p j js a. Vec v
=> p j
-> Nested v (Succ j ': js) a
-> Nested v js a
nHead p = \case
NS xs -> vHead p xs
{-# INLINE nHead #-}
nTail
:: Vec v
=> Nested v (Succ j ': js) a
-> Nested v (j ': js) a
nTail = \case
NS xs -> NS $ vTail xs
{-# INLINE nTail #-}
unScalar
:: Nested v '[] a
-> a
unScalar = \case
NØ x -> x
{-# INLINE unScalar #-}
unNest
:: Nested v (j ': js) a
-> v j (Nested v js a)
unNest = \case
NS xs -> xs
{-# INLINE unNest #-}
unVector
:: Functor (v j)
=> Nested v '[j] a
-> v j a
unVector = \case
NS xs -> unScalar <$> xs
{-# INLINE unVector #-}
nVector
:: Functor (v j)
=> v j a
-> Nested v '[j] a
nVector = NS . fmap NØ
{-# INLINE nVector #-}
genNested
:: Vec (v :: k -> Type -> Type)
=> Sing ns
-> (Prod (IndexN k) ns -> a)
-> Nested v ns a
genNested s f = getI $ genNestedA s (I . f)
{-# INLINE genNested #-}
genNestedA
:: (Vec (v :: k -> Type -> Type), Applicative f)
=> Sing ns
-> (Prod (IndexN k) ns -> f a)
-> f (Nested v ns a)
genNestedA = \case
SNil -> \f -> NØ <$> f Ø
s `SCons` ss -> \f -> NS <$> vGenA s (\i -> genNestedA ss (f . (i :<)))
indexNested
:: Vec (v :: k -> Type -> Type)
=> Prod (IndexN k) ns
-> Nested v ns a
-> a
indexNested = \case
Ø -> \case
NØ x -> x
i :< is -> \case
NS xs -> indexNested is (vIndex i xs)
-- indexNested i = unScalar . indexNested' i
-- \\ appendNil (prodLength i)
indexNested'
:: Vec (v :: k -> Type -> Type)
=> Prod (IndexN k) ms
-> Nested v (ms ++ ns) a
-> Nested v ns a
indexNested' = \case
Ø -> id
i :< is -> \case
NS xs -> indexNested' is (vIndex i xs)
joinNested
:: forall v ns ms a. Nesting1 Proxy Functor v
=> Nested v ns (Nested v ms a)
-> Nested v (ns ++ ms) a
joinNested = \case
NØ x -> x
NS (xs :: v j (Nested v js (Nested v ms a))) ->
NS $ fmap joinNested xs
\\ (nesting1 Proxy :: Wit (Functor (v j)))
mapNVecSlices
:: forall v ns ms a b. Nesting1 Proxy Functor v
=> (Nested v ms a -> b)
-> Length ns
-> Nested v (ns ++ ms) a
-> Nested v ns b
mapNVecSlices f = \case
LZ -> NØ . f
LS l -> \case
NS (xs :: v j (Nested v js a)) ->
NS $ mapNVecSlices f l <$> xs
\\ (nesting1 Proxy :: Wit (Functor (v j)))
diagNV'
:: forall v n ns a. (Vec v, Nesting1 Proxy Functor v)
=> Sing n
-> Nested v (n ': n ': ns) a
-> Nested v (n ': ns) a
diagNV' s = \case
NS (xs :: v n (Nested v (n ': ns) a)) -> case vUncons s xs of
UNil -> NS vEmpty
UCons (s' :: Sing n')
(y :: Nested v (n ': ns) a)
(ys :: v n' (Nested v (n ': ns) a)) ->
case nesting1 Proxy :: Wit (Functor (v n')) of
Wit -> case diagNV' s' (NS (nTail <$> ys)) of
NS zs -> NS $ vCons (nHead s' y) zs
diagNV
:: (Vec v, Nesting1 Proxy Functor v)
=> Sing n
-> Uniform n ms
-> Nested v (n ': n ': ms) a
-> Nested v '[n] a
diagNV s = \case
UØ -> diagNV' s
US u -> diagNV s u . diagNV' s
itraverseNested
:: forall k (v :: k -> Type -> Type) (ns :: [k]) a b f. (Applicative f, Vec v)
=> (Prod (IndexN k) ns -> a -> f b)
-> Nested v ns a
-> f (Nested v ns b)
itraverseNested f = \case
NØ x -> NØ <$> f Ø x
NS xs -> NS <$> vITraverse (\i -> itraverseNested (\is -> f (i :< is))) xs
gmul'
:: forall ms os ns v a.
( Nesting1 Proxy Functor v
, Nesting1 Sing Applicative v
, SingI ns
, Num a
, Vec v
)
=> Length ms
-> Length os
-> Length ns
-> Nested v (ms ++ os) a
-> Nested v (Reverse os ++ ns) a
-> Nested v (ms ++ ns) a
gmul' lM _ _ x y = joinNested $ mapNVecSlices f lM x
where
f :: Nested v os a
-> Nested v ns a
f = getSum
. getConst
-- . itraverseNested (\i x' -> Const . Sum $ fmap (x' *) (indexNested' (prodReverse' i) y))
. itraverseNested (\i x' -> Const . Sum $ fmap (x' *) (indexNested' (TCP.reverse' i) y))
{-# INLINE gmul' #-}
-- | Transpose by iteratively sequencing/distributing layers
transpose
:: forall v os a.
( Nesting1 Proxy Functor v
, Nesting1 Proxy Foldable v
, Nesting1 Proxy Traversable v
, Nesting1 Sing Distributive v
)
=> Sing os
-> Nested v os a
-> Nested v (Reverse os) a
transpose s = transposeHelp (snocProd (singProd s))
{-# INLINE transpose #-}
transposeHelp
:: forall v os a.
( Nesting1 Proxy Functor v
, Nesting1 Proxy Foldable v
, Nesting1 Proxy Traversable v
, Nesting1 Sing Distributive v
)
=> SnocProd Sing os
-> Nested v os a
-> Nested v (Reverse os) a
transposeHelp = \case
ØS -> \case
NØ x -> NØ x
(sOs' :: SnocProd Sing os') :& (sO :: Sing o) ->
(\\ (nesting1 Proxy :: Wit (Functor (v o)))) $
(\\ (nesting1 sO :: Wit (Distributive (v o)))) $ \x ->
let lOs' :: Length os'
lOs' = snocProdLength sOs'
x' :: Nested v os' (v o a)
x' = mapNVecSlices unVector lOs' x
\\ appendSnoc lOs' sO
xT :: Nested v (Reverse os') (v o a)
xT = transposeHelp sOs' x'
y :: v o (Nested v (Reverse os') a)
y = distribute xT
y' :: Nested v '[o] (Nested v (Reverse os') a)
y' = nVector y
in joinNested y'
\\ snocReverse lOs' sO
-- | Transpose by populating a new 'Nested' from scratch
transpose'
:: Vec v
=> Length os
-> Sing (Reverse os)
-> Nested v os a
-> Nested v (Reverse os) a
transpose' l sR x = genNested sR $ \i -> indexNested (TCP.reverse' i) x
\\ reverseReverse l
{-# INLINE transpose' #-}
nIxRows
:: forall k (v :: k -> Type -> Type) ns ms a b f. (Nesting1 Proxy Functor v, Applicative f, Vec v)
=> Length ns
-> (Prod (IndexN k) ns -> Nested v ms a -> f b)
-> Nested v (ns ++ ms) a
-> f (Nested v ns b)
nIxRows = \case
LZ -> \f -> fmap NØ . f Ø
LS l -> \f -> \case
NS (xs :: v j (Nested v js a)) ->
fmap NS . vITraverse (\i -> nIxRows l (\is ys -> f (i :< is) ys)) $ xs
liftNested
:: Distributive (Nested v ns)
=> (TCV.Vec n a -> a)
-> TCV.Vec n (Nested v ns a)
-> Nested v ns a
liftNested = TCV.liftVecD
{-# INLINE liftNested #-}
sumRowsNested
:: forall v n ns a.
( Foldable (v n)
, Num a
, SingI ns
, Nesting1 Proxy Functor v
, Nesting1 Sing Applicative v
)
=> Nested v (n ': ns) a
-> Nested v ns a
sumRowsNested (NS xs) = sum' (toList xs)
|
mstksg/tensor-ops
|
src/Data/Nested.hs
|
bsd-3-clause
| 17,301 | 1 | 23 | 5,837 | 6,710 | 3,456 | 3,254 | 503 | 2 |
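The Nested type in the file above builds an n-dimensional array as a vector of (n-1)-dimensional arrays, indexed by a type-level list of dimensions. A rough value-level sketch of that shape, with plain lists standing in for the length-indexed vectors (NestedL and indexL are illustrative names, not part of tensor-ops):
data NestedL a = ScalarL a | LayerL [NestedL a]
  deriving Show

-- mirrors indexNested: consume one index per layer, returning Nothing for
-- out-of-range indices instead of relying on type-level lengths
indexL :: [Int] -> NestedL a -> Maybe a
indexL []     (ScalarL x) = Just x
indexL (i:is) (LayerL xs)
  | i >= 0 && i < length xs = indexL is (xs !! i)
  | otherwise               = Nothing
indexL _      _            = Nothing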
{-# LANGUAGE ConstraintKinds, TypeFamilies, TypeOperators, DataKinds, PolyKinds
, FlexibleInstances, UndecidableInstances, ScopedTypeVariables, GADTs #-}
module Units.Convert where
import Prelude hiding (Int)
import Data.Singletons
import GHC.Exts (Constraint)
import Units
import Units.Internal.Types
class IsoDim (u :: Symbol) where
type From u :: Unit
factor :: Fractional a => p u -> a -- From u / u
type family Base (u :: Unit) :: Unit where
Base (EL '[] ) = One
Base (EL ((u:^e)':us)) = From u ^^ e * Base (EL us)
type family HasFactor (u :: Unit) :: Constraint where
HasFactor (EL '[]) = ()
HasFactor (EL ((u :^ e) ': xs)) = (HasFactor (EL xs), IsoDim u)
-- Convert between applicable units
type Convert u v = (Linear u, Linear v, Base u ~ Base v)
convert :: forall a u v. (Fractional a, Convert u v) => a :@ u -> a :@ v
convert (U n) = U (n * f1 / f2)
where f1 = getFactor (Proxy :: Proxy u)
f2 = getFactor (Proxy :: Proxy v)
normalize :: forall a u. (Fractional a, Linear u) => a :@ u -> a :@ Base u
normalize (U n) = U (n * f1)
where f1 = getFactor (Proxy :: Proxy u)
class HasFactor u => Linear (u :: Unit) where
getFactor :: Fractional a => p u -> a
instance Linear (EL '[]) where
getFactor _ = 1
instance (SingRep e, IsoDim x, Linear (EL xs))
=> Linear (EL ((x :^ e) ': xs)) where
getFactor _ = factor (Proxy :: Proxy x) ^^ fromSing (sing :: Sing e)
* getFactor (Proxy :: Proxy (EL xs))
|
haasn/units
|
src/Units/Convert.hs
|
bsd-3-clause
| 1,495 | 0 | 12 | 365 | 655 | 351 | 304 | -1 | -1 |
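The convert function above rescales a quantity through the shared base unit: multiply by the source unit's factor, then divide by the target unit's factor. A minimal value-level sketch of that arithmetic (convertVia is a hypothetical name, not part of the units library):
convertVia :: Fractional a => a -> a -> a -> a
convertVia fromFactor toFactor n = n * fromFactor / toFactor

-- e.g. 5 km to m, assuming a factor of 1000 for "km" and 1 for "m":
-- convertVia 1000 1 5 == 5000.0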
module ABFA.Rand where
import System.Random
-- take n random values in the given bounds from a generator
randomList :: (Random a) => (a, a) -> Int -> StdGen -> [a]
randomList bnds n = take n . randomRs bnds
-- a random Int in the given inclusive range, drawn from the global generator
randomN :: Int -> Int -> IO Int
randomN lo hi = getStdRandom $ randomR (lo, hi)
-- pairs up two equal-length lists (essentially 'uncurry zip')
buildList2 :: ([a], [a]) -> [(a, a)]
buildList2 ([], []) = []
buildList2 (a:as, b:bs) = (a, b) : buildList2 (as, bs)
buildList2 _ = [] -- stop at the shorter list if the lengths differ
newSeeds :: [StdGen]
newSeeds = [s1, s2, s3, s4, s5, s6]
where s1 = mkStdGen 1
s2 = mkStdGen 2
s3 = mkStdGen 3
s4 = mkStdGen 4
s5 = mkStdGen 5
s6 = mkStdGen 6
-- evaluates a function with the n-th StdGen from a list of generators;
-- this works whenever the StdGen is the last argument, as is the library convention
withStdGen :: [StdGen] -> Int -> (StdGen -> a) -> a
withStdGen sgs n f = f s0
where s0 = sgs !! (n-1)
-- splits every generator in the list, giving fresh generators for recursive calls
splitSGs :: [StdGen] -> [StdGen]
splitSGs sgs = map splitSG sgs
splitSG :: StdGen -> StdGen
splitSG sg = snd $ split sg
|
coghex/abridgefaraway
|
src/ABFA/Rand.hs
|
bsd-3-clause
| 1,017 | 0 | 9 | 256 | 404 | 222 | 182 | 26 | 1 |
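A small usage sketch for the helpers above, assuming ABFA.Rand exports everything it defines (it has no explicit export list); the bounds and seed values here are made up for illustration:
import System.Random
import ABFA.Rand

main :: IO ()
main = do
  -- reproducible: five dice rolls from a fixed generator
  print (randomList (1, 6 :: Int) 5 (mkStdGen 42))
  -- three doubles drawn with the first of the predefined seeds
  print (withStdGen newSeeds 1 (randomList (0, 1 :: Double) 3))
  -- one value from the global generator
  randomN 1 100 >>= print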